2.6.2 - Fix JsonShopConfiguration saves #157

Open · wants to merge 42 commits into base: master
Changes from 1 commit
Commits (42)
af66aff
Add 'radius' as alias for 'distance' in /ts find
SparklingComet Oct 8, 2023
e6e4268
Version change to 2.6.2-DEV
SparklingComet Oct 8, 2023
c773a35
Make shop saving sync again
SparklingComet Oct 8, 2023
cfbcc92
Implement thread pool for JsonShopConfiguration saves
SparklingComet Oct 8, 2023
5ee50f1
Improve saving logic for maxThreads == 0 and filesBeingSaved
SparklingComet Oct 9, 2023
68293ed
Fix sync (maxThreads=0) saving
SparklingComet Oct 9, 2023
2d04772
Fix early loop end
SparklingComet Oct 9, 2023
558d0de
Remove unnecessary verification debug
SparklingComet Oct 9, 2023
306a0be
Work on concurrency issues
SparklingComet Oct 9, 2023
55b1540
Add comment about max-save-threads
SparklingComet Oct 11, 2023
f613e82
Switch from SaveTasks having their own private files and queues to mo…
SparklingComet Oct 11, 2023
bb0e518
Add synchronized(file) lock
SparklingComet Oct 12, 2023
d7c6576
Cache File objects for synchronized(File) lock
SparklingComet Oct 12, 2023
6371430
Reduce some excessive saving
SparklingComet Oct 13, 2023
e2d0a16
Rename Json...Configuration to Json...Data and add JsonShopData cache
SparklingComet Oct 13, 2023
9898123
Add build number from Jenkins to startup splash message
SparklingComet Oct 14, 2023
a5e114d
"Fix" for excess '}' in Shop save files.
KillerOfPie Oct 19, 2023
b927bc6
Fix for .json.bak files being included when counting shops in world.
KillerOfPie Oct 20, 2023
86f6745
Pick changes from 2.6.2-Debug
KillerOfPie Oct 30, 2023
34cc5d4
Replacing Gson Implementation to fix saving bugs
KillerOfPie Nov 5, 2023
991857f
Fix String errors from previous build
KillerOfPie Nov 10, 2023
a687c38
Fix NPE error when chest_linkage has no content
KillerOfPie Nov 12, 2023
dce1c96
Fix NPE error when loading data from a null shopLocation
KillerOfPie Nov 16, 2023
a4114a3
Fix NPE error on PlayerJoinEvent due to non-existent player data
KillerOfPie Nov 16, 2023
058f649
Fix UOE when reading old shop data
KillerOfPie Nov 16, 2023
4474ce3
Fix for reading shop data
KillerOfPie Nov 23, 2023
85c02aa
Fix for NPE while reading shop data
KillerOfPie Nov 23, 2023
8d0fa4f
Fix for errors reading shop and player files
KillerOfPie Nov 23, 2023
727d7af
Fix for errors reading shop and player files
KillerOfPie Nov 24, 2023
3d72c5c
Fix for errors saving shop data
KillerOfPie Nov 25, 2023
b494deb
Rewrite ShopChunk Loading to avoid jsonObj
KillerOfPie Nov 25, 2023
efe0067
Rewrite ShopChunk Loading to avoid jsonObj
KillerOfPie Nov 26, 2023
1ebb5d1
2.6.2 PTSD bug fix bandaging
KillerOfPie Dec 30, 2023
1eb53bd
Merge branch '2.6.2' into detached
KillerOfPie Dec 30, 2023
a07a1fd
2.6.2 PTSD bug fix bandaging
KillerOfPie Dec 30, 2023
0d3f736
PTSD Merger Fixes
KillerOfPie Dec 30, 2023
10ad85f
Fix for Creation values being deleted instead of ignored.
KillerOfPie Dec 30, 2023
54a7f3b
Fix for Shops not loading from file
KillerOfPie Dec 31, 2023
538712e
Potential fix for missing Items after server restart
KillerOfPie Dec 31, 2023
fb45e68
Exception fixes
KillerOfPie Jan 1, 2024
4fce958
Config Comments/FileCaching
KillerOfPie Jan 2, 2024
89f300f
Fixed Shop creation Error
KillerOfPie Jan 18, 2024
Implement thread pool for JsonShopConfiguration saves
SparklingComet committed Oct 8, 2023
commit cfbcc92fabc326fafd8cec0bc6bd5861aa114a21
4 changes: 4 additions & 0 deletions src/main/java/org/shanerx/tradeshop/TradeShop.java
@@ -37,6 +37,8 @@
import org.shanerx.tradeshop.data.config.Language;
import org.shanerx.tradeshop.data.config.Setting;
import org.shanerx.tradeshop.data.storage.DataStorage;
import org.shanerx.tradeshop.data.storage.DataType;
import org.shanerx.tradeshop.data.storage.Json.JsonShopConfiguration;
import org.shanerx.tradeshop.player.JoinEventListener;
import org.shanerx.tradeshop.player.Permissions;
import org.shanerx.tradeshop.shop.ShopSign;
@@ -112,6 +114,8 @@ public void onEnable() {

@Override
public void onDisable() {
varManager.getDataStorage().ensureFinalSave();

if (getListManager() != null)
getListManager().clearManager();
}
src/main/java/org/shanerx/tradeshop/data/config/Setting.java
@@ -51,6 +51,7 @@ public enum Setting {

// System Options
DATA_STORAGE_TYPE(SettingSection.SYSTEM_OPTIONS, "data-storage-type", "FLATFILE"),
MAX_SAVE_THREADS(SettingSection.SYSTEM_OPTIONS, "max-save-threads", 1),
ENABLE_DEBUG(SettingSection.SYSTEM_OPTIONS, "enable-debug", 0),
CHECK_UPDATES(SettingSection.SYSTEM_OPTIONS, "check-updates", true),
ALLOW_METRICS(SettingSection.SYSTEM_OPTIONS, "allow-metrics", true),
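The new max-save-threads entry is what the rest of this pull request keys off: the configured value is clamped to be non-negative, 0 means shop files are written synchronously on the calling thread, and any larger value caps how many asynchronous SaveTask workers may run at once (see the SaveThreadMaster constructor and enqueue() further down in this diff). Below is a minimal, self-contained illustration of that clamping rule; the class and method names are hypothetical, and only the Math.max(0, ...) logic is taken from the diff.

public class MaxSaveThreadsExample {

    // Hypothetical helper mirroring the clamping done in SaveThreadMaster's constructor.
    static int resolveMaxThreads(int configured) {
        return Math.max(0, configured); // negative values collapse to 0, i.e. fully synchronous saving
    }

    public static void main(String[] args) {
        System.out.println(resolveMaxThreads(-3)); // 0 -> saves run on the calling thread
        System.out.println(resolveMaxThreads(0));  // 0 -> saves run on the calling thread
        System.out.println(resolveMaxThreads(4));  // 4 -> at most four async save workers at a time
    }
}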
src/main/java/org/shanerx/tradeshop/data/storage/DataStorage.java
@@ -67,7 +67,6 @@
public class DataStorage extends Utils {

private transient DataType dataType;
public final Map<File, String> saving;
private final String BROKEN_JSON_START = "}(.*[\"\\w:])";

private final Cache<World, LinkageConfiguration> linkCache = CacheBuilder.newBuilder()
@@ -84,7 +83,6 @@ public class DataStorage extends Utils {
.build();

public DataStorage(DataType dataType) {
saving = new ConcurrentHashMap<>();
reload(dataType);
}

@@ -131,8 +129,6 @@ public boolean validate() {
}
});

TradeShop.getPlugin().getDebugger().log("FLATFILE Malformed Files Found: " + correctedFiles.size(), DebugLevels.DATA_ERROR);

//Write corrected malformed files
if (correctedFiles.size() > 0) {
correctedFiles.forEach((k, v) -> {
@@ -314,5 +310,13 @@ protected LinkageConfiguration getLinkageConfiguration(World w) {

throw new NotImplementedException("Data storage type " + dataType + " has not been implemented yet.");
}

public void ensureFinalSave() {
// for onDisable !!!
if (dataType == DataType.FLATFILE) {
JsonShopConfiguration.SaveThreadMaster.getInstance().saveEverythingNow();
}
// SQLITE will have an analogous branch
}
}

src/main/java/org/shanerx/tradeshop/data/storage/Json/JsonShopConfiguration.java
@@ -27,21 +27,25 @@

import com.google.common.collect.Sets;
import com.google.gson.JsonElement;
import org.bukkit.Bukkit;
import com.google.gson.JsonObject;
import org.bukkit.scheduler.BukkitRunnable;
import org.shanerx.tradeshop.TradeShop;
import org.shanerx.tradeshop.data.config.Setting;
import org.shanerx.tradeshop.data.storage.ShopConfiguration;
import org.shanerx.tradeshop.shop.Shop;
import org.shanerx.tradeshop.shoplocation.ShopChunk;
import org.shanerx.tradeshop.shoplocation.ShopLocation;
import org.shanerx.tradeshop.utils.debug.DebugLevels;
import org.shanerx.tradeshop.utils.gsonprocessing.GsonProcessor;
import org.shanerx.tradeshop.utils.objects.Tuple;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.logging.Level;
import java.util.logging.Logger;

public class JsonShopConfiguration extends JsonConfiguration implements ShopConfiguration {

@@ -65,12 +69,8 @@ public void save(Shop shop) {

@Override
public void remove(ShopLocation loc) {
System.out.println("PREV: " + gson.toJson(jsonObj));

if (jsonObj.has(loc.serialize()))
jsonObj.remove(loc.serialize());

System.out.println("NOW: " + gson.toJson(jsonObj));
saveFile();
}

@@ -117,29 +117,7 @@ public int size() {

@Override
protected void saveFile() {
if (PLUGIN.getDataStorage().saving.containsKey(this.file)) {
return;
}

final String str = gson.toJson(jsonObj);
if (str.isEmpty() || jsonObj.entrySet().isEmpty()) {
this.file.delete();
return;
}

PLUGIN.getDataStorage().saving.put(this.file, str);
// Bukkit.getScheduler().runTaskAsynchronously(TradeShop.getPlugin(), () -> {
try {
System.out.println("SAVE: " + str);
FileWriter fileWriter = new FileWriter(this.file);
fileWriter.write(str);
fileWriter.flush();
fileWriter.close();
} catch (IOException e) {
PLUGIN.getLogger().log(Level.SEVERE, "Could not save " + this.file.getName() + " file! Data may be lost!", e);
}
PLUGIN.getDataStorage().saving.remove(this.file);
// });
SaveThreadMaster.getInstance().enqueue(this.file, this.jsonObj);
}

@Override
@@ -160,4 +138,123 @@ protected void loadFile() {
}
}
}

public static class SaveThreadMaster {

private static SaveThreadMaster singleton;

private Queue<Tuple<File, JsonObject>> saveQueue;
private Set<BukkitRunnable> runningTasks;
private Map<File, SaveTask> filesBeingSaved;

private int maxThreads;
private final GsonProcessor gson = new GsonProcessor();

private SaveThreadMaster() {
if (singleton != null) {
throw new UnsupportedOperationException("Attempting to create further instance of singleton class SaveThreadMaster!");
}

saveQueue = new ConcurrentLinkedQueue<>();
runningTasks = new HashSet<>();
filesBeingSaved = new ConcurrentHashMap<>();
maxThreads = Math.max(0, Setting.MAX_SAVE_THREADS.getInt());

singleton = this;
}

public static SaveThreadMaster getInstance() {
if (singleton == null) {
return new SaveThreadMaster();
}

return singleton;
}

Queue<Tuple<File, JsonObject>> getSaveQueue() {
return saveQueue;
}

private SaveTask makeRunnable() {
return new SaveTask();
}

void enqueue(File file, JsonObject jsonObj) {
if (filesBeingSaved.containsKey(file)) {
SaveTask task = filesBeingSaved.get(file);
task.enqueue(new Tuple<>(file, jsonObj));
} else {
saveQueue.add(new Tuple<>(file, jsonObj));
}

if (maxThreads == 0) {
makeRunnable().run();
if (!saveQueue.isEmpty()) {
throw new IllegalStateException("saveQueue should be empty but has unsaved shop data: " + saveQueue.size());
}
} else if (runningTasks.size() < maxThreads) {
makeRunnable().runTaskAsynchronously(TradeShop.getPlugin());
}
}

public void saveEverythingNow() {
if (saveQueue.isEmpty()) return;
for (int i = 0; i < maxThreads; ++i) {
makeRunnable().runTaskAsynchronously(TradeShop.getPlugin());
}
}
}

static class SaveTask extends BukkitRunnable {
private SaveThreadMaster master;
private Queue<Tuple<File, JsonObject>> ownQueue;

SaveTask() {
this.master = SaveThreadMaster.getInstance();
ownQueue = new ConcurrentLinkedQueue<>();
}

private Tuple<File, JsonObject> pollNext() {
if (!ownQueue.isEmpty()) return ownQueue.poll();
else return master.getSaveQueue().poll();
}

void enqueue(Tuple<File, JsonObject> elem) {
ownQueue.add(elem);
}

@Override
public void run() {
Logger logger = TradeShop.getPlugin().getLogger();
Tuple<File, JsonObject> elem;

while ((elem = pollNext()) != null) {
File file = elem.getLeft();
JsonObject jsonObj = elem.getRight();
String str = master.gson.toJson(jsonObj);

if (str.isEmpty() || jsonObj.entrySet().isEmpty()) {
file.delete();
return;
}

try {
FileWriter fileWriter = new FileWriter(file);
fileWriter.write(str);
fileWriter.flush();
fileWriter.close();
} catch (IOException e) {
logger.log(Level.SEVERE, "Could not save " + file.getName() + " file! Data may be lost!", e);
}
}

// task dies now:
master.filesBeingSaved.remove(this);
}

@Override
public int hashCode() {
return super.getTaskId();
}
}
}
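
Taken as a whole, the new saving path works like this: saveFile() no longer writes to disk itself but hands the (File, JsonObject) pair to the SaveThreadMaster singleton, which keeps one shared queue and starts a new asynchronous SaveTask whenever fewer than max-save-threads tasks are recorded as running; each worker drains the queue, serializes the JSON and writes the file, then terminates. The sketch below is a minimal, self-contained rendering of that pattern using plain threads; QueuedSaver, enqueue and drain are illustrative names only, and the real implementation uses BukkitRunnable, GsonProcessor and Tuple instead.

import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.util.AbstractMap.SimpleEntry;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;

final class QueuedSaver {
    private final Queue<Map.Entry<Path, String>> queue = new ConcurrentLinkedQueue<>();
    private final AtomicInteger runningWorkers = new AtomicInteger(0);
    private final int maxThreads;

    QueuedSaver(int maxThreads) {
        this.maxThreads = Math.max(0, maxThreads); // 0 => synchronous saving, as in SaveThreadMaster
    }

    void enqueue(Path file, String contents) {
        queue.add(new SimpleEntry<>(file, contents));
        if (maxThreads == 0) {
            drain(); // save immediately on the caller's thread
        } else if (runningWorkers.get() < maxThreads) {
            runningWorkers.incrementAndGet();
            // The plugin schedules a BukkitRunnable asynchronously; a plain thread stands in for it here.
            new Thread(() -> {
                try {
                    drain();
                } finally {
                    runningWorkers.decrementAndGet(); // worker dies once the queue is empty
                }
            }).start();
        }
    }

    private void drain() {
        Map.Entry<Path, String> next;
        while ((next = queue.poll()) != null) {
            try (FileWriter out = new FileWriter(next.getKey().toFile())) {
                out.write(next.getValue());
            } catch (IOException e) {
                System.err.println("Could not save " + next.getKey() + ": " + e.getMessage());
            }
        }
    }
}

With max-save-threads set to 1 this reduces to a single background writer. The real SaveThreadMaster additionally tracks a filesBeingSaved map so that a file already being written has its newer contents queued onto the same task rather than onto the shared queue.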