package io.gitlab.jfronny.resclone;

import com.google.gson.reflect.TypeToken;
import io.gitlab.jfronny.commons.serialize.MalformedDataException;
import io.gitlab.jfronny.commons.serialize.Token;
import io.gitlab.jfronny.commons.serialize.json.JsonReader;
import io.gitlab.jfronny.commons.serialize.json.JsonWriter;
import io.gitlab.jfronny.libjf.LibJf;
import io.gitlab.jfronny.libjf.config.api.v2.JfCustomConfig;
import io.gitlab.jfronny.libjf.config.api.v2.dsl.DSL;
import io.gitlab.jfronny.resclone.data.GC_PackMetaUnloaded;
import io.gitlab.jfronny.resclone.data.PackMetaUnloaded;
import io.gitlab.jfronny.resclone.util.ListAdaptation;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Set;

/**
 * Hand-rolled config for Resclone, registered with the LibJf config DSL.
 *
 * <p>The on-disk format is a JSON object with four keys ({@code packs},
 * {@code pruneUnused}, {@code filterPacks}, {@code logProcessing}); a legacy
 * bare-array format (just the packs list) is still read and transparently
 * migrated by rewriting the file in the new format.
 *
 * <p>Not thread-safe: state lives in mutable static fields and load/write are
 * expected to be driven by the config DSL only.
 */
public class RescloneConfig implements JfCustomConfig {
    /** Packs to download and load. Populated by {@link #load(Path)}. */
    public static Set<PackMetaUnloaded> packs;
    /** Delete downloaded packs no longer present in the config (default true). */
    public static boolean pruneUnused;
    /** Strip vanilla-identical files and empty dirs from packs (default true). */
    public static boolean filterPacks;
    /** Log automatic pack-processing steps (default false). */
    public static boolean logProcessing;

    private static final String ERR_DUPLICATE = "Unexpected duplicate \"%s\" in Resclone config";
    private static final String PACKS = "packs";
    private static final String PRUNE_UNUSED = "pruneUnused";
    private static final String FILTER_PACKS = "filterPacks";
    private static final String LOG_PROCESSING = "logProcessing";

    /**
     * Loads the config from {@code path} into the static fields.
     *
     * <p>If the file is missing, defaults are applied and written out. If the
     * file uses the legacy array format, or omits any of the boolean keys, the
     * missing values are defaulted and the file is rewritten in full.
     *
     * @param path config file location
     * @throws IOException            on read/write failure
     * @throws MalformedDataException on duplicate keys, unknown keys, a missing
     *                                {@code packs} entry, or an unexpected root token
     */
    private static void load(Path path) throws IOException {
        if (!Files.exists(path)) {
            // First run: seed defaults and persist them so the user has a file to edit.
            packs = new HashSet<>();
            pruneUnused = true;
            filterPacks = true;
            logProcessing = false;
            write(path);
            return;
        }
        boolean updateRequired = false;
        try (BufferedReader br = Files.newBufferedReader(path);
             JsonReader reader = LibJf.LENIENT_TRANSPORT.createReader(br)) {
            if (reader.peek() == Token.BEGIN_ARRAY) {
                // Legacy format compatibility: the whole file is just the packs array.
                packs = ListAdaptation.deserializeSet(reader, GC_PackMetaUnloaded::deserialize);
                updateRequired = true;
            } else if (reader.peek() == Token.BEGIN_OBJECT) {
                // New format: object with packs + boolean flags.
                reader.beginObject();
                // Locals start null so duplicate keys can be detected.
                Set<PackMetaUnloaded> packs = null;
                Boolean pruneUnused = null;
                Boolean filterPacks = null;
                Boolean logProcessing = null;
                while (reader.peek() != Token.END_OBJECT) {
                    final String name = reader.nextName();
                    switch (name) {
                        case PACKS -> {
                            if (packs != null) throw new MalformedDataException(ERR_DUPLICATE.formatted(PACKS));
                            if (reader.peek() == Token.BEGIN_ARRAY) {
                                packs = ListAdaptation.deserializeSet(reader, GC_PackMetaUnloaded::deserialize);
                            } else {
                                // Allow a single pack object in place of an array.
                                packs = Set.of(GC_PackMetaUnloaded.deserialize(reader));
                            }
                        }
                        case PRUNE_UNUSED -> {
                            if (pruneUnused != null) throw new MalformedDataException(ERR_DUPLICATE.formatted(PRUNE_UNUSED));
                            pruneUnused = reader.nextBoolean();
                        }
                        case FILTER_PACKS -> {
                            if (filterPacks != null) throw new MalformedDataException(ERR_DUPLICATE.formatted(FILTER_PACKS));
                            filterPacks = reader.nextBoolean();
                        }
                        case LOG_PROCESSING -> {
                            if (logProcessing != null) throw new MalformedDataException(ERR_DUPLICATE.formatted(LOG_PROCESSING));
                            logProcessing = reader.nextBoolean();
                        }
                        default -> throw new MalformedDataException("Unexpected element: \"" + name + "\" in Resclone config");
                    }
                }
                reader.endObject();
                if (packs == null) throw new MalformedDataException("Expected Resclone config object to contain packs");
                // Missing booleans get defaults; flag the file for rewrite so it becomes complete.
                if (pruneUnused == null) {
                    pruneUnused = true;
                    updateRequired = true;
                }
                if (filterPacks == null) {
                    filterPacks = true;
                    updateRequired = true;
                }
                if (logProcessing == null) {
                    logProcessing = false;
                    updateRequired = true;
                }
                // Commit to the static fields only after the whole object parsed successfully.
                RescloneConfig.packs = packs;
                RescloneConfig.pruneUnused = pruneUnused;
                RescloneConfig.filterPacks = filterPacks;
                RescloneConfig.logProcessing = logProcessing;
            } else throw new MalformedDataException("Expected Resclone config to be an object or array");
        }
        if (updateRequired) write(path);
    }

    /**
     * Serializes the current static config state to {@code path} in the new
     * object format, with explanatory comments for each key (the lenient
     * transport supports comments).
     *
     * @param path config file location
     * @throws IOException on write failure
     */
    private static void write(Path path) throws IOException {
        try (BufferedWriter bw = Files.newBufferedWriter(path);
             JsonWriter writer = LibJf.LENIENT_TRANSPORT.createWriter(bw)) {
            writer.beginObject()
                    .comment("The packs to be loaded by resclone")
                    .name(PACKS)
                    .beginArray();
            for (PackMetaUnloaded pack : packs) {
                GC_PackMetaUnloaded.serialize(pack, writer);
            }
            writer.endArray()
                    .comment("Automatically remove all downloaded packs that are not in the config to free up unneeded space")
                    .name(PRUNE_UNUSED)
                    .value(pruneUnused)
                    .comment("Whether to filter packs to remove files unchanged from vanilla and empty directories")
                    .name(FILTER_PACKS)
                    .value(filterPacks)
                    .comment("Log automatic processing steps applied to downloaded packs")
                    .name(LOG_PROCESSING)
                    .value(logProcessing)
                    .endObject();
        }
    }

    static {
        // Register this config with the LibJf DSL and perform the initial load.
        Path path = Resclone.getConfigPath().resolve("config.json");
        DSL.create(Resclone.MOD_ID).register(builder -> builder
                .setLoadMethod(configInstance -> {
                    try {
                        load(path);
                    } catch (IOException e) {
                        Resclone.LOGGER.error("Could not load config", e);
                    }
                })
                .setWriteMethod(configInstance -> {
                    try {
                        write(path);
                    } catch (IOException e) {
                        Resclone.LOGGER.error("Could not write config", e);
                    }
                })
                .setPath(path)
                // Explicit type witness so the DSL entry is typed as Set<PackMetaUnloaded>;
                // TypeToken captures the generic type for (de)serialization.
                .<Set<PackMetaUnloaded>>value(PACKS, new HashSet<>(),
                        Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
                        io.gitlab.jfronny.libjf.config.api.v2.type.Type.ofClass(new TypeToken<Set<PackMetaUnloaded>>(){}.getType()),
                        100, () -> packs, p -> packs = p)
                .value(PRUNE_UNUSED, pruneUnused, () -> pruneUnused, p -> pruneUnused = p)
                .value(FILTER_PACKS, filterPacks, () -> filterPacks, p -> filterPacks = p)
                .value(LOG_PROCESSING, logProcessing, () -> logProcessing, p -> logProcessing = p)
        ).load();
    }

    /**
     * No-op: registration is done eagerly in the static initializer above
     * rather than through the defaulted DSL hook.
     */
    @Override
    public void register(DSL.Defaulted dsl) {
    }
}