// Respackopts/src/main/java/io/gitlab/jfronny/respackopts/util/MetaCache.java
package io.gitlab.jfronny.respackopts.util;
import io.gitlab.jfronny.commons.throwable.ThrowingBiConsumer;
import io.gitlab.jfronny.libjf.LibJf;
import io.gitlab.jfronny.muscript.data.additional.context.Scope;
import io.gitlab.jfronny.respackopts.Respackopts;
import io.gitlab.jfronny.respackopts.RespackoptsConfig;
import io.gitlab.jfronny.respackopts.filters.util.FileDependencyTracker;
import io.gitlab.jfronny.respackopts.integration.SaveHook;
import io.gitlab.jfronny.respackopts.model.PackMeta;
import io.gitlab.jfronny.respackopts.model.cache.CacheKey;
import io.gitlab.jfronny.respackopts.model.cache.CachedPackState;
import io.gitlab.jfronny.respackopts.model.enums.ConfigSyncMode;
import io.gitlab.jfronny.respackopts.model.enums.PackCapability;
import io.gitlab.jfronny.respackopts.model.tree.ConfigBranch;
import io.gitlab.jfronny.respackopts.model.tree.GC_ConfigBranch;
import io.gitlab.jfronny.respackopts.muscript.MuScriptScope;
import io.gitlab.jfronny.respackopts.muscript.RespackoptsFS;
import net.fabricmc.api.EnvType;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.resource.ResourcePack;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.CompletableFuture;
public class MetaCache {
// Primary cache: full pack state keyed by the composite CacheKey.
// Also serves as the lock object for all map mutations in this class.
private static final Map<CacheKey, CachedPackState> PACK_STATES = new HashMap<>();
// Secondary indices mapping one component of a CacheKey back to the full key.
// Writes are guarded by synchronized (PACK_STATES); some read paths are not - see individual methods.
private static final Map<String, CacheKey> KEYS_BY_DISPLAY_NAME = new HashMap<>();
private static final Map<String, CacheKey> KEYS_BY_PACK_NAME = new HashMap<>();
private static final Map<Path, CacheKey> KEYS_BY_DATA_LOCATION = new HashMap<>();
/**
 * Evicts {@code key} from the cache, along with any stale key that overlaps
 * with it in one of the secondary indices. For example, an old pack at the
 * same data location but with a different name must also be removed.
 */
public static void remove(CacheKey key) {
    synchronized (PACK_STATES) {
        // Each recursive call re-enters the (reentrant) lock and terminates
        // once the overlapping index entries have all been drained.
        CacheKey overlapping = KEYS_BY_DATA_LOCATION.remove(key.dataLocation());
        if (overlapping != null) remove(overlapping);
        overlapping = KEYS_BY_PACK_NAME.remove(key.packName());
        if (overlapping != null) remove(overlapping);
        overlapping = KEYS_BY_DISPLAY_NAME.remove(key.displayName());
        if (overlapping != null) remove(overlapping);
        PACK_STATES.remove(key);
    }
}
/** Drops every cached pack state together with all secondary indices. */
public static void clear() {
    synchronized (PACK_STATES) {
        KEYS_BY_DATA_LOCATION.clear();
        KEYS_BY_PACK_NAME.clear();
        KEYS_BY_DISPLAY_NAME.clear();
        PACK_STATES.clear();
    }
}
/**
 * Registers a pack discovered during a resource scan: syncs or clones its
 * config branch, (re)indexes it, migrates any legacy config file, then loads
 * saved values and writes the config back to disk.
 *
 * @param displayName  human-readable pack name
 * @param packName     internal pack id
 * @param meta         parsed rpo metadata for the pack
 * @param dataLocation path of the pack's persisted config file
 */
public static void addFromScan(String displayName, String packName, PackMeta meta, Path dataLocation) {
    if (Respackopts.META_VERSION > meta.version) {
        Respackopts.LOGGER.warn(displayName + " uses an outdated RPO format (" + meta.version + "). Although this is supported, using the latest version (" + Respackopts.META_VERSION + ") is recommended");
    }
    meta.conf.setVersion(meta.version);
    if (meta.version < 5) meta.capabilities.add(PackCapability.DirFilter);
    // Reuse the existing branch with a RESPACK_LOAD sync if present.
    // Fix: consult the index under the PACK_STATES lock, consistent with every
    // other access path in this class (the unsynchronized containsKey/get pair
    // could race with concurrent remove()/clear() calls).
    ConfigBranch branch;
    synchronized (PACK_STATES) {
        CacheKey previous = KEYS_BY_DATA_LOCATION.get(dataLocation);
        if (previous != null) {
            branch = getState(previous).configBranch();
            branch.sync(meta.conf, ConfigSyncMode.RESPACK_LOAD);
        } else {
            branch = meta.conf.clone();
        }
    }
    // Register the key and state in the relevant maps. remove() first evicts
    // stale entries that share a display name, pack name or data location.
    CacheKey key = new CacheKey(displayName, packName, dataLocation);
    CachedPackState state = new CachedPackState(key, meta, branch);
    remove(key);
    synchronized (PACK_STATES) {
        PACK_STATES.put(key, state);
        KEYS_BY_DISPLAY_NAME.put(key.displayName(), key);
        KEYS_BY_PACK_NAME.put(key.packName(), key);
        KEYS_BY_DATA_LOCATION.put(key.dataLocation(), key);
    }
    // Move old configs to the new location
    if (!dataLocation.startsWith(Respackopts.FALLBACK_CONF_DIR)) {
        Path legacyLocation = Respackopts.FALLBACK_CONF_DIR.resolve(meta.id + ".json");
        if (Files.exists(legacyLocation) && !Files.exists(dataLocation)) {
            try {
                Files.move(legacyLocation, dataLocation);
            } catch (IOException e) {
                Respackopts.LOGGER.error("Could not move data to new location", e);
            }
        }
    }
    load(key);
    // NOTE(review): this persists meta.conf rather than the branch stored in the
    // cache; when an existing branch was reused above the two may differ - confirm intended.
    save(dataLocation, meta.conf);
}
/**
 * Persists every cached config branch to disk, then invokes all registered
 * save hooks (plus client-only hooks when running on the client).
 *
 * @param args context forwarded to each {@link SaveHook}
 * @return a future that completes once every hook has finished
 */
public static CompletableFuture<Void> save(SaveHook.Arguments args) {
    if (RespackoptsConfig.debugLogs) Respackopts.LOGGER.info("Saving configs");
    synchronized (PACK_STATES) {
        PACK_STATES.forEach((key, state) -> save(key.dataLocation(), state.configBranch()));
    }
    List<CompletableFuture<Void>> pending = new ArrayList<>();
    FabricLoader loader = FabricLoader.getInstance();
    for (SaveHook hook : loader.getEntrypoints(Respackopts.ID + ":save_hook", SaveHook.class)) {
        pending.add(hook.onSave(args));
    }
    if (loader.getEnvironmentType() == EnvType.CLIENT) {
        for (SaveHook hook : loader.getEntrypoints(Respackopts.ID + ":client_save_hook", SaveHook.class)) {
            pending.add(hook.onSave(args));
        }
    }
    return CompletableFuture.allOf(pending.toArray(CompletableFuture[]::new));
}
/**
 * Serializes a single config branch to {@code dataLocation}.
 * I/O failures are logged rather than propagated.
 */
private static void save(Path dataLocation, ConfigBranch branch) {
    try (var out = Files.newBufferedWriter(dataLocation)) {
        GC_ConfigBranch.serialize(branch, out, LibJf.LENIENT_TRANSPORT);
    } catch (IOException e) {
        Respackopts.LOGGER.error("Could not save config", e);
    }
}
/**
 * Loads previously saved config values for {@code key} from its data file,
 * if one exists, and merges them into the cached branch with CONF_LOAD
 * semantics. I/O failures are logged rather than propagated.
 */
public static void load(CacheKey key) {
    // Nothing persisted yet - nothing to merge
    if (!Files.exists(key.dataLocation())) return;
    if (RespackoptsConfig.debugLogs)
        Respackopts.LOGGER.info("Loading configs for: " + key.displayName());
    try (Reader reader = Files.newBufferedReader(key.dataLocation())) {
        ConfigBranch b = GC_ConfigBranch.deserialize(reader, LibJf.LENIENT_TRANSPORT);
        // Fix: check-and-sync under the lock, consistent with the other
        // PACK_STATES accesses (the unsynchronized containsKey + getBranch
        // pair could race with concurrent remove()/clear() calls).
        synchronized (PACK_STATES) {
            CachedPackState state = PACK_STATES.get(key);
            if (state != null) state.configBranch().sync(b, ConfigSyncMode.CONF_LOAD);
        }
    } catch (IOException e) {
        Respackopts.LOGGER.error("Failed to load " + key.displayName(), e);
    }
}
/** @return the parsed rpo metadata cached for {@code key} */
public static PackMeta getMeta(CacheKey key) {
    CachedPackState state = getState(key);
    return state.metadata();
}
/** Resolves the cache key registered for a resource pack via its pack id, or null if unknown. */
public static @Nullable CacheKey getKeyByPack(ResourcePack pack) {
    String packName = pack.getId();
    return KEYS_BY_PACK_NAME.get(packName);
}
/** Resolves the cache key registered under {@code displayName}, or null if unknown. */
public static @Nullable CacheKey getKeyByDisplayName(String displayName) {
    CacheKey key = KEYS_BY_DISPLAY_NAME.get(displayName);
    return key;
}
/** Resolves the cache key registered for a config data file, or null if unknown. */
public static @Nullable CacheKey getKeyByDataLocation(Path dataLocation) {
    CacheKey key = KEYS_BY_DATA_LOCATION.get(dataLocation);
    return key;
}
/** @return the mutable config branch cached for {@code key} */
public static ConfigBranch getBranch(CacheKey key) {
    CachedPackState state = getState(key);
    return state.configBranch();
}
/** @return the pack id cached for {@code key} */
public static String getId(CacheKey key) {
    CachedPackState state = getState(key);
    return state.packId();
}
/** Raw lookup into the state cache; returns null when {@code key} is not registered. */
public static CachedPackState getState(CacheKey key) {
    CachedPackState state = PACK_STATES.get(key);
    return state;
}
/** Creates an execution scope for the given meta version, pre-populated with every pack's config branch. */
public static Scope getScope(int version) {
    return populate(MuScriptScope.fork(version));
}
/** Creates an execution scope for a specific pack, wired to its filesystem and pre-populated with every pack's config branch. */
public static Scope getScope(@NotNull CacheKey key, RespackoptsFS fs) {
    CachedPackState state = getState(key);
    Scope forked = state.executionScope().fork();
    Scope withFs = MuScriptScope.configureFS(forked, state, fs);
    return populate(withFs);
}
/**
 * Copies each registered pack's config branch into {@code scope} under its
 * pack id, skipping ids the scope already defines.
 *
 * @return the same {@code scope} instance, for chaining
 */
private static Scope populate(Scope scope) {
    forEach((key, state) -> {
        String packId = state.packId();
        if (!scope.has(packId)) scope.set(packId, state.configBranch());
    });
    return scope;
}
/** Records that file {@code to} depends on file {@code on} within the pack identified by {@code key}. */
public static void addDependency(CacheKey key, String to, String on) {
    FileDependencyTracker tracker = getState(key).tracker();
    if (tracker == null) return; // this pack does not track file dependencies
    tracker.addDependency(to, on);
}
/**
 * Checks whether the pack backing {@code pack} declares {@code capability}.
 *
 * @return false when the pack is not managed by respackopts at all
 * @throws NullPointerException if the pack is indexed but its cached state is
 *         missing (cache inconsistency; the message lists the available keys)
 */
public static boolean hasCapability(ResourcePack pack, PackCapability capability) {
    CacheKey key = getKeyByPack(pack);
    if (key == null) return false;
    // Fix: look up and use the state under the lock, consistent with the other
    // PACK_STATES accesses (the unsynchronized containsKey check could race
    // with concurrent remove()/clear() calls).
    synchronized (PACK_STATES) {
        CachedPackState state = PACK_STATES.get(key);
        if (state == null) {
            // StringJoiner replaces the fragile manual substring(0, len - 2) trimming
            StringJoiner available = new StringJoiner(", ");
            for (CacheKey known : PACK_STATES.keySet()) {
                available.add(String.valueOf(known));
            }
            throw new NullPointerException("Could not get pack with \"" + key + "\" (available: " + available + ')');
        }
        return state.metadata().capabilities.contains(capability);
    }
}
/**
 * Invokes {@code idAndBranchConsumer} for every cached pack. Iteration runs
 * over a snapshot of the entry set, so the consumer may safely mutate the
 * cache (the lock is reentrant).
 *
 * @param <TEx> exception type the consumer is allowed to throw
 * @throws TEx propagated from the consumer
 */
public static <TEx extends Exception> void forEach(ThrowingBiConsumer<CacheKey, CachedPackState, TEx> idAndBranchConsumer) throws TEx {
    synchronized (PACK_STATES) {
        List<Map.Entry<CacheKey, CachedPackState>> snapshot = List.copyOf(PACK_STATES.entrySet());
        for (Map.Entry<CacheKey, CachedPackState> entry : snapshot) {
            idAndBranchConsumer.accept(entry.getKey(), entry.getValue());
        }
    }
}
}