fix: implement basic recursive file dependency tracking for debugLogs
This commit is contained in:
parent
7b5157f474
commit
4943127ecc
|
@ -40,6 +40,7 @@ your pack WILL fail to load.
|
|||
## Avoid infinite loops
|
||||
Ensure that you do not reference an original file or a previous fallback from a fallback.
|
||||
Respackopts WILL crash if it runs into an infinite loop!
|
||||
If you get a `StackOverflowException`, you can enable `debugLogs`, which might help you figure out what is wrong.
|
||||
|
||||
## Contact me for support
|
||||
If you are unable to identify the issue, you can try [contacting](https://jfronny.gitlab.io/contact.html) me.
|
||||
|
|
|
@ -1,9 +1,7 @@
|
|||
package io.gitlab.jfronny.respackopts;
|
||||
|
||||
import io.gitlab.jfronny.commons.logging.Logger;
|
||||
import io.gitlab.jfronny.gson.Gson;
|
||||
import io.gitlab.jfronny.gson.GsonBuilder;
|
||||
import io.gitlab.jfronny.libjf.config.api.v2.ConfigInstance;
|
||||
import io.gitlab.jfronny.gson.*;
|
||||
import io.gitlab.jfronny.muscript.ast.*;
|
||||
import io.gitlab.jfronny.respackopts.filters.*;
|
||||
import io.gitlab.jfronny.respackopts.gson.*;
|
||||
|
@ -34,7 +32,7 @@ public class Respackopts implements ModInitializer, SaveHook {
|
|||
.registerTypeAdapter(StringExpr.class, new StringExprDeserializer())
|
||||
.registerTypeAdapter(BoolExpr.class, new BoolExprDeserializer())
|
||||
.registerTypeAdapter(Condition.class, new ConditionDeserializer())
|
||||
.setLenient()
|
||||
.setStrictness(Strictness.LENIENT)
|
||||
.setPrettyPrinting()
|
||||
.create();
|
||||
|
||||
|
@ -53,10 +51,9 @@ public class Respackopts implements ModInitializer, SaveHook {
|
|||
|
||||
@Override
|
||||
public void onInitialize() {
|
||||
if (RespackoptsConfig.ioLogs) DebugEvents.preInit();
|
||||
DirFilterEvents.init();
|
||||
FileFilterEvents.init();
|
||||
if (RespackoptsConfig.ioLogs) DebugEvents.postInit();
|
||||
if (RespackoptsConfig.ioLogs) DebugEvents.init();
|
||||
ServerInstanceHolder.init();
|
||||
}
|
||||
|
||||
|
|
|
@ -6,6 +6,9 @@ import io.gitlab.jfronny.libjf.config.api.v2.dsl.DSL;
|
|||
import io.gitlab.jfronny.respackopts.util.MetaCache;
|
||||
import net.fabricmc.loader.api.FabricLoader;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
|
@ -21,13 +24,19 @@ public class RespackoptsConfig implements JfCustomConfig {
|
|||
@Override
|
||||
public void register(DSL.Defaulted dsl) {
|
||||
if (configInstance != null) return;
|
||||
Path dir = FabricLoader.getInstance().getConfigDir().resolve("respackopts");
|
||||
try {
|
||||
Files.createDirectories(dir);
|
||||
} catch (IOException e) {
|
||||
// Ignore for now
|
||||
}
|
||||
configInstance = dsl.register(builder -> builder
|
||||
.value("debugCommands", debugCommands, () -> debugCommands, v -> debugCommands = v)
|
||||
.value("debugLogs", debugLogs, () -> debugLogs, v -> debugLogs = v)
|
||||
.value("ioLogs", ioLogs, () -> ioLogs, v -> ioLogs = v)
|
||||
.value("dashloaderCompat", dashloaderCompat, () -> dashloaderCompat, v -> dashloaderCompat = v)
|
||||
// Not using Respackopts.FALLBACK_CONF_DIR to avoid premature initialization with libjf-unsafe and libjf-config-reflect
|
||||
.setPath(FabricLoader.getInstance().getConfigDir().resolve("respackopts").resolve("_respackopts.conf"))
|
||||
.setPath(dir.resolve("_respackopts.conf"))
|
||||
.referenceConfig(() -> {
|
||||
if (!packsInitialized) return List.of();
|
||||
List<ConfigInstance> instances = new LinkedList<>();
|
||||
|
|
|
@ -14,11 +14,8 @@ import java.util.function.Supplier;
|
|||
public enum DebugEvents implements UserResourceEvents.FindResource, UserResourceEvents.ParseMetadata, UserResourceEvents.Open, UserResourceEvents.OpenRoot {
|
||||
INSTANCE;
|
||||
|
||||
public static void preInit() {
|
||||
public static void init() {
|
||||
UserResourceEvents.FIND_RESOURCE.register(INSTANCE);
|
||||
}
|
||||
|
||||
public static void postInit() {
|
||||
UserResourceEvents.PARSE_METADATA.register(INSTANCE);
|
||||
UserResourceEvents.OPEN.register(INSTANCE);
|
||||
UserResourceEvents.OPEN_ROOT.register(INSTANCE);
|
||||
|
|
|
@ -38,7 +38,9 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
|
|||
if (result == DirRpoResult.IGNORE) return null; // No fallback
|
||||
// Use fallback
|
||||
DirRpoResult.Replacement replacement = (DirRpoResult.Replacement) result;
|
||||
return fs.open(replacement.toFallback(path));
|
||||
String fallback = replacement.toFallback(path);
|
||||
MetaCache.addDependency(key, path, fallback);
|
||||
return fs.open(fallback);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -70,6 +72,7 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
|
|||
}
|
||||
if (!dirFilterAdditive) {
|
||||
// Only return this single result, don't search for others
|
||||
MetaCache.addDependency(key, path, newPath);
|
||||
previous.accept(identifier, fs.open(newPath));
|
||||
return;
|
||||
}
|
||||
|
@ -94,7 +97,9 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
|
|||
ResourcePath rp = new ResourcePath(fallbackDir);
|
||||
pack.findResources(rp.getType(), rp.getId().getNamespace(), rp.getId().getPath(), (resource, resVal) -> {
|
||||
String fallbackPath = path(rp.getType(), resource);
|
||||
previous.accept(new ResourcePath(replacement.toOriginal(fallbackPath)).getId(), resVal);
|
||||
String orig = replacement.toOriginal(fallbackPath);
|
||||
MetaCache.addDependency(key, orig, fallbackPath);
|
||||
previous.accept(new ResourcePath(orig).getId(), resVal);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
@ -131,6 +136,7 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
|
|||
String rp = path + "/" + Respackopts.FILE_EXTENSION;
|
||||
InputSupplier<InputStream> is = UserResourceEvents.disable(() -> fs.open(rp));
|
||||
if (is == null) return parentRPOs;
|
||||
if (state.tracker() != null) state.tracker().addDependency(path, rp);
|
||||
try (Reader w = new InputStreamReader(is.get())) {
|
||||
List<DirRpo> currentRPOs = new LinkedList<>(parentRPOs);
|
||||
DirRpo newRPO = AttachmentHolder.deserialize(state.metadata().version, w, DirRpo.class);
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
package io.gitlab.jfronny.respackopts.filters.util;

import io.gitlab.jfronny.respackopts.Respackopts;

import java.util.*;

/**
 * Tracks which files of a resource pack depend on which others (through rpo fallbacks),
 * maintaining the transitive closure of the dependency graph so recursive fallback
 * chains can be detected and reported while {@code debugLogs} is enabled.
 */
public class FileDependencyTracker {
    private final String pack; // display name of the tracked pack, used in warnings
    private final Map<String, Set<String>> dependencies = new HashMap<>(); // file -> files it (transitively) depends on
    private final Map<String, Set<String>> dependents = new HashMap<>(); // file -> files that (transitively) depend on it
    private final Set<String> reportedRecursions = new HashSet<>(); // files already warned about, to avoid log spam

    public FileDependencyTracker(String pack) {
        this.pack = pack;
    }

    /**
     * Registers that {@code to} depends on {@code on} and propagates the edge transitively
     * to all known dependents of {@code to} and dependencies of {@code on}.
     * A self-dependency indicates a recursive fallback chain; it is reported once per file
     * and not stored.
     *
     * @param to the file that gained a dependency
     * @param on the file it now depends on
     */
    public void addDependency(String to, String on) {
        if (to.equals(on)) {
            if (reportedRecursions.add(to)) Respackopts.LOGGER.warn("Discovered recursive dependency in " + pack + "! If you get a StackOverflowException, please validate your fallbacks for " + to);
            return;
        }
        // Only propagate edges that are actually new: re-processing a known edge would
        // recurse forever on dependency cycles longer than one file (A -> B -> C -> A),
        // since only direct self-edges are caught by the check above.
        boolean isNewEdge = gs(dependencies, to).add(on);
        gs(dependents, on).add(to);
        if (!isNewEdge) return;
        // Iterate over snapshots: the recursive calls mutate the live sets, which would
        // otherwise throw a ConcurrentModificationException mid-iteration.
        for (String dependent : List.copyOf(gs(dependents, to))) addDependency(dependent, on);
        for (String dependency : List.copyOf(gs(dependencies, on))) addDependency(to, dependency);
    }

    // Get-or-create the bucket for the given key.
    private Set<String> gs(Map<String, Set<String>> map, String key) {
        return map.computeIfAbsent(key, k -> new HashSet<>());
    }
}
|
|
@ -3,6 +3,7 @@ package io.gitlab.jfronny.respackopts.filters.util;
|
|||
import io.gitlab.jfronny.respackopts.Respackopts;
|
||||
import io.gitlab.jfronny.respackopts.model.cache.CacheKey;
|
||||
import io.gitlab.jfronny.respackopts.muscript.RespackoptsFS;
|
||||
import io.gitlab.jfronny.respackopts.util.MetaCache;
|
||||
import net.minecraft.resource.*;
|
||||
import net.minecraft.util.Identifier;
|
||||
|
||||
|
@ -14,6 +15,7 @@ public class FileFallbackProvider {
|
|||
return FileRpoSearchProvider.modifyWithRpo(file, key, fs, rpo -> {
|
||||
if (rpo.fallbacks != null) {
|
||||
for (String s : rpo.fallbacks) {
|
||||
MetaCache.addDependency(key, file, s);
|
||||
if (fs.open(s) != null) return true;
|
||||
}
|
||||
}
|
||||
|
@ -26,6 +28,7 @@ public class FileFallbackProvider {
|
|||
try {
|
||||
if (rpo.fallbacks != null) {
|
||||
for (String s : rpo.fallbacks) {
|
||||
MetaCache.addDependency(key, file, s);
|
||||
InputSupplier<InputStream> is = fs.open(s);
|
||||
if (is != null) return is;
|
||||
}
|
||||
|
|
|
@ -25,6 +25,7 @@ public class FileRpoSearchProvider {
|
|||
if (rpoCache.containsKey(rpoPathS)) return action.run(rpoCache.get(rpoPathS));
|
||||
InputSupplier<InputStream> is = fs.open(rpoPathS);
|
||||
if (is == null) return defaultValue;
|
||||
if (state.tracker() != null) state.tracker().addDependency(fileName, rpoPathS);
|
||||
try (Reader w = new InputStreamReader(is.get())) {
|
||||
FileRpo frp = AttachmentHolder.deserialize(state.metadata().version, w, FileRpo.class);
|
||||
frp.hydrate(rpoPathS);
|
||||
|
|
|
@ -26,16 +26,15 @@ public class BoolExprDeserializer implements JsonDeserializer<BoolExpr> {
|
|||
JsonObject jo = json.getAsJsonObject();
|
||||
if (jo.size() != 1)
|
||||
throw new JsonParseException("More than one key in a condition object");
|
||||
for (Map.Entry<String, JsonElement> entry : jo.entrySet()) {
|
||||
return switch (entry.getKey().toLowerCase(Locale.ROOT)) {
|
||||
case "and", "add", "&" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.And);
|
||||
case "==", "=", "equal", "eq" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.EqualEqual);
|
||||
case "not", "nor", "!" -> new Not(CodeLocation.NONE, merge(context.deserialize(entry.getValue(), conditionListType), Token.Or));
|
||||
case "or", "|" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.Or);
|
||||
case "^", "xor" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.BangEqual);
|
||||
default -> throw new JsonParseException("Unknown condition type: " + entry.getKey());
|
||||
};
|
||||
}
|
||||
Map.Entry<String, JsonElement> entry = jo.entrySet().stream().findFirst().orElseThrow();
|
||||
return switch (entry.getKey().toLowerCase(Locale.ROOT)) {
|
||||
case "and", "add", "&" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.And);
|
||||
case "==", "=", "equal", "eq" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.EqualEqual);
|
||||
case "not", "nor", "!" -> new Not(CodeLocation.NONE, merge(context.deserialize(entry.getValue(), conditionListType), Token.Or));
|
||||
case "or", "|" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.Or);
|
||||
case "^", "xor" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.BangEqual);
|
||||
default -> throw new JsonParseException("Unknown condition type: " + entry.getKey());
|
||||
};
|
||||
}
|
||||
else if (json.isJsonArray()) {
|
||||
return merge(context.deserialize(json, conditionListType), Token.And);
|
||||
|
|
|
@ -31,6 +31,7 @@ public class ExprDeserializer implements JsonDeserializer<Expr<?>> {
|
|||
}
|
||||
}
|
||||
else {
|
||||
if (json.isJsonObject()) throw new JsonParseException("Could not parse script: Expected string but got object (did you forget to migrate this rpo to muScript?)");
|
||||
throw new JsonParseException("Could not parse script: Expected string");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,9 +2,13 @@ package io.gitlab.jfronny.respackopts.model.cache;
|
|||
|
||||
import io.gitlab.jfronny.muscript.data.Scope;
|
||||
import io.gitlab.jfronny.muscript.data.Script;
|
||||
import io.gitlab.jfronny.respackopts.Respackopts;
|
||||
import io.gitlab.jfronny.respackopts.RespackoptsConfig;
|
||||
import io.gitlab.jfronny.respackopts.filters.util.FileDependencyTracker;
|
||||
import io.gitlab.jfronny.respackopts.model.*;
|
||||
import io.gitlab.jfronny.respackopts.model.tree.ConfigBranch;
|
||||
import io.gitlab.jfronny.respackopts.muscript.MuScriptScope;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
|
@ -18,8 +22,9 @@ public record CachedPackState(
|
|||
Map<String, List<DirRpo>> cachedDirRPOs, // Directory RPOs, from outermost to innermost
|
||||
Map<String, Script> cachedScripts, // Scripts, available via runScript
|
||||
Map<String, String> cachedFiles, // Files, read by readString
|
||||
Scope executionScope
|
||||
) {
|
||||
Scope executionScope,
|
||||
@Nullable FileDependencyTracker tracker
|
||||
) {
|
||||
public CachedPackState(CacheKey key, PackMeta meta, ConfigBranch branch) {
|
||||
this(
|
||||
meta.id,
|
||||
|
@ -31,7 +36,8 @@ public record CachedPackState(
|
|||
new HashMap<>(),
|
||||
new HashMap<>(),
|
||||
new HashMap<>(),
|
||||
branch.addTo(MuScriptScope.fork(meta.version))
|
||||
branch.addTo(MuScriptScope.fork(meta.version)),
|
||||
RespackoptsConfig.debugLogs ? new FileDependencyTracker(key.displayName()) : null
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,6 +4,7 @@ import io.gitlab.jfronny.commons.throwable.ThrowingBiConsumer;
|
|||
import io.gitlab.jfronny.muscript.data.Scope;
|
||||
import io.gitlab.jfronny.respackopts.Respackopts;
|
||||
import io.gitlab.jfronny.respackopts.RespackoptsConfig;
|
||||
import io.gitlab.jfronny.respackopts.filters.util.FileDependencyTracker;
|
||||
import io.gitlab.jfronny.respackopts.integration.SaveHook;
|
||||
import io.gitlab.jfronny.respackopts.model.PackMeta;
|
||||
import io.gitlab.jfronny.respackopts.model.cache.CacheKey;
|
||||
|
@ -181,6 +182,11 @@ public class MetaCache {
|
|||
return scope;
|
||||
}
|
||||
|
||||
public static void addDependency(CacheKey key, String to, String on) {
|
||||
FileDependencyTracker tracker = getState(key).tracker();
|
||||
if (tracker != null) tracker.addDependency(to, on);
|
||||
}
|
||||
|
||||
public static boolean hasCapability(ResourcePack pack, PackCapability capability) {
|
||||
CacheKey key = getKeyByPack(pack);
|
||||
if (key == null) return false;
|
||||
|
|
Loading…
Reference in New Issue