fix: implement basic recursive file dependency tracking for debugLogs
ci/woodpecker/push/docs: Pipeline was successful
ci/woodpecker/push/jfmod: Pipeline was successful

Johannes Frohnmeyer 2023-10-05 18:18:04 +02:00
parent 7b5157f474
commit 4943127ecc
Signed by: Johannes
GPG Key ID: E76429612C2929F4
12 changed files with 83 additions and 26 deletions

File 1 of 12: documentation (markdown)

@@ -40,6 +40,7 @@ your pack WILL fail to load.
 ## Avoid infinite loops
 Ensure that you do not reference an original file or a previous fallback from a fallback.
 Respackopts WILL crash if it runs into an infinite loop!
+If you get a `StackOverflowException`, you can enable `debugLogs`, which might help you figure out what is wrong.
 ## Contact me for support
 If you are unable to identify the issue, you can try [contacting](https://jfronny.gitlab.io/contact.html) me.
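To make the loop described in that documentation hunk concrete, here is an illustrative pair of file RPOs whose fallbacks reference each other. The paths are made up, and the `fallbacks` field shown is the one read by `FileFallbackProvider` further down in this commit:

```
assets/example/textures/block/a.png.rpo:
  { "fallbacks": ["assets/example/textures/block/b.png"] }

assets/example/textures/block/b.png.rpo:
  { "fallbacks": ["assets/example/textures/block/a.png"] }
```

Resolving `a.png` falls back to `b.png`, whose RPO points back at `a.png`, so resolution never terminates. With `debugLogs` enabled, the `FileDependencyTracker` added in this commit collapses such a cycle into a self-dependency and logs a warning naming the offending file, which is the hint the new documentation line refers to.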

File 2 of 12: Respackopts.java

@@ -1,9 +1,7 @@
 package io.gitlab.jfronny.respackopts;
 import io.gitlab.jfronny.commons.logging.Logger;
-import io.gitlab.jfronny.gson.Gson;
-import io.gitlab.jfronny.gson.GsonBuilder;
-import io.gitlab.jfronny.libjf.config.api.v2.ConfigInstance;
+import io.gitlab.jfronny.gson.*;
 import io.gitlab.jfronny.muscript.ast.*;
 import io.gitlab.jfronny.respackopts.filters.*;
 import io.gitlab.jfronny.respackopts.gson.*;
@@ -34,7 +32,7 @@ public class Respackopts implements ModInitializer, SaveHook {
 .registerTypeAdapter(StringExpr.class, new StringExprDeserializer())
 .registerTypeAdapter(BoolExpr.class, new BoolExprDeserializer())
 .registerTypeAdapter(Condition.class, new ConditionDeserializer())
-.setLenient()
+.setStrictness(Strictness.LENIENT)
 .setPrettyPrinting()
 .create();
@@ -53,10 +51,9 @@ public class Respackopts implements ModInitializer, SaveHook {
 @Override
 public void onInitialize() {
-if (RespackoptsConfig.ioLogs) DebugEvents.preInit();
 DirFilterEvents.init();
 FileFilterEvents.init();
-if (RespackoptsConfig.ioLogs) DebugEvents.postInit();
+if (RespackoptsConfig.ioLogs) DebugEvents.init();
 ServerInstanceHolder.init();
 }

File 3 of 12: RespackoptsConfig.java

@@ -6,6 +6,9 @@ import io.gitlab.jfronny.libjf.config.api.v2.dsl.DSL;
 import io.gitlab.jfronny.respackopts.util.MetaCache;
 import net.fabricmc.loader.api.FabricLoader;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.LinkedList;
 import java.util.List;
@@ -21,13 +24,19 @@ public class RespackoptsConfig implements JfCustomConfig {
 @Override
 public void register(DSL.Defaulted dsl) {
 if (configInstance != null) return;
+Path dir = FabricLoader.getInstance().getConfigDir().resolve("respackopts");
+try {
+Files.createDirectories(dir);
+} catch (IOException e) {
+// Ignore for now
+}
 configInstance = dsl.register(builder -> builder
 .value("debugCommands", debugCommands, () -> debugCommands, v -> debugCommands = v)
 .value("debugLogs", debugLogs, () -> debugLogs, v -> debugLogs = v)
 .value("ioLogs", ioLogs, () -> ioLogs, v -> ioLogs = v)
 .value("dashloaderCompat", dashloaderCompat, () -> dashloaderCompat, v -> dashloaderCompat = v)
 // Not using Respackopts.FALLBACK_CONF_DIR to avoid premature initialization with libjf-unsafe and libjf-config-reflect
-.setPath(FabricLoader.getInstance().getConfigDir().resolve("respackopts").resolve("_respackopts.conf"))
+.setPath(dir.resolve("_respackopts.conf"))
 .referenceConfig(() -> {
 if (!packsInitialized) return List.of();
 List<ConfigInstance> instances = new LinkedList<>();

File 4 of 12: DebugEvents.java

@@ -14,11 +14,8 @@ import java.util.function.Supplier;
 public enum DebugEvents implements UserResourceEvents.FindResource, UserResourceEvents.ParseMetadata, UserResourceEvents.Open, UserResourceEvents.OpenRoot {
 INSTANCE;
-public static void preInit() {
+public static void init() {
 UserResourceEvents.FIND_RESOURCE.register(INSTANCE);
-}
-public static void postInit() {
 UserResourceEvents.PARSE_METADATA.register(INSTANCE);
 UserResourceEvents.OPEN.register(INSTANCE);
 UserResourceEvents.OPEN_ROOT.register(INSTANCE);

File 5 of 12: DirFilterEvents.java

@@ -38,7 +38,9 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
 if (result == DirRpoResult.IGNORE) return null; // No fallback
 // Use fallback
 DirRpoResult.Replacement replacement = (DirRpoResult.Replacement) result;
-return fs.open(replacement.toFallback(path));
+String fallback = replacement.toFallback(path);
+MetaCache.addDependency(key, path, fallback);
+return fs.open(fallback);
 }
 @Override
@@ -70,6 +72,7 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
 }
 if (!dirFilterAdditive) {
 // Only return this single result, don't search for others
+MetaCache.addDependency(key, path, newPath);
 previous.accept(identifier, fs.open(newPath));
 return;
 }
@@ -94,7 +97,9 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
 ResourcePath rp = new ResourcePath(fallbackDir);
 pack.findResources(rp.getType(), rp.getId().getNamespace(), rp.getId().getPath(), (resource, resVal) -> {
 String fallbackPath = path(rp.getType(), resource);
-previous.accept(new ResourcePath(replacement.toOriginal(fallbackPath)).getId(), resVal);
+String orig = replacement.toOriginal(fallbackPath);
+MetaCache.addDependency(key, orig, fallbackPath);
+previous.accept(new ResourcePath(orig).getId(), resVal);
 });
 };
 }
@@ -131,6 +136,7 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
 String rp = path + "/" + Respackopts.FILE_EXTENSION;
 InputSupplier<InputStream> is = UserResourceEvents.disable(() -> fs.open(rp));
 if (is == null) return parentRPOs;
+if (state.tracker() != null) state.tracker().addDependency(path, rp);
 try (Reader w = new InputStreamReader(is.get())) {
 List<DirRpo> currentRPOs = new LinkedList<>(parentRPOs);
 DirRpo newRPO = AttachmentHolder.deserialize(state.metadata().version, w, DirRpo.class);

File 6 of 12: FileDependencyTracker.java (new file)

@@ -0,0 +1,31 @@
+package io.gitlab.jfronny.respackopts.filters.util;
+import io.gitlab.jfronny.respackopts.Respackopts;
+import java.util.*;
+public class FileDependencyTracker {
+private final String pack;
+private final Map<String, Set<String>> dependencies = new HashMap<>();
+private final Map<String, Set<String>> dependents = new HashMap<>();
+private final Set<String> reportedRecursions = new HashSet<>();
+public FileDependencyTracker(String pack) {
+this.pack = pack;
+}
+public void addDependency(String to, String on) {
+if (to.equals(on)) {
+if (reportedRecursions.add(to)) Respackopts.LOGGER.warn("Discovered recursive dependency in " + pack + "! If you get a StackOverflowException, please validate your fallbacks for " + to);
+return;
+}
+gs(dependencies, to).add(on);
+gs(dependents, on).add(to);
+gs(dependents, to).forEach(dp -> addDependency(dp, on));
+gs(dependencies, on).forEach(dp -> addDependency(to, dp));
+}
+private Set<String> gs(Map<String, Set<String>> map, String key) {
+return map.computeIfAbsent(key, _1 -> new HashSet<>());
+}
+}
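FileDependencyTracker is the core of the commit but ships without comments, so a short note: each reported edge `to -> on` ("`to` falls back on `on`") is stored in a forward map (`dependencies`) and a reverse map (`dependents`), then re-propagated so both relations stay transitively closed; if propagation ever yields a file that depends on itself, the fallback chain contains a cycle, and a warning naming the pack and the file is logged at most once per file. Below is a minimal standalone re-sketch of that bookkeeping, illustrative only and not the committed class: the logger is swapped for `System.out`, and an early-return guard plus snapshot copies are added so the propagation is clearly terminating.

```java
import java.util.*;

// Simplified re-sketch of the transitive dependency bookkeeping (names are made up)
public class DependencyTrackerSketch {
    private final Map<String, Set<String>> dependencies = new HashMap<>(); // file -> files it (transitively) falls back to
    private final Map<String, Set<String>> dependents = new HashMap<>();   // file -> files that (transitively) fall back to it
    private final Set<String> reportedRecursions = new HashSet<>();

    public void addDependency(String to, String on) {
        if (to.equals(on)) {
            // A self-dependency can only appear if the fallback chain loops somewhere
            if (reportedRecursions.add(to)) System.out.println("Recursive dependency detected, check the fallbacks for " + to);
            return;
        }
        if (!gs(dependencies, to).add(on)) return; // edge already known, nothing new to propagate
        gs(dependents, on).add(to);
        // Keep both relations transitively closed; iterate over copies because the recursive calls mutate the sets
        for (String dp : List.copyOf(gs(dependents, to))) addDependency(dp, on);
        for (String dp : List.copyOf(gs(dependencies, on))) addDependency(to, dp);
    }

    private Set<String> gs(Map<String, Set<String>> map, String key) {
        return map.computeIfAbsent(key, k -> new HashSet<>());
    }

    public static void main(String[] args) {
        DependencyTrackerSketch tracker = new DependencyTrackerSketch();
        tracker.addDependency("a.png", "b.png"); // a falls back to b
        tracker.addDependency("b.png", "a.png"); // b falls back to a: propagation yields a.png -> a.png and b.png -> b.png
    }
}
```

Running `main` prints the recursion message for both `a.png` and `b.png`, which mirrors how the real tracker would warn about the documentation example above once the filter hooks have reported both fallback edges.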

File 7 of 12: FileFallbackProvider.java

@@ -3,6 +3,7 @@ package io.gitlab.jfronny.respackopts.filters.util;
 import io.gitlab.jfronny.respackopts.Respackopts;
 import io.gitlab.jfronny.respackopts.model.cache.CacheKey;
 import io.gitlab.jfronny.respackopts.muscript.RespackoptsFS;
+import io.gitlab.jfronny.respackopts.util.MetaCache;
 import net.minecraft.resource.*;
 import net.minecraft.util.Identifier;
@@ -14,6 +15,7 @@ public class FileFallbackProvider {
 return FileRpoSearchProvider.modifyWithRpo(file, key, fs, rpo -> {
 if (rpo.fallbacks != null) {
 for (String s : rpo.fallbacks) {
+MetaCache.addDependency(key, file, s);
 if (fs.open(s) != null) return true;
 }
 }
@@ -26,6 +28,7 @@ public class FileFallbackProvider {
 try {
 if (rpo.fallbacks != null) {
 for (String s : rpo.fallbacks) {
+MetaCache.addDependency(key, file, s);
 InputSupplier<InputStream> is = fs.open(s);
 if (is != null) return is;
 }

File 8 of 12: FileRpoSearchProvider.java

@@ -25,6 +25,7 @@ public class FileRpoSearchProvider {
 if (rpoCache.containsKey(rpoPathS)) return action.run(rpoCache.get(rpoPathS));
 InputSupplier<InputStream> is = fs.open(rpoPathS);
 if (is == null) return defaultValue;
+if (state.tracker() != null) state.tracker().addDependency(fileName, rpoPathS);
 try (Reader w = new InputStreamReader(is.get())) {
 FileRpo frp = AttachmentHolder.deserialize(state.metadata().version, w, FileRpo.class);
 frp.hydrate(rpoPathS);

File 9 of 12: BoolExprDeserializer.java

@@ -26,16 +26,15 @@ public class BoolExprDeserializer implements JsonDeserializer<BoolExpr> {
 JsonObject jo = json.getAsJsonObject();
 if (jo.size() != 1)
 throw new JsonParseException("More than one key in a condition object");
-for (Map.Entry<String, JsonElement> entry : jo.entrySet()) {
+Map.Entry<String, JsonElement> entry = jo.entrySet().stream().findFirst().orElseThrow();
 return switch (entry.getKey().toLowerCase(Locale.ROOT)) {
 case "and", "add", "&" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.And);
 case "==", "=", "equal", "eq" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.EqualEqual);
 case "not", "nor", "!" -> new Not(CodeLocation.NONE, merge(context.deserialize(entry.getValue(), conditionListType), Token.Or));
 case "or", "|" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.Or);
 case "^", "xor" -> merge(context.deserialize(entry.getValue(), conditionListType), Token.BangEqual);
 default -> throw new JsonParseException("Unknown condition type: " + entry.getKey());
 };
-}
 }
 else if (json.isJsonArray()) {
 return merge(context.deserialize(json, conditionListType), Token.And);

File 10 of 12: ExprDeserializer.java

@@ -31,6 +31,7 @@ public class ExprDeserializer implements JsonDeserializer<Expr<?>> {
 }
 }
 else {
+if (json.isJsonObject()) throw new JsonParseException("Could not parse script: Expected string but got object (did you forget to migrate this rpo to muScript?)");
 throw new JsonParseException("Could not parse script: Expected string");
 }
 }

File 11 of 12: CachedPackState.java

@@ -2,9 +2,13 @@ package io.gitlab.jfronny.respackopts.model.cache;
 import io.gitlab.jfronny.muscript.data.Scope;
 import io.gitlab.jfronny.muscript.data.Script;
+import io.gitlab.jfronny.respackopts.Respackopts;
+import io.gitlab.jfronny.respackopts.RespackoptsConfig;
+import io.gitlab.jfronny.respackopts.filters.util.FileDependencyTracker;
 import io.gitlab.jfronny.respackopts.model.*;
 import io.gitlab.jfronny.respackopts.model.tree.ConfigBranch;
 import io.gitlab.jfronny.respackopts.muscript.MuScriptScope;
+import org.jetbrains.annotations.Nullable;
 import java.util.*;
@@ -18,8 +22,9 @@ public record CachedPackState(
 Map<String, List<DirRpo>> cachedDirRPOs, // Directory RPOs, from outermost to innermost
 Map<String, Script> cachedScripts, // Scripts, available via runScript
 Map<String, String> cachedFiles, // Files, read by readString
-Scope executionScope
-) {
+Scope executionScope,
+@Nullable FileDependencyTracker tracker
+) {
 public CachedPackState(CacheKey key, PackMeta meta, ConfigBranch branch) {
 this(
 meta.id,
@@ -31,7 +36,8 @@ public record CachedPackState(
 new HashMap<>(),
 new HashMap<>(),
 new HashMap<>(),
-branch.addTo(MuScriptScope.fork(meta.version))
+branch.addTo(MuScriptScope.fork(meta.version)),
+RespackoptsConfig.debugLogs ? new FileDependencyTracker(key.displayName()) : null
 );
 }
 }

File 12 of 12: MetaCache.java

@@ -4,6 +4,7 @@ import io.gitlab.jfronny.commons.throwable.ThrowingBiConsumer;
 import io.gitlab.jfronny.muscript.data.Scope;
 import io.gitlab.jfronny.respackopts.Respackopts;
 import io.gitlab.jfronny.respackopts.RespackoptsConfig;
+import io.gitlab.jfronny.respackopts.filters.util.FileDependencyTracker;
 import io.gitlab.jfronny.respackopts.integration.SaveHook;
 import io.gitlab.jfronny.respackopts.model.PackMeta;
 import io.gitlab.jfronny.respackopts.model.cache.CacheKey;
@@ -181,6 +182,11 @@ public class MetaCache {
 return scope;
 }
+public static void addDependency(CacheKey key, String to, String on) {
+FileDependencyTracker tracker = getState(key).tracker();
+if (tracker != null) tracker.addDependency(to, on);
+}
 public static boolean hasCapability(ResourcePack pack, PackCapability capability) {
 CacheKey key = getKeyByPack(pack);
 if (key == null) return false;