Parallelize the scanPacks hook. Minor performance bump, probably not worth it, but I'd already invested the time, so why not.
ci/woodpecker/push/docs Pipeline was successful
ci/woodpecker/push/jfmod Pipeline failed

Johannes Frohnmeyer 2023-06-27 23:54:55 +02:00
parent 0dad978c11
commit 9f34e576c9
Signed by: Johannes
GPG Key ID: E76429612C2929F4
17 changed files with 326 additions and 87 deletions
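
In short, work that previously ran per pack in a plain loop (pack scanning in ResourcePackManagerMixin, shader source generation in RespackoptsClient, config saving in MetaCache) now submits one task per pack to a new ConcurrentScope helper and joins the tasks when the scope closes. A simplified sketch of the scanPacks change, condensed from the mixin diff below (the per-pack body is abbreviated here):

// Condensed sketch: one task per pack profile, joined when the scope closes.
try (ConcurrentScope scope = new ConcurrentScope()) {
    profiles.forEach((name, profile) -> scope.submit(() -> {
        try (ResourcePack pack = profile.createResourcePack()) {
            // per-pack work: read the respackopts config and fallback translations
        }
    }));
} catch (ExecutionException | InterruptedException e) {
    Respackopts.LOGGER.error("Could not scan packs", e);
}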

View File

@@ -5,8 +5,12 @@ plugins {
}
repositories {
maven("https://maven.shedaniel.me/") { name = "Cloth Config" }
maven("https://maven.vram.io/") { name = "FREX/Canvas" }
maven("https://maven.vram.io/") {
name = "FREX/Canvas"
content {
includeGroup("io.vram")
}
}
maven("https://notalpha.dev/maven/releases") {
name = "DashLoader"
content {
@@ -40,4 +44,6 @@ dependencies {
// DashLoader "compatibility"
// modClientRuntimeOnly("dev.notalpha:dashloader:5.0.0-beta.1+1.20.0")
// modClientRuntimeOnly("io.gitlab.jfronny:async-pack-scan:0.1.0")
}

View File

@@ -1,6 +1,6 @@
# https://fabricmc.net/develop/
minecraft_version=1.20
yarn_mappings=build.1
minecraft_version=1.20.1
yarn_mappings=build.8
loader_version=0.14.21
maven_group=io.gitlab.jfronny
@@ -13,6 +13,6 @@ curseforge_id=430090
curseforge_required_dependencies=fabric-api, libjf
curseforge_optional_dependencies=modmenu
fabric_version=0.83.0+1.20
fabric_version=0.84.0+1.20.1
libjf_version=3.8.0
muscript_version=1.3-SNAPSHOT

View File

@@ -1,16 +1,19 @@
package io.gitlab.jfronny.respackopts;
import io.gitlab.jfronny.respackopts.integration.*;
import io.gitlab.jfronny.respackopts.integration.DashLoaderCompat;
import io.gitlab.jfronny.respackopts.integration.FrexCompat;
import io.gitlab.jfronny.respackopts.integration.v1.SaveHook;
import io.gitlab.jfronny.respackopts.util.MetaCache;
import io.gitlab.jfronny.respackopts.util.concurrent.ConcurrentScope;
import io.gitlab.jfronny.respackopts.util.concurrent.JoinedFuture;
import net.fabricmc.api.*;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.client.MinecraftClient;
import net.minecraft.server.integrated.IntegratedServer;
import net.minecraft.util.Identifier;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.*;
import java.util.concurrent.*;
@Environment(EnvType.CLIENT)
public class RespackoptsClient implements ClientModInitializer, SaveHook {
@@ -37,13 +40,31 @@ public class RespackoptsClient implements ClientModInitializer, SaveHook {
}
@Override
public CompletableFuture<Void> onSave(Arguments args) {
public Future<?> onSave(Arguments args) {
List<Future<?>> futures = new ArrayList<>();
if (RespackoptsConfig.debugLogs)
Respackopts.LOGGER.info("Generating shader code");
StringBuilder sb = new StringBuilder();
sb.append("#ifndef respackopts_loaded");
sb.append("\n#define respackopts_loaded");
MetaCache.forEach((key, state) -> state.configBranch().buildShader(sb, Respackopts.sanitizeString(state.packId())));
List<StringBuilder> sbs = new LinkedList<>();
try (ConcurrentScope scope = new ConcurrentScope()) {
MetaCache.forEach((key, state) -> {
StringBuilder inner = new StringBuilder();
sbs.add(inner);
scope.submit(() -> {
state.configBranch().buildShader(inner, Respackopts.sanitizeString(state.packId()));
});
});
} catch (ExecutionException | InterruptedException e) {
Respackopts.LOGGER.error("Could not complete shader", e);
}
for (StringBuilder inner : sbs) {
sb.append(inner);
}
sb.append("\n#endif");
RespackoptsClient.shaderImportSource = sb.toString();
if (FREX_LOADED) {
@@ -54,14 +75,13 @@ public class RespackoptsClient implements ClientModInitializer, SaveHook {
forcePackReload = true;
DashLoaderCompat.requestForceReload();
}
List<CompletableFuture<Void>> futures = new ArrayList<>();
if (args.reloadResourcesImmediately()) {
futures.add(forceReloadResources());
}
if (args.reloadData()) {
futures.add(reloadIntegratedServerData());
}
return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new));
return JoinedFuture.of(futures);
}
public static CompletableFuture<Void> forceReloadResources() {

View File

@@ -3,7 +3,7 @@ package io.gitlab.jfronny.respackopts;
import io.gitlab.jfronny.commons.log.Logger;
import io.gitlab.jfronny.gson.Gson;
import io.gitlab.jfronny.gson.GsonBuilder;
import io.gitlab.jfronny.libjf.config.api.v1.ConfigHolder;
import io.gitlab.jfronny.libjf.config.api.v1.ConfigInstance;
import io.gitlab.jfronny.muscript.StandardLib;
import io.gitlab.jfronny.muscript.ast.*;
import io.gitlab.jfronny.muscript.data.Scope;
@@ -12,7 +12,7 @@ import io.gitlab.jfronny.respackopts.filters.DirFilterEvents;
import io.gitlab.jfronny.respackopts.filters.FileFilterEvents;
import io.gitlab.jfronny.respackopts.gson.*;
import io.gitlab.jfronny.respackopts.gson.entry.*;
import io.gitlab.jfronny.respackopts.integration.SaveHook;
import io.gitlab.jfronny.respackopts.integration.v1.SaveHook;
import io.gitlab.jfronny.respackopts.model.Condition;
import io.gitlab.jfronny.respackopts.model.tree.*;
import io.gitlab.jfronny.respackopts.server.ServerInstanceHolder;
@@ -77,11 +77,14 @@ public class Respackopts implements ModInitializer, SaveHook {
ServerInstanceHolder.init();
}
private static final EnvType ENV_TYPE = FabricLoader.getInstance().getEnvironmentType();
public static ConfigInstance CONFIG;
@Override
public CompletableFuture<Void> onSave(Arguments args) {
ConfigHolder.getInstance().get(ID).write();
CONFIG.write();
if (args.reloadData() && FabricLoader.getInstance().getEnvironmentType() == EnvType.SERVER) {
if (args.reloadData() && ENV_TYPE == EnvType.SERVER) {
ServerInstanceHolder.reloadResources();
}

View File

@@ -15,7 +15,8 @@ public class RespackoptsConfig implements JfCustomConfig {
@Override
public void register(DSL.Defaulted dsl) {
dsl.register(builder -> builder
if (Respackopts.CONFIG != null) return;
Respackopts.CONFIG = dsl.register(builder -> builder
.value("debugCommands", debugCommands, () -> debugCommands, v -> debugCommands = v)
.value("debugLogs", debugLogs, () -> debugLogs, v -> debugLogs = v)
.value("dashloaderCompat", dashloaderCompat, () -> dashloaderCompat, v -> dashloaderCompat = v)

View File

@@ -77,7 +77,7 @@ public enum DirFilterEvents implements UserResourceEvents.Open, UserResourceEven
try {
return !rpo.condition.get(MetaCache.getScope(key));
} catch (Condition.ConditionException e) {
String res = "Could not evaluate condition " + file + " (pack: " + key.packName() + ")";
String res = "Could not evaluate condition for " + file + " (pack: " + key.packName() + ")";
try {
Respackopts.LOGGER.error(res + " with condition:\n" + ObjectGraphPrinter.printGraph(rpo.condition) + ")", e);
} catch (Throwable ex) {

View File

@@ -1,10 +1,17 @@
package io.gitlab.jfronny.respackopts.integration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;
public interface SaveHook {
@Deprecated
public interface SaveHook extends io.gitlab.jfronny.respackopts.integration.v1.SaveHook {
CompletableFuture<Void> onSave(Arguments args);
@Override
default Future<Void> onSave(io.gitlab.jfronny.respackopts.integration.v1.SaveHook.Arguments args) {
return onSave(new Arguments(args.flagResourcesForReload(), args.reloadResourcesImmediately(), args.reloadData()));
}
record Arguments(boolean flagResourcesForReload, boolean reloadResourcesImmediately, boolean reloadData) {
public static final Arguments DO_NOTHING = new Arguments(false, false, false);
public static final Arguments RELOAD_ALL = new Arguments(true, true, true);

View File

@@ -0,0 +1,12 @@
package io.gitlab.jfronny.respackopts.integration.v1;
import java.util.concurrent.Future;
public interface SaveHook {
Future<?> onSave(Arguments args);
record Arguments(boolean flagResourcesForReload, boolean reloadResourcesImmediately, boolean reloadData) {
public static final Arguments DO_NOTHING = new Arguments(false, false, false);
public static final Arguments RELOAD_ALL = new Arguments(true, true, true);
}
}
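
For downstream mods, an implementation of the new v1 interface only needs to return some Future; a CompletableFuture works fine. A hypothetical example (class and package names are made up, and registration is assumed to go through the same save-hook entrypoint that MetaCache queries below):

package com.example.mymod;

import io.gitlab.jfronny.respackopts.integration.v1.SaveHook;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;

// Hypothetical save hook: rebuild this mod's pack-derived caches when respackopts saves.
public class ExampleSaveHook implements SaveHook {
    @Override
    public Future<?> onSave(Arguments args) {
        if (!args.reloadData()) return CompletableFuture.completedFuture(null);
        return CompletableFuture.runAsync(() -> {
            // recompute whatever depends on resource pack data here
        });
    }
}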

View File

@@ -2,14 +2,13 @@ package io.gitlab.jfronny.respackopts.mixin;
import io.gitlab.jfronny.respackopts.Respackopts;
import io.gitlab.jfronny.respackopts.RespackoptsConfig;
import io.gitlab.jfronny.respackopts.integration.SaveHook;
import io.gitlab.jfronny.respackopts.integration.v1.SaveHook;
import io.gitlab.jfronny.respackopts.model.PackMeta;
import io.gitlab.jfronny.respackopts.model.cache.CacheKey;
import io.gitlab.jfronny.respackopts.util.FallbackI18n;
import io.gitlab.jfronny.respackopts.util.MetaCache;
import io.gitlab.jfronny.respackopts.util.*;
import io.gitlab.jfronny.respackopts.util.concurrent.ConcurrentScope;
import net.minecraft.resource.*;
import net.minecraft.util.Identifier;
import net.minecraft.util.Language;
import org.spongepowered.asm.mixin.*;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
@@ -18,23 +17,32 @@ import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import java.io.*;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ExecutionException;
@Mixin(ResourcePackManager.class)
public class ResourcePackManagerMixin {
@Shadow private Map<String, ResourcePackProfile> profiles;
@Unique private final Set<Path> dataLocations = new HashSet<>();
@Unique private boolean reloaderRunning;
@Unique private boolean shouldReload;
@Inject(at = @At("TAIL"), method = "scanPacks()V")
private void scanPacks(CallbackInfo info) {
FallbackI18n.clear();
Set<Path> newDataLocations = new HashSet<>();
Set<Path> toRemove = new HashSet<>(dataLocations);
try (ConcurrentScope scope = new ConcurrentScope()) {
profiles.forEach((s, v) -> scope.submit(() -> {
try (ResourcePack rpi = v.createResourcePack()) {
String id = rpo$checkProfile(s, v.getDisplayName().getString(), rpi, newDataLocations, toRemove);
if (id != null) FallbackI18n.loadFrom(rpi, id);
}
}));
} catch (ExecutionException | InterruptedException e) {
Respackopts.LOGGER.error("Could not scan packs", e);
}
dataLocations.clear();
profiles.forEach((s, v) -> {
try (ResourcePack rpi = v.createResourcePack()) {
String id = rpo$checkProfile(s, v.getDisplayName().getString(), rpi, toRemove);
if (id != null) rpo$loadTranslations(rpi, id);
}
});
dataLocations.addAll(newDataLocations);
for (Path s : toRemove) {
CacheKey k = MetaCache.getKeyByDataLocation(s);
if (k != null) MetaCache.remove(k);
@@ -42,7 +50,7 @@ public class ResourcePackManagerMixin {
MetaCache.save(SaveHook.Arguments.DO_NOTHING);
}
private String rpo$checkProfile(String profileName, String displayName, ResourcePack rpi, Set<Path> toRemove) {
private String rpo$checkProfile(String profileName, String displayName, ResourcePack rpi, Set<Path> dataLocations, Set<Path> toRemove) {
Path dataLocation = null;
if (rpi instanceof DirectoryResourcePack drp) {
Path pack = ((DirectoryResourcePackAccessor) drp).getRoot();
@@ -57,7 +65,7 @@
var conf = rpi.openRoot(Respackopts.ID + ".json5");
if (conf != null) {
try (InputStream is = conf.get()) {
return rpo$readConfiguration(is, dataLocation, rpi.getName(), displayName, toRemove);
return rpo$readConfiguration(is, dataLocation, rpi.getName(), displayName, dataLocations, toRemove);
} catch (Throwable e) {
String message = "Could not read respackopts config in root for " + profileName;
if (RespackoptsConfig.debugLogs) Respackopts.LOGGER.error(message, e);
@@ -70,7 +78,7 @@
conf = rpi.open(type, confId);
if (conf != null) {
try (InputStream is = conf.get()) {
return rpo$readConfiguration(is, dataLocation, rpi.getName(), displayName, toRemove);
return rpo$readConfiguration(is, dataLocation, rpi.getName(), displayName, dataLocations, toRemove);
} catch (Throwable e) {
Respackopts.LOGGER.error("Could not initialize pack meta for " + profileName, e);
}
@@ -80,7 +88,7 @@
return null;
}
private String rpo$readConfiguration(InputStream is, Path dataLocation, String packName, String displayName, Set<Path> toRemove) throws IOException {
private String rpo$readConfiguration(InputStream is, Path dataLocation, String packName, String displayName, Set<Path> dataLocations, Set<Path> toRemove) throws IOException {
try (InputStreamReader isr = new InputStreamReader(is)) {
PackMeta conf = Respackopts.GSON.fromJson(isr, PackMeta.class);
if (RespackoptsConfig.debugLogs) Respackopts.LOGGER.info("Discovered pack: " + conf.id);
@@ -95,19 +103,4 @@
return conf.id;
}
}
private void rpo$loadTranslations(ResourcePack rpi, String id) {
for (String namespace : rpi.getNamespaces(ResourceType.CLIENT_RESOURCES)) {
var translation = rpi.open(ResourceType.CLIENT_RESOURCES, new Identifier(namespace, "lang/en_us.json"));
if (translation == null) continue;
try (InputStream is = translation.get()) {
Language.load(is, (key, value) -> {
if (key.startsWith("rpo." + id + ".")) {
FallbackI18n.put(key, value);
}
});
} catch (Throwable ignored) {
}
}
}
}

View File

@@ -9,7 +9,7 @@ import io.gitlab.jfronny.muscript.data.dynamic.*;
import io.gitlab.jfronny.muscript.data.dynamic.additional.DFinal;
import io.gitlab.jfronny.respackopts.Respackopts;
import io.gitlab.jfronny.respackopts.RespackoptsConfig;
import io.gitlab.jfronny.respackopts.integration.SaveHook;
import io.gitlab.jfronny.respackopts.integration.v1.SaveHook;
import io.gitlab.jfronny.respackopts.model.enums.ConfigSyncMode;
import io.gitlab.jfronny.respackopts.model.enums.PackReloadType;
import io.gitlab.jfronny.respackopts.util.IndentingStringBuilder;

View File

@@ -1,5 +1,11 @@
package io.gitlab.jfronny.respackopts.util;
import net.minecraft.resource.ResourcePack;
import net.minecraft.resource.ResourceType;
import net.minecraft.util.Identifier;
import net.minecraft.util.Language;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
@@ -10,6 +16,21 @@ public class FallbackI18n {
translations.clear();
}
public static void loadFrom(ResourcePack pack, String packId) {
for (String namespace : pack.getNamespaces(ResourceType.CLIENT_RESOURCES)) {
var translation = pack.open(ResourceType.CLIENT_RESOURCES, new Identifier(namespace, "lang/en_us.json"));
if (translation == null) continue;
try (InputStream is = translation.get()) {
Language.load(is, (key, value) -> {
if (key.startsWith("rpo." + packId + ".")) {
FallbackI18n.put(key, value);
}
});
} catch (Throwable ignored) {
}
}
}
public static void put(String key, String value) {
translations.put(key, value);
}

View File

@@ -4,13 +4,15 @@ import io.gitlab.jfronny.commons.throwable.ThrowingBiConsumer;
import io.gitlab.jfronny.muscript.data.Scope;
import io.gitlab.jfronny.respackopts.Respackopts;
import io.gitlab.jfronny.respackopts.RespackoptsConfig;
import io.gitlab.jfronny.respackopts.integration.SaveHook;
import io.gitlab.jfronny.respackopts.integration.v1.SaveHook;
import io.gitlab.jfronny.respackopts.model.PackMeta;
import io.gitlab.jfronny.respackopts.model.cache.CacheKey;
import io.gitlab.jfronny.respackopts.model.cache.CachedPackState;
import io.gitlab.jfronny.respackopts.model.enums.ConfigSyncMode;
import io.gitlab.jfronny.respackopts.model.enums.PackCapability;
import io.gitlab.jfronny.respackopts.model.tree.ConfigBranch;
import io.gitlab.jfronny.respackopts.util.concurrent.ConcurrentScope;
import io.gitlab.jfronny.respackopts.util.concurrent.JoinedFuture;
import net.fabricmc.api.EnvType;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.resource.ResourcePack;
@@ -20,28 +22,33 @@ import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.*;
public class MetaCache {
private static final Object LOCK = new Object();
private static final Map<CacheKey, CachedPackState> PACK_STATES = new HashMap<>();
private static final Map<String, CacheKey> KEYS_BY_DISPLAY_NAME = new HashMap<>();
private static final Map<String, CacheKey> KEYS_BY_PACK_NAME = new HashMap<>();
private static final Map<Path, CacheKey> KEYS_BY_DATA_LOCATION = new HashMap<>();
public static void remove(CacheKey key) {
CacheKey k;
// Remove the key and ones that share a property
// Example: if an old pack has the same data location but a different name, it should still be removed
if ((k = KEYS_BY_DATA_LOCATION.remove(key.dataLocation())) != null) remove(k);
if ((k = KEYS_BY_PACK_NAME.remove(key.packName())) != null) remove(k);
if ((k = KEYS_BY_DISPLAY_NAME.remove(key.displayName())) != null) remove(k);
PACK_STATES.remove(key);
synchronized (LOCK) {
// Remove the key and ones that share a property
// Example: if an old pack has the same data location but a different name, it should still be removed
if ((k = KEYS_BY_DATA_LOCATION.remove(key.dataLocation())) != null) remove(k);
if ((k = KEYS_BY_PACK_NAME.remove(key.packName())) != null) remove(k);
if ((k = KEYS_BY_DISPLAY_NAME.remove(key.displayName())) != null) remove(k);
PACK_STATES.remove(key);
}
}
public static void clear() {
PACK_STATES.clear();
KEYS_BY_DISPLAY_NAME.clear();
KEYS_BY_PACK_NAME.clear();
KEYS_BY_DATA_LOCATION.clear();
synchronized (LOCK) {
PACK_STATES.clear();
KEYS_BY_DISPLAY_NAME.clear();
KEYS_BY_PACK_NAME.clear();
KEYS_BY_DATA_LOCATION.clear();
}
}
public static void addFromScan(String displayName, String packName, PackMeta meta, Path dataLocation) {
@@ -61,11 +68,13 @@ public class MetaCache {
// Register the key and state in the relevant maps
CacheKey key = new CacheKey(displayName, packName, dataLocation);
CachedPackState state = new CachedPackState(key, meta, branch);
remove(key);
PACK_STATES.put(key, state);
KEYS_BY_DISPLAY_NAME.put(key.displayName(), key);
KEYS_BY_PACK_NAME.put(key.packName(), key);
KEYS_BY_DATA_LOCATION.put(key.dataLocation(), key);
synchronized (LOCK) {
remove(key);
PACK_STATES.put(key, state);
KEYS_BY_DISPLAY_NAME.put(key.displayName(), key);
KEYS_BY_PACK_NAME.put(key.packName(), key);
KEYS_BY_DATA_LOCATION.put(key.dataLocation(), key);
}
// Move old configs to the new location
if (!dataLocation.startsWith(Respackopts.FALLBACK_CONF_DIR)) {
Path legacyLocation = Respackopts.FALLBACK_CONF_DIR.resolve(meta.id + ".json");
@@ -81,13 +90,18 @@ public class MetaCache {
save(dataLocation, meta.conf);
}
public static CompletableFuture<Void> save(SaveHook.Arguments args) {
public static Future<?> save(SaveHook.Arguments args) {
if (RespackoptsConfig.debugLogs)
Respackopts.LOGGER.info("Saving configs");
for (Map.Entry<CacheKey, CachedPackState> e : PACK_STATES.entrySet()) {
save(e.getKey().dataLocation(), e.getValue().configBranch());
try (ConcurrentScope scope = new ConcurrentScope()) {
for (Map.Entry<CacheKey, CachedPackState> e : PACK_STATES.entrySet()) {
scope.submit(() -> save(e.getKey().dataLocation(), e.getValue().configBranch()));
}
} catch (ExecutionException | InterruptedException e) {
Respackopts.LOGGER.error("Could not save configs", e);
return CompletableFuture.failedFuture(e);
}
List<CompletableFuture<Void>> futures = new ArrayList<>();
Set<Future<?>> futures = new HashSet<>();
for (SaveHook hook : FabricLoader.getInstance().getEntrypoints(Respackopts.ID + ":save_hook", SaveHook.class)) {
futures.add(hook.onSave(args));
}
@@ -96,7 +110,7 @@ public class MetaCache {
futures.add(hook.onSave(args));
}
}
return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new));
return JoinedFuture.of(futures);
}
private static void save(Path dataLocation, ConfigBranch branch) {
@@ -114,8 +128,10 @@
Respackopts.LOGGER.info("Loading configs for: " + key);
try (Reader reader = Files.newBufferedReader(key.dataLocation())) {
ConfigBranch b = Respackopts.GSON.fromJson(reader, ConfigBranch.class);
if (PACK_STATES.containsKey(key))
getBranch(key).sync(b, ConfigSyncMode.CONF_LOAD);
synchronized (LOCK) {
if (PACK_STATES.containsKey(key))
getBranch(key).sync(b, ConfigSyncMode.CONF_LOAD);
}
} catch (IOException e) {
Respackopts.LOGGER.error("Failed to load " + key, e);
}
@@ -127,15 +143,21 @@
}
public static @Nullable CacheKey getKeyByPack(ResourcePack pack) {
return KEYS_BY_PACK_NAME.get(pack.getName());
synchronized (LOCK) {
return KEYS_BY_PACK_NAME.get(pack.getName());
}
}
public static @Nullable CacheKey getKeyByDisplayName(String displayName) {
return KEYS_BY_DISPLAY_NAME.get(displayName);
synchronized (LOCK) {
return KEYS_BY_DISPLAY_NAME.get(displayName);
}
}
public static @Nullable CacheKey getKeyByDataLocation(Path dataLocation) {
return KEYS_BY_DATA_LOCATION.get(dataLocation);
synchronized (LOCK) {
return KEYS_BY_DATA_LOCATION.get(dataLocation);
}
}
public static ConfigBranch getBranch(CacheKey key) {
@@ -147,7 +169,9 @@
}
public static CachedPackState getState(CacheKey key) {
return PACK_STATES.get(key);
synchronized (LOCK) {
return PACK_STATES.get(key);
}
}
public static Scope getScope(@Nullable CacheKey key) {
@@ -164,21 +188,25 @@
public static boolean hasCapability(ResourcePack pack, PackCapability capability) {
CacheKey key = getKeyByPack(pack);
if (key == null) return false;
if (!PACK_STATES.containsKey(key)) {
StringBuilder sb = new StringBuilder("Could not get pack with \"");
sb.append(key);
sb.append("\" (available: ");
for (CacheKey path : PACK_STATES.keySet()) {
sb.append(path).append(", ");
synchronized (LOCK) {
if (!PACK_STATES.containsKey(key)) {
StringBuilder sb = new StringBuilder("Could not get pack with \"");
sb.append(key);
sb.append("\" (available: ");
for (CacheKey path : PACK_STATES.keySet()) {
sb.append(path).append(", ");
}
throw new NullPointerException(sb.substring(0, sb.length() - 2) + ')');
}
throw new NullPointerException(sb.substring(0, sb.length() - 2) + ')');
}
return getMeta(key).capabilities.contains(capability);
}
public static <TEx extends Exception> void forEach(ThrowingBiConsumer<CacheKey, CachedPackState, TEx> idAndBranchConsumer) throws TEx {
for (Map.Entry<CacheKey, CachedPackState> entry : PACK_STATES.entrySet()) {
idAndBranchConsumer.accept(entry.getKey(), entry.getValue());
synchronized (LOCK) {
for (Map.Entry<CacheKey, CachedPackState> entry : PACK_STATES.entrySet()) {
idAndBranchConsumer.accept(entry.getKey(), entry.getValue());
}
}
}
}

View File

@@ -0,0 +1,30 @@
package io.gitlab.jfronny.respackopts.util.concurrent;
import java.util.*;
import java.util.concurrent.*;
public class ConcurrentScope implements AutoCloseable {
//TODO use loom
public static final ForkJoinPool POOL = ForkJoinPool.commonPool();
private final Set<Future<?>> tasks = new HashSet<>();
private boolean isFuture = false;
public void submit(Runnable task) {
tasks.add(POOL.submit(task));
}
@Override
public void close() throws ExecutionException, InterruptedException {
if (isFuture) return;
for (Future<?> task : tasks) {
task.get();
}
}
public Future<?> toFuture() {
if (isFuture) throw new IllegalStateException("Attempted to convert future concurrent scope to future");
isFuture = true;
return JoinedFuture.of(tasks);
}
}
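
Assuming ConcurrentScope as committed above, there are two ways to consume it: let close() join everything at the end of the try block, or call toFuture() so close() becomes a no-op and the caller joins the returned JoinedFuture later. A hedged sketch of both shapes:

import io.gitlab.jfronny.respackopts.util.concurrent.ConcurrentScope;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

// Sketch only: both usage shapes of ConcurrentScope.
class ConcurrentScopeUsage {
    // 1) Join on close: close() blocks until every submitted task has finished.
    static void joinOnClose() throws ExecutionException, InterruptedException {
        try (ConcurrentScope scope = new ConcurrentScope()) {
            scope.submit(() -> System.out.println("task a"));
            scope.submit(() -> System.out.println("task b"));
        } // all tasks are done here
    }

    // 2) Defer the join: after toFuture(), close() returns immediately and the
    //    caller waits on the returned JoinedFuture instead.
    static Future<?> deferJoin() throws ExecutionException, InterruptedException {
        try (ConcurrentScope scope = new ConcurrentScope()) {
            scope.submit(() -> System.out.println("task c"));
            return scope.toFuture();
        }
    }
}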

View File

@@ -0,0 +1,14 @@
package io.gitlab.jfronny.respackopts.util.concurrent;
import org.jetbrains.annotations.NotNull;
import java.util.Iterator;
import java.util.concurrent.Future;
public record FutureIterable(Iterable<? extends Future<?>> futures) implements Iterable<Future<Object>> {
@NotNull
@Override
public Iterator<Future<Object>> iterator() {
return new FutureIterator(futures.iterator());
}
}

View File

@@ -0,0 +1,16 @@
package io.gitlab.jfronny.respackopts.util.concurrent;
import java.util.Iterator;
import java.util.concurrent.Future;
public record FutureIterator(Iterator<? extends Future<?>> iterator) implements Iterator<Future<Object>> {
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public Future<Object> next() {
return new RawFuture(iterator.next());
}
}

View File

@@ -0,0 +1,56 @@
package io.gitlab.jfronny.respackopts.util.concurrent;
import org.jetbrains.annotations.NotNull;
import java.util.*;
import java.util.concurrent.*;
public record JoinedFuture<T>(Iterable<Future<T>> futures) implements Future<Collection<T>> {
public static JoinedFuture<?> of(Iterable<? extends Future<?>> futures) {
// map is needed to satisfy javac
return new JoinedFuture<>(new FutureIterable(futures));
}
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
boolean cancelled = false;
for (Future<T> future : futures) {
cancelled |= future.cancel(mayInterruptIfRunning);
}
return cancelled;
}
@Override
public boolean isCancelled() {
for (Future<T> future : futures) {
if (future.isCancelled()) return true;
}
return false;
}
@Override
public boolean isDone() {
for (Future<T> future : futures) {
if (!future.isDone()) return false;
}
return true;
}
@Override
public Collection<T> get() throws InterruptedException, ExecutionException {
List<T> res = new LinkedList<>();
for (Future<T> future : futures) {
res.add(future.get());
}
return res;
}
@Override
public Collection<T> get(long timeout, @NotNull TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
List<T> res = new LinkedList<>();
for (Future<T> future : futures) {
res.add(future.get(timeout, unit));
}
return res;
}
}
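
JoinedFuture simply fans get() out over its members; one consequence worth noting is that the timed get(timeout, unit) above applies the timeout to each member in turn rather than to the join as a whole. A hypothetical usage sketch:

import io.gitlab.jfronny.respackopts.util.concurrent.JoinedFuture;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

// Hypothetical demo: join a batch of unrelated futures and block until all complete.
class JoinedFutureDemo {
    public static void main(String[] args) throws ExecutionException, InterruptedException {
        List<Future<?>> batch = List.of(
                CompletableFuture.runAsync(() -> System.out.println("save configs")),
                CompletableFuture.runAsync(() -> System.out.println("reload resources"))
        );
        JoinedFuture.of(batch).get(); // waits for both
    }
}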

View File

@@ -0,0 +1,32 @@
package io.gitlab.jfronny.respackopts.util.concurrent;
import org.jetbrains.annotations.NotNull;
import java.util.concurrent.*;
public record RawFuture(Future<?> future) implements Future<Object> {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return future.cancel(mayInterruptIfRunning);
}
@Override
public boolean isCancelled() {
return future.isCancelled();
}
@Override
public boolean isDone() {
return future.isDone();
}
@Override
public Object get() throws InterruptedException, ExecutionException {
return future.get();
}
@Override
public Object get(long timeout, @NotNull TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
return future.get(timeout, unit);
}
}