package io.gitlab.jfronny.resclone;

import io.gitlab.jfronny.commons.log.Logger;
import io.gitlab.jfronny.commons.serialize.gson.api.v1.GsonHolders;
import io.gitlab.jfronny.resclone.data.PackMetaLoaded;
import io.gitlab.jfronny.resclone.data.PackMetaUnloaded;
import io.gitlab.jfronny.resclone.fetchers.*;
import io.gitlab.jfronny.resclone.processors.*;
import io.gitlab.jfronny.resclone.util.PackUrlCache;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.ModInitializer;
import net.fabricmc.loader.api.FabricLoader;

import java.io.IOException;
import java.net.URI;
import java.nio.file.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Mod entry point for Resclone: on game start it downloads every resource pack
 * configured in {@code RescloneConfig.packs} (in parallel), runs the registered
 * {@link PackProcessor}s on freshly downloaded archives, and optionally prunes
 * cache entries that no longer correspond to a configured pack.
 */
public class Resclone implements ModInitializer {
    /** Registered fetchers keyed by their source type name; insertion order is preserved. */
    public static final Map<String, PackFetcher> FETCHER_INSTANCES = new LinkedHashMap<>();
    /** Processors applied (in registration order) to every freshly downloaded pack. */
    public static final Set<PackProcessor> PROCESSORS = new LinkedHashSet<>();
    /** Packs successfully downloaded during the last {@link #reload()}. */
    public static final Set<PackMetaLoaded> DOWNLOADED_PACKS = new LinkedHashSet<>();
    /** Packs seen for the first time this session. Client-only! */
    public static final Set<PackMetaLoaded> NEW_PACKS = new LinkedHashSet<>();

    public static final String MOD_ID = "resclone";
    public static final Logger LOGGER = Logger.forName(MOD_ID);
    /** HTTP user agent advertising this mod and its version, for use by fetchers. */
    public static final String USER_AGENT = "jfmods/" + MOD_ID + "/" + FabricLoader.getInstance()
            .getModContainer(MOD_ID).orElseThrow()
            .getMetadata()
            .getVersion()
            .getFriendlyString();

    /** Maps pack source URLs to resolved download URLs, persisted between runs. */
    public static PackUrlCache urlCache;
    /** Number of packs installed; incremented elsewhere (presumably by fetchers) — TODO confirm. */
    public static int packCount = 0;

    @Override
    public void onInitialize() {
        LOGGER.info("Initialising Resclone.");
        GsonHolders.registerSerializer();
        urlCache = new PackUrlCache(getConfigPath().resolve("urlCache.properties"));
        FETCHER_INSTANCES.clear();
        PROCESSORS.clear();
        DOWNLOADED_PACKS.clear();
        addProcessor(new RootPathProcessor()); //This should be run before any other processor to make sure the path is valid
        addFetcher(new BasicFileFetcher());
        addFetcher(new GitHubFetcher());
        addFetcher(new CurseforgeFetcher());
        addFetcher(new ModrinthFetcher());
        // Vanilla assets only exist on the client, so this pruning step is client-only.
        if (FabricLoader.getInstance().getEnvironmentType() == EnvType.CLIENT)
            addProcessor(new PruneVanillaProcessor());
        addProcessor(new RemoveEmptyProcessor());
        reload();
        LOGGER.info("Installed {} resource pack{}.", packCount, packCount == 1 ? "" : "s");
    }

    /** Registers a fetcher under its source type name, replacing any previous one. */
    public void addFetcher(PackFetcher fetcher) {
        FETCHER_INSTANCES.put(fetcher.getSourceTypeName(), fetcher);
    }

    /** Registers a processor to run on freshly downloaded packs. */
    public void addProcessor(PackProcessor processor) {
        PROCESSORS.add(processor);
    }

    /** Adds a pack to the configuration without forcing redownload or enable. */
    public void addPack(String fetcher, String pack, String name) {
        addPack(fetcher, pack, name, false);
    }

    /** Adds a pack to the configuration without forcing enable. */
    public void addPack(String fetcher, String pack, String name, boolean forceRedownload) {
        addPack(fetcher, pack, name, forceRedownload, false);
    }

    /**
     * Adds a pack to the configuration.
     *
     * @param fetcher         source type name of the fetcher to use (see {@link #FETCHER_INSTANCES})
     * @param pack            source identifier/URL understood by that fetcher
     * @param name            display name of the pack
     * @param forceRedownload whether to redownload even if cached
     * @param forceEnable     whether to enable the pack automatically
     */
    public void addPack(String fetcher, String pack, String name, boolean forceRedownload, boolean forceEnable) {
        RescloneConfig.packs.add(new PackMetaUnloaded(fetcher, pack, name, forceRedownload, forceEnable));
    }

    /**
     * Downloads all configured packs in parallel, persists the URL cache and
     * replaces {@link #DOWNLOADED_PACKS} with the result. Prunes unused cache
     * entries afterwards if configured to do so.
     */
    public void reload() {
        // Synchronized: generateTask() adds to this set from multiple pool threads.
        Set<PackMetaLoaded> metas = Collections.synchronizedSet(new LinkedHashSet<>());
        if (RescloneConfig.packs.isEmpty()) {
            LOGGER.info("No resclone pack was specified, add one");
        } else {
            try {
                ExecutorService pool = Executors.newFixedThreadPool(RescloneConfig.packs.size());
                for (PackMetaUnloaded s : RescloneConfig.packs) {
                    pool.submit(generateTask(s, metas));
                }
                pool.shutdown();
                if (!pool.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS)) {
                    LOGGER.error("Download timed out. This shouldn't be possible");
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status for callers
                LOGGER.error("Could not execute pack download task", e);
            }
        }
        urlCache.save();
        DOWNLOADED_PACKS.clear();
        DOWNLOADED_PACKS.addAll(metas);
        if (RescloneConfig.pruneUnused) pruneCache();
    }

    /**
     * Builds the download-and-process task for a single pack. The task never
     * throws: all failures are logged with the pack name for context.
     *
     * @param meta  pack to download
     * @param metas shared sink for successfully loaded packs (must be thread-safe)
     */
    private Runnable generateTask(PackMetaUnloaded meta, Set<PackMetaLoaded> metas) {
        return () -> {
            try {
                if (!FETCHER_INSTANCES.containsKey(meta.fetcher))
                    throw new Exception("Invalid fetcher: " + meta.fetcher);
                Path cacheDir = getConfigPath().resolve("cache");
                PackMetaLoaded p;
                try {
                    // "New" = the source URL was never resolved before, i.e. first time this pack is seen.
                    boolean isNew = !urlCache.containsKey(meta.source);
                    //Download
                    PackFetcher.Result fr = FETCHER_INSTANCES.get(meta.fetcher).get(meta.source, cacheDir, meta.forceDownload);
                    p = new PackMetaLoaded(fr.downloadPath(), meta.name, meta.forceEnable);
                    metas.add(p);
                    if (isNew && FabricLoader.getInstance().getEnvironmentType() == EnvType.CLIENT) {
                        // NEW_PACKS is a plain LinkedHashSet shared between pool threads; guard it.
                        synchronized (NEW_PACKS) {
                            NEW_PACKS.add(p);
                        }
                    }
                    if (fr.freshDownload()) {
                        //Process: mount the downloaded archive as a zip filesystem and run all processors on it.
                        Map<String, String> props = new HashMap<>();
                        props.put("create", "false");
                        URI zipfile = URI.create("jar:" + p.zipPath().toUri());
                        try (FileSystem zipfs = FileSystems.newFileSystem(zipfile, props)) {
                            for (PackProcessor processor : PROCESSORS) {
                                processor.process(zipfs);
                            }
                        } catch (Throwable e) {
                            LOGGER.error("Could not run pack processors on " + p.zipPath(), e);
                        }
                    }
                } catch (Throwable e) {
                    throw new Exception("Failed to download pack", e);
                }
            } catch (Throwable e) {
                LOGGER.error("Encountered issue while preparing " + meta.name, e);
            }
        };
    }

    /**
     * Deletes cache files that do not belong to any currently downloaded pack
     * (and any non-regular-file entries in the cache directory).
     */
    private void pruneCache() {
        Set<Path> loadedPacks = DOWNLOADED_PACKS.stream().map(PackMetaLoaded::zipPath).collect(Collectors.toUnmodifiableSet());
        Set<Path> toDelete = new HashSet<>();
        try (Stream<Path> cacheEntries = Files.list(getConfigPath().resolve("cache"))) {
            cacheEntries
                    .filter(s -> !Files.isRegularFile(s) || !loadedPacks.contains(s))
                    .forEach(toDelete::add);
        } catch (IOException e) {
            LOGGER.error("Could not find cache entries to prune", e);
        }
        if (!toDelete.isEmpty()) {
            LOGGER.info("Pruning " + toDelete.size() + " unused cache entries");
            for (Path path : toDelete) {
                try {
                    Files.delete(path);
                } catch (IOException e) {
                    LOGGER.error("Could not delete unused cache entry: " + path, e);
                }
            }
        }
    }

    /**
     * Returns the mod's config directory, creating its {@code cache}
     * subdirectory (and thereby the config directory itself) if missing.
     */
    public static Path getConfigPath() {
        Path configPath = FabricLoader.getInstance().getConfigDir().resolve(MOD_ID);
        if (!Files.isDirectory(configPath.resolve("cache"))) {
            try {
                Files.createDirectories(configPath.resolve("cache"));
            } catch (IOException e) {
                LOGGER.error("Could not create cache directory", e);
            }
        }
        return configPath;
    }
}