Resclone/src/main/java/io/gitlab/jfronny/resclone/Resclone.java

package io.gitlab.jfronny.resclone;

import com.google.gson.Gson;
import io.gitlab.jfronny.resclone.api.PackFetcher;
import io.gitlab.jfronny.resclone.api.PackProcessor;
import io.gitlab.jfronny.resclone.api.RescloneApi;
import io.gitlab.jfronny.resclone.api.RescloneEntry;
import io.gitlab.jfronny.resclone.data.PackMetaLoaded;
import io.gitlab.jfronny.resclone.data.PackMetaUnloaded;
import io.gitlab.jfronny.resclone.processors.RemoveEmptyProcessor;
import io.gitlab.jfronny.resclone.processors.RootPathProcessor;
import io.gitlab.jfronny.resclone.util.PackUrlCache;
import io.gitlab.jfronny.resclone.util.Result;
import net.fabricmc.api.ModInitializer;
import net.fabricmc.loader.api.FabricLoader;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;
import java.net.URI;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
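
/**
 * Mod initializer and implementation of {@link RescloneApi}.
 * Collects pack definitions from "resclone" entrypoints, downloads them in parallel during
 * initialisation and runs the registered {@link PackProcessor}s on freshly downloaded archives.
 */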
public class Resclone implements ModInitializer, RescloneApi {
    /** Pack definitions registered via {@link #addPack}, waiting to be downloaded. */
    public static final Set<PackMetaUnloaded> conf = new LinkedHashSet<>();
    /** Registered fetchers, keyed by their source type name. */
    public static final Map<String, PackFetcher> fetcherInstances = new LinkedHashMap<>();
    /** Processors applied to every freshly downloaded pack archive. */
    public static final Set<PackProcessor> processors = new LinkedHashSet<>();
    /** Packs that were successfully prepared during the last {@link #reload()}. */
    public static final Set<PackMetaLoaded> downloadedPacks = new LinkedHashSet<>();
    /** Packs whose source was not in the URL cache before, i.e. downloaded for the first time. */
    public static final Set<PackMetaLoaded> newPacks = Collections.synchronizedSet(new LinkedHashSet<>()); // filled from download worker threads
    public static final Gson gson = new Gson();
    public static final String MOD_ID = "resclone";
    public static final Logger LOGGER = LogManager.getLogger(MOD_ID);
    /** Persistent cache keyed by pack source, stored as urlCache.properties in the config directory. */
    public static PackUrlCache urlCache;
    /** Number of installed resource packs, as reported after initialisation. */
    public static int COUNT = 0;
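
    /**
     * Loads the persistent URL cache, resets all registries, lets "resclone" entrypoints register
     * their fetchers, processors and packs, then downloads everything via {@link #reload()}.
     */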
    @Override
    public void onInitialize() {
        LOGGER.info("Initialising Resclone.");
        urlCache = new PackUrlCache(getConfigPath().resolve("urlCache.properties"));
        conf.clear();
        fetcherInstances.clear();
        processors.clear();
        downloadedPacks.clear();
        addProcessor(new RootPathProcessor()); // This should be run before any other processor to make sure the path is valid
        for (RescloneEntry entry : FabricLoader.getInstance().getEntrypoints(MOD_ID, RescloneEntry.class)) {
            try {
                entry.init(this);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        addProcessor(new RemoveEmptyProcessor());
        reload();
        LOGGER.info("Installed {} resource pack{}.", COUNT, COUNT == 1 ? "" : "s");
    }
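
    /** Registers a fetcher under its source type name so packs can reference it via {@link #addPack(String, String, String)}. */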
    @Override
    public void addFetcher(PackFetcher fetcher) {
        fetcherInstances.put(fetcher.getSourceTypeName(), fetcher);
    }

    @Override
    public void addProcessor(PackProcessor processor) {
        processors.add(processor);
    }
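
    /**
     * Queues a pack for download on the next {@link #reload()}.
     * The fetcher is addressed by its source type name; for example, assuming a fetcher
     * registered under {@code "url"}, a hypothetical call could look like
     * {@code addPack("url", "https://example.com/pack.zip", "ExamplePack")}.
     */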
    @Override
    public void addPack(String fetcher, String pack, String name) {
        addPack(fetcher, pack, name, false);
    }

    @Override
    public void addPack(String fetcher, String pack, String name, boolean forceRedownload) {
        addPack(fetcher, pack, name, forceRedownload, false);
    }

    @Override
    public void addPack(String fetcher, String pack, String name, boolean forceRedownload, boolean forceEnable) {
        conf.add(new PackMetaUnloaded(fetcher, pack, name, forceRedownload, forceEnable));
    }
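
    /**
     * Downloads every configured pack on its own worker thread, persists the URL cache
     * and replaces {@link #downloadedPacks} with the packs that were prepared successfully.
     */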
    @Override
    public void reload() {
        // Filled concurrently by the download tasks, so it needs to be thread-safe.
        Set<PackMetaLoaded> metas = Collections.synchronizedSet(new LinkedHashSet<>());
        try {
            if (conf.isEmpty()) {
                LOGGER.info("No resclone packs were specified, add one to use Resclone.");
            } else {
                ExecutorService pool = Executors.newFixedThreadPool(conf.size());
                for (PackMetaUnloaded s : conf) {
                    pool.submit(generateTask(s, metas));
                }
                pool.shutdown();
                if (!pool.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS)) {
                    LOGGER.error("Download timed out. This shouldn't be possible");
                }
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        urlCache.save();
        downloadedPacks.clear();
        downloadedPacks.addAll(metas);
    }
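
    /**
     * Builds the download/processing task for a single pack: resolves its fetcher, downloads the
     * archive into the cache directory and, if it was freshly downloaded, runs all processors on it.
     * Errors are logged per pack so one failing pack does not abort the others.
     */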
    private Runnable generateTask(PackMetaUnloaded meta, Set<PackMetaLoaded> metas) {
        return () -> {
            try {
                if (!fetcherInstances.containsKey(meta.fetcher))
                    throw new Exception("Invalid fetcher: " + meta.fetcher);
                Path cacheDir = getConfigPath().resolve("cache");
                PackMetaLoaded p;
                try {
                    // A source that is not yet in the URL cache is being set up for the first time
                    boolean isNew = !urlCache.containsKey(meta.source);
                    // Download
                    Result fr = fetcherInstances.get(meta.fetcher).get(meta.source, cacheDir, meta.forceDownload);
                    p = new PackMetaLoaded(fr.downloadPath, meta.name, meta.forceEnable);
                    metas.add(p);
                    if (isNew)
                        newPacks.add(p);
                    if (fr.freshDownload) {
                        // Process: open the downloaded archive as a zip file system so processors can modify it in place
                        Map<String, String> props = new HashMap<>();
                        props.put("create", "false");
                        URI zipfile = URI.create("jar:" + p.zipPath.toUri());
                        try (FileSystem zipfs = FileSystems.newFileSystem(zipfile, props)) {
                            for (PackProcessor processor : processors) {
                                processor.process(zipfs);
                            }
                        } catch (Throwable e) {
                            e.printStackTrace();
                        }
                    }
                } catch (Throwable e) {
                    throw new Exception("Failed to download pack", e);
                }
            } catch (Throwable e) {
                LOGGER.error("Encountered issue while preparing " + meta.name, e);
            }
        };
    }
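
    /** Returns the config directory ({@code config/resclone}), creating its {@code cache} subdirectory if missing. */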
    @Override
    public Path getConfigPath() {
        Path configPath = FabricLoader.getInstance().getConfigDir().resolve("resclone");
        if (!Files.isDirectory(configPath.resolve("cache"))) {
            try {
                Files.createDirectories(configPath.resolve("cache"));
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return configPath;
    }
}