Woodpecker-Include/src/main/java/io/gitlab/jfronny/woodpecker/include/Main.java

package io.gitlab.jfronny.woodpecker.include;

import io.gitlab.jfronny.commons.HttpUtils;
import io.gitlab.jfronny.commons.log.Logger;
import io.gitlab.jfronny.woodpecker.include.model.*;
import net.freeutils.httpserver.HTTPServer;

import java.io.*;
import java.util.concurrent.atomic.AtomicBoolean;
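
/**
 * Woodpecker-Include: a small HTTP service intended to act as a Woodpecker CI
 * configuration extension. It accepts a repository's pipeline configs on POST /ciconfig,
 * runs them through {@code PipelineUnpacker} to resolve include directives, and returns
 * the rewritten configs (200) or signals that nothing changed (204).
 */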
public class Main {
    public static final Logger LOG = Logger.forName("Woodpecker-Include");

    public static void main(String[] args) throws IOException {
        if (args.length != 1) {
            LOG.error("Usage: woodpecker-include <port>");
            return;
        }
        HTTPServer server = new HTTPServer(Integer.parseInt(args[0]));
        HTTPServer.VirtualHost host = server.getVirtualHost(null);
        HttpUtils.setUserAgent("Woodpecker-Include/1.0");
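        // Woodpecker POSTs the repository and its pipeline configs to this endpoint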
        host.addContext("/ciconfig", (req, resp) -> {
            Ref ref = new Ref();
            try {
                try {
                    return processRequest(req, resp, ref);
                } catch (UncheckedIOException e) {
                    // Unwrap UncheckedIOException (e.g. from the stream pipeline) so the original IOException propagates
                    throw e.getCause();
                }
            } catch (Throwable e) {
                // ref.name is only filled in once the request body has been parsed
                if (ref.name == null) LOG.info("Could not process pipeline", e);
                else LOG.info("Could not process pipeline for " + ref.name, e);
                throw e;
            }
        }, "POST");
        server.start();
        LOG.info("Running Woodpecker-Include");
    }
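
    /**
     * Parses the request body, records the repository name in {@code ref}, and runs every
     * submitted config through {@code PipelineUnpacker}. Sends 200 with the rewritten configs
     * if the unpacker reported a change, or 204 to keep the original configuration.
     */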
    private static int processRequest(HTTPServer.Request req, HTTPServer.Response resp, Ref ref) throws IOException {
        RequestModel request;
        try (var isr = new InputStreamReader(req.getBody())) {
            request = GC_RequestModel.read(isr);
        }
        // Expose the repository name to the error handler in main
        ref.name = request.repo.full_name;
        AtomicBoolean changed = new AtomicBoolean(false);
        ResponseModel response = new ResponseModel(request.configs.stream().mapMulti(new PipelineUnpacker(changed)).toList());
        if (changed.get()) {
            resp.sendHeaders(200);
            try (OutputStreamWriter writer = new OutputStreamWriter(resp.getBody())) {
                GC_ResponseModel.write(writer, response);
            }
            LOG.info("Processed includes for pipeline for " + ref.name);
        } else {
            // Nothing was changed: tell the caller to keep the original configuration
            resp.sendHeaders(204);
        }
        return 0;
    }
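
    /** Mutable holder so the handler in {@code main} can log the repository name once it is known. */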
    private static class Ref {
        public String name;
    }
}