Added scheduled tasks and version management.

parent 50d1d8f662
commit eebcf0c91a

@@ -1,7 +1,8 @@
#!/usr/bin/env bash
dub clean
dub build --build=release
rm -f create-schematic-gen-site
dub build
echo "Stopping CSGS service."
ssh -f root@andrewlalis.com 'systemctl stop csgs'
echo "Uploading new binary."

cleaner.d (26 lines changed)
@@ -1,26 +0,0 @@
/**
 * This standalone module is responsible for cleaning up the list of stored
 * extracts, so only the recent ones remain. This is meant to be linked as a
 * cron scheduled program.
 */
module cleaner;

import std.stdio;
import std.file;
import std.path;
import std.datetime;

const EXTRACTS_DIR = "extracts";

int main() {
    if (!exists(EXTRACTS_DIR)) return 0;
    immutable SysTime now = Clock.currTime();
    foreach (DirEntry entry; dirEntries(EXTRACTS_DIR, SpanMode.shallow, false)) {
        Duration age = now - entry.timeLastModified();
        if (age.total!"days" > 5) {
            writefln!"Removing directory %s because it's too old."(entry.name);
            rmdirRecurse(entry.name);
        }
    }
    return 0;
}
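
The cleanup that cleaner.d performed as an external cron program now runs inside the server itself, registered with the scheduled package that this commit adds to dub.json; the real implementation is cleanOldExtracts in source/csgs/task.d further down. A minimal sketch of that registration, reusing only the calls this commit introduces (the empty function body is a stand-in for the real cleanup logic):

import scheduled;
import std.datetime;

void cleanOldExtracts() {
    // Stand-in for the real implementation in csgs/task.d, which removes
    // extracts older than a few days from the "extracts" directory.
}

void main() {
    // Run the cleanup once per day, in-process, instead of via a cron entry.
    JobScheduler scheduler = new ThreadedJobScheduler();
    scheduler.addJob(new FunctionJob(&cleanOldExtracts), new FixedIntervalSchedule(days(1)));
    scheduler.start();
}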

dub.json (4 lines changed)
@@ -4,7 +4,9 @@
    ],
    "copyright": "Copyright © 2023, Andrew Lalis",
    "dependencies": {
        "handy-httpd": "~>7.6.3",
        "handy-httpd": "~>7.6.4",
        "requests": "~>2.1.1",
        "scheduled": "~>1.2.0",
        "slf4d": "~>2.4.2"
    },
    "description": "HTTP server for generating schematic materials lists.",

dub.selections.json
@@ -1,9 +1,16 @@
{
    "fileVersion": 1,
    "versions": {
        "handy-httpd": "7.6.3",
        "automem": "0.6.9",
        "cachetools": "0.4.1",
        "cronexp": "0.1.0-beta3",
        "handy-httpd": "7.6.4",
        "httparsed": "1.2.1",
        "requests": "2.1.1",
        "scheduled": "1.1.0",
        "slf4d": "2.4.2",
        "streams": "3.5.0"
        "streams": "3.5.0",
        "test_allocator": "0.3.4",
        "unit-threaded": "0.10.8"
    }
}

source/app.d (126 lines changed)
@@ -1,130 +1,14 @@
import handy_httpd;
import handy_httpd.handlers.path_delegating_handler;
import handy_httpd.handlers.file_resolving_handler;
import slf4d;
import slf4d.default_provider;
import std.path;
import std.file;
import std.json;

const EXTRACTS_DIR = "extracts";
const EXTRACT_FILENAME = "__EXTRACT__.json";
const EXTRACT_COMMAND = ["java", "-jar", "materials-extractor-v1.0.0.jar"];
import csgs.http;
import csgs.task;

void main() {
    auto provider = new shared DefaultProvider(true, Levels.INFO);
    configureLoggingProvider(provider);
    info("Starting create-schematic-gen-site API.");

    ServerConfig config = ServerConfig.defaultValues();
    config.workerPoolSize = 3;
    config.connectionQueueSize = 10;
    config.port = 8100;

    PathDelegatingHandler handler = new PathDelegatingHandler();
    handler.addMapping("POST", "/extracts", &handleExtract);
    handler.addMapping("GET", "/extracts/{extractId}", &getExtract);

    FileResolvingHandler fileHandler = new FileResolvingHandler("site", DirectoryResolutionStrategies.serveIndexFiles);
    handler.addMapping("/**", fileHandler);
    new HttpServer(handler, config).start();
}

void handleExtract(ref HttpRequestContext ctx) {
    import std.json;
    import std.uuid;
    import std.process;
    import std.stdio;

    immutable UUID extractId = randomUUID();
    MultipartFormData data = ctx.request.readBodyAsMultipartFormData();
    if (!validateExtractRequest(data, ctx.response)) return;
    const extractDir = buildPath(EXTRACTS_DIR, extractId.toString());
    if (!exists(extractDir)) {
        mkdirRecurse(extractDir);
    }
    string[] filenames;
    uint[] counts;
    foreach (MultipartElement element; data.elements) {
        if (element.name == "schematics") {
            const filePath = buildPath(extractDir, element.filename.get());
            std.file.write(filePath, element.content);
            filenames ~= filePath;
        } else if (element.name == "counts") {
            import std.conv;
            immutable uint count = element.content.to!uint;
            counts ~= count;
        }
    }

    infoF!"Running extract process on files: %s"(filenames);
    auto extractionResult = execute(EXTRACT_COMMAND ~ filenames);
    immutable int exitCode = extractionResult.status;
    infoF!"Exit code: %d"(exitCode);
    rmdirRecurse(extractDir);
    if (exitCode != 0) {
        ctx.response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR);
        ctx.response.writeBodyString(extractionResult.output);
    } else {
        const extractJsonPath = extractDir ~ ".json";
        // It was successful, so add __COUNT__ to each object in the extract data.
        JSONValue extractData = parseJSON(extractionResult.output);
        for (uint i = 0; i < extractData.array.length; i++) {
            extractData.array[i].object["__COUNT__"] = JSONValue(counts[i]);
        }
        std.file.write(extractJsonPath, extractData.toPrettyString());

        JSONValue result = JSONValue.emptyObject;
        result.object["extractId"] = JSONValue(extractId.toString());
        ctx.response.writeBodyString(result.toJSON(), "application/json");
    }
}

private bool validateExtractRequest(ref MultipartFormData data, ref HttpResponse response) {
    if (data.elements.length < 2) {
        response.setStatus(HttpStatus.BAD_REQUEST);
        response.writeBodyString("Requires at least 2 form data elements (schematic file and count).");
        return false;
    }
    uint nextElementIdx = 0;
    while (nextElementIdx < data.elements.length) {
        MultipartElement element = data.elements[nextElementIdx++];
        if (element.name == "schematics") {
            if (element.filename.isNull || element.filename.get().length < 5 || element.content.length < 10) {
                response.setStatus(HttpStatus.BAD_REQUEST);
                response.writeBodyString("Invalid or missing schematic file.");
                return false;
            }
            const string filename = element.filename.get();
            if (nextElementIdx == data.elements.length) {
                response.setStatus(HttpStatus.BAD_REQUEST);
                response.writeBodyString("Missing count element for schematic: " ~ filename);
                return false;
            }
            MultipartElement countElement = data.elements[nextElementIdx++];
            import std.conv;
            try {
                immutable uint count = countElement.content.to!uint;
                if (count < 1 || count > 1000) throw new Exception("out of range: count should be between 1 and 1000, inclusive.");
            } catch (Exception e) {
                response.setStatus(HttpStatus.BAD_REQUEST);
                response.writeBodyString("Invalid count element: " ~ e.msg);
                return false;
            }
        } else if (element.name == "processing-terminal") {
            // TODO: Check processing-terminal format.
        } else {
            response.setStatus(HttpStatus.BAD_REQUEST);
            response.writeBodyString("Unknown element: " ~ element.name);
            return false;
        }
    }

    return true;
}

void getExtract(ref HttpRequestContext ctx) {
    string extractId = ctx.request.getPathParamAs!string("extractId");
    const extractFile = buildPath(EXTRACTS_DIR, extractId ~ ".json");
    fileResponse(ctx.response, extractFile, "application/json");
    info("Starting create-schematic-gen-site API.");
    startScheduledTasks();
    startServer();
}

source/csgs/extract.d (new file)
@@ -0,0 +1,123 @@
module csgs.extract;

import handy_httpd;
import slf4d;

import std.typecons;
import std.file;
import std.path;

const EXTRACTS_DIR = "extracts";
const EXTRACT_FILENAME = "__EXTRACT__.json";
const EXTRACT_COMMAND = ["java", "-jar", "materials-extractor-v1.0.0.jar"];

bool validateExtractRequest(ref MultipartFormData data, ref HttpResponse response) {
    if (data.elements.length < 2) {
        response.setStatus(HttpStatus.BAD_REQUEST);
        response.writeBodyString("Requires at least 2 form data elements (schematic file and count).");
        return false;
    }
    uint nextElementIdx = 0;
    while (nextElementIdx < data.elements.length) {
        MultipartElement element = data.elements[nextElementIdx++];
        if (element.name == "schematics") {
            if (element.filename.isNull || element.filename.get().length < 5 || element.content.length < 10) {
                response.setStatus(HttpStatus.BAD_REQUEST);
                response.writeBodyString("Invalid or missing schematic file.");
                return false;
            }
            const string filename = element.filename.get();
            if (nextElementIdx == data.elements.length) {
                response.setStatus(HttpStatus.BAD_REQUEST);
                response.writeBodyString("Missing count element for schematic: " ~ filename);
                return false;
            }
            MultipartElement countElement = data.elements[nextElementIdx++];
            import std.conv;
            try {
                immutable uint count = countElement.content.to!uint;
                if (count < 1 || count > 1000) throw new Exception("out of range: count should be between 1 and 1000, inclusive.");
            } catch (Exception e) {
                response.setStatus(HttpStatus.BAD_REQUEST);
                response.writeBodyString("Invalid count element: " ~ e.msg);
                return false;
            }
        } else if (element.name == "processing-terminal") {
            // TODO: Check processing-terminal format.
        } else {
            response.setStatus(HttpStatus.BAD_REQUEST);
            response.writeBodyString("Unknown element: " ~ element.name);
            return false;
        }
    }

    return true;
}

string doExtract(ref MultipartFormData data) {
    import std.json;
    import std.uuid;
    import std.process;

    immutable UUID extractId = randomUUID();
    immutable string extractIdStr = extractId.toString();
    immutable string extractDir = buildPath(EXTRACTS_DIR, extractIdStr);
    if (!exists(extractDir)) {
        mkdirRecurse(extractDir);
    }
    string[] filenames;
    uint[] counts;
    foreach (MultipartElement element; data.elements) {
        if (element.name == "schematics") {
            const filePath = buildPath(extractDir, element.filename.get());
            std.file.write(filePath, element.content);
            filenames ~= filePath;
        } else if (element.name == "counts") {
            import std.conv;
            immutable uint count = element.content.to!uint;
            counts ~= count;
        }
    }

    infoF!"Running extract process on files: %s"(filenames);
    Nullable!string extractorPath = getExtractorProgramPath();
    if (extractorPath.isNull) {
        throw new HttpStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Could not find extractor program.");
    }
    auto extractionResult = execute(["java", "-jar", extractorPath.get()] ~ filenames);
    immutable int exitCode = extractionResult.status;
    infoF!"Exit code: %d"(exitCode);
    rmdirRecurse(extractDir);
    if (exitCode != 0) {
        throw new HttpStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Extraction program failed.");
    } else {
        immutable string extractJsonPath = extractDir ~ ".json";
        // It was successful, so add __COUNT__ to each object in the extract data.
        JSONValue extractData = parseJSON(extractionResult.output);
        for (uint i = 0; i < extractData.array.length; i++) {
            extractData.array[i].object["__COUNT__"] = JSONValue(counts[i]);
        }
        std.file.write(extractJsonPath, extractData.toPrettyString());
        return extractIdStr;
    }
}

Nullable!string getExtractorProgramPath() {
    import std.string;
    foreach (DirEntry entry; dirEntries(getcwd(), SpanMode.shallow, false)) {
        if (entry.isFile() && endsWith(entry.name, ".jar")) {
            return nullable(entry.name);
        }
    }
    return Nullable!string.init;
}

Nullable!string getCurrentExtractorVersion() {
    import std.regex;
    Nullable!string extractorPath = getExtractorProgramPath();
    if (extractorPath.isNull) return Nullable!string.init;
    auto r = regex(`v\d+\.\d+\.\d+`);
    auto c = matchFirst(extractorPath.get(), r);
    if (c.empty) return Nullable!string.init;
    return nullable(c.front());
}
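
doExtract merges the user-supplied counts into the extractor's JSON output before writing it to disk. A small illustration of that __COUNT__ injection, using a made-up extractor output shape (the actual field names come from the MaterialsExtractor jar and are not shown in this commit):

unittest {
    import std.json;
    // Hypothetical extractor output: one JSON object per schematic.
    JSONValue extractData = parseJSON(`[{"name": "house.nbt", "materials": {}}]`);
    uint[] counts = [3];
    // Same loop as in doExtract: attach the requested count to each object.
    for (uint i = 0; i < extractData.array.length; i++) {
        extractData.array[i].object["__COUNT__"] = JSONValue(counts[i]);
    }
    assert(extractData.array[0]["__COUNT__"].uinteger == 3);
}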

source/csgs/http.d (new file)
@@ -0,0 +1,41 @@
module csgs.http;

import handy_httpd;
import handy_httpd.handlers.path_delegating_handler;
import handy_httpd.handlers.file_resolving_handler;
import slf4d;
import std.path;

import csgs.extract;

void startServer() {
    ServerConfig config = ServerConfig.defaultValues();
    config.workerPoolSize = 3;
    config.connectionQueueSize = 10;
    config.port = 8100;

    PathDelegatingHandler handler = new PathDelegatingHandler();
    handler.addMapping("POST", "/extracts", &handleExtract);
    handler.addMapping("GET", "/extracts/{extractId}", &getExtract);

    FileResolvingHandler fileHandler = new FileResolvingHandler("site", DirectoryResolutionStrategies.serveIndexFiles);
    handler.addMapping("/**", fileHandler);
    new HttpServer(handler, config).start();
}

private void handleExtract(ref HttpRequestContext ctx) {
    import std.json;

    MultipartFormData data = ctx.request.readBodyAsMultipartFormData();
    if (!validateExtractRequest(data, ctx.response)) return;
    string extractId = doExtract(data);
    JSONValue result = JSONValue.emptyObject;
    result.object["extractId"] = JSONValue(extractId);
    ctx.response.writeBodyString(result.toJSON(), "application/json");
}

private void getExtract(ref HttpRequestContext ctx) {
    string extractId = ctx.request.getPathParamAs!string("extractId");
    const extractFile = buildPath(EXTRACTS_DIR, extractId ~ ".json");
    fileResponse(ctx.response, extractFile, "application/json");
}
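
With these routes in place, a client uploads one or more "schematics" file parts, each followed by a "counts" part, as multipart form data, then fetches the stored result by its extractId. A rough client sketch using the dlang-requests package (already a dependency above); the host, schematic file name, and count value are placeholders:

import requests;
import std.json;
import std.stdio;

void main() {
    string baseUrl = "http://localhost:8100"; // placeholder host; the server listens on port 8100
    Request rq = Request();
    MultipartForm form;
    // Each schematic part is expected to be immediately followed by its count.
    form.add(formData("schematics", File("house.nbt", "rb"), ["filename": "house.nbt"]));
    form.add(formData("counts", "3"));
    Response resp = rq.post(baseUrl ~ "/extracts", form);
    string extractId = parseJSON(resp.responseBody.toString()).object["extractId"].str;
    // Fetch the stored extract JSON, which has __COUNT__ added to each entry.
    Response extract = rq.get(baseUrl ~ "/extracts/" ~ extractId);
    writeln(extract.responseBody.toString());
}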

source/csgs/package.d (new file)
@@ -0,0 +1 @@
module csgs;

source/csgs/task.d (new file)
@@ -0,0 +1,110 @@
module csgs.task;

import slf4d;
import scheduled;

import std.datetime;
import std.typecons;
import std.json;
import std.string;
import std.file;
import std.stdio;

import csgs.extract;

void startScheduledTasks() {
    JobScheduler scheduler = new ThreadedJobScheduler();
    Job cleanJob = new FunctionJob(&cleanOldExtracts);
    scheduler.addJob(cleanJob, new FixedIntervalSchedule(days(1)));
    Job updateCheckJob = new FunctionJob(&checkForExtractorUpdate);
    scheduler.addJob(updateCheckJob, new FixedIntervalSchedule(hours(1)));
    scheduler.start();
}

private void cleanOldExtracts() {
    import std.file;
    immutable MAX_AGE = days(5);
    info("Cleaning old extracts.");
    if (!exists(EXTRACTS_DIR)) return;
    immutable SysTime now = Clock.currTime();
    foreach (DirEntry entry; dirEntries(EXTRACTS_DIR, SpanMode.shallow, false)) {
        if (entry.isDir) {
            infoF!"Removing directory %s."(entry.name);
            rmdirRecurse(entry.name);
        } else if (entry.isFile) {
            immutable Duration age = now - entry.timeLastModified();
            if (age > MAX_AGE) {
                infoF!"Removing extract %s because it's too old."(entry.name);
                std.file.remove(entry.name);
            }
        }
    }
}

private void checkForExtractorUpdate() {
    import requests;
    import std.json;
    import std.stdio;
    import std.string;
    import std.file;
    info("Checking for MaterialsExtractor program updates.");

    // First request the latest release from GitHub API:
    Request req = Request();
    req.addHeaders(["Accept": "application/vnd.github+json"]);
    Response resp = req.get("https://api.github.com/repos/andrewlalis/MaterialsExtractor/releases/latest");
    if (resp.code() != 200) {
        warnF!"Failed to get the latest MaterialsExtractor release. Status code %d."(resp.code);
        return;
    }
    JSONValue responseJson = parseJSON(resp.responseBody.toString());

    // Get the release's version, compare it with our current version.
    immutable string releaseVersion = responseJson.object["tag_name"].str;
    infoF!"Found release version %s"(releaseVersion);
    Nullable!string currentVersion = getCurrentExtractorVersion();
    if (!currentVersion.isNull && currentVersion.get() >= releaseVersion) {
        infoF!"Skipping this version since it's not newer than the current %s."(currentVersion.get);
        return;
    }

    // Find the release's asset, and download it.
    Nullable!JSONValue nullableAsset = findJarAsset(responseJson);
    if (nullableAsset.isNull) {
        warn("Latest release has no asset. Ignoring.");
        return;
    }
    JSONValue asset = nullableAsset.get();
    immutable string filename = asset.object["name"].str;
    immutable string downloadUrl = asset.object["browser_download_url"].str;
    infoF!"Found asset: %s. Downloading from %s."(filename, downloadUrl);
    if (exists(filename)) {
        std.file.remove(filename);
    }

    Request downloadRequest = Request();
    downloadRequest.useStreaming = true;
    Response downloadResponse = downloadRequest.get(downloadUrl);
    File f = File(filename, "wb");
    auto stream = downloadResponse.receiveAsRange();
    while (!stream.empty) {
        f.rawWrite(stream.front);
        stream.popFront();
    }
    f.close();
    infoF!"Downloaded file to %s"(f.name);
}

private Nullable!JSONValue findJarAsset(ref JSONValue response) {
    foreach (JSONValue asset; response.object["assets"].array) {
        immutable string name = asset.object["name"].str;
        if (
            startsWith(name, "materials-extractor") &&
            endsWith(name, ".jar") &&
            asset.object["content_type"].str == "application/java-archive"
        ) {
            return nullable(asset);
        }
    }
    return Nullable!JSONValue.init;
}
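
One caveat in checkForExtractorUpdate: the release tag and the current version are compared as plain strings, which only orders correctly while every version component stays single-digit (lexicographically "v1.9.0" >= "v1.10.0", so a v1.10.0 release would be skipped). A possible alternative, not part of this commit, that compares the numeric components matched by the same v\d+\.\d+\.\d+ pattern:

/// Returns true if version tag `a` (e.g. "v1.10.0") is newer than `b`,
/// comparing numeric components instead of raw strings.
bool isNewerVersion(string a, string b) {
    import std.algorithm : map;
    import std.array : array, split;
    import std.conv : to;
    uint[] parse(string v) {
        // Drop the leading "v" and split on dots: "v1.10.0" -> [1, 10, 0].
        return v[1 .. $].split(".").map!(to!uint).array;
    }
    return parse(a) > parse(b); // built-in array comparison is element-wise
}

unittest {
    assert(isNewerVersion("v1.10.0", "v1.9.0"));
    assert(!isNewerVersion("v1.9.0", "v1.10.0"));
}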