Mirror of https://github.com/BillyOutlast/drop.git (synced 2026-02-04 08:41:17 +01:00)
Depot API & v4 (#298)
* feat: nginx + torrential basics & services system
* fix: lint + i18n
* fix: update torrential to remove openssl
* feat: add torrential to Docker build
* feat: move to self hosted runner
* fix: move off self-hosted runner
* fix: update nginx.conf
* feat: torrential cache invalidation
* fix: update torrential for cache invalidation
* feat: integrity check task
* fix: lint
* feat: move to version ids
* fix: client fixes and client-side checks
* feat: new depot apis and version id fixes
* feat: update torrential
* feat: droplet bump and remove unsafe update functions
* fix: lint
* feat: v4 featureset: emulators, multi-launch commands
* fix: lint
* fix: mobile ui for game editor
* feat: launch options
* fix: lint
* fix: remove axios, use $fetch
* feat: metadata and task api improvements
* feat: task actions
* fix: slight styling issue
* feat: fix style and lints
* feat: totp backend routes
* feat: oidc groups
* fix: update drop-base
* feat: creation of passkeys & totp
* feat: totp signin
* feat: webauthn mfa/signin
* feat: launch selecting ui
* fix: manually running tasks
* feat: update add company game modal to use new SelectorGame
* feat: executor selector
* fix(docker): update rust to rust nightly for torrential build (#305)
* feat: new version ui
* feat: move package lookup to build time to allow for deno dev
* fix: lint
* feat: localisation cleanup
* feat: apply localisation cleanup
* feat: potential i18n refactor logic
* feat: remove args from commands
* fix: lint
* fix: lockfile
---------
Co-authored-by: Aden Lindsay <140392385+AdenMGB@users.noreply.github.com>
@@ -103,4 +103,7 @@ export const systemACLDescriptions: ObjectFromList<typeof systemACLs> = {
"Read tasks and maintenance information, like updates available and cleanup.",
"settings:update": "Update system settings.",
"depot:new": "Create a new download depot",
"depot:delete": "Remove a download depot",
};
@@ -43,6 +43,9 @@ export type UserACL = Array<(typeof userACLs)[number]>;
export const systemACLs = [
"setup",
"depot:new",
"depot:delete",
"auth:read",
"auth:simple:invitation:read",
"auth:simple:invitation:new",
@@ -123,8 +126,12 @@ class ACLManager {
if (!request)
throw new Error("Native web requests not available - weird deployment?");
// Sessions automatically have all ACLs
const user = await sessionHandler.getSession(request);
if (user) return user.userId;
const session = await sessionHandler.getSession(request);
if (session && session.authenticated) {
if (session.authenticated.level >= session.authenticated.requiredLevel)
return session.authenticated.userId;
return undefined;
}
const authorizationToken = this.getAuthorizationToken(request);
if (!authorizationToken) return undefined;
@@ -158,6 +165,19 @@ class ACLManager {
return undefined;
}
async allowUserSuperlevel(request: MinimumRequestObject | undefined) {
if (!request)
throw new Error("Native web requests not available - weird deployment?");
const session = await sessionHandler.getSession(request);
if (!session || !session.authenticated) return undefined;
if (session.authenticated.level < session.authenticated.requiredLevel)
return undefined;
if (session.authenticated.superleveledExpiry === undefined)
return undefined;
if (session.authenticated.superleveledExpiry < Date.now()) return undefined;
return session.authenticated.userId;
}
async allowSystemACL(
request: MinimumRequestObject | undefined,
acls: SystemACL,
@@ -165,14 +185,19 @@ class ACLManager {
if (!request)
throw new Error("Native web requests not available - weird deployment?");
const userSession = await sessionHandler.getSession(request);
if (userSession) {
if (userSession && userSession.authenticated) {
const user = await prisma.user.findUnique({
where: { id: userSession.userId },
where: { id: userSession.authenticated.userId },
});
if (user) {
if (!user) return false;
if (user.admin) return true;
return false;
if (!user.admin) return false;
if (
userSession.authenticated.level <
userSession.authenticated.requiredLevel
)
return false;
return true;
}
}
@@ -221,7 +246,7 @@ class ACLManager {
request: MinimumRequestObject,
): Promise<GlobalACL[] | undefined> {
const userSession = await sessionHandler.getSession(request);
if (!userSession) {
if (!userSession || !userSession.authenticated) {
const authorizationToken = this.getAuthorizationToken(request);
if (!authorizationToken) return undefined;
const token = await prisma.aPIToken.findUnique({
@@ -232,7 +257,7 @@ class ACLManager {
}
const user = await prisma.user.findUnique({
where: { id: userSession.userId },
where: { id: userSession.authenticated.userId },
select: {
admin: true,
},
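For orientation, a minimal, illustrative sketch (not part of the diff) of how a route handler could gate on the new depot ACLs. It assumes the ACLManager class above is exported as an `aclManager` singleton and that the incoming H3 event satisfies MinimumRequestObject; both names are assumptions, not confirmed by this commit.

export default defineEventHandler(async (event) => {
  // "depot:new" is one of the system ACLs added in this change
  const allowed = await aclManager.allowSystemACL(event, ["depot:new"]);
  if (!allowed) throw createError({ statusCode: 403, message: "Forbidden" });
  // ... create the depot here ...
});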
2 server/internal/auth/base32/index.d.ts vendored Normal file
@@ -0,0 +1,2 @@
export function b32e(array: Uint8Array): string;
export function b32d(str: string): Uint8Array;
69 server/internal/auth/base32/index.js Normal file
@@ -0,0 +1,69 @@
// base32 elements
// RFC 4648: why include 2? Z and 2 look less similar than 8 and O do.
const b32 = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";
console.assert(b32.length === 32, b32.length);
const b32r = new Map(Array.from(b32, (ch, i) => [ch, i])).set("=", 0);
//[constants derived from character table size]
//cbit = 5 (as 32 == 2 ** 5), ubit = 8 (as byte)
//ccount = 8 (= cbit / gcd(cbit, ubit)), ucount = 5 (= ubit / gcd(cbit, ubit))
//cmask = 0x1f (= 2 ** cbit - 1), umask = 0xff (= 2 ** ubit - 1)
//const b32pad = [0, 6, 4, 3, 1];
const b32pad = Array.from(Array(5), (_, i) => ((8 - (i * 8) / 5) | 0) % 8);
function b32e5(u1, u2 = 0, u3 = 0, u4 = 0, u5 = 0) {
const u40 = u1 * 2 ** 32 + u2 * 2 ** 24 + u3 * 2 ** 16 + u4 * 2 ** 8 + u5;
return [
b32[(u40 / 2 ** 35) & 0x1f],
b32[(u40 / 2 ** 30) & 0x1f],
b32[(u40 / 2 ** 25) & 0x1f],
b32[(u40 / 2 ** 20) & 0x1f],
b32[(u40 / 2 ** 15) & 0x1f],
b32[(u40 / 2 ** 10) & 0x1f],
b32[(u40 / 2 ** 5) & 0x1f],
b32[u40 & 0x1f],
];
}
function b32d8(b1, b2, b3, b4, b5, b6, b7, b8) {
const u40 =
b32r.get(b1) * 2 ** 35 +
b32r.get(b2) * 2 ** 30 +
b32r.get(b3) * 2 ** 25 +
b32r.get(b4) * 2 ** 20 +
b32r.get(b5) * 2 ** 15 +
b32r.get(b6) * 2 ** 10 +
b32r.get(b7) * 2 ** 5 +
b32r.get(b8);
return [
(u40 / 2 ** 32) & 0xff,
(u40 / 2 ** 24) & 0xff,
(u40 / 2 ** 16) & 0xff,
(u40 / 2 ** 8) & 0xff,
u40 & 0xff,
];
}
// base32 encode/decode: Uint8Array <=> string
export function b32e(u8a) {
console.assert(u8a instanceof Uint8Array, u8a.constructor);
const len = u8a.length,
rem = len % 5;
const u5s = Array.from(Array((len - rem) / 5), (_, i) =>
u8a.subarray(i * 5, i * 5 + 5),
);
const pad = b32pad[rem];
const br = rem === 0 ? [] : b32e5(...u8a.subarray(-rem)).slice(0, 8 - pad);
return []
.concat(...u5s.map((u5) => b32e5(...u5)), br, ["=".repeat(pad)])
.join("");
}
export function b32d(bs) {
const len = bs.length;
if (len === 0) return new Uint8Array([]);
console.assert(len % 8 === 0, len);
const pad = len - bs.indexOf("="),
rem = b32pad.indexOf(pad);
console.assert(rem >= 0, pad);
console.assert(/^[A-Z2-7+/]*$/.test(bs.slice(0, len - pad)), bs);
const u8s = [].concat(...bs.match(/.{8}/g).map((b8) => b32d8(...b8)));
return new Uint8Array(rem > 0 ? u8s.slice(0, rem - 5) : u8s);
}
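A hedged usage sketch (not part of the diff) of the two exports above; the import path is assumed from the file's location, and the expected output follows RFC 4648 base32.

import { b32e, b32d } from "~/server/internal/auth/base32";

// "drop" is expected to encode to "MRZG64A=" (7 data characters plus one "=" pad)
const bytes = new TextEncoder().encode("drop");
const encoded = b32e(bytes);
const decoded = b32d(encoded);
console.assert(decoded.length === bytes.length && decoded.every((b, i) => b === bytes[i]));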
@@ -34,7 +34,7 @@ class AuthManager {
(this.authProviders as any)[key] = object;
logger.info(`enabled auth: ${key}`);
} catch (e) {
logger.warn(e);
logger.warn((e as string).toString());
}
}
@@ -45,6 +45,7 @@ export class OIDCManager {
private clientSecret: string;
private externalUrl: string;
private userGroup?: string = process.env.OIDC_USER_GROUP;
private adminGroup?: string = process.env.OIDC_ADMIN_GROUP;
private usernameClaim: keyof OIDCUserInfo =
(process.env.OIDC_USERNAME_CLAIM as keyof OIDCUserInfo) ??
@@ -204,11 +205,11 @@ export class OIDCManager {
},
});
const user = await this.fetchOrCreateUser(userinfo);
const userOrError = await this.fetchOrCreateUser(userinfo);
if (typeof user === "string") return user;
if (typeof userOrError === "string") return userOrError;
return { user, options: session.options };
return { user: userOrError, options: session.options };
} catch (e) {
logger.error(e);
return `Request to identity provider failed: ${e}`;
@@ -236,6 +237,19 @@ export class OIDCManager {
if (!username)
return "Invalid username claim in OIDC response: " + this.usernameClaim;
const isAdmin =
userinfo.groups !== undefined &&
this.adminGroup !== undefined &&
userinfo.groups.includes(this.adminGroup);
const isUser = this.userGroup
? userinfo.groups !== undefined &&
userinfo.groups.includes(this.userGroup)
: true;
if (!(isAdmin || isUser))
return "Not authorized to access this application.";
/*
const takenUsername = await prisma.user.count({
where: {
@@ -274,11 +288,6 @@ export class OIDCManager {
);
}
const isAdmin =
userinfo.groups !== undefined &&
this.adminGroup !== undefined &&
userinfo.groups.includes(this.adminGroup);
const created = await prisma.linkedAuthMec.create({
data: {
mec: AuthMec.OpenID,
22 server/internal/auth/totp.ts Normal file
@@ -0,0 +1,22 @@
export function dropEncodeArrayBase64(secret: Uint8Array): string {
return encode(secret);
}
export function dropDecodeArrayBase64(secret: string): Uint8Array {
return decode(secret);
}
const { fromCharCode } = String;
const encode = (uint8array: Uint8Array) => {
const output = [];
for (let i = 0, { length } = uint8array; i < length; i++)
output.push(fromCharCode(uint8array[i]));
return btoa(output.join(""));
};
const asCharCode = (c: string) => c.charCodeAt(0);
const decode = (chars: string) => Uint8Array.from(atob(chars), asCharCode);
export interface TOTPv1Credentials {
secret: string;
}
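A brief, illustrative round trip through the helpers above (not part of the diff); they simply wrap btoa/atob for binary-safe base64 of raw secret bytes. The import path is assumed from the file's location.

import { dropEncodeArrayBase64, dropDecodeArrayBase64 } from "~/server/internal/auth/totp";

const secret = crypto.getRandomValues(new Uint8Array(20)); // typical TOTP secret length
const stored = dropEncodeArrayBase64(secret);   // base64 string, safe to persist
const restored = dropDecodeArrayBase64(stored); // back to the original bytes
console.assert(restored.every((b, i) => b === secret[i]));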
128 server/internal/auth/webauthn.ts Normal file
@@ -0,0 +1,128 @@
import { ArkErrors, type } from "arktype";
import { systemConfig } from "../config/sys-conf";
import { dropDecodeArrayBase64 } from "./totp";
import { decode } from "cbor2";
import { createHash } from "node:crypto";
import cosekey from "parse-cosekey";
import type { AuthenticatorTransportFuture } from "@simplewebauthn/server";
export async function getRpId() {
const externalUrl =
process.env.WEBAUTHN_DOMAIN ?? (await systemConfig.getExternalUrl());
const externalUrlParsed = new URL(externalUrl);
return externalUrlParsed.hostname;
}
export interface Passkey {
name: string;
created: number;
userId: string;
webAuthnUserId: string;
id: string;
publicKey: string;
counter: number;
transports: Array<AuthenticatorTransportFuture> | undefined;
deviceType: string;
backedUp: boolean;
}
export interface WebAuthNv1Credentials {
passkeys: Array<Passkey>;
}
const ClientData = type({
type: "'webauthn.create'",
challenge: "string",
origin: "string",
});
const AuthData = type({
fmt: "string",
authData: "TypedArray.Uint8",
});
export async function parseAndValidatePasskeyCreation(
clientDataString: string,
attestationObjectString: string,
challenge: string,
) {
const clientData = dropDecodeArrayBase64(clientDataString);
const attestationObject = dropDecodeArrayBase64(attestationObjectString);
const utf8Decoder = new TextDecoder("utf-8");
const decodedClientData = utf8Decoder.decode(clientData);
const clientDataObj = ClientData(JSON.parse(decodedClientData));
if (clientDataObj instanceof ArkErrors)
throw createError({
statusCode: 400,
message: `Invalid client data JSON object: ${clientDataObj.summary}`,
});
const convertedChallenge = Buffer.from(
dropDecodeArrayBase64(clientDataObj.challenge),
).toString("utf8");
if (convertedChallenge !== challenge)
throw createError({
statusCode: 400,
message: "Challenge does not match.",
});
const tmp = decode(attestationObject);
const decodedAttestationObject = AuthData(tmp);
if (decodedAttestationObject instanceof ArkErrors)
throw createError({
statusCode: 400,
message: `Invalid attestation object: ${decodedAttestationObject.summary}`,
});
const userRpIdHash = decodedAttestationObject.authData.slice(0, 32);
const rpId = await getRpId();
const rpIdHash = createHash("sha256").update(rpId).digest();
if (!rpIdHash.equals(userRpIdHash))
throw createError({
statusCode: 400,
message: "Incorrect relying party ID",
});
const attestedCredentialData = decodedAttestationObject.authData.slice(37);
if (attestedCredentialData.length < 18)
throw createError({
statusCode: 400,
message:
"Attested credential data is missing AAGUID and/or credentialIdLength",
});
// const aaguid = attestedCredentialData.slice(0, 16);
const credentialIdLengthBuffer = attestedCredentialData.slice(16, 18);
const credentialIdLength = Buffer.from(credentialIdLengthBuffer).readUintBE(
0,
2,
);
if (attestedCredentialData.length < 18 + credentialIdLength)
throw createError({
statusCode: 400,
message: "Missing credential data of length: " + credentialIdLength,
});
const credentialId = attestedCredentialData.slice(
18,
18 + credentialIdLength,
);
const credentialPublicKey: Map<number, number> = decode(
attestedCredentialData.slice(18 + credentialIdLength),
);
if (!(credentialPublicKey instanceof Map))
throw createError({
statusCode: 400,
message: "Could not decode public key from attestion credential data",
|
||||
});
const credentialIdStr = Buffer.from(credentialId).toString("hex");
const jwk = cosekey.KeyParser.cose2jwk(credentialPublicKey);
return {
credentialIdStr,
jwk,
};
}
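An illustrative sketch (not part of the diff) of persisting the parsed result as one of the Passkey entries defined above. `clientDataJSON`, `attestationObject`, and `challenge` are assumed to come from the browser's navigator.credentials.create() response, encoded with dropEncodeArrayBase64; `user` and `webAuthnUserId`, and the choice to store the JWK as a JSON string, are assumptions for the example.

const { credentialIdStr, jwk } = await parseAndValidatePasskeyCreation(
  clientDataJSON,
  attestationObject,
  challenge,
);

const passkey: Passkey = {
  name: "My security key",
  created: Date.now(),
  userId: user.id,                // assumed user record
  webAuthnUserId: webAuthnUserId, // assumed, generated at registration start
  id: credentialIdStr,
  publicKey: JSON.stringify(jwk), // assumed storage format
  counter: 0,
  transports: undefined,
  deviceType: "singleDevice",     // placeholder value
  backedUp: false,
};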
@@ -72,18 +72,14 @@ export const dbCertificateStore = () => {
};
},
async blacklistCertificate(name: string) {
try {
await prisma.certificate.update({
where: {
id: name,
},
data: {
blacklisted: true,
},
});
} finally {
/* empty */
}
await prisma.certificate.updateMany({
where: {
id: name,
},
data: {
blacklisted: true,
},
});
},
async checkBlacklistCertificate(name: string): Promise<boolean> {
const result = await prisma.certificate.findUnique({
@@ -102,8 +102,6 @@ class CapabilityManager {
() => Promise<void> | void
> = {
[InternalClientCapability.PeerAPI]: async function () {
// const configuration =rawCapability as CapabilityConfiguration[InternalClientCapability.PeerAPI];
const currentClient = await prisma.client.findUnique({
where: { id: clientId },
select: {
@@ -111,26 +109,10 @@ class CapabilityManager {
},
});
if (!currentClient) throw new Error("Invalid client ID");
/*
if (currentClient.capabilities.includes(ClientCapabilities.PeerAPI)) {
await prisma.clientPeerAPIConfiguration.update({
where: { clientId },
data: {
endpoints: configuration.endpoints,
},
});
if (currentClient.capabilities.includes(ClientCapabilities.PeerAPI))
return;
}
await prisma.clientPeerAPIConfiguration.create({
data: {
clientId: clientId,
endpoints: configuration.endpoints,
},
});
*/
await prisma.client.update({
await prisma.client.updateMany({
where: { id: clientId },
data: {
capabilities: {
@@ -153,7 +135,7 @@ class CapabilityManager {
if (currentClient.capabilities.includes(ClientCapabilities.CloudSaves))
return;
await prisma.client.update({
await prisma.client.updateMany({
where: { id: clientId },
data: {
capabilities: {
@@ -175,7 +157,7 @@ class CapabilityManager {
)
return;
await prisma.client.update({
await prisma.client.updateMany({
where: { id: clientId },
data: {
capabilities: {
@@ -124,7 +124,8 @@ export function defineClientEventHandler<T>(handler: EventHandlerFunction<T>) {
fetchUser,
};
await prisma.client.update({
// Ignore response because we don't care if this fails
await prisma.client.updateMany({
where: { id: clientId },
data: { lastConnected: new Date() },
});
@@ -1,68 +0,0 @@
|
||||
import prisma from "../db/database";
|
||||
import type { DropManifest } from "./manifest";
|
||||
|
||||
const TIMEOUT = 1000 * 60 * 60 * 1; // 1 hour
|
||||
|
||||
class DownloadContextManager {
|
||||
private contexts: Map<
|
||||
string,
|
||||
{
|
||||
timeout: Date;
|
||||
manifest: DropManifest;
|
||||
versionName: string;
|
||||
libraryId: string;
|
||||
libraryPath: string;
|
||||
}
|
||||
> = new Map();
|
||||
|
||||
async createContext(game: string, versionName: string) {
|
||||
const version = await prisma.gameVersion.findUnique({
|
||||
where: {
|
||||
gameId_versionName: {
|
||||
gameId: game,
|
||||
versionName,
|
||||
},
|
||||
},
|
||||
include: {
|
||||
game: {
|
||||
select: {
|
||||
libraryId: true,
|
||||
libraryPath: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
if (!version) return undefined;
|
||||
|
||||
const contextId = crypto.randomUUID();
|
||||
this.contexts.set(contextId, {
|
||||
timeout: new Date(),
|
||||
manifest: JSON.parse(version.dropletManifest as string) as DropManifest,
|
||||
versionName,
|
||||
libraryId: version.game.libraryId!,
|
||||
libraryPath: version.game.libraryPath,
|
||||
});
|
||||
|
||||
return contextId;
|
||||
}
|
||||
|
||||
async fetchContext(contextId: string) {
|
||||
const context = this.contexts.get(contextId);
|
||||
if (!context) return undefined;
|
||||
context.timeout = new Date();
|
||||
this.contexts.set(contextId, context);
|
||||
return context;
|
||||
}
|
||||
|
||||
async cleanup() {
|
||||
for (const key of this.contexts.keys()) {
|
||||
const context = this.contexts.get(key)!;
|
||||
if (context.timeout.getTime() < Date.now() - TIMEOUT) {
|
||||
this.contexts.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const contextManager = new DownloadContextManager();
|
||||
export default contextManager;
|
||||
@@ -1,117 +0,0 @@
|
||||
import type { GameVersionModel } from "~/prisma/client/models";
|
||||
import prisma from "../db/database";
|
||||
import { sum } from "~/utils/array";
|
||||
|
||||
export type DropChunk = {
|
||||
permissions: number;
|
||||
ids: string[];
|
||||
checksums: string[];
|
||||
lengths: number[];
|
||||
};
|
||||
|
||||
export type DropManifest = {
|
||||
[key: string]: DropChunk;
|
||||
};
|
||||
|
||||
export type DropManifestMetadata = {
|
||||
manifest: DropManifest;
|
||||
versionName: string;
|
||||
};
|
||||
|
||||
export type DropGeneratedManifest = DropManifest & {
|
||||
[key: string]: { versionName: string };
|
||||
};
|
||||
|
||||
class ManifestGenerator {
|
||||
private static generateManifestFromMetadata(
|
||||
rootManifest: DropManifestMetadata,
|
||||
...overlays: DropManifestMetadata[]
|
||||
): DropGeneratedManifest {
|
||||
if (overlays.length == 0) {
|
||||
return Object.fromEntries(
|
||||
Object.entries(rootManifest.manifest).map(([key, value]) => {
|
||||
return [
|
||||
key,
|
||||
Object.assign({}, value, { versionName: rootManifest.versionName }),
|
||||
];
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
// Recurse in reverse order through versions, skipping files that already exist.
|
||||
const versions = [...overlays.reverse(), rootManifest];
|
||||
const manifest: DropGeneratedManifest = {};
|
||||
for (const version of versions) {
|
||||
for (const [filename, chunk] of Object.entries(version.manifest)) {
|
||||
if (manifest[filename]) continue;
|
||||
manifest[filename] = Object.assign({}, chunk, {
|
||||
versionName: version.versionName,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return manifest;
|
||||
}
|
||||
|
||||
// Local function because eventual caching
|
||||
async generateManifest(gameId: string, versionName: string) {
|
||||
const versions: GameVersionModel[] = [];
|
||||
|
||||
const baseVersion = await prisma.gameVersion.findUnique({
|
||||
where: {
|
||||
gameId_versionName: {
|
||||
gameId: gameId,
|
||||
versionName: versionName,
|
||||
},
|
||||
},
|
||||
});
|
||||
if (!baseVersion) return undefined;
|
||||
versions.push(baseVersion);
|
||||
|
||||
// Collect other versions if this is a delta
|
||||
if (baseVersion.delta) {
|
||||
// Start at the same index minus one, and keep grabbing them
|
||||
// until we run out or we hit something that isn't a delta
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
for (let i = baseVersion.versionIndex - 1; true; i--) {
|
||||
const currentVersion = await prisma.gameVersion.findFirst({
|
||||
where: {
|
||||
gameId: gameId,
|
||||
versionIndex: i,
|
||||
platform: baseVersion.platform,
|
||||
},
|
||||
});
|
||||
if (!currentVersion) return undefined;
|
||||
versions.push(currentVersion);
|
||||
if (!currentVersion.delta) break;
|
||||
}
|
||||
}
|
||||
const leastToMost = versions.reverse();
|
||||
const metadata: DropManifestMetadata[] = leastToMost.map((e) => {
|
||||
return {
|
||||
manifest: JSON.parse(
|
||||
e.dropletManifest?.toString() ?? "{}",
|
||||
) as DropManifest,
|
||||
versionName: e.versionName,
|
||||
};
|
||||
});
|
||||
|
||||
const manifest = ManifestGenerator.generateManifestFromMetadata(
|
||||
metadata[0],
|
||||
...metadata.slice(1),
|
||||
);
|
||||
|
||||
return manifest;
|
||||
}
|
||||
|
||||
calculateManifestSize(manifest: DropManifest) {
|
||||
return sum(
|
||||
Object.values(manifest)
|
||||
.map((chunk) => chunk.lengths)
|
||||
.flat(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export const manifestGenerator = new ManifestGenerator();
|
||||
export default manifestGenerator;
|
||||
@@ -1,8 +1,8 @@
|
||||
import cacheHandler from "../cache";
|
||||
import prisma from "../db/database";
|
||||
import manifestGenerator from "../downloads/manifest";
|
||||
import { sum } from "../../../utils/array";
|
||||
import type { Game, GameVersion } from "~/prisma/client/client";
|
||||
import { castManifest } from "../library/manifest";
|
||||
|
||||
export type GameSize = {
|
||||
gameName: string;
|
||||
@@ -46,20 +46,16 @@ class GameSizeManager {
|
||||
where: { gameId },
|
||||
});
|
||||
const sizes = await Promise.all(
|
||||
versions.map((version) =>
|
||||
manifestGenerator.calculateManifestSize(
|
||||
JSON.parse(version.dropletManifest as string),
|
||||
),
|
||||
),
|
||||
versions.map((version) => castManifest(version.dropletManifest).size),
|
||||
);
|
||||
return sum(sizes);
|
||||
}
|
||||
|
||||
async getGameVersionSize(
|
||||
gameId: string,
|
||||
versionName?: string,
|
||||
versionId?: string,
|
||||
): Promise<number | null> {
|
||||
if (!versionName) {
|
||||
if (!versionId) {
|
||||
const version = await prisma.gameVersion.findFirst({
|
||||
where: { gameId },
|
||||
orderBy: {
|
||||
@@ -69,18 +65,14 @@ class GameSizeManager {
|
||||
if (!version) {
|
||||
return null;
|
||||
}
|
||||
versionName = version.versionName;
|
||||
versionId = version.versionId;
|
||||
}
|
||||
|
||||
const manifest = await manifestGenerator.generateManifest(
|
||||
gameId,
|
||||
versionName,
|
||||
);
|
||||
if (!manifest) {
|
||||
return null;
|
||||
}
|
||||
const { dropletManifest } = (await prisma.gameVersion.findUnique({
|
||||
where: { gameId_versionId: { versionId, gameId } },
|
||||
}))!;
|
||||
|
||||
return manifestGenerator.calculateManifestSize(manifest);
|
||||
return castManifest(dropletManifest).size;
|
||||
}
|
||||
|
||||
private async isLatestVersion(
|
||||
@@ -88,7 +80,7 @@ class GameSizeManager {
|
||||
version: GameVersion,
|
||||
): Promise<boolean> {
|
||||
return gameVersions.length > 0
|
||||
? gameVersions[0].versionName === version.versionName
|
||||
? gameVersions[0].versionId === version.versionId
|
||||
: false;
|
||||
}
|
||||
|
||||
@@ -162,16 +154,16 @@ class GameSizeManager {
|
||||
|
||||
async cacheGameVersion(
|
||||
game: Game & { versions: GameVersion[] },
|
||||
versionName?: string,
|
||||
versionId?: string,
|
||||
) {
|
||||
const cacheVersion = async (version: GameVersion) => {
|
||||
const size = await this.getGameVersionSize(game.id, version.versionName);
|
||||
if (!version.versionName || !size) {
|
||||
const size = await this.getGameVersionSize(game.id, version.versionId);
|
||||
if (!version.versionId || !size) {
|
||||
return;
|
||||
}
|
||||
|
||||
const versionsSizes = {
|
||||
[version.versionName]: {
|
||||
[version.versionId]: {
|
||||
size,
|
||||
gameName: game.mName,
|
||||
gameId: game.id,
|
||||
@@ -186,9 +178,9 @@ class GameSizeManager {
|
||||
});
|
||||
};
|
||||
|
||||
if (versionName) {
|
||||
if (versionId) {
|
||||
const version = await prisma.gameVersion.findFirst({
|
||||
where: { gameId: game.id, versionName },
|
||||
where: { gameId: game.id, versionId },
|
||||
});
|
||||
if (!version) {
|
||||
return;
|
||||
|
||||
@@ -9,7 +9,6 @@ import path from "path";
|
||||
import prisma from "../db/database";
|
||||
import { fuzzy } from "fast-fuzzy";
|
||||
import taskHandler from "../tasks";
|
||||
import { parsePlatform } from "../utils/parseplatform";
|
||||
import notificationSystem from "../notifications";
|
||||
import { GameNotFoundError, type LibraryProvider } from "./provider";
|
||||
import { logger } from "../logging";
|
||||
@@ -17,6 +16,8 @@ import type { GameModel } from "~/prisma/client/models";
|
||||
import { createHash } from "node:crypto";
|
||||
import type { WorkingLibrarySource } from "~/server/api/v1/admin/library/sources/index.get";
|
||||
import gameSizeManager from "~/server/internal/gamesize";
|
||||
import { TORRENTIAL_SERVICE } from "../services/services/torrential";
|
||||
import type { ImportVersion } from "~/server/api/v1/admin/import/version/index.post";
|
||||
|
||||
export function createGameImportTaskId(libraryId: string, libraryPath: string) {
|
||||
return createHash("md5")
|
||||
@@ -24,7 +25,10 @@ export function createGameImportTaskId(libraryId: string, libraryPath: string) {
|
||||
.digest("hex");
|
||||
}
|
||||
|
||||
export function createVersionImportTaskId(gameId: string, versionName: string) {
|
||||
export function createVersionImportTaskKey(
|
||||
gameId: string,
|
||||
versionName: string,
|
||||
) {
|
||||
return createHash("md5")
|
||||
.update(`import:${gameId}:${versionName}`)
|
||||
.digest("hex");
|
||||
@@ -41,6 +45,10 @@ class LibraryManager {
|
||||
this.libraries.delete(id);
|
||||
}
|
||||
|
||||
getLibrary(libraryId: string): LibraryProvider<unknown> | undefined {
|
||||
return this.libraries.get(libraryId);
|
||||
}
|
||||
|
||||
async fetchLibraries(): Promise<WorkingLibrarySource[]> {
|
||||
const libraries = await prisma.library.findMany({});
|
||||
|
||||
@@ -79,7 +87,7 @@ class LibraryManager {
|
||||
const providerUnimportedGames = providerGames.filter(
|
||||
(libraryPath) =>
|
||||
!instanceGames[id]?.[libraryPath] &&
|
||||
!taskHandler.hasTask(createGameImportTaskId(id, libraryPath)),
|
||||
!taskHandler.hasTaskKey(createGameImportTaskId(id, libraryPath)),
|
||||
);
|
||||
unimportedGames[id] = providerUnimportedGames;
|
||||
}
|
||||
@@ -107,12 +115,12 @@ class LibraryManager {
|
||||
try {
|
||||
const versions = await provider.listVersions(
|
||||
libraryPath,
|
||||
game.versions.map((v) => v.versionName),
|
||||
game.versions.map((v) => v.versionPath),
|
||||
);
|
||||
const unimportedVersions = versions.filter(
|
||||
(e) =>
|
||||
game.versions.findIndex((v) => v.versionName == e) == -1 &&
|
||||
!taskHandler.hasTask(createVersionImportTaskId(game.id, e)),
|
||||
game.versions.findIndex((v) => v.versionPath == e) == -1 &&
|
||||
!taskHandler.hasTaskKey(createVersionImportTaskKey(game.id, e)),
|
||||
);
|
||||
return unimportedVersions;
|
||||
} catch (e) {
|
||||
@@ -127,12 +135,8 @@ class LibraryManager {
|
||||
async fetchGamesWithStatus() {
|
||||
const games = await prisma.game.findMany({
|
||||
include: {
|
||||
versions: {
|
||||
select: {
|
||||
versionName: true,
|
||||
},
|
||||
},
|
||||
library: true,
|
||||
versions: true,
|
||||
},
|
||||
orderBy: {
|
||||
mName: "asc",
|
||||
@@ -209,7 +213,7 @@ class LibraryManager {
|
||||
if (checkExt != ext) continue;
|
||||
const fuzzyValue = fuzzy(basename, game.mName);
|
||||
options.push({
|
||||
filename,
|
||||
filename: filename.replaceAll(" ", "\\ "),
|
||||
platform,
|
||||
match: fuzzyValue,
|
||||
});
|
||||
@@ -243,24 +247,10 @@ class LibraryManager {
|
||||
|
||||
async importVersion(
|
||||
gameId: string,
|
||||
versionName: string,
|
||||
metadata: {
|
||||
platform: string;
|
||||
onlySetup: boolean;
|
||||
|
||||
setup: string;
|
||||
setupArgs: string;
|
||||
launch: string;
|
||||
launchArgs: string;
|
||||
delta: boolean;
|
||||
|
||||
umuId: string;
|
||||
},
|
||||
versionPath: string,
|
||||
metadata: typeof ImportVersion.infer,
|
||||
) {
|
||||
const taskId = createVersionImportTaskId(gameId, versionName);
|
||||
|
||||
const platform = parsePlatform(metadata.platform);
|
||||
if (!platform) return undefined;
|
||||
const taskKey = createVersionImportTaskKey(gameId, versionPath);
|
||||
|
||||
const game = await prisma.game.findUnique({
|
||||
where: { id: gameId },
|
||||
@@ -271,17 +261,17 @@ class LibraryManager {
|
||||
const library = this.libraries.get(game.libraryId);
|
||||
if (!library) return undefined;
|
||||
|
||||
taskHandler.create({
|
||||
id: taskId,
|
||||
return await taskHandler.create({
|
||||
key: taskKey,
|
||||
taskGroup: "import:game",
|
||||
name: `Importing version ${versionName} for ${game.mName}`,
|
||||
name: `Importing version ${versionPath} for ${game.mName}`,
|
||||
acls: ["system:import:version:read"],
|
||||
async run({ progress, logger }) {
|
||||
// First, create the manifest via droplet.
|
||||
// This takes up 90% of our progress, so we wrap it in a *0.9
|
||||
const manifest = await library.generateDropletManifest(
|
||||
game.libraryPath,
|
||||
versionName,
|
||||
versionPath,
|
||||
(err, value) => {
|
||||
if (err) throw err;
|
||||
progress(value * 0.9);
|
||||
@@ -299,59 +289,64 @@ class LibraryManager {
|
||||
});
|
||||
|
||||
// Then, create the database object
|
||||
if (metadata.onlySetup) {
|
||||
await prisma.gameVersion.create({
|
||||
data: {
|
||||
gameId: gameId,
|
||||
versionName: versionName,
|
||||
dropletManifest: manifest,
|
||||
versionIndex: currentIndex,
|
||||
delta: metadata.delta,
|
||||
umuIdOverride: metadata.umuId,
|
||||
platform: platform,
|
||||
|
||||
onlySetup: true,
|
||||
setupCommand: metadata.setup,
|
||||
setupArgs: metadata.setupArgs.split(" "),
|
||||
await prisma.gameVersion.create({
|
||||
data: {
|
||||
game: {
|
||||
connect: {
|
||||
id: gameId,
|
||||
},
|
||||
},
|
||||
});
|
||||
} else {
|
||||
await prisma.gameVersion.create({
|
||||
data: {
|
||||
gameId: gameId,
|
||||
versionName: versionName,
|
||||
dropletManifest: manifest,
|
||||
versionIndex: currentIndex,
|
||||
delta: metadata.delta,
|
||||
umuIdOverride: metadata.umuId,
|
||||
platform: platform,
|
||||
|
||||
onlySetup: false,
|
||||
setupCommand: metadata.setup,
|
||||
setupArgs: metadata.setupArgs.split(" "),
|
||||
launchCommand: metadata.launch,
|
||||
launchArgs: metadata.launchArgs.split(" "),
|
||||
displayName: metadata.displayName ?? null,
|
||||
|
||||
versionPath,
|
||||
dropletManifest: manifest,
|
||||
versionIndex: currentIndex,
|
||||
delta: metadata.delta,
|
||||
|
||||
onlySetup: metadata.onlySetup,
|
||||
setups: {
|
||||
createMany: {
|
||||
data: metadata.setups.map((v) => ({
|
||||
command: v.launch,
|
||||
platform: v.platform,
|
||||
})),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
launches: {
|
||||
createMany: !metadata.onlySetup
|
||||
? {
|
||||
data: metadata.launches.map((v) => ({
|
||||
name: v.name,
|
||||
command: v.launch,
|
||||
platform: v.platform,
|
||||
...(v.executorId
|
||||
? { executorId: v.executorId }
|
||||
: undefined),
|
||||
})),
|
||||
}
|
||||
: { data: [] },
|
||||
},
|
||||
},
|
||||
});
|
||||
logger.info("Successfully created version!");
|
||||
|
||||
notificationSystem.systemPush({
|
||||
nonce: `version-create-${gameId}-${versionName}`,
|
||||
title: `'${game.mName}' ('${versionName}') finished importing.`,
|
||||
description: `Drop finished importing version ${versionName} for ${game.mName}.`,
|
||||
nonce: `version-create-${gameId}-${versionPath}`,
|
||||
title: `'${game.mName}' ('${versionPath}') finished importing.`,
|
||||
description: `Drop finished importing version ${versionPath} for ${game.mName}.`,
|
||||
actions: [`View|/admin/library/${gameId}`],
|
||||
acls: ["system:import:version:read"],
|
||||
});
|
||||
|
||||
await libraryManager.cacheCombinedGameSize(gameId);
|
||||
await libraryManager.cacheGameVersionSize(gameId, versionName);
|
||||
await libraryManager.cacheGameVersionSize(gameId, versionPath);
|
||||
|
||||
await TORRENTIAL_SERVICE.utils().invalidate(gameId, versionPath);
|
||||
progress(100);
|
||||
},
|
||||
});
|
||||
|
||||
return taskId;
|
||||
}
|
||||
|
||||
async peekFile(
|
||||
@@ -381,7 +376,7 @@ class LibraryManager {
|
||||
await prisma.gameVersion.deleteMany({
|
||||
where: {
|
||||
gameId: gameId,
|
||||
versionName: version,
|
||||
versionId: version,
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
27 server/internal/library/manifest.ts Normal file
@@ -0,0 +1,27 @@
import type { JsonValue } from "@prisma/client/runtime/library";
export type Manifest = V2Manifest;
export type V2Manifest = {
version: "2";
size: number;
key: number[];
chunks: { [key: string]: V2ChunkData[] };
};
export type V2ChunkData = {
files: Array<V2FileEntry>;
checksum: string;
iv: number[];
};
export type V2FileEntry = {
filename: string;
start: number;
length: number;
permissions: number;
};
export function castManifest(manifest: JsonValue): Manifest {
return JSON.parse(manifest as string) as Manifest;
}
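A quick, illustrative sketch (not part of the diff) of how the new V2 manifest type is consumed elsewhere in this change, e.g. the game-size code switching to castManifest(version.dropletManifest).size. `version` is assumed to be a GameVersion row whose dropletManifest column holds the manifest JSON string.

const manifest = castManifest(version.dropletManifest);
console.log(
  `v${manifest.version}: ${manifest.size} bytes across ${Object.keys(manifest.chunks).length} chunked files`,
);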
@@ -7,15 +7,18 @@ import {
|
||||
import { LibraryBackend } from "~/prisma/client/enums";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import droplet, { DropletHandler } from "@drop-oss/droplet";
|
||||
import droplet, {
|
||||
hasBackendForPath,
|
||||
listFiles,
|
||||
peekFile,
|
||||
readFile,
|
||||
} from "@drop-oss/droplet";
|
||||
import { fsStats } from "~/server/internal/utils/files";
|
||||
|
||||
export const FilesystemProviderConfig = type({
|
||||
baseDir: "string",
|
||||
});
|
||||
|
||||
export const DROPLET_HANDLER = new DropletHandler();
|
||||
|
||||
export class FilesystemProvider
|
||||
implements LibraryProvider<typeof FilesystemProviderConfig.infer>
|
||||
{
|
||||
@@ -64,7 +67,7 @@ export class FilesystemProvider
|
||||
const validVersionDirs = versionDirs.filter((e) => {
|
||||
if (ignoredVersions && ignoredVersions.includes(e)) return false;
|
||||
const fullDir = path.join(this.config.baseDir, game, e);
|
||||
return DROPLET_HANDLER.hasBackendForPath(fullDir);
|
||||
return hasBackendForPath(fullDir);
|
||||
});
|
||||
return validVersionDirs;
|
||||
}
|
||||
@@ -72,7 +75,7 @@ export class FilesystemProvider
|
||||
async versionReaddir(game: string, version: string): Promise<string[]> {
|
||||
const versionDir = path.join(this.config.baseDir, game, version);
|
||||
if (!fs.existsSync(versionDir)) throw new VersionNotFoundError();
|
||||
return DROPLET_HANDLER.listFiles(versionDir);
|
||||
return await listFiles(versionDir);
|
||||
}
|
||||
|
||||
async generateDropletManifest(
|
||||
@@ -83,25 +86,14 @@ export class FilesystemProvider
|
||||
): Promise<string> {
|
||||
const versionDir = path.join(this.config.baseDir, game, version);
|
||||
if (!fs.existsSync(versionDir)) throw new VersionNotFoundError();
|
||||
const manifest = await new Promise<string>((r, j) =>
|
||||
droplet.generateManifest(
|
||||
DROPLET_HANDLER,
|
||||
versionDir,
|
||||
progress,
|
||||
log,
|
||||
(err, result) => {
|
||||
if (err) return j(err);
|
||||
r(result);
|
||||
},
|
||||
),
|
||||
);
|
||||
const manifest = await droplet.generateManifest(versionDir, progress, log);
|
||||
return manifest;
|
||||
}
|
||||
|
||||
async peekFile(game: string, version: string, filename: string) {
|
||||
const filepath = path.join(this.config.baseDir, game, version);
|
||||
if (!fs.existsSync(filepath)) return undefined;
|
||||
const stat = DROPLET_HANDLER.peekFile(filepath, filename);
|
||||
const stat = await peekFile(filepath, filename);
|
||||
return { size: Number(stat) };
|
||||
}
|
||||
|
||||
@@ -113,7 +105,7 @@ export class FilesystemProvider
|
||||
) {
|
||||
const filepath = path.join(this.config.baseDir, game, version);
|
||||
if (!fs.existsSync(filepath)) return undefined;
|
||||
const stream = DROPLET_HANDLER.readFile(
|
||||
const stream = await readFile(
|
||||
filepath,
|
||||
filename,
|
||||
options?.start ? BigInt(options.start) : undefined,
|
||||
|
||||
@@ -4,8 +4,12 @@ import { VersionNotFoundError } from "../provider";
|
||||
import { LibraryBackend } from "~/prisma/client/enums";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import droplet from "@drop-oss/droplet";
|
||||
import { DROPLET_HANDLER } from "./filesystem";
|
||||
import droplet, {
|
||||
hasBackendForPath,
|
||||
listFiles,
|
||||
peekFile,
|
||||
readFile,
|
||||
} from "@drop-oss/droplet";
|
||||
import { fsStats } from "~/server/internal/utils/files";
|
||||
|
||||
export const FlatFilesystemProviderConfig = type({
|
||||
@@ -48,7 +52,7 @@ export class FlatFilesystemProvider
|
||||
const versionDirs = fs.readdirSync(this.config.baseDir);
|
||||
const validVersionDirs = versionDirs.filter((e) => {
|
||||
const fullDir = path.join(this.config.baseDir, e);
|
||||
return DROPLET_HANDLER.hasBackendForPath(fullDir);
|
||||
return hasBackendForPath(fullDir);
|
||||
});
|
||||
return validVersionDirs;
|
||||
}
|
||||
@@ -65,7 +69,7 @@ export class FlatFilesystemProvider
|
||||
async versionReaddir(game: string, _version: string) {
|
||||
const versionDir = path.join(this.config.baseDir, game);
|
||||
if (!fs.existsSync(versionDir)) throw new VersionNotFoundError();
|
||||
return DROPLET_HANDLER.listFiles(versionDir);
|
||||
return await listFiles(versionDir);
|
||||
}
|
||||
|
||||
async generateDropletManifest(
|
||||
@@ -76,24 +80,13 @@ export class FlatFilesystemProvider
|
||||
) {
|
||||
const versionDir = path.join(this.config.baseDir, game);
|
||||
if (!fs.existsSync(versionDir)) throw new VersionNotFoundError();
|
||||
const manifest = await new Promise<string>((r, j) =>
|
||||
droplet.generateManifest(
|
||||
DROPLET_HANDLER,
|
||||
versionDir,
|
||||
progress,
|
||||
log,
|
||||
(err, result) => {
|
||||
if (err) return j(err);
|
||||
r(result);
|
||||
},
|
||||
),
|
||||
);
|
||||
const manifest = await droplet.generateManifest(versionDir, progress, log);
|
||||
return manifest;
|
||||
}
|
||||
async peekFile(game: string, _version: string, filename: string) {
|
||||
const filepath = path.join(this.config.baseDir, game);
|
||||
if (!fs.existsSync(filepath)) return undefined;
|
||||
const stat = DROPLET_HANDLER.peekFile(filepath, filename);
|
||||
const stat = await peekFile(filepath, filename);
|
||||
return { size: Number(stat) };
|
||||
}
|
||||
async readFile(
|
||||
@@ -104,7 +97,7 @@ export class FlatFilesystemProvider
|
||||
) {
|
||||
const filepath = path.join(this.config.baseDir, game);
|
||||
if (!fs.existsSync(filepath)) return undefined;
|
||||
const stream = DROPLET_HANDLER.readFile(
|
||||
const stream = await readFile(
|
||||
filepath,
|
||||
filename,
|
||||
options?.start ? BigInt(options.start) : undefined,
|
||||
|
||||
@@ -10,10 +10,10 @@ import type {
|
||||
CompanyMetadata,
|
||||
GameMetadataRating,
|
||||
} from "./types";
|
||||
import axios, { type AxiosRequestConfig } from "axios";
|
||||
import TurndownService from "turndown";
|
||||
import { DateTime } from "luxon";
|
||||
import type { TaskRunContext } from "../tasks";
|
||||
import type { NitroFetchOptions, NitroFetchRequest } from "nitropack";
|
||||
|
||||
interface GiantBombResponseType<T> {
|
||||
error: "OK" | string;
|
||||
@@ -120,7 +120,7 @@ export class GiantBombProvider implements MetadataProvider {
|
||||
resource: string,
|
||||
url: string,
|
||||
query: { [key: string]: string },
|
||||
options?: AxiosRequestConfig,
|
||||
options?: NitroFetchOptions<NitroFetchRequest, "post">,
|
||||
) {
|
||||
const queryString = new URLSearchParams({
|
||||
...query,
|
||||
@@ -130,13 +130,7 @@ export class GiantBombProvider implements MetadataProvider {
|
||||
|
||||
const finalURL = `https://www.giantbomb.com/api/${resource}/${url}?${queryString}`;
|
||||
|
||||
const overlay: AxiosRequestConfig = {
|
||||
url: finalURL,
|
||||
baseURL: "",
|
||||
};
|
||||
const response = await axios.request<GiantBombResponseType<T>>(
|
||||
Object.assign({}, options, overlay),
|
||||
);
|
||||
const response = await $fetch<GiantBombResponseType<T>>(finalURL, options);
|
||||
return response;
|
||||
}
|
||||
|
||||
@@ -152,7 +146,7 @@ export class GiantBombProvider implements MetadataProvider {
|
||||
query: query,
|
||||
resources: ["game"].join(","),
|
||||
});
|
||||
const mapped = results.data.results.map((result) => {
|
||||
const mapped = results.results.map((result) => {
|
||||
const date =
|
||||
(result.original_release_date
|
||||
? DateTime.fromISO(result.original_release_date).year
|
||||
@@ -172,13 +166,13 @@ export class GiantBombProvider implements MetadataProvider {
|
||||
return mapped;
|
||||
}
|
||||
async fetchGame(
|
||||
{ id, publisher, developer, createObject }: _FetchGameMetadataParams,
|
||||
{ id, company, createObject }: _FetchGameMetadataParams,
|
||||
context?: TaskRunContext,
|
||||
): Promise<GameMetadata> {
|
||||
context?.logger.info("Using GiantBomb provider");
|
||||
|
||||
const result = await this.request<GameResult>("game", id, {});
|
||||
const gameData = result.data.results;
|
||||
const gameData = result.results;
|
||||
|
||||
const longDescription = gameData.description
|
||||
? this.turndown.turndown(gameData.description)
|
||||
@@ -189,7 +183,7 @@ export class GiantBombProvider implements MetadataProvider {
|
||||
for (const pub of gameData.publishers) {
|
||||
context?.logger.info(`Importing publisher "${pub.name}"`);
|
||||
|
||||
const res = await publisher(pub.name);
|
||||
const res = await company(pub.name);
|
||||
if (res === undefined) {
|
||||
context?.logger.warn(`Failed to import publisher "${pub.name}"`);
|
||||
continue;
|
||||
@@ -206,7 +200,7 @@ export class GiantBombProvider implements MetadataProvider {
|
||||
for (const dev of gameData.developers) {
|
||||
context?.logger.info(`Importing developer "${dev.name}"`);
|
||||
|
||||
const res = await developer(dev.name);
|
||||
const res = await company(dev.name);
|
||||
if (res === undefined) {
|
||||
context?.logger.warn(`Failed to import developer "${dev.name}"`);
|
||||
continue;
|
||||
@@ -244,8 +238,8 @@ export class GiantBombProvider implements MetadataProvider {
|
||||
metadataSource: MetadataSource.GiantBomb,
|
||||
metadataId: reviewId,
|
||||
mReviewCount: 1,
|
||||
mReviewRating: review.data.results.score / 5,
|
||||
mReviewHref: review.data.results.site_detail_url,
|
||||
mReviewRating: review.results.score / 5,
|
||||
mReviewHref: review.results.site_detail_url,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -289,8 +283,7 @@ export class GiantBombProvider implements MetadataProvider {
|
||||
|
||||
// Find the right entry
|
||||
const company =
|
||||
results.data.results.find((e) => e.name == query) ??
|
||||
results.data.results.at(0);
|
||||
results.results.find((e) => e.name == query) ?? results.results.at(0);
|
||||
if (!company) return undefined;
|
||||
|
||||
const longDescription = company.description
|
||||
|
||||
@@ -9,12 +9,11 @@ import type {
|
||||
_FetchCompanyMetadataParams,
|
||||
CompanyMetadata,
|
||||
} from "./types";
|
||||
import type { AxiosRequestConfig } from "axios";
|
||||
import axios from "axios";
|
||||
import { DateTime } from "luxon";
|
||||
import * as jdenticon from "jdenticon";
|
||||
import type { TaskRunContext } from "../tasks";
|
||||
import { logger } from "~/server/internal/logging";
|
||||
import type { NitroFetchOptions, NitroFetchRequest } from "nitropack";
|
||||
|
||||
type IGDBID = number;
|
||||
|
||||
@@ -171,20 +170,16 @@ export class IGDBProvider implements MetadataProvider {
|
||||
grant_type: "client_credentials",
|
||||
});
|
||||
|
||||
const response = await axios.request<TwitchAuthResponse>({
|
||||
url: `https://id.twitch.tv/oauth2/token?${params.toString()}`,
|
||||
baseURL: "",
|
||||
method: "POST",
|
||||
});
|
||||
const response = await $fetch<TwitchAuthResponse>(
|
||||
`https://id.twitch.tv/oauth2/token?${params.toString()}`,
|
||||
{
|
||||
method: "POST",
|
||||
},
|
||||
);
|
||||
|
||||
if (response.status !== 200)
|
||||
throw new Error(
|
||||
`Error in IGDB \nStatus Code: ${response.status}\n${response.data}`,
|
||||
);
|
||||
|
||||
this.accessToken = response.data.access_token;
|
||||
this.accessToken = response.access_token;
|
||||
this.accessTokenExpiry = DateTime.now().plus({
|
||||
seconds: response.data.expires_in,
|
||||
seconds: response.expires_in,
|
||||
});
|
||||
|
||||
logger.info("IGDB done authorizing with twitch");
|
||||
@@ -202,7 +197,7 @@ export class IGDBProvider implements MetadataProvider {
|
||||
private async request<T extends object>(
|
||||
resource: string,
|
||||
body: string,
|
||||
options?: AxiosRequestConfig,
|
||||
options?: NitroFetchOptions<NitroFetchRequest, "post">,
|
||||
) {
|
||||
await this.refreshCredentials();
|
||||
|
||||
@@ -214,11 +209,10 @@ export class IGDBProvider implements MetadataProvider {
|
||||
|
||||
const finalURL = `https://api.igdb.com/v4/${resource}`;
|
||||
|
||||
const overlay: AxiosRequestConfig = {
|
||||
url: finalURL,
|
||||
const overlay: NitroFetchOptions<NitroFetchRequest, "post"> = {
|
||||
baseURL: "",
|
||||
method: "POST",
|
||||
data: body,
|
||||
body,
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
"Client-ID": this.clientId,
|
||||
@@ -226,24 +220,13 @@ export class IGDBProvider implements MetadataProvider {
|
||||
"content-type": "text/plain",
|
||||
},
|
||||
};
|
||||
const response = await axios.request<T[] | IGDBErrorResponse[]>(
|
||||
const response = await $fetch<T[] | IGDBErrorResponse[]>(
|
||||
finalURL,
|
||||
Object.assign({}, options, overlay),
|
||||
);
|
||||
|
||||
if (response.status !== 200) {
|
||||
let cause = "";
|
||||
|
||||
response.data.forEach((item) => {
|
||||
if ("cause" in item) cause = item.cause;
|
||||
});
|
||||
|
||||
throw new Error(
|
||||
`Error in igdb \nStatus Code: ${response.status} \nCause: ${cause}`,
|
||||
);
|
||||
}
|
||||
|
||||
// should not have an error object if the status code is 200
|
||||
return <T[]>response.data;
|
||||
return <T[]>response;
|
||||
}
|
||||
|
||||
private async _getMediaInternal(
|
||||
@@ -356,7 +339,7 @@ export class IGDBProvider implements MetadataProvider {
|
||||
return results;
|
||||
}
|
||||
async fetchGame(
|
||||
{ id, publisher, developer, createObject }: _FetchGameMetadataParams,
|
||||
{ id, company, createObject }: _FetchGameMetadataParams,
|
||||
context?: TaskRunContext,
|
||||
): Promise<GameMetadata> {
|
||||
const body = `where id = ${id}; fields *;`;
|
||||
@@ -416,34 +399,28 @@ export class IGDBProvider implements MetadataProvider {
|
||||
{ name: string } & IGDBItem
|
||||
>("companies", `where id = ${foundInvolved.company}; fields name;`);
|
||||
|
||||
for (const company of findCompanyResponse) {
|
||||
for (const companyData of findCompanyResponse) {
|
||||
context?.logger.info(
|
||||
`Found involved company "${company.name}" as: ${foundInvolved.developer ? "developer, " : ""}${foundInvolved.publisher ? "publisher" : ""}`,
|
||||
);
|
||||
|
||||
const res = await company(companyData.name);
|
||||
if (res === undefined) {
|
||||
context?.logger.warn(
|
||||
`Failed to import company "${companyData.name}"`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// if company was a dev or publisher
|
||||
// CANNOT use else since a company can be both
|
||||
if (foundInvolved.developer) {
|
||||
const res = await developer(company.name);
|
||||
if (res === undefined) {
|
||||
context?.logger.warn(
|
||||
`Failed to import developer "${company.name}"`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
context?.logger.info(`Imported developer "${company.name}"`);
|
||||
context?.logger.info(`Imported developer "${companyData.name}"`);
|
||||
developers.push(res);
|
||||
}
|
||||
|
||||
if (foundInvolved.publisher) {
|
||||
const res = await publisher(company.name);
|
||||
if (res === undefined) {
|
||||
context?.logger.warn(
|
||||
`Failed to import publisher "${company.name}"`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
context?.logger.info(`Imported publisher "${company.name}"`);
|
||||
context?.logger.info(`Imported publisher "${companyData.name}"`);
|
||||
publishers.push(res);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -191,10 +191,10 @@ export class MetadataHandler {
|
||||
|
||||
const gameId = randomUUID();
|
||||
|
||||
const taskId = createGameImportTaskId(libraryId, libraryPath);
|
||||
await taskHandler.create({
|
||||
const key = createGameImportTaskId(libraryId, libraryPath);
|
||||
return await taskHandler.create({
|
||||
name: `Import game "${result.name}" (${libraryPath})`,
|
||||
id: taskId,
|
||||
key,
|
||||
taskGroup: "import:game",
|
||||
acls: ["system:import:game:read"],
|
||||
async run(context) {
|
||||
@@ -213,6 +213,11 @@ export class MetadataHandler {
|
||||
}),
|
||||
);
|
||||
|
||||
const companyLookupCache: {
|
||||
[key: string]: Awaited<
|
||||
ReturnType<typeof metadataHandler.fetchCompany>
|
||||
>;
|
||||
} = {};
|
||||
let metadata: GameMetadata | undefined = undefined;
|
||||
try {
|
||||
metadata = await provider.fetchGame(
|
||||
@@ -220,8 +225,13 @@ export class MetadataHandler {
|
||||
id: result.id,
|
||||
name: result.name,
|
||||
// wrap in anonymous functions to keep references to this
|
||||
publisher: (name: string) => metadataHandler.fetchCompany(name),
|
||||
developer: (name: string) => metadataHandler.fetchCompany(name),
|
||||
company: async (name: string) => {
|
||||
if (companyLookupCache[name]) return companyLookupCache[name];
|
||||
|
||||
const companyData = await metadataHandler.fetchCompany(name);
|
||||
companyLookupCache[name] = companyData;
|
||||
return companyData;
|
||||
},
|
||||
createObject,
|
||||
},
|
||||
wrapTaskContext(context, {
|
||||
@@ -281,10 +291,10 @@ export class MetadataHandler {
|
||||
|
||||
logger.info(`Finished game import.`);
|
||||
progress(100);
|
||||
|
||||
context.addAction(`View Game:/admin/library/${gameId}`);
|
||||
},
|
||||
});
|
||||
|
||||
return taskId;
|
||||
}
|
||||
|
||||
// Careful with this function, it has no typechecking
|
||||
|
||||
@@ -9,14 +9,13 @@ import type {
|
||||
CompanyMetadata,
|
||||
GameMetadataRating,
|
||||
} from "./types";
|
||||
import type { AxiosRequestConfig } from "axios";
|
||||
import axios from "axios";
|
||||
import * as jdenticon from "jdenticon";
|
||||
import { DateTime } from "luxon";
|
||||
import * as cheerio from "cheerio";
|
||||
import { type } from "arktype";
|
||||
import type { TaskRunContext } from "../tasks";
|
||||
import { logger } from "~/server/internal/logging";
|
||||
import type { NitroFetchOptions, NitroFetchRequest } from "nitropack";
|
||||
|
||||
interface PCGamingWikiParseRawPage {
|
||||
parse: {
|
||||
@@ -104,35 +103,24 @@ export class PCGamingWikiProvider implements MetadataProvider {
|
||||
|
||||
private async request<T>(
|
||||
query: URLSearchParams,
|
||||
options?: AxiosRequestConfig,
|
||||
options?: NitroFetchOptions<NitroFetchRequest, "get" | "post">,
|
||||
) {
|
||||
const finalURL = `https://www.pcgamingwiki.com/w/api.php?${query.toString()}`;
|
||||
|
||||
const overlay: AxiosRequestConfig = {
|
||||
url: finalURL,
|
||||
baseURL: "",
|
||||
};
|
||||
const response = await axios.request<T>(
|
||||
Object.assign({}, options, overlay),
|
||||
);
|
||||
|
||||
if (response.status !== 200)
|
||||
throw new Error(
|
||||
`Error in pcgamingwiki \nStatus Code: ${response.status}\n${response.data}`,
|
||||
);
|
||||
const response = await $fetch<T>(finalURL, options);
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
private async cargoQuery<T>(
|
||||
query: URLSearchParams,
|
||||
options?: AxiosRequestConfig,
|
||||
options?: NitroFetchOptions<NitroFetchRequest, "get" | "post">,
|
||||
) {
|
||||
const response = await this.request<PCGamingWikiCargoResult<T>>(
|
||||
query,
|
||||
options,
|
||||
);
|
||||
if (response.data.error !== undefined)
|
||||
if (response.error !== undefined)
|
||||
throw new Error(`Error in pcgamingwiki cargo query`);
|
||||
return response;
|
||||
}
@@ -150,7 +138,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
      pageid: pageID,
    });
    const res = await this.request<PCGamingWikiParseRawPage>(searchParams);
    const $ = cheerio.load(res.data.parse.text["*"]);
    const $ = cheerio.load(res.parse.text["*"]);
    // get intro based on 'introduction' class
    const introductionEle = $(".introduction").first();
    // remove citations from intro
@@ -281,7 +269,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
      await this.cargoQuery<PCGamingWikiSearchStub>(searchParams);

    const results: GameMetadataSearchResult[] = [];
    for (const result of response.data.cargoquery) {
    for (const result of response.cargoquery) {
      const game = result.title;
      const pageContent = await this.getPageContent(game.PageID);

@@ -372,7 +360,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
  }

  async fetchGame(
    { id, name, publisher, developer, createObject }: _FetchGameMetadataParams,
    { id, name, company, createObject }: _FetchGameMetadataParams,
    context?: TaskRunContext,
  ): Promise<GameMetadata> {
    context?.logger.info("Using PCGamingWiki provider");
@@ -391,10 +379,10 @@ export class PCGamingWikiProvider implements MetadataProvider {
      this.cargoQuery<PCGamingWikiGame>(searchParams),
      this.getPageContent(id),
    ]);
    if (res.data.cargoquery.length < 1)
    if (res.cargoquery.length < 1)
      throw new Error("Error in pcgamingwiki, no game");

    const game = res.data.cargoquery[0].title;
    const game = res.cargoquery[0].title;

    const publishers: CompanyModel[] = [];
    if (game.Publishers !== null) {
@@ -403,7 +391,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
      for (const pub of pubListClean) {
        context?.logger.info(`Importing publisher "${pub}"...`);

        const res = await publisher(pub);
        const res = await company(pub);
        if (res === undefined) {
          context?.logger.warn(`Failed to import publisher "${pub}"`);
          continue;
@@ -422,7 +410,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
      const devListClean = this.parseWikiStringArray(game.Developers);
      for (const dev of devListClean) {
        context?.logger.info(`Importing developer "${dev}"...`);
        const res = await developer(dev);
        const res = await company(dev);
        if (res === undefined) {
          context?.logger.warn(`Failed to import developer "${dev}"`);
          continue;
@@ -487,8 +475,8 @@ export class PCGamingWikiProvider implements MetadataProvider {
    // TODO: replace with company logo
    const icon = createObject(jdenticon.toPng(query, 512));

    for (let i = 0; i < res.data.cargoquery.length; i++) {
      const company = res.data.cargoquery[i].title;
    for (let i = 0; i < res.cargoquery.length; i++) {
      const company = res.cargoquery[i].title;

      const fixedCompanyName =
        this.parseWikiStringArray(company.PageName)[0] ?? company.PageName;

@@ -9,8 +9,8 @@ import type {
  GameMetadataRating,
} from "./types";
import type { TaskRunContext } from "../tasks";
import axios from "axios";
import * as jdenticon from "jdenticon";
import { load } from "cheerio";

/**
 * Note: The Steam API is largely undocumented.
@@ -188,19 +188,15 @@ export class SteamProvider implements MetadataProvider {
  }

  async search(query: string): Promise<GameMetadataSearchResult[]> {
    const response = await axios.get<SteamSearchStub[]>(
    const response = await $fetch<SteamSearchStub[]>(
      `https://steamcommunity.com/actions/SearchApps/${query}`,
    );

    if (
      response.status !== 200 ||
      !response.data ||
      response.data.length === 0
    ) {
    if (!response || response.length === 0) {
      return [];
    }

    const result: GameMetadataSearchResult[] = response.data.map((item) => ({
    const result: GameMetadataSearchResult[] = response.map((item) => ({
      id: item.appid,
      name: item.name,
      icon: item.icon || "",
@@ -208,7 +204,7 @@ export class SteamProvider implements MetadataProvider {
      year: 0,
    }));

    const ids = response.data.map((i) => i.appid);
    const ids = response.map((i) => i.appid);

    const detailsResponse = await this._fetchGameDetails(ids, {
      include_basic_info: true,
@@ -235,7 +231,7 @@ export class SteamProvider implements MetadataProvider {
  }

  async fetchGame(
    { id, publisher, developer, createObject }: _FetchGameMetadataParams,
    { id, company, createObject }: _FetchGameMetadataParams,
    context?: TaskRunContext,
  ): Promise<GameMetadata> {
    context?.logger.info(`Starting Steam metadata fetch for game ID: ${id}`);
@@ -294,38 +290,66 @@ export class SteamProvider implements MetadataProvider {
    context?.progress(70);

    context?.logger.info("Processing publishers and developers...");
    const storePage = await $fetch<string>(
      `https://store.steampowered.com/app/${id}/`,
    );
    const $ = load(storePage);

    const companyLinks = $("a")
      .toArray()
      .filter(
        (v) =>
          v.attribs["href"]?.startsWith(
            "https://store.steampowered.com/developer/",
          ) ||
          v.attribs["href"]?.startsWith(
            "https://store.steampowered.com/publisher/",
          ),
      )
      .map((v) => v.attribs.href);

    const companies: {
      [key: string]: {
        pub: boolean;
        dev: boolean;
      };
    } = {};

    companyLinks.forEach((v) => {
      const [type, name] = v
        .substring("https://store.steampowered.com/".length, v.indexOf("?"))
        .split("/");

      companies[name] ??= { pub: false, dev: false };
      switch (type) {
        case "publisher":
          companies[name].pub = true;
          break;
        case "developer":
          companies[name].dev = true;
          break;
      }
    });

    const publishers = [];
    const publisherNames = currentGame.basic_info.publishers || [];
    context?.logger.info(
      `Found ${publisherNames.length} publisher(s) to process`,
    );

    for (const pub of publisherNames) {
      context?.logger.info(`Processing publisher: "${pub.name}"`);
      const comp = await publisher(pub.name);
      if (!comp) {
        context?.logger.warn(`Failed to import publisher "${pub.name}"`);
        continue;
      }
      publishers.push(comp);
      context?.logger.info(`Successfully imported publisher: "${pub.name}"`);
    }

    const developers = [];
    const developerNames = currentGame.basic_info.developers || [];
    context?.logger.info(
      `Found ${developerNames.length} developer(s) to process`,
    );

    for (const dev of developerNames) {
      context?.logger.info(`Processing developer: "${dev.name}"`);
      const comp = await developer(dev.name);
      if (!comp) {
        context?.logger.warn(`Failed to import developer "${dev.name}"`);
        continue;
    for (const [companyName, types] of Object.entries(companies)) {
      context?.logger.info(`Processing company: "${companyName}"`);
      const comp = await company(companyName);

      if (types.dev) {
        developers.push(comp);
        context?.logger.info(
          `Successfully imported developer: "${companyName}"`,
        );
      }
      if (types.pub) {
        publishers.push(comp);
        context?.logger.info(
          `Successfully imported publisher: "${companyName}"`,
        );
      }
      developers.push(comp);
      context?.logger.info(`Successfully imported developer: "${dev.name}"`);
    }

    context?.logger.info(
@@ -425,23 +449,19 @@ export class SteamProvider implements MetadataProvider {
      l: "english",
    });

    const response = await axios.get(
      `https://store.steampowered.com/developer/${query.replaceAll(" ", "")}/?${searchParams.toString()}`,
      {
        maxRedirects: 0,
      },
    );
    const url = `https://store.steampowered.com/developer/${encodeURIComponent(query)}/?${searchParams.toString()}`;
    const response = await $fetch<string>(url);

    if (response.status !== 200 || !response.data) {
    if (!response) {
      return undefined;
    }

    const html = response.data;
    const html = response;

    // Extract metadata from HTML meta tags
    const metadata = this._extractMetaTagsFromHtml(html);

    if (!metadata.title) {
    if (!metadata.title || metadata.title == "Steam Search") {
      return undefined;
    }

@@ -623,14 +643,12 @@ export class SteamProvider implements MetadataProvider {
      }),
    });

    const request = await axios.get<SteamAppDetailsPackage>(
    const request = await $fetch<SteamAppDetailsPackage>(
      `https://api.steampowered.com/IStoreBrowseService/GetItems/v1/?${searchParams.toString()}`,
    );

    if (request.status !== 200) return [];

    const result = [];
    const storeItems = request.data?.response?.store_items ?? [];
    const storeItems = request.response?.store_items ?? [];

    for (const item of storeItems) {
      if (item.success !== 1) continue;
@@ -723,14 +741,14 @@ export class SteamProvider implements MetadataProvider {
      language,
    });

    const request = await axios.get<SteamTagsPackage>(
    const request = await $fetch<SteamTagsPackage>(
      `https://api.steampowered.com/IStoreService/GetTagList/v1/?${searchParams.toString()}`,
    );

    if (request.status !== 200 || !request.data.response?.tags) return [];
    if (!request.response?.tags) return [];

    const tagMap = new Map<number, string>();
    for (const tag of request.data.response.tags) {
    for (const tag of request.response.tags) {
      tagMap.set(tag.tagid, tag.name);
    }

@@ -756,15 +774,11 @@ export class SteamProvider implements MetadataProvider {
      l: language,
    });

    const request = await axios.get<SteamWebAppDetailsPackage>(
    const request = await $fetch<SteamWebAppDetailsPackage>(
      `https://store.steampowered.com/api/appdetails?${searchParams.toString()}`,
    );

    if (request.status !== 200) {
      return undefined;
    }

    const appData = request.data[appid]?.data;
    const appData = request[appid]?.data;
    if (!appData) {
      return undefined;
    }

server/internal/metadata/types.d.ts (vendored)
@@ -65,8 +65,7 @@ export interface _FetchGameMetadataParams {
  id: string;
  name: string;

  publisher: (query: string) => Promise<Company | undefined>;
  developer: (query: string) => Promise<Company | undefined>;
  company: (query: string) => Promise<Company | undefined>;

  createObject: (data: TransactionDataType) => ObjectReference;
}
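The separate publisher and developer lookup callbacks collapse into a single company resolver, since both ultimately resolve to the same Company records. A hedged, self-contained sketch of a resolver a caller might pass; the in-memory map and the Company stand-in type are invented for illustration, only the callback signature comes from types.d.ts:

// Sketch only: one `company` resolver replacing the old publisher/developer pair.
type Company = { id: string; name: string }; // simplified stand-in for the real model

const knownCompanies = new Map<string, Company>();

const company = async (query: string): Promise<Company | undefined> => {
  const existing = knownCompanies.get(query.toLowerCase());
  if (existing) return existing;
  const created: Company = { id: crypto.randomUUID(), name: query };
  knownCompanies.set(query.toLowerCase(), created);
  return created;
};

// A provider then imports publishers and developers through the same callback:
// await provider.fetchGame({ id, name, company, createObject }, context);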

@@ -117,10 +117,12 @@ class NewsManager {
      image?: string;
    },
  ) {
    return await prisma.article.update({
      where: { id },
      data,
    });
    return (
      await prisma.article.updateManyAndReturn({
        where: { id },
        data,
      })
    ).at(0);
  }
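prisma.article.update throws when no record matches the where clause; updateManyAndReturn returns an array instead, so .at(0) yields the updated article or undefined without an exception. A minimal sketch of the pattern in isolation (the data fields are placeholders):

// Sketch: update-if-exists without relying on Prisma's "record not found" error.
async function updateArticleIfExists(id: string, data: { title?: string }) {
  const updated = await prisma.article.updateManyAndReturn({
    where: { id },
    data,
  });
  // An empty array means nothing matched; no exception is thrown.
  return updated.at(0);
}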

  async delete(id: string) {

@@ -68,13 +68,11 @@ class SaveManager {
      });
    }

    const newSave = await prisma.saveSlot.update({
    const newSaves = await prisma.saveSlot.updateManyAndReturn({
      where: {
        id: {
          userId,
          gameId,
          index,
        },
        userId,
        gameId,
        index,
      },
      data: {
        historyObjectIds: {
@@ -86,6 +84,9 @@ class SaveManager {
        ...(clientId && { lastUsedClientId: clientId }),
      },
    });
    const newSave = newSaves.at(0);
    if (!newSave)
      throw createError({ statusCode: 404, message: "Save not found" });

    const historyLimit = await applicationSettings.get("saveSlotHistoryLimit");
    if (newSave.historyObjectIds.length > historyLimit) {
@@ -101,19 +102,20 @@ class SaveManager {
      await this.deleteObjectFromSave(gameId, userId, index, objectId);
    }

    await prisma.saveSlot.update({
    const { count } = await prisma.saveSlot.updateMany({
      where: {
        id: {
          userId,
          gameId,
          index,
        },
        userId,
        gameId,
        index,
      },
      data: {
        historyObjectIds: toKeepObjects,
        historyChecksums: toKeepHashes,
      },
    });
    if (count == 0) {
      throw createError({ statusCode: 404, message: "Save not found" });
    }
    }
  }
}

server/internal/services/index.ts (new file, 162 lines)
@@ -0,0 +1,162 @@
import type { ChildProcess } from "child_process";
import { logger } from "../logging";
import type { Logger } from "pino";

class ServiceManager {
  private services: Map<string, Service<unknown>> = new Map();

  register(name: string, service: Service<unknown>) {
    this.services.set(name, service);
  }

  spin() {
    for (const service of this.services.values()) {
      service.spin();
    }
  }

  kill() {
    for (const service of this.services.values()) {
      service.kill();
    }
  }

  healthchecks() {
    return this.services
      .entries()
      .map(([name, service]) => ({ name, healthy: service.serviceHealthy() }))
      .toArray();
  }
}

export type Executor = () => ChildProcess;
export type Setup = () => Promise<boolean>;
export type Healthcheck = () => Promise<boolean>;
export class Service<T> {
  name: string;
  private executor: Executor;
  private setup: Setup | undefined;
  private healthcheck: Healthcheck | undefined;

  private logger: Logger<never>;

  private currentProcess: ChildProcess | undefined;

  private runningHealthcheck: boolean = false;
  private healthy: boolean = true;
  private spun: boolean = false;

  private uutils: T;

  constructor(
    name: string,
    executor: Executor,
    setup?: Setup,
    healthcheck?: Healthcheck,
    utils?: T,
  ) {
    this.name = name;
    const serviceLogger = logger.child({ name: `service-${name}` });
    this.logger = serviceLogger;
    this.executor = executor;
    this.setup = setup;
    this.healthcheck = healthcheck;
    this.uutils = utils!;
  }

  spin() {
    if (this.spun) return;
    this.launch();

    if (this.healthcheck) {
      setInterval(this.runHealthcheck, 1000 * 60 * 5); // Every 5 minutes
    }

    this.spun = true;
  }

  kill() {
    this.spun = false;
    this.currentProcess?.kill();
  }

  register() {
    serviceManager.register(this.name, this);
  }

  private async launch() {
    if (this.currentProcess) return;
    const disableEnv = `EXTERNAL_SERVICE_${this.name.toUpperCase()}`;
    if (!process.env[disableEnv]) {
      const serviceProcess = this.executor();
      this.logger.info("service launched");
      serviceProcess.on("close", async (code, signal) => {
        serviceProcess.kill();
        this.currentProcess = undefined;
        this.logger.warn(
          `service exited with code ${code} (${signal}), restarting...`,
        );
        await new Promise((r) => setTimeout(r, 5000));
        if (this.spun) this.launch();
      });
      serviceProcess.stdout?.on("data", (data) =>
        this.logger.info(data.toString().trim()),
      );
      serviceProcess.stderr?.on("data", (data) =>
        this.logger.error(data.toString().trim()),
      );
      this.currentProcess = serviceProcess;
    }

    if (this.setup) {
      while (true) {
        try {
          const hasSetup = await this.setup();
          if (hasSetup) break;
          throw "setup function returned false...";
        } catch (e) {
          this.logger.warn(`failed setup, trying again... | ${e}`);
          await new Promise((r) => setTimeout(r, 7000));
        }
      }
      this.healthy = true;
    }
  }

  private async runHealthcheck() {
    if (!this.healthcheck || !this.currentProcess || this.runningHealthcheck)
      return;
    this.runningHealthcheck = true;
    let fails = 0;

    while (true) {
      try {
        const successful = await this.healthcheck();
        if (successful) break;
      } finally {
        /* empty */
      }
      this.healthy = false;
      fails++;
      if (fails >= 5) {
        this.currentProcess.kill();
        this.runningHealthcheck = false;
        return;
      }
    }

    this.healthy = true;
    this.runningHealthcheck = false;
  }

  serviceHealthy() {
    return this.healthy;
  }

  utils() {
    return this.uutils;
  }
}

export const serviceManager = new ServiceManager();
export default serviceManager;
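server/internal/services/index.ts introduces a small supervisor: a Service wraps a child process with an optional async setup step and a periodic healthcheck, restarting the process when it exits, and ServiceManager fans out spin/kill/healthchecks to everything registered. A hedged usage sketch follows; the command, the typed utils payload, and the import alias are invented for illustration:

import { spawn } from "child_process";
import serviceManager, { Service } from "~/server/internal/services";

// Illustrative only: a trivial service with a typed utils object.
const EXAMPLE_SERVICE = new Service(
  "example",
  () => spawn("sleep", ["infinity"]), // executor: must return a ChildProcess
  async () => true, // setup: retried until it resolves true
  async () => true, // healthcheck: polled every 5 minutes once spun
  { greet: (who: string) => `hello ${who}` }, // exposed via .utils()
);

EXAMPLE_SERVICE.register(); // adds it to the shared serviceManager
serviceManager.spin(); // launches every registered service once
console.log(serviceManager.healthchecks()); // e.g. [{ name: "example", healthy: true }]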

server/internal/services/services/nginx.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import { spawn } from "child_process";
import { Service } from "..";
import { systemConfig } from "../../config/sys-conf";
import path from "path";
import fs from "fs";

export const NGINX_SERVICE = new Service(
  "nginx",
  () => {
    const nginxConfig = path.resolve(
      process.env.NGINX_CONFIG ?? "./build/nginx.conf",
    );
    const nginxPrefix = path.join(systemConfig.getDataFolder(), "nginx");
    fs.mkdirSync(nginxPrefix, { recursive: true });

    return spawn("nginx", ["-c", nginxConfig, "-p", nginxPrefix]);
  },
  undefined,
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore
  async () => await $fetch(`http://127.0.0.1:8080/`),
);
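Because Service.launch skips spawning when EXTERNAL_SERVICE_<NAME> is present in the environment, the bundled nginx (or torrential below) can be replaced with an externally managed instance. A hedged sketch of the relevant knobs; the values are examples, only the variable names come from this diff and from Service.launch:

// Sketch: environment configuration read by this service.
// NGINX_CONFIG overrides the bundled config path; EXTERNAL_SERVICE_NGINX,
// when set at all, tells Service.launch not to spawn nginx itself.
process.env.NGINX_CONFIG = "/etc/drop/nginx.conf";
process.env.EXTERNAL_SERVICE_NGINX = "1";
// The healthcheck above still expects something answering on 127.0.0.1:8080,
// so an external nginx would need to listen there (assumption from the check URL).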

server/internal/services/services/torrential.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
import { spawn } from "child_process";
import { Service } from "..";
import fs from "fs";
import prisma from "../../db/database";
import { logger } from "../../logging";
import { systemConfig } from "../../config/sys-conf";

const INTERNAL_DEPOT_URL = new URL(
  process.env.INTERNAL_DEPOT_URL ?? "http://localhost:5000",
);

export const TORRENTIAL_SERVICE = new Service(
  "torrential",
  () => {
    const localDir = fs.readdirSync(".");
    if ("torrential" in localDir) return spawn("./torrential", [], {});

    const envPath = process.env.TORRENTIAL_PATH;
    if (envPath) return spawn(envPath, [], {});

    return spawn("torrential", [], {});
  },
  async () => {
    const externalUrl = systemConfig.getExternalUrl();
    const depot = await prisma.depot.upsert({
      where: {
        id: "torrential",
      },
      update: {
        endpoint: `${externalUrl}/api/v1/depot`,
      },
      create: {
        id: "torrential",
        endpoint: `${externalUrl}/api/v1/depot`,
      },
    });

    await $fetch(`${INTERNAL_DEPOT_URL.toString()}key`, {
      method: "POST",
      body: { key: depot.key },
    });
    return true;
  },
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore
  async () => await $fetch(`${INTERNAL_DEPOT_URL.toString()}healthcheck`),
  {
    async invalidate(gameId: string, versionId: string) {
      try {
        await $fetch(`${INTERNAL_DEPOT_URL.toString()}invalidate`, {
          method: "POST",
          body: {
            game: gameId,
            version: versionId,
          },
        });
      } catch (e) {
        logger.warn("invalidate torrential cache failed with error: " + e);
      }
    },
  },
);
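The fifth constructor argument becomes the service's typed utils() payload, so other modules can trigger torrential's cache invalidation without knowing about the internal depot URL. A hedged sketch of a caller; the surrounding update hook is an assumption, only utils().invalidate comes from this file:

import { TORRENTIAL_SERVICE } from "~/server/internal/services/services/torrential";

// Sketch: after a game version's files change, drop torrential's cached data.
// gameId/versionId would come from whatever update path invoked this.
async function onVersionChanged(gameId: string, versionId: string) {
  await TORRENTIAL_SERVICE.utils().invalidate(gameId, versionId);
}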

@@ -16,9 +16,17 @@ export default function createDBSessionHandler(): SessionProvider {
      },
      create: {
        token,
        ...session,
        ...(session.authenticated?.userId
          ? { userId: session.authenticated?.userId }
          : undefined),
        expiresAt: session.expiresAt,
        data: session as object,
      },

      update: {
        expiresAt: session.expiresAt,
        data: session as object,
      },
      update: session,
    });
    return true;
  },
@@ -39,7 +47,7 @@ export default function createDBSessionHandler(): SessionProvider {
    // i hate casting
    // need to cast to unknown since result.data can be an N deep json object technically
    // ts doesn't like that be cast down to the more constraining session type
    return result as unknown as T;
    return result.data as unknown as T;
  },
  async removeSession(token) {
    await cache.remove(token);

@@ -6,6 +6,7 @@ import type { MinimumRequestObject } from "~/server/h3";
import type { DurationLike } from "luxon";
import { DateTime } from "luxon";
import createDBSessionHandler from "./db";
import prisma from "../db/database";

/*
This implementation may need work.
@@ -13,6 +14,9 @@ This implementation may need work.
It exposes an API that should stay static, but there are plenty of opportunities for optimisation/organisation under the hood
*/

// 10 minutes
const SUPERLEVEL_LENGTH = 10 * 60 * 1000;

const dropTokenCookieName = "drop-token";
const normalSessionLength: DurationLike = {
  days: 31,
@@ -21,6 +25,8 @@ const extendedSessionLength: DurationLike = {
  year: 1,
};

type SigninResult = ["signin", "2fa", "fail"][number];

export class SessionHandler {
  private sessionProvider: SessionProvider;

@@ -31,14 +37,53 @@ export class SessionHandler {
    // this.sessionProvider = createMemorySessionProvider();
  }

  async signin(h3: H3Event, userId: string, rememberMe: boolean = false) {
  async signin(
    h3: H3Event,
    userId: string,
    rememberMe: boolean = false,
  ): Promise<SigninResult> {
    const mfaCount = await prisma.linkedMFAMec.count({
      where: { userId, enabled: true },
    });

    const expiresAt = this.createExipreAt(rememberMe);
    const token = this.createSessionCookie(h3, expiresAt);
    return await this.sessionProvider.setSession(token, {
      userId,

    const token =
      this.getSessionToken(h3) ?? this.createSessionCookie(h3, expiresAt);
    const session = (await this.sessionProvider.getSession(token)) ?? {
      expiresAt,
      data: {},
    });
    };
    const wasAuthenticated = !!session.authenticated;
    session.authenticated = {
      userId,
      level: session.authenticated?.level ?? 10,
      requiredLevel: mfaCount > 0 ? 20 : 10,
      superleveledExpiry: undefined,
    };
    if (
      !wasAuthenticated &&
      session.authenticated.level >= session.authenticated.requiredLevel
    )
      session.authenticated.superleveledExpiry = Date.now() + SUPERLEVEL_LENGTH;
    const success = await this.sessionProvider.setSession(token, session);
    if (!success) return "fail";

    if (session.authenticated.level < session.authenticated.requiredLevel)
      return "2fa";
    return "signin";
  }

  async mfa(h3: H3Event, amount: number) {
    const token = this.getSessionToken(h3);
    if (!token)
      throw createError({ statusCode: 403, message: "User not signed in" });
    const session = await this.sessionProvider.getSession(token);
    if (!session || !session.authenticated)
      throw createError({ statusCode: 403, message: "User not signed in" });

    session.authenticated.level += amount;
    await this.sessionProvider.setSession(token, session);
  }

  /**
@@ -48,12 +93,54 @@ export class SessionHandler {
  async getSession<T extends Session>(request: MinimumRequestObject) {
    const token = this.getSessionToken(request);
    if (!token) return undefined;
    // TODO: should validate if session is expired or not here, not in application code

    const data = await this.sessionProvider.getSession<T>(token);
    if (!data) return undefined;
    if (new Date(data.expiresAt).getTime() < Date.now()) return undefined; // Expired
    return data;
  }

  async getSessionDataKey<T>(
    request: MinimumRequestObject,
    key: string,
  ): Promise<T | undefined> {
    const token = this.getSessionToken(request);
    if (!token) return undefined;

    const session = await this.sessionProvider.getSession(token);
    if (!session) return undefined;
    return session.data[key] as T;
  }

  async setSessionDataKey<T>(request: H3Event, key: string, value: T) {
    const expiresAt = this.createExipreAt(true);

    const token =
      this.getSessionToken(request) ??
      this.createSessionCookie(request, expiresAt);

    const session = (await this.sessionProvider.getSession(token)) ?? {
      expiresAt,
      data: {},
    };
    console.log(session);
    session.data[key] = value;
    await this.sessionProvider.setSession(token, session);
    return true;
  }

  async deleteSessionDataKey(request: MinimumRequestObject, key: string) {
    const token = this.getSessionToken(request);
    if (!token) return false;

    const session = await this.sessionProvider.getSession(token);
    if (!session) return false;
    // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
    delete session.data[key];
    await this.sessionProvider.setSession(token, session);
    return true;
  }

  /**
   * Signout session associated with request and deauthenticates it
   * @param request

server/internal/session/types.d.ts (vendored)
@@ -1,5 +1,6 @@
export type Session = {
  userId: string;
  authenticated?: AuthenticatedSession;

  expiresAt: Date;
  data: {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -7,6 +8,13 @@ export type Session = {
  };
};

export interface AuthenticatedSession {
  userId: string;
  level: number;
  requiredLevel: number;
  superleveledExpiry: number | undefined;
}
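Sessions now carry an AuthenticatedSession with a numeric trust level: sign-in grants level 10, requiredLevel becomes 20 when the user has an enabled MFA mechanism, mfa() adds to the level once a factor is verified, and superleveledExpiry marks a short window of elevated trust after a fresh full sign-in. A hedged sketch of how calling code might interpret that model; the helper is illustrative, the threshold semantics come from the hunks above:

import type { AuthenticatedSession } from "~/server/internal/session/types";

// Sketch: interpreting the level model introduced above.
function describeAuth(auth: AuthenticatedSession | undefined): string {
  if (!auth) return "anonymous";
  if (auth.level < auth.requiredLevel) return "needs second factor"; // signin() returned "2fa"
  if (auth.superleveledExpiry && auth.superleveledExpiry > Date.now())
    return "fully signed in (recently elevated)";
  return "fully signed in";
}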

export interface SessionProvider {
  getSession: <T extends Session>(token: string) => Promise<T | undefined>;
  setSession: (token: string, data: Session) => Promise<boolean>;

@@ -14,15 +14,19 @@ import pino from "pino";
import { logger } from "~/server/internal/logging";
import { Writable } from "node:stream";

type TaskActionLink = `${string}:${string}`;

// a task that has been run
type FinishedTask = {
  success: boolean;
  progress: number;
  key: string | undefined;
  log: string[];
  error: { title: string; description: string } | undefined;
  name: string;
  taskGroup: TaskGroup;
  acls: string[];
  actions: TaskActionLink[];

  // ISO timestamp of when the task started
  startTime: string;
@@ -53,7 +57,6 @@ class TaskHandler {
    "cleanup:invitations",
    "cleanup:sessions",
    "check:update",
    "debug",
  ];
  private weeklyScheduledTasks: TaskGroup[] = ["cleanup:objects"];

@@ -74,8 +77,12 @@ class TaskHandler {
    this.taskCreators.set(task.taskGroup, task.build);
  }

  async create(task: Task) {
    if (this.hasTask(task.id)) throw new Error("Task with ID already exists.");
  async create(iTask: Omit<Task, "id">) {
    const task: Task = { ...iTask, id: crypto.randomUUID() };
    if (this.hasTaskID(task.id))
      throw new Error("Task with ID already exists.");
    if (task.key && this.hasTaskKey(task.key))
      throw new Error("Task with key already exists");

    let updateCollectTimeout: NodeJS.Timeout | undefined;
    let updateCollectResolves: Array<(value: unknown) => void> = [];
@@ -115,6 +122,7 @@ class TaskHandler {
        error: taskEntry.error,
        log: taskEntry.log.slice(logOffset),
        reset,
        actions: taskEntry.actions,
      };
      logOffset = taskEntry.log.length;

@@ -189,6 +197,7 @@ class TaskHandler {

    this.taskPool.set(task.id, {
      name: task.name,
      key: task.key,
      taskGroup: task.taskGroup,
      success: false,
      progress: 0,
@@ -198,6 +207,7 @@ class TaskHandler {
      acls: task.acls,
      startTime: new Date().toISOString(),
      endTime: undefined,
      actions: task.initialActions ?? [],
    });

    await updateAllClients(true);
@@ -205,9 +215,13 @@ class TaskHandler {
    droplet.callAltThreadFunc(async () => {
      const taskEntry = this.taskPool.get(task.id);
      if (!taskEntry) throw new Error("No task entry");
      const addAction = (action: TaskActionLink) => {
        taskEntry.actions.push(action);
        updateAllClients();
      };

      try {
        await task.run({ progress, logger: taskLogger });
        await task.run({ progress, logger: taskLogger, addAction });
        taskEntry.success = true;
      } catch (error: unknown) {
        taskEntry.success = false;
@@ -239,6 +253,7 @@ class TaskHandler {
          log: taskEntry.log,

          acls: taskEntry.acls,
          actions: taskEntry.actions,

          ...(taskEntry.error ? { error: taskEntry.error } : undefined),
        },
@@ -246,6 +261,8 @@ class TaskHandler {

      this.taskPool.delete(task.id);
    });

    return task.id;
  }

  async connect(
@@ -290,6 +307,7 @@ class TaskHandler {
        | undefined,
      log: task.log,
      progress: task.progress,
      actions: task.actions as TaskActionLink[],
    };
    peer.send(JSON.stringify(catchupMessage));
  }
@@ -336,10 +354,16 @@ class TaskHandler {
      .toArray();
  }

  hasTask(id: string) {
  hasTaskID(id: string) {
    return this.taskPool.has(id);
  }

  hasTaskKey(key: string) {
    return (
      this.taskPool.values().find((v) => v.key && v.key == key) != undefined
    );
  }

  dailyTasks() {
    return this.dailyScheduledTasks;
  }
@@ -355,8 +379,8 @@ class TaskHandler {
      return;
    }
    const task = taskConstructor();
    await this.create(task);
    return task.id;
    const id = await this.create(task);
    return id;
  }

  /**
@@ -415,6 +439,7 @@ class TaskHandler {
export type TaskRunContext = {
  progress: (progress: number) => void;
  logger: typeof logger;
  addAction: (link: TaskActionLink) => void;
};

export function wrapTaskContext(
@@ -426,6 +451,7 @@ export function wrapTaskContext(
  });

  return {
    ...context,
    progress(progress) {
      if (progress > 100 || progress < 0) {
        logger.warn("[wrapTaskContext] progress must be between 0 and 100");
@@ -444,10 +470,12 @@ export function wrapTaskContext(

export interface Task {
  id: string;
  key?: string;
  taskGroup: TaskGroup;
  name: string;
  run: (context: TaskRunContext) => Promise<void>;
  acls: GlobalACL[];
  initialActions?: TaskActionLink[];
}

export type TaskMessage = {
@@ -458,6 +486,7 @@ export type TaskMessage = {
  error: null | undefined | { title: string; description: string };
  log: string[];
  reset?: boolean;
  actions: TaskActionLink[];
};

export type PeerImpl = {
@@ -471,6 +500,7 @@ export interface BuildTask {
  name: string;
  run: (context: TaskRunContext) => Promise<void>;
  acls: GlobalACL[];
  initialActions?: TaskActionLink[];
}

interface DropTask {
@@ -519,6 +549,7 @@ export function defineDropTask(buildTask: BuildTask): DropTask {
      name: buildTask.name,
      run: buildTask.run,
      acls: buildTask.acls,
      initialActions: buildTask.initialActions ?? [],
    }),
  };
}

@@ -11,7 +11,7 @@ type FieldReferenceMap = {
};

export default defineDropTask({
  buildId: () => `cleanup:objects:${new Date().toISOString()}`,
  buildId: () => `cleanup:objects:${Date.now()}`,
  name: "Cleanup Objects",
  acls: ["system:maintenance:read"],
  taskGroup: "cleanup:objects",
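Tasks are now created without an explicit id (the handler assigns crypto.randomUUID()), defineDropTask forwards initialActions, and the run context gains addAction for surfacing links while a task is running. A hedged sketch of a task written against the updated shape; the group name, action target, and field values are placeholders, only the field names and context shape come from the hunks above:

import { defineDropTask } from "~/server/internal/tasks";

// Sketch: a task using the v4 additions (initialActions, addAction).
export default defineDropTask({
  buildId: () => `example:task:${Date.now()}`,
  name: "Example Task",
  taskGroup: "debug", // placeholder group; real groups live in tasks/index
  acls: ["system:maintenance:read"],
  initialActions: [],
  run: async ({ progress, logger, addAction }) => {
    logger.info("doing illustrative work...");
    progress(50);
    // Surface a link on the task card once the relevant entity exists.
    addAction("View Game:/admin/library/some-game-id");
    progress(100);
  },
});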