Compare commits

...

38 Commits

Author SHA1 Message Date
Bas950
8b68bf85c8 chore: release v0.0.10 2024-09-13 17:27:44 +02:00
Bas950
e4c794a9ad chore: 202 on disabled flag 2024-09-13 17:27:38 +02:00
Bas950
6e8258d76f chore: release v0.0.21 2024-09-13 15:08:16 +02:00
Bas950
56b796c621 chore: use ky 2024-09-13 15:08:08 +02:00
Bas950
0de59c48b4 chore: release v0.0.20 2024-09-13 14:37:31 +02:00
Bas950
60056e069d chore: update log 2024-09-13 14:37:24 +02:00
Bas950
b6bad90919 chore: release v0.0.9 2024-09-13 14:33:34 +02:00
Bas950
ee21bb9dec chore: release v0.0.20 2024-09-13 14:31:39 +02:00
Bas950
6efac4fef1 feat: use scienceId 2024-09-13 14:31:27 +02:00
Bas950
93424793bd chore: release v0.0.19 2024-09-13 13:46:33 +02:00
Bas950
affcb6a0cf chore: add reason 2024-09-13 13:46:27 +02:00
Bas950
bb56949dfb chore: release v0.0.18 2024-09-13 13:02:31 +02:00
Bas950
c06fe04b65 chore: fix time 2024-09-13 13:02:26 +02:00
Florian Metz
ef976341ba chore: release v0.0.17 2024-09-13 12:33:19 +02:00
Florian Metz
38893891af chore: why does it not abort 2024-09-13 12:33:10 +02:00
Florian Metz
63eeeefda7 chore: release v0.0.16 2024-09-13 12:05:42 +02:00
Florian Metz
056db21cb0 chore: add p-limit dependency for session cleanup 2024-09-13 12:05:37 +02:00
Bas950
d8dc08c6c3 chore: release v0.0.15 2024-09-13 11:55:36 +02:00
Bas950
634391b6e3 chore: always return the key 2024-09-13 11:55:32 +02:00
Florian Metz
c46cf6975a chore: release v0.0.14 2024-09-13 11:52:23 +02:00
Florian Metz
68c6b4fcdc chore: add p-limit dependency for session cleanup 2024-09-13 11:52:00 +02:00
Florian Metz
55fa07d5b5 chore: release v0.0.13 2024-09-13 11:38:49 +02:00
Florian Metz
903c238b33 chore: add timeout to headless session deletion 2024-09-13 11:38:40 +02:00
Bas950
acd9afb2b1 chore: release v0.0.12 2024-09-13 11:32:55 +02:00
Bas950
4bd42390eb chore: move some code 2024-09-13 11:32:44 +02:00
Florian Metz
c014504464 chore: release v0.0.11 2024-09-13 11:00:16 +02:00
Florian Metz
24fe349b60 chore: optimize session cleanup with batch deletion 2024-09-13 10:59:13 +02:00
Bas950
ee5428ce08 chore: release v0.0.10 2024-09-13 10:38:38 +02:00
Bas950
e4b1010160 chore: skip clearOldSesssions if another in progress 2024-09-13 10:38:21 +02:00
Bas950
34c42d59ed chore: release v0.0.9 2024-09-12 15:45:16 +02:00
Bas950
d9267361aa feat: use scan 2024-09-12 15:45:10 +02:00
Bas950
0d5382fd50 chore: release v0.0.8 2024-09-12 14:49:01 +02:00
Bas950
e9015b1204 chore: iodk 2024-09-12 14:47:31 +02:00
Bas950
cea36426ab chore: idk kek 2024-09-12 14:46:13 +02:00
Bas950
48c141094e chore: release v0.0.8 2024-09-12 14:41:56 +02:00
Bas950
e67fb97e14 chore: update lockfile 2024-09-12 14:41:51 +02:00
Bas950
0bd0d759f6 chore: release v0.0.7 2024-09-12 14:38:34 +02:00
Bas950
60b7f63409 feat(api-master): add metrics 2024-09-12 14:38:10 +02:00
18 changed files with 737 additions and 744 deletions

View File

@@ -1,7 +1,7 @@
{
"name": "@premid/api-master",
"type": "module",
"version": "0.0.6",
"version": "0.0.21",
"private": true,
"description": "PreMiD's api master",
"license": "MPL-2.0",
@@ -14,12 +14,16 @@
"dev": "node --watch --env-file .env --enable-source-maps ."
},
"dependencies": {
"@discordjs/rest": "^2.3.0",
"@envelop/sentry": "^9.0.0",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/exporter-prometheus": "^0.52.1",
"@opentelemetry/node": "^0.24.0",
"@sentry/node": "^8.17.0",
"cron": "^3.1.7",
"debug": "^4.3.6",
"ioredis": "^5.3.2"
"ioredis": "^5.3.2",
"ky": "^1.7.2",
"p-limit": "^6.1.0"
},
"devDependencies": {
"@types/debug": "^4.1.12"

View File

@@ -1,46 +1,105 @@
import { REST } from "@discordjs/rest";
import pLimit from "p-limit";
import ky, { HTTPError, TimeoutError } from "ky";
import { mainLog, redis } from "../index.js";
let inProgress = false;
export async function clearOldSessions() {
const sessions = await redis.hgetall("pmd-api.sessions");
const now = Date.now();
if (Object.keys(sessions).length === 0) {
mainLog("No sessions to clear");
if (inProgress) {
mainLog("Session cleanup already in progress");
return;
}
mainLog(`Checking ${Object.keys(sessions).length} sessions`);
inProgress = true;
const now = Date.now();
let cursor = "0";
let totalSessions = 0;
let cleared = 0;
for (const [key, value] of Object.entries(sessions)) {
const session = JSON.parse(value) as {
token: string;
session: string;
lastUpdated: number;
};
const batchSize = 100;
let keysToDelete: string[] = [];
// ? If the session is younger than 30seconds, skip it
if (now - session.lastUpdated < 30000)
continue;
mainLog("Starting session cleanup");
//* Delete the session
try {
const discord = new REST({ version: "10", authPrefix: "Bearer" });
discord.setToken(session.token);
await discord.post("/users/@me/headless-sessions/delete", {
body: {
token: session.session,
},
});
}
catch (error) {
mainLog(`Failed to delete session: %O`, error);
const limit = pLimit(100); // Create a limit of 100 concurrent operations
do {
const [nextCursor, result] = await redis.hscan("pmd-api.sessions", cursor, "COUNT", batchSize);
cursor = nextCursor;
totalSessions += result.length / 2;
const deletePromises = [];
for (let i = 0; i < result.length; i += 2) {
const key = result[i];
const value = result[i + 1];
if (!key || !value) {
continue;
}
const session = JSON.parse(value) as {
token: string;
session: string;
lastUpdated: number;
};
if (now - session.lastUpdated < 30000)
continue;
deletePromises.push(limit(() => deleteSession(session, key)));
}
cleared++;
await redis.hdel("pmd-api.sessions", key);
const results = await Promise.allSettled(deletePromises);
results.forEach((result) => {
if (result.status === "fulfilled" && result.value) {
keysToDelete.push(result.value);
cleared++;
}
});
if (keysToDelete.length >= batchSize) {
await redis.hdel("pmd-api.sessions", ...keysToDelete);
keysToDelete = [];
}
} while (cursor !== "0");
if (keysToDelete.length > 0) {
await redis.hdel("pmd-api.sessions", ...keysToDelete);
}
mainLog(`Cleared ${cleared} sessions`);
if (totalSessions === 0) {
mainLog("No sessions to clear");
}
else {
mainLog(`Checked ${totalSessions} sessions, cleared ${cleared}`);
}
inProgress = false;
}
/**
 * Deletes one Discord headless session belonging to the given redis hash key.
 *
 * Always resolves with `key` — even when the Discord API call fails — so the
 * caller treats the session as handled and drops it from the redis hash.
 * NOTE(review): this means a session whose remote deletion failed is never
 * retried on a later sweep; this looks deliberate (commit "chore: always
 * return the key"), but confirm that permanently-failing sessions should
 * indeed be discarded rather than re-attempted.
 */
async function deleteSession(session: { token: string; session: string }, key: string): Promise<string> {
    try {
        //* ky retries failed requests up to 3 times and aborts each attempt after 5s.
        await ky.post("https://discord.com/api/v10/users/@me/headless-sessions/delete", {
            json: {
                token: session.session,
            },
            headers: {
                Authorization: `Bearer ${session.token}`,
            },
            retry: 3,
            timeout: 5000,
        });
    }
    catch (error) {
        //* Log failures with enough context to find the offending key later.
        if (error instanceof TimeoutError) {
            mainLog(`Session deletion aborted due to timeout for key ${key}`);
        }
        else if (error instanceof HTTPError) {
            mainLog(`Failed to delete session for key ${key}: [${error.name}] ${error.message} ${JSON.stringify(await error.response.json())}`);
        }
        else {
            mainLog(`Failed to delete session for key ${key}: Unknown error`);
        }
    }
    return key;
}

View File

@@ -0,0 +1,13 @@
import { redis } from "../index.js";
import { counter } from "../tracing.js";
//* Last session count reported to the metrics counter.
let activeActivities = 0;
counter.add(0);

/**
 * Reconciles the activity up/down counter with the number of sessions
 * currently stored in the "pmd-api.sessions" redis hash, adding only the
 * difference since the previous call.
 */
export async function setCounter() {
    const current = await redis.hlen("pmd-api.sessions");
    const delta = current - activeActivities;
    if (delta === 0)
        return;
    activeActivities = current;
    counter.add(delta);
}

View File

@@ -3,6 +3,8 @@ import { CronJob } from "cron";
import debug from "debug";
import { clearOldSessions } from "./functions/clearOldSessions.js";
import createRedis from "./functions/createRedis.js";
import { setCounter } from "./functions/setCounter.js";
import "./tracing.js";
export const redis = createRedis();
@@ -13,9 +15,19 @@ debug("Starting cron job to clear old sessions");
void new CronJob(
// Every 5 seconds
"*/5 * * * * *",
async () => {
() => {
clearOldSessions();
},
undefined,
true,
);
void new CronJob(
// Every second
"* * * * * *",
() => {
setCounter();
},
undefined,
true,
);

View File

@@ -0,0 +1,18 @@
import { ValueType } from "@opentelemetry/api";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { MeterProvider } from "@opentelemetry/sdk-metrics";

//* Exporter serving metrics over HTTP for Prometheus to scrape
//* (default PrometheusExporter configuration).
const prometheusExporter = new PrometheusExporter();
const provider = new MeterProvider({
    readers: [prometheusExporter],
});
const meter = provider.getMeter("nice");

/**
 * Up/down counter tracking how many activity sessions are currently live.
 * Incremented/decremented with the delta by `setCounter()`.
 *
 * Fix: metric name was misspelled "active_activites"; corrected to
 * "active_activities" so the exported series matches its description.
 */
export const counter = meter.createUpDownCounter("active_activities", {
    description: "Number of active activities",
    valueType: ValueType.INT,
});

prometheusExporter.startServer();

View File

@@ -1,7 +1,7 @@
{
"name": "@premid/api-worker",
"type": "module",
"version": "0.0.8",
"version": "0.0.10",
"private": true,
"description": "PreMiD's api",
"license": "MPL-2.0",

View File

@@ -0,0 +1,10 @@
import process from "node:process";
import { defu } from "defu";

/**
 * Feature flags for the worker.
 *
 * Flags named in the comma-separated DISABLED_FEATURE_FLAGS environment
 * variable are forced to `false`; any flag not mentioned there keeps the
 * default value declared below.
 */
const overrides: Record<string, boolean> = {};
for (const name of process.env.DISABLED_FEATURE_FLAGS?.split(",") ?? [])
    overrides[name] = false;

export const featureFlags = defu(overrides, {
    WebSocketManager: true,
    SessionKeepAlive: true,
});

View File

@@ -1,13 +1,11 @@
import { readFile } from "node:fs/promises";
import { resolve } from "node:path";
import process from "node:process";
import { useSentry } from "@envelop/sentry";
import { maxAliasesPlugin } from "@escape.tech/graphql-armor-max-aliases";
import { maxDepthPlugin } from "@escape.tech/graphql-armor-max-depth";
import { maxDirectivesPlugin } from "@escape.tech/graphql-armor-max-directives";
import { maxTokensPlugin } from "@escape.tech/graphql-armor-max-tokens";
import fastifyWebsocket from "@fastify/websocket";
import { defu } from "defu";
import fastify from "fastify";
import { createSchema, createYoga } from "graphql-yoga";
@@ -15,6 +13,7 @@ import type { FastifyReply, FastifyRequest } from "fastify";
import { Socket } from "../classes/Socket.js";
import { resolvers } from "../graphql/resolvers/v5/index.js";
import { sessionKeepAlive } from "../routes/sessionKeepAlive.js";
import { featureFlags } from "../constants.js";
import createRedis from "./createRedis.js";
export interface FastifyContext {
@@ -87,15 +86,7 @@ export default async function createServer() {
});
app.get("/v5/feature-flags", async (request, reply) => {
const disabledFlags = process.env.DISABLED_FEATURE_FLAGS?.split(",") ?? [];
const flags = Object.fromEntries(disabledFlags.map(flag => [flag, false]));
const test = defu(flags, {
WebSocketManager: true,
SessionKeepAlive: true,
});
void reply.send(test);
void reply.send(featureFlags);
});
app.post("/v5/session-keep-alive", sessionKeepAlive);

View File

@@ -1,6 +1,6 @@
import type { MutationResolvers } from "../../../../generated/graphql-v5.js";
import addScience from "./addScience.js";
import heartbeat from "./heartbeat.js";
import type { MutationResolvers } from "../../../../generated/graphql-v5.js";
export const Mutation: MutationResolvers = {
addScience,

View File

@@ -1,5 +1,5 @@
import presences from "./presences.js";
import type { QueryResolvers } from "../../../../generated/graphql-v5.js";
import presences from "./presences.js";
export const Query: QueryResolvers = {
presences,

View File

@@ -1,6 +1,6 @@
import type { Resolvers } from "../../../generated/graphql-v5.js";
import { Mutation } from "./Mutation/index.js";
import { Query } from "./Query/index.js";
import type { Resolvers } from "../../../generated/graphql-v5.js";
export const resolvers: Resolvers = {
Query,

View File

@@ -4,17 +4,25 @@ import { type } from "arktype";
import { Routes } from "discord-api-types/v10";
import type { FastifyReply, FastifyRequest } from "fastify";
import { redis } from "../functions/createServer.js";
import { featureFlags } from "../constants.js";
const schema = type({
token: "string.trim",
session: "string.trim",
version: "string.semver & string.trim",
scienceId: "string.trim",
});
export async function sessionKeepAlive(request: FastifyRequest, reply: FastifyReply) {
//* Get the 2 headers
if (!featureFlags.SessionKeepAlive)
return reply.status(202).send();
//* Get the headers
const out = schema({
token: request.headers["x-token"],
session: request.headers["x-session"],
version: request.headers["x-version"] ?? "2.6.8",
scienceId: request.headers["x-science-id"] ?? request.headers["x-token"],
});
if (out instanceof type.errors)
@@ -25,7 +33,7 @@ export async function sessionKeepAlive(request: FastifyRequest, reply: FastifyRe
await redis.hset(
"pmd-api.sessions",
out.token,
out.scienceId,
JSON.stringify({
session: out.session,
token: out.token,

View File

@@ -1,12 +1,11 @@
import process from "node:process";
import KeyvRedis from "@keyv/redis";
import Keyv from "keyv";
import type { KeyvOptions } from "keyv";
import redis from "../redis.js";
export default function createKeyv() {
let options: KeyvOptions | undefined;
let options: Keyv.Options<string> | undefined;
/* c8 ignore next 8 */
if (process.env.REDIS_SENTINELS) {
@@ -16,7 +15,7 @@ export default function createKeyv() {
};
}
const keyv = new Keyv(
const keyv = new Keyv<string>(
options,
);

View File

@@ -1,9 +1,9 @@
import { Buffer } from "node:buffer";
import { readFile } from "node:fs/promises";
import { afterAll, beforeAll, describe, it } from "vitest";
import type { RequestOptions } from "node:http";
import type { AddressInfo } from "node:net";
import { afterAll, beforeAll, describe, it } from "vitest";
import { createServer } from "../functions/createServer.js";

View File

@@ -20,7 +20,7 @@ const handler: RouteHandlerMethod = async (request, reply) => {
return reply.status(400).send("Invalid URL");
const hash = crypto.createHash("sha256").update(url).digest("hex");
const existingShortenedUrl = await keyv.get<string>(hash);
const existingShortenedUrl = await keyv.get(hash);
void reply.header("Cache-control", "public, max-age=1800");

View File

@@ -25,7 +25,7 @@ const handler: RouteHandlerMethod = async (request, reply) => {
if (id.split(".")[0]?.length !== 10)
return reply.code(404).send("Invalid ID");
const url = await keyv.get<string>(id);
const url = await keyv.get(id);
if (!url)
return reply.code(404).send("Unknown ID");

View File

@@ -1,4 +1,4 @@
import { ActivityType, flagsToBadges, PresenceUpdateStatus } from "@discord-user-card/vue";
import { ActivityType, PresenceUpdateStatus, flagsToBadges } from "@discord-user-card/vue";
import { REST } from "@discordjs/rest";
import { Routes } from "discord-api-types/v10";
import type { DiscordUserCardActivity, DiscordUserCardUser } from "@discord-user-card/vue";

1247
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff