diff --git a/Common/Server/Utils/VM/VMRunner.ts b/Common/Server/Utils/VM/VMRunner.ts index be3ee3af7b..ef2ade0c60 100644 --- a/Common/Server/Utils/VM/VMRunner.ts +++ b/Common/Server/Utils/VM/VMRunner.ts @@ -1,5 +1,5 @@ import ReturnResult from "../../../Types/IsolatedVM/ReturnResult"; -import { JSONObject, JSONValue } from "../../../Types/JSON"; +import { JSONObject } from "../../../Types/JSON"; import axios, { AxiosResponse } from "axios"; import crypto from "crypto"; import http from "http"; @@ -7,287 +7,1143 @@ import https from "https"; import ivm from "isolated-vm"; import CaptureSpan from "../Telemetry/CaptureSpan"; import Dictionary from "../../../Types/Dictionary"; -import GenericObject from "../../../Types/GenericObject"; -import vm, { Context } from "vm"; -/** - * Symbol used to retrieve the real (unwrapped) target from a sandbox proxy. - * Hidden from user code via ownKeys / has traps. - */ -const PROXY_TARGET_SYMBOL: unique symbol = Symbol("sandboxProxyTarget"); +const MAX_LOG_BYTES: number = 1_000_000; -/** Properties blocked on every host-realm object exposed to the sandbox. */ -const BLOCKED_SANDBOX_PROPERTIES: ReadonlySet = new Set([ +const BLOCKED_PLAYWRIGHT_PROPERTIES: ReadonlySet = new Set([ "constructor", "__proto__", "prototype", "mainModule", - /* - * Block Playwright methods that can spawn processes or access internals. - * Prevents RCE via browser.browserType().launch({executablePath:"/bin/sh"}) - * and traversal via page.context().browser().browserType().launch(...) 
- */ - "browserType", // Browser → BrowserType (which has launch/connect) - "launch", // BrowserType.launch() spawns a child process - "launchPersistentContext", // BrowserType.launchPersistentContext() spawns a child process - "connectOverCDP", // BrowserType.connectOverCDP() connects via Chrome DevTools Protocol - "newCDPSession", // BrowserContext/Page.newCDPSession() opens raw CDP sessions + "process", + "require", + "module", + "global", + "globalThis", + "browser", + "browserType", + "launch", + "launchPersistentContext", + "connectOverCDP", + "newCDPSession", ]); -/** - * Wraps a host-realm value in a Proxy that blocks prototype-chain traversal. - * Primitives and null/undefined pass through unchanged. - * Object proxies are cached to preserve identity; function proxies are created - * per-access so they bind to the correct `this` (parent object). - */ -function createSandboxProxy( +const BRIDGE_MARKER_KEY: string = "__oneuptimeBridgeType"; +const BRIDGE_REF_TYPE: string = "ref"; +const BRIDGE_CALLABLE_REF_TYPE: string = "callable-ref"; +const BRIDGE_PROMISE_TYPE: string = "promise"; +const BRIDGE_BUFFER_TYPE: string = "buffer"; + +interface SerializedBridgeError { + message: string; + stack?: string | undefined; +} + +interface SerializedBridgeEnvelope { + ok: boolean; + value?: unknown; + error?: SerializedBridgeError | undefined; +} + +function isPlainObject( value: unknown, - cache: WeakMap, - parentObj?: GenericObject, -): unknown { +): value is Record | Record { + if (!value || typeof value !== "object") { + return false; + } + + const prototype: object | null = Object.getPrototypeOf(value); + return prototype === Object.prototype || prototype === null; +} + +function isPromiseLike(value: unknown): value is Promise { + return Boolean( + value && + (typeof value === "object" || typeof value === "function") && + typeof (value as PromiseLike).then === "function", + ); +} + +function isCopyableToIsolate(value: unknown): boolean { + if ( + value === null || 
+ value === undefined || + typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" + ) { + return true; + } + + if (Array.isArray(value)) { + return value.every((item: unknown) => { + return isCopyableToIsolate(item); + }); + } + + if (!isPlainObject(value)) { + return false; + } + + for (const key of Object.keys(value)) { + if (!isCopyableToIsolate((value as Record)[key])) { + return false; + } + } + + return true; +} + +function serializeError(error: unknown): SerializedBridgeError { + if (error instanceof Error) { + return { + message: error.message || "Unknown sandbox error", + stack: error.stack, + }; + } + + return { + message: typeof error === "string" ? error : String(error), + }; +} + +function reviveBufferMarkers(value: unknown): unknown { if (value === null || value === undefined) { return value; } - const valueType: string = typeof value; + if (Array.isArray(value)) { + return value.map((item: unknown) => { + return reviveBufferMarkers(item); + }); + } - if (valueType !== "object" && valueType !== "function") { + if (typeof value !== "object") { return value; } - const target: GenericObject = value as GenericObject; + const record: Record = value as Record; - if (valueType === "function") { - /* - * Function proxies are NOT cached because the same function may be a method - * on different parent objects and needs a different `this` binding each time. 
- */ - const fnProxy: unknown = new Proxy( - target as (...args: unknown[]) => unknown, - { - get( - fnTarget: (...args: unknown[]) => unknown, - prop: string | symbol, - ): unknown { - if (prop === PROXY_TARGET_SYMBOL) { - return fnTarget; - } - if ( - typeof prop === "string" && - BLOCKED_SANDBOX_PROPERTIES.has(prop) - ) { - return undefined; - } - const val: unknown = Reflect.get( - fnTarget, - prop, - fnTarget as GenericObject, - ); - return createSandboxProxy(val, cache, fnTarget as GenericObject); - }, - getPrototypeOf(): null { - return null; - }, - apply( - fnTarget: (...args: unknown[]) => unknown, - _thisArg: unknown, - args: unknown[], - ): unknown { - const thisObj: GenericObject = (parentObj || - fnTarget) as GenericObject; - try { - const result: unknown = Reflect.apply(fnTarget, thisObj, args); - if (result instanceof Promise) { - return result.then( - (v: unknown) => { - return createSandboxProxy(v, cache); - }, - (err: unknown) => { - throw createSandboxProxy(err, cache); - }, - ); - } - return createSandboxProxy(result, cache); - } catch (err: unknown) { - throw createSandboxProxy(err, cache); - } - }, - has( - fnTarget: (...args: unknown[]) => unknown, - prop: string | symbol, - ): boolean { - if ( - typeof prop === "string" && - BLOCKED_SANDBOX_PROPERTIES.has(prop) - ) { - return false; - } - return Reflect.has(fnTarget, prop); - }, - ownKeys( - fnTarget: (...args: unknown[]) => unknown, - ): (string | symbol)[] { - return Reflect.ownKeys(fnTarget).filter((k: string | symbol) => { - return !( - typeof k === "string" && BLOCKED_SANDBOX_PROPERTIES.has(k) - ); - }); - }, - getOwnPropertyDescriptor( - fnTarget: (...args: unknown[]) => unknown, - prop: string | symbol, - ): PropertyDescriptor | undefined { - if ( - typeof prop === "string" && - BLOCKED_SANDBOX_PROPERTIES.has(prop) - ) { - return undefined; - } - const desc: PropertyDescriptor | undefined = - Reflect.getOwnPropertyDescriptor(fnTarget, prop); - if (desc && "value" in desc) { - 
desc.value = createSandboxProxy( - desc.value, - cache, - fnTarget as GenericObject, - ); - } - return desc; - }, - }, - ); - return fnProxy; + if (record[BRIDGE_MARKER_KEY] === BRIDGE_BUFFER_TYPE) { + return Buffer.from(String(record["base64"] || ""), "base64"); } - // Object — use cache to preserve identity and handle circular references - if (cache.has(target)) { - return cache.get(target); + const revived: Record = {}; + + for (const key of Object.keys(record)) { + revived[key] = reviveBufferMarkers(record[key]); } - const objProxy: GenericObject = new Proxy(target, { - get(objTarget: GenericObject, prop: string | symbol): unknown { - if (prop === PROXY_TARGET_SYMBOL) { - return objTarget; - } - if (typeof prop === "string" && BLOCKED_SANDBOX_PROPERTIES.has(prop)) { - return undefined; - } - const val: unknown = Reflect.get(objTarget, prop, objTarget); - return createSandboxProxy(val, cache, objTarget); - }, - getPrototypeOf(): null { - return null; - }, - set( - objTarget: GenericObject, - prop: string | symbol, - newValue: unknown, - ): boolean { - return Reflect.set(objTarget, prop, newValue); - }, - has(objTarget: GenericObject, prop: string | symbol): boolean { - if (typeof prop === "string" && BLOCKED_SANDBOX_PROPERTIES.has(prop)) { - return false; - } - return Reflect.has(objTarget, prop); - }, - ownKeys(objTarget: GenericObject): (string | symbol)[] { - return Reflect.ownKeys(objTarget).filter((k: string | symbol) => { - return !(typeof k === "string" && BLOCKED_SANDBOX_PROPERTIES.has(k)); - }); - }, - getOwnPropertyDescriptor( - objTarget: GenericObject, - prop: string | symbol, - ): PropertyDescriptor | undefined { - if (typeof prop === "string" && BLOCKED_SANDBOX_PROPERTIES.has(prop)) { - return undefined; - } - const desc: PropertyDescriptor | undefined = - Reflect.getOwnPropertyDescriptor(objTarget, prop); - if (desc && "value" in desc) { - desc.value = createSandboxProxy(desc.value, cache, objTarget); - } - return desc; - }, + return revived; +} + 
+function buildWrappedUserCode(code: string): string { + return `(async () => { + const __result = await (async () => { + ${code} + })(); + try { return JSON.stringify(__result); } + catch(_) { return undefined; } + })()`; +} + +async function executeWrappedUserCode(data: { + context: ivm.Context; + timeout: number; + code: string; + reviveBridgeBuffers?: boolean | undefined; +}): Promise { + const wrappedCode: string = buildWrappedUserCode(data.code); + + const resultPromise: Promise = data.context.eval(wrappedCode, { + promise: true, + timeout: data.timeout, }); - cache.set(target, objProxy); - return objProxy; + const overallTimeout: Promise = new Promise( + (_resolve: (value: never) => void, reject: (reason: Error) => void) => { + global.setTimeout(() => { + reject(new Error("Script execution timed out")); + }, data.timeout + 5000); + }, + ); + + const result: unknown = await Promise.race([resultPromise, overallTimeout]); + + let returnValue: unknown; + + if (typeof result === "string") { + try { + returnValue = JSON.parse(result); + } catch { + returnValue = result; + } + } else { + returnValue = result; + } + + if (data.reviveBridgeBuffers) { + return reviveBufferMarkers(returnValue); + } + + return returnValue; } -/** - * Recursively unwraps sandbox proxies in a return value so the host code - * receives original objects (e.g. Buffers that pass `instanceof` checks). 
- */ -function deepUnwrapProxies( - value: unknown, - visited?: WeakSet, -): unknown { - if (value === null || value === undefined) { - return value; +async function createBaseSandbox(data: { + timeout: number; + args?: JSONObject | undefined; + logMessages: string[]; +}): Promise<{ + isolate: ivm.Isolate; + context: ivm.Context; + jail: ivm.Reference>; +}> { + const isolate: ivm.Isolate = new ivm.Isolate({ memoryLimit: 128 }); + const context: ivm.Context = await isolate.createContext(); + const jail: ivm.Reference> = context.global; + + let totalLogBytes: number = 0; + + await jail.set("global", jail.derefInto()); + + await jail.set( + "_log", + new ivm.Callback((...args: string[]) => { + const message: string = args.join(" "); + totalLogBytes += message.length; + + if (totalLogBytes <= MAX_LOG_BYTES) { + data.logMessages.push(message); + } + }), + ); + + await context.eval(` + const console = { log: (...a) => _log(...a.map(v => { + try { return typeof v === 'object' ? JSON.stringify(v) : String(v); } + catch(_) { return String(v); } + }))}; + `); + + if (data.args) { + await jail.set("_args", new ivm.ExternalCopy(data.args).copyInto()); + await context.eval("const args = _args;"); + } else { + await context.eval("const args = {};"); } - const valueType: string = typeof value; + await context.eval(` + const https = { + Agent: class Agent { + constructor(options) { + this.__agentType = '__https_agent__'; + this.options = options || {}; + } + } + }; + const http = { + Agent: class Agent { + constructor(options) { + this.__agentType = '__http_agent__'; + this.options = options || {}; + } + } + }; + `); - if (valueType !== "object" && valueType !== "function") { - return value; - } + const axiosRef: ivm.Reference< + ( + method: string, + url: string, + arg1?: string, + arg2?: string, + ) => Promise + > = new ivm.Reference( + async ( + method: string, + url: string, + arg1?: string, + arg2?: string, + ): Promise => { + const methodsWithBody: string[] = ["post", "put", 
"patch"]; + const hasBody: boolean = methodsWithBody.includes(method); - const obj: Record = value as Record< - string | symbol, - unknown - >; + const body: JSONObject | undefined = + hasBody && arg1 ? (JSON.parse(arg1) as JSONObject) : undefined; - // If it's one of our proxies, unwrap to the original target - try { - const underlying: unknown = obj[PROXY_TARGET_SYMBOL]; - if (underlying !== undefined) { - return underlying; + const configStr: string | undefined = hasBody ? arg2 : arg1; + const config: JSONObject | undefined = configStr + ? (JSON.parse(configStr) as JSONObject) + : undefined; + + if (config) { + const httpsAgentConfig: JSONObject | undefined = config[ + "httpsAgent" + ] as JSONObject | undefined; + + if ( + httpsAgentConfig && + httpsAgentConfig["__agentType"] === "__https_agent__" + ) { + config["httpsAgent"] = new https.Agent( + httpsAgentConfig["options"] as https.AgentOptions, + ) as unknown as JSONObject; + } + + const httpAgentConfig: JSONObject | undefined = config[ + "httpAgent" + ] as JSONObject | undefined; + + if ( + httpAgentConfig && + httpAgentConfig["__agentType"] === "__http_agent__" + ) { + config["httpAgent"] = new http.Agent( + httpAgentConfig["options"] as http.AgentOptions, + ) as unknown as JSONObject; + } + } + + const toPlainHeaders: ( + headers: unknown, + ) => Record = ( + headers: unknown, + ): Record => { + const plain: Record = {}; + + if (headers) { + for (const headerKey of Object.keys( + headers as Record, + )) { + plain[headerKey] = (headers as Record)[headerKey]; + } + } + + return plain; + }; + + try { + let response: AxiosResponse; + + switch (method) { + case "get": + response = await axios.get(url, config); + break; + case "head": + response = await axios.head(url, config); + break; + case "options": + response = await axios.options(url, config); + break; + case "post": + response = await axios.post(url, body, config); + break; + case "put": + response = await axios.put(url, body, config); + break; + case 
"patch": + response = await axios.patch(url, body, config); + break; + case "delete": + response = await axios.delete(url, config); + break; + case "request": + response = await axios.request( + config as Parameters[0], + ); + break; + default: + throw new Error(`Unsupported HTTP method: ${method}`); + } + + return JSON.stringify({ + status: response.status, + headers: toPlainHeaders(response.headers), + data: response.data, + }); + } catch (error: unknown) { + const axiosError: { + isAxiosError?: boolean; + response?: AxiosResponse>; + message?: string; + } = error as { + isAxiosError?: boolean; + response?: AxiosResponse; + message?: string; + }; + + if (axiosError.isAxiosError && axiosError.response) { + return JSON.stringify({ + __isAxiosError: true, + message: axiosError.message || "Request failed", + status: axiosError.response.status, + statusText: axiosError.response.statusText, + headers: toPlainHeaders(axiosError.response.headers), + data: axiosError.response.data, + }); + } + + throw error; + } + }, + ); + + await jail.set("_axiosRef", axiosRef); + + await context.eval(` + function _assertNoFunctions(obj, path) { + if (!obj || typeof obj !== 'object') return; + if (Array.isArray(obj)) { + for (let i = 0; i < obj.length; i++) { + const fullPath = path + '[' + i + ']'; + if (typeof obj[i] === 'function') { + throw new Error( + 'Functions are not supported in axios config because of security. ' + + 'Found a function at "' + fullPath + '". Please remove it or replace it with a plain value.' + ); + } + if (obj[i] && typeof obj[i] === 'object') { + _assertNoFunctions(obj[i], fullPath); + } + } + return; + } + for (const key of Object.keys(obj)) { + const fullPath = path ? path + '.' + key : key; + if (typeof obj[key] === 'function') { + throw new Error( + 'Functions are not supported in axios config because of security. ' + + 'Found a function at "' + fullPath + '". Please remove it or replace it with a plain value.' 
+ ); + } + if (obj[key] && typeof obj[key] === 'object') { + _assertNoFunctions(obj[key], fullPath); + } + } } - } catch { - // Not a proxy or symbol access failed — treat as a plain value - } - if (!visited) { - visited = new WeakSet(); - } - - if (visited.has(obj as GenericObject)) { - return obj; - } - - visited.add(obj as GenericObject); - - if (Array.isArray(obj)) { - for (let i: number = 0; i < obj.length; i++) { - (obj as unknown[])[i] = deepUnwrapProxies((obj as unknown[])[i], visited); + function _parseAxiosResult(result) { + const parsed = JSON.parse(result); + if (parsed && parsed.__isAxiosError) { + const err = new Error(parsed.message); + err.response = { + status: parsed.status, + statusText: parsed.statusText, + headers: parsed.headers, + data: parsed.data, + }; + err.isAxiosError = true; + err.status = parsed.status; + throw err; + } + return parsed; } - } else if (valueType === "object") { - for (const key of Object.keys(obj as Record)) { - (obj as Record)[key] = deepUnwrapProxies( - (obj as Record)[key], - visited, + + function _makeAxiosInstance(defaults) { + function mergeConfig(overrides) { + if (!defaults && !overrides) return undefined; + if (!defaults) return overrides; + if (!overrides) return Object.assign({}, defaults); + const merged = Object.assign({}, defaults, overrides); + if (defaults.headers && overrides.headers) { + merged.headers = Object.assign({}, defaults.headers, overrides.headers); + } + return merged; + } + + async function _request(config) { + const merged = mergeConfig(config); + if (merged) _assertNoFunctions(merged, 'config'); + const response = await _axiosRef.applySyncPromise(undefined, ['request', '', merged ? 
JSON.stringify(merged) : undefined]); + return _parseAxiosResult(response); + } + + const instance = async function(urlOrConfig, config) { + if (typeof urlOrConfig === 'object') { + return _request(urlOrConfig); + } + return _request(Object.assign({}, config || {}, { url: urlOrConfig })); + }; + + instance.request = _request; + instance.get = async (url, config) => { + const merged = mergeConfig(config); + if (merged) _assertNoFunctions(merged, 'config'); + const response = await _axiosRef.applySyncPromise(undefined, ['get', url, merged ? JSON.stringify(merged) : undefined]); + return _parseAxiosResult(response); + }; + instance.head = async (url, config) => { + const merged = mergeConfig(config); + if (merged) _assertNoFunctions(merged, 'config'); + const response = await _axiosRef.applySyncPromise(undefined, ['head', url, merged ? JSON.stringify(merged) : undefined]); + return _parseAxiosResult(response); + }; + instance.options = async (url, config) => { + const merged = mergeConfig(config); + if (merged) _assertNoFunctions(merged, 'config'); + const response = await _axiosRef.applySyncPromise(undefined, ['options', url, merged ? JSON.stringify(merged) : undefined]); + return _parseAxiosResult(response); + }; + instance.post = async (url, requestData, config) => { + const merged = mergeConfig(config); + if (requestData) _assertNoFunctions(requestData, 'data'); + if (merged) _assertNoFunctions(merged, 'config'); + const response = await _axiosRef.applySyncPromise(undefined, ['post', url, requestData ? JSON.stringify(requestData) : undefined, merged ? JSON.stringify(merged) : undefined]); + return _parseAxiosResult(response); + }; + instance.put = async (url, requestData, config) => { + const merged = mergeConfig(config); + if (requestData) _assertNoFunctions(requestData, 'data'); + if (merged) _assertNoFunctions(merged, 'config'); + const response = await _axiosRef.applySyncPromise(undefined, ['put', url, requestData ? 
JSON.stringify(requestData) : undefined, merged ? JSON.stringify(merged) : undefined]); + return _parseAxiosResult(response); + }; + instance.patch = async (url, requestData, config) => { + const merged = mergeConfig(config); + if (requestData) _assertNoFunctions(requestData, 'data'); + if (merged) _assertNoFunctions(merged, 'config'); + const response = await _axiosRef.applySyncPromise(undefined, ['patch', url, requestData ? JSON.stringify(requestData) : undefined, merged ? JSON.stringify(merged) : undefined]); + return _parseAxiosResult(response); + }; + instance.delete = async (url, config) => { + const merged = mergeConfig(config); + if (merged) _assertNoFunctions(merged, 'config'); + const response = await _axiosRef.applySyncPromise(undefined, ['delete', url, merged ? JSON.stringify(merged) : undefined]); + return _parseAxiosResult(response); + }; + instance.create = (instanceDefaults) => { + if (instanceDefaults) _assertNoFunctions(instanceDefaults, 'defaults'); + const combinedDefaults = mergeConfig(instanceDefaults); + return _makeAxiosInstance(combinedDefaults); + }; + + return instance; + } + + const axios = _makeAxiosInstance(null); + `); + + const cryptoRef: ivm.Reference< + (operation: string, ...args: string[]) => string + > = new ivm.Reference((operation: string, ...args: string[]): string => { + switch (operation) { + case "createHash": { + const [algorithm, inputData, encoding] = args; + return crypto + .createHash(algorithm!) + .update(inputData!) + .digest((encoding as crypto.BinaryToTextEncoding) || "hex"); + } + case "createHmac": { + const [algorithm, key, inputData, encoding] = args; + return crypto + .createHmac(algorithm!, key!) + .update(inputData!) 
+ .digest((encoding as crypto.BinaryToTextEncoding) || "hex"); + } + case "randomBytes": { + const [size] = args; + return crypto.randomBytes(parseInt(size!)).toString("hex"); + } + case "randomUUID": + return crypto.randomUUID(); + case "randomInt": { + const [min, max] = args; + return String(crypto.randomInt(parseInt(min!), parseInt(max!))); + } + default: + throw new Error(`Unsupported crypto operation: ${operation}`); + } + }); + + await jail.set("_cryptoRef", cryptoRef); + + await context.eval(` + const crypto = { + createHash: (algorithm) => ({ + _alg: algorithm, _data: '', + update(data) { this._data = data; return this; }, + digest(encoding) { return _cryptoRef.applySync(undefined, ['createHash', this._alg, this._data, encoding || 'hex']); } + }), + createHmac: (algorithm, key) => ({ + _alg: algorithm, _key: key, _data: '', + update(data) { this._data = data; return this; }, + digest(encoding) { return _cryptoRef.applySync(undefined, ['createHmac', this._alg, this._key, this._data, encoding || 'hex']); } + }), + randomBytes: (size) => ({ + toString() { return _cryptoRef.applySync(undefined, ['randomBytes', String(size)]); } + }), + randomUUID: () => { + return _cryptoRef.applySync(undefined, ['randomUUID']); + }, + randomInt: (minOrMax, max) => { + if (max === undefined) { max = minOrMax; minOrMax = 0; } + return Number(_cryptoRef.applySync(undefined, ['randomInt', String(minOrMax), String(max)])); + }, + }; + `); + + const sleepRef: ivm.Reference<(ms: number) => Promise> = + new ivm.Reference((ms: number): Promise => { + return new Promise((resolve: () => void) => { + global.setTimeout(resolve, Math.min(ms, data.timeout)); + }); + }); + + await jail.set("_sleepRef", sleepRef); + + await context.eval(` + function setTimeout(fn, ms) { + _sleepRef.applySyncPromise(undefined, [ms || 0]); + if (typeof fn === 'function') fn(); + } + async function sleep(ms) { + await _sleepRef.applySyncPromise(undefined, [ms || 0]); + } + `); + + return { + isolate, + context, 
+ jail, + }; +} + +class PlaywrightBridge { + private readonly refs: Map = new Map(); + private readonly promises: Map> = new Map(); + private nextRefId: number = 1; + private nextPromiseId: number = 1; + + public addRoot(value: unknown): number { + return this.registerReference(value); + } + + public resolve(rootId: number, pathJson: string): string { + return this.serializeEnvelope(() => { + const path: string[] = this.parsePath(pathJson); + const { value } = this.resolvePath(rootId, path); + return this.serializeValue(value); + }); + } + + public apply(rootId: number, pathJson: string, argsJson: string): string { + return this.serializeEnvelope(() => { + const path: string[] = this.parsePath(pathJson); + const args: unknown[] = this.parseArgs(argsJson); + const { owner, value } = this.resolvePath(rootId, path); + + if (typeof value !== "function") { + throw new Error( + `Bridge target "${path.join(".") || ""}" is not callable`, + ); + } + + return this.serializeValue( + Reflect.apply( + value as (...args: unknown[]) => unknown, + owner, + args, + ), ); + }); + } + + public async awaitPromise(promiseId: number): Promise { + try { + const promise: Promise | undefined = this.promises.get(promiseId); + + if (!promise) { + throw new Error(`Unknown bridge promise id: ${promiseId}`); + } + + const result: unknown = await promise; + + return JSON.stringify({ + ok: true, + value: this.serializeValue(result), + } as SerializedBridgeEnvelope); + } catch (error: unknown) { + return JSON.stringify({ + ok: false, + error: serializeError(error), + } as SerializedBridgeEnvelope); + } finally { + this.promises.delete(promiseId); } } - return obj; + private serializeEnvelope(getValue: () => unknown): string { + try { + return JSON.stringify({ + ok: true, + value: getValue(), + } as SerializedBridgeEnvelope); + } catch (error: unknown) { + return JSON.stringify({ + ok: false, + error: serializeError(error), + } as SerializedBridgeEnvelope); + } + } + + private parsePath(pathJson: 
string): string[] { + const parsed: unknown = JSON.parse(pathJson || "[]"); + + if (!Array.isArray(parsed)) { + throw new Error("Invalid Playwright bridge path"); + } + + return parsed.map((segment: unknown) => { + if (typeof segment !== "string") { + throw new Error("Invalid Playwright bridge path segment"); + } + + this.assertPathSegment(segment); + return segment; + }); + } + + private parseArgs(argsJson: string): unknown[] { + const parsed: unknown = JSON.parse(argsJson || "[]"); + + if (!Array.isArray(parsed)) { + throw new Error("Invalid Playwright bridge argument payload"); + } + + return parsed.map((arg: unknown) => { + return this.reviveArgument(arg); + }); + } + + private resolvePath(rootId: number, path: string[]): { + owner: unknown; + value: unknown; + } { + let current: unknown = this.refs.get(rootId); + let owner: unknown = undefined; + + if (current === undefined) { + throw new Error(`Unknown Playwright bridge root id: ${rootId}`); + } + + for (const segment of path) { + this.assertPathSegment(segment); + + if (current === null || current === undefined) { + throw new Error(`Cannot access "${segment}" on ${String(current)}`); + } + + owner = current; + current = Reflect.get(current as object, segment); + } + + return { + owner, + value: current, + }; + } + + private assertPathSegment(segment: string): void { + if (segment.startsWith("_")) { + throw new Error(`Access to "${segment}" is not allowed in sandbox`); + } + + if (BLOCKED_PLAYWRIGHT_PROPERTIES.has(segment)) { + throw new Error(`Access to "${segment}" is blocked in sandbox`); + } + } + + private registerReference(value: unknown): number { + const id: number = this.nextRefId++; + this.refs.set(id, value); + return id; + } + + private registerPromise(value: Promise): number { + const id: number = this.nextPromiseId++; + this.promises.set(id, value); + return id; + } + + private serializeValue( + value: unknown, + visited?: WeakSet>, + ): unknown { + if ( + value === null || + value === undefined 
|| + typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" + ) { + return value; + } + + if (typeof value === "bigint") { + return value.toString(); + } + + if (Buffer.isBuffer(value)) { + return { + [BRIDGE_MARKER_KEY]: BRIDGE_BUFFER_TYPE, + base64: value.toString("base64"), + }; + } + + if (isPromiseLike(value)) { + return { + [BRIDGE_MARKER_KEY]: BRIDGE_PROMISE_TYPE, + id: this.registerPromise(Promise.resolve(value)), + }; + } + + if (value instanceof Date) { + return value.toISOString(); + } + + if (value instanceof Error) { + return { + name: value.name, + message: value.message, + stack: value.stack, + }; + } + + if (Array.isArray(value)) { + return value.map((item: unknown) => { + return this.serializeValue(item, visited); + }); + } + + if (typeof value === "function") { + return { + [BRIDGE_MARKER_KEY]: BRIDGE_CALLABLE_REF_TYPE, + id: this.registerReference(value), + }; + } + + if (isPlainObject(value)) { + const objectValue: Record = value as Record< + string, + unknown + >; + + if ( + Object.values(objectValue).some((item: unknown) => { + return typeof item === "function"; + }) + ) { + return { + [BRIDGE_MARKER_KEY]: BRIDGE_REF_TYPE, + id: this.registerReference(value), + }; + } + + if (!visited) { + visited = new WeakSet>(); + } + + if (visited.has(objectValue)) { + return "[Circular]"; + } + + visited.add(objectValue); + + const serialized: Record = {}; + + for (const key of Object.keys(objectValue)) { + serialized[key] = this.serializeValue(objectValue[key], visited); + } + + return serialized; + } + if (typeof value === "object") { + return { + [BRIDGE_MARKER_KEY]: BRIDGE_REF_TYPE, + id: this.registerReference(value), + }; + } + + return String(value); + } + + private reviveArgument(value: unknown): unknown { + if (value === null || value === undefined) { + return value; + } + + if (Array.isArray(value)) { + return value.map((item: unknown) => { + return this.reviveArgument(item); + }); + } + + if (typeof value !== 
/**
 * Installs the host <-> isolate Playwright bridge into an isolated-vm context.
 *
 * Host side: three entry points are exposed on the isolate's global ("jail"):
 *   - `_pwResolve(rootId, pathJson)`         — synchronously resolves the value at a
 *     property path under a bridge root (delegates to `bridge.resolve`).
 *   - `_pwApply(rootId, pathJson, argsJson)` — synchronously invokes the method at a
 *     property path (delegates to `bridge.apply`).
 *   - `_pwAwaitRef` (ivm.Reference)          — awaits a host-side promise by id
 *     (delegates to `bridge.awaitPromise`); used via `applySyncPromise` so the
 *     isolate blocks until the host promise settles.
 * All three return a JSON envelope string ({ ok, value?, error? }), so only plain
 * strings ever cross the isolate boundary.
 *
 * Isolate side: the eval'd helper code wraps those entry points in Proxy objects so
 * sandboxed user scripts can use Playwright objects (e.g. `page.goto(...)`)
 * transparently — every property access / call is routed through the bridge, and
 * user-supplied functions are rejected by `__oneuptimeAssertNoFunctions` for
 * security.
 *
 * Finally, caller-provided context values are either deep-copied into the isolate
 * (plain, structured-cloneable data) or registered as bridge roots and exposed as
 * proxies (live host objects such as Playwright `page`).
 *
 * NOTE(review): generic type arguments below were reconstructed — the diff
 * extraction stripped `<...>` — confirm `Record<string, unknown>` /
 * `Dictionary<unknown>` against the original file.
 */
async function attachPlaywrightBridge(data: {
  context: ivm.Context;
  jail: ivm.Reference<Record<string, unknown>>;
  bridge: PlaywrightBridge;
  contextValues?: Dictionary<unknown> | undefined;
}): Promise<void> {
  // Sync bridge callback: read the value at `path` under bridge root `rootId`.
  const resolveCallback: ivm.Callback = new ivm.Callback(
    (rootId: number, pathJson: string): string => {
      return data.bridge.resolve(rootId, pathJson);
    },
  );

  // Sync bridge callback: invoke the function at `path` with serialized args.
  const applyCallback: ivm.Callback = new ivm.Callback(
    (rootId: number, pathJson: string, argsJson: string): string => {
      return data.bridge.apply(rootId, pathJson, argsJson);
    },
  );

  // Reference (not Callback) so the isolate can block on it via applySyncPromise.
  const awaitRef: ivm.Reference<(promiseId: number) => Promise<string>> =
    new ivm.Reference((promiseId: number): Promise<string> => {
      return data.bridge.awaitPromise(promiseId);
    });

  await data.jail.set("_pwResolve", resolveCallback);
  await data.jail.set("_pwApply", applyCallback);
  await data.jail.set("_pwAwaitRef", awaitRef);

  /*
   * Isolate-side helper code. This template literal is executed inside the
   * isolate, so comments cannot be added within it without changing the string.
   * It defines:
   *   __oneuptimeParseBridgeResponse — unwraps the {ok,value,error} envelope,
   *     rethrowing host errors with their original message/stack.
   *   __oneuptimeUnwrapBridgeValue   — recursively turns serialized refs /
   *     callable-refs / promises back into proxies (promises are re-awaited
   *     through _pwAwaitRef).
   *   __oneuptimeAssertNoFunctions   — blocks user functions in arguments
   *     (bridge proxies themselves are allowed through).
   *   __createBridgeMethod / __createBridgeProxy — Proxy factories; property
   *     reads extend the path lazily, `apply` crosses the bridge, and `then` is
   *     masked so proxies are not mistaken for thenables.
   */
  await data.context.eval(`
    function __oneuptimeParseBridgeResponse(payload) {
      const parsed = JSON.parse(payload);
      if (!parsed.ok) {
        const err = new Error(parsed.error && parsed.error.message ? parsed.error.message : 'Sandbox bridge call failed');
        if (parsed.error && parsed.error.stack) {
          err.stack = parsed.error.stack;
        }
        throw err;
      }
      return parsed.value;
    }

    function __oneuptimeUnwrapBridgeValue(value) {
      if (value === null || value === undefined) {
        return value;
      }

      if (Array.isArray(value)) {
        return value.map((item) => __oneuptimeUnwrapBridgeValue(item));
      }

      if (typeof value !== 'object') {
        return value;
      }

      if (value.__oneuptimeBridgeType === 'ref') {
        return __createBridgeProxy(Number(value.id), []);
      }

      if (value.__oneuptimeBridgeType === 'callable-ref') {
        return __createBridgeMethod(Number(value.id), []);
      }

      if (value.__oneuptimeBridgeType === 'promise') {
        return (async () => {
          const payload = await _pwAwaitRef.applySyncPromise(
            undefined,
            [Number(value.id)],
          );

          return __oneuptimeUnwrapBridgeValue(
            __oneuptimeParseBridgeResponse(payload)
          );
        })();
      }

      if (value.__oneuptimeBridgeType === 'buffer') {
        return value;
      }

      const unwrapped = {};
      for (const key of Object.keys(value)) {
        unwrapped[key] = __oneuptimeUnwrapBridgeValue(value[key]);
      }
      return unwrapped;
    }

    function __oneuptimeResolveBridgeValue(rootId, path) {
      const payload = _pwResolve(rootId, JSON.stringify(path));
      return __oneuptimeParseBridgeResponse(payload);
    }

    function __oneuptimeAssertNoFunctions(obj, path) {
      if (obj === null || obj === undefined) return;

      if (typeof obj === 'function') {
        if (obj.__oneuptimeIsBridgeProxy === true) {
          return;
        }
        throw new Error(
          'Functions are not supported in Playwright sandbox arguments for security reasons. ' +
          'Found a function at "' + path + '".'
        );
      }

      if (typeof obj !== 'object') return;

      if (Array.isArray(obj)) {
        for (let i = 0; i < obj.length; i++) {
          __oneuptimeAssertNoFunctions(obj[i], path + '[' + i + ']');
        }
        return;
      }

      for (const key of Object.keys(obj)) {
        __oneuptimeAssertNoFunctions(
          obj[key],
          path ? path + '.' + key : key,
        );
      }
    }

    function __createBridgeMethod(rootId, path) {
      const proxyTarget = function() {};

      return new Proxy(proxyTarget, {
        get(_target, prop) {
          if (prop === '__oneuptimeIsBridgeProxy') {
            return true;
          }

          if (prop === 'then') {
            return undefined;
          }

          if (prop === 'toJSON') {
            return () => __oneuptimeResolveBridgeValue(rootId, path);
          }

          if (prop === 'toString') {
            return () => '[PlaywrightBridge]';
          }

          if (prop === Symbol.toPrimitive) {
            return () => '[PlaywrightBridge]';
          }

          if (typeof prop !== 'string') {
            return undefined;
          }

          if (prop === 'inspect') {
            return () => '[PlaywrightBridge]';
          }

          return __createBridgeMethod(rootId, path.concat(prop));
        },

        apply(_target, _thisArg, argsList) {
          __oneuptimeAssertNoFunctions(argsList, 'arguments');

          const payload = _pwApply(
            rootId,
            JSON.stringify(path),
            JSON.stringify(argsList),
          );

          return __oneuptimeUnwrapBridgeValue(
            __oneuptimeParseBridgeResponse(payload)
          );
        },
      });
    }

    function __createBridgeProxy(rootId, path) {
      return new Proxy({}, {
        get(_target, prop) {
          if (prop === '__oneuptimeIsBridgeProxy') {
            return true;
          }

          if (prop === 'then') {
            return undefined;
          }

          if (prop === 'toJSON') {
            return () => __oneuptimeResolveBridgeValue(rootId, path);
          }

          if (prop === 'toString') {
            return () => '[PlaywrightBridge]';
          }

          if (prop === Symbol.toPrimitive) {
            return () => '[PlaywrightBridge]';
          }

          if (typeof prop !== 'string') {
            return undefined;
          }

          if (prop === 'inspect') {
            return () => '[PlaywrightBridge]';
          }

          return __createBridgeMethod(rootId, path.concat(prop));
        },
      });
    }
  `);

  // No extra sandbox globals requested — done.
  if (!data.contextValues) {
    return;
  }

  // Expose each caller-provided value as either a deep copy (plain data) or a
  // live bridge proxy (host objects that cannot be structured-cloned).
  for (const key of Object.keys(data.contextValues)) {
    const value: unknown = data.contextValues[key];

    if (value === undefined) {
      continue;
    }

    if (isCopyableToIsolate(value)) {
      await data.jail.set(key, new ivm.ExternalCopy(value).copyInto());
      continue;
    }

    // Register a bridge root; refId is a number, so interpolating it into the
    // eval'd snippet is injection-safe (key is JSON-quoted).
    const refId: number = data.bridge.addRoot(value);

    await data.context.eval(
      `global[${JSON.stringify(key)}] = __createBridgeProxy(${refId}, []);`,
    );
  }
}
TimerHandle = setInterval(fn, ms, ...rest); - pendingIntervals.push(handle); - return handle; - }; - - const wrappedClearInterval: (handle: TimerHandle) => void = ( - handle: TimerHandle, - ): void => { - const actual: TimerHandle = unwrapProxy(handle); - clearInterval(actual); - const idx: number = pendingIntervals.indexOf(actual); - if (idx !== -1) { - pendingIntervals.splice(idx, 1); - } - }; - - // Proxy cache shared across all wrapped host objects in this execution - const proxyCache: WeakMap = new WeakMap(); - - // Use null-prototype object to break this.constructor chain on the global - const sandbox: Context = Object.create(null) as Context; - sandbox["process"] = Object.freeze(Object.create(null)); - sandbox["console"] = createSandboxProxy( - { - log: (...args: JSONValue[]) => { - const msg: string = args.join(" "); - totalLogBytes += msg.length; - if (totalLogBytes <= MAX_LOG_BYTES) { - logMessages.push(msg); - } - }, - }, - proxyCache, - ); - sandbox["http"] = createSandboxProxy(http, proxyCache); - sandbox["https"] = createSandboxProxy(https, proxyCache); - sandbox["axios"] = createSandboxProxy(axios, proxyCache); - sandbox["crypto"] = createSandboxProxy(crypto, proxyCache); - sandbox["setTimeout"] = createSandboxProxy(wrappedSetTimeout, proxyCache); - sandbox["clearTimeout"] = createSandboxProxy( - wrappedClearTimeout, - proxyCache, - ); - sandbox["setInterval"] = createSandboxProxy(wrappedSetInterval, proxyCache); - sandbox["clearInterval"] = createSandboxProxy( - wrappedClearInterval, - proxyCache, - ); - - // Wrap any additional context (e.g. Playwright browser/page objects) - if (options.context) { - for (const key of Object.keys(options.context)) { - const val: GenericObject | string | undefined = options.context[key]; - sandbox[key] = - typeof val === "string" ? 
val : createSandboxProxy(val, proxyCache); - } - } - - if (options.args) { - // args is plain JSON data — no host functions to protect against - sandbox["args"] = options.args; - } - - vm.createContext(sandbox, { - codeGeneration: { - strings: false, - wasm: false, - }, + const { isolate, context, jail } = await createBaseSandbox({ + timeout, + args: options.args, + logMessages, }); - const script: string = `(async()=>{ - ${code} - })()`; - try { - /* - * vm timeout only covers synchronous CPU time, so wrap with - * Promise.race to also cover async operations (network, timers, etc.) - */ - const vmPromise: Promise = vm.runInContext(script, sandbox, { - timeout: timeout, + const bridge: PlaywrightBridge = new PlaywrightBridge(); + + await attachPlaywrightBridge({ + context, + jail, + bridge, + contextValues: options.context, }); - const overallTimeout: Promise = new Promise( - (_resolve: (value: never) => void, reject: (reason: Error) => void) => { - const handle: NodeJS.Timeout = global.setTimeout(() => { - reject(new Error("Script execution timed out")); - }, timeout + 5000); - // Don't let this timer keep the process alive - handle.unref(); - }, - ); - - const returnVal: unknown = await Promise.race([ - vmPromise, - overallTimeout, - ]); + const returnValue: unknown = await executeWrappedUserCode({ + context, + timeout, + code, + reviveBridgeBuffers: true, + }); return { - returnValue: deepUnwrapProxies(returnVal), + returnValue, logMessages, }; } finally { - // Clean up any lingering timers to prevent resource leaks - for (const handle of pendingTimeouts) { - clearTimeout(handle); - } - for (const handle of pendingIntervals) { - clearInterval(handle); + if (!isolate.isDisposed) { + isolate.dispose(); } } } @@ -469,492 +1204,21 @@ export default class VMRunner { }): Promise { const { code, options } = data; const timeout: number = options.timeout || 5000; - const logMessages: string[] = []; - const isolate: ivm.Isolate = new ivm.Isolate({ memoryLimit: 128 }); + 
const { isolate, context } = await createBaseSandbox({ + timeout, + args: options.args, + logMessages, + }); try { - const context: ivm.Context = await isolate.createContext(); - const jail: ivm.Reference> = context.global; - - // Set up global object - await jail.set("global", jail.derefInto()); - - // console.log - fire-and-forget callback - await jail.set( - "_log", - new ivm.Callback((...args: string[]) => { - logMessages.push(args.join(" ")); - }), - ); - - await context.eval(` - const console = { log: (...a) => _log(...a.map(v => { - try { return typeof v === 'object' ? JSON.stringify(v) : String(v); } - catch(_) { return String(v); } - }))}; - `); - - // args - deep copy into isolate - if (options.args) { - await jail.set("_args", new ivm.ExternalCopy(options.args).copyInto()); - await context.eval("const args = _args;"); - } else { - await context.eval("const args = {};"); - } - - /* - * http / https - provide Agent constructors that serialize across the boundary. - * The sandbox Agent is a plain object with a marker; the host-side axios bridge - * reconstructs the real Node.js Agent before making the request. - */ - await context.eval(` - const https = { - Agent: class Agent { - constructor(options) { - this.__agentType = '__https_agent__'; - this.options = options || {}; - } - } - }; - const http = { - Agent: class Agent { - constructor(options) { - this.__agentType = '__http_agent__'; - this.options = options || {}; - } - } - }; - `); - - /* - * axios (get, head, options, post, put, patch, delete, request) - * bridged via applySyncPromise. - * - * For GET/HEAD/OPTIONS/DELETE: args = [method, url, configJson?] - * For POST/PUT/PATCH: args = [method, url, bodyJson?, configJson?] 
- * For REQUEST: args = ['request', '', configJson] - */ - const axiosRef: ivm.Reference< - ( - method: string, - url: string, - arg1?: string, - arg2?: string, - ) => Promise - > = new ivm.Reference( - async ( - method: string, - url: string, - arg1?: string, - arg2?: string, - ): Promise => { - const methodsWithBody: string[] = ["post", "put", "patch"]; - const hasBody: boolean = methodsWithBody.includes(method); - - /* - * For POST/PUT/PATCH: arg1=body, arg2=config - * For GET/HEAD/OPTIONS/DELETE/REQUEST: arg1=config - */ - const body: JSONObject | undefined = - hasBody && arg1 ? (JSON.parse(arg1) as JSONObject) : undefined; - - const configStr: string | undefined = hasBody ? arg2 : arg1; - const config: JSONObject | undefined = configStr - ? (JSON.parse(configStr) as JSONObject) - : undefined; - - // Reconstruct real http/https Agents from serialized markers - if (config) { - const httpsAgentConfig: JSONObject | undefined = config[ - "httpsAgent" - ] as JSONObject | undefined; - - if ( - httpsAgentConfig && - httpsAgentConfig["__agentType"] === "__https_agent__" - ) { - config["httpsAgent"] = new https.Agent( - httpsAgentConfig["options"] as https.AgentOptions, - ) as unknown as JSONObject; - } - - const httpAgentConfig: JSONObject | undefined = config[ - "httpAgent" - ] as JSONObject | undefined; - - if ( - httpAgentConfig && - httpAgentConfig["__agentType"] === "__http_agent__" - ) { - config["httpAgent"] = new http.Agent( - httpAgentConfig["options"] as http.AgentOptions, - ) as unknown as JSONObject; - } - } - - /** - * Helper: convert AxiosHeaders (or any header-like object) to a - * plain record so it can be safely JSON-serialised. 
- */ - const toPlainHeaders: ( - headers: unknown, - ) => Record = ( - headers: unknown, - ): Record => { - const plain: Record = {}; - if (headers) { - for (const hKey of Object.keys( - headers as Record, - )) { - plain[hKey] = (headers as Record)[hKey]; - } - } - return plain; - }; - - try { - let response: AxiosResponse; - - switch (method) { - case "get": - response = await axios.get(url, config); - break; - case "head": - response = await axios.head(url, config); - break; - case "options": - response = await axios.options(url, config); - break; - case "post": - response = await axios.post(url, body, config); - break; - case "put": - response = await axios.put(url, body, config); - break; - case "patch": - response = await axios.patch(url, body, config); - break; - case "delete": - response = await axios.delete(url, config); - break; - case "request": - response = await axios.request( - config as Parameters[0], - ); - break; - default: - throw new Error(`Unsupported HTTP method: ${method}`); - } - - /* - * Convert AxiosHeaders to a plain object before serializing. - * JSON.stringify calls AxiosHeaders.toJSON(key) with a truthy key, - * which makes it join array headers (like set-cookie) with commas. - * This produces invalid Cookie headers when user code forwards them. - */ - return JSON.stringify({ - status: response.status, - headers: toPlainHeaders(response.headers), - data: response.data, - }); - } catch (err: unknown) { - /* - * If this is an axios error with a response (4xx, 5xx, etc.), - * return the error details as JSON so the sandbox-side axios - * wrapper can reconstruct error.response for user code. 
- */ - const axiosErr: { - isAxiosError?: boolean; - response?: AxiosResponse>; - message?: string; - } = err as { - isAxiosError?: boolean; - response?: AxiosResponse; - message?: string; - }; - - if (axiosErr.isAxiosError && axiosErr.response) { - return JSON.stringify({ - __isAxiosError: true, - message: axiosErr.message || "Request failed", - status: axiosErr.response.status, - statusText: axiosErr.response.statusText, - headers: toPlainHeaders(axiosErr.response.headers), - data: axiosErr.response.data, - }); - } - - throw err; - } - }, - ); - - await jail.set("_axiosRef", axiosRef); - - await context.eval(` - function _assertNoFunctions(obj, path) { - if (!obj || typeof obj !== 'object') return; - if (Array.isArray(obj)) { - for (let i = 0; i < obj.length; i++) { - const fullPath = path + '[' + i + ']'; - if (typeof obj[i] === 'function') { - throw new Error( - 'Functions are not supported in axios config because of security. ' + - 'Found a function at "' + fullPath + '". Please remove it or replace it with a plain value.' - ); - } - if (obj[i] && typeof obj[i] === 'object') { - _assertNoFunctions(obj[i], fullPath); - } - } - return; - } - for (const key of Object.keys(obj)) { - const fullPath = path ? path + '.' + key : key; - if (typeof obj[key] === 'function') { - throw new Error( - 'Functions are not supported in axios config because of security. ' + - 'Found a function at "' + fullPath + '". Please remove it or replace it with a plain value.' 
- ); - } - if (obj[key] && typeof obj[key] === 'object') { - _assertNoFunctions(obj[key], fullPath); - } - } - } - - function _parseAxiosResult(r) { - const parsed = JSON.parse(r); - if (parsed && parsed.__isAxiosError) { - const err = new Error(parsed.message); - err.response = { - status: parsed.status, - statusText: parsed.statusText, - headers: parsed.headers, - data: parsed.data, - }; - err.isAxiosError = true; - err.status = parsed.status; - throw err; - } - return parsed; - } - - function _makeAxiosInstance(defaults) { - function mergeConfig(overrides) { - if (!defaults && !overrides) return undefined; - if (!defaults) return overrides; - if (!overrides) return Object.assign({}, defaults); - const merged = Object.assign({}, defaults, overrides); - if (defaults.headers && overrides.headers) { - merged.headers = Object.assign({}, defaults.headers, overrides.headers); - } - return merged; - } - - async function _request(config) { - const merged = mergeConfig(config); - if (merged) _assertNoFunctions(merged, 'config'); - const r = await _axiosRef.applySyncPromise(undefined, ['request', '', merged ? JSON.stringify(merged) : undefined]); - return _parseAxiosResult(r); - } - - // Make instance callable: axios(config) or axios(url, config) - const instance = async function(urlOrConfig, config) { - if (typeof urlOrConfig === 'object') { - return _request(urlOrConfig); - } - return _request(Object.assign({}, config || {}, { url: urlOrConfig })); - }; - - instance.request = _request; - instance.get = async (url, config) => { - const merged = mergeConfig(config); - if (merged) _assertNoFunctions(merged, 'config'); - const r = await _axiosRef.applySyncPromise(undefined, ['get', url, merged ? 
JSON.stringify(merged) : undefined]); - return _parseAxiosResult(r); - }; - instance.head = async (url, config) => { - const merged = mergeConfig(config); - if (merged) _assertNoFunctions(merged, 'config'); - const r = await _axiosRef.applySyncPromise(undefined, ['head', url, merged ? JSON.stringify(merged) : undefined]); - return _parseAxiosResult(r); - }; - instance.options = async (url, config) => { - const merged = mergeConfig(config); - if (merged) _assertNoFunctions(merged, 'config'); - const r = await _axiosRef.applySyncPromise(undefined, ['options', url, merged ? JSON.stringify(merged) : undefined]); - return _parseAxiosResult(r); - }; - instance.post = async (url, data, config) => { - const merged = mergeConfig(config); - if (data) _assertNoFunctions(data, 'data'); - if (merged) _assertNoFunctions(merged, 'config'); - const r = await _axiosRef.applySyncPromise(undefined, ['post', url, data ? JSON.stringify(data) : undefined, merged ? JSON.stringify(merged) : undefined]); - return _parseAxiosResult(r); - }; - instance.put = async (url, data, config) => { - const merged = mergeConfig(config); - if (data) _assertNoFunctions(data, 'data'); - if (merged) _assertNoFunctions(merged, 'config'); - const r = await _axiosRef.applySyncPromise(undefined, ['put', url, data ? JSON.stringify(data) : undefined, merged ? JSON.stringify(merged) : undefined]); - return _parseAxiosResult(r); - }; - instance.patch = async (url, data, config) => { - const merged = mergeConfig(config); - if (data) _assertNoFunctions(data, 'data'); - if (merged) _assertNoFunctions(merged, 'config'); - const r = await _axiosRef.applySyncPromise(undefined, ['patch', url, data ? JSON.stringify(data) : undefined, merged ? 
JSON.stringify(merged) : undefined]); - return _parseAxiosResult(r); - }; - instance.delete = async (url, config) => { - const merged = mergeConfig(config); - if (merged) _assertNoFunctions(merged, 'config'); - const r = await _axiosRef.applySyncPromise(undefined, ['delete', url, merged ? JSON.stringify(merged) : undefined]); - return _parseAxiosResult(r); - }; - instance.create = (instanceDefaults) => { - if (instanceDefaults) _assertNoFunctions(instanceDefaults, 'defaults'); - const combinedDefaults = mergeConfig(instanceDefaults); - return _makeAxiosInstance(combinedDefaults); - }; - - return instance; - } - - const axios = _makeAxiosInstance(null); - `); - - // crypto (createHash, createHmac, randomBytes, randomUUID, randomInt) - bridged via applySync - const cryptoRef: ivm.Reference< - (op: string, ...args: string[]) => string - > = new ivm.Reference((op: string, ...args: string[]): string => { - switch (op) { - case "createHash": { - const [algorithm, inputData, encoding] = args; - return crypto - .createHash(algorithm!) - .update(inputData!) - .digest((encoding as crypto.BinaryToTextEncoding) || "hex"); - } - case "createHmac": { - const [algorithm, key, inputData, encoding] = args; - return crypto - .createHmac(algorithm!, key!) - .update(inputData!) 
- .digest((encoding as crypto.BinaryToTextEncoding) || "hex"); - } - case "randomBytes": { - const [size] = args; - return crypto.randomBytes(parseInt(size!)).toString("hex"); - } - case "randomUUID": { - return crypto.randomUUID(); - } - case "randomInt": { - const [min, max] = args; - return String(crypto.randomInt(parseInt(min!), parseInt(max!))); - } - default: - throw new Error(`Unsupported crypto operation: ${op}`); - } + const returnValue: unknown = await executeWrappedUserCode({ + context, + timeout, + code, }); - await jail.set("_cryptoRef", cryptoRef); - - await context.eval(` - const crypto = { - createHash: (algorithm) => ({ - _alg: algorithm, _data: '', - update(d) { this._data = d; return this; }, - digest(enc) { return _cryptoRef.applySync(undefined, ['createHash', this._alg, this._data, enc || 'hex']); } - }), - createHmac: (algorithm, key) => ({ - _alg: algorithm, _key: key, _data: '', - update(d) { this._data = d; return this; }, - digest(enc) { return _cryptoRef.applySync(undefined, ['createHmac', this._alg, this._key, this._data, enc || 'hex']); } - }), - randomBytes: (size) => ({ - toString(enc) { return _cryptoRef.applySync(undefined, ['randomBytes', String(size)]); } - }), - randomUUID: () => { - return _cryptoRef.applySync(undefined, ['randomUUID']); - }, - randomInt: (minOrMax, max) => { - if (max === undefined) { max = minOrMax; minOrMax = 0; } - return Number(_cryptoRef.applySync(undefined, ['randomInt', String(minOrMax), String(max)])); - }, - }; - `); - - // setTimeout / sleep - bridged via applySyncPromise - const sleepRef: ivm.Reference<(ms: number) => Promise> = - new ivm.Reference((ms: number): Promise => { - return new Promise((resolve: () => void) => { - global.setTimeout(resolve, Math.min(ms, timeout)); - }); - }); - - await jail.set("_sleepRef", sleepRef); - - await context.eval(` - function setTimeout(fn, ms) { - _sleepRef.applySyncPromise(undefined, [ms || 0]); - if (typeof fn === 'function') fn(); - } - async function 
sleep(ms) { - await _sleepRef.applySyncPromise(undefined, [ms || 0]); - } - `); - - /* - * Wrap user code in async IIFE. JSON.stringify the return value inside - * the isolate so only a plain string crosses the boundary — this avoids - * "A non-transferable value was passed" errors when user code returns - * objects containing functions, class instances, or other non-cloneable types. - */ - const wrappedCode: string = `(async () => { - const __result = await (async () => { - ${code} - })(); - try { return JSON.stringify(__result); } - catch(_) { return undefined; } - })()`; - - // Run with overall timeout covering both CPU and I/O wait - const resultPromise: Promise = context.eval(wrappedCode, { - promise: true, - timeout: timeout, - }); - - const overallTimeout: Promise = new Promise( - (_resolve: (value: never) => void, reject: (reason: Error) => void) => { - global.setTimeout(() => { - reject(new Error("Script execution timed out")); - }, timeout + 5000); // 5s grace period beyond isolate timeout - }, - ); - - const result: unknown = await Promise.race([ - resultPromise, - overallTimeout, - ]); - - // Parse the JSON string returned from inside the isolate - let returnValue: unknown; - - if (typeof result === "string") { - try { - returnValue = JSON.parse(result); - } catch { - returnValue = result; - } - } else { - returnValue = result; - } - return { returnValue, logMessages, diff --git a/HelmChart/Public/oneuptime/templates/probe.yaml b/HelmChart/Public/oneuptime/templates/probe.yaml index 4411a35454..3c3aaa3d4e 100644 --- a/HelmChart/Public/oneuptime/templates/probe.yaml +++ b/HelmChart/Public/oneuptime/templates/probe.yaml @@ -104,6 +104,8 @@ spec: value: {{ $val.syntheticMonitorScriptTimeoutInMs | squote }} - name: PROBE_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS value: {{ $val.customCodeMonitorScriptTimeoutInMs | squote }} + - name: PROBE_SYNTHETIC_RUNNER_URL + value: "http://127.0.0.1:3885" - name: PROBE_KEY {{- if $val.key }} value: {{ $val.key }} @@ 
-140,6 +142,52 @@ spec: resources: {{- toYaml $val.resources | nindent 12 }} {{- end }} + - image: {{ include "oneuptime.image" (dict "Values" $.Values "ServiceName" "probe") }} + name: synthetic-runner + {{- if $val.containerSecurityContext }} + securityContext: + {{- toYaml $val.containerSecurityContext | nindent 12 }} + {{- else if $.Values.containerSecurityContext }} + securityContext: + {{- toYaml $.Values.containerSecurityContext | nindent 12 }} + {{- end }} + imagePullPolicy: {{ $.Values.image.pullPolicy }} + command: + - npm + - run + - start:synthetic-runner + env: + - name: LOG_LEVEL + value: {{ $.Values.logLevel }} + - name: PORT + value: "3885" + - name: OPENTELEMETRY_EXPORTER_OTLP_HEADERS + value: {{ $.Values.openTelemetryExporter.headers }} + - name: OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT + value: {{ $.Values.openTelemetryExporter.endpoint }} + - name: PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS + value: {{ $val.syntheticMonitorScriptTimeoutInMs | squote }} + {{- if $val.disableTelemetryCollection }} + - name: DISABLE_TELEMETRY + value: {{ $val.disableTelemetryCollection | quote }} + {{- end }} + {{- if and $val.proxy $val.proxy.httpProxyUrl }} + - name: HTTP_PROXY_URL + value: {{ $val.proxy.httpProxyUrl | squote }} + {{- end }} + {{- if and $val.proxy $val.proxy.httpsProxyUrl }} + - name: HTTPS_PROXY_URL + value: {{ $val.proxy.httpsProxyUrl | squote }} + {{- end }} + {{- if and $val.proxy $val.proxy.noProxy }} + - name: NO_PROXY + value: {{ $val.proxy.noProxy | squote }} + {{- end }} + {{- include "oneuptime.env.oneuptimeSecret" (dict "Values" $.Values "Release" $.Release) | nindent 12 }} + ports: + - containerPort: 3885 + protocol: TCP + name: synthetic-runner {{- if $val.additionalContainers }} {{ toYaml $val.additionalContainers | nindent 8 }} {{- end }} diff --git a/Probe/Config.ts b/Probe/Config.ts index 76f85b5eda..e0d3d8c1f7 100644 --- a/Probe/Config.ts +++ b/Probe/Config.ts @@ -86,6 +86,10 @@ export const PORT: Port = new Port( }), ); +export 
const PROBE_SYNTHETIC_RUNNER_URL: URL = URL.fromString( + process.env["PROBE_SYNTHETIC_RUNNER_URL"] || "http://127.0.0.1:3885", +); + /* * Proxy configuration for all HTTP/HTTPS requests made by the probe * HTTP_PROXY_URL: Proxy for HTTP requests diff --git a/Probe/SyntheticRunner/API/SyntheticMonitor.ts b/Probe/SyntheticRunner/API/SyntheticMonitor.ts new file mode 100644 index 0000000000..1dc0ae3873 --- /dev/null +++ b/Probe/SyntheticRunner/API/SyntheticMonitor.ts @@ -0,0 +1,45 @@ +import ClusterKeyAuthorization from "Common/Server/Middleware/ClusterKeyAuthorization"; +import Express, { + ExpressRequest, + ExpressResponse, + ExpressRouter, + NextFunction, +} from "Common/Server/Utils/Express"; +import Response from "Common/Server/Utils/Response"; +import BadDataException from "Common/Types/Exception/BadDataException"; +import logger from "Common/Server/Utils/Logger"; +import { JSONArray } from "Common/Types/JSON"; +import SyntheticMonitorProcessRunner from "../Execution/SyntheticMonitorProcessRunner"; +import { SyntheticMonitorExecutionRequest } from "../Types/SyntheticMonitorExecution"; + +const router: ExpressRouter = Express.getRouter(); + +router.post( + "/run", + ClusterKeyAuthorization.isAuthorizedServiceMiddleware, + async ( + req: ExpressRequest, + res: ExpressResponse, + next: NextFunction, + ): Promise => { + try { + const request: SyntheticMonitorExecutionRequest = + req.body as SyntheticMonitorExecutionRequest; + + if (!request || typeof request.script !== "string") { + throw new BadDataException("Synthetic monitor script is required"); + } + + const response = await SyntheticMonitorProcessRunner.execute(request); + + return Response.sendJsonObjectResponse(req, res, { + results: response.results as unknown as JSONArray, + }); + } catch (error: unknown) { + logger.error(error); + return next(error); + } + }, +); + +export default router; diff --git a/Probe/SyntheticRunner/Config.ts b/Probe/SyntheticRunner/Config.ts new file mode 100644 index 
import SyntheticMonitorExecutor from "./SyntheticMonitorExecutor";
import logger from "Common/Server/Utils/Logger";
import {
  SyntheticMonitorExecutionChildMessage,
  SyntheticMonitorExecutionRequest,
} from "../Types/SyntheticMonitorExecution";

// Child-process entry point: the parent forks this script, sends exactly one
// job over IPC, and this process replies with a single success/error message
// and exits. Guards below ensure the process always terminates.

// Flipped as soon as a job message arrives; checked by the watchdog timer.
let hasHandledMessage: boolean = false;

// Sends one IPC message to the parent (if an IPC channel exists) and then
// exits with the given code. Exit happens in the send callback so the message
// is flushed before the process dies.
const sendAndExit: (
  message: SyntheticMonitorExecutionChildMessage,
  exitCode: number,
) => void = (
  message: SyntheticMonitorExecutionChildMessage,
  exitCode: number,
): void => {
  if (process.send) {
    process.send(message, (error: Error | null) => {
      if (error) {
        logger.error(error);
      }

      process.exit(exitCode);
    });

    return;
  }

  // No IPC channel (e.g. run standalone) — just exit.
  process.exit(exitCode);
};

// Serializes any thrown value into an error envelope and exits with code 1.
const handleFatalError: (error: unknown) => void = (error: unknown): void => {
  sendAndExit(
    {
      type: "error",
      error: {
        message:
          (error as Error)?.message ||
          (error as Error)?.toString() ||
          String(error),
        stack: (error as Error)?.stack,
      },
    },
    1,
  );
};

// Handle exactly one job: run the synthetic monitor script and report back.
// NOTE(review): `Promise<void>` return type reconstructed — the diff
// extraction stripped generic arguments; confirm against the original file.
process.once("message", async (message: unknown): Promise<void> => {
  hasHandledMessage = true;

  try {
    const request: SyntheticMonitorExecutionRequest =
      message as SyntheticMonitorExecutionRequest;

    const results = await SyntheticMonitorExecutor.execute(request);

    sendAndExit(
      {
        type: "success",
        payload: {
          results,
        },
      },
      0,
    );
  } catch (error: unknown) {
    handleFatalError(error);
  }
});

// Any uncaught failure path still produces an error envelope + exit(1).
process.on("uncaughtException", (error: Error): void => {
  handleFatalError(error);
});

process.on("unhandledRejection", (error: unknown): void => {
  handleFatalError(error);
});

// Watchdog: if the parent never delivers a job within 10s, bail out rather
// than linger as an orphaned child process.
global.setTimeout(() => {
  if (!hasHandledMessage) {
    handleFatalError(new Error("Synthetic runner child did not receive a job"));
  }
}, 10000);
  /**
   * Runs the synthetic monitor script once per (browser type x screen size)
   * combination and collects the per-combination results.
   *
   * Empty/missing browserTypes or screenSizeTypes simply yields no runs.
   * Combinations whose run returned null are omitted from the result array.
   *
   * NOTE(review): generic return types here and below were reconstructed —
   * the diff extraction stripped `<...>`; confirm against the original file.
   */
  public static async execute(
    options: SyntheticMonitorExecutionRequest,
  ): Promise<Array<SyntheticMonitorResponse>> {
    const results: Array<SyntheticMonitorResponse> = [];

    for (const browserType of options.browserTypes || []) {
      for (const screenSizeType of options.screenSizeTypes || []) {
        logger.debug(
          `Running Synthetic Monitor: ${options.monitorId || "unknown"}, Screen Size: ${screenSizeType}, Browser: ${browserType}`,
        );

        const result: SyntheticMonitorResponse | null =
          await this.executeWithRetry({
            monitorId: options.monitorId,
            script: options.script,
            browserType: browserType,
            screenSizeType: screenSizeType,
            retryCountOnError: options.retryCountOnError || 0,
          });

        if (result) {
          // Stamp the combination onto the result before collecting it.
          result.browserType = browserType;
          result.screenSizeType = screenSizeType;
          results.push(result);
        }
      }
    }

    return results;
  }

  /**
   * Runs one browser/screen-size combination, retrying (recursively, up to
   * `retryCountOnError` times with a fixed delay) only when the run completed
   * but reported a scriptError. Infrastructure-level nulls are returned as-is.
   */
  private static async executeWithRetry(options: {
    monitorId?: string | undefined;
    script: string;
    browserType: BrowserType;
    screenSizeType: ScreenSizeType;
    retryCountOnError: number;
    currentRetry?: number | undefined;
  }): Promise<SyntheticMonitorResponse | null> {
    const currentRetry: number = options.currentRetry || 0;
    const maxRetries: number = options.retryCountOnError;

    const result: SyntheticMonitorResponse | null =
      await this.executeByBrowserAndScreenSize({
        script: options.script,
        browserType: options.browserType,
        screenSizeType: options.screenSizeType,
      });

    // Only script-level failures are retried; a null result is not.
    if (result?.scriptError && currentRetry < maxRetries) {
      logger.debug(
        `Synthetic Monitor script error, retrying (${currentRetry + 1}/${maxRetries}): ${result.scriptError}`,
      );

      await this.sleep(SYNTHETIC_MONITOR_RETRY_DELAY_IN_MS);

      return this.executeWithRetry({
        monitorId: options.monitorId,
        script: options.script,
        browserType: options.browserType,
        screenSizeType: options.screenSizeType,
        retryCountOnError: maxRetries,
        currentRetry: currentRetry + 1,
      });
    }

    return result;
  }
scriptResult.executionTimeInMS = Math.ceil( + (endTime[0] * 1000000000 + endTime[1]) / 1000000, + ); + + scriptResult.screenshots = this.getScreenshots(returnValue); + scriptResult.result = this.getResultData(returnValue); + } catch (err: unknown) { + logger.error(err); + scriptResult.scriptError = + (err as Error)?.message || (err as Error)?.toString() || String(err); + } finally { + await SyntheticMonitorExecutor.disposeBrowserSession(browserSession); + } + + return scriptResult; + } + + private static async runScript(data: { + script: string; + page: Page; + browserType: BrowserType; + screenSizeType: ScreenSizeType; + logMessages: Array; + }): Promise { + const sandboxConsole: ScriptConsole = this.createConsole(data.logMessages); + const asyncFunction: (...runtimeArgs: Array) => Promise = + new AsyncFunctionImpl( + "axios", + "page", + "browserType", + "screenSizeType", + "crypto", + "http", + "https", + "console", + "sleep", + `"use strict";\n${data.script}`, + ); + + let timeoutHandle: NodeJS.Timeout | undefined = undefined; + + const executionPromise: Promise = asyncFunction( + axios, + data.page, + data.browserType, + data.screenSizeType, + crypto, + http, + https, + sandboxConsole, + this.sleep, + ); + + const timeoutPromise: Promise = new Promise( + (_resolve: (value: never) => void, reject: (reason: Error) => void) => { + timeoutHandle = global.setTimeout(() => { + reject(new Error("Script execution timed out")); + }, SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS); + }, + ); + + try { + return await Promise.race([executionPromise, timeoutPromise]); + } finally { + if (timeoutHandle) { + global.clearTimeout(timeoutHandle); + } + } + } + + private static createConsole(logMessages: Array): ScriptConsole { + let totalLogBytes: number = 0; + + const writeLog: ConsoleMethod = (...args: Array): void => { + const message: string = args + .map((value: unknown) => { + return this.serializeLogValue(value); + }) + .join(" "); + + totalLogBytes += message.length; + + if 
(totalLogBytes <= MAX_LOG_BYTES) { + logMessages.push(message); + } + }; + + return { + log: writeLog, + info: writeLog, + warn: writeLog, + error: writeLog, + debug: writeLog, + }; + } + + private static serializeLogValue(value: unknown): string { + if (value instanceof Error) { + return value.stack || value.message; + } + + if (typeof value === "string") { + return value; + } + + try { + return typeof value === "object" ? JSON.stringify(value) : String(value); + } catch { + return String(value); + } + } + + private static getResultData( + returnValue: unknown, + ): SyntheticMonitorResponse["result"] { + if (!returnValue || typeof returnValue !== "object") { + return undefined; + } + + return (returnValue as ScriptReturnValue).data; + } + + private static getScreenshots(returnValue: unknown): Screenshots { + const screenshots: Screenshots = {}; + + if (!returnValue || typeof returnValue !== "object") { + return screenshots; + } + + const screenshotValues: Record | undefined = ( + returnValue as ScriptReturnValue + ).screenshots; + + if (!screenshotValues) { + return screenshots; + } + + for (const screenshotName of Object.keys(screenshotValues)) { + const screenshotValue: unknown = screenshotValues[screenshotName]; + + if (!Buffer.isBuffer(screenshotValue)) { + continue; + } + + screenshots[screenshotName] = screenshotValue.toString("base64"); + } + + return screenshots; + } + + private static getViewportHeightAndWidth(options: { + screenSizeType: ScreenSizeType; + }): { + height: number; + width: number; + } { + let viewPortHeight: number = 0; + let viewPortWidth: number = 0; + + switch (options.screenSizeType) { + case ScreenSizeType.Desktop: + viewPortHeight = 1080; + viewPortWidth = 1920; + break; + case ScreenSizeType.Mobile: + viewPortHeight = 640; + viewPortWidth = 360; + break; + case ScreenSizeType.Tablet: + viewPortHeight = 768; + viewPortWidth = 1024; + break; + default: + viewPortHeight = 1080; + viewPortWidth = 1920; + break; + } + + return { + 
height: viewPortHeight, + width: viewPortWidth, + }; + } + + private static getPlaywrightBrowsersPath(): string { + return ( + process.env["PLAYWRIGHT_BROWSERS_PATH"] || + `${os.homedir()}/.cache/ms-playwright` + ); + } + + public static async getChromeExecutablePath(): Promise { + const browsersPath: string = this.getPlaywrightBrowsersPath(); + + const doesDirectoryExist: boolean = + await LocalFile.doesDirectoryExist(browsersPath); + if (!doesDirectoryExist) { + throw new BadDataException("Chrome executable path not found."); + } + + const directories: string[] = + await LocalFile.getListOfDirectories(browsersPath); + + if (directories.length === 0) { + throw new BadDataException("Chrome executable path not found."); + } + + const chromeInstallationName: string | undefined = directories.find( + (directory: string) => { + return directory.includes("chromium"); + }, + ); + + if (!chromeInstallationName) { + throw new BadDataException("Chrome executable path not found."); + } + + const chromeExecutableCandidates: Array = [ + `${browsersPath}/${chromeInstallationName}/chrome-linux/chrome`, + `${browsersPath}/${chromeInstallationName}/chrome-linux64/chrome`, + `${browsersPath}/${chromeInstallationName}/chrome64/chrome`, + `${browsersPath}/${chromeInstallationName}/chrome/chrome`, + ]; + + for (const executablePath of chromeExecutableCandidates) { + if (await LocalFile.doesFileExist(executablePath)) { + return executablePath; + } + } + + throw new BadDataException("Chrome executable path not found."); + } + + public static async getFirefoxExecutablePath(): Promise { + const browsersPath: string = this.getPlaywrightBrowsersPath(); + + const doesDirectoryExist: boolean = + await LocalFile.doesDirectoryExist(browsersPath); + if (!doesDirectoryExist) { + throw new BadDataException("Firefox executable path not found."); + } + + const directories: string[] = + await LocalFile.getListOfDirectories(browsersPath); + + if (directories.length === 0) { + throw new 
BadDataException("Firefox executable path not found."); + } + + const firefoxInstallationName: string | undefined = directories.find( + (directory: string) => { + return directory.includes("firefox"); + }, + ); + + if (!firefoxInstallationName) { + throw new BadDataException("Firefox executable path not found."); + } + + const firefoxExecutableCandidates: Array = [ + `${browsersPath}/${firefoxInstallationName}/firefox/firefox`, + `${browsersPath}/${firefoxInstallationName}/firefox-linux64/firefox`, + `${browsersPath}/${firefoxInstallationName}/firefox64/firefox`, + `${browsersPath}/${firefoxInstallationName}/firefox-64/firefox`, + ]; + + for (const executablePath of firefoxExecutableCandidates) { + if (await LocalFile.doesFileExist(executablePath)) { + return executablePath; + } + } + + throw new BadDataException("Firefox executable path not found."); + } + + private static async getPageByBrowserType(data: { + browserType: BrowserType; + screenSizeType: ScreenSizeType; + }): Promise { + const viewport: { + height: number; + width: number; + } = SyntheticMonitorExecutor.getViewportHeightAndWidth({ + screenSizeType: data.screenSizeType, + }); + + const baseOptions: BrowserLaunchOptions = { + timeout: Math.min(SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS, 30000), + }; + + const proxyOptions: BrowserLaunchOptions["proxy"] | undefined = + this.getBrowserProxyOptions(); + + if (proxyOptions) { + baseOptions.proxy = proxyOptions; + + logger.debug( + `Synthetic Monitor using proxy: ${proxyOptions.server} (HTTPS: ${Boolean(process.env["HTTPS_PROXY_URL"] || process.env["https_proxy"])}, HTTP: ${Boolean(process.env["HTTP_PROXY_URL"] || process.env["http_proxy"])})`, + ); + } + + if (data.browserType === BrowserType.Chromium) { + const browser: Browser = await chromium.launch({ + executablePath: await this.getChromeExecutablePath(), + ...baseOptions, + }); + + const context: BrowserContext = await browser.newContext({ + viewport: { + width: viewport.width, + height: viewport.height, 
+ }, + }); + + const page: Page = await context.newPage(); + + return { + browser, + context, + page, + }; + } + + if (data.browserType === BrowserType.Firefox) { + const browser: Browser = await firefox.launch({ + executablePath: await this.getFirefoxExecutablePath(), + ...baseOptions, + }); + + let context: BrowserContext | null = null; + + try { + context = await browser.newContext({ + viewport: { + width: viewport.width, + height: viewport.height, + }, + }); + + const page: Page = await context.newPage(); + + return { + browser, + context, + page, + }; + } catch (error: unknown) { + await SyntheticMonitorExecutor.safeCloseBrowserContext(context); + await SyntheticMonitorExecutor.safeCloseBrowser(browser); + throw error; + } + } + + throw new BadDataException("Invalid Browser Type."); + } + + private static getBrowserProxyOptions(): + | BrowserLaunchOptions["proxy"] + | undefined { + const httpsProxyUrl: string | undefined = + process.env["HTTPS_PROXY_URL"] || process.env["https_proxy"] || undefined; + const httpProxyUrl: string | undefined = + process.env["HTTP_PROXY_URL"] || process.env["http_proxy"] || undefined; + const noProxy: string | undefined = + process.env["NO_PROXY"] || process.env["no_proxy"] || undefined; + const proxyUrl: string | undefined = httpsProxyUrl || httpProxyUrl; + + if (!proxyUrl) { + return undefined; + } + + const proxyOptions: NonNullable = { + server: proxyUrl, + }; + + if (noProxy) { + proxyOptions.bypass = noProxy; + } + + try { + const parsedUrl: URL = new URL(proxyUrl); + + if (parsedUrl.username && parsedUrl.password) { + proxyOptions.username = parsedUrl.username; + proxyOptions.password = parsedUrl.password; + } + } catch (error: unknown) { + logger.warn(`Failed to parse proxy URL for authentication: ${error}`); + } + + return proxyOptions; + } + + private static async disposeBrowserSession( + session: BrowserSession | null, + ): Promise { + if (!session) { + return; + } + + await 
SyntheticMonitorExecutor.safeClosePage(session.page); + await SyntheticMonitorExecutor.safeCloseBrowserContexts({ + browser: session.browser, + }); + await SyntheticMonitorExecutor.safeCloseBrowser(session.browser); + } + + private static async safeClosePage(page?: Page | null): Promise { + if (!page) { + return; + } + + try { + if (!page.isClosed()) { + await page.close(); + } + } catch (error: unknown) { + logger.warn( + `Failed to close Playwright page: ${(error as Error)?.message || error}`, + ); + } + } + + private static async safeCloseBrowserContext( + context?: BrowserContext | null, + ): Promise { + if (!context) { + return; + } + + try { + await context.close(); + } catch (error: unknown) { + logger.warn( + `Failed to close Playwright browser context: ${(error as Error)?.message || error}`, + ); + } + } + + private static async safeCloseBrowser( + browser?: Browser | null, + ): Promise { + if (!browser) { + return; + } + + try { + if (browser.isConnected()) { + await browser.close(); + } + } catch (error: unknown) { + logger.warn( + `Failed to close Playwright browser: ${(error as Error)?.message || error}`, + ); + } + } + + private static async safeCloseBrowserContexts(data: { + browser: Browser; + }): Promise { + const contexts: Array = data.browser.contexts(); + + for (const context of contexts) { + await SyntheticMonitorExecutor.safeCloseBrowserContext(context); + } + } + + private static async sleep(ms: number): Promise { + return new Promise((resolve: () => void) => { + global.setTimeout(resolve, ms); + }); + } +} diff --git a/Probe/SyntheticRunner/Execution/SyntheticMonitorProcessRunner.ts b/Probe/SyntheticRunner/Execution/SyntheticMonitorProcessRunner.ts new file mode 100644 index 0000000000..9bc4515968 --- /dev/null +++ b/Probe/SyntheticRunner/Execution/SyntheticMonitorProcessRunner.ts @@ -0,0 +1,258 @@ +import { + SYNTHETIC_MONITOR_ATTEMPT_PADDING_IN_MS, + SYNTHETIC_MONITOR_CHILD_GROUP_ID, + SYNTHETIC_MONITOR_CHILD_HOME_DIR, + 
SYNTHETIC_MONITOR_CHILD_USER_ID, + SYNTHETIC_MONITOR_RETRY_DELAY_IN_MS, + SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS, +} from "../Config"; +import { + SyntheticMonitorExecutionChildMessage, + SyntheticMonitorExecutionRequest, + SyntheticMonitorExecutionResponse, +} from "../Types/SyntheticMonitorExecution"; +import fs from "fs"; +import logger from "Common/Server/Utils/Logger"; +import path from "path"; +import { fork, ForkOptions } from "child_process"; + +export default class SyntheticMonitorProcessRunner { + public static async execute( + request: SyntheticMonitorExecutionRequest, + ): Promise { + const childScriptPath: string = path.resolve( + __dirname, + `ExecuteSyntheticMonitorScript${path.extname(__filename) || ".js"}`, + ); + + this.ensureChildHomeDirectory(); + + const forkOptions: ForkOptions = { + cwd: process.cwd(), + env: this.buildChildEnv(), + stdio: ["ignore", "pipe", "pipe", "ipc"], + detached: true, + }; + + if ( + typeof process.getuid === "function" && + process.getuid() === 0 && + typeof process.getgid === "function" + ) { + forkOptions.uid = SYNTHETIC_MONITOR_CHILD_USER_ID; + forkOptions.gid = SYNTHETIC_MONITOR_CHILD_GROUP_ID; + } + + const child = fork(childScriptPath, [], forkOptions); + const timeoutInMS: number = this.getProcessTimeoutInMS(request); + + child.stdout?.setEncoding("utf8"); + child.stderr?.setEncoding("utf8"); + + child.stdout?.on("data", (chunk: string): void => { + logger.debug(`[synthetic-runner-child] ${chunk.trim()}`); + }); + + child.stderr?.on("data", (chunk: string): void => { + logger.warn(`[synthetic-runner-child] ${chunk.trim()}`); + }); + + return new Promise( + ( + resolve: (value: SyntheticMonitorExecutionResponse) => void, + reject: (reason: Error) => void, + ) => { + let settled: boolean = false; + + const finish = ( + callback: () => void, + options?: { + killChild?: boolean | undefined; + }, + ): void => { + if (settled) { + return; + } + + settled = true; + global.clearTimeout(timeoutHandle); + 
child.removeAllListeners(); + + if (options?.killChild) { + this.killChildProcessGroup(child.pid); + } + + callback(); + }; + + const timeoutHandle: NodeJS.Timeout = global.setTimeout(() => { + finish( + () => { + reject(new Error("Synthetic monitor process timed out")); + }, + { + killChild: true, + }, + ); + }, timeoutInMS); + + child.once("error", (error: Error) => { + finish( + () => { + reject(error); + }, + { + killChild: true, + }, + ); + }); + + child.once( + "message", + (message: SyntheticMonitorExecutionChildMessage) => { + if (message.type === "success") { + finish(() => { + resolve(message.payload); + }); + + return; + } + + finish( + () => { + reject( + new Error( + message.error.stack + ? `${message.error.message}\n${message.error.stack}` + : message.error.message, + ), + ); + }, + { + killChild: true, + }, + ); + }, + ); + + child.once( + "exit", + (code: number | null, signal: NodeJS.Signals | null) => { + if (settled) { + return; + } + + finish(() => { + reject( + new Error( + `Synthetic runner child exited before responding (code: ${ + code === null ? "null" : code + }, signal: ${signal || "none"})`, + ), + ); + }); + }, + ); + + child.send(request); + }, + ); + } + + private static getProcessTimeoutInMS( + request: SyntheticMonitorExecutionRequest, + ): number { + const browserCount: number = request.browserTypes?.length || 0; + const screenSizeCount: number = request.screenSizeTypes?.length || 0; + const combinationCount: number = + browserCount > 0 && screenSizeCount > 0 + ? 
browserCount * screenSizeCount + : 1; + const attemptCount: number = (request.retryCountOnError || 0) + 1; + const perAttemptTimeoutInMS: number = + SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS + + SYNTHETIC_MONITOR_ATTEMPT_PADDING_IN_MS; + + return ( + combinationCount * + (attemptCount * perAttemptTimeoutInMS + + (attemptCount - 1) * SYNTHETIC_MONITOR_RETRY_DELAY_IN_MS) + + 5000 + ); + } + + private static buildChildEnv(): NodeJS.ProcessEnv { + const env: Record = { + HOME: SYNTHETIC_MONITOR_CHILD_HOME_DIR, + XDG_CACHE_HOME: SYNTHETIC_MONITOR_CHILD_HOME_DIR, + XDG_CONFIG_HOME: SYNTHETIC_MONITOR_CHILD_HOME_DIR, + XDG_DATA_HOME: SYNTHETIC_MONITOR_CHILD_HOME_DIR, + TMPDIR: "/tmp", + TMP: "/tmp", + TEMP: "/tmp", + PATH: process.env["PATH"] || "", + NODE_ENV: process.env["NODE_ENV"] || "production", + NODE_OPTIONS: process.env["NODE_OPTIONS"], + NODE_EXTRA_CA_CERTS: process.env["NODE_EXTRA_CA_CERTS"], + SSL_CERT_FILE: process.env["SSL_CERT_FILE"], + SSL_CERT_DIR: process.env["SSL_CERT_DIR"], + PLAYWRIGHT_BROWSERS_PATH: + process.env["PLAYWRIGHT_BROWSERS_PATH"] || "/ms-playwright-browsers", + HTTP_PROXY_URL: process.env["HTTP_PROXY_URL"], + HTTPS_PROXY_URL: process.env["HTTPS_PROXY_URL"], + NO_PROXY: process.env["NO_PROXY"], + http_proxy: process.env["http_proxy"], + https_proxy: process.env["https_proxy"], + no_proxy: process.env["no_proxy"], + TZ: process.env["TZ"], + LANG: process.env["LANG"], + LANGUAGE: process.env["LANGUAGE"], + LC_ALL: process.env["LC_ALL"], + }; + + return Object.fromEntries( + Object.entries(env).filter( + (entry: [string, string | undefined]): entry is [string, string] => { + return typeof entry[1] === "string"; + }, + ), + ); + } + + private static ensureChildHomeDirectory(): void { + if (!fs.existsSync(SYNTHETIC_MONITOR_CHILD_HOME_DIR)) { + fs.mkdirSync(SYNTHETIC_MONITOR_CHILD_HOME_DIR, { + recursive: true, + mode: 0o755, + }); + } + + if ( + typeof process.getuid === "function" && + process.getuid() === 0 && + typeof process.getgid === 
"function" + ) { + fs.chownSync( + SYNTHETIC_MONITOR_CHILD_HOME_DIR, + SYNTHETIC_MONITOR_CHILD_USER_ID, + SYNTHETIC_MONITOR_CHILD_GROUP_ID, + ); + } + } + + private static killChildProcessGroup(pid?: number): void { + if (!pid) { + return; + } + + try { + process.kill(-pid, "SIGKILL"); + } catch { + try { + process.kill(pid, "SIGKILL"); + } catch { + return; + } + } + } +} diff --git a/Probe/SyntheticRunner/Index.ts b/Probe/SyntheticRunner/Index.ts new file mode 100644 index 0000000000..c6dff92f7b --- /dev/null +++ b/Probe/SyntheticRunner/Index.ts @@ -0,0 +1,47 @@ +import { PORT, SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS } from "./Config"; +import SyntheticMonitorAPI from "./API/SyntheticMonitor"; +import { PromiseVoidFunction } from "Common/Types/FunctionTypes"; +import logger from "Common/Server/Utils/Logger"; +import App from "Common/Server/Utils/StartServer"; +import Telemetry from "Common/Server/Utils/Telemetry"; +import Express, { ExpressApplication } from "Common/Server/Utils/Express"; +import "ejs"; + +const APP_NAME: string = "synthetic-runner"; + +const init: PromiseVoidFunction = async (): Promise => { + try { + Telemetry.init({ + serviceName: APP_NAME, + }); + + logger.info( + `Synthetic Runner Service - Script timeout: ${SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS}ms`, + ); + + await App.init({ + appName: APP_NAME, + port: PORT, + isFrontendApp: false, + statusOptions: { + liveCheck: async () => {}, + readyCheck: async () => {}, + }, + }); + + const app: ExpressApplication = Express.getExpressApp(); + app.use("/synthetic-monitor", SyntheticMonitorAPI); + + await App.addDefaultRoutes(); + } catch (err: unknown) { + logger.error("Synthetic runner init failed:"); + logger.error(err); + throw err; + } +}; + +init().catch((err: Error) => { + logger.error(err); + logger.error("Exiting node process"); + process.exit(1); +}); diff --git a/Probe/SyntheticRunner/Types/SyntheticMonitorExecution.ts b/Probe/SyntheticRunner/Types/SyntheticMonitorExecution.ts new file mode 
100644 index 0000000000..74a32f6690 --- /dev/null +++ b/Probe/SyntheticRunner/Types/SyntheticMonitorExecution.ts @@ -0,0 +1,32 @@ +import BrowserType from "Common/Types/Monitor/SyntheticMonitors/BrowserType"; +import ScreenSizeType from "Common/Types/Monitor/SyntheticMonitors/ScreenSizeType"; +import SyntheticMonitorResponse from "Common/Types/Monitor/SyntheticMonitors/SyntheticMonitorResponse"; + +export interface SyntheticMonitorExecutionRequest { + monitorId?: string | undefined; + screenSizeTypes?: Array | undefined; + browserTypes?: Array | undefined; + script: string; + retryCountOnError?: number | undefined; +} + +export interface SyntheticMonitorExecutionResponse { + results: Array; +} + +export interface SyntheticMonitorExecutionChildSuccessMessage { + type: "success"; + payload: SyntheticMonitorExecutionResponse; +} + +export interface SyntheticMonitorExecutionChildErrorMessage { + type: "error"; + error: { + message: string; + stack?: string | undefined; + }; +} + +export type SyntheticMonitorExecutionChildMessage = + | SyntheticMonitorExecutionChildSuccessMessage + | SyntheticMonitorExecutionChildErrorMessage; diff --git a/Probe/Utils/Monitors/MonitorTypes/SyntheticMonitor.ts b/Probe/Utils/Monitors/MonitorTypes/SyntheticMonitor.ts index dc96cf17fa..3ca530d0cc 100644 --- a/Probe/Utils/Monitors/MonitorTypes/SyntheticMonitor.ts +++ b/Probe/Utils/Monitors/MonitorTypes/SyntheticMonitor.ts @@ -1,16 +1,20 @@ -import { PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS } from "../../../Config"; -import ProxyConfig from "../../ProxyConfig"; -import BadDataException from "Common/Types/Exception/BadDataException"; -import ReturnResult from "Common/Types/IsolatedVM/ReturnResult"; +import { + PROBE_SYNTHETIC_RUNNER_URL, + PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS, +} from "../../../Config"; +import ClusterKeyAuthorization from "Common/Server/Middleware/ClusterKeyAuthorization"; +import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse"; +import HTTPMethod from 
"Common/Types/API/HTTPMethod"; +import HTTPResponse from "Common/Types/API/HTTPResponse"; import BrowserType from "Common/Types/Monitor/SyntheticMonitors/BrowserType"; import ScreenSizeType from "Common/Types/Monitor/SyntheticMonitors/ScreenSizeType"; import SyntheticMonitorResponse from "Common/Types/Monitor/SyntheticMonitors/SyntheticMonitorResponse"; import ObjectID from "Common/Types/ObjectID"; import logger from "Common/Server/Utils/Logger"; -import VMRunner from "Common/Server/Utils/VM/VMRunner"; -import { Browser, BrowserContext, Page, chromium, firefox } from "playwright"; -import LocalFile from "Common/Server/Utils/LocalFile"; -import os from "os"; +import URL from "Common/Types/API/URL"; +import { JSONObject } from "Common/Types/JSON"; +import API from "Common/Utils/API"; +import { SyntheticMonitorExecutionRequest } from "../../../SyntheticRunner/Types/SyntheticMonitorExecution"; export interface SyntheticMonitorOptions { monitorId?: ObjectID | undefined; @@ -20,503 +24,101 @@ export interface SyntheticMonitorOptions { retryCountOnError?: number | undefined; } -interface BrowserLaunchOptions { - executablePath?: string; - proxy?: { - server: string; - username?: string; - password?: string; - bypass?: string; - }; - args?: string[]; - headless?: boolean; - devtools?: boolean; - timeout?: number; -} - -interface BrowserSession { - browser: Browser; - context: BrowserContext; - page: Page; -} - export default class SyntheticMonitor { public static async execute( options: SyntheticMonitorOptions, ): Promise | null> { + const request: SyntheticMonitorExecutionRequest = { + monitorId: options.monitorId?.toString(), + screenSizeTypes: options.screenSizeTypes, + browserTypes: options.browserTypes, + script: options.script, + retryCountOnError: options.retryCountOnError, + }; + + try { + const result: HTTPResponse | HTTPErrorResponse = + await API.fetch({ + method: HTTPMethod.POST, + url: URL.fromString(PROBE_SYNTHETIC_RUNNER_URL.toString()).addRoute( + 
"/synthetic-monitor/run", + ), + data: request as unknown as JSONObject, + headers: ClusterKeyAuthorization.getClusterKeyHeaders(), + options: { + timeout: this.getRequestTimeoutInMS(options), + }, + }); + + if (result instanceof HTTPErrorResponse || result.isFailure()) { + const message: string = + result instanceof HTTPErrorResponse + ? result.message || "Synthetic runner request failed" + : `Synthetic runner request failed with status code ${result.statusCode}`; + + logger.error(message); + return this.buildFailureResults(options, message); + } + + const rawResults: unknown = result.data["results"]; + + if (!Array.isArray(rawResults)) { + const message: string = "Synthetic runner returned an invalid payload"; + + logger.error(message); + return this.buildFailureResults(options, message); + } + + return rawResults as Array; + } catch (err: unknown) { + logger.error(err); + + const message: string = + (err as Error)?.message || (err as Error)?.toString() || String(err); + + return this.buildFailureResults(options, message); + } + } + + private static buildFailureResults( + options: SyntheticMonitorOptions, + message: string, + ): Array { const results: Array = []; for (const browserType of options.browserTypes || []) { for (const screenSizeType of options.screenSizeTypes || []) { - logger.debug( - `Running Synthetic Monitor: ${options?.monitorId?.toString()}, Screen Size: ${screenSizeType}, Browser: ${browserType}`, - ); - - const result: SyntheticMonitorResponse | null = - await this.executeWithRetry({ - script: options.script, - browserType: browserType, - screenSizeType: screenSizeType, - retryCountOnError: options.retryCountOnError || 0, - }); - - if (result) { - result.browserType = browserType; - result.screenSizeType = screenSizeType; - results.push(result); - } + results.push({ + logMessages: [], + scriptError: message, + result: undefined, + screenshots: {}, + executionTimeInMS: 0, + browserType: browserType, + screenSizeType: screenSizeType, + }); } } 
return results; } - private static async executeWithRetry(options: { - script: string; - browserType: BrowserType; - screenSizeType: ScreenSizeType; - retryCountOnError: number; - currentRetry?: number; - }): Promise { - const currentRetry: number = options.currentRetry || 0; - const maxRetries: number = options.retryCountOnError; + private static getRequestTimeoutInMS( + options: SyntheticMonitorOptions, + ): number { + const browserCount: number = options.browserTypes?.length || 0; + const screenSizeCount: number = options.screenSizeTypes?.length || 0; + const combinationCount: number = + browserCount > 0 && screenSizeCount > 0 + ? browserCount * screenSizeCount + : 1; + const attemptCount: number = (options.retryCountOnError || 0) + 1; - const result: SyntheticMonitorResponse | null = - await this.executeByBrowserAndScreenSize({ - script: options.script, - browserType: options.browserType, - screenSizeType: options.screenSizeType, - }); - - // If there's an error and we haven't exceeded retry count, retry - if (result?.scriptError && currentRetry < maxRetries) { - logger.debug( - `Synthetic Monitor script error, retrying (${currentRetry + 1}/${maxRetries}): ${result.scriptError}`, - ); - - // Wait a bit before retrying - await new Promise((resolve: (value: void) => void) => { - setTimeout(resolve, 1000); - }); - - return this.executeWithRetry({ - script: options.script, - browserType: options.browserType, - screenSizeType: options.screenSizeType, - retryCountOnError: maxRetries, - currentRetry: currentRetry + 1, - }); - } - - return result; - } - - private static async executeByBrowserAndScreenSize(options: { - script: string; - browserType: BrowserType; - screenSizeType: ScreenSizeType; - }): Promise { - if (!options) { - // this should never happen - options = { - script: "", - browserType: BrowserType.Chromium, - screenSizeType: ScreenSizeType.Desktop, - }; - } - - const scriptResult: SyntheticMonitorResponse = { - logMessages: [], - scriptError: undefined, - 
result: undefined, - screenshots: {}, - executionTimeInMS: 0, - browserType: options.browserType, - screenSizeType: options.screenSizeType, - }; - - let browserSession: BrowserSession | null = null; - - try { - let result: ReturnResult | null = null; - - const startTime: [number, number] = process.hrtime(); - - browserSession = await SyntheticMonitor.getPageByBrowserType({ - browserType: options.browserType, - screenSizeType: options.screenSizeType, - }); - - if (!browserSession) { - throw new BadDataException( - "Could not create Playwright browser session", - ); - } - - /* - * Only expose `page` to the sandbox — never the `browser` object. - * Exposing `browser` allows RCE via browser.browserType().launch({executablePath:"/bin/sh"}). - */ - result = await VMRunner.runCodeInNodeVM({ - code: options.script, - options: { - timeout: PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS, - args: {}, - context: { - page: browserSession.page, - screenSizeType: options.screenSizeType, - browserType: options.browserType, - }, - }, - }); - - const endTime: [number, number] = process.hrtime(startTime); - - const executionTimeInMS: number = Math.ceil( - (endTime[0] * 1000000000 + endTime[1]) / 1000000, - ); - - scriptResult.executionTimeInMS = executionTimeInMS; - - scriptResult.logMessages = result.logMessages; - - if (result.returnValue?.screenshots) { - if (!scriptResult.screenshots) { - scriptResult.screenshots = {}; - } - - for (const screenshotName in result.returnValue.screenshots) { - if (!result.returnValue.screenshots[screenshotName]) { - continue; - } - - // check if this is of type Buffer. If it is not, continue. 
- - if ( - !(result.returnValue.screenshots[screenshotName] instanceof Buffer) - ) { - continue; - } - - const screenshotBuffer: Buffer = result.returnValue.screenshots[ - screenshotName - ] as Buffer; - scriptResult.screenshots[screenshotName] = - screenshotBuffer.toString("base64"); // convert screenshots to base 64 - } - } - - scriptResult.result = result?.returnValue?.data; - } catch (err: unknown) { - logger.error(err); - scriptResult.scriptError = - (err as Error)?.message || (err as Error).toString(); - } finally { - // Always dispose browser session to prevent zombie processes - await SyntheticMonitor.disposeBrowserSession(browserSession); - } - - return scriptResult; - } - - private static getViewportHeightAndWidth(options: { - screenSizeType: ScreenSizeType; - }): { - height: number; - width: number; - } { - let viewPortHeight: number = 0; - let viewPortWidth: number = 0; - - switch (options.screenSizeType) { - case ScreenSizeType.Desktop: - viewPortHeight = 1080; - viewPortWidth = 1920; - break; - case ScreenSizeType.Mobile: - viewPortHeight = 640; - viewPortWidth = 360; - break; - case ScreenSizeType.Tablet: - viewPortHeight = 768; - viewPortWidth = 1024; - break; - default: - viewPortHeight = 1080; - viewPortWidth = 1920; - break; - } - - return { height: viewPortHeight, width: viewPortWidth }; - } - - private static getPlaywrightBrowsersPath(): string { return ( - process.env["PLAYWRIGHT_BROWSERS_PATH"] || - `${os.homedir()}/.cache/ms-playwright` + combinationCount * + (attemptCount * (PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS + 30000) + + (attemptCount - 1) * 1000) + + 5000 ); } - - public static async getChromeExecutablePath(): Promise { - const browsersPath: string = this.getPlaywrightBrowsersPath(); - - const doesDirectoryExist: boolean = - await LocalFile.doesDirectoryExist(browsersPath); - if (!doesDirectoryExist) { - throw new BadDataException("Chrome executable path not found."); - } - - // get list of files in the directory - const 
directories: string[] = - await LocalFile.getListOfDirectories(browsersPath); - - if (directories.length === 0) { - throw new BadDataException("Chrome executable path not found."); - } - - const chromeInstallationName: string | undefined = directories.find( - (directory: string) => { - return directory.includes("chromium"); - }, - ); - - if (!chromeInstallationName) { - throw new BadDataException("Chrome executable path not found."); - } - - const chromeExecutableCandidates: Array = [ - `${browsersPath}/${chromeInstallationName}/chrome-linux/chrome`, - `${browsersPath}/${chromeInstallationName}/chrome-linux64/chrome`, - `${browsersPath}/${chromeInstallationName}/chrome64/chrome`, - `${browsersPath}/${chromeInstallationName}/chrome/chrome`, - ]; - - for (const executablePath of chromeExecutableCandidates) { - if (await LocalFile.doesFileExist(executablePath)) { - return executablePath; - } - } - - throw new BadDataException("Chrome executable path not found."); - } - - public static async getFirefoxExecutablePath(): Promise { - const browsersPath: string = this.getPlaywrightBrowsersPath(); - - const doesDirectoryExist: boolean = - await LocalFile.doesDirectoryExist(browsersPath); - if (!doesDirectoryExist) { - throw new BadDataException("Firefox executable path not found."); - } - - // get list of files in the directory - const directories: string[] = - await LocalFile.getListOfDirectories(browsersPath); - - if (directories.length === 0) { - throw new BadDataException("Firefox executable path not found."); - } - - const firefoxInstallationName: string | undefined = directories.find( - (directory: string) => { - return directory.includes("firefox"); - }, - ); - - if (!firefoxInstallationName) { - throw new BadDataException("Firefox executable path not found."); - } - - const firefoxExecutableCandidates: Array = [ - `${browsersPath}/${firefoxInstallationName}/firefox/firefox`, - `${browsersPath}/${firefoxInstallationName}/firefox-linux64/firefox`, - 
`${browsersPath}/${firefoxInstallationName}/firefox64/firefox`, - `${browsersPath}/${firefoxInstallationName}/firefox-64/firefox`, - ]; - - for (const executablePath of firefoxExecutableCandidates) { - if (await LocalFile.doesFileExist(executablePath)) { - return executablePath; - } - } - - throw new BadDataException("Firefox executable path not found."); - } - - private static async getPageByBrowserType(data: { - browserType: BrowserType; - screenSizeType: ScreenSizeType; - }): Promise { - const viewport: { - height: number; - width: number; - } = SyntheticMonitor.getViewportHeightAndWidth({ - screenSizeType: data.screenSizeType, - }); - - // Prepare browser launch options with proxy support - const baseOptions: BrowserLaunchOptions = {}; - - // Configure proxy if available - if (ProxyConfig.isProxyConfigured()) { - const httpsProxyUrl: string | null = ProxyConfig.getHttpsProxyUrl(); - const httpProxyUrl: string | null = ProxyConfig.getHttpProxyUrl(); - - // Prefer HTTPS proxy, fall back to HTTP proxy - const proxyUrl: string | null = httpsProxyUrl || httpProxyUrl; - - if (proxyUrl) { - baseOptions.proxy = { - server: proxyUrl, - }; - - // Extract username and password if present in proxy URL - try { - const parsedUrl: globalThis.URL = new URL(proxyUrl); - if (parsedUrl.username && parsedUrl.password) { - baseOptions.proxy.username = parsedUrl.username; - baseOptions.proxy.password = parsedUrl.password; - } - } catch (error) { - logger.warn(`Failed to parse proxy URL for authentication: ${error}`); - } - - logger.debug( - `Synthetic Monitor using proxy: ${proxyUrl} (HTTPS: ${Boolean(httpsProxyUrl)}, HTTP: ${Boolean(httpProxyUrl)})`, - ); - } - } - - if (data.browserType === BrowserType.Chromium) { - const browser: Browser = await chromium.launch({ - executablePath: await this.getChromeExecutablePath(), - ...baseOptions, - }); - - const context: BrowserContext = await browser.newContext({ - viewport: { - width: viewport.width, - height: viewport.height, - }, - }); 
- - const page: Page = await context.newPage(); - - return { - browser, - context, - page, - }; - } - - if (data.browserType === BrowserType.Firefox) { - const browser: Browser = await firefox.launch({ - executablePath: await this.getFirefoxExecutablePath(), - ...baseOptions, - }); - - let context: BrowserContext | null = null; - - try { - context = await browser.newContext({ - viewport: { - width: viewport.width, - height: viewport.height, - }, - }); - - const page: Page = await context.newPage(); - - return { - browser, - context, - page, - }; - } catch (error) { - await SyntheticMonitor.safeCloseBrowserContext(context); - await SyntheticMonitor.safeCloseBrowser(browser); - throw error; - } - } - - throw new BadDataException("Invalid Browser Type."); - } - - private static async disposeBrowserSession( - session: BrowserSession | null, - ): Promise { - if (!session) { - return; - } - - await SyntheticMonitor.safeClosePage(session.page); - await SyntheticMonitor.safeCloseBrowserContexts({ - browser: session.browser, - }); - await SyntheticMonitor.safeCloseBrowser(session.browser); - } - - private static async safeClosePage(page?: Page | null): Promise { - if (!page) { - return; - } - - try { - if (!page.isClosed()) { - await page.close(); - } - } catch (error) { - logger.warn( - `Failed to close Playwright page: ${(error as Error)?.message || error}`, - ); - } - } - - private static async safeCloseBrowserContext( - context?: BrowserContext | null, - ): Promise { - if (!context) { - return; - } - - try { - await context.close(); - } catch (error) { - logger.warn( - `Failed to close Playwright browser context: ${(error as Error)?.message || error}`, - ); - } - } - - private static async safeCloseBrowser( - browser?: Browser | null, - ): Promise { - if (!browser) { - return; - } - - try { - if (browser.isConnected()) { - await browser.close(); - } - } catch (error) { - logger.warn( - `Failed to close Playwright browser: ${(error as Error)?.message || error}`, - ); - } 
- } - - private static async safeCloseBrowserContexts(data: { - browser: Browser; - }): Promise { - if (!data.browser || !data.browser.contexts) { - return; - } - - const contexts: Array = data.browser.contexts(); - - for (const context of contexts) { - await SyntheticMonitor.safeCloseBrowserContext(context); - } - } } diff --git a/Probe/package.json b/Probe/package.json index ec2f4d46fd..eba485d05e 100644 --- a/Probe/package.json +++ b/Probe/package.json @@ -9,6 +9,7 @@ "main": "index.js", "scripts": { "start": "export NODE_OPTIONS='--max-old-space-size=8096' && node --require ts-node/register Index.ts", + "start:synthetic-runner": "export NODE_OPTIONS='--max-old-space-size=8096' && node --require ts-node/register SyntheticRunner/Index.ts", "compile": "tsc", "clear-modules": "rm -rf node_modules && rm package-lock.json && npm install", "dev": "npx nodemon", diff --git a/docker-compose.base.yml b/docker-compose.base.yml index 192cb84808..2d5c21b5b8 100644 --- a/docker-compose.base.yml +++ b/docker-compose.base.yml @@ -1,410 +1,450 @@ - x-common-variables: &common-variables - HOST: ${HOST} - PROVISION_SSL: ${PROVISION_SSL} + HOST: ${HOST} + PROVISION_SSL: ${PROVISION_SSL} - HTTP_PROTOCOL: ${HTTP_PROTOCOL} + HTTP_PROTOCOL: ${HTTP_PROTOCOL} - STATUS_PAGE_CNAME_RECORD: ${STATUS_PAGE_CNAME_RECORD} + STATUS_PAGE_CNAME_RECORD: ${STATUS_PAGE_CNAME_RECORD} - LOG_LEVEL: ${LOG_LEVEL} + LOG_LEVEL: ${LOG_LEVEL} - NODE_ENV: ${ENVIRONMENT} - BILLING_ENABLED: ${BILLING_ENABLED} - IS_ENTERPRISE_EDITION: ${IS_ENTERPRISE_EDITION} - BILLING_PUBLIC_KEY: ${BILLING_PUBLIC_KEY} - SUBSCRIPTION_PLAN_BASIC: ${SUBSCRIPTION_PLAN_BASIC} - SUBSCRIPTION_PLAN_GROWTH: ${SUBSCRIPTION_PLAN_GROWTH} - SUBSCRIPTION_PLAN_SCALE: ${SUBSCRIPTION_PLAN_SCALE} - SUBSCRIPTION_PLAN_ENTERPRISE: ${SUBSCRIPTION_PLAN_ENTERPRISE} - ANALYTICS_KEY: ${ANALYTICS_KEY} - ANALYTICS_HOST: ${ANALYTICS_HOST} - CAPTCHA_ENABLED: ${CAPTCHA_ENABLED} - CAPTCHA_SITE_KEY: ${CAPTCHA_SITE_KEY} + NODE_ENV: ${ENVIRONMENT} + 
BILLING_ENABLED: ${BILLING_ENABLED} + IS_ENTERPRISE_EDITION: ${IS_ENTERPRISE_EDITION} + BILLING_PUBLIC_KEY: ${BILLING_PUBLIC_KEY} + SUBSCRIPTION_PLAN_BASIC: ${SUBSCRIPTION_PLAN_BASIC} + SUBSCRIPTION_PLAN_GROWTH: ${SUBSCRIPTION_PLAN_GROWTH} + SUBSCRIPTION_PLAN_SCALE: ${SUBSCRIPTION_PLAN_SCALE} + SUBSCRIPTION_PLAN_ENTERPRISE: ${SUBSCRIPTION_PLAN_ENTERPRISE} + ANALYTICS_KEY: ${ANALYTICS_KEY} + ANALYTICS_HOST: ${ANALYTICS_HOST} + CAPTCHA_ENABLED: ${CAPTCHA_ENABLED} + CAPTCHA_SITE_KEY: ${CAPTCHA_SITE_KEY} - # VAPID keys for Web Push Notifications - VAPID_PUBLIC_KEY: ${VAPID_PUBLIC_KEY} - VAPID_SUBJECT: ${VAPID_SUBJECT} + # VAPID keys for Web Push Notifications + VAPID_PUBLIC_KEY: ${VAPID_PUBLIC_KEY} + VAPID_SUBJECT: ${VAPID_SUBJECT} - ALLOWED_ACTIVE_MONITOR_COUNT_IN_FREE_PLAN: ${ALLOWED_ACTIVE_MONITOR_COUNT_IN_FREE_PLAN} + ALLOWED_ACTIVE_MONITOR_COUNT_IN_FREE_PLAN: ${ALLOWED_ACTIVE_MONITOR_COUNT_IN_FREE_PLAN} - SERVER_APP_HOSTNAME: app - SERVER_TELEMETRY_HOSTNAME: telemetry - SERVER_WORKER_HOSTNAME: worker - SERVER_HOME_HOSTNAME: home - #Ports. Usually they don't need to change. - APP_PORT: ${APP_PORT} - HOME_PORT: ${HOME_PORT} - TELEMETRY_PORT: ${TELEMETRY_PORT} - WORKER_PORT: ${WORKER_PORT} - OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT: ${OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT} - OPENTELEMETRY_EXPORTER_OTLP_HEADERS: ${OPENTELEMETRY_EXPORTER_OTLP_HEADERS} + SERVER_APP_HOSTNAME: app + SERVER_TELEMETRY_HOSTNAME: telemetry + SERVER_WORKER_HOSTNAME: worker + SERVER_HOME_HOSTNAME: home + #Ports. Usually they don't need to change. 
+ APP_PORT: ${APP_PORT} + HOME_PORT: ${HOME_PORT} + TELEMETRY_PORT: ${TELEMETRY_PORT} + WORKER_PORT: ${WORKER_PORT} + OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT: ${OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT} + OPENTELEMETRY_EXPORTER_OTLP_HEADERS: ${OPENTELEMETRY_EXPORTER_OTLP_HEADERS} - SLACK_APP_CLIENT_ID: ${SLACK_APP_CLIENT_ID} + SLACK_APP_CLIENT_ID: ${SLACK_APP_CLIENT_ID} - - MICROSOFT_TEAMS_APP_CLIENT_ID: ${MICROSOFT_TEAMS_APP_CLIENT_ID} - MICROSOFT_TEAMS_APP_TENANT_ID: ${MICROSOFT_TEAMS_APP_TENANT_ID} + MICROSOFT_TEAMS_APP_CLIENT_ID: ${MICROSOFT_TEAMS_APP_CLIENT_ID} + MICROSOFT_TEAMS_APP_TENANT_ID: ${MICROSOFT_TEAMS_APP_TENANT_ID} x-common-runtime-variables: &common-runtime-variables - <<: *common-variables - ONEUPTIME_SECRET: ${ONEUPTIME_SECRET} + <<: *common-variables + ONEUPTIME_SECRET: ${ONEUPTIME_SECRET} - VAPID_PRIVATE_KEY: ${VAPID_PRIVATE_KEY} + VAPID_PRIVATE_KEY: ${VAPID_PRIVATE_KEY} - EXPO_ACCESS_TOKEN: ${EXPO_ACCESS_TOKEN} - PUSH_NOTIFICATION_RELAY_URL: ${PUSH_NOTIFICATION_RELAY_URL} + EXPO_ACCESS_TOKEN: ${EXPO_ACCESS_TOKEN} + PUSH_NOTIFICATION_RELAY_URL: ${PUSH_NOTIFICATION_RELAY_URL} - DATABASE_PORT: ${DATABASE_PORT} - DATABASE_USERNAME: ${DATABASE_USERNAME} - DATABASE_PASSWORD: ${DATABASE_PASSWORD} - DATABASE_NAME: ${DATABASE_NAME} - DATABASE_HOST: ${DATABASE_HOST} - DATABASE_SSL_CA: ${DATABASE_SSL_CA} - DATABASE_SSL_KEY: ${DATABASE_SSL_KEY} - DATABASE_SSL_CERT: ${DATABASE_SSL_CERT} - DATABASE_SSL_REJECT_UNAUTHORIZED: ${DATABASE_SSL_REJECT_UNAUTHORIZED} - LETS_ENCRYPT_NOTIFICATION_EMAIL: ${LETS_ENCRYPT_NOTIFICATION_EMAIL} - LETS_ENCRYPT_ACCOUNT_KEY: ${LETS_ENCRYPT_ACCOUNT_KEY} + DATABASE_PORT: ${DATABASE_PORT} + DATABASE_USERNAME: ${DATABASE_USERNAME} + DATABASE_PASSWORD: ${DATABASE_PASSWORD} + DATABASE_NAME: ${DATABASE_NAME} + DATABASE_HOST: ${DATABASE_HOST} + DATABASE_SSL_CA: ${DATABASE_SSL_CA} + DATABASE_SSL_KEY: ${DATABASE_SSL_KEY} + DATABASE_SSL_CERT: ${DATABASE_SSL_CERT} + DATABASE_SSL_REJECT_UNAUTHORIZED: ${DATABASE_SSL_REJECT_UNAUTHORIZED} + 
LETS_ENCRYPT_NOTIFICATION_EMAIL: ${LETS_ENCRYPT_NOTIFICATION_EMAIL} + LETS_ENCRYPT_ACCOUNT_KEY: ${LETS_ENCRYPT_ACCOUNT_KEY} - REDIS_USERNAME: ${REDIS_USERNAME} - REDIS_PASSWORD: ${REDIS_PASSWORD} - REDIS_HOST: ${REDIS_HOST} - REDIS_PORT: ${REDIS_PORT} - REDIS_DB: ${REDIS_DB} - REDIS_IP_FAMILY: ${REDIS_IP_FAMILY} - REDIS_TLS_CA: ${REDIS_TLS_CA} - REDIS_TLS_SENTINEL_MODE: ${REDIS_TLS_SENTINEL_MODE} + REDIS_USERNAME: ${REDIS_USERNAME} + REDIS_PASSWORD: ${REDIS_PASSWORD} + REDIS_HOST: ${REDIS_HOST} + REDIS_PORT: ${REDIS_PORT} + REDIS_DB: ${REDIS_DB} + REDIS_IP_FAMILY: ${REDIS_IP_FAMILY} + REDIS_TLS_CA: ${REDIS_TLS_CA} + REDIS_TLS_SENTINEL_MODE: ${REDIS_TLS_SENTINEL_MODE} - ENCRYPTION_SECRET: ${ENCRYPTION_SECRET} + ENCRYPTION_SECRET: ${ENCRYPTION_SECRET} - BILLING_PRIVATE_KEY: ${BILLING_PRIVATE_KEY} - BILLING_PUBLIC_KEY: ${BILLING_PUBLIC_KEY} - BILLING_ENABLED: ${BILLING_ENABLED} - BILLING_WEBHOOK_SECRET: ${BILLING_WEBHOOK_SECRET} + BILLING_PRIVATE_KEY: ${BILLING_PRIVATE_KEY} + BILLING_PUBLIC_KEY: ${BILLING_PUBLIC_KEY} + BILLING_ENABLED: ${BILLING_ENABLED} + BILLING_WEBHOOK_SECRET: ${BILLING_WEBHOOK_SECRET} - CLICKHOUSE_USER: ${CLICKHOUSE_USER} - CLICKHOUSE_PASSWORD: ${CLICKHOUSE_PASSWORD} - CLICKHOUSE_DATABASE: ${CLICKHOUSE_DATABASE} - CLICKHOUSE_HOST: ${CLICKHOUSE_HOST} - CLICKHOUSE_PORT: ${CLICKHOUSE_PORT} + CLICKHOUSE_USER: ${CLICKHOUSE_USER} + CLICKHOUSE_PASSWORD: ${CLICKHOUSE_PASSWORD} + CLICKHOUSE_DATABASE: ${CLICKHOUSE_DATABASE} + CLICKHOUSE_HOST: ${CLICKHOUSE_HOST} + CLICKHOUSE_PORT: ${CLICKHOUSE_PORT} - AVERAGE_SPAN_ROW_SIZE_IN_BYTES: ${AVERAGE_SPAN_ROW_SIZE_IN_BYTES} - AVERAGE_LOG_ROW_SIZE_IN_BYTES: ${AVERAGE_LOG_ROW_SIZE_IN_BYTES} - AVERAGE_METRIC_ROW_SIZE_IN_BYTES: ${AVERAGE_METRIC_ROW_SIZE_IN_BYTES} - AVERAGE_EXCEPTION_ROW_SIZE_IN_BYTES: ${AVERAGE_EXCEPTION_ROW_SIZE_IN_BYTES} + AVERAGE_SPAN_ROW_SIZE_IN_BYTES: ${AVERAGE_SPAN_ROW_SIZE_IN_BYTES} + AVERAGE_LOG_ROW_SIZE_IN_BYTES: ${AVERAGE_LOG_ROW_SIZE_IN_BYTES} + AVERAGE_METRIC_ROW_SIZE_IN_BYTES: 
${AVERAGE_METRIC_ROW_SIZE_IN_BYTES} + AVERAGE_EXCEPTION_ROW_SIZE_IN_BYTES: ${AVERAGE_EXCEPTION_ROW_SIZE_IN_BYTES} - WORKFLOW_SCRIPT_TIMEOUT_IN_MS: ${WORKFLOW_SCRIPT_TIMEOUT_IN_MS} - WORKFLOW_TIMEOUT_IN_MS: ${WORKFLOW_TIMEOUT_IN_MS} + WORKFLOW_SCRIPT_TIMEOUT_IN_MS: ${WORKFLOW_SCRIPT_TIMEOUT_IN_MS} + WORKFLOW_TIMEOUT_IN_MS: ${WORKFLOW_TIMEOUT_IN_MS} - DISABLE_AUTOMATIC_INCIDENT_CREATION: ${DISABLE_AUTOMATIC_INCIDENT_CREATION} + DISABLE_AUTOMATIC_INCIDENT_CREATION: ${DISABLE_AUTOMATIC_INCIDENT_CREATION} - DISABLE_AUTOMATIC_ALERT_CREATION: ${DISABLE_AUTOMATIC_ALERT_CREATION} + DISABLE_AUTOMATIC_ALERT_CREATION: ${DISABLE_AUTOMATIC_ALERT_CREATION} - # Open Source Deployment Webhook - OPEN_SOURCE_DEPLOYMENT_WEBHOOK_URL: ${OPEN_SOURCE_DEPLOYMENT_WEBHOOK_URL} + # Open Source Deployment Webhook + OPEN_SOURCE_DEPLOYMENT_WEBHOOK_URL: ${OPEN_SOURCE_DEPLOYMENT_WEBHOOK_URL} - # Notification Webhooks - NOTIFICATION_SLACK_WEBHOOK_ON_CREATED_USER: ${NOTIFICATION_SLACK_WEBHOOK_ON_CREATED_USER} - NOTIFICATION_SLACK_WEBHOOK_ON_CREATED_PROJECT: ${NOTIFICATION_SLACK_WEBHOOK_ON_CREATED_PROJECT} - NOTIFICATION_SLACK_WEBHOOK_ON_DELETED_PROJECT: ${NOTIFICATION_SLACK_WEBHOOK_ON_DELETED_PROJECT} - NOTIFICATION_SLACK_WEBHOOK_ON_SUBSCRIPTION_UPDATE: ${NOTIFICATION_SLACK_WEBHOOK_ON_SUBSCRIPTION_UPDATE} + # Notification Webhooks + NOTIFICATION_SLACK_WEBHOOK_ON_CREATED_USER: ${NOTIFICATION_SLACK_WEBHOOK_ON_CREATED_USER} + NOTIFICATION_SLACK_WEBHOOK_ON_CREATED_PROJECT: ${NOTIFICATION_SLACK_WEBHOOK_ON_CREATED_PROJECT} + NOTIFICATION_SLACK_WEBHOOK_ON_DELETED_PROJECT: ${NOTIFICATION_SLACK_WEBHOOK_ON_DELETED_PROJECT} + NOTIFICATION_SLACK_WEBHOOK_ON_SUBSCRIPTION_UPDATE: ${NOTIFICATION_SLACK_WEBHOOK_ON_SUBSCRIPTION_UPDATE} - SLACK_APP_CLIENT_SECRET: ${SLACK_APP_CLIENT_SECRET} - SLACK_APP_SIGNING_SECRET: ${SLACK_APP_SIGNING_SECRET} + SLACK_APP_CLIENT_SECRET: ${SLACK_APP_CLIENT_SECRET} + SLACK_APP_SIGNING_SECRET: ${SLACK_APP_SIGNING_SECRET} - # Microsoft Teams Configuration + # Microsoft Teams Configuration 
- MICROSOFT_TEAMS_APP_CLIENT_SECRET: ${MICROSOFT_TEAMS_APP_CLIENT_SECRET} - CAPTCHA_SECRET_KEY: ${CAPTCHA_SECRET_KEY} + MICROSOFT_TEAMS_APP_CLIENT_SECRET: ${MICROSOFT_TEAMS_APP_CLIENT_SECRET} + CAPTCHA_SECRET_KEY: ${CAPTCHA_SECRET_KEY} - # GitHub App Configuration - GITHUB_APP_ID: ${GITHUB_APP_ID} - GITHUB_APP_NAME: ${GITHUB_APP_NAME} - GITHUB_APP_CLIENT_ID: ${GITHUB_APP_CLIENT_ID} - GITHUB_APP_CLIENT_SECRET: ${GITHUB_APP_CLIENT_SECRET} - GITHUB_APP_PRIVATE_KEY: ${GITHUB_APP_PRIVATE_KEY} - GITHUB_APP_WEBHOOK_SECRET: ${GITHUB_APP_WEBHOOK_SECRET} + # GitHub App Configuration + GITHUB_APP_ID: ${GITHUB_APP_ID} + GITHUB_APP_NAME: ${GITHUB_APP_NAME} + GITHUB_APP_CLIENT_ID: ${GITHUB_APP_CLIENT_ID} + GITHUB_APP_CLIENT_SECRET: ${GITHUB_APP_CLIENT_SECRET} + GITHUB_APP_PRIVATE_KEY: ${GITHUB_APP_PRIVATE_KEY} + GITHUB_APP_WEBHOOK_SECRET: ${GITHUB_APP_WEBHOOK_SECRET} - # Inbound Email Configuration (for Incoming Email Monitor) - INBOUND_EMAIL_PROVIDER: ${INBOUND_EMAIL_PROVIDER} - INBOUND_EMAIL_DOMAIN: ${INBOUND_EMAIL_DOMAIN} - INBOUND_EMAIL_WEBHOOK_SECRET: ${INBOUND_EMAIL_WEBHOOK_SECRET} + # Inbound Email Configuration (for Incoming Email Monitor) + INBOUND_EMAIL_PROVIDER: ${INBOUND_EMAIL_PROVIDER} + INBOUND_EMAIL_DOMAIN: ${INBOUND_EMAIL_DOMAIN} + INBOUND_EMAIL_WEBHOOK_SECRET: ${INBOUND_EMAIL_WEBHOOK_SECRET} services: + redis: + image: redis:7.0.12 + restart: always + networks: + - oneuptime + command: redis-server --requirepass "${REDIS_PASSWORD}" --save "" --appendonly no + environment: + REDIS_PASSWORD: ${REDIS_PASSWORD} + healthcheck: + test: ["CMD", "redis-cli", "-a", "${REDIS_PASSWORD}", "ping"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + logging: + driver: "local" + options: + max-size: "1000m" - - redis: - image: redis:7.0.12 - restart: always - networks: - - oneuptime - command: redis-server --requirepass "${REDIS_PASSWORD}" --save "" --appendonly no - environment: - REDIS_PASSWORD: ${REDIS_PASSWORD} - healthcheck: - test: ["CMD", "redis-cli", "-a", 
"${REDIS_PASSWORD}", "ping"] - interval: 10s - timeout: 5s - retries: 5 - start_period: 10s - logging: - driver: "local" - options: - max-size: "1000m" + clickhouse: + image: clickhouse/clickhouse-server:25.7 + restart: always + environment: + CLICKHOUSE_USER: ${CLICKHOUSE_USER} + CLICKHOUSE_PASSWORD: ${CLICKHOUSE_PASSWORD} + CLICKHOUSE_DB: ${CLICKHOUSE_DATABASE} + CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT: 1 + networks: + - oneuptime + volumes: + - clickhouse:/var/lib/clickhouse/ + healthcheck: + test: ["CMD-SHELL", "clickhouse-client --query 'SELECT 1'"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 15s + logging: + driver: "local" + options: + max-size: "1000m" - clickhouse: - image: clickhouse/clickhouse-server:25.7 - restart: always - environment: - CLICKHOUSE_USER: ${CLICKHOUSE_USER} - CLICKHOUSE_PASSWORD: ${CLICKHOUSE_PASSWORD} - CLICKHOUSE_DB: ${CLICKHOUSE_DATABASE} - CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT: 1 - networks: - - oneuptime - volumes: - - clickhouse:/var/lib/clickhouse/ - healthcheck: - test: ["CMD-SHELL", "clickhouse-client --query 'SELECT 1'"] - interval: 10s - timeout: 5s - retries: 5 - start_period: 15s - logging: - driver: "local" - options: - max-size: "1000m" + postgres: + image: postgres:15 + restart: always + environment: + POSTGRES_USER: ${DATABASE_USERNAME} + POSTGRES_PASSWORD: ${DATABASE_PASSWORD} + POSTGRES_DB: ${DATABASE_NAME} + networks: + - oneuptime + volumes: + - postgres:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 15s + logging: + driver: "local" + options: + max-size: "1000m" - postgres: - image: postgres:15 - restart: always - environment: - POSTGRES_USER: ${DATABASE_USERNAME} - POSTGRES_PASSWORD: ${DATABASE_PASSWORD} - POSTGRES_DB: ${DATABASE_NAME} - networks: - - oneuptime - volumes: - - postgres:/var/lib/postgresql/data - healthcheck: - test: ["CMD-SHELL", "pg_isready"] - interval: 10s - timeout: 5s - retries: 5 - start_period: 
15s - logging: - driver: "local" - options: - max-size: "1000m" - - llm: - networks: - - oneuptime - restart: always - environment: - <<: *common-runtime-variables - PORT: 8547 - HF_TOKEN: ${LLM_SERVER_HUGGINGFACE_TOKEN} - HF_MODEL_NAME: ${LLM_SERVER_HUGGINGFACE_MODEL_NAME} - volumes: - - ./LLM/Models:/app/Models - logging: - driver: "local" - options: - max-size: "1000m" + llm: + networks: + - oneuptime + restart: always + environment: + <<: *common-runtime-variables + PORT: 8547 + HF_TOKEN: ${LLM_SERVER_HUGGINGFACE_TOKEN} + HF_MODEL_NAME: ${LLM_SERVER_HUGGINGFACE_MODEL_NAME} + volumes: + - ./LLM/Models:/app/Models + logging: + driver: "local" + options: + max-size: "1000m" - test-server: - networks: - - oneuptime - restart: always - environment: - <<: *common-runtime-variables - PORT: ${TEST_SERVER_PORT} - DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_TEST_SERVER} - logging: - driver: "local" - options: - max-size: "1000m" - + test-server: + networks: + - oneuptime + restart: always + environment: + <<: *common-runtime-variables + PORT: ${TEST_SERVER_PORT} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_TEST_SERVER} + logging: + driver: "local" + options: + max-size: "1000m" - app: - networks: - - oneuptime - restart: always - environment: - <<: *common-runtime-variables - PORT: ${APP_PORT} - SMS_DEFAULT_COST_IN_CENTS: ${SMS_DEFAULT_COST_IN_CENTS} - WHATSAPP_TEXT_DEFAULT_COST_IN_CENTS: ${WHATSAPP_TEXT_DEFAULT_COST_IN_CENTS} - CALL_DEFAULT_COST_IN_CENTS_PER_MINUTE: ${CALL_DEFAULT_COST_IN_CENTS_PER_MINUTE} - SMS_HIGH_RISK_COST_IN_CENTS: ${SMS_HIGH_RISK_COST_IN_CENTS} - CALL_HIGH_RISK_COST_IN_CENTS_PER_MINUTE: ${CALL_HIGH_RISK_COST_IN_CENTS_PER_MINUTE} - DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_APP} - logging: - driver: "local" - options: - max-size: "1000m" + app: + networks: + - oneuptime + restart: always + environment: + <<: *common-runtime-variables + PORT: ${APP_PORT} + SMS_DEFAULT_COST_IN_CENTS: ${SMS_DEFAULT_COST_IN_CENTS} + WHATSAPP_TEXT_DEFAULT_COST_IN_CENTS: 
${WHATSAPP_TEXT_DEFAULT_COST_IN_CENTS} + CALL_DEFAULT_COST_IN_CENTS_PER_MINUTE: ${CALL_DEFAULT_COST_IN_CENTS_PER_MINUTE} + SMS_HIGH_RISK_COST_IN_CENTS: ${SMS_HIGH_RISK_COST_IN_CENTS} + CALL_HIGH_RISK_COST_IN_CENTS_PER_MINUTE: ${CALL_HIGH_RISK_COST_IN_CENTS_PER_MINUTE} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_APP} + logging: + driver: "local" + options: + max-size: "1000m" - home: - networks: - - oneuptime - restart: always - environment: - <<: *common-runtime-variables - PORT: ${HOME_PORT} - logging: - driver: "local" - options: - max-size: "1000m" + home: + networks: + - oneuptime + restart: always + environment: + <<: *common-runtime-variables + PORT: ${HOME_PORT} + logging: + driver: "local" + options: + max-size: "1000m" - worker: - networks: - - oneuptime - restart: always - environment: - <<: *common-runtime-variables - PORT: ${WORKER_PORT} - DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_WORKER} - WORKER_CONCURRENCY: ${WORKER_CONCURRENCY} - logging: - driver: "local" - options: - max-size: "1000m" + worker: + networks: + - oneuptime + restart: always + environment: + <<: *common-runtime-variables + PORT: ${WORKER_PORT} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_WORKER} + WORKER_CONCURRENCY: ${WORKER_CONCURRENCY} + logging: + driver: "local" + options: + max-size: "1000m" - probe-1: - restart: always - network_mode: host - environment: - ONEUPTIME_URL: ${GLOBAL_PROBE_1_ONEUPTIME_URL} - REGISTER_PROBE_KEY: ${REGISTER_PROBE_KEY} - PROBE_NAME: ${GLOBAL_PROBE_1_NAME} - PROBE_DESCRIPTION: ${GLOBAL_PROBE_1_DESCRIPTION} - PROBE_MONITORING_WORKERS: ${GLOBAL_PROBE_1_MONITORING_WORKERS} - PROBE_KEY: ${GLOBAL_PROBE_1_KEY} - PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_1_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS} - PROBE_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_1_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS} - PROBE_MONITOR_FETCH_LIMIT: ${GLOBAL_PROBE_1_MONITOR_FETCH_LIMIT} - PORT: ${GLOBAL_PROBE_1_PORT} - NODE_ENV: ${ENVIRONMENT} - LOG_LEVEL: 
${LOG_LEVEL} - DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_PROBE} - OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT: ${OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT} - OPENTELEMETRY_EXPORTER_OTLP_HEADERS: ${OPENTELEMETRY_EXPORTER_OTLP_HEADERS} - logging: - driver: "local" - options: - max-size: "1000m" + probe-1: + restart: always + network_mode: host + environment: + ONEUPTIME_URL: ${GLOBAL_PROBE_1_ONEUPTIME_URL} + REGISTER_PROBE_KEY: ${REGISTER_PROBE_KEY} + PROBE_NAME: ${GLOBAL_PROBE_1_NAME} + PROBE_DESCRIPTION: ${GLOBAL_PROBE_1_DESCRIPTION} + PROBE_MONITORING_WORKERS: ${GLOBAL_PROBE_1_MONITORING_WORKERS} + PROBE_KEY: ${GLOBAL_PROBE_1_KEY} + PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_1_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS} + PROBE_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_1_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS} + PROBE_MONITOR_FETCH_LIMIT: ${GLOBAL_PROBE_1_MONITOR_FETCH_LIMIT} + PROBE_SYNTHETIC_RUNNER_URL: http://127.0.0.1:${GLOBAL_PROBE_1_SYNTHETIC_RUNNER_PORT:-3885} + HTTP_PROXY_URL: ${GLOBAL_PROBE_1_PROXY_URL} + HTTPS_PROXY_URL: ${GLOBAL_PROBE_1_PROXY_URL} + PORT: ${GLOBAL_PROBE_1_PORT} + NODE_ENV: ${ENVIRONMENT} + LOG_LEVEL: ${LOG_LEVEL} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_PROBE} + OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT: ${OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT} + OPENTELEMETRY_EXPORTER_OTLP_HEADERS: ${OPENTELEMETRY_EXPORTER_OTLP_HEADERS} + logging: + driver: "local" + options: + max-size: "1000m" - probe-2: - restart: always - network_mode: host - environment: - ONEUPTIME_URL: ${GLOBAL_PROBE_2_ONEUPTIME_URL} - REGISTER_PROBE_KEY: ${REGISTER_PROBE_KEY} - PROBE_NAME: ${GLOBAL_PROBE_2_NAME} - PROBE_DESCRIPTION: ${GLOBAL_PROBE_2_DESCRIPTION} - PROBE_MONITORING_WORKERS: ${GLOBAL_PROBE_2_MONITORING_WORKERS} - PROBE_KEY: ${GLOBAL_PROBE_2_KEY} - PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_2_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS} - PROBE_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS: 
${GLOBAL_PROBE_2_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS} - PROBE_MONITOR_FETCH_LIMIT: ${GLOBAL_PROBE_2_MONITOR_FETCH_LIMIT} - PORT: ${GLOBAL_PROBE_2_PORT} - NODE_ENV: ${ENVIRONMENT} - LOG_LEVEL: ${LOG_LEVEL} - DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_PROBE} - OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT: ${OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT} - OPENTELEMETRY_EXPORTER_OTLP_HEADERS: ${OPENTELEMETRY_EXPORTER_OTLP_HEADERS} - logging: - driver: "local" - options: - max-size: "1000m" + probe-2: + restart: always + network_mode: host + environment: + ONEUPTIME_URL: ${GLOBAL_PROBE_2_ONEUPTIME_URL} + REGISTER_PROBE_KEY: ${REGISTER_PROBE_KEY} + PROBE_NAME: ${GLOBAL_PROBE_2_NAME} + PROBE_DESCRIPTION: ${GLOBAL_PROBE_2_DESCRIPTION} + PROBE_MONITORING_WORKERS: ${GLOBAL_PROBE_2_MONITORING_WORKERS} + PROBE_KEY: ${GLOBAL_PROBE_2_KEY} + PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_2_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS} + PROBE_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_2_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS} + PROBE_MONITOR_FETCH_LIMIT: ${GLOBAL_PROBE_2_MONITOR_FETCH_LIMIT} + PROBE_SYNTHETIC_RUNNER_URL: http://127.0.0.1:${GLOBAL_PROBE_2_SYNTHETIC_RUNNER_PORT:-3886} + HTTP_PROXY_URL: ${GLOBAL_PROBE_2_PROXY_URL} + HTTPS_PROXY_URL: ${GLOBAL_PROBE_2_PROXY_URL} + PORT: ${GLOBAL_PROBE_2_PORT} + NODE_ENV: ${ENVIRONMENT} + LOG_LEVEL: ${LOG_LEVEL} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_PROBE} + OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT: ${OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT} + OPENTELEMETRY_EXPORTER_OTLP_HEADERS: ${OPENTELEMETRY_EXPORTER_OTLP_HEADERS} + logging: + driver: "local" + options: + max-size: "1000m" - ai-agent: - restart: always - network_mode: host - environment: - <<: *common-runtime-variables - AI_AGENT_KEY: ${AI_AGENT_KEY} - ONEUPTIME_URL: ${AI_AGENT_ONEUPTIME_URL} - DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_AI_AGENT} - PORT: ${AI_AGENT_PORT} - logging: - driver: "local" - options: - max-size: "1000m" + synthetic-runner-1: + restart: always 
+ network_mode: host + command: npm run start:synthetic-runner + environment: + ONEUPTIME_SECRET: ${ONEUPTIME_SECRET} + PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_1_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS} + HTTP_PROXY_URL: ${GLOBAL_PROBE_1_PROXY_URL} + HTTPS_PROXY_URL: ${GLOBAL_PROBE_1_PROXY_URL} + PORT: ${GLOBAL_PROBE_1_SYNTHETIC_RUNNER_PORT:-3885} + NODE_ENV: ${ENVIRONMENT} + LOG_LEVEL: ${LOG_LEVEL} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_PROBE} + OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT: ${OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT} + OPENTELEMETRY_EXPORTER_OTLP_HEADERS: ${OPENTELEMETRY_EXPORTER_OTLP_HEADERS} + logging: + driver: "local" + options: + max-size: "1000m" - fluentd: - networks: - - oneuptime - restart: always - logging: - driver: "local" - options: - max-size: "1000m" - - fluent-bit: - networks: - - oneuptime - restart: always - logging: - driver: "local" - options: - max-size: "1000m" + synthetic-runner-2: + restart: always + network_mode: host + command: npm run start:synthetic-runner + environment: + ONEUPTIME_SECRET: ${ONEUPTIME_SECRET} + PROBE_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS: ${GLOBAL_PROBE_2_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS} + HTTP_PROXY_URL: ${GLOBAL_PROBE_2_PROXY_URL} + HTTPS_PROXY_URL: ${GLOBAL_PROBE_2_PROXY_URL} + PORT: ${GLOBAL_PROBE_2_SYNTHETIC_RUNNER_PORT:-3886} + NODE_ENV: ${ENVIRONMENT} + LOG_LEVEL: ${LOG_LEVEL} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_PROBE} + OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT: ${OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT} + OPENTELEMETRY_EXPORTER_OTLP_HEADERS: ${OPENTELEMETRY_EXPORTER_OTLP_HEADERS} + logging: + driver: "local" + options: + max-size: "1000m" - telemetry: - networks: - - oneuptime - restart: always - environment: - <<: *common-runtime-variables - PORT: ${TELEMETRY_PORT} - DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_TELEMETRY} - # Max concurrent telemetry jobs the worker will process - TELEMETRY_CONCURRENCY: ${TELEMETRY_CONCURRENCY} - REGISTER_PROBE_KEY: ${REGISTER_PROBE_KEY} 
- logging: - driver: "local" - options: - max-size: "1000m" - - e2e: - restart: "no" - network_mode: host # This is needed to access the host network, - environment: - <<: *common-variables - E2E_TEST_IS_USER_REGISTERED: ${E2E_TEST_IS_USER_REGISTERED} - E2E_TEST_REGISTERED_USER_EMAIL: ${E2E_TEST_REGISTERED_USER_EMAIL} - E2E_TEST_REGISTERED_USER_PASSWORD: ${E2E_TEST_REGISTERED_USER_PASSWORD} - E2E_TEST_STATUS_PAGE_URL: ${E2E_TEST_STATUS_PAGE_URL} - E2E_TESTS_FAILED_WEBHOOK_URL: ${E2E_TESTS_FAILED_WEBHOOK_URL} - logging: - driver: "local" - options: - max-size: "1000m" + ai-agent: + restart: always + network_mode: host + environment: + <<: *common-runtime-variables + AI_AGENT_KEY: ${AI_AGENT_KEY} + ONEUPTIME_URL: ${AI_AGENT_ONEUPTIME_URL} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_AI_AGENT} + PORT: ${AI_AGENT_PORT} + logging: + driver: "local" + options: + max-size: "1000m" - ingress: - restart: always - networks: - - oneuptime - environment: - <<: *common-runtime-variables - ONEUPTIME_HTTP_PORT: ${ONEUPTIME_HTTP_PORT} - NGINX_LISTEN_ADDRESS: ${NGINX_LISTEN_ADDRESS} - NGINX_LISTEN_OPTIONS: ${NGINX_LISTEN_OPTIONS} - DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_INGRESS} - ports: - - '${ONEUPTIME_HTTP_PORT}:7849' - - '${STATUS_PAGE_HTTPS_PORT}:7850' - logging: - driver: "local" - options: - max-size: "1000m" + fluentd: + networks: + - oneuptime + restart: always + logging: + driver: "local" + options: + max-size: "1000m" + + fluent-bit: + networks: + - oneuptime + restart: always + logging: + driver: "local" + options: + max-size: "1000m" + + telemetry: + networks: + - oneuptime + restart: always + environment: + <<: *common-runtime-variables + PORT: ${TELEMETRY_PORT} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_TELEMETRY} + # Max concurrent telemetry jobs the worker will process + TELEMETRY_CONCURRENCY: ${TELEMETRY_CONCURRENCY} + REGISTER_PROBE_KEY: ${REGISTER_PROBE_KEY} + logging: + driver: "local" + options: + max-size: "1000m" + + e2e: + restart: "no" + 
network_mode: host # This is needed to access the host network, + environment: + <<: *common-variables + E2E_TEST_IS_USER_REGISTERED: ${E2E_TEST_IS_USER_REGISTERED} + E2E_TEST_REGISTERED_USER_EMAIL: ${E2E_TEST_REGISTERED_USER_EMAIL} + E2E_TEST_REGISTERED_USER_PASSWORD: ${E2E_TEST_REGISTERED_USER_PASSWORD} + E2E_TEST_STATUS_PAGE_URL: ${E2E_TEST_STATUS_PAGE_URL} + E2E_TESTS_FAILED_WEBHOOK_URL: ${E2E_TESTS_FAILED_WEBHOOK_URL} + logging: + driver: "local" + options: + max-size: "1000m" + + ingress: + restart: always + networks: + - oneuptime + environment: + <<: *common-runtime-variables + ONEUPTIME_HTTP_PORT: ${ONEUPTIME_HTTP_PORT} + NGINX_LISTEN_ADDRESS: ${NGINX_LISTEN_ADDRESS} + NGINX_LISTEN_OPTIONS: ${NGINX_LISTEN_OPTIONS} + DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_INGRESS} + ports: + - "${ONEUPTIME_HTTP_PORT}:7849" + - "${STATUS_PAGE_HTTPS_PORT}:7850" + logging: + driver: "local" + options: + max-size: "1000m" - volumes: - postgres: - clickhouse: + postgres: + clickhouse: networks: oneuptime: diff --git a/docker-compose.billing.yml b/docker-compose.billing.yml index 3de9884645..83b452e879 100644 --- a/docker-compose.billing.yml +++ b/docker-compose.billing.yml @@ -2,84 +2,88 @@ # It extends docker-compose.yml with the home container. 
x-common-depends-on: &common-depends-on - postgres: - condition: service_healthy - redis: - condition: service_healthy - clickhouse: - condition: service_healthy + postgres: + condition: service_healthy + redis: + condition: service_healthy + clickhouse: + condition: service_healthy services: + redis: + extends: + file: ./docker-compose.base.yml + service: redis - redis: - extends: - file: ./docker-compose.base.yml - service: redis + clickhouse: + extends: + file: ./docker-compose.base.yml + service: clickhouse - clickhouse: - extends: - file: ./docker-compose.base.yml - service: clickhouse + postgres: + ports: + - "5400:5432" + extends: + file: ./docker-compose.base.yml + service: postgres - postgres: - ports: - - "5400:5432" - extends: - file: ./docker-compose.base.yml - service: postgres + app: + image: oneuptime/app:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: app + depends_on: + <<: *common-depends-on + home: + image: oneuptime/home:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: home + depends_on: + <<: *common-depends-on - app: - image: oneuptime/app:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: app - depends_on: - <<: *common-depends-on + worker: + image: oneuptime/worker:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: worker + depends_on: + <<: *common-depends-on - home: - image: oneuptime/home:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: home - depends_on: - <<: *common-depends-on + probe-1: + image: oneuptime/probe:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: probe-1 + depends_on: + <<: *common-depends-on - worker: - image: oneuptime/worker:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: worker - depends_on: - <<: *common-depends-on + synthetic-runner-1: + image: oneuptime/probe:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: synthetic-runner-1 - probe-1: - image: oneuptime/probe:${APP_TAG} - 
extends: - file: ./docker-compose.base.yml - service: probe-1 - depends_on: - <<: *common-depends-on + telemetry: + image: oneuptime/telemetry:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: telemetry + depends_on: + <<: *common-depends-on - telemetry: - image: oneuptime/telemetry:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: telemetry - depends_on: - <<: *common-depends-on - - ingress: - image: oneuptime/nginx:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: ingress - depends_on: - <<: *common-depends-on + ingress: + image: oneuptime/nginx:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: ingress + depends_on: + <<: *common-depends-on volumes: - postgres: - clickhouse: + postgres: + clickhouse: networks: oneuptime: diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 94007ed7aa..28cff51d20 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,281 +1,297 @@ - - x-common-depends-on: &common-depends-on - postgres: - condition: service_healthy - redis: - condition: service_healthy - clickhouse: - condition: service_healthy + postgres: + condition: service_healthy + redis: + condition: service_healthy + clickhouse: + condition: service_healthy services: + redis: + ports: + - "6310:6379" + extends: + file: ./docker-compose.base.yml + service: redis + clickhouse: + ports: + - "9034:9000" + - "8189:8123" + extends: + file: ./docker-compose.base.yml + service: clickhouse + volumes: + - ./Clickhouse/config.xml:/etc/clickhouse-server/config.xml - redis: - ports: - - '6310:6379' - extends: - file: ./docker-compose.base.yml - service: redis + postgres: + ports: + - "5400:5432" + extends: + file: ./docker-compose.base.yml + service: postgres - clickhouse: - ports: - - '9034:9000' - - '8189:8123' - extends: - file: ./docker-compose.base.yml - service: clickhouse - volumes: - - ./Clickhouse/config.xml:/etc/clickhouse-server/config.xml + test-server: + volumes: + - 
./TestServer:/usr/src/app:cached + # Use node modules of the container and not host system. + # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached - postgres: - ports: - - '5400:5432' - extends: - file: ./docker-compose.base.yml - service: postgres + - /usr/src/Common/node_modules/ + extends: + file: ./docker-compose.base.yml + service: test-server + depends_on: + <<: *common-depends-on + ports: + - "9141:9229" # Debugging port. + - "3800:3800" + build: + network: host + context: . + dockerfile: ./TestServer/Dockerfile - test-server: - volumes: - - ./TestServer:/usr/src/app:cached - # Use node modules of the container and not host system. - # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - - /usr/src/app/node_modules/ - - ./Common:/usr/src/Common:cached + home: + volumes: + - ./Home:/usr/src/app:cached + # Use node modules of the container and not host system. + # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached + - /usr/src/Common/node_modules/ + extends: + file: ./docker-compose.base.yml + service: home + depends_on: + <<: *common-depends-on + ports: + - "9212:9229" # Debugging port. + build: + network: host + context: . + dockerfile: ./Home/Dockerfile - - /usr/src/Common/node_modules/ + worker: + volumes: + - ./Worker:/usr/src/app:cached + # Use node modules of the container and not host system. + # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached + - /usr/src/Common/node_modules/ + extends: + file: ./docker-compose.base.yml + service: worker + depends_on: + <<: *common-depends-on + ports: + - "8734:9229" # Debugging port. + build: + network: host + context: . 
+ dockerfile: ./Worker/Dockerfile + app: + volumes: + - ./App:/usr/src/app:cached + - ./App/FeatureSet/Accounts:/usr/src/app/FeatureSet/Accounts:cached + - ./App/FeatureSet/Dashboard:/usr/src/app/FeatureSet/Dashboard:cached + - ./App/FeatureSet/AdminDashboard:/usr/src/app/FeatureSet/AdminDashboard:cached + - ./App/FeatureSet/StatusPage:/usr/src/app/FeatureSet/StatusPage:cached + # Use node modules of the container and not host system. + # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + - /usr/src/app/node_modules/ + - /usr/src/app/FeatureSet/Accounts/node_modules/ + - /usr/src/app/FeatureSet/Dashboard/node_modules/ + - /usr/src/app/FeatureSet/AdminDashboard/node_modules/ + - /usr/src/app/FeatureSet/StatusPage/node_modules/ + - ./Common:/usr/src/Common:cached + - /usr/src/Common/node_modules/ + extends: + file: ./docker-compose.base.yml + service: app + depends_on: + <<: *common-depends-on + ports: + - "9232:9229" # Debugging port. + build: + network: host + context: . + dockerfile: ./App/Dockerfile - extends: - file: ./docker-compose.base.yml - service: test-server - depends_on: - <<: *common-depends-on - ports: - - '9141:9229' # Debugging port. - - '3800:3800' - build: - network: host - context: . - dockerfile: ./TestServer/Dockerfile + probe-1: + volumes: + - ./Probe:/usr/src/app:cached + # Use node modules of the container and not host system. + # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached + - /usr/src/Common/node_modules/ - home: - volumes: - - ./Home:/usr/src/app:cached - # Use node modules of the container and not host system. 
- # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - - /usr/src/app/node_modules/ - - ./Common:/usr/src/Common:cached - - /usr/src/Common/node_modules/ - extends: - file: ./docker-compose.base.yml - service: home - depends_on: - <<: *common-depends-on - ports: - - '9212:9229' # Debugging port. - build: - network: host - context: . - dockerfile: ./Home/Dockerfile + extends: + file: ./docker-compose.base.yml + service: probe-1 + depends_on: + <<: *common-depends-on + build: + network: host + context: . + dockerfile: ./Probe/Dockerfile - worker: - volumes: - - ./Worker:/usr/src/app:cached - # Use node modules of the container and not host system. - # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - - /usr/src/app/node_modules/ - - ./Common:/usr/src/Common:cached - - /usr/src/Common/node_modules/ - extends: - file: ./docker-compose.base.yml - service: worker - depends_on: - <<: *common-depends-on - ports: - - '8734:9229' # Debugging port. - build: - network: host - context: . - dockerfile: ./Worker/Dockerfile + synthetic-runner-1: + volumes: + - ./Probe:/usr/src/app:cached + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached + - /usr/src/Common/node_modules/ + extends: + file: ./docker-compose.base.yml + service: synthetic-runner-1 + build: + network: host + context: . + dockerfile: ./Probe/Dockerfile + probe-2: + volumes: + - ./Probe:/usr/src/app:cached + # Use node modules of the container and not host system. 
+ # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached - app: - volumes: - - ./App:/usr/src/app:cached - - ./App/FeatureSet/Accounts:/usr/src/app/FeatureSet/Accounts:cached - - ./App/FeatureSet/Dashboard:/usr/src/app/FeatureSet/Dashboard:cached - - ./App/FeatureSet/AdminDashboard:/usr/src/app/FeatureSet/AdminDashboard:cached - - ./App/FeatureSet/StatusPage:/usr/src/app/FeatureSet/StatusPage:cached - # Use node modules of the container and not host system. - # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - - /usr/src/app/node_modules/ - - /usr/src/app/FeatureSet/Accounts/node_modules/ - - /usr/src/app/FeatureSet/Dashboard/node_modules/ - - /usr/src/app/FeatureSet/AdminDashboard/node_modules/ - - /usr/src/app/FeatureSet/StatusPage/node_modules/ - - ./Common:/usr/src/Common:cached - - /usr/src/Common/node_modules/ - extends: - file: ./docker-compose.base.yml - service: app - depends_on: - <<: *common-depends-on - ports: - - '9232:9229' # Debugging port. - build: - network: host - context: . - dockerfile: ./App/Dockerfile + - /usr/src/Common/node_modules/ + extends: + file: ./docker-compose.base.yml + service: probe-2 + depends_on: + <<: *common-depends-on + build: + network: host + context: . + dockerfile: ./Probe/Dockerfile - probe-1: - volumes: - - ./Probe:/usr/src/app:cached - # Use node modules of the container and not host system. - # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - - /usr/src/app/node_modules/ - - ./Common:/usr/src/Common:cached + synthetic-runner-2: + volumes: + - ./Probe:/usr/src/app:cached + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached + - /usr/src/Common/node_modules/ + extends: + file: ./docker-compose.base.yml + service: synthetic-runner-2 + build: + network: host + context: . 
+ dockerfile: ./Probe/Dockerfile - - /usr/src/Common/node_modules/ + ai-agent: + volumes: + - ./AIAgent:/usr/src/app:cached + # Use node modules of the container and not host system. + # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached + - /usr/src/Common/node_modules/ - extends: - file: ./docker-compose.base.yml - service: probe-1 - depends_on: - <<: *common-depends-on - build: - network: host - context: . - dockerfile: ./Probe/Dockerfile + extends: + file: ./docker-compose.base.yml + service: ai-agent + depends_on: + <<: *common-depends-on + build: + network: host + context: . + dockerfile: ./AIAgent/Dockerfile - probe-2: - volumes: - - ./Probe:/usr/src/app:cached - # Use node modules of the container and not host system. - # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - - /usr/src/app/node_modules/ - - ./Common:/usr/src/Common:cached + telemetry: + volumes: + - ./Telemetry:/usr/src/app:cached + # Use node modules of the container and not host system. + # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + - /usr/src/app/node_modules/ + - ./Common:/usr/src/Common:cached + - /usr/src/Common/node_modules/ + ports: + - "9938:9229" # Debugging port. + extends: + file: ./docker-compose.base.yml + service: telemetry + depends_on: + <<: *common-depends-on + build: + network: host + context: . + dockerfile: ./Telemetry/Dockerfile - - /usr/src/Common/node_modules/ + # Fluentd. Required only for development. In production its the responsibility of the customer to run fluentd and pipe logs to OneUptime. + # We run this container just for development, to see if logs are piped. 
+ fluentd: + ports: + - 24224:24224 + - 24224:24224/udp + - 8888:8888 + user: fluent + extends: + file: ./docker-compose.base.yml + service: fluentd + volumes: + - ./Fluentd/fluent.conf:/fluentd/etc/fluent.conf + build: + network: host + context: ./Fluentd + dockerfile: ./Dockerfile - extends: - file: ./docker-compose.base.yml - service: probe-2 - depends_on: - <<: *common-depends-on - build: - network: host - context: . - dockerfile: ./Probe/Dockerfile + fluent-bit: + ports: + - 24225:24224 + - 24285:24284 + - 2020:2020 + - 8889:8889 + extends: + file: ./docker-compose.base.yml + service: fluent-bit + volumes: + - ./FluentBit/etc:/fluent-bit/etc/ + build: + network: host + context: . + dockerfile: ./FluentBit/Dockerfile - ai-agent: - volumes: - - ./AIAgent:/usr/src/app:cached - # Use node modules of the container and not host system. - # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - - /usr/src/app/node_modules/ - - ./Common:/usr/src/Common:cached + ingress: + build: + network: host + context: . + dockerfile: ./Nginx/Dockerfile + extends: + file: ./docker-compose.base.yml + service: ingress + depends_on: + <<: *common-depends-on - - /usr/src/Common/node_modules/ - - - extends: - file: ./docker-compose.base.yml - service: ai-agent - depends_on: - <<: *common-depends-on - build: - network: host - context: . - dockerfile: ./AIAgent/Dockerfile - - telemetry: - volumes: - - ./Telemetry:/usr/src/app:cached - # Use node modules of the container and not host system. - # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - - /usr/src/app/node_modules/ - - ./Common:/usr/src/Common:cached - - /usr/src/Common/node_modules/ - ports: - - '9938:9229' # Debugging port. - extends: - file: ./docker-compose.base.yml - service: telemetry - depends_on: - <<: *common-depends-on - build: - network: host - context: . - dockerfile: ./Telemetry/Dockerfile - - # Fluentd. Required only for development. 
In production its the responsibility of the customer to run fluentd and pipe logs to OneUptime. - # We run this container just for development, to see if logs are piped. - - fluentd: - ports: - - 24224:24224 - - 24224:24224/udp - - 8888:8888 - user: fluent - extends: - file: ./docker-compose.base.yml - service: fluentd - volumes: - - ./Fluentd/fluent.conf:/fluentd/etc/fluent.conf - build: - network: host - context: ./Fluentd - dockerfile: ./Dockerfile - - fluent-bit: - ports: - - 24225:24224 - - 24285:24284 - - 2020:2020 - - 8889:8889 - extends: - file: ./docker-compose.base.yml - service: fluent-bit - volumes: - - ./FluentBit/etc:/fluent-bit/etc/ - build: - network: host - context: . - dockerfile: ./FluentBit/Dockerfile - - ingress: - build: - network: host - context: . - dockerfile: ./Nginx/Dockerfile - extends: - file: ./docker-compose.base.yml - service: ingress - depends_on: - <<: *common-depends-on - - # e2e tests - e2e: - extends: - file: ./docker-compose.base.yml - service: e2e - volumes: - - ./E2E/playwright-report:/usr/src/app/playwright-report - - ./E2E/test-results:/usr/src/app/test-results - build: - network: host - context: . - dockerfile: ./E2E/Dockerfile + # e2e tests + e2e: + extends: + file: ./docker-compose.base.yml + service: e2e + volumes: + - ./E2E/playwright-report:/usr/src/app/playwright-report + - ./E2E/test-results:/usr/src/app/test-results + build: + network: host + context: . 
+ dockerfile: ./E2E/Dockerfile volumes: - postgres: - clickhouse: + postgres: + clickhouse: networks: oneuptime: diff --git a/docker-compose.yml b/docker-compose.yml index cdafeabfe4..7065a6be6c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,76 +2,80 @@ # For example, if you want to use the image from Github Container Registry, you can change the image tag from oneuptime/dashboard:latest to ghcr.io/oneuptime/dashboard:latest x-common-depends-on: &common-depends-on - postgres: - condition: service_healthy - redis: - condition: service_healthy - clickhouse: - condition: service_healthy + postgres: + condition: service_healthy + redis: + condition: service_healthy + clickhouse: + condition: service_healthy services: + redis: + extends: + file: ./docker-compose.base.yml + service: redis - redis: - extends: - file: ./docker-compose.base.yml - service: redis + clickhouse: + extends: + file: ./docker-compose.base.yml + service: clickhouse - clickhouse: - extends: - file: ./docker-compose.base.yml - service: clickhouse + postgres: + ports: + - "5400:5432" # for access to postgres for backups. If you don't need backup, you can comment this line out to make it more secure. + extends: + file: ./docker-compose.base.yml + service: postgres - postgres: - ports: - - "5400:5432" # for access to postgres for backups. If you don't need backup, you can comment this line out to make it more secure. 
- extends: - file: ./docker-compose.base.yml - service: postgres + app: + image: oneuptime/app:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: app + depends_on: + <<: *common-depends-on + worker: + image: oneuptime/worker:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: worker + depends_on: + <<: *common-depends-on - app: - image: oneuptime/app:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: app - depends_on: - <<: *common-depends-on + probe-1: + image: oneuptime/probe:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: probe-1 + depends_on: + <<: *common-depends-on - worker: - image: oneuptime/worker:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: worker - depends_on: - <<: *common-depends-on + synthetic-runner-1: + image: oneuptime/probe:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: synthetic-runner-1 - probe-1: - image: oneuptime/probe:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: probe-1 - depends_on: - <<: *common-depends-on + telemetry: + image: oneuptime/telemetry:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: telemetry + depends_on: + <<: *common-depends-on - telemetry: - image: oneuptime/telemetry:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: telemetry - depends_on: - <<: *common-depends-on - - ingress: - image: oneuptime/nginx:${APP_TAG} - extends: - file: ./docker-compose.base.yml - service: ingress - depends_on: - <<: *common-depends-on + ingress: + image: oneuptime/nginx:${APP_TAG} + extends: + file: ./docker-compose.base.yml + service: ingress + depends_on: + <<: *common-depends-on volumes: - postgres: - clickhouse: + postgres: + clickhouse: networks: oneuptime: