Compare commits

...

427 Commits

Author SHA1 Message Date
Nawaz Dhandala
5e0d8b487c feat(api): register admin-dashboard auth refresh handler
Add AdminDashboard/src/Utils/API.ts to register a refreshSession handler on BaseAPI
that posts to IDENTITY_URL/refresh-session (using skipAuthRefresh). Handle HTTP error
responses and exceptions, and set a refresh-failure handler that logs and falls back
to logout. Export BaseAPI. Import the module in Index.tsx for side-effect initialization.
2025-11-06 15:01:32 +00:00
Nawaz Dhandala
c41b53dd2a feat(api): add automatic auth-refresh flow, retry handling and skip flag
- introduce RequestOptions.skipAuthRefresh and APIErrorRetryContext
- implement BaseAPI refresh handlers (setters), shouldRetryAfterError, refreshAuthSession with deduping promise and refreshFailure handler
- make API.handleError async and add default shouldRetryAfterError; extend fetchInternal with retryCount and retry after successful auth refresh
- register auth-refresh handlers for Dashboard and StatusPage and import API utils in Index entries for side-effects
- tighten StatusPage logout/refresh logic with logging and safety checks
2025-11-06 14:52:58 +00:00
Nawaz Dhandala
5fe445330b feat(statuspage): add jwtRefreshToken column migration for StatusPagePrivateUser
Add TypeORM migration 1762430566091 to add a jwtRefreshToken varchar(100) column
to StatusPagePrivateUser and register it in the migrations index.
2025-11-06 12:03:57 +00:00
Nawaz Dhandala
38c744ce8c Merge branch 'master' into refresh-sessions 2025-11-06 11:56:03 +00:00
Nawaz Dhandala
ad9771f222 Merge branch 'release' 2025-11-06 11:55:36 +00:00
Nawaz Dhandala
20a3eab3a0 fix(email): validate using RFC5322 regex and remove redundant Zod check 2025-11-06 11:55:14 +00:00
Nawaz Dhandala
fbe198f0c0 fix(nginx): ensure /var/log/nginx and logs exist; set error_log to /var/log/nginx/error.log
Create /var/log/nginx and touch access.log/error.log in run.sh so nginx -t succeeds before reloads.
Revert nginx.conf error_log to /var/log/nginx/error.log (notice).
2025-11-06 11:34:19 +00:00
Simon Larsen
b16743a669 feat(statuspage): add refreshable status-page sessions, namespaced cookies & session lifecycle
- Add CookieUtil.setStatusPageUserCookie and namespace user/refresh cookie keys by statusPageId
- Persist jwtRefreshToken on StatusPagePrivateUser (hashed session id) and update on login/refresh/logout
- Extend JsonWebToken to include statusPageId in refresh tokens and add signStatusPageUserLoginToken
- Implement tryRefreshStatusPageSession in StatusPageService to auto-refresh access tokens from valid refresh tokens (middleware-friendly)
- Update hasReadAccess to attempt automatic session refresh
- Propagate ExpressResponse through StatusPageAPI methods that perform cookie/session operations
- Improve StatusPageAuthentication: robust logout (invalidate by refresh or access token), login stores session tokens and hashed refresh token, add /refresh-session/:statuspageid endpoint to rotate session tokens
- Update tests to cover namespaced refresh token key
2025-11-06 10:07:31 +00:00
Nawaz Dhandala
bb48776e02 fix(nginx): use stderr for error_log to enable container-friendly logging 2025-11-05 21:38:59 +00:00
Nawaz Dhandala
0f92342742 fix(nginx): write error_log to /proc/self/fd/2 for container-friendly logging 2025-11-05 20:33:32 +00:00
Simon Larsen
286c639857 feat(auth): add refresh token lifecycle, session refresh endpoint, and auto-refresh middleware
- Add refresh token signing and decoding (JSONWebToken.signRefreshToken, decodeRefreshToken)
- Persist hashed refresh token on user on signup, login and SSO flows
- Invalidate persisted refresh token on logout
- Add /refresh-session endpoint to validate refresh token, rotate session, and return refreshed session
- Implement middleware tryRefreshSession to auto-refresh expired access tokens using refresh token
- Make CookieUtil.setUserCookie return session details (access/refresh tokens, sessionId, expiries) and set both cookies
- Introduce secure default cookie options (path, sameSite, secure, httpOnly) and use IsProduction for secure flag
- Add CookieName.RefreshToken constant and update tests accordingly
2025-11-05 20:28:26 +00:00
Nawaz Dhandala
6ed41b87dd fix(nginx): send error_log to /dev/stderr for container-friendly logging 2025-11-05 19:52:06 +00:00
Nawaz Dhandala
12364415aa ci(release): skip npm publish if version already published; fail on other errors 2025-11-05 19:37:45 +00:00
Nawaz Dhandala
1a3301e715 fix(monitor): normalize disk paths when matching disk metrics (handle backslashes, trailing slashes and root path) 2025-11-05 17:47:33 +00:00
Nawaz Dhandala
8be7b68faf fix(helm): move nodeSelector into e2e CronJob pod template and add per-job override 2025-11-05 16:58:07 +00:00
Nawaz Dhandala
47f9d3914e fix(queue): normalize job IDs (replace ':' with '-') when adding/removing jobs and handling repeatable keys 2025-11-05 15:14:51 +00:00
Nawaz Dhandala
8a1afbe7dc Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-11-05 13:32:12 +00:00
Simon Larsen
87ac6f3106 Merge branch 'master' of github.com:OneUptime/oneuptime 2025-11-05 13:23:25 +00:00
Simon Larsen
2df32e4cdd fix(monitor): return null for cached monitor status name instead of undefined 2025-11-05 13:23:23 +00:00
Nawaz Dhandala
2e2adffe17 fix(nginx): use findAllBy and remove LIMIT_MAX when fetching certs for disk write jobs 2025-11-05 13:11:58 +00:00
Nawaz Dhandala
2a15cf8676 fix(acme): adjust ACME challenge route to /acme-challenge/.well-known/:token 2025-11-05 13:02:02 +00:00
Nawaz Dhandala
d249579c1c fix(acme): mount .well-known/acme-challenge router on main router to expose ACME challenge endpoint 2025-11-05 12:52:42 +00:00
Nawaz Dhandala
05681b108b style(monitor): add explicit type annotations and minor formatting cleanup in EvaluationLogList 2025-11-05 12:38:50 +00:00
Nawaz Dhandala
36867a0b8c style(monitor): render criteria message using Alert component in EvaluationLogList 2025-11-05 12:27:12 +00:00
Nawaz Dhandala
0e5a832628 feat(monitor): include incident/alert numbers in evaluation events and show them in UI
- select incidentNumber/alertNumber when loading open incidents/alerts
- attach relatedIncidentNumber / relatedAlertNumber to evaluation events (created/resolved/skipped)
- add number fields to MonitorEvaluationEvent type
- decorate event title/message in EvaluationLogList to include "Incident #N" / "Alert #N"
2025-11-05 12:25:20 +00:00
Nawaz Dhandala
46f69fdde5 Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-11-05 12:14:21 +00:00
Nawaz Dhandala
70e6c0abe1 style(monitor): wrap criteria message and met notice in styled callouts with icons 2025-11-05 12:14:15 +00:00
Simon Larsen
c5938956af docs(readme,helm): add Community vs. Enterprise table to main README; expand Enterprise Edition details in Helm chart README 2025-11-05 12:12:42 +00:00
Simon Larsen
e9bfe74b5d Merge branch 'master' of github.com:OneUptime/oneuptime 2025-11-05 12:09:46 +00:00
Simon Larsen
55085a5e6c docs(helm): add Community vs. Enterprise table, document ssl.provision and image.type, remove duplicate Uninstall section 2025-11-05 12:09:42 +00:00
Nawaz Dhandala
9cb48a41e7 style(monitor): restore criteria 'not checked' notice and use ArrowCircleRight icon for events 2025-11-05 12:03:14 +00:00
Nawaz Dhandala
dd8179c0a7 refactor(monitor): remove short-circuit notice and related logic from EvaluationLogList
Remove firstMetCriteriaIndex and shouldShowShortCircuitMessage, and delete the UI block that displayed the "remaining criteria were not evaluated" short-circuit message. Simplifies criteria rendering by removing unused short-circuit handling.
2025-11-05 12:01:04 +00:00
Simon Larsen
f9f84d4104 docs(helm): note Let's Encrypt requires host reachable on ports 80 and 443 2025-11-05 11:58:54 +00:00
Simon Larsen
827663675d refactor(queue): extract BullMQAdapter array and cast to BullBoardQueues for type compatibility 2025-11-05 11:57:48 +00:00
Nawaz Dhandala
0e8d7f2d6b fix(monitor): show short-circuit notice only for first met criteria, remove unused import, and tweak spacing/styles 2025-11-05 11:56:51 +00:00
Nawaz Dhandala
0dd9dfb505 style(monitor): fix JSX conditional formatting in EvaluationLogList 2025-11-05 11:40:36 +00:00
Nawaz Dhandala
b3c68df77e refactor(monitor): centralize persistence of latest monitor payload
Extract persistLatestMonitorPayload and consolidate handling of serverMonitorResponse
and incomingMonitorRequest. Ensure monitor payload updates are saved consistently
(including when there are no steps or no matching monitor step) and before writing logs.
Also clone incomingMonitorRequest via JSON to avoid mutating the original payload and
normalize debug messages.
2025-11-05 11:38:34 +00:00
Nawaz Dhandala
06d0510667 Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-11-05 11:34:28 +00:00
Nawaz Dhandala
059b7db474 fix(nginx): narrow ACME challenge location to /.well-known/acme-challenge 2025-11-05 11:30:17 +00:00
Simon Larsen
ff17d990d1 fix(scim): make Create Group idempotent — reuse existing team and return 200 when reused
- Reuse an existing team instead of throwing BadRequest on duplicate group names.
- Create a new team when not found, setting isTeamEditable/isTeamDeleteable and allowing empty groups.
- Ensure member additions operate on the resolved target team.
- Expand team select fields and fetch the team for response; return 201 for newly created teams and 200 when reusing an existing one.
2025-11-05 11:17:28 +00:00
Nawaz Dhandala
2e6658542b delete scim readme 2025-11-05 11:12:27 +00:00
Nawaz Dhandala
04a4d6e4de fix(monitor): show short-circuit note for satisfied criteria and normalize met text color
- Add notice when a criteria is satisfied under FilterCondition.Any to indicate remaining criteria were not evaluated.
- Ensure FilterCondition is imported where used.
- Change "Criteria met" text color in Logs view from green to gray for consistent summary styling.
2025-11-05 10:52:03 +00:00
Nawaz Dhandala
2f595fe490 Merge branch 'monitor-log-summary-update' 2025-11-05 10:49:48 +00:00
Nawaz Dhandala
303554d644 refactor(monitor): group identical criteria filters in EvaluationLogList, aggregate metadata & status; use NORMAL small action buttons 2025-11-05 10:49:24 +00:00
Simon Larsen
2ca45b143d Merge pull request #2083 from OneUptime/monitor-log-summary-update
Monitor log summary update
2025-11-05 10:32:57 +00:00
Nawaz Dhandala
b5722256c5 refactor(email): replace custom RFC5322 regex with Zod email validation and add early falsy check 2025-11-05 09:41:42 +00:00
Nawaz Dhandala
0d231a6132 chore(eslint): enable no-control-regex and tidy rule formatting 2025-11-05 09:38:26 +00:00
Simon Larsen
ecc1d841e2 Merge pull request #2080 from OneUptime/snyk-upgrade-ad5a46fc6886b9d33f00f95d68951c86
[Snyk] Upgrade zod from 3.25.30 to 3.25.76
2025-11-05 09:34:42 +00:00
Simon Larsen
cddf534449 Merge pull request #2081 from OneUptime/snyk-upgrade-58044c8091e4244c9346f20e7029dcbd
[Snyk] Upgrade @opentelemetry/api-logs from 0.52.1 to 0.206.0
2025-11-05 09:34:36 +00:00
Simon Larsen
a4ba662211 Merge pull request #2078 from OneUptime/snyk-upgrade-a266987590b01e591eec631cb95143b3
[Snyk] Upgrade bullmq from 5.26.2 to 5.61.0
2025-11-05 09:34:20 +00:00
Simon Larsen
604bc33fb3 Merge pull request #2077 from OneUptime/snyk-upgrade-60fa724635c3f357d78809a2791dae5d
[Snyk] Upgrade posthog-js from 1.186.3 to 1.275.3
2025-11-05 09:34:12 +00:00
Simon Larsen
a1ae1bee89 Merge pull request #2082 from OneUptime/monitor-log-summary-update
Monitor log summary update
2025-11-05 09:33:51 +00:00
Nawaz Dhandala
775b8846c7 refactor(monitor): tidy formatting, consolidate imports and add type annotations
- Normalize whitespace/indentation and reformat several monitor utilities and message builders
- Consolidate and reorder imports (including MonitorEvaluationSummary) across evaluator/data-extractor modules
- Add explicit types (MonitorStatus, BasicDiskMetrics) and tighten type annotations in observation/resource code
- Minor cleanups to conditional formatting in dashboard components (EvaluationLogList, SummaryInfo)
2025-11-05 09:33:29 +00:00
Nawaz Dhandala
3837208023 refactor(monitor): split MonitorCriteriaMessageBuilder into smaller responsibility-focused modules
Move expectation, observation, data-extraction and formatting logic out of the large
MonitorCriteriaMessageBuilder into new classes:
- MonitorCriteriaExpectationBuilder
- MonitorCriteriaObservationBuilder
- MonitorCriteriaDataExtractor
- MonitorCriteriaMessageFormatter

Keep MonitorCriteriaMessageBuilder slim: it now orchestrates message construction and
delegates description/formatting responsibilities to the new modules.
2025-11-05 09:23:14 +00:00
Nawaz Dhandala
b45910a22e refactor(monitor): extract criteria evaluation, message builder, metrics & log utils from MonitorResource
- Move criteria processing into MonitorCriteriaEvaluator
- Move filter/observation message logic into MonitorCriteriaMessageBuilder
- Move monitor metrics logic into MonitorMetricUtil
- Move monitor log persistence into MonitorLogUtil
- Replace large in-file implementations in MonitorResource with calls to the new utilities to reduce complexity and improve separation of concerns
2025-11-05 08:59:14 +00:00
Nawaz Dhandala
c787d7eca0 refactor(monitor): format compare messages and add value formatting helpers 2025-11-05 08:38:04 +00:00
snyk-bot
9771c4cd69 fix: upgrade @opentelemetry/api-logs from 0.52.1 to 0.206.0
Snyk has created this PR to upgrade @opentelemetry/api-logs from 0.52.1 to 0.206.0.

See this package in npm:
@opentelemetry/api-logs

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/f6446ec8-d441-487e-b58f-38373430e213?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-04 23:28:08 +00:00
snyk-bot
4471d6bec4 fix: upgrade zod from 3.25.30 to 3.25.76
Snyk has created this PR to upgrade zod from 3.25.30 to 3.25.76.

See this package in npm:
zod

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/f6446ec8-d441-487e-b58f-38373430e213?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-04 23:28:03 +00:00
snyk-bot
f8a7330f79 fix: upgrade bullmq from 5.26.2 to 5.61.0
Snyk has created this PR to upgrade bullmq from 5.26.2 to 5.61.0.

See this package in npm:
bullmq

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/f6446ec8-d441-487e-b58f-38373430e213?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-04 23:27:53 +00:00
snyk-bot
ae177c920f fix: upgrade posthog-js from 1.186.3 to 1.275.3
Snyk has created this PR to upgrade posthog-js from 1.186.3 to 1.275.3.

See this package in npm:
posthog-js

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/f6446ec8-d441-487e-b58f-38373430e213?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-04 23:27:47 +00:00
Nawaz Dhandala
9faa38454d refactor(monitor): improve criteria filter messages and add rich observation descriptions
- Replace ad-hoc success/failure messaging with buildCriteriaFilterMessage / buildCriteriaFilterFailureMessage
- Add detailed describe*Observation helpers for many CheckOn types (response, headers, body, isOnline, timeouts, incoming requests, CPU/memory/disk, processes, SSL, synthetic/custom code, metrics, logs, spans)
- Introduce utility helpers: formatNumber, formatPercentage, formatBytes, formatList, formatSnippet, describeProcesses, computeDiskUsagePercent, summarizeNumericSeries, formatResultValue, and various response extractors
- Enhance metric value extraction to select correct aggregated result by alias and return summarized numeric series
- Wire new messaging into MonitorResource evaluation flow (use filterMessage everywhere)
- Import and use FilterType and ServerProcess types plus several monitor response types
- Small fix in ServerMonitorCriteria: find disk metric and prefer percentUsed (fall back to percentFree) when computing disk usage percent
2025-11-04 22:39:51 +00:00
Nawaz Dhandala
a4198ec409 refactor(monitor): consolidate evaluation logs for probe-based monitors
Compute a probeable evaluationSummary from props.evaluationSummary or the first probe response that has one, remove per-probe EvaluationLogList, and render evaluation logs once for probeable (probe) monitors to avoid duplicated log lists.
2025-11-04 22:11:58 +00:00
Nawaz Dhandala
24d184debf feat(monitor, dashboard): resolve monitor status names in events and add view buttons for incidents/alerts
Cache and fetch monitor status names in MonitorResource and include readable status names in evaluationSummary events. Add action buttons in EvaluationLogList to navigate to related incident/alert views.
2025-11-04 22:08:08 +00:00
Nawaz Dhandala
f307c904b0 fix(monitor): persist monitor log on early exits and add input guards to saveMonitorLog 2025-11-04 21:31:54 +00:00
Nawaz Dhandala
2d20b7fd13 refactor(monitor): add explicit typings for render functions and evaluationSummary; simplify catch
Add explicit function type annotations for renderCriteriaResult, renderEvent and renderEvaluationLogs in SummaryView to improve type safety and readability. Also type the evaluationSummary variable in Monitor view and simplify the catch block by removing the unused error parameter.
2025-11-04 21:11:18 +00:00
Nawaz Dhandala
fd738b23d0 refactor(monitor, dashboard): normalize multiline formatting and simplify conditional logic
- Normalize multiline/inline formatting in MonitorResource.ts, EvaluationLogList.tsx and Logs.tsx
- Simplify nested else to else-if for FilterCondition.All in MonitorResource
- Minor JSX spacing and map return cleanup for clearer readability
2025-11-04 21:09:02 +00:00
Simon Larsen
deffa6489d feat(monitor): fetch and surface evaluationSummary from MonitorLog in Monitor view and Logs
- Add AnalyticsModelAPI call in Monitor view to load the latest MonitorLog and extract evaluationSummary into component state.
- Pass latest evaluationSummary to Summary component in Monitor view.
- Add Evaluation Outcome column to Monitor Logs table to show criteria met / no criteria / not recorded.
- Pass evaluationSummary into the Logs modal's SummaryInfo.
- Import MonitorEvaluationSummary types and MonitorLog analytics model.
2025-11-04 21:00:14 +00:00
Simon Larsen
e5f1d5553e feat(monitor): add MonitorEvaluationSummary and evaluation logs
- introduce MonitorEvaluationSummary type and related result/event types
- thread evaluationSummary through probe/monitor response types and probe ingest flow
- initialize and populate evaluationSummary in MonitorResource (criteriaResults, events)
- record events for criteria evaluation, monitor status changes, incident/alert create/skip/resolve
- update criteria filter evaluation to produce structured filter results and messages
- add EvaluationLogList UI and wire evaluationSummary into Summary/SummaryInfo views
- minor Date utility usages to timestamp events
2025-11-04 20:58:18 +00:00
Nawaz Dhandala
d2ee3c5409 refactor(admin-dashboard, nginx): normalize multiline formatting in Serve.ts, NginxConfigurator.ts and WriteServerCertToDisk.ts 2025-11-04 20:39:53 +00:00
Nawaz Dhandala
9a1ecd7fe0 fix(admin-dashboard): clarify NotAuthorizedException messages in Serve.ts
Replace generic "Only master admins can access this app." with
"Unauthorized: Only master admins can access the admin dashboard." to
provide clearer, more informative authorization errors.
2025-11-04 20:39:14 +00:00
Simon Larsen
b41d8ab5ab Merge pull request #2076 from OneUptime/ssl-load
Ssl load
2025-11-04 20:36:03 +00:00
Nawaz Dhandala
b809e1c43b refactor(analytics): extract includesValues and simplify numeric-array detection in Statement.toColumnType 2025-11-04 20:35:33 +00:00
Nawaz Dhandala
743f8721f3 fix(helm): correct YAML indentation for SERVER_ADMIN_DASHBOARD_HOSTNAME in _helpers.tpl 2025-11-04 20:06:40 +00:00
Simon Larsen
c0aa2b7905 Merge pull request #2075 from OneUptime/ssl-load
Ssl load
2025-11-04 19:56:35 +00:00
Nawaz Dhandala
4ac5819e6a fix(nginx): generate placeholder certs and guard HTTPS directives; test config after envsubst
Add ensure_placeholder_certificate to envsubst-on-templates.sh to generate a temporary self-signed cert when PRIMARY_DOMAIN is set but cert files are missing. Only export SSL listen/certificate directives if certs (real or placeholder) are available; otherwise disable HTTPS directives and emit warnings. Simplify PRIMARY_DOMAIN logging.

Also run "nginx -t" immediately after running the envsubst script in NginxConfigurator to validate generated config before attempting reload.
2025-11-04 19:55:22 +00:00
Nawaz Dhandala
158663c44b fix(nginx): only write/reload when certs change; derive primary domain and guard SSL directives
- WriteServerCertToDisk: read existing cert/key from disk and compare with DB values; skip writing when unchanged. After writing, run envsubst-on-templates.sh and reload nginx with try/catch and logging.
- envsubst-on-templates.sh: derive PRIMARY_DOMAIN from HOST when not set, compute cert/key paths, and only export PROVISION_SSL_* directives when certificate files exist (otherwise clear directives and log).
2025-11-04 19:44:55 +00:00
Simon Larsen
3d2bcfa579 refactor(admin): add types for ensureMasterAdminAccess and JSONWebToken decode result 2025-11-04 19:21:07 +00:00
Simon Larsen
21984c8684 chore(docker-compose): consolidate common-ui/server anchors into common-runtime-variables
Replace <<: *common-ui-variables and <<: *common-server-variables with <<: *common-runtime-variables across services and remove the IS_SERVER flag from the server anchor.
2025-11-04 19:18:28 +00:00
Simon Larsen
ad63d18f0a fix(helm): replace runtime env include with oneuptime.env.oneuptimeSecret in isolated-vm template 2025-11-04 19:17:45 +00:00
Simon Larsen
e5af008079 chore(helm): unify runtime env template and update deployments
Replace separate commonServer/commonUi and oneuptimeSecret includes with a single
oneuptime.env.runtime include across deployments. Move oneuptimeSecret into the
common env where appropriate and remove SERVER_ADMIN_DASHBOARD_HOSTNAME.
Update all affected templates to use the new runtime include.
2025-11-04 19:14:27 +00:00
Simon Larsen
3e72b2a9a4 fix(admin): restrict AdminDashboard to master admins and guard index render
Add ensureMasterAdminAccess in AdminDashboard/Serve.ts to decode JWT, validate
isMasterAdmin and return a NotAuthorized response when access is not allowed.
Wire this function into App.init as getVariablesToRenderIndexPage so the admin
frontend is protected.

Also add a res.headersSent guard in StartServer.ts to skip rendering the index
page if a response was already sent, avoiding duplicate responses.
2025-11-04 18:28:10 +00:00
Nawaz Dhandala
6d66c6c369 ci(test-release): replace MCP server Docker builds with Worker image builds
Update test-release workflow to build and push Worker images (./Worker/Dockerfile) and adjust tags (including enterprise variants) instead of the previous MCP server build steps.
2025-11-04 17:58:41 +00:00
Nawaz Dhandala
9544dc2a6c chore(env): use explicit callback body for prefix startsWith check in getFrontendEnvVars 2025-11-04 16:47:07 +00:00
Simon Larsen
a22e3b63e0 Merge branch 'master' of github.com:OneUptime/oneuptime 2025-11-04 16:08:53 +00:00
Simon Larsen
5f1f0cde4a fix(env): restrict frontend env vars and use sanitized getFrontendEnvVars in env.js
Add getFrontendEnvVars with an allow-list and allowed prefixes to EnvironmentConfig to avoid exposing the entire process.env to the browser. Update StartServer to use getFrontendEnvVars when rendering /env.js and embed the serialized env object directly.
2025-11-04 16:08:47 +00:00
Nawaz Dhandala
3025880d6d chore(nginx): move conditional SSL listen/certificate directives into envsubst variables 2025-11-04 15:59:14 +00:00
Nawaz Dhandala
00994b56c5 chore(values.yaml): relocate image configuration next to ssl and remove duplicate block 2025-11-04 15:35:27 +00:00
Nawaz Dhandala
d56fd8bf69 fix(domain): disallow creating verified domains for non-root users; require post-create verification 2025-11-04 15:09:41 +00:00
Nawaz Dhandala
1229d5d204 style(domain): normalize whitespace and formatting in DomainService.ts 2025-11-04 12:18:14 +00:00
Nawaz Dhandala
c4f1f4e711 fix(domain): allow setting deletedByUserId on create for project roles 2025-11-04 12:17:33 +00:00
Nawaz Dhandala
7b8986b011 fix(domain): require projectId for verification and verify TXT for all project domains; set isVerified false for non-root creates
- set createBy.data.isVerified = false for non-root creations
- require projectId (from query.projectId or props.tenantId) when verifying domains
- query by projectId and other filters, use LIMIT_PER_PROJECT and iterate results to verify TXT records
- add missing imports (LIMIT_PER_PROJECT, ObjectID, FindWhere) and improve error handling/messages
2025-11-04 12:14:51 +00:00
Nawaz Dhandala
46e6176e6a refactor(queue): use Redis.getRedisOptions() for BullMQ connections; remove direct Redis env usage 2025-11-04 11:53:43 +00:00
Simon Larsen
61f9572956 Merge pull request #2073 from OneUptime/chore/npm-audit-fix
chore: npm audit fix
2025-11-04 10:48:58 +00:00
simlarsen
3adc6901da chore: npm audit fix 2025-11-04 01:46:29 +00:00
Simon Larsen
d7f1bfb52a Merge pull request #2072 from OneUptime/core-ssl
refactor(helm): evaluate PROVISION_SSL once in oneuptime.env.common a…
2025-11-03 22:17:55 +00:00
Nawaz Dhandala
53968e681c refactor(helm): evaluate PROVISION_SSL once in oneuptime.env.common and replace dig call 2025-11-03 22:14:19 +00:00
Simon Larsen
3b8c854744 Merge pull request #2071 from OneUptime/core-ssl
Core ssl
2025-11-03 22:01:25 +00:00
Nawaz Dhandala
669ed2580c refactor(coressl,nginx): normalize formatting in WriteServerCertToDisk job
Wrap logger calls and LocalFile.write arguments for consistent multiline formatting.
2025-11-03 22:00:24 +00:00
Nawaz Dhandala
2f29c2e24c refactor(helm): move provisionSSL under ssl.provision and update templates/schema
Nest the top-level provisionSSL into ssl.provision in values.yaml and values.schema.json,
and update _helpers.tpl to read the new path (using default false via dig) so PROVISION_SSL
env is derived from ssl.provision.
2025-11-03 21:56:17 +00:00
Nawaz Dhandala
f0a2f454e2 feat(config,docker-compose): add PROVISION_SSL env and document Let's Encrypt provisioning
Expose PROVISION_SSL in docker-compose common variables and update config.example.env docs to explain automatic ACME/Let's Encrypt TLS provisioning and reverse-proxy alternative.
2025-11-03 21:51:14 +00:00
Nawaz Dhandala
d8206e12de feat(nginx): add ServerCerts volume/mount and enable conditional primary-domain SSL provisioning
- add server-certs emptyDir volume and mount to nginx Deployment
- update default.conf.template to conditionally listen on 7850 and use /etc/nginx/certs/ServerCerts/${PRIMARY_DOMAIN}.crt/.key when PROVISION_SSL is set
- enhance run.sh to export PRIMARY_DOMAIN, temporarily adjust PROVISION_SSL for envsubst, and restore original PROVISION_SSL afterwards
2025-11-03 21:48:15 +00:00
Nawaz Dhandala
f84434ada4 feat(nginx,coressl): add job to write primary host TLS certificate to disk and initialize it 2025-11-03 21:27:22 +00:00
Nawaz Dhandala
d5fbe0443e refactor(acme,coressl): normalize formatting for AcmeChallenge routing, handler signature, and provisioning log 2025-11-03 21:12:46 +00:00
Nawaz Dhandala
2732cd65ed feat(acme): add ACME HTTP-01 challenge routing and nginx proxy
- Refactor AcmeChallengeAPI into a BaseAPI-backed class that exposes a well-known router.
- Add CrudApiEndpoint(Route("/acme-challenge")) to AcmeChallenge model.
- Register AcmeChallengeAPI router in BaseAPIFeatureSet via (new AcmeChallengeAPI).getRouter().
- Add nginx location /.well-known to proxy ACME challenge requests to /api/acme-challenge/.well-known with proper headers, resolver and websocket support.
2025-11-03 21:12:01 +00:00
Nawaz Dhandala
7624523446 feat(api): add ACME HTTP-01 challenge endpoint and register AcmeChallengeAPI 2025-11-03 20:59:02 +00:00
Nawaz Dhandala
5851286548 feat(coressl): add automated Let's Encrypt provisioning for primary host
- add ProvisionPrimaryDomain worker job to order/renew ACME certificates for the HOST
- register job import in Worker Routes
- add ProvisionSsl env flag in Common/Server/EnvironmentConfig
- expose PROVISION_SSL in Helm chart (values.yaml, values.schema.json, _helpers.tpl)
2025-11-03 20:48:07 +00:00
Simon Larsen
123d9b07bc Merge pull request #2067 from OneUptime/service-catalog-pages
Service catalog pages
2025-11-03 19:50:54 +00:00
Nawaz Dhandala
9edc6ac428 refactor(metrics): normalize type annotations and formatting in MetricExplorer/MetricQuery/MetricView 2025-11-03 19:47:27 +00:00
Nawaz Dhandala
72fc633bf1 refactor(metrics): treat metric query alias as meaningful so alias-only queries are preserved in URL/state 2025-11-03 19:44:21 +00:00
Nawaz Dhandala
3264322054 refactor(metrics): support metric query alias in URL/state
- add MetricQueryAliasFromUrl type
- populate initial metric alias fields from parsed URL into initial query configs
- include alias when building metricQueries for the URL and when parsing metricQueries from the URL
- add sanitizeAlias and buildAliasFromMetricAliasData helpers to validate/serialize alias fields
2025-11-03 19:41:48 +00:00
Nawaz Dhandala
d8fedc0b19 refactor(filters,metrics): default to showing advanced filters and optimize metric fetching
- add showAdvancedFiltersByDefault prop to FiltersForm and use it to initialize advanced filter visibility
- MetricQuery: default showAdvancedFilters to true, pass showAdvancedFiltersByDefault to FiltersForm and call onAdvancedFiltersToggle once on mount
- MetricView: introduce getFetchRelevantState and lastFetchSnapshotRef; only fetch aggregated results when relevant state (start/end dates or queryConfigs) actually changes
2025-11-03 19:33:12 +00:00
Nawaz Dhandala
fc7cc5fe7f Merge branch 'master' into service-catalog-pages 2025-11-03 18:55:32 +00:00
Nawaz Dhandala
5b4eb72521 refactor(modal,edition-label): make Modal onSubmit optional and centralize modal props
- Make Modal.onSubmit and ModalFooter.onSubmit optional and use safe optional chaining when invoking
- Extract modalSubmitButtonText, modalOnSubmit, modalIsLoading, and modalDisableSubmitButton in EditionLabel to simplify JSX and reduce inline conditional logic
2025-11-03 18:54:27 +00:00
Nawaz Dhandala
d84cfe9b09 refactor(enterprise): register EnterpriseLicense migration and clean up license validation/env config; refactor(edition-label): adjust alert formatting 2025-11-03 18:35:00 +00:00
Nawaz Dhandala
0e8926a786 refactor(edition-label): update edition labels and streamline license validation UI
- Change "Enterprise Edition (Verified)" -> "Enterprise Edition"
- Change "Enterprise Edition (License Needed)" -> "Enterprise Edition (License Required)"
- Render success Alert without extra wrapper
- Only show license input, validation error and explanatory text when license is not valid
2025-11-03 18:33:33 +00:00
Nawaz Dhandala
12ff3062de feat(enterprise): add server license validation endpoint and integrate UI
- Add POST /global-config/license handler to validate enterprise license via
  EnterpriseLicenseValidationUrl and store results in GlobalConfig.
- Introduce EnterpriseLicenseValidationUrl in EnvironmentConfig.
- Update EditionLabel to call /global-config/license (POST) for validation,
  remove direct ModelAPI update and ObjectID usage, and replace inline messages
  with Alert component. Minor styling and import cleanup.
2025-11-03 18:21:23 +00:00
Simon Larsen
30aad2866f fix(enterprise): remove oneuptime.com host restriction from license validation (comment out Host import and serverHost check) 2025-11-03 15:25:53 +00:00
Simon Larsen
3de636ab9e refactor(edition-label): remove refresh flow and unify license validation
- Remove isRefreshing state and handleRefresh function; use isValidating for all validation/loading logic
- Make modal submit perform license validation (label "Validate License") and derive isLoading/disable state from licenseKeyInput, isValidating, and isConfigLoading
- Change retry button style to DANGER
- Expand enterprise features copy ("Audit logs and many more enterprise-focused features.")
- Reorder and simplify dialog markup (remove in-body Validate button)
2025-11-03 15:10:58 +00:00
Simon Larsen
a1bf9cbaae feat(enterprise): add EnterpriseLicense DB migration (1762181014879) and register in migrations index 2025-11-03 14:46:00 +00:00
Simon Larsen
12c800b81f refactor(edition-label): add explicit types for ref and callbacks, tidy formatting and normalize config.example.env newline 2025-11-03 14:29:36 +00:00
Simon Larsen
677e687662 fix(enterprise): restrict enterprise license validation to oneuptime.com and import Host 2025-11-03 14:16:45 +00:00
Simon Larsen
93719d67be refactor(link): use ReactNode for children and handle numeric children 2025-11-03 14:12:29 +00:00
Simon Larsen
7d23209198 chore(format): tidy formatting in GlobalConfig, EditionLabel, and Config 2025-11-03 14:09:09 +00:00
Simon Larsen
4461127a36 feat(enterprise): add /global-config/license endpoint and make EditionLabel fetch license via API
- Add GET /global-config/license that returns enterprise license fields (companyName, expiresAt, licenseKey, token).
- Update EditionLabel to use APP_API_URL and fetch license from the new endpoint instead of ModelAPI.getItem.
- Replace hard-coded validation URL with APP_API_URL-based URL.
2025-11-03 14:08:20 +00:00
Simon Larsen
8326bf2c9e chore(edition-label): no-op commit (no code changes) 2025-11-03 14:03:06 +00:00
Simon Larsen
be9d2f6beb feat(enterprise): add enterprise license fields to GlobalConfig
Add enterpriseCompanyName, enterpriseLicenseKey, enterpriseLicenseExpiresAt and enterpriseLicenseToken to store validated enterprise license metadata.
2025-11-03 14:01:24 +00:00
Simon Larsen
214dae6204 feat(enterprise): add license validation endpoint to EnterpriseLicense API
Add POST /.../validate route that verifies license key, checks expiration,
and returns license info with a signed JWT token.
2025-11-03 13:40:09 +00:00
Simon Larsen
71c845d94e feat(enterprise): add EnterpriseLicense API
Add EnterpriseLicenseAPI class that extends BaseAPI to expose CRUD operations
for the EnterpriseLicense model using EnterpriseLicenseService.
2025-11-03 13:32:52 +00:00
Simon Larsen
87d709dd05 feat(enterprise): add EnterpriseLicense model, service and API; register model/service and add IsEnterpriseEdition env flag 2025-11-03 12:19:55 +00:00
Simon Larsen
25332f99fd chore(helm): add IS_ENTERPRISE_EDITION env var to oneuptime.env.common 2025-11-03 11:43:18 +00:00
Simon Larsen
1ac6e71f7e chore(config,docker,ci,ui): rename IS_ENTERPRISE to IS_ENTERPRISE_EDITION across env, Dockerfiles, compose and workflows 2025-11-03 11:25:12 +00:00
Simon Larsen
f1efd65ada Merge pull request #2070 from OneUptime/snyk-upgrade-ecc4458d6ea952fff3607671e1a1fb1d
[Snyk] Upgrade react-router-dom from 6.23.1 to 6.30.1
2025-11-03 08:51:04 +00:00
snyk-bot
bc338f41c7 fix: upgrade react-router-dom from 6.23.1 to 6.30.1
Snyk has created this PR to upgrade react-router-dom from 6.23.1 to 6.30.1.

See this package in npm:
react-router-dom

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/40b17bc5-1bd4-48b1-88f1-5b4dc1400e80?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-02 10:11:06 +00:00
Simon Larsen
39153735b5 Merge pull request #2069 from OneUptime/chore/npm-audit-fix
chore: npm audit fix
2025-11-02 09:18:55 +00:00
simlarsen
ae9a78f1f4 chore: npm audit fix 2025-11-01 01:49:26 +00:00
Nawaz Dhandala
f224ad6092 fix(ui): display correct feature list and icon for Community Edition
- Map communityFeatures (not enterpriseFeatures) in the Community Edition panel
- Replace wrong/Danger icon with Check and add muted gray styling
2025-10-31 18:14:32 +00:00
Nawaz Dhandala
1abb8bc83f style(ui): reformat EditionLabel and trim whitespace in Dashboard Footer
- Reflow imports, map callbacks and wrapped long strings in EditionLabel for improved readability.
- Remove stray trailing space in Dashboard Footer link.
2025-10-31 18:07:23 +00:00
Nawaz Dhandala
46704b7c5a fix(ui): correct IconProp relative import path in EditionLabel 2025-10-31 16:48:17 +00:00
Nawaz Dhandala
143d91ceab feat(ui): revamp EditionLabel to use shared enterpriseFeatures and render feature lists with icons 2025-10-31 16:46:28 +00:00
Nawaz Dhandala
ea58cacd1b feat(ui): revamp EditionLabel and allow Footer content links
- Update EditionLabel UI: new button styling, status indicator, compact CTA text
- Expand modal content with Community vs Enterprise comparison and use enterprise demo URL
- Pass ModalWidth to modal and refine primary action behavior
- Extend Footer types to accept ReactNode content and render content links
- Embed EditionLabel into Dashboard footer links
2025-10-31 16:43:07 +00:00
Nawaz Dhandala
5e19849ac8 feat(ui): add EditionLabel and expose IS_ENTERPRISE across apps
- Add EditionLabel component to Common UI to show current edition and info modal
- Show edition label in Login page, Dashboard header and Footer
- Add IS_ENTERPRISE env var to config.example.env and export in Common UI Config
- Propagate IS_ENTERPRISE into docker-compose.base.yml service envs
2025-10-31 15:47:12 +00:00
Nawaz Dhandala
f7c05645a9 feat(helm-chart): add image helper and support enterprise image type
- Add Helm helpers oneuptime.image.tag and oneuptime.image to centralize image name/tag formatting
- If image.type is "enterprise-edition" and tag doesn't already contain "enterprise", prefix tag with "enterprise-"
- Replace inline image printf calls with include "oneuptime.image" across deployments, cronjob and tests
- Add image.type to values.schema.json and set default type to "community-edition" in values.yaml
2025-10-31 15:17:41 +00:00
Nawaz Dhandala
1c1a48b78f chore(ci): build/publish enterprise image variants and add IS_ENTERPRISE arg to Dockerfiles 2025-10-31 14:49:07 +00:00
Nawaz Dhandala
13860be56d chore(helm-chart): remove cert-manager integration from oneuptime chart
- Remove cert-manager dependency from Chart.yaml and update Chart.lock (digest/timestamp)
- Delete bundled cert-manager chart (charts/cert-manager-v1.18.2.tgz)
- Remove ClusterIssuer template and cert-manager/Let's Encrypt-related ingress annotations and secret handling
- Remove cert-manager and certManagerLetsEncrypt entries from values.schema.json and values.yaml
- Clean up README to remove Cert-Manager / Let's Encrypt documentation and references
2025-10-30 21:52:19 +00:00
Nawaz Dhandala
38c29664ea refactor(metrics): strengthen typings and clean up MetricExplorer
- Add explicit types for initialQueryConfigs (MetricQueryConfigData) and map callbacks
- Use FilterData<MetricsQuery> and explicit Record<string, unknown> for safer access
- Type lastSerializedStateRef as React.MutableRefObject<string>
- Simplify catch blocks (remove unused error variables) and tidy parsing/sanitization logic
- Minor formatting and type-safe attribute sanitization improvements
2025-10-30 21:36:44 +00:00
Nawaz Dhandala
df1507b314 feat(metrics): persist multiple metric queries & time range in URL
- Refactor MetricExplorer to parse/serialize a metricQueries URL param (array of metricName, attributes, aggregationType) and startTime/endTime.
- Add helpers to sanitize attributes, map aggregation types, and build metric query state.
- Update MetricsTable to generate metricQueries payload (with attributes and aggregationType) when navigating to metric view.
- Minor JSX/formatting cleanup.
2025-10-30 21:33:30 +00:00
Nawaz Dhandala
65c999b5fc feat(metrics): support filtering by multiple telemetry services
Replace telemetryServiceId/telemetryServiceName props with telemetryServiceIds array.
Update MetricsTable to accept multiple IDs, construct view route when exactly one
telemetry service is selected, query with Includes for multiple services, and add
select/filters/column to show telemetry service info. Update ServiceCatalog and
Telemetry Service Metrics pages to pass telemetryServiceIds and remove redundant
single-service fetching/state.
2025-10-30 20:32:35 +00:00
Nawaz Dhandala
803d0436ca feat(service-catalog): reorganize side menu and tidy view components
- Move "Telemetry Services" into the Resources section and introduce an Operations
  section for Alerts and Incidents; update corresponding icons and route targets.
- Reformat FunctionComponent type annotations and async fetch function bodies
  in Alerts, Logs, Traces and Metrics for consistent indentation and readability.
- Minor formatting cleanup for pageRoute/path prop in ServiceCatalogRoutes.
2025-10-30 20:12:52 +00:00
Nawaz Dhandala
b98e7f13a5 feat(service-catalog): add Alerts, Logs, Traces and Metrics pages, routes and menu
- Add new ServiceCatalog view pages: Alerts.tsx, Logs.tsx, Traces.tsx, Metrics.tsx
  (fetch monitors/telemetry service ids and render respective tables/viewers).
- Register lazy routes and PageRoute entries in ServiceCatalogRoutes for alerts,
  logs, traces and metrics.
- Extend PageMap and RouteMap with new keys/paths and Route entries.
- Update SideMenu to include Alerts under Operations and Logs/Traces/Metrics under
  a Telemetry section.
- Add breadcrumbs entries for the new service catalog pages.
2025-10-30 20:08:52 +00:00
Simon Larsen
0785f11abe Merge pull request #2066 from OneUptime/live-logs
Live logs
2025-10-30 18:58:45 +00:00
Nawaz Dhandala
f0d9f7c594 chore(incidents): add IncidentUpdatePayload type and use it for updatedIncidentData 2025-10-30 18:58:19 +00:00
Nawaz Dhandala
dc9463f73d chore(logs-viewer,logs): add explicit types for liveRequestInFlight, fetchItems and handleLiveToggle 2025-10-30 18:45:54 +00:00
Nawaz Dhandala
37c8e8b6b6 chore(logs-viewer,logs): tweak LiveLogsToggle styling for improved contrast
- add bg-white/90 and backdrop-blur to base classes
- update active/inactive border, text and hover classes
- adjust indicator colors and make "Live" label font-semibold
2025-10-30 18:42:41 +00:00
Nawaz Dhandala
763dfaa1c9 chore(logs-viewer,logs): track live-updating state and disable live toggle during live fetches 2025-10-30 18:37:55 +00:00
Nawaz Dhandala
c2e0d402d5 chore(logs-viewer,logs): remove live updating indicator and tooltip; simplify LiveLogsToggle API
- Remove isUpdating and tooltip from LiveLogsToggle props and types
- Drop Tooltip import and spinner markup; always return button content
- Remove isLiveUpdating state and related updates from Dashboard LogsViewer
- Simplify liveOptions to only pass isLive and onToggle
2025-10-30 18:34:48 +00:00
Simon Larsen
cdc1526fbf chore(logs-viewer,logs): normalize formatting - inline LiveLogsToggle JSX and reformat catch callback 2025-10-30 18:20:10 +00:00
Simon Larsen
13ebd34e8f feat(logs-viewer): add live logs toggle and realtime refresh support
- Add LiveLogsOptions type and LiveLogsToggle component.
- Wire liveOptions through LogsViewer -> LogsViewerToolbar to render the live toggle.
- Update Dashboard LogsViewer to support live mode:
  - add isLiveEnabled, isLiveUpdating state and liveRequestInFlight ref.
  - change fetchItems to accept skipLoadingState to perform lightweight live refreshes (sets isLiveUpdating instead of full loader).
  - poll every 10s when live is enabled (only on page 1, sorted by time desc) and use skipLoadingState for background refreshes.
  - integrate with realtime listener and avoid overlapping live requests.
  - add handleLiveToggle to enforce page/sort when enabling and to stop live updates when disabling.
- Automatically disable live mode when user changes filters, page, or sort in ways that conflict with live behavior.
- Minor imports/exports and typing adjustments.
2025-10-30 18:17:17 +00:00
Nawaz Dhandala
3b97c23039 Merge branch 'release' of https://github.com/OneUptime/oneuptime into release 2025-10-30 16:45:43 +00:00
Simon Larsen
7c15424565 Merge pull request #2064 from OneUptime/incident-postmortem
Incident postmortem
2025-10-30 16:45:20 +00:00
Nawaz Dhandala
6817443d9a chore: normalize formatting and comment style across migrations, jobs and postmortem UI
- Reformat MigrationName1761834523183.ts SQL queries to multi-line strings and consistent quoting
- Add trailing comma to migrations Index export
- Convert inline // comments to /* ... */ block comments in KeepCurrentStateConsistent jobs (Alert, Incident, Monitor, ScheduledMaintenance) for consistency
- Minor refactor in IncidentPostmortem: expand setRefreshToggle updater to an explicit return
2025-10-30 16:44:54 +00:00
Nawaz Dhandala
f40a6395a6 feat(incident): set create/edit modal width for incident postmortem templates 2025-10-30 16:42:25 +00:00
Nawaz Dhandala
81eb735aab feat(incident): extract postmortem form fields and apply templates via edit modal 2025-10-30 16:39:25 +00:00
Nawaz Dhandala
923339710b feat(incident): add migration to create IncidentPostmortemTemplate and add postmortemNote to Incident 2025-10-30 14:30:56 +00:00
Nawaz Dhandala
1f9ec3011c feat(incident): add postmortem UI, templates, routes and breadcrumbs
- Add Incident Postmortem page to incident view with:
  - Postmortem note CardModelDetail (edit/save)
  - "Apply Template" modal to pick and apply a postmortem template
- Add Settings pages for postmortem templates:
  - IncidentPostmortemTemplates (list/create)
  - IncidentPostmortemTemplateView (view/edit/delete)
- Wire up routes, route map, page map and side menu links for postmortem views
- Add breadcrumbs entries for incident postmortem and settings pages
- Minor cleanup/formatting tweaks in IncidentPostmortemTemplate model and IncidentService
2025-10-30 14:17:24 +00:00
Nawaz Dhandala
668093b09c feat(incident): add postmortem template & postmortem note feed support
- add IncidentPostmortemTemplate model and IncidentPostmortemTemplateService
- expose model in Common/Models/DatabaseModels index and register API route in BaseAPI
- add postmortemNote column to Incident model
- add IncidentFeedEventType.PostmortemNote enum value
- emit IncidentFeed item when postmortemNote is added/updated/cleared (IncidentService)
- update Dashboard IncidentFeed to show Book icon for PostmortemNote events
2025-10-30 12:27:13 +00:00
Simon Larsen
f39f51e8ee Merge pull request #2063 from OneUptime/master
chore(jobs): comment out KeepCurrentStateConsistent implementations a…
2025-10-30 12:08:05 +00:00
Nawaz Dhandala
0bff616ca9 chore(jobs): comment out KeepCurrentStateConsistent implementations and unused imports for Alert, Incident, Monitor, and ScheduledMaintenance 2025-10-30 11:59:07 +00:00
Nawaz Dhandala
709b9b8343 fix(link): add title to ComponentProps and forward it to the anchor element 2025-10-30 11:36:46 +00:00
Nawaz Dhandala
aa93252407 Merge branch 'master' into release 2025-10-29 20:38:27 +00:00
Simon Larsen
b4e7ffce43 Merge pull request #2062 from OneUptime/log-ui
Log UI
2025-10-29 20:37:16 +00:00
Nawaz Dhandala
1053d22d3f refactor(logs-viewer): add overflow-y-hidden to table container to prevent vertical overflow 2025-10-29 20:35:09 +00:00
Nawaz Dhandala
a37bdb303d refactor(logs-viewer): simplify empty-state copy, remove decorative '>' and tone down text colors 2025-10-29 20:33:31 +00:00
Nawaz Dhandala
860af1bbf3 refactor(logs-viewer): polish empty-state layout and styling
Replace bare empty message with a centered card-style panel:
- add full-height centered wrapper with slate background
- constrain width, add rounded border, padding and inner shadow
- update typography to monospace, uppercase tracking and emerald accent
- tweak spacing and muted text color for helper copy
2025-10-29 20:30:26 +00:00
Nawaz Dhandala
d5e72c1af2 refactor(logs-viewer): normalize formatting and tidy JSX
- Clean up multiline signatures and expressions for readability
- Standardize JSX tag formatting and expanded-content rendering (explicit return)
- Minor whitespace/indentation tweaks in LogsViewer, LogsTable, LogDetailsPanel, and Dashboard LogsViewer

No functional changes.
2025-10-29 20:29:07 +00:00
Nawaz Dhandala
6cc6fc4bf1 refactor(logs-viewer): polish LogDetailsPanel layout, surface cards, badges and trace/span links 2025-10-29 20:27:50 +00:00
Nawaz Dhandala
f890f24650 refactor(logs-viewer): use icon sort indicators and refine table styling
- replace text-based sort markers with Icon (Chevron/ArrowUpDown) and add helper funcs
- adjust table/header/tbody classes (dividers, background, text colors)
- tweak row hover/selected styles and message/service text colors for better contrast
- update small typography and trace/span color for consistency
2025-10-29 20:22:34 +00:00
Nawaz Dhandala
d077e55241 refactor(logs-viewer): add pagination & sorting API, move sort controls to table headers
- Add server/client-aware pagination & sorting to LogsViewer (props: totalCount, page, pageSize, onPageChange, onPageSizeChange, sortField, sortOrder, onSortChange)
- Implement client-side sort fallback, severity-weight sorting and stable time sorting
- Move sort UI into LogsTable column headers and remove autoscroll / toolbar sort buttons
- Introduce internal page/size/sort state, reset behavior on filter/apply, and improved displayedLogs handling
- Update page size defaults and options (DEFAULT_PAGE_SIZE=100, PAGE_SIZE_OPTIONS=[100,250,500,1000])
- Wire Dashboard viewer to new API (pass totalCount, page, pageSize, sort handlers) and adjust realtime fetch logic
- Reduce default page limit usages from 250 to 100
2025-10-29 20:07:59 +00:00
Nawaz Dhandala
a4b7c99b8a refactor(logs-viewer): replace gradients with solid backgrounds and enable log message wrapping 2025-10-29 19:18:06 +00:00
Nawaz Dhandala
671be425ae refactor(logs-viewer): render log details inline and unify panel variants
- Add renderExpandedContent prop to LogsTable and render LogDetailsPanel inline for expanded rows
- Introduce "embedded" variant in LogDetailsPanel and extract container/header/card classes for shared styling
- Update table and surrounding container styles (rounded, gradients, backdrop) and add a click-to-open hint banner
- Remove standalone selectedLog memo and the separate details pane UI; use selectedLogId to control inline expansion
2025-10-29 19:01:53 +00:00
Nawaz Dhandala
d2d7a51842 Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-10-29 18:24:49 +00:00
Simon Larsen
1142a20d64 refactor(logs-viewer): tighten types and clean up formatting across components
- Add explicit type annotations for callbacks, helpers and theme (SeverityTheme)
- Strengthen severityTheme getSeverityTheme signature and consume typed return
- Remove unused catch variables and mark unused params where applicable
- Minor JSX/formatting tweaks and whitespace cleanup in table, pagination, details panel
- Small import/format fix in Execute.ts
2025-10-29 18:02:19 +00:00
Simon Larsen
8d0d7dc759 refactor(logs-viewer): modularize viewer into table, pagination and details panel
- Remove legacy LogItem and the old scroll-based monolithic layout.
- Add modular components under LogsViewer/components:
  - LogsTable (tabular list view, row selection, copy actions)
  - LogDetailsPanel (expanded details, copy buttons, trace/span links)
  - LogsViewerToolbar (autoscroll toggle, sort controls, summary)
  - LogsPagination (page controls & page-size selector)
  - LogsFilterCard (filters wrapper using FiltersForm)
  - SeverityBadge + severityTheme (centralized severity styling)
- Replace manual scroll alignment with page-based auto-scroll and safer pagination logic.
- Preserve getTraceRoute/getSpanRoute behavior and filter lifecycle (attributes loading, apply filters).
- Improve state handling (selection, pagination, page size, service map loading) and reduce component complexity.
2025-10-29 17:52:26 +00:00
Simon Larsen
cedf06ba42 chore(ci): schedule npm-audit-fix workflow and create PR via action
Switch workflow to run on a daily schedule (and via workflow_dispatch) and
replace direct git commit/push steps with peter-evans/create-pull-request to
open changes as a PR instead of pushing to the default branch.
2025-10-29 17:02:50 +00:00
Nawaz Dhandala
b347e18749 chore: update package-locks — add deps, bump versions & sync subdeps
- Update lockfiles for root, TestServer, Worker and Workflow
- Add new/top-level packages and types:
  - @simplewebauthn/server, archiver, botbuilder
  - @types/archiver
- Bump notable package versions:
  - axios, nodemailer, playwright, react-syntax-highlighter
- Sync a large set of transitive dependency upgrades:
  - @babel/* (code-frame, parser, template, types, helpers, helper-*), brace-expansion, braces, fill-range, micromatch, picocolors, cross-spawn, to-regex-range and others
- Remove/clean outdated lock entries (e.g. to-fast-properties removed where unnecessary)
- Add/fix license fields for several packages in the lockfiles
- General lockfile normalization to ensure consistent subdependency versions across packages

This keeps dependency trees aligned across monorepo packages and pulls in recent bug/security fixes from transitive upgrades.
2025-10-29 16:43:36 +00:00
Nawaz Dhandala
50e9a53547 chore(ci): don't mark whole run as failed when npm audit fix errors; only report the error 2025-10-29 16:26:40 +00:00
Nawaz Dhandala
d0de004498 docs(devpromps): fix typo in TerraformPrompt.md and add note to avoid editing generated provider code 2025-10-29 16:24:51 +00:00
Nawaz Dhandala
65c4998048 chore(isolated-vm): update package-lock.json — bump dependencies, add new & transitive packages 2025-10-29 16:12:16 +00:00
Nawaz Dhandala
5e7a3795c7 chore(ci): add npm-audit-fix workflow and helper script, expose audit-fix npm script 2025-10-29 13:25:31 +00:00
Nawaz Dhandala
cf83319a90 style(execute): normalize import formatting and reindent exec callback 2025-10-28 19:26:44 +00:00
Nawaz Dhandala
87dc9d88d0 chore(common): update package-lock.json with dependency bumps and transitive additions
Bump multiple deps (notably @babel/*, express, undici, brace-expansion, sha.js, call-bind)
Add transitive packages: call-bound, is-typed-array, to-buffer, typed-array-buffer
Include updated metadata (engines, funding) and adjust dev flags/pruned entries in lockfile
2025-10-28 19:24:48 +00:00
Nawaz Dhandala
444cf040a6 refactor(execute,code-repository): allow ExecOptions in Execute.executeCommand and use cwd instead of 'cd'
- Extend Execute.executeCommand to accept ExecOptions and forward them to child_process.exec
- Log stderr on error and debug-log stderr when present
- Update CodeRepository to pass cwd to Execute.executeCommand instead of prefixing commands with "cd"
2025-10-28 17:37:06 +00:00
Nawaz Dhandala
2754657a6f chore(probe): update package-lock with dependency version bumps and new packages
- Bump axios, playwright, playwright-core, react-syntax-highlighter, nodemailer
- Add @simplewebauthn/server, @types/archiver, archiver, botbuilder
- Update resolved versions/integrity in lockfile
2025-10-28 16:58:30 +00:00
Nawaz Dhandala
38ca6b1e9e style(code-repository): normalize logger debug formatting and wrap long expressions for readability 2025-10-28 16:52:54 +00:00
Nawaz Dhandala
f481ef4f5e refactor(code-repository,github): replace ad-hoc shell commands with structured git execution, sanitize paths, and improve logging
- Use CodeRepositoryUtil.runGitCommand / Execute.executeCommandFile instead of building shell command strings.
- Properly resolve and sanitize file paths when adding files to git; skip empty/invalid paths and add relative sanitized paths.
- Use resolvePathWithinRepo and path.relative for accurate file/argument resolution.
- Trim returned git commit hash.
- Improve debug messages for branch creation/checkout, user.name setting, adding remotes, and pushing branches.
- Encode credentials when constructing remote URL for push and use cwd for git operations.
2025-10-28 16:50:30 +00:00
Nawaz Dhandala
ad9adca473 docs(devpromps): add SecurityFix.md prompt to locate and fix security vulnerabilities 2025-10-28 16:04:47 +00:00
Nawaz Dhandala
819bd54a1f refactor(worker,server): remove unused import, add missing LIMIT_PER_PROJECT import, and normalize object literal formatting 2025-10-28 15:00:24 +00:00
Nawaz Dhandala
e212079b4a refactor(database,worker): remove batchSize option and default to LIMIT_MAX
- Remove batchSize field from FindAllBy type.
- Update DatabaseService.findAllBy to stop reading/validating batchSize and use LIMIT_MAX as the batch size.
- Remove per-job batch size constants and batchSize parameters from multiple worker cron jobs (AlertOwners, Incident*, ScheduledMaintenance*, StatusPage*, Announcement, OnCall, Workflow, UserOnCallLog, ServerMonitor, PaymentProvider, etc.).
- Simplify call sites to rely on findAllBy's default batching behavior.
2025-10-28 14:56:38 +00:00
Nawaz Dhandala
bb09dafbcc refactor(database,worker): add paginated findAllBy and migrate jobs to batch fetching
- Add FindAllBy type and implement DatabaseService.findAllBy with batchSize/limit/skip support.
- Add normalizePositiveNumber helper used by findAllBy.
- Add ProjectService.getAllActiveProjects convenience wrapper that uses findAllBy.
- Replace many worker cron jobs' findBy calls with findAllBy, introduce per-job batch size constants, remove LIMIT_MAX imports, and pass batchSize/skip instead of one large limit.
- Convert long-running deletes/hard-deletes to paginated loops (repeat until no more records deleted).
- Adjust various query usages to use LIMIT_PER_PROJECT where appropriate as batchSize.
2025-10-28 14:32:45 +00:00
Nawaz Dhandala
60c472cc09 refactor(monitor-resource): replace Metric/MonitorLog models with JSON rows and add builders
Introduce buildMonitorMetricAttributes and buildMonitorMetricRow helpers to centralize attribute/timestamp logic. Replace creation of Metric and MonitorLog model instances with JSON row construction and use MetricService.insertJsonRows / MonitorLogService.insertJsonRows. Remove setAttributeKeys/Metric import usage and simplify metric aggregation code paths.
2025-10-28 12:33:12 +00:00
Nawaz Dhandala
0e272f0f31 refactor(fluent-ingest): replace Log model with JSON rows and add batching
- Remove dependency on Log model; build log JSON rows inline (IDs, ISO timestamps, unix nano)
- Buffer logs and flush in batches using new flushLogBuffer helper and LogService.insertJsonRows
- Add FLUENT_INGEST_LOG_FLUSH_BATCH_SIZE constant and simplify processing loop
2025-10-28 12:20:43 +00:00
Nawaz Dhandala
f5de74611d refactor(code-repository): centralize git execution with runGitCommand and improve clone folder detection
Replace ad-hoc shell command construction/Execute.executeCommand calls with a new runGitCommand wrapper that uses Execute.executeCommandFile. Update various methods (getCurrentCommitHash, addAllChangedFilesToGit, setAuthorIdentity, discardAllChangesOnCurrentBranch, pullChanges, createOrCheckoutBranch, discardChanges, etc.) to use the helper. Also improve cloneRepository to derive the cloned folder name from the repo URL and throw a clear error if it can't be determined.
2025-10-28 12:14:00 +00:00
Nawaz Dhandala
03d157b850 refactor(identity,code-repository): allow SCIM group updates by removing isTeamEditable guard; type readDirectory entries as fs.Dirent 2025-10-27 20:50:27 +00:00
Nawaz Dhandala
da21cfc1ff refactor(code-repository,local-file): normalize arrow return style and reformat sanitizeFilePath/readDirectory signatures 2025-10-27 17:53:18 +00:00
Nawaz Dhandala
2781bf0583 refactor(execute,code-repository): add executeCommandFile (execFile wrapper) and use it in commitChanges 2025-10-27 17:52:00 +00:00
Nawaz Dhandala
69b16c1c85 refactor(code-repository): use LocalFile.read for getFileContent and add resolvePathWithinRepo to sanitize/validate paths 2025-10-27 17:47:49 +00:00
Nawaz Dhandala
727f009d79 refactor(code-repository): replace shell ls/file usage with LocalFile.readDirectory and Dirent checks; add readDirectory helper 2025-10-27 17:44:00 +00:00
Nawaz Dhandala
65d916f349 refactor(code-repository): replace shell rm -rf with LocalFile.deleteDirectory and add LocalFile.deleteDirectory helper 2025-10-27 17:40:02 +00:00
Nawaz Dhandala
4373c7b49c refactor(telemetry): collapse multi-line if into single-line in TelemetryUsageBillingService 2025-10-27 16:29:41 +00:00
Nawaz Dhandala
34737fbba4 feat(telemetry): account for Exceptions in usage billing and add avg exception row size
- Update TelemetryUsageBilling description to include Exceptions.
- Add AverageExceptionRowSizeInBytes env/config (env example, docker-compose, Helm values & schema).
- Use ExceptionInstanceService in TelemetryUsageBillingService to include exception row counts when estimating bytes for Traces.
- Add helper to read average exception row size and adjust billing calculations.
2025-10-27 16:26:46 +00:00
Nawaz Dhandala
90fcfd1c7e Merge remote-tracking branch 'origin/snyk-upgrade-89594c7cd11449f44c7d156474899af6' 2025-10-27 15:49:24 +00:00
Simon Larsen
b55320f02c Merge pull request #2059 from OneUptime/otel-writer
Efficient Otel Writer
2025-10-27 15:45:51 +00:00
Nawaz Dhandala
34dc078197 fix(analytics): set wait_for_async_insert=0 for ClickHouse async inserts and tidy formatting 2025-10-27 15:45:20 +00:00
Nawaz Dhandala
2eacc90714 refactor(opentelemetry): format timestamps as ClickHouse UTC datetimes
- add OneUptimeDate.toClickhouseDateTime to produce UTC "YYYY-MM-DD HH:mm:ss"
- use ClickHouse-formatted timestamps for createdAt/updatedAt and time fields in OtelLogsIngestService, OtelMetricsIngestService and OtelTracesIngestService
- extend metric timestamp parsing to include db/date (and propagate db for DB storage)
- switch intermediate handling to Date objects to avoid extra ISO-string conversions
2025-10-27 15:41:13 +00:00
Nawaz Dhandala
9d93d59f91 fix(analytics): set wait_for_async_insert=1 and log ClickHouse insert result 2025-10-27 15:19:58 +00:00
Nawaz Dhandala
d84039e621 refactor(analytics): make ClickHouse client nullable and centralize retrieval
Change databaseClient to ClickhouseClient | null and add getDatabaseClient() to
lazily refresh/validate the client. Update insert/execute/executeQuery and
useDefaultDatabase to use the centralized getter and remove unsafe casts and
duplicated null checks.
2025-10-27 15:13:10 +00:00
Nawaz Dhandala
4eb46cf8a0 docs(clickhouse): add SQL to calculate average uncompressed row size per table and close code fence 2025-10-27 13:46:24 +00:00
Nawaz Dhandala
1ef27b7f52 fix(settings): clarify usage history no-items message to mention end-of-day data availability 2025-10-27 13:22:28 +00:00
Nawaz Dhandala
412bd370df refactor(telemetry): add explicit types for attribute value variables in TelemetryUtil 2025-10-27 13:20:33 +00:00
Nawaz Dhandala
1131b80a52 style(otel-ingest): normalize .catch callback formatting in span exception handling 2025-10-27 13:16:16 +00:00
Nawaz Dhandala
8b55f5c348 refactor(otel-ingest): decouple exception handling from analytics model
- Remove direct usage of ExceptionInstance model in OtelTracesIngestService.
- Build and push exceptions as ExceptionEventPayload objects, compute fingerprint from payload.
- Update buildExceptionRow to accept ExceptionEventPayload and map fields accordingly.
- Change ExceptionUtil API: introduce ExceptionFingerprintInput and TelemetryExceptionPayload types; update getFingerprint and saveOrUpdateTelemetryException signatures.
2025-10-27 13:14:07 +00:00
Nawaz Dhandala
159e5c4023 refactor(otel-ingest): switch telemetry ingestion to ClickHouse bulk writer
Replace model-based createMany calls with ClickhouseBulkWriter.insert for logs, metrics, spans and exceptions. Build plain JSON rows (with createdAt/updatedAt, timestamps, attributes, attributeKeys, trace/span ids, severity, etc.) and change buffers to JSONObject arrays. Add helper builders and utilities (buildMetricRow, buildSpanRow, buildExceptionRow, safeParseUnixNano, toNumberOrNull, toBoolean, convertBase64ToHexSafe, calculateDurationNano) to improve timestamp/attribute parsing and robustness. Add new ClickhouseBulkWriter utility.
2025-10-27 13:05:49 +00:00
Nawaz Dhandala
4970538d43 docs: correct example pagination request method to POST 2025-10-27 12:39:22 +00:00
Nawaz Dhandala
d7ca021d52 refactor: improve TelemetryUtil attribute parsing, typing and null handling 2025-10-27 12:17:31 +00:00
Simon Larsen
27eff7f415 Merge pull request #2057 from OneUptime/snyk-upgrade-ef9e383973f8732d2d959dd964599c61
[Snyk] Upgrade axios from 1.12.0 to 1.12.2
2025-10-27 10:32:46 +00:00
Simon Larsen
50543ec7bf Merge pull request #2058 from OneUptime/snyk-upgrade-7e0a372e41cd67fc4ed46876443edaa8
[Snyk] Upgrade react-router-dom from 6.23.1 to 6.30.1
2025-10-27 10:32:39 +00:00
snyk-bot
94c39408ed fix: upgrade react-router-dom from 6.23.1 to 6.30.1
Snyk has created this PR to upgrade react-router-dom from 6.23.1 to 6.30.1.

See this package in npm:
react-router-dom

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/11bb5528-45f9-473c-a635-dc097fd03b3c?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-10-27 09:53:27 +00:00
snyk-bot
7219e1850f fix: upgrade axios from 1.12.0 to 1.12.2
Snyk has created this PR to upgrade axios from 1.12.0 to 1.12.2.

See this package in npm:
axios

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/49c81d9c-12c2-4e8e-b9e8-72f98b1b595c?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-10-27 09:51:39 +00:00
Simon Larsen
3180ed7149 Merge pull request #2055 from OneUptime/snyk-upgrade-9e6de76051d1dee73345ae37a0e51d09
[Snyk] Upgrade react-router-dom from 6.23.1 to 6.30.1
2025-10-26 17:24:46 +00:00
Simon Larsen
79f32b80c8 Merge pull request #2054 from OneUptime/snyk-upgrade-bad1e870c8252b45aa78fe78e19570b0
[Snyk] Upgrade globals from 15.6.0 to 15.15.0
2025-10-26 17:24:27 +00:00
Simon Larsen
cebfde6bf2 Merge pull request #2052 from OneUptime/snyk-upgrade-7a970ceafec3776b6998e6c88441b566
[Snyk] Upgrade typeorm from 0.3.20 to 0.3.27
2025-10-26 17:24:05 +00:00
Simon Larsen
ca644d9dc7 Merge pull request #2051 from OneUptime/snyk-upgrade-6a98a9360c422231b9e06b3e3624ea4f
[Snyk] Upgrade @readme/openapi-parser from 4.1.0 to 4.1.2
2025-10-26 17:23:59 +00:00
snyk-bot
ca4d9cb176 fix: upgrade react-router-dom from 6.23.1 to 6.30.1
Snyk has created this PR to upgrade react-router-dom from 6.23.1 to 6.30.1.

See this package in npm:
react-router-dom

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/8ca4ee75-8bc5-43a1-a3bc-244ceebf1437?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-10-26 10:52:13 +00:00
snyk-bot
56204e02a9 fix: upgrade globals from 15.6.0 to 15.15.0
Snyk has created this PR to upgrade globals from 15.6.0 to 15.15.0.

See this package in npm:
globals

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/c3622982-05c8-495c-809c-20f301c75f92?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-10-25 11:38:53 +00:00
snyk-bot
c8e8a6d687 fix: upgrade jest from 30.0.0 to 30.2.0
Snyk has created this PR to upgrade jest from 30.0.0 to 30.2.0.

See this package in npm:
jest

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/c3622982-05c8-495c-809c-20f301c75f92?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-10-25 11:38:49 +00:00
snyk-bot
5bd2204eee fix: upgrade typeorm from 0.3.20 to 0.3.27
Snyk has created this PR to upgrade typeorm from 0.3.20 to 0.3.27.

See this package in npm:
typeorm

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/c3622982-05c8-495c-809c-20f301c75f92?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-10-25 11:38:44 +00:00
snyk-bot
72a31ed268 fix: upgrade @readme/openapi-parser from 4.1.0 to 4.1.2
Snyk has created this PR to upgrade @readme/openapi-parser from 4.1.0 to 4.1.2.

See this package in npm:
@readme/openapi-parser

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/f8557d79-3b3d-4201-8e2b-598120aedb5c?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-10-25 11:34:15 +00:00
Nawaz Dhandala
ee188dd050 chore(values.schema): add noProxy option to proxy configuration 2025-10-24 17:48:29 +01:00
Nawaz Dhandala
3807aad63b Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-10-24 17:40:33 +01:00
Nawaz Dhandala
055ec956fd build(esbuild-config): add refractor compatibility plugin to resolve refractor imports
Add createRefractorCompatibilityPlugin that maps imports from refractor/lib and refractor/lang
to the local refractor package (searching candidate node_modules paths), and include it in the
plugins list so esbuild can correctly resolve refractor modules.
2025-10-24 17:40:31 +01:00
Simon Larsen
e7767e59d1 Merge pull request #2048 from OneUptime/probe-no-prpoxy
Probe no proxy
2025-10-24 17:24:47 +01:00
Nawaz Dhandala
f643e907b4 refactor(proxy-config): normalize & resolve ports for NO_PROXY matching; add protocol resolution helpers 2025-10-24 17:24:22 +01:00
Nawaz Dhandala
2eec57befd refactor(proxy-config): robust hostname/port extraction, normalize ports, and tidy formatting
- Improve extractHostnameAndPort and splitHostAndPort to handle IPv6 brackets, single-colon ports, and trimmed port values; return explicit result objects.
- Normalize host/port parsing logic and ensure port values are trimmed before use.
- Consolidate import formatting and split long method signatures for readability.
- Minor formatting/whitespace cleanups in Probe Config, Alive, Register, Monitor, and NO_PROXY parsing.
2025-10-24 15:12:12 +01:00
Nawaz Dhandala
dd653f8deb feat(proxy): add NO_PROXY support and per-request proxy bypass based on target URL
- import NO_PROXY and log its entries during configuration
- allow callers to pass a target URL to getHttpProxyAgent/getHttpsProxyAgent/getRequestProxyAgents
- implement shouldBypassProxy with URL/host:port extraction and pattern matching (including wildcard, subdomains, IPv6, and scheme/port-aware patterns)
- return no agents when a target matches NO_PROXY or when proxy is not configured
2025-10-24 15:04:05 +01:00
Nawaz Dhandala
f403c6a9e9 feat(proxy): add NO_PROXY support and use request URL for proxy agent selection
- Parse NO_PROXY / no_proxy in Probe Config into a trimmed list
- Wire NO_PROXY into UI docs, Helm chart values, and probe Docker/compose examples
- Add NO_PROXY env var to Helm probe template when provided
- Pass target URL to ProxyConfig.getRequestProxyAgents / getHttpProxyAgent / getHttpsProxyAgent so proxy selection is per-request
- Update probe calls (Alive, Metrics, FetchList, FetchMonitorTest, Register, Monitor ingest/reporting, Api/Website/Ssl monitors) to use local URL variables and supply them to proxy helpers
- Minor refactors to avoid inline URL construction where reused
2025-10-24 15:02:36 +01:00
Nawaz Dhandala
35f9b7f5c4 refactor(log-item): move collapse toggle to header container and remove redundant handler 2025-10-24 13:16:11 +01:00
Nawaz Dhandala
3c487ff9b9 refactor(log-item): use monospaced font for collapsed row and message previews; make header toggleable 2025-10-24 13:11:37 +01:00
Nawaz Dhandala
41fca346b9 refactor(table-row): extract columnContent and consolidate column rendering
- Move per-column value/element logic into a single columnContent variable
- Add contentWrapperClassName and actionsContainerClassName to unify wrappers
- Remove duplicated JSX branches for desktop row cells and simplify action rendering
2025-10-24 13:00:24 +01:00
Nawaz Dhandala
91b54ced67 refactor(exceptions): normalize icon sizes in TelemetryExceptionElement by adding min-h-6 and min-w-6 to icon wrappers 2025-10-24 12:59:49 +01:00
Nawaz Dhandala
ebdd97b8e9 refactor(exceptions): normalize icon sizes and simplify className usage in TelemetryExceptionElement
- Remove redundant h-6 w-6 classes from Icon elements and rely on wrapper sizing.
- Simplify container className interpolation (remove fallback empty string).
2025-10-24 12:56:13 +01:00
Nawaz Dhandala
8eb1eac629 refactor(table-row): inline column content rendering and remove temporary wrapper variables 2025-10-24 12:53:15 +01:00
Nawaz Dhandala
a075b3c4dd refactor(table-row, exceptions): dedupe column rendering, wrap content, and normalize icon sizes
- Extract columnContent and content/action wrapper class names in TableRow to remove duplicated conditional rendering and wrap cell content with the configured className.
- Replace inline action container with actionsContainerClassName for consistent alignment.
- Add explicit h-6 w-6 classes to exception icons for consistent sizing.
2025-10-24 12:50:59 +01:00
Nawaz Dhandala
738f901a51 refactor(vm-runner, logs-viewer, server-monitor, embedded-status): simplify script generation, add explicit typings, and tidy UI text 2025-10-24 12:18:25 +01:00
Nawaz Dhandala
683a8f5a58 chore(eslint): enable no-constant-binary-expression rule (set to error) 2025-10-24 12:11:30 +01:00
Nawaz Dhandala
160eba1ea4 refactor(exceptions): expose className prop on TelemetryExceptionElement and pass max-w-3xl from ExceptionsTable 2025-10-24 12:09:49 +01:00
Nawaz Dhandala
89b65d1e02 style(logs-viewer): inline ternary expressions in Icon className strings 2025-10-24 12:06:38 +01:00
Nawaz Dhandala
caf709a38a chore(probe): bump Node base image to public.ecr.aws/docker/library/node:24.9 (from 23.8) 2025-10-24 12:01:50 +01:00
Simon Larsen
d57433e4a0 Merge pull request #2045 from OneUptime/telemetry-billing
Telemetry billing
2025-10-24 11:08:18 +01:00
Simon Larsen
5e2aa4e622 Merge pull request #2044 from OneUptime/log-fix
feat(logs-viewer): add toggleable log ordering and update scroll-to-l…
2025-10-24 10:13:40 +01:00
Nawaz Dhandala
6cb51dd54b refactor(logs-viewer): replace sort toggle with segmented "Newest first"/"Oldest first" buttons and extract applySortDirection
Extract applySortDirection to centralize sort toggle + scroll alignment logic and replace the single ArrowUpDown button with two styled buttons (BarsArrowDown / BarsArrowUp) to improve UX and clarity.
2025-10-24 10:11:45 +01:00
Nawaz Dhandala
53ea843bdc Merge branch 'master' into log-fix 2025-10-24 09:52:13 +01:00
Nawaz Dhandala
71eeaf7ecd chore(dashboard): add archiver/@simplewebauthn/server and bump several deps
- add @simplewebauthn/server, archiver, @types/archiver, botbuilder
- bump nodemailer -> ^7.0.7, playwright -> ^1.55.1, react-syntax-highlighter -> ^16.0.0
2025-10-24 09:51:54 +01:00
Simon Larsen
1d0168fcc6 Merge pull request #2043 from OneUptime/embedded-sttaus
Embedded status
2025-10-24 09:49:47 +01:00
Nawaz Dhandala
99c3d440c5 refactor(status-page): remove inline token exposure warning from Embedded Status Badge card 2025-10-24 09:39:12 +01:00
Nawaz Dhandala
5959ce728f refactor(status-page): wrap Embedded Status CardModelDetail in container and tidy props
Wrap the CardModelDetail for the embedded status badge in a surrounding <div> to group it with the other cards, and reorganize/tidy card/form properties (move description to cardProps, clean up formField ordering and types). Also minor JSX/formatting cleanup.
2025-10-24 09:38:47 +01:00
Nawaz Dhandala
0cb9e382a6 refactor(status-page): use provisioned domain for badge docs, replace Alert with inline warning, and tidy embedded badge UI
- Fetch latest SSL-provisioned StatusPageDomain and build a status page URL to use in the badge documentation/markdown embeds (falls back to STATUS_PAGE_URL).
- Add state for statusPageUrl and effect to resolve domain via ModelAPI (Query/Sort).
- Replace Alert component with inline informational paragraph and a highlighted yellow warning box.
- Minor cleanup: remove an obsolete comment, adjust Card layout/spacing and documentation markdown link.
2025-10-24 09:37:04 +01:00
Nawaz Dhandala
0c6d561b7c refactor(status-page): remove redundant "Badge Usage" header and drop h-full from token/preview cards
Remove the "## Badge Usage" heading from the documentation markdown and remove the
className="h-full" from the Security Token and Badge Preview Card components to
simplify and stabilize the embedded badge layout.
2025-10-24 09:21:05 +01:00
Nawaz Dhandala
4291a76dd4 refactor(exceptions/status-page): improve exception message wrapping and simplify embedded badge layout
- Add contentClassName to the "Exception Message" column to constrain width and allow proper wrapping.
- Remove the surrounding grid wrapper and the lg:col-span-2 on the Badge Documentation card to simplify the embedded badge layout.
2025-10-24 09:14:53 +01:00
Nawaz Dhandala
05b1f0ea82 refactor(status-page): use Alert component for token regeneration warning in embedded badge card 2025-10-24 09:12:41 +01:00
Nawaz Dhandala
b943505b1d refactor(status-page): include embeddedOverallStatusToken in modelDetailProps 2025-10-24 09:08:31 +01:00
Simon Larsen
1ed236eb91 Merge pull request #2046 from OneUptime/snyk-upgrade-6c5f58f1b138b8c82cfd6bdcdf7dbcce
[Snyk] Upgrade react-router-dom from 6.23.1 to 6.30.1
2025-10-24 09:02:22 +01:00
snyk-bot
44795182c9 fix: upgrade react-router-dom from 6.23.1 to 6.30.1
Snyk has created this PR to upgrade react-router-dom from 6.23.1 to 6.30.1.

See this package in npm:
react-router-dom

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/5dd2ef9c-1270-4729-aff4-e407805f7a9c?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-10-24 04:01:13 +00:00
Simon Larsen
50267534e0 Merge pull request #2003 from OneUptime/snyk-fix-2b1cd110cd50d4d7d2d3aab8e21c4982
[Snyk] Security upgrade axios from 1.7.2 to 1.12.0
2025-10-23 21:24:39 +01:00
Simon Larsen
f60836a4dc Merge pull request #2027 from OneUptime/snyk-fix-12d4523821beb1a61df8fdd2378e8e5f
[Snyk] Security upgrade nodemailer from 6.9.16 to 7.0.7
2025-10-23 21:24:33 +01:00
Simon Larsen
303294bb5e Merge pull request #2038 from OneUptime/snyk-fix-bb0bdd9905b96cdaeceb0ee81316a34f
[Snyk] Security upgrade playwright from 1.50.0 to 1.55.1
2025-10-23 21:21:36 +01:00
Simon Larsen
1212a8e4be Merge pull request #2031 from OneUptime/snyk-fix-e2f14fffff98beb03eb482adc84b328d
[Snyk] Security upgrade nginx from 1.28.0-alpine to 1.29.2-alpine
2025-10-23 21:21:29 +01:00
Simon Larsen
25c626d2d4 Merge pull request #2030 from OneUptime/snyk-fix-311183f6327ea1b45e86908c4fe82d3a
[Snyk] Security upgrade mailparser from 3.7.1 to 3.7.5
2025-10-23 21:21:20 +01:00
Simon Larsen
ff6f9c89fa Merge pull request #2040 from OneUptime/snyk-fix-66c920d77215ccde82d8cbc57644a387
[Snyk] Fix for 2 vulnerabilities
2025-10-23 21:20:49 +01:00
Nawaz Dhandala
7f6e905c74 refactor(markdown-viewer): reduce padding for pre and syntax highlighter code blocks 2025-10-23 21:04:33 +01:00
Nawaz Dhandala
2cc64838aa refactor(status-page): keep token in state after regeneration, move regen button to card header, and tweak layout/preview styles
- Remove unused default Button import
- Replace window.location.reload() with setToken(newToken) so UI updates without a full page reload
- Move "Regenerate Token" into Card.buttons with loading/disabled states
- Rework card layout into responsive grid, add bodyClassName, and adjust preview styling (dashed container, max height for badge)
2025-10-23 20:49:07 +01:00
Nawaz Dhandala
3de5c8da8c refactor(status-page): consolidate token rotation UI, use useMemo for modelId, fix side menu icon
- Wrap modelId retrieval in useMemo for stable value
- Move "Regenerate Token" button and helper text into the Security Token card and remove the separate "Token Rotation" card
- Update SideMenu icon for "HTML, CSS & JavaScript" to IconProp.Circle
2025-10-23 20:41:53 +01:00
Nawaz Dhandala
7403ffa053 refactor(status-page): simplify embedded badge state and docs UI
- Remove statusPage state and store embeddedOverallStatusToken and enableEmbeddedOverallStatus
  in separate token and isEmbeddedStatusEnabled states.
- Set token and enabled flag directly in onItemLoaded.
- Simplify "Badge Documentation" card to render MarkdownViewer only (remove raw source/preview split)
- Tweak card description copy.
2025-10-23 20:37:03 +01:00
Nawaz Dhandala
fa473474a2 feat(status-page): improve embedded badge UI, use secure token generation, add preview & docs
- Replace non-cryptographic token generation with ObjectID.generate()
- Add statusPage state, token/isEnabled checks and computed badge URLs
- Split UI into Security Token, Token Rotation, Badge Preview and Badge Documentation cards
- Use HiddenText for copyable token and MarkdownViewer for rendered docs
- Wire CardModelDetail onItemLoaded to populate statusPage for live preview
2025-10-23 20:33:45 +01:00
Nawaz Dhandala
20cbcf9a74 refactor(status-page): remove ModelPage wrapper and routing/side menu from EmbeddedStatus
- Return Fragment directly instead of wrapping with ModelPage
- Remove breadcrumb links and sideMenu usage
- Remove unused imports: PageMap, RouteMap, RouteUtil, Route, SideMenu, ModelPage
2025-10-23 20:12:17 +01:00
Nawaz Dhandala
e6fd4b8304 refactor(types): add explicit typings for env parser and telemetry billing
Add a full function signature for parsePositiveNumberFromEnv and strengthen typing in TelemetryUsageBillingService: import TelemetryServiceModel, type telemetryServices as Array<TelemetryServiceModel>, and annotate count results as PositiveNumber for traces, logs and metrics to improve type-safety and readability.
2025-10-23 20:02:57 +01:00
Nawaz Dhandala
4a3a743dcd chore(format): normalize multiline argument formatting in EnvironmentConfig and TelemetryUsageBillingService 2025-10-23 19:53:09 +01:00
Nawaz Dhandala
6f1e2234d3 feat(telemetry): stage daily usage and centralize telemetry billing
- Add stageTelemetryUsageForProject to TelemetryUsageBillingService:
  count traces/logs/metrics per telemetry service via AnalyticsQueryHelper,
  estimate bytes using average row size env vars, convert to GB and create/update
  daily usage records (uses day + usageDate for deduplication).
- Update updateUsageBilling to accept usageDate, set createdAt to usageDate and
  query by day instead of createdAt ranges.
- Add helper getAverageRowSizeForProduct and import LIMIT_INFINITY,
  DEFAULT_RETENTION_IN_DAYS, TelemetryServiceService, Span/Log/Metric services,
  DiskSize, AnalyticsQueryHelper and logger.
- Call stageTelemetryUsageForProject from TelemetryMeteredPlan before reporting
  to the billing provider.
- Remove direct billing invocation from FluentIngest worker to decouple ingestion
  from billing staging.
2025-10-23 19:51:11 +01:00
Nawaz Dhandala
07189b4567 feat(env): add average telemetry row size settings and expose as env vars
- add billing.telemetry.{averageSpanRowSizeInBytes, averageLogRowSizeInBytes, averageMetricRowSizeInBytes} to values.yaml with defaults
- add schema validation for these fields in values.schema.json (integer, minimum 1)
- expose AVERAGE_SPAN_ROW_SIZE_IN_BYTES, AVERAGE_LOG_ROW_SIZE_IN_BYTES, AVERAGE_METRIC_ROW_SIZE_IN_BYTES in templates/_helpers.tpl
2025-10-23 19:49:59 +01:00
Nawaz Dhandala
b1bc02cec4 feat(env): add average telemetry row size env vars with validation
- introduce parsePositiveNumberFromEnv helper in EnvironmentConfig.ts
- add AverageSpan/Log/MetricRowSizeInBytes exports with positive-number parsing and defaults
- document AVERAGE_* vars in config.example.env and expose them in docker-compose.base.yml
2025-10-23 19:49:24 +01:00
Simon Larsen
6f7795aa31 feat(logs-viewer): add toggleable log ordering and update scroll-to-latest behavior
- add isDescending state and useMemo displayLogs to reverse logs when needed
- rename showScrollToBottom -> showScrollToLatest and replace scrollToBottom with scrollToLatest
- convert handleScroll to useCallback and make it order-aware (top vs bottom)
- update scroll button icon/title and ensure scroll position recalculates after toggling order
- render and count displayLogs (use reversed list) and update empty state checks
- minor comment update in Dashboard LogsViewer
2025-10-23 19:24:02 +01:00
Nawaz Dhandala
50ee87c86f chore(mobile): remove MobileApp configuration files
Remove obsolete MobileApp project configuration and dependency files to clean up the repository:
- MobileApp/package.json
- MobileApp/tailwind.config.js
- MobileApp/tsconfig.json

These files contained Expo / React Native specific settings (dependencies, Nativewind/Tailwind config, and TS config) for a mobile app that is no longer maintained in this repo. Removing them reduces clutter and prevents confusion about supported platforms.
2025-10-23 17:21:20 +01:00
Simon Larsen
a75c6b6a43 feat(status-page): add Embedded Status page with token regeneration and wire up route/menu; remove legacy Embedded component 2025-10-23 16:53:58 +01:00
Simon Larsen
f6168c969e feat(status-page): add embedded status badge UI and token regeneration
- Add advanced options UI for embedded overall status badge: enable toggle, security token display, badge preview, and HTML/Markdown embed examples.
- Implement regenerate token flow with confirmation modal and regenerateToken function that creates a new token and updates the StatusPage via ModelAPI.
- Add local state hooks for modal/loading and construct badge URL with token placeholder.
- Rename/export component from StatusPageDelete to StatusPageAdvancedOptions.
2025-10-23 16:26:45 +01:00
Simon Larsen
2b0f9f2e7a feat(status-page): add embedded overall status badge toggle and token with migration
- Add enableEmbeddedOverallStatus boolean column and embeddedOverallStatusToken (with index) to StatusPage model
- Include billing and column access controls for new fields
- Add corresponding TypeORM migration and register it in migrations index
2025-10-23 16:20:18 +01:00
Nawaz Dhandala
14377c68fe Merge branch 'release' of https://github.com/OneUptime/oneuptime into release 2025-10-23 15:45:59 +01:00
Nawaz Dhandala
9176fa2c9b style(forms): add className override to FieldLabel and apply to Dictionary labels 2025-10-23 15:44:26 +01:00
Nawaz Dhandala
4f29fef5f6 feat(model-filter): add isAdvancedFilter and propagate to table
Add isAdvancedFilter to Filter interface, pass it into the ClassicFilter mapping
in BaseModelTable, and include props.filters in the showFilterModal effect deps
so filters refresh when changed. Mark the TraceTable "Attributes" filter as an
advanced filter.
2025-10-23 13:46:06 +01:00
Simon Larsen
f28c7695ab Merge pull request #2041 from OneUptime/master
Release
2025-10-22 18:16:10 +01:00
Nawaz Dhandala
bc234deb0e style(analytics): simplify empty projections to [] in Log/Metric/Span and remove stray blank line in TableManegement 2025-10-22 18:08:18 +01:00
Nawaz Dhandala
546b4a4fb3 refactor(analytics): expose AnalyticsTableManagement helpers and remove projection/materialized view creation from createTables
- Make utility methods public: doesProjectionExist, materializeProjection, escapeForQuery, escapeIdentifier, doesMaterializedViewExist, createMaterializedView
- Remove inline projection and materialized view creation logic from createTables (createTables now only ensures tables exist)
- Remove unused Projection import
2025-10-22 18:07:45 +01:00
Nawaz Dhandala
1300c4e667 feat(analytics): add materialized view support to analytics models and table management
- add MaterializedView type
- wire materializedViews into AnalyticsBaseModel (constructor param, property, getter/setter)
- implement materialized view processing in AnalyticsTableManagement (validation, existence check, creation)
2025-10-22 18:04:51 +01:00
Nawaz Dhandala
988d5d327c style(analytics): remove attribute projection definitions from Log, Metric and Span models 2025-10-22 17:52:44 +01:00
Nawaz Dhandala
f2510a7b89 style(exceptions): format message div across multiple lines for improved readability 2025-10-22 15:05:54 +01:00
Nawaz Dhandala
a2f16ca0eb style(exceptions): remove container truncate and add break-words to message text to allow long words to wrap 2025-10-22 15:04:44 +01:00
Nawaz Dhandala
1448288395 style(autocompleteTextInput,dictionary): normalize type annotations and tidy JSX formatting 2025-10-22 14:58:24 +01:00
Nawaz Dhandala
734481df6c style(dictionary): replace equals Icon with '=' text, adjust item row spacing/alignment, and remove unused Icon import 2025-10-22 14:57:00 +01:00
Nawaz Dhandala
43c534b76a feat(dictionary): add FieldLabelElement labels for Key, Type and Value fields 2025-10-22 14:44:31 +01:00
Nawaz Dhandala
1b593403b6 refactor(dictionary): remove autoConvertValueTypes and implicit conversions
- Remove NumberUtil/BooleanUtil and the autoConvertValueTypes prop from Dictionary
- Stop automatic string->number/boolean coercion; preserve original values (normalize undefined/null to "")
- Use getDefaultValueForType when adding or changing a value's type
- Update JSONFilter and FormField to import ValueType and pass explicit valueTypes
2025-10-22 14:40:59 +01:00
Nawaz Dhandala
54e60cc380 feat(dictionary): replace key input/dropdown with AutocompleteTextInput and add component 2025-10-22 14:24:49 +01:00
Nawaz Dhandala
28d9879dbd style(logs,traces): add explicit typings and clean up callback formatting
- LogItem: annotate onClick/onAuxClick handlers with React.MouseEvent<HTMLDivElement>
- LogsViewer: add OptionalTraceRouteProps/OptionalSpanRouteProps types and type annotate trace/span route props passed to LogItem
- TraceExplorer: introduce/clarify function type aliases (PromiseVoidFunction, FetchSpansFunction) and annotate callbacks (fetchTelemetryServices, fetchSpans, fetchItems, handleShowNextSpans, handleShowAllSpans); reflow long JSX for readability
2025-10-22 14:08:18 +01:00
Nawaz Dhandala
22c417ac92 feat(traces): show compact loading pill for span load actions in TraceExplorer
Replace disabled "Show next" / "Show all" buttons with a single inline loading indicator while additional spans are being fetched, and restore the action buttons when not loading to improve loading UX and reduce visual clutter.
2025-10-22 13:39:42 +01:00
Nawaz Dhandala
401926c792 feat(traces): add paginated/batched span loading and load-more controls in TraceExplorer
Introduce INITIAL_SPAN_FETCH_SIZE, SPAN_PAGE_SIZE and MAX_SPAN_FETCH_BATCH and refactor span loading to support pagination and batching. Change fetchSpans to accept {limit, skip, mode} and return fetched count (replace | append). Remove spanLimitRef and add handleShowNextSpans / handleShowAllSpans to incrementally load spans (with batch loop for "show all"). Update UI to show a descriptive banner with "Show next" and "Show all remaining" buttons and improve loading/error state handling and metrics messaging.
2025-10-22 13:35:35 +01:00
Nawaz Dhandala
90bc4de84a feat(traces): incremental span loading and improved loading/error UX in TraceExplorer
- split telemetry & span fetching (fetchTelemetryServices, fetchSpans(limit, options))
- add totalSpanCount, spanLimitRef and isLoadingMoreSpans to support partial loading
- add handleShowAllSpans and UI banners/buttons to "Show N more spans" and indicate "Showing all N spans"
- show PageLoader only for initial empty-load; surface non-blocking errors inline when spans exist and keep blocking error when no spans
- show loaded/total counts in Spans metric
- reset span-related state (limit, spans, selection, total) when traceId changes
2025-10-22 13:17:16 +01:00
Nawaz Dhandala
93fd2a9ed7 style(date,gantt): reflow multiline returns in OneUptimeDate and format Row isHighlighted prop for readability 2025-10-22 12:27:31 +01:00
Nawaz Dhandala
231451e359 feat(date,span): add humanized duration formatter and use it in SpanUtil 2025-10-22 12:26:22 +01:00
snyk-bot
3073891a38 fix: Common/package.json & Common/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-PLAYWRIGHTCORE-13553173
- https://snyk.io/vuln/SNYK-JS-PRISMJS-9055448
2025-10-22 11:16:45 +00:00
Nawaz Dhandala
2b62d31e8b fix(blog): set default pageSize to 25 for blog and tag listing endpoints 2025-10-22 12:04:21 +01:00
Nawaz Dhandala
bc43e6e6ea feat(traces): make card title/description reflect active tab (All / Root Spans) 2025-10-22 11:44:53 +01:00
Nawaz Dhandala
2204ebde26 feat(traces): add tabs for All/Root spans and apply root-span filtering in TraceTable 2025-10-22 11:41:35 +01:00
Nawaz Dhandala
57a8547e14 style(gantt): use conditional prop spreads and refine row/label highlight styling
- Replace direct optional prop passing with conditional spread for
  multiSelect and highlightBarIds in GanttChart Index, Rows and Row to
  avoid passing undefined values.
- Add labelPaddingLeft and adjust row layout: normalize borders, add
  transitions, spacing, overflow handling and subtle ring/shadow when
  highlighted.
- Pass isHighlighted into RowLabel and update RowLabel to apply
  highlighted title/description styles (font weight and colors).
- Normalize Bar isHighlighted check to a boolean expression for safety.
2025-10-22 11:30:35 +01:00
Nawaz Dhandala
53a70e1b93 feat(gantt,traces): add span highlighting support and propagate highlight state
- Add highlightBarIds prop to GanttChart and propagate through Rows -> RowIndex -> Row -> Bar
- Row: compute highlight set, detect highlighted descendants, auto-expand rows with highlights, and apply highlight styles (background, rounded container, data attribute)
- Bar: accept isHighlighted, apply highlight visuals (box-shadow, z-index) and adjust label styling
- Rows/RowIndex: include highlightBarIds in component interfaces and pass-through props
- TraceExplorer: accept highlightSpanIds, sanitize input, derive highlightableSpanIds from displayed spans, and include them in the generated GanttChartProps
- Trace view pages: extract spanId query param, normalize to highlightSpanIds, and pass to TraceExplorer

This enables highlighting specific spans (via spanId query) in the Gantt chart UI and ensures highlighted rows are visible and visually emphasized.
2025-10-22 11:22:32 +01:00
Nawaz Dhandala
3eb72ef7f9 style(telemetry,ingest,ui): normalize TelemetryUtil.getAttributeKeys call formatting and tidy LogItem JSX/whitespace 2025-10-22 11:03:26 +01:00
Nawaz Dhandala
2a471133c5 feat(logs): make span IDs clickable and refine trace link styling
- Add getSpanRoute prop to LogsViewer and LogItem and thread it through from callers.
- Implement renderSpanId to render clickable span links (resolves via getSpanRoute or by appending spanId to trace route).
- Show "No span" placeholder when spanId is missing.
- Prevent link clicks from toggling the log row (stopPropagation on span links).
- Tweak trace link decoration and external-link icon color for improved hover styling.
- Minor prop typing / pass-through cleanup in LogsViewer.
2025-10-22 11:02:07 +01:00
Nawaz Dhandala
34e92679b2 feat(logs): render trace IDs as clickable links with external-link icon and refined styling 2025-10-22 10:41:25 +01:00
Nawaz Dhandala
8b0c2a7320 feat(analytics): make trace IDs clickable in logs
Add an optional getTraceRoute prop to LogItem and LogsViewer and render traceId as a Link when a route is provided. Stop event propagation on link clicks to avoid toggling collapse. Wire getTraceRoute from Dashboard and Span viewers using RouteUtil.populateRouteParams.
2025-10-22 10:34:28 +01:00
Nawaz Dhandala
353d894394 feat(analytics): mark attributeKeys as required on Log, Metric and Span models 2025-10-21 22:22:48 +01:00
Nawaz Dhandala
09e4f0ff97 fix(analytics): cast metric.attributes when computing attributeKeys to satisfy TelemetryUtil signature 2025-10-21 22:10:46 +01:00
Nawaz Dhandala
247c4bc637 feat(analytics): populate attributeKeys on telemetry records and prefer attributeKeys column when querying
- Add TelemetryUtil.getAttributeKeys helper.
- Populate attributeKeys for Logs, Metrics, Spans, Alerts, Incidents and Monitor metrics during ingest/processing.
- Ensure FluentIngest initializes attributeKeys for raw logs.
- Update TelemetryAttributeService to use attributeKeys column when available and fall back to JSONExtractKeys(attrs).
- Improve attribute extraction SQL to filter empty/null attribute keys.
2025-10-21 22:05:48 +01:00
Nawaz Dhandala
72e2edd49d feat(analytics): add attributeKeys column to Log/Metric/Span, update projections and add migration 2025-10-21 22:04:49 +01:00
Nawaz Dhandala
1be494169d fix(analytics): rename ClickHouse projections to singular 'AttributeProjection' for Log, Metric and Span models 2025-10-21 21:05:52 +01:00
Nawaz Dhandala
3796053403 style(analytics): remove extra blank line in doesProjectionExist catch block 2025-10-21 21:04:17 +01:00
Nawaz Dhandala
f316bfb9fd feat(analytics): materialize projections after creation and add error handling
Call materializeProjection after creating a projection to ensure it is materialized.
Introduce materializeProjection to run ALTER TABLE ... MATERIALIZE PROJECTION and
add escapeIdentifier for safe identifier quoting. Wrap the projection-existence
query in try/catch with logging and rethrow on failure.
2025-10-21 21:03:10 +01:00
Nawaz Dhandala
40f9613bd3 chore(docker): bump clickhouse server image to 25.7 in docker-compose.base.yml 2025-10-21 20:49:33 +01:00
Nawaz Dhandala
a3bb9f003f style(analytics): add debug logging to projection processing in TableManegement.ts 2025-10-21 19:48:41 +01:00
Nawaz Dhandala
70714b2f21 feat(analytics): add ClickHouse projections for attributes to Log, Metric and Span models
Add projection definitions (ALTER TABLE ...) to Log, Metric and Span analytics models to extract distinct attribute keys per project (LogItemAttributesProjection, MetricItemAttributesProjection, SpanItemAttributesProjection).

style(telemetry): simplify empty query object in DeleteOldData.ts
2025-10-21 19:46:48 +01:00
Nawaz Dhandala
a62543bff0 feat(telemetry): iterate active projects and prune telemetry data per service
- Use ProjectService.getActiveProjectStatusQuery() to fetch active projects
- Query telemetry services per project and scope deletions by project.id
- Add try/catch blocks and logger calls for better error isolation and robustness
- Preserve default retention (15 days) and perform deletes for logs, spans, and metrics
2025-10-21 19:27:19 +01:00
Nawaz Dhandala
826a2006d0 style(telemetry): reformat retentionCutOff assignment in DeleteOldData.ts 2025-10-21 18:55:07 +01:00
Nawaz Dhandala
a4075fe349 feat(telemetry): compute single retention cutoff, prune by primary keys, and delete metrics
- compute retentionCutOff once per service instead of repeated calls
- use primary-key columns (time/startTime) for efficient pruning of logs and spans
- implement MetricService.deleteBy to remove old OpenTelemetry metrics
- minor import/order adjustments
2025-10-21 18:53:44 +01:00
Nawaz Dhandala
eb70a923f3 style(analytics): add explicit types for executeQuery result and json response in TableManegement.ts 2025-10-21 16:00:38 +01:00
Nawaz Dhandala
11d39898d3 style(analytics): normalize escaped variable formatting in TableManegement.ts 2025-10-21 15:54:45 +01:00
Nawaz Dhandala
8e5907d523 feat(analytics): check for existing projections before creating; add doesProjectionExist and escapeForQuery helpers and required imports 2025-10-21 15:51:07 +01:00
Nawaz Dhandala
c9e57fcb19 feat(analytics): add Projection type and use typed projections in models and table creation 2025-10-21 15:46:57 +01:00
Nawaz Dhandala
8c6bc331a4 style(analytics): normalize AnalyticsTableColumn constructor formatting
Reformat multi-line new AnalyticsTableColumn(...) expressions to a consistent style and tidy up minor inline description/spacing in ExceptionInstance, Log, Metric and Span models.
2025-10-21 14:56:01 +01:00
Nawaz Dhandala
20129e606a feat(analytics): add projections support to models and apply projection statements on table creation
- add projections param, backing field and accessors to AnalyticsBaseModel (defaults to [])
- update analytics models (ExceptionInstance, Log, Metric, MonitorLog, Span) to include projections: []
- execute each model.projections SQL statement in AnalyticsTableManagement when creating tables
2025-10-21 14:37:22 +01:00
Nawaz Dhandala
55e0eede68 refactor(analytics): extract AnalyticsTableColumn definitions into local variables
Move repeated new AnalyticsTableColumn(...) definitions into named constants and reuse them in tableColumns for Log, Metric, Span, MonitorLog and ExceptionInstance models. Reduces duplication and improves readability without changing behavior.
2025-10-21 14:18:57 +01:00
Nawaz Dhandala
ffa603503d fix: update image tag to use GitHub Container Registry for release builds 2025-10-21 11:19:04 +01:00
Nawaz Dhandala
18429caabe fix: refactor Microsoft Teams pagination logic to use configurable maximum pages 2025-10-20 15:31:18 +01:00
Simon Larsen
ccbfef1cfe Merge pull request #2039 from tollercode/fix/pagination-msteams-fetch-teams-list
fix: pagination msteams fetch teams list
2025-10-20 15:20:37 +01:00
Nils T
4789a15ce7 fix: make maximum pages for fetching teams configurable to prevent infinite loop 2025-10-20 12:47:13 +00:00
Nils T
9633307de0 fix: add pagination limit to prevent infinite loop when fetching teams from Microsoft Graph API 2025-10-20 12:43:24 +00:00
Nils T
ee5c2a0f33 fix: implement pagination for fetching all teams from Microsoft Graph API 2025-10-20 12:17:26 +00:00
snyk-bot
5c3b181507 fix: Probe/package.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-PLAYWRIGHTCORE-13553173
2025-10-20 10:50:12 +00:00
Nawaz Dhandala
6bbf2f866c fix: update hard delete logic to retain items for 3 years instead of 120 days 2025-10-18 10:37:53 +01:00
Nawaz Dhandala
60e4b51ec9 fix: update Microsoft Teams app manifest version to 1.5.0 2025-10-17 10:43:32 +01:00
Nawaz Dhandala
f244f872d4 fix: improve formatting and consistency in tenant ID retrieval checks in MicrosoftTeamsUtil 2025-10-17 10:14:45 +01:00
Nawaz Dhandala
3e910c1308 fix: ensure tenant ID is retrieved and validated in MicrosoftTeamsUtil 2025-10-17 10:13:58 +01:00
Nawaz Dhandala
2b0067fd17 fix: update tenant ID retrieval logic in MicrosoftTeamsAPI 2025-10-17 10:11:54 +01:00
Nawaz Dhandala
cb23b6b55b Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-10-16 21:01:31 +01:00
Nawaz Dhandala
4406d52307 feat: add queries management section to Clickhouse documentation 2025-10-16 20:52:52 +01:00
Simon Larsen
03b6bec6d0 feat: add validation for required fields in WorkspaceProjectAuthTokenService and MicrosoftTeamsUtil 2025-10-16 20:36:45 +01:00
Nawaz Dhandala
ff0bd88b02 fix: ensure Microsoft Teams tenant ID is provided before creating Bot Framework adapter 2025-10-16 20:23:50 +01:00
Simon Larsen
94290c77db feat: add advanced filters toggle functionality across various components 2025-10-16 16:35:57 +01:00
Simon Larsen
003e44d331 refactor: clean up code formatting and improve readability in TelemetryAttributeService and LogsViewer components 2025-10-16 16:08:34 +01:00
Simon Larsen
b4cf798246 Merge branch 'master' of github.com:OneUptime/oneuptime 2025-10-16 16:06:49 +01:00
Simon Larsen
e607367809 feat: enhance Filters and LogsViewer components with advanced filters functionality and loading states 2025-10-16 16:06:14 +01:00
Nawaz Dhandala
82a7b5794c fix: reduce row scan limit from 100000 to 10000 in TelemetryAttributeService 2025-10-16 15:59:28 +01:00
Nawaz Dhandala
37c1674029 fix: reduce cache stale time from 10 to 5 minutes in TelemetryAttributeService 2025-10-16 15:59:12 +01:00
Nawaz Dhandala
cd2a5222b8 fix: add expiration options for cache entries in GlobalCache and TelemetryAttributeService 2025-10-16 15:58:53 +01:00
Nawaz Dhandala
875d6c5df9 fix: enhance caching mechanism and improve attribute fetching in TelemetryAttributeService 2025-10-16 15:52:44 +01:00
Nawaz Dhandala
7eea90a55a fix: format constructor parameters for consistency in IdentityAPI 2025-10-16 15:42:13 +01:00
Nawaz Dhandala
c401b86a2e fix: update route handling to use Route class for URL construction across multiple services 2025-10-16 15:30:18 +01:00
Nawaz Dhandala
e96f781157 fix: refactor URL construction for view details and redirect routes in UserNotificationLogTimelineAPI 2025-10-16 15:05:57 +01:00
Nawaz Dhandala
73cc5f8f9e fix: update route handling in EnvironmentConfig for consistency 2025-10-16 15:04:52 +01:00
Nawaz Dhandala
0ee9876de9 fix: format code for consistency in OtelIngest services 2025-10-16 14:22:09 +01:00
Nawaz Dhandala
b76a75479d Implement OpenTelemetry Ingest Services for Logs, Metrics, and Traces
- Added OtelIngestBaseService as a base class for common functionality.
- Implemented OtelLogsIngestService to handle log ingestion, including processing and flushing logs to the database.
- Implemented OtelMetricsIngestService to handle metric ingestion, including processing and flushing metrics to the database.
- Implemented OtelTracesIngestService to handle trace ingestion, including processing spans and exceptions, and flushing them to the database.
- Introduced error handling and logging throughout the ingestion process to ensure robustness.
- Utilized CaptureSpan decorator for telemetry tracking in key methods.
- Enhanced service name extraction from request attributes for better traceability.
2025-10-16 14:21:44 +01:00
Simon Larsen
79b9cf4c06 Merge pull request #2034 from OneUptime/telemetry-attr
Telemetry attr
2025-10-15 18:13:17 +01:00
Nawaz Dhandala
a6da59c966 fix: add Results and DbJSONResponse types to AnalyticsDatabaseService and update TelemetryAttributeService to use them 2025-10-15 18:12:34 +01:00
Nawaz Dhandala
b84695feb9 fix: remove TelemetryType references and indexAttributes calls from various services 2025-10-15 17:21:56 +01:00
Nawaz Dhandala
b77973441d fix: remove TelemetryAttribute and TelemetryAttributeService references and related code 2025-10-15 16:26:05 +01:00
Nawaz Dhandala
27e9c07c57 fix: remove padding from MarkdownViewer component 2025-10-15 15:57:45 +01:00
Nawaz Dhandala
cbf8684d8c fix: remove force garbage collection method and its calls from OtelIngestService 2025-10-15 15:47:22 +01:00
Nawaz Dhandala
87057757a5 fix: remove --expose-gc option from NODE_OPTIONS in Dockerfile, nodemon.json, and package.json 2025-10-15 15:42:29 +01:00
Nawaz Dhandala
23b587f0f6 fix: remove unused EnableWorkflow import from TelemetryException model 2025-10-15 15:22:53 +01:00
Nawaz Dhandala
81c7a4eeb7 fix: remove EnableWorkflow decorator from TelemetryException model 2025-10-15 14:49:56 +01:00
Nawaz Dhandala
549dbfd6c7 fix: format nodemon.json and adjust package.json dev script indentation 2025-10-15 13:45:54 +01:00
Nawaz Dhandala
a2eac673eb fix: update NODE_OPTIONS to include --expose-gc and --use-openssl-ca in Dockerfile and nodemon configuration 2025-10-15 13:45:07 +01:00
Nawaz Dhandala
086a0a661d feat: implement dynamic batch size configuration for OpenTelemetry ingestion 2025-10-15 13:39:11 +01:00
Nawaz Dhandala
da0620eafa fix: improve logging of request body in OpenTelemetry middleware 2025-10-15 12:53:15 +01:00
Nawaz Dhandala
ca90ab0db4 fix: update Kubernetes cheatsheet to include OOMKilled pods in cleanup section 2025-10-15 12:36:06 +01:00
Nawaz Dhandala
94dacc20db feat: add Kubernetes cheatsheet for pod cleanup commands 2025-10-15 12:30:34 +01:00
Nawaz Dhandala
7e887bd4cd feat: enhance app description with HTML formatting and update support links 2025-10-14 19:37:28 +01:00
Nawaz Dhandala
b50dfcdf1c fix: update Microsoft Teams setup guide URL to correct link 2025-10-14 19:02:34 +01:00
Nawaz Dhandala
e2bc0ea4aa feat: add welcome card state management to prevent duplicate sends 2025-10-14 19:02:06 +01:00
Simon Larsen
7d018b94d3 Merge pull request #2033 from OneUptime/fix-change-plan
Fix change plan
2025-10-14 14:12:05 +01:00
Nawaz Dhandala
9fb7a70dc9 feat: add createOrUpdateApiUrl prop to CardModelDetail and update Billing page to generate change plan API URL 2025-10-14 14:02:45 +01:00
Nawaz Dhandala
a37b3fc0b3 feat: implement change plan API with billing permission checks 2025-10-14 13:54:38 +01:00
Nawaz Dhandala
c06ef5ddfc fix: add ManageProjectBilling permission for project updates 2025-10-14 13:34:12 +01:00
Nawaz Dhandala
0fb7174e94 chore: remove unused devDependencies from package.json and package-lock.json 2025-10-14 11:59:17 +01:00
Nawaz Dhandala
84a7cd976d fix: update template IDs for AlertCreated and IncidentCreated in WhatsAppTemplates 2025-10-14 10:44:27 +01:00
Nawaz Dhandala
f7c8c00f04 t psh
:wq!
Merge branch 'master' into release
2025-10-13 13:33:02 +01:00
Simon Larsen
51c3fcd3ca Merge pull request #2032 from OneUptime/whatsapp-webhook
Whatsapp webhook
2025-10-13 13:32:25 +01:00
Nawaz Dhandala
74c3dde7f1 refactor: improve type annotations and enhance readability in WhatsApp API and LogsTable components 2025-10-13 13:31:56 +01:00
Nawaz Dhandala
6bf45f6f31 refactor: streamline code formatting and improve readability in WhatsApp components and services 2025-10-13 13:26:57 +01:00
Nawaz Dhandala
4de4ad8022 feat: enhance status color handling in WhatsAppLogsTable component 2025-10-13 13:26:19 +01:00
Nawaz Dhandala
e263900115 feat: add migration for metaWhatsAppWebhookVerifyToken in GlobalConfig 2025-10-13 13:15:57 +01:00
Nawaz Dhandala
34aaa34fb3 refactor: remove shorthand webhook URL and simplify callback URL instructions in WhatsApp setup markdown 2025-10-13 13:14:10 +01:00
Nawaz Dhandala
9f72a8e554 feat: add webhook verification and enhance WhatsApp status handling 2025-10-13 12:44:48 +01:00
Nawaz Dhandala
8dfabfd96f feat: enhance WhatsApp status handling and add webhook verify token to GlobalConfig 2025-10-13 12:33:39 +01:00
Nawaz Dhandala
14cdc3ea86 chore: update package.json and package-lock.json to add ts-jest and new dependencies 2025-10-13 12:31:52 +01:00
Nawaz Dhandala
cd6abe63ea refactor: format code and ensure consistent import/export statements across migration and mobile app files 2025-10-13 11:52:41 +01:00
Nawaz Dhandala
aeb6d53b9d refactor: remove unused imports and simplify status message handling in WhatsAppLogsTable 2025-10-13 11:41:29 +01:00
Nawaz Dhandala
efa7224718 fix: update ConfirmModal description styling for better text wrapping 2025-10-13 10:56:46 +01:00
Nawaz Dhandala
a2c406d7cc refactor: simplify database properties handling in WhatsAppLogAPI 2025-10-13 10:51:52 +01:00
Nawaz Dhandala
1933e37beb refactor: move getMetaWhatsAppConfig call to improve message handling logic 2025-10-13 10:43:46 +01:00
Nawaz Dhandala
0b0336f9ea Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-10-13 10:35:51 +01:00
Simon Larsen
9304079a1c feat: implement WhatsAppLogAPI for fetching WhatsApp message status and update WhatsAppLogsTable to display message ID 2025-10-13 10:35:41 +01:00
Nawaz Dhandala
1e2dcf332b fix: enhance type handling for return values in VM router 2025-10-13 10:15:46 +01:00
Simon Larsen
ca860f54a8 feat: update permissions to replace ReadSmsLog with ReadWhatsAppLog in WhatsAppLog model and Permission types 2025-10-13 10:01:05 +01:00
Simon Larsen
513e4146ed feat: add migration for WhatsAppMessageId field in WhatsAppLog table with index 2025-10-13 09:59:33 +01:00
Simon Larsen
43d31ddbe9 feat: add WhatsAppMessageId field to WhatsAppLog model with access control and metadata 2025-10-13 09:55:22 +01:00
Simon Larsen
18231f42aa feat: update MobileAppPrompt.md for UI guidelines and add react-native-webview dependency in package.json; create AuthSession type 2025-10-12 12:38:36 +01:00
Simon Larsen
60291cc218 docs: update MobileAppPrompt.md to clarify usage of @oneuptime/common package 2025-10-12 12:33:46 +01:00
Nawaz Dhandala
036b29da51 fix: standardize error messages and improve formatting in MicrosoftTeamsAPI 2025-10-11 11:12:49 +01:00
Nawaz Dhandala
d0d59147ae fix: add download button for app manifest and manual sideloading instructions 2025-10-11 11:12:15 +01:00
snyk-bot
565fbe6cd3 fix: Nginx/Dockerfile.tpl to reduce vulnerabilities 2025-10-11 08:37:10 +00:00
snyk-bot
12c3f9b25c fix: package.json & package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-NODEMAILER-13378253
2025-10-11 08:35:50 +00:00
snyk-bot
9ee0e0f3cb fix: Common/package.json & Common/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-NODEMAILER-13378253
2025-10-08 05:10:50 +00:00
snyk-bot
50be2a666c fix: Probe/package.json & Probe/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-AXIOS-12613773
2025-09-15 08:22:25 +00:00
443 changed files with 75518 additions and 43237 deletions

49
.github/workflows/npm-audit-fix.yml vendored Normal file
View File

@@ -0,0 +1,49 @@
name: NPM Audit Fix
on:
schedule:
- cron: '0 0 * * *'
workflow_dispatch:
permissions:
contents: write
pull-requests: write
jobs:
npm-audit-fix:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: Run npm audit fix across packages
run: npm run audit-fix
- name: Detect changes
id: changes
run: |
if git status --porcelain | grep .; then
echo "has_changes=true" >> $GITHUB_OUTPUT
else
echo "has_changes=false" >> $GITHUB_OUTPUT
fi
- name: Create pull request
if: steps.changes.outputs.has_changes == 'true'
uses: peter-evans/create-pull-request@v6
with:
commit-message: "chore: npm audit fix"
title: "chore: npm audit fix"
body: |
Automated npm audit fix run.
Workflow: ${{ github.workflow }}
Run ID: ${{ github.run_id }}
branch: chore/npm-audit-fix
delete-branch: true

View File

@@ -177,8 +177,21 @@ jobs:
- name: Publish to npm
run: |
cd MCP
npm publish --access public
echo "✅ Published @oneuptime/mcp-server@${{ steps.version.outputs.version }} to npm"
set +e
PUBLISH_OUTPUT=$(npm publish --access public 2>&1)
PUBLISH_EXIT=$?
set -e
echo "$PUBLISH_OUTPUT"
if [ $PUBLISH_EXIT -ne 0 ]; then
if echo "$PUBLISH_OUTPUT" | grep -q "You cannot publish over the previously published versions"; then
echo "⚠️ npm publish skipped: version already published"
else
echo "❌ npm publish failed"
exit $PUBLISH_EXIT
fi
else
echo "✅ Published @oneuptime/mcp-server@${{ steps.version.outputs.version }} to npm"
fi
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -205,13 +218,24 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{ steps.version.outputs.version }}"
docker buildx build \
--platform linux/amd64,linux/arm64 \
--file ./MCP/Dockerfile.tpl \
--tag oneuptime/mcp-server:${{ steps.version.outputs.version }} \
--tag ghcr.io/oneuptime/mcp-server:${{ steps.version.outputs.version }} \
--tag oneuptime/mcp-server:${VERSION} \
--tag ghcr.io/oneuptime/mcp-server:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{ steps.version.outputs.version }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
--push .
docker buildx build \
--platform linux/amd64,linux/arm64 \
--file ./MCP/Dockerfile.tpl \
--tag oneuptime/mcp-server:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/mcp-server:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
--push .
echo "✅ Pushed Docker images to Docker Hub and GitHub Container Registry"
@@ -282,14 +306,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Nginx/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/nginx:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/nginx:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/nginx:${VERSION} \
--tag ghcr.io/oneuptime/nginx:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Nginx/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/nginx:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/nginx:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
e2e-docker-image-deploy:
@@ -352,14 +388,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./E2E/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/e2e:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/e2e:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/e2e:${VERSION} \
--tag ghcr.io/oneuptime/e2e:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./E2E/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/e2e:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/e2e:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
isolated-vm-docker-image-deploy:
@@ -422,14 +470,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./IsolatedVM/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/isolated-vm:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/isolated-vm:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/isolated-vm:${VERSION} \
--tag ghcr.io/oneuptime/isolated-vm:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./IsolatedVM/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/isolated-vm:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/isolated-vm:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
home-docker-image-deploy:
@@ -492,14 +552,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Home/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/home:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/home:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/home:${VERSION} \
--tag ghcr.io/oneuptime/home:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Home/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/home:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/home:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -565,14 +637,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./TestServer/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/test-server:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/test-server:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/test-server:${VERSION} \
--tag ghcr.io/oneuptime/test-server:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./TestServer/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/test-server:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/test-server:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
otel-collector-docker-image-deploy:
@@ -635,14 +719,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./OTelCollector/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/otel-collector:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/otel-collector:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/otel-collector:${VERSION} \
--tag ghcr.io/oneuptime/otel-collector:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./OTelCollector/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/otel-collector:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/otel-collector:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -707,14 +803,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./StatusPage/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/status-page:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/status-page:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/status-page:${VERSION} \
--tag ghcr.io/oneuptime/status-page:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./StatusPage/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/status-page:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/status-page:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
test-docker-image-deploy:
@@ -777,14 +885,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Tests/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/test:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/test:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/test:${VERSION} \
--tag ghcr.io/oneuptime/test:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Tests/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/test:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/test:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
probe-ingest-docker-image-deploy:
@@ -847,14 +967,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./ProbeIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/probe-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/probe-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/probe-ingest:${VERSION} \
--tag ghcr.io/oneuptime/probe-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./ProbeIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/probe-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/probe-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -918,14 +1050,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./ServerMonitorIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/server-monitor-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/server-monitor-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/server-monitor-ingest:${VERSION} \
--tag ghcr.io/oneuptime/server-monitor-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./ServerMonitorIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/server-monitor-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/server-monitor-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -990,14 +1134,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./OpenTelemetryIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/open-telemetry-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/open-telemetry-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/open-telemetry-ingest:${VERSION} \
--tag ghcr.io/oneuptime/open-telemetry-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./OpenTelemetryIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/open-telemetry-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/open-telemetry-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1061,14 +1217,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./IncomingRequestIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/incoming-request-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/incoming-request-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/incoming-request-ingest:${VERSION} \
--tag ghcr.io/oneuptime/incoming-request-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./IncomingRequestIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/incoming-request-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/incoming-request-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
fluent-ingest-docker-image-deploy:
@@ -1131,14 +1299,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./FluentIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/fluent-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/fluent-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/fluent-ingest:${VERSION} \
--tag ghcr.io/oneuptime/fluent-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./FluentIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/fluent-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/fluent-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
probe-docker-image-deploy:
@@ -1201,14 +1381,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Probe/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/probe:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/probe:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/probe:${VERSION} \
--tag ghcr.io/oneuptime/probe:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Probe/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/probe:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/probe:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
admin-dashboard-docker-image-deploy:
@@ -1271,14 +1463,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./AdminDashboard/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/admin-dashboard:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/admin-dashboard:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/admin-dashboard:${VERSION} \
--tag ghcr.io/oneuptime/admin-dashboard:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./AdminDashboard/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/admin-dashboard:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/admin-dashboard:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1342,14 +1546,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Dashboard/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/dashboard:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/dashboard:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/dashboard:${VERSION} \
--tag ghcr.io/oneuptime/dashboard:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Dashboard/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/dashboard:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/dashboard:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
app-docker-image-deploy:
@@ -1412,14 +1628,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./App/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/app:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/app:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/app:${VERSION} \
--tag ghcr.io/oneuptime/app:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./App/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/app:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/app:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1483,14 +1711,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Copilot/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/copilot:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/copilot:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/copilot:${VERSION} \
--tag ghcr.io/oneuptime/copilot:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Copilot/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/copilot:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/copilot:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
accounts-docker-image-deploy:
@@ -1553,14 +1793,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Accounts/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/accounts:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/accounts:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/accounts:${VERSION} \
--tag ghcr.io/oneuptime/accounts:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Accounts/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/accounts:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/accounts:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1667,14 +1919,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./LLM/Dockerfile \
--platform linux/amd64 \
--push \
--tag oneuptime/llm:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/llm:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/llm:${VERSION} \
--tag ghcr.io/oneuptime/llm:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
./LLM
docker buildx build \
--file ./LLM/Dockerfile \
--platform linux/amd64 \
--push \
--tag oneuptime/llm:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/llm:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
./LLM
docs-docker-image-deploy:
@@ -1739,14 +2003,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Docs/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/docs:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/docs:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/docs:${VERSION} \
--tag ghcr.io/oneuptime/docs:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Docs/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/docs:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/docs:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1814,14 +2090,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Worker/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/worker:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/worker:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/worker:${VERSION} \
--tag ghcr.io/oneuptime/worker:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Worker/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/worker:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/worker:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1889,14 +2177,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./Workflow/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/workflow:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/workflow:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/workflow:${VERSION} \
--tag ghcr.io/oneuptime/workflow:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Workflow/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/workflow:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/workflow:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -2024,14 +2324,26 @@ jobs:
timeout_minutes: 45
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx build \
--file ./APIReference/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/api-reference:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/api-reference:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag oneuptime/api-reference:${VERSION} \
--tag ghcr.io/oneuptime/api-reference:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./APIReference/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/api-reference:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/api-reference:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
push-release-tags:
@@ -2122,7 +2434,7 @@ jobs:
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx imagetools create \
--tag oneuptime/${{ matrix.image }}:release \
oneuptime/${{ matrix.image }}:${VERSION}
ghcr.io/oneuptime/${{ matrix.image }}:${VERSION}
- name: Create GHCR release tag from version
run: |
@@ -2130,6 +2442,20 @@ jobs:
docker buildx imagetools create \
--tag ghcr.io/oneuptime/${{ matrix.image }}:release \
ghcr.io/oneuptime/${{ matrix.image }}:${VERSION}
- name: Create Docker Hub enterprise release tag from version
run: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx imagetools create \
--tag oneuptime/${{ matrix.image }}:enterprise-release \
ghcr.io/oneuptime/${{ matrix.image }}:enterprise-${VERSION}
- name: Create GHCR enterprise release tag from version
run: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}"
docker buildx imagetools create \
--tag ghcr.io/oneuptime/${{ matrix.image }}:enterprise-release \
ghcr.io/oneuptime/${{ matrix.image }}:enterprise-${VERSION}

View File

@@ -197,15 +197,28 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{ steps.version.outputs.version }}"
docker buildx build \
--platform linux/amd64,linux/arm64 \
--file ./MCP/Dockerfile.tpl \
--tag oneuptime/mcp-server:${{ steps.version.outputs.version }} \
--tag oneuptime/mcp-server:${VERSION} \
--tag oneuptime/mcp-server:test \
--tag ghcr.io/oneuptime/mcp-server:${{ steps.version.outputs.version }} \
--tag ghcr.io/oneuptime/mcp-server:${VERSION} \
--tag ghcr.io/oneuptime/mcp-server:test \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{ steps.version.outputs.version }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
--push .
docker buildx build \
--platform linux/amd64,linux/arm64 \
--file ./MCP/Dockerfile.tpl \
--tag oneuptime/mcp-server:enterprise-${VERSION} \
--tag oneuptime/mcp-server:enterprise-test \
--tag ghcr.io/oneuptime/mcp-server:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/mcp-server:enterprise-test \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
--push .
echo "✅ Pushed test Docker images to Docker Hub and GitHub Container Registry"
@@ -301,16 +314,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./LLM/Dockerfile \
--platform linux/amd64 \
--push \
--tag oneuptime/llm:test \
--tag oneuptime/llm:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/llm:${VERSION} \
--tag ghcr.io/oneuptime/llm:test \
--tag ghcr.io/oneuptime/llm:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/llm:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
./LLM
docker buildx build \
--file ./LLM/Dockerfile \
--platform linux/amd64 \
--push \
--tag oneuptime/llm:enterprise-test \
--tag oneuptime/llm:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/llm:enterprise-test \
--tag ghcr.io/oneuptime/llm:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
./LLM
@@ -375,16 +402,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Nginx/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/nginx:test \
--tag oneuptime/nginx:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/nginx:${VERSION} \
--tag ghcr.io/oneuptime/nginx:test \
--tag ghcr.io/oneuptime/nginx:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/nginx:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Nginx/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/nginx:enterprise-test \
--tag oneuptime/nginx:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/nginx:enterprise-test \
--tag ghcr.io/oneuptime/nginx:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -449,15 +490,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./E2E/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/e2e:test \
--tag oneuptime/e2e:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/e2e:${VERSION} \
--tag ghcr.io/oneuptime/e2e:test \
--tag ghcr.io/oneuptime/e2e:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/e2e:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./E2E/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/e2e:enterprise-test \
--tag oneuptime/e2e:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/e2e:enterprise-test \
--tag ghcr.io/oneuptime/e2e:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
test-server-docker-image-deploy:
@@ -521,16 +577,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./TestServer/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/test-server:test \
--tag oneuptime/test-server:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/test-server:${VERSION} \
--tag ghcr.io/oneuptime/test-server:test \
--tag ghcr.io/oneuptime/test-server:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/test-server:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./TestServer/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/test-server:enterprise-test \
--tag oneuptime/test-server:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/test-server:enterprise-test \
--tag ghcr.io/oneuptime/test-server:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
otel-collector-docker-image-deploy:
@@ -594,16 +664,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./OTelCollector/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/otel-collector:test \
--tag oneuptime/otel-collector:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/otel-collector:${VERSION} \
--tag ghcr.io/oneuptime/otel-collector:test \
--tag ghcr.io/oneuptime/otel-collector:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/otel-collector:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./OTelCollector/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/otel-collector:enterprise-test \
--tag oneuptime/otel-collector:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/otel-collector:enterprise-test \
--tag ghcr.io/oneuptime/otel-collector:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
isolated-vm-docker-image-deploy:
@@ -667,16 +751,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./IsolatedVM/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/isolated-vm:test \
--tag oneuptime/isolated-vm:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/isolated-vm:${VERSION} \
--tag ghcr.io/oneuptime/isolated-vm:test \
--tag ghcr.io/oneuptime/isolated-vm:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/isolated-vm:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./IsolatedVM/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/isolated-vm:enterprise-test \
--tag oneuptime/isolated-vm:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/isolated-vm:enterprise-test \
--tag ghcr.io/oneuptime/isolated-vm:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
home-docker-image-deploy:
@@ -740,16 +838,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Home/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/home:test \
--tag oneuptime/home:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/home:${VERSION} \
--tag ghcr.io/oneuptime/home:test \
--tag ghcr.io/oneuptime/home:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/home:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Home/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/home:enterprise-test \
--tag oneuptime/home:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/home:enterprise-test \
--tag ghcr.io/oneuptime/home:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -815,16 +927,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./StatusPage/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/status-page:test \
--tag oneuptime/status-page:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/status-page:${VERSION} \
--tag ghcr.io/oneuptime/status-page:test \
--tag ghcr.io/oneuptime/status-page:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/status-page:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./StatusPage/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/status-page:enterprise-test \
--tag oneuptime/status-page:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/status-page:enterprise-test \
--tag ghcr.io/oneuptime/status-page:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -890,16 +1016,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Tests/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/test:test \
--tag oneuptime/test:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/test:${VERSION} \
--tag ghcr.io/oneuptime/test:test \
--tag ghcr.io/oneuptime/test:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/test:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Tests/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/test:enterprise-test \
--tag oneuptime/test:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/test:enterprise-test \
--tag ghcr.io/oneuptime/test:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
probe-ingest-docker-image-deploy:
@@ -963,16 +1103,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./ProbeIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/probe-ingest:test \
--tag oneuptime/probe-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/probe-ingest:${VERSION} \
--tag ghcr.io/oneuptime/probe-ingest:test \
--tag ghcr.io/oneuptime/probe-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/probe-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./ProbeIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/probe-ingest:enterprise-test \
--tag oneuptime/probe-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/probe-ingest:enterprise-test \
--tag ghcr.io/oneuptime/probe-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1038,16 +1192,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./ServerMonitorIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/server-monitor-ingest:test \
--tag oneuptime/server-monitor-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/server-monitor-ingest:${VERSION} \
--tag ghcr.io/oneuptime/server-monitor-ingest:test \
--tag ghcr.io/oneuptime/server-monitor-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/server-monitor-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./ServerMonitorIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/server-monitor-ingest:enterprise-test \
--tag oneuptime/server-monitor-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/server-monitor-ingest:enterprise-test \
--tag ghcr.io/oneuptime/server-monitor-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1114,17 +1282,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./IncomingRequestIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/incoming-request-ingest:test \
--tag oneuptime/incoming-request-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/incoming-request-ingest:${VERSION} \
--tag ghcr.io/oneuptime/incoming-request-ingest:test \
--tag ghcr.io/oneuptime/incoming-request-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/incoming-request-ingest:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./IncomingRequestIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/incoming-request-ingest:enterprise-test \
--tag oneuptime/incoming-request-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/incoming-request-ingest:enterprise-test \
--tag ghcr.io/oneuptime/incoming-request-ingest:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
open-telemetry-ingest-docker-image-deploy:
@@ -1188,16 +1369,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./OpenTelemetryIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/open-telemetry-ingest:test \
--tag oneuptime/open-telemetry-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/open-telemetry-ingest:${VERSION} \
--tag ghcr.io/oneuptime/open-telemetry-ingest:test \
--tag ghcr.io/oneuptime/open-telemetry-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/open-telemetry-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./OpenTelemetryIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/open-telemetry-ingest:enterprise-test \
--tag oneuptime/open-telemetry-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/open-telemetry-ingest:enterprise-test \
--tag ghcr.io/oneuptime/open-telemetry-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
fluent-ingest-docker-image-deploy:
@@ -1261,16 +1456,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./FluentIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/fluent-ingest:test \
--tag oneuptime/fluent-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/fluent-ingest:${VERSION} \
--tag ghcr.io/oneuptime/fluent-ingest:test \
--tag ghcr.io/oneuptime/fluent-ingest:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/fluent-ingest:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./FluentIngest/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/fluent-ingest:enterprise-test \
--tag oneuptime/fluent-ingest:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/fluent-ingest:enterprise-test \
--tag ghcr.io/oneuptime/fluent-ingest:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
probe-docker-image-deploy:
@@ -1334,16 +1543,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Probe/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/probe:test \
--tag oneuptime/probe:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/probe:${VERSION} \
--tag ghcr.io/oneuptime/probe:test \
--tag ghcr.io/oneuptime/probe:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/probe:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Probe/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/probe:enterprise-test \
--tag oneuptime/probe:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/probe:enterprise-test \
--tag ghcr.io/oneuptime/probe:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
dashboard-docker-image-deploy:
@@ -1407,16 +1630,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Dashboard/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/dashboard:test \
--tag oneuptime/dashboard:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/dashboard:${VERSION} \
--tag ghcr.io/oneuptime/dashboard:test \
--tag ghcr.io/oneuptime/dashboard:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/dashboard:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Dashboard/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/dashboard:enterprise-test \
--tag oneuptime/dashboard:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/dashboard:enterprise-test \
--tag ghcr.io/oneuptime/dashboard:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
admin-dashboard-docker-image-deploy:
@@ -1480,16 +1717,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./AdminDashboard/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/admin-dashboard:test \
--tag oneuptime/admin-dashboard:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/admin-dashboard:${VERSION} \
--tag ghcr.io/oneuptime/admin-dashboard:test \
--tag ghcr.io/oneuptime/admin-dashboard:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/admin-dashboard:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./AdminDashboard/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/admin-dashboard:enterprise-test \
--tag oneuptime/admin-dashboard:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/admin-dashboard:enterprise-test \
--tag ghcr.io/oneuptime/admin-dashboard:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
app-docker-image-deploy:
@@ -1553,16 +1804,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./App/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/app:test \
--tag oneuptime/app:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/app:${VERSION} \
--tag ghcr.io/oneuptime/app:test \
--tag ghcr.io/oneuptime/app:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/app:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./App/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/app:enterprise-test \
--tag oneuptime/app:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/app:enterprise-test \
--tag ghcr.io/oneuptime/app:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1629,17 +1894,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./APIReference/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/api-reference:test \
--tag oneuptime/api-reference:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/api-reference:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/api-reference:${VERSION} \
--tag ghcr.io/oneuptime/api-reference:test \
--tag ghcr.io/oneuptime/api-reference:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/api-reference:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./APIReference/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/api-reference:enterprise-test \
--tag oneuptime/api-reference:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/api-reference:enterprise-test \
--tag ghcr.io/oneuptime/api-reference:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1705,17 +1983,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Accounts/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/accounts:test \
--tag oneuptime/accounts:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/accounts:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/accounts:${VERSION} \
--tag ghcr.io/oneuptime/accounts:test \
--tag ghcr.io/oneuptime/accounts:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/accounts:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Accounts/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/accounts:enterprise-test \
--tag oneuptime/accounts:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/accounts:enterprise-test \
--tag ghcr.io/oneuptime/accounts:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
worker-docker-image-deploy:
@@ -1779,16 +2070,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Worker/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/worker:${VERSION} \
--tag oneuptime/worker:test \
--tag oneuptime/worker:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/worker:${VERSION} \
--tag ghcr.io/oneuptime/worker:test \
--tag ghcr.io/oneuptime/worker:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Worker/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/worker:enterprise-test \
--tag oneuptime/worker:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/worker:enterprise-test \
--tag ghcr.io/oneuptime/worker:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
copilot-docker-image-deploy:
@@ -1852,16 +2157,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Copilot/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/copilot:test \
--tag oneuptime/copilot:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/copilot:${VERSION} \
--tag ghcr.io/oneuptime/copilot:test \
--tag ghcr.io/oneuptime/copilot:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/copilot:${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Copilot/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/copilot:enterprise-test \
--tag oneuptime/copilot:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/copilot:enterprise-test \
--tag ghcr.io/oneuptime/copilot:enterprise-${VERSION} \
--build-arg APP_VERSION=${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -1926,17 +2245,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Workflow/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/workflow:test \
--tag oneuptime/workflow:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/workflow:${VERSION} \
--tag ghcr.io/oneuptime/workflow:test \
--tag ghcr.io/oneuptime/workflow:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--tag ghcr.io/oneuptime/workflow:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Workflow/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/workflow:enterprise-test \
--tag oneuptime/workflow:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/workflow:enterprise-test \
--tag ghcr.io/oneuptime/workflow:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.
@@ -2001,16 +2333,30 @@ jobs:
timeout_minutes: 30
max_attempts: 3
command: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
docker buildx build \
--file ./Docs/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/docs:test \
--tag oneuptime/docs:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag oneuptime/docs:${VERSION} \
--tag ghcr.io/oneuptime/docs:test \
--tag ghcr.io/oneuptime/docs:${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test \
--tag ghcr.io/oneuptime/docs:${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=false \
.
docker buildx build \
--file ./Docs/Dockerfile \
--platform linux/amd64,linux/arm64 \
--push \
--tag oneuptime/docs:enterprise-test \
--tag oneuptime/docs:enterprise-${VERSION} \
--tag ghcr.io/oneuptime/docs:enterprise-test \
--tag ghcr.io/oneuptime/docs:enterprise-${VERSION} \
--build-arg GIT_SHA=${{ github.sha }} \
--build-arg APP_VERSION=${VERSION} \
--build-arg IS_ENTERPRISE_EDITION=true \
.

View File

@@ -14,9 +14,11 @@ RUN npm config set fetch-retry-maxtimeout 600000
ARG GIT_SHA
ARG APP_VERSION
ARG IS_ENTERPRISE_EDITION=false
ENV GIT_SHA=${GIT_SHA}
ENV APP_VERSION=${APP_VERSION}
ENV IS_ENTERPRISE_EDITION=${IS_ENTERPRISE_EDITION}
ENV PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1

View File

@@ -49,7 +49,9 @@
"@opentelemetry/sdk-trace-web": "^1.25.1",
"@opentelemetry/semantic-conventions": "^1.26.0",
"@remixicon/react": "^4.2.0",
"@simplewebauthn/server": "^13.2.2",
"@tippyjs/react": "^4.2.6",
"@types/archiver": "^6.0.3",
"@types/crypto-js": "^4.2.2",
"@types/qrcode": "^1.5.5",
"@types/react-highlight": "^0.12.8",
@@ -58,7 +60,9 @@
"@types/web-push": "^3.6.4",
"acme-client": "^5.3.0",
"airtable": "^0.12.2",
"axios": "^1.7.2",
"archiver": "^7.0.1",
"axios": "^1.12.0",
"botbuilder": "^4.23.3",
"bullmq": "^5.3.3",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
@@ -80,10 +84,10 @@
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"node-cron": "^3.0.3",
"nodemailer": "^6.9.10",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",
"pg": "^8.7.3",
"playwright": "^1.50.0",
"playwright": "^1.55.1",
"posthog-js": "^1.139.6",
"prop-types": "^15.8.1",
"qrcode": "^1.5.3",
@@ -99,7 +103,7 @@
"react-router-dom": "^6.24.1",
"react-select": "^5.4.0",
"react-spinners": "^0.14.1",
"react-syntax-highlighter": "^15.5.0",
"react-syntax-highlighter": "^16.0.0",
"react-toggle": "^4.1.3",
"reactflow": "^11.11.4",
"recharts": "^2.12.7",
@@ -243,89 +247,20 @@
}
},
"node_modules/@babel/code-frame": {
"version": "7.23.5",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.5.tgz",
"integrity": "sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==",
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
"integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/highlight": "^7.23.4",
"chalk": "^2.4.2"
"@babel/helper-validator-identifier": "^7.27.1",
"js-tokens": "^4.0.0",
"picocolors": "^1.1.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/code-frame/node_modules/ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"dependencies": {
"color-convert": "^1.9.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/code-frame/node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"dev": true,
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/code-frame/node_modules/color-convert": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
"dev": true,
"dependencies": {
"color-name": "1.1.3"
}
},
"node_modules/@babel/code-frame/node_modules/color-name": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
"dev": true
},
"node_modules/@babel/code-frame/node_modules/escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
"dev": true,
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/@babel/code-frame/node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"dev": true,
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/code-frame/node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/compat-data": {
"version": "7.23.5",
"resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.5.tgz",
@@ -501,19 +436,21 @@
}
},
"node_modules/@babel/helper-string-parser": {
"version": "7.23.4",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz",
"integrity": "sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==",
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
"integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-validator-identifier": {
"version": "7.22.20",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz",
"integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==",
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
"integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
@@ -528,109 +465,28 @@
}
},
"node_modules/@babel/helpers": {
"version": "7.23.6",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.6.tgz",
"integrity": "sha512-wCfsbN4nBidDRhpDhvcKlzHWCTlgJYUUdSJfzXb2NuBssDSIjc3xcb+znA7l+zYsFljAcGM0aFkN40cR3lXiGA==",
"version": "7.28.4",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz",
"integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/template": "^7.22.15",
"@babel/traverse": "^7.23.6",
"@babel/types": "^7.23.6"
"@babel/template": "^7.27.2",
"@babel/types": "^7.28.4"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/highlight": {
"version": "7.23.4",
"resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.23.4.tgz",
"integrity": "sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==",
"dev": true,
"dependencies": {
"@babel/helper-validator-identifier": "^7.22.20",
"chalk": "^2.4.2",
"js-tokens": "^4.0.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/highlight/node_modules/ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"dependencies": {
"color-convert": "^1.9.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/highlight/node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"dev": true,
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/highlight/node_modules/color-convert": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
"dev": true,
"dependencies": {
"color-name": "1.1.3"
}
},
"node_modules/@babel/highlight/node_modules/color-name": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
"dev": true
},
"node_modules/@babel/highlight/node_modules/escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
"dev": true,
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/@babel/highlight/node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"dev": true,
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/highlight/node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/parser": {
"version": "7.23.6",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.6.tgz",
"integrity": "sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==",
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz",
"integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.28.5"
},
"bin": {
"parser": "bin/babel-parser.js"
},
@@ -801,14 +657,15 @@
}
},
"node_modules/@babel/template": {
"version": "7.22.15",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz",
"integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==",
"version": "7.27.2",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
"integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.22.13",
"@babel/parser": "^7.22.15",
"@babel/types": "^7.22.15"
"@babel/code-frame": "^7.27.1",
"@babel/parser": "^7.27.2",
"@babel/types": "^7.27.1"
},
"engines": {
"node": ">=6.9.0"
@@ -836,14 +693,14 @@
}
},
"node_modules/@babel/types": {
"version": "7.23.6",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.6.tgz",
"integrity": "sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==",
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz",
"integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.23.4",
"@babel/helper-validator-identifier": "^7.22.20",
"to-fast-properties": "^2.0.0"
"@babel/helper-string-parser": "^7.27.1",
"@babel/helper-validator-identifier": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -1760,21 +1617,23 @@
}
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.0.1"
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@@ -2004,10 +1863,11 @@
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ=="
},
"node_modules/cross-spawn": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dev": true,
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",
"shebang-command": "^2.0.0",
@@ -2076,9 +1936,10 @@
}
},
"node_modules/ejs": {
"version": "3.1.9",
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz",
"integrity": "sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==",
"version": "3.1.10",
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz",
"integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==",
"license": "Apache-2.0",
"dependencies": {
"jake": "^10.8.5"
},
@@ -2225,9 +2086,10 @@
}
},
"node_modules/filelist/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
@@ -2244,10 +2106,11 @@
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
@@ -2542,6 +2405,7 @@
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
@@ -3239,7 +3103,8 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
"dev": true
"dev": true,
"license": "MIT"
},
"node_modules/js-yaml": {
"version": "3.14.1",
@@ -3398,12 +3263,13 @@
"dev": true
},
"node_modules/micromatch": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dev": true,
"license": "MIT",
"dependencies": {
"braces": "^3.0.2",
"braces": "^3.0.3",
"picomatch": "^2.3.1"
},
"engines": {
@@ -3684,10 +3550,11 @@
"dev": true
},
"node_modules/picocolors": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
"dev": true
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"dev": true,
"license": "ISC"
},
"node_modules/picomatch": {
"version": "2.3.1",
@@ -4107,20 +3974,12 @@
"integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==",
"dev": true
},
"node_modules/to-fast-properties": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz",
"integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==",
"dev": true,
"engines": {
"node": ">=4"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},

View File

@@ -108,7 +108,7 @@
</div>
</div>
<div class="[&amp;>:first-child]:mt-0 [&amp;>:last-child]:mb-0">
<%- include('../partials/code', {title: "Example Pagination Request", requestUrl: "/api/monitors/get-list?skip=0&limit=3", requestType: "GET", code: pageData.requestCode }) -%>
<%- include('../partials/code', {title: "Example Pagination Request", requestUrl: "/api/monitors/get-list?skip=0&limit=3", requestType: "POST", code: pageData.requestCode }) -%>
<%- include('../partials/code', {title: "Example Pagination Response" , requestUrl: "", requestType: "", code: pageData.responseCode }) -%>
</div>
</div>

View File

@@ -14,9 +14,11 @@ RUN npm config set fetch-retry-maxtimeout 600000
ARG GIT_SHA
ARG APP_VERSION
ARG IS_ENTERPRISE_EDITION=false
ENV GIT_SHA=${GIT_SHA}
ENV APP_VERSION=${APP_VERSION}
ENV IS_ENTERPRISE_EDITION=${IS_ENTERPRISE_EDITION}
ENV PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1

View File

@@ -12,7 +12,7 @@
"ejs": "^3.1.10",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-router-dom": "^6.23.1",
"react-router-dom": "^6.30.1",
"use-async-effect": "^2.2.7"
},
"devDependencies": {
@@ -53,7 +53,9 @@
"@opentelemetry/sdk-trace-web": "^1.25.1",
"@opentelemetry/semantic-conventions": "^1.26.0",
"@remixicon/react": "^4.2.0",
"@simplewebauthn/server": "^13.2.2",
"@tippyjs/react": "^4.2.6",
"@types/archiver": "^6.0.3",
"@types/crypto-js": "^4.2.2",
"@types/qrcode": "^1.5.5",
"@types/react-highlight": "^0.12.8",
@@ -62,7 +64,9 @@
"@types/web-push": "^3.6.4",
"acme-client": "^5.3.0",
"airtable": "^0.12.2",
"axios": "^1.7.2",
"archiver": "^7.0.1",
"axios": "^1.12.0",
"botbuilder": "^4.23.3",
"bullmq": "^5.3.3",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
@@ -84,10 +88,10 @@
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"node-cron": "^3.0.3",
"nodemailer": "^6.9.10",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",
"pg": "^8.7.3",
"playwright": "^1.50.0",
"playwright": "^1.55.1",
"posthog-js": "^1.139.6",
"prop-types": "^15.8.1",
"qrcode": "^1.5.3",
@@ -103,7 +107,7 @@
"react-router-dom": "^6.24.1",
"react-select": "^5.4.0",
"react-spinners": "^0.14.1",
"react-syntax-highlighter": "^15.5.0",
"react-syntax-highlighter": "^16.0.0",
"react-toggle": "^4.1.3",
"reactflow": "^11.11.4",
"recharts": "^2.12.7",
@@ -344,9 +348,9 @@
}
},
"node_modules/@remix-run/router": {
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.16.1.tgz",
"integrity": "sha512-es2g3dq6Nb07iFxGk5GuHN20RwBZOsuDQN7izWIisUcv9r+d2C5jQxqmgkdebXgReWfiyUabcki6Fg77mSNrig==",
"version": "1.23.0",
"resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.23.0.tgz",
"integrity": "sha512-O3rHJzAQKamUz1fvE0Qaw0xSFqsA/yafi2iqeE0pvdFtCO1viYx8QL6f3Ln/aCCTLxs68SLf0KPM9eSeM8yBnA==",
"license": "MIT",
"engines": {
"node": ">=14.0.0"
@@ -522,21 +526,23 @@
}
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.0.1"
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@@ -683,9 +689,9 @@
}
},
"node_modules/filelist/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
@@ -704,10 +710,11 @@
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
@@ -794,6 +801,7 @@
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
@@ -958,12 +966,12 @@
}
},
"node_modules/react-router": {
"version": "6.23.1",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-6.23.1.tgz",
"integrity": "sha512-fzcOaRF69uvqbbM7OhvQyBTFDVrrGlsFdS3AL+1KfIBtGETibHzi3FkoTRyiDJnWNc2VxrfvR+657ROHjaNjqQ==",
"version": "6.30.1",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.1.tgz",
"integrity": "sha512-X1m21aEmxGXqENEPG3T6u0Th7g0aS4ZmoNynhbs+Cn+q+QGTLt+d5IQ2bHAXKzKcxGJjxACpVbnYQSCRcfxHlQ==",
"license": "MIT",
"dependencies": {
"@remix-run/router": "1.16.1"
"@remix-run/router": "1.23.0"
},
"engines": {
"node": ">=14.0.0"
@@ -973,13 +981,13 @@
}
},
"node_modules/react-router-dom": {
"version": "6.23.1",
"resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.23.1.tgz",
"integrity": "sha512-utP+K+aSTtEdbWpC+4gxhdlPFwuEfDKq8ZrPFU65bbRJY+l706qjR7yaidBpo3MSeA/fzwbXWbKBI6ftOnP3OQ==",
"version": "6.30.1",
"resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.1.tgz",
"integrity": "sha512-llKsgOkZdbPU1Eg3zK8lCn+sjD9wMRZZPuzmdWWX5SUs8OFkN5HnFVC0u5KMeMaC9aoancFI/KoLuKPqN+hxHw==",
"license": "MIT",
"dependencies": {
"@remix-run/router": "1.16.1",
"react-router": "6.23.1"
"@remix-run/router": "1.23.0",
"react-router": "6.30.1"
},
"engines": {
"node": ">=14.0.0"
@@ -1009,6 +1017,16 @@
"loose-envify": "^1.1.0"
}
},
"node_modules/semver": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz",
"integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/simple-update-notifier": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz",
@@ -1021,15 +1039,6 @@
"node": ">=8.10.0"
}
},
"node_modules/simple-update-notifier/node_modules/semver": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz",
"integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
@@ -1047,6 +1056,7 @@
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},

View File

@@ -33,7 +33,7 @@
"ejs": "^3.1.10",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-router-dom": "^6.23.1",
"react-router-dom": "^6.30.1",
"use-async-effect": "^2.2.7"
},
"devDependencies": {

View File

@@ -12,6 +12,7 @@ import FormFieldSchemaType from "Common/UI/Components/Forms/Types/FormFieldSchem
import Link from "Common/UI/Components/Link/Link";
import { DASHBOARD_URL } from "Common/UI/Config";
import OneUptimeLogo from "Common/UI/Images/logos/OneUptimeSVG/3-transparent.svg";
import EditionLabel from "Common/UI/Components/EditionLabel/EditionLabel";
import UiAnalytics from "Common/UI/Utils/Analytics";
import LoginUtil from "Common/UI/Utils/Login";
import UserTotpAuth from "Common/Models/DatabaseModels/UserTotpAuth";
@@ -192,6 +193,9 @@ const LoginPage: () => JSX.Element = () => {
src={OneUptimeLogo}
alt="OneUptime"
/>
<div className="mt-4 flex justify-center">
<EditionLabel />
</div>
{!showTwoFactorAuth && (
<>
<h2 className="mt-6 text-center text-2xl tracking-tight text-gray-900">

View File

@@ -14,9 +14,11 @@ RUN npm config set fetch-retry-maxtimeout 600000
ARG GIT_SHA
ARG APP_VERSION
ARG IS_ENTERPRISE_EDITION=false
ENV GIT_SHA=${GIT_SHA}
ENV APP_VERSION=${APP_VERSION}
ENV IS_ENTERPRISE_EDITION=${IS_ENTERPRISE_EDITION}
ENV PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1

View File

@@ -1,13 +1,74 @@
import { PromiseVoidFunction } from "Common/Types/FunctionTypes";
import Express, { ExpressApplication } from "Common/Server/Utils/Express";
import Express, {
ExpressApplication,
ExpressRequest,
ExpressResponse,
} from "Common/Server/Utils/Express";
import logger from "Common/Server/Utils/Logger";
import App from "Common/Server/Utils/StartServer";
import Response from "Common/Server/Utils/Response";
import UserMiddleware from "Common/Server/Middleware/UserAuthorization";
import JSONWebToken from "Common/Server/Utils/JsonWebToken";
import NotAuthorizedException from "Common/Types/Exception/NotAuthorizedException";
import { JSONObject } from "Common/Types/JSON";
import "ejs";
import JSONWebTokenData from "Common/Types/JsonWebTokenData";
export const APP_NAME: string = "admin";
const app: ExpressApplication = Express.getExpressApp();
type EnsureMasterAdminAccessFunction = (
req: ExpressRequest,
res: ExpressResponse,
) => Promise<JSONObject>;
const ensureMasterAdminAccess: EnsureMasterAdminAccessFunction = async (
req: ExpressRequest,
res: ExpressResponse,
): Promise<JSONObject> => {
try {
const accessToken: string | undefined =
UserMiddleware.getAccessTokenFromExpressRequest(req);
if (!accessToken) {
Response.sendErrorResponse(
req,
res,
new NotAuthorizedException(
"Unauthorized: Only master admins can access the admin dashboard.",
),
);
return {};
}
const authData: JSONWebTokenData = JSONWebToken.decode(accessToken);
if (!authData.isMasterAdmin) {
Response.sendErrorResponse(
req,
res,
new NotAuthorizedException(
"Unauthorized: Only master admins can access the admin dashboard.",
),
);
return {};
}
return {};
} catch (error) {
logger.error(error);
Response.sendErrorResponse(
req,
res,
new NotAuthorizedException(
"Unauthorized: Only master admins can access the admin dashboard.",
),
);
return {};
}
};
const init: PromiseVoidFunction = async (): Promise<void> => {
try {
// init the app
@@ -19,6 +80,7 @@ const init: PromiseVoidFunction = async (): Promise<void> => {
liveCheck: async () => {},
readyCheck: async () => {},
},
getVariablesToRenderIndexPage: ensureMasterAdminAccess,
});
// add default routes

View File

@@ -12,7 +12,7 @@
"ejs": "^3.1.10",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-router-dom": "^6.23.1"
"react-router-dom": "^6.30.1"
},
"devDependencies": {
"@types/node": "^16.11.35",
@@ -52,7 +52,9 @@
"@opentelemetry/sdk-trace-web": "^1.25.1",
"@opentelemetry/semantic-conventions": "^1.26.0",
"@remixicon/react": "^4.2.0",
"@simplewebauthn/server": "^13.2.2",
"@tippyjs/react": "^4.2.6",
"@types/archiver": "^6.0.3",
"@types/crypto-js": "^4.2.2",
"@types/qrcode": "^1.5.5",
"@types/react-highlight": "^0.12.8",
@@ -61,7 +63,9 @@
"@types/web-push": "^3.6.4",
"acme-client": "^5.3.0",
"airtable": "^0.12.2",
"axios": "^1.7.2",
"archiver": "^7.0.1",
"axios": "^1.12.0",
"botbuilder": "^4.23.3",
"bullmq": "^5.3.3",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
@@ -83,10 +87,10 @@
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"node-cron": "^3.0.3",
"nodemailer": "^6.9.10",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",
"pg": "^8.7.3",
"playwright": "^1.50.0",
"playwright": "^1.55.1",
"posthog-js": "^1.139.6",
"prop-types": "^15.8.1",
"qrcode": "^1.5.3",
@@ -102,7 +106,7 @@
"react-router-dom": "^6.24.1",
"react-select": "^5.4.0",
"react-spinners": "^0.14.1",
"react-syntax-highlighter": "^15.5.0",
"react-syntax-highlighter": "^16.0.0",
"react-toggle": "^4.1.3",
"reactflow": "^11.11.4",
"recharts": "^2.12.7",
@@ -343,9 +347,9 @@
"dev": true
},
"node_modules/@remix-run/router": {
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.16.1.tgz",
"integrity": "sha512-es2g3dq6Nb07iFxGk5GuHN20RwBZOsuDQN7izWIisUcv9r+d2C5jQxqmgkdebXgReWfiyUabcki6Fg77mSNrig==",
"version": "1.23.0",
"resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.23.0.tgz",
"integrity": "sha512-O3rHJzAQKamUz1fvE0Qaw0xSFqsA/yafi2iqeE0pvdFtCO1viYx8QL6f3Ln/aCCTLxs68SLf0KPM9eSeM8yBnA==",
"license": "MIT",
"engines": {
"node": ">=14.0.0"
@@ -491,21 +495,23 @@
}
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.0.1"
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@@ -601,9 +607,10 @@
}
},
"node_modules/filelist/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
@@ -620,10 +627,11 @@
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
@@ -710,6 +718,7 @@
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
@@ -937,12 +946,12 @@
}
},
"node_modules/react-router": {
"version": "6.23.1",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-6.23.1.tgz",
"integrity": "sha512-fzcOaRF69uvqbbM7OhvQyBTFDVrrGlsFdS3AL+1KfIBtGETibHzi3FkoTRyiDJnWNc2VxrfvR+657ROHjaNjqQ==",
"version": "6.30.1",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.1.tgz",
"integrity": "sha512-X1m21aEmxGXqENEPG3T6u0Th7g0aS4ZmoNynhbs+Cn+q+QGTLt+d5IQ2bHAXKzKcxGJjxACpVbnYQSCRcfxHlQ==",
"license": "MIT",
"dependencies": {
"@remix-run/router": "1.16.1"
"@remix-run/router": "1.23.0"
},
"engines": {
"node": ">=14.0.0"
@@ -952,13 +961,13 @@
}
},
"node_modules/react-router-dom": {
"version": "6.23.1",
"resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.23.1.tgz",
"integrity": "sha512-utP+K+aSTtEdbWpC+4gxhdlPFwuEfDKq8ZrPFU65bbRJY+l706qjR7yaidBpo3MSeA/fzwbXWbKBI6ftOnP3OQ==",
"version": "6.30.1",
"resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.1.tgz",
"integrity": "sha512-llKsgOkZdbPU1Eg3zK8lCn+sjD9wMRZZPuzmdWWX5SUs8OFkN5HnFVC0u5KMeMaC9aoancFI/KoLuKPqN+hxHw==",
"license": "MIT",
"dependencies": {
"@remix-run/router": "1.16.1",
"react-router": "6.23.1"
"@remix-run/router": "1.23.0",
"react-router": "6.30.1"
},
"engines": {
"node": ">=14.0.0"
@@ -988,6 +997,16 @@
"loose-envify": "^1.1.0"
}
},
"node_modules/semver": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz",
"integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/simple-update-notifier": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz",
@@ -1000,15 +1019,6 @@
"node": ">=8.10.0"
}
},
"node_modules/simple-update-notifier/node_modules/semver": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz",
"integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
@@ -1026,6 +1036,7 @@
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},

View File

@@ -8,7 +8,7 @@
"ejs": "^3.1.10",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-router-dom": "^6.23.1"
"react-router-dom": "^6.30.1"
},
"scripts": {
"dev-build": "NODE_ENV=development node esbuild.config.js",

View File

@@ -3,6 +3,7 @@ import Logo from "./Logo";
import UserProfile from "./UserProfile";
import Button, { ButtonStyleType } from "Common/UI/Components/Button/Button";
import Header from "Common/UI/Components/Header/Header";
import EditionLabel from "Common/UI/Components/EditionLabel/EditionLabel";
import { DASHBOARD_URL } from "Common/UI/Config";
import Navigation from "Common/UI/Utils/Navigation";
import React, { FunctionComponent, ReactElement } from "react";
@@ -27,6 +28,7 @@ const DashboardHeader: FunctionComponent = (): ReactElement => {
}
rightComponents={
<>
<EditionLabel className="mr-3 hidden md:inline-flex" />
<Button
title="Exit Admin"
buttonStyle={ButtonStyleType.NORMAL}

View File

@@ -1,3 +1,4 @@
import "./Utils/API";
import App from "./App";
import Telemetry from "Common/UI/Utils/Telemetry/Telemetry";
import React from "react";

View File

@@ -68,6 +68,9 @@ const buildWhatsAppSetupMarkdown: BuildWhatsAppSetupMarkdown = (): string => {
WhatsAppTemplateIds,
) as Array<keyof typeof WhatsAppTemplateIds>;
const appApiBaseUrl: string = APP_API_URL.toString().replace(/\/$/, "");
const primaryWebhookUrl: string = `${appApiBaseUrl}/notification/whatsapp/webhook`;
const description: string =
"Follow these steps to connect Meta WhatsApp with OneUptime so notifications can be delivered via WhatsApp.";
@@ -75,6 +78,7 @@ const buildWhatsAppSetupMarkdown: BuildWhatsAppSetupMarkdown = (): string => {
"Meta Business Manager admin access for the WhatsApp Business Account.",
"A WhatsApp Business phone number approved for API messaging.",
"Admin access to OneUptime with permission to edit global notification settings.",
"A webhook verify token string that you'll configure identically in Meta and OneUptime.",
];
const setupStepsList: Array<string> = [
@@ -82,7 +86,7 @@ const buildWhatsAppSetupMarkdown: BuildWhatsAppSetupMarkdown = (): string => {
"From **Business Settings → Accounts → WhatsApp Accounts**, create or select the account that owns your sender phone number.",
"In Buisness Portfolio, create a system user and assign it to the WhatsApp Business Account with the role of **Admin**.",
"Generate a token for this system user and this will be your long-lived access token. Make sure to select the **whatsapp_business_management** and **whatsapp_business_messaging** permissions when generating the token.",
"Paste the access token and phone number ID into the **Meta WhatsApp Settings** card above, then save.",
"Paste the access token, phone number ID, and webhook verify token into the **Meta WhatsApp Settings** card above, then save.",
"For the **Business Account ID**, go to **Business Settings → Business Info** (or **Business Settings → WhatsApp Accounts → Settings**) and copy the **WhatsApp Business Account ID** value.",
"To locate the **App ID** and **App Secret**, open [Meta for Developers](https://developers.facebook.com/apps/), select your WhatsApp app, then navigate to **Settings → Basic**. The App ID is shown at the top; click **Show** next to **App Secret** to reveal and copy it.",
"Create each template listed below in the Meta WhatsApp Manager. Make sure the template name, language, and variables match exactly. You can however change the content to your preference. Please make sure it's approved by Meta.",
@@ -169,12 +173,25 @@ const buildWhatsAppSetupMarkdown: BuildWhatsAppSetupMarkdown = (): string => {
.filter(Boolean)
.join("\n");
const webhookSection: string = [
"### Configure Meta Webhook Subscription",
"1. In the OneUptime Admin Dashboard, open **Settings → WhatsApp → Meta WhatsApp Settings** and enter a strong value in **Webhook Verify Token**. Save the form so the encrypted token is stored in Global Config.",
"2. Keep that verify token handy—Meta does not generate one for you. You'll paste the exact same value when configuring the callback.",
"3. In [Meta for Developers](https://developers.facebook.com/apps/), select your WhatsApp app and navigate to **WhatsApp → Configuration → Webhooks**.",
`4. Click **Configure**, then supply one of the following callback URLs when Meta asks for your endpoint:\n - \`${primaryWebhookUrl}\`\n `,
"5. Paste the verify token from step 1 into Meta's **Verify Token** field and submit. Meta will call the callback URL and expect that value to match before it approves the subscription.",
"6. After verification succeeds, subscribe to the **messages** field (and any other WhatsApp webhook categories you need) so delivery status updates are forwarded to OneUptime.",
]
.filter(Boolean)
.join("\n\n");
return [
description,
"### Prerequisites",
prerequisitesMarkdown,
"### Setup Steps",
setupStepsMarkdown,
webhookSection,
"### Required WhatsApp Templates",
templateSummaryTable,
"### Template Bodies",
@@ -271,6 +288,18 @@ const SettingsWhatsApp: FunctionComponent = (): ReactElement => {
"Optional Business Account ID that owns the WhatsApp templates.",
placeholder: "123456789012345",
},
{
field: {
metaWhatsAppWebhookVerifyToken: true,
},
title: "Webhook Verify Token",
stepId: "meta-credentials",
fieldType: FormFieldSchemaType.EncryptedText,
required: false,
description:
"Secret token configured in Meta to validate webhook subscription requests.",
placeholder: "Webhook verify token",
},
{
field: {
metaWhatsAppAppId: true,
@@ -324,6 +353,14 @@ const SettingsWhatsApp: FunctionComponent = (): ReactElement => {
fieldType: FieldType.Text,
placeholder: "Not Configured",
},
{
field: {
metaWhatsAppWebhookVerifyToken: true,
},
title: "Webhook Verify Token",
fieldType: FieldType.HiddenText,
placeholder: "Not Configured",
},
{
field: {
metaWhatsAppAppId: true,

View File

@@ -0,0 +1,42 @@
import BaseAPI from "Common/UI/Utils/API/API";
import { IDENTITY_URL } from "Common/UI/Config";
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import URL from "Common/Types/API/URL";
import { JSONObject } from "Common/Types/JSON";
import { Logger } from "Common/UI/Utils/Logger";
const registerAdminDashboardAuthRefresh = (): void => {
const refreshSession = async (): Promise<boolean> => {
try {
const response = await BaseAPI.post<JSONObject>({
url: URL.fromURL(IDENTITY_URL).addRoute("/refresh-session"),
options: {
skipAuthRefresh: true,
},
});
if (response instanceof HTTPErrorResponse) {
Logger.warn(
`Admin dashboard session refresh failed with status ${response.statusCode}.`,
);
return false;
}
return response.isSuccess();
} catch (err) {
Logger.error("Admin dashboard session refresh request failed.");
Logger.error(err as Error);
return false;
}
};
BaseAPI.setRefreshSessionHandler(refreshSession);
BaseAPI.setRefreshFailureHandler(() => {
Logger.warn("Admin dashboard session refresh failed; falling back to logout.");
});
};
registerAdminDashboardAuthRefresh();
export default BaseAPI;

View File

@@ -14,9 +14,11 @@ RUN npm config set fetch-retry-maxtimeout 600000
ARG GIT_SHA
ARG APP_VERSION
ARG IS_ENTERPRISE_EDITION=false
ENV GIT_SHA=${GIT_SHA}
ENV APP_VERSION=${APP_VERSION}
ENV IS_ENTERPRISE_EDITION=${IS_ENTERPRISE_EDITION}
ENV PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1

View File

@@ -14,9 +14,11 @@ import TelemetryAPI from "Common/Server/API/TelemetryAPI";
import ProbeAPI from "Common/Server/API/ProbeAPI";
import ProjectAPI from "Common/Server/API/ProjectAPI";
import ProjectSsoAPI from "Common/Server/API/ProjectSSO";
import WhatsAppLogAPI from "./WhatsAppLogAPI";
// Import API
import ResellerPlanAPI from "Common/Server/API/ResellerPlanAPI";
import EnterpriseLicenseAPI from "Common/Server/API/EnterpriseLicenseAPI";
import MonitorAPI from "Common/Server/API/MonitorAPI";
import ShortLinkAPI from "Common/Server/API/ShortLinkAPI";
import StatusPageAPI from "Common/Server/API/StatusPageAPI";
@@ -97,6 +99,9 @@ import IncidentInternalNoteService, {
import IncidentNoteTemplateService, {
Service as IncidentNoteTemplateServiceType,
} from "Common/Server/Services/IncidentNoteTemplateService";
import IncidentPostmortemTemplateService, {
Service as IncidentPostmortemTemplateServiceType,
} from "Common/Server/Services/IncidentPostmortemTemplateService";
import TableViewService, {
Service as TableViewServiceType,
} from "Common/Server/Services/TableViewService";
@@ -140,9 +145,6 @@ import LogService, {
LogService as LogServiceType,
} from "Common/Server/Services/LogService";
import TelemetryAttributeService, {
TelemetryAttributeService as TelemetryAttributeServiceType,
} from "Common/Server/Services/TelemetryAttributeService";
import CopilotActionTypePriorityService, {
Service as CopilotActionTypePriorityServiceType,
} from "Common/Server/Services/CopilotActionTypePriorityService";
@@ -285,9 +287,6 @@ import ShortLinkService, {
import SmsLogService, {
Service as SmsLogServiceType,
} from "Common/Server/Services/SmsLogService";
import WhatsAppLogService, {
Service as WhatsAppLogServiceType,
} from "Common/Server/Services/WhatsAppLogService";
import PushNotificationLogService, {
Service as PushNotificationLogServiceType,
} from "Common/Server/Services/PushNotificationLogService";
@@ -382,6 +381,7 @@ import TelemetryExceptionService, {
import ExceptionInstanceService, {
ExceptionInstanceService as ExceptionInstanceServiceType,
} from "Common/Server/Services/ExceptionInstanceService";
import AcmeChallengeAPI from "Common/Server/API/AcmeChallengeAPI";
import FeatureSet from "Common/Server/Types/FeatureSet";
import Express, { ExpressApplication } from "Common/Server/Utils/Express";
@@ -412,6 +412,7 @@ import Incident from "Common/Models/DatabaseModels/Incident";
import IncidentCustomField from "Common/Models/DatabaseModels/IncidentCustomField";
import IncidentInternalNote from "Common/Models/DatabaseModels/IncidentInternalNote";
import IncidentNoteTemplate from "Common/Models/DatabaseModels/IncidentNoteTemplate";
import IncidentPostmortemTemplate from "Common/Models/DatabaseModels/IncidentPostmortemTemplate";
import IncidentOwnerTeam from "Common/Models/DatabaseModels/IncidentOwnerTeam";
import IncidentOwnerUser from "Common/Models/DatabaseModels/IncidentOwnerUser";
import IncidentPublicNote from "Common/Models/DatabaseModels/IncidentPublicNote";
@@ -462,7 +463,6 @@ import ServiceCatalogOwnerUser from "Common/Models/DatabaseModels/ServiceCatalog
import ServiceCopilotCodeRepository from "Common/Models/DatabaseModels/ServiceCopilotCodeRepository";
import ShortLink from "Common/Models/DatabaseModels/ShortLink";
import SmsLog from "Common/Models/DatabaseModels/SmsLog";
import WhatsAppLog from "Common/Models/DatabaseModels/WhatsAppLog";
import StatusPageAnnouncement from "Common/Models/DatabaseModels/StatusPageAnnouncement";
// Custom Fields API
import StatusPageCustomField from "Common/Models/DatabaseModels/StatusPageCustomField";
@@ -492,7 +492,6 @@ import WorkflowVariable from "Common/Models/DatabaseModels/WorkflowVariable";
import ProbeOwnerTeam from "Common/Models/DatabaseModels/ProbeOwnerTeam";
import ProbeOwnerUser from "Common/Models/DatabaseModels/ProbeOwnerUser";
import ServiceCatalogDependency from "Common/Models/DatabaseModels/ServiceCatalogDependency";
import TelemetryAttribute from "Common/Models/AnalyticsModels/TelemetryAttribute";
import ExceptionInstance from "Common/Models/AnalyticsModels/ExceptionInstance";
import TelemetyException from "Common/Models/DatabaseModels/TelemetryException";
import CopilotActionTypePriority from "Common/Models/DatabaseModels/CopilotActionTypePriority";
@@ -618,10 +617,7 @@ const BaseAPIFeatureSet: FeatureSet = {
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAnalyticsAPI<TelemetryAttribute, TelemetryAttributeServiceType>(
TelemetryAttribute,
TelemetryAttributeService,
).getRouter(),
new AcmeChallengeAPI().getRouter(),
);
app.use(`/${APP_NAME.toLocaleLowerCase()}`, OpenAPI.getRouter());
@@ -1403,6 +1399,17 @@ const BaseAPIFeatureSet: FeatureSet = {
).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<
IncidentPostmortemTemplate,
IncidentPostmortemTemplateServiceType
>(
IncidentPostmortemTemplate,
IncidentPostmortemTemplateService,
).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<
@@ -1545,10 +1552,7 @@ const BaseAPIFeatureSet: FeatureSet = {
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<WhatsAppLog, WhatsAppLogServiceType>(
WhatsAppLog,
WhatsAppLogService,
).getRouter(),
new WhatsAppLogAPI().getRouter(),
);
app.use(
@@ -1648,6 +1652,10 @@ const BaseAPIFeatureSet: FeatureSet = {
`/${APP_NAME.toLocaleLowerCase()}`,
new ResellerPlanAPI().getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new EnterpriseLicenseAPI().getRouter(),
);
app.use(`/${APP_NAME.toLocaleLowerCase()}`, new SlackAPI().getRouter());
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,

View File

@@ -0,0 +1,14 @@
import BaseAPI from "Common/Server/API/BaseAPI";
import WhatsAppLog from "Common/Models/DatabaseModels/WhatsAppLog";
import WhatsAppLogService, {
Service as WhatsAppLogServiceType,
} from "Common/Server/Services/WhatsAppLogService";
/**
 * REST API surface for WhatsAppLog entities.
 *
 * Thin subclass of the generic BaseAPI: all CRUD routes, permission checks,
 * and query handling come from BaseAPI; this class only binds the
 * WhatsAppLog model to its service singleton. It exists as a named class
 * (rather than an inline `new BaseAPI<...>(...)` at the mount site) so the
 * route registration in the feature-set index stays uniform with other
 * dedicated API classes.
 */
export default class WhatsAppLogAPI extends BaseAPI<
WhatsAppLog,
WhatsAppLogServiceType
> {
public constructor() {
// Wire the WhatsAppLog database model to its service implementation.
super(WhatsAppLog, WhatsAppLogService);
}
}

View File

@@ -26,6 +26,9 @@ import MailService from "Common/Server/Services/MailService";
import UserService from "Common/Server/Services/UserService";
import UserTotpAuthService from "Common/Server/Services/UserTotpAuthService";
import CookieUtil from "Common/Server/Utils/Cookie";
import JSONWebToken, {
RefreshTokenData,
} from "Common/Server/Utils/JsonWebToken";
import Express, {
ExpressRequest,
ExpressResponse,
@@ -40,6 +43,10 @@ import User from "Common/Models/DatabaseModels/User";
import UserTotpAuth from "Common/Models/DatabaseModels/UserTotpAuth";
import UserWebAuthn from "Common/Models/DatabaseModels/UserWebAuthn";
import UserWebAuthnService from "Common/Server/Services/UserWebAuthnService";
import HashedString from "Common/Types/HashedString";
import NotAuthenticatedException from "Common/Types/Exception/NotAuthenticatedException";
import Dictionary from "Common/Types/Dictionary";
import JSONWebTokenData from "Common/Types/JsonWebTokenData";
const router: ExpressRouter = Express.getRouter();
@@ -186,12 +193,29 @@ router.post(
// Refresh Permissions for this user here.
await AccessTokenService.refreshUserAllPermissions(savedUser.id!);
CookieUtil.setUserCookie({
const session = CookieUtil.setUserCookie({
expressResponse: res,
user: savedUser,
isGlobalLogin: true,
});
const hashedSessionId: string = await HashedString.hashValue(
session.sessionId,
EncryptionSecret,
);
await UserService.updateOneBy({
query: {
_id: savedUser.id!,
},
data: {
jwtRefreshToken: hashedSessionId,
},
props: {
isRoot: true,
},
});
logger.info("User signed up: " + savedUser.email?.toString());
return Response.sendEntityResponse(req, res, savedUser, User);
@@ -495,6 +519,67 @@ router.post(
next: NextFunction,
): Promise<void> => {
try {
const refreshToken: string | undefined =
CookieUtil.getCookieFromExpressRequest(
req,
CookieUtil.getRefreshTokenKey(),
);
let userIdToInvalidate: ObjectID | null = null;
if (refreshToken) {
try {
const refreshTokenData: RefreshTokenData =
JSONWebToken.decodeRefreshToken(refreshToken);
userIdToInvalidate = refreshTokenData.userId;
} catch (err) {
const error: Error = err as Error;
logger.warn(
`Failed to decode refresh token during logout: ${
error.message || "unknown error"
}`,
);
logger.debug(error);
}
}
if (!userIdToInvalidate) {
const accessToken: string | undefined =
CookieUtil.getCookieFromExpressRequest(
req,
CookieUtil.getUserTokenKey(),
);
if (accessToken) {
try {
const decoded: JSONWebTokenData = JSONWebToken.decode(accessToken);
userIdToInvalidate = decoded.userId;
} catch (err) {
const error: Error = err as Error;
logger.warn(
`Failed to decode access token during logout: ${
error.message || "unknown error"
}`,
);
logger.debug(error);
}
}
}
if (userIdToInvalidate) {
await UserService.updateOneBy({
query: {
_id: userIdToInvalidate,
},
data: {
jwtRefreshToken: null!,
},
props: {
isRoot: true,
},
});
}
CookieUtil.removeAllCookies(req, res);
return Response.sendEmptySuccessResponse(req, res);
@@ -555,6 +640,122 @@ router.post(
},
);
// POST /refresh-session
//
// Exchanges a valid refresh-token cookie for a brand-new access/refresh
// token pair (session rotation). The refresh token embeds a session id;
// only the *hash* of that id is stored on the User row (jwtRefreshToken),
// so a database leak does not expose usable refresh tokens. Any failure
// (missing cookie, undecodable token, hash mismatch) clears all auth
// cookies and returns a NotAuthenticatedException so the client falls
// back to a full login.
router.post(
  "/refresh-session",
  async (
    req: ExpressRequest,
    res: ExpressResponse,
    next: NextFunction,
  ): Promise<void> => {
    try {
      const refreshToken: string | undefined =
        CookieUtil.getCookieFromExpressRequest(
          req,
          CookieUtil.getRefreshTokenKey(),
        );

      if (!refreshToken) {
        // No refresh cookie at all: wipe any stale auth cookies too.
        CookieUtil.removeAllCookies(req, res);
        return Response.sendErrorResponse(
          req,
          res,
          new NotAuthenticatedException("Refresh token missing"),
        );
      }

      let refreshTokenData: RefreshTokenData;

      try {
        // Verifies signature/expiry as well as decoding the payload.
        refreshTokenData = JSONWebToken.decodeRefreshToken(refreshToken);
      } catch (err) {
        const error: Error = err as Error;
        logger.warn(
          `Failed to decode refresh token: ${error.message || "unknown error"}`,
        );
        logger.debug(error);
        CookieUtil.removeAllCookies(req, res);
        return Response.sendErrorResponse(
          req,
          res,
          new NotAuthenticatedException("Refresh token is invalid"),
        );
      }

      // Hash the presented session id with the server secret and require an
      // exact match against the stored hash — this is what invalidates old
      // sessions after logout or rotation.
      const hashedSessionId: string = await HashedString.hashValue(
        refreshTokenData.sessionId,
        EncryptionSecret,
      );

      const user: User | null = await UserService.findOneBy({
        query: {
          _id: refreshTokenData.userId,
          jwtRefreshToken: hashedSessionId,
        },
        select: {
          _id: true,
          email: true,
          name: true,
          isMasterAdmin: true,
          profilePictureId: true,
          timezone: true,
          enableTwoFactorAuth: true,
        },
        props: {
          isRoot: true,
        },
      });

      if (!user) {
        // Token decoded fine but does not match the stored session hash —
        // the session was rotated or revoked elsewhere.
        CookieUtil.removeAllCookies(req, res);
        return Response.sendErrorResponse(
          req,
          res,
          new NotAuthenticatedException("Refresh token does not match"),
        );
      }

      // Issue a new cookie pair, carrying forward the original login scope.
      const session = CookieUtil.setUserCookie({
        expressResponse: res,
        user: user,
        isGlobalLogin: refreshTokenData.isGlobalLogin,
      });

      // Mutate req.cookies so anything later in this same request cycle
      // reads the freshly issued tokens rather than the stale ones.
      // NOTE(review): presumably needed by downstream middleware — confirm.
      if (!req.cookies) {
        req.cookies = {} as Dictionary<string>;
      }
      req.cookies[CookieUtil.getUserTokenKey()] = session.accessToken;
      req.cookies[CookieUtil.getRefreshTokenKey()] = session.refreshToken;

      // Rotate: persist the hash of the NEW session id, invalidating the
      // refresh token that was just consumed.
      const hashedNewSessionId: string = await HashedString.hashValue(
        session.sessionId,
        EncryptionSecret,
      );

      await UserService.updateOneBy({
        query: {
          _id: user.id!,
        },
        data: {
          jwtRefreshToken: hashedNewSessionId,
        },
        props: {
          isRoot: true,
        },
      });

      logger.info(
        `User session refreshed: ${
          user.email?.toString() || user.id?.toString() || "unknown"
        }`,
      );

      return Response.sendEntityResponse(req, res, user, User);
    } catch (err) {
      return next(err);
    }
  },
);
type FetchTotpAuthListFunction = (userId: ObjectID) => Promise<{
totpAuthList: Array<UserTotpAuth>;
webAuthnList: Array<UserWebAuthn>;
@@ -788,12 +989,29 @@ const login: LoginFunction = async (options: {
if (alreadySavedUser.password.toString() === user.password!.toString()) {
logger.info("User logged in: " + alreadySavedUser.email?.toString());
CookieUtil.setUserCookie({
const session = CookieUtil.setUserCookie({
expressResponse: res,
user: alreadySavedUser,
isGlobalLogin: true,
});
const hashedSessionId: string = await HashedString.hashValue(
session.sessionId,
EncryptionSecret,
);
await UserService.updateOneBy({
query: {
_id: alreadySavedUser.id!,
},
data: {
jwtRefreshToken: hashedSessionId,
},
props: {
isRoot: true,
},
});
return Response.sendEntityResponse(req, res, alreadySavedUser, User);
}
}

View File

@@ -834,41 +834,53 @@ router.post(
projectId: projectId,
name: displayName,
},
select: { _id: true },
select: {
_id: true,
name: true,
createdAt: true,
updatedAt: true,
projectId: true,
},
props: { isRoot: true },
});
let targetTeam: Team;
let createdNewTeam: boolean = false;
if (existingTeam) {
logger.debug(
`SCIM Create group - team already exists with id: ${existingTeam.id}`,
`SCIM Create group - team already exists with id: ${existingTeam.id}, reusing existing team`,
);
throw new BadRequestException("Group with this name already exists");
targetTeam = existingTeam;
} else {
// Create new team
logger.debug(`SCIM Create group - creating new team: ${displayName}`);
const team: Team = new Team();
team.projectId = projectId;
team.name = displayName;
team.isTeamEditable = true; // Allow editing SCIM-created teams
team.isTeamDeleteable = true; // Allow deleting SCIM-created teams
team.shouldHaveAtLeastOneMember = false; // SCIM groups can be empty
const createdTeam: Team = await TeamService.create({
data: team,
props: { isRoot: true },
});
logger.debug(
`SCIM Create group - created team with id: ${createdTeam.id}`,
);
targetTeam = createdTeam;
createdNewTeam = true;
}
// Create new team
logger.debug(`SCIM Create group - creating new team: ${displayName}`);
const team: Team = new Team();
team.projectId = projectId;
team.name = displayName;
team.isTeamEditable = true; // Allow editing SCIM-created teams
team.isTeamDeleteable = true; // Allow deleting SCIM-created teams
team.shouldHaveAtLeastOneMember = false; // SCIM groups can be empty
const createdTeam: Team = await TeamService.create({
data: team,
props: { isRoot: true },
});
logger.debug(
`SCIM Create group - created team with id: ${createdTeam.id}`,
);
// Handle initial members if provided
// Handle members if provided. Adds any new members and leaves existing ones intact.
const members: Array<SCIMMember> =
(scimGroup["members"] as Array<SCIMMember>) || [];
if (members.length > 0) {
logger.debug(
`SCIM Create group - adding ${members.length} initial members`,
`SCIM Create group - ensuring ${members.length} members are part of team ${targetTeam.id}`,
);
for (const member of members) {
const userId: string = member["value"] as string;
@@ -887,18 +899,18 @@ router.post(
query: {
projectId: projectId,
userId: new ObjectID(userId),
teamId: createdTeam.id!,
teamId: targetTeam.id!,
},
select: { _id: true },
props: { isRoot: true },
});
if (!existingMember) {
// Add user to the new team
// Add user to the team
const newTeamMember: TeamMember = new TeamMember();
newTeamMember.projectId = projectId;
newTeamMember.userId = new ObjectID(userId);
newTeamMember.teamId = createdTeam.id!;
newTeamMember.teamId = targetTeam.id!;
newTeamMember.hasAcceptedInvitation = true;
newTeamMember.invitationAcceptedAt =
OneUptimeDate.getCurrentDate();
@@ -910,7 +922,7 @@ router.post(
},
});
logger.debug(
`SCIM Create group - added user ${userId} to team`,
`SCIM Create group - added user ${userId} to team ${targetTeam.id}`,
);
}
}
@@ -918,18 +930,39 @@ router.post(
}
}
const createdGroup: JSONObject = await formatTeamForSCIM(
createdTeam,
const teamForResponse: Team | null = await TeamService.findOneById({
id: targetTeam.id!,
select: {
_id: true,
name: true,
createdAt: true,
updatedAt: true,
projectId: true,
},
props: { isRoot: true },
});
if (!teamForResponse) {
throw new NotFoundException("Failed to retrieve group");
}
const groupResponse: JSONObject = await formatTeamForSCIM(
teamForResponse,
req.params["projectScimId"]!,
true,
);
logger.debug(
`SCIM Create group - returning created group with id: ${createdTeam.id}`,
`SCIM Create group - returning group with id: ${teamForResponse.id}`,
);
res.status(201);
return Response.sendJsonObjectResponse(req, res, createdGroup);
if (createdNewTeam) {
res.status(201);
} else {
res.status(200);
}
return Response.sendJsonObjectResponse(req, res, groupResponse);
} catch (err) {
logger.error(err);
return next(err);
@@ -995,10 +1028,6 @@ router.put(
);
}
if (!team.isTeamEditable) {
throw new BadRequestException("This group cannot be updated");
}
// Update team name if provided
const displayName: string = scimGroup["displayName"] as string;
if (displayName && displayName !== team.name) {
@@ -1249,10 +1278,6 @@ router.patch(
);
}
if (!team.isTeamEditable) {
throw new BadRequestException("This group cannot be updated");
}
// Handle SCIM patch operations
const operations: JSONObject[] =
(scimPatch["Operations"] as JSONObject[]) || [];

View File

@@ -15,7 +15,7 @@ import { JSONObject } from "Common/Types/JSON";
import ObjectID from "Common/Types/ObjectID";
import PositiveNumber from "Common/Types/PositiveNumber";
import DatabaseConfig from "Common/Server/DatabaseConfig";
import { Host, HttpProtocol } from "Common/Server/EnvironmentConfig";
import { EncryptionSecret, Host, HttpProtocol } from "Common/Server/EnvironmentConfig";
import AccessTokenService from "Common/Server/Services/AccessTokenService";
import ProjectSSOService from "Common/Server/Services/ProjectSsoService";
import TeamMemberService from "Common/Server/Services/TeamMemberService";
@@ -37,6 +37,7 @@ import TeamMember from "Common/Models/DatabaseModels/TeamMember";
import User from "Common/Models/DatabaseModels/User";
import xml2js from "xml2js";
import Name from "Common/Types/Name";
import HashedString from "Common/Types/HashedString";
const router: ExpressRouter = Express.getRouter();
@@ -539,12 +540,29 @@ const loginUserWithSso: LoginUserWithSsoFunction = async (
expressResponse: res,
});
CookieUtil.setUserCookie({
const session = CookieUtil.setUserCookie({
expressResponse: res,
user: alreadySavedUser,
isGlobalLogin: false,
});
const hashedSessionId: string = await HashedString.hashValue(
session.sessionId,
EncryptionSecret,
);
await UserService.updateOneBy({
query: {
_id: alreadySavedUser.id!,
},
data: {
jwtRefreshToken: hashedSessionId,
},
props: {
isRoot: true,
},
});
// Refresh Permissions for this user here.
await AccessTokenService.refreshUserAllPermissions(alreadySavedUser.id!);

View File

@@ -6,6 +6,7 @@ import URL from "Common/Types/API/URL";
import OneUptimeDate from "Common/Types/Date";
import EmailTemplateType from "Common/Types/Email/EmailTemplateType";
import BadDataException from "Common/Types/Exception/BadDataException";
import NotAuthenticatedException from "Common/Types/Exception/NotAuthenticatedException";
import { JSONObject } from "Common/Types/JSON";
import JSONFunctions from "Common/Types/JSONFunctions";
import ObjectID from "Common/Types/ObjectID";
@@ -22,11 +23,16 @@ import Express, {
ExpressRouter,
NextFunction,
} from "Common/Server/Utils/Express";
import JSONWebToken from "Common/Server/Utils/JsonWebToken";
import JSONWebToken, {
RefreshTokenData,
} from "Common/Server/Utils/JsonWebToken";
import logger from "Common/Server/Utils/Logger";
import Response from "Common/Server/Utils/Response";
import StatusPage from "Common/Models/DatabaseModels/StatusPage";
import StatusPagePrivateUser from "Common/Models/DatabaseModels/StatusPagePrivateUser";
import HashedString from "Common/Types/HashedString";
import Dictionary from "Common/Types/Dictionary";
import JSONWebTokenData from "Common/Types/JsonWebTokenData";
const router: ExpressRouter = Express.getRouter();
@@ -46,7 +52,79 @@ router.post(
req.params["statuspageid"].toString(),
);
CookieUtil.removeCookie(res, CookieUtil.getUserTokenKey(statusPageId)); // remove the cookie.
const refreshTokenKey: string = CookieUtil.getRefreshTokenKey(statusPageId);
const accessTokenKey: string = CookieUtil.getUserTokenKey(statusPageId);
const refreshToken: string | undefined =
CookieUtil.getCookieFromExpressRequest(req, refreshTokenKey);
let userIdToInvalidate: ObjectID | null = null;
if (refreshToken) {
try {
const refreshData: RefreshTokenData =
JSONWebToken.decodeRefreshToken(refreshToken);
if (
refreshData.statusPageId &&
refreshData.statusPageId.toString() === statusPageId.toString()
) {
userIdToInvalidate = refreshData.userId;
}
} catch (err) {
const error: Error = err as Error;
logger.warn(
`Failed to decode status page refresh token during logout: ${
error.message || "unknown error"
}`,
);
logger.debug(error);
}
}
if (!userIdToInvalidate) {
const accessToken: string | undefined =
CookieUtil.getCookieFromExpressRequest(req, accessTokenKey);
if (accessToken) {
try {
const decoded: JSONWebTokenData = JSONWebToken.decode(accessToken);
if (
decoded.statusPageId &&
decoded.statusPageId.toString() === statusPageId.toString()
) {
userIdToInvalidate = decoded.userId;
}
} catch (err) {
const error: Error = err as Error;
logger.warn(
`Failed to decode status page access token during logout: ${
error.message || "unknown error"
}`,
);
logger.debug(error);
}
}
}
if (userIdToInvalidate) {
await StatusPagePrivateUserService.updateOneBy({
query: {
_id: userIdToInvalidate,
statusPageId: statusPageId,
},
data: {
jwtRefreshToken: null!,
},
props: {
isRoot: true,
},
});
}
CookieUtil.removeCookie(res, accessTokenKey);
CookieUtil.removeCookie(res, refreshTokenKey);
return Response.sendEmptySuccessResponse(req, res);
} catch (err) {
@@ -383,23 +461,41 @@ router.post(
});
if (alreadySavedUser) {
const token: string = JSONWebToken.sign({
data: alreadySavedUser,
expiresInSeconds: OneUptimeDate.getSecondsInDays(
new PositiveNumber(30),
),
const session = CookieUtil.setStatusPageUserCookie({
expressResponse: res,
user: alreadySavedUser,
statusPageId: alreadySavedUser.statusPageId!,
});
CookieUtil.setCookie(
res,
CookieUtil.getUserTokenKey(alreadySavedUser.statusPageId!),
token,
{
httpOnly: true,
maxAge: OneUptimeDate.getMillisecondsInDays(new PositiveNumber(30)),
},
if (!req.cookies) {
req.cookies = {} as Dictionary<string>;
}
req.cookies[CookieUtil.getUserTokenKey(alreadySavedUser.statusPageId!)] =
session.accessToken;
req.cookies[
CookieUtil.getRefreshTokenKey(alreadySavedUser.statusPageId!)
] = session.refreshToken;
const hashedSessionId: string = await HashedString.hashValue(
session.sessionId,
EncryptionSecret,
);
await StatusPagePrivateUserService.updateOneBy({
query: {
_id: alreadySavedUser.id!,
statusPageId: alreadySavedUser.statusPageId!,
},
data: {
jwtRefreshToken: hashedSessionId,
lastActive: OneUptimeDate.getCurrentDate(),
},
props: {
isRoot: true,
},
});
return Response.sendEntityResponse(
req,
res,
@@ -407,7 +503,11 @@ router.post(
StatusPagePrivateUser,
{
miscData: {
token: token,
accessToken: session.accessToken,
refreshToken: session.refreshToken,
accessTokenExpiresInSeconds: session.accessTokenExpiresInSeconds,
refreshTokenExpiresInSeconds:
session.refreshTokenExpiresInSeconds,
},
},
);
@@ -421,4 +521,160 @@ router.post(
},
);
// POST /refresh-session/:statuspageid
//
// Status-page variant of session refresh. Same rotation scheme as the main
// app (hashed session id stored in jwtRefreshToken), but tokens and cookies
// are scoped per status page: cookie keys embed the status page id, and the
// refresh token must carry a matching statusPageId claim so a token minted
// for one private status page cannot refresh a session on another.
router.post(
  "/refresh-session/:statuspageid",
  async (
    req: ExpressRequest,
    res: ExpressResponse,
    next: NextFunction,
  ): Promise<void> => {
    try {
      if (!req.params["statuspageid"]) {
        throw new BadDataException("Status Page ID is required.");
      }

      const statusPageId: ObjectID = new ObjectID(
        req.params["statuspageid"].toString(),
      );

      // Cookie keys are scoped to this status page id.
      const refreshTokenKey: string =
        CookieUtil.getRefreshTokenKey(statusPageId);
      const accessTokenKey: string = CookieUtil.getUserTokenKey(statusPageId);

      const refreshToken: string | undefined =
        CookieUtil.getCookieFromExpressRequest(req, refreshTokenKey);

      if (!refreshToken) {
        // Clear both scoped cookies so the client falls back to login.
        CookieUtil.removeCookie(res, refreshTokenKey);
        CookieUtil.removeCookie(res, accessTokenKey);
        return Response.sendErrorResponse(
          req,
          res,
          new NotAuthenticatedException("Refresh token missing."),
        );
      }

      let refreshTokenData: RefreshTokenData;

      try {
        // Verifies signature/expiry as well as decoding the payload.
        refreshTokenData = JSONWebToken.decodeRefreshToken(refreshToken);
      } catch (err) {
        const error: Error = err as Error;
        logger.warn(
          `Failed to decode status page refresh token: ${
            error.message || "unknown error"
          }`,
        );
        logger.debug(error);
        CookieUtil.removeCookie(res, refreshTokenKey);
        CookieUtil.removeCookie(res, accessTokenKey);
        return Response.sendErrorResponse(
          req,
          res,
          new NotAuthenticatedException("Refresh token is invalid."),
        );
      }

      // Reject tokens minted for a different status page.
      if (
        !refreshTokenData.statusPageId ||
        refreshTokenData.statusPageId.toString() !== statusPageId.toString()
      ) {
        CookieUtil.removeCookie(res, refreshTokenKey);
        CookieUtil.removeCookie(res, accessTokenKey);
        return Response.sendErrorResponse(
          req,
          res,
          new NotAuthenticatedException("Refresh token status page mismatch."),
        );
      }

      // Only the hash of the session id is stored; compare hashes.
      const hashedSessionId: string = await HashedString.hashValue(
        refreshTokenData.sessionId,
        EncryptionSecret,
      );

      const user: StatusPagePrivateUser | null =
        await StatusPagePrivateUserService.findOneBy({
          query: {
            _id: refreshTokenData.userId,
            statusPageId: statusPageId,
            jwtRefreshToken: hashedSessionId,
          },
          select: {
            _id: true,
            email: true,
            statusPageId: true,
          },
          props: {
            isRoot: true,
          },
        });

      if (!user) {
        // Session was rotated or revoked elsewhere — force re-login.
        CookieUtil.removeCookie(res, refreshTokenKey);
        CookieUtil.removeCookie(res, accessTokenKey);
        return Response.sendErrorResponse(
          req,
          res,
          new NotAuthenticatedException("Refresh token does not match."),
        );
      }

      // Issue a new scoped cookie pair for this status page.
      const session = CookieUtil.setStatusPageUserCookie({
        expressResponse: res,
        user: user,
        statusPageId: statusPageId,
      });

      // Mutate req.cookies so anything later in this same request cycle
      // reads the freshly issued tokens rather than the stale ones.
      // NOTE(review): presumably needed by downstream middleware — confirm.
      if (!req.cookies) {
        req.cookies = {} as Dictionary<string>;
      }
      req.cookies[accessTokenKey] = session.accessToken;
      req.cookies[refreshTokenKey] = session.refreshToken;

      // Rotate: persist the hash of the NEW session id, invalidating the
      // refresh token that was just consumed.
      const hashedNewSessionId: string = await HashedString.hashValue(
        session.sessionId,
        EncryptionSecret,
      );

      await StatusPagePrivateUserService.updateOneBy({
        query: {
          _id: user.id!,
          statusPageId: statusPageId,
        },
        data: {
          jwtRefreshToken: hashedNewSessionId,
          lastActive: OneUptimeDate.getCurrentDate(),
        },
        props: {
          isRoot: true,
        },
      });

      logger.info(
        `Status page session refreshed: ${
          user.email?.toString() || user.id?.toString() || "unknown"
        } for status page ${statusPageId.toString()}`,
      );

      // Tokens are also returned in miscData for clients (e.g. embedded
      // status pages) that cannot rely on cookies.
      return Response.sendEntityResponse(
        req,
        res,
        user,
        StatusPagePrivateUser,
        {
          miscData: {
            accessToken: session.accessToken,
            refreshToken: session.refreshToken,
            accessTokenExpiresInSeconds: session.accessTokenExpiresInSeconds,
            refreshTokenExpiresInSeconds: session.refreshTokenExpiresInSeconds,
          },
        },
      );
    } catch (err) {
      return next(err);
    }
  },
);
export default router;

View File

@@ -1,6 +1,7 @@
import WhatsAppService from "../Services/WhatsAppService";
import BadDataException from "Common/Types/Exception/BadDataException";
import { JSONObject } from "Common/Types/JSON";
import GlobalConfig from "Common/Models/DatabaseModels/GlobalConfig";
import { JSONArray, JSONObject } from "Common/Types/JSON";
import ObjectID from "Common/Types/ObjectID";
import Phone from "Common/Types/Phone";
import WhatsAppMessage from "Common/Types/WhatsApp/WhatsAppMessage";
@@ -9,17 +10,214 @@ import {
WhatsAppTemplateIds,
WhatsAppTemplateLanguage,
} from "Common/Types/WhatsApp/WhatsAppTemplates";
import WhatsAppStatus from "Common/Types/WhatsAppStatus";
import ClusterKeyAuthorization from "Common/Server/Middleware/ClusterKeyAuthorization";
import WhatsAppLogService from "Common/Server/Services/WhatsAppLogService";
import GlobalConfigService from "Common/Server/Services/GlobalConfigService";
import Express, {
ExpressRequest,
ExpressResponse,
ExpressRouter,
NextFunction,
} from "Common/Server/Utils/Express";
import logger from "Common/Server/Utils/Logger";
import Response from "Common/Server/Utils/Response";
const router: ExpressRouter = Express.getRouter();
const MAX_STATUS_MESSAGE_LENGTH: number = 500;
/**
 * Translates a raw Meta webhook status string (e.g. "delivered", "failed")
 * into the internal WhatsAppStatus enum. Matching is case-insensitive;
 * anything unrecognized (including a missing status) maps to Unknown.
 */
export const mapWebhookStatusToWhatsAppStatus: (
status?: string,
) => WhatsAppStatus = (status?: string): WhatsAppStatus => {
// Lookup table of normalized webhook values -> internal enum. Several
// webhook values intentionally collapse onto one internal status
// (deleted/removed, queued/pending).
const statusLookup: Record<string, WhatsAppStatus> = {
sent: WhatsAppStatus.Sent,
delivered: WhatsAppStatus.Delivered,
read: WhatsAppStatus.Read,
failed: WhatsAppStatus.Failed,
deleted: WhatsAppStatus.Deleted,
removed: WhatsAppStatus.Deleted,
warning: WhatsAppStatus.Warning,
queued: WhatsAppStatus.Queued,
pending: WhatsAppStatus.Queued,
error: WhatsAppStatus.Error,
success: WhatsAppStatus.Success,
};

const normalizedStatus: string = (status || "").toLowerCase();

return statusLookup[normalizedStatus] ?? WhatsAppStatus.Unknown;
};
/**
 * Builds a human-readable summary line from a Meta webhook status payload.
 * Pulls out the status, timestamp, conversation info, pricing, and the
 * first error (if any), joins them with " | ", and truncates the result to
 * MAX_STATUS_MESSAGE_LENGTH characters. Returns undefined when the payload
 * contains none of the recognized fields.
 */
export const buildStatusMessage: (payload: JSONObject) => string | undefined = (
payload: JSONObject,
): string | undefined => {
const parts: Array<string> = [];

// Render a Unix-seconds timestamp as ISO-8601; fall back to the raw
// string when it is not numeric.
const describeTimestamp = (raw: string): string => {
const seconds: number = Number(raw);
if (!isNaN(seconds)) {
return `Timestamp: ${new Date(seconds * 1000).toISOString()}`;
}
return `Timestamp: ${raw}`;
};

const rawStatus: string | undefined = payload["status"]
? String(payload["status"])
: undefined;
if (rawStatus) {
parts.push(`Status: ${rawStatus}`);
}

const timestamp: string | undefined = payload["timestamp"]
? String(payload["timestamp"])
: undefined;
if (timestamp) {
parts.push(describeTimestamp(timestamp));
}

const conversation: JSONObject | undefined =
(payload["conversation"] as JSONObject | undefined) || undefined;
if (conversation) {
if (conversation["id"]) {
parts.push(`Conversation: ${conversation["id"]}`);
}

const origin: JSONObject | undefined =
(conversation["origin"] as JSONObject | undefined) || undefined;
if (origin?.["type"]) {
parts.push(`Origin: ${origin["type"]}`);
}

// Expiration is only reported when it parses as a numeric timestamp.
if (conversation["expiration_timestamp"]) {
const expirySeconds: number = Number(
conversation["expiration_timestamp"],
);
if (!isNaN(expirySeconds)) {
parts.push(
`Conversation expires: ${new Date(expirySeconds * 1000).toISOString()}`,
);
}
}
}

const pricing: JSONObject | undefined =
(payload["pricing"] as JSONObject | undefined) || undefined;
if (pricing) {
const pricingDetails: Array<string> = [];
if (pricing["billable"] !== undefined) {
pricingDetails.push(`billable=${pricing["billable"]}`);
}
if (pricing["category"]) {
pricingDetails.push(`category=${pricing["category"]}`);
}
if (pricing["pricing_model"]) {
pricingDetails.push(`model=${pricing["pricing_model"]}`);
}
if (pricingDetails.length > 0) {
parts.push(`Pricing: ${pricingDetails.join(", ")}`);
}
}

// Only the first error entry is summarized.
const errors: JSONArray | undefined =
(payload["errors"] as JSONArray | undefined) || undefined;
if (Array.isArray(errors) && errors.length > 0) {
const firstError: JSONObject = errors[0] as JSONObject;
const errorDetails: Array<string> = [];
if (firstError["title"]) {
errorDetails.push(String(firstError["title"]));
}
if (firstError["code"]) {
errorDetails.push(`code=${firstError["code"]}`);
}
if (firstError["detail"]) {
errorDetails.push(String(firstError["detail"]));
}
if (errorDetails.length > 0) {
parts.push(`Error: ${errorDetails.join(" - ")}`);
}
}

if (parts.length === 0) {
return undefined;
}

const statusMessage: string = parts.join(" | ");

// Truncate with an ellipsis so the stored message never exceeds the cap.
if (statusMessage.length <= MAX_STATUS_MESSAGE_LENGTH) {
return statusMessage;
}

return `${statusMessage.substring(0, MAX_STATUS_MESSAGE_LENGTH - 3)}...`;
};
/**
 * Applies a single webhook status payload to the matching WhatsAppLog row,
 * keyed by the provider message id. Logs a warning (and does nothing) when
 * the payload has no id or no log row matches.
 */
const updateWhatsAppLogStatus: (
statusPayload: JSONObject,
) => Promise<void> = async (statusPayload: JSONObject): Promise<void> => {
const messageId: string | undefined = statusPayload["id"]
? String(statusPayload["id"])
: undefined;

// Without a message id there is nothing to correlate against.
if (!messageId) {
logger.warn(
`[Meta WhatsApp Webhook] Received status payload without message id. Payload: ${JSON.stringify(statusPayload)}`,
);
return;
}

const rawStatus: string | undefined = statusPayload["status"]
? String(statusPayload["status"])
: undefined;
const derivedStatus: WhatsAppStatus =
mapWebhookStatusToWhatsAppStatus(rawStatus);
const statusMessage: string | undefined = buildStatusMessage(statusPayload);

// Only overwrite statusMessage when the payload produced a summary.
const updatedCount: number = await WhatsAppLogService.updateOneBy({
query: {
whatsAppMessageId: messageId,
},
data: {
status: derivedStatus,
...(statusMessage ? { statusMessage } : {}),
},
props: {
isRoot: true,
},
});

if (updatedCount === 0) {
logger.warn(
`[Meta WhatsApp Webhook] No WhatsApp log found for message id ${messageId}. Payload: ${JSON.stringify(statusPayload)}`,
);
return;
}

logger.debug(
`[Meta WhatsApp Webhook] Updated WhatsApp log for message id ${messageId} with status ${derivedStatus}.`,
);
};
const toTemplateVariables: (
rawVariables: JSONObject | undefined,
) => Record<string, string> | undefined = (
@@ -119,6 +317,128 @@ router.post(
},
);
router.get("/webhook", async (req: ExpressRequest, res: ExpressResponse) => {
const mode: string | undefined = req.query["hub.mode"]
? String(req.query["hub.mode"])
: undefined;
const verifyToken: string | undefined = req.query["hub.verify_token"]
? String(req.query["hub.verify_token"])
: undefined;
const challenge: string | undefined = req.query["hub.challenge"]
? String(req.query["hub.challenge"])
: undefined;
if (mode === "subscribe" && challenge) {
const globalConfigTokenResponse: GlobalConfig | null =
await GlobalConfigService.findOneBy({
query: {
_id: ObjectID.getZeroObjectID().toString(),
},
props: {
isRoot: true,
},
select: {
metaWhatsAppWebhookVerifyToken: true,
},
});
const configuredVerifyToken: string | undefined =
globalConfigTokenResponse?.metaWhatsAppWebhookVerifyToken?.trim() ||
undefined;
if (!configuredVerifyToken) {
logger.error(
"Meta WhatsApp webhook verify token is not configured. Rejecting verification request.",
);
res.sendStatus(403);
return;
}
if (verifyToken === configuredVerifyToken) {
res.status(200).send(challenge);
return;
}
logger.warn(
"Meta WhatsApp webhook verification failed due to token mismatch.",
);
res.sendStatus(403);
return;
}
res.sendStatus(400);
});
router.post(
"/webhook",
async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
try {
const body: JSONObject = req.body as JSONObject;
if (
(body["object"] as string | undefined) !== "whatsapp_business_account"
) {
logger.debug(
`[Meta WhatsApp Webhook] Received event for unsupported object: ${JSON.stringify(body)}`,
);
return Response.sendEmptySuccessResponse(req, res);
}
const entries: JSONArray | undefined = body["entry"] as
| JSONArray
| undefined;
if (!Array.isArray(entries)) {
logger.warn(
`[Meta WhatsApp Webhook] Payload did not include entries array. Payload: ${JSON.stringify(body)}`,
);
return Response.sendEmptySuccessResponse(req, res);
}
const statusUpdatePromises: Array<Promise<void>> = [];
for (const entry of entries) {
const entryObject: JSONObject = entry as JSONObject;
const changes: JSONArray | undefined =
(entryObject["changes"] as JSONArray | undefined) || undefined;
if (!Array.isArray(changes)) {
continue;
}
for (const change of changes) {
const changeObject: JSONObject = change as JSONObject;
const value: JSONObject | undefined =
(changeObject["value"] as JSONObject | undefined) || undefined;
if (!value) {
continue;
}
const statuses: JSONArray | undefined =
(value["statuses"] as JSONArray | undefined) || undefined;
if (Array.isArray(statuses)) {
for (const statusItem of statuses) {
statusUpdatePromises.push(
updateWhatsAppLogStatus(statusItem as JSONObject),
);
}
}
}
}
if (statusUpdatePromises.length > 0) {
await Promise.allSettled(statusUpdatePromises);
}
return Response.sendEmptySuccessResponse(req, res);
} catch (err) {
return next(err);
}
},
);
router.post(
"/test",
async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {

View File

@@ -68,8 +68,6 @@ export default class WhatsAppService {
);
}
const config: MetaWhatsAppConfig = await getMetaWhatsAppConfig();
const isSensitiveMessage: boolean = Boolean(options.isSensitive);
const messageSummary: string = isSensitiveMessage
? SENSITIVE_MESSAGE_PLACEHOLDER
@@ -131,6 +129,8 @@ export default class WhatsAppService {
whatsAppLog.onCallDutyPolicyScheduleId = options.onCallScheduleId;
}
const config: MetaWhatsAppConfig = await getMetaWhatsAppConfig();
let messageCost: number = 0;
const shouldChargeForMessage: boolean = IsBillingEnabled;
@@ -417,7 +417,11 @@ export default class WhatsAppService {
}
}
whatsAppLog.status = WhatsAppStatus.Success;
if (messageId) {
whatsAppLog.whatsAppMessageId = messageId;
}
whatsAppLog.status = WhatsAppStatus.Sent;
whatsAppLog.statusMessage = messageId
? `Message ID: ${messageId}`
: "WhatsApp message sent successfully";
@@ -470,10 +474,14 @@ export default class WhatsAppService {
await UserOnCallLogTimelineService.updateOneById({
id: options.userOnCallLogTimelineId,
data: {
status:
whatsAppLog.status === WhatsAppStatus.Success
? UserNotificationStatus.Sent
: UserNotificationStatus.Error,
status: [
WhatsAppStatus.Success,
WhatsAppStatus.Sent,
WhatsAppStatus.Delivered,
WhatsAppStatus.Read,
].includes(whatsAppLog.status || WhatsAppStatus.Error)
? UserNotificationStatus.Sent
: UserNotificationStatus.Error,
statusMessage: whatsAppLog.statusMessage,
},
props: {

520
App/package-lock.json generated
View File

@@ -59,7 +59,9 @@
"@opentelemetry/sdk-trace-web": "^1.25.1",
"@opentelemetry/semantic-conventions": "^1.26.0",
"@remixicon/react": "^4.2.0",
"@simplewebauthn/server": "^13.2.2",
"@tippyjs/react": "^4.2.6",
"@types/archiver": "^6.0.3",
"@types/crypto-js": "^4.2.2",
"@types/qrcode": "^1.5.5",
"@types/react-highlight": "^0.12.8",
@@ -68,7 +70,9 @@
"@types/web-push": "^3.6.4",
"acme-client": "^5.3.0",
"airtable": "^0.12.2",
"axios": "^1.7.2",
"archiver": "^7.0.1",
"axios": "^1.12.0",
"botbuilder": "^4.23.3",
"bullmq": "^5.3.3",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
@@ -90,10 +94,10 @@
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"node-cron": "^3.0.3",
"nodemailer": "^6.9.10",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",
"pg": "^8.7.3",
"playwright": "^1.50.0",
"playwright": "^1.55.1",
"posthog-js": "^1.139.6",
"prop-types": "^15.8.1",
"qrcode": "^1.5.3",
@@ -109,7 +113,7 @@
"react-router-dom": "^6.24.1",
"react-select": "^5.4.0",
"react-spinners": "^0.14.1",
"react-syntax-highlighter": "^15.5.0",
"react-syntax-highlighter": "^16.0.0",
"react-toggle": "^4.1.3",
"reactflow": "^11.11.4",
"recharts": "^2.12.7",
@@ -253,89 +257,20 @@
}
},
"node_modules/@babel/code-frame": {
"version": "7.23.5",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.5.tgz",
"integrity": "sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==",
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
"integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/highlight": "^7.23.4",
"chalk": "^2.4.2"
"@babel/helper-validator-identifier": "^7.27.1",
"js-tokens": "^4.0.0",
"picocolors": "^1.1.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/code-frame/node_modules/ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"dependencies": {
"color-convert": "^1.9.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/code-frame/node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"dev": true,
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/code-frame/node_modules/color-convert": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
"dev": true,
"dependencies": {
"color-name": "1.1.3"
}
},
"node_modules/@babel/code-frame/node_modules/color-name": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
"dev": true
},
"node_modules/@babel/code-frame/node_modules/escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
"dev": true,
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/@babel/code-frame/node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"dev": true,
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/code-frame/node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/compat-data": {
"version": "7.23.5",
"resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.5.tgz",
@@ -511,19 +446,21 @@
}
},
"node_modules/@babel/helper-string-parser": {
"version": "7.23.4",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz",
"integrity": "sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==",
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
"integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-validator-identifier": {
"version": "7.22.20",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz",
"integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==",
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
"integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
@@ -538,109 +475,28 @@
}
},
"node_modules/@babel/helpers": {
"version": "7.23.6",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.6.tgz",
"integrity": "sha512-wCfsbN4nBidDRhpDhvcKlzHWCTlgJYUUdSJfzXb2NuBssDSIjc3xcb+znA7l+zYsFljAcGM0aFkN40cR3lXiGA==",
"version": "7.28.4",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz",
"integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/template": "^7.22.15",
"@babel/traverse": "^7.23.6",
"@babel/types": "^7.23.6"
"@babel/template": "^7.27.2",
"@babel/types": "^7.28.4"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/highlight": {
"version": "7.23.4",
"resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.23.4.tgz",
"integrity": "sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==",
"dev": true,
"dependencies": {
"@babel/helper-validator-identifier": "^7.22.20",
"chalk": "^2.4.2",
"js-tokens": "^4.0.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/highlight/node_modules/ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"dependencies": {
"color-convert": "^1.9.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/highlight/node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"dev": true,
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/highlight/node_modules/color-convert": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
"dev": true,
"dependencies": {
"color-name": "1.1.3"
}
},
"node_modules/@babel/highlight/node_modules/color-name": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
"dev": true
},
"node_modules/@babel/highlight/node_modules/escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
"dev": true,
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/@babel/highlight/node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"dev": true,
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/highlight/node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/parser": {
"version": "7.23.6",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.6.tgz",
"integrity": "sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==",
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz",
"integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.28.5"
},
"bin": {
"parser": "bin/babel-parser.js"
},
@@ -811,14 +667,15 @@
}
},
"node_modules/@babel/template": {
"version": "7.22.15",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz",
"integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==",
"version": "7.27.2",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
"integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.22.13",
"@babel/parser": "^7.22.15",
"@babel/types": "^7.22.15"
"@babel/code-frame": "^7.27.1",
"@babel/parser": "^7.27.2",
"@babel/types": "^7.27.1"
},
"engines": {
"node": ">=6.9.0"
@@ -846,14 +703,14 @@
}
},
"node_modules/@babel/types": {
"version": "7.23.6",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.6.tgz",
"integrity": "sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==",
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz",
"integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.23.4",
"@babel/helper-validator-identifier": "^7.22.20",
"to-fast-properties": "^2.0.0"
"@babel/helper-string-parser": "^7.27.1",
"@babel/helper-validator-identifier": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -1745,15 +1602,17 @@
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"license": "MIT"
},
"node_modules/axios": {
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
"integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
"version": "1.13.1",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.13.1.tgz",
"integrity": "sha512-hU4EGxxt+j7TQijx1oYdAjw4xuIp1wRQSsbMFwSthCWeBQur1eF+qJ5iQ5sN3Tw8YRzQNKb8jszgBdMDVqwJcw==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",
"follow-redirects": "^1.15.6",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
@@ -1863,21 +1722,23 @@
}
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.0.1"
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@@ -1948,6 +1809,19 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/call-bind-apply-helpers": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -2108,6 +1982,7 @@
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"license": "MIT",
"dependencies": {
"delayed-stream": "~1.0.0"
},
@@ -2136,10 +2011,11 @@
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ=="
},
"node_modules/cross-spawn": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dev": true,
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",
"shebang-command": "^2.0.0",
@@ -2201,6 +2077,7 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"license": "MIT",
"engines": {
"node": ">=0.4.0"
}
@@ -2231,6 +2108,20 @@
"node": "^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
"es-errors": "^1.3.0",
"gopd": "^1.2.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/ecdsa-sig-formatter": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz",
@@ -2240,9 +2131,10 @@
}
},
"node_modules/ejs": {
"version": "3.1.9",
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz",
"integrity": "sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==",
"version": "3.1.10",
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz",
"integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==",
"license": "Apache-2.0",
"dependencies": {
"jake": "^10.8.5"
},
@@ -2286,6 +2178,51 @@
"is-arrayish": "^0.2.1"
}
},
"node_modules/es-define-property": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-object-atoms": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-set-tostringtag": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/escalade": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
@@ -2389,9 +2326,10 @@
}
},
"node_modules/filelist/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
@@ -2408,10 +2346,11 @@
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
@@ -2433,15 +2372,16 @@
}
},
"node_modules/follow-redirects": {
"version": "1.15.3",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz",
"integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==",
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
@@ -2452,12 +2392,15 @@
}
},
"node_modules/form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
@@ -2511,14 +2454,24 @@
}
},
"node_modules/get-intrinsic": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz",
"integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==",
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
"es-object-atoms": "^1.1.1",
"function-bind": "^1.1.2",
"has-proto": "^1.0.1",
"has-symbols": "^1.0.3",
"hasown": "^2.0.0"
"get-proto": "^1.0.1",
"gopd": "^1.2.0",
"has-symbols": "^1.1.0",
"hasown": "^2.0.2",
"math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -2533,6 +2486,19 @@
"node": ">=8.0.0"
}
},
"node_modules/get-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
"es-object-atoms": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/get-stream": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
@@ -2587,11 +2553,12 @@
}
},
"node_modules/gopd": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
"integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
"dependencies": {
"get-intrinsic": "^1.1.3"
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -2642,10 +2609,11 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz",
"integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==",
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
@@ -2653,10 +2621,14 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-symbols": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
"integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
"node_modules/has-tostringtag": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
},
@@ -2665,9 +2637,10 @@
}
},
"node_modules/hasown": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz",
"integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
},
@@ -2826,6 +2799,7 @@
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
@@ -3523,7 +3497,8 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
"dev": true
"dev": true,
"license": "MIT"
},
"node_modules/js-yaml": {
"version": "3.14.1",
@@ -3780,6 +3755,15 @@
"tmpl": "1.0.5"
}
},
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/merge-stream": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
@@ -3787,12 +3771,13 @@
"dev": true
},
"node_modules/micromatch": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dev": true,
"license": "MIT",
"dependencies": {
"braces": "^3.0.2",
"braces": "^3.0.3",
"picomatch": "^2.3.1"
},
"engines": {
@@ -3803,6 +3788,7 @@
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
@@ -3811,6 +3797,7 @@
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"dependencies": {
"mime-db": "1.52.0"
},
@@ -3875,9 +3862,10 @@
"dev": true
},
"node_modules/nodemailer": {
"version": "6.9.7",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.7.tgz",
"integrity": "sha512-rUtR77ksqex/eZRLmQ21LKVH5nAAsVicAtAYudK7JgwenEDZ0UIQ1adUGqErz7sMkWYxWTTU1aeP2Jga6WQyJw==",
"version": "6.10.1",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.10.1.tgz",
"integrity": "sha512-Z+iLaBGVaSjbIzQ4pX6XV41HrooLsQ10ZWPUehGmuantvzWoDVBnmsdUcOIDM1t+yPor5pDhVlDESgOMEGxhHA==",
"license": "MIT-0",
"engines": {
"node": ">=6.0.0"
}
@@ -4120,10 +4108,11 @@
"dev": true
},
"node_modules/picocolors": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
"dev": true
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"dev": true,
"license": "ISC"
},
"node_modules/picomatch": {
"version": "2.3.1",
@@ -4627,20 +4616,12 @@
"integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==",
"dev": true
},
"node_modules/to-fast-properties": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz",
"integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==",
"dev": true,
"engines": {
"node": ">=4"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},
@@ -4910,9 +4891,10 @@
}
},
"node_modules/xml-crypto": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-3.2.0.tgz",
"integrity": "sha512-qVurBUOQrmvlgmZqIVBqmb06TD2a/PpEUfFPgD7BuBfjmoH4zgkqaWSIJrnymlCvM2GGt9x+XtJFA+ttoAufqg==",
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-3.2.1.tgz",
"integrity": "sha512-0GUNbPtQt+PLMsC5HoZRONX+K6NBJEqpXe/lsvrFj0EqfpGPpVfJKGE7a5jCg8s2+Wkrf/2U1G41kIH+zC9eyQ==",
"license": "MIT",
"dependencies": {
"@xmldom/xmldom": "^0.8.8",
"xpath": "0.0.32"

View File

@@ -18,8 +18,6 @@
"dependencies": {
"@sendgrid/mail": "^8.1.0",
"Common": "file:../Common",
"ejs": "^3.1.9",
"handlebars": "^4.7.8",
"nodemailer": "^6.9.7",

View File

@@ -3,6 +3,8 @@ import Route from "../../../Types/API/Route";
import AnalyticsTableEngine from "../../../Types/AnalyticsDatabase/AnalyticsTableEngine";
import AnalyticsTableColumn from "../../../Types/AnalyticsDatabase/TableColumn";
import TableColumnType from "../../../Types/AnalyticsDatabase/TableColumnType";
import Projection from "../../../Types/AnalyticsDatabase/Projection";
import MaterializedView from "../../../Types/AnalyticsDatabase/MaterializedView";
import {
ColumnAccessControl,
TableAccessControl,
@@ -40,6 +42,8 @@ export default class AnalyticsBaseModel extends CommonModel {
enableWorkflowOn?: EnableWorkflowOn | undefined;
enableRealtimeEventsOn?: EnableRealtimeEventsOn | undefined;
partitionKey: string;
projections?: Array<Projection> | undefined;
materializedViews?: Array<MaterializedView> | undefined;
}) {
super({
tableColumns: data.tableColumns,
@@ -140,6 +144,8 @@ export default class AnalyticsBaseModel extends CommonModel {
this.crudApiPath = data.crudApiPath;
this.enableRealtimeEventsOn = data.enableRealtimeEventsOn;
this.partitionKey = data.partitionKey;
this.projections = data.projections || [];
this.materializedViews = data.materializedViews || [];
}
private _enableWorkflowOn: EnableWorkflowOn | undefined;
@@ -250,6 +256,22 @@ export default class AnalyticsBaseModel extends CommonModel {
this._crudApiPath = v;
}
private _projections: Array<Projection> = [];
public get projections(): Array<Projection> {
return this._projections;
}
public set projections(v: Array<Projection>) {
this._projections = v;
}
private _materializedViews: Array<MaterializedView> = [];
public get materializedViews(): Array<MaterializedView> {
return this._materializedViews;
}
public set materializedViews(v: Array<MaterializedView>) {
this._materializedViews = v;
}
public getTenantColumn(): AnalyticsTableColumn | null {
const column: AnalyticsTableColumn | undefined = this.tableColumns.find(
(column: AnalyticsTableColumn) => {

View File

@@ -9,6 +9,332 @@ import { SpanStatus } from "./Span";
export default class ExceptionInstance extends AnalyticsBaseModel {
public constructor() {
const projectIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const serviceIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the log",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const timeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "time",
title: "Time",
description: "When was the log created?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const timeUnixNanoColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "timeUnixNano",
title: "Time (in Unix Nano)",
description: "When was the log created?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const exceptionTypeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "exceptionType",
title: "Exception Type",
description: "Exception Type", // Examples: java.net.ConnectException; OSError; etc.
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const stackTraceColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "stackTrace",
title: "Stack Trace",
description: "Exception Stack Trace", // Examples: Division by zero; Can't convert 'int' object to str implicitly
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const messageColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "message",
title: "Exception Message",
description: "Exception Message", // Examples: Division by zero; Can't convert 'int' object to str implicitly
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const spanStatusCodeColumn: AnalyticsTableColumn = new AnalyticsTableColumn(
{
key: "spanStatusCode",
title: "Span Status Code",
description: "Span Status Code",
required: false,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
},
);
const escapedColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "escaped",
title: "Exception Escaped",
description: "Exception Escaped", // SHOULD be set to true if the exception event is recorded at a point where it is known that the exception is escaping the scope of the span.
required: false,
type: TableColumnType.Boolean,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const traceIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "traceId",
title: "Trace ID",
description: "ID of the trace",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const spanIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "spanId",
title: "Span ID",
description: "ID of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const fingerprintColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "fingerprint",
title: "Fingerprint",
description: "Fingerprint of the exception",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
const spanNameColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "spanName",
title: "Span Name",
description: "Name of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const attributesColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
});
super({
tableName: "ExceptionItem",
tableEngine: AnalyticsTableEngine.MergeTree,
@@ -45,330 +371,22 @@ export default class ExceptionInstance extends AnalyticsBaseModel {
},
crudApiPath: new Route("/exceptions"),
tableColumns: [
new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the log",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "time",
title: "Time",
description: "When was the log created?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "timeUnixNano",
title: "Time (in Unix Nano)",
description: "When was the log created?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "exceptionType",
title: "Exception Type",
description: "Exception Type", // Examples: java.net.ConnectException; OSError; etc.
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "stackTrace",
title: "Stack Trace",
description: "Exception Stack Trace", // Examples: Division by zero; Can't convert 'int' object to str implicitly
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "message",
title: "Exception Message",
description: "Exception Message", // Examples: Division by zero; Can't convert 'int' object to str implicitly
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "spanStatusCode",
title: "Span Status Code",
description: "Span Status Code",
required: false,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "escaped",
title: "Exception Escaped",
description: "Exception Escaped", // SHOULD be set to true if the exception event is recorded at a point where it is known that the exception is escaping the scope of the span.
required: false,
type: TableColumnType.Boolean,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "traceId",
title: "Trace ID",
description: "ID of the trace",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "spanId",
title: "Span ID",
description: "ID of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "fingerprint",
title: "Fingerprint",
description: "Fingerprint of the exception",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "spanName",
title: "Span Name",
description: "Name of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryException,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryException,
],
update: [],
},
}),
projectIdColumn,
serviceIdColumn,
timeColumn,
timeUnixNanoColumn,
exceptionTypeColumn,
stackTraceColumn,
messageColumn,
spanStatusCodeColumn,
escapedColumn,
traceIdColumn,
spanIdColumn,
fingerprintColumn,
spanNameColumn,
attributesColumn,
],
projections: [],
sortKeys: ["projectId", "time", "serviceId", "fingerprint"],
primaryKeys: ["projectId", "time", "serviceId", "fingerprint"],
partitionKey: "sipHash64(projectId) % 16",

View File

@@ -2,7 +2,6 @@ import AnalyticsBaseModel from "./AnalyticsBaseModel/AnalyticsBaseModel";
import Log from "./Log";
import Metric from "./Metric";
import Span from "./Span";
import TelemetryAttribute from "./TelemetryAttribute";
import ExceptionInstance from "./ExceptionInstance";
import MonitorLog from "./MonitorLog";
@@ -10,7 +9,6 @@ const AnalyticsModels: Array<{ new (): AnalyticsBaseModel }> = [
Log,
Span,
Metric,
TelemetryAttribute,
ExceptionInstance,
MonitorLog,
];

View File

@@ -10,6 +10,264 @@ import LogSeverity from "../../Types/Log/LogSeverity";
export default class Log extends AnalyticsBaseModel {
public constructor() {
const projectIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const serviceIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the log",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const timeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "time",
title: "Time",
description: "When was the log created?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const timeUnixNanoColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "timeUnixNano",
title: "Time (in Unix Nano)",
description: "When was the log created?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const severityTextColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "severityText",
title: "Severity Text",
description: "Log Severity Text",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const severityNumberColumn: AnalyticsTableColumn = new AnalyticsTableColumn(
{
key: "severityNumber",
title: "Severity Number",
description: "Log Severity Number",
required: true,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
},
);
const attributesColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const attributeKeysColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "attributeKeys",
title: "Attribute Keys",
description: "Attribute keys extracted from attributes",
required: true,
defaultValue: [],
type: TableColumnType.ArrayText,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const traceIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "traceId",
title: "Trace ID",
description: "ID of the trace",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const spanIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "spanId",
title: "Span ID",
description: "ID of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const bodyColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "body",
title: "Log Body",
description: "Body of the Log",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
super({
tableName: "LogItem",
tableEngine: AnalyticsTableEngine.MergeTree,
@@ -43,238 +301,19 @@ export default class Log extends AnalyticsBaseModel {
pluralName: "Logs",
crudApiPath: new Route("/logs"),
tableColumns: [
new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the log",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "time",
title: "Time",
description: "When was the log created?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "timeUnixNano",
title: "Time (in Unix Nano)",
description: "When was the log created?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "severityText",
title: "Severity Text",
description: "Log Severity Text",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "severityNumber",
title: "Severity Number",
description: "Log Severity Number",
required: true,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "traceId",
title: "Trace ID",
description: "ID of the trace",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "spanId",
title: "Span ID",
description: "ID of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "body",
title: "Log Body",
description: "Body of the Log",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
projectIdColumn,
serviceIdColumn,
timeColumn,
timeUnixNanoColumn,
severityTextColumn,
severityNumberColumn,
attributesColumn,
attributeKeysColumn,
traceIdColumn,
spanIdColumn,
bodyColumn,
],
projections: [],
sortKeys: ["projectId", "time", "serviceId"],
primaryKeys: ["projectId", "time", "serviceId"],
partitionKey: "sipHash64(projectId) % 16",
@@ -345,6 +384,14 @@ export default class Log extends AnalyticsBaseModel {
this.setColumnValue("attributes", v);
}
/**
 * Keys extracted from the log's `attributes` JSON, stored in a dedicated
 * array column. Undefined when the column has not been populated.
 */
public get attributeKeys(): Array<string> | undefined {
  const stored: unknown = this.getColumnValue("attributeKeys");
  return stored as Array<string> | undefined;
}

public set attributeKeys(keys: Array<string> | undefined) {
  this.setColumnValue("attributeKeys", keys);
}
public get traceId(): string | undefined {
return this.getColumnValue("traceId") as string | undefined;
}

View File

@@ -28,6 +28,481 @@ export enum ServiceType {
export default class Metric extends AnalyticsBaseModel {
public constructor() {
const projectIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
// this can also be the monitor id or the telemetry service id.
const serviceIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the Metric",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
// this can also be the monitor id or the telemetry service id.
const serviceTypeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "serviceType",
title: "Service Type",
description: "Type of the service that this telemetry belongs to",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
// add name and description
const nameColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "name",
title: "Name",
description: "Name of the Metric",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const aggregationTemporalityColumn: AnalyticsTableColumn =
new AnalyticsTableColumn({
key: "aggregationTemporality",
title: "Aggregation Temporality",
description: "Aggregation Temporality of this Metric",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const metricPointTypeColumn: AnalyticsTableColumn =
new AnalyticsTableColumn({
key: "metricPointType",
title: "Metric Point Type",
description: "Metric Point Type of this Metric",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
// this is end time.
const timeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "time",
title: "Time",
description: "When did the Metric happen?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const startTimeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "startTime",
title: "Start Time",
description: "When did the Metric happen?",
required: false,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
// end time.
const timeUnixNanoColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "timeUnixNano",
title: "Time (in Unix Nano)",
description: "When did the Metric happen?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const startTimeUnixNanoColumn: AnalyticsTableColumn =
new AnalyticsTableColumn({
key: "startTimeUnixNano",
title: "Start Time (in Unix Nano)",
description: "When did the Metric happen?",
required: false,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const attributesColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
type: TableColumnType.JSON,
defaultValue: {},
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const attributeKeysColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "attributeKeys",
title: "Attribute Keys",
description: "Attribute keys extracted from attributes",
required: true,
defaultValue: [],
type: TableColumnType.ArrayText,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const isMonotonicColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "isMonotonic",
title: "Is Monotonic",
description: "Is Monotonic",
required: false,
type: TableColumnType.Boolean,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const countColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "count",
title: "Count",
description: "Count",
required: false,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const sumColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "sum",
title: "Sum",
description: "Sum",
required: false,
type: TableColumnType.Decimal,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const valueColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "value",
title: "Value",
description: "Value",
required: false,
type: TableColumnType.Decimal,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const minColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "min",
title: "Min",
description: "Min",
required: false,
type: TableColumnType.Decimal,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const maxColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "max",
title: "Max",
description: "Max",
required: false,
type: TableColumnType.Decimal,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const bucketCountsColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "bucketCounts",
title: "Bucket Counts",
description: "Bucket Counts",
required: true,
defaultValue: [],
type: TableColumnType.ArrayNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const explicitBoundsColumn: AnalyticsTableColumn = new AnalyticsTableColumn(
{
key: "explicitBounds",
title: "Explicit Bonds",
description: "Explicit Bonds",
required: true,
defaultValue: [],
type: TableColumnType.ArrayNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
},
);
super({
tableName: "MetricItem",
tableEngine: AnalyticsTableEngine.MergeTree,
@@ -61,453 +536,28 @@ export default class Metric extends AnalyticsBaseModel {
],
},
tableColumns: [
new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
// this can also be the monitor id or the telemetry service id.
new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the Metric",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
// this can also be the monitor id or the telemetry service id.
new AnalyticsTableColumn({
key: "serviceType",
title: "Service Type",
description: "Type of the service that this telemetry belongs to",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
// add name and description
new AnalyticsTableColumn({
key: "name",
title: "Name",
description: "Name of the Metric",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "aggregationTemporality",
title: "Aggregation Temporality",
description: "Aggregation Temporality of this Metric",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "metricPointType",
title: "Metric Point Type",
description: "Metric Point Type of this Metric",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
// this is end time.
new AnalyticsTableColumn({
key: "time",
title: "Time",
description: "When did the Metric happen?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "startTime",
title: "Start Time",
description: "When did the Metric happen?",
required: false,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
// end time.
new AnalyticsTableColumn({
key: "timeUnixNano",
title: "Time (in Unix Nano)",
description: "When did the Metric happen?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "startTimeUnixNano",
title: "Start Time (in Unix Nano)",
description: "When did the Metric happen?",
required: false,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
type: TableColumnType.JSON,
defaultValue: {},
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "isMonotonic",
title: "Is Monotonic",
description: "Is Monotonic",
required: false,
type: TableColumnType.Boolean,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "count",
title: "Count",
description: "Count",
required: false,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "sum",
title: "Sum",
description: "Sum",
required: false,
type: TableColumnType.Decimal,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "value",
title: "Value",
description: "Value",
required: false,
type: TableColumnType.Decimal,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "min",
title: "Min",
description: "Min",
required: false,
type: TableColumnType.Decimal,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "max",
title: "Max",
description: "Max",
required: false,
type: TableColumnType.Decimal,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "bucketCounts",
title: "Bucket Counts",
description: "Bucket Counts",
required: true,
defaultValue: [],
type: TableColumnType.ArrayNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "explicitBounds",
title: "Explicit Bonds",
description: "Explicit Bonds",
required: true,
defaultValue: [],
type: TableColumnType.ArrayNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
projectIdColumn,
serviceIdColumn,
serviceTypeColumn,
nameColumn,
aggregationTemporalityColumn,
metricPointTypeColumn,
timeColumn,
startTimeColumn,
timeUnixNanoColumn,
startTimeUnixNanoColumn,
attributesColumn,
attributeKeysColumn,
isMonotonicColumn,
countColumn,
sumColumn,
valueColumn,
minColumn,
maxColumn,
bucketCountsColumn,
explicitBoundsColumn,
],
projections: [],
sortKeys: ["projectId", "time", "serviceId"],
primaryKeys: ["projectId", "time", "serviceId"],
partitionKey: "sipHash64(projectId) % 16",
@@ -590,6 +640,14 @@ export default class Metric extends AnalyticsBaseModel {
this.setColumnValue("attributes", v);
}
public get attributeKeys(): Array<string> | undefined {
return this.getColumnValue("attributeKeys") as Array<string> | undefined;
}
public set attributeKeys(v: Array<string> | undefined) {
this.setColumnValue("attributeKeys", v);
}
public get startTime(): Date | undefined {
return this.getColumnValue("startTime") as Date | undefined;
}

View File

@@ -9,6 +9,100 @@ import Permission from "../../Types/Permission";
export default class MonitorLog extends AnalyticsBaseModel {
public constructor() {
const projectIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectMonitor,
],
update: [],
},
});
const monitorIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "monitorId",
title: "Monitor ID",
description: "ID of the monitor which this logs belongs to",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectMonitor,
],
update: [],
},
});
const timeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "time",
title: "Time",
description: "When was the log created?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectMonitor,
],
update: [],
},
});
const logBodyColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "logBody",
title: "Log Body",
description: "The body of the log",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectMonitor,
],
update: [],
},
});
super({
tableName: "MonitorLog",
tableEngine: AnalyticsTableEngine.MergeTree,
@@ -42,100 +136,12 @@ export default class MonitorLog extends AnalyticsBaseModel {
pluralName: "Monitor Logs",
crudApiPath: new Route("/monitor-log"),
tableColumns: [
new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectMonitor,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "monitorId",
title: "Monitor ID",
description: "ID of the monitor which this logs belongs to",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectMonitor,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "time",
title: "Time",
description: "When was the log created?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectMonitor,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "logBody",
title: "Log Body",
description: "The body of the log",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectMonitor,
],
update: [],
},
}),
projectIdColumn,
monitorIdColumn,
timeColumn,
logBodyColumn,
],
projections: [],
sortKeys: ["projectId", "time", "monitorId"],
primaryKeys: ["projectId", "time", "monitorId"],
partitionKey: "sipHash64(projectId) % 16",

View File

@@ -41,6 +41,451 @@ export interface SpanLink {
export default class Span extends AnalyticsBaseModel {
public constructor() {
const projectIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const serviceIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the log",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const startTimeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "startTime",
title: "Start Time",
description: "When did the span start?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const endTimeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "endTime",
title: "End Time",
description: "When did the span end?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const startTimeUnixNanoColumn: AnalyticsTableColumn =
new AnalyticsTableColumn({
key: "startTimeUnixNano",
title: "Start Time in Unix Nano",
description: "When did the span start?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const durationUnixNanoColumn: AnalyticsTableColumn =
new AnalyticsTableColumn({
key: "durationUnixNano",
title: "Duration in Unix Nano",
description: "How long did the span last?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const endTimeUnixNanoColumn: AnalyticsTableColumn =
new AnalyticsTableColumn({
key: "endTimeUnixNano",
title: "End Time",
description: "When did the span end?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const traceIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "traceId",
title: "Trace ID",
description: "ID of the trace",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const spanIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "spanId",
title: "Span ID",
description: "ID of the span",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const parentSpanIdColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "parentSpanId",
title: "Parent Span ID",
description: "ID of the parent span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const traceStateColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "traceState",
title: "Trace State",
description: "Trace State",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const attributesColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const attributeKeysColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "attributeKeys",
title: "Attribute Keys",
description: "Attribute keys extracted from attributes",
required: true,
defaultValue: [],
type: TableColumnType.ArrayText,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const eventsColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "events",
title: "Events",
description: "Span Events",
required: true,
defaultValue: [],
type: TableColumnType.JSONArray,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const linksColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "links",
title: "Links",
description: "Span Links",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const statusCodeColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "statusCode",
title: "Status Code",
description: "Status Code",
required: false,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const statusMessageColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "statusMessage",
title: "Status Message",
description: "Status Message",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const nameColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "name",
title: "Name",
description: "Name of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
const kindColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "kind",
title: "Kind",
description: "Kind of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
});
super({
tableName: "SpanItem",
tableEngine: AnalyticsTableEngine.MergeTree,
@@ -74,424 +519,27 @@ export default class Span extends AnalyticsBaseModel {
],
},
tableColumns: [
new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the log",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "startTime",
title: "Start Time",
description: "When did the span start?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "endTime",
title: "End Time",
description: "When did the span end?",
required: true,
type: TableColumnType.Date,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "startTimeUnixNano",
title: "Start Time in Unix Nano",
description: "When did the span start?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "durationUnixNano",
title: "Duration in Unix Nano",
description: "How long did the span last?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "endTimeUnixNano",
title: "End Time",
description: "When did the span end?",
required: true,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "traceId",
title: "Trace ID",
description: "ID of the trace",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "spanId",
title: "Span ID",
description: "ID of the span",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "parentSpanId",
title: "Parent Span ID",
description: "ID of the parent span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "traceState",
title: "Trace State",
description: "Trace State",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "events",
title: "Events",
description: "Span Events",
required: true,
defaultValue: [],
type: TableColumnType.JSONArray,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "links",
title: "Links",
description: "Span Links",
required: true,
defaultValue: {},
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "statusCode",
title: "Status Code",
description: "Status Code",
required: false,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "statusMessage",
title: "Status Message",
description: "Status Message",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "name",
title: "Name",
description: "Name of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "kind",
title: "Kind",
description: "Kind of the span",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
],
update: [],
},
}),
projectIdColumn,
serviceIdColumn,
startTimeColumn,
endTimeColumn,
startTimeUnixNanoColumn,
durationUnixNanoColumn,
endTimeUnixNanoColumn,
traceIdColumn,
spanIdColumn,
parentSpanIdColumn,
traceStateColumn,
attributesColumn,
attributeKeysColumn,
eventsColumn,
linksColumn,
statusCodeColumn,
statusMessageColumn,
nameColumn,
kindColumn,
],
projections: [],
sortKeys: ["projectId", "startTime", "serviceId", "traceId"],
primaryKeys: ["projectId", "startTime", "serviceId", "traceId"],
partitionKey: "sipHash64(projectId) % 16",
@@ -610,6 +658,14 @@ export default class Span extends AnalyticsBaseModel {
this.setColumnValue("attributes", v);
}
public get attributeKeys(): Array<string> | undefined {
return this.getColumnValue("attributeKeys") as Array<string> | undefined;
}
public set attributeKeys(v: Array<string> | undefined) {
this.setColumnValue("attributeKeys", v);
}
public get events(): Array<SpanEvent> | undefined {
return this.getColumnValue("events") as Array<SpanEvent> | undefined;
}

View File

@@ -1,164 +0,0 @@
import AnalyticsBaseModel from "./AnalyticsBaseModel/AnalyticsBaseModel";
import Route from "../../Types/API/Route";
import AnalyticsTableEngine from "../../Types/AnalyticsDatabase/AnalyticsTableEngine";
import AnalyticsTableColumn from "../../Types/AnalyticsDatabase/TableColumn";
import TableColumnType from "../../Types/AnalyticsDatabase/TableColumnType";
import TelemetryType from "../../Types/Telemetry/TelemetryType";
import ObjectID from "../../Types/ObjectID";
import Permission from "../../Types/Permission";
export default class TelemetryAttribute extends AnalyticsBaseModel {
public constructor() {
super({
tableName: "TelemetryAttribute",
tableEngine: AnalyticsTableEngine.MergeTree,
singularName: "Telemetry Attribute",
pluralName: "Telemetry Attributes",
crudApiPath: new Route("/telemetry-attributes"),
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
Permission.ReadTelemetryServiceLog,
Permission.ReadTelemetryServiceMetrics,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceTraces,
Permission.CreateTelemetryServiceLog,
Permission.CreateTelemetryServiceMetrics,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditTelemetryServiceTraces,
Permission.EditTelemetryServiceLog,
Permission.EditTelemetryServiceMetrics,
],
delete: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.DeleteTelemetryServiceTraces,
Permission.DeleteTelemetryServiceLog,
Permission.DeleteTelemetryServiceMetrics,
],
},
tableColumns: [
new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
Permission.ReadTelemetryServiceLog,
Permission.ReadTelemetryServiceMetrics,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditTelemetryServiceTraces,
Permission.EditTelemetryServiceLog,
Permission.EditTelemetryServiceMetrics,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "telemetryType",
title: "Telemetry Type",
description: "Type of telemetry",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
Permission.ReadTelemetryServiceLog,
Permission.ReadTelemetryServiceMetrics,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditTelemetryServiceTraces,
Permission.EditTelemetryServiceLog,
Permission.EditTelemetryServiceMetrics,
],
update: [],
},
}),
new AnalyticsTableColumn({
key: "attributes",
title: "Attributes",
description: "Attributes",
required: true,
type: TableColumnType.JSONArray,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceTraces,
Permission.ReadTelemetryServiceLog,
Permission.ReadTelemetryServiceMetrics,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditTelemetryServiceTraces,
Permission.EditTelemetryServiceLog,
Permission.EditTelemetryServiceMetrics,
],
update: [],
},
}),
],
sortKeys: ["projectId", "telemetryType"],
primaryKeys: ["projectId", "telemetryType"],
partitionKey: "sipHash64(projectId) % 16",
});
}
public get projectId(): ObjectID | undefined {
return this.getColumnValue("projectId") as ObjectID | undefined;
}
public set projectId(v: ObjectID | undefined) {
this.setColumnValue("projectId", v);
}
public get telemetryType(): TelemetryType | undefined {
return this.getColumnValue("telemetryType") as TelemetryType | undefined;
}
public set telemetryType(v: TelemetryType | undefined) {
this.setColumnValue("telemetryType", v);
}
public get attributes(): Array<string> | undefined {
return this.getColumnValue("attributes") as Array<string> | undefined;
}
public set attributes(v: Array<string> | undefined) {
this.setColumnValue("attributes", v);
}
}

View File

@@ -4,11 +4,13 @@ import ColumnAccessControl from "../../Types/Database/AccessControl/ColumnAccess
import TableAccessControl from "../../Types/Database/AccessControl/TableAccessControl";
import ColumnLength from "../../Types/Database/ColumnLength";
import ColumnType from "../../Types/Database/ColumnType";
import CrudApiEndpoint from "../../Types/Database/CrudApiEndpoint";
import TableColumn from "../../Types/Database/TableColumn";
import TableColumnType from "../../Types/Database/TableColumnType";
import TableMetadata from "../../Types/Database/TableMetadata";
import IconProp from "../../Types/Icon/IconProp";
import ObjectID from "../../Types/ObjectID";
import Route from "../../Types/API/Route";
import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
@TableAccessControl({
@@ -24,6 +26,7 @@ import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
icon: IconProp.Lock,
tableDescription: "HTTP Challege for Lets Encrypt Certificates",
})
@CrudApiEndpoint(new Route("/acme-challenge"))
@Entity({
name: "AcmeChallenge",
})

View File

@@ -301,7 +301,12 @@ export default class Domain extends BaseModel {
public deletedByUserId?: ObjectID = undefined;
@ColumnAccessControl({
create: [],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectDomain,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,

View File

@@ -0,0 +1,104 @@
import BaseModel from "./DatabaseBaseModel/DatabaseBaseModel";
import Route from "../../Types/API/Route";
import ColumnAccessControl from "../../Types/Database/AccessControl/ColumnAccessControl";
import TableAccessControl from "../../Types/Database/AccessControl/TableAccessControl";
import ColumnLength from "../../Types/Database/ColumnLength";
import ColumnType from "../../Types/Database/ColumnType";
import CrudApiEndpoint from "../../Types/Database/CrudApiEndpoint";
import TableColumn from "../../Types/Database/TableColumn";
import TableColumnType from "../../Types/Database/TableColumnType";
import TableMetadata from "../../Types/Database/TableMetadata";
import IconProp from "../../Types/Icon/IconProp";
import { Column, Entity, Index } from "typeorm";
@TableAccessControl({
create: [],
read: [],
update: [],
delete: [],
})
@CrudApiEndpoint(new Route("/enterprise-license"))
@TableMetadata({
tableName: "EnterpriseLicense",
singularName: "Enterprise License",
pluralName: "Enterprise Licenses",
icon: IconProp.Lock,
tableDescription: "Enterprise license keys issued by OneUptime.",
})
@Entity({
name: "EnterpriseLicense",
})
export default class EnterpriseLicense extends BaseModel {
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
required: true,
type: TableColumnType.ShortText,
title: "Company Name",
description: "Company name associated with this license.",
})
@Column({
nullable: false,
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
})
public companyName?: string = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
required: true,
type: TableColumnType.ShortText,
title: "License Key",
description: "Enterprise license key.",
unique: true,
})
@Index({ unique: true })
@Column({
nullable: false,
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
unique: true,
})
public licenseKey?: string = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
required: true,
type: TableColumnType.Date,
title: "Expires At",
description: "Expiration date of this license.",
})
@Column({
nullable: false,
type: ColumnType.Date,
})
public expiresAt?: Date = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
required: false,
type: TableColumnType.Number,
title: "Annual Contract Value",
description: "Annual contract value (in USD) for this license.",
})
@Column({
nullable: true,
type: ColumnType.Number,
})
public annualContractValue?: number = undefined;
}

View File

@@ -352,6 +352,25 @@ export default class GlobalConfig extends GlobalConfigModel {
})
public metaWhatsAppAppSecret?: string = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
type: TableColumnType.ShortText,
title: "Meta WhatsApp Webhook Verify Token",
description:
"Verify token configured in Meta to validate webhook subscriptions.",
})
@Column({
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
nullable: true,
unique: true,
})
public metaWhatsAppWebhookVerifyToken?: string = undefined;
@ColumnAccessControl({
create: [],
read: [],
@@ -478,4 +497,75 @@ export default class GlobalConfig extends GlobalConfigModel {
transformer: Email.getDatabaseTransformer(),
})
public adminNotificationEmail?: Email = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
type: TableColumnType.ShortText,
title: "Enterprise Company Name",
description:
"Company name associated with the validated enterprise license.",
})
@Column({
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
nullable: true,
unique: true,
})
public enterpriseCompanyName?: string = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
type: TableColumnType.ShortText,
title: "Enterprise License Key",
description: "Enterprise license key stored after successful validation.",
})
@Column({
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
nullable: true,
unique: true,
})
public enterpriseLicenseKey?: string = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
type: TableColumnType.Date,
title: "Enterprise License Expires At",
description: "Expiration date of the validated enterprise license.",
})
@Column({
type: ColumnType.Date,
nullable: true,
unique: true,
})
public enterpriseLicenseExpiresAt?: Date = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
type: TableColumnType.VeryLongText,
title: "Enterprise License Token",
description: "Signed JWT returned from license validation.",
})
@Column({
type: ColumnType.VeryLongText,
nullable: true,
unique: true,
})
public enterpriseLicenseToken?: string = undefined;
}

View File

@@ -926,6 +926,39 @@ export default class Incident extends BaseModel {
})
public rootCause?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@TableColumn({
type: TableColumnType.Markdown,
required: false,
isDefaultValueColumn: false,
title: "Postmortem Note",
description: "Document the postmortem summary for this incident.",
})
@Column({
type: ColumnType.Markdown,
nullable: true,
})
public postmortemNote?: string = undefined;
@ColumnAccessControl({
create: [],
read: [

View File

@@ -33,6 +33,7 @@ export enum IncidentFeedEventType {
IncidentUpdated = "IncidentUpdated",
RootCause = "RootCause",
RemediationNotes = "RemediationNotes",
PostmortemNote = "PostmortemNote",
OwnerUserRemoved = "OwnerUserRemoved",
OwnerTeamRemoved = "OwnerTeamRemoved",
OnCallPolicy = "OnCallPolicy",

View File

@@ -0,0 +1,353 @@
import Project from "./Project";
import User from "./User";
import BaseModel from "./DatabaseBaseModel/DatabaseBaseModel";
import Route from "../../Types/API/Route";
import ColumnAccessControl from "../../Types/Database/AccessControl/ColumnAccessControl";
import TableAccessControl from "../../Types/Database/AccessControl/TableAccessControl";
import ColumnLength from "../../Types/Database/ColumnLength";
import ColumnType from "../../Types/Database/ColumnType";
import CrudApiEndpoint from "../../Types/Database/CrudApiEndpoint";
import EnableDocumentation from "../../Types/Database/EnableDocumentation";
import EnableWorkflow from "../../Types/Database/EnableWorkflow";
import TableColumn from "../../Types/Database/TableColumn";
import TableColumnType from "../../Types/Database/TableColumnType";
import TableMetadata from "../../Types/Database/TableMetadata";
import TenantColumn from "../../Types/Database/TenantColumn";
import IconProp from "../../Types/Icon/IconProp";
import ObjectID from "../../Types/ObjectID";
import Permission from "../../Types/Permission";
import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
import TableBillingAccessControl from "../../Types/Database/AccessControl/TableBillingAccessControl";
import { PlanType } from "../../Types/Billing/SubscriptionPlan";
@TableBillingAccessControl({
create: PlanType.Growth,
read: PlanType.Growth,
update: PlanType.Growth,
delete: PlanType.Growth,
})
@EnableDocumentation()
@TenantColumn("projectId")
@TableAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentNoteTemplate,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentNoteTemplate,
],
delete: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.DeleteIncidentNoteTemplate,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditIncidentNoteTemplate,
],
})
@CrudApiEndpoint(new Route("/incident-postmortem-template"))
@Entity({
name: "IncidentPostmortemTemplate",
})
@EnableWorkflow({
create: true,
delete: true,
update: true,
read: true,
})
@TableMetadata({
tableName: "IncidentPostmortemTemplate",
singularName: "Incident Postmortem Template",
pluralName: "Incident Postmortem Templates",
icon: IconProp.Book,
tableDescription: "Manage postmortem templates for your incidents",
})
export default class IncidentPostmortemTemplate extends BaseModel {
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentNoteTemplate,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentNoteTemplate,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "projectId",
type: TableColumnType.Entity,
modelType: Project,
title: "Project",
description: "Relation to Project Resource in which this object belongs",
})
@ManyToOne(
() => {
return Project;
},
{
eager: false,
nullable: true,
onDelete: "CASCADE",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "projectId" })
public project?: Project = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentNoteTemplate,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentNoteTemplate,
],
update: [],
})
@Index()
@TableColumn({
type: TableColumnType.ObjectID,
required: true,
canReadOnRelationQuery: true,
title: "Project ID",
description: "ID of your OneUptime Project in which this object belongs",
})
@Column({
type: ColumnType.ObjectID,
nullable: false,
transformer: ObjectID.getDatabaseTransformer(),
})
public projectId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentNoteTemplate,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentNoteTemplate,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditIncidentNoteTemplate,
],
})
@Index()
@TableColumn({
type: TableColumnType.Markdown,
title: "Postmortem Note",
description:
"Markdown template used when documenting an incident postmortem.",
})
@Column({
type: ColumnType.Markdown,
nullable: false,
unique: false,
})
public postmortemNote?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentNoteTemplate,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentNoteTemplate,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditIncidentNoteTemplate,
],
})
@TableColumn({
required: true,
type: TableColumnType.ShortText,
canReadOnRelationQuery: true,
title: "Name",
description: "Name of the Postmortem Template",
})
@Column({
nullable: false,
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
})
public templateName?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentNoteTemplate,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentNoteTemplate,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditIncidentNoteTemplate,
],
})
@TableColumn({
required: true,
type: TableColumnType.LongText,
canReadOnRelationQuery: true,
title: "Template Description",
description: "Description of the Postmortem Template",
})
@Column({
nullable: false,
type: ColumnType.LongText,
length: ColumnLength.LongText,
})
public templateDescription?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentNoteTemplate,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentNoteTemplate,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "createdByUserId",
type: TableColumnType.Entity,
modelType: User,
title: "Created by User",
description:
"Relation to User who created this object (if this object was created by a User)",
})
@ManyToOne(
() => {
return User;
},
{
eager: false,
nullable: true,
onDelete: "SET NULL",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "createdByUserId" })
public createdByUser?: User = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentNoteTemplate,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentNoteTemplate,
],
update: [],
})
@TableColumn({
type: TableColumnType.ObjectID,
title: "Created by User ID",
description:
"User ID who created this object (if this object was created by a User)",
})
@Column({
type: ColumnType.ObjectID,
nullable: true,
transformer: ObjectID.getDatabaseTransformer(),
})
public createdByUserId?: ObjectID = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "deletedByUserId",
type: TableColumnType.Entity,
title: "Deleted by User",
modelType: User,
description:
"Relation to User who deleted this object (if this object was deleted by a User)",
})
@ManyToOne(
() => {
return User;
},
{
cascade: false,
eager: false,
nullable: true,
onDelete: "SET NULL",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "deletedByUserId" })
public deletedByUser?: User = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
type: TableColumnType.ObjectID,
title: "Deleted by User ID",
description:
"User ID who deleted this object (if this object was deleted by a User)",
})
@Column({
type: ColumnType.ObjectID,
nullable: true,
transformer: ObjectID.getDatabaseTransformer(),
})
public deletedByUserId?: ObjectID = undefined;
}

View File

@@ -24,6 +24,7 @@ import IncidentFeed from "./IncidentFeed";
import IncidentCustomField from "./IncidentCustomField";
import IncidentInternalNote from "./IncidentInternalNote";
import IncidentNoteTemplate from "./IncidentNoteTemplate";
import IncidentPostmortemTemplate from "./IncidentPostmortemTemplate";
import IncidentOwnerTeam from "./IncidentOwnerTeam";
import IncidentOwnerUser from "./IncidentOwnerUser";
import IncidentPublicNote from "./IncidentPublicNote";
@@ -80,6 +81,7 @@ import ProjectSmtpConfig from "./ProjectSmtpConfig";
//SSO
import ProjectSSO from "./ProjectSso";
import PromoCode from "./PromoCode";
import EnterpriseLicense from "./EnterpriseLicense";
import Reseller from "./Reseller";
import ResellerPlan from "./ResellerPlan";
// ScheduledMaintenances
@@ -235,6 +237,7 @@ const AllModelTypes: Array<{
IncidentOwnerUser,
IncidentSeverity,
IncidentNoteTemplate,
IncidentPostmortemTemplate,
AlertState,
Alert,
@@ -329,6 +332,7 @@ const AllModelTypes: Array<{
ResellerPlan,
PromoCode,
EnterpriseLicense,
GlobalConfig,

View File

@@ -134,7 +134,7 @@ export default class Project extends TenantModel {
Permission.UnAuthorizedSsoUser,
Permission.ProjectUser,
],
update: [Permission.ProjectOwner],
update: [Permission.ProjectOwner, Permission.ManageProjectBilling],
})
@TableColumn({ type: TableColumnType.ShortText })
@Column({

View File

@@ -2331,4 +2331,84 @@ export default class StatusPage extends BaseModel {
create: PlanType.Free,
})
public ipWhitelist?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectStatusPage,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectStatusPage,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectStatusPage,
],
})
@TableColumn({
isDefaultValueColumn: true,
type: TableColumnType.Boolean,
title: "Enable Embedded Overall Status Badge",
description:
"Enable embedded overall status badge that can be displayed on external websites?",
defaultValue: false,
})
@Column({
type: ColumnType.Boolean,
default: false,
nullable: false,
})
@ColumnBillingAccessControl({
read: PlanType.Free,
update: PlanType.Growth,
create: PlanType.Free,
})
public enableEmbeddedOverallStatus?: boolean = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectStatusPage,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectStatusPage,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectStatusPage,
],
})
@Index()
@TableColumn({
type: TableColumnType.ShortText,
required: false,
title: "Embedded Overall Status Token",
description:
"Security token required to access the embedded overall status badge. This token must be provided in the URL.",
})
@Column({
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
nullable: true,
})
@ColumnBillingAccessControl({
read: PlanType.Free,
update: PlanType.Growth,
create: PlanType.Free,
})
public embeddedOverallStatusToken?: string = undefined;
}

View File

@@ -290,6 +290,21 @@ export default class StatusPagePrivateUser extends BaseModel {
nullable: true,
unique: false,
})
public jwtRefreshToken?: string = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({ type: TableColumnType.ShortText })
@Column({
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
nullable: true,
unique: false,
})
public resetPasswordToken?: string = undefined;
@ColumnAccessControl({

View File

@@ -8,7 +8,6 @@ import ColumnLength from "../../Types/Database/ColumnLength";
import ColumnType from "../../Types/Database/ColumnType";
import CrudApiEndpoint from "../../Types/Database/CrudApiEndpoint";
import EnableDocumentation from "../../Types/Database/EnableDocumentation";
import EnableWorkflow from "../../Types/Database/EnableWorkflow";
import TableColumn from "../../Types/Database/TableColumn";
import TableColumnType from "../../Types/Database/TableColumnType";
import TableMetadata from "../../Types/Database/TableMetadata";
@@ -45,12 +44,6 @@ import TelemetryService from "./TelemetryService";
Permission.EditTelemetryException,
],
})
@EnableWorkflow({
create: true,
delete: true,
update: true,
read: true,
})
@CrudApiEndpoint(new Route("/telemetry-exception-status"))
@TableMetadata({
tableName: "TelemetryException",

View File

@@ -39,7 +39,7 @@ export const DEFAULT_RETENTION_IN_DAYS: number = 15;
pluralName: "Telemetry Usage Billings",
icon: IconProp.Billing,
tableDescription:
"Stores historical usage billing data for your telemetry data like Logs, Metrics, and Traces.",
"Stores historical usage billing data for your telemetry data like Logs, Metrics, Traces, and Exceptions.",
})
@Entity({
name: "TelemetryUsageBilling",

View File

@@ -37,7 +37,7 @@ import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
delete: [],
update: [],
@@ -66,7 +66,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -97,7 +97,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -122,7 +122,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -148,7 +148,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -175,7 +175,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -198,7 +198,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -222,7 +222,32 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@Index()
@TableColumn({
required: false,
type: TableColumnType.ShortText,
title: "WhatsApp Message ID",
description: "Message ID returned by Meta's API",
canReadOnRelationQuery: false,
})
@Column({
nullable: true,
type: ColumnType.ShortText,
length: ColumnLength.ShortText,
})
public whatsAppMessageId?: string = undefined;
@ColumnAccessControl({
create: [],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -246,7 +271,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -273,7 +298,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -304,7 +329,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -329,7 +354,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -360,7 +385,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -385,7 +410,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -416,7 +441,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -441,7 +466,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -472,7 +497,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -498,7 +523,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -529,7 +554,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -554,7 +579,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -586,7 +611,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -612,7 +637,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -643,7 +668,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -668,7 +693,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -699,7 +724,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -725,7 +750,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -758,7 +783,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -784,7 +809,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})
@@ -816,7 +841,7 @@ export default class WhatsAppLog extends BaseModel {
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadSmsLog,
Permission.ReadWhatsAppLog,
],
update: [],
})

View File

@@ -0,0 +1,64 @@
import AcmeChallenge from "../../Models/DatabaseModels/AcmeChallenge";
import NotFoundException from "../../Types/Exception/NotFoundException";
import AcmeChallengeService, {
Service as AcmeChallengeServiceType,
} from "../Services/AcmeChallengeService";
import Express, {
ExpressRequest,
ExpressResponse,
ExpressRouter,
NextFunction,
} from "../Utils/Express";
import Response from "../Utils/Response";
import BaseAPI from "./BaseAPI";
/**
 * API for serving ACME (HTTP-01) challenge responses.
 *
 * Besides the standard CRUD routes provided by BaseAPI, this class exposes a
 * dedicated router that answers `GET /acme-challenge/.well-known/:token` with
 * the stored challenge content as plain text, which is what the ACME
 * certificate authority fetches to prove domain ownership.
 */
export default class AcmeChallengeAPI extends BaseAPI<
  AcmeChallenge,
  AcmeChallengeServiceType
> {
  // Router that serves the well-known challenge endpoint; exposed via
  // getWellKnownRouter() so it can also be mounted elsewhere.
  private wellKnownRouter: ExpressRouter;

  public constructor() {
    super(AcmeChallenge, AcmeChallengeService);

    this.wellKnownRouter = Express.getRouter();

    this.wellKnownRouter.get(
      "/acme-challenge/.well-known/:token",
      async (
        request: ExpressRequest,
        response: ExpressResponse,
        next: NextFunction,
      ) => {
        try {
          const challengeToken: string = request.params["token"] as string;

          // Look up the stored challenge body for this token. Root props are
          // used because the CA calling this endpoint is unauthenticated.
          const storedChallenge: AcmeChallenge | null =
            await AcmeChallengeService.findOneBy({
              query: {
                token: challengeToken,
              },
              select: {
                challenge: true,
              },
              props: {
                isRoot: true,
              },
            });

          if (!storedChallenge) {
            return next(new NotFoundException("Challenge not found"));
          }

          // The CA expects the raw challenge string as a text response.
          return Response.sendTextResponse(
            request,
            response,
            storedChallenge.challenge as string,
          );
        } catch (error) {
          return next(error);
        }
      },
    );

    this.router.use("/", this.wellKnownRouter);
  }

  /** Returns the router that serves the well-known ACME challenge route. */
  public getWellKnownRouter(): ExpressRouter {
    return this.wellKnownRouter;
  }
}

View File

@@ -0,0 +1,102 @@
import EnterpriseLicense from "../../Models/DatabaseModels/EnterpriseLicense";
import BadDataException from "../../Types/Exception/BadDataException";
import { JSONObject } from "../../Types/JSON";
import EnterpriseLicenseService, {
Service as EnterpriseLicenseServiceType,
} from "../Services/EnterpriseLicenseService";
import UserMiddleware from "../Middleware/UserAuthorization";
import JSONWebToken from "../Utils/JsonWebToken";
import Response from "../Utils/Response";
import {
ExpressRequest,
ExpressResponse,
NextFunction,
} from "../Utils/Express";
import BaseAPI from "./BaseAPI";
// import { Host } from "../EnvironmentConfig";
/**
 * API for validating enterprise license keys.
 *
 * Exposes `POST <crud-path>/validate` (behind user auth middleware) which
 * looks up the submitted key, checks its expiration, and returns the license
 * details together with a JWT signed for the license's remaining lifetime.
 */
export default class EnterpriseLicenseAPI extends BaseAPI<
  EnterpriseLicense,
  EnterpriseLicenseServiceType
> {
  public constructor() {
    super(EnterpriseLicense, EnterpriseLicenseService);

    this.router.post(
      `${new this.entityType().getCrudApiPath()?.toString()}/validate`,
      UserMiddleware.getUserMiddleware,
      async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
        try {
          const submittedKey: string | undefined = req.body["licenseKey"];

          if (!submittedKey) {
            throw new BadDataException("License key is required");
          }

          /*
           * Host restriction is currently disabled:
           * const serverHost: string = Host.toString();
           * if (!serverHost.includes("oneuptime.com")) {
           *   throw new BadDataException(
           *     "Enterprise license validation is only available on oneuptime.com",
           *   );
           * }
           */

          // Find the license record matching the submitted key.
          const matchedLicense: EnterpriseLicense | null =
            await EnterpriseLicenseService.findOneBy({
              query: {
                licenseKey: submittedKey,
              },
              select: {
                companyName: true,
                expiresAt: true,
                licenseKey: true,
              },
              props: {
                isRoot: true,
              },
            });

          if (!matchedLicense) {
            throw new BadDataException("License key is invalid");
          }

          if (!matchedLicense.expiresAt) {
            throw new BadDataException("License expiration is not set");
          }

          // Remaining validity in whole seconds; must be positive.
          const remainingSeconds: number = Math.floor(
            (matchedLicense.expiresAt.getTime() - Date.now()) / 1000,
          );

          if (remainingSeconds <= 0) {
            throw new BadDataException("License key has expired");
          }

          const tokenPayload: JSONObject = {
            companyName: matchedLicense.companyName || "",
            expiresAt: matchedLicense.expiresAt.toISOString(),
            licenseKey: matchedLicense.licenseKey || "",
          };

          // The signed token's lifetime mirrors the license's remaining
          // validity (guarded to at least 1 second).
          const signedToken: string = JSONWebToken.signJsonPayload(
            tokenPayload,
            Math.max(remainingSeconds, 1),
          );

          return Response.sendJsonObjectResponse(req, res, {
            companyName: tokenPayload["companyName"] as string,
            expiresAt: tokenPayload["expiresAt"] as string,
            licenseKey: tokenPayload["licenseKey"] as string,
            token: signedToken,
          });
        } catch (err) {
          next(err);
        }
      },
    );
  }
}

View File

@@ -9,6 +9,15 @@ import {
import Response from "../Utils/Response";
import BaseAPI from "./BaseAPI";
import GlobalConfig from "../../Models/DatabaseModels/GlobalConfig";
import ObjectID from "../../Types/ObjectID";
import { JSONObject } from "../../Types/JSON";
import BadDataException from "../../Types/Exception/BadDataException";
import API from "../../Utils/API";
import HTTPErrorResponse from "../../Types/API/HTTPErrorResponse";
import HTTPResponse from "../../Types/API/HTTPResponse";
import PartialEntity from "../../Types/Database/PartialEntity";
import { EnterpriseLicenseValidationUrl } from "../EnvironmentConfig";
import UserMiddleware from "../Middleware/UserAuthorization";
export default class GlobalConfigAPI extends BaseAPI<
GlobalConfig,
@@ -45,5 +54,164 @@ export default class GlobalConfigAPI extends BaseAPI<
}
},
);
// GET <crud-path>/license — returns the enterprise license details currently
// stored in global config. Unauthenticated; absent values are normalized to
// explicit nulls so clients can detect "no license configured" directly.
this.router.get(
  `${new this.entityType().getCrudApiPath()?.toString()}/license`,
  async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
    try {
      // GlobalConfig is stored as a singleton row keyed by the zero ObjectID.
      const config: GlobalConfig | null =
        await GlobalConfigService.findOneById({
          id: ObjectID.getZeroObjectID(),
          select: {
            enterpriseCompanyName: true,
            enterpriseLicenseExpiresAt: true,
            enterpriseLicenseKey: true,
            enterpriseLicenseToken: true,
          },
          props: {
            isRoot: true,
          },
        });
      const responseBody: JSONObject = {
        companyName: config?.enterpriseCompanyName || null,
        // Serialize the expiry as ISO-8601 text; null when unset.
        expiresAt: config?.enterpriseLicenseExpiresAt
          ? config.enterpriseLicenseExpiresAt.toISOString()
          : null,
        licenseKey: config?.enterpriseLicenseKey || null,
        token: config?.enterpriseLicenseToken || null,
      };
      return Response.sendJsonObjectResponse(req, res, responseBody);
    } catch (err) {
      next(err);
    }
  },
);
// POST <crud-path>/license — validates a submitted license key against the
// remote validation service and, on success, persists the returned license
// details into the singleton GlobalConfig row (creating it if absent).
// Requires an authenticated user (UserMiddleware).
this.router.post(
  `${new this.entityType().getCrudApiPath()?.toString()}/license`,
  UserMiddleware.getUserMiddleware,
  async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
    try {
      const licenseKey: string =
        (req.body["licenseKey"] as string | undefined)?.trim() || "";
      if (!licenseKey) {
        throw new BadDataException("License key is required");
      }
      // Validate the key with the central license service.
      const validationResponse:
        | HTTPResponse<JSONObject>
        | HTTPErrorResponse = await API.post<JSONObject>({
        url: EnterpriseLicenseValidationUrl,
        data: {
          licenseKey,
        },
      });
      if (!validationResponse.isSuccess()) {
        // Surface the remote error message when available; otherwise a
        // generic failure message.
        const errorMessage: string =
          validationResponse instanceof HTTPErrorResponse
            ? validationResponse.message ||
              "Failed to validate license key."
            : "Failed to validate license key.";
        throw new BadDataException(errorMessage);
      }
      // Extract and sanitize the fields returned by the validation service.
      const payload: JSONObject = validationResponse.data as JSONObject;
      const companyNameRaw: string =
        (payload["companyName"] as string | undefined)?.trim() || "";
      const expiresAtRaw: string =
        (payload["expiresAt"] as string | undefined) || "";
      // Fall back to the key the user submitted if the service omits it.
      const licenseKeyRaw: string =
        (payload["licenseKey"] as string | undefined)?.trim() || licenseKey;
      const licenseToken: string =
        (payload["token"] as string | undefined) || "";
      let licenseExpiry: Date | undefined = undefined;
      if (expiresAtRaw) {
        // Reject an unparseable expiry rather than silently storing nothing.
        const parsedDate: Date = new Date(expiresAtRaw);
        if (Number.isNaN(parsedDate.getTime())) {
          throw new BadDataException(
            "License expiration returned from server is invalid.",
          );
        }
        licenseExpiry = parsedDate;
      }
      // Missing fields are persisted as explicit nulls (clearing old values).
      const updatePayload: PartialEntity<GlobalConfig> = {
        enterpriseCompanyName: companyNameRaw || null,
        enterpriseLicenseKey: licenseKeyRaw || null,
        enterpriseLicenseExpiresAt: licenseExpiry || null,
        enterpriseLicenseToken: licenseToken || null,
      };
      const globalConfigId: ObjectID = ObjectID.getZeroObjectID();
      // GlobalConfig is a singleton row; update it if it exists, otherwise
      // create it with the zero ObjectID. Hooks are skipped for this
      // internal write (NOTE(review): presumably to avoid side effects of
      // config-change hooks — confirm).
      const existingConfig: GlobalConfig | null =
        await GlobalConfigService.findOneById({
          id: globalConfigId,
          select: {
            _id: true,
          },
          props: {
            isRoot: true,
            ignoreHooks: true,
          },
        });
      if (existingConfig) {
        await GlobalConfigService.updateOneById({
          id: globalConfigId,
          data: updatePayload,
          props: {
            isRoot: true,
            ignoreHooks: true,
          },
        });
      } else {
        const newConfig: GlobalConfig = new GlobalConfig();
        newConfig.id = globalConfigId;
        // Only assign fields that have values when creating a fresh row.
        if (companyNameRaw) {
          newConfig.enterpriseCompanyName = companyNameRaw;
        }
        if (licenseKeyRaw) {
          newConfig.enterpriseLicenseKey = licenseKeyRaw;
        }
        if (licenseToken) {
          newConfig.enterpriseLicenseToken = licenseToken;
        }
        if (licenseExpiry) {
          newConfig.enterpriseLicenseExpiresAt = licenseExpiry;
        }
        await GlobalConfigService.create({
          data: newConfig,
          props: {
            isRoot: true,
            ignoreHooks: true,
          },
        });
      }
      // Echo back what was stored, using nulls for absent values.
      return Response.sendJsonObjectResponse(req, res, {
        companyName: companyNameRaw || null,
        expiresAt: licenseExpiry ? licenseExpiry.toISOString() : null,
        licenseKey: licenseKeyRaw || null,
        token: licenseToken || null,
      });
    } catch (err) {
      next(err);
    }
  },
);
}
}

View File

@@ -58,7 +58,7 @@ export default class MicrosoftTeamsAPI {
"https://developer.microsoft.com/json-schemas/teams/v1.23/MicrosoftTeams.schema.json",
manifestVersion: "1.23",
version: AppVersion.toLowerCase().includes("unknown")
? "1.3.0"
? "1.5.0"
: AppVersion,
id: MicrosoftTeamsAppClientId,
developer: {
@@ -75,12 +75,13 @@ export default class MicrosoftTeamsAPI {
},
description: {
short: "Complete open-source monitoring and observability platform. ",
full: `OneUptime is a comprehensive solution for monitoring and managing your online services. Whether you need to check the availability of your website, dashboard, API, or any other online resource, OneUptime can alert your team when downtime happens and keep your customers informed with a status page. OneUptime also helps you handle incidents, set up on-call rotations, run tests, secure your services, analyze logs, track performance, and debug errors.
full: `<p>OneUptime is a comprehensive solution for monitoring and managing your online services. Whether you need to check the availability of your website, dashboard, API, or any other online resource, OneUptime can alert your team when downtime happens and keep your customers informed with a status page. OneUptime also helps you handle incidents, set up on-call rotations, run tests, secure your services, analyze logs, track performance, and debug errors.</p>
In order to use the app, you need to have an active account with OneUptime at https://oneuptime.com. Please send an email to support@oneupitme.com if you need more details.
<p>In order to use the app, you need to have an active account with <a href="https://oneuptime.com" target="_blank">OneUptime</a>. Please send an email to <a href="mailto:support@oneuptime.com">support@oneuptime.com</a> if you need more details.</p>
Create a new OneUptime Account: If you wish to sign up for a new account, you can do so at https://oneuptime.com and click on Sign up.
Help and Support: You can reach out to help and support here: https://oneuptime.com/support or contact support@oneuptime.com
<p><strong>Create a new OneUptime Account:</strong> If you wish to sign up for a new account, you can do so by visiting <a href="https://oneuptime.com" target="_blank">OneUptime Sign Up</a>.</p>
<p><strong>Help and Support:</strong> You can reach out to help and support via <a href="https://oneuptime.com/support" target="_blank">Support Page</a> or contact <a href="mailto:support@oneuptime.com">support@oneuptime.com</a>.</p>
`,
},
// Default to size-specific names; route will adjust if fallbacks are used
@@ -622,9 +623,8 @@ Help and Support: You can reach out to help and support here: https://oneuptime.
projectId: new ObjectID(projectId),
workspaceType: WorkspaceType.MicrosoftTeams,
});
const existingTenant: string | undefined = (
existingAuth?.miscData as any
)?.tenantId;
const existingTenant: string | undefined =
existingAuth?.workspaceProjectId;
if (existingTenant) {
tenantForConsent = existingTenant;
}

View File

@@ -20,11 +20,72 @@ import PositiveNumber from "../../Types/PositiveNumber";
import Project from "../../Models/DatabaseModels/Project";
import Reseller from "../../Models/DatabaseModels/Reseller";
import TeamMember from "../../Models/DatabaseModels/TeamMember";
import BadDataException from "../../Types/Exception/BadDataException";
import Permission, { UserPermission } from "../../Types/Permission";
import ObjectID from "../../Types/ObjectID";
import { JSONObject } from "../../Types/JSON";
export default class ProjectAPI extends BaseAPI<Project, ProjectServiceType> {
public constructor() {
super(Project, ProjectService);
// PUT <crud-path>/:id/change-plan — switches the project's billing plan.
// Requires billing to be enabled on the server, and the caller to be a
// project owner, hold ManageProjectBilling, or be a master admin.
this.router.put(
  `${new this.entityType().getCrudApiPath()?.toString()}/:id/change-plan`,
  UserMiddleware.getUserMiddleware,
  async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
    try {
      if (!IsBillingEnabled) {
        throw new BadDataException(
          "Billing is not enabled for this server",
        );
      }
      const projectId: ObjectID = new ObjectID(req.params["id"] as string);
      // Request body shape: { data: { paymentProviderPlanId: string } }.
      const body: JSONObject = (req.body as JSONObject) || {};
      const data: JSONObject = (body["data"] as JSONObject) || {};
      const paymentProviderPlanId: string | undefined = data[
        "paymentProviderPlanId"
      ] as string | undefined;
      if (!paymentProviderPlanId) {
        throw new BadDataException("Plan ID is required to change plan");
      }
      // Resolve the caller's tenant-scoped permissions and check for either
      // ProjectOwner or ManageProjectBilling.
      const permissions: Array<UserPermission> =
        await this.getPermissionsForTenant(req);
      const hasBillingPermission: boolean =
        permissions.filter((permission: UserPermission) => {
          return (
            permission.permission.toString() ===
              Permission.ProjectOwner.toString() ||
            permission.permission.toString() ===
              Permission.ManageProjectBilling.toString()
          );
        }).length > 0;
      // Master admins may change plans regardless of project permissions.
      if (
        !hasBillingPermission &&
        !(req as OneUptimeRequest).userAuthorization?.isMasterAdmin
      ) {
        throw new BadDataException(
          `You need ${Permission.ProjectOwner} or ${Permission.ManageProjectBilling} permission to change project plan`,
        );
      }
      await ProjectService.changePlan({
        projectId: projectId,
        paymentProviderPlanId: paymentProviderPlanId,
      });
      return Response.sendEmptySuccessResponse(req, res);
    } catch (err) {
      next(err);
    }
  },
);
/*
* This API lists all the projects where user is its team member.
* This API is usually used to show project selector dropdown in the UI

View File

@@ -276,6 +276,142 @@ export default class StatusPageAPI extends BaseAPI<
},
);
// embedded overall status badge api
// GET <crud-path>/badge/:statusPageId?token=… — renders the status page's
// overall status as an SVG badge. Access is gated by a per-page secret
// token and the page's enableEmbeddedOverallStatus flag.
this.router.get(
  `${new this.entityType()
    .getCrudApiPath()
    ?.toString()}/badge/:statusPageId`,
  async (req: ExpressRequest, res: ExpressResponse) => {
    try {
      const statusPageId: ObjectID = new ObjectID(
        req.params["statusPageId"] as string,
      );
      const token: string = req.query["token"] as string;
      if (!token) {
        return res.status(400).send("Token is required");
      }
      // Fetch status page with security token; the token must match and the
      // embedded-badge feature must be enabled, otherwise treat as not found.
      const statusPage: StatusPage | null =
        await StatusPageService.findOneBy({
          query: {
            _id: statusPageId,
            enableEmbeddedOverallStatus: true,
            embeddedOverallStatusToken: token,
          },
          select: {
            _id: true,
            projectId: true,
            downtimeMonitorStatuses: {
              _id: true,
            },
          },
          props: {
            isRoot: true,
          },
        });
      if (!statusPage) {
        return res.status(404).send("Status badge not found or disabled");
      }
      // Get status page resources and current statuses.
      const statusPageResources: Array<StatusPageResource> =
        await StatusPageResourceService.findBy({
          query: {
            statusPageId: statusPageId,
          },
          select: {
            _id: true,
            monitor: {
              _id: true,
              currentMonitorStatusId: true,
            },
            monitorGroupId: true,
          },
          limit: LIMIT_PER_PROJECT,
          skip: 0,
          props: {
            isRoot: true,
          },
        });
      // Get monitor statuses, ordered by ascending priority.
      const monitorStatuses: Array<MonitorStatus> =
        await MonitorStatusService.findBy({
          query: {
            projectId: statusPage.projectId!,
          },
          select: {
            _id: true,
            name: true,
            color: true,
            priority: true,
            isOperationalState: true,
          },
          sort: {
            priority: SortOrder.Ascending,
          },
          skip: 0,
          limit: LIMIT_PER_PROJECT,
          props: {
            isRoot: true,
          },
        });
      // Get monitor group current statuses.
      const monitorGroupCurrentStatuses: Dictionary<ObjectID> =
        await StatusPageService.getMonitorGroupCurrentStatuses({
          statusPageResources,
          monitorStatuses,
        });
      // Calculate overall status.
      const overallStatus: MonitorStatus | null =
        StatusPageService.getOverallMonitorStatus({
          statusPageResources,
          monitorStatuses,
          monitorGroupCurrentStatuses,
        });
      // Escape XML-special characters before interpolating into the SVG.
      // Status names are project-configurable text, and this response is
      // served as image/svg+xml — unescaped input could break the markup
      // or inject active content into the rendered badge.
      const escapeXml: (value: string) => string = (
        value: string,
      ): string => {
        return value
          .replace(/&/g, "&amp;")
          .replace(/</g, "&lt;")
          .replace(/>/g, "&gt;")
          .replace(/"/g, "&quot;")
          .replace(/'/g, "&#39;");
      };
      // Generate SVG badge.
      const statusName: string = escapeXml(overallStatus?.name || "Unknown");
      const statusColor: string = escapeXml(
        overallStatus?.color?.toString() || "#808080",
      );
      const svg: string = `<svg xmlns="http://www.w3.org/2000/svg" width="150" height="20">
<linearGradient id="b" x2="0" y2="100%">
<stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
<stop offset="1" stop-opacity=".1"/>
</linearGradient>
<mask id="a">
<rect width="150" height="20" rx="3" fill="#fff"/>
</mask>
<g mask="url(#a)">
<path fill="#555" d="M0 0h50v20H0z"/>
<path fill="${statusColor}" d="M50 0h100v20H50z"/>
<path fill="url(#b)" d="M0 0h150v20H0z"/>
</g>
<g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11">
<text x="25" y="15" fill="#010101" fill-opacity=".3">status</text>
<text x="25" y="14">status</text>
<text x="100" y="15" fill="#010101" fill-opacity=".3">${statusName}</text>
<text x="100" y="14">${statusName}</text>
</g>
</svg>`;
      res.setHeader("Content-Type", "image/svg+xml");
      // Always re-render: status can change at any moment.
      res.setHeader("Cache-Control", "no-cache, no-store, must-revalidate");
      return res.send(svg);
    } catch (err) {
      logger.error(err);
      return res.status(500).send("Internal Server Error");
    }
  },
);
// confirm subscription api
this.router.get(
`${new this.entityType()
@@ -673,6 +809,7 @@ export default class StatusPageAPI extends BaseAPI<
await this.checkHasReadAccess({
statusPageId: statusPageId,
req: req,
res: res,
});
const resources: Array<StatusPageResource> =
@@ -733,6 +870,7 @@ export default class StatusPageAPI extends BaseAPI<
await this.checkHasReadAccess({
statusPageId: statusPageId,
req: req,
res: res,
});
/*
@@ -1025,6 +1163,7 @@ export default class StatusPageAPI extends BaseAPI<
await this.checkHasReadAccess({
statusPageId: statusPageId,
req: req,
res: res,
});
const startDate: Date = OneUptimeDate.getSomeDaysAgo(90);
@@ -1392,11 +1531,11 @@ export default class StatusPageAPI extends BaseAPI<
});
const overallStatus: MonitorStatus | null =
this.getOverallMonitorStatus(
StatusPageService.getOverallMonitorStatus({
statusPageResources,
monitorStatuses,
monitorGroupCurrentStatuses,
);
});
const response: JSONObject = {
overallStatus: overallStatus
@@ -1472,7 +1611,7 @@ export default class StatusPageAPI extends BaseAPI<
UserMiddleware.getUserMiddleware,
async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
try {
await this.subscribeToStatusPage(req);
await this.subscribeToStatusPage(req, res);
return Response.sendEmptySuccessResponse(req, res);
} catch (err) {
next(err);
@@ -1509,7 +1648,7 @@ export default class StatusPageAPI extends BaseAPI<
UserMiddleware.getUserMiddleware,
async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
try {
await this.subscribeToStatusPage(req);
await this.subscribeToStatusPage(req, res);
return Response.sendEmptySuccessResponse(req, res);
} catch (err) {
@@ -1525,7 +1664,7 @@ export default class StatusPageAPI extends BaseAPI<
UserMiddleware.getUserMiddleware,
async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
try {
await this.manageExistingSubscription(req);
await this.manageExistingSubscription(req, res);
return Response.sendEmptySuccessResponse(req, res);
} catch (err) {
@@ -1549,6 +1688,7 @@ export default class StatusPageAPI extends BaseAPI<
objectId,
null,
req,
res,
);
return Response.sendJsonObjectResponse(req, res, response);
@@ -1572,8 +1712,8 @@ export default class StatusPageAPI extends BaseAPI<
const response: JSONObject = await this.getScheduledMaintenanceEvents(
objectId,
null,
req,
res,
);
return Response.sendJsonObjectResponse(req, res, response);
@@ -1597,8 +1737,8 @@ export default class StatusPageAPI extends BaseAPI<
const response: JSONObject = await this.getAnnouncements(
objectId,
null,
req,
res,
);
return Response.sendJsonObjectResponse(req, res, response);
@@ -1627,6 +1767,7 @@ export default class StatusPageAPI extends BaseAPI<
objectId,
incidentId,
req,
res,
);
return Response.sendJsonObjectResponse(req, res, response);
@@ -1654,8 +1795,8 @@ export default class StatusPageAPI extends BaseAPI<
const response: JSONObject = await this.getScheduledMaintenanceEvents(
objectId,
scheduledMaintenanceId,
req,
res,
);
return Response.sendJsonObjectResponse(req, res, response);
@@ -1683,8 +1824,8 @@ export default class StatusPageAPI extends BaseAPI<
const response: JSONObject = await this.getAnnouncements(
objectId,
announcementId,
req,
res,
);
return Response.sendJsonObjectResponse(req, res, response);
@@ -1700,10 +1841,12 @@ export default class StatusPageAPI extends BaseAPI<
statusPageId: ObjectID,
scheduledMaintenanceId: ObjectID | null,
req: ExpressRequest,
res: ExpressResponse,
): Promise<JSONObject> {
await this.checkHasReadAccess({
statusPageId: statusPageId,
req: req,
res: res,
});
const statusPage: StatusPage | null = await StatusPageService.findOneBy({
@@ -2017,10 +2160,12 @@ export default class StatusPageAPI extends BaseAPI<
statusPageId: ObjectID,
announcementId: ObjectID | null,
req: ExpressRequest,
res: ExpressResponse,
): Promise<JSONObject> {
await this.checkHasReadAccess({
statusPageId: statusPageId,
req: req,
res: res,
});
const statusPage: StatusPage | null = await StatusPageService.findOneBy({
@@ -2192,7 +2337,10 @@ export default class StatusPageAPI extends BaseAPI<
}
@CaptureSpan()
public async manageExistingSubscription(req: ExpressRequest): Promise<void> {
public async manageExistingSubscription(
req: ExpressRequest,
res: ExpressResponse,
): Promise<void> {
const statusPageId: ObjectID = new ObjectID(
req.params["statusPageId"] as string,
);
@@ -2204,6 +2352,7 @@ export default class StatusPageAPI extends BaseAPI<
await this.checkHasReadAccess({
statusPageId: statusPageId,
req: req,
res: res,
});
const statusPage: StatusPage | null = await StatusPageService.findOneBy({
@@ -2467,7 +2616,10 @@ export default class StatusPageAPI extends BaseAPI<
}
@CaptureSpan()
public async subscribeToStatusPage(req: ExpressRequest): Promise<void> {
public async subscribeToStatusPage(
req: ExpressRequest,
res: ExpressResponse,
): Promise<void> {
const objectId: ObjectID = new ObjectID(
req.params["statusPageId"] as string,
);
@@ -2477,6 +2629,7 @@ export default class StatusPageAPI extends BaseAPI<
await this.checkHasReadAccess({
statusPageId: objectId,
req: req,
res: res,
});
const statusPage: StatusPage | null = await StatusPageService.findOneBy({
@@ -2844,10 +2997,12 @@ export default class StatusPageAPI extends BaseAPI<
statusPageId: ObjectID,
incidentId: ObjectID | null,
req: ExpressRequest,
res: ExpressResponse,
): Promise<JSONObject> {
await this.checkHasReadAccess({
statusPageId: statusPageId,
req: req,
res: res,
});
const statusPage: StatusPage | null = await StatusPageService.findOneBy({
@@ -3099,56 +3254,6 @@ export default class StatusPageAPI extends BaseAPI<
return response;
}
/**
 * Computes the overall status to display for a status page.
 *
 * Collects the set of status IDs that are currently in effect across all
 * page resources (individual monitors plus monitor groups), then walks
 * `monitorStatuses` in the order given and returns the LAST status present
 * in that set — so with callers that sort by ascending priority, the
 * highest-priority active status wins. Falls back to the first entry of
 * `monitorStatuses` (or null when the list is empty) if nothing matches.
 */
public getOverallMonitorStatus(
  statusPageResources: Array<StatusPageResource>,
  monitorStatuses: Array<MonitorStatus>,
  monitorGroupCurrentStatuses: Dictionary<ObjectID>,
): MonitorStatus | null {
  // Default result: the first status in the list, or null when empty.
  let currentStatus: MonitorStatus | null =
    monitorStatuses.length > 0 && monitorStatuses[0]
      ? monitorStatuses[0]
      : null;
  // Occurrence counts keyed by status ID string. Only key presence is used
  // below; the counts themselves are never read.
  const dict: Dictionary<number> = {};
  for (const resource of statusPageResources) {
    if (resource.monitor?.currentMonitorStatusId) {
      if (
        !Object.keys(dict).includes(
          resource.monitor?.currentMonitorStatusId.toString() || "",
        )
      ) {
        dict[resource.monitor?.currentMonitorStatusId?.toString()] = 1;
      } else {
        dict[resource.monitor!.currentMonitorStatusId!.toString()]!++;
      }
    }
  }
  // check status of monitor groups.
  for (const groupId in monitorGroupCurrentStatuses) {
    const statusId: ObjectID | undefined =
      monitorGroupCurrentStatuses[groupId];
    if (statusId) {
      if (!Object.keys(dict).includes(statusId.toString() || "")) {
        dict[statusId.toString()] = 1;
      } else {
        dict[statusId.toString()]!++;
      }
    }
  }
  // Last status whose ID appears in the set wins. NOTE(review): `_id` is
  // used directly as the index here while keys were inserted via
  // `.toString()` — this relies on `_id` coercing to the same string; it
  // appears to hold for these models, but confirm if `_id` is an ObjectID.
  for (const monitorStatus of monitorStatuses) {
    if (monitorStatus._id && dict[monitorStatus._id]) {
      currentStatus = monitorStatus;
    }
  }
  return currentStatus;
}
@CaptureSpan()
public async getStatusPageResourcesAndTimelines(data: {
statusPageId: ObjectID;
@@ -3406,6 +3511,7 @@ export default class StatusPageAPI extends BaseAPI<
public async checkHasReadAccess(data: {
statusPageId: ObjectID;
req: ExpressRequest;
res: ExpressResponse;
}): Promise<void> {
const accessResult: {
hasReadAccess: boolean;
@@ -3413,6 +3519,7 @@ export default class StatusPageAPI extends BaseAPI<
} = await this.service.hasReadAccess({
statusPageId: data.statusPageId,
req: data.req,
res: data.res,
});
if (!accessResult.hasReadAccess) {

View File

@@ -236,12 +236,16 @@ export default class UserNotificationLogTimelineAPI extends BaseAPI<
if (timelineItem.isAcknowledged) {
// already acknowledged. Then show already acknowledged page with view details button.
const viewDetailsRoute: Route = new Route(
DashboardRoute.toString(),
).addRoute(
`/${timelineItem.projectId?.toString()}/${timelineItem.triggeredByIncidentId ? "incidents" : "alerts"}/${timelineItem.triggeredByIncidentId ? timelineItem.triggeredByIncidentId!.toString() : timelineItem.triggeredByAlertId!.toString()}`,
);
const viewDetailsUrl: URL = new URL(
httpProtocol,
host,
DashboardRoute.addRoute(
`/${timelineItem.projectId?.toString()}/${timelineItem.triggeredByIncidentId ? "incidents" : "alerts"}/${timelineItem.triggeredByIncidentId ? timelineItem.triggeredByIncidentId!.toString() : timelineItem.triggeredByAlertId!.toString()}`,
),
viewDetailsRoute,
);
return Response.render(
@@ -273,30 +277,30 @@ export default class UserNotificationLogTimelineAPI extends BaseAPI<
// redirect to dashboard to incidents page.
if (timelineItem.triggeredByIncidentId) {
const incidentRoute: Route = new Route(
DashboardRoute.toString(),
).addRoute(
`/${timelineItem.projectId?.toString()}/incidents/${timelineItem.triggeredByIncidentId!.toString()}`,
);
return Response.redirect(
req,
res,
new URL(
httpProtocol,
host,
DashboardRoute.addRoute(
`/${timelineItem.projectId?.toString()}/incidents/${timelineItem.triggeredByIncidentId!.toString()}`,
),
),
new URL(httpProtocol, host, incidentRoute),
);
}
if (timelineItem.triggeredByAlertId) {
const alertRoute: Route = new Route(
DashboardRoute.toString(),
).addRoute(
`/${timelineItem.projectId?.toString()}/alerts/${timelineItem.triggeredByAlertId!.toString()}`,
);
return Response.redirect(
req,
res,
new URL(
httpProtocol,
host,
DashboardRoute.addRoute(
`/${timelineItem.projectId?.toString()}/alerts/${timelineItem.triggeredByAlertId!.toString()}`,
),
),
new URL(httpProtocol, host, alertRoute),
);
}

View File

@@ -3,6 +3,7 @@ import { AccountsRoute, DashboardRoute } from "../ServiceRoute";
import Hostname from "../Types/API/Hostname";
import Protocol from "../Types/API/Protocol";
import URL from "../Types/API/URL";
import Route from "../Types/API/Route";
import BadDataException from "../Types/Exception/BadDataException";
import { JSONValue } from "../Types/JSON";
import GlobalConfig from "../Models/DatabaseModels/GlobalConfig";
@@ -56,7 +57,11 @@ export default class DatabaseConfig {
@CaptureSpan()
public static async getAccountsUrl(): Promise<URL> {
const host: Hostname = await DatabaseConfig.getHost();
return new URL(await DatabaseConfig.getHttpProtocol(), host, AccountsRoute);
return new URL(
await DatabaseConfig.getHttpProtocol(),
host,
new Route(AccountsRoute.toString()),
);
}
@CaptureSpan()
@@ -65,7 +70,7 @@ export default class DatabaseConfig {
return new URL(
await DatabaseConfig.getHttpProtocol(),
host,
DashboardRoute,
new Route(DashboardRoute.toString()),
);
}

View File

@@ -10,6 +10,7 @@ import {
import BillingConfig from "./BillingConfig";
import Protocol from "../Types/API/Protocol";
import URL from "../Types/API/URL";
import Route from "../Types/API/Route";
import SubscriptionPlan from "../Types/Billing/SubscriptionPlan";
import Email from "../Types/Email";
import { JSONObject } from "../Types/JSON";
@@ -22,6 +23,77 @@ export const getAllEnvVars: () => JSONObject = (): JSONObject => {
return process.env;
};
// Exact environment variable names that are safe to ship to frontend bundles.
const FRONTEND_ENV_ALLOW_LIST: Array<string> = [
  "NODE_ENV",
  "HTTP_PROTOCOL",
  "HOST",
  "BILLING_ENABLED",
  "BILLING_PUBLIC_KEY",
  "IS_ENTERPRISE_EDITION",
  "STRIPE_PUBLIC_KEY",
  "VAPID_PUBLIC_KEY",
  "VAPID_SUBJECT",
  "VERSION",
  "STATUS_PAGE_CNAME_RECORD",
  "ANALYTICS_KEY",
  "ANALYTICS_HOST",
  "GIT_SHA",
  "APP_VERSION",
  "OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT",
  "OPENTELEMETRY_EXPORTER_OTLP_HEADERS",
  "DISABLE_TELEMETRY",
  "SLACK_APP_CLIENT_ID",
  "MICROSOFT_TEAMS_APP_CLIENT_ID",
];

// Any variable whose name starts with one of these prefixes is also exposed.
const FRONTEND_ENV_ALLOW_PREFIXES: Array<string> = [
  "SUBSCRIPTION_PLAN_",
  "PUBLIC_",
];

/**
 * Returns the subset of process.env that may be exposed to the browser:
 * only keys on the allow list (or matching an allowed prefix) and only
 * when their value is defined.
 */
export const getFrontendEnvVars: () => JSONObject = (): JSONObject => {
  const exposedVars: JSONObject = {};
  for (const [envKey, envValue] of Object.entries(process.env)) {
    const isAllowListed: boolean = FRONTEND_ENV_ALLOW_LIST.includes(envKey);
    const matchesAllowedPrefix: boolean = FRONTEND_ENV_ALLOW_PREFIXES.some(
      (prefix: string) => {
        return envKey.startsWith(prefix);
      },
    );
    if ((isAllowListed || matchesAllowedPrefix) && envValue !== undefined) {
      exposedVars[envKey] = envValue;
    }
  }
  return exposedVars;
};
/**
 * Reads a numeric environment variable and returns it only when it parses
 * to a finite value greater than zero; otherwise returns the fallback.
 * Unset, empty, non-numeric, zero, and negative values all fall back.
 */
const parsePositiveNumberFromEnv: (
  envKey: string,
  fallback: number,
) => number = (envKey: string, fallback: number): number => {
  const raw: string | undefined = process.env[envKey];
  if (!raw) {
    return fallback;
  }
  const parsed: number = parseFloat(raw);
  const isUsable: boolean = Number.isFinite(parsed) && parsed > 0;
  return isUsable ? parsed : fallback;
};
export const IsBillingEnabled: boolean = BillingConfig.IsBillingEnabled;
export const BillingPublicKey: string = BillingConfig.BillingPublicKey;
export const BillingPrivateKey: string = BillingConfig.BillingPrivateKey;
@@ -256,6 +328,8 @@ export const HttpProtocol: Protocol =
export const Host: string = process.env["HOST"] || "";
export const ProvisionSsl: boolean = process.env["PROVISION_SSL"] === "true";
export const WorkflowScriptTimeoutInMS: number = process.env[
"WORKFLOW_SCRIPT_TIMEOUT_IN_MS"
]
@@ -303,36 +377,69 @@ export const NotificationSlackWebhookOnSubscriptionUpdate: string =
export const AdminDashboardClientURL: URL = new URL(
HttpProtocol,
Host,
AdminDashboardRoute,
new Route(AdminDashboardRoute.toString()),
);
export const AppApiClientUrl: URL = new URL(HttpProtocol, Host, AppApiRoute);
export const AppApiClientUrl: URL = new URL(
HttpProtocol,
Host,
new Route(AppApiRoute.toString()),
);
export const StatusPageApiClientUrl: URL = new URL(
HttpProtocol,
Host,
StatusPageApiRoute,
new Route(StatusPageApiRoute.toString()),
);
export const DashboardClientUrl: URL = new URL(
HttpProtocol,
Host,
DashboardRoute,
new Route(DashboardRoute.toString()),
);
export const AccountsClientUrl: URL = new URL(
HttpProtocol,
Host,
AccountsRoute,
new Route(AccountsRoute.toString()),
);
export const HomeClientUrl: URL = new URL(HttpProtocol, Host, HomeRoute);
export const HomeClientUrl: URL = new URL(
HttpProtocol,
Host,
new Route(HomeRoute.toString()),
);
export const DocsClientUrl: URL = new URL(HttpProtocol, Host, DocsRoute);
export const DocsClientUrl: URL = new URL(
HttpProtocol,
Host,
new Route(DocsRoute.toString()),
);
export const DisableTelemetry: boolean =
process.env["DISABLE_TELEMETRY"] === "true";
export const IsEnterpriseEdition: boolean =
process.env["IS_ENTERPRISE_EDITION"] === "true";
export const AverageSpanRowSizeInBytes: number = parsePositiveNumberFromEnv(
"AVERAGE_SPAN_ROW_SIZE_IN_BYTES",
1024,
);
export const AverageLogRowSizeInBytes: number = parsePositiveNumberFromEnv(
"AVERAGE_LOG_ROW_SIZE_IN_BYTES",
1024,
);
export const AverageMetricRowSizeInBytes: number = parsePositiveNumberFromEnv(
"AVERAGE_METRIC_ROW_SIZE_IN_BYTES",
1024,
);
export const AverageExceptionRowSizeInBytes: number =
parsePositiveNumberFromEnv("AVERAGE_EXCEPTION_ROW_SIZE_IN_BYTES", 1024);
export const SlackAppClientId: string | null =
process.env["SLACK_APP_CLIENT_ID"] || null;
export const SlackAppClientSecret: string | null =
@@ -355,3 +462,7 @@ export const VapidPrivateKey: string | undefined =
export const VapidSubject: string =
process.env["VAPID_SUBJECT"] || "mailto:support@oneuptime.com";
export const EnterpriseLicenseValidationUrl: URL = URL.fromString(
"https://oneuptime.com/api/enterprise-license/validate",
);

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.4 KiB

After

Width:  |  Height:  |  Size: 2.4 KiB

View File

@@ -7,6 +7,10 @@ import { JSONArray, JSONObject } from "../../Types/JSON";
import JSONFunctions from "../../Types/JSONFunctions";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
type CacheSetOptions = {
expiresInSeconds: number;
};
export default abstract class GlobalCache {
@CaptureSpan()
public static async getJSONObject(
@@ -56,8 +60,9 @@ export default abstract class GlobalCache {
namespace: string,
key: string,
value: string[],
options?: CacheSetOptions,
): Promise<void> {
await this.setString(namespace, key, JSON.stringify(value));
await this.setString(namespace, key, JSON.stringify(value), options);
}
@CaptureSpan()
@@ -136,11 +141,13 @@ export default abstract class GlobalCache {
namespace: string,
key: string,
value: JSONObject,
options?: CacheSetOptions,
): Promise<void> {
await this.setString(
namespace,
key,
JSON.stringify(JSONFunctions.serialize(value)),
options,
);
}
@@ -149,6 +156,7 @@ export default abstract class GlobalCache {
namespace: string,
key: string,
value: string,
options?: CacheSetOptions,
): Promise<void> {
const client: ClientType | null = Redis.getClient();
@@ -157,9 +165,8 @@ export default abstract class GlobalCache {
}
await client.set(`${namespace}-${key}`, value);
await client.expire(
`${namespace}-${key}`,
OneUptimeDate.getSecondsInDays(30),
);
const expiresInSeconds: number =
options?.expiresInSeconds ?? OneUptimeDate.getSecondsInDays(30);
await client.expire(`${namespace}-${key}`, expiresInSeconds);
}
}

View File

@@ -0,0 +1,23 @@
import { MigrationInterface, QueryRunner } from "typeorm";
// Adds a nullable "whatsAppMessageId" column to WhatsAppLog so outbound
// messages can be correlated with provider callbacks, plus an index for
// fast lookup by that id.
export class MigrationName1760345757975 implements MigrationInterface {
  public name = "MigrationName1760345757975";

  // Forward: add the column, then index it.
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "WhatsAppLog" ADD "whatsAppMessageId" character varying(100)`,
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_2090742b9abffadde19dab2026" ON "WhatsAppLog" ("whatsAppMessageId") `,
    );
  }

  // Rollback: drop the index before the column it covers.
  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `DROP INDEX "public"."IDX_2090742b9abffadde19dab2026"`,
    );
    await queryRunner.query(
      `ALTER TABLE "WhatsAppLog" DROP COLUMN "whatsAppMessageId"`,
    );
  }
}

View File

@@ -0,0 +1,23 @@
import { MigrationInterface, QueryRunner } from "typeorm";
// Adds a unique, nullable "metaWhatsAppWebhookVerifyToken" column to
// GlobalConfig (token used to verify inbound Meta WhatsApp webhooks).
export class MigrationName1760357680881 implements MigrationInterface {
  public name = "MigrationName1760357680881";

  // Forward: add the column, then make it unique.
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD "metaWhatsAppWebhookVerifyToken" character varying(100)`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD CONSTRAINT "UQ_afe98d53b718f485d3d64b383b8" UNIQUE ("metaWhatsAppWebhookVerifyToken")`,
    );
  }

  // Rollback: drop the constraint before the column.
  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP CONSTRAINT "UQ_afe98d53b718f485d3d64b383b8"`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP COLUMN "metaWhatsAppWebhookVerifyToken"`,
    );
  }
}

View File

@@ -0,0 +1,29 @@
import { MigrationInterface, QueryRunner } from "typeorm";
// Adds embedded-overall-status support to StatusPage: a feature flag
// (default off), a nullable access token, and an index on that token.
export class MigrationName1761232578396 implements MigrationInterface {
  public name = "MigrationName1761232578396";

  // Forward: flag column, token column, then the token index.
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "StatusPage" ADD "enableEmbeddedOverallStatus" boolean NOT NULL DEFAULT false`,
    );
    await queryRunner.query(
      `ALTER TABLE "StatusPage" ADD "embeddedOverallStatusToken" character varying(100)`,
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_350d2250fb17e0dc10663de72a" ON "StatusPage" ("embeddedOverallStatusToken") `,
    );
  }

  // Rollback: reverse order — index, token column, flag column.
  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `DROP INDEX "public"."IDX_350d2250fb17e0dc10663de72a"`,
    );
    await queryRunner.query(
      `ALTER TABLE "StatusPage" DROP COLUMN "embeddedOverallStatusToken"`,
    );
    await queryRunner.query(
      `ALTER TABLE "StatusPage" DROP COLUMN "enableEmbeddedOverallStatus"`,
    );
  }
}

View File

@@ -0,0 +1,49 @@
import { MigrationInterface, QueryRunner } from "typeorm";
// Creates the IncidentPostmortemTemplate table (per-project postmortem
// templates), adds a "postmortemNote" text column to Incident, and wires
// the template's project/user foreign keys.
export class MigrationName1761834523183 implements MigrationInterface {
  public name = "MigrationName1761834523183";

  // Forward: create table and indexes, add Incident column, then add FKs
  // (FKs last so the referenced columns already exist).
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `CREATE TABLE "IncidentPostmortemTemplate" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP WITH TIME ZONE, "version" integer NOT NULL, "projectId" uuid NOT NULL, "postmortemNote" text NOT NULL, "templateName" character varying(100) NOT NULL, "templateDescription" character varying(500) NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, CONSTRAINT "PK_76a09ebf10e7874f0c8ee1f0120" PRIMARY KEY ("_id"))`,
    );
    await queryRunner.query(
      `CREATE INDEX "IDX_2a18729813c7c666cc37683c4e" ON "IncidentPostmortemTemplate" ("projectId") `,
    );
    // NOTE(review): b-tree index on an unbounded "text" column — presumably
    // auto-generated from a decorator; confirm it is intentional.
    await queryRunner.query(
      `CREATE INDEX "IDX_c791fe4d7179b57064ace561c3" ON "IncidentPostmortemTemplate" ("postmortemNote") `,
    );
    await queryRunner.query(`ALTER TABLE "Incident" ADD "postmortemNote" text`);
    await queryRunner.query(
      `ALTER TABLE "IncidentPostmortemTemplate" ADD CONSTRAINT "FK_2a18729813c7c666cc37683c4ea" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
    );
    await queryRunner.query(
      `ALTER TABLE "IncidentPostmortemTemplate" ADD CONSTRAINT "FK_961ac93c4d7ea881170692333d0" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
    );
    await queryRunner.query(
      `ALTER TABLE "IncidentPostmortemTemplate" ADD CONSTRAINT "FK_2e886387888f1311f361d569b8e" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
    );
  }

  // Rollback: drop FKs first, then the Incident column, the indexes,
  // and finally the table itself.
  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "IncidentPostmortemTemplate" DROP CONSTRAINT "FK_2e886387888f1311f361d569b8e"`,
    );
    await queryRunner.query(
      `ALTER TABLE "IncidentPostmortemTemplate" DROP CONSTRAINT "FK_961ac93c4d7ea881170692333d0"`,
    );
    await queryRunner.query(
      `ALTER TABLE "IncidentPostmortemTemplate" DROP CONSTRAINT "FK_2a18729813c7c666cc37683c4ea"`,
    );
    await queryRunner.query(
      `ALTER TABLE "Incident" DROP COLUMN "postmortemNote"`,
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_c791fe4d7179b57064ace561c3"`,
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_2a18729813c7c666cc37683c4e"`,
    );
    await queryRunner.query(`DROP TABLE "IncidentPostmortemTemplate"`);
  }
}

View File

@@ -0,0 +1,69 @@
import { MigrationInterface, QueryRunner } from "typeorm";
// Creates the EnterpriseLicense table (license key registry with expiry
// and optional contract value) and adds enterprise-license fields to
// GlobalConfig, each backed by a UNIQUE constraint.
export class MigrationName1762181014879 implements MigrationInterface {
  public name = "MigrationName1762181014879";

  // Forward: create table + unique index, then add GlobalConfig columns.
  // NOTE(review): UNIQUE constraints on every GlobalConfig column here
  // (including a timestamp) look auto-generated from decorators — since
  // GlobalConfig is effectively single-row this is harmless, but confirm.
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `CREATE TABLE "EnterpriseLicense" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP WITH TIME ZONE, "version" integer NOT NULL, "companyName" character varying(100) NOT NULL, "licenseKey" character varying(100) NOT NULL, "expiresAt" TIMESTAMP WITH TIME ZONE NOT NULL, "annualContractValue" integer, CONSTRAINT "UQ_d35e76999092d8a16a66e84c17c" UNIQUE ("licenseKey"), CONSTRAINT "PK_731aa4437672f250fd51ec04166" PRIMARY KEY ("_id"))`,
    );
    await queryRunner.query(
      `CREATE UNIQUE INDEX "IDX_d35e76999092d8a16a66e84c17" ON "EnterpriseLicense" ("licenseKey") `,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD "enterpriseCompanyName" character varying(100)`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD CONSTRAINT "UQ_46983cb1a59503dc09fc84bbe0c" UNIQUE ("enterpriseCompanyName")`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD "enterpriseLicenseKey" character varying(100)`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD CONSTRAINT "UQ_89f80e8a18c3372ee150a3812c1" UNIQUE ("enterpriseLicenseKey")`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD "enterpriseLicenseExpiresAt" TIMESTAMP WITH TIME ZONE`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD CONSTRAINT "UQ_a361278e9ce4056d59e8fb13319" UNIQUE ("enterpriseLicenseExpiresAt")`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD "enterpriseLicenseToken" text`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" ADD CONSTRAINT "UQ_b0b9322c111c0cc629fedbb4eb3" UNIQUE ("enterpriseLicenseToken")`,
    );
  }

  // Rollback: drop each constraint before its column, then the index and
  // the table — exact reverse of up().
  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP CONSTRAINT "UQ_b0b9322c111c0cc629fedbb4eb3"`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP COLUMN "enterpriseLicenseToken"`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP CONSTRAINT "UQ_a361278e9ce4056d59e8fb13319"`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP COLUMN "enterpriseLicenseExpiresAt"`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP CONSTRAINT "UQ_89f80e8a18c3372ee150a3812c1"`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP COLUMN "enterpriseLicenseKey"`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP CONSTRAINT "UQ_46983cb1a59503dc09fc84bbe0c"`,
    );
    await queryRunner.query(
      `ALTER TABLE "GlobalConfig" DROP COLUMN "enterpriseCompanyName"`,
    );
    await queryRunner.query(
      `DROP INDEX "public"."IDX_d35e76999092d8a16a66e84c17"`,
    );
    await queryRunner.query(`DROP TABLE "EnterpriseLicense"`);
  }
}

View File

@@ -0,0 +1,14 @@
import { MigrationInterface, QueryRunner } from "typeorm";
// Adds a nullable "jwtRefreshToken" column to StatusPagePrivateUser so
// private status-page sessions can persist a (hashed) refresh token.
// Normalized to the sibling-migration style: double quotes, semicolons,
// and wrapped query strings (SQL unchanged).
export class MigrationName1762430566091 implements MigrationInterface {
  public name = "MigrationName1762430566091";

  // Forward: add the refresh-token column.
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "StatusPagePrivateUser" ADD "jwtRefreshToken" character varying(100)`,
    );
  }

  // Rollback: drop the column again.
  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "StatusPagePrivateUser" DROP COLUMN "jwtRefreshToken"`,
    );
  }
}

View File

@@ -175,6 +175,12 @@ import { MigrationName1759175457008 } from "./1759175457008-MigrationName";
import { MigrationName1759232954703 } from "./1759232954703-MigrationName";
import { RenameUserTwoFactorAuthToUserTotpAuth1759234532998 } from "./1759234532998-MigrationName";
import { MigrationName1759943124812 } from "./1759943124812-MigrationName";
import { MigrationName1760345757975 } from "./1760345757975-MigrationName";
import { MigrationName1760357680881 } from "./1760357680881-MigrationName";
import { MigrationName1761232578396 } from "./1761232578396-MigrationName";
import { MigrationName1761834523183 } from "./1761834523183-MigrationName";
import { MigrationName1762181014879 } from "./1762181014879-MigrationName";
import { MigrationName1762430566091 } from "./1762430566091-MigrationName";
export default [
InitialMigration,
@@ -354,4 +360,10 @@ export default [
MigrationName1759232954703,
RenameUserTwoFactorAuthToUserTotpAuth1759234532998,
MigrationName1759943124812,
MigrationName1760345757975,
MigrationName1760357680881,
MigrationName1761232578396,
MigrationName1761834523183,
MigrationName1762181014879,
MigrationName1762430566091
];

View File

@@ -1,9 +1,4 @@
import {
ClusterKey,
RedisHostname,
RedisPassword,
RedisPort,
} from "../EnvironmentConfig";
import { ClusterKey } from "../EnvironmentConfig";
import Dictionary from "../../Types/Dictionary";
import { JSONObject } from "../../Types/JSON";
import { Queue as BullQueue, Job, JobsOptions } from "bullmq";
@@ -13,6 +8,7 @@ import { BullMQAdapter } from "@bull-board/api/bullMQAdapter";
import { ExpressRouter } from "../Utils/Express";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
import logger from "../Utils/Logger";
import Redis from "./Redis";
export enum QueueName {
Workflow = "Workflow",
@@ -25,6 +21,7 @@ export enum QueueName {
}
export type QueueJob = Job;
type BullBoardQueues = Parameters<typeof createBullBoard>[0]["queues"];
export default class Queue {
private static queueDict: Dictionary<BullQueue> = {};
@@ -39,6 +36,11 @@ export default class Queue {
}>
> = {};
// BullMQ rejects custom IDs containing colons, so normalize them early.
private static sanitizeJobId(jobId: string): string {
return jobId.replace(/:/g, "-");
}
private static async setupReconnectListener(
queue: BullQueue,
queueName: QueueName,
@@ -82,11 +84,7 @@ export default class Queue {
}
const queue: BullQueue = new BullQueue(queueName, {
connection: {
host: RedisHostname.toString(),
port: RedisPort.toNumber(),
password: RedisPassword,
},
connection: Redis.getRedisOptions(),
// Keep BullMQ data under control to avoid Redis bloat
defaultJobOptions: {
// keep only recent completed/failed jobs
@@ -138,14 +136,17 @@ export default class Queue {
return;
}
const job: Job | undefined = await this.getQueue(queueName).getJob(jobId);
const sanitizedJobId: string = this.sanitizeJobId(jobId.toString());
const job: Job | undefined =
await this.getQueue(queueName).getJob(sanitizedJobId);
if (job) {
await job.remove();
}
// remove existing repeatable job
await this.getQueue(queueName).removeRepeatableByKey(jobId);
await this.getQueue(queueName).removeRepeatableByKey(sanitizedJobId);
}
@CaptureSpan()
@@ -157,12 +158,15 @@ export default class Queue {
public static getQueueInspectorRouter(): ExpressRouter {
const serverAdapter: ExpressAdapter = new ExpressAdapter();
const queueAdapters: BullMQAdapter[] = Object.values(QueueName).map(
(queueName: QueueName) => {
return new BullMQAdapter(this.getQueue(queueName));
},
);
createBullBoard({
queues: [
...Object.values(QueueName).map((queueName: QueueName) => {
return new BullMQAdapter(this.getQueue(queueName));
}),
],
// Cast keeps compatibility until bull-board widens QueueJob.progress
queues: queueAdapters as unknown as BullBoardQueues,
serverAdapter: serverAdapter,
});
@@ -187,8 +191,10 @@ export default class Queue {
repeatableKey?: string | undefined;
},
): Promise<Job> {
const sanitizedJobId: string = this.sanitizeJobId(jobId.toString());
const optionsObject: JobsOptions = {
jobId: jobId.toString(),
jobId: sanitizedJobId,
};
if (options && options.scheduleAt) {
@@ -197,7 +203,8 @@ export default class Queue {
};
}
const job: Job | undefined = await this.getQueue(queueName).getJob(jobId);
const job: Job | undefined =
await this.getQueue(queueName).getJob(sanitizedJobId);
if (job) {
await job.remove();
@@ -215,7 +222,7 @@ export default class Queue {
if (!this.repeatableJobs[queueName]) {
this.repeatableJobs[queueName] = {};
}
this.repeatableJobs[queueName]![jobId] = {
this.repeatableJobs[queueName]![sanitizedJobId] = {
jobName,
data,
options: optionsObject,

View File

@@ -1,4 +1,3 @@
import { RedisHostname, RedisPassword, RedisPort } from "../EnvironmentConfig";
import { QueueJob, QueueName } from "./Queue";
import TimeoutException from "../../Types/Exception/TimeoutException";
import {
@@ -8,6 +7,7 @@ import {
} from "../../Types/FunctionTypes";
import { Worker } from "bullmq";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
import Redis from "./Redis";
export default class QueueWorker {
@CaptureSpan()
@@ -30,11 +30,7 @@ export default class QueueWorker {
},
): Worker {
const worker: Worker = new Worker(queueName, onJobInQueue, {
connection: {
host: RedisHostname.toString(),
port: RedisPort.toNumber(),
password: RedisPassword,
},
connection: Redis.getRedisOptions(),
concurrency: options.concurrency,
// Only set these values if provided so we do not override BullMQ defaults
...(options.lockDuration ? { lockDuration: options.lockDuration } : {}),

View File

@@ -10,7 +10,9 @@ import {
OneUptimeRequest,
} from "../Utils/Express";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
import JSONWebToken from "../Utils/JsonWebToken";
import JSONWebToken, {
RefreshTokenData,
} from "../Utils/JsonWebToken";
import logger from "../Utils/Logger";
import Response from "../Utils/Response";
import ProjectMiddleware from "./ProjectAuthorization";
@@ -33,6 +35,8 @@ import {
import UserType from "../../Types/UserType";
import Project from "../../Models/DatabaseModels/Project";
import UserPermissionUtil from "../Utils/UserPermission/UserPermission";
import User from "../../Models/DatabaseModels/User";
import { EncryptionSecret } from "../EnvironmentConfig";
export default class UserMiddleware {
/*
@@ -161,22 +165,44 @@ export default class UserMiddleware {
);
}
const accessToken: string | undefined =
let accessToken: string | undefined =
UserMiddleware.getAccessTokenFromExpressRequest(req);
let userAuthorization: JSONWebTokenData | null = null;
if (!accessToken) {
if (accessToken) {
try {
userAuthorization = JSONWebToken.decode(accessToken);
} catch (err) {
const error: Error = err as Error;
logger.warn(
`Invalid access token, attempting refresh: ${
error.message || "unknown error"
}`,
);
logger.debug(error);
}
}
if (!userAuthorization) {
const refreshedSession:
| {
accessToken: string;
userAuthorization: JSONWebTokenData;
}
| null = await UserMiddleware.tryRefreshSession(req, res);
if (refreshedSession) {
accessToken = refreshedSession.accessToken;
userAuthorization = refreshedSession.userAuthorization;
}
}
if (!userAuthorization) {
oneuptimeRequest.userType = UserType.Public;
return next();
}
try {
oneuptimeRequest.userAuthorization = JSONWebToken.decode(accessToken);
} catch (err) {
// if the token is invalid or expired, it'll throw this error.
logger.error(err);
oneuptimeRequest.userType = UserType.Public;
return next();
}
oneuptimeRequest.userAuthorization = userAuthorization;
if (oneuptimeRequest.userAuthorization.isMasterAdmin) {
oneuptimeRequest.userType = UserType.MasterAdmin;
@@ -184,7 +210,7 @@ export default class UserMiddleware {
oneuptimeRequest.userType = UserType.User;
}
const userId: string = oneuptimeRequest.userAuthorization.userId.toString();
const userId: string = userAuthorization.userId.toString();
await UserService.updateOneBy({
query: {
@@ -290,6 +316,113 @@ export default class UserMiddleware {
return next();
}
/**
 * Attempts to rebuild an authenticated session from the refresh-token
 * cookie when no valid access token was presented.
 *
 * Flow:
 *  1. Read and decode the refresh-token cookie; on decode failure, clear
 *     both auth cookies and return null.
 *  2. Find the user whose stored jwtRefreshToken equals the hash of the
 *     token's sessionId (hashed with EncryptionSecret); mismatch also
 *     clears both cookies.
 *  3. Issue fresh cookies via CookieUtil.setUserCookie, mirror them onto
 *     req.cookies for the rest of this request, and rotate the stored
 *     refresh-token hash to the new sessionId.
 *
 * @param req - incoming request carrying the refresh-token cookie.
 * @param res - response on which cookies are set or removed.
 * @returns the new access token and its decoded payload, or null when no
 *          session could be refreshed.
 */
@CaptureSpan()
private static async tryRefreshSession(
  req: ExpressRequest,
  res: ExpressResponse,
): Promise<
  | {
      accessToken: string;
      userAuthorization: JSONWebTokenData;
    }
  | null
> {
  const refreshToken: string | undefined =
    CookieUtil.getCookieFromExpressRequest(
      req,
      CookieUtil.getRefreshTokenKey(),
    );

  // No refresh cookie at all: nothing to refresh.
  if (!refreshToken) {
    return null;
  }

  let refreshTokenData: RefreshTokenData;

  try {
    refreshTokenData = JSONWebToken.decodeRefreshToken(refreshToken);
  } catch (err) {
    const error: Error = err as Error;
    logger.warn(
      `Failed to decode refresh token during middleware refresh: ${
        error.message || "unknown error"
      }`,
    );
    logger.debug(error);
    // Malformed/undecodable token — drop both auth cookies so the client
    // re-authenticates instead of retrying with a bad token.
    CookieUtil.removeCookie(res, CookieUtil.getRefreshTokenKey());
    CookieUtil.removeCookie(res, CookieUtil.getUserTokenKey());
    return null;
  }

  // The DB stores a hash of the session id, never the raw value.
  const hashedSessionId: string = await HashedString.hashValue(
    refreshTokenData.sessionId,
    EncryptionSecret,
  );

  const user: User | null = await UserService.findOneBy({
    query: {
      _id: refreshTokenData.userId,
      jwtRefreshToken: hashedSessionId,
    },
    select: {
      _id: true,
      email: true,
      name: true,
      isMasterAdmin: true,
      profilePictureId: true,
      timezone: true,
    },
    props: {
      isRoot: true,
    },
  });

  // Unknown user or session-hash mismatch (e.g. the token was already
  // rotated): clear cookies and refuse the refresh.
  if (!user) {
    CookieUtil.removeCookie(res, CookieUtil.getRefreshTokenKey());
    CookieUtil.removeCookie(res, CookieUtil.getUserTokenKey());
    return null;
  }

  // Issue a brand-new access/refresh token pair on the response.
  const session = CookieUtil.setUserCookie({
    expressResponse: res,
    user: user,
    isGlobalLogin: refreshTokenData.isGlobalLogin,
  });

  // Mirror the fresh cookies onto the *request* so downstream code in
  // this same request cycle sees the new tokens, not the stale ones.
  if (!req.cookies) {
    req.cookies = {} as Dictionary<string>;
  }
  req.cookies[CookieUtil.getUserTokenKey()] = session.accessToken;
  req.cookies[CookieUtil.getRefreshTokenKey()] = session.refreshToken;

  // Rotate: persist the hash of the new session id so the old refresh
  // token can no longer be replayed.
  const hashedNewSessionId: string = await HashedString.hashValue(
    session.sessionId,
    EncryptionSecret,
  );

  await UserService.updateOneBy({
    query: {
      _id: user.id!,
    },
    data: {
      jwtRefreshToken: hashedNewSessionId,
    },
    props: {
      isRoot: true,
    },
  });

  const userAuthorization: JSONWebTokenData = JSONWebToken.decode(
    session.accessToken,
  );

  return {
    accessToken: session.accessToken,
    userAuthorization,
  };
}
@CaptureSpan()
public static async getUserTenantAccessPermissionWithTenantId(data: {
req: ExpressRequest;

View File

@@ -19,7 +19,7 @@ export class Service extends DatabaseService<Model> {
super(Model);
if (IsBillingEnabled) {
this.hardDeleteItemsOlderThanInDays("createdAt", 120);
this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
}
}

View File

@@ -28,7 +28,6 @@ import AlertState from "../../Models/DatabaseModels/AlertState";
import AlertStateTimeline from "../../Models/DatabaseModels/AlertStateTimeline";
import User from "../../Models/DatabaseModels/User";
import { IsBillingEnabled } from "../EnvironmentConfig";
import TelemetryType from "../../Types/Telemetry/TelemetryType";
import logger from "../Utils/Logger";
import TelemetryUtil from "../Utils/Telemetry/Telemetry";
import MetricService from "./MetricService";
@@ -60,7 +59,7 @@ export class Service extends DatabaseService<Model> {
public constructor() {
super(Model);
if (IsBillingEnabled) {
this.hardDeleteItemsOlderThanInDays("createdAt", 120);
this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
}
}
@@ -1156,6 +1155,9 @@ ${alertSeverity.name}
alertSeverityId: alert.alertSeverity?._id?.toString(),
alertSeverityName: alert.alertSeverity?.name?.toString(),
};
alertCountMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
alertCountMetric.attributes,
);
alertCountMetric.time = alertStartsAt;
alertCountMetric.timeUnixNano = OneUptimeDate.toUnixNano(
@@ -1204,6 +1206,9 @@ ${alertSeverity.name}
alertSeverityId: alert.alertSeverity?._id?.toString(),
alertSeverityName: alert.alertSeverity?.name?.toString(),
};
timeToAcknowledgeMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
timeToAcknowledgeMetric.attributes,
);
timeToAcknowledgeMetric.time =
ackAlertStateTimeline?.startsAt ||
@@ -1257,6 +1262,9 @@ ${alertSeverity.name}
alertSeverityId: alert.alertSeverity?._id?.toString(),
alertSeverityName: alert.alertSeverity?.name?.toString(),
};
timeToResolveMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
timeToResolveMetric.attributes,
);
timeToResolveMetric.time =
resolvedAlertStateTimeline?.startsAt ||
@@ -1303,6 +1311,9 @@ ${alertSeverity.name}
alertSeverityId: alert.alertSeverity?._id?.toString(),
alertSeverityName: alert.alertSeverity?.name?.toString(),
};
alertDurationMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
alertDurationMetric.attributes,
);
alertDurationMetric.time =
lastAlertStateTimeline?.startsAt ||
@@ -1329,15 +1340,6 @@ ${alertSeverity.name}
},
});
// index attributes
TelemetryUtil.indexAttributes({
attributes: ["monitorId", "projectId", "alertId", "monitorName"],
projectId: alert.projectId,
telemetryType: TelemetryType.Metric,
}).catch((err: Error) => {
logger.error(err);
});
TelemetryUtil.indexMetricNameServiceNameMap({
metricNameServiceNameMap: metricTypesMap,
projectId: alert.projectId,

View File

@@ -29,7 +29,7 @@ export class Service extends DatabaseService<AlertStateTimeline> {
public constructor() {
super(AlertStateTimeline);
if (IsBillingEnabled) {
this.hardDeleteItemsOlderThanInDays("createdAt", 120);
this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
}
}

View File

@@ -42,6 +42,7 @@ import SortOrder from "../../Types/BaseDatabase/SortOrder";
import OneUptimeDate from "../../Types/Date";
import BadDataException from "../../Types/Exception/BadDataException";
import Exception from "../../Types/Exception/Exception";
import ExceptionCode from "../../Types/Exception/ExceptionCode";
import { JSONObject } from "../../Types/JSON";
import ObjectID from "../../Types/ObjectID";
import PositiveNumber from "../../Types/PositiveNumber";
@@ -55,13 +56,18 @@ import Sort from "../Types/AnalyticsDatabase/Sort";
import AggregatedModel from "../../Types/BaseDatabase/AggregatedModel";
import ModelEventType from "../../Types/Realtime/ModelEventType";
export type Results = ResultSet<"JSON">;
export type DbJSONResponse = ResponseJSON<{
data?: Array<JSONObject>;
}>;
export default class AnalyticsDatabaseService<
TBaseModel extends AnalyticsBaseModel,
> extends BaseService {
public modelType!: { new (): TBaseModel };
public database!: ClickhouseDatabase;
public model!: TBaseModel;
public databaseClient!: ClickhouseClient;
public databaseClient!: ClickhouseClient | null;
public statementGenerator!: StatementGenerator<TBaseModel>;
public constructor(data: {
@@ -77,7 +83,7 @@ export default class AnalyticsDatabaseService<
this.database = ClickhouseAppInstance; // default database
}
this.databaseClient = this.database.getDataSource() as ClickhouseClient;
this.databaseClient = this.database.getDataSource();
this.statementGenerator = new StatementGenerator<TBaseModel>({
modelType: this.modelType,
@@ -85,6 +91,46 @@ export default class AnalyticsDatabaseService<
});
}
/**
 * Bulk-inserts pre-serialized JSON rows into this model's ClickHouse table
 * in JSONEachRow format, using server-side async inserts (the call returns
 * without waiting for the flush). No-op on an empty batch.
 *
 * @param rows - rows already shaped to match the table's columns.
 * @throws Exception (BadDataException) when the model has no table name;
 *         rethrows any ClickHouse insert error after logging it.
 */
@CaptureSpan()
public async insertJsonRows(rows: Array<JSONObject>): Promise<void> {
  // Nothing to write — skip the round trip entirely.
  if (!rows || rows.length === 0) {
    return;
  }

  // Resolved lazily so services built before the connection existed work.
  const client: ClickhouseClient = this.getDatabaseClient();

  const tableName: string = this.model.tableName;

  if (!tableName) {
    throw new Exception(
      ExceptionCode.BadDataException,
      "Analytics model table name not configured",
    );
  }

  try {
    await client.insert({
      table: tableName,
      values: rows,
      format: "JSONEachRow",
      clickhouse_settings: {
        // Buffer server-side and return before the data is flushed.
        async_insert: 1,
        wait_for_async_insert: 0,
      },
    });
    logger.debug(
      `ClickHouse insert succeeded for table ${tableName} at ${OneUptimeDate.toString(OneUptimeDate.getCurrentDate())}`,
    );
  } catch (error) {
    logger.error(
      `ClickHouse insert failed for table ${tableName} at ${OneUptimeDate.toString(OneUptimeDate.getCurrentDate())}`,
    );
    logger.error(error);
    // Propagate so callers can retry or surface the failure.
    throw error;
  }
}
@CaptureSpan()
public async doesColumnExistInDatabase(columnName: string): Promise<boolean> {
const statement: string =
@@ -802,23 +848,21 @@ export default class AnalyticsDatabaseService<
public useDefaultDatabase(): void {
this.database = ClickhouseAppInstance;
this.databaseClient = this.database.getDataSource() as ClickhouseClient;
this.databaseClient = this.database.getDataSource();
}
@CaptureSpan()
public async execute(
statement: Statement | string
): Promise<ExecResult<Stream>> {
if (!this.databaseClient) {
this.useDefaultDatabase();
}
const client: ClickhouseClient = this.getDatabaseClient();
const query: string =
statement instanceof Statement ? statement.query : statement;
const queryParams: Record<string, unknown> | undefined =
statement instanceof Statement ? statement.query_params : undefined;
return (await this.databaseClient.exec({
return (await client.exec({
query: query,
query_params: queryParams || (undefined as any), // undefined is not specified in the type for query_params, but its ok to pass undefined.
})) as ExecResult<Stream>;
@@ -828,22 +872,43 @@ export default class AnalyticsDatabaseService<
public async executeQuery(
statement: Statement | string
): Promise<ResultSet<"JSON">> {
if (!this.databaseClient) {
this.useDefaultDatabase();
}
const client: ClickhouseClient = this.getDatabaseClient();
const query: string =
statement instanceof Statement ? statement.query : statement;
const queryParams: Record<string, unknown> | undefined =
statement instanceof Statement ? statement.query_params : undefined;
return await this.databaseClient.query({
return await client.query({
query: query,
format: "JSON",
query_params: queryParams || (undefined as any), // undefined is not specified in the type for query_params, but its ok to pass undefined.
});
}
private getDatabaseClient(): ClickhouseClient {
  // Services may be constructed before the ClickHouse connection exists;
  // re-resolve the client lazily so they pick up the live connection.
  if (!this.database) {
    this.useDefaultDatabase();
  }

  if (!this.databaseClient && this.database) {
    this.databaseClient = this.database.getDataSource();
  }

  // Positive guard: hand back the client when we have one, otherwise fail
  // loudly so callers never issue statements against a dead connection.
  if (this.databaseClient) {
    return this.databaseClient;
  }

  throw new Exception(
    ExceptionCode.DatabaseNotConnectedException,
    "ClickHouse client is not connected",
  );
}
protected async onUpdateSuccess(
onUpdate: OnUpdate<TBaseModel>,
_updatedItemIds: Array<ObjectID>

View File

@@ -2,6 +2,7 @@ import { EncryptionSecret, WorkflowHostname } from "../EnvironmentConfig";
import PostgresAppInstance from "../Infrastructure/PostgresDatabase";
import ClusterKeyAuthorization from "../Middleware/ClusterKeyAuthorization";
import CountBy from "../Types/Database/CountBy";
import FindAllBy from "../Types/Database/FindAllBy";
import CreateBy from "../Types/Database/CreateBy";
import DeleteBy from "../Types/Database/DeleteBy";
import DeleteById from "../Types/Database/DeleteById";
@@ -1168,6 +1169,75 @@ class DatabaseService<TBaseModel extends BaseModel> extends BaseService {
}
}
@CaptureSpan()
public async findAllBy(
findAllBy: FindAllBy<TBaseModel>,
): Promise<Array<TBaseModel>> {
const { limit, skip, ...rest } = findAllBy;
let remaining: number | undefined = this.normalizePositiveNumber(limit);
let currentSkip: number = this.normalizePositiveNumber(skip) || 0;
const results: Array<TBaseModel> = [];
while (true) {
const currentBatchSize: number =
remaining !== undefined
? Math.min(LIMIT_MAX, Math.max(remaining, 0))
: LIMIT_MAX;
if (currentBatchSize <= 0) {
break;
}
const page: Array<TBaseModel> = await this.findBy({
...rest,
skip: currentSkip,
limit: currentBatchSize,
});
if (page.length === 0) {
break;
}
results.push(...page);
currentSkip += page.length;
if (remaining !== undefined) {
remaining -= page.length;
if (remaining <= 0) {
break;
}
}
if (page.length < currentBatchSize) {
break;
}
}
return results;
}
/**
 * Collapse the PositiveNumber-wrapper / plain-number union to a plain
 * number. Nullish or unrecognized values map to undefined.
 */
private normalizePositiveNumber(
  value?: PositiveNumber | number,
): number | undefined {
  // `== null` deliberately matches both null and undefined.
  if (value == null) {
    return undefined;
  }

  if (typeof value === "number") {
    return value;
  }

  if (value instanceof PositiveNumber) {
    return value.toNumber();
  }

  return undefined;
}
@CaptureSpan()
public async findBy(findBy: FindBy<TBaseModel>): Promise<Array<TBaseModel>> {
return await this._findBy(findBy);

View File

@@ -7,6 +7,9 @@ import BadDataException from "../../Types/Exception/BadDataException";
import Text from "../../Types/Text";
import Model from "../../Models/DatabaseModels/Domain";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
import { LIMIT_PER_PROJECT } from "../../Types/Database/LimitMax";
import ObjectID from "../../Types/ObjectID";
import { FindWhere } from "../../Types/BaseDatabase/Query";
export class Service extends DatabaseService<Model> {
public constructor() {
super(Model);
@@ -32,6 +35,12 @@ export class Service extends DatabaseService<Model> {
createBy.data.domain = new Domain(domain.trim().toLowerCase());
}
if (!createBy.props.isRoot && createBy.data.isVerified) {
throw new BadDataException(
"Domain cannot be verified during creation. Please verify the domain after creation. Please set isVerified to false.",
);
}
createBy.data.domainVerificationText =
"oneuptime-verification-" + Text.generateRandomText(20);
return Promise.resolve({ createBy, carryForward: null });
@@ -41,67 +50,66 @@ export class Service extends DatabaseService<Model> {
protected override async onBeforeUpdate(
updateBy: UpdateBy<Model>,
): Promise<OnUpdate<Model>> {
if (
updateBy.data.isVerified &&
updateBy.query._id &&
!updateBy.props.isRoot
) {
if (updateBy.data.isVerified && !updateBy.props.isRoot) {
const projectId: FindWhere<ObjectID> | undefined =
updateBy.query.projectId || updateBy.props.tenantId;
if (!projectId) {
throw new BadDataException(
"Project ID is required to verify the domain.",
);
}
// check the verification of the domain.
const items: Array<Model> = await this.findBy({
query: {
_id: updateBy.query._id as string,
projectId: updateBy.props.tenantId!,
projectId,
...updateBy.query,
},
select: {
domain: true,
domainVerificationText: true,
},
limit: 1,
limit: LIMIT_PER_PROJECT,
skip: 0,
props: {
isRoot: true,
},
});
if (items.length === 0) {
throw new BadDataException(
"Domain with id " + updateBy.query._id + " not found.",
for (const item of items) {
const domain: string | undefined = item?.domain?.toString();
const verificationText: string | undefined =
item?.domainVerificationText?.toString();
if (!domain) {
throw new BadDataException("Domain not found.");
}
if (!verificationText) {
throw new BadDataException(
"Domain verification text with id " +
updateBy.query._id +
" not found.",
);
}
const isVerified: boolean = await Domain.verifyTxtRecord(
domain,
verificationText,
);
}
const domain: string | undefined = items[0]?.domain?.toString();
const verificationText: string | undefined =
items[0]?.domainVerificationText?.toString();
if (!domain) {
throw new BadDataException(
"Domain with id " + updateBy.query._id + " not found.",
);
}
if (!verificationText) {
throw new BadDataException(
"Domain verification text with id " +
updateBy.query._id +
" not found.",
);
}
const isVerified: boolean = await Domain.verifyTxtRecord(
domain,
verificationText,
);
if (!isVerified) {
throw new BadDataException(
"Verification TXT record " +
verificationText +
" not found in domain " +
domain +
". Please add a TXT record to verify the domain. If you have already added the TXT record, please wait for few hours to let DNS to propagate.",
);
if (!isVerified) {
throw new BadDataException(
"Verification TXT record " +
verificationText +
" not found in domain " +
domain +
". Please add a TXT record to verify the domain. If you have already added the TXT record, please wait for few hours to let DNS to propagate.",
);
}
}
}

View File

@@ -0,0 +1,10 @@
import DatabaseService from "./DatabaseService";
import EnterpriseLicense from "../../Models/DatabaseModels/EnterpriseLicense";

/**
 * CRUD service for the EnterpriseLicense model.
 *
 * All behavior is inherited from the generic DatabaseService base class;
 * no model-specific hooks or overrides are defined here.
 */
export class Service extends DatabaseService<EnterpriseLicense> {
  public constructor() {
    super(EnterpriseLicense);
  }
}

// Shared singleton instance, following the convention used by other services.
export default new Service();

View File

@@ -19,7 +19,7 @@ export class Service extends DatabaseService<IncidentFeed> {
super(IncidentFeed);
if (IsBillingEnabled) {
this.hardDeleteItemsOlderThanInDays("createdAt", 120);
this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
}
}

View File

@@ -0,0 +1,10 @@
import DatabaseService from "./DatabaseService";
import Model from "../../Models/DatabaseModels/IncidentPostmortemTemplate";
export class Service extends DatabaseService<Model> {
public constructor() {
super(Model);
}
}
export default new Service();

View File

@@ -43,14 +43,13 @@ import Metric, {
} from "../../Models/AnalyticsModels/Metric";
import OneUptimeDate from "../../Types/Date";
import TelemetryUtil from "../Utils/Telemetry/Telemetry";
import TelemetryType from "../../Types/Telemetry/TelemetryType";
import logger from "../Utils/Logger";
import Semaphore, {
SemaphoreMutex,
} from "../../Server/Infrastructure/Semaphore";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
import { Gray500, Red500 } from "../../Types/BrandColors";
import { Blue500, Gray500, Red500 } from "../../Types/BrandColors";
import Label from "../../Models/DatabaseModels/Label";
import LabelService from "./LabelService";
import IncidentSeverity from "../../Models/DatabaseModels/IncidentSeverity";
@@ -75,11 +74,22 @@ type UpdateCarryForward = Dictionary<{
newMonitorChangeStatusIdTo: ObjectID | undefined;
}>;
type IncidentUpdatePayload = {
postmortemNote?: string | null;
title?: string | null;
rootCause?: string | null;
description?: string | null;
remediationNotes?: string | null;
labels?: unknown;
incidentSeverity?: unknown;
[key: string]: unknown;
};
export class Service extends DatabaseService<Model> {
public constructor() {
  super(Model);
  if (IsBillingEnabled) {
    // Interleaved diff left both the old 120-day and new 3-year retention
    // calls; keep only the new-side 3-year retention.
    this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
  }
}
@@ -1298,59 +1308,103 @@ ${incident.remediationNotes || "No remediation notes provided."}
const projectId: ObjectID = incident!.projectId!;
const incidentNumber: number = incident!.incidentNumber!;
const incidentLabel: string = `Incident ${incidentNumber}`;
const incidentLink: URL = await this.getIncidentLinkInDashboard(
projectId,
incidentId,
);
let shouldAddIncidentFeed: boolean = false;
let feedInfoInMarkdown: string = `**[Incident ${incidentNumber}](${(await this.getIncidentLinkInDashboard(projectId!, incidentId!)).toString()}) was updated.**`;
const updatedIncidentData: IncidentUpdatePayload = (onUpdate.updateBy
.data ?? {}) as IncidentUpdatePayload;
const createdByUserId: ObjectID | undefined | null =
onUpdate.updateBy.props.userId;
if (onUpdate.updateBy.data.title) {
// add incident feed.
if (
Object.prototype.hasOwnProperty.call(
updatedIncidentData,
"postmortemNote",
)
) {
const noteValue: string =
(updatedIncidentData.postmortemNote as string) || "";
const hasNoteContent: boolean = noteValue.trim().length > 0;
feedInfoInMarkdown += `\n\n**Title**:
${onUpdate.updateBy.data.title || "No title provided."}
`;
shouldAddIncidentFeed = true;
const postmortemFeedMarkdown: string = hasNoteContent
? `**📘 Postmortem Note updated for [${incidentLabel}](${incidentLink.toString()})**\n\n${noteValue}`
: `**📘 Postmortem Note cleared for [${incidentLabel}](${incidentLink.toString()})**\n\n_No postmortem note provided._`;
await IncidentFeedService.createIncidentFeedItem({
incidentId,
projectId,
incidentFeedEventType: IncidentFeedEventType.PostmortemNote,
displayColor: Blue500,
feedInfoInMarkdown: postmortemFeedMarkdown,
userId: createdByUserId || undefined,
workspaceNotification: {
sendWorkspaceNotification: true,
},
});
}
if (onUpdate.updateBy.data.rootCause) {
if (onUpdate.updateBy.data.title) {
// add incident feed.
let shouldAddIncidentFeed: boolean = false;
let feedInfoInMarkdown: string = `**[${incidentLabel}](${incidentLink.toString()}) was updated.**`;
feedInfoInMarkdown += `\n\n**📄 Root Cause**:
${onUpdate.updateBy.data.rootCause || "No root cause provided."}
`;
shouldAddIncidentFeed = true;
}
}
if (onUpdate.updateBy.data.description) {
// add incident feed.
feedInfoInMarkdown += `\n\n**Incident Description**:
${onUpdate.updateBy.data.description || "No description provided."}
`;
shouldAddIncidentFeed = true;
}
if (onUpdate.updateBy.data.remediationNotes) {
// add incident feed.
feedInfoInMarkdown += `\n\n**🎯 Remediation Notes**:
${onUpdate.updateBy.data.remediationNotes || "No remediation notes provided."}
`;
if (
Object.prototype.hasOwnProperty.call(updatedIncidentData, "title")
) {
const title: string =
(updatedIncidentData.title as string) || "No title provided.";
feedInfoInMarkdown += `\n\n**Title**: \n${title}\n`;
shouldAddIncidentFeed = true;
}
if (
onUpdate.updateBy.data.labels &&
onUpdate.updateBy.data.labels.length > 0 &&
Array.isArray(onUpdate.updateBy.data.labels)
Object.prototype.hasOwnProperty.call(updatedIncidentData, "rootCause")
) {
const labelIds: Array<ObjectID> = (
onUpdate.updateBy.data.labels as any
const rootCause: string =
(updatedIncidentData.rootCause as string) || "";
const rootCauseText: string = rootCause.trim().length
? rootCause
: "Root cause removed.";
feedInfoInMarkdown += `\n\n**📄 Root Cause**: \n${rootCauseText}\n`;
shouldAddIncidentFeed = true;
}
if (
Object.prototype.hasOwnProperty.call(
updatedIncidentData,
"description",
)
) {
const description: string =
(updatedIncidentData.description as string) ||
"No description provided.";
feedInfoInMarkdown += `\n\n**Incident Description**: \n${description}\n`;
shouldAddIncidentFeed = true;
}
if (
Object.prototype.hasOwnProperty.call(
updatedIncidentData,
"remediationNotes",
)
) {
const remediationNotes: string =
(updatedIncidentData.remediationNotes as string) || "";
const remediationText: string = remediationNotes.trim().length
? remediationNotes
: "Remediation notes removed.";
feedInfoInMarkdown += `\n\n**🎯 Remediation Notes**: \n${remediationText}\n`;
shouldAddIncidentFeed = true;
}
if (
updatedIncidentData.labels &&
(updatedIncidentData.labels as Array<Label>).length > 0 &&
Array.isArray(updatedIncidentData.labels)
) {
const labelIds: Array<ObjectID> = (updatedIncidentData.labels as any)
.map((label: Label) => {
if (label._id) {
return new ObjectID(label._id?.toString());
@@ -1391,16 +1445,14 @@ ${labels
}
if (
onUpdate.updateBy.data.incidentSeverity &&
(onUpdate.updateBy.data.incidentSeverity as any)._id
updatedIncidentData.incidentSeverity &&
(updatedIncidentData.incidentSeverity as any)._id
) {
const incidentSeverity: IncidentSeverity | null =
await IncidentSeverityService.findOneBy({
query: {
_id: new ObjectID(
(
onUpdate.updateBy.data.incidentSeverity as any
)?._id.toString(),
(updatedIncidentData.incidentSeverity as any)?._id.toString(),
),
},
select: {
@@ -1957,6 +2009,9 @@ ${incidentSeverity.name}
incidentSeverityId: incident.incidentSeverity?._id?.toString(),
incidentSeverityName: incident.incidentSeverity?.name?.toString(),
};
incidentCountMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
incidentCountMetric.attributes,
);
incidentCountMetric.time = incidentStartsAt;
incidentCountMetric.timeUnixNano = OneUptimeDate.toUnixNano(
@@ -2014,6 +2069,9 @@ ${incidentSeverity.name}
incidentSeverityId: incident.incidentSeverity?._id?.toString(),
incidentSeverityName: incident.incidentSeverity?.name?.toString(),
};
timeToAcknowledgeMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
timeToAcknowledgeMetric.attributes,
);
timeToAcknowledgeMetric.time =
ackIncidentStateTimeline?.startsAt ||
@@ -2076,6 +2134,9 @@ ${incidentSeverity.name}
incidentSeverityId: incident.incidentSeverity?._id?.toString(),
incidentSeverityName: incident.incidentSeverity?.name?.toString(),
};
timeToResolveMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
timeToResolveMetric.attributes,
);
timeToResolveMetric.time =
resolvedIncidentStateTimeline?.startsAt ||
@@ -2133,6 +2194,9 @@ ${incidentSeverity.name}
incidentSeverityId: incident.incidentSeverity?._id?.toString(),
incidentSeverityName: incident.incidentSeverity?.name?.toString(),
};
incidentDurationMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
incidentDurationMetric.attributes,
);
incidentDurationMetric.time =
lastIncidentStateTimeline?.startsAt ||
@@ -2162,15 +2226,6 @@ ${incidentSeverity.name}
},
});
// index attributes.
TelemetryUtil.indexAttributes({
attributes: ["monitorIds", "projectId", "incidentId", "monitorNames"],
projectId: incident.projectId,
telemetryType: TelemetryType.Metric,
}).catch((err: Error) => {
logger.error(err);
});
TelemetryUtil.indexMetricNameServiceNameMap({
metricNameServiceNameMap: metricTypesMap,
projectId: incident.projectId,

View File

@@ -31,7 +31,7 @@ export class Service extends DatabaseService<IncidentStateTimeline> {
public constructor() {
  super(IncidentStateTimeline);
  if (IsBillingEnabled) {
    // Interleaved diff left both the old 120-day and new 3-year retention
    // calls; keep only the new-side 3-year retention keyed on startsAt.
    this.hardDeleteItemsOlderThanInDays("startsAt", 3 * 365); // 3 years
  }
}

View File

@@ -71,6 +71,7 @@ import ProjectService from "./ProjectService";
import ProjectSmtpConfigService from "./ProjectSmtpConfigService";
import ProjectSsoService from "./ProjectSsoService";
import PromoCodeService from "./PromoCodeService";
import EnterpriseLicenseService from "./EnterpriseLicenseService";
import ResellerPlanService from "./ResellerPlanService";
import ResellerService from "./ResellerService";
import ScheduledMaintenanceCustomFieldService from "./ScheduledMaintenanceCustomFieldService";
@@ -135,7 +136,6 @@ import WorkflowVariablesService from "./WorkflowVariableService";
import AnalyticsBaseModel from "../../Models/AnalyticsModels/AnalyticsBaseModel/AnalyticsBaseModel";
import CopilotPullRequestService from "./CopilotPullRequestService";
import ServiceCatalogDependencyService from "./ServiceCatalogDependencyService";
import TelemetryAttributeService from "./TelemetryAttributeService";
import TelemetryExceptionService from "./TelemetryExceptionService";
import ExceptionInstanceService from "./ExceptionInstanceService";
import CopilotActionTypePriorityService from "./CopilotActionTypePriorityService";
@@ -174,6 +174,7 @@ const services: Array<BaseService> = [
OnCallDutyPolicyTimeLogService,
AcmeCertificateService,
PromoCodeService,
EnterpriseLicenseService,
ResellerService,
ResellerPlanService,
@@ -356,7 +357,6 @@ export const AnalyticsServices: Array<
LogService,
SpanService,
MetricService,
TelemetryAttributeService,
ExceptionInstanceService,
MonitorLogService,
];

View File

@@ -19,7 +19,7 @@ export class Service extends DatabaseService<MonitorFeed> {
super(MonitorFeed);
if (IsBillingEnabled) {
this.hardDeleteItemsOlderThanInDays("createdAt", 120);
this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
}
}

View File

@@ -4,11 +4,6 @@ import ObjectID from "../../Types/ObjectID";
import Metric, {
AggregationTemporality,
} from "../../Models/AnalyticsModels/Metric";
import Dictionary from "../../Types/Dictionary";
import ProductType from "../../Types/MeteredPlan/ProductType";
import { IsBillingEnabled } from "../../Server/EnvironmentConfig";
import TelemetryUsageBillingService from "../../Server/Services/TelemetryUsageBillingService";
import logger from "../../Server/Utils/Logger";
import TelemetryService from "../../Models/DatabaseModels/TelemetryService";
import TelemetryServiceService from "../../Server/Services/TelemetryServiceService";
import { DEFAULT_RETENTION_IN_DAYS } from "../../Models/DatabaseModels/TelemetryUsageBilling";
@@ -20,10 +15,9 @@ export enum OtelAggregationTemporality {
Delta = "AGGREGATION_TEMPORALITY_DELTA",
}
export interface TelemetryServiceDataIngested {
export interface TelemetryServiceMetadata {
serviceName: string;
serviceId: ObjectID;
dataIngestedInGB: number;
dataRententionInDays: number;
}
@@ -80,38 +74,6 @@ export default class OTelIngestService {
service.retainTelemetryDataForDays || DEFAULT_RETENTION_IN_DAYS,
};
}
@CaptureSpan()
public static async recordDataIngestedUsgaeBilling(data: {
services: Dictionary<TelemetryServiceDataIngested>;
projectId: ObjectID;
productType: ProductType;
}): Promise<void> {
if (!IsBillingEnabled) {
return;
}
for (const serviceName in data.services) {
const serviceData: TelemetryServiceDataIngested | undefined =
data.services[serviceName];
if (!serviceData) {
continue;
}
TelemetryUsageBillingService.updateUsageBilling({
projectId: data.projectId,
productType: data.productType,
dataIngestedInGB: serviceData.dataIngestedInGB || 0,
telemetryServiceId: serviceData.serviceId,
retentionInDays: serviceData.dataRententionInDays,
}).catch((err: Error) => {
logger.error("Failed to update usage billing for OTel");
logger.error(err);
});
}
}
@CaptureSpan()
public static getMetricFromDatapoint(data: {
dbMetric: Metric;
@@ -212,6 +174,10 @@ export default class OTelIngestService {
};
}
newDbMetric.attributeKeys = TelemetryUtil.getAttributeKeys(
newDbMetric.attributes,
);
// aggregationTemporality
if (aggregationTemporality) {

View File

@@ -71,6 +71,8 @@ import URL from "../../Types/API/URL";
import Exception from "../../Types/Exception/Exception";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
import DatabaseConfig from "../DatabaseConfig";
import DatabaseCommonInteractionProps from "../../Types/BaseDatabase/DatabaseCommonInteractionProps";
import PositiveNumber from "../../Types/PositiveNumber";
export interface CurrentPlan {
plan: PlanType | null;
@@ -321,145 +323,134 @@ export class ProjectService extends DatabaseService<Model> {
);
}
if (updateBy.data.paymentProviderPlanId) {
// payment provider id changed.
const project: Model | null = await this.findOneById({
id: new ObjectID(updateBy.query._id! as string),
select: {
paymentProviderSubscriptionId: true,
paymentProviderMeteredSubscriptionId: true,
paymentProviderSubscriptionSeats: true,
paymentProviderPlanId: true,
trialEndsAt: true,
},
props: {
isRoot: true,
},
});
if (!project) {
throw new BadDataException("Project not found");
}
if (
project.paymentProviderPlanId !== updateBy.data.paymentProviderPlanId
) {
logger.debug("Changing plan for project " + project.id);
const plan: SubscriptionPlan | undefined =
SubscriptionPlan.getSubscriptionPlanById(
updateBy.data.paymentProviderPlanId! as string,
getAllEnvVars(),
);
if (!plan) {
throw new BadDataException("Invalid plan");
}
logger.debug(
"Changing plan for project " +
project.id?.toString() +
" to " +
plan.getName(),
);
if (!project.paymentProviderSubscriptionSeats) {
project.paymentProviderSubscriptionSeats =
await TeamMemberService.getUniqueTeamMemberCountInProject(
project.id!,
);
}
logger.debug(
"Changing plan for project " +
project.id?.toString() +
" to " +
plan.getName() +
" with seats " +
project.paymentProviderSubscriptionSeats,
);
const subscription: {
subscriptionId: string;
meteredSubscriptionId: string;
trialEndsAt?: Date | undefined;
} = await BillingService.changePlan({
projectId: project.id!,
subscriptionId: project.paymentProviderSubscriptionId as string,
meteredSubscriptionId:
project.paymentProviderMeteredSubscriptionId as string,
serverMeteredPlans: AllMeteredPlans,
newPlan: plan,
quantity: project.paymentProviderSubscriptionSeats as number,
isYearly:
plan.getYearlyPlanId() === updateBy.data.paymentProviderPlanId,
endTrialAt: project.trialEndsAt,
});
logger.debug(
"Changing plan for project " +
project.id?.toString() +
" to " +
plan.getName() +
" with seats " +
project.paymentProviderSubscriptionSeats +
" completed.",
);
// refresh subscription status.
const subscriptionState: SubscriptionStatus =
await BillingService.getSubscriptionStatus(
subscription.subscriptionId as string,
);
const meteredSubscriptionState: SubscriptionStatus =
await BillingService.getSubscriptionStatus(
subscription.meteredSubscriptionId as string,
);
await this.updateOneById({
id: new ObjectID(updateBy.query._id! as string),
data: {
paymentProviderSubscriptionId: subscription.subscriptionId,
paymentProviderMeteredSubscriptionId:
subscription.meteredSubscriptionId,
trialEndsAt: subscription.trialEndsAt || new Date(),
planName: SubscriptionPlan.getPlanType(
updateBy.data.paymentProviderPlanId! as string,
),
paymentProviderMeteredSubscriptionStatus:
meteredSubscriptionState,
paymentProviderSubscriptionStatus: subscriptionState,
},
props: {
isRoot: true,
ignoreHooks: true,
},
});
logger.debug(
"Changing plan for project " +
project.id?.toString() +
" to " +
plan.getName() +
" with seats " +
project.paymentProviderSubscriptionSeats +
" completed and project updated.",
);
if (project.id) {
// send slack message on plan change.
await this.sendSubscriptionChangeWebhookSlackNotification(
project.id,
);
}
}
if (
updateBy.data.paymentProviderPlanId &&
!updateBy.props.ignoreHooks &&
!updateBy.props.isRoot
) {
throw new BadDataException(
"Project plan cannot be updated directly. Please use the change plan API.",
);
}
}
return { updateBy, carryForward: [] };
}
@CaptureSpan()
/**
 * Switch a project to a new billing plan.
 *
 * Validates that billing is enabled and the project has both base and
 * metered payment-provider subscriptions, resolves the target plan, then
 * delegates to BillingService.changePlan and persists the resulting
 * subscription state back onto the project before sending a webhook/Slack
 * notification.
 *
 * @param params.projectId - project whose plan is being changed.
 * @param params.paymentProviderPlanId - payment-provider plan id (monthly or yearly).
 * @param params.endTrialAt - explicit trial end; null is normalized to
 *   undefined, and undefined keeps the project's existing trialEndsAt.
 * @throws BadDataException when billing is disabled, the project or its
 *   subscriptions are missing, or the plan id is invalid.
 */
public async changePlan(params: {
  projectId: ObjectID;
  paymentProviderPlanId: string;
  endTrialAt?: Date | null;
}): Promise<void> {
  if (!IsBillingEnabled) {
    throw new BadDataException("Billing is not enabled for this server");
  }

  // Load only the billing-related fields needed to perform the change.
  const project: Model | null = await this.findOneById({
    id: params.projectId,
    select: {
      _id: true,
      paymentProviderSubscriptionId: true,
      paymentProviderMeteredSubscriptionId: true,
      paymentProviderSubscriptionSeats: true,
      paymentProviderPlanId: true,
      trialEndsAt: true,
    },
    props: {
      isRoot: true,
    },
  });

  if (!project) {
    throw new BadDataException("Project not found");
  }

  if (!project.paymentProviderSubscriptionId) {
    throw new BadDataException("Payment Provider subscription not found");
  }

  if (!project.paymentProviderMeteredSubscriptionId) {
    throw new BadDataException(
      "Payment Provider metered subscription not found",
    );
  }

  const plan: SubscriptionPlan | undefined =
    SubscriptionPlan.getSubscriptionPlanById(
      params.paymentProviderPlanId,
      getAllEnvVars(),
    );

  if (!plan) {
    throw new BadDataException("Invalid plan");
  }

  // Fall back to the current unique team-member count when the stored seat
  // count is missing or non-positive.
  let seats: number | undefined = project.paymentProviderSubscriptionSeats;

  if (!seats || seats <= 0) {
    seats = await TeamMemberService.getUniqueTeamMemberCountInProject(
      project.id!,
    );
  }

  logger.debug(
    `Changing plan for project ${project.id?.toString()} to ${plan.getName()} with seats ${seats}`,
  );

  // Caller-provided endTrialAt wins (null normalized to undefined);
  // otherwise keep the project's existing trial end date.
  const endTrialAt: Date | undefined =
    params.endTrialAt !== undefined
      ? params.endTrialAt || undefined
      : project.trialEndsAt || undefined;

  const subscription: {
    subscriptionId: string;
    meteredSubscriptionId: string;
    trialEndsAt?: Date | undefined;
  } = await BillingService.changePlan({
    projectId: project.id!,
    subscriptionId: project.paymentProviderSubscriptionId,
    meteredSubscriptionId: project.paymentProviderMeteredSubscriptionId,
    serverMeteredPlans: AllMeteredPlans,
    newPlan: plan,
    quantity: seats,
    isYearly: plan.getYearlyPlanId() === params.paymentProviderPlanId,
    endTrialAt: endTrialAt,
  });

  // Re-read both subscription statuses so the persisted state reflects the
  // provider's post-change view.
  const subscriptionState: SubscriptionStatus =
    await BillingService.getSubscriptionStatus(subscription.subscriptionId);

  const meteredSubscriptionState: SubscriptionStatus =
    await BillingService.getSubscriptionStatus(
      subscription.meteredSubscriptionId,
    );

  await this.updateOneById({
    id: project.id!,
    data: {
      paymentProviderPlanId: params.paymentProviderPlanId,
      paymentProviderSubscriptionId: subscription.subscriptionId,
      paymentProviderMeteredSubscriptionId:
        subscription.meteredSubscriptionId,
      paymentProviderSubscriptionSeats: seats,
      trialEndsAt: subscription.trialEndsAt || endTrialAt || new Date(),
      planName: SubscriptionPlan.getPlanType(
        params.paymentProviderPlanId,
        getAllEnvVars(),
      ),
      paymentProviderMeteredSubscriptionStatus: meteredSubscriptionState,
      paymentProviderSubscriptionStatus: subscriptionState,
    },
    props: {
      isRoot: true,
      ignoreHooks: true, // skip service hooks for this internal state write
    },
  });

  // Notify the configured webhook/Slack channel about the plan change.
  await this.sendSubscriptionChangeWebhookSlackNotification(project.id!);
}
private async sendSubscriptionChangeWebhookSlackNotification(
projectId: ObjectID,
): Promise<void> {
@@ -1446,6 +1437,28 @@ export class ProjectService extends DatabaseService<Model> {
};
}
@CaptureSpan()
/**
 * Fetch every project matching the "active" status query.
 *
 * Defaults to a root-level lookup selecting only _id when the caller
 * provides no select/props; optional skip/limit bound the overall result.
 */
public async getAllActiveProjects(params?: {
  select?: Select<Model>;
  props?: DatabaseCommonInteractionProps;
  skip?: PositiveNumber | number;
  limit?: PositiveNumber | number;
}): Promise<Array<Model>> {
  const effectiveSelect: Select<Model> | undefined =
    params?.select || ({ _id: true } as Select<Model>);
  const effectiveProps: DatabaseCommonInteractionProps = params?.props || {
    isRoot: true,
  };

  return await this.findAllBy({
    query: this.getActiveProjectStatusQuery(),
    select: effectiveSelect,
    props: effectiveProps,
    skip: params?.skip,
    limit: params?.limit,
  });
}
@CaptureSpan()
public async getProjectLinkInDashboard(projectId: ObjectID): Promise<URL> {
const dashboardUrl: URL = await DatabaseConfig.getDashboardUrl();

View File

@@ -19,7 +19,7 @@ export class Service extends DatabaseService<Model> {
super(Model);
if (IsBillingEnabled) {
this.hardDeleteItemsOlderThanInDays("createdAt", 120);
this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
}
}

View File

@@ -64,7 +64,7 @@ export class Service extends DatabaseService<Model> {
public constructor() {
  super(Model);
  if (IsBillingEnabled) {
    // Interleaved diff left both the old 120-day and new 3-year retention
    // calls; keep only the new-side 3-year retention.
    this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
  }
}

View File

@@ -36,7 +36,7 @@ export class Service extends DatabaseService<ScheduledMaintenanceStateTimeline>
public constructor() {
  super(ScheduledMaintenanceStateTimeline);
  if (IsBillingEnabled) {
    // Interleaved diff left both the old 120-day and new 3-year retention
    // calls; keep only the new-side 3-year retention.
    this.hardDeleteItemsOlderThanInDays("createdAt", 3 * 365); // 3 years
  }
}

View File

@@ -3,8 +3,10 @@ import CreateBy from "../Types/Database/CreateBy";
import { OnCreate, OnUpdate } from "../Types/Database/Hooks";
import UpdateBy from "../Types/Database/UpdateBy";
import CookieUtil from "../Utils/Cookie";
import { ExpressRequest } from "../Utils/Express";
import JSONWebToken from "../Utils/JsonWebToken";
import { ExpressRequest, ExpressResponse } from "../Utils/Express";
import JSONWebToken, {
RefreshTokenData,
} from "../Utils/JsonWebToken";
import logger from "../Utils/Logger";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
import DatabaseService from "./DatabaseService";
@@ -24,6 +26,7 @@ import BadDataException from "../../Types/Exception/BadDataException";
import JSONWebTokenData from "../../Types/JsonWebTokenData";
import ObjectID from "../../Types/ObjectID";
import PositiveNumber from "../../Types/PositiveNumber";
import HashedString from "../../Types/HashedString";
import Typeof from "../../Types/Typeof";
import MonitorStatus from "../../Models/DatabaseModels/MonitorStatus";
import StatusPage from "../../Models/DatabaseModels/StatusPage";
@@ -61,6 +64,9 @@ import IP from "../../Types/IP/IP";
import NotAuthenticatedException from "../../Types/Exception/NotAuthenticatedException";
import ForbiddenException from "../../Types/Exception/ForbiddenException";
import CommonAPI from "../API/CommonAPI";
import StatusPagePrivateUserService from "./StatusPagePrivateUserService";
import StatusPagePrivateUser from "../../Models/DatabaseModels/StatusPagePrivateUser";
import { EncryptionSecret } from "../EnvironmentConfig";
export interface StatusPageReportItem {
resourceName: string;
@@ -369,12 +375,14 @@ export class Service extends DatabaseService<StatusPage> {
public async hasReadAccess(data: {
statusPageId: ObjectID;
req: ExpressRequest;
res: ExpressResponse;
}): Promise<{
hasReadAccess: boolean;
error?: NotAuthenticatedException | ForbiddenException;
}> {
const statusPageId: ObjectID = data.statusPageId;
const req: ExpressRequest = data.req;
const res: ExpressResponse = data.res;
const props: DatabaseCommonInteractionProps =
await CommonAPI.getDatabaseCommonInteractionProps(req);
@@ -446,20 +454,37 @@ export class Service extends DatabaseService<StatusPage> {
CookieUtil.getUserTokenKey(statusPageId),
);
let decoded: JSONWebTokenData | null = null;
if (token) {
try {
const decoded: JSONWebTokenData = JSONWebToken.decode(
token as string,
);
if (decoded.statusPageId?.toString() === statusPageId.toString()) {
return {
hasReadAccess: true,
};
}
decoded = JSONWebToken.decode(token as string);
} catch (err) {
logger.error(err);
const error: Error = err as Error;
logger.warn(
`Invalid status page access token, attempting refresh: ${
error.message || "unknown error"
}`,
);
logger.debug(error);
decoded = await this.tryRefreshStatusPageSession({
statusPageId,
req,
res,
});
}
} else {
decoded = await this.tryRefreshStatusPageSession({
statusPageId,
req,
res,
});
}
if (decoded && decoded.statusPageId?.toString() === statusPageId.toString()) {
return {
hasReadAccess: true,
};
}
// if it does not have public access, check if this user has access.
@@ -493,6 +518,121 @@ export class Service extends DatabaseService<StatusPage> {
};
}
@CaptureSpan()
/**
 * Attempt to transparently refresh an expired/missing status-page session
 * using the refresh-token cookie.
 *
 * On success: sets fresh access/refresh cookies on the response, mirrors
 * them into req.cookies so downstream handlers of this same request see
 * the new session, rotates the stored (hashed) session id, and returns
 * the decoded new access token.
 * On any failure: clears both cookies (where a token was present) and
 * returns null.
 */
private async tryRefreshStatusPageSession(data: {
  statusPageId: ObjectID;
  req: ExpressRequest;
  res: ExpressResponse;
}): Promise<JSONWebTokenData | null> {
  const { statusPageId, req, res } = data;

  const refreshTokenKey: string = CookieUtil.getRefreshTokenKey(statusPageId);
  const accessTokenKey: string = CookieUtil.getUserTokenKey(statusPageId);

  const refreshToken: string | undefined =
    CookieUtil.getCookieFromExpressRequest(
      req,
      refreshTokenKey,
    );

  // No refresh cookie — nothing to refresh with.
  if (!refreshToken) {
    return null;
  }

  let refreshTokenData: RefreshTokenData;

  try {
    refreshTokenData = JSONWebToken.decodeRefreshToken(refreshToken);
  } catch (err) {
    const error: Error = err as Error;
    logger.warn(
      `Failed to decode status page refresh token during middleware refresh: ${
        error.message || "unknown error"
      }`,
    );
    logger.debug(error);
    // Invalid refresh token: drop both cookies so the client re-authenticates.
    CookieUtil.removeCookie(res, refreshTokenKey);
    CookieUtil.removeCookie(res, accessTokenKey);
    return null;
  }

  // The refresh token must belong to this status page.
  if (
    !refreshTokenData.statusPageId ||
    refreshTokenData.statusPageId.toString() !== statusPageId.toString()
  ) {
    CookieUtil.removeCookie(res, refreshTokenKey);
    CookieUtil.removeCookie(res, accessTokenKey);
    return null;
  }

  // Session ids are stored hashed; hash the presented id to compare.
  const hashedSessionId: string = await HashedString.hashValue(
    refreshTokenData.sessionId,
    EncryptionSecret,
  );

  const user: StatusPagePrivateUser | null =
    await StatusPagePrivateUserService.findOneBy({
      query: {
        _id: refreshTokenData.userId,
        statusPageId: statusPageId,
        jwtRefreshToken: hashedSessionId,
      },
      select: {
        _id: true,
        email: true,
        statusPageId: true,
      },
      props: {
        isRoot: true,
      },
    });

  // Unknown user or session-id mismatch (e.g. already rotated/revoked).
  if (!user) {
    CookieUtil.removeCookie(res, refreshTokenKey);
    CookieUtil.removeCookie(res, accessTokenKey);
    return null;
  }

  // Issue fresh access + refresh cookies on the response.
  const session = CookieUtil.setStatusPageUserCookie({
    expressResponse: res,
    user: user,
    statusPageId: statusPageId,
  });

  // Mirror the new tokens into the current request's cookie map.
  if (!req.cookies) {
    req.cookies = {} as Dictionary<string>;
  }

  req.cookies[accessTokenKey] = session.accessToken;
  req.cookies[refreshTokenKey] = session.refreshToken;

  // Rotate: persist the hash of the NEW session id, invalidating the old one.
  const hashedNewSessionId: string = await HashedString.hashValue(
    session.sessionId,
    EncryptionSecret,
  );

  await StatusPagePrivateUserService.updateOneBy({
    query: {
      _id: user.id!,
      statusPageId: statusPageId,
    },
    data: {
      jwtRefreshToken: hashedNewSessionId,
      lastActive: OneUptimeDate.getCurrentDate(),
    },
    props: {
      isRoot: true,
    },
  });

  logger.info(
    `Status page session refreshed automatically for ${
      user.email?.toString() || user.id?.toString() || "unknown"
    } on status page ${statusPageId.toString()}`,
  );

  return JSONWebToken.decode(session.accessToken);
}
@CaptureSpan()
public async getMonitorStatusTimelineForStatusPage(data: {
monitorIds: Array<ObjectID>;
@@ -1150,5 +1290,122 @@ export class Service extends DatabaseService<StatusPage> {
},
);
}
/**
 * For every monitor-group resource on the status page, resolves the group's
 * current status: the "worst" status (lowest priority number) among the
 * group's member monitors.
 *
 * @returns map of monitorGroupId (string) -> worst MonitorStatus id.
 */
@CaptureSpan()
public async getMonitorGroupCurrentStatuses(data: {
  statusPageResources: Array<StatusPageResource>;
  monitorStatuses: Array<MonitorStatus>;
}): Promise<Dictionary<ObjectID>> {
  const groupStatusById: Dictionary<ObjectID> = {};

  for (const pageResource of data.statusPageResources) {
    const groupId: ObjectID | undefined = pageResource.monitorGroupId;
    if (!groupId) {
      continue; // not a monitor-group resource.
    }

    // Fetch the group's member monitors and their current status ids.
    const groupMembers: Array<MonitorGroupResource> =
      await MonitorGroupResourceService.findBy({
        query: {
          monitorGroupId: groupId,
        },
        select: {
          monitorId: true,
          monitor: {
            currentMonitorStatusId: true,
          },
        },
        skip: 0,
        limit: LIMIT_PER_PROJECT,
        props: {
          isRoot: true,
        },
      });

    const memberStatusIds: Array<ObjectID> = [];
    for (const member of groupMembers) {
      if (
        member.monitor &&
        member.monitor.currentMonitorStatusId &&
        member.monitorId
      ) {
        memberStatusIds.push(member.monitor.currentMonitorStatusId);
      }
    }

    // Pick the status with the smallest priority value among the members.
    let worst: MonitorStatus | null = null;
    for (const memberStatusId of memberStatusIds) {
      const match: MonitorStatus | undefined = data.monitorStatuses.find(
        (candidate: MonitorStatus) => {
          return candidate._id?.toString() === memberStatusId.toString();
        },
      );
      if (match && (!worst || match.priority! < worst.priority!)) {
        worst = match;
      }
    }

    if (worst && worst._id) {
      groupStatusById[groupId.toString()] = new ObjectID(worst._id);
    }
  }

  return groupStatusById;
}
/**
 * Computes the overall status of a status page from its resources.
 *
 * Tallies how many resources (individual monitors plus resolved monitor
 * groups) are in each status, then scans data.monitorStatuses in order and
 * returns the LAST status that appears in the tally — falling back to the
 * first status in the list (or null when the list is empty).
 */
@CaptureSpan()
public getOverallMonitorStatus(data: {
  statusPageResources: Array<StatusPageResource>;
  monitorStatuses: Array<MonitorStatus>;
  monitorGroupCurrentStatuses: Dictionary<ObjectID>;
}): MonitorStatus | null {
  const tally: Dictionary<number> = {};

  const bump: (key: string) => void = (key: string): void => {
    tally[key] = (tally[key] || 0) + 1;
  };

  // Count statuses of directly-attached monitors.
  for (const resource of data.statusPageResources) {
    const statusId: ObjectID | undefined =
      resource.monitor?.currentMonitorStatusId;
    if (statusId) {
      bump(statusId.toString());
    }
  }

  // Count statuses of monitor groups.
  for (const groupId in data.monitorGroupCurrentStatuses) {
    const groupStatusId: ObjectID | undefined =
      data.monitorGroupCurrentStatuses[groupId];
    if (groupStatusId) {
      bump(groupStatusId.toString());
    }
  }

  let overall: MonitorStatus | null =
    data.monitorStatuses.length > 0 && data.monitorStatuses[0]
      ? data.monitorStatuses[0]
      : null;

  for (const monitorStatus of data.monitorStatuses) {
    if (monitorStatus._id && tally[monitorStatus._id]) {
      overall = monitorStatus;
    }
  }

  return overall;
}
}
export default new Service();

View File

@@ -21,6 +21,7 @@ import { AccountsRoute } from "../../ServiceRoute";
import Hostname from "../../Types/API/Hostname";
import Protocol from "../../Types/API/Protocol";
import URL from "../../Types/API/URL";
import Route from "../../Types/API/Route";
import SubscriptionPlan, {
PlanType,
} from "../../Types/Billing/SubscriptionPlan";
@@ -151,10 +152,18 @@ export class TeamMemberService extends DatabaseService<TeamMember> {
templateType: EmailTemplateType.InviteMember,
vars: {
signInLink: URL.fromString(
new URL(httpProtocol, host, AccountsRoute).toString(),
new URL(
httpProtocol,
host,
new Route(AccountsRoute.toString()),
).toString(),
).toString(),
registerLink: URL.fromString(
new URL(httpProtocol, host, AccountsRoute).toString(),
new URL(
httpProtocol,
host,
new Route(AccountsRoute.toString()),
).toString(),
)
.addRoute("/register")
.addQueryParam("email", email.toString(), true)

View File

@@ -1,13 +1,70 @@
import { SQL, Statement } from "../Utils/AnalyticsDatabase/Statement";
import TelemetryType from "../../Types/Telemetry/TelemetryType";
import ClickhouseDatabase from "../Infrastructure/ClickhouseDatabase";
import AnalyticsDatabaseService from "./AnalyticsDatabaseService";
import TelemetryAttribute from "../../Models/AnalyticsModels/TelemetryAttribute";
import LogDatabaseService from "./LogService";
import MetricDatabaseService from "./MetricService";
import SpanDatabaseService from "./SpanService";
import TableColumnType from "../../Types/AnalyticsDatabase/TableColumnType";
import { JSONObject } from "../../Types/JSON";
import ObjectID from "../../Types/ObjectID";
import OneUptimeDate from "../../Types/Date";
import GlobalCache from "../Infrastructure/GlobalCache";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
import AnalyticsDatabaseService, {
DbJSONResponse,
Results,
} from "./AnalyticsDatabaseService";
export class TelemetryAttributeService extends AnalyticsDatabaseService<TelemetryAttribute> {
public constructor(clickhouseDatabase?: ClickhouseDatabase | undefined) {
super({ modelType: TelemetryAttribute, database: clickhouseDatabase });
type TelemetrySource = {
service: AnalyticsDatabaseService<any>;
tableName: string;
attributesColumn: string;
attributeKeysColumn: string;
timeColumn: string;
};
type TelemetryAttributesCacheEntry = {
attributes: Array<string>;
refreshedAt: Date;
};
export class TelemetryAttributeService {
private static readonly ATTRIBUTES_LIMIT: number = 5000;
private static readonly ROW_SCAN_LIMIT: number = 10000;
private static readonly CACHE_NAMESPACE: string = "telemetry-attributes";
private static readonly CACHE_STALE_AFTER_MINUTES: number = 5;
private static readonly LOOKBACK_WINDOW_IN_DAYS: number = 30;
/**
 * Maps a telemetry type to the analytics table that stores it, together with
 * the column names needed to extract attribute keys from that table.
 *
 * @returns the source descriptor, or null for telemetry types that have no
 *          attribute source.
 */
private getTelemetrySource(
  telemetryType: TelemetryType,
): TelemetrySource | null {
  if (telemetryType === TelemetryType.Log) {
    return {
      service: LogDatabaseService,
      tableName: LogDatabaseService.model.tableName,
      attributesColumn: "attributes",
      attributeKeysColumn: "attributeKeys",
      timeColumn: "time",
    };
  }

  if (telemetryType === TelemetryType.Metric) {
    return {
      service: MetricDatabaseService,
      tableName: MetricDatabaseService.model.tableName,
      attributesColumn: "attributes",
      attributeKeysColumn: "attributeKeys",
      timeColumn: "time",
    };
  }

  if (telemetryType === TelemetryType.Trace) {
    return {
      service: SpanDatabaseService,
      tableName: SpanDatabaseService.model.tableName,
      attributesColumn: "attributes",
      attributeKeysColumn: "attributeKeys",
      // Spans are time-filtered on their start time, not an ingest time.
      timeColumn: "startTime",
    };
  }

  return null;
}
@CaptureSpan()
@@ -15,57 +72,230 @@ export class TelemetryAttributeService extends AnalyticsDatabaseService<Telemetr
projectId: ObjectID;
telemetryType: TelemetryType;
}): Promise<string[]> {
const telemetryAttribute: TelemetryAttribute | null = await this.findOneBy({
query: {
projectId: data.projectId,
telemetryType: data.telemetryType,
},
select: {
attributes: true,
},
props: {
isRoot: true,
},
});
const source: TelemetrySource | null = this.getTelemetrySource(
data.telemetryType,
);
return telemetryAttribute &&
telemetryAttribute.attributes &&
telemetryAttribute
? telemetryAttribute.attributes
: [];
if (!source) {
return [];
}
const cacheKey: string = TelemetryAttributeService.getCacheKey(
data.projectId,
data.telemetryType,
);
const cachedEntry: TelemetryAttributesCacheEntry | null =
await TelemetryAttributeService.getCachedAttributes(cacheKey);
if (cachedEntry && TelemetryAttributeService.isCacheFresh(cachedEntry)) {
return cachedEntry.attributes;
}
let attributes: Array<string> = [];
try {
attributes = await TelemetryAttributeService.fetchAttributesFromDatabase({
projectId: data.projectId,
source,
});
} catch (error) {
if (cachedEntry) {
return cachedEntry.attributes;
}
throw error;
}
await TelemetryAttributeService.storeAttributesInCache(
cacheKey,
attributes,
);
if (attributes.length === 0 && cachedEntry) {
return cachedEntry.attributes;
}
return attributes;
}
@CaptureSpan()
public async refreshAttributes(data: {
/** Builds the cache key for a project's attribute list: "<projectId>:<telemetryType>". */
private static getCacheKey(
  projectId: ObjectID,
  telemetryType: TelemetryType,
): string {
  const keyParts: Array<string> = [
    projectId.toString(),
    String(telemetryType),
  ];
  return keyParts.join(":");
}
/** Returns the earliest timestamp scanned for attributes (now minus the lookback window). */
private static getLookbackStartDate(): Date {
  const now: Date = OneUptimeDate.getCurrentDate();
  const daysBack: number = -TelemetryAttributeService.LOOKBACK_WINDOW_IN_DAYS;
  return OneUptimeDate.addRemoveDays(now, daysBack);
}
/**
 * Reads a previously cached attribute list from the global cache.
 *
 * Any cache read failure or malformed payload (missing/invalid "attributes"
 * array or "refreshedAt" string) is treated as a cache miss. Non-string
 * entries in the stored array are silently dropped.
 *
 * @returns the cache entry, or null on a miss.
 */
private static async getCachedAttributes(
  cacheKey: string,
): Promise<TelemetryAttributesCacheEntry | null> {
  let payload: JSONObject | null = null;
  try {
    payload = await GlobalCache.getJSONObject(
      TelemetryAttributeService.CACHE_NAMESPACE,
      cacheKey,
    );
  } catch {
    // Cache being unreachable is equivalent to a miss.
    return null;
  }

  if (!payload) {
    return null;
  }

  const rawAttributes: JSONObject["attributes"] = payload["attributes"];
  const rawRefreshedAt: JSONObject["refreshedAt"] = payload["refreshedAt"];

  if (!Array.isArray(rawAttributes) || typeof rawRefreshedAt !== "string") {
    return null;
  }

  const attributes: Array<string> = [];
  for (const candidate of rawAttributes as Array<unknown>) {
    if (typeof candidate === "string") {
      attributes.push(candidate);
    }
  }

  return {
    attributes,
    refreshedAt: OneUptimeDate.fromString(rawRefreshedAt),
  };
}
/**
 * True when the cache entry was refreshed within the staleness window.
 * The absolute value guards against a negative interval if clocks skew.
 */
private static isCacheFresh(
  cacheEntry: TelemetryAttributesCacheEntry,
): boolean {
  const elapsedMinutes: number = Math.abs(
    OneUptimeDate.getNumberOfMinutesBetweenDates(
      cacheEntry.refreshedAt,
      OneUptimeDate.getCurrentDate(),
    ),
  );
  return (
    elapsedMinutes <= TelemetryAttributeService.CACHE_STALE_AFTER_MINUTES
  );
}
/**
 * Writes an attribute list to the global cache with a TTL matching the
 * staleness window. Best-effort: a cache write failure is swallowed so it
 * can never break attribute lookup.
 */
private static async storeAttributesInCache(
  cacheKey: string,
  attributes: Array<string>,
): Promise<void> {
  const entry: JSONObject = {
    attributes,
    refreshedAt: OneUptimeDate.getCurrentDate().toISOString(),
  };

  try {
    await GlobalCache.setJSON(
      TelemetryAttributeService.CACHE_NAMESPACE,
      cacheKey,
      entry,
      {
        expiresInSeconds:
          TelemetryAttributeService.CACHE_STALE_AFTER_MINUTES * 60,
      },
    );
  } catch {
    // Intentionally ignored — caching is opportunistic.
  }
}
private static buildAttributesStatement(data: {
projectId: ObjectID;
telemetryType: TelemetryType;
attributes: string[];
}): Promise<void> {
const { projectId, telemetryType, attributes } = data;
tableName: string;
attributesColumn: string;
attributeKeysColumn: string;
timeColumn: string;
}): Statement {
const lookbackStartDate: Date =
TelemetryAttributeService.getLookbackStartDate();
// delete existing attributes
await this.deleteBy({
query: {
projectId,
telemetryType,
},
props: {
isRoot: true,
},
});
const statement: Statement = SQL`
WITH filtered AS (
SELECT arrayJoin(
if(
${data.attributeKeysColumn} IS NULL OR empty(${data.attributeKeysColumn}),
JSONExtractKeys(${data.attributesColumn}),
${data.attributeKeysColumn}
)
) AS attribute
FROM ${data.tableName}
WHERE projectId = ${{
type: TableColumnType.ObjectID,
value: data.projectId,
}}
AND (
${data.attributeKeysColumn} IS NOT NULL OR (
${data.attributesColumn} IS NOT NULL AND
${data.attributesColumn} != ''
)
)
AND ${data.timeColumn} >= ${{
type: TableColumnType.Date,
value: lookbackStartDate,
}}
ORDER BY ${data.timeColumn} DESC
LIMIT ${{
type: TableColumnType.Number,
value: TelemetryAttributeService.ROW_SCAN_LIMIT,
}}
)
SELECT DISTINCT attribute
FROM filtered
WHERE attribute IS NOT NULL AND attribute != ''
ORDER BY attribute ASC
LIMIT ${{
type: TableColumnType.Number,
value: TelemetryAttributeService.ATTRIBUTES_LIMIT,
}}
`;
const telemetryAttribute: TelemetryAttribute = new TelemetryAttribute();
return statement;
}
telemetryAttribute.projectId = projectId;
telemetryAttribute.telemetryType = telemetryType;
telemetryAttribute.attributes = attributes;
private static async fetchAttributesFromDatabase(data: {
projectId: ObjectID;
source: TelemetrySource;
}): Promise<Array<string>> {
const statement: Statement =
TelemetryAttributeService.buildAttributesStatement({
projectId: data.projectId,
tableName: data.source.tableName,
attributesColumn: data.source.attributesColumn,
attributeKeysColumn: data.source.attributeKeysColumn,
timeColumn: data.source.timeColumn,
});
await this.create({
data: telemetryAttribute,
props: {
isRoot: true,
},
});
const dbResult: Results = await data.source.service.executeQuery(statement);
const response: DbJSONResponse = await dbResult.json<{
data?: Array<JSONObject>;
}>();
const rows: Array<JSONObject> = response.data || [];
const attributeKeys: Array<string> = rows
.map((row: JSONObject) => {
const attribute: unknown = row["attribute"];
return typeof attribute === "string" ? attribute.trim() : null;
})
.filter((attribute: string | null): attribute is string => {
return Boolean(attribute);
});
return Array.from(new Set(attributeKeys));
}
}

View File

@@ -1,16 +1,33 @@
import { MeteredPlanUtil } from "../Types/Billing/MeteredPlan/AllMeteredPlans";
import TelemetryMeteredPlan from "../Types/Billing/MeteredPlan/TelemetryMeteredPlan";
import QueryHelper from "../Types/Database/QueryHelper";
import DatabaseService from "./DatabaseService";
import SortOrder from "../../Types/BaseDatabase/SortOrder";
import LIMIT_MAX from "../../Types/Database/LimitMax";
import LIMIT_MAX, { LIMIT_INFINITY } from "../../Types/Database/LimitMax";
import OneUptimeDate from "../../Types/Date";
import Decimal from "../../Types/Decimal";
import BadDataException from "../../Types/Exception/BadDataException";
import ProductType from "../../Types/MeteredPlan/ProductType";
import ObjectID from "../../Types/ObjectID";
import Model from "../../Models/DatabaseModels/TelemetryUsageBilling";
import { IsBillingEnabled } from "../EnvironmentConfig";
import Model, {
DEFAULT_RETENTION_IN_DAYS,
} from "../../Models/DatabaseModels/TelemetryUsageBilling";
import TelemetryServiceService from "./TelemetryServiceService";
import SpanService from "./SpanService";
import LogService from "./LogService";
import MetricService from "./MetricService";
import ExceptionInstanceService from "./ExceptionInstanceService";
import AnalyticsQueryHelper from "../Types/AnalyticsDatabase/QueryHelper";
import DiskSize from "../../Types/DiskSize";
import logger from "../Utils/Logger";
import PositiveNumber from "../../Types/PositiveNumber";
import TelemetryServiceModel from "../../Models/DatabaseModels/TelemetryService";
import {
AverageSpanRowSizeInBytes,
AverageLogRowSizeInBytes,
AverageMetricRowSizeInBytes,
AverageExceptionRowSizeInBytes,
IsBillingEnabled,
} from "../EnvironmentConfig";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
export class Service extends DatabaseService<Model> {
@@ -31,9 +48,6 @@ export class Service extends DatabaseService<Model> {
projectId: data.projectId,
productType: data.productType,
isReportedToBillingProvider: false,
createdAt: QueryHelper.lessThan(
OneUptimeDate.addRemoveDays(OneUptimeDate.getCurrentDate(), -1),
), // we need to get everything that's not today.
},
skip: 0,
limit: LIMIT_MAX, /// because a project can have MANY telemetry services.
@@ -47,6 +61,203 @@ export class Service extends DatabaseService<Model> {
});
}
/**
 * Stages estimated telemetry usage for every telemetry service in a project
 * for one usage day, recording it via updateUsageBilling. No-op when billing
 * is disabled.
 *
 * Estimation = (row count for the day) * (configured average row size in
 * bytes), converted to GB. For Traces, span rows and exception rows are
 * counted and sized separately. Staging is idempotent per
 * (project, product, service, day): days already staged are skipped.
 *
 * @param data.usageDate day to stage; defaults to yesterday so a complete
 *        day of data is available.
 */
@CaptureSpan()
public async stageTelemetryUsageForProject(data: {
projectId: ObjectID;
productType: ProductType;
usageDate?: Date;
}): Promise<void> {
if (!IsBillingEnabled) {
return;
}
// NOTE(review): data.usageDate is typed as Date but is passed to
// OneUptimeDate.fromString — assumes fromString tolerates Date input;
// TODO confirm against the OneUptimeDate implementation.
const usageDate: Date = data.usageDate
? OneUptimeDate.fromString(data.usageDate)
: OneUptimeDate.addRemoveDays(OneUptimeDate.getCurrentDate(), -1);
const averageRowSizeInBytes: number = this.getAverageRowSizeForProduct(
data.productType,
);
const averageExceptionRowSizeInBytes: number =
this.getAverageExceptionRowSize();
// Without a positive average row size we cannot estimate bytes — bail.
if (data.productType !== ProductType.Traces && averageRowSizeInBytes <= 0) {
return;
}
// Traces can still be estimated from exception rows alone.
if (
data.productType === ProductType.Traces &&
averageRowSizeInBytes <= 0 &&
averageExceptionRowSizeInBytes <= 0
) {
return;
}
const usageDayString: string = OneUptimeDate.getDateString(usageDate);
const startOfDay: Date = OneUptimeDate.getStartOfDay(usageDate);
const endOfDay: Date = OneUptimeDate.getEndOfDay(usageDate);
const telemetryServices: Array<TelemetryServiceModel> =
await TelemetryServiceService.findBy({
query: {
projectId: data.projectId,
},
select: {
_id: true,
retainTelemetryDataForDays: true,
},
skip: 0,
limit: LIMIT_MAX,
props: {
isRoot: true,
},
});
if (!telemetryServices || telemetryServices.length === 0) {
return;
}
for (const telemetryService of telemetryServices) {
if (!telemetryService?.id) {
continue;
}
// Skip days already staged for this service/product (idempotency).
const existingEntry: Model | null = await this.findOneBy({
query: {
projectId: data.projectId,
productType: data.productType,
telemetryServiceId: telemetryService.id,
day: usageDayString,
},
select: {
_id: true,
},
props: {
isRoot: true,
},
});
if (existingEntry) {
continue;
}
let estimatedBytes: number = 0;
try {
if (data.productType === ProductType.Traces) {
// Spans are filtered on startTime; exceptions on time.
const spanCount: PositiveNumber = await SpanService.countBy({
query: {
projectId: data.projectId,
serviceId: telemetryService.id,
startTime: AnalyticsQueryHelper.inBetween(startOfDay, endOfDay),
},
skip: 0,
limit: LIMIT_INFINITY,
props: {
isRoot: true,
},
});
const exceptionCount: PositiveNumber =
await ExceptionInstanceService.countBy({
query: {
projectId: data.projectId,
serviceId: telemetryService.id,
time: AnalyticsQueryHelper.inBetween(startOfDay, endOfDay),
},
skip: 0,
limit: LIMIT_INFINITY,
props: {
isRoot: true,
},
});
const totalSpanCount: number = spanCount.toNumber();
const totalExceptionCount: number = exceptionCount.toNumber();
if (totalSpanCount <= 0 && totalExceptionCount <= 0) {
continue;
}
// Span rows and exception rows have distinct configured sizes.
estimatedBytes =
totalSpanCount * averageRowSizeInBytes +
totalExceptionCount * averageExceptionRowSizeInBytes;
} else if (data.productType === ProductType.Logs) {
const count: PositiveNumber = await LogService.countBy({
query: {
projectId: data.projectId,
serviceId: telemetryService.id,
time: AnalyticsQueryHelper.inBetween(startOfDay, endOfDay),
},
skip: 0,
limit: LIMIT_INFINITY,
props: {
isRoot: true,
},
});
const totalRowCount: number = count.toNumber();
if (totalRowCount <= 0) {
continue;
}
estimatedBytes = totalRowCount * averageRowSizeInBytes;
} else if (data.productType === ProductType.Metrics) {
const count: PositiveNumber = await MetricService.countBy({
query: {
projectId: data.projectId,
serviceId: telemetryService.id,
time: AnalyticsQueryHelper.inBetween(startOfDay, endOfDay),
},
skip: 0,
limit: LIMIT_INFINITY,
props: {
isRoot: true,
},
});
const totalRowCount: number = count.toNumber();
if (totalRowCount <= 0) {
continue;
}
estimatedBytes = totalRowCount * averageRowSizeInBytes;
}
} catch (error) {
// Log and skip so one failing service does not block staging the rest.
logger.error(
`Failed to compute telemetry usage for service ${telemetryService.id?.toString()}:`,
);
logger.error(error as Error);
continue;
}
if (estimatedBytes <= 0) {
continue;
}
const estimatedGigabytes: number = DiskSize.byteSizeToGB(estimatedBytes);
// Guard against NaN/Infinity from the byte->GB conversion.
if (!Number.isFinite(estimatedGigabytes) || estimatedGigabytes <= 0) {
continue;
}
const dataRetentionInDays: number =
telemetryService.retainTelemetryDataForDays ||
DEFAULT_RETENTION_IN_DAYS;
await this.updateUsageBilling({
projectId: data.projectId,
productType: data.productType,
telemetryServiceId: telemetryService.id,
dataIngestedInGB: estimatedGigabytes,
retentionInDays: dataRetentionInDays,
usageDate: usageDate,
});
}
}
@CaptureSpan()
public async updateUsageBilling(data: {
projectId: ObjectID;
@@ -54,6 +265,7 @@ export class Service extends DatabaseService<Model> {
telemetryServiceId: ObjectID;
dataIngestedInGB: number;
retentionInDays: number;
usageDate?: Date;
}): Promise<void> {
if (
data.productType !== ProductType.Traces &&
@@ -70,6 +282,12 @@ export class Service extends DatabaseService<Model> {
data.productType,
) as TelemetryMeteredPlan;
const usageDate: Date = data.usageDate
? OneUptimeDate.fromString(data.usageDate)
: OneUptimeDate.getCurrentDate();
const usageDayString: string = OneUptimeDate.getDateString(usageDate);
const totalCostOfThisOperationInUSD: number =
serverMeteredPlan.getTotalCostInUSD({
dataIngestedInGB: data.dataIngestedInGB,
@@ -82,10 +300,7 @@ export class Service extends DatabaseService<Model> {
productType: data.productType,
telemetryServiceId: data.telemetryServiceId,
isReportedToBillingProvider: false,
createdAt: QueryHelper.inBetween(
OneUptimeDate.addRemoveDays(OneUptimeDate.getCurrentDate(), -1),
OneUptimeDate.getCurrentDate(),
),
day: usageDayString,
},
select: {
_id: true,
@@ -135,11 +350,9 @@ export class Service extends DatabaseService<Model> {
usageBilling.telemetryServiceId = data.telemetryServiceId;
usageBilling.retainTelemetryDataForDays = data.retentionInDays;
usageBilling.isReportedToBillingProvider = false;
usageBilling.createdAt = OneUptimeDate.getCurrentDate();
usageBilling.createdAt = usageDate;
usageBilling.day = OneUptimeDate.getDateString(
OneUptimeDate.getCurrentDate(),
);
usageBilling.day = usageDayString;
usageBilling.totalCostInUSD = new Decimal(totalCostOfThisOperationInUSD);
@@ -151,6 +364,46 @@ export class Service extends DatabaseService<Model> {
});
}
}
/**
 * Returns the configured average row size (bytes) for a telemetry product,
 * falling back to 1024 for non-telemetry products or when the configured
 * value is non-finite or non-positive.
 */
private getAverageRowSizeForProduct(productType: ProductType): number {
  const fallbackSize: number = 1024;

  let configuredSize: number;
  switch (productType) {
    case ProductType.Traces:
      configuredSize = AverageSpanRowSizeInBytes;
      break;
    case ProductType.Logs:
      configuredSize = AverageLogRowSizeInBytes;
      break;
    case ProductType.Metrics:
      configuredSize = AverageMetricRowSizeInBytes;
      break;
    default:
      // Products without a configured telemetry row size.
      return fallbackSize;
  }

  if (!Number.isFinite(configuredSize) || configuredSize <= 0) {
    return fallbackSize;
  }

  return configuredSize;
}
/**
 * Returns the configured average exception row size (bytes), or 1024 when
 * the configured value is non-finite or non-positive.
 */
private getAverageExceptionRowSize(): number {
  const fallbackSize: number = 1024;
  const configured: number = AverageExceptionRowSizeInBytes;
  return Number.isFinite(configured) && configured > 0
    ? configured
    : fallbackSize;
}
}
export default new Service();

View File

@@ -5,6 +5,7 @@ import Model, {
WorkspaceMiscData,
} from "../../Models/DatabaseModels/WorkspaceProjectAuthToken";
import { LIMIT_PER_PROJECT } from "../../Types/Database/LimitMax";
import BadDataException from "../../Types/Exception/BadDataException";
import CaptureSpan from "../Utils/Telemetry/CaptureSpan";
export class Service extends DatabaseService<Model> {
@@ -17,6 +18,14 @@ export class Service extends DatabaseService<Model> {
projectId: ObjectID;
workspaceType: WorkspaceType;
}): Promise<Model | null> {
if (!data.projectId) {
throw new BadDataException("projectId is required");
}
if (!data.workspaceType) {
throw new BadDataException("workspaceType is required");
}
return await this.findOneBy({
query: {
projectId: data.projectId,
@@ -38,6 +47,10 @@ export class Service extends DatabaseService<Model> {
public async getProjectAuths(data: {
projectId: ObjectID;
}): Promise<Array<Model>> {
if (!data.projectId) {
throw new BadDataException("projectId is required");
}
return await this.findBy({
query: {
projectId: data.projectId,
@@ -72,6 +85,26 @@ export class Service extends DatabaseService<Model> {
workspaceProjectId: string;
miscData: WorkspaceMiscData;
}): Promise<void> {
if (!data.projectId) {
throw new BadDataException("projectId is required");
}
if (!data.workspaceType) {
throw new BadDataException("workspaceType is required");
}
if (!data.authToken) {
throw new BadDataException("authToken is required");
}
if (!data.workspaceProjectId) {
throw new BadDataException("workspaceProjectId is required");
}
if (!data.miscData) {
throw new BadDataException("miscData is required");
}
let projectAuth: Model | null = await this.findOneBy({
query: {
projectId: data.projectId,

View File

@@ -56,6 +56,11 @@ export default class TelemetryMeteredPlan extends ServerMeteredPlan {
): Promise<void> {
// get all unreported logs
await TelemetryUsageBillingService.stageTelemetryUsageForProject({
projectId: projectId,
productType: this.productType,
});
const usageBillings: Array<TelemetryUsageBilling> =
await TelemetryUsageBillingService.getUnreportedUsageBilling({
projectId: projectId,

View File

@@ -0,0 +1,25 @@
import BaseModel from "../../../Models/DatabaseModels/DatabaseBaseModel/DatabaseBaseModel";
import DatabaseCommonInteractionProps from "../../../Types/BaseDatabase/DatabaseCommonInteractionProps";
import GroupBy from "./GroupBy";
import Query from "./Query";
import Select from "./Select";
import Sort from "./Sort";
import PositiveNumber from "../../../Types/PositiveNumber";
/**
 * Options for a "find all" database read that fetches results in batches
 * until exhausted (or until `limit` is reached).
 */
export default interface FindAllBy<TBaseModel extends BaseModel> {
/** Filter selecting which documents to fetch. */
query: Query<TBaseModel>;
/** Columns/relations to return; implementation default applies when omitted. */
select?: Select<TBaseModel> | undefined;
/** Sort order of the results. */
sort?: Sort<TBaseModel> | undefined;
/** Optional grouping applied to the query. */
groupBy?: GroupBy<TBaseModel> | undefined;
/** Common interaction props (permissions, isRoot, etc.). */
props: DatabaseCommonInteractionProps;
/**
 * Optional number of documents to skip before fetching results.
 * Acts the same way as `skip` in `findBy` but defaults to 0 when omitted.
 */
skip?: PositiveNumber | number | undefined;
/**
 * Optional total number of documents to return across all batches.
 * When omitted, the method keeps fetching until no more data is returned.
 */
limit?: PositiveNumber | number | undefined;
}

View File

@@ -177,11 +177,15 @@ export class Statement implements BaseQueryParams {
};
if ((statementParam as StatementParameter).value instanceof Includes) {
const isNumberArray: boolean = (
const includesValues: Array<string | number | ObjectID> = (
(statementParam as StatementParameter).value as Includes
).values.every((v: string | number | ObjectID) => {
return typeof v === "number";
});
).values as Array<string | number | ObjectID>;
const isNumberArray: boolean = includesValues.every(
(v: string | number | ObjectID) => {
return typeof v === "number";
},
);
if (isNumberArray) {
return "Array(Int32)";

View File

@@ -1,33 +1,26 @@
import path from "path";
import fs from "fs";
import Execute from "../Execute";
import LocalFile from "../LocalFile";
import logger from "../Logger";
import CaptureSpan from "../Telemetry/CaptureSpan";
import CodeRepositoryFile from "./CodeRepositoryFile";
import Dictionary from "../../../Types/Dictionary";
import BadDataException from "../../../Types/Exception/BadDataException";
export default class CodeRepositoryUtil {
@CaptureSpan()
public static getCurrentCommitHash(data: {
repoPath: string;
}): Promise<string> {
const command: string = `cd ${data.repoPath} && git rev-parse HEAD`;
logger.debug("Executing command: " + command);
return Execute.executeCommand(command);
return this.runGitCommand(data.repoPath, ["rev-parse", "HEAD"]);
}
@CaptureSpan()
public static async addAllChangedFilesToGit(data: {
repoPath: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git add -A`;
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
logger.debug(stdout);
await this.runGitCommand(data.repoPath, ["add", "-A"]);
}
@CaptureSpan()
@@ -36,26 +29,26 @@ export default class CodeRepositoryUtil {
authorName: string;
authorEmail: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git config --global user.name "${data.authorName}" && git config --global user.email "${data.authorEmail}"`;
await this.runGitCommand(data.repoPath, [
"config",
"--global",
"user.name",
data.authorName,
]);
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
logger.debug(stdout);
await this.runGitCommand(data.repoPath, [
"config",
"--global",
"user.email",
data.authorEmail,
]);
}
@CaptureSpan()
public static async discardAllChangesOnCurrentBranch(data: {
repoPath: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git checkout .`;
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
logger.debug(stdout);
await this.runGitCommand(data.repoPath, ["checkout", "."]);
}
// returns the folder name of the cloned repository
@@ -64,33 +57,25 @@ export default class CodeRepositoryUtil {
repoPath: string;
repoUrl: string;
}): Promise<string> {
const command: string = `cd ${data.repoPath} && git clone ${data.repoUrl}`;
await this.runGitCommand(data.repoPath, ["clone", data.repoUrl]);
logger.debug("Executing command: " + command);
const normalizedUrl: string = data.repoUrl.trim().replace(/\/+$/g, "");
const lastSegment: string =
normalizedUrl.split("/").pop() || normalizedUrl.split(":").pop() || "";
const folderName: string = lastSegment.replace(/\.git$/i, "");
const stdout: string = await Execute.executeCommand(command);
logger.debug(stdout);
// get the folder name of the repository from the disk.
const getFolderNameCommand: string = `cd ${data.repoPath} && ls`;
const folderName: string =
await Execute.executeCommand(getFolderNameCommand);
if (!folderName) {
throw new BadDataException(
"Unable to determine repository folder name after cloning.",
);
}
return folderName.trim();
}
@CaptureSpan()
public static async pullChanges(data: { repoPath: string }): Promise<void> {
const command: string = `cd ${data.repoPath} && git pull`;
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
logger.debug(stdout);
await this.runGitCommand(data.repoPath, ["pull"]);
}
@CaptureSpan()
@@ -98,13 +83,26 @@ export default class CodeRepositoryUtil {
repoPath: string;
branchName: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git checkout ${data.branchName} || git checkout -b ${data.branchName}`;
try {
await this.runGitCommand(data.repoPath, [
"rev-parse",
"--verify",
data.branchName,
]);
await this.runGitCommand(data.repoPath, ["checkout", data.branchName]);
} catch (error) {
logger.debug(
`Branch ${data.branchName} not found. Creating a new branch instead.`,
);
logger.debug("Executing command: " + command);
logger.debug(error);
const stdout: string = await Execute.executeCommand(command);
logger.debug(stdout);
await this.runGitCommand(data.repoPath, [
"checkout",
"-b",
data.branchName,
]);
}
}
@CaptureSpan()
@@ -112,15 +110,12 @@ export default class CodeRepositoryUtil {
repoPath: string;
filePath: string;
}): Promise<string> {
const path: string = LocalFile.sanitizeFilePath(
`${data.repoPath}/${data.filePath}`,
const absolutePath: string = this.resolvePathWithinRepo(
data.repoPath,
data.filePath,
);
const command: string = `cat ${path}`;
logger.debug("Executing command: " + command);
return Execute.executeCommand(`${command}`);
return LocalFile.read(absolutePath);
}
// discard all changes in the working directory
@@ -128,13 +123,7 @@ export default class CodeRepositoryUtil {
public static async discardChanges(data: {
repoPath: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git checkout .`;
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
logger.debug(stdout);
await this.runGitCommand(data.repoPath, ["checkout", "."]);
}
@CaptureSpan()
@@ -183,13 +172,9 @@ export default class CodeRepositoryUtil {
`${data.repoPath}/${data.directoryPath}`,
);
const command: string = `rm -rf ${totalPath}`;
logger.debug("Deleting directory: " + totalPath);
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
logger.debug(stdout);
await LocalFile.deleteDirectory(totalPath);
}
@CaptureSpan()
@@ -197,11 +182,15 @@ export default class CodeRepositoryUtil {
repoPath: string;
branchName: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git checkout -b ${data.branchName}`;
logger.debug(
`Creating git branch '${data.branchName}' in ${path.resolve(data.repoPath)}`,
);
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
const stdout: string = await this.runGitCommand(data.repoPath, [
"checkout",
"-b",
data.branchName,
]);
logger.debug(stdout);
}
@@ -211,11 +200,14 @@ export default class CodeRepositoryUtil {
repoPath: string;
branchName: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git checkout ${data.branchName}`;
logger.debug(
`Checking out git branch '${data.branchName}' in ${path.resolve(data.repoPath)}`,
);
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
const stdout: string = await this.runGitCommand(data.repoPath, [
"checkout",
data.branchName,
]);
logger.debug(stdout);
}
@@ -225,22 +217,51 @@ export default class CodeRepositoryUtil {
repoPath: string;
filePaths: Array<string>;
}): Promise<void> {
const filePaths: Array<string> = data.filePaths.map((filePath: string) => {
if (filePath.startsWith("/")) {
// remove the leading slash and return
return filePath.substring(1);
const repoRoot: string = path.resolve(data.repoPath);
const sanitizedRelativeFilePaths: Array<string> = [];
for (const inputFilePath of data.filePaths) {
const normalizedPath: string = inputFilePath.startsWith("/")
? inputFilePath.substring(1)
: inputFilePath;
if (normalizedPath.trim() === "") {
continue;
}
return filePath;
});
const absoluteFilePath: string = this.resolvePathWithinRepo(
data.repoPath,
normalizedPath,
);
const command: string = `cd ${
data.repoPath
} && git add ${filePaths.join(" ")}`;
const relativeFilePath: string = path.relative(
repoRoot,
absoluteFilePath,
);
logger.debug("Executing command: " + command);
if (relativeFilePath.trim() === "") {
continue;
}
const stdout: string = await Execute.executeCommand(command);
sanitizedRelativeFilePaths.push(
LocalFile.sanitizeFilePath(relativeFilePath),
);
}
if (sanitizedRelativeFilePaths.length === 0) {
logger.debug("git add skipped because no file paths were provided");
return;
}
logger.debug(
`Adding ${sanitizedRelativeFilePaths.length} file(s) to git in ${path.resolve(data.repoPath)}`,
);
const stdout: string = await this.runGitCommand(data.repoPath, [
"add",
...sanitizedRelativeFilePaths,
]);
logger.debug(stdout);
}
@@ -250,11 +271,13 @@ export default class CodeRepositoryUtil {
repoPath: string;
username: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git config user.name "${data.username}"`;
logger.debug(`Setting git user.name in ${path.resolve(data.repoPath)}`);
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
const stdout: string = await this.runGitCommand(data.repoPath, [
"config",
"user.name",
data.username,
]);
logger.debug(stdout);
}
@@ -264,11 +287,13 @@ export default class CodeRepositoryUtil {
repoPath: string;
message: string;
}): Promise<void> {
const command: string = `cd ${data.repoPath} && git commit -m "${data.message}"`;
logger.debug("Executing git commit");
logger.debug("Executing command: " + command);
const stdout: string = await Execute.executeCommand(command);
const stdout: string = await Execute.executeCommandFile({
command: "git",
args: ["commit", "-m", data.message],
cwd: data.repoPath,
});
logger.debug(stdout);
}
@@ -288,15 +313,28 @@ export default class CodeRepositoryUtil {
const { repoPath, filePath } = data;
const command: string = `cd ${repoPath} && git log -1 --pretty=format:"%H" ".${filePath}"`;
const repoRoot: string = path.resolve(repoPath);
const absoluteTarget: string = this.resolvePathWithinRepo(
repoPath,
filePath,
);
const relativeTarget: string = path.relative(repoRoot, absoluteTarget);
const gitArgument: string = LocalFile.sanitizeFilePath(
`./${relativeTarget}`,
);
logger.debug("Executing command: " + command);
logger.debug(`Getting last commit hash for ${gitArgument} in ${repoRoot}`);
const hash: string = await Execute.executeCommand(command);
const hash: string = await this.runGitCommand(repoRoot, [
"log",
"-1",
"--pretty=format:%H",
gitArgument,
]);
logger.debug(hash);
return hash;
return hash.trim();
}
@CaptureSpan()
@@ -318,11 +356,11 @@ export default class CodeRepositoryUtil {
totalPath = LocalFile.sanitizeFilePath(totalPath); // clean up the path
const output: string = await Execute.executeCommand(`ls ${totalPath}`);
const entries: Array<fs.Dirent> = await LocalFile.readDirectory(totalPath);
const fileNames: Array<string> = output.split("\n");
return fileNames;
return entries.map((entry: fs.Dirent) => {
return entry.name;
});
}
@CaptureSpan()
@@ -350,16 +388,12 @@ export default class CodeRepositoryUtil {
totalPath = LocalFile.sanitizeFilePath(totalPath); // clean up the path
const files: Dictionary<CodeRepositoryFile> = {};
const output: string = await Execute.executeCommand(`ls ${totalPath}`);
const fileNames: Array<string> = output.split("\n");
const subDirectories: Array<string> = [];
for (const fileName of fileNames) {
if (fileName === "") {
continue;
}
const entries: Array<fs.Dirent> = await LocalFile.readDirectory(totalPath);
for (const entry of entries) {
const fileName: string = entry.name;
const filePath: string = LocalFile.sanitizeFilePath(
`${directoryPath}/${fileName}`,
@@ -369,13 +403,7 @@ export default class CodeRepositoryUtil {
continue;
}
const isDirectory: boolean = (
await Execute.executeCommand(
`file "${LocalFile.sanitizeFilePath(`${totalPath}/${fileName}`)}"`,
)
).includes("directory");
if (isDirectory) {
if (entry.isDirectory()) {
subDirectories.push(
LocalFile.sanitizeFilePath(`${directoryPath}/${fileName}`),
);
@@ -449,4 +477,45 @@ export default class CodeRepositoryUtil {
return files;
}
private static runGitCommand(
repoPath: string,
args: Array<string>,
): Promise<string> {
const cwd: string = path.resolve(repoPath);
logger.debug(
`Executing git command in ${cwd}: git ${args
.map((arg: string) => {
return arg.includes(" ") ? `"${arg}"` : arg;
})
.join(" ")}`,
);
return Execute.executeCommandFile({
command: "git",
args,
cwd,
});
}
private static resolvePathWithinRepo(
repoPath: string,
targetPath: string,
): string {
const root: string = path.resolve(repoPath);
const sanitizedTarget: string = LocalFile.sanitizeFilePath(
targetPath,
).replace(/^\/+/, "");
const absoluteTarget: string = path.resolve(root, sanitizedTarget);
if (
absoluteTarget !== root &&
!absoluteTarget.startsWith(root + path.sep)
) {
throw new BadDataException("File path is outside the repository");
}
return absoluteTarget;
}
}

View File

@@ -170,13 +170,15 @@ export default class GitHubUtil extends HostedCodeRepository {
`https://github.com/${data.organizationName}/${data.repositoryName}.git`,
);
const command: string = `git remote add ${
data.remoteName
} ${url.toString()}`;
logger.debug(
`Adding remote '${data.remoteName}' for ${data.organizationName}/${data.repositoryName}`,
);
logger.debug("Executing command: " + command);
const result: string = await Execute.executeCommand(command);
const result: string = await Execute.executeCommandFile({
command: "git",
args: ["remote", "add", data.remoteName, url.toString()],
cwd: process.cwd(),
});
logger.debug(result);
}
@@ -197,10 +199,19 @@ export default class GitHubUtil extends HostedCodeRepository {
"Pushing changes to remote repository with username: " + username,
);
const command: string = `cd ${data.repoPath} && git push -u https://${username}:${password}@github.com/${data.organizationName}/${data.repositoryName}.git ${branchName}`;
logger.debug("Executing command: " + command);
const encodedUsername: string = encodeURIComponent(username);
const encodedPassword: string = encodeURIComponent(password);
const remoteUrl: string = `https://${encodedUsername}:${encodedPassword}@github.com/${data.organizationName}/${data.repositoryName}.git`;
const result: string = await Execute.executeCommand(command);
logger.debug(
`Pushing branch '${branchName}' to ${data.organizationName}/${data.repositoryName}`,
);
const result: string = await Execute.executeCommandFile({
command: "git",
args: ["push", "-u", remoteUrl, branchName],
cwd: data.repoPath,
});
logger.debug(result);
}

Some files were not shown because too many files have changed in this diff Show More