Compare commits

...

157 Commits

Author SHA1 Message Date
Nawaz Dhandala
4868e285b0 fix: simplify description for postmortem published timestamp field 2025-12-03 19:30:08 +00:00
Nawaz Dhandala
f572eb6f93 feat: add subscriber notification fields and migration for postmortem 2025-12-03 19:18:43 +00:00
Nawaz Dhandala
a0868e2f75 feat: add subscriber notification status and resend functionality in postmortem 2025-12-03 19:16:49 +00:00
Nawaz Dhandala
3dfd7a9206 feat: set subscriber notification status to Pending on postmortem published 2025-12-03 19:12:42 +00:00
Nawaz Dhandala
d7582337bf fix: remove ignoreHooks option from incident status update in postmortem notification 2025-12-03 19:11:00 +00:00
Nawaz Dhandala
23043462d7 fix: correct incident number retrieval in postmortem notification logic 2025-12-03 19:08:29 +00:00
Nawaz Dhandala
76d53c53c8 feat: add notifySubscribersOnPostmortemPublished toggle to postmortem form 2025-12-03 19:06:40 +00:00
Nawaz Dhandala
4437e912a3 feat: add notifySubscribersOnPostmortemPublished field and update notification logic 2025-12-03 19:05:28 +00:00
Nawaz Dhandala
937d4675a8 feat: add postmortem notification system for subscribers 2025-12-03 19:03:02 +00:00
Nawaz Dhandala
3cc984f149 Merge branch 'master' into release 2025-12-03 18:53:12 +00:00
Nawaz Dhandala
991928a5a5 fix: update version to 9.2.1 2025-12-03 18:53:05 +00:00
Nawaz Dhandala
6f46812418 fix: remove welcome message sending on bot installation 2025-12-03 18:52:18 +00:00
Nawaz Dhandala
6e20e7f08f fix: correct typo in debug log message for sleep duration in FetchListAndProbe 2025-12-03 18:45:25 +00:00
Nawaz Dhandala
ae406d8ee1 fix: update QEMU setup to use tonistiigi/binfmt:qemu-v10.0.4 in release workflows 2025-12-03 18:42:01 +00:00
Nawaz Dhandala
05920d5b99 fix: improve error logging for Playwright resource closure 2025-12-03 18:33:27 +00:00
Nawaz Dhandala
3a309aabcf fix: enhance browser context closure handling in SyntheticMonitor 2025-12-03 18:31:12 +00:00
Nawaz Dhandala
22a3004a3f fix: simplify error logging in safeCloseBrowserContext and adjust formatting in safeCloseBrowser 2025-12-03 18:27:09 +00:00
Nawaz Dhandala
b8f69fbea3 fix: refactor browser session management in SyntheticMonitor for improved clarity and error handling 2025-12-03 18:25:37 +00:00
Nawaz Dhandala
888aff6392 fix: format migration queries and update index for new migration 2025-12-03 14:42:10 +00:00
Nawaz Dhandala
234de977c4 fix: prevent rendering icon for non-highlighted timeline items 2025-12-03 14:41:39 +00:00
Nawaz Dhandala
fa5f606709 Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-12-03 14:20:38 +00:00
Nawaz Dhandala
b889611d16 fix: update postmortem attachment message for improved visibility and change version to 9.2.0 2025-12-03 14:04:09 +00:00
Nawaz Dhandala
43f0eeb0f8 fix: improve clarity in postmortem status page description and enhance attachment handling messages 2025-12-03 13:57:40 +00:00
Nawaz Dhandala
be311dd8b5 feat: add default value for Postmortem Published At field 2025-12-03 13:56:30 +00:00
Nawaz Dhandala
29428bf660 fix: update placeholder for Postmortem Published At field to improve clarity 2025-12-03 13:55:58 +00:00
Simon Larsen
9eebbe9dfb feat: implement retry logic with configurable attempts and delay for LMStudioClient 2025-12-03 13:51:30 +00:00
Nawaz Dhandala
0dc3bb4f33 feat: add MigrationName1764767371788 for updating OnCallDutyPolicyScheduleLayer defaults 2025-12-03 13:10:12 +00:00
Nawaz Dhandala
adf5a9c1f3 feat: add postmortemPostedAt field and update related components for incident tracking 2025-12-03 13:09:23 +00:00
Nawaz Dhandala
faaded049a feat: add DocumentCheck icon to IconProp and update incident postmortem note icon 2025-12-03 12:58:27 +00:00
Nawaz Dhandala
d02e3882be fix: adjust font size classes for TimelineItem highlight to improve readability 2025-12-03 12:52:20 +00:00
Nawaz Dhandala
e1af84fafa feat: enhance EventItem styling with conditional highlight for improved visibility 2025-12-03 12:51:03 +00:00
Nawaz Dhandala
c371f0a25f feat: add title and highlight properties to TimelineItem for enhanced event display 2025-12-03 12:42:17 +00:00
Nawaz Dhandala
c86d2c2a4a refactor: streamline API route registration and improve code readability 2025-12-03 11:50:17 +00:00
Nawaz Dhandala
a807cc10ab feat: add migration for IncidentPostmortemAttachmentFile table and related constraints 2025-12-03 11:43:31 +00:00
Nawaz Dhandala
6cc480744d Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-12-03 11:42:07 +00:00
Nawaz Dhandala
7cb6104795 feat: enhance postmortem functionality with attachment handling and status page visibility 2025-12-03 11:42:05 +00:00
Nawaz Dhandala
49dd315501 feat: implement postmortem attachment handling in Incident and StatusPage APIs 2025-12-03 11:34:00 +00:00
Simon Larsen
9ec2b458ed style: remove unnecessary first-letter styling from blog post body 2025-12-03 11:14:50 +00:00
Nawaz Dhandala
702b5811a9 fix: add missing line continuation for workspace-path in usage example 2025-12-02 22:25:59 +00:00
Nawaz Dhandala
7dc7255790 feat: add alias normalization for historical argument names in ReadFileTool 2025-12-02 22:22:19 +00:00
Nawaz Dhandala
1f620e7092 feat: update default max iterations for tool-calling rounds to 100 2025-12-02 21:48:32 +00:00
Nawaz Dhandala
87466246fa Revert "feat: enhance log file path resolution in CLI to support home directory and relative paths"
This reverts commit 12eaa17859.
2025-12-02 21:34:16 +00:00
Nawaz Dhandala
12eaa17859 feat: enhance log file path resolution in CLI to support home directory and relative paths 2025-12-02 21:31:20 +00:00
Nawaz Dhandala
e782ae6b3c feat: enhance logging in CopilotAgent and WorkspaceContext with detailed message contents 2025-12-02 21:17:34 +00:00
Nawaz Dhandala
9ad87328c2 feat: add detailed JSDoc comments for Copilot agent and tools 2025-12-02 21:04:08 +00:00
Nawaz Dhandala
8279294d15 feat: implement oneuptime-copilot-agent CLI with logging and configuration options 2025-12-02 20:52:15 +00:00
Nawaz Dhandala
8c6da51d58 Adopt PascalCase paths in Copilot 2025-12-02 20:34:27 +00:00
Nawaz Dhandala
6d114e3ac4 chore: bump version to 9.1.3 2025-12-02 20:18:07 +00:00
Nawaz Dhandala
44427d3ee7 feat: enhance ReadFileTool with optional line start and end parameters 2025-12-02 14:52:07 +00:00
Nawaz Dhandala
09b0c3b1ef feat: add debug logging instructions and example to README 2025-12-02 14:42:08 +00:00
Nawaz Dhandala
ad597fe5dd feat: update model name and workspace path in usage examples 2025-12-02 14:39:36 +00:00
Nawaz Dhandala
74f17fa45c fix: handle notification skipping for already notified scheduled maintenance events 2025-12-02 14:22:10 +00:00
Nawaz Dhandala
b19a5fa58a feat: add isCreatedState and isScheduledState checks to skip notifications for already notified incidents 2025-12-02 14:21:52 +00:00
Nawaz Dhandala
57abffa113 Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-12-02 13:06:53 +00:00
Nawaz Dhandala
e8e493ee5a Refactor code structure for improved readability and maintainability 2025-12-02 13:06:50 +00:00
Simon Larsen
e065ebdddc Merge branch 'copilot-v2' 2025-12-02 13:02:32 +00:00
Simon Larsen
39da442892 style: update blog post first paragraph styling for improved readability 2025-12-02 13:02:18 +00:00
Simon Larsen
45b02b30e3 Merge pull request #2152 from OneUptime/chore/npm-audit-fix
chore: npm audit fix
2025-12-02 11:46:04 +00:00
Nawaz Dhandala
30414327f9 feat: add Dockerfile for OneUptime-copilot setup 2025-12-02 11:05:16 +00:00
simlarsen
b99a20a588 chore: npm audit fix 2025-12-02 01:50:48 +00:00
Nawaz Dhandala
22178c282d fix: format command descriptions for consistency in MicrosoftTeamsAPI 2025-12-01 17:13:11 +00:00
Nawaz Dhandala
30389a8d49 feat: add command lists for improved interaction with OneUptime bot in Microsoft Teams 2025-12-01 17:11:37 +00:00
Nawaz Dhandala
7b73cc2ea7 fix: remove trailing spaces in action type definitions 2025-12-01 17:05:30 +00:00
Nawaz Dhandala
6d2c331216 feat: update command triggers for incident and maintenance actions 2025-12-01 17:05:05 +00:00
Nawaz Dhandala
624e4c2296 chore: update version to 9.1.2 2025-12-01 16:38:51 +00:00
Simon Larsen
5e901ee973 Merge pull request #2151 from OneUptime/copilot-v2
Copilot v2
2025-12-01 16:25:22 +00:00
Simon Larsen
a103abc7a9 fix: simplify boolean expression for hasProgressedBeyondScheduledState 2025-12-01 15:45:35 +00:00
Simon Larsen
a7dda0bd53 feat: add logic to update nextSubscriberNotificationBeforeTheEventAt for progressed scheduled maintenance events 2025-12-01 15:45:15 +00:00
Simon Larsen
6948754c86 Merge pull request #2147 from OneUptime/copilot-v2
Copilot v2
2025-12-01 15:21:05 +00:00
Simon Larsen
cc5731bb6d feat: add error handling and logging for missing tool calls and directory entries 2025-12-01 15:20:44 +00:00
Simon Larsen
6761a8a686 Merge pull request #2148 from OneUptime/snyk-upgrade-240d43adaab510cce84165a4f1ccf9b5
[Snyk] Upgrade mailparser from 3.7.5 to 3.9.0
2025-12-01 13:42:14 +00:00
Simon Larsen
6e487199aa refactor: add type annotations and improve type safety across multiple files 2025-12-01 13:41:34 +00:00
snyk-bot
cda5de92ec fix: upgrade mailparser from 3.7.5 to 3.9.0
Snyk has created this PR to upgrade mailparser from 3.7.5 to 3.9.0.

See this package in npm:
mailparser

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/c3622982-05c8-495c-809c-20f301c75f92?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-29 12:10:48 +00:00
Simon Larsen
33349341a9 refactor: improve code formatting and readability across multiple files 2025-11-28 21:53:50 +00:00
Simon Larsen
db81fdd3e7 feat: enhance logging throughout the Copilot agent and tools for better traceability 2025-11-28 21:52:33 +00:00
Simon Larsen
d71eba91dd chore: remove vscode-copilot-chat subproject reference 2025-11-28 21:45:07 +00:00
Simon Larsen
682bb805f3 feat: implement AgentLogger for file-based logging with exit handlers 2025-11-28 21:43:15 +00:00
Simon Larsen
7f38e3d417 docs: add usage example for running the agent in development mode 2025-11-28 21:31:08 +00:00
Simon Larsen
559985e93b feat: add tsconfig-paths for improved module resolution in development 2025-11-28 21:28:19 +00:00
Simon Larsen
43588cbe5a refactor: update optional properties to include 'undefined' type in various interfaces 2025-11-28 20:57:17 +00:00
Simon Larsen
0772fce477 refactor: update Telemetry class to use type assertions for loggerProviderConfig and nodeSdkConfiguration
chore: remove unused common type definitions and clean up tsconfig.json
2025-11-28 20:20:09 +00:00
Simon Larsen
78107d8b1c chore: remove unused type definitions and clean up tsconfig.json 2025-11-28 20:06:43 +00:00
Simon Larsen
078af43b0c chore: remove tsconfig.json for OneUptime Copilot Agent 2025-11-28 19:58:11 +00:00
Simon Larsen
9b9aeb2f40 feat: Implement OneUptime Copilot Agent with workspace tools
- Added SystemPrompt for guiding the agent's behavior.
- Created WorkspaceContextBuilder to gather workspace information.
- Developed main entry point in index.ts for agent execution.
- Implemented LMStudioClient for interacting with the LM Studio API.
- Added ApplyPatchTool for applying code changes via patches.
- Created ListDirectoryTool for listing files and directories.
- Implemented ReadFileTool for reading file contents.
- Developed RunCommandTool for executing shell commands.
- Added SearchWorkspaceTool for searching files in the workspace.
- Created WriteFileTool for writing content to files.
- Established ToolRegistry for managing and executing tools.
- Defined types for chat messages and tool calls.
- Added utility classes for logging and executing commands.
- Implemented WorkspacePaths for managing file paths within the workspace.
- Configured TypeScript settings in tsconfig.json.
2025-11-28 19:57:52 +00:00
Nawaz Dhandala
67577f5a2b refactor: improve formatting and readability in Incident migration and MonitorService 2025-11-28 17:42:22 +00:00
Nawaz Dhandala
4e808cf382 feat: enhance monitor deletion process to include MetricService cleanup 2025-11-28 17:40:31 +00:00
Nawaz Dhandala
c993b33dab feat: add projectId to MetricService deletion query in incident handling 2025-11-28 17:35:23 +00:00
Nawaz Dhandala
3c5a64024b feat: include projectId in MetricService deletion query for incidents 2025-11-28 17:34:30 +00:00
Nawaz Dhandala
86efe54a29 refactor: remove unused favicon handling from DashboardMasterPage 2025-11-28 17:29:43 +00:00
Simon Larsen
17bf568428 feat: Implement OneUptime Copilot Agent with core functionalities
- Add SystemPrompt to define agent behavior and principles.
- Create WorkspaceContextBuilder for workspace snapshot and Git status.
- Initialize main entry point with command-line options for agent configuration.
- Develop LMStudioClient for chat completion requests to LM Studio.
- Implement tools for file operations: ApplyPatchTool, ListDirectoryTool, ReadFileTool, RunCommandTool, SearchWorkspaceTool, WriteFileTool.
- Establish ToolRegistry for managing and executing tools.
- Define types for chat messages, tool calls, and execution results.
- Set up workspace path utilities for file management and validation.
- Configure TypeScript settings for the project.
2025-11-28 16:49:46 +00:00
Simon Larsen
26ac698cc7 Remove Copilot package configuration files 2025-11-28 15:43:36 +00:00
Simon Larsen
72bb25e036 chore: migrate VERSION_PREFIX to VERSION and update related workflows 2025-11-28 15:40:24 +00:00
Nawaz Dhandala
1f23742c1f chore: remove vscode-copilot-chat subproject 2025-11-28 14:12:12 +00:00
Nawaz Dhandala
ac66cee4aa feat: add declaredAt field to Incident model with migration and default value 2025-11-28 10:12:43 +00:00
Nawaz Dhandala
66efe2d2fa feat: add declaredAt field to Incident model and update related services and components 2025-11-28 10:10:05 +00:00
Nawaz Dhandala
0ad5c14882 feat: refactor SCIM creation in TeamMemberService tests for improved clarity 2025-11-27 14:13:14 +00:00
Nawaz Dhandala
2468b39dd2 style: format code for improved readability in TeamMemberService 2025-11-27 13:59:12 +00:00
Nawaz Dhandala
4fec2caef6 feat: update SCIM integration to manage team members with Push Groups 2025-11-27 13:58:47 +00:00
Nawaz Dhandala
dc041d924a style: update social media icons in blog post for improved accessibility 2025-11-27 12:50:30 +00:00
Nawaz Dhandala
37acc617a0 style: add styling for inline code chips in blog body 2025-11-27 12:42:54 +00:00
Nawaz Dhandala
cd28370ce3 style: update color scheme for blog post elements 2025-11-27 12:39:45 +00:00
Nawaz Dhandala
e847f430f2 feat: enhance blog post styling and add reading progress indicator 2025-11-27 12:37:19 +00:00
Nawaz Dhandala
d1e94daaca style: adjust margins for blog post body text 2025-11-27 12:17:10 +00:00
Nawaz Dhandala
df264d6766 feat: add Dockerfile language support to syntax highlighting 2025-11-27 12:16:29 +00:00
Nawaz Dhandala
49c2312c47 Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-11-27 11:30:01 +00:00
Nawaz Dhandala
0fd3121b29 chore: automate version prefix bump and PR creation in release workflow 2025-11-27 11:29:30 +00:00
Simon Larsen
ea43c43991 Merge pull request #2140 from OneUptime/chore/npm-audit-fix
chore: npm audit fix
2025-11-27 11:20:26 +00:00
simlarsen
51a128efd3 chore: npm audit fix 2025-11-27 01:46:46 +00:00
Nawaz Dhandala
847bac5c6a refactor: enhance Chrome and Firefox executable path retrieval with additional candidates 2025-11-26 16:42:10 +00:00
Nawaz Dhandala
29b137afbd refactor: enhance Chrome executable path retrieval with multiple candidate checks 2025-11-26 16:35:30 +00:00
Nawaz Dhandala
1be0b475a6 bump: update version to 9.1.1 2025-11-26 16:28:59 +00:00
Nawaz Dhandala
2467d2c02d refactor: reorder app dependency installation and Playwright browser setup in Dockerfile 2025-11-26 16:26:17 +00:00
Nawaz Dhandala
b9597250ac refactor: simplify APP_TAG assignment in release and test workflows 2025-11-26 10:58:19 +00:00
Nawaz Dhandala
203e9b8c39 chore: upgrade Docker setup actions to v3 in release and test workflows 2025-11-25 21:41:37 +00:00
Nawaz Dhandala
16078ffe3b Merge branch 'master' of https://github.com/OneUptime/oneuptime 2025-11-25 19:54:54 +00:00
Nawaz Dhandala
898c4de78f refactor: enhance subdomain handling and validation in StatusPageDomain and Domains components 2025-11-25 19:54:50 +00:00
Simon Larsen
da53b7c51c feat: add check for existing GitHub release to skip creation and publishing 2025-11-25 19:51:10 +00:00
Nawaz Dhandala
8a330e7914 refactor: implement caching for Bot Framework adapter to improve performance 2025-11-25 18:51:03 +00:00
Nawaz Dhandala
8bf7b8dfa2 feat: add Microsoft Teams app tenant ID configuration 2025-11-25 18:49:26 +00:00
Nawaz Dhandala
9d36920477 Refactor versioning in release and test workflows to remove build number suffix
- Updated versioning in release.yml to use only major_minor version for Helm chart packaging, Docker images, and GitHub releases.
- Adjusted versioning in test-release.yaml to reflect similar changes, removing build number suffix for test releases.
2025-11-25 13:08:34 +00:00
Nawaz Dhandala
264cdc7c6b refactor: improve code readability by formatting long lines in user update handlers 2025-11-25 11:58:08 +00:00
Nawaz Dhandala
3d8daa46aa refactor: streamline user update logic by consolidating PUT and PATCH handlers 2025-11-25 11:57:42 +00:00
Nawaz Dhandala
673ab6845f refactor: consolidate user update logic into a single handler for PUT and PATCH endpoints 2025-11-25 11:52:43 +00:00
Simon Larsen
bb3df528cf Merge pull request #2137 from OneUptime/chore/npm-audit-fix
chore: npm audit fix
2025-11-25 08:22:21 +00:00
simlarsen
f52e73afb2 chore: npm audit fix 2025-11-25 01:48:46 +00:00
Nawaz Dhandala
3e04d38eb1 fix: update secret usage documentation and enhance monitor secret retrieval logic 2025-11-24 21:30:08 +00:00
Nawaz Dhandala
27c2ffdfbd chore: remove outdated APK build workflow and script 2025-11-24 21:22:17 +00:00
Nawaz Dhandala
78ee52fb4d fix: add missing iarc_rating_id to the manifest file 2025-11-24 21:08:26 +00:00
Nawaz Dhandala
adc15561b9 feat: enhance SMTP transport configuration by adding flexible secure option handling 2025-11-24 21:04:48 +00:00
Nawaz Dhandala
e19a14e906 fix: update proxy_pass path for assetlinks.json in Nginx configuration 2025-11-24 20:19:49 +00:00
Simon Larsen
035f3412b8 Merge pull request #2132 from OneUptime/captcha
Captcha
2025-11-24 19:13:34 +00:00
Nawaz Dhandala
deb902463c feat: enhance captcha integration by improving type definitions and refactoring callback functions 2025-11-24 19:11:21 +00:00
Nawaz Dhandala
a03a2bf9b0 fix: update proxy_pass path for assetlinks.json in Nginx configuration 2025-11-24 19:06:01 +00:00
Nawaz Dhandala
5f396d36a4 feat: implement assetlinks.json for Android app delegation and enhance Nginx configuration for asset handling 2025-11-24 18:59:42 +00:00
Simon Larsen
99cf626d7d Merge branch 'release' of github.com:OneUptime/oneuptime into release 2025-11-24 15:47:59 +00:00
Simon Larsen
ae72437591 fix: update test-e2e workflows to include build number and version dependencies 2025-11-24 15:47:37 +00:00
Simon Larsen
86301213f0 fix: sanitize APP_TAG format by replacing '+' with '-' 2025-11-24 15:42:25 +00:00
Nawaz Dhandala
c6e889b2a8 feat: integrate captcha verification in login process 2025-11-24 15:17:31 +00:00
Nawaz Dhandala
0a053c51e3 feat: update billing payment method permissions to include Manage Billing access 2025-11-24 13:01:08 +00:00
Nawaz Dhandala
296ecbd9e3 feat: enhance error handling in certificate ordering process based on billing status 2025-11-24 12:47:31 +00:00
Nawaz Dhandala
aa4797cc54 fix: add space in subscription management email subject 2025-11-24 12:37:23 +00:00
Nawaz Dhandala
fd4759f16e feat: add site key configuration for hCaptcha in values.yaml 2025-11-24 12:28:05 +00:00
Nawaz Dhandala
a7b7dc61cf feat: add captcha configuration and environment variables to Helm chart and Docker Compose 2025-11-24 12:18:34 +00:00
Nawaz Dhandala
3b0bdca980 feat: implement captcha verification in registration process 2025-11-24 12:13:15 +00:00
Nawaz Dhandala
07bc6d4edd fix: remove public create permission from User table access control 2025-11-24 12:08:33 +00:00
Nawaz Dhandala
8642a54fec feat: add captcha configuration and verification support 2025-11-24 12:08:09 +00:00
Simon Larsen
9ed0c3cf2b Merge pull request #2130 from OneUptime/master
Release
2025-11-24 12:03:48 +00:00
Simon Larsen
396c73f601 Merge pull request #2128 from OneUptime/snyk-upgrade-f06191ee357ef468242a37c903b4b224
[Snyk] Upgrade axios from 1.13.0 to 1.13.1
2025-11-24 12:00:50 +00:00
snyk-bot
ceb54ae12d fix: upgrade axios from 1.13.0 to 1.13.1
Snyk has created this PR to upgrade axios from 1.13.0 to 1.13.1.

See this package in npm:
axios

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/49c81d9c-12c2-4e8e-b9e8-72f98b1b595c?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-24 09:37:36 +00:00
Nawaz Dhandala
8df9a14b13 fix: standardize branch name formatting in GitHub Actions workflow 2025-11-22 16:27:47 +00:00
Nawaz Dhandala
7d32627917 Update GitHub Actions workflow for versioning and Docker image builds
- Enhanced the `test-release.yaml` workflow to read and determine semantic versioning from `VERSION_PREFIX`, including major, minor, and patch components.
- Adjusted versioning format in the workflow to use a new scheme: `major.minor.patch-test+build.build_number`.
- Updated Docker image build script to sanitize version strings by replacing '+' with '-' for tagging.
- Incremented the version in `VERSION_PREFIX` from `9.0` to `9.1.0`.
2025-11-22 14:11:02 +00:00
Nawaz Dhandala
a9ea19507e fix: add missing widgets array in manifest.json 2025-11-22 14:05:15 +00:00
Nawaz Dhandala
8c2c002382 fix: update manifest.json to include scope_extensions and adjust client_mode format 2025-11-22 13:52:29 +00:00
Simon Larsen
2a2aca032e Merge pull request #2127 from OneUptime/snyk-upgrade-51c02a535e494371cbaf2b8819deeaff
[Snyk] Upgrade eslint-plugin-unused-imports from 4.2.0 to 4.3.0
2025-11-22 13:00:01 +00:00
snyk-bot
911fe180ab fix: upgrade eslint-plugin-unused-imports from 4.2.0 to 4.3.0
Snyk has created this PR to upgrade eslint-plugin-unused-imports from 4.2.0 to 4.3.0.

See this package in npm:
eslint-plugin-unused-imports

See this project in Snyk:
https://app.snyk.io/org/oneuptime-RsC2nshvQ2Vnr35jHvMnMP/project/c3622982-05c8-495c-809c-20f301c75f92?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-22 10:06:29 +00:00
Simon Larsen
11cbe5f34a Merge pull request #2126 from OneUptime/chore/npm-audit-fix
chore: npm audit fix
2025-11-22 04:23:48 +00:00
simlarsen
883f51e2d2 chore: npm audit fix 2025-11-22 01:43:11 +00:00
190 changed files with 7143 additions and 28898 deletions

View File

@@ -1,64 +0,0 @@
name: Build Dashboard Android APK
on:
push:
branches:
- master
- release
env:
MANIFEST_URL: https://oneuptime.com/dashboard/manifest.json
PWA_ORIGIN: https://oneuptime.com
PACKAGE_ID: com.oneuptime.dashboard
HOST_NAME: oneuptime.com
jobs:
build-apk:
runs-on: ubuntu-latest
env:
MANIFEST_SOURCE_PATH: Dashboard/public/manifest.json
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Compute version numbers
id: version
run: |
set -euo pipefail
VERSION_PREFIX=$(cat VERSION_PREFIX | tr -d ' \n')
VERSION_NAME="${VERSION_PREFIX}.${GITHUB_RUN_NUMBER}"
echo "name=${VERSION_NAME}" >> $GITHUB_OUTPUT
echo "code=${GITHUB_RUN_NUMBER}" >> $GITHUB_OUTPUT
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'npm'
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: '17'
- name: Build APK
id: build_apk
env:
VERSION_NAME: ${{ steps.version.outputs.name }}
VERSION_CODE: ${{ steps.version.outputs.code }}
SIGNING_KEYSTORE_BASE64: ${{ secrets.ANDROID_APK_SIGNING_KEYSTORE_BASE64 }}
SIGNING_KEY_ALIAS: ${{ secrets.ANDROID_APK_SIGNING_KEY_ALIAS }}
SIGNING_KEY_PASSWORD: ${{ secrets.ANDROID_APK_SIGNING_KEY_PASSWORD }}
SIGNING_STORE_PASSWORD: ${{ secrets.ANDROID_APK_SIGNING_STORE_PASSWORD }}
run: |
set -euo pipefail
APK_PATH=$(./Scripts/GHA/build_apk.sh)
echo "apk_path=$APK_PATH" >> $GITHUB_OUTPUT
echo "Found APK at $APK_PATH"
- name: Upload APK artifact
uses: actions/upload-artifact@v4
with:
name: dashboard-apk-${{ steps.version.outputs.name }}
path: ${{ steps.build_apk.outputs.apk_path }}

File diff suppressed because it is too large Load Diff

View File

@@ -7,7 +7,7 @@ concurrency:
on:
push:
branches:
- master
- "master"
jobs:
generate-build-number:
runs-on: ubuntu-latest
@@ -24,17 +24,67 @@ jobs:
read-version:
runs-on: ubuntu-latest
outputs:
major_minor: ${{ steps.read.outputs.major_minor }}
major_minor: ${{ steps.determine.outputs.semver_base }}
semver_base: ${{ steps.determine.outputs.semver_base }}
major: ${{ steps.determine.outputs.major }}
minor: ${{ steps.determine.outputs.minor }}
patch: ${{ steps.determine.outputs.patch }}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Read VERSION_PREFIX
id: read
- name: Determine semver base
id: determine
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REPOSITORY: ${{ github.repository }}
run: |
VERSION_PREFIX=$(cat VERSION_PREFIX | tr -d ' \n')
echo "major_minor=$VERSION_PREFIX" >> $GITHUB_OUTPUT
echo "Using version prefix: $VERSION_PREFIX"
set -euo pipefail
VERSION_RAW="$(tr -d ' \n' < VERSION)"
if [[ -z "$VERSION_RAW" ]]; then
echo "VERSION is empty" >&2
exit 1
fi
IFS='.' read -r major minor patch <<< "$VERSION_RAW"
if [[ -z "$minor" ]]; then
echo "VERSION must contain major and minor components" >&2
exit 1
fi
patch="${patch:-0}"
for part_name in major minor patch; do
part="${!part_name}"
if ! [[ "$part" =~ ^[0-9]+$ ]]; then
echo "Invalid ${part_name} component '$part' in VERSION" >&2
exit 1
fi
done
target_patch="$patch"
latest_tag="$(gh release view --repo "$REPOSITORY" --json tagName --jq '.tagName' 2>/dev/null || echo "")"
if [[ -n "$latest_tag" ]]; then
latest_tag="${latest_tag#v}"
latest_tag_core="${latest_tag%%+*}"
latest_tag_core="${latest_tag_core%%-*}"
IFS='.' read -r rel_major rel_minor rel_patch _ <<< "$latest_tag_core"
rel_patch="${rel_patch:-0}"
if [[ "$rel_major" =~ ^[0-9]+$ && "$rel_minor" =~ ^[0-9]+$ && "$rel_patch" =~ ^[0-9]+$ ]]; then
if [[ "$rel_major" == "$major" && "$rel_minor" == "$minor" ]]; then
target_patch=$((rel_patch + 1))
fi
fi
fi
new_version="${major}.${minor}.${target_patch}"
echo "semver_base=${new_version}" >> "$GITHUB_OUTPUT"
echo "major=${major}" >> "$GITHUB_OUTPUT"
echo "minor=${minor}" >> "$GITHUB_OUTPUT"
echo "patch=${target_patch}" >> "$GITHUB_OUTPUT"
echo "Using version base: ${new_version}"
publish-mcp-server:
needs: [read-version, generate-build-number]
@@ -70,7 +120,7 @@ jobs:
- name: Determine version
id: version
run: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
VERSION="${{needs.read-version.outputs.major_minor}}-test"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Publishing MCP server version: $VERSION"
@@ -185,7 +235,7 @@ jobs:
ghcr.io/oneuptime/llm
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -204,10 +254,13 @@ jobs:
# run: mkdir -p ./LLM/Models && cd ./LLM/Models && git clone https://${{ secrets.HUGGING_FACE_USERNAME }}:${{ secrets.HUGGING_FACE_PASSWORD }}@huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -242,7 +295,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image llm \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./LLM/Dockerfile \
--context ./LLM \
--platforms linux/amd64 \
@@ -264,7 +317,7 @@ jobs:
ghcr.io/oneuptime/nginx
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -276,10 +329,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -314,7 +370,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image nginx \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Nginx/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -336,7 +392,7 @@ jobs:
ghcr.io/oneuptime/e2e
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -348,10 +404,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -386,7 +445,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image e2e \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./E2E/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -407,7 +466,7 @@ jobs:
ghcr.io/oneuptime/test-server
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -419,10 +478,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -457,7 +519,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image test-server \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./TestServer/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -478,7 +540,7 @@ jobs:
ghcr.io/oneuptime/otel-collector
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -490,10 +552,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -528,7 +593,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image otel-collector \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./OTelCollector/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -549,7 +614,7 @@ jobs:
ghcr.io/oneuptime/isolated-vm
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -561,10 +626,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -599,7 +667,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image isolated-vm \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./IsolatedVM/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -620,7 +688,7 @@ jobs:
ghcr.io/oneuptime/home
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -632,10 +700,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -670,7 +741,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image home \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Home/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -693,7 +764,7 @@ jobs:
ghcr.io/oneuptime/status-page
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -705,10 +776,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -743,7 +817,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image status-page \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./StatusPage/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -766,7 +840,7 @@ jobs:
ghcr.io/oneuptime/test
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -778,10 +852,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -816,7 +893,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image test \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Tests/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -837,7 +914,7 @@ jobs:
ghcr.io/oneuptime/probe-ingest
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -849,10 +926,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -887,7 +967,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image probe-ingest \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./ProbeIngest/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -910,7 +990,7 @@ jobs:
ghcr.io/oneuptime/server-monitor-ingest
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -922,10 +1002,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -960,7 +1043,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image server-monitor-ingest \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./ServerMonitorIngest/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -984,7 +1067,7 @@ jobs:
ghcr.io/oneuptime/incoming-request-ingest
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -996,10 +1079,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1034,7 +1120,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image incoming-request-ingest \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./IncomingRequestIngest/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1055,7 +1141,7 @@ jobs:
ghcr.io/oneuptime/telemetry
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1067,10 +1153,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1105,7 +1194,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image telemetry \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Telemetry/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1126,7 +1215,7 @@ jobs:
ghcr.io/oneuptime/probe
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1138,10 +1227,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1176,7 +1268,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image probe \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Probe/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1197,7 +1289,7 @@ jobs:
ghcr.io/oneuptime/dashboard
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1209,10 +1301,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1247,7 +1342,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image dashboard \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Dashboard/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1268,7 +1363,7 @@ jobs:
ghcr.io/oneuptime/admin-dashboard
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1280,10 +1375,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1318,7 +1416,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image admin-dashboard \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./AdminDashboard/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1339,7 +1437,7 @@ jobs:
ghcr.io/oneuptime/app
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1351,10 +1449,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1389,7 +1490,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image app \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./App/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1413,7 +1514,7 @@ jobs:
ghcr.io/oneuptime/api-reference
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1425,10 +1526,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1463,7 +1567,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image api-reference \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./APIReference/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1486,7 +1590,7 @@ jobs:
ghcr.io/oneuptime/accounts
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1498,10 +1602,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1536,7 +1643,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image accounts \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Accounts/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1557,7 +1664,7 @@ jobs:
ghcr.io/oneuptime/worker
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1569,10 +1676,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1607,7 +1717,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image worker \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Worker/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1628,7 +1738,7 @@ jobs:
ghcr.io/oneuptime/copilot
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1640,10 +1750,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1678,7 +1791,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image copilot \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Copilot/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1700,7 +1813,7 @@ jobs:
ghcr.io/oneuptime/workflow
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1712,10 +1825,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1750,7 +1866,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image workflow \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Workflow/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1772,7 +1888,7 @@ jobs:
ghcr.io/oneuptime/docs
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
@@ -1784,10 +1900,13 @@ jobs:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
uses: nick-fields/retry@v3
@@ -1822,7 +1941,7 @@ jobs:
command: |
bash ./Scripts/GHA/build_docker_images.sh \
--image docs \
--version "${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test" \
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Docs/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
@@ -1839,7 +1958,7 @@ jobs:
ref: ${{ github.ref }}
- name: Skip Terraform provider publish for test release
run: |
VERSION="${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}-test"
VERSION="${{needs.read-version.outputs.major_minor}}-test"
echo "Skipping Terraform provider publish for test release $VERSION"
@@ -1871,7 +1990,7 @@ jobs:
test-e2e-test-saas:
runs-on: ubuntu-latest
needs: [test-helm-chart]
needs: [test-helm-chart, generate-build-number, read-version]
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
@@ -1892,9 +2011,30 @@ jobs:
- uses: actions/setup-node@v4
with:
node-version: latest
- run: npm run prerun && bash ./Tests/Scripts/change-release-to-test-tag.sh
- name: Preinstall
run: |
set -euo pipefail
npm run prerun
- name: Pin APP_TAG to test release
run: |
set -euo pipefail
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
if [ -f config.env ]; then
if grep -q '^APP_TAG=' config.env; then
sed -i "s/^APP_TAG=.*/APP_TAG=${SANITIZED_VERSION}/" config.env
else
echo "APP_TAG=${SANITIZED_VERSION}" >> config.env
fi
else
echo "APP_TAG=${SANITIZED_VERSION}" > config.env
fi
- name: Start Server with release tag
run: npm run start
run: |
set -euo pipefail
export $(grep -v '^#' config.env | xargs)
export APP_TAG=${{needs.read-version.outputs.major_minor}}-test
npm run start
- name: Wait for server to start
run: bash ./Tests/Scripts/status-check.sh http://localhost
- name: Run E2E Tests. Run docker container e2e in docker compose file
@@ -1903,7 +2043,10 @@ jobs:
timeout_minutes: 90
max_attempts: 3
on_retry_command: docker compose -f docker-compose.dev.yml down -v || true
command: export $(grep -v '^#' config.env | xargs) && docker compose -f docker-compose.dev.yml up --exit-code-from e2e --abort-on-container-exit e2e || (docker compose -f docker-compose.dev.yml logs e2e && exit 1)
command: |
export $(grep -v '^#' config.env | xargs)
export APP_TAG=${{needs.read-version.outputs.major_minor}}-test
docker compose -f docker-compose.dev.yml up --exit-code-from e2e --abort-on-container-exit e2e || (docker compose -f docker-compose.dev.yml logs e2e && exit 1)
- name: Upload test results
uses: actions/upload-artifact@v4
# Run this on failure
@@ -1929,7 +2072,7 @@ jobs:
test-e2e-test-self-hosted:
runs-on: ubuntu-latest
# After all the jobs runs
needs: [test-helm-chart]
needs: [test-helm-chart, generate-build-number, read-version]
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
@@ -1950,9 +2093,30 @@ jobs:
- uses: actions/setup-node@v4
with:
node-version: latest
- run: npm run prerun && bash ./Tests/Scripts/change-release-to-test-tag.sh
- name: Preinstall
run: |
set -euo pipefail
npm run prerun
- name: Pin APP_TAG to test release
run: |
set -euo pipefail
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
if [ -f config.env ]; then
if grep -q '^APP_TAG=' config.env; then
sed -i "s/^APP_TAG=.*/APP_TAG=${SANITIZED_VERSION}/" config.env
else
echo "APP_TAG=${SANITIZED_VERSION}" >> config.env
fi
else
echo "APP_TAG=${SANITIZED_VERSION}" > config.env
fi
- name: Start Server with release tag
run: npm run start
run: |
set -euo pipefail
export $(grep -v '^#' config.env | xargs)
export APP_TAG=${{needs.read-version.outputs.major_minor}}-test
npm run start
- name: Wait for server to start
run: bash ./Tests/Scripts/status-check.sh http://localhost
- name: Run E2E Tests. Run docker container e2e in docker compose file
@@ -1961,7 +2125,10 @@ jobs:
timeout_minutes: 90
max_attempts: 3
on_retry_command: docker compose -f docker-compose.dev.yml down -v || true
command: export $(grep -v '^#' config.env | xargs) && docker compose -f docker-compose.dev.yml up --exit-code-from e2e --abort-on-container-exit e2e || (docker compose -f docker-compose.dev.yml logs e2e && exit 1)
command: |
export $(grep -v '^#' config.env | xargs)
export APP_TAG=${{needs.read-version.outputs.major_minor}}-test
docker compose -f docker-compose.dev.yml up --exit-code-from e2e --abort-on-container-exit e2e || (docker compose -f docker-compose.dev.yml logs e2e && exit 1)
- name: Upload test results
uses: actions/upload-artifact@v4
# Run this on failure

View File

@@ -29,6 +29,7 @@
"@bull-board/express": "^5.21.4",
"@clickhouse/client": "^1.10.1",
"@elastic/elasticsearch": "^8.12.1",
"@hcaptcha/react-hcaptcha": "^1.14.0",
"@monaco-editor/react": "^4.4.6",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/api-logs": "^0.206.0",

View File

@@ -33,6 +33,7 @@
"@bull-board/express": "^5.21.4",
"@clickhouse/client": "^1.10.1",
"@elastic/elasticsearch": "^8.12.1",
"@hcaptcha/react-hcaptcha": "^1.14.0",
"@monaco-editor/react": "^4.4.6",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/api-logs": "^0.206.0",

View File

@@ -7,10 +7,20 @@ import {
import Route from "Common/Types/API/Route";
import URL from "Common/Types/API/URL";
import { JSONArray, JSONObject } from "Common/Types/JSON";
import ModelForm, { FormType } from "Common/UI/Components/Forms/ModelForm";
import ModelForm, {
FormType,
ModelField,
} from "Common/UI/Components/Forms/ModelForm";
import { CustomElementProps } from "Common/UI/Components/Forms/Types/Field";
import FormValues from "Common/UI/Components/Forms/Types/FormValues";
import FormFieldSchemaType from "Common/UI/Components/Forms/Types/FormFieldSchemaType";
import Link from "Common/UI/Components/Link/Link";
import { DASHBOARD_URL } from "Common/UI/Config";
import Captcha from "Common/UI/Components/Captcha/Captcha";
import {
DASHBOARD_URL,
CAPTCHA_ENABLED,
CAPTCHA_SITE_KEY,
} from "Common/UI/Config";
import OneUptimeLogo from "Common/UI/Images/logos/OneUptimeSVG/3-transparent.svg";
import EditionLabel from "Common/UI/Components/EditionLabel/EditionLabel";
import UiAnalytics from "Common/UI/Utils/Analytics";
@@ -73,6 +83,84 @@ const LoginPage: () => JSX.Element = () => {
const [twofactorAuthError, setTwoFactorAuthError] =
React.useState<string>("");
const isCaptchaEnabled: boolean =
CAPTCHA_ENABLED && Boolean(CAPTCHA_SITE_KEY);
const [shouldResetCaptcha, setShouldResetCaptcha] =
React.useState<boolean>(false);
const [captchaResetSignal, setCaptchaResetSignal] = React.useState<number>(0);
const handleCaptchaReset: () => void = React.useCallback(() => {
setCaptchaResetSignal((current: number) => {
return current + 1;
});
}, []);
let loginFields: Array<ModelField<User>> = [
{
field: {
email: true,
},
fieldType: FormFieldSchemaType.Email,
placeholder: "jeff@example.com",
required: true,
disabled: Boolean(initialValues && initialValues["email"]),
title: "Email",
dataTestId: "email",
disableSpellCheck: true,
},
{
field: {
password: true,
},
title: "Password",
required: true,
validation: {
minLength: 6,
},
fieldType: FormFieldSchemaType.Password,
sideLink: {
text: "Forgot password?",
url: new Route("/accounts/forgot-password"),
openLinkInNewTab: false,
},
dataTestId: "password",
disableSpellCheck: true,
},
];
if (isCaptchaEnabled) {
loginFields = loginFields.concat([
{
overrideField: {
captchaToken: true,
},
overrideFieldKey: "captchaToken",
fieldType: FormFieldSchemaType.CustomComponent,
title: "Human Verification",
description:
"Complete the captcha challenge so we know you're not a bot.",
required: true,
showEvenIfPermissionDoesNotExist: true,
getCustomElement: (
_values: FormValues<User>,
customProps: CustomElementProps,
) => {
return (
<Captcha
siteKey={CAPTCHA_SITE_KEY}
resetSignal={captchaResetSignal}
error={customProps.error}
onTokenChange={(token: string) => {
customProps.onChange?.(token);
}}
onBlur={customProps.onBlur}
/>
);
},
},
]);
}
useAsyncEffect(async () => {
if (Navigation.getQueryStringByName("email")) {
setInitialValues({
@@ -228,45 +316,41 @@ const LoginPage: () => JSX.Element = () => {
modelType={User}
id="login-form"
name="Login"
fields={[
{
field: {
email: true,
},
fieldType: FormFieldSchemaType.Email,
placeholder: "jeff@example.com",
required: true,
disabled: Boolean(initialValues && initialValues["email"]),
title: "Email",
dataTestId: "email",
disableSpellCheck: true,
},
{
field: {
password: true,
},
title: "Password",
required: true,
validation: {
minLength: 6,
},
fieldType: FormFieldSchemaType.Password,
sideLink: {
text: "Forgot password?",
url: new Route("/accounts/forgot-password"),
openLinkInNewTab: false,
},
dataTestId: "password",
disableSpellCheck: true,
},
]}
fields={loginFields}
createOrUpdateApiUrl={apiUrl}
formType={FormType.Create}
submitButtonText={"Login"}
onBeforeCreate={(data: User) => {
onBeforeCreate={(data: User, miscDataProps: JSONObject) => {
if (isCaptchaEnabled) {
const captchaToken: string | undefined = (
miscDataProps["captchaToken"] as string | undefined
)
?.toString()
.trim();
if (!captchaToken) {
throw new Error(
"Please complete the captcha challenge before signing in.",
);
}
miscDataProps["captchaToken"] = captchaToken;
setShouldResetCaptcha(true);
}
setInitialValues(User.toJSON(data, User));
return Promise.resolve(data);
}}
onLoadingChange={(loading: boolean) => {
if (!isCaptchaEnabled) {
return;
}
if (!loading && shouldResetCaptcha) {
setShouldResetCaptcha(false);
handleCaptchaReset();
}
}}
onSuccess={(
value: User | JSONObject,
miscData: JSONObject | undefined,

View File

@@ -4,12 +4,22 @@ import URL from "Common/Types/API/URL";
import Dictionary from "Common/Types/Dictionary";
import { JSONObject } from "Common/Types/JSON";
import ErrorMessage from "Common/UI/Components/ErrorMessage/ErrorMessage";
import ModelForm, { FormType } from "Common/UI/Components/Forms/ModelForm";
import Fields from "Common/UI/Components/Forms/Types/Fields";
import ModelForm, {
FormType,
ModelField,
} from "Common/UI/Components/Forms/ModelForm";
import { CustomElementProps } from "Common/UI/Components/Forms/Types/Field";
import FormFieldSchemaType from "Common/UI/Components/Forms/Types/FormFieldSchemaType";
import FormValues from "Common/UI/Components/Forms/Types/FormValues";
import Link from "Common/UI/Components/Link/Link";
import PageLoader from "Common/UI/Components/Loader/PageLoader";
import { BILLING_ENABLED, DASHBOARD_URL } from "Common/UI/Config";
import Captcha from "Common/UI/Components/Captcha/Captcha";
import {
BILLING_ENABLED,
DASHBOARD_URL,
CAPTCHA_ENABLED,
CAPTCHA_SITE_KEY,
} from "Common/UI/Config";
import OneUptimeLogo from "Common/UI/Images/logos/OneUptimeSVG/3-transparent.svg";
import BaseAPI from "Common/UI/Utils/API/API";
import UiAnalytics from "Common/UI/Utils/Analytics";
@@ -36,6 +46,19 @@ const RegisterPage: () => JSX.Element = () => {
undefined,
);
const isCaptchaEnabled: boolean =
CAPTCHA_ENABLED && Boolean(CAPTCHA_SITE_KEY);
const [shouldResetCaptcha, setShouldResetCaptcha] =
React.useState<boolean>(false);
const [captchaResetSignal, setCaptchaResetSignal] = React.useState<number>(0);
const handleCaptchaReset: () => void = React.useCallback(() => {
setCaptchaResetSignal((current: number) => {
return current + 1;
});
}, []);
if (UserUtil.isLoggedIn()) {
Navigation.navigate(DASHBOARD_URL);
}
@@ -93,7 +116,7 @@ const RegisterPage: () => JSX.Element = () => {
}
}, []);
let formFields: Fields<User> = [
let formFields: Array<ModelField<User>> = [
{
field: {
email: true,
@@ -183,6 +206,39 @@ const RegisterPage: () => JSX.Element = () => {
},
]);
if (isCaptchaEnabled) {
formFields = formFields.concat([
{
overrideField: {
captchaToken: true,
},
overrideFieldKey: "captchaToken",
fieldType: FormFieldSchemaType.CustomComponent,
title: "Human Verification",
description:
"Complete the captcha challenge so we know you're not a bot.",
required: true,
showEvenIfPermissionDoesNotExist: true,
getCustomElement: (
_values: FormValues<User>,
customProps: CustomElementProps,
) => {
return (
<Captcha
siteKey={CAPTCHA_SITE_KEY}
resetSignal={captchaResetSignal}
error={customProps.error}
onTokenChange={(token: string) => {
customProps.onChange?.(token);
}}
onBlur={customProps.onBlur}
/>
);
},
},
]);
}
if (error) {
return <ErrorMessage message={error} />;
}
@@ -222,7 +278,27 @@ const RegisterPage: () => JSX.Element = () => {
maxPrimaryButtonWidth={true}
fields={formFields}
createOrUpdateApiUrl={apiUrl}
onBeforeCreate={(item: User): Promise<User> => {
onBeforeCreate={(
item: User,
miscDataProps: JSONObject,
): Promise<User> => {
if (isCaptchaEnabled) {
const captchaToken: string | undefined = (
miscDataProps["captchaToken"] as string | undefined
)
?.toString()
.trim();
if (!captchaToken) {
throw new Error(
"Please complete the captcha challenge before signing up.",
);
}
miscDataProps["captchaToken"] = captchaToken;
setShouldResetCaptcha(true);
}
const utmParams: Dictionary<string> = UserUtil.getUtmParams();
if (utmParams && Object.keys(utmParams).length > 0) {
@@ -240,6 +316,16 @@ const RegisterPage: () => JSX.Element = () => {
}}
formType={FormType.Create}
submitButtonText={"Sign Up"}
onLoadingChange={(loading: boolean) => {
if (!isCaptchaEnabled) {
return;
}
if (!loading && shouldResetCaptcha) {
setShouldResetCaptcha(false);
handleCaptchaReset();
}
}}
onSuccess={(value: User, miscData: JSONObject | undefined) => {
if (value && value.email) {
UiAnalytics.userAuth(value.email);

View File

@@ -32,6 +32,7 @@
"@bull-board/express": "^5.21.4",
"@clickhouse/client": "^1.10.1",
"@elastic/elasticsearch": "^8.12.1",
"@hcaptcha/react-hcaptcha": "^1.14.0",
"@monaco-editor/react": "^4.4.6",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/api-logs": "^0.206.0",

View File

@@ -33,6 +33,7 @@ import IncidentInternalNoteAPI from "Common/Server/API/IncidentInternalNoteAPI";
import IncidentPublicNoteAPI from "Common/Server/API/IncidentPublicNoteAPI";
import ScheduledMaintenanceInternalNoteAPI from "Common/Server/API/ScheduledMaintenanceInternalNoteAPI";
import ScheduledMaintenancePublicNoteAPI from "Common/Server/API/ScheduledMaintenancePublicNoteAPI";
import IncidentAPI from "Common/Server/API/IncidentAPI";
// User Notification methods.
import UserEmailAPI from "Common/Server/API/UserEmailAPI";
import UserNotificationLogTimelineAPI from "Common/Server/API/UserOnCallLogTimelineAPI";
@@ -111,9 +112,6 @@ import IncidentOwnerTeamService, {
import IncidentOwnerUserService, {
Service as IncidentOwnerUserServiceType,
} from "Common/Server/Services/IncidentOwnerUserService";
import IncidentService, {
Service as IncidentServiceType,
} from "Common/Server/Services/IncidentService";
import IncidentSeverityService, {
Service as IncidentSeverityServiceType,
} from "Common/Server/Services/IncidentSeverityService";
@@ -393,7 +391,6 @@ import AlertSeverity from "Common/Models/DatabaseModels/AlertSeverity";
import AlertState from "Common/Models/DatabaseModels/AlertState";
import AlertStateTimeline from "Common/Models/DatabaseModels/AlertStateTimeline";
import Incident from "Common/Models/DatabaseModels/Incident";
import IncidentCustomField from "Common/Models/DatabaseModels/IncidentCustomField";
import IncidentNoteTemplate from "Common/Models/DatabaseModels/IncidentNoteTemplate";
import IncidentPostmortemTemplate from "Common/Models/DatabaseModels/IncidentPostmortemTemplate";
@@ -1268,13 +1265,7 @@ const BaseAPIFeatureSet: FeatureSet = {
).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<Incident, IncidentServiceType>(
Incident,
IncidentService,
).getRouter(),
);
app.use(`/${APP_NAME.toLocaleLowerCase()}`, new IncidentAPI().getRouter());
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,

View File

@@ -38,6 +38,7 @@ import Express, {
getClientIp,
headerValueToString,
} from "Common/Server/Utils/Express";
import CaptchaUtil from "Common/Server/Utils/Captcha";
import logger from "Common/Server/Utils/Logger";
import Response from "Common/Server/Utils/Response";
import TotpAuth from "Common/Server/Utils/TotpAuth";
@@ -107,6 +108,16 @@ router.post(
);
}
const miscDataProps: JSONObject =
(req.body["miscDataProps"] as JSONObject) || {};
await CaptchaUtil.verifyCaptcha({
token:
(miscDataProps["captchaToken"] as string | undefined) ||
(req.body["captchaToken"] as string | undefined),
remoteIp: getClientIp(req) || null,
});
const data: JSONObject = req.body["data"];
/* Creating a type that is a partial of the TBaseModel type. */
@@ -804,6 +815,18 @@ const login: LoginFunction = async (options: {
const verifyWebAuthn: boolean = options.verifyWebAuthn;
try {
const miscDataProps: JSONObject =
(req.body["miscDataProps"] as JSONObject) || {};
if (!verifyTotpAuth && !verifyWebAuthn) {
await CaptchaUtil.verifyCaptcha({
token:
(miscDataProps["captchaToken"] as string | undefined) ||
(req.body["captchaToken"] as string | undefined),
remoteIp: getClientIp(req) || null,
});
}
const data: JSONObject = req.body["data"];
logger.debug("Login request data: " + JSON.stringify(req.body, null, 2));

View File

@@ -470,174 +470,181 @@ router.get(
},
);
// Update User - PUT /scim/v2/Users/{id}
router.put(
"/scim/v2/:projectScimId/Users/:userId",
SCIMMiddleware.isAuthorizedSCIMRequest,
async (
req: ExpressRequest,
res: ExpressResponse,
next: NextFunction,
): Promise<void> => {
try {
logger.debug(
`SCIM Update user request for userId: ${req.params["userId"]}, projectScimId: ${req.params["projectScimId"]}`,
);
const oneuptimeRequest: OneUptimeRequest = req as OneUptimeRequest;
const bearerData: JSONObject =
oneuptimeRequest.bearerTokenData as JSONObject;
const projectId: ObjectID = bearerData["projectId"] as ObjectID;
const userId: string = req.params["userId"]!;
const scimUser: JSONObject = req.body;
const handleUserUpdate: (
req: ExpressRequest,
res: ExpressResponse,
next: NextFunction,
) => Promise<void> = async (
req: ExpressRequest,
res: ExpressResponse,
next: NextFunction,
): Promise<void> => {
try {
logger.debug(
`SCIM Update user request for userId: ${req.params["userId"]}, projectScimId: ${req.params["projectScimId"]}`,
);
const oneuptimeRequest: OneUptimeRequest = req as OneUptimeRequest;
const bearerData: JSONObject =
oneuptimeRequest.bearerTokenData as JSONObject;
const projectId: ObjectID = bearerData["projectId"] as ObjectID;
const userId: string = req.params["userId"]!;
const scimUser: JSONObject = req.body;
logger.debug(
`SCIM Update user - projectId: ${projectId}, userId: ${userId}`,
);
logger.debug(
`SCIM Update user - projectId: ${projectId}, userId: ${userId}`,
);
logger.debug(
`Request body for SCIM Update user: ${JSON.stringify(scimUser, null, 2)}`,
);
logger.debug(
`Request body for SCIM Update user: ${JSON.stringify(scimUser, null, 2)}`,
);
if (!userId) {
throw new BadRequestException("User ID is required");
if (!userId) {
throw new BadRequestException("User ID is required");
}
// Check if user exists and is part of the project
const projectUser: TeamMember | null = await TeamMemberService.findOneBy({
query: {
projectId: projectId,
userId: new ObjectID(userId),
},
select: {
userId: true,
user: {
_id: true,
email: true,
name: true,
createdAt: true,
updatedAt: true,
},
},
props: { isRoot: true },
});
if (!projectUser || !projectUser.user) {
logger.debug(
`SCIM Update user - user not found or not part of project for userId: ${userId}`,
);
throw new NotFoundException("User not found or not part of this project");
}
// Update user information
const email: string =
(scimUser["userName"] as string) ||
((scimUser["emails"] as JSONObject[])?.[0]?.["value"] as string);
const name: string = parseNameFromSCIM(scimUser);
const active: boolean = scimUser["active"] as boolean;
logger.debug(
`SCIM Update user - email: ${email}, name: ${name}, active: ${active}`,
);
const scimConfig: ProjectSCIM = bearerData["scimConfig"] as ProjectSCIM;
// Handle user deactivation by removing from teams
if (active === false && !scimConfig.enablePushGroups) {
logger.debug(
`SCIM Update user - user marked as inactive, removing from teams`,
);
await handleUserTeamOperations(
"remove",
projectId,
new ObjectID(userId),
scimConfig,
);
logger.debug(
`SCIM Update user - user successfully removed from teams due to deactivation`,
);
}
// Handle user activation by adding to teams
if (active === true && !scimConfig.enablePushGroups) {
logger.debug(`SCIM Update user - user marked as active, adding to teams`);
await handleUserTeamOperations(
"add",
projectId,
new ObjectID(userId),
scimConfig,
);
logger.debug(
`SCIM Update user - user successfully added to teams due to activation`,
);
}
if (email || name) {
const updateData: any = {};
if (email) {
updateData.email = new Email(email);
}
if (name) {
updateData.name = new Name(name);
}
// Check if user exists and is part of the project
const projectUser: TeamMember | null = await TeamMemberService.findOneBy({
query: {
projectId: projectId,
userId: new ObjectID(userId),
},
logger.debug(
`SCIM Update user - updating user with data: ${JSON.stringify(updateData)}`,
);
await UserService.updateOneById({
id: new ObjectID(userId),
data: updateData,
props: { isRoot: true },
});
logger.debug(`SCIM Update user - user updated successfully`);
// Fetch updated user
const updatedUser: User | null = await UserService.findOneById({
id: new ObjectID(userId),
select: {
userId: true,
user: {
_id: true,
email: true,
name: true,
createdAt: true,
updatedAt: true,
},
_id: true,
email: true,
name: true,
createdAt: true,
updatedAt: true,
},
props: { isRoot: true },
});
if (!projectUser || !projectUser.user) {
logger.debug(
`SCIM Update user - user not found or not part of project for userId: ${userId}`,
);
throw new NotFoundException(
"User not found or not part of this project",
if (updatedUser) {
const user: JSONObject = formatUserForSCIM(
updatedUser,
req,
req.params["projectScimId"]!,
"project",
);
return Response.sendJsonObjectResponse(req, res, user);
}
// Update user information
const email: string =
(scimUser["userName"] as string) ||
((scimUser["emails"] as JSONObject[])?.[0]?.["value"] as string);
const name: string = parseNameFromSCIM(scimUser);
const active: boolean = scimUser["active"] as boolean;
logger.debug(
`SCIM Update user - email: ${email}, name: ${name}, active: ${active}`,
);
const scimConfig: ProjectSCIM = bearerData["scimConfig"] as ProjectSCIM;
// Handle user deactivation by removing from teams
if (active === false && !scimConfig.enablePushGroups) {
logger.debug(
`SCIM Update user - user marked as inactive, removing from teams`,
);
await handleUserTeamOperations(
"remove",
projectId,
new ObjectID(userId),
scimConfig,
);
logger.debug(
`SCIM Update user - user successfully removed from teams due to deactivation`,
);
}
// Handle user activation by adding to teams
if (active === true && !scimConfig.enablePushGroups) {
logger.debug(
`SCIM Update user - user marked as active, adding to teams`,
);
await handleUserTeamOperations(
"add",
projectId,
new ObjectID(userId),
scimConfig,
);
logger.debug(
`SCIM Update user - user successfully added to teams due to activation`,
);
}
if (email || name) {
const updateData: any = {};
if (email) {
updateData.email = new Email(email);
}
if (name) {
updateData.name = new Name(name);
}
logger.debug(
`SCIM Update user - updating user with data: ${JSON.stringify(updateData)}`,
);
await UserService.updateOneById({
id: new ObjectID(userId),
data: updateData,
props: { isRoot: true },
});
logger.debug(`SCIM Update user - user updated successfully`);
// Fetch updated user
const updatedUser: User | null = await UserService.findOneById({
id: new ObjectID(userId),
select: {
_id: true,
email: true,
name: true,
createdAt: true,
updatedAt: true,
},
props: { isRoot: true },
});
if (updatedUser) {
const user: JSONObject = formatUserForSCIM(
updatedUser,
req,
req.params["projectScimId"]!,
"project",
);
return Response.sendJsonObjectResponse(req, res, user);
}
}
logger.debug(
`SCIM Update user - no updates made, returning existing user`,
);
// If no updates were made, return the existing user
const user: JSONObject = formatUserForSCIM(
projectUser.user,
req,
req.params["projectScimId"]!,
"project",
);
return Response.sendJsonObjectResponse(req, res, user);
} catch (err) {
logger.error(err);
return next(err);
}
},
logger.debug(`SCIM Update user - no updates made, returning existing user`);
// If no updates were made, return the existing user
const user: JSONObject = formatUserForSCIM(
projectUser.user,
req,
req.params["projectScimId"]!,
"project",
);
return Response.sendJsonObjectResponse(req, res, user);
} catch (err) {
logger.error(err);
return next(err);
}
};
// Update User - PUT /scim/v2/Users/{id}
router.put(
"/scim/v2/:projectScimId/Users/:userId",
SCIMMiddleware.isAuthorizedSCIMRequest,
handleUserUpdate,
);
// Update User - PATCH /scim/v2/Users/{id}
router.patch(
"/scim/v2/:projectScimId/Users/:userId",
SCIMMiddleware.isAuthorizedSCIMRequest,
handleUserUpdate,
);
// Groups endpoint - GET /scim/v2/Groups

View File

@@ -355,45 +355,124 @@ router.post(
},
);
// Update Status Page User - PUT /status-page-scim/v2/Users/{id}
router.put(
"/status-page-scim/v2/:statusPageScimId/Users/:userId",
SCIMMiddleware.isAuthorizedSCIMRequest,
async (
req: ExpressRequest,
res: ExpressResponse,
next: NextFunction,
): Promise<void> => {
try {
logger.debug(
`Status Page SCIM Update user request for userId: ${req.params["userId"]}, statusPageScimId: ${req.params["statusPageScimId"]}`,
);
const oneuptimeRequest: OneUptimeRequest = req as OneUptimeRequest;
const bearerData: JSONObject =
oneuptimeRequest.bearerTokenData as JSONObject;
const statusPageId: ObjectID = bearerData["statusPageId"] as ObjectID;
const userId: string = req.params["userId"]!;
const scimUser: JSONObject = req.body;
const handleStatusPageUserUpdate: (
req: ExpressRequest,
res: ExpressResponse,
next: NextFunction,
) => Promise<void> = async (
req: ExpressRequest,
res: ExpressResponse,
next: NextFunction,
): Promise<void> => {
try {
logger.debug(
`Status Page SCIM Update user request for userId: ${req.params["userId"]}, statusPageScimId: ${req.params["statusPageScimId"]}`,
);
const oneuptimeRequest: OneUptimeRequest = req as OneUptimeRequest;
const bearerData: JSONObject =
oneuptimeRequest.bearerTokenData as JSONObject;
const statusPageId: ObjectID = bearerData["statusPageId"] as ObjectID;
const userId: string = req.params["userId"]!;
const scimUser: JSONObject = req.body;
logger.debug(
`Status Page SCIM Update user - statusPageId: ${statusPageId}, userId: ${userId}`,
);
logger.debug(
`Request body for Status Page SCIM Update user: ${JSON.stringify(scimUser, null, 2)}`,
);
if (!userId) {
throw new BadRequestException("User ID is required");
}
// Check if user exists and belongs to this status page
const statusPageUser: StatusPagePrivateUser | null =
await StatusPagePrivateUserService.findOneBy({
query: {
statusPageId: statusPageId,
_id: new ObjectID(userId),
},
select: {
_id: true,
email: true,
createdAt: true,
updatedAt: true,
},
props: { isRoot: true },
});
if (!statusPageUser) {
logger.debug(
`Status Page SCIM Update user - statusPageId: ${statusPageId}, userId: ${userId}`,
`Status Page SCIM Update user - user not found for userId: ${userId}`,
);
throw new NotFoundException(
"User not found or not part of this status page",
);
}
// Update user information
const email: string =
(scimUser["userName"] as string) ||
((scimUser["emails"] as JSONObject[])?.[0]?.["value"] as string);
const active: boolean = scimUser["active"] as boolean;
logger.debug(
`Status Page SCIM Update user - email: ${email}, active: ${active}`,
);
// Handle user deactivation by deleting from status page
if (active === false) {
logger.debug(
`Status Page SCIM Update user - user marked as inactive, removing from status page`,
);
logger.debug(
`Request body for Status Page SCIM Update user: ${JSON.stringify(scimUser, null, 2)}`,
);
const scimConfig: StatusPageSCIM = bearerData[
"scimConfig"
] as StatusPageSCIM;
if (scimConfig.autoDeprovisionUsers) {
await StatusPagePrivateUserService.deleteOneById({
id: new ObjectID(userId),
props: { isRoot: true },
});
if (!userId) {
throw new BadRequestException("User ID is required");
logger.debug(
`Status Page SCIM Update user - user removed from status page`,
);
// Return empty response for deleted user
return Response.sendJsonObjectResponse(req, res, {});
}
}
// Check if user exists and belongs to this status page
const statusPageUser: StatusPagePrivateUser | null =
await StatusPagePrivateUserService.findOneBy({
query: {
statusPageId: statusPageId,
_id: new ObjectID(userId),
},
// Prepare update data
const updateData: {
email?: Email;
} = {};
if (email && email !== statusPageUser.email?.toString()) {
updateData.email = new Email(email);
}
// Only update if there are changes
if (Object.keys(updateData).length > 0) {
logger.debug(
`Status Page SCIM Update user - updating user with data: ${JSON.stringify(updateData)}`,
);
await StatusPagePrivateUserService.updateOneById({
id: new ObjectID(userId),
data: updateData,
props: { isRoot: true },
});
logger.debug(`Status Page SCIM Update user - user updated successfully`);
// Fetch updated user
const updatedUser: StatusPagePrivateUser | null =
await StatusPagePrivateUserService.findOneById({
id: new ObjectID(userId),
select: {
_id: true,
email: true,
@@ -403,116 +482,48 @@ router.put(
props: { isRoot: true },
});
if (!statusPageUser) {
logger.debug(
`Status Page SCIM Update user - user not found for userId: ${userId}`,
);
throw new NotFoundException(
"User not found or not part of this status page",
if (updatedUser) {
const user: JSONObject = formatUserForSCIM(
updatedUser,
req,
req.params["statusPageScimId"]!,
"status-page",
);
return Response.sendJsonObjectResponse(req, res, user);
}
// Update user information
const email: string =
(scimUser["userName"] as string) ||
((scimUser["emails"] as JSONObject[])?.[0]?.["value"] as string);
const active: boolean = scimUser["active"] as boolean;
logger.debug(
`Status Page SCIM Update user - email: ${email}, active: ${active}`,
);
// Handle user deactivation by deleting from status page
if (active === false) {
logger.debug(
`Status Page SCIM Update user - user marked as inactive, removing from status page`,
);
const scimConfig: StatusPageSCIM = bearerData[
"scimConfig"
] as StatusPageSCIM;
if (scimConfig.autoDeprovisionUsers) {
await StatusPagePrivateUserService.deleteOneById({
id: new ObjectID(userId),
props: { isRoot: true },
});
logger.debug(
`Status Page SCIM Update user - user removed from status page`,
);
// Return empty response for deleted user
return Response.sendJsonObjectResponse(req, res, {});
}
}
// Prepare update data
const updateData: {
email?: Email;
} = {};
if (email && email !== statusPageUser.email?.toString()) {
updateData.email = new Email(email);
}
// Only update if there are changes
if (Object.keys(updateData).length > 0) {
logger.debug(
`Status Page SCIM Update user - updating user with data: ${JSON.stringify(updateData)}`,
);
await StatusPagePrivateUserService.updateOneById({
id: new ObjectID(userId),
data: updateData,
props: { isRoot: true },
});
logger.debug(
`Status Page SCIM Update user - user updated successfully`,
);
// Fetch updated user
const updatedUser: StatusPagePrivateUser | null =
await StatusPagePrivateUserService.findOneById({
id: new ObjectID(userId),
select: {
_id: true,
email: true,
createdAt: true,
updatedAt: true,
},
props: { isRoot: true },
});
if (updatedUser) {
const user: JSONObject = formatUserForSCIM(
updatedUser,
req,
req.params["statusPageScimId"]!,
"status-page",
);
return Response.sendJsonObjectResponse(req, res, user);
}
}
logger.debug(
`Status Page SCIM Update user - no updates made, returning existing user`,
);
// If no updates were made, return the existing user
const user: JSONObject = formatUserForSCIM(
statusPageUser,
req,
req.params["statusPageScimId"]!,
"status-page",
);
return Response.sendJsonObjectResponse(req, res, user);
} catch (err) {
logger.error(err);
return next(err);
}
},
logger.debug(
`Status Page SCIM Update user - no updates made, returning existing user`,
);
// If no updates were made, return the existing user
const user: JSONObject = formatUserForSCIM(
statusPageUser,
req,
req.params["statusPageScimId"]!,
"status-page",
);
return Response.sendJsonObjectResponse(req, res, user);
} catch (err) {
logger.error(err);
return next(err);
}
};
// Update Status Page User - PUT /status-page-scim/v2/Users/{id}
router.put(
"/status-page-scim/v2/:statusPageScimId/Users/:userId",
SCIMMiddleware.isAuthorizedSCIMRequest,
handleStatusPageUserUpdate,
);
// Update Status Page User - PATCH /status-page-scim/v2/Users/{id}
router.patch(
"/status-page-scim/v2/:statusPageScimId/Users/:userId",
SCIMMiddleware.isAuthorizedSCIMRequest,
handleStatusPageUserUpdate,
);
// Delete Status Page User - DELETE /status-page-scim/v2/Users/{id}

View File

@@ -0,0 +1,35 @@
{{> Start this}}
{{> CustomLogo this}}
{{> EmailTitle title=(concat "Postmortem Published: " incidentTitle) }}
{{> InfoBlock info="A postmortem has been published for an incident. Here are the details: "}}
{{> DetailBoxStart this }}
{{> DetailBoxField title=incidentTitle text="" }}
{{> DetailBoxField title="Resources Affected: " text=resourcesAffected }}
{{> DetailBoxField title="Severity: " text=incidentSeverity }}
{{> DetailBoxField title="Postmortem: " text="" }}
{{> DetailBoxField title="" text=postmortemNote }}
{{> DetailBoxEnd this }}
{{> InfoBlock info=(concat subscriberEmailNotificationFooterText "") }}
{{#if detailsUrl}}
{{> InfoBlock info=(concat "Find further information here: " detailsUrl)}}
{{else}}
{{> InfoBlock info=(concat "Find further information here: " statusPageUrl)}}
{{/if}}
{{> UnsubscribeBlock this}}
{{> VerticalSpace this}}
{{> End this}}

1
App/package-lock.json generated
View File

@@ -39,6 +39,7 @@
"@bull-board/express": "^5.21.4",
"@clickhouse/client": "^1.10.1",
"@elastic/elasticsearch": "^8.12.1",
"@hcaptcha/react-hcaptcha": "^1.14.0",
"@monaco-editor/react": "^4.4.6",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/api-logs": "^0.206.0",

View File

@@ -20,7 +20,11 @@ import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
@AllowAccessIfSubscriptionIsUnpaid()
@TenantColumn("projectId")
@TableAccessControl({
create: [Permission.ProjectOwner, Permission.CreateBillingPaymentMethod],
create: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
],
read: [
Permission.ProjectOwner,
Permission.ProjectUser,
@@ -28,7 +32,11 @@ import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
Permission.ProjectMember,
Permission.ReadBillingPaymentMethod,
],
delete: [Permission.ProjectOwner, Permission.DeleteBillingPaymentMethod],
delete: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.DeleteBillingPaymentMethod,
],
update: [],
})
@CrudApiEndpoint(new Route("/billing-payment-methods"))
@@ -45,7 +53,11 @@ import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
})
export default class BillingPaymentMethod extends BaseModel {
@ColumnAccessControl({
create: [Permission.ProjectOwner, Permission.CreateBillingPaymentMethod],
create: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
],
read: [
Permission.ProjectOwner,
Permission.ProjectUser,
@@ -77,7 +89,11 @@ export default class BillingPaymentMethod extends BaseModel {
public project?: Project = undefined;
@ColumnAccessControl({
create: [Permission.ProjectOwner, Permission.CreateBillingPaymentMethod],
create: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
@@ -103,7 +119,11 @@ export default class BillingPaymentMethod extends BaseModel {
public projectId?: ObjectID = undefined;
@ColumnAccessControl({
create: [Permission.ProjectOwner, Permission.CreateBillingPaymentMethod],
create: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
@@ -136,7 +156,11 @@ export default class BillingPaymentMethod extends BaseModel {
public createdByUser?: User = undefined;
@ColumnAccessControl({
create: [Permission.ProjectOwner, Permission.CreateBillingPaymentMethod],
create: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
@@ -218,7 +242,11 @@ export default class BillingPaymentMethod extends BaseModel {
public deletedByUserId?: ObjectID = undefined;
@ColumnAccessControl({
create: [Permission.ProjectOwner, Permission.CreateBillingPaymentMethod],
create: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
@@ -278,7 +306,11 @@ export default class BillingPaymentMethod extends BaseModel {
public paymentProviderCustomerId?: string = undefined;
@ColumnAccessControl({
create: [Permission.ProjectOwner, Permission.CreateBillingPaymentMethod],
create: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
@@ -298,7 +330,11 @@ export default class BillingPaymentMethod extends BaseModel {
public last4Digits?: string = undefined;
@ColumnAccessControl({
create: [Permission.ProjectOwner, Permission.CreateBillingPaymentMethod],
create: [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,

View File

@@ -7,6 +7,7 @@ import OnCallDutyPolicy from "./OnCallDutyPolicy";
import Probe from "./Probe";
import Project from "./Project";
import User from "./User";
import File from "./File";
import BaseModel from "./DatabaseBaseModel/DatabaseBaseModel";
import Route from "../../Types/API/Route";
import ColumnAccessControl from "../../Types/Database/AccessControl/ColumnAccessControl";
@@ -228,6 +229,43 @@ export default class Incident extends BaseModel {
})
public description?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@Index()
@TableColumn({
required: true,
type: TableColumnType.Date,
title: "Declared At",
description: "Date and time when this incident was declared.",
isDefaultValueColumn: true,
})
@Column({
type: ColumnType.Date,
nullable: false,
default: () => {
return "now()";
},
})
public declaredAt?: Date = undefined;
@Index()
@ColumnAccessControl({
create: [],
@@ -803,6 +841,77 @@ export default class Incident extends BaseModel {
})
public subscriberNotificationStatusMessage?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@TableColumn({
isDefaultValueColumn: true,
computed: true,
hideColumnInDocumentation: true,
type: TableColumnType.ShortText,
title: "Subscriber Notification Status on Postmortem Published",
description:
"Status of notification sent to subscribers about this incident postmortem",
defaultValue: StatusPageSubscriberNotificationStatus.Pending,
})
@Column({
type: ColumnType.ShortText,
default: StatusPageSubscriberNotificationStatus.Pending,
})
public subscriberNotificationStatusOnPostmortemPublished?: StatusPageSubscriberNotificationStatus =
undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@TableColumn({
type: TableColumnType.VeryLongText,
title: "Notification Status Message on Postmortem Published",
description:
"Status message for subscriber notifications on postmortem published - includes success messages, failure reasons, or skip reasons",
required: false,
})
@Column({
type: ColumnType.VeryLongText,
nullable: true,
})
public subscriberNotificationStatusMessageOnPostmortemPublished?: string =
undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
@@ -959,6 +1068,151 @@ export default class Incident extends BaseModel {
})
public postmortemNote?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@TableColumn({
type: TableColumnType.Boolean,
title: "Show postmortem on status page?",
description:
"Should the postmortem note and attachments be visible on the status page once published?",
defaultValue: false,
isDefaultValueColumn: true,
})
@Column({
type: ColumnType.Boolean,
default: false,
})
public showPostmortemOnStatusPage?: boolean = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@TableColumn({
type: TableColumnType.Boolean,
title: "Notify Subscribers on Postmortem Published",
description:
"Should subscribers be notified when the postmortem is published?",
defaultValue: true,
isDefaultValueColumn: true,
})
@Column({
type: ColumnType.Boolean,
default: true,
})
public notifySubscribersOnPostmortemPublished?: boolean = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@TableColumn({
type: TableColumnType.Date,
title: "Postmortem Posted At",
description:
"Timestamp that will be shown alongside the published postmortem on the status page.",
required: false,
})
@Column({
type: ColumnType.Date,
nullable: true,
})
public postmortemPostedAt?: Date = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@TableColumn({
type: TableColumnType.EntityArray,
modelType: File,
title: "Postmortem Attachments",
description:
"Files that accompany the postmortem note and can be shared publicly when enabled.",
required: false,
})
@ManyToMany(() => {
return File;
})
@JoinTable({
name: "IncidentPostmortemAttachmentFile",
joinColumn: {
name: "incidentId",
referencedColumnName: "_id",
},
inverseJoinColumn: {
name: "fileId",
referencedColumnName: "_id",
},
})
public postmortemAttachments?: Array<File> = undefined;
@ColumnAccessControl({
create: [],
read: [

View File

@@ -282,8 +282,9 @@ export default class StatusPageDomain extends BaseModel {
@TableColumn({
required: true,
type: TableColumnType.ShortText,
title: "Sumdomain",
description: "Subdomain of your status page - like (status)",
title: "Subdomain",
description:
"Subdomain label for your status page such as 'status'. Leave blank or enter @ to use the root domain.",
})
@Column({
nullable: false,

View File

@@ -30,7 +30,7 @@ import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
})
@AllowAccessIfSubscriptionIsUnpaid()
@TableAccessControl({
create: [Permission.Public],
create: [],
read: [Permission.CurrentUser],
delete: [Permission.CurrentUser],
update: [Permission.CurrentUser],

View File

@@ -42,17 +42,16 @@ export default class UserAPI extends BaseAPI<
const userPermissions: Array<UserPermission> = (
await this.getPermissionsForTenant(req)
).filter((permission: UserPermission) => {
return (
permission.permission.toString() ===
Permission.ProjectOwner.toString() ||
permission.permission.toString() ===
Permission.CreateBillingPaymentMethod.toString()
);
return [
Permission.ProjectOwner,
Permission.ManageProjectBilling,
Permission.CreateBillingPaymentMethod,
].includes(permission.permission);
});
if (userPermissions.length === 0) {
throw new BadDataException(
"Only Project owner can add payment methods.",
"Only project owners or members with Manage Billing access can add payment methods.",
);
}

View File

@@ -0,0 +1,106 @@
import Incident from "../../Models/DatabaseModels/Incident";
import File from "../../Models/DatabaseModels/File";
import NotFoundException from "../../Types/Exception/NotFoundException";
import ObjectID from "../../Types/ObjectID";
import IncidentService, {
Service as IncidentServiceType,
} from "../Services/IncidentService";
import UserMiddleware from "../Middleware/UserAuthorization";
import Response from "../Utils/Response";
import BaseAPI from "./BaseAPI";
import {
ExpressRequest,
ExpressResponse,
NextFunction,
} from "../Utils/Express";
import CommonAPI from "./CommonAPI";
import DatabaseCommonInteractionProps from "../../Types/BaseDatabase/DatabaseCommonInteractionProps";
export default class IncidentAPI extends BaseAPI<
  Incident,
  IncidentServiceType
> {
  public constructor() {
    super(Incident, IncidentService);

    // Authenticated download route for a single postmortem attachment.
    const crudPath: string | undefined = new this.entityType()
      .getCrudApiPath()
      ?.toString();

    this.router.get(
      `${crudPath}/postmortem/attachment/:projectId/:incidentId/:fileId`,
      UserMiddleware.getUserMiddleware,
      async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
        try {
          await this.getPostmortemAttachment(req, res);
        } catch (err) {
          next(err);
        }
      },
    );
  }

  /**
   * Streams one postmortem attachment file for an incident.
   *
   * Every failure mode (missing params, malformed IDs, unknown incident,
   * unknown file) is reported as a generic 404 so the endpoint does not
   * leak whether a given incident or attachment exists.
   */
  private async getPostmortemAttachment(
    req: ExpressRequest,
    res: ExpressResponse,
  ): Promise<void> {
    const rawProjectId: string | undefined = req.params["projectId"];
    const rawIncidentId: string | undefined = req.params["incidentId"];
    const rawFileId: string | undefined = req.params["fileId"];

    if (!rawProjectId || !rawIncidentId || !rawFileId) {
      throw new NotFoundException("Attachment not found");
    }

    let incidentId: ObjectID;
    let fileId: ObjectID;
    let projectId: ObjectID;

    try {
      incidentId = new ObjectID(rawIncidentId);
      fileId = new ObjectID(rawFileId);
      projectId = new ObjectID(rawProjectId);
    } catch {
      // Malformed ObjectIDs look exactly like unknown attachments.
      throw new NotFoundException("Attachment not found");
    }

    // Permission-aware lookup: props carry the caller's tenant/user context,
    // so the service enforces read access on the incident itself.
    const props: DatabaseCommonInteractionProps =
      await CommonAPI.getDatabaseCommonInteractionProps(req);

    const incident: Incident | null = await this.service.findOneBy({
      query: {
        _id: incidentId,
        projectId,
      },
      select: {
        postmortemAttachments: {
          _id: true,
          file: true,
          fileType: true,
          name: true,
        },
      },
      props,
    });

    if (!incident) {
      throw new NotFoundException("Attachment not found");
    }

    const targetFileId: string = fileId.toString();

    // Attachments may expose their id as either `_id` or `id` depending on
    // how they were hydrated; accept both spellings.
    const attachment: File | undefined = incident.postmortemAttachments?.find(
      (candidate: File) => {
        let candidateId: string | null = null;
        if (candidate._id) {
          candidateId = candidate._id.toString();
        } else if (candidate.id) {
          candidateId = candidate.id.toString();
        }
        return candidateId === targetFileId;
      },
    );

    if (!attachment || !attachment.file) {
      throw new NotFoundException("Attachment not found");
    }

    Response.setNoCacheHeaders(res);
    return Response.sendFileResponse(req, res, attachment);
  }
}

View File

@@ -101,7 +101,48 @@ export default class MicrosoftTeamsAPI {
supportsCalling: false,
supportsVideo: false,
// Provide basic command lists to improve client compatibility (esp. mobile)
commandLists: [],
commandLists: [
{
scopes: ["team", "groupChat", "personal"],
commands: [
{
title: "help",
description:
"Show instructions for interacting with the OneUptime bot.",
},
{
title: "create incident",
description:
"Launch the adaptive card to declare a new incident in OneUptime.",
},
{
title: "create maintenance",
description:
"Open the workflow to schedule maintenance directly from Teams.",
},
{
title: "show active incidents",
description:
"List all ongoing incidents with severity and state context.",
},
{
title: "show scheduled maintenance",
description:
"Display upcoming scheduled maintenance events for the workspace.",
},
{
title: "show ongoing maintenance",
description:
"Surface maintenance windows that are currently in progress.",
},
{
title: "show active alerts",
description:
"Provide a summary of alerts that still require attention.",
},
],
},
],
},
],
permissions: ["identity", "messageTeamMembers"],

View File

@@ -408,6 +408,20 @@ export default class StatusPageAPI extends BaseAPI<
},
);
this.router.get(
`${new this.entityType()
.getCrudApiPath()
?.toString()}/incident/postmortem/attachment/:statusPageId/:incidentId/:fileId`,
UserMiddleware.getUserMiddleware,
async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
try {
await this.getIncidentPostmortemAttachment(req, res);
} catch (err) {
next(err);
}
},
);
this.router.get(
`${new this.entityType()
.getCrudApiPath()
@@ -1421,9 +1435,18 @@ export default class StatusPageAPI extends BaseAPI<
if (monitorsOnStatusPage.length > 0) {
let select: Select<Incident> = {
createdAt: true,
declaredAt: true,
updatedAt: true,
title: true,
description: true,
_id: true,
postmortemNote: true,
postmortemPostedAt: true,
showPostmortemOnStatusPage: true,
postmortemAttachments: {
_id: true,
name: true,
},
incidentSeverity: {
name: true,
color: true,
@@ -1474,6 +1497,7 @@ export default class StatusPageAPI extends BaseAPI<
},
select: select,
sort: {
declaredAt: SortOrder.Descending,
createdAt: SortOrder.Descending,
},
@@ -2809,7 +2833,7 @@ export default class StatusPageAPI extends BaseAPI<
manageSubscriptionUrl: manageUrlink,
},
subject:
"Manage your Subscription for" +
"Manage your Subscription for " +
(statusPage.name || "Status Page"),
},
{
@@ -3303,9 +3327,18 @@ export default class StatusPageAPI extends BaseAPI<
let selectIncidents: Select<Incident> = {
createdAt: true,
declaredAt: true,
updatedAt: true,
title: true,
description: true,
_id: true,
postmortemNote: true,
postmortemPostedAt: true,
showPostmortemOnStatusPage: true,
postmortemAttachments: {
_id: true,
name: true,
},
incidentSeverity: {
name: true,
color: true,
@@ -3336,6 +3369,7 @@ export default class StatusPageAPI extends BaseAPI<
query: incidentQuery,
select: selectIncidents,
sort: {
declaredAt: SortOrder.Descending,
createdAt: SortOrder.Descending,
},
skip: 0,
@@ -3373,6 +3407,7 @@ export default class StatusPageAPI extends BaseAPI<
},
select: selectIncidents,
sort: {
declaredAt: SortOrder.Descending,
createdAt: SortOrder.Descending,
},
@@ -3964,6 +3999,110 @@ export default class StatusPageAPI extends BaseAPI<
return Response.sendFileResponse(req, res, attachment);
}
/**
 * Serves a postmortem attachment for an incident shown on a status page.
 *
 * Access chain, in order: validate route params, check the caller may read
 * the status page, confirm the page exists and shows incidents, confirm the
 * incident is attached to one of the page's monitors and has its postmortem
 * made public, and only then locate and stream the requested file.
 *
 * Every failure is a generic 404 so the endpoint does not reveal whether a
 * given status page, incident, or attachment exists.
 */
private async getIncidentPostmortemAttachment(
req: ExpressRequest,
res: ExpressResponse,
): Promise<void> {
const statusPageIdParam: string | undefined = req.params["statusPageId"];
const incidentIdParam: string | undefined = req.params["incidentId"];
const fileIdParam: string | undefined = req.params["fileId"];
if (!statusPageIdParam || !incidentIdParam || !fileIdParam) {
throw new NotFoundException("Attachment not found");
}
let statusPageId: ObjectID;
let incidentId: ObjectID;
let fileId: ObjectID;
try {
statusPageId = new ObjectID(statusPageIdParam);
incidentId = new ObjectID(incidentIdParam);
fileId = new ObjectID(fileIdParam);
} catch {
// Malformed IDs are indistinguishable from unknown attachments.
throw new NotFoundException("Attachment not found");
}
// Enforce the caller's read access to this status page (e.g. private
// pages); throws if access is denied.
await this.checkHasReadAccess({
statusPageId,
req,
});
// Root lookup is safe here: page-level access was checked above, and only
// non-sensitive fields are selected.
const statusPage: StatusPage | null = await StatusPageService.findOneBy({
query: {
_id: statusPageId.toString(),
},
select: {
_id: true,
projectId: true,
showIncidentsOnStatusPage: true,
},
props: {
isRoot: true,
},
});
if (
!statusPage ||
!statusPage.projectId ||
!statusPage.showIncidentsOnStatusPage
) {
throw new NotFoundException("Attachment not found");
}
const { monitorsOnStatusPage } =
await StatusPageService.getMonitorIdsOnStatusPage({
statusPageId,
});
// No monitors means no incidents can be visible on this page.
if (!monitorsOnStatusPage || monitorsOnStatusPage.length === 0) {
throw new NotFoundException("Attachment not found");
}
// The incident must belong to the page's project, be attached to one of
// the page's monitors, and have both visibility flags enabled.
// `projectId!` is safe: checked non-null above.
const incident: Incident | null = await IncidentService.findOneBy({
query: {
_id: incidentId.toString(),
projectId: statusPage.projectId!,
isVisibleOnStatusPage: true,
showPostmortemOnStatusPage: true,
// NOTE(review): `as any` sidesteps query typing for the monitors
// relation filter — consider a typed query operator instead.
monitors: monitorsOnStatusPage as any,
},
select: {
postmortemAttachments: {
_id: true,
file: true,
fileType: true,
name: true,
},
},
props: {
isRoot: true,
},
});
if (!incident) {
throw new NotFoundException("Attachment not found");
}
// Attachments may carry their id as `_id` or `id`; accept either.
const attachment: File | undefined = incident.postmortemAttachments?.find(
(file: File) => {
const attachmentId: string | null = file._id
? file._id.toString()
: file.id
? file.id.toString()
: null;
return attachmentId === fileId.toString();
},
);
if (!attachment || !attachment.file) {
throw new NotFoundException("Attachment not found");
}
Response.setNoCacheHeaders(res);
return Response.sendFileResponse(req, res, attachment);
}
private async getIncidentPublicNoteAttachment(
req: ExpressRequest,
res: ExpressResponse,

View File

@@ -44,6 +44,8 @@ const FRONTEND_ENV_ALLOW_LIST: Array<string> = [
"DISABLE_TELEMETRY",
"SLACK_APP_CLIENT_ID",
"MICROSOFT_TEAMS_APP_CLIENT_ID",
"CAPTCHA_ENABLED",
"CAPTCHA_SITE_KEY",
];
const FRONTEND_ENV_ALLOW_PREFIXES: Array<string> = [
@@ -324,6 +326,13 @@ export const Host: string = process.env["HOST"] || "";
export const ProvisionSsl: boolean = process.env["PROVISION_SSL"] === "true";
export const CaptchaEnabled: boolean =
process.env["CAPTCHA_ENABLED"] === "true";
export const CaptchaSecretKey: string = process.env["CAPTCHA_SECRET_KEY"] || "";
export const CaptchaSiteKey: string = process.env["CAPTCHA_SITE_KEY"] || "";
export const WorkflowScriptTimeoutInMS: number = process.env[
"WORKFLOW_SCRIPT_TIMEOUT_IN_MS"
]
@@ -446,6 +455,8 @@ export const MicrosoftTeamsAppClientId: string | null =
process.env["MICROSOFT_TEAMS_APP_CLIENT_ID"] || null;
export const MicrosoftTeamsAppClientSecret: string | null =
process.env["MICROSOFT_TEAMS_APP_CLIENT_SECRET"] || null;
export const MicrosoftTeamsAppTenantId: string | null =
process.env["MICROSOFT_TEAMS_APP_TENANT_ID"] || null;
// VAPID Configuration for Web Push Notifications
export const VapidPublicKey: string | undefined =

View File

@@ -0,0 +1,30 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1764324618043 implements MigrationInterface {
// Name recorded in TypeORM's migrations table.
public name = "MigrationName1764324618043";

/**
 * Adds a "declaredAt" timestamp to "Incident".
 *
 * Statement order matters:
 *  1. add the column as nullable,
 *  2. backfill legacy rows from "createdAt",
 *  3. only then attach DEFAULT now() and NOT NULL (step 2 guarantees the
 *     constraint cannot fail on existing rows),
 *  4. index the column for sorted incident queries.
 */
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`ALTER TABLE "Incident" ADD "declaredAt" TIMESTAMP WITH TIME ZONE`,
);
// Backfill before adding NOT NULL below.
await queryRunner.query(
`UPDATE "Incident" SET "declaredAt" = "createdAt" WHERE "declaredAt" IS NULL`,
);
await queryRunner.query(
`ALTER TABLE "Incident" ALTER COLUMN "declaredAt" SET DEFAULT now()`,
);
await queryRunner.query(
`ALTER TABLE "Incident" ALTER COLUMN "declaredAt" SET NOT NULL`,
);
await queryRunner.query(
`CREATE INDEX "IDX_b26979b9f119310661734465a4" ON "Incident" ("declaredAt") `,
);
}

/**
 * Reverts up(): drop the index first, then the column.
 */
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`DROP INDEX "public"."IDX_b26979b9f119310661734465a4"`,
);
await queryRunner.query(`ALTER TABLE "Incident" DROP COLUMN "declaredAt"`);
}
}

View File

@@ -0,0 +1,45 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1764762146063 implements MigrationInterface {
  // Name recorded in TypeORM's migrations table.
  public name = "MigrationName1764762146063";

  /**
   * Creates the Incident <-> File join table for postmortem attachments
   * plus the "showPostmortemOnStatusPage" flag on "Incident".
   *
   * Statements run strictly in order: table, indexes, column, then the
   * foreign-key constraints (which require the table to exist).
   */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const upStatements: Array<string> = [
      `CREATE TABLE "IncidentPostmortemAttachmentFile" ("incidentId" uuid NOT NULL, "fileId" uuid NOT NULL, CONSTRAINT "PK_40b17c7d5bcfbde48d7ebab4130" PRIMARY KEY ("incidentId", "fileId"))`,
      `CREATE INDEX "IDX_62b9c09c42e05df3f134aa14a4" ON "IncidentPostmortemAttachmentFile" ("incidentId") `,
      `CREATE INDEX "IDX_7e09116a3b9672622bba9f8b2e" ON "IncidentPostmortemAttachmentFile" ("fileId") `,
      `ALTER TABLE "Incident" ADD "showPostmortemOnStatusPage" boolean NOT NULL DEFAULT false`,
      `ALTER TABLE "IncidentPostmortemAttachmentFile" ADD CONSTRAINT "FK_62b9c09c42e05df3f134aa14a46" FOREIGN KEY ("incidentId") REFERENCES "Incident"("_id") ON DELETE CASCADE ON UPDATE CASCADE`,
      `ALTER TABLE "IncidentPostmortemAttachmentFile" ADD CONSTRAINT "FK_7e09116a3b9672622bba9f8b2e3" FOREIGN KEY ("fileId") REFERENCES "File"("_id") ON DELETE CASCADE ON UPDATE CASCADE`,
    ];

    for (const statement of upStatements) {
      await queryRunner.query(statement);
    }
  }

  /**
   * Reverts up() in reverse dependency order: constraints, column,
   * indexes, and finally the join table itself.
   */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const downStatements: Array<string> = [
      `ALTER TABLE "IncidentPostmortemAttachmentFile" DROP CONSTRAINT "FK_7e09116a3b9672622bba9f8b2e3"`,
      `ALTER TABLE "IncidentPostmortemAttachmentFile" DROP CONSTRAINT "FK_62b9c09c42e05df3f134aa14a46"`,
      `ALTER TABLE "Incident" DROP COLUMN "showPostmortemOnStatusPage"`,
      `DROP INDEX "public"."IDX_7e09116a3b9672622bba9f8b2e"`,
      `DROP INDEX "public"."IDX_62b9c09c42e05df3f134aa14a4"`,
      `DROP TABLE "IncidentPostmortemAttachmentFile"`,
    ];

    for (const statement of downStatements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -0,0 +1,23 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1764767371788 implements MigrationInterface {
// Name recorded in TypeORM's migrations table.
public name = "MigrationName1764767371788";

/**
 * Re-serializes the JSON column DEFAULTs on "OnCallDutyPolicyScheduleLayer"
 * to a compact (whitespace-free) form; the values themselves are unchanged.
 *
 * NOTE(review): "restictionType" is spelled without the second "r" in both
 * up() and down() — presumably it matches the key the application already
 * persists. Do not "fix" the spelling here without verifying stored data.
 */
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "rotation" SET DEFAULT '{"_type":"Recurring","value":{"intervalType":"Day","intervalCount":{"_type":"PositiveNumber","value":1}}}'`,
);
await queryRunner.query(
`ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "restrictionTimes" SET DEFAULT '{"_type":"RestrictionTimes","value":{"restictionType":"None","dayRestrictionTimes":null,"weeklyRestrictionTimes":[]}}'`,
);
}

/**
 * Restores the earlier, space-padded serializations of the same defaults.
 */
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "restrictionTimes" SET DEFAULT '{"_type": "RestrictionTimes", "value": {"restictionType": "None", "dayRestrictionTimes": null, "weeklyRestrictionTimes": []}}'`,
);
await queryRunner.query(
`ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "rotation" SET DEFAULT '{"_type": "Recurring", "value": {"intervalType": "Day", "intervalCount": {"_type": "PositiveNumber", "value": 1}}}'`,
);
}
}

View File

@@ -0,0 +1,29 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1764789433216 implements MigrationInterface {
  // Name recorded in TypeORM's migrations table.
  public name = "MigrationName1764789433216";

  /**
   * Adds the subscriber-notification bookkeeping columns for published
   * postmortems to "Incident": a status (defaults to 'Pending'), an
   * optional status message, and an opt-in flag (defaults to true).
   */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const additions: Array<string> = [
      `ALTER TABLE "Incident" ADD "subscriberNotificationStatusOnPostmortemPublished" character varying NOT NULL DEFAULT 'Pending'`,
      `ALTER TABLE "Incident" ADD "subscriberNotificationStatusMessageOnPostmortemPublished" text`,
      `ALTER TABLE "Incident" ADD "notifySubscribersOnPostmortemPublished" boolean NOT NULL DEFAULT true`,
    ];

    for (const statement of additions) {
      await queryRunner.query(statement);
    }
  }

  /**
   * Reverts up(): drops the three columns in reverse order of creation.
   */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const removals: Array<string> = [
      `ALTER TABLE "Incident" DROP COLUMN "notifySubscribersOnPostmortemPublished"`,
      `ALTER TABLE "Incident" DROP COLUMN "subscriberNotificationStatusMessageOnPostmortemPublished"`,
      `ALTER TABLE "Incident" DROP COLUMN "subscriberNotificationStatusOnPostmortemPublished"`,
    ];

    for (const statement of removals) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -186,6 +186,10 @@ import { MigrationName1763471659817 } from "./1763471659817-MigrationName";
import { MigrationName1763477560906 } from "./1763477560906-MigrationName";
import { MigrationName1763480947474 } from "./1763480947474-MigrationName";
import { MigrationName1763643080445 } from "./1763643080445-MigrationName";
import { MigrationName1764324618043 } from "./1764324618043-MigrationName";
import { MigrationName1764762146063 } from "./1764762146063-MigrationName";
import { MigrationName1764767371788 } from "./1764767371788-MigrationName";
import { MigrationName1764789433216 } from "./1764789433216-MigrationName";
export default [
InitialMigration,
@@ -376,4 +380,8 @@ export default [
MigrationName1763477560906,
MigrationName1763480947474,
MigrationName1763643080445,
MigrationName1764324618043,
MigrationName1764762146063,
MigrationName1764767371788,
MigrationName1764789433216,
];

View File

@@ -480,6 +480,14 @@ export class Service extends DatabaseService<Model> {
const projectId: ObjectID =
createBy.props.tenantId || createBy.data.projectId!;
if (!createBy.data.declaredAt) {
createBy.data.declaredAt = OneUptimeDate.getCurrentDate();
} else {
createBy.data.declaredAt = OneUptimeDate.fromString(
createBy.data.declaredAt as Date,
);
}
// Determine the initial incident state
let initialIncidentStateId: ObjectID | undefined = undefined;
@@ -975,6 +983,7 @@ ${incident.remediationNotes || "No remediation notes provided."}
notifyOwners: false,
rootCause: createdItem.rootCause,
stateChangeLog: createdItem.createdStateLog,
timelineStartsAt: createdItem.declaredAt,
props: {
isRoot: true,
},
@@ -1345,6 +1354,19 @@ ${incident.remediationNotes || "No remediation notes provided."}
sendWorkspaceNotification: true,
},
});
// Set subscriber notification status to Pending so the cron job will send notifications
await this.updateOneById({
id: incidentId,
data: {
subscriberNotificationStatusOnPostmortemPublished:
StatusPageSubscriberNotificationStatus.Pending,
},
props: {
isRoot: true,
ignoreHooks: true,
},
});
}
let shouldAddIncidentFeed: boolean = false;
@@ -1790,6 +1812,7 @@ ${incidentSeverity.name}
limit: LIMIT_MAX,
skip: 0,
select: {
_id: true,
projectId: true,
monitors: {
_id: true,
@@ -1821,6 +1844,18 @@ ${incidentSeverity.name}
incident.monitors,
);
}
if (incident.projectId && incident.id) {
await MetricService.deleteBy({
query: {
projectId: incident.projectId,
serviceId: incident.id,
},
props: {
isRoot: true,
},
});
}
}
}
@@ -1838,6 +1873,7 @@ ${incidentSeverity.name}
rootCause: string | undefined;
stateChangeLog: JSONObject | undefined;
props: DatabaseCommonInteractionProps | undefined;
timelineStartsAt?: Date | string | undefined;
}): Promise<void> {
const {
projectId,
@@ -1849,8 +1885,13 @@ ${incidentSeverity.name}
rootCause,
stateChangeLog,
props,
timelineStartsAt,
} = data;
const declaredTimelineStart: Date | undefined = timelineStartsAt
? OneUptimeDate.fromString(timelineStartsAt as Date)
: undefined;
// get last monitor status timeline.
const lastIncidentStatusTimeline: IncidentStateTimeline | null =
await IncidentStateTimelineService.findOneBy({
@@ -1888,6 +1929,10 @@ ${incidentSeverity.name}
statusTimeline.shouldStatusPageSubscribersBeNotified =
shouldNotifyStatusPageSubscribers;
if (!lastIncidentStatusTimeline && declaredTimelineStart) {
statusTimeline.startsAt = declaredTimelineStart;
}
// Map boolean to enum value
statusTimeline.subscriberNotificationStatus = isSubscribersNotified
? StatusPageSubscriberNotificationStatus.Success
@@ -1914,6 +1959,7 @@ ${incidentSeverity.name}
id: data.incidentId,
select: {
projectId: true,
declaredAt: true,
monitors: {
_id: true,
name: true,
@@ -1970,6 +2016,7 @@ ${incidentSeverity.name}
await MetricService.deleteBy({
query: {
projectId: incident.projectId,
serviceId: data.incidentId,
},
props: {
@@ -1983,6 +2030,7 @@ ${incidentSeverity.name}
const incidentStartsAt: Date =
firstIncidentStateTimeline?.startsAt ||
incident.declaredAt ||
incident.createdAt ||
OneUptimeDate.getCurrentDate();
@@ -2075,6 +2123,7 @@ ${incidentSeverity.name}
timeToAcknowledgeMetric.time =
ackIncidentStateTimeline?.startsAt ||
incident.declaredAt ||
incident.createdAt ||
OneUptimeDate.getCurrentDate();
timeToAcknowledgeMetric.timeUnixNano = OneUptimeDate.toUnixNano(
@@ -2140,6 +2189,7 @@ ${incidentSeverity.name}
timeToResolveMetric.time =
resolvedIncidentStateTimeline?.startsAt ||
incident.declaredAt ||
incident.createdAt ||
OneUptimeDate.getCurrentDate();
timeToResolveMetric.timeUnixNano = OneUptimeDate.toUnixNano(
@@ -2200,6 +2250,7 @@ ${incidentSeverity.name}
incidentDurationMetric.time =
lastIncidentStateTimeline?.startsAt ||
incident.declaredAt ||
incident.createdAt ||
OneUptimeDate.getCurrentDate();
incidentDurationMetric.timeUnixNano = OneUptimeDate.toUnixNano(

View File

@@ -63,14 +63,13 @@ import MonitorFeedService from "./MonitorFeedService";
import { MonitorFeedEventType } from "../../Models/DatabaseModels/MonitorFeed";
import { Gray500, Green500 } from "../../Types/BrandColors";
import LabelService from "./LabelService";
import QueryOperator from "../../Types/BaseDatabase/QueryOperator";
import { FindWhere } from "../../Types/BaseDatabase/Query";
import logger from "../Utils/Logger";
import PushNotificationUtil from "../Utils/PushNotificationUtil";
import ExceptionMessages from "../../Types/Exception/ExceptionMessages";
import Project from "../../Models/DatabaseModels/Project";
import { createWhatsAppMessageFromTemplate } from "../Utils/WhatsAppTemplateUtil";
import { WhatsAppMessagePayload } from "../../Types/WhatsApp/WhatsAppMessage";
import MetricService from "./MetricService";
export class Service extends DatabaseService<Model> {
public constructor() {
@@ -136,12 +135,26 @@ export class Service extends DatabaseService<Model> {
protected override async onBeforeDelete(
deleteBy: DeleteBy<Model>,
): Promise<OnDelete<Model>> {
if (deleteBy.query._id) {
// delete all the status page resource for this monitor.
const monitorsPendingDeletion: Array<Model> = await this.findBy({
query: deleteBy.query,
limit: LIMIT_MAX,
skip: 0,
select: {
_id: true,
projectId: true,
},
props: deleteBy.props,
});
for (const monitor of monitorsPendingDeletion) {
if (!monitor.id) {
continue;
}
// delete all the status page resources for this monitor.
await StatusPageResourceService.deleteBy({
query: {
monitorId: new ObjectID(deleteBy.query._id as string),
monitorId: monitor.id,
},
limit: LIMIT_MAX,
skip: 0,
@@ -150,37 +163,19 @@ export class Service extends DatabaseService<Model> {
},
});
let projectId: FindWhere<ObjectID> | QueryOperator<ObjectID> | undefined =
deleteBy.query.projectId || deleteBy.props.tenantId;
const projectId: ObjectID | undefined = monitor.projectId as
| ObjectID
| undefined;
if (!projectId) {
// fetch this monitor from the database to get the projectId.
const monitor: Model | null = await this.findOneById({
id: new ObjectID(deleteBy.query._id as string) as ObjectID,
select: {
projectId: true,
},
props: {
isRoot: true,
},
});
if (!monitor) {
throw new BadDataException(ExceptionMessages.MonitorNotFound);
}
if (!monitor.id) {
throw new BadDataException(ExceptionMessages.MonitorNotFound);
}
projectId = monitor.projectId!;
continue;
}
try {
await WorkspaceNotificationRuleService.archiveWorkspaceChannels({
projectId: projectId as ObjectID,
projectId: projectId,
notificationFor: {
monitorId: new ObjectID(deleteBy.query._id as string) as ObjectID,
monitorId: monitor.id,
},
sendMessageBeforeArchiving: {
_type: "WorkspacePayloadMarkdown",
@@ -189,12 +184,17 @@ export class Service extends DatabaseService<Model> {
});
} catch (error) {
logger.error(
`Error while archiving workspace channels for monitor ${deleteBy.query._id}: ${error}`,
`Error while archiving workspace channels for monitor ${monitor.id?.toString()}: ${error}`,
);
}
}
return { deleteBy, carryForward: null };
return {
deleteBy,
carryForward: {
monitors: monitorsPendingDeletion,
},
};
}
@CaptureSpan()
@@ -208,6 +208,24 @@ export class Service extends DatabaseService<Model> {
);
}
if (onDelete.carryForward && onDelete.carryForward.monitors) {
for (const monitor of onDelete.carryForward.monitors as Array<Model>) {
if (!monitor.projectId || !monitor.id) {
continue;
}
await MetricService.deleteBy({
query: {
projectId: monitor.projectId,
serviceId: monitor.id,
},
props: {
isRoot: true,
},
});
}
}
return onDelete;
}

View File

@@ -510,12 +510,30 @@ export class Service extends DatabaseService<ScheduledMaintenanceStateTimeline>
monitors: {
_id: true,
},
nextSubscriberNotificationBeforeTheEventAt: true,
},
props: {
isRoot: true,
},
});
const hasProgressedBeyondScheduledState: boolean = Boolean(
scheduledMaintenanceState && !scheduledMaintenanceState.isScheduledState,
);
if (
hasProgressedBeyondScheduledState &&
scheduledMaintenanceEvent?.nextSubscriberNotificationBeforeTheEventAt
) {
await ScheduledMaintenanceService.updateOneById({
id: createdItem.scheduledMaintenanceId!,
data: {
nextSubscriberNotificationBeforeTheEventAt: null,
},
props: onCreate.createBy.props,
});
}
if (isOngoingState) {
if (
scheduledMaintenanceEvent &&

View File

@@ -48,19 +48,26 @@ export class Service extends DatabaseService<StatusPageDomain> {
);
}
if (createBy.data.subdomain) {
// trim and lowercase the subdomain.
createBy.data.subdomain = createBy.data.subdomain.trim().toLowerCase();
let normalizedSubdomain: string =
createBy.data.subdomain?.trim().toLowerCase() || "";
if (normalizedSubdomain === "@") {
normalizedSubdomain = "";
}
createBy.data.subdomain = normalizedSubdomain;
if (domain) {
createBy.data.fullDomain = (
createBy.data.subdomain +
"." +
domain.domain?.toString()
)
.toLowerCase()
.trim();
const baseDomain: string =
domain.domain?.toString().toLowerCase().trim() || "";
if (!baseDomain) {
throw new BadDataException("Please select a valid domain.");
}
createBy.data.fullDomain = normalizedSubdomain
? `${normalizedSubdomain}.${baseDomain}`
: baseDomain;
}
createBy.data.cnameVerificationToken = ObjectID.generate().toString();

View File

@@ -44,10 +44,11 @@ export class TeamMemberService extends DatabaseService<TeamMember> {
}
@CaptureSpan()
private async isSCIMEnabled(projectId: ObjectID): Promise<boolean> {
private async isSCIMPushGroupsEnabled(projectId: ObjectID): Promise<boolean> {
const count: PositiveNumber = await ProjectSCIMService.countBy({
query: {
projectId: projectId,
enablePushGroups: true,
},
props: {
isRoot: true,
@@ -63,12 +64,12 @@ export class TeamMemberService extends DatabaseService<TeamMember> {
// Check if SCIM is enabled for the project
if (
!createBy.props.isRoot &&
(await this.isSCIMEnabled(
(await this.isSCIMPushGroupsEnabled(
createBy.data.projectId! || createBy.props.tenantId,
))
) {
throw new BadDataException(
"Cannot invite team members when SCIM is enabled for this project.",
"Cannot invite team members while SCIM Push Groups is enabled for this project. Disable Push Groups to manage members from OneUptime.",
);
}
@@ -311,10 +312,10 @@ export class TeamMemberService extends DatabaseService<TeamMember> {
!deleteBy.props.isRoot &&
members.length > 0 &&
members[0]?.projectId &&
(await this.isSCIMEnabled(members[0].projectId))
(await this.isSCIMPushGroupsEnabled(members[0].projectId))
) {
throw new BadDataException(
"Cannot delete team members when SCIM is enabled for this project.",
"Cannot delete team members while SCIM Push Groups is enabled for this project. Disable Push Groups to manage members from OneUptime.",
);
}
@@ -346,11 +347,11 @@ export class TeamMemberService extends DatabaseService<TeamMember> {
});
// Skip the one-member guard when SCIM manages membership for the project.
const isSCIMEnabled: boolean = await this.isSCIMEnabled(
const isPushGroupsManaged: boolean = await this.isSCIMPushGroupsEnabled(
member.projectId!,
);
if (!isSCIMEnabled && membersInTeam.toNumber() <= 1) {
if (!isPushGroupsManaged && membersInTeam.toNumber() <= 1) {
throw new BadDataException(
Errors.TeamMemberService.ONE_MEMBER_REQUIRED,
);

View File

@@ -71,6 +71,7 @@ export class Service extends DatabaseService<Model> {
const scimCount: PositiveNumber = await ProjectSCIMService.countBy({
query: {
projectId: projectId,
enablePushGroups: true,
},
skip: new PositiveNumber(0),
limit: new PositiveNumber(1),
@@ -82,7 +83,7 @@ export class Service extends DatabaseService<Model> {
if (scimCount.toNumber() > 0) {
throw new BadDataException(
`Cannot ${data.action} teams when SCIM is enabled for this project.`,
`Cannot ${data.action} teams while SCIM Push Groups is enabled for this project. Disable Push Groups to manage teams from OneUptime.`,
);
}
}

View File

@@ -115,9 +115,18 @@ export default class Email extends ComponentCode {
const smtpTransport: SMTPTransport.Options = {
host: args["smtp-host"]?.toString(),
port: args["smtp-port"] as number,
secure: Boolean(args["secure"]),
};
if (
args["secure"] === true ||
args["secure"] === "true" ||
args["secure"] === 1
) {
smtpTransport.secure = true;
} else {
smtpTransport.secure = false;
}
if (username && password) {
smtpTransport.auth = {
user: username,

View File

@@ -166,7 +166,20 @@ export default class BrowserUtil {
throw new BadDataException("Chrome executable path not found.");
}
return `/root/.cache/ms-playwright/${chromeInstallationName}/chrome-linux/chrome`;
const chromeExecutableCandidates: Array<string> = [
`/root/.cache/ms-playwright/${chromeInstallationName}/chrome-linux/chrome`,
`/root/.cache/ms-playwright/${chromeInstallationName}/chrome-linux64/chrome`,
`/root/.cache/ms-playwright/${chromeInstallationName}/chrome64/chrome`,
`/root/.cache/ms-playwright/${chromeInstallationName}/chrome/chrome`,
];
for (const executablePath of chromeExecutableCandidates) {
if (await LocalFile.doesFileExist(executablePath)) {
return executablePath;
}
}
throw new BadDataException("Chrome executable path not found.");
}
@CaptureSpan()
@@ -197,6 +210,19 @@ export default class BrowserUtil {
throw new BadDataException("Firefox executable path not found.");
}
return `/root/.cache/ms-playwright/${firefoxInstallationName}/firefox/firefox`;
const firefoxExecutableCandidates: Array<string> = [
`/root/.cache/ms-playwright/${firefoxInstallationName}/firefox/firefox`,
`/root/.cache/ms-playwright/${firefoxInstallationName}/firefox-linux64/firefox`,
`/root/.cache/ms-playwright/${firefoxInstallationName}/firefox64/firefox`,
`/root/.cache/ms-playwright/${firefoxInstallationName}/firefox-64/firefox`,
];
for (const executablePath of firefoxExecutableCandidates) {
if (await LocalFile.doesFileExist(executablePath)) {
return executablePath;
}
}
throw new BadDataException("Firefox executable path not found.");
}
}

View File

@@ -0,0 +1,98 @@
import axios, { AxiosError, AxiosResponse } from "axios";
import BadDataException from "../../Types/Exception/BadDataException";
import logger from "./Logger";
import { CaptchaEnabled, CaptchaSecretKey } from "../EnvironmentConfig";
/** Input for a captcha verification attempt. */
export interface VerifyCaptchaOptions {
  token: string | null | undefined;
  remoteIp?: string | null;
}

// Upper bound on the provider round-trip.
const REQUEST_TIMEOUT_MS: number = 5000;

// Shown to users for any provider-side failure; details stay in the logs.
const GENERIC_ERROR_MESSAGE: string =
  "Captcha verification failed. Please try again.";

type HCaptchaResponse = {
  success?: boolean;
  [key: string]: unknown;
};

class CaptchaUtil {
  /**
   * True only when captcha is switched on AND a secret key is configured —
   * i.e. verification can actually be performed.
   */
  public static isCaptchaEnabled(): boolean {
    return CaptchaEnabled && Boolean(CaptchaSecretKey);
  }

  /**
   * Verifies a captcha token against the provider.
   *
   * No-op when captcha is disabled. Throws BadDataException when the token
   * is missing, the secret is misconfigured, or the provider rejects the
   * token; provider details are logged, never surfaced to the caller.
   */
  public static async verifyCaptcha(
    options: VerifyCaptchaOptions,
  ): Promise<void> {
    if (!CaptchaEnabled) {
      return;
    }

    if (!CaptchaSecretKey) {
      // Misconfiguration: enabled without a secret. Fail closed.
      logger.error(
        "Captcha is enabled but CAPTCHA_SECRET_KEY is not configured.",
      );
      throw new BadDataException(GENERIC_ERROR_MESSAGE);
    }

    const token: string = (options.token || "").trim();

    if (!token) {
      throw new BadDataException(
        "Captcha token is missing. Please complete the verification challenge.",
      );
    }

    try {
      await this.verifyHCaptcha(token, options.remoteIp || undefined);
    } catch (err) {
      // Log the underlying cause, return only the generic message.
      const failureMessage: string = axios.isAxiosError(err)
        ? (err as AxiosError).message
        : (err as Error).message;
      logger.error(`Captcha provider verification failure: ${failureMessage}`);
      throw new BadDataException(GENERIC_ERROR_MESSAGE);
    }
  }

  /**
   * Calls hCaptcha's siteverify endpoint; throws unless it reports success.
   */
  private static async verifyHCaptcha(
    token: string,
    remoteIp?: string,
  ): Promise<void> {
    const form: URLSearchParams = new URLSearchParams({
      secret: CaptchaSecretKey,
      response: token,
    });

    if (remoteIp) {
      form.append("remoteip", remoteIp);
    }

    const response: AxiosResponse<HCaptchaResponse> =
      await axios.post<HCaptchaResponse>(
        "https://hcaptcha.com/siteverify",
        form.toString(),
        {
          headers: {
            "content-type": "application/x-www-form-urlencoded",
          },
          timeout: REQUEST_TIMEOUT_MS,
        },
      );

    if (!response.data?.success) {
      logger.warn(
        `hCaptcha verification failed: ${JSON.stringify(response.data || {})}`,
      );
      throw new BadDataException(GENERIC_ERROR_MESSAGE);
    }
  }
}

export default CaptchaUtil;

View File

@@ -1,4 +1,5 @@
import {
IsBillingEnabled,
LetsEncryptAccountKey,
LetsEncryptNotificationEmail,
} from "../../../Server/EnvironmentConfig";
@@ -325,9 +326,15 @@ export default class GreenlockUtil {
throw e;
}
throw new ServerException(
`Unable to order certificate for ${data.domain}. Please contact support at support@oneuptime.com for more information.`,
);
if (IsBillingEnabled) {
throw new ServerException(
`Unable to order certificate for ${data.domain}. Please contact support at support@oneuptime.com for more information.`,
);
} else {
throw new ServerException(
`Unable to order certificate for ${data.domain}. Please make sure that your server can be accessed publicly over port 80 (HTTP) and port 443 (HTTPS). If the problem persists, please refer to server logs for more information. Please also set up LOG_LEVEL=DEBUG to get more detailed server logs.`,
);
}
}
}
}

View File

@@ -226,7 +226,11 @@ export default class Telemetry {
};
if (logRecordProcessors.length > 0) {
loggerProviderConfig.processors = logRecordProcessors;
(
loggerProviderConfig as LoggerProviderConfig & {
processors?: Array<LogRecordProcessor>;
}
).processors = logRecordProcessors;
}
this.loggerProvider = new LoggerProvider(loggerProviderConfig);
@@ -254,7 +258,11 @@ export default class Telemetry {
*/
if (logRecordProcessors.length > 0) {
nodeSdkConfiguration.logRecordProcessors = logRecordProcessors;
(
nodeSdkConfiguration as opentelemetry.NodeSDKConfiguration & {
logRecordProcessors?: Array<LogRecordProcessor>;
}
).logRecordProcessors = logRecordProcessors;
}
const sdk: opentelemetry.NodeSDK = new opentelemetry.NodeSDK(

View File

@@ -16,7 +16,7 @@ export enum MicrosoftTeamsIncidentActionType {
SubmitExecuteIncidentOnCallPolicy = "SubmitExecuteIncidentOnCallPolicy",
ViewChangeIncidentState = "ViewChangeIncidentState",
SubmitChangeIncidentState = "SubmitChangeIncidentState",
NewIncident = "/incident", // new incident slash command
NewIncident = "CreateIncident",
SubmitNewIncident = "SubmitNewIncident",
}
@@ -57,7 +57,7 @@ export enum MicrosoftTeamsScheduledMaintenanceActionType {
SubmitScheduledMaintenanceNote = "SubmitScheduledMaintenanceNote",
ViewChangeScheduledMaintenanceState = "ViewChangeScheduledMaintenanceState",
SubmitChangeScheduledMaintenanceState = "SubmitChangeScheduledMaintenanceState",
NewScheduledMaintenance = "/maintenance", // new scheduled maintenance slash command
NewScheduledMaintenance = "CreateMaintenance",
SubmitNewScheduledMaintenance = "SubmitNewScheduledMaintenance",
}

View File

@@ -320,6 +320,7 @@ export default class MicrosoftTeamsIncidentActions {
name: true,
},
createdAt: true,
declaredAt: true,
},
props: {
isRoot: true,
@@ -331,7 +332,9 @@ export default class MicrosoftTeamsIncidentActions {
return;
}
const message: string = `**Incident Details**\n\n**Title:** ${incident.title}\n**Description:** ${incident.description || "No description"}\n**State:** ${incident.currentIncidentState?.name || "Unknown"}\n**Severity:** ${incident.incidentSeverity?.name || "Unknown"}\n**Created At:** ${incident.createdAt ? new Date(incident.createdAt).toLocaleString() : "Unknown"}`;
const declaredAt: Date | undefined =
incident.declaredAt || incident.createdAt || undefined;
const message: string = `**Incident Details**\n\n**Title:** ${incident.title}\n**Description:** ${incident.description || "No description"}\n**State:** ${incident.currentIncidentState?.name || "Unknown"}\n**Severity:** ${incident.incidentSeverity?.name || "Unknown"}\n**Declared At:** ${declaredAt ? new Date(declaredAt).toLocaleString() : "Unknown"}`;
await turnContext.sendActivity(message);
return;

View File

@@ -43,6 +43,7 @@ import OneUptimeDate from "../../../../Types/Date";
import {
MicrosoftTeamsAppClientId,
MicrosoftTeamsAppClientSecret,
MicrosoftTeamsAppTenantId,
} from "../../../EnvironmentConfig";
// Import services for bot commands
@@ -91,18 +92,25 @@ const MICROSOFT_TEAMS_APP_TYPE: string = "SingleTenant";
const MICROSOFT_TEAMS_MAX_PAGES: number = 500;
export default class MicrosoftTeamsUtil extends WorkspaceBase {
private static cachedAdapter: CloudAdapter | null = null;
private static readonly WELCOME_CARD_STATE_KEY: string =
"oneuptime.microsoftTeams.welcomeCardSent";
// Get or create Bot Framework adapter for a specific tenant
private static getBotAdapter(microsoftAppTenantId: string): CloudAdapter {
private static getBotAdapter(): CloudAdapter {
if (this.cachedAdapter) {
return this.cachedAdapter;
}
if (!MicrosoftTeamsAppClientId || !MicrosoftTeamsAppClientSecret) {
throw new BadDataException(
"Microsoft Teams App credentials not configured",
);
}
if (!microsoftAppTenantId) {
throw new BadDataException("Microsoft Teams tenant ID is required");
if (!MicrosoftTeamsAppTenantId) {
throw new BadDataException(
"Microsoft Teams app tenant ID is not configured",
);
}
logger.debug(
@@ -110,18 +118,19 @@ export default class MicrosoftTeamsUtil extends WorkspaceBase {
);
logger.debug(`App ID: ${MicrosoftTeamsAppClientId}`);
logger.debug(`App Type: ${MICROSOFT_TEAMS_APP_TYPE}`);
logger.debug(`Tenant ID: ${microsoftAppTenantId}`);
logger.debug(`Tenant ID: ${MicrosoftTeamsAppTenantId}`);
const authConfig: ConfigurationBotFrameworkAuthenticationOptions = {
MicrosoftAppId: MicrosoftTeamsAppClientId,
MicrosoftAppPassword: MicrosoftTeamsAppClientSecret,
MicrosoftAppType: MICROSOFT_TEAMS_APP_TYPE,
MicrosoftAppTenantId: microsoftAppTenantId,
MicrosoftAppTenantId: MicrosoftTeamsAppTenantId,
};
const botFrameworkAuthentication: ConfigurationBotFrameworkAuthentication =
new ConfigurationBotFrameworkAuthentication(authConfig);
const adapter: CloudAdapter = new CloudAdapter(botFrameworkAuthentication);
this.cachedAdapter = adapter;
logger.debug("Bot Framework adapter created successfully");
return adapter;
@@ -1141,7 +1150,7 @@ export default class MicrosoftTeamsUtil extends WorkspaceBase {
logger.debug(`Using bot ID: ${miscData.botId}`);
// Get Bot Framework adapter
const adapter: CloudAdapter = this.getBotAdapter(tenantId);
const adapter: CloudAdapter = this.getBotAdapter();
// Create conversation reference for the channel
const conversationReference: ConversationReference = {
@@ -1789,14 +1798,19 @@ export default class MicrosoftTeamsUtil extends WorkspaceBase {
let responseText: string = "";
try {
const isCreateIncidentCommand: boolean =
cleanText === "create incident" ||
cleanText.startsWith("create incident ");
const isCreateMaintenanceCommand: boolean =
cleanText === "create maintenance" ||
cleanText.startsWith("create maintenance ");
if (cleanText.includes("help") || cleanText === "") {
responseText = this.getHelpMessage();
} else if (
cleanText === "/incident" ||
cleanText.startsWith("/incident ")
) {
// Handle /incident slash command
logger.debug("Processing /incident command");
} else if (isCreateIncidentCommand) {
// Handle create incident command (legacy slash command supported)
logger.debug("Processing create incident command");
const card: JSONObject =
await MicrosoftTeamsIncidentActions.buildNewIncidentCard(projectId);
await data.turnContext.sendActivity({
@@ -1809,12 +1823,9 @@ export default class MicrosoftTeamsUtil extends WorkspaceBase {
});
logger.debug("New incident card sent successfully");
return;
} else if (
cleanText === "/maintenance" ||
cleanText.startsWith("/maintenance ")
) {
// Handle /maintenance slash command
logger.debug("Processing /maintenance command");
} else if (isCreateMaintenanceCommand) {
// Handle create maintenance command (legacy slash command supported)
logger.debug("Processing create maintenance command");
const card: JSONObject =
await MicrosoftTeamsScheduledMaintenanceActions.buildNewScheduledMaintenanceCard(
projectId,
@@ -1871,8 +1882,8 @@ export default class MicrosoftTeamsUtil extends WorkspaceBase {
**Available Commands:**
- **help** — Show this help message
- **/incident** — Create a new incident
- **/maintenance** — Create a new scheduled maintenance event
- **create incident** — Create a new incident
- **create maintenance** — Create a new scheduled maintenance event
- **show active incidents** — Display all currently active incidents
- **show scheduled maintenance** — Show upcoming scheduled maintenance events
- **show ongoing maintenance** — Display currently ongoing maintenance events
@@ -1920,11 +1931,13 @@ Just type any of these commands to get the information you need!`;
color: true,
},
createdAt: true,
declaredAt: true,
monitors: {
name: true,
},
},
sort: {
declaredAt: SortOrder.Descending,
createdAt: SortOrder.Descending,
},
limit: 10,
@@ -1949,8 +1962,10 @@ If you need to report an incident or check historical incidents, please visit th
for (const incident of activeIncidents) {
const severity: string = incident.incidentSeverity?.name || "Unknown";
const state: string = incident.currentIncidentState?.name || "Unknown";
const createdAt: string = incident.createdAt
? OneUptimeDate.getDateAsFormattedString(incident.createdAt)
const declaredAt: Date | undefined =
incident.declaredAt || incident.createdAt;
const declaredAtText: string = declaredAt
? OneUptimeDate.getDateAsFormattedString(declaredAt)
: "Unknown";
const severityIcon: string = ["Critical", "Major"].includes(severity)
@@ -1968,7 +1983,7 @@ If you need to report an incident or check historical incidents, please visit th
message += `${severityIcon} **[Incident #${incident.incidentNumber}: ${incident.title}](${incidentUrl.toString()})**
• **Severity:** ${severity}
• **Status:** ${state}
• **Created:** ${createdAt}
• **Declared:** ${declaredAtText}
`;
if (incident.monitors && incident.monitors.length > 0) {
@@ -2528,7 +2543,6 @@ All monitoring checks are passing normally.`;
if (action === "add") {
logger.debug("OneUptime bot was installed");
await this.sendWelcomeAdaptiveCard(data.turnContext);
} else if (action === "remove") {
logger.debug("OneUptime bot was uninstalled");
}
@@ -2564,7 +2578,7 @@ All monitoring checks are passing normally.`;
}
// Get Bot Framework adapter
const adapter: CloudAdapter = this.getBotAdapter(tenantId);
const adapter: CloudAdapter = this.getBotAdapter();
// Create custom activity handler class that extends TeamsActivityHandler
class OneUptimeTeamsActivityHandler extends TeamsActivityHandler {
@@ -2707,11 +2721,11 @@ All monitoring checks are passing normally.`;
value: "Show quick help and useful links",
},
{
title: "/incident",
title: "create incident",
value: "Create a new incident without leaving Teams",
},
{
title: "/maintenance",
title: "create maintenance",
value: "Schedule or review maintenance windows",
},
{

View File

@@ -5,6 +5,8 @@ import MailService from "../../../Server/Services/MailService";
import TeamMemberService from "../../../Server/Services/TeamMemberService";
import UserNotificationRuleService from "../../../Server/Services/UserNotificationRuleService";
import UserNotificationSettingService from "../../../Server/Services/UserNotificationSettingService";
import ProjectSCIMService from "../../../Server/Services/ProjectSCIMService";
import ProjectSCIM from "../../../Models/DatabaseModels/ProjectSCIM";
import Errors from "../../../Server/Utils/Errors";
import "../TestingUtils/Init";
import ProjectServiceHelper from "../TestingUtils/Services/ProjectServiceHelper";
@@ -334,6 +336,123 @@ describe("TeamMemberService", () => {
},
);
});
// SCIM "push groups" means team membership is owned by the identity
// provider, so manual invites through TeamMemberService must be rejected.
it("should block inviting users when SCIM push groups is enabled", async () => {
  // Project owner, created with root privileges so setup bypasses auth checks.
  const owner: User = await UserServiceHelper.genrateAndSaveRandomUser(
    null,
    {
      isRoot: true,
    },
  );
  const project: Project =
    await ProjectServiceHelper.generateAndSaveRandomProject(null, {
      isRoot: true,
      userId: owner.id!,
    });
  const team: Team = await TeamServiceHelper.generateAndSaveRandomTeam(
    {
      projectId: new ObjectID(project.id!),
    },
    {
      isRoot: true,
    },
  );
  // The user we will attempt (and expect to fail) to invite.
  const memberUser: User =
    await UserServiceHelper.genrateAndSaveRandomUser(null, {
      isRoot: true,
    });
  // SCIM config with push groups ENABLED — this is the condition under test.
  const scimWithPushGroups: ProjectSCIM = new ProjectSCIM();
  scimWithPushGroups.projectId = new ObjectID(project._id!);
  scimWithPushGroups.name = "Test SCIM Push Groups";
  scimWithPushGroups.bearerToken = ObjectID.generate().toString();
  scimWithPushGroups.enablePushGroups = true;
  await ProjectSCIMService.create({
    data: scimWithPushGroups,
    props: {
      isRoot: true,
    },
  });
  const tm: TeamMember = TeamMemberServiceHelper.generateRandomTeamMember(
    {
      projectId: new ObjectID(project._id!),
      userId: new ObjectID(memberUser._id!),
      teamId: new ObjectID(team._id!),
    },
  );
  // Non-root create (as a real tenant request) must be rejected with an
  // error mentioning SCIM Push Groups.
  await expect(
    TeamMemberService.create({
      data: tm,
      props: { isRoot: false, tenantId: project.id! },
    }),
  ).rejects.toThrow(/SCIM Push Groups/i);
});
// Counterpart to the push-groups-enabled case: when SCIM exists but push
// groups is disabled, manual invites must still succeed.
it("should allow inviting users when SCIM push groups is disabled", async () => {
  // Project owner, created with root privileges so setup bypasses auth checks.
  const owner: User = await UserServiceHelper.genrateAndSaveRandomUser(
    null,
    {
      isRoot: true,
    },
  );
  const project: Project =
    await ProjectServiceHelper.generateAndSaveRandomProject(null, {
      isRoot: true,
      userId: owner.id!,
    });
  const team: Team = await TeamServiceHelper.generateAndSaveRandomTeam(
    {
      projectId: new ObjectID(project.id!),
    },
    {
      isRoot: true,
    },
  );
  // The user we will invite; expected to succeed.
  const memberUser: User =
    await UserServiceHelper.genrateAndSaveRandomUser(null, {
      isRoot: true,
    });
  // SCIM config with push groups DISABLED — invites should not be blocked.
  const scimWithoutPushGroups: ProjectSCIM = new ProjectSCIM();
  scimWithoutPushGroups.projectId = new ObjectID(project._id!);
  scimWithoutPushGroups.name = "Test SCIM without Push Groups";
  scimWithoutPushGroups.bearerToken = ObjectID.generate().toString();
  scimWithoutPushGroups.enablePushGroups = false;
  await ProjectSCIMService.create({
    data: scimWithoutPushGroups,
    props: {
      isRoot: true,
    },
  });
  const tm: TeamMember = TeamMemberServiceHelper.generateRandomTeamMember(
    {
      projectId: new ObjectID(project._id!),
      userId: new ObjectID(memberUser._id!),
      teamId: new ObjectID(team._id!),
    },
  );
  // Non-root create (as a real tenant request) should go through.
  const teamMember: TeamMember = await TeamMemberService.create({
    data: tm,
    props: { isRoot: false, tenantId: project.id! },
  });
  expect(teamMember).toBeDefined();
  expect(teamMember.projectId?.toString()).toEqual(
    project._id?.toString(),
  );
});
});
describe("onCreateSuccess", () => {

View File

@@ -12,6 +12,7 @@ enum EmailTemplateType {
SubscribedToStatusPage = "SubscribedToStatusPage.hbs",
SubscriberAnnouncementCreated = "SubscriberAnnouncementCreated.hbs",
SubscriberIncidentCreated = "SubscriberIncidentCreated.hbs",
SubscriberIncidentPostmortemCreated = "SubscriberIncidentPostmortemCreated.hbs",
StatusPageSubscriberReport = "StatusPageSubscriberReport.hbs",
SubscriberIncidentNoteCreated = "SubscriberIncidentNoteCreated.hbs",
SubscriberIncidentStateChanged = "SubscriberIncidentStateChanged.hbs",

View File

@@ -2,6 +2,7 @@ enum IconProp {
Equals = "Equals",
Archive = "Archive",
File = "File",
DocumentCheck = "DocumentCheck",
Automation = "Automation",
Workflow = "Workflow",
TableCells = "TableCells",

View File

@@ -0,0 +1,75 @@
import HCaptcha from "@hcaptcha/react-hcaptcha";
import React from "react";
// Props for the hCaptcha wrapper component.
export interface CaptchaProps {
  // hCaptcha site key; when empty, the component renders an error placeholder.
  siteKey: string;
  // Incrementing this value resets the widget and clears the reported token.
  resetSignal?: number | undefined;
  // Validation error message rendered under the widget.
  error?: string | undefined;
  // Called with the current token ("" when cleared/expired/errored).
  onTokenChange?: (token: string) => void;
  // Called after verify/expire/error, e.g. to trigger form field validation.
  onBlur?: (() => void) | undefined;
  // Optional wrapper class; sensible defaults are used when omitted.
  className?: string | undefined;
}
/**
 * hCaptcha widget wrapper. Reports the current token to the parent via
 * onTokenChange (empty string when there is no valid token) and supports
 * programmatic resets through the resetSignal prop.
 */
const Captcha: React.FC<CaptchaProps> = ({
  siteKey,
  resetSignal = 0,
  error,
  onTokenChange,
  onBlur,
  className,
}: CaptchaProps): JSX.Element => {
  // Handle to the underlying widget so we can programmatically reset it.
  const captchaRef: React.MutableRefObject<HCaptcha | null> =
    React.useRef<HCaptcha | null>(null);
  // Keep the latest onTokenChange in a ref so handleTokenChange can stay
  // referentially stable (empty useCallback deps) without going stale.
  const onTokenChangeRef: React.MutableRefObject<
    CaptchaProps["onTokenChange"]
  > = React.useRef<CaptchaProps["onTokenChange"]>(onTokenChange);
  React.useEffect(() => {
    onTokenChangeRef.current = onTokenChange;
  }, [onTokenChange]);
  // Normalizes null to "" before notifying the parent.
  const handleTokenChange: (token: string | null) => void = React.useCallback(
    (token: string | null) => {
      onTokenChangeRef.current?.(token || "");
    },
    [],
  );
  // Reset the widget and clear the token whenever resetSignal changes.
  // Note: this also fires once on mount, which is a benign no-op reset.
  React.useEffect(() => {
    captchaRef.current?.resetCaptcha();
    handleTokenChange("");
  }, [resetSignal, handleTokenChange]);
  // Early return deliberately placed after all hooks to keep hook order stable.
  if (!siteKey) {
    return (
      <div className={className || "text-center text-sm text-red-500"}>
        Captcha is not configured.
      </div>
    );
  }
  return (
    <div className={className || "flex flex-col items-center gap-2"}>
      <HCaptcha
        sitekey={siteKey}
        ref={captchaRef}
        onVerify={(token: string) => {
          handleTokenChange(token);
          onBlur?.();
        }}
        onExpire={() => {
          // An expired token is no longer usable: clear it and reset the widget.
          handleTokenChange(null);
          captchaRef.current?.resetCaptcha();
          onBlur?.();
        }}
        onError={() => {
          handleTokenChange(null);
          onBlur?.();
        }}
      />
      {error && <span className="text-sm text-red-500">{error}</span>}
    </div>
  );
};
export default Captcha;

View File

@@ -30,6 +30,8 @@ export interface TimelineItem {
icon: IconProp;
iconColor: Color;
attachments?: Array<TimelineAttachment>;
title?: string;
highlight?: boolean;
}
export interface EventItemLabel {
@@ -258,25 +260,41 @@ const EventItem: FunctionComponent<ComponentProps> = (
aria-hidden="true"
></span>
)}
<div className="relative flex items-start space-x-3">
<div>
<div className="relative px-1">
<div className="flex h-8 w-8 items-center justify-center rounded-full bg-gray-100 ring-8 ring-white">
<Icon
icon={item.icon}
className="h-5 w-5 text-gray-500"
style={{
color: item.iconColor.toString(),
}}
/>
<div
className={`relative flex items-start space-x-3 ${
item.highlight
? "rounded-2xl border border-gray-200 bg-gray-50 px-4 py-4 shadow-sm"
: ""
}`}
>
{!item.highlight && (
<div>
<div className="relative px-1">
<div className="flex h-8 w-8 items-center justify-center rounded-full bg-gray-100 ring-8 ring-white">
<Icon
icon={item.icon}
className="h-5 w-5 text-gray-500"
style={{
color: item.iconColor.toString(),
}}
/>
</div>
</div>
</div>
</div>
)}
<div className="min-w-0 flex-1">
<div>
<div className="text-sm">
<span className="font-medium text-gray-900">
Update to this {props.eventType}
<span
className={`font-medium ${
item.highlight
? "text-base text-gray-900"
: "text-sm text-gray-900"
}`}
>
{item.title
? item.title
: `Update to this ${props.eventType}`}
</span>
</div>
<p className="mt-0.5 text-sm text-gray-500">

View File

@@ -1130,6 +1130,14 @@ const Icon: FunctionComponent<ComponentProps> = ({
d="M2.25 15.75l5.159-5.159a2.25 2.25 0 013.182 0l5.159 5.159m-1.5-1.5l1.409-1.409a2.25 2.25 0 013.182 0l2.909 2.909m-18 3.75h16.5a1.5 1.5 0 001.5-1.5V6a1.5 1.5 0 00-1.5-1.5H3.75A1.5 1.5 0 002.25 6v12a1.5 1.5 0 001.5 1.5zm10.5-11.25h.008v.008h-.008V8.25zm.375 0a.375.375 0 11-.75 0 .375.375 0 01.75 0z"
/>,
);
} else if (icon === IconProp.DocumentCheck) {
return getSvgWrapper(
<path
strokeLinecap="round"
strokeLinejoin="round"
d="M10.125 2.25H5.625c-.621 0-1.125.504-1.125 1.125v17.25c0 .621.504 1.125 1.125 1.125h12.75c.621 0 1.125-.504 1.125-1.125V11.625M10.125 2.25H10.5c4.971 0 9 4.029 9 9v.375M10.125 2.25c1.864 0 3.375 1.511 3.375 3.375V7.125c0 .621.504 1.125 1.125 1.125h1.5c1.864 0 3.375 1.511 3.375 3.375M9 15l2.25 2.25L15 12"
/>,
);
} else if (icon === IconProp.TextFile || icon === IconProp.File) {
return getSvgWrapper(
<path

View File

@@ -51,6 +51,9 @@ export const IS_ENTERPRISE_EDITION: boolean =
env("IS_ENTERPRISE_EDITION") === "true";
export const BILLING_PUBLIC_KEY: string = env("BILLING_PUBLIC_KEY") || "";
export const CAPTCHA_ENABLED: boolean = env("CAPTCHA_ENABLED") === "true";
export const CAPTCHA_SITE_KEY: string = env("CAPTCHA_SITE_KEY") || "";
// VAPID Configuration for Push Notifications
export const VAPID_PUBLIC_KEY: string = env("VAPID_PUBLIC_KEY") || "";

155
Common/package-lock.json generated
View File

@@ -13,6 +13,7 @@
"@bull-board/express": "^5.21.4",
"@clickhouse/client": "^1.10.1",
"@elastic/elasticsearch": "^8.12.1",
"@hcaptcha/react-hcaptcha": "^1.14.0",
"@monaco-editor/react": "^4.4.6",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/api-logs": "^0.206.0",
@@ -1550,6 +1551,26 @@
"node": ">=6"
}
},
"node_modules/@hcaptcha/loader": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@hcaptcha/loader/-/loader-2.2.0.tgz",
"integrity": "sha512-LAd0XRt1Mu0hLpUpWAx5OS9FS0d6V+V0aCSYUKFAz0a6tTDHeAuwm5iO81mp2HIEhXFOSSB8AwCCOU9zq695lg==",
"license": "MIT"
},
"node_modules/@hcaptcha/react-hcaptcha": {
"version": "1.14.0",
"resolved": "https://registry.npmjs.org/@hcaptcha/react-hcaptcha/-/react-hcaptcha-1.14.0.tgz",
"integrity": "sha512-XHFhmRjw4L6spgRfTEUj/uW4cN4iWTp7BxLHyheF5zEle6g65fIHUCmqKMrIA/6OKLzBSElUKyc1IuDU+V8RaQ==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.17.9",
"@hcaptcha/loader": "^2.2.0"
},
"peerDependencies": {
"react": ">= 16.3.0",
"react-dom": ">= 16.3.0"
}
},
"node_modules/@hexagon/base64": {
"version": "1.1.28",
"resolved": "https://registry.npmjs.org/@hexagon/base64/-/base64-1.1.28.tgz",
@@ -8391,39 +8412,39 @@
}
},
"node_modules/express": {
"version": "4.21.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
"integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
"version": "4.22.1",
"resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
"integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
"license": "MIT",
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
"body-parser": "1.20.3",
"content-disposition": "0.5.4",
"body-parser": "~1.20.3",
"content-disposition": "~0.5.4",
"content-type": "~1.0.4",
"cookie": "0.7.1",
"cookie-signature": "1.0.6",
"cookie": "~0.7.1",
"cookie-signature": "~1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"finalhandler": "1.3.1",
"fresh": "0.5.2",
"http-errors": "2.0.0",
"finalhandler": "~1.3.1",
"fresh": "~0.5.2",
"http-errors": "~2.0.0",
"merge-descriptors": "1.0.3",
"methods": "~1.1.2",
"on-finished": "2.4.1",
"on-finished": "~2.4.1",
"parseurl": "~1.3.3",
"path-to-regexp": "0.1.12",
"path-to-regexp": "~0.1.12",
"proxy-addr": "~2.0.7",
"qs": "6.13.0",
"qs": "~6.14.0",
"range-parser": "~1.2.1",
"safe-buffer": "5.2.1",
"send": "0.19.0",
"serve-static": "1.16.2",
"send": "~0.19.0",
"serve-static": "~1.16.2",
"setprototypeof": "1.2.0",
"statuses": "2.0.1",
"statuses": "~2.0.1",
"type-is": "~1.6.18",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
@@ -8436,15 +8457,6 @@
"url": "https://opencollective.com/express"
}
},
"node_modules/express/node_modules/cookie": {
"version": "0.7.1",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz",
"integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/express/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
@@ -8460,6 +8472,21 @@
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT"
},
"node_modules/express/node_modules/qs": {
"version": "6.14.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
"integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.1.0"
},
"engines": {
"node": ">=0.6"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
@@ -12927,9 +12954,9 @@
}
},
"node_modules/node-forge": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz",
"integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==",
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.2.tgz",
"integrity": "sha512-6xKiQ+cph9KImrRh0VsjH2d8/GXA4FIMlgU4B757iI1ApvcyA9VlouP0yZJha01V+huImO+kKMU7ih+2+E14fw==",
"license": "(BSD-3-Clause OR GPL-2.0)",
"engines": {
"node": ">= 6.13.0"
@@ -12965,9 +12992,9 @@
"license": "MIT"
},
"node_modules/nodemailer": {
"version": "7.0.7",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.7.tgz",
"integrity": "sha512-jGOaRznodf62TVzdyhKt/f1Q/c3kYynk8629sgJHpRzGZj01ezbgMMWJSAjHADcwTKxco3B68/R+KHJY2T5BaA==",
"version": "7.0.11",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.11.tgz",
"integrity": "sha512-gnXhNRE0FNhD7wPSCGhdNh46Hs6nm+uTyg+Kq0cZukNQiYdnCsoQjodNP9BQVG9XrcK/v6/MgpAPBUFyzh9pvw==",
"license": "MIT-0",
"engines": {
"node": ">=6.0.0"
@@ -15246,15 +15273,69 @@
}
},
"node_modules/side-channel": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
"integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
"integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
"license": "MIT",
"dependencies": {
"call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.4",
"object-inspect": "^1.13.1"
"object-inspect": "^1.13.3",
"side-channel-list": "^1.0.0",
"side-channel-map": "^1.0.1",
"side-channel-weakmap": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-list": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
"integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"object-inspect": "^1.13.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-map": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
"integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
"license": "MIT",
"dependencies": {
"call-bound": "^1.0.2",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.5",
"object-inspect": "^1.13.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-weakmap": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
"integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
"license": "MIT",
"dependencies": {
"call-bound": "^1.0.2",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.5",
"object-inspect": "^1.13.3",
"side-channel-map": "^1.0.1"
},
"engines": {
"node": ">= 0.4"

View File

@@ -48,6 +48,7 @@
"@bull-board/express": "^5.21.4",
"@clickhouse/client": "^1.10.1",
"@elastic/elasticsearch": "^8.12.1",
"@hcaptcha/react-hcaptcha": "^1.14.0",
"@monaco-editor/react": "^4.4.6",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/api-logs": "^0.206.0",

View File

@@ -1,56 +0,0 @@
.git
node_modules
# See https://help.github.com/ignore-files/ for more about ignoring files.
# dependencies
/node_modules
node_modules
.idea
# testing
/coverage
# production
/build
# misc
.DS_Store
env.js
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn.lock
Untitled-1
*.local.sh
*.local.yaml
run
stop
nohup.out*
encrypted-credentials.tar
encrypted-credentials/
_README.md
# Important Add production values to gitignore.
values-saas-production.yaml
kubernetes/values-saas-production.yaml
/private
/tls_cert.pem
/tls_key.pem
/keys
temp_readme.md
tests/coverage
settings.json
GoSDK/tester/

View File

@@ -1,6 +0,0 @@
ONEUPTIME_URL=https://oneuptime.com
ONEUPTIME_REPOSITORY_SECRET_KEY=your-repository-secret-key
CODE_REPOSITORY_PASSWORD=
CODE_REPOSITORY_USERNAME=
# Optional. If this is left blank then this url will be ONEUPTIME_URL/llama
ONEUPTIME_LLM_SERVER_URL=

View File

@@ -1 +0,0 @@
*.js text eol=lf

18
Copilot/.gitignore vendored
View File

@@ -1,16 +1,4 @@
# See https://help.github.com/ignore-files/ for more about ignoring files.
# dependencies
#/backend/node_modules
/kubernetes
/node_modules
.idea
# misc
node_modules
build
*.log
.DS_Store
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn.lock

View File

@@ -1,76 +0,0 @@
import URL from "Common/Types/API/URL";
import LlmType from "./Types/LlmType";
import BadDataException from "Common/Types/Exception/BadDataException";
// Function-signature aliases used by the config getters below.
type GetStringFunction = () => string;
type GetStringOrNullFunction = () => string | null;
type GetURLFunction = () => URL;

// Minimum queue depth per service-catalog entry before Copilot picks up work.
export const MIN_ITEMS_IN_QUEUE_PER_SERVICE_CATALOG: number = 10;

// True when the DISABLE_COPILOT env flag is set to the literal string "true".
export const GetIsCopilotDisabled: () => boolean = () => {
  return process.env["DISABLE_COPILOT"] === "true";
};

// Base URL of the OneUptime instance; defaults to the hosted service.
export const GetOneUptimeURL: GetURLFunction = () => {
  return URL.fromString(
    process.env["ONEUPTIME_URL"] || "https://oneuptime.com",
  );
};

// Secret key identifying the code repository to OneUptime; null when unset.
export const GetRepositorySecretKey: GetStringOrNullFunction = ():
  | string
  | null => {
  return process.env["ONEUPTIME_REPOSITORY_SECRET_KEY"] || null;
};

// Fixed container path where the repository is checked out.
export const GetLocalRepositoryPath: GetStringFunction = (): string => {
  return "/repository";
};

// Password/token for the code repository; null when not configured.
export const GetCodeRepositoryPassword: GetStringOrNullFunction = ():
  | string
  | null => {
  const token: string | null = process.env["CODE_REPOSITORY_PASSWORD"] || null;
  return token;
};

// Username for the code repository; null when not configured.
export const GetCodeRepositoryUsername: GetStringOrNullFunction = ():
  | string
  | null => {
  const username: string | null =
    process.env["CODE_REPOSITORY_USERNAME"] || null;
  return username;
};

// URL of the self-hosted LLM server. Throws (rather than returning null)
// when ONEUPTIME_LLM_SERVER_URL is unset, so callers fail fast on bad config.
export const GetLlmServerUrl: GetURLFunction = () => {
  if (!process.env["ONEUPTIME_LLM_SERVER_URL"]) {
    throw new BadDataException("ONEUPTIME_LLM_SERVER_URL is not set");
  }
  return URL.fromString(process.env["ONEUPTIME_LLM_SERVER_URL"]);
};

// OpenAI API key; null when not configured.
export const GetOpenAIAPIKey: GetStringOrNullFunction = (): string | null => {
  return process.env["OPENAI_API_KEY"] || null;
};

// OpenAI model name. Note: defaults to "gpt-4o", so this never actually
// returns null despite its declared type.
export const GetOpenAIModel: GetStringOrNullFunction = (): string | null => {
  return process.env["OPENAI_MODEL"] || "gpt-4o";
};

type GetLlmTypeFunction = () => LlmType;
/**
 * Selects which LLM backend to use. OpenAI takes precedence when both an
 * API key and a model are configured; otherwise the self-hosted OneUptime
 * LLM is used.
 *
 * Throws BadDataException (via GetLlmServerUrl) when neither OpenAI is
 * configured nor ONEUPTIME_LLM_SERVER_URL is set.
 */
export const GetLlmType: GetLlmTypeFunction = (): LlmType => {
  if (GetOpenAIAPIKey() && GetOpenAIModel()) {
    return LlmType.OpenAI;
  }

  // The original code branched on GetLlmServerUrl()'s truthiness, but both
  // branches returned ONEUPTIME_LLM, so the conditional was misleading dead
  // weight. We keep the call itself because it throws on missing config,
  // preserving the fail-fast behavior callers rely on.
  GetLlmServerUrl();

  return LlmType.ONEUPTIME_LLM;
};

// How many code events Copilot fixes in each run.
export const FixNumberOfCodeEventsInEachRun: number = 5;

View File

@@ -25,6 +25,8 @@ ENV PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1
RUN if [ -z "$APP_VERSION" ]; then export APP_VERSION=1.0.0; fi
RUN apt-get update
# Install bash.
RUN apt-get install bash -y && apt-get install curl -y
@@ -46,13 +48,6 @@ COPY ./Common /usr/src/Common
ENV PRODUCTION=true
WORKDIR /usr/src/app
@@ -61,12 +56,11 @@ WORKDIR /usr/src/app
COPY ./Copilot/package*.json /usr/src/app/
RUN npm install
# Create /repository/ directory where the app will store the repository
RUN mkdir -p /repository
# Set the stack trace limit to 0 to show full stack traces
ENV NODE_OPTIONS='--stack-trace-limit=30'
# Set the stack trace limit to 30 to show longer stack traces
ENV NODE_OPTIONS="--stack-trace-limit=30"
{{ if eq .Env.ENVIRONMENT "development" }}
#Run the app
@@ -75,9 +69,9 @@ CMD [ "npm", "run", "dev" ]
# Copy app source
COPY ./Copilot /usr/src/app
# Bundle app source
RUN npm run compile
RUN npm run build
# Set permission to write logs and cache in case container run as non root
RUN chown -R 1000:1000 "/tmp/npm" && chmod -R 2777 "/tmp/npm"
#Run the app
CMD [ "npm", "start" ]
{{ end }}
{{ end }}

View File

@@ -1,8 +0,0 @@
import Exception from "Common/Types/Exception/Exception";
import ExceptionCode from "Common/Types/Exception/ExceptionCode";
/**
 * Exception type for errors raised by Copilot actions. Thin subclass of
 * Exception that forwards the code and message unchanged.
 */
export default class CopilotActionException extends Exception {
  public constructor(code: ExceptionCode, message: string) {
    super(code, message);
  }
}

View File

@@ -1,8 +0,0 @@
import Exception from "Common/Types/Exception/Exception";
import ExceptionCode from "Common/Types/Exception/ExceptionCode";
/**
 * Base exception for failures that occur while processing a Copilot action
 * (see the LLM/file-extension subclasses elsewhere in this directory).
 * Forwards the code and message unchanged to Exception.
 */
export default class CopilotActionProcessingException extends Exception {
  public constructor(code: ExceptionCode, message: string) {
    super(code, message);
  }
}

View File

@@ -1,8 +0,0 @@
import ExceptionCode from "Common/Types/Exception/ExceptionCode";
import CopilotActionProcessingException from "./CopilotActionProcessingException";
/**
 * Raised when the LLM backend fails to return a usable response.
 * Subclasses CopilotActionProcessingException, so the action loop treats it
 * as non-fatal (skip the current action).
 */
export default class ErrorGettingResponseFromLLM extends CopilotActionProcessingException {
  public constructor(message: string) {
    // Always reported as BadDataException; only the message varies.
    super(ExceptionCode.BadDataException, message);
  }
}

View File

@@ -1,8 +0,0 @@
import ExceptionCode from "Common/Types/Exception/ExceptionCode";
import CopilotActionProcessingException from "./CopilotActionProcessingException";
/**
 * Raised when a request to the LLM backend times out.
 * Subclasses CopilotActionProcessingException, so the action loop treats it
 * as non-fatal (skip the current action).
 */
export default class LLMTimeoutException extends CopilotActionProcessingException {
  public constructor(message: string) {
    // Always reported as BadDataException; only the message varies.
    super(ExceptionCode.BadDataException, message);
  }
}

View File

@@ -1,8 +0,0 @@
import ExceptionCode from "Common/Types/Exception/ExceptionCode";
import CopilotActionProcessingException from "./CopilotActionProcessingException";
/**
 * Raised when a file's extension is not in the action's accepted list.
 * Subclasses CopilotActionProcessingException, so the action loop treats it
 * as non-fatal (skip the current action).
 *
 * NOTE: "Extention" is a misspelling of "Extension", but the class name is
 * public API — renaming would break importers, so it is kept as-is.
 */
export default class NotAcceptedFileExtentionForCopilotAction extends CopilotActionProcessingException {
  public constructor(message: string) {
    // Always reported as BadDataException; only the message varies.
    super(ExceptionCode.BadDataException, message);
  }
}

View File

@@ -1,46 +0,0 @@
import CodeRepositoryUtil from "./Utils/CodeRepository";
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import logger from "Common/Server/Utils/Logger";
import dotenv from "dotenv";
import Init from "./Init";
import Telemetry from "Common/Server/Utils/Telemetry";
// Service name used for telemetry reporting.
const APP_NAME: string = "copilot";

// Load environment variables from .env before any configuration is read.
dotenv.config();

logger.info("OneUptime Copilot is starting...");

// Initialize telemetry
Telemetry.init({
  serviceName: APP_NAME,
});

// Run the main Copilot flow. Exit 0 on success. On failure, make a
// best-effort attempt to leave the repository clean (discard changes, switch
// back to main) before exiting with code 1.
Init()
  .then(() => {
    process.exit(0);
  })
  .catch(async (error: Error | HTTPErrorResponse) => {
    try {
      logger.error(error);
      // Roll back any uncommitted edits Copilot made during this run.
      await CodeRepositoryUtil.discardChanges();

      // change back to main branch.
      await CodeRepositoryUtil.checkoutMainBranch();
    } catch (e) {
      // Cleanup is best-effort; the original error below is what matters.
      logger.error(e);
      // do nothing.
    }

    logger.error("Error in starting OneUptime Copilot: ");

    // Log the most specific message available for the failure type.
    if (error instanceof HTTPErrorResponse) {
      logger.error(error.message);
    } else if (error instanceof Error) {
      logger.error(error.message);
    } else {
      logger.error(error);
    }

    process.exit(1);
  });

View File

@@ -1,231 +0,0 @@
import CodeRepositoryUtil, {
CodeRepositoryResult,
RepoScriptType,
} from "./Utils/CodeRepository";
import InitUtil from "./Utils/Init";
import ServiceRepositoryUtil from "./Utils/ServiceRepository";
import { PromiseVoidFunction } from "Common/Types/FunctionTypes";
import logger from "Common/Server/Utils/Logger";
import CopilotActionUtil from "./Utils/CopilotAction";
import CopilotAction from "Common/Models/DatabaseModels/CopilotAction";
import {
FixNumberOfCodeEventsInEachRun,
GetIsCopilotDisabled,
GetLlmType,
} from "./Config";
import CopilotActionService, {
CopilotExecutionResult,
} from "./Service/CopilotActions/Index";
import CopilotActionStatus from "Common/Types/Copilot/CopilotActionStatus";
import PullRequest from "Common/Types/CodeRepository/PullRequest";
import ServiceCopilotCodeRepository from "Common/Models/DatabaseModels/ServiceCopilotCodeRepository";
import CopilotActionProcessingException from "./Exceptions/CopilotActionProcessingException";
import CopilotPullRequest from "Common/Models/DatabaseModels/CopilotPullRequest";
import ProcessUtil from "./Utils/Process";
// Running count of fixes produced in this run; compared against
// FixNumberOfCodeEventsInEachRun to cap work per run. Starts at 1 —
// NOTE(review): with a `<=` cap check this allows exactly
// FixNumberOfCodeEventsInEachRun PRs; confirm the off-by-one is intended.
let currentFixCount: number = 1;

/**
 * Main Copilot run loop: discovers services to improve, clones and sets up
 * the repository, then executes queued actions per service — retrying each
 * action up to 3 times — until done or the per-run fix cap is reached.
 *
 * May terminate the process early via ProcessUtil.haltProcessWithSuccess().
 */
const init: PromiseVoidFunction = async (): Promise<void> => {
  // check if copilot is disabled.
  if (GetIsCopilotDisabled()) {
    logger.info("Copilot is disabled. Exiting.");
    ProcessUtil.haltProcessWithSuccess();
  }

  logger.info(`Using ${GetLlmType()} as the AI model.`);

  // Commits made by Copilot are attributed to this identity.
  await CodeRepositoryUtil.setAuthorIdentity({
    email: "copilot@oneuptime.com",
    name: "OneUptime Copilot",
  });

  const codeRepositoryResult: CodeRepositoryResult = await InitUtil.init();

  // before cloning the repo, check if there are any services to improve.
  ServiceRepositoryUtil.setCodeRepositoryResult({
    codeRepositoryResult,
  });

  const servicesToImprove: ServiceCopilotCodeRepository[] =
    await ServiceRepositoryUtil.getServicesToImprove();

  logger.debug(`Found ${servicesToImprove.length} services to improve.`);

  // if no services to improve, then exit.
  if (servicesToImprove.length === 0) {
    logger.info("No services to improve. Exiting.");
    ProcessUtil.haltProcessWithSuccess();
  }

  for (const serviceToImprove of servicesToImprove) {
    logger.debug(`- ${serviceToImprove.serviceCatalog!.name}`);
  }

  await cloneRepository({
    codeRepositoryResult,
  });

  await setUpRepository();

  for (const serviceRepository of servicesToImprove) {
    // Halts the process if the per-run fix cap has been reached.
    checkIfCurrentFixCountIsLessThanFixNumberOfCodeEventsInEachRun();

    const actionsToWorkOn: Array<CopilotAction> =
      await CopilotActionUtil.getActionsToWorkOn({
        serviceCatalogId: serviceRepository.serviceCatalog!.id!,
        serviceRepositoryId: serviceRepository.id!,
      });

    for (const actionToWorkOn of actionsToWorkOn) {
      checkIfCurrentFixCountIsLessThanFixNumberOfCodeEventsInEachRun();

      // check copilot events for this file.
      let executionResult: CopilotExecutionResult | null = null;

      // Retry each action up to 3 times, discarding partial edits between
      // attempts so each retry starts from a clean working tree.
      let currentRetryCount: number = 0;
      const maxRetryCount: number = 3;

      while (currentRetryCount < maxRetryCount) {
        try {
          executionResult = await executeAction({
            serviceRepository,
            copilotAction: actionToWorkOn,
          });
          break;
        } catch (e) {
          logger.error(e);
          currentRetryCount++;
          await CodeRepositoryUtil.discardAllChangesOnCurrentBranch();
        }
      }

      // Only count actions that actually produced a pull request.
      if (
        executionResult &&
        executionResult.status === CopilotActionStatus.PR_CREATED
      ) {
        currentFixCount++;
      }
    }
  }
};
interface ExecuteActionData {
  serviceRepository: ServiceCopilotCodeRepository;
  copilotAction: CopilotAction;
}

type ExecutionActionFunction = (
  data: ExecuteActionData,
) => Promise<CopilotExecutionResult | null>;

/**
 * Runs a single Copilot action against a service repository.
 *
 * Processing exceptions are expected/benign (e.g. an action that cannot be
 * applied): they are logged at info level and swallowed, yielding `null`.
 * Anything else is rethrown so the caller's retry loop can handle it.
 *
 * @param data - The service repository and the action to execute.
 * @returns The execution result, or `null` when the action was skipped.
 */
const executeAction: ExecutionActionFunction = async (
  data: ExecuteActionData,
): Promise<CopilotExecutionResult | null> => {
  try {
    return await CopilotActionService.executeAction({
      serviceRepository: data.serviceRepository,
      copilotAction: data.copilotAction,
    });
  } catch (err) {
    // Unexpected failures propagate to the caller.
    if (!(err instanceof CopilotActionProcessingException)) {
      throw err;
    }

    // This is not a serious exception, so we just move on to the next action.
    logger.info(err.message);
    return null;
  }
};
type CloneRepositoryFunction = (data: {
  codeRepositoryResult: CodeRepositoryResult;
}) => Promise<void>;

/**
 * Clones the target repository to the local working directory and, if the
 * repository defines an on-after-clone script, executes it.
 *
 * @param data.codeRepositoryResult - The repository descriptor from InitUtil.
 */
const cloneRepository: CloneRepositoryFunction = async (data: {
  codeRepositoryResult: CodeRepositoryResult;
}): Promise<void> => {
  const { codeRepositoryResult } = data;

  logger.info(
    `Cloning the repository ${codeRepositoryResult.codeRepository.name} to a temporary directory.`,
  );

  // now clone this repository to a temporary directory - /repository
  await CodeRepositoryUtil.cloneRepository({
    codeRepository: codeRepositoryResult.codeRepository,
  });

  // Check if OneUptime Copilot has setup properly.
  const onAfterCloneScript: string | null =
    await CodeRepositoryUtil.getRepoScript({
      scriptType: RepoScriptType.OnAfterClone,
    });

  if (!onAfterCloneScript) {
    logger.debug("No on-after-clone script found for this repository.");
  }

  // Run the repo-provided post-clone hook when one is configured.
  if (onAfterCloneScript) {
    logger.info("Executing on-after-clone script.");

    await CodeRepositoryUtil.executeScript({
      script: onAfterCloneScript,
    });

    logger.info("on-after-clone script executed successfully.");
  }

  logger.info(
    `Repository ${codeRepositoryResult.codeRepository.name} cloned successfully.`,
  );
};
/**
 * Enforces the per-run fix cap: once Copilot has produced the maximum number
 * of fixes for a single run, log a friendly notice and halt the process with
 * a success exit code. Does nothing while the cap has not been reached.
 */
const checkIfCurrentFixCountIsLessThanFixNumberOfCodeEventsInEachRun: VoidFunction =
  (): void => {
    const capReached: boolean =
      currentFixCount > FixNumberOfCodeEventsInEachRun;

    if (capReached) {
      logger.info(
        `Copilot has fixed ${FixNumberOfCodeEventsInEachRun} code events. Thank you for using Copilot. If you wish to fix more code events, please run Copilot again.`,
      );
      ProcessUtil.haltProcessWithSuccess();
    }
  };
/**
 * Ensures the cloned repository is set up for Copilot.
 *
 * If the repo is already configured, returns immediately. Otherwise, if an
 * open setup PR already exists, asks the user to merge it and halts; if not,
 * creates a new setup PR and halts. In both halt cases the process exits
 * with a success code — setup is a user action, not an error.
 */
const setUpRepository: PromiseVoidFunction = async (): Promise<void> => {
  const isSetupProperly: boolean =
    await CodeRepositoryUtil.isRepoSetupProperly();

  if (isSetupProperly) {
    return;
  }

  // if the repo is not set up properly, then check if there's an outstanding setup PR for this repo.
  logger.info("Setting up the repository.");

  // check if there's an outstanding setup PR for this repo.
  const setupPullRequest: CopilotPullRequest | null =
    await CodeRepositoryUtil.getOpenSetupPullRequest();

  if (setupPullRequest) {
    logger.info(
      `There's an open setup PR for this repository: ${setupPullRequest.pullRequestId}. Please merge this PR to continue using Copilot. Exiting...`,
    );
    ProcessUtil.haltProcessWithSuccess();
    return;
  }

  // if there's no setup PR, then create a new setup PR.
  const pullRequest: PullRequest = await CodeRepositoryUtil.setUpRepo();

  // Fixed user-facing message: "megre" -> "merge", and "Exiting.." -> "Exiting..."
  // for consistency with the message above.
  logger.info(
    "Repository setup PR created - #" +
      pullRequest.pullRequestNumber +
      ". Please merge this PR to continue using Copilot. Exiting...",
  );

  ProcessUtil.haltProcessWithSuccess();
};
export default init;

View File

@@ -1,6 +1,83 @@
# OneUptime Copilot
# OneUptime Copilot Agent
Copilot is a tool that helps you improve your codebase automatically.
A standalone CLI coding agent that mirrors the autonomous workflows we use inside VS Code Copilot Chat. It connects to an LM Studio-hosted, OpenAI-compatible model, inspects a workspace, reasons about the task, and uses a toolbox (file/patch editing, search, terminal commands) to complete coding requests.
Please refer to the [official documentation](/Docs/Content/copilot) for more information.
## Prerequisites
- Node.js 18+
- An LM Studio instance exposing a chat completions endpoint (for example `http://localhost:1234/v1/chat/completions`).
- The workspace you want the agent to modify must already exist locally.
## Installation
```bash
cd Copilot/oneuptime-copilot-agent
npm install
npm run build
npm link # optional, provides the global oneuptime-copilot-agent command
```
## Usage
```bash
oneuptime-copilot-agent \
--prompt "Refactor auth middleware and add unit tests" \
--model http://localhost:1234/v1/chat/completions \
--model-name openai/gpt-oss-20b \
--workspace-path ./
```
### CLI options
| Flag | Description |
| ---- | ----------- |
| `--prompt` | Required. Natural language description of the task. |
| `--model` | Required. Full LM Studio chat completions endpoint URL. |
| `--workspace-path` | Required. Absolute or relative path to the repo the agent should use. |
| `--model-name` | Optional model identifier that LM Studio expects (default `lmstudio`). |
| `--temperature` | Sampling temperature (default `0.1`). |
| `--max-iterations` | Maximum agent/tool-call loops before stopping (default `12`). |
| `--timeout` | LLM HTTP timeout per request in milliseconds (default `120000`). |
| `--api-key` | Optional bearer token if the endpoint is secured. |
| `--log-level` | `debug`, `info`, `warn`, or `error` (default `info`). |
| `--log-file` | Optional file path. When provided, all logs are appended to this file in addition to stdout. |
### Debug logging
Pass `--log-file` when running the agent to persist verbose debugging output (including `debug` level messages) for later inspection:
```bash
oneuptime-copilot-agent \
--prompt "Track flaky jest tests" \
--model http://localhost:1234/v1/chat/completions \
--workspace-path ./ \
--log-file ./logs/copilot-agent-debug.log
```
The agent will create any missing parent directories and continuously append to the specified file while still streaming logs to stdout.
## Architecture snapshot
- `src/agent` — Orchestrates the conversation loop, builds the system prompt (inspired by the VS Code Copilot agent), snapshots the workspace, and streams messages to the LM Studio endpoint.
- `src/tools` — Implements the toolbelt (`list_directory`, `read_file`, `search_workspace`, `apply_patch`, `write_file`, `run_command`). These wrap `Common` utilities (`Execute`, `LocalFile`, `Logger`) to stay consistent with other OneUptime services.
- `src/llm` — Thin LM Studio/OpenAI-compatible client using `undici` with timeout + error handling.
- `src/@types/Common` — Lightweight shim typings so TypeScript consumers get the pieces of `Common` they need without re-compiling that entire package.
## Development scripts
```bash
npm run build # Compile TypeScript -> build/dist
npm run dev # Run with ts-node for quick experiments
```
For example:
```
npm run dev -- --prompt "Write tests for this project. These tests should be in Jest and TypeScript." \
--model http://localhost:1234/v1/chat/completions \
--model-name deepseek/deepseek-r1-0528-qwen3-8b \
--workspace-path ./ \
--log-file ./copilot-agent-debug.log
```
The agent intentionally mirrors Copilot's workflow: it iteratively plans, reads files, edits them through patches or full rewrites, and executes commands/tests via the terminal tool. Logs stream to stdout so you can follow each tool invocation in real time.

View File

@@ -1,427 +0,0 @@
import CopilotActionType from "Common/Types/Copilot/CopilotActionType";
import CopilotActionBase from "./CopilotActionsBase";
import CodeRepositoryUtil from "../../Utils/CodeRepository";
import TechStack from "Common/Types/ServiceCatalog/TechStack";
import { CopilotPromptResult } from "../LLM/LLMBase";
import Text from "Common/Types/Text";
import { CopilotActionPrompt, CopilotProcess } from "./Types";
import { PromptRole } from "../LLM/Prompt";
import logger from "Common/Server/Utils/Logger";
import FileActionProp from "Common/Types/Copilot/CopilotActionProps/FileActionProp";
import CodeRepositoryFile from "Common/Server/Utils/CodeRepository/CodeRepositoryFile";
import CopilotActionUtil from "../../Utils/CopilotAction";
import ObjectID from "Common/Types/ObjectID";
import CopilotAction from "Common/Models/DatabaseModels/CopilotAction";
import ServiceRepositoryUtil from "../../Utils/ServiceRepository";
import Dictionary from "Common/Types/Dictionary";
import ArrayUtil from "Common/Utils/Array";
import CopilotActionProp from "Common/Types/Copilot/CopilotActionProps/Index";
import BadDataException from "Common/Types/Exception/BadDataException";
import LocalFile from "Common/Server/Utils/LocalFile";
/**
 * Copilot action that asks the LLM to add OpenTelemetry spans to code files.
 *
 * Flow per file: split the file into ~500-word chunks, prompt the LLM to add
 * spans to each chunk, validate (via a second LLM call) that nothing besides
 * spans changed, and write the reassembled file into the process result.
 */
export default class AddSpans extends CopilotActionBase {
  // Set to true once the action has nothing left to do for the current file
  // (either spans were added, or all chunks were already instrumented).
  public isRequirementsMet: boolean = false;

  public constructor() {
    super();
    this.copilotActionType = CopilotActionType.ADD_SPANS;
    this.acceptFileExtentions = CodeRepositoryUtil.getCodeFileExtentions();
  }

  /**
   * The action is required only if it has never been run for this file
   * (keyed by service catalog + action type + file path).
   */
  protected override async isActionRequired(data: {
    serviceCatalogId: ObjectID;
    serviceRepositoryId: ObjectID;
    copilotActionProp: FileActionProp;
  }): Promise<boolean> {
    // check if the action has already been processed for this file.
    const existingAction: CopilotAction | null =
      await CopilotActionUtil.getExistingAction({
        serviceCatalogId: data.serviceCatalogId,
        actionType: this.copilotActionType,
        actionProps: {
          filePath: data.copilotActionProp.filePath, // has this action run on this file before?
        },
      });

    if (!existingAction) {
      return true;
    }

    return false;
  }

  /**
   * Picks up to `maxActionsToQueue` files (in random order) whose extension
   * is accepted and which have not been processed yet, and returns one
   * action prop per file.
   */
  public override async getActionPropsToQueue(data: {
    serviceCatalogId: ObjectID;
    serviceRepositoryId: ObjectID;
    maxActionsToQueue: number;
  }): Promise<Array<CopilotActionProp>> {
    // get files in the repo.
    // NOTE(review): log message says "improve comments" but this is the
    // ADD_SPANS action — likely copied from ImproveComments.
    logger.debug(
      `${this.copilotActionType} - Getting files to queue for improve comments.`,
    );

    let totalActionsToQueue: number = 0;

    logger.debug(`${this.copilotActionType} - Reading files in the service.`);

    const files: Dictionary<CodeRepositoryFile> =
      await ServiceRepositoryUtil.getFilesByServiceCatalogId({
        serviceCatalogId: data.serviceCatalogId,
      });

    logger.debug(
      `${this.copilotActionType} - Files read. ${Object.keys(files).length} files found.`,
    );

    // get keys in random order.
    let fileKeys: string[] = Object.keys(files);

    //randomize the order of the files so each run samples different files.
    fileKeys = ArrayUtil.shuffle(fileKeys);

    const actionsPropsQueued: Array<CopilotActionProp> = [];

    for (const fileKey of fileKeys) {
      // check if the file is in accepted file extentions.
      const fileExtention: string = LocalFile.getFileExtension(
        files[fileKey]!.filePath,
      );

      if (!this.acceptFileExtentions.includes(fileExtention)) {
        continue;
      }

      const file: CodeRepositoryFile = files[fileKey]!;

      logger.debug(
        `${this.copilotActionType} - Checking file: ${file.filePath}`,
      );

      if (
        await this.isActionRequired({
          serviceCatalogId: data.serviceCatalogId,
          serviceRepositoryId: data.serviceRepositoryId,
          copilotActionProp: {
            filePath: file.filePath,
          },
        })
      ) {
        actionsPropsQueued.push({
          filePath: file.filePath,
        });
        totalActionsToQueue++;
      }

      if (totalActionsToQueue >= data.maxActionsToQueue) {
        break;
      }
    }

    return actionsPropsQueued;
  }

  /** Commit message for the generated change. */
  public override async getCommitMessage(
    data: CopilotProcess,
  ): Promise<string> {
    return "Add Spans in " + (data.actionProp as FileActionProp).filePath;
  }

  /** Title of the pull request Copilot opens for this change. */
  public override async getPullRequestTitle(
    data: CopilotProcess,
  ): Promise<string> {
    return "Add spans in " + (data.actionProp as FileActionProp).filePath;
  }

  /** PR body: one-line summary plus the standard Copilot disclaimer. */
  public override async getPullRequestBody(
    data: CopilotProcess,
  ): Promise<string> {
    return `Add spans in ${(data.actionProp as FileActionProp).filePath}
${await this.getDefaultPullRequestBody()}
`;
  }

  /** The execute loop stops once isRequirementsMet has been flipped to true. */
  public override isActionComplete(_data: CopilotProcess): Promise<boolean> {
    return Promise.resolve(this.isRequirementsMet);
  }

  /**
   * One execution step: read the file, add spans chunk-by-chunk, and stage
   * the new content in data.result.files (written to disk by the base class).
   * If every chunk reports it was already instrumented, the file is left
   * untouched.
   *
   * @throws BadDataException when the action prop has no file path.
   */
  public override async onExecutionStep(
    data: CopilotProcess,
  ): Promise<CopilotProcess> {
    const filePath: string = (data.actionProp as FileActionProp).filePath;

    if (!filePath) {
      throw new BadDataException("File Path is not set in the action prop.");
    }

    const fileContent: string = await ServiceRepositoryUtil.getFileContent({
      filePath: filePath,
    });

    // Split into ~500-word chunks so each LLM request stays small.
    const codeParts: string[] = await this.splitInputCode({
      code: fileContent,
      itemSize: 500,
    });

    let newContent: string = "";

    // NOTE(review): despite the name, this flag tracks "every chunk was
    // ALREADY instrumented" (stays true only if no chunk needed changes).
    let hasSpansBeenAdded: boolean = true;

    for (const codePart of codeParts) {
      const codePartResult: {
        newCode: string;
        hasSpansBeenAdded: boolean;
      } = await this.addSpansInCode({
        data: data,
        codePart: codePart,
        currentRetryCount: 0,
        maxRetryCount: 3,
      });

      if (!codePartResult.hasSpansBeenAdded) {
        // Chunk needed instrumentation: keep the LLM's rewritten code.
        hasSpansBeenAdded = false;
        newContent += codePartResult.newCode + "\n";
      } else {
        // Chunk already had spans: keep the original text.
        newContent += codePart + "\n";
      }
    }

    if (hasSpansBeenAdded) {
      // Nothing changed anywhere in the file; mark done without writing.
      this.isRequirementsMet = true;
      return data;
    }

    newContent = newContent.trim();

    logger.debug("New Content:");
    logger.debug(newContent);

    const fileActionProps: FileActionProp = data.actionProp as FileActionProp;

    // add to result.
    data.result.files[fileActionProps.filePath] = {
      fileContent: newContent,
    } as CodeRepositoryFile;

    this.isRequirementsMet = true;

    return data;
  }

  /**
   * Validation passes when the validator LLM answers exactly "--no--",
   * i.e. nothing besides spans was changed.
   */
  private async didPassValidation(data: CopilotPromptResult): Promise<boolean> {
    const validationResponse: string = data.output as string;

    if (validationResponse === "--no--") {
      return true;
    }

    return false;
  }

  /** The add-spans prompt asks the LLM to reply "--all-good--" when spans already exist. */
  private async hasSpansBeenAddedAlready(content: string): Promise<boolean> {
    if (content.includes("--all-good--")) {
      return true;
    }

    return false;
  }

  /**
   * Asks the LLM to add spans to one code chunk, then validates (with a
   * second LLM call) that only spans changed. Retries the whole
   * prompt/validate cycle up to maxRetryCount times; if validation never
   * passes, the original chunk is returned unmodified.
   *
   * @returns newCode plus hasSpansBeenAdded=true when the chunk was already
   *          instrumented (no change needed).
   */
  private async addSpansInCode(options: {
    data: CopilotProcess;
    codePart: string;
    currentRetryCount: number;
    maxRetryCount: number;
  }): Promise<{
    newCode: string;
    hasSpansBeenAdded: boolean;
  }> {
    let hasSpansBeenAdded: boolean = true;

    const codePart: string = options.codePart;
    const data: CopilotProcess = options.data;

    const actionPrompt: CopilotActionPrompt = await this.getPrompt(
      data,
      codePart,
    );

    const copilotResult: CopilotPromptResult =
      await this.askCopilot(actionPrompt);

    // Strip markdown fences / surrounding prose from the LLM output.
    const newCodePart: string = await this.cleanupCode({
      inputCode: codePart,
      outputCode: copilotResult.output as string,
    });

    if (!(await this.hasSpansBeenAddedAlready(newCodePart))) {
      hasSpansBeenAdded = false;
    }

    const validationPrompt: CopilotActionPrompt =
      await this.getValidationPrompt({
        oldCode: codePart,
        newCode: newCodePart,
      });

    const validationResponse: CopilotPromptResult =
      await this.askCopilot(validationPrompt);

    const didPassValidation: boolean =
      await this.didPassValidation(validationResponse);

    if (
      !didPassValidation &&
      options.currentRetryCount < options.maxRetryCount
    ) {
      // Recursive retry with a bumped retry counter.
      return await this.addSpansInCode({
        data: data,
        codePart: codePart,
        currentRetryCount: options.currentRetryCount + 1,
        maxRetryCount: options.maxRetryCount,
      });
    }

    if (!didPassValidation) {
      // Exhausted retries: fall back to the untouched original chunk.
      return {
        newCode: codePart,
        hasSpansBeenAdded: false,
      };
    }

    return {
      newCode: newCodePart,
      hasSpansBeenAdded: hasSpansBeenAdded,
    };
  }

  /**
   * Builds the validator prompt: show old and new code and ask whether
   * anything other than spans changed ("--yes--" / "--no--").
   */
  private async getValidationPrompt(data: {
    oldCode: string;
    newCode: string;
  }): Promise<CopilotActionPrompt> {
    const oldCode: string = data.oldCode;
    const newCode: string = data.newCode;

    const prompt: string = `
I've asked to add open telemetry spans in the code.
This is the old code:
${oldCode}
----
This is the new code:
${newCode}
Was anything changed in the code except adding spans? If yes, please reply with the following text:
--yes--
If the code was NOT changed EXCEPT adding spans, please reply with the following text:
--no--
`;

    const systemPrompt: string = await this.getSystemPrompt();

    return {
      messages: [
        {
          content: systemPrompt,
          role: PromptRole.System,
        },
        {
          content: prompt,
          role: PromptRole.User,
        },
      ],
    };
  }

  /**
   * Builds the main prompt asking the LLM to add spans to the given chunk.
   * NOTE(review): fileLanguage is hard-coded to TypeScript (the per-file
   * language lookup is commented out) — confirm before relying on it.
   */
  public override async getPrompt(
    _data: CopilotProcess,
    inputCode: string,
  ): Promise<CopilotActionPrompt> {
    /*
     * const fileLanguage: TechStack = data.input.files[data.input.currentFilePath]
     * ?.fileLanguage as TechStack;
     */

    const fileLanguage: TechStack = TechStack.TypeScript;

    const prompt: string = `Please add OpenTelemetry spans in the code to functions and methods. If spans are already added, do not modify them.
If you think functions in the code already have spans, please reply with the following text:
--all-good--
Here is the code. This is in ${fileLanguage}:
${inputCode}
`;

    const systemPrompt: string = await this.getSystemPrompt();

    return {
      messages: [
        {
          content: systemPrompt,
          role: PromptRole.System,
        },
        {
          content: prompt,
          role: PromptRole.User,
        },
      ],
    };
  }

  /** System prompt shared by the add-spans and validation calls. */
  public async getSystemPrompt(): Promise<string> {
    const systemPrompt: string = `You are an expert programmer. Here are your instructions:
- You will follow the instructions given by the user strictly.
- You will not deviate from the instructions given by the user.
- You will not only add OpenTelemetry Spans in this code. You will not do anything else.`;

    return systemPrompt;
  }

  /**
   * Extracts the code from an LLM reply that may wrap it in markdown fences
   * and surrounding prose, then trims it to align with the first/last words
   * of the original input chunk.
   */
  public async cleanupCode(data: {
    inputCode: string;
    outputCode: string;
  }): Promise<string> {
    /*
     * The reply contains prose as well. The code is between ```<type> and ```;
     * extract just the code. For example the reply can be in the format of
     * ```python
     * print("Hello World")
     * ```
     */

    // so the code to be extracted is print("Hello World")
    // the code can be in multiple lines as well.

    let extractedCode: string = data.outputCode; // this is the code in the file

    if (extractedCode.includes("```")) {
      // Capture everything between the opening fence line and the closing fence.
      extractedCode = extractedCode.match(/```.*\n([\s\S]*?)```/)?.[1] ?? "";
    }

    // get first line of input code.
    const firstWordOfInputCode: string = Text.getFirstWord(data.inputCode);

    extractedCode = Text.trimStartUntilThisWord(
      extractedCode,
      firstWordOfInputCode,
    );

    const lastWordOfInputCode: string = Text.getLastWord(data.inputCode);

    extractedCode = Text.trimEndUntilThisWord(
      extractedCode,
      lastWordOfInputCode,
    );

    extractedCode = Text.trimUpQuotesFromStartAndEnd(extractedCode);

    // check for quotes.
    return extractedCode;
  }
}

View File

@@ -1,299 +0,0 @@
import NotImplementedException from "Common/Types/Exception/NotImplementedException";
import LlmType from "../../Types/LlmType";
import CopilotActionType from "Common/Types/Copilot/CopilotActionType";
import LLM from "../LLM/LLM";
import { GetLlmType } from "../../Config";
import Text from "Common/Types/Text";
import { CopilotPromptResult } from "../LLM/LLMBase";
import BadDataException from "Common/Types/Exception/BadDataException";
import logger from "Common/Server/Utils/Logger";
import CodeRepositoryUtil, { RepoScriptType } from "../../Utils/CodeRepository";
import CopilotActionProp from "Common/Types/Copilot/CopilotActionProps/Index";
import ObjectID from "Common/Types/ObjectID";
import {
CopilotActionPrompt,
CopilotProcess,
CopilotProcessStart,
} from "./Types";
/**
 * Base class for all Copilot actions (e.g. AddSpans, ImproveComments).
 *
 * Subclasses override the protected hooks (isActionRequired, getPrompt,
 * onExecutionStep, isActionComplete, PR/commit text getters); `execute`
 * drives the lifecycle: run repo hook scripts, loop execution steps until
 * complete, then write staged files to disk.
 *
 * Bug fix in this revision: `_getPrompt` previously called itself
 * (`this._getPrompt`) instead of `this.getPrompt`, which recursed forever
 * and overflowed the stack on first use. It now delegates to the subclass
 * `getPrompt` hook.
 */
export default class CopilotActionBase {
  // Which LLM backend to use; resolved from config in the constructor.
  public llmType: LlmType = LlmType.ONEUPTIME_LLM; // temp value which will be overridden in the constructor

  // Subclasses set their concrete action type in their constructors.
  public copilotActionType: CopilotActionType =
    CopilotActionType.IMPROVE_COMMENTS; // temp value which will be overridden in the constructor

  // File extensions this action is willing to process.
  public acceptFileExtentions: string[] = [];

  public constructor() {
    this.llmType = GetLlmType();
  }

  /** Hook: should this action run for the given target? Subclasses must override. */
  protected async isActionRequired(_data: {
    serviceCatalogId: ObjectID;
    serviceRepositoryId: ObjectID;
    copilotActionProp: CopilotActionProp;
  }): Promise<boolean> {
    throw new NotImplementedException();
  }

  /** Hook: produce the list of action props to queue. Subclasses must override. */
  public async getActionPropsToQueue(_data: {
    serviceCatalogId: ObjectID;
    serviceRepositoryId: ObjectID;
    maxActionsToQueue: number;
  }): Promise<Array<CopilotActionProp>> {
    throw new NotImplementedException();
  }

  /**
   * Sanity check before each execution step.
   * @throws BadDataException when the subclass never set its action type.
   */
  protected async validateExecutionStep(
    _data: CopilotProcess,
  ): Promise<boolean> {
    if (!this.copilotActionType) {
      throw new BadDataException("Copilot Action Type is not set");
    }

    // validate by default.
    return true;
  }

  /** Hook: run after the execution loop. Default: pass-through. */
  protected async onAfterExecute(
    data: CopilotProcess,
  ): Promise<CopilotProcess> {
    // do nothing
    return data;
  }

  /** Hook: run before the execution loop. Default: pass-through. */
  protected async onBeforeExecute(
    data: CopilotProcess,
  ): Promise<CopilotProcess> {
    // do nothing
    return data;
  }

  /**
   * Branch name for this action's changes: dashed action type plus a random
   * suffix, with any double dashes collapsed.
   */
  public async getBranchName(): Promise<string> {
    const randomText: string = Text.generateRandomText(5);
    const branchName: string = `${Text.pascalCaseToDashes(this.copilotActionType).toLowerCase()}-${randomText}`;
    // replace -- with - in the branch name
    return Text.replaceAll(branchName, "--", "-");
  }

  /** Hook: PR title. Subclasses must override. */
  public async getPullRequestTitle(_data: CopilotProcess): Promise<string> {
    throw new NotImplementedException();
  }

  /** Hook: PR body. Subclasses must override. */
  public async getPullRequestBody(_data: CopilotProcess): Promise<string> {
    throw new NotImplementedException();
  }

  /** Standard disclaimer appended to every Copilot PR body. */
  protected async getDefaultPullRequestBody(): Promise<string> {
    return `
#### Warning
This PR is generated by OneUptime Copilot. OneUptime Copilot is an AI tool that improves your code. Please do not rely on it completely. Always review the changes before merging.
#### Feedback
If you have any feedback or suggestions, please let us know. We would love to hear from you. Please contact us at copilot@oneuptime.com.
`;
  }

  /** Hook: commit message. Subclasses must override. */
  public async getCommitMessage(_data: CopilotProcess): Promise<string> {
    throw new NotImplementedException();
  }

  /** Hook: one unit of work. Default: no-op pass-through. */
  protected async onExecutionStep(
    data: CopilotProcess,
  ): Promise<CopilotProcess> {
    return Promise.resolve(data);
  }

  /** Hook: is the action finished? Default: finish after one step. */
  protected async isActionComplete(_data: CopilotProcess): Promise<boolean> {
    return true; // by default the action is completed
  }

  /** Hook: next file to process, or null when none. Default: null. */
  protected async getNextFilePath(
    _data: CopilotProcess,
  ): Promise<string | null> {
    return null;
  }

  /**
   * Drives the full action lifecycle:
   * 1. run on-before-code-change repo script (if configured),
   * 2. onBeforeExecute hook, then loop onExecutionStep until
   *    isActionComplete (bailing out if validateExecutionStep fails),
   * 3. onAfterExecute hook, write staged files to disk,
   * 4. run on-after-code-change repo script (if configured).
   *
   * @returns The process state, or early-returns it when validation fails.
   */
  public async execute(
    data: CopilotProcessStart,
  ): Promise<CopilotProcess | null> {
    logger.info(
      "Executing Copilot Action (this will take several minutes to complete): " +
        this.copilotActionType,
    );

    logger.info(data.actionProp);

    const onBeforeExecuteActionScript: string | null =
      await CodeRepositoryUtil.getRepoScript({
        scriptType: RepoScriptType.OnBeforeCodeChange,
      });

    if (!onBeforeExecuteActionScript) {
      logger.debug(
        "No on-before-copilot-action script found for this repository.",
      );
    } else {
      logger.info("Executing on-before-copilot-action script.");
      await CodeRepositoryUtil.executeScript({
        script: onBeforeExecuteActionScript,
      });
      logger.info("on-before-copilot-action script executed successfully");
    }

    const processData: CopilotProcess = await this.onBeforeExecute({
      ...data,
      result: {
        files: {},
        statusMessage: "",
        logs: [],
      },
    });

    // Guard against hooks that drop the result scaffold.
    if (!processData.result) {
      processData.result = {
        files: {},
        statusMessage: "",
        logs: [],
      };
    }

    if (!processData.result.files) {
      processData.result.files = {};
    }

    let isActionComplete: boolean = false;

    while (!isActionComplete) {
      if (!(await this.validateExecutionStep(processData))) {
        /*
         * execution step not valid
         * return data as it is
         */
        return processData;
      }

      // NOTE(review): the return value is assigned to `data` but the loop
      // keeps using `processData`; this works because hooks mutate
      // processData in place — confirm before refactoring.
      data = await this.onExecutionStep(processData);
      isActionComplete = await this.isActionComplete(processData);
    }

    data = await this.onAfterExecute(processData);

    // write to disk.
    await this.writeToDisk({ data: processData });

    const onAfterExecuteActionScript: string | null =
      await CodeRepositoryUtil.getRepoScript({
        scriptType: RepoScriptType.OnAfterCodeChange,
      });

    if (!onAfterExecuteActionScript) {
      logger.debug(
        "No on-after-copilot-action script found for this repository.",
      );
    }

    if (onAfterExecuteActionScript) {
      logger.info("Executing on-after-copilot-action script.");
      await CodeRepositoryUtil.executeScript({
        script: onAfterExecuteActionScript,
      });
      logger.info("on-after-copilot-action script executed successfully");
    }

    return processData;
  }

  /**
   * Wrapper around the subclass getPrompt hook.
   *
   * BUG FIX: this previously called `this._getPrompt(...)` — itself — which
   * recursed unconditionally until stack overflow. It now delegates to the
   * overridable `getPrompt` hook, which is clearly what was intended.
   */
  protected async _getPrompt(
    data: CopilotProcess,
    inputCode: string,
  ): Promise<CopilotActionPrompt | null> {
    const prompt: CopilotActionPrompt | null = await this.getPrompt(
      data,
      inputCode,
    );

    if (!prompt) {
      return null;
    }

    return prompt;
  }

  /** Hook: build the LLM prompt for one input chunk. Subclasses must override. */
  protected async getPrompt(
    _data: CopilotProcess,
    _inputCode: string,
  ): Promise<CopilotActionPrompt | null> {
    throw new NotImplementedException();
  }

  /** Sends a prompt to the configured LLM and returns its response. */
  protected async askCopilot(
    prompt: CopilotActionPrompt,
  ): Promise<CopilotPromptResult> {
    return await LLM.getResponse(prompt);
  }

  /** Writes every file staged in result.files to the working tree. */
  protected async writeToDisk(data: { data: CopilotProcess }): Promise<void> {
    // write all the modified files.
    const processResult: CopilotProcess = data.data;

    for (const filePath in processResult.result.files) {
      logger.info(`Writing file: ${filePath}`);
      logger.info(`File content: `);
      logger.info(`${processResult.result.files[filePath]!.fileContent}`);
      const code: string = processResult.result.files[filePath]!.fileContent;
      await CodeRepositoryUtil.writeToFile({
        filePath: filePath,
        content: code,
      });
    }
  }

  /** Discards all uncommitted changes on the current branch. */
  protected async discardAllChanges(): Promise<void> {
    await CodeRepositoryUtil.discardAllChangesOnCurrentBranch();
  }

  /**
   * Splits code into chunks of at most `itemSize` words, breaking only at
   * line boundaries, so each chunk fits comfortably in one LLM request.
   */
  protected async splitInputCode(data: {
    code: string;
    itemSize: number;
  }): Promise<string[]> {
    const inputCode: string = data.code;
    const items: Array<string> = [];

    const linesInInputCode: Array<string> = inputCode.split("\n");

    let currentItemSize: number = 0;
    const maxItemSize: number = data.itemSize;
    let currentItem: string = "";

    for (const line of linesInInputCode) {
      const words: Array<string> = line.split(" ");

      // check if the current item size is less than the max item size
      if (currentItemSize + words.length < maxItemSize) {
        currentItem += line + "\n";
        currentItemSize += words.length;
      } else {
        // start a new item
        items.push(currentItem);
        currentItem = line + "\n";
        currentItemSize = words.length;
      }
    }

    // Flush the trailing partial chunk.
    if (currentItem) {
      items.push(currentItem);
    }

    return items;
  }
}

View File

@@ -1,446 +0,0 @@
import CopilotActionType from "Common/Types/Copilot/CopilotActionType";
import CopilotActionBase from "./CopilotActionsBase";
import CodeRepositoryUtil from "../../Utils/CodeRepository";
import TechStack from "Common/Types/ServiceCatalog/TechStack";
import { CopilotPromptResult } from "../LLM/LLMBase";
import Text from "Common/Types/Text";
import { CopilotActionPrompt, CopilotProcess } from "./Types";
import { PromptRole } from "../LLM/Prompt";
import logger from "Common/Server/Utils/Logger";
import FileActionProp from "Common/Types/Copilot/CopilotActionProps/FileActionProp";
import CodeRepositoryFile from "Common/Server/Utils/CodeRepository/CodeRepositoryFile";
import CopilotActionUtil from "../../Utils/CopilotAction";
import ObjectID from "Common/Types/ObjectID";
import CopilotAction from "Common/Models/DatabaseModels/CopilotAction";
import ServiceRepositoryUtil from "../../Utils/ServiceRepository";
import Dictionary from "Common/Types/Dictionary";
import ArrayUtil from "Common/Utils/Array";
import CopilotActionProp from "Common/Types/Copilot/CopilotActionProps/Index";
import BadDataException from "Common/Types/Exception/BadDataException";
import LocalFile from "Common/Server/Utils/LocalFile";
export default class ImproveComments extends CopilotActionBase {
  // True once the file is confirmed well-commented, or its improved content
  // has been recorded in the process result.
  public isRequirementsMet: boolean = false;

  public constructor() {
    super();
    this.copilotActionType = CopilotActionType.IMPROVE_COMMENTS;
    // Only files with recognized code extensions are eligible.
    this.acceptFileExtentions = CodeRepositoryUtil.getCodeFileExtentions();
  }

  /**
   * Returns true when this action has not yet been run on the given file.
   */
  protected override async isActionRequired(data: {
    serviceCatalogId: ObjectID;
    serviceRepositoryId: ObjectID;
    copilotActionProp: FileActionProp;
  }): Promise<boolean> {
    // check if the action has already been processed for this file.
    const existingAction: CopilotAction | null =
      await CopilotActionUtil.getExistingAction({
        serviceCatalogId: data.serviceCatalogId,
        actionType: this.copilotActionType,
        actionProps: {
          filePath: data.copilotActionProp.filePath, // has this action run on this file before?
        },
      });

    if (!existingAction) {
      return true;
    }

    return false;
  }

  /**
   * Picks up to maxActionsToQueue files (visited in random order) from the
   * service's repository that have an accepted code extension and have not
   * been processed by this action before.
   */
  public override async getActionPropsToQueue(data: {
    serviceCatalogId: ObjectID;
    serviceRepositoryId: ObjectID;
    maxActionsToQueue: number;
  }): Promise<Array<CopilotActionProp>> {
    // get files in the repo.
    logger.debug(
      `${this.copilotActionType} - Getting files to queue for improve comments.`,
    );

    let totalActionsToQueue: number = 0;

    logger.debug(`${this.copilotActionType} - Reading files in the service.`);

    const files: Dictionary<CodeRepositoryFile> =
      await ServiceRepositoryUtil.getFilesByServiceCatalogId({
        serviceCatalogId: data.serviceCatalogId,
      });

    logger.debug(
      `${this.copilotActionType} - Files read. ${Object.keys(files).length} files found.`,
    );

    // get keys in random order.
    let fileKeys: string[] = Object.keys(files);

    //randomize the order of the files.
    fileKeys = ArrayUtil.shuffle(fileKeys);

    const actionsPropsQueued: Array<CopilotActionProp> = [];

    logger.debug(
      `${this.copilotActionType} - Accepted File Extentions: ${this.acceptFileExtentions}`,
    );

    for (const fileKey of fileKeys) {
      logger.debug(
        `${this.copilotActionType} - Checking file: ${files[fileKey]!.filePath}`,
      );

      // check if the file is in accepted file extentions.
      const fileExtention: string = LocalFile.getFileExtension(
        files[fileKey]!.filePath,
      );

      logger.debug(
        `${this.copilotActionType} - File Extention: ${fileExtention}`,
      );

      if (!this.acceptFileExtentions.includes(fileExtention)) {
        logger.debug(
          `${this.copilotActionType} - File is not in accepted file extentions. Skipping.`,
        );
        continue;
      }

      const file: CodeRepositoryFile = files[fileKey]!;

      logger.debug(
        `${this.copilotActionType} - Checking file: ${file.filePath}`,
      );

      if (
        await this.isActionRequired({
          serviceCatalogId: data.serviceCatalogId,
          serviceRepositoryId: data.serviceRepositoryId,
          copilotActionProp: {
            filePath: file.filePath,
          },
        })
      ) {
        actionsPropsQueued.push({
          filePath: file.filePath,
        });
        totalActionsToQueue++;
      }

      // Stop once the requested number of actions has been queued.
      if (totalActionsToQueue >= data.maxActionsToQueue) {
        break;
      }
    }

    return actionsPropsQueued;
  }

  public override async getCommitMessage(
    data: CopilotProcess,
  ): Promise<string> {
    return (
      "Improved comments on " + (data.actionProp as FileActionProp).filePath
    );
  }

  public override async getPullRequestTitle(
    data: CopilotProcess,
  ): Promise<string> {
    return (
      "Improved comments on " + (data.actionProp as FileActionProp).filePath
    );
  }

  public override async getPullRequestBody(
    data: CopilotProcess,
  ): Promise<string> {
    return `Improved comments on ${(data.actionProp as FileActionProp).filePath}
${await this.getDefaultPullRequestBody()}
`;
  }

  // Completion is tracked by the isRequirementsMet flag set in onExecutionStep.
  public override isActionComplete(_data: CopilotProcess): Promise<boolean> {
    return Promise.resolve(this.isRequirementsMet);
  }

  /**
   * Splits the file into chunks, asks the LLM to improve comments in each
   * chunk, and stores the reassembled content in the result when any chunk
   * was changed.
   */
  public override async onExecutionStep(
    data: CopilotProcess,
  ): Promise<CopilotProcess> {
    const filePath: string = (data.actionProp as FileActionProp).filePath;

    if (!filePath) {
      throw new BadDataException("File Path is not set in the action prop.");
    }

    const fileContent: string = await ServiceRepositoryUtil.getFileContent({
      filePath: filePath,
    });

    // Split the file into parts of roughly itemSize words each so every
    // chunk fits in the LLM prompt.
    const codeParts: string[] = await this.splitInputCode({
      code: fileContent,
      itemSize: 500,
    });

    let newContent: string = "";
    let isWellCommented: boolean = true;

    for (const codePart of codeParts) {
      const codePartResult: {
        newCode: string;
        isWellCommented: boolean;
      } = await this.commentCodePart({
        data: data,
        codePart: codePart,
        currentRetryCount: 0,
        maxRetryCount: 3,
      });

      if (!codePartResult.isWellCommented) {
        isWellCommented = false;
        newContent += codePartResult.newCode + "\n";
      } else {
        // Chunk was already well-commented — keep the original text.
        newContent += codePart + "\n";
      }
    }

    // Every chunk was already well-commented: nothing to record.
    if (isWellCommented) {
      this.isRequirementsMet = true;
      return data;
    }

    newContent = newContent.trim();

    logger.debug("New Content:");
    logger.debug(newContent);

    const fileActionProps: FileActionProp = data.actionProp as FileActionProp;

    // add to result.
    data.result.files[fileActionProps.filePath] = {
      fileContent: newContent,
    } as CodeRepositoryFile;

    this.isRequirementsMet = true;

    return data;
  }

  // The validation prompt asks the LLM to answer "--no--" when nothing but
  // comments changed, so "--no--" means validation passed.
  private async didPassValidation(data: CopilotPromptResult): Promise<boolean> {
    const validationResponse: string = data.output as string;

    if (validationResponse === "--no--") {
      return true;
    }

    return false;
  }

  // The improvement prompt asks the LLM to reply "--all-good--" when the code
  // already has adequate comments.
  private async isFileAlreadyWellCommented(content: string): Promise<boolean> {
    if (content.includes("--all-good--")) {
      return true;
    }

    return false;
  }

  /**
   * Asks the LLM to improve comments in one code chunk, then validates (via a
   * second LLM pass) that nothing besides comments changed. Retries up to
   * maxRetryCount times on validation failure; returns the original chunk
   * unchanged if validation never passes.
   */
  private async commentCodePart(options: {
    data: CopilotProcess;
    codePart: string;
    currentRetryCount: number;
    maxRetryCount: number;
  }): Promise<{
    newCode: string;
    isWellCommented: boolean;
  }> {
    let isWellCommented: boolean = true;

    const codePart: string = options.codePart;
    const data: CopilotProcess = options.data;

    const actionPrompt: CopilotActionPrompt = await this.getPrompt(
      data,
      codePart,
    );

    const copilotResult: CopilotPromptResult =
      await this.askCopilot(actionPrompt);

    // Strip markdown fences and surrounding text from the LLM output.
    const newCodePart: string = await this.cleanupCode({
      inputCode: codePart,
      outputCode: copilotResult.output as string,
    });

    if (!(await this.isFileAlreadyWellCommented(newCodePart))) {
      isWellCommented = false;
    }

    // Second LLM pass: confirm only comments were changed.
    const validationPrompt: CopilotActionPrompt =
      await this.getValidationPrompt({
        oldCode: codePart,
        newCode: newCodePart,
      });

    const validationResponse: CopilotPromptResult =
      await this.askCopilot(validationPrompt);

    const didPassValidation: boolean =
      await this.didPassValidation(validationResponse);

    if (
      !didPassValidation &&
      options.currentRetryCount < options.maxRetryCount
    ) {
      return await this.commentCodePart({
        data: data,
        codePart: codePart,
        currentRetryCount: options.currentRetryCount + 1,
        maxRetryCount: options.maxRetryCount,
      });
    }

    // Retries exhausted: fall back to the unmodified chunk.
    if (!didPassValidation) {
      return {
        newCode: codePart,
        isWellCommented: false,
      };
    }

    return {
      newCode: newCodePart,
      isWellCommented: isWellCommented,
    };
  }

  /**
   * Builds the prompt that asks the LLM whether anything other than comments
   * was changed between the old and new versions of a chunk.
   */
  private async getValidationPrompt(data: {
    oldCode: string;
    newCode: string;
  }): Promise<CopilotActionPrompt> {
    const oldCode: string = data.oldCode;
    const newCode: string = data.newCode;

    const prompt: string = `
I've asked to improve comments in the code.
This is the old code:
${oldCode}
----
This is the new code:
${newCode}
Was anything changed in the code except comments? If yes, please reply with the following text:
--yes--
If the code was NOT changed EXCEPT comments, please reply with the following text:
--no--
`;

    const systemPrompt: string = await this.getSystemPrompt();

    return {
      messages: [
        {
          content: systemPrompt,
          role: PromptRole.System,
        },
        {
          content: prompt,
          role: PromptRole.User,
        },
      ],
    };
  }

  /**
   * Builds the prompt that asks the LLM to improve comments in a code chunk.
   */
  public override async getPrompt(
    _data: CopilotProcess,
    inputCode: string,
  ): Promise<CopilotActionPrompt> {
    /*
     * const fileLanguage: TechStack = data.input.files[data.input.currentFilePath]
     * ?.fileLanguage as TechStack;
     */
    // NOTE(review): language is hard-coded to TypeScript (see the disabled
    // per-file lookup above) — confirm whether per-file language is intended.
    const fileLanguage: TechStack = TechStack.TypeScript;
    const prompt: string = `Please improve the comments in this code. Please only add minimal comments and comment code which is hard to understand. Please add comments in new line and do not add inline comments.
If you think the code is already well commented, please reply with the following text:
--all-good--
Here is the code. This is in ${fileLanguage}:
${inputCode}
`;

    const systemPrompt: string = await this.getSystemPrompt();

    return {
      messages: [
        {
          content: systemPrompt,
          role: PromptRole.System,
        },
        {
          content: prompt,
          role: PromptRole.User,
        },
      ],
    };
  }

  // System prompt shared by the improvement and validation prompts.
  public async getSystemPrompt(): Promise<string> {
    const systemPrompt: string = `You are an expert programmer. Here are your instructions:
- You will follow the instructions given by the user strictly.
- You will not deviate from the instructions given by the user.
- You will not change the code. You will only improve the comments.`;

    return systemPrompt;
  }

  /**
   * Extracts the code from the LLM response and trims it so it starts and
   * ends with the same words as the input chunk.
   */
  public async cleanupCode(data: {
    inputCode: string;
    outputCode: string;
  }): Promise<string> {
    /*
     * The response may contain prose as well. The code is between ```<type> and ```.
     * For example the response can be in the format of
     * ```python
     * print("Hello World")
     * ```
     */
    // so the code to be extracted is print("Hello World")
    // the code can be in multiple lines as well.
    let extractedCode: string = data.outputCode; // this is the code in the file

    if (extractedCode.includes("```")) {
      // Take the contents of the first fenced block.
      extractedCode = extractedCode.match(/```.*\n([\s\S]*?)```/)?.[1] ?? "";
    }

    // get first line of input code.
    const firstWordOfInputCode: string = Text.getFirstWord(data.inputCode);

    // Drop any leading prose before the code actually starts.
    extractedCode = Text.trimStartUntilThisWord(
      extractedCode,
      firstWordOfInputCode,
    );

    const lastWordOfInputCode: string = Text.getLastWord(data.inputCode);

    // Drop any trailing prose after the code ends.
    extractedCode = Text.trimEndUntilThisWord(
      extractedCode,
      lastWordOfInputCode,
    );

    extractedCode = Text.trimUpQuotesFromStartAndEnd(extractedCode);

    // check for quotes.
    return extractedCode;
  }
}

View File

@@ -1,227 +0,0 @@
import CopilotActionType from "Common/Types/Copilot/CopilotActionType";
import ImproveComments from "./ImproveComments";
import Dictionary from "Common/Types/Dictionary";
import CopilotActionBase from "./CopilotActionsBase";
import BadDataException from "Common/Types/Exception/BadDataException";
import CodeRepositoryUtil, { RepoScriptType } from "../../Utils/CodeRepository";
import ServiceCopilotCodeRepository from "Common/Models/DatabaseModels/ServiceCopilotCodeRepository";
import PullRequest from "Common/Types/CodeRepository/PullRequest";
import CopilotAction from "Common/Models/DatabaseModels/CopilotAction";
import ObjectID from "Common/Types/ObjectID";
import CopilotActionStatus from "Common/Types/Copilot/CopilotActionStatus";
import logger from "Common/Server/Utils/Logger";
import CopilotPullRequest from "Common/Models/DatabaseModels/CopilotPullRequest";
import CopilotPullRequestService from "../CopilotPullRequest";
import CopilotActionUtil from "../../Utils/CopilotAction";
import { CopilotProcess } from "./Types";
// import AddSpans from "./AddSpan";
// Registry mapping each CopilotActionType to the class that implements it.
// New action implementations must be added here to be executable.
export const ActionDictionary: Dictionary<typeof CopilotActionBase> = {
  [CopilotActionType.IMPROVE_COMMENTS]: ImproveComments,
  // [CopilotActionType.ADD_SPANS]: AddSpans,
};
// Outcome of executing one Copilot action.
export interface CopilotExecutionResult {
  status: CopilotActionStatus;
  // Pull request created for the changes, or null when no PR was opened.
  pullRequest: PullRequest | null;
}
export default class CopilotActionService {
  /**
   * Runs a single Copilot action against a service repository: resets the
   * working tree to a clean, up-to-date main branch, executes the action,
   * and — if the action produced modified files — commits them on a new
   * branch and opens a pull request. The final status is persisted back to
   * the database in all cases.
   *
   * @param data.serviceRepository - The service repository the action runs on.
   * @param data.copilotAction - The queued action to execute.
   * @returns Execution status and the created pull request (if any).
   * @throws BadDataException when the action type has no registered implementation.
   */
  public static async executeAction(data: {
    serviceRepository: ServiceCopilotCodeRepository;
    copilotAction: CopilotAction;
  }): Promise<CopilotExecutionResult> {
    // Start from a clean, up-to-date main branch.
    await CodeRepositoryUtil.discardAllChangesOnCurrentBranch();
    await CodeRepositoryUtil.switchToMainBranch();
    await CodeRepositoryUtil.pullChanges();

    const ActionType: typeof CopilotActionBase | undefined =
      ActionDictionary[data.copilotAction.copilotActionType!];

    if (!ActionType) {
      throw new BadDataException("Invalid CopilotActionType");
    }

    const action: CopilotActionBase = new ActionType() as CopilotActionBase;

    // Mark this action as processing before starting work.
    await CopilotActionUtil.updateCopilotAction({
      actionStatus: CopilotActionStatus.PROCESSING,
      actionId: data.copilotAction.id!,
    });

    const processResult: CopilotProcess | null = await action.execute({
      actionProp: data.copilotAction.copilotActionProp!,
    });

    let executionResult: CopilotExecutionResult = {
      status: CopilotActionStatus.NO_ACTION_REQUIRED,
      pullRequest: null,
    };

    let pullRequest: PullRequest | null = null;

    if (
      processResult &&
      processResult.result &&
      processResult.result.files &&
      Object.keys(processResult.result.files).length > 0
    ) {
      logger.info("Obtained result from Copilot Action");
      logger.info("Committing the changes to the repository and creating a PR");

      const branchName: string = CodeRepositoryUtil.getBranchName({
        branchName: await action.getBranchName(),
      });

      // Create a branch for the changes.
      await CodeRepositoryUtil.createBranch({
        branchName: branchName,
      });

      // Paths of every file the action modified.
      const filePaths: string[] = Object.keys(processResult.result.files);

      // Run the on-before-commit script. This is the place where tests run.
      const onBeforeCommitScript: string | null =
        await CodeRepositoryUtil.getRepoScript({
          scriptType: RepoScriptType.OnBeforeCommit,
        });

      if (!onBeforeCommitScript) {
        logger.debug("No on-before-commit script found for this repository.");
      } else {
        logger.info("Executing on-before-commit script.");
        await CodeRepositoryUtil.executeScript({
          script: onBeforeCommitScript,
        });
        logger.info("on-before-commit script executed successfully.");
      }

      const commitMessage: string =
        await action.getCommitMessage(processResult);

      const onAfterCommitScript: string | null =
        await CodeRepositoryUtil.getRepoScript({
          scriptType: RepoScriptType.OnAfterCommit,
        });

      if (!onAfterCommitScript) {
        logger.debug("No on-after-commit script found for this repository.");
      }

      // NOTE(review): this script executes before the actual git commit
      // below despite its "after commit" name — confirm the ordering is
      // intentional.
      if (onAfterCommitScript) {
        logger.info("Executing on-after-commit script.");
        await CodeRepositoryUtil.executeScript({
          script: onAfterCommitScript,
        });
        logger.info("on-after-commit script executed successfully.");
      }

      // Stage the modified files.
      logger.info("Adding files to stage: ");

      for (const filePath of filePaths) {
        logger.info(`- ${filePath}`);
      }

      await CodeRepositoryUtil.addFilesToGit({
        filePaths: filePaths,
      });

      // Commit the staged changes.
      logger.info("Committing changes");
      await CodeRepositoryUtil.commitChanges({
        message: commitMessage,
      });

      // Push the branch to the remote.
      logger.info("Pushing changes");
      await CodeRepositoryUtil.pushChanges({
        branchName: branchName,
      });

      // Open a pull request for the pushed branch.
      logger.info("Creating a PR");
      pullRequest = await CodeRepositoryUtil.createPullRequest({
        branchName: branchName,
        title: await action.getPullRequestTitle(processResult),
        body: await action.getPullRequestBody(processResult),
      });

      // Return the working tree to the main branch.
      logger.info("Switching to main branch");
      await CodeRepositoryUtil.switchToMainBranch();

      logger.info("Saving the result to the database");
      executionResult = {
        status: CopilotActionStatus.PR_CREATED,
        pullRequest: pullRequest,
      };
    } else {
      // No files were modified, so there is nothing to commit.
      logger.info("No result obtained from Copilot Action");
    }

    const getCurrentCommitHash: string =
      await CodeRepositoryUtil.getCurrentCommitHash();

    // Persist the outcome (status, PR, logs) to the database.
    await CopilotActionService.updateCopilotAction({
      serviceCatalogId: data.serviceRepository.serviceCatalog!.id!,
      serviceRepositoryId: data.serviceRepository.id!,
      commitHash: getCurrentCommitHash,
      pullRequest: pullRequest,
      copilotActionStatus: executionResult.status,
      copilotActionId: data.copilotAction.id!,
      statusMessage: processResult?.result.statusMessage || "",
      logs: processResult?.result.logs || [],
    });

    return executionResult;
  }

  /**
   * Persists the action outcome: registers the pull request (when one was
   * created) and updates the action's status, commit hash, message, and logs.
   */
  private static async updateCopilotAction(data: {
    copilotActionId: ObjectID; // renamed from the misspelled "copilotActonId"
    serviceCatalogId: ObjectID;
    serviceRepositoryId: ObjectID;
    commitHash: string;
    pullRequest: PullRequest | null;
    statusMessage: string;
    logs: Array<string>;
    copilotActionStatus: CopilotActionStatus;
  }): Promise<void> {
    // Register the pull request in the database first so the action can
    // reference it.
    let copilotPullRequest: CopilotPullRequest | null = null;

    if (data.pullRequest) {
      copilotPullRequest =
        await CopilotPullRequestService.addPullRequestToDatabase({
          pullRequest: data.pullRequest,
          serviceCatalogId: data.serviceCatalogId,
          serviceRepositoryId: data.serviceRepositoryId,
        });
    }

    await CopilotActionUtil.updateCopilotAction({
      actionStatus: data.copilotActionStatus,
      pullRequestId: copilotPullRequest ? copilotPullRequest.id! : undefined,
      commitHash: data.commitHash,
      statusMessage: data.statusMessage,
      logs: data.logs,
      actionId: data.copilotActionId,
    });
  }
}

View File

@@ -1,28 +0,0 @@
import CodeRepositoryFile from "Common/Server/Utils/CodeRepository/CodeRepositoryFile";
import Dictionary from "Common/Types/Dictionary";
import { Prompt } from "../LLM/Prompt";
import CopilotActionProp from "Common/Types/Copilot/CopilotActionProps/Index";
// Output of a Copilot action run: modified files plus status and logs.
export interface CopilotActionRunResult {
  // Modified files, keyed by file path.
  files: Dictionary<CodeRepositoryFile>;
  statusMessage: string;
  logs: Array<string>;
}

// A prompt (ordered list of chat messages) sent to the LLM.
export interface CopilotActionPrompt {
  messages: Array<Prompt>;
  // Optional polling timeout; backends fall back to their own default.
  timeoutInMinutes?: number | undefined;
}

export interface CopilotActionVars {
  currentFilePath: string;
  files: Dictionary<CodeRepositoryFile>;
}

// Input needed to start processing an action.
export interface CopilotProcessStart {
  actionProp: CopilotActionProp;
}

// An action in progress: the start input plus the accumulated result.
export interface CopilotProcess extends CopilotProcessStart {
  result: CopilotActionRunResult;
}

View File

@@ -1,146 +0,0 @@
import BadDataException from "Common/Types/Exception/BadDataException";
import PullRequest from "Common/Types/CodeRepository/PullRequest";
import ObjectID from "Common/Types/ObjectID";
import URL from "Common/Types/API/URL";
import { GetOneUptimeURL, GetRepositorySecretKey } from "../Config";
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import { JSONObject } from "Common/Types/JSON";
import API from "Common/Utils/API";
import CopilotPullRequest from "Common/Models/DatabaseModels/CopilotPullRequest";
import CodeRepositoryUtil from "../Utils/CodeRepository";
import PullRequestState from "Common/Types/CodeRepository/PullRequestState";
export default class CopilotPullRequestService {
  /**
   * Builds the URL for a CopilotPullRequest API route, authenticated with
   * this repository's secret key. Extracted because the same construction
   * was repeated in every method of this class.
   */
  private static getApiUrl(routeName: string): URL {
    return URL.fromString(GetOneUptimeURL().toString() + "/api").addRoute(
      `${new CopilotPullRequest()
        .getCrudApiPath()
        ?.toString()}/${routeName}/${GetRepositorySecretKey()}`,
    );
  }

  /**
   * Fetches the current state of the pull request from the code-repository
   * provider and persists it to the OneUptime API.
   *
   * @returns The refreshed pull request state.
   * @throws BadDataException when required ids are missing.
   * @throws HTTPErrorResponse when the API call fails.
   */
  public static async refreshPullRequestStatus(data: {
    copilotPullRequest: CopilotPullRequest;
  }): Promise<PullRequestState> {
    if (!data.copilotPullRequest.pullRequestId) {
      throw new BadDataException("Pull Request ID not found");
    }

    if (!data.copilotPullRequest.id) {
      throw new BadDataException("Copilot Pull Request ID not found");
    }

    const currentState: PullRequestState =
      await CodeRepositoryUtil.getPullRequestState({
        pullRequestId: data.copilotPullRequest.pullRequestId,
      });

    // update the status of the pull request in the database.
    const url: URL = this.getApiUrl("update-pull-request-status");

    const codeRepositoryResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
      await API.post({
        url: url,
        data: {
          copilotPullRequestId: data.copilotPullRequest.id?.toString(),
          copilotPullRequestStatus: currentState,
        },
      });

    if (codeRepositoryResult instanceof HTTPErrorResponse) {
      throw codeRepositoryResult;
    }

    return currentState;
  }

  /**
   * Fetches all Copilot pull requests that are still pending from the
   * OneUptime API.
   *
   * @throws HTTPErrorResponse when the API call fails.
   */
  public static async getOpenPullRequestsFromDatabase(): Promise<
    Array<CopilotPullRequest>
  > {
    // send this to the API.
    const url: URL = this.getApiUrl("get-pending-pull-requests");

    const codeRepositoryResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
      await API.get({
        url: url,
      });

    if (codeRepositoryResult instanceof HTTPErrorResponse) {
      throw codeRepositoryResult;
    }

    const copilotPullRequestsJsonArray: Array<JSONObject> = codeRepositoryResult
      .data["copilotPullRequests"] as Array<JSONObject>;

    return CopilotPullRequest.fromJSONArray(
      copilotPullRequestsJsonArray,
      CopilotPullRequest,
    ) as Array<CopilotPullRequest>;
  }

  /**
   * Registers a newly-created pull request in the OneUptime database and
   * returns the saved record.
   *
   * @throws BadDataException when the pull request has no number.
   * @throws HTTPErrorResponse when the API call fails.
   */
  public static async addPullRequestToDatabase(data: {
    pullRequest: PullRequest;
    serviceCatalogId?: ObjectID | undefined;
    serviceRepositoryId?: ObjectID | undefined;
    isSetupPullRequest?: boolean | undefined;
  }): Promise<CopilotPullRequest> {
    let copilotPullRequest: CopilotPullRequest | null = null;

    if (data.pullRequest && data.pullRequest.pullRequestNumber) {
      copilotPullRequest = new CopilotPullRequest();
      copilotPullRequest.pullRequestId =
        data.pullRequest.pullRequestNumber.toString();
      // New pull requests always start in the Open state.
      copilotPullRequest.copilotPullRequestStatus = PullRequestState.Open;

      if (data.serviceCatalogId) {
        copilotPullRequest.serviceCatalogId = data.serviceCatalogId;
      }

      if (data.isSetupPullRequest) {
        copilotPullRequest.isSetupPullRequest = data.isSetupPullRequest;
      }

      if (data.serviceRepositoryId) {
        copilotPullRequest.serviceRepositoryId = data.serviceRepositoryId;
      }

      // send this to the API.
      const url: URL = this.getApiUrl("add-pull-request");

      const codeRepositoryResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
        await API.post({
          url: url,
          data: {
            copilotPullRequest: CopilotPullRequest.toJSON(
              copilotPullRequest,
              CopilotPullRequest,
            ),
          },
        });

      if (codeRepositoryResult instanceof HTTPErrorResponse) {
        throw codeRepositoryResult;
      }

      copilotPullRequest = CopilotPullRequest.fromJSON(
        codeRepositoryResult.data,
        CopilotPullRequest,
      ) as CopilotPullRequest;

      return copilotPullRequest;
    }

    throw new BadDataException("Pull Request Number not found");
  }
}

View File

@@ -1,24 +0,0 @@
import BadDataException from "Common/Types/Exception/BadDataException";
import { GetLlmType } from "../../Config";
import LlmType from "../../Types/LlmType";
import LlmBase, { CopilotPromptResult } from "./LLMBase";
import LLMServer from "./LLMServer";
import OpenAI from "./OpenAI";
import { CopilotActionPrompt } from "../CopilotActions/Types";
/**
 * Facade that dispatches a prompt to the configured LLM backend.
 */
export default class LLM extends LlmBase {
  public static override async getResponse(
    data: CopilotActionPrompt,
  ): Promise<CopilotPromptResult> {
    // Route to the backend selected in the configuration.
    switch (GetLlmType()) {
      case LlmType.ONEUPTIME_LLM:
        return await LLMServer.getResponse(data);
      case LlmType.OpenAI:
        return await OpenAI.getResponse(data);
      default:
        throw new BadDataException("Invalid LLM type");
    }
  }
}

View File

@@ -1,15 +0,0 @@
import NotImplementedException from "Common/Types/Exception/NotImplementedException";
import { JSONValue } from "Common/Types/JSON";
import { CopilotActionPrompt } from "../CopilotActions/Types";
// Result of an LLM prompt; `output` holds the model's raw response.
export interface CopilotPromptResult {
  output: JSONValue;
}

/**
 * Abstract base class for LLM backends. Concrete providers override
 * getResponse; calling the base implementation always throws.
 */
export default class LlmBase {
  public static async getResponse(
    _data: CopilotActionPrompt,
  ): Promise<CopilotPromptResult> {
    throw new NotImplementedException();
  }
}

View File

@@ -1,152 +0,0 @@
import URL from "Common/Types/API/URL";
import { GetLlmServerUrl } from "../../Config";
import LlmBase, { CopilotPromptResult } from "./LLMBase";
import API from "Common/Utils/API";
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import { JSONArray, JSONObject } from "Common/Types/JSON";
import BadRequestException from "Common/Types/Exception/BadRequestException";
import Sleep from "Common/Types/Sleep";
import logger from "Common/Server/Utils/Logger";
import ErrorGettingResponseFromLLM from "../../Exceptions/ErrorGettingResponseFromLLM";
import BadOperationException from "Common/Types/Exception/BadOperationException";
import OneUptimeDate from "Common/Types/Date";
import LLMTimeoutException from "../../Exceptions/LLMTimeoutException";
import { CopilotActionPrompt } from "../CopilotActions/Types";
import { Prompt } from "./Prompt";
// Status values the Llama server reports for a submitted prompt.
enum LlamaPromptStatus {
  Processed = "processed",
  NotFound = "not found",
  Pending = "pending",
}
export default class Llama extends LlmBase {
  /**
   * Submits the prompt to the OneUptime LLM (Llama) server, polls until the
   * server reports it processed, and returns the generated text.
   *
   * Fix: the polling loop previously re-declared `response` and `result`,
   * shadowing the submission variables of the same names; the inner
   * variables are renamed to make the two phases unambiguous.
   *
   * @throws LLMTimeoutException when polling exceeds data.timeoutInMinutes (default 5).
   * @throws ErrorGettingResponseFromLLM when the server loses the prompt.
   * @throws BadOperationException when the server reports an error string.
   * @throws BadRequestException when no usable output is returned.
   */
  public static override async getResponse(
    data: CopilotActionPrompt,
  ): Promise<CopilotPromptResult> {
    const serverUrl: URL = GetLlmServerUrl();

    // Phase 1: submit the prompt. The server replies with an id to poll.
    const submitResponse: HTTPErrorResponse | HTTPResponse<JSONObject> =
      await API.post<JSONObject>({
        url: URL.fromString(serverUrl.toString()).addRoute("/prompt/"),
        data: {
          messages: data.messages.map((message: Prompt) => {
            return {
              content: message.content,
              role: message.role,
            };
          }),
          // secretkey: GetRepositorySecretKey(),
        },
        headers: {},
        options: {
          retries: 3,
          exponentialBackoff: true,
        },
      });

    if (submitResponse instanceof HTTPErrorResponse) {
      throw submitResponse;
    }

    const submitResult: JSONObject = submitResponse.data;
    const idOfPrompt: string = submitResult["id"] as string;

    if (submitResult["error"] && typeof submitResult["error"] === "string") {
      throw new BadOperationException(submitResult["error"]);
    }

    // Phase 2: poll for the prompt result until processed or timed out.
    let promptStatus: LlamaPromptStatus = LlamaPromptStatus.Pending;
    let promptResult: JSONObject | null = null;

    const startTime: Date = OneUptimeDate.getCurrentDate();
    const timeoutInMinutes: number = data.timeoutInMinutes || 5;

    while (promptStatus === LlamaPromptStatus.Pending) {
      const timeNow: Date = OneUptimeDate.getCurrentDate();

      if (
        OneUptimeDate.getDifferenceInMinutes(timeNow, startTime) >
        timeoutInMinutes
      ) {
        throw new LLMTimeoutException(
          `Timeout of ${timeoutInMinutes} minutes exceeded. Skipping the prompt.`,
        );
      }

      const pollResponse: HTTPErrorResponse | HTTPResponse<JSONObject> =
        await API.post<JSONObject>({
          url: URL.fromString(serverUrl.toString()).addRoute(`/prompt-result/`),
          data: {
            id: idOfPrompt,
            // secretkey: GetRepositorySecretKey(),
          },
          headers: {},
          options: {
            retries: 3,
            exponentialBackoff: true,
          },
        });

      if (pollResponse instanceof HTTPErrorResponse) {
        throw pollResponse;
      }

      if (
        pollResponse.data["error"] &&
        typeof pollResponse.data["error"] === "string"
      ) {
        throw new BadOperationException(pollResponse.data["error"]);
      }

      const pollResult: JSONObject = pollResponse.data;
      promptStatus = pollResult["status"] as LlamaPromptStatus;

      if (promptStatus === LlamaPromptStatus.Processed) {
        logger.debug("Prompt is processed");
        promptResult = pollResult;
      } else if (promptStatus === LlamaPromptStatus.NotFound) {
        throw new ErrorGettingResponseFromLLM("Error processing prompt");
      } else if (promptStatus === LlamaPromptStatus.Pending) {
        logger.debug("Prompt is still pending. Waiting for 1 second");
        await Sleep.sleep(1000);
      }
    }

    if (!promptResult) {
      throw new BadRequestException("Failed to get response from Llama server");
    }

    // The server may wrap the payload in an "output" array — unwrap it.
    if (
      promptResult["output"] &&
      (promptResult["output"] as JSONArray).length > 0
    ) {
      promptResult = (promptResult["output"] as JSONArray)[0] as JSONObject;
    }

    // "generated_text" is a list of chat messages; the last entry holds the
    // model's reply.
    if (promptResult && (promptResult as JSONObject)["generated_text"]) {
      const arrayOfGeneratedText: JSONArray = (promptResult as JSONObject)[
        "generated_text"
      ] as JSONArray;

      const lastItem: JSONObject = arrayOfGeneratedText[
        arrayOfGeneratedText.length - 1
      ] as JSONObject;

      if (lastItem["content"]) {
        return {
          output: lastItem["content"] as string,
        };
      }
    }

    throw new BadRequestException("Failed to get response from Llama server");
  }
}

View File

@@ -1,49 +0,0 @@
import OpenAI from "openai";
import { GetOpenAIAPIKey, GetOpenAIModel } from "../../Config";
import LlmBase, { CopilotPromptResult } from "./LLMBase";
import BadRequestException from "Common/Types/Exception/BadRequestException";
import { CopilotActionPrompt } from "../CopilotActions/Types";
import logger from "Common/Server/Utils/Logger";
export default class Llama extends LlmBase {
public static openai: OpenAI | null = null;
public static override async getResponse(
data: CopilotActionPrompt,
): Promise<CopilotPromptResult> {
if (!GetOpenAIAPIKey() || !GetOpenAIModel()) {
throw new BadRequestException("OpenAI API Key or Model is not set");
}
if (!this.openai) {
this.openai = new OpenAI({
apiKey: GetOpenAIAPIKey() as string,
});
}
logger.debug("Getting response from OpenAI");
const chatCompletion: OpenAI.Chat.Completions.ChatCompletion =
await this.openai.chat.completions.create({
messages: data.messages,
model: GetOpenAIModel()!,
});
logger.debug("Got response from OpenAI");
if (
chatCompletion.choices.length > 0 &&
chatCompletion.choices[0]?.message?.content
) {
const response: string = chatCompletion.choices[0]!.message.content;
logger.debug(`Response from OpenAI: ${response}`);
return {
output: response,
};
}
throw new BadRequestException("Failed to get response from OpenAI server");
}
}

View File

@@ -1,10 +0,0 @@
// Role of a message in an LLM chat conversation (OpenAI-style roles).
export enum PromptRole {
  System = "system",
  User = "user",
  Assistant = "assistant",
}

// A single chat message sent to the LLM.
export interface Prompt {
  content: string;
  role: PromptRole;
}

View File

@@ -1,15 +0,0 @@
## OneUptime Copilot
This folder contains the configuration files for the OneUptime Copilot. The Copilot is a tool that automatically improves your code. It can fix issues, improve code quality, and help you ship faster.
This folder has the following structure:
- `config.js`: The configuration file for the Copilot. You can customize the Copilot's behavior by changing this file.
- `scripts`: A folder containing scripts that the Copilot runs. These are hooks that run at different stages of the Copilot's process.
- `on-after-clone.sh`: A script that runs after the Copilot clones your repository.
- `on-before-code-change.sh`: A script that runs before the Copilot makes changes to your code.
- `on-after-code-change.sh`: A script that runs after the Copilot makes changes to your code.
- `on-before-commit.sh`: A script that runs before the Copilot commits changes to your repository.
- `on-after-commit.sh`: A script that runs after the Copilot commits changes to your repository.

View File

@@ -1,10 +0,0 @@
// This is the configuration file for the oneuptime copilot.
/**
 * Builds the configuration object for the OneUptime Copilot.
 * @returns {{schemaVersion: string}} The Copilot configuration.
 */
function getCopilotConfig() {
  const config = {
    // The version of the schema for this configuration file.
    schemaVersion: '1.0',
  };
  return config;
}

export default getCopilotConfig;

View File

@@ -1,13 +0,0 @@
# Description: Copilot clones your repository to improve your code.
# This script runs after the clone process is completed.
# Some of the common tasks you can do here are:
# 1. Install dependencies
# 2. Run linting
# 3. Run tests
# 4. Run build
# 5. Run any other command that you want to run after the clone process is completed.
# If this script fails, copilot will not proceed with the next steps to improve your code.
# This step is to ensure that the code is in a good state before we start improving it.
# If you want to skip this script, you can keep this file empty.
# It's highly recommended to run linting and tests in this script to ensure the code is in a good state.
# This script will run on an Ubuntu machine. So, make sure the commands you run are compatible with Ubuntu.

View File

@@ -1,11 +0,0 @@
# Description: Copilot will run this script after we make improvements to your code and write it to disk.
# Some of the common tasks you can do here are:
# 1. Run linting
# 2. Run tests
# 3. Run build
# 4. Run any other command that you want to run after the code is changed.
# If this script fails, copilot will not commit the changes to your repository.
# This step is to ensure that the code is in a good state before we commit the changes.
# If you want to skip this script, you can keep this file empty.
# It's highly recommended to run linting and tests in this script to ensure the code is in a good state.
# This script will run on an Ubuntu machine. So, make sure the commands you run are compatible with Ubuntu.

View File

@@ -1 +0,0 @@
# Description: Copilot will run this script after the commit process is completed.

View File

@@ -1,9 +0,0 @@
# Description: Copilot will run this script before we make changes to your code.
# Some of the common tasks you can do here are:
# 1. Install dependencies
# 2. Run any other command that you want to run before the code is changed.
# If this script fails, copilot will not make any changes to the code.
# This step is to ensure that the code is in a good state before we start making changes.
# If you want to skip this script, you can keep this file empty.
# It's highly recommended to run things like installing dependencies in this script.
# This script will run on an Ubuntu machine. So, make sure the commands you run are compatible with Ubuntu.

View File

@@ -1 +0,0 @@
# Description: Copilot will run this script before we commit the changes to your repository.

View File

@@ -1,6 +0,0 @@
// Which LLM backend the Copilot sends prompts to.
enum LlmType {
  ONEUPTIME_LLM = "OneUptime LLM Server", // OneUptime custom LLM Server
  OpenAI = "OpenAI",
}

export default LlmType;

View File

@@ -1,822 +0,0 @@
import {
GetCodeRepositoryPassword,
GetCodeRepositoryUsername,
GetLocalRepositoryPath,
GetOneUptimeURL,
GetRepositorySecretKey,
} from "../Config";
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import URL from "Common/Types/API/URL";
import CodeRepositoryType from "Common/Types/CodeRepository/CodeRepositoryType";
import PullRequest from "Common/Types/CodeRepository/PullRequest";
import PullRequestState from "Common/Types/CodeRepository/PullRequestState";
import BadDataException from "Common/Types/Exception/BadDataException";
import { JSONArray, JSONObject } from "Common/Types/JSON";
import API from "Common/Utils/API";
import CodeRepositoryServerUtil from "Common/Server/Utils/CodeRepository/CodeRepository";
import GitHubUtil from "Common/Server/Utils/CodeRepository/GitHub/GitHub";
import LocalFile from "Common/Server/Utils/LocalFile";
import logger from "Common/Server/Utils/Logger";
import CopilotCodeRepository from "Common/Models/DatabaseModels/CopilotCodeRepository";
import ServiceCopilotCodeRepository from "Common/Models/DatabaseModels/ServiceCopilotCodeRepository";
import Text from "Common/Types/Text";
import Execute from "Common/Server/Utils/Execute";
import CopilotPullRequestService from "../Service/CopilotPullRequest";
import CopilotPullRequest from "Common/Models/DatabaseModels/CopilotPullRequest";
// Repository configuration plus the services attached to it, as returned by
// the OneUptime API (see CodeRepositoryUtil.getCodeRepositoryResult).
export interface CodeRepositoryResult {
  codeRepository: CopilotCodeRepository;
  serviceRepositories: Array<ServiceCopilotCodeRepository>;
}
// A service selected for code improvement, together with its currently-open
// copilot pull requests and their count.
export interface ServiceToImproveResult {
  serviceRepository: ServiceCopilotCodeRepository;
  numberOfOpenPullRequests: number;
  pullRequests: Array<CopilotPullRequest>;
}
// Lifecycle hooks for user-provided scripts stored under .oneuptime/scripts.
// Enum values are converted to dashed file names via Text.fromPascalCaseToDashes
// in CodeRepositoryUtil.getRepoScript, so the inconsistent casing below
// ("onAfterClone"/"onBeforeCommit" lower camel vs. the Pascal-cased rest)
// presumably yields differently-cased script file names — TODO confirm before
// normalizing; changing a value changes which file is looked up.
export enum RepoScriptType {
  OnAfterClone = "onAfterClone",
  OnBeforeCommit = "onBeforeCommit",
  OnAfterCommit = "OnAfterCommit",
  OnBeforeCodeChange = "OnBeforeCodeChange",
  OnAfterCodeChange = "OnAfterCodeChange",
}
export default class CodeRepositoryUtil {
public static codeRepositoryResult: CodeRepositoryResult | null = null;
public static gitHubUtil: GitHubUtil | null = null;
public static folderNameOfClonedRepository: string | null = null;
public static async getCurrentCommitHash(): Promise<string> {
return await CodeRepositoryServerUtil.getCurrentCommitHash({
repoPath: this.getLocalRepositoryPath(),
});
}
public static isRepoCloned(): boolean {
return Boolean(this.folderNameOfClonedRepository);
}
public static async getOpenSetupPullRequest(): Promise<CopilotPullRequest | null> {
const openPullRequests: Array<CopilotPullRequest> =
await CopilotPullRequestService.getOpenPullRequestsFromDatabase();
for (const pullRequest of openPullRequests) {
if (pullRequest.isSetupPullRequest) {
return pullRequest;
}
}
return null;
}
public static getLocalRepositoryPath(): string {
if (this.folderNameOfClonedRepository) {
return LocalFile.sanitizeFilePath(
GetLocalRepositoryPath() + "/" + this.folderNameOfClonedRepository,
);
}
return GetLocalRepositoryPath();
}
public static async discardAllChangesOnCurrentBranch(): Promise<void> {
await CodeRepositoryServerUtil.discardAllChangesOnCurrentBranch({
repoPath: this.getLocalRepositoryPath(),
});
}
public static async setAuthorIdentity(data: {
name: string;
email: string;
}): Promise<void> {
await CodeRepositoryServerUtil.setAuthorIdentity({
repoPath: this.getLocalRepositoryPath(),
authorName: data.name,
authorEmail: data.email,
});
}
public static async getPullRequestState(data: {
pullRequestId: string;
}): Promise<PullRequestState> {
// check if org name and repo name is present.
if (!this.codeRepositoryResult?.codeRepository.organizationName) {
throw new BadDataException("Organization Name is required");
}
if (!this.codeRepositoryResult?.codeRepository.repositoryName) {
throw new BadDataException("Repository Name is required");
}
const githubUtil: GitHubUtil = this.getGitHubUtil();
if (!githubUtil) {
throw new BadDataException("GitHub Util is required");
}
const pullRequest: PullRequest | undefined =
await githubUtil.getPullRequestByNumber({
organizationName:
this.codeRepositoryResult.codeRepository.organizationName,
repositoryName: this.codeRepositoryResult.codeRepository.repositoryName,
pullRequestId: data.pullRequestId,
});
if (!pullRequest) {
throw new BadDataException("Pull Request not found");
}
return pullRequest.state;
}
public static async setUpRepo(): Promise<PullRequest> {
// check if the repository is setup properly.
const isRepoSetupProperly: boolean = await this.isRepoSetupProperly();
if (isRepoSetupProperly) {
throw new BadDataException("Repository is already setup properly.");
}
// otherwise, we copy the folder /usr/src/app/Templates/.oneuptime to the repository folder.
const templateFolderPath: string = LocalFile.sanitizeFilePath(
"/usr/src/app/Templates/.oneuptime",
);
const oneUptimeConfigPath: string = LocalFile.sanitizeFilePath(
this.getLocalRepositoryPath() + "/.oneuptime",
);
// create a new branch called oneuptime-copilot-setup
const branchName: string = "setup-" + Text.generateRandomText(5);
await this.createBranch({
branchName: branchName,
});
await LocalFile.makeDirectory(oneUptimeConfigPath);
await LocalFile.copyDirectory({
source: templateFolderPath,
destination: oneUptimeConfigPath,
});
// add all the files to the git.
await this.addAllChangedFilesToGit();
// commit the changes.
await this.commitChanges({
message: "OneUptime Copilot Setup",
});
// push changes to the repo.
await this.pushChanges({
branchName: branchName,
});
// create a pull request.
const pullRequest: PullRequest = await this.createPullRequest({
branchName: branchName,
title: "OneUptime Copilot Setup",
body: "This pull request is created by OneUptime Copilot to setup the repository.",
});
// save this to the database.
await CopilotPullRequestService.addPullRequestToDatabase({
pullRequest: pullRequest,
isSetupPullRequest: true,
});
return pullRequest;
}
public static async isRepoSetupProperly(): Promise<boolean> {
// check if .oneuptime folder exists.
const repoPath: string = this.getLocalRepositoryPath();
const oneUptimeFolderPath: string = LocalFile.sanitizeFilePath(
`${repoPath}/.oneuptime`,
);
const doesDirectoryExist: boolean =
await LocalFile.doesDirectoryExist(oneUptimeFolderPath);
if (!doesDirectoryExist) {
return false;
}
// check if .oneuptime/scripts folder exists.
const oneuptimeScriptsPath: string = LocalFile.sanitizeFilePath(
`${oneUptimeFolderPath}/scripts`,
);
const doesScriptsDirectoryExist: boolean =
await LocalFile.doesDirectoryExist(oneuptimeScriptsPath);
if (!doesScriptsDirectoryExist) {
return false;
}
return true; // return true if all checks pass.
}
public static addAllChangedFilesToGit(): Promise<void> {
return CodeRepositoryServerUtil.addAllChangedFilesToGit({
repoPath: this.getLocalRepositoryPath(),
});
}
// returns the folder name of the cloned repository.
public static async cloneRepository(data: {
codeRepository: CopilotCodeRepository;
}): Promise<void> {
// make sure this.getLocalRepositoryPath() is empty.
const repoLocalPath: string = this.getLocalRepositoryPath();
await LocalFile.deleteAllDataInDirectory(repoLocalPath);
await LocalFile.makeDirectory(repoLocalPath);
// check if the data in the directory eixsts, if it does then delete it.
if (!data.codeRepository.repositoryHostedAt) {
throw new BadDataException("Repository Hosted At is required");
}
if (!data.codeRepository.mainBranchName) {
throw new BadDataException("Main Branch Name is required");
}
if (!data.codeRepository.organizationName) {
throw new BadDataException("Organization Name is required");
}
if (!data.codeRepository.repositoryName) {
throw new BadDataException("Repository Name is required");
}
const CodeRepositoryUsername: string | null = GetCodeRepositoryUsername();
if (!CodeRepositoryUsername) {
throw new BadDataException("Code Repository Username is required");
}
const CodeRepositoryPassword: string | null = GetCodeRepositoryPassword();
if (!CodeRepositoryPassword) {
throw new BadDataException("Code Repository Password is required");
}
const repoUrl: string = `https://${CodeRepositoryUsername}:${CodeRepositoryPassword}@${
data.codeRepository.repositoryHostedAt === CodeRepositoryType.GitHub
? "github.com"
: ""
}/${data.codeRepository.organizationName}/${data.codeRepository.repositoryName}.git`;
const folderName: string = await CodeRepositoryServerUtil.cloneRepository({
repoUrl: repoUrl,
repoPath: repoLocalPath,
});
this.folderNameOfClonedRepository = folderName;
logger.debug(`Repository cloned to ${repoLocalPath}/${folderName}`);
}
public static async executeScript(data: { script: string }): Promise<string> {
const commands: Array<string> = data.script
.split("\n")
.filter((command: string) => {
return command.trim() !== "" && !command.startsWith("#");
});
const results: Array<string> = [];
for (const command of commands) {
logger.info(`Executing command: ${command}`);
const commandResult: string = await Execute.executeCommand(command, {
cwd: this.getLocalRepositoryPath(),
});
if (commandResult) {
logger.info(`Command result: ${commandResult}`);
results.push(commandResult);
}
}
return results.join("\n");
}
public static async getRepoScript(data: {
scriptType: RepoScriptType;
}): Promise<string | null> {
const repoPath: string = this.getLocalRepositoryPath();
const oneUptimeFolderPath: string = LocalFile.sanitizeFilePath(
`${repoPath}/.oneuptime`,
);
const doesDirectoryExist: boolean =
await LocalFile.doesDirectoryExist(oneUptimeFolderPath);
if (!doesDirectoryExist) {
return null;
}
const oneuptimeScriptsPath: string = LocalFile.sanitizeFilePath(
`${oneUptimeFolderPath}/scripts`,
);
const doesScriptsDirectoryExist: boolean =
await LocalFile.doesDirectoryExist(oneuptimeScriptsPath);
if (!doesScriptsDirectoryExist) {
return null;
}
const scriptPath: string = LocalFile.sanitizeFilePath(
`${oneuptimeScriptsPath}/${Text.fromPascalCaseToDashes(data.scriptType)}.sh`,
);
const doesScriptExist: boolean = await LocalFile.doesFileExist(scriptPath);
if (!doesScriptExist) {
return null;
}
const scriptContent: string = await LocalFile.read(scriptPath);
return scriptContent.trim() || null;
}
public static hasOpenPRForFile(data: {
filePath: string;
pullRequests: Array<PullRequest>;
}): boolean {
const pullRequests: Array<PullRequest> = this.getOpenPRForFile(data);
return pullRequests.length > 0;
}
public static getOpenPRForFile(data: {
filePath: string;
pullRequests: Array<PullRequest>;
}): Array<PullRequest> {
const pullRequests: Array<PullRequest> = [];
for (const pullRequest of data.pullRequests) {
if (pullRequest.title.includes(data.filePath)) {
pullRequests.push(pullRequest);
}
}
return pullRequests;
}
public static async listFilesInDirectory(data: {
directoryPath: string;
}): Promise<Array<string>> {
return await CodeRepositoryServerUtil.listFilesInDirectory({
repoPath: this.getLocalRepositoryPath(),
directoryPath: data.directoryPath,
});
}
public static getGitHubUtil(): GitHubUtil {
if (!this.gitHubUtil) {
const gitHubToken: string | null = GetCodeRepositoryPassword();
const gitHubUsername: string | null = GetCodeRepositoryUsername();
if (!gitHubUsername) {
throw new BadDataException("GitHub Username is required");
}
if (!gitHubToken) {
throw new BadDataException("GitHub Token is required");
}
this.gitHubUtil = new GitHubUtil({
authToken: gitHubToken,
username: gitHubUsername!,
});
}
return this.gitHubUtil;
}
public static async pullChanges(): Promise<void> {
await CodeRepositoryServerUtil.pullChanges({
repoPath: this.getLocalRepositoryPath(),
});
}
public static getBranchName(data: { branchName: string }): string {
return "oneuptime-copilot-" + data.branchName;
}
public static async createBranch(data: {
branchName: string;
}): Promise<void> {
const branchName: string = this.getBranchName({
branchName: data.branchName,
});
await CodeRepositoryServerUtil.createBranch({
repoPath: this.getLocalRepositoryPath(),
branchName: branchName,
});
}
public static async createOrCheckoutBranch(data: {
branchName: string;
}): Promise<void> {
const branchName: string = this.getBranchName({
branchName: data.branchName,
});
await CodeRepositoryServerUtil.createOrCheckoutBranch({
repoPath: this.getLocalRepositoryPath(),
branchName: branchName,
});
}
public static async writeToFile(data: {
filePath: string;
content: string;
}): Promise<void> {
await CodeRepositoryServerUtil.writeToFile({
repoPath: this.getLocalRepositoryPath(),
filePath: data.filePath,
content: data.content,
});
}
public static async createDirectory(data: {
directoryPath: string;
}): Promise<void> {
await CodeRepositoryServerUtil.createDirectory({
repoPath: this.getLocalRepositoryPath(),
directoryPath: data.directoryPath,
});
}
public static async deleteFile(data: { filePath: string }): Promise<void> {
await CodeRepositoryServerUtil.deleteFile({
repoPath: this.getLocalRepositoryPath(),
filePath: data.filePath,
});
}
public static async deleteDirectory(data: {
directoryPath: string;
}): Promise<void> {
await CodeRepositoryServerUtil.deleteDirectory({
repoPath: this.getLocalRepositoryPath(),
directoryPath: data.directoryPath,
});
}
public static async discardChanges(): Promise<void> {
if (this.isRepoCloned()) {
await CodeRepositoryServerUtil.discardChanges({
repoPath: this.getLocalRepositoryPath(),
});
}
}
public static async checkoutBranch(data: {
branchName: string;
}): Promise<void> {
if (this.isRepoCloned()) {
await CodeRepositoryServerUtil.checkoutBranch({
repoPath: this.getLocalRepositoryPath(),
branchName: data.branchName,
});
}
}
public static async checkoutMainBranch(): Promise<void> {
if (!this.isRepoCloned()) {
return;
}
const codeRepository: CopilotCodeRepository =
await this.getCodeRepository();
if (!codeRepository.mainBranchName) {
throw new BadDataException("Main Branch Name is required");
}
await this.checkoutBranch({
branchName: codeRepository.mainBranchName!,
});
}
public static async addFilesToGit(data: {
filePaths: Array<string>;
}): Promise<void> {
await CodeRepositoryServerUtil.addFilesToGit({
repoPath: this.getLocalRepositoryPath(),
filePaths: data.filePaths,
});
}
public static async commitChanges(data: { message: string }): Promise<void> {
let username: string | null = null;
if (
this.codeRepositoryResult?.codeRepository.repositoryHostedAt ===
CodeRepositoryType.GitHub
) {
username = GetCodeRepositoryUsername();
}
if (!username) {
throw new BadDataException("Username is required");
}
await CodeRepositoryServerUtil.commitChanges({
repoPath: this.getLocalRepositoryPath(),
message: data.message,
});
}
public static async pushChanges(data: { branchName: string }): Promise<void> {
const branchName: string = this.getBranchName({
branchName: data.branchName,
});
const codeRepository: CopilotCodeRepository =
await this.getCodeRepository();
if (!codeRepository.mainBranchName) {
throw new BadDataException("Main Branch Name is required");
}
if (!codeRepository.organizationName) {
throw new BadDataException("Organization Name is required");
}
if (!codeRepository.repositoryName) {
throw new BadDataException("Repository Name is required");
}
if (codeRepository.repositoryHostedAt === CodeRepositoryType.GitHub) {
return await this.getGitHubUtil().pushChanges({
repoPath: this.getLocalRepositoryPath(),
branchName: branchName,
organizationName: codeRepository.organizationName,
repositoryName: codeRepository.repositoryName,
});
}
}
public static async switchToMainBranch(): Promise<void> {
const codeRepository: CopilotCodeRepository =
await this.getCodeRepository();
if (!codeRepository.mainBranchName) {
throw new BadDataException("Main Branch Name is required");
}
await this.checkoutBranch({
branchName: codeRepository.mainBranchName!,
});
}
public static async createPullRequest(data: {
branchName: string;
title: string;
body: string;
}): Promise<PullRequest> {
const branchName: string = this.getBranchName({
branchName: data.branchName,
});
const codeRepository: CopilotCodeRepository =
await this.getCodeRepository();
if (!codeRepository.mainBranchName) {
throw new BadDataException("Main Branch Name is required");
}
if (!codeRepository.organizationName) {
throw new BadDataException("Organization Name is required");
}
if (!codeRepository.repositoryName) {
throw new BadDataException("Repository Name is required");
}
if (codeRepository.repositoryHostedAt === CodeRepositoryType.GitHub) {
return await this.getGitHubUtil().createPullRequest({
headBranchName: branchName,
baseBranchName: codeRepository.mainBranchName,
organizationName: codeRepository.organizationName,
repositoryName: codeRepository.repositoryName,
title: data.title,
body: data.body,
});
}
throw new BadDataException("Code Repository type not supported");
}
public static async getServicesToImproveCode(data: {
codeRepository: CopilotCodeRepository;
serviceRepositories: Array<ServiceCopilotCodeRepository>;
openPullRequests: Array<CopilotPullRequest>;
}): Promise<Array<ServiceToImproveResult>> {
const servicesToImproveCode: Array<ServiceToImproveResult> = [];
for (const service of data.serviceRepositories) {
if (!data.codeRepository.mainBranchName) {
throw new BadDataException("Main Branch Name is required");
}
if (!data.codeRepository.organizationName) {
throw new BadDataException("Organization Name is required");
}
if (!data.codeRepository.repositoryName) {
throw new BadDataException("Repository Name is required");
}
if (!service.limitNumberOfOpenPullRequestsCount) {
throw new BadDataException(
"Limit Number Of Open Pull Requests Count is required",
);
}
if (
data.codeRepository.repositoryHostedAt === CodeRepositoryType.GitHub
) {
const gitHuhbToken: string | null = GetCodeRepositoryPassword();
if (!gitHuhbToken) {
throw new BadDataException("GitHub Token is required");
}
const pullRequestByService: Array<CopilotPullRequest> =
data.openPullRequests.filter((pullRequest: CopilotPullRequest) => {
return (
pullRequest.serviceRepositoryId?.toString() ===
service.id?.toString()
);
});
const numberOfPullRequestForThisService: number =
pullRequestByService.length;
if (
numberOfPullRequestForThisService <
service.limitNumberOfOpenPullRequestsCount
) {
servicesToImproveCode.push({
serviceRepository: service,
numberOfOpenPullRequests: numberOfPullRequestForThisService,
pullRequests: pullRequestByService,
});
logger.info(
`Service ${service.serviceCatalog?.name} has ${numberOfPullRequestForThisService} open pull requests. Limit is ${service.limitNumberOfOpenPullRequestsCount}. Adding to the list to improve code...`,
);
} else {
logger.warn(
`Service ${service.serviceCatalog?.name} has ${numberOfPullRequestForThisService} open pull requests. Limit is ${service.limitNumberOfOpenPullRequestsCount}. Skipping...`,
);
}
}
}
return servicesToImproveCode;
}
public static async getCodeRepositoryResult(): Promise<CodeRepositoryResult> {
if (this.codeRepositoryResult) {
return this.codeRepositoryResult;
}
logger.info("Fetching Code Repository...");
const repositorySecretKey: string | null = GetRepositorySecretKey();
if (!repositorySecretKey) {
throw new BadDataException("Repository Secret Key is required");
}
const url: URL = URL.fromString(
GetOneUptimeURL().toString() + "/api",
).addRoute(
`${new CopilotCodeRepository()
.getCrudApiPath()
?.toString()}/get-code-repository/${repositorySecretKey}`,
);
const codeRepositoryResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
await API.get({
url: url,
});
if (codeRepositoryResult instanceof HTTPErrorResponse) {
throw codeRepositoryResult;
}
const codeRepository: CopilotCodeRepository =
CopilotCodeRepository.fromJSON(
codeRepositoryResult.data["codeRepository"] as JSONObject,
CopilotCodeRepository,
) as CopilotCodeRepository;
const servicesRepository: Array<ServiceCopilotCodeRepository> = (
codeRepositoryResult.data["servicesRepository"] as JSONArray
).map((serviceRepository: JSONObject) => {
return ServiceCopilotCodeRepository.fromJSON(
serviceRepository,
ServiceCopilotCodeRepository,
) as ServiceCopilotCodeRepository;
});
if (!codeRepository) {
throw new BadDataException(
"Code Repository not found with the secret key provided.",
);
}
if (!servicesRepository || servicesRepository.length === 0) {
throw new BadDataException(
"No services attached to this repository. Please attach services to this repository on OneUptime Dashboard.",
);
}
logger.info(`Code Repository found: ${codeRepository.name}`);
logger.info("Services found in the repository:");
servicesRepository.forEach(
(serviceRepository: ServiceCopilotCodeRepository) => {
logger.info(`- ${serviceRepository.serviceCatalog?.name}`);
},
);
this.codeRepositoryResult = {
codeRepository,
serviceRepositories: servicesRepository,
};
return this.codeRepositoryResult;
}
public static async getCodeRepository(): Promise<CopilotCodeRepository> {
if (!this.codeRepositoryResult) {
const result: CodeRepositoryResult = await this.getCodeRepositoryResult();
return result.codeRepository;
}
return this.codeRepositoryResult.codeRepository;
}
public static getCodeFileExtentions(): Array<string> {
const extensions: Array<string> = [
"ts",
"js",
"tsx",
"jsx",
"py",
"go",
"java",
"c",
"cpp",
"cs",
"swift",
"php",
"rb",
"rs",
"kt",
"dart",
"sh",
"pl",
"lua",
"r",
"scala",
"ts",
"js",
"tsx",
"jsx",
];
return extensions;
}
public static getReadmeFileExtentions(): Array<string> {
return ["md"];
}
}

View File

@@ -1,373 +0,0 @@
import BadDataException from "Common/Types/Exception/BadDataException";
import CopilotAction from "Common/Models/DatabaseModels/CopilotAction";
import {
GetOneUptimeURL,
GetRepositorySecretKey,
MIN_ITEMS_IN_QUEUE_PER_SERVICE_CATALOG,
} from "../Config";
import URL from "Common/Types/API/URL";
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import { JSONArray, JSONObject } from "Common/Types/JSON";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import API from "Common/Utils/API";
import ObjectID from "Common/Types/ObjectID";
import logger from "Common/Server/Utils/Logger";
import CopilotActionTypePriority from "Common/Models/DatabaseModels/CopilotActionTypePriority";
import CopilotActionTypeUtil from "./CopilotActionTypes";
import CopilotActionType from "Common/Types/Copilot/CopilotActionType";
import { ActionDictionary } from "../Service/CopilotActions/Index";
import CopilotActionBase from "../Service/CopilotActions/CopilotActionsBase";
import CopilotActionStatus from "Common/Types/Copilot/CopilotActionStatus";
import CopilotActionProp from "Common/Types/Copilot/CopilotActionProps/Index";
import CodeRepositoryUtil from "./CodeRepository";
export default class CopilotActionUtil {
public static async getExistingAction(data: {
serviceCatalogId: ObjectID;
actionType: CopilotActionType;
actionProps: JSONObject;
}): Promise<CopilotAction | null> {
if (!data.serviceCatalogId) {
throw new BadDataException("Service Catalog ID is required");
}
if (!data.actionType) {
throw new BadDataException("Action Type is required");
}
const repositorySecretKey: string | null = GetRepositorySecretKey();
if (!repositorySecretKey) {
throw new BadDataException("Repository Secret Key is required");
}
const url: URL = URL.fromString(
GetOneUptimeURL().toString() + "/api",
).addRoute(
`${new CopilotAction()
.getCrudApiPath()
?.toString()}/get-copilot-action/${repositorySecretKey}`,
);
const copilotActionResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
await API.get({
url: url,
params: {
serviceCatalogId: data.serviceCatalogId.toString(),
actionType: data.actionType,
actionProps: JSON.stringify(data.actionProps),
},
});
if (copilotActionResult instanceof HTTPErrorResponse) {
throw copilotActionResult;
}
if (!copilotActionResult.data["copilotAction"]) {
return null;
}
return CopilotAction.fromJSONObject(
copilotActionResult.data["copilotAction"] as JSONObject,
CopilotAction,
);
}
public static async getActionTypesBasedOnPriority(): Promise<
Array<CopilotActionTypePriority>
> {
const repositorySecretKey: string | null = GetRepositorySecretKey();
const url: URL = URL.fromString(
GetOneUptimeURL().toString() + "/api",
).addRoute(
`${new CopilotAction().getCrudApiPath()?.toString()}/copilot-action-types-by-priority/${repositorySecretKey}`,
);
const actionTypesResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
await API.get({
url: url,
});
if (actionTypesResult instanceof HTTPErrorResponse) {
throw actionTypesResult;
}
const actionTypes: Array<CopilotActionTypePriority> =
CopilotActionTypePriority.fromJSONArray(
actionTypesResult.data["actionTypes"] as JSONArray,
CopilotActionTypePriority,
) || [];
logger.debug(
`Copilot action types based on priority: ${JSON.stringify(actionTypes, null, 2)}`,
);
return actionTypes;
}
public static async getActionsToWorkOn(data: {
serviceCatalogId: ObjectID;
serviceRepositoryId: ObjectID;
}): Promise<Array<CopilotAction>> {
logger.debug("Getting actions to work on");
if (!data.serviceCatalogId) {
throw new BadDataException("Service Catalog ID is required");
}
const repositorySecretKey: string | null = GetRepositorySecretKey();
if (!repositorySecretKey) {
throw new BadDataException("Repository Secret Key is required");
}
// check actions in queue
const actionsInQueue: Array<CopilotAction> =
await CopilotActionUtil.getInQueueActions({
serviceCatalogId: data.serviceCatalogId,
});
if (actionsInQueue.length >= MIN_ITEMS_IN_QUEUE_PER_SERVICE_CATALOG) {
logger.debug(
`Actions in queue: ${JSON.stringify(actionsInQueue, null, 2)}`,
);
return actionsInQueue;
}
const actionTypePriorities: Array<CopilotActionTypePriority> =
await CopilotActionTypeUtil.getEnabledActionTypesBasedOnPriority();
logger.debug(
"Action type priorities: " +
actionTypePriorities.map(
(actionTypePriority: CopilotActionTypePriority) => {
return actionTypePriority.actionType;
},
),
);
for (const actionTypePriority of actionTypePriorities) {
logger.debug(
`Getting actions for action type: ${actionTypePriority.actionType}`,
);
// get items in queue based on priority
const itemsInQueue: number =
CopilotActionTypeUtil.getItemsInQueueByPriority(
actionTypePriority.priority || 1,
);
// get actions based on priority
const actions: Array<CopilotAction> = await CopilotActionUtil.getActions({
serviceCatalogId: data.serviceCatalogId,
serviceRepositoryId: data.serviceRepositoryId,
actionType: actionTypePriority.actionType!,
itemsInQueue,
});
// add these actions to the queue
actionsInQueue.push(...actions);
}
return actionsInQueue;
}
public static async getActions(data: {
serviceCatalogId: ObjectID;
serviceRepositoryId: ObjectID;
actionType: CopilotActionType;
itemsInQueue: number;
}): Promise<Array<CopilotAction>> {
logger.debug(`Getting actions for action type: ${data.actionType}`);
if (!data.serviceCatalogId) {
throw new BadDataException("Service Catalog ID is required");
}
if (!data.actionType) {
throw new BadDataException("Action Type is required");
}
const CopilotActionBaseType: typeof CopilotActionBase =
ActionDictionary[data.actionType]!;
const ActionBase: CopilotActionBase = new CopilotActionBaseType();
logger.debug(`Getting action props for action type: ${data.actionType}`);
const actionProps: Array<CopilotActionProp> =
await ActionBase.getActionPropsToQueue({
serviceCatalogId: data.serviceCatalogId,
serviceRepositoryId: data.serviceRepositoryId,
maxActionsToQueue: data.itemsInQueue,
});
logger.debug(`Action props for action type: ${data.actionType}`);
const savedActions: Array<CopilotAction> = [];
// now these actions need to be saved.
for (const actionProp of actionProps) {
try {
logger.debug(
`Creating copilot action for action type: ${data.actionType}`,
);
const savedAction: CopilotAction =
await CopilotActionUtil.createCopilotAction({
actionType: data.actionType,
serviceCatalogId: data.serviceCatalogId,
serviceRepositoryId: data.serviceRepositoryId,
actionProps: actionProp,
});
logger.debug(
`Copilot action created for action type: ${data.actionType}`,
);
logger.debug(savedAction);
savedActions.push(savedAction);
} catch (error) {
logger.error(`Error while adding copilot action: ${error}`);
}
}
return savedActions;
}
public static async updateCopilotAction(data: {
actionId: ObjectID;
actionStatus: CopilotActionStatus;
pullRequestId?: ObjectID | undefined;
commitHash?: string | undefined;
statusMessage?: string | undefined;
logs?: Array<string> | undefined;
}): Promise<void> {
// send this to the API.
const url: URL = URL.fromString(
GetOneUptimeURL().toString() + "/api",
).addRoute(
`${new CopilotAction()
.getCrudApiPath()
?.toString()}/update-copilot-action/${GetRepositorySecretKey()}`,
);
const codeRepositoryResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
await API.post({
url: url,
data: {
...data,
},
});
if (codeRepositoryResult instanceof HTTPErrorResponse) {
throw codeRepositoryResult;
}
}
public static async createCopilotAction(data: {
actionType: CopilotActionType;
serviceCatalogId: ObjectID;
serviceRepositoryId: ObjectID;
actionProps: CopilotActionProp;
actionStatus?: CopilotActionStatus;
}): Promise<CopilotAction> {
const action: CopilotAction = new CopilotAction();
action.copilotActionType = data.actionType;
action.serviceCatalogId = data.serviceCatalogId;
action.serviceRepositoryId = data.serviceRepositoryId;
action.copilotActionProp = data.actionProps;
action.commitHash = await CodeRepositoryUtil.getCurrentCommitHash();
if (data.actionStatus) {
action.copilotActionStatus = data.actionStatus;
} else {
action.copilotActionStatus = CopilotActionStatus.IN_QUEUE;
}
// send this to the API.
const url: URL = URL.fromString(
GetOneUptimeURL().toString() + "/api",
).addRoute(
`${new CopilotAction()
.getCrudApiPath()
?.toString()}/create-copilot-action/${GetRepositorySecretKey()}`,
);
const codeRepositoryResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
await API.post({
url: url,
data: {
copilotAction: CopilotAction.toJSON(action, CopilotAction),
},
});
if (codeRepositoryResult instanceof HTTPErrorResponse) {
throw codeRepositoryResult;
}
const copilotAction: CopilotAction = CopilotAction.fromJSONObject(
codeRepositoryResult.data as JSONObject,
CopilotAction,
);
if (!copilotAction) {
throw new BadDataException("Copilot action not created");
}
if (!copilotAction._id) {
throw new BadDataException("Copilot action ID not created");
}
return copilotAction;
}
public static async getInQueueActions(data: {
serviceCatalogId: ObjectID;
}): Promise<Array<CopilotAction>> {
if (!data.serviceCatalogId) {
throw new BadDataException("Service Catalog ID is required");
}
const repositorySecretKey: string | null = GetRepositorySecretKey();
if (!repositorySecretKey) {
throw new BadDataException("Repository Secret Key is required");
}
const url: URL = URL.fromString(
GetOneUptimeURL().toString() + "/api",
).addRoute(
`${new CopilotAction()
.getCrudApiPath()
?.toString()}/copilot-actions-in-queue/${repositorySecretKey}`,
);
const copilotActionsResult: HTTPErrorResponse | HTTPResponse<JSONObject> =
await API.get({
url: url,
params: {
serviceCatalogId: data.serviceCatalogId.toString(),
},
});
if (copilotActionsResult instanceof HTTPErrorResponse) {
throw copilotActionsResult;
}
const copilotActions: Array<CopilotAction> =
CopilotAction.fromJSONArray(
copilotActionsResult.data["copilotActions"] as JSONArray,
CopilotAction,
) || [];
logger.debug(
`Copilot actions in queue for service catalog id: ${data.serviceCatalogId}`,
);
logger.debug(`Copilot events: ${JSON.stringify(copilotActions, null, 2)}`);
return copilotActions;
}
}

View File

@@ -1,71 +0,0 @@
import CopilotActionTypePriority from "Common/Models/DatabaseModels/CopilotActionTypePriority";
import CopilotActionType, {
CopilotActionTypeUtil as ActionTypeUtil,
CopilotActionTypeData,
} from "Common/Types/Copilot/CopilotActionType";
import CopilotActionUtil from "./CopilotAction";
import { ActionDictionary } from "../Service/CopilotActions/Index";
import logger from "Common/Server/Utils/Logger";
/**
 * Helpers for working with copilot action types: filtering to the types that
 * have an implementation, sizing their queues by priority, and logging which
 * types are enabled or disabled.
 */
export default class CopilotActionTypeUtil {
  /** An action type is enabled iff an implementation exists in ActionDictionary. */
  private static isActionEnabled(actionType: CopilotActionType): boolean {
    return Boolean(ActionDictionary[actionType]); // if action is not in dictionary then it is not enabled
  }

  /**
   * Fetches all action types ordered by priority and keeps only those with a
   * registered implementation.
   */
  public static async getEnabledActionTypesBasedOnPriority(): Promise<
    Array<CopilotActionTypePriority>
  > {
    const prioritizedTypes: Array<CopilotActionTypePriority> =
      await CopilotActionUtil.getActionTypesBasedOnPriority();

    return prioritizedTypes.filter(
      (priorityEntry: CopilotActionTypePriority) => {
        return this.isActionEnabled(priorityEntry.actionType!);
      },
    );
  }

  /**
   * Maps a priority to a queue size: priority 1 -> 5 items, priority 5 -> 1
   * item (queue size = 6 - priority).
   */
  public static getItemsInQueueByPriority(priority: number): number {
    const maxItemsPlusOne: number = 6;
    return maxItemsPlusOne - priority;
  }

  /** Logs which copilot action types are currently implemented and which are not. */
  public static printEnabledAndDisabledActionTypes(): void {
    const allActionTypes: Array<CopilotActionTypeData> =
      ActionTypeUtil.getAllCopilotActionTypes();

    // partition the known action types by whether an implementation exists.
    const enabledTypes: Array<CopilotActionTypeData> = allActionTypes.filter(
      (typeData: CopilotActionTypeData) => {
        return this.isActionEnabled(typeData.type);
      },
    );

    const disabledTypes: Array<CopilotActionTypeData> = allActionTypes.filter(
      (typeData: CopilotActionTypeData) => {
        return !this.isActionEnabled(typeData.type);
      },
    );

    logger.info("--------------------");
    logger.info("Copilot will fix the following issues:");

    enabledTypes.forEach((typeData: CopilotActionTypeData) => {
      logger.info(`- ${typeData.type}`);
    });

    logger.info("--------------------");
    logger.info(
      "Copilot will not fix the following issues at this time (but we will in the future update of the software. We're working on this and they will be launched soon):",
    );

    disabledTypes.forEach((typeData: CopilotActionTypeData) => {
      logger.info(`- ${typeData.type}`);
    });

    logger.info("--------------------");
  }
}

View File

@@ -1,191 +0,0 @@
import TechStack from "Common/Types/ServiceCatalog/TechStack";
export default class ServiceFileTypesUtil {
  /**
   * Directory names that should never be scanned, regardless of tech stack
   * (dependency folders, VCS metadata, build output, scratch directories).
   */
  private static getCommonDirectoriesToIgnore(): string[] {
    // Note: "tempfiles" was previously listed twice; list is de-duplicated.
    return [
      "node_modules",
      ".git",
      "build",
      "dist",
      "coverage",
      "logs",
      "tmp",
      "temp",
      "temporal",
      "tempfiles",
    ];
  }

  /** File names that should never be scanned, regardless of tech stack. */
  private static getCommonFilesToIgnore(): string[] {
    return [".DS_Store", "Thumbs.db", ".gitignore", ".gitattributes"];
  }

  /**
   * Returns files/directories to ignore for a single tech stack
   * (lockfiles, caches, tool-generated folders).
   */
  public static getCommonFilesToIgnoreByTechStackItem(
    techStack: TechStack,
  ): string[] {
    switch (techStack) {
      case TechStack.NodeJS:
        return ["package-lock.json"];
      case TechStack.Python:
        return ["__pycache__"];
      case TechStack.Ruby:
        return ["Gemfile.lock"];
      case TechStack.Go:
        return ["go.sum", "go.mod"];
      case TechStack.Java:
        return ["pom.xml"];
      case TechStack.PHP:
        return ["composer.lock"];
      case TechStack.CSharp:
        return ["packages", "bin", "obj"];
      case TechStack.CPlusPlus:
        return ["build", "CMakeFiles", "CMakeCache.txt", "Makefile"];
      case TechStack.Rust:
        return ["Cargo.lock"];
      case TechStack.Swift:
        return ["Podfile.lock"];
      case TechStack.Kotlin:
        return [
          "gradle",
          "build",
          "gradlew",
          "gradlew.bat",
          "gradle.properties",
        ];
      // These stacks share the same node-based ignore list.
      case TechStack.TypeScript:
      case TechStack.JavaScript:
      case TechStack.React:
        return ["node_modules", "package-lock.json"];
      case TechStack.Shell:
      case TechStack.Other:
      default:
        return [];
    }
  }

  /**
   * Aggregates the ignore lists of every given tech stack and appends the
   * stack-agnostic common file and directory ignores.
   */
  public static getCommonFilesToIgnoreByTechStack(
    techStack: Array<TechStack>,
  ): string[] {
    return techStack
      .flatMap((stack: TechStack) => {
        return this.getCommonFilesToIgnoreByTechStackItem(stack);
      })
      .concat(this.getCommonFilesToIgnore())
      .concat(this.getCommonDirectoriesToIgnore());
  }

  /** Extensions that are relevant for every service: docs, CI, scripts, etc. */
  private static getCommonFilesExtentions(): string[] {
    // return markdown, dockerfile, etc.
    return [".md", "dockerfile", ".yml", ".yaml", ".sh", ".gitignore"];
  }

  /** Returns the source-file extensions associated with a single tech stack. */
  public static getFileExtentionsByTechStackItem(
    techStack: TechStack,
  ): string[] {
    switch (techStack) {
      case TechStack.NodeJS:
        return [".js", ".ts", ".json", ".mjs"];
      case TechStack.Python:
        return [".py"];
      case TechStack.Ruby:
        return [".rb"];
      case TechStack.Go:
        return [".go"];
      case TechStack.Java:
        return [".java"];
      case TechStack.PHP:
        return [".php"];
      case TechStack.CSharp:
        return [".cs"];
      case TechStack.CPlusPlus:
        return [".cpp", ".c"];
      case TechStack.Rust:
        return [".rs"];
      case TechStack.Swift:
        return [".swift"];
      case TechStack.Kotlin:
        return [".kt", ".kts"];
      case TechStack.TypeScript:
        return [".ts", ".tsx"];
      case TechStack.JavaScript:
        return [".js", ".jsx"];
      case TechStack.Shell:
        return [".sh"];
      case TechStack.React:
        return [".js", ".ts", ".jsx", ".tsx"];
      case TechStack.Other:
      default:
        return [];
    }
  }

  /**
   * Aggregates the extensions of every given tech stack, skipping empty
   * entries, and appends the stack-agnostic common extensions.
   */
  public static getFileExtentionsByTechStack(
    techStack: Array<TechStack>,
  ): string[] {
    const fileExtentions: Array<string> = [];
    for (const stack of techStack) {
      // Skip undefined/empty entries defensively (mirrors original behavior).
      if (!stack) {
        continue;
      }
      fileExtentions.push(...this.getFileExtentionsByTechStackItem(stack));
    }
    // add common files extentions
    return fileExtentions.concat(this.getCommonFilesExtentions());
  }
}

View File

@@ -1,83 +0,0 @@
import {
GetCodeRepositoryPassword,
GetLlmServerUrl,
GetLlmType,
GetOneUptimeURL,
GetRepositorySecretKey,
} from "../Config";
import CodeRepositoryUtil, { CodeRepositoryResult } from "./CodeRepository";
import CodeRepositoryType from "Common/Types/CodeRepository/CodeRepositoryType";
import BadDataException from "Common/Types/Exception/BadDataException";
import URL from "Common/Types/API/URL";
import LlmType from "../Types/LlmType";
import API from "Common/Utils/API";
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import { JSONObject } from "Common/Types/JSON";
import logger from "Common/Server/Utils/Logger";
import CopilotActionTypeUtil from "./CopilotActionTypes";
export default class InitUtil {
  /**
   * Validates the runtime environment before the copilot starts working:
   * reachability of the OneUptime LLM server (when configured), reachability
   * of the OneUptime API server, presence of the repository secret key, at
   * least one configured service, and a GitHub token for GitHub-hosted repos.
   *
   * @returns The code repository together with its configured service repositories.
   * @throws BadDataException when any of the preconditions above fails.
   */
  public static async init(): Promise<CodeRepositoryResult> {
    if (GetLlmType() === LlmType.ONEUPTIME_LLM) {
      const llmServerUrl: URL = GetLlmServerUrl();
      // Check that the OneUptime LLM server is reachable before doing any work.
      const result: HTTPErrorResponse | HTTPResponse<JSONObject> =
        await API.get({
          url: URL.fromString(llmServerUrl.toString()),
        });
      if (result instanceof HTTPErrorResponse) {
        throw new BadDataException(
          "OneUptime LLM server is not reachable. Please check the server URL in the environment variables.",
        );
      }
    }

    // check if oneuptime server is up.
    const oneuptimeServerUrl: URL = GetOneUptimeURL();
    const result: HTTPErrorResponse | HTTPResponse<JSONObject> = await API.get({
      url: URL.fromString(oneuptimeServerUrl.toString() + "/status"),
    });
    if (result instanceof HTTPErrorResponse) {
      throw new BadDataException(
        `OneUptime ${GetOneUptimeURL().toString()} is not reachable. Please check the server URL in the environment variables.`,
      );
    }

    // The secret key authenticates this agent against the OneUptime API.
    if (!GetRepositorySecretKey()) {
      throw new BadDataException("Repository Secret Key is required");
    }

    const codeRepositoryResult: CodeRepositoryResult =
      await CodeRepositoryUtil.getCodeRepositoryResult();

    // At least one service must be attached to the repository for the
    // copilot to have anything to work on.
    if (codeRepositoryResult.serviceRepositories.length === 0) {
      logger.error(
        "No services found in the repository. Please add services to the repository in OneUptime Dashboard.",
      );
      throw new BadDataException(
        "No services found in the repository. Please add services to the repository in OneUptime Dashboard.",
      );
    }

    // Check if the repository type is GitHub and the GitHub token is provided
    if (
      codeRepositoryResult.codeRepository.repositoryHostedAt ===
        CodeRepositoryType.GitHub &&
      !GetCodeRepositoryPassword()
    ) {
      throw new BadDataException(
        "GitHub token is required for this repository. Please provide the GitHub token in the environment variables.",
      );
    }

    // check copilot action types enabled and print it out for user.
    CopilotActionTypeUtil.printEnabledAndDisabledActionTypes();

    return codeRepositoryResult;
  }
}

View File

@@ -1,5 +0,0 @@
export default class ProcessUtil {
  /**
   * Terminates the current process immediately with exit code 0 (success).
   * Used when the copilot has nothing left to do; note that process.exit()
   * does not wait for pending async work or flush in-flight I/O.
   */
  public static haltProcessWithSuccess(): void {
    process.exit(0);
  }
}

View File

@@ -1,32 +0,0 @@
import CopilotPullRequest from "Common/Models/DatabaseModels/CopilotPullRequest";
import CopilotPullRequestService from "../Service/CopilotPullRequest";
import PullRequestState from "Common/Types/CodeRepository/PullRequestState";
export default class PullRequestUtil {
public static async getOpenPRs(): Promise<Array<CopilotPullRequest>> {
const openPRs: Array<CopilotPullRequest> = [];
// get all open pull requests.
const openPullRequests: Array<CopilotPullRequest> =
await CopilotPullRequestService.getOpenPullRequestsFromDatabase();
for (const openPullRequest of openPullRequests) {
// refresh status of this PR.
if (!openPullRequest.pullRequestId) {
continue;
}
const pullRequestState: PullRequestState =
await CopilotPullRequestService.refreshPullRequestStatus({
copilotPullRequest: openPullRequest,
});
if (pullRequestState === PullRequestState.Open) {
openPRs.push(openPullRequest);
}
}
return openPRs;
}
}

View File

@@ -1,154 +0,0 @@
import ServiceFileTypesUtil from "./FileTypes";
import Dictionary from "Common/Types/Dictionary";
import BadDataException from "Common/Types/Exception/BadDataException";
import TechStack from "Common/Types/ServiceCatalog/TechStack";
import CodeRepositoryCommonServerUtil from "Common/Server/Utils/CodeRepository/CodeRepository";
import CodeRepositoryFile from "Common/Server/Utils/CodeRepository/CodeRepositoryFile";
import LocalFile from "Common/Server/Utils/LocalFile";
import ServiceCopilotCodeRepository from "Common/Models/DatabaseModels/ServiceCopilotCodeRepository";
import ServiceLanguageUtil from "Common/Utils/TechStack";
import CodeRepositoryUtil, {
CodeRepositoryResult,
ServiceToImproveResult,
} from "./CodeRepository";
import PullRequestUtil from "./PullRequest";
import CopilotPullRequest from "Common/Models/DatabaseModels/CopilotPullRequest";
import logger from "Common/Server/Utils/Logger";
import ProcessUtil from "./Process";
import ObjectID from "Common/Types/ObjectID";
export default class ServiceRepositoryUtil {
  /** Set once at startup via setCodeRepositoryResult(); read by every helper. */
  public static codeRepositoryResult: CodeRepositoryResult | null = null;

  /** Lazy cache for getServicesToImprove(); null until first computed. */
  public static servicesToImprove: Array<ServiceCopilotCodeRepository> | null =
    null;

  /** Stores the repository/services lookup result for later use. */
  public static setCodeRepositoryResult(data: {
    codeRepositoryResult: CodeRepositoryResult;
  }): void {
    ServiceRepositoryUtil.codeRepositoryResult = data.codeRepositoryResult;
  }

  /**
   * Computes (and caches) the services that still need improvement, taking
   * currently-open pull requests into account. Halts the process with a
   * success exit code when there is nothing to improve.
   *
   * @throws BadDataException when setCodeRepositoryResult() was never called.
   */
  public static async getServicesToImprove(): Promise<
    Array<ServiceCopilotCodeRepository>
  > {
    if (this.servicesToImprove) {
      return this.servicesToImprove;
    }

    // Narrow the nullable static field honestly instead of using a non-null
    // assertion (`!`) that contradicts the null check right below it.
    const codeRepositoryResult: CodeRepositoryResult | null =
      ServiceRepositoryUtil.codeRepositoryResult;

    if (!codeRepositoryResult) {
      throw new BadDataException("Code repository result is not set");
    }

    // before cloning the repo, check if there are any services to improve.
    const openPullRequests: Array<CopilotPullRequest> =
      await PullRequestUtil.getOpenPRs();

    const servicesToImproveResult: Array<ServiceToImproveResult> =
      await CodeRepositoryUtil.getServicesToImproveCode({
        codeRepository: codeRepositoryResult.codeRepository,
        serviceRepositories: codeRepositoryResult.serviceRepositories,
        openPullRequests: openPullRequests,
      });

    const servicesToImprove: Array<ServiceCopilotCodeRepository> =
      servicesToImproveResult.map(
        (serviceToImproveResult: ServiceToImproveResult) => {
          return serviceToImproveResult.serviceRepository;
        },
      );

    if (servicesToImprove.length === 0) {
      logger.info("No services to improve. Exiting.");
      ProcessUtil.haltProcessWithSuccess();
    }

    this.servicesToImprove = servicesToImprove;
    return servicesToImprove;
  }

  /** Determines the tech stack for a file from its extension. */
  public static async getFileLanguage(data: {
    filePath: string;
  }): Promise<TechStack> {
    const fileExtention: string = LocalFile.getFileExtension(data.filePath);
    const techStack: TechStack = ServiceLanguageUtil.getLanguageByFileExtension(
      {
        fileExtension: fileExtention,
      },
    );
    return techStack;
  }

  /** Reads a file's content from the locally cloned repository. */
  public static async getFileContent(data: {
    filePath: string;
  }): Promise<string> {
    const { filePath } = data;
    const fileContent: string =
      await CodeRepositoryCommonServerUtil.getFileContent({
        repoPath: CodeRepositoryUtil.getLocalRepositoryPath(),
        filePath: filePath,
      });
    return fileContent;
  }

  /**
   * Looks up the service repository matching the given service catalog id and
   * returns every file in its service directory.
   *
   * @throws BadDataException when no matching service repository exists.
   */
  public static async getFilesByServiceCatalogId(data: {
    serviceCatalogId: ObjectID;
  }): Promise<Dictionary<CodeRepositoryFile>> {
    const { serviceCatalogId } = data;

    const serviceRepository: ServiceCopilotCodeRepository | undefined = (
      await ServiceRepositoryUtil.getServicesToImprove()
    ).find((serviceRepository: ServiceCopilotCodeRepository) => {
      return (
        serviceRepository.serviceCatalog!.id?.toString() ===
        serviceCatalogId.toString()
      );
    });

    if (!serviceRepository) {
      throw new BadDataException("Service repository not found");
    }

    const allFiles: Dictionary<CodeRepositoryFile> =
      await ServiceRepositoryUtil.getFilesInServiceDirectory({
        serviceRepository,
      });

    return allFiles;
  }

  /**
   * Recursively lists the files in the service's directory, accepting only
   * extensions relevant to the service's tech stack and skipping common
   * ignorable files/directories (lockfiles, node_modules, etc.).
   *
   * @throws BadDataException when the service catalog has no tech stack set.
   */
  public static async getFilesInServiceDirectory(data: {
    serviceRepository: ServiceCopilotCodeRepository;
  }): Promise<Dictionary<CodeRepositoryFile>> {
    const { serviceRepository } = data;

    if (!serviceRepository.serviceCatalog?.techStack) {
      throw new BadDataException(
        "Service language is not defined in the service catalog",
      );
    }

    const allFiles: Dictionary<CodeRepositoryFile> =
      await CodeRepositoryCommonServerUtil.getFilesInDirectoryRecursive({
        repoPath: CodeRepositoryUtil.getLocalRepositoryPath(),
        directoryPath: serviceRepository.servicePathInRepository || ".",
        acceptedFileExtensions:
          ServiceFileTypesUtil.getFileExtentionsByTechStack(
            serviceRepository.serviceCatalog!.techStack!,
          ),
        ignoreFilesOrDirectories:
          ServiceFileTypesUtil.getCommonFilesToIgnoreByTechStack(
            serviceRepository.serviceCatalog!.techStack!,
          ),
      });

    return allFiles;
  }
}

View File

@@ -1,32 +0,0 @@
{
"preset": "ts-jest",
"testPathIgnorePatterns": [
"node_modules",
"dist"
],
"verbose": true,
"globals": {
"ts-jest": {
"tsconfig": "tsconfig.json",
"babelConfig": false
}
},
"moduleFileExtensions": ["ts", "js", "json"],
"transform": {
".(ts|tsx)": "ts-jest"
},
"testEnvironment": "node",
"collectCoverage": false,
"coverageReporters": ["text", "lcov"],
"testRegex": "./Tests/(.*).test.ts",
"collectCoverageFrom": ["./**/*.(tsx||ts)"],
"coverageThreshold": {
"global": {
"lines": 0,
"functions": 0,
"branches": 0,
"statements": 0
}
}
}

View File

@@ -1,11 +0,0 @@
{
"watch": [
"./",
"../Common"
],
"ext": "ts,tsx",
"ignore": ["./node_modules/**", "./public/**", "./bin/**", "./build/**"],
"watchOptions": {"useFsEvents": false, "interval": 500},
"env": {"TS_NODE_TRANSPILE_ONLY": "1", "TS_NODE_FILES": "false"},
"exec": "node -r ts-node/register/transpile-only Index.ts"
}

23908
Copilot/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,31 +1,31 @@
{
"name": "@oneuptime/copilot",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"start": "export NODE_OPTIONS='--max-old-space-size=8096' && node --require ts-node/register Index.ts",
"compile": "tsc",
"clear-modules": "rm -rf node_modules && rm package-lock.json && npm install",
"dev": "npx nodemon",
"audit": "npm audit --audit-level=low",
"dep-check": "npm install -g depcheck && depcheck ./ --skip-missing=true",
"test": "jest --passWithNoTests"
},
"author": "OneUptime <hello@oneuptime.com> (https://oneuptime.com/)",
"license": "Apache-2.0",
"dependencies": {
"Common": "file:../Common",
"dotenv": "^16.4.5",
"openai": "^4.52.5",
"ts-node": "^10.9.1"
},
"devDependencies": {
"@types/jest": "^27.5.0",
"@types/node": "^17.0.31",
"jest": "^28.1.0",
"nodemon": "^2.0.20",
"ts-jest": "^28.0.2"
}
"name": "@oneuptime/copilot-agent",
"version": "0.1.0",
"description": "Standalone OneUptime Copilot coding agent CLI",
"private": true,
"bin": {
"oneuptime-copilot-agent": "./build/dist/Index.js"
},
"scripts": {
"build": "tsc",
"compile": "tsc",
"dev": "ts-node --transpile-only -r tsconfig-paths/register src/Index.ts",
"start": "node --enable-source-maps ./build/dist/Index.js",
"clear-modules": "rm -rf node_modules && rm -f package-lock.json && npm install"
},
"dependencies": {
"Common": "file:../../Common",
"commander": "^12.1.0",
"undici": "^6.19.8",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/node": "^17.0.45",
"tsconfig-paths": "^4.2.0",
"ts-node": "^10.9.2",
"typescript": "^5.6.3"
},
"engines": {
"node": ">=18"
}
}

View File

@@ -0,0 +1,288 @@
import path from "node:path";
import LocalFile from "Common/Server/Utils/LocalFile";
import { LMStudioClient } from "../LLM/LMStudioClient";
import { buildSystemPrompt } from "./SystemPrompt";
import { WorkspaceContextBuilder } from "./WorkspaceContext";
import { ToolRegistry } from "../Tools/ToolRegistry";
import { ChatMessage, OpenAIToolCall, ToolExecutionResult } from "../Types";
import AgentLogger from "../Utils/AgentLogger";
/**
 * Configuration values that control how the Copilot agent connects to the
 * model, how many iterations it may run, and which workspace it operates on.
 */
export interface CopilotAgentOptions {
  /** Task or problem statement the agent should work on. */
  prompt: string;
  /** Full chat-completions endpoint URL of the LM Studio server. */
  modelUrl: string;
  /** Model identifier expected by the endpoint. */
  modelName: string;
  /** Root directory of the repository/folder the agent operates inside. */
  workspacePath: string;
  /** Sampling temperature passed through to the model. */
  temperature: number;
  /** Upper bound on tool-calling rounds before the run is aborted. */
  maxIterations: number;
  /** Per-request HTTP timeout in milliseconds. */
  requestTimeoutMs: number;
  /** Optional API key when the endpoint requires authentication. */
  apiKey?: string | undefined;
}
/**
* Coordinates the overall tool-using conversation loop with the target LLM,
* including prompt preparation, workspace validation, and tool execution.
*/
export class CopilotAgent {
  private readonly options: CopilotAgentOptions;
  private readonly workspaceRoot: string;
  private readonly client: LMStudioClient;
  private readonly registry: ToolRegistry;

  /**
   * Creates a new agent instance, wiring up the LM Studio client and tool
   * registry for the provided workspace.
   */
  public constructor(options: CopilotAgentOptions) {
    this.options = options;
    this.workspaceRoot = path.resolve(options.workspacePath);
    this.client = new LMStudioClient({
      endpoint: options.modelUrl,
      model: options.modelName,
      temperature: options.temperature,
      timeoutMs: options.requestTimeoutMs,
      apiKey: options.apiKey,
    });
    this.registry = new ToolRegistry(this.workspaceRoot);
    AgentLogger.debug("CopilotAgent initialized", {
      workspaceRoot: this.workspaceRoot,
      modelUrl: options.modelUrl,
      modelName: options.modelName,
      temperature: options.temperature,
      maxIterations: options.maxIterations,
      timeoutMs: options.requestTimeoutMs,
      hasApiKey: Boolean(options.apiKey),
    });
  }

  /**
   * Executes the multi-iteration conversation loop until the model responds
   * without tool calls or the iteration budget is exhausted.
   *
   * @throws Error when the iteration limit is reached without a final reply.
   */
  public async run(): Promise<void> {
    AgentLogger.debug("Ensuring workspace exists", {
      workspaceRoot: this.workspaceRoot,
    });
    await this.ensureWorkspace();
    AgentLogger.debug("Workspace verified", {
      workspaceRoot: this.workspaceRoot,
    });

    const contextSnapshot: string = await WorkspaceContextBuilder.buildSnapshot(
      this.workspaceRoot,
    );
    AgentLogger.debug(`Workspace snapshot built:\n${contextSnapshot}`, {
      snapshotLength: contextSnapshot.length,
      snapshotContents: contextSnapshot,
    });

    const messages: Array<ChatMessage> = [
      { role: "system", content: buildSystemPrompt() },
      {
        role: "user",
        content: this.composeUserPrompt(this.options.prompt, contextSnapshot),
      },
    ];
    AgentLogger.debug(
      `Initial conversation seeded:\n${this.describeMessages(messages)}`,
      {
        messageCount: messages.length,
        seedMessages: messages,
      },
    );

    for (
      let iteration: number = 0;
      iteration < this.options.maxIterations;
      iteration += 1
    ) {
      AgentLogger.info(`Starting iteration ${iteration + 1}`);
      AgentLogger.debug(
        `Sending messages to LLM (iteration ${iteration + 1}):\n${this.describeMessages(messages)}`,
        {
          iteration: iteration + 1,
          messageCount: messages.length,
          outgoingMessages: messages,
        },
      );

      const response: ChatMessage = await this.client.createChatCompletion({
        messages,
        tools: this.registry.getToolDefinitions(),
      });
      AgentLogger.debug(
        `LLM response received (iteration ${iteration + 1}):\n${this.describeMessages([response])}`,
        {
          iteration: iteration + 1,
          hasToolCalls: Boolean(response.tool_calls?.length),
          responseContent: response.content ?? null,
          responseObject: response,
          responseToolCalls: response.tool_calls ?? null,
        },
      );

      if (response.tool_calls?.length) {
        AgentLogger.info(
          `Model requested tools: ${response.tool_calls
            .map((call: OpenAIToolCall) => {
              return call.function.name;
            })
            .join(", ")}`,
        );
        // The assistant message carrying the tool calls must be appended
        // before the tool results so the transcript stays well-formed.
        messages.push(response);
        await this.handleToolCalls(response.tool_calls, messages);
        continue;
      }

      const finalMessage: string =
        response.content?.trim() ||
        "Model ended the conversation without a reply.";
      // eslint-disable-next-line no-console
      console.log(`\n${finalMessage}`);
      AgentLogger.debug(
        `Conversation completed after ${iteration + 1} iterations:\n${finalMessage}`,
        {
          iterationsUsed: iteration + 1,
          finalMessageLength: finalMessage.length,
          finalMessage,
        },
      );
      return;
    }

    AgentLogger.error("Iteration limit reached", {
      maxIterations: this.options.maxIterations,
      prompt: this.options.prompt,
    });
    throw new Error(
      `Reached the iteration limit (${this.options.maxIterations}) without a final response.`,
    );
  }

  /**
   * Executes every tool call requested by the model and appends the results to
   * the running conversation so the LLM can observe tool outputs.
   *
   * Uses the imported OpenAIToolCall type instead of re-declaring the tool
   * call shape inline, keeping the signature consistent with run().
   */
  private async handleToolCalls(
    calls: Array<OpenAIToolCall>,
    messages: Array<ChatMessage>,
  ): Promise<void> {
    for (let index: number = 0; index < calls.length; index += 1) {
      const call: OpenAIToolCall | undefined = calls[index];
      if (call === undefined) {
        AgentLogger.warn("Missing tool call entry", {
          requestedIndex: index,
          totalCalls: calls.length,
        });
        continue;
      }
      AgentLogger.debug("Executing tool", {
        toolName: call.function.name,
        callId: call.id,
      });
      const result: ToolExecutionResult = await this.registry.execute(call);
      // eslint-disable-next-line no-console
      console.log(`\n# Tool: ${call.function.name}\n${result.output}\n`);
      AgentLogger.debug(
        `Tool execution completed (${call.function.name}/${call.id}):\n${result.output}`,
        {
          toolName: call.function.name,
          callId: call.id,
          isError: result.output.startsWith("ERROR"),
          outputLength: result.output.length,
          outputContents: result.output,
        },
      );
      messages.push({
        role: "tool",
        content: result.output,
        tool_call_id: result.toolCallId,
      });
      AgentLogger.debug(
        `Tool result appended to conversation (total ${messages.length} messages):\n${this.describeMessages(messages)}`,
        {
          totalMessages: messages.length,
          updatedConversation: messages,
        },
      );
    }
  }

  /**
   * Verifies that the configured workspace root directory exists before any
   * commands or tool calls attempt to touch the file system.
   *
   * @throws Error when the path does not exist or is not a directory.
   */
  private async ensureWorkspace(): Promise<void> {
    AgentLogger.debug("Validating workspace directory", {
      workspaceRoot: this.workspaceRoot,
    });
    if (!(await LocalFile.doesDirectoryExist(this.workspaceRoot))) {
      throw new Error(
        `Workspace path ${this.workspaceRoot} does not exist or is not a directory.`,
      );
    }
    AgentLogger.debug("Workspace exists", {
      workspaceRoot: this.workspaceRoot,
    });
  }

  /**
   * Builds the user-facing portion of the chat prompt by combining the task
   * description with a structured workspace snapshot.
   */
  private composeUserPrompt(task: string, snapshot: string): string {
    const prompt: string = `# Task\n${task.trim()}\n\n# Workspace snapshot\n${snapshot}\n\nPlease reason step-by-step, gather any missing context with the tools, and keep iterating until the task is complete.`;
    AgentLogger.debug(`Composed user prompt:\n${prompt}`, {
      taskLength: task.length,
      snapshotLength: snapshot.length,
      promptLength: prompt.length,
      taskContents: task,
      snapshotContents: snapshot,
      promptContents: prompt,
    });
    return prompt;
  }

  /** Renders a conversation transcript as readable text for debug logging. */
  private describeMessages(messages: Array<ChatMessage>): string {
    return messages
      .map((message: ChatMessage, index: number) => {
        const headerParts: Array<string> = [
          `Message ${index + 1}`,
          `role=${message.role}`,
        ];
        if (message.tool_call_id) {
          headerParts.push(`tool_call_id=${message.tool_call_id}`);
        }
        const content: unknown = message.content;
        const normalizedContent: string =
          typeof content === "string"
            ? content
            : content
              ? JSON.stringify(content, null, 2)
              : "<no content>";
        const toolCalls: string =
          Array.isArray(message.tool_calls) && message.tool_calls.length
            ? `\nTool calls:\n${JSON.stringify(message.tool_calls, null, 2)}`
            : "";
        return `${headerParts.join(" | ")}\n${normalizedContent}${toolCalls}`;
      })
      .join("\n\n");
  }
}

View File

@@ -0,0 +1,19 @@
/**
 * Produces the fixed system-prompt text that instructs the LLM on how to act
 * as the OneUptime Copilot agent inside a local repository workspace.
 */
export function buildSystemPrompt(): string {
  const systemPrompt: string = `You are the OneUptime Copilot Agent, a fully autonomous senior engineer that works inside a local workspace. Your job is to understand the user's request, gather the context you need, modify files with precision, run checks, and stop only when the request is satisfied or truly blocked.
Core principles:
1. Stay focused on the workspace. Read files and inspect folders before editing. Never guess when you can verify.
2. Use the provided tools instead of printing raw code or shell commands. read_file/list_directory/search_workspace help you understand; apply_patch/write_file/run_command let you change or validate.
3. Break work into short iterations. Form a plan, call tools, review the output, and keep going until the plan is complete.
4. Prefer targeted edits (apply_patch) over rewriting entire files. If you must create or replace a whole file, describe why.
5. When running commands, capture real output and summarize failures honestly. Do not invent results.
6. Reference workspace paths or symbols using Markdown backticks (\`path/to/file.ts\`).
7. Keep responses concise and outcome-oriented. Explain what you inspected, what you changed, how you verified it, and what remains.
8. If you hit a blocker (missing dependency, failing command, lacking permission), describe the issue and what you tried before asking for help.
Always think before acting, gather enough evidence, and prefer high-quality, minimal diffs. The user expects you to proactively explore, implement, and validate fixes without further guidance.`;

  return systemPrompt;
}

View File

@@ -0,0 +1,134 @@
import fs from "node:fs/promises";
import type { Dirent } from "node:fs";
import path from "node:path";
import Execute from "Common/Server/Utils/Execute";
import AgentLogger from "../Utils/AgentLogger";
/**
* Produces human-readable snapshots of the current workspace, including git
* metadata and directory listings, so the agent can reason about its
* environment.
*/
export class WorkspaceContextBuilder {
  /**
   * Builds a multi-section textual snapshot describing the workspace root,
   * git branch/status, and top-level entries. Git sections are omitted
   * (rather than failing) when git is unavailable or the directory is not a
   * repository.
   */
  public static async buildSnapshot(workspaceRoot: string): Promise<string> {
    const absoluteRoot: string = path.resolve(workspaceRoot);
    const sections: Array<string> = [`Workspace root: ${absoluteRoot}`];
    AgentLogger.debug("Building workspace snapshot", {
      workspaceRoot: absoluteRoot,
    });

    // Current branch name; null when git is unavailable — section skipped.
    const branch: string | null = await this.tryGitCommand(
      ["rev-parse", "--abbrev-ref", "HEAD"],
      absoluteRoot,
    );
    if (branch) {
      sections.push(`Git branch: ${branch.trim()}`);
      AgentLogger.debug(`Detected git branch: ${branch.trim()}`, {
        branch: branch.trim(),
      });
    }

    // Short-format status with branch header (`git status -sb`).
    const status: string | null = await this.tryGitCommand(
      ["status", "-sb"],
      absoluteRoot,
    );
    if (status) {
      sections.push(`Git status:\n${status.trim()}`);
      AgentLogger.debug(`Captured git status:\n${status.trim()}`, {
        statusLength: status.length,
        statusContents: status.trim(),
      });
    }

    const entries: Array<string> = await this.listTopLevelEntries(absoluteRoot);
    sections.push(
      `Top-level entries (${entries.length}): ${entries.join(", ")}`,
    );
    AgentLogger.debug(
      `Listed top-level entries (${entries.length}): ${entries.join(", ")}`,
      {
        entryCount: entries.length,
        entries,
      },
    );

    const snapshot: string = sections.join("\n");
    AgentLogger.debug(`Workspace snapshot complete:\n${snapshot}`, {
      sectionCount: sections.length,
      snapshotLength: snapshot.length,
      snapshotContents: snapshot,
    });
    return snapshot;
  }

  /**
   * Returns an ordered, filtered list of top-level files and directories while
   * hiding dotfiles and heavy folders like node_modules. Directories get a
   * trailing "/" suffix. Returns an empty list (never throws) on read errors.
   */
  private static async listTopLevelEntries(
    root: string,
  ): Promise<Array<string>> {
    try {
      const dirEntries: Array<Dirent> = await fs.readdir(root, {
        withFileTypes: true,
      });
      return dirEntries
        .filter((entry: Dirent) => {
          return !entry.name.startsWith(".") && entry.name !== "node_modules";
        })
        // Cap the listing at 25 entries to keep the snapshot compact.
        .slice(0, 25)
        .map((entry: Dirent) => {
          return entry.isDirectory() ? `${entry.name}/` : entry.name;
        });
    } catch (error) {
      AgentLogger.error("Unable to list workspace entries", error as Error);
      return [];
    } finally {
      // Runs on both the success and error paths.
      AgentLogger.debug("listTopLevelEntries completed", {
        root,
      });
    }
  }

  /**
   * Executes a git command and returns the trimmed output, swallowing errors so
   * snapshot generation never fails if git is unavailable.
   *
   * @returns The command's stdout, or null when the command fails.
   */
  private static async tryGitCommand(
    args: Array<string>,
    cwd: string,
  ): Promise<string | null> {
    try {
      const output: string = await Execute.executeCommandFile({
        command: "git",
        args,
        cwd,
      });
      AgentLogger.debug(
        `Git command succeeded (${args.join(" ")}):\n${output}`,
        {
          args,
          cwd,
          outputLength: output.length,
          outputContents: output,
        },
      );
      return output;
    } catch (error) {
      const message: string = (error as Error).message;
      AgentLogger.debug(
        `Git command failed (${args.join(" ")}) in ${cwd}: ${message}`,
        {
          cwd,
          args,
          error: message,
        },
      );
      return null;
    }
  }
}

121
Copilot/src/Index.ts Normal file
View File

@@ -0,0 +1,121 @@
#!/usr/bin/env node
import path from "node:path";
import { Command } from "commander";
import { CopilotAgent, CopilotAgentOptions } from "./Agent/CopilotAgent";
import AgentLogger from "./Utils/AgentLogger";
/**
 * Shape of the parsed CLI options. Declared once so the local annotation and
 * the commander `opts<…>()` generic cannot drift apart (previously the same
 * structural type was written out twice).
 */
interface CliOptions {
  prompt: string;
  model: string;
  workspacePath: string;
  modelName?: string;
  temperature: string;
  maxIterations: string;
  timeout: string;
  apiKey?: string;
  logLevel?: string;
  logFile?: string;
}

/** CLI harness for configuring and launching the Copilot agent. */
const program: Command = new Command();

program
  .name("oneuptime-copilot-agent")
  .description("Autonomous OneUptime coding agent for LM Studio hosted models")
  .requiredOption(
    "--prompt <text>",
    "Problem statement or set of tasks for the agent",
  )
  .requiredOption(
    "--model <url>",
    "Full LM Studio chat-completions endpoint (for example http://localhost:1234/v1/chat/completions)",
  )
  .requiredOption(
    "--workspace-path <path>",
    "Path to the repository or folder the agent should work inside",
  )
  .option(
    "--model-name <name>",
    "Model identifier expected by the LM Studio endpoint",
    "lmstudio",
  )
  .option(
    "--temperature <value>",
    "Sampling temperature passed to the model (default 0.1)",
    "0.1",
  )
  .option(
    "--max-iterations <count>",
    "Maximum number of tool-calling rounds (default 100)",
    "100",
  )
  .option(
    "--timeout <ms>",
    "HTTP timeout for each LLM request in milliseconds (default 120000)",
    "120000",
  )
  .option(
    "--api-key <token>",
    "API key if the endpoint requires authentication",
  )
  .option(
    "--log-level <level>",
    "debug | info | warn | error (default info)",
    process.env["LOG_LEVEL"] ?? "info",
  )
  .option(
    "--log-file <path>",
    "Optional file path to append all agent logs for auditing",
  )
  .parse(process.argv);

/** Entry point that parses CLI args, configures logging, and runs the agent. */
(async () => {
  const opts: CliOptions = program.opts<CliOptions>();

  // Configure logging before anything else so all later steps are captured.
  process.env["LOG_LEVEL"] = opts.logLevel?.toUpperCase() ?? "INFO";
  await AgentLogger.configure({ logFilePath: opts.logFile });
  AgentLogger.debug("CLI options parsed", {
    workspacePath: opts.workspacePath,
    model: opts.model,
    modelName: opts.modelName,
    temperature: opts.temperature,
    maxIterations: opts.maxIterations,
    timeout: opts.timeout,
    hasApiKey: Boolean(opts.apiKey),
    logLevel: process.env["LOG_LEVEL"],
    logFile: opts.logFile,
  });

  // Translate raw string options into typed agent configuration, falling back
  // to the documented defaults when a numeric option fails to parse.
  const config: CopilotAgentOptions = {
    prompt: opts.prompt,
    modelUrl: opts.model,
    modelName: opts.modelName || "lmstudio",
    workspacePath: path.resolve(opts.workspacePath),
    temperature: Number(opts.temperature) || 0.1,
    maxIterations: Number(opts.maxIterations) || 100,
    requestTimeoutMs: Number(opts.timeout) || 120000,
    apiKey: opts.apiKey,
  };

  try {
    const agent: CopilotAgent = new CopilotAgent(config);
    await agent.run();
  } catch (error) {
    AgentLogger.error("Agent run failed", error as Error);
    // eslint-disable-next-line no-console
    console.error("Agent failed", error);
    process.exit(1);
  }
})();

Some files were not shown because too many files have changed in this diff Show More