mirror of
https://github.com/OneUptime/oneuptime.git
synced 2026-04-06 08:42:13 +02:00
Compare commits
37 Commits
9.4.6
...
feature-gi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5ec8432b4d | ||
|
|
131e538a62 | ||
|
|
10b28bb406 | ||
|
|
a9618c2e71 | ||
|
|
dda1689ae1 | ||
|
|
90ab84fb90 | ||
|
|
3174972153 | ||
|
|
239838a4cb | ||
|
|
67cad51439 | ||
|
|
73e6eb8fae | ||
|
|
8ae87bed6c | ||
|
|
498dddcd2f | ||
|
|
f671e7f506 | ||
|
|
88826d5f71 | ||
|
|
71cec28b10 | ||
|
|
38a3a0503f | ||
|
|
51ba011155 | ||
|
|
ca0b5df568 | ||
|
|
52b90d2bcd | ||
|
|
04805cb1e9 | ||
|
|
8c260457d5 | ||
|
|
05cde6a224 | ||
|
|
495269dc50 | ||
|
|
4983765d4a | ||
|
|
5817d4b86e | ||
|
|
f27610911e | ||
|
|
55d8192c96 | ||
|
|
fd4c7bbf78 | ||
|
|
7262d0ef43 | ||
|
|
5e60b3a07c | ||
|
|
d470a277c9 | ||
|
|
fce3816557 | ||
|
|
a2efe3d86c | ||
|
|
b2200e5108 | ||
|
|
a87dd8599b | ||
|
|
9236eaa351 | ||
|
|
372bcf5bec |
@@ -1,68 +0,0 @@
|
||||
# These are aliases that will make your life simple when you're building OneUptime
|
||||
|
||||
# Make directory and change directory at the same time.
|
||||
mkcdir ()
|
||||
{
|
||||
mkdir -p -- "$1" &&
|
||||
cd -P -- "$1"
|
||||
}
|
||||
|
||||
# Git aliases
|
||||
alias g="git"
|
||||
alias gs="git status"
|
||||
alias ga="git add"
|
||||
alias gc="git checkout"
|
||||
alias gb="git branch"
|
||||
alias gp="git pull"
|
||||
alias gpo="git push origin"
|
||||
alias gl="git log"
|
||||
alias gd="git diff"
|
||||
alias gm="git merge"
|
||||
|
||||
# Kubernetes aliases
|
||||
alias k="kubectl"
|
||||
alias kg="kubectl get"
|
||||
alias kd="kubectl describe"
|
||||
alias kc="kubectl create"
|
||||
alias kdel="kubectl delete"
|
||||
alias klo="kubectl logs"
|
||||
alias klof="kubectl logs -f"
|
||||
alias kex="kubectl exec"
|
||||
alias kexi="kubectl exec -it"
|
||||
|
||||
# Docker aliases
|
||||
alias d="docker"
|
||||
alias dc="docker compose"
|
||||
alias dcu="docker compose up"
|
||||
alias dcd="docker compose down"
|
||||
|
||||
# Node aliases
|
||||
alias n="npm"
|
||||
alias ni="npm install"
|
||||
alias nis="npm install --save"
|
||||
alias nid="npm install --save-dev"
|
||||
alias nr="npm run"
|
||||
alias nt="npm test"
|
||||
alias ns="npm start"
|
||||
alias nb="npm build"
|
||||
|
||||
# Rust aliases
|
||||
alias c="cargo"
|
||||
alias cb="cargo build"
|
||||
alias cr="cargo run"
|
||||
|
||||
# OneUptime Specific Aliases
|
||||
# --------------------------
|
||||
|
||||
alias nrd="npm run dev"
|
||||
alias nrl="npm run logs"
|
||||
alias nrb="npm run build"
|
||||
alias nrfb="npm run force-build"
|
||||
alias nrps="npm run ps-dev"
|
||||
|
||||
# OneUptime LLM Server
|
||||
alias nrfbl="npm run force-build-llm"
|
||||
alias nrdl="npm run dev-llm"
|
||||
alias nrll="npm run logs-llm"
|
||||
alias nrbl="npm run build-llm"
|
||||
|
||||
@@ -5,9 +5,7 @@ node_modules
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
node_modules
|
||||
**/node_modules
|
||||
|
||||
/newrelic_agent.log
|
||||
.idea
|
||||
# testing
|
||||
/coverage
|
||||
@@ -17,7 +15,10 @@ node_modules
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
env.js
|
||||
|
||||
npm-debug.log*
|
||||
@@ -33,15 +34,6 @@ stop
|
||||
|
||||
nohup.out*
|
||||
|
||||
# Large directories not needed for Docker builds
|
||||
E2E/playwright-report
|
||||
E2E/test-results
|
||||
Terraform
|
||||
HelmChart
|
||||
Scripts
|
||||
.git
|
||||
GoSDK
|
||||
|
||||
encrypted-credentials.tar
|
||||
encrypted-credentials/
|
||||
|
||||
@@ -63,6 +55,4 @@ tests/coverage
|
||||
|
||||
settings.json
|
||||
|
||||
GoSDK/tester/
|
||||
|
||||
|
||||
go-sdk/tester/
|
||||
30
.eslintignore
Normal file
30
.eslintignore
Normal file
@@ -0,0 +1,30 @@
|
||||
*/node_modules/*
|
||||
*/build/*
|
||||
*/coverage/*
|
||||
|
||||
*/public/*
|
||||
*/views/*
|
||||
|
||||
*fonts*
|
||||
*logos*
|
||||
|
||||
.*
|
||||
*.png
|
||||
*.sh
|
||||
*.txt
|
||||
*.snap
|
||||
*.enc
|
||||
Dockerfile
|
||||
CHANGELOG
|
||||
LICENSE
|
||||
|
||||
marketing/*/*
|
||||
licenses/*
|
||||
certifications/*
|
||||
api-docs/public/assets/*
|
||||
js-sdk/src/cli/server-monitor/out/scripts/prettify/*
|
||||
js-sdk/dist/logger.js
|
||||
js-sdk/dist/logger.min.js
|
||||
js-sdk/dist/fyipe.js
|
||||
js-sdk/dist/fyipe.min.js
|
||||
_test/*
|
||||
107
.eslintrc.json
Normal file
107
.eslintrc.json
Normal file
@@ -0,0 +1,107 @@
|
||||
{
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 8,
|
||||
"ecmaFeatures": {
|
||||
"experimentalObjectRestSpread": true,
|
||||
"jsx": true,
|
||||
"spread": true
|
||||
},
|
||||
"sourceType": "module"
|
||||
},
|
||||
"env": {
|
||||
"browser": true,
|
||||
"node": true,
|
||||
"jquery": true,
|
||||
"es6": true,
|
||||
"jest": true
|
||||
},
|
||||
"plugins": [
|
||||
"react",
|
||||
"jsx-a11y"
|
||||
],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:react/recommended",
|
||||
"plugin:prettier/recommended",
|
||||
"prettier",
|
||||
"prettier/react"
|
||||
],
|
||||
"globals": {
|
||||
"describe" :true,
|
||||
"context" :true,
|
||||
"before" :true,
|
||||
"beforeEach" :true,
|
||||
"after" :true,
|
||||
"afterEach" :true,
|
||||
"it" :true,
|
||||
"expect" :true,
|
||||
"workbox" :true,
|
||||
"importScripts" :true
|
||||
},
|
||||
"parser": "babel-eslint",
|
||||
"rules": {
|
||||
"no-fallthrough": "error",
|
||||
"no-unreachable": "error",
|
||||
"no-cond-assign": "error",
|
||||
"valid-typeof": "error",
|
||||
"no-func-assign": "error",
|
||||
"no-extra-semi": "error",
|
||||
"linebreak-style": [
|
||||
"error",
|
||||
"unix"
|
||||
],
|
||||
"no-unused-vars": "error",
|
||||
"no-console": "error",
|
||||
"no-undef": "error",
|
||||
"no-empty": "error",
|
||||
"no-case-declarations": "error",
|
||||
"no-mixed-spaces-and-tabs": "error",
|
||||
"no-useless-escape": "error",
|
||||
"prettier/prettier": "error",
|
||||
"react/jsx-no-undef": "error",
|
||||
"react/jsx-no-bind": [
|
||||
"error",
|
||||
{
|
||||
"allowArrowFunctions": true,
|
||||
"allowBind": false,
|
||||
"ignoreRefs": false
|
||||
}
|
||||
],
|
||||
"react/no-children-prop": "error",
|
||||
"react/no-deprecated": "error",
|
||||
"react/boolean-prop-naming": "error",
|
||||
"react/no-is-mounted": "error",
|
||||
"react/no-find-dom-node": "error",
|
||||
"react/no-did-update-set-state": "error",
|
||||
"react/no-unknown-property": "error",
|
||||
"react/no-unused-prop-types": "error",
|
||||
"react/jsx-no-duplicate-props": "error",
|
||||
"react/no-unused-state": "error",
|
||||
"react/jsx-uses-vars": "error",
|
||||
"react/prop-types": "error",
|
||||
"react/react-in-jsx-scope": "error",
|
||||
"react/no-string-refs": "error",
|
||||
"jsx-a11y/href-no-hash": [
|
||||
0
|
||||
],
|
||||
"react/no-unescaped-entities": "error",
|
||||
"react/display-name": "error",
|
||||
"react/jsx-pascal-case": "error",
|
||||
"array-callback-return": "error",
|
||||
"no-loop-func": "error",
|
||||
"jsx-a11y/anchor-is-valid": "error",
|
||||
"prefer-const": [
|
||||
"error",
|
||||
{
|
||||
"destructuring": "any",
|
||||
"ignoreReadBeforeAssign": false
|
||||
}
|
||||
],
|
||||
"no-var": "error"
|
||||
},
|
||||
"settings": {
|
||||
"react": {
|
||||
"version": "16.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
41
.github/ISSUE_TEMPLATE/bug_report.md
vendored
41
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,41 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: '<Title of the issue>'
|
||||
labels: 'bug'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Desktop (please complete the following information):**
|
||||
- OS: [e.g. iOS]
|
||||
- Browser [e.g. chrome, safari]
|
||||
- Version [e.g. 22]
|
||||
|
||||
**Smartphone (please complete the following information):**
|
||||
- Device: [e.g. iPhone6]
|
||||
- OS: [e.g. iOS8.1]
|
||||
- Browser [e.g. stock browser, safari]
|
||||
- Version [e.g. 22]
|
||||
|
||||
**Deployment Type**
|
||||
Is this issue on SaaS (at https://oneuptime.com) or self hosted (the version that you deployed on your server)?
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,20 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: '<Title of the issue>'
|
||||
labels: 'enhancement'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
10
.github/ISSUE_TEMPLATE/request-for-test.md
vendored
10
.github/ISSUE_TEMPLATE/request-for-test.md
vendored
@@ -1,10 +0,0 @@
|
||||
---
|
||||
name: Request for Test
|
||||
about: 'Request more tests for product code in the platform. '
|
||||
title: Tests for [FileName]
|
||||
labels: 'write tests'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
|
||||
17
.github/instructions/instructions.md
vendored
17
.github/instructions/instructions.md
vendored
@@ -1,17 +0,0 @@
|
||||
---
|
||||
applyTo: '**'
|
||||
---
|
||||
|
||||
# Building and Compiling
|
||||
|
||||
If you would like to compile or build any project. Please cd into the directory and run the following command:
|
||||
|
||||
```
|
||||
npm run compile
|
||||
```
|
||||
|
||||
Ths will make sure there are no type / syntax errors.
|
||||
|
||||
# Typescript Types.
|
||||
|
||||
Please do not use "any" types. Please create proper types where required.
|
||||
14
.github/pull_request_template.md
vendored
14
.github/pull_request_template.md
vendored
@@ -1,14 +0,0 @@
|
||||
### Title of this pull request?
|
||||
|
||||
### Small Description?
|
||||
|
||||
### Pull Request Checklist:
|
||||
|
||||
- [ ] Please make sure all jobs pass before requesting a review.
|
||||
- [ ] Put `closes #XXXX` in your comment to auto-close the issue that your PR fixes (if such).
|
||||
- [ ] Have you lint your code locally before submission?
|
||||
- [ ] Did you write tests where appropriate?
|
||||
|
||||
### Related Issue?
|
||||
|
||||
### Screenshots (if appropriate):
|
||||
35
.github/workflows/backend.yaml
vendored
Normal file
35
.github/workflows/backend.yaml
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
name: Backend Jobs
|
||||
|
||||
on:
|
||||
push:
|
||||
# run this workflow when changes are pushed to backend
|
||||
paths:
|
||||
- "backend/**"
|
||||
- ".github/workflows/backend.yaml"
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
- name: Run Backend tests
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y curl gcc
|
||||
sudo apt-get install -y build-essential
|
||||
curl -sSL https://get.docker.com/ | sh
|
||||
sudo docker stop $(sudo docker ps -aq) || echo 'No docker containers'
|
||||
sudo docker rm $(sudo docker ps -aq) || echo 'No docker containers'
|
||||
sudo docker run --name mongo -p 27017:27017 -d mongo:4.2.3
|
||||
sudo docker run --name redis -p 6379:6379 -d redis:5.0.7 redis-server
|
||||
sudo docker build -t oneuptime/backend:3.0 ./backend
|
||||
sudo docker run --env-file ./backend/.env -e IS_SAAS_SERVICE=true -p 3002:3002 -d oneuptime/backend:3.0
|
||||
sudo docker ps
|
||||
sudo apt-get install -y nodejs
|
||||
node --version
|
||||
cd backend
|
||||
npm ci
|
||||
npm run test
|
||||
475
.github/workflows/build.yml
vendored
475
.github/workflows/build.yml
vendored
@@ -1,475 +0,0 @@
|
||||
name: Build
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*'
|
||||
- 'release'
|
||||
|
||||
|
||||
jobs:
|
||||
|
||||
docker-build-accounts:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./Accounts/Dockerfile .
|
||||
|
||||
docker-build-isolated-vm:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./IsolatedVM/Dockerfile .
|
||||
|
||||
docker-build-home:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./Home/Dockerfile .
|
||||
|
||||
docker-build-worker:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./Worker/Dockerfile .
|
||||
|
||||
docker-build-workflow:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./Workflow/Dockerfile .
|
||||
|
||||
docker-build-api-reference:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./APIReference/Dockerfile .
|
||||
|
||||
docker-build-docs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./Docs/Dockerfile .
|
||||
|
||||
|
||||
docker-build-otel-collector:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./OTelCollector/Dockerfile .
|
||||
|
||||
docker-build-app:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./App/Dockerfile .
|
||||
|
||||
|
||||
docker-build-e2e:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./E2E/Dockerfile .
|
||||
|
||||
docker-build-admin-dashboard:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for home
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./AdminDashboard/Dockerfile .
|
||||
|
||||
docker-build-dashboard:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for home
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./Dashboard/Dockerfile .
|
||||
|
||||
docker-build-probe:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image probe api
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./Probe/Dockerfile .
|
||||
|
||||
docker-build-probe-ingest:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image probe api
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./ProbeIngest/Dockerfile .
|
||||
|
||||
docker-build-server-monitor-ingest:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image probe api
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./ServerMonitorIngest/Dockerfile .
|
||||
|
||||
docker-build-telemetry:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image probe api
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./Telemetry/Dockerfile .
|
||||
|
||||
docker-build-incoming-request-ingest:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image probe api
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./IncomingRequestIngest/Dockerfile .
|
||||
|
||||
docker-build-status-page:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for home
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./StatusPage/Dockerfile .
|
||||
|
||||
docker-build-test-server:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for mail service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./TestServer/Dockerfile .
|
||||
|
||||
docker-build-ai-agent:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Preinstall
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: npm run prerun
|
||||
|
||||
# build image for ai agent service
|
||||
- name: build docker image
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 45
|
||||
max_attempts: 3
|
||||
command: sudo docker build --no-cache -f ./AIAgent/Dockerfile .
|
||||
70
.github/workflows/codeql-analysis.yml
vendored
70
.github/workflows/codeql-analysis.yml
vendored
@@ -1,70 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ master ]
|
||||
schedule:
|
||||
- cron: '17 0 * * 4'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
matrix:
|
||||
language: [ 'javascript', 'typescript', 'go' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
40
.github/workflows/common-jobs.yaml
vendored
40
.github/workflows/common-jobs.yaml
vendored
@@ -1,40 +0,0 @@
|
||||
name: Common Jobs
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
helm-lint:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Install Helm
|
||||
run: |
|
||||
curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash
|
||||
- name: Lint Helm Chart
|
||||
run: |
|
||||
helm lint ./HelmChart/Public/oneuptime
|
||||
|
||||
js-lint:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
- run: sudo apt-get update
|
||||
- run: sudo apt-get install -y curl gcc
|
||||
- run: sudo apt-get install -y build-essential
|
||||
- run: sudo apt-get install -y nodejs
|
||||
- run: npm install
|
||||
- run: npm run lint
|
||||
- name: Show how to fix lint
|
||||
if: ${{ failure() }}
|
||||
run: echo "Please run `npm run fix` in your root directory to fix the lint automatically."
|
||||
22
.github/workflows/common.yaml
vendored
Normal file
22
.github/workflows/common.yaml
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
name: Common Jobs
|
||||
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v2
|
||||
- run: sudo apt-get update
|
||||
- run: sudo apt-get install -y curl gcc
|
||||
- run: sudo apt-get install -y build-essential
|
||||
- run: sudo apt-get install -y nodejs
|
||||
- run: npm ci
|
||||
- run: npm run lint
|
||||
- run: chmod +x ./ci/scripts/cleanup.sh
|
||||
- run: ./ci/scripts/cleanup.sh
|
||||
|
||||
407
.github/workflows/compile.yml
vendored
407
.github/workflows/compile.yml
vendored
@@ -1,407 +0,0 @@
|
||||
name: Compile
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*'
|
||||
- 'release'
|
||||
|
||||
|
||||
jobs:
|
||||
|
||||
compile-accounts:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Accounts
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Accounts && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-isolated-vm:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile IsolatedVM
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd IsolatedVM && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-common:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- name: Compile Common
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Common && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-app:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile App
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd App && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-home:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Home
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Home && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-worker:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Worker
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Worker && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-workflow:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Workflow
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Workflow && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-api-reference:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile API Reference
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd APIReference && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-docs-reference:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Docs Reference
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Docs && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-nginx:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
|
||||
- name: Compile Nginx
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Nginx && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-infrastructure-agent:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# Setup Go
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Compile Infrastructure Agent
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd InfrastructureAgent && go build .
|
||||
|
||||
|
||||
compile-admin-dashboard:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
|
||||
- name: Compile Admin Dashboard
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd AdminDashboard && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-dashboard:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
|
||||
- name: Compile Dashboard
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Dashboard && npm install && npm run compile && npm run dep-check
|
||||
|
||||
|
||||
compile-e2e:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: sudo apt-get update
|
||||
- run: cd Common && npm install
|
||||
- name: Compile E2E
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd E2E && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-probe:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Probe
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Probe && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-probe-ingest:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Probe Ingest
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd ProbeIngest && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-server-monitor-ingest:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Server Monitor Ingest
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd ServerMonitorIngest && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-telemetry:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Telemetry
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd Telemetry && npm install && npm run compile && npm run dep-check
|
||||
|
||||
|
||||
compile-incoming-request-ingest:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Incoming Request Ingest
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd IncomingRequestIngest && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-status-page:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
|
||||
- name: Compile Status Page
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd StatusPage && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-test-server:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile Test Server
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd TestServer && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-mcp:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile MCP
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd MCP && npm update @oneuptime/common && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-ai-agent:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- name: Compile AIAgent
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: cd AIAgent && npm install && npm run compile && npm run dep-check
|
||||
49
.github/workflows/npm-audit-fix.yml
vendored
49
.github/workflows/npm-audit-fix.yml
vendored
@@ -1,49 +0,0 @@
|
||||
name: NPM Audit Fix
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
npm-audit-fix:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Run npm audit fix across packages
|
||||
run: npm run audit-fix
|
||||
|
||||
- name: Detect changes
|
||||
id: changes
|
||||
run: |
|
||||
if git status --porcelain | grep .; then
|
||||
echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "has_changes=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Create pull request
|
||||
if: steps.changes.outputs.has_changes == 'true'
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
commit-message: "chore: npm audit fix"
|
||||
title: "chore: npm audit fix"
|
||||
body: |
|
||||
Automated npm audit fix run.
|
||||
Workflow: ${{ github.workflow }}
|
||||
Run ID: ${{ github.run_id }}
|
||||
branch: chore/npm-audit-fix
|
||||
delete-branch: true
|
||||
74
.github/workflows/openapi-spec-generation.yml
vendored
74
.github/workflows/openapi-spec-generation.yml
vendored
@@ -1,74 +0,0 @@
|
||||
name: OpenAPI Spec Generation
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*'
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
generate-openapi-spec:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{ github.run_number }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install Common dependencies
|
||||
run: cd Common && npm install
|
||||
|
||||
- name: Install root dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Install Script dependencies
|
||||
run: cd Scripts && npm install
|
||||
|
||||
- name: Generate OpenAPI specification
|
||||
run: npm run generate-openapi-spec
|
||||
|
||||
- name: Check if OpenAPI spec was generated
|
||||
run: |
|
||||
if [ -f "./openapi.json" ]; then
|
||||
echo "✅ OpenAPI spec file generated successfully"
|
||||
echo "📄 File size: $(du -h ./openapi.json | cut -f1)"
|
||||
echo "📊 Spec contains $(jq '.paths | length' ./openapi.json) API paths"
|
||||
echo "🏷️ API version: $(jq -r '.info.version' ./openapi.json)"
|
||||
echo "📝 API title: $(jq -r '.info.title' ./openapi.json)"
|
||||
else
|
||||
echo "❌ OpenAPI spec file was not generated"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Validate OpenAPI spec format
|
||||
run: |
|
||||
# Check if the file is valid JSON
|
||||
if jq empty ./openapi.json; then
|
||||
echo "✅ OpenAPI spec is valid JSON"
|
||||
else
|
||||
echo "❌ OpenAPI spec is not valid JSON"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if it has required OpenAPI fields
|
||||
if jq -e '.openapi and .info and .paths' ./openapi.json > /dev/null; then
|
||||
echo "✅ OpenAPI spec has required fields"
|
||||
else
|
||||
echo "❌ OpenAPI spec missing required fields (openapi, info, paths)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Upload OpenAPI spec as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: openapi-spec
|
||||
path: ./openapi.json
|
||||
retention-days: 30
|
||||
2243
.github/workflows/release.yml
vendored
2243
.github/workflows/release.yml
vendored
File diff suppressed because it is too large
Load Diff
47
.github/workflows/security.yml
vendored
Normal file
47
.github/workflows/security.yml
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
# This workflow uses actions that are not certified by GitHub.
|
||||
# They are provided by a third-party and are governed by
|
||||
# separate terms of service, privacy policy, and support
|
||||
# documentation.
|
||||
|
||||
# A sample workflow which checks out your Infrastructure as Code Configuration files,
|
||||
# such as Kubernetes, Helm & Terraform and scans them for any security issues.
|
||||
# The results are then uploaded to GitHub Security Code Scanning
|
||||
#
|
||||
# For more examples, including how to limit scans to only high-severity issues
|
||||
# and fail PR checks, see https://github.com/snyk/actions/
|
||||
|
||||
name: Security
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ master ]
|
||||
schedule:
|
||||
- cron: '23 20 * * 2'
|
||||
|
||||
jobs:
|
||||
snyk:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Run Snyk to check configuration files for security issues
|
||||
# Snyk can be used to break the build when it detects security issues.
|
||||
# In this case we want to upload the issues to GitHub Code Scanning
|
||||
continue-on-error: true
|
||||
uses: snyk/actions/iac@14818c4695ecc4045f33c9cee9e795a788711ca4
|
||||
env:
|
||||
# In order to use the Snyk Action you will need to have a Snyk API token.
|
||||
# More details in https://github.com/snyk/actions#getting-your-snyk-token
|
||||
# or you can signup for free at https://snyk.io/login
|
||||
SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
|
||||
with:
|
||||
# Add the path to the configuration file that you would like to test.
|
||||
# For example `deployment.yaml` for a Kubernetes deployment manifest
|
||||
# or `main.tf` for a Terraform configuration file
|
||||
file: your-file-to-test.yaml
|
||||
- name: Upload result to GitHub Code Scanning
|
||||
uses: github/codeql-action/upload-sarif@v1
|
||||
with:
|
||||
sarif_file: snyk.sarif
|
||||
71
.github/workflows/terraform-provider-e2e.yml
vendored
71
.github/workflows/terraform-provider-e2e.yml
vendored
@@ -1,71 +0,0 @@
|
||||
name: Terraform Provider E2E Tests
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- develop
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
terraform-e2e-tests:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{ github.run_number }}
|
||||
APP_TAG: latest
|
||||
|
||||
steps:
|
||||
- name: Free Disk Space (Ubuntu)
|
||||
uses: jlumbroso/free-disk-space@main
|
||||
with:
|
||||
tool-cache: true
|
||||
android: true
|
||||
dotnet: true
|
||||
haskell: true
|
||||
large-packages: true
|
||||
docker-images: true
|
||||
swap-storage: true
|
||||
|
||||
- name: Additional Disk Cleanup
|
||||
run: |
|
||||
sudo rm -rf /usr/local/lib/android || true
|
||||
sudo rm -rf /opt/ghc || true
|
||||
sudo rm -rf /usr/share/dotnet || true
|
||||
sudo rm -rf /opt/hostedtoolcache/CodeQL || true
|
||||
sudo rm -rf /usr/local/share/boost || true
|
||||
sudo rm -rf /usr/share/swift || true
|
||||
sudo apt-get clean || true
|
||||
sudo rm -rf /var/lib/apt/lists/* || true
|
||||
df -h
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
cache: 'npm'
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 'stable'
|
||||
cache: true
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: "1.6.0"
|
||||
terraform_wrapper: false
|
||||
|
||||
- name: Run E2E Tests
|
||||
run: |
|
||||
chmod +x ./E2E/Terraform/e2e-tests/scripts/*.sh
|
||||
./E2E/Terraform/e2e-tests/scripts/index.sh
|
||||
101
.github/workflows/terraform-provider-generation.yml
vendored
101
.github/workflows/terraform-provider-generation.yml
vendored
@@ -1,101 +0,0 @@
|
||||
name: Terraform Provider Generation
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- develop
|
||||
workflow_dispatch: # Allow manual trigger
|
||||
|
||||
jobs:
|
||||
generate-terraform-provider:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{ github.run_number }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
cache: 'npm'
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 'stable'
|
||||
cache: true
|
||||
|
||||
- name: Install Common dependencies
|
||||
run: cd Common && npm install
|
||||
|
||||
- name: Install root dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Install Script dependencies
|
||||
run: cd Scripts && npm install
|
||||
|
||||
- name: Generate Terraform provider
|
||||
run: npm run generate-terraform-provider
|
||||
|
||||
- name: Verify provider generation
|
||||
run: |
|
||||
PROVIDER_DIR="./Terraform"
|
||||
|
||||
# Check if provider directory was created
|
||||
if [ ! -d "$PROVIDER_DIR" ]; then
|
||||
echo "❌ Terraform provider directory not created"
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Provider directory created: $PROVIDER_DIR"
|
||||
|
||||
# Count generated files
|
||||
GO_FILES=$(find "$PROVIDER_DIR" -name "*.go" | wc -l)
|
||||
echo "📊 Generated Go files: $GO_FILES"
|
||||
|
||||
if [ "$GO_FILES" -eq 0 ]; then
|
||||
echo "❌ No Go files were generated"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for essential files
|
||||
if [ -f "$PROVIDER_DIR/go.mod" ]; then
|
||||
echo "✅ Go module file created"
|
||||
fi
|
||||
|
||||
if [ -f "$PROVIDER_DIR/README.md" ]; then
|
||||
echo "✅ Documentation created"
|
||||
fi
|
||||
|
||||
# Show directory structure for debugging
|
||||
echo "📁 Provider directory structure:"
|
||||
ls -la "$PROVIDER_DIR" || true
|
||||
|
||||
- name: Test Go build
|
||||
uses: nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: |
|
||||
PROVIDER_DIR="./Terraform"
|
||||
if [ -d "$PROVIDER_DIR" ] && [ -f "$PROVIDER_DIR/go.mod" ]; then
|
||||
cd "$PROVIDER_DIR"
|
||||
echo "🔨 Testing Go build..."
|
||||
go mod tidy
|
||||
go build -v ./...
|
||||
echo "✅ Go build successful"
|
||||
else
|
||||
echo "⚠️ Cannot test build - missing go.mod or provider directory"
|
||||
fi
|
||||
|
||||
- name: Upload Terraform provider as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: Terraform
|
||||
path: ./Terraform/
|
||||
retention-days: 30
|
||||
1964
.github/workflows/test-release.yaml
vendored
1964
.github/workflows/test-release.yaml
vendored
File diff suppressed because it is too large
Load Diff
23
.github/workflows/test.ai-agent.yaml
vendored
23
.github/workflows/test.ai-agent.yaml
vendored
@@ -1,23 +0,0 @@
|
||||
name: AIAgent Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*'
|
||||
- 'release'
|
||||
|
||||
|
||||
jobs:
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- run: cd AIAgent && npm install && npm run test
|
||||
23
.github/workflows/test.common.yaml
vendored
23
.github/workflows/test.common.yaml
vendored
@@ -1,23 +0,0 @@
|
||||
name: Common Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
BILLING_PRIVATE_KEY: ${{secrets.TEST_BILLING_PRIVATE_KEY}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && bash test-setup.sh
|
||||
- run: cd Common && npm install && rm -rf build && npm run test
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
name: Incoming Request Ingest Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd IncomingRequestIngest && npm install && npm run test
|
||||
|
||||
21
.github/workflows/test.mcp.yaml
vendored
21
.github/workflows/test.mcp.yaml
vendored
@@ -1,21 +0,0 @@
|
||||
name: MCP Server Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- run: cd MCP && npm install && npm run test
|
||||
21
.github/workflows/test.probe-ingest.yaml
vendored
21
.github/workflows/test.probe-ingest.yaml
vendored
@@ -1,21 +0,0 @@
|
||||
name: ProbeIngest Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd ProbeIngest && npm install && npm run test
|
||||
|
||||
23
.github/workflows/test.probe.yaml
vendored
23
.github/workflows/test.probe.yaml
vendored
@@ -1,23 +0,0 @@
|
||||
name: Probe Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- run: cd Probe && npm install
|
||||
- run: cd Probe && npm run test
|
||||
|
||||
22
.github/workflows/test.telemetry.yaml
vendored
22
.github/workflows/test.telemetry.yaml
vendored
@@ -1,22 +0,0 @@
|
||||
name: Telemetry Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Common && npm install
|
||||
- run: cd Telemetry && npm install && npm run test
|
||||
|
||||
42
.github/workflows/test.yaml
vendored
42
.github/workflows/test.yaml
vendored
@@ -1,42 +0,0 @@
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'hotfix-*' # excludes hotfix branches
|
||||
- 'release'
|
||||
|
||||
jobs:
|
||||
test-app:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd App && npm install && npm run test
|
||||
|
||||
test-home:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Home && npm install && npm run test
|
||||
|
||||
test-worker:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- run: cd Worker && npm install && npm run test
|
||||
98
.gitignore
vendored
98
.gitignore
vendored
@@ -3,24 +3,20 @@ node_modules
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
node_modules
|
||||
|
||||
# Build Dist
|
||||
*/build/dist/*
|
||||
|
||||
# Build Dist
|
||||
**/dist/*
|
||||
|
||||
/newrelic_agent.log
|
||||
.idea
|
||||
# testing
|
||||
**/coverage
|
||||
/coverage
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
env.js
|
||||
|
||||
npm-debug.log*
|
||||
@@ -57,84 +53,4 @@ tests/coverage
|
||||
|
||||
settings.json
|
||||
|
||||
GoSDK/tester/
|
||||
|
||||
.gitconfig
|
||||
|
||||
secret.env
|
||||
|
||||
# This file is autogenerated from the template
|
||||
*/.env
|
||||
*/Dockerfile
|
||||
config.env
|
||||
config.env.tmp
|
||||
config.env.temp
|
||||
logs.txt
|
||||
|
||||
*/Cert.crt
|
||||
*/Key.key
|
||||
|
||||
Certs/StatusPageCerts/*.crt
|
||||
Certs/StatusPageCerts/*.key
|
||||
|
||||
Certs/ServerCerts/*.crt
|
||||
Certs/ServerCerts/*.key
|
||||
|
||||
Backups/*.backup
|
||||
Backups/*.sql
|
||||
Backups/*.tar
|
||||
|
||||
.env
|
||||
|
||||
.eslintcache
|
||||
|
||||
HelmChart/Values/*.values.yaml
|
||||
|
||||
LLM/__pycache__/*
|
||||
|
||||
LLM/Models/*
|
||||
|
||||
Examples/otel-dotnet/obj/*
|
||||
|
||||
InfrastructureAgent/sea-prep.blob
|
||||
InfrastructureAgent/InfrastructureAgent
|
||||
InfrastructureAgent/build/*
|
||||
|
||||
|
||||
InfrastructureAgent/err.log
|
||||
InfrastructureAgent/out.log
|
||||
InfrastructureAgent/daemon.pid
|
||||
App/greenlock/.greenlockrc
|
||||
App/greenlock/greenlock.d/config.json
|
||||
App/greenlock/greenlock.d/config.json.bak
|
||||
Examples/otel-dotnet/bin/Debug/net6.0/Grpc.Core.Api.dll.txt
|
||||
InfrastructureAgent/oneuptime-infrastructure-agent
|
||||
|
||||
# ESLint cache
|
||||
.eslintcache*
|
||||
|
||||
# Terraform generated files
|
||||
openapi.json
|
||||
|
||||
Terraform/terraform-provider-oneuptime/**
|
||||
Terraform/openapi.json
|
||||
TerraformTest/**
|
||||
|
||||
terraform-provider-example/**
|
||||
|
||||
# MCP Server
|
||||
MCP/build/
|
||||
MCP/.env
|
||||
MCP/node_modules
|
||||
Dashboard/public/sw.js
|
||||
.claude/settings.local.json
|
||||
Common/.claude/settings.local.json
|
||||
E2E/Terraform/e2e-tests/test-env.sh
|
||||
|
||||
# Terraform state and plan files
|
||||
*.tfplan
|
||||
tfplan
|
||||
terraform.tfstate
|
||||
terraform.tfstate.backup
|
||||
.terraform/
|
||||
.terraform.lock.hcl
|
||||
go-sdk/tester/
|
||||
167
.gitlab-ci.yml
Normal file
167
.gitlab-ci.yml
Normal file
@@ -0,0 +1,167 @@
|
||||
# IMPORTANT:
|
||||
#
|
||||
# This CI/CD will run on an Ubuntu VM. After this script runs, the VM will retain the state.
|
||||
# It's your responsibility to clean the VM up in the cleanup stage after job finishes executing.
|
||||
# This script runs on Microk8s. Since our deployments are on Kubernetes, our aim is to create EXACT same replica for test, staging and production.
|
||||
|
||||
# Define stages
|
||||
stages:
|
||||
- BuildAndTest
|
||||
- Deploy
|
||||
- RollbackIfDeploymentFail
|
||||
- Tests
|
||||
- RollbackIfTestsFail
|
||||
|
||||
include:
|
||||
# GLOBAL JOBS
|
||||
- '/ci/spec/global/lint.yaml'
|
||||
- '/ci/spec/global/rollback.yaml'
|
||||
|
||||
# ACCOUNTS
|
||||
- '/ci/spec/accounts/audit.yaml'
|
||||
- '/ci/spec/accounts/deploy.yaml'
|
||||
- '/ci/spec/accounts/rollback.yaml'
|
||||
- '/ci/spec/accounts/lighthouse.yaml'
|
||||
|
||||
# ADMIN DASHBOARD
|
||||
- '/ci/spec/admin-dashboard/audit.yaml'
|
||||
- '/ci/spec/admin-dashboard/deploy.yaml'
|
||||
- '/ci/spec/admin-dashboard/rollback.yaml'
|
||||
|
||||
# API-DOCS DASHBOARD
|
||||
- '/ci/spec/api-docs/audit.yaml'
|
||||
- '/ci/spec/api-docs/deploy.yaml'
|
||||
- '/ci/spec/api-docs/rollback.yaml'
|
||||
|
||||
# BACKEND
|
||||
- '/ci/spec/backend/audit.yaml'
|
||||
- '/ci/spec/backend/build-n-test.yaml'
|
||||
- '/ci/spec/backend/deploy.yaml'
|
||||
- '/ci/spec/backend/rollback.yaml'
|
||||
|
||||
# DASHBOARD
|
||||
- '/ci/spec/dashboard/audit.yaml'
|
||||
- '/ci/spec/dashboard/deploy.yaml'
|
||||
- '/ci/spec/dashboard/rollback.yaml'
|
||||
|
||||
# HELM-CHART
|
||||
- '/ci/spec/helm-chart/audit.yaml'
|
||||
- '/ci/spec/helm-chart/build-n-test.yaml'
|
||||
- '/ci/spec/helm-chart/deploy.yaml'
|
||||
- '/ci/spec/helm-chart/rollback.yaml'
|
||||
|
||||
# HOME
|
||||
- '/ci/spec/home/audit.yaml'
|
||||
- '/ci/spec/home/deploy.yaml'
|
||||
- '/ci/spec/home/rollback.yaml'
|
||||
- '/ci/spec/home/lighthouse.yaml'
|
||||
|
||||
# HTTP-TEST-SERVER
|
||||
- '/ci/spec/http-test-server/audit.yaml'
|
||||
- '/ci/spec/http-test-server/deploy.yaml'
|
||||
- '/ci/spec/http-test-server/rollback.yaml'
|
||||
|
||||
# INIT-SCRIPT
|
||||
- '/ci/spec/init-script/audit.yaml'
|
||||
- '/ci/spec/init-script/deploy.yaml'
|
||||
|
||||
# LIGHTHOUSE RUNNER
|
||||
- '/ci/spec/lighthouse-runner/audit.yaml'
|
||||
- '/ci/spec/lighthouse-runner/deploy.yaml'
|
||||
|
||||
# LICENSING
|
||||
- '/ci/spec/licensing/audit.yaml'
|
||||
- '/ci/spec/licensing/build-n-test.yaml'
|
||||
- '/ci/spec/licensing/deploy.yaml'
|
||||
- '/ci/spec/licensing/rollback.yaml'
|
||||
|
||||
# PROBE
|
||||
- '/ci/spec/probe/audit.yaml'
|
||||
- '/ci/spec/probe/deploy.yaml'
|
||||
- '/ci/spec/probe/rollback.yaml'
|
||||
|
||||
# STATUS-PAGE
|
||||
- '/ci/spec/status-page/audit.yaml'
|
||||
- '/ci/spec/status-page/deploy.yaml'
|
||||
- '/ci/spec/status-page/rollback.yaml'
|
||||
|
||||
# ZAPIER
|
||||
- '/ci/spec/zapier/audit.yaml'
|
||||
- '/ci/spec/zapier/build-n-test.yaml'
|
||||
|
||||
# E2E
|
||||
- '/ci/spec/e2e/docker-compose-enterprise-test.yaml'
|
||||
- '/ci/spec/e2e/docker-compose-test.yaml'
|
||||
- '/ci/spec/e2e/kubernetes-enterprise-test.yaml'
|
||||
- '/ci/spec/e2e/kubernetes-test.yaml'
|
||||
|
||||
# SMOKE TEST
|
||||
- '/ci/spec/tests/staging.yaml'
|
||||
- '/ci/spec/tests/production.yaml'
|
||||
|
||||
# JS-SDK
|
||||
- '/ci/spec/js-sdk/audit.yaml'
|
||||
- '/ci/spec/js-sdk/build-n-test.yaml'
|
||||
- '/ci/spec/js-sdk/deploy.yaml'
|
||||
|
||||
# PHP-SDK
|
||||
- '/ci/spec/php-sdk/audit.yaml'
|
||||
- '/ci/spec/php-sdk/build-n-test.yaml'
|
||||
- '/ci/spec/php-sdk/deploy.yaml'
|
||||
|
||||
# JAVA-SDK
|
||||
- '/ci/spec/java-sdk/audit.yaml'
|
||||
- '/ci/spec/java-sdk/build-n-test.yaml'
|
||||
- '/ci/spec/java-sdk/deploy.yaml'
|
||||
|
||||
# PYTHON-SDK
|
||||
- '/ci/spec/python-sdk/audit.yaml'
|
||||
- '/ci/spec/python-sdk/build-n-test.yaml'
|
||||
- '/ci/spec/python-sdk/deploy.yaml'
|
||||
|
||||
# RUBY-SDK
|
||||
- '/ci/spec/ruby-sdk/audit.yaml'
|
||||
- '/ci/spec/ruby-sdk/build-n-test.yaml'
|
||||
- '/ci/spec/ruby-sdk/deploy.yaml'
|
||||
|
||||
# HARAKA
|
||||
- '/ci/spec/haraka/deploy.yaml'
|
||||
|
||||
# ONEUPTIME-LE-STORE
|
||||
- '/ci/spec/oneuptime-le-store/deploy.yaml'
|
||||
|
||||
# ONEUPTIME-ACME-HTTP-01
|
||||
- '/ci/spec/oneuptime-acme-http-01/deploy.yaml'
|
||||
|
||||
# ONEUPTIME-GL-MANAGER
|
||||
- '/ci/spec/oneuptime-gl-manager/deploy.yaml'
|
||||
|
||||
# Application Scanner
|
||||
- '/ci/spec/application-scanner/deploy.yaml'
|
||||
|
||||
# Script Runner
|
||||
- '/ci/spec/script-runner/audit.yaml'
|
||||
- '/ci/spec/script-runner/deploy.yaml'
|
||||
|
||||
# Container Scanner
|
||||
- '/ci/spec/container-scanner/deploy.yaml'
|
||||
|
||||
# realtime
|
||||
- '/ci/spec/realtime/audit.yaml'
|
||||
- '/ci/spec/realtime/deploy.yaml'
|
||||
- '/ci/spec/realtime/rollback.yaml'
|
||||
|
||||
# data-ingestor
|
||||
- '/ci/spec/data-ingestor/audit.yaml'
|
||||
- '/ci/spec/data-ingestor/deploy.yaml'
|
||||
- '/ci/spec/data-ingestor/rollback.yaml'
|
||||
|
||||
# probe-api
|
||||
- '/ci/spec/probe-api/audit.yaml'
|
||||
- '/ci/spec/probe-api/deploy.yaml'
|
||||
- '/ci/spec/probe-api/rollback.yaml'
|
||||
|
||||
# # GO-SDK
|
||||
# - '/ci/spec/go-sdk/audit.yaml'
|
||||
# - '/ci/spec/go-sdk/build-n-test.yaml'
|
||||
# - '/ci/spec/go-sdk/deploy.yaml'
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/bin/sh
|
||||
. "$(dirname "$0")/_/husky.sh"
|
||||
|
||||
# npm run lint
|
||||
@@ -3,7 +3,6 @@ node_modules
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
node_modules
|
||||
.vscode/
|
||||
.idea
|
||||
# testing
|
||||
@@ -14,7 +13,10 @@ node_modules
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
env.js
|
||||
|
||||
npm-debug.log*
|
||||
@@ -49,5 +51,5 @@ LICENSE
|
||||
marketing/*/*
|
||||
licenses/*
|
||||
certifications/*
|
||||
ApiReference/public/assets/*
|
||||
JavaScriptSDK/src/cli/server-monitor/out/scripts/prettify/*
|
||||
api-docs/public/assets/*
|
||||
js-sdk/src/cli/server-monitor/out/scripts/prettify/*
|
||||
8
.prettierrc.json
Normal file
8
.prettierrc.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"trailingComma": "es5",
|
||||
"tabWidth": 4,
|
||||
"semi": true,
|
||||
"singleQuote": true,
|
||||
"bracketSpacing": true,
|
||||
"arrowParens": "avoid"
|
||||
}
|
||||
5
.vscode/extensions.json
vendored
5
.vscode/extensions.json
vendored
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"esbenp.prettier-vscode"
|
||||
]
|
||||
}
|
||||
323
.vscode/launch.json
vendored
323
.vscode/launch.json
vendored
@@ -3,237 +3,22 @@
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"tasks": [
|
||||
"configurations": [
|
||||
{
|
||||
"type": "docker-run",
|
||||
"label": "docker-run: debug",
|
||||
"dependsOn": [
|
||||
"docker-build"
|
||||
],
|
||||
"dockerRun": {
|
||||
"command": "nest start --debug 127.0.0.1:9229"
|
||||
},
|
||||
"node": {
|
||||
"enableDebugging": true
|
||||
}
|
||||
}
|
||||
],
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Debug Infrastructure Agent",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "debug",
|
||||
"program": "./InfrastructureAgent",
|
||||
"args": ["start"],
|
||||
},
|
||||
{
|
||||
"name": "Node.js - Debug Current File",
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true,
|
||||
"request": "launch",
|
||||
"program": "${file}"
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/App",
|
||||
"name": "App: Debug with Docker",
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/backend",
|
||||
"name": "Backend: Debug with Docker",
|
||||
"port": 9232,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/Home",
|
||||
"name": "Home: Debug with Docker",
|
||||
"port": 9212,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/Worker",
|
||||
"name": "Worker: Debug with Docker",
|
||||
"port": 8734,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/Workflow",
|
||||
"name": "Workflow: Debug with Docker",
|
||||
"port": 8735,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/Docs",
|
||||
"name": "Docs: Debug with Docker",
|
||||
"port": 8738,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/APIReference",
|
||||
"name": "API Reference: Debug with Docker",
|
||||
"port": 8737,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/TestServer",
|
||||
"name": "Test Server: Debug with Docker",
|
||||
"port": 9141,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/Probe",
|
||||
"name": "Probe: Debug with Docker",
|
||||
"port": 9229,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/ProbeIngest",
|
||||
"name": "ProbeIngest: Debug with Docker",
|
||||
"port": 9932,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/ServerMonitorIngest",
|
||||
"name": "ServerMonitorIngest: Debug with Docker",
|
||||
"port": 9941,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/IncomingRequestIngest",
|
||||
"name": "IncomingRequestIngest: Debug with Docker",
|
||||
"port": 9933,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/Telemetry",
|
||||
"name": "Telemetry: Debug with Docker",
|
||||
"port": 9938,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/IsolatedVM",
|
||||
"name": "Isolated VM: Debug with Docker",
|
||||
"port": 9974,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/Workflow",
|
||||
"name": "Workflow: Debug with Docker",
|
||||
"port": 9212,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/Realtime",
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/realtime",
|
||||
"name": "Realtime: Debug with Docker",
|
||||
"port": 9250,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
@@ -241,27 +26,35 @@
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/StatusPage",
|
||||
"name": "Status Page API${cwd}: Debug with Docker",
|
||||
"port": 9764,
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/probe-api",
|
||||
"name": "Probe API: Debug with Docker",
|
||||
"port": 9251,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/HttpTestServer",
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/licensing",
|
||||
"name": "Licensing: Debug with Docker",
|
||||
"port": 9233,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/http-test-server",
|
||||
"name": "HTTP Test Server: Debug with Docker",
|
||||
"port": 9234,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
@@ -269,12 +62,22 @@
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/home",
|
||||
"name": "Home: Debug with Docker",
|
||||
"port": 9235,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/script-runnner",
|
||||
"name": "Script Runner: Debug with Docker",
|
||||
"port": 9236,
|
||||
@@ -283,13 +86,11 @@
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"address": "127.0.0.1",
|
||||
"localRoot": "${workspaceFolder}/InitScript",
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/init-script",
|
||||
"name": "Init Script: Debug with Docker",
|
||||
"port": 9237,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
@@ -297,35 +98,19 @@
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"name": "Common: Debug Tests",
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true,
|
||||
"request": "launch",
|
||||
"cwd": "${workspaceRoot}/Common",
|
||||
"runtimeExecutable": "npm",
|
||||
"runtimeArgs": [
|
||||
"run-script",
|
||||
"debug:test"
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "Accounts: Debug Local Files",
|
||||
"type": "node",
|
||||
"restart": true,
|
||||
"autoAttachChildProcesses": true,
|
||||
"request": "launch",
|
||||
"cwd": "${workspaceRoot}/Accounts",
|
||||
"runtimeExecutable": "npm",
|
||||
"runtimeArgs": [
|
||||
"run-script",
|
||||
"dev"
|
||||
"address": "0.0.0.0",
|
||||
"localRoot": "${workspaceFolder}/probe",
|
||||
"name": "Probe: Debug with Docker",
|
||||
"port": 9238,
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"type": "pwa-node"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,95 +0,0 @@
|
||||
import Express, {
|
||||
ExpressRequest,
|
||||
ExpressResponse,
|
||||
ExpressRouter,
|
||||
NextFunction,
|
||||
} from "Common/Server/Utils/Express";
|
||||
import Response from "Common/Server/Utils/Response";
|
||||
import { ONEUPTIME_URL } from "../Config";
|
||||
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
|
||||
import HTTPMethod from "Common/Types/API/HTTPMethod";
|
||||
import HTTPResponse from "Common/Types/API/HTTPResponse";
|
||||
import URL from "Common/Types/API/URL";
|
||||
import { JSONObject } from "Common/Types/JSON";
|
||||
import API from "Common/Utils/API";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import AIAgentAPIRequest from "../Utils/AIAgentAPIRequest";
|
||||
|
||||
const router: ExpressRouter = Express.getRouter();
|
||||
|
||||
/*
|
||||
* Metrics endpoint for Keda autoscaling
|
||||
* Returns the number of pending AI agent tasks
|
||||
*/
|
||||
router.get(
|
||||
"/queue-size",
|
||||
async (
|
||||
req: ExpressRequest,
|
||||
res: ExpressResponse,
|
||||
next: NextFunction,
|
||||
): Promise<void> => {
|
||||
try {
|
||||
/*
|
||||
* Get the pending task count from OneUptime API
|
||||
* This is the correct metric - the number of tasks waiting to be processed
|
||||
*/
|
||||
const pendingTaskCountUrl: URL = URL.fromString(
|
||||
ONEUPTIME_URL.toString(),
|
||||
).addRoute("/api/ai-agent-task/get-pending-task-count");
|
||||
|
||||
logger.debug(
|
||||
"Fetching pending task count from OneUptime API for KEDA scaling",
|
||||
);
|
||||
|
||||
// Use AI Agent authentication (AI Agent key and AI Agent ID)
|
||||
const requestBody: JSONObject = AIAgentAPIRequest.getDefaultRequestBody();
|
||||
|
||||
const result: HTTPResponse<JSONObject> | HTTPErrorResponse =
|
||||
await API.fetch<JSONObject>({
|
||||
method: HTTPMethod.POST,
|
||||
url: pendingTaskCountUrl,
|
||||
data: requestBody,
|
||||
headers: {},
|
||||
});
|
||||
|
||||
if (result instanceof HTTPErrorResponse) {
|
||||
logger.error("Error fetching pending task count from OneUptime API");
|
||||
logger.error(result);
|
||||
throw result;
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
"Pending task count fetched successfully from OneUptime API",
|
||||
);
|
||||
logger.debug(result.data);
|
||||
|
||||
// Extract count from the response - this is the number of tasks pending to be processed
|
||||
let queueSize: number = (result.data["count"] as number) || 0;
|
||||
|
||||
// if string then convert to number
|
||||
if (typeof queueSize === "string") {
|
||||
const parsedQueueSize: number = parseInt(queueSize, 10);
|
||||
if (!isNaN(parsedQueueSize)) {
|
||||
queueSize = parsedQueueSize;
|
||||
} else {
|
||||
logger.warn(
|
||||
"Pending task count is not a valid number, defaulting to 0",
|
||||
);
|
||||
queueSize = 0;
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(`Pending task count for KEDA: ${queueSize}`);
|
||||
|
||||
return Response.sendJsonObjectResponse(req, res, {
|
||||
queueSize: queueSize,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error("Error in metrics queue-size endpoint");
|
||||
logger.error(err);
|
||||
return next(err);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
export default router;
|
||||
@@ -1,103 +0,0 @@
|
||||
import {
|
||||
CodeAgent,
|
||||
CodeAgentType,
|
||||
getCodeAgentDisplayName,
|
||||
} from "./CodeAgentInterface";
|
||||
import OpenCodeAgent from "./OpenCodeAgent";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
|
||||
// Factory class to create code agents
|
||||
export default class CodeAgentFactory {
|
||||
// Default agent type to use
|
||||
private static defaultAgentType: CodeAgentType = CodeAgentType.OpenCode;
|
||||
|
||||
// Create an agent of the specified type
|
||||
public static createAgent(type: CodeAgentType): CodeAgent {
|
||||
logger.debug(`Creating code agent: ${getCodeAgentDisplayName(type)}`);
|
||||
|
||||
switch (type) {
|
||||
case CodeAgentType.OpenCode:
|
||||
return new OpenCodeAgent();
|
||||
|
||||
/*
|
||||
* Future agents can be added here:
|
||||
* case CodeAgentType.Goose:
|
||||
* return new GooseAgent();
|
||||
* case CodeAgentType.ClaudeCode:
|
||||
* return new ClaudeCodeAgent();
|
||||
*/
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown code agent type: ${type}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Create the default agent
|
||||
public static createDefaultAgent(): CodeAgent {
|
||||
return this.createAgent(this.defaultAgentType);
|
||||
}
|
||||
|
||||
// Set the default agent type
|
||||
public static setDefaultAgentType(type: CodeAgentType): void {
|
||||
this.defaultAgentType = type;
|
||||
}
|
||||
|
||||
// Get the default agent type
|
||||
public static getDefaultAgentType(): CodeAgentType {
|
||||
return this.defaultAgentType;
|
||||
}
|
||||
|
||||
// Get all available agent types
|
||||
public static getAvailableAgentTypes(): Array<CodeAgentType> {
|
||||
return Object.values(CodeAgentType);
|
||||
}
|
||||
|
||||
// Check if an agent type is available on the system
|
||||
public static async isAgentAvailable(type: CodeAgentType): Promise<boolean> {
|
||||
try {
|
||||
const agent: CodeAgent = this.createAgent(type);
|
||||
return await agent.isAvailable();
|
||||
} catch (error) {
|
||||
logger.error(`Error checking agent availability for ${type}:`);
|
||||
logger.error(error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Get the first available agent
|
||||
public static async getFirstAvailableAgent(): Promise<CodeAgent | null> {
|
||||
for (const type of this.getAvailableAgentTypes()) {
|
||||
if (await this.isAgentAvailable(type)) {
|
||||
return this.createAgent(type);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
* Create agent with fallback
|
||||
* Tries to create the specified type, falls back to first available
|
||||
*/
|
||||
public static async createAgentWithFallback(
|
||||
preferredType?: CodeAgentType,
|
||||
): Promise<CodeAgent> {
|
||||
// If preferred type is specified and available, use it
|
||||
if (preferredType && (await this.isAgentAvailable(preferredType))) {
|
||||
return this.createAgent(preferredType);
|
||||
}
|
||||
|
||||
// Try the default type
|
||||
if (await this.isAgentAvailable(this.defaultAgentType)) {
|
||||
return this.createAgent(this.defaultAgentType);
|
||||
}
|
||||
|
||||
// Fall back to first available
|
||||
const agent: CodeAgent | null = await this.getFirstAvailableAgent();
|
||||
|
||||
if (!agent) {
|
||||
throw new Error("No code agents are available on this system");
|
||||
}
|
||||
|
||||
return agent;
|
||||
}
|
||||
}
|
||||
@@ -1,94 +0,0 @@
|
||||
import LlmType from "Common/Types/LLM/LlmType";
|
||||
import TaskLogger from "../Utils/TaskLogger";
|
||||
|
||||
// Configuration for the LLM provider
|
||||
export interface CodeAgentLLMConfig {
|
||||
llmType: LlmType;
|
||||
apiKey?: string;
|
||||
baseUrl?: string;
|
||||
modelName?: string;
|
||||
}
|
||||
|
||||
// The task to be executed by the code agent
|
||||
export interface CodeAgentTask {
|
||||
workingDirectory: string;
|
||||
prompt: string;
|
||||
context?: string;
|
||||
timeoutMs?: number;
|
||||
servicePath?: string; // Path within the repo where the service code lives
|
||||
}
|
||||
|
||||
// Result from the code agent execution
|
||||
export interface CodeAgentResult {
|
||||
success: boolean;
|
||||
filesModified: Array<string>;
|
||||
summary: string;
|
||||
logs: Array<string>;
|
||||
error?: string;
|
||||
exitCode?: number;
|
||||
}
|
||||
|
||||
// Progress event from the code agent
|
||||
export interface CodeAgentProgressEvent {
|
||||
type: "stdout" | "stderr" | "status";
|
||||
message: string;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
// Callback type for progress events
|
||||
export type CodeAgentProgressCallback = (
|
||||
event: CodeAgentProgressEvent,
|
||||
) => void | Promise<void>;
|
||||
|
||||
/*
|
||||
* Abstract interface for code agents
|
||||
* This allows us to support multiple agents (OpenCode, Goose, Claude Code, etc.)
|
||||
*/
|
||||
export interface CodeAgent {
|
||||
// Name of the agent (e.g., "OpenCode", "Goose", "ClaudeCode")
|
||||
readonly name: string;
|
||||
|
||||
// Initialize the agent with LLM configuration
|
||||
initialize(config: CodeAgentLLMConfig, logger?: TaskLogger): Promise<void>;
|
||||
|
||||
// Execute a task and return the result
|
||||
executeTask(task: CodeAgentTask): Promise<CodeAgentResult>;
|
||||
|
||||
// Set a callback for progress events (streaming output)
|
||||
onProgress(callback: CodeAgentProgressCallback): void;
|
||||
|
||||
// Check if the agent is available on the system
|
||||
isAvailable(): Promise<boolean>;
|
||||
|
||||
// Abort the current task execution
|
||||
abort(): Promise<void>;
|
||||
|
||||
// Clean up any resources used by the agent
|
||||
cleanup(): Promise<void>;
|
||||
}
|
||||
|
||||
// Enum for supported code agent types
|
||||
export enum CodeAgentType {
|
||||
OpenCode = "OpenCode",
|
||||
/*
|
||||
* Future agents:
|
||||
* Goose = "Goose",
|
||||
* ClaudeCode = "ClaudeCode",
|
||||
* Aider = "Aider",
|
||||
*/
|
||||
}
|
||||
|
||||
// Helper function to get display name for agent type
|
||||
export function getCodeAgentDisplayName(type: CodeAgentType): string {
|
||||
switch (type) {
|
||||
case CodeAgentType.OpenCode:
|
||||
return "OpenCode AI";
|
||||
default:
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to check if an agent type is valid
|
||||
export function isValidCodeAgentType(type: string): type is CodeAgentType {
|
||||
return Object.values(CodeAgentType).includes(type as CodeAgentType);
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
// Export all code agent related types and classes
|
||||
export {
|
||||
CodeAgent,
|
||||
CodeAgentLLMConfig,
|
||||
CodeAgentTask,
|
||||
CodeAgentResult,
|
||||
CodeAgentProgressEvent,
|
||||
CodeAgentProgressCallback,
|
||||
CodeAgentType,
|
||||
getCodeAgentDisplayName,
|
||||
isValidCodeAgentType,
|
||||
} from "./CodeAgentInterface";
|
||||
|
||||
export { default as CodeAgentFactory } from "./CodeAgentFactory";
|
||||
export { default as OpenCodeAgent } from "./OpenCodeAgent";
|
||||
@@ -1,562 +0,0 @@
|
||||
import {
|
||||
CodeAgent,
|
||||
CodeAgentLLMConfig,
|
||||
CodeAgentTask,
|
||||
CodeAgentResult,
|
||||
CodeAgentProgressCallback,
|
||||
CodeAgentProgressEvent,
|
||||
} from "./CodeAgentInterface";
|
||||
import TaskLogger from "../Utils/TaskLogger";
|
||||
import Execute from "Common/Server/Utils/Execute";
|
||||
import LocalFile from "Common/Server/Utils/LocalFile";
|
||||
import LlmType from "Common/Types/LLM/LlmType";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import path from "path";
|
||||
import { ChildProcess, spawn } from "child_process";
|
||||
import BadDataException from "Common/Types/Exception/BadDataException";
|
||||
|
||||
// OpenCode configuration file structure
|
||||
interface OpenCodeConfig {
|
||||
provider?: Record<string, unknown>;
|
||||
model?: string;
|
||||
small_model?: string;
|
||||
disabled_providers?: Array<string>;
|
||||
enabled_providers?: Array<string>;
|
||||
}
|
||||
|
||||
export default class OpenCodeAgent implements CodeAgent {
|
||||
public readonly name: string = "OpenCode";
|
||||
|
||||
private config: CodeAgentLLMConfig | null = null;
|
||||
private taskLogger: TaskLogger | null = null;
|
||||
private progressCallback: CodeAgentProgressCallback | null = null;
|
||||
private currentProcess: ChildProcess | null = null;
|
||||
private aborted: boolean = false;
|
||||
|
||||
// Track original opencode.json content for restoration
|
||||
private originalOpenCodeConfig: string | null = null;
|
||||
private openCodeConfigPath: string | null = null;
|
||||
|
||||
// Default timeout: 30 minutes
|
||||
private static readonly DEFAULT_TIMEOUT_MS: number = 30 * 60 * 1000;
|
||||
|
||||
public async initialize(
|
||||
config: CodeAgentLLMConfig,
|
||||
taskLogger?: TaskLogger,
|
||||
): Promise<void> {
|
||||
this.config = config;
|
||||
|
||||
if (taskLogger) {
|
||||
this.taskLogger = taskLogger;
|
||||
}
|
||||
|
||||
await this.log(`Initializing ${this.name} with ${config.llmType} provider`);
|
||||
}
|
||||
|
||||
public async executeTask(task: CodeAgentTask): Promise<CodeAgentResult> {
|
||||
if (!this.config) {
|
||||
return this.createErrorResult(
|
||||
"Agent not initialized. Call initialize() first.",
|
||||
);
|
||||
}
|
||||
|
||||
this.aborted = false;
|
||||
const logs: Array<string> = [];
|
||||
const timeoutMs: number =
|
||||
task.timeoutMs || OpenCodeAgent.DEFAULT_TIMEOUT_MS;
|
||||
|
||||
try {
|
||||
await this.log(`Executing task in directory: ${task.workingDirectory}`);
|
||||
|
||||
// Create OpenCode config file in the working directory
|
||||
await this.createOpenCodeConfig(task.workingDirectory);
|
||||
|
||||
// Build the prompt
|
||||
const fullPrompt: string = this.buildFullPrompt(task);
|
||||
|
||||
await this.log("Starting OpenCode execution...");
|
||||
logs.push(`Prompt: ${fullPrompt.substring(0, 500)}...`);
|
||||
|
||||
// Execute OpenCode
|
||||
const output: string = await this.runOpenCode(
|
||||
task.workingDirectory,
|
||||
fullPrompt,
|
||||
timeoutMs,
|
||||
(event: CodeAgentProgressEvent) => {
|
||||
logs.push(`[${event.type}] ${event.message}`);
|
||||
if (this.progressCallback) {
|
||||
this.progressCallback(event);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
logs.push(
|
||||
`Output: ${output.substring(0, 1000)}${output.length > 1000 ? "..." : ""}`,
|
||||
);
|
||||
|
||||
if (this.aborted) {
|
||||
return this.createErrorResult("Task was aborted", logs);
|
||||
}
|
||||
|
||||
// Check for modified files
|
||||
const modifiedFiles: Array<string> = await this.getModifiedFiles(
|
||||
task.workingDirectory,
|
||||
);
|
||||
|
||||
// Restore or delete opencode.json before returning
|
||||
await this.restoreOpenCodeConfig();
|
||||
|
||||
await this.log(
|
||||
`OpenCode completed. ${modifiedFiles.length} files modified.`,
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
filesModified: modifiedFiles,
|
||||
summary: this.extractSummary(output),
|
||||
logs,
|
||||
exitCode: 0,
|
||||
};
|
||||
} catch (error) {
|
||||
const errorMessage: string =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
|
||||
// Restore or delete opencode.json on error
|
||||
await this.restoreOpenCodeConfig();
|
||||
|
||||
await this.log(`OpenCode execution failed: ${errorMessage}`);
|
||||
logs.push(`Error: ${errorMessage}`);
|
||||
|
||||
return this.createErrorResult(errorMessage, logs);
|
||||
}
|
||||
}
|
||||
|
||||
public onProgress(callback: CodeAgentProgressCallback): void {
|
||||
this.progressCallback = callback;
|
||||
}
|
||||
|
||||
public async isAvailable(): Promise<boolean> {
|
||||
try {
|
||||
const result: string = await Execute.executeCommandFile({
|
||||
command: "opencode",
|
||||
args: ["--version"],
|
||||
cwd: process.cwd(),
|
||||
});
|
||||
|
||||
logger.debug(`OpenCode version check: ${result}`);
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.debug("OpenCode is not available:");
|
||||
logger.debug(error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public async abort(): Promise<void> {
|
||||
this.aborted = true;
|
||||
|
||||
if (this.currentProcess) {
|
||||
this.currentProcess.kill("SIGTERM");
|
||||
this.currentProcess = null;
|
||||
}
|
||||
|
||||
await this.log("OpenCode execution aborted");
|
||||
}
|
||||
|
||||
public async cleanup(): Promise<void> {
|
||||
if (this.currentProcess) {
|
||||
this.currentProcess.kill("SIGTERM");
|
||||
this.currentProcess = null;
|
||||
}
|
||||
|
||||
this.config = null;
|
||||
this.progressCallback = null;
|
||||
}
|
||||
|
||||
// Create OpenCode configuration file in the workspace
|
||||
private async createOpenCodeConfig(workingDirectory: string): Promise<void> {
|
||||
if (!this.config) {
|
||||
throw new Error("Config not initialized");
|
||||
}
|
||||
|
||||
const configPath: string = path.join(workingDirectory, "opencode.json");
|
||||
this.openCodeConfigPath = configPath;
|
||||
|
||||
// Check if opencode.json already exists and backup its content
|
||||
try {
|
||||
const existingContent: string = await LocalFile.read(configPath);
|
||||
this.originalOpenCodeConfig = existingContent;
|
||||
await this.log("Backed up existing opencode.json from repository");
|
||||
} catch {
|
||||
// File doesn't exist, which is the normal case
|
||||
this.originalOpenCodeConfig = null;
|
||||
}
|
||||
|
||||
const openCodeConfig: OpenCodeConfig = {
|
||||
model: this.getModelString(),
|
||||
small_model: this.getSmallModelString(),
|
||||
};
|
||||
|
||||
// Set enabled providers based on LLM type
|
||||
if (this.config.llmType === LlmType.Anthropic) {
|
||||
openCodeConfig.enabled_providers = ["anthropic"];
|
||||
} else if (this.config.llmType === LlmType.OpenAI) {
|
||||
openCodeConfig.enabled_providers = ["openai"];
|
||||
}
|
||||
|
||||
await LocalFile.write(configPath, JSON.stringify(openCodeConfig, null, 2));
|
||||
|
||||
await this.log(`Created OpenCode config at ${configPath}`);
|
||||
}
|
||||
|
||||
// Restore or delete opencode.json after execution
|
||||
private async restoreOpenCodeConfig(): Promise<void> {
|
||||
if (!this.openCodeConfigPath) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
if (this.originalOpenCodeConfig !== null) {
|
||||
// Restore the original file content
|
||||
await LocalFile.write(
|
||||
this.openCodeConfigPath,
|
||||
this.originalOpenCodeConfig,
|
||||
);
|
||||
await this.log("Restored original opencode.json from repository");
|
||||
} else {
|
||||
// Delete the file we created
|
||||
await LocalFile.deleteFile(this.openCodeConfigPath);
|
||||
await this.log("Deleted generated opencode.json config file");
|
||||
}
|
||||
} catch (error) {
|
||||
// Log but don't throw - cleanup failure shouldn't fail the task
|
||||
logger.warn(`Failed to restore/delete opencode.json: ${error}`);
|
||||
}
|
||||
|
||||
// Reset the tracking variables
|
||||
this.openCodeConfigPath = null;
|
||||
this.originalOpenCodeConfig = null;
|
||||
}
|
||||
|
||||
// Get the model string in OpenCode format (provider/model)
|
||||
private getModelString(): string {
|
||||
if (!this.config) {
|
||||
throw new Error("Config not initialized");
|
||||
}
|
||||
|
||||
const provider: string = this.getProviderName();
|
||||
const model: string = this.config.modelName || this.getDefaultModel();
|
||||
|
||||
return `${provider}/${model}`;
|
||||
}
|
||||
|
||||
// Get the small model string for quick operations
|
||||
private getSmallModelString(): string {
|
||||
if (!this.config) {
|
||||
throw new Error("Config not initialized");
|
||||
}
|
||||
|
||||
const provider: string = this.getProviderName();
|
||||
const smallModel: string = this.getDefaultSmallModel();
|
||||
|
||||
return `${provider}/${smallModel}`;
|
||||
}
|
||||
|
||||
/**
 * Map the configured LLM type to OpenCode's provider name.
 * Defaults to "anthropic" when no config is present; throws for
 * LLM types OpenCode does not support.
 */
private getProviderName(): string {
  if (!this.config) {
    return "anthropic";
  }

  const llmType: LlmType = this.config.llmType;

  if (llmType === LlmType.Anthropic) {
    return "anthropic";
  }
  if (llmType === LlmType.OpenAI) {
    return "openai";
  }
  if (llmType === LlmType.Ollama) {
    return "ollama";
  }

  throw new BadDataException("Unsupported LLM type for OpenCode agent");
}
|
||||
|
||||
/**
 * Default primary model per provider, used when no model name is configured.
 * Falls back to the Anthropic default when config is absent; throws for
 * unsupported LLM types.
 */
private getDefaultModel(): string {
  if (!this.config) {
    return "claude-sonnet-4-20250514";
  }

  const llmType: LlmType = this.config.llmType;

  if (llmType === LlmType.Anthropic) {
    return "claude-sonnet-4-20250514";
  }
  if (llmType === LlmType.OpenAI) {
    return "gpt-4o";
  }
  if (llmType === LlmType.Ollama) {
    return "llama2";
  }

  throw new BadDataException("Unsupported LLM type for OpenCode agent");
}
|
||||
|
||||
/**
 * Default small (fast/cheap) model per provider, used for quick operations.
 * Falls back to the Anthropic default when config is absent; throws for
 * unsupported LLM types.
 */
private getDefaultSmallModel(): string {
  if (!this.config) {
    return "claude-haiku-4-20250514";
  }

  const llmType: LlmType = this.config.llmType;

  if (llmType === LlmType.Anthropic) {
    return "claude-haiku-4-20250514";
  }
  if (llmType === LlmType.OpenAI) {
    return "gpt-4o-mini";
  }
  if (llmType === LlmType.Ollama) {
    return "llama2";
  }

  throw new BadDataException("Unsupported LLM type for OpenCode agent");
}
|
||||
|
||||
// Build the full prompt including context
|
||||
private buildFullPrompt(task: CodeAgentTask): string {
|
||||
let prompt: string = task.prompt;
|
||||
|
||||
if (task.context) {
|
||||
prompt = `${task.context}\n\n${prompt}`;
|
||||
}
|
||||
|
||||
if (task.servicePath) {
|
||||
prompt = `The service code is located at: ${task.servicePath}\n\n${prompt}`;
|
||||
}
|
||||
|
||||
return prompt;
|
||||
}
|
||||
|
||||
/**
 * Run the `opencode` CLI in non-interactive mode inside workingDirectory.
 *
 * The prompt is written to the child's stdin (the trailing "-" arg tells
 * OpenCode to read it there), avoiding argv length limits. stdout/stderr
 * are accumulated, mirrored to the local logger and task logger, and
 * forwarded to `onOutput` as progress events. Resolves with the full
 * stdout on exit code 0 (or null); rejects on non-zero exit, spawn error,
 * abort, or timeout.
 *
 * @param workingDirectory - cwd for the child process.
 * @param prompt - full prompt text, passed via stdin.
 * @param timeoutMs - wall-clock limit before the child is SIGTERM'd.
 * @param onOutput - receives one event per stdout/stderr data chunk.
 */
private async runOpenCode(
  workingDirectory: string,
  prompt: string,
  timeoutMs: number,
  onOutput: (event: CodeAgentProgressEvent) => void,
): Promise<string> {
  return new Promise(
    (resolve: (value: string) => void, reject: (reason: Error) => void) => {
      if (!this.config) {
        reject(new Error("Config not initialized"));
        return;
      }

      // Inherit the parent env and inject provider credentials.
      const env: NodeJS.ProcessEnv = { ...process.env };

      if (this.config.apiKey) {
        switch (this.config.llmType) {
          case LlmType.Anthropic:
            env["ANTHROPIC_API_KEY"] = this.config.apiKey;
            break;
          case LlmType.OpenAI:
            env["OPENAI_API_KEY"] = this.config.apiKey;
            break;
          case LlmType.Ollama:
            // Ollama authenticates by host, not key.
            if (this.config.baseUrl) {
              env["OLLAMA_HOST"] = this.config.baseUrl;
            }
            break;
        }
      }

      /*
       * Use CLI mode flags to ensure output goes to stdout/stderr instead of TUI
       * Pass prompt via stdin using "-" to avoid command line argument issues with long prompts
       */
      const args: Array<string> = [
        "run",
        "--print-logs",
        "--log-level",
        "DEBUG",
        "--format",
        "default",
        "-", // Read prompt from stdin
      ];

      logger.debug(
        `Running: opencode ${args.join(" ")} (prompt via stdin, ${prompt.length} chars)`,
      );

      const child: ChildProcess = spawn("opencode", args, {
        cwd: workingDirectory,
        env,
        stdio: ["pipe", "pipe", "pipe"],
      });

      // Tracked so abort()/cleanup() can SIGTERM a running execution.
      this.currentProcess = child;

      // Write prompt to stdin and close it so OpenCode sees EOF.
      if (child.stdin) {
        child.stdin.write(prompt);
        child.stdin.end();
      }

      let stdout: string = "";
      let stderr: string = "";

      /*
       * Timeout guard. NOTE(review): if spawn failed, child.pid is
       * undefined and this timer silently never rejects — the "error"
       * handler below is then the only rejection path; confirm intended.
       * A later reject from the "close" handler after this fires is a
       * no-op (first settle wins).
       */
      const timeout: ReturnType<typeof setTimeout> = setTimeout(() => {
        if (child.pid) {
          child.kill("SIGTERM");
          reject(
            new Error(
              `OpenCode execution timed out after ${timeoutMs / 1000} seconds`,
            ),
          );
        }
      }, timeoutMs);

      child.stdout?.on("data", (data: Buffer) => {
        const text: string = data.toString();
        stdout += text;

        // Stream to console immediately
        const trimmedText: string = text.trim();
        if (trimmedText) {
          logger.info(`[OpenCode stdout] ${trimmedText}`);

          // Stream to task logger for server-side logging (fire-and-forget).
          if (this.taskLogger) {
            this.taskLogger
              .info(`[OpenCode] ${trimmedText}`)
              .catch((err: Error) => {
                logger.error(`Failed to log OpenCode output: ${err.message}`);
              });
          }
        }

        onOutput({
          type: "stdout",
          message: trimmedText,
          timestamp: new Date(),
        });
      });

      child.stderr?.on("data", (data: Buffer) => {
        const text: string = data.toString();
        stderr += text;

        // Stream to console immediately
        const trimmedText: string = text.trim();
        if (trimmedText) {
          logger.warn(`[OpenCode stderr] ${trimmedText}`);

          // Stream to task logger for server-side logging (fire-and-forget).
          if (this.taskLogger) {
            this.taskLogger
              .warning(`[OpenCode stderr] ${trimmedText}`)
              .catch((err: Error) => {
                logger.error(`Failed to log OpenCode stderr: ${err.message}`);
              });
          }
        }

        onOutput({
          type: "stderr",
          message: trimmedText,
          timestamp: new Date(),
        });
      });

      child.on("close", (code: number | null) => {
        clearTimeout(timeout);
        this.currentProcess = null;

        // abort() sets this flag and kills the process; surface as error.
        if (this.aborted) {
          reject(new Error("Execution aborted"));
          return;
        }

        // code is null when the process was terminated by a signal.
        if (code === 0 || code === null) {
          resolve(stdout);
        } else {
          reject(
            new Error(
              `OpenCode exited with code ${code}. stderr: ${stderr.substring(0, 500)}`,
            ),
          );
        }
      });

      child.on("error", (error: Error) => {
        clearTimeout(timeout);
        this.currentProcess = null;
        reject(error);
      });
    },
  );
}
|
||||
|
||||
// Get list of modified files using git
|
||||
private async getModifiedFiles(
|
||||
workingDirectory: string,
|
||||
): Promise<Array<string>> {
|
||||
try {
|
||||
const result: string = await Execute.executeCommandFile({
|
||||
command: "git",
|
||||
args: ["status", "--porcelain"],
|
||||
cwd: workingDirectory,
|
||||
});
|
||||
|
||||
if (!result.trim()) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return result
|
||||
.split("\n")
|
||||
.filter((line: string) => {
|
||||
return line.trim().length > 0;
|
||||
})
|
||||
.map((line: string) => {
|
||||
// Git status format: "XY filename"
|
||||
return line.substring(3).trim();
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error("Error getting modified files:");
|
||||
logger.error(error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// Extract summary from OpenCode output
|
||||
private extractSummary(output: string): string {
|
||||
// Try to extract a meaningful summary from the output
|
||||
const lines: Array<string> = output.split("\n").filter((line: string) => {
|
||||
return line.trim().length > 0;
|
||||
});
|
||||
|
||||
// Return last few meaningful lines as summary
|
||||
const summaryLines: Array<string> = lines.slice(-5);
|
||||
|
||||
return summaryLines.join("\n") || "No summary available";
|
||||
}
|
||||
|
||||
// Create error result helper
|
||||
private createErrorResult(
|
||||
errorMessage: string,
|
||||
logs: Array<string> = [],
|
||||
): CodeAgentResult {
|
||||
return {
|
||||
success: false,
|
||||
filesModified: [],
|
||||
summary: "",
|
||||
logs,
|
||||
error: errorMessage,
|
||||
exitCode: 1,
|
||||
};
|
||||
}
|
||||
|
||||
// Logging helper
|
||||
private async log(message: string): Promise<void> {
|
||||
if (this.taskLogger) {
|
||||
await this.taskLogger.info(`[${this.name}] ${message}`);
|
||||
} else {
|
||||
logger.debug(`[${this.name}] ${message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
import URL from "Common/Types/API/URL";
|
||||
import ObjectID from "Common/Types/ObjectID";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import Port from "Common/Types/Port";
|
||||
|
||||
if (!process.env["ONEUPTIME_URL"]) {
|
||||
logger.error("ONEUPTIME_URL is not set");
|
||||
process.exit();
|
||||
}
|
||||
|
||||
export const ONEUPTIME_URL: URL = URL.fromString(
|
||||
process.env["ONEUPTIME_URL"] || "https://oneuptime.com",
|
||||
);
|
||||
|
||||
export const AI_AGENT_ID: ObjectID | null = process.env["AI_AGENT_ID"]
|
||||
? new ObjectID(process.env["AI_AGENT_ID"])
|
||||
: null;
|
||||
|
||||
if (!process.env["AI_AGENT_KEY"]) {
|
||||
logger.error("AI_AGENT_KEY is not set");
|
||||
process.exit();
|
||||
}
|
||||
|
||||
export const AI_AGENT_KEY: string = process.env["AI_AGENT_KEY"];
|
||||
|
||||
export const AI_AGENT_NAME: string | null =
|
||||
process.env["AI_AGENT_NAME"] || null;
|
||||
|
||||
export const AI_AGENT_DESCRIPTION: string | null =
|
||||
process.env["AI_AGENT_DESCRIPTION"] || null;
|
||||
|
||||
export const HOSTNAME: string = process.env["HOSTNAME"] || "localhost";
|
||||
|
||||
export const PORT: Port = new Port(
|
||||
process.env["PORT"] ? parseInt(process.env["PORT"]) : 3875,
|
||||
);
|
||||
@@ -1,82 +0,0 @@
|
||||
#
# OneUptime-AIAgent Dockerfile
#
# NOTE(review): the {{ if }}/{{ else }}/{{ end }} markers are template
# directives rendered before `docker build` — presumably by the repo's
# compose tooling (dockerize/gomplate-style); confirm against the build
# scripts. This file is not valid Dockerfile input until rendered.

# Pull base image nodejs image.
FROM public.ecr.aws/docker/library/node:24.9
# World-writable npm cache so the image also works when run as a non-root user.
RUN mkdir /tmp/npm && chmod 2777 /tmp/npm && chown 1000:1000 /tmp/npm && npm config set cache /tmp/npm --global

# Make npm resilient to flaky registry connections during image builds.
RUN npm config set fetch-retries 5
RUN npm config set fetch-retry-mintimeout 20000
RUN npm config set fetch-retry-maxtimeout 60000


ARG GIT_SHA
ARG APP_VERSION
ARG IS_ENTERPRISE_EDITION=false

ENV GIT_SHA=${GIT_SHA}
ENV APP_VERSION=${APP_VERSION}
ENV IS_ENTERPRISE_EDITION=${IS_ENTERPRISE_EDITION}
ENV NODE_OPTIONS="--use-openssl-ca"

## Add Intermediate Certs
COPY ./SslCertificates /usr/local/share/ca-certificates
RUN update-ca-certificates


# IF APP_VERSION is not set, set it to 1.0.0
# NOTE(review): `export` inside RUN only affects this one layer's shell —
# the fallback does not persist to later RUN/sed steps; confirm intent.
RUN if [ -z "$APP_VERSION" ]; then export APP_VERSION=1.0.0; fi


RUN apt-get update

# Install bash.
RUN apt-get install bash -y && apt-get install curl -y

# Install OpenCode AI coding assistant
RUN curl -fsSL https://opencode.ai/install | bash

# Add OpenCode to PATH (installed to $HOME/.opencode/bin by default)
ENV PATH="/root/.opencode/bin:${PATH}"

#Use bash shell by default
SHELL ["/bin/bash", "-c"]

RUN mkdir -p /usr/src

# Build the shared Common package first so its layer caches independently
# of app-code changes.
WORKDIR /usr/src/Common
COPY ./Common/package*.json /usr/src/Common/
# Set version in ./Common/package.json to the APP_VERSION
RUN sed -i "s/\"version\": \".*\"/\"version\": \"$APP_VERSION\"/g" /usr/src/Common/package.json
RUN npm install
COPY ./Common /usr/src/Common


ENV PRODUCTION=true

WORKDIR /usr/src/app

# Install app dependencies
COPY ./AIAgent/package*.json /usr/src/app/
# Set version in ./AIAgent/package.json to the APP_VERSION
RUN sed -i "s/\"version\": \".*\"/\"version\": \"$APP_VERSION\"/g" /usr/src/app/package.json
RUN npm install

# Expose ports.
# - 3875: OneUptime-AIAgent
EXPOSE 3875

{{ if eq .Env.ENVIRONMENT "development" }}
# Development: source is bind-mounted at runtime, so only run the dev server.
#Run the app
CMD [ "npm", "run", "dev" ]
{{ else }}
# Copy app source
COPY ./AIAgent /usr/src/app
# Bundle app source
RUN npm run compile
# Set permission to write logs and cache in case container run as non root
RUN chown -R 1000:1000 "/tmp/npm" && chmod -R 2777 "/tmp/npm"
#Run the app
CMD [ "npm", "start" ]
{{ end }}
||||
@@ -1,84 +0,0 @@
|
||||
import { PORT } from "./Config";
import AliveJob from "./Jobs/Alive";
import startTaskProcessingLoop from "./Jobs/ProcessScheduledTasks";
import Register from "./Services/Register";
import MetricsAPI from "./API/Metrics";
import {
  getTaskHandlerRegistry,
  FixExceptionTaskHandler,
} from "./TaskHandlers/Index";
import { PromiseVoidFunction } from "Common/Types/FunctionTypes";
import logger from "Common/Server/Utils/Logger";
import App from "Common/Server/Utils/StartServer";
import Telemetry from "Common/Server/Utils/Telemetry";
import Express, { ExpressApplication } from "Common/Server/Utils/Express";
import "ejs";

const APP_NAME: string = "ai-agent";

/**
 * AI Agent service entrypoint. Startup order matters:
 * telemetry → HTTP server → metrics routes → default routes →
 * agent registration → cron + task handlers → background task loop.
 * Any failure is logged and rethrown so the process exits non-zero below.
 */
const init: PromiseVoidFunction = async (): Promise<void> => {
  try {
    // Initialize telemetry
    Telemetry.init({
      serviceName: APP_NAME,
    });

    logger.info("AI Agent Service - Starting...");

    // init the app — liveness/readiness checks are no-ops: the service is
    // considered live/ready as soon as the HTTP server is up.
    await App.init({
      appName: APP_NAME,
      port: PORT,
      isFrontendApp: false,
      statusOptions: {
        liveCheck: async () => {},
        readyCheck: async () => {},
      },
    });

    // Add metrics API routes for KEDA autoscaling
    const app: ExpressApplication = Express.getExpressApp();
    app.use("/metrics", MetricsAPI);

    // add default routes
    await App.addDefaultRoutes();

    try {
      // Register this AI Agent with the OneUptime server.
      await Register.registerAIAgent();

      logger.debug("AI Agent registered");

      // Start the periodic keep-alive cron.
      AliveJob();

      // Register task handlers
      logger.debug("Registering task handlers...");
      const taskHandlerRegistry: ReturnType<typeof getTaskHandlerRegistry> =
        getTaskHandlerRegistry();
      taskHandlerRegistry.register(new FixExceptionTaskHandler());
      logger.debug(
        `Registered ${taskHandlerRegistry.getHandlerCount()} task handler(s): ${taskHandlerRegistry.getRegisteredTaskTypes().join(", ")}`,
      );

      // Start task processing loop (runs in background; a crash here is
      // logged but does not bring the HTTP server down).
      startTaskProcessingLoop().catch((err: Error) => {
        logger.error("Task processing loop failed:");
        logger.error(err);
      });
    } catch (err) {
      logger.error("Register AI Agent failed");
      logger.error(err);
      throw err;
    }
  } catch (err) {
    logger.error("App Init Failed:");
    logger.error(err);
    throw err;
  }
};

// Exit non-zero on any startup failure so orchestrators restart the pod.
init().catch((err: Error) => {
  logger.error(err);
  logger.error("Exiting node process");
  process.exit(1);
});
|
||||
@@ -1,56 +0,0 @@
|
||||
import { ONEUPTIME_URL } from "../Config";
import Register from "../Services/Register";
import AIAgentAPIRequest from "../Utils/AIAgentAPIRequest";
import URL from "Common/Types/API/URL";
import API from "Common/Utils/API";
import { EVERY_MINUTE } from "Common/Utils/CronTime";
import LocalCache from "Common/Server/Infrastructure/LocalCache";
import BasicCron from "Common/Server/Utils/BasicCron";
import logger from "Common/Server/Utils/Logger";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import { JSONObject } from "Common/Types/JSON";

/**
 * Schedule the per-minute keep-alive cron. Each tick POSTs to the server's
 * /api/ai-agent/alive endpoint so the server knows this agent is running.
 * If the agent has no cached ID yet (registration failed earlier), the tick
 * retries registration instead of sending the heartbeat.
 */
const InitJob: VoidFunction = (): void => {
  BasicCron({
    jobName: "AIAgent:Alive",
    options: {
      schedule: EVERY_MINUTE,
      runOnStartup: false,
    },
    runFunction: async () => {
      logger.debug("Checking if AI Agent is alive...");

      // Agent ID is placed in the local cache by the registration flow.
      const aiAgentId: string | undefined = LocalCache.getString(
        "AI_AGENT",
        "AI_AGENT_ID",
      );

      if (!aiAgentId) {
        logger.warn(
          "AI Agent is not registered yet. Skipping alive check. Trying to register AI Agent again...",
        );
        await Register.registerAIAgent();
        return;
      }

      logger.debug("AI Agent ID: " + aiAgentId.toString());

      const aliveUrl: URL = URL.fromString(ONEUPTIME_URL.toString()).addRoute(
        "/api/ai-agent/alive",
      );

      const result: HTTPResponse<JSONObject> = await API.post({
        url: aliveUrl,
        data: AIAgentAPIRequest.getDefaultRequestBody(),
      });

      // Heartbeat failures are logged only; the next tick retries.
      if (result.isSuccess()) {
        logger.debug("AI Agent update sent to server successfully.");
      } else {
        logger.error("Failed to send AI Agent update to server.");
      }
    },
  });
};

export default InitJob;
|
||||
@@ -1,257 +0,0 @@
|
||||
import { ONEUPTIME_URL } from "../Config";
import AIAgentAPIRequest from "../Utils/AIAgentAPIRequest";
import AIAgentTaskLog from "../Utils/AIAgentTaskLog";
import TaskLogger from "../Utils/TaskLogger";
import BackendAPI from "../Utils/BackendAPI";
import {
  getTaskHandlerRegistry,
  TaskContext,
  TaskMetadata,
  TaskHandler,
  TaskResult,
} from "../TaskHandlers/Index";
import TaskHandlerRegistry from "../TaskHandlers/TaskHandlerRegistry";
import URL from "Common/Types/API/URL";
import API from "Common/Utils/API";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import { JSONObject } from "Common/Types/JSON";
import logger from "Common/Server/Utils/Logger";
import AIAgentTaskStatus from "Common/Types/AI/AIAgentTaskStatus";
import AIAgentTaskType from "Common/Types/AI/AIAgentTaskType";
import ObjectID from "Common/Types/ObjectID";
import Sleep from "Common/Types/Sleep";

// Type for task data from the API
interface AIAgentTaskData {
  // Task ID as a plain string (converted to ObjectID before use).
  _id: string;
  projectId: string;
  taskType: AIAgentTaskType;
  metadata: TaskMetadata;
  // ISO timestamp string from the server.
  createdAt: string;
  status?: AIAgentTaskStatus;
}

// Type for API response containing task
interface GetPendingTaskResponse {
  // null when no task is pending.
  task: AIAgentTaskData | null;
}

const SLEEP_WHEN_NO_TASKS_MS: number = 60 * 1000; // 1 minute

type ExecuteTaskFunction = (task: AIAgentTaskData) => Promise<void>;

/**
 * Execute an AI Agent task using the registered task handler
 */
const executeTask: ExecuteTaskFunction = async (
  task: AIAgentTaskData,
): Promise<void> => {
  const taskIdString: string = task._id;
  const projectIdString: string = task.projectId;
  const taskId: ObjectID = new ObjectID(taskIdString);
  const projectId: ObjectID = new ObjectID(projectIdString);
  const taskType: AIAgentTaskType = task.taskType;
  const metadata: TaskMetadata = task.metadata || {};
  const createdAt: Date = new Date(task.createdAt);

  // Get the task handler from the registry
  const registry: TaskHandlerRegistry = getTaskHandlerRegistry();
  const handler: TaskHandler | undefined = registry.getHandler(taskType);

  if (!handler) {
    throw new Error(`No handler registered for task type: ${taskType}`);
  }

  // Create task logger (streams per-task logs back to the server).
  const taskLogger: TaskLogger = new TaskLogger({
    taskId: taskIdString,
    context: `${handler.name}`,
  });

  // Create backend API client
  const backendAPI: BackendAPI = new BackendAPI();

  // Build task context passed to the handler.
  const context: TaskContext = {
    taskId,
    projectId,
    taskType,
    metadata,
    logger: taskLogger,
    backendAPI,
    createdAt,
    startedAt: new Date(),
  };

  try {
    // Log handler starting
    await taskLogger.info(
      `Starting ${handler.name} for task type: ${taskType}`,
    );

    // Validate metadata if the handler supports it
    if (handler.validateMetadata && !handler.validateMetadata(metadata)) {
      throw new Error(`Invalid metadata for task type: ${taskType}`);
    }

    // Execute the task handler
    const result: TaskResult = await handler.execute(context);

    // Log result
    if (result.success) {
      await taskLogger.info(`Task completed: ${result.message}`);

      if (result.pullRequestsCreated && result.pullRequestsCreated > 0) {
        await taskLogger.info(
          `Created ${result.pullRequestsCreated} pull request(s): ${result.pullRequestUrls?.join(", ") || ""}`,
        );
      }
    } else {
      await taskLogger.warning(`Task did not succeed: ${result.message}`);
    }

    // Flush all pending logs
    await taskLogger.flush();

    /*
     * If the task was not successful and we want to report it as an error
     * Note: Based on user requirements, "no fix found" should be Completed, not Error
     * Only throw if there was an actual error (not just "no action taken")
     */
    if (!result.success && result.data?.["isError"]) {
      throw new Error(result.message);
    }
  } catch (error) {
    // Ensure logs are flushed even on error
    await taskLogger.flush();
    throw error;
  }
};

/**
 * Long-running poll loop: fetch one pending task at a time from the server,
 * drive it through InProgress → Completed/Error, and sleep between polls
 * when no work is available or a request fails. Never returns under normal
 * operation.
 */
const startTaskProcessingLoop: () => Promise<void> =
  async (): Promise<void> => {
    logger.info("Starting AI Agent task processing loop...");

    const getPendingTaskUrl: URL = URL.fromString(
      ONEUPTIME_URL.toString(),
    ).addRoute("/api/ai-agent-task/get-pending-task");

    const updateTaskStatusUrl: URL = URL.fromString(
      ONEUPTIME_URL.toString(),
    ).addRoute("/api/ai-agent-task/update-task-status");

    /* Continuous loop to process tasks */
    while (true) {
      try {
        /* Fetch one scheduled task */
        const getPendingTaskResult: HTTPResponse<JSONObject> = await API.post({
          url: getPendingTaskUrl,
          data: AIAgentAPIRequest.getDefaultRequestBody(),
        });

        if (!getPendingTaskResult.isSuccess()) {
          logger.error("Failed to fetch pending task from server");
          logger.debug(
            `Sleeping for ${SLEEP_WHEN_NO_TASKS_MS / 1000} seconds before retrying...`,
          );
          await Sleep.sleep(SLEEP_WHEN_NO_TASKS_MS);
          continue;
        }

        const responseData: GetPendingTaskResponse =
          getPendingTaskResult.data as unknown as GetPendingTaskResponse;
        const task: AIAgentTaskData | null = responseData.task;

        if (!task || !task._id) {
          logger.debug("No pending tasks available");
          logger.debug(
            `Sleeping for ${SLEEP_WHEN_NO_TASKS_MS / 1000} seconds before checking again...`,
          );
          await Sleep.sleep(SLEEP_WHEN_NO_TASKS_MS);
          continue;
        }

        const taskId: string = task._id;
        const taskType: string = task.taskType || "Unknown";
        logger.info(`Processing task: ${taskId} (type: ${taskType})`);

        try {
          /* Mark task as InProgress */
          const inProgressResult: HTTPResponse<JSONObject> = await API.post({
            url: updateTaskStatusUrl,
            data: {
              ...AIAgentAPIRequest.getDefaultRequestBody(),
              taskId: taskId,
              status: AIAgentTaskStatus.InProgress,
            },
          });

          // If we cannot claim the task, skip it (another agent may own it).
          if (!inProgressResult.isSuccess()) {
            logger.error(
              `Failed to mark task ${taskId} as InProgress. Skipping.`,
            );
            continue;
          }

          /* Send task started log */
          await AIAgentTaskLog.sendTaskStartedLog(taskId);

          /* Execute the task using the handler system */
          await executeTask(task);

          /* Mark task as Completed */
          const completedResult: HTTPResponse<JSONObject> = await API.post({
            url: updateTaskStatusUrl,
            data: {
              ...AIAgentAPIRequest.getDefaultRequestBody(),
              taskId: taskId,
              status: AIAgentTaskStatus.Completed,
            },
          });

          if (!completedResult.isSuccess()) {
            logger.error(`Failed to mark task ${taskId} as Completed`);
          } else {
            /* Send task completed log */
            await AIAgentTaskLog.sendTaskCompletedLog(taskId);
            logger.info(`Task completed successfully: ${taskId}`);
          }
        } catch (error) {
          /* Mark task as Error with error message */
          const errorMessage: string =
            error instanceof Error ? error.message : "Unknown error occurred";

          const errorResult: HTTPResponse<JSONObject> = await API.post({
            url: updateTaskStatusUrl,
            data: {
              ...AIAgentAPIRequest.getDefaultRequestBody(),
              taskId: taskId,
              status: AIAgentTaskStatus.Error,
              statusMessage: errorMessage,
            },
          });

          if (!errorResult.isSuccess()) {
            logger.error(
              `Failed to mark task ${taskId} as Error: ${errorMessage}`,
            );
          }

          /* Send task error log */
          await AIAgentTaskLog.sendTaskErrorLog(taskId, errorMessage);

          logger.error(`Task failed: ${taskId} - ${errorMessage}`);
          logger.error(error);
        }
      } catch (error) {
        // Outer guard: any unexpected failure sleeps and retries the loop.
        logger.error("Error in task processing loop:");
        logger.error(error);
        logger.debug(
          `Sleeping for ${SLEEP_WHEN_NO_TASKS_MS / 1000} seconds before retrying...`,
        );
        await Sleep.sleep(SLEEP_WHEN_NO_TASKS_MS);
      }
    }
  };

export default startTaskProcessingLoop;
|
||||
@@ -1,127 +0,0 @@
|
||||
import {
|
||||
ONEUPTIME_URL,
|
||||
AI_AGENT_ID,
|
||||
AI_AGENT_KEY,
|
||||
AI_AGENT_NAME,
|
||||
AI_AGENT_DESCRIPTION,
|
||||
} from "../Config";
|
||||
import HTTPResponse from "Common/Types/API/HTTPResponse";
|
||||
import URL from "Common/Types/API/URL";
|
||||
import { JSONObject } from "Common/Types/JSON";
|
||||
import Sleep from "Common/Types/Sleep";
|
||||
import API from "Common/Utils/API";
|
||||
import { HasClusterKey } from "Common/Server/EnvironmentConfig";
|
||||
import LocalCache from "Common/Server/Infrastructure/LocalCache";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import ClusterKeyAuthorization from "Common/Server/Middleware/ClusterKeyAuthorization";
|
||||
|
||||
export default class Register {
  /**
   * Registers this AI agent with the OneUptime server.
   *
   * Retries up to 10 times with a 30-second wait between attempts and
   * stops on the first successful attempt.
   *
   * NOTE(review): if every retry fails, the loop simply ends and the
   * promise resolves normally — callers get no final error signal.
   */
  public static async registerAIAgent(): Promise<void> {
    // register AI agent with 10 retries and 30 second interval between each retry.

    let currentRetry: number = 0;

    const maxRetry: number = 10;

    const retryIntervalInSeconds: number = 30;

    while (currentRetry < maxRetry) {
      try {
        logger.debug(`Registering AI Agent. Attempt: ${currentRetry + 1}`);
        await Register._registerAIAgent();
        logger.debug(`AI Agent registered successfully.`);
        // Success — leave the retry loop.
        break;
      } catch (error) {
        logger.error(
          `Failed to register AI Agent. Retrying after ${retryIntervalInSeconds} seconds...`,
        );
        logger.error(error);
        currentRetry++;
        await Sleep.sleep(retryIntervalInSeconds * 1000);
      }
    }
  }

  /**
   * Performs a single registration attempt.
   *
   * Clustered mode (a cluster key is configured): POSTs to
   * /api/ai-agent/register and stores the server-assigned agent id in
   * LocalCache under ("AI_AGENT", "AI_AGENT_ID").
   *
   * Non-clustered mode: requires AI_AGENT_ID to be configured; validates
   * it via /api/ai-agent/alive and caches it on success. Exits the
   * process if AI_AGENT_ID is missing.
   *
   * @throws Error when the server responds with a non-success status, or
   *         when the registration response contains no "_id" field.
   */
  private static async _registerAIAgent(): Promise<void> {
    if (HasClusterKey) {
      // Clustered mode: Auto-register and get ID from server
      const aiAgentRegistrationUrl: URL = URL.fromString(
        ONEUPTIME_URL.toString(),
      ).addRoute("/api/ai-agent/register");

      logger.debug("Registering AI Agent...");
      logger.debug("Sending request to: " + aiAgentRegistrationUrl.toString());

      const result: HTTPResponse<JSONObject> = await API.post({
        url: aiAgentRegistrationUrl,
        data: {
          aiAgentKey: AI_AGENT_KEY,
          aiAgentName: AI_AGENT_NAME,
          aiAgentDescription: AI_AGENT_DESCRIPTION,
          clusterKey: ClusterKeyAuthorization.getClusterKey(),
        },
      });

      if (!result.isSuccess()) {
        logger.error(
          `Failed to register AI Agent. Status: ${result.statusCode}`,
        );
        logger.error(result.data);
        throw new Error(
          "Failed to register AI Agent: HTTP " + result.statusCode,
        );
      }

      logger.debug("AI Agent Registered");
      logger.debug(result.data);

      // Server assigns the agent id; it is returned in the "_id" field.
      const aiAgentId: string | undefined = result.data["_id"] as
        | string
        | undefined;

      if (!aiAgentId) {
        logger.error("AI Agent ID not found in response");
        logger.error(result.data);
        throw new Error("AI Agent ID not found in registration response");
      }

      LocalCache.setString("AI_AGENT", "AI_AGENT_ID", aiAgentId);
    } else {
      // Non-clustered mode: Validate AI agent by sending alive request
      if (!AI_AGENT_ID) {
        logger.error("AI_AGENT_ID or ONEUPTIME_SECRET should be set");
        return process.exit();
      }

      const aliveUrl: URL = URL.fromString(ONEUPTIME_URL.toString()).addRoute(
        "/api/ai-agent/alive",
      );

      logger.debug("Registering AI Agent...");
      logger.debug("Sending request to: " + aliveUrl.toString());

      const result: HTTPResponse<JSONObject> = await API.post({
        url: aliveUrl,
        data: {
          aiAgentKey: AI_AGENT_KEY.toString(),
          aiAgentId: AI_AGENT_ID.toString(),
        },
      });

      if (result.isSuccess()) {
        // Cache the configured id so the rest of the process reads it
        // from the same place as clustered mode.
        LocalCache.setString(
          "AI_AGENT",
          "AI_AGENT_ID",
          AI_AGENT_ID.toString() as string,
        );
        logger.debug("AI Agent registered successfully");
      } else {
        throw new Error("Failed to register AI Agent: " + result.statusCode);
      }
    }

    // Log whichever id ended up cached (server-assigned or configured).
    logger.debug(
      `AI Agent ID: ${LocalCache.getString("AI_AGENT", "AI_AGENT_ID") || "Unknown"}`,
    );
  }
}
|
||||
@@ -1,454 +0,0 @@
|
||||
import {
|
||||
BaseTaskHandler,
|
||||
TaskContext,
|
||||
TaskResult,
|
||||
TaskMetadata,
|
||||
TaskResultData,
|
||||
} from "./TaskHandlerInterface";
|
||||
import AIAgentTaskType from "Common/Types/AI/AIAgentTaskType";
|
||||
import {
|
||||
LLMConfig,
|
||||
ExceptionDetails,
|
||||
CodeRepositoryInfo,
|
||||
RepositoryToken,
|
||||
} from "../Utils/BackendAPI";
|
||||
import RepositoryManager, {
|
||||
RepositoryConfig,
|
||||
CloneResult,
|
||||
} from "../Utils/RepositoryManager";
|
||||
import PullRequestCreator, {
|
||||
PullRequestResult,
|
||||
} from "../Utils/PullRequestCreator";
|
||||
import WorkspaceManager, { WorkspaceInfo } from "../Utils/WorkspaceManager";
|
||||
import {
|
||||
CodeAgentFactory,
|
||||
CodeAgent,
|
||||
CodeAgentType,
|
||||
CodeAgentTask,
|
||||
CodeAgentResult,
|
||||
CodeAgentProgressEvent,
|
||||
CodeAgentLLMConfig,
|
||||
} from "../CodeAgents/Index";
|
||||
|
||||
// Metadata structure for Fix Exception tasks
|
||||
// Metadata structure for Fix Exception tasks
export interface FixExceptionMetadata extends TaskMetadata {
  // Telemetry exception id to fix — the only required field (see
  // FixExceptionTaskHandler.validateMetadata).
  exceptionId: string;
  // Optional context; the handler resolves service/stack details from the
  // backend by exceptionId, so these are informational.
  serviceId?: string;
  stackTrace?: string;
  errorMessage?: string;
}
|
||||
|
||||
export default class FixExceptionTaskHandler extends BaseTaskHandler<FixExceptionMetadata> {
|
||||
public readonly taskType: AIAgentTaskType = AIAgentTaskType.FixException;
|
||||
public readonly name: string = "Fix Exception Handler";
|
||||
|
||||
// Default timeout for code agent execution (30 minutes)
|
||||
private static readonly CODE_AGENT_TIMEOUT_MS: number = 30 * 60 * 1000;
|
||||
|
||||
public async execute(
|
||||
context: TaskContext<FixExceptionMetadata>,
|
||||
): Promise<TaskResult> {
|
||||
const metadata: FixExceptionMetadata = context.metadata;
|
||||
|
||||
await this.log(
|
||||
context,
|
||||
`Starting Fix Exception task for exception: ${metadata.exceptionId} (taskId: ${context.taskId.toString()})`,
|
||||
);
|
||||
|
||||
let workspace: WorkspaceInfo | null = null;
|
||||
|
||||
try {
|
||||
// Step 1: Get LLM configuration for the project
|
||||
await this.log(context, "Fetching LLM provider configuration...");
|
||||
const llmConfig: LLMConfig = await context.backendAPI.getLLMConfig(
|
||||
context.projectId.toString(),
|
||||
);
|
||||
await this.log(
|
||||
context,
|
||||
`Using LLM provider: ${llmConfig.llmType}${llmConfig.modelName ? ` (${llmConfig.modelName})` : ""}`,
|
||||
);
|
||||
|
||||
// Step 2: Get exception details
|
||||
await this.log(context, "Fetching exception details...");
|
||||
const exceptionDetails: ExceptionDetails =
|
||||
await context.backendAPI.getExceptionDetails(metadata.exceptionId);
|
||||
|
||||
if (!exceptionDetails.service) {
|
||||
await this.log(context, "No service linked to this exception", "error");
|
||||
return this.createFailureResult("No service linked to this exception", {
|
||||
isError: true,
|
||||
});
|
||||
}
|
||||
|
||||
await this.log(
|
||||
context,
|
||||
`Exception: ${exceptionDetails.exception.message.substring(0, 100)}...`,
|
||||
);
|
||||
await this.log(context, `Service: ${exceptionDetails.service.name}`);
|
||||
|
||||
// Step 3: Get linked code repositories
|
||||
await this.log(context, "Finding linked code repositories...");
|
||||
const repositories: Array<CodeRepositoryInfo> =
|
||||
await context.backendAPI.getCodeRepositories(
|
||||
exceptionDetails.service.id,
|
||||
);
|
||||
|
||||
if (repositories.length === 0) {
|
||||
await this.log(
|
||||
context,
|
||||
"No code repositories linked to this service",
|
||||
"error",
|
||||
);
|
||||
return this.createFailureResult(
|
||||
"No code repositories linked to this service via Service Catalog",
|
||||
{ isError: true },
|
||||
);
|
||||
}
|
||||
|
||||
await this.log(
|
||||
context,
|
||||
`Found ${repositories.length} linked code repository(ies)`,
|
||||
);
|
||||
|
||||
// Step 4: Create workspace for the task
|
||||
workspace = await WorkspaceManager.createWorkspace(
|
||||
context.taskId.toString(),
|
||||
);
|
||||
await this.log(context, `Created workspace: ${workspace.workspacePath}`);
|
||||
|
||||
// Step 5: Process each repository
|
||||
const pullRequestUrls: Array<string> = [];
|
||||
const errors: Array<string> = [];
|
||||
|
||||
for (const repo of repositories) {
|
||||
try {
|
||||
await this.log(
|
||||
context,
|
||||
`Processing repository: ${repo.organizationName}/${repo.repositoryName}`,
|
||||
);
|
||||
|
||||
const prUrl: string | null = await this.processRepository(
|
||||
context,
|
||||
repo,
|
||||
exceptionDetails,
|
||||
llmConfig,
|
||||
workspace,
|
||||
);
|
||||
|
||||
if (prUrl) {
|
||||
pullRequestUrls.push(prUrl);
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage: string =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
errors.push(
|
||||
`${repo.organizationName}/${repo.repositoryName}: ${errorMessage}`,
|
||||
);
|
||||
await this.log(
|
||||
context,
|
||||
`Failed to process repository ${repo.organizationName}/${repo.repositoryName}: ${errorMessage}`,
|
||||
"error",
|
||||
);
|
||||
// Continue with next repository
|
||||
}
|
||||
}
|
||||
|
||||
// Step 6: Return result
|
||||
if (pullRequestUrls.length > 0) {
|
||||
await this.log(
|
||||
context,
|
||||
`Successfully created ${pullRequestUrls.length} pull request(s)`,
|
||||
);
|
||||
|
||||
const resultData: TaskResultData = {
|
||||
pullRequests: pullRequestUrls,
|
||||
};
|
||||
|
||||
if (errors.length > 0) {
|
||||
resultData.errors = errors;
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Created ${pullRequestUrls.length} pull request(s)`,
|
||||
pullRequestsCreated: pullRequestUrls.length,
|
||||
pullRequestUrls,
|
||||
data: resultData,
|
||||
};
|
||||
}
|
||||
|
||||
// No PRs created - mark as error
|
||||
await this.log(
|
||||
context,
|
||||
"No fixes could be applied to any repository",
|
||||
"error",
|
||||
);
|
||||
return this.createFailureResult(
|
||||
errors.length > 0
|
||||
? `No fixes could be applied. Errors: ${errors.join("; ")}`
|
||||
: "No fixes could be applied to any repository",
|
||||
{ isError: true },
|
||||
);
|
||||
} catch (error) {
|
||||
const errorMessage: string =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
await this.log(context, `Task failed: ${errorMessage}`, "error");
|
||||
// Mark as an actual error (not just "no action taken") so task gets Error status
|
||||
return this.createFailureResult(errorMessage, { isError: true });
|
||||
} finally {
|
||||
// Cleanup workspace
|
||||
if (workspace) {
|
||||
await this.log(context, "Cleaning up workspace...");
|
||||
await WorkspaceManager.deleteWorkspace(workspace.workspacePath);
|
||||
}
|
||||
|
||||
// Flush logs
|
||||
await context.logger.flush();
|
||||
}
|
||||
}
|
||||
|
||||
// Process a single repository
|
||||
private async processRepository(
|
||||
context: TaskContext<FixExceptionMetadata>,
|
||||
repo: CodeRepositoryInfo,
|
||||
exceptionDetails: ExceptionDetails,
|
||||
llmConfig: LLMConfig,
|
||||
workspace: WorkspaceInfo,
|
||||
): Promise<string | null> {
|
||||
// Get access token for the repository
|
||||
await this.log(
|
||||
context,
|
||||
`Getting access token for ${repo.organizationName}/${repo.repositoryName}...`,
|
||||
);
|
||||
|
||||
const tokenData: RepositoryToken =
|
||||
await context.backendAPI.getRepositoryToken(repo.id);
|
||||
|
||||
// Clone the repository
|
||||
await this.log(
|
||||
context,
|
||||
`Cloning repository ${repo.organizationName}/${repo.repositoryName}...`,
|
||||
);
|
||||
|
||||
const repoConfig: RepositoryConfig = {
|
||||
organizationName: tokenData.organizationName,
|
||||
repositoryName: tokenData.repositoryName,
|
||||
token: tokenData.token,
|
||||
repositoryUrl: tokenData.repositoryUrl,
|
||||
};
|
||||
|
||||
const repoManager: RepositoryManager = new RepositoryManager(
|
||||
context.logger,
|
||||
);
|
||||
const cloneResult: CloneResult = await repoManager.cloneRepository(
|
||||
repoConfig,
|
||||
workspace.workspacePath,
|
||||
);
|
||||
|
||||
// Create a fix branch
|
||||
const branchName: string = `oneuptime-fix-exception-${context.taskId.toString().substring(0, 8)}`;
|
||||
await this.log(context, `Creating branch: ${branchName}`);
|
||||
await repoManager.createBranch(cloneResult.repositoryPath, branchName);
|
||||
|
||||
// Build the prompt for the code agent
|
||||
const prompt: string = this.buildFixPrompt(
|
||||
exceptionDetails,
|
||||
repo.servicePathInRepository,
|
||||
);
|
||||
|
||||
// Initialize code agent
|
||||
await this.log(context, "Initializing code agent...");
|
||||
const agent: CodeAgent = CodeAgentFactory.createAgent(
|
||||
CodeAgentType.OpenCode,
|
||||
);
|
||||
const agentConfig: CodeAgentLLMConfig = {
|
||||
llmType: llmConfig.llmType,
|
||||
};
|
||||
|
||||
if (llmConfig.apiKey) {
|
||||
agentConfig.apiKey = llmConfig.apiKey;
|
||||
}
|
||||
|
||||
if (llmConfig.baseUrl) {
|
||||
agentConfig.baseUrl = llmConfig.baseUrl;
|
||||
}
|
||||
|
||||
if (llmConfig.modelName) {
|
||||
agentConfig.modelName = llmConfig.modelName;
|
||||
}
|
||||
|
||||
await agent.initialize(agentConfig, context.logger);
|
||||
|
||||
// Set up progress callback to log agent output
|
||||
agent.onProgress((event: CodeAgentProgressEvent) => {
|
||||
context.logger.logProcessOutput("CodeAgent", event.message);
|
||||
});
|
||||
|
||||
// Execute the code agent
|
||||
await this.log(context, "Running code agent to fix exception...");
|
||||
const codeAgentTask: CodeAgentTask = {
|
||||
workingDirectory: cloneResult.repositoryPath,
|
||||
prompt,
|
||||
timeoutMs: FixExceptionTaskHandler.CODE_AGENT_TIMEOUT_MS,
|
||||
};
|
||||
|
||||
if (repo.servicePathInRepository) {
|
||||
codeAgentTask.servicePath = repo.servicePathInRepository;
|
||||
}
|
||||
|
||||
const agentResult: CodeAgentResult = await agent.executeTask(codeAgentTask);
|
||||
|
||||
// Check if any changes were made
|
||||
if (!agentResult.success || agentResult.filesModified.length === 0) {
|
||||
await this.log(
|
||||
context,
|
||||
`Code agent did not make any changes: ${agentResult.error || agentResult.summary}`,
|
||||
"warning",
|
||||
);
|
||||
await agent.cleanup();
|
||||
return null;
|
||||
}
|
||||
|
||||
await this.log(
|
||||
context,
|
||||
`Code agent modified ${agentResult.filesModified.length} file(s)`,
|
||||
);
|
||||
|
||||
// Add all changes and commit
|
||||
await this.log(context, "Committing changes...");
|
||||
await repoManager.addAllChanges(cloneResult.repositoryPath);
|
||||
|
||||
const commitMessage: string = this.buildCommitMessage(exceptionDetails);
|
||||
await repoManager.commitChanges(cloneResult.repositoryPath, commitMessage);
|
||||
|
||||
// Push the branch
|
||||
await this.log(context, `Pushing branch ${branchName}...`);
|
||||
await repoManager.pushBranch(
|
||||
cloneResult.repositoryPath,
|
||||
branchName,
|
||||
repoConfig,
|
||||
);
|
||||
|
||||
// Create pull request
|
||||
await this.log(context, "Creating pull request...");
|
||||
const prCreator: PullRequestCreator = new PullRequestCreator(
|
||||
context.logger,
|
||||
);
|
||||
|
||||
const prTitle: string = PullRequestCreator.generatePRTitle(
|
||||
exceptionDetails.exception.message,
|
||||
);
|
||||
|
||||
const prBody: string = PullRequestCreator.generatePRBody({
|
||||
exceptionMessage: exceptionDetails.exception.message,
|
||||
exceptionType: exceptionDetails.exception.exceptionType,
|
||||
stackTrace: exceptionDetails.exception.stackTrace,
|
||||
serviceName: exceptionDetails.service?.name || "Unknown Service",
|
||||
summary: agentResult.summary,
|
||||
});
|
||||
|
||||
const prResult: PullRequestResult = await prCreator.createPullRequest({
|
||||
token: tokenData.token,
|
||||
organizationName: tokenData.organizationName,
|
||||
repositoryName: tokenData.repositoryName,
|
||||
baseBranch: repo.mainBranchName || "main",
|
||||
headBranch: branchName,
|
||||
title: prTitle,
|
||||
body: prBody,
|
||||
});
|
||||
|
||||
await this.log(context, `Pull request created: ${prResult.htmlUrl}`);
|
||||
|
||||
// Record the PR in the backend
|
||||
await context.backendAPI.recordPullRequest({
|
||||
taskId: context.taskId.toString(),
|
||||
codeRepositoryId: repo.id,
|
||||
pullRequestUrl: prResult.htmlUrl,
|
||||
pullRequestNumber: prResult.number,
|
||||
pullRequestId: prResult.id,
|
||||
title: prResult.title,
|
||||
description: prBody.substring(0, 1000),
|
||||
headRefName: branchName,
|
||||
baseRefName: repo.mainBranchName || "main",
|
||||
});
|
||||
|
||||
// Cleanup agent
|
||||
await agent.cleanup();
|
||||
|
||||
return prResult.htmlUrl;
|
||||
}
|
||||
|
||||
// Build the prompt for the code agent
|
||||
private buildFixPrompt(
|
||||
exceptionDetails: ExceptionDetails,
|
||||
servicePathInRepository: string | null,
|
||||
): string {
|
||||
let prompt: string = `You are a software engineer fixing a bug in a codebase.
|
||||
|
||||
## Exception Information
|
||||
|
||||
**Exception Type:** ${exceptionDetails.exception.exceptionType}
|
||||
|
||||
**Error Message:**
|
||||
${exceptionDetails.exception.message}
|
||||
|
||||
**Stack Trace:**
|
||||
\`\`\`
|
||||
${exceptionDetails.exception.stackTrace}
|
||||
\`\`\`
|
||||
|
||||
## Task
|
||||
|
||||
Please analyze the stack trace and fix the exception. Here are the steps to follow:
|
||||
|
||||
1. Identify the root cause of the exception from the stack trace
|
||||
2. Find the relevant source files in the codebase
|
||||
3. Implement a fix for the issue
|
||||
4. Make sure your fix handles edge cases appropriately
|
||||
5. The fix should be minimal and focused - only change what's necessary
|
||||
|
||||
## Guidelines
|
||||
|
||||
- Do NOT add excessive error handling or logging unless necessary
|
||||
- Do NOT refactor unrelated code
|
||||
- Keep the fix simple and targeted
|
||||
- Preserve existing code style and patterns
|
||||
- If you cannot determine how to fix the issue, explain why
|
||||
|
||||
Please proceed with analyzing and fixing this exception.`;
|
||||
|
||||
if (servicePathInRepository) {
|
||||
prompt = `The service code is located in the \`${servicePathInRepository}\` directory.\n\n${prompt}`;
|
||||
}
|
||||
|
||||
return prompt;
|
||||
}
|
||||
|
||||
// Build commit message for the fix
|
||||
private buildCommitMessage(exceptionDetails: ExceptionDetails): string {
|
||||
const shortMessage: string = exceptionDetails.exception.message
|
||||
.replace(/\n/g, " ")
|
||||
.replace(/\s+/g, " ")
|
||||
.trim()
|
||||
.substring(0, 50);
|
||||
|
||||
return `fix: ${shortMessage}
|
||||
|
||||
This commit fixes an exception detected by OneUptime.
|
||||
|
||||
Exception Type: ${exceptionDetails.exception.exceptionType}
|
||||
Exception ID: ${exceptionDetails.exception.id}
|
||||
|
||||
Automatically generated by OneUptime AI Agent.`;
|
||||
}
|
||||
|
||||
// Validate metadata
|
||||
public validateMetadata(metadata: FixExceptionMetadata): boolean {
|
||||
return Boolean(metadata.exceptionId);
|
||||
}
|
||||
|
||||
// Get handler description
|
||||
public getDescription(): string {
|
||||
return "Analyzes exceptions detected by OneUptime and attempts to fix them by modifying the source code and creating a pull request.";
|
||||
}
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
// Export all task handler related types and classes
export {
  TaskHandler,
  TaskContext,
  TaskResult,
  TaskMetadata,
  TaskResultData,
  BaseTaskHandler,
} from "./TaskHandlerInterface";

// Registry singleton and its convenience accessor.
export {
  default as TaskHandlerRegistry,
  getTaskHandlerRegistry,
} from "./TaskHandlerRegistry";

// Concrete handler for the "Fix Exception" task type.
export { default as FixExceptionTaskHandler } from "./FixExceptionTaskHandler";
|
||||
@@ -1,161 +0,0 @@
|
||||
import AIAgentTaskType from "Common/Types/AI/AIAgentTaskType";
|
||||
import ObjectID from "Common/Types/ObjectID";
|
||||
import TaskLogger from "../Utils/TaskLogger";
|
||||
import BackendAPI from "../Utils/BackendAPI";
|
||||
|
||||
// Base interface for task metadata - handlers should define their own specific metadata types
export interface TaskMetadata {
  // All metadata must have at least these optional fields for extensibility
  [key: string]: unknown;
}

// Base interface for task result data
export interface TaskResultData {
  // Pull requests created (for Fix Exception tasks)
  pullRequests?: Array<string>;
  // Errors encountered during processing
  errors?: Array<string>;
  // Flag to indicate if this is an error result (not just "no action taken")
  isError?: boolean;
  // Additional data fields
  [key: string]: unknown;
}

// Context provided to task handlers
export interface TaskContext<TMetadata extends TaskMetadata = TaskMetadata> {
  // Task identification
  taskId: ObjectID;
  projectId: ObjectID;
  taskType: AIAgentTaskType;

  // Task metadata (varies by task type)
  metadata: TMetadata;

  // Utilities: per-task logger and backend API client
  logger: TaskLogger;
  backendAPI: BackendAPI;

  // Task timestamps
  createdAt: Date;
  startedAt: Date;
}

// Result returned by task handlers
export interface TaskResult {
  // Whether the task completed successfully
  success: boolean;

  // Human-readable message describing the result
  message: string;

  // Additional data about the result (optional)
  data?: TaskResultData;

  // Number of PRs created (for Fix Exception tasks)
  pullRequestsCreated?: number;

  // List of PR URLs created
  pullRequestUrls?: Array<string>;
}

// Interface that all task handlers must implement
export interface TaskHandler<TMetadata extends TaskMetadata = TaskMetadata> {
  // The type of task this handler processes
  readonly taskType: AIAgentTaskType;

  // Human-readable name for the handler
  readonly name: string;

  // Execute the task and return a result
  execute(context: TaskContext<TMetadata>): Promise<TaskResult>;

  // Check if this handler can process a given task
  canHandle(taskType: AIAgentTaskType): boolean;

  // Optional: Validate task metadata before execution
  validateMetadata?(metadata: TMetadata): boolean;

  // Optional: Get a description of what this handler does
  getDescription?(): string;
}
|
||||
|
||||
// Abstract base class that provides common functionality for task handlers
|
||||
export abstract class BaseTaskHandler<
|
||||
TMetadata extends TaskMetadata = TaskMetadata,
|
||||
> implements TaskHandler<TMetadata>
|
||||
{
|
||||
public abstract readonly taskType: AIAgentTaskType;
|
||||
public abstract readonly name: string;
|
||||
|
||||
public abstract execute(context: TaskContext<TMetadata>): Promise<TaskResult>;
|
||||
|
||||
public canHandle(taskType: AIAgentTaskType): boolean {
|
||||
return taskType === this.taskType;
|
||||
}
|
||||
|
||||
// Create a success result
|
||||
protected createSuccessResult(
|
||||
message: string,
|
||||
data?: TaskResultData,
|
||||
): TaskResult {
|
||||
const result: TaskResult = {
|
||||
success: true,
|
||||
message,
|
||||
};
|
||||
|
||||
if (data) {
|
||||
result.data = data;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Create a failure result
|
||||
protected createFailureResult(
|
||||
message: string,
|
||||
data?: TaskResultData,
|
||||
): TaskResult {
|
||||
const result: TaskResult = {
|
||||
success: false,
|
||||
message,
|
||||
};
|
||||
|
||||
if (data) {
|
||||
result.data = data;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Create a result for when no action was taken
|
||||
protected createNoActionResult(message: string): TaskResult {
|
||||
return {
|
||||
success: true,
|
||||
message,
|
||||
pullRequestsCreated: 0,
|
||||
};
|
||||
}
|
||||
|
||||
// Log to the task logger
|
||||
protected async log(
|
||||
context: TaskContext<TMetadata>,
|
||||
message: string,
|
||||
level: "info" | "debug" | "warning" | "error" = "info",
|
||||
): Promise<void> {
|
||||
switch (level) {
|
||||
case "debug":
|
||||
await context.logger.debug(message);
|
||||
break;
|
||||
case "warning":
|
||||
await context.logger.warning(message);
|
||||
break;
|
||||
case "error":
|
||||
await context.logger.error(message);
|
||||
break;
|
||||
case "info":
|
||||
default:
|
||||
await context.logger.info(message);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,93 +0,0 @@
|
||||
import { TaskHandler } from "./TaskHandlerInterface";
|
||||
import AIAgentTaskType from "Common/Types/AI/AIAgentTaskType";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
|
||||
/*
|
||||
* Registry for task handlers
|
||||
* Allows dynamic registration and lookup of handlers by task type
|
||||
*/
|
||||
export default class TaskHandlerRegistry {
|
||||
private static instance: TaskHandlerRegistry | null = null;
|
||||
private handlers: Map<AIAgentTaskType, TaskHandler> = new Map();
|
||||
|
||||
// Private constructor for singleton pattern
|
||||
private constructor() {}
|
||||
|
||||
// Get the singleton instance
|
||||
public static getInstance(): TaskHandlerRegistry {
|
||||
if (!TaskHandlerRegistry.instance) {
|
||||
TaskHandlerRegistry.instance = new TaskHandlerRegistry();
|
||||
}
|
||||
return TaskHandlerRegistry.instance;
|
||||
}
|
||||
|
||||
// Reset the singleton (useful for testing)
|
||||
public static resetInstance(): void {
|
||||
TaskHandlerRegistry.instance = null;
|
||||
}
|
||||
|
||||
// Register a task handler
|
||||
public register(handler: TaskHandler): void {
|
||||
if (this.handlers.has(handler.taskType)) {
|
||||
logger.warn(
|
||||
`Overwriting existing handler for task type: ${handler.taskType}`,
|
||||
);
|
||||
}
|
||||
|
||||
this.handlers.set(handler.taskType, handler);
|
||||
logger.debug(
|
||||
`Registered handler "${handler.name}" for task type: ${handler.taskType}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Register multiple handlers at once
|
||||
public registerAll(handlers: Array<TaskHandler>): void {
|
||||
for (const handler of handlers) {
|
||||
this.register(handler);
|
||||
}
|
||||
}
|
||||
|
||||
// Unregister a handler
|
||||
public unregister(taskType: AIAgentTaskType): void {
|
||||
if (this.handlers.has(taskType)) {
|
||||
this.handlers.delete(taskType);
|
||||
logger.debug(`Unregistered handler for task type: ${taskType}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Get a handler for a specific task type
|
||||
public getHandler(taskType: AIAgentTaskType): TaskHandler | undefined {
|
||||
return this.handlers.get(taskType);
|
||||
}
|
||||
|
||||
// Check if a handler exists for a task type
|
||||
public hasHandler(taskType: AIAgentTaskType): boolean {
|
||||
return this.handlers.has(taskType);
|
||||
}
|
||||
|
||||
// Get all registered task types
|
||||
public getRegisteredTaskTypes(): Array<AIAgentTaskType> {
|
||||
return Array.from(this.handlers.keys());
|
||||
}
|
||||
|
||||
// Get all registered handlers
|
||||
public getAllHandlers(): Array<TaskHandler> {
|
||||
return Array.from(this.handlers.values());
|
||||
}
|
||||
|
||||
// Get the number of registered handlers
|
||||
public getHandlerCount(): number {
|
||||
return this.handlers.size;
|
||||
}
|
||||
|
||||
// Clear all handlers
|
||||
public clear(): void {
|
||||
this.handlers.clear();
|
||||
logger.debug("Cleared all task handlers");
|
||||
}
|
||||
}
|
||||
|
||||
// Export a convenience function to get the registry instance
export function getTaskHandlerRegistry(): TaskHandlerRegistry {
  // Delegates to the singleton accessor so callers need not reference the class.
  return TaskHandlerRegistry.getInstance();
}
|
||||
@@ -1,17 +0,0 @@
|
||||
import BadDataException from "Common/Types/Exception/BadDataException";
|
||||
import ObjectID from "Common/Types/ObjectID";
|
||||
import LocalCache from "Common/Server/Infrastructure/LocalCache";
|
||||
|
||||
export default class AIAgentUtil {
|
||||
public static getAIAgentId(): ObjectID {
|
||||
const id: string | undefined =
|
||||
LocalCache.getString("AI_AGENT", "AI_AGENT_ID") ||
|
||||
process.env["AI_AGENT_ID"];
|
||||
|
||||
if (!id) {
|
||||
throw new BadDataException("AI Agent ID not found");
|
||||
}
|
||||
|
||||
return new ObjectID(id);
|
||||
}
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
import { AI_AGENT_KEY } from "../Config";
|
||||
import AIAgentUtil from "./AIAgent";
|
||||
import { JSONObject } from "Common/Types/JSON";
|
||||
|
||||
export default class AIAgentAPIRequest {
|
||||
public static getDefaultRequestBody(): JSONObject {
|
||||
return {
|
||||
aiAgentKey: AI_AGENT_KEY,
|
||||
aiAgentId: AIAgentUtil.getAIAgentId().toString(),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,79 +0,0 @@
|
||||
import { ONEUPTIME_URL } from "../Config";
|
||||
import AIAgentAPIRequest from "./AIAgentAPIRequest";
|
||||
import URL from "Common/Types/API/URL";
|
||||
import API from "Common/Utils/API";
|
||||
import HTTPResponse from "Common/Types/API/HTTPResponse";
|
||||
import { JSONObject } from "Common/Types/JSON";
|
||||
import LogSeverity from "Common/Types/Log/LogSeverity";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
|
||||
export interface SendLogOptions {
|
||||
taskId: string;
|
||||
severity: LogSeverity;
|
||||
message: string;
|
||||
}
|
||||
|
||||
/**
 * Fire-and-forget client for posting per-task log lines back to the
 * OneUptime server. All senders return a boolean (success flag) instead
 * of throwing, so logging failures never abort task execution.
 */
export default class AIAgentTaskLog {
  // Memoized endpoint URL — built once from ONEUPTIME_URL on first use.
  private static createLogUrl: URL | null = null;

  private static getCreateLogUrl(): URL {
    if (!this.createLogUrl) {
      this.createLogUrl = URL.fromString(ONEUPTIME_URL.toString()).addRoute(
        "/api/ai-agent-task-log/create-log",
      );
    }
    return this.createLogUrl;
  }

  /**
   * Send a single log entry. Errors (network or non-success status) are
   * logged locally and reported as `false`; this never throws.
   */
  public static async sendLog(options: SendLogOptions): Promise<boolean> {
    try {
      const result: HTTPResponse<JSONObject> = await API.post({
        url: this.getCreateLogUrl(),
        data: {
          // Agent credentials required by the server for every call.
          ...AIAgentAPIRequest.getDefaultRequestBody(),
          taskId: options.taskId,
          severity: options.severity,
          message: options.message,
        },
      });

      if (!result.isSuccess()) {
        logger.error(`Failed to send log for task ${options.taskId}`);
        return false;
      }

      return true;
    } catch (error) {
      logger.error(`Error sending log for task ${options.taskId}:`);
      logger.error(error);
      return false;
    }
  }

  // Convenience wrapper: informational "task started" entry.
  public static async sendTaskStartedLog(taskId: string): Promise<boolean> {
    return this.sendLog({
      taskId,
      severity: LogSeverity.Information,
      message: "Task execution started",
    });
  }

  // Convenience wrapper: informational "task completed" entry.
  public static async sendTaskCompletedLog(taskId: string): Promise<boolean> {
    return this.sendLog({
      taskId,
      severity: LogSeverity.Information,
      message: "Task execution completed successfully",
    });
  }

  // Convenience wrapper: error entry with the failure reason.
  public static async sendTaskErrorLog(
    taskId: string,
    errorMessage: string,
  ): Promise<boolean> {
    return this.sendLog({
      taskId,
      severity: LogSeverity.Error,
      message: `Task execution failed: ${errorMessage}`,
    });
  }
}
|
||||
@@ -1,394 +0,0 @@
|
||||
import { ONEUPTIME_URL } from "../Config";
|
||||
import AIAgentAPIRequest from "./AIAgentAPIRequest";
|
||||
import URL from "Common/Types/API/URL";
|
||||
import API from "Common/Utils/API";
|
||||
import HTTPResponse from "Common/Types/API/HTTPResponse";
|
||||
import { JSONObject } from "Common/Types/JSON";
|
||||
import LlmType from "Common/Types/LLM/LlmType";
|
||||
import AIAgentTaskStatus from "Common/Types/AI/AIAgentTaskStatus";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
|
||||
// API Response types
|
||||
// ---- Raw API response shapes (internal to this module) ----
// Each carries an optional `message` used for server-side error text.

interface LLMConfigResponse {
  llmType: LlmType;
  apiKey?: string;
  baseUrl?: string;
  modelName?: string;
  message?: string;
}

interface ExceptionResponse {
  id: string;
  message: string;
  stackTrace: string;
  exceptionType: string;
  fingerprint: string;
}

interface ServiceResponse {
  id: string;
  name: string;
  description: string;
}

interface ExceptionDetailsResponse {
  exception: ExceptionResponse;
  // null when the exception is not linked to any service
  service: ServiceResponse | null;
  message?: string;
}

interface CodeRepositoryResponse {
  id: string;
  name: string;
  repositoryHostedAt: string;
  organizationName: string;
  repositoryName: string;
  mainBranchName: string;
  // subdirectory of the repo containing the service, if any
  servicePathInRepository: string | null;
  gitHubAppInstallationId: string | null;
}

interface CodeRepositoriesResponse {
  repositories: Array<CodeRepositoryResponse>;
  message?: string;
}

interface RepositoryTokenResponse {
  token: string;
  // ISO timestamp string; converted to Date in the exported type
  expiresAt: string;
  repositoryUrl: string;
  organizationName: string;
  repositoryName: string;
  message?: string;
}

interface RecordPullRequestResponse {
  success: boolean;
  pullRequestId: string;
  message?: string;
}

interface UpdateTaskStatusResponse {
  success?: boolean;
  message?: string;
}

// ---- Exported types (consumed by task handlers) ----

export interface LLMConfig {
  llmType: LlmType;
  apiKey?: string;
  baseUrl?: string;
  modelName?: string;
}

export interface ExceptionDetails {
  exception: {
    id: string;
    message: string;
    stackTrace: string;
    exceptionType: string;
    fingerprint: string;
  };
  // null when no service is linked to the exception
  service: {
    id: string;
    name: string;
    description: string;
  } | null;
}

export interface CodeRepositoryInfo {
  id: string;
  name: string;
  repositoryHostedAt: string;
  organizationName: string;
  repositoryName: string;
  mainBranchName: string;
  servicePathInRepository: string | null;
  gitHubAppInstallationId: string | null;
}

export interface RepositoryToken {
  token: string;
  expiresAt: Date;
  repositoryUrl: string;
  organizationName: string;
  repositoryName: string;
}

export interface RecordPullRequestOptions {
  taskId: string;
  codeRepositoryId: string;
  pullRequestUrl: string;
  pullRequestNumber?: number;
  pullRequestId?: number;
  title: string;
  description?: string;
  headRefName?: string;
  baseRefName?: string;
}

export interface RecordPullRequestResult {
  success: boolean;
  pullRequestId: string;
}
|
||||
|
||||
export default class BackendAPI {
|
||||
private baseUrl: URL;
|
||||
|
||||
public constructor() {
|
||||
this.baseUrl = URL.fromString(ONEUPTIME_URL.toString());
|
||||
}
|
||||
|
||||
// Get LLM configuration for a project
|
||||
public async getLLMConfig(projectId: string): Promise<LLMConfig> {
|
||||
const url: URL = URL.fromURL(this.baseUrl).addRoute(
|
||||
"/api/ai-agent-data/get-llm-config",
|
||||
);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: {
|
||||
...AIAgentAPIRequest.getDefaultRequestBody(),
|
||||
projectId: projectId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
const data: LLMConfigResponse =
|
||||
response.data as unknown as LLMConfigResponse;
|
||||
const errorMessage: string = data?.message || "Failed to get LLM config";
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
const data: LLMConfigResponse =
|
||||
response.data as unknown as LLMConfigResponse;
|
||||
|
||||
logger.debug(`Got LLM config for project ${projectId}: ${data.llmType}`);
|
||||
|
||||
const llmConfig: LLMConfig = {
|
||||
llmType: data.llmType,
|
||||
};
|
||||
|
||||
if (data.apiKey) {
|
||||
llmConfig.apiKey = data.apiKey;
|
||||
}
|
||||
|
||||
if (data.baseUrl) {
|
||||
llmConfig.baseUrl = data.baseUrl;
|
||||
}
|
||||
|
||||
if (data.modelName) {
|
||||
llmConfig.modelName = data.modelName;
|
||||
}
|
||||
|
||||
return llmConfig;
|
||||
}
|
||||
|
||||
// Get exception details with telemetry service info
|
||||
public async getExceptionDetails(
|
||||
exceptionId: string,
|
||||
): Promise<ExceptionDetails> {
|
||||
const url: URL = URL.fromURL(this.baseUrl).addRoute(
|
||||
"/api/ai-agent-data/get-exception-details",
|
||||
);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: {
|
||||
...AIAgentAPIRequest.getDefaultRequestBody(),
|
||||
exceptionId: exceptionId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
const data: ExceptionDetailsResponse =
|
||||
response.data as unknown as ExceptionDetailsResponse;
|
||||
const errorMessage: string =
|
||||
data?.message || "Failed to get exception details";
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
const data: ExceptionDetailsResponse =
|
||||
response.data as unknown as ExceptionDetailsResponse;
|
||||
|
||||
logger.debug(
|
||||
`Got exception details for ${exceptionId}: ${data.exception.message.substring(0, 100)}`,
|
||||
);
|
||||
|
||||
return {
|
||||
exception: {
|
||||
id: data.exception.id,
|
||||
message: data.exception.message,
|
||||
stackTrace: data.exception.stackTrace,
|
||||
exceptionType: data.exception.exceptionType,
|
||||
fingerprint: data.exception.fingerprint,
|
||||
},
|
||||
service: data.service
|
||||
? {
|
||||
id: data.service.id,
|
||||
name: data.service.name,
|
||||
description: data.service.description,
|
||||
}
|
||||
: null,
|
||||
};
|
||||
}
|
||||
|
||||
// Get code repositories linked to a service
|
||||
public async getCodeRepositories(
|
||||
serviceId: string,
|
||||
): Promise<Array<CodeRepositoryInfo>> {
|
||||
const url: URL = URL.fromURL(this.baseUrl).addRoute(
|
||||
"/api/ai-agent-data/get-code-repositories",
|
||||
);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: {
|
||||
...AIAgentAPIRequest.getDefaultRequestBody(),
|
||||
serviceId: serviceId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
const data: CodeRepositoriesResponse =
|
||||
response.data as unknown as CodeRepositoriesResponse;
|
||||
const errorMessage: string =
|
||||
data?.message || "Failed to get code repositories";
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
const data: CodeRepositoriesResponse =
|
||||
response.data as unknown as CodeRepositoriesResponse;
|
||||
|
||||
logger.debug(
|
||||
`Got ${data.repositories.length} code repositories for service ${serviceId}`,
|
||||
);
|
||||
|
||||
return data.repositories.map((repo: CodeRepositoryResponse) => {
|
||||
return {
|
||||
id: repo.id,
|
||||
name: repo.name,
|
||||
repositoryHostedAt: repo.repositoryHostedAt,
|
||||
organizationName: repo.organizationName,
|
||||
repositoryName: repo.repositoryName,
|
||||
mainBranchName: repo.mainBranchName,
|
||||
servicePathInRepository: repo.servicePathInRepository,
|
||||
gitHubAppInstallationId: repo.gitHubAppInstallationId,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Get access token for a code repository
|
||||
public async getRepositoryToken(
|
||||
codeRepositoryId: string,
|
||||
): Promise<RepositoryToken> {
|
||||
const url: URL = URL.fromURL(this.baseUrl).addRoute(
|
||||
"/api/ai-agent-data/get-repository-token",
|
||||
);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: {
|
||||
...AIAgentAPIRequest.getDefaultRequestBody(),
|
||||
codeRepositoryId: codeRepositoryId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
const data: RepositoryTokenResponse =
|
||||
response.data as unknown as RepositoryTokenResponse;
|
||||
const errorMessage: string =
|
||||
data?.message || "Failed to get repository token";
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
const data: RepositoryTokenResponse =
|
||||
response.data as unknown as RepositoryTokenResponse;
|
||||
|
||||
logger.debug(
|
||||
`Got access token for repository ${data.organizationName}/${data.repositoryName}`,
|
||||
);
|
||||
|
||||
return {
|
||||
token: data.token,
|
||||
expiresAt: new Date(data.expiresAt),
|
||||
repositoryUrl: data.repositoryUrl,
|
||||
organizationName: data.organizationName,
|
||||
repositoryName: data.repositoryName,
|
||||
};
|
||||
}
|
||||
|
||||
// Record a pull request created by the AI Agent
|
||||
public async recordPullRequest(
|
||||
options: RecordPullRequestOptions,
|
||||
): Promise<RecordPullRequestResult> {
|
||||
const url: URL = URL.fromURL(this.baseUrl).addRoute(
|
||||
"/api/ai-agent-data/record-pull-request",
|
||||
);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: {
|
||||
...AIAgentAPIRequest.getDefaultRequestBody(),
|
||||
taskId: options.taskId,
|
||||
codeRepositoryId: options.codeRepositoryId,
|
||||
pullRequestUrl: options.pullRequestUrl,
|
||||
pullRequestNumber: options.pullRequestNumber,
|
||||
pullRequestId: options.pullRequestId,
|
||||
title: options.title,
|
||||
description: options.description,
|
||||
headRefName: options.headRefName,
|
||||
baseRefName: options.baseRefName,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
const data: RecordPullRequestResponse =
|
||||
response.data as unknown as RecordPullRequestResponse;
|
||||
const errorMessage: string =
|
||||
data?.message || "Failed to record pull request";
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
const data: RecordPullRequestResponse =
|
||||
response.data as unknown as RecordPullRequestResponse;
|
||||
|
||||
logger.debug(`Recorded pull request: ${options.pullRequestUrl}`);
|
||||
|
||||
return {
|
||||
success: data.success,
|
||||
pullRequestId: data.pullRequestId,
|
||||
};
|
||||
}
|
||||
|
||||
// Update task status (wrapper around existing endpoint)
|
||||
public async updateTaskStatus(
|
||||
taskId: string,
|
||||
status: AIAgentTaskStatus,
|
||||
statusMessage?: string,
|
||||
): Promise<void> {
|
||||
const url: URL = URL.fromURL(this.baseUrl).addRoute(
|
||||
"/api/ai-agent-task/update-task-status",
|
||||
);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: {
|
||||
...AIAgentAPIRequest.getDefaultRequestBody(),
|
||||
taskId: taskId,
|
||||
status: status,
|
||||
statusMessage: statusMessage,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
const data: UpdateTaskStatusResponse =
|
||||
response.data as unknown as UpdateTaskStatusResponse;
|
||||
const errorMessage: string =
|
||||
data?.message || "Failed to update task status";
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
logger.debug(`Updated task ${taskId} status to ${status}`);
|
||||
}
|
||||
}
|
||||
@@ -1,369 +0,0 @@
|
||||
import API from "Common/Utils/API";
|
||||
import HTTPResponse from "Common/Types/API/HTTPResponse";
|
||||
import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
|
||||
import URL from "Common/Types/API/URL";
|
||||
import { JSONObject, JSONArray } from "Common/Types/JSON";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import Headers from "Common/Types/API/Headers";
|
||||
import TaskLogger from "./TaskLogger";
|
||||
|
||||
// Input for PullRequestCreator.createPullRequest.
export interface PullRequestOptions {
  token: string; // GitHub access token used for the API call
  organizationName: string;
  repositoryName: string;
  baseBranch: string; // branch the PR merges into
  headBranch: string; // branch containing the changes
  title: string;
  body: string; // PR description (markdown)
  draft?: boolean; // defaults to false when omitted
}

// Subset of GitHub's pull-request payload surfaced to callers.
export interface PullRequestResult {
  id: number; // GitHub's numeric PR id (global)
  number: number; // per-repository PR number
  url: string; // API URL of the PR
  htmlUrl: string; // browser URL of the PR
  state: string;
  title: string;
}
|
||||
|
||||
export default class PullRequestCreator {
|
||||
private static readonly GITHUB_API_BASE: string = "https://api.github.com";
|
||||
private static readonly GITHUB_API_VERSION: string = "2022-11-28";
|
||||
|
||||
private logger: TaskLogger | null = null;
|
||||
|
||||
public constructor(taskLogger?: TaskLogger) {
|
||||
if (taskLogger) {
|
||||
this.logger = taskLogger;
|
||||
}
|
||||
}
|
||||
|
||||
// Create a pull request on GitHub
|
||||
public async createPullRequest(
|
||||
options: PullRequestOptions,
|
||||
): Promise<PullRequestResult> {
|
||||
await this.log(
|
||||
`Creating pull request: ${options.title} (${options.headBranch} -> ${options.baseBranch})`,
|
||||
);
|
||||
|
||||
const url: URL = URL.fromString(
|
||||
`${PullRequestCreator.GITHUB_API_BASE}/repos/${options.organizationName}/${options.repositoryName}/pulls`,
|
||||
);
|
||||
|
||||
const headers: Headers = this.getHeaders(options.token);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: {
|
||||
title: options.title,
|
||||
body: options.body,
|
||||
head: options.headBranch,
|
||||
base: options.baseBranch,
|
||||
draft: options.draft || false,
|
||||
},
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
const errorData: JSONObject = response.data as JSONObject;
|
||||
const errorMessage: string =
|
||||
(errorData["message"] as string) || "Failed to create pull request";
|
||||
logger.error(`GitHub API error: ${errorMessage}`);
|
||||
throw new Error(`Failed to create pull request: ${errorMessage}`);
|
||||
}
|
||||
|
||||
const data: JSONObject = response.data as JSONObject;
|
||||
|
||||
const result: PullRequestResult = {
|
||||
id: data["id"] as number,
|
||||
number: data["number"] as number,
|
||||
url: data["url"] as string,
|
||||
htmlUrl: data["html_url"] as string,
|
||||
state: data["state"] as string,
|
||||
title: data["title"] as string,
|
||||
};
|
||||
|
||||
await this.log(`Pull request created: ${result.htmlUrl}`);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Get an existing pull request by number
|
||||
public async getPullRequest(
|
||||
token: string,
|
||||
organizationName: string,
|
||||
repositoryName: string,
|
||||
pullNumber: number,
|
||||
): Promise<PullRequestResult | null> {
|
||||
const url: URL = URL.fromString(
|
||||
`${PullRequestCreator.GITHUB_API_BASE}/repos/${organizationName}/${repositoryName}/pulls/${pullNumber}`,
|
||||
);
|
||||
|
||||
const headers: Headers = this.getHeaders(token);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.get({
|
||||
url,
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const data: JSONObject = response.data as JSONObject;
|
||||
|
||||
return {
|
||||
id: data["id"] as number,
|
||||
number: data["number"] as number,
|
||||
url: data["url"] as string,
|
||||
htmlUrl: data["html_url"] as string,
|
||||
state: data["state"] as string,
|
||||
title: data["title"] as string,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if a pull request already exists for a branch
|
||||
public async findExistingPullRequest(
|
||||
token: string,
|
||||
organizationName: string,
|
||||
repositoryName: string,
|
||||
headBranch: string,
|
||||
baseBranch: string,
|
||||
): Promise<PullRequestResult | null> {
|
||||
const url: URL = URL.fromString(
|
||||
`${PullRequestCreator.GITHUB_API_BASE}/repos/${organizationName}/${repositoryName}/pulls`,
|
||||
);
|
||||
|
||||
const headers: Headers = this.getHeaders(token);
|
||||
|
||||
const response: HTTPResponse<JSONArray> | HTTPErrorResponse = await API.get(
|
||||
{
|
||||
url,
|
||||
headers,
|
||||
params: {
|
||||
head: `${organizationName}:${headBranch}`,
|
||||
base: baseBranch,
|
||||
state: "open",
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const pulls: JSONArray = response.data as JSONArray;
|
||||
|
||||
if (pulls.length > 0) {
|
||||
const data: JSONObject = pulls[0] as JSONObject;
|
||||
return {
|
||||
id: data["id"] as number,
|
||||
number: data["number"] as number,
|
||||
url: data["url"] as string,
|
||||
htmlUrl: data["html_url"] as string,
|
||||
state: data["state"] as string,
|
||||
title: data["title"] as string,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// Update an existing pull request
|
||||
public async updatePullRequest(
|
||||
token: string,
|
||||
organizationName: string,
|
||||
repositoryName: string,
|
||||
pullNumber: number,
|
||||
updates: { title?: string; body?: string; state?: "open" | "closed" },
|
||||
): Promise<PullRequestResult> {
|
||||
const url: URL = URL.fromString(
|
||||
`${PullRequestCreator.GITHUB_API_BASE}/repos/${organizationName}/${repositoryName}/pulls/${pullNumber}`,
|
||||
);
|
||||
|
||||
const headers: Headers = this.getHeaders(token);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.patch({
|
||||
url,
|
||||
data: updates,
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
const errorData: JSONObject = response.data as JSONObject;
|
||||
const errorMessage: string =
|
||||
(errorData["message"] as string) || "Failed to update pull request";
|
||||
throw new Error(`Failed to update pull request: ${errorMessage}`);
|
||||
}
|
||||
|
||||
const data: JSONObject = response.data as JSONObject;
|
||||
|
||||
return {
|
||||
id: data["id"] as number,
|
||||
number: data["number"] as number,
|
||||
url: data["url"] as string,
|
||||
htmlUrl: data["html_url"] as string,
|
||||
state: data["state"] as string,
|
||||
title: data["title"] as string,
|
||||
};
|
||||
}
|
||||
|
||||
// Add labels to a pull request
|
||||
public async addLabels(
|
||||
token: string,
|
||||
organizationName: string,
|
||||
repositoryName: string,
|
||||
issueNumber: number,
|
||||
labels: Array<string>,
|
||||
): Promise<void> {
|
||||
await this.log(`Adding labels to PR #${issueNumber}: ${labels.join(", ")}`);
|
||||
|
||||
const url: URL = URL.fromString(
|
||||
`${PullRequestCreator.GITHUB_API_BASE}/repos/${organizationName}/${repositoryName}/issues/${issueNumber}/labels`,
|
||||
);
|
||||
|
||||
const headers: Headers = this.getHeaders(token);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: { labels },
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
logger.warn(`Failed to add labels to PR #${issueNumber}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Add reviewers to a pull request
|
||||
public async requestReviewers(
|
||||
token: string,
|
||||
organizationName: string,
|
||||
repositoryName: string,
|
||||
pullNumber: number,
|
||||
reviewers: Array<string>,
|
||||
teamReviewers?: Array<string>,
|
||||
): Promise<void> {
|
||||
await this.log(`Requesting reviewers for PR #${pullNumber}`);
|
||||
|
||||
const url: URL = URL.fromString(
|
||||
`${PullRequestCreator.GITHUB_API_BASE}/repos/${organizationName}/${repositoryName}/pulls/${pullNumber}/requested_reviewers`,
|
||||
);
|
||||
|
||||
const headers: Headers = this.getHeaders(token);
|
||||
|
||||
const data: JSONObject = {
|
||||
reviewers,
|
||||
};
|
||||
|
||||
if (teamReviewers && teamReviewers.length > 0) {
|
||||
data["team_reviewers"] = teamReviewers;
|
||||
}
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data,
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
logger.warn(`Failed to request reviewers for PR #${pullNumber}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Add a comment to a pull request
|
||||
public async addComment(
|
||||
token: string,
|
||||
organizationName: string,
|
||||
repositoryName: string,
|
||||
issueNumber: number,
|
||||
comment: string,
|
||||
): Promise<void> {
|
||||
await this.log(`Adding comment to PR #${issueNumber}`);
|
||||
|
||||
const url: URL = URL.fromString(
|
||||
`${PullRequestCreator.GITHUB_API_BASE}/repos/${organizationName}/${repositoryName}/issues/${issueNumber}/comments`,
|
||||
);
|
||||
|
||||
const headers: Headers = this.getHeaders(token);
|
||||
|
||||
const response: HTTPResponse<JSONObject> = await API.post({
|
||||
url,
|
||||
data: { body: comment },
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.isSuccess()) {
|
||||
logger.warn(`Failed to add comment to PR #${issueNumber}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Generate PR body from exception details
|
||||
public static generatePRBody(data: {
|
||||
exceptionMessage: string;
|
||||
exceptionType: string;
|
||||
stackTrace: string;
|
||||
serviceName: string;
|
||||
summary: string;
|
||||
}): string {
|
||||
return `## Exception Fix
|
||||
|
||||
This pull request was automatically generated by OneUptime AI Agent to fix an exception.
|
||||
|
||||
### Exception Details
|
||||
|
||||
**Service:** ${data.serviceName}
|
||||
**Type:** ${data.exceptionType}
|
||||
**Message:** ${data.exceptionMessage}
|
||||
|
||||
### Stack Trace
|
||||
|
||||
\`\`\`
|
||||
${data.stackTrace.substring(0, 2000)}${data.stackTrace.length > 2000 ? "\n...(truncated)" : ""}
|
||||
\`\`\`
|
||||
|
||||
### Summary of Changes
|
||||
|
||||
${data.summary}
|
||||
|
||||
---
|
||||
|
||||
*This PR was automatically generated by [OneUptime AI Agent](https://oneuptime.com)*`;
|
||||
}
|
||||
|
||||
// Generate PR title from exception
|
||||
public static generatePRTitle(exceptionMessage: string): string {
|
||||
// Truncate and clean the message for use as a title
|
||||
const cleanMessage: string = exceptionMessage
|
||||
.replace(/\n/g, " ")
|
||||
.replace(/\s+/g, " ")
|
||||
.trim();
|
||||
|
||||
const maxLength: number = 70;
|
||||
if (cleanMessage.length <= maxLength) {
|
||||
return `fix: ${cleanMessage}`;
|
||||
}
|
||||
|
||||
return `fix: ${cleanMessage.substring(0, maxLength - 3)}...`;
|
||||
}
|
||||
|
||||
// Helper method to get GitHub API headers
|
||||
private getHeaders(token: string): Headers {
|
||||
return {
|
||||
Authorization: `Bearer ${token}`,
|
||||
Accept: "application/vnd.github+json",
|
||||
"X-GitHub-Api-Version": PullRequestCreator.GITHUB_API_VERSION,
|
||||
"Content-Type": "application/json",
|
||||
};
|
||||
}
|
||||
|
||||
// Helper method for logging
|
||||
private async log(message: string): Promise<void> {
|
||||
if (this.logger) {
|
||||
await this.logger.info(message);
|
||||
} else {
|
||||
logger.debug(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,313 +0,0 @@
|
||||
import Execute from "Common/Server/Utils/Execute";
|
||||
import LocalFile from "Common/Server/Utils/LocalFile";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import path from "path";
|
||||
import TaskLogger from "./TaskLogger";
|
||||
|
||||
// Result of RepositoryManager.cloneRepository.
export interface CloneResult {
  workingDirectory: string; // directory the clone was performed in
  repositoryPath: string; // workingDirectory joined with the repo name
}

// Connection details for a repository the agent will clone and push to.
export interface RepositoryConfig {
  organizationName: string;
  repositoryName: string;
  token: string; // access token embedded into the clone URL
  repositoryUrl?: string; // explicit clone URL; defaults to GitHub when absent
}
|
||||
|
||||
export default class RepositoryManager {
|
||||
private logger: TaskLogger | null = null;
|
||||
|
||||
public constructor(taskLogger?: TaskLogger) {
|
||||
if (taskLogger) {
|
||||
this.logger = taskLogger;
|
||||
}
|
||||
}
|
||||
|
||||
// Clone a repository with token-based authentication
|
||||
public async cloneRepository(
|
||||
config: RepositoryConfig,
|
||||
workDir: string,
|
||||
): Promise<CloneResult> {
|
||||
await this.log(
|
||||
`Cloning repository ${config.organizationName}/${config.repositoryName}...`,
|
||||
);
|
||||
|
||||
// Build the authenticated URL
|
||||
const authUrl: string = this.buildAuthenticatedUrl(config);
|
||||
|
||||
// Ensure the working directory exists
|
||||
await LocalFile.makeDirectory(workDir);
|
||||
|
||||
// Clone the repository
|
||||
await this.runGitCommand(workDir, ["clone", authUrl]);
|
||||
|
||||
const repositoryPath: string = path.join(workDir, config.repositoryName);
|
||||
|
||||
await this.log(`Repository cloned to ${repositoryPath}`);
|
||||
|
||||
// Set git config for the repository
|
||||
await this.setGitConfig(repositoryPath);
|
||||
|
||||
return {
|
||||
workingDirectory: workDir,
|
||||
repositoryPath: repositoryPath,
|
||||
};
|
||||
}
|
||||
|
||||
// Build URL with embedded token for authentication
|
||||
private buildAuthenticatedUrl(config: RepositoryConfig): string {
|
||||
if (config.repositoryUrl) {
|
||||
// If URL is provided, inject token
|
||||
const url: URL = new URL(config.repositoryUrl);
|
||||
url.username = "x-access-token";
|
||||
url.password = config.token;
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
// Default to GitHub
|
||||
return `https://x-access-token:${config.token}@github.com/${config.organizationName}/${config.repositoryName}.git`;
|
||||
}
|
||||
|
||||
// Set git user config for commits
|
||||
private async setGitConfig(repoPath: string): Promise<void> {
|
||||
await this.runGitCommand(repoPath, [
|
||||
"config",
|
||||
"user.name",
|
||||
"OneUptime AI Agent",
|
||||
]);
|
||||
|
||||
await this.runGitCommand(repoPath, [
|
||||
"config",
|
||||
"user.email",
|
||||
"ai-agent@oneuptime.com",
|
||||
]);
|
||||
}
|
||||
|
||||
// Create a new branch
|
||||
public async createBranch(
|
||||
repoPath: string,
|
||||
branchName: string,
|
||||
): Promise<void> {
|
||||
await this.log(`Creating branch: ${branchName}`);
|
||||
|
||||
await this.runGitCommand(repoPath, ["checkout", "-b", branchName]);
|
||||
|
||||
await this.log(`Branch ${branchName} created and checked out`);
|
||||
}
|
||||
|
||||
// Checkout existing branch
|
||||
public async checkoutBranch(
|
||||
repoPath: string,
|
||||
branchName: string,
|
||||
): Promise<void> {
|
||||
await this.log(`Checking out branch: ${branchName}`);
|
||||
|
||||
await this.runGitCommand(repoPath, ["checkout", branchName]);
|
||||
}
|
||||
|
||||
// Create branch if doesn't exist, or checkout if it does
|
||||
public async createOrCheckoutBranch(
|
||||
repoPath: string,
|
||||
branchName: string,
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Check if branch exists locally
|
||||
await this.runGitCommand(repoPath, ["rev-parse", "--verify", branchName]);
|
||||
await this.checkoutBranch(repoPath, branchName);
|
||||
} catch {
|
||||
// Branch doesn't exist, create it
|
||||
await this.createBranch(repoPath, branchName);
|
||||
}
|
||||
}
|
||||
|
||||
// Add all changes to staging
|
||||
public async addAllChanges(repoPath: string): Promise<void> {
|
||||
await this.log("Adding all changes to git staging...");
|
||||
|
||||
await this.runGitCommand(repoPath, ["add", "-A"]);
|
||||
}
|
||||
|
||||
// Check if there are any changes to commit
|
||||
public async hasChanges(repoPath: string): Promise<boolean> {
|
||||
try {
|
||||
const status: string = await this.runGitCommand(repoPath, [
|
||||
"status",
|
||||
"--porcelain",
|
||||
]);
|
||||
return status.trim().length > 0;
|
||||
} catch (error) {
|
||||
logger.error("Error checking for changes:");
|
||||
logger.error(error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Get list of changed files
|
||||
public async getChangedFiles(repoPath: string): Promise<Array<string>> {
|
||||
const status: string = await this.runGitCommand(repoPath, [
|
||||
"status",
|
||||
"--porcelain",
|
||||
]);
|
||||
|
||||
if (!status.trim()) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return status
|
||||
.split("\n")
|
||||
.filter((line: string) => {
|
||||
return line.trim().length > 0;
|
||||
})
|
||||
.map((line: string) => {
|
||||
// Status output format is "XY filename" where XY is the status
|
||||
return line.substring(3).trim();
|
||||
});
|
||||
}
|
||||
|
||||
// Commit changes
|
||||
public async commitChanges(repoPath: string, message: string): Promise<void> {
|
||||
await this.log(`Committing changes: ${message.substring(0, 50)}...`);
|
||||
|
||||
await Execute.executeCommandFile({
|
||||
command: "git",
|
||||
args: ["commit", "-m", message],
|
||||
cwd: repoPath,
|
||||
});
|
||||
|
||||
await this.log("Changes committed successfully");
|
||||
}
|
||||
|
||||
// Push branch to remote
|
||||
public async pushBranch(
|
||||
repoPath: string,
|
||||
branchName: string,
|
||||
config: RepositoryConfig,
|
||||
): Promise<void> {
|
||||
await this.log(`Pushing branch ${branchName} to remote...`);
|
||||
|
||||
// Set the remote URL with authentication
|
||||
const authUrl: string = this.buildAuthenticatedUrl(config);
|
||||
|
||||
// Update the remote URL
|
||||
await this.runGitCommand(repoPath, [
|
||||
"remote",
|
||||
"set-url",
|
||||
"origin",
|
||||
authUrl,
|
||||
]);
|
||||
|
||||
// Push with tracking
|
||||
await this.runGitCommand(repoPath, ["push", "-u", "origin", branchName]);
|
||||
|
||||
await this.log(`Branch ${branchName} pushed to remote`);
|
||||
}
|
||||
|
||||
// Get the current branch name
|
||||
public async getCurrentBranch(repoPath: string): Promise<string> {
|
||||
const branch: string = await this.runGitCommand(repoPath, [
|
||||
"rev-parse",
|
||||
"--abbrev-ref",
|
||||
"HEAD",
|
||||
]);
|
||||
return branch.trim();
|
||||
}
|
||||
|
||||
// Get the current commit hash
|
||||
public async getCurrentCommitHash(repoPath: string): Promise<string> {
|
||||
const hash: string = await this.runGitCommand(repoPath, [
|
||||
"rev-parse",
|
||||
"HEAD",
|
||||
]);
|
||||
return hash.trim();
|
||||
}
|
||||
|
||||
// Pull latest changes from remote
|
||||
public async pullChanges(repoPath: string): Promise<void> {
|
||||
await this.log("Pulling latest changes from remote...");
|
||||
|
||||
await this.runGitCommand(repoPath, ["pull"]);
|
||||
}
|
||||
|
||||
// Discard all local changes
|
||||
public async discardChanges(repoPath: string): Promise<void> {
|
||||
await this.log("Discarding all local changes...");
|
||||
|
||||
await this.runGitCommand(repoPath, ["checkout", "."]);
|
||||
await this.runGitCommand(repoPath, ["clean", "-fd"]);
|
||||
}
|
||||
|
||||
// Clean up the repository directory
|
||||
public async cleanup(workDir: string): Promise<void> {
|
||||
await this.log(`Cleaning up workspace: ${workDir}`);
|
||||
|
||||
try {
|
||||
await LocalFile.deleteDirectory(workDir);
|
||||
await this.log("Workspace cleaned up successfully");
|
||||
} catch (error) {
|
||||
logger.error(`Error cleaning up workspace ${workDir}:`);
|
||||
logger.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
// Get diff of current changes
|
||||
public async getDiff(repoPath: string): Promise<string> {
|
||||
try {
|
||||
const diff: string = await this.runGitCommand(repoPath, ["diff"]);
|
||||
return diff;
|
||||
} catch (error) {
|
||||
logger.error("Error getting diff:");
|
||||
logger.error(error);
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
// Get staged diff
|
||||
public async getStagedDiff(repoPath: string): Promise<string> {
|
||||
try {
|
||||
const diff: string = await this.runGitCommand(repoPath, [
|
||||
"diff",
|
||||
"--staged",
|
||||
]);
|
||||
return diff;
|
||||
} catch (error) {
|
||||
logger.error("Error getting staged diff:");
|
||||
logger.error(error);
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
// Helper method to run git commands
|
||||
private async runGitCommand(
|
||||
repoPath: string,
|
||||
args: Array<string>,
|
||||
): Promise<string> {
|
||||
const cwd: string = path.resolve(repoPath);
|
||||
|
||||
const logArgs: Array<string> = args.map((arg: string) => {
|
||||
// Mask tokens in URLs
|
||||
if (arg.includes("x-access-token:")) {
|
||||
return arg.replace(/x-access-token:[^@]+@/, "x-access-token:***@");
|
||||
}
|
||||
return arg.includes(" ") ? `"${arg}"` : arg;
|
||||
});
|
||||
|
||||
logger.debug(`Executing git command in ${cwd}: git ${logArgs.join(" ")}`);
|
||||
|
||||
return Execute.executeCommandFile({
|
||||
command: "git",
|
||||
args,
|
||||
cwd,
|
||||
});
|
||||
}
|
||||
|
||||
// Helper method for logging
|
||||
private async log(message: string): Promise<void> {
|
||||
if (this.logger) {
|
||||
await this.logger.info(message);
|
||||
} else {
|
||||
logger.debug(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,229 +0,0 @@
|
||||
import { ONEUPTIME_URL } from "../Config";
|
||||
import AIAgentAPIRequest from "./AIAgentAPIRequest";
|
||||
import URL from "Common/Types/API/URL";
|
||||
import API from "Common/Utils/API";
|
||||
import HTTPResponse from "Common/Types/API/HTTPResponse";
|
||||
import { JSONObject } from "Common/Types/JSON";
|
||||
import LogSeverity from "Common/Types/Log/LogSeverity";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import OneUptimeDate from "Common/Types/Date";
|
||||
|
||||
// Configuration for TaskLogger.
export interface TaskLoggerOptions {
  taskId: string; // AI Agent task the log entries belong to
  context?: string; // optional label prefixed to every message
  batchSize?: number; // entries buffered before auto-flush (default 10)
  flushIntervalMs?: number; // periodic flush interval (default 5000 ms)
}

// One buffered log entry awaiting flush to the server.
interface LogEntry {
  severity: LogSeverity;
  message: string;
  timestamp: Date; // capture time, used when formatting the message
}
|
||||
|
||||
export default class TaskLogger {
|
||||
private taskId: string;
|
||||
private context: string | undefined;
|
||||
private logBuffer: Array<LogEntry> = [];
|
||||
private batchSize: number;
|
||||
private flushIntervalMs: number;
|
||||
private flushTimer: ReturnType<typeof setInterval> | null = null;
|
||||
private createLogUrl: URL | null = null;
|
||||
|
||||
public constructor(options: TaskLoggerOptions) {
|
||||
this.taskId = options.taskId;
|
||||
this.context = options.context;
|
||||
this.batchSize = options.batchSize || 10;
|
||||
this.flushIntervalMs = options.flushIntervalMs || 5000; // 5 seconds default
|
||||
|
||||
// Start periodic flush timer
|
||||
this.startFlushTimer();
|
||||
}
|
||||
|
||||
private getCreateLogUrl(): URL {
|
||||
if (!this.createLogUrl) {
|
||||
this.createLogUrl = URL.fromString(ONEUPTIME_URL.toString()).addRoute(
|
||||
"/api/ai-agent-task-log/create-log",
|
||||
);
|
||||
}
|
||||
return this.createLogUrl;
|
||||
}
|
||||
|
||||
private startFlushTimer(): void {
|
||||
this.flushTimer = setInterval(() => {
|
||||
this.flush().catch((err: Error) => {
|
||||
logger.error(`Error flushing logs: ${err.message}`);
|
||||
});
|
||||
}, this.flushIntervalMs);
|
||||
}
|
||||
|
||||
private stopFlushTimer(): void {
|
||||
if (this.flushTimer) {
|
||||
clearInterval(this.flushTimer);
|
||||
this.flushTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
private formatMessage(
|
||||
severity: LogSeverity,
|
||||
message: string,
|
||||
timestamp: Date,
|
||||
): string {
|
||||
const timestampStr: string = OneUptimeDate.toDateTimeLocalString(timestamp);
|
||||
const severityStr: string = severity.toUpperCase().padEnd(7);
|
||||
const contextStr: string = this.context ? `[${this.context}] ` : "";
|
||||
|
||||
return `[${timestampStr}] [${severityStr}] ${contextStr}${message}`;
|
||||
}
|
||||
|
||||
private addToBuffer(severity: LogSeverity, message: string): void {
|
||||
const entry: LogEntry = {
|
||||
severity,
|
||||
message,
|
||||
timestamp: OneUptimeDate.getCurrentDate(),
|
||||
};
|
||||
|
||||
this.logBuffer.push(entry);
|
||||
|
||||
// Also log locally for debugging
|
||||
logger.debug(
|
||||
`[Task ${this.taskId}] ${this.formatMessage(entry.severity, entry.message, entry.timestamp)}`,
|
||||
);
|
||||
|
||||
// Auto-flush if buffer is full
|
||||
if (this.logBuffer.length >= this.batchSize) {
|
||||
this.flush().catch((err: Error) => {
|
||||
logger.error(`Error auto-flushing logs: ${err.message}`);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async sendLogToServer(
|
||||
severity: LogSeverity,
|
||||
message: string,
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const result: HTTPResponse<JSONObject> = await API.post({
|
||||
url: this.getCreateLogUrl(),
|
||||
data: {
|
||||
...AIAgentAPIRequest.getDefaultRequestBody(),
|
||||
taskId: this.taskId,
|
||||
severity: severity,
|
||||
message: message,
|
||||
},
|
||||
});
|
||||
|
||||
if (!result.isSuccess()) {
|
||||
logger.error(`Failed to send log for task ${this.taskId}`);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error(`Error sending log for task ${this.taskId}:`);
|
||||
logger.error(error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Public logging methods
|
||||
public async debug(message: string): Promise<void> {
|
||||
this.addToBuffer(LogSeverity.Debug, message);
|
||||
}
|
||||
|
||||
public async info(message: string): Promise<void> {
|
||||
this.addToBuffer(LogSeverity.Information, message);
|
||||
}
|
||||
|
||||
public async warning(message: string): Promise<void> {
|
||||
this.addToBuffer(LogSeverity.Warning, message);
|
||||
}
|
||||
|
||||
public async error(message: string): Promise<void> {
|
||||
this.addToBuffer(LogSeverity.Error, message);
|
||||
// Immediately flush on errors
|
||||
await this.flush();
|
||||
}
|
||||
|
||||
public async trace(message: string): Promise<void> {
|
||||
this.addToBuffer(LogSeverity.Trace, message);
|
||||
}
|
||||
|
||||
// Log output from external processes like OpenCode
|
||||
public async logProcessOutput(
|
||||
processName: string,
|
||||
output: string,
|
||||
): Promise<void> {
|
||||
const lines: Array<string> = output.split("\n").filter((line: string) => {
|
||||
return line.trim().length > 0;
|
||||
});
|
||||
|
||||
for (const line of lines) {
|
||||
this.addToBuffer(LogSeverity.Information, `[${processName}] ${line}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Log a code block (useful for stack traces, code snippets, etc.)
|
||||
public async logCodeBlock(
|
||||
title: string,
|
||||
code: string,
|
||||
severity: LogSeverity = LogSeverity.Information,
|
||||
): Promise<void> {
|
||||
const formattedCode: string = `${title}:\n\`\`\`\n${code}\n\`\`\``;
|
||||
this.addToBuffer(severity, formattedCode);
|
||||
}
|
||||
|
||||
// Flush all buffered logs to the server
|
||||
public async flush(): Promise<void> {
|
||||
if (this.logBuffer.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get all entries and clear buffer
|
||||
const entries: Array<LogEntry> = [...this.logBuffer];
|
||||
this.logBuffer = [];
|
||||
|
||||
// Send each log entry separately to preserve individual log lines
|
||||
for (const entry of entries) {
|
||||
const formattedMessage: string = this.formatMessage(
|
||||
entry.severity,
|
||||
entry.message,
|
||||
entry.timestamp,
|
||||
);
|
||||
await this.sendLogToServer(entry.severity, formattedMessage);
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup method - call when task is done
|
||||
public async dispose(): Promise<void> {
|
||||
this.stopFlushTimer();
|
||||
await this.flush();
|
||||
}
|
||||
|
||||
// Helper methods for common log patterns
|
||||
public async logStepStart(stepName: string): Promise<void> {
|
||||
await this.info(`Starting: ${stepName}`);
|
||||
}
|
||||
|
||||
public async logStepComplete(stepName: string): Promise<void> {
|
||||
await this.info(`Completed: ${stepName}`);
|
||||
}
|
||||
|
||||
public async logStepFailed(stepName: string, error: string): Promise<void> {
|
||||
await this.error(`Failed: ${stepName} - ${error}`);
|
||||
}
|
||||
|
||||
// Create a child logger with additional context
|
||||
public createChildLogger(childContext: string): TaskLogger {
|
||||
const fullContext: string = this.context
|
||||
? `${this.context}:${childContext}`
|
||||
: childContext;
|
||||
|
||||
return new TaskLogger({
|
||||
taskId: this.taskId,
|
||||
context: fullContext,
|
||||
batchSize: this.batchSize,
|
||||
flushIntervalMs: this.flushIntervalMs,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,221 +0,0 @@
|
||||
import LocalFile from "Common/Server/Utils/LocalFile";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import ObjectID from "Common/Types/ObjectID";
|
||||
import path from "path";
|
||||
import os from "os";
|
||||
|
||||
export interface WorkspaceInfo {
|
||||
workspacePath: string;
|
||||
taskId: string;
|
||||
createdAt: Date;
|
||||
}
|
||||
|
||||
export default class WorkspaceManager {
|
||||
private static readonly BASE_TEMP_DIR: string = path.join(
|
||||
os.tmpdir(),
|
||||
"oneuptime-ai-agent",
|
||||
);
|
||||
|
||||
// Create a new workspace for a task
|
||||
public static async createWorkspace(taskId: string): Promise<WorkspaceInfo> {
|
||||
const timestamp: number = Date.now();
|
||||
const uniqueId: string = ObjectID.generate().toString().substring(0, 8);
|
||||
const workspaceName: string = `task-${taskId}-${timestamp}-${uniqueId}`;
|
||||
const workspacePath: string = path.join(this.BASE_TEMP_DIR, workspaceName);
|
||||
|
||||
logger.debug(`Creating workspace: ${workspacePath}`);
|
||||
|
||||
// Create the workspace directory
|
||||
await LocalFile.makeDirectory(workspacePath);
|
||||
|
||||
return {
|
||||
workspacePath,
|
||||
taskId,
|
||||
createdAt: new Date(),
|
||||
};
|
||||
}
|
||||
|
||||
// Create a subdirectory within a workspace
|
||||
public static async createSubdirectory(
|
||||
workspacePath: string,
|
||||
subdirectoryName: string,
|
||||
): Promise<string> {
|
||||
const subdirectoryPath: string = path.join(workspacePath, subdirectoryName);
|
||||
await LocalFile.makeDirectory(subdirectoryPath);
|
||||
return subdirectoryPath;
|
||||
}
|
||||
|
||||
// Check if workspace exists
|
||||
public static async workspaceExists(workspacePath: string): Promise<boolean> {
|
||||
try {
|
||||
await LocalFile.readDirectory(workspacePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Delete a workspace and all its contents
|
||||
public static async deleteWorkspace(workspacePath: string): Promise<void> {
|
||||
logger.debug(`Deleting workspace: ${workspacePath}`);
|
||||
|
||||
try {
|
||||
// Verify the path is within our temp directory to prevent accidental deletion
|
||||
const normalizedPath: string = path.normalize(workspacePath);
|
||||
const normalizedBase: string = path.normalize(this.BASE_TEMP_DIR);
|
||||
|
||||
if (!normalizedPath.startsWith(normalizedBase)) {
|
||||
throw new Error(
|
||||
`Security error: Cannot delete path outside workspace base: ${workspacePath}`,
|
||||
);
|
||||
}
|
||||
|
||||
await LocalFile.deleteDirectory(workspacePath);
|
||||
logger.debug(`Workspace deleted: ${workspacePath}`);
|
||||
} catch (error) {
|
||||
logger.error(`Error deleting workspace ${workspacePath}:`);
|
||||
logger.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
// Write a file to workspace
|
||||
public static async writeFile(
|
||||
workspacePath: string,
|
||||
relativePath: string,
|
||||
content: string,
|
||||
): Promise<string> {
|
||||
const filePath: string = path.join(workspacePath, relativePath);
|
||||
|
||||
// Ensure parent directory exists
|
||||
const parentDir: string = path.dirname(filePath);
|
||||
await LocalFile.makeDirectory(parentDir);
|
||||
|
||||
await LocalFile.write(filePath, content);
|
||||
|
||||
return filePath;
|
||||
}
|
||||
|
||||
// Read a file from workspace
|
||||
public static async readFile(
|
||||
workspacePath: string,
|
||||
relativePath: string,
|
||||
): Promise<string> {
|
||||
const filePath: string = path.join(workspacePath, relativePath);
|
||||
return LocalFile.read(filePath);
|
||||
}
|
||||
|
||||
// Check if a file exists in workspace
|
||||
public static async fileExists(
|
||||
workspacePath: string,
|
||||
relativePath: string,
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const filePath: string = path.join(workspacePath, relativePath);
|
||||
await LocalFile.read(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Delete a file from workspace
|
||||
public static async deleteFile(
|
||||
workspacePath: string,
|
||||
relativePath: string,
|
||||
): Promise<void> {
|
||||
const filePath: string = path.join(workspacePath, relativePath);
|
||||
await LocalFile.deleteFile(filePath);
|
||||
}
|
||||
|
||||
// List files in workspace directory
|
||||
public static async listFiles(workspacePath: string): Promise<Array<string>> {
|
||||
const entries: Array<{ name: string; isDirectory(): boolean }> =
|
||||
await LocalFile.readDirectory(workspacePath);
|
||||
return entries.map((entry: { name: string }) => {
|
||||
return entry.name;
|
||||
});
|
||||
}
|
||||
|
||||
// Get the full path for a relative path in workspace
|
||||
public static getFullPath(
|
||||
workspacePath: string,
|
||||
relativePath: string,
|
||||
): string {
|
||||
return path.join(workspacePath, relativePath);
|
||||
}
|
||||
|
||||
// Clean up old workspaces (older than specified hours)
|
||||
public static async cleanupOldWorkspaces(
|
||||
maxAgeHours: number = 24,
|
||||
): Promise<number> {
|
||||
logger.debug(`Cleaning up workspaces older than ${maxAgeHours} hours`);
|
||||
|
||||
let cleanedCount: number = 0;
|
||||
|
||||
try {
|
||||
// Ensure base directory exists
|
||||
try {
|
||||
await LocalFile.readDirectory(this.BASE_TEMP_DIR);
|
||||
} catch {
|
||||
// Base directory doesn't exist, nothing to clean
|
||||
return 0;
|
||||
}
|
||||
|
||||
const entries: Array<{ name: string; isDirectory(): boolean }> =
|
||||
await LocalFile.readDirectory(this.BASE_TEMP_DIR);
|
||||
|
||||
const maxAge: number = maxAgeHours * 60 * 60 * 1000; // Convert to milliseconds
|
||||
const now: number = Date.now();
|
||||
|
||||
for (const entry of entries) {
|
||||
if (!entry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const workspacePath: string = path.join(this.BASE_TEMP_DIR, entry.name);
|
||||
|
||||
/*
|
||||
* Try to extract timestamp from directory name
|
||||
* Format: task-{taskId}-{timestamp}-{uniqueId}
|
||||
*/
|
||||
const match: RegExpMatchArray | null = entry.name.match(
|
||||
/task-[^-]+-(\d+)-[^-]+/,
|
||||
);
|
||||
|
||||
if (match) {
|
||||
const timestamp: number = parseInt(match[1] || "0", 10);
|
||||
|
||||
if (now - timestamp > maxAge) {
|
||||
await this.deleteWorkspace(workspacePath);
|
||||
cleanedCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error("Error during workspace cleanup:");
|
||||
logger.error(error);
|
||||
}
|
||||
|
||||
logger.debug(`Cleaned up ${cleanedCount} old workspaces`);
|
||||
|
||||
return cleanedCount;
|
||||
}
|
||||
|
||||
// Initialize workspace manager (create base directory if needed)
|
||||
public static async initialize(): Promise<void> {
|
||||
try {
|
||||
await LocalFile.makeDirectory(this.BASE_TEMP_DIR);
|
||||
logger.debug(
|
||||
`Workspace base directory initialized: ${this.BASE_TEMP_DIR}`,
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error("Error initializing workspace manager:");
|
||||
logger.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
// Get the base temp directory path
|
||||
public static getBaseTempDir(): string {
|
||||
return this.BASE_TEMP_DIR;
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"watch": [
|
||||
"./",
|
||||
"../Common"
|
||||
],
|
||||
"ext": "ts,tsx",
|
||||
"ignore": ["./node_modules/**", "./public/**", "./bin/**", "./build/**"],
|
||||
"watchOptions": {"useFsEvents": false, "interval": 500},
|
||||
"env": {"TS_NODE_TRANSPILE_ONLY": "1", "TS_NODE_FILES": "false"},
|
||||
"exec": "node -r ts-node/register/transpile-only Index.ts"
|
||||
}
|
||||
4752
AIAgent/package-lock.json
generated
4752
AIAgent/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,34 +0,0 @@
|
||||
{
|
||||
"name": "@oneuptime/ai-agent",
|
||||
"version": "1.0.0",
|
||||
"description": "OneUptime AI Agent",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/OneUptime/oneuptime"
|
||||
},
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"start": "export NODE_OPTIONS='--max-old-space-size=8096' && node --require ts-node/register Index.ts",
|
||||
"compile": "tsc",
|
||||
"clear-modules": "rm -rf node_modules && rm package-lock.json && npm install",
|
||||
"dev": "npx nodemon",
|
||||
"audit": "npm audit --audit-level=low",
|
||||
"dep-check": "npm install -g depcheck && depcheck ./ --skip-missing=true",
|
||||
"test": "jest --detectOpenHandles --passWithNoTests",
|
||||
"coverage": "jest --detectOpenHandles --coverage",
|
||||
"debug:test": "node --inspect node_modules/.bin/jest --runInBand ./Tests --detectOpenHandles"
|
||||
},
|
||||
"author": "OneUptime <hello@oneuptime.com> (https://oneuptime.com/)",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"Common": "file:../Common",
|
||||
"ejs": "^3.1.10",
|
||||
"ts-node": "^10.9.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^27.5.2",
|
||||
"@types/node": "^17.0.31",
|
||||
"jest": "^28.1.0",
|
||||
"nodemon": "^2.0.20"
|
||||
}
|
||||
}
|
||||
@@ -1,45 +0,0 @@
|
||||
{
|
||||
"ts-node": {
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"resolveJsonModule": true
|
||||
}
|
||||
},
|
||||
"compilerOptions": {
|
||||
"target": "es2017",
|
||||
"jsx": "react",
|
||||
"experimentalDecorators": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"rootDir": "",
|
||||
"moduleResolution": "node",
|
||||
"typeRoots": [
|
||||
"./node_modules/@types"
|
||||
],
|
||||
"types": ["node", "jest"],
|
||||
"sourceMap": true,
|
||||
"outDir": "build/dist",
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"strict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"strictFunctionTypes": true,
|
||||
"strictBindCallApply": true,
|
||||
"strictPropertyInitialization": true,
|
||||
"noImplicitThis": true,
|
||||
"useUnknownInCatchVariables": true,
|
||||
"alwaysStrict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"exactOptionalPropertyTypes": true,
|
||||
"noImplicitReturns": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"noImplicitOverride": true,
|
||||
"noPropertyAccessFromIndexSignature": true,
|
||||
"skipLibCheck": true,
|
||||
"resolveJsonModule": true
|
||||
},
|
||||
"include": ["/**/*.ts"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
.git
|
||||
|
||||
node_modules
|
||||
# See https://help.github.com/ignore-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
node_modules
|
||||
|
||||
.idea
|
||||
# testing
|
||||
/coverage
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
|
||||
env.js
|
||||
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
yarn.lock
|
||||
Untitled-1
|
||||
*.local.sh
|
||||
*.local.yaml
|
||||
run
|
||||
stop
|
||||
|
||||
nohup.out*
|
||||
|
||||
encrypted-credentials.tar
|
||||
encrypted-credentials/
|
||||
|
||||
_README.md
|
||||
|
||||
# Important Add production values to gitignore.
|
||||
values-saas-production.yaml
|
||||
kubernetes/values-saas-production.yaml
|
||||
|
||||
/private
|
||||
|
||||
/tls_cert.pem
|
||||
/tls_key.pem
|
||||
/keys
|
||||
|
||||
temp_readme.md
|
||||
|
||||
tests/coverage
|
||||
|
||||
settings.json
|
||||
|
||||
GoSDK/tester/
|
||||
30
APIReference/.gitignore
vendored
30
APIReference/.gitignore
vendored
@@ -1,30 +0,0 @@
|
||||
# See https://help.github.com/ignore-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
#/backend/node_modules
|
||||
/kubernetes
|
||||
/node_modules
|
||||
.idea
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
|
||||
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
yarn.lock
|
||||
|
||||
**/*/paymentService.test.js
|
||||
apiTest.rest
|
||||
|
||||
application_security_dir
|
||||
container_security_dir
|
||||
|
||||
# coverage
|
||||
/coverage
|
||||
/.nyc_output
|
||||
|
||||
/greenlock.d/config.json
|
||||
/greenlock.d/config.json.bak
|
||||
/.greenlockrc
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "EqualTo",
|
||||
value: 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "EqualToOrNull",
|
||||
value: 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "GreaterThan",
|
||||
"value": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "GreaterThanOrEqual",
|
||||
"value": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "GreaterThanOrNull",
|
||||
"value": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"labels": {
|
||||
"_type": "Includes",
|
||||
"value": [
|
||||
"aaa00000-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
|
||||
"bbb00000-bbbb-bbbb-bbbb-bbbbbbbbbbbb"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "IsNull"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "LessThan",
|
||||
"value": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "LessThanOrEqual",
|
||||
"value": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "LessThanOrNull",
|
||||
"value": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"name": {
|
||||
"_type": "NotEqual",
|
||||
"value": "Hello"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"query": {
|
||||
"age": {
|
||||
"_type": "NotNull"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"select": {
|
||||
"name": true,
|
||||
// other fields
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"sort": {
|
||||
"name": "ASC",
|
||||
// other fields
|
||||
}
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
Request Headers:
|
||||
|
||||
ApiKey: {secret-api-key}
|
||||
ProjectID: {project-id}
|
||||
|
||||
Request Body:
|
||||
|
||||
{
|
||||
"query": {
|
||||
// query object (optional, if left optional it'll select everything)
|
||||
},
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
"count": 107
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
Request Headers:
|
||||
|
||||
ApiKey: {secret-api-key}
|
||||
ProjectID: {project-id}
|
||||
|
||||
Request Body:
|
||||
|
||||
{
|
||||
"data": {
|
||||
// properties to update as JSON.
|
||||
}
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"_id": "xxx-xxx-xxxx-xxxx",
|
||||
// other object fields.
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
Request Headers:
|
||||
|
||||
ApiKey: {secret-api-key}
|
||||
ProjectID: {project-id}
|
||||
@@ -1 +0,0 @@
|
||||
{}
|
||||
@@ -1,12 +0,0 @@
|
||||
Request Headers:
|
||||
|
||||
ApiKey: {secret-api-key}
|
||||
ProjectID: {project-id}
|
||||
|
||||
Request Body:
|
||||
|
||||
{
|
||||
"select": {
|
||||
// select object (optional, if left optional it'll only fetch ID).
|
||||
},
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"_id": "3599ee69-43a7-42d7",
|
||||
// ... other object fields.
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
Request Headers:
|
||||
|
||||
ApiKey: {secret-api-key}
|
||||
ProjectID: {project-id}
|
||||
|
||||
Request Body:
|
||||
|
||||
{
|
||||
"select": {
|
||||
// select object (optional, if left optional it'll only fetch ID).
|
||||
},
|
||||
"query": {
|
||||
// query object (optional, if left optional it'll select everything)
|
||||
},
|
||||
"sort": {
|
||||
// sort object (optional)
|
||||
}
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"count": 10,
|
||||
"limit": 10,
|
||||
"skip": 0,
|
||||
"data": [
|
||||
{
|
||||
"_id": "p39d997a-026c-44b2-8b02-7b2814d886d2",
|
||||
// ...
|
||||
},
|
||||
{
|
||||
"_id": "l34e743e-1af5-4b0a-998f-1d421c22f7a1",
|
||||
// ...
|
||||
},
|
||||
{
|
||||
"_id": "6a5aa129-0b3e-45b7-a7f6-b4b6df338a19",
|
||||
// ...
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
Request Headers:
|
||||
|
||||
ApiKey: {secret-api-key}
|
||||
ProjectID: {project-id}
|
||||
|
||||
Request Body:
|
||||
|
||||
{
|
||||
"data": {
|
||||
// properties to update as JSON.
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
{}
|
||||
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"count": 10,
|
||||
"limit": 3,
|
||||
"skip": 0,
|
||||
"data": [
|
||||
{
|
||||
"_id": "p39d997a-026c-44b2-8b02-7b2814d886d2",
|
||||
// ...
|
||||
},
|
||||
{
|
||||
"_id": "l34e743e-1af5-4b0a-998f-1d421c22f7a1",
|
||||
// ...
|
||||
},
|
||||
{
|
||||
"_id": "6a5aa129-0b3e-45b7-a7f6-b4b6df338a19",
|
||||
// ...
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,75 +0,0 @@
|
||||
#
|
||||
# OneUptime-App Dockerfile
|
||||
#
|
||||
|
||||
# Pull base image nodejs image.
|
||||
FROM public.ecr.aws/docker/library/node:24.9-alpine3.21
|
||||
RUN mkdir /tmp/npm && chmod 2777 /tmp/npm && chown 1000:1000 /tmp/npm && npm config set cache /tmp/npm --global
|
||||
|
||||
RUN npm config set fetch-retries 5
|
||||
RUN npm config set fetch-retry-mintimeout 20000
|
||||
RUN npm config set fetch-retry-maxtimeout 60000
|
||||
|
||||
|
||||
|
||||
ARG GIT_SHA
|
||||
ARG APP_VERSION
|
||||
ARG IS_ENTERPRISE_EDITION=false
|
||||
|
||||
ENV GIT_SHA=${GIT_SHA}
|
||||
ENV APP_VERSION=${APP_VERSION}
|
||||
ENV IS_ENTERPRISE_EDITION=${IS_ENTERPRISE_EDITION}
|
||||
ENV PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1
|
||||
|
||||
|
||||
# IF APP_VERSION is not set, set it to 1.0.0
|
||||
RUN if [ -z "$APP_VERSION" ]; then export APP_VERSION=1.0.0; fi
|
||||
|
||||
|
||||
# Install bash.
|
||||
RUN apk add bash && apk add curl
|
||||
|
||||
|
||||
# Install python
|
||||
RUN apk update && apk add --no-cache --virtual .gyp python3 make g++
|
||||
|
||||
#Use bash shell by default
|
||||
SHELL ["/bin/bash", "-c"]
|
||||
|
||||
|
||||
RUN mkdir /usr/src
|
||||
|
||||
WORKDIR /usr/src/Common
|
||||
COPY ./Common/package*.json /usr/src/Common/
|
||||
# Set version in ./Common/package.json to the APP_VERSION
|
||||
RUN sed -i "s/\"version\": \".*\"/\"version\": \"$APP_VERSION\"/g" /usr/src/Common/package.json
|
||||
RUN npm install
|
||||
COPY ./Common /usr/src/Common
|
||||
|
||||
ENV PRODUCTION=true
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Install app dependencies
|
||||
COPY ./APIReference/package*.json /usr/src/app/
|
||||
# Set version in ./App/package.json to the APP_VERSION
|
||||
RUN sed -i "s/\"version\": \".*\"/\"version\": \"$APP_VERSION\"/g" /usr/src/app/package.json
|
||||
RUN npm install
|
||||
|
||||
# Expose ports.
|
||||
# - 1446: OneUptime-api-reference
|
||||
EXPOSE 1446
|
||||
|
||||
{{ if eq .Env.ENVIRONMENT "development" }}
|
||||
#Run the app
|
||||
CMD [ "npm", "run", "dev" ]
|
||||
{{ else }}
|
||||
# Copy app source
|
||||
COPY ./APIReference /usr/src/app
|
||||
# Bundle app source
|
||||
RUN npm run compile
|
||||
# Set permission to write logs and cache in case container run as non root
|
||||
RUN chown -R 1000:1000 "/tmp/npm" && chmod -R 2777 "/tmp/npm"
|
||||
#Run the app
|
||||
CMD [ "npm", "start" ]
|
||||
{{ end }}
|
||||
@@ -1,52 +0,0 @@
|
||||
import APIReferenceRoutes from "./Routes";
|
||||
import { PromiseVoidFunction } from "Common/Types/FunctionTypes";
|
||||
import InfrastructureStatus from "Common/Server/Infrastructure/Status";
|
||||
import logger from "Common/Server/Utils/Logger";
|
||||
import App from "Common/Server/Utils/StartServer";
|
||||
import Telemetry from "Common/Server/Utils/Telemetry";
|
||||
import "ejs";
|
||||
|
||||
const APP_NAME: string = "reference";
|
||||
|
||||
const init: PromiseVoidFunction = async (): Promise<void> => {
|
||||
try {
|
||||
// Initialize telemetry
|
||||
Telemetry.init({
|
||||
serviceName: APP_NAME,
|
||||
});
|
||||
|
||||
const statusCheck: PromiseVoidFunction = async (): Promise<void> => {
|
||||
// Check the status of infrastructure components
|
||||
return await InfrastructureStatus.checkStatusWithRetry({
|
||||
checkClickhouseStatus: false,
|
||||
checkPostgresStatus: false,
|
||||
checkRedisStatus: false,
|
||||
retryCount: 3,
|
||||
});
|
||||
};
|
||||
|
||||
// Initialize the app with service name and status checks
|
||||
await App.init({
|
||||
appName: APP_NAME,
|
||||
statusOptions: {
|
||||
liveCheck: statusCheck,
|
||||
readyCheck: statusCheck,
|
||||
},
|
||||
});
|
||||
|
||||
await APIReferenceRoutes.init();
|
||||
|
||||
// Add default routes to the app
|
||||
await App.addDefaultRoutes();
|
||||
} catch (err) {
|
||||
logger.error("App Init Failed:");
|
||||
logger.error(err);
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
init().catch((err: Error) => {
|
||||
logger.error(err);
|
||||
logger.error("Exiting node process");
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,89 +0,0 @@
|
||||
import AuthenticationServiceHandler from "./Service/Authentication";
|
||||
import DataTypeServiceHandler from "./Service/DataType";
|
||||
import ErrorServiceHandler from "./Service/Errors";
|
||||
import OpenAPIServiceHandler from "./Service/OpenAPI";
|
||||
import IntroductionServiceHandler from "./Service/Introduction";
|
||||
import ModelServiceHandler from "./Service/Model";
|
||||
import PageNotFoundServiceHandler from "./Service/PageNotFound";
|
||||
import PaginationServiceHandler from "./Service/Pagination";
|
||||
import PermissionServiceHandler from "./Service/Permissions";
|
||||
import StatusServiceHandler from "./Service/Status";
|
||||
import { StaticPath } from "./Utils/Config";
|
||||
import ResourceUtil, { ModelDocumentation } from "./Utils/Resources";
|
||||
import Dictionary from "Common/Types/Dictionary";
|
||||
import FeatureSet from "Common/Server/Types/FeatureSet";
|
||||
import Express, {
|
||||
ExpressApplication,
|
||||
ExpressRequest,
|
||||
ExpressResponse,
|
||||
ExpressStatic,
|
||||
} from "Common/Server/Utils/Express";
|
||||
|
||||
const APIReferenceFeatureSet: FeatureSet = {
|
||||
init: async (): Promise<void> => {
|
||||
const ResourceDictionary: Dictionary<ModelDocumentation> =
|
||||
ResourceUtil.getResourceDictionaryByPath();
|
||||
|
||||
const app: ExpressApplication = Express.getExpressApp();
|
||||
|
||||
// Serve static files for the API reference with a cache max age of 30 days
|
||||
app.use("/reference", ExpressStatic(StaticPath, { maxAge: 2592000 }));
|
||||
|
||||
// Redirect index page to the introduction page
|
||||
app.get(["/reference"], (_req: ExpressRequest, res: ExpressResponse) => {
|
||||
return res.redirect("/reference/introduction");
|
||||
});
|
||||
|
||||
// Handle "Page Not Found" page
|
||||
app.get(
|
||||
["/reference/page-not-found"],
|
||||
(req: ExpressRequest, res: ExpressResponse) => {
|
||||
return PageNotFoundServiceHandler.executeResponse(req, res);
|
||||
},
|
||||
);
|
||||
|
||||
// Handle all other pages based on the "page" parameter
|
||||
app.get(
|
||||
["/reference/:page"],
|
||||
(req: ExpressRequest, res: ExpressResponse) => {
|
||||
const page: string | undefined = req.params["page"];
|
||||
|
||||
if (!page) {
|
||||
return PageNotFoundServiceHandler.executeResponse(req, res);
|
||||
}
|
||||
|
||||
const currentResource: ModelDocumentation | undefined =
|
||||
ResourceDictionary[page];
|
||||
|
||||
// Execute the appropriate service handler based on the "page" parameter
|
||||
if (req.params["page"] === "permissions") {
|
||||
return PermissionServiceHandler.executeResponse(req, res);
|
||||
} else if (req.params["page"] === "authentication") {
|
||||
return AuthenticationServiceHandler.executeResponse(req, res);
|
||||
} else if (req.params["page"] === "pagination") {
|
||||
return PaginationServiceHandler.executeResponse(req, res);
|
||||
} else if (req.params["page"] === "errors") {
|
||||
return ErrorServiceHandler.executeResponse(req, res);
|
||||
} else if (req.params["page"] === "introduction") {
|
||||
return IntroductionServiceHandler.executeResponse(req, res);
|
||||
} else if (req.params["page"] === "openapi") {
|
||||
return OpenAPIServiceHandler.executeResponse(req, res);
|
||||
} else if (req.params["page"] === "status") {
|
||||
return StatusServiceHandler.executeResponse(req, res);
|
||||
} else if (req.params["page"] === "data-types") {
|
||||
return DataTypeServiceHandler.executeResponse(req, res);
|
||||
} else if (currentResource) {
|
||||
return ModelServiceHandler.executeResponse(req, res);
|
||||
}
|
||||
// page not found
|
||||
return PageNotFoundServiceHandler.executeResponse(req, res);
|
||||
},
|
||||
);
|
||||
|
||||
app.get("/reference/*", (req: ExpressRequest, res: ExpressResponse) => {
|
||||
return PageNotFoundServiceHandler.executeResponse(req, res);
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
export default APIReferenceFeatureSet;
|
||||
@@ -1,36 +0,0 @@
|
||||
import { IsBillingEnabled } from "Common/Server/EnvironmentConfig";
|
||||
import { ViewsPath } from "../Utils/Config";
|
||||
import ResourceUtil, { ModelDocumentation } from "../Utils/Resources";
|
||||
import { ExpressRequest, ExpressResponse } from "Common/Server/Utils/Express";
|
||||
import Dictionary from "Common/Types/Dictionary";
|
||||
|
||||
// Retrieve resources documentation
|
||||
const Resources: Array<ModelDocumentation> = ResourceUtil.getResources();
|
||||
|
||||
export default class ServiceHandler {
|
||||
public static async executeResponse(
|
||||
req: ExpressRequest,
|
||||
res: ExpressResponse,
|
||||
): Promise<void> {
|
||||
let pageTitle: string = "";
|
||||
let pageDescription: string = "";
|
||||
|
||||
// Extract page parameter from request
|
||||
const page: string | undefined = req.params["page"];
|
||||
const pageData: Dictionary<unknown> = {};
|
||||
|
||||
// Set default page title and description for the authentication page
|
||||
pageTitle = "Authentication";
|
||||
pageDescription = "Learn how to authenticate requests with OneUptime API";
|
||||
|
||||
// Render the index page with the specified parameters
|
||||
return res.render(`${ViewsPath}/pages/index`, {
|
||||
page: page,
|
||||
resources: Resources,
|
||||
pageTitle: pageTitle,
|
||||
enableGoogleTagManager: IsBillingEnabled,
|
||||
pageDescription: pageDescription,
|
||||
pageData: pageData,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,159 +0,0 @@
|
||||
import { IsBillingEnabled } from "Common/Server/EnvironmentConfig";
|
||||
import { CodeExamplesPath, ViewsPath } from "../Utils/Config";
|
||||
import ResourceUtil, { ModelDocumentation } from "../Utils/Resources";
|
||||
import LocalCache from "Common/Server/Infrastructure/LocalCache";
|
||||
import { ExpressRequest, ExpressResponse } from "Common/Server/Utils/Express";
|
||||
import LocalFile from "Common/Server/Utils/LocalFile";
|
||||
import Dictionary from "Common/Types/Dictionary";
|
||||
|
||||
const Resources: Array<ModelDocumentation> = ResourceUtil.getResources();
|
||||
|
||||
export default class ServiceHandler {
|
||||
public static async executeResponse(
|
||||
_req: ExpressRequest,
|
||||
res: ExpressResponse,
|
||||
): Promise<void> {
|
||||
const pageData: Dictionary<unknown> = {};
|
||||
|
||||
pageData["selectCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"select",
|
||||
async () => {
|
||||
return await LocalFile.read(`${CodeExamplesPath}/DataTypes/Select.md`);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["sortCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"sort",
|
||||
async () => {
|
||||
return await LocalFile.read(`${CodeExamplesPath}/DataTypes/Sort.md`);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["equalToCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"equal-to",
|
||||
async () => {
|
||||
return await LocalFile.read(`${CodeExamplesPath}/DataTypes/EqualTo.md`);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["equalToOrNullCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"equal-to-or-null",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/EqualToOrNull.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["greaterThanCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"greater-than",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/GreaterThan.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["greaterThanOrEqualCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"greater-than-or-equal",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/GreaterThanOrEqual.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["lessThanCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"less-than",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/LessThan.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["lessThanOrEqualCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"less-than-or-equal",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/LessThanOrEqual.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["includesCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"includes",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/Includes.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["lessThanOrNullCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"less-than-or-equal",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/LessThanOrNull.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["greaterThanOrNullCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"less-than-or-equal",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/LessThanOrNull.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["isNullCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"is-null",
|
||||
async () => {
|
||||
return await LocalFile.read(`${CodeExamplesPath}/DataTypes/IsNull.md`);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["notNullCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"not-null",
|
||||
async () => {
|
||||
return await LocalFile.read(`${CodeExamplesPath}/DataTypes/NotNull.md`);
|
||||
},
|
||||
);
|
||||
|
||||
pageData["notEqualToCode"] = await LocalCache.getOrSetString(
|
||||
"data-type",
|
||||
"not-equals",
|
||||
async () => {
|
||||
return await LocalFile.read(
|
||||
`${CodeExamplesPath}/DataTypes/NotEqualTo.md`,
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
res.status(200);
|
||||
return res.render(`${ViewsPath}/pages/index`, {
|
||||
page: "data-types",
|
||||
pageTitle: "Data Types",
|
||||
enableGoogleTagManager: IsBillingEnabled,
|
||||
pageDescription:
|
||||
"Data Types that can be used to interact with OneUptime API",
|
||||
resources: Resources,
|
||||
pageData: pageData,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,37 +0,0 @@
|
||||
import { IsBillingEnabled } from "Common/Server/EnvironmentConfig";
|
||||
import { ViewsPath } from "../Utils/Config";
|
||||
import ResourceUtil, { ModelDocumentation } from "../Utils/Resources";
|
||||
import { ExpressRequest, ExpressResponse } from "Common/Server/Utils/Express";
|
||||
import Dictionary from "Common/Types/Dictionary";
|
||||
|
||||
// Fetch a list of resources used in the application
|
||||
const Resources: Array<ModelDocumentation> = ResourceUtil.getResources();
|
||||
|
||||
export default class ServiceHandler {
|
||||
// Handles the HTTP response for a given request
|
||||
public static async executeResponse(
|
||||
req: ExpressRequest,
|
||||
res: ExpressResponse,
|
||||
): Promise<void> {
|
||||
let pageTitle: string = "";
|
||||
let pageDescription: string = "";
|
||||
|
||||
// Get the 'page' parameter from the request
|
||||
const page: string | undefined = req.params["page"];
|
||||
const pageData: Dictionary<unknown> = {};
|
||||
|
||||
// Set the default page title and description
|
||||
pageTitle = "Errors";
|
||||
pageDescription = "Learn more about how we return errors from API";
|
||||
|
||||
// Render the response using the given view and data
|
||||
return res.render(`${ViewsPath}/pages/index`, {
|
||||
page: page,
|
||||
resources: Resources,
|
||||
pageTitle: pageTitle,
|
||||
enableGoogleTagManager: IsBillingEnabled,
|
||||
pageDescription: pageDescription,
|
||||
pageData: pageData,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
import { IsBillingEnabled } from "Common/Server/EnvironmentConfig";
|
||||
import { ViewsPath } from "../Utils/Config";
|
||||
import ResourceUtil, { ModelDocumentation } from "../Utils/Resources";
|
||||
import { ExpressRequest, ExpressResponse } from "Common/Server/Utils/Express";
|
||||
import Dictionary from "Common/Types/Dictionary";
|
||||
|
||||
// Get all resources and featured resources from ResourceUtil
|
||||
const Resources: Array<ModelDocumentation> = ResourceUtil.getResources();
|
||||
const FeaturedResources: Array<ModelDocumentation> =
|
||||
ResourceUtil.getFeaturedResources();
|
||||
|
||||
export default class ServiceHandler {
|
||||
// Handle the API request
|
||||
public static async executeResponse(
|
||||
req: ExpressRequest,
|
||||
res: ExpressResponse,
|
||||
): Promise<void> {
|
||||
// Initialize page title and description
|
||||
let pageTitle: string = "";
|
||||
let pageDescription: string = "";
|
||||
|
||||
// Get the requested page from the URL parameters
|
||||
const page: string | undefined = req.params["page"];
|
||||
const pageData: Dictionary<unknown> = {};
|
||||
|
||||
// Set featured resources for the page
|
||||
pageData["featuredResources"] = FeaturedResources;
|
||||
|
||||
// Set page title and description
|
||||
pageTitle = "Introduction";
|
||||
pageDescription = "API Reference for OneUptime";
|
||||
|
||||
// Render the index page with the required data
|
||||
return res.render(`${ViewsPath}/pages/index`, {
|
||||
page: page,
|
||||
resources: Resources,
|
||||
pageTitle: pageTitle,
|
||||
enableGoogleTagManager: IsBillingEnabled,
|
||||
pageDescription: pageDescription,
|
||||
pageData: pageData,
|
||||
});
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user