mirror of
https://github.com/OneUptime/oneuptime.git
synced 2026-04-06 08:42:13 +02:00
Compare commits
111 Commits
ext-postgr
...
test-branc
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c5680f6c26 | ||
|
|
d2a1385123 | ||
|
|
feda1b0426 | ||
|
|
b97cc46a1e | ||
|
|
41a8101b54 | ||
|
|
676a2b18b3 | ||
|
|
df7477929b | ||
|
|
c1ebe14c50 | ||
|
|
2b478e7a13 | ||
|
|
3bb1d93f3e | ||
|
|
703c4b7685 | ||
|
|
d7e9776a3c | ||
|
|
3cb29b63fe | ||
|
|
e7f4a36ec9 | ||
|
|
be78862e49 | ||
|
|
64ae5eeb89 | ||
|
|
78bdc42534 | ||
|
|
68c81734e8 | ||
|
|
bf081d1ebe | ||
|
|
3de407842e | ||
|
|
a4a9e45fe0 | ||
|
|
f9c9480434 | ||
|
|
eb644ad2f2 | ||
|
|
5186e193a8 | ||
|
|
55d947fb39 | ||
|
|
77f1262ff5 | ||
|
|
47bf8f9c89 | ||
|
|
4ca4f28d1c | ||
|
|
b6565ce2bb | ||
|
|
0704e1f556 | ||
|
|
87c16d7bc3 | ||
|
|
cc66820e7b | ||
|
|
a478f60a39 | ||
|
|
5637f12d3a | ||
|
|
27c28b17af | ||
|
|
c55b169488 | ||
|
|
9b584d69ff | ||
|
|
05c090445a | ||
|
|
597aeb74f4 | ||
|
|
b7191a9c2e | ||
|
|
c686030014 | ||
|
|
eed1078f06 | ||
|
|
051a3c43b2 | ||
|
|
a152813535 | ||
|
|
decea5acfc | ||
|
|
4c2dfb0f92 | ||
|
|
a24bf077ce | ||
|
|
2d82f50789 | ||
|
|
3e13776252 | ||
|
|
463bb32872 | ||
|
|
99dcee80cd | ||
|
|
c418dc41dd | ||
|
|
c0678c410d | ||
|
|
081359f7e0 | ||
|
|
cc0cfe4f8c | ||
|
|
dc87905f05 | ||
|
|
9c31047d52 | ||
|
|
91d196ddea | ||
|
|
78db5cab39 | ||
|
|
689e72e5ec | ||
|
|
bc9e97f67c | ||
|
|
19550c23ed | ||
|
|
2d09df2d87 | ||
|
|
2dfebdd2e4 | ||
|
|
ebec143c9c | ||
|
|
aa68a6316a | ||
|
|
27a37581e4 | ||
|
|
091622f8cf | ||
|
|
87caae077c | ||
|
|
a146691773 | ||
|
|
9fce103b11 | ||
|
|
2aa0ae89fc | ||
|
|
0a16f6bf44 | ||
|
|
0f49e6e100 | ||
|
|
d954b4a5df | ||
|
|
e762778fc6 | ||
|
|
4cced50857 | ||
|
|
26c900d8e2 | ||
|
|
63461343ba | ||
|
|
931abc522b | ||
|
|
054592eed3 | ||
|
|
82b2307b51 | ||
|
|
b1dba73842 | ||
|
|
babbf5f8a6 | ||
|
|
39c7da79ab | ||
|
|
938bd32915 | ||
|
|
f8e1ace311 | ||
|
|
05e2e22e1d | ||
|
|
9054c49b3e | ||
|
|
5d5468603f | ||
|
|
4c6979cfa1 | ||
|
|
714a4be2b0 | ||
|
|
b935443f96 | ||
|
|
d3a3f01f20 | ||
|
|
56b0fea04a | ||
|
|
2605140166 | ||
|
|
8b9611e145 | ||
|
|
21057038d1 | ||
|
|
e587d4ba19 | ||
|
|
14da201c8d | ||
|
|
f5584a5037 | ||
|
|
157f8e95d7 | ||
|
|
fb83126f37 | ||
|
|
964def0c45 | ||
|
|
650d7cc939 | ||
|
|
bfb4c46bd0 | ||
|
|
149c8c763d | ||
|
|
fdbcace48c | ||
|
|
3cfe0517a8 | ||
|
|
9a32a47146 | ||
|
|
fd83a71a56 |
16
.github/workflows/build.yml
vendored
16
.github/workflows/build.yml
vendored
@@ -72,6 +72,22 @@ jobs:
|
||||
- name: build docker image
|
||||
run: sudo docker build -f ./App/Dockerfile .
|
||||
|
||||
|
||||
docker-build-copilot:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Preinstall
|
||||
run: npm run prerun
|
||||
|
||||
# build image for accounts service
|
||||
- name: build docker image
|
||||
run: sudo docker build -f ./Copilot/Dockerfile .
|
||||
|
||||
docker-build-e2e:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
|
||||
2
.github/workflows/codeql-analysis.yml
vendored
2
.github/workflows/codeql-analysis.yml
vendored
@@ -34,7 +34,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript' ]
|
||||
language: [ 'javascript', 'typescript', 'go' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
|
||||
14
.github/workflows/compile.yml
vendored
14
.github/workflows/compile.yml
vendored
@@ -91,6 +91,20 @@ jobs:
|
||||
- run: cd CommonUI && npm install --force
|
||||
- run: cd App && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-copilot:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.3.0
|
||||
- run: cd Common && npm install
|
||||
- run: cd Model && npm install
|
||||
- run: cd CommonServer && npm install
|
||||
- run: cd Copilot && npm install && npm run compile && npm run dep-check
|
||||
|
||||
compile-nginx:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
|
||||
168
.github/workflows/release.yml
vendored
168
.github/workflows/release.yml
vendored
@@ -875,6 +875,67 @@ jobs:
|
||||
GIT_SHA=${{ github.sha }}
|
||||
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
|
||||
|
||||
|
||||
copilot-docker-image-deploy:
|
||||
needs: [generate-build-number, github-release]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Docker Meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: |
|
||||
oneuptime/copilot
|
||||
ghcr.io/oneuptime/copilot
|
||||
tags: |
|
||||
type=raw,value=release,enable=true
|
||||
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.ref }}
|
||||
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.3.0
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Generate Dockerfile from Dockerfile.tpl
|
||||
run: npm run prerun
|
||||
|
||||
# Build and deploy app.
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2.2.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2.2.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
file: ./Copilot/Dockerfile
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
GIT_SHA=${{ github.sha }}
|
||||
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
|
||||
|
||||
accounts-docker-image-deploy:
|
||||
needs: [generate-build-number, github-release]
|
||||
runs-on: ubuntu-latest
|
||||
@@ -983,3 +1044,110 @@ jobs:
|
||||
draft: false
|
||||
prerelease: false
|
||||
tag_name: 7.0.${{needs.generate-build-number.outputs.build_number}}
|
||||
|
||||
|
||||
test-e2e-release-saas:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [copilot-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, ingestor-docker-image-deploy, isolated-vm-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, infrastructure-agent-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, github-release, generate-build-number, nginx-docker-image-deploy]
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
# Docker compose needs a lot of space to build images, so we need to free up some space first in the GitHub Actions runner
|
||||
- name: Free Disk Space (Ubuntu)
|
||||
uses: jlumbroso/free-disk-space@main
|
||||
with:
|
||||
# this might remove tools that are actually needed,
|
||||
# if set to "true" but frees about 6 GB
|
||||
tool-cache: false
|
||||
android: true
|
||||
dotnet: true
|
||||
haskell: true
|
||||
large-packages: true
|
||||
docker-images: true
|
||||
swap-storage: true
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.3.0
|
||||
- run: npm run prerun && bash ./Tests/Scripts/enable-billing-env-var.sh
|
||||
- name: Start Server with release tag
|
||||
run: npm run start
|
||||
- name: Wait for server to start
|
||||
run: bash ./Tests/Scripts/status-check.sh http://localhost
|
||||
- name: Run E2E Tests. Run docker container e2e in docker compose file
|
||||
run: export $(grep -v '^#' config.env | xargs) && docker-compose -f docker-compose.dev.yml up --exit-code-from e2e --abort-on-container-exit e2e || (docker-compose -f docker-compose.dev.yml logs e2e && exit 1)
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
# Run this on failure
|
||||
if: failure()
|
||||
with:
|
||||
# Name of the artifact to upload.
|
||||
# Optional. Default is 'artifact'
|
||||
name: test-results
|
||||
|
||||
# A file, directory or wildcard pattern that describes what to upload
|
||||
# Required.
|
||||
path: |
|
||||
./E2E/playwright-report
|
||||
./E2E/test-results
|
||||
|
||||
|
||||
# Duration after which artifact will expire in days. 0 means using default retention.
|
||||
# Minimum 1 day.
|
||||
# Maximum 90 days unless changed from the repository settings page.
|
||||
# Optional. Defaults to repository settings.
|
||||
retention-days: 7
|
||||
|
||||
|
||||
test-e2e-release-self-hosted:
|
||||
runs-on: ubuntu-latest
|
||||
# After all the jobs runs
|
||||
needs: [copilot-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, ingestor-docker-image-deploy, isolated-vm-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, infrastructure-agent-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, github-release, generate-build-number, nginx-docker-image-deploy]
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
# Docker compose needs a lot of space to build images, so we need to free up some space first in the GitHub Actions runner
|
||||
- name: Free Disk Space (Ubuntu)
|
||||
uses: jlumbroso/free-disk-space@main
|
||||
with:
|
||||
# this might remove tools that are actually needed,
|
||||
# if set to "true" but frees about 6 GB
|
||||
tool-cache: false
|
||||
android: true
|
||||
dotnet: true
|
||||
haskell: true
|
||||
large-packages: true
|
||||
docker-images: true
|
||||
swap-storage: true
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.3.0
|
||||
- run: npm run prerun
|
||||
- name: Start Server with release tag
|
||||
run: npm run start
|
||||
- name: Wait for server to start
|
||||
run: bash ./Tests/Scripts/status-check.sh http://localhost
|
||||
- name: Run E2E Tests. Run docker container e2e in docker compose file
|
||||
run: export $(grep -v '^#' config.env | xargs) && docker-compose -f docker-compose.dev.yml up --exit-code-from e2e --abort-on-container-exit e2e || (docker-compose -f docker-compose.dev.yml logs e2e && exit 1)
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
# Run this on failure
|
||||
if: failure()
|
||||
with:
|
||||
# Name of the artifact to upload.
|
||||
# Optional. Default is 'artifact'
|
||||
name: test-results
|
||||
|
||||
# A file, directory or wildcard pattern that describes what to upload
|
||||
# Required.
|
||||
path: |
|
||||
./E2E/playwright-report
|
||||
./E2E/test-results
|
||||
|
||||
|
||||
# Duration after which artifact will expire in days. 0 means using default retention.
|
||||
# Minimum 1 day.
|
||||
# Maximum 90 days unless changed from the repository settings page.
|
||||
# Optional. Defaults to repository settings.
|
||||
retention-days: 7
|
||||
174
.github/workflows/test-release.yaml
vendored
174
.github/workflows/test-release.yaml
vendored
@@ -876,10 +876,72 @@ jobs:
|
||||
GIT_SHA=${{ github.sha }}
|
||||
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
|
||||
|
||||
|
||||
copilot-docker-image-deploy:
|
||||
needs: generate-build-number
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Docker Meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: |
|
||||
oneuptime/copilot
|
||||
ghcr.io/oneuptime/copilot
|
||||
tags: |
|
||||
type=raw,value=test,enable=true
|
||||
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
|
||||
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.ref }}
|
||||
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.3.0
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Generate Dockerfile from Dockerfile.tpl
|
||||
run: npm run prerun
|
||||
|
||||
# Build and deploy accounts.
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2.2.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2.2.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
file: ./Copilot/Dockerfile
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
GIT_SHA=${{ github.sha }}
|
||||
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
|
||||
|
||||
|
||||
test-helm-chart:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [isolated-vm-docker-image-deploy, test-server-docker-image-deploy, test-docker-image-deploy, ingestor-docker-image-deploy, probe-docker-image-deploy, haraka-docker-image-deploy, dashboard-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, accounts-docker-image-deploy, otel-collector-docker-image-deploy, status-page-docker-image-deploy, nginx-docker-image-deploy, e2e-docker-image-deploy]
|
||||
needs: [copilot-docker-image-deploy, isolated-vm-docker-image-deploy, test-server-docker-image-deploy, test-docker-image-deploy, ingestor-docker-image-deploy, probe-docker-image-deploy, haraka-docker-image-deploy, dashboard-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, accounts-docker-image-deploy, otel-collector-docker-image-deploy, status-page-docker-image-deploy, nginx-docker-image-deploy, e2e-docker-image-deploy]
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
@@ -888,3 +950,113 @@ jobs:
|
||||
with:
|
||||
node-version: 18.3.0
|
||||
- run: cd HelmChart && cd Tests && bash index.sh
|
||||
|
||||
test-e2e-test-saas:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test-helm-chart]
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
# Docker compose needs a lot of space to build images, so we need to free up some space first in the GitHub Actions runner
|
||||
- name: Free Disk Space (Ubuntu)
|
||||
uses: jlumbroso/free-disk-space@main
|
||||
with:
|
||||
# this might remove tools that are actually needed,
|
||||
# if set to "true" but frees about 6 GB
|
||||
tool-cache: false
|
||||
android: true
|
||||
dotnet: true
|
||||
haskell: true
|
||||
large-packages: true
|
||||
docker-images: true
|
||||
swap-storage: true
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.3.0
|
||||
- run: npm run prerun && bash ./Tests/Scripts/change-release-to-test-tag.sh
|
||||
- name: Start Server with release tag
|
||||
run: npm run start
|
||||
- name: Wait for server to start
|
||||
run: bash ./Tests/Scripts/status-check.sh http://localhost
|
||||
- name: Run E2E Tests. Run docker container e2e in docker compose file
|
||||
run: export $(grep -v '^#' config.env | xargs) && docker-compose -f docker-compose.dev.yml up --exit-code-from e2e --abort-on-container-exit e2e || (docker-compose -f docker-compose.dev.yml logs e2e && exit 1)
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
# Run this on failure
|
||||
if: failure()
|
||||
with:
|
||||
# Name of the artifact to upload.
|
||||
# Optional. Default is 'artifact'
|
||||
name: test-results
|
||||
|
||||
# A file, directory or wildcard pattern that describes what to upload
|
||||
# Required.
|
||||
path: |
|
||||
./E2E/playwright-report
|
||||
./E2E/test-results
|
||||
|
||||
|
||||
# Duration after which artifact will expire in days. 0 means using default retention.
|
||||
# Minimum 1 day.
|
||||
# Maximum 90 days unless changed from the repository settings page.
|
||||
# Optional. Defaults to repository settings.
|
||||
retention-days: 7
|
||||
|
||||
|
||||
test-e2e-test-self-hosted:
|
||||
runs-on: ubuntu-latest
|
||||
# After all the jobs runs
|
||||
needs: [test-helm-chart]
|
||||
env:
|
||||
CI_PIPELINE_ID: ${{github.run_number}}
|
||||
steps:
|
||||
# Docker compose needs a lot of space to build images, so we need to free up some space first in the GitHub Actions runner
|
||||
- name: Free Disk Space (Ubuntu)
|
||||
uses: jlumbroso/free-disk-space@main
|
||||
with:
|
||||
# this might remove tools that are actually needed,
|
||||
# if set to "true" but frees about 6 GB
|
||||
tool-cache: false
|
||||
android: true
|
||||
dotnet: true
|
||||
haskell: true
|
||||
large-packages: true
|
||||
docker-images: true
|
||||
swap-storage: true
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.3.0
|
||||
- run: npm run prerun && bash ./Tests/Scripts/change-release-to-test-tag.sh
|
||||
- name: Start Server with release tag
|
||||
run: npm run start
|
||||
- name: Wait for server to start
|
||||
run: bash ./Tests/Scripts/status-check.sh http://localhost
|
||||
- name: Run E2E Tests. Run docker container e2e in docker compose file
|
||||
run: export $(grep -v '^#' config.env | xargs) && docker-compose -f docker-compose.dev.yml up --exit-code-from e2e --abort-on-container-exit e2e || (docker-compose -f docker-compose.dev.yml logs e2e && exit 1)
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
# Run this on failure
|
||||
if: failure()
|
||||
with:
|
||||
# Name of the artifact to upload.
|
||||
# Optional. Default is 'artifact'
|
||||
name: test-results
|
||||
|
||||
# A file, directory or wildcard pattern that describes what to upload
|
||||
# Required.
|
||||
path: |
|
||||
./E2E/playwright-report
|
||||
./E2E/test-results
|
||||
|
||||
|
||||
# Duration after which artifact will expire in days. 0 means using default retention.
|
||||
# Minimum 1 day.
|
||||
# Maximum 90 days unless changed from the repository settings page.
|
||||
# Optional. Defaults to repository settings.
|
||||
retention-days: 7
|
||||
|
||||
|
||||
|
||||
|
||||
5
.github/workflows/test.e2e.yaml
vendored
5
.github/workflows/test.e2e.yaml
vendored
@@ -56,7 +56,4 @@ jobs:
|
||||
# Minimum 1 day.
|
||||
# Maximum 90 days unless changed from the repository settings page.
|
||||
# Optional. Defaults to repository settings.
|
||||
retention-days: 7
|
||||
|
||||
|
||||
|
||||
retention-days: 7
|
||||
@@ -68,11 +68,12 @@ const RegisterPage: () => JSX.Element = () => {
|
||||
placeholder: 'New Password',
|
||||
title: 'New Password',
|
||||
required: true,
|
||||
showEvenIfPermissionDoesNotExist: true,
|
||||
},
|
||||
{
|
||||
field: {
|
||||
password: true,
|
||||
},
|
||||
confirmPassword: true,
|
||||
} as any,
|
||||
validation: {
|
||||
minLength: 6,
|
||||
toMatchField: 'password',
|
||||
@@ -82,6 +83,7 @@ const RegisterPage: () => JSX.Element = () => {
|
||||
title: 'Confirm Password',
|
||||
overrideFieldKey: 'confirmPassword',
|
||||
required: true,
|
||||
showEvenIfPermissionDoesNotExist: true,
|
||||
},
|
||||
]}
|
||||
createOrUpdateApiUrl={apiUrl}
|
||||
|
||||
@@ -2,6 +2,7 @@ import BaseAPI from 'CommonServer/API/BaseAPI';
|
||||
import BaseAnalyticsAPI from 'CommonServer/API/BaseAnalyticsAPI';
|
||||
import BillingInvoiceAPI from 'CommonServer/API/BillingInvoiceAPI';
|
||||
import BillingPaymentMethodAPI from 'CommonServer/API/BillingPaymentMethodAPI';
|
||||
import CodeRepositoryAPI from 'CommonServer/API/CodeRepositoryAPI';
|
||||
import FileAPI from 'CommonServer/API/FileAPI';
|
||||
import GlobalConfigAPI from 'CommonServer/API/GlobalConfigAPI';
|
||||
import MonitorGroupAPI from 'CommonServer/API/MonitorGroupAPI';
|
||||
@@ -29,6 +30,9 @@ import ApiKeyService, {
|
||||
import CallLogService, {
|
||||
Service as CallLogServiceType,
|
||||
} from 'CommonServer/Services/CallLogService';
|
||||
import CopilotEventService, {
|
||||
Service as CopilotEventServiceType,
|
||||
} from 'CommonServer/Services/CopilotEventService';
|
||||
import DomainService, {
|
||||
Service as DomainServiceType,
|
||||
} from 'CommonServer/Services/DomainService';
|
||||
@@ -194,6 +198,15 @@ import ScheduledMaintenanceStateService, {
|
||||
import ScheduledMaintenanceStateTimelineService, {
|
||||
Service as ScheduledMaintenanceStateTimelineServiceType,
|
||||
} from 'CommonServer/Services/ScheduledMaintenanceStateTimelineService';
|
||||
import ServiceCatalogOwnerTeamService, {
|
||||
Service as ServiceCatalogOwnerTeamServiceType,
|
||||
} from 'CommonServer/Services/ServiceCatalogOwnerTeamService';
|
||||
import ServiceCatalogOwnerUserService, {
|
||||
Service as ServiceCatalogOwnerUserServiceType,
|
||||
} from 'CommonServer/Services/ServiceCatalogOwnerUserService';
|
||||
import ServiceCatalogService, {
|
||||
Service as ServiceCatalogServiceType,
|
||||
} from 'CommonServer/Services/ServiceCatalogService';
|
||||
import ShortLinkService, {
|
||||
Service as ShortLinkServiceType,
|
||||
} from 'CommonServer/Services/ShortLinkService';
|
||||
@@ -281,6 +294,7 @@ import Span from 'Model/AnalyticsModels/Span';
|
||||
import ApiKey from 'Model/Models/ApiKey';
|
||||
import ApiKeyPermission from 'Model/Models/ApiKeyPermission';
|
||||
import CallLog from 'Model/Models/CallLog';
|
||||
import CopilotEvent from 'Model/Models/CopilotEvent';
|
||||
import Domain from 'Model/Models/Domain';
|
||||
import EmailLog from 'Model/Models/EmailLog';
|
||||
import EmailVerificationToken from 'Model/Models/EmailVerificationToken';
|
||||
@@ -333,6 +347,9 @@ import ScheduledMaintenanceOwnerUser from 'Model/Models/ScheduledMaintenanceOwne
|
||||
import ScheduledMaintenancePublicNote from 'Model/Models/ScheduledMaintenancePublicNote';
|
||||
import ScheduledMaintenanceState from 'Model/Models/ScheduledMaintenanceState';
|
||||
import ScheduledMaintenanceStateTimeline from 'Model/Models/ScheduledMaintenanceStateTimeline';
|
||||
import ServiceCatalog from 'Model/Models/ServiceCatalog';
|
||||
import ServiceCatalogOwnerTeam from 'Model/Models/ServiceCatalogOwnerTeam';
|
||||
import ServiceCatalogOwnerUser from 'Model/Models/ServiceCatalogOwnerUser';
|
||||
import ShortLink from 'Model/Models/ShortLink';
|
||||
import SmsLog from 'Model/Models/SmsLog';
|
||||
import StatusPageAnnouncement from 'Model/Models/StatusPageAnnouncement';
|
||||
@@ -468,6 +485,44 @@ const BaseAPIFeatureSet: FeatureSet = {
|
||||
>(MonitorGroupOwnerUser, MonitorGroupOwnerUserService).getRouter()
|
||||
);
|
||||
|
||||
app.use(
|
||||
`/${APP_NAME.toLocaleLowerCase()}`,
|
||||
new BaseAPI<ServiceCatalog, ServiceCatalogServiceType>(
|
||||
ServiceCatalog,
|
||||
ServiceCatalogService
|
||||
).getRouter()
|
||||
);
|
||||
|
||||
app.use(
|
||||
`/${APP_NAME.toLocaleLowerCase()}`,
|
||||
new BaseAPI<
|
||||
ServiceCatalogOwnerTeam,
|
||||
ServiceCatalogOwnerTeamServiceType
|
||||
>(
|
||||
ServiceCatalogOwnerTeam,
|
||||
ServiceCatalogOwnerTeamService
|
||||
).getRouter()
|
||||
);
|
||||
|
||||
app.use(
|
||||
`/${APP_NAME.toLocaleLowerCase()}`,
|
||||
new BaseAPI<CopilotEvent, CopilotEventServiceType>(
|
||||
CopilotEvent,
|
||||
CopilotEventService
|
||||
).getRouter()
|
||||
);
|
||||
|
||||
app.use(
|
||||
`/${APP_NAME.toLocaleLowerCase()}`,
|
||||
new BaseAPI<
|
||||
ServiceCatalogOwnerUser,
|
||||
ServiceCatalogOwnerUserServiceType
|
||||
>(
|
||||
ServiceCatalogOwnerUser,
|
||||
ServiceCatalogOwnerUserService
|
||||
).getRouter()
|
||||
);
|
||||
|
||||
app.use(
|
||||
`/${APP_NAME.toLocaleLowerCase()}`,
|
||||
new BaseAPI<
|
||||
@@ -990,6 +1045,11 @@ const BaseAPIFeatureSet: FeatureSet = {
|
||||
new GlobalConfigAPI().getRouter()
|
||||
);
|
||||
|
||||
app.use(
|
||||
`/${APP_NAME.toLocaleLowerCase()}`,
|
||||
new CodeRepositoryAPI().getRouter()
|
||||
);
|
||||
|
||||
app.use(
|
||||
`/${APP_NAME.toLocaleLowerCase()}`,
|
||||
new UserNotificationLogTimelineAPI().getRouter()
|
||||
|
||||
@@ -59,6 +59,18 @@ export default class SmsService {
|
||||
smsCost = smsCost * smsSegments;
|
||||
}
|
||||
|
||||
smsLog.toNumber = to;
|
||||
|
||||
smsLog.smsText =
|
||||
options && options.isSensitive
|
||||
? 'This message is sensitive and is not logged'
|
||||
: message;
|
||||
smsLog.smsCostInUSDCents = 0;
|
||||
|
||||
if (options.projectId) {
|
||||
smsLog.projectId = options.projectId;
|
||||
}
|
||||
|
||||
const twilioConfig: TwilioConfig | null =
|
||||
options.customTwilioConfig || (await getTwilioConfig());
|
||||
|
||||
@@ -71,17 +83,7 @@ export default class SmsService {
|
||||
twilioConfig.authToken
|
||||
);
|
||||
|
||||
smsLog.toNumber = to;
|
||||
smsLog.fromNumber = twilioConfig.phoneNumber;
|
||||
smsLog.smsText =
|
||||
options && options.isSensitive
|
||||
? 'This message is sensitive and is not logged'
|
||||
: message;
|
||||
smsLog.smsCostInUSDCents = 0;
|
||||
|
||||
if (options.projectId) {
|
||||
smsLog.projectId = options.projectId;
|
||||
}
|
||||
|
||||
let project: Project | null = null;
|
||||
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
import DataMigrationBase from './DataMigrationBase';
|
||||
import AnalyticsTableColumn from 'Common/Types/AnalyticsDatabase/TableColumn';
|
||||
import TableColumnType from 'Common/Types/AnalyticsDatabase/TableColumnType';
|
||||
import MetricService from 'CommonServer/Services/MetricService';
|
||||
import Metric from 'Model/AnalyticsModels/Metric';
|
||||
|
||||
export default class AddAggregationTemporalityToMetric extends DataMigrationBase {
|
||||
public constructor() {
|
||||
super('AddAggregationTemporalityToMetric');
|
||||
}
|
||||
|
||||
public override async migrate(): Promise<void> {
|
||||
const column: AnalyticsTableColumn | undefined =
|
||||
new Metric().tableColumns.find((column: AnalyticsTableColumn) => {
|
||||
return column.key === 'aggregationTemporality';
|
||||
});
|
||||
|
||||
if (!column) {
|
||||
return;
|
||||
}
|
||||
|
||||
const columnType: TableColumnType | null =
|
||||
await MetricService.getColumnTypeInDatabase(column);
|
||||
|
||||
if (!columnType) {
|
||||
await MetricService.dropColumnInDatabase('aggregationTemporality');
|
||||
await MetricService.addColumnInDatabase(column);
|
||||
}
|
||||
}
|
||||
|
||||
public override async rollback(): Promise<void> {
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
import DataMigrationBase from './DataMigrationBase';
|
||||
import AnalyticsTableColumn from 'Common/Types/AnalyticsDatabase/TableColumn';
|
||||
import TableColumnType from 'Common/Types/AnalyticsDatabase/TableColumnType';
|
||||
import MetricService from 'CommonServer/Services/MetricService';
|
||||
import Metric from 'Model/AnalyticsModels/Metric';
|
||||
|
||||
export default class AddIsMonotonicToMetric extends DataMigrationBase {
|
||||
public constructor() {
|
||||
super('AddIsMonotonicToMetric');
|
||||
}
|
||||
|
||||
public override async migrate(): Promise<void> {
|
||||
const column: AnalyticsTableColumn | undefined =
|
||||
new Metric().tableColumns.find((column: AnalyticsTableColumn) => {
|
||||
return column.key === 'isMonotonic';
|
||||
});
|
||||
|
||||
if (!column) {
|
||||
return;
|
||||
}
|
||||
|
||||
const columnType: TableColumnType | null =
|
||||
await MetricService.getColumnTypeInDatabase(column);
|
||||
|
||||
if (!columnType) {
|
||||
await MetricService.dropColumnInDatabase('isMonotonic');
|
||||
await MetricService.addColumnInDatabase(column);
|
||||
}
|
||||
}
|
||||
|
||||
public override async rollback(): Promise<void> {
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
import DataMigrationBase from './DataMigrationBase';
|
||||
import AnalyticsTableColumn from 'Common/Types/AnalyticsDatabase/TableColumn';
|
||||
import TableColumnType from 'Common/Types/AnalyticsDatabase/TableColumnType';
|
||||
import MetricService from 'CommonServer/Services/MetricService';
|
||||
import Metric from 'Model/AnalyticsModels/Metric';
|
||||
|
||||
export default class AddPointTypeToMetric extends DataMigrationBase {
|
||||
public constructor() {
|
||||
super('AddPointTypeToMetric');
|
||||
}
|
||||
|
||||
public override async migrate(): Promise<void> {
|
||||
const column: AnalyticsTableColumn | undefined =
|
||||
new Metric().tableColumns.find((column: AnalyticsTableColumn) => {
|
||||
return column.key === 'metricPointType';
|
||||
});
|
||||
|
||||
if (!column) {
|
||||
return;
|
||||
}
|
||||
|
||||
const columnType: TableColumnType | null =
|
||||
await MetricService.getColumnTypeInDatabase(column);
|
||||
|
||||
if (!columnType) {
|
||||
await MetricService.dropColumnInDatabase('metricPointType');
|
||||
await MetricService.addColumnInDatabase(column);
|
||||
}
|
||||
}
|
||||
|
||||
public override async rollback(): Promise<void> {
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,39 @@
|
||||
import DataMigrationBase from './DataMigrationBase';
|
||||
import AnalyticsTableColumn from 'Common/Types/AnalyticsDatabase/TableColumn';
|
||||
import TableColumnType from 'Common/Types/AnalyticsDatabase/TableColumnType';
|
||||
import MetricService from 'CommonServer/Services/MetricService';
|
||||
import Metric from 'Model/AnalyticsModels/Metric';
|
||||
|
||||
export default class AddUnitColumnToMetricsTable extends DataMigrationBase {
|
||||
public constructor() {
|
||||
super('AddUnitColumnToMetricsTable');
|
||||
}
|
||||
|
||||
public override async migrate(): Promise<void> {
|
||||
await this.addUnitColumnToMetricsTable();
|
||||
}
|
||||
|
||||
public async addUnitColumnToMetricsTable(): Promise<void> {
|
||||
// logs
|
||||
const unitColumn: AnalyticsTableColumn | undefined =
|
||||
new Metric().tableColumns.find((column: AnalyticsTableColumn) => {
|
||||
return column.key === 'unit';
|
||||
});
|
||||
|
||||
if (!unitColumn) {
|
||||
return;
|
||||
}
|
||||
|
||||
const columnType: TableColumnType | null =
|
||||
await MetricService.getColumnTypeInDatabase(unitColumn);
|
||||
|
||||
if (!columnType) {
|
||||
await MetricService.dropColumnInDatabase('unit');
|
||||
await MetricService.addColumnInDatabase(unitColumn);
|
||||
}
|
||||
}
|
||||
|
||||
public override async rollback(): Promise<void> {
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
import DataMigrationBase from './DataMigrationBase';
|
||||
import AnalyticsTableColumn from 'Common/Types/AnalyticsDatabase/TableColumn';
|
||||
import TableColumnType from 'Common/Types/AnalyticsDatabase/TableColumnType';
|
||||
import MetricService from 'CommonServer/Services/MetricService';
|
||||
import Metric from 'Model/AnalyticsModels/Metric';
|
||||
|
||||
export default class ChangeMetricColumnTypeToDecimal extends DataMigrationBase {
|
||||
public constructor() {
|
||||
super('ChangeMetricColumnTypeToDecimal');
|
||||
}
|
||||
|
||||
public override async migrate(): Promise<void> {
|
||||
await this.dropAndCreateColumn('value');
|
||||
await this.dropAndCreateColumn('sum');
|
||||
await this.dropAndCreateColumn('min');
|
||||
await this.dropAndCreateColumn('max');
|
||||
}
|
||||
|
||||
public async dropAndCreateColumn(columnName: string): Promise<void> {
|
||||
const column: AnalyticsTableColumn | undefined =
|
||||
new Metric().tableColumns.find((column: AnalyticsTableColumn) => {
|
||||
return column.key === columnName;
|
||||
});
|
||||
|
||||
if (!column) {
|
||||
return;
|
||||
}
|
||||
|
||||
const columnType: TableColumnType | null =
|
||||
await MetricService.getColumnTypeInDatabase(column);
|
||||
|
||||
if (columnType) {
|
||||
await MetricService.dropColumnInDatabase(columnName);
|
||||
}
|
||||
|
||||
await MetricService.addColumnInDatabase(column);
|
||||
}
|
||||
|
||||
public override async rollback(): Promise<void> {
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
import AddAggregationTemporalityToMetric from './AddAggregationTemporalityToMetric';
|
||||
import AddAttributeColumnToSpanAndLog from './AddAttributesColumnToSpanAndLog';
|
||||
import AddDefaultGlobalConfig from './AddDefaultGlobalConfig';
|
||||
import AddDowntimeMonitorStatusToStatusPage from './AddDowntimeMonitorStatusToStatusPage';
|
||||
@@ -7,15 +8,19 @@ import AddEndDateToMonitorStatusTimeline from './AddEndDateToMonitorStatusTimeli
|
||||
import AddEndDateToMonitorStatusTimelineWhereEndDateIsMissing from './AddEndDateToMonitorStatusTimelineWhereEndDateIsMissing';
|
||||
import AddEndDateToScheduledEventsStateTimeline from './AddEndDateToScheduledEventsStateTimeline';
|
||||
import AddEndedState from './AddEndedState';
|
||||
import AddIsMonotonicToMetric from './AddIsMonotonicToMetric';
|
||||
import AddMonitoringDatesToMonitor from './AddMonitoringDatesToMonitors';
|
||||
import AddOwnerInfoToProjects from './AddOwnerInfoToProject';
|
||||
import AddPointTypeToMetric from './AddPointTypeToMetric';
|
||||
import AddPostedAtToPublicNotes from './AddPostedAtToPublicNotes';
|
||||
import AddSecretKeyToIncomingRequestMonitor from './AddSecretKeyToIncomingRequestMonitor';
|
||||
import AddStartDateToIncidentStateTimeline from './AddStartDateToIncidentStateTimeline';
|
||||
import AddStartDateToMonitorStatusTimeline from './AddStartDateToMonitorStatusTimeline';
|
||||
import AddStartDateToScheduledEventsStateTimeline from './AddStartDateToScheduledEventsStateTimeline';
|
||||
import AddTelemetryServiceColor from './AddTelemetryServiceColor';
|
||||
import AddUnitColumnToMetricsTable from './AddUnitColumnToMetricsTable';
|
||||
import ChangeLogSeverityColumnTypeFromTextToNumber from './ChangeLogSeverityColumnTypeFromTextToNumber';
|
||||
import ChangeMetricColumnTypeToDecimal from './ChangeMetricColumnTypesToDecimal';
|
||||
import DataMigrationBase from './DataMigrationBase';
|
||||
import GenerateNewCertsForStatusPage from './GenerateNewCertsForStatusPage';
|
||||
import MigrateDefaultUserNotificationRule from './MigrateDefaultUserNotificationRule';
|
||||
@@ -57,6 +62,11 @@ const DataMigrations: Array<DataMigrationBase> = [
|
||||
new GenerateNewCertsForStatusPage(),
|
||||
new AddEndDateToMonitorStatusTimelineWhereEndDateIsMissing(),
|
||||
new RemoveCanFromPermissions(),
|
||||
new AddUnitColumnToMetricsTable(),
|
||||
new ChangeMetricColumnTypeToDecimal(),
|
||||
new AddAggregationTemporalityToMetric(),
|
||||
new AddPointTypeToMetric(),
|
||||
new AddIsMonotonicToMetric(),
|
||||
];
|
||||
|
||||
export default DataMigrations;
|
||||
|
||||
@@ -94,6 +94,12 @@ RunCron(
|
||||
fromName: true,
|
||||
secure: true,
|
||||
},
|
||||
callSmsConfig: {
|
||||
_id: true,
|
||||
twilioAccountSID: true,
|
||||
twilioAuthToken: true,
|
||||
twilioPhoneNumber: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
6
Common/Types/Copilot/CopilotEventType.ts
Normal file
6
Common/Types/Copilot/CopilotEventType.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
enum CopilotEventType {
|
||||
IMPROVE_COMMENTS = 'IMPROVE_COMMENTS',
|
||||
FIX_GRAMMAR_AND_SPELLING = 'FIX_GRAMMAR_AND_SPELLING',
|
||||
}
|
||||
|
||||
export default CopilotEventType;
|
||||
@@ -148,6 +148,21 @@ export class MonitorTypeHelper {
|
||||
return isProbeableMonitor;
|
||||
}
|
||||
|
||||
public static getActiveMonitorTypes(): Array<MonitorType> {
|
||||
return [
|
||||
MonitorType.API,
|
||||
MonitorType.Website,
|
||||
MonitorType.IP,
|
||||
MonitorType.Ping,
|
||||
MonitorType.Port,
|
||||
MonitorType.SSLCertificate,
|
||||
MonitorType.SyntheticMonitor,
|
||||
MonitorType.CustomJavaScriptCode,
|
||||
MonitorType.IncomingRequest,
|
||||
MonitorType.Server,
|
||||
];
|
||||
}
|
||||
|
||||
public static doesMonitorTypeHaveDocumentation(
|
||||
monitorType: MonitorType
|
||||
): boolean {
|
||||
|
||||
@@ -171,6 +171,16 @@ enum Permission {
|
||||
EditStatusPageOwnerUser = 'EditStatusPageOwnerUser',
|
||||
ReadStatusPageOwnerUser = 'ReadStatusPageOwnerUser',
|
||||
|
||||
CreateServiceCatalogOwnerTeam = 'CreateServiceCatalogOwnerTeam',
|
||||
DeleteServiceCatalogOwnerTeam = 'DeleteServiceCatalogOwnerTeam',
|
||||
EditServiceCatalogOwnerTeam = 'EditServiceCatalogOwnerTeam',
|
||||
ReadServiceCatalogOwnerTeam = 'ReadServiceCatalogOwnerTeam',
|
||||
|
||||
CreateServiceCatalogOwnerUser = 'CreateServiceCatalogOwner',
|
||||
DeleteServiceCatalogOwnerUser = 'DeleteServiceCatalogOwnerUser',
|
||||
EditServiceCatalogOwnerUser = 'EditServiceCatalogOwnerUser',
|
||||
ReadServiceCatalogOwnerUser = 'ReadServiceCatalogOwnerUser',
|
||||
|
||||
CreateMonitorOwnerTeam = 'CreateMonitorOwnerTeam',
|
||||
DeleteMonitorOwnerTeam = 'DeleteMonitorOwnerTeam',
|
||||
EditMonitorOwnerTeam = 'EditMonitorOwnerTeam',
|
||||
@@ -444,6 +454,18 @@ enum Permission {
|
||||
EditIncidentSeverity = 'EditIncidentSeverity',
|
||||
DeleteIncidentSeverity = 'DeleteIncidentSeverity',
|
||||
ReadIncidentSeverity = 'ReadIncidentSeverity',
|
||||
|
||||
CreateServiceCatalog = 'CreateServiceCatalog',
|
||||
DeleteServiceCatalog = 'DeleteServiceCatalog',
|
||||
EditServiceCatalog = 'EditServiceCatalog',
|
||||
ReadServiceCatalog = 'ReadServiceCatalog',
|
||||
|
||||
CreateCodeRepository = 'CreateCodeRepository',
|
||||
DeleteCodeRepository = 'DeleteCodeRepository',
|
||||
EditCodeRepository = 'EditCodeRepository',
|
||||
ReadCodeRepository = 'ReadCodeRepository',
|
||||
|
||||
ReadCopilotEvent = 'ReadCopilotEvent',
|
||||
}
|
||||
|
||||
export class PermissionHelper {
|
||||
@@ -2427,6 +2449,81 @@ export class PermissionHelper {
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
|
||||
{
|
||||
permission: Permission.CreateCodeRepository,
|
||||
title: 'Create Code Repository',
|
||||
description:
|
||||
'This permission can create Code Repository this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: true,
|
||||
},
|
||||
{
|
||||
permission: Permission.DeleteCodeRepository,
|
||||
title: 'Delete Code Repository',
|
||||
description:
|
||||
'This permission can delete Code Repository of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: true,
|
||||
},
|
||||
{
|
||||
permission: Permission.EditCodeRepository,
|
||||
title: 'Edit Code Repository',
|
||||
description:
|
||||
'This permission can edit Code Repository of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: true,
|
||||
},
|
||||
{
|
||||
permission: Permission.ReadCodeRepository,
|
||||
title: 'Read Code Repository',
|
||||
description:
|
||||
'This permission can read Code Repository of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: true,
|
||||
},
|
||||
|
||||
{
|
||||
permission: Permission.ReadCopilotEvent,
|
||||
title: 'Read Copilot Event',
|
||||
description:
|
||||
'This permission can read Copilot Event of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
|
||||
{
|
||||
permission: Permission.CreateServiceCatalog,
|
||||
title: 'Create Service Catalog',
|
||||
description:
|
||||
'This permission can create Service Catalog this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: true,
|
||||
},
|
||||
{
|
||||
permission: Permission.DeleteServiceCatalog,
|
||||
title: 'Delete Service Catalog',
|
||||
description:
|
||||
'This permission can delete Service Catalog of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: true,
|
||||
},
|
||||
{
|
||||
permission: Permission.EditServiceCatalog,
|
||||
title: 'Edit Service Catalog',
|
||||
description:
|
||||
'This permission can edit Service Catalog of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: true,
|
||||
},
|
||||
{
|
||||
permission: Permission.ReadServiceCatalog,
|
||||
title: 'Read Service Catalog',
|
||||
description:
|
||||
'This permission can read Service Catalog of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: true,
|
||||
},
|
||||
|
||||
{
|
||||
permission: Permission.CreateTelemetryServiceTraces,
|
||||
title: 'Create Telemetry Service Traces',
|
||||
@@ -2757,6 +2854,72 @@ export class PermissionHelper {
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
|
||||
{
|
||||
permission: Permission.CreateServiceCatalogOwnerTeam,
|
||||
title: 'Create Service Catalog Team Owner',
|
||||
description:
|
||||
'This permission can create Service Catalog Team Owner this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
{
|
||||
permission: Permission.DeleteServiceCatalogOwnerTeam,
|
||||
title: 'Delete Service Catalog Team Owner',
|
||||
description:
|
||||
'This permission can delete Service Catalog Team Owner of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
{
|
||||
permission: Permission.EditServiceCatalogOwnerTeam,
|
||||
title: 'Edit Service Catalog Team Owner',
|
||||
description:
|
||||
'This permission can edit Service Catalog Team Owner of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
{
|
||||
permission: Permission.ReadServiceCatalogOwnerTeam,
|
||||
title: 'Read Service Catalog Team Owner',
|
||||
description:
|
||||
'This permission can read Service Catalog Team Owner of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
|
||||
{
|
||||
permission: Permission.CreateServiceCatalogOwnerUser,
|
||||
title: 'Create Service Catalog User Owner',
|
||||
description:
|
||||
'This permission can create Service Catalog User Owner this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
{
|
||||
permission: Permission.DeleteServiceCatalogOwnerUser,
|
||||
title: 'Delete Service Catalog User Owner',
|
||||
description:
|
||||
'This permission can delete Service Catalog User Owner of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
{
|
||||
permission: Permission.EditServiceCatalogOwnerUser,
|
||||
title: 'Edit Service Catalog User Owner',
|
||||
description:
|
||||
'This permission can edit Service Catalog User Owner of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
{
|
||||
permission: Permission.ReadServiceCatalogOwnerUser,
|
||||
title: 'Read Service Catalog User Owner',
|
||||
description:
|
||||
'This permission can read Service Catalog User Owner of this project.',
|
||||
isAssignableToTenant: true,
|
||||
isAccessControlPermission: false,
|
||||
},
|
||||
|
||||
{
|
||||
permission: Permission.CreateIncidentTemplateOwnerUser,
|
||||
title: 'Create IncidentTemplate User Owner',
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
import Hostname from '../API/Hostname';
|
||||
import URL from '../API/URL';
|
||||
import Dictionary from '../Dictionary';
|
||||
import IP from '../IP/IP';
|
||||
import { JSONObject } from '../JSON';
|
||||
import CustomCodeMonitorResponse from '../Monitor/CustomCodeMonitor/CustomCodeMonitorResponse';
|
||||
import SslMonitorResponse from '../Monitor/SSLMonitor/SslMonitorResponse';
|
||||
import SyntheticMonitorResponse from '../Monitor/SyntheticMonitors/SyntheticMonitorResponse';
|
||||
import ObjectID from '../ObjectID';
|
||||
import Port from '../Port';
|
||||
|
||||
export default interface ProbeMonitorResponse {
|
||||
isOnline?: boolean | undefined;
|
||||
monitorDestination?: URL | IP | Hostname | undefined;
|
||||
monitorDestinationPort?: Port | undefined;
|
||||
responseTimeInMs?: number | undefined;
|
||||
responseCode?: number | undefined;
|
||||
responseHeaders?: Dictionary<string> | undefined;
|
||||
|
||||
9
Common/package-lock.json
generated
9
Common/package-lock.json
generated
@@ -11,7 +11,7 @@
|
||||
"dependencies": {
|
||||
"@types/crypto-js": "^4.2.2",
|
||||
"@types/uuid": "^8.3.4",
|
||||
"axios": "^1.6.8",
|
||||
"axios": "^1.7.1",
|
||||
"crypto-js": "^4.1.1",
|
||||
"json5": "^2.2.3",
|
||||
"moment": "^2.30.1",
|
||||
@@ -1382,9 +1382,10 @@
|
||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.6.8",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz",
|
||||
"integrity": "sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==",
|
||||
"version": "1.7.1",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.1.tgz",
|
||||
"integrity": "sha512-+LV37nQcd1EpFalkXksWNBiA17NZ5m5/WspmHGmZmdx1qBOg/VNq/c4eRJiA9VQQHBOs+N0ZhhdU10h2TyNK7Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
"dependencies": {
|
||||
"@types/crypto-js": "^4.2.2",
|
||||
"@types/uuid": "^8.3.4",
|
||||
"axios": "^1.6.8",
|
||||
"axios": "^1.7.1",
|
||||
"crypto-js": "^4.1.1",
|
||||
"json5": "^2.2.3",
|
||||
"moment": "^2.30.1",
|
||||
|
||||
65
CommonServer/API/CodeRepositoryAPI.ts
Normal file
65
CommonServer/API/CodeRepositoryAPI.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import UserMiddleware from '../Middleware/UserAuthorization';
|
||||
import CodeRepositoryService, {
|
||||
Service as CodeRepositoryServiceType,
|
||||
} from '../Services/CodeRepositoryService';
|
||||
import {
|
||||
ExpressRequest,
|
||||
ExpressResponse,
|
||||
NextFunction,
|
||||
} from '../Utils/Express';
|
||||
import Response from '../Utils/Response';
|
||||
import BaseAPI from './BaseAPI';
|
||||
import BadDataException from 'Common/Types/Exception/BadDataException';
|
||||
import ObjectID from 'Common/Types/ObjectID';
|
||||
import CodeRepository from 'Model/Models/CodeRepository';
|
||||
|
||||
export default class CodeRepositoryAPI extends BaseAPI<
|
||||
CodeRepository,
|
||||
CodeRepositoryServiceType
|
||||
> {
|
||||
public constructor() {
|
||||
super(CodeRepository, CodeRepositoryService);
|
||||
|
||||
this.router.get(
|
||||
`${new this.entityType()
|
||||
.getCrudApiPath()
|
||||
?.toString()}/get-code-repository/:secretkey`,
|
||||
UserMiddleware.getUserMiddleware,
|
||||
async (
|
||||
req: ExpressRequest,
|
||||
res: ExpressResponse,
|
||||
next: NextFunction
|
||||
) => {
|
||||
try {
|
||||
const secretkey: string = req.params['secretkey']!;
|
||||
|
||||
if (!secretkey) {
|
||||
throw new BadDataException('Secret key is required');
|
||||
}
|
||||
|
||||
const codeRepository: CodeRepository | null =
|
||||
await CodeRepositoryService.findOneBy({
|
||||
query: {
|
||||
secretToken: new ObjectID(secretkey),
|
||||
},
|
||||
select: {
|
||||
name: true,
|
||||
},
|
||||
props: {
|
||||
isRoot: true,
|
||||
},
|
||||
});
|
||||
|
||||
return Response.sendEntityResponse(
|
||||
req,
|
||||
res,
|
||||
codeRepository,
|
||||
CodeRepository
|
||||
);
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -6,6 +6,7 @@ import Express, {
|
||||
} from '../Utils/Express';
|
||||
import logger from '../Utils/Logger';
|
||||
import Response from '../Utils/Response';
|
||||
import Telemetry from '../Utils/Telemetry';
|
||||
import Exception from 'Common/Types/Exception/Exception';
|
||||
import ServerException from 'Common/Types/Exception/ServerException';
|
||||
|
||||
@@ -15,6 +16,35 @@ export interface StatusAPIOptions {
|
||||
}
|
||||
|
||||
export default class StatusAPI {
|
||||
public static statusCheckSuccessCounter = Telemetry.getCounter({
|
||||
name: 'status.check.success',
|
||||
description: 'Status check counter',
|
||||
});
|
||||
|
||||
// ready counter
|
||||
public static stausReadySuccess = Telemetry.getCounter({
|
||||
name: 'status.ready.success',
|
||||
description: 'Ready check counter',
|
||||
});
|
||||
// live counter
|
||||
|
||||
public static stausLiveSuccess = Telemetry.getCounter({
|
||||
name: 'status.live.success',
|
||||
description: 'Live check counter',
|
||||
});
|
||||
|
||||
// ready failed counter
|
||||
public static stausReadyFailed = Telemetry.getCounter({
|
||||
name: 'status.ready.failed',
|
||||
description: 'Ready check counter',
|
||||
});
|
||||
|
||||
// live failed counter
|
||||
public static stausLiveFailed = Telemetry.getCounter({
|
||||
name: 'status.live.failed',
|
||||
description: 'Live check counter',
|
||||
});
|
||||
|
||||
public static init(options: StatusAPIOptions): ExpressRouter {
|
||||
const router: ExpressRouter = Express.getRouter();
|
||||
|
||||
@@ -27,6 +57,8 @@ export default class StatusAPI {
|
||||
|
||||
// General status
|
||||
router.get('/status', (req: ExpressRequest, res: ExpressResponse) => {
|
||||
this.statusCheckSuccessCounter.add(1);
|
||||
|
||||
logger.info('Status check: ok');
|
||||
|
||||
Response.sendJsonObjectResponse(req, res, {
|
||||
@@ -42,10 +74,12 @@ export default class StatusAPI {
|
||||
logger.debug('Ready check');
|
||||
await options.readyCheck();
|
||||
logger.info('Ready check: ok');
|
||||
this.stausReadySuccess.add(1);
|
||||
Response.sendJsonObjectResponse(req, res, {
|
||||
status: 'ok',
|
||||
});
|
||||
} catch (e) {
|
||||
this.stausReadyFailed.add(1);
|
||||
Response.sendErrorResponse(
|
||||
req,
|
||||
res,
|
||||
@@ -65,10 +99,12 @@ export default class StatusAPI {
|
||||
logger.debug('Live check');
|
||||
await options.readyCheck();
|
||||
logger.info('Live check: ok');
|
||||
this.stausLiveSuccess.add(1);
|
||||
Response.sendJsonObjectResponse(req, res, {
|
||||
status: 'ok',
|
||||
});
|
||||
} catch (e) {
|
||||
this.stausLiveFailed.add(1);
|
||||
Response.sendErrorResponse(
|
||||
req,
|
||||
res,
|
||||
|
||||
@@ -2040,6 +2040,7 @@ export default class StatusPageAPI extends BaseAPI<
|
||||
}
|
||||
|
||||
statusPageSubscriber.statusPageId = objectId;
|
||||
statusPageSubscriber.sendYouHaveSubscribedMessage = true;
|
||||
statusPageSubscriber.projectId = statusPage.projectId!;
|
||||
statusPageSubscriber.isSubscribedToAllResources = Boolean(
|
||||
req.body.data['isSubscribedToAllResources']
|
||||
|
||||
@@ -116,13 +116,21 @@ export const RedisDb: number = Number(process.env['REDIS_DB']) || 0;
|
||||
export const RedisUsername: string = process.env['REDIS_USERNAME'] || 'default';
|
||||
export const RedisPassword: string =
|
||||
process.env['REDIS_PASSWORD'] || 'password';
|
||||
|
||||
export const RedisTlsCa: string | undefined =
|
||||
process.env['REDIS_TLS_CA'] || undefined;
|
||||
|
||||
export const RedisTlsCert: string | undefined =
|
||||
process.env['REDIS_TLS_CERT'] || undefined;
|
||||
|
||||
export const RedisTlsKey: string | undefined =
|
||||
process.env['REDIS_TLS_KEY'] || undefined;
|
||||
|
||||
export const RedisTlsSentinelMode: boolean =
|
||||
process.env['REDIS_TLS_SENTINEL_MODE'] === 'true';
|
||||
|
||||
export const ShouldRedisTlsEnable: boolean = Boolean(
|
||||
RedisTlsCa || RedisTlsSentinelMode
|
||||
RedisTlsCa || (RedisTlsCert && RedisTlsKey)
|
||||
);
|
||||
|
||||
export const IsProduction: boolean =
|
||||
@@ -162,12 +170,28 @@ export const ClickhousePassword: string =
|
||||
export const ClickhouseDatabase: string =
|
||||
process.env['CLICKHOUSE_DATABASE'] || 'oneuptime';
|
||||
|
||||
export const ClickhouseTlsCa: string | undefined =
|
||||
process.env['CLICKHOUSE_TLS_CA'] || undefined;
|
||||
|
||||
export const ClickhouseTlsCert: string | undefined =
|
||||
process.env['CLICKHOUSE_TLS_CERT'] || undefined;
|
||||
|
||||
export const ClickhouseTlsKey: string | undefined =
|
||||
process.env['CLICKHOUSE_TLS_KEY'] || undefined;
|
||||
|
||||
export const ClickHouseIsHostHttps: boolean =
|
||||
process.env['CLICKHOUSE_IS_HOST_HTTPS'] === 'true';
|
||||
|
||||
export const ShouldClickhouseSslEnable: boolean = Boolean(
|
||||
ClickhouseTlsCa || (ClickhouseTlsCert && ClickhouseTlsKey)
|
||||
);
|
||||
|
||||
export const GitSha: string = process.env['GIT_SHA'] || 'unknown';
|
||||
|
||||
export const AppVersion: string = process.env['APP_VERSION'] || 'unknown';
|
||||
|
||||
export const LogLevel: ConfigLogLevel =
|
||||
(process.env['LOG_LEVEL'] as ConfigLogLevel) || ConfigLogLevel.ERROR;
|
||||
(process.env['LOG_LEVEL'] as ConfigLogLevel) || ConfigLogLevel.INFO;
|
||||
|
||||
export const HttpProtocol: Protocol =
|
||||
process.env['HTTP_PROTOCOL'] === 'https' ? Protocol.HTTPS : Protocol.HTTP;
|
||||
@@ -179,3 +203,11 @@ export const WorkflowScriptTimeoutInMS: number = process.env[
|
||||
]
|
||||
? parseInt(process.env['WORKFLOW_SCRIPT_TIMEOUT_IN_MS'].toString())
|
||||
: 5000;
|
||||
|
||||
export const AllowedActiveMonitorCountInFreePlan: number = process.env[
|
||||
'ALLOWED_ACTIVE_MONITOR_COUNT_IN_FREE_PLAN'
|
||||
]
|
||||
? parseInt(
|
||||
process.env['ALLOWED_ACTIVE_MONITOR_COUNT_IN_FREE_PLAN'].toString()
|
||||
)
|
||||
: 10;
|
||||
|
||||
@@ -1,21 +1,49 @@
|
||||
import {
|
||||
ClickHouseIsHostHttps,
|
||||
ClickhouseDatabase,
|
||||
ClickhouseHost,
|
||||
ClickhousePassword,
|
||||
ClickhousePort,
|
||||
ClickhouseTlsCa,
|
||||
ClickhouseTlsCert,
|
||||
ClickhouseTlsKey,
|
||||
ClickhouseUsername,
|
||||
ShouldClickhouseSslEnable,
|
||||
} from '../EnvironmentConfig';
|
||||
import { NodeClickHouseClientConfigOptions } from '@clickhouse/client/dist/client';
|
||||
|
||||
export type ClickHouseClientConfigOptions = NodeClickHouseClientConfigOptions;
|
||||
|
||||
export const dataSourceOptions: ClickHouseClientConfigOptions = {
|
||||
host: `http://${ClickhouseHost.toString()}:${ClickhousePort.toNumber()}`,
|
||||
const hostProtocol: string = ClickHouseIsHostHttps ? 'https' : 'http';
|
||||
|
||||
const options: ClickHouseClientConfigOptions = {
|
||||
host: `${hostProtocol}://${ClickhouseHost.toString()}:${ClickhousePort.toNumber()}`,
|
||||
username: ClickhouseUsername,
|
||||
password: ClickhousePassword,
|
||||
database: ClickhouseDatabase,
|
||||
application: 'oneuptime',
|
||||
};
|
||||
|
||||
if (ShouldClickhouseSslEnable && ClickhouseTlsCa) {
|
||||
options.tls = {
|
||||
ca_cert: Buffer.from(ClickhouseTlsCa),
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
ShouldClickhouseSslEnable &&
|
||||
ClickhouseTlsCa &&
|
||||
ClickhouseTlsCert &&
|
||||
ClickhouseTlsKey
|
||||
) {
|
||||
options.tls = {
|
||||
ca_cert: Buffer.from(ClickhouseTlsCa),
|
||||
cert: Buffer.from(ClickhouseTlsCert),
|
||||
key: Buffer.from(ClickhouseTlsKey),
|
||||
};
|
||||
}
|
||||
|
||||
export const dataSourceOptions: ClickHouseClientConfigOptions = options;
|
||||
|
||||
export const testDataSourceOptions: ClickHouseClientConfigOptions =
|
||||
dataSourceOptions;
|
||||
|
||||
42
CommonServer/Infrastructure/Postgres/DataSourceOptions.ts
Normal file
42
CommonServer/Infrastructure/Postgres/DataSourceOptions.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import {
|
||||
DatabaseHost,
|
||||
DatabaseName,
|
||||
DatabasePassword,
|
||||
DatabasePort,
|
||||
DatabaseRejectUnauthorized,
|
||||
DatabaseSslCa,
|
||||
DatabaseSslCert,
|
||||
DatabaseSslKey,
|
||||
DatabaseUsername,
|
||||
ShouldDatabaseSslEnable,
|
||||
} from '../../EnvironmentConfig';
|
||||
import Migrations from './SchemaMigrations/Index';
|
||||
import DatabaseType from 'Common/Types/DatabaseType';
|
||||
import Entities from 'Model/Models/Index';
|
||||
import { DataSourceOptions } from 'typeorm';
|
||||
|
||||
const dataSourceOptions: DataSourceOptions = {
|
||||
type: DatabaseType.Postgres,
|
||||
host: DatabaseHost.toString(),
|
||||
port: DatabasePort.toNumber(),
|
||||
username: DatabaseUsername,
|
||||
password: DatabasePassword,
|
||||
database: DatabaseName,
|
||||
migrationsTableName: 'migrations',
|
||||
migrations: Migrations,
|
||||
migrationsRun: true,
|
||||
entities: Entities,
|
||||
applicationName: 'oneuptime',
|
||||
ssl: ShouldDatabaseSslEnable
|
||||
? {
|
||||
rejectUnauthorized: DatabaseRejectUnauthorized,
|
||||
ca: DatabaseSslCa,
|
||||
key: DatabaseSslKey,
|
||||
cert: DatabaseSslCert,
|
||||
}
|
||||
: false,
|
||||
// logging: 'all',
|
||||
synchronize: false,
|
||||
};
|
||||
|
||||
export default dataSourceOptions;
|
||||
@@ -0,0 +1,14 @@
|
||||
import dataSourceOptions from './DataSourceOptions';
|
||||
import { DataSource } from 'typeorm';
|
||||
|
||||
const dataSourceOptionToMigrate: any = {
|
||||
...dataSourceOptions,
|
||||
host: 'localhost',
|
||||
port: 5400,
|
||||
};
|
||||
|
||||
const PostgresDataSource: DataSource = new DataSource(
|
||||
dataSourceOptionToMigrate
|
||||
);
|
||||
|
||||
export default PostgresDataSource;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,52 @@
|
||||
import { MigrationInterface, QueryRunner, Table, TableColumn } from 'typeorm';
|
||||
|
||||
export class MigrationName1717678334852 implements MigrationInterface {
|
||||
public name: string = 'MigrationName1717678334852';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
// check if the column exists
|
||||
|
||||
const apiKeyPermissionTable: Table | undefined =
|
||||
await queryRunner.getTable('ApiKeyPermission');
|
||||
|
||||
if (apiKeyPermissionTable) {
|
||||
const isBlockPermissionColumn: TableColumn | undefined =
|
||||
apiKeyPermissionTable.columns.find((column: TableColumn) => {
|
||||
return column.name === 'isBlockPermission';
|
||||
});
|
||||
|
||||
if (!isBlockPermissionColumn) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ApiKeyPermission" ADD "isBlockPermission" boolean NOT NULL DEFAULT false`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// check if the column exists
|
||||
|
||||
const teamPermissionTable: Table | undefined =
|
||||
await queryRunner.getTable('TeamPermission');
|
||||
|
||||
if (teamPermissionTable) {
|
||||
const isBlockPermissionColumn: TableColumn | undefined =
|
||||
teamPermissionTable.columns.find((column: TableColumn) => {
|
||||
return column.name === 'isBlockPermission';
|
||||
});
|
||||
|
||||
if (!isBlockPermissionColumn) {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "TeamPermission" ADD "isBlockPermission" boolean NOT NULL DEFAULT false`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "TeamPermission" DROP COLUMN "isBlockPermission"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ApiKeyPermission" DROP COLUMN "isBlockPermission"`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,67 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class MigrationName1717839110671 implements MigrationInterface {
|
||||
public name = 'MigrationName1717839110671';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "ServiceCatalog" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP, "version" integer NOT NULL, "projectId" uuid NOT NULL, "name" character varying(100) NOT NULL, "slug" character varying(100) NOT NULL, "description" character varying(500), "createdByUserId" uuid, "deletedByUserId" uuid, "serviceColor" character varying, CONSTRAINT "PK_5186d54b1b97610ea80b5c55aad" PRIMARY KEY ("_id"))`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_e712ff4cf5c1a865a5baa242e2" ON "ServiceCatalog" ("projectId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "ServiceCatalogLabel" ("serviceCatalogId" uuid NOT NULL, "labelId" uuid NOT NULL, CONSTRAINT "PK_a2c59f364d3bdb0d28307ad1d46" PRIMARY KEY ("serviceCatalogId", "labelId"))`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_98e9d83b6ff61003a29590f398" ON "ServiceCatalogLabel" ("serviceCatalogId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_701f84e45404bdddcffdcaaba2" ON "ServiceCatalogLabel" ("labelId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalog" ADD CONSTRAINT "FK_e712ff4cf5c1a865a5baa242e2e" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalog" ADD CONSTRAINT "FK_b8d64daaf462acd6f694ca47dad" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalog" ADD CONSTRAINT "FK_42f81942e36f5f42a5dce8e606d" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogLabel" ADD CONSTRAINT "FK_98e9d83b6ff61003a29590f3987" FOREIGN KEY ("serviceCatalogId") REFERENCES "ServiceCatalog"("_id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogLabel" ADD CONSTRAINT "FK_701f84e45404bdddcffdcaaba20" FOREIGN KEY ("labelId") REFERENCES "Label"("_id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogLabel" DROP CONSTRAINT "FK_701f84e45404bdddcffdcaaba20"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogLabel" DROP CONSTRAINT "FK_98e9d83b6ff61003a29590f3987"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalog" DROP CONSTRAINT "FK_42f81942e36f5f42a5dce8e606d"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalog" DROP CONSTRAINT "FK_b8d64daaf462acd6f694ca47dad"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalog" DROP CONSTRAINT "FK_e712ff4cf5c1a865a5baa242e2e"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_701f84e45404bdddcffdcaaba2"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_98e9d83b6ff61003a29590f398"`
|
||||
);
|
||||
await queryRunner.query(`DROP TABLE "ServiceCatalogLabel"`);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_e712ff4cf5c1a865a5baa242e2"`
|
||||
);
|
||||
await queryRunner.query(`DROP TABLE "ServiceCatalog"`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,115 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class MigrationName1717849921874 implements MigrationInterface {
|
||||
public name = 'MigrationName1717849921874';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "ServiceCatalogOwnerTeam" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP, "version" integer NOT NULL, "projectId" uuid NOT NULL, "teamId" uuid NOT NULL, "serviceCatalogId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, CONSTRAINT "PK_da84693caf7072d56bedfc2dc1b" PRIMARY KEY ("_id"))`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_726241162b0a853b29d85e28c4" ON "ServiceCatalogOwnerTeam" ("projectId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_e3090773a4106e0c4375897993" ON "ServiceCatalogOwnerTeam" ("teamId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_c015513688ebd42e5569b4d6ac" ON "ServiceCatalogOwnerTeam" ("serviceCatalogId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "ServiceCatalogOwnerUser" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP, "version" integer NOT NULL, "projectId" uuid NOT NULL, "userId" uuid NOT NULL, "serviceCatalogId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, CONSTRAINT "PK_c0fbf81bd041371f8beb69b440d" PRIMARY KEY ("_id"))`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_51c375fe9f6ffb0372d3425d99" ON "ServiceCatalogOwnerUser" ("projectId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_f6abd337058906d7912164ae12" ON "ServiceCatalogOwnerUser" ("userId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_27a396dd77fb8c0d5d6cb89216" ON "ServiceCatalogOwnerUser" ("serviceCatalogId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" ADD CONSTRAINT "FK_726241162b0a853b29d85e28c4c" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" ADD CONSTRAINT "FK_e3090773a4106e0c4375897993f" FOREIGN KEY ("teamId") REFERENCES "Team"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" ADD CONSTRAINT "FK_c015513688ebd42e5569b4d6ac6" FOREIGN KEY ("serviceCatalogId") REFERENCES "ServiceCatalog"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" ADD CONSTRAINT "FK_9afb156569266f66a2301eb09ff" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" ADD CONSTRAINT "FK_0e93a638ddc94aaad4ad33789d7" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" ADD CONSTRAINT "FK_51c375fe9f6ffb0372d3425d999" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" ADD CONSTRAINT "FK_f6abd337058906d7912164ae12e" FOREIGN KEY ("userId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" ADD CONSTRAINT "FK_27a396dd77fb8c0d5d6cb892165" FOREIGN KEY ("serviceCatalogId") REFERENCES "ServiceCatalog"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" ADD CONSTRAINT "FK_2d2c21db8da169b5b2d2bee3111" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" ADD CONSTRAINT "FK_d61607e823057b6516f05e8f1cd" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" DROP CONSTRAINT "FK_d61607e823057b6516f05e8f1cd"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" DROP CONSTRAINT "FK_2d2c21db8da169b5b2d2bee3111"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" DROP CONSTRAINT "FK_27a396dd77fb8c0d5d6cb892165"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" DROP CONSTRAINT "FK_f6abd337058906d7912164ae12e"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerUser" DROP CONSTRAINT "FK_51c375fe9f6ffb0372d3425d999"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" DROP CONSTRAINT "FK_0e93a638ddc94aaad4ad33789d7"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" DROP CONSTRAINT "FK_9afb156569266f66a2301eb09ff"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" DROP CONSTRAINT "FK_c015513688ebd42e5569b4d6ac6"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" DROP CONSTRAINT "FK_e3090773a4106e0c4375897993f"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "ServiceCatalogOwnerTeam" DROP CONSTRAINT "FK_726241162b0a853b29d85e28c4c"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_27a396dd77fb8c0d5d6cb89216"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_f6abd337058906d7912164ae12"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_51c375fe9f6ffb0372d3425d99"`
|
||||
);
|
||||
await queryRunner.query(`DROP TABLE "ServiceCatalogOwnerUser"`);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_c015513688ebd42e5569b4d6ac"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_e3090773a4106e0c4375897993"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_726241162b0a853b29d85e28c4"`
|
||||
);
|
||||
await queryRunner.query(`DROP TABLE "ServiceCatalogOwnerTeam"`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,73 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class MigrationName1717955235341 implements MigrationInterface {
|
||||
public name = 'MigrationName1717955235341';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "CodeRepository" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP, "version" integer NOT NULL, "projectId" uuid NOT NULL, "name" character varying(100) NOT NULL, "slug" character varying(100) NOT NULL, "description" character varying(500), "createdByUserId" uuid, "deletedByUserId" uuid, "secretToken" uuid NOT NULL, CONSTRAINT "PK_7b5219d06a82fbc0bc4540b74f0" PRIMARY KEY ("_id"))`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_a653bdc2fac520c9c8b9a7c7a6" ON "CodeRepository" ("projectId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_789f71951901d03fe3da24dca5" ON "CodeRepository" ("secretToken") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "CodeRepositoryLabel" ("codeRepositoryId" uuid NOT NULL, "labelId" uuid NOT NULL, CONSTRAINT "PK_5adb09e0b5957488be8931f46bc" PRIMARY KEY ("codeRepositoryId", "labelId"))`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_7710ab8ee47601f78f3a4b76b6" ON "CodeRepositoryLabel" ("codeRepositoryId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_8f7d12100e441fc72e02151fc5" ON "CodeRepositoryLabel" ("labelId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepository" ADD CONSTRAINT "FK_a653bdc2fac520c9c8b9a7c7a6a" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepository" ADD CONSTRAINT "FK_a870b71b99c87ea658c11421490" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepository" ADD CONSTRAINT "FK_79d9249eb5f8174a6f6228311f4" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepositoryLabel" ADD CONSTRAINT "FK_7710ab8ee47601f78f3a4b76b64" FOREIGN KEY ("codeRepositoryId") REFERENCES "CodeRepository"("_id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepositoryLabel" ADD CONSTRAINT "FK_8f7d12100e441fc72e02151fc56" FOREIGN KEY ("labelId") REFERENCES "Label"("_id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepositoryLabel" DROP CONSTRAINT "FK_8f7d12100e441fc72e02151fc56"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepositoryLabel" DROP CONSTRAINT "FK_7710ab8ee47601f78f3a4b76b64"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepository" DROP CONSTRAINT "FK_79d9249eb5f8174a6f6228311f4"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepository" DROP CONSTRAINT "FK_a870b71b99c87ea658c11421490"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CodeRepository" DROP CONSTRAINT "FK_a653bdc2fac520c9c8b9a7c7a6a"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_8f7d12100e441fc72e02151fc5"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_7710ab8ee47601f78f3a4b76b6"`
|
||||
);
|
||||
await queryRunner.query(`DROP TABLE "CodeRepositoryLabel"`);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_789f71951901d03fe3da24dca5"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_a653bdc2fac520c9c8b9a7c7a6"`
|
||||
);
|
||||
await queryRunner.query(`DROP TABLE "CodeRepository"`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,51 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class MigrationName1718037833516 implements MigrationInterface {
|
||||
public name = 'MigrationName1718037833516';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "CopilotEvent" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP, "version" integer NOT NULL, "projectId" uuid NOT NULL, "codeRepositoryId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, "filePath" character varying NOT NULL, "commitHash" character varying NOT NULL, "copilotEventType" character varying NOT NULL, CONSTRAINT "PK_df9ab694204304a1416a720bbfc" PRIMARY KEY ("_id"))`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_02c9884520b692949fea5c65f9" ON "CopilotEvent" ("projectId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX "IDX_e9db4a03a7d521b1d242ff3c9a" ON "CopilotEvent" ("codeRepositoryId") `
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CopilotEvent" ADD CONSTRAINT "FK_02c9884520b692949fea5c65f9c" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CopilotEvent" ADD CONSTRAINT "FK_e9db4a03a7d521b1d242ff3c9a2" FOREIGN KEY ("codeRepositoryId") REFERENCES "CodeRepository"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CopilotEvent" ADD CONSTRAINT "FK_7ff1de5682d290b1686848fc5cf" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CopilotEvent" ADD CONSTRAINT "FK_81c5f57878dd2230d2eec3bcb44" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CopilotEvent" DROP CONSTRAINT "FK_81c5f57878dd2230d2eec3bcb44"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CopilotEvent" DROP CONSTRAINT "FK_7ff1de5682d290b1686848fc5cf"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CopilotEvent" DROP CONSTRAINT "FK_e9db4a03a7d521b1d242ff3c9a2"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "CopilotEvent" DROP CONSTRAINT "FK_02c9884520b692949fea5c65f9c"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_e9db4a03a7d521b1d242ff3c9a"`
|
||||
);
|
||||
await queryRunner.query(
|
||||
`DROP INDEX "public"."IDX_02c9884520b692949fea5c65f9"`
|
||||
);
|
||||
await queryRunner.query(`DROP TABLE "CopilotEvent"`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class MigrationName1718100824584 implements MigrationInterface {
|
||||
public name = 'MigrationName1718100824584';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "StatusPageSubscriber" ADD "sendYouHaveSubscribedMessage" boolean NOT NULL DEFAULT true`
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "restrictionTimes" SET DEFAULT '{"_type": "RestrictionTimes", "value": {"restictionType": "None", "dayRestrictionTimes": null, "weeklyRestrictionTimes": []}}'`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class MigrationName1718101665865 implements MigrationInterface {
|
||||
public name = 'MigrationName1718101665865';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "SmsLog" ALTER COLUMN "fromNumber" DROP NOT NULL`
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "SmsLog" ALTER COLUMN "fromNumber" SET NOT NULL`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
import InitialMigration from './1717605043663-InitialMigration';
|
||||
import { MigrationName1717678334852 } from './1717678334852-MigrationName';
|
||||
import { MigrationName1717839110671 } from './1717839110671-MigrationName';
|
||||
import { MigrationName1717849921874 } from './1717849921874-MigrationName';
|
||||
import { MigrationName1717955235341 } from './1717955235341-MigrationName';
|
||||
import { MigrationName1718037833516 } from './1718037833516-MigrationName';
|
||||
import { MigrationName1718100824584 } from './1718100824584-MigrationName';
|
||||
import { MigrationName1718101665865 } from './1718101665865-MigrationName';
|
||||
|
||||
// TypeORM runs migrations in array order, so keep this list sorted by
// timestamp (oldest first) and append new migrations at the end.
export default [
  InitialMigration,
  MigrationName1717678334852,
  MigrationName1717839110671,
  MigrationName1717849921874,
  MigrationName1717955235341,
  MigrationName1718037833516,
  MigrationName1718100824584,
  MigrationName1718101665865,
];
|
||||
@@ -0,0 +1,24 @@
|
||||
import { DatabaseName } from '../../EnvironmentConfig';
|
||||
import DatabaseType from 'Common/Types/DatabaseType';
|
||||
import Faker from 'Common/Utils/Faker';
|
||||
import Entities from 'Model/Models/Index';
|
||||
import { DataSourceOptions } from 'typeorm';
|
||||
|
||||
type GetTestDataSourceOptions = () => DataSourceOptions;
|
||||
|
||||
const getTestDataSourceOptions: GetTestDataSourceOptions =
|
||||
(): DataSourceOptions => {
|
||||
// we use process.env values directly here because it can change during test runs and we need to get the latest values.
|
||||
return {
|
||||
type: DatabaseType.Postgres,
|
||||
host: process.env['DATABASE_HOST'] || 'localhost',
|
||||
port: parseInt(process.env['DATABASE_PORT']?.toString() || '5432'),
|
||||
username: process.env['DATABASE_USERNAME'] || 'postgres',
|
||||
password: process.env['DATABASE_PASSWORD'] || 'password',
|
||||
database: DatabaseName + Faker.randomNumbers(16),
|
||||
entities: Entities,
|
||||
synchronize: true,
|
||||
};
|
||||
};
|
||||
|
||||
export default getTestDataSourceOptions;
|
||||
@@ -1,64 +0,0 @@
|
||||
import {
|
||||
DatabaseHost,
|
||||
DatabaseName,
|
||||
DatabasePassword,
|
||||
DatabasePort,
|
||||
DatabaseRejectUnauthorized,
|
||||
DatabaseSslCa,
|
||||
DatabaseSslCert,
|
||||
DatabaseSslKey,
|
||||
DatabaseUsername,
|
||||
Env,
|
||||
ShouldDatabaseSslEnable,
|
||||
} from '../EnvironmentConfig';
|
||||
import AppEnvironment from 'Common/Types/AppEnvironment';
|
||||
import DatabaseType from 'Common/Types/DatabaseType';
|
||||
import Faker from 'Common/Utils/Faker';
|
||||
import Migrations from 'Model/Migrations/Index';
|
||||
import Entities from 'Model/Models/Index';
|
||||
import { DataSource, DataSourceOptions } from 'typeorm';
|
||||
|
||||
export const dataSourceOptions: DataSourceOptions = {
|
||||
type: DatabaseType.Postgres,
|
||||
host: DatabaseHost.toString(),
|
||||
port: DatabasePort.toNumber(),
|
||||
username: DatabaseUsername,
|
||||
password: DatabasePassword,
|
||||
database: DatabaseName,
|
||||
migrationsTableName: 'migrations',
|
||||
migrations: Migrations,
|
||||
entities: Entities,
|
||||
applicationName: 'oneuptime',
|
||||
ssl: ShouldDatabaseSslEnable
|
||||
? {
|
||||
rejectUnauthorized: DatabaseRejectUnauthorized,
|
||||
ca: DatabaseSslCa,
|
||||
key: DatabaseSslKey,
|
||||
cert: DatabaseSslCert,
|
||||
}
|
||||
: false,
|
||||
// logging: 'all',
|
||||
// synchronize: Env === AppEnvironment.Development,
|
||||
synchronize: true,
|
||||
};
|
||||
|
||||
export const datasource: DataSource = new DataSource(dataSourceOptions);
|
||||
|
||||
type GetTestDataSourceOptions = () => DataSourceOptions;
|
||||
|
||||
export const getTestDataSourceOptions: GetTestDataSourceOptions =
|
||||
(): DataSourceOptions => {
|
||||
// we use process.env values directly here because it can change during test runs and we need to get the latest values.
|
||||
return {
|
||||
type: DatabaseType.Postgres,
|
||||
host: process.env['DATABASE_HOST'] || 'localhost',
|
||||
port: parseInt(process.env['DATABASE_PORT']?.toString() || '5432'),
|
||||
username: process.env['DATABASE_USERNAME'] || 'postgres',
|
||||
password: process.env['DATABASE_PASSWORD'] || 'password',
|
||||
database: DatabaseName + Faker.randomNumbers(16),
|
||||
entities: Entities,
|
||||
synchronize:
|
||||
Env === AppEnvironment.Test ||
|
||||
Env === AppEnvironment.Development,
|
||||
};
|
||||
};
|
||||
@@ -1,5 +1,6 @@
|
||||
import logger from '../Utils/Logger';
|
||||
import { dataSourceOptions, getTestDataSourceOptions } from './PostgresConfig';
|
||||
import dataSourceOptions from './Postgres/DataSourceOptions';
|
||||
import getTestDataSourceOptions from './Postgres/TestDataSourceOptions';
|
||||
import Sleep from 'Common/Types/Sleep';
|
||||
import { DataSource, DataSourceOptions } from 'typeorm';
|
||||
|
||||
|
||||
@@ -4,6 +4,8 @@ import {
|
||||
RedisPassword,
|
||||
RedisPort,
|
||||
RedisTlsCa,
|
||||
RedisTlsCert,
|
||||
RedisTlsKey,
|
||||
RedisTlsSentinelMode,
|
||||
RedisUsername,
|
||||
ShouldRedisTlsEnable,
|
||||
@@ -44,7 +46,11 @@ export default abstract class Redis {
|
||||
};
|
||||
|
||||
if (ShouldRedisTlsEnable) {
|
||||
redisOptions.tls = { ca: RedisTlsCa };
|
||||
redisOptions.tls = {
|
||||
ca: RedisTlsCa || undefined,
|
||||
cert: RedisTlsCert || undefined,
|
||||
key: RedisTlsKey || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
this.client = new RedisClient(redisOptions);
|
||||
|
||||
@@ -113,10 +113,18 @@ export default class AnalyticsDatabaseService<
|
||||
|
||||
const dbResult: ExecResult<Stream> = await this.execute(statement);
|
||||
|
||||
const strResult: string = await StreamUtil.convertStreamToText(
|
||||
let strResult: string = await StreamUtil.convertStreamToText(
|
||||
dbResult.stream
|
||||
);
|
||||
|
||||
// if strResult includes Nullable(type) then extract type.
|
||||
|
||||
if (strResult.includes('Nullable')) {
|
||||
let type: string = strResult.split('Nullable(')[1] as string;
|
||||
type = type.split(')')[0] as string;
|
||||
strResult = type;
|
||||
}
|
||||
|
||||
return (
|
||||
(this.statementGenerator.toTableColumnType(
|
||||
strResult.trim()
|
||||
|
||||
25
CommonServer/Services/CodeRepositoryService.ts
Normal file
25
CommonServer/Services/CodeRepositoryService.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import PostgresDatabase from '../Infrastructure/PostgresDatabase';
|
||||
import CreateBy from '../Types/Database/CreateBy';
|
||||
import { OnCreate } from '../Types/Database/Hooks';
|
||||
import DatabaseService from './DatabaseService';
|
||||
import ObjectID from 'Common/Types/ObjectID';
|
||||
import Model from 'Model/Models/CodeRepository';
|
||||
|
||||
export class Service extends DatabaseService<Model> {
|
||||
public constructor(postgresDatabase?: PostgresDatabase) {
|
||||
super(Model, postgresDatabase);
|
||||
}
|
||||
|
||||
protected override async onBeforeCreate(
|
||||
createBy: CreateBy<Model>
|
||||
): Promise<OnCreate<Model>> {
|
||||
createBy.data.secretToken = ObjectID.generate();
|
||||
|
||||
return {
|
||||
carryForward: null,
|
||||
createBy: createBy,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default new Service();
|
||||
11
CommonServer/Services/CopilotEventService.ts
Normal file
11
CommonServer/Services/CopilotEventService.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import PostgresDatabase from '../Infrastructure/PostgresDatabase';
|
||||
import DatabaseService from './DatabaseService';
|
||||
import Model from 'Model/Models/CopilotEvent';
|
||||
|
||||
// Database CRUD service for CopilotEvent models. All behavior comes from
// the generic DatabaseService base class; no hooks are overridden here.
export class Service extends DatabaseService<Model> {
  public constructor(postgresDatabase?: PostgresDatabase) {
    // Optional postgresDatabase lets callers supply an explicit connection.
    super(Model, postgresDatabase);
  }
}

// Shared singleton instance used throughout the server.
export default new Service();
|
||||
@@ -1389,7 +1389,7 @@ class DatabaseService<TBaseModel extends BaseModel> extends BaseService {
|
||||
skip: updateBy.skip.toNumber(),
|
||||
limit: updateBy.limit.toNumber(),
|
||||
select: selectColumns,
|
||||
props: { ...beforeUpdateBy.props, ignoreHooks: true },
|
||||
props: { isRoot: true, ignoreHooks: true },
|
||||
});
|
||||
|
||||
for (const item of items) {
|
||||
|
||||
@@ -11,6 +11,8 @@ import BillingPaymentMethodsService from './BillingPaymentMethodService';
|
||||
import BillingService from './BillingService';
|
||||
import CallLogService from './CallLogService';
|
||||
import CallService from './CallService';
|
||||
import CodeRepositoryService from './CodeRepositoryService';
|
||||
import CopilotEventService from './CopilotEventService';
|
||||
import DataMigrationService from './DataMigrationService';
|
||||
import DomainService from './DomainService';
|
||||
import EmailLogService from './EmailLogService';
|
||||
@@ -79,6 +81,9 @@ import ScheduledMaintenancePublicNoteService from './ScheduledMaintenancePublicN
|
||||
import ScheduledMaintenanceService from './ScheduledMaintenanceService';
|
||||
import ScheduledMaintenanceStateService from './ScheduledMaintenanceStateService';
|
||||
import ScheduledMaintenanceStateTimelineService from './ScheduledMaintenanceStateTimelineService';
|
||||
import ServiceCatalogOwnerTeamService from './ServiceCatalogOwnerTeamService';
|
||||
import ServiceCatalogOwnerUserService from './ServiceCatalogOwnerUserService';
|
||||
import ServiceCatalogService from './ServiceCatalogService';
|
||||
import ShortLinkService from './ShortLinkService';
|
||||
// SMS Log Service
|
||||
import SmsLogService from './SmsLogService';
|
||||
@@ -246,6 +251,13 @@ const services: Array<BaseService> = [
|
||||
|
||||
UsageBillingService,
|
||||
ProjectCallSMSConfigService,
|
||||
|
||||
ServiceCatalogService,
|
||||
ServiceCatalogOwnerTeamService,
|
||||
ServiceCatalogOwnerUserService,
|
||||
|
||||
CodeRepositoryService,
|
||||
CopilotEventService,
|
||||
];
|
||||
|
||||
export const AnalyticsServices: Array<
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
import DatabaseConfig from '../DatabaseConfig';
|
||||
import { IsBillingEnabled } from '../EnvironmentConfig';
|
||||
import {
|
||||
AllowedActiveMonitorCountInFreePlan,
|
||||
IsBillingEnabled,
|
||||
} from '../EnvironmentConfig';
|
||||
import PostgresDatabase from '../Infrastructure/PostgresDatabase';
|
||||
import { ActiveMonitoringMeteredPlan } from '../Types/Billing/MeteredPlan/AllMeteredPlans';
|
||||
import CreateBy from '../Types/Database/CreateBy';
|
||||
import { OnCreate, OnDelete, OnUpdate } from '../Types/Database/Hooks';
|
||||
import QueryHelper from '../Types/Database/QueryHelper';
|
||||
import DatabaseService from './DatabaseService';
|
||||
import MonitorOwnerTeamService from './MonitorOwnerTeamService';
|
||||
import MonitorOwnerUserService from './MonitorOwnerUserService';
|
||||
@@ -11,10 +15,12 @@ import MonitorProbeService from './MonitorProbeService';
|
||||
import MonitorStatusService from './MonitorStatusService';
|
||||
import MonitorStatusTimelineService from './MonitorStatusTimelineService';
|
||||
import ProbeService from './ProbeService';
|
||||
import ProjectService, { CurrentPlan } from './ProjectService';
|
||||
import TeamMemberService from './TeamMemberService';
|
||||
import URL from 'Common/Types/API/URL';
|
||||
import DatabaseCommonInteractionProps from 'Common/Types/BaseDatabase/DatabaseCommonInteractionProps';
|
||||
import SortOrder from 'Common/Types/BaseDatabase/SortOrder';
|
||||
import { PlanSelect } from 'Common/Types/Billing/SubscriptionPlan';
|
||||
import { LIMIT_PER_PROJECT } from 'Common/Types/Database/LimitMax';
|
||||
import BadDataException from 'Common/Types/Exception/BadDataException';
|
||||
import { JSONObject } from 'Common/Types/JSON';
|
||||
@@ -22,6 +28,7 @@ import MonitorType, {
|
||||
MonitorTypeHelper,
|
||||
} from 'Common/Types/Monitor/MonitorType';
|
||||
import ObjectID from 'Common/Types/ObjectID';
|
||||
import PositiveNumber from 'Common/Types/PositiveNumber';
|
||||
import Typeof from 'Common/Types/Typeof';
|
||||
import Model from 'Model/Models/Monitor';
|
||||
import MonitorOwnerTeam from 'Model/Models/MonitorOwnerTeam';
|
||||
@@ -93,6 +100,43 @@ export class Service extends DatabaseService<Model> {
|
||||
);
|
||||
}
|
||||
|
||||
if (IsBillingEnabled && createBy.props.tenantId) {
|
||||
const currentPlan: CurrentPlan =
|
||||
await ProjectService.getCurrentPlan(createBy.props.tenantId);
|
||||
|
||||
if (currentPlan.isSubscriptionUnpaid) {
|
||||
throw new BadDataException(
|
||||
'Your subscription is unpaid. Please update your payment method and pay all the outstanding invoices to add more monitors.'
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
currentPlan.plan === PlanSelect.Free &&
|
||||
createBy.data.monitorType !== MonitorType.Manual
|
||||
) {
|
||||
const monitorCount: PositiveNumber = await this.countBy({
|
||||
query: {
|
||||
projectId: createBy.props.tenantId,
|
||||
monitorType: QueryHelper.any(
|
||||
MonitorTypeHelper.getActiveMonitorTypes()
|
||||
),
|
||||
},
|
||||
props: {
|
||||
isRoot: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (
|
||||
monitorCount.toNumber() >=
|
||||
AllowedActiveMonitorCountInFreePlan
|
||||
) {
|
||||
throw new BadDataException(
|
||||
`You have reached the maximum allowed monitor limit for the free plan. Please upgrade your plan to add more monitors.`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (createBy.data.monitorType === MonitorType.Server) {
|
||||
createBy.data.serverMonitorSecretKey = ObjectID.generate();
|
||||
}
|
||||
|
||||
@@ -50,6 +50,11 @@ import TeamPermission from 'Model/Models/TeamPermission';
|
||||
import User from 'Model/Models/User';
|
||||
import { In } from 'typeorm';
|
||||
|
||||
export interface CurrentPlan {
|
||||
plan: PlanSelect | null;
|
||||
isSubscriptionUnpaid: boolean;
|
||||
}
|
||||
|
||||
export class Service extends DatabaseService<Model> {
|
||||
public constructor(postgresDatabase?: PostgresDatabase) {
|
||||
super(Model, postgresDatabase);
|
||||
@@ -959,9 +964,7 @@ export class Service extends DatabaseService<Model> {
|
||||
return onDelete;
|
||||
}
|
||||
|
||||
public async getCurrentPlan(
|
||||
projectId: ObjectID
|
||||
): Promise<{ plan: PlanSelect | null; isSubscriptionUnpaid: boolean }> {
|
||||
public async getCurrentPlan(projectId: ObjectID): Promise<CurrentPlan> {
|
||||
if (!IsBillingEnabled) {
|
||||
return { plan: null, isSubscriptionUnpaid: false };
|
||||
}
|
||||
|
||||
11
CommonServer/Services/ServiceCatalogOwnerTeamService.ts
Normal file
11
CommonServer/Services/ServiceCatalogOwnerTeamService.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import PostgresDatabase from '../Infrastructure/PostgresDatabase';
|
||||
import DatabaseService from './DatabaseService';
|
||||
import Model from 'Model/Models/ServiceCatalogOwnerTeam';
|
||||
|
||||
// Database CRUD service for ServiceCatalogOwnerTeam models. All behavior
// comes from the generic DatabaseService base class; no hooks are
// overridden here.
export class Service extends DatabaseService<Model> {
  public constructor(postgresDatabase?: PostgresDatabase) {
    // Optional postgresDatabase lets callers supply an explicit connection.
    super(Model, postgresDatabase);
  }
}

// Shared singleton instance used throughout the server.
export default new Service();
|
||||
11
CommonServer/Services/ServiceCatalogOwnerUserService.ts
Normal file
11
CommonServer/Services/ServiceCatalogOwnerUserService.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import PostgresDatabase from '../Infrastructure/PostgresDatabase';
|
||||
import DatabaseService from './DatabaseService';
|
||||
import Model from 'Model/Models/ServiceCatalogOwnerUser';
|
||||
|
||||
// Database CRUD service for ServiceCatalogOwnerUser models. All behavior
// comes from the generic DatabaseService base class; no hooks are
// overridden here.
export class Service extends DatabaseService<Model> {
  public constructor(postgresDatabase?: PostgresDatabase) {
    // Optional postgresDatabase lets callers supply an explicit connection.
    super(Model, postgresDatabase);
  }
}

// Shared singleton instance used throughout the server.
export default new Service();
|
||||
27
CommonServer/Services/ServiceCatalogService.ts
Normal file
27
CommonServer/Services/ServiceCatalogService.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import PostgresDatabase from '../Infrastructure/PostgresDatabase';
|
||||
import CreateBy from '../Types/Database/CreateBy';
|
||||
import { OnCreate } from '../Types/Database/Hooks';
|
||||
import DatabaseService from './DatabaseService';
|
||||
import ArrayUtil from 'Common/Types/ArrayUtil';
|
||||
import { BrightColors } from 'Common/Types/BrandColors';
|
||||
import Model from 'Model/Models/ServiceCatalog';
|
||||
|
||||
export class Service extends DatabaseService<Model> {
|
||||
public constructor(postgresDatabase?: PostgresDatabase) {
|
||||
super(Model, postgresDatabase);
|
||||
}
|
||||
|
||||
protected override async onBeforeCreate(
|
||||
createBy: CreateBy<Model>
|
||||
): Promise<OnCreate<Model>> {
|
||||
// select a random color.
|
||||
createBy.data.serviceColor = ArrayUtil.selectItemByRandom(BrightColors);
|
||||
|
||||
return {
|
||||
carryForward: null,
|
||||
createBy: createBy,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default new Service();
|
||||
@@ -7,7 +7,9 @@ import QueryHelper from '../Types/Database/QueryHelper';
|
||||
import logger from '../Utils/Logger';
|
||||
import DatabaseService from './DatabaseService';
|
||||
import MailService from './MailService';
|
||||
import ProjectCallSMSConfigService from './ProjectCallSMSConfigService';
|
||||
import ProjectService from './ProjectService';
|
||||
import SmsService from './SmsService';
|
||||
import StatusPageService from './StatusPageService';
|
||||
import { FileRoute } from 'Common/ServiceRoute';
|
||||
import Hostname from 'Common/Types/API/Hostname';
|
||||
@@ -144,31 +146,86 @@ export class Service extends DatabaseService<Model> {
|
||||
onCreate: OnCreate<Model>,
|
||||
createdItem: Model
|
||||
): Promise<Model> {
|
||||
if (!createdItem.statusPageId) {
|
||||
return createdItem;
|
||||
}
|
||||
|
||||
const statusPageURL: string = await StatusPageService.getStatusPageURL(
|
||||
createdItem.statusPageId
|
||||
);
|
||||
|
||||
const statusPageName: string =
|
||||
onCreate.carryForward.pageTitle ||
|
||||
onCreate.carryForward.name ||
|
||||
'Status Page';
|
||||
|
||||
const host: Hostname = await DatabaseConfig.getHost();
|
||||
|
||||
const httpProtocol: Protocol = await DatabaseConfig.getHttpProtocol();
|
||||
|
||||
const unsubscribeLink: string = this.getUnsubscribeLink(
|
||||
URL.fromString(statusPageURL),
|
||||
createdItem.id!
|
||||
).toString();
|
||||
|
||||
if (
|
||||
createdItem.statusPageId &&
|
||||
createdItem.subscriberPhone &&
|
||||
createdItem._id &&
|
||||
createdItem.sendYouHaveSubscribedMessage
|
||||
) {
|
||||
const statusPage: StatusPage | null =
|
||||
await StatusPageService.findOneBy({
|
||||
query: {
|
||||
_id: createdItem.statusPageId.toString(),
|
||||
},
|
||||
select: {
|
||||
callSmsConfig: {
|
||||
_id: true,
|
||||
twilioAccountSID: true,
|
||||
twilioAuthToken: true,
|
||||
twilioPhoneNumber: true,
|
||||
},
|
||||
},
|
||||
props: {
|
||||
isRoot: true,
|
||||
ignoreHooks: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!statusPage) {
|
||||
return createdItem;
|
||||
}
|
||||
|
||||
SmsService.sendSms(
|
||||
{
|
||||
to: createdItem.subscriberPhone,
|
||||
message: `You have been subscribed to ${statusPageName}. To unsubscribe, click on the link: ${unsubscribeLink}`,
|
||||
},
|
||||
{
|
||||
projectId: createdItem.projectId,
|
||||
isSensitive: false,
|
||||
customTwilioConfig:
|
||||
ProjectCallSMSConfigService.toTwilioConfig(
|
||||
statusPage.callSmsConfig
|
||||
),
|
||||
}
|
||||
).catch((err: Error) => {
|
||||
logger.error(err);
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
createdItem.statusPageId &&
|
||||
createdItem.subscriberEmail &&
|
||||
createdItem._id
|
||||
createdItem._id &&
|
||||
createdItem.sendYouHaveSubscribedMessage
|
||||
) {
|
||||
// Call mail service and send an email.
|
||||
|
||||
// get status page domain for this status page.
|
||||
// if the domain is not found, use the internal status page preview link.
|
||||
|
||||
const statusPageURL: string =
|
||||
await StatusPageService.getStatusPageURL(
|
||||
createdItem.statusPageId
|
||||
);
|
||||
|
||||
const statusPageName: string =
|
||||
onCreate.carryForward.pageTitle ||
|
||||
onCreate.carryForward.name ||
|
||||
'Status Page';
|
||||
|
||||
const host: Hostname = await DatabaseConfig.getHost();
|
||||
|
||||
const httpProtocol: Protocol =
|
||||
await DatabaseConfig.getHttpProtocol();
|
||||
|
||||
MailService.sendMail(
|
||||
{
|
||||
toEmail: createdItem.subscriberEmail,
|
||||
@@ -189,10 +246,7 @@ export class Service extends DatabaseService<Model> {
|
||||
.isPublicStatusPage
|
||||
? 'true'
|
||||
: 'false',
|
||||
unsubscribeUrl: this.getUnsubscribeLink(
|
||||
URL.fromString(statusPageURL),
|
||||
createdItem.id!
|
||||
).toString(),
|
||||
unsubscribeUrl: unsubscribeLink,
|
||||
},
|
||||
subject: 'You have been subscribed to ' + statusPageName,
|
||||
},
|
||||
|
||||
@@ -230,12 +230,10 @@ describe('StatementGenerator', () => {
|
||||
}),
|
||||
]);
|
||||
|
||||
/* eslint-disable prettier/prettier */
|
||||
expectStatement(statement, SQL`
|
||||
column_1 String NOT NULL,
|
||||
column_2 Int32 NULL
|
||||
`);
|
||||
/* eslint-enable prettier/prettier */
|
||||
expectStatement(
|
||||
statement,
|
||||
SQL`column_1 String, column_2 Nullable(Int32)`
|
||||
);
|
||||
});
|
||||
|
||||
test('should support nested models', () => {
|
||||
@@ -280,15 +278,10 @@ describe('StatementGenerator', () => {
|
||||
}),
|
||||
]);
|
||||
|
||||
/* eslint-disable prettier/prettier */
|
||||
expectStatement(statement, SQL`
|
||||
column_1 String NOT NULL,
|
||||
column_2 Nested NULL (
|
||||
nested_column_1 String NOT NULL,
|
||||
nested_column_2 Int32 NULL
|
||||
)
|
||||
`);
|
||||
/* eslint-enable prettier/prettier */
|
||||
expectStatement(
|
||||
statement,
|
||||
SQL`column_1 String, column_2 Nullable(Nested) (nested_column_1 String, nested_column_2 Nullable(Int32))`
|
||||
);
|
||||
});
|
||||
|
||||
test('should not add NULL|NOT NULL to Array types', () => {
|
||||
@@ -309,12 +302,10 @@ describe('StatementGenerator', () => {
|
||||
}),
|
||||
]);
|
||||
|
||||
/* eslint-disable prettier/prettier */
|
||||
expectStatement(statement, SQL`
|
||||
column_1 Array(String) NOT NULL,
|
||||
column_2 Array(Int32) NULL
|
||||
`);
|
||||
/* eslint-enable prettier/prettier */
|
||||
expectStatement(
|
||||
statement,
|
||||
SQL`column_1 Array(String), column_2 Nullable(Array(Int32))`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -562,36 +562,26 @@ export default class StatementGenerator<TBaseModel extends AnalyticsBaseModel> {
|
||||
}
|
||||
|
||||
public toColumnsCreateStatement(
|
||||
tableColumns: Array<AnalyticsTableColumn>,
|
||||
isNestedModel: boolean = false
|
||||
tableColumns: Array<AnalyticsTableColumn>
|
||||
): Statement {
|
||||
const columns: Statement = new Statement();
|
||||
|
||||
// indent so combines nicely with toTableCreateStatement()
|
||||
const indent: Statement = SQL` `;
|
||||
|
||||
for (let i: number = 0; i < tableColumns.length; i++) {
|
||||
const column: AnalyticsTableColumn = tableColumns[i]!;
|
||||
|
||||
if (i !== 0) {
|
||||
columns.append(SQL`,\n`);
|
||||
}
|
||||
if (isNestedModel) {
|
||||
columns.append(SQL` `);
|
||||
columns.append(SQL`, `);
|
||||
}
|
||||
|
||||
let nestedModelColumns: Statement | null = null;
|
||||
|
||||
if (column.type === TableColumnType.NestedModel) {
|
||||
nestedModelColumns = SQL`(\n`
|
||||
nestedModelColumns = SQL`(`
|
||||
.append(
|
||||
this.toColumnsCreateStatement(
|
||||
column.nestedModel!.tableColumns,
|
||||
true
|
||||
column.nestedModel!.tableColumns
|
||||
)
|
||||
)
|
||||
.append(SQL`\n`)
|
||||
.append(indent)
|
||||
.append(SQL`)`);
|
||||
}
|
||||
|
||||
@@ -600,11 +590,16 @@ export default class StatementGenerator<TBaseModel extends AnalyticsBaseModel> {
|
||||
const keyStatement: string = column.key;
|
||||
|
||||
columns
|
||||
.append(indent)
|
||||
.append(keyStatement)
|
||||
.append(SQL` `)
|
||||
.append(this.toColumnType(column.type))
|
||||
.append(column.required ? SQL` NOT NULL` : SQL` NULL`);
|
||||
.append(
|
||||
column.required
|
||||
? this.toColumnType(column.type)
|
||||
: SQL`Nullable(`
|
||||
.append(this.toColumnType(column.type))
|
||||
.append(SQL`)`)
|
||||
);
|
||||
|
||||
if (nestedModelColumns) {
|
||||
columns.append(SQL` `).append(nestedModelColumns);
|
||||
}
|
||||
@@ -665,9 +660,9 @@ export default class StatementGenerator<TBaseModel extends AnalyticsBaseModel> {
|
||||
const statement: Statement = SQL`
|
||||
ALTER TABLE ${this.database.getDatasourceOptions().database!}.${
|
||||
this.model.tableName
|
||||
}
|
||||
ADD COLUMN IF NOT EXISTS
|
||||
`.append(this.toColumnsCreateStatement([column], false));
|
||||
} ADD COLUMN IF NOT EXISTS `.append(
|
||||
this.toColumnsCreateStatement([column])
|
||||
);
|
||||
|
||||
logger.debug(`${this.model.tableName} Add Column Statement`);
|
||||
logger.debug(statement);
|
||||
@@ -676,8 +671,8 @@ export default class StatementGenerator<TBaseModel extends AnalyticsBaseModel> {
|
||||
}
|
||||
|
||||
public toDropColumnStatement(columnName: string): string {
|
||||
const statement: string = `
|
||||
ALTER TABLE ${this.database.getDatasourceOptions().database!}.${
|
||||
const statement: string = `ALTER TABLE ${this.database.getDatasourceOptions()
|
||||
.database!}.${
|
||||
this.model.tableName
|
||||
} DROP COLUMN IF EXISTS ${columnName}`;
|
||||
|
||||
|
||||
78
CommonServer/Utils/CodeRepository/CodeRepository.ts
Normal file
78
CommonServer/Utils/CodeRepository/CodeRepository.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import Execute from '../Execute';
|
||||
import CodeRepositoryFile from './CodeRepositoryFile';
|
||||
|
||||
export default class CodeRepositoryUtil {
|
||||
public static async getGitCommitHashForFile(
|
||||
filePath: string
|
||||
): Promise<string> {
|
||||
return await Execute.executeCommand(
|
||||
`git log -1 --pretty=format:"%H" ${filePath}`
|
||||
);
|
||||
}
|
||||
|
||||
public static async getFilesInDirectory(directoryPath: string): Promise<{
|
||||
files: Array<CodeRepositoryFile>;
|
||||
subDirectories: Array<string>;
|
||||
}> {
|
||||
const files: Array<CodeRepositoryFile> = [];
|
||||
const output: string = await Execute.executeCommand(
|
||||
`ls ${directoryPath}`
|
||||
);
|
||||
|
||||
const fileNames: Array<string> = output.split('\n');
|
||||
|
||||
const subDirectories: Array<string> = [];
|
||||
|
||||
for (const fileName of fileNames) {
|
||||
if (fileName === '') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const isDirectory: boolean = (
|
||||
await Execute.executeCommand(
|
||||
`file ${directoryPath}/${fileName}`
|
||||
)
|
||||
).includes('directory');
|
||||
|
||||
if (isDirectory) {
|
||||
subDirectories.push(`${directoryPath}/${fileName}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const filePath: string = `${directoryPath}/${fileName}`;
|
||||
const gitCommitHash: string = await this.getGitCommitHashForFile(
|
||||
filePath
|
||||
);
|
||||
const fileExtension: string = fileName.split('.').pop() || '';
|
||||
files.push({
|
||||
filePath,
|
||||
gitCommitHash,
|
||||
fileExtension,
|
||||
fileName,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
files,
|
||||
subDirectories: subDirectories,
|
||||
};
|
||||
}
|
||||
|
||||
public static async getFilesInDirectoryRecursive(
|
||||
directoryPath: string
|
||||
): Promise<Array<CodeRepositoryFile>> {
|
||||
const files: Array<CodeRepositoryFile> = [];
|
||||
|
||||
const { files: filesInDirectory, subDirectories } =
|
||||
await this.getFilesInDirectory(directoryPath);
|
||||
files.push(...filesInDirectory);
|
||||
|
||||
for (const subDirectory of subDirectories) {
|
||||
files.push(
|
||||
...(await this.getFilesInDirectoryRecursive(subDirectory))
|
||||
);
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
}
|
||||
6
CommonServer/Utils/CodeRepository/CodeRepositoryFile.ts
Normal file
6
CommonServer/Utils/CodeRepository/CodeRepositoryFile.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export default interface CodeRepositoryFile {
|
||||
filePath: string;
|
||||
gitCommitHash: string;
|
||||
fileExtension: string;
|
||||
fileName: string;
|
||||
}
|
||||
0
CommonServer/Utils/CodeRepository/GitHub/Index.ts
Normal file
0
CommonServer/Utils/CodeRepository/GitHub/Index.ts
Normal file
24
CommonServer/Utils/Execute.ts
Normal file
24
CommonServer/Utils/Execute.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { PromiseRejectErrorFunction } from 'Common/Types/FunctionTypes';
|
||||
import { ExecException, exec } from 'node:child_process';
|
||||
|
||||
export default class Execute {
|
||||
public static executeCommand(command: string): Promise<string> {
|
||||
return new Promise(
|
||||
(
|
||||
resolve: (output: string) => void,
|
||||
reject: PromiseRejectErrorFunction
|
||||
) => {
|
||||
exec(
|
||||
`${command}`,
|
||||
(err: ExecException | null, stdout: string) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
return resolve(stdout);
|
||||
}
|
||||
);
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -7,6 +7,14 @@ import { JSONObject } from 'Common/Types/JSON';
|
||||
export type LogBody = string | JSONObject | Exception | Error | unknown;
|
||||
|
||||
export default class logger {
|
||||
public static getLogLevel(): ConfigLogLevel {
|
||||
if (!LogLevel) {
|
||||
return ConfigLogLevel.INFO;
|
||||
}
|
||||
|
||||
return LogLevel;
|
||||
}
|
||||
|
||||
public static serializeLogBody(body: LogBody): string {
|
||||
if (typeof body === 'string') {
|
||||
return body;
|
||||
@@ -17,9 +25,11 @@ export default class logger {
|
||||
}
|
||||
|
||||
public static info(message: LogBody): void {
|
||||
const logLevel: ConfigLogLevel = this.getLogLevel();
|
||||
|
||||
if (
|
||||
LogLevel === ConfigLogLevel.DEBUG ||
|
||||
LogLevel === ConfigLogLevel.INFO
|
||||
logLevel === ConfigLogLevel.DEBUG ||
|
||||
logLevel === ConfigLogLevel.INFO
|
||||
) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.info(message);
|
||||
@@ -32,11 +42,13 @@ export default class logger {
|
||||
}
|
||||
|
||||
public static error(message: LogBody): void {
|
||||
const logLevel: ConfigLogLevel = this.getLogLevel();
|
||||
|
||||
if (
|
||||
LogLevel === ConfigLogLevel.DEBUG ||
|
||||
LogLevel === ConfigLogLevel.INFO ||
|
||||
LogLevel === ConfigLogLevel.WARN ||
|
||||
LogLevel === ConfigLogLevel.ERROR
|
||||
logLevel === ConfigLogLevel.DEBUG ||
|
||||
logLevel === ConfigLogLevel.INFO ||
|
||||
logLevel === ConfigLogLevel.WARN ||
|
||||
logLevel === ConfigLogLevel.ERROR
|
||||
) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(message);
|
||||
@@ -49,10 +61,12 @@ export default class logger {
|
||||
}
|
||||
|
||||
public static warn(message: LogBody): void {
|
||||
const logLevel: ConfigLogLevel = this.getLogLevel();
|
||||
|
||||
if (
|
||||
LogLevel === ConfigLogLevel.DEBUG ||
|
||||
LogLevel === ConfigLogLevel.INFO ||
|
||||
LogLevel === ConfigLogLevel.WARN
|
||||
logLevel === ConfigLogLevel.DEBUG ||
|
||||
logLevel === ConfigLogLevel.INFO ||
|
||||
logLevel === ConfigLogLevel.WARN
|
||||
) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn(message);
|
||||
@@ -65,7 +79,9 @@ export default class logger {
|
||||
}
|
||||
|
||||
public static debug(message: LogBody): void {
|
||||
if (LogLevel === ConfigLogLevel.DEBUG) {
|
||||
const logLevel: ConfigLogLevel = this.getLogLevel();
|
||||
|
||||
if (logLevel === ConfigLogLevel.DEBUG) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.debug(message);
|
||||
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
import OpenTelemetryAPI, { Meter } from '@opentelemetry/api';
|
||||
import { Logger, logs } from '@opentelemetry/api-logs';
|
||||
import {
|
||||
Counter,
|
||||
Histogram,
|
||||
MetricOptions,
|
||||
ObservableGauge,
|
||||
} from '@opentelemetry/api/build/src/metrics/Metric';
|
||||
import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
|
||||
import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http';
|
||||
import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-proto';
|
||||
@@ -10,7 +17,10 @@ import {
|
||||
BatchLogRecordProcessor,
|
||||
LoggerProvider,
|
||||
} from '@opentelemetry/sdk-logs';
|
||||
import { PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics';
|
||||
import {
|
||||
MeterProvider,
|
||||
PeriodicExportingMetricReader,
|
||||
} from '@opentelemetry/sdk-metrics';
|
||||
import * as opentelemetry from '@opentelemetry/sdk-node';
|
||||
import { SpanExporter } from '@opentelemetry/sdk-trace-node';
|
||||
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
|
||||
@@ -20,9 +30,13 @@ import Dictionary from 'Common/Types/Dictionary';
|
||||
// Enable this line to see debug logs
|
||||
// diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
|
||||
|
||||
const serviceName: string = process.env['SERVICE_NAME'] || 'oneuptime';
|
||||
|
||||
export default class Telemetry {
|
||||
public static sdk: opentelemetry.NodeSDK | null = null;
|
||||
public static logger: Logger | null = null;
|
||||
public static meter: Meter | null = null;
|
||||
public static meterProvider: MeterProvider | null = null;
|
||||
|
||||
public static getHeaders(): Dictionary<string> {
|
||||
if (!process.env['OPENTELEMETRY_EXPORTER_OTLP_HEADERS']) {
|
||||
@@ -182,8 +196,92 @@ export default class Telemetry {
|
||||
|
||||
return this.logger!;
|
||||
}
|
||||
|
||||
public static getMeterProvider(): MeterProvider {
|
||||
if (!this.meterProvider) {
|
||||
this.meterProvider = new MeterProvider();
|
||||
OpenTelemetryAPI.metrics.setGlobalMeterProvider(this.meterProvider);
|
||||
}
|
||||
|
||||
return this.meterProvider;
|
||||
}
|
||||
|
||||
public static getMeter(): Meter {
|
||||
if (!this.meter) {
|
||||
this.meter = OpenTelemetryAPI.metrics.getMeter('default');
|
||||
}
|
||||
|
||||
return this.meter;
|
||||
}
|
||||
|
||||
public static getCounter(data: {
|
||||
name: string;
|
||||
description: string;
|
||||
unit?: string;
|
||||
}): Counter {
|
||||
const { name, description } = data;
|
||||
|
||||
const metricOptions: MetricOptions = {
|
||||
description: description,
|
||||
};
|
||||
|
||||
if (data.unit) {
|
||||
metricOptions.unit = data.unit;
|
||||
}
|
||||
|
||||
const counter: Counter<opentelemetry.api.Attributes> =
|
||||
this.getMeter().createCounter(name, metricOptions);
|
||||
|
||||
return counter;
|
||||
}
|
||||
|
||||
// guage
|
||||
|
||||
public static getGauge(data: {
|
||||
name: string;
|
||||
description: string;
|
||||
unit?: string;
|
||||
}): ObservableGauge {
|
||||
const { name, description } = data;
|
||||
|
||||
const metricOptions: MetricOptions = {
|
||||
description: description,
|
||||
};
|
||||
|
||||
if (data.unit) {
|
||||
metricOptions.unit = data.unit;
|
||||
}
|
||||
|
||||
const guage: ObservableGauge<opentelemetry.api.Attributes> =
|
||||
this.getMeter().createObservableGauge(name, metricOptions);
|
||||
|
||||
return guage;
|
||||
}
|
||||
|
||||
// histogram
|
||||
|
||||
public static getHistogram(data: {
|
||||
name: string;
|
||||
description: string;
|
||||
unit?: string;
|
||||
}): Histogram {
|
||||
const { name, description } = data;
|
||||
|
||||
const metricOptions: MetricOptions = {
|
||||
description: description,
|
||||
};
|
||||
|
||||
if (data.unit) {
|
||||
metricOptions.unit = data.unit;
|
||||
}
|
||||
|
||||
const histogram: Histogram<opentelemetry.api.Attributes> =
|
||||
this.getMeter().createHistogram(name, metricOptions);
|
||||
|
||||
return histogram;
|
||||
}
|
||||
}
|
||||
|
||||
Telemetry.init({
|
||||
serviceName: process.env['SERVICE_NAME'] || 'oneuptime',
|
||||
serviceName: serviceName,
|
||||
});
|
||||
|
||||
2
CommonServer/package-lock.json
generated
2
CommonServer/package-lock.json
generated
@@ -78,7 +78,7 @@
|
||||
"json5": "^2.2.3",
|
||||
"moment": "^2.30.1",
|
||||
"moment-timezone": "^0.5.45",
|
||||
"posthog-js": "^1.130.1",
|
||||
"posthog-js": "^1.131.4",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
"slugify": "^1.6.5",
|
||||
"typeorm": "^0.3.20",
|
||||
|
||||
6
CommonUI/package-lock.json
generated
6
CommonUI/package-lock.json
generated
@@ -33,7 +33,7 @@
|
||||
"prop-types": "^15.8.1",
|
||||
"react": "^18.3.1",
|
||||
"react-beautiful-dnd": "^13.1.1",
|
||||
"react-big-calendar": "^1.11.2",
|
||||
"react-big-calendar": "^1.12.2",
|
||||
"react-color": "^2.19.3",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-dropzone": "^14.2.2",
|
||||
@@ -15894,7 +15894,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/react-big-calendar": {
|
||||
"version": "1.12.1",
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/react-big-calendar/-/react-big-calendar-1.12.2.tgz",
|
||||
"integrity": "sha512-cPVcwH5V1YiC6QKaV4afvpuZ2DtP8+TocnZY98nGodqq8bfjVDiP3Ch+TewBZzj9mg7JbewHdufDZXZBqQl1lw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.20.7",
|
||||
|
||||
@@ -38,7 +38,7 @@
|
||||
"prop-types": "^15.8.1",
|
||||
"react": "^18.3.1",
|
||||
"react-beautiful-dnd": "^13.1.1",
|
||||
"react-big-calendar": "^1.11.2",
|
||||
"react-big-calendar": "^1.12.2",
|
||||
"react-color": "^2.19.3",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-dropzone": "^14.2.2",
|
||||
|
||||
@@ -86,7 +86,10 @@ const DuplicateModel: <TBaseModel extends BaseModel>(
|
||||
Navigation.navigate(
|
||||
new Route(props.navigateToOnSuccess.toString()).addRoute(
|
||||
`/${newItem.data.id!.toString()}`
|
||||
)
|
||||
),
|
||||
{
|
||||
forceNavigate: true,
|
||||
}
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
|
||||
@@ -86,7 +86,7 @@ const EventItem: FunctionComponent<ComponentProps> = (
|
||||
</div>
|
||||
<div className="mt-5">
|
||||
<h2
|
||||
className="active-event-box-body-title"
|
||||
className={`active-event-box-body-title event-${props.eventType.toLowerCase()}-box-body-title`}
|
||||
style={{
|
||||
fontSize: props.isDetailItem ? '20px' : '16px',
|
||||
}}
|
||||
@@ -95,7 +95,9 @@ const EventItem: FunctionComponent<ComponentProps> = (
|
||||
</h2>
|
||||
</div>
|
||||
{props.eventDescription && (
|
||||
<div className="mt-2 text-sm active-event-box-body-description">
|
||||
<div
|
||||
className={`mt-2 text-sm active-event-box-body-description event-${props.eventType.toLowerCase()}-box-body-description`}
|
||||
>
|
||||
<MarkdownViewer text={props.eventDescription || ''} />
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -0,0 +1,324 @@
|
||||
import Button, { ButtonSize, ButtonStyleType } from '../Button/Button';
|
||||
import Input, { InputType } from '../Input/Input';
|
||||
import FieldType from '../Types/FieldType';
|
||||
import Filter from './Types/Filter';
|
||||
import FilterData from './Types/FilterData';
|
||||
import InBetween from 'Common/Types/BaseDatabase/InBetween';
|
||||
import OneUptimeDate from 'Common/Types/Date';
|
||||
import GenericObject from 'Common/Types/GenericObject';
|
||||
import React, { ReactElement, useEffect } from 'react';
|
||||
|
||||
export interface ComponentProps<T extends GenericObject> {
|
||||
filter: Filter<T>;
|
||||
onFilterChanged?: undefined | ((filterData: FilterData<T>) => void);
|
||||
filterData: FilterData<T>;
|
||||
}
|
||||
|
||||
type DateFilterFunction = <T extends GenericObject>(
|
||||
props: ComponentProps<T>
|
||||
) => ReactElement;
|
||||
|
||||
const DateFilter: DateFilterFunction = <T extends GenericObject>(
|
||||
props: ComponentProps<T>
|
||||
): ReactElement => {
|
||||
const filter: Filter<T> = props.filter;
|
||||
const filterData: FilterData<T> = { ...props.filterData };
|
||||
|
||||
const [startDateTime, setStartDateTime] = React.useState<Date | null>(null);
|
||||
const [endDateTime, setEndDateTime] = React.useState<Date | null>(null);
|
||||
|
||||
const [startDateError, setStartDateError] = React.useState<string>('');
|
||||
const [endDateError, setEndDateError] = React.useState<string>('');
|
||||
|
||||
const [didSetInitialValue, setDidSetInitialValue] =
|
||||
React.useState<boolean>(false);
|
||||
|
||||
let inputType: InputType = InputType.TEXT;
|
||||
|
||||
if (filter.type === FieldType.Date) {
|
||||
inputType = InputType.DATE;
|
||||
} else if (filter.type === FieldType.DateTime) {
|
||||
inputType = InputType.DATETIME_LOCAL;
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
// prefill the date filter if it is already set
|
||||
|
||||
if (
|
||||
!didSetInitialValue &&
|
||||
filterData[filter.key] &&
|
||||
filterData[filter.key] instanceof InBetween
|
||||
) {
|
||||
const inBetween: InBetween = filterData[filter.key] as InBetween;
|
||||
|
||||
if (inBetween.startValue) {
|
||||
setStartDateTime(
|
||||
OneUptimeDate.fromString(inBetween.startValue as string)
|
||||
);
|
||||
}
|
||||
|
||||
if (inBetween.endValue) {
|
||||
setEndDateTime(
|
||||
OneUptimeDate.fromString(inBetween.endValue as string)
|
||||
);
|
||||
}
|
||||
|
||||
setDidSetInitialValue(true);
|
||||
}
|
||||
}, [props.filterData]);
|
||||
|
||||
useEffect(() => {
|
||||
if (startDateTime && endDateTime) {
|
||||
// check if start date is after end date
|
||||
|
||||
if (!OneUptimeDate.isAfter(endDateTime, startDateTime)) {
|
||||
setStartDateError('Start date should be before end date');
|
||||
setEndDateError('End date should be after start date');
|
||||
delete filterData[filter.key];
|
||||
|
||||
props.onFilterChanged && props.onFilterChanged(filterData);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
filterData[filter.key] = new InBetween(startDateTime, endDateTime);
|
||||
}
|
||||
|
||||
if (!startDateTime || !endDateTime) {
|
||||
delete filterData[filter.key];
|
||||
}
|
||||
|
||||
if (startDateTime && !endDateTime) {
|
||||
setStartDateError('');
|
||||
setEndDateError('End date is required');
|
||||
} else if (!startDateTime && endDateTime) {
|
||||
setEndDateError('');
|
||||
setStartDateError('Start date is required');
|
||||
} else {
|
||||
setStartDateError('');
|
||||
setEndDateError('');
|
||||
}
|
||||
|
||||
props.onFilterChanged && props.onFilterChanged(filterData);
|
||||
}, [startDateTime, endDateTime]);
|
||||
|
||||
if (
|
||||
!filter.filterDropdownOptions &&
|
||||
(filter.type === FieldType.Date || filter.type === FieldType.DateTime)
|
||||
) {
|
||||
return (
|
||||
<div>
|
||||
<div className="flex space-x-3 mt-1">
|
||||
<div className="w-1/2">
|
||||
<div className="text-xs text-gray-500">From:</div>
|
||||
<div>
|
||||
<Input
|
||||
error={startDateError}
|
||||
onChange={(changedValue: string | Date) => {
|
||||
if (filter.key) {
|
||||
if (!changedValue) {
|
||||
setStartDateTime(null);
|
||||
}
|
||||
|
||||
if (
|
||||
changedValue &&
|
||||
(filter.type === FieldType.Date ||
|
||||
filter.type ===
|
||||
FieldType.DateTime)
|
||||
) {
|
||||
setStartDateTime(
|
||||
OneUptimeDate.fromString(
|
||||
changedValue as string
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}}
|
||||
value={startDateTime || ''}
|
||||
placeholder={`Filter by ${filter.title}`}
|
||||
type={inputType}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="w-1/2">
|
||||
<div className="text-xs text-gray-500">To:</div>
|
||||
<div>
|
||||
<Input
|
||||
error={endDateError}
|
||||
onChange={(changedValue: string | Date) => {
|
||||
if (filter.key) {
|
||||
if (!changedValue) {
|
||||
setEndDateTime(null);
|
||||
}
|
||||
|
||||
if (
|
||||
changedValue &&
|
||||
(filter.type === FieldType.Date ||
|
||||
filter.type ===
|
||||
FieldType.DateTime)
|
||||
) {
|
||||
setEndDateTime(
|
||||
OneUptimeDate.fromString(
|
||||
changedValue as string
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}}
|
||||
value={endDateTime || ''}
|
||||
placeholder={`Filter by ${filter.title}`}
|
||||
type={inputType}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mt-1 flex space-x-2 -ml-3">
|
||||
{filter.type === FieldType.DateTime && (
|
||||
<Button
|
||||
buttonStyle={ButtonStyleType.NORMAL}
|
||||
buttonSize={ButtonSize.Small}
|
||||
onClick={() => {
|
||||
// set it to past 1 hour
|
||||
const endDate: Date =
|
||||
OneUptimeDate.getCurrentDate();
|
||||
const startDate: Date =
|
||||
OneUptimeDate.addRemoveHours(endDate, -1);
|
||||
|
||||
setStartDateTime(startDate);
|
||||
setEndDateTime(endDate);
|
||||
}}
|
||||
title="1 hour"
|
||||
/>
|
||||
)}
|
||||
|
||||
{filter.type === FieldType.DateTime && (
|
||||
<Button
|
||||
buttonStyle={ButtonStyleType.NORMAL}
|
||||
buttonSize={ButtonSize.Small}
|
||||
onClick={() => {
|
||||
// set it to past 3 hour
|
||||
const endDate: Date =
|
||||
OneUptimeDate.getCurrentDate();
|
||||
const startDate: Date =
|
||||
OneUptimeDate.addRemoveHours(endDate, -3);
|
||||
|
||||
setStartDateTime(startDate);
|
||||
setEndDateTime(endDate);
|
||||
}}
|
||||
title="3 hours"
|
||||
/>
|
||||
)}
|
||||
|
||||
<Button
|
||||
buttonStyle={ButtonStyleType.NORMAL}
|
||||
buttonSize={ButtonSize.Small}
|
||||
onClick={() => {
|
||||
// set it to past 1 day
|
||||
const endDate: Date =
|
||||
OneUptimeDate.getCurrentDate();
|
||||
const startDate: Date = OneUptimeDate.addRemoveDays(
|
||||
endDate,
|
||||
-1
|
||||
);
|
||||
|
||||
setStartDateTime(startDate);
|
||||
setEndDateTime(endDate);
|
||||
}}
|
||||
title="1 day"
|
||||
/>
|
||||
|
||||
<Button
|
||||
buttonStyle={ButtonStyleType.NORMAL}
|
||||
buttonSize={ButtonSize.Small}
|
||||
onClick={() => {
|
||||
// set it to past 1 week
|
||||
const endDate: Date =
|
||||
OneUptimeDate.getCurrentDate();
|
||||
const startDate: Date = OneUptimeDate.addRemoveDays(
|
||||
endDate,
|
||||
-7
|
||||
);
|
||||
|
||||
setStartDateTime(startDate);
|
||||
setEndDateTime(endDate);
|
||||
}}
|
||||
title="1 week"
|
||||
/>
|
||||
|
||||
<Button
|
||||
buttonStyle={ButtonStyleType.NORMAL}
|
||||
buttonSize={ButtonSize.Small}
|
||||
onClick={() => {
|
||||
// set it to past 1 week
|
||||
const endDate: Date =
|
||||
OneUptimeDate.getCurrentDate();
|
||||
const startDate: Date = OneUptimeDate.addRemoveDays(
|
||||
endDate,
|
||||
-14
|
||||
);
|
||||
|
||||
setStartDateTime(startDate);
|
||||
setEndDateTime(endDate);
|
||||
}}
|
||||
title="2 weeks"
|
||||
/>
|
||||
|
||||
<Button
|
||||
buttonStyle={ButtonStyleType.NORMAL}
|
||||
buttonSize={ButtonSize.Small}
|
||||
onClick={() => {
|
||||
// set it to past 1 week
|
||||
const endDate: Date =
|
||||
OneUptimeDate.getCurrentDate();
|
||||
const startDate: Date = OneUptimeDate.addRemoveDays(
|
||||
endDate,
|
||||
-21
|
||||
);
|
||||
|
||||
setStartDateTime(startDate);
|
||||
setEndDateTime(endDate);
|
||||
}}
|
||||
title="3 weeks"
|
||||
/>
|
||||
|
||||
<Button
|
||||
buttonStyle={ButtonStyleType.NORMAL}
|
||||
buttonSize={ButtonSize.Small}
|
||||
onClick={() => {
|
||||
// set it to past 1 month
|
||||
const endDate: Date =
|
||||
OneUptimeDate.getCurrentDate();
|
||||
const startDate: Date =
|
||||
OneUptimeDate.addRemoveMonths(endDate, -1);
|
||||
|
||||
setStartDateTime(startDate);
|
||||
setEndDateTime(endDate);
|
||||
}}
|
||||
title="1 month"
|
||||
/>
|
||||
|
||||
<Button
|
||||
buttonStyle={ButtonStyleType.NORMAL}
|
||||
buttonSize={ButtonSize.Small}
|
||||
onClick={() => {
|
||||
// set it to past 1 month
|
||||
const endDate: Date =
|
||||
OneUptimeDate.getCurrentDate();
|
||||
const startDate: Date =
|
||||
OneUptimeDate.addRemoveMonths(endDate, -3);
|
||||
|
||||
setStartDateTime(startDate);
|
||||
setEndDateTime(endDate);
|
||||
}}
|
||||
title="3 months"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return <></>;
|
||||
};
|
||||
|
||||
export default DateFilter;
|
||||
|
||||
@@ -214,14 +214,17 @@ const FilterComponent: FilterComponentFunction = <T extends GenericObject>(
|
||||
key
|
||||
] as InBetween;
|
||||
|
||||
const shouldOnlyShowDate: boolean =
|
||||
data.filter.type === FieldType.Date;
|
||||
|
||||
if (
|
||||
OneUptimeDate.getDateAsLocalFormattedString(
|
||||
startAndEndDates.startValue as Date,
|
||||
data.filter.type === FieldType.Date
|
||||
shouldOnlyShowDate
|
||||
) ===
|
||||
OneUptimeDate.getDateAsLocalFormattedString(
|
||||
startAndEndDates.endValue as Date,
|
||||
data.filter.type === FieldType.Date
|
||||
shouldOnlyShowDate
|
||||
)
|
||||
) {
|
||||
return (
|
||||
@@ -248,14 +251,14 @@ const FilterComponent: FilterComponentFunction = <T extends GenericObject>(
|
||||
<span className="font-medium">
|
||||
{OneUptimeDate.getDateAsLocalFormattedString(
|
||||
startAndEndDates.startValue as Date,
|
||||
data.filter.type === FieldType.Date
|
||||
shouldOnlyShowDate
|
||||
)}
|
||||
</span>{' '}
|
||||
and{' '}
|
||||
<span className="font-medium">
|
||||
{OneUptimeDate.getDateAsLocalFormattedString(
|
||||
startAndEndDates.endValue as Date,
|
||||
data.filter.type === FieldType.Date
|
||||
shouldOnlyShowDate
|
||||
)}
|
||||
</span>{' '}
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import ComponentLoader from '../ComponentLoader/ComponentLoader';
|
||||
import ErrorMessage from '../ErrorMessage/ErrorMessage';
|
||||
import BooleanFilter from './BooleanFilter';
|
||||
import DateFilter from './DateFilter';
|
||||
import DropdownFilter from './DropdownFilter';
|
||||
import EntityFilter from './EntityFilter';
|
||||
import JSONFilter from './JSONFilter';
|
||||
@@ -78,6 +79,12 @@ const FiltersForm: FiltersFormFunction = <T extends GenericObject>(
|
||||
onFilterChanged={changeFilterData}
|
||||
/>
|
||||
|
||||
<DateFilter
|
||||
filter={filter}
|
||||
filterData={props.filterData}
|
||||
onFilterChanged={changeFilterData}
|
||||
/>
|
||||
|
||||
<TextFilter
|
||||
filter={filter}
|
||||
filterData={props.filterData}
|
||||
|
||||
@@ -34,13 +34,11 @@ const TextFilter: TextFilterFunction = <T extends GenericObject>(
|
||||
|
||||
if (
|
||||
!filter.filterDropdownOptions &&
|
||||
(filter.type === FieldType.Date ||
|
||||
filter.type === FieldType.Email ||
|
||||
(filter.type === FieldType.Email ||
|
||||
filter.type === FieldType.Phone ||
|
||||
filter.type === FieldType.Name ||
|
||||
filter.type === FieldType.Port ||
|
||||
filter.type === FieldType.URL ||
|
||||
filter.type === FieldType.DateTime ||
|
||||
filter.type === FieldType.ObjectID ||
|
||||
filter.type === FieldType.Text)
|
||||
) {
|
||||
|
||||
@@ -632,7 +632,8 @@ const FormField: <T extends GenericObject>(
|
||||
? (props.currentValues as any)[
|
||||
props.fieldName
|
||||
]
|
||||
: false
|
||||
: (props.field.defaultValue as boolean) ||
|
||||
false
|
||||
}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -67,7 +67,7 @@ export default interface Field<TEntity> {
|
||||
validation?: {
|
||||
minLength?: number | undefined;
|
||||
maxLength?: number | undefined;
|
||||
toMatchField?: string | undefined;
|
||||
toMatchField?: keyof TEntity | undefined;
|
||||
noSpaces?: boolean | undefined;
|
||||
noSpecialCharacters?: boolean;
|
||||
noNumbers?: boolean | undefined;
|
||||
@@ -96,4 +96,5 @@ export default interface Field<TEntity> {
|
||||
|
||||
// set this to true if you want to show this field in the form even when the form is in edit mode.
|
||||
doNotShowWhenEditing?: boolean | undefined;
|
||||
doNotShowWhenCreating?: boolean | undefined;
|
||||
}
|
||||
|
||||
@@ -134,7 +134,7 @@ export default class Validation {
|
||||
public static validateMatchField<T extends GenericObject>(
|
||||
content: string | undefined,
|
||||
field: Field<T>,
|
||||
entity: JSONObject
|
||||
entity: FormValues<T>
|
||||
): string | null {
|
||||
if (
|
||||
content &&
|
||||
@@ -144,7 +144,9 @@ export default class Validation {
|
||||
.toString()
|
||||
.trim() !== content.trim()
|
||||
) {
|
||||
return `${field.title} should match ${field.validation?.toMatchField}`;
|
||||
return `${field.title} should match ${
|
||||
field.validation?.toMatchField as string
|
||||
}`;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -239,7 +241,7 @@ export default class Validation {
|
||||
currentFormStepId?: string | null | undefined;
|
||||
}): Dictionary<string> {
|
||||
const errors: JSONObject = {};
|
||||
const entries: JSONObject = { ...args.values } as JSONObject;
|
||||
const entries: FormValues<T> = { ...args.values };
|
||||
|
||||
for (const field of args.formFields) {
|
||||
if (
|
||||
@@ -256,7 +258,9 @@ export default class Validation {
|
||||
const name: string = field.name;
|
||||
|
||||
if (name in entries) {
|
||||
const content: string | undefined = entries[name]?.toString();
|
||||
const content: string | undefined = (entries as JSONObject)[
|
||||
name
|
||||
]?.toString();
|
||||
|
||||
// Check Required fields.
|
||||
const resultRequired: string | null = this.validateRequired(
|
||||
|
||||
@@ -21,12 +21,12 @@ export enum InputType {
|
||||
}
|
||||
|
||||
export interface ComponentProps {
|
||||
initialValue?: undefined | string;
|
||||
initialValue?: undefined | string | Date;
|
||||
onClick?: undefined | (() => void);
|
||||
placeholder?: undefined | string;
|
||||
className?: undefined | string;
|
||||
onChange?: undefined | ((value: string) => void);
|
||||
value?: string | undefined;
|
||||
value?: string | Date | undefined;
|
||||
readOnly?: boolean | undefined;
|
||||
disabled?: boolean | undefined;
|
||||
type?: InputType;
|
||||
@@ -61,7 +61,7 @@ const Input: FunctionComponent<ComponentProps> = (
|
||||
className += ' bg-gray-100 text-gray-500 cursor-not-allowed';
|
||||
}
|
||||
|
||||
const [value, setValue] = useState<string>('');
|
||||
const [value, setValue] = useState<string | Date>('');
|
||||
const [displayValue, setDisplayValue] = useState<string>('');
|
||||
const ref: any = useRef<any>(null);
|
||||
|
||||
@@ -89,7 +89,11 @@ const Input: FunctionComponent<ComponentProps> = (
|
||||
Logger.error(e);
|
||||
}
|
||||
setDisplayValue(dateString);
|
||||
} else if (value && value.includes && !value.includes(' - ')) {
|
||||
} else if (
|
||||
value &&
|
||||
(value as any).includes &&
|
||||
!(value as any).includes(' - ')
|
||||
) {
|
||||
// " - " is for InBetween dates.
|
||||
const date: Date = OneUptimeDate.fromString(value);
|
||||
let dateString: string = '';
|
||||
@@ -106,11 +110,14 @@ const Input: FunctionComponent<ComponentProps> = (
|
||||
Logger.error(err);
|
||||
}
|
||||
setDisplayValue(dateString);
|
||||
} else if (!value || (value.includes && !value.includes(' - '))) {
|
||||
} else if (
|
||||
!value ||
|
||||
((value as any).includes && !(value as any).includes(' - '))
|
||||
) {
|
||||
setDisplayValue('');
|
||||
}
|
||||
} else {
|
||||
setDisplayValue(value);
|
||||
setDisplayValue(value as string);
|
||||
}
|
||||
}, [value]);
|
||||
|
||||
|
||||
@@ -29,14 +29,26 @@ const LogItem: FunctionComponent<ComponentProps> = (
|
||||
);
|
||||
};
|
||||
|
||||
if (
|
||||
props.log.severityText === LogSeverity.Warning ||
|
||||
props.log.severityText === LogSeverity.Trace ||
|
||||
props.log.severityText === LogSeverity.Debug
|
||||
) {
|
||||
if (props.log.severityText === LogSeverity.Warning) {
|
||||
bodyColor = 'text-amber-400';
|
||||
} else if (props.log.severityText === LogSeverity.Error) {
|
||||
bodyColor = 'text-rose-400';
|
||||
} else if (
|
||||
props.log.severityText === LogSeverity.Trace ||
|
||||
props.log.severityText === LogSeverity.Debug
|
||||
) {
|
||||
bodyColor = 'text-slate-400';
|
||||
}
|
||||
|
||||
let logBody: string = props.log.body?.toString() || '';
|
||||
|
||||
let isBodyInJSON: boolean = false;
|
||||
|
||||
try {
|
||||
logBody = JSON.stringify(JSON.parse(logBody), null, 2);
|
||||
isBodyInJSON = true;
|
||||
} catch (e) {
|
||||
isBodyInJSON = false;
|
||||
}
|
||||
|
||||
if (isCollapsed) {
|
||||
@@ -71,12 +83,12 @@ const LogItem: FunctionComponent<ComponentProps> = (
|
||||
</div>
|
||||
)}
|
||||
{props.log.severityText === LogSeverity.Trace && (
|
||||
<div className="text-amber-400 courier-prime flex-none">
|
||||
<div className="text-slate-400 courier-prime flex-none">
|
||||
[TRACE]
|
||||
</div>
|
||||
)}
|
||||
{props.log.severityText === LogSeverity.Debug && (
|
||||
<div className="text-amber-400 courier-prime flex-none">
|
||||
<div className="text-slate-400 courier-prime flex-none">
|
||||
[DEBUG]
|
||||
</div>
|
||||
)}
|
||||
@@ -92,7 +104,8 @@ const LogItem: FunctionComponent<ComponentProps> = (
|
||||
)}
|
||||
|
||||
<div className={`${bodyColor} courier-prime`}>
|
||||
{props.log.body?.toString()}
|
||||
{isBodyInJSON && <pre>{logBody}</pre>}
|
||||
{!isBodyInJSON && props.log.body?.toString()}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
@@ -146,7 +159,7 @@ const LogItem: FunctionComponent<ComponentProps> = (
|
||||
<div className="font-medium text-slate-200 courier-prime mr-2">
|
||||
SEVERITY:
|
||||
</div>
|
||||
<div className="text-amber-400 courier-prime">
|
||||
<div className="text-slate-400 courier-prime">
|
||||
[TRACE]
|
||||
</div>
|
||||
</div>
|
||||
@@ -156,7 +169,7 @@ const LogItem: FunctionComponent<ComponentProps> = (
|
||||
<div className="font-medium text-slate-200 courier-prime mr-2">
|
||||
SEVERITY:
|
||||
</div>
|
||||
<div className="text-amber-400 courier-prime">
|
||||
<div className="text-slate-400 courier-prime">
|
||||
[DEBUG]
|
||||
</div>
|
||||
</div>
|
||||
@@ -182,13 +195,22 @@ const LogItem: FunctionComponent<ComponentProps> = (
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex">
|
||||
<div className="font-medium text-slate-200 courier-prime mr-2">
|
||||
MESSAGE:
|
||||
</div>
|
||||
<div className={`${bodyColor} courier-prime`}>
|
||||
{props.log.body?.toString()}
|
||||
<div>
|
||||
<div className="flex">
|
||||
<div className="font-medium text-slate-200 courier-prime mr-2">
|
||||
MESSAGE:
|
||||
</div>
|
||||
{!isBodyInJSON && (
|
||||
<div className={`${bodyColor} courier-prime`}>
|
||||
{props.log.body?.toString()}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{isBodyInJSON && (
|
||||
<pre className={`${bodyColor} courier-prime`}>
|
||||
{logBody}
|
||||
</pre>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{props.log.traceId && (
|
||||
@@ -216,13 +238,15 @@ const LogItem: FunctionComponent<ComponentProps> = (
|
||||
)}
|
||||
|
||||
{props.log.attributes && (
|
||||
<div className="flex">
|
||||
<div className="font-medium text-slate-200 courier-prime mr-2">
|
||||
ATTRIBUTES:
|
||||
<div>
|
||||
<div className="flex">
|
||||
<div className="font-medium text-slate-200 courier-prime mr-2">
|
||||
ATTRIBUTES:
|
||||
</div>
|
||||
</div>
|
||||
<div className={`${bodyColor} courier-prime`}>
|
||||
<pre className={`${bodyColor} courier-prime`}>
|
||||
{JSON.stringify(props.log.attributes, null, 2)}
|
||||
</div>
|
||||
</pre>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -495,7 +495,7 @@ const BaseModelTable: <TBaseModel extends BaseModel | AnalyticsBaseModel>(
|
||||
const permissions: Array<Permission> | null =
|
||||
PermissionUtil.getAllPermissions();
|
||||
|
||||
if (
|
||||
let showActionsColumn: boolean = Boolean(
|
||||
(permissions &&
|
||||
((props.isDeleteable &&
|
||||
model.hasDeletePermissions(permissions)) ||
|
||||
@@ -503,9 +503,20 @@ const BaseModelTable: <TBaseModel extends BaseModel | AnalyticsBaseModel>(
|
||||
model.hasUpdatePermissions(permissions)) ||
|
||||
(props.isViewable &&
|
||||
model.hasReadPermissions(permissions)))) ||
|
||||
(props.actionButtons && props.actionButtons.length > 0) ||
|
||||
props.showViewIdButton
|
||||
) {
|
||||
(props.actionButtons && props.actionButtons.length > 0) ||
|
||||
props.showViewIdButton
|
||||
);
|
||||
|
||||
if (User.isMasterAdmin()) {
|
||||
if (
|
||||
(props.actionButtons && props.actionButtons.length > 0) ||
|
||||
props.showViewIdButton
|
||||
) {
|
||||
showActionsColumn = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (showActionsColumn) {
|
||||
columns.push({
|
||||
title: 'Actions',
|
||||
type: FieldType.Actions,
|
||||
@@ -1108,7 +1119,11 @@ const BaseModelTable: <TBaseModel extends BaseModel | AnalyticsBaseModel>(
|
||||
});
|
||||
}
|
||||
|
||||
if (props.isEditable && model.hasUpdatePermissions(permissions)) {
|
||||
if (
|
||||
props.isEditable &&
|
||||
(model.hasUpdatePermissions(permissions) ||
|
||||
User.isMasterAdmin())
|
||||
) {
|
||||
actionsSchema.push({
|
||||
title: props.editButtonText || 'Edit',
|
||||
buttonStyleType: ButtonStyleType.OUTLINE,
|
||||
@@ -1134,7 +1149,11 @@ const BaseModelTable: <TBaseModel extends BaseModel | AnalyticsBaseModel>(
|
||||
});
|
||||
}
|
||||
|
||||
if (props.isDeleteable && model.hasDeletePermissions(permissions)) {
|
||||
if (
|
||||
props.isDeleteable &&
|
||||
(model.hasDeletePermissions(permissions) ||
|
||||
User.isMasterAdmin())
|
||||
) {
|
||||
actionsSchema.push({
|
||||
title: props.deleteButtonText || 'Delete',
|
||||
icon: IconProp.Trash,
|
||||
|
||||
@@ -161,10 +161,14 @@ const ModelTable: <TBaseModel extends BaseModel>(
|
||||
props.formFields?.filter(
|
||||
(field: ModelField<TBaseModel>) => {
|
||||
// If the field has doNotShowWhenEditing set to true, then don't show it when editing
|
||||
return !(
|
||||
field.doNotShowWhenEditing &&
|
||||
modelIdToEdit
|
||||
);
|
||||
|
||||
if (modelIdToEdit) {
|
||||
return !field.doNotShowWhenEditing;
|
||||
}
|
||||
|
||||
// If the field has doNotShowWhenCreating set to true, then don't show it when creating
|
||||
|
||||
return !field.doNotShowWhenCreating;
|
||||
}
|
||||
) || [],
|
||||
steps: props.formSteps || [],
|
||||
|
||||
@@ -58,7 +58,7 @@ const TableRow: TableRowFunction = <T extends GenericObject>(
|
||||
<tr {...provided?.draggableProps} ref={provided?.innerRef}>
|
||||
{props.enableDragAndDrop && (
|
||||
<td
|
||||
className="ml-5 w-10 align-top"
|
||||
className="ml-5 py-4 w-10 align-top"
|
||||
{...provided?.dragHandleProps}
|
||||
>
|
||||
<Icon
|
||||
|
||||
@@ -108,12 +108,16 @@ abstract class Navigation {
|
||||
return pathname.split('/')[getFromFirstRoute || 1];
|
||||
}
|
||||
|
||||
public static getLastParamAsObjectID(getFromLastRoute?: number): ObjectID {
|
||||
public static getLastParamAsString(getFromLastRoute?: number): string {
|
||||
const param: Route | null = URL.fromString(
|
||||
window.location.href
|
||||
).getLastRoute(getFromLastRoute);
|
||||
|
||||
return new ObjectID(param?.toString().replace('/', '') || '');
|
||||
return param?.toString().replace('/', '') || '';
|
||||
}
|
||||
|
||||
public static getLastParamAsObjectID(getFromLastRoute?: number): ObjectID {
|
||||
return new ObjectID(this.getLastParamAsString(getFromLastRoute));
|
||||
}
|
||||
|
||||
public static getCurrentRoute(): Route {
|
||||
|
||||
56
Copilot/.dockerignore
Normal file
56
Copilot/.dockerignore
Normal file
@@ -0,0 +1,56 @@
|
||||
.git
|
||||
|
||||
node_modules
|
||||
# See https://help.github.com/ignore-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
node_modules
|
||||
|
||||
.idea
|
||||
# testing
|
||||
/coverage
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
|
||||
env.js
|
||||
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
yarn.lock
|
||||
Untitled-1
|
||||
*.local.sh
|
||||
*.local.yaml
|
||||
run
|
||||
stop
|
||||
|
||||
nohup.out*
|
||||
|
||||
encrypted-credentials.tar
|
||||
encrypted-credentials/
|
||||
|
||||
_README.md
|
||||
|
||||
# Important Add production values to gitignore.
|
||||
values-saas-production.yaml
|
||||
kubernetes/values-saas-production.yaml
|
||||
|
||||
/private
|
||||
|
||||
/tls_cert.pem
|
||||
/tls_key.pem
|
||||
/keys
|
||||
|
||||
temp_readme.md
|
||||
|
||||
tests/coverage
|
||||
|
||||
settings.json
|
||||
|
||||
GoSDK/tester/
|
||||
3
Copilot/.env.example
Normal file
3
Copilot/.env.example
Normal file
@@ -0,0 +1,3 @@
|
||||
ONEUPTIME_URL=https://oneuptime.com
|
||||
ONEUPTIME_REPOSITORY_SECRET_KEY=your-repository-secret-key
|
||||
LOCAL_REPOSITORY_PATH=/repository
|
||||
1
Copilot/.gitattributes
vendored
Normal file
1
Copilot/.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.js text eol=lf
|
||||
16
Copilot/.gitignore
vendored
Normal file
16
Copilot/.gitignore
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
# See https://help.github.com/ignore-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
#/backend/node_modules
|
||||
/kubernetes
|
||||
/node_modules
|
||||
.idea
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
|
||||
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
yarn.lock
|
||||
18
Copilot/Config.ts
Normal file
18
Copilot/Config.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import URL from 'Common/Types/API/URL';
|
||||
|
||||
type GetStringFunction = () => string;
|
||||
type GetURLFunction = () => URL;
|
||||
|
||||
export const GetOneUptimeURL: GetURLFunction = () => {
|
||||
return URL.fromString(
|
||||
process.env['ONEUPTIME_URL'] || 'https://oneuptime.com'
|
||||
);
|
||||
};
|
||||
|
||||
export const GetRepositorySecretKey: GetStringFunction = (): string => {
|
||||
return process.env['ONEUPTIME_REPOSITORY_SECRET_KEY'] || '';
|
||||
};
|
||||
|
||||
export const GetLocalRepositoryPath: GetStringFunction = (): string => {
|
||||
return process.env['ONEUPTIME_LOCAL_REPOSITORY_PATH'] || '/repository';
|
||||
};
|
||||
86
Copilot/Dockerfile.tpl
Normal file
86
Copilot/Dockerfile.tpl
Normal file
@@ -0,0 +1,86 @@
|
||||
#
|
||||
# OneUptime-copilot Dockerfile
|
||||
#
|
||||
|
||||
# Pull base image nodejs image.
|
||||
FROM node:21.7.3-alpine3.18
|
||||
RUN mkdir /tmp/npm && chmod 2777 /tmp/npm && chown 1000:1000 /tmp/npm && npm config set cache /tmp/npm --global
|
||||
|
||||
|
||||
ARG GIT_SHA
|
||||
ARG APP_VERSION
|
||||
|
||||
ENV GIT_SHA=${GIT_SHA}
|
||||
ENV APP_VERSION=${APP_VERSION}
|
||||
|
||||
|
||||
# IF APP_VERSION is not set, set it to 1.0.0
|
||||
RUN if [ -z "$APP_VERSION" ]; then export APP_VERSION=1.0.0; fi
|
||||
|
||||
|
||||
# Install bash.
|
||||
RUN apk add bash && apk add curl
|
||||
|
||||
|
||||
# Install python
|
||||
RUN apk update && apk add --no-cache --virtual .gyp python3 make g++
|
||||
|
||||
#Use bash shell by default
|
||||
SHELL ["/bin/bash", "-c"]
|
||||
|
||||
|
||||
RUN mkdir /usr/src
|
||||
|
||||
WORKDIR /usr/src/Common
|
||||
COPY ./Common/package*.json /usr/src/Common/
|
||||
# Set version in ./Common/package.json to the APP_VERSION
|
||||
RUN sed -i "s/\"version\": \".*\"/\"version\": \"$APP_VERSION\"/g" /usr/src/Common/package.json
|
||||
RUN npm install
|
||||
COPY ./Common /usr/src/Common
|
||||
|
||||
|
||||
WORKDIR /usr/src/Model
|
||||
COPY ./Model/package*.json /usr/src/Model/
|
||||
# Set version in ./Model/package.json to the APP_VERSION
|
||||
RUN sed -i "s/\"version\": \".*\"/\"version\": \"$APP_VERSION\"/g" /usr/src/Model/package.json
|
||||
RUN npm install
|
||||
COPY ./Model /usr/src/Model
|
||||
|
||||
|
||||
|
||||
WORKDIR /usr/src/CommonServer
|
||||
COPY ./CommonServer/package*.json /usr/src/CommonServer/
|
||||
# Set version in ./CommonServer/package.json to the APP_VERSION
|
||||
RUN sed -i "s/\"version\": \".*\"/\"version\": \"$APP_VERSION\"/g" /usr/src/CommonServer/package.json
|
||||
RUN npm install
|
||||
COPY ./CommonServer /usr/src/CommonServer
|
||||
|
||||
|
||||
WORKDIR /usr/src/CommonUI
|
||||
COPY ./CommonUI/package*.json /usr/src/CommonUI/
|
||||
# Set version in ./CommonServer/package.json to the APP_VERSION
|
||||
RUN sed -i "s/\"version\": \".*\"/\"version\": \"$APP_VERSION\"/g" /usr/src/CommonUI/package.json
|
||||
RUN npm install
|
||||
COPY ./CommonUI /usr/src/CommonUI
|
||||
|
||||
ENV PRODUCTION=true
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Install app dependencies
|
||||
COPY ./Copilot/package*.json /usr/src/app/
|
||||
RUN npm install
|
||||
|
||||
|
||||
{{ if eq .Env.ENVIRONMENT "development" }}
|
||||
#Run the app
|
||||
CMD [ "npm", "run", "dev" ]
|
||||
{{ else }}
|
||||
# Copy app source
|
||||
COPY ./Copilot /usr/src/app
|
||||
# Bundle app source
|
||||
RUN npm run compile
|
||||
#Run the app
|
||||
CMD [ "npm", "start" ]
|
||||
{{ end }}
|
||||
|
||||
25
Copilot/Index.ts
Normal file
25
Copilot/Index.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import CodeRepositoryUtil from './Utils/CodeRepository';
|
||||
import { PromiseVoidFunction } from 'Common/Types/FunctionTypes';
|
||||
import logger from 'CommonServer/Utils/Logger';
|
||||
import CodeRepository from 'Model/Models/CodeRepository';
|
||||
import dotenv from 'dotenv';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
logger.info('OneUptime Copilot is starting...');
|
||||
|
||||
const init: PromiseVoidFunction = async (): Promise<void> => {
|
||||
const codeRepository: CodeRepository =
|
||||
await CodeRepositoryUtil.getCodeRepository();
|
||||
logger.info(`Code Repository found: ${codeRepository.name}`);
|
||||
};
|
||||
|
||||
init()
|
||||
.then(() => {
|
||||
process.exit(0);
|
||||
})
|
||||
.catch((error: Error) => {
|
||||
logger.error('Error in starting OneUptime Copilot: ');
|
||||
logger.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
13
Copilot/README.md
Normal file
13
Copilot/README.md
Normal file
@@ -0,0 +1,13 @@
|
||||
# OneUptime Copilot
|
||||
|
||||
Copilot is a tool that helps you improve your codebase automatically.
|
||||
|
||||
## Run Copilot with Docker
|
||||
|
||||
```bash
|
||||
docker run -v $(pwd):/app -w /app oneuptime/copilot
|
||||
```
|
||||
|
||||
### Volumes
|
||||
|
||||
- `/repository` - The directory where your codebase is located.
|
||||
48
Copilot/Utils/CodeRepository.ts
Normal file
48
Copilot/Utils/CodeRepository.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { GetOneUptimeURL, GetRepositorySecretKey } from '../Config';
|
||||
import HTTPErrorResponse from 'Common/Types/API/HTTPErrorResponse';
|
||||
import HTTPResponse from 'Common/Types/API/HTTPResponse';
|
||||
import URL from 'Common/Types/API/URL';
|
||||
import BadDataException from 'Common/Types/Exception/BadDataException';
|
||||
import { JSONObject } from 'Common/Types/JSON';
|
||||
import API from 'Common/Utils/API';
|
||||
import CodeRepositoryModel from 'Model/Models/CodeRepository';
|
||||
|
||||
export default class CodeRepositoryUtil {
|
||||
public static async getCodeRepository(): Promise<CodeRepositoryModel> {
|
||||
const repositorySecretKey: string = GetRepositorySecretKey();
|
||||
|
||||
if (!repositorySecretKey) {
|
||||
throw new BadDataException('Repository Secret Key is required');
|
||||
}
|
||||
|
||||
const url: URL = URL.fromString(
|
||||
GetOneUptimeURL().toString() + '/api'
|
||||
).addRoute(
|
||||
`${new CodeRepositoryModel()
|
||||
.getCrudApiPath()
|
||||
?.toString()}/get-code-repository/${repositorySecretKey}`
|
||||
);
|
||||
|
||||
const codeRepositoryResult:
|
||||
| HTTPErrorResponse
|
||||
| HTTPResponse<JSONObject> = await API.get(url);
|
||||
|
||||
if (codeRepositoryResult instanceof HTTPErrorResponse) {
|
||||
throw codeRepositoryResult;
|
||||
}
|
||||
|
||||
const codeRepository: CodeRepositoryModel =
|
||||
CodeRepositoryModel.fromJSON(
|
||||
codeRepositoryResult.data as JSONObject,
|
||||
CodeRepositoryModel
|
||||
) as CodeRepositoryModel;
|
||||
|
||||
if (!codeRepository) {
|
||||
throw new BadDataException(
|
||||
'Code Repository not found with the secret key provided.'
|
||||
);
|
||||
}
|
||||
|
||||
return codeRepository;
|
||||
}
|
||||
}
|
||||
27
Copilot/jest.config.json
Normal file
27
Copilot/jest.config.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"preset": "ts-jest",
|
||||
"verbose": true,
|
||||
"globals": {
|
||||
"ts-jest": {
|
||||
"tsconfig": "tsconfig.json",
|
||||
"babelConfig": false
|
||||
}
|
||||
},
|
||||
"moduleFileExtensions": ["ts", "js", "json"],
|
||||
"transform": {
|
||||
".(ts|tsx)": "ts-jest"
|
||||
},
|
||||
"testEnvironment": "node",
|
||||
"collectCoverage": false,
|
||||
"coverageReporters": ["text", "lcov"],
|
||||
"testRegex": "./Tests/(.*).test.ts",
|
||||
"collectCoverageFrom": ["./**/*.(tsx||ts)"],
|
||||
"coverageThreshold": {
|
||||
"global": {
|
||||
"lines": 0,
|
||||
"functions": 0,
|
||||
"branches": 0,
|
||||
"statements": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
5
Copilot/nodemon.json
Normal file
5
Copilot/nodemon.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"watch": ["./","../Common", "../CommonServer", "../Model"],
|
||||
"ext": "ts,json,tsx,env,js,jsx,hbs",
|
||||
"exec": "node --inspect=0.0.0.0:9229 --require ts-node/register Index.ts"
|
||||
}
|
||||
4685
Copilot/package-lock.json
generated
Normal file
4685
Copilot/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
31
Copilot/package.json
Normal file
31
Copilot/package.json
Normal file
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"name": "@oneuptime/copilot",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"start": "node --require ts-node/register Index.ts",
|
||||
"compile": "tsc",
|
||||
"clear-modules": "rm -rf node_modules && rm package-lock.json && npm install",
|
||||
"dev": "npx nodemon",
|
||||
"audit": "npm audit --audit-level=low",
|
||||
"dep-check": "npm install -g depcheck && depcheck ./ --skip-missing=true",
|
||||
"test": "jest --passWithNoTests"
|
||||
},
|
||||
"author": "OneUptime <hello@oneuptime.com> (https://oneuptime.com/)",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"Common": "file:../Common",
|
||||
"CommonServer": "file:../CommonServer",
|
||||
"dotenv": "^16.4.5",
|
||||
"Model": "file:../Model",
|
||||
"ts-node": "^10.9.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^27.5.0",
|
||||
"@types/node": "^17.0.31",
|
||||
"jest": "^28.1.0",
|
||||
"nodemon": "^2.0.20",
|
||||
"ts-jest": "^28.0.2"
|
||||
}
|
||||
}
|
||||
115
Copilot/tsconfig.json
Normal file
115
Copilot/tsconfig.json
Normal file
@@ -0,0 +1,115 @@
|
||||
{
|
||||
"ts-node": {
|
||||
// these options are overrides used only by ts-node
|
||||
// same as the --compilerOptions flag and the TS_NODE_COMPILER_OPTIONS environment variable
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"resolveJsonModule": true,
|
||||
}
|
||||
},
|
||||
"compilerOptions": {
|
||||
/* Visit https://aka.ms/tsconfig.json to read more about this file */
|
||||
|
||||
/* Projects */
|
||||
// "incremental": true, /* Enable incremental compilation */
|
||||
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
|
||||
// "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */
|
||||
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */
|
||||
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
|
||||
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
|
||||
|
||||
/* Language and Environment */
|
||||
"target": "es2017" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
|
||||
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
|
||||
"jsx": "react" /* Specify what JSX code is generated. */,
|
||||
"experimentalDecorators": true /* Enable experimental support for TC39 stage 2 draft decorators. */,
|
||||
"emitDecoratorMetadata": true /* Emit design-type metadata for decorated declarations in source files. */,
|
||||
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */
|
||||
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
|
||||
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */
|
||||
// "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */
|
||||
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
|
||||
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
|
||||
|
||||
/* Modules */
|
||||
// "module": "es2022" /* Specify what module code is generated. */,
|
||||
"rootDir": "" /* Specify the root folder within your source files. */,
|
||||
"moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */,
|
||||
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
|
||||
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
|
||||
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
|
||||
"typeRoots": [
|
||||
"./node_modules/@types"
|
||||
] /* Specify multiple folders that act like `./node_modules/@types`. */,
|
||||
"types": [
|
||||
"node",
|
||||
"jest"
|
||||
] /* Specify type package names to be included without being referenced in a source file. */,
|
||||
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
||||
// "resolveJsonModule": true, /* Enable importing .json files */
|
||||
// "noResolve": true, /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */
|
||||
|
||||
/* JavaScript Support */
|
||||
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */
|
||||
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
|
||||
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */
|
||||
|
||||
/* Emit */
|
||||
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
|
||||
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
|
||||
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
|
||||
"sourceMap": true /* Create source map files for emitted JavaScript files. */,
|
||||
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */
|
||||
"outDir": "build/dist" /* Specify an output folder for all emitted files. */,
|
||||
// "removeComments": true, /* Disable emitting comments. */
|
||||
// "noEmit": true, /* Disable emitting files from a compilation. */
|
||||
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
|
||||
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */
|
||||
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
|
||||
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
|
||||
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
|
||||
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
|
||||
// "newLine": "crlf", /* Set the newline character for emitting files. */
|
||||
// "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */
|
||||
// "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */
|
||||
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
|
||||
// "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */
|
||||
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
|
||||
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
|
||||
|
||||
/* Interop Constraints */
|
||||
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
|
||||
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
|
||||
"esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */,
|
||||
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
|
||||
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
|
||||
|
||||
/* Type Checking */
|
||||
"strict": true /* Enable all strict type-checking options. */,
|
||||
"noImplicitAny": true /* Enable error reporting for expressions and declarations with an implied `any` type.. */,
|
||||
"strictNullChecks": true /* When type checking, take into account `null` and `undefined`. */,
|
||||
"strictFunctionTypes": true /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */,
|
||||
"strictBindCallApply": true /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */,
|
||||
"strictPropertyInitialization": true /* Check for class properties that are declared but not set in the constructor. */,
|
||||
"noImplicitThis": true /* Enable error reporting when `this` is given the type `any`. */,
|
||||
"useUnknownInCatchVariables": true /* Type catch clause variables as 'unknown' instead of 'any'. */,
|
||||
"alwaysStrict": true /* Ensure 'use strict' is always emitted. */,
|
||||
"noUnusedLocals": true /* Enable error reporting when a local variables aren't read. */,
|
||||
"noUnusedParameters": true /* Raise an error when a function parameter isn't read */,
|
||||
"exactOptionalPropertyTypes": true /* Interpret optional property types as written, rather than adding 'undefined'. */,
|
||||
"noImplicitReturns": true /* Enable error reporting for codepaths that do not explicitly return in a function. */,
|
||||
"noFallthroughCasesInSwitch": true /* Enable error reporting for fallthrough cases in switch statements. */,
|
||||
"noUncheckedIndexedAccess": true /* Include 'undefined' in index signature results */,
|
||||
"noImplicitOverride": true /* Ensure overriding members in derived classes are marked with an override modifier. */,
|
||||
"noPropertyAccessFromIndexSignature": true /* Enforces using indexed accessors for keys declared using an indexed type */,
|
||||
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
|
||||
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
|
||||
|
||||
/* Completeness */
|
||||
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
|
||||
"skipLibCheck": true, /* Skip type checking all .d.ts files. */
|
||||
"resolveJsonModule": true
|
||||
}
|
||||
}
|
||||
@@ -15,6 +15,7 @@ import Welcome from './Pages/Onboarding/Welcome';
|
||||
import PageComponentProps from './Pages/PageComponentProps';
|
||||
import PageNotFound from './Pages/PageNotFound/PageNotFound';
|
||||
import SettingsDangerZone from './Pages/Settings/DangerZone';
|
||||
import AICopilotRoutes from './Routes/AICopilotRoutes';
|
||||
import IncidentsRoutes from './Routes/IncidentsRoutes';
|
||||
//Routes
|
||||
import InitRoutes from './Routes/InitRoutes';
|
||||
@@ -22,6 +23,7 @@ import MonitorGroupRoutes from './Routes/MonitorGroupRoutes';
|
||||
import MonitorsRoutes from './Routes/MonitorsRoutes';
|
||||
import OnCallDutyRoutes from './Routes/OnCallDutyRoutes';
|
||||
import ScheduledMaintenanceEventsRoutes from './Routes/ScheduleMaintenaceEventsRoutes';
|
||||
import ServiceCatalogRoutes from './Routes/ServiceCatalogRoutes';
|
||||
import SettingsRoutes from './Routes/SettingsRoutes';
|
||||
import StatusPagesRoutes from './Routes/StatusPagesRoutes';
|
||||
import TelemetryRoutes from './Routes/TelemetryRoutes';
|
||||
@@ -308,6 +310,20 @@ const App: () => JSX.Element = () => {
|
||||
element={<StatusPagesRoutes {...commonPageProps} />}
|
||||
/>
|
||||
|
||||
{/* Service Catalog */}
|
||||
<PageRoute
|
||||
path={
|
||||
RouteMap[PageMap.SERVICE_CATALOG_ROOT]?.toString() || ''
|
||||
}
|
||||
element={<ServiceCatalogRoutes {...commonPageProps} />}
|
||||
/>
|
||||
|
||||
{/** AI Copilot */}
|
||||
<PageRoute
|
||||
path={RouteMap[PageMap.AI_COPILOT_ROOT]?.toString() || ''}
|
||||
element={<AICopilotRoutes {...commonPageProps} />}
|
||||
/>
|
||||
|
||||
{/* Incidents */}
|
||||
<PageRoute
|
||||
path={RouteMap[PageMap.INCIDENTS_ROOT]?.toString() || ''}
|
||||
|
||||
100
Dashboard/src/Components/Metrics/MetricVIew.tsx
Normal file
100
Dashboard/src/Components/Metrics/MetricVIew.tsx
Normal file
@@ -0,0 +1,100 @@
|
||||
import ObjectID from 'Common/Types/ObjectID';
|
||||
import LineChart, {
|
||||
AxisType,
|
||||
XScalePrecision,
|
||||
XScaleType,
|
||||
YScaleType,
|
||||
} from 'CommonUI/src/Components/Charts/Line/LineChart';
|
||||
import FiltersForm from 'CommonUI/src/Components/Filters/FiltersForm';
|
||||
import FilterData from 'CommonUI/src/Components/Filters/Types/FilterData';
|
||||
import FieldType from 'CommonUI/src/Components/Types/FieldType';
|
||||
import Metric from 'Model/AnalyticsModels/Metric';
|
||||
import React, {
|
||||
Fragment,
|
||||
FunctionComponent,
|
||||
ReactElement,
|
||||
useEffect,
|
||||
} from 'react';
|
||||
|
||||
export interface ComponentProps {
|
||||
metricName: string;
|
||||
serviceId: ObjectID;
|
||||
}
|
||||
|
||||
const MetricView: FunctionComponent<ComponentProps> = (
|
||||
props: ComponentProps
|
||||
): ReactElement => {
|
||||
const [filterData, setFilterData] = React.useState<FilterData<Metric>>({
|
||||
name: props.metricName,
|
||||
serviceId: props.serviceId,
|
||||
});
|
||||
|
||||
// const [isLoading, setIsLoading] = React.useState<boolean>(true);
|
||||
|
||||
// const [values, setValues] = React.useState<Metric[]>([]);
|
||||
|
||||
useEffect(() => {}, []);
|
||||
|
||||
return (
|
||||
<Fragment>
|
||||
<div>
|
||||
<FiltersForm<Metric>
|
||||
showFilter={true}
|
||||
id="metrics-filter"
|
||||
filterData={filterData}
|
||||
onFilterChanged={(filterData: FilterData<Metric>) => {
|
||||
setFilterData(filterData);
|
||||
}}
|
||||
filters={[
|
||||
{
|
||||
key: 'name',
|
||||
title: 'Name',
|
||||
type: FieldType.Text,
|
||||
},
|
||||
{
|
||||
key: 'createdAt',
|
||||
title: 'Created At',
|
||||
type: FieldType.Date,
|
||||
},
|
||||
{
|
||||
key: 'serviceId',
|
||||
title: 'Service',
|
||||
type: FieldType.Dropdown,
|
||||
filterDropdownOptions: [],
|
||||
},
|
||||
]}
|
||||
/>
|
||||
|
||||
<LineChart
|
||||
xScale={{
|
||||
type: XScaleType.TIME,
|
||||
max: 'auto',
|
||||
min: 'auto',
|
||||
precision: XScalePrecision.MINUTE,
|
||||
}}
|
||||
yScale={{
|
||||
type: YScaleType.LINEAR,
|
||||
min: 'auto',
|
||||
max: 'auto',
|
||||
}}
|
||||
axisBottom={{
|
||||
type: AxisType.Time,
|
||||
legend: 'Time',
|
||||
}}
|
||||
axisLeft={{
|
||||
type: AxisType.Number,
|
||||
legend: 'Value',
|
||||
}}
|
||||
data={[
|
||||
{
|
||||
seriesName: props.metricName,
|
||||
data: [{ x: new Date(), y: 0 }],
|
||||
},
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
</Fragment>
|
||||
);
|
||||
};
|
||||
|
||||
export default MetricView;
|
||||
@@ -254,6 +254,13 @@ const MonitorsTable: FunctionComponent<ComponentProps> = (
|
||||
value: '_id',
|
||||
},
|
||||
},
|
||||
{
|
||||
title: 'Created At',
|
||||
type: FieldType.Date,
|
||||
field: {
|
||||
createdAt: true,
|
||||
},
|
||||
},
|
||||
]}
|
||||
columns={[
|
||||
{
|
||||
|
||||
@@ -19,6 +19,19 @@ const PingMonitorView: FunctionComponent<ComponentProps> = (
|
||||
|
||||
return (
|
||||
<div className="space-y-5">
|
||||
<div className="flex space-x-3">
|
||||
<InfoCard
|
||||
className="w-full shadow-none border-2 border-gray-100 "
|
||||
title="Hostname or IP address"
|
||||
value={
|
||||
(props.probeMonitorResponse.monitorDestination?.toString() ||
|
||||
'') +
|
||||
(props.probeMonitorResponse.monitorDestinationPort?.toString()
|
||||
? `:${props.probeMonitorResponse.monitorDestinationPort.toString()}`
|
||||
: '') || '-'
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex space-x-3">
|
||||
<InfoCard
|
||||
className="w-1/3 shadow-none border-2 border-gray-100 "
|
||||
|
||||
@@ -31,6 +31,16 @@ const SSLCertificateMonitorView: FunctionComponent<ComponentProps> = (
|
||||
return (
|
||||
<div className="space-y-5">
|
||||
<div className="space-y-5">
|
||||
<div className="flex space-x-3">
|
||||
<InfoCard
|
||||
className="w-full shadow-none border-2 border-gray-100 "
|
||||
title="URL"
|
||||
value={
|
||||
props.probeMonitorResponse.monitorDestination?.toString() ||
|
||||
'-'
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex space-x-3 w-full">
|
||||
<InfoCard
|
||||
className="w-1/3 shadow-none border-2 border-gray-100 "
|
||||
|
||||
@@ -46,6 +46,16 @@ const WebsiteMonitorSummaryView: FunctionComponent<ComponentProps> = (
|
||||
|
||||
return (
|
||||
<div className="space-y-5">
|
||||
<div className="flex space-x-3">
|
||||
<InfoCard
|
||||
className="w-full shadow-none border-2 border-gray-100 "
|
||||
title="URL"
|
||||
value={
|
||||
props.probeMonitorResponse.monitorDestination?.toString() ||
|
||||
'-'
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex space-x-3">
|
||||
<InfoCard
|
||||
className="w-1/3 shadow-none border-2 border-gray-100 "
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user