Merge pull request #2353 from OneUptime/master

Release
This commit is contained in:
Simon Larsen
2026-03-13 10:07:09 +00:00
committed by GitHub
24 changed files with 1100 additions and 658 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -86,13 +86,21 @@ jobs:
echo "patch=${target_patch}" >> "$GITHUB_OUTPUT"
echo "Using version base: ${new_version}"
nginx-docker-image-deploy:
# ─── Docker image build jobs (per-arch matrix) ───────────────────────
nginx-docker-image-build:
needs: [read-version, generate-build-number]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -102,17 +110,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/nginx
ghcr.io/oneuptime/nginx
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -122,20 +119,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy nginx.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -151,19 +140,52 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Nginx/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
e2e-docker-image-deploy:
needs: [read-version, generate-build-number]
nginx-docker-image-merge:
needs: [nginx-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image nginx \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
e2e-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -173,17 +195,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/e2e
ghcr.io/oneuptime/e2e
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -193,20 +204,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy e2e.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -222,18 +225,51 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./E2E/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
test-server-docker-image-deploy:
needs: [read-version, generate-build-number]
e2e-docker-image-merge:
needs: [e2e-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image e2e \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
test-server-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -243,17 +279,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/test-server
ghcr.io/oneuptime/test-server
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -263,20 +288,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy test-server.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -292,18 +309,51 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./TestServer/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
home-docker-image-deploy:
needs: [read-version, generate-build-number]
test-server-docker-image-merge:
needs: [test-server-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image test-server \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
home-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -313,17 +363,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/home
ghcr.io/oneuptime/home
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -333,20 +372,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy home.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -362,20 +393,51 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Home/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
test-docker-image-deploy:
needs: [read-version, generate-build-number]
home-docker-image-merge:
needs: [home-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image home \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
test-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -385,17 +447,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/test
ghcr.io/oneuptime/test
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -405,20 +456,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy test.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -434,19 +477,51 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Tests/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
telemetry-docker-image-deploy:
needs: [read-version, generate-build-number]
test-docker-image-merge:
needs: [test-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image test \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
telemetry-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -456,17 +531,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/telemetry
ghcr.io/oneuptime/telemetry
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -476,20 +540,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy telemetry.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -505,18 +561,51 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Telemetry/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
probe-docker-image-deploy:
needs: [read-version, generate-build-number]
telemetry-docker-image-merge:
needs: [telemetry-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image telemetry \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
probe-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -526,17 +615,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/probe
ghcr.io/oneuptime/probe
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -546,20 +624,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy probe.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -575,18 +645,51 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Probe/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
app-docker-image-deploy:
needs: [read-version, generate-build-number]
probe-docker-image-merge:
needs: [probe-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image probe \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
app-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -596,17 +699,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/app
ghcr.io/oneuptime/app
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -616,20 +708,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy app.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -645,21 +729,51 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./App/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
ai-agent-docker-image-deploy:
needs: [read-version, generate-build-number]
app-docker-image-merge:
needs: [app-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image app \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
ai-agent-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -669,17 +783,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/ai-agent
ghcr.io/oneuptime/ai-agent
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -689,19 +792,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy ai-agent.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -717,18 +813,51 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./AIAgent/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
worker-docker-image-deploy:
needs: [read-version, generate-build-number]
ai-agent-docker-image-merge:
needs: [ai-agent-docker-image-build, read-version]
runs-on: ubuntu-latest
env:
QEMU_CPU: max
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image ai-agent \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
worker-docker-image-build:
needs: [read-version, generate-build-number]
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- name: Free Disk Space (Ubuntu)
if: matrix.platform == 'linux/amd64'
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
@@ -738,17 +867,6 @@ jobs:
large-packages: true
docker-images: true
swap-storage: true
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/worker
ghcr.io/oneuptime/worker
tags: |
type=raw,value=test,enable=true
type=raw,value=${{needs.read-version.outputs.major_minor}}-test,enable=true
- uses: actions/checkout@v4
with:
@@ -758,20 +876,12 @@ jobs:
with:
node-version: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v10.0.4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy accounts.
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
@@ -787,11 +897,39 @@ jobs:
--version "${{needs.read-version.outputs.major_minor}}-test" \
--dockerfile ./Worker/Dockerfile \
--context . \
--platforms linux/amd64,linux/arm64 \
--platforms ${{ matrix.platform }} \
--git-sha "${{ github.sha }}" \
--extra-tags test \
--extra-enterprise-tags enterprise-test
worker-docker-image-merge:
needs: [worker-docker-image-build, read-version]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
run: |
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | docker login --username "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username "${{ github.repository_owner }}" --password-stdin
- name: Merge multi-arch manifests
run: |
VERSION="${{needs.read-version.outputs.major_minor}}-test"
SANITIZED_VERSION="${VERSION//+/-}"
bash ./Scripts/GHA/merge_docker_manifests.sh \
--image worker \
--tags "${SANITIZED_VERSION},test,enterprise-${SANITIZED_VERSION},enterprise-test"
# ─── Non-Docker jobs (unchanged) ─────────────────────────────────────
publish-terraform-provider:
runs-on: ubuntu-latest
@@ -805,11 +943,10 @@ jobs:
VERSION="${{needs.read-version.outputs.major_minor}}-test"
echo "Skipping Terraform provider publish for test release $VERSION"
test-helm-chart:
runs-on: ubuntu-latest
needs: [infrastructure-agent-deploy, publish-terraform-provider, telemetry-docker-image-deploy, worker-docker-image-deploy, home-docker-image-deploy, test-server-docker-image-deploy, test-docker-image-deploy, probe-docker-image-deploy, app-docker-image-deploy, ai-agent-docker-image-deploy, nginx-docker-image-deploy, e2e-docker-image-deploy]
needs: [infrastructure-agent-deploy, publish-terraform-provider, telemetry-docker-image-merge, worker-docker-image-merge, home-docker-image-merge, test-server-docker-image-merge, test-docker-image-merge, probe-docker-image-merge, app-docker-image-merge, ai-agent-docker-image-merge, nginx-docker-image-merge, e2e-docker-image-merge]
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
@@ -910,7 +1047,7 @@ jobs:
retention-days: 7
test-e2e-test-self-hosted:
test-e2e-test-self-hosted:
runs-on: ubuntu-latest
# After all the jobs runs
needs: [test-helm-chart, generate-build-number, read-version]
@@ -1003,7 +1140,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Go
uses: actions/setup-go@v4
@@ -1027,7 +1164,7 @@ jobs:
- name: Release MSI Images
run: cd InfrastructureAgent && bash build-msi.sh ${{needs.read-version.outputs.major_minor}}.${{needs.generate-build-number.outputs.build_number}}
- name: Upload Release Binaries
uses: actions/upload-artifact@v4
@@ -1036,13 +1173,13 @@ jobs:
# Name of the artifact to upload.
# Optional. Default is 'artifact'
name: binaries
# A file, directory or wildcard pattern that describes what to upload
# Required.
path: |
./InfrastructureAgent/dist
# Duration after which artifact will expire in days. 0 means using default retention.
# Minimum 1 day.
# Maximum 90 days unless changed from the repository settings page.

View File

@@ -89,7 +89,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -93,7 +93,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -92,7 +92,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -96,7 +96,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -93,7 +93,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

2
App/package-lock.json generated
View File

@@ -100,7 +100,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -298,6 +298,78 @@ export default class Log extends AnalyticsBaseModel {
},
});
const observedTimeUnixNanoColumn: AnalyticsTableColumn =
new AnalyticsTableColumn({
key: "observedTimeUnixNano",
title: "Observed Time (in Unix Nano)",
description:
"When the log was observed/collected by the telemetry pipeline",
required: false,
type: TableColumnType.LongNumber,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const droppedAttributesCountColumn: AnalyticsTableColumn =
new AnalyticsTableColumn({
key: "droppedAttributesCount",
title: "Dropped Attributes Count",
description: "Number of attributes that were dropped during collection",
required: false,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const flagsColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "flags",
title: "Flags",
description: "Log record flags (e.g., W3C trace flags)",
required: false,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
});
const retentionDateColumn: AnalyticsTableColumn = new AnalyticsTableColumn({
key: "retentionDate",
title: "Retention Date",
@@ -352,6 +424,9 @@ export default class Log extends AnalyticsBaseModel {
traceIdColumn,
spanIdColumn,
bodyColumn,
observedTimeUnixNanoColumn,
droppedAttributesCountColumn,
flagsColumn,
retentionDateColumn,
],
projections: [],
@@ -450,6 +525,30 @@ export default class Log extends AnalyticsBaseModel {
this.setColumnValue("spanId", v);
}
public get observedTimeUnixNano(): number | undefined {
return this.getColumnValue("observedTimeUnixNano") as number | undefined;
}
public set observedTimeUnixNano(v: number | undefined) {
this.setColumnValue("observedTimeUnixNano", v);
}
public get droppedAttributesCount(): number | undefined {
return this.getColumnValue("droppedAttributesCount") as number | undefined;
}
public set droppedAttributesCount(v: number | undefined) {
this.setColumnValue("droppedAttributesCount", v);
}
public get flags(): number | undefined {
return this.getColumnValue("flags") as number | undefined;
}
public set flags(v: number | undefined) {
this.setColumnValue("flags", v);
}
public get retentionDate(): Date | undefined {
return this.getColumnValue("retentionDate") as Date | undefined;
}

2
E2E/package-lock.json generated
View File

@@ -88,7 +88,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -90,7 +90,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -16,15 +16,16 @@ The following features have been implemented and removed from this plan:
- **Phase 5.4** - Per-service TTL via `retentionDate` column
- **Phase 5.5** - Parameterized SQL in `LogAggregationService`
- **Phase 5.6** - ZSTD compression on `body` column
- **Phase 2.1** - Saved Views (LogSavedView model, SavedViewsDropdown, CRUD API)
- **Phase 2.2** - Log Analytics View (LogsAnalyticsView with timeseries, toplist, table charts; analytics endpoint)
- **Phase 2.3** - Column Customization (ColumnSelector with dynamic columns from log attributes)
- **Phase 5.8** - Store Missing OpenTelemetry Log Fields (observedTimeUnixNano, droppedAttributesCount, flags columns + ingestion + migration)
## Gap Analysis Summary
| Feature | OneUptime | Datadog | New Relic | Priority |
|---------|-----------|---------|-----------|----------|
| Saved Views | None | Full state save/share | Full state save/share | **P1** |
| Log Patterns (ML clustering) | None | Auto-clustering + Pattern Inspector | ML clustering + anomaly | **P1** |
| Log-based analytics/charts | None | Timeseries, TopList, Table, Pie | Full NRQL charting | **P1** |
| Column customization | Fixed 4 columns | Fully customizable | Configurable | **P1** |
| Log context (surrounding logs) | None | Before/after from same host/service | Automatic via APM agent | **P2** |
| Log Pipelines (server-side processing) | None (raw storage only) | 270+ OOTB, 14+ processor types | Grok parsing, built-in rules | **P2** |
| Log-based Metrics | None | Count + Distribution, 15-month retention | Via NRQL | **P2** |
@@ -36,68 +37,6 @@ The following features have been implemented and removed from this plan:
---
## Phase 2: Analytics & Organization (P1) — Power User Features
### 2.1 Saved Views
**Current**: No way to save filter/query state.
**Target**: Users can save, name, and share log views.
**Implementation**:
- Create a new PostgreSQL model `LogSavedView` with fields: `id`, `projectId`, `name`, `query` (JSON), `columns` (JSON array), `sortField`, `sortOrder`, `pageSize`, `createdByUserId`, `isDefault`, timestamps
- CRUD API via standard OneUptime model patterns
- Add a "Save View" button in the toolbar and a dropdown to load saved views
- Saved views appear in the left sidebar above the facets
**Files to modify**:
- `Common/Models/DatabaseModels/LogSavedView.ts` (new model)
- `Common/Server/Services/LogSavedViewService.ts` (new service)
- `Common/UI/Components/LogsViewer/components/SavedViewsDropdown.tsx` (new)
- `Common/UI/Components/LogsViewer/components/LogsViewerToolbar.tsx` (add save/load buttons)
### 2.2 Log Analytics View (Charts from Logs)
**Current**: Logs are only viewable as a list.
**Target**: A toggle to switch from "List" to "Analytics" mode showing aggregate visualizations.
**Implementation**:
- Add a view mode toggle in the toolbar: "List" | "Analytics"
- Analytics view provides a query builder for:
- **Timeseries**: Count/unique count over time, grouped by up to 2 dimensions
- **Top List**: Top N values for a dimension by count
- **Table**: Pivot table with multiple group-by dimensions
- Reuse the histogram endpoint with extended aggregation support
- New API endpoint `POST /telemetry/logs/analytics` that supports flexible GROUP BY + aggregation queries
- Create `LogsAnalyticsView` component using a charting library (recommend recharts, already likely in the project)
**Files to modify**:
- `Common/Server/API/TelemetryAPI.ts` (add analytics endpoint)
- `Common/UI/Components/LogsViewer/components/LogsAnalyticsView.tsx` (new)
- `Common/UI/Components/LogsViewer/components/LogsViewerToolbar.tsx` (add view toggle)
- `Common/UI/Components/LogsViewer/LogsViewer.tsx` (conditional rendering)
### 2.3 Column Customization
**Current**: Fixed columns: Time, Service, Severity, Message.
**Target**: Users can add/remove/reorder columns from log attributes.
**Implementation**:
- Add a "Columns" button in the toolbar that opens a dropdown/popover
- Default columns: Time, Service, Severity, Message
- Available columns: any discovered attribute key (from the existing get-attributes endpoint)
- Selected columns persist in localStorage (and in Saved Views when that feature ships)
- Extend `LogsTable.tsx` to dynamically render columns based on configuration
**Files to modify**:
- `Common/UI/Components/LogsViewer/components/ColumnSelector.tsx` (new)
- `Common/UI/Components/LogsViewer/components/LogsTable.tsx` (dynamic columns)
- `Common/UI/Components/LogsViewer/LogsViewer.tsx` (column state management)
---
## Phase 3: Processing & Operations (P2) — Platform Capabilities
### 3.1 Log Context (Surrounding Logs)
@@ -195,14 +134,11 @@ The following features have been implemented and removed from this plan:
## Recommended Implementation Order
1. **Phase 2.3** - Column Customization (small effort, high user value)
2. **Phase 2.1** - Saved Views (moderate effort, high retention value)
3. **Phase 3.4** - Export CSV/JSON (small effort, table-stakes feature)
4. **Phase 3.1** - Log Context (moderate effort, high debugging value)
5. **Phase 2.2** - Log Analytics View (larger effort, advanced user feature)
6. **Phase 3.2** - Log Pipelines (large effort, platform capability)
7. **Phase 3.3** - Drop Filters (moderate effort, cost optimization)
8. **Phase 4.x** - Patterns, Shortcuts, Data Scrubbing (future)
1. **Phase 3.4** - Export CSV/JSON (small effort, table-stakes feature)
2. **Phase 3.1** - Log Context (moderate effort, high debugging value)
3. **Phase 3.2** - Log Pipelines (large effort, platform capability)
4. **Phase 3.3** - Drop Filters (moderate effort, cost optimization)
5. **Phase 4.x** - Patterns, Shortcuts, Data Scrubbing (future)
## Phase 5: ClickHouse Storage & Query Optimizations (P0) — Performance Foundation
@@ -262,42 +198,17 @@ These optimizations address fundamental storage and indexing gaps in the telemet
- `Common/Server/Utils/AnalyticsDatabase/StatementGenerator.ts` (emit PROJECTION clause)
- `Worker/DataMigrations/` (new migration to materialize)
### 5.8 Store Missing OpenTelemetry Log Fields (Low)
**Current**: Several standard OTEL log record fields are dropped during ingestion:
- `observedTimeUnixNano` — when the log was collected by the pipeline (useful for measuring ingestion lag)
- `droppedAttributesCount` — signals data loss during collection
- `flags` — log record flags (e.g., W3C trace flags)
**Target**: Preserve these fields for full OTEL compliance and operational debugging.
**Implementation**:
- Add optional columns to the Log model:
- `observedTimeUnixNano` (LongNumber)
- `droppedAttributesCount` (Number, default 0)
- `flags` (Number, default 0)
- Update `OtelLogsIngestService.processLogsAsync()` to extract and store these fields
- Migration to add columns to existing table
**Files to modify**:
- `Common/Models/AnalyticsModels/Log.ts` (add columns)
- `Telemetry/Services/OtelLogsIngestService.ts` (extract additional fields)
- `Worker/DataMigrations/` (new migration)
### 5.x Remaining Performance Impact Summary
| Optimization | Query Pattern Improved | Expected Speedup | Effort |
|-------------|----------------------|-------------------|--------|
| 5.3 DateTime64 time column | Sub-second log ordering | Correctness fix | Medium |
| 5.7 Histogram projections | Histogram and severity aggregation | 5-10x | Medium |
| 5.8 Missing OTEL fields | OTEL compliance | N/A (completeness) | Small |
### 5.x Recommended Remaining Order
1. **5.3** — DateTime64 upgrade (correctness)
2. **5.7** — Projections (performance polish)
3. **5.8** — Missing OTEL fields (completeness)
---

2
MCP/package-lock.json generated
View File

@@ -95,7 +95,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -85,7 +85,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -101,7 +101,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -14,6 +14,8 @@ Required flags:
Optional flags:
--context <path> Build context directory (default: .)
--platforms <list> Comma-separated platforms passed to docker buildx (default: linux/amd64,linux/arm64)
When a single platform is given, tags are suffixed with the arch
(e.g. -amd64 or -arm64) so parallel builds don't overwrite each other.
--git-sha <sha> Commit SHA used for the GIT_SHA build arg (default: detected via git)
--extra-tags <tag> Additional tags for the community image (can be repeated)
--extra-enterprise-tags <tag> Additional tags for the enterprise image (can be repeated)
@@ -88,6 +90,15 @@ if [[ -z "$GIT_SHA" ]]; then
fi
fi
# Determine if this is a single-platform build.
# When building for a single platform, append the arch suffix to tags
# so that parallel per-arch jobs don't overwrite each other.
ARCH_SUFFIX=""
if [[ "$PLATFORMS" != *","* ]]; then
# Single platform — extract arch (e.g. linux/amd64 -> amd64)
ARCH_SUFFIX="-${PLATFORMS#*/}"
fi
build_variant() {
local variant_prefix="$1" # "" or "enterprise-"
local enterprise_flag="$2" # false/true
@@ -95,24 +106,26 @@ build_variant() {
local sanitized_version
sanitized_version="${VERSION//+/-}"
local cache_scope="${IMAGE}-${variant_prefix:-community}"
local -a args
args=(
docker buildx build
--file "$DOCKERFILE"
--platform "$PLATFORMS"
--push
--cache-from "type=gha,scope=${IMAGE}-${variant_prefix:-community}"
--cache-to "type=gha,mode=max,scope=${IMAGE}-${variant_prefix:-community}"
--cache-from "type=registry,ref=ghcr.io/oneuptime/${IMAGE}:cache-${cache_scope}"
--cache-to "type=registry,ref=ghcr.io/oneuptime/${IMAGE}:cache-${cache_scope},mode=max"
)
args+=(
--tag "oneuptime/${IMAGE}:${variant_prefix}${sanitized_version}"
--tag "ghcr.io/oneuptime/${IMAGE}:${variant_prefix}${sanitized_version}"
--tag "oneuptime/${IMAGE}:${variant_prefix}${sanitized_version}${ARCH_SUFFIX}"
--tag "ghcr.io/oneuptime/${IMAGE}:${variant_prefix}${sanitized_version}${ARCH_SUFFIX}"
)
for tag_suffix in "${extra_tags_ref[@]}"; do
args+=(--tag "oneuptime/${IMAGE}:${tag_suffix}")
args+=(--tag "ghcr.io/oneuptime/${IMAGE}:${tag_suffix}")
args+=(--tag "oneuptime/${IMAGE}:${tag_suffix}${ARCH_SUFFIX}")
args+=(--tag "ghcr.io/oneuptime/${IMAGE}:${tag_suffix}${ARCH_SUFFIX}")
done
args+=(
@@ -125,8 +138,8 @@ build_variant() {
"${args[@]}"
}
echo "🚀 Building docker images for ${IMAGE} (${VERSION})"
echo "🚀 Building docker images for ${IMAGE} (${VERSION}) [${PLATFORMS}]"
build_variant "" false EXTRA_TAGS
echo "✅ Pushed community image for ${IMAGE}:${VERSION}"
echo "✅ Pushed community image for ${IMAGE}:${VERSION}${ARCH_SUFFIX}"
build_variant "enterprise-" true EXTRA_ENTERPRISE_TAGS
echo "✅ Pushed enterprise image for ${IMAGE}:enterprise-${VERSION}"
echo "✅ Pushed enterprise image for ${IMAGE}:enterprise-${VERSION}${ARCH_SUFFIX}"

View File

@@ -0,0 +1,67 @@
#!/usr/bin/env bash
# Merge per-architecture Docker images into multi-arch manifests and push
# them to Docker Hub and GHCR. Expects the per-arch images to already exist
# with -amd64 / -arm64 tag suffixes (pushed by the per-arch build jobs).
set -euo pipefail

usage() {
  cat <<'EOF'
Usage: merge_docker_manifests.sh --image <name> --tags <tag1,tag2,...>
Combines per-architecture images (tagged with -amd64 / -arm64 suffixes) into
multi-arch manifests and pushes them to Docker Hub and GHCR.
Required flags:
  --image <name>   Image name without registry prefix (example: nginx)
  --tags <list>    Comma-separated list of final manifest tags to create
                   (e.g. "10.0.31,release,enterprise-10.0.31,enterprise-release")
EOF
}

IMAGE=""
TAGS=""

# Parse CLI flags.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --image)
      IMAGE="$2"
      shift 2
      ;;
    --tags)
      TAGS="$2"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      exit 1
      ;;
  esac
done

if [[ -z "$IMAGE" || -z "$TAGS" ]]; then
  echo "Missing required arguments" >&2
  usage
  exit 1
fi

IFS=',' read -ra TAG_LIST <<< "$TAGS"

for tag in "${TAG_LIST[@]}"; do
  # Trim surrounding whitespace with pure parameter expansion. (The previous
  # `echo | xargs` trim is fragile: xargs interprets quotes/backslashes and
  # aborts on an unmatched quote, and echo may interpret escapes.)
  tag="${tag#"${tag%%[![:space:]]*}"}"   # strip leading whitespace
  tag="${tag%"${tag##*[![:space:]]}"}"   # strip trailing whitespace
  [[ -z "$tag" ]] && continue

  echo "🔗 Creating multi-arch manifest for ${IMAGE}:${tag}"
  # Stitch the arch-specific tags into one manifest list per registry.
  # `docker buildx imagetools create` pushes the manifest as part of creation.
  for registry in "oneuptime" "ghcr.io/oneuptime"; do
    docker buildx imagetools create \
      --tag "${registry}/${IMAGE}:${tag}" \
      "${registry}/${IMAGE}:${tag}-amd64" \
      "${registry}/${IMAGE}:${tag}-arm64"
  done
  echo "✅ Pushed multi-arch manifest for ${IMAGE}:${tag}"
done

View File

@@ -93,7 +93,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",
@@ -6174,7 +6174,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -280,6 +280,31 @@ export default class OtelLogsIngestService extends OtelIngestBaseService {
spanId = "";
}
// Extract observedTimeUnixNano
let observedTimeUnixNano: number = 0;
if (log["observedTimeUnixNano"]) {
try {
if (typeof log["observedTimeUnixNano"] === "string") {
observedTimeUnixNano = parseFloat(
log["observedTimeUnixNano"],
);
if (isNaN(observedTimeUnixNano)) {
observedTimeUnixNano = 0;
}
} else {
observedTimeUnixNano =
(log["observedTimeUnixNano"] as number) || 0;
}
} catch {
observedTimeUnixNano = 0;
}
}
const droppedAttributesCount: number =
(log["droppedAttributesCount"] as number) || 0;
const logFlags: number = (log["flags"] as number) || 0;
const ingestionDate: Date = OneUptimeDate.getCurrentDate();
const ingestionTimestamp: string =
OneUptimeDate.toClickhouseDateTime(ingestionDate);
@@ -306,6 +331,10 @@ export default class OtelLogsIngestService extends OtelIngestBaseService {
traceId: traceId,
spanId: spanId,
body: body,
observedTimeUnixNano:
Math.trunc(observedTimeUnixNano).toString(),
droppedAttributesCount: droppedAttributesCount,
flags: logFlags,
retentionDate:
OneUptimeDate.toClickhouseDateTime(retentionDate),
};

View File

@@ -93,7 +93,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -90,7 +90,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",

View File

@@ -0,0 +1,51 @@
import DataMigrationBase from "./DataMigrationBase";
import AnalyticsTableColumn from "Common/Types/AnalyticsDatabase/TableColumn";
import Log from "Common/Models/AnalyticsModels/Log";
import LogService from "Common/Server/Services/LogService";
/**
 * Data migration that adds the missing OpenTelemetry log-record columns
 * (observedTimeUnixNano, droppedAttributesCount, flags) to the Log
 * analytics table. See plan phase 5.8.
 */
export default class AddOtelFieldsToLogTable extends DataMigrationBase {
  // Single source of truth for the column keys this migration manages.
  // (Previously duplicated verbatim in migrate() and rollback(), which
  // risked the two lists drifting apart.)
  private static readonly otelColumnKeys: string[] = [
    "observedTimeUnixNano",
    "droppedAttributesCount",
    "flags",
  ];

  public constructor() {
    super("AddOtelFieldsToLogTable");
  }

  /**
   * Adds each missing OTEL column to the Log table. Idempotent: columns
   * that already exist are skipped, so re-running is safe.
   */
  public override async migrate(): Promise<void> {
    for (const key of AddOtelFieldsToLogTable.otelColumnKeys) {
      const hasColumn: boolean =
        await LogService.doesColumnExistInDatabase(key);
      if (hasColumn) {
        continue;
      }
      // Look up the column definition on the Log model itself so the
      // migration stays in sync with the model's declared column metadata.
      const column: AnalyticsTableColumn | undefined =
        new Log().tableColumns.find((col: AnalyticsTableColumn) => {
          return col.key === key;
        });
      if (column) {
        await LogService.addColumnInDatabase(column);
      }
    }
  }

  /**
   * Drops the OTEL columns added by migrate(). Only drops columns that
   * actually exist, so a partial migration can be rolled back safely.
   */
  public override async rollback(): Promise<void> {
    for (const key of AddOtelFieldsToLogTable.otelColumnKeys) {
      const hasColumn: boolean =
        await LogService.doesColumnExistInDatabase(key);
      if (hasColumn) {
        await LogService.dropColumnInDatabase(key);
      }
    }
  }
}

View File

@@ -58,6 +58,7 @@ import AddDefaultIconsToIncidentRoles from "./AddDefaultIconsToIncidentRoles";
import UpdateObserverRoleToAllowMultipleUsers from "./UpdateObserverRoleToAllowMultipleUsers";
import AddColumnsToExceptionInstance from "./AddColumnsToExceptionInstance";
import AddRetentionDateAndSkipIndexesToTelemetryTables from "./AddRetentionDateAndSkipIndexesToTelemetryTables";
import AddOtelFieldsToLogTable from "./AddOtelFieldsToLogTable";
// This is the order in which the migrations will be run. Add new migrations to the end of the array.
@@ -120,6 +121,7 @@ const DataMigrations: Array<DataMigrationBase> = [
new UpdateObserverRoleToAllowMultipleUsers(),
new AddColumnsToExceptionInstance(),
new AddRetentionDateAndSkipIndexesToTelemetryTables(),
new AddOtelFieldsToLogTable(),
];
export default DataMigrations;

View File

@@ -89,7 +89,7 @@
"mermaid": "^11.12.2",
"moment": "^2.30.1",
"moment-timezone": "^0.5.45",
"multer": "^2.0.2",
"multer": "^2.1.1",
"node-cron": "^3.0.3",
"nodemailer": "^7.0.7",
"otpauth": "^9.3.1",