Docker Compose deployment

This commit is contained in:
Wayne
2025-07-25 15:50:25 +03:00
parent 946da7925b
commit 8c12cda370
7 changed files with 116 additions and 11 deletions

View File

@@ -8,8 +8,11 @@ PORT_FRONTEND=3000
DATABASE_URL="postgresql://admin:password@postgres:5432/open_archive?schema=public"
# Redis
REDIS_HOST=redis
REDIS_HOST=valkey
REDIS_PORT=6379
REDIS_PASSWORD=astrongredispassword
REDIS_TLS_ENABLED=false
# Meilisearch
MEILI_MASTER_KEY=aSampleMasterKey

View File

@@ -0,0 +1,74 @@
# Docker Compose stack for Open Archiver.
# Services: the combined backend/frontend app container, PostgreSQL, Valkey
# (Redis-compatible queue backend) and Meilisearch, all on one bridge network.
#
# NOTE(review): the top-level `version` key is obsolete under Compose v2 and is
# only kept for compatibility with legacy `docker-compose` binaries.
version: '3.8'

services:
  open-archiver:
    image: logiclabshq/open-archiver:latest
    container_name: open-archiver
    restart: unless-stopped
    ports:
      - '4000:4000' # Backend
      - '3000:3000' # Frontend
    env_file:
      - .env
    volumes:
      # Persistent archive data written by the application.
      - archiver-data:/var/data/open-archiver
    depends_on:
      - postgres
      - valkey
      - meilisearch
    networks:
      - open-archiver-net

  postgres:
    image: postgres:17-alpine
    container_name: postgres
    restart: unless-stopped
    environment:
      # Defaults mirror the sample DATABASE_URL in .env; override in production.
      POSTGRES_DB: ${POSTGRES_DB:-open_archive}
      POSTGRES_USER: ${POSTGRES_USER:-admin}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password}
    volumes:
      - pgdata:/var/lib/postgresql/data
    ports:
      # NOTE(review): publishing 5432 exposes the database on the host; remove
      # this mapping if only in-network access is required.
      - '5432:5432'
    networks:
      - open-archiver-net

  valkey:
    image: valkey/valkey:8-alpine
    container_name: valkey
    restart: unless-stopped
    # REDIS_PASSWORD is interpolated by Compose from the project-level .env file.
    command: valkey-server --requirepass ${REDIS_PASSWORD}
    ports:
      - '6379:6379'
    volumes:
      - valkeydata:/data
    networks:
      - open-archiver-net

  meilisearch:
    image: getmeili/meilisearch:v1.15
    container_name: meilisearch
    restart: unless-stopped
    environment:
      MEILI_MASTER_KEY: ${MEILI_MASTER_KEY:-aSampleMasterKey}
    ports:
      - '7700:7700'
    volumes:
      - meilidata:/meili_data
    networks:
      - open-archiver-net

volumes:
  pgdata:
    driver: local
  valkeydata:
    driver: local
  meilidata:
    driver: local
  archiver-data:
    driver: local

networks:
  open-archiver-net:
    driver: bridge

View File

@@ -35,9 +35,6 @@ COPY packages/backend/package.json ./packages/backend/
COPY packages/frontend/package.json ./packages/frontend/
COPY packages/types/package.json ./packages/types/
# Install only production dependencies
RUN pnpm install --frozen-lockfile --prod
# Copy built application from build stage
COPY --from=build /app/packages/backend/dist ./packages/backend/dist
COPY --from=build /app/packages/frontend/build ./packages/frontend/build
@@ -45,9 +42,16 @@ COPY --from=build /app/packages/types/dist ./packages/types/dist
COPY --from=build /app/packages/backend/drizzle.config.ts ./packages/backend/drizzle.config.ts
COPY --from=build /app/packages/backend/src/database/migrations ./packages/backend/src/database/migrations
# Copy the entrypoint script and make it executable
COPY docker/docker-entrypoint.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/docker-entrypoint.sh
# Expose the port the app runs on
EXPOSE 4000
EXPOSE 3000
# Set the entrypoint
ENTRYPOINT ["docker-entrypoint.sh"]
# Start the application
CMD ["pnpm", "docker-start"]

View File

@@ -0,0 +1,17 @@
#!/bin/sh
# Container entrypoint: prepare runtime state, then hand control to the CMD.

# Abort on the first failing command.
set -e

# Re-install production dependencies inside the running container so native
# addons get built for *this* container's CPU architecture. On multi-platform
# images this prevents "exec format error" when the runtime architecture
# differs from the build architecture.
# NOTE(review): this needs package-registry access at container start-up.
pnpm install --frozen-lockfile --prod

# Apply pending database migrations before the application boots, so the
# server never starts against an out-of-date schema.
pnpm db:migrate

# Replace this shell with the container's CMD (proper PID 1 handoff).
exec "$@"

View File

@@ -10,7 +10,7 @@
"db:generate": "dotenv -- pnpm --filter @open-archiver/backend db:generate",
"db:migrate": "dotenv -- pnpm --filter @open-archiver/backend db:migrate",
"db:migrate:dev": "dotenv -- pnpm --filter @open-archiver/backend db:migrate:dev",
"docker-start": "pnpm db:migrate && concurrently \"pnpm start:workers\" \"pnpm start\""
"docker-start": "concurrently \"pnpm start:workers\" \"pnpm start\""
},
"dependencies": {
"concurrently": "^9.2.0",

View File

@@ -3,12 +3,17 @@ import 'dotenv/config';
/**
* @see https://github.com/taskforcesh/bullmq/blob/master/docs/gitbook/guide/connections.md
*/
export const connection = {
const connectionOptions: any = {
host: process.env.REDIS_HOST || 'localhost',
port: (process.env.REDIS_PORT && parseInt(process.env.REDIS_PORT, 10)) || 6379,
password: process.env.REDIS_PASSWORD,
maxRetriesPerRequest: null,
tls: {
rejectUnauthorized: false
}
};
if (process.env.REDIS_TLS_ENABLED === 'true') {
connectionOptions.tls = {
rejectUnauthorized: false
};
}
export const connection = connectionOptions;

View File

@@ -1,15 +1,17 @@
import type { Handle } from '@sveltejs/kit';
import { jwtVerify } from 'jose';
import type { User } from '@open-archiver/types';
import { JWT_SECRET } from '$env/static/private';
const JWT_SECRET = new TextEncoder().encode('a-very-secret-key');
const JWT_SECRET_ENCODED = new TextEncoder().encode(JWT_SECRET);
export const handle: Handle = async ({ event, resolve }) => {
const token = event.cookies.get('accessToken');
if (token) {
try {
const { payload } = await jwtVerify(token, JWT_SECRET);
const { payload } = await jwtVerify(token, JWT_SECRET_ENCODED);
event.locals.user = payload as Omit<User, 'passwordHash'>;
event.locals.accessToken = token;
} catch (error) {