Compare commits

...

17 Commits

Author SHA1 Message Date
Rostislav Dugin
c100d94a92 FIX (tidy): Run go mod tidy 2025-12-20 00:25:39 +03:00
Rostislav Dugin
f14739a1fb FEATURE (intervals): Add cron intervals for backups 2025-12-20 00:23:39 +03:00
github-actions[bot]
b7d2521088 Update CITATION.cff to v2.9.0 2025-12-19 20:39:27 +00:00
Rostislav Dugin
eb8e5aa428 FEATURE (storages): Add SFTP 2025-12-19 23:24:16 +03:00
github-actions[bot]
1f030bd8fb Update CITATION.cff to v2.8.1 2025-12-19 11:44:37 +00:00
Rostislav Dugin
b278a79104 FIX (databases): Remove optional text from db name field 2025-12-19 14:28:54 +03:00
github-actions[bot]
b74ae734af Update CITATION.cff to v2.8.0 2025-12-18 16:13:17 +00:00
Rostislav Dugin
d21a9398c6 FIX (Dockerfile): Upgrade Go version 2025-12-18 18:57:26 +03:00
Rostislav Dugin
6ad7b95b7d FIX (go tidy): Run go mod tidy 2025-12-18 18:42:02 +03:00
Rostislav Dugin
8432d1626f FIX (linting): Increase lint timeout 2025-12-18 18:36:11 +03:00
Rostislav Dugin
d7f631fa93 FIX (golangci): Upgrade version of golangci 2025-12-18 18:33:41 +03:00
Rostislav Dugin
c3fb2aa529 FIX (golangci): Upgrade version of golangci 2025-12-18 18:31:03 +03:00
Rostislav Dugin
1817937409 FIX (ci / cd): Upgrade Go version 2025-12-18 18:16:37 +03:00
Rostislav Dugin
3172396668 FIX (extensions): Exclude extensions comments as well 2025-12-18 17:54:52 +03:00
Rostislav Dugin
9cd5c8c57c Merge branch 'main' of https://github.com/RostislavDugin/postgresus 2025-12-18 17:49:24 +03:00
Rostislav Dugin
d8826d85c3 FEATURE (storages): Add rclone 2025-12-18 17:46:16 +03:00
github-actions[bot]
49fdd46cbe Update CITATION.cff to v2.7.0 2025-12-18 11:49:21 +00:00
49 changed files with 3220 additions and 202 deletions

View File

@@ -17,7 +17,7 @@ jobs:
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: "1.23.3"
go-version: "1.24.4"
- name: Cache Go modules
uses: actions/cache@v4
@@ -31,7 +31,7 @@ jobs:
- name: Install golangci-lint
run: |
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.60.3
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s -- -b $(go env GOPATH)/bin v2.7.2
echo "$(go env GOPATH)/bin" >> $GITHUB_PATH
- name: Install swag for swagger generation
@@ -116,7 +116,7 @@ jobs:
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: "1.23.3"
go-version: "1.24.4"
- name: Cache Go modules
uses: actions/cache@v4
@@ -165,6 +165,10 @@ jobs:
TEST_AZURITE_BLOB_PORT=10000
# testing NAS
TEST_NAS_PORT=7006
# testing FTP
TEST_FTP_PORT=7007
# testing SFTP
TEST_SFTP_PORT=7008
# testing Telegram
TEST_TELEGRAM_BOT_TOKEN=${{ secrets.TEST_TELEGRAM_BOT_TOKEN }}
TEST_TELEGRAM_CHAT_ID=${{ secrets.TEST_TELEGRAM_CHAT_ID }}
@@ -200,6 +204,12 @@ jobs:
# Wait for Azurite
timeout 60 bash -c 'until nc -z localhost 10000; do sleep 2; done'
# Wait for FTP
timeout 60 bash -c 'until nc -z localhost 7007; do sleep 2; done'
# Wait for SFTP
timeout 60 bash -c 'until nc -z localhost 7008; do sleep 2; done'
- name: Create data and temp directories
run: |
# Create directories that are used for backups and restore

View File

@@ -29,5 +29,5 @@ keywords:
- system-administration
- database-backup
license: Apache-2.0
version: 2.6.0
date-released: "2025-12-17"
version: 2.9.0
date-released: "2025-12-19"

View File

@@ -22,7 +22,7 @@ RUN npm run build
# ========= BUILD BACKEND =========
# Backend build stage
FROM --platform=$BUILDPLATFORM golang:1.23.3 AS backend-build
FROM --platform=$BUILDPLATFORM golang:1.24.4 AS backend-build
# Make TARGET args available early so tools built here match the final image arch
ARG TARGETOS
@@ -77,16 +77,16 @@ ENV APP_VERSION=$APP_VERSION
# Set production mode for Docker containers
ENV ENV_MODE=production
# Install PostgreSQL server and client tools (versions 12-18)
# Install PostgreSQL server and client tools (versions 12-18) and rclone
RUN apt-get update && apt-get install -y --no-install-recommends \
wget ca-certificates gnupg lsb-release sudo gosu && \
wget ca-certificates gnupg lsb-release sudo gosu curl unzip && \
wget -qO- https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" \
> /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y --no-install-recommends \
postgresql-17 postgresql-18 postgresql-client-12 postgresql-client-13 postgresql-client-14 postgresql-client-15 \
postgresql-client-16 postgresql-client-17 postgresql-client-18 && \
postgresql-client-16 postgresql-client-17 postgresql-client-18 rclone && \
rm -rf /var/lib/apt/lists/*
# Create postgres user and set up directories

View File

@@ -38,14 +38,14 @@
### 🔄 **Scheduled Backups**
- **Flexible scheduling**: hourly, daily, weekly, monthly
- **Flexible scheduling**: hourly, daily, weekly, monthly or cron
- **Precise timing**: run backups at specific times (e.g., 4 AM during low traffic)
- **Smart compression**: 4-8x space savings with balanced compression (~20% overhead)
### 🗄️ **Multiple Storage Destinations** <a href="https://postgresus.com/storages">(view supported)</a>
- **Local storage**: Keep backups on your VPS/server
- **Cloud storage**: S3, Cloudflare R2, Google Drive, NAS, Dropbox and more
- **Cloud storage**: S3, Cloudflare R2, Google Drive, NAS, Dropbox, SFTP, Rclone and more
- **Secure**: All data stays under your control
### 📱 **Smart Notifications** <a href="https://postgresus.com/notifiers">(view supported)</a>
@@ -212,7 +212,7 @@ For more options (NodePort, TLS, HTTPRoute for Gateway API), see the [Helm chart
1. **Access the dashboard**: Navigate to `http://localhost:4005`
2. **Add first DB for backup**: Click "New Database" and follow the setup wizard
3. **Configure schedule**: Choose from hourly, daily, weekly or monthly intervals
3. **Configure schedule**: Choose from hourly, daily, weekly, monthly or cron intervals
4. **Set database connection**: Enter your PostgreSQL credentials and connection details
5. **Choose storage**: Select where to store your backups (local, S3, Google Drive, etc.)
6. **Add notifications** (optional): Configure email, Telegram, Slack, or webhook notifications

View File

@@ -9,4 +9,4 @@ When applying changes, do not forget to refactor old code.
You can shortify, make more readable, improve code quality, etc.
Common logic can be extracted to functions, constants, files, etc.
After each large change with more than ~50-100 lines of code - always run `make lint` (from backend root folder).
After each large change with more than ~50-100 lines of code - always run `make lint` (from backend root folder) and, if you change frontend, run `npm run format` (from frontend root folder).

View File

@@ -41,4 +41,6 @@ TEST_SUPABASE_USERNAME=
TEST_SUPABASE_PASSWORD=
TEST_SUPABASE_DATABASE=
# FTP
TEST_FTP_PORT=7007
TEST_FTP_PORT=7007
# SFTP
TEST_SFTP_PORT=7008

View File

@@ -1,7 +1,7 @@
version: "2"
run:
timeout: 1m
timeout: 5m
tests: false
concurrency: 4

View File

@@ -146,3 +146,11 @@ services:
- FTP_USER_HOME=/home/ftpusers/testuser
- FTP_PASSIVE_PORTS=30000:30009
container_name: test-ftp
# Test SFTP server
test-sftp:
image: atmoz/sftp:latest
ports:
- "${TEST_SFTP_PORT:-7008}:22"
command: testuser:testpassword:1001::upload
container_name: test-sftp

View File

@@ -1,6 +1,6 @@
module postgresus-backend
go 1.23.3
go 1.24.4
require (
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0
@@ -12,40 +12,196 @@ require (
github.com/google/uuid v1.6.0
github.com/ilyakaznacheev/cleanenv v1.5.0
github.com/jackc/pgx/v5 v5.7.5
github.com/jlaffaye/ftp v0.2.0
github.com/jlaffaye/ftp v0.2.1-0.20240918233326-1b970516f5d3
github.com/jmoiron/sqlx v1.4.0
github.com/joho/godotenv v1.5.1
github.com/lib/pq v1.10.9
github.com/minio/minio-go/v7 v7.0.92
github.com/shirou/gopsutil/v4 v4.25.5
github.com/minio/minio-go/v7 v7.0.97
github.com/pkg/sftp v1.13.10
github.com/rclone/rclone v1.72.1
github.com/robfig/cron/v3 v3.0.1
github.com/shirou/gopsutil/v4 v4.25.10
github.com/stretchr/testify v1.11.1
github.com/swaggo/files v1.0.1
github.com/swaggo/gin-swagger v1.6.0
github.com/swaggo/swag v1.16.4
golang.org/x/crypto v0.41.0
golang.org/x/time v0.12.0
golang.org/x/crypto v0.46.0
golang.org/x/time v0.14.0
gorm.io/driver/postgres v1.5.11
gorm.io/gorm v1.26.1
)
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect
github.com/hashicorp/errwrap v1.0.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/storage/azfile v1.5.3 // indirect
github.com/Azure/go-ntlmssp v0.0.2-0.20251110135918-10b7b7e7cd26 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 // indirect
github.com/Files-com/files-sdk-go/v3 v3.2.264 // indirect
github.com/IBM/go-sdk-core/v5 v5.21.0 // indirect
github.com/Max-Sum/base32768 v0.0.0-20230304063302-18e6ce5945fd // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/ProtonMail/bcrypt v0.0.0-20211005172633-e235017c1baf // indirect
github.com/ProtonMail/gluon v0.17.1-0.20230724134000-308be39be96e // indirect
github.com/ProtonMail/go-crypto v1.3.0 // indirect
github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect
github.com/ProtonMail/go-srp v0.0.7 // indirect
github.com/ProtonMail/gopenpgp/v2 v2.9.0 // indirect
github.com/PuerkitoBio/goquery v1.10.3 // indirect
github.com/a1ex3/zstd-seekable-format-go/pkg v0.10.0 // indirect
github.com/abbot/go-http-auth v0.4.0 // indirect
github.com/anchore/go-lzo v0.1.0 // indirect
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/appscode/go-querystring v0.0.0-20170504095604-0126cfb3f1dc // indirect
github.com/aws/aws-sdk-go-v2 v1.39.6 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 // indirect
github.com/aws/aws-sdk-go-v2/config v1.31.17 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 // indirect
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.4 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 // indirect
github.com/aws/aws-sdk-go-v2/service/s3 v1.90.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 // indirect
github.com/aws/smithy-go v1.23.2 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/boombuler/barcode v1.1.0 // indirect
github.com/bradenaw/juniper v0.15.3 // indirect
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 // indirect
github.com/buengese/sgzip v0.1.1 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/calebcase/tmpfile v1.0.3 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/chilts/sid v0.0.0-20190607042430-660e94789ec9 // indirect
github.com/clipperhouse/stringish v0.1.1 // indirect
github.com/clipperhouse/uax29/v2 v2.3.0 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/cloudinary/cloudinary-go/v2 v2.13.0 // indirect
github.com/cloudsoda/go-smb2 v0.0.0-20250228001242-d4c70e6251cc // indirect
github.com/cloudsoda/sddl v0.0.0-20250224235906-926454e91efc // indirect
github.com/colinmarc/hdfs/v2 v2.4.0 // indirect
github.com/coreos/go-semver v0.3.1 // indirect
github.com/coreos/go-systemd/v22 v22.6.0 // indirect
github.com/creasty/defaults v1.8.0 // indirect
github.com/cronokirby/saferith v0.33.0 // indirect
github.com/diskfs/go-diskfs v1.7.0 // indirect
github.com/dropbox/dropbox-sdk-go-unofficial/v6 v6.0.5 // indirect
github.com/emersion/go-message v0.18.2 // indirect
github.com/emersion/go-vcard v0.0.0-20241024213814-c9703dde27ff // indirect
github.com/flynn/noise v1.1.0 // indirect
github.com/go-chi/chi/v5 v5.2.3 // indirect
github.com/go-darwin/apfs v0.0.0-20211011131704-f84b94dbf348 // indirect
github.com/go-git/go-billy/v5 v5.6.2 // indirect
github.com/go-openapi/errors v0.22.4 // indirect
github.com/go-openapi/strfmt v0.25.0 // indirect
github.com/go-resty/resty/v2 v2.16.5 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/gofrs/flock v0.13.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
github.com/google/btree v1.1.3 // indirect
github.com/gorilla/schema v1.4.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/henrybear327/Proton-API-Bridge v1.0.0 // indirect
github.com/henrybear327/go-proton-api v1.0.0 // indirect
github.com/jcmturner/aescts/v2 v2.0.0 // indirect
github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect
github.com/jcmturner/gofork v1.7.6 // indirect
github.com/jcmturner/goidentity/v6 v6.0.1 // indirect
github.com/jcmturner/gokrb5/v8 v8.4.4 // indirect
github.com/jcmturner/rpc/v2 v2.0.3 // indirect
github.com/jtolio/noiseconn v0.0.0-20231127013910-f6d9ecbf1de7 // indirect
github.com/jzelinskie/whirlpool v0.0.0-20201016144138-0675e54bb004 // indirect
github.com/klauspost/crc32 v1.3.0 // indirect
github.com/koofr/go-httpclient v0.0.0-20240520111329-e20f8f203988 // indirect
github.com/koofr/go-koofrclient v0.0.0-20221207135200-cbd7fc9ad6a6 // indirect
github.com/kr/fs v0.1.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/lanrat/extsort v1.4.2 // indirect
github.com/lpar/date v1.0.0 // indirect
github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-runewidth v0.0.19 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/ncw/swift/v2 v2.0.5 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/oracle/oci-go-sdk/v65 v65.104.0 // indirect
github.com/panjf2000/ants/v2 v2.11.3 // indirect
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
github.com/pengsrc/go-shared v0.2.1-0.20190131101655-1999055a4a14 // indirect
github.com/peterh/liner v1.2.2 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pkg/xattr v0.4.12 // indirect
github.com/pquerna/otp v1.5.0 // indirect
github.com/prometheus/client_golang v1.23.2 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/common v0.67.2 // indirect
github.com/prometheus/procfs v0.19.2 // indirect
github.com/putdotio/go-putio/putio v0.0.0-20200123120452-16d982cac2b8 // indirect
github.com/relvacode/iso8601 v1.7.0 // indirect
github.com/rfjakob/eme v1.1.2 // indirect
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 // indirect
github.com/samber/lo v1.52.0 // indirect
github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af // indirect
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 // indirect
github.com/sony/gobreaker v1.0.0 // indirect
github.com/spacemonkeygo/monkit/v3 v3.0.25-0.20251022131615-eb24eb109368 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/t3rm1n4l/go-mega v0.0.0-20251031123324-a804aaa87491 // indirect
github.com/tklauser/go-sysconf v0.3.15 // indirect
github.com/tklauser/numcpus v0.10.0 // indirect
github.com/ulikunitz/xz v0.5.15 // indirect
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
github.com/yunify/qingstor-sdk-go/v3 v3.2.0 // indirect
github.com/zeebo/blake3 v0.2.4 // indirect
github.com/zeebo/errs v1.4.0 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.etcd.io/bbolt v1.4.3 // indirect
go.mongodb.org/mongo-driver v1.17.6 // indirect
go.yaml.in/yaml/v2 v2.4.3 // indirect
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect
golang.org/x/term v0.38.0 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect
gopkg.in/validator.v2 v2.0.1 // indirect
moul.io/http2curl/v2 v2.3.0 // indirect
sigs.k8s.io/yaml v1.6.0 // indirect
storj.io/common v0.0.0-20251107171817-6221ae45072c // indirect
storj.io/drpc v0.0.35-0.20250513201419-f7819ea69b55 // indirect
storj.io/eventkit v0.0.0-20250410172343-61f26d3de156 // indirect
storj.io/infectious v0.0.2 // indirect
storj.io/picobuf v0.0.4 // indirect
storj.io/uplink v1.13.1 // indirect
)
require (
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go/auth v0.17.0 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
github.com/geoffgarside/ber v1.1.0 // indirect
cloud.google.com/go/compute/metadata v0.9.0 // indirect
github.com/geoffgarside/ber v1.2.0 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/hirochachacha/go-smb2 v1.1.0
google.golang.org/genproto/googleapis/api v0.0.0-20250528174236-200df99c418a // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101 // indirect
google.golang.org/grpc v1.76.0 // indirect
)
require (
@@ -56,11 +212,11 @@ require (
github.com/bytedance/sonic v1.13.2 // indirect
github.com/bytedance/sonic/loader v0.2.4 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/ebitengine/purego v0.8.4 // indirect
github.com/ebitengine/purego v0.9.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
github.com/gabriel-vasile/mimetype v1.4.11 // indirect
github.com/gin-contrib/sse v1.1.0 // indirect
github.com/go-ini/ini v1.67.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
@@ -72,7 +228,7 @@ require (
github.com/go-openapi/swag v0.19.15 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.26.0 // indirect
github.com/go-playground/validator/v10 v10.28.0 // indirect
github.com/go-sql-driver/mysql v1.9.2 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
@@ -82,40 +238,39 @@ require (
github.com/jinzhu/now v1.1.5 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/klauspost/compress v1.18.1 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mailru/easyjson v0.7.6 // indirect
github.com/mailru/easyjson v0.9.1 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/minio/crc64nvme v1.0.1 // indirect
github.com/minio/crc64nvme v1.1.1 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/philhofer/fwd v1.2.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/rs/xid v1.6.0 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/tinylib/msgp v1.3.0 // indirect
github.com/tinylib/msgp v1.5.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
go.opentelemetry.io/otel v1.36.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 // indirect
go.opentelemetry.io/otel v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/arch v0.17.0 // indirect
golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.16.0 // indirect
golang.org/x/sys v0.35.0 // indirect
golang.org/x/text v0.28.0 // indirect
golang.org/x/tools v0.35.0 // indirect
google.golang.org/api v0.239.0
google.golang.org/protobuf v1.36.6 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/oauth2 v0.33.0
golang.org/x/sync v0.19.0 // indirect
golang.org/x/sys v0.39.0 // indirect
golang.org/x/text v0.32.0 // indirect
golang.org/x/tools v0.39.0 // indirect
google.golang.org/api v0.255.0
google.golang.org/protobuf v1.36.10 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
olympos.io/encoding/edn v0.0.0-20201019073823-d3554ca0b0a3 // indirect

File diff suppressed because it is too large Load Diff

View File

@@ -47,8 +47,9 @@ type EnvVariables struct {
TestAzuriteBlobPort string `env:"TEST_AZURITE_BLOB_PORT"`
TestNASPort string `env:"TEST_NAS_PORT"`
TestFTPPort string `env:"TEST_FTP_PORT"`
TestNASPort string `env:"TEST_NAS_PORT"`
TestFTPPort string `env:"TEST_FTP_PORT"`
TestSFTPPort string `env:"TEST_SFTP_PORT"`
// oauth
GitHubClientID string `env:"GITHUB_CLIENT_ID"`

View File

@@ -7,4 +7,5 @@ const (
IntervalDaily IntervalType = "DAILY"
IntervalWeekly IntervalType = "WEEKLY"
IntervalMonthly IntervalType = "MONTHLY"
IntervalCron IntervalType = "CRON"
)

View File

@@ -5,6 +5,7 @@ import (
"time"
"github.com/google/uuid"
"github.com/robfig/cron/v3"
"gorm.io/gorm"
)
@@ -12,11 +13,13 @@ type Interval struct {
ID uuid.UUID `json:"id" gorm:"primaryKey;type:uuid;default:gen_random_uuid()"`
Interval IntervalType `json:"interval" gorm:"type:text;not null"`
TimeOfDay *string `json:"timeOfDay" gorm:"type:text;"`
TimeOfDay *string `json:"timeOfDay" gorm:"type:text;"`
// only for WEEKLY
Weekday *int `json:"weekday,omitempty" gorm:"type:int"`
Weekday *int `json:"weekday,omitempty" gorm:"type:int"`
// only for MONTHLY
DayOfMonth *int `json:"dayOfMonth,omitempty" gorm:"type:int"`
DayOfMonth *int `json:"dayOfMonth,omitempty" gorm:"type:int"`
// only for CRON
CronExpression *string `json:"cronExpression,omitempty" gorm:"type:text"`
}
func (i *Interval) BeforeSave(tx *gorm.DB) error {
@@ -40,6 +43,16 @@ func (i *Interval) Validate() error {
return errors.New("day of month is required for monthly intervals")
}
// for cron interval cron expression is required and must be valid
if i.Interval == IntervalCron {
if i.CronExpression == nil || *i.CronExpression == "" {
return errors.New("cron expression is required for cron intervals")
}
if err := i.validateCronExpression(*i.CronExpression); err != nil {
return err
}
}
return nil
}
@@ -59,6 +72,8 @@ func (i *Interval) ShouldTriggerBackup(now time.Time, lastBackupTime *time.Time)
return i.shouldTriggerWeekly(now, *lastBackupTime)
case IntervalMonthly:
return i.shouldTriggerMonthly(now, *lastBackupTime)
case IntervalCron:
return i.shouldTriggerCron(now, *lastBackupTime)
default:
return false
}
@@ -66,11 +81,12 @@ func (i *Interval) ShouldTriggerBackup(now time.Time, lastBackupTime *time.Time)
func (i *Interval) Copy() *Interval {
return &Interval{
ID: uuid.Nil,
Interval: i.Interval,
TimeOfDay: i.TimeOfDay,
Weekday: i.Weekday,
DayOfMonth: i.DayOfMonth,
ID: uuid.Nil,
Interval: i.Interval,
TimeOfDay: i.TimeOfDay,
Weekday: i.Weekday,
DayOfMonth: i.DayOfMonth,
CronExpression: i.CronExpression,
}
}
@@ -204,3 +220,31 @@ func getStartOfWeek(t time.Time) time.Time {
func getStartOfMonth(t time.Time) time.Time {
return time.Date(t.Year(), t.Month(), 1, 0, 0, 0, 0, t.Location())
}
// cron trigger: check if we've passed a scheduled cron time since last backup
func (i *Interval) shouldTriggerCron(now, lastBackup time.Time) bool {
if i.CronExpression == nil || *i.CronExpression == "" {
return false
}
parser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
schedule, err := parser.Parse(*i.CronExpression)
if err != nil {
return false
}
// Find the next scheduled time after the last backup
nextAfterLastBackup := schedule.Next(lastBackup)
// If we're at or past that next scheduled time, trigger
return now.After(nextAfterLastBackup) || now.Equal(nextAfterLastBackup)
}
func (i *Interval) validateCronExpression(expr string) error {
parser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
_, err := parser.Parse(expr)
if err != nil {
return errors.New("invalid cron expression: " + err.Error())
}
return nil
}

View File

@@ -457,6 +457,144 @@ func TestInterval_ShouldTriggerBackup_Monthly(t *testing.T) {
)
}
func TestInterval_ShouldTriggerBackup_Cron(t *testing.T) {
cronExpr := "0 2 * * *" // Daily at 2:00 AM
interval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &cronExpr,
}
t.Run("No previous backup: Trigger backup immediately", func(t *testing.T) {
now := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
should := interval.ShouldTriggerBackup(now, nil)
assert.True(t, should)
})
t.Run("Before scheduled cron time: Do not trigger backup", func(t *testing.T) {
now := time.Date(2024, 1, 15, 1, 59, 0, 0, time.UTC)
lastBackup := time.Date(2024, 1, 14, 2, 0, 0, 0, time.UTC) // Yesterday at 2 AM
should := interval.ShouldTriggerBackup(now, &lastBackup)
assert.False(t, should)
})
t.Run("Exactly at scheduled cron time: Trigger backup", func(t *testing.T) {
now := time.Date(2024, 1, 15, 2, 0, 0, 0, time.UTC)
lastBackup := time.Date(2024, 1, 14, 2, 0, 0, 0, time.UTC) // Yesterday at 2 AM
should := interval.ShouldTriggerBackup(now, &lastBackup)
assert.True(t, should)
})
t.Run("After scheduled cron time: Trigger backup", func(t *testing.T) {
now := time.Date(2024, 1, 15, 3, 0, 0, 0, time.UTC)
lastBackup := time.Date(2024, 1, 14, 2, 0, 0, 0, time.UTC) // Yesterday at 2 AM
should := interval.ShouldTriggerBackup(now, &lastBackup)
assert.True(t, should)
})
t.Run("Backup already done after scheduled time: Do not trigger again", func(t *testing.T) {
now := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
lastBackup := time.Date(2024, 1, 15, 2, 5, 0, 0, time.UTC) // Today at 2:05 AM
should := interval.ShouldTriggerBackup(now, &lastBackup)
assert.False(t, should)
})
t.Run("Weekly cron expression: 0 3 * * 1 (Monday at 3 AM)", func(t *testing.T) {
weeklyCron := "0 3 * * 1" // Every Monday at 3 AM
weeklyInterval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &weeklyCron,
}
// Monday Jan 15, 2024 at 3:00 AM
monday := time.Date(2024, 1, 15, 3, 0, 0, 0, time.UTC)
// Last backup was previous Monday
lastBackup := time.Date(2024, 1, 8, 3, 0, 0, 0, time.UTC)
should := weeklyInterval.ShouldTriggerBackup(monday, &lastBackup)
assert.True(t, should)
})
t.Run("Complex cron expression: 30 4 1,15 * * (1st and 15th at 4:30 AM)", func(t *testing.T) {
complexCron := "30 4 1,15 * *" // 1st and 15th of each month at 4:30 AM
complexInterval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &complexCron,
}
// Jan 15, 2024 at 4:30 AM
now := time.Date(2024, 1, 15, 4, 30, 0, 0, time.UTC)
// Last backup was Jan 1
lastBackup := time.Date(2024, 1, 1, 4, 30, 0, 0, time.UTC)
should := complexInterval.ShouldTriggerBackup(now, &lastBackup)
assert.True(t, should)
})
t.Run("Every 6 hours cron expression: 0 */6 * * *", func(t *testing.T) {
sixHourlyCron := "0 */6 * * *" // Every 6 hours (0:00, 6:00, 12:00, 18:00)
sixHourlyInterval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &sixHourlyCron,
}
// 12:00 - next trigger after 6:00
now := time.Date(2024, 1, 15, 12, 0, 0, 0, time.UTC)
// Last backup was at 6:00
lastBackup := time.Date(2024, 1, 15, 6, 0, 0, 0, time.UTC)
should := sixHourlyInterval.ShouldTriggerBackup(now, &lastBackup)
assert.True(t, should)
})
t.Run("Invalid cron expression returns false", func(t *testing.T) {
invalidCron := "invalid cron"
invalidInterval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &invalidCron,
}
now := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
lastBackup := time.Date(2024, 1, 14, 10, 0, 0, 0, time.UTC)
should := invalidInterval.ShouldTriggerBackup(now, &lastBackup)
assert.False(t, should)
})
t.Run("Empty cron expression returns false", func(t *testing.T) {
emptyCron := ""
emptyInterval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &emptyCron,
}
now := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
lastBackup := time.Date(2024, 1, 14, 10, 0, 0, 0, time.UTC)
should := emptyInterval.ShouldTriggerBackup(now, &lastBackup)
assert.False(t, should)
})
t.Run("Nil cron expression returns false", func(t *testing.T) {
nilInterval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: nil,
}
now := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
lastBackup := time.Date(2024, 1, 14, 10, 0, 0, 0, time.UTC)
should := nilInterval.ShouldTriggerBackup(now, &lastBackup)
assert.False(t, should)
})
}
func TestInterval_Validate(t *testing.T) {
t.Run("Daily interval requires time of day", func(t *testing.T) {
interval := &Interval{
@@ -526,4 +664,60 @@ func TestInterval_Validate(t *testing.T) {
err := interval.Validate()
assert.NoError(t, err)
})
t.Run("Cron interval requires cron expression", func(t *testing.T) {
interval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
}
err := interval.Validate()
assert.Error(t, err)
assert.Contains(t, err.Error(), "cron expression is required")
})
t.Run("Cron interval with empty expression is invalid", func(t *testing.T) {
emptyCron := ""
interval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &emptyCron,
}
err := interval.Validate()
assert.Error(t, err)
assert.Contains(t, err.Error(), "cron expression is required")
})
t.Run("Cron interval with invalid expression is invalid", func(t *testing.T) {
invalidCron := "invalid cron"
interval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &invalidCron,
}
err := interval.Validate()
assert.Error(t, err)
assert.Contains(t, err.Error(), "invalid cron expression")
})
t.Run("Valid cron interval with daily expression", func(t *testing.T) {
cronExpr := "0 2 * * *" // Daily at 2 AM
interval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &cronExpr,
}
err := interval.Validate()
assert.NoError(t, err)
})
t.Run("Valid cron interval with complex expression", func(t *testing.T) {
cronExpr := "30 4 1,15 * *" // 1st and 15th of each month at 4:30 AM
interval := &Interval{
ID: uuid.New(),
Interval: IntervalCron,
CronExpression: &cronExpr,
}
err := interval.Validate()
assert.NoError(t, err)
})
}

View File

@@ -599,21 +599,21 @@ func (uc *RestorePostgresqlBackupUsecase) generateFilteredTocList(
return "", fmt.Errorf("failed to generate TOC list: %w", err)
}
// Filter out EXTENSION lines
// Filter out EXTENSION-related lines (both CREATE EXTENSION and COMMENT ON EXTENSION)
var filteredLines []string
for _, line := range strings.Split(string(tocOutput), "\n") {
// Skip lines that contain EXTENSION (but not COMMENT ON EXTENSION)
// TOC format: "123; 1234 12345 EXTENSION - extension_name owner"
for line := range strings.SplitSeq(string(tocOutput), "\n") {
trimmedLine := strings.TrimSpace(line)
if trimmedLine == "" {
continue
}
// Check if this is an EXTENSION entry (not a comment about extension)
// Extension lines look like: "3420; 0 0 EXTENSION - uuid-ossp"
if strings.Contains(trimmedLine, " EXTENSION ") &&
!strings.Contains(strings.ToUpper(trimmedLine), "COMMENT") {
uc.logger.Info("Excluding extension from restore", "tocLine", trimmedLine)
upperLine := strings.ToUpper(trimmedLine)
// Skip lines that contain " EXTENSION " - this catches both:
// - CREATE EXTENSION entries: "3420; 0 0 EXTENSION - uuid-ossp"
// - COMMENT ON EXTENSION entries: "3462; 0 0 COMMENT - EXTENSION "uuid-ossp""
if strings.Contains(upperLine, " EXTENSION ") {
uc.logger.Info("Excluding extension-related entry from restore", "tocLine", trimmedLine)
continue
}

View File

@@ -12,7 +12,9 @@ import (
google_drive_storage "postgresus-backend/internal/features/storages/models/google_drive"
local_storage "postgresus-backend/internal/features/storages/models/local"
nas_storage "postgresus-backend/internal/features/storages/models/nas"
rclone_storage "postgresus-backend/internal/features/storages/models/rclone"
s3_storage "postgresus-backend/internal/features/storages/models/s3"
sftp_storage "postgresus-backend/internal/features/storages/models/sftp"
users_enums "postgresus-backend/internal/features/users/enums"
users_middleware "postgresus-backend/internal/features/users/middleware"
users_services "postgresus-backend/internal/features/users/services"
@@ -786,6 +788,108 @@ func Test_StorageSensitiveDataLifecycle_AllTypes(t *testing.T) {
assert.Equal(t, "", storage.FTPStorage.Password)
},
},
{
name: "SFTP Storage",
storageType: StorageTypeSFTP,
createStorage: func(workspaceID uuid.UUID) *Storage {
return &Storage{
WorkspaceID: workspaceID,
Type: StorageTypeSFTP,
Name: "Test SFTP Storage",
SFTPStorage: &sftp_storage.SFTPStorage{
Host: "sftp.example.com",
Port: 22,
Username: "testuser",
Password: "original-password",
PrivateKey: "original-private-key",
SkipHostKeyVerify: false,
Path: "/backups",
},
}
},
updateStorage: func(workspaceID uuid.UUID, storageID uuid.UUID) *Storage {
return &Storage{
ID: storageID,
WorkspaceID: workspaceID,
Type: StorageTypeSFTP,
Name: "Updated SFTP Storage",
SFTPStorage: &sftp_storage.SFTPStorage{
Host: "sftp2.example.com",
Port: 2222,
Username: "testuser2",
Password: "",
PrivateKey: "",
SkipHostKeyVerify: true,
Path: "/backups2",
},
}
},
verifySensitiveData: func(t *testing.T, storage *Storage) {
assert.True(t, strings.HasPrefix(storage.SFTPStorage.Password, "enc:"),
"Password should be encrypted with 'enc:' prefix")
assert.True(t, strings.HasPrefix(storage.SFTPStorage.PrivateKey, "enc:"),
"PrivateKey should be encrypted with 'enc:' prefix")
encryptor := encryption.GetFieldEncryptor()
password, err := encryptor.Decrypt(storage.ID, storage.SFTPStorage.Password)
assert.NoError(t, err)
assert.Equal(t, "original-password", password)
privateKey, err := encryptor.Decrypt(storage.ID, storage.SFTPStorage.PrivateKey)
assert.NoError(t, err)
assert.Equal(t, "original-private-key", privateKey)
},
verifyHiddenData: func(t *testing.T, storage *Storage) {
assert.Equal(t, "", storage.SFTPStorage.Password)
assert.Equal(t, "", storage.SFTPStorage.PrivateKey)
},
},
{
name: "Rclone Storage",
storageType: StorageTypeRclone,
createStorage: func(workspaceID uuid.UUID) *Storage {
return &Storage{
WorkspaceID: workspaceID,
Type: StorageTypeRclone,
Name: "Test Rclone Storage",
RcloneStorage: &rclone_storage.RcloneStorage{
ConfigContent: "[myremote]\ntype = s3\nprovider = AWS\naccess_key_id = test\nsecret_access_key = secret\n",
RemotePath: "/backups",
},
}
},
updateStorage: func(workspaceID uuid.UUID, storageID uuid.UUID) *Storage {
return &Storage{
ID: storageID,
WorkspaceID: workspaceID,
Type: StorageTypeRclone,
Name: "Updated Rclone Storage",
RcloneStorage: &rclone_storage.RcloneStorage{
ConfigContent: "",
RemotePath: "/backups2",
},
}
},
verifySensitiveData: func(t *testing.T, storage *Storage) {
assert.True(t, strings.HasPrefix(storage.RcloneStorage.ConfigContent, "enc:"),
"ConfigContent should be encrypted with 'enc:' prefix")
encryptor := encryption.GetFieldEncryptor()
configContent, err := encryptor.Decrypt(
storage.ID,
storage.RcloneStorage.ConfigContent,
)
assert.NoError(t, err)
assert.Equal(
t,
"[myremote]\ntype = s3\nprovider = AWS\naccess_key_id = test\nsecret_access_key = secret\n",
configContent,
)
},
verifyHiddenData: func(t *testing.T, storage *Storage) {
assert.Equal(t, "", storage.RcloneStorage.ConfigContent)
},
},
}
for _, tc := range testCases {

View File

@@ -9,4 +9,6 @@ const (
StorageTypeNAS StorageType = "NAS"
StorageTypeAzureBlob StorageType = "AZURE_BLOB"
StorageTypeFTP StorageType = "FTP"
StorageTypeSFTP StorageType = "SFTP"
StorageTypeRclone StorageType = "RCLONE"
)

View File

@@ -10,7 +10,9 @@ import (
google_drive_storage "postgresus-backend/internal/features/storages/models/google_drive"
local_storage "postgresus-backend/internal/features/storages/models/local"
nas_storage "postgresus-backend/internal/features/storages/models/nas"
rclone_storage "postgresus-backend/internal/features/storages/models/rclone"
s3_storage "postgresus-backend/internal/features/storages/models/s3"
sftp_storage "postgresus-backend/internal/features/storages/models/sftp"
"postgresus-backend/internal/util/encryption"
"github.com/google/uuid"
@@ -30,6 +32,8 @@ type Storage struct {
NASStorage *nas_storage.NASStorage `json:"nasStorage" gorm:"foreignKey:StorageID"`
AzureBlobStorage *azure_blob_storage.AzureBlobStorage `json:"azureBlobStorage" gorm:"foreignKey:StorageID"`
FTPStorage *ftp_storage.FTPStorage `json:"ftpStorage" gorm:"foreignKey:StorageID"`
SFTPStorage *sftp_storage.SFTPStorage `json:"sftpStorage" gorm:"foreignKey:StorageID"`
RcloneStorage *rclone_storage.RcloneStorage `json:"rcloneStorage" gorm:"foreignKey:StorageID"`
}
func (s *Storage) SaveFile(
@@ -115,6 +119,14 @@ func (s *Storage) Update(incoming *Storage) {
if s.FTPStorage != nil && incoming.FTPStorage != nil {
s.FTPStorage.Update(incoming.FTPStorage)
}
case StorageTypeSFTP:
if s.SFTPStorage != nil && incoming.SFTPStorage != nil {
s.SFTPStorage.Update(incoming.SFTPStorage)
}
case StorageTypeRclone:
if s.RcloneStorage != nil && incoming.RcloneStorage != nil {
s.RcloneStorage.Update(incoming.RcloneStorage)
}
}
}
@@ -132,6 +144,10 @@ func (s *Storage) getSpecificStorage() StorageFileSaver {
return s.AzureBlobStorage
case StorageTypeFTP:
return s.FTPStorage
case StorageTypeSFTP:
return s.SFTPStorage
case StorageTypeRclone:
return s.RcloneStorage
default:
panic("invalid storage type: " + string(s.Type))
}

View File

@@ -13,7 +13,9 @@ import (
google_drive_storage "postgresus-backend/internal/features/storages/models/google_drive"
local_storage "postgresus-backend/internal/features/storages/models/local"
nas_storage "postgresus-backend/internal/features/storages/models/nas"
rclone_storage "postgresus-backend/internal/features/storages/models/rclone"
s3_storage "postgresus-backend/internal/features/storages/models/s3"
sftp_storage "postgresus-backend/internal/features/storages/models/sftp"
"postgresus-backend/internal/util/encryption"
"postgresus-backend/internal/util/logger"
"strconv"
@@ -79,6 +81,14 @@ func Test_Storage_BasicOperations(t *testing.T) {
}
}
// Setup SFTP port
sftpPort := 22
if portStr := config.GetEnv().TestSFTPPort; portStr != "" {
if port, err := strconv.Atoi(portStr); err == nil {
sftpPort = port
}
}
// Run tests
testCases := []struct {
name string
@@ -145,6 +155,32 @@ func Test_Storage_BasicOperations(t *testing.T) {
Path: "test-files",
},
},
{
name: "SFTPStorage",
storage: &sftp_storage.SFTPStorage{
StorageID: uuid.New(),
Host: "localhost",
Port: sftpPort,
Username: "testuser",
Password: "testpassword",
SkipHostKeyVerify: true,
Path: "upload",
},
},
{
name: "RcloneStorage",
storage: &rclone_storage.RcloneStorage{
StorageID: uuid.New(),
ConfigContent: fmt.Sprintf(`[minio]
type = s3
provider = Other
access_key_id = %s
secret_access_key = %s
endpoint = http://%s
acl = private`, s3Container.accessKey, s3Container.secretKey, s3Container.endpoint),
RemotePath: s3Container.bucketName,
},
},
}
// Add Google Drive storage test only if environment variables are available

View File

@@ -0,0 +1,293 @@
package rclone_storage
import (
"bufio"
"context"
"errors"
"fmt"
"io"
"log/slog"
"postgresus-backend/internal/util/encryption"
"strings"
"sync"
"time"
"github.com/google/uuid"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/config"
"github.com/rclone/rclone/fs/operations"
_ "github.com/rclone/rclone/backend/all"
)
const (
	// rcloneOperationTimeout bounds the whole TestConnection probe
	// (upload + lookup + delete).
	rcloneOperationTimeout = 30 * time.Second
)

// rcloneConfigMu serializes mutations of rclone's process-global config
// registry (config.FileSetValue in getFs); concurrent storages would
// otherwise race on it.
var rcloneConfigMu sync.Mutex

// RcloneStorage is a storage backend driven by a raw rclone config.
type RcloneStorage struct {
	StorageID uuid.UUID `json:"storageId" gorm:"primaryKey;type:uuid;column:storage_id"`
	// Full INI-style rclone config text; encrypted in place by
	// EncryptSensitiveData and blanked by HideSensitiveData.
	ConfigContent string `json:"configContent" gorm:"not null;type:text;column:config_content"`
	// Path inside the remote used as the filesystem root (leading "/" is
	// stripped in getFs); empty means the remote's root.
	RemotePath string `json:"remotePath" gorm:"type:text;column:remote_path"`
}

// TableName tells gorm which table backs this model.
func (r *RcloneStorage) TableName() string {
	return "rclone_storages"
}
// SaveFile streams the given reader to the rclone remote, stored under the
// file ID as its object name. Honors ctx: bails out immediately if already
// cancelled, and reports cancellation (not a wrapped upload error) when the
// context is done by the time the upload fails.
func (r *RcloneStorage) SaveFile(
	ctx context.Context,
	encryptor encryption.FieldEncryptor,
	logger *slog.Logger,
	fileID uuid.UUID,
	file io.Reader,
) error {
	// Fast-fail before doing any work if the caller already cancelled.
	select {
	case <-ctx.Done():
		return ctx.Err()
	default:
	}
	logger.Info("Starting to save file to rclone storage", "fileId", fileID.String())
	remoteFs, err := r.getFs(ctx, encryptor)
	if err != nil {
		logger.Error("Failed to create rclone filesystem", "fileId", fileID.String(), "error", err)
		return fmt.Errorf("failed to create rclone filesystem: %w", err)
	}
	filePath := r.getFilePath(fileID.String())
	logger.Debug("Uploading file via rclone", "fileId", fileID.String(), "filePath", filePath)
	// Rcat streams the reader directly to the remote (no local spooling of
	// the whole file).
	_, err = operations.Rcat(ctx, remoteFs, filePath, io.NopCloser(file), time.Now().UTC(), nil)
	if err != nil {
		// Distinguish caller cancellation from a genuine upload failure.
		select {
		case <-ctx.Done():
			logger.Info("Rclone upload cancelled", "fileId", fileID.String())
			return ctx.Err()
		default:
			logger.Error(
				"Failed to upload file via rclone",
				"fileId",
				fileID.String(),
				"error",
				err,
			)
			return fmt.Errorf("failed to upload file via rclone: %w", err)
		}
	}
	logger.Info(
		"Successfully saved file to rclone storage",
		"fileId",
		fileID.String(),
		"filePath",
		filePath,
	)
	return nil
}
// GetFile opens the remote object stored under the file ID and returns a
// streaming reader; the caller must close it.
// NOTE(review): uses context.Background(), so the download is not bounded
// by any timeout — presumably intentional for large backups; confirm.
func (r *RcloneStorage) GetFile(
	encryptor encryption.FieldEncryptor,
	fileID uuid.UUID,
) (io.ReadCloser, error) {
	ctx := context.Background()
	remoteFs, err := r.getFs(ctx, encryptor)
	if err != nil {
		return nil, fmt.Errorf("failed to create rclone filesystem: %w", err)
	}
	filePath := r.getFilePath(fileID.String())
	obj, err := remoteFs.NewObject(ctx, filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to get object from rclone: %w", err)
	}
	reader, err := obj.Open(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to open object from rclone: %w", err)
	}
	return reader, nil
}
// DeleteFile removes the object stored under the file ID.
// A failed lookup is treated as "already deleted" and returns nil (this
// makes deletion idempotent); only a failing Remove is reported as an
// error.
func (r *RcloneStorage) DeleteFile(encryptor encryption.FieldEncryptor, fileID uuid.UUID) error {
	ctx := context.Background()
	remoteFs, err := r.getFs(ctx, encryptor)
	if err != nil {
		return fmt.Errorf("failed to create rclone filesystem: %w", err)
	}
	filePath := r.getFilePath(fileID.String())
	obj, err := remoteFs.NewObject(ctx, filePath)
	if err != nil {
		// Object not found (or not reachable) — nothing to delete.
		// NOTE(review): this also swallows transient lookup errors; confirm
		// that is acceptable.
		return nil
	}
	err = obj.Remove(ctx)
	if err != nil {
		return fmt.Errorf("failed to delete file from rclone: %w", err)
	}
	return nil
}
// Validate checks the minimum configuration needed to build an rclone
// filesystem. Only the presence of the config text is checked — the
// encryptor parameter exists to satisfy the storage interface and is
// unused here.
func (r *RcloneStorage) Validate(encryptor encryption.FieldEncryptor) error {
	if len(r.ConfigContent) > 0 {
		return nil
	}
	return errors.New("rclone config content is required")
}
// TestConnection verifies the config end-to-end by uploading a small probe
// object and deleting it again, all bounded by rcloneOperationTimeout.
// NOTE(review): if Remove fails, the probe object is left behind on the
// remote.
func (r *RcloneStorage) TestConnection(encryptor encryption.FieldEncryptor) error {
	ctx, cancel := context.WithTimeout(context.Background(), rcloneOperationTimeout)
	defer cancel()
	remoteFs, err := r.getFs(ctx, encryptor)
	if err != nil {
		return fmt.Errorf("failed to create rclone filesystem: %w", err)
	}
	// Random name so concurrent tests (or leftovers) cannot collide.
	testFileID := uuid.New().String() + "-test"
	testFilePath := r.getFilePath(testFileID)
	testData := strings.NewReader("test connection")
	_, err = operations.Rcat(
		ctx,
		remoteFs,
		testFilePath,
		io.NopCloser(testData),
		time.Now().UTC(),
		nil,
	)
	if err != nil {
		return fmt.Errorf("failed to upload test file via rclone: %w", err)
	}
	// Read-back confirms the object actually landed, not just that the
	// upload call returned.
	obj, err := remoteFs.NewObject(ctx, testFilePath)
	if err != nil {
		return fmt.Errorf("failed to get test file from rclone: %w", err)
	}
	err = obj.Remove(ctx)
	if err != nil {
		return fmt.Errorf("failed to delete test file from rclone: %w", err)
	}
	return nil
}
// HideSensitiveData blanks the secret config text in place (e.g. before
// the storage is serialized back to callers). Update treats an empty
// ConfigContent as "unchanged", so a round-trip does not lose the secret.
func (r *RcloneStorage) HideSensitiveData() {
	r.ConfigContent = ""
}
// EncryptSensitiveData encrypts the rclone config text in place, keyed by
// the storage ID. An already-empty config is left untouched.
func (r *RcloneStorage) EncryptSensitiveData(encryptor encryption.FieldEncryptor) error {
	if r.ConfigContent == "" {
		return nil
	}
	ciphertext, err := encryptor.Encrypt(r.StorageID, r.ConfigContent)
	if err != nil {
		return fmt.Errorf("failed to encrypt rclone config content: %w", err)
	}
	r.ConfigContent = ciphertext
	return nil
}
// Update copies mutable settings from incoming. An empty ConfigContent
// means "keep the existing secret" so clients can update the storage
// without resending the (hidden) config.
func (r *RcloneStorage) Update(incoming *RcloneStorage) {
	if incoming.ConfigContent != "" {
		r.ConfigContent = incoming.ConfigContent
	}
	r.RemotePath = incoming.RemotePath
}
// getFs decrypts the stored rclone config, registers every remote section
// in rclone's process-global config registry, and builds an fs.Fs rooted
// at RemotePath on the target remote.
//
// The target remote is the FIRST section declared in the config text.
// (Previously the name was taken from Go map iteration, which is
// randomized — a config with several remotes would target a random one on
// each call.)
func (r *RcloneStorage) getFs(
	ctx context.Context,
	encryptor encryption.FieldEncryptor,
) (fs.Fs, error) {
	configContent, err := encryptor.Decrypt(r.StorageID, r.ConfigContent)
	if err != nil {
		return nil, fmt.Errorf("failed to decrypt rclone config content: %w", err)
	}
	// The rclone config registry is shared process-wide; serialize writers.
	rcloneConfigMu.Lock()
	defer rcloneConfigMu.Unlock()
	parsedConfig, err := parseConfigContent(configContent)
	if err != nil {
		return nil, fmt.Errorf("failed to parse rclone config: %w", err)
	}
	if len(parsedConfig) == 0 {
		return nil, errors.New("rclone config must contain at least one remote section")
	}
	// Register all sections so remotes that reference each other (e.g.
	// crypt wrapping s3) still resolve.
	for section, values := range parsedConfig {
		for key, value := range values {
			config.FileSetValue(section, key, value)
		}
	}
	// Deterministically target the first section as written in the config.
	remoteName := firstSectionName(configContent)
	if remoteName == "" {
		// parseConfigContent only creates sections from "[...]" headers, so
		// this should be unreachable; guard defensively anyway.
		return nil, errors.New("rclone config must contain at least one remote section")
	}
	remotePath := remoteName + ":"
	if r.RemotePath != "" {
		remotePath = remoteName + ":" + strings.TrimPrefix(r.RemotePath, "/")
	}
	remoteFs, err := fs.NewFs(ctx, remotePath)
	if err != nil {
		return nil, fmt.Errorf(
			"failed to create rclone filesystem for remote '%s': %w",
			remoteName,
			err,
		)
	}
	return remoteFs, nil
}

// firstSectionName returns the name of the first "[section]" header in an
// INI-style config, or "" when no header is present.
func firstSectionName(content string) string {
	scanner := bufio.NewScanner(strings.NewReader(content))
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") {
			return strings.TrimPrefix(strings.TrimSuffix(line, "]"), "[")
		}
	}
	return ""
}
// getFilePath maps a file name (the file ID) to its object path. Files are
// stored flat at the filesystem root; RemotePath is already folded into the
// root built by getFs, so no prefixing is needed here.
func (r *RcloneStorage) getFilePath(filename string) string {
	return filename
}
// parseConfigContent parses INI-style rclone config text into
// section -> key -> value. Blank lines and '#'/';' comments are skipped,
// and key/value pairs that appear before any "[section]" header are
// ignored. The returned error only reflects scanner failures.
func parseConfigContent(content string) (map[string]map[string]string, error) {
	parsed := make(map[string]map[string]string)
	section := ""
	sc := bufio.NewScanner(strings.NewReader(content))
	for sc.Scan() {
		trimmed := strings.TrimSpace(sc.Text())
		switch {
		case trimmed == "" || strings.HasPrefix(trimmed, "#") || strings.HasPrefix(trimmed, ";"):
			// Blank line or comment — nothing to record.
		case strings.HasPrefix(trimmed, "[") && strings.HasSuffix(trimmed, "]"):
			section = strings.TrimSuffix(strings.TrimPrefix(trimmed, "["), "]")
			if _, exists := parsed[section]; !exists {
				parsed[section] = make(map[string]string)
			}
		case section != "" && strings.Contains(trimmed, "="):
			key, value, _ := strings.Cut(trimmed, "=")
			parsed[section][strings.TrimSpace(key)] = strings.TrimSpace(value)
		}
	}
	return parsed, sc.Err()
}

View File

@@ -0,0 +1,430 @@
package sftp_storage
import (
"context"
"errors"
"fmt"
"io"
"log/slog"
"net"
"postgresus-backend/internal/util/encryption"
"strings"
"time"
"github.com/google/uuid"
"github.com/pkg/sftp"
"golang.org/x/crypto/ssh"
)
const (
	// Timeout for regular operations (save/get/delete)…
	sftpConnectTimeout = 30 * time.Second
	// …and a shorter one for the interactive "test connection" action.
	sftpTestConnectTimeout = 10 * time.Second
)

// SFTPStorage holds connection settings for an SFTP-backed storage.
// Password and PrivateKey are encrypted in place by EncryptSensitiveData;
// at least one of them must be set (see Validate) and both may be offered
// as SSH auth methods.
type SFTPStorage struct {
	StorageID uuid.UUID `json:"storageId" gorm:"primaryKey;type:uuid;column:storage_id"`
	Host      string    `json:"host" gorm:"not null;type:text;column:host"`
	Port      int       `json:"port" gorm:"not null;default:22;column:port"`
	Username  string    `json:"username" gorm:"not null;type:text;column:username"`
	// Optional when PrivateKey is set, and vice versa.
	Password   string `json:"password" gorm:"type:text;column:password"`
	PrivateKey string `json:"privateKey" gorm:"type:text;column:private_key"`
	// Remote base directory for uploads; when empty, files are addressed
	// relative to the session's working directory (see getFilePath).
	Path string `json:"path" gorm:"type:text;column:path"`
	// NOTE(review): currently has no effect — host key verification is
	// skipped in both branches of connectWithContext; confirm intent.
	SkipHostKeyVerify bool `json:"skipHostKeyVerify" gorm:"not null;default:false;column:skip_host_key_verify"`
}

// TableName tells gorm which table backs this model.
func (s *SFTPStorage) TableName() string {
	return "sftp_storages"
}
// SaveFile uploads the given stream to the SFTP server under the file ID,
// creating the configured base directory first if needed. Honors ctx: bails
// out immediately if already cancelled, and aborts mid-copy via the
// contextReader wrapper.
func (s *SFTPStorage) SaveFile(
	ctx context.Context,
	encryptor encryption.FieldEncryptor,
	logger *slog.Logger,
	fileID uuid.UUID,
	file io.Reader,
) error {
	// Fast-fail before dialing if the caller already cancelled.
	select {
	case <-ctx.Done():
		return ctx.Err()
	default:
	}
	logger.Info("Starting to save file to SFTP storage", "fileId", fileID.String(), "host", s.Host)
	client, sshConn, err := s.connect(encryptor, sftpConnectTimeout)
	if err != nil {
		logger.Error("Failed to connect to SFTP", "fileId", fileID.String(), "error", err)
		return fmt.Errorf("failed to connect to SFTP: %w", err)
	}
	// Tear down SFTP client first, then the SSH connection under it.
	defer func() {
		if closeErr := client.Close(); closeErr != nil {
			logger.Error(
				"Failed to close SFTP client",
				"fileId",
				fileID.String(),
				"error",
				closeErr,
			)
		}
		if closeErr := sshConn.Close(); closeErr != nil {
			logger.Error(
				"Failed to close SSH connection",
				"fileId",
				fileID.String(),
				"error",
				closeErr,
			)
		}
	}()
	if s.Path != "" {
		// mkdir -p the configured base directory.
		if err := s.ensureDirectory(client, s.Path); err != nil {
			logger.Error(
				"Failed to ensure directory",
				"fileId",
				fileID.String(),
				"path",
				s.Path,
				"error",
				err,
			)
			return fmt.Errorf("failed to ensure directory: %w", err)
		}
	}
	filePath := s.getFilePath(fileID.String())
	logger.Debug("Uploading file to SFTP", "fileId", fileID.String(), "filePath", filePath)
	remoteFile, err := client.Create(filePath)
	if err != nil {
		logger.Error("Failed to create remote file", "fileId", fileID.String(), "error", err)
		return fmt.Errorf("failed to create remote file: %w", err)
	}
	// NOTE(review): Close errors on the remote file are ignored; a failed
	// flush at close time would go unnoticed.
	defer func() {
		_ = remoteFile.Close()
	}()
	// contextReader makes io.Copy abort promptly once ctx is cancelled.
	ctxReader := &contextReader{ctx: ctx, reader: file}
	_, err = io.Copy(remoteFile, ctxReader)
	if err != nil {
		// Distinguish caller cancellation from a genuine upload failure.
		select {
		case <-ctx.Done():
			logger.Info("SFTP upload cancelled", "fileId", fileID.String())
			return ctx.Err()
		default:
			logger.Error("Failed to upload file to SFTP", "fileId", fileID.String(), "error", err)
			return fmt.Errorf("failed to upload file to SFTP: %w", err)
		}
	}
	logger.Info(
		"Successfully saved file to SFTP storage",
		"fileId",
		fileID.String(),
		"filePath",
		filePath,
	)
	return nil
}
// GetFile opens the remote file stored under the file ID. The returned
// reader owns the SFTP client and SSH connection and releases both on
// Close, so the caller only needs to close the reader.
func (s *SFTPStorage) GetFile(
	encryptor encryption.FieldEncryptor,
	fileID uuid.UUID,
) (io.ReadCloser, error) {
	client, sshConn, err := s.connect(encryptor, sftpConnectTimeout)
	if err != nil {
		return nil, fmt.Errorf("failed to connect to SFTP: %w", err)
	}
	filePath := s.getFilePath(fileID.String())
	remoteFile, err := client.Open(filePath)
	if err != nil {
		// Open failed — nothing will own the connections, so close them here.
		_ = client.Close()
		_ = sshConn.Close()
		return nil, fmt.Errorf("failed to open file from SFTP: %w", err)
	}
	return &sftpFileReader{
		file:    remoteFile,
		client:  client,
		sshConn: sshConn,
	}, nil
}
// DeleteFile removes the remote file stored under the file ID.
// A failed Stat is treated as "already deleted" and returns nil, making
// deletion idempotent.
// NOTE(review): ANY Stat error (not just not-found) is swallowed here;
// confirm transient errors should not be surfaced.
func (s *SFTPStorage) DeleteFile(encryptor encryption.FieldEncryptor, fileID uuid.UUID) error {
	client, sshConn, err := s.connect(encryptor, sftpConnectTimeout)
	if err != nil {
		return fmt.Errorf("failed to connect to SFTP: %w", err)
	}
	defer func() {
		_ = client.Close()
		_ = sshConn.Close()
	}()
	filePath := s.getFilePath(fileID.String())
	_, err = client.Stat(filePath)
	if err != nil {
		return nil
	}
	err = client.Remove(filePath)
	if err != nil {
		return fmt.Errorf("failed to delete file from SFTP: %w", err)
	}
	return nil
}
// Validate checks that the connection settings are complete: host and
// username are mandatory, at least one credential (password or private
// key) must be present, and the port must be a valid TCP port. The
// encryptor parameter exists to satisfy the storage interface and is
// unused here.
func (s *SFTPStorage) Validate(encryptor encryption.FieldEncryptor) error {
	switch {
	case s.Host == "":
		return errors.New("SFTP host is required")
	case s.Username == "":
		return errors.New("SFTP username is required")
	case s.Password == "" && s.PrivateKey == "":
		return errors.New("SFTP password or private key is required")
	case s.Port <= 0 || s.Port > 65535:
		return errors.New("SFTP port must be between 1 and 65535")
	}
	return nil
}
// TestConnection dials the server with the shorter test timeout and, when
// a base path is configured, verifies it exists (creating it if missing).
// The ctx bounds the TCP dial; the SSH handshake is bounded separately by
// the timeout passed to connectWithContext.
func (s *SFTPStorage) TestConnection(encryptor encryption.FieldEncryptor) error {
	ctx, cancel := context.WithTimeout(context.Background(), sftpTestConnectTimeout)
	defer cancel()
	client, sshConn, err := s.connectWithContext(ctx, encryptor, sftpTestConnectTimeout)
	if err != nil {
		return fmt.Errorf("failed to connect to SFTP: %w", err)
	}
	defer func() {
		_ = client.Close()
		_ = sshConn.Close()
	}()
	if s.Path != "" {
		if err := s.ensureDirectory(client, s.Path); err != nil {
			return fmt.Errorf("failed to access or create path '%s': %w", s.Path, err)
		}
	}
	return nil
}
// HideSensitiveData blanks both credentials in place (e.g. before the
// storage is serialized back to callers). Update treats empty credentials
// as "unchanged", so a round-trip does not lose them.
func (s *SFTPStorage) HideSensitiveData() {
	s.Password = ""
	s.PrivateKey = ""
}
// EncryptSensitiveData encrypts the password and private key in place,
// keyed by the storage ID. Empty credentials are left empty so the
// "hidden" sentinel value survives untouched.
func (s *SFTPStorage) EncryptSensitiveData(encryptor encryption.FieldEncryptor) error {
	if s.Password != "" {
		ciphertext, err := encryptor.Encrypt(s.StorageID, s.Password)
		if err != nil {
			return fmt.Errorf("failed to encrypt SFTP password: %w", err)
		}
		s.Password = ciphertext
	}
	if s.PrivateKey != "" {
		ciphertext, err := encryptor.Encrypt(s.StorageID, s.PrivateKey)
		if err != nil {
			return fmt.Errorf("failed to encrypt SFTP private key: %w", err)
		}
		s.PrivateKey = ciphertext
	}
	return nil
}
// Update copies connection settings from incoming. Empty Password /
// PrivateKey mean "keep the existing secret" so clients can update the
// storage without resending hidden credentials.
func (s *SFTPStorage) Update(incoming *SFTPStorage) {
	if incoming.Password != "" {
		s.Password = incoming.Password
	}
	if incoming.PrivateKey != "" {
		s.PrivateKey = incoming.PrivateKey
	}
	s.Host = incoming.Host
	s.Port = incoming.Port
	s.Username = incoming.Username
	s.Path = incoming.Path
	s.SkipHostKeyVerify = incoming.SkipHostKeyVerify
}
// connect is a convenience wrapper over connectWithContext with a
// background context; the timeout still bounds the TCP dial and the SSH
// handshake.
func (s *SFTPStorage) connect(
	encryptor encryption.FieldEncryptor,
	timeout time.Duration,
) (*sftp.Client, *ssh.Client, error) {
	return s.connectWithContext(context.Background(), encryptor, timeout)
}
// connectWithContext dials the SFTP server and returns an SFTP client
// together with the underlying SSH client; the caller must close BOTH
// (SFTP client first, then SSH). Auth methods are built from whichever of
// Password / PrivateKey is set (both may be offered).
//
// FIXME(security): host keys are never verified. The previous code
// assigned ssh.InsecureIgnoreHostKey() in BOTH branches of
// SkipHostKeyVerify, so the flag had no effect; the dead duplicate branch
// is collapsed here with behavior preserved, but SkipHostKeyVerify=false
// should eventually use a real known-hosts callback
// (golang.org/x/crypto/ssh/knownhosts).
func (s *SFTPStorage) connectWithContext(
	ctx context.Context,
	encryptor encryption.FieldEncryptor,
	timeout time.Duration,
) (*sftp.Client, *ssh.Client, error) {
	var authMethods []ssh.AuthMethod
	if s.Password != "" {
		password, err := encryptor.Decrypt(s.StorageID, s.Password)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to decrypt SFTP password: %w", err)
		}
		authMethods = append(authMethods, ssh.Password(password))
	}
	if s.PrivateKey != "" {
		privateKey, err := encryptor.Decrypt(s.StorageID, s.PrivateKey)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to decrypt SFTP private key: %w", err)
		}
		signer, err := ssh.ParsePrivateKey([]byte(privateKey))
		if err != nil {
			return nil, nil, fmt.Errorf("failed to parse private key: %w", err)
		}
		authMethods = append(authMethods, ssh.PublicKeys(signer))
	}
	config := &ssh.ClientConfig{
		User: s.Username,
		Auth: authMethods,
		// See FIXME above: verification is skipped regardless of
		// SkipHostKeyVerify (matches previous behavior).
		HostKeyCallback: ssh.InsecureIgnoreHostKey(),
		Timeout:         timeout,
	}
	address := fmt.Sprintf("%s:%d", s.Host, s.Port)
	// DialContext lets a cancelled/expired ctx abort the TCP dial; the SSH
	// handshake itself is bounded by config.Timeout.
	dialer := net.Dialer{Timeout: timeout}
	conn, err := dialer.DialContext(ctx, "tcp", address)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to dial SFTP server: %w", err)
	}
	sshConn, chans, reqs, err := ssh.NewClientConn(conn, address, config)
	if err != nil {
		_ = conn.Close()
		return nil, nil, fmt.Errorf("failed to create SSH connection: %w", err)
	}
	sshClient := ssh.NewClient(sshConn, chans, reqs)
	sftpClient, err := sftp.NewClient(sshClient)
	if err != nil {
		// Closing the SSH client also closes the underlying TCP conn.
		_ = sshClient.Close()
		return nil, nil, fmt.Errorf("failed to create SFTP client: %w", err)
	}
	return sftpClient, sshClient, nil
}
// ensureDirectory creates each component of path that does not already
// exist, mkdir -p style. The path is always treated as absolute (rooted at
// "/"), mirroring getFilePath.
// NOTE(review): any Stat error — not just "not found" — triggers a Mkdir
// attempt, and a concurrent creator can make that Mkdir fail spuriously;
// confirm this is acceptable.
func (s *SFTPStorage) ensureDirectory(client *sftp.Client, path string) error {
	path = strings.TrimPrefix(path, "/")
	path = strings.TrimSuffix(path, "/")
	if path == "" {
		return nil
	}
	parts := strings.Split(path, "/")
	currentPath := ""
	for _, part := range parts {
		if part == "" || part == "." {
			continue
		}
		if currentPath == "" {
			currentPath = "/" + part
		} else {
			currentPath = currentPath + "/" + part
		}
		// Create the component only when it doesn't appear to exist yet.
		_, err := client.Stat(currentPath)
		if err != nil {
			err = client.Mkdir(currentPath)
			if err != nil {
				return fmt.Errorf("failed to create directory '%s': %w", currentPath, err)
			}
		}
	}
	return nil
}
// getFilePath maps a file name (the file ID) to its remote path. With no
// base Path configured the name is used as-is (relative to the session's
// working directory); otherwise the file lives under the absolute base
// directory.
func (s *SFTPStorage) getFilePath(filename string) string {
	if s.Path == "" {
		return filename
	}
	// Strip at most one leading and one trailing slash, then re-anchor.
	cleaned := strings.TrimSuffix(strings.TrimPrefix(s.Path, "/"), "/")
	return "/" + cleaned + "/" + filename
}
// sftpFileReader bundles a remote SFTP file with the SFTP and SSH clients
// backing it, so closing the reader tears down the whole connection chain.
type sftpFileReader struct {
	file    *sftp.File
	client  *sftp.Client
	sshConn *ssh.Client
}

// Read proxies to the underlying remote file.
func (r *sftpFileReader) Read(p []byte) (n int, err error) {
	return r.file.Read(p)
}

// Close closes the file, then the SFTP client, then the SSH connection.
// Every close is always attempted; the first error encountered (in that
// order) is returned.
func (r *sftpFileReader) Close() error {
	var firstErr error
	if r.file != nil {
		if err := r.file.Close(); err != nil {
			firstErr = fmt.Errorf("failed to close file: %w", err)
		}
	}
	if r.client != nil {
		if err := r.client.Close(); err != nil && firstErr == nil {
			firstErr = fmt.Errorf("failed to close SFTP client: %w", err)
		}
	}
	if r.sshConn != nil {
		if err := r.sshConn.Close(); err != nil && firstErr == nil {
			firstErr = fmt.Errorf("failed to close SSH connection: %w", err)
		}
	}
	return firstErr
}
type contextReader struct {
ctx context.Context
reader io.Reader
}
func (r *contextReader) Read(p []byte) (n int, err error) {
select {
case <-r.ctx.Done():
return 0, r.ctx.Err()
default:
return r.reader.Read(p)
}
}

View File

@@ -38,17 +38,25 @@ func (r *StorageRepository) Save(storage *Storage) (*Storage, error) {
if storage.FTPStorage != nil {
storage.FTPStorage.StorageID = storage.ID
}
case StorageTypeSFTP:
if storage.SFTPStorage != nil {
storage.SFTPStorage.StorageID = storage.ID
}
case StorageTypeRclone:
if storage.RcloneStorage != nil {
storage.RcloneStorage.StorageID = storage.ID
}
}
if storage.ID == uuid.Nil {
if err := tx.Create(storage).
Omit("LocalStorage", "S3Storage", "GoogleDriveStorage", "NASStorage", "AzureBlobStorage", "FTPStorage").
Omit("LocalStorage", "S3Storage", "GoogleDriveStorage", "NASStorage", "AzureBlobStorage", "FTPStorage", "SFTPStorage", "RcloneStorage").
Error; err != nil {
return err
}
} else {
if err := tx.Save(storage).
Omit("LocalStorage", "S3Storage", "GoogleDriveStorage", "NASStorage", "AzureBlobStorage", "FTPStorage").
Omit("LocalStorage", "S3Storage", "GoogleDriveStorage", "NASStorage", "AzureBlobStorage", "FTPStorage", "SFTPStorage", "RcloneStorage").
Error; err != nil {
return err
}
@@ -97,6 +105,20 @@ func (r *StorageRepository) Save(storage *Storage) (*Storage, error) {
return err
}
}
case StorageTypeSFTP:
if storage.SFTPStorage != nil {
storage.SFTPStorage.StorageID = storage.ID // Ensure ID is set
if err := tx.Save(storage.SFTPStorage).Error; err != nil {
return err
}
}
case StorageTypeRclone:
if storage.RcloneStorage != nil {
storage.RcloneStorage.StorageID = storage.ID // Ensure ID is set
if err := tx.Save(storage.RcloneStorage).Error; err != nil {
return err
}
}
}
return nil
@@ -120,6 +142,8 @@ func (r *StorageRepository) FindByID(id uuid.UUID) (*Storage, error) {
Preload("NASStorage").
Preload("AzureBlobStorage").
Preload("FTPStorage").
Preload("SFTPStorage").
Preload("RcloneStorage").
Where("id = ?", id).
First(&s).Error; err != nil {
return nil, err
@@ -139,6 +163,8 @@ func (r *StorageRepository) FindByWorkspaceID(workspaceID uuid.UUID) ([]*Storage
Preload("NASStorage").
Preload("AzureBlobStorage").
Preload("FTPStorage").
Preload("SFTPStorage").
Preload("RcloneStorage").
Where("workspace_id = ?", workspaceID).
Order("name ASC").
Find(&storages).Error; err != nil {
@@ -188,6 +214,18 @@ func (r *StorageRepository) Delete(s *Storage) error {
return err
}
}
case StorageTypeSFTP:
if s.SFTPStorage != nil {
if err := tx.Delete(s.SFTPStorage).Error; err != nil {
return err
}
}
case StorageTypeRclone:
if s.RcloneStorage != nil {
if err := tx.Delete(s.RcloneStorage).Error; err != nil {
return err
}
}
}
// Delete the main storage

View File

@@ -548,10 +548,12 @@ func testBackupRestoreWithExcludeExtensionsForVersion(t *testing.T, pgVersion st
assert.NoError(t, err)
defer container.DB.Close()
// Create table with uuid-ossp extension
// Create table with uuid-ossp extension and add a comment on the extension
// The comment is important to test that COMMENT ON EXTENSION statements are also excluded
_, err = container.DB.Exec(`
DROP EXTENSION IF EXISTS "uuid-ossp" CASCADE;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
COMMENT ON EXTENSION "uuid-ossp" IS 'Test comment on uuid-ossp extension';
DROP TABLE IF EXISTS test_extension_data;
CREATE TABLE test_extension_data (

View File

@@ -0,0 +1,23 @@
-- +goose Up
-- +goose StatementBegin
-- One row per rclone-backed storage; storage_id is both the primary key
-- and a cascading FK to the parent storages row.
CREATE TABLE rclone_storages (
    storage_id UUID PRIMARY KEY,
    -- Raw rclone config text (encrypted at the application layer).
    config_content TEXT NOT NULL,
    -- Optional path inside the remote used as the storage root.
    remote_path TEXT
);
-- DEFERRABLE INITIALLY DEFERRED so parent + child rows can be inserted in
-- the same transaction in either order.
ALTER TABLE rclone_storages
ADD CONSTRAINT fk_rclone_storages_storage
FOREIGN KEY (storage_id)
REFERENCES storages (id)
ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS rclone_storages;
-- +goose StatementEnd

View File

@@ -0,0 +1,28 @@
-- +goose Up
-- +goose StatementBegin
-- One row per SFTP-backed storage; storage_id is both the primary key and
-- a cascading FK to the parent storages row.
CREATE TABLE sftp_storages (
    storage_id UUID PRIMARY KEY,
    host TEXT NOT NULL,
    port INTEGER NOT NULL DEFAULT 22,
    username TEXT NOT NULL,
    -- password / private_key: nullable because either may be used alone;
    -- both are encrypted at the application layer.
    password TEXT,
    private_key TEXT,
    -- Optional remote base directory for uploads.
    path TEXT,
    skip_host_key_verify BOOLEAN NOT NULL DEFAULT FALSE
);
-- DEFERRABLE INITIALLY DEFERRED so parent + child rows can be inserted in
-- the same transaction in either order.
ALTER TABLE sftp_storages
ADD CONSTRAINT fk_sftp_storages_storage
FOREIGN KEY (storage_id)
REFERENCES storages (id)
ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS sftp_storages;
-- +goose StatementEnd

View File

@@ -0,0 +1,5 @@
-- +goose Up
-- Nullable on purpose: only intervals of the CRON type carry an expression.
ALTER TABLE intervals ADD COLUMN cron_expression TEXT;
-- +goose Down
ALTER TABLE intervals DROP COLUMN cron_expression;

View File

@@ -10,6 +10,7 @@
"dependencies": {
"@tailwindcss/vite": "^4.1.7",
"antd": "^5.25.1",
"cron-parser": "^5.4.0",
"dayjs": "^1.11.13",
"react": "^19.1.0",
"react-dom": "^19.1.0",
@@ -3138,6 +3139,18 @@
"toggle-selection": "^1.0.6"
}
},
"node_modules/cron-parser": {
"version": "5.4.0",
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-5.4.0.tgz",
"integrity": "sha512-HxYB8vTvnQFx4dLsZpGRa0uHp6X3qIzS3ZJgJ9v6l/5TJMgeWQbLkR5yiJ5hOxGbc9+jCADDnydIe15ReLZnJA==",
"license": "MIT",
"dependencies": {
"luxon": "^3.7.1"
},
"engines": {
"node": ">=18"
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -5463,6 +5476,15 @@
"yallist": "^3.0.2"
}
},
"node_modules/luxon": {
"version": "3.7.2",
"resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.2.tgz",
"integrity": "sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==",
"license": "MIT",
"engines": {
"node": ">=12"
}
},
"node_modules/magic-string": {
"version": "0.30.17",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz",

View File

@@ -15,6 +15,7 @@
"dependencies": {
"@tailwindcss/vite": "^4.1.7",
"antd": "^5.25.1",
"cron-parser": "^5.4.0",
"dayjs": "^1.11.13",
"react": "^19.1.0",
"react-dom": "^19.1.0",
@@ -24,12 +25,12 @@
"tailwindcss": "^4.1.7"
},
"devDependencies": {
"@vitest/coverage-v8": "^3.2.4",
"@eslint/js": "^9.25.0",
"@trivago/prettier-plugin-sort-imports": "^5.2.2",
"@types/react": "^19.1.2",
"@types/react-dom": "^19.1.2",
"@vitejs/plugin-react": "^4.4.1",
"@vitest/coverage-v8": "^3.2.4",
"eslint": "^9.25.0",
"eslint-plugin-react": "^7.37.5",
"eslint-plugin-react-hooks": "^5.2.0",

View File

@@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="64"
height="64"
viewBox="0 0 64 64"
version="1.1"
xml:space="preserve"
style="clip-rule:evenodd;fill-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:1.41420996"
id="svg50"
sodipodi:docname="rclone-icon.svg"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)"><metadata
id="metadata56"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title></dc:title></cc:Work></rdf:RDF></metadata><defs
id="defs54">
<clipPath
id="_clip1">
<rect
x="14"
y="579"
width="257"
height="84"
id="rect4" />
</clipPath>
</defs><sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1531"
inkscape:window-height="807"
id="namedview52"
showgrid="false"
units="px"
inkscape:zoom="1.539823"
inkscape:cx="-84.425288"
inkscape:cy="26.5"
inkscape:window-x="70"
inkscape:window-y="27"
inkscape:window-maximized="0"
inkscape:current-layer="svg50" />
<g
id="g824"
transform="matrix(1.3422256,0,0,1.3422256,-2.2309418e-8,3.8420351)"><path
d="m 45.726917,21.83581 c -1.507672,-2.611426 -3.701518,-4.579735 -6.222732,-5.808561 -0.322585,1.72227 -0.932898,3.419936 -1.857594,5.021921 l -1.459147,2.532147 c 0.971853,0.539918 1.817954,1.334759 2.414598,2.368122 1.753027,3.035842 0.712146,6.919151 -2.324383,8.672176 -3.035847,1.753025 -6.919159,0.712829 -8.672186,-2.323698 l -2.944264,-5.091631 h -4.751283 l -2.375642,4.114312 2.946315,5.090948 c 4.025469,6.971776 12.939592,9.360401 19.911375,5.334937 6.971101,-4.024782 9.359727,-12.938896 5.334943,-19.910673"
style="fill:#70caf2;fill-rule:nonzero;stroke-width:0.68344086"
id="path7"
inkscape:connector-curvature="0" /><path
d="M 31.127807,0.45456543 C 24.156023,-3.5702158 15.2419,-1.1815912 11.217114,5.7895021 9.7087599,8.4009285 9.1018638,11.285048 9.2980112,14.083052 10.950572,13.501445 12.726153,13.180911 14.576228,13.180911 l 2.921711,-0.0027 c -0.01845,-1.111274 0.247406,-2.241684 0.843367,-3.2743635 1.75371,-3.036526 5.636339,-4.0774059 8.672868,-2.3236971 3.03653,1.7530242 4.076727,5.6356506 2.323701,8.6721766 l -2.936747,5.095732 2.374958,4.114995 4.751283,-6.83e-4 2.93538,-5.097099 C 40.488218,13.394145 38.099591,4.4793466 31.127807,0.45456543"
style="fill:#b4e3f9;fill-rule:nonzero;stroke-width:0.68344086"
id="path11"
inkscape:connector-curvature="0" /><path
d="m 19.297646,37.095505 -1.463932,-2.529413 c -0.9534,0.57204 -2.064675,0.906925 -3.25728,0.906925 -3.506736,0 -6.3491688,-2.842428 -6.3491688,-6.349162 0,-3.50605 2.8424328,-6.348479 6.3491688,-6.348479 l 5.881011,-0.0041 2.376326,-4.114312 -2.376326,-4.114312 -5.881695,0.0055 C 6.5254965,14.548074 1.6621211e-8,21.074248 1.6621211e-8,29.12381 1.6621211e-8,37.174056 6.5254965,43.70023 14.57575,43.70023 c 3.014659,0 5.814718,-0.915811 8.139101,-2.48294 -1.329976,-1.140662 -2.49251,-2.520528 -3.417205,-4.12183"
style="fill:#3f79ad;fill-rule:nonzero;stroke-width:0.68344086"
id="path15"
inkscape:connector-curvature="0" /></g>
</svg>

After

Width:  |  Height:  |  Size: 4.3 KiB

View File

@@ -0,0 +1,3 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M853.333333 256H469.333333l-85.333333-85.333333H170.666667c-46.933333 0-85.333333 38.4-85.333334 85.333333v170.666667h853.333334v-85.333334c0-46.933333-38.4-85.333333-85.333334-85.333333z" fill="#FFA000" /><path d="M853.333333 256H170.666667c-46.933333 0-85.333333 38.4-85.333334 85.333333v426.666667c0 46.933333 38.4 85.333333 85.333334 85.333333h682.666666c46.933333 0 85.333333-38.4 85.333334-85.333333V341.333333c0-46.933333-38.4-85.333333-85.333334-85.333333z" fill="#FFCA28" /></svg>

After

Width:  |  Height:  |  Size: 741 B

View File

@@ -8,4 +8,6 @@ export interface Interval {
weekday?: number;
// only for MONTHLY
dayOfMonth?: number;
// only for CRON
cronExpression?: string;
}

View File

@@ -3,4 +3,5 @@ export enum IntervalType {
DAILY = 'DAILY',
WEEKLY = 'WEEKLY',
MONTHLY = 'MONTHLY',
CRON = 'CRON',
}

View File

@@ -9,3 +9,5 @@ export { getStorageNameFromType } from './models/getStorageNameFromType';
export { type GoogleDriveStorage } from './models/GoogleDriveStorage';
export { type AzureBlobStorage } from './models/AzureBlobStorage';
export { type FTPStorage } from './models/FTPStorage';
export { type SFTPStorage } from './models/SFTPStorage';
export { type RcloneStorage } from './models/RcloneStorage';

View File

@@ -0,0 +1,4 @@
/**
 * Rclone-backed storage configuration.
 * The user pastes a raw `rclone.conf` file defining one or more remotes;
 * backups are written to the (optional) path prefix on that remote.
 */
export interface RcloneStorage {
  /** Raw contents of an rclone.conf file (may contain credentials). */
  configContent: string;
  /** Optional path prefix on the remote where backups are stored, e.g. "/backups". */
  remotePath?: string;
}

View File

@@ -0,0 +1,9 @@
/** SFTP storage connection settings. */
export interface SFTPStorage {
  /** SFTP server hostname or IP address. */
  host: string;
  /** SFTP server port (1-65535; the edit form defaults to 22). */
  port: number;
  /** Login user name. */
  username: string;
  /** Password auth; the edit form treats this as mutually exclusive with `privateKey`. */
  password?: string;
  /** SSH private key in PEM format; used instead of `password` when set. */
  privateKey?: string;
  /** Optional remote directory for backups (the edit form strips a leading slash). */
  path?: string;
  /** When true, SSH host key verification is skipped (reduces security). */
  skipHostKeyVerify?: boolean;
}

View File

@@ -3,7 +3,9 @@ import type { FTPStorage } from './FTPStorage';
import type { GoogleDriveStorage } from './GoogleDriveStorage';
import type { LocalStorage } from './LocalStorage';
import type { NASStorage } from './NASStorage';
import type { RcloneStorage } from './RcloneStorage';
import type { S3Storage } from './S3Storage';
import type { SFTPStorage } from './SFTPStorage';
import type { StorageType } from './StorageType';
export interface Storage {
@@ -20,4 +22,6 @@ export interface Storage {
nasStorage?: NASStorage;
azureBlobStorage?: AzureBlobStorage;
ftpStorage?: FTPStorage;
sftpStorage?: SFTPStorage;
rcloneStorage?: RcloneStorage;
}

View File

@@ -5,4 +5,6 @@ export enum StorageType {
NAS = 'NAS',
AZURE_BLOB = 'AZURE_BLOB',
FTP = 'FTP',
SFTP = 'SFTP',
RCLONE = 'RCLONE',
}

View File

@@ -14,6 +14,10 @@ export const getStorageLogoFromType = (type: StorageType) => {
return '/icons/storages/azure.svg';
case StorageType.FTP:
return '/icons/storages/ftp.svg';
case StorageType.SFTP:
return '/icons/storages/sftp.svg';
case StorageType.RCLONE:
return '/icons/storages/rclone.svg';
default:
return '';
}

View File

@@ -14,6 +14,10 @@ export const getStorageNameFromType = (type: StorageType) => {
return 'Azure Blob Storage';
case StorageType.FTP:
return 'FTP';
case StorageType.SFTP:
return 'SFTP';
case StorageType.RCLONE:
return 'Rclone';
default:
return '';
}

View File

@@ -2,6 +2,7 @@ import { InfoCircleOutlined } from '@ant-design/icons';
import {
Button,
Checkbox,
Input,
InputNumber,
Modal,
Select,
@@ -10,6 +11,7 @@ import {
TimePicker,
Tooltip,
} from 'antd';
import { CronExpressionParser } from 'cron-parser';
import dayjs, { Dayjs } from 'dayjs';
import { useEffect, useMemo, useState } from 'react';
@@ -19,10 +21,11 @@ import type { Database } from '../../../entity/databases';
import { Period } from '../../../entity/databases/model/Period';
import { type Interval, IntervalType } from '../../../entity/intervals';
import { type Storage, getStorageLogoFromType, storageApi } from '../../../entity/storages';
import { getUserTimeFormat } from '../../../shared/time';
import {
getUserTimeFormat as getIs12Hour,
getLocalDayOfMonth,
getLocalWeekday,
getUserTimeFormat,
getUtcDayOfMonth,
getUtcWeekday,
} from '../../../shared/time/utils';
@@ -77,10 +80,12 @@ export const EditBackupConfigComponent = ({
const [isShowWarn, setIsShowWarn] = useState(false);
const timeFormat = useMemo(() => {
const is12 = getUserTimeFormat();
const is12 = getIs12Hour();
return { use12Hours: is12, format: is12 ? 'h:mm A' : 'HH:mm' };
}, []);
const dateTimeFormat = useMemo(() => getUserTimeFormat(), []);
const updateBackupConfig = (patch: Partial<BackupConfig>) => {
setBackupConfig((prev) => (prev ? { ...prev, ...patch } : prev));
setIsUnsaved(true);
@@ -201,7 +206,8 @@ export const EditBackupConfigComponent = ({
Boolean(backupInterval?.interval) &&
(!backupInterval ||
((backupInterval.interval !== IntervalType.WEEKLY || displayedWeekday) &&
(backupInterval.interval !== IntervalType.MONTHLY || displayedDayOfMonth))));
(backupInterval.interval !== IntervalType.MONTHLY || displayedDayOfMonth) &&
(backupInterval.interval !== IntervalType.CRON || backupInterval.cronExpression))));
return (
<div>
@@ -230,6 +236,7 @@ export const EditBackupConfigComponent = ({
{ label: 'Daily', value: IntervalType.DAILY },
{ label: 'Weekly', value: IntervalType.WEEKLY },
{ label: 'Monthly', value: IntervalType.MONTHLY },
{ label: 'Cron', value: IntervalType.CRON },
]}
/>
</div>
@@ -269,33 +276,93 @@ export const EditBackupConfigComponent = ({
</div>
)}
{backupInterval?.interval !== IntervalType.HOURLY && (
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[150px] sm:mb-0">Backup time of day</div>
<TimePicker
value={localTime}
format={timeFormat.format}
use12Hours={timeFormat.use12Hours}
allowClear={false}
size="small"
className="w-full max-w-[200px] grow"
onChange={(t) => {
if (!t) return;
const patch: Partial<Interval> = { timeOfDay: t.utc().format('HH:mm') };
if (backupInterval?.interval === IntervalType.WEEKLY && displayedWeekday) {
patch.weekday = getUtcWeekday(displayedWeekday, t);
{backupInterval?.interval === IntervalType.CRON && (
<>
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[150px] sm:mb-0">Cron expression (UTC)</div>
<div className="flex items-center">
<Input
value={backupInterval?.cronExpression || ''}
onChange={(e) => saveInterval({ cronExpression: e.target.value })}
placeholder="0 2 * * *"
size="small"
className="w-full max-w-[200px] grow"
/>
<Tooltip
className="cursor-pointer"
title={
<div>
<div className="font-bold">
Cron format: minute hour day month weekday (UTC)
</div>
<div className="mt-1">Examples:</div>
<div> 0 2 * * * - Daily at 2:00 AM UTC</div>
<div> 0 */6 * * * - Every 6 hours</div>
<div> 0 3 * * 1 - Every Monday at 3:00 AM UTC</div>
<div> 30 4 1,15 * * - 1st and 15th at 4:30 AM UTC</div>
</div>
}
>
<InfoCircleOutlined className="ml-2" style={{ color: 'gray' }} />
</Tooltip>
</div>
</div>
{backupInterval?.cronExpression &&
(() => {
try {
const interval = CronExpressionParser.parse(backupInterval.cronExpression, {
tz: 'UTC',
});
const nextRun = interval.next().toDate();
return (
<div className="mb-1 flex w-full flex-col items-start text-xs text-gray-600 sm:flex-row sm:items-center dark:text-gray-400">
<div className="mb-1 min-w-[150px] sm:mb-0" />
<div className="text-gray-600 dark:text-gray-400">
Next run {dayjs(nextRun).local().format(dateTimeFormat.format)}
<br />({dayjs(nextRun).fromNow()})
</div>
</div>
);
} catch {
return (
<div className="mb-1 flex w-full flex-col items-start text-red-500 sm:flex-row sm:items-center">
<div className="mb-1 min-w-[150px] sm:mb-0" />
<div className="text-red-500">Invalid cron expression</div>
</div>
);
}
if (backupInterval?.interval === IntervalType.MONTHLY && displayedDayOfMonth) {
patch.dayOfMonth = getUtcDayOfMonth(displayedDayOfMonth, t);
}
saveInterval(patch);
}}
/>
</div>
})()}
</>
)}
{backupInterval?.interval !== IntervalType.HOURLY &&
backupInterval?.interval !== IntervalType.CRON && (
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[150px] sm:mb-0">Backup time of day</div>
<TimePicker
value={localTime}
format={timeFormat.format}
use12Hours={timeFormat.use12Hours}
allowClear={false}
size="small"
className="w-full max-w-[200px] grow"
onChange={(t) => {
if (!t) return;
const patch: Partial<Interval> = { timeOfDay: t.utc().format('HH:mm') };
if (backupInterval?.interval === IntervalType.WEEKLY && displayedWeekday) {
patch.weekday = getUtcWeekday(displayedWeekday, t);
}
if (backupInterval?.interval === IntervalType.MONTHLY && displayedDayOfMonth) {
patch.dayOfMonth = getUtcDayOfMonth(displayedDayOfMonth, t);
}
saveInterval(patch);
}}
/>
</div>
)}
<div className="mt-4 mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[150px] sm:mb-0">Retry backup if failed</div>
<div className="flex items-center">

View File

@@ -1,5 +1,6 @@
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip } from 'antd';
import { CronExpressionParser } from 'cron-parser';
import dayjs from 'dayjs';
import { useMemo } from 'react';
import { useEffect, useState } from 'react';
@@ -10,7 +11,12 @@ import type { Database } from '../../../entity/databases';
import { Period } from '../../../entity/databases/model/Period';
import { IntervalType } from '../../../entity/intervals';
import { getStorageLogoFromType } from '../../../entity/storages/models/getStorageLogoFromType';
import { getLocalDayOfMonth, getLocalWeekday, getUserTimeFormat } from '../../../shared/time/utils';
import { getUserTimeFormat } from '../../../shared/time';
import {
getUserTimeFormat as getIs12Hour,
getLocalDayOfMonth,
getLocalWeekday,
} from '../../../shared/time/utils';
interface Props {
database: Database;
@@ -31,6 +37,7 @@ const intervalLabels = {
[IntervalType.DAILY]: 'Daily',
[IntervalType.WEEKLY]: 'Weekly',
[IntervalType.MONTHLY]: 'Monthly',
[IntervalType.CRON]: 'Cron',
};
const periodLabels = {
@@ -57,13 +64,15 @@ export const ShowBackupConfigComponent = ({ database }: Props) => {
// Detect user's preferred time format (12-hour vs 24-hour)
const timeFormat = useMemo(() => {
const is12Hour = getUserTimeFormat();
const is12Hour = getIs12Hour();
return {
use12Hours: is12Hour,
format: is12Hour ? 'h:mm A' : 'HH:mm',
};
}, []);
const dateTimeFormat = useMemo(() => getUserTimeFormat(), []);
useEffect(() => {
if (database.id) {
backupConfigApi.getBackupConfigByDbID(database.id).then((res) => {
@@ -131,13 +140,45 @@ export const ShowBackupConfigComponent = ({ database }: Props) => {
</div>
)}
{backupInterval?.interval !== IntervalType.HOURLY && (
<div className="mb-1 flex w-full items-center">
<div className="min-w-[150px]">Backup time of day</div>
<div>{formattedTime}</div>
</div>
{backupInterval?.interval === IntervalType.CRON && (
<>
<div className="mb-1 flex w-full items-center">
<div className="min-w-[150px]">Cron expression (UTC)</div>
<code className="rounded bg-gray-100 px-2 py-0.5 text-sm dark:bg-gray-700">
{backupInterval?.cronExpression || ''}
</code>
</div>
{backupInterval?.cronExpression &&
(() => {
try {
const interval = CronExpressionParser.parse(backupInterval.cronExpression, {
tz: 'UTC',
});
const nextRun = interval.next().toDate();
return (
<div className="mb-1 flex w-full items-center text-xs text-gray-600 dark:text-gray-400">
<div className="min-w-[150px]" />
<div>
Next run {dayjs(nextRun).local().format(dateTimeFormat.format)}
<br />({dayjs(nextRun).fromNow()})
</div>
</div>
);
} catch {
return null;
}
})()}
</>
)}
{backupInterval?.interval !== IntervalType.HOURLY &&
backupInterval?.interval !== IntervalType.CRON && (
<div className="mb-1 flex w-full items-center">
<div className="min-w-[150px]">Backup time of day</div>
<div>{formattedTime}</div>
</div>
)}
<div className="mb-1 flex w-full items-center">
<div className="min-w-[150px]">Retry if failed</div>
<div>{backupConfig.isRetryIfFailed ? 'Yes' : 'No'}</div>

View File

@@ -331,7 +331,7 @@ export const EditDatabaseSpecificDataComponent = ({
}}
size="small"
className="max-w-[200px] grow"
placeholder="Enter PG database name (optional)"
placeholder="Enter PG database name"
/>
</div>
)}

View File

@@ -12,7 +12,9 @@ import { EditAzureBlobStorageComponent } from './storages/EditAzureBlobStorageCo
import { EditFTPStorageComponent } from './storages/EditFTPStorageComponent';
import { EditGoogleDriveStorageComponent } from './storages/EditGoogleDriveStorageComponent';
import { EditNASStorageComponent } from './storages/EditNASStorageComponent';
import { EditRcloneStorageComponent } from './storages/EditRcloneStorageComponent';
import { EditS3StorageComponent } from './storages/EditS3StorageComponent';
import { EditSFTPStorageComponent } from './storages/EditSFTPStorageComponent';
interface Props {
workspaceId: string;
@@ -88,6 +90,8 @@ export function EditStorageComponent({
storage.googleDriveStorage = undefined;
storage.azureBlobStorage = undefined;
storage.ftpStorage = undefined;
storage.sftpStorage = undefined;
storage.rcloneStorage = undefined;
if (type === StorageType.LOCAL) {
storage.localStorage = {};
@@ -146,6 +150,23 @@ export function EditStorageComponent({
};
}
if (type === StorageType.SFTP) {
storage.sftpStorage = {
host: '',
port: 22,
username: '',
password: '',
path: '',
};
}
if (type === StorageType.RCLONE) {
storage.rcloneStorage = {
configContent: '',
remotePath: '',
};
}
setStorage(
JSON.parse(
JSON.stringify({
@@ -261,6 +282,29 @@ export function EditStorageComponent({
);
}
if (storage.type === StorageType.SFTP) {
if (storage.id) {
return (
storage.sftpStorage?.host && storage.sftpStorage?.port && storage.sftpStorage?.username
);
}
return (
storage.sftpStorage?.host &&
storage.sftpStorage?.port &&
storage.sftpStorage?.username &&
(storage.sftpStorage?.password || storage.sftpStorage?.privateKey)
);
}
if (storage.type === StorageType.RCLONE) {
if (storage.id) {
return true;
}
return storage.rcloneStorage?.configContent;
}
return false;
};
@@ -298,6 +342,8 @@ export function EditStorageComponent({
{ label: 'NAS', value: StorageType.NAS },
{ label: 'Azure Blob Storage', value: StorageType.AZURE_BLOB },
{ label: 'FTP', value: StorageType.FTP },
{ label: 'SFTP', value: StorageType.SFTP },
{ label: 'Rclone', value: StorageType.RCLONE },
]}
onChange={(value) => {
setStorageType(value);
@@ -370,6 +416,28 @@ export function EditStorageComponent({
}}
/>
)}
{storage?.type === StorageType.SFTP && (
<EditSFTPStorageComponent
storage={storage}
setStorage={setStorage}
setUnsaved={() => {
setIsUnsaved(true);
setIsTestConnectionSuccess(false);
}}
/>
)}
{storage?.type === StorageType.RCLONE && (
<EditRcloneStorageComponent
storage={storage}
setStorage={setStorage}
setUnsaved={() => {
setIsUnsaved(true);
setIsTestConnectionSuccess(false);
}}
/>
)}
</div>
<div className="mt-3 flex">

View File

@@ -0,0 +1,109 @@
import { InfoCircleOutlined } from '@ant-design/icons';
import { Input, Tooltip } from 'antd';
import type { Storage } from '../../../../../entity/storages';
interface Props {
storage: Storage;
setStorage: (storage: Storage) => void;
setUnsaved: () => void;
}
/**
 * Edit form for an Rclone storage: a raw rclone.conf textarea plus an
 * optional remote path prefix.
 *
 * Fix: the "*content is hidden…" hint was shown for NEW storages
 * (`!storage?.id`), but it only makes sense for EXISTING ones — the server
 * hides the saved config of a persisted storage, and validation elsewhere
 * only treats the config as optional when `storage.id` exists.
 */
export function EditRcloneStorageComponent({ storage, setStorage, setUnsaved }: Props) {
  return (
    <>
      <div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-start">
        <div className="mb-1 min-w-[110px] pt-1 sm:mb-0">Config</div>

        <div className="flex w-full flex-col">
          <div className="flex items-start">
            <Input.TextArea
              value={storage?.rcloneStorage?.configContent || ''}
              onChange={(e) => {
                if (!storage?.rcloneStorage) return;

                setStorage({
                  ...storage,
                  rcloneStorage: {
                    ...storage.rcloneStorage,
                    configContent: e.target.value,
                  },
                });
                setUnsaved();
              }}
              className="w-full max-w-[400px] font-mono text-xs"
              placeholder={`[myremote]
type = s3
provider = AWS
access_key_id = YOUR_ACCESS_KEY
secret_access_key = YOUR_SECRET_KEY
region = us-east-1`}
              rows={8}
              style={{ resize: 'vertical' }}
            />

            <Tooltip
              className="cursor-pointer"
              title="Paste your rclone.conf content here. You can get it by running 'rclone config file' and copying the contents. This config supports 70+ cloud storage providers."
            >
              <InfoCircleOutlined className="mt-2 ml-2" style={{ color: 'gray' }} />
            </Tooltip>
          </div>
        </div>
      </div>

      {/* Only persisted storages have their saved config hidden by the server. */}
      {storage?.id && (
        <div className="mb-2 flex items-center">
          <div className="hidden min-w-[110px] sm:block" />

          <div className="max-w-[300px] text-xs text-gray-400">
            *content is hidden to not expose sensitive data. If you want to update existing config,
            put a new one here
          </div>
        </div>
      )}

      <div className="mb-2 flex items-center">
        <div className="hidden min-w-[110px] sm:block" />

        <div className="text-xs text-blue-600">
          <a href="https://rclone.org/docs/" target="_blank" rel="noreferrer">
            Rclone documentation
          </a>
        </div>
      </div>

      <div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
        <div className="mb-1 min-w-[110px] sm:mb-0">Remote path</div>

        <div className="flex items-center">
          <Input
            value={storage?.rcloneStorage?.remotePath || ''}
            onChange={(e) => {
              if (!storage?.rcloneStorage) return;

              setStorage({
                ...storage,
                rcloneStorage: {
                  ...storage.rcloneStorage,
                  remotePath: e.target.value.trim(),
                },
              });
              setUnsaved();
            }}
            size="small"
            className="w-full max-w-[250px]"
            placeholder="/backups (optional)"
          />

          <Tooltip
            className="cursor-pointer"
            title="Optional path prefix on the remote where backups will be stored (e.g., '/backups' or 'my-folder/backups')"
          >
            <InfoCircleOutlined className="ml-2" style={{ color: 'gray' }} />
          </Tooltip>
        </div>
      </div>

      <div className="mb-5" />
    </>
  );
}

View File

@@ -0,0 +1,267 @@
import { DownOutlined, InfoCircleOutlined, UpOutlined } from '@ant-design/icons';
import { Checkbox, Input, InputNumber, Radio, Tooltip } from 'antd';
import { useState } from 'react';
import type { Storage } from '../../../../../entity/storages';
interface Props {
storage: Storage;
setStorage: (storage: Storage) => void;
setUnsaved: () => void;
}
export function EditSFTPStorageComponent({ storage, setStorage, setUnsaved }: Props) {
const hasAdvancedValues = !!storage?.sftpStorage?.skipHostKeyVerify;
const [showAdvanced, setShowAdvanced] = useState(hasAdvancedValues);
const authMethod = storage?.sftpStorage?.privateKey ? 'privateKey' : 'password';
return (
<>
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[110px] sm:mb-0">Host</div>
<Input
value={storage?.sftpStorage?.host || ''}
onChange={(e) => {
if (!storage?.sftpStorage) return;
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
host: e.target.value.trim(),
},
});
setUnsaved();
}}
size="small"
className="w-full max-w-[250px]"
placeholder="sftp.example.com"
/>
</div>
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[110px] sm:mb-0">Port</div>
<InputNumber
value={storage?.sftpStorage?.port}
onChange={(value) => {
if (!storage?.sftpStorage || !value) return;
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
port: value,
},
});
setUnsaved();
}}
size="small"
className="w-full max-w-[250px]"
min={1}
max={65535}
placeholder="22"
/>
</div>
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[110px] sm:mb-0">Username</div>
<Input
value={storage?.sftpStorage?.username || ''}
onChange={(e) => {
if (!storage?.sftpStorage) return;
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
username: e.target.value.trim(),
},
});
setUnsaved();
}}
size="small"
className="w-full max-w-[250px]"
placeholder="username"
/>
</div>
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[110px] sm:mb-0">Auth Method</div>
<Radio.Group
value={authMethod}
onChange={(e) => {
if (!storage?.sftpStorage) return;
if (e.target.value === 'password') {
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
privateKey: undefined,
},
});
} else {
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
password: undefined,
},
});
}
setUnsaved();
}}
size="small"
>
<Radio value="password">Password</Radio>
<Radio value="privateKey">Private Key</Radio>
</Radio.Group>
</div>
{authMethod === 'password' && (
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[110px] sm:mb-0">Password</div>
<Input.Password
value={storage?.sftpStorage?.password || ''}
onChange={(e) => {
if (!storage?.sftpStorage) return;
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
password: e.target.value,
},
});
setUnsaved();
}}
size="small"
className="w-full max-w-[250px]"
placeholder="password"
/>
</div>
)}
{authMethod === 'privateKey' && (
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[110px] sm:mb-0">Private Key</div>
<div className="flex w-full max-w-[250px] flex-col">
<Input.TextArea
value={storage?.sftpStorage?.privateKey || ''}
onChange={(e) => {
if (!storage?.sftpStorage) return;
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
privateKey: e.target.value,
},
});
setUnsaved();
}}
size="small"
className="w-full"
placeholder="-----BEGIN OPENSSH PRIVATE KEY-----"
rows={4}
/>
<Tooltip
className="mt-1 cursor-pointer"
title="Paste your SSH private key (PEM format). Supports RSA, DSA, ECDSA, and Ed25519 keys."
>
<InfoCircleOutlined style={{ color: 'gray' }} />
</Tooltip>
</div>
</div>
)}
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[110px] sm:mb-0">Path</div>
<div className="flex items-center">
<Input
value={storage?.sftpStorage?.path || ''}
onChange={(e) => {
if (!storage?.sftpStorage) return;
let pathValue = e.target.value.trim();
if (pathValue.startsWith('/')) {
pathValue = pathValue.substring(1);
}
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
path: pathValue || undefined,
},
});
setUnsaved();
}}
size="small"
className="w-full max-w-[250px]"
placeholder="backups (optional)"
/>
<Tooltip
className="cursor-pointer"
title="Remote directory path for storing backups (optional)"
>
<InfoCircleOutlined className="ml-2" style={{ color: 'gray' }} />
</Tooltip>
</div>
</div>
<div className="mt-4 mb-3 flex items-center">
<div
className="flex cursor-pointer items-center text-sm text-blue-600 hover:text-blue-800"
onClick={() => setShowAdvanced(!showAdvanced)}
>
<span className="mr-2">Advanced settings</span>
{showAdvanced ? (
<UpOutlined style={{ fontSize: '12px' }} />
) : (
<DownOutlined style={{ fontSize: '12px' }} />
)}
</div>
</div>
{showAdvanced && (
<>
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="mb-1 min-w-[110px] sm:mb-0">Skip host key</div>
<div className="flex items-center">
<Checkbox
checked={storage?.sftpStorage?.skipHostKeyVerify || false}
onChange={(e) => {
if (!storage?.sftpStorage) return;
setStorage({
...storage,
sftpStorage: {
...storage.sftpStorage,
skipHostKeyVerify: e.target.checked,
},
});
setUnsaved();
}}
>
Skip host key verification
</Checkbox>
<Tooltip
className="cursor-pointer"
title="Skip SSH host key verification. Enable this if you trust the server. Warning: this reduces security."
>
<InfoCircleOutlined className="ml-2" style={{ color: 'gray' }} />
</Tooltip>
</div>
</div>
</>
)}
<div className="mb-5" />
</>
);
}

View File

@@ -5,7 +5,9 @@ import { ShowAzureBlobStorageComponent } from './storages/ShowAzureBlobStorageCo
import { ShowFTPStorageComponent } from './storages/ShowFTPStorageComponent';
import { ShowGoogleDriveStorageComponent } from './storages/ShowGoogleDriveStorageComponent';
import { ShowNASStorageComponent } from './storages/ShowNASStorageComponent';
import { ShowRcloneStorageComponent } from './storages/ShowRcloneStorageComponent';
import { ShowS3StorageComponent } from './storages/ShowS3StorageComponent';
import { ShowSFTPStorageComponent } from './storages/ShowSFTPStorageComponent';
interface Props {
storage?: Storage;
@@ -49,6 +51,14 @@ export function ShowStorageComponent({ storage }: Props) {
<div>
{storage?.type === StorageType.FTP && <ShowFTPStorageComponent storage={storage} />}
</div>
<div>
{storage?.type === StorageType.SFTP && <ShowSFTPStorageComponent storage={storage} />}
</div>
<div>
{storage?.type === StorageType.RCLONE && <ShowRcloneStorageComponent storage={storage} />}
</div>
</div>
);
}

View File

@@ -0,0 +1,21 @@
import type { Storage } from '../../../../../entity/storages';
interface Props {
storage: Storage;
}
export function ShowRcloneStorageComponent({ storage }: Props) {
return (
<>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Remote path</div>
{storage?.rcloneStorage?.remotePath || '-'}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Config</div>
{'*************'}
</div>
</>
);
}

View File

@@ -0,0 +1,50 @@
import type { Storage } from '../../../../../entity/storages';
interface Props {
storage: Storage;
}
export function ShowSFTPStorageComponent({ storage }: Props) {
const authMethod = storage?.sftpStorage?.privateKey ? 'Private Key' : 'Password';
return (
<>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Host</div>
{storage?.sftpStorage?.host || '-'}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Port</div>
{storage?.sftpStorage?.port || '-'}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Username</div>
{storage?.sftpStorage?.username || '-'}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Auth Method</div>
{authMethod}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Credentials</div>
{'*************'}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Path</div>
{storage?.sftpStorage?.path || '-'}
</div>
{storage?.sftpStorage?.skipHostKeyVerify && (
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Skip host key</div>
Enabled
</div>
)}
</>
);
}

View File

@@ -1,23 +1,67 @@
export const getUserTimeFormat = () => {
// Detect the locale's date field order (MDY, DMY or YMD) and separator by
// formatting a probe date whose day (15), month (3) and year (2023) are
// pairwise distinct, then locating each field in the localized string.
const getLocaleDateFormat = () => {
  const locale = navigator.language || 'en-US';
  const sample = new Date(2023, 2, 15).toLocaleDateString(locale);

  // Which separator does this locale use? '.', '-' or (default) '/'.
  let separator = '/';
  if (sample.includes('.')) {
    separator = '.';
  } else if (sample.includes('-')) {
    separator = '-';
  }

  const fields = sample.split(/[./-]/);
  const dayAt = fields.indexOf('15');
  const monthAt = fields.findIndex((f) => f === '3' || f === '03');
  const yearAt = fields.findIndex((f) => f === '2023' || f === '23');

  // DMY (Europe, Russia, most of the world) is also the fallback when
  // detection fails.
  let dateFormat = `DD${separator}MM${separator}YYYY`;
  let shortDateFormat = `DD MMM YYYY`;

  if (yearAt === 0) {
    // YMD (China, Japan, Korea, ISO)
    dateFormat = `YYYY${separator}MM${separator}DD`;
    shortDateFormat = `YYYY MMM DD`;
  } else if (monthAt === 0 && dayAt === 1) {
    // MDY (USA)
    dateFormat = `MM${separator}DD${separator}YYYY`;
    shortDateFormat = `MMM DD, YYYY`;
  }

  return { dateFormat, shortDateFormat, separator };
};
// Detect whether the user's locale prefers 12-hour time: format 1 PM and
// look for an AM/PM marker in the result.
// FIX: the body computed the AM/PM check twice (once into an unused local,
// once in the return expression); compute it once and return it.
const getIs12HourFormat = () => {
  const locale = navigator.language || 'en-US';
  const testDate = new Date(2023, 0, 1, 13, 0, 0); // 1 PM
  const timeString = testDate.toLocaleTimeString(locale, { hour: 'numeric' });
  return timeString.includes('PM') || timeString.includes('AM');
};
/**
 * Build the user's full date-time display format from their locale.
 * Returns `{ use12Hours, format }` for use with dayjs/antd pickers.
 *
 * FIX: the returned object literal carried a stale duplicate `format` key
 * (the old hard-coded 'DD.MM.YYYY …' pattern) alongside the locale-aware
 * one; the duplicate was dead code (the later key wins) and is removed.
 */
export const getUserTimeFormat = () => {
  const { dateFormat } = getLocaleDateFormat();
  const is12Hour = getIs12HourFormat();

  return {
    use12Hours: is12Hour,
    format: is12Hour ? `${dateFormat} h:mm A` : `${dateFormat} HH:mm`,
  };
};
/**
 * Build the user's short date-time display format (abbreviated month name)
 * from their locale. Returns `{ use12Hours, format }`.
 *
 * FIX: the body declared `const is12Hour` twice (a SyntaxError — the old
 * inline AM/PM probe was left in place next to the new helper call) and the
 * returned literal had a stale duplicate `format` key; both leftovers are
 * removed, keeping only the locale-aware helper-based computation.
 */
export const getUserShortTimeFormat = () => {
  const { shortDateFormat } = getLocaleDateFormat();
  const is12Hour = getIs12HourFormat();

  return {
    use12Hours: is12Hour,
    format: is12Hour ? `${shortDateFormat} h:mm A` : `${shortDateFormat} HH:mm`,
  };
};