Compare commits

...

10 Commits

Author SHA1 Message Date
Rostislav Dugin
cba40afd00 Merge pull request #228 from databasus/develop
FIX (backend): Fix formatting
2026-01-08 17:11:43 +03:00
Rostislav Dugin
7aea012aeb FIX (backend): Fix formatting 2026-01-08 17:10:47 +03:00
Rostislav Dugin
6d5534deaa Merge pull request #227 from databasus/develop
Develop
2026-01-08 16:55:12 +03:00
Rostislav Dugin
c04bd54683 FIX (download): Add streamable download of backups 2026-01-08 15:55:52 +03:00
Rostislav Dugin
1c3f16b372 FIX (google drive): Fix UI after new local redirect PR 2026-01-08 12:22:47 +03:00
Rostislav Dugin
ed08da56a6 FIX (cicd): Get rid of CITATION auto generate 2026-01-08 11:35:55 +03:00
Rostislav Dugin
c53e84b48d FIX (devex): Fix Linux tools installation script 2026-01-08 11:34:35 +03:00
Rostislav Dugin
dbfeb9e27f merge develop 2026-01-08 11:33:34 +03:00
Rostislav Dugin
02e86ffb3b FIX (devex): Fix Linux tools installation script 2026-01-08 11:10:56 +03:00
github-actions[bot]
207382116c Update CITATION.cff to v2.21.0 2026-01-05 18:38:28 +00:00
43 changed files with 1222 additions and 174 deletions

View File

@@ -672,17 +672,6 @@ jobs:
echo EOF
} >> $GITHUB_OUTPUT
- name: Update CITATION.cff version
run: |
VERSION="${{ needs.determine-version.outputs.new_version }}"
sed -i "s/^version: .*/version: ${VERSION}/" CITATION.cff
sed -i "s/^date-released: .*/date-released: \"$(date +%Y-%m-%d)\"/" CITATION.cff
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git add CITATION.cff
git commit -m "Update CITATION.cff to v${VERSION}" || true
git push || true
- name: Create GitHub Release
uses: actions/create-release@v1
env:

View File

@@ -6,14 +6,14 @@ repos:
hooks:
- id: frontend-format
name: Frontend Format (Prettier)
entry: powershell -Command "cd frontend; npm run format"
entry: bash -c "cd frontend && npm run format"
language: system
files: ^frontend/.*\.(ts|tsx|js|jsx|json|css|md)$
pass_filenames: false
- id: frontend-lint
name: Frontend Lint (ESLint)
entry: powershell -Command "cd frontend; npm run lint"
entry: bash -c "cd frontend && npm run lint"
language: system
files: ^frontend/.*\.(ts|tsx|js|jsx)$
pass_filenames: false
@@ -23,7 +23,7 @@ repos:
hooks:
- id: backend-format-and-lint
name: Backend Format & Lint (golangci-lint)
entry: powershell -Command "cd backend; golangci-lint fmt; golangci-lint run"
entry: bash -c "cd backend && golangci-lint fmt ./internal/... ./cmd/... && golangci-lint run ./internal/... ./cmd/..."
language: system
files: ^backend/.*\.go$
pass_filenames: false
pass_filenames: false

View File

@@ -32,5 +32,5 @@ keywords:
- mongodb
- mariadb
license: Apache-2.0
version: 2.20.3
date-released: "2026-01-04"
version: 2.21.0
date-released: "2026-01-05"

View File

@@ -2,7 +2,7 @@ run:
go run cmd/main.go
test:
go test -p=1 -count=1 -failfast -timeout 10m .\internal\...
go test -p=1 -count=1 -failfast -timeout 10m ./internal/...
lint:
golangci-lint fmt && golangci-lint run

View File

@@ -183,6 +183,7 @@ func setUpRoutes(r *gin.Engine) {
userController := users_controllers.GetUserController()
userController.RegisterRoutes(v1)
system_healthcheck.GetHealthcheckController().RegisterRoutes(v1)
backups.GetBackupController().RegisterPublicRoutes(v1)
// Setup auth middleware
userService := users_services.GetUserService()
@@ -243,6 +244,10 @@ func runBackgroundTasks(log *slog.Logger) {
go runWithPanicLogging(log, "audit log cleanup background service", func() {
audit_logs.GetAuditLogBackgroundService().Run()
})
go runWithPanicLogging(log, "download token cleanup background service", func() {
backups.GetDownloadTokenBackgroundService().Run()
})
}
func runWithPanicLogging(log *slog.Logger, serviceName string, fn func()) {

View File

@@ -18,11 +18,17 @@ type BackupController struct {
func (c *BackupController) RegisterRoutes(router *gin.RouterGroup) {
router.GET("/backups", c.GetBackups)
router.POST("/backups", c.MakeBackup)
router.GET("/backups/:id/file", c.GetFile)
router.POST("/backups/:id/download-token", c.GenerateDownloadToken)
router.DELETE("/backups/:id", c.DeleteBackup)
router.POST("/backups/:id/cancel", c.CancelBackup)
}
// RegisterPublicRoutes registers routes that must be reachable without the
// Bearer-auth middleware. The file endpoint authenticates via a short-lived
// download token passed as a query parameter instead (validated in GetFile).
func (c *BackupController) RegisterPublicRoutes(router *gin.RouterGroup) {
	router.GET("/backups/:id/file", c.GetFile)
}
// GetBackups
// @Summary Get backups for a database
// @Description Get paginated backups for the specified database
@@ -159,17 +165,16 @@ func (c *BackupController) CancelBackup(ctx *gin.Context) {
ctx.Status(http.StatusNoContent)
}
// GetFile
// @Summary Download a backup file
// @Description Download the backup file for the specified backup
// GenerateDownloadToken
// @Summary Generate short-lived download token
// @Description Generate a token for downloading a backup file (valid for 5 minutes)
// @Tags backups
// @Param id path string true "Backup ID"
// @Success 200 {file} file
// @Success 200 {object} GenerateDownloadTokenResponse
// @Failure 400
// @Failure 401
// @Failure 500
// @Router /backups/{id}/file [get]
func (c *BackupController) GetFile(ctx *gin.Context) {
// @Router /backups/{id}/download-token [post]
func (c *BackupController) GenerateDownloadToken(ctx *gin.Context) {
user, ok := users_middleware.GetUserFromContext(ctx)
if !ok {
ctx.JSON(http.StatusUnauthorized, gin.H{"error": "User not authenticated"})
@@ -182,7 +187,56 @@ func (c *BackupController) GetFile(ctx *gin.Context) {
return
}
fileReader, backup, database, err := c.backupService.GetBackupFile(user, id)
response, err := c.backupService.GenerateDownloadToken(user, id)
if err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
ctx.JSON(http.StatusOK, response)
}
// GetFile
// @Summary Download a backup file
// @Description Download the backup file for the specified backup using a download token
// @Tags backups
// @Param id path string true "Backup ID"
// @Param token query string true "Download token"
// @Success 200 {file} file
// @Failure 400
// @Failure 401
// @Failure 500
// @Router /backups/{id}/file [get]
func (c *BackupController) GetFile(ctx *gin.Context) {
token := ctx.Query("token")
if token == "" {
ctx.JSON(http.StatusUnauthorized, gin.H{"error": "download token is required"})
return
}
// Get backup ID from URL
backupIDParam := ctx.Param("id")
backupID, err := uuid.Parse(backupIDParam)
if err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": "invalid backup ID"})
return
}
downloadToken, err := c.backupService.ValidateDownloadToken(token)
if err != nil {
ctx.JSON(http.StatusUnauthorized, gin.H{"error": "invalid or expired download token"})
return
}
// Verify token is for the requested backup
if downloadToken.BackupID != backupID {
ctx.JSON(http.StatusUnauthorized, gin.H{"error": "invalid or expired download token"})
return
}
fileReader, backup, database, err := c.backupService.GetBackupFileWithoutAuth(
downloadToken.BackupID,
)
if err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
@@ -195,6 +249,12 @@ func (c *BackupController) GetFile(ctx *gin.Context) {
filename := c.generateBackupFilename(backup, database)
// Set Content-Length for progress tracking
if backup.BackupSizeMb > 0 {
sizeBytes := int64(backup.BackupSizeMb * 1024 * 1024)
ctx.Header("Content-Length", fmt.Sprintf("%d", sizeBytes))
}
ctx.Header("Content-Type", "application/octet-stream")
ctx.Header(
"Content-Disposition",
@@ -203,9 +263,12 @@ func (c *BackupController) GetFile(ctx *gin.Context) {
_, err = io.Copy(ctx.Writer, fileReader)
if err != nil {
ctx.JSON(http.StatusInternalServerError, gin.H{"error": "failed to stream file"})
fmt.Printf("Error streaming file: %v\n", err)
return
}
// Write audit log after successful download
c.backupService.WriteAuditLogForDownload(downloadToken.UserID, backup, database)
}
type MakeBackupRequest struct {

View File

@@ -18,6 +18,7 @@ import (
"databasus-backend/internal/config"
audit_logs "databasus-backend/internal/features/audit_logs"
"databasus-backend/internal/features/backups/backups/download_token"
backups_config "databasus-backend/internal/features/backups/config"
"databasus-backend/internal/features/databases"
"databasus-backend/internal/features/databases/databases/postgresql"
@@ -89,7 +90,13 @@ func Test_GetBackups_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
} else {
@@ -181,7 +188,13 @@ func Test_CreateBackup_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
} else {
@@ -311,7 +324,13 @@ func Test_DeleteBackup_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
} else {
@@ -380,7 +399,7 @@ func Test_DeleteBackup_AuditLogWritten(t *testing.T) {
assert.True(t, found, "Audit log for backup deletion not found")
}
func Test_DownloadBackup_PermissionsEnforced(t *testing.T) {
func Test_GenerateDownloadToken_PermissionsEnforced(t *testing.T) {
tests := []struct {
name string
workspaceRole *users_enums.WorkspaceRole
@@ -389,28 +408,28 @@ func Test_DownloadBackup_PermissionsEnforced(t *testing.T) {
expectedStatusCode int
}{
{
name: "workspace viewer can download backup",
name: "workspace viewer can generate token",
workspaceRole: func() *users_enums.WorkspaceRole { r := users_enums.WorkspaceRoleViewer; return &r }(),
isGlobalAdmin: false,
expectSuccess: true,
expectedStatusCode: http.StatusOK,
},
{
name: "workspace member can download backup",
name: "workspace member can generate token",
workspaceRole: func() *users_enums.WorkspaceRole { r := users_enums.WorkspaceRoleMember; return &r }(),
isGlobalAdmin: false,
expectSuccess: true,
expectedStatusCode: http.StatusOK,
},
{
name: "non-member cannot download backup",
name: "non-member cannot generate token",
workspaceRole: nil,
isGlobalAdmin: false,
expectSuccess: false,
expectedStatusCode: http.StatusBadRequest,
},
{
name: "global admin can download backup",
name: "global admin can generate token",
workspaceRole: nil,
isGlobalAdmin: true,
expectSuccess: true,
@@ -435,7 +454,13 @@ func Test_DownloadBackup_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
} else {
@@ -443,21 +468,244 @@ func Test_DownloadBackup_PermissionsEnforced(t *testing.T) {
testUserToken = nonMember.Token
}
testResp := test_utils.MakeGetRequest(
testResp := test_utils.MakePostRequest(
t,
router,
fmt.Sprintf("/api/v1/backups/%s/file", backup.ID.String()),
fmt.Sprintf("/api/v1/backups/%s/download-token", backup.ID.String()),
"Bearer "+testUserToken,
nil,
tt.expectedStatusCode,
)
if !tt.expectSuccess {
if tt.expectSuccess {
var response GenerateDownloadTokenResponse
err := json.Unmarshal(testResp.Body, &response)
assert.NoError(t, err)
assert.NotEmpty(t, response.Token)
assert.NotEmpty(t, response.Filename)
assert.Equal(t, backup.ID, response.BackupID)
} else {
assert.Contains(t, string(testResp.Body), "insufficient permissions")
}
})
}
}
// Test_DownloadBackup_WithValidToken_Success covers the happy path: a token
// minted via POST /download-token allows an unauthenticated GET of the backup
// file, and the response carries an attachment Content-Disposition using the
// filename returned alongside the token.
func Test_DownloadBackup_WithValidToken_Success(t *testing.T) {
	router := createTestRouter()
	owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
	workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)
	_, backup := createTestDatabaseWithBackups(workspace, owner, router)

	// Generate download token
	var tokenResponse GenerateDownloadTokenResponse
	test_utils.MakePostRequestAndUnmarshal(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/download-token", backup.ID.String()),
		"Bearer "+owner.Token,
		nil,
		http.StatusOK,
		&tokenResponse,
	)

	// Download with token — note: no Authorization header is sent.
	testResp := test_utils.MakeGetRequest(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/file?token=%s", backup.ID.String(), tokenResponse.Token),
		"",
		http.StatusOK,
	)

	// Verify response
	contentDisposition := testResp.Headers.Get("Content-Disposition")
	assert.Contains(t, contentDisposition, "attachment")
	assert.Contains(t, contentDisposition, tokenResponse.Filename)
}
// Test_DownloadBackup_WithoutToken_Unauthorized verifies that the public file
// endpoint rejects a request carrying no token query parameter with 401.
func Test_DownloadBackup_WithoutToken_Unauthorized(t *testing.T) {
	router := createTestRouter()
	owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
	workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)
	_, backup := createTestDatabaseWithBackups(workspace, owner, router)

	// Try to download without token
	testResp := test_utils.MakeGetRequest(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/file", backup.ID.String()),
		"",
		http.StatusUnauthorized,
	)

	assert.Contains(t, string(testResp.Body), "download token is required")
}
// Test_DownloadBackup_WithInvalidToken_Unauthorized verifies that a token
// string that was never issued is rejected by the public file endpoint.
func Test_DownloadBackup_WithInvalidToken_Unauthorized(t *testing.T) {
	router := createTestRouter()
	owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
	workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)
	_, backup := createTestDatabaseWithBackups(workspace, owner, router)

	// Try to download with invalid token
	testResp := test_utils.MakeGetRequest(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/file?token=%s", backup.ID.String(), "invalid-token-xyz"),
		"",
		http.StatusUnauthorized,
	)

	assert.Contains(t, string(testResp.Body), "invalid or expired download token")
}
// Test_DownloadBackup_WithExpiredToken_Unauthorized verifies that a token
// whose expiry has passed is rejected, and that no "Backup file downloaded"
// audit log entry is written for the failed attempt.
func Test_DownloadBackup_WithExpiredToken_Unauthorized(t *testing.T) {
	router := createTestRouter()
	owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
	workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)
	database, backup := createTestDatabaseWithBackups(workspace, owner, router)

	// Get user for token generation
	userService := users_services.GetUserService()
	user, err := userService.GetUserFromToken(owner.Token)
	assert.NoError(t, err)

	// Create an expired token directly in the database
	expiredToken := createExpiredDownloadToken(backup.ID, user.ID)

	// Try to download with expired token
	testResp := test_utils.MakeGetRequest(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/file?token=%s", backup.ID.String(), expiredToken),
		"",
		http.StatusUnauthorized,
	)

	assert.Contains(t, string(testResp.Body), "invalid or expired download token")

	// Verify audit log was NOT created for failed download
	// (sleep presumably gives any asynchronous audit writing time to land —
	// TODO confirm whether audit writes are actually async)
	time.Sleep(100 * time.Millisecond)
	auditLogService := audit_logs.GetAuditLogService()
	auditLogs, err := auditLogService.GetWorkspaceAuditLogs(
		workspace.ID,
		&audit_logs.GetAuditLogsRequest{
			Limit:  100,
			Offset: 0,
		},
	)
	assert.NoError(t, err)

	found := false
	for _, log := range auditLogs.AuditLogs {
		if strings.Contains(log.Message, "Backup file downloaded") &&
			strings.Contains(log.Message, database.Name) {
			found = true
			break
		}
	}
	assert.False(t, found, "Audit log should NOT be created for failed download with expired token")
}
// Test_DownloadBackup_TokenUsedOnce_CannotReuseToken verifies the single-use
// property of download tokens: the first redemption succeeds, the second
// redemption of the same token is rejected with 401.
func Test_DownloadBackup_TokenUsedOnce_CannotReuseToken(t *testing.T) {
	router := createTestRouter()
	owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
	workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)
	_, backup := createTestDatabaseWithBackups(workspace, owner, router)

	// Generate download token
	var tokenResponse GenerateDownloadTokenResponse
	test_utils.MakePostRequestAndUnmarshal(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/download-token", backup.ID.String()),
		"Bearer "+owner.Token,
		nil,
		http.StatusOK,
		&tokenResponse,
	)

	// Download with token (first time - should succeed)
	test_utils.MakeGetRequest(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/file?token=%s", backup.ID.String(), tokenResponse.Token),
		"",
		http.StatusOK,
	)

	// Try to download again with same token (should fail)
	testResp := test_utils.MakeGetRequest(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/file?token=%s", backup.ID.String(), tokenResponse.Token),
		"",
		http.StatusUnauthorized,
	)

	assert.Contains(t, string(testResp.Body), "invalid or expired download token")
}
// Test_DownloadBackup_WithDifferentBackupToken_Unauthorized verifies that a
// token minted for one backup cannot be redeemed against a different backup,
// even by the same user within the same workspace.
func Test_DownloadBackup_WithDifferentBackupToken_Unauthorized(t *testing.T) {
	router := createTestRouter()
	owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
	workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)

	// First database: backups enabled, one stored backup.
	database1 := createTestDatabase("Database 1", workspace.ID, owner.Token, router)

	storage := createTestStorage(workspace.ID)
	configService := backups_config.GetBackupConfigService()

	config1, err := configService.GetBackupConfigByDbId(database1.ID)
	assert.NoError(t, err)
	config1.IsBackupsEnabled = true
	config1.StorageID = &storage.ID
	config1.Storage = storage
	_, err = configService.SaveBackupConfig(config1)
	assert.NoError(t, err)

	backup1 := createTestBackup(database1, owner)

	// Second database sharing the same storage, with its own backup.
	database2 := createTestDatabase("Database 2", workspace.ID, owner.Token, router)

	config2, err := configService.GetBackupConfigByDbId(database2.ID)
	assert.NoError(t, err)
	config2.IsBackupsEnabled = true
	config2.StorageID = &storage.ID
	config2.Storage = storage
	_, err = configService.SaveBackupConfig(config2)
	assert.NoError(t, err)

	backup2 := createTestBackup(database2, owner)

	// Generate token for backup1
	var tokenResponse GenerateDownloadTokenResponse
	test_utils.MakePostRequestAndUnmarshal(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/download-token", backup1.ID.String()),
		"Bearer "+owner.Token,
		nil,
		http.StatusOK,
		&tokenResponse,
	)

	// Try to use backup1's token to download backup2 (should fail)
	testResp := test_utils.MakeGetRequest(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/file?token=%s", backup2.ID.String(), tokenResponse.Token),
		"",
		http.StatusUnauthorized,
	)

	assert.Contains(t, string(testResp.Body), "invalid or expired download token")
}
func Test_DownloadBackup_AuditLogWritten(t *testing.T) {
router := createTestRouter()
owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
@@ -465,11 +713,24 @@ func Test_DownloadBackup_AuditLogWritten(t *testing.T) {
database, backup := createTestDatabaseWithBackups(workspace, owner, router)
// Generate download token
var tokenResponse GenerateDownloadTokenResponse
test_utils.MakePostRequestAndUnmarshal(
t,
router,
fmt.Sprintf("/api/v1/backups/%s/download-token", backup.ID.String()),
"Bearer "+owner.Token,
nil,
http.StatusOK,
&tokenResponse,
)
// Download with token
test_utils.MakeGetRequest(
t,
router,
fmt.Sprintf("/api/v1/backups/%s/file", backup.ID.String()),
"Bearer "+owner.Token,
fmt.Sprintf("/api/v1/backups/%s/file?token=%s", backup.ID.String(), tokenResponse.Token),
"",
http.StatusOK,
)
@@ -544,11 +805,28 @@ func Test_DownloadBackup_ProperFilenameForPostgreSQL(t *testing.T) {
backup := createTestBackup(database, owner)
// Generate download token
var tokenResponse GenerateDownloadTokenResponse
test_utils.MakePostRequestAndUnmarshal(
t,
router,
fmt.Sprintf("/api/v1/backups/%s/download-token", backup.ID.String()),
"Bearer "+owner.Token,
nil,
http.StatusOK,
&tokenResponse,
)
// Download with token
resp := test_utils.MakeGetRequest(
t,
router,
fmt.Sprintf("/api/v1/backups/%s/file", backup.ID.String()),
"Bearer "+owner.Token,
fmt.Sprintf(
"/api/v1/backups/%s/file?token=%s",
backup.ID.String(),
tokenResponse.Token,
),
"",
http.StatusOK,
)
@@ -817,9 +1095,38 @@ func createTestBackup(
dummyContent := []byte("dummy backup content for testing")
reader := strings.NewReader(string(dummyContent))
logger := slog.New(slog.NewTextHandler(io.Discard, nil))
if err := storages[0].SaveFile(context.Background(), encryption.GetFieldEncryptor(), logger, backup.ID, reader); err != nil {
if err := storages[0].SaveFile(
context.Background(),
encryption.GetFieldEncryptor(),
logger,
backup.ID,
reader,
); err != nil {
panic(fmt.Sprintf("Failed to create test backup file: %v", err))
}
return backup
}
// createExpiredDownloadToken mints a real token for backupID/userID and then
// rewrites its expiry to 10 minutes in the past directly through the
// repository, so tests can exercise the expired-token path with a token that
// otherwise exists and is well-formed. Panics on any failure (test-only helper).
func createExpiredDownloadToken(backupID, userID uuid.UUID) string {
	tokenService := GetBackupService().downloadTokenService
	token, err := tokenService.Generate(backupID, userID)
	if err != nil {
		panic(fmt.Sprintf("Failed to generate download token: %v", err))
	}

	// Manually update the token to be expired
	repo := &download_token.DownloadTokenRepository{}
	downloadToken, err := repo.FindByToken(token)
	if err != nil || downloadToken == nil {
		panic(fmt.Sprintf("Failed to find generated token: %v", err))
	}

	// Set expiration to 10 minutes ago
	downloadToken.ExpiresAt = time.Now().UTC().Add(-10 * time.Minute)
	if err := repo.Update(downloadToken); err != nil {
		panic(fmt.Sprintf("Failed to update token expiration: %v", err))
	}

	return token
}

View File

@@ -4,6 +4,7 @@ import (
"time"
audit_logs "databasus-backend/internal/features/audit_logs"
"databasus-backend/internal/features/backups/backups/download_token"
"databasus-backend/internal/features/backups/backups/usecases"
backups_config "databasus-backend/internal/features/backups/config"
"databasus-backend/internal/features/databases"
@@ -34,6 +35,7 @@ var backupService = &BackupService{
workspaces_services.GetWorkspaceService(),
audit_logs.GetAuditLogService(),
backupContextManager,
download_token.GetDownloadTokenService(),
}
var backupBackgroundService = &BackupBackgroundService{
@@ -69,3 +71,7 @@ func GetBackupController() *BackupController {
func GetBackupBackgroundService() *BackupBackgroundService {
return backupBackgroundService
}
// GetDownloadTokenBackgroundService re-exports the download_token package's
// background cleanup service so callers only depend on the backups package.
func GetDownloadTokenBackgroundService() *download_token.DownloadTokenBackgroundService {
	return download_token.GetDownloadTokenBackgroundService()
}

View File

@@ -0,0 +1,32 @@
package download_token
import (
"databasus-backend/internal/config"
"log/slog"
"time"
)
// DownloadTokenBackgroundService periodically deletes expired download
// tokens so the download_tokens table does not grow without bound.
type DownloadTokenBackgroundService struct {
	downloadTokenService *DownloadTokenService
	logger               *slog.Logger
}
// Run executes the cleanup loop until application shutdown is requested.
// Expired tokens are purged roughly once per minute. Intended to be started
// once in its own goroutine at application boot.
func (s *DownloadTokenBackgroundService) Run() {
	s.logger.Info("Starting download token cleanup background service")

	const cleanupInterval = 1 * time.Minute

	for {
		if config.IsShouldShutdown() {
			return
		}

		if err := s.downloadTokenService.CleanExpiredTokens(); err != nil {
			s.logger.Error("Failed to clean expired download tokens", "error", err)
		}

		// Sleep in short slices instead of one time.Sleep(cleanupInterval):
		// a single long sleep would delay shutdown detection by up to a full
		// minute after config.IsShouldShutdown() flips.
		deadline := time.Now().Add(cleanupInterval)
		for time.Now().Before(deadline) {
			if config.IsShouldShutdown() {
				return
			}
			time.Sleep(1 * time.Second)
		}
	}
}

View File

@@ -0,0 +1,25 @@
package download_token
import (
"databasus-backend/internal/util/logger"
)
// Package-level singletons wiring the repository, service, and background
// cleanup service together at init time. The getters below expose the
// shared instances to other packages.
var downloadTokenRepository = &DownloadTokenRepository{}

var downloadTokenService = &DownloadTokenService{
	downloadTokenRepository,
	logger.GetLogger(),
}

var downloadTokenBackgroundService = &DownloadTokenBackgroundService{
	downloadTokenService,
	logger.GetLogger(),
}

// GetDownloadTokenService returns the shared download token service.
func GetDownloadTokenService() *DownloadTokenService {
	return downloadTokenService
}

// GetDownloadTokenBackgroundService returns the shared background service
// that periodically purges expired tokens.
func GetDownloadTokenBackgroundService() *DownloadTokenBackgroundService {
	return downloadTokenBackgroundService
}

View File

@@ -0,0 +1,21 @@
package download_token
import (
"time"
"github.com/google/uuid"
)
// DownloadToken is a short-lived, single-use credential that authorizes
// downloading one specific backup file without a Bearer token.
type DownloadToken struct {
	ID uuid.UUID `json:"id" gorm:"column:id;primaryKey"`
	// Token is the opaque random value handed to the client; unique per row.
	Token string `json:"token" gorm:"column:token;uniqueIndex;not null"`
	// BackupID binds the token to exactly one backup.
	BackupID uuid.UUID `json:"backupId" gorm:"column:backup_id;not null"`
	// UserID records who requested the token (used for download audit logs).
	UserID uuid.UUID `json:"userId" gorm:"column:user_id;not null"`
	// ExpiresAt is the instant after which the token is rejected.
	ExpiresAt time.Time `json:"expiresAt" gorm:"column:expires_at;not null"`
	// Used flips to true on first successful validation (single-use).
	Used      bool      `json:"used" gorm:"column:used;not null;default:false"`
	CreatedAt time.Time `json:"createdAt" gorm:"column:created_at;not null"`
}

// TableName tells GORM which table stores download tokens.
func (DownloadToken) TableName() string {
	return "download_tokens"
}

View File

@@ -0,0 +1,60 @@
package download_token
import (
	"crypto/rand"
	"databasus-backend/internal/storage"
	"encoding/base64"
	"errors"
	"time"

	"github.com/google/uuid"
	"gorm.io/gorm"
)
// DownloadTokenRepository provides GORM-backed persistence for download
// tokens. It is stateless; every method goes through storage.GetDb().
type DownloadTokenRepository struct{}

// Create inserts a new token row, generating the ID and the CreatedAt
// timestamp when the caller left them zero-valued.
func (r *DownloadTokenRepository) Create(token *DownloadToken) error {
	if token.ID == uuid.Nil {
		token.ID = uuid.New()
	}
	if token.CreatedAt.IsZero() {
		token.CreatedAt = time.Now().UTC()
	}
	return storage.GetDb().Create(token).Error
}
// FindByToken looks up a download token by its opaque token string.
// Returns (nil, nil) when no matching row exists, so callers can tell
// "not found" apart from a real storage error.
func (r *DownloadTokenRepository) FindByToken(token string) (*DownloadToken, error) {
	var downloadToken DownloadToken
	err := storage.GetDb().
		Where("token = ?", token).
		First(&downloadToken).Error
	if err != nil {
		// errors.Is unwraps wrapped errors; a plain == comparison misses
		// gorm.ErrRecordNotFound when it arrives wrapped (GORM's documented
		// error-handling idiom is errors.Is).
		if errors.Is(err, gorm.ErrRecordNotFound) {
			return nil, nil
		}
		return nil, err
	}
	return &downloadToken, nil
}
// Update persists all fields of an existing token row (GORM Save).
func (r *DownloadTokenRepository) Update(token *DownloadToken) error {
	return storage.GetDb().Save(token).Error
}

// DeleteExpired removes every token whose expiry is before the given
// instant, regardless of whether it was ever used.
func (r *DownloadTokenRepository) DeleteExpired(before time.Time) error {
	return storage.GetDb().
		Where("expires_at < ?", before).
		Delete(&DownloadToken{}).Error
}
// GenerateSecureToken returns a cryptographically random, URL-safe token:
// 32 bytes (256 bits) of entropy encoded as base64url (44 characters).
// It panics only if the operating system's entropy source is unavailable,
// which is unrecoverable anyway.
func GenerateSecureToken() string {
	raw := make([]byte, 32)
	_, err := rand.Read(raw)
	if err != nil {
		panic("failed to generate secure random token: " + err.Error())
	}
	return base64.URLEncoding.EncodeToString(raw)
}

View File

@@ -0,0 +1,69 @@
package download_token
import (
"errors"
"log/slog"
"time"
"github.com/google/uuid"
)
// DownloadTokenService implements generation, validation/consumption, and
// cleanup of short-lived single-use backup download tokens.
type DownloadTokenService struct {
	repository *DownloadTokenRepository
	logger     *slog.Logger
}
// Generate creates and persists a new download token bound to the given
// backup and user. The token expires 5 minutes after creation and is meant
// to be used exactly once. Returns the opaque token string for the client.
func (s *DownloadTokenService) Generate(backupID, userID uuid.UUID) (string, error) {
	token := GenerateSecureToken()

	downloadToken := &DownloadToken{
		Token:     token,
		BackupID:  backupID,
		UserID:    userID,
		ExpiresAt: time.Now().UTC().Add(5 * time.Minute),
		Used:      false,
	}

	if err := s.repository.Create(downloadToken); err != nil {
		return "", err
	}

	// Log only the IDs — never the token value itself.
	s.logger.Info("Generated download token", "backupId", backupID, "userId", userID)

	return token, nil
}
// ValidateAndConsume validates a download token and marks it used so it
// cannot be redeemed twice. It fails when the token is unknown, already
// used, or past its expiry.
//
// NOTE(review): the check-then-update is not atomic at the database level,
// so two concurrent requests could in theory both pass the Used check —
// confirm whether a conditional UPDATE ... WHERE used = false is needed.
func (s *DownloadTokenService) ValidateAndConsume(token string) (*DownloadToken, error) {
	dt, err := s.repository.FindByToken(token)
	if err != nil {
		return nil, err
	}
	// FindByToken returns (nil, nil) when no row matches.
	if dt == nil {
		return nil, errors.New("invalid token")
	}
	if dt.Used {
		return nil, errors.New("token already used")
	}
	if time.Now().UTC().After(dt.ExpiresAt) {
		return nil, errors.New("token expired")
	}

	dt.Used = true
	if err := s.repository.Update(dt); err != nil {
		s.logger.Error("Failed to mark token as used", "error", err)
		// If the Used flag cannot be persisted the single-use guarantee is
		// broken, so refuse the download instead of silently continuing.
		return nil, err
	}

	s.logger.Info("Token validated and consumed", "backupId", dt.BackupID)
	return dt, nil
}
// CleanExpiredTokens deletes every token whose expiry lies in the past.
// Called periodically by the background cleanup service.
func (s *DownloadTokenService) CleanExpiredTokens() error {
	err := s.repository.DeleteExpired(time.Now().UTC())
	if err != nil {
		return err
	}
	s.logger.Debug("Cleaned expired download tokens")
	return nil
}

View File

@@ -3,6 +3,8 @@ package backups
import (
"databasus-backend/internal/features/backups/backups/encryption"
"io"
"github.com/google/uuid"
)
type GetBackupsRequest struct {
@@ -18,6 +20,12 @@ type GetBackupsResponse struct {
Offset int `json:"offset"`
}
// GenerateDownloadTokenResponse is the payload returned by the
// download-token endpoint: the opaque single-use token, the suggested
// filename for the backup file, and the backup the token is bound to.
type GenerateDownloadTokenResponse struct {
	Token    string    `json:"token"`
	Filename string    `json:"filename"`
	BackupID uuid.UUID `json:"backupId"`
}
type decryptionReaderCloser struct {
*encryption.DecryptionReader
baseReader io.ReadCloser

View File

@@ -12,6 +12,7 @@ import (
"time"
audit_logs "databasus-backend/internal/features/audit_logs"
"databasus-backend/internal/features/backups/backups/download_token"
"databasus-backend/internal/features/backups/backups/encryption"
backups_config "databasus-backend/internal/features/backups/config"
"databasus-backend/internal/features/databases"
@@ -44,6 +45,7 @@ type BackupService struct {
workspaceService *workspaces_services.WorkspaceService
auditLogService *audit_logs.AuditLogService
backupContextManager *BackupContextManager
downloadTokenService *download_token.DownloadTokenService
}
func (s *BackupService) AddBackupRemoveListener(listener BackupRemoveListener) {
@@ -683,3 +685,113 @@ func (s *BackupService) getBackupReader(backupID uuid.UUID) (io.ReadCloser, erro
fileReader,
}, nil
}
// GenerateDownloadToken authorizes the user against the backup's workspace
// and, if allowed, mints a short-lived single-use token for downloading the
// backup file. An audit log entry is written for the token generation; the
// actual download is audited separately when the file is streamed.
//
// Returns an error when the backup or database cannot be loaded, when the
// database has no workspace, or when the user lacks workspace access.
func (s *BackupService) GenerateDownloadToken(
	user *users_models.User,
	backupID uuid.UUID,
) (*GenerateDownloadTokenResponse, error) {
	backup, err := s.backupRepository.FindByID(backupID)
	if err != nil {
		return nil, err
	}

	database, err := s.databaseService.GetDatabaseByID(backup.DatabaseID)
	if err != nil {
		return nil, err
	}

	if database.WorkspaceID == nil {
		return nil, errors.New("cannot download backup for database without workspace")
	}

	// Only workspace membership is required; the member's role is ignored.
	canAccess, _, err := s.workspaceService.CanUserAccessWorkspace(*database.WorkspaceID, user)
	if err != nil {
		return nil, err
	}
	if !canAccess {
		return nil, errors.New("insufficient permissions to download backup for this database")
	}

	token, err := s.downloadTokenService.Generate(backupID, user.ID)
	if err != nil {
		return nil, err
	}

	// Filename is computed now so the client can display it before the
	// (unauthenticated, token-based) download request.
	filename := s.generateBackupFilename(backup, database)

	s.auditLogService.WriteAuditLog(
		fmt.Sprintf("Download token generated for backup of database: %s", database.Name),
		&user.ID,
		database.WorkspaceID,
	)

	return &GenerateDownloadTokenResponse{
		Token:    token,
		Filename: filename,
		BackupID: backupID,
	}, nil
}
// ValidateDownloadToken validates and consumes a single-use download token,
// returning its record on success. Thin delegation to the token service.
func (s *BackupService) ValidateDownloadToken(token string) (*download_token.DownloadToken, error) {
	return s.downloadTokenService.ValidateAndConsume(token)
}
// GetBackupFileWithoutAuth opens a reader for the backup file, performing
// NO permission checks. Callers MUST have authenticated by other means
// (e.g. a validated download token) before invoking this. The returned
// io.ReadCloser must be closed by the caller.
func (s *BackupService) GetBackupFileWithoutAuth(
	backupID uuid.UUID,
) (io.ReadCloser, *Backup, *databases.Database, error) {
	backup, err := s.backupRepository.FindByID(backupID)
	if err != nil {
		return nil, nil, nil, err
	}

	database, err := s.databaseService.GetDatabaseByID(backup.DatabaseID)
	if err != nil {
		return nil, nil, nil, err
	}

	reader, err := s.getBackupReader(backupID)
	if err != nil {
		return nil, nil, nil, err
	}

	return reader, backup, database, nil
}
// WriteAuditLogForDownload records in the workspace audit log that the
// given user successfully downloaded the backup file.
func (s *BackupService) WriteAuditLogForDownload(
	userID uuid.UUID,
	backup *Backup,
	database *databases.Database,
) {
	message := fmt.Sprintf(
		"Backup file downloaded for database: %s (ID: %s)",
		database.Name,
		backup.ID.String(),
	)
	s.auditLogService.WriteAuditLog(message, &userID, database.WorkspaceID)
}
// generateBackupFilename builds "<name>_backup_<timestamp><ext>" from a
// filesystem-safe database name, the backup's creation time, and an
// extension matching the database engine.
func (s *BackupService) generateBackupFilename(
	backup *Backup,
	database *databases.Database,
) string {
	return fmt.Sprintf(
		"%s_backup_%s%s",
		sanitizeFilename(database.Name),
		backup.CreatedAt.Format("2006-01-02_15-04-05"),
		s.getBackupExtension(database.Type),
	)
}
// getBackupExtension maps a database engine type to the file extension its
// backup artifact uses; unknown engines fall back to ".backup".
func (s *BackupService) getBackupExtension(dbType databases.DatabaseType) string {
	switch dbType {
	case databases.DatabaseTypePostgres:
		return ".dump"
	case databases.DatabaseTypeMongodb:
		return ".archive"
	case databases.DatabaseTypeMysql, databases.DatabaseTypeMariadb:
		return ".sql.zst"
	}
	return ".backup"
}

View File

@@ -65,6 +65,7 @@ func Test_BackupExecuted_NotificationSent(t *testing.T) {
workspaces_services.GetWorkspaceService(),
nil,
NewBackupContextManager(),
nil,
}
// Set up expectations
@@ -113,6 +114,7 @@ func Test_BackupExecuted_NotificationSent(t *testing.T) {
workspaces_services.GetWorkspaceService(),
nil,
NewBackupContextManager(),
nil,
}
backupService.MakeBackup(database.ID, true)
@@ -138,6 +140,7 @@ func Test_BackupExecuted_NotificationSent(t *testing.T) {
workspaces_services.GetWorkspaceService(),
nil,
NewBackupContextManager(),
nil,
}
// capture arguments

View File

@@ -14,13 +14,19 @@ import (
)
func CreateTestRouter() *gin.Engine {
return workspaces_testing.CreateTestRouter(
router := workspaces_testing.CreateTestRouter(
workspaces_controllers.GetWorkspaceController(),
workspaces_controllers.GetMembershipController(),
databases.GetDatabaseController(),
backups_config.GetBackupConfigController(),
GetBackupController(),
)
// Register public routes (no auth required - token-based)
v1 := router.Group("/api/v1")
GetBackupController().RegisterPublicRoutes(v1)
return router
}
// WaitForBackupCompletion waits for a new backup to be created and completed (or failed)

View File

@@ -135,7 +135,14 @@ func (uc *CreatePostgresqlBackupUsecase) streamToStorage(
cmd := exec.CommandContext(ctx, pgBin, args...)
uc.logger.Info("Executing PostgreSQL backup command", "command", cmd.String())
if err := uc.setupPgEnvironment(cmd, pgpassFile, db.Postgresql.IsHttps, password, db.Postgresql.CpuCount, pgBin); err != nil {
if err := uc.setupPgEnvironment(
cmd,
pgpassFile,
db.Postgresql.IsHttps,
password,
db.Postgresql.CpuCount,
pgBin,
); err != nil {
return nil, err
}

View File

@@ -97,7 +97,13 @@ func Test_SaveBackupConfig_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
@@ -244,7 +250,13 @@ func Test_GetBackupConfigByDbID_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
} else {
nonMember := users_testing.CreateTestUser(users_enums.UserRoleMember)

View File

@@ -151,7 +151,13 @@ func Test_CreateDatabase_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
@@ -263,7 +269,13 @@ func Test_UpdateDatabase_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
@@ -365,7 +377,13 @@ func Test_DeleteDatabase_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
@@ -430,7 +448,13 @@ func Test_GetDatabase_PermissionsEnforced(t *testing.T) {
testUser = admin.Token
} else if tt.userRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.userRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.userRole,
owner.Token,
router,
)
testUser = member.Token
} else {
nonMember := users_testing.CreateTestUser(users_enums.UserRoleMember)
@@ -654,7 +678,13 @@ func Test_CopyDatabase_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}

View File

@@ -97,7 +97,13 @@ func (m *MongodbDatabase) TestConnection(
}
m.Version = detectedVersion
if err := checkBackupPermissions(ctx, client, m.Username, m.Database, m.AuthDatabase); err != nil {
if err := checkBackupPermissions(
ctx,
client,
m.Username,
m.Database,
m.AuthDatabase,
); err != nil {
return err
}

View File

@@ -631,7 +631,10 @@ func (s *DatabaseService) IsUserReadOnly(
usingDatabase = existingDatabase
} else {
if database.WorkspaceID != nil {
canAccess, _, err := s.workspaceService.CanUserAccessWorkspace(*database.WorkspaceID, user)
canAccess, _, err := s.workspaceService.CanUserAccessWorkspace(
*database.WorkspaceID,
user,
)
if err != nil {
return false, nil, err
}

View File

@@ -109,7 +109,13 @@ func Test_GetAttemptsByDatabase_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
} else {
nonMember := users_testing.CreateTestUser(users_enums.UserRoleMember)

View File

@@ -88,7 +88,13 @@ func Test_SaveHealthcheckConfig_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
@@ -226,7 +232,13 @@ func Test_GetHealthcheckConfig_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
} else {
nonMember := users_testing.CreateTestUser(users_enums.UserRoleMember)

View File

@@ -263,7 +263,12 @@ func (c *NotifierController) TransferNotifierToWorkspace(ctx *gin.Context) {
return
}
if err := c.notifierService.TransferNotifierToWorkspace(user, id, request.TargetWorkspaceID, nil); err != nil {
if err := c.notifierService.TransferNotifierToWorkspace(
user,
id,
request.TargetWorkspaceID,
nil,
); err != nil {
if errors.Is(err, ErrInsufficientPermissionsInSourceWorkspace) ||
errors.Is(err, ErrInsufficientPermissionsInTargetWorkspace) {
ctx.JSON(http.StatusForbidden, gin.H{"error": err.Error()})

View File

@@ -1050,8 +1050,20 @@ func Test_TransferNotifier_PermissionsEnforced(t *testing.T) {
testUserToken = admin.Token
} else if tt.sourceRole != nil {
testUser := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(sourceWorkspace, testUser, *tt.sourceRole, sourceOwner.Token, router)
workspaces_testing.AddMemberToWorkspace(targetWorkspace, testUser, *tt.targetRole, targetOwner.Token, router)
workspaces_testing.AddMemberToWorkspace(
sourceWorkspace,
testUser,
*tt.sourceRole,
sourceOwner.Token,
router,
)
workspaces_testing.AddMemberToWorkspace(
targetWorkspace,
testUser,
*tt.targetRole,
targetOwner.Token,
router,
)
testUserToken = testUser.Token
}

View File

@@ -290,7 +290,12 @@ func Test_RestoreBackup_DiskSpaceValidation(t *testing.T) {
},
}
} else {
mysqlDB := createTestMySQLDatabase("Test MySQL DB", workspace.ID, owner.Token, router)
mysqlDB := createTestMySQLDatabase(
"Test MySQL DB",
workspace.ID,
owner.Token,
router,
)
storage := createTestStorage(workspace.ID)
configService := backups_config.GetBackupConfigService()
@@ -530,7 +535,13 @@ func createTestBackup(
dummyContent := []byte("dummy backup content for testing")
reader := strings.NewReader(string(dummyContent))
logger := slog.New(slog.NewTextHandler(io.Discard, nil))
if err := storages[0].SaveFile(context.Background(), fieldEncryptor, logger, backup.ID, reader); err != nil {
if err := storages[0].SaveFile(
context.Background(),
fieldEncryptor,
logger,
backup.ID,
reader,
); err != nil {
panic(fmt.Sprintf("Failed to create test backup file: %v", err))
}

View File

@@ -263,7 +263,12 @@ func (c *StorageController) TransferStorageToWorkspace(ctx *gin.Context) {
return
}
if err := c.storageService.TransferStorageToWorkspace(user, id, request.TargetWorkspaceID, nil); err != nil {
if err := c.storageService.TransferStorageToWorkspace(
user,
id,
request.TargetWorkspaceID,
nil,
); err != nil {
if errors.Is(err, ErrInsufficientPermissionsInSourceWorkspace) ||
errors.Is(err, ErrInsufficientPermissionsInTargetWorkspace) {
ctx.JSON(http.StatusForbidden, gin.H{"error": err.Error()})

View File

@@ -1071,8 +1071,20 @@ func Test_TransferStorage_PermissionsEnforced(t *testing.T) {
testUserToken = admin.Token
} else if tt.sourceRole != nil {
testUser := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(sourceWorkspace, testUser, *tt.sourceRole, sourceOwner.Token, router)
workspaces_testing.AddMemberToWorkspace(targetWorkspace, testUser, *tt.targetRole, targetOwner.Token, router)
workspaces_testing.AddMemberToWorkspace(
sourceWorkspace,
testUser,
*tt.sourceRole,
sourceOwner.Token,
router,
)
workspaces_testing.AddMemberToWorkspace(
targetWorkspace,
testUser,
*tt.targetRole,
targetOwner.Token,
router,
)
testUserToken = testUser.Token
}

View File

@@ -56,11 +56,17 @@ func (s *UserService) SignUp(request *users_dto.SignUpRequestDTO) error {
// If user exists with INVITED status, activate them and set password
if existingUser != nil && existingUser.Status == users_enums.UserStatusInvited {
if err := s.userRepository.UpdateUserPassword(existingUser.ID, hashedPasswordStr); err != nil {
if err := s.userRepository.UpdateUserPassword(
existingUser.ID,
hashedPasswordStr,
); err != nil {
return fmt.Errorf("failed to set password: %w", err)
}
if err := s.userRepository.UpdateUserStatus(existingUser.ID, users_enums.UserStatusActive); err != nil {
if err := s.userRepository.UpdateUserStatus(
existingUser.ID,
users_enums.UserStatusActive,
); err != nil {
return fmt.Errorf("failed to activate user: %w", err)
}
@@ -635,7 +641,10 @@ func (s *UserService) getOrCreateUserFromOAuth(
if userByEmail != nil {
if userByEmail.Status == users_enums.UserStatusInvited {
if err := s.userRepository.UpdateUserStatus(userByEmail.ID, users_enums.UserStatusActive); err != nil {
if err := s.userRepository.UpdateUserStatus(
userByEmail.ID,
users_enums.UserStatusActive,
); err != nil {
return nil, fmt.Errorf("failed to activate user: %w", err)
}

View File

@@ -161,7 +161,12 @@ func (c *MembershipController) ChangeMemberRole(ctx *gin.Context) {
return
}
if err := c.membershipService.ChangeMemberRole(workspaceID, userID, &request, user); err != nil {
if err := c.membershipService.ChangeMemberRole(
workspaceID,
userID,
&request,
user,
); err != nil {
if errors.Is(err, workspaces_errors.ErrInsufficientPermissionsToManageMembers) ||
errors.Is(err, workspaces_errors.ErrOnlyOwnerCanAddManageAdmins) {
ctx.JSON(http.StatusForbidden, gin.H{"error": err.Error()})

View File

@@ -123,7 +123,11 @@ func Test_GetWorkspaceMembers_PermissionsEnforced(t *testing.T) {
"Bearer "+testUserToken,
tt.expectedStatusCode,
)
assert.Contains(t, string(resp.Body), "insufficient permissions to view workspace members")
assert.Contains(
t,
string(resp.Body),
"insufficient permissions to view workspace members",
)
}
})
}
@@ -1202,7 +1206,11 @@ func Test_TransferWorkspaceOwnership_PermissionsEnforced(t *testing.T) {
if tt.expectSuccess {
assert.Contains(t, string(resp.Body), "Ownership transferred successfully")
} else {
assert.Contains(t, string(resp.Body), "only workspace owner or admin can transfer ownership")
assert.Contains(
t,
string(resp.Body),
"only workspace owner or admin can transfer ownership",
)
}
})
}

View File

@@ -100,7 +100,11 @@ func Test_CreateWorkspace_PermissionsEnforced(t *testing.T) {
request,
tt.expectedStatusCode,
)
assert.Contains(t, string(resp.Body), "insufficient permissions to create workspaces")
assert.Contains(
t,
string(resp.Body),
"insufficient permissions to create workspaces",
)
}
})
}
@@ -263,7 +267,13 @@ func Test_GetSingleWorkspace_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
} else {
nonMember := users_testing.CreateTestUser(users_enums.UserRoleMember)
@@ -365,7 +375,13 @@ func Test_UpdateWorkspace_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
@@ -396,7 +412,11 @@ func Test_UpdateWorkspace_PermissionsEnforced(t *testing.T) {
updateRequest,
tt.expectedStatusCode,
)
assert.Contains(t, string(resp.Body), "insufficient permissions to update workspace")
assert.Contains(
t,
string(resp.Body),
"insufficient permissions to update workspace",
)
}
})
}
@@ -461,7 +481,13 @@ func Test_DeleteWorkspace_PermissionsEnforced(t *testing.T) {
testUserToken = owner.Token
} else if tt.workspaceRole != nil {
member := users_testing.CreateTestUser(users_enums.UserRoleMember)
workspaces_testing.AddMemberToWorkspace(workspace, member, *tt.workspaceRole, owner.Token, router)
workspaces_testing.AddMemberToWorkspace(
workspace,
member,
*tt.workspaceRole,
owner.Token,
router,
)
testUserToken = member.Token
}
@@ -475,7 +501,11 @@ func Test_DeleteWorkspace_PermissionsEnforced(t *testing.T) {
if tt.expectSuccess {
assert.Contains(t, string(resp.Body), "Workspace deleted successfully")
} else {
assert.Contains(t, string(resp.Body), "only workspace owner or admin can delete workspace")
assert.Contains(
t,
string(resp.Body),
"only workspace owner or admin can delete workspace",
)
}
})
}

View File

@@ -173,7 +173,11 @@ func (s *MembershipService) ChangeMemberRole(
return workspaces_errors.ErrUserNotFound
}
if err := s.membershipRepository.UpdateMemberRole(memberUserID, workspaceID, request.Role); err != nil {
if err := s.membershipRepository.UpdateMemberRole(
memberUserID,
workspaceID,
request.Role,
); err != nil {
return fmt.Errorf("failed to update member role: %w", err)
}
@@ -283,11 +287,19 @@ func (s *MembershipService) TransferOwnership(
return workspaces_errors.ErrNoCurrentWorkspaceOwner
}
if err := s.membershipRepository.UpdateMemberRole(newOwner.ID, workspaceID, users_enums.WorkspaceRoleOwner); err != nil {
if err := s.membershipRepository.UpdateMemberRole(
newOwner.ID,
workspaceID,
users_enums.WorkspaceRoleOwner,
); err != nil {
return fmt.Errorf("failed to update new owner role: %w", err)
}
if err := s.membershipRepository.UpdateMemberRole(currentOwner.UserID, workspaceID, users_enums.WorkspaceRoleAdmin); err != nil {
if err := s.membershipRepository.UpdateMemberRole(
currentOwner.UserID,
workspaceID,
users_enums.WorkspaceRoleAdmin,
); err != nil {
return fmt.Errorf("failed to update previous owner role: %w", err)
}

View File

@@ -0,0 +1,44 @@
-- +goose Up
-- +goose StatementBegin
-- Short-lived, single-use tokens authorizing a direct (link-based) download
-- of a backup file. "used" marks a token as consumed; "expires_at" bounds its
-- lifetime.
CREATE TABLE download_tokens (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    token TEXT NOT NULL UNIQUE,
    backup_id UUID NOT NULL,
    user_id UUID NOT NULL,
    expires_at TIMESTAMPTZ NOT NULL,
    used BOOLEAN NOT NULL DEFAULT FALSE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Tokens are deleted together with their backup or owning user.
ALTER TABLE download_tokens
    ADD CONSTRAINT fk_download_tokens_backup_id
    FOREIGN KEY (backup_id)
    REFERENCES backups (id)
    ON DELETE CASCADE;

ALTER TABLE download_tokens
    ADD CONSTRAINT fk_download_tokens_user_id
    FOREIGN KEY (user_id)
    REFERENCES users (id)
    ON DELETE CASCADE;

-- NOTE: no separate index on "token" — the UNIQUE constraint above already
-- creates a unique index on that column in PostgreSQL, so an additional
-- idx_download_tokens_token would be redundant and only slow down writes.
CREATE INDEX idx_download_tokens_expires_at ON download_tokens (expires_at);
CREATE INDEX idx_download_tokens_backup_id ON download_tokens (backup_id);
-- +goose StatementEnd

-- +goose Down
-- +goose StatementBegin
DROP INDEX IF EXISTS idx_download_tokens_backup_id;
DROP INDEX IF EXISTS idx_download_tokens_expires_at;
ALTER TABLE download_tokens DROP CONSTRAINT IF EXISTS fk_download_tokens_user_id;
ALTER TABLE download_tokens DROP CONSTRAINT IF EXISTS fk_download_tokens_backup_id;
DROP TABLE IF EXISTS download_tokens;
-- +goose StatementEnd

View File

@@ -9,13 +9,13 @@ echo "Installing PostgreSQL, MySQL, MariaDB and MongoDB client tools for Linux (
echo
# Check if running on supported system
if ! command -v apt-get &> /dev/null; then
if ! command -v apt-get > /dev/null 2>&1; then
echo "Error: This script requires apt-get (Debian/Ubuntu-like system)"
exit 1
fi
# Check if running as root or with sudo
if [[ $EUID -eq 0 ]]; then
if [ $EUID -eq 0 ]; then
SUDO=""
else
SUDO="sudo"
@@ -107,6 +107,12 @@ for version in $mysql_versions; do
version_dir="$MYSQL_DIR/mysql-$version"
mkdir -p "$version_dir/bin"
# Skip if already exists
if [ -f "$version_dir/bin/mysqldump" ]; then
echo " MySQL $version already installed, skipping..."
continue
fi
# Download MySQL client tools from official CDN
# Note: 5.7 is in Downloads, 8.0, 8.4 specific versions are in archives, 9.5 is in MySQL-9.5
case $version in
@@ -132,11 +138,14 @@ for version in $mysql_versions; do
wget -q "$MYSQL_URL" -O "mysql-$version.tar.gz" || wget -q "$MYSQL_URL" -O "mysql-$version.tar.xz"
echo " Extracting MySQL $version..."
if [[ "$MYSQL_URL" == *.xz ]]; then
tar -xJf "mysql-$version.tar.xz" 2>/dev/null || tar -xJf "mysql-$version.tar.gz" 2>/dev/null
else
tar -xzf "mysql-$version.tar.gz" 2>/dev/null || tar -xzf "mysql-$version.tar.xz" 2>/dev/null
fi
case "$MYSQL_URL" in
*.xz)
tar -xJf "mysql-$version.tar.xz" 2>/dev/null || tar -xJf "mysql-$version.tar.gz" 2>/dev/null
;;
*)
tar -xzf "mysql-$version.tar.gz" 2>/dev/null || tar -xzf "mysql-$version.tar.xz" 2>/dev/null
;;
esac
# Find extracted directory
EXTRACTED_DIR=$(ls -d mysql-*/ 2>/dev/null | head -1)
@@ -175,12 +184,7 @@ echo "Installing MariaDB client tools to: $MARIADB_DIR"
# Install dependencies
$SUDO apt-get install -y -qq apt-transport-https curl
# MariaDB versions to install with their URLs
declare -A MARIADB_URLS=(
["10.6"]="https://archive.mariadb.org/mariadb-10.6.21/bintar-linux-systemd-x86_64/mariadb-10.6.21-linux-systemd-x86_64.tar.gz"
["12.1"]="https://archive.mariadb.org/mariadb-12.1.2/bintar-linux-systemd-x86_64/mariadb-12.1.2-linux-systemd-x86_64.tar.gz"
)
# MariaDB versions to install
mariadb_versions="10.6 12.1"
for version in $mariadb_versions; do
@@ -195,7 +199,19 @@ for version in $mariadb_versions; do
continue
fi
url=${MARIADB_URLS[$version]}
# Get URL based on version
case "$version" in
"10.6")
url="https://archive.mariadb.org/mariadb-10.6.21/bintar-linux-systemd-x86_64/mariadb-10.6.21-linux-systemd-x86_64.tar.gz"
;;
"12.1")
url="https://archive.mariadb.org/mariadb-12.1.2/bintar-linux-systemd-x86_64/mariadb-12.1.2-linux-systemd-x86_64.tar.gz"
;;
*)
echo " Warning: Unknown MariaDB version $version"
continue
;;
esac
TEMP_DIR="/tmp/mariadb_install_$version"
mkdir -p "$TEMP_DIR"
@@ -238,43 +254,48 @@ mkdir -p "$MONGODB_DIR/bin"
echo "Installing MongoDB Database Tools to: $MONGODB_DIR"
# MongoDB Database Tools are backward compatible - single version supports all servers (4.0-8.0)
# Detect architecture
ARCH=$(uname -m)
if [ "$ARCH" = "x86_64" ]; then
MONGODB_TOOLS_URL="https://fastdl.mongodb.org/tools/db/mongodb-database-tools-debian12-x86_64-100.10.0.deb"
elif [ "$ARCH" = "aarch64" ]; then
MONGODB_TOOLS_URL="https://fastdl.mongodb.org/tools/db/mongodb-database-tools-debian12-aarch64-100.10.0.deb"
# Skip if already installed
if [ -f "$MONGODB_DIR/bin/mongodump" ] && [ -L "$MONGODB_DIR/bin/mongodump" ]; then
echo "MongoDB Database Tools already installed, skipping..."
else
echo "Warning: Unsupported architecture $ARCH for MongoDB Database Tools"
MONGODB_TOOLS_URL=""
fi
if [ -n "$MONGODB_TOOLS_URL" ]; then
TEMP_DIR="/tmp/mongodb_install"
mkdir -p "$TEMP_DIR"
cd "$TEMP_DIR"
echo "Downloading MongoDB Database Tools..."
wget -q "$MONGODB_TOOLS_URL" -O mongodb-database-tools.deb || {
echo "Warning: Could not download MongoDB Database Tools"
cd - >/dev/null
rm -rf "$TEMP_DIR"
}
if [ -f "mongodb-database-tools.deb" ]; then
echo "Installing MongoDB Database Tools..."
$SUDO dpkg -i mongodb-database-tools.deb 2>/dev/null || $SUDO apt-get install -f -y -qq
# Create symlinks to tools directory
ln -sf /usr/bin/mongodump "$MONGODB_DIR/bin/mongodump"
ln -sf /usr/bin/mongorestore "$MONGODB_DIR/bin/mongorestore"
echo "MongoDB Database Tools installed successfully"
# MongoDB Database Tools are backward compatible - single version supports all servers (4.0-8.0)
# Detect architecture
ARCH=$(uname -m)
if [ "$ARCH" = "x86_64" ]; then
MONGODB_TOOLS_URL="https://fastdl.mongodb.org/tools/db/mongodb-database-tools-debian12-x86_64-100.10.0.deb"
elif [ "$ARCH" = "aarch64" ]; then
MONGODB_TOOLS_URL="https://fastdl.mongodb.org/tools/db/mongodb-database-tools-debian12-aarch64-100.10.0.deb"
else
echo "Warning: Unsupported architecture $ARCH for MongoDB Database Tools"
MONGODB_TOOLS_URL=""
fi
cd - >/dev/null
rm -rf "$TEMP_DIR"
if [ -n "$MONGODB_TOOLS_URL" ]; then
TEMP_DIR="/tmp/mongodb_install"
mkdir -p "$TEMP_DIR"
cd "$TEMP_DIR"
echo "Downloading MongoDB Database Tools..."
if ! wget -q "$MONGODB_TOOLS_URL" -O mongodb-database-tools.deb; then
echo "Warning: Could not download MongoDB Database Tools"
cd - >/dev/null
rm -rf "$TEMP_DIR"
else
if [ -f "mongodb-database-tools.deb" ]; then
echo "Installing MongoDB Database Tools..."
$SUDO dpkg -i mongodb-database-tools.deb 2>/dev/null || $SUDO apt-get install -f -y -qq
# Create symlinks to tools directory
ln -sf /usr/bin/mongodump "$MONGODB_DIR/bin/mongodump"
ln -sf /usr/bin/mongorestore "$MONGODB_DIR/bin/mongorestore"
echo "MongoDB Database Tools installed successfully"
fi
cd - >/dev/null
rm -rf "$TEMP_DIR"
fi
fi
fi
echo

View File

@@ -29,23 +29,25 @@ export const backupsApi = {
return apiHelper.fetchDeleteRaw(`${getApplicationServer()}/api/v1/backups/${id}`);
},
async downloadBackup(id: string): Promise<{ blob: Blob; filename: string }> {
const result = await apiHelper.fetchGetBlobWithHeaders(
`${getApplicationServer()}/api/v1/backups/${id}/file`,
);
async downloadBackup(id: string): Promise<void> {
// Generate short-lived download token
const tokenResponse = await apiHelper.fetchPostJson<{
token: string;
filename: string;
backupId: string;
}>(`${getApplicationServer()}/api/v1/backups/${id}/download-token`, new RequestOptions());
// Extract filename from Content-Disposition header
const contentDisposition = result.headers.get('Content-Disposition');
let filename = `backup_${id}.backup`; // fallback filename
// Create direct download link with token
const downloadUrl = `${getApplicationServer()}/api/v1/backups/${id}/file?token=${tokenResponse.token}`;
if (contentDisposition) {
const filenameMatch = contentDisposition.match(/filename="?(.+?)"?$/);
if (filenameMatch && filenameMatch[1]) {
filename = filenameMatch[1];
}
}
const link = document.createElement('a');
link.href = downloadUrl;
link.download = tokenResponse.filename;
link.style.display = 'none';
return { blob: result.blob, filename };
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
},
async cancelBackup(id: string) {

View File

@@ -2,4 +2,5 @@ export interface GoogleDriveStorage {
clientId: string;
clientSecret: string;
tokenJson?: string;
useLocalRedirect?: boolean;
}

View File

@@ -64,21 +64,7 @@ export const BackupsComponent = ({ database, isCanManageDBs, scrollContainerRef
const downloadBackup = async (backupId: string) => {
try {
const { blob, filename } = await backupsApi.downloadBackup(backupId);
// Create a download link
const url = window.URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = url;
link.download = filename;
// Trigger download
document.body.appendChild(link);
link.click();
// Cleanup
document.body.removeChild(link);
window.URL.revokeObjectURL(url);
await backupsApi.downloadBackup(backupId);
} catch (e) {
alert((e as Error).message);
} finally {

View File

@@ -1,4 +1,6 @@
import { Button, Input } from 'antd';
import { DownOutlined, InfoCircleOutlined, UpOutlined } from '@ant-design/icons';
import { Button, Checkbox, Input, Tooltip } from 'antd';
import { useState } from 'react';
import { GOOGLE_DRIVE_OAUTH_REDIRECT_URL } from '../../../../../constants';
import type { Storage } from '../../../../../entity/storages';
@@ -11,12 +13,17 @@ interface Props {
}
export function EditGoogleDriveStorageComponent({ storage, setStorage, setUnsaved }: Props) {
const hasAdvancedValues = !!storage?.googleDriveStorage?.useLocalRedirect;
const [showAdvanced, setShowAdvanced] = useState(hasAdvancedValues);
const goToAuthUrl = () => {
if (!storage?.googleDriveStorage?.clientId || !storage?.googleDriveStorage?.clientSecret) {
return;
}
const redirectUri = GOOGLE_DRIVE_OAUTH_REDIRECT_URL;
const localRedirectUri = `${window.location.origin}/storages/google-oauth`;
const useLocal = storage.googleDriveStorage.useLocalRedirect;
const redirectUri = useLocal ? localRedirectUri : GOOGLE_DRIVE_OAUTH_REDIRECT_URL;
const clientId = storage.googleDriveStorage.clientId;
const scope = 'https://www.googleapis.com/auth/drive.file';
const originUrl = `${window.location.origin}/storages/google-oauth`;
@@ -92,6 +99,53 @@ export function EditGoogleDriveStorageComponent({ storage, setStorage, setUnsave
/>
</div>
<div className="mt-4 mb-3 flex items-center">
<div
className="flex cursor-pointer items-center text-sm text-blue-600 hover:text-blue-800"
onClick={() => setShowAdvanced(!showAdvanced)}
>
<span className="mr-2">Advanced settings</span>
{showAdvanced ? (
<UpOutlined style={{ fontSize: '12px' }} />
) : (
<DownOutlined style={{ fontSize: '12px' }} />
)}
</div>
</div>
{showAdvanced && (
<div className="mb-4 flex w-full flex-col items-start sm:flex-row sm:items-center">
<div className="flex items-center">
<Checkbox
checked={storage?.googleDriveStorage?.useLocalRedirect || false}
onChange={(e) => {
if (!storage?.googleDriveStorage) return;
setStorage({
...storage,
googleDriveStorage: {
...storage.googleDriveStorage,
useLocalRedirect: e.target.checked,
},
});
setUnsaved();
}}
disabled={!!storage?.googleDriveStorage?.tokenJson}
>
<span>Use local redirect</span>
</Checkbox>
<Tooltip
className="cursor-pointer"
title="When enabled, uses your address as the origin and redirect URL (specify it in Google Cloud Console). HTTPS is required."
>
<InfoCircleOutlined className="ml-2" style={{ color: 'gray' }} />
</Tooltip>
</div>
</div>
)}
{storage?.googleDriveStorage?.tokenJson && (
<>
<div className="mb-1 flex w-full flex-col items-start sm:flex-row sm:items-center">

View File

@@ -18,6 +18,8 @@ export function OauthStorageComponent() {
const { clientId, clientSecret } = oauthDto.storage.googleDriveStorage;
const { authCode } = oauthDto;
const redirectUri = oauthDto.redirectUrl || GOOGLE_DRIVE_OAUTH_REDIRECT_URL;
try {
// Exchange authorization code for access token
const response = await fetch('https://oauth2.googleapis.com/token', {
@@ -29,13 +31,16 @@ export function OauthStorageComponent() {
code: authCode,
client_id: clientId,
client_secret: clientSecret,
redirect_uri: GOOGLE_DRIVE_OAUTH_REDIRECT_URL,
redirect_uri: redirectUri,
grant_type: 'authorization_code',
}),
});
if (!response.ok) {
throw new Error(`OAuth exchange failed: ${response.statusText}`);
const errorData = await response.json();
throw new Error(
errorData.error_description || `OAuth exchange failed: ${response.statusText}`,
);
}
const tokenData = await response.json();
@@ -44,27 +49,71 @@ export function OauthStorageComponent() {
setStorage(oauthDto.storage);
} catch (error) {
alert(`Failed to exchange OAuth code: ${error}`);
// Return to home if exchange fails
setTimeout(() => {
window.location.href = '/';
}, 3000);
}
};
useEffect(() => {
const oauthDtoParam = new URLSearchParams(window.location.search).get('oauthDto');
if (!oauthDtoParam) {
alert('OAuth param not found');
return;
}
const decodedParam = decodeURIComponent(oauthDtoParam);
const oauthDto: StorageOauthDto = JSON.parse(decodedParam);
/**
* Helper to validate the DTO and start the exchange process
*/
const processOauthDto = (oauthDto: StorageOauthDto) => {
if (oauthDto.storage.type === StorageType.GOOGLE_DRIVE) {
if (!oauthDto.storage.googleDriveStorage) {
alert('Google Drive storage not found');
alert('Google Drive storage configuration not found in DTO');
return;
}
exchangeGoogleOauthCode(oauthDto);
} else {
alert('Unsupported storage type for OAuth');
}
};
useEffect(() => {
const urlParams = new URLSearchParams(window.location.search);
// Attempt 1: Check for the 'oauthDto' param (Third-party/Legacy way)
const oauthDtoParam = urlParams.get('oauthDto');
if (oauthDtoParam) {
try {
const decodedParam = decodeURIComponent(oauthDtoParam);
const oauthDto: StorageOauthDto = JSON.parse(decodedParam);
processOauthDto(oauthDto);
return;
} catch (e) {
console.error('Error parsing oauthDto parameter:', e);
alert('Malformed OAuth parameter received');
return;
}
}
// Attempt 2: Check for 'code' and 'state' (Direct Google/Local way)
const code = urlParams.get('code');
const state = urlParams.get('state');
if (code && state) {
try {
// The 'state' parameter contains our stringified StorageOauthDto
const decodedState = decodeURIComponent(state);
const oauthDto: StorageOauthDto = JSON.parse(decodedState);
// Inject the authorization code received from Google
oauthDto.authCode = code;
processOauthDto(oauthDto);
return;
} catch (e) {
console.error('Error parsing OAuth state:', e);
alert('OAuth state parameter is invalid');
return;
}
}
// Attempt 3: No valid parameters found
alert('OAuth param not found. Ensure the redirect URL is configured correctly.');
}, []);
if (!storage) {