Compare commits

...

3 Commits

Author SHA1 Message Date
Rostislav Dugin
9ea795b48f FEATURE (backups): Add backups cancelling 2025-11-08 20:04:06 +03:00
Rostislav Dugin
a809dc8a9c FEATURE (protection): Do not expose sensitive data of databases, notifiers and storages from API + make backups lazy loaded 2025-11-08 18:49:23 +03:00
Rostislav Dugin
bd053b51a3 FIX (workspaces): Fix switch between workspaces 2025-11-07 15:54:15 +03:00
63 changed files with 1805 additions and 205 deletions

View File

@@ -0,0 +1,47 @@
package backups
import (
"context"
"errors"
"sync"
"github.com/google/uuid"
)
// BackupContextManager tracks the context.CancelFunc of every backup
// that is currently in progress, keyed by backup ID, so an in-flight
// backup can be cancelled from another goroutine (e.g. an HTTP handler).
type BackupContextManager struct {
	mu          sync.RWMutex // guards cancelFuncs
	cancelFuncs map[uuid.UUID]context.CancelFunc
}
// NewBackupContextManager returns a manager with an empty registry of
// cancellable backups, ready for use.
func NewBackupContextManager() *BackupContextManager {
	mgr := &BackupContextManager{}
	mgr.cancelFuncs = make(map[uuid.UUID]context.CancelFunc)
	return mgr
}
// RegisterBackup stores the cancel function for a backup that has just
// started so the backup can later be cancelled by its ID.
func (m *BackupContextManager) RegisterBackup(backupID uuid.UUID, cancelFunc context.CancelFunc) {
	m.mu.Lock()
	m.cancelFuncs[backupID] = cancelFunc
	m.mu.Unlock()
}
// CancelBackup cancels the in-progress backup with the given ID and
// removes it from the registry. It returns an error when no cancel
// function is registered for that ID.
func (m *BackupContextManager) CancelBackup(backupID uuid.UUID) error {
	m.mu.Lock()
	defer m.mu.Unlock()

	cancel, ok := m.cancelFuncs[backupID]
	if !ok {
		return errors.New("backup is not in progress or already completed")
	}

	// Drop the registration first; the two operations are independent.
	delete(m.cancelFuncs, backupID)
	cancel()

	return nil
}
// UnregisterBackup removes a backup's cancel function from the registry
// without invoking it; it is a no-op for unknown IDs.
func (m *BackupContextManager) UnregisterBackup(backupID uuid.UUID) {
	m.mu.Lock()
	delete(m.cancelFuncs, backupID)
	m.mu.Unlock()
}

View File

@@ -19,15 +19,18 @@ func (c *BackupController) RegisterRoutes(router *gin.RouterGroup) {
router.POST("/backups", c.MakeBackup)
router.GET("/backups/:id/file", c.GetFile)
router.DELETE("/backups/:id", c.DeleteBackup)
router.POST("/backups/:id/cancel", c.CancelBackup)
}
// GetBackups
// @Summary Get backups for a database
// @Description Get all backups for the specified database
// @Description Get paginated backups for the specified database
// @Tags backups
// @Produce json
// @Param database_id query string true "Database ID"
// @Success 200 {array} Backup
// @Param limit query int false "Number of items per page" default(10)
// @Param offset query int false "Offset for pagination" default(0)
// @Success 200 {object} GetBackupsResponse
// @Failure 400
// @Failure 401
// @Failure 500
@@ -39,25 +42,25 @@ func (c *BackupController) GetBackups(ctx *gin.Context) {
return
}
databaseIDStr := ctx.Query("database_id")
if databaseIDStr == "" {
ctx.JSON(http.StatusBadRequest, gin.H{"error": "database_id query parameter is required"})
var request GetBackupsRequest
if err := ctx.ShouldBindQuery(&request); err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
databaseID, err := uuid.Parse(databaseIDStr)
databaseID, err := uuid.Parse(request.DatabaseID)
if err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": "invalid database_id"})
return
}
backups, err := c.backupService.GetBackups(user, databaseID)
response, err := c.backupService.GetBackups(user, databaseID, request.Limit, request.Offset)
if err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
ctx.JSON(http.StatusOK, backups)
ctx.JSON(http.StatusOK, response)
}
// MakeBackup
@@ -124,6 +127,37 @@ func (c *BackupController) DeleteBackup(ctx *gin.Context) {
ctx.Status(http.StatusNoContent)
}
// CancelBackup
// @Summary Cancel an in-progress backup
// @Description Cancel a backup that is currently in progress
// @Tags backups
// @Param id path string true "Backup ID"
// @Success 204
// @Failure 400
// @Failure 401
// @Failure 403
// @Failure 500
// @Router /backups/{id}/cancel [post]
func (c *BackupController) CancelBackup(ctx *gin.Context) {
	user, ok := users_middleware.GetUserFromContext(ctx)
	if !ok {
		ctx.JSON(http.StatusUnauthorized, gin.H{"error": "User not authenticated"})
		return
	}

	id, err := uuid.Parse(ctx.Param("id"))
	if err != nil {
		ctx.JSON(http.StatusBadRequest, gin.H{"error": "invalid backup ID"})
		return
	}

	if err := c.backupService.CancelBackup(user, id); err != nil {
		// Permission failures are 403 (consistent with the other
		// controllers in this project); everything else — not found,
		// backup not in progress — stays a 400.
		if err.Error() == "insufficient permissions to cancel backup for this database" {
			ctx.JSON(http.StatusForbidden, gin.H{"error": err.Error()})
			return
		}
		ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	ctx.Status(http.StatusNoContent)
}
// GetFile
// @Summary Download a backup file
// @Description Download the backup file for the specified backup

View File

@@ -102,10 +102,11 @@ func Test_GetBackups_PermissionsEnforced(t *testing.T) {
)
if tt.expectSuccess {
var backups []*Backup
err := json.Unmarshal(testResp.Body, &backups)
var response GetBackupsResponse
err := json.Unmarshal(testResp.Body, &response)
assert.NoError(t, err)
assert.GreaterOrEqual(t, len(backups), 1)
assert.GreaterOrEqual(t, len(response.Backups), 1)
assert.GreaterOrEqual(t, response.Total, int64(1))
} else {
assert.Contains(t, string(testResp.Body), "insufficient permissions")
}
@@ -329,9 +330,9 @@ func Test_DeleteBackup_PermissionsEnforced(t *testing.T) {
ownerUser, err := userService.GetUserFromToken(owner.Token)
assert.NoError(t, err)
backups, err := GetBackupService().GetBackups(ownerUser, database.ID)
response, err := GetBackupService().GetBackups(ownerUser, database.ID, 10, 0)
assert.NoError(t, err)
assert.Equal(t, 0, len(backups))
assert.Equal(t, 0, len(response.Backups))
}
})
}
@@ -491,6 +492,77 @@ func Test_DownloadBackup_AuditLogWritten(t *testing.T) {
assert.True(t, found, "Audit log for backup download not found")
}
// Test_CancelBackup_InProgressBackup_SuccessfullyCancelled seeds an
// IN_PROGRESS backup row, registers a no-op cancel func for it, cancels
// the backup through the HTTP API, and asserts both the 204 response and
// that a "Backup cancelled" audit-log entry was written.
func Test_CancelBackup_InProgressBackup_SuccessfullyCancelled(t *testing.T) {
	router := createTestRouter()
	owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
	workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)
	database := createTestDatabase("Test Database", workspace.ID, owner.Token, router)
	storage := createTestStorage(workspace.ID)

	// Enable backups for the database and point the config at the storage.
	configService := backups_config.GetBackupConfigService()
	config, err := configService.GetBackupConfigByDbId(database.ID)
	assert.NoError(t, err)

	config.IsBackupsEnabled = true
	config.StorageID = &storage.ID
	config.Storage = storage
	_, err = configService.SaveBackupConfig(config)
	assert.NoError(t, err)

	// Seed a backup row that is still IN_PROGRESS so it is cancellable.
	backup := &Backup{
		ID:               uuid.New(),
		DatabaseID:       database.ID,
		Database:         database,
		StorageID:        storage.ID,
		Storage:          storage,
		Status:           BackupStatusInProgress,
		BackupSizeMb:     0,
		BackupDurationMs: 0,
		CreatedAt:        time.Now().UTC(),
	}
	repo := &BackupRepository{}
	err = repo.Save(backup)
	assert.NoError(t, err)

	// Register a cancellable context for the backup
	GetBackupService().backupContextMgr.RegisterBackup(backup.ID, func() {})

	resp := test_utils.MakePostRequest(
		t,
		router,
		fmt.Sprintf("/api/v1/backups/%s/cancel", backup.ID.String()),
		"Bearer "+owner.Token,
		nil,
		http.StatusNoContent,
	)
	assert.Equal(t, http.StatusNoContent, resp.StatusCode)

	// Verify audit log was created
	admin := users_testing.CreateTestUser(users_enums.UserRoleAdmin)
	userService := users_services.GetUserService()
	adminUser, err := userService.GetUserFromToken(admin.Token)
	assert.NoError(t, err)

	auditLogService := audit_logs.GetAuditLogService()
	auditLogs, err := auditLogService.GetGlobalAuditLogs(
		adminUser,
		&audit_logs.GetAuditLogsRequest{Limit: 100, Offset: 0},
	)
	assert.NoError(t, err)

	// Scan the global audit log for the cancel message that mentions
	// this test's database by name.
	foundCancelLog := false
	for _, log := range auditLogs.AuditLogs {
		if strings.Contains(log.Message, "Backup cancelled") &&
			strings.Contains(log.Message, database.Name) {
			foundCancelLog = true
			break
		}
	}
	assert.True(t, foundCancelLog, "Cancel audit log should be created")
}
// createTestRouter delegates to CreateTestRouter; kept as an unexported
// convenience wrapper for tests in this package.
func createTestRouter() *gin.Engine {
	return CreateTestRouter()
}

View File

@@ -13,6 +13,9 @@ import (
)
var backupRepository = &BackupRepository{}
var backupContextManager = NewBackupContextManager()
var backupService = &BackupService{
databases.GetDatabaseService(),
storages.GetStorageService(),
@@ -25,6 +28,7 @@ var backupService = &BackupService{
[]BackupRemoveListener{},
workspaces_services.GetWorkspaceService(),
audit_logs.GetAuditLogService(),
backupContextManager,
}
var backupBackgroundService = &BackupBackgroundService{

View File

@@ -0,0 +1,14 @@
package backups
// GetBackupsRequest is the query-string payload for listing backups.
// DatabaseID is required; Limit and Offset are optional pagination
// parameters (defaults are applied in the service layer).
type GetBackupsRequest struct {
	DatabaseID string `form:"database_id" binding:"required"`
	Limit      int    `form:"limit"`
	Offset     int    `form:"offset"`
}
// GetBackupsResponse is the paginated result of a backups listing: one
// page of backups plus the total row count and the effective
// limit/offset used to produce the page.
type GetBackupsResponse struct {
	Backups []*Backup `json:"backups"`
	Total   int64     `json:"total"`
	Limit   int       `json:"limit"`
	Offset  int       `json:"offset"`
}

View File

@@ -6,4 +6,5 @@ const (
BackupStatusInProgress BackupStatus = "IN_PROGRESS"
BackupStatusCompleted BackupStatus = "COMPLETED"
BackupStatusFailed BackupStatus = "FAILED"
BackupStatusCanceled BackupStatus = "CANCELED"
)

View File

@@ -1,6 +1,8 @@
package backups
import (
"context"
backups_config "postgresus-backend/internal/features/backups/config"
"postgresus-backend/internal/features/databases"
"postgresus-backend/internal/features/notifiers"
@@ -19,6 +21,7 @@ type NotificationSender interface {
type CreateBackupUsecase interface {
Execute(
ctx context.Context,
backupID uuid.UUID,
backupConfig *backups_config.BackupConfig,
database *databases.Database,

View File

@@ -195,3 +195,38 @@ func (r *BackupRepository) FindBackupsBeforeDate(
return backups, nil
}
// FindByDatabaseIDWithPagination returns one page of backups for the
// given database, newest first, with the related Database and Storage
// records preloaded.
func (r *BackupRepository) FindByDatabaseIDWithPagination(
	databaseID uuid.UUID,
	limit, offset int,
) ([]*Backup, error) {
	query := storage.GetDb().
		Preload("Database").
		Preload("Storage").
		Where("database_id = ?", databaseID).
		Order("created_at DESC").
		Limit(limit).
		Offset(offset)

	var backups []*Backup
	err := query.Find(&backups).Error
	if err != nil {
		return nil, err
	}

	return backups, nil
}
// CountByDatabaseID returns the total number of backups stored for the
// given database.
func (r *BackupRepository) CountByDatabaseID(databaseID uuid.UUID) (int64, error) {
	var total int64

	err := storage.GetDb().
		Model(&Backup{}).
		Where("database_id = ?", databaseID).
		Count(&total).Error
	if err != nil {
		return 0, err
	}

	return total, nil
}

View File

@@ -1,6 +1,7 @@
package backups
import (
"context"
"errors"
"fmt"
"io"
@@ -13,6 +14,7 @@ import (
users_models "postgresus-backend/internal/features/users/models"
workspaces_services "postgresus-backend/internal/features/workspaces/services"
"slices"
"strings"
"time"
"github.com/google/uuid"
@@ -34,6 +36,7 @@ type BackupService struct {
workspaceService *workspaces_services.WorkspaceService
auditLogService *audit_logs.AuditLogService
backupContextMgr *BackupContextManager
}
func (s *BackupService) AddBackupRemoveListener(listener BackupRemoveListener) {
@@ -93,7 +96,8 @@ func (s *BackupService) MakeBackupWithAuth(
func (s *BackupService) GetBackups(
user *users_models.User,
databaseID uuid.UUID,
) ([]*Backup, error) {
limit, offset int,
) (*GetBackupsResponse, error) {
database, err := s.databaseService.GetDatabaseByID(databaseID)
if err != nil {
return nil, err
@@ -111,12 +115,29 @@ func (s *BackupService) GetBackups(
return nil, errors.New("insufficient permissions to access backups for this database")
}
backups, err := s.backupRepository.FindByDatabaseID(databaseID)
if limit <= 0 {
limit = 10
}
if offset < 0 {
offset = 0
}
backups, err := s.backupRepository.FindByDatabaseIDWithPagination(databaseID, limit, offset)
if err != nil {
return nil, err
}
return backups, nil
total, err := s.backupRepository.CountByDatabaseID(databaseID)
if err != nil {
return nil, err
}
return &GetBackupsResponse{
Backups: backups,
Total: total,
Limit: limit,
Offset: offset,
}, nil
}
func (s *BackupService) DeleteBackup(
@@ -229,7 +250,12 @@ func (s *BackupService) MakeBackup(databaseID uuid.UUID, isLastTry bool) {
}
}
ctx, cancel := context.WithCancel(context.Background())
s.backupContextMgr.RegisterBackup(backup.ID, cancel)
defer s.backupContextMgr.UnregisterBackup(backup.ID)
err = s.createBackupUseCase.Execute(
ctx,
backup.ID,
backupConfig,
database,
@@ -238,6 +264,34 @@ func (s *BackupService) MakeBackup(databaseID uuid.UUID, isLastTry bool) {
)
if err != nil {
errMsg := err.Error()
// Check if backup was cancelled (not due to shutdown)
if strings.Contains(errMsg, "backup cancelled") && !strings.Contains(errMsg, "shutdown") {
backup.Status = BackupStatusCanceled
backup.BackupDurationMs = time.Since(start).Milliseconds()
backup.BackupSizeMb = 0
if err := s.backupRepository.Save(backup); err != nil {
s.logger.Error("Failed to save cancelled backup", "error", err)
}
// Delete partial backup from storage
storage, storageErr := s.storageService.GetStorageByID(backup.StorageID)
if storageErr == nil {
if deleteErr := storage.DeleteFile(backup.ID); deleteErr != nil {
s.logger.Error(
"Failed to delete partial backup file",
"backupId",
backup.ID,
"error",
deleteErr,
)
}
}
return
}
backup.FailMessage = &errMsg
backup.Status = BackupStatusFailed
backup.BackupDurationMs = time.Since(start).Milliseconds()
@@ -364,6 +418,48 @@ func (s *BackupService) GetBackup(backupID uuid.UUID) (*Backup, error) {
return s.backupRepository.FindByID(backupID)
}
// CancelBackup cancels an in-progress backup on behalf of user. It
// verifies the backup belongs to a workspace, that the user may manage
// databases in that workspace, and that the backup is still IN_PROGRESS,
// then signals the running backup via the context manager and writes an
// audit-log entry.
func (s *BackupService) CancelBackup(
	user *users_models.User,
	backupID uuid.UUID,
) error {
	backup, err := s.backupRepository.FindByID(backupID)
	if err != nil {
		return err
	}

	workspaceID := backup.Database.WorkspaceID
	if workspaceID == nil {
		return errors.New("cannot cancel backup for database without workspace")
	}

	canManage, err := s.workspaceService.CanUserManageDBs(*workspaceID, user)
	if err != nil {
		return err
	}
	if !canManage {
		return errors.New("insufficient permissions to cancel backup for this database")
	}

	if backup.Status != BackupStatusInProgress {
		return errors.New("backup is not in progress")
	}

	if err := s.backupContextMgr.CancelBackup(backupID); err != nil {
		return err
	}

	message := fmt.Sprintf(
		"Backup cancelled for database: %s (ID: %s)",
		backup.Database.Name,
		backupID.String(),
	)
	s.auditLogService.WriteAuditLog(message, &user.ID, workspaceID)

	return nil
}
func (s *BackupService) GetBackupFile(
user *users_models.User,
backupID uuid.UUID,

View File

@@ -1,6 +1,7 @@
package backups
import (
"context"
"errors"
backups_config "postgresus-backend/internal/features/backups/config"
"postgresus-backend/internal/features/databases"
@@ -56,6 +57,7 @@ func Test_BackupExecuted_NotificationSent(t *testing.T) {
[]BackupRemoveListener{},
nil, // workspaceService
nil, // auditLogService
NewBackupContextManager(),
}
// Set up expectations
@@ -101,6 +103,7 @@ func Test_BackupExecuted_NotificationSent(t *testing.T) {
[]BackupRemoveListener{},
nil, // workspaceService
nil, // auditLogService
NewBackupContextManager(),
}
backupService.MakeBackup(database.ID, true)
@@ -123,6 +126,7 @@ func Test_BackupExecuted_NotificationSent(t *testing.T) {
[]BackupRemoveListener{},
nil, // workspaceService
nil, // auditLogService
NewBackupContextManager(),
}
// capture arguments
@@ -158,6 +162,7 @@ type CreateFailedBackupUsecase struct {
}
func (uc *CreateFailedBackupUsecase) Execute(
ctx context.Context,
backupID uuid.UUID,
backupConfig *backups_config.BackupConfig,
database *databases.Database,
@@ -174,6 +179,7 @@ type CreateSuccessBackupUsecase struct {
}
func (uc *CreateSuccessBackupUsecase) Execute(
ctx context.Context,
backupID uuid.UUID,
backupConfig *backups_config.BackupConfig,
database *databases.Database,

View File

@@ -1,6 +1,7 @@
package usecases
import (
"context"
"errors"
usecases_postgresql "postgresus-backend/internal/features/backups/backups/usecases/postgresql"
backups_config "postgresus-backend/internal/features/backups/config"
@@ -16,6 +17,7 @@ type CreateBackupUsecase struct {
// Execute creates a backup of the database and returns the backup size in MB
func (uc *CreateBackupUsecase) Execute(
ctx context.Context,
backupID uuid.UUID,
backupConfig *backups_config.BackupConfig,
database *databases.Database,
@@ -26,6 +28,7 @@ func (uc *CreateBackupUsecase) Execute(
) error {
if database.Type == databases.DatabaseTypePostgres {
return uc.CreatePostgresqlBackupUsecase.Execute(
ctx,
backupID,
backupConfig,
database,

View File

@@ -29,6 +29,7 @@ type CreatePostgresqlBackupUsecase struct {
// Execute creates a backup of the database
func (uc *CreatePostgresqlBackupUsecase) Execute(
ctx context.Context,
backupID uuid.UUID,
backupConfig *backups_config.BackupConfig,
db *databases.Database,
@@ -81,6 +82,7 @@ func (uc *CreatePostgresqlBackupUsecase) Execute(
}
return uc.streamToStorage(
ctx,
backupID,
backupConfig,
tools.GetPostgresqlExecutable(
@@ -99,6 +101,7 @@ func (uc *CreatePostgresqlBackupUsecase) Execute(
// streamToStorage streams pg_dump output directly to storage
func (uc *CreatePostgresqlBackupUsecase) streamToStorage(
parentCtx context.Context,
backupID uuid.UUID,
backupConfig *backups_config.BackupConfig,
pgBin string,
@@ -112,7 +115,7 @@ func (uc *CreatePostgresqlBackupUsecase) streamToStorage(
// if backup not fit into 23 hours, Postgresus
// seems not to work for such database size
ctx, cancel := context.WithTimeout(context.Background(), 23*time.Hour)
ctx, cancel := context.WithTimeout(parentCtx, 23*time.Hour)
defer cancel()
// Monitor for shutdown and cancel context if needed
@@ -272,8 +275,9 @@ func (uc *CreatePostgresqlBackupUsecase) streamToStorage(
bytesWritten := <-bytesWrittenCh
waitErr := cmd.Wait()
// Check for shutdown before finalizing
if config.IsShouldShutdown() {
// Check for shutdown or cancellation before finalizing
select {
case <-ctx.Done():
if pipeWriter, ok := countingWriter.writer.(*io.PipeWriter); ok {
if err := pipeWriter.Close(); err != nil {
uc.logger.Error("Failed to close counting writer", "error", err)
@@ -281,7 +285,12 @@ func (uc *CreatePostgresqlBackupUsecase) streamToStorage(
}
<-saveErrCh // Wait for storage to finish
return fmt.Errorf("backup cancelled due to shutdown")
if config.IsShouldShutdown() {
return fmt.Errorf("backup cancelled due to shutdown")
}
return fmt.Errorf("backup cancelled")
default:
}
// Close the pipe writer to signal end of data
@@ -303,8 +312,13 @@ func (uc *CreatePostgresqlBackupUsecase) streamToStorage(
switch {
case waitErr != nil:
if config.IsShouldShutdown() {
return fmt.Errorf("backup cancelled due to shutdown")
select {
case <-ctx.Done():
if config.IsShouldShutdown() {
return fmt.Errorf("backup cancelled due to shutdown")
}
return fmt.Errorf("backup cancelled")
default:
}
// Enhanced error handling for PostgreSQL connection and SSL issues
@@ -402,14 +416,24 @@ func (uc *CreatePostgresqlBackupUsecase) streamToStorage(
return errors.New(errorMsg)
case copyErr != nil:
if config.IsShouldShutdown() {
return fmt.Errorf("backup cancelled due to shutdown")
select {
case <-ctx.Done():
if config.IsShouldShutdown() {
return fmt.Errorf("backup cancelled due to shutdown")
}
return fmt.Errorf("backup cancelled")
default:
}
return fmt.Errorf("copy to storage: %w", copyErr)
case saveErr != nil:
if config.IsShouldShutdown() {
return fmt.Errorf("backup cancelled due to shutdown")
select {
case <-ctx.Done():
if config.IsShouldShutdown() {
return fmt.Errorf("backup cancelled due to shutdown")
}
return fmt.Errorf("backup cancelled")
default:
}
return fmt.Errorf("save to storage: %w", saveErr)

View File

@@ -82,19 +82,6 @@ func (s *BackupConfigService) SaveBackupConfig(
}
}
if !backupConfig.IsBackupsEnabled && existingConfig.StorageID != nil {
if err := s.dbStorageChangeListener.OnBeforeBackupsStorageChange(
backupConfig.DatabaseID,
); err != nil {
return nil, err
}
// we clear storage for disabled backups to allow
// storage removal for unused storages
backupConfig.Storage = nil
backupConfig.StorageID = nil
}
return s.backupConfigRepository.Save(backupConfig)
}

View File

@@ -768,3 +768,161 @@ func createTestDatabaseViaAPI(
return &database
}
// Test_DatabaseSensitiveDataLifecycle_AllTypes drives the full lifecycle
// (create -> read -> update -> read -> list -> delete) for each database
// type and checks that sensitive fields (passwords) are hidden on every
// API read while the stored secret survives an update that submits the
// sensitive field as empty.
func Test_DatabaseSensitiveDataLifecycle_AllTypes(t *testing.T) {
	testCases := []struct {
		name                string
		databaseType        DatabaseType
		createDatabase      func(workspaceID uuid.UUID) *Database
		updateDatabase      func(workspaceID uuid.UUID, databaseID uuid.UUID) *Database
		verifySensitiveData func(t *testing.T, database *Database)
		verifyHiddenData    func(t *testing.T, database *Database)
	}{
		{
			name:         "PostgreSQL Database",
			databaseType: DatabaseTypePostgres,
			createDatabase: func(workspaceID uuid.UUID) *Database {
				testDbName := "test_db"
				return &Database{
					WorkspaceID: &workspaceID,
					Name:        "Test PostgreSQL Database",
					Type:        DatabaseTypePostgres,
					Postgresql: &postgresql.PostgresqlDatabase{
						Version:  tools.PostgresqlVersion16,
						Host:     "localhost",
						Port:     5432,
						Username: "postgres",
						Password: "original-password-secret",
						Database: &testDbName,
					},
				}
			},
			updateDatabase: func(workspaceID uuid.UUID, databaseID uuid.UUID) *Database {
				testDbName := "updated_test_db"
				return &Database{
					ID:          databaseID,
					WorkspaceID: &workspaceID,
					Name:        "Updated PostgreSQL Database",
					Type:        DatabaseTypePostgres,
					Postgresql: &postgresql.PostgresqlDatabase{
						Version:  tools.PostgresqlVersion17,
						Host:     "updated-host",
						Port:     5433,
						Username: "updated_user",
						// Empty password means "keep the stored secret".
						Password: "",
						Database: &testDbName,
					},
				}
			},
			verifySensitiveData: func(t *testing.T, database *Database) {
				assert.Equal(t, "original-password-secret", database.Postgresql.Password)
			},
			verifyHiddenData: func(t *testing.T, database *Database) {
				assert.Equal(t, "", database.Postgresql.Password)
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			router := createTestRouter()
			owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
			workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)

			// Phase 1: Create database with sensitive data
			initialDatabase := tc.createDatabase(workspace.ID)

			var createdDatabase Database
			test_utils.MakePostRequestAndUnmarshal(
				t,
				router,
				"/api/v1/databases/create",
				"Bearer "+owner.Token,
				*initialDatabase,
				http.StatusCreated,
				&createdDatabase,
			)

			assert.NotEmpty(t, createdDatabase.ID)
			assert.Equal(t, initialDatabase.Name, createdDatabase.Name)

			// Phase 2: Read via service - sensitive data should be hidden
			var retrievedDatabase Database
			test_utils.MakeGetRequestAndUnmarshal(
				t,
				router,
				fmt.Sprintf("/api/v1/databases/%s", createdDatabase.ID.String()),
				"Bearer "+owner.Token,
				http.StatusOK,
				&retrievedDatabase,
			)

			tc.verifyHiddenData(t, &retrievedDatabase)
			assert.Equal(t, initialDatabase.Name, retrievedDatabase.Name)

			// Phase 3: Update with non-sensitive changes only (sensitive fields empty)
			updatedDatabase := tc.updateDatabase(workspace.ID, createdDatabase.ID)

			var updateResponse Database
			test_utils.MakePostRequestAndUnmarshal(
				t,
				router,
				"/api/v1/databases/update",
				"Bearer "+owner.Token,
				*updatedDatabase,
				http.StatusOK,
				&updateResponse,
			)

			// Phase 4: Retrieve directly from repository to verify sensitive data preservation
			repository := &DatabaseRepository{}
			databaseFromDB, err := repository.FindByID(createdDatabase.ID)
			assert.NoError(t, err)

			// Verify original sensitive data is still present in DB
			tc.verifySensitiveData(t, databaseFromDB)

			// Verify non-sensitive fields were updated in DB
			assert.Equal(t, updatedDatabase.Name, databaseFromDB.Name)

			// Phase 5: Additional verification - Check via GET that data is still hidden
			var finalRetrieved Database
			test_utils.MakeGetRequestAndUnmarshal(
				t,
				router,
				fmt.Sprintf("/api/v1/databases/%s", createdDatabase.ID.String()),
				"Bearer "+owner.Token,
				http.StatusOK,
				&finalRetrieved,
			)
			tc.verifyHiddenData(t, &finalRetrieved)

			// Phase 6: Verify GetDatabasesByWorkspace also hides sensitive data
			var workspaceDatabases []Database
			test_utils.MakeGetRequestAndUnmarshal(
				t,
				router,
				fmt.Sprintf("/api/v1/databases?workspace_id=%s", workspace.ID.String()),
				"Bearer "+owner.Token,
				http.StatusOK,
				&workspaceDatabases,
			)

			var foundDatabase *Database
			for i := range workspaceDatabases {
				if workspaceDatabases[i].ID == createdDatabase.ID {
					foundDatabase = &workspaceDatabases[i]
					break
				}
			}
			assert.NotNil(t, foundDatabase, "Database should be found in workspace databases list")
			tc.verifyHiddenData(t, foundDatabase)

			// Clean up: Delete database before removing workspace
			test_utils.MakeDeleteRequest(
				t,
				router,
				fmt.Sprintf("/api/v1/databases/%s", createdDatabase.ID.String()),
				"Bearer "+owner.Token,
				http.StatusNoContent,
			)

			workspaces_testing.RemoveTestWorkspace(workspace, router)
		})
	}
}

View File

@@ -66,6 +66,23 @@ func (p *PostgresqlDatabase) TestConnection(logger *slog.Logger) error {
return testSingleDatabaseConnection(logger, ctx, p)
}
// HideSensitiveData blanks the password so the struct can be returned
// from the API without exposing credentials.
func (p *PostgresqlDatabase) HideSensitiveData() {
	p.Password = ""
}
// Update copies the connection settings from incoming onto p. An empty
// incoming password means "unchanged" (the API hides passwords on
// reads), so the stored password is kept in that case.
func (p *PostgresqlDatabase) Update(incoming *PostgresqlDatabase) {
	if incoming.Password != "" {
		p.Password = incoming.Password
	}

	p.Version = incoming.Version
	p.Host = incoming.Host
	p.Port = incoming.Port
	p.Username = incoming.Username
	p.Database = incoming.Database
	p.IsHttps = incoming.IsHttps
}
// testSingleDatabaseConnection tests connection to a specific database for pg_dump
func testSingleDatabaseConnection(
logger *slog.Logger,

View File

@@ -12,6 +12,8 @@ type DatabaseValidator interface {
type DatabaseConnector interface {
TestConnection(logger *slog.Logger) error
HideSensitiveData()
}
type DatabaseCreationListener interface {

View File

@@ -60,6 +60,22 @@ func (d *Database) TestConnection(logger *slog.Logger) error {
return d.getSpecificDatabase().TestConnection(logger)
}
// HideSensitiveData delegates to the type-specific connector to blank
// credentials before the database is returned from the API.
func (d *Database) HideSensitiveData() {
	d.getSpecificDatabase().HideSensitiveData()
}
// Update applies incoming's name, type, and type-specific connection
// settings to d. Sensitive fields are merged by the specific database's
// own Update (where empty values mean "keep existing").
func (d *Database) Update(incoming *Database) {
	d.Name = incoming.Name
	d.Type = incoming.Type

	switch d.Type {
	case DatabaseTypePostgres:
		// Type-specific settings are merged only when both sides carry a
		// Postgresql config; otherwise they are left untouched.
		// NOTE(review): if incoming changes the Type, the old config is
		// silently kept — confirm this is intended.
		if d.Postgresql != nil && incoming.Postgresql != nil {
			d.Postgresql.Update(incoming.Postgresql)
		}
	}
}
func (d *Database) getSpecificDatabase() DatabaseConnector {
switch d.Type {
case DatabaseTypePostgres:

View File

@@ -112,17 +112,19 @@ func (s *DatabaseService) UpdateDatabase(
return err
}
if err := database.Validate(); err != nil {
existingDatabase.Update(database)
if err := existingDatabase.Validate(); err != nil {
return err
}
_, err = s.dbRepository.Save(database)
_, err = s.dbRepository.Save(existingDatabase)
if err != nil {
return err
}
s.auditLogService.WriteAuditLog(
fmt.Sprintf("Database updated: %s", database.Name),
fmt.Sprintf("Database updated: %s", existingDatabase.Name),
&user.ID,
existingDatabase.WorkspaceID,
)
@@ -187,6 +189,7 @@ func (s *DatabaseService) GetDatabase(
return nil, errors.New("insufficient permissions to access this database")
}
database.HideSensitiveData()
return database, nil
}
@@ -202,7 +205,16 @@ func (s *DatabaseService) GetDatabasesByWorkspace(
return nil, errors.New("insufficient permissions to access this workspace")
}
return s.dbRepository.FindByWorkspaceID(workspaceID)
databases, err := s.dbRepository.FindByWorkspaceID(workspaceID)
if err != nil {
return nil, err
}
for _, database := range databases {
database.HideSensitiveData()
}
return databases, nil
}
func (s *DatabaseService) IsNotifierUsing(
@@ -259,7 +271,31 @@ func (s *DatabaseService) TestDatabaseConnection(
func (s *DatabaseService) TestDatabaseConnectionDirect(
database *Database,
) error {
return database.TestConnection(s.logger)
var usingDatabase *Database
if database.ID != uuid.Nil {
existingDatabase, err := s.dbRepository.FindByID(database.ID)
if err != nil {
return err
}
if database.WorkspaceID != nil && existingDatabase.WorkspaceID != nil &&
*existingDatabase.WorkspaceID != *database.WorkspaceID {
return errors.New("database does not belong to this workspace")
}
existingDatabase.Update(database)
if err := existingDatabase.Validate(); err != nil {
return err
}
usingDatabase = existingDatabase
} else {
usingDatabase = database
}
return usingDatabase.TestConnection(s.logger)
}
func (s *DatabaseService) GetDatabaseByID(

View File

@@ -54,11 +54,6 @@ func (c *NotifierController) SaveNotifier(ctx *gin.Context) {
return
}
if err := request.Validate(); err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
if err := c.notifierService.SaveNotifier(user, request.WorkspaceID, &request); err != nil {
if err.Error() == "insufficient permissions to manage notifier in this workspace" {
ctx.JSON(http.StatusForbidden, gin.H{"error": err.Error()})

View File

@@ -7,6 +7,10 @@ import (
"postgresus-backend/internal/config"
audit_logs "postgresus-backend/internal/features/audit_logs"
discord_notifier "postgresus-backend/internal/features/notifiers/models/discord"
email_notifier "postgresus-backend/internal/features/notifiers/models/email_notifier"
slack_notifier "postgresus-backend/internal/features/notifiers/models/slack"
teams_notifier "postgresus-backend/internal/features/notifiers/models/teams"
telegram_notifier "postgresus-backend/internal/features/notifiers/models/telegram"
webhook_notifier "postgresus-backend/internal/features/notifiers/models/webhook"
users_enums "postgresus-backend/internal/features/users/enums"
@@ -512,3 +516,300 @@ func deleteNotifier(
http.StatusOK,
)
}
func Test_NotifierSensitiveDataLifecycle_AllTypes(t *testing.T) {
testCases := []struct {
name string
notifierType NotifierType
createNotifier func(workspaceID uuid.UUID) *Notifier
updateNotifier func(workspaceID uuid.UUID, notifierID uuid.UUID) *Notifier
verifySensitiveData func(t *testing.T, notifier *Notifier)
verifyHiddenData func(t *testing.T, notifier *Notifier)
}{
{
name: "Telegram Notifier",
notifierType: NotifierTypeTelegram,
createNotifier: func(workspaceID uuid.UUID) *Notifier {
return &Notifier{
WorkspaceID: workspaceID,
Name: "Test Telegram Notifier",
NotifierType: NotifierTypeTelegram,
TelegramNotifier: &telegram_notifier.TelegramNotifier{
BotToken: "original-bot-token-12345",
TargetChatID: "123456789",
},
}
},
updateNotifier: func(workspaceID uuid.UUID, notifierID uuid.UUID) *Notifier {
return &Notifier{
ID: notifierID,
WorkspaceID: workspaceID,
Name: "Updated Telegram Notifier",
NotifierType: NotifierTypeTelegram,
TelegramNotifier: &telegram_notifier.TelegramNotifier{
BotToken: "",
TargetChatID: "987654321",
},
}
},
verifySensitiveData: func(t *testing.T, notifier *Notifier) {
assert.Equal(t, "original-bot-token-12345", notifier.TelegramNotifier.BotToken)
},
verifyHiddenData: func(t *testing.T, notifier *Notifier) {
assert.Equal(t, "", notifier.TelegramNotifier.BotToken)
},
},
{
name: "Email Notifier",
notifierType: NotifierTypeEmail,
createNotifier: func(workspaceID uuid.UUID) *Notifier {
return &Notifier{
WorkspaceID: workspaceID,
Name: "Test Email Notifier",
NotifierType: NotifierTypeEmail,
EmailNotifier: &email_notifier.EmailNotifier{
TargetEmail: "test@example.com",
SMTPHost: "smtp.example.com",
SMTPPort: 587,
SMTPUser: "user@example.com",
SMTPPassword: "original-password-secret",
},
}
},
updateNotifier: func(workspaceID uuid.UUID, notifierID uuid.UUID) *Notifier {
return &Notifier{
ID: notifierID,
WorkspaceID: workspaceID,
Name: "Updated Email Notifier",
NotifierType: NotifierTypeEmail,
EmailNotifier: &email_notifier.EmailNotifier{
TargetEmail: "updated@example.com",
SMTPHost: "smtp.newhost.com",
SMTPPort: 465,
SMTPUser: "newuser@example.com",
SMTPPassword: "",
},
}
},
verifySensitiveData: func(t *testing.T, notifier *Notifier) {
assert.Equal(t, "original-password-secret", notifier.EmailNotifier.SMTPPassword)
},
verifyHiddenData: func(t *testing.T, notifier *Notifier) {
assert.Equal(t, "", notifier.EmailNotifier.SMTPPassword)
},
},
{
name: "Slack Notifier",
notifierType: NotifierTypeSlack,
createNotifier: func(workspaceID uuid.UUID) *Notifier {
return &Notifier{
WorkspaceID: workspaceID,
Name: "Test Slack Notifier",
NotifierType: NotifierTypeSlack,
SlackNotifier: &slack_notifier.SlackNotifier{
BotToken: "xoxb-original-slack-token",
TargetChatID: "C123456",
},
}
},
updateNotifier: func(workspaceID uuid.UUID, notifierID uuid.UUID) *Notifier {
return &Notifier{
ID: notifierID,
WorkspaceID: workspaceID,
Name: "Updated Slack Notifier",
NotifierType: NotifierTypeSlack,
SlackNotifier: &slack_notifier.SlackNotifier{
BotToken: "",
TargetChatID: "C789012",
},
}
},
verifySensitiveData: func(t *testing.T, notifier *Notifier) {
assert.Equal(t, "xoxb-original-slack-token", notifier.SlackNotifier.BotToken)
},
verifyHiddenData: func(t *testing.T, notifier *Notifier) {
assert.Equal(t, "", notifier.SlackNotifier.BotToken)
},
},
{
name: "Discord Notifier",
notifierType: NotifierTypeDiscord,
createNotifier: func(workspaceID uuid.UUID) *Notifier {
return &Notifier{
WorkspaceID: workspaceID,
Name: "Test Discord Notifier",
NotifierType: NotifierTypeDiscord,
DiscordNotifier: &discord_notifier.DiscordNotifier{
ChannelWebhookURL: "https://discord.com/api/webhooks/123/original-token",
},
}
},
updateNotifier: func(workspaceID uuid.UUID, notifierID uuid.UUID) *Notifier {
return &Notifier{
ID: notifierID,
WorkspaceID: workspaceID,
Name: "Updated Discord Notifier",
NotifierType: NotifierTypeDiscord,
DiscordNotifier: &discord_notifier.DiscordNotifier{
ChannelWebhookURL: "",
},
}
},
verifySensitiveData: func(t *testing.T, notifier *Notifier) {
assert.Equal(
t,
"https://discord.com/api/webhooks/123/original-token",
notifier.DiscordNotifier.ChannelWebhookURL,
)
},
verifyHiddenData: func(t *testing.T, notifier *Notifier) {
assert.Equal(t, "", notifier.DiscordNotifier.ChannelWebhookURL)
},
},
{
name: "Teams Notifier",
notifierType: NotifierTypeTeams,
createNotifier: func(workspaceID uuid.UUID) *Notifier {
return &Notifier{
WorkspaceID: workspaceID,
Name: "Test Teams Notifier",
NotifierType: NotifierTypeTeams,
TeamsNotifier: &teams_notifier.TeamsNotifier{
WebhookURL: "https://outlook.office.com/webhook/original-token",
},
}
},
updateNotifier: func(workspaceID uuid.UUID, notifierID uuid.UUID) *Notifier {
return &Notifier{
ID: notifierID,
WorkspaceID: workspaceID,
Name: "Updated Teams Notifier",
NotifierType: NotifierTypeTeams,
TeamsNotifier: &teams_notifier.TeamsNotifier{
WebhookURL: "",
},
}
},
verifySensitiveData: func(t *testing.T, notifier *Notifier) {
assert.Equal(
t,
"https://outlook.office.com/webhook/original-token",
notifier.TeamsNotifier.WebhookURL,
)
},
verifyHiddenData: func(t *testing.T, notifier *Notifier) {
assert.Equal(t, "", notifier.TeamsNotifier.WebhookURL)
},
},
{
name: "Webhook Notifier",
notifierType: NotifierTypeWebhook,
createNotifier: func(workspaceID uuid.UUID) *Notifier {
return &Notifier{
WorkspaceID: workspaceID,
Name: "Test Webhook Notifier",
NotifierType: NotifierTypeWebhook,
WebhookNotifier: &webhook_notifier.WebhookNotifier{
WebhookURL: "https://webhook.example.com/test",
WebhookMethod: webhook_notifier.WebhookMethodPOST,
},
}
},
updateNotifier: func(workspaceID uuid.UUID, notifierID uuid.UUID) *Notifier {
return &Notifier{
ID: notifierID,
WorkspaceID: workspaceID,
Name: "Updated Webhook Notifier",
NotifierType: NotifierTypeWebhook,
WebhookNotifier: &webhook_notifier.WebhookNotifier{
WebhookURL: "https://webhook.example.com/updated",
WebhookMethod: webhook_notifier.WebhookMethodGET,
},
}
},
verifySensitiveData: func(t *testing.T, notifier *Notifier) {
// No sensitive data to verify for webhook
},
verifyHiddenData: func(t *testing.T, notifier *Notifier) {
// No sensitive data to hide for webhook
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
router := createRouter()
workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)
// Phase 1: Create notifier with sensitive data
initialNotifier := tc.createNotifier(workspace.ID)
var createdNotifier Notifier
test_utils.MakePostRequestAndUnmarshal(
t,
router,
"/api/v1/notifiers",
"Bearer "+owner.Token,
*initialNotifier,
http.StatusOK,
&createdNotifier,
)
assert.NotEmpty(t, createdNotifier.ID)
assert.Equal(t, initialNotifier.Name, createdNotifier.Name)
// Phase 2: Read via service - sensitive data should be hidden
var retrievedNotifier Notifier
test_utils.MakeGetRequestAndUnmarshal(
t,
router,
fmt.Sprintf("/api/v1/notifiers/%s", createdNotifier.ID.String()),
"Bearer "+owner.Token,
http.StatusOK,
&retrievedNotifier,
)
tc.verifyHiddenData(t, &retrievedNotifier)
assert.Equal(t, initialNotifier.Name, retrievedNotifier.Name)
// Phase 3: Update with non-sensitive changes only (sensitive fields empty)
updatedNotifier := tc.updateNotifier(workspace.ID, createdNotifier.ID)
var updateResponse Notifier
test_utils.MakePostRequestAndUnmarshal(
t,
router,
"/api/v1/notifiers",
"Bearer "+owner.Token,
*updatedNotifier,
http.StatusOK,
&updateResponse,
)
// Verify non-sensitive fields were updated
assert.Equal(t, updatedNotifier.Name, updateResponse.Name)
// Phase 4: Retrieve directly from repository to verify sensitive data preservation
repository := &NotifierRepository{}
notifierFromDB, err := repository.FindByID(createdNotifier.ID)
assert.NoError(t, err)
// Verify original sensitive data is still present in DB
tc.verifySensitiveData(t, notifierFromDB)
// Verify non-sensitive fields were updated in DB
assert.Equal(t, updatedNotifier.Name, notifierFromDB.Name)
// Phase 5: Additional verification - Check via GET that data is still hidden
var finalRetrieved Notifier
test_utils.MakeGetRequestAndUnmarshal(
t,
router,
fmt.Sprintf("/api/v1/notifiers/%s", createdNotifier.ID.String()),
"Bearer "+owner.Token,
http.StatusOK,
&finalRetrieved,
)
tc.verifyHiddenData(t, &finalRetrieved)
deleteNotifier(t, router, createdNotifier.ID, workspace.ID, owner.Token)
workspaces_testing.RemoveTestWorkspace(workspace, router)
})
}
}

View File

@@ -6,4 +6,6 @@ type NotificationSender interface {
Send(logger *slog.Logger, heading string, message string) error
Validate() error
HideSensitiveData()
}

View File

@@ -54,6 +54,42 @@ func (n *Notifier) Send(logger *slog.Logger, heading string, message string) err
return err
}
// HideSensitiveData blanks the secret fields (bot tokens, webhook URLs,
// SMTP password) on the type-specific sub-notifier so they are never
// exposed through the API; the values persisted in the database are
// left untouched.
func (n *Notifier) HideSensitiveData() {
	n.getSpecificNotifier().HideSensitiveData()
}
// Update merges src into the persisted notifier. Name and type are always
// overwritten; the type-specific sub-config is merged only when both the
// stored notifier and src carry it, so the per-type Update implementations
// can preserve secrets that API clients left blank.
//
// NOTE(review): if src switches NotifierType, the matching sub-config on the
// stored notifier may be nil and src's config is then silently ignored —
// confirm type changes are rejected upstream.
func (n *Notifier) Update(src *Notifier) {
	n.Name = src.Name
	n.NotifierType = src.NotifierType

	switch n.NotifierType {
	case NotifierTypeTelegram:
		if n.TelegramNotifier == nil || src.TelegramNotifier == nil {
			return
		}
		n.TelegramNotifier.Update(src.TelegramNotifier)
	case NotifierTypeEmail:
		if n.EmailNotifier == nil || src.EmailNotifier == nil {
			return
		}
		n.EmailNotifier.Update(src.EmailNotifier)
	case NotifierTypeWebhook:
		if n.WebhookNotifier == nil || src.WebhookNotifier == nil {
			return
		}
		n.WebhookNotifier.Update(src.WebhookNotifier)
	case NotifierTypeSlack:
		if n.SlackNotifier == nil || src.SlackNotifier == nil {
			return
		}
		n.SlackNotifier.Update(src.SlackNotifier)
	case NotifierTypeDiscord:
		if n.DiscordNotifier == nil || src.DiscordNotifier == nil {
			return
		}
		n.DiscordNotifier.Update(src.DiscordNotifier)
	case NotifierTypeTeams:
		if n.TeamsNotifier == nil || src.TeamsNotifier == nil {
			return
		}
		n.TeamsNotifier.Update(src.TeamsNotifier)
	}
}
func (n *Notifier) getSpecificNotifier() NotificationSender {
switch n.NotifierType {
case NotifierTypeTelegram:

View File

@@ -71,3 +71,13 @@ func (d *DiscordNotifier) Send(logger *slog.Logger, heading string, message stri
return nil
}
// HideSensitiveData blanks the channel webhook URL (the Discord secret)
// before the notifier is returned from the API; the stored value in the
// database is not affected.
func (d *DiscordNotifier) HideSensitiveData() {
	d.ChannelWebhookURL = ""
}
// Update merges src into d. A blank webhook URL means "keep the stored
// secret", since the API hides it from clients and updates may echo it
// back empty.
func (d *DiscordNotifier) Update(src *DiscordNotifier) {
	if url := src.ChannelWebhookURL; url != "" {
		d.ChannelWebhookURL = url
	}
}

View File

@@ -208,3 +208,18 @@ func (e *EmailNotifier) Send(logger *slog.Logger, heading string, message string
return client.Quit()
}
}
// HideSensitiveData blanks the SMTP password before the notifier is
// returned from the API; the stored value in the database is not affected.
func (e *EmailNotifier) HideSensitiveData() {
	e.SMTPPassword = ""
}
// Update merges src into e. Connection settings are always overwritten;
// the SMTP password is write-only through the API, so a blank value means
// "keep the stored secret".
func (e *EmailNotifier) Update(src *EmailNotifier) {
	// Non-sensitive settings: unconditional overwrite.
	e.TargetEmail = src.TargetEmail
	e.SMTPHost = src.SMTPHost
	e.SMTPPort = src.SMTPPort
	e.SMTPUser = src.SMTPUser

	// Sensitive: only replace when the caller actually supplied one.
	if pw := src.SMTPPassword; pw != "" {
		e.SMTPPassword = pw
	}
}

View File

@@ -132,3 +132,15 @@ func (s *SlackNotifier) Send(logger *slog.Logger, heading, message string) error
return nil
}
}
// HideSensitiveData blanks the Slack bot token before the notifier is
// returned from the API; the stored value in the database is not affected.
func (s *SlackNotifier) HideSensitiveData() {
	s.BotToken = ""
}
// Update merges src into s. The target chat ID is always overwritten; a
// blank bot token means "keep the stored secret", since the API hides it
// from clients.
func (s *SlackNotifier) Update(src *SlackNotifier) {
	s.TargetChatID = src.TargetChatID
	if token := src.BotToken; token != "" {
		s.BotToken = token
	}
}

View File

@@ -94,3 +94,13 @@ func (n *TeamsNotifier) Send(logger *slog.Logger, heading, message string) error
return nil
}
// HideSensitiveData blanks the Teams webhook URL (the secret) before the
// notifier is returned from the API; the stored value in the database is
// not affected.
func (n *TeamsNotifier) HideSensitiveData() {
	n.WebhookURL = ""
}
// Update merges src into n. A blank webhook URL means "keep the stored
// secret", since the API hides it from clients and updates may echo it
// back empty.
func (n *TeamsNotifier) Update(src *TeamsNotifier) {
	if url := src.WebhookURL; url != "" {
		n.WebhookURL = url
	}
}

View File

@@ -80,3 +80,16 @@ func (t *TelegramNotifier) Send(logger *slog.Logger, heading string, message str
return nil
}
// HideSensitiveData blanks the Telegram bot token before the notifier is
// returned from the API; the stored value in the database is not affected.
func (t *TelegramNotifier) HideSensitiveData() {
	t.BotToken = ""
}
// Update merges src into t. Chat/thread targeting is always overwritten;
// a blank bot token means "keep the stored secret", since the API hides
// it from clients.
func (t *TelegramNotifier) Update(src *TelegramNotifier) {
	t.TargetChatID = src.TargetChatID
	t.ThreadID = src.ThreadID
	if token := src.BotToken; token != "" {
		t.BotToken = token
	}
}

View File

@@ -102,3 +102,11 @@ func (t *WebhookNotifier) Send(logger *slog.Logger, heading string, message stri
return fmt.Errorf("unsupported webhook method: %s", t.WebhookMethod)
}
}
// HideSensitiveData is a no-op: webhook notifiers store no secret fields
// (the target URL and HTTP method are treated as non-sensitive here).
func (t *WebhookNotifier) HideSensitiveData() {
}
// Update copies src into t. Nothing here is secret, so every field is
// overwritten unconditionally.
func (t *WebhookNotifier) Update(src *WebhookNotifier) {
	t.WebhookURL = src.WebhookURL
	t.WebhookMethod = src.WebhookMethod
}

View File

@@ -44,23 +44,34 @@ func (s *NotifierService) SaveNotifier(
return errors.New("notifier does not belong to this workspace")
}
notifier.WorkspaceID = existingNotifier.WorkspaceID
} else {
notifier.WorkspaceID = workspaceID
}
existingNotifier.Update(notifier)
_, err = s.notifierRepository.Save(notifier)
if err != nil {
return err
}
if err := existingNotifier.Validate(); err != nil {
return err
}
_, err = s.notifierRepository.Save(existingNotifier)
if err != nil {
return err
}
if isUpdate {
s.auditLogService.WriteAuditLog(
fmt.Sprintf("Notifier updated: %s", notifier.Name),
fmt.Sprintf("Notifier updated: %s", existingNotifier.Name),
&user.ID,
&workspaceID,
)
} else {
notifier.WorkspaceID = workspaceID
if err := notifier.Validate(); err != nil {
return err
}
_, err = s.notifierRepository.Save(notifier)
if err != nil {
return err
}
s.auditLogService.WriteAuditLog(
fmt.Sprintf("Notifier created: %s", notifier.Name),
&user.ID,
@@ -119,6 +130,7 @@ func (s *NotifierService) GetNotifier(
return nil, errors.New("insufficient permissions to view notifier in this workspace")
}
notifier.HideSensitiveData()
return notifier, nil
}
@@ -134,7 +146,16 @@ func (s *NotifierService) GetNotifiers(
return nil, errors.New("insufficient permissions to view notifiers in this workspace")
}
return s.notifierRepository.FindByWorkspaceID(workspaceID)
notifiers, err := s.notifierRepository.FindByWorkspaceID(workspaceID)
if err != nil {
return nil, err
}
for _, notifier := range notifiers {
notifier.HideSensitiveData()
}
return notifiers, nil
}
func (s *NotifierService) SendTestNotification(
@@ -170,7 +191,30 @@ func (s *NotifierService) SendTestNotification(
func (s *NotifierService) SendTestNotificationToNotifier(
notifier *Notifier,
) error {
return notifier.Send(s.logger, "Test message", "This is a test message")
var usingNotifier *Notifier
if notifier.ID != uuid.Nil {
existingNotifier, err := s.notifierRepository.FindByID(notifier.ID)
if err != nil {
return err
}
if existingNotifier.WorkspaceID != notifier.WorkspaceID {
return errors.New("notifier does not belong to this workspace")
}
existingNotifier.Update(notifier)
if err := existingNotifier.Validate(); err != nil {
return err
}
usingNotifier = existingNotifier
} else {
usingNotifier = notifier
}
return usingNotifier.Send(s.logger, "Test message", "This is a test message")
}
func (s *NotifierService) SendNotification(

View File

@@ -54,11 +54,6 @@ func (c *StorageController) SaveStorage(ctx *gin.Context) {
return
}
if err := request.Validate(); err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
if err := c.storageService.SaveStorage(user, request.WorkspaceID, &request); err != nil {
if err.Error() == "insufficient permissions to manage storage in this workspace" {
ctx.JSON(http.StatusForbidden, gin.H{"error": err.Error()})
@@ -271,11 +266,6 @@ func (c *StorageController) TestStorageConnectionDirect(ctx *gin.Context) {
return
}
if err := request.Validate(); err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
if err := c.storageService.TestStorageConnectionDirect(&request); err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return

View File

@@ -7,6 +7,7 @@ import (
audit_logs "postgresus-backend/internal/features/audit_logs"
local_storage "postgresus-backend/internal/features/storages/models/local"
s3_storage "postgresus-backend/internal/features/storages/models/s3"
users_enums "postgresus-backend/internal/features/users/enums"
users_middleware "postgresus-backend/internal/features/users/middleware"
users_services "postgresus-backend/internal/features/users/services"
@@ -484,3 +485,158 @@ func deleteStorage(
http.StatusOK,
)
}
// Test_StorageSensitiveDataLifecycle_AllTypes verifies, for every storage
// type, the full secret lifecycle: secrets are (1) accepted on create,
// (2) hidden on reads through the API, (3) preserved in the DB when an
// update sends blank sensitive fields, and (4) still hidden afterwards.
//
// Fix: the sibling notifier lifecycle test removes its test workspace at
// the end; this test previously leaked it, so the same cleanup is added.
func Test_StorageSensitiveDataLifecycle_AllTypes(t *testing.T) {
	testCases := []struct {
		name        string
		storageType StorageType
		// createStorage builds the initial payload carrying real secrets.
		createStorage func(workspaceID uuid.UUID) *Storage
		// updateStorage builds an update payload with secrets left blank.
		updateStorage func(workspaceID uuid.UUID, storageID uuid.UUID) *Storage
		// verifySensitiveData asserts secrets are still present (DB view).
		verifySensitiveData func(t *testing.T, storage *Storage)
		// verifyHiddenData asserts secrets are blanked (API view).
		verifyHiddenData func(t *testing.T, storage *Storage)
	}{
		{
			name:        "S3 Storage",
			storageType: StorageTypeS3,
			createStorage: func(workspaceID uuid.UUID) *Storage {
				return &Storage{
					WorkspaceID: workspaceID,
					Type:        StorageTypeS3,
					Name:        "Test S3 Storage",
					S3Storage: &s3_storage.S3Storage{
						S3Bucket:    "test-bucket",
						S3Region:    "us-east-1",
						S3AccessKey: "original-access-key",
						S3SecretKey: "original-secret-key",
						S3Endpoint:  "https://s3.amazonaws.com",
					},
				}
			},
			updateStorage: func(workspaceID uuid.UUID, storageID uuid.UUID) *Storage {
				return &Storage{
					ID:          storageID,
					WorkspaceID: workspaceID,
					Type:        StorageTypeS3,
					Name:        "Updated S3 Storage",
					S3Storage: &s3_storage.S3Storage{
						S3Bucket:    "updated-bucket",
						S3Region:    "us-west-2",
						S3AccessKey: "",
						S3SecretKey: "",
						S3Endpoint:  "https://s3.us-west-2.amazonaws.com",
					},
				}
			},
			verifySensitiveData: func(t *testing.T, storage *Storage) {
				assert.Equal(t, "original-access-key", storage.S3Storage.S3AccessKey)
				assert.Equal(t, "original-secret-key", storage.S3Storage.S3SecretKey)
			},
			verifyHiddenData: func(t *testing.T, storage *Storage) {
				assert.Equal(t, "", storage.S3Storage.S3AccessKey)
				assert.Equal(t, "", storage.S3Storage.S3SecretKey)
			},
		},
		{
			name:        "Local Storage",
			storageType: StorageTypeLocal,
			createStorage: func(workspaceID uuid.UUID) *Storage {
				return &Storage{
					WorkspaceID:  workspaceID,
					Type:         StorageTypeLocal,
					Name:         "Test Local Storage",
					LocalStorage: &local_storage.LocalStorage{},
				}
			},
			updateStorage: func(workspaceID uuid.UUID, storageID uuid.UUID) *Storage {
				return &Storage{
					ID:           storageID,
					WorkspaceID:  workspaceID,
					Type:         StorageTypeLocal,
					Name:         "Updated Local Storage",
					LocalStorage: &local_storage.LocalStorage{},
				}
			},
			// Local storage carries no secrets, so both checks are no-ops.
			verifySensitiveData: func(t *testing.T, storage *Storage) {
			},
			verifyHiddenData: func(t *testing.T, storage *Storage) {
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			owner := users_testing.CreateTestUser(users_enums.UserRoleMember)
			router := createRouter()
			workspace := workspaces_testing.CreateTestWorkspace("Test Workspace", owner, router)

			// Phase 1: Create storage with sensitive data
			initialStorage := tc.createStorage(workspace.ID)
			var createdStorage Storage
			test_utils.MakePostRequestAndUnmarshal(
				t,
				router,
				"/api/v1/storages",
				"Bearer "+owner.Token,
				*initialStorage,
				http.StatusOK,
				&createdStorage,
			)
			assert.NotEmpty(t, createdStorage.ID)
			assert.Equal(t, initialStorage.Name, createdStorage.Name)

			// Phase 2: Read via service - sensitive data should be hidden
			var retrievedStorage Storage
			test_utils.MakeGetRequestAndUnmarshal(
				t,
				router,
				fmt.Sprintf("/api/v1/storages/%s", createdStorage.ID.String()),
				"Bearer "+owner.Token,
				http.StatusOK,
				&retrievedStorage,
			)
			tc.verifyHiddenData(t, &retrievedStorage)
			assert.Equal(t, initialStorage.Name, retrievedStorage.Name)

			// Phase 3: Update with non-sensitive changes only (sensitive fields empty)
			updatedStorage := tc.updateStorage(workspace.ID, createdStorage.ID)
			var updateResponse Storage
			test_utils.MakePostRequestAndUnmarshal(
				t,
				router,
				"/api/v1/storages",
				"Bearer "+owner.Token,
				*updatedStorage,
				http.StatusOK,
				&updateResponse,
			)
			// Verify non-sensitive fields were updated
			assert.Equal(t, updatedStorage.Name, updateResponse.Name)

			// Phase 4: Retrieve directly from repository to verify sensitive data preservation
			repository := &StorageRepository{}
			storageFromDB, err := repository.FindByID(createdStorage.ID)
			assert.NoError(t, err)
			// Verify original sensitive data is still present in DB
			tc.verifySensitiveData(t, storageFromDB)
			// Verify non-sensitive fields were updated in DB
			assert.Equal(t, updatedStorage.Name, storageFromDB.Name)

			// Additional verification: Check via GET that data is still hidden
			var finalRetrieved Storage
			test_utils.MakeGetRequestAndUnmarshal(
				t,
				router,
				fmt.Sprintf("/api/v1/storages/%s", createdStorage.ID.String()),
				"Bearer "+owner.Token,
				http.StatusOK,
				&finalRetrieved,
			)
			tc.verifyHiddenData(t, &finalRetrieved)

			// Cleanup: drop the test workspace, mirroring the notifier
			// lifecycle test, so repeated runs do not accumulate workspaces.
			workspaces_testing.RemoveTestWorkspace(workspace, router)
		})
	}
}

View File

@@ -17,4 +17,6 @@ type StorageFileSaver interface {
Validate() error
TestConnection() error
HideSensitiveData()
}

View File

@@ -63,6 +63,34 @@ func (s *Storage) TestConnection() error {
return s.getSpecificStorage().TestConnection()
}
// HideSensitiveData blanks the secret fields (access/secret keys, client
// secrets, tokens, passwords) on the type-specific storage backend so they
// are never exposed through the API; the values persisted in the database
// are left untouched.
func (s *Storage) HideSensitiveData() {
	s.getSpecificStorage().HideSensitiveData()
}
// Update merges src into the persisted storage. Name and type are always
// overwritten; the type-specific backend config is merged only when both
// the stored storage and src carry it, so the per-type Update
// implementations can preserve secrets that API clients left blank.
//
// NOTE(review): if src switches Type, the matching backend config on the
// stored storage may be nil and src's config is then silently ignored —
// confirm type changes are rejected upstream.
func (s *Storage) Update(src *Storage) {
	s.Name = src.Name
	s.Type = src.Type

	switch s.Type {
	case StorageTypeLocal:
		if s.LocalStorage == nil || src.LocalStorage == nil {
			return
		}
		s.LocalStorage.Update(src.LocalStorage)
	case StorageTypeS3:
		if s.S3Storage == nil || src.S3Storage == nil {
			return
		}
		s.S3Storage.Update(src.S3Storage)
	case StorageTypeGoogleDrive:
		if s.GoogleDriveStorage == nil || src.GoogleDriveStorage == nil {
			return
		}
		s.GoogleDriveStorage.Update(src.GoogleDriveStorage)
	case StorageTypeNAS:
		if s.NASStorage == nil || src.NASStorage == nil {
			return
		}
		s.NASStorage.Update(src.NASStorage)
	}
}
func (s *Storage) getSpecificStorage() StorageFileSaver {
switch s.Type {
case StorageTypeLocal:

View File

@@ -191,6 +191,23 @@ func (s *GoogleDriveStorage) TestConnection() error {
})
}
// HideSensitiveData blanks the OAuth client secret and the token JSON
// before the storage is returned from the API; the stored values in the
// database are not affected.
func (s *GoogleDriveStorage) HideSensitiveData() {
	s.ClientSecret = ""
	s.TokenJSON = ""
}
// Update merges src into s. The client ID is always overwritten; blank
// secret fields mean "keep the stored secret", since the API hides them
// from clients and updates may echo them back empty.
func (s *GoogleDriveStorage) Update(src *GoogleDriveStorage) {
	s.ClientID = src.ClientID

	if secret := src.ClientSecret; secret != "" {
		s.ClientSecret = secret
	}
	if token := src.TokenJSON; token != "" {
		s.TokenJSON = token
	}
}
// withRetryOnAuth executes the provided function with retry logic for authentication errors
func (s *GoogleDriveStorage) withRetryOnAuth(fn func(*drive.Service) error) error {
driveService, err := s.getDriveService()

View File

@@ -156,3 +156,9 @@ func (l *LocalStorage) TestConnection() error {
return nil
}
// HideSensitiveData is a no-op: local storage stores no secret fields.
func (l *LocalStorage) HideSensitiveData() {
}
// Update is a no-op: local storage has no configurable fields to merge.
func (l *LocalStorage) Update(incoming *LocalStorage) {
}

View File

@@ -251,6 +251,24 @@ func (n *NASStorage) TestConnection() error {
return nil
}
// HideSensitiveData blanks the SMB password before the storage is returned
// from the API; the stored value in the database is not affected.
func (n *NASStorage) HideSensitiveData() {
	n.Password = ""
}
// Update merges src into n. Connection settings are always overwritten;
// a blank password means "keep the stored secret", since the API hides it
// from clients.
func (n *NASStorage) Update(src *NASStorage) {
	// Non-sensitive settings: unconditional overwrite.
	n.Host = src.Host
	n.Port = src.Port
	n.Share = src.Share
	n.Username = src.Username
	n.UseSSL = src.UseSSL
	n.Domain = src.Domain
	n.Path = src.Path

	// Sensitive: only replace when the caller actually supplied one.
	if pw := src.Password; pw != "" {
		n.Password = pw
	}
}
func (n *NASStorage) createSession() (*smb2.Session, error) {
// Create connection with timeout
conn, err := n.createConnection()

View File

@@ -180,6 +180,25 @@ func (s *S3Storage) TestConnection() error {
return nil
}
// HideSensitiveData blanks the S3 access and secret keys before the
// storage is returned from the API; the stored values in the database are
// not affected.
func (s *S3Storage) HideSensitiveData() {
	s.S3AccessKey = ""
	s.S3SecretKey = ""
}
// Update merges src into s. Bucket, region and endpoint are always
// overwritten; blank credentials mean "keep the stored secret", since the
// API hides them from clients and updates may echo them back empty.
func (s *S3Storage) Update(src *S3Storage) {
	// Non-sensitive settings: unconditional overwrite.
	s.S3Bucket = src.S3Bucket
	s.S3Region = src.S3Region
	s.S3Endpoint = src.S3Endpoint

	// Sensitive: only replace when the caller actually supplied them.
	if key := src.S3AccessKey; key != "" {
		s.S3AccessKey = key
	}
	if secret := src.S3SecretKey; secret != "" {
		s.S3SecretKey = secret
	}
}
func (s *S3Storage) getClient() (*minio.Client, error) {
endpoint := s.S3Endpoint
useSSL := true

View File

@@ -42,23 +42,34 @@ func (s *StorageService) SaveStorage(
return errors.New("storage does not belong to this workspace")
}
storage.WorkspaceID = existingStorage.WorkspaceID
} else {
storage.WorkspaceID = workspaceID
}
existingStorage.Update(storage)
_, err = s.storageRepository.Save(storage)
if err != nil {
return err
}
if err := existingStorage.Validate(); err != nil {
return err
}
_, err = s.storageRepository.Save(existingStorage)
if err != nil {
return err
}
if isUpdate {
s.auditLogService.WriteAuditLog(
fmt.Sprintf("Storage updated: %s", storage.Name),
fmt.Sprintf("Storage updated: %s", existingStorage.Name),
&user.ID,
&workspaceID,
)
} else {
storage.WorkspaceID = workspaceID
if err := storage.Validate(); err != nil {
return err
}
_, err = s.storageRepository.Save(storage)
if err != nil {
return err
}
s.auditLogService.WriteAuditLog(
fmt.Sprintf("Storage created: %s", storage.Name),
&user.ID,
@@ -117,6 +128,8 @@ func (s *StorageService) GetStorage(
return nil, errors.New("insufficient permissions to view storage in this workspace")
}
storage.HideSensitiveData()
return storage, nil
}
@@ -132,7 +145,16 @@ func (s *StorageService) GetStorages(
return nil, errors.New("insufficient permissions to view storages in this workspace")
}
return s.storageRepository.FindByWorkspaceID(workspaceID)
storages, err := s.storageRepository.FindByWorkspaceID(workspaceID)
if err != nil {
return nil, err
}
for _, storage := range storages {
storage.HideSensitiveData()
}
return storages, nil
}
func (s *StorageService) TestStorageConnection(
@@ -171,7 +193,30 @@ func (s *StorageService) TestStorageConnection(
func (s *StorageService) TestStorageConnectionDirect(
storage *Storage,
) error {
return storage.TestConnection()
var usingStorage *Storage
if storage.ID != uuid.Nil {
existingStorage, err := s.storageRepository.FindByID(storage.ID)
if err != nil {
return err
}
if existingStorage.WorkspaceID != storage.WorkspaceID {
return errors.New("storage does not belong to this workspace")
}
existingStorage.Update(storage)
if err := existingStorage.Validate(); err != nil {
return err
}
usingStorage = existingStorage
} else {
usingStorage = storage
}
return usingStorage.TestConnection()
}
func (s *StorageService) GetStorageByID(

View File

@@ -1,6 +1,7 @@
package tests
import (
"context"
"fmt"
"os"
"path/filepath"
@@ -138,6 +139,7 @@ func testBackupRestoreForVersion(t *testing.T, pgVersion string, port string) {
// Make backup
progressTracker := func(completedMBs float64) {}
err = usecases_postgresql_backup.GetCreatePostgresqlBackupUsecase().Execute(
context.Background(),
backupID,
backupConfig,
backupDb,

View File

@@ -6,6 +6,7 @@ import { Routes } from 'react-router';
import { userApi } from './entity/users';
import { AuthPageComponent } from './pages/AuthPageComponent';
import { OAuthCallbackPage } from './pages/OAuthCallbackPage';
import { OauthStorageComponent } from './pages/OauthStorageComponent';
import { MainScreenComponent } from './widgets/main/MainScreenComponent';
function App() {
@@ -32,6 +33,7 @@ function App() {
<BrowserRouter>
<Routes>
<Route path="/auth/callback" element={<OAuthCallbackPage />} />
<Route path="/storages/google-oauth" element={<OauthStorageComponent />} />
<Route
path="/"
element={!isAuthorized ? <AuthPageComponent /> : <MainScreenComponent />}

View File

@@ -1,12 +1,16 @@
import { getApplicationServer } from '../../../constants';
import RequestOptions from '../../../shared/api/RequestOptions';
import { apiHelper } from '../../../shared/api/apiHelper';
import type { Backup } from '../model/Backup';
import type { GetBackupsResponse } from '../model/GetBackupsResponse';
export const backupsApi = {
async getBackups(databaseId: string) {
return apiHelper.fetchGetJson<Backup[]>(
`${getApplicationServer()}/api/v1/backups?database_id=${databaseId}`,
async getBackups(databaseId: string, limit?: number, offset?: number) {
const params = new URLSearchParams({ database_id: databaseId });
if (limit !== undefined) params.append('limit', limit.toString());
if (offset !== undefined) params.append('offset', offset.toString());
return apiHelper.fetchGetJson<GetBackupsResponse>(
`${getApplicationServer()}/api/v1/backups?${params.toString()}`,
undefined,
true,
);
@@ -28,4 +32,8 @@ export const backupsApi = {
async downloadBackup(id: string): Promise<Blob> {
return apiHelper.fetchGetBlob(`${getApplicationServer()}/api/v1/backups/${id}/file`);
},
async cancelBackup(id: string) {
return apiHelper.fetchPostRaw(`${getApplicationServer()}/api/v1/backups/${id}/cancel`);
},
};

View File

@@ -3,4 +3,5 @@ export enum BackupStatus {
COMPLETED = 'COMPLETED',
FAILED = 'FAILED',
DELETED = 'DELETED',
CANCELED = 'CANCELED',
}

View File

@@ -0,0 +1,8 @@
import type { Backup } from './Backup';
/** Paginated response shape returned by `GET /api/v1/backups`. */
export interface GetBackupsResponse {
  /** The requested page of backups. */
  backups: Backup[];
  /** Total number of backups matching the query, across all pages. */
  total: number;
  /** Page size applied to this response. */
  limit: number;
  /** Offset applied to this response. */
  offset: number;
}

View File

@@ -1,7 +1,7 @@
import type { DiscordNotifier } from './DiscordNotifier';
export const validateDiscordNotifier = (notifier: DiscordNotifier): boolean => {
if (!notifier.channelWebhookUrl) {
export const validateDiscordNotifier = (isCreate: boolean, notifier: DiscordNotifier): boolean => {
if (isCreate && !notifier.channelWebhookUrl) {
return false;
}

View File

@@ -1,6 +1,6 @@
import type { EmailNotifier } from './EmailNotifier';
export const validateEmailNotifier = (notifier: EmailNotifier): boolean => {
export const validateEmailNotifier = (isCreate: boolean, notifier: EmailNotifier): boolean => {
if (!notifier.targetEmail) {
return false;
}
@@ -13,5 +13,9 @@ export const validateEmailNotifier = (notifier: EmailNotifier): boolean => {
return false;
}
if (isCreate && !notifier.smtpPassword) {
return false;
}
return true;
};

View File

@@ -1,7 +1,7 @@
import type { SlackNotifier } from './SlackNotifier';
export const validateSlackNotifier = (notifier: SlackNotifier): boolean => {
if (!notifier.botToken) {
export const validateSlackNotifier = (isCreate: boolean, notifier: SlackNotifier): boolean => {
if (isCreate && !notifier.botToken) {
return false;
}

View File

@@ -1,7 +1,7 @@
import type { TeamsNotifier } from './TeamsNotifier';
export const validateTeamsNotifier = (notifier: TeamsNotifier): boolean => {
if (!notifier?.powerAutomateUrl) {
export const validateTeamsNotifier = (isCreate: boolean, notifier: TeamsNotifier): boolean => {
if (isCreate && !notifier?.powerAutomateUrl) {
return false;
}

View File

@@ -1,7 +1,10 @@
import type { TelegramNotifier } from './TelegramNotifier';
export const validateTelegramNotifier = (notifier: TelegramNotifier): boolean => {
if (!notifier.botToken) {
export const validateTelegramNotifier = (
isCreate: boolean,
notifier: TelegramNotifier,
): boolean => {
if (isCreate && !notifier.botToken) {
return false;
}

View File

@@ -1,7 +1,7 @@
import type { WebhookNotifier } from './WebhookNotifier';
export const validateWebhookNotifier = (notifier: WebhookNotifier): boolean => {
if (!notifier.webhookUrl) {
export const validateWebhookNotifier = (isCreate: boolean, notifier: WebhookNotifier): boolean => {
if (isCreate && !notifier.webhookUrl) {
return false;
}

View File

@@ -1,5 +1,6 @@
import {
CheckCircleOutlined,
CloseCircleOutlined,
CloudUploadOutlined,
DeleteOutlined,
DownloadOutlined,
@@ -12,23 +13,37 @@ import type { ColumnsType } from 'antd/es/table';
import dayjs from 'dayjs';
import { useEffect, useRef, useState } from 'react';
import { type Backup, BackupStatus, backupConfigApi, backupsApi } from '../../../entity/backups';
import {
type Backup,
type BackupConfig,
BackupStatus,
backupConfigApi,
backupsApi,
} from '../../../entity/backups';
import type { Database } from '../../../entity/databases';
import { getUserTimeFormat } from '../../../shared/time';
import { ConfirmationComponent } from '../../../shared/ui';
import { RestoresComponent } from '../../restores';
const BACKUPS_PAGE_SIZE = 10;
interface Props {
database: Database;
isCanManageDBs: boolean;
scrollContainerRef?: React.RefObject<HTMLDivElement | null>;
}
export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
export const BackupsComponent = ({ database, isCanManageDBs, scrollContainerRef }: Props) => {
const [isBackupsLoading, setIsBackupsLoading] = useState(false);
const [backups, setBackups] = useState<Backup[]>([]);
const [totalBackups, setTotalBackups] = useState(0);
const [currentLimit, setCurrentLimit] = useState(BACKUPS_PAGE_SIZE);
const [isLoadingMore, setIsLoadingMore] = useState(false);
const [hasMore, setHasMore] = useState(true);
const [backupConfig, setBackupConfig] = useState<BackupConfig | undefined>();
const [isBackupConfigLoading, setIsBackupConfigLoading] = useState(false);
const [isShowBackupConfig, setIsShowBackupConfig] = useState(false);
const [isMakeBackupRequestLoading, setIsMakeBackupRequestLoading] = useState(false);
@@ -40,8 +55,10 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
const [showingRestoresBackupId, setShowingRestoresBackupId] = useState<string | undefined>();
const isReloadInProgress = useRef(false);
const isLazyLoadInProgress = useRef(false);
const [downloadingBackupId, setDownloadingBackupId] = useState<string | undefined>();
const [cancellingBackupId, setCancellingBackupId] = useState<string | undefined>();
const downloadBackup = async (backupId: string) => {
try {
@@ -71,16 +88,20 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
}
};
const loadBackups = async () => {
if (isReloadInProgress.current) {
const loadBackups = async (limit?: number) => {
if (isReloadInProgress.current || isLazyLoadInProgress.current) {
return;
}
isReloadInProgress.current = true;
try {
const backups = await backupsApi.getBackups(database.id);
setBackups(backups);
const loadLimit = limit || currentLimit;
const response = await backupsApi.getBackups(database.id, loadLimit, 0);
setBackups(response.backups);
setTotalBackups(response.total);
setHasMore(response.backups.length < response.total);
} catch (e) {
alert((e as Error).message);
}
@@ -88,12 +109,75 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
isReloadInProgress.current = false;
};
const reloadInProgressBackups = async () => {
if (isReloadInProgress.current || isLazyLoadInProgress.current) {
return;
}
isReloadInProgress.current = true;
try {
// Fetch only the recent backups that could be in progress
// We fetch a small number (20) to capture recent backups that might be in progress
const response = await backupsApi.getBackups(database.id, 20, 0);
// Update only the backups that exist in both lists
setBackups((prevBackups) => {
const updatedBackups = [...prevBackups];
response.backups.forEach((newBackup) => {
const index = updatedBackups.findIndex((b) => b.id === newBackup.id);
if (index !== -1) {
updatedBackups[index] = newBackup;
} else if (index === -1 && updatedBackups.length < currentLimit) {
// New backup that doesn't exist yet (e.g., just created)
updatedBackups.unshift(newBackup);
}
});
return updatedBackups;
});
setTotalBackups(response.total);
} catch (e) {
alert((e as Error).message);
}
isReloadInProgress.current = false;
};
const loadMoreBackups = async () => {
if (isLoadingMore || !hasMore || isLazyLoadInProgress.current) {
return;
}
isLazyLoadInProgress.current = true;
setIsLoadingMore(true);
try {
const newLimit = currentLimit + BACKUPS_PAGE_SIZE;
const response = await backupsApi.getBackups(database.id, newLimit, 0);
setBackups(response.backups);
setCurrentLimit(newLimit);
setTotalBackups(response.total);
setHasMore(response.backups.length < response.total);
} catch (e) {
alert((e as Error).message);
}
setIsLoadingMore(false);
isLazyLoadInProgress.current = false;
};
const makeBackup = async () => {
setIsMakeBackupRequestLoading(true);
try {
await backupsApi.makeBackup(database.id);
await loadBackups();
setCurrentLimit(BACKUPS_PAGE_SIZE);
setHasMore(true);
await loadBackups(BACKUPS_PAGE_SIZE);
} catch (e) {
alert((e as Error).message);
}
@@ -111,7 +195,9 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
try {
await backupsApi.deleteBackup(deleteConfimationId);
await loadBackups();
setCurrentLimit(BACKUPS_PAGE_SIZE);
setHasMore(true);
await loadBackups(BACKUPS_PAGE_SIZE);
} catch (e) {
alert((e as Error).message);
}
@@ -120,31 +206,51 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
setDeleteConfimationId(undefined);
};
useEffect(() => {
let isBackupsEnabled = false;
const cancelBackup = async (backupId: string) => {
setCancellingBackupId(backupId);
try {
await backupsApi.cancelBackup(backupId);
await reloadInProgressBackups();
} catch (e) {
alert((e as Error).message);
}
setCancellingBackupId(undefined);
};
useEffect(() => {
setIsBackupConfigLoading(true);
backupConfigApi.getBackupConfigByDbID(database.id).then((backupConfig) => {
setCurrentLimit(BACKUPS_PAGE_SIZE);
setHasMore(true);
backupConfigApi.getBackupConfigByDbID(database.id).then((config) => {
setBackupConfig(config);
setIsBackupConfigLoading(false);
if (backupConfig.isBackupsEnabled) {
// load backups
isBackupsEnabled = true;
setIsShowBackupConfig(true);
setIsBackupsLoading(true);
loadBackups().then(() => setIsBackupsLoading(false));
}
setIsBackupsLoading(true);
loadBackups(BACKUPS_PAGE_SIZE).then(() => setIsBackupsLoading(false));
});
const interval = setInterval(() => {
if (isBackupsEnabled) {
loadBackups();
}
return () => {};
}, [database]);
// Reload backups that are in progress to update their state
useEffect(() => {
const hasInProgressBackups = backups.some(
(backup) => backup.status === BackupStatus.IN_PROGRESS,
);
if (!hasInProgressBackups) {
return;
}
const timeoutId = setTimeout(async () => {
await reloadInProgressBackups();
}, 1_000);
return () => clearInterval(interval);
}, [database]);
return () => clearTimeout(timeoutId);
}, [backups]);
useEffect(() => {
if (downloadingBackupId) {
@@ -152,6 +258,26 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
}
}, [downloadingBackupId]);
useEffect(() => {
if (!scrollContainerRef?.current) {
return;
}
const handleScroll = () => {
if (!scrollContainerRef.current) return;
const { scrollTop, scrollHeight, clientHeight } = scrollContainerRef.current;
if (scrollHeight - scrollTop <= clientHeight + 100 && hasMore && !isLoadingMore) {
loadMoreBackups();
}
};
const container = scrollContainerRef.current;
container.addEventListener('scroll', handleScroll);
return () => container.removeEventListener('scroll', handleScroll);
}, [hasMore, isLoadingMore, currentLimit, scrollContainerRef]);
const columns: ColumnsType<Backup> = [
{
title: 'Created at',
@@ -213,6 +339,15 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
);
}
if (status === BackupStatus.CANCELED) {
return (
<div className="flex items-center text-gray-600">
<CloseCircleOutlined className="mr-2" style={{ fontSize: 16 }} />
<div>Canceled</div>
</div>
);
}
return <span className="font-bold">{status}</span>;
},
filters: [
@@ -232,6 +367,10 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
value: BackupStatus.DELETED,
text: 'Deleted',
},
{
value: BackupStatus.CANCELED,
text: 'Canceled',
},
],
onFilter: (value, record) => record.status === value,
},
@@ -282,6 +421,25 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
render: (_, record: Backup) => {
return (
<div className="flex gap-2 text-lg">
{record.status === BackupStatus.IN_PROGRESS && isCanManageDBs && (
<div className="flex gap-2">
{cancellingBackupId === record.id ? (
<SyncOutlined spin />
) : (
<Tooltip title="Cancel backup">
<CloseCircleOutlined
className="cursor-pointer"
onClick={() => {
if (cancellingBackupId) return;
cancelBackup(record.id);
}}
style={{ color: '#ff0000', opacity: cancellingBackupId ? 0.2 : 1 }}
/>
</Tooltip>
)}
</div>
)}
{record.status === BackupStatus.COMPLETED && (
<div className="flex gap-2">
{deletingBackupId === record.id ? (
@@ -348,14 +506,16 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
);
}
if (!isShowBackupConfig) {
return <div />;
}
return (
<div className="mt-5 w-full rounded-md bg-white p-5 shadow">
<h2 className="text-xl font-bold">Backups</h2>
{!isBackupConfigLoading && !backupConfig?.isBackupsEnabled && (
<div className="text-red-600">
Scheduled backups are disabled (you can enable it back in the backup configuration)
</div>
)}
<div className="mt-5" />
<div className="flex">
@@ -380,6 +540,16 @@ export const BackupsComponent = ({ database, isCanManageDBs }: Props) => {
size="small"
pagination={false}
/>
{isLoadingMore && (
<div className="mt-2 flex justify-center">
<Spin />
</div>
)}
{!hasMore && backups.length > 0 && (
<div className="mt-2 text-center text-gray-500">
All backups loaded ({totalBackups} total)
</div>
)}
</div>
{deleteConfimationId && (

View File

@@ -74,7 +74,6 @@ export const EditBackupConfigComponent = ({
const [isShowCreateStorage, setShowCreateStorage] = useState(false);
const [isShowWarn, setIsShowWarn] = useState(false);
const [isShowBackupDisableConfirm, setIsShowBackupDisableConfirm] = useState(false);
const timeFormat = useMemo(() => {
const is12 = getUserTimeFormat();
@@ -208,12 +207,7 @@ export const EditBackupConfigComponent = ({
<Switch
checked={backupConfig.isBackupsEnabled}
onChange={(checked) => {
// If disabling backups on existing database, show confirmation
if (!checked && database.id && backupConfig.isBackupsEnabled) {
setIsShowBackupDisableConfirm(true);
} else {
updateBackupConfig({ isBackupsEnabled: checked });
}
updateBackupConfig({ isBackupsEnabled: checked });
}}
size="small"
/>
@@ -385,41 +379,47 @@ export const EditBackupConfigComponent = ({
</Tooltip>
</div>
<div className="mt-5 mb-1 flex w-full items-center">
<div className="min-w-[150px]">Storage</div>
<Select
value={backupConfig.storage?.id}
onChange={(storageId) => {
if (storageId.includes('create-new-storage')) {
setShowCreateStorage(true);
return;
}
<div className="mb-3" />
</>
)}
const selectedStorage = storages.find((s) => s.id === storageId);
updateBackupConfig({ storage: selectedStorage });
<div className="mt-2 mb-1 flex w-full items-center">
<div className="min-w-[150px]">Storage</div>
<Select
value={backupConfig.storage?.id}
onChange={(storageId) => {
if (storageId.includes('create-new-storage')) {
setShowCreateStorage(true);
return;
}
if (backupConfig.storage?.id) {
setIsShowWarn(true);
}
}}
size="small"
className="mr-2 max-w-[200px] grow"
options={[
...storages.map((s) => ({ label: s.name, value: s.id })),
{ label: 'Create new storage', value: 'create-new-storage' },
]}
placeholder="Select storage"
/>
const selectedStorage = storages.find((s) => s.id === storageId);
updateBackupConfig({ storage: selectedStorage });
{backupConfig.storage?.type && (
<img
src={getStorageLogoFromType(backupConfig.storage.type)}
alt="storageIcon"
className="ml-1 h-4 w-4"
/>
)}
</div>
if (backupConfig.storage?.id) {
setIsShowWarn(true);
}
}}
size="small"
className="mr-2 max-w-[200px] grow"
options={[
...storages.map((s) => ({ label: s.name, value: s.id })),
{ label: 'Create new storage', value: 'create-new-storage' },
]}
placeholder="Select storage"
/>
{backupConfig.storage?.type && (
<img
src={getStorageLogoFromType(backupConfig.storage.type)}
alt="storageIcon"
className="ml-1 h-4 w-4"
/>
)}
</div>
{backupConfig.isBackupsEnabled && (
<>
<div className="mt-4 mb-1 flex w-full items-start">
<div className="mt-1 min-w-[150px]">Notifications</div>
<div className="flex flex-col space-y-2">
@@ -526,22 +526,6 @@ export const EditBackupConfigComponent = ({
hideCancelButton
/>
)}
{isShowBackupDisableConfirm && (
<ConfirmationComponent
onConfirm={() => {
updateBackupConfig({ isBackupsEnabled: false });
setIsShowBackupDisableConfirm(false);
}}
onDecline={() => {
setIsShowBackupDisableConfirm(false);
}}
description="All current backups will be removed? Are you sure?"
actionButtonColor="red"
actionText="Yes, disable backing up and remove all existing backup files"
cancelText="Cancel"
/>
)}
</div>
);
};

View File

@@ -99,7 +99,9 @@ export const ShowBackupConfigComponent = ({ database }: Props) => {
<div>
<div className="mb-1 flex w-full items-center">
<div className="min-w-[150px]">Backups enabled</div>
<div>{backupConfig.isBackupsEnabled ? 'Yes' : 'No'}</div>
<div className={backupConfig.isBackupsEnabled ? '' : 'font-bold text-red-600'}>
{backupConfig.isBackupsEnabled ? 'Yes' : 'No'}
</div>
</div>
{backupConfig.isBackupsEnabled ? (

View File

@@ -1,5 +1,5 @@
import { Spin } from 'antd';
import { useState } from 'react';
import { useRef, useState } from 'react';
import { useEffect } from 'react';
import { type Database, databaseApi } from '../../../entity/databases';
@@ -27,6 +27,8 @@ export const DatabaseComponent = ({
const [database, setDatabase] = useState<Database | undefined>();
const [editDatabase, setEditDatabase] = useState<Database | undefined>();
const scrollContainerRef = useRef<HTMLDivElement>(null);
const loadSettings = () => {
setDatabase(undefined);
setEditDatabase(undefined);
@@ -42,7 +44,11 @@ export const DatabaseComponent = ({
}
return (
<div className="w-full overflow-y-auto" style={{ maxHeight: contentHeight }}>
<div
className="w-full overflow-y-auto"
style={{ maxHeight: contentHeight }}
ref={scrollContainerRef}
>
<div className="flex">
<div
className={`mr-2 cursor-pointer rounded-tl-md rounded-tr-md px-6 py-2 ${currentTab === 'config' ? 'bg-white' : 'bg-gray-200'}`}
@@ -73,7 +79,11 @@ export const DatabaseComponent = ({
{currentTab === 'backups' && (
<>
<HealthckeckAttemptsComponent database={database} />
<BackupsComponent database={database} isCanManageDBs={isCanManageDBs} />
<BackupsComponent
database={database}
isCanManageDBs={isCanManageDBs}
scrollContainerRef={scrollContainerRef}
/>
</>
)}
</div>

View File

@@ -100,7 +100,7 @@ export const EditDatabaseSpecificDataComponent = ({
if (!editingDatabase.postgresql?.host) isAllFieldsFilled = false;
if (!editingDatabase.postgresql?.port) isAllFieldsFilled = false;
if (!editingDatabase.postgresql?.username) isAllFieldsFilled = false;
if (!editingDatabase.postgresql?.password) isAllFieldsFilled = false;
if (!editingDatabase.id && !editingDatabase.postgresql?.password) isAllFieldsFilled = false;
if (!editingDatabase.postgresql?.database) isAllFieldsFilled = false;
return (
@@ -161,6 +161,7 @@ export const EditDatabaseSpecificDataComponent = ({
host: e.target.value.trim().replace('https://', '').replace('http://', ''),
},
});
setIsConnectionTested(false);
}}
size="small"
className="max-w-[200px] grow"
@@ -199,6 +200,7 @@ export const EditDatabaseSpecificDataComponent = ({
...editingDatabase,
postgresql: { ...editingDatabase.postgresql, username: e.target.value.trim() },
});
setIsConnectionTested(false);
}}
size="small"
className="max-w-[200px] grow"

View File

@@ -44,7 +44,7 @@ export const ShowDatabaseSpecificDataComponent = ({ database }: Props) => {
<div className="mb-1 flex w-full items-center">
<div className="min-w-[150px]">Password</div>
<div>{database.postgresql?.password ? '*********' : ''}</div>
<div>{'*************'}</div>
</div>
<div className="mb-1 flex w-full items-center">

View File

@@ -176,27 +176,27 @@ export function EditNotifierComponent({
if (!notifier.name) return false;
if (notifier.notifierType === NotifierType.TELEGRAM && notifier.telegramNotifier) {
return validateTelegramNotifier(notifier.telegramNotifier);
return validateTelegramNotifier(!notifier.id, notifier.telegramNotifier);
}
if (notifier.notifierType === NotifierType.EMAIL && notifier.emailNotifier) {
return validateEmailNotifier(notifier.emailNotifier);
return validateEmailNotifier(!notifier.id, notifier.emailNotifier);
}
if (notifier.notifierType === NotifierType.WEBHOOK && notifier.webhookNotifier) {
return validateWebhookNotifier(notifier.webhookNotifier);
return validateWebhookNotifier(!notifier.id, notifier.webhookNotifier);
}
if (notifier.notifierType === NotifierType.SLACK && notifier.slackNotifier) {
return validateSlackNotifier(notifier.slackNotifier);
return validateSlackNotifier(!notifier.id, notifier.slackNotifier);
}
if (notifier.notifierType === NotifierType.DISCORD && notifier.discordNotifier) {
return validateDiscordNotifier(notifier.discordNotifier);
return validateDiscordNotifier(!notifier.id, notifier.discordNotifier);
}
if (notifier.notifierType === NotifierType.TEAMS && notifier.teamsNotifier) {
return validateTeamsNotifier(notifier.teamsNotifier);
return validateTeamsNotifier(!notifier.id, notifier.teamsNotifier);
}
return false;

View File

@@ -29,7 +29,7 @@ export function ShowEmailNotifierComponent({ notifier }: Props) {
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">SMTP password</div>
{notifier?.emailNotifier?.smtpPassword ? '*********' : ''}
{'*************'}
</div>
</>
);

View File

@@ -155,6 +155,10 @@ export function EditStorageComponent({
}
if (storage.type === StorageType.S3) {
if (storage.id) {
return storage.s3Storage?.s3Bucket;
}
return (
storage.s3Storage?.s3Bucket &&
storage.s3Storage?.s3AccessKey &&
@@ -163,6 +167,10 @@ export function EditStorageComponent({
}
if (storage.type === StorageType.GOOGLE_DRIVE) {
if (storage.id) {
return storage.googleDriveStorage?.clientId;
}
return (
storage.googleDriveStorage?.clientId &&
storage.googleDriveStorage?.clientSecret &&
@@ -171,6 +179,15 @@ export function EditStorageComponent({
}
if (storage.type === StorageType.NAS) {
if (storage.id) {
return (
storage.nasStorage?.host &&
storage.nasStorage?.port &&
storage.nasStorage?.share &&
storage.nasStorage?.username
);
}
return (
storage.nasStorage?.host &&
storage.nasStorage?.port &&

View File

@@ -16,16 +16,12 @@ export function ShowGoogleDriveStorageComponent({ storage }: Props) {
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Client Secret</div>
{storage?.googleDriveStorage?.clientSecret
? `${storage?.googleDriveStorage?.clientSecret.slice(0, 10)}***`
: '-'}
{`*************`}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">User Token</div>
{storage?.googleDriveStorage?.tokenJson
? `${storage?.googleDriveStorage?.tokenJson.slice(0, 10)}***`
: '-'}
{`*************`}
</div>
</>
);

View File

@@ -29,7 +29,7 @@ export function ShowNASStorageComponent({ storage }: Props) {
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Password</div>
{storage?.nasStorage?.password ? '*********' : '-'}
{'*************'}
</div>
<div className="mb-1 flex items-center">

View File

@@ -14,17 +14,17 @@ export function ShowS3StorageComponent({ storage }: Props) {
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Region</div>
{storage?.s3Storage?.s3Region}
{storage?.s3Storage?.s3Region || '-'}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Access Key</div>
{storage?.s3Storage?.s3AccessKey ? '*********' : ''}
{'*************'}
</div>
<div className="mb-1 flex items-center">
<div className="min-w-[110px]">Secret Key</div>
{storage?.s3Storage?.s3SecretKey ? '*********' : ''}
{'*************'}
</div>
<div className="mb-1 flex items-center">

View File

@@ -0,0 +1,105 @@
import { Modal, Spin } from 'antd';
import { useEffect, useState } from 'react';
import { GOOGLE_DRIVE_OAUTH_REDIRECT_URL } from '../constants';
import { type Storage, StorageType } from '../entity/storages';
import type { StorageOauthDto } from '../entity/storages/models/StorageOauthDto';
import { EditStorageComponent } from '../features/storages/ui/edit/EditStorageComponent';
// Landing page for the storage-provider OAuth redirect.
// Reads the `oauthDto` query parameter, exchanges the Google authorization
// code for a token, then lets the user finish configuring the storage.
export function OauthStorageComponent() {
  const [storage, setStorage] = useState<Storage | undefined>();

  // Exchanges the Google OAuth authorization code for a token and stores the
  // resulting token JSON on the storage config, which switches the UI from
  // the spinner to the edit form.
  // NOTE(review): this posts the client_secret from the browser; confirm the
  // secret is user-provided per storage (not an app-wide secret) — otherwise
  // the exchange should happen server-side.
  const exchangeGoogleOauthCode = async (oauthDto: StorageOauthDto) => {
    if (!oauthDto.storage.googleDriveStorage) {
      alert('Google Drive storage configuration not found');
      return;
    }

    const { clientId, clientSecret } = oauthDto.storage.googleDriveStorage;
    const { authCode } = oauthDto;

    try {
      // Exchange authorization code for access token
      const response = await fetch('https://oauth2.googleapis.com/token', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/x-www-form-urlencoded',
        },
        body: new URLSearchParams({
          code: authCode,
          client_id: clientId,
          client_secret: clientSecret,
          redirect_uri: GOOGLE_DRIVE_OAUTH_REDIRECT_URL,
          grant_type: 'authorization_code',
        }),
      });

      if (!response.ok) {
        throw new Error(`OAuth exchange failed: ${response.statusText}`);
      }

      const tokenData = await response.json();
      oauthDto.storage.googleDriveStorage.tokenJson = JSON.stringify(tokenData);
      setStorage(oauthDto.storage);
    } catch (error) {
      alert(`Failed to exchange OAuth code: ${error}`);
    }
  };

  // On mount: parse the redirect payload and kick off the token exchange.
  useEffect(() => {
    const oauthDtoParam = new URLSearchParams(window.location.search).get('oauthDto');

    if (!oauthDtoParam) {
      alert('OAuth param not found');
      return;
    }

    // FIX: JSON.parse on a user-controlled query parameter can throw; the
    // previous code crashed the component on a malformed/truncated redirect.
    // Fail gracefully with an alert instead.
    let oauthDto: StorageOauthDto;
    try {
      oauthDto = JSON.parse(decodeURIComponent(oauthDtoParam));
    } catch {
      alert('OAuth param is malformed');
      return;
    }

    if (oauthDto.storage.type === StorageType.GOOGLE_DRIVE) {
      if (!oauthDto.storage.googleDriveStorage) {
        alert('Google Drive storage not found');
        return;
      }

      exchangeGoogleOauthCode(oauthDto);
    }
    // NOTE(review): non-Google storage types leave the spinner up forever —
    // confirm whether other providers are expected to reach this page.
  }, []);

  // Until the token exchange completes we only show a spinner.
  if (!storage) {
    return (
      <div className="mt-20 flex justify-center">
        <Spin />
      </div>
    );
  }

  return (
    <div>
      <Modal
        title="Add storage"
        footer={<div />}
        open
        onCancel={() => {
          window.location.href = '/';
        }}
      >
        <div className="my-3 max-w-[250px] text-gray-500">
          Storage - is a place where backups will be stored (local disk, S3, etc.)
        </div>

        <EditStorageComponent
          workspaceId={storage.workspaceId}
          isShowClose={false}
          onClose={() => {}}
          isShowName={false}
          editingStorage={storage}
          onChanged={() => {
            window.location.href = '/';
          }}
        />
      </Modal>
    </div>
  );
}

View File

@@ -317,6 +317,7 @@ export const MainScreenComponent = () => {
contentHeight={contentHeight}
workspace={selectedWorkspace}
isCanManageNotifiers={isCanManageDBs}
key={`notifiers-${selectedWorkspace.id}`}
/>
)}
{selectedTab === 'storages' && selectedWorkspace && (
@@ -324,6 +325,7 @@ export const MainScreenComponent = () => {
contentHeight={contentHeight}
workspace={selectedWorkspace}
isCanManageStorages={isCanManageDBs}
key={`storages-${selectedWorkspace.id}`}
/>
)}
{selectedTab === 'databases' && selectedWorkspace && (
@@ -331,6 +333,7 @@ export const MainScreenComponent = () => {
contentHeight={contentHeight}
workspace={selectedWorkspace}
isCanManageDBs={isCanManageDBs}
key={`databases-${selectedWorkspace.id}`}
/>
)}
{selectedTab === 'settings' && selectedWorkspace && user && (
@@ -338,6 +341,7 @@ export const MainScreenComponent = () => {
workspaceResponse={selectedWorkspace}
contentHeight={contentHeight}
user={user}
key={`settings-${selectedWorkspace.id}`}
/>
)}
</>