diff --git a/.github/workflows/build-image-manual.yml b/.github/workflows/build-image-manual.yml new file mode 100644 index 00000000..30b8c732 --- /dev/null +++ b/.github/workflows/build-image-manual.yml @@ -0,0 +1,43 @@ +name: Build Image Manual Branch + +on: + workflow_dispatch: + inputs: + branch: + description: 'Branch to build and release' + required: true + default: 'develop' + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + with: + ref: ${{ github.event.inputs.branch }} + + - name: Build Frontend Docker Image + run: docker build . --file services/frontend/Dockerfile --tag justnz/exflow:frontend-${{ github.sha }} --tag justnz/exflow:frontend-${{ github.ref_name }} + + - name: Build Backend Docker Image + run: docker build . --file services/backend/Dockerfile --tag justnz/exflow:backend-${{ github.sha }} --tag justnz/exflow:backend-${{ github.ref_name }} + + - name: Build exFlow Docker Image + run: docker build . --file Dockerfile --tag justnz/exflow:${{ github.sha }} --tag justnz/exflow:${{ github.ref_name }} + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Push Docker Images + run: | + docker push justnz/exflow:frontend-${{ github.sha }} + docker push justnz/exflow:frontend-${{ github.ref_name }} + docker push justnz/exflow:backend-${{ github.sha }} + docker push justnz/exflow:backend-${{ github.ref_name }} + docker push justnz/exflow:${{ github.sha }} + docker push justnz/exflow:${{ github.ref_name }} diff --git a/.gitignore b/.gitignore index 64349e68..17399d40 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,7 @@ services/backend/*.exe~ services/backend/*.dll services/backend/*.so services/backend/*.dylib +services/backend/*.sql # Test binary, built with `go test -c` services/backend/*.test diff --git a/Dockerfile b/Dockerfile index e30e6c8a..1ff2dffd 100644 --- a/Dockerfile 
+++ b/Dockerfile @@ -1,7 +1,7 @@ -FROM node:23-alpine AS base +FROM node:24.7-alpine AS base # Stage 1: Build the frontend -FROM node:23-alpine AS frontend-builder +FROM node:24.7-alpine AS frontend-builder RUN apk add --no-cache libc6-compat WORKDIR /app/frontend COPY services/frontend/package.json services/frontend/pnpm-lock.yaml ./ @@ -49,11 +49,10 @@ RUN mkdir .next \ COPY --from=frontend-builder --chown=nextjs:nodejs /app/frontend/.next/standalone ./ COPY --from=frontend-builder --chown=nextjs:nodejs /app/frontend/.next/static ./.next/static -# Copy .env file to the working directory -COPY --from=frontend-builder --chown=nextjs:nodejs /app/frontend/.env /app/.env +RUN chown -R nextjs:nodejs /app -RUN mkdir -p /etc/exflow -COPY services/backend/config/config.yaml /etc/exflow/backend_config.yaml +RUN mkdir -p /etc/exflow \ + && chown -R nextjs:nodejs /etc/exflow # Set environment variables ENV NODE_ENV=production @@ -69,4 +68,4 @@ USER nextjs ENTRYPOINT ["/sbin/tini", "--"] # Start the backend and frontend -CMD ["sh", "-c", "./exflow-backend --config /etc/exflow/backend_config.yaml & node /app/server.js"] \ No newline at end of file +CMD ["sh", "-c", "./exflow-backend --config /etc/exflow/config.yaml & node /app/server.js"] \ No newline at end of file diff --git a/ENCRYPTION_SECURITY.md b/ENCRYPTION_SECURITY.md new file mode 100644 index 00000000..4fda6874 --- /dev/null +++ b/ENCRYPTION_SECURITY.md @@ -0,0 +1,110 @@ +# Secure Project-Based Encryption Setup + +## Overview + +The enhanced encryption system now uses **key derivation** instead of storing encryption keys directly in the database. This significantly improves security by ensuring that even if someone gains access to your database, they cannot decrypt the data without the master secret. + +## How It Works + +1. **Master Secret**: A single secret stored outside the database (environment variable, config file, external key management system) +2. 
**Project Salts**: Random salts generated per project and stored in the database +3. **Key Derivation**: Encryption keys are derived using PBKDF2(master_secret + project_salt) + +Even if an attacker gains access to your database, they only see: +- Encrypted data +- Random salts (which are useless without the master secret) + +## Configuration + +### Option 1: Environment Variable (Recommended for Production) +```bash +# Set the master secret as an environment variable +export EXFLOW_ENCRYPTION_MASTER_SECRET="your-very-long-and-secure-master-secret-here" +``` + +### Option 2: Configuration File +```yaml +# In your backend config.yaml +encryption: + master_secret: "your-very-long-and-secure-master-secret-here" + # Fallback key for legacy data (optional) + key: "legacy-key-for-backward-compatibility" +``` + +## Master Secret Requirements + +- **Length**: Minimum 32 characters, recommended 64+ characters +- **Randomness**: Use a cryptographically secure random generator +- **Characters**: Include letters, numbers, and symbols +- **Uniqueness**: Must be unique per exFlow installation + +### Generate a Secure Master Secret + +```bash +# Option 1: Using OpenSSL +openssl rand -base64 64 + +# Option 2: Using Python +python3 -c "import secrets; print(secrets.token_urlsafe(64))" + +# Option 3: Using Node.js +node -e "console.log(require('crypto').randomBytes(64).toString('base64'))" +``` + +## Security Benefits + +1. **Database Compromise Protection**: Even with full database access, encrypted data remains secure +2. **Per-Project Isolation**: Each project uses a unique derived key +3. **Key Rotation**: Changing the master secret or project salt rotates all encryption +4. **Audit Trail**: Key derivation can be logged and monitored +5. 
**Compliance**: Meets most regulatory requirements for encryption key management + +## Migration from Legacy System + +The system maintains backward compatibility: + +1. **New Projects**: Automatically use the secure key derivation system +2. **Existing Projects**: Continue working with existing keys until migrated +3. **Gradual Migration**: Projects can be migrated one by one using the key rotation feature + +## Best Practices + +### Storage +- **Never** store the master secret in the database +- Use environment variables or external key management systems +- Rotate the master secret periodically (quarterly/annually) +- Keep secure backups of the master secret + +### Access Control +- Limit access to the master secret to essential personnel only +- Use separate master secrets for different environments (dev/staging/prod) +- Log all access to encryption keys + +### Monitoring +- Monitor for unusual encryption/decryption patterns +- Alert on encryption failures +- Regular security audits of key management processes + +## Troubleshooting + +### "Master secret not configured" Error +1. Ensure the master secret is set in your configuration +2. Restart the backend service after setting the secret +3. Check environment variable spelling + +### "Failed to decrypt" Errors +1. Verify the master secret hasn't changed +2. Check if the project salt was corrupted +3. 
Consider falling back to legacy key mode temporarily + +### Performance Considerations +- Key derivation adds ~1-2ms per operation +- Consider caching derived keys in memory for high-throughput scenarios +- Monitor CPU usage during bulk encryption operations + +## Example Implementation + +```go +// Environment variable +masterSecret := os.Getenv("EXFLOW_ENCRYPTION_MASTER_SECRET") +``` diff --git a/README.md b/README.md index 2725a49c..25db4d53 100644 --- a/README.md +++ b/README.md @@ -117,9 +117,10 @@ To get started with the exFlow project, follow these steps: password: postgres encryption: - enabled: true - # maximum 32 characters - key: null + # Minimum 32 characters, recommended 64+ characters + master_secret: "your-very-long-and-secure-master-secret-here" + # Fallback key for legacy data (optional) + key: "legacy-key-for-backward-compatibility" jwt: secret: null diff --git a/docker-compose.yaml b/docker-compose.yaml index 6a4b0635..927a7c3d 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -20,20 +20,9 @@ services: - "3000:3000" # exFlow frontend - "8080:8080" # exFlow backend volumes: - - ./config.yaml:/etc/exflow/backend_config.yaml - # environment: - # Adjust these if your config.yaml uses different DB settings - # BACKEND_LOG_LEVEL: info - # BACKEND_PORT: 8080 - # BACKEND_DATABASE_SERVER: db - # BACKEND_DATABASE_PORT: 5432 - # BACKEND_DATABASE_NAME: postgres - # BACKEND_DATABASE_USER: postgres - # BACKEND_DATABASE_PASSWORD: postgres - # BACKEND_ENCRYPTION_ENABLED: "true" - # BACKEND_ENCRYPTION_KEY: "change-me" - # BACKEND_JWT_SECRET: "change-me" - entrypoint: ["/bin/sh", "-c", "until pg_isready -h db -p 5432 -U postgres; do sleep 1; done; exec ./exflow-backend --config /etc/exflow/backend_config.yaml & exec node /app/server.js"] + - exflow_data:/etc/exflow + entrypoint: ["/bin/sh", "-c", "until pg_isready -h db -p 5432 -U postgres; do sleep 1; done; exec ./exflow-backend --config /etc/exflow/config.yaml & exec node /app/server.js"] volumes: - 
db_data: \ No newline at end of file + db_data: + exflow_data: \ No newline at end of file diff --git a/services/backend/Dockerfile b/services/backend/Dockerfile index 58ffc65b..85c42b1b 100644 --- a/services/backend/Dockerfile +++ b/services/backend/Dockerfile @@ -1,6 +1,6 @@ FROM golang:1.24-alpine as builder -WORKDIR /backend +WORKDIR /app/backend COPY services/backend/go.mod services/backend/go.sum ./ RUN go mod download @@ -8,18 +8,23 @@ RUN go mod download COPY services/backend/ ./ # Build -RUN CGO_ENABLED=0 GOOS=linux go build -o /exflow-backend +RUN go build -o exflow-backend -FROM alpine:3.12 as runner +FROM alpine:3.22 as runner WORKDIR /app -COPY --from=builder /exflow-backend /exflow-backend +COPY --from=builder /app/backend/exflow-backend /app/ -RUN mkdir /app/config -COPY services/backend/config/config.yaml /etc/exflow/backend_config.yaml +RUN addgroup --system --gid 1001 exflow +RUN adduser --system --uid 1001 exflow + +RUN mkdir -p /etc/exflow \ + && chown -R exflow:exflow /etc/exflow + +RUN chown -R exflow:exflow /app VOLUME [ "/etc/exflow" ] EXPOSE 8080 -CMD [ "/exflow-backend", "--config", "/etc/exflow/backend_config.yaml" ] +CMD [ "/exflow-backend", "--config", "/etc/exflow/config.yaml" ] diff --git a/services/backend/config/config.yaml b/services/backend/config/config.yaml index 03826a27..e9f26b73 100644 --- a/services/backend/config/config.yaml +++ b/services/backend/config/config.yaml @@ -12,9 +12,10 @@ database: password: postgres encryption: - enabled: true - # maximum 32 characters - key: null + # Minimum 32 characters, recommended 64+ characters + master_secret: "your-very-long-and-secure-master-secret-here" + # Fallback key for legacy data (optional) + key: "legacy-key-for-backward-compatibility" jwt: secret: null diff --git a/services/backend/config/main.go b/services/backend/config/main.go index 6e38ef71..07956031 100644 --- a/services/backend/config/main.go +++ b/services/backend/config/main.go @@ -46,8 +46,8 @@ type JWTConf struct { 
} type EncryptionConf struct { - Enabled bool `mapstructure:"enabled" validate:"required"` - Key string `mapstructure:"key"` + Key string `mapstructure:"key"` + MasterSecret string `mapstructure:"master_secret" validate:"required"` } type RunnerConf struct { @@ -84,8 +84,8 @@ func (cm *ConfigurationManager) LoadConfig(configFile string) error { "database.name": "BACKEND_DATABASE_NAME", "database.user": "BACKEND_DATABASE_USER", "database.password": "BACKEND_DATABASE_PASSWORD", - "encryption.enabled": "BACKEND_ENCRYPTION_ENABLED", "encryption.key": "BACKEND_ENCRYPTION_KEY", + "encryption.master_secret": "BACKEND_ENCRYPTION_MASTER_SECRET", "jwt.secret": "BACKEND_JWT_SECRET", "runner.shared_runner_secret": "BACKEND_RUNNER_SHARED_RUNNER_SECRET", } @@ -118,6 +118,10 @@ func (cm *ConfigurationManager) LoadConfig(configFile string) error { // Assign to package-level variable for global access Config = &config + if config.Encryption.MasterSecret == "" { + log.Fatal("Master secret is required for encryption") + } + log.WithFields(log.Fields{ "file": configFile, "content": cm.viper.AllSettings(), diff --git a/services/backend/database/migrations/10_flows_type_col.go b/services/backend/database/migrations/10_flows_type_col.go new file mode 100644 index 00000000..e5bdc6f5 --- /dev/null +++ b/services/backend/database/migrations/10_flows_type_col.go @@ -0,0 +1,60 @@ +package migrations + +import ( + "context" + "fmt" + + log "github.com/sirupsen/logrus" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister(func(ctx context.Context, db *bun.DB) error { + return addTypeToFlows(ctx, db) + }, func(ctx context.Context, db *bun.DB) error { + return removeTypeFromFlows(ctx, db) + }) +} + +func addTypeToFlows(ctx context.Context, db *bun.DB) error { + // add type column + exists, err := columnExists(ctx, db, "flows", "type") + if err != nil { + return fmt.Errorf("failed to check if type column exists: %v", err) + } + if !exists { + _, err := db.NewAddColumn(). 
+ Table("flows"). + ColumnExpr("type TEXT DEFAULT 'default'"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to add type column to flows table: %v", err) + } + } else { + log.Debug("type column already exists in flows table") + } + + return nil +} + +func removeTypeFromFlows(ctx context.Context, db *bun.DB) error { + exists, err := columnExists(ctx, db, "flows", "type") + if err != nil { + return fmt.Errorf("failed to check if type column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). + Table("flows"). + Column("type"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove type column from flows table: %v", err) + } + } else { + log.Debug("type column already removed from flows table") + } + + return nil +} diff --git a/services/backend/database/migrations/11_create_alerts_table.go b/services/backend/database/migrations/11_create_alerts_table.go new file mode 100644 index 00000000..20311785 --- /dev/null +++ b/services/backend/database/migrations/11_create_alerts_table.go @@ -0,0 +1,48 @@ +package migrations + +import ( + "context" + "fmt" + + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister(func(ctx context.Context, db *bun.DB) error { + return createAlertsSchema(ctx, db) + }, func(ctx context.Context, db *bun.DB) error { + return dropAlertsSchema(ctx, db) + }) +} + +func createAlertsSchema(ctx context.Context, db *bun.DB) error { + models := []interface{}{ + (*models.Alerts)(nil), + } + + for _, model := range models { + _, err := db.NewCreateTable().Model(model).IfNotExists().Exec(ctx) + if err != nil { + return fmt.Errorf("failed to create table: %v", err) + } + } + + return nil +} + +func dropAlertsSchema(ctx context.Context, db *bun.DB) error { + models := []interface{}{ + (*models.Alerts)(nil), + } + + for _, model := range models { + _, err := db.NewDropTable().Model(model).IfExists().Cascade().Exec(ctx) + if err != nil { + 
return fmt.Errorf("failed to drop table: %v", err) + } + } + + return nil +} diff --git a/services/backend/database/migrations/12_flows_alerts.go b/services/backend/database/migrations/12_flows_alerts.go new file mode 100644 index 00000000..6cb70a16 --- /dev/null +++ b/services/backend/database/migrations/12_flows_alerts.go @@ -0,0 +1,162 @@ +package migrations + +import ( + "context" + "fmt" + + log "github.com/sirupsen/logrus" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister(func(ctx context.Context, db *bun.DB) error { + return addAlertColsToFlows(ctx, db) + }, func(ctx context.Context, db *bun.DB) error { + return removeAlertColsFromFlows(ctx, db) + }) +} + +func addAlertColsToFlows(ctx context.Context, db *bun.DB) error { + // add patterns column + exists, err := columnExists(ctx, db, "flows", "patterns") + if err != nil { + return fmt.Errorf("failed to check if patterns column exists: %v", err) + } + if !exists { + _, err := db.NewAddColumn(). + Table("flows"). + ColumnExpr("patterns JSONB DEFAULT jsonb('[]')"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to add patterns column to flows table: %v", err) + } + } else { + log.Debug("patterns column already exists in flows table") + } + + exists, err = columnExists(ctx, db, "flows", "group_alerts") + if err != nil { + return fmt.Errorf("failed to check if group_alerts column exists: %v", err) + } + if !exists { + _, err := db.NewAddColumn(). + Table("flows"). + ColumnExpr("group_alerts BOOLEAN DEFAULT true"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to add group_alerts column to flows table: %v", err) + } + } else { + log.Debug("group_alerts column already exists in flows table") + } + + exists, err = columnExists(ctx, db, "flows", "group_alerts_identifier") + if err != nil { + return fmt.Errorf("failed to check if group_alerts_identifier column exists: %v", err) + } + if !exists { + _, err := db.NewAddColumn(). + Table("flows"). 
+ ColumnExpr("group_alerts_identifier TEXT DEFAULT ''"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to add group_alerts_identifier column to flows table: %v", err) + } + } else { + log.Debug("group_alerts_identifier column already exists in flows table") + } + + exists, err = columnExists(ctx, db, "flows", "alert_threshold") + if err != nil { + return fmt.Errorf("failed to check if alert_threshold column exists: %v", err) + } + if !exists { + _, err := db.NewAddColumn(). + Table("flows"). + ColumnExpr("alert_threshold INTEGER DEFAULT 0"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to add alert_threshold column to flows table: %v", err) + } + } else { + log.Debug("alert_threshold column already exists in flows table") + } + + return nil +} + +func removeAlertColsFromFlows(ctx context.Context, db *bun.DB) error { + exists, err := columnExists(ctx, db, "flows", "patterns") + if err != nil { + return fmt.Errorf("failed to check if patterns column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). + Table("flows"). + Column("patterns"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove patterns column from flows table: %v", err) + } + } else { + log.Debug("patterns column already removed from flows table") + } + + exists, err = columnExists(ctx, db, "flows", "group_alerts") + if err != nil { + return fmt.Errorf("failed to check if group_alerts column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). + Table("flows"). + Column("group_alerts"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove group_alerts column from flows table: %v", err) + } + } else { + log.Debug("group_alerts column already removed from flows table") + } + + exists, err = columnExists(ctx, db, "flows", "group_alerts_identifier") + if err != nil { + return fmt.Errorf("failed to check if group_alerts_identifier column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). 
+ Table("flows"). + Column("group_alerts_identifier"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove group_alerts_identifier column from flows table: %v", err) + } + } else { + log.Debug("group_alerts_identifier column already removed from flows table") + } + + exists, err = columnExists(ctx, db, "flows", "alert_threshold") + if err != nil { + return fmt.Errorf("failed to check if alert_threshold column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). + Table("flows"). + Column("alert_threshold"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove alert_threshold column from flows table: %v", err) + } + } else { + log.Debug("alert_threshold column already removed from flows table") + } + + return nil +} diff --git a/services/backend/database/migrations/13_execustions_alert_id.go b/services/backend/database/migrations/13_execustions_alert_id.go new file mode 100644 index 00000000..b52be4a4 --- /dev/null +++ b/services/backend/database/migrations/13_execustions_alert_id.go @@ -0,0 +1,60 @@ +package migrations + +import ( + "context" + "fmt" + + log "github.com/sirupsen/logrus" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister(func(ctx context.Context, db *bun.DB) error { + return addAlertIDToExecutions(ctx, db) + }, func(ctx context.Context, db *bun.DB) error { + return removeAlertIDFromExecutions(ctx, db) + }) +} + +func addAlertIDToExecutions(ctx context.Context, db *bun.DB) error { + // add alert_id column + exists, err := columnExists(ctx, db, "executions", "alert_id") + if err != nil { + return fmt.Errorf("failed to check if alert_id column exists: %v", err) + } + if !exists { + _, err := db.NewAddColumn(). + Table("executions"). + ColumnExpr("alert_id TEXT DEFAULT ''"). 
+ Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to add alert_id column to executions table: %v", err) + } + } else { + log.Debug("alert_id column already exists in executions table") + } + + return nil +} + +func removeAlertIDFromExecutions(ctx context.Context, db *bun.DB) error { + exists, err := columnExists(ctx, db, "executions", "alert_id") + if err != nil { + return fmt.Errorf("failed to check if alert_id column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). + Table("executions"). + Column("alert_id"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove alert_id column from executions table: %v", err) + } + } else { + log.Debug("alert_id column already removed from executions table") + } + + return nil +} diff --git a/services/backend/database/migrations/7_project_encryption_keys.go b/services/backend/database/migrations/7_project_encryption_keys.go new file mode 100644 index 00000000..e7c7bf94 --- /dev/null +++ b/services/backend/database/migrations/7_project_encryption_keys.go @@ -0,0 +1,67 @@ +package migrations + +import ( + "context" + "crypto/rand" + "encoding/hex" + + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister(func(ctx context.Context, db *bun.DB) error { + // Add encryption_key column to projects table + _, err := db.ExecContext(ctx, "ALTER TABLE projects ADD COLUMN IF NOT EXISTS encryption_key TEXT DEFAULT ''") + if err != nil { + return err + } + + // Add encryption_enabled column to projects table + _, err = db.ExecContext(ctx, "ALTER TABLE projects ADD COLUMN IF NOT EXISTS encryption_enabled BOOLEAN DEFAULT true") + if err != nil { + return err + } + + // Generate encryption salts for existing projects that don't have them + rows, err := db.QueryContext(ctx, "SELECT id FROM projects WHERE encryption_key = '' OR encryption_key IS NULL") + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + var projectID string + if err := rows.Scan(&projectID); 
err != nil { + continue // Skip problematic rows, don't fail the entire migration + } + + // Generate a new 32-byte salt (not a key!) + salt := make([]byte, 32) + if _, err := rand.Read(salt); err != nil { + continue // Skip if salt generation fails + } + hexSalt := hex.EncodeToString(salt) + + // Update the project with the new encryption salt + _, err = db.ExecContext(ctx, "UPDATE projects SET encryption_key = $1, encryption_enabled = true WHERE id = $2", hexSalt, projectID) + if err != nil { + continue // Skip if update fails + } + } + + return nil + }, func(ctx context.Context, db *bun.DB) error { + // Drop the added columns + _, err := db.ExecContext(ctx, "ALTER TABLE projects DROP COLUMN IF EXISTS encryption_key") + if err != nil { + return err + } + + _, err = db.ExecContext(ctx, "ALTER TABLE projects DROP COLUMN IF EXISTS encryption_enabled") + if err != nil { + return err + } + + return nil + }) +} diff --git a/services/backend/database/migrations/8_settings_encryption_migration.go b/services/backend/database/migrations/8_settings_encryption_migration.go new file mode 100644 index 00000000..61e6ae2c --- /dev/null +++ b/services/backend/database/migrations/8_settings_encryption_migration.go @@ -0,0 +1,60 @@ +package migrations + +import ( + "context" + "fmt" + + log "github.com/sirupsen/logrus" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister(func(ctx context.Context, db *bun.DB) error { + return addNewEncryptionMigratedToSettings(ctx, db) + }, func(ctx context.Context, db *bun.DB) error { + return removeNewEncryptionMigratedFromSettings(ctx, db) + }) +} + +func addNewEncryptionMigratedToSettings(ctx context.Context, db *bun.DB) error { + // add new_encryption_migrated column + exists, err := columnExists(ctx, db, "settings", "new_encryption_migrated") + if err != nil { + return fmt.Errorf("failed to check if new_encryption_migrated column exists: %v", err) + } + if !exists { + _, err := db.NewAddColumn(). + Table("settings"). 
+ ColumnExpr("new_encryption_migrated BOOLEAN DEFAULT FALSE"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to add new_encryption_migrated column to settings table: %v", err) + } + } else { + log.Debug("new_encryption_migrated column already exists in settings table") + } + + return nil +} + +func removeNewEncryptionMigratedFromSettings(ctx context.Context, db *bun.DB) error { + exists, err := columnExists(ctx, db, "settings", "new_encryption_migrated") + if err != nil { + return fmt.Errorf("failed to check if new_encryption_migrated column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). + Table("settings"). + Column("new_encryption_migrated"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove new_encryption_migrated column from settings table: %v", err) + } + } else { + log.Debug("new_encryption_migrated column already removed from settings table") + } + + return nil +} diff --git a/services/backend/database/migrations/9_remove_flows_cols.go b/services/backend/database/migrations/9_remove_flows_cols.go new file mode 100644 index 00000000..5437f901 --- /dev/null +++ b/services/backend/database/migrations/9_remove_flows_cols.go @@ -0,0 +1,55 @@ +package migrations + +import ( + "context" + "fmt" + + log "github.com/sirupsen/logrus" + "github.com/uptrace/bun" +) + +func init() { + Migrations.MustRegister(func(ctx context.Context, db *bun.DB) error { + return removeColsFromFlow(ctx, db) + }, func(ctx context.Context, db *bun.DB) error { + return removeColsFromFlow(ctx, db) + }) +} + +func removeColsFromFlow(ctx context.Context, db *bun.DB) error { + exists, err := columnExists(ctx, db, "flows", "encrypt_action_params") + if err != nil { + return fmt.Errorf("failed to check if encrypt_action_params column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). + Table("flows"). + Column("encrypt_action_params"). 
+ Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove encrypt_action_params column from flows table: %v", err) + } + } else { + log.Debug("encrypt_action_params column already removed from flows table") + } + + exists, err = columnExists(ctx, db, "flows", "encrypt_executions") + if err != nil { + return fmt.Errorf("failed to check if encrypt_executions column exists: %v", err) + } + if exists { + _, err := db.NewDropColumn(). + Table("flows"). + Column("encrypt_executions"). + Exec(ctx) + + if err != nil { + return fmt.Errorf("failed to remove encrypt_executions column from flows table: %v", err) + } + } else { + log.Debug("encrypt_executions column already removed from flows table") + } + + return nil +} diff --git a/services/backend/functions/background_checks/checkForFlowActionUpdates.go b/services/backend/functions/background_checks/checkForFlowActionUpdates.go index de42daaa..0b12c6a3 100644 --- a/services/backend/functions/background_checks/checkForFlowActionUpdates.go +++ b/services/backend/functions/background_checks/checkForFlowActionUpdates.go @@ -5,10 +5,8 @@ import ( "strings" "github.com/Masterminds/semver" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" "github.com/mohae/deepcopy" // Import for deep copy log "github.com/sirupsen/logrus" @@ -54,49 +52,57 @@ func processFlowsForProject(db *bun.DB, context context.Context, projectID strin return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", projectID).Scan(context) + if err != nil { + return + } + // Process each flow for _, flow := range flows { updatedFlow := deepcopy.Copy(flow).(models.Flows) // Deep copy the flow - updateFlowActions(&updatedFlow, runners) + updateFlowActions(&updatedFlow, runners, project, db) // Write updated flow to 
the database _, err := db.NewUpdate().Model(&updatedFlow).Where("id = ?", updatedFlow.ID).Set("failure_pipelines = ?, actions = ?", updatedFlow.FailurePipelines, updatedFlow.Actions).Exec(context) if err != nil { log.Error("Bot: Error updating flow actions. ", err) + continue } } } -func updateFlowActions(flow *models.Flows, runners []models.Runners) { +func updateFlowActions(flow *models.Flows, runners []models.Runners, project models.Projects, db *bun.DB) { // Check for action updates in the flow itself for j, action := range flow.Actions { if len(runners) == 0 { if action.UpdateAvailable { action.UpdateAvailable = false action.UpdateVersion = "" - action.UpdatedAction = &shared_models.Action{} + action.UpdatedAction = &models.Action{} flow.Actions[j] = action } } else { - updatedAction := updateActionIfNeeded(flow, action, runners) + updatedAction := updateActionIfNeeded(flow, action, runners, project, db) flow.Actions[j] = updatedAction } } // Check for action updates in the failure pipelines for i, failurePipeline := range flow.FailurePipelines { - updatedPipeline := deepcopy.Copy(failurePipeline).(shared_models.FailurePipeline) // Deep copy the pipeline + updatedPipeline := deepcopy.Copy(failurePipeline).(models.FailurePipeline) // Deep copy the pipeline for j, action := range updatedPipeline.Actions { if len(runners) == 0 { if action.UpdateAvailable { action.UpdateAvailable = false action.UpdateVersion = "" - action.UpdatedAction = &shared_models.Action{} + action.UpdatedAction = &models.Action{} updatedPipeline.Actions[j] = action } } else { - updatedAction := updateActionIfNeeded(flow, action, runners) + updatedAction := updateActionIfNeeded(flow, action, runners, project, db) updatedPipeline.Actions[j] = updatedAction } } @@ -104,7 +110,7 @@ func updateFlowActions(flow *models.Flows, runners []models.Runners) { } } -func updateActionIfNeeded(flow *models.Flows, action shared_models.Action, runners []models.Runners) shared_models.Action { +func 
updateActionIfNeeded(flow *models.Flows, action models.Action, runners []models.Runners, project models.Projects, db *bun.DB) models.Action { for _, runner := range runners { for _, plugin := range runner.Plugins { if action.Plugin == strings.ToLower(plugin.Name) { @@ -121,7 +127,7 @@ func updateActionIfNeeded(flow *models.Flows, action shared_models.Action, runne } if pluginVersion.GreaterThan(actionVersion) { - return createUpdatedAction(flow, action, plugin) + return createUpdatedAction(flow, action, plugin, project, db) } } } @@ -129,13 +135,13 @@ func updateActionIfNeeded(flow *models.Flows, action shared_models.Action, runne return action } -func createUpdatedAction(flow *models.Flows, action shared_models.Action, plugin shared_models.Plugin) shared_models.Action { - updatedAction := deepcopy.Copy(action).(shared_models.Action) // Deep copy the action +func createUpdatedAction(flow *models.Flows, action models.Action, plugin models.Plugin, project models.Projects, db *bun.DB) models.Action { + updatedAction := deepcopy.Copy(action).(models.Action) // Deep copy the action updatedAction.UpdateAvailable = true updatedAction.UpdateVersion = plugin.Version // Create a deep copy of plugin.Action to avoid shared references - updatedPluginAction := deepcopy.Copy(plugin.Action).(shared_models.Action) + updatedPluginAction := deepcopy.Copy(plugin.Action).(models.Action) updatedPluginAction.Version = plugin.Version updatedAction.UpdatedAction = &updatedPluginAction @@ -150,9 +156,9 @@ func createUpdatedAction(flow *models.Flows, action shared_models.Action, plugin // Otherwise, use the default value updatedAction.UpdatedAction.Params[uP].Value = updatedParam.Default - if config.Config.Encryption.Enabled && flow.EncryptActionParams { + if project.EncryptionEnabled { var err error - updatedAction.UpdatedAction.Params[uP], err = encryption.EncryptParam(updatedAction.UpdatedAction.Params[uP]) + updatedAction.UpdatedAction.Params[uP], err = 
encryption.EncryptParamWithProject(updatedAction.UpdatedAction.Params[uP], project.ID.String(), db) if err != nil { log.Errorf("Bot: Error encrypting action param %s: %v", updatedAction.UpdatedAction.Params[uP].Key, err) } diff --git a/services/backend/functions/background_checks/checkHangingExecutionSteps.go b/services/backend/functions/background_checks/checkHangingExecutionSteps.go index d05072b1..9cfbe0c8 100644 --- a/services/backend/functions/background_checks/checkHangingExecutionSteps.go +++ b/services/backend/functions/background_checks/checkHangingExecutionSteps.go @@ -6,7 +6,6 @@ import ( "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" log "github.com/sirupsen/logrus" "github.com/uptrace/bun" @@ -42,13 +41,22 @@ func checkHangingExecutionSteps(db *bun.DB) { continue } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + log.Error("Bot: Error getting project data for flow ", flow.ID, err) + continue + } + // if the execution is finished, let the step fail if execution.Status == "success" || execution.Status == "error" || execution.Status == "canceled" || execution.Status == "noPatternMatch" || execution.Status == "recovered" { // check for encryption and decrypt messages - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.DecryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.DecryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { log.Error("Bot: Error encrypting execution step action messages", err) + continue } step.Encrypted = true @@ -56,9 +64,9 @@ func checkHangingExecutionSteps(db *bun.DB) { 
step.Status = "error" step.FinishedAt = time.Now() - step.Messages = append(step.Messages, shared_models.Message{ + step.Messages = append(step.Messages, models.Message{ Title: "Automated Check", - Lines: []shared_models.Line{ + Lines: []models.Line{ { Content: "Execution is already finished, marking step as error", Color: "danger", @@ -68,10 +76,11 @@ func checkHangingExecutionSteps(db *bun.DB) { }) // check for encryption and encrypt messages - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { log.Error("Bot: Error encrypting execution step action messages", err) + continue } step.Encrypted = true @@ -80,6 +89,7 @@ func checkHangingExecutionSteps(db *bun.DB) { _, err := db.NewUpdate().Model(&step).Column("status", "encrypted", "messages", "finished_at").Where("id = ?", step.ID).Exec(context) if err != nil { log.Error("Bot: Error updating step", err) + continue } // set execution status to error if it is not already set @@ -93,6 +103,7 @@ func checkHangingExecutionSteps(db *bun.DB) { _, err := db.NewUpdate().Model(&execution).Column("status", "finished_at").Where("id = ?", execution.ID).Exec(context) if err != nil { log.Error("Bot: Error updating execution status to error", err) + continue } } continue diff --git a/services/backend/functions/background_checks/checkHangingExecutions.go b/services/backend/functions/background_checks/checkHangingExecutions.go index 2021416c..424dcd7f 100644 --- a/services/backend/functions/background_checks/checkHangingExecutions.go +++ b/services/backend/functions/background_checks/checkHangingExecutions.go @@ -6,7 +6,6 @@ import ( "github.com/v1Flows/exFlow/services/backend/functions/encryption" 
"github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" log "github.com/sirupsen/logrus" "github.com/uptrace/bun" @@ -36,18 +35,27 @@ func checkHangingExecutions(db *bun.DB) { err = db.NewSelect().Model(&flow).Where("id = ?", execution.FlowID).Scan(context) if err != nil { log.Error("Bot: Error getting flow data", err) + continue } - step := shared_models.ExecutionSteps{ + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + log.Error("Bot: Error getting project data", err) + continue + } + + step := models.ExecutionSteps{ ExecutionID: execution.ID.String(), - Action: shared_models.Action{ + Action: models.Action{ Name: "Automated Check", Icon: "hugeicons:robotic", }, - Messages: []shared_models.Message{ + Messages: []models.Message{ { Title: "Automated Check", - Lines: []shared_models.Line{ + Lines: []models.Line{ { Content: "Last execution heartbeat was more than 15 seconds ago", Color: "danger", @@ -68,10 +76,11 @@ func checkHangingExecutions(db *bun.DB) { } // check for encryption - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { log.Error("Bot: Error encrypting execution step action messages", err) + continue } step.Encrypted = true @@ -80,17 +89,20 @@ func checkHangingExecutions(db *bun.DB) { _, err := db.NewInsert().Model(&step).Exec(context) if err != nil { log.Error("Bot: Error adding error step", err) + continue } _, err = db.NewUpdate().Model(&execution).Set("status = 'error'").Set("finished_at = ?", time.Now()).Where("id = ?", execution.ID).Exec(context) if err != 
nil { log.Error("Bot: Error updating execution", err) + continue } var steps []models.ExecutionSteps err = db.NewSelect().Model(&steps).Where("execution_id = ?", execution.ID).Scan(context) if err != nil { log.Error("Bot: Error getting steps for execution", err) + continue } // mark all steps as canceled if they are not finished @@ -103,18 +115,19 @@ func checkHangingExecutions(db *bun.DB) { step.CanceledBy = "Automated Check" // check for encryption and decrypt messages - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.DecryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.DecryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { log.Error("Bot: Error encrypting execution step action messages", err) + continue } step.Encrypted = true } - step.Messages = append(step.Messages, shared_models.Message{ + step.Messages = append(step.Messages, models.Message{ Title: "Automated Check", - Lines: []shared_models.Line{ + Lines: []models.Line{ { Content: "Execution was marked as error, step will be canceled", Color: "danger", @@ -124,10 +137,11 @@ func checkHangingExecutions(db *bun.DB) { }) // check for encryption and encrypt messages - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { log.Error("Bot: Error encrypting execution step action messages", err) + continue } step.Encrypted = true @@ -136,6 +150,7 @@ func checkHangingExecutions(db *bun.DB) { _, err := db.NewUpdate().Model(&step).Column("status", "encrypted", "messages", 
"started_at", "finished_at", "canceled_at", "canceled_by").Where("id = ?", step.ID).Exec(context) if err != nil { log.Error("Bot: Error updating step", err) + continue } } } diff --git a/services/backend/functions/background_checks/checkScheduledExecutions.go b/services/backend/functions/background_checks/checkScheduledExecutions.go index fa444397..67c36010 100644 --- a/services/backend/functions/background_checks/checkScheduledExecutions.go +++ b/services/backend/functions/background_checks/checkScheduledExecutions.go @@ -6,7 +6,6 @@ import ( "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" log "github.com/sirupsen/logrus" "github.com/uptrace/bun" @@ -31,6 +30,15 @@ func checkScheduledExecutions(db *bun.DB) { err = db.NewSelect().Model(&flow).Where("id = ?", execution.FlowID).Scan(context) if err != nil { log.Error("Bot: Error getting flow data", err) + continue + } + + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + log.Error("Bot: Error getting project data", err) + continue } // update the scheduled step to success @@ -38,6 +46,7 @@ func checkScheduledExecutions(db *bun.DB) { err = db.NewSelect().Model(&steps).Where("execution_id = ?", execution.ID).Scan(context) if err != nil { log.Error("Bot: Error getting steps for execution", err) + continue } // mark all steps as canceled if they are not finished @@ -47,18 +56,19 @@ func checkScheduledExecutions(db *bun.DB) { step.FinishedAt = time.Now() // check for encryption - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.DecryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = 
encryption.DecryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { - log.Error("Bot: Error encrypting execution step action messages", err) + log.Error("Bot: Error decrypting execution step action messages", err) + continue } step.Encrypted = true } - step.Messages = append(step.Messages, shared_models.Message{ + step.Messages = append(step.Messages, models.Message{ Title: "Scheduled", - Lines: []shared_models.Line{ + Lines: []models.Line{ { Content: "Scheduled time reached. Execution is now starting.", Color: "success", @@ -68,10 +78,11 @@ func checkScheduledExecutions(db *bun.DB) { }) // check for encryption - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { log.Error("Bot: Error encrypting execution step action messages", err) + continue } step.Encrypted = true @@ -80,19 +91,20 @@ func checkScheduledExecutions(db *bun.DB) { _, err = db.NewUpdate().Model(&step).Set("status = ?, finished_at = ?, messages = ?", step.Status, step.FinishedAt, step.Messages).Where("id = ?", step.ID).Exec(context) if err != nil { log.Error("Bot: Error updating step", err) + continue } // create execution step which tells that the execution is registerd and waiting for runner to pick it up - step := shared_models.ExecutionSteps{ + step := models.ExecutionSteps{ ExecutionID: execution.ID.String(), - Action: shared_models.Action{ + Action: models.Action{ Name: "Pick Up", Icon: "hugeicons:rocket", }, - Messages: []shared_models.Message{ + Messages: []models.Message{ { Title: "Pick Up", - Lines: []shared_models.Line{ + Lines: []models.Line{ { Content: "Waiting for runner to pick it up", Timestamp: time.Now(), @@ -106,10 
+118,11 @@ func checkScheduledExecutions(db *bun.DB) { } // check for encryption - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { log.Error("Bot: Error encrypting execution step action messages", err) + continue } step.Encrypted = true @@ -118,6 +131,7 @@ func checkScheduledExecutions(db *bun.DB) { _, err = db.NewInsert().Model(&step).Exec(context) if err != nil { log.Error("Bot: Error adding error step", err) + continue } } } @@ -125,6 +139,7 @@ func checkScheduledExecutions(db *bun.DB) { _, err = db.NewUpdate().Model(&execution).Set("status = 'pending'").Where("id = ?", execution.ID).Exec(context) if err != nil { log.Error("Bot: Error updating execution", err) + continue } } } diff --git a/services/backend/functions/background_checks/scheduleFlowExecutions.go b/services/backend/functions/background_checks/scheduleFlowExecutions.go index 30ca944c..0cb42d7d 100644 --- a/services/backend/functions/background_checks/scheduleFlowExecutions.go +++ b/services/backend/functions/background_checks/scheduleFlowExecutions.go @@ -10,7 +10,6 @@ import ( "github.com/uptrace/bun" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" ) func scheduleFlowExecutions(db *bun.DB) { @@ -27,6 +26,13 @@ func scheduleFlowExecutions(db *bun.DB) { // schedule new executions for each flow based on the schedule for _, flow := range flows { + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + return + } + // get all executions for that 
flow that are triggered by schedule var lastScheduledExecution []models.Executions count, err := db.NewSelect(). @@ -46,10 +52,10 @@ func scheduleFlowExecutions(db *bun.DB) { var currentTime time.Time if count == 0 { currentTime = time.Now() - returnedExecutionTime := createExecution(currentTime, flow, db, context) + returnedExecutionTime := createExecution(currentTime, flow, db, context, project) // directly schedule the next execution - createExecution(returnedExecutionTime, flow, db, context) + createExecution(returnedExecutionTime, flow, db, context, project) } else { currentTime = lastScheduledExecution[0].ScheduledAt @@ -57,13 +63,13 @@ func scheduleFlowExecutions(db *bun.DB) { currentTime = time.Now() } - createExecution(currentTime, flow, db, context) + createExecution(currentTime, flow, db, context, project) } } } -func createExecution(currentTime time.Time, flow models.Flows, db *bun.DB, context context.Context) (scheduledAt time.Time) { +func createExecution(currentTime time.Time, flow models.Flows, db *bun.DB, context context.Context, project models.Projects) (scheduledAt time.Time) { // calculate the next execution time var nextExecutionTime time.Time switch flow.ScheduleEveryUnit { @@ -92,16 +98,16 @@ func createExecution(currentTime time.Time, flow models.Flows, db *bun.DB, conte } // create execution step which tells that the execution is registerd and waiting for runner to pick it up - step := shared_models.ExecutionSteps{ + step := models.ExecutionSteps{ ExecutionID: execution.ID.String(), - Action: shared_models.Action{ + Action: models.Action{ Name: "Scheduled", Icon: "hugeicons:time-schedule", }, - Messages: []shared_models.Message{ + Messages: []models.Message{ { Title: "Scheduled", - Lines: []shared_models.Line{ + Lines: []models.Line{ { Content: "Execution is registered and is waiting for the scheduled time to start", Timestamp: time.Now(), @@ -119,8 +125,8 @@ func createExecution(currentTime time.Time, flow models.Flows, db *bun.DB, conte } 
// check for encryption - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { log.Error("Bot: Error encrypting execution step action messages. ", err) return diff --git a/services/backend/functions/encryption/alert_payload.go b/services/backend/functions/encryption/alert_payload.go new file mode 100644 index 00000000..72a19c4a --- /dev/null +++ b/services/backend/functions/encryption/alert_payload.go @@ -0,0 +1,100 @@ +package encryption + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "encoding/hex" + "encoding/json" + "errors" + "io" + + "github.com/uptrace/bun" +) + +func EncryptPayload(payload json.RawMessage, projectID string, db *bun.DB) (json.RawMessage, error) { + // Validate that the input payload is valid JSON + var validationInterface interface{} + if err := json.Unmarshal(payload, &validationInterface); err != nil { + return nil, errors.New("input payload is not valid JSON: " + err.Error()) + } + + encryptionKey, err := getEncryptionKey(projectID, db) + if err != nil { + return nil, err + } + + block, err := aes.NewCipher(encryptionKey) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + // Generate a nonce for GCM + nonce := make([]byte, gcm.NonceSize()) + if _, err := io.ReadFull(rand.Reader, nonce); err != nil { + return nil, err + } + + // Encrypt the JSON value + ciphertext := gcm.Seal(nonce, nonce, payload, nil) + + // Encode the ciphertext as hex to make it safe for database storage + hexEncoded := hex.EncodeToString(ciphertext) + + return json.RawMessage(`"` + hexEncoded + `"`), nil +} + +func DecryptPayload(payload json.RawMessage, projectID 
string, db *bun.DB) (json.RawMessage, error) { + encryptionKey, err := getEncryptionKey(projectID, db) + if err != nil { + return nil, err + } + + block, err := aes.NewCipher(encryptionKey) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + // Decode the hex string from JSON string format + var hexString string + if err := json.Unmarshal(payload, &hexString); err != nil { + return nil, errors.New("failed to unmarshal JSON string: " + err.Error()) + } + + ciphertext, err := hex.DecodeString(hexString) + if err != nil { + return nil, errors.New("failed to decode hex string: " + err.Error()) + } + + if len(ciphertext) < gcm.NonceSize() { + return nil, errors.New("ciphertext too short") + } + + // Extract the nonce and ciphertext + nonce := ciphertext[:gcm.NonceSize()] + ciphertext = ciphertext[gcm.NonceSize():] + + // Decrypt the ciphertext + plaintext, err := gcm.Open(nil, nonce, ciphertext, nil) + if err != nil { + return nil, errors.New("failed to decrypt: " + err.Error()) + } + + // Validate that the decrypted data is valid JSON + var validationInterface interface{} + if err := json.Unmarshal(plaintext, &validationInterface); err != nil { + return nil, errors.New("decrypted payload is not valid JSON: " + err.Error()) + } + + return json.RawMessage(plaintext), nil +} diff --git a/services/backend/functions/encryption/migration.go b/services/backend/functions/encryption/migration.go new file mode 100644 index 00000000..31e597ba --- /dev/null +++ b/services/backend/functions/encryption/migration.go @@ -0,0 +1,146 @@ +package encryption + +import ( + "context" + + "github.com/uptrace/bun" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + log "github.com/sirupsen/logrus" +) + +// MigrateProjectEncryption migrates all encrypted data in a project from old key to new key +// This is useful when rotating encryption keys or migrating from global to project-specific encryption +func 
MigrateProjectsEncryption(oldKey string, db *bun.DB) error { + + // check if already migrated + var settings models.Settings + err := db.NewSelect().Model(&settings).Where("id = ?", 1).Scan(context.Background()) + if err != nil { + return err + } + if settings.NewEncryptionMigrated { + if oldKey != "" { + log.Info("Projects already migrated, skipping migration. You can remove the old key from the config.") + } + return nil + } + + log.Info("Migrating projects to new encryption algorithm...") + + var projects []models.Projects + err = db.NewSelect().Model(&projects).Scan(context.Background()) + if err != nil { + return err + } + + for _, project := range projects { + err = EnableProjectEncryption(project.ID.String(), db) + if err != nil { + return err + } + + // Get all flows for this project + var flows []models.Flows + err := db.NewSelect().Model(&flows).Where("project_id = ?", project.ID).Scan(context.Background()) + if err != nil { + return err + } + + for _, flow := range flows { + // Decrypt with old key and re-encrypt with new key + if len(flow.Actions) > 0 { + // Temporarily decrypt with old key + decryptedActions, err := DecryptParams(flow.Actions, true) + if err != nil { + continue + } + + // Re-encrypt with new encryption + encryptedActions, err := EncryptParamsWithProject(decryptedActions, flow.ProjectID, db) + if err != nil { + return err + } + + // Update the flow with re-encrypted data + _, err = db.NewUpdate().Model(&flow).Set("actions = ?", encryptedActions).Where("id = ?", flow.ID).Exec(context.Background()) + if err != nil { + return err + } + } + + // Handle failure pipeline actions + for i, pipeline := range flow.FailurePipelines { + if len(pipeline.Actions) > 0 { + // Temporarily decrypt with old key + decryptedActions, err := DecryptParams(pipeline.Actions, true) + if err != nil { + continue + } + + // Re-encrypt with new key + encryptedActions, err := EncryptParamsWithProject(decryptedActions, flow.ProjectID, db) + if err != nil { + return 
err + } + + flow.FailurePipelines[i].Actions = encryptedActions + } + } + + // Update failure pipelines + _, err = db.NewUpdate().Model(&flow).Set("failure_pipelines = ?", flow.FailurePipelines).Where("id = ?", flow.ID).Exec(context.Background()) + if err != nil { + return err + } + + // migrate execution step messages + var executions []models.Executions + err = db.NewSelect().Model(&executions).Where("flow_id = ?", flow.ID).Scan(context.Background()) + if err != nil { + return err + } + + for _, execution := range executions { + var steps []models.ExecutionSteps + err = db.NewSelect().Model(&steps).Where("execution_id = ?", execution.ID).Scan(context.Background()) + if err != nil { + return err + } + + for _, step := range steps { + // Decrypt with old key and re-encrypt with new key + if len(step.Messages) > 0 { + // Temporarily decrypt with old key + decryptedMessages, err := DecryptExecutionStepActionMessage(step.Messages) + if err != nil { + continue + } + + // Re-encrypt with new encryption + encryptedMessages, err := EncryptExecutionStepActionMessageWithProject(decryptedMessages, project.ID.String(), db) + if err != nil { + return err + } + + // Update the step with re-encrypted data + _, err = db.NewUpdate().Model(&step).Set("messages = ?", encryptedMessages).Where("id = ?", step.ID).Exec(context.Background()) + if err != nil { + return err + } + } + } + } + } + } + + // set new_encryption_migrated in settings + _, err = db.NewUpdate().Model(&models.Settings{}).Set("new_encryption_migrated = ?", true).Where("id = ?", 1).Exec(context.Background()) + if err != nil { + return err + } + + log.Info("Projects migrated successfully") + + return nil +} diff --git a/services/backend/functions/encryption/old_encryption.go b/services/backend/functions/encryption/old_encryption.go new file mode 100644 index 00000000..fc2feff8 --- /dev/null +++ b/services/backend/functions/encryption/old_encryption.go @@ -0,0 +1,160 @@ +package encryption + +import ( + "crypto/aes" + 
"crypto/cipher" + "encoding/base64" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + + "github.com/v1Flows/exFlow/services/backend/config" + "github.com/v1Flows/exFlow/services/backend/pkg/models" +) + +func DecryptParams(actions []models.Action, decryptPasswords bool) ([]models.Action, error) { + block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + for i, action := range actions { + for j, param := range action.Params { + // Skip decryption if the value is empty + if param.Value == "" { + continue + } + + if param.Type == "password" && !decryptPasswords { + continue + } + + // Skip decryption if the value is not encrypted + if !IsEncrypted(param.Value) { + continue + } + + // Decode the hex string + ciphertext, err := hex.DecodeString(param.Value) + if err != nil { + return nil, errors.New("failed to decode hex string: " + err.Error()) + } + + if len(ciphertext) < gcm.NonceSize() { + return nil, errors.New("ciphertext too short") + } + + // Extract the nonce and ciphertext + nonce := ciphertext[:gcm.NonceSize()] + ciphertext = ciphertext[gcm.NonceSize():] + + // Decrypt the ciphertext + plaintext, err := gcm.Open(nil, nonce, ciphertext, nil) + if err != nil { + return nil, errors.New("failed to decrypt: " + err.Error()) + } + + // Convert the decrypted JSON value back to the original type + var originalValue interface{} + if err := json.Unmarshal(plaintext, &originalValue); err != nil { + return nil, err + } + + param.Value = fmt.Sprintf("%v", originalValue) + actions[i].Params[j] = param + } + + // if action has an updated action, decrypt the params of the updated action + if action.UpdatedAction != nil { + for j, param := range action.UpdatedAction.Params { + // skip if value is not encrypted + if !IsEncrypted(param.Value) { + continue + } + + // Skip decryption if the value is empty + if param.Value == "" { + continue + } 
+ + if param.Type == "password" && !decryptPasswords { + continue + } + + // Decode the hex string + ciphertext, err := hex.DecodeString(param.Value) + if err != nil { + return nil, errors.New("failed to decode hex string: " + err.Error()) + } + + if len(ciphertext) < gcm.NonceSize() { + return nil, errors.New("ciphertext too short") + } + + // Extract the nonce and ciphertext + nonce := ciphertext[:gcm.NonceSize()] + ciphertext = ciphertext[gcm.NonceSize():] + + // Decrypt the ciphertext + plaintext, err := gcm.Open(nil, nonce, ciphertext, nil) + if err != nil { + return nil, errors.New("failed to decrypt: " + err.Error()) + } + + // Convert the decrypted JSON value back to the original type + var originalValue interface{} + if err := json.Unmarshal(plaintext, &originalValue); err != nil { + return nil, err + } + + param.Value = fmt.Sprintf("%v", originalValue) + action.UpdatedAction.Params[j] = param + } + } + } + + return actions, nil +} + +func DecryptExecutionStepActionMessage(encryptedMessage []models.Message) ([]models.Message, error) { + block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + for i := range encryptedMessage { + for line := range encryptedMessage[i].Lines { + encodedCiphertext := encryptedMessage[i].Lines[line].Content + ciphertext, err := base64.StdEncoding.DecodeString(encodedCiphertext) + if err != nil { + return nil, err + } + + nonceSize := gcm.NonceSize() + if len(ciphertext) < nonceSize { + return nil, errors.New("ciphertext too short") + } + + nonce, ciphertext := ciphertext[:nonceSize], ciphertext[nonceSize:] + plaintext, err := gcm.Open(nil, nonce, ciphertext, nil) + if err != nil { + return nil, err + } + + encryptedMessage[i].Lines[line].Content = string(plaintext) + } + } + + return encryptedMessage, nil +} diff --git a/services/backend/functions/encryption/payload.go 
b/services/backend/functions/encryption/payload.go deleted file mode 100644 index 8b31837b..00000000 --- a/services/backend/functions/encryption/payload.go +++ /dev/null @@ -1,63 +0,0 @@ -package encryption - -import ( - "github.com/v1Flows/exFlow/services/backend/config" - "crypto/aes" - "crypto/cipher" - "crypto/rand" - "encoding/base64" - "encoding/json" - "io" -) - -func EncryptPayload(payload json.RawMessage) (json.RawMessage, error) { - block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) - if err != nil { - return nil, err - } - - plaintext := []byte(payload) - ciphertext := make([]byte, aes.BlockSize+len(plaintext)) - iv := ciphertext[:aes.BlockSize] - - if _, err := io.ReadFull(rand.Reader, iv); err != nil { - return nil, err - } - - stream := cipher.NewCFBEncrypter(block, iv) - stream.XORKeyStream(ciphertext[aes.BlockSize:], plaintext) - - // Encode the ciphertext as base64 to ensure it can be stored as JSON - encodedCiphertext := base64.StdEncoding.EncodeToString(ciphertext) - encryptedPayload, err := json.Marshal(encodedCiphertext) - if err != nil { - return nil, err - } - - return json.RawMessage(encryptedPayload), nil -} - -func DecryptPayload(payload json.RawMessage) (json.RawMessage, error) { - block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) - if err != nil { - return nil, err - } - - var encodedCiphertext string - if err := json.Unmarshal(payload, &encodedCiphertext); err != nil { - return nil, err - } - - ciphertext, err := base64.StdEncoding.DecodeString(encodedCiphertext) - if err != nil { - return nil, err - } - - iv := ciphertext[:aes.BlockSize] - ciphertext = ciphertext[aes.BlockSize:] - - stream := cipher.NewCFBDecrypter(block, iv) - stream.XORKeyStream(ciphertext, ciphertext) - - return json.RawMessage(ciphertext), nil -} diff --git a/services/backend/functions/encryption/execution_step_action_message.go b/services/backend/functions/encryption/project_execution_messages.go similarity index 64% rename from 
services/backend/functions/encryption/execution_step_action_message.go rename to services/backend/functions/encryption/project_execution_messages.go index 776b0f40..40cd6c57 100644 --- a/services/backend/functions/encryption/execution_step_action_message.go +++ b/services/backend/functions/encryption/project_execution_messages.go @@ -8,12 +8,18 @@ import ( "errors" "io" - "github.com/v1Flows/exFlow/services/backend/config" - shared_models "github.com/v1Flows/shared-library/pkg/models" + "github.com/uptrace/bun" + "github.com/v1Flows/exFlow/services/backend/pkg/models" ) -func EncryptExecutionStepActionMessage(messages []shared_models.Message) ([]shared_models.Message, error) { - block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) +// EncryptExecutionStepActionMessageWithProject encrypts execution step messages using project-specific encryption +func EncryptExecutionStepActionMessageWithProject(messages []models.Message, projectID string, db *bun.DB) ([]models.Message, error) { + encryptionKey, err := getEncryptionKey(projectID, db) + if err != nil { + return nil, err + } + + block, err := aes.NewCipher(encryptionKey) if err != nil { return nil, err } @@ -43,8 +49,14 @@ func EncryptExecutionStepActionMessage(messages []shared_models.Message) ([]shar return messages, nil } -func DecryptExecutionStepActionMessage(encryptedMessage []shared_models.Message) ([]shared_models.Message, error) { - block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) +// DecryptExecutionStepActionMessageWithProject decrypts execution step messages using project-specific encryption +func DecryptExecutionStepActionMessageWithProject(encryptedMessage []models.Message, projectID string, db *bun.DB) ([]models.Message, error) { + encryptionKey, err := getEncryptionKey(projectID, db) + if err != nil { + return nil, err + } + + block, err := aes.NewCipher(encryptionKey) if err != nil { return nil, err } diff --git a/services/backend/functions/encryption/project_keys.go 
b/services/backend/functions/encryption/project_keys.go new file mode 100644 index 00000000..e4941009 --- /dev/null +++ b/services/backend/functions/encryption/project_keys.go @@ -0,0 +1,148 @@ +package encryption + +import ( + "context" + "crypto/rand" + "crypto/sha256" + "encoding/hex" + "errors" + + "golang.org/x/crypto/pbkdf2" + + "github.com/uptrace/bun" + "github.com/v1Flows/exFlow/services/backend/config" + "github.com/v1Flows/exFlow/services/backend/pkg/models" +) + +// GenerateProjectSalt generates a new random salt for a project +func GenerateProjectSalt() (string, error) { + salt := make([]byte, 32) // 256-bit salt + _, err := rand.Read(salt) + if err != nil { + return "", err + } + return hex.EncodeToString(salt), nil +} + +// DeriveProjectEncryptionKey derives an encryption key from master secret + project salt +func DeriveProjectEncryptionKey(projectSalt string, masterSecret string) ([]byte, error) { + if masterSecret == "" { + return nil, errors.New("master secret not configured") + } + + saltBytes, err := hex.DecodeString(projectSalt) + if err != nil { + return nil, err + } + + // Use PBKDF2 to derive a 32-byte key from master secret + project salt + // 100,000 iterations should be sufficient for this use case + key := pbkdf2.Key([]byte(masterSecret), saltBytes, 100000, 32, sha256.New) + return key, nil +} + +// GetProjectEncryptionKey retrieves the encryption key for a specific project +func GetProjectEncryptionKey(projectID string, db *bun.DB) ([]byte, error) { + var project models.Projects + err := db.NewSelect().Model(&project).Where("id = ?", projectID).Scan(context.Background()) + if err != nil { + return nil, err + } + + if !project.EncryptionEnabled { + return nil, errors.New("encryption is disabled for this project") + } + + if project.EncryptionKey == "" { + return nil, errors.New("encryption salt not found for project") + } + + // Derive the actual encryption key from master secret + project salt + masterSecret := 
config.Config.Encryption.MasterSecret + if masterSecret == "" { + // Fall back to legacy key storage if master secret not configured + keyBytes, err := hex.DecodeString(project.EncryptionKey) + if err != nil { + return nil, err + } + return keyBytes, nil + } + + return DeriveProjectEncryptionKey(project.EncryptionKey, masterSecret) +} + +// SetProjectEncryptionSalt sets the encryption salt for a specific project +func SetProjectEncryptionSalt(projectID string, encryptionSalt string, db *bun.DB) error { + _, err := db.NewUpdate(). + Model((*models.Projects)(nil)). + Set("encryption_key = ?, encryption_enabled = ?", encryptionSalt, true). + Where("id = ?", projectID). + Exec(context.Background()) + + return err +} + +// EnableProjectEncryption enables encryption for a project and generates a new salt if one doesn't exist +func EnableProjectEncryption(projectID string, db *bun.DB) error { + var project models.Projects + err := db.NewSelect().Model(&project).Where("id = ?", projectID).Scan(context.Background()) + if err != nil { + return err + } + + // Generate a new salt if one doesn't exist + if project.EncryptionKey == "" { + newSalt, err := GenerateProjectSalt() + if err != nil { + return err + } + + _, err = db.NewUpdate(). + Model((*models.Projects)(nil)). + Set("encryption_key = ?, encryption_enabled = ?", newSalt, true). + Where("id = ?", projectID). + Exec(context.Background()) + + return err + } + + // Just enable encryption if salt already exists + _, err = db.NewUpdate(). + Model((*models.Projects)(nil)). + Set("encryption_enabled = ?", true). + Where("id = ?", projectID). + Exec(context.Background()) + + return err +} + +// DisableProjectEncryption disables encryption for a project (but keeps the salt) +func DisableProjectEncryption(projectID string, db *bun.DB) error { + _, err := db.NewUpdate(). + Model((*models.Projects)(nil)). + Set("encryption_enabled = ?", false). + Where("id = ?", projectID). 
+ Exec(context.Background()) + + return err +} + +// RotateProjectEncryptionKey generates a new encryption salt for a project +func RotateProjectEncryptionKey(projectID string, db *bun.DB) (string, error) { + newSalt, err := GenerateProjectSalt() + if err != nil { + return "", err + } + + _, err = db.NewUpdate(). + Model((*models.Projects)(nil)). + Set("encryption_key = ?", newSalt). + Where("id = ?", projectID). + Exec(context.Background()) + + if err != nil { + return "", err + } + + return newSalt, nil +} diff --git a/services/backend/functions/encryption/params.go b/services/backend/functions/encryption/project_params.go similarity index 67% rename from services/backend/functions/encryption/params.go rename to services/backend/functions/encryption/project_params.go index 82da8364..c1dfb2f1 100644 --- a/services/backend/functions/encryption/params.go +++ b/services/backend/functions/encryption/project_params.go @@ -1,6 +1,7 @@ package encryption import ( + "context" "crypto/aes" "crypto/cipher" "crypto/rand" @@ -10,22 +11,60 @@ import ( "fmt" "io" + "github.com/uptrace/bun" "github.com/v1Flows/exFlow/services/backend/config" - shared_models "github.com/v1Flows/shared-library/pkg/models" + "github.com/v1Flows/exFlow/services/backend/pkg/models" ) -func IsEncrypted(value string) bool { - // Encrypted values should be at least as long as the AES block size - if len(value) < aes.BlockSize*2 { - return false +// getEncryptionKey returns the appropriate encryption key for a project +// Falls back to global config key if project encryption is disabled or key is missing +func getEncryptionKey(projectID string, db *bun.DB) ([]byte, error) { + if projectID == "" { + // Fall back to global config if no project ID provided + return []byte(config.Config.Encryption.Key), nil + } + + var project models.Projects + err := db.NewSelect().Model(&project).Where("id = ?", projectID).Scan(context.Background()) + if err != nil { + // Fall back to global config if project not found + 
return []byte(config.Config.Encryption.Key), nil + } + + // Use project-specific encryption if enabled and salt exists + if project.EncryptionEnabled && project.EncryptionKey != "" { + // Try to derive key from master secret + salt + masterSecret := config.Config.Encryption.MasterSecret + if masterSecret != "" { + keyBytes, err := DeriveProjectEncryptionKey(project.EncryptionKey, masterSecret) + if err != nil { + // Fall back to global config if key derivation fails + return []byte(config.Config.Encryption.Key), nil + } + return keyBytes, nil + } + + // Legacy: treat stored value as actual key (for backward compatibility) + keyBytes, err := hex.DecodeString(project.EncryptionKey) + if err != nil { + // Fall back to global config if key decode fails + return []byte(config.Config.Encryption.Key), nil + } + return keyBytes, nil } - _, err := hex.DecodeString(value) - return err == nil + // Fall back to global config + return []byte(config.Config.Encryption.Key), nil } -func EncryptParams(actions []shared_models.Action) ([]shared_models.Action, error) { - block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) +// EncryptParamsWithProject encrypts action params using project-specific encryption +func EncryptParamsWithProject(actions []models.Action, projectID string, db *bun.DB) ([]models.Action, error) { + encryptionKey, err := getEncryptionKey(projectID, db) + if err != nil { + return nil, err + } + + block, err := aes.NewCipher(encryptionKey) if err != nil { return nil, err } @@ -108,8 +147,14 @@ func EncryptParams(actions []shared_models.Action) ([]shared_models.Action, erro return actions, nil } -func DecryptParams(actions []shared_models.Action, decryptPasswords bool) ([]shared_models.Action, error) { - block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) +// DecryptParamsWithProject decrypts action params using project-specific encryption +func DecryptParamsWithProject(actions []models.Action, projectID string, decryptPasswords bool, db 
*bun.DB) ([]models.Action, error) { + encryptionKey, err := getEncryptionKey(projectID, db) + if err != nil { + return nil, err + } + + block, err := aes.NewCipher(encryptionKey) if err != nil { return nil, err } @@ -217,8 +262,14 @@ func DecryptParams(actions []shared_models.Action, decryptPasswords bool) ([]sha return actions, nil } -func EncryptParam(param shared_models.Params) (shared_models.Params, error) { - block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) +// EncryptParamWithProject encrypts a single param using project-specific encryption +func EncryptParamWithProject(param models.Params, projectID string, db *bun.DB) (models.Params, error) { + encryptionKey, err := getEncryptionKey(projectID, db) + if err != nil { + return param, err + } + + block, err := aes.NewCipher(encryptionKey) if err != nil { return param, err } @@ -258,8 +309,14 @@ func EncryptParam(param shared_models.Params) (shared_models.Params, error) { return param, nil } -func DecryptString(value string) (string, error) { - block, err := aes.NewCipher([]byte(config.Config.Encryption.Key)) +// DecryptStringWithProject decrypts a string using project-specific encryption +func DecryptStringWithProject(value string, projectID string, db *bun.DB) (string, error) { + encryptionKey, err := getEncryptionKey(projectID, db) + if err != nil { + return "", err + } + + block, err := aes.NewCipher(encryptionKey) if err != nil { return "", err } @@ -291,3 +348,14 @@ func DecryptString(value string) (string, error) { return string(plaintext), nil } + +func IsEncrypted(value string) bool { + decoded, err := hex.DecodeString(value) + if err != nil { + return false + } + + // GCM nonce size is 12 bytes for standard GCM + nonceSize := 12 + return len(decoded) > nonceSize +} diff --git a/services/backend/functions/flow/startExecution.go b/services/backend/functions/flow/startExecution.go index b7bb42cb..ec7c42db 100644 --- a/services/backend/functions/flow/startExecution.go +++ 
b/services/backend/functions/flow/startExecution.go @@ -2,6 +2,8 @@ package functions import ( "context" + "database/sql" + "time" "github.com/v1Flows/exFlow/services/backend/pkg/models" @@ -9,9 +11,30 @@ import ( "github.com/uptrace/bun" ) -func PreStartExecution(flowID string, flow models.Flows, db *bun.DB) error { +func PreStartExecution(flowID string, flow models.Flows, db *bun.DB, alert models.Alerts) error { context := context.Background() + // check when the last alert came in which got executed + var lastAlert models.Alerts + count, err := db.NewSelect().Model(&lastAlert).Where("flow_id = ? AND id != ? AND execution_id != ''", flowID, alert.ID).Order("created_at DESC").Limit(1).ScanAndCount(context) + if err != nil && err != sql.ErrNoRows { + return err + } + + if count > 0 { + // compare the difference between the last alert and the current alert to the flow alert threshold + if lastAlert.CreatedAt.Add(time.Duration(flow.AlertThreshold) * time.Minute).After(alert.CreatedAt) { + // set note on alert why it was not executed + alert.Note = "Alert was not executed because it came in too soon after the last alert" + _, err = db.NewUpdate().Model(&alert).Column("note").Where("id = ?", alert.ID).Exec(context) + if err != nil { + return err + } + + return nil + } + } + var execution models.Executions if flow.RunnerID != "" { @@ -21,7 +44,16 @@ func PreStartExecution(flowID string, flow models.Flows, db *bun.DB) error { execution.ID = uuid.New() execution.FlowID = flowID execution.Status = "pending" - _, err := db.NewInsert().Model(&execution).Column("id", "flow_id", "status", "executed_at").Exec(context) + execution.AlertID = alert.ID.String() + + _, err = db.NewInsert().Model(&execution).Column("id", "flow_id", "alert_id", "status", "executed_at").Exec(context) + if err != nil { + return err + } + + // set execution id on alert + alert.ExecutionID = execution.ID.String() + _, err = db.NewUpdate().Model(&alert).Column("execution_id").Where("id = ?", 
alert.ID).Exec(context) if err != nil { return err } diff --git a/services/backend/functions/httperror/internalServerError.go b/services/backend/functions/httperror/internalServerError.go index 4b59fff7..db259e01 100644 --- a/services/backend/functions/httperror/internalServerError.go +++ b/services/backend/functions/httperror/internalServerError.go @@ -7,6 +7,9 @@ import ( ) func InternalServerError(context *gin.Context, message string, err error) { - context.JSON(http.StatusInternalServerError, gin.H{"message": message, "error": err.Error()}) - context.Abort() + errorMessage := "Unknown error" + if err != nil { + errorMessage = err.Error() + } + context.JSON(http.StatusInternalServerError, gin.H{"message": message, "error": errorMessage}) } diff --git a/services/backend/go.mod b/services/backend/go.mod index 34f550d4..70593f5b 100644 --- a/services/backend/go.mod +++ b/services/backend/go.mod @@ -6,7 +6,7 @@ require ( github.com/uptrace/bun v1.2.15 github.com/uptrace/bun/driver/pgdriver v1.2.15 github.com/uptrace/bun/extra/bunotel v1.2.15 - golang.org/x/crypto v0.41.0 + golang.org/x/crypto v0.42.0 ) require ( @@ -14,7 +14,7 @@ require ( github.com/bytedance/sonic v1.13.3 // indirect github.com/bytedance/sonic/loader v0.2.4 // indirect github.com/cloudwego/base64x v0.1.5 // indirect - github.com/fsnotify/fsnotify v1.8.0 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect github.com/gabriel-vasile/mimetype v1.4.9 // indirect github.com/gin-contrib/sse v1.1.0 // indirect github.com/go-logr/logr v1.4.3 // indirect @@ -22,7 +22,7 @@ require ( github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.26.0 // indirect - github.com/go-viper/mapstructure/v2 v2.3.0 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/goccy/go-json v0.10.5 // indirect github.com/jinzhu/inflection v1.0.0 // indirect github.com/json-iterator/go v1.1.12 
// indirect @@ -33,11 +33,11 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect - github.com/sagikazarmark/locafero v0.7.0 // indirect - github.com/sourcegraph/conc v0.3.0 // indirect - github.com/spf13/afero v1.12.0 // indirect - github.com/spf13/cast v1.7.1 // indirect - github.com/spf13/pflag v1.0.6 // indirect + github.com/sagikazarmark/locafero v0.11.0 // indirect + github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect + github.com/spf13/afero v1.15.0 // indirect + github.com/spf13/cast v1.10.0 // indirect + github.com/spf13/pflag v1.0.10 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect @@ -50,11 +50,10 @@ require ( go.opentelemetry.io/otel v1.37.0 // indirect go.opentelemetry.io/otel/metric v1.37.0 // indirect go.opentelemetry.io/otel/trace v1.37.0 // indirect - go.uber.org/atomic v1.10.0 // indirect - go.uber.org/multierr v1.9.0 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect golang.org/x/arch v0.18.0 // indirect - golang.org/x/net v0.42.0 // indirect - golang.org/x/text v0.28.0 // indirect + golang.org/x/net v0.43.0 // indirect + golang.org/x/text v0.29.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect mellium.im/sasl v0.3.2 // indirect @@ -68,11 +67,11 @@ require ( github.com/golang-jwt/jwt/v5 v5.3.0 github.com/google/uuid v1.6.0 github.com/lib/pq v1.10.9 - github.com/mattn/go-sqlite3 v1.14.31 + github.com/mattn/go-sqlite3 v1.14.32 github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 github.com/sirupsen/logrus v1.9.3 - github.com/spf13/viper v1.20.1 + github.com/spf13/viper v1.21.0 github.com/uptrace/bun/dialect/pgdialect v1.2.15 - github.com/v1Flows/shared-library v1.0.25 - golang.org/x/sys v0.35.0 // indirect + 
github.com/v1Flows/shared-library v1.0.27 + golang.org/x/sys v0.36.0 // indirect ) diff --git a/services/backend/go.sum b/services/backend/go.sum index 91587d8f..c87e3d22 100644 --- a/services/backend/go.sum +++ b/services/backend/go.sum @@ -17,8 +17,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= -github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= -github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= github.com/gin-contrib/cors v1.7.6 h1:3gQ8GMzs1Ylpf70y8bMw4fVpycXIeX1ZemuSQIsnQQY= @@ -40,8 +40,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k= github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= -github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk= -github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod 
h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= @@ -69,8 +69,8 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-sqlite3 v1.14.31 h1:ldt6ghyPJsokUIlksH63gWZkG6qVGeEAu4zLeS4aVZM= -github.com/mattn/go-sqlite3 v1.14.31/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs= +github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -86,20 +86,20 @@ github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++ github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= -github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo= -github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k= +github.com/sagikazarmark/locafero v0.11.0 
h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc= +github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= -github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= -github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs= -github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4= -github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= -github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= -github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4= -github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U= +github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I= +github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg= +github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY= +github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU= +github.com/spf13/viper 
v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= @@ -111,8 +111,8 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo= @@ -131,8 +131,8 @@ github.com/uptrace/bun/extra/bunotel v1.2.15 h1:6KAvKRpH9BC/7n3eMXVgDYLqghHf2H3F github.com/uptrace/bun/extra/bunotel v1.2.15/go.mod h1:qnASdcJVuoEE+13N3Gd8XHi5gwCydt2S1TccJnefH2k= github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 h1:ZjUj9BLYf9PEqBn8W/OapxhPjVRdC6CsXTdULHsyk5c= github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2/go.mod h1:O8bHQfyinKwTXKkiKNGmLQS7vRsqRxIQTFZpYpHK3IQ= -github.com/v1Flows/shared-library v1.0.25 h1:Rez0FNvDXdYByx3JAT8/+BXqld2vmvqUz0rPoBxt5UE= -github.com/v1Flows/shared-library v1.0.25/go.mod h1:UVP6m6Nri6JC3L0xS3wkbqGvfQJ5fsYIJx81Gfj1TFw= +github.com/v1Flows/shared-library v1.0.27 
h1:BQMZ0hgBMhOHMelygi4Rl7XxriAEKveFarWer9SlN0Q= +github.com/v1Flows/shared-library v1.0.27/go.mod h1:UVP6m6Nri6JC3L0xS3wkbqGvfQJ5fsYIJx81Gfj1TFw= github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8= github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= @@ -147,22 +147,20 @@ go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/Wgbsd go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= -go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= -go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= -go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= -go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/arch v0.18.0 h1:WN9poc33zL4AzGxqf8VtpKUnGvMi8O9lhNyBMF/85qc= golang.org/x/arch v0.18.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= -golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= -golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= -golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= -golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/crypto v0.42.0 h1:chiH31gIWm57EkTXpwnqf8qeuMUi0yekh6mT2AvFlqI= +golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= 
+golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= -golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= -golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= +golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk= +golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/services/backend/handlers/alerts/create.go b/services/backend/handlers/alerts/create.go new file mode 100644 index 00000000..f1f988dc --- /dev/null +++ b/services/backend/handlers/alerts/create.go @@ -0,0 +1,104 @@ +package alerts + +import ( + "errors" + "net/http" + "time" + + "github.com/v1Flows/exFlow/services/backend/functions/encryption" + functions "github.com/v1Flows/exFlow/services/backend/functions/flow" + "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + _ "github.com/lib/pq" + "github.com/uptrace/bun" + + log "github.com/sirupsen/logrus" +) + +func CreateAlert(context *gin.Context, db *bun.DB) 
{ + var alert models.Alerts + if err := context.ShouldBindJSON(&alert); err != nil { + httperror.StatusBadRequest(context, "Error parsing incoming data", err) + return + } + + var flow models.Flows + flowCount, err := db.NewSelect().Model(&flow).Where("id = ?", alert.FlowID).ScanAndCount(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting flow data from db", err) + return + } + if flowCount == 0 { + httperror.StatusNotFound(context, "Error no flow found", err) + return + } + + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + + access, err := gatekeeper.CheckUserProjectAccess(flow.ProjectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on project", err) + return + } + if !access { + httperror.Unauthorized(context, "You are not allowed to view this alert", errors.New("unauthorized")) + return + } + + alert.ID = uuid.New() + alert.CreatedAt = time.Now() + + // encrypt payload if enabled + if project.EncryptionEnabled { + alert.Payload, err = encryption.EncryptPayload(alert.Payload, project.ID.String(), db) + if err != nil { + httperror.InternalServerError(context, "Error encrypting payload", err) + return + } + alert.Encrypted = true + } + + res, err := db.NewInsert().Model(&alert).ExcludeColumn("execution_id").Exec(context) + if err != nil { + httperror.InternalServerError(context, "Error creating alert on db", err) + return + } + + // if the alert has a parent_id we need to update the parent alert updated_at time + if alert.ParentID != "" { + parentAlert := models.Alerts{} + parentAlert.UpdatedAt = time.Now() + if alert.Status == "resolved" { + parentAlert.Status = "resolved" + _, err := db.NewUpdate().Model(&parentAlert).Where("id = ?", 
alert.ParentID).Column("updated_at", "status").Exec(context) + if err != nil { + httperror.InternalServerError(context, "Error updating parent alert on db", err) + return + } + } else { + _, err := db.NewUpdate().Model(&parentAlert).Where("id = ?", alert.ParentID).Column("updated_at").Exec(context) + if err != nil { + httperror.InternalServerError(context, "Error updating parent alert on db", err) + return + } + } + } + + err = functions.PreStartExecution(alert.FlowID, flow, db, alert) + if err != nil { + log.Error("Failed to start execution: " + err.Error()) + httperror.InternalServerError(context, "Failed to start execution", err) + } + + context.JSON(http.StatusCreated, gin.H{"result": "success", "response": res}) +} diff --git a/services/backend/handlers/alerts/delete.go b/services/backend/handlers/alerts/delete.go new file mode 100644 index 00000000..480fa131 --- /dev/null +++ b/services/backend/handlers/alerts/delete.go @@ -0,0 +1,60 @@ +package alerts + +import ( + "errors" + "net/http" + + "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + _ "github.com/lib/pq" + "github.com/uptrace/bun" +) + +func Delete(context *gin.Context, db *bun.DB) { + alertID := context.Param("alertID") + + // get flow_id from alert + var alert models.Alerts + err := db.NewSelect().Model(&alert).Where("id = ?", alertID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting alert data from db", err) + return + } + + // get project_id from flow_id + var flow models.Flows + err = db.NewSelect().Model(&flow).Where("id = ?", alert.FlowID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting flow data from db", err) + return + } + + // check the requestors role in project + canModify, err := 
gatekeeper.CheckRequestUserProjectModifyRole(flow.ProjectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on project", err) + return + } + if !canModify { + httperror.Unauthorized(context, "You are not allowed to delete this alert", errors.New("unauthorized")) + return + } + + // delete all alerts which got this alert id as parent_id + _, err = db.NewDelete().Model((*models.Alerts)(nil)).Where("parent_id = ?", alertID).Exec(context) + if err != nil { + httperror.InternalServerError(context, "Error deleting alert on db", err) + return + } + + _, err = db.NewDelete().Model((*models.Alerts)(nil)).Where("id = ?", alertID).Exec(context) + if err != nil { + httperror.InternalServerError(context, "Error deleting alert on db", err) + return + } + + context.JSON(http.StatusOK, gin.H{"result": "success"}) +} diff --git a/services/backend/handlers/alerts/get_alert.go b/services/backend/handlers/alerts/get_alert.go new file mode 100644 index 00000000..6a045398 --- /dev/null +++ b/services/backend/handlers/alerts/get_alert.go @@ -0,0 +1,54 @@ +package alerts + +import ( + "errors" + "net/http" + + "github.com/v1Flows/exFlow/services/backend/functions/encryption" + "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + _ "github.com/lib/pq" + "github.com/uptrace/bun" +) + +func GetSingle(context *gin.Context, db *bun.DB) { + alertID := context.Param("alertID") + + var alert models.Alerts + err := db.NewSelect().Model(&alert).Where("id = ?", alertID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting alert data from db", err) + return + } + + // get project_id from flow_id + var flow models.Flows + err = db.NewSelect().Model(&flow).Where("id = ?", alert.FlowID).Scan(context) + if err != nil { + 
httperror.InternalServerError(context, "Error collecting flow data from db", err) + return + } + + access, err := gatekeeper.CheckUserProjectAccess(flow.ProjectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on project", err) + return + } + if !access { + httperror.Unauthorized(context, "You are not allowed to view this alert", errors.New("unauthorized")) + return + } + + if alert.Encrypted { + alert.Payload, err = encryption.DecryptPayload(alert.Payload, flow.ProjectID, db) + if err != nil { + httperror.InternalServerError(context, "Error decrypting alert", err) + return + } + } + + context.JSON(http.StatusOK, gin.H{"alert": alert}) +} diff --git a/services/backend/handlers/alerts/get_alerts.go b/services/backend/handlers/alerts/get_alerts.go new file mode 100644 index 00000000..9a08e697 --- /dev/null +++ b/services/backend/handlers/alerts/get_alerts.go @@ -0,0 +1,115 @@ +package alerts + +import ( + "fmt" + "net/http" + "strings" + + "github.com/v1Flows/exFlow/services/backend/functions/auth" + "github.com/v1Flows/exFlow/services/backend/functions/encryption" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + _ "github.com/lib/pq" + "github.com/uptrace/bun" +) + +func GetMultiple(context *gin.Context, db *bun.DB) { + userID, err := auth.GetUserIDFromToken(context.GetHeader("Authorization")) + if err != nil { + httperror.InternalServerError(context, "Error receiving userID from token", err) + return + } + + // Parse pagination params + limit := 20 + offset := 0 + if l := context.Query("limit"); l != "" { + fmt.Sscanf(l, "%d", &limit) + } + if o := context.Query("offset"); o != "" { + fmt.Sscanf(o, "%d", &offset) + } + + // Parse status filter (comma-separated) + statusParam := context.Query("status") + var statusList []string + if statusParam != "" { + statusList = strings.Split(statusParam, 
",") + } + + // get all flows where the user is a member + flows := make([]models.Flows, 0) + err = db.NewSelect().Model(&flows).Column("id", "project_id").Where("project_id::uuid IN (SELECT project_id::uuid FROM project_members WHERE user_id = ? AND invite_pending = false)", userID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting flows from db", err) + return + } + + // put flow ids in an array + flowsArray := make([]string, 0) + for _, flow := range flows { + flowsArray = append(flowsArray, flow.ID.String()) + } + + alerts := make([]models.Alerts, 0) + query := db.NewSelect().Model(&alerts). + Where("flow_id IN (?)", bun.In(flowsArray)) + + if len(statusList) > 0 { + query = query.Where("status IN (?)", bun.In(statusList)) + } + + err = query.Order("created_at DESC"). + Limit(limit). + Offset(offset). + Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting alerts from db", err) + return + } + + for i := range alerts { + if alerts[i].Encrypted { + // get flow to find project_id and use the already fetched flows to avoid another db call + var flowProjectID string + for _, flow := range flows { + if flow.ID.String() == alerts[i].FlowID { + flowProjectID = flow.ProjectID + break + } + } + if flowProjectID == "" { + continue + } + + // Decrypt using the project ID found + alerts[i].Payload, err = encryption.DecryptPayload(alerts[i].Payload, flowProjectID, db) + if err != nil { + httperror.InternalServerError(context, "Error decrypting alert", err) + return + } + } + } + + // Count total alerts for pagination (with status filter) + countQuery := db.NewSelect(). + Model((*models.Alerts)(nil)). 
+ Where("flow_id IN (?)", bun.In(flowsArray)) + if len(statusList) > 0 { + countQuery = countQuery.Where("status IN (?)", bun.In(statusList)) + } + totalAlerts, err := countQuery.Count(context) + if err != nil { + httperror.InternalServerError(context, "Error counting alerts", err) + return + } + + context.JSON(http.StatusOK, gin.H{ + "alerts": alerts, + "limit": limit, + "offset": offset, + "total": totalAlerts, + }) +} diff --git a/services/backend/handlers/alerts/get_grouped_alerts.go b/services/backend/handlers/alerts/get_grouped_alerts.go new file mode 100644 index 00000000..77631ca8 --- /dev/null +++ b/services/backend/handlers/alerts/get_grouped_alerts.go @@ -0,0 +1,66 @@ +package alerts + +import ( + "errors" + "net/http" + + "github.com/v1Flows/exFlow/services/backend/functions/encryption" + "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + _ "github.com/lib/pq" + "github.com/uptrace/bun" +) + +func GetGrouped(context *gin.Context, db *bun.DB) { + var incomingRequest models.IncomingGroupedAlertsRequest + if err := context.ShouldBindJSON(&incomingRequest); err != nil { + httperror.StatusBadRequest(context, "Error parsing incoming data", err) + return + } + + flow := models.Flows{} + err := db.NewSelect().Model(&flow).Where("id = ?", incomingRequest.FlowID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting flows from db", err) + return + } + + access, err := gatekeeper.CheckUserProjectAccess(flow.ProjectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on project", err) + return + } + if !access { + httperror.Unauthorized(context, "You are not allowed to view this alert", errors.New("unauthorized")) + return + } + + alerts := make([]models.Alerts, 0) + + // check if grouped alerts are 
enabled + if flow.GroupAlerts { + err = db.NewSelect().Model(&alerts).Where("flow_id = ? AND group_key = ? AND status != 'resolved' AND parent_id = ''", incomingRequest.FlowID, incomingRequest.GroupAlertsIdentifier).Order("created_at ASC").Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting alerts from db", err) + return + } + } else { + httperror.StatusNotFound(context, "Grouped alerts are not enabled", nil) + return + } + + for i := range alerts { + if alerts[i].Encrypted { + alerts[i].Payload, err = encryption.DecryptPayload(alerts[i].Payload, flow.ProjectID, db) + if err != nil { + httperror.InternalServerError(context, "Error decrypting alert", err) + return + } + } + } + + context.JSON(http.StatusOK, gin.H{"alerts": alerts}) +} diff --git a/services/backend/handlers/alerts/update.go b/services/backend/handlers/alerts/update.go new file mode 100644 index 00000000..dc159043 --- /dev/null +++ b/services/backend/handlers/alerts/update.go @@ -0,0 +1,101 @@ +package alerts + +import ( + "errors" + "net/http" + "time" + + "github.com/v1Flows/exFlow/services/backend/functions/encryption" + "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + _ "github.com/lib/pq" + "github.com/uptrace/bun" +) + +func Update(context *gin.Context, db *bun.DB) { + alertID := context.Param("alertID") + + var alert models.Alerts + if err := context.ShouldBindJSON(&alert); err != nil { + httperror.StatusBadRequest(context, "Error parsing incoming data", err) + return + } + + var alertDB models.Alerts + err := db.NewSelect().Model(&alertDB).Where("id = ?", alertID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting alert data from db", err) + return + } + + // get project_id from flow_id + var flow models.Flows + err = 
db.NewSelect().Model(&flow).Where("id = ?", alertDB.FlowID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting flow data from db", err) + return + } + + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + + access, err := gatekeeper.CheckUserProjectAccess(flow.ProjectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on project", err) + return + } + if !access { + httperror.Unauthorized(context, "You are not allowed to view this alert", errors.New("unauthorized")) + return + } + + alert.UpdatedAt = time.Now() + columns := []string{} + if alert.Name != "" { + columns = append(columns, "name") + } + if alert.FlowID != "" { + columns = append(columns, "flow_id") + } + if len(alert.Payload) != 0 { + columns = append(columns, "payload") + + if project.EncryptionEnabled { + alert.Payload, err = encryption.EncryptPayload(alert.Payload, project.ID.String(), db) + if err != nil { + httperror.InternalServerError(context, "Error encrypting payload", err) + return + } + alert.Encrypted = true + columns = append(columns, "encrypted") + } + } + if alert.Status != alertDB.Status { + columns = append(columns, "status") + } + if alert.ResolvedAt != alertDB.ResolvedAt { + columns = append(columns, "resolved_at") + } + if alert.ParentID != alertDB.ParentID { + columns = append(columns, "parent_id") + } + if alert.UpdatedAt != alertDB.UpdatedAt { + columns = append(columns, "updated_at") + } + + _, err = db.NewUpdate().Model(&alert).Column(columns...).Where("id = ?", alertID).Exec(context) + if err != nil { + httperror.InternalServerError(context, "Error updating alert on db", err) + return + } + + context.JSON(http.StatusOK, gin.H{"alert": alert}) +} diff --git 
a/services/backend/handlers/executions/create_step.go b/services/backend/handlers/executions/create_step.go index b374556e..7304d781 100644 --- a/services/backend/handlers/executions/create_step.go +++ b/services/backend/handlers/executions/create_step.go @@ -35,10 +35,17 @@ func CreateStep(context *gin.Context, db *bun.DB) { httperror.InternalServerError(context, "Error fetching flow data", err) return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } // check for encryption - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { httperror.InternalServerError(context, "Error encrypting execution step action messages", err) return diff --git a/services/backend/handlers/executions/get_step.go b/services/backend/handlers/executions/get_step.go index d876bb20..3ceef6b9 100644 --- a/services/backend/handlers/executions/get_step.go +++ b/services/backend/handlers/executions/get_step.go @@ -1,10 +1,11 @@ package executions import ( + "net/http" + "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/httperror" "github.com/v1Flows/exFlow/services/backend/pkg/models" - "net/http" "github.com/gin-gonic/gin" "github.com/uptrace/bun" @@ -21,8 +22,24 @@ func GetStep(context *gin.Context, db *bun.DB) { return } + // get execution data + var execution models.Executions + err = db.NewSelect().Model(&execution).Where("id = ?", executionID).Scan(context) + if err != nil { + 
httperror.InternalServerError(context, "Error fetching execution data", err) + return + } + + // get flow data + var flow models.Flows + err = db.NewSelect().Model(&flow).Where("id = ?", execution.FlowID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error fetching flow data", err) + return + } + if step.Encrypted { - step.Messages, err = encryption.DecryptExecutionStepActionMessage(step.Messages) + step.Messages, err = encryption.DecryptExecutionStepActionMessageWithProject(step.Messages, flow.ProjectID, db) if err != nil { httperror.InternalServerError(context, "Error decrypting execution step action messages", err) return diff --git a/services/backend/handlers/executions/get_steps.go b/services/backend/handlers/executions/get_steps.go index 894f050f..06909ccc 100644 --- a/services/backend/handlers/executions/get_steps.go +++ b/services/backend/handlers/executions/get_steps.go @@ -21,9 +21,25 @@ func GetSteps(context *gin.Context, db *bun.DB) { return } + // get execution data + var execution models.Executions + err = db.NewSelect().Model(&execution).Where("id = ?", executionID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error fetching execution data", err) + return + } + + // get flow data + var flow models.Flows + err = db.NewSelect().Model(&flow).Where("id = ?", execution.FlowID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error fetching flow data", err) + return + } + for i := range steps { if steps[i].Encrypted { - steps[i].Messages, err = encryption.DecryptExecutionStepActionMessage(steps[i].Messages) + steps[i].Messages, err = encryption.DecryptExecutionStepActionMessageWithProject(steps[i].Messages, flow.ProjectID, db) if err != nil { httperror.InternalServerError(context, "Error decrypting execution step action messages", err) return diff --git a/services/backend/handlers/executions/schedule.go b/services/backend/handlers/executions/schedule.go index 
c4308999..8778ca77 100644 --- a/services/backend/handlers/executions/schedule.go +++ b/services/backend/handlers/executions/schedule.go @@ -8,7 +8,6 @@ import ( "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/httperror" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" "github.com/gin-gonic/gin" "github.com/uptrace/bun" @@ -43,16 +42,16 @@ func ScheduleExecution(context *gin.Context, db *bun.DB) { } // create execution step which tells that the execution is registerd and waiting for runner to pick it up - step := shared_models.ExecutionSteps{ + step := models.ExecutionSteps{ ExecutionID: execution.ID.String(), - Action: shared_models.Action{ + Action: models.Action{ Name: "Scheduled", Icon: "hugeicons:time-schedule", }, - Messages: []shared_models.Message{ + Messages: []models.Message{ { Title: "Scheduled", - Lines: []shared_models.Line{ + Lines: []models.Line{ { Content: "Execution is registered and is waiting for the scheduled time to start", Timestamp: time.Now(), @@ -77,9 +76,17 @@ func ScheduleExecution(context *gin.Context, db *bun.DB) { return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + // check for encryption - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { httperror.InternalServerError(context, "Error encrypting execution step action messages", err) return diff --git 
a/services/backend/handlers/executions/update_step.go b/services/backend/handlers/executions/update_step.go index 8cf0e867..f841594e 100644 --- a/services/backend/handlers/executions/update_step.go +++ b/services/backend/handlers/executions/update_step.go @@ -7,7 +7,6 @@ import ( "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/httperror" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" "github.com/gin-gonic/gin" "github.com/uptrace/bun" @@ -16,7 +15,7 @@ import ( func UpdateStep(context *gin.Context, db *bun.DB) { stepID := context.Param("stepID") - var step shared_models.ExecutionSteps + var step models.ExecutionSteps if err := context.ShouldBindJSON(&step); err != nil { httperror.StatusBadRequest(context, "Error parsing incoming data", err) log.Error("Error parsing incoming data", err) @@ -24,7 +23,7 @@ func UpdateStep(context *gin.Context, db *bun.DB) { } // get current action messages - var dbStep shared_models.ExecutionSteps + var dbStep models.ExecutionSteps err := db.NewSelect().Model(&dbStep).Where("id = ?", stepID).Scan(context) if err != nil { httperror.InternalServerError(context, "Error collecting current step messages from db", err) @@ -48,10 +47,17 @@ func UpdateStep(context *gin.Context, db *bun.DB) { log.Error("Error fetching flow data", err) return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } // check for ecryption and decrypt if needed - if flow.EncryptExecutions && dbStep.Messages != nil && len(dbStep.Messages) > 0 { - dbStep.Messages, err = encryption.DecryptExecutionStepActionMessage(dbStep.Messages) + if project.EncryptionEnabled && dbStep.Messages != nil && len(dbStep.Messages) > 0 { + 
dbStep.Messages, err = encryption.DecryptExecutionStepActionMessageWithProject(dbStep.Messages, project.ID.String(), db) if err != nil { httperror.InternalServerError(context, "Error decrypting execution step action messages", err) log.Error("Error decrypting execution step action messages", err) @@ -71,8 +77,8 @@ func UpdateStep(context *gin.Context, db *bun.DB) { } // check for ecryption and encrypt if needed - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { httperror.InternalServerError(context, "Error encrypting execution step action messages", err) log.Error("Error encrypting execution step action messages", err) diff --git a/services/backend/handlers/flows/add_actions.go b/services/backend/handlers/flows/add_actions.go index a79b266d..10b7f8ec 100644 --- a/services/backend/handlers/flows/add_actions.go +++ b/services/backend/handlers/flows/add_actions.go @@ -5,7 +5,6 @@ import ( "fmt" "net/http" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" "github.com/v1Flows/exFlow/services/backend/functions/httperror" @@ -34,6 +33,14 @@ func AddFlowActions(context *gin.Context, db *bun.DB) { return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flowDB.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + // check if user has access to project access, err := gatekeeper.CheckUserProjectAccess(flowDB.ProjectID, context, db) if err != nil { @@ -57,8 +64,8 @@ func 
AddFlowActions(context *gin.Context, db *bun.DB) { } // encrypt action params - if config.Config.Encryption.Enabled && flowDB.EncryptActionParams { - flow.Actions, err = encryption.EncryptParams(flow.Actions) + if project.EncryptionEnabled { + flow.Actions, err = encryption.EncryptParamsWithProject(flow.Actions, flowDB.ProjectID, db) if err != nil { httperror.InternalServerError(context, "Error encrypting action params", err) fmt.Println(err) diff --git a/services/backend/handlers/flows/add_failure_pipeline_actions.go b/services/backend/handlers/flows/add_failure_pipeline_actions.go index c366d4b4..448ef7f6 100644 --- a/services/backend/handlers/flows/add_failure_pipeline_actions.go +++ b/services/backend/handlers/flows/add_failure_pipeline_actions.go @@ -6,13 +6,11 @@ import ( "net/http" log "github.com/sirupsen/logrus" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" "github.com/v1Flows/exFlow/services/backend/functions/httperror" functions_project "github.com/v1Flows/exFlow/services/backend/functions/project" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" "github.com/gin-gonic/gin" _ "github.com/lib/pq" @@ -23,7 +21,7 @@ func AddFlowFailurePipelineActions(context *gin.Context, db *bun.DB) { flowID := context.Param("flowID") failurePipelineID := context.Param("failurePipelineID") - var failurePipeline shared_models.FailurePipeline + var failurePipeline models.FailurePipeline if err := context.ShouldBindJSON(&failurePipeline); err != nil { httperror.StatusBadRequest(context, "Error parsing incoming data", err) return @@ -36,6 +34,14 @@ func AddFlowFailurePipelineActions(context *gin.Context, db *bun.DB) { return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flowDB.ProjectID).Scan(context) + if err 
!= nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + // check if user has access to project access, err := gatekeeper.CheckUserProjectAccess(flowDB.ProjectID, context, db) if err != nil { @@ -59,8 +65,8 @@ func AddFlowFailurePipelineActions(context *gin.Context, db *bun.DB) { } // encrypt action params - if config.Config.Encryption.Enabled && flowDB.EncryptActionParams { - failurePipeline.Actions, err = encryption.EncryptParams(failurePipeline.Actions) + if project.EncryptionEnabled { + failurePipeline.Actions, err = encryption.EncryptParamsWithProject(failurePipeline.Actions, flowDB.ProjectID, db) if err != nil { httperror.InternalServerError(context, "Error encrypting failure pipeline action params", err) fmt.Println(err) diff --git a/services/backend/handlers/flows/delete_alerts.go b/services/backend/handlers/flows/delete_alerts.go new file mode 100644 index 00000000..6cf4a6f9 --- /dev/null +++ b/services/backend/handlers/flows/delete_alerts.go @@ -0,0 +1,64 @@ +package flows + +import ( + "errors" + "net/http" + + "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + _ "github.com/lib/pq" + "github.com/uptrace/bun" +) + +func DeleteAlert(context *gin.Context, db *bun.DB) { + alertID := context.Param("alertID") + + // get alert data from db + var alert models.Alerts + err := db.NewSelect().Model(&alert).Where("id = ?", alertID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting alert data from db", err) + return + } + + // get flow data + var flow models.Flows + err = db.NewSelect().Model(&flow).Where("id = ?", alert.FlowID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting flow data from db", err) + return + } + + // check if user has access to project + 
access, err := gatekeeper.CheckUserProjectAccess(flow.ProjectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking for flow access", err) + return + } + if !access { + httperror.Unauthorized(context, "You do not have access to this flow", errors.New("you do not have access to this flow")) + return + } + + // check the requestors role in project + canModify, err := gatekeeper.CheckRequestUserProjectModifyRole(flow.ProjectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on flow", err) + return + } + if !canModify { + httperror.Unauthorized(context, "You are not allowed to make modifications on this flow", errors.New("unauthorized")) + return + } + + _, err = db.NewDelete().Model((*models.Alerts)(nil)).Where("id = ?", alertID).Exec(context) + if err != nil { + httperror.InternalServerError(context, "Error deleting alert from db", err) + return + } + + context.JSON(http.StatusOK, gin.H{"result": "success"}) +} diff --git a/services/backend/handlers/flows/get_alerts.go b/services/backend/handlers/flows/get_alerts.go new file mode 100644 index 00000000..2093cbc2 --- /dev/null +++ b/services/backend/handlers/flows/get_alerts.go @@ -0,0 +1,104 @@ +package flows + +import ( + "errors" + "fmt" + "net/http" + "strings" + + "github.com/v1Flows/exFlow/services/backend/functions/encryption" + "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + _ "github.com/lib/pq" + "github.com/uptrace/bun" +) + +func GetFlowAlerts(context *gin.Context, db *bun.DB) { + flowID := context.Param("flowID") + + // Parse pagination params + limit := 20 + offset := 0 + if l := context.Query("limit"); l != "" { + fmt.Sscanf(l, "%d", &limit) + } + if o := context.Query("offset"); o != "" { + fmt.Sscanf(o, "%d", &offset) + } + + // 
Parse status filter (comma-separated) + statusParam := context.Query("status") + var statusList []string + if statusParam != "" { + statusList = strings.Split(statusParam, ",") + } + + // get flow + var flow models.Flows + err := db.NewSelect().Model(&flow).Where("id = ?", flowID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting flow data from db", err) + return + } + + // check if user has access to project + access, err := gatekeeper.CheckUserProjectAccess(flow.ProjectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking for flow access", err) + return + } + if !access { + httperror.Unauthorized(context, "You do not have access to this flow", errors.New("you do not have access to this flow")) + return + } + + alerts := make([]models.Alerts, 0) + query := db.NewSelect().Model(&alerts). + Where("flow_id = ?", flowID) + + if len(statusList) > 0 { + query = query.Where("status IN (?)", bun.In(statusList)) + } + + err = query.Order("created_at DESC"). + Limit(limit). + Offset(offset). + Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting alerts from db", err) + return + } + + // Count total alerts for pagination (with status filter) + countQuery := db.NewSelect(). + Model((*models.Alerts)(nil)). 
+ Where("flow_id = ?", flowID) + if len(statusList) > 0 { + countQuery = countQuery.Where("status IN (?)", bun.In(statusList)) + } + totalAlerts, err := countQuery.Count(context) + if err != nil { + httperror.InternalServerError(context, "Error counting alerts", err) + return + } + + for i := range alerts { + if alerts[i].Encrypted { + alerts[i].Payload, err = encryption.DecryptPayload(alerts[i].Payload, flow.ProjectID, db) + if err != nil { + httperror.InternalServerError(context, "Error decrypting payload", err) + return + } + } + } + + context.JSON(http.StatusOK, gin.H{ + "alerts": alerts, + "limit": limit, + "offset": offset, + "total": totalAlerts, + }) +} diff --git a/services/backend/handlers/flows/get_flow.go b/services/backend/handlers/flows/get_flow.go index dccc141d..0b3627f8 100644 --- a/services/backend/handlers/flows/get_flow.go +++ b/services/backend/handlers/flows/get_flow.go @@ -4,7 +4,6 @@ import ( "errors" "net/http" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/auth" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" @@ -26,6 +25,14 @@ func GetFlow(context *gin.Context, db *bun.DB) { return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + // check if user has access to project access, err := gatekeeper.CheckUserProjectAccess(flow.ProjectID, context, db) if err != nil { @@ -52,8 +59,8 @@ func GetFlow(context *gin.Context, db *bun.DB) { decryptPasswords = true } - if config.Config.Encryption.Enabled && flow.EncryptActionParams && len(flow.Actions) > 0 { - flow.Actions, err = encryption.DecryptParams(flow.Actions, decryptPasswords) + if project.EncryptionEnabled && len(flow.Actions) > 0 { + 
flow.Actions, err = encryption.DecryptParamsWithProject(flow.Actions, flow.ProjectID, decryptPasswords, db) if err != nil { httperror.InternalServerError(context, "Error decrypting action params", err) return @@ -62,7 +69,7 @@ func GetFlow(context *gin.Context, db *bun.DB) { // decrypt failure pipeline actions for i, pipeline := range flow.FailurePipelines { if pipeline.Actions != nil { - flow.FailurePipelines[i].Actions, err = encryption.DecryptParams(pipeline.Actions, decryptPasswords) + flow.FailurePipelines[i].Actions, err = encryption.DecryptParamsWithProject(pipeline.Actions, flow.ProjectID, decryptPasswords, db) if err != nil { httperror.InternalServerError(context, "Error decrypting action params", err) return diff --git a/services/backend/handlers/flows/get_flows.go b/services/backend/handlers/flows/get_flows.go index 1829d0b9..8da8d422 100644 --- a/services/backend/handlers/flows/get_flows.go +++ b/services/backend/handlers/flows/get_flows.go @@ -3,7 +3,6 @@ package flows import ( "net/http" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/auth" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/httperror" @@ -43,29 +42,36 @@ func GetFlows(context *gin.Context, db *bun.DB) { decryptPasswords = true } - if config.Config.Encryption.Enabled && len(flows) > 0 { - for i, flow := range flows { - if flow.EncryptActionParams && len(flow.Actions) > 0 { - flow.Actions, err = encryption.DecryptParams(flow.Actions, decryptPasswords) - if err != nil { - httperror.InternalServerError(context, "Error decrypting action params", err) - return - } + for i, flow := range flows { - flows[i].Actions = flow.Actions + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", 
err) + return + } - // decrypt failure pipeline actions - for i, pipeline := range flow.FailurePipelines { - if pipeline.Actions != nil { - flow.FailurePipelines[i].Actions, err = encryption.DecryptParams(pipeline.Actions, decryptPasswords) - if err != nil { - httperror.InternalServerError(context, "Error decrypting action params", err) - return - } - } + if project.EncryptionEnabled && len(flow.Actions) > 0 { + flow.Actions, err = encryption.DecryptParamsWithProject(flow.Actions, flow.ProjectID, decryptPasswords, db) + if err != nil { + httperror.InternalServerError(context, "Error decrypting action params", err) + return + } - flows[i].FailurePipelines = flow.FailurePipelines + flows[i].Actions = flow.Actions + + // decrypt failure pipeline actions + for i, pipeline := range flow.FailurePipelines { + if pipeline.Actions != nil { + flow.FailurePipelines[i].Actions, err = encryption.DecryptParamsWithProject(pipeline.Actions, flow.ProjectID, decryptPasswords, db) + if err != nil { + httperror.InternalServerError(context, "Error decrypting action params", err) + return + } } + + flows[i].FailurePipelines = flow.FailurePipelines } } } diff --git a/services/backend/handlers/flows/start_execution.go b/services/backend/handlers/flows/start_execution.go index dea1c8d1..2c1db034 100644 --- a/services/backend/handlers/flows/start_execution.go +++ b/services/backend/handlers/flows/start_execution.go @@ -10,7 +10,6 @@ import ( "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/httperror" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" "github.com/gin-gonic/gin" "github.com/uptrace/bun" @@ -27,6 +26,14 @@ func StartExecution(context *gin.Context, db *bun.DB) { return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flow.ProjectID).Scan(context) + if err != nil { + 
httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + // check auth token type tokenType, err := auth.GetTypeFromToken(context.GetHeader("Authorization")) if err != nil { @@ -51,16 +58,16 @@ func StartExecution(context *gin.Context, db *bun.DB) { } // create execution step which tells that the execution is registerd and waiting for runner to pick it up - step := shared_models.ExecutionSteps{ + step := models.ExecutionSteps{ ExecutionID: execution.ID.String(), - Action: shared_models.Action{ + Action: models.Action{ Name: "Pick Up", Icon: "hugeicons:rocket", }, - Messages: []shared_models.Message{ + Messages: []models.Message{ { Title: "Pick Up", - Lines: []shared_models.Line{ + Lines: []models.Line{ { Content: "Execution is registered and waiting for runner to pick it up", Timestamp: time.Now(), @@ -74,8 +81,8 @@ func StartExecution(context *gin.Context, db *bun.DB) { } // check for encryption - if flow.EncryptExecutions && step.Messages != nil && len(step.Messages) > 0 { - step.Messages, err = encryption.EncryptExecutionStepActionMessage(step.Messages) + if project.EncryptionEnabled && step.Messages != nil && len(step.Messages) > 0 { + step.Messages, err = encryption.EncryptExecutionStepActionMessageWithProject(step.Messages, project.ID.String(), db) if err != nil { httperror.InternalServerError(context, "Error encrypting execution step action messages", err) return diff --git a/services/backend/handlers/flows/update.go b/services/backend/handlers/flows/update.go index 092d3c08..4a10d314 100644 --- a/services/backend/handlers/flows/update.go +++ b/services/backend/handlers/flows/update.go @@ -3,6 +3,7 @@ package flows import ( "errors" "net/http" + "reflect" "time" "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" @@ -56,6 +57,9 @@ func UpdateFlow(context *gin.Context, db *bun.DB) { flow.UpdatedAt = time.Now() columns := []string{} + if flow.Type != "" { + columns = append(columns, "type") + } if 
flow.Name != "" { columns = append(columns, "name") } @@ -71,18 +75,27 @@ func UpdateFlow(context *gin.Context, db *bun.DB) { if flow.RunnerID != flowDB.RunnerID { columns = append(columns, "runner_id") } - if flow.EncryptActionParams != flowDB.EncryptActionParams { - columns = append(columns, "encrypt_action_params") - } - if flow.EncryptExecutions != flowDB.EncryptExecutions { - columns = append(columns, "encrypt_executions") - } if flow.ScheduleEveryValue != flowDB.ScheduleEveryValue { columns = append(columns, "schedule_every_value") } if flow.ScheduleEveryUnit != flowDB.ScheduleEveryUnit { columns = append(columns, "schedule_every_unit") } + if flow.GroupAlerts != flowDB.GroupAlerts { + columns = append(columns, "group_alerts") + } + if flow.GroupAlertsIdentifier != flowDB.GroupAlertsIdentifier { + columns = append(columns, "group_alerts_identifier") + } + if flow.AlertThreshold != flowDB.AlertThreshold { + columns = append(columns, "alert_threshold") + } + if flow.ScheduleEveryUnit != flowDB.ScheduleEveryUnit { + columns = append(columns, "schedule_every_unit") + } + if !reflect.DeepEqual(flow.Patterns, flowDB.Patterns) { + columns = append(columns, "patterns") + } columns = append(columns, "exec_parallel") columns = append(columns, "failure_pipeline_id") columns = append(columns, "updated_at") diff --git a/services/backend/handlers/flows/update_actions.go b/services/backend/handlers/flows/update_actions.go index 2b8fc67c..719d9ea1 100644 --- a/services/backend/handlers/flows/update_actions.go +++ b/services/backend/handlers/flows/update_actions.go @@ -4,7 +4,6 @@ import ( "errors" "net/http" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" "github.com/v1Flows/exFlow/services/backend/functions/httperror" @@ -34,6 +33,14 @@ func UpdateFlowActions(context *gin.Context, db *bun.DB) { return } + // get project data + var project 
models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flowDB.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + // check if user has access to project access, err := gatekeeper.CheckUserProjectAccess(flowDB.ProjectID, context, db) if err != nil { @@ -57,8 +64,8 @@ func UpdateFlowActions(context *gin.Context, db *bun.DB) { } // encrypt action params - if config.Config.Encryption.Enabled && flowDB.EncryptActionParams { - flow.Actions, err = encryption.EncryptParams(flow.Actions) + if project.EncryptionEnabled { + flow.Actions, err = encryption.EncryptParamsWithProject(flow.Actions, project.ID.String(), db) if err != nil { httperror.InternalServerError(context, "Error encrypting action params", err) return diff --git a/services/backend/handlers/flows/update_actions_details.go b/services/backend/handlers/flows/update_actions_details.go index 27106b04..3e77fe70 100644 --- a/services/backend/handlers/flows/update_actions_details.go +++ b/services/backend/handlers/flows/update_actions_details.go @@ -4,7 +4,6 @@ import ( "errors" "net/http" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" "github.com/v1Flows/exFlow/services/backend/functions/httperror" @@ -34,6 +33,14 @@ func UpdateFlowActionsDetails(context *gin.Context, db *bun.DB) { return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flowDB.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + // check if user has access to project access, err := gatekeeper.CheckUserProjectAccess(flowDB.ProjectID, context, db) if err != nil { @@ -56,23 +63,17 @@ func UpdateFlowActionsDetails(context *gin.Context, db *bun.DB) { return } - if 
(!flowDB.EncryptActionParams && flow.EncryptActionParams) && config.Config.Encryption.Enabled { - flow.Actions, err = encryption.EncryptParams(flowDB.Actions) + if project.EncryptionEnabled { + flow.Actions, err = encryption.EncryptParamsWithProject(flowDB.Actions, flowDB.ProjectID, db) if err != nil { httperror.InternalServerError(context, "Error encrypting action params", err) return } - } else if flowDB.EncryptActionParams && !flow.EncryptActionParams && config.Config.Encryption.Enabled { - flow.Actions, err = encryption.DecryptParams(flowDB.Actions, true) - if err != nil { - httperror.InternalServerError(context, "Error decrypting action params", err) - return - } } else { flow.Actions = flowDB.Actions } - _, err = db.NewUpdate().Model(&flow).Column("encrypt_action_params", "exec_parallel", "patterns", "actions").Where("id = ?", flowID).Exec(context) + _, err = db.NewUpdate().Model(&flow).Column("exec_parallel", "patterns", "actions").Where("id = ?", flowID).Exec(context) if err != nil { httperror.InternalServerError(context, "Error updating actions details on db", err) return diff --git a/services/backend/handlers/flows/update_failure_pipeline.go b/services/backend/handlers/flows/update_failure_pipeline.go index bdff99b4..6a2516ef 100644 --- a/services/backend/handlers/flows/update_failure_pipeline.go +++ b/services/backend/handlers/flows/update_failure_pipeline.go @@ -5,7 +5,6 @@ import ( "net/http" "time" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" "github.com/v1Flows/exFlow/services/backend/functions/httperror" @@ -35,6 +34,14 @@ func UpdateFlowFailurePipelines(context *gin.Context, db *bun.DB) { return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flowDB.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting 
project data from db", err) + return + } + // check if user has access to project access, err := gatekeeper.CheckUserProjectAccess(flow.ProjectID, context, db) if err != nil { @@ -60,10 +67,10 @@ func UpdateFlowFailurePipelines(context *gin.Context, db *bun.DB) { flow.UpdatedAt = time.Now() // encrypt the actions for each failure pipeline - if config.Config.Encryption.Enabled && flowDB.EncryptActionParams { + if project.EncryptionEnabled { for i := range flow.FailurePipelines { if flow.FailurePipelines[i].Actions != nil { - flow.FailurePipelines[i].Actions, err = encryption.EncryptParams(flow.FailurePipelines[i].Actions) + flow.FailurePipelines[i].Actions, err = encryption.EncryptParamsWithProject(flow.FailurePipelines[i].Actions, project.ID.String(), db) if err != nil { httperror.InternalServerError(context, "Error encrypting actions", err) return diff --git a/services/backend/handlers/flows/update_failure_pipeline_actions.go b/services/backend/handlers/flows/update_failure_pipeline_actions.go index a3a801f5..cbb0b38a 100644 --- a/services/backend/handlers/flows/update_failure_pipeline_actions.go +++ b/services/backend/handlers/flows/update_failure_pipeline_actions.go @@ -4,13 +4,11 @@ import ( "errors" "net/http" - "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" "github.com/v1Flows/exFlow/services/backend/functions/httperror" functions_project "github.com/v1Flows/exFlow/services/backend/functions/project" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" "github.com/gin-gonic/gin" _ "github.com/lib/pq" @@ -22,7 +20,7 @@ func UpdateFlowFailurePipelineActions(context *gin.Context, db *bun.DB) { flowID := context.Param("flowID") failurePipelineID := context.Param("failurePipelineID") - var failurePipeline shared_models.FailurePipeline + var failurePipeline 
models.FailurePipeline if err := context.ShouldBindJSON(&failurePipeline); err != nil { httperror.StatusBadRequest(context, "Error parsing incoming data", err) return @@ -36,6 +34,14 @@ func UpdateFlowFailurePipelineActions(context *gin.Context, db *bun.DB) { return } + // get project data + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", flowDB.ProjectID).Scan(context) + if err != nil { + httperror.InternalServerError(context, "Error collecting project data from db", err) + return + } + // check if user has access to project access, err := gatekeeper.CheckUserProjectAccess(flowDB.ProjectID, context, db) if err != nil { @@ -59,8 +65,8 @@ func UpdateFlowFailurePipelineActions(context *gin.Context, db *bun.DB) { } // encrypt action params - if config.Config.Encryption.Enabled && flowDB.EncryptActionParams { - failurePipeline.Actions, err = encryption.EncryptParams(failurePipeline.Actions) + if project.EncryptionEnabled { + failurePipeline.Actions, err = encryption.EncryptParamsWithProject(failurePipeline.Actions, project.ID.String(), db) if err != nil { httperror.InternalServerError(context, "Error encrypting action params", err) return diff --git a/services/backend/handlers/projects/create.go b/services/backend/handlers/projects/create.go index 1f6d3eea..5dbe27c3 100644 --- a/services/backend/handlers/projects/create.go +++ b/services/backend/handlers/projects/create.go @@ -8,6 +8,7 @@ import ( "time" "github.com/v1Flows/exFlow/services/backend/functions/auth" + "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/functions/httperror" functions_runner "github.com/v1Flows/exFlow/services/backend/functions/runner" "github.com/v1Flows/exFlow/services/backend/pkg/models" @@ -55,7 +56,16 @@ func CreateProject(context *gin.Context, db *bun.DB) { return } - _, err = db.NewInsert().Model(&project).Column("id", "name", "description", "shared_runners", "icon", "color", 
"runner_auto_join_token").Exec(context) + // Generate encryption salt for the new project + encryptionSalt, err := encryption.GenerateProjectSalt() + if err != nil { + httperror.InternalServerError(context, "Error generating encryption salt", err) + return + } + project.EncryptionKey = encryptionSalt + project.EncryptionEnabled = true + + _, err = db.NewInsert().Model(&project).Column("id", "name", "description", "shared_runners", "icon", "color", "runner_auto_join_token", "encryption_key", "encryption_enabled").Exec(context) if err != nil { log.Error(err) httperror.InternalServerError(context, "Error creating project on db", err) diff --git a/services/backend/handlers/projects/encryption.go b/services/backend/handlers/projects/encryption.go new file mode 100644 index 00000000..f8ee5a43 --- /dev/null +++ b/services/backend/handlers/projects/encryption.go @@ -0,0 +1,152 @@ +package projects + +import ( + "errors" + "net/http" + + "github.com/v1Flows/exFlow/services/backend/functions/encryption" + "github.com/v1Flows/exFlow/services/backend/functions/gatekeeper" + "github.com/v1Flows/exFlow/services/backend/functions/httperror" + "github.com/v1Flows/exFlow/services/backend/pkg/models" + + "github.com/gin-gonic/gin" + "github.com/uptrace/bun" +) + +// GetProjectEncryptionStatus returns the encryption status for a project +func GetProjectEncryptionStatus(context *gin.Context, db *bun.DB) { + projectID := context.Param("projectID") + + // check if user has access to project + access, err := gatekeeper.CheckUserProjectAccess(projectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking for project access", err) + return + } + if !access { + httperror.Unauthorized(context, "You do not have access to this project", errors.New("you do not have access to this project")) + return + } + + var project models.Projects + err = db.NewSelect().Model(&project).Where("id = ?", projectID).Scan(context) + if err != nil { + 
httperror.InternalServerError(context, "Error receiving project data from db", err) + return + } + + response := gin.H{ + "encryption_enabled": project.EncryptionEnabled, + "has_encryption_salt": project.EncryptionKey != "", + } + + context.JSON(http.StatusOK, response) +} + +// EnableProjectEncryption enables encryption for a project +func EnableProjectEncryption(context *gin.Context, db *bun.DB) { + projectID := context.Param("projectID") + + // check if user has access to project + access, err := gatekeeper.CheckUserProjectAccess(projectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking for project access", err) + return + } + if !access { + httperror.Unauthorized(context, "You do not have access to this project", errors.New("you do not have access to this project")) + return + } + + // check the requestors role in project (only owners and editors can manage encryption) + canModify, err := gatekeeper.CheckRequestUserProjectModifyRole(projectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on project", err) + return + } + if !canModify { + httperror.Unauthorized(context, "You are not allowed to make modifications on this project", errors.New("unauthorized")) + return + } + + err = encryption.EnableProjectEncryption(projectID, db) + if err != nil { + httperror.InternalServerError(context, "Error enabling project encryption", err) + return + } + + context.JSON(http.StatusOK, gin.H{"result": "success", "message": "Project encryption enabled"}) +} + +// DisableProjectEncryption disables encryption for a project +func DisableProjectEncryption(context *gin.Context, db *bun.DB) { + projectID := context.Param("projectID") + + // check if user has access to project + access, err := gatekeeper.CheckUserProjectAccess(projectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking for project access", err) + return + } + if !access { + 
httperror.Unauthorized(context, "You do not have access to this project", errors.New("you do not have access to this project")) + return + } + + // check the requestors role in project (only owners and editors can manage encryption) + canModify, err := gatekeeper.CheckRequestUserProjectModifyRole(projectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on project", err) + return + } + if !canModify { + httperror.Unauthorized(context, "You are not allowed to make modifications on this project", errors.New("unauthorized")) + return + } + + err = encryption.DisableProjectEncryption(projectID, db) + if err != nil { + httperror.InternalServerError(context, "Error disabling project encryption", err) + return + } + + context.JSON(http.StatusOK, gin.H{"result": "success", "message": "Project encryption disabled"}) +} + +// RotateProjectEncryptionKey generates a new encryption key for a project +func RotateProjectEncryptionKey(context *gin.Context, db *bun.DB) { + projectID := context.Param("projectID") + + // check if user has access to project + access, err := gatekeeper.CheckUserProjectAccess(projectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking for project access", err) + return + } + if !access { + httperror.Unauthorized(context, "You do not have access to this project", errors.New("you do not have access to this project")) + return + } + + // check the requestors role in project (only owners can rotate encryption keys) + canModify, err := gatekeeper.CheckRequestUserProjectModifyRole(projectID, context, db) + if err != nil { + httperror.InternalServerError(context, "Error checking your user permissions on project", err) + return + } + if !canModify { + httperror.Unauthorized(context, "You are not allowed to make modifications on this project", errors.New("unauthorized")) + return + } + + _, err = encryption.RotateProjectEncryptionKey(projectID, db) + if err != 
nil { + httperror.InternalServerError(context, "Error rotating project encryption key", err) + return + } + + context.JSON(http.StatusOK, gin.H{ + "result": "success", + "message": "Project encryption salt rotated successfully. Existing encrypted data will continue to work with the old salt until re-encrypted.", + }) +} diff --git a/services/backend/handlers/runners/register.go b/services/backend/handlers/runners/register.go index 0b40e9de..e8896680 100644 --- a/services/backend/handlers/runners/register.go +++ b/services/backend/handlers/runners/register.go @@ -11,7 +11,6 @@ import ( "github.com/v1Flows/exFlow/services/backend/functions/httperror" functions_runner "github.com/v1Flows/exFlow/services/backend/functions/runner" "github.com/v1Flows/exFlow/services/backend/pkg/models" - shared_models "github.com/v1Flows/shared-library/pkg/models" "github.com/gin-gonic/gin" "github.com/google/uuid" @@ -43,7 +42,7 @@ func RegisterRunner(context *gin.Context, db *bun.DB) { } var runner models.Runners - var autoRunner shared_models.IncomingAutoRunners + var autoRunner models.IncomingAutoRunners if runnerType == "project_auto_runner" { if err := context.ShouldBindJSON(&autoRunner); err != nil { diff --git a/services/backend/handlers/setup/main.go b/services/backend/handlers/setup/main.go new file mode 100644 index 00000000..084749e6 --- /dev/null +++ b/services/backend/handlers/setup/main.go @@ -0,0 +1,470 @@ +package setup + +import ( + "crypto/rand" + "database/sql" + "encoding/base64" + "fmt" + "net/http" + "net/url" + "os" + "os/exec" + "strings" + "syscall" + "time" + + "github.com/gin-gonic/gin" + _ "github.com/lib/pq" // PostgreSQL driver + log "github.com/sirupsen/logrus" + "gopkg.in/yaml.v3" +) + +type SetupRequest struct { + BackendURL string `json:"backend_url" binding:"required"` + BackendPort int `json:"backend_port" binding:"required"` + Database DatabaseSetup `json:"database" binding:"required"` + FrontendURL string `json:"frontend_url" binding:"required"` +} 
+ +type DatabaseSetup struct { + Server string `json:"server" binding:"required"` + Port int `json:"port" binding:"required"` + Name string `json:"name" binding:"required"` + User string `json:"user" binding:"required"` + Password string `json:"password" binding:"required"` +} + +type BackendConfig struct { + LogLevel string `yaml:"log_level"` + Port int `yaml:"port"` + Database DatabaseConfig `yaml:"database"` + JWT JWTConfig `yaml:"jwt"` + Encryption EncryptionConfig `yaml:"encryption"` + Runner RunnerConfig `yaml:"runner"` +} + +type DatabaseConfig struct { + Server string `yaml:"server"` + Port int `yaml:"port"` + Name string `yaml:"name"` + User string `yaml:"user"` + Password string `yaml:"password"` +} + +type JWTConfig struct { + Secret string `yaml:"secret"` +} + +type EncryptionConfig struct { + MasterSecret string `yaml:"master_secret"` + Key string `yaml:"key"` +} + +type RunnerConfig struct { + SharedRunnerSecret string `yaml:"shared_runner_secret"` +} + +// generateRandomString generates a cryptographically secure random string of specified length +func generateRandomString(length int) (string, error) { + bytes := make([]byte, length) + _, err := rand.Read(bytes) + if err != nil { + return "", err + } + return base64.URLEncoding.EncodeToString(bytes)[:length], nil +} + +// DatabaseValidationResult contains detailed database validation information +type DatabaseValidationResult struct { + Connected bool `json:"connected"` + TablesExist bool `json:"tables_exist"` + TableCount int `json:"table_count"` + ExistingTables []string `json:"existing_tables,omitempty"` + IsEmpty bool `json:"is_empty"` + Error string `json:"error,omitempty"` + Warning string `json:"warning,omitempty"` +} + +// validateDatabaseConnection tests the database connection and checks table status +func validateDatabaseConnection(server string, port int, name, user, password string) (*DatabaseValidationResult, error) { + result := &DatabaseValidationResult{ + Connected: false, + 
TablesExist: false, + TableCount: 0, + ExistingTables: []string{}, + IsEmpty: true, + } + + // Construct PostgreSQL connection string + connStr := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable", + server, port, user, password, name) + + // Open connection + db, err := sql.Open("postgres", connStr) + if err != nil { + result.Error = fmt.Sprintf("failed to create database connection: %v", err) + return result, fmt.Errorf("failed to create database connection: %v", err) + } + defer db.Close() + + // Set connection timeout + db.SetConnMaxLifetime(5 * time.Second) + + // Test the connection + err = db.Ping() + if err != nil { + result.Error = fmt.Sprintf("failed to connect to database: %v", err) + return result, fmt.Errorf("failed to connect to database: %v", err) + } + + result.Connected = true + + // Check for existing tables + query := ` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_type = 'BASE TABLE' + ORDER BY table_name + ` + + rows, err := db.Query(query) + if err != nil { + result.Warning = fmt.Sprintf("could not check tables: %v", err) + return result, nil // Connection works, but can't check tables + } + defer rows.Close() + + var tables []string + for rows.Next() { + var tableName string + if err := rows.Scan(&tableName); err != nil { + continue + } + tables = append(tables, tableName) + } + + result.ExistingTables = tables + result.TableCount = len(tables) + result.TablesExist = len(tables) > 0 + result.IsEmpty = len(tables) == 0 + + // Check if database is empty - if not, return an error + if result.IsEmpty { + result.Warning = "Database is empty - perfect for a fresh ExFlow installation" + return result, nil + } else { + // Database contains tables - this is an error for setup + result.Error = fmt.Sprintf("Database contains %d existing tables: %s. 
ExFlow setup requires an empty database.", + len(tables), strings.Join(tables, ", ")) + return result, fmt.Errorf("database must be empty for setup - found %d existing tables", len(tables)) + } +} + +// validateBackendURL tests if the backend URL is accessible and properly formatted +func validateBackendURL(backendURL string) error { + // Parse the URL + parsedURL, err := url.Parse(backendURL) + if err != nil { + return fmt.Errorf("invalid URL format: %v", err) + } + + // Check if scheme is provided + if parsedURL.Scheme == "" { + return fmt.Errorf("URL must include scheme (http:// or https://)") + } + + // Check if host is provided + if parsedURL.Host == "" { + return fmt.Errorf("URL must include host") + } + + // For localhost URLs, we can't test connectivity from the backend to itself + // since we're in setup mode, so just validate format + if strings.Contains(parsedURL.Host, "localhost") || strings.Contains(parsedURL.Host, "127.0.0.1") { + return nil + } + + // For external URLs, test connectivity + client := &http.Client{ + Timeout: 5 * time.Second, + } + + resp, err := client.Get(backendURL + "/api/v1/health") + if err != nil { + // If health endpoint fails, just warn but don't fail setup + log.Warn("Could not verify backend URL accessibility: ", err) + return nil + } + defer resp.Body.Close() + + return nil +} + +// SetupSystem handles the initial system setup +func SetupSystem(c *gin.Context, configFile string, frontendEnv string) { + var req SetupRequest + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Validate database connection + log.Info("Validating database connection...") + dbResult, err := validateDatabaseConnection(req.Database.Server, req.Database.Port, req.Database.Name, req.Database.User, req.Database.Password) + if err != nil { + log.Error("Database validation failed: ", err) + c.JSON(http.StatusBadRequest, gin.H{ + "error": fmt.Sprintf("Database connection 
failed: %v", err), + "field": "database", + "details": dbResult, + }) + return + } + + if dbResult.Warning != "" { + log.Warn("Database validation warning: ", dbResult.Warning) + } + + log.Info("Database connection validated successfully") + + // Validate backend URL format + log.Info("Validating backend URL...") + if err := validateBackendURL(req.BackendURL); err != nil { + log.Error("Backend URL validation failed: ", err) + c.JSON(http.StatusBadRequest, gin.H{ + "error": fmt.Sprintf("Backend URL validation failed: %v", err), + "field": "backend_url", + }) + return + } + log.Info("Backend URL validated successfully") + + // Generate secure secrets + jwtSecret, err := generateRandomString(64) + if err != nil { + log.Error("Failed to generate JWT secret: ", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to generate JWT secret"}) + return + } + + masterSecret, err := generateRandomString(64) + if err != nil { + log.Error("Failed to generate master secret: ", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to generate master secret"}) + return + } + + encryptionKey, err := generateRandomString(32) + if err != nil { + log.Error("Failed to generate encryption key: ", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to generate encryption key"}) + return + } + + runnerSecret, err := generateRandomString(32) + if err != nil { + log.Error("Failed to generate runner secret: ", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to generate runner secret"}) + return + } + + // Create backend config + backendConfig := BackendConfig{ + LogLevel: "info", + Port: req.BackendPort, + Database: DatabaseConfig{ + Server: req.Database.Server, + Port: req.Database.Port, + Name: req.Database.Name, + User: req.Database.User, + Password: req.Database.Password, + }, + JWT: JWTConfig{ + Secret: jwtSecret, + }, + Encryption: EncryptionConfig{ + MasterSecret: masterSecret, + Key: encryptionKey, + }, + Runner: 
RunnerConfig{ + SharedRunnerSecret: runnerSecret, + }, + } + + // Write backend config.yaml + configData, err := yaml.Marshal(&backendConfig) + if err != nil { + log.Error("Failed to marshal backend config: ", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create backend config"}) + return + } + + err = os.WriteFile(configFile, configData, 0600) + if err != nil { + log.Error("Failed to write backend config: ", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to write backend config"}) + return + } + + // Create frontend .env file + envContent := "NEXT_PUBLIC_API_URL=\"" + req.BackendURL + "\"\n" + + err = os.WriteFile(frontendEnv, []byte(envContent), 0644) + if err != nil { + log.Error("Failed to write frontend .env: ", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to write frontend config"}) + return + } + + log.Info("System setup completed successfully") + c.JSON(http.StatusOK, gin.H{ + "message": "Setup completed successfully. 
Application will restart in full mode.", + "backend_config_path": configFile, + "frontend_env_path": frontendEnv, + "restart_required": true, + }) + + // Restart the application in a goroutine to allow the response to be sent first + go func() { + log.Info("Restarting application in full mode...") + RestartApplication(nil) + }() +} + +// CheckSetupStatus checks if the system has been set up +func CheckSetupStatus(c *gin.Context, configFile string, frontendEnv string) { + backendConfigExists := false + frontendEnvExists := false + + // Check if backend config exists + if _, err := os.Stat(configFile); err == nil { + backendConfigExists = true + } + + // Check if frontend .env exists + if _, err := os.Stat(frontendEnv); err == nil { + frontendEnvExists = true + } + + isSetup := backendConfigExists && frontendEnvExists + + c.JSON(http.StatusOK, gin.H{ + "is_setup": isSetup, + "backend_config_exists": backendConfigExists, + "frontend_env_exists": frontendEnvExists, + }) +} + +// ValidateSetupData validates setup configuration without saving +func ValidateSetupData(c *gin.Context) { + var req SetupRequest + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + validationResults := gin.H{ + "database_valid": false, + "backend_url_valid": false, + "validation_errors": []string{}, + "database_details": nil, + } + + // Validate database connection + dbResult, err := validateDatabaseConnection(req.Database.Server, req.Database.Port, req.Database.Name, req.Database.User, req.Database.Password) + if err != nil { + validationResults["validation_errors"] = append(validationResults["validation_errors"].([]string), fmt.Sprintf("Database: %v", err)) + validationResults["database_details"] = dbResult + } else { + validationResults["database_valid"] = true + validationResults["database_details"] = dbResult + + // Add warning as info message if present + if dbResult.Warning != "" { + 
validationResults["validation_errors"] = append(validationResults["validation_errors"].([]string), fmt.Sprintf("Database Info: %s", dbResult.Warning)) + } + } + + // Validate backend URL + if err := validateBackendURL(req.BackendURL); err != nil { + validationResults["validation_errors"] = append(validationResults["validation_errors"].([]string), fmt.Sprintf("Backend URL: %v", err)) + } else { + validationResults["backend_url_valid"] = true + } + + allValid := validationResults["database_valid"].(bool) && validationResults["backend_url_valid"].(bool) + validationResults["all_valid"] = allValid + + if allValid { + c.JSON(http.StatusOK, validationResults) + } else { + c.JSON(http.StatusBadRequest, validationResults) + } +} + +// RestartApplication restarts the current application process +func RestartApplication(c *gin.Context) { + if c != nil { + c.JSON(http.StatusOK, gin.H{"message": "Application restarting..."}) + } + + log.Info("Restarting application...") + + // Get the current executable path + executable, err := os.Executable() + if err != nil { + log.Error("Failed to get executable path: ", err) + if c != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to restart application"}) + } + return + } + + // Get current process arguments + args := os.Args[1:] // Exclude the program name + + // Create a new process + cmd := exec.Command(executable, args...) 
+ + // Set up environment for the new process + cmd.Env = os.Environ() + cmd.Env = append(cmd.Env, "EXFLOW_RESTARTED=1") // Flag to indicate this is a restarted process + + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + cmd.Stdin = os.Stdin + + // For development (go run), we need to stay in the same process group + // so that Ctrl+C signals are properly forwarded + // For production (compiled binary), we can use process groups + isGoRun := strings.Contains(executable, "go-build") || strings.Contains(executable, "/tmp/") + + if !isGoRun { + // Production mode - use separate process group for better isolation + cmd.SysProcAttr = &syscall.SysProcAttr{ + Setpgid: true, + Pgid: 0, + } + log.Info("Starting in production mode with separate process group") + } else { + // Development mode - inherit parent process group for signal forwarding + log.Info("Starting in development mode (go run) - inheriting process group") + } + + // Start the new process + err = cmd.Start() + if err != nil { + log.Error("Failed to start new process: ", err) + if c != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to restart application"}) + } + return + } + + log.Info("New process started with PID: ", cmd.Process.Pid) + log.Info("Setup-initiated restart complete") + + // Terminate the current process gracefully + os.Exit(0) +} diff --git a/services/backend/handlers/users/get_stats.go b/services/backend/handlers/users/get_stats.go index dca410c9..18c0522c 100644 --- a/services/backend/handlers/users/get_stats.go +++ b/services/backend/handlers/users/get_stats.go @@ -1,9 +1,7 @@ package users import ( - "fmt" "net/http" - "strconv" "time" "github.com/v1Flows/exFlow/services/backend/functions/auth" @@ -50,70 +48,135 @@ func GetUserStats(context *gin.Context, db *bun.DB) { } type Stats struct { - Date string `json:"date"` - Weekday string `json:"weekday"` - Value int `json:"value"` - IsCurrent bool `json:"is_current"` + Weekday string `json:"weekday"` + TotalExecutions 
int `json:"total_executions"` + Success int `json:"success"` + Error int `json:"error"` + Pending int `json:"pending"` + Running int `json:"running"` + Canceled int `json:"canceled"` + Scheduled int `json:"scheduled"` + NoPatternMatch int `json:"noPatternMatch"` + Recovered int `json:"recovered"` } - var executionStats []Stats + type AlertStats struct { + Weekday string `json:"weekday"` + TotalAlerts int `json:"total_alerts"` + Firing int `json:"firing"` + Resolved int `json:"resolved"` + } + + type RawExecutionStats struct { + Date string `json:"date"` + Status string `json:"status"` + Value int `json:"value"` + } + + var rawExecutionStats []RawExecutionStats err = db.NewSelect(). - TableExpr("(SELECT DATE(created_at) as date, COUNT(*) as value FROM executions WHERE flow_id IN (?) AND created_at >= NOW() - INTERVAL '7 days' GROUP BY DATE(created_at)) AS subquery", bun.In(flowsArray)). - Scan(context, &executionStats) + TableExpr("(SELECT DATE(created_at) as date, status, COUNT(*) as value FROM executions WHERE flow_id IN (?) AND created_at >= NOW() - INTERVAL '7 days' GROUP BY DATE(created_at), status) AS subquery", bun.In(flowsArray)). 
+ Scan(context, &rawExecutionStats) if err != nil { httperror.InternalServerError(context, "Error collecting execution stats from db", err) return } - // Create a map to store the execution stats by weekday of the week - statsMap := make(map[string]int) - for _, stat := range executionStats { + // Create a map to store the execution stats by weekday and status + statsMap := make(map[string]map[string]int) + totalStatsMap := make(map[string]int) + for _, stat := range rawExecutionStats { date, _ := time.Parse("2006-01-02", stat.Date) - weekday := date.Weekday().String()[:2] - statsMap[weekday] += stat.Value + weekday := date.Weekday().String()[:3] + + if statsMap[weekday] == nil { + statsMap[weekday] = make(map[string]int) + } + + statsMap[weekday][stat.Status] += stat.Value + totalStatsMap[weekday] += stat.Value } // Generate the execution stats for each weekday of the week - executionStats = make([]Stats, 0) + var executionStats []Stats for i := 6; i >= 0; i-- { // Look from the current weekday in the past - weekday := time.Now().AddDate(0, 0, -i).Weekday().String()[:2] - isCurrent := i == 0 - executionStats = append(executionStats, Stats{Weekday: weekday, Value: statsMap[weekday], IsCurrent: isCurrent}) - } - - // Determine the trend for executions - executionTrend := "neutral" - executionTrendPercentage := 0.0 - if len(executionStats) > 1 { - previousValue := executionStats[len(executionStats)-2].Value - currentValue := executionStats[len(executionStats)-1].Value - if previousValue != 0 { - if currentValue > previousValue { - executionTrend = "positive" - executionTrendPercentage = (float64(currentValue-previousValue) / float64(previousValue)) * 100 - } else if currentValue < previousValue { - executionTrend = "negative" - executionTrendPercentage = (float64(previousValue-currentValue) / float64(previousValue)) * 100 - } - } else if currentValue > 0 { - executionTrend = "positive" - executionTrendPercentage = float64(currentValue) * 100 // Reflect significant 
increase + weekday := time.Now().AddDate(0, 0, -i).Weekday().String()[:3] + + executionStats = append(executionStats, Stats{ + Weekday: weekday, + TotalExecutions: totalStatsMap[weekday], + Success: statsMap[weekday]["success"], + Error: statsMap[weekday]["error"], + Pending: statsMap[weekday]["pending"], + Running: statsMap[weekday]["running"], + Canceled: statsMap[weekday]["canceled"], + Scheduled: statsMap[weekday]["scheduled"], + NoPatternMatch: statsMap[weekday]["noPatternMatch"], + Recovered: statsMap[weekday]["recovered"], + }) + } + + // alerts - similar logic as executions + type RawAlertStats struct { + Date string `json:"date"` + Status string `json:"status"` + Value int `json:"value"` + } + + var rawAlertStats []RawAlertStats + err = db.NewSelect(). + TableExpr("(SELECT DATE(created_at) as date, status, COUNT(*) as value FROM alerts WHERE flow_id IN (?) AND created_at >= NOW() - INTERVAL '7 days' GROUP BY DATE(created_at), status) AS subquery", bun.In(flowsArray)). + Scan(context, &rawAlertStats) + if err != nil { + httperror.InternalServerError(context, "Error collecting alert stats from db", err) + return + } + + // Create a map to store the alert stats by weekday and status + alertStatsMap := make(map[string]map[string]int) + totalAlertStatsMap := make(map[string]int) + for _, stat := range rawAlertStats { + date, _ := time.Parse("2006-01-02", stat.Date) + weekday := date.Weekday().String()[:3] + + if alertStatsMap[weekday] == nil { + alertStatsMap[weekday] = make(map[string]int) } - executionTrendPercentage, _ = strconv.ParseFloat(fmt.Sprintf("%.2f", executionTrendPercentage), 64) + + alertStatsMap[weekday][stat.Status] += stat.Value + totalAlertStatsMap[weekday] += stat.Value + } + + // Generate the alert stats for each weekday of the week + var alertStats []AlertStats + for i := 6; i >= 0; i-- { // Look from the current weekday in the past + weekday := time.Now().AddDate(0, 0, -i).Weekday().String()[:3] + + alertStats = append(alertStats, 
AlertStats{ + Weekday: weekday, + TotalAlerts: totalAlertStatsMap[weekday], + Firing: alertStatsMap[weekday]["firing"], + Resolved: alertStatsMap[weekday]["resolved"], + }) + } + + alertCount := 0 + for _, alert := range alertStats { + alertCount += alert.TotalAlerts } executionCount := 0 for _, execution := range executionStats { - executionCount += execution.Value + executionCount += execution.TotalExecutions } context.JSON(http.StatusOK, gin.H{"result": "success", "stats": gin.H{ - "total_projects": projectCount, - "total_flows": flowCount, - "total_runners": runnerCount, - "total_executions": executionCount, - "executions": executionStats, - "execution_trend": executionTrend, - "execution_trend_percentage": executionTrendPercentage, + "total_projects": projectCount, + "total_flows": flowCount, + "total_runners": runnerCount, + "total_executions": executionCount, + "total_alerts": alertCount, + "executions": executionStats, + "alerts": alertStats, }}) } diff --git a/services/backend/main.go b/services/backend/main.go index 7e64c6ef..381a4271 100644 --- a/services/backend/main.go +++ b/services/backend/main.go @@ -1,35 +1,43 @@ package main import ( + "context" + "os" + "os/signal" "strings" + "syscall" + "time" "github.com/v1Flows/exFlow/services/backend/config" "github.com/v1Flows/exFlow/services/backend/database" "github.com/v1Flows/exFlow/services/backend/functions/background_checks" + "github.com/v1Flows/exFlow/services/backend/functions/encryption" "github.com/v1Flows/exFlow/services/backend/router" "github.com/alecthomas/kingpin/v2" log "github.com/sirupsen/logrus" ) -const version string = "1.5.2" +const version string = "2.0.0" var ( - configFile = kingpin.Flag("config", "Config file").Short('c').Default("config.yaml").String() + configFile = kingpin.Flag("config", "Config file").Short('c').Default("/etc/exflow/config.yaml").String() + frontendEnv = kingpin.Flag("frontendEnv", "Path to frontend environment").Default("/etc/exflow/.env").String() ) func 
logging(logLevel string) { logLevel = strings.ToLower(logLevel) - if logLevel == "info" { + switch logLevel { + case "info": log.SetLevel(log.InfoLevel) - } else if logLevel == "warn" { + case "warn": log.SetLevel(log.WarnLevel) - } else if logLevel == "error" { + case "error": log.SetLevel(log.ErrorLevel) - } else if logLevel == "debug" { + case "debug": log.SetLevel(log.DebugLevel) - } else { + default: log.SetLevel(log.InfoLevel) } } @@ -41,10 +49,24 @@ func main() { log.Info("Starting exFlow API. Version: ", version) + // Check if this is a restarted process + if os.Getenv("EXFLOW_RESTARTED") == "1" { + log.Info("Application restarted after setup completion") + } + + // Check if config file exists + if _, err := os.Stat(*configFile); os.IsNotExist(err) { + log.Info("Config file not found, starting in setup mode") + startSetupMode(*configFile, *frontendEnv) + return + } + log.Info("Loading Config File: ", *configFile) err := config.GetInstance().LoadConfig(*configFile) if err != nil { - panic(err) + log.Error("Failed to load config file, starting in setup mode: ", err) + startSetupMode(*configFile, *frontendEnv) + return } cfg := config.Config @@ -57,6 +79,51 @@ func main() { log.Fatal("Failed to connect to the database") } + err = encryption.MigrateProjectsEncryption(cfg.Encryption.Key, db) + if err != nil { + log.Fatal("Failed to migrate projects: ", err) + } + go background_checks.Init(db) - router.StartRouter(db, cfg.Port) + + // Set up signal handling for graceful shutdown + server := router.StartRouter(db, cfg.Port, *configFile, *frontendEnv) + + // Wait for interrupt signal to gracefully shutdown the server + quit := make(chan os.Signal, 1) + signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) + <-quit + log.Info("Shutting down server...") + + // The server has 30 seconds to finish the request it is currently handling + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + if err := server.Shutdown(ctx); err != nil { 
+ log.Fatal("Server forced to shutdown:", err) + } + + log.Info("Server exited") +} + +func startSetupMode(configFile string, frontendEnv string) { + log.Info("Starting in setup mode - limited functionality available") + logging("info") // Default to info level logging in setup mode + + // Start router in setup mode (without database connection) + server := router.StartSetupRouter(8080, configFile, frontendEnv) // Default port for setup + + // Wait for interrupt signal to gracefully shutdown the server + quit := make(chan os.Signal, 1) + signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) + <-quit + log.Info("Shutting down setup server...") + + // The server has 30 seconds to finish the request it is currently handling + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + if err := server.Shutdown(ctx); err != nil { + log.Fatal("Setup server forced to shutdown:", err) + } + + log.Info("Setup server exited") } diff --git a/services/backend/pkg/models/alerts.go b/services/backend/pkg/models/alerts.go new file mode 100644 index 00000000..0a1f4d9c --- /dev/null +++ b/services/backend/pkg/models/alerts.go @@ -0,0 +1,52 @@ +package models + +import ( + "encoding/json" + "time" + + "github.com/google/uuid" + "github.com/uptrace/bun" +) + +type Alerts struct { + bun.BaseModel `bun:"table:alerts"` + + ID uuid.UUID `bun:",pk,type:uuid,default:gen_random_uuid()" json:"id"` + Name string `bun:"name,type:text,default:''" json:"name"` + Status string `bun:"status,type:text,default:''" json:"status"` + Payload json.RawMessage `bun:"payload,type:jsonb,default:jsonb('[]')" json:"payload"` + FlowID string `bun:"flow_id,type:text,default:''" json:"flow_id"` + ExecutionID string `bun:"execution_id,type:text,default:''" json:"execution_id"` + RunnerID string `bun:"runner_id,type:text,default:''" json:"runner_id"` + ParentID string `bun:"parent_id,type:text,default:''" json:"parent_id"` + Plugin string `bun:"plugin,type:text,default:''" 
json:"plugin"` + CreatedAt time.Time `bun:"created_at,type:timestamptz,default:now()" json:"created_at"` + Encrypted bool `bun:"encrypted,type:bool,default:false" json:"encrypted"` + UpdatedAt time.Time `bun:"updated_at,type:timestamptz" json:"updated_at"` + ResolvedAt time.Time `bun:"resolved_at,type:timestamptz" json:"resolved_at"` + GroupKey string `bun:"group_key,type:text,default:''" json:"group_key"` + SubAlerts []SubAlerts `bun:"sub_alerts,type:jsonb,default:jsonb('[]')" json:"sub_alerts"` + Note string `bun:"note,type:text,default:''" json:"note"` +} + +type AlertEndpoints struct { + ID string `json:"id"` + Name string `json:"name"` + Endpoint string `json:"endpoint"` + Icon string `json:"icon"` + Color string `json:"color"` +} + +type SubAlerts struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + Labels json.RawMessage `json:"labels"` + StartedAt time.Time `json:"started_at"` + ResolvedAt time.Time `json:"resolved_at"` +} + +type IncomingGroupedAlertsRequest struct { + FlowID string `json:"flow_id"` + GroupAlertsIdentifier string `json:"group_alerts_identifier"` +} diff --git a/services/backend/pkg/models/execution_steps.go b/services/backend/pkg/models/execution_steps.go index 30204f20..8856b35c 100644 --- a/services/backend/pkg/models/execution_steps.go +++ b/services/backend/pkg/models/execution_steps.go @@ -1,9 +1,44 @@ package models import ( - shared_models "github.com/v1Flows/shared-library/pkg/models" + "time" + + "github.com/google/uuid" + "github.com/uptrace/bun" ) type ExecutionSteps struct { - shared_models.ExecutionSteps + bun.BaseModel `bun:"table:execution_steps"` + + ID uuid.UUID `bun:",pk,type:uuid,default:gen_random_uuid()" json:"id"` + ExecutionID string `bun:"execution_id,type:text,notnull" json:"execution_id"` + Action Action `bun:"action,type:jsonb,default:jsonb('{}')" json:"action"` + Messages []Message `bun:"messages,type:jsonb,default:jsonb('[]')" json:"messages"` + RunnerID string 
`bun:"runner_id,type:text,default:''" json:"runner_id"` + ParentID string `bun:"parent_id,type:text,default:''" json:"parent_id"` + IsHidden bool `bun:"is_hidden,type:bool,default:false" json:"is_hidden"` + Status string `bun:"status,type:text,default:''" json:"status"` + Encrypted bool `bun:"encrypted,type:bool,default:false" json:"encrypted"` + Interactive bool `bun:"interactive,type:bool,default:false" json:"interactive"` + Interacted bool `bun:"interacted,type:bool,default:false" json:"interacted"` + InteractionApproved bool `bun:"interaction_approved,type:bool,default:false" json:"interaction_approved"` + InteractionRejected bool `bun:"interaction_rejected,type:bool,default:false" json:"interaction_rejected"` + InteractedBy string `bun:"interacted_by,type:text,default:''" json:"interacted_by"` + InteractedAt time.Time `bun:"interacted_at,type:timestamptz" json:"interacted_at"` + CanceledBy string `bun:"canceled_by,type:text,default:''" json:"canceled_by"` + CanceledAt time.Time `bun:"canceled_at,type:timestamptz" json:"canceled_at"` + CreatedAt time.Time `bun:"created_at,type:timestamptz,default:now()" json:"created_at"` + StartedAt time.Time `bun:"started_at,type:timestamptz" json:"started_at"` + FinishedAt time.Time `bun:"finished_at,type:timestamptz" json:"finished_at"` +} + +type Message struct { + Title string `json:"title"` + Lines []Line `json:"lines"` +} + +type Line struct { + Content string `json:"content"` + Color string `json:"color"` + Timestamp time.Time `json:"timestamp"` } diff --git a/services/backend/pkg/models/executions.go b/services/backend/pkg/models/executions.go index 5b040202..7c77f5c9 100644 --- a/services/backend/pkg/models/executions.go +++ b/services/backend/pkg/models/executions.go @@ -3,14 +3,24 @@ package models import ( "time" - shared_models "github.com/v1Flows/shared-library/pkg/models" + "github.com/google/uuid" + "github.com/uptrace/bun" ) type Executions struct { - shared_models.Executions + bun.BaseModel 
`bun:"table:executions"` - ScheduledAt time.Time `bun:"scheduled_at,type:timestamptz" json:"scheduled_at"` - TriggeredBy string `bun:"triggered_by,type:text,default:'user'" json:"triggered_by"` + ID uuid.UUID `bun:",pk,type:uuid,default:gen_random_uuid()" json:"id"` + FlowID string `bun:"flow_id,type:text,default:''" json:"flow_id"` + RunnerID string `bun:"runner_id,type:text,default:''" json:"runner_id"` + Status string `bun:"status,type:text,default:''" json:"status"` + CreatedAt time.Time `bun:"created_at,type:timestamptz,default:now()" json:"created_at"` + ExecutedAt time.Time `bun:"executed_at,type:timestamptz" json:"executed_at"` + FinishedAt time.Time `bun:"finished_at,type:timestamptz" json:"finished_at"` + LastHeartbeat time.Time `bun:"last_heartbeat,type:timestamptz" json:"last_heartbeat"` + ScheduledAt time.Time `bun:"scheduled_at,type:timestamptz" json:"scheduled_at"` + TriggeredBy string `bun:"triggered_by,type:text,default:'user'" json:"triggered_by"` + AlertID string `bun:"alert_id,type:text,default:''" json:"alert_id"` } type ExecutionWithSteps struct { diff --git a/services/backend/pkg/models/flows.go b/services/backend/pkg/models/flows.go index a1890097..17749e7b 100644 --- a/services/backend/pkg/models/flows.go +++ b/services/backend/pkg/models/flows.go @@ -1,13 +1,104 @@ package models import ( - shared_models "github.com/v1Flows/shared-library/pkg/models" + "time" + + "github.com/google/uuid" + "github.com/uptrace/bun" ) type Flows struct { - shared_models.Flows + bun.BaseModel `bun:"table:flows"` + + ID uuid.UUID `bun:",pk,type:uuid,default:gen_random_uuid()" json:"id"` + Name string `bun:"name,type:text,notnull" json:"name"` + Description string `bun:"description,type:text,default:''" json:"description"` + Type string `bun:"type,type:text,default:'default'" json:"type"` + ProjectID string `bun:"project_id,type:text,notnull" json:"project_id"` + RunnerID string `bun:"runner_id,type:text,default:''" json:"runner_id"` + ExecParallel bool 
`bun:"exec_parallel,type:bool,default:false" json:"exec_parallel"` + Actions []Action `bun:"type:jsonb,default:jsonb('[]')" json:"actions"` + Maintenance bool `bun:"maintenance,type:bool,default:false" json:"maintenance"` + MaintenanceMessage string `bun:"maintenance_message,type:text,default:''" json:"maintenance_message"` + Disabled bool `bun:"disabled,type:bool,default:false" json:"disabled"` + DisabledReason string `bun:"disabled_reason,type:text,default:''" json:"disabled_reason"` + CreatedAt time.Time `bun:"created_at,type:timestamptz,default:now()" json:"created_at"` + UpdatedAt time.Time `bun:"updated_at,type:timestamptz" json:"updated_at"` + FailurePipelines []FailurePipeline `bun:"type:jsonb,default:jsonb('[]')" json:"failure_pipelines"` + FailurePipelineID string `bun:"failure_pipeline_id,type:text,default:''" json:"failure_pipeline_id"` + FolderID string `bun:"folder_id,type:text,default:''" json:"folder_id"` + ScheduleEveryValue int `bun:"schedule_every_value,type:integer,default:0" json:"schedule_every_value"` + ScheduleEveryUnit string `bun:"schedule_every_unit,type:text,default:''" json:"schedule_every_unit"` + Patterns []Pattern `bun:"type:jsonb,default:jsonb('[]')" json:"patterns"` + GroupAlerts bool `bun:"group_alerts,type:bool,default:true" json:"group_alerts"` + GroupAlertsIdentifier string `bun:"group_alerts_identifier,type:text,default:''" json:"group_alerts_identifier"` + AlertThreshold int `bun:"alert_threshold,type:int,default:0" json:"alert_threshold"` +} + +type Action struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Plugin string `json:"plugin"` + Version string `json:"version"` + Icon string `json:"icon"` + Category string `json:"category"` + Active bool `json:"active"` + Params []Params `json:"params"` + CustomName string `json:"custom_name"` + CustomDescription string `json:"custom_description"` + FailurePipelineID string `json:"failure_pipeline_id"` + UpdateAvailable bool 
`json:"update_available"` + UpdateVersion string `json:"update_version,omitempty"` + UpdatedAction *Action `json:"updated_action,omitempty"` + Condition Condition `json:"condition,omitempty"` +} + +type Params struct { + Key string `json:"key"` + Title string `json:"title"` + Description string `json:"description"` + Category string `json:"category"` + Required bool `json:"required"` + Type string `json:"type"` + Value string `json:"value"` + Default string `json:"default"` + Options []Option `json:"options,omitempty"` + DependsOn DependsOn `json:"depends_on,omitempty"` +} + +type DependsOn struct { + Key string `json:"key"` + Value string `json:"value"` +} + +type Option struct { + Key string `json:"key"` + Value string `json:"value"` +} + +type FailurePipeline struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Actions []Action `json:"actions"` + ExecParallel bool `json:"exec_parallel"` +} + +type Condition struct { + SelectedActionID string `json:"selected_action_id"` + ConditionItems []ConditionItem `json:"condition_items"` + CancelExecution bool `json:"cancel_execution"` +} + +type ConditionItem struct { + ConditionKey string `json:"condition_key"` + ConditionType string `json:"condition_type"` + ConditionValue string `json:"condition_value"` + ConditionLogic string `json:"condition_logic"` // e.g., "AND", "OR" +} - FolderID string `bun:"folder_id,type:text,default:''" json:"folder_id"` - ScheduleEveryValue int `bun:"schedule_every_value,type:integer,default:0" json:"schedule_every_value"` - ScheduleEveryUnit string `bun:"schedule_every_unit,type:text,default:''" json:"schedule_every_unit"` +type Pattern struct { + Key string `json:"key"` + Value string `json:"value"` + Type string `json:"type"` } diff --git a/services/backend/pkg/models/projects.go b/services/backend/pkg/models/projects.go index c99a3ced..09192d6f 100644 --- a/services/backend/pkg/models/projects.go +++ b/services/backend/pkg/models/projects.go @@ -22,6 +22,8 @@ type Projects 
struct { EnableAutoRunners bool `bun:"enable_auto_runners,type:bool,default:false" json:"enable_auto_runners"` DisableRunnerJoin bool `bun:"disable_runner_join,type:bool,default:false" json:"disable_runner_join"` RunnerAutoJoinToken string `bun:"runner_auto_join_token,type:text,notnull" json:"runner_auto_join_token"` + EncryptionKey string `bun:"encryption_key,type:text,default:''" json:"encryption_key"` + EncryptionEnabled bool `bun:"encryption_enabled,type:bool,default:true" json:"encryption_enabled"` } type ProjectsWithMembers struct { diff --git a/services/backend/pkg/models/runners.go b/services/backend/pkg/models/runners.go index 327d527a..2590dee5 100644 --- a/services/backend/pkg/models/runners.go +++ b/services/backend/pkg/models/runners.go @@ -1,9 +1,61 @@ package models import ( - shared_models "github.com/v1Flows/shared-library/pkg/models" + "time" + + "github.com/google/uuid" + "github.com/uptrace/bun" ) type Runners struct { - shared_models.Runners + bun.BaseModel `bun:"table:runners"` + + ID uuid.UUID `bun:",pk,type:uuid,default:gen_random_uuid()" json:"id"` + Name string `bun:"name,type:text,notnull" json:"name"` + Registered bool `bun:"registered,type:bool,default:false" json:"registered"` + ProjectID string `bun:"project_id,type:text,default:''" json:"project_id"` + Version string `bun:"version,type:text,default:''" json:"version"` + Mode string `bun:"mode,type:text,default:''" json:"mode"` + AutoRunner bool `bun:"auto_runner,type:bool,default:false" json:"auto_runner"` + SharedRunner bool `bun:"shared_runner,type:bool,default:false" json:"shared_runner"` + LastHeartbeat time.Time `bun:"last_heartbeat,type:timestamptz" json:"last_heartbeat"` + ExecutingJob bool `bun:"executing_job,type:bool,default:false" json:"executing_job"` + Disabled bool `bun:"disabled,type:bool,default:false" json:"disabled"` + DisabledReason string `bun:"disabled_reason,type:text,default:''" json:"disabled_reason"` + Plugins []Plugin 
`bun:"plugins,type:jsonb,default:jsonb('[]')" json:"plugins"` + Actions []Action `bun:"actions,type:jsonb,default:jsonb('[]')" json:"actions"` + Endpoints []Endpoint `bun:"endpoints,type:jsonb,default:jsonb('[]')" json:"endpoints"` + RegisteredAt time.Time `bun:"registered_at,type:timestamptz,default:now()" json:"registered_at"` + ExecutedExecutions []string `bun:"executed_executions,type:text[],default:'{}'" json:"executed_executions"` + ApiURL string `bun:"api_url,type:text,default:''" json:"api_url"` + ApiToken string `bun:"api_token,type:text,default:''" json:"api_token"` +} + +type Endpoint struct { + ID string `json:"id"` + Name string `json:"name"` + Path string `json:"path"` + Icon string `json:"icon"` + Color string `json:"color"` +} + +type IncomingAutoRunners struct { + Registered bool `json:"registered"` + Version string `json:"version"` + Mode string `json:"mode"` + LastHeartbeat time.Time `json:"last_heartbeat"` + Plugins []Plugin `json:"plugins"` + Actions []Action `json:"actions"` + Endpoints []Endpoint `json:"endpoints"` + ApiURL string `json:"api_url"` + ApiToken string `json:"api_token"` +} + +type Plugin struct { + Name string `json:"name"` + Type string `json:"type"` + Version string `json:"version"` + Author string `json:"author"` + Action Action `json:"action"` + Endpoint Endpoint `json:"endpoint"` } diff --git a/services/backend/pkg/models/settings.go b/services/backend/pkg/models/settings.go index f3234cb5..31311ed9 100644 --- a/services/backend/pkg/models/settings.go +++ b/services/backend/pkg/models/settings.go @@ -20,4 +20,5 @@ type Settings struct { AllowSharedRunnerAutoJoin bool `bun:"allow_shared_runner_auto_join,type:bool,default:true" json:"allow_shared_runner_auto_join"` AllowSharedRunnerJoin bool `bun:"allow_shared_runner_join,type:bool,default:true" json:"allow_shared_runner_join"` SharedRunnerAutoJoinToken string `bun:"shared_runner_auto_join_token,type:text,default:''" json:"shared_runner_auto_join_token"` + 
NewEncryptionMigrated bool `bun:"new_encryption_migrated,type:bool,default:false" json:"new_encryption_migrated"` } diff --git a/services/backend/router/alerts.go b/services/backend/router/alerts.go new file mode 100644 index 00000000..fb8fdf1d --- /dev/null +++ b/services/backend/router/alerts.go @@ -0,0 +1,34 @@ +package router + +import ( + "github.com/v1Flows/exFlow/services/backend/handlers/alerts" + "github.com/v1Flows/exFlow/services/backend/middlewares" + + "github.com/gin-gonic/gin" + "github.com/uptrace/bun" +) + +func Alerts(router *gin.RouterGroup, db *bun.DB) { + alert := router.Group("/alerts").Use(middlewares.Mixed(db)) + { + alert.GET("/", func(c *gin.Context) { + alerts.GetMultiple(c, db) + }) + alert.GET("/grouped", func(c *gin.Context) { + alerts.GetGrouped(c, db) + }) + alert.GET("/:alertID", func(c *gin.Context) { + alerts.GetSingle(c, db) + }) + + alert.POST("/", func(c *gin.Context) { + alerts.CreateAlert(c, db) + }) + alert.PUT("/:alertID", func(c *gin.Context) { + alerts.Update(c, db) + }) + alert.DELETE("/:alertID", func(c *gin.Context) { + alerts.Delete(c, db) + }) + } +} diff --git a/services/backend/router/flows.go b/services/backend/router/flows.go index 3cdb85d1..05d8b67f 100644 --- a/services/backend/router/flows.go +++ b/services/backend/router/flows.go @@ -75,6 +75,11 @@ func Flows(router *gin.RouterGroup, db *bun.DB) { flows.DeleteFlowFailurePipelineAction(c, db) }) + // alerts + flow.GET("/:flowID/alerts", func(c *gin.Context) { + flows.GetFlowAlerts(c, db) + }) + // executions flow.GET("/:flowID/executions", func(c *gin.Context) { flows.GetFlowExecutions(c, db) diff --git a/services/backend/router/main.go b/services/backend/router/main.go index 0ca47b67..49521b7b 100644 --- a/services/backend/router/main.go +++ b/services/backend/router/main.go @@ -1,6 +1,7 @@ package router import ( + "net/http" "strconv" "time" @@ -11,12 +12,12 @@ import ( log "github.com/sirupsen/logrus" ) -func StartRouter(db *bun.DB, port int) { +func 
StartRouter(db *bun.DB, port int, configFile string, frontendEnv string) *http.Server { gin.SetMode(gin.ReleaseMode) router := gin.Default() router.Use(cors.New(cors.Config{ - AllowOrigins: []string{"https://exflow.org", "http://localhost:3000"}, + AllowOrigins: []string{"https://exflow.org", "http://localhost:3000", "http://localhost:4000"}, AllowMethods: []string{"GET", "HEAD", "POST", "PUT", "OPTIONS", "DELETE"}, AllowHeaders: []string{"Origin", "Authorization", "X-Requested-With", "Content-Type"}, ExposeHeaders: []string{"Content-Length"}, @@ -32,13 +33,62 @@ func StartRouter(db *bun.DB, port int) { Executions(v1, db) Flows(v1, db) Page(v1, db) + Alerts(v1, db) Projects(v1, db) Runners(v1, db) Token(v1, db) User(v1, db) Health(v1) + Setup(v1, configFile, frontendEnv) } - log.Info("Starting Router on port ", strconv.Itoa(port)) - router.Run(":" + strconv.Itoa(port)) + server := &http.Server{ + Addr: ":" + strconv.Itoa(port), + Handler: router, + } + + go func() { + log.Info("Starting Router on port ", strconv.Itoa(port)) + if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed { + log.Fatalf("Failed to start server: %v\n", err) + } + }() + + return server +} + +// StartSetupRouter starts a minimal router for setup mode (no database required) +func StartSetupRouter(port int, configFile string, frontendEnv string) *http.Server { + gin.SetMode(gin.ReleaseMode) + router := gin.Default() + + router.Use(cors.New(cors.Config{ + AllowOrigins: []string{"https://exflow.org", "http://localhost:3000", "http://localhost:4000"}, + AllowMethods: []string{"GET", "HEAD", "POST", "PUT", "OPTIONS", "DELETE"}, + AllowHeaders: []string{"Origin", "Authorization", "X-Requested-With", "Content-Type"}, + ExposeHeaders: []string{"Content-Length"}, + AllowCredentials: true, + MaxAge: 12 * time.Hour, + })) + + v1 := router.Group("/api/v1") + { + // Only enable setup and health endpoints in setup mode + Health(v1) + Setup(v1, configFile, frontendEnv) + } + + server := 
&http.Server{ + Addr: ":" + strconv.Itoa(port), + Handler: router, + } + + go func() { + log.Info("Starting Setup Router on port ", strconv.Itoa(port)) + if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed { + log.Fatalf("Failed to start setup server: %v\n", err) + } + }() + + return server } diff --git a/services/backend/router/projects.go b/services/backend/router/projects.go index b04c5ab8..2d9e8197 100644 --- a/services/backend/router/projects.go +++ b/services/backend/router/projects.go @@ -80,5 +80,19 @@ func Projects(router *gin.RouterGroup, db *bun.DB) { project.PUT("/:projectID/transfer_ownership", func(c *gin.Context) { projects.TransferOwnership(c, db) }) + + // encryption management + project.GET("/:projectID/encryption", func(c *gin.Context) { + projects.GetProjectEncryptionStatus(c, db) + }) + project.PUT("/:projectID/encryption/enable", func(c *gin.Context) { + projects.EnableProjectEncryption(c, db) + }) + project.PUT("/:projectID/encryption/disable", func(c *gin.Context) { + projects.DisableProjectEncryption(c, db) + }) + project.PUT("/:projectID/encryption/rotate-key", func(c *gin.Context) { + projects.RotateProjectEncryptionKey(c, db) + }) } } diff --git a/services/backend/router/setup.go b/services/backend/router/setup.go new file mode 100644 index 00000000..15390a10 --- /dev/null +++ b/services/backend/router/setup.go @@ -0,0 +1,20 @@ +package router + +import ( + "github.com/gin-gonic/gin" + "github.com/v1Flows/exFlow/services/backend/handlers/setup" +) + +func Setup(rg *gin.RouterGroup, configFile string, frontendEnv string) { + setupGroup := rg.Group("/setup") + { + setupGroup.POST("/configure", func(c *gin.Context) { + setup.SetupSystem(c, configFile, frontendEnv) + }) + setupGroup.GET("/status", func(c *gin.Context) { + setup.CheckSetupStatus(c, configFile, frontendEnv) + }) + setupGroup.POST("/validate", setup.ValidateSetupData) + setupGroup.POST("/restart", setup.RestartApplication) + } +} diff --git 
a/services/frontend/Dockerfile b/services/frontend/Dockerfile index d074dc19..81c66dd6 100644 --- a/services/frontend/Dockerfile +++ b/services/frontend/Dockerfile @@ -1,4 +1,4 @@ -FROM node:23-alpine AS base +FROM node:24.7-alpine AS base # Install dependencies only when needed FROM base AS deps @@ -43,8 +43,10 @@ RUN chown nextjs:nodejs .next COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static -# Copy .env file to the working directory -COPY --from=builder /app/.env .env +RUN mkdir -p /etc/exflow \ + && chown -R nextjs:nodejs /etc/exflow + +VOLUME [ "/etc/exflow" ] USER nextjs diff --git a/services/frontend/app/alerts/page.tsx b/services/frontend/app/alerts/page.tsx new file mode 100644 index 00000000..e3b30876 --- /dev/null +++ b/services/frontend/app/alerts/page.tsx @@ -0,0 +1,5 @@ +import AlertsPageClient from "@/components/alerts/page-client"; + +export default function RunnersPage() { + return ; +} diff --git a/services/frontend/app/auth/login/page.tsx b/services/frontend/app/auth/login/page.tsx index e077c1fe..b83c9a92 100644 --- a/services/frontend/app/auth/login/page.tsx +++ b/services/frontend/app/auth/login/page.tsx @@ -1,10 +1,5 @@ -import LoginPageComponent from "@/components/auth/loginPage"; -import PageGetSettings from "@/lib/fetch/page/settings"; +import LoginPageClient from "@/components/auth/login-page-client"; -export default async function LoginPage() { - const settingsData = PageGetSettings(); - - const [settings] = (await Promise.all([settingsData])) as any; - - return ; +export default function LoginPage() { + return ; } diff --git a/services/frontend/app/flows/[id]/execution/[executionID]/page.tsx b/services/frontend/app/flows/[id]/execution/[executionID]/page.tsx index 9c8d6a46..c3612167 100644 --- a/services/frontend/app/flows/[id]/execution/[executionID]/page.tsx +++ b/services/frontend/app/flows/[id]/execution/[executionID]/page.tsx @@ -1,10 +1,4 
@@ -import { Execution } from "@/components/executions/execution/execution"; -import ErrorCard from "@/components/error/ErrorCard"; -import GetExecution from "@/lib/fetch/executions/execution"; -import GetFlow from "@/lib/fetch/flow/flow"; -import PageGetSettings from "@/lib/fetch/page/settings"; -import GetProjectRunners from "@/lib/fetch/project/runners"; -import GetUserDetails from "@/lib/fetch/user/getDetails"; +import ExecutionPageClient from "@/components/executions/execution-page-client"; export default async function DashboardExecutionPage({ params, @@ -13,57 +7,5 @@ export default async function DashboardExecutionPage({ }) { const { id, executionID } = await params; - const flowData = GetFlow(id); - const executionData = GetExecution(executionID); - const settingsData = PageGetSettings(); - const userDetailsData = GetUserDetails(); - - const [flow, execution, settings, userDetails] = (await Promise.all([ - flowData, - executionData, - settingsData, - userDetailsData, - ])) as any; - - let runnersData; - - if (flow.success) { - runnersData = GetProjectRunners(flow.data.flow.project_id); - } - const runners = await runnersData; - - return ( - <> - {execution.success && - flow.success && - runners.success && - settings.success && - userDetails.success ? 
( - - ) : ( - - )} - - ); + return ; } diff --git a/services/frontend/app/flows/[id]/page.tsx b/services/frontend/app/flows/[id]/page.tsx index 6713a0ec..59d62e53 100644 --- a/services/frontend/app/flows/[id]/page.tsx +++ b/services/frontend/app/flows/[id]/page.tsx @@ -1,18 +1,4 @@ -import { Divider, Spacer } from "@heroui/react"; - -import FlowTabs from "@/components/flows/flow/tabs"; -import GetFlow from "@/lib/fetch/flow/flow"; -import ErrorCard from "@/components/error/ErrorCard"; -import FlowHeading from "@/components/flows/flow/heading"; -import FlowDetails from "@/components/flows/flow/details"; -import GetProjects from "@/lib/fetch/project/all"; -import GetFlowExecutions from "@/lib/fetch/flow/executions"; -import GetUserDetails from "@/lib/fetch/user/getDetails"; -import GetProjectRunners from "@/lib/fetch/project/runners"; -import GetProject from "@/lib/fetch/project/data"; -import GetFolders from "@/lib/fetch/folder/all"; -import PageGetSettings from "@/lib/fetch/page/settings"; -import GetFlows from "@/lib/fetch/flow/all"; +import FlowPageClient from "@/components/flows/flow/page-client"; export default async function FlowPage({ params, @@ -21,71 +7,5 @@ export default async function FlowPage({ }) { const { id } = await params; - const flowsData = GetFlows(); - const flowData = GetFlow(id); - const projectsData = GetProjects(); - const executionsData = GetFlowExecutions(id); - const userDetailsData = GetUserDetails(); - const foldersData = GetFolders(); - const settingsData = PageGetSettings(); - - const [flows, flow, projects, executions, userDetails, folders, settings] = - (await Promise.all([ - flowsData, - flowData, - projectsData, - executionsData, - userDetailsData, - foldersData, - settingsData, - ])) as any; - - let runnersData; - let projectdata; - - if (flow.success) { - runnersData = GetProjectRunners(flow.data.flow.project_id); - projectdata = GetProject(flow.data.flow.project_id); - } - - const [runners, project] = (await Promise.all([ - 
runnersData, - projectdata, - ])) as any; - - return ( -
- {flow.success ? ( - <> - - - - - - - ) : ( - - )} -
- ); + return ; } diff --git a/services/frontend/app/flows/page.tsx b/services/frontend/app/flows/page.tsx index 7bad1df5..2eac4e22 100644 --- a/services/frontend/app/flows/page.tsx +++ b/services/frontend/app/flows/page.tsx @@ -1,76 +1,5 @@ -import { Divider } from "@heroui/react"; +import FlowsPageClient from "@/components/flows/page-client"; -import FlowList from "@/components/flows/list"; -import GetFlows from "@/lib/fetch/flow/all"; -import GetFolders from "@/lib/fetch/folder/all"; -import FlowsHeading from "@/components/flows/heading"; -import GetProjects from "@/lib/fetch/project/all"; -import GetRunningExecutions from "@/lib/fetch/executions/running"; -import GetUserDetails from "@/lib/fetch/user/getDetails"; -import ErrorCard from "@/components/error/ErrorCard"; -import PageGetSettings from "@/lib/fetch/page/settings"; - -export default async function FlowsPage() { - const flowsData = GetFlows(); - const foldersData = GetFolders(); - const projectsData = GetProjects(); - const runningExecutionsData = GetRunningExecutions(); - const userDetailsData = GetUserDetails(); - const settingsData = PageGetSettings(); - - const [flows, folders, projects, runningExecutions, userDetails, settings] = - (await Promise.all([ - flowsData, - foldersData, - projectsData, - runningExecutionsData, - userDetailsData, - settingsData, - ])) as any; - - return ( -
- {projects.success && - folders.success && - flows.success && - userDetails.success && - settings.success ? ( - <> - - - - - ) : ( - - )} -
- ); +export default function FlowsPage() { + return ; } diff --git a/services/frontend/app/layout.tsx b/services/frontend/app/layout.tsx index 3b4042e6..96d8f335 100644 --- a/services/frontend/app/layout.tsx +++ b/services/frontend/app/layout.tsx @@ -1,16 +1,17 @@ +/* eslint-disable import/order */ import "@/styles/globals.css"; import { Metadata, Viewport } from "next"; import clsx from "clsx"; import { ReactNode } from "react"; -import { cookies } from "next/headers"; +import { cookies, headers } from "next/headers"; import { siteConfig } from "@/config/site"; import { fontSans } from "@/config/fonts"; import { Navbar } from "@/components/navbar"; +import { AppContent } from "@/components/app-content"; import GetUserDetails from "@/lib/fetch/user/getDetails"; import Footer from "@/components/footer/footer"; import PageGetSettings from "@/lib/fetch/page/settings"; - import Favicon from "/public/favicon.ico"; import GetFlows from "@/lib/fetch/flow/all"; @@ -75,6 +76,10 @@ export default async function RootLayout({ const cookieStore = await cookies(); const sessionCookie = cookieStore.get("session"); + // get the current page the user is on + const headersList = await headers(); + const currentPage = headersList.get("x-pathname") || "/"; + const userDetailsData = GetUserDetails(); const settingsData = PageGetSettings(); const flowsData = GetFlows(); @@ -104,20 +109,22 @@ export default async function RootLayout({ )} > -
- {sessionCookie && ( - - )} -
{children}
-
-
+ +
+ {sessionCookie && currentPage !== "/setup" && ( + + )} +
{children}
+
+
+
diff --git a/services/frontend/app/page.tsx b/services/frontend/app/page.tsx index dd34617c..16df9d92 100644 --- a/services/frontend/app/page.tsx +++ b/services/frontend/app/page.tsx @@ -1,58 +1,5 @@ -import ErrorCard from "@/components/error/ErrorCard"; -import GetFlows from "@/lib/fetch/flow/all"; -import GetRunners from "@/lib/fetch/runner/get"; -import GetUserDetails from "@/lib/fetch/user/getDetails"; -import GetUserStats from "@/lib/fetch/user/stats"; -import DashboardHome from "@/components/dashboard/home"; -import GetExecutionsWithAttention from "@/lib/fetch/executions/attention"; +import DashboardHomePageClient from "@/components/dashboard/home-page-client"; -export default async function DashboardHomePage() { - const statsData = GetUserStats(); - const flowsData = GetFlows(); - const runnersData = GetRunners(); - const executionsData = GetExecutionsWithAttention(); - const userData = GetUserDetails(); - - const [stats, flows, runners, executions, user] = (await Promise.all([ - statsData, - flowsData, - runnersData, - executionsData, - userData, - ])) as any; - - return ( - <> - {executions.success && - flows.success && - runners.success && - stats.success && - user.success ? 
( - - ) : ( - - )} - - ); +export default function DashboardHomePage() { + return ; } diff --git a/services/frontend/app/profile/page.tsx b/services/frontend/app/profile/page.tsx index 304762de..a516e2c1 100644 --- a/services/frontend/app/profile/page.tsx +++ b/services/frontend/app/profile/page.tsx @@ -1,36 +1,10 @@ import { cookies } from "next/headers"; -import ErrorCard from "@/components/error/ErrorCard"; -import PageGetSettings from "@/lib/fetch/page/settings"; -import GetUserDetails from "@/lib/fetch/user/getDetails"; -import { UserProfile } from "@/components/user/profile"; +import ProfilePageClient from "@/components/user/profile-page-client"; export default async function ProfilePage() { - const c = await cookies(); + const cookieStore = await cookies(); + const session = cookieStore.get("session")?.value; - const settingsData = PageGetSettings(); - const userDetailsData = GetUserDetails(); - const session = c.get("session")?.value; - - const [settings, userDetails] = (await Promise.all([ - settingsData, - userDetailsData, - ])) as any; - - return ( - <> - {settings.success && userDetails.success ? 
( - - ) : ( - - )} - - ); + return ; } diff --git a/services/frontend/app/projects/[id]/page.tsx b/services/frontend/app/projects/[id]/page.tsx index f865670f..f4ae22f0 100644 --- a/services/frontend/app/projects/[id]/page.tsx +++ b/services/frontend/app/projects/[id]/page.tsx @@ -1,12 +1,4 @@ -import ErrorCard from "@/components/error/ErrorCard"; -import GetFlows from "@/lib/fetch/flow/all"; -import PageGetSettings from "@/lib/fetch/page/settings"; -import GetProjectAuditLogs from "@/lib/fetch/project/audit"; -import GetProject from "@/lib/fetch/project/data"; -import GetProjectRunners from "@/lib/fetch/project/runners"; -import GetProjectApiKeys from "@/lib/fetch/project/tokens"; -import GetUserDetails from "@/lib/fetch/user/getDetails"; -import Project from "@/components/projects/project"; +import ProjectPageClient from "@/components/projects/project-page-client"; export default async function ProjectPage({ params, @@ -15,65 +7,5 @@ export default async function ProjectPage({ }) { const { id } = await params; - const settingsData = PageGetSettings(); - const projectData = GetProject(id); - const runnersData = GetProjectRunners(id); - const tokensData = GetProjectApiKeys(id); - const auditData = GetProjectAuditLogs(id); - const userDetailsData = GetUserDetails(); - const flowsData = GetFlows(); - - const [settings, project, runners, tokens, audit, userDetails, flows] = - (await Promise.all([ - settingsData, - projectData, - runnersData, - tokensData, - auditData, - userDetailsData, - flowsData, - ])) as any; - - return ( - <> - {audit.success && - flows.success && - project.success && - runners.success && - settings.success && - tokens.success && - userDetails.success ? 
( - - ) : ( - - )} - - ); + return ; } diff --git a/services/frontend/app/projects/page.tsx b/services/frontend/app/projects/page.tsx index 4a922362..04d02908 100644 --- a/services/frontend/app/projects/page.tsx +++ b/services/frontend/app/projects/page.tsx @@ -1,45 +1,5 @@ -import { Divider } from "@heroui/react"; +import ProjectsPageClient from "@/components/projects/page-client"; -import ErrorCard from "@/components/error/ErrorCard"; -import { ProjectsList } from "@/components/projects/list"; -import PageGetSettings from "@/lib/fetch/page/settings"; -import GetProjects from "@/lib/fetch/project/all"; -import GetUserDetails from "@/lib/fetch/user/getDetails"; -import ProjectsHeading from "@/components/projects/heading"; - -export default async function ProjectsPage() { - const projectsData = GetProjects(); - const settingsData = PageGetSettings(); - const userDetailsData = GetUserDetails(); - - const [projects, settings, userDetails] = (await Promise.all([ - projectsData, - settingsData, - userDetailsData, - ])) as any; - - return ( -
- {projects.success && settings.success && userDetails.success ? ( - <> - - - - - ) : ( - - )} -
- ); +export default function ProjectsPage() { + return ; } diff --git a/services/frontend/app/providers.tsx b/services/frontend/app/providers.tsx index e52d1431..493b1176 100644 --- a/services/frontend/app/providers.tsx +++ b/services/frontend/app/providers.tsx @@ -6,6 +6,8 @@ import { useRouter } from "next/navigation"; import * as React from "react"; import { ToastProvider } from "@heroui/react"; +import SWRProvider from "@/lib/swr/provider"; + type ThemeProviderProps = React.ComponentProps; export type ProvidersProps = { @@ -17,9 +19,11 @@ export function Providers({ children, themeProps }: ProvidersProps) { const router = useRouter(); return ( - - - {children} - + + + + {children} + + ); } diff --git a/services/frontend/app/runners/page.tsx b/services/frontend/app/runners/page.tsx index c39bfa9d..ff36a552 100644 --- a/services/frontend/app/runners/page.tsx +++ b/services/frontend/app/runners/page.tsx @@ -1,43 +1,5 @@ -import { Divider } from "@heroui/react"; +import RunnersPageClient from "@/components/runners/page-client"; -import RunnersList from "@/components/runners/list"; -import GetRunners from "@/lib/fetch/runner/get"; -import GetProjects from "@/lib/fetch/project/all"; -import RunnersHeading from "@/components/runners/heading"; -import GetUserDetails from "@/lib/fetch/user/getDetails"; -import ErrorCard from "@/components/error/ErrorCard"; - -export default async function RunnersPage() { - const projectsData = GetProjects(); - const runnersData = GetRunners(); - const userDetailsData = GetUserDetails(); - - const [projects, runners, userDetails] = (await Promise.all([ - projectsData, - runnersData, - userDetailsData, - ])) as any; - - return ( -
- {projects.success && runners.success && userDetails.success ? ( - <> - - - - - - ) : ( - - )} -
- ); +export default function RunnersPage() { + return ; } diff --git a/services/frontend/app/setup/page.tsx b/services/frontend/app/setup/page.tsx new file mode 100644 index 00000000..053c013e --- /dev/null +++ b/services/frontend/app/setup/page.tsx @@ -0,0 +1,5 @@ +import SetupPageClient from "@/components/setup/page-client"; + +export default function RunnersPage() { + return ; +} diff --git a/services/frontend/components/admin/projects/list.tsx b/services/frontend/components/admin/projects/list.tsx index 204925ea..5b05399f 100644 --- a/services/frontend/components/admin/projects/list.tsx +++ b/services/frontend/components/admin/projects/list.tsx @@ -55,7 +55,7 @@ export function AdminProjectList({ projects }: any) { case "icon": return (
- - diff --git a/services/frontend/components/admin/settings/heading.tsx b/services/frontend/components/admin/settings/heading.tsx index 600c1369..f8506bfb 100644 --- a/services/frontend/components/admin/settings/heading.tsx +++ b/services/frontend/components/admin/settings/heading.tsx @@ -1,7 +1,5 @@ "use client"; -import Reloader from "@/components/reloader/Reloader"; - export default function AdminSettingsHeading() { return (
@@ -11,9 +9,6 @@ export default function AdminSettingsHeading() { Admin | exFlow Settings

-
- -
); diff --git a/services/frontend/components/admin/settings/list.tsx b/services/frontend/components/admin/settings/list.tsx index aa4fa599..4b3a25c3 100644 --- a/services/frontend/components/admin/settings/list.tsx +++ b/services/frontend/components/admin/settings/list.tsx @@ -37,9 +37,7 @@ export function AdminSettings({ settings }: any) { const [startExecutions, setStartExecutions] = React.useState( settings.start_executions, ); - const [receiveAlerts, setReceiveAlerts] = React.useState( - settings.receive_alerts, - ); + const [receiveAlerts] = React.useState(settings.receive_alerts); const [isLoading, setIsLoading] = React.useState(false); diff --git a/services/frontend/components/alerts/alerts.tsx b/services/frontend/components/alerts/alerts.tsx new file mode 100644 index 00000000..b1df0cd3 --- /dev/null +++ b/services/frontend/components/alerts/alerts.tsx @@ -0,0 +1,208 @@ +import { + Button, + ButtonGroup, + Card, + CardBody, + Dropdown, + DropdownItem, + DropdownMenu, + DropdownTrigger, + Pagination, + Spacer, + Spinner, + Tooltip, +} from "@heroui/react"; +import { Icon } from "@iconify/react"; +import { useMemo, useState } from "react"; +import NumberFlow from "@number-flow/react"; + +import { useAlerts, useFlowAlertsPaginated } from "@/lib/swr/hooks/flows"; +import { useAlertsStyleStore } from "@/lib/functions/userAlertsStyle"; + +import AlertsList from "./list"; + +export default function Alerts({ + runners, + flows, + canEdit, + flowID, + showFlow, +}: { + runners: any; + flows: any; + canEdit?: boolean; + flowID?: any; + showFlow?: boolean; +}) { + const { displayStyle, setDisplayStyle } = useAlertsStyleStore(); + const [statusFilter, setStatusFilter] = useState(new Set([]) as any); + + // pagination + const [page, setPage] = useState(1); + const limit = 6; + + // Calculate offset using page directly for now (will be validated later) + const offset = (page - 1) * limit; + + // Convert statusFilter to string for API + const statusFilterString = + 
statusFilter.size > 0 ? Array.from(statusFilter).join(",") : null; + + // Always call both hooks but only use the relevant one + const flowAlertsResult = useFlowAlertsPaginated( + flowID || null, + limit, + offset, + statusFilterString, + ); + const allAlertsResult = useAlerts( + flowID ? 0 : limit, + flowID ? 0 : offset, + flowID ? null : statusFilterString, + ); + + // Choose the right result based on whether we have a flowID + const { + alerts, + total: totalAlerts, + isLoading: loading, + refresh, + } = flowID ? flowAlertsResult : allAlertsResult; + + const items = useMemo(() => { + return alerts || []; + }, [alerts]); + + // Calculate total pages + const totalPages = Math.max(1, Math.ceil(totalAlerts / limit)); + + // Ensure page is never higher than total pages + const safePage = Math.min(page, totalPages); + + // If safe page is different from current page, update it + if (safePage !== page && totalPages > 0 && !loading) { + setPage(safePage); + } + + return ( + + +
+

+ Total Alerts: +

+
+ + + + + { + setStatusFilter(e); + setPage(1); // Reset to first page when filter changes + }} + > + + } + > + Firing + + + } + > + Resolved + + + + + + + + +
+
+ + + + {loading ? ( +
+ +
+ ) : ( + <> + {displayStyle === "list" && ( + + )} + + )} + +
+ setPage(newPage)} + /> +
+
+
+ ); +} diff --git a/services/frontend/components/alerts/heading.tsx b/services/frontend/components/alerts/heading.tsx new file mode 100644 index 00000000..2ad5f1b7 --- /dev/null +++ b/services/frontend/components/alerts/heading.tsx @@ -0,0 +1,11 @@ +"use client"; + +export default function AlertsHeading() { + return ( +
+
+

Alerts

+
+
+ ); +} diff --git a/services/frontend/components/alerts/list.tsx b/services/frontend/components/alerts/list.tsx new file mode 100644 index 00000000..23bd37c2 --- /dev/null +++ b/services/frontend/components/alerts/list.tsx @@ -0,0 +1,250 @@ +"use client"; + +import { + Accordion, + AccordionItem, + Card, + CardBody, + Chip, + Listbox, + ListboxItem, + ScrollShadow, + Spacer, + useDisclosure, +} from "@heroui/react"; +import { Icon } from "@iconify/react"; +import { useState } from "react"; +import ReactTimeago from "react-timeago"; + +import { IconWrapper } from "@/lib/IconWrapper"; +import AlertDrawer from "@/components/modals/alerts/details"; + +export default function AlertsList({ + alerts, + runners, + flows, + canEdit, + showDelete, + showFlowChip, +}: { + alerts: any; + runners: any; + flows?: any; + canEdit?: boolean; + showDelete?: boolean; + showFlowChip?: boolean; +}) { + const alertDrawer = useDisclosure(); + + const [targetAlert, setTargetAlert] = useState(null); + + return ( +
+
+ {alerts.map((alert: any) => ( + { + setTargetAlert(alert); + alertDrawer.onOpenChange(); + }} + > + +
+
+
+ +
+
+

{alert.name || "N/A"}

+

+ {alert.status || "N/A"} +

+
+
+ +
+ {showFlowChip && ( + + Flow:{" "} + { + flows.filter((f: any) => f.id === alert.flow_id)[0] + ?.name + } + + )} + {alert.execution_id !== "" && ( + + Executed + + )} + {alerts.filter((a: any) => a.parent_id === alert.id).length > + 0 && ( + <> + + Parent Alert + + + { + alerts.filter((a: any) => a.parent_id === alert.id) + .length + }{" "} + Sub Alert/s + + + )} + {alert.updated_at !== "0001-01-01T00:00:00Z" && ( + + + Last Update: + + + )} + + + Created: + + +
+
+ + + + {alerts.filter((a: any) => a.parent_id === alert.id).length > + 0 && ( + + + + + {alerts.map((a: any) => { + if (a.parent_id === alert.id) { + return ( + + + + } + textValue={a.name} + onPress={() => { + setTargetAlert(a); + alertDrawer.onOpenChange(); + }} + > +
+ {a.name} +
+ + {a.status || "N/A"} + +
+ + + + {new Date(a.created_at).getTime() === + Math.max( + ...alerts + .filter( + (alert: any) => + alert.parent_id === a.parent_id, + ) + .map((alert: any) => + new Date( + alert.created_at, + ).getTime(), + ), + ) && ( + + Latest + + )} + {a.execution_id !== "" && ( + + Executed + + )} +
+
+
+
+ ); + } + })} +
+
+
+
+ )} +
+
+ ))} +
+ +
+ ); +} diff --git a/services/frontend/components/alerts/page-client.tsx b/services/frontend/components/alerts/page-client.tsx new file mode 100644 index 00000000..25c1d9ea --- /dev/null +++ b/services/frontend/components/alerts/page-client.tsx @@ -0,0 +1,51 @@ +"use client"; + +import { Divider } from "@heroui/react"; + +import ErrorCard from "@/components/error/ErrorCard"; +import { PageSkeleton } from "@/components/loading/page-skeleton"; +import { useUserDetails, useRunners, useFlows } from "@/lib/swr/hooks/flows"; + +import Alerts from "./alerts"; +import AlertsHeading from "./heading"; + +export default function AlertsPageClient() { + const { + runners, + isLoading: runnersLoading, + isError: runnersError, + } = useRunners(); + const { flows, isLoading: flowsLoading, isError: flowsError } = useFlows(); + const { user, isLoading: userLoading, isError: userError } = useUserDetails(); + + // Check if any essential data is still loading or missing + const isLoading = + runnersLoading || flowsLoading || userLoading || !runners || !user; + + // Show loading state if essential data is still loading + if (isLoading) { + return ; + } + + // Show error state + const hasError = runnersError || flowsError || userError; + + if (hasError) { + return ( +
+ +
+ ); + } + + return ( +
+ + + +
+ ); +} diff --git a/services/frontend/components/app-content.tsx b/services/frontend/components/app-content.tsx new file mode 100644 index 00000000..168000fb --- /dev/null +++ b/services/frontend/components/app-content.tsx @@ -0,0 +1,13 @@ +"use client"; + +import { ReactNode } from "react"; + +import { SetupGuard } from "@/lib/setup"; + +interface AppContentProps { + children: ReactNode; +} + +export function AppContent({ children }: AppContentProps) { + return {children}; +} diff --git a/services/frontend/components/auth/login-page-client.tsx b/services/frontend/components/auth/login-page-client.tsx new file mode 100644 index 00000000..33cab8fc --- /dev/null +++ b/services/frontend/components/auth/login-page-client.tsx @@ -0,0 +1,20 @@ +"use client"; + +import LoginPageComponent from "@/components/auth/loginPage"; +import { PageSkeleton } from "@/components/loading/page-skeleton"; +import { usePageSettings } from "@/lib/swr/hooks/flows"; + +export default function LoginPageClient() { + const { settings, isLoading, isError } = usePageSettings(); + + if (isLoading) { + return ; + } + + if (isError || !settings) { + // For login page, we can fall back to some default settings or show a basic login form + return ; + } + + return ; +} diff --git a/services/frontend/components/auth/login.tsx b/services/frontend/components/auth/login.tsx index ec99d6a5..cd2e19ff 100644 --- a/services/frontend/components/auth/login.tsx +++ b/services/frontend/components/auth/login.tsx @@ -15,7 +15,7 @@ import React from "react"; import { Logout } from "@/lib/logout"; -export default function Login({ user, session, showSignUp, settings }: any) { +export default function Login({ user, _, showSignUp, settings }: any) { const router = useRouter(); const userData = user; diff --git a/services/frontend/components/auth/loginPage.tsx b/services/frontend/components/auth/loginPage.tsx index e5aec0e9..da72372b 100644 --- a/services/frontend/components/auth/loginPage.tsx +++ 
b/services/frontend/components/auth/loginPage.tsx @@ -17,7 +17,7 @@ import React, { useState } from "react"; import { setSession } from "@/lib/setSession"; import LoginAPI from "@/lib/auth/login"; -import Particles from "../magicui/particles"; +import { Particles } from "../magicui/particles"; export default function LoginPageComponent({ settings }: { settings: any }) { const { theme } = useTheme(); diff --git a/services/frontend/components/auth/signupPage.tsx b/services/frontend/components/auth/signupPage.tsx index 5adc7403..670ced72 100644 --- a/services/frontend/components/auth/signupPage.tsx +++ b/services/frontend/components/auth/signupPage.tsx @@ -20,7 +20,7 @@ import SignUpAPI from "@/lib/auth/signup"; import LoginAPI from "@/lib/auth/login"; import CheckUserTaken from "@/lib/auth/checkTaken"; -import Particles from "../magicui/particles"; +import { Particles } from "../magicui/particles"; export default function SignUpPage({ settings }: any) { const router = useRouter(); diff --git a/services/frontend/components/dashboard/home-page-client.tsx b/services/frontend/components/dashboard/home-page-client.tsx new file mode 100644 index 00000000..5aceafc0 --- /dev/null +++ b/services/frontend/components/dashboard/home-page-client.tsx @@ -0,0 +1,73 @@ +"use client"; + +import ErrorCard from "@/components/error/ErrorCard"; +import DashboardHome from "@/components/dashboard/home"; +import { PageSkeleton } from "@/components/loading/page-skeleton"; +import { + useFlows, + useRunners, + useUserDetails, + useUserStats, + useExecutionsWithAttention, +} from "@/lib/swr/hooks/flows"; + +export default function DashboardHomePageClient() { + const { + stats, + isLoading: statsLoading, + isError: statsError, + } = useUserStats(); + const { flows, isLoading: flowsLoading, isError: flowsError } = useFlows(); + const { + runners, + isLoading: runnersLoading, + isError: runnersError, + } = useRunners(); + const { + executionsWithAttention, + isLoading: executionsLoading, + 
isError: executionsError, + } = useExecutionsWithAttention(); + const { user, isLoading: userLoading, isError: userError } = useUserDetails(); + + // Check if any essential data is still loading or missing + const isLoading = + statsLoading || + flowsLoading || + runnersLoading || + executionsLoading || + userLoading || + !stats || + !flows || + !runners || + !executionsWithAttention || + !user; + + // Show loading state if essential data is still loading + if (isLoading) { + return ; + } + + // Show error state + const hasError = + statsError || flowsError || runnersError || executionsError || userError; + + if (hasError) { + return ( + + ); + } + + return ( + + ); +} diff --git a/services/frontend/components/dashboard/home.tsx b/services/frontend/components/dashboard/home.tsx index 21271839..fe74aa35 100644 --- a/services/frontend/components/dashboard/home.tsx +++ b/services/frontend/components/dashboard/home.tsx @@ -16,10 +16,11 @@ import { useEffect, useState } from "react"; import ReactTimeago from "react-timeago"; import WelcomeModal from "@/components/modals/user/welcome"; -import Stats from "@/components/dashboard/stats"; -import Reloader from "../reloader/Reloader"; import Executions from "../executions/executions"; +import Alerts from "../alerts/alerts"; + +import DashboardExecutionsStats from "./stats-charts"; export default function DashboardHome({ stats, @@ -66,14 +67,11 @@ export default function DashboardHome({ return (
-
-
-

Hello, {user.username} 👋

-

- Here's the current status for today. -

-
- +
+

Hello, {user.username} 👋

+

+ Here's the current status for today. +

@@ -385,12 +383,16 @@ export default function DashboardHome({ - {/* Stats */} - + -

Executions

- +

Executions & Alerts

+ +
+ + + +
diff --git a/services/frontend/components/dashboard/stats-charts.tsx b/services/frontend/components/dashboard/stats-charts.tsx new file mode 100644 index 00000000..e0b1e647 --- /dev/null +++ b/services/frontend/components/dashboard/stats-charts.tsx @@ -0,0 +1,260 @@ +"use client"; + +import type { ButtonProps, CardProps, RadioProps } from "@heroui/react"; + +import React from "react"; +import { + BarChart, + Bar, + XAxis, + YAxis, + Tooltip, + ResponsiveContainer, +} from "recharts"; +import { Card, Button, VisuallyHidden, useRadio, cn } from "@heroui/react"; + +type ChartData = { + weekday: string; + [key: string]: string | number; +}; + +type BarChartProps = { + title: string; + categories: { title: string; color: string }[]; + chartData: ChartData[]; +}; + +export default function DashboardExecutionsStats({ stats }: { stats: any }) { + const data: BarChartProps[] = [ + { + title: "Processed Executions", + categories: [ + { + title: "Success", + color: "success", + }, + { + title: "Error", + color: "danger", + }, + { + title: "Pending", + color: "default", + }, + { + title: "Running", + color: "primary", + }, + { + title: "Canceled", + color: "danger-300", + }, + { + title: "Scheduled", + color: "secondary", + }, + { + title: "NoPatternMatch", + color: "secondary-300", + }, + { + title: "Recovered", + color: "warning", + }, + ], + chartData: stats.executions, + }, + { + title: "Incoming Alerts", + categories: [ + { + title: "Resolved", + color: "success", + }, + { + title: "Firing", + color: "danger", + }, + ], + chartData: stats.alerts, + }, + ]; + + return ( +
+ {data.map((item, index) => ( + + ))} +
+ ); +} + +const formatWeekday = (weekday: string) => { + const day = + { + Mon: 1, + Tue: 2, + Wed: 3, + Thu: 4, + Fri: 5, + Sat: 6, + Sun: 0, + }[weekday] ?? 0; + + return new Intl.DateTimeFormat("en-US", { weekday: "long" }).format( + new Date(2024, 0, day), + ); +}; + +const BarChartCard = React.forwardRef< + // eslint-disable-next-line no-undef + HTMLDivElement, + Omit & BarChartProps +>(({ className, title, categories, chartData, ...props }, ref) => { + return ( + +
+
+

{title}

+
+
+ {categories.map((category, index) => ( +
+ + {category.title} +
+ ))} +
+
+ + + + + ( +
+
+ + {formatWeekday(label)} + + {payload?.map((p, index) => { + const name = p.name; + const value = p.value; + const category = categories.find( + (c) => c.title.toLowerCase() === name, + ) ?? { title: name, color: "default" }; + + return ( +
+
+
+ + {category.title} + + + {value} + +
+
+ ); + })} +
+
+ )} + cursor={false} + /> + {categories.map((category, index) => ( + + ))} + + + + ); +}); + +BarChartCard.displayName = "BarChartCard"; + +const ButtonRadioItem = React.forwardRef< + // eslint-disable-next-line no-undef + HTMLInputElement, + Omit & { + color?: ButtonProps["color"]; + size?: ButtonProps["size"]; + variant?: ButtonProps["variant"]; + } +>(({ children, color, size = "sm", variant, ...props }, ref) => { + const { Component, isSelected, getBaseProps, getInputProps } = + useRadio(props); + + return ( + + + + + + + ); +}); + +ButtonRadioItem.displayName = "ButtonRadioItem"; diff --git a/services/frontend/components/dashboard/stats.tsx b/services/frontend/components/dashboard/stats.tsx deleted file mode 100644 index f1d1d7f3..00000000 --- a/services/frontend/components/dashboard/stats.tsx +++ /dev/null @@ -1,232 +0,0 @@ -"use client"; - -import { Icon } from "@iconify/react"; -import { Card, Chip } from "@heroui/react"; -import React from "react"; -import { Bar, BarChart, ResponsiveContainer, Tooltip, XAxis } from "recharts"; - -import { cn } from "@/components/cn/cn"; - -const formatWeekday = (weekday: string) => { - if (weekday === "Mo") { - return "Monday"; - } else if (weekday === "Tu") { - return "Tuesday"; - } else if (weekday === "We") { - return "Wednesday"; - } else if (weekday === "Th") { - return "Thursday"; - } else if (weekday === "Fr") { - return "Friday"; - } else if (weekday === "Sa") { - return "Saturday"; - } else if (weekday === "Su") { - return "Sunday"; - } -}; - -const formatValue = (value: number) => { - return new Intl.NumberFormat("en-US", { - style: "decimal", - minimumFractionDigits: 0, - maximumFractionDigits: 0, - }).format(value); -}; - -export default function Stats({ stats }: { stats: any }) { - const data = [ - { - title: "Total Executions this week", - value: stats.total_executions, - change: `${stats.execution_trend_percentage}% today`, - changeType: stats.execution_trend, - trendChipPosition: "bottom", - chartData: 
stats.executions, - }, - ]; - - const handleMouseEnter = React.useCallback( - (chartIndex: number, itemIndex: number) => { - // eslint-disable-next-line no-undef - const bars = document.querySelectorAll( - `#chart-${chartIndex} .recharts-bar-rectangle`, - ); - - bars.forEach((bar, i) => { - if (i !== itemIndex) { - const path = bar.querySelector("path"); - - if (path) { - path.setAttribute("fill", "hsl(var(--heroui-default-300))"); - } - } - }); - }, - [], - ); - - const handleMouseLeave = React.useCallback((chartIndex: number) => { - // eslint-disable-next-line no-undef - const bars = document.querySelectorAll( - `#chart-${chartIndex} .recharts-bar-rectangle`, - ); - - bars.forEach((bar) => { - const path = bar.querySelector("path"); - - if (path) { - path.setAttribute("fill", "hsl(var(--heroui-foreground))"); - } - }); - }, []); - - const trendChipContent = React.useCallback( - ({ - changeType, - change, - trendChipPosition, - }: { - changeType: string; - change: string; - trendChipPosition: string; - }) => ( -
- - ) : changeType === "neutral" ? ( - - ) : ( - - ) - } - variant="flat" - > - {change} - -
- ), - [], - ); - - return ( -
- {data.map( - ({ title, value, changeType, change, trendChipPosition }, index) => ( - -
-
-
-
- {title} -
-
-
- {value} -
- {trendChipContent({ - changeType, - change, - trendChipPosition, - })} -
-
-
- {trendChipContent({ - changeType, - change, - trendChipPosition, - })} -
-
-
-
-
- - - - ( -
-
- - {formatWeekday(label)} - - - {formatValue(payload?.[0]?.value as number)} - -
- )} - cursor={false} - /> - - handleMouseEnter(index, itemIndex) - } - onMouseLeave={() => handleMouseLeave(index)} - /> - - -
-
-
- ), - )} -
- ); -} diff --git a/services/frontend/components/executions/execution-page-client.tsx b/services/frontend/components/executions/execution-page-client.tsx new file mode 100644 index 00000000..2bfd5596 --- /dev/null +++ b/services/frontend/components/executions/execution-page-client.tsx @@ -0,0 +1,86 @@ +"use client"; + +import { Execution } from "@/components/executions/execution/execution"; +import ErrorCard from "@/components/error/ErrorCard"; +import { PageSkeleton } from "@/components/loading/page-skeleton"; +import { + useFlow, + useExecution, + usePageSettings, + useProjectRunners, + useUserDetails, +} from "@/lib/swr/hooks/flows"; + +interface ExecutionPageClientProps { + flowId: string; + executionId: string; +} + +export default function ExecutionPageClient({ + flowId, + executionId, +}: ExecutionPageClientProps) { + const { flow, isLoading: flowLoading, isError: flowError } = useFlow(flowId); + const { + execution, + isLoading: executionLoading, + isError: executionError, + } = useExecution(executionId); + const { + settings, + isLoading: settingsLoading, + isError: settingsError, + } = usePageSettings(); + const { user, isLoading: userLoading, isError: userError } = useUserDetails(); + + // Only fetch project runners if we have the flow + const projectId = (flow as any)?.project_id; + const { + runners, + isLoading: runnersLoading, + isError: runnersError, + } = useProjectRunners(projectId || ""); + + // Check if any essential data is still loading or missing + const isLoading = + flowLoading || + executionLoading || + settingsLoading || + userLoading || + !flow || + !execution || + !settings || + !user; + + // Show loading state if essential data is still loading + if (isLoading || (projectId && runnersLoading)) { + return ; + } + + // Show error state + const hasError = + flowError || + executionError || + settingsError || + userError || + (projectId && runnersError); + + if (hasError) { + return ( + + ); + } + + return ( + + ); +} diff --git 
a/services/frontend/components/executions/execution/adminExecutionActions.tsx b/services/frontend/components/executions/execution/adminExecutionActions.tsx index 8ac20d0f..bc308b77 100644 --- a/services/frontend/components/executions/execution/adminExecutionActions.tsx +++ b/services/frontend/components/executions/execution/adminExecutionActions.tsx @@ -8,16 +8,16 @@ import { DropdownSection, DropdownTrigger, } from "@heroui/react"; -import { useRouter } from "next/navigation"; import UpdateExecution from "@/lib/fetch/executions/PUT/update"; +import { useRefreshCache } from "@/lib/swr/hooks/useRefreshCache"; export default function AdminExecutionActions({ execution, }: { execution: any; }) { - const router = useRouter(); + const { refreshExecution } = useRefreshCache(); async function changeExecutionStatus(status: string) { const newExecution = { ...execution }; @@ -106,7 +106,7 @@ export default function AdminExecutionActions({ color: "success", variant: "flat", }); - router.refresh(); + refreshExecution(execution.id); } else { addToast({ title: "Execution", @@ -121,7 +121,7 @@ export default function AdminExecutionActions({ diff --git a/services/frontend/components/executions/execution/adminStepActions.tsx b/services/frontend/components/executions/execution/adminStepActions.tsx index 1a463401..c60c0638 100644 --- a/services/frontend/components/executions/execution/adminStepActions.tsx +++ b/services/frontend/components/executions/execution/adminStepActions.tsx @@ -8,9 +8,9 @@ import { DropdownSection, DropdownTrigger, } from "@heroui/react"; -import { useRouter } from "next/navigation"; import UpdateExecutionStep from "@/lib/fetch/executions/PUT/updateStep"; +import { useRefreshCache } from "@/lib/swr/hooks/useRefreshCache"; export default function AdminStepActions({ execution, @@ -19,7 +19,7 @@ export default function AdminStepActions({ execution: any; step: any; }) { - const router = useRouter(); + const { refreshExecutionSteps } = useRefreshCache(); async 
function changeStepStatus(status: string) { const newStep = { ...step }; @@ -317,7 +317,7 @@ export default function AdminStepActions({ color: "success", variant: "flat", }); - router.refresh(); + refreshExecutionSteps(execution.id); } else { addToast({ title: "Execution", diff --git a/services/frontend/components/executions/execution/execution.tsx b/services/frontend/components/executions/execution/execution.tsx index ce26b152..5dfd84f5 100644 --- a/services/frontend/components/executions/execution/execution.tsx +++ b/services/frontend/components/executions/execution/execution.tsx @@ -3,12 +3,13 @@ import { Icon } from "@iconify/react"; import { addToast, Button, ButtonGroup, Divider, Spacer } from "@heroui/react"; import { useRouter } from "next/navigation"; -import React, { useEffect, useState } from "react"; +import React, { useState } from "react"; -import Reloader from "@/components/reloader/Reloader"; -import GetExecutionSteps from "@/lib/fetch/executions/steps"; +import { useExecutionSteps } from "@/lib/swr/hooks/flows"; import APICancelExecution from "@/lib/fetch/executions/cancel"; import { useExecutionStepStyleStore } from "@/lib/functions/userExecutionStepStyle"; +import RefreshButton from "@/components/ui/refresh-button"; +import { useRefreshCache } from "@/lib/swr/hooks/useRefreshCache"; import AdminExecutionActions from "./adminExecutionActions"; import ExecutionDetails from "./details"; @@ -19,33 +20,43 @@ export function Execution({ flow, execution, runners, userDetails }: any) { const router = useRouter(); const { displayStyle, setDisplayStyle } = useExecutionStepStyleStore(); - const [steps, setSteps] = useState([] as any); - useEffect(() => { - GetExecutionSteps(execution.id).then((steps) => { - if (steps.success) { - setSteps(steps.data.steps); - } else { - if ("error" in steps) { - addToast({ - title: "Execution", - description: steps.error, - color: "danger", - variant: "flat", - }); - } - } - }); - }, [execution]); + // Check if execution is 
running to enable auto-refresh + const isRunning = + execution.status === "running" || + execution.status === "pending" || + execution.status === "paused" || + execution.status === "scheduled" || + execution.status === "interactionWaiting"; + + // Use SWR for auto-refreshing execution steps data + const { steps, isError } = useExecutionSteps(execution.id, isRunning); + const { refreshExecution, refreshExecutionSteps } = useRefreshCache(); + const [executionLoading, setExecutionLoading] = useState(false); + + // Handle SWR errors + React.useEffect(() => { + if (isError) { + addToast({ + title: "Error fetching execution steps", + description: "Failed to load execution steps. Please try refreshing.", + color: "danger", + variant: "flat", + }); + } + }, [isError]); + + const handleRefresh = async () => { + setExecutionLoading(true); + await refreshExecution(execution.id); + await refreshExecutionSteps(execution.id); + setExecutionLoading(false); + }; return ( <>
- @@ -114,7 +125,17 @@ export function Execution({ flow, execution, runners, userDetails }: any) { execution.status === "interactionWaiting") && (
- + {isRunning && ( +
+ + Auto-refresh 2s +
+ )} +
)}
diff --git a/services/frontend/components/executions/execution/executionStepsAccordion.tsx b/services/frontend/components/executions/execution/executionStepsAccordion.tsx index 2874bf58..09f77711 100644 --- a/services/frontend/components/executions/execution/executionStepsAccordion.tsx +++ b/services/frontend/components/executions/execution/executionStepsAccordion.tsx @@ -11,12 +11,12 @@ import { Progress, Snippet, } from "@heroui/react"; -import { useRouter } from "next/navigation"; import React, { useEffect, useState, useRef } from "react"; import { isMobile, isTablet } from "react-device-detect"; import InteractExecutionStep from "@/lib/fetch/executions/PUT/step_interact"; import { executionStatusWrapper } from "@/lib/functions/executionStyles"; +import { useRefreshCache } from "@/lib/swr/hooks/useRefreshCache"; import AdminStepActions from "./adminStepActions"; @@ -27,7 +27,7 @@ export function ExecutionStepsAccordion({ runners, userDetails, }: any) { - const router = useRouter(); + const { refreshExecution, refreshExecutionSteps } = useRefreshCache(); const [parSteps, setParSteps] = useState([] as any); const [selectedKeys, setSelectedKeys] = React.useState(new Set(["1"])); @@ -98,19 +98,19 @@ export function ExecutionStepsAccordion({ (step: any) => step.status !== "pending", ); const activeStep = nonPendingSteps[nonPendingSteps.length - 1]; - + if (activeStep) { const stepElement = stepItemRef.current[activeStep.id]; if (stepElement) { // Set flag to prevent scroll listener from interfering isAutoScrollingRef.current = true; - + stepElement.scrollIntoView({ behavior: "smooth", block: "center", }); - + // Clear the flag after scrolling is complete setTimeout(() => { isAutoScrollingRef.current = false; @@ -248,22 +248,22 @@ export function ExecutionStepsAccordion({ (step: any) => step.status !== "pending", ); const activeStep = nonPendingSteps[nonPendingSteps.length - 1]; - + if (activeStep) { const stepElement = stepItemRef.current[activeStep.id]; if 
(stepElement) { // Set flag to prevent scroll listener from interfering isAutoScrollingRef.current = true; - + stepElement.scrollIntoView({ behavior: "smooth", block: "center", }); - + setStepAutoScrollEnabled(true); setUserSelected(false); - + // Clear the flag after scrolling is complete setTimeout(() => { isAutoScrollingRef.current = false; @@ -338,7 +338,10 @@ export function ExecutionStepsAccordion({ color: "success", variant: "flat", }); - router.refresh(); + // wait 1 second + await new Promise((resolve) => setTimeout(resolve, 1000)); + await refreshExecutionSteps(execution.id); + await refreshExecution(execution.id); } } @@ -484,12 +487,12 @@ export function ExecutionStepsAccordion({ key={`${dataIndex}-${lineIndex}`} className={`container flex items-start gap-3 py-0.3 hover:bg-default-100/50 transition-colors`} > -
+
{currentLineNumber}
-
+
{new Date( line.timestamp, @@ -623,7 +626,7 @@ export function ExecutionStepsAccordion({
- + {/* Floating auto-scroll button - fixed position */} {!stepAutoScrollEnabled && ( - - - - - - - } - onPress={() => { - navigator.clipboard.writeText( - JSON.stringify(action), - ); - addToast({ - title: "Action", - description: "Action copied to clipboard!", - color: "success", - variant: "flat", - }); - }} - > - Copy to Clipboard - - - } - onPress={() => { - // if action is in an failure pipeline, open the edit modal - if ( - flow.failure_pipelines.some( - (pipeline: any) => - pipeline.actions !== null && - pipeline.actions.some( - (pipelineAction: any) => - pipelineAction.id === action.id, - ), - ) - ) { - setTargetAction(action); - setTargetFailurePipeline( - flow.failure_pipelines.filter( - (pipeline: any) => - pipeline.actions !== null && - pipeline.actions.some( - (pipelineAction: any) => - pipelineAction.id === action.id, - ), - )[0], - ); - copyFlowFailurePipelineActionModal.onOpen(); - } else { - setTargetAction(action); - copyFlowActionModal.onOpen(); - } - }} - > - Copy Locally - - - } - onPress={() => { - // if action is in an failure pipeline, open the edit modal - if ( - flow.failure_pipelines.some( - (pipeline: any) => - pipeline.actions !== null && - pipeline.actions.some( - (pipelineAction: any) => - pipelineAction.id === action.id, - ), - ) - ) { - setTargetAction(action); - setTargetFailurePipeline( - flow.failure_pipelines.filter( - (pipeline: any) => - pipeline.actions !== null && - pipeline.actions.some( - (pipelineAction: any) => - pipelineAction.id === action.id, - ), - )[0], - ); - copyFailurePipelineActionToDifferentFlowModal.onOpen(); - } else { - setTargetAction(action); - copyActionToDifferentFlowModal.onOpen(); - } - }} - > - Copy to another Flow - - - - - - - - -
+
+
+
+
- -
- - {action.active ? "Active" : "Disabled"} - - - Vers. {action.version} - - {flow.failure_pipeline_id !== "" || - (flow.failure_pipeline_id !== null && - !flow.failure_pipelines.some( - (pipeline: any) => - pipeline.id === action.failure_pipeline_id || - (pipeline.actions !== null && - pipeline.actions.some( - (pipelineAction: any) => - pipelineAction.id === action.id, - )), - ) && ( - - No Failure Pipeline Assigned - - ))} +
+

+ {action.custom_name ? action.custom_name : action.name} +

+

+ {action.custom_description + ? action.custom_description + : action.description} +

+
+ + + +
+ + +
+ + Vers. {action.version} + + + {action.active ? "Active" : "Disabled"} + + {flow.failure_pipeline_id !== "" || + (flow.failure_pipeline_id !== null && + !flow.failure_pipelines.some( + (pipeline: any) => + pipeline.id === action.failure_pipeline_id || + (pipeline.actions !== null && + pipeline.actions.some( + (pipelineAction: any) => + pipelineAction.id === action.id, + )), + ) && ( + + No Failure Pipeline Assigned + + ))} + {action.update_available && ( + + Upgrade Available + + )} +
+
+ {action.update_available && ( - - } - variant="flat" - onPress={() => { - // if action is in an failure pipeline, open the edit modal - if ( - flow.failure_pipelines.some( - (pipeline: any) => - pipeline.actions !== null && - pipeline.actions.some( - (pipelineAction: any) => - pipelineAction.id === action.id, - ), - ) - ) { - setTargetAction(action); - setUpdatedAction(action.updated_action); - setTargetFailurePipeline( - flow.failure_pipelines.filter( - (pipeline: any) => - pipeline.actions !== null && - pipeline.actions.some( - (pipelineAction: any) => - pipelineAction.id === action.id, - ), - )[0], - ); - upgradeFlowFailurePipelineActionModal.onOpen(); - } else { - setTargetAction(action); - setUpdatedAction(action.updated_action); - upgradeFlowActionModal.onOpen(); - } - }} - > - Upgrade - - } - title={`Update to version ${action.update_version} available`} - variant="faded" - /> + + + )} - - + + + + + } + onPress={() => { + navigator.clipboard.writeText(JSON.stringify(action)); + addToast({ + title: "Action", + description: "Action copied to clipboard!", + color: "success", + variant: "flat", + }); + }} > -
- - Show default parameters - -
- - - Key - Value - Note - - - {action.params - .filter( - (param: any) => - showDefaultParams || - param.value !== param.default, - ) - .map((param: any, index: number) => ( - - {param.key} - - {param.type === "password" - ? "••••••••" - : param.value} - - - {param.type === "password" && - param.value != "" ? ( - - Encrypted - - ) : ( - "" - )} - - - ))} - -
-
- )} - {action.condition.selected_action_id !== "" && ( - + + } + onPress={() => { + setTargetAction(action); + copyFlowActionModal.onOpen(); + }} > -
-

Options

- - Cancel{" "} - Execution if conditions match and dont start any - following action. - -
- -
-
- - a.id === action.condition.selected_action_id, - )[0]?.icon - } - width={26} - /> -
-
-
-

- {flow.actions.filter( - (a: any) => - a.id === action.condition.selected_action_id, - )[0]?.custom_name || - flow.actions.filter( - (a: any) => - a.id === - action.condition.selected_action_id, - )[0]?.name || - action.condition.selected_action_id} -

-
-

- {flow.actions.filter( - (a: any) => - a.id === action.condition.selected_action_id, - )[0]?.custom_description || - flow.actions.filter( - (a: any) => - a.id === action.condition.selected_action_id, - )[0]?.description || - "No description available"} -

-
-
- - - Key - Type - Value - Logic - - - {action.condition.condition_items.map( - (condition: any, index: number) => ( - - {condition.condition_key} - - {condition.condition_type} - - - {condition.condition_value} - - - {condition.condition_logic === "and" - ? "&" - : "or"} - - - ), - )} - -
-
- )} -
-
+ Local + + + } + onPress={() => { + setTargetAction(action); + copyActionToDifferentFlowModal.onOpen(); + }} + > + Transfer + + + + + + + + + +
- +
); @@ -780,409 +343,87 @@ export default function Actions({ }); } - const handleDragEndPipeline = (pipeline: any, event: any) => { - const { active, over } = event; - - if (active.id !== over.id) { - const items = [...pipeline.actions]; - const oldIndex = items.findIndex((item: any) => item.id === active.id); - const newIndex = items.findIndex((item: any) => item.id === over.id); - - const newArray = arrayMove(items, oldIndex, newIndex); - - updateFlowFailurePipelineActions(pipeline, newArray); - } - }; - - function updateFlowFailurePipelineActions(pipeline: any, actions: any) { - UpdateFlowFailurePipelineActions(flow.id, pipeline.id, actions) - .then(() => { - router.refresh(); - addToast({ - title: "Flow", - description: - "Flow failure pipeline actions order updated successfully.", - color: "success", - variant: "flat", - }); - }) - .catch(() => { - router.refresh(); - addToast({ - title: "Flow", - description: "Failed to update flow failure pipeline actions order.", - color: "danger", - variant: "flat", - }); - }); - } - return (
-

- Info: Common action settings can be found on the settings tab -

- -
-
- - - -
- {actions.map((action: any) => ( - - ))} -
-
-
-
- -
- - -
-
-
- -
-
-

Create new Action

-

- Add a new action to the flow -

-
-
-
-
-
- - { - const parsedAction = await getClipboardAction(); - - if (parsedAction) { - setTargetAction(parsedAction); - copyFlowActionModal.onOpen(); - } else { - addToast({ - title: "Flow", - description: "No action found in clipboard.", - color: "danger", - variant: "flat", - }); - } - }} - > - -
-
-
- -
-
-

- Paste Action from Clipboard -

-

- You have an action copied to the clipboard. -

-
-
-
-
-
-
-
- -
-
- -
-

Failure Pipelines

-

- With failure pipelines you have the ability to send - notifications or trigger any other action if a specific action - or the whole execution failed. + + +

+
+

Actions

+

+ Common action settings can be found on the settings tab

-
- - {failurePipelines.map((pipeline: any) => ( - -
- - -
-
-
-

{pipeline.name}

-
- - {pipeline.exec_parallel - ? "Parallel" - : "Sequential"} - - - action.failure_pipeline_id === - pipeline.id, - ).length > 0 - ? "success" - : "danger" - } - radius="sm" - size="sm" - variant="flat" - > - {flow.failure_pipeline_id === pipeline.id - ? "Assigned to Flow" - : flow.actions.filter( - (action: any) => - action.failure_pipeline_id === - pipeline.id, - ).length > 0 - ? "Assigned on Step" - : "Not Assigned"} - -
-
-

- {pipeline.id} -

-
-
- - -
-
-
-
- - - handleDragEndPipeline(pipeline, event) - } - > - -
- {pipeline.actions !== null && - pipeline.actions.length > 0 && - pipeline.actions.map((action: any) => ( - - ))} -
-
-
-
- -
- { - setTargetFailurePipeline(pipeline); - addFlowFailurePipelineActionModal.onOpen(); - }} - > - -
-
-
- -
-
-

- Create new Action -

-

- Add a new action to the failure pipeline -

-
-
-
-
-
- - { - const parsedAction = await getClipboardAction(); - - if (parsedAction) { - setTargetAction(parsedAction); - setTargetFailurePipeline(pipeline); - copyFlowFailurePipelineActionModal.onOpen(); - } else { - addToast({ - title: "Flow", - description: "No action found in clipboard.", - color: "danger", - variant: "flat", - }); - } - }} - > - -
-
-
- -
-
-

- Paste Action from Clipboard -

-

- You have an action copied to the clipboard. -

-
-
-
-
-
-
-
-
- ))} - + - } - /> -
- {flow.failure_pipelines !== null && - flow.failure_pipelines.length === 0 && ( -
-

- No failure pipelines defined. -

-
- )} + /> + +
+
+ + + + {actions.length === 0 && ( +
+
+ )} +
+ + +
+ {actions.map((action: any) => ( + + ))} +
+
+
+ - - - - - - - - - - -
); } diff --git a/services/frontend/components/flows/flow/details.tsx b/services/frontend/components/flows/flow/details.tsx index ad7eaa46..edfddeaa 100644 --- a/services/frontend/components/flows/flow/details.tsx +++ b/services/frontend/components/flows/flow/details.tsx @@ -5,22 +5,23 @@ import { Icon } from "@iconify/react"; import NumberFlow from "@number-flow/react"; import { useRouter } from "next/navigation"; +import { useFlowExecutions } from "@/lib/swr/hooks/flows"; + export default function FlowDetails({ flow, project, - totalExecutions, runners, }: { flow: any; project: any; - totalExecutions: any; runners: any; }) { const router = useRouter(); + const { total: totalExecutions } = useFlowExecutions(flow.id); return (
-
+
@@ -81,6 +82,21 @@ export default function FlowDetails({
+
+ + +
+
+ +
+
+

{flow.type}

+

Type

+
+
+
+
+
@@ -89,7 +105,10 @@ export default function FlowDetails({
- +

Executions

diff --git a/services/frontend/components/flows/flow/failure-pipelines.tsx b/services/frontend/components/flows/flow/failure-pipelines.tsx new file mode 100644 index 00000000..82b6c18b --- /dev/null +++ b/services/frontend/components/flows/flow/failure-pipelines.tsx @@ -0,0 +1,712 @@ +import { closestCenter, DndContext } from "@dnd-kit/core"; +import { + arrayMove, + SortableContext, + useSortable, + verticalListSortingStrategy, +} from "@dnd-kit/sortable"; +import { CSS } from "@dnd-kit/utilities"; +import { Icon } from "@iconify/react"; +import { + addToast, + Button, + ButtonGroup, + Card, + CardBody, + CardFooter, + Chip, + Dropdown, + DropdownItem, + DropdownMenu, + DropdownTrigger, + Spacer, + Tab, + Tabs, + Tooltip, + useDisclosure, +} from "@heroui/react"; +import React, { useEffect } from "react"; +import { useRouter } from "next/navigation"; + +import EditActionModal from "@/components/modals/actions/edit"; +import DeleteActionModal from "@/components/modals/actions/delete"; +import AddActionModal from "@/components/modals/actions/add"; +import CreateFailurePipelineModal from "@/components/modals/failurePipelines/create"; +import DeleteFailurePipelineModal from "@/components/modals/failurePipelines/delete"; +import EditFailurePipelineModal from "@/components/modals/failurePipelines/edit"; +import UpdateFlowFailurePipelineActions from "@/lib/fetch/flow/PUT/UpdateFailurePipelineActions"; +import CopyActionModal from "@/components/modals/actions/copy"; +import UpgradeActionModal from "@/components/modals/actions/upgrade"; +import CopyActionToDifferentFlowModal from "@/components/modals/actions/transferCopy"; +import FlowActionDetails from "@/components/modals/actions/details"; + +export default function FlowFailurePipelines({ + projects, + flows, + flow, + runners, + user, + canEdit, + settings, +}: { + projects: any; + flows: any; + flow: any; + runners: any; + user: any; + canEdit: boolean; + settings: any; +}) { + const router = useRouter(); + + const 
[targetAction, setTargetAction] = React.useState({} as any); + const [updatedAction, setUpdatedAction] = React.useState({} as any); + + const [failurePipelines, setFailurePipelines] = React.useState([] as any); + const [targetFailurePipeline, setTargetFailurePipeline] = React.useState( + {} as any, + ); + + const [failurePipelineTab, setFailurePipelineTab] = + React.useState("add-pipeline"); + + const viewFlowActionDetails = useDisclosure(); + const createFlowFailurePipelineModal = useDisclosure(); + const editFlowFailurePipelineModal = useDisclosure(); + const deleteFailurePipelineModal = useDisclosure(); + const addFlowFailurePipelineActionModal = useDisclosure(); + const editFlowFailurePipelineActionModal = useDisclosure(); + const deleteFlowFailurePipelineActionModal = useDisclosure(); + const copyFlowFailurePipelineActionModal = useDisclosure(); + const upgradeFlowFailurePipelineActionModal = useDisclosure(); + const copyActionToDifferentFlowModal = useDisclosure(); + const copyFailurePipelineActionToDifferentFlowModal = useDisclosure(); + + useEffect(() => { + if (flow.failure_pipelines !== null) { + setFailurePipelines(flow.failure_pipelines); + + if (failurePipelineTab === "add-pipeline") { + setFailurePipelineTab(flow.failure_pipelines[0]?.id || "add-pipeline"); + } + } + }, [flow]); + + const handleFailurePipelineTabChange = (key: any) => { + setFailurePipelineTab(key); + }; + + // function to get action from clipboard + const getClipboardAction = async () => { + try { + const clipboardText = await navigator.clipboard.readText(); + const parsedAction = JSON.parse(clipboardText); + + if (parsedAction && parsedAction.id && parsedAction.plugin) { + return parsedAction; + } else { + return null; + } + } catch { + return null; + } + }; + + const SortableItem = ({ action }: { action: any }) => { + const { attributes, listeners, setNodeRef, transform, transition } = + useSortable({ id: action.id }); + + const style = { + transform: 
CSS.Transform.toString(transform), + transition, + }; + + return ( +
+ { + setTargetAction(action); + viewFlowActionDetails.onOpen(); + }} + > + +
+
+
+ +
+
+

+ {action.custom_name ? action.custom_name : action.name} +

+

+ {action.custom_description + ? action.custom_description + : action.description} +

+
+
+
+ + + +
+
+
+ +
+ + Vers. {action.version} + + + {action.active ? "Active" : "Disabled"} + + {flow.failure_pipeline_id !== "" || + (flow.failure_pipeline_id !== null && + !flow.failure_pipelines.some( + (pipeline: any) => + pipeline.id === action.failure_pipeline_id || + (pipeline.actions !== null && + pipeline.actions.some( + (pipelineAction: any) => + pipelineAction.id === action.id, + )), + ) && ( + + No Failure Pipeline Assigned + + ))} + {action.update_available && ( + + Upgrade Available + + )} +
+
+ + {action.update_available && ( + + + + )} + + + + + + + + + + } + onPress={() => { + navigator.clipboard.writeText(JSON.stringify(action)); + addToast({ + title: "Action", + description: "Action copied to clipboard!", + color: "success", + variant: "flat", + }); + }} + > + Clipboard + + + } + onPress={() => { + setTargetAction(action); + setTargetFailurePipeline( + flow.failure_pipelines.filter( + (pipeline: any) => + pipeline.actions !== null && + pipeline.actions.some( + (pipelineAction: any) => + pipelineAction.id === action.id, + ), + )[0], + ); + copyFlowFailurePipelineActionModal.onOpen(); + }} + > + Local + + + } + onPress={() => { + // if action is in an failure pipeline, open the edit modal + if ( + flow.failure_pipelines.some( + (pipeline: any) => + pipeline.actions !== null && + pipeline.actions.some( + (pipelineAction: any) => + pipelineAction.id === action.id, + ), + ) + ) { + setTargetAction(action); + setTargetFailurePipeline( + flow.failure_pipelines.filter( + (pipeline: any) => + pipeline.actions !== null && + pipeline.actions.some( + (pipelineAction: any) => + pipelineAction.id === action.id, + ), + )[0], + ); + copyFailurePipelineActionToDifferentFlowModal.onOpen(); + } else { + setTargetAction(action); + copyActionToDifferentFlowModal.onOpen(); + } + }} + > + Transfer + + + + + + + + + + +
+
+
+
+ ); + }; + + const handleDragEndPipeline = (pipeline: any, event: any) => { + const { active, over } = event; + + if (active.id !== over.id) { + const items = [...pipeline.actions]; + const oldIndex = items.findIndex((item: any) => item.id === active.id); + const newIndex = items.findIndex((item: any) => item.id === over.id); + + const newArray = arrayMove(items, oldIndex, newIndex); + + updateFlowFailurePipelineActions(pipeline, newArray); + } + }; + + function updateFlowFailurePipelineActions(pipeline: any, actions: any) { + UpdateFlowFailurePipelineActions(flow.id, pipeline.id, actions) + .then(() => { + router.refresh(); + addToast({ + title: "Flow", + description: + "Flow failure pipeline actions order updated successfully.", + color: "success", + variant: "flat", + }); + }) + .catch(() => { + router.refresh(); + addToast({ + title: "Flow", + description: "Failed to update flow failure pipeline actions order.", + color: "danger", + variant: "flat", + }); + }); + } + + return ( +
+

+ With failure pipelines you have the ability to send notifications or + trigger any other action if a specific action or the whole execution + failed. +

+ + + {failurePipelines.map((pipeline: any) => ( + +
+ + +
+
+
+

{pipeline.name}

+
+ + {pipeline.exec_parallel ? "Parallel" : "Sequential"} + + + action.failure_pipeline_id === + pipeline.id, + ).length > 0 + ? "success" + : "danger" + } + radius="sm" + size="sm" + variant="flat" + > + {flow.failure_pipeline_id === pipeline.id + ? "Assigned to Flow" + : flow.actions.filter( + (action: any) => + action.failure_pipeline_id === + pipeline.id, + ).length > 0 + ? "Assigned on Step" + : "Not Assigned"} + +
+
+

+ {pipeline.id} +

+
+
+ +
+
+
+
+ handleDragEndPipeline(pipeline, event)} + > + +
+ {pipeline.actions !== null && + pipeline.actions.length > 0 && + pipeline.actions.map((action: any) => ( + + ))} +
+
+
+
+
+ ))} + { + createFlowFailurePipelineModal.onOpen(); + }} + > + + + } + /> +
+ + {flow.failure_pipelines !== null && + flow.failure_pipelines.length === 0 && ( +
+

+ No failure pipelines defined. +

+
+ )} + + + + + + + + + + + + +
+ ); +} diff --git a/services/frontend/components/flows/flow/heading.tsx b/services/frontend/components/flows/flow/heading.tsx index 4b76c847..bef3bb2c 100644 --- a/services/frontend/components/flows/flow/heading.tsx +++ b/services/frontend/components/flows/flow/heading.tsx @@ -3,11 +3,12 @@ import { addToast, Button, Divider, useDisclosure } from "@heroui/react"; import { Icon } from "@iconify/react"; -import APIStartExecution from "@/lib/fetch/executions/start"; -import Reloader from "@/components/reloader/Reloader"; import ScheduleExecutionModal from "@/components/modals/executions/schedule"; import EditFlowModal from "@/components/modals/flows/edit"; import canEditProject from "@/lib/functions/canEditProject"; +import { startExecution } from "@/lib/swr/api/executions"; +import { useRefreshCache } from "@/lib/swr/hooks/useRefreshCache"; +import SimulateAlertModal from "@/components/modals/alerts/simulate"; export default function FlowHeading({ flow, @@ -26,6 +27,27 @@ export default function FlowHeading({ }) { const editFlowModal = useDisclosure(); const scheduleExecutionModal = useDisclosure(); + const simulateAlertModal = useDisclosure(); + const { refreshAllExecutionCaches } = useRefreshCache(); + + const handleExecuteFlow = async () => { + const result = await startExecution(flow.id); + + if (result.success) { + addToast({ + title: "Execution Started", + color: "success", + }); + // Immediately refresh executions data + refreshAllExecutionCaches(flow.id); + } else { + addToast({ + title: "Execution start failed", + description: result.error, + color: "danger", + }); + } + }; return (
@@ -36,49 +58,55 @@ export default function FlowHeading({
- - + {flow.type === "alert" ? ( + + ) : ( + <> + + + + )} - - + />
{/* Mobile */}
-
+ ; + } + + // Show error state + if (flowError || !flow) { + return ( +
+ +
+ ); + } + + return ( +
+ + + + + +
+ ); +} diff --git a/services/frontend/components/flows/flow/page-skeleton.tsx b/services/frontend/components/flows/flow/page-skeleton.tsx new file mode 100644 index 00000000..6793d531 --- /dev/null +++ b/services/frontend/components/flows/flow/page-skeleton.tsx @@ -0,0 +1,59 @@ +"use client"; + +import { Card, CardBody, Skeleton } from "@heroui/react"; + +export default function FlowPageSkeleton() { + return ( +
+ {/* Header skeleton */} +
+
+ + +
+
+
+ + + + +
+
+
+ + {/* Divider */} +
+ + {/* Details cards skeleton */} +
+ {Array.from({ length: 4 }).map((_, i) => ( + + +
+ +
+ + +
+
+
+
+ ))} +
+ + {/* Tabs skeleton */} +
+
+ {Array.from({ length: 4 }).map((_, i) => ( + + ))} +
+ + + + + +
+
+ ); +} diff --git a/services/frontend/components/flows/flow/settings.tsx b/services/frontend/components/flows/flow/settings.tsx index a2481877..78a22bf8 100644 --- a/services/frontend/components/flows/flow/settings.tsx +++ b/services/frontend/components/flows/flow/settings.tsx @@ -3,18 +3,26 @@ import { Button, Card, CardBody, + Code, + Input, NumberInput, Select, SelectItem, Spacer, Switch, + Table, + TableBody, + TableCell, + TableColumn, + TableHeader, + TableRow, } from "@heroui/react"; -import { useRouter } from "next/navigation"; import { useState } from "react"; import { Icon } from "@iconify/react"; import UpdateFlow from "@/lib/fetch/flow/PUT/UpdateFlow"; import ErrorCard from "@/components/error/ErrorCard"; +import { useRefreshCache } from "@/lib/swr/hooks/useRefreshCache"; export default function FlowSettings({ flow, @@ -25,24 +33,24 @@ export default function FlowSettings({ user: any; canEdit: boolean; }) { - const router = useRouter(); + const { refreshFlowData } = useRefreshCache(); const [execParallel, setExecParallel] = useState(flow.exec_parallel); const [failurePipelineID, setFailurePipelineID] = useState( flow.failure_pipeline_id, ); - const [encryptExecutions, setEncryptExecutions] = useState( - flow.encrypt_executions, - ); - const [encryptActionParams, setEncryptActionParams] = useState( - flow.encrypt_action_params, - ); const [scheduleEveryValue, setScheduleEveryValue] = useState( flow.schedule_every_value, ); const [scheduleEveryUnit, setScheduleEveryUnit] = useState( flow.schedule_every_unit, ); + const [groupAlerts, setGroupAlerts] = useState(flow.group_alerts); + const [groupAlertsIdentifier, setGroupAlertsIdentifier] = useState( + flow.group_alerts_identifier, + ); + const [alertThreshold, setAlertThreshold] = useState(flow.alert_threshold); + const [flowPatterns, setFlowPatterns] = useState(flow.patterns); const [error, setError] = useState(false); const [errorMessage, setErrorMessage] = useState(""); @@ -55,12 +63,14 @@ export 
default function FlowSettings({ flow.project_id, flow.folder_id, flow.runner_id, - encryptExecutions, - encryptActionParams, execParallel, failurePipelineID, scheduleEveryValue, scheduleEveryUnit, + groupAlerts, + groupAlertsIdentifier, + alertThreshold, + flowPatterns, )) as any; if (!response) { @@ -71,7 +81,7 @@ export default function FlowSettings({ } if (response.success) { - router.refresh(); + refreshFlowData(flow.id); addToast({ title: "Flow", description: "Flow updated successfully", @@ -94,84 +104,218 @@ export default function FlowSettings({ <> {error && }
-
-

Actions

-
- - -
-
-

Execution Strategy

-

- Switch between parallel and sequential execution of - actions -

-
- - Sequential - Parallel - -
-
-
+ placeholder="Select the execution strategy" + selectedKeys={[execParallel ? "parallel" : "sequential"]} + variant="bordered" + onSelectionChange={(e) => { + if (e.currentKey === "parallel") { + setExecParallel(true); + } else { + setExecParallel(false); + } + }} + > + Sequential + Parallel + + + - - -
-
-

Common Failure Pipeline

-

- Execute an failure pipeline when actions during an - execution fail. - -
- CAUTION! This will override the per action failure - pipeline -
-

-
- { + if (e.currentKey === "none") { + setFailurePipelineID(""); + } else { + setFailurePipelineID(e.currentKey); + } + }} + > + None + {flow.failure_pipelines.map((pipeline: any) => ( + + {pipeline.name} + + ))} + + + +
+ + {flow.type == "alert" && ( +
+
+
+

Patterns

+

+ Patterns can be used to trigger executions only when the + alert payload contains specified key value pairs. +

+
+
+ - None - {flow.failure_pipelines.map((pipeline: any) => ( - {pipeline.name} - ))} - + + Key + Type + Value + Actions + + + {flowPatterns.map((pattern: any, index: number) => ( + + + { + const newPatterns = [...flowPatterns]; + + newPatterns[index].key = e.target.value; + setFlowPatterns(newPatterns); + }} + /> + + + + + + { + const newPatterns = [...flowPatterns]; + + newPatterns[index].value = e.target.value; + setFlowPatterns(newPatterns); + }} + /> + + +
-
-
-
-
-
-

Executions

-
+ )} +
+ + + + + +

Executions

- +

Schedule Every

@@ -190,13 +334,21 @@ export default function FlowSettings({
+
+ + + + + +
+
+

Threshold

+

+ If an alert is resolved and reoccurs after which + threshold should a new execution be accepted? +

+
+
+ minutes

+ } + isDisabled={ + (!canEdit || flow.disabled) && user.role !== "admin" + } + minValue={0} + placeholder="Enter a number" + variant="bordered" + onValueChange={setAlertThreshold} + /> +
+
+
+
+
+ + + )}
+ } + > + + - + Actions
} @@ -79,33 +95,55 @@ export default function FlowTabs({ /> - - Executions + + Failure Pipelines
} > - + + {flow.type === "alert" && ( + + + Alerts + + } + > + + + )} + - - Stats + + Info } > + + - - - Info - - } - > - - diff --git a/services/frontend/components/flows/list.tsx b/services/frontend/components/flows/list.tsx index d257d598..bd6ca314 100644 --- a/services/frontend/components/flows/list.tsx +++ b/services/frontend/components/flows/list.tsx @@ -314,7 +314,7 @@ export default function FlowList({ size="sm" variant="flat" > - {flow.disabled ? "Disabled" : "Active"} + {flow.disabled ? "Disabled" : "Enabled"} diff --git a/services/frontend/components/flows/page-client.tsx b/services/frontend/components/flows/page-client.tsx new file mode 100644 index 00000000..6c14d71a --- /dev/null +++ b/services/frontend/components/flows/page-client.tsx @@ -0,0 +1,90 @@ +"use client"; + +import { Divider } from "@heroui/react"; + +import FlowList from "@/components/flows/list"; +import FlowsHeading from "@/components/flows/heading"; +import ErrorCard from "@/components/error/ErrorCard"; +import { PageSkeleton } from "@/components/loading/page-skeleton"; +import { + useFlows, + useFolders, + useProjects, + useRunningExecutions, + useUserDetails, + usePageSettings, +} from "@/lib/swr/hooks/flows"; + +export default function FlowsPageClient() { + const { flows, isLoading: flowsLoading, isError: flowsError } = useFlows(); + const { + folders, + isLoading: foldersLoading, + isError: foldersError, + } = useFolders(); + const { + projects, + isLoading: projectsLoading, + isError: projectsError, + } = useProjects(); + const { runningExecutions, isLoading: runningExecutionsLoading } = + useRunningExecutions(); + const { user, isLoading: userLoading, isError: userError } = useUserDetails(); + const { + settings, + isLoading: settingsLoading, + isError: settingsError, + } = usePageSettings(); + + // Check if any essential data is still loading or missing + const isLoading = + flowsLoading || + foldersLoading || + projectsLoading || + userLoading || + settingsLoading || + !flows 
|| + !folders || + !projects || + !user || + !settings; + + // Show loading state if essential data is still loading + if (isLoading || runningExecutionsLoading) { + return ; + } + + // Show error state + const hasError = + flowsError || foldersError || projectsError || userError || settingsError; + + if (hasError) { + return ( +
+ +
+ ); + } + + return ( +
+ + + +
+ ); +} diff --git a/services/frontend/components/loading/page-skeleton.tsx b/services/frontend/components/loading/page-skeleton.tsx new file mode 100644 index 00000000..f86bae7f --- /dev/null +++ b/services/frontend/components/loading/page-skeleton.tsx @@ -0,0 +1,58 @@ +import { Card, CardBody, Skeleton, Spacer } from "@heroui/react"; + +export function PageSkeleton() { + return ( +
+ {/* Heading skeleton */} +
+
+ +
+ + + +
+ +
+
+ +
+ + +
+ +
+
+ + + + {/* Divider skeleton */} + +
+ + + + + {/* Content skeleton */} +
+ {Array.from({ length: 6 }).map((_, i) => ( + + + +
+ + + +
+ + + +
+ + + + ))} +
+
+ ); +} diff --git a/services/frontend/components/magicui/particles.tsx b/services/frontend/components/magicui/particles.tsx index f5ed89a0..f2ab0ee9 100644 --- a/services/frontend/components/magicui/particles.tsx +++ b/services/frontend/components/magicui/particles.tsx @@ -1,12 +1,18 @@ -/* eslint-disable no-undef */ "use client"; -import React, { useEffect, useRef, useState } from "react"; +import React, { + ComponentPropsWithoutRef, + useEffect, + useRef, + useState, +} from "react"; -type MousePosition = { +import { cn } from "@/lib/utils"; + +interface MousePosition { x: number; y: number; -}; +} function MousePosition(): MousePosition { const [mousePosition, setMousePosition] = useState({ @@ -15,13 +21,16 @@ function MousePosition(): MousePosition { }); useEffect(() => { + // eslint-disable-next-line no-undef const handleMouseMove = (event: MouseEvent) => { setMousePosition({ x: event.clientX, y: event.clientY }); }; + // eslint-disable-next-line no-undef window.addEventListener("mousemove", handleMouseMove); return () => { + // eslint-disable-next-line no-undef window.removeEventListener("mousemove", handleMouseMove); }; }, []); @@ -29,7 +38,7 @@ function MousePosition(): MousePosition { return mousePosition; } -type ParticlesProps = { +interface ParticlesProps extends ComponentPropsWithoutRef<"div"> { className?: string; quantity?: number; staticity?: number; @@ -39,7 +48,8 @@ type ParticlesProps = { color?: string; vx?: number; vy?: number; -}; +} + function hexToRgb(hex: string): number[] { hex = hex.replace("#", ""); @@ -50,7 +60,7 @@ function hexToRgb(hex: string): number[] { .join(""); } - const hexInt = Number.parseInt(hex, 16); + const hexInt = parseInt(hex, 16); const red = (hexInt >> 16) & 255; const green = (hexInt >> 8) & 255; const blue = hexInt & 255; @@ -58,7 +68,20 @@ function hexToRgb(hex: string): number[] { return [red, green, blue]; } -const Particles: React.FC = ({ +type Circle = { + x: number; + y: number; + translateX: number; + 
translateY: number; + size: number; + alpha: number; + targetAlpha: number; + dx: number; + dy: number; + magnetism: number; +}; + +export const Particles: React.FC = ({ className = "", quantity = 100, staticity = 50, @@ -68,15 +91,23 @@ const Particles: React.FC = ({ color = "#ffffff", vx = 0, vy = 0, + ...props }) => { + // eslint-disable-next-line no-undef const canvasRef = useRef(null); + // eslint-disable-next-line no-undef const canvasContainerRef = useRef(null); + // eslint-disable-next-line no-undef const context = useRef(null); - const circles = useRef([]); + const circles = useRef([]); const mousePosition = MousePosition(); const mouse = useRef<{ x: number; y: number }>({ x: 0, y: 0 }); const canvasSize = useRef<{ w: number; h: number }>({ w: 0, h: 0 }); + // eslint-disable-next-line no-undef const dpr = typeof window !== "undefined" ? window.devicePixelRatio : 1; + const rafID = useRef(null); + // eslint-disable-next-line no-undef + const resizeTimeout = useRef(null); useEffect(() => { if (canvasRef.current) { @@ -84,10 +115,29 @@ const Particles: React.FC = ({ } initCanvas(); animate(); - window.addEventListener("resize", initCanvas); + + const handleResize = () => { + if (resizeTimeout.current) { + clearTimeout(resizeTimeout.current); + } + resizeTimeout.current = setTimeout(() => { + initCanvas(); + }, 200); + }; + + // eslint-disable-next-line no-undef + window.addEventListener("resize", handleResize); return () => { - window.removeEventListener("resize", initCanvas); + if (rafID.current != null) { + // eslint-disable-next-line no-undef + window.cancelAnimationFrame(rafID.current); + } + if (resizeTimeout.current) { + clearTimeout(resizeTimeout.current); + } + // eslint-disable-next-line no-undef + window.removeEventListener("resize", handleResize); }; }, [color]); @@ -119,29 +169,24 @@ const Particles: React.FC = ({ } }; - type Circle = { - x: number; - y: number; - translateX: number; - translateY: number; - size: number; - alpha: number; - 
targetAlpha: number; - dx: number; - dy: number; - magnetism: number; - }; - const resizeCanvas = () => { if (canvasContainerRef.current && canvasRef.current && context.current) { - circles.current.length = 0; canvasSize.current.w = canvasContainerRef.current.offsetWidth; canvasSize.current.h = canvasContainerRef.current.offsetHeight; + canvasRef.current.width = canvasSize.current.w * dpr; canvasRef.current.height = canvasSize.current.h * dpr; canvasRef.current.style.width = `${canvasSize.current.w}px`; canvasRef.current.style.height = `${canvasSize.current.h}px`; context.current.scale(dpr, dpr); + + // Clear existing particles and create new ones with exact quantity + circles.current = []; + for (let i = 0; i < quantity; i++) { + const circle = circleParams(); + + drawCircle(circle); + } } }; @@ -152,9 +197,7 @@ const Particles: React.FC = ({ const translateY = 0; const pSize = Math.floor(Math.random() * 2) + size; const alpha = 0; - const targetAlpha = Number.parseFloat( - (Math.random() * 0.6 + 0.1).toFixed(1), - ); + const targetAlpha = parseFloat((Math.random() * 0.6 + 0.1).toFixed(1)); const dx = (Math.random() - 0.5) * 0.1; const dy = (Math.random() - 0.5) * 0.1; const magnetism = 0.1 + Math.random() * 4; @@ -238,7 +281,7 @@ const Particles: React.FC = ({ canvasSize.current.h - circle.y - circle.translateY - circle.size, // distance from bottom edge ]; const closestEdge = edge.reduce((a, b) => Math.min(a, b)); - const remapClosestEdge = Number.parseFloat( + const remapClosestEdge = parseFloat( remapValue(closestEdge, 0, 20, 0, 1).toFixed(2), ); @@ -274,17 +317,20 @@ const Particles: React.FC = ({ const newCircle = circleParams(); drawCircle(newCircle); - // update the circle position } }); - window.requestAnimationFrame(animate); + // eslint-disable-next-line no-undef + rafID.current = window.requestAnimationFrame(animate); }; return ( -