diff --git a/cmd/experimental/experimental.go b/cmd/experimental/experimental.go index 7e7d376fea..b1931b18da 100644 --- a/cmd/experimental/experimental.go +++ b/cmd/experimental/experimental.go @@ -2,6 +2,7 @@ package experimental import ( mcp "github.com/databricks/cli/experimental/aitools/cmd" + dev "github.com/databricks/cli/experimental/dev/cmd" "github.com/spf13/cobra" ) @@ -20,6 +21,7 @@ These commands provide early access to new features that are still under development. They may change or be removed in future versions without notice.`, } + cmd.AddCommand(dev.New()) cmd.AddCommand(mcp.NewMcpCmd()) return cmd diff --git a/cmd/workspace/apps/overrides.go b/cmd/workspace/apps/overrides.go index a1e35da903..34a9cd8ee9 100644 --- a/cmd/workspace/apps/overrides.go +++ b/cmd/workspace/apps/overrides.go @@ -23,6 +23,9 @@ func listDeploymentsOverride(listDeploymentsCmd *cobra.Command, listDeploymentsR } func createOverride(createCmd *cobra.Command, createReq *apps.CreateAppRequest) { + createCmd.Short = `Create an app in your workspace.` + createCmd.Long = `Create an app in your workspace.` + originalRunE := createCmd.RunE createCmd.RunE = func(cmd *cobra.Command, args []string) error { err := originalRunE(cmd, args) diff --git a/experimental/aitools/templates/appkit/databricks_template_schema.json b/experimental/aitools/templates/appkit/databricks_template_schema.json index 973dab9e04..43c6c5a356 100644 --- a/experimental/aitools/templates/appkit/databricks_template_schema.json +++ b/experimental/aitools/templates/appkit/databricks_template_schema.json @@ -10,6 +10,7 @@ "sql_warehouse_id": { "type": "string", "description": "SQL Warehouse ID", + "default": "", "order": 2 }, "profile": { diff --git a/experimental/aitools/templates/appkit/template/{{.project_name}}/.env.tmpl b/experimental/aitools/templates/appkit/template/{{.project_name}}/.env.tmpl index be54897988..8599a13ed5 100644 --- a/experimental/aitools/templates/appkit/template/{{.project_name}}/.env.tmpl +++ b/experimental/aitools/templates/appkit/template/{{.project_name}}/.env.tmpl @@ -1,5 +1,5 @@ {{if ne .profile ""}}DATABRICKS_CONFIG_PROFILE={{.profile}}{{else}}DATABRICKS_HOST={{workspace_host}}{{end}} DATABRICKS_WAREHOUSE_ID={{.sql_warehouse_id}} DATABRICKS_APP_PORT=8000 -DATABRICKS_APP_NAME=minimal +DATABRICKS_APP_NAME={{.project_name}} FLASK_RUN_HOST=0.0.0.0 diff --git a/experimental/aitools/templates/appkit/template/{{.project_name}}/config/queries/schema.ts b/experimental/aitools/templates/appkit/template/{{.project_name}}/config/queries/schema.ts deleted file mode 100644 index 1abc1821d7..0000000000 --- a/experimental/aitools/templates/appkit/template/{{.project_name}}/config/queries/schema.ts +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Query Result Schemas - Define the COLUMNS RETURNED by each SQL query. - * - * These schemas validate QUERY RESULTS, not input parameters. 
- * - Input parameters are passed to useAnalyticsQuery() as the second argument - * - These schemas define the shape of data[] returned by the query - * - * Example: - * SQL: SELECT name, age FROM users WHERE city = :city - * Schema: z.array(z.object({ name: z.string(), age: z.number() })) - * Usage: useAnalyticsQuery('users', { city: sql.string('NYC') }) - * ^ input params ^ schema validates this result - */ - -import { z } from 'zod'; -export const querySchemas = { - mocked_sales: z.array( - z.object({ - max_month_num: z.number().min(1).max(12), - }) - ), - - hello_world: z.array( - z.object({ - value: z.string(), - }) - ), -}; diff --git a/experimental/aitools/templates/appkit/template/{{.project_name}}/package.json b/experimental/aitools/templates/appkit/template/{{.project_name}}/package.json index 480d310043..a26a2dcd2b 100644 --- a/experimental/aitools/templates/appkit/template/{{.project_name}}/package.json +++ b/experimental/aitools/templates/appkit/template/{{.project_name}}/package.json @@ -12,7 +12,7 @@ "typecheck": "tsc -p ./tsconfig.server.json --noEmit && tsc -p ./tsconfig.client.json --noEmit", "lint": "eslint .", "lint:fix": "eslint . --fix", - "lint:ast-grep": "tsx scripts/lint-ast-grep.ts", + "lint:ast-grep": "appkit-lint", "format": "prettier --check .", "format:fix": "prettier --write .", "test": "vitest run && npm run test:smoke", @@ -20,7 +20,8 @@ "test:e2e:ui": "playwright test --ui", "test:smoke": "playwright install chromium && playwright test tests/smoke.spec.ts", "clean": "rm -rf client/dist dist build node_modules .smoke-test test-results playwright-report", - "typegen": "tsx scripts/generate-types.ts" + "typegen": "appkit-generate-types", + "setup": "appkit-setup --write" }, "keywords": [], "author": "", diff --git a/experimental/dev/cmd/app/app.go b/experimental/dev/cmd/app/app.go new file mode 100644 index 0000000000..f20a5ec106 --- /dev/null +++ b/experimental/dev/cmd/app/app.go @@ -0,0 +1,24 @@ +package app + +import ( + "github.com/spf13/cobra" +) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "app", + Short: "Manage Databricks applications", + Long: `Manage Databricks applications. + +Provides a streamlined interface for creating, managing, and monitoring +full-stack Databricks applications built with TypeScript, React, and +Tailwind CSS.`, + } + + cmd.AddCommand(newInitCmd()) + cmd.AddCommand(newImportCmd()) + cmd.AddCommand(newDeployCmd()) + cmd.AddCommand(newDevRemoteCmd()) + + return cmd +} diff --git a/experimental/dev/cmd/app/deploy.go b/experimental/dev/cmd/app/deploy.go new file mode 100644 index 0000000000..f41da604d7 --- /dev/null +++ b/experimental/dev/cmd/app/deploy.go @@ -0,0 +1,228 @@ +package app + +import ( + "bytes" + "context" + "errors" + "fmt" + "os" + "os/exec" + "sync" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/resources" + "github.com/databricks/cli/bundle/run" + "github.com/databricks/cli/cmd/bundle/utils" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/log" + "github.com/spf13/cobra" +) + +func newDeployCmd() *cobra.Command { + var ( + force bool + skipBuild bool + ) + + cmd := &cobra.Command{ + Use: "deploy", + Short: "Build, deploy the AppKit application and run it", + Long: `Build, deploy the AppKit application and run it. + +This command runs a deployment pipeline: +1. Builds the frontend (npm run build) +2. Deploys the bundle to the workspace +3. 
Runs the app + +Examples: + # Deploy to default target + databricks experimental dev app deploy + + # Deploy to a specific target + databricks experimental dev app deploy --target prod + + # Skip frontend build (if already built) + databricks experimental dev app deploy --skip-build + + # Force deploy (override git branch validation) + databricks experimental dev app deploy --force + + # Set bundle variables + databricks experimental dev app deploy --var="warehouse_id=abc123"`, + Args: root.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + return runDeploy(cmd, force, skipBuild) + }, + } + + cmd.Flags().StringP("target", "t", "", "Deployment target (e.g., dev, prod)") + cmd.Flags().BoolVar(&force, "force", false, "Force-override Git branch validation") + cmd.Flags().BoolVar(&skipBuild, "skip-build", false, "Skip npm build step") + cmd.Flags().StringSlice("var", []string{}, `Set values for variables defined in bundle config. Example: --var="key=value"`) + + return cmd +} + +func runDeploy(cmd *cobra.Command, force, skipBuild bool) error { + ctx := cmd.Context() + + // Check for bundle configuration + if _, err := os.Stat("databricks.yml"); os.IsNotExist(err) { + return errors.New("no databricks.yml found; run this command from a bundle directory") + } + + // Step 1: Build frontend (unless skipped) + if !skipBuild { + if err := runNpmTypegen(ctx); err != nil { + return err + } + if err := runNpmBuild(ctx); err != nil { + return err + } + } + + // Step 2: Deploy bundle + cmdio.LogString(ctx, "Deploying bundle...") + b, err := utils.ProcessBundle(cmd, utils.ProcessOptions{ + InitFunc: func(b *bundle.Bundle) { + b.Config.Bundle.Force = force + }, + AlwaysPull: true, + FastValidate: true, + Build: true, + Deploy: true, + }) + if err != nil { + return fmt.Errorf("deploy failed: %w", err) + } + log.Infof(ctx, "Deploy completed") + + // Step 3: Detect and run app + appKey, err := detectApp(b) + if err != nil { + return err + } + + log.Infof(ctx, "Running app: %s", appKey) + if err := runApp(ctx, b, appKey); err != nil { + cmdio.LogString(ctx, "āœ” Deployment succeeded, but failed to start app") + return fmt.Errorf("failed to run app: %w", err) + } + + cmdio.LogString(ctx, "āœ” Deployment complete!") + return nil +} + +// syncBuffer is a thread-safe buffer for capturing command output. +type syncBuffer struct { + mu sync.Mutex + buf bytes.Buffer +} + +func (b *syncBuffer) Write(p []byte) (n int, err error) { + b.mu.Lock() + defer b.mu.Unlock() + return b.buf.Write(p) +} + +func (b *syncBuffer) String() string { + b.mu.Lock() + defer b.mu.Unlock() + return b.buf.String() +} + +// runNpmTypegen runs npm run typegen in the current directory. +func runNpmTypegen(ctx context.Context) error { + if _, err := exec.LookPath("npm"); err != nil { + return errors.New("npm not found: please install Node.js") + } + + var output syncBuffer + + err := RunWithSpinnerCtx(ctx, "Generating types...", func() error { + cmd := exec.CommandContext(ctx, "npm", "run", "typegen") + cmd.Stdout = &output + cmd.Stderr = &output + return cmd.Run() + }) + if err != nil { + out := output.String() + if out != "" { + return fmt.Errorf("typegen failed:\n%s", out) + } + return fmt.Errorf("typegen failed: %w", err) + } + return nil +} + +// runNpmBuild runs npm run build in the current directory. 
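+// Build output (stdout and stderr) is captured in a syncBuffer and included in the error message if the build fails.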
+func runNpmBuild(ctx context.Context) error { + if _, err := exec.LookPath("npm"); err != nil { + return errors.New("npm not found: please install Node.js") + } + + var output syncBuffer + + err := RunWithSpinnerCtx(ctx, "Building frontend...", func() error { + cmd := exec.CommandContext(ctx, "npm", "run", "build") + cmd.Stdout = &output + cmd.Stderr = &output + return cmd.Run() + }) + if err != nil { + out := output.String() + if out != "" { + return fmt.Errorf("build failed:\n%s", out) + } + return fmt.Errorf("build failed: %w", err) + } + return nil +} + +// detectApp finds the single app in the bundle configuration. +func detectApp(b *bundle.Bundle) (string, error) { + apps := b.Config.Resources.Apps + + if len(apps) == 0 { + return "", errors.New("no apps found in bundle configuration") + } + + if len(apps) > 1 { + return "", errors.New("multiple apps found in bundle, cannot auto-detect") + } + + for key := range apps { + return key, nil + } + + return "", errors.New("unexpected error detecting app") +} + +// runApp runs the specified app using the runner interface. +func runApp(ctx context.Context, b *bundle.Bundle, appKey string) error { + ref, err := resources.Lookup(b, appKey, run.IsRunnable) + if err != nil { + return fmt.Errorf("failed to lookup app: %w", err) + } + + runner, err := run.ToRunner(b, ref) + if err != nil { + return fmt.Errorf("failed to create runner: %w", err) + } + + output, err := runner.Run(ctx, &run.Options{}) + if err != nil { + return fmt.Errorf("failed to run app: %w", err) + } + + if output != nil { + resultString, err := output.String() + if err != nil { + return err + } + log.Infof(ctx, "App output: %s", resultString) + } + + return nil +} diff --git a/experimental/dev/cmd/app/dev_remote.go b/experimental/dev/cmd/app/dev_remote.go new file mode 100644 index 0000000000..a819ff870e --- /dev/null +++ b/experimental/dev/cmd/app/dev_remote.go @@ -0,0 +1,243 @@ +package app + +import ( + "bytes" + "context" + _ "embed" + "errors" + "fmt" + "net" + "net/url" + "os" + "os/exec" + "os/signal" + "path/filepath" + "strconv" + "syscall" + "time" + + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) + +//go:embed vite-server.js +var viteServerScript []byte + +const ( + vitePort = 5173 + viteReadyCheckInterval = 100 * time.Millisecond + viteReadyMaxAttempts = 50 +) + +func isViteReady(port int) bool { + conn, err := net.DialTimeout("tcp", "localhost:"+strconv.Itoa(port), viteReadyCheckInterval) + if err != nil { + return false + } + conn.Close() + return true +} + +// detectAppNameFromBundle tries to extract the app name from a databricks.yml bundle config. +// Returns the app name if found, or empty string if no bundle or no apps found. 
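+// Errors while loading the bundle are swallowed; an empty result means "not detected" and callers fall back to prompting.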
+func detectAppNameFromBundle() string { + const bundleFile = "databricks.yml" + + // Check if databricks.yml exists + if _, err := os.Stat(bundleFile); os.IsNotExist(err) { + return "" + } + + // Get current working directory + cwd, err := os.Getwd() + if err != nil { + return "" + } + + // Load the bundle configuration directly + rootConfig, diags := config.Load(filepath.Join(cwd, bundleFile)) + if diags.HasError() { + return "" + } + + // Check for apps in the bundle + apps := rootConfig.Resources.Apps + if len(apps) == 0 { + return "" + } + + // If there's exactly one app, return its name + if len(apps) == 1 { + for _, app := range apps { + return app.Name + } + } + + // Multiple apps - can't auto-detect + return "" +} + +func startViteDevServer(ctx context.Context, appURL string, port int) (*exec.Cmd, chan error, error) { + // Pass script through stdin, and pass arguments in order + viteCmd := exec.Command("node", "-", appURL, strconv.Itoa(port)) + viteCmd.Stdin = bytes.NewReader(viteServerScript) + viteCmd.Stdout = os.Stdout + viteCmd.Stderr = os.Stderr + + err := viteCmd.Start() + if err != nil { + return nil, nil, fmt.Errorf("failed to start Vite server: %w", err) + } + + cmdio.LogString(ctx, fmt.Sprintf("šŸš€ Starting Vite development server on port %d...", port)) + + viteErr := make(chan error, 1) + go func() { + if err := viteCmd.Wait(); err != nil { + viteErr <- fmt.Errorf("vite server exited with error: %w", err) + } else { + viteErr <- errors.New("vite server exited unexpectedly") + } + }() + + for range viteReadyMaxAttempts { + select { + case err := <-viteErr: + return nil, nil, err + default: + if isViteReady(port) { + return viteCmd, viteErr, nil + } + time.Sleep(viteReadyCheckInterval) + } + } + + _ = viteCmd.Process.Kill() + return nil, nil, errors.New("timeout waiting for Vite server to be ready") +} + +func newDevRemoteCmd() *cobra.Command { + var ( + appName string + clientPath string + port int + ) + + cmd := &cobra.Command{ + Use: "dev-remote", + Short: "Run AppKit app locally with WebSocket bridge to remote server", + Long: `Run AppKit app locally with WebSocket bridge to remote server. + +Starts a local Vite development server and establishes a WebSocket bridge +to the remote Databricks app for development with hot module replacement. 
+ +Examples: + # Interactive mode - select app from picker + databricks experimental dev app dev-remote + + # Start development server for a specific app + databricks experimental dev app dev-remote --name my-app + + # Use a custom client path + databricks experimental dev app dev-remote --name my-app --client-path ./frontend + + # Use a custom port + databricks experimental dev app dev-remote --name my-app --port 3000`, + Args: root.NoArgs, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + + // Validate client path early (before any network calls) + if _, err := os.Stat(clientPath); os.IsNotExist(err) { + return fmt.Errorf("client directory not found: %s", clientPath) + } + + // Check if port is already in use + if isViteReady(port) { + return fmt.Errorf("port %d is already in use; try --port ", port) + } + + w := cmdctx.WorkspaceClient(ctx) + + // Resolve app name with priority: flag > bundle config > prompt + if appName == "" { + // Try to detect from bundle config + appName = detectAppNameFromBundle() + if appName != "" { + cmdio.LogString(ctx, fmt.Sprintf("Using app '%s' from bundle configuration", appName)) + } + } + + if appName == "" { + // Fall back to interactive prompt + selected, err := PromptForAppSelection(ctx, "Select an app to connect to") + if err != nil { + return err + } + appName = selected + } + + bridge := NewViteBridge(ctx, w, appName, port) + + // Validate app exists and get domain before starting Vite + var appDomain *url.URL + err := RunWithSpinnerCtx(ctx, "Connecting to app...", func() error { + var domainErr error + appDomain, domainErr = bridge.GetAppDomain() + return domainErr + }) + if err != nil { + return fmt.Errorf("failed to get app domain: %w", err) + } + + viteCmd, viteErr, err := startViteDevServer(ctx, appDomain.String(), port) + if err != nil { + return err + } + + done := make(chan error, 1) + sigChan := make(chan os.Signal, 1) + signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM) + + go func() { + done <- bridge.Start() + }() + + select { + case err := <-viteErr: + bridge.Stop() + <-done + return err + case err := <-done: + cmdio.LogString(ctx, "Bridge stopped") + if viteCmd.Process != nil { + _ = viteCmd.Process.Signal(os.Interrupt) + <-viteErr + } + return err + case <-sigChan: + cmdio.LogString(ctx, "\nšŸ›‘ Shutting down...") + bridge.Stop() + <-done + if viteCmd.Process != nil { + if err := viteCmd.Process.Signal(os.Interrupt); err != nil { + cmdio.LogString(ctx, fmt.Sprintf("Failed to interrupt Vite: %v", err)) + _ = viteCmd.Process.Kill() + } + <-viteErr + } + return nil + } + }, + } + + cmd.Flags().StringVar(&appName, "name", "", "Name of the app to connect to (prompts if not provided)") + cmd.Flags().StringVar(&clientPath, "client-path", "./client", "Path to the Vite client directory") + cmd.Flags().IntVar(&port, "port", vitePort, "Port to run the Vite server on") + + return cmd +} diff --git a/experimental/dev/cmd/app/dev_remote_test.go b/experimental/dev/cmd/app/dev_remote_test.go new file mode 100644 index 0000000000..cb0c1ec72d --- /dev/null +++ b/experimental/dev/cmd/app/dev_remote_test.go @@ -0,0 +1,90 @@ +package app + +import ( + "context" + "net" + "os" + "testing" + "time" + + "github.com/databricks/cli/libs/cmdio" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestIsViteReady(t *testing.T) { + t.Run("vite not running", func(t *testing.T) { + // Assuming nothing is running on port 5173 + ready := isViteReady(5173) + 
assert.False(t, ready) + }) + + t.Run("vite is running", func(t *testing.T) { + // Start a mock server on the Vite port + listener, err := net.Listen("tcp", "localhost:5173") + require.NoError(t, err) + defer listener.Close() + + // Accept connections in the background + go func() { + for { + conn, err := listener.Accept() + if err != nil { + return + } + conn.Close() + } + }() + + // Give the listener a moment to start + time.Sleep(50 * time.Millisecond) + + ready := isViteReady(5173) + assert.True(t, ready) + }) +} + +func TestViteServerScriptContent(t *testing.T) { + // Verify the embedded script is not empty + assert.NotEmpty(t, viteServerScript) + + // Verify it's a JavaScript file with expected content + assert.Contains(t, string(viteServerScript), "startViteServer") +} + +func TestStartViteDevServerNoNode(t *testing.T) { + // Skip this test if node is not available or in CI environments + if os.Getenv("CI") != "" { + t.Skip("Skipping node-dependent test in CI") + } + + ctx := context.Background() + ctx = cmdio.MockDiscard(ctx) + + // Create a temporary directory to act as project root + tmpDir := t.TempDir() + oldWd, err := os.Getwd() + require.NoError(t, err) + defer func() { _ = os.Chdir(oldWd) }() + + err = os.Chdir(tmpDir) + require.NoError(t, err) + + // Create a client directory + err = os.Mkdir("client", 0o755) + require.NoError(t, err) + + // Try to start Vite server with invalid app URL (will fail fast) + // This test mainly verifies the function signature and error handling + _, _, err = startViteDevServer(ctx, "", 5173) + assert.Error(t, err) +} + +func TestViteServerScriptEmbedded(t *testing.T) { + assert.NotEmpty(t, viteServerScript) + + scriptContent := string(viteServerScript) + assert.Contains(t, scriptContent, "startViteServer") + assert.Contains(t, scriptContent, "createServer") + assert.Contains(t, scriptContent, "queriesHMRPlugin") +} diff --git a/experimental/dev/cmd/app/features.go b/experimental/dev/cmd/app/features.go new file mode 100644 index 0000000000..768c8f167b --- /dev/null +++ b/experimental/dev/cmd/app/features.go @@ -0,0 +1,329 @@ +package app + +import ( + "fmt" + "os" + "path/filepath" + "regexp" + "strings" +) + +// FeatureDependency defines a prompt/input required by a feature. +type FeatureDependency struct { + ID string // e.g., "sql_warehouse_id" + FlagName string // CLI flag name, e.g., "warehouse-id" (maps to --warehouse-id) + Title string // e.g., "SQL Warehouse ID" + Description string // e.g., "Required for executing SQL queries" + Placeholder string + Required bool +} + +// FeatureResourceFiles defines paths to YAML fragment files for a feature's resources. +// Paths are relative to the template's features directory (e.g., "analytics/bundle_variables.yml"). +type FeatureResourceFiles struct { + BundleVariables string // Variables section for databricks.yml + BundleResources string // Resources section for databricks.yml (app resources) + TargetVariables string // Dev target variables section for databricks.yml + AppEnv string // Environment variables for app.yaml + DotEnv string // Environment variables for .env (development) + DotEnvExample string // Environment variables for .env.example +} + +// Feature represents an optional feature that can be added to an AppKit project. 
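+// Each feature is registered in AvailableFeatures and referenced by its ID.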
+type Feature struct { + ID string + Name string + Description string + PluginImport string + PluginUsage string + Dependencies []FeatureDependency + ResourceFiles FeatureResourceFiles +} + +// AvailableFeatures lists all features that can be selected when creating a project. +var AvailableFeatures = []Feature{ + { + ID: "analytics", + Name: "Analytics", + Description: "SQL analytics with charts and dashboards", + PluginImport: "analytics", + PluginUsage: "analytics()", + Dependencies: []FeatureDependency{ + { + ID: "sql_warehouse_id", + FlagName: "warehouse-id", + Title: "SQL Warehouse ID", + Description: "required for SQL queries", + Required: true, + }, + }, + ResourceFiles: FeatureResourceFiles{ + BundleVariables: "analytics/bundle_variables.yml", + BundleResources: "analytics/bundle_resources.yml", + TargetVariables: "analytics/target_variables.yml", + AppEnv: "analytics/app_env.yml", + DotEnv: "analytics/dotenv.yml", + DotEnvExample: "analytics/dotenv_example.yml", + }, + }, +} + +var featureByID = func() map[string]Feature { + m := make(map[string]Feature, len(AvailableFeatures)) + for _, f := range AvailableFeatures { + m[f.ID] = f + } + return m +}() + +// featureByPluginImport maps plugin import names to features. +var featureByPluginImport = func() map[string]Feature { + m := make(map[string]Feature, len(AvailableFeatures)) + for _, f := range AvailableFeatures { + if f.PluginImport != "" { + m[f.PluginImport] = f + } + } + return m +}() + +// pluginPattern matches plugin function calls dynamically built from AvailableFeatures. +// Matches patterns like: analytics(), genie(), oauth(), etc. +var pluginPattern = func() *regexp.Regexp { + var plugins []string + for _, f := range AvailableFeatures { + if f.PluginImport != "" { + plugins = append(plugins, regexp.QuoteMeta(f.PluginImport)) + } + } + if len(plugins) == 0 { + // Fallback pattern that matches nothing + return regexp.MustCompile(`$^`) + } + // Build pattern: \b(plugin1|plugin2|plugin3)\s*\( + pattern := `\b(` + strings.Join(plugins, "|") + `)\s*\(` + return regexp.MustCompile(pattern) +}() + +// serverFilePaths lists common locations for the server entry file. +var serverFilePaths = []string{ + "src/server/index.ts", + "src/server/index.tsx", + "src/server.ts", + "server/index.ts", + "server/server.ts", + "server.ts", +} + +// TODO: We should come to an agreement if we want to do it like this, +// or maybe we should have an appkit.json manifest file in each project. +func DetectPluginsFromServer(templateDir string) ([]string, error) { + var content []byte + + for _, p := range serverFilePaths { + fullPath := filepath.Join(templateDir, p) + data, err := os.ReadFile(fullPath) + if err == nil { + content = data + break + } + } + + if content == nil { + return nil, nil // No server file found + } + + matches := pluginPattern.FindAllStringSubmatch(string(content), -1) + seen := make(map[string]bool) + var plugins []string + + for _, m := range matches { + plugin := m[1] + if !seen[plugin] { + seen[plugin] = true + plugins = append(plugins, plugin) + } + } + + return plugins, nil +} + +// GetPluginDependencies returns all dependencies required by the given plugin names. 
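+// Unknown plugin names are ignored and dependencies are de-duplicated by ID.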
+func GetPluginDependencies(pluginNames []string) []FeatureDependency { + seen := make(map[string]bool) + var deps []FeatureDependency + + for _, plugin := range pluginNames { + feature, ok := featureByPluginImport[plugin] + if !ok { + continue + } + for _, dep := range feature.Dependencies { + if !seen[dep.ID] { + seen[dep.ID] = true + deps = append(deps, dep) + } + } + } + + return deps +} + +// MapPluginsToFeatures maps plugin import names to feature IDs. +// This is used to convert detected plugins (e.g., "analytics") to feature IDs +// so that ApplyFeatures can properly retain feature-specific files. +func MapPluginsToFeatures(pluginNames []string) []string { + seen := make(map[string]bool) + var features []string + + for _, plugin := range pluginNames { + feature, ok := featureByPluginImport[plugin] + if ok && !seen[feature.ID] { + seen[feature.ID] = true + features = append(features, feature.ID) + } + } + + return features +} + +// HasFeaturesDirectory checks if the template uses the feature-fragment system. +func HasFeaturesDirectory(templateDir string) bool { + featuresDir := filepath.Join(templateDir, "features") + info, err := os.Stat(featuresDir) + return err == nil && info.IsDir() +} + +// ValidateFeatureIDs checks that all provided feature IDs are valid. +// Returns an error if any feature ID is unknown. +func ValidateFeatureIDs(featureIDs []string) error { + for _, id := range featureIDs { + if _, ok := featureByID[id]; !ok { + return fmt.Errorf("unknown feature: %q; available: %s", id, strings.Join(GetFeatureIDs(), ", ")) + } + } + return nil +} + +// ValidateFeatureDependencies checks that all required dependencies for the given features +// are provided in the flagValues map. Returns an error listing missing required flags. +func ValidateFeatureDependencies(featureIDs []string, flagValues map[string]string) error { + deps := CollectDependencies(featureIDs) + var missing []string + + for _, dep := range deps { + if !dep.Required { + continue + } + value, ok := flagValues[dep.FlagName] + if !ok || value == "" { + missing = append(missing, "--"+dep.FlagName) + } + } + + if len(missing) > 0 { + return fmt.Errorf("missing required flags for selected features: %s", strings.Join(missing, ", ")) + } + return nil +} + +// GetFeatureIDs returns a list of all available feature IDs for help text. +func GetFeatureIDs() []string { + ids := make([]string, len(AvailableFeatures)) + for i, f := range AvailableFeatures { + ids[i] = f.ID + } + return ids +} + +// BuildPluginStrings builds the plugin import and usage strings from selected feature IDs. +// Returns comma-separated imports and newline-separated usages. +func BuildPluginStrings(featureIDs []string) (pluginImport, pluginUsage string) { + if len(featureIDs) == 0 { + return "", "" + } + + var imports []string + var usages []string + + for _, id := range featureIDs { + feature, ok := featureByID[id] + if !ok || feature.PluginImport == "" { + continue + } + imports = append(imports, feature.PluginImport) + usages = append(usages, feature.PluginUsage) + } + + if len(imports) == 0 { + return "", "" + } + + // Join imports with comma (e.g., "analytics, trpc") + pluginImport = strings.Join(imports, ", ") + + // Join usages with newline and proper indentation + pluginUsage = strings.Join(usages, ",\n ") + + return pluginImport, pluginUsage +} + +// ApplyFeatures applies any post-copy modifications for selected features. +// This removes feature-specific directories if the feature is not selected. 
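+// Currently only the analytics feature needs cleanup: config/queries is removed when analytics is not selected.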
+func ApplyFeatures(projectDir string, featureIDs []string) error { + selectedSet := make(map[string]bool) + for _, id := range featureIDs { + selectedSet[id] = true + } + + // Remove analytics-specific files if analytics is not selected + if !selectedSet["analytics"] { + queriesDir := filepath.Join(projectDir, "config", "queries") + if err := os.RemoveAll(queriesDir); err != nil && !os.IsNotExist(err) { + return err + } + } + + return nil +} + +// CollectDependencies returns all unique dependencies required by the selected features. +func CollectDependencies(featureIDs []string) []FeatureDependency { + seen := make(map[string]bool) + var deps []FeatureDependency + + for _, id := range featureIDs { + feature, ok := featureByID[id] + if !ok { + continue + } + for _, dep := range feature.Dependencies { + if !seen[dep.ID] { + seen[dep.ID] = true + deps = append(deps, dep) + } + } + } + + return deps +} + +// CollectResourceFiles returns all resource file paths for the selected features. +func CollectResourceFiles(featureIDs []string) []FeatureResourceFiles { + var resources []FeatureResourceFiles + for _, id := range featureIDs { + feature, ok := featureByID[id] + if !ok { + continue + } + // Only include if at least one resource file is defined + rf := feature.ResourceFiles + if rf.BundleVariables != "" || rf.BundleResources != "" || + rf.TargetVariables != "" || rf.AppEnv != "" || + rf.DotEnv != "" || rf.DotEnvExample != "" { + resources = append(resources, rf) + } + } + + return resources +} diff --git a/experimental/dev/cmd/app/features_test.go b/experimental/dev/cmd/app/features_test.go new file mode 100644 index 0000000000..a3aaf3f0ab --- /dev/null +++ b/experimental/dev/cmd/app/features_test.go @@ -0,0 +1,453 @@ +package app + +import ( + "fmt" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidateFeatureIDs(t *testing.T) { + tests := []struct { + name string + featureIDs []string + expectError bool + errorMsg string + }{ + { + name: "valid feature - analytics", + featureIDs: []string{"analytics"}, + expectError: false, + }, + { + name: "empty feature list", + featureIDs: []string{}, + expectError: false, + }, + { + name: "nil feature list", + featureIDs: nil, + expectError: false, + }, + { + name: "unknown feature", + featureIDs: []string{"unknown-feature"}, + expectError: true, + errorMsg: "unknown feature", + }, + { + name: "mix of valid and invalid", + featureIDs: []string{"analytics", "invalid"}, + expectError: true, + errorMsg: "unknown feature", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := ValidateFeatureIDs(tt.featureIDs) + if tt.expectError { + require.Error(t, err) + assert.Contains(t, err.Error(), tt.errorMsg) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestValidateFeatureDependencies(t *testing.T) { + tests := []struct { + name string + featureIDs []string + flagValues map[string]string + expectError bool + errorMsg string + }{ + { + name: "analytics with warehouse provided", + featureIDs: []string{"analytics"}, + flagValues: map[string]string{"warehouse-id": "abc123"}, + expectError: false, + }, + { + name: "analytics without warehouse", + featureIDs: []string{"analytics"}, + flagValues: map[string]string{}, + expectError: true, + errorMsg: "--warehouse-id", + }, + { + name: "analytics with empty warehouse", + featureIDs: []string{"analytics"}, + flagValues: map[string]string{"warehouse-id": ""}, + expectError: true, + errorMsg: "--warehouse-id", + 
}, + { + name: "no features - no dependencies needed", + featureIDs: []string{}, + flagValues: map[string]string{}, + expectError: false, + }, + { + name: "unknown feature - gracefully ignored", + featureIDs: []string{"unknown"}, + flagValues: map[string]string{}, + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := ValidateFeatureDependencies(tt.featureIDs, tt.flagValues) + if tt.expectError { + require.Error(t, err) + assert.Contains(t, err.Error(), tt.errorMsg) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestGetFeatureIDs(t *testing.T) { + ids := GetFeatureIDs() + + assert.NotEmpty(t, ids) + assert.Contains(t, ids, "analytics") +} + +func TestBuildPluginStrings(t *testing.T) { + tests := []struct { + name string + featureIDs []string + expectedImport string + expectedUsage string + }{ + { + name: "no features", + featureIDs: []string{}, + expectedImport: "", + expectedUsage: "", + }, + { + name: "nil features", + featureIDs: nil, + expectedImport: "", + expectedUsage: "", + }, + { + name: "analytics feature", + featureIDs: []string{"analytics"}, + expectedImport: "analytics", + expectedUsage: "analytics()", + }, + { + name: "unknown feature - ignored", + featureIDs: []string{"unknown"}, + expectedImport: "", + expectedUsage: "", + }, + { + name: "mix of known and unknown", + featureIDs: []string{"analytics", "unknown"}, + expectedImport: "analytics", + expectedUsage: "analytics()", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + importStr, usageStr := BuildPluginStrings(tt.featureIDs) + assert.Equal(t, tt.expectedImport, importStr) + assert.Equal(t, tt.expectedUsage, usageStr) + }) + } +} + +func TestCollectDependencies(t *testing.T) { + tests := []struct { + name string + featureIDs []string + expectedDeps int + expectedIDs []string + }{ + { + name: "no features", + featureIDs: []string{}, + expectedDeps: 0, + expectedIDs: nil, + }, + { + name: "analytics feature", + featureIDs: []string{"analytics"}, + expectedDeps: 1, + expectedIDs: []string{"sql_warehouse_id"}, + }, + { + name: "unknown feature", + featureIDs: []string{"unknown"}, + expectedDeps: 0, + expectedIDs: nil, + }, + { + name: "duplicate features - deduped deps", + featureIDs: []string{"analytics", "analytics"}, + expectedDeps: 1, + expectedIDs: []string{"sql_warehouse_id"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + deps := CollectDependencies(tt.featureIDs) + assert.Len(t, deps, tt.expectedDeps) + + if tt.expectedIDs != nil { + for i, expectedID := range tt.expectedIDs { + assert.Equal(t, expectedID, deps[i].ID) + } + } + }) + } +} + +func TestCollectResourceFiles(t *testing.T) { + tests := []struct { + name string + featureIDs []string + expectedResources int + }{ + { + name: "no features", + featureIDs: []string{}, + expectedResources: 0, + }, + { + name: "analytics feature", + featureIDs: []string{"analytics"}, + expectedResources: 1, + }, + { + name: "unknown feature", + featureIDs: []string{"unknown"}, + expectedResources: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resources := CollectResourceFiles(tt.featureIDs) + assert.Len(t, resources, tt.expectedResources) + + if tt.expectedResources > 0 && tt.featureIDs[0] == "analytics" { + assert.NotEmpty(t, resources[0].BundleVariables) + assert.NotEmpty(t, resources[0].BundleResources) + } + }) + } +} + +func TestDetectPluginsFromServer(t *testing.T) { + tests := []struct { + name string + 
serverContent string + expectedPlugins []string + }{ + { + name: "analytics plugin", + serverContent: `import { createApp, server, analytics } from '@databricks/appkit'; +createApp({ + plugins: [ + server(), + analytics(), + ], +}).catch(console.error);`, + expectedPlugins: []string{"analytics"}, + }, + { + name: "analytics with other plugins not in AvailableFeatures", + serverContent: `import { createApp, server, analytics, genie } from '@databricks/appkit'; +createApp({ + plugins: [ + server(), + analytics(), + genie(), + ], +}).catch(console.error);`, + expectedPlugins: []string{"analytics"}, // Only analytics is detected since genie is not in AvailableFeatures + }, + { + name: "no recognized plugins", + serverContent: `import { createApp, server } from '@databricks/appkit';`, + expectedPlugins: nil, + }, + { + name: "plugin not in AvailableFeatures", + serverContent: `createApp({ + plugins: [oauth()], +});`, + expectedPlugins: nil, // oauth is not in AvailableFeatures, so not detected + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create temp dir with server file + tempDir := t.TempDir() + serverDir := tempDir + "/src/server" + require.NoError(t, os.MkdirAll(serverDir, 0o755)) + require.NoError(t, os.WriteFile(serverDir+"/index.ts", []byte(tt.serverContent), 0o644)) + + plugins, err := DetectPluginsFromServer(tempDir) + require.NoError(t, err) + assert.Equal(t, tt.expectedPlugins, plugins) + }) + } +} + +func TestDetectPluginsFromServerAlternatePath(t *testing.T) { + // Test server/server.ts path (common in some templates) + tempDir := t.TempDir() + serverDir := tempDir + "/server" + require.NoError(t, os.MkdirAll(serverDir, 0o755)) + + serverContent := `import { createApp, server, analytics } from '@databricks/appkit'; +createApp({ + plugins: [ + server(), + analytics(), + ], +}).catch(console.error);` + + require.NoError(t, os.WriteFile(serverDir+"/server.ts", []byte(serverContent), 0o644)) + + plugins, err := DetectPluginsFromServer(tempDir) + require.NoError(t, err) + assert.Equal(t, []string{"analytics"}, plugins) +} + +func TestDetectPluginsFromServerNoFile(t *testing.T) { + tempDir := t.TempDir() + plugins, err := DetectPluginsFromServer(tempDir) + require.NoError(t, err) + assert.Nil(t, plugins) +} + +func TestGetPluginDependencies(t *testing.T) { + tests := []struct { + name string + pluginNames []string + expectedDeps []string + }{ + { + name: "analytics plugin", + pluginNames: []string{"analytics"}, + expectedDeps: []string{"sql_warehouse_id"}, + }, + { + name: "unknown plugin", + pluginNames: []string{"server"}, + expectedDeps: nil, + }, + { + name: "empty plugins", + pluginNames: []string{}, + expectedDeps: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + deps := GetPluginDependencies(tt.pluginNames) + if tt.expectedDeps == nil { + assert.Empty(t, deps) + } else { + assert.Len(t, deps, len(tt.expectedDeps)) + for i, dep := range deps { + assert.Equal(t, tt.expectedDeps[i], dep.ID) + } + } + }) + } +} + +func TestHasFeaturesDirectory(t *testing.T) { + // Test with features directory + tempDir := t.TempDir() + require.NoError(t, os.MkdirAll(tempDir+"/features", 0o755)) + assert.True(t, HasFeaturesDirectory(tempDir)) + + // Test without features directory + tempDir2 := t.TempDir() + assert.False(t, HasFeaturesDirectory(tempDir2)) +} + +func TestMapPluginsToFeatures(t *testing.T) { + tests := []struct { + name string + pluginNames []string + expectedFeatures []string + }{ + { + name: "analytics plugin maps 
to analytics feature", + pluginNames: []string{"analytics"}, + expectedFeatures: []string{"analytics"}, + }, + { + name: "unknown plugin", + pluginNames: []string{"server", "unknown"}, + expectedFeatures: nil, + }, + { + name: "empty plugins", + pluginNames: []string{}, + expectedFeatures: nil, + }, + { + name: "duplicate plugins", + pluginNames: []string{"analytics", "analytics"}, + expectedFeatures: []string{"analytics"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + features := MapPluginsToFeatures(tt.pluginNames) + if tt.expectedFeatures == nil { + assert.Empty(t, features) + } else { + assert.Equal(t, tt.expectedFeatures, features) + } + }) + } +} + +func TestPluginPatternGeneration(t *testing.T) { + // Test that the plugin pattern is dynamically generated from AvailableFeatures + // This ensures new features with PluginImport are automatically detected + + // Get all plugin imports from AvailableFeatures + var expectedPlugins []string + for _, f := range AvailableFeatures { + if f.PluginImport != "" { + expectedPlugins = append(expectedPlugins, f.PluginImport) + } + } + + // Test that each plugin is matched by the pattern + for _, plugin := range expectedPlugins { + testCode := fmt.Sprintf("plugins: [%s()]", plugin) + matches := pluginPattern.FindAllStringSubmatch(testCode, -1) + assert.NotEmpty(t, matches, "Pattern should match plugin: %s", plugin) + assert.Equal(t, plugin, matches[0][1], "Captured group should be plugin name: %s", plugin) + } + + // Test that non-plugin function calls are not matched + testCode := "const x = someOtherFunction()" + matches := pluginPattern.FindAllStringSubmatch(testCode, -1) + assert.Empty(t, matches, "Pattern should not match non-plugin functions") +} diff --git a/experimental/dev/cmd/app/import.go b/experimental/dev/cmd/app/import.go new file mode 100644 index 0000000000..9945ef3571 --- /dev/null +++ b/experimental/dev/cmd/app/import.go @@ -0,0 +1,265 @@ +package app + +import ( + "context" + "errors" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "strings" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/apps" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/spf13/cobra" + "golang.org/x/sync/errgroup" +) + +func newImportCmd() *cobra.Command { + var ( + appName string + force bool + outputDir string + ) + + cmd := &cobra.Command{ + Use: "import", + Short: "Import app source code from Databricks workspace to local disk", + Long: `Import app source code from Databricks workspace to local disk. + +Downloads the source code of a deployed Databricks app to a local directory +named after the app. 
+ +Examples: + # Interactive mode - select app from picker + databricks experimental dev app import + + # Import a specific app's source code + databricks experimental dev app import --name my-app + + # Import to a specific directory + databricks experimental dev app import --name my-app --output-dir ./projects + + # Force overwrite existing files + databricks experimental dev app import --name my-app --force`, + Args: root.NoArgs, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + + // Prompt for app name if not provided + if appName == "" { + selected, err := PromptForAppSelection(ctx, "Select an app to import") + if err != nil { + return err + } + appName = selected + } + + return runImport(ctx, importOptions{ + appName: appName, + force: force, + outputDir: outputDir, + }) + }, + } + + cmd.Flags().StringVar(&appName, "name", "", "Name of the app to import (prompts if not provided)") + cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the imported app to") + cmd.Flags().BoolVar(&force, "force", false, "Overwrite existing files") + + return cmd +} + +type importOptions struct { + appName string + force bool + outputDir string +} + +func runImport(ctx context.Context, opts importOptions) error { + w := cmdctx.WorkspaceClient(ctx) + + // Step 1: Fetch the app + var app *apps.App + err := RunWithSpinnerCtx(ctx, fmt.Sprintf("Fetching app '%s'...", opts.appName), func() error { + var fetchErr error + app, fetchErr = w.Apps.Get(ctx, apps.GetAppRequest{Name: opts.appName}) + return fetchErr + }) + if err != nil { + return fmt.Errorf("failed to get app: %w", err) + } + + // Step 2: Check if the app has a source code path + if app.DefaultSourceCodePath == "" { + return errors.New("app has no source code path - it may not have been deployed yet") + } + + cmdio.LogString(ctx, "Source code path: "+app.DefaultSourceCodePath) + + // Step 3: Create output directory + destDir := opts.appName + if opts.outputDir != "" { + destDir = filepath.Join(opts.outputDir, opts.appName) + } + if err := ensureOutputDir(destDir, opts.force); err != nil { + return err + } + + // Step 4: Download files with spinner + var fileCount int + err = RunWithSpinnerCtx(ctx, "Downloading files...", func() error { + var downloadErr error + fileCount, downloadErr = downloadDirectory(ctx, w, app.DefaultSourceCodePath, destDir, opts.force) + return downloadErr + }) + if err != nil { + return fmt.Errorf("failed to download files for app '%s': %w", opts.appName, err) + } + + // Get absolute path for display + absDestDir, err := filepath.Abs(destDir) + if err != nil { + absDestDir = destDir + } + + // Step 5: Run npm install if package.json exists + packageJSONPath := filepath.Join(destDir, "package.json") + if _, err := os.Stat(packageJSONPath); err == nil { + if err := runNpmInstallInDir(ctx, destDir); err != nil { + cmdio.LogString(ctx, fmt.Sprintf("⚠ npm install failed: %v", err)) + cmdio.LogString(ctx, " You can run 'npm install' manually in the project directory.") + } + } + + // Step 6: Detect and configure DABs + bundlePath := filepath.Join(destDir, "databricks.yml") + if _, err := os.Stat(bundlePath); err == nil { + cmdio.LogString(ctx, "") + cmdio.LogString(ctx, "Detected Databricks Asset Bundle configuration.") + cmdio.LogString(ctx, "Run 'databricks bundle validate' to verify the bundle is configured correctly.") + } + + // Show success message with next steps + PrintSuccess(opts.appName, absDestDir, fileCount, true) + + return nil +} + 
+// runNpmInstallInDir runs npm install in the specified directory. +func runNpmInstallInDir(ctx context.Context, dir string) error { + if _, err := exec.LookPath("npm"); err != nil { + return errors.New("npm not found: please install Node.js") + } + + return RunWithSpinnerCtx(ctx, "Installing dependencies...", func() error { + cmd := exec.CommandContext(ctx, "npm", "install") + cmd.Dir = dir + cmd.Stdout = nil + cmd.Stderr = nil + return cmd.Run() + }) +} + +// ensureOutputDir creates the output directory or checks if it's safe to use. +func ensureOutputDir(dir string, force bool) error { + info, err := os.Stat(dir) + if err == nil { + if !info.IsDir() { + return fmt.Errorf("%s exists but is not a directory", dir) + } + if !force { + return fmt.Errorf("directory %s already exists (use --force to overwrite)", dir) + } + } else if !os.IsNotExist(err) { + return err + } + + return os.MkdirAll(dir, 0o755) +} + +// downloadDirectory recursively downloads all files from a workspace path to a local directory. +func downloadDirectory(ctx context.Context, w *databricks.WorkspaceClient, remotePath, localDir string, force bool) (int, error) { + // List all files recursively + objects, err := w.Workspace.RecursiveList(ctx, remotePath) + if err != nil { + return 0, fmt.Errorf("failed to list workspace files: %w", err) + } + + // Filter out directories, keep only files + var files []workspace.ObjectInfo + for _, obj := range objects { + if obj.ObjectType != workspace.ObjectTypeDirectory { + files = append(files, obj) + } + } + + if len(files) == 0 { + return 0, errors.New("no files found in app source code path") + } + + // Download files in parallel + errs, errCtx := errgroup.WithContext(ctx) + errs.SetLimit(10) // Limit concurrent downloads + + for _, file := range files { + errs.Go(func() error { + return downloadFile(errCtx, w, file, remotePath, localDir, force) + }) + } + + if err := errs.Wait(); err != nil { + return 0, err + } + + return len(files), nil +} + +// downloadFile downloads a single file from the workspace to the local directory. 
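+// The file's path relative to remotePath is preserved under localDir; existing files are only overwritten when force is set.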
+func downloadFile(ctx context.Context, w *databricks.WorkspaceClient, file workspace.ObjectInfo, remotePath, localDir string, force bool) error { + // Calculate relative path from the remote root + relPath := strings.TrimPrefix(file.Path, remotePath) + relPath = strings.TrimPrefix(relPath, "/") + + // Determine local file path + localPath := filepath.Join(localDir, relPath) + + // Check if file exists + if !force { + if _, err := os.Stat(localPath); err == nil { + return fmt.Errorf("file %s already exists (use --force to overwrite)", localPath) + } + } + + // Create parent directories + if err := os.MkdirAll(filepath.Dir(localPath), 0o755); err != nil { + return fmt.Errorf("failed to create directory for %s: %w", localPath, err) + } + + // Download file content + reader, err := w.Workspace.Download(ctx, file.Path) + if err != nil { + return fmt.Errorf("failed to download %s: %w", file.Path, err) + } + defer reader.Close() + + // Create local file + localFile, err := os.Create(localPath) + if err != nil { + return fmt.Errorf("failed to create %s: %w", localPath, err) + } + defer localFile.Close() + + // Copy content + if _, err := io.Copy(localFile, reader); err != nil { + return fmt.Errorf("failed to write %s: %w", localPath, err) + } + + return nil +} diff --git a/experimental/dev/cmd/app/init.go b/experimental/dev/cmd/app/init.go new file mode 100644 index 0000000000..a457770bf3 --- /dev/null +++ b/experimental/dev/cmd/app/init.go @@ -0,0 +1,1024 @@ +package app + +import ( + "bytes" + "context" + "errors" + "fmt" + "os" + "os/exec" + "path/filepath" + "strings" + "text/template" + + "github.com/charmbracelet/huh" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/log" + "github.com/spf13/cobra" +) + +const ( + templatePathEnvVar = "DATABRICKS_APPKIT_TEMPLATE_PATH" + // TODO: Change this with appkit main once ready. + defaultTemplateURL = "https://github.com/databricks/appkit/tree/add-generic-template/template" +) + +func newInitCmd() *cobra.Command { + var ( + templatePath string + branch string + name string + warehouseID string + description string + outputDir string + features []string + deploy bool + run string + ) + + cmd := &cobra.Command{ + Use: "init", + Short: "Initialize a new AppKit application from a template", + Long: `Initialize a new AppKit application from a template. + +When run without arguments, uses the default AppKit template and an interactive prompt +guides you through the setup. When run with --name, runs in non-interactive mode +(all required flags must be provided). 
+ +Examples: + # Interactive mode with default template (recommended) + databricks experimental dev app init + + # Non-interactive with flags + databricks experimental dev app init --name my-app + + # With analytics feature (requires --warehouse-id) + databricks experimental dev app init --name my-app --features=analytics --warehouse-id=abc123 + + # Create, deploy, and run with dev-remote + databricks experimental dev app init --name my-app --deploy --run=dev-remote + + # With a custom template from a local path + databricks experimental dev app init --template /path/to/template --name my-app + + # With a GitHub URL + databricks experimental dev app init --template https://github.com/user/repo --name my-app + +Feature dependencies: + Some features require additional flags: + - analytics: requires --warehouse-id (SQL Warehouse ID) + +Environment variables: + DATABRICKS_APPKIT_TEMPLATE_PATH Override the default template source`, + Args: cobra.NoArgs, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + return runCreate(ctx, createOptions{ + templatePath: templatePath, + branch: branch, + name: name, + warehouseID: warehouseID, + description: description, + outputDir: outputDir, + features: features, + deploy: deploy, + run: run, + }) + }, + } + + cmd.Flags().StringVar(&templatePath, "template", "", "Template path (local directory or GitHub URL)") + cmd.Flags().StringVar(&branch, "branch", "", "Git branch or tag (for GitHub templates)") + cmd.Flags().StringVar(&name, "name", "", "Project name (prompts if not provided)") + cmd.Flags().StringVar(&warehouseID, "warehouse-id", "", "SQL warehouse ID") + cmd.Flags().StringVar(&description, "description", "", "App description") + cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the project to") + cmd.Flags().StringSliceVar(&features, "features", nil, "Features to enable (comma-separated). Available: "+strings.Join(GetFeatureIDs(), ", ")) + cmd.Flags().BoolVar(&deploy, "deploy", false, "Deploy the app after creation") + cmd.Flags().StringVar(&run, "run", "", "Run the app after creation (none, dev, dev-remote)") + + return cmd +} + +type createOptions struct { + templatePath string + branch string + name string + warehouseID string + description string + outputDir string + features []string + deploy bool + run string +} + +// templateVars holds the variables for template substitution. +type templateVars struct { + ProjectName string + SQLWarehouseID string + AppDescription string + Profile string + WorkspaceHost string + PluginImport string + PluginUsage string + // Feature resource fragments (aggregated from selected features) + BundleVariables string + BundleResources string + TargetVariables string + AppEnv string + DotEnv string + DotEnvExample string +} + +// featureFragments holds aggregated content from feature resource files. +type featureFragments struct { + BundleVariables string + BundleResources string + TargetVariables string + AppEnv string + DotEnv string + DotEnvExample string +} + +// parseDeployAndRunFlags parses the deploy and run flag values into typed values. 
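+// Valid --run values are "none" (or empty), "dev", and "dev-remote"; any other value is rejected.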
+func parseDeployAndRunFlags(deploy bool, run string) (bool, RunMode, error) { + var runMode RunMode + switch run { + case "dev": + runMode = RunModeDev + case "dev-remote": + runMode = RunModeDevRemote + case "", "none": + runMode = RunModeNone + default: + return false, RunModeNone, fmt.Errorf("invalid --run value: %q (must be none, dev, or dev-remote)", run) + } + return deploy, runMode, nil +} + +// promptForFeaturesAndDeps prompts for features and their dependencies. +// Used when the template uses the feature-fragment system. +func promptForFeaturesAndDeps(ctx context.Context, preSelectedFeatures []string) (*CreateProjectConfig, error) { + config := &CreateProjectConfig{ + Dependencies: make(map[string]string), + Features: preSelectedFeatures, + } + theme := appkitTheme() + + // Step 1: Feature selection (skip if features already provided via flag) + if len(config.Features) == 0 && len(AvailableFeatures) > 0 { + options := make([]huh.Option[string], 0, len(AvailableFeatures)) + for _, f := range AvailableFeatures { + label := f.Name + " - " + f.Description + options = append(options, huh.NewOption(label, f.ID)) + } + + err := huh.NewMultiSelect[string](). + Title("Select features"). + Description("space to toggle, enter to confirm"). + Options(options...). + Value(&config.Features). + WithTheme(theme). + Run() + if err != nil { + return nil, err + } + } + + // Step 2: Prompt for feature dependencies + deps := CollectDependencies(config.Features) + for _, dep := range deps { + // Special handling for SQL warehouse - show picker instead of text input + if dep.ID == "sql_warehouse_id" { + warehouseID, err := PromptForWarehouse(ctx) + if err != nil { + return nil, err + } + config.Dependencies[dep.ID] = warehouseID + continue + } + + var value string + description := dep.Description + if !dep.Required { + description += " (optional)" + } + + input := huh.NewInput(). + Title(dep.Title). + Description(description). + Placeholder(dep.Placeholder). + Value(&value) + + if dep.Required { + input = input.Validate(func(s string) error { + if s == "" { + return errors.New("this field is required") + } + return nil + }) + } + + if err := input.WithTheme(theme).Run(); err != nil { + return nil, err + } + config.Dependencies[dep.ID] = value + } + + // Step 3: Description + config.Description = DefaultAppDescription + err := huh.NewInput(). + Title("Description"). + Placeholder(DefaultAppDescription). + Value(&config.Description). + WithTheme(theme). + Run() + if err != nil { + return nil, err + } + + if config.Description == "" { + config.Description = DefaultAppDescription + } + + // Step 4: Deploy and run options + config.Deploy, config.RunMode, err = PromptForDeployAndRun() + if err != nil { + return nil, err + } + + return config, nil +} + +// loadFeatureFragments reads and aggregates resource fragments for selected features. +// templateDir is the path to the template directory (containing the "features" subdirectory). 
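+// Fragment contents are rendered with substituteVars before aggregation; missing fragment files contribute nothing.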
+func loadFeatureFragments(templateDir string, featureIDs []string, vars templateVars) (*featureFragments, error) { + featuresDir := filepath.Join(templateDir, "features") + + resourceFiles := CollectResourceFiles(featureIDs) + if len(resourceFiles) == 0 { + return &featureFragments{}, nil + } + + var bundleVarsList, bundleResList, targetVarsList, appEnvList, dotEnvList, dotEnvExampleList []string + + for _, rf := range resourceFiles { + if rf.BundleVariables != "" { + content, err := readAndSubstitute(filepath.Join(featuresDir, rf.BundleVariables), vars) + if err != nil { + return nil, fmt.Errorf("read bundle variables: %w", err) + } + bundleVarsList = append(bundleVarsList, content) + } + if rf.BundleResources != "" { + content, err := readAndSubstitute(filepath.Join(featuresDir, rf.BundleResources), vars) + if err != nil { + return nil, fmt.Errorf("read bundle resources: %w", err) + } + bundleResList = append(bundleResList, content) + } + if rf.TargetVariables != "" { + content, err := readAndSubstitute(filepath.Join(featuresDir, rf.TargetVariables), vars) + if err != nil { + return nil, fmt.Errorf("read target variables: %w", err) + } + targetVarsList = append(targetVarsList, content) + } + if rf.AppEnv != "" { + content, err := readAndSubstitute(filepath.Join(featuresDir, rf.AppEnv), vars) + if err != nil { + return nil, fmt.Errorf("read app env: %w", err) + } + appEnvList = append(appEnvList, content) + } + if rf.DotEnv != "" { + content, err := readAndSubstitute(filepath.Join(featuresDir, rf.DotEnv), vars) + if err != nil { + return nil, fmt.Errorf("read dotenv: %w", err) + } + dotEnvList = append(dotEnvList, content) + } + if rf.DotEnvExample != "" { + content, err := readAndSubstitute(filepath.Join(featuresDir, rf.DotEnvExample), vars) + if err != nil { + return nil, fmt.Errorf("read dotenv example: %w", err) + } + dotEnvExampleList = append(dotEnvExampleList, content) + } + } + + // Join fragments (they already have proper indentation from the fragment files) + return &featureFragments{ + BundleVariables: strings.TrimSuffix(strings.Join(bundleVarsList, ""), "\n"), + BundleResources: strings.TrimSuffix(strings.Join(bundleResList, ""), "\n"), + TargetVariables: strings.TrimSuffix(strings.Join(targetVarsList, ""), "\n"), + AppEnv: strings.TrimSuffix(strings.Join(appEnvList, ""), "\n"), + DotEnv: strings.TrimSuffix(strings.Join(dotEnvList, ""), "\n"), + DotEnvExample: strings.TrimSuffix(strings.Join(dotEnvExampleList, ""), "\n"), + }, nil +} + +// readAndSubstitute reads a file and applies variable substitution. +func readAndSubstitute(path string, vars templateVars) (string, error) { + content, err := os.ReadFile(path) + if err != nil { + if os.IsNotExist(err) { + return "", nil // Fragment file doesn't exist, skip it + } + return "", err + } + return substituteVars(string(content), vars), nil +} + +// parseGitHubURL extracts the repository URL, subdirectory, and branch from a GitHub URL. 
+// Input: https://github.com/user/repo/tree/main/templates/starter +// Output: repoURL="https://github.com/user/repo", subdir="templates/starter", branch="main" +func parseGitHubURL(url string) (repoURL, subdir, branch string) { + // Remove trailing slash + url = strings.TrimSuffix(url, "/") + + // Check for /tree/branch/path pattern + if idx := strings.Index(url, "/tree/"); idx != -1 { + repoURL = url[:idx] + rest := url[idx+6:] // Skip "/tree/" + + // Split into branch and path + parts := strings.SplitN(rest, "/", 2) + branch = parts[0] + if len(parts) > 1 { + subdir = parts[1] + } + return repoURL, subdir, branch + } + + // No /tree/ pattern, just a repo URL + return url, "", "" +} + +// cloneRepo clones a git repository to a temporary directory. +func cloneRepo(ctx context.Context, repoURL, branch string) (string, error) { + tempDir, err := os.MkdirTemp("", "appkit-template-*") + if err != nil { + return "", fmt.Errorf("create temp dir: %w", err) + } + + args := []string{"clone", "--depth", "1"} + if branch != "" { + args = append(args, "--branch", branch) + } + args = append(args, repoURL, tempDir) + + cmd := exec.CommandContext(ctx, "git", args...) + cmd.Stdout = nil + var stderr bytes.Buffer + cmd.Stderr = &stderr + if err := cmd.Run(); err != nil { + os.RemoveAll(tempDir) + if stderr.Len() > 0 { + return "", fmt.Errorf("git clone failed: %s: %w", strings.TrimSpace(stderr.String()), err) + } + return "", fmt.Errorf("git clone failed: %w", err) + } + + return tempDir, nil +} + +// resolveTemplate resolves a template path, handling both local paths and GitHub URLs. +// Returns the local path to use, a cleanup function (for temp dirs), and any error. +func resolveTemplate(ctx context.Context, templatePath, branch string) (localPath string, cleanup func(), err error) { + // Case 1: Local path - return as-is + if !strings.HasPrefix(templatePath, "https://") { + return templatePath, nil, nil + } + + // Case 2: GitHub URL - parse and clone + repoURL, subdir, urlBranch := parseGitHubURL(templatePath) + if branch == "" { + branch = urlBranch // Use branch from URL if not overridden by flag + } + + // Clone to temp dir with spinner + var tempDir string + err = RunWithSpinnerCtx(ctx, "Cloning template...", func() error { + var cloneErr error + tempDir, cloneErr = cloneRepo(ctx, repoURL, branch) + return cloneErr + }) + if err != nil { + return "", nil, err + } + + cleanup = func() { os.RemoveAll(tempDir) } + + // Return path to subdirectory if specified + if subdir != "" { + return filepath.Join(tempDir, subdir), cleanup, nil + } + return tempDir, cleanup, nil +} + +func runCreate(ctx context.Context, opts createOptions) error { + var selectedFeatures []string + var dependencies map[string]string + var shouldDeploy bool + var runMode RunMode + isInteractive := cmdio.IsPromptSupported(ctx) + + // Use features from flags if provided + if len(opts.features) > 0 { + selectedFeatures = opts.features + } + + // Resolve template path (supports local paths and GitHub URLs) + templateSrc := opts.templatePath + if templateSrc == "" { + templateSrc = os.Getenv(templatePathEnvVar) + } + if templateSrc == "" { + // Use default template from GitHub + templateSrc = defaultTemplateURL + } + + // Step 1: Get project name first (needed before we can check destination) + if opts.name == "" { + if !isInteractive { + return errors.New("--name is required in non-interactive mode") + } + name, err := PromptForProjectName() + if err != nil { + return err + } + opts.name = name + } + + // Validate project name + if 
err := ValidateProjectName(opts.name); err != nil { + return err + } + + // Step 2: Resolve template (handles GitHub URLs by cloning) + resolvedPath, cleanup, err := resolveTemplate(ctx, templateSrc, opts.branch) + if err != nil { + return err + } + if cleanup != nil { + defer cleanup() + } + + // Check for generic subdirectory first (default for multi-template repos) + templateDir := filepath.Join(resolvedPath, "generic") + if _, err := os.Stat(templateDir); os.IsNotExist(err) { + // Fall back to the provided path directly + templateDir = resolvedPath + if _, err := os.Stat(templateDir); os.IsNotExist(err) { + return fmt.Errorf("template not found at %s (also checked %s/generic)", resolvedPath, resolvedPath) + } + } + + // Step 3: Determine template type and gather configuration + usesFeatureFragments := HasFeaturesDirectory(templateDir) + + if usesFeatureFragments { + // Feature-fragment template: prompt for features and their dependencies + if isInteractive && len(selectedFeatures) == 0 { + // Need to prompt for features (but we already have the name) + config, err := promptForFeaturesAndDeps(ctx, selectedFeatures) + if err != nil { + return err + } + selectedFeatures = config.Features + dependencies = config.Dependencies + if config.Description != "" { + opts.description = config.Description + } + shouldDeploy = config.Deploy + runMode = config.RunMode + + // Get warehouse from dependencies if provided + if wh, ok := dependencies["sql_warehouse_id"]; ok && wh != "" { + opts.warehouseID = wh + } + } else { + // Non-interactive or features provided via flag + flagValues := map[string]string{ + "warehouse-id": opts.warehouseID, + } + if len(selectedFeatures) > 0 { + if err := ValidateFeatureDependencies(selectedFeatures, flagValues); err != nil { + return err + } + } + dependencies = make(map[string]string) + if opts.warehouseID != "" { + dependencies["sql_warehouse_id"] = opts.warehouseID + } + var err error + shouldDeploy, runMode, err = parseDeployAndRunFlags(opts.deploy, opts.run) + if err != nil { + return err + } + } + + // Validate feature IDs + if err := ValidateFeatureIDs(selectedFeatures); err != nil { + return err + } + } else { + // Pre-assembled template: detect plugins and prompt for their dependencies + detectedPlugins, err := DetectPluginsFromServer(templateDir) + if err != nil { + return fmt.Errorf("failed to detect plugins: %w", err) + } + + log.Debugf(ctx, "Detected plugins: %v", detectedPlugins) + + // Map detected plugins to feature IDs for ApplyFeatures + selectedFeatures = MapPluginsToFeatures(detectedPlugins) + log.Debugf(ctx, "Mapped to features: %v", selectedFeatures) + + pluginDeps := GetPluginDependencies(detectedPlugins) + + log.Debugf(ctx, "Plugin dependencies: %d", len(pluginDeps)) + + if isInteractive && len(pluginDeps) > 0 { + // Prompt for plugin dependencies + dependencies, err = PromptForPluginDependencies(ctx, pluginDeps) + if err != nil { + return err + } + if wh, ok := dependencies["sql_warehouse_id"]; ok && wh != "" { + opts.warehouseID = wh + } + } else { + // Non-interactive: check flags + dependencies = make(map[string]string) + if opts.warehouseID != "" { + dependencies["sql_warehouse_id"] = opts.warehouseID + } + + // Validate required dependencies are provided + for _, dep := range pluginDeps { + if dep.Required { + if _, ok := dependencies[dep.ID]; !ok { + return fmt.Errorf("missing required flag --%s for detected plugin", dep.FlagName) + } + } + } + } + + // Prompt for description and post-creation actions + if isInteractive { + if 
opts.description == "" { + opts.description = DefaultAppDescription + } + var deployVal bool + var runVal RunMode + deployVal, runVal, err = PromptForDeployAndRun() + if err != nil { + return err + } + shouldDeploy = deployVal + runMode = runVal + } else { + var err error + shouldDeploy, runMode, err = parseDeployAndRunFlags(opts.deploy, opts.run) + if err != nil { + return err + } + } + } + + // Determine output directory + destDir := opts.name + if opts.outputDir != "" { + destDir = filepath.Join(opts.outputDir, opts.name) + } + + // Check if destination already exists + if _, err := os.Stat(destDir); err == nil { + return fmt.Errorf("directory %s already exists", destDir) + } + + // Track whether we started creating the project for cleanup on failure + var projectCreated bool + var runErr error + defer func() { + if runErr != nil && projectCreated { + // Clean up partially created project on failure + os.RemoveAll(destDir) + } + }() + + // Set description default + if opts.description == "" { + opts.description = DefaultAppDescription + } + + // Get workspace host and profile from context + workspaceHost := "" + profile := "" + if w := cmdctx.WorkspaceClient(ctx); w != nil && w.Config != nil { + workspaceHost = w.Config.Host + profile = w.Config.Profile + } + + // Build plugin imports and usages from selected features + pluginImport, pluginUsage := BuildPluginStrings(selectedFeatures) + + // Template variables (initial, without feature fragments) + vars := templateVars{ + ProjectName: opts.name, + SQLWarehouseID: opts.warehouseID, + AppDescription: opts.description, + Profile: profile, + WorkspaceHost: workspaceHost, + PluginImport: pluginImport, + PluginUsage: pluginUsage, + } + + // Load feature resource fragments + fragments, err := loadFeatureFragments(templateDir, selectedFeatures, vars) + if err != nil { + return fmt.Errorf("load feature fragments: %w", err) + } + vars.BundleVariables = fragments.BundleVariables + vars.BundleResources = fragments.BundleResources + vars.TargetVariables = fragments.TargetVariables + vars.AppEnv = fragments.AppEnv + vars.DotEnv = fragments.DotEnv + vars.DotEnvExample = fragments.DotEnvExample + + // Copy template with variable substitution + var fileCount int + runErr = RunWithSpinnerCtx(ctx, "Creating project...", func() error { + var copyErr error + fileCount, copyErr = copyTemplate(templateDir, destDir, vars) + return copyErr + }) + if runErr != nil { + return runErr + } + projectCreated = true // From here on, cleanup on failure + + // Get absolute path + absOutputDir, err := filepath.Abs(destDir) + if err != nil { + absOutputDir = destDir + } + + // Apply features (adds selected features, removes unselected feature files) + runErr = RunWithSpinnerCtx(ctx, "Configuring features...", func() error { + return ApplyFeatures(absOutputDir, selectedFeatures) + }) + if runErr != nil { + return runErr + } + + // Run npm install + runErr = runNpmInstall(ctx, absOutputDir) + if runErr != nil { + return runErr + } + + // Run npm run setup + runErr = runNpmSetup(ctx, absOutputDir) + if runErr != nil { + return runErr + } + + // Show next steps only if user didn't choose to deploy or run + showNextSteps := !shouldDeploy && runMode == RunModeNone + PrintSuccess(opts.name, absOutputDir, fileCount, showNextSteps) + + // Execute post-creation actions (deploy and/or run) + if shouldDeploy || runMode != RunModeNone { + // Change to project directory for subsequent commands + if err := os.Chdir(absOutputDir); err != nil { + return fmt.Errorf("failed to change to 
project directory: %w", err) + } + } + + if shouldDeploy { + cmdio.LogString(ctx, "") + cmdio.LogString(ctx, "Deploying app...") + if err := runPostCreateDeploy(ctx); err != nil { + cmdio.LogString(ctx, fmt.Sprintf("⚠ Deploy failed: %v", err)) + cmdio.LogString(ctx, " You can deploy manually with: databricks experimental dev app deploy") + } + } + + if runMode != RunModeNone { + cmdio.LogString(ctx, "") + if err := runPostCreateDev(ctx, runMode); err != nil { + return err + } + } + + return nil +} + +// runPostCreateDeploy runs the deploy command in the current directory. +func runPostCreateDeploy(ctx context.Context) error { + // Use os.Args[0] to get the path to the current executable + executable := os.Args[0] + cmd := exec.CommandContext(ctx, executable, "experimental", "dev", "app", "deploy") + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + cmd.Stdin = os.Stdin + return cmd.Run() +} + +// runPostCreateDev runs the dev or dev-remote command in the current directory. +func runPostCreateDev(ctx context.Context, mode RunMode) error { + switch mode { + case RunModeDev: + cmdio.LogString(ctx, "Starting development server (npm run dev)...") + cmd := exec.CommandContext(ctx, "npm", "run", "dev") + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + cmd.Stdin = os.Stdin + return cmd.Run() + case RunModeDevRemote: + cmdio.LogString(ctx, "Starting remote development server...") + // Use os.Args[0] to get the path to the current executable + executable := os.Args[0] + cmd := exec.CommandContext(ctx, executable, "experimental", "dev", "app", "dev-remote") + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + cmd.Stdin = os.Stdin + return cmd.Run() + default: + return nil + } +} + +// runNpmInstall runs npm install in the project directory. +func runNpmInstall(ctx context.Context, projectDir string) error { + // Check if npm is available + if _, err := exec.LookPath("npm"); err != nil { + cmdio.LogString(ctx, "⚠ npm not found. Please install Node.js and run 'npm install' manually.") + return nil + } + + return RunWithSpinnerCtx(ctx, "Installing dependencies...", func() error { + cmd := exec.CommandContext(ctx, "npm", "install") + cmd.Dir = projectDir + cmd.Stdout = nil // Suppress output + cmd.Stderr = nil + return cmd.Run() + }) +} + +// runNpmSetup runs npx appkit-setup in the project directory. +func runNpmSetup(ctx context.Context, projectDir string) error { + // Check if npx is available + if _, err := exec.LookPath("npx"); err != nil { + return nil + } + + return RunWithSpinnerCtx(ctx, "Running setup...", func() error { + cmd := exec.CommandContext(ctx, "npx", "appkit-setup", "--write") + cmd.Dir = projectDir + cmd.Stdout = nil // Suppress output + cmd.Stderr = nil + return cmd.Run() + }) +} + +// renameFiles maps source file names to destination names (for files that can't use special chars). +var renameFiles = map[string]string{ + "_gitignore": ".gitignore", + "_env": ".env", + "_env.local": ".env.local", + "_npmrc": ".npmrc", + "_prettierrc": ".prettierrc", + "_eslintrc": ".eslintrc", +} + +// copyTemplate copies the template directory to dest, substituting variables. 
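+// Destination paths go through variable substitution (e.g. a "{{.project_name}}"
+// path segment becomes the chosen project name), a trailing ".tmpl" is stripped,
+// and the renameFiles mapping is applied (e.g. "_gitignore" -> ".gitignore").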
+func copyTemplate(src, dest string, vars templateVars) (int, error) { + fileCount := 0 + + // Find the project_name placeholder directory + srcProjectDir := "" + entries, err := os.ReadDir(src) + if err != nil { + return 0, err + } + for _, e := range entries { + if e.IsDir() && strings.Contains(e.Name(), "{{.project_name}}") { + srcProjectDir = filepath.Join(src, e.Name()) + break + } + } + + // If no {{.project_name}} dir found, copy src directly + if srcProjectDir == "" { + srcProjectDir = src + } + + log.Debugf(context.Background(), "Copying template from: %s", srcProjectDir) + + // Files and directories to skip + skipFiles := map[string]bool{ + "CLAUDE.md": true, + "AGENTS.md": true, + "databricks_template_schema.json": true, + } + skipDirs := map[string]bool{ + "docs": true, + "features": true, // Feature fragments are processed separately, not copied + } + + err = filepath.Walk(srcProjectDir, func(srcPath string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + baseName := filepath.Base(srcPath) + + // Skip certain files + if skipFiles[baseName] { + log.Debugf(context.Background(), "Skipping file: %s", baseName) + return nil + } + + // Skip certain directories + if info.IsDir() && skipDirs[baseName] { + log.Debugf(context.Background(), "Skipping directory: %s", baseName) + return filepath.SkipDir + } + + // Calculate relative path from source project dir + relPath, err := filepath.Rel(srcProjectDir, srcPath) + if err != nil { + return err + } + + // Substitute variables in path + relPath = substituteVars(relPath, vars) + + // Handle .tmpl extension - strip it + relPath = strings.TrimSuffix(relPath, ".tmpl") + + // Apply file renames (e.g., _gitignore -> .gitignore) + fileName := filepath.Base(relPath) + if newName, ok := renameFiles[fileName]; ok { + relPath = filepath.Join(filepath.Dir(relPath), newName) + } + + destPath := filepath.Join(dest, relPath) + + if info.IsDir() { + log.Debugf(context.Background(), "Creating directory: %s", relPath) + return os.MkdirAll(destPath, info.Mode()) + } + + log.Debugf(context.Background(), "Copying file: %s", relPath) + + // Read file content + content, err := os.ReadFile(srcPath) + if err != nil { + return err + } + + // Handle special files + switch filepath.Base(srcPath) { + case "package.json": + content, err = processPackageJSON(content, vars) + if err != nil { + return fmt.Errorf("process package.json: %w", err) + } + default: + // Use Go template engine for .tmpl files (handles conditionals) + if strings.HasSuffix(srcPath, ".tmpl") { + content, err = executeTemplate(srcPath, content, vars) + if err != nil { + return fmt.Errorf("process template %s: %w", srcPath, err) + } + } else if isTextFile(srcPath) { + // Simple substitution for other text files + content = []byte(substituteVars(string(content), vars)) + } + } + + // Create parent directory + if err := os.MkdirAll(filepath.Dir(destPath), 0o755); err != nil { + return err + } + + // Write file + if err := os.WriteFile(destPath, content, info.Mode()); err != nil { + return err + } + + fileCount++ + return nil + }) + if err != nil { + log.Debugf(context.Background(), "Error during template copy: %v", err) + } + log.Debugf(context.Background(), "Copied %d files", fileCount) + + return fileCount, err +} + +// processPackageJSON updates the package.json with project-specific values. 
+func processPackageJSON(content []byte, vars templateVars) ([]byte, error) { + // Just do string substitution to preserve key order and formatting + return []byte(substituteVars(string(content), vars)), nil +} + +// substituteVars replaces template variables in a string. +func substituteVars(s string, vars templateVars) string { + s = strings.ReplaceAll(s, "{{.project_name}}", vars.ProjectName) + s = strings.ReplaceAll(s, "{{.sql_warehouse_id}}", vars.SQLWarehouseID) + s = strings.ReplaceAll(s, "{{.app_description}}", vars.AppDescription) + s = strings.ReplaceAll(s, "{{.profile}}", vars.Profile) + s = strings.ReplaceAll(s, "{{workspace_host}}", vars.WorkspaceHost) + + // Handle plugin placeholders + if vars.PluginImport != "" { + s = strings.ReplaceAll(s, "{{.plugin_import}}", vars.PluginImport) + s = strings.ReplaceAll(s, "{{.plugin_usage}}", vars.PluginUsage) + } else { + // No plugins selected - clean up the template + // Remove ", {{.plugin_import}}" from import line + s = strings.ReplaceAll(s, ", {{.plugin_import}} ", " ") + s = strings.ReplaceAll(s, ", {{.plugin_import}}", "") + // Remove the plugin_usage line entirely + s = strings.ReplaceAll(s, " {{.plugin_usage}},\n", "") + s = strings.ReplaceAll(s, " {{.plugin_usage}},", "") + } + + return s +} + +// executeTemplate processes a .tmpl file using Go's text/template engine. +func executeTemplate(path string, content []byte, vars templateVars) ([]byte, error) { + tmpl, err := template.New(filepath.Base(path)). + Funcs(template.FuncMap{ + "workspace_host": func() string { return vars.WorkspaceHost }, + }). + Parse(string(content)) + if err != nil { + return nil, fmt.Errorf("parse template: %w", err) + } + + // Use a map to match template variable names exactly (snake_case) + data := map[string]string{ + "project_name": vars.ProjectName, + "sql_warehouse_id": vars.SQLWarehouseID, + "app_description": vars.AppDescription, + "profile": vars.Profile, + "workspace_host": vars.WorkspaceHost, + "plugin_import": vars.PluginImport, + "plugin_usage": vars.PluginUsage, + "bundle_variables": vars.BundleVariables, + "bundle_resources": vars.BundleResources, + "target_variables": vars.TargetVariables, + "app_env": vars.AppEnv, + "dotenv": vars.DotEnv, + "dotenv_example": vars.DotEnvExample, + } + + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + return nil, fmt.Errorf("execute template: %w", err) + } + + return buf.Bytes(), nil +} + +// textExtensions contains file extensions that should be treated as text files. +var textExtensions = map[string]bool{ + ".ts": true, ".tsx": true, ".js": true, ".jsx": true, + ".json": true, ".yaml": true, ".yml": true, + ".md": true, ".txt": true, ".html": true, ".css": true, + ".scss": true, ".less": true, ".sql": true, + ".sh": true, ".bash": true, ".zsh": true, + ".py": true, ".go": true, ".rs": true, + ".toml": true, ".ini": true, ".cfg": true, + ".env": true, ".gitignore": true, ".npmrc": true, + ".prettierrc": true, ".eslintrc": true, +} + +// textBaseNames contains file names (without extension) that should be treated as text files. +var textBaseNames = map[string]bool{ + "Makefile": true, "Dockerfile": true, "LICENSE": true, + "README": true, ".gitignore": true, ".env": true, + ".nvmrc": true, ".node-version": true, + "_gitignore": true, "_env": true, "_npmrc": true, +} + +// isTextFile checks if a file is likely a text file based on extension. 
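+// Files recognized here (e.g. "app.tsx", "Makefile") get variable substitution in
+// copyTemplate; anything else (e.g. "logo.png") is copied byte-for-byte.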
+func isTextFile(path string) bool { + ext := strings.ToLower(filepath.Ext(path)) + if textExtensions[ext] { + return true + } + return textBaseNames[filepath.Base(path)] +} diff --git a/experimental/dev/cmd/app/init_test.go b/experimental/dev/cmd/app/init_test.go new file mode 100644 index 0000000000..a7746e7a35 --- /dev/null +++ b/experimental/dev/cmd/app/init_test.go @@ -0,0 +1,303 @@ +package app + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseGitHubURL(t *testing.T) { + tests := []struct { + name string + url string + wantRepoURL string + wantSubdir string + wantBranch string + }{ + { + name: "simple repo URL", + url: "https://github.com/user/repo", + wantRepoURL: "https://github.com/user/repo", + wantSubdir: "", + wantBranch: "", + }, + { + name: "repo URL with trailing slash", + url: "https://github.com/user/repo/", + wantRepoURL: "https://github.com/user/repo", + wantSubdir: "", + wantBranch: "", + }, + { + name: "repo with branch", + url: "https://github.com/user/repo/tree/main", + wantRepoURL: "https://github.com/user/repo", + wantSubdir: "", + wantBranch: "main", + }, + { + name: "repo with branch and subdir", + url: "https://github.com/user/repo/tree/main/templates/starter", + wantRepoURL: "https://github.com/user/repo", + wantSubdir: "templates/starter", + wantBranch: "main", + }, + { + name: "repo with branch and deep subdir", + url: "https://github.com/databricks/cli/tree/v0.1.0/libs/template/templates/default-python", + wantRepoURL: "https://github.com/databricks/cli", + wantSubdir: "libs/template/templates/default-python", + wantBranch: "v0.1.0", + }, + { + name: "repo with feature branch", + url: "https://github.com/user/repo/tree/feature/my-feature", + wantRepoURL: "https://github.com/user/repo", + wantSubdir: "my-feature", + wantBranch: "feature", + }, + { + name: "repo URL with trailing slash and tree", + url: "https://github.com/user/repo/tree/main/", + wantRepoURL: "https://github.com/user/repo", + wantSubdir: "", + wantBranch: "main", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotRepoURL, gotSubdir, gotBranch := parseGitHubURL(tt.url) + assert.Equal(t, tt.wantRepoURL, gotRepoURL, "repoURL mismatch") + assert.Equal(t, tt.wantSubdir, gotSubdir, "subdir mismatch") + assert.Equal(t, tt.wantBranch, gotBranch, "branch mismatch") + }) + } +} + +func TestIsTextFile(t *testing.T) { + tests := []struct { + path string + expected bool + }{ + // Text files by extension + {"file.ts", true}, + {"file.tsx", true}, + {"file.js", true}, + {"file.jsx", true}, + {"file.json", true}, + {"file.yaml", true}, + {"file.yml", true}, + {"file.md", true}, + {"file.txt", true}, + {"file.html", true}, + {"file.css", true}, + {"file.scss", true}, + {"file.sql", true}, + {"file.sh", true}, + {"file.py", true}, + {"file.go", true}, + {"file.toml", true}, + {"file.env", true}, + + // Text files by name + {"Makefile", true}, + {"Dockerfile", true}, + {"LICENSE", true}, + {"README", true}, + {".gitignore", true}, + {".env", true}, + {"_gitignore", true}, + {"_env", true}, + + // Binary files (should return false) + {"file.png", false}, + {"file.jpg", false}, + {"file.gif", false}, + {"file.pdf", false}, + {"file.exe", false}, + {"file.bin", false}, + {"file.zip", false}, + {"randomfile", false}, + } + + for _, tt := range tests { + t.Run(tt.path, func(t *testing.T) { + result := isTextFile(tt.path) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestSubstituteVars(t *testing.T) { + vars := templateVars{ + 
ProjectName: "my-app", + SQLWarehouseID: "warehouse123", + AppDescription: "My awesome app", + Profile: "default", + WorkspaceHost: "https://dbc-123.cloud.databricks.com", + PluginImport: "analytics", + PluginUsage: "analytics()", + } + + tests := []struct { + name string + input string + expected string + }{ + { + name: "project name substitution", + input: "name: {{.project_name}}", + expected: "name: my-app", + }, + { + name: "warehouse id substitution", + input: "warehouse: {{.sql_warehouse_id}}", + expected: "warehouse: warehouse123", + }, + { + name: "description substitution", + input: "description: {{.app_description}}", + expected: "description: My awesome app", + }, + { + name: "profile substitution", + input: "profile: {{.profile}}", + expected: "profile: default", + }, + { + name: "workspace host substitution", + input: "host: {{workspace_host}}", + expected: "host: https://dbc-123.cloud.databricks.com", + }, + { + name: "plugin import substitution", + input: "import { {{.plugin_import}} } from 'appkit'", + expected: "import { analytics } from 'appkit'", + }, + { + name: "plugin usage substitution", + input: "plugins: [{{.plugin_usage}}]", + expected: "plugins: [analytics()]", + }, + { + name: "multiple substitutions", + input: "{{.project_name}} - {{.app_description}}", + expected: "my-app - My awesome app", + }, + { + name: "no substitutions needed", + input: "plain text without variables", + expected: "plain text without variables", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := substituteVars(tt.input, vars) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestSubstituteVarsNoPlugins(t *testing.T) { + // Test plugin cleanup when no plugins are selected + vars := templateVars{ + ProjectName: "my-app", + SQLWarehouseID: "", + AppDescription: "My app", + Profile: "", + WorkspaceHost: "", + PluginImport: "", // No plugins + PluginUsage: "", + } + + tests := []struct { + name string + input string + expected string + }{ + { + name: "removes plugin import with comma", + input: "import { core, {{.plugin_import}} } from 'appkit'", + expected: "import { core } from 'appkit'", + }, + { + name: "removes plugin usage line", + input: "plugins: [\n {{.plugin_usage}},\n]", + expected: "plugins: [\n]", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := substituteVars(tt.input, vars) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestParseDeployAndRunFlags(t *testing.T) { + tests := []struct { + name string + deploy bool + run string + wantDeploy bool + wantRunMode RunMode + wantErr bool + }{ + { + name: "deploy true, run none", + deploy: true, + run: "none", + wantDeploy: true, + wantRunMode: RunModeNone, + wantErr: false, + }, + { + name: "deploy true, run dev", + deploy: true, + run: "dev", + wantDeploy: true, + wantRunMode: RunModeDev, + wantErr: false, + }, + { + name: "deploy false, run dev-remote", + deploy: false, + run: "dev-remote", + wantDeploy: false, + wantRunMode: RunModeDevRemote, + wantErr: false, + }, + { + name: "empty run value", + deploy: false, + run: "", + wantDeploy: false, + wantRunMode: RunModeNone, + wantErr: false, + }, + { + name: "invalid run value", + deploy: true, + run: "invalid", + wantDeploy: false, + wantRunMode: RunModeNone, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + deploy, runMode, err := parseDeployAndRunFlags(tt.deploy, tt.run) + if tt.wantErr { + assert.Error(t, err) + return + } + 
assert.NoError(t, err) + assert.Equal(t, tt.wantDeploy, deploy) + assert.Equal(t, tt.wantRunMode, runMode) + }) + } +} diff --git a/experimental/dev/cmd/app/prompt.go b/experimental/dev/cmd/app/prompt.go new file mode 100644 index 0000000000..ac28b2f2a1 --- /dev/null +++ b/experimental/dev/cmd/app/prompt.go @@ -0,0 +1,527 @@ +package app + +import ( + "context" + "errors" + "fmt" + "regexp" + "strconv" + "time" + + "github.com/briandowns/spinner" + "github.com/charmbracelet/huh" + "github.com/charmbracelet/lipgloss" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/listing" + "github.com/databricks/databricks-sdk-go/service/apps" + "github.com/databricks/databricks-sdk-go/service/sql" +) + +// DefaultAppDescription is the default description for new apps. +const DefaultAppDescription = "A Databricks App powered by AppKit" + +// AppkitTheme returns a custom theme for appkit prompts. +func appkitTheme() *huh.Theme { + t := huh.ThemeBase() + + // Databricks brand colors + red := lipgloss.Color("#BD2B26") + gray := lipgloss.Color("#71717A") // Mid-tone gray, readable on light and dark + yellow := lipgloss.Color("#FFAB00") + + t.Focused.Title = t.Focused.Title.Foreground(red).Bold(true) + t.Focused.Description = t.Focused.Description.Foreground(gray) + t.Focused.SelectedOption = t.Focused.SelectedOption.Foreground(yellow) + t.Focused.TextInput.Placeholder = t.Focused.TextInput.Placeholder.Foreground(gray) + + return t +} + +// RunMode specifies how to run the app after creation. +type RunMode string + +const ( + RunModeNone RunMode = "none" + RunModeDev RunMode = "dev" + RunModeDevRemote RunMode = "dev-remote" +) + +// CreateProjectConfig holds the configuration gathered from the interactive prompt. +type CreateProjectConfig struct { + ProjectName string + Description string + Features []string + Dependencies map[string]string // e.g., {"sql_warehouse_id": "abc123"} + Deploy bool // Whether to deploy the app after creation + RunMode RunMode // How to run the app after creation +} + +// App name constraints. +const ( + MaxAppNameLength = 30 + DevTargetPrefix = "dev-" +) + +// projectNamePattern is the compiled regex for validating project names. +// Pre-compiled for efficiency since validation is called on every keystroke. +var projectNamePattern = regexp.MustCompile(`^[a-z][a-z0-9-]*$`) + +// ValidateProjectName validates the project name for length and pattern constraints. +// It checks that the name plus the "dev-" prefix doesn't exceed 30 characters, +// and that the name follows the pattern: starts with a letter, contains only +// lowercase letters, numbers, or hyphens. +func ValidateProjectName(s string) error { + if s == "" { + return errors.New("project name is required") + } + + // Check length constraint (dev- prefix + name <= 30) + totalLength := len(DevTargetPrefix) + len(s) + if totalLength > MaxAppNameLength { + maxAllowed := MaxAppNameLength - len(DevTargetPrefix) + return fmt.Errorf("name too long (max %d chars)", maxAllowed) + } + + // Check pattern + if !projectNamePattern.MatchString(s) { + return errors.New("must start with a letter, use only lowercase letters, numbers, or hyphens") + } + + return nil +} + +// printHeader prints the AppKit header banner. +func printHeader() { + headerStyle := lipgloss.NewStyle(). + Foreground(lipgloss.Color("#BD2B26")). + Bold(true) + + subtitleStyle := lipgloss.NewStyle(). 
+ Foreground(lipgloss.Color("#71717A")) + + fmt.Println() + fmt.Println(headerStyle.Render("ā—† Create a new Databricks AppKit project")) + fmt.Println(subtitleStyle.Render(" Full-stack TypeScript • React • Tailwind CSS")) + fmt.Println() +} + +// PromptForProjectName prompts only for project name. +// Used as the first step before resolving templates. +func PromptForProjectName() (string, error) { + printHeader() + theme := appkitTheme() + + var name string + err := huh.NewInput(). + Title("Project name"). + Description("lowercase letters, numbers, hyphens (max 26 chars)"). + Placeholder("my-app"). + Value(&name). + Validate(ValidateProjectName). + WithTheme(theme). + Run() + if err != nil { + return "", err + } + + return name, nil +} + +// PromptForPluginDependencies prompts for dependencies required by detected plugins. +// Returns a map of dependency ID to value. +func PromptForPluginDependencies(ctx context.Context, deps []FeatureDependency) (map[string]string, error) { + theme := appkitTheme() + result := make(map[string]string) + + for _, dep := range deps { + // Special handling for SQL warehouse - show picker instead of text input + if dep.ID == "sql_warehouse_id" { + warehouseID, err := PromptForWarehouse(ctx) + if err != nil { + return nil, err + } + result[dep.ID] = warehouseID + continue + } + + var value string + description := dep.Description + if !dep.Required { + description += " (optional)" + } + + input := huh.NewInput(). + Title(dep.Title). + Description(description). + Placeholder(dep.Placeholder). + Value(&value) + + if dep.Required { + input = input.Validate(func(s string) error { + if s == "" { + return errors.New("this field is required") + } + return nil + }) + } + + if err := input.WithTheme(theme).Run(); err != nil { + return nil, err + } + result[dep.ID] = value + } + + return result, nil +} + +// PromptForDeployAndRun prompts for post-creation deploy and run options. +func PromptForDeployAndRun() (deploy bool, runMode RunMode, err error) { + theme := appkitTheme() + + // Deploy after creation? + err = huh.NewConfirm(). + Title("Deploy after creation?"). + Description("Run 'databricks experimental dev app deploy' after setup"). + Value(&deploy). + WithTheme(theme). + Run() + if err != nil { + return false, RunModeNone, err + } + + // Run the app? + runModeStr := string(RunModeNone) + err = huh.NewSelect[string](). + Title("Run the app after creation?"). + Description("Choose how to start the development server"). + Options( + huh.NewOption("No, I'll run it later", string(RunModeNone)), + huh.NewOption("Yes, run locally (npm run dev)", string(RunModeDev)), + huh.NewOption("Yes, run with remote bridge (dev-remote)", string(RunModeDevRemote)), + ). + Value(&runModeStr). + WithTheme(theme). + Run() + if err != nil { + return false, RunModeNone, err + } + + return deploy, RunMode(runModeStr), nil +} + +// PromptForProjectConfig shows an interactive form to gather project configuration. +// Flow: name -> features -> feature dependencies -> description. +// If preSelectedFeatures is provided, the feature selection prompt is skipped. +func PromptForProjectConfig(ctx context.Context, preSelectedFeatures []string) (*CreateProjectConfig, error) { + config := &CreateProjectConfig{ + Dependencies: make(map[string]string), + Features: preSelectedFeatures, + } + theme := appkitTheme() + + printHeader() + + // Step 1: Project name + err := huh.NewInput(). + Title("Project name"). + Description("lowercase letters, numbers, hyphens (max 26 chars)"). + Placeholder("my-app"). 
+ Value(&config.ProjectName). + Validate(ValidateProjectName). + WithTheme(theme). + Run() + if err != nil { + return nil, err + } + + // Step 2: Feature selection (skip if features already provided via flag) + if len(config.Features) == 0 && len(AvailableFeatures) > 0 { + options := make([]huh.Option[string], 0, len(AvailableFeatures)) + for _, f := range AvailableFeatures { + label := f.Name + " - " + f.Description + options = append(options, huh.NewOption(label, f.ID)) + } + + err = huh.NewMultiSelect[string](). + Title("Select features"). + Description("space to toggle, enter to confirm"). + Options(options...). + Value(&config.Features). + WithTheme(theme). + Run() + if err != nil { + return nil, err + } + } + + // Step 3: Prompt for feature dependencies + deps := CollectDependencies(config.Features) + for _, dep := range deps { + // Special handling for SQL warehouse - show picker instead of text input + if dep.ID == "sql_warehouse_id" { + warehouseID, err := PromptForWarehouse(ctx) + if err != nil { + return nil, err + } + config.Dependencies[dep.ID] = warehouseID + continue + } + + var value string + description := dep.Description + if !dep.Required { + description += " (optional)" + } + + input := huh.NewInput(). + Title(dep.Title). + Description(description). + Placeholder(dep.Placeholder). + Value(&value) + + if dep.Required { + input = input.Validate(func(s string) error { + if s == "" { + return errors.New("this field is required") + } + return nil + }) + } + + if err := input.WithTheme(theme).Run(); err != nil { + return nil, err + } + config.Dependencies[dep.ID] = value + } + + // Step 4: Description + config.Description = DefaultAppDescription + + err = huh.NewInput(). + Title("Description"). + Placeholder(DefaultAppDescription). + Value(&config.Description). + WithTheme(theme). + Run() + if err != nil { + return nil, err + } + + if config.Description == "" { + config.Description = DefaultAppDescription + } + + // Step 5: Deploy after creation? + err = huh.NewConfirm(). + Title("Deploy after creation?"). + Description("Run 'databricks experimental dev app deploy' after setup"). + Value(&config.Deploy). + WithTheme(theme). + Run() + if err != nil { + return nil, err + } + + // Step 6: Run the app? + runModeStr := string(RunModeNone) + err = huh.NewSelect[string](). + Title("Run the app after creation?"). + Description("Choose how to start the development server"). + Options( + huh.NewOption("No, I'll run it later", string(RunModeNone)), + huh.NewOption("Yes, run locally (npm run dev)", string(RunModeDev)), + huh.NewOption("Yes, run with remote bridge (dev-remote)", string(RunModeDevRemote)), + ). + Value(&runModeStr). + WithTheme(theme). + Run() + if err != nil { + return nil, err + } + config.RunMode = RunMode(runModeStr) + + return config, nil +} + +// ListSQLWarehouses fetches all SQL warehouses the user has access to. +func ListSQLWarehouses(ctx context.Context) ([]sql.EndpointInfo, error) { + w := cmdctx.WorkspaceClient(ctx) + if w == nil { + return nil, errors.New("no workspace client available") + } + + iter := w.Warehouses.List(ctx, sql.ListWarehousesRequest{}) + return listing.ToSlice(ctx, iter) +} + +// PromptForWarehouse shows a picker to select a SQL warehouse. 
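+// The returned value is the warehouse ID (option labels show "name (STATE)");
+// callers typically store it under the "sql_warehouse_id" dependency key.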
+func PromptForWarehouse(ctx context.Context) (string, error) { + var warehouses []sql.EndpointInfo + err := RunWithSpinnerCtx(ctx, "Fetching SQL warehouses...", func() error { + var fetchErr error + warehouses, fetchErr = ListSQLWarehouses(ctx) + return fetchErr + }) + if err != nil { + return "", fmt.Errorf("failed to fetch SQL warehouses: %w", err) + } + + if len(warehouses) == 0 { + return "", errors.New("no SQL warehouses found. Create one in your workspace first") + } + + theme := appkitTheme() + + // Build options with warehouse name and state + options := make([]huh.Option[string], 0, len(warehouses)) + for _, wh := range warehouses { + state := string(wh.State) + label := fmt.Sprintf("%s (%s)", wh.Name, state) + options = append(options, huh.NewOption(label, wh.Id)) + } + + var selected string + err = huh.NewSelect[string](). + Title("Select SQL Warehouse"). + Description(fmt.Sprintf("%d warehouses available — type to filter", len(warehouses))). + Options(options...). + Value(&selected). + Filtering(true). + WithTheme(theme). + Run() + if err != nil { + return "", err + } + + return selected, nil +} + +// RunWithSpinnerCtx runs a function while showing a spinner with the given title. +// The spinner stops and the function returns early if the context is cancelled. +// Panics in the action are recovered and returned as errors. +func RunWithSpinnerCtx(ctx context.Context, title string, action func() error) error { + s := spinner.New( + spinner.CharSets[14], + 80*time.Millisecond, + spinner.WithColor("yellow"), // Databricks brand color + spinner.WithSuffix(" "+title), + ) + s.Start() + + done := make(chan error, 1) + go func() { + defer func() { + if r := recover(); r != nil { + done <- fmt.Errorf("action panicked: %v", r) + } + }() + done <- action() + }() + + select { + case err := <-done: + s.Stop() + return err + case <-ctx.Done(): + s.Stop() + // Wait for action goroutine to complete to avoid orphaned goroutines. + // For exec.CommandContext, the process is killed when context is cancelled. + <-done + return ctx.Err() + } +} + +// ListAllApps fetches all apps the user has access to from the workspace. +func ListAllApps(ctx context.Context) ([]apps.App, error) { + w := cmdctx.WorkspaceClient(ctx) + if w == nil { + return nil, errors.New("no workspace client available") + } + + iter := w.Apps.List(ctx, apps.ListAppsRequest{}) + return listing.ToSlice(ctx, iter) +} + +// PromptForAppSelection shows a picker to select an existing app. +// Returns the selected app name or error if cancelled/no apps found. +func PromptForAppSelection(ctx context.Context, title string) (string, error) { + if !cmdio.IsPromptSupported(ctx) { + return "", errors.New("--name is required in non-interactive mode") + } + + // Fetch all apps the user has access to + var existingApps []apps.App + err := RunWithSpinnerCtx(ctx, "Fetching apps...", func() error { + var fetchErr error + existingApps, fetchErr = ListAllApps(ctx) + return fetchErr + }) + if err != nil { + return "", fmt.Errorf("failed to fetch apps: %w", err) + } + + if len(existingApps) == 0 { + return "", errors.New("no apps found. Create one first with 'databricks apps create '") + } + + theme := appkitTheme() + + // Build options + options := make([]huh.Option[string], 0, len(existingApps)) + for _, app := range existingApps { + label := app.Name + if app.Description != "" { + desc := app.Description + if len(desc) > 40 { + desc = desc[:37] + "..." 
+ } + label += " — " + desc + } + options = append(options, huh.NewOption(label, app.Name)) + } + + var selected string + err = huh.NewSelect[string](). + Title(title). + Description(fmt.Sprintf("%d apps found — type to filter", len(existingApps))). + Options(options...). + Value(&selected). + Filtering(true). + WithTheme(theme). + Run() + if err != nil { + return "", err + } + + return selected, nil +} + +// PrintSuccess prints a success message after project creation. +// If showNextSteps is true, also prints the "Next steps" section. +func PrintSuccess(projectName, outputDir string, fileCount int, showNextSteps bool) { + successStyle := lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FFAB00")). // Databricks yellow + Bold(true) + + dimStyle := lipgloss.NewStyle(). + Foreground(lipgloss.Color("#71717A")) // Mid-tone gray + + codeStyle := lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FF3621")) // Databricks orange + + fmt.Println() + fmt.Println(successStyle.Render("āœ” Project created successfully!")) + fmt.Println() + fmt.Println(dimStyle.Render(" Location: " + outputDir)) + fmt.Println(dimStyle.Render(" Files: " + strconv.Itoa(fileCount))) + + if showNextSteps { + fmt.Println() + fmt.Println(dimStyle.Render(" Next steps:")) + fmt.Println() + fmt.Println(codeStyle.Render(" cd " + projectName)) + fmt.Println(codeStyle.Render(" npm run dev")) + } + fmt.Println() +} diff --git a/experimental/dev/cmd/app/prompt_test.go b/experimental/dev/cmd/app/prompt_test.go new file mode 100644 index 0000000000..f580031186 --- /dev/null +++ b/experimental/dev/cmd/app/prompt_test.go @@ -0,0 +1,187 @@ +package app + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidateProjectName(t *testing.T) { + tests := []struct { + name string + projectName string + expectError bool + errorMsg string + }{ + { + name: "valid simple name", + projectName: "my-app", + expectError: false, + }, + { + name: "valid name with numbers", + projectName: "app123", + expectError: false, + }, + { + name: "valid name with hyphens", + projectName: "my-cool-app", + expectError: false, + }, + { + name: "empty name", + projectName: "", + expectError: true, + errorMsg: "required", + }, + { + name: "name too long", + projectName: "this-is-a-very-long-app-name-that-exceeds", + expectError: true, + errorMsg: "too long", + }, + { + name: "name at max length (26 chars)", + projectName: "abcdefghijklmnopqrstuvwxyz", + expectError: false, + }, + { + name: "name starts with number", + projectName: "123app", + expectError: true, + errorMsg: "must start with a letter", + }, + { + name: "name starts with hyphen", + projectName: "-myapp", + expectError: true, + errorMsg: "must start with a letter", + }, + { + name: "name with uppercase", + projectName: "MyApp", + expectError: true, + errorMsg: "lowercase", + }, + { + name: "name with underscore", + projectName: "my_app", + expectError: true, + errorMsg: "lowercase letters, numbers, or hyphens", + }, + { + name: "name with spaces", + projectName: "my app", + expectError: true, + errorMsg: "lowercase letters, numbers, or hyphens", + }, + { + name: "name with special characters", + projectName: "my@app!", + expectError: true, + errorMsg: "lowercase letters, numbers, or hyphens", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := ValidateProjectName(tt.projectName) + if tt.expectError { + require.Error(t, err) + assert.Contains(t, err.Error(), tt.errorMsg) + 
} else { + assert.NoError(t, err) + } + }) + } +} + +func TestRunWithSpinnerCtx(t *testing.T) { + t.Run("successful action", func(t *testing.T) { + ctx := context.Background() + executed := false + + err := RunWithSpinnerCtx(ctx, "Testing...", func() error { + executed = true + return nil + }) + + assert.NoError(t, err) + assert.True(t, executed) + }) + + t.Run("action returns error", func(t *testing.T) { + ctx := context.Background() + expectedErr := errors.New("action failed") + + err := RunWithSpinnerCtx(ctx, "Testing...", func() error { + return expectedErr + }) + + assert.Equal(t, expectedErr, err) + }) + + t.Run("context cancelled", func(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + actionStarted := make(chan struct{}) + actionDone := make(chan struct{}) + + go func() { + _ = RunWithSpinnerCtx(ctx, "Testing...", func() error { + close(actionStarted) + time.Sleep(100 * time.Millisecond) + close(actionDone) + return nil + }) + }() + + // Wait for action to start + <-actionStarted + // Cancel context + cancel() + // Wait for action to complete (spinner should wait) + <-actionDone + }) + + t.Run("action panics - recovered", func(t *testing.T) { + ctx := context.Background() + + err := RunWithSpinnerCtx(ctx, "Testing...", func() error { + panic("test panic") + }) + + require.Error(t, err) + assert.Contains(t, err.Error(), "action panicked") + assert.Contains(t, err.Error(), "test panic") + }) +} + +func TestRunModeConstants(t *testing.T) { + assert.Equal(t, RunModeNone, RunMode("none")) + assert.Equal(t, RunModeDev, RunMode("dev")) + assert.Equal(t, RunModeDevRemote, RunMode("dev-remote")) +} + +func TestMaxAppNameLength(t *testing.T) { + // Verify the constant is set correctly + assert.Equal(t, 30, MaxAppNameLength) + assert.Equal(t, "dev-", DevTargetPrefix) + + // Max allowed name length should be 30 - 4 ("dev-") = 26 + maxAllowed := MaxAppNameLength - len(DevTargetPrefix) + assert.Equal(t, 26, maxAllowed) + + // Test at boundary + validName := "abcdefghijklmnopqrstuvwxyz" // 26 chars + assert.Len(t, validName, 26) + assert.NoError(t, ValidateProjectName(validName)) + + // Test over boundary + invalidName := "abcdefghijklmnopqrstuvwxyz1" // 27 chars + assert.Len(t, invalidName, 27) + assert.Error(t, ValidateProjectName(invalidName)) +} diff --git a/experimental/dev/cmd/app/vite-server.js b/experimental/dev/cmd/app/vite-server.js new file mode 100644 index 0000000000..e0ba85322a --- /dev/null +++ b/experimental/dev/cmd/app/vite-server.js @@ -0,0 +1,172 @@ +#!/usr/bin/env node +const path = require("node:path"); +const fs = require("node:fs"); + +async function startViteServer() { + const vitePath = safeViteResolve(); + + if (!vitePath) { + console.log( + "\nāŒ Vite needs to be installed in the current directory. Run `npm install vite`.\n" + ); + process.exit(1); + } + + const { createServer, loadConfigFromFile, mergeConfig } = require(vitePath); + + /** + * This script is controlled by us, and shouldn't be called directly by the user. + * We know the order of the arguments is always: + * 1. appUrl + * 2. port + * + * We can safely access the arguments by index. 
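+   *
+   * Illustrative invocation (the URL is a placeholder): node vite-server.js <app-url> 5173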
+ */ + const clientPath = path.join(process.cwd(), "client"); + const appUrl = process.argv[2] || ""; + const port = parseInt(process.argv[3] || 5173); + + if (!fs.existsSync(clientPath)) { + console.error("client folder doesn't exist."); + process.exit(1); + } + + if (!appUrl) { + console.error("App URL is required"); + process.exit(1); + } + + try { + const domain = new URL(appUrl); + + const loadedConfig = await loadConfigFromFile( + { + mode: "development", + command: "serve", + }, + undefined, + clientPath + ); + const userConfig = loadedConfig?.config ?? {}; + + /** + * Vite uses the same port for the HMR server as the main server. + * Allowing the user to set this option breaks the system. + * By just providing the port override option, Vite will use the same port for the HMR server. + * Multiple servers will work, but if the user has this in their config we need to delete it. + */ + delete userConfig.server?.hmr?.port; + + const coreConfig = { + configFile: false, + root: clientPath, + server: { + open: `${domain.origin}?dev=true`, + port: port, + hmr: { + overlay: true, + path: `/dev-hmr`, + }, + middlewareMode: false, + }, + plugins: [queriesHMRPlugin()], + }; + const mergedConfigs = mergeConfig(userConfig, coreConfig); + const server = await createServer(mergedConfigs); + + await server.listen(); + + console.log(`\nāœ… Vite dev server started successfully!`); + console.log(`\nPress Ctrl+C to stop the server\n`); + + const shutdown = async () => { + await server.close(); + process.exit(0); + }; + + process.on("SIGINT", shutdown); + process.on("SIGTERM", shutdown); + } catch (error) { + console.error(`āŒ Failed to start Vite server:`, error.message); + if (error.stack) { + console.error(error.stack); + } + process.exit(1); + } +} + +function safeViteResolve() { + try { + const vitePath = require.resolve("vite", { paths: [process.cwd()] }); + + return vitePath; + } catch (error) { + return null; + } +} + +// Start the server +startViteServer().catch((error) => { + console.error("Fatal error:", error); + process.exit(1); +}); + +/* + * development only, watches for changes in the queries directory and sends HMR updates to the client. + */ +function queriesHMRPlugin(options = {}) { + const { queriesPath = path.resolve(process.cwd(), "config/queries") } = + options; + let isServe = false; + let serverRunning = false; + + return { + name: "queries-hmr", + async buildStart() { + if (!isServe) return; + if (serverRunning) { + return; + } + serverRunning = true; + }, + configResolved(config) { + isServe = config.command === "serve"; + }, + configureServer(server) { + if (!isServe) return; + if (!server.config.mode || server.config.mode === "development") { + // 1. check if queries directory exists + if (fs.existsSync(queriesPath)) { + // 2. 
add the queries directory to the watcher + server.watcher.add(queriesPath); + + const handleFileChange = (file) => { + if (file.includes("config/queries") && file.endsWith(".sql")) { + const fileName = path.basename(file); + const queryKey = fileName.replace(/\.(sql)$/, ""); + + console.log("šŸ”„ Query updated:", queryKey, fileName); + + server.ws.send({ + type: "custom", + event: "query-update", + data: { + key: queryKey, + timestamp: Date.now(), + }, + }); + } + }; + + server.watcher.on("change", handleFileChange); + } + + process.on("SIGINT", () => { + console.log("šŸ›‘ SIGINT received — cleaning up before exit..."); + serverRunning = false; + process.exit(0); + }); + } + }, + }; +} diff --git a/experimental/dev/cmd/app/vite_bridge.go b/experimental/dev/cmd/app/vite_bridge.go new file mode 100644 index 0000000000..cddc6d02d6 --- /dev/null +++ b/experimental/dev/cmd/app/vite_bridge.go @@ -0,0 +1,838 @@ +package app + +import ( + "bufio" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/log" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/apps" + "github.com/gorilla/websocket" + "golang.org/x/sync/errgroup" +) + +const ( + localViteURL = "http://localhost:%d" + localViteHMRURL = "ws://localhost:%d/dev-hmr" + viteHMRProtocol = "vite-hmr" + + // WebSocket timeouts + wsHandshakeTimeout = 45 * time.Second + wsKeepaliveInterval = 20 * time.Second + wsWriteTimeout = 5 * time.Second + + // HTTP client timeouts + httpRequestTimeout = 60 * time.Second + httpIdleConnTimeout = 90 * time.Second + + // Bridge operation timeouts + bridgeFetchTimeout = 30 * time.Second + bridgeConnTimeout = 60 * time.Second + bridgeTunnelReadyTimeout = 30 * time.Second + + // Retry configuration + tunnelConnectMaxRetries = 5 + tunnelConnectInitialBackoff = 2 * time.Second + tunnelConnectMaxBackoff = 30 * time.Second +) + +type ViteBridgeMessage struct { + Type string `json:"type"` + TunnelID string `json:"tunnelId,omitempty"` + Path string `json:"path,omitempty"` + Method string `json:"method,omitempty"` + Status int `json:"status,omitempty"` + Headers map[string]any `json:"headers,omitempty"` + Body string `json:"body,omitempty"` + Viewer string `json:"viewer"` + RequestID string `json:"requestId"` + Approved bool `json:"approved"` + Content string `json:"content,omitempty"` + Error string `json:"error,omitempty"` +} + +// prioritizedMessage represents a message to send through the tunnel websocket +type prioritizedMessage struct { + messageType int + data []byte + priority int // 0 = high (HMR), 1 = normal (fetch) +} + +type ViteBridge struct { + ctx context.Context + w *databricks.WorkspaceClient + appName string + tunnelConn *websocket.Conn + hmrConn *websocket.Conn + tunnelID string + tunnelWriteChan chan prioritizedMessage + stopChan chan struct{} + stopOnce sync.Once + httpClient *http.Client + connectionRequests chan *ViteBridgeMessage + port int + keepaliveDone chan struct{} // Signals keepalive goroutine to stop on reconnect + keepaliveMu sync.Mutex // Protects keepaliveDone +} + +func NewViteBridge(ctx context.Context, w *databricks.WorkspaceClient, appName string, port int) *ViteBridge { + // Configure HTTP client optimized for local high-volume requests + transport := &http.Transport{ + MaxIdleConns: 100, + MaxIdleConnsPerHost: 100, + IdleConnTimeout: httpIdleConnTimeout, + DisableKeepAlives: false, + 
DisableCompression: false, + } + + return &ViteBridge{ + ctx: ctx, + w: w, + appName: appName, + httpClient: &http.Client{ + Timeout: httpRequestTimeout, + Transport: transport, + }, + stopChan: make(chan struct{}), + tunnelWriteChan: make(chan prioritizedMessage, 100), // Buffered channel for async writes + connectionRequests: make(chan *ViteBridgeMessage, 10), + port: port, + } +} + +func (vb *ViteBridge) getAuthHeaders(wsURL string) (http.Header, error) { + req, err := http.NewRequestWithContext(vb.ctx, "GET", wsURL, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + err = vb.w.Config.Authenticate(req) + if err != nil { + return nil, fmt.Errorf("failed to authenticate: %w", err) + } + + return req.Header, nil +} + +func (vb *ViteBridge) GetAppDomain() (*url.URL, error) { + app, err := vb.w.Apps.Get(vb.ctx, apps.GetAppRequest{ + Name: vb.appName, + }) + if err != nil { + return nil, fmt.Errorf("failed to get app: %w", err) + } + + if app.Url == "" { + return nil, errors.New("app URL is empty") + } + + return url.Parse(app.Url) +} + +func (vb *ViteBridge) connectToTunnel(appDomain *url.URL) error { + wsURL := fmt.Sprintf("wss://%s/dev-tunnel", appDomain.Host) + + headers, err := vb.getAuthHeaders(wsURL) + if err != nil { + return fmt.Errorf("failed to get auth headers: %w", err) + } + + dialer := websocket.Dialer{ + HandshakeTimeout: wsHandshakeTimeout, + ReadBufferSize: 256 * 1024, // 256KB read buffer for large assets + WriteBufferSize: 256 * 1024, // 256KB write buffer for large assets + } + + conn, resp, err := dialer.Dial(wsURL, headers) + if err != nil { + if resp != nil { + body, _ := io.ReadAll(resp.Body) + resp.Body.Close() + return fmt.Errorf("failed to connect to tunnel (status %d): %w, body: %s", resp.StatusCode, err, string(body)) + } + return fmt.Errorf("failed to connect to tunnel: %w", err) + } + if resp != nil && resp.Body != nil { + resp.Body.Close() + } + + // Configure keepalive to prevent server timeout + _ = conn.SetReadDeadline(time.Time{}) // No read timeout + _ = conn.SetWriteDeadline(time.Time{}) // No write timeout + + // Enable pong handler to respond to server pongs (response to our pings) + conn.SetPongHandler(func(appData string) error { + log.Debugf(vb.ctx, "[vite_bridge] Received pong from server") + return nil + }) + + // Enable ping handler to respond to server pings with pongs + conn.SetPingHandler(func(appData string) error { + log.Debugf(vb.ctx, "[vite_bridge] Received ping from server, sending pong") + // Send pong response + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.PongMessage, + data: []byte(appData), + priority: 0, // High priority + }: + case <-time.After(wsWriteTimeout): + log.Warnf(vb.ctx, "[vite_bridge] Failed to send pong response") + } + return nil + }) + + vb.tunnelConn = conn + + // Start keepalive ping goroutine (stop existing one first if any) + vb.keepaliveMu.Lock() + if vb.keepaliveDone != nil { + close(vb.keepaliveDone) + } + vb.keepaliveDone = make(chan struct{}) + keepaliveDone := vb.keepaliveDone + vb.keepaliveMu.Unlock() + + go vb.tunnelKeepalive(keepaliveDone) + + return nil +} + +// connectToTunnelWithRetry attempts to connect to the tunnel with exponential backoff. +// This handles cases where the app isn't fully ready yet (e.g., right after deployment). 
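+// With the defaults above (5 attempts, 2s initial backoff, 1.5x growth, 30s cap),
+// the waits between failed attempts are roughly 2s, 3s, 4.5s and 6.75s.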
+func (vb *ViteBridge) connectToTunnelWithRetry(appDomain *url.URL) error { + var lastErr error + backoff := tunnelConnectInitialBackoff + + for attempt := 1; attempt <= tunnelConnectMaxRetries; attempt++ { + err := vb.connectToTunnel(appDomain) + if err == nil { + if attempt > 1 { + cmdio.LogString(vb.ctx, "āœ… Connected to tunnel successfully!") + } + return nil + } + + lastErr = err + + // Check if context is cancelled + select { + case <-vb.ctx.Done(): + return vb.ctx.Err() + default: + } + + // Don't retry on the last attempt + if attempt == tunnelConnectMaxRetries { + break + } + + // Log retry attempt + cmdio.LogString(vb.ctx, fmt.Sprintf("ā³ Connection attempt %d/%d failed, retrying in %v...", attempt, tunnelConnectMaxRetries, backoff)) + log.Debugf(vb.ctx, "[vite_bridge] Connection error: %v", err) + + // Wait before retrying + select { + case <-time.After(backoff): + case <-vb.ctx.Done(): + return vb.ctx.Err() + } + + // Exponential backoff with cap + backoff = time.Duration(float64(backoff) * 1.5) + if backoff > tunnelConnectMaxBackoff { + backoff = tunnelConnectMaxBackoff + } + } + + return fmt.Errorf("failed to connect after %d attempts: %w", tunnelConnectMaxRetries, lastErr) +} + +func (vb *ViteBridge) connectToViteHMR() error { + dialer := websocket.Dialer{ + Subprotocols: []string{viteHMRProtocol}, + } + + conn, resp, err := dialer.Dial(fmt.Sprintf(localViteHMRURL, vb.port), nil) + if err != nil { + if resp != nil && resp.Body != nil { + resp.Body.Close() + } + return fmt.Errorf("failed to connect to Vite HMR: %w", err) + } + if resp != nil && resp.Body != nil { + resp.Body.Close() + } + + vb.hmrConn = conn + log.Infof(vb.ctx, "[vite_bridge] Connected to local Vite HMR WS") + return nil +} + +// tunnelKeepalive sends periodic pings to keep the connection alive. +// Remote servers often have 30-60s idle timeouts. +// The done channel is used to stop this goroutine on reconnect. 
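+// Pings are queued on the shared tunnelWriteChan rather than written to the socket
+// directly, so all tunnel writes stay serialized in the tunnelWriter goroutine.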
+func (vb *ViteBridge) tunnelKeepalive(done <-chan struct{}) { + ticker := time.NewTicker(wsKeepaliveInterval) + defer ticker.Stop() + + for { + select { + case <-done: + return + case <-vb.stopChan: + return + case <-ticker.C: + // Send ping through the write channel to avoid race conditions + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.PingMessage, + data: []byte{}, + priority: 0, // High priority to ensure keepalive + }: + log.Debugf(vb.ctx, "[vite_bridge] Sent keepalive ping") + case <-time.After(wsWriteTimeout): + log.Warnf(vb.ctx, "[vite_bridge] Failed to send keepalive ping (channel full)") + } + } + } +} + +// tunnelWriter handles all writes to the tunnel websocket in a single goroutine +// This eliminates mutex contention and ensures ordered delivery +func (vb *ViteBridge) tunnelWriter(ctx context.Context) error { + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-vb.stopChan: + return nil + case msg := <-vb.tunnelWriteChan: + if err := vb.tunnelConn.WriteMessage(msg.messageType, msg.data); err != nil { + log.Errorf(vb.ctx, "[vite_bridge] Failed to write message: %v", err) + return fmt.Errorf("failed to write to tunnel: %w", err) + } + } + } +} + +func (vb *ViteBridge) handleTunnelMessages(ctx context.Context) error { + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-vb.stopChan: + return nil + default: + } + + _, message, err := vb.tunnelConn.ReadMessage() + if err != nil { + if websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseGoingAway, websocket.CloseNoStatusReceived, websocket.CloseAbnormalClosure) { + cmdio.LogString(vb.ctx, "šŸ”„ Tunnel closed, reconnecting...") + + appDomain, err := vb.GetAppDomain() + if err != nil { + return fmt.Errorf("failed to get app domain for reconnection: %w", err) + } + + if err := vb.connectToTunnelWithRetry(appDomain); err != nil { + return fmt.Errorf("failed to reconnect to tunnel: %w", err) + } + continue + } + return fmt.Errorf("tunnel connection error: %w", err) + } + + // Debug: Log raw message + log.Debugf(vb.ctx, "[vite_bridge] Raw message: %s", string(message)) + + var msg ViteBridgeMessage + if err := json.Unmarshal(message, &msg); err != nil { + log.Errorf(vb.ctx, "[vite_bridge] Failed to parse message: %v", err) + continue + } + + // Debug: Log all incoming message types + log.Debugf(vb.ctx, "[vite_bridge] Received message type: %s", msg.Type) + + if err := vb.handleMessage(&msg); err != nil { + log.Errorf(vb.ctx, "[vite_bridge] Error handling message: %v", err) + } + } +} + +func (vb *ViteBridge) handleMessage(msg *ViteBridgeMessage) error { + switch msg.Type { + case "tunnel:ready": + vb.tunnelID = msg.TunnelID + log.Infof(vb.ctx, "[vite_bridge] Tunnel ID assigned: %s", vb.tunnelID) + return nil + + case "connection:request": + vb.connectionRequests <- msg + return nil + + case "fetch": + go func(fetchMsg ViteBridgeMessage) { + if err := vb.handleFetchRequest(&fetchMsg); err != nil { + log.Errorf(vb.ctx, "[vite_bridge] Error handling fetch request for %s: %v", fetchMsg.Path, err) + } + }(*msg) + return nil + + case "file:read": + // Handle file read requests in parallel like fetch requests + go func(fileReadMsg ViteBridgeMessage) { + if err := vb.handleFileReadRequest(&fileReadMsg); err != nil { + log.Errorf(vb.ctx, "[vite_bridge] Error handling file read request for %s: %v", fileReadMsg.Path, err) + } + }(*msg) + return nil + + case "hmr:message": + return vb.handleHMRMessage(msg) + + default: + log.Warnf(vb.ctx, "[vite_bridge] Unknown 
message type: %s", msg.Type) + return nil + } +} + +func (vb *ViteBridge) handleConnectionRequest(msg *ViteBridgeMessage) error { + cmdio.LogString(vb.ctx, "") + cmdio.LogString(vb.ctx, "šŸ”” Connection Request") + cmdio.LogString(vb.ctx, " User: "+msg.Viewer) + cmdio.LogString(vb.ctx, " Approve this connection? (y/n)") + + // Read from stdin with timeout to prevent indefinite blocking + inputChan := make(chan string, 1) + errChan := make(chan error, 1) + + go func() { + reader := bufio.NewReader(os.Stdin) + input, err := reader.ReadString('\n') + if err != nil { + errChan <- err + return + } + inputChan <- input + }() + + var approved bool + select { + case input := <-inputChan: + approved = strings.ToLower(strings.TrimSpace(input)) == "y" + case err := <-errChan: + return fmt.Errorf("failed to read user input: %w", err) + case <-time.After(bridgeConnTimeout): + // Default to denying after timeout + cmdio.LogString(vb.ctx, "ā±ļø Timeout waiting for response, denying connection") + approved = false + } + + response := ViteBridgeMessage{ + Type: "connection:response", + RequestID: msg.RequestID, + Viewer: msg.Viewer, + Approved: approved, + } + + responseData, err := json.Marshal(response) + if err != nil { + return fmt.Errorf("failed to marshal connection response: %w", err) + } + + // Send through channel instead of direct write + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.TextMessage, + data: responseData, + priority: 1, + }: + case <-time.After(wsWriteTimeout): + return errors.New("timeout sending connection response") + } + + if approved { + cmdio.LogString(vb.ctx, "āœ… Approved connection from "+msg.Viewer) + } else { + cmdio.LogString(vb.ctx, "āŒ Denied connection from "+msg.Viewer) + } + + return nil +} + +func (vb *ViteBridge) handleFetchRequest(msg *ViteBridgeMessage) error { + targetURL := fmt.Sprintf(localViteURL, vb.port) + msg.Path + log.Debugf(vb.ctx, "[vite_bridge] Fetch request: %s %s", msg.Method, msg.Path) + + req, err := http.NewRequestWithContext(vb.ctx, msg.Method, targetURL, nil) + if err != nil { + return fmt.Errorf("failed to create request: %w", err) + } + + resp, err := vb.httpClient.Do(req) + if err != nil { + return fmt.Errorf("failed to fetch from Vite: %w", err) + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to read response body: %w", err) + } + + log.Debugf(vb.ctx, "[vite_bridge] Fetch response: %s (status=%d, size=%d bytes)", msg.Path, resp.StatusCode, len(body)) + + headers := make(map[string]any, len(resp.Header)) + for key, values := range resp.Header { + if len(values) > 0 { + headers[key] = values[0] + } + } + + metadataResponse := ViteBridgeMessage{ + Type: "fetch:response:meta", + Path: msg.Path, + Status: resp.StatusCode, + Headers: headers, + RequestID: msg.RequestID, + } + + responseData, err := json.Marshal(metadataResponse) + if err != nil { + return fmt.Errorf("failed to marshal metadata: %w", err) + } + + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.TextMessage, + data: responseData, + priority: 1, // Normal priority + }: + case <-time.After(bridgeFetchTimeout): + return errors.New("timeout sending fetch metadata") + } + + if len(body) > 0 { + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.BinaryMessage, + data: body, + priority: 1, // Normal priority + }: + case <-time.After(bridgeFetchTimeout): + return errors.New("timeout sending fetch body") + } + } + + 
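+	// At this point the JSON "fetch:response:meta" frame (status and headers) has been
+	// queued, followed by the raw body bytes as a separate binary frame when non-empty.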
return nil +} + +const ( + allowedBasePath = "config/queries" + allowedExtension = ".sql" +) + +func (vb *ViteBridge) handleFileReadRequest(msg *ViteBridgeMessage) error { + log.Debugf(vb.ctx, "[vite_bridge] File read request: %s", msg.Path) + + if err := validateFilePath(msg.Path); err != nil { + log.Warnf(vb.ctx, "[vite_bridge] File validation failed for %s: %v", msg.Path, err) + return vb.sendFileReadError(msg.RequestID, fmt.Sprintf("Invalid file path: %v", err)) + } + + content, err := os.ReadFile(msg.Path) + + response := ViteBridgeMessage{ + Type: "file:read:response", + RequestID: msg.RequestID, + } + + if err != nil { + log.Errorf(vb.ctx, "[vite_bridge] Failed to read file %s: %v", msg.Path, err) + response.Error = err.Error() + } else { + log.Debugf(vb.ctx, "[vite_bridge] Read file %s (%d bytes)", msg.Path, len(content)) + response.Content = string(content) + } + + responseData, err := json.Marshal(response) + if err != nil { + return fmt.Errorf("failed to marshal file read response: %w", err) + } + + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.TextMessage, + data: responseData, + priority: 1, + }: + case <-time.After(wsWriteTimeout): + return errors.New("timeout sending file read response") + } + + return nil +} + +func validateFilePath(requestedPath string) error { + // Clean the path to resolve any ../ or ./ components + cleanPath := filepath.Clean(requestedPath) + + // Get absolute path + absPath, err := filepath.Abs(cleanPath) + if err != nil { + return fmt.Errorf("failed to resolve absolute path: %w", err) + } + + // Get the working directory + cwd, err := os.Getwd() + if err != nil { + return fmt.Errorf("failed to get working directory: %w", err) + } + + // Construct the allowed base directory (absolute path) + allowedDir := filepath.Join(cwd, allowedBasePath) + + // Ensure the resolved path is within the allowed directory + // Add trailing separator to prevent prefix attacks (e.g., queries-malicious/) + allowedDirWithSep := allowedDir + string(filepath.Separator) + if absPath != allowedDir && !strings.HasPrefix(absPath, allowedDirWithSep) { + return fmt.Errorf("path %s is outside allowed directory %s", absPath, allowedBasePath) + } + + // Ensure the file has the correct extension + if filepath.Ext(absPath) != allowedExtension { + return fmt.Errorf("only %s files are allowed, got: %s", allowedExtension, filepath.Ext(absPath)) + } + + // Additional check: no hidden files + if strings.HasPrefix(filepath.Base(absPath), ".") { + return errors.New("hidden files are not allowed") + } + + return nil +} + +// Helper to send error response +func (vb *ViteBridge) sendFileReadError(requestID, errorMsg string) error { + response := ViteBridgeMessage{ + Type: "file:read:response", + RequestID: requestID, + Error: errorMsg, + } + + responseData, err := json.Marshal(response) + if err != nil { + return fmt.Errorf("failed to marshal error response: %w", err) + } + + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.TextMessage, + data: responseData, + priority: 1, + }: + case <-time.After(wsWriteTimeout): + return errors.New("timeout sending file read error") + } + + return nil +} + +func (vb *ViteBridge) handleHMRMessage(msg *ViteBridgeMessage) error { + log.Debugf(vb.ctx, "[vite_bridge] HMR message received: %s", msg.Body) + + response := ViteBridgeMessage{ + Type: "hmr:client", + Body: msg.Body, + } + + responseData, err := json.Marshal(response) + if err != nil { + return fmt.Errorf("failed to marshal HMR message: 
%w", err) + } + + // Send HMR with HIGH priority so it doesn't get blocked by fetch requests + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.TextMessage, + data: responseData, + priority: 0, // HIGH PRIORITY for HMR! + }: + case <-time.After(wsWriteTimeout): + return errors.New("timeout sending HMR message") + } + + return nil +} + +func (vb *ViteBridge) handleViteHMRMessages(ctx context.Context) error { + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-vb.stopChan: + return nil + default: + } + + _, message, err := vb.hmrConn.ReadMessage() + if err != nil { + if websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway, websocket.CloseAbnormalClosure) { + log.Infof(vb.ctx, "[vite_bridge] Vite HMR connection closed, reconnecting...") + time.Sleep(time.Second) + if err := vb.connectToViteHMR(); err != nil { + return fmt.Errorf("failed to reconnect to Vite HMR: %w", err) + } + continue + } + return err + } + + response := ViteBridgeMessage{ + Type: "hmr:message", + Body: string(message), + } + + responseData, err := json.Marshal(response) + if err != nil { + log.Errorf(vb.ctx, "[vite_bridge] Failed to marshal Vite HMR message: %v", err) + continue + } + + select { + case vb.tunnelWriteChan <- prioritizedMessage{ + messageType: websocket.TextMessage, + data: responseData, + priority: 0, + }: + case <-time.After(wsWriteTimeout): + log.Errorf(vb.ctx, "[vite_bridge] Timeout sending Vite HMR message") + } + } +} + +func (vb *ViteBridge) Start() error { + appDomain, err := vb.GetAppDomain() + if err != nil { + return fmt.Errorf("failed to get app domain: %w", err) + } + + // Use retry logic for initial connection (app may not be ready yet) + if err := vb.connectToTunnelWithRetry(appDomain); err != nil { + return err + } + + readyChan := make(chan error, 1) + go func() { + for vb.tunnelID == "" { + _, message, err := vb.tunnelConn.ReadMessage() + if err != nil { + readyChan <- err + return + } + + var msg ViteBridgeMessage + if err := json.Unmarshal(message, &msg); err != nil { + continue + } + + if msg.Type == "tunnel:ready" { + vb.tunnelID = msg.TunnelID + log.Infof(vb.ctx, "[vite_bridge] Tunnel ID assigned: %s", vb.tunnelID) + readyChan <- nil + return + } + } + }() + + select { + case err := <-readyChan: + if err != nil { + return fmt.Errorf("failed waiting for tunnel ready: %w", err) + } + case <-time.After(bridgeTunnelReadyTimeout): + return errors.New("timeout waiting for tunnel ready") + } + + if err := vb.connectToViteHMR(); err != nil { + return err + } + + cmdio.LogString(vb.ctx, fmt.Sprintf("\n🌐 App URL:\n%s?dev=true\n", appDomain.String())) + cmdio.LogString(vb.ctx, fmt.Sprintf("\nšŸ”— Shareable URL:\n%s?dev=%s\n", appDomain.String(), vb.tunnelID)) + + g, gCtx := errgroup.WithContext(vb.ctx) + + // Start dedicated tunnel writer goroutine + g.Go(func() error { + if err := vb.tunnelWriter(gCtx); err != nil { + return fmt.Errorf("tunnel writer error: %w", err) + } + return nil + }) + + // Connection request handler - not in errgroup to avoid blocking other handlers + go func() { + for { + select { + case msg := <-vb.connectionRequests: + if err := vb.handleConnectionRequest(msg); err != nil { + log.Errorf(vb.ctx, "[vite_bridge] Error handling connection request: %v", err) + } + case <-gCtx.Done(): + return + case <-vb.stopChan: + return + } + } + }() + + g.Go(func() error { + if err := vb.handleTunnelMessages(gCtx); err != nil { + return fmt.Errorf("tunnel message handler error: %w", err) + } + return nil + }) + + g.Go(func() 
error { + if err := vb.handleViteHMRMessages(gCtx); err != nil { + return fmt.Errorf("vite HMR message handler error: %w", err) + } + return nil + }) + + <-gCtx.Done() + vb.Stop() + return g.Wait() +} + +func (vb *ViteBridge) Stop() { + vb.stopOnce.Do(func() { + close(vb.stopChan) + + if vb.tunnelConn != nil { + _ = vb.tunnelConn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) + vb.tunnelConn.Close() + } + + if vb.hmrConn != nil { + _ = vb.hmrConn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) + vb.hmrConn.Close() + } + }) +} diff --git a/experimental/dev/cmd/app/vite_bridge_test.go b/experimental/dev/cmd/app/vite_bridge_test.go new file mode 100644 index 0000000000..45f758563c --- /dev/null +++ b/experimental/dev/cmd/app/vite_bridge_test.go @@ -0,0 +1,370 @@ +package app + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + "time" + + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go" + "github.com/gorilla/websocket" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidateFilePath(t *testing.T) { + // Create a temporary directory structure for testing + tmpDir := t.TempDir() + oldWd, err := os.Getwd() + require.NoError(t, err) + defer func() { _ = os.Chdir(oldWd) }() + + // Change to temp directory + err = os.Chdir(tmpDir) + require.NoError(t, err) + + // Create the allowed directory + queriesDir := filepath.Join(tmpDir, "config", "queries") + err = os.MkdirAll(queriesDir, 0o755) + require.NoError(t, err) + + // Create a valid test file + validFile := filepath.Join(queriesDir, "test.sql") + err = os.WriteFile(validFile, []byte("SELECT * FROM table"), 0o644) + require.NoError(t, err) + + tests := []struct { + name string + path string + expectError bool + errorMsg string + }{ + { + name: "valid file path", + path: "config/queries/test.sql", + expectError: false, + }, + { + name: "path outside allowed directory", + path: "../../etc/passwd", + expectError: true, + errorMsg: "outside allowed directory", + }, + { + name: "wrong file extension", + path: "config/queries/test.txt", + expectError: true, + errorMsg: "only .sql files are allowed", + }, + { + name: "hidden file", + path: "config/queries/.hidden.sql", + expectError: true, + errorMsg: "hidden files are not allowed", + }, + { + name: "path traversal attempt", + path: "config/queries/../../../etc/passwd", + expectError: true, + errorMsg: "outside allowed directory", + }, + { + name: "prefix attack - similar directory name", + path: "config/queries-malicious/test.sql", + expectError: true, + errorMsg: "outside allowed directory", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateFilePath(tt.path) + if tt.expectError { + require.Error(t, err) + assert.Contains(t, err.Error(), tt.errorMsg) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestViteBridgeMessageSerialization(t *testing.T) { + tests := []struct { + name string + msg ViteBridgeMessage + }{ + { + name: "tunnel ready message", + msg: ViteBridgeMessage{ + Type: "tunnel:ready", + TunnelID: "test-tunnel-123", + }, + }, + { + name: "fetch request message", + msg: ViteBridgeMessage{ + Type: "fetch", + Path: "/src/components/ui/card.tsx", + Method: "GET", + RequestID: "req-123", + }, + }, + { + name: "connection request message", + msg: ViteBridgeMessage{ + Type: "connection:request", + 
Viewer: "user@example.com", + RequestID: "req-456", + }, + }, + { + name: "fetch response with headers", + msg: ViteBridgeMessage{ + Type: "fetch:response:meta", + Status: 200, + Headers: map[string]any{ + "Content-Type": "application/json", + }, + RequestID: "req-789", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + data, err := json.Marshal(tt.msg) + require.NoError(t, err) + + var decoded ViteBridgeMessage + err = json.Unmarshal(data, &decoded) + require.NoError(t, err) + + assert.Equal(t, tt.msg.Type, decoded.Type) + assert.Equal(t, tt.msg.TunnelID, decoded.TunnelID) + assert.Equal(t, tt.msg.Path, decoded.Path) + assert.Equal(t, tt.msg.Method, decoded.Method) + assert.Equal(t, tt.msg.RequestID, decoded.RequestID) + }) + } +} + +func TestViteBridgeHandleMessage(t *testing.T) { + ctx := cmdio.MockDiscard(context.Background()) + + w := &databricks.WorkspaceClient{} + + vb := NewViteBridge(ctx, w, "test-app", 5173) + + tests := []struct { + name string + msg *ViteBridgeMessage + expectError bool + }{ + { + name: "tunnel ready message", + msg: &ViteBridgeMessage{ + Type: "tunnel:ready", + TunnelID: "tunnel-123", + }, + expectError: false, + }, + { + name: "unknown message type", + msg: &ViteBridgeMessage{ + Type: "unknown:type", + }, + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := vb.handleMessage(tt.msg) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + + if tt.msg.Type == "tunnel:ready" { + assert.Equal(t, tt.msg.TunnelID, vb.tunnelID) + } + }) + } +} + +func TestViteBridgeHandleFileReadRequest(t *testing.T) { + // Create a temporary directory structure + tmpDir := t.TempDir() + oldWd, err := os.Getwd() + require.NoError(t, err) + defer func() { _ = os.Chdir(oldWd) }() + + err = os.Chdir(tmpDir) + require.NoError(t, err) + + queriesDir := filepath.Join(tmpDir, "config", "queries") + err = os.MkdirAll(queriesDir, 0o755) + require.NoError(t, err) + + testContent := "SELECT * FROM users WHERE id = 1" + testFile := filepath.Join(queriesDir, "test_query.sql") + err = os.WriteFile(testFile, []byte(testContent), 0o644) + require.NoError(t, err) + + t.Run("successful file read", func(t *testing.T) { + ctx := cmdio.MockDiscard(context.Background()) + w := &databricks.WorkspaceClient{} + + // Create a mock tunnel connection using httptest + var lastMessage []byte + upgrader := websocket.Upgrader{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + conn, err := upgrader.Upgrade(w, r, nil) + if err != nil { + t.Errorf("failed to upgrade: %v", err) + return + } + defer conn.Close() + + // Read the message sent by handleFileReadRequest + _, message, err := conn.ReadMessage() + if err != nil { + t.Errorf("failed to read message: %v", err) + return + } + lastMessage = message + })) + defer server.Close() + + // Connect to the mock server + wsURL := "ws" + server.URL[4:] + conn, resp, err := websocket.DefaultDialer.Dial(wsURL, nil) + require.NoError(t, err) + defer resp.Body.Close() + defer conn.Close() + + vb := NewViteBridge(ctx, w, "test-app", 5173) + vb.tunnelConn = conn + + go func() { _ = vb.tunnelWriter(ctx) }() + + msg := &ViteBridgeMessage{ + Type: "file:read", + Path: "config/queries/test_query.sql", + RequestID: "req-123", + } + + err = vb.handleFileReadRequest(msg) + require.NoError(t, err) + + // Give the message time to be sent + time.Sleep(100 * time.Millisecond) + + // Parse the response + var response 
ViteBridgeMessage + err = json.Unmarshal(lastMessage, &response) + require.NoError(t, err) + + assert.Equal(t, "file:read:response", response.Type) + assert.Equal(t, "req-123", response.RequestID) + assert.Equal(t, testContent, response.Content) + assert.Empty(t, response.Error) + }) + + t.Run("file not found", func(t *testing.T) { + ctx := cmdio.MockDiscard(context.Background()) + w := &databricks.WorkspaceClient{} + + var lastMessage []byte + upgrader := websocket.Upgrader{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + conn, err := upgrader.Upgrade(w, r, nil) + if err != nil { + t.Errorf("failed to upgrade: %v", err) + return + } + defer conn.Close() + + _, message, err := conn.ReadMessage() + if err != nil { + t.Errorf("failed to read message: %v", err) + return + } + lastMessage = message + })) + defer server.Close() + + wsURL := "ws" + server.URL[4:] + conn, resp, err := websocket.DefaultDialer.Dial(wsURL, nil) + require.NoError(t, err) + defer resp.Body.Close() + defer conn.Close() + + vb := NewViteBridge(ctx, w, "test-app", 5173) + vb.tunnelConn = conn + + go func() { _ = vb.tunnelWriter(ctx) }() + + msg := &ViteBridgeMessage{ + Type: "file:read", + Path: "config/queries/nonexistent.sql", + RequestID: "req-456", + } + + err = vb.handleFileReadRequest(msg) + require.NoError(t, err) + + // Give the message time to be sent + time.Sleep(100 * time.Millisecond) + + var response ViteBridgeMessage + err = json.Unmarshal(lastMessage, &response) + require.NoError(t, err) + + assert.Equal(t, "file:read:response", response.Type) + assert.Equal(t, "req-456", response.RequestID) + assert.NotEmpty(t, response.Error) + }) +} + +func TestViteBridgeStop(t *testing.T) { + ctx := cmdio.MockDiscard(context.Background()) + w := &databricks.WorkspaceClient{} + + vb := NewViteBridge(ctx, w, "test-app", 5173) + + // Call Stop multiple times to ensure it's idempotent + vb.Stop() + vb.Stop() + vb.Stop() + + // Verify stopChan is closed + select { + case <-vb.stopChan: + // Channel is closed, this is expected + default: + t.Error("stopChan should be closed after Stop()") + } +} + +func TestNewViteBridge(t *testing.T) { + ctx := context.Background() + w := &databricks.WorkspaceClient{} + appName := "test-app" + + vb := NewViteBridge(ctx, w, appName, 5173) + + assert.NotNil(t, vb) + assert.Equal(t, appName, vb.appName) + assert.NotNil(t, vb.httpClient) + assert.NotNil(t, vb.stopChan) + assert.NotNil(t, vb.connectionRequests) + assert.Equal(t, 10, cap(vb.connectionRequests)) +} diff --git a/experimental/dev/cmd/dev.go b/experimental/dev/cmd/dev.go new file mode 100644 index 0000000000..27679e71fc --- /dev/null +++ b/experimental/dev/cmd/dev.go @@ -0,0 +1,21 @@ +package dev + +import ( + "github.com/databricks/cli/experimental/dev/cmd/app" + "github.com/spf13/cobra" +) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "dev", + Short: "Development tools for Databricks applications", + Long: `Development tools for Databricks applications. 
+ +Provides commands for creating, developing, and deploying full-stack +Databricks applications.`, + } + + cmd.AddCommand(app.New()) + + return cmd +} diff --git a/go.mod b/go.mod index dd1a073f73..3a4c15f796 100644 --- a/go.mod +++ b/go.mod @@ -43,15 +43,32 @@ require ( // Dependencies for experimental MCP commands require github.com/google/jsonschema-go v0.4.2 // MIT +require ( + github.com/charmbracelet/huh v0.8.0 + github.com/charmbracelet/lipgloss v1.1.0 +) + require ( cloud.google.com/go/auth v0.16.5 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect cloud.google.com/go/compute/metadata v0.8.4 // indirect github.com/ProtonMail/go-crypto v1.1.6 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect + github.com/atotto/clipboard v0.1.4 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/catppuccin/go v0.3.0 // indirect + github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7 // indirect + github.com/charmbracelet/bubbletea v1.3.6 // indirect + github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect + github.com/charmbracelet/x/ansi v0.9.3 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13 // indirect + github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e // indirect github.com/cloudflare/circl v1.6.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect @@ -62,9 +79,18 @@ require ( github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/termenv v0.16.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/rivo/uniseg v0.4.7 // indirect github.com/stretchr/objx v0.5.2 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect github.com/zclconf/go-cty v1.16.4 // indirect go.opentelemetry.io/auto/sdk v1.2.1 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 // indirect diff --git a/go.sum b/go.sum index 8be5ce6c28..397fcc914b 100644 --- a/go.sum +++ b/go.sum @@ -8,6 +8,8 @@ dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk= github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ= +github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE= github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= 
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= @@ -16,8 +18,44 @@ github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNx github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= +github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4= +github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/aymanbagabas/go-udiff v0.3.1 h1:LV+qyBQ2pqe0u42ZsUEtPiCaUoqgA9gYRDs3vj1nolY= +github.com/aymanbagabas/go-udiff v0.3.1/go.mod h1:G0fsKmG+P6ylD0r6N/KgQD/nWzgfnl8ZBcNLgcbrw8E= github.com/briandowns/spinner v1.23.1 h1:t5fDPmScwUjozhDj4FA46p5acZWIPXYE30qW2Ptu650= github.com/briandowns/spinner v1.23.1/go.mod h1:LaZeM4wm2Ywy6vO571mvhQNRcWfRUnXOs0RcKV0wYKM= +github.com/catppuccin/go v0.3.0 h1:d+0/YicIq+hSTo5oPuRi5kOpqkVA5tAsU6dNhvRu+aY= +github.com/catppuccin/go v0.3.0/go.mod h1:8IHJuMGaUUjQM82qBrGNBv7LFq6JI3NnQCF6MOlZjpc= +github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7 h1:JFgG/xnwFfbezlUnFMJy0nusZvytYysV4SCS2cYbvws= +github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7/go.mod h1:ISC1gtLcVilLOf23wvTfoQuYbW2q0JevFxPfUzZ9Ybw= +github.com/charmbracelet/bubbletea v1.3.6 h1:VkHIxPJQeDt0aFJIsVxw8BQdh/F/L2KKZGsK6et5taU= +github.com/charmbracelet/bubbletea v1.3.6/go.mod h1:oQD9VCRQFF8KplacJLo28/jofOI2ToOfGYeFgBBxHOc= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk= +github.com/charmbracelet/huh v0.8.0 h1:Xz/Pm2h64cXQZn/Jvele4J3r7DDiqFCNIVteYukxDvY= +github.com/charmbracelet/huh v0.8.0/go.mod h1:5YVc+SlZ1IhQALxRPpkGwwEKftN/+OlJlnJYlDRFqN4= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= +github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0= +github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE= +github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k= +github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= +github.com/charmbracelet/x/conpty v0.1.0 h1:4zc8KaIcbiL4mghEON8D72agYtSeIgq8FSThSPQIb+U= +github.com/charmbracelet/x/conpty v0.1.0/go.mod h1:rMFsDJoDwVmiYM10aD4bH2XiRgwI7NYJtQgl5yskjEQ= +github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86 h1:JSt3B+U9iqk37QUU2Rvb6DSBYRLtWqFqfxf8l5hOZUA= +github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86/go.mod h1:2P0UgXMEa6TsToMSuFqKFQR+fZTO9CNGUNokkPatT/0= +github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ= +github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod 
h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U= +github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 h1:qko3AQ4gK1MTS/de7F5hPGx6/k1u0w4TeYmBFwzYVP4= +github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0/go.mod h1:pBhA0ybfXv6hDjQUZ7hk1lVxBiUbupdw5R31yPUViVQ= +github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY= +github.com/charmbracelet/x/termios v0.1.1/go.mod h1:rB7fnv1TgOPOyyKRJ9o+AsTU/vK5WHJ2ivHeut/Pcwo= +github.com/charmbracelet/x/xpty v0.1.2 h1:Pqmu4TEJ8KeA9uSkISKMU3f+C1F6OGBn8ABuGlqCbtI= +github.com/charmbracelet/x/xpty v0.1.2/go.mod h1:XK2Z0id5rtLWcpeNiMYBccNNBrP2IJnzHI0Lq13Xzq4= github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8= @@ -27,6 +65,8 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s= +github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= github.com/databricks/databricks-sdk-go v0.96.0 h1:tpR3GSwkM3Vd6P9KfYEXAJiKZ1KLJ2T2+J3tF8jxlEk= @@ -34,8 +74,12 @@ github.com/databricks/databricks-sdk-go v0.96.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcI github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= @@ -101,6 +145,8 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lucasb-eyer/go-colorful v1.2.0 
h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -111,6 +157,18 @@ github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOA github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4= +github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= +github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= github.com/nwidger/jsoncolor v0.3.2 h1:rVJJlwAWDJShnbTYOQ5RM7yTA20INyKXlJ/fg4JMhHQ= github.com/nwidger/jsoncolor v0.3.2/go.mod h1:Cs34umxLbJvgBMnVNVqhji9BhoT/N/KinHqZptQ7cf4= github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= @@ -121,6 +179,9 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= @@ -143,6 +204,8 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e 
h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= github.com/zclconf/go-cty v1.16.4 h1:QGXaag7/7dCzb+odlGrgr+YmYZFaOCMW6DEpS+UD1eE= github.com/zclconf/go-cty v1.16.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= @@ -176,6 +239,7 @@ golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=