1 change: 1 addition & 0 deletions experimental/aitools/cmd/aitools.go
@@ -83,6 +83,7 @@ The server communicates via stdio using the Model Context Protocol.`,
	cmd.Flags().StringVar(&warehouseID, "warehouse-id", "", "Databricks SQL Warehouse ID")

	cmd.AddCommand(newInstallCmd())
	cmd.AddCommand(newSkillsCmd())
	cmd.AddCommand(newToolsCmd())

	return cmd
175 changes: 175 additions & 0 deletions experimental/aitools/cmd/skills.go
@@ -0,0 +1,175 @@
package mcp

import (
"context"
"fmt"
"io/fs"
"os"
"path/filepath"

"github.com/databricks/cli/experimental/aitools/lib/agent_skills"
appkitdocs "github.com/databricks/cli/experimental/aitools/templates/appkit"
"github.com/databricks/cli/libs/cmdio"
"github.com/fatih/color"
"github.com/spf13/cobra"
)

func newSkillsCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "skills",
		Short: "Manage Databricks skills for Claude Code",
		Long:  `Manage Databricks skills that can be installed to ~/.claude/skills/ for use with Claude Code.`,
	}

	cmd.AddCommand(newSkillsListCmd())
	cmd.AddCommand(newSkillsInstallCmd())

	return cmd
}

func newSkillsListCmd() *cobra.Command {
	return &cobra.Command{
		Use:   "list",
		Short: "List available skills",
		RunE: func(cmd *cobra.Command, args []string) error {
			return listSkills(cmd.Context())
		},
	}
}

func newSkillsInstallCmd() *cobra.Command {
	return &cobra.Command{
		Use:   "install",
		Short: "Install all Databricks skills for Claude Code",
		Long:  `Install all Databricks skills to ~/.claude/skills/ for use with Claude Code.`,
		RunE: func(cmd *cobra.Command, args []string) error {
			return installAllSkills(cmd.Context())
		},
	}
}

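// getSkillNames returns the name of every embedded skill; each top-level
// directory in SkillsFS is one installable skill.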
func getSkillNames() ([]string, error) {
	entries, err := fs.ReadDir(agent_skills.SkillsFS, ".")
	if err != nil {
		return nil, fmt.Errorf("failed to read skills: %w", err)
	}

	var names []string
	for _, entry := range entries {
		if entry.IsDir() {
			names = append(names, entry.Name())
		}
	}
	return names, nil
}

func listSkills(ctx context.Context) error {
	names, err := getSkillNames()
	if err != nil {
		return err
	}

	cmdio.LogString(ctx, "Available skills:")
	cmdio.LogString(ctx, "")
	for _, name := range names {
		cmdio.LogString(ctx, "  "+name)
	}
	cmdio.LogString(ctx, "")
	cmdio.LogString(ctx, "Install all with: databricks experimental aitools skills install")
	return nil
}

func installAllSkills(ctx context.Context) error {
	names, err := getSkillNames()
	if err != nil {
		return err
	}

	for _, name := range names {
		if err := installSkill(ctx, name); err != nil {
			return err
		}
	}
	return nil
}

func installSkill(ctx context.Context, skillName string) error {
	skillFS, err := fs.Sub(agent_skills.SkillsFS, skillName)
	if err != nil {
		return fmt.Errorf("skill %q not found", skillName)
	}

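	// A skill must provide a SKILL.md at its root; refuse to install one without it.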
	if _, err := fs.Stat(skillFS, "SKILL.md"); err != nil {
		return fmt.Errorf("skill %q not found", skillName)
	}

	homeDir, err := os.UserHomeDir()
	if err != nil {
		return fmt.Errorf("failed to get home directory: %w", err)
	}

	destDir := filepath.Join(homeDir, ".claude", "skills", skillName)

	if err := os.MkdirAll(destDir, 0o755); err != nil {
		return fmt.Errorf("failed to create directory: %w", err)
	}

	// copy skill-specific files (SKILL.md, authentication.md, etc.)
	err = fs.WalkDir(skillFS, ".", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		destPath := filepath.Join(destDir, path)

		if d.IsDir() {
			return os.MkdirAll(destPath, 0o755)
		}

		content, err := fs.ReadFile(skillFS, path)
		if err != nil {
			return fmt.Errorf("failed to read %s: %w", path, err)
		}

		return os.WriteFile(destPath, content, 0o644)
	})
	if err != nil {
		return fmt.Errorf("failed to copy skill files: %w", err)
	}

	// copy shared docs from appkit template
	if err := copySharedDocs(destDir); err != nil {
		return fmt.Errorf("failed to copy shared docs: %w", err)
	}

	cmdio.LogString(ctx, color.GreenString("✓ Installed %q to %s", skillName, destDir))
	return nil
}

func copySharedDocs(destDir string) error {
	refsDir := filepath.Join(destDir, "references")
	if err := os.MkdirAll(refsDir, 0o755); err != nil {
		return err
	}

	// docs from appkit template to copy as skill references
	sharedDocs := []string{
		"appkit-sdk.md",
		"frontend.md",
		"sql-queries.md",
		"testing.md",
		"trpc.md",
	}

	for _, doc := range sharedDocs {
		content, err := appkitdocs.DocsFS.ReadFile("template/{{.project_name}}/docs/" + doc)
		if err != nil {
			return fmt.Errorf("failed to read %s: %w", doc, err)
		}
		if err := os.WriteFile(filepath.Join(refsDir, doc), content, 0o644); err != nil {
			return fmt.Errorf("failed to write %s: %w", doc, err)
		}
	}

	return nil
}
94 changes: 94 additions & 0 deletions experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md
@@ -0,0 +1,94 @@
---
name: databricks-apps
description: Build full-stack TypeScript apps on Databricks. Use when asked to create dashboards, data apps, analytics tools, or visualizations that query Databricks SQL. Provides project scaffolding, SQL data access patterns, and deployment commands. Invoke BEFORE starting implementation.
metadata:
  version: "0.1.0"
  min_cli_version: "0.250.0"
---

# Databricks Apps Development

Build TypeScript apps that query Databricks SQL warehouses and deploy to Databricks Apps.

## Workflow

1. **Verify auth**: `databricks auth profiles`
2. **Find warehouse**: `databricks sql warehouses list`
3. **Explore data**: `databricks experimental aitools tools discover-schema CATALOG.SCHEMA.TABLE`
4. **Scaffold project**: `databricks experimental aitools tools init-template --name my-app --description "..."`
5. **Develop**: `cd my-app && npm install && npm run dev`
6. **Validate**: `databricks experimental aitools tools validate ./`
7. **Deploy**: `databricks experimental aitools tools deploy` (requires user permission)

## Data Exploration

```bash
# list catalogs/schemas/tables
databricks catalogs list
databricks schemas list <catalog>
databricks tables list <catalog> <schema>

# discover table schema (columns, types, sample data)
databricks experimental aitools tools discover-schema CATALOG.SCHEMA.TABLE

# test queries
databricks experimental aitools tools query "SELECT * FROM catalog.schema.table LIMIT 10"
```

Note: Use separate arguments for `catalogs/schemas/tables` commands. Dot notation only works in `discover-schema` and `query`.

## Project Structure

After scaffolding:
- `server/` - Node.js backend with App Kit and tRPC
- `client/` - React frontend with App Kit hooks
- `config/queries/` - SQL query files
- `shared/` - Shared TypeScript types

## Adding Visualizations

**Step 1**: Create SQL file in `config/queries/my_data.sql`
```sql
SELECT category, COUNT(*) as count FROM my_table GROUP BY category
```

**Step 2**: Define schema in `config/queries/schema.ts`
```typescript
import { z } from 'zod';

export const querySchemas = {
  my_data: z.array(z.object({ category: z.string(), count: z.number() })),
};
```

**Step 3**: Use visualization component
```typescript
import { BarChart } from '@databricks/appkit-ui/react';
<BarChart queryKey="my_data" parameters={{}} />
```

Run `npm run dev` to regenerate types after schema changes.

## Key References

Load these when implementing specific features:
- [SQL Queries](references/sql-queries.md) - query files, schemas, parameterization, sql.* helpers
- [AppKit SDK](references/appkit-sdk.md) - imports, server setup, useAnalyticsQuery hook
- [Frontend](references/frontend.md) - visualization components, styling, Radix constraints
- [tRPC](references/trpc.md) - custom endpoints for mutations, Databricks APIs
- [Testing](references/testing.md) - vitest unit tests, Playwright smoke tests
- [Authentication](references/authentication.md) - profiles, OAuth, troubleshooting

## Critical Rules

1. **SQL for data retrieval**: Always use `config/queries/` + visualization components. Never use tRPC for SELECT queries.
2. **Numeric types**: All SQL numbers return as strings in JSON. Always convert, e.g. `Number(row.amount).toFixed(2)` (see the sketch after this list).
3. **Type imports**: Use `import type { ... }` for type-only imports (verbatimModuleSyntax is enabled).
4. **App name**: Must be ≤26 characters (dev- prefix adds 4 chars, max 30 total).
5. **Validate before deploy**: Always run `databricks experimental aitools tools validate ./` first.
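
A minimal sketch of rules 2 and 3 together (the row shape and paths are illustrative, not part of the generated project):

```typescript
// Rule 3: with verbatimModuleSyntax enabled, type-only imports need
// `import type`, e.g.: import type { QuerySchemas } from '../config/queries/schema';
// (path illustrative).

// Rule 2: SQL numbers are serialized as strings in JSON, so convert before use.
// This row shape is illustrative; real row types follow config/queries/schema.ts.
type SalesRow = { category: string; amount: string };

export function formatAmount(row: SalesRow): string {
  return Number(row.amount).toFixed(2);
}
```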

## Decision Tree

- **Display data from SQL?**
  - Chart/Table → Use `BarChart`, `LineChart`, `DataTable` components
  - Custom layout (KPIs, cards) → Use `useAnalyticsQuery` hook (sketched below)
- **Call Databricks API?** → Use tRPC (serving endpoints, MLflow, Jobs API)
- **Modify data?** → Use tRPC mutations
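
For the custom-layout branch, a hedged sketch of a KPI built on `useAnalyticsQuery` (the exact import path, hook signature, and result shape are assumptions here; confirm them in [AppKit SDK](references/appkit-sdk.md)):

```typescript
import { useAnalyticsQuery } from '@databricks/appkit-ui/react';

// Assumed contract: the first argument names a file in config/queries/,
// and rows match the corresponding entry in config/queries/schema.ts.
export function CategoryTotal() {
  const { data, isLoading } = useAnalyticsQuery('my_data', {});
  if (isLoading || !data) return <span>Loading…</span>;

  // SQL numbers arrive as strings (Critical Rule 2), so convert explicitly.
  const rows = data as { category: string; count: string }[];
  const total = rows.reduce((sum, row) => sum + Number(row.count), 0);
  return <strong>Total: {total}</strong>;
}
```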
52 changes: 52 additions & 0 deletions experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md
@@ -0,0 +1,52 @@
# Authentication

## Check Status

```bash
databricks auth profiles
```

## Configure Profile

```bash
databricks configure --profile <name>
```

## OAuth Login

```bash
databricks auth login --profile <name> --host <workspace-url>
```

Browser-based OAuth. Recommended for development.

## Profile Switching

```bash
# single command
DATABRICKS_CONFIG_PROFILE=<name> databricks <command>

# or flag
databricks --profile <name> <command>
```

## Environment Variables

| Variable | Purpose |
|----------|---------|
| `DATABRICKS_HOST` | Workspace URL |
| `DATABRICKS_CONFIG_PROFILE` | Profile name |
| `DATABRICKS_WAREHOUSE_ID` | Default warehouse |
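
From app code these are ordinary environment variables; a minimal Node.js sketch (whether the App Kit server also picks them up automatically is not covered here):

```typescript
// Read Databricks connection settings from the environment.
const host = process.env.DATABRICKS_HOST;
const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID;

if (!host || !warehouseId) {
  throw new Error('Set DATABRICKS_HOST and DATABRICKS_WAREHOUSE_ID before starting.');
}
```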

## Troubleshooting

| Issue | Solution |
|-------|----------|
| No profiles | `databricks configure --profile <name>` |
| Token expired | `databricks auth login --profile <name> --host <url>` |
| Wrong workspace | Check `DATABRICKS_CONFIG_PROFILE` or use `--profile` |
| Silent auth fail | `databricks auth profiles` to check status |

## New Account

Free account: https://docs.databricks.com/getting-started/free-edition
8 changes: 8 additions & 0 deletions experimental/aitools/lib/agent_skills/embed.go
@@ -0,0 +1,8 @@
package agent_skills

import "embed"

// SkillsFS embeds all installable agent skills. The all: prefix makes
// go:embed include files whose names begin with "." or "_", which are
// otherwise skipped.
//
//go:embed all:databricks-apps
var SkillsFS embed.FS
8 changes: 8 additions & 0 deletions experimental/aitools/templates/appkit/docs.go
@@ -0,0 +1,8 @@
package appkit

import "embed"

// DocsFS embeds the appkit template documentation. The {{.project_name}}
// segment is a literal directory name inside the template tree, not an
// expanded template value.
//
//go:embed template/{{.project_name}}/docs/*.md
var DocsFS embed.FS