From 9448fc09612c31b98c7f721ee6bb1d96c687caf8 Mon Sep 17 00:00:00 2001 From: Arseny Kravchenko Date: Tue, 13 Jan 2026 17:45:49 +0100 Subject: [PATCH 1/2] aitools: add skills install command for Claude Code Add `databricks experimental aitools skills` subcommand: - `skills list` - list available skills - `skills install` - install all skills to ~/.claude/skills/ Includes databricks-apps skill with reference docs synced from the appkit template. --- experimental/aitools/cmd/aitools.go | 1 + experimental/aitools/cmd/skills.go | 140 +++++++++++++ .../lib/agent_skills/databricks-apps/SKILL.md | 94 +++++++++ .../databricks-apps/references/appkit-sdk.md | 86 ++++++++ .../references/authentication.md | 52 +++++ .../databricks-apps/references/frontend.md | 108 ++++++++++ .../databricks-apps/references/sql-queries.md | 195 ++++++++++++++++++ .../databricks-apps/references/testing.md | 58 ++++++ .../databricks-apps/references/trpc.md | 95 +++++++++ .../aitools/lib/agent_skills/embed.go | 8 + 10 files changed, 837 insertions(+) create mode 100644 experimental/aitools/cmd/skills.go create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md create mode 100644 experimental/aitools/lib/agent_skills/embed.go diff --git a/experimental/aitools/cmd/aitools.go b/experimental/aitools/cmd/aitools.go index 7da47306c0..1658a49bd0 100644 --- a/experimental/aitools/cmd/aitools.go +++ b/experimental/aitools/cmd/aitools.go @@ -83,6 +83,7 @@ The server communicates via stdio using the Model Context Protocol.`, cmd.Flags().StringVar(&warehouseID, "warehouse-id", "", "Databricks SQL Warehouse ID") cmd.AddCommand(newInstallCmd()) + cmd.AddCommand(newSkillsCmd()) cmd.AddCommand(newToolsCmd()) return cmd diff --git a/experimental/aitools/cmd/skills.go b/experimental/aitools/cmd/skills.go new file mode 100644 index 0000000000..9284a43ef3 --- /dev/null +++ b/experimental/aitools/cmd/skills.go @@ -0,0 +1,140 @@ +package mcp + +import ( + "context" + "fmt" + "io/fs" + "os" + "path/filepath" + + "github.com/databricks/cli/experimental/aitools/lib/agent_skills" + "github.com/databricks/cli/libs/cmdio" + "github.com/fatih/color" + "github.com/spf13/cobra" +) + +func newSkillsCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "skills", + Short: "Manage Databricks skills for Claude Code", + Long: `Manage Databricks skills that can be installed to ~/.claude/skills/ for use with Claude Code.`, + } + + cmd.AddCommand(newSkillsListCmd()) + cmd.AddCommand(newSkillsInstallCmd()) + + return cmd +} + +func newSkillsListCmd() *cobra.Command { + return &cobra.Command{ + Use: "list", + Short: "List available skills", + RunE: func(cmd *cobra.Command, args []string) error { + return listSkills(cmd.Context()) + }, + } +} + +func newSkillsInstallCmd() *cobra.Command { + return &cobra.Command{ + Use: "install", + Short: "Install all Databricks skills for Claude Code", + Long: `Install all Databricks skills to ~/.claude/skills/ for use with Claude 
Code.`, + RunE: func(cmd *cobra.Command, args []string) error { + return installAllSkills(cmd.Context()) + }, + } +} + +func getSkillNames() ([]string, error) { + entries, err := fs.ReadDir(agent_skills.SkillsFS, ".") + if err != nil { + return nil, fmt.Errorf("failed to read skills: %w", err) + } + + var names []string + for _, entry := range entries { + if entry.IsDir() { + names = append(names, entry.Name()) + } + } + return names, nil +} + +func listSkills(ctx context.Context) error { + names, err := getSkillNames() + if err != nil { + return err + } + + cmdio.LogString(ctx, "Available skills:") + cmdio.LogString(ctx, "") + for _, name := range names { + cmdio.LogString(ctx, " "+name) + } + cmdio.LogString(ctx, "") + cmdio.LogString(ctx, "Install all with: databricks experimental aitools skills install") + return nil +} + +func installAllSkills(ctx context.Context) error { + names, err := getSkillNames() + if err != nil { + return err + } + + for _, name := range names { + if err := installSkill(ctx, name); err != nil { + return err + } + } + return nil +} + +func installSkill(ctx context.Context, skillName string) error { + skillFS, err := fs.Sub(agent_skills.SkillsFS, skillName) + if err != nil { + return fmt.Errorf("skill %q not found", skillName) + } + + if _, err := fs.Stat(skillFS, "SKILL.md"); err != nil { + return fmt.Errorf("skill %q not found", skillName) + } + + homeDir, err := os.UserHomeDir() + if err != nil { + return fmt.Errorf("failed to get home directory: %w", err) + } + + destDir := filepath.Join(homeDir, ".claude", "skills", skillName) + + if err := os.MkdirAll(destDir, 0o755); err != nil { + return fmt.Errorf("failed to create directory: %w", err) + } + + err = fs.WalkDir(skillFS, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + destPath := filepath.Join(destDir, path) + + if d.IsDir() { + return os.MkdirAll(destPath, 0o755) + } + + content, err := fs.ReadFile(skillFS, path) + if err != nil { + return fmt.Errorf("failed to read %s: %w", path, err) + } + + return os.WriteFile(destPath, content, 0o644) + }) + if err != nil { + return fmt.Errorf("failed to copy skill files: %w", err) + } + + cmdio.LogString(ctx, color.GreenString("✓ Installed %q to %s", skillName, destDir)) + return nil +} diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md b/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md new file mode 100644 index 0000000000..e82a6fcf0f --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md @@ -0,0 +1,94 @@ +--- +name: databricks-apps +description: Build full-stack TypeScript apps on Databricks. Use when asked to create dashboards, data apps, analytics tools, or visualizations that query Databricks SQL. Provides project scaffolding, SQL data access patterns, and deployment commands. Invoke BEFORE starting implementation. +metadata: + version: "0.1.0" + min_cli_version: "0.250.0" +--- + +# Databricks Apps Development + +Build TypeScript apps that query Databricks SQL warehouses and deploy to Databricks Apps. + +## Workflow + +1. **Verify auth**: `databricks auth profiles` +2. **Find warehouse**: `databricks sql warehouses list` +3. **Explore data**: `databricks experimental aitools tools discover-schema CATALOG.SCHEMA.TABLE` +4. **Scaffold project**: `databricks experimental aitools tools init-template --name my-app --description "..."` +5. **Develop**: `cd my-app && npm install && npm run dev` +6. 
**Validate**: `databricks experimental aitools tools validate ./` +7. **Deploy**: `databricks experimental aitools tools deploy` (requires user permission) + +## Data Exploration + +```bash +# list catalogs/schemas/tables +databricks catalogs list +databricks schemas list +databricks tables list + +# discover table schema (columns, types, sample data) +databricks experimental aitools tools discover-schema CATALOG.SCHEMA.TABLE + +# test queries +databricks experimental aitools tools query "SELECT * FROM catalog.schema.table LIMIT 10" +``` + +Note: Use separate arguments for `catalogs/schemas/tables` commands. Dot notation only works in `discover-schema` and `query`. + +## Project Structure + +After scaffolding: +- `server/` - Node.js backend with App Kit and tRPC +- `client/` - React frontend with App Kit hooks +- `config/queries/` - SQL query files +- `shared/` - Shared TypeScript types + +## Adding Visualizations + +**Step 1**: Create SQL file in `config/queries/my_data.sql` +```sql +SELECT category, COUNT(*) as count FROM my_table GROUP BY category +``` + +**Step 2**: Define schema in `config/queries/schema.ts` +```typescript +export const querySchemas = { + my_data: z.array(z.object({ category: z.string(), count: z.number() })), +}; +``` + +**Step 3**: Use visualization component +```typescript +import { BarChart } from '@databricks/appkit-ui/react'; + +``` + +Run `npm run dev` to regenerate types after schema changes. + +## Key References + +Load these when implementing specific features: +- [SQL Queries](references/sql-queries.md) - query files, schemas, parameterization, sql.* helpers +- [AppKit SDK](references/appkit-sdk.md) - imports, server setup, useAnalyticsQuery hook +- [Frontend](references/frontend.md) - visualization components, styling, Radix constraints +- [tRPC](references/trpc.md) - custom endpoints for mutations, Databricks APIs +- [Testing](references/testing.md) - vitest unit tests, Playwright smoke tests +- [Authentication](references/authentication.md) - profiles, OAuth, troubleshooting + +## Critical Rules + +1. **SQL for data retrieval**: Always use `config/queries/` + visualization components. Never use tRPC for SELECT queries. +2. **Numeric types**: All SQL numbers return as strings in JSON. Always convert: `Number(row.amount).toFixed(2)` +3. **Type imports**: Use `import type { ... }` for type-only imports (verbatimModuleSyntax is enabled). +4. **App name**: Must be ≤26 characters (dev- prefix adds 4 chars, max 30 total). +5. **Validate before deploy**: Always run `databricks experimental aitools tools validate ./` first. + +## Decision Tree + +- **Display data from SQL?** + - Chart/Table → Use `BarChart`, `LineChart`, `DataTable` components + - Custom layout (KPIs, cards) → Use `useAnalyticsQuery` hook +- **Call Databricks API?** → Use tRPC (serving endpoints, MLflow, Jobs API) +- **Modify data?** → Use tRPC mutations diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md new file mode 100644 index 0000000000..5ab00768e1 --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md @@ -0,0 +1,86 @@ +# Databricks App Kit SDK + +## TypeScript Import Rules + +This template uses strict TypeScript settings with `verbatimModuleSyntax: true`. **Always use `import type` for type-only imports**. + +Template enforces `noUnusedLocals` - remove unused imports immediately or build fails. 
+ +```typescript +// ✅ CORRECT - use import type for types +import type { MyInterface, MyType } from '../../shared/types'; + +// ❌ WRONG - will fail compilation +import { MyInterface, MyType } from '../../shared/types'; +``` + +## Server Setup + +```typescript +import { createApp, server, analytics } from '@databricks/app-kit'; + +const app = await createApp({ + plugins: [ + server({ autoStart: false }), + analytics(), + ], +}); + +// Extend with custom tRPC endpoints if needed +app.server.extend((express: Application) => { + express.use('/trpc', [appRouterMiddleware()]); +}); + +await app.server.start(); +``` + +## useAnalyticsQuery Hook + +**ONLY use when displaying data in a custom way that isn't a chart or table.** + +Use cases: +- Custom HTML layouts (cards, lists, grids) +- Summary statistics and KPIs +- Conditional rendering based on data values +- Data that needs transformation before display + +```typescript +import { useAnalyticsQuery, Skeleton } from '@databricks/app-kit-ui/react'; + +interface QueryResult { column_name: string; value: number; } + +function CustomDisplay() { + const { data, loading, error } = useAnalyticsQuery('query_name', { + start_date: sql.date(Date.now()), + category: sql.string("tools") + }); + + if (loading) return ; + if (error) return
<div>Error: {error}</div>;

  return (
    <div>
      {data?.map(row => (
        <div key={row.column_name}>
          <span>{row.column_name}</span>
          <span>{row.value}</span>
        </div>
      ))}
    </div>
+ ); +} +``` + +**API:** + +```typescript +const { data, loading, error } = useAnalyticsQuery( + queryName: string, // SQL file name without .sql extension + params: Record // Query parameters +); +// Returns: { data: T | null, loading: boolean, error: string | null } +``` + +**NOT supported:** +- `enabled` - Query always executes on mount. Use conditional rendering: `{selectedId && }` +- `refetch` - Not available. Re-mount component to re-query. diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md new file mode 100644 index 0000000000..a990b7bf9f --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md @@ -0,0 +1,52 @@ +# Authentication + +## Check Status + +```bash +databricks auth profiles +``` + +## Configure Profile + +```bash +databricks configure --profile +``` + +## OAuth Login + +```bash +databricks auth login --profile --host +``` + +Browser-based OAuth. Recommended for development. + +## Profile Switching + +```bash +# single command +DATABRICKS_CONFIG_PROFILE= databricks + +# or flag +databricks --profile +``` + +## Environment Variables + +| Variable | Purpose | +|----------|---------| +| `DATABRICKS_HOST` | Workspace URL | +| `DATABRICKS_CONFIG_PROFILE` | Profile name | +| `DATABRICKS_WAREHOUSE_ID` | Default warehouse | + +## Troubleshooting + +| Issue | Solution | +|-------|----------| +| No profiles | `databricks configure --profile ` | +| Token expired | `databricks auth login --profile --host ` | +| Wrong workspace | Check `DATABRICKS_CONFIG_PROFILE` or use `--profile` | +| Silent auth fail | `databricks auth profiles` to check status | + +## New Account + +Free account: https://docs.databricks.com/getting-started/free-edition diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md new file mode 100644 index 0000000000..a270b46b9e --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md @@ -0,0 +1,108 @@ +# Frontend Guidelines + +## Visualization Components + +Components from `@databricks/appkit-ui/react` handle data fetching, loading states, and error handling internally. + +Available: `AreaChart`, `BarChart`, `LineChart`, `PieChart`, `RadarChart`, `DataTable` + +**Basic Usage:** + +```typescript +import { BarChart, LineChart, DataTable, Card, CardContent, CardHeader, CardTitle } from '@databricks/appkit-ui/react'; +import { sql } from "@databricks/appkit-ui/js"; + +function MyDashboard() { + return ( +
<div className="grid gap-4">
      <Card>
        <CardHeader>
          <CardTitle>Sales by Region</CardTitle>
        </CardHeader>
        <CardContent>
          <BarChart queryKey="sales_by_region" />
        </CardContent>
      </Card>

      <Card>
        <CardHeader>
          <CardTitle>Revenue Trend</CardTitle>
        </CardHeader>
        <CardContent>
          <LineChart queryKey="revenue_trend" />
        </CardContent>
      </Card>
    </div>
+ ); +} +``` + +Components automatically fetch data, show loading states, display errors, and render with sensible defaults. + +**Custom Visualization (Recharts):** + +```typescript +import { BarChart } from '@databricks/appkit-ui/react'; +import { Bar, XAxis, YAxis, CartesianGrid, Tooltip, Legend } from 'recharts'; + + + + + + + + + + +``` + +Databricks brand colors: `['#40d1f5', '#4462c9', '#EB1600', '#0B2026', '#4A4A4A', '#353a4a']` + +**❌ Don't double-fetch:** + +```typescript +// WRONG - redundant fetch +const { data } = useAnalyticsQuery('sales_data', {}); +return ; + +// CORRECT - let component handle it +return ; +``` + +## Layout Structure + +```tsx +
<div>
  <header>
    <h1>Page Title</h1>
  </header>
  <section>{/* form inputs */}</section>
  <section>{/* list items */}</section>
</div>
+``` + +## Component Organization + +- Shared UI components: `@databricks/appkit-ui/react` +- Feature components: `client/src/components/FeatureName.tsx` +- Split components when logic exceeds ~100 lines or component is reused + +## Radix UI Constraints + +- `SelectItem` cannot have `value=""`. Use sentinel value like `"all"` for "show all" options. + +## Map Libraries (react-leaflet) + +For maps with React 19, use react-leaflet v5: + +```bash +npm install react-leaflet@^5.0.0 leaflet @types/leaflet +``` + +```typescript +import 'leaflet/dist/leaflet.css'; +``` + +## Best Practices + +- Use shadcn/radix components (Button, Input, Card, etc.) for consistent UI, import them from `@databricks/appkit-ui/react`. +- **Use skeleton loaders**: Always use `` components instead of plain "Loading..." text +- Define result types in `shared/types.ts` for reuse between frontend and backend +- Handle nullable fields: `value={field || ''}` for inputs +- Type callbacks explicitly: `onChange={(e: React.ChangeEvent) => ...}` +- Forms should have loading states: `disabled={isLoading}` +- Show empty states with helpful text when no data exists diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md new file mode 100644 index 0000000000..2db77f0bfb --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md @@ -0,0 +1,195 @@ +# SQL Query Files + +**IMPORTANT**: ALWAYS use SQL files in `config/queries/` for data retrieval. NEVER use tRPC for SQL queries. + +- Store ALL SQL queries in `config/queries/` directory +- Name files descriptively: `trip_statistics.sql`, `user_metrics.sql`, `sales_by_region.sql` +- Reference by filename (without extension) in `useAnalyticsQuery` or directly in a visualization component passing it as `queryKey` +- App Kit automatically executes queries against configured Databricks warehouse +- Benefits: Built-in caching, proper connection pooling, better performance + +## Query Schemas + +Define the shape of QUERY RESULTS (not input parameters) in `config/queries/schema.ts` using Zod schemas. + +- **These schemas validate the COLUMNS RETURNED by SQL queries** +- Input parameters are passed separately to `useAnalyticsQuery()` as the second argument +- Schema field names must match your SQL SELECT column names/aliases + +Example: + +```typescript +import { z } from 'zod'; + +export const querySchemas = { + mocked_sales: z.array( + z.object({ + max_month_num: z.number().min(1).max(12), + }) + ), + + hello_world: z.array( + z.object({ + value: z.string(), + }) + ), +}; +``` + +**IMPORTANT: Refreshing Type Definitions** + +After adding or modifying query schemas in `config/queries/schema.ts`: + +1. **DO NOT** manually edit `client/src/appKitTypes.d.ts` - this file is auto-generated +2. Run `npm run dev` to automatically regenerate the TypeScript type definitions +3. The dev server will scan your SQL files and schema definitions and update `appKitTypes.d.ts` accordingly + +## SQL Type Handling (Critical) + +**ALL numeric values from Databricks SQL are returned as STRINGS in JSON responses.** This includes results from `ROUND()`, `AVG()`, `SUM()`, `COUNT()`, etc. 
Always convert before using numeric methods: + +```typescript +// ❌ WRONG - fails at runtime +{row.total_amount.toFixed(2)} + +// ✅ CORRECT - convert to number first +{Number(row.total_amount).toFixed(2)} +``` + +**Helper Functions:** + +Use the helpers from `shared/types.ts` for consistent formatting: + +```typescript +import { toNumber, formatCurrency, formatPercent } from '../../shared/types'; + +// Convert to number +const amount = toNumber(row.amount); // "123.45" → 123.45 + +// Format as currency +const formatted = formatCurrency(row.amount); // "123.45" → "$123.45" + +// Format as percentage +const percent = formatPercent(row.rate); // "85.5" → "85.5%" +``` + +## Query Parameterization + +SQL queries can accept parameters to make them dynamic and reusable. + +**Key Points:** +- Parameters use colon prefix: `:parameter_name` +- Databricks infers types from values automatically +- For optional string parameters, use pattern: `(:param = '' OR column = :param)` +- **For optional date parameters, use sentinel dates** (`'1900-01-01'` and `'9999-12-31'`) instead of empty strings + +### SQL Parameter Syntax + +```sql +-- config/queries/filtered_data.sql +SELECT * +FROM my_table +WHERE column_value >= :min_value + AND column_value <= :max_value + AND category = :category + AND (:optional_filter = '' OR status = :optional_filter) +``` + +### Frontend Parameter Passing + +```typescript +import { sql } from "@databricks/appkit-ui/js"; + +const { data } = useAnalyticsQuery('filtered_data', { + min_value: sql.number(minValue), + max_value: sql.number(maxValue), + category: sql.string(category), + optional_filter: sql.string(optionalFilter || ''), // empty string for optional params +}); +``` + +### Date Parameters + +Use `sql.date()` for date parameters with `YYYY-MM-DD` format strings. + +**Frontend - Using Date Parameters:** + +```typescript +import { sql } from '@databricks/appkit-ui/js'; +import { useState } from 'react'; + +function MyComponent() { + const [startDate, setStartDate] = useState('2016-02-01'); + const [endDate, setEndDate] = useState('2016-02-29'); + + const queryParams = { + start_date: sql.date(startDate), // Pass YYYY-MM-DD string to sql.date() + end_date: sql.date(endDate), + }; + + const { data } = useAnalyticsQuery('my_query', queryParams); + + // ... +} +``` + +**SQL - Date Filtering:** + +```sql +-- Filter by date range using DATE() function +SELECT COUNT(*) as trip_count +FROM samples.nyctaxi.trips +WHERE DATE(tpep_pickup_datetime) >= :start_date + AND DATE(tpep_pickup_datetime) <= :end_date +``` + +**Date Helper Functions:** + +```typescript +// Helper to get dates relative to today +const daysAgo = (n: number) => { + const date = new Date(Date.now() - n * 86400000); + return sql.date(date) +}; + +const params = { + start_date: daysAgo(7), // 7 days ago + end_date: sql.date(daysAgo(0)), // Today +}; +``` + +### Optional Date Parameters - Use Sentinel Dates + +Databricks App Kit validates parameter types before query execution. **DO NOT use empty strings (`''`) for optional date parameters** as this causes validation errors. 
+ +**✅ CORRECT - Use Sentinel Dates:** + +```typescript +// Frontend: Use sentinel dates for "no filter" instead of empty strings +const revenueParams = { + group_by: 'month', + start_date: sql.date('1900-01-01'), // Sentinel: effectively no lower bound + end_date: sql.date('9999-12-31'), // Sentinel: effectively no upper bound + country: sql.string(country || ''), + property_type: sql.string(propertyType || ''), +}; +``` + +```sql +-- SQL: Simple comparison since sentinel dates are always valid +WHERE b.check_in >= CAST(:start_date AS DATE) + AND b.check_in <= CAST(:end_date AS DATE) +``` + +**Why Sentinel Dates Work:** +- `1900-01-01` is before any real data (effectively no lower bound filter) +- `9999-12-31` is after any real data (effectively no upper bound filter) +- Always valid DATE types, so no parameter validation errors +- All real dates fall within this range, so no filtering occurs + +**Parameter Types Summary:** +- ALWAYS use sql.* helper functions from the `@databricks/appkit-ui/js` package to define SQL parameters +- **Strings/Numbers**: Use directly in SQL with `:param_name` +- **Dates**: Use with `CAST(:param AS DATE)` in SQL +- **Optional Strings**: Use empty string default, check with `(:param = '' OR column = :param)` +- **Optional Dates**: Use sentinel dates (`sql.date('1900-01-01')` and `sql.date('9999-12-31')`) instead of empty strings diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md new file mode 100644 index 0000000000..b1a4fea219 --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md @@ -0,0 +1,58 @@ +# Testing Guidelines + +## Unit Tests (Vitest) + +**CRITICAL**: Use vitest for all tests. Put tests next to the code (e.g. src/\*.test.ts) + +```typescript +import { describe, it, expect } from 'vitest'; + +describe('Feature Name', () => { + it('should do something', () => { + expect(true).toBe(true); + }); + + it('should handle async operations', async () => { + const result = await someAsyncFunction(); + expect(result).toBeDefined(); + }); +}); +``` + +**Best Practices:** +- Use `describe` blocks to group related tests +- Use `it` for individual test cases +- Use `expect` for assertions +- Tests run with `npm test` (runs `vitest run`) + +❌ **Do not write unit tests for:** +- SQL files under `config/queries/` - little value in testing static SQL +- Types associated with queries - these are just schema definitions + +## Smoke Test (Playwright) + +The template includes a smoke test at `tests/smoke.spec.ts` that verifies the app loads correctly. 
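For orientation, a minimal sketch of what such a smoke test might look like (the heading text, `hello world` assertion, and screenshot path below are assumptions drawn from the default template content described in this document, not the template's exact code):

```typescript
import { test, expect } from '@playwright/test';

test('app loads and shows data', async ({ page }) => {
  // open the app served by the dev server (assumes baseURL is configured)
  await page.goto('/');

  // wait for the default template heading and the first query result to render
  await expect(page.getByRole('heading', { name: 'Minimal Databricks App' })).toBeVisible();
  await expect(page.getByText('hello world')).toBeVisible();

  // capture an artifact for debugging, even when assertions pass
  await page.screenshot({ path: '.smoke-test/smoke.png', fullPage: true });
});
```
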
+ +**What the smoke test does:** +- Opens the app +- Waits for data to load (SQL query results) +- Verifies key UI elements are visible +- Captures screenshots and console logs to `.smoke-test/` directory +- Always captures artifacts, even on test failure + +**When customizing the app**, update `tests/smoke.spec.ts` to match your UI: +- Change heading selector to match your app title (replace 'Minimal Databricks App') +- Update data assertions to match your query results (replace 'hello world' check) +- Keep the test simple - just verify app loads and displays data +- The default test expects specific template content; update these expectations after customization + +**Keep smoke tests simple:** +- Only verify that the app loads and displays initial data +- Wait for key elements to appear (page title, main content) +- Capture artifacts for debugging +- Run quickly (< 5 seconds) + +**For extended E2E tests:** +- Create separate test files in `tests/` directory (e.g., `tests/user-flow.spec.ts`) +- Use `npm run test:e2e` to run all Playwright tests +- Keep complex user flows, interactions, and edge cases out of the smoke test diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md new file mode 100644 index 0000000000..acfb68c1b6 --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md @@ -0,0 +1,95 @@ +# tRPC for Custom Endpoints + +**CRITICAL**: Do NOT use tRPC for SQL queries or data retrieval. Use `config/queries/` + `useAnalyticsQuery` instead. + +Use tRPC ONLY for: + +- **Mutations**: Creating, updating, or deleting data (INSERT, UPDATE, DELETE) +- **External APIs**: Calling Databricks APIs (serving endpoints, jobs, MLflow, etc.) +- **Complex business logic**: Multi-step operations that cannot be expressed in SQL +- **File operations**: File uploads, processing, transformations +- **Custom computations**: Operations requiring TypeScript/Node.js logic + +## Server-side Pattern + +```typescript +// server/trpc.ts +import { initTRPC } from '@trpc/server'; +import { getRequestContext } from '@databricks/appkit'; +import { z } from 'zod'; + +const t = initTRPC.create({ transformer: superjson }); +const publicProcedure = t.procedure; + +export const appRouter = t.router({ + // Example: Query a serving endpoint + queryModel: publicProcedure.input(z.object({ prompt: z.string() })).query(async ({ input: { prompt } }) => { + const { serviceDatabricksClient: client } = getRequestContext(); + const response = await client.servingEndpoints.query({ + name: 'your-endpoint-name', + messages: [{ role: 'user', content: prompt }], + }); + return response; + }), + + // Example: Mutation + createRecord: publicProcedure.input(z.object({ name: z.string() })).mutation(async ({ input }) => { + // Custom logic here + return { success: true, id: 123 }; + }), +}); +``` + +## Client-side Pattern + +```typescript +// client/src/components/MyComponent.tsx +import { trpc } from '@/lib/trpc'; +import { useState, useEffect } from 'react'; + +function MyComponent() { + const [result, setResult] = useState(null); + + useEffect(() => { + trpc.queryModel + .query({ prompt: "Hello" }) + .then(setResult) + .catch(console.error); + }, []); + + const handleCreate = async () => { + await trpc.createRecord.mutate({ name: "test" }); + }; + + return
<div>{/* component JSX */}</div>
; +} +``` + +## Decision Tree for Data Operations + +1. **Need to display data from SQL?** + - **Chart or Table?** → Use visualization components (`BarChart`, `LineChart`, `DataTable`, etc.) + - **Custom display (KPIs, cards, lists)?** → Use `useAnalyticsQuery` hook + - **Never** use tRPC for SQL SELECT statements + +2. **Need to call a Databricks API?** → Use tRPC + - Serving endpoints (model inference) + - MLflow operations + - Jobs API + - Workspace API + +3. **Need to modify data?** → Use tRPC mutations + - INSERT, UPDATE, DELETE operations + - Multi-step transactions + - Business logic with side effects + +4. **Need non-SQL custom logic?** → Use tRPC + - File processing + - External API calls + - Complex computations in TypeScript + +**Summary:** +- ✅ SQL queries → Visualization components or `useAnalyticsQuery` +- ✅ Databricks APIs → tRPC +- ✅ Data mutations → tRPC +- ❌ SQL queries → tRPC (NEVER do this) diff --git a/experimental/aitools/lib/agent_skills/embed.go b/experimental/aitools/lib/agent_skills/embed.go new file mode 100644 index 0000000000..0777084b30 --- /dev/null +++ b/experimental/aitools/lib/agent_skills/embed.go @@ -0,0 +1,8 @@ +package agent_skills + +import "embed" + +// SkillsFS embeds all installable agent skills. +// +//go:embed all:databricks-apps +var SkillsFS embed.FS From acdbfd20f05b0958d6ee3eaec532618dda5ba1e7 Mon Sep 17 00:00:00 2001 From: Arseny Kravchenko Date: Tue, 13 Jan 2026 17:53:13 +0100 Subject: [PATCH 2/2] reuse appkit template docs for skill references --- experimental/aitools/cmd/skills.go | 35 ++++ .../databricks-apps/references/appkit-sdk.md | 86 -------- .../databricks-apps/references/frontend.md | 108 ---------- .../databricks-apps/references/sql-queries.md | 195 ------------------ .../databricks-apps/references/testing.md | 58 ------ .../databricks-apps/references/trpc.md | 95 --------- experimental/aitools/templates/appkit/docs.go | 8 + 7 files changed, 43 insertions(+), 542 deletions(-) delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md create mode 100644 experimental/aitools/templates/appkit/docs.go diff --git a/experimental/aitools/cmd/skills.go b/experimental/aitools/cmd/skills.go index 9284a43ef3..92ebf358cc 100644 --- a/experimental/aitools/cmd/skills.go +++ b/experimental/aitools/cmd/skills.go @@ -8,6 +8,7 @@ import ( "path/filepath" "github.com/databricks/cli/experimental/aitools/lib/agent_skills" + appkitdocs "github.com/databricks/cli/experimental/aitools/templates/appkit" "github.com/databricks/cli/libs/cmdio" "github.com/fatih/color" "github.com/spf13/cobra" @@ -113,6 +114,7 @@ func installSkill(ctx context.Context, skillName string) error { return fmt.Errorf("failed to create directory: %w", err) } + // copy skill-specific files (SKILL.md, authentication.md, etc.) 
err = fs.WalkDir(skillFS, ".", func(path string, d fs.DirEntry, err error) error { if err != nil { return err @@ -135,6 +137,39 @@ func installSkill(ctx context.Context, skillName string) error { return fmt.Errorf("failed to copy skill files: %w", err) } + // copy shared docs from appkit template + if err := copySharedDocs(destDir); err != nil { + return fmt.Errorf("failed to copy shared docs: %w", err) + } + cmdio.LogString(ctx, color.GreenString("✓ Installed %q to %s", skillName, destDir)) return nil } + +func copySharedDocs(destDir string) error { + refsDir := filepath.Join(destDir, "references") + if err := os.MkdirAll(refsDir, 0o755); err != nil { + return err + } + + // docs from appkit template to copy as skill references + sharedDocs := []string{ + "appkit-sdk.md", + "frontend.md", + "sql-queries.md", + "testing.md", + "trpc.md", + } + + for _, doc := range sharedDocs { + content, err := appkitdocs.DocsFS.ReadFile("template/{{.project_name}}/docs/" + doc) + if err != nil { + return fmt.Errorf("failed to read %s: %w", doc, err) + } + if err := os.WriteFile(filepath.Join(refsDir, doc), content, 0o644); err != nil { + return fmt.Errorf("failed to write %s: %w", doc, err) + } + } + + return nil +} diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md deleted file mode 100644 index 5ab00768e1..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md +++ /dev/null @@ -1,86 +0,0 @@ -# Databricks App Kit SDK - -## TypeScript Import Rules - -This template uses strict TypeScript settings with `verbatimModuleSyntax: true`. **Always use `import type` for type-only imports**. - -Template enforces `noUnusedLocals` - remove unused imports immediately or build fails. - -```typescript -// ✅ CORRECT - use import type for types -import type { MyInterface, MyType } from '../../shared/types'; - -// ❌ WRONG - will fail compilation -import { MyInterface, MyType } from '../../shared/types'; -``` - -## Server Setup - -```typescript -import { createApp, server, analytics } from '@databricks/app-kit'; - -const app = await createApp({ - plugins: [ - server({ autoStart: false }), - analytics(), - ], -}); - -// Extend with custom tRPC endpoints if needed -app.server.extend((express: Application) => { - express.use('/trpc', [appRouterMiddleware()]); -}); - -await app.server.start(); -``` - -## useAnalyticsQuery Hook - -**ONLY use when displaying data in a custom way that isn't a chart or table.** - -Use cases: -- Custom HTML layouts (cards, lists, grids) -- Summary statistics and KPIs -- Conditional rendering based on data values -- Data that needs transformation before display - -```typescript -import { useAnalyticsQuery, Skeleton } from '@databricks/app-kit-ui/react'; - -interface QueryResult { column_name: string; value: number; } - -function CustomDisplay() { - const { data, loading, error } = useAnalyticsQuery('query_name', { - start_date: sql.date(Date.now()), - category: sql.string("tools") - }); - - if (loading) return ; - if (error) return
<div>Error: {error}</div>;

  return (
    <div>
      {data?.map(row => (
        <div key={row.column_name}>
          <span>{row.column_name}</span>
          <span>{row.value}</span>
        </div>
      ))}
    </div>
- ); -} -``` - -**API:** - -```typescript -const { data, loading, error } = useAnalyticsQuery( - queryName: string, // SQL file name without .sql extension - params: Record // Query parameters -); -// Returns: { data: T | null, loading: boolean, error: string | null } -``` - -**NOT supported:** -- `enabled` - Query always executes on mount. Use conditional rendering: `{selectedId && }` -- `refetch` - Not available. Re-mount component to re-query. diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md deleted file mode 100644 index a270b46b9e..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md +++ /dev/null @@ -1,108 +0,0 @@ -# Frontend Guidelines - -## Visualization Components - -Components from `@databricks/appkit-ui/react` handle data fetching, loading states, and error handling internally. - -Available: `AreaChart`, `BarChart`, `LineChart`, `PieChart`, `RadarChart`, `DataTable` - -**Basic Usage:** - -```typescript -import { BarChart, LineChart, DataTable, Card, CardContent, CardHeader, CardTitle } from '@databricks/appkit-ui/react'; -import { sql } from "@databricks/appkit-ui/js"; - -function MyDashboard() { - return ( -
<div className="grid gap-4">
      <Card>
        <CardHeader>
          <CardTitle>Sales by Region</CardTitle>
        </CardHeader>
        <CardContent>
          <BarChart queryKey="sales_by_region" />
        </CardContent>
      </Card>

      <Card>
        <CardHeader>
          <CardTitle>Revenue Trend</CardTitle>
        </CardHeader>
        <CardContent>
          <LineChart queryKey="revenue_trend" />
        </CardContent>
      </Card>
    </div>
- ); -} -``` - -Components automatically fetch data, show loading states, display errors, and render with sensible defaults. - -**Custom Visualization (Recharts):** - -```typescript -import { BarChart } from '@databricks/appkit-ui/react'; -import { Bar, XAxis, YAxis, CartesianGrid, Tooltip, Legend } from 'recharts'; - - - - - - - - - - -``` - -Databricks brand colors: `['#40d1f5', '#4462c9', '#EB1600', '#0B2026', '#4A4A4A', '#353a4a']` - -**❌ Don't double-fetch:** - -```typescript -// WRONG - redundant fetch -const { data } = useAnalyticsQuery('sales_data', {}); -return ; - -// CORRECT - let component handle it -return ; -``` - -## Layout Structure - -```tsx -
<div>
  <header>
    <h1>Page Title</h1>
  </header>
  <section>{/* form inputs */}</section>
  <section>{/* list items */}</section>
</div>
-``` - -## Component Organization - -- Shared UI components: `@databricks/appkit-ui/react` -- Feature components: `client/src/components/FeatureName.tsx` -- Split components when logic exceeds ~100 lines or component is reused - -## Radix UI Constraints - -- `SelectItem` cannot have `value=""`. Use sentinel value like `"all"` for "show all" options. - -## Map Libraries (react-leaflet) - -For maps with React 19, use react-leaflet v5: - -```bash -npm install react-leaflet@^5.0.0 leaflet @types/leaflet -``` - -```typescript -import 'leaflet/dist/leaflet.css'; -``` - -## Best Practices - -- Use shadcn/radix components (Button, Input, Card, etc.) for consistent UI, import them from `@databricks/appkit-ui/react`. -- **Use skeleton loaders**: Always use `` components instead of plain "Loading..." text -- Define result types in `shared/types.ts` for reuse between frontend and backend -- Handle nullable fields: `value={field || ''}` for inputs -- Type callbacks explicitly: `onChange={(e: React.ChangeEvent) => ...}` -- Forms should have loading states: `disabled={isLoading}` -- Show empty states with helpful text when no data exists diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md deleted file mode 100644 index 2db77f0bfb..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md +++ /dev/null @@ -1,195 +0,0 @@ -# SQL Query Files - -**IMPORTANT**: ALWAYS use SQL files in `config/queries/` for data retrieval. NEVER use tRPC for SQL queries. - -- Store ALL SQL queries in `config/queries/` directory -- Name files descriptively: `trip_statistics.sql`, `user_metrics.sql`, `sales_by_region.sql` -- Reference by filename (without extension) in `useAnalyticsQuery` or directly in a visualization component passing it as `queryKey` -- App Kit automatically executes queries against configured Databricks warehouse -- Benefits: Built-in caching, proper connection pooling, better performance - -## Query Schemas - -Define the shape of QUERY RESULTS (not input parameters) in `config/queries/schema.ts` using Zod schemas. - -- **These schemas validate the COLUMNS RETURNED by SQL queries** -- Input parameters are passed separately to `useAnalyticsQuery()` as the second argument -- Schema field names must match your SQL SELECT column names/aliases - -Example: - -```typescript -import { z } from 'zod'; - -export const querySchemas = { - mocked_sales: z.array( - z.object({ - max_month_num: z.number().min(1).max(12), - }) - ), - - hello_world: z.array( - z.object({ - value: z.string(), - }) - ), -}; -``` - -**IMPORTANT: Refreshing Type Definitions** - -After adding or modifying query schemas in `config/queries/schema.ts`: - -1. **DO NOT** manually edit `client/src/appKitTypes.d.ts` - this file is auto-generated -2. Run `npm run dev` to automatically regenerate the TypeScript type definitions -3. The dev server will scan your SQL files and schema definitions and update `appKitTypes.d.ts` accordingly - -## SQL Type Handling (Critical) - -**ALL numeric values from Databricks SQL are returned as STRINGS in JSON responses.** This includes results from `ROUND()`, `AVG()`, `SUM()`, `COUNT()`, etc. 
Always convert before using numeric methods: - -```typescript -// ❌ WRONG - fails at runtime -{row.total_amount.toFixed(2)} - -// ✅ CORRECT - convert to number first -{Number(row.total_amount).toFixed(2)} -``` - -**Helper Functions:** - -Use the helpers from `shared/types.ts` for consistent formatting: - -```typescript -import { toNumber, formatCurrency, formatPercent } from '../../shared/types'; - -// Convert to number -const amount = toNumber(row.amount); // "123.45" → 123.45 - -// Format as currency -const formatted = formatCurrency(row.amount); // "123.45" → "$123.45" - -// Format as percentage -const percent = formatPercent(row.rate); // "85.5" → "85.5%" -``` - -## Query Parameterization - -SQL queries can accept parameters to make them dynamic and reusable. - -**Key Points:** -- Parameters use colon prefix: `:parameter_name` -- Databricks infers types from values automatically -- For optional string parameters, use pattern: `(:param = '' OR column = :param)` -- **For optional date parameters, use sentinel dates** (`'1900-01-01'` and `'9999-12-31'`) instead of empty strings - -### SQL Parameter Syntax - -```sql --- config/queries/filtered_data.sql -SELECT * -FROM my_table -WHERE column_value >= :min_value - AND column_value <= :max_value - AND category = :category - AND (:optional_filter = '' OR status = :optional_filter) -``` - -### Frontend Parameter Passing - -```typescript -import { sql } from "@databricks/appkit-ui/js"; - -const { data } = useAnalyticsQuery('filtered_data', { - min_value: sql.number(minValue), - max_value: sql.number(maxValue), - category: sql.string(category), - optional_filter: sql.string(optionalFilter || ''), // empty string for optional params -}); -``` - -### Date Parameters - -Use `sql.date()` for date parameters with `YYYY-MM-DD` format strings. - -**Frontend - Using Date Parameters:** - -```typescript -import { sql } from '@databricks/appkit-ui/js'; -import { useState } from 'react'; - -function MyComponent() { - const [startDate, setStartDate] = useState('2016-02-01'); - const [endDate, setEndDate] = useState('2016-02-29'); - - const queryParams = { - start_date: sql.date(startDate), // Pass YYYY-MM-DD string to sql.date() - end_date: sql.date(endDate), - }; - - const { data } = useAnalyticsQuery('my_query', queryParams); - - // ... -} -``` - -**SQL - Date Filtering:** - -```sql --- Filter by date range using DATE() function -SELECT COUNT(*) as trip_count -FROM samples.nyctaxi.trips -WHERE DATE(tpep_pickup_datetime) >= :start_date - AND DATE(tpep_pickup_datetime) <= :end_date -``` - -**Date Helper Functions:** - -```typescript -// Helper to get dates relative to today -const daysAgo = (n: number) => { - const date = new Date(Date.now() - n * 86400000); - return sql.date(date) -}; - -const params = { - start_date: daysAgo(7), // 7 days ago - end_date: sql.date(daysAgo(0)), // Today -}; -``` - -### Optional Date Parameters - Use Sentinel Dates - -Databricks App Kit validates parameter types before query execution. **DO NOT use empty strings (`''`) for optional date parameters** as this causes validation errors. 
- -**✅ CORRECT - Use Sentinel Dates:** - -```typescript -// Frontend: Use sentinel dates for "no filter" instead of empty strings -const revenueParams = { - group_by: 'month', - start_date: sql.date('1900-01-01'), // Sentinel: effectively no lower bound - end_date: sql.date('9999-12-31'), // Sentinel: effectively no upper bound - country: sql.string(country || ''), - property_type: sql.string(propertyType || ''), -}; -``` - -```sql --- SQL: Simple comparison since sentinel dates are always valid -WHERE b.check_in >= CAST(:start_date AS DATE) - AND b.check_in <= CAST(:end_date AS DATE) -``` - -**Why Sentinel Dates Work:** -- `1900-01-01` is before any real data (effectively no lower bound filter) -- `9999-12-31` is after any real data (effectively no upper bound filter) -- Always valid DATE types, so no parameter validation errors -- All real dates fall within this range, so no filtering occurs - -**Parameter Types Summary:** -- ALWAYS use sql.* helper functions from the `@databricks/appkit-ui/js` package to define SQL parameters -- **Strings/Numbers**: Use directly in SQL with `:param_name` -- **Dates**: Use with `CAST(:param AS DATE)` in SQL -- **Optional Strings**: Use empty string default, check with `(:param = '' OR column = :param)` -- **Optional Dates**: Use sentinel dates (`sql.date('1900-01-01')` and `sql.date('9999-12-31')`) instead of empty strings diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md deleted file mode 100644 index b1a4fea219..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md +++ /dev/null @@ -1,58 +0,0 @@ -# Testing Guidelines - -## Unit Tests (Vitest) - -**CRITICAL**: Use vitest for all tests. Put tests next to the code (e.g. src/\*.test.ts) - -```typescript -import { describe, it, expect } from 'vitest'; - -describe('Feature Name', () => { - it('should do something', () => { - expect(true).toBe(true); - }); - - it('should handle async operations', async () => { - const result = await someAsyncFunction(); - expect(result).toBeDefined(); - }); -}); -``` - -**Best Practices:** -- Use `describe` blocks to group related tests -- Use `it` for individual test cases -- Use `expect` for assertions -- Tests run with `npm test` (runs `vitest run`) - -❌ **Do not write unit tests for:** -- SQL files under `config/queries/` - little value in testing static SQL -- Types associated with queries - these are just schema definitions - -## Smoke Test (Playwright) - -The template includes a smoke test at `tests/smoke.spec.ts` that verifies the app loads correctly. 
- -**What the smoke test does:** -- Opens the app -- Waits for data to load (SQL query results) -- Verifies key UI elements are visible -- Captures screenshots and console logs to `.smoke-test/` directory -- Always captures artifacts, even on test failure - -**When customizing the app**, update `tests/smoke.spec.ts` to match your UI: -- Change heading selector to match your app title (replace 'Minimal Databricks App') -- Update data assertions to match your query results (replace 'hello world' check) -- Keep the test simple - just verify app loads and displays data -- The default test expects specific template content; update these expectations after customization - -**Keep smoke tests simple:** -- Only verify that the app loads and displays initial data -- Wait for key elements to appear (page title, main content) -- Capture artifacts for debugging -- Run quickly (< 5 seconds) - -**For extended E2E tests:** -- Create separate test files in `tests/` directory (e.g., `tests/user-flow.spec.ts`) -- Use `npm run test:e2e` to run all Playwright tests -- Keep complex user flows, interactions, and edge cases out of the smoke test diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md deleted file mode 100644 index acfb68c1b6..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md +++ /dev/null @@ -1,95 +0,0 @@ -# tRPC for Custom Endpoints - -**CRITICAL**: Do NOT use tRPC for SQL queries or data retrieval. Use `config/queries/` + `useAnalyticsQuery` instead. - -Use tRPC ONLY for: - -- **Mutations**: Creating, updating, or deleting data (INSERT, UPDATE, DELETE) -- **External APIs**: Calling Databricks APIs (serving endpoints, jobs, MLflow, etc.) -- **Complex business logic**: Multi-step operations that cannot be expressed in SQL -- **File operations**: File uploads, processing, transformations -- **Custom computations**: Operations requiring TypeScript/Node.js logic - -## Server-side Pattern - -```typescript -// server/trpc.ts -import { initTRPC } from '@trpc/server'; -import { getRequestContext } from '@databricks/appkit'; -import { z } from 'zod'; - -const t = initTRPC.create({ transformer: superjson }); -const publicProcedure = t.procedure; - -export const appRouter = t.router({ - // Example: Query a serving endpoint - queryModel: publicProcedure.input(z.object({ prompt: z.string() })).query(async ({ input: { prompt } }) => { - const { serviceDatabricksClient: client } = getRequestContext(); - const response = await client.servingEndpoints.query({ - name: 'your-endpoint-name', - messages: [{ role: 'user', content: prompt }], - }); - return response; - }), - - // Example: Mutation - createRecord: publicProcedure.input(z.object({ name: z.string() })).mutation(async ({ input }) => { - // Custom logic here - return { success: true, id: 123 }; - }), -}); -``` - -## Client-side Pattern - -```typescript -// client/src/components/MyComponent.tsx -import { trpc } from '@/lib/trpc'; -import { useState, useEffect } from 'react'; - -function MyComponent() { - const [result, setResult] = useState(null); - - useEffect(() => { - trpc.queryModel - .query({ prompt: "Hello" }) - .then(setResult) - .catch(console.error); - }, []); - - const handleCreate = async () => { - await trpc.createRecord.mutate({ name: "test" }); - }; - - return
<div>{/* component JSX */}</div>
; -} -``` - -## Decision Tree for Data Operations - -1. **Need to display data from SQL?** - - **Chart or Table?** → Use visualization components (`BarChart`, `LineChart`, `DataTable`, etc.) - - **Custom display (KPIs, cards, lists)?** → Use `useAnalyticsQuery` hook - - **Never** use tRPC for SQL SELECT statements - -2. **Need to call a Databricks API?** → Use tRPC - - Serving endpoints (model inference) - - MLflow operations - - Jobs API - - Workspace API - -3. **Need to modify data?** → Use tRPC mutations - - INSERT, UPDATE, DELETE operations - - Multi-step transactions - - Business logic with side effects - -4. **Need non-SQL custom logic?** → Use tRPC - - File processing - - External API calls - - Complex computations in TypeScript - -**Summary:** -- ✅ SQL queries → Visualization components or `useAnalyticsQuery` -- ✅ Databricks APIs → tRPC -- ✅ Data mutations → tRPC -- ❌ SQL queries → tRPC (NEVER do this) diff --git a/experimental/aitools/templates/appkit/docs.go b/experimental/aitools/templates/appkit/docs.go new file mode 100644 index 0000000000..1070628cc1 --- /dev/null +++ b/experimental/aitools/templates/appkit/docs.go @@ -0,0 +1,8 @@ +package appkit + +import "embed" + +// DocsFS embeds the appkit template documentation. +// +//go:embed template/{{.project_name}}/docs/*.md +var DocsFS embed.FS