diff --git a/PLAN.md b/PLAN.md new file mode 100644 index 000000000..9a1a34780 --- /dev/null +++ b/PLAN.md @@ -0,0 +1,154 @@ +# Pipeline Server Refactor Plan + +## Goals +- Replace file-id derivation with a secure, readable scheme based on full relative path segments, joined by `~`. +- Strip `.ucd-pipeline.ts` and `.ts` from file IDs (configurable suffix list). +- Extract helper logic from `src/server/routes/pipelines.ts` into `src/server/lib/pipelines/`. +- Split API routes into smaller files with coherent concerns. +- Improve frontend layout and file display post-migration. +- Add H3 fetch-based tests for key API routes. + +## Progress +- [ ] Create helper modules under `src/server/lib/pipelines/`. +- [ ] Split and update server routes. +- [ ] Update API response shapes with `fileLabel`. +- [ ] Update pipeline-ui types and sidebar display. +- [ ] Update pipeline-server client UI and layout. +- [ ] Add H3 fetch tests in `packages/pipelines/pipeline-server/test`. + +--- + +## 1) File ID & Path Helpers + +### New module: `src/server/lib/pipelines/ids.ts` +- `PIPELINE_FILE_SUFFIXES = [".ucd-pipeline.ts", ".ts"]` +- `stripSuffixes(fileName, suffixes)` +- `sanitizeSegment(segment)`: + - replace `~` with `-` + - replace whitespace with `-` +- `fileIdFromPath(filePath)`: + - normalize path + - split segments + - strip suffixes from final segment + - join segments with `~` +- `fileLabelFromPath(filePath)`: + - readable display label, e.g. `pipelines/api/foo` + +### New module: `src/server/lib/pipelines/resolve.ts` +- `resolveLocalFilePath(cwd, filePath)`: + - `resolved = path.resolve(cwd, filePath)` + - `relative = path.relative(cwd, resolved)` + - if `relative.startsWith("..")` or `path.isAbsolute(relative)` -> throw error + - ensures traversal prevention + +### New module: `src/server/lib/pipelines/files.ts` +- Move: + - `FilePipelineEntry` + - `PipelineFileGroup` + - `buildFileGroups` + - `applySearchFilter` + - `findFileGroup` + - `findPipelineByFileId` +- Update file groups to include `fileLabel` from `fileLabelFromPath`. + +### New module: `src/server/lib/pipelines/loader.ts` +- Move `getPipelines` from `src/server/lib.ts`. + +--- + +## 2) Server Routes Split + +### `src/server/routes/pipelines/index.ts` +- `GET /api/pipelines` +- Uses `getPipelines` + `buildFileGroups` + `applySearchFilter` +- Response includes: `fileId`, `filePath`, `fileLabel`, `sourceId`, `pipelines` + +### `src/server/routes/pipelines/file.ts` +- `GET /api/pipelines/:file` +- Looks up a file by `fileId`, returns `fileId`, `filePath`, `fileLabel`, `sourceId`, `pipelines` + +### `src/server/routes/pipelines/pipeline.ts` +- `GET /api/pipelines/:file/:id` +- `GET /api/pipelines/:file/:id/code` +- Use shared helpers and `resolveLocalFilePath` for local file reads + +### `src/server/routes/pipelines/execution.ts` +- `POST /api/pipelines/:file/:id/execute` +- `GET /api/pipelines/:file/:id/executions` +- Reuse helper logic and avoid duplicating source scanning + +### `src/server/routes/pipelines.ts` +- Mounts above route files under `/api/pipelines` + +--- + +## 3) API Shape Changes + +### Add `fileLabel` everywhere files are referenced: +- `PipelinesResponse.files[]` +- `PipelineFileResponse.file` +- `PipelineResponse` (optional `fileLabel` for convenience) + +`filePath` stays for display only; never used for routing. + +--- + +## 4) Frontend Updates + +### `packages/pipelines/pipeline-server` client +- Update fetch paths to use the new `fileId`. +- Use `fileLabel` as primary display, show `filePath` in muted text. 
+- Improve layout in: + - `src/client/routes/pipelines/$file/index.tsx` + - `src/client/components/pipeline-command-palette.tsx` + - Optional: breadcrumb or label in `src/client/components/pipeline-header.tsx` + +### `packages/pipelines/pipeline-ui` +- Update types: add `fileLabel?: string` in `PipelineFileInfo` +- Update sidebar display to use `fileLabel` instead of `filePath.split` + +--- + +## 5) Tests (H3 fetch) + +### Test approach +- Use `vitest` + `h3` app `.fetch` to call API routes. +- For routes needing filesystem context, use `vitest-testdirs`: + ```ts + import { testdir } from "vitest-testdirs"; + const storePath = await testdir({ + "UnicodeData.txt": content, + }); + ``` + +### Proposed tests +1) `GET /api/pipelines` returns files with `fileId` using `~` separator and no `.ts` suffix +2) `GET /api/pipelines/:file` finds file by new `fileId` +3) `GET /api/pipelines/:file/:id` returns pipeline details +4) `GET /api/pipelines/:file/:id/code` returns definePipeline snippet +5) `POST /api/pipelines/:file/:id/execute` returns execution id and status +6) `GET /api/pipelines/:file/:id/executions` lists executions +7) Local path traversal is blocked (attempt `../` path should fail) + +### Test location +- `packages/pipelines/pipeline-server/test/pipelines.test.ts` + +--- + +## 6) Execution Order + +1. Create helper modules under `src/server/lib/pipelines/`. +2. Move `getPipelines` into `loader.ts`. +3. Split and update server routes. +4. Update API response shape to include `fileLabel`. +5. Update pipeline-ui types and sidebar. +6. Update pipeline-server client UI and layout. +7. Add H3 fetch tests with `testdir` where needed. + +--- + +## Notes & Constraints + +- No backward compat needed. +- Tanstack type errors: if Link/useParams type complaints appear, report and stop (no casts). +- Avoid tiny re-export files; keep modules meaningful. 
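+
+---
+
+## Appendix: Reference Sketches
+
+A rough sketch of the `ids.ts` helpers from section 1. The names come from the plan; the exact normalization rules (Windows separators, empty segments) are assumptions to settle during implementation:
+
+```ts
+import path from "node:path";
+
+export const PIPELINE_FILE_SUFFIXES = [".ucd-pipeline.ts", ".ts"];
+
+export function stripSuffixes(fileName: string, suffixes: readonly string[] = PIPELINE_FILE_SUFFIXES): string {
+  // order matters: ".ucd-pipeline.ts" must be tried before the plain ".ts"
+  for (const suffix of suffixes) {
+    if (fileName.endsWith(suffix)) {
+      return fileName.slice(0, -suffix.length);
+    }
+  }
+  return fileName;
+}
+
+export function sanitizeSegment(segment: string): string {
+  // "~" is reserved as the join character, so it must never survive inside a segment
+  return segment.replaceAll("~", "-").replace(/\s+/g, "-");
+}
+
+export function fileIdFromPath(filePath: string): string {
+  // normalize, split into segments, strip suffixes from the final segment, join with "~"
+  const segments = path.normalize(filePath).split(/[\\/]+/).filter(Boolean);
+  const last = segments.pop();
+  if (last !== undefined) {
+    segments.push(stripSuffixes(last));
+  }
+  return segments.map(sanitizeSegment).join("~");
+}
+
+export function fileLabelFromPath(filePath: string): string {
+  // readable display label, e.g. "pipelines/api/foo"
+  const segments = path.normalize(filePath).split(/[\\/]+/).filter(Boolean);
+  const last = segments.pop();
+  if (last !== undefined) {
+    segments.push(stripSuffixes(last));
+  }
+  return segments.join("/");
+}
+```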
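+
+The traversal check in `resolve.ts` transcribes the steps listed in section 1 directly (sketch only; the error type is an assumption):
+
+```ts
+import path from "node:path";
+
+export function resolveLocalFilePath(cwd: string, filePath: string): string {
+  const resolved = path.resolve(cwd, filePath);
+  const relative = path.relative(cwd, resolved);
+  // a path that escapes cwd resolves to something starting with ".." (or absolute on another root)
+  if (relative.startsWith("..") || path.isAbsolute(relative)) {
+    throw new Error(`Path escapes working directory: ${filePath}`);
+  }
+  return resolved;
+}
+```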
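+
+And an illustrative unit test for the new ID scheme, matching proposed test 1 (import path and expected values are assumptions based on the sketches above):
+
+```ts
+import { describe, expect, it } from "vitest";
+import { fileIdFromPath } from "../src/server/lib/pipelines/ids";
+
+describe("fileIdFromPath", () => {
+  it("joins segments with ~ and strips the .ucd-pipeline.ts suffix", () => {
+    expect(fileIdFromPath("pipelines/api/foo.ucd-pipeline.ts")).toBe("pipelines~api~foo");
+  });
+
+  it("sanitizes reserved characters inside segments", () => {
+    expect(fileIdFromPath("my pipelines/a~b.ts")).toBe("my-pipelines~a-b");
+  });
+});
+```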
diff --git a/ROADMAP.md b/ROADMAP.md new file mode 100644 index 000000000..5f2a1b201 --- /dev/null +++ b/ROADMAP.md @@ -0,0 +1,130 @@ +# Pipeline Server v2.0 Roadmap + +## Phase 1: Authentication & Deployment Foundation + +### 1.1 Cloudflare Workers Migration +- Move from Node.js server to Cloudflare Workers +- Use Durable Objects for stateful operations +- KV storage for caching +- D1 database for persistence + +### 1.2 Authentication Strategy +- **Local Development**: Nuxt DevTools-style auth (simple, no external deps) +- **Production**: GitHub OAuth (leverage existing GitHub integration) +- JWT tokens with refresh mechanism +- Role-based access (viewer, editor, admin) + +### 1.3 GitHub Integration +- OAuth app for repository access +- Webhook support for pipeline file changes +- Read `.ucd-pipeline.ts` files from repos +- Sync pipeline definitions with GitHub + +## Phase 2: Core Features + +### 2.1 Multi-tenant Support +- Organizations/teams +- Repository-level permissions +- Pipeline sharing between teams + +### 2.2 Advanced Execution +- Queue system for pipeline runs +- Concurrent execution limits +- Execution history retention +- Scheduled/cron jobs + +### 2.3 Collaboration Features +- Comments on pipeline executions +- Share execution results via links +- Export/import pipeline configs + +## Phase 3: UI/UX Overhaul + +### 3.1 Design System +- Consistent component library +- Dark/light mode +- Responsive layouts +- Accessibility (WCAG 2.1) + +### 3.2 Pipeline Visualizer Improvements +- Interactive graph editing +- Real-time execution visualization +- Performance profiling +- Bottleneck detection + +### 3.3 Mobile Support +- Responsive sidebar +- Touch-friendly interactions +- Mobile-optimized views + +## Phase 4: Developer Experience + +### 4.1 VS Code Extension +- Edit pipelines in IDE +- Live preview +- Auto-completion +- One-click deploy + +### 4.2 CLI Improvements +- Remote pipeline management +- CI/CD integration +- Deployment commands + +### 4.3 API & SDK +- RESTful API +- TypeScript SDK +- WebSocket for real-time updates + +## Technical Considerations + +### Architecture Changes +- Move to edge functions (Cloudflare) +- Separate compute from storage +- Event-driven architecture +- GraphQL for flexible queries + +### Data Model + +```typescript +// Organizations +interface Organization { + id: string; + name: string; + githubInstallationId: string; +} + +// Repositories +interface Repository { + id: string; + orgId: string; + fullName: string; // owner/repo + pipelines: Pipeline[]; +} + +// Users +interface User { + id: string; + githubId: string; + orgs: string[]; + role: 'viewer' | 'editor' | 'admin'; +} +``` + +### Security +- Encrypted tokens at rest +- Rate limiting +- Audit logs +- Secret management for pipeline configs + +## Priority Order + +1. **Authentication** (Nuxt DevTools style for local) +2. **Cloudflare Workers migration** +3. **GitHub OAuth & repo sync** +4. **UI overhaul** (fix context menus, actions) +5. **Multi-tenant/teams** +6. 
**Advanced features** (scheduler, webhooks) + +--- + +*Last updated: February 2026* diff --git a/apps/web/package.json b/apps/web/package.json index f5f70135f..48bb8389e 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -29,6 +29,7 @@ "@tanstack/react-router-ssr-query": "catalog:web", "@tanstack/react-start": "catalog:web", "@ucdjs-internal/shared-ui": "workspace:*", + "@ucdjs/pipelines-ui": "workspace:*", "@ucdjs/schemas": "workspace:*", "@ucdjs/ucd-store": "workspace:*", "@unicode-utils/core": "catalog:prod", diff --git a/apps/web/src/components/file-explorer/explorer-entry.tsx b/apps/web/src/components/file-explorer/explorer-entry.tsx index ff169d78d..e32aa6b0a 100644 --- a/apps/web/src/components/file-explorer/explorer-entry.tsx +++ b/apps/web/src/components/file-explorer/explorer-entry.tsx @@ -1,7 +1,7 @@ import type { ViewMode } from "#types/file-explorer"; import type { FileEntry } from "@ucdjs/schemas"; -import { cn } from "#lib/utils"; import { Link } from "@tanstack/react-router"; +import { cn } from "@ucdjs-internal/shared-ui"; import { Card, CardContent } from "@ucdjs-internal/shared-ui/ui/card"; import { FileIcon, FolderIcon, FolderOpen } from "lucide-react"; diff --git a/apps/web/src/components/file-explorer/explorer-toolbar.tsx b/apps/web/src/components/file-explorer/explorer-toolbar.tsx index dbcbe605e..9759c27ce 100644 --- a/apps/web/src/components/file-explorer/explorer-toolbar.tsx +++ b/apps/web/src/components/file-explorer/explorer-toolbar.tsx @@ -1,6 +1,6 @@ import type { SearchQueryParams } from "../../routes/file-explorer/$"; -import { cn } from "#lib/utils"; import { useNavigate, useSearch } from "@tanstack/react-router"; +import { cn } from "@ucdjs-internal/shared-ui"; import { Badge } from "@ucdjs-internal/shared-ui/ui/badge"; import { Button } from "@ucdjs-internal/shared-ui/ui/button"; import { DropdownMenu, DropdownMenuContent, DropdownMenuGroup, DropdownMenuLabel, DropdownMenuRadioGroup, DropdownMenuRadioItem, DropdownMenuSeparator, DropdownMenuTrigger } from "@ucdjs-internal/shared-ui/ui/dropdown-menu"; diff --git a/apps/web/src/components/file-explorer/file-viewer.tsx b/apps/web/src/components/file-explorer/file-viewer.tsx index 00300b41a..4d085f182 100644 --- a/apps/web/src/components/file-explorer/file-viewer.tsx +++ b/apps/web/src/components/file-explorer/file-viewer.tsx @@ -1,4 +1,4 @@ -import { cn } from "#lib/utils"; +import { cn } from "@ucdjs-internal/shared-ui"; import { Button } from "@ucdjs-internal/shared-ui/ui/button"; import { Card, CardContent, CardHeader, CardTitle } from "@ucdjs-internal/shared-ui/ui/card"; import { Skeleton } from "@ucdjs-internal/shared-ui/ui/skeleton"; diff --git a/apps/web/src/components/file-explorer/parent-directory.tsx b/apps/web/src/components/file-explorer/parent-directory.tsx index 1a95bef89..3f31e90c0 100644 --- a/apps/web/src/components/file-explorer/parent-directory.tsx +++ b/apps/web/src/components/file-explorer/parent-directory.tsx @@ -1,5 +1,5 @@ -import { cn } from "#lib/utils"; import { Link } from "@tanstack/react-router"; +import { cn } from "@ucdjs-internal/shared-ui"; import { Card, CardContent } from "@ucdjs-internal/shared-ui/ui/card"; import { ArrowUp, FolderUp } from "lucide-react"; diff --git a/apps/web/src/components/layout/sidebar/app-sidebar.tsx b/apps/web/src/components/layout/sidebar/app-sidebar.tsx index c93054303..8f9362862 100644 --- a/apps/web/src/components/layout/sidebar/app-sidebar.tsx +++ b/apps/web/src/components/layout/sidebar/app-sidebar.tsx @@ -12,7 +12,7 @@ import 
{ SidebarMenuItem, SidebarRail, } from "@ucdjs-internal/shared-ui/ui/sidebar"; -import { BookOpen, ExternalLink, Grid3X3, Lightbulb, Search, Type } from "lucide-react"; +import { BookOpen, ExternalLink, GitGraph, Grid3X3, Lightbulb, Search, Type } from "lucide-react"; import { UcdLogo } from "../../ucd-logo"; import { VersionSwitcher } from "../../version-switcher"; @@ -33,6 +33,7 @@ const VERSION_ITEMS = [ const TOOLS_ITEMS = [ { to: "/file-explorer/$", params: { _splat: "" }, icon: BookOpen, label: "File Explorer" }, + { to: "/pipeline-graph", icon: GitGraph, label: "Pipeline Graph" }, ] as const; export function AppSidebar({ ...props }: ComponentProps) { diff --git a/apps/web/src/routeTree.gen.ts b/apps/web/src/routeTree.gen.ts index 117671496..943cd67f7 100644 --- a/apps/web/src/routeTree.gen.ts +++ b/apps/web/src/routeTree.gen.ts @@ -10,6 +10,7 @@ import { Route as rootRouteImport } from './routes/__root' import { Route as SearchRouteImport } from './routes/search' +import { Route as PipelineGraphRouteImport } from './routes/pipeline-graph' import { Route as CodepointInspectorRouteImport } from './routes/codepoint-inspector' import { Route as FileExplorerRouteRouteImport } from './routes/file-explorer/route' import { Route as IndexRouteImport } from './routes/index' @@ -31,6 +32,11 @@ const SearchRoute = SearchRouteImport.update({ path: '/search', getParentRoute: () => rootRouteImport, } as any) +const PipelineGraphRoute = PipelineGraphRouteImport.update({ + id: '/pipeline-graph', + path: '/pipeline-graph', + getParentRoute: () => rootRouteImport, +} as any) const CodepointInspectorRoute = CodepointInspectorRouteImport.update({ id: '/codepoint-inspector', path: '/codepoint-inspector', @@ -113,6 +119,7 @@ export interface FileRoutesByFullPath { '/': typeof IndexRoute '/file-explorer': typeof FileExplorerRouteRouteWithChildren '/codepoint-inspector': typeof CodepointInspectorRoute + '/pipeline-graph': typeof PipelineGraphRoute '/search': typeof SearchRoute '/v/$version': typeof VVersionRouteRouteWithChildren '/file-explorer/$': typeof FileExplorerSplatRoute @@ -131,6 +138,7 @@ export interface FileRoutesByTo { '/': typeof IndexRoute '/file-explorer': typeof FileExplorerRouteRouteWithChildren '/codepoint-inspector': typeof CodepointInspectorRoute + '/pipeline-graph': typeof PipelineGraphRoute '/search': typeof SearchRoute '/file-explorer/$': typeof FileExplorerSplatRoute '/v': typeof VIndexRoute @@ -149,6 +157,7 @@ export interface FileRoutesById { '/': typeof IndexRoute '/file-explorer': typeof FileExplorerRouteRouteWithChildren '/codepoint-inspector': typeof CodepointInspectorRoute + '/pipeline-graph': typeof PipelineGraphRoute '/search': typeof SearchRoute '/v/$version': typeof VVersionRouteRouteWithChildren '/file-explorer/$': typeof FileExplorerSplatRoute @@ -169,6 +178,7 @@ export interface FileRouteTypes { | '/' | '/file-explorer' | '/codepoint-inspector' + | '/pipeline-graph' | '/search' | '/v/$version' | '/file-explorer/$' @@ -187,6 +197,7 @@ export interface FileRouteTypes { | '/' | '/file-explorer' | '/codepoint-inspector' + | '/pipeline-graph' | '/search' | '/file-explorer/$' | '/v' @@ -204,6 +215,7 @@ export interface FileRouteTypes { | '/' | '/file-explorer' | '/codepoint-inspector' + | '/pipeline-graph' | '/search' | '/v/$version' | '/file-explorer/$' @@ -223,6 +235,7 @@ export interface RootRouteChildren { IndexRoute: typeof IndexRoute FileExplorerRouteRoute: typeof FileExplorerRouteRouteWithChildren CodepointInspectorRoute: typeof CodepointInspectorRoute + 
PipelineGraphRoute: typeof PipelineGraphRoute SearchRoute: typeof SearchRoute VVersionRouteRoute: typeof VVersionRouteRouteWithChildren VIndexRoute: typeof VIndexRoute @@ -237,6 +250,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof SearchRouteImport parentRoute: typeof rootRouteImport } + '/pipeline-graph': { + id: '/pipeline-graph' + path: '/pipeline-graph' + fullPath: '/pipeline-graph' + preLoaderRoute: typeof PipelineGraphRouteImport + parentRoute: typeof rootRouteImport + } '/codepoint-inspector': { id: '/codepoint-inspector' path: '/codepoint-inspector' @@ -388,6 +408,7 @@ const rootRouteChildren: RootRouteChildren = { IndexRoute: IndexRoute, FileExplorerRouteRoute: FileExplorerRouteRouteWithChildren, CodepointInspectorRoute: CodepointInspectorRoute, + PipelineGraphRoute: PipelineGraphRoute, SearchRoute: SearchRoute, VVersionRouteRoute: VVersionRouteRouteWithChildren, VIndexRoute: VIndexRoute, diff --git a/apps/web/src/routes/file-explorer/$.tsx b/apps/web/src/routes/file-explorer/$.tsx index 401def879..3cb8d0659 100644 --- a/apps/web/src/routes/file-explorer/$.tsx +++ b/apps/web/src/routes/file-explorer/$.tsx @@ -4,9 +4,9 @@ import { ExplorerToolbar } from "#components/file-explorer/explorer-toolbar"; import { ParentDirectory } from "#components/file-explorer/parent-directory"; import { ExplorerNotFound } from "#components/not-found"; import { filesQueryOptions, getFileHeadInfo } from "#functions/files"; -import { cn } from "#lib/utils"; import { createFileRoute, redirect, retainSearchParams, useSearch } from "@tanstack/react-router"; import { zodValidator } from "@tanstack/zod-adapter"; +import { cn } from "@ucdjs-internal/shared-ui"; import { Skeleton } from "@ucdjs-internal/shared-ui/ui/skeleton"; import { Suspense } from "react"; import z from "zod"; diff --git a/apps/web/src/routes/pipeline-graph.tsx b/apps/web/src/routes/pipeline-graph.tsx new file mode 100644 index 000000000..43dfd73fb --- /dev/null +++ b/apps/web/src/routes/pipeline-graph.tsx @@ -0,0 +1,82 @@ +import type { PipelineGraph as PipelineGraphType } from "@ucdjs/pipelines-core"; +import { createFileRoute, Link } from "@tanstack/react-router"; +import { Breadcrumb, BreadcrumbItem, BreadcrumbLink, BreadcrumbList, BreadcrumbPage, BreadcrumbSeparator } from "@ucdjs-internal/shared-ui/ui/breadcrumb"; +import { PipelineGraph } from "@ucdjs/pipelines-ui"; + +export const Route = createFileRoute("/pipeline-graph")({ + component: PipelineGraphPage, +}); + +// Fake data for testing the visualization +const fakeGraph: PipelineGraphType = { + nodes: [ + { id: "source-16.0.0", type: "source", version: "16.0.0" }, + { id: "file-ucd-linebreak", type: "file", file: { version: "16.0.0", dir: "ucd", path: "ucd/LineBreak.txt", name: "LineBreak.txt", ext: ".txt" } }, + { id: "file-ucd-unicodedata", type: "file", file: { version: "16.0.0", dir: "ucd", path: "ucd/UnicodeData.txt", name: "UnicodeData.txt", ext: ".txt" } }, + { id: "file-ucd-blocks", type: "file", file: { version: "16.0.0", dir: "ucd", path: "ucd/Blocks.txt", name: "Blocks.txt", ext: ".txt" } }, + { id: "file-emoji-data", type: "file", file: { version: "16.0.0", dir: "emoji", path: "emoji/emoji-data.txt", name: "emoji-data.txt", ext: ".txt" } }, + { id: "route-linebreak", type: "route", routeId: "linebreak" }, + { id: "route-unicodedata", type: "route", routeId: "unicode-data" }, + { id: "route-blocks", type: "route", routeId: "blocks" }, + { id: "route-emoji", type: "route", routeId: "emoji-properties" }, + { id: "artifact-linebreak-json", type: 
"artifact", artifactId: "linebreak.json" }, + { id: "artifact-characters-json", type: "artifact", artifactId: "characters.json" }, + { id: "artifact-blocks-json", type: "artifact", artifactId: "blocks.json" }, + { id: "artifact-emoji-json", type: "artifact", artifactId: "emoji.json" }, + { id: "output-0", type: "output", outputIndex: 0 }, + { id: "output-1", type: "output", outputIndex: 1 }, + { id: "output-2", type: "output", outputIndex: 2 }, + { id: "output-3", type: "output", outputIndex: 3 }, + ], + edges: [ + { from: "source-16.0.0", to: "file-ucd-linebreak", type: "provides" }, + { from: "source-16.0.0", to: "file-ucd-unicodedata", type: "provides" }, + { from: "source-16.0.0", to: "file-ucd-blocks", type: "provides" }, + { from: "source-16.0.0", to: "file-emoji-data", type: "provides" }, + { from: "file-ucd-linebreak", to: "route-linebreak", type: "matched" }, + { from: "file-ucd-unicodedata", to: "route-unicodedata", type: "matched" }, + { from: "file-ucd-blocks", to: "route-blocks", type: "matched" }, + { from: "file-emoji-data", to: "route-emoji", type: "matched" }, + { from: "route-linebreak", to: "artifact-linebreak-json", type: "parsed" }, + { from: "route-unicodedata", to: "artifact-characters-json", type: "parsed" }, + { from: "route-blocks", to: "artifact-blocks-json", type: "parsed" }, + { from: "route-emoji", to: "artifact-emoji-json", type: "parsed" }, + { from: "artifact-linebreak-json", to: "output-0", type: "resolved" }, + { from: "artifact-characters-json", to: "output-1", type: "resolved" }, + { from: "artifact-blocks-json", to: "output-2", type: "resolved" }, + { from: "artifact-emoji-json", to: "output-3", type: "resolved" }, + // Cross-artifact dependency example + { from: "artifact-characters-json", to: "artifact-emoji-json", type: "uses-artifact" }, + ], +}; + +function PipelineGraphPage() { + return ( +
+    <div className="flex h-full flex-col">
+      <div className="border-b p-4">
+        <Breadcrumb>
+          <BreadcrumbList>
+            <BreadcrumbItem>
+              <BreadcrumbLink render={<Link to="/">Home</Link>} />
+            </BreadcrumbItem>
+            <BreadcrumbSeparator />
+            <BreadcrumbItem>
+              <BreadcrumbPage>
+                Pipeline Graph
+              </BreadcrumbPage>
+            </BreadcrumbItem>
+          </BreadcrumbList>
+        </Breadcrumb>
+      </div>
+      <div className="flex-1">
+        <PipelineGraph
+          graph={fakeGraph}
+          onNodeClick={(node) => {
+            if (node) {
+              console.log("Selected node:", node);
+            }
+          }}
+        />
+      </div>
+    </div>
+ ); +} diff --git a/packages/cli/package.json b/packages/cli/package.json index 87b326a39..383f9efa7 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -48,6 +48,8 @@ "@ucdjs/env": "workspace:*", "@ucdjs/fs-bridge": "workspace:*", "@ucdjs/lockfile": "workspace:*", + "@ucdjs/pipelines-loader": "workspace:*", + "@ucdjs/pipelines-server": "workspace:*", "@ucdjs/schema-gen": "workspace:*", "@ucdjs/schemas": "workspace:*", "@ucdjs/ucd-store": "workspace:*", diff --git a/packages/cli/src/cli-utils.ts b/packages/cli/src/cli-utils.ts index c0ffc74b7..0263bc25f 100644 --- a/packages/cli/src/cli-utils.ts +++ b/packages/cli/src/cli-utils.ts @@ -2,6 +2,8 @@ import type { Prettify, RemoveIndexSignature } from "@luxass/utils"; import type { Arguments } from "yargs-parser"; import type { CLICodegenCmdOptions } from "./cmd/codegen/root"; import type { CLIFilesCmdOptions } from "./cmd/files/root"; +import type { CLILockfileCmdOptions } from "./cmd/lockfile/root"; +import type { CLIPipelinesCmdOptions } from "./cmd/pipelines/root"; import type { CLIStoreCmdOptions } from "./cmd/store/root"; import process from "node:process"; import { @@ -22,13 +24,15 @@ type CLICommand | "codegen" | "store" | "files" - | "lockfile"; + | "lockfile" + | "pipelines"; const SUPPORTED_COMMANDS = new Set([ "codegen", "store", "files", "lockfile", + "pipelines", ]); export interface GlobalCLIFlags { @@ -181,6 +185,7 @@ export async function runCommand(cmd: CLICommand, flags: Arguments): Promise { setJsonMode(false); } } + +export function parseRepoString(repoString: string): { owner: string; repo: string } { + const parts = repoString.split("/"); + if (parts.length !== 2) { + throw new Error(`Invalid repository format: ${repoString}. Expected: owner/repo`); + } + return { owner: parts[0]!, repo: parts[1]! 
}; +} diff --git a/packages/cli/src/cmd/pipelines/list.ts b/packages/cli/src/cmd/pipelines/list.ts new file mode 100644 index 000000000..9af3b3c58 --- /dev/null +++ b/packages/cli/src/cmd/pipelines/list.ts @@ -0,0 +1,221 @@ +import type { Prettify } from "@luxass/utils"; +import type { GitHubSource, GitLabSource, LocalSource } from "@ucdjs/pipelines-loader"; +import type { CLIArguments } from "../../cli-utils"; +import path from "node:path"; +import process from "node:process"; +import { findPipelineFiles, findRemotePipelineFiles, loadPipelinesFromPaths, loadRemotePipelines } from "@ucdjs/pipelines-loader"; +import { parseRepoString, printHelp } from "../../cli-utils"; +import { + blankLine, + bold, + cyan, + dim, + header, + keyValue, + output, + yellow, +} from "../../output"; + +export interface CLIPipelinesRunCmdOptions { + flags: CLIArguments>; +} + +export async function runListPipelines({ flags }: CLIPipelinesRunCmdOptions) { + if (flags?.help || flags?.h) { + printHelp({ + headline: "List Pipelines", + commandName: "ucd pipelines list", + usage: "[...flags]", + tables: { + Flags: [ + ["--cwd ", "Search for pipeline files from this directory."], + ["--github ", "Load pipelines from a GitHub repository."], + ["--gitlab ", "Load pipelines from a GitLab repository."], + ["--ref ", "Git reference (branch/tag) for remote repositories."], + ["--path ", "Subdirectory path within the repository."], + ["--help (-h)", "See all available flags."], + ], + }, + }); + return; + } + + const sources: (LocalSource | GitHubSource | GitLabSource)[] = []; + + // Local source + if (flags?.cwd) { + sources.push({ + type: "local", + id: "local", + cwd: flags.cwd, + }); + } + + // GitHub source + if (flags?.github) { + const { owner, repo } = parseRepoString(flags.github as string); + sources.push({ + type: "github", + id: `github-${owner}-${repo}`, + owner, + repo, + ref: flags.ref as string | undefined, + path: flags.path as string | undefined, + }); + } + + // GitLab source + if (flags?.gitlab) { + const { owner, repo } = parseRepoString(flags.gitlab as string); + sources.push({ + type: "gitlab", + id: `gitlab-${owner}-${repo}`, + owner, + repo, + ref: flags.ref as string | undefined, + path: flags.path as string | undefined, + }); + } + + // Default to local if no sources specified + if (sources.length === 0) { + sources.push({ + type: "local", + id: "local", + cwd: process.cwd(), + }); + } + + const allPipelines: { + filePath: string; + pipelines: { name: string; id: string; description?: string; routes: number; sources: number }[]; + exportNames: string[]; + sourceId: string; + sourceType: string; + }[] = []; + const allErrors: { filePath: string; message: string; sourceId: string; sourceType: string }[] = []; + + for (const source of sources) { + try { + let result; + if (source.type === "local") { + const files = await findPipelineFiles({ cwd: (source as LocalSource).cwd }); + result = await loadPipelinesFromPaths(files); + } else { + const fileList = await findRemotePipelineFiles(source as GitHubSource | GitLabSource); + result = await loadRemotePipelines(source as GitHubSource | GitLabSource, fileList.files); + } + + for (const file of result.files) { + allPipelines.push({ + filePath: file.filePath, + exportNames: file.exportNames, + pipelines: file.pipelines.map((p) => ({ + name: p.name ?? 
p.id, + id: p.id, + description: p.description, + routes: p.routes.length, + sources: p.inputs.length, + })), + sourceId: source.id, + sourceType: source.type, + }); + } + + for (const err of result.errors) { + allErrors.push({ + filePath: err.filePath, + message: err.error.message, + sourceId: source.id, + sourceType: source.type, + }); + } + } catch (err) { + allErrors.push({ + filePath: "", + message: err instanceof Error ? err.message : String(err), + sourceId: source.id, + sourceType: source.type, + }); + } + } + + const totalPipelines = allPipelines.reduce((sum, f) => sum + f.pipelines.length, 0); + + header("Pipelines"); + keyValue("Files", String(allPipelines.length)); + keyValue("Pipelines", String(totalPipelines)); + keyValue("Sources", String(sources.length)); + blankLine(); + + // Group by source + for (const source of sources) { + const sourcePipelines = allPipelines.filter((p) => p.sourceId === source.id); + if (sourcePipelines.length === 0) continue; + + if (source.type === "local") { + output.info(`${cyan("local")} ${dim("·")} ${(source as LocalSource).cwd}`); + } else if (source.type === "github") { + const s = source as GitHubSource; + output.info(`${cyan("github")} ${dim("·")} ${s.owner}/${s.repo}${s.ref ? `@${s.ref}` : ""}`); + } else if (source.type === "gitlab") { + const s = source as GitLabSource; + output.info(`${cyan("gitlab")} ${dim("·")} ${s.owner}/${s.repo}${s.ref ? `@${s.ref}` : ""}`); + } + blankLine(); + + for (const f of sourcePipelines) { + if (source.type === "local") { + const rel = path.relative((source as LocalSource).cwd, f.filePath); + output.info(`${dim("•")} ${cyan(rel)}`); + } else { + output.info(`${dim("•")} ${cyan(f.filePath)}`); + } + + if (f.exportNames.length === 0) { + output.info(` ${dim("(no pipeline exports found)")}`); + continue; + } + + const items = f.pipelines.map((p, i) => { + const displayName = p.name ?? f.exportNames[i] ?? "default"; + const idLabel = p.id && p.id !== displayName ? ` ${dim(`[${p.id}]`)}` : ""; + const routesCount = p.routes ?? 0; + const sourcesCount = p.sources ?? 0; + const details = ` ${dim("·")} ${routesCount} route(s) ${dim("·")} ${sourcesCount} source(s)`; + const description = p.description ? ` ${dim("·")} ${p.description}` : ""; + + return `${bold(displayName)}${idLabel}${details}${description}`; + }); + + items.forEach((item, index) => { + const isLast = index === items.length - 1; + const prefix = isLast ? 
"└" : "├"; + output.info(` ${dim(prefix)} ${item}`); + }); + } + + blankLine(); + } + + if (allErrors.length > 0) { + header("Errors"); + for (const e of allErrors) { + let sourceLabel = ""; + if (e.sourceType === "local") { + sourceLabel = "[local] "; + } else if (e.sourceType === "github") { + sourceLabel = "[github] "; + } else if (e.sourceType === "gitlab") { + sourceLabel = "[gitlab] "; + } + output.error(` ${yellow("•")} ${sourceLabel}${e.filePath}: ${e.message}`); + } + } +} diff --git a/packages/cli/src/cmd/pipelines/root.ts b/packages/cli/src/cmd/pipelines/root.ts new file mode 100644 index 000000000..00af65aca --- /dev/null +++ b/packages/cli/src/cmd/pipelines/root.ts @@ -0,0 +1,65 @@ +import type { CLIArguments } from "../../cli-utils"; +import { printHelp } from "../../cli-utils"; + +export interface CLIPipelinesCmdOptions { + flags: CLIArguments<{ + ui: boolean; + port: number; + }>; +} + +const PIPELINES_SUBCOMMANDS = [ + "run", + "list", + "create", +] as const; +export type Subcommand = (typeof PIPELINES_SUBCOMMANDS)[number]; + +function isValidSubcommand(subcommand: string): subcommand is Subcommand { + return PIPELINES_SUBCOMMANDS.includes(subcommand as Subcommand); +} + +export async function runPipelinesRoot(subcommand: string, { flags }: CLIPipelinesCmdOptions) { + const isValidSub = isValidSubcommand(subcommand); + const requestsHelp = flags?.help || flags?.h; + + if (!isValidSub || (requestsHelp && !isValidSub)) { + printHelp({ + commandName: "ucd pipelines", + usage: "[command] [...flags]", + tables: { + Commands: [ + ["run", "Run a pipeline from the command line."], + ["list", "List available pipelines."], + ["create", "Create a new pipeline scaffold."], + ], + Flags: [ + ["--help (-h)", "See all available flags."], + ], + }, + }); + return; + } + + if (subcommand === "run") { + const { runPipelinesRun } = await import("./run"); + await runPipelinesRun({ + flags, + }); + return; + } + + if (subcommand === "list") { + const { runListPipelines } = await import("./list"); + await runListPipelines({ flags }); + return; + } + + // if (subcommand === "create") { + // const { runVerifyStore } = await import("./create"); + // await runVerifyStore({ flags, versions }); + // return; + // } + + throw new Error(`Invalid subcommand: ${subcommand}`); +} diff --git a/packages/cli/src/cmd/pipelines/run.ts b/packages/cli/src/cmd/pipelines/run.ts new file mode 100644 index 000000000..1efe416b6 --- /dev/null +++ b/packages/cli/src/cmd/pipelines/run.ts @@ -0,0 +1,101 @@ +import type { Prettify } from "@luxass/utils"; +import type { CLIArguments } from "../../cli-utils"; +import process from "node:process"; +import { parseRepoString, printHelp } from "../../cli-utils"; +import { output } from "../../output"; + +export interface CLIPipelinesRunCmdOptions { + flags: CLIArguments>; +} + +export async function runPipelinesRun({ flags }: CLIPipelinesRunCmdOptions) { + if (flags?.help || flags?.h) { + printHelp({ + headline: "Run Pipelines", + commandName: "ucd pipelines run", + usage: "[...pipelines] [...flags]", + tables: { + Flags: [ + ["--ui", "Run the pipeline with a UI."], + ["--port ", "Port for the UI server (default: 3030)."], + ["--cwd ", "Search for pipeline files from this directory."], + ["--github ", "Load pipelines from a GitHub repository."], + ["--gitlab ", "Load pipelines from a GitLab repository."], + ["--ref ", "Git reference (branch/tag) for remote repositories."], + ["--path ", "Subdirectory path within the repository."], + ["--help (-h)", "See all available flags."], + 
], + }, + }); + return; + } + + const sources: ({ type: "local"; id: string; cwd: string } | { type: "github"; id: string; owner: string; repo: string; ref?: string; path?: string } | { type: "gitlab"; id: string; owner: string; repo: string; ref?: string; path?: string })[] = []; + + if (flags?.cwd) { + sources.push({ + type: "local", + id: "local", + cwd: flags.cwd, + }); + } + + if (flags?.github) { + const { owner, repo } = parseRepoString(flags.github as string); + sources.push({ + type: "github", + id: `github-${owner}-${repo}`, + owner, + repo, + ref: flags.ref as string | undefined, + path: flags.path as string | undefined, + }); + } + + if (flags?.gitlab) { + const { owner, repo } = parseRepoString(flags.gitlab as string); + sources.push({ + type: "gitlab", + id: `gitlab-${owner}-${repo}`, + owner, + repo, + ref: flags.ref as string | undefined, + path: flags.path as string | undefined, + }); + } + + if (sources.length === 0) { + sources.push({ + type: "local", + id: "local", + cwd: process.cwd(), + }); + } + + if (flags?.ui) { + const { startServer } = await import("@ucdjs/pipelines-server"); + const port = flags?.port ?? 3030; + output.info(`Starting Pipeline UI on port ${port}...`); + for (const source of sources) { + if (source.type === "local") { + output.info(` [local] ${source.cwd}`); + } else if (source.type === "github") { + output.info(` [github] ${source.owner}/${source.repo}${source.ref ? `@${source.ref}` : ""}`); + } else if (source.type === "gitlab") { + output.info(` [gitlab] ${source.owner}/${source.repo}${source.ref ? `@${source.ref}` : ""}`); + } + } + await startServer({ port, sources }); + return; + } + + output.info("Running pipelines..."); +} diff --git a/packages/pipelines/pipeline-artifacts/README.md b/packages/pipelines/pipeline-artifacts/README.md new file mode 100644 index 000000000..77accc96a --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/README.md @@ -0,0 +1,27 @@ +# @ucdjs/pipelines-artifacts + +[![npm version][npm-version-src]][npm-version-href] +[![npm downloads][npm-downloads-src]][npm-downloads-href] +[![codecov][codecov-src]][codecov-href] + +> [!IMPORTANT] +> This is an internal package. It may change without warning and is not subject to semantic versioning. Use at your own risk. + +A collection of core pipeline functionalities for the UCD project. + +## Installation + +```bash +npm install @ucdjs/pipelines-artifacts +``` + +## 📄 License + +Published under [MIT License](./LICENSE). 
+ +[npm-version-src]: https://img.shields.io/npm/v/@ucdjs/pipelines-artifacts?style=flat&colorA=18181B&colorB=4169E1 +[npm-version-href]: https://npmjs.com/package/@ucdjs/pipelines-artifacts +[npm-downloads-src]: https://img.shields.io/npm/dm/@ucdjs/pipelines-artifacts?style=flat&colorA=18181B&colorB=4169E1 +[npm-downloads-href]: https://npmjs.com/package/@ucdjs/pipelines-artifacts +[codecov-src]: https://img.shields.io/codecov/c/gh/ucdjs/ucd?style=flat&colorA=18181B&colorB=4169E1 +[codecov-href]: https://codecov.io/gh/ucdjs/ucd diff --git a/packages/pipelines/pipeline-artifacts/eslint.config.js b/packages/pipelines/pipeline-artifacts/eslint.config.js new file mode 100644 index 000000000..d9c0ca1ec --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/eslint.config.js @@ -0,0 +1,7 @@ +// @ts-check +import { luxass } from "@luxass/eslint-config"; + +export default luxass({ + type: "lib", + pnpm: true, +}); diff --git a/packages/pipelines/pipeline-artifacts/package.json b/packages/pipelines/pipeline-artifacts/package.json new file mode 100644 index 000000000..30e473975 --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/package.json @@ -0,0 +1,55 @@ +{ + "name": "@ucdjs/pipelines-artifacts", + "version": "0.0.1", + "type": "module", + "author": { + "name": "Lucas Nørgård", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.27.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-artifacts" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "exports": { + ".": "./dist/index.mjs", + "./package.json": "./package.json" + }, + "types": "./dist/index.d.mts", + "files": [ + "dist" + ], + "engines": { + "node": ">=22.18" + }, + "scripts": { + "build": "tsdown --tsconfig=./tsconfig.build.json", + "dev": "tsdown --watch", + "clean": "git clean -xdf dist node_modules", + "lint": "eslint .", + "typecheck": "tsc --noEmit -p tsconfig.build.json" + }, + "dependencies": { + "@ucdjs/pipelines-core": "workspace:*", + "zod": "catalog:prod" + }, + "devDependencies": { + "@luxass/eslint-config": "catalog:linting", + "@ucdjs-tooling/tsconfig": "workspace:*", + "@ucdjs-tooling/tsdown-config": "workspace:*", + "eslint": "catalog:linting", + "publint": "catalog:build", + "tsdown": "catalog:build", + "typescript": "catalog:build" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/pipelines/pipeline-artifacts/src/definition.ts b/packages/pipelines/pipeline-artifacts/src/definition.ts new file mode 100644 index 000000000..13eec1841 --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/src/definition.ts @@ -0,0 +1,42 @@ +import type { ParseContext, ParsedRow, PipelineFilter } from "@ucdjs/pipelines-core"; + +export interface ArtifactBuildContext { + version: string; +} + +export interface PipelineArtifactDefinition< + TId extends string = string, + TValue = unknown, +> { + id: TId; + filter?: PipelineFilter; + parser?: (ctx: ParseContext) => AsyncIterable; + build: (ctx: ArtifactBuildContext, rows?: AsyncIterable) => Promise; +} + +export function definePipelineArtifact< + const TId extends string, + TValue, +>( + definition: PipelineArtifactDefinition, +): PipelineArtifactDefinition { + return definition; +} + +export type InferArtifactId = T extends PipelineArtifactDefinition ? TId : never; +export type InferArtifactValue = T extends PipelineArtifactDefinition ? 
TValue : never; + +export type InferArtifactsMap = { + [K in T[number] as InferArtifactId]: InferArtifactValue; +}; + +export function isPipelineArtifactDefinition(value: unknown): value is PipelineArtifactDefinition { + return ( + typeof value === "object" + && value !== null + && "id" in value + && "build" in value + && typeof (value as { id: unknown }).id === "string" + && typeof (value as { build: unknown }).build === "function" + ); +} diff --git a/packages/pipelines/pipeline-artifacts/src/index.ts b/packages/pipelines/pipeline-artifacts/src/index.ts new file mode 100644 index 000000000..9e5da954a --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/src/index.ts @@ -0,0 +1,26 @@ +export type { + ArtifactBuildContext, + InferArtifactId, + InferArtifactsMap, + InferArtifactValue, + PipelineArtifactDefinition, +} from "./definition"; + +export { + definePipelineArtifact, + isPipelineArtifactDefinition, +} from "./definition"; + +export type { + Artifact, + ArtifactDefinition, + GlobalArtifact, + InferArtifactSchemaType, + InferEmittedArtifacts, +} from "./schema"; + +export { + artifact, + isGlobalArtifact, + isVersionArtifact, +} from "./schema"; diff --git a/packages/pipelines/pipeline-artifacts/src/schema.ts b/packages/pipelines/pipeline-artifacts/src/schema.ts new file mode 100644 index 000000000..05e3d690f --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/src/schema.ts @@ -0,0 +1,68 @@ +import type { z } from "zod"; + +export interface Artifact { + _type: "artifact"; + schema: TSchema; + scope: "version"; +} + +export interface GlobalArtifact { + _type: "global-artifact"; + schema: TSchema; + scope: "global"; +} + +export type ArtifactDefinition + = | Artifact + | GlobalArtifact; + +export function artifact( + schema: TSchema, +): Artifact; + +export function artifact( + schema: TSchema, + scope: "version", +): Artifact; + +export function artifact( + schema: TSchema, + scope: "global", +): GlobalArtifact; + +export function artifact( + schema: TSchema, + scope?: "version" | "global", +): ArtifactDefinition { + if (scope === "global") { + return { + _type: "global-artifact", + schema, + scope: "global", + }; + } + return { + _type: "artifact", + schema, + scope: "version", + }; +} + +export type InferArtifactSchemaType + = T extends ArtifactDefinition ? 
z.infer : never; + +export type InferEmittedArtifacts> = { + [K in keyof TEmits]: InferArtifactSchemaType; +}; + +export function isGlobalArtifact( + def: ArtifactDefinition, +): def is GlobalArtifact { + return def._type === "global-artifact"; +} + +export function isVersionArtifact( + def: ArtifactDefinition, +): def is Artifact { + return def._type === "artifact"; +} diff --git a/packages/pipelines/pipeline-artifacts/test/definition.test.ts b/packages/pipelines/pipeline-artifacts/test/definition.test.ts new file mode 100644 index 000000000..0b61f568c --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/test/definition.test.ts @@ -0,0 +1,398 @@ +import type { ParseContext, ParsedRow, PipelineFilter } from "@ucdjs/pipelines-core"; +import type { + ArtifactBuildContext, + InferArtifactId, + InferArtifactsMap, + InferArtifactValue, + PipelineArtifactDefinition, +} from "../src/definition"; +import { assert, describe, expect, expectTypeOf, it, vi } from "vitest"; +import { + definePipelineArtifact, + isPipelineArtifactDefinition, +} from "../src/definition"; + +describe("definePipelineArtifact", () => { + it("should define a minimal artifact", () => { + const build = vi.fn().mockResolvedValue("result"); + const artifact = definePipelineArtifact({ + id: "test-artifact", + build, + }); + + expect(artifact).toEqual({ + id: "test-artifact", + build, + }); + }); + + it("should define artifact with filter", () => { + const build = vi.fn().mockResolvedValue({ count: 42 }); + const filter: PipelineFilter = (ctx) => ctx.file.name.endsWith(".txt"); + + const artifact = definePipelineArtifact({ + id: "filtered-artifact", + filter, + build, + }); + + expect(artifact.id).toBe("filtered-artifact"); + expect(artifact.filter).toBe(filter); + expect(artifact.build).toBe(build); + }); + + it("should define artifact with parser", async () => { + const mockRows: ParsedRow[] = [ + { sourceFile: "test.txt", kind: "point", codePoint: "0041", value: "a" }, + { sourceFile: "test.txt", kind: "range", start: "0042", end: "0043", value: "b" }, + ]; + + async function* parser(_ctx: ParseContext): AsyncIterable { + for (const row of mockRows) { + yield row; + } + } + + const build = vi.fn().mockResolvedValue([]); + + const artifact = definePipelineArtifact({ + id: "parsed-artifact", + parser, + build, + }); + + expect(artifact.parser).toBe(parser); + }); + + it("should define artifact with all properties", async () => { + const filter: PipelineFilter = (ctx) => ctx.file.ext === ".txt"; + async function* parser(_ctx: ParseContext): AsyncIterable { + yield { sourceFile: "test.txt", kind: "point", codePoint: "0041", value: "test" }; + } + const build = vi.fn().mockResolvedValue({ data: "processed" }); + + const artifact = definePipelineArtifact({ + id: "complete-artifact", + filter, + parser, + build, + }); + + expect(artifact).toEqual({ + id: "complete-artifact", + filter, + parser, + build, + }); + }); + + it("should preserve build function signature", async () => { + interface CustomResult { + version: string; + count: number; + } + + const build = async (ctx: ArtifactBuildContext): Promise => { + return { + version: ctx.version, + count: 42, + }; + }; + + const artifact = definePipelineArtifact({ + id: "typed-artifact", + build, + }); + + const context: ArtifactBuildContext = { version: "16.0.0" }; + const result = await artifact.build(context); + + expect(result).toEqual({ + version: "16.0.0", + count: 42, + }); + }); + + it("should work with async build processing rows", async () => { + async function* 
mockParser(): AsyncIterable { + yield { sourceFile: "test.txt", kind: "point", codePoint: "0041", value: "a" }; + yield { sourceFile: "test.txt", kind: "point", codePoint: "0042", value: "b" }; + } + + const build = async ( + ctx: ArtifactBuildContext, + rows?: AsyncIterable, + ): Promise => { + let count = 0; + if (rows) { + for await (const _row of rows) { + count++; + } + } + return count; + }; + + const artifact = definePipelineArtifact({ + id: "counting-artifact", + parser: mockParser, + build, + }); + + const context: ArtifactBuildContext = { version: "16.0.0" }; + const rows = mockParser(); + const result = await artifact.build(context, rows); + + expect(result).toBe(2); + }); +}); + +describe("isPipelineArtifactDefinition", () => { + it("should return true for valid artifact definition", () => { + const valid: PipelineArtifactDefinition = { + id: "test", + build: async () => "result", + }; + + expect(isPipelineArtifactDefinition(valid)).toBe(true); + }); + + it("should return true for artifact with all properties", () => { + const valid: PipelineArtifactDefinition = { + id: "test", + filter: (ctx) => ctx.file.ext === ".txt", + async* parser() { + yield { sourceFile: "test.txt", kind: "point", codePoint: "0041" }; + }, + build: async () => "result", + }; + + expect(isPipelineArtifactDefinition(valid)).toBe(true); + }); + + it("should return false for null", () => { + expect(isPipelineArtifactDefinition(null)).toBe(false); + }); + + it("should return false for undefined", () => { + expect(isPipelineArtifactDefinition(undefined)).toBe(false); + }); + + it("should return false for primitive types", () => { + expect(isPipelineArtifactDefinition("string")).toBe(false); + expect(isPipelineArtifactDefinition(123)).toBe(false); + expect(isPipelineArtifactDefinition(true)).toBe(false); + }); + + it("should return false for empty object", () => { + expect(isPipelineArtifactDefinition({})).toBe(false); + }); + + it("should return false for object missing id", () => { + const invalid = { + build: async () => "result", + }; + + expect(isPipelineArtifactDefinition(invalid)).toBe(false); + }); + + it("should return false for object missing build", () => { + const invalid = { + id: "test", + }; + + expect(isPipelineArtifactDefinition(invalid)).toBe(false); + }); + + it("should return false for object with non-string id", () => { + const invalid = { + id: 123, + build: async () => "result", + }; + + expect(isPipelineArtifactDefinition(invalid)).toBe(false); + }); + + it("should return false for object with non-function build", () => { + const invalid = { + id: "test", + build: "not a function", + }; + + expect(isPipelineArtifactDefinition(invalid)).toBe(false); + }); + + it("should return false for array", () => { + expect(isPipelineArtifactDefinition([])).toBe(false); + expect(isPipelineArtifactDefinition([{ id: "test", build: async () => {} }])).toBe(false); + }); + + it("should work as type guard", () => { + const unknown: unknown = { + id: "test", + build: async () => "result", + }; + + assert(isPipelineArtifactDefinition(unknown)); + expect(unknown.id).toBe("test"); + expect(typeof unknown.build).toBe("function"); + expectTypeOf(unknown).toEqualTypeOf(); + }); +}); + +describe("type inference", () => { + describe("inferArtifactId", () => { + it("should infer artifact id", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const artifact = definePipelineArtifact({ + id: "my-artifact", + build: async () => "result", + }); + + type Id = InferArtifactId; + + 
expectTypeOf().toEqualTypeOf<"my-artifact">(); + }); + + it("should work with const assertion", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const artifact = definePipelineArtifact({ + id: "specific-id" as const, + build: async () => 123, + }); + + type Id = InferArtifactId; + + expectTypeOf().toEqualTypeOf<"specific-id">(); + }); + }); + + describe("inferArtifactValue", () => { + it("should infer artifact value type", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const artifact = definePipelineArtifact({ + id: "test", + build: async (): Promise<{ count: number }> => ({ count: 42 }), + }); + + type Value = InferArtifactValue; + + expectTypeOf().toEqualTypeOf<{ count: number }>(); + }); + + it("should work with primitive return types", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const stringArtifact = definePipelineArtifact({ + id: "string-artifact", + build: async (): Promise => "result", + }); + + type StringValue = InferArtifactValue; + + expectTypeOf().toEqualTypeOf(); + }); + }); + + describe("inferArtifactsMap", () => { + it("should infer map of artifact ids to values", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const artifacts = [ + definePipelineArtifact({ + id: "counts", + build: async (): Promise => 42, + }), + definePipelineArtifact({ + id: "names", + build: async (): Promise => ["a", "b"], + }), + ] as const; + + type ArtifactsMap = InferArtifactsMap; + + expectTypeOf().toEqualTypeOf<{ + counts: number; + names: string[]; + }>(); + }); + + it("should work with complex value types", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const artifacts = [ + definePipelineArtifact({ + id: "data", + build: async (): Promise<{ items: string[]; total: number }> => ({ + items: ["x", "y"], + total: 2, + }), + }), + definePipelineArtifact({ + id: "enabled", + build: async (): Promise => true, + }), + ] as const; + + type ArtifactsMap = InferArtifactsMap; + + expectTypeOf().toEqualTypeOf<{ + data: { items: string[]; total: number }; + enabled: boolean; + }>(); + }); + }); +}); + +describe("build context", () => { + it("should receive version in build context", async () => { + const build = vi.fn(async (ctx: ArtifactBuildContext) => { + return { processedVersion: ctx.version }; + }); + + const artifact = definePipelineArtifact({ + id: "version-aware", + build, + }); + + const context: ArtifactBuildContext = { version: "16.0.0" }; + const result = await artifact.build(context); + + expect(build).toHaveBeenCalledWith(context, undefined); + expect(result).toEqual({ processedVersion: "16.0.0" }); + }); + + it("should receive rows when parser is provided", async () => { + const mockRows: ParsedRow[] = [ + { sourceFile: "test.txt", kind: "point", codePoint: "0041", value: "a" }, + { sourceFile: "test.txt", kind: "point", codePoint: "0042", value: "b" }, + ]; + + async function* mockParser(): AsyncIterable { + for (const row of mockRows) { + yield row; + } + } + + const build = vi.fn(async (ctx: ArtifactBuildContext, rows?: AsyncIterable) => { + const collected: ParsedRow[] = []; + if (rows) { + for await (const row of rows) { + collected.push(row); + } + } + return { version: ctx.version, rowCount: collected.length }; + }); + + const artifact = definePipelineArtifact({ + id: "row-processor", + parser: mockParser, + build, + }); + + const context: ArtifactBuildContext = { version: "16.0.0" }; + const rows = mockParser(); + const result = await artifact.build(context, rows); + + 
expect(result).toEqual({ + version: "16.0.0", + rowCount: 2, + }); + }); +}); diff --git a/packages/pipelines/pipeline-artifacts/test/schema.test.ts b/packages/pipelines/pipeline-artifacts/test/schema.test.ts new file mode 100644 index 000000000..0a5049559 --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/test/schema.test.ts @@ -0,0 +1,263 @@ +import type { + Artifact, + ArtifactDefinition, + GlobalArtifact, + InferArtifactSchemaType, + InferEmittedArtifacts, +} from "../src/schema"; +import { assert, describe, expect, expectTypeOf, it } from "vitest"; +import { z } from "zod"; +import { + artifact, + isGlobalArtifact, + isVersionArtifact, +} from "../src/schema"; + +describe("artifact", () => { + it("should create version artifact by default", () => { + const schema = z.string(); + const result = artifact(schema); + + expect(result).toEqual({ + _type: "artifact", + schema, + scope: "version", + }); + }); + + it("should create version artifact when explicitly specified", () => { + const schema = z.number(); + const result = artifact(schema, "version"); + + expect(result).toEqual({ + _type: "artifact", + schema, + scope: "version", + }); + }); + + it("should create global artifact when scope is global", () => { + const schema = z.boolean(); + const result = artifact(schema, "global"); + + expect(result).toEqual({ + _type: "global-artifact", + schema, + scope: "global", + }); + }); + + it("should work with complex schemas", () => { + const schema = z.object({ + id: z.string(), + count: z.number(), + metadata: z.record(z.string(), z.unknown()), + }); + const result = artifact(schema, "version"); + + expect(result.schema).toBe(schema); + expect(result.scope).toBe("version"); + }); + + it("should work with array schemas", () => { + const schema = z.array(z.string()); + const result = artifact(schema); + + expect(result.schema).toBe(schema); + expect(result._type).toBe("artifact"); + }); + + it("should work with union schemas", () => { + const schema = z.union([z.string(), z.number()]); + const result = artifact(schema, "global"); + + expect(result.schema).toBe(schema); + expect(result._type).toBe("global-artifact"); + }); +}); + +describe("isGlobalArtifact", () => { + it("should return true for global artifacts", () => { + const schema = z.string(); + const globalArt: ArtifactDefinition = { + _type: "global-artifact", + schema, + scope: "global", + }; + + expect(isGlobalArtifact(globalArt)).toBe(true); + }); + + it("should return false for version artifacts", () => { + const schema = z.string(); + const versionArt = artifact(schema, "version"); + + expect(isGlobalArtifact(versionArt)).toBe(false); + }); + + it("should work as type guard", () => { + const schema = z.object({ value: z.string() }); + const art: ArtifactDefinition = artifact(schema, "global"); + + assert(isGlobalArtifact(art)); + expect(art._type).toBe("global-artifact"); + expect(art.scope).toBe("global"); + expectTypeOf(art).toEqualTypeOf(); + }); +}); + +describe("isVersionArtifact", () => { + it("should return true for version artifacts", () => { + const schema = z.number(); + const versionArt: Artifact = { + _type: "artifact", + schema, + scope: "version", + }; + + expect(isVersionArtifact(versionArt)).toBe(true); + }); + + it("should return false for global artifacts", () => { + const schema = z.number(); + const globalArt: GlobalArtifact = { + _type: "global-artifact", + schema, + scope: "global", + }; + + expect(isVersionArtifact(globalArt)).toBe(false); + }); + + it("should work as type guard", () => { + const 
schema = z.object({ count: z.number() }); + const art: ArtifactDefinition = artifact(schema, "version"); + + assert(isVersionArtifact(art)); + expect(art._type).toBe("artifact"); + expect(art.scope).toBe("version"); + expectTypeOf(art).toEqualTypeOf(); + }); +}); + +describe("type inference", () => { + describe("inferArtifactSchemaType", () => { + it("should infer schema type correctly", () => { + const schema = z.object({ + id: z.string(), + count: z.number(), + }); + // eslint-disable-next-line unused-imports/no-unused-vars + const art = artifact(schema); + + type Inferred = InferArtifactSchemaType; + + expectTypeOf().toEqualTypeOf<{ id: string; count: number }>(); + }); + + it("should work with primitive types", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const stringArt = artifact(z.string()); + type StringType = InferArtifactSchemaType; + + expectTypeOf().toEqualTypeOf(); + }); + }); + + describe("inferEmittedArtifacts", () => { + it("should infer multiple artifact types", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const emits = { + result: artifact(z.object({ value: z.string() })), + count: artifact(z.number()), + enabled: artifact(z.boolean()), + } as const; + + type Inferred = InferEmittedArtifacts; + + expectTypeOf().toEqualTypeOf<{ + readonly result: { value: string }; + readonly count: number; + readonly enabled: boolean; + }>(); + }); + + it("should work with global and version artifacts", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const emits = { + global: artifact(z.string(), "global"), + version: artifact(z.number(), "version"), + } as const; + + type Inferred = InferEmittedArtifacts; + + expectTypeOf().toEqualTypeOf<{ + readonly global: string; + readonly version: number; + }>(); + }); + + it("should work with complex nested schemas", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const emits = { + data: artifact( + z.object({ + items: z.array(z.string()), + metadata: z.record(z.string(), z.unknown()), + }), + ), + } as const; + + type Inferred = InferEmittedArtifacts; + + expectTypeOf().toEqualTypeOf<{ + readonly data: { + items: string[]; + metadata: Record; + }; + }>(); + }); + }); +}); + +describe("schema validation", () => { + it("should validate data with artifact schema", () => { + const schema = z.object({ + name: z.string(), + age: z.number(), + }); + const art = artifact(schema); + + const validData = { name: "John", age: 30 }; + const result = art.schema.parse(validData); + + expect(result).toEqual(validData); + }); + + it("should reject invalid data", () => { + const schema = z.object({ + name: z.string(), + age: z.number(), + }); + const art = artifact(schema); + + const invalidData = { name: "John", age: "thirty" }; + const result = art.schema.safeParse(invalidData); + + expect(result.success).toBe(false); + }); + + it("should work with optional fields", () => { + const schema = z.object({ + required: z.string(), + optional: z.number().optional(), + }); + const art = artifact(schema); + + const data1 = { required: "test" }; + const data2 = { required: "test", optional: 42 }; + + expect(art.schema.safeParse(data1).success).toBe(true); + expect(art.schema.safeParse(data2).success).toBe(true); + }); +}); diff --git a/packages/pipelines/pipeline-artifacts/tsconfig.build.json b/packages/pipelines/pipeline-artifacts/tsconfig.build.json new file mode 100644 index 000000000..36c889e0c --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/tsconfig.build.json @@ 
-0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "test"] +} diff --git a/packages/pipelines/pipeline-artifacts/tsconfig.json b/packages/pipelines/pipeline-artifacts/tsconfig.json new file mode 100644 index 000000000..9c6dd744b --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + "include": [ + "src", + "test" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-artifacts/tsdown.config.ts b/packages/pipelines/pipeline-artifacts/tsdown.config.ts new file mode 100644 index 000000000..dee0149e6 --- /dev/null +++ b/packages/pipelines/pipeline-artifacts/tsdown.config.ts @@ -0,0 +1,7 @@ +import { createTsdownConfig } from "@ucdjs-tooling/tsdown-config"; + +export default createTsdownConfig({ + entry: [ + "./src/index.ts", + ], +}); diff --git a/packages/pipelines/pipeline-core/README.md b/packages/pipelines/pipeline-core/README.md new file mode 100644 index 000000000..494d64b1c --- /dev/null +++ b/packages/pipelines/pipeline-core/README.md @@ -0,0 +1,27 @@ +# @ucdjs/pipelines-core + +[![npm version][npm-version-src]][npm-version-href] +[![npm downloads][npm-downloads-src]][npm-downloads-href] +[![codecov][codecov-src]][codecov-href] + +> [!IMPORTANT] +> This is an internal package. It may change without warning and is not subject to semantic versioning. Use at your own risk. + +A collection of core pipeline functionalities for the UCD project. + +## Installation + +```bash +npm install @ucdjs/pipelines-core +``` + +## 📄 License + +Published under [MIT License](./LICENSE). + +[npm-version-src]: https://img.shields.io/npm/v/@ucdjs/pipelines-core?style=flat&colorA=18181B&colorB=4169E1 +[npm-version-href]: https://npmjs.com/package/@ucdjs/pipelines-core +[npm-downloads-src]: https://img.shields.io/npm/dm/@ucdjs/pipelines-core?style=flat&colorA=18181B&colorB=4169E1 +[npm-downloads-href]: https://npmjs.com/package/@ucdjs/pipelines-core +[codecov-src]: https://img.shields.io/codecov/c/gh/ucdjs/ucd?style=flat&colorA=18181B&colorB=4169E1 +[codecov-href]: https://codecov.io/gh/ucdjs/ucd diff --git a/packages/pipelines/pipeline-core/eslint.config.js b/packages/pipelines/pipeline-core/eslint.config.js new file mode 100644 index 000000000..d9c0ca1ec --- /dev/null +++ b/packages/pipelines/pipeline-core/eslint.config.js @@ -0,0 +1,7 @@ +// @ts-check +import { luxass } from "@luxass/eslint-config"; + +export default luxass({ + type: "lib", + pnpm: true, +}); diff --git a/packages/pipelines/pipeline-core/package.json b/packages/pipelines/pipeline-core/package.json new file mode 100644 index 000000000..55e0a67c9 --- /dev/null +++ b/packages/pipelines/pipeline-core/package.json @@ -0,0 +1,58 @@ +{ + "name": "@ucdjs/pipelines-core", + "version": "0.0.1", + "type": "module", + "author": { + "name": "Lucas Nørgård", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.27.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-core" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "exports": { + ".": "./dist/index.mjs", + "./package.json": "./package.json" + }, + "types": "./dist/index.d.mts", + "files": [ + "dist" + ], + "engines": { + "node": ">=22.18" + }, + "scripts": { + "build": "tsdown --tsconfig=./tsconfig.build.json", + "dev": "tsdown --watch", + "clean": 
"git clean -xdf dist node_modules", + "lint": "eslint .", + "typecheck": "tsc --noEmit -p tsconfig.build.json" + }, + "dependencies": { + "@ucdjs-internal/shared": "workspace:*", + "picomatch": "catalog:prod", + "zod": "catalog:prod" + }, + "devDependencies": { + "@luxass/eslint-config": "catalog:linting", + "@types/picomatch": "catalog:types", + "@ucdjs-tooling/tsconfig": "workspace:*", + "@ucdjs-tooling/tsdown-config": "workspace:*", + "eslint": "catalog:linting", + "publint": "catalog:build", + "tsdown": "catalog:build", + "tsx": "catalog:build", + "typescript": "catalog:build" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/pipelines/pipeline-core/src/dag.ts b/packages/pipelines/pipeline-core/src/dag.ts new file mode 100644 index 000000000..4ed489253 --- /dev/null +++ b/packages/pipelines/pipeline-core/src/dag.ts @@ -0,0 +1,246 @@ +import type { PipelineRouteDefinition } from "./route"; +import { isArtifactDependency, isRouteDependency, parseDependency } from "./dependencies"; + +export interface DAGNode { + id: string; + dependencies: Set; + dependents: Set; + emittedArtifacts: Set; +} + +export interface DAG { + nodes: Map; + executionOrder: string[]; +} + +export interface DAGValidationError { + type: "cycle" | "missing-route" | "missing-artifact" | "duplicate-route"; + message: string; + details: { + routeId?: string; + dependencyId?: string; + cycle?: string[]; + }; +} + +export interface DAGValidationResult { + valid: boolean; + errors: DAGValidationError[]; + dag?: DAG; +} + +export function buildDAG(routes: readonly PipelineRouteDefinition[]): DAGValidationResult { + const errors: DAGValidationError[] = []; + const nodes = new Map(); + const artifactsByRoute = new Map>(); + + const seenIds = new Map(); + for (let i = 0; i < routes.length; i++) { + const route = routes[i]; + if (!route) continue; + const id = route.id; + if (seenIds.has(id)) { + errors.push({ + type: "duplicate-route", + message: `Duplicate route ID "${id}" found at index ${seenIds.get(id)} and ${i}`, + details: { routeId: id }, + }); + } else { + seenIds.set(id, i); + } + } + + if (errors.length > 0) { + return { valid: false, errors }; + } + + const routeIds = new Set(routes.map((r) => r.id)); + + for (const route of routes) { + const emittedArtifacts = new Set(); + if (route.emits) { + for (const artifactName of Object.keys(route.emits)) { + emittedArtifacts.add(`${route.id}:${artifactName}`); + } + } + artifactsByRoute.set(route.id, emittedArtifacts); + + nodes.set(route.id, { + id: route.id, + dependencies: new Set(), + dependents: new Set(), + emittedArtifacts, + }); + } + + for (const route of routes) { + const node = nodes.get(route.id)!; + + if (!route.depends) continue; + + for (const dep of route.depends) { + const parsed = parseDependency(dep); + + if (isRouteDependency(dep)) { + if (!routeIds.has(parsed.routeId)) { + errors.push({ + type: "missing-route", + message: `Route "${route.id}" depends on non-existent route "${parsed.routeId}"`, + details: { routeId: route.id, dependencyId: parsed.routeId }, + }); + continue; + } + node.dependencies.add(parsed.routeId); + nodes.get(parsed.routeId)!.dependents.add(route.id); + } else if (isArtifactDependency(dep)) { + const artifactParsed = parseDependency(dep); + if (artifactParsed.type !== "artifact") continue; + + if (!routeIds.has(artifactParsed.routeId)) { + errors.push({ + type: "missing-route", + message: `Route "${route.id}" depends on artifact from non-existent route "${artifactParsed.routeId}"`, + details: { routeId: 
route.id, dependencyId: artifactParsed.routeId }, + }); + continue; + } + + const routeArtifacts = artifactsByRoute.get(artifactParsed.routeId); + const artifactKey = `${artifactParsed.routeId}:${artifactParsed.artifactName}`; + if (!routeArtifacts?.has(artifactKey)) { + errors.push({ + type: "missing-artifact", + message: `Route "${route.id}" depends on non-existent artifact "${artifactParsed.artifactName}" from route "${artifactParsed.routeId}"`, + details: { routeId: route.id, dependencyId: artifactKey }, + }); + continue; + } + + node.dependencies.add(artifactParsed.routeId); + nodes.get(artifactParsed.routeId)!.dependents.add(route.id); + } + } + } + + const cycleResult = detectCycle(nodes); + if (cycleResult) { + errors.push({ + type: "cycle", + message: `Circular dependency detected: ${cycleResult.join(" -> ")}`, + details: { cycle: cycleResult }, + }); + } + + if (errors.length > 0) { + return { valid: false, errors }; + } + + const executionOrder = topologicalSort(nodes); + + return { + valid: true, + errors: [], + dag: { nodes, executionOrder }, + }; +} + +function detectCycle(nodes: Map): string[] | null { + const visited = new Set(); + const recursionStack = new Set(); + const path: string[] = []; + + function dfs(nodeId: string): string[] | null { + visited.add(nodeId); + recursionStack.add(nodeId); + path.push(nodeId); + + const node = nodes.get(nodeId); + if (node) { + for (const depId of node.dependencies) { + if (!visited.has(depId)) { + const cycle = dfs(depId); + if (cycle) return cycle; + } else if (recursionStack.has(depId)) { + const cycleStart = path.indexOf(depId); + return [...path.slice(cycleStart), depId]; + } + } + } + + path.pop(); + recursionStack.delete(nodeId); + return null; + } + + for (const nodeId of nodes.keys()) { + if (!visited.has(nodeId)) { + const cycle = dfs(nodeId); + if (cycle) return cycle; + } + } + + return null; +} + +function topologicalSort(nodes: Map): string[] { + const result: string[] = []; + const visited = new Set(); + const temp = new Set(); + + function visit(nodeId: string): void { + if (temp.has(nodeId)) return; + if (visited.has(nodeId)) return; + + temp.add(nodeId); + + const node = nodes.get(nodeId); + if (node) { + for (const depId of node.dependencies) { + visit(depId); + } + } + + temp.delete(nodeId); + visited.add(nodeId); + result.push(nodeId); + } + + for (const nodeId of nodes.keys()) { + if (!visited.has(nodeId)) { + visit(nodeId); + } + } + + return result; +} + +export function getExecutionLayers(dag: DAG): string[][] { + const layers: string[][] = []; + const scheduled = new Set(); + const remaining = new Set(dag.nodes.keys()); + + while (remaining.size > 0) { + const layer: string[] = []; + + for (const nodeId of remaining) { + const node = dag.nodes.get(nodeId)!; + const allDepsScheduled = [...node.dependencies].every((dep) => scheduled.has(dep)); + if (allDepsScheduled) { + layer.push(nodeId); + } + } + + if (layer.length === 0) { + break; + } + + for (const nodeId of layer) { + remaining.delete(nodeId); + scheduled.add(nodeId); + } + + layers.push(layer); + } + + return layers; +} diff --git a/packages/pipelines/pipeline-core/src/dependencies.ts b/packages/pipelines/pipeline-core/src/dependencies.ts new file mode 100644 index 000000000..43aff2445 --- /dev/null +++ b/packages/pipelines/pipeline-core/src/dependencies.ts @@ -0,0 +1,70 @@ +type RouteDependency = `route:${string}`; +type ArtifactDependency = `artifact:${string}:${string}`; + +export type PipelineDependency = RouteDependency | ArtifactDependency; + 
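A minimal sketch of how `buildDAG` and `getExecutionLayers` compose, assuming a hypothetical `makeRoute` helper that fills in the fields the DAG does not inspect:

```ts
import { buildDAG, getExecutionLayers } from "@ucdjs/pipelines-core";

// Hypothetical helper: only `id` and `depends` matter for DAG construction.
const makeRoute = (id: string, depends?: `route:${string}`[]) => ({
  id,
  depends,
  filter: () => true,
  parser: async function* () {},
  resolver: async () => [],
});

const result = buildDAG([
  makeRoute("parse"),
  makeRoute("normalize", ["route:parse"]),
  makeRoute("emit", ["route:normalize"]),
]);

if (result.valid && result.dag) {
  // Layers group routes that may run concurrently:
  // [["parse"], ["normalize"], ["emit"]]
  console.log(getExecutionLayers(result.dag));
}
```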
+export interface ParsedRouteDependency { + type: "route"; + routeId: string; +} + +export interface ParsedArtifactDependency { + type: "artifact"; + routeId: string; + artifactName: string; +} + +export type ParsedDependency = ParsedRouteDependency | ParsedArtifactDependency; + +export type ParseDependencyType + = T extends `route:${infer RouteId}` + ? { type: "route"; routeId: RouteId } + : T extends `artifact:${infer RouteId}:${infer ArtifactName}` + ? { type: "artifact"; routeId: RouteId; artifactName: ArtifactName } + : never; + +export type ExtractRouteDependencies = { + [K in keyof T]: T[K] extends `route:${infer RouteId}` ? RouteId : never; +}[number]; + +export type ExtractArtifactDependencies = { + [K in keyof T]: T[K] extends `artifact:${infer RouteId}:${infer ArtifactName}` + ? { routeId: RouteId; artifactName: ArtifactName } + : never; +}[number]; + +export type ExtractArtifactKeys = { + [K in keyof T]: T[K] extends `artifact:${infer RouteId}:${infer ArtifactName}` + ? `${RouteId}:${ArtifactName}` + : never; +}[number]; + +export function parseDependency(dep: PipelineDependency): ParsedDependency { + const parts = dep.split(":"); + + if (parts[0] === "route" && parts[1]) { + return { type: "route", routeId: parts[1] }; + } + + if (parts[0] === "artifact" && parts[1] && parts[2]) { + return { type: "artifact", routeId: parts[1], artifactName: parts[2] }; + } + + throw new Error(`Invalid dependency format: ${dep}. Expected "route:" or "artifact::"`); +} + +export function isRouteDependency(dep: PipelineDependency): dep is RouteDependency { + return dep.startsWith("route:"); +} + +export function isArtifactDependency(dep: PipelineDependency): dep is ArtifactDependency { + return dep.startsWith("artifact:"); +} + +export function createRouteDependency(routeId: string): RouteDependency { + return `route:${routeId}`; +} + +export function createArtifactDependency(routeId: string, artifactName: string): ArtifactDependency { + return `artifact:${routeId}:${artifactName}`; +} diff --git a/packages/pipelines/pipeline-core/src/events.ts b/packages/pipelines/pipeline-core/src/events.ts new file mode 100644 index 000000000..f87d51418 --- /dev/null +++ b/packages/pipelines/pipeline-core/src/events.ts @@ -0,0 +1,255 @@ +import type { FileContext } from "./types"; + +export type PipelineEventType + = | "pipeline:start" + | "pipeline:end" + | "version:start" + | "version:end" + | "artifact:start" + | "artifact:end" + | "artifact:produced" + | "artifact:consumed" + | "file:matched" + | "file:skipped" + | "file:fallback" + | "parse:start" + | "parse:end" + | "resolve:start" + | "resolve:end" + | "cache:hit" + | "cache:miss" + | "cache:store" + | "error"; + +export interface PipelineStartEvent { + id: string; + type: "pipeline:start"; + versions: string[]; + timestamp: number; +} + +export interface PipelineEndEvent { + id: string; + type: "pipeline:end"; + durationMs: number; + timestamp: number; +} + +export interface VersionStartEvent { + id: string; + type: "version:start"; + version: string; + timestamp: number; +} + +export interface VersionEndEvent { + id: string; + type: "version:end"; + version: string; + durationMs: number; + timestamp: number; +} + +export interface ArtifactStartEvent { + id: string; + type: "artifact:start"; + artifactId: string; + version: string; + timestamp: number; +} + +export interface ArtifactEndEvent { + id: string; + type: "artifact:end"; + artifactId: string; + version: string; + durationMs: number; + timestamp: number; +} + +export interface 
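A quick sketch of the builder/parse round-trip above (IDs illustrative):

```ts
import {
  createArtifactDependency,
  createRouteDependency,
  parseDependency,
} from "@ucdjs/pipelines-core";

const routeDep = createRouteDependency("parser");          // "route:parser"
const artDep = createArtifactDependency("parser", "rows"); // "artifact:parser:rows"

// parseDependency discriminates on the prefix:
const parsed = parseDependency(artDep);
if (parsed.type === "artifact") {
  console.log(parsed.routeId, parsed.artifactName); // "parser" "rows"
}
console.log(parseDependency(routeDep)); // { type: "route", routeId: "parser" }
```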
diff --git a/packages/pipelines/pipeline-core/src/events.ts b/packages/pipelines/pipeline-core/src/events.ts new file mode 100644 index 000000000..f87d51418 --- /dev/null +++ b/packages/pipelines/pipeline-core/src/events.ts @@ -0,0 +1,255 @@ +import type { FileContext } from "./types"; + +export type PipelineEventType + = | "pipeline:start" + | "pipeline:end" + | "version:start" + | "version:end" + | "artifact:start" + | "artifact:end" + | "artifact:produced" + | "artifact:consumed" + | "file:matched" + | "file:skipped" + | "file:fallback" + | "parse:start" + | "parse:end" + | "resolve:start" + | "resolve:end" + | "cache:hit" + | "cache:miss" + | "cache:store" + | "error"; + +export interface PipelineStartEvent { + id: string; + type: "pipeline:start"; + versions: string[]; + timestamp: number; +} + +export interface PipelineEndEvent { + id: string; + type: "pipeline:end"; + durationMs: number; + timestamp: number; +} + +export interface VersionStartEvent { + id: string; + type: "version:start"; + version: string; + timestamp: number; +} + +export interface VersionEndEvent { + id: string; + type: "version:end"; + version: string; + durationMs: number; + timestamp: number; +} + +export interface ArtifactStartEvent { + id: string; + type: "artifact:start"; + artifactId: string; + version: string; + timestamp: number; +} + +export interface ArtifactEndEvent { + id: string; + type: "artifact:end"; + artifactId: string; + version: string; + durationMs: number; + timestamp: number; +} + +export interface ArtifactProducedEvent { + id: string; + type: "artifact:produced"; + artifactId: string; + routeId: string; + version: string; + timestamp: number; +} + +export interface ArtifactConsumedEvent { + id: string; + type: "artifact:consumed"; + artifactId: string; + routeId: string; + version: string; + timestamp: number; +} + +export interface FileMatchedEvent { + id: string; + type: "file:matched"; + file: FileContext; + routeId: string; + timestamp: number; +} + +export interface FileSkippedEvent { + id: string; + type: "file:skipped"; + file: FileContext; + reason: "no-match" | "filtered"; + timestamp: number; +} + +export interface FileFallbackEvent { + id: string; + type: "file:fallback"; + file: FileContext; + timestamp: number; +} + +export interface ParseStartEvent { + id: string; + type: "parse:start"; + file: FileContext; + routeId: string; + timestamp: number; +} + +export interface ParseEndEvent { + id: string; + type: "parse:end"; + file: FileContext; + routeId: string; + rowCount: number; + durationMs: number; + timestamp: number; +} + +export interface ResolveStartEvent { + id: string; + type: "resolve:start"; + file: FileContext; + routeId: string; + timestamp: number; +} + +export interface ResolveEndEvent { + id: string; + type: "resolve:end"; + file: FileContext; + routeId: string; + outputCount: number; + durationMs: number; + timestamp: number; +} + +export interface CacheHitEvent { + id: string; + type: "cache:hit"; + routeId: string; + file: FileContext; + version: string; + timestamp: number; +} + +export interface CacheMissEvent { + id: string; + type: "cache:miss"; + routeId: string; + file: FileContext; + version: string; + timestamp: number; +} + +export interface CacheStoreEvent { + id: string; + type: "cache:store"; + routeId: string; + file: FileContext; + version: string; + timestamp: number; +} + +export interface PipelineErrorEvent { + id: string; + type: "error"; + error: PipelineError; + timestamp: number; +} + +export type PipelineEvent + = | PipelineStartEvent + | PipelineEndEvent + | VersionStartEvent + | VersionEndEvent + | ArtifactStartEvent + | ArtifactEndEvent + | ArtifactProducedEvent + | ArtifactConsumedEvent + | FileMatchedEvent + | FileSkippedEvent + | FileFallbackEvent + | ParseStartEvent + | ParseEndEvent + | ResolveStartEvent + | ResolveEndEvent + | CacheHitEvent + | CacheMissEvent + | CacheStoreEvent + | PipelineErrorEvent; + +export type PipelineErrorScope = "pipeline" | "version" | "file" | "route" | "artifact"; + +export interface PipelineError { + scope: PipelineErrorScope; + message: string; + error?: unknown; + file?: FileContext; + routeId?: string; + artifactId?: string; + version?: string; +} + +export type PipelineGraphNodeType = "source" | "file" | "route" | "artifact" | "output"; + +export type PipelineGraphNode + = | { id: string; type: "source"; version: string } + | { id: string; type: "file"; file: FileContext } + | { id: string; type: "route"; routeId: string } + | { id: string; type: "artifact"; artifactId: string } + | { id: string; type: "output"; outputIndex: number; property?: string }; + +export type PipelineGraphEdgeType = "provides" | "matched" | "parsed" | "resolved" | "uses-artifact"; + +export interface PipelineGraphEdge { + from: string; + to: string; + type: PipelineGraphEdgeType; +} + +export interface PipelineGraph { + nodes: PipelineGraphNode[]; + edges: PipelineGraphEdge[]; +} + +export type PipelineEventInput + = | Omit<PipelineStartEvent, "id"> & { id?: string } + | Omit<PipelineEndEvent, "id"> & { id?: string } + | Omit<VersionStartEvent, "id"> & { id?: string } + | Omit<VersionEndEvent, "id"> & { id?: string } + | Omit<ArtifactStartEvent, "id"> & { id?: string } + | Omit<ArtifactEndEvent, "id"> & { id?: string } + | Omit<ArtifactProducedEvent, "id"> & { id?: string } + | Omit<ArtifactConsumedEvent, "id"> & { id?: string } + | Omit<FileMatchedEvent, "id"> & { id?: string } + | Omit<FileSkippedEvent, "id"> & { id?: string } + | Omit<FileFallbackEvent, "id"> & { id?: string } + | Omit<ParseStartEvent, "id"> & { id?: string } + | Omit<ParseEndEvent, "id"> & { id?: string } + | Omit<ResolveStartEvent, "id"> & { id?: string } + | Omit<ResolveEndEvent, "id"> & { id?: string } + | Omit<CacheHitEvent, "id"> & { id?: string } + | Omit<CacheMissEvent, "id"> & { id?: string } + | Omit<CacheStoreEvent, "id"> & { id?: string } + | Omit<PipelineErrorEvent, "id"> & { id?: string }; diff --git a/packages/pipelines/pipeline-core/src/filters.ts b/packages/pipelines/pipeline-core/src/filters.ts new file mode 100644 index 000000000..7adb1c7a0 --- /dev/null +++ b/packages/pipelines/pipeline-core/src/filters.ts @@ -0,0 +1,62 @@ +import type { FileContext, PipelineFilter } from "./types"; +import picomatch from "picomatch"; + +export function byName(name: string): PipelineFilter { + return (ctx) => ctx.file.name === name; +} + +export function byDir(dir: FileContext["dir"]): PipelineFilter { + return (ctx) => ctx.file.dir === dir; +} + +export function byExt(ext: string): PipelineFilter { + if (ext === "") { + return (ctx) => ctx.file.ext === ""; + } + const normalizedExt = ext.startsWith(".") ? ext : `.${ext}`; + return (ctx) => ctx.file.ext === normalizedExt; +} + +export function byGlob(pattern: string): PipelineFilter { + const matcher = picomatch(pattern); + return (ctx) => matcher(ctx.file.path); +} + +export function byPath(pathPattern: string | RegExp): PipelineFilter { + if (typeof pathPattern === "string") { + return (ctx) => ctx.file.path === pathPattern; + } + return (ctx) => pathPattern.test(ctx.file.path); +} + +export function byProp(pattern: string | RegExp): PipelineFilter { + if (typeof pattern === "string") { + return (ctx) => ctx.row?.property === pattern; + } + return (ctx) => !!ctx.row?.property && pattern.test(ctx.row.property); +} + +export function bySource(sourceIds: string | string[]): PipelineFilter { + const ids = Array.isArray(sourceIds) ? 
sourceIds : [sourceIds]; + return (ctx) => ctx.source != null && ids.includes(ctx.source.id); +} + +export function and(...filters: PipelineFilter[]): PipelineFilter { + return (ctx) => filters.every((f) => f(ctx)); +} + +export function or(...filters: PipelineFilter[]): PipelineFilter { + return (ctx) => filters.some((f) => f(ctx)); +} + +export function not(filter: PipelineFilter): PipelineFilter { + return (ctx) => !filter(ctx); +} + +export function always(): PipelineFilter { + return () => true; +} + +export function never(): PipelineFilter { + return () => false; +} diff --git a/packages/pipelines/pipeline-core/src/index.ts b/packages/pipelines/pipeline-core/src/index.ts new file mode 100644 index 000000000..548b2e0d1 --- /dev/null +++ b/packages/pipelines/pipeline-core/src/index.ts @@ -0,0 +1,154 @@ +export type { + DAG, + DAGNode, + DAGValidationError, + DAGValidationResult, +} from "./dag"; + +export { + buildDAG, + getExecutionLayers, +} from "./dag"; + +export type { + ExtractArtifactDependencies, + ExtractArtifactKeys, + ExtractRouteDependencies, + ParsedArtifactDependency, + ParsedDependency, + ParseDependencyType, + ParsedRouteDependency, + PipelineDependency, +} from "./dependencies"; + +export { + createArtifactDependency, + createRouteDependency, + isArtifactDependency, + isRouteDependency, + parseDependency, +} from "./dependencies"; + +export type { + ArtifactConsumedEvent, + ArtifactEndEvent, + ArtifactProducedEvent, + ArtifactStartEvent, + CacheHitEvent, + CacheMissEvent, + CacheStoreEvent, + FileFallbackEvent, + FileMatchedEvent, + FileSkippedEvent, + ParseEndEvent, + ParseStartEvent, + PipelineEndEvent, + PipelineError, + PipelineErrorEvent, + PipelineErrorScope, + PipelineEvent, + PipelineEventInput, + PipelineEventType, + PipelineGraph, + PipelineGraphEdge, + PipelineGraphEdgeType, + PipelineGraphNode, + PipelineGraphNodeType, + PipelineStartEvent, + ResolveEndEvent, + ResolveStartEvent, + VersionEndEvent, + VersionStartEvent, +} from "./events"; + +export { + always, + and, + byDir, + byExt, + byGlob, + byName, + byPath, + byProp, + bySource, + never, + not, + or, +} from "./filters"; + +export type { + FallbackRouteDefinition, + InferPipelineOutput, + InferPipelineRouteIds, + InferPipelineSourceIds, + PipelineDefinition, + PipelineDefinitionOptions, +} from "./pipeline"; + +export { + definePipeline, + getPipelineRouteIds, + getPipelineSourceIds, + isPipelineDefinition, +} from "./pipeline"; + +export type { + ArtifactDefinition, + InferArtifactType, + InferEmittedArtifactsFromRoute, + InferRouteDepends, + InferRouteEmits, + InferRouteId, + InferRouteOutput, + InferRoutesOutput, + InferRouteTransforms, + PipelineRouteDefinition, + RouteResolveContext, +} from "./route"; + +export { definePipelineRoute } from "./route"; + +export type { + FileMetadata, + InferSourceId, + InferSourceIds, + PipelineSourceDefinition, + SourceBackend, + SourceFileContext, + StreamOptions, +} from "./source"; + +export { + definePipelineSource, + resolveMultipleSourceFiles, + resolveSourceFiles, +} from "./source"; + +export type { + ChainTransforms, + InferTransformInput, + InferTransformOutput, + PipelineTransformDefinition, + TransformContext, +} from "./transform"; + +export { + applyTransforms, + definePipelineTransform, +} from "./transform"; + +export type { + DefaultRange, + FileContext, + FilterContext, + ParseContext, + ParsedRow, + ParserFn, + PipelineFilter, + PropertyJson, + ResolveContext, + ResolvedEntry, + ResolverFn, + RouteOutput, + RowContext, +} from "./types"; diff 
--git a/packages/pipelines/pipeline-core/src/pipeline.ts b/packages/pipelines/pipeline-core/src/pipeline.ts new file mode 100644 index 000000000..503b031e6 --- /dev/null +++ b/packages/pipelines/pipeline-core/src/pipeline.ts @@ -0,0 +1,270 @@ +import type { DAG } from "./dag"; +import type { PipelineEvent } from "./events"; +import type { InferRoutesOutput, PipelineRouteDefinition } from "./route"; +import type { InferSourceIds, PipelineSourceDefinition } from "./source"; +import type { ParseContext, ParsedRow, PipelineFilter, ResolveContext } from "./types"; +import { buildDAG } from "./dag"; + +export interface FallbackRouteDefinition< + TArtifacts extends Record<string, unknown> = Record<string, unknown>, + TOutput = unknown, +> { + /** + * Optional filter to restrict which unmatched files the fallback handles. + */ + filter?: PipelineFilter; + + /** + * Parser function that yields parsed rows from file content. + */ + parser: (ctx: ParseContext) => AsyncIterable<ParsedRow>; + + /** + * Resolver function that transforms parsed rows into output. + */ + resolver: (ctx: ResolveContext<TArtifacts>, rows: AsyncIterable<ParsedRow>) => Promise<TOutput>; +} + +export interface PipelineDefinitionOptions< + TSources extends readonly PipelineSourceDefinition[] = readonly PipelineSourceDefinition[], + TRoutes extends readonly PipelineRouteDefinition[] = readonly PipelineRouteDefinition[], + TFallback extends FallbackRouteDefinition | undefined = undefined, +> { + /** + * Unique identifier for the pipeline. + */ + id: string; + + /** + * Human-readable name for the pipeline. + */ + name: string; + + /** + * Description of what this pipeline does. + */ + description?: string; + + /** + * Tags associated with this pipeline. + */ + tags?: string[]; + + /** + * Unicode versions this pipeline processes. + */ + versions: string[]; + + /** + * Input sources that provide files to the pipeline. + */ + inputs: TSources; + + /** + * Routes that process matched files. + */ + routes: TRoutes; + + /** + * Global filter to include/exclude files before routing. + */ + include?: PipelineFilter; + + /** + * If true, throw error for files with no matching route (and no fallback). + * @default false + */ + strict?: boolean; + + /** + * Maximum concurrent route executions. + * @default 4 + */ + concurrency?: number; + + /** + * Fallback handler for files that don't match any route. + */ + fallback?: TFallback; + + /** + * Event handler for pipeline events. + * Note: This is stored but not invoked by the definition itself. + * The executor is responsible for calling this. + */ + onEvent?: (event: PipelineEvent) => void | Promise<void>; +} + +export interface PipelineDefinition< + TId extends string = string, + TSources extends readonly PipelineSourceDefinition[] = readonly PipelineSourceDefinition[], + TRoutes extends readonly PipelineRouteDefinition[] = readonly PipelineRouteDefinition[], + TFallback extends FallbackRouteDefinition | undefined = undefined, +> { + /** + * Marker to identify this as a pipeline definition. + */ + readonly _type: "pipeline-definition"; + + /** + * Unique identifier for the pipeline. + */ + readonly id: TId; + + /** + * Human-readable name for the pipeline. + */ + readonly name: string; + + /** + * Description of what this pipeline does. + */ + readonly description?: string; + + /** + * Unicode versions this pipeline processes. + */ + readonly versions: string[]; + + /** + * Input sources that provide files to the pipeline. + */ + readonly inputs: TSources; + + /** + * Routes that process matched files. + */ + readonly routes: TRoutes; + + /** + * Global filter to include/exclude files before routing. + */ + readonly include?: PipelineFilter; + + /** + * If true, throw error for files with no matching route (and no fallback). + */ + readonly strict: boolean; + + /** + * Maximum concurrent route executions. + */ + readonly concurrency: number; + + /** + * Fallback handler for files that don't match any route. + */ + readonly fallback?: TFallback; + + /** + * Event handler for pipeline events. + */ + readonly onEvent?: (event: PipelineEvent) => void | Promise<void>; + + /** + * Precomputed DAG (Directed Acyclic Graph) for route execution order. + * Built at definition time from route dependencies. + */ + readonly dag: DAG; + + /** + * Tags associated with this pipeline. + */ + readonly tags: string[]; +} + +export type InferPipelineOutput< + TRoutes extends readonly PipelineRouteDefinition[], + TFallback extends FallbackRouteDefinition | undefined, +> = TFallback extends FallbackRouteDefinition<Record<string, unknown>, infer TFallbackOutput> + ? InferRoutesOutput<TRoutes> | TFallbackOutput + : InferRoutesOutput<TRoutes>; + +export type InferPipelineSourceIds<T> = T extends PipelineDefinition<string, infer TSources> + ? InferSourceIds<TSources> + : never; + +export type InferPipelineRouteIds<T> = T extends PipelineDefinition<string, readonly PipelineSourceDefinition[], infer TRoutes> + ? TRoutes[number] extends PipelineRouteDefinition<infer TId> + ? TId + : never + : never; + +/** + * Define a pipeline configuration. + * + * This returns a pure data structure describing the pipeline. + * To execute the pipeline, pass it to a pipeline executor. + * + * @example + * ```ts + * const pipeline = definePipeline({ + * id: "my-pipeline", + * versions: ["16.0.0"], + * inputs: [mySource], + * routes: [myRoute], + * }); + * + * // Execute with an executor + * const executor = createPipelineExecutor({ pipelines: [pipeline] }); + * const result = await executor.run(); + * ``` + */ +export function definePipeline< + const TId extends string, + const TSources extends readonly PipelineSourceDefinition[], + const TRoutes extends readonly PipelineRouteDefinition[], + TFallback extends FallbackRouteDefinition | undefined = undefined, +>( + options: Omit<PipelineDefinitionOptions<TSources, TRoutes, TFallback>, "inputs" | "routes"> + & { + id: TId; + inputs: readonly [...TSources]; + routes: readonly [...TRoutes]; + }, +): PipelineDefinition<TId, TSources, TRoutes, TFallback> { + const dagResult = buildDAG(options.routes); + + if (!dagResult.valid) { + const errorMessages = dagResult.errors.map((e) => e.message).join("\n "); + throw new Error(`Pipeline "${options.id}" has invalid route dependencies:\n ${errorMessages}`); + } + + return { + _type: "pipeline-definition", + id: options.id, + name: options.name, + description: options.description, + versions: options.versions, + inputs: options.inputs as TSources, + routes: options.routes as TRoutes, + include: options.include, + strict: options.strict ?? false, + concurrency: options.concurrency ?? 4, + fallback: options.fallback, + onEvent: options.onEvent, + dag: dagResult.dag!, + tags: options.tags ?? [], + }; +} + +export function isPipelineDefinition(value: unknown): value is PipelineDefinition { + return ( + typeof value === "object" + && value !== null + && "_type" in value + && (value as { _type: unknown })._type === "pipeline-definition" + ); +} + +export function getPipelineRouteIds<T extends PipelineDefinition>( + pipeline: T, +): string[] { + return pipeline.routes.map((route) => route.id); +} + +export function getPipelineSourceIds<T extends PipelineDefinition>( + pipeline: T, +): string[] { + return pipeline.inputs.map((source) => source.id); +}
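Since `definePipeline` calls `buildDAG` eagerly, dependency mistakes fail fast. A sketch with a stub source and a deliberately broken route (all names illustrative):

```ts
import { definePipeline, definePipelineRoute, definePipelineSource } from "@ucdjs/pipelines-core";

// Illustrative stub source: an empty in-memory backend.
const source = definePipelineSource({
  id: "memory",
  backend: {
    listFiles: async () => [],
    readFile: async () => "",
  },
});

// This route depends on a route that is not part of the pipeline.
const broken = definePipelineRoute({
  id: "broken",
  filter: () => true,
  depends: ["route:does-not-exist"],
  parser: async function* () {},
  resolver: async () => [],
});

try {
  definePipeline({
    id: "demo",
    name: "Demo",
    versions: ["16.0.0"],
    inputs: [source],
    routes: [broken],
  });
} catch (error) {
  // The missing-route error surfaces at definition time, not at execution time.
  console.error(error);
}
```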
diff --git a/packages/pipelines/pipeline-core/src/route.ts b/packages/pipelines/pipeline-core/src/route.ts new file mode 100644 index 000000000..59ba48e4b --- /dev/null +++ b/packages/pipelines/pipeline-core/src/route.ts @@ -0,0 +1,98 @@ +import type { z } from "zod"; +import type { ExtractArtifactKeys, PipelineDependency } from "./dependencies"; +import type { ChainTransforms, PipelineTransformDefinition } from "./transform"; +import type { + FileContext, + ParsedRow, + ParserFn, + PipelineFilter, + PropertyJson, + ResolvedEntry, + RouteOutput, +} from "./types"; + +export interface ArtifactDefinition<TSchema extends z.ZodType = z.ZodType> { + _type: "artifact" | "global-artifact"; + schema: TSchema; + scope: "version" | "global"; +} + +export type InferArtifactType<T> + = T extends ArtifactDefinition<infer TSchema> ? z.infer<TSchema> : never; + +export interface RouteResolveContext< + TArtifactKeys extends string = string, + TEmits extends Record<string, ArtifactDefinition> = Record<string, ArtifactDefinition>, +> { + version: string; + file: FileContext; + getArtifact: <K extends TArtifactKeys>(key: K) => unknown; + emitArtifact: <K extends keyof TEmits & string>( + key: K, + value: InferArtifactType<TEmits[K]>, + ) => void; + normalizeEntries: (entries: ResolvedEntry[]) => ResolvedEntry[]; + now: () => string; +} + +export interface PipelineRouteDefinition< + TId extends string = string, + TDepends extends readonly PipelineDependency[] = readonly PipelineDependency[], + TEmits extends Record<string, ArtifactDefinition> = Record<string, ArtifactDefinition>, + TTransforms extends readonly PipelineTransformDefinition[] = readonly [], + TOutput = PropertyJson[], +> { + id: TId; + filter: PipelineFilter; + depends?: TDepends; + emits?: TEmits; + parser: ParserFn; + transforms?: TTransforms; + resolver: ( + ctx: RouteResolveContext<ExtractArtifactKeys<TDepends>, TEmits>, + rows: AsyncIterable<ChainTransforms<ParsedRow, TTransforms>>, + ) => Promise<TOutput>; + out?: RouteOutput; + cache?: boolean; +} + +export function definePipelineRoute< + const TId extends string, + const TDepends extends readonly PipelineDependency[] = readonly [], + const TEmits extends Record<string, ArtifactDefinition> = Record<string, ArtifactDefinition>, + const TTransforms extends readonly PipelineTransformDefinition[] = readonly [], + TOutput = PropertyJson[], +>( + definition: PipelineRouteDefinition<TId, TDepends, TEmits, TTransforms, TOutput>, +): PipelineRouteDefinition<TId, TDepends, TEmits, TTransforms, TOutput> { + return definition; +} + +export type InferRouteId<T> = T extends PipelineRouteDefinition<infer TId> + ? TId + : never; + +export type InferRouteDepends<T> = T extends PipelineRouteDefinition<string, infer TDepends> + ? TDepends + : never; + +export type InferRouteEmits<T> = T extends PipelineRouteDefinition<string, readonly PipelineDependency[], infer TEmits> + ? TEmits + : never; + +export type InferRouteTransforms<T> = T extends PipelineRouteDefinition<string, readonly PipelineDependency[], Record<string, ArtifactDefinition>, infer TTransforms> + ? TTransforms + : never; + +export type InferRouteOutput<T> = T extends PipelineRouteDefinition<string, readonly PipelineDependency[], Record<string, ArtifactDefinition>, readonly PipelineTransformDefinition[], infer TOutput> + ? TOutput + : never; + +export type InferRoutesOutput<T extends readonly PipelineRouteDefinition[]> + = T[number] extends PipelineRouteDefinition<string, readonly PipelineDependency[], Record<string, ArtifactDefinition>, readonly PipelineTransformDefinition[], infer TOutput> + ? TOutput extends unknown[] ? TOutput[number] : TOutput + : never; + +export type InferEmittedArtifactsFromRoute<T> = T extends PipelineRouteDefinition<string, readonly PipelineDependency[], infer TEmits> + ? { [K in keyof TEmits]: TEmits[K] extends ArtifactDefinition<infer TSchema> ? z.infer<TSchema> : never } + : never;
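A sketch of two routes wired through an emitted artifact. The `artifact()` factory is the one from the pipeline-artifacts package earlier in this diff; the `@ucdjs/pipelines-artifacts` specifier is assumed from the `pipelines-core` naming pattern:

```ts
import { definePipelineRoute } from "@ucdjs/pipelines-core";
import { artifact } from "@ucdjs/pipelines-artifacts"; // assumed package name
import { z } from "zod";

const producer = definePipelineRoute({
  id: "producer",
  filter: (ctx) => ctx.file.name === "UnicodeData.txt",
  emits: { rowCount: artifact(z.number()) },
  parser: async function* () {},
  resolver: async (ctx) => {
    ctx.emitArtifact("rowCount", 42); // value typed via the zod schema
    return [];
  },
});

const consumer = definePipelineRoute({
  id: "consumer",
  filter: () => true,
  depends: ["artifact:producer:rowCount"],
  parser: async function* () {},
  resolver: async (ctx) => {
    // The key is constrained to the artifact keys named in `depends`;
    // the value itself comes back as `unknown`.
    const count = ctx.getArtifact("producer:rowCount");
    console.log(`producer emitted ${String(count)} rows`);
    return [];
  },
});
```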
diff --git a/packages/pipelines/pipeline-core/src/source.ts b/packages/pipelines/pipeline-core/src/source.ts new file mode 100644 index 000000000..717a38c28 --- /dev/null +++ b/packages/pipelines/pipeline-core/src/source.ts @@ -0,0 +1,87 @@ +import type { FileContext, PipelineFilter } from "./types"; + +export interface StreamOptions { + chunkSize?: number; + start?: number; + end?: number; +} + +export interface FileMetadata { + size: number; + hash?: string; + lastModified?: string; +} + +export interface SourceBackend { + listFiles: (version: string) => Promise<FileContext[]>; + readFile: (file: FileContext) => Promise<string>; + readFileStream?: (file: FileContext, options?: StreamOptions) => AsyncIterable<Uint8Array>; + getMetadata?: (file: FileContext) => Promise<FileMetadata>; +} + +export interface PipelineSourceDefinition<TId extends string = string> { + id: TId; + backend: SourceBackend; + includes?: PipelineFilter; + excludes?: PipelineFilter; +} + +export interface SourceFileContext extends FileContext { + source: { + id: string; + }; +} + +export function definePipelineSource<const TId extends string>( + definition: PipelineSourceDefinition<TId>, +): PipelineSourceDefinition<TId> { + return definition; +} + +export async function resolveSourceFiles( + source: PipelineSourceDefinition, + version: string, +): Promise<SourceFileContext[]> { + const allFiles = await source.backend.listFiles(version); + + const filteredFiles = allFiles.filter((file) => { + const ctx = { file }; + + if (source.includes && !source.includes(ctx)) { + return false; + } + + if (source.excludes && source.excludes(ctx)) { + return false; + } + + return true; + }); + + return filteredFiles.map((file) => ({ + ...file, + source: { id: source.id }, + })); +} + +export async function resolveMultipleSourceFiles( + sources: PipelineSourceDefinition[], + version: string, +): Promise<SourceFileContext[]> { + const filesByPath = new Map<string, SourceFileContext>(); + + for (const source of sources) { + const files = await resolveSourceFiles(source, version); + for (const file of files) { + filesByPath.set(file.path, file); + } + } + + return Array.from(filesByPath.values()); +} + +export type InferSourceId<T> = T extends PipelineSourceDefinition<infer TId> ? TId : never; + +export type InferSourceIds<T extends readonly PipelineSourceDefinition[]> = { + [K in keyof T]: InferSourceId<T[K]>; +}[number];
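A sketch of the smallest useful `SourceBackend`: an in-memory map (paths and contents illustrative; assumes ESM top-level await):

```ts
import type { SourceBackend } from "@ucdjs/pipelines-core";
import { byExt, definePipelineSource, resolveSourceFiles } from "@ucdjs/pipelines-core";

const files = new Map([["ucd/LineBreak.txt", "0041;AL # LATIN CAPITAL LETTER A"]]);

const backend: SourceBackend = {
  listFiles: async (version) => [...files.keys()].map((path) => ({
    version,
    dir: "ucd",
    path,
    name: path.split("/").pop() ?? path,
    ext: ".txt",
  })),
  readFile: async (file) => files.get(file.path) ?? "",
};

const source = definePipelineSource({
  id: "memory",
  backend,
  includes: byExt(".txt"),
});

// Each resolved file is tagged with `source: { id: "memory" }`.
const resolved = await resolveSourceFiles(source, "16.0.0");
console.log(resolved);
```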
diff --git a/packages/pipelines/pipeline-core/src/transform.ts b/packages/pipelines/pipeline-core/src/transform.ts new file mode 100644 index 000000000..1f452dc7a --- /dev/null +++ b/packages/pipelines/pipeline-core/src/transform.ts @@ -0,0 +1,117 @@ +import type { FileContext } from "./types"; + +export interface TransformContext { + version: string; + file: FileContext; +} + +export interface PipelineTransformDefinition<TInput = any, TOutput = any> { + id: string; + fn: (ctx: TransformContext, rows: AsyncIterable<TInput>) => AsyncIterable<TOutput>; +} + +/** + * Creates and returns a validated pipeline transform definition. + * + * This function is primarily a type-safe way to define transforms. It ensures that + * the transform adheres to the `PipelineTransformDefinition` interface and preserves + * type information for chaining. + * + * @typeParam TInput - The input type accepted by the transform. + * @typeParam TOutput - The output type produced by the transform. + * @param {PipelineTransformDefinition<TInput, TOutput>} definition - The transform definition. + * @returns {PipelineTransformDefinition<TInput, TOutput>} The same definition, typed correctly. + * + * @example + * ```ts + * const uppercase = definePipelineTransform({ + * id: 'uppercase', + * fn: async function* (_ctx, rows) { + * for await (const row of rows) { + * yield row.toUpperCase(); + * } + * }, + * }); + * ``` + */ +export function definePipelineTransform<TInput, TOutput>( + definition: PipelineTransformDefinition<TInput, TOutput>, +): PipelineTransformDefinition<TInput, TOutput> { + return definition; +} + +export type InferTransformInput<T> = T extends PipelineTransformDefinition<infer TInput, infer _TOutput> ? TInput : never; + +export type InferTransformOutput<T> = T extends PipelineTransformDefinition<infer _TInput, infer TOutput> ? TOutput : never; + +/** + * Recursively composes a sequence of transform definitions into a single output type. + * + * Given an input type and a sequence of transforms, this type infers the final output + * type by threading the output of each transform into the input of the next. + * + * @typeParam TInput - The initial input type. + * @typeParam TTransforms - A readonly tuple of transform definitions, starting with one + * that accepts `TInput`. + * + * @example + * ```ts + * type T1 = PipelineTransformDefinition<string, number>; + * type T2 = PipelineTransformDefinition<number, boolean>; + * type Result = ChainTransforms<string, [T1, T2]>; // boolean + * ``` + */ +type ChainTransformsHelper< + TInput, + TTransforms extends readonly PipelineTransformDefinition[], +> = TTransforms extends readonly [ + infer _First extends PipelineTransformDefinition, + ...infer Rest extends PipelineTransformDefinition[], +] + ? ChainTransformsHelper<InferTransformOutput<_First>, Rest> + : TInput; + +export type ChainTransforms< + TInput, + TTransforms extends readonly PipelineTransformDefinition[], +> = ChainTransformsHelper<TInput, TTransforms>; + +/** + * Applies a sequence of transforms to an async iterable, composing them together. + * + * This function threads the output of one transform into the input of the next, + * creating a pipeline. All iteration is lazy—values are pulled through the pipeline + * only as they are consumed. + * + * @typeParam TInput - The input type of the first transform. + * @param {TransformContext} ctx - The context to pass to each transform. + * @param {AsyncIterable<TInput>} rows - The initial data source. + * @param {readonly PipelineTransformDefinition[]} transforms - The transforms to apply in order. + * @returns {AsyncIterable<unknown>} An async iterable of the final output (typed as `unknown` since + * the result type depends on the transform sequence). + * + * @remarks + * The output type can be narrowed using the `ChainTransforms` utility type if the + * exact sequence of transforms is known at compile time. + * + * @example + * ```ts + * const output = applyTransforms(ctx, sourceRows, [ + * defineTransform({ id: 'filter', fn: filterFn }), + * defineTransform({ id: 'map', fn: mapFn }), + * ]); + * ``` + */ +export async function* applyTransforms<TInput>( + ctx: TransformContext, + rows: AsyncIterable<TInput>, + transforms: readonly PipelineTransformDefinition[], +): AsyncIterable<unknown> { + let current: AsyncIterable<unknown> = rows; + + for (const transform of transforms) { + current = transform.fn(ctx, current); + } + + yield* current; +}
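An end-to-end sketch of the lazy composition described above, with a hand-built `TransformContext` (values illustrative):

```ts
import type { TransformContext } from "@ucdjs/pipelines-core";
import { applyTransforms, definePipelineTransform } from "@ucdjs/pipelines-core";

const trim = definePipelineTransform<string, string>({
  id: "trim",
  fn: async function* (_ctx, rows) {
    for await (const row of rows) yield row.trim();
  },
});

const nonEmpty = definePipelineTransform<string, string>({
  id: "non-empty",
  fn: async function* (_ctx, rows) {
    for await (const row of rows) if (row.length > 0) yield row;
  },
});

async function* lines(): AsyncIterable<string> {
  yield "  0041;AL  ";
  yield "   ";
}

const ctx: TransformContext = {
  version: "16.0.0",
  file: { version: "16.0.0", dir: "ucd", path: "ucd/LineBreak.txt", name: "LineBreak.txt", ext: ".txt" },
};

// Values are pulled through both transforms only as they are consumed.
for await (const row of applyTransforms(ctx, lines(), [trim, nonEmpty])) {
  console.log(row); // "0041;AL"
}
```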
diff --git a/packages/pipelines/pipeline-core/src/types.ts b/packages/pipelines/pipeline-core/src/types.ts new file mode 100644 index 000000000..5f02ebf0e --- /dev/null +++ b/packages/pipelines/pipeline-core/src/types.ts @@ -0,0 +1,285 @@ +export interface FileContext { + /** + * The Unicode version being processed (e.g., "16.0.0"). + */ + version: string; + + /** + * The directory category of the file. + */ + dir: "ucd" | "extracted" | "auxiliary" | "emoji" | "unihan" | string; + + /** + * The relative path from the version root (e.g., "ucd/LineBreak.txt"). + */ + path: string; + + /** + * The file name (e.g., "LineBreak.txt"). + */ + name: string; + + /** + * The file extension (e.g., ".txt"). + */ + ext: string; +} + +/** + * Context for a specific row/line within a file. + * Used during row-level filtering in multi-property files. + */ +export interface RowContext { + /** + * The property name for multi-property files (e.g., "NFKC_Casefold"). + */ + property?: string; +} + +/** + * Combined context passed to filter predicates. + * During file routing, only `file` is defined. + * During row filtering, both `file` and `row` are defined. + */ +export interface FilterContext { + /** + * The file context. + */ + file: FileContext; + + /** + * The row context (only defined during row-level filtering). + */ + row?: RowContext; + + /** + * The source context (only defined when using multiple sources). + */ + source?: { + /** + * The source ID. + */ + id: string; + }; +} + +/** + * A predicate function that determines if a file or row should be processed. + */ +export type PipelineFilter = (ctx: FilterContext) => boolean; + +/** + * A parsed row from a UCD file. + */ +export interface ParsedRow { + /** + * The source file path relative to the version root. + */ + sourceFile: string; + + /** + * The kind of entry. + */ + kind: "range" | "point" | "sequence" | "alias"; + + /** + * Start of range (hex string, e.g., "0041"). + */ + start?: string; + + /** + * End of range (hex string, e.g., "005A"). + */ + end?: string; + + /** + * Single code point (hex string). + */ + codePoint?: string; + + /** + * Sequence of code points (hex strings). + */ + sequence?: string[]; + + /** + * Property name for multi-property files. + */ + property?: string; + + /** + * The value(s) associated with this entry. + */ + value?: string | string[]; + + /** + * Additional metadata (comments, line numbers, etc.). + */ + meta?: Record<string, unknown>; +} + +/** + * Context passed to parser functions. + */ +export interface ParseContext { + /** + * The file being parsed. + */ + file: FileContext; + + /** + * Read the raw content of the file. + */ + readContent: () => Promise<string>; + + /** + * Read the file line by line. + */ + readLines: () => AsyncIterable<string>; + + /** + * Check if a line is a comment. + */ + isComment: (line: string) => boolean; +} + +/** + * A parser function that converts file content to parsed rows. + */ +export type ParserFn = (ctx: ParseContext) => AsyncIterable<ParsedRow>; + +/** + * A resolved entry in the output JSON. + */ +export interface ResolvedEntry { + /** + * Range in "XXXX..YYYY" format (hex, inclusive). + */ + range?: `${string}..${string}`; + + /** + * Single code point in hex. + */ + codePoint?: string; + + /** + * Sequence of code points. + */ + sequence?: string[]; + + /** + * The value(s) for this entry. + */ + value: string | string[]; +} + +/** + * A default range from @missing declarations. + */ +export interface DefaultRange { + /** + * The range this default applies to. + */ + range: `${string}..${string}`; + + /** + * The default value. + */ + value: string | string[]; +} + +/** + * The standardized JSON output for a property. + */ +export interface PropertyJson { + /** + * The Unicode version (e.g., "16.0.0"). + */ + version: string; + + /** + * The property name (e.g., "Line_Break"). + */ + property: string; + + /** + * The source file name (e.g., "LineBreak.txt"). + */ + file: string; + + /** + * The resolved entries. + */ + entries: ResolvedEntry[]; + + /** + * Default ranges from @missing (in encounter order). + */ + defaults?: DefaultRange[]; + + /** + * Additional metadata. + */ + meta?: Record<string, unknown>; +} + +/** + * Context passed to resolver functions. + */ +export interface ResolveContext<TArtifacts extends Record<string, unknown> = Record<string, unknown>> { + /** + * The Unicode version being processed. + */ + version: string; + + /** + * The file being resolved. + */ + file: FileContext; + + /** + * Get an artifact by ID. + */ + getArtifact: <K extends keyof TArtifacts>(id: K) => TArtifacts[K]; + + /** + * Emit an artifact for subsequent routes. + */ + emitArtifact: <K extends string, V>(id: K, value: V) => void; + + /** + * Normalize and sort entries by code point range. + */ + normalizeEntries: (entries: ResolvedEntry[]) => ResolvedEntry[]; + + /** + * Get current timestamp in ISO 8601 format. + */ + now: () => string; +} + +/** + * A resolver function that converts parsed rows to property JSON. + */ +export type ResolverFn< + TArtifacts extends Record<string, unknown> = Record<string, unknown>, + TOutput = PropertyJson[], +> = ( + ctx: ResolveContext<TArtifacts>, + rows: AsyncIterable<ParsedRow>, +) => Promise<TOutput>; + +/** + * Output configuration for a route. + */ +export interface RouteOutput { + /** + * Custom output directory. + */ + dir?: string; + + /** + * Custom file name generator. + */ + fileName?: (pj: PropertyJson) => string; +}
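For orientation, a `PropertyJson` document conforming to the shapes above might look like this (values illustrative):

```ts
import type { PropertyJson } from "@ucdjs/pipelines-core";

const lineBreak: PropertyJson = {
  version: "16.0.0",
  property: "Line_Break",
  file: "LineBreak.txt",
  entries: [
    { range: "0041..005A", value: "AL" }, // a resolved range
    { codePoint: "00A7", value: "AI" },   // a single code point
  ],
  defaults: [{ range: "0000..10FFFF", value: "XX" }],
};
```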
diff --git a/packages/pipelines/pipeline-core/test/dag.test.ts b/packages/pipelines/pipeline-core/test/dag.test.ts new file mode 100644 index 000000000..95439106c --- /dev/null +++ b/packages/pipelines/pipeline-core/test/dag.test.ts @@ -0,0 +1,252 @@ +import type { ParsedRow, PipelineRouteDefinition } from "../src"; +import { describe, expect, it } from "vitest"; +import { buildDAG, definePipelineRoute, getExecutionLayers } from "../src"; + +function createMockParser() { + return async function* (): AsyncIterable<ParsedRow> { + yield { + sourceFile: "test.txt", + kind: "point" as const, + codePoint: "0041", + value: "test", + }; + }; +} + +function createRoute( + id: string, + depends?: (`route:${string}` | `artifact:${string}:${string}`)[], +): PipelineRouteDefinition { + return definePipelineRoute({ + id, + filter: () => true, + parser: createMockParser(), + resolver: async () => [], + depends, + }); +} + +describe("buildDAG", () => { + it("should build DAG from independent routes", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b"), + createRoute("route-c"), + ]; + + const result = buildDAG(routes); + + expect(result.valid).toBe(true); + expect(result.errors).toEqual([]); + expect(result.dag).toBeDefined(); + expect(result.dag!.nodes.size).toBe(3); + }); + + it("should build DAG with route dependencies", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b", ["route:route-a"]), + createRoute("route-c", ["route:route-b"]), + ]; + + const result = buildDAG(routes); + + expect(result.valid).toBe(true); + expect(result.dag!.nodes.get("route-b")!.dependencies.has("route-a")).toBe(true); + expect(result.dag!.nodes.get("route-c")!.dependencies.has("route-b")).toBe(true); + }); + + it("should detect circular dependencies", () => { + const routes = [ + createRoute("route-a", ["route:route-c"]), + createRoute("route-b", ["route:route-a"]), + createRoute("route-c", ["route:route-b"]), + ]; + + const result = buildDAG(routes); + + expect(result.valid).toBe(false); + expect(result.errors.length).toBeGreaterThan(0); + expect(result.errors[0].type).toBe("cycle"); + expect(result.errors[0].details.cycle).toBeDefined(); + }); + + it("should detect 
missing route dependencies", () => { + const routes = [ + createRoute("route-a", ["route:missing"]), + ]; + + const result = buildDAG(routes); + + expect(result.valid).toBe(false); + expect(result.errors[0].type).toBe("missing-route"); + expect(result.errors[0].details.dependencyId).toBe("missing"); + }); + + it("should detect duplicate route IDs", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b"), + createRoute("route-a"), + ]; + + const result = buildDAG(routes); + + expect(result.valid).toBe(false); + expect(result.errors.length).toBe(1); + expect(result.errors[0].type).toBe("duplicate-route"); + expect(result.errors[0].details.routeId).toBe("route-a"); + expect(result.errors[0].message).toContain("index 0 and 2"); + }); + + it("should track dependents", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b", ["route:route-a"]), + createRoute("route-c", ["route:route-a"]), + ]; + + const result = buildDAG(routes); + + const nodeA = result.dag!.nodes.get("route-a"); + expect(nodeA!.dependents.has("route-b")).toBe(true); + expect(nodeA!.dependents.has("route-c")).toBe(true); + }); + + it("should generate correct execution order", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b", ["route:route-a"]), + createRoute("route-c", ["route:route-b"]), + ]; + + const result = buildDAG(routes); + + expect(result.valid).toBe(true); + const order = result.dag!.executionOrder; + const indexA = order.indexOf("route-a"); + const indexB = order.indexOf("route-b"); + const indexC = order.indexOf("route-c"); + + expect(indexA).toBeLessThan(indexB); + expect(indexB).toBeLessThan(indexC); + }); +}); + +describe("getExecutionLayers", () => { + it("should put independent routes in same layer", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b"), + createRoute("route-c"), + ]; + + const result = buildDAG(routes); + const layers = getExecutionLayers(result.dag!); + + expect(layers.length).toBe(1); + expect(layers[0]).toHaveLength(3); + expect(new Set(layers[0])).toEqual(new Set(["route-a", "route-b", "route-c"])); + }); + + it("should create layers based on dependencies", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b", ["route:route-a"]), + createRoute("route-c", ["route:route-b"]), + ]; + + const result = buildDAG(routes); + const layers = getExecutionLayers(result.dag!); + + expect(layers.length).toBe(3); + expect(layers[0]).toEqual(["route-a"]); + expect(layers[1]).toEqual(["route-b"]); + expect(layers[2]).toEqual(["route-c"]); + }); + + it("should handle fan-in dependencies", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b"), + createRoute("route-c", ["route:route-a", "route:route-b"]), + ]; + + const result = buildDAG(routes); + const layers = getExecutionLayers(result.dag!); + + expect(layers.length).toBe(2); + expect(layers[0].sort()).toEqual(["route-a", "route-b"]); + expect(layers[1]).toEqual(["route-c"]); + }); + + it("should handle fan-out dependencies", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b", ["route:route-a"]), + createRoute("route-c", ["route:route-a"]), + ]; + + const result = buildDAG(routes); + const layers = getExecutionLayers(result.dag!); + + expect(layers.length).toBe(2); + expect(layers[0]).toEqual(["route-a"]); + expect(layers[1].sort()).toEqual(["route-b", "route-c"]); + }); + + it("should handle complex DAG", () => { + const routes = [ + createRoute("route-a"), + createRoute("route-b"), + 
createRoute("route-c", ["route:route-a"]), + createRoute("route-d", ["route:route-b"]), + createRoute("route-e", ["route:route-c", "route:route-d"]), + ]; + + const result = buildDAG(routes); + const layers = getExecutionLayers(result.dag!); + + expect(layers.length).toBe(3); + expect(new Set(layers[0])).toEqual(new Set(["route-a", "route-b"])); + expect(new Set(layers[1])).toEqual(new Set(["route-c", "route-d"])); + expect(layers[2]).toEqual(["route-e"]); + }); +}); + +describe("DAGNode structure", () => { + it("should have correct node structure", () => { + const routes = [createRoute("test-route")]; + + const result = buildDAG(routes); + const node = result.dag!.nodes.get("test-route"); + + expect(node).toEqual({ + id: "test-route", + dependencies: expect.any(Set), + dependents: expect.any(Set), + emittedArtifacts: expect.any(Set), + }); + }); + + it("should have empty dependencies for independent routes", () => { + const routes = [createRoute("independent")]; + + const result = buildDAG(routes); + const node = result.dag!.nodes.get("independent"); + + expect(node!.dependencies.size).toBe(0); + }); + + it("should have empty dependents for leaf nodes", () => { + const routes = [ + createRoute("root"), + createRoute("leaf", ["route:root"]), + ]; + + const result = buildDAG(routes); + const node = result.dag!.nodes.get("leaf"); + + expect(node!.dependents.size).toBe(0); + }); +}); diff --git a/packages/pipelines/pipeline-core/test/dependencies.test.ts b/packages/pipelines/pipeline-core/test/dependencies.test.ts new file mode 100644 index 000000000..0f8cab000 --- /dev/null +++ b/packages/pipelines/pipeline-core/test/dependencies.test.ts @@ -0,0 +1,307 @@ +import type { + ExtractArtifactDependencies, + ExtractArtifactKeys, + ExtractRouteDependencies, + ParsedArtifactDependency, + ParsedDependency, + ParsedRouteDependency, + PipelineDependency, +} from "../src/dependencies"; +import { describe, expect, it } from "vitest"; +import { + createArtifactDependency, + createRouteDependency, + isArtifactDependency, + isRouteDependency, + parseDependency, +} from "../src/dependencies"; + +describe("parseDependency", () => { + it("should parse route dependency", () => { + const result = parseDependency("route:my-route"); + + expect(result).toEqual({ + type: "route", + routeId: "my-route", + }); + }); + + it("should parse artifact dependency", () => { + const result = parseDependency("artifact:my-route:my-artifact"); + + expect(result).toEqual({ + type: "artifact", + routeId: "my-route", + artifactName: "my-artifact", + }); + }); + + it("should parse route with hyphens and underscores", () => { + const result = parseDependency("route:unicode-data_processor"); + + expect(result).toEqual({ + type: "route", + routeId: "unicode-data_processor", + }); + }); + + it("should parse artifact with complex names", () => { + const result = parseDependency("artifact:data-processor:normalized_output"); + + expect(result).toEqual({ + type: "artifact", + routeId: "data-processor", + artifactName: "normalized_output", + }); + }); + + it("should throw error for invalid format", () => { + expect(() => parseDependency("invalid" as PipelineDependency)).toThrow( + "Invalid dependency format: invalid. 
Expected \"route:\" or \"artifact::\"", + ); + }); + + it("should throw error for route without id", () => { + expect(() => parseDependency("route:" as PipelineDependency)).toThrow( + "Invalid dependency format: route:", + ); + }); + + it("should throw error for artifact without name", () => { + expect(() => parseDependency("artifact:my-route:" as PipelineDependency)).toThrow( + "Invalid dependency format: artifact:my-route:", + ); + }); + + it("should throw error for artifact without route id", () => { + expect(() => parseDependency("artifact::my-artifact" as PipelineDependency)).toThrow( + "Invalid dependency format: artifact::my-artifact", + ); + }); + + it("should throw error for unknown dependency type", () => { + expect(() => parseDependency("unknown:value" as PipelineDependency)).toThrow( + "Invalid dependency format: unknown:value", + ); + }); +}); + +describe("isRouteDependency", () => { + it("should return true for route dependency", () => { + expect(isRouteDependency("route:my-route")).toBe(true); + }); + + it("should return false for artifact dependency", () => { + expect(isRouteDependency("artifact:route:artifact")).toBe(false); + }); + + it("should work as type guard", () => { + const dep: PipelineDependency = "route:test" as const; + + if (isRouteDependency(dep)) { + expect(dep).toBe("route:test"); + } else { + throw new Error("Expected route dependency"); + } + }); +}); + +describe("isArtifactDependency", () => { + it("should return true for artifact dependency", () => { + expect(isArtifactDependency("artifact:route:artifact")).toBe(true); + }); + + it("should return false for route dependency", () => { + expect(isArtifactDependency("route:my-route")).toBe(false); + }); + + it("should work as type guard", () => { + const dep: PipelineDependency = "artifact:route:artifact" as const; + + if (isArtifactDependency(dep)) { + expect(dep).toBe("artifact:route:artifact"); + } else { + throw new Error("Expected artifact dependency"); + } + }); +}); + +describe("createRouteDependency", () => { + it("should create route dependency", () => { + const dep = createRouteDependency("my-route"); + + expect(dep).toBe("route:my-route"); + }); + + it("should create route dependency with complex id", () => { + const dep = createRouteDependency("unicode-data_processor"); + + expect(dep).toBe("route:unicode-data_processor"); + }); + + it("should be parseable", () => { + const dep = createRouteDependency("test-route"); + const parsed = parseDependency(dep); + + expect(parsed).toEqual({ + type: "route", + routeId: "test-route", + }); + }); +}); + +describe("createArtifactDependency", () => { + it("should create artifact dependency", () => { + const dep = createArtifactDependency("my-route", "my-artifact"); + + expect(dep).toBe("artifact:my-route:my-artifact"); + }); + + it("should create artifact dependency with complex names", () => { + const dep = createArtifactDependency("data-processor", "normalized_output"); + + expect(dep).toBe("artifact:data-processor:normalized_output"); + }); + + it("should be parseable", () => { + const dep = createArtifactDependency("test-route", "test-artifact"); + const parsed = parseDependency(dep); + + expect(parsed).toEqual({ + type: "artifact", + routeId: "test-route", + artifactName: "test-artifact", + }); + }); +}); + +describe("parsedDependency types", () => { + it("should handle route dependency types", () => { + const parsed: ParsedRouteDependency = { + type: "route", + routeId: "my-route", + }; + + expect(parsed.type).toBe("route"); + 
expect(parsed.routeId).toBe("my-route"); + }); + + it("should handle artifact dependency types", () => { + const parsed: ParsedArtifactDependency = { + type: "artifact", + routeId: "my-route", + artifactName: "my-artifact", + }; + + expect(parsed.type).toBe("artifact"); + expect(parsed.routeId).toBe("my-route"); + expect(parsed.artifactName).toBe("my-artifact"); + }); + + it("should handle union type correctly", () => { + const routeDep: ParsedDependency = { + type: "route", + routeId: "test", + }; + + const artifactDep: ParsedDependency = { + type: "artifact", + routeId: "test", + artifactName: "artifact", + }; + + expect(routeDep.type).toBe("route"); + expect(artifactDep.type).toBe("artifact"); + }); +}); + +describe("type inference", () => { + describe("extractRouteDependencies", () => { + it("should extract route ids from dependency array", () => { + type RouteIds = ExtractRouteDependencies<[ + "route:parser", + "route:normalizer", + "artifact:other:data", + ]>; + + const id1: RouteIds = "parser"; + const id2: RouteIds = "normalizer"; + + expect(id1).toBe("parser"); + expect(id2).toBe("normalizer"); + }); + + it("should extract never type for empty array", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const deps = [] as const; + + type RouteIds = ExtractRouteDependencies<typeof deps>; + + const neverValue: RouteIds = undefined as never; + expect(neverValue).toBeUndefined(); + }); + }); + + describe("extractArtifactDependencies", () => { + it("should extract artifact info from dependency array", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const deps = [ + "artifact:parser:result", + "artifact:normalizer:data", + "route:other", + ] as const; + + type ArtifactDeps = ExtractArtifactDependencies<typeof deps>; + + const dep1: ArtifactDeps = { routeId: "parser", artifactName: "result" }; + const dep2: ArtifactDeps = { routeId: "normalizer", artifactName: "data" }; + + expect(dep1).toEqual({ routeId: "parser", artifactName: "result" }); + expect(dep2).toEqual({ routeId: "normalizer", artifactName: "data" }); + }); + }); + + describe("extractArtifactKeys", () => { + it("should extract artifact keys from dependency array", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const deps = [ + "artifact:parser:result", + "artifact:normalizer:data", + "route:other", + ] as const; + + type ArtifactKeys = ExtractArtifactKeys<typeof deps>; + + const key1: ArtifactKeys = "parser:result"; + const key2: ArtifactKeys = "normalizer:data"; + + expect(key1).toBe("parser:result"); + expect(key2).toBe("normalizer:data"); + }); + }); +}); + +describe("roundtrip parsing", () => { + it("should roundtrip route dependency", () => { + const original = createRouteDependency("test-route"); + const parsed = parseDependency(original); + const reconstructed = createRouteDependency(parsed.routeId); + + expect(reconstructed).toBe(original); + }); + + it("should roundtrip artifact dependency", () => { + const original = createArtifactDependency("test-route", "test-artifact"); + const parsed = parseDependency(original); + + if (parsed.type === "artifact") { + const reconstructed = createArtifactDependency( + parsed.routeId, + parsed.artifactName, + ); + expect(reconstructed).toBe(original); + } else { + throw new Error("Expected artifact dependency"); + } + }); +}); diff --git a/packages/pipelines/pipeline-core/test/filters.test.ts b/packages/pipelines/pipeline-core/test/filters.test.ts new file mode 100644 index 000000000..2f95b69f8 --- /dev/null +++
b/packages/pipelines/pipeline-core/test/filters.test.ts @@ -0,0 +1,445 @@ +import type { FileContext, FilterContext } from "../src/types"; +import { describe, expect, it } from "vitest"; +import { + always, + and, + byDir, + byExt, + byGlob, + byName, + byPath, + byProp, + bySource, + never, + not, + or, +} from "../src/filters"; + +function createFileContext(overrides: Partial<FileContext> = {}): FileContext { + return { + version: "16.0.0", + dir: "ucd", + path: "ucd/LineBreak.txt", + name: "LineBreak.txt", + ext: ".txt", + ...overrides, + }; +} + +function createFilterContext( + fileOverrides: Partial<FileContext> = {}, + rowProperty?: string, + sourceId?: string, +): FilterContext { + return { + file: createFileContext(fileOverrides), + row: rowProperty ? { property: rowProperty } : undefined, + source: sourceId ? { id: sourceId } : undefined, + }; +} + +describe("byName", () => { + it("should match exact file name", () => { + const filter = byName("LineBreak.txt"); + const ctx = createFilterContext({ name: "LineBreak.txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should not match different file name", () => { + const filter = byName("LineBreak.txt"); + const ctx = createFilterContext({ name: "PropList.txt" }); + + expect(filter(ctx)).toBe(false); + }); + + it("should be case-sensitive", () => { + const filter = byName("LineBreak.txt"); + const ctx = createFilterContext({ name: "linebreak.txt" }); + + expect(filter(ctx)).toBe(false); + }); +}); + +describe("byDir", () => { + it("should match ucd directory", () => { + const filter = byDir("ucd"); + const ctx = createFilterContext({ dir: "ucd" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should match extracted directory", () => { + const filter = byDir("extracted"); + const ctx = createFilterContext({ dir: "extracted" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should match auxiliary directory", () => { + const filter = byDir("auxiliary"); + const ctx = createFilterContext({ dir: "auxiliary" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should match emoji directory", () => { + const filter = byDir("emoji"); + const ctx = createFilterContext({ dir: "emoji" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should match unihan directory", () => { + const filter = byDir("unihan"); + const ctx = createFilterContext({ dir: "unihan" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should not match different directory", () => { + const filter = byDir("ucd"); + const ctx = createFilterContext({ dir: "emoji" }); + + expect(filter(ctx)).toBe(false); + }); + + it("should match custom directory", () => { + const filter = byDir("custom-dir"); + const ctx = createFilterContext({ dir: "custom-dir" }); + + expect(filter(ctx)).toBe(true); + }); +}); + +describe("byExt", () => { + it("should match extension with dot", () => { + const filter = byExt(".txt"); + const ctx = createFilterContext({ ext: ".txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should match extension without dot", () => { + const filter = byExt("txt"); + const ctx = createFilterContext({ ext: ".txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should not match different extension", () => { + const filter = byExt(".txt"); + const ctx = createFilterContext({ ext: ".xml" }); + + expect(filter(ctx)).toBe(false); + }); + + it("should match empty extension", () => { + const filter = byExt(""); + const ctx = createFilterContext({ ext: "" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should not match non-empty extension when filter is empty", ()
=> { + const filter = byExt(""); + const ctx = createFilterContext({ ext: ".txt" }); + + expect(filter(ctx)).toBe(false); + }); +}); + +describe("byGlob", () => { + it("should match wildcard pattern", () => { + const filter = byGlob("ucd/*.txt"); + const ctx = createFilterContext({ path: "ucd/LineBreak.txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should match double-star pattern", () => { + const filter = byGlob("**/*.txt"); + const ctx = createFilterContext({ path: "ucd/extracted/LineBreak.txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should match specific file pattern", () => { + const filter = byGlob("ucd/LineBreak.txt"); + const ctx = createFilterContext({ path: "ucd/LineBreak.txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should not match excluded pattern", () => { + const filter = byGlob("ucd/*.txt"); + const ctx = createFilterContext({ path: "emoji/data.txt" }); + + expect(filter(ctx)).toBe(false); + }); + + it("should match multiple extensions with brace expansion", () => { + const filter = byGlob("ucd/*.{txt,xml}"); + + expect(filter(createFilterContext({ path: "ucd/file.txt" }))).toBe(true); + expect(filter(createFilterContext({ path: "ucd/file.xml" }))).toBe(true); + expect(filter(createFilterContext({ path: "ucd/file.json" }))).toBe(false); + }); +}); + +describe("byPath", () => { + it("should match exact path string", () => { + const filter = byPath("ucd/LineBreak.txt"); + const ctx = createFilterContext({ path: "ucd/LineBreak.txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should not match different path", () => { + const filter = byPath("ucd/LineBreak.txt"); + const ctx = createFilterContext({ path: "ucd/PropList.txt" }); + + expect(filter(ctx)).toBe(false); + }); + + it("should match regex pattern", () => { + const filter = byPath(/LineBreak/); + const ctx = createFilterContext({ path: "ucd/LineBreak.txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should match complex regex", () => { + const filter = byPath(/^ucd\/.*\.txt$/); + + expect(filter(createFilterContext({ path: "ucd/LineBreak.txt" }))).toBe(true); + expect(filter(createFilterContext({ path: "ucd/file.txt" }))).toBe(true); + expect(filter(createFilterContext({ path: "emoji/file.txt" }))).toBe(false); + expect(filter(createFilterContext({ path: "ucd/file.xml" }))).toBe(false); + }); +}); + +describe("byProp", () => { + it("should match exact property name", () => { + const filter = byProp("NFKC_Casefold"); + const ctx = createFilterContext({}, "NFKC_Casefold"); + + expect(filter(ctx)).toBe(true); + }); + + it("should not match different property", () => { + const filter = byProp("NFKC_Casefold"); + const ctx = createFilterContext({}, "Line_Break"); + + expect(filter(ctx)).toBe(false); + }); + + it("should return false when no row context", () => { + const filter = byProp("NFKC_Casefold"); + const ctx = createFilterContext(); + + expect(filter(ctx)).toBe(false); + }); + + it("should match regex pattern", () => { + const filter = byProp(/^NFKC_/); + const ctx = createFilterContext({}, "NFKC_Casefold"); + + expect(filter(ctx)).toBe(true); + }); + + it("should not match when regex does not match", () => { + const filter = byProp(/^NFKC_/); + const ctx = createFilterContext({}, "Line_Break"); + + expect(filter(ctx)).toBe(false); + }); + + it("should return false for regex when no property", () => { + const filter = byProp(/^NFKC_/); + const ctx = createFilterContext(); + + expect(filter(ctx)).toBe(false); + }); +}); + +describe("bySource", () => { + it("should 
match single source id", () => { + const filter = bySource("unicode"); + const ctx = createFilterContext({}, undefined, "unicode"); + + expect(filter(ctx)).toBe(true); + }); + + it("should match array of source ids", () => { + const filter = bySource(["unicode", "cldr"]); + + expect(filter(createFilterContext({}, undefined, "unicode"))).toBe(true); + expect(filter(createFilterContext({}, undefined, "cldr"))).toBe(true); + expect(filter(createFilterContext({}, undefined, "other"))).toBe(false); + }); + + it("should not match when source is undefined", () => { + const filter = bySource("unicode"); + const ctx = createFilterContext(); + + expect(filter(ctx)).toBe(false); + }); + + it("should not match different source", () => { + const filter = bySource("unicode"); + const ctx = createFilterContext({}, undefined, "cldr"); + + expect(filter(ctx)).toBe(false); + }); +}); + +describe("and", () => { + it("should return true when all filters pass", () => { + const filter = and( + byDir("ucd"), + byExt(".txt"), + byName("LineBreak.txt"), + ); + const ctx = createFilterContext({ + dir: "ucd", + ext: ".txt", + name: "LineBreak.txt", + }); + + expect(filter(ctx)).toBe(true); + }); + + it("should return false when any filter fails", () => { + const filter = and( + byDir("ucd"), + byExt(".txt"), + byName("PropList.txt"), + ); + const ctx = createFilterContext({ + dir: "ucd", + ext: ".txt", + name: "LineBreak.txt", + }); + + expect(filter(ctx)).toBe(false); + }); + + it("should return true for empty filter array", () => { + const filter = and(); + const ctx = createFilterContext(); + + expect(filter(ctx)).toBe(true); + }); + + it("should work with single filter", () => { + const filter = and(byDir("ucd")); + const ctx = createFilterContext({ dir: "ucd" }); + + expect(filter(ctx)).toBe(true); + }); +}); + +describe("or", () => { + it("should return true when any filter passes", () => { + const filter = or( + byName("LineBreak.txt"), + byName("PropList.txt"), + ); + const ctx = createFilterContext({ name: "LineBreak.txt" }); + + expect(filter(ctx)).toBe(true); + }); + + it("should return false when all filters fail", () => { + const filter = or( + byName("LineBreak.txt"), + byName("PropList.txt"), + ); + const ctx = createFilterContext({ name: "UnicodeData.txt" }); + + expect(filter(ctx)).toBe(false); + }); + + it("should return false for empty filter array", () => { + const filter = or(); + const ctx = createFilterContext(); + + expect(filter(ctx)).toBe(false); + }); + + it("should work with single filter", () => { + const filter = or(byDir("ucd")); + + expect(filter(createFilterContext({ dir: "ucd" }))).toBe(true); + expect(filter(createFilterContext({ dir: "emoji" }))).toBe(false); + }); +}); + +describe("not", () => { + it("should invert filter result", () => { + const filter = not(byDir("ucd")); + + expect(filter(createFilterContext({ dir: "ucd" }))).toBe(false); + expect(filter(createFilterContext({ dir: "emoji" }))).toBe(true); + }); + + it("should work with complex filters", () => { + const filter = not(and(byDir("ucd"), byExt(".txt"))); + + expect(filter(createFilterContext({ dir: "ucd", ext: ".txt" }))).toBe(false); + expect(filter(createFilterContext({ dir: "ucd", ext: ".xml" }))).toBe(true); + expect(filter(createFilterContext({ dir: "emoji", ext: ".txt" }))).toBe(true); + }); +}); + +describe("always", () => { + it("should always return true", () => { + const filter = always(); + + expect(filter(createFilterContext())).toBe(true); + expect(filter(createFilterContext({ dir: "ucd" 
}))).toBe(true); + expect(filter(createFilterContext({ dir: "emoji" }))).toBe(true); + }); +}); + +describe("never", () => { + it("should always return false", () => { + const filter = never(); + + expect(filter(createFilterContext())).toBe(false); + expect(filter(createFilterContext({ dir: "ucd" }))).toBe(false); + expect(filter(createFilterContext({ dir: "emoji" }))).toBe(false); + }); +}); + +describe("complex filter combinations", () => { + it("should combine and/or/not filters", () => { + const filter = and( + byDir("ucd"), + or( + byExt(".txt"), + byExt(".xml"), + ), + not(byName("ReadMe.txt")), + ); + + expect(filter(createFilterContext({ dir: "ucd", ext: ".txt", name: "LineBreak.txt" }))).toBe(true); + expect(filter(createFilterContext({ dir: "ucd", ext: ".xml", name: "data.xml" }))).toBe(true); + expect(filter(createFilterContext({ dir: "ucd", ext: ".txt", name: "ReadMe.txt" }))).toBe(false); + expect(filter(createFilterContext({ dir: "emoji", ext: ".txt", name: "data.txt" }))).toBe(false); + expect(filter(createFilterContext({ dir: "ucd", ext: ".json", name: "data.json" }))).toBe(false); + }); + + it("should handle nested logic", () => { + const filter = or( + and(byDir("ucd"), byExt(".txt")), + and(byDir("emoji"), byExt(".txt")), + ); + + expect(filter(createFilterContext({ dir: "ucd", ext: ".txt" }))).toBe(true); + expect(filter(createFilterContext({ dir: "emoji", ext: ".txt" }))).toBe(true); + expect(filter(createFilterContext({ dir: "ucd", ext: ".xml" }))).toBe(false); + expect(filter(createFilterContext({ dir: "auxiliary", ext: ".txt" }))).toBe(false); + }); +}); diff --git a/packages/pipelines/pipeline-core/test/pipeline.test.ts b/packages/pipelines/pipeline-core/test/pipeline.test.ts new file mode 100644 index 000000000..42eab5075 --- /dev/null +++ b/packages/pipelines/pipeline-core/test/pipeline.test.ts @@ -0,0 +1,547 @@ +import type { FallbackRouteDefinition } from "../src/pipeline"; +import type { SourceBackend } from "../src/source"; +import type { ParsedRow } from "../src/types"; +import { describe, expect, it, vi } from "vitest"; +import { + definePipeline, + getPipelineRouteIds, + getPipelineSourceIds, + isPipelineDefinition, +} from "../src/pipeline"; +import { definePipelineRoute } from "../src/route"; +import { definePipelineSource } from "../src/source"; + +function createMockBackend(): SourceBackend { + return { + listFiles: vi.fn().mockResolvedValue([]), + readFile: vi.fn().mockResolvedValue(""), + }; +} + +function createMockSource(id: string) { + return definePipelineSource({ + id, + backend: createMockBackend(), + }); +} + +async function* mockParser(): AsyncIterable<ParsedRow> { + yield { sourceFile: "test.txt", kind: "point", codePoint: "0041", value: "A" }; +} + +function createMockRoute(id: string) { + return definePipelineRoute({ + id, + filter: () => true, + parser: mockParser, + resolver: async () => [], + }); +} + +describe("definePipeline", () => { + it("should define a minimal pipeline", () => { + const pipeline = definePipeline({ + id: "test-pipeline", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + expect(pipeline._type).toBe("pipeline-definition"); + expect(pipeline.id).toBe("test-pipeline"); + expect(pipeline.versions).toEqual(["16.0.0"]); + expect(pipeline.inputs).toEqual([]); + expect(pipeline.routes).toEqual([]); + }); + + it("should define a pipeline with name and description", () => { + const pipeline = definePipeline({ + id: "named-pipeline", + name: "My Pipeline", + description: "A test pipeline", + versions: ["16.0.0"], + inputs:
[], + routes: [], + }); + + expect(pipeline.name).toBe("My Pipeline"); + expect(pipeline.description).toBe("A test pipeline"); + }); + + it("should define a pipeline with multiple versions", () => { + const pipeline = definePipeline({ + id: "multi-version", + versions: ["16.0.0", "15.1.0", "15.0.0"], + inputs: [], + routes: [], + }); + + expect(pipeline.versions).toEqual(["16.0.0", "15.1.0", "15.0.0"]); + }); + + it("should define a pipeline with inputs", () => { + const source1 = createMockSource("source1"); + const source2 = createMockSource("source2"); + + const pipeline = definePipeline({ + id: "with-inputs", + versions: ["16.0.0"], + inputs: [source1, source2], + routes: [], + }); + + expect(pipeline.inputs).toHaveLength(2); + expect(pipeline.inputs[0].id).toBe("source1"); + expect(pipeline.inputs[1].id).toBe("source2"); + }); + + it("should define a pipeline with routes", () => { + const route1 = createMockRoute("route1"); + const route2 = createMockRoute("route2"); + + const pipeline = definePipeline({ + id: "with-routes", + versions: ["16.0.0"], + inputs: [], + routes: [route1, route2], + }); + + expect(pipeline.routes).toHaveLength(2); + expect(pipeline.routes[0].id).toBe("route1"); + expect(pipeline.routes[1].id).toBe("route2"); + }); + + it("should define a pipeline with include filter", () => { + const include = vi.fn().mockReturnValue(true); + + const pipeline = definePipeline({ + id: "filtered", + versions: ["16.0.0"], + inputs: [], + routes: [], + include, + }); + + expect(pipeline.include).toBe(include); + }); + + it("should default strict to false", () => { + const pipeline = definePipeline({ + id: "default-strict", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + expect(pipeline.strict).toBe(false); + }); + + it("should allow setting strict to true", () => { + const pipeline = definePipeline({ + id: "strict-pipeline", + versions: ["16.0.0"], + inputs: [], + routes: [], + strict: true, + }); + + expect(pipeline.strict).toBe(true); + }); + + it("should default concurrency to 4", () => { + const pipeline = definePipeline({ + id: "default-concurrency", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + expect(pipeline.concurrency).toBe(4); + }); + + it("should allow setting custom concurrency", () => { + const pipeline = definePipeline({ + id: "custom-concurrency", + versions: ["16.0.0"], + inputs: [], + routes: [], + concurrency: 8, + }); + + expect(pipeline.concurrency).toBe(8); + }); + + it("should define a pipeline with fallback", () => { + const fallback: FallbackRouteDefinition = { + parser: mockParser, + resolver: async () => [], + }; + + const pipeline = definePipeline({ + id: "with-fallback", + versions: ["16.0.0"], + inputs: [], + routes: [], + fallback, + }); + + expect(pipeline.fallback).toBe(fallback); + }); + + it("should define a pipeline with fallback filter", () => { + const fallback: FallbackRouteDefinition = { + filter: (ctx) => ctx.file.ext === ".txt", + parser: mockParser, + resolver: async () => [], + }; + + const pipeline = definePipeline({ + id: "filtered-fallback", + versions: ["16.0.0"], + inputs: [], + routes: [], + fallback, + }); + + expect(pipeline.fallback?.filter).toBeDefined(); + }); + + it("should define a pipeline with onEvent handler", () => { + const onEvent = vi.fn(); + + const pipeline = definePipeline({ + id: "with-events", + versions: ["16.0.0"], + inputs: [], + routes: [], + onEvent, + }); + + expect(pipeline.onEvent).toBe(onEvent); + }); + + it("should define a complete pipeline with all options", () => { + 
const source = createMockSource("source"); + const route = createMockRoute("route"); + const include = vi.fn().mockReturnValue(true); + const onEvent = vi.fn(); + const fallback: FallbackRouteDefinition = { + parser: mockParser, + resolver: async () => [], + }; + + const pipeline = definePipeline({ + id: "complete-pipeline", + name: "Complete Pipeline", + description: "A fully configured pipeline", + versions: ["16.0.0", "15.1.0"], + inputs: [source], + routes: [route], + include, + strict: true, + concurrency: 2, + fallback, + onEvent, + }); + + expect(pipeline._type).toBe("pipeline-definition"); + expect(pipeline.id).toBe("complete-pipeline"); + expect(pipeline.name).toBe("Complete Pipeline"); + expect(pipeline.description).toBe("A fully configured pipeline"); + expect(pipeline.versions).toHaveLength(2); + expect(pipeline.inputs).toHaveLength(1); + expect(pipeline.routes).toHaveLength(1); + expect(pipeline.include).toBe(include); + expect(pipeline.strict).toBe(true); + expect(pipeline.concurrency).toBe(2); + expect(pipeline.fallback).toBe(fallback); + expect(pipeline.onEvent).toBe(onEvent); + }); +}); + +describe("isPipelineDefinition", () => { + it("should return true for valid pipeline definition", () => { + const pipeline = definePipeline({ + id: "test", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + expect(isPipelineDefinition(pipeline)).toBe(true); + }); + + it("should return false for null", () => { + expect(isPipelineDefinition(null)).toBe(false); + }); + + it("should return false for undefined", () => { + expect(isPipelineDefinition(undefined)).toBe(false); + }); + + it("should return false for primitive types", () => { + expect(isPipelineDefinition("string")).toBe(false); + expect(isPipelineDefinition(123)).toBe(false); + expect(isPipelineDefinition(true)).toBe(false); + }); + + it("should return false for empty object", () => { + expect(isPipelineDefinition({})).toBe(false); + }); + + it("should return false for object without _type", () => { + const notPipeline = { + id: "test", + versions: ["16.0.0"], + inputs: [], + routes: [], + }; + + expect(isPipelineDefinition(notPipeline)).toBe(false); + }); + + it("should return false for object with wrong _type", () => { + const notPipeline = { + _type: "not-a-pipeline", + id: "test", + versions: ["16.0.0"], + inputs: [], + routes: [], + }; + + expect(isPipelineDefinition(notPipeline)).toBe(false); + }); + + it("should return false for array", () => { + expect(isPipelineDefinition([])).toBe(false); + }); + + it("should work as type guard", () => { + const unknown: unknown = definePipeline({ + id: "guarded", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + if (isPipelineDefinition(unknown)) { + expect(unknown.id).toBe("guarded"); + expect(unknown._type).toBe("pipeline-definition"); + } else { + throw new Error("Expected valid pipeline definition"); + } + }); +}); + +describe("getPipelineRouteIds", () => { + it("should return empty array for pipeline with no routes", () => { + const pipeline = definePipeline({ + id: "no-routes", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + const routeIds = getPipelineRouteIds(pipeline); + + expect(routeIds).toEqual([]); + }); + + it("should return route ids for pipeline with routes", () => { + const route1 = createMockRoute("route-a"); + const route2 = createMockRoute("route-b"); + const route3 = createMockRoute("route-c"); + + const pipeline = definePipeline({ + id: "with-routes", + versions: ["16.0.0"], + inputs: [], + routes: [route1, route2, route3], + 
}); + + const routeIds = getPipelineRouteIds(pipeline); + + expect(routeIds).toEqual(["route-a", "route-b", "route-c"]); + }); + + it("should preserve route order", () => { + const routes = [ + createMockRoute("third"), + createMockRoute("first"), + createMockRoute("second"), + ]; + + const pipeline = definePipeline({ + id: "ordered", + versions: ["16.0.0"], + inputs: [], + routes, + }); + + const routeIds = getPipelineRouteIds(pipeline); + + expect(routeIds).toEqual(["third", "first", "second"]); + }); +}); + +describe("getPipelineSourceIds", () => { + it("should return empty array for pipeline with no sources", () => { + const pipeline = definePipeline({ + id: "no-sources", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + const sourceIds = getPipelineSourceIds(pipeline); + + expect(sourceIds).toEqual([]); + }); + + it("should return source ids for pipeline with sources", () => { + const source1 = createMockSource("source-a"); + const source2 = createMockSource("source-b"); + + const pipeline = definePipeline({ + id: "with-sources", + versions: ["16.0.0"], + inputs: [source1, source2], + routes: [], + }); + + const sourceIds = getPipelineSourceIds(pipeline); + + expect(sourceIds).toEqual(["source-a", "source-b"]); + }); + + it("should preserve source order", () => { + const sources = [ + createMockSource("z-source"), + createMockSource("a-source"), + createMockSource("m-source"), + ]; + + const pipeline = definePipeline({ + id: "ordered", + versions: ["16.0.0"], + inputs: sources, + routes: [], + }); + + const sourceIds = getPipelineSourceIds(pipeline); + + expect(sourceIds).toEqual(["z-source", "a-source", "m-source"]); + }); +}); + +describe("pipeline definition properties", () => { + it("should have readonly _type property", () => { + const pipeline = definePipeline({ + id: "readonly-test", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + expect(pipeline._type).toBe("pipeline-definition"); + }); + + it("should include all provided inputs", () => { + const sources = [ + createMockSource("unicode"), + createMockSource("cldr"), + createMockSource("local"), + ]; + + const pipeline = definePipeline({ + id: "multi-source", + versions: ["16.0.0"], + inputs: sources, + routes: [], + }); + + expect(pipeline.inputs).toHaveLength(3); + sources.forEach((source, i) => { + expect(pipeline.inputs[i]).toBe(source); + }); + }); + + it("should include all provided routes", () => { + const routes = [ + createMockRoute("line-break"), + createMockRoute("scripts"), + createMockRoute("blocks"), + ]; + + const pipeline = definePipeline({ + id: "multi-route", + versions: ["16.0.0"], + inputs: [], + routes, + }); + + expect(pipeline.routes).toHaveLength(3); + routes.forEach((route, i) => { + expect(pipeline.routes[i]).toBe(route); + }); + }); +}); + +describe("fallback route", () => { + it("should have parser and resolver", () => { + const fallback: FallbackRouteDefinition = { + parser: mockParser, + resolver: async () => [], + }; + + const pipeline = definePipeline({ + id: "fallback-test", + versions: ["16.0.0"], + inputs: [], + routes: [], + fallback, + }); + + expect(typeof pipeline.fallback?.parser).toBe("function"); + expect(typeof pipeline.fallback?.resolver).toBe("function"); + }); + + it("should support custom output type in resolver", async () => { + interface CustomOutput { + raw: string; + } + + const fallback: FallbackRouteDefinition<AsyncIterable<ParsedRow>, CustomOutput> = { + parser: mockParser, + resolver: async () => ({ raw: "data" }), + }; + + const pipeline = definePipeline({ + id:
"custom-fallback", + versions: ["16.0.0"], + inputs: [], + routes: [], + fallback, + }); + + expect(pipeline.fallback).toBeDefined(); + }); +}); + +describe("type inference", () => { + it("should preserve const types for sources and routes", () => { + const source = createMockSource("my-source"); + const route = createMockRoute("my-route"); + + const pipeline = definePipeline({ + id: "typed-pipeline" as const, + versions: ["16.0.0"], + inputs: [source] as const, + routes: [route] as const, + }); + + expect(pipeline.id).toBe("typed-pipeline"); + expect(pipeline.inputs).toHaveLength(1); + expect(pipeline.routes).toHaveLength(1); + }); +}); diff --git a/packages/pipelines/pipeline-core/test/route.test.ts b/packages/pipelines/pipeline-core/test/route.test.ts new file mode 100644 index 000000000..f44596abd --- /dev/null +++ b/packages/pipelines/pipeline-core/test/route.test.ts @@ -0,0 +1,458 @@ +import type { + ArtifactDefinition, + InferRouteDepends, + InferRouteEmits, + InferRouteId, + InferRouteOutput, + RouteResolveContext, +} from "../src/route"; +import type { FileContext, ParsedRow, PropertyJson } from "../src/types"; +import { describe, expect, expectTypeOf, it, vi } from "vitest"; +import { z } from "zod"; +import { + definePipelineRoute, +} from "../src/route"; +import { definePipelineTransform } from "../src/transform"; + +function createFileContext(): FileContext { + return { + version: "16.0.0", + dir: "ucd", + path: "ucd/LineBreak.txt", + name: "LineBreak.txt", + ext: ".txt", + }; +} + +async function* mockParser(): AsyncIterable { + yield { sourceFile: "test.txt", kind: "point", codePoint: "0041", value: "A" }; + yield { sourceFile: "test.txt", kind: "point", codePoint: "0042", value: "B" }; +} + +function createMockResolveContext(): RouteResolveContext { + return { + version: "16.0.0", + file: createFileContext(), + getArtifact: vi.fn(), + emitArtifact: vi.fn(), + normalizeEntries: vi.fn((entries) => entries), + now: vi.fn(() => "2024-01-01T00:00:00Z"), + }; +} + +describe("definePipelineRoute", () => { + it("should define a minimal route", () => { + const route = definePipelineRoute({ + id: "test-route", + filter: () => true, + parser: mockParser, + resolver: async () => [], + }); + + expect(route.id).toBe("test-route"); + expect(typeof route.filter).toBe("function"); + expect(typeof route.parser).toBe("function"); + expect(typeof route.resolver).toBe("function"); + }); + + it("should define a route with dependencies", () => { + const route = definePipelineRoute({ + id: "dependent-route", + filter: () => true, + depends: ["route:other-route", "artifact:source:data"] as const, + parser: mockParser, + resolver: async () => [], + }); + + expect(route.depends).toEqual(["route:other-route", "artifact:source:data"]); + }); + + it("should define a route with emits", () => { + const emits = { + result: { + _type: "artifact" as const, + schema: z.string(), + scope: "version" as const, + }, + }; + + const route = definePipelineRoute({ + id: "emitting-route", + filter: () => true, + emits, + parser: mockParser, + resolver: async () => [], + }); + + expect(route.emits).toBe(emits); + }); + + it("should define a route with transforms", () => { + const transform = definePipelineTransform({ + id: "test-transform", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield row; + } + }, + }); + + const route = definePipelineRoute({ + id: "transformed-route", + filter: () => true, + parser: mockParser, + transforms: [transform] as const, + resolver: async () => [], + }); + + 
expect(route.transforms).toHaveLength(1); + }); + + it("should define a route with output configuration", () => { + const route = definePipelineRoute({ + id: "output-route", + filter: () => true, + parser: mockParser, + resolver: async () => [], + out: { + dir: "custom-dir", + fileName: (pj) => `${pj.property}.json`, + }, + }); + + expect(route.out?.dir).toBe("custom-dir"); + expect(typeof route.out?.fileName).toBe("function"); + }); + + it("should define a route with cache option", () => { + const route = definePipelineRoute({ + id: "cached-route", + filter: () => true, + parser: mockParser, + resolver: async () => [], + cache: true, + }); + + expect(route.cache).toBe(true); + }); + + it("should define a route with all options", () => { + const transform = definePipelineTransform({ + id: "transform", + async* fn(_ctx, rows) { + yield* rows; + }, + }); + + const emits = { + data: { + _type: "artifact" as const, + schema: z.number(), + scope: "version" as const, + }, + }; + + const route = definePipelineRoute({ + id: "full-route", + filter: (ctx) => ctx.file.ext === ".txt", + depends: ["route:dependency"] as const, + emits, + parser: mockParser, + transforms: [transform] as const, + resolver: async () => [], + out: { dir: "output" }, + cache: true, + }); + + expect(route.id).toBe("full-route"); + expect(route.depends).toHaveLength(1); + expect(route.emits).toBe(emits); + expect(route.transforms).toHaveLength(1); + expect(route.out?.dir).toBe("output"); + expect(route.cache).toBe(true); + }); +}); + +describe("route filter", () => { + it("should filter by file extension", () => { + const route = definePipelineRoute({ + id: "txt-only", + filter: (ctx) => ctx.file.ext === ".txt", + parser: mockParser, + resolver: async () => [], + }); + + expect(route.filter({ file: { ...createFileContext(), ext: ".txt" } })).toBe(true); + expect(route.filter({ file: { ...createFileContext(), ext: ".xml" } })).toBe(false); + }); + + it("should filter by file name", () => { + const route = definePipelineRoute({ + id: "specific-file", + filter: (ctx) => ctx.file.name === "LineBreak.txt", + parser: mockParser, + resolver: async () => [], + }); + + expect(route.filter({ file: createFileContext() })).toBe(true); + expect(route.filter({ file: { ...createFileContext(), name: "Other.txt" } })).toBe(false); + }); + + it("should filter by directory", () => { + const route = definePipelineRoute({ + id: "ucd-only", + filter: (ctx) => ctx.file.dir === "ucd", + parser: mockParser, + resolver: async () => [], + }); + + expect(route.filter({ file: { ...createFileContext(), dir: "ucd" } })).toBe(true); + expect(route.filter({ file: { ...createFileContext(), dir: "emoji" } })).toBe(false); + }); +}); + +describe("route resolver", () => { + it("should call resolver with context and rows", async () => { + const resolver = vi.fn().mockResolvedValue([]); + + const route = definePipelineRoute({ + id: "test", + filter: () => true, + parser: mockParser, + resolver, + }); + + const ctx = createMockResolveContext(); + const rows = mockParser(); + + await route.resolver(ctx, rows); + + expect(resolver).toHaveBeenCalledWith(ctx, rows); + }); + + it("should return PropertyJson array", async () => { + const expectedOutput: PropertyJson[] = [{ + version: "16.0.0", + property: "Test", + file: "test.txt", + entries: [], + }]; + + const route = definePipelineRoute({ + id: "test", + filter: () => true, + parser: mockParser, + resolver: async () => expectedOutput, + }); + + const result = await route.resolver(createMockResolveContext(), 
mockParser()); + + expect(result).toEqual(expectedOutput); + }); + + it("should support custom output types", async () => { + interface CustomOutput { + count: number; + data: string[]; + } + + const route = definePipelineRoute<"custom", readonly [], Record<string, never>, readonly [], CustomOutput>({ + id: "custom", + filter: () => true, + parser: mockParser, + resolver: async (_ctx, rows) => { + const data: string[] = []; + for await (const row of rows) { + if (row.value && typeof row.value === "string") { + data.push(row.value); + } + } + return { count: data.length, data }; + }, + }); + + const result = await route.resolver(createMockResolveContext(), mockParser()); + + expect(result.count).toBe(2); + expect(result.data).toEqual(["A", "B"]); + }); +}); + +describe("route context methods", () => { + it("should provide getArtifact in resolver context", async () => { + const getArtifact = vi.fn().mockReturnValue("artifact-value"); + + const route = definePipelineRoute({ + id: "test", + filter: () => true, + depends: ["artifact:source:data"] as const, + parser: mockParser, + resolver: async (ctx) => { + const value = ctx.getArtifact("source:data"); + return [{ value }] as any; + }, + }); + + const ctx: RouteResolveContext = { + ...createMockResolveContext(), + getArtifact, + }; + + await route.resolver(ctx, mockParser()); + + expect(getArtifact).toHaveBeenCalledWith("source:data"); + }); + + it("should provide emitArtifact in resolver context", async () => { + const emitArtifact = vi.fn(); + + const route = definePipelineRoute({ + id: "test", + filter: () => true, + emits: { + result: { + _type: "artifact", + schema: z.string(), + scope: "version", + }, + }, + parser: mockParser, + resolver: async (ctx) => { + ctx.emitArtifact("result", "emitted-value"); + return []; + }, + }); + + const ctx: RouteResolveContext = { + ...createMockResolveContext(), + emitArtifact, + }; + + await route.resolver(ctx, mockParser()); + + expect(emitArtifact).toHaveBeenCalledWith("result", "emitted-value"); + }); + + it("should provide normalizeEntries in resolver context", async () => { + const normalizeEntries = vi.fn((entries) => entries.sort()); + + const route = definePipelineRoute({ + id: "test", + filter: () => true, + parser: mockParser, + resolver: async (ctx) => { + const entries = [{ codePoint: "0042", value: "B" }, { codePoint: "0041", value: "A" }]; + ctx.normalizeEntries(entries); + return []; + }, + }); + + const ctx: RouteResolveContext = { + ...createMockResolveContext(), + normalizeEntries, + }; + + await route.resolver(ctx, mockParser()); + + expect(normalizeEntries).toHaveBeenCalled(); + }); + + it("should provide now in resolver context", async () => { + const now = vi.fn().mockReturnValue("2024-06-15T12:00:00Z"); + + const route = definePipelineRoute({ + id: "test", + filter: () => true, + parser: mockParser, + resolver: async (ctx) => { + const timestamp = ctx.now(); + return [{ timestamp }] as any; + }, + }); + + const ctx: RouteResolveContext = { + ...createMockResolveContext(), + now, + }; + + const result = await route.resolver(ctx, mockParser()); + + expect(now).toHaveBeenCalled(); + expect(result[0].timestamp).toBe("2024-06-15T12:00:00Z"); + }); +}); + +describe("type inference", () => { + describe("inferRouteId", () => { + it("should infer route id", () => { + const routeId = "my-route" as const; + // eslint-disable-next-line unused-imports/no-unused-vars + const route = definePipelineRoute({ + id: routeId, + filter: () => true, + parser: mockParser, + resolver: async () => [], + }); + + type
RouteId = InferRouteId<typeof route>; + expectTypeOf<RouteId>().toBeString(); + expectTypeOf<RouteId>().toEqualTypeOf<typeof routeId>(); + }); + }); + + describe("inferRouteDepends", () => { + it("should infer route dependencies", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const route = definePipelineRoute({ + id: "test", + filter: () => true, + depends: ["route:dep1", "artifact:route:artifact"] as const, + parser: mockParser, + resolver: async () => [], + }); + + type Depends = InferRouteDepends<typeof route>; + expectTypeOf<Depends>().toEqualTypeOf<readonly ["route:dep1", "artifact:route:artifact"]>(); + }); + }); + + describe("inferRouteEmits", () => { + it("should infer route emits", () => { + const emits = { + data: { + _type: "artifact" as const, + schema: z.string(), + scope: "version" as const, + }, + } satisfies Record<string, ArtifactDefinition>; + + const route = definePipelineRoute({ + id: "test", + filter: () => true, + emits, + parser: mockParser, + resolver: async () => [], + }); + + type Emits = InferRouteEmits<typeof route>; + const routeEmits: Emits = route.emits!; + + expect(routeEmits.data._type).toBe("artifact"); + }); + }); + + describe("inferRouteOutput", () => { + it("should infer route output type", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const route = definePipelineRoute({ + id: "test", + filter: () => true, + parser: mockParser, + resolver: async (): Promise<PropertyJson[]> => [], + }); + + type Output = InferRouteOutput<typeof route>; + expectTypeOf<Output>().toEqualTypeOf<PropertyJson[]>(); + }); + }); +}); diff --git a/packages/pipelines/pipeline-core/test/source.test.ts b/packages/pipelines/pipeline-core/test/source.test.ts new file mode 100644 index 000000000..714469aac --- /dev/null +++ b/packages/pipelines/pipeline-core/test/source.test.ts @@ -0,0 +1,598 @@ +import type { + InferSourceId, + InferSourceIds, + PipelineSourceDefinition, + SourceBackend, + SourceFileContext, +} from "../src/source"; +import type { FileContext, PipelineFilter } from "../src/types"; +import { describe, expect, expectTypeOf, it, vi } from "vitest"; +import { + definePipelineSource, + resolveMultipleSourceFiles, + resolveSourceFiles, +} from "../src/source"; + +function createMockBackend(files: FileContext[]): SourceBackend { + return { + listFiles: vi.fn().mockResolvedValue(files), + readFile: vi.fn().mockResolvedValue("file content"), + }; +} + +function createFile(overrides: Partial<FileContext> = {}): FileContext { + return { + version: "16.0.0", + dir: "ucd", + path: "ucd/LineBreak.txt", + name: "LineBreak.txt", + ext: ".txt", + ...overrides, + }; +} + +describe("definePipelineSource", () => { + it("should return source definition with correct types", () => { + const backend = createMockBackend([]); + const source = definePipelineSource({ + id: "unicode", + backend, + }); + + expect(source.id).toBe("unicode"); + expect(source.backend).toBe(backend); + expect(source.includes).toBeUndefined(); + expect(source.excludes).toBeUndefined(); + + expectTypeOf(source).toEqualTypeOf<PipelineSourceDefinition<"unicode">>(); + expectTypeOf(source.id).toEqualTypeOf<"unicode">(); + }); + + it("should accept includes filter", () => { + const backend = createMockBackend([]); + const includes = vi.fn().mockReturnValue(true); + + const source = definePipelineSource({ + id: "filtered", + backend, + includes, + }); + + expect(source.includes).toBe(includes); + expectTypeOf(source.includes).toEqualTypeOf<PipelineFilter | undefined>(); + }); + + it("should accept excludes filter", () => { + const backend = createMockBackend([]); + const excludes = vi.fn().mockReturnValue(false); + + const source = definePipelineSource({ + id: "filtered", + backend, + excludes, + }); + + expect(source.excludes).toBe(excludes); +
expectTypeOf(source.excludes).toEqualTypeOf<PipelineFilter | undefined>(); + }); + + it("should accept both includes and excludes filters", () => { + const backend = createMockBackend([]); + const includes = vi.fn().mockReturnValue(true); + const excludes = vi.fn().mockReturnValue(false); + + const source = definePipelineSource({ + id: "dual-filtered", + backend, + includes, + excludes, + }); + + expect(source.includes).toBe(includes); + expect(source.excludes).toBe(excludes); + }); + + it("should infer literal string type for id", () => { + const source = definePipelineSource({ + id: "my-literal-id", + backend: createMockBackend([]), + }); + + expectTypeOf(source.id).toEqualTypeOf<"my-literal-id">(); + expectTypeOf(source.id).not.toEqualTypeOf<string>(); + }); +}); + +describe("resolveSourceFiles", () => { + it("should resolve all files from backend", async () => { + const files = [ + createFile({ path: "ucd/LineBreak.txt", name: "LineBreak.txt" }), + createFile({ path: "ucd/Scripts.txt", name: "Scripts.txt" }), + ]; + const backend = createMockBackend(files); + const source = definePipelineSource({ id: "test", backend }); + + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(2); + expect(backend.listFiles).toHaveBeenCalledWith("16.0.0"); + expect(backend.listFiles).toHaveBeenCalledTimes(1); + + // Type assertion + expectTypeOf(result).toEqualTypeOf<SourceFileContext[]>(); + }); + + it("should add source id to each file", async () => { + const files = [createFile()]; + const backend = createMockBackend(files); + const source = definePipelineSource({ id: "my-source", backend }); + + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result[0]?.source).toEqual({ id: "my-source" }); + expectTypeOf(result[0]?.source.id).toEqualTypeOf<string | undefined>(); + }); + + it("should filter files using includes", async () => { + const files = [ + createFile({ path: "ucd/LineBreak.txt", name: "LineBreak.txt", dir: "ucd" }), + createFile({ path: "ucd/Scripts.txt", name: "Scripts.txt", dir: "ucd" }), + createFile({ path: "emoji/data.txt", name: "data.txt", dir: "emoji" }), + ]; + const backend = createMockBackend(files); + const includes = (ctx: any) => ctx.file.dir === "ucd"; + + const source = definePipelineSource({ id: "test", backend, includes }); + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(2); + expect(result.every((f) => f.dir === "ucd")).toBe(true); + expect(result.map((f) => f.name)).toEqual(["LineBreak.txt", "Scripts.txt"]); + }); + + it("should filter files using excludes", async () => { + const files = [ + createFile({ path: "ucd/LineBreak.txt", name: "LineBreak.txt" }), + createFile({ path: "ucd/ReadMe.txt", name: "ReadMe.txt" }), + createFile({ path: "ucd/Scripts.txt", name: "Scripts.txt" }), + ]; + const backend = createMockBackend(files); + const excludes = (ctx: any) => ctx.file.name === "ReadMe.txt"; + + const source = definePipelineSource({ id: "test", backend, excludes }); + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(2); + expect(result.some((f) => f.name === "ReadMe.txt")).toBe(false); + expect(result.map((f) => f.name)).toEqual(["LineBreak.txt", "Scripts.txt"]); + }); + + it("should apply both includes and excludes filters", async () => { + const files = [ + createFile({ path: "ucd/LineBreak.txt", name: "LineBreak.txt", dir: "ucd" }), + createFile({ path: "ucd/ReadMe.txt", name: "ReadMe.txt", dir: "ucd" }), + createFile({ path: "emoji/data.txt", name: "data.txt", dir: "emoji" }), + ]; + const backend =
createMockBackend(files); + const includes = (ctx: any) => ctx.file.dir === "ucd"; + const excludes = (ctx: any) => ctx.file.name === "ReadMe.txt"; + + const source = definePipelineSource({ id: "test", backend, includes, excludes }); + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(1); + expect(result[0]?.name).toBe("LineBreak.txt"); + expect(result[0]?.dir).toBe("ucd"); + }); + + it("should return empty array when no files match filters", async () => { + const files = [createFile({ dir: "emoji" })]; + const backend = createMockBackend(files); + const includes = (ctx: any) => ctx.file.dir === "ucd"; + + const source = definePipelineSource({ id: "test", backend, includes }); + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(0); + expect(result).toEqual([]); + }); + + it("should return empty array when backend returns no files", async () => { + const backend = createMockBackend([]); + const source = definePipelineSource({ id: "test", backend }); + + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(0); + expect(result).toEqual([]); + }); + + it("should preserve all file properties and add source", async () => { + const file = createFile({ + version: "16.0.0", + dir: "ucd", + path: "ucd/LineBreak.txt", + name: "LineBreak.txt", + ext: ".txt", + }); + const backend = createMockBackend([file]); + const source = definePipelineSource({ id: "test-source", backend }); + + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result[0]).toBeDefined(); + expect(result[0]).toMatchObject({ + version: "16.0.0", + dir: "ucd", + path: "ucd/LineBreak.txt", + name: "LineBreak.txt", + ext: ".txt", + source: { id: "test-source" }, + }); + }); + + it("should handle multiple versions correctly", async () => { + const backend = createMockBackend([createFile({ version: "15.0.0" })]); + const source = definePipelineSource({ id: "test", backend }); + + await resolveSourceFiles(source, "15.0.0"); + expect(backend.listFiles).toHaveBeenCalledWith("15.0.0"); + + await resolveSourceFiles(source, "16.0.0"); + expect(backend.listFiles).toHaveBeenCalledWith("16.0.0"); + }); +}); + +describe("resolveMultipleSourceFiles", () => { + it("should resolve files from multiple sources", async () => { + const source1Files = [createFile({ path: "ucd/LineBreak.txt" })]; + const source2Files = [createFile({ path: "emoji/data.txt", dir: "emoji" })]; + + const source1 = definePipelineSource({ + id: "source1", + backend: createMockBackend(source1Files), + }); + const source2 = definePipelineSource({ + id: "source2", + backend: createMockBackend(source2Files), + }); + + const result = await resolveMultipleSourceFiles([source1, source2], "16.0.0"); + + expect(result).toHaveLength(2); + expectTypeOf(result).toEqualTypeOf<SourceFileContext[]>(); + }); + + it("should deduplicate files by path (last source wins)", async () => { + const source1Files = [ + createFile({ path: "ucd/LineBreak.txt", name: "LineBreak.txt" }), + ]; + const source2Files = [ + createFile({ path: "ucd/LineBreak.txt", name: "LineBreak.txt" }), + ]; + + const source1 = definePipelineSource({ + id: "source1", + backend: createMockBackend(source1Files), + }); + const source2 = definePipelineSource({ + id: "source2", + backend: createMockBackend(source2Files), + }); + + const result = await resolveMultipleSourceFiles([source1, source2], "16.0.0"); + + expect(result).toHaveLength(1); + expect(result[0]?.source.id).toBe("source2"); +
expect(result[0]?.path).toBe("ucd/LineBreak.txt"); + }); + + it("should handle empty sources array", async () => { + const result = await resolveMultipleSourceFiles([], "16.0.0"); + + expect(result).toHaveLength(0); + expect(result).toEqual([]); + }); + + it("should handle sources with no files", async () => { + const source1 = definePipelineSource({ + id: "empty1", + backend: createMockBackend([]), + }); + const source2 = definePipelineSource({ + id: "empty2", + backend: createMockBackend([]), + }); + + const result = await resolveMultipleSourceFiles([source1, source2], "16.0.0"); + + expect(result).toHaveLength(0); + }); + + it("should apply filters from each source independently", async () => { + const source1Files = [ + createFile({ path: "ucd/LineBreak.txt", dir: "ucd", name: "LineBreak.txt" }), + createFile({ path: "ucd/ReadMe.txt", dir: "ucd", name: "ReadMe.txt" }), + ]; + const source2Files = [ + createFile({ path: "emoji/data.txt", dir: "emoji", name: "data.txt" }), + ]; + + const source1 = definePipelineSource({ + id: "source1", + backend: createMockBackend(source1Files), + excludes: (ctx: any) => ctx.file.name === "ReadMe.txt", + }); + const source2 = definePipelineSource({ + id: "source2", + backend: createMockBackend(source2Files), + }); + + const result = await resolveMultipleSourceFiles([source1, source2], "16.0.0"); + + expect(result).toHaveLength(2); + expect(result.some((f) => f.name === "ReadMe.txt")).toBe(false); + expect(result.map((f) => f.name).sort()).toEqual(["LineBreak.txt", "data.txt"]); + }); + + it("should handle mix of filtered and unfiltered sources", async () => { + const source1 = definePipelineSource({ + id: "filtered", + backend: createMockBackend([ + createFile({ path: "ucd/A.txt", name: "A.txt" }), + createFile({ path: "ucd/B.txt", name: "B.txt" }), + ]), + includes: (ctx: any) => ctx.file.name === "A.txt", + }); + const source2 = definePipelineSource({ + id: "unfiltered", + backend: createMockBackend([ + createFile({ path: "emoji/C.txt", name: "C.txt", dir: "emoji" }), + ]), + }); + + const result = await resolveMultipleSourceFiles([source1, source2], "16.0.0"); + + expect(result).toHaveLength(2); + expect(result.map((f) => f.name).sort()).toEqual(["A.txt", "C.txt"]); + }); + + it("should preserve source id in deduplication", async () => { + const sharedPath = "shared/file.txt"; + const source1 = definePipelineSource({ + id: "first", + backend: createMockBackend([createFile({ path: sharedPath, dir: "shared" })]), + }); + const source2 = definePipelineSource({ + id: "second", + backend: createMockBackend([createFile({ path: sharedPath, dir: "shared" })]), + }); + const source3 = definePipelineSource({ + id: "third", + backend: createMockBackend([createFile({ path: sharedPath, dir: "shared" })]), + }); + + const result = await resolveMultipleSourceFiles([source1, source2, source3], "16.0.0"); + + expect(result).toHaveLength(1); + expect(result[0]?.source.id).toBe("third"); // Last one wins + }); +}); + +describe("type inference", () => { + describe("inferSourceId", () => { + it("should infer literal source id from definition", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const source = definePipelineSource({ + id: "my-source", + backend: createMockBackend([]), + }); + + type SourceId = InferSourceId<typeof source>; + expectTypeOf<SourceId>().toEqualTypeOf<"my-source">(); + expectTypeOf<SourceId>().not.toEqualTypeOf<string>(); + }); + + it("should work with different id types", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const unicodeSource =
definePipelineSource({ + id: "unicode", + backend: createMockBackend([]), + }); + // eslint-disable-next-line unused-imports/no-unused-vars + const emojiSource = definePipelineSource({ + id: "emoji", + backend: createMockBackend([]), + }); + + type UnicodeId = InferSourceId<typeof unicodeSource>; + type EmojiId = InferSourceId<typeof emojiSource>; + + expectTypeOf<UnicodeId>().toEqualTypeOf<"unicode">(); + expectTypeOf<EmojiId>().toEqualTypeOf<"emoji">(); + expectTypeOf<UnicodeId>().not.toEqualTypeOf<EmojiId>(); + }); + }); + + describe("inferSourceIds", () => { + it("should infer multiple source ids as union", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const sources = [ + definePipelineSource({ id: "source1", backend: createMockBackend([]) }), + definePipelineSource({ id: "source2", backend: createMockBackend([]) }), + ] as const; + + type SourceIds = InferSourceIds<typeof sources>; + expectTypeOf<SourceIds>().toEqualTypeOf<"source1" | "source2">(); + }); + + it("should work with single source", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const sources = [ + definePipelineSource({ id: "only", backend: createMockBackend([]) }), + ] as const; + + type SourceIds = InferSourceIds<typeof sources>; + expectTypeOf<SourceIds>().toEqualTypeOf<"only">(); + }); + + it("should work with triple sources", () => { + // eslint-disable-next-line unused-imports/no-unused-vars + const sources = [ + definePipelineSource({ id: "first", backend: createMockBackend([]) }), + definePipelineSource({ id: "second", backend: createMockBackend([]) }), + definePipelineSource({ id: "third", backend: createMockBackend([]) }), + ] as const; + + type SourceIds = InferSourceIds<typeof sources>; + expectTypeOf<SourceIds>().toEqualTypeOf<"first" | "second" | "third">(); + }); + + it("should require readonly tuple for proper inference", () => { + // Without 'as const', type is wider + const sources = [ + definePipelineSource({ id: "source1", backend: createMockBackend([]) }), + definePipelineSource({ id: "source2", backend: createMockBackend([]) }), + ]; + + // This will be PipelineSourceDefinition<string>[] not a tuple + expectTypeOf(sources).toMatchTypeOf<PipelineSourceDefinition<string>[]>(); + }); + }); + + describe("sourceFileContext", () => { + it("should extend FileContext with source", async () => { + const source = definePipelineSource({ + id: "test-source", + backend: createMockBackend([createFile()]), + }); + + const result = await resolveSourceFiles(source, "16.0.0"); + const file = result[0]!; + + expectTypeOf(file).toMatchTypeOf<FileContext>(); + expectTypeOf(file).toMatchTypeOf<SourceFileContext>(); + expectTypeOf(file.source).toEqualTypeOf<{ id: string }>(); + expectTypeOf(file.source.id).toEqualTypeOf<string>(); + expectTypeOf(file.version).toEqualTypeOf<string>(); + expectTypeOf(file.path).toEqualTypeOf<string>(); + expectTypeOf(file.name).toEqualTypeOf<string>(); + }); + + it("should have all FileContext properties", () => { + const fileContext: SourceFileContext = { + version: "16.0.0", + dir: "ucd", + path: "ucd/test.txt", + name: "test.txt", + ext: ".txt", + source: { id: "test" }, + }; + + expectTypeOf(fileContext.version).toEqualTypeOf<string>(); + expectTypeOf(fileContext.dir).toEqualTypeOf<string>(); + expectTypeOf(fileContext.path).toEqualTypeOf<string>(); + expectTypeOf(fileContext.name).toEqualTypeOf<string>(); + expectTypeOf(fileContext.ext).toEqualTypeOf<string>(); + expectTypeOf(fileContext.source).toEqualTypeOf<{ id: string }>(); + }); + }); +}); + +describe("edge cases", () => { + it("should handle files with special characters in path", async () => { + const files = [ + createFile({ path: "ucd/NamesList-16.0.0d1.txt", name: "NamesList-16.0.0d1.txt" }), + ]; + const backend = createMockBackend(files); + const source = definePipelineSource({ id:
"test", backend }); + + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(1); + expect(result[0]?.path).toBe("ucd/NamesList-16.0.0d1.txt"); + }); + + it("should handle filter that always returns false", async () => { + const files = [createFile(), createFile({ path: "ucd/other.txt" })]; + const backend = createMockBackend(files); + const source = definePipelineSource({ + id: "test", + backend, + includes: () => false, + }); + + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(0); + }); + + it("should handle filter that always returns true", async () => { + const files = [createFile(), createFile({ path: "ucd/other.txt" })]; + const backend = createMockBackend(files); + const source = definePipelineSource({ + id: "test", + backend, + excludes: () => false, + }); + + const result = await resolveSourceFiles(source, "16.0.0"); + + expect(result).toHaveLength(2); + }); + + it("should handle complex deduplication scenario", async () => { + const path1 = "shared/file1.txt"; + const path2 = "shared/file2.txt"; + + const sources = [ + definePipelineSource({ + id: "s1", + backend: createMockBackend([ + createFile({ path: path1, dir: "shared", name: "file1.txt" }), + createFile({ path: path2, dir: "shared", name: "file2.txt" }), + ]), + }), + definePipelineSource({ + id: "s2", + backend: createMockBackend([ + createFile({ path: path1, dir: "shared", name: "file1.txt" }), + ]), + }), + definePipelineSource({ + id: "s3", + backend: createMockBackend([ + createFile({ path: path2, dir: "shared", name: "file2.txt" }), + ]), + }), + ]; + + const result = await resolveMultipleSourceFiles(sources, "16.0.0"); + + expect(result).toHaveLength(2); + expect(result.find((f) => f.path === path1)?.source.id).toBe("s2"); + expect(result.find((f) => f.path === path2)?.source.id).toBe("s3"); + }); + + it("should maintain insertion order for non-duplicate files", async () => { + const sources = [ + definePipelineSource({ + id: "s1", + backend: createMockBackend([ + createFile({ path: "a.txt", name: "a.txt" }), + createFile({ path: "b.txt", name: "b.txt" }), + ]), + }), + definePipelineSource({ + id: "s2", + backend: createMockBackend([ + createFile({ path: "c.txt", name: "c.txt" }), + ]), + }), + ]; + + const result = await resolveMultipleSourceFiles(sources, "16.0.0"); + + expect(result.map((f) => f.name)).toEqual(["a.txt", "b.txt", "c.txt"]); + }); +}); diff --git a/packages/pipelines/pipeline-core/test/transform.test.ts b/packages/pipelines/pipeline-core/test/transform.test.ts new file mode 100644 index 000000000..e4ff10da5 --- /dev/null +++ b/packages/pipelines/pipeline-core/test/transform.test.ts @@ -0,0 +1,322 @@ +import type { PipelineTransformDefinition } from "../src/transform"; +import { asyncFromArray, collect } from "#test-utils"; +import { describe, expect, expectTypeOf, it } from "vitest"; +import { applyTransforms, definePipelineTransform } from "../src/transform"; + +describe("definePipelineTransform", () => { + it("should define a simple transform", async () => { + const transform = definePipelineTransform({ + id: "uppercase", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield (row as string).toUpperCase(); + } + }, + }); + + expect(transform.id).toBe("uppercase"); + expect(typeof transform.fn).toBe("function"); + expectTypeOf(transform).toEqualTypeOf>(); + + const input = ["a", "b", "c"]; + const result = await collect( + transform.fn(null as any, asyncFromArray(input)), + ); + + 
expect(result).toEqual(["A", "B", "C"]); + }); + + it("should define a transform with type parameters", async () => { + const transform = definePipelineTransform({ + id: "string-length", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield row.length; + } + }, + }); + + expect(transform.id).toBe("string-length"); + expectTypeOf(transform).toEqualTypeOf>(); + + const input = ["foo", "barbaz"]; + const result = await collect( + transform.fn(null as any, asyncFromArray(input)), + ); + + expect(result).toEqual([3, 6]); + }); + + it("should preserve transform function", async () => { + const transform = definePipelineTransform({ + id: "double", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield row * 2; + } + }, + }); + + expectTypeOf(transform).toEqualTypeOf>(); + + const input = [1, 2, 3]; + const result = await collect( + transform.fn(null as any, asyncFromArray(input)), + ); + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual([2, 4, 6]); + }); +}); + +describe("applyTransforms", () => { + it("should apply single transform", async () => { + const uppercase = definePipelineTransform({ + id: "uppercase", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield row.toUpperCase(); + } + }, + }); + + expectTypeOf(uppercase).toEqualTypeOf>(); + + const ctx = {} as any; + const result = await collect(applyTransforms(ctx, asyncFromArray(["hello", "world"]), [uppercase])); + + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual(["HELLO", "WORLD"]); + }); + + it("should chain multiple transforms", async () => { + const uppercase = definePipelineTransform({ + id: "uppercase", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield row.toUpperCase(); + } + }, + }); + + const exclaim = definePipelineTransform({ + id: "exclaim", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield `${row}!`; + } + }, + }); + + expectTypeOf(uppercase).toEqualTypeOf>(); + expectTypeOf(exclaim).toEqualTypeOf>(); + + const ctx = {} as any; + const result = await collect(applyTransforms(ctx, asyncFromArray(["hello", "world"]), [uppercase, exclaim])); + + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual(["HELLO!", "WORLD!"]); + }); + + it("should handle type transformations", async () => { + const toLength = definePipelineTransform({ + id: "to-length", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield row.length; + } + }, + }); + + const double = definePipelineTransform({ + id: "double", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield row * 2; + } + }, + }); + + expectTypeOf(toLength).toEqualTypeOf>(); + expectTypeOf(double).toEqualTypeOf>(); + + const ctx = {} as any; + const result = await collect(applyTransforms(ctx, asyncFromArray(["a", "ab", "abc"]), [toLength, double])); + + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual([2, 4, 6]); + }); + + it("should apply transforms in order", async () => { + const append = (suffix: string) => + definePipelineTransform({ + id: `append-${suffix}`, + async* fn(_ctx, rows) { + for await (const row of rows) { + yield `${row}${suffix}`; + } + }, + }); + + const t1 = append("1"); + const t2 = append("2"); + const t3 = append("3"); + expectTypeOf(t1).toEqualTypeOf>(); + expectTypeOf(t2).toEqualTypeOf>(); + expectTypeOf(t3).toEqualTypeOf>(); + + const ctx = {} as any; + const result = await collect( + applyTransforms(ctx, asyncFromArray(["x"]), [t1, t2, t3]), + ); + + expectTypeOf(result).toEqualTypeOf(); + + 
expect(result).toEqual(["x123"]); + }); + + it("should handle empty transform array", async () => { + const ctx = {} as any; + const result = await collect(applyTransforms(ctx, asyncFromArray(["a", "b", "c"]), [])); + + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual(["a", "b", "c"]); + }); + + it("should handle empty input", async () => { + const uppercase = definePipelineTransform({ + id: "uppercase", + async* fn(_ctx, rows) { + for await (const row of rows) { + yield row.toUpperCase(); + } + }, + }); + + expectTypeOf(uppercase).toEqualTypeOf>(); + + const ctx = {} as any; + const result = await collect(applyTransforms(ctx, asyncFromArray([]), [uppercase])); + + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual([]); + }); + + it("should pass context to transforms", async () => { + let capturedVersion: string | undefined; + let capturedFileName: string | undefined; + + const captureContext = definePipelineTransform({ + id: "capture", + async* fn(ctx, rows) { + capturedVersion = ctx.version; + capturedFileName = ctx.file.name; + for await (const row of rows) { + yield row; + } + }, + }); + + expectTypeOf(captureContext).toEqualTypeOf>(); + + const ctx = { + version: "16.0.0", + file: { + version: "16.0.0", + dir: "ucd", + path: "ucd/LineBreak.txt", + name: "LineBreak.txt", + ext: ".txt", + }, + }; + await collect(applyTransforms(ctx, asyncFromArray(["test"]), [captureContext])); + + expect(capturedVersion).toBe("16.0.0"); + expect(capturedFileName).toBe("LineBreak.txt"); + }); + + it("should handle object transformations", async () => { + interface Person { + name: string; + age: number; + } + + interface PersonWithId extends Person { + id: string; + } + + const addId = definePipelineTransform({ + id: "add-id", + async* fn(_ctx, rows) { + let counter = 0; + for await (const row of rows) { + yield { ...row, id: `person-${counter++}` }; + } + }, + }); + + expectTypeOf(addId).toEqualTypeOf>(); + + const ctx = {} as any; + const result = await collect(applyTransforms(ctx, asyncFromArray([ + { name: "Alice", age: 30 }, + { name: "Bob", age: 25 }, + ]), [addId])); + + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual([ + { name: "Alice", age: 30, id: "person-0" }, + { name: "Bob", age: 25, id: "person-1" }, + ]); + }); + + it("should handle filter transformations", async () => { + const filterEven = definePipelineTransform({ + id: "filter-even", + async* fn(_ctx, rows) { + for await (const row of rows) { + if (row % 2 === 0) { + yield row; + } + } + }, + }); + + expectTypeOf(filterEven).toEqualTypeOf>(); + + const ctx = {} as any; + const result = await collect(applyTransforms(ctx, asyncFromArray([1, 2, 3, 4, 5, 6]), [filterEven])); + + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual([2, 4, 6]); + }); + + it("should handle aggregation transformations", async () => { + const toArray = definePipelineTransform({ + id: "to-array", + async* fn(_ctx, rows) { + const arr: number[] = []; + for await (const row of rows) { + arr.push(row); + } + yield arr; + }, + }); + + expectTypeOf(toArray).toEqualTypeOf>(); + + const ctx = {} as any; + const result = await collect(applyTransforms(ctx, asyncFromArray([1, 2, 3]), [toArray])); + + expectTypeOf(result).toEqualTypeOf(); + + expect(result).toEqual([[1, 2, 3]]); + }); +}); diff --git a/packages/pipelines/pipeline-core/tsconfig.build.json b/packages/pipelines/pipeline-core/tsconfig.build.json new file mode 100644 index 000000000..36c889e0c --- /dev/null +++ 
b/packages/pipelines/pipeline-core/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "test"] +} diff --git a/packages/pipelines/pipeline-core/tsconfig.json b/packages/pipelines/pipeline-core/tsconfig.json new file mode 100644 index 000000000..9c6dd744b --- /dev/null +++ b/packages/pipelines/pipeline-core/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + "include": [ + "src", + "test" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-core/tsdown.config.ts b/packages/pipelines/pipeline-core/tsdown.config.ts new file mode 100644 index 000000000..dee0149e6 --- /dev/null +++ b/packages/pipelines/pipeline-core/tsdown.config.ts @@ -0,0 +1,7 @@ +import { createTsdownConfig } from "@ucdjs-tooling/tsdown-config"; + +export default createTsdownConfig({ + entry: [ + "./src/index.ts", + ], +}); diff --git a/packages/pipelines/pipeline-executor/README.md b/packages/pipelines/pipeline-executor/README.md new file mode 100644 index 000000000..045525964 --- /dev/null +++ b/packages/pipelines/pipeline-executor/README.md @@ -0,0 +1,27 @@ +# @ucdjs/pipelines-executor + +[![npm version][npm-version-src]][npm-version-href] +[![npm downloads][npm-downloads-src]][npm-downloads-href] +[![codecov][codecov-src]][codecov-href] + +> [!IMPORTANT] +> This is an internal package. It may change without warning and is not subject to semantic versioning. Use at your own risk. + +A collection of core pipeline functionalities for the UCD project. + +## Installation + +```bash +npm install @ucdjs/pipelines-executor +``` + +## 📄 License + +Published under [MIT License](./LICENSE). + +[npm-version-src]: https://img.shields.io/npm/v/@ucdjs/pipelines-executor?style=flat&colorA=18181B&colorB=4169E1 +[npm-version-href]: https://npmjs.com/package/@ucdjs/pipelines-executor +[npm-downloads-src]: https://img.shields.io/npm/dm/@ucdjs/pipelines-executor?style=flat&colorA=18181B&colorB=4169E1 +[npm-downloads-href]: https://npmjs.com/package/@ucdjs/pipelines-executor +[codecov-src]: https://img.shields.io/codecov/c/gh/ucdjs/ucd?style=flat&colorA=18181B&colorB=4169E1 +[codecov-href]: https://codecov.io/gh/ucdjs/ucd diff --git a/packages/pipelines/pipeline-executor/eslint.config.js b/packages/pipelines/pipeline-executor/eslint.config.js new file mode 100644 index 000000000..d9c0ca1ec --- /dev/null +++ b/packages/pipelines/pipeline-executor/eslint.config.js @@ -0,0 +1,7 @@ +// @ts-check +import { luxass } from "@luxass/eslint-config"; + +export default luxass({ + type: "lib", + pnpm: true, +}); diff --git a/packages/pipelines/pipeline-executor/package.json b/packages/pipelines/pipeline-executor/package.json new file mode 100644 index 000000000..9d993cba3 --- /dev/null +++ b/packages/pipelines/pipeline-executor/package.json @@ -0,0 +1,55 @@ +{ + "name": "@ucdjs/pipelines-executor", + "version": "0.0.1", + "type": "module", + "author": { + "name": "Lucas Nørgård", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.27.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-executor" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "exports": { + ".": "./dist/index.mjs", + "./package.json": "./package.json" + }, + "types": "./dist/index.d.mts", + "files": [ + "dist" + ], + "engines": { + "node": ">=22.18" + }, + "scripts": { 
+ "build": "tsdown --tsconfig=./tsconfig.build.json", + "dev": "tsdown --watch", + "clean": "git clean -xdf dist node_modules", + "lint": "eslint .", + "typecheck": "tsc --noEmit -p tsconfig.build.json" + }, + "dependencies": { + "@ucdjs/pipelines-artifacts": "workspace:*", + "@ucdjs/pipelines-core": "workspace:*" + }, + "devDependencies": { + "@luxass/eslint-config": "catalog:linting", + "@ucdjs-tooling/tsconfig": "workspace:*", + "@ucdjs-tooling/tsdown-config": "workspace:*", + "eslint": "catalog:linting", + "publint": "catalog:build", + "tsdown": "catalog:build", + "typescript": "catalog:build" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/pipelines/pipeline-executor/src/cache.ts b/packages/pipelines/pipeline-executor/src/cache.ts new file mode 100644 index 000000000..c8bd1ec20 --- /dev/null +++ b/packages/pipelines/pipeline-executor/src/cache.ts @@ -0,0 +1,136 @@ +export interface CacheKey { + routeId: string; + version: string; + inputHash: string; + artifactHashes: Record; +} + +export interface CacheEntry { + key: CacheKey; + output: TOutput[]; + producedArtifacts: Record; + createdAt: string; + meta?: Record; +} + +export function serializeCacheKey(key: CacheKey): string { + const artifactHashStr = Object.entries(key.artifactHashes) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([id, hash]) => `${id}:${hash}`) + .join(","); + + return `${key.routeId}|${key.version}|${key.inputHash}|${artifactHashStr}`; +} + +export interface CacheStore { + get: (key: CacheKey) => Promise; + set: (entry: CacheEntry) => Promise; + has: (key: CacheKey) => Promise; + delete: (key: CacheKey) => Promise; + clear: () => Promise; + stats?: () => Promise; +} + +export interface CacheStats { + entries: number; + sizeBytes?: number; + hits?: number; + misses?: number; +} + +export interface CacheOptions { + enabled?: boolean; + hashFn?: (content: string) => string; +} + +export function createMemoryCacheStore(): CacheStore { + const cache = new Map(); + let hits = 0; + let misses = 0; + + return { + async get(key: CacheKey): Promise { + const serialized = serializeCacheKey(key); + const entry = cache.get(serialized); + if (entry) { + hits++; + } else { + misses++; + } + return entry; + }, + + async set(entry: CacheEntry): Promise { + const serialized = serializeCacheKey(entry.key); + cache.set(serialized, entry); + }, + + async has(key: CacheKey): Promise { + const serialized = serializeCacheKey(key); + return cache.has(serialized); + }, + + async delete(key: CacheKey): Promise { + const serialized = serializeCacheKey(key); + return cache.delete(serialized); + }, + + async clear(): Promise { + cache.clear(); + hits = 0; + misses = 0; + }, + + async stats(): Promise { + return { + entries: cache.size, + hits, + misses, + }; + }, + }; +} + +export function defaultHashFn(content: string): string { + let hash = 5381; + for (let i = 0; i < content.length; i++) { + hash = ((hash << 5) + hash) ^ content.charCodeAt(i); + } + return (hash >>> 0).toString(16).padStart(8, "0"); +} + +export function hashArtifact(value: unknown): string { + if (value === null || value === undefined) { + return "null"; + } + + if (typeof value === "string") { + return defaultHashFn(value); + } + + if (value instanceof Map) { + const entries = Array.from(value.entries()) + .sort(([a], [b]) => String(a).localeCompare(String(b))) + .map(([k, v]) => `${String(k)}=${String(v)}`) + .join(";"); + return defaultHashFn(entries); + } + + if (value instanceof Set) { + const entries = Array.from(value) + .map(String) 
+ .sort() + .join(";"); + return defaultHashFn(entries); + } + + if (Array.isArray(value)) { + return defaultHashFn(JSON.stringify(value)); + } + + if (typeof value === "object") { + return defaultHashFn(JSON.stringify(value)); + } + + return defaultHashFn(String(value)); +} diff --git a/packages/pipelines/pipeline-executor/src/executor.ts b/packages/pipelines/pipeline-executor/src/executor.ts new file mode 100644 index 000000000..5da7d938f --- /dev/null +++ b/packages/pipelines/pipeline-executor/src/executor.ts @@ -0,0 +1,925 @@ +import type { ArtifactDefinition, PipelineArtifactDefinition } from "@ucdjs/pipelines-artifacts"; +import type { + FileContext, + ParseContext, + ParsedRow, + PipelineDefinition, + PipelineEvent, + PipelineEventInput, + PipelineFilter, + PipelineGraphEdge, + PipelineGraphNode, + PipelineRouteDefinition, + RouteResolveContext, + SourceBackend, + SourceFileContext, +} from "@ucdjs/pipelines-core"; +import type { CacheEntry, CacheKey, CacheStore } from "./cache"; +import type { MultiplePipelineRunResult, PipelineRunResult, PipelineSummary } from "./results"; +import { isGlobalArtifact } from "@ucdjs/pipelines-artifacts"; +import { applyTransforms, getExecutionLayers, resolveMultipleSourceFiles } from "@ucdjs/pipelines-core"; +import { defaultHashFn, hashArtifact } from "./cache"; + +interface SourceAdapter { + listFiles: (version: string) => Promise; + readFile: (file: FileContext) => Promise; +} + +export interface PipelineExecutorOptions { + artifacts?: PipelineArtifactDefinition[]; + cacheStore?: CacheStore; + onEvent?: (event: PipelineEvent) => void | Promise; +} + +export interface PipelineExecutorRunOptions { + cache?: boolean; + versions?: string[]; +} + +export interface PipelineExecutor { + run: (pipelines: PipelineDefinition[], options?: PipelineExecutorRunOptions) => Promise; +} + +export function createPipelineExecutor(options: PipelineExecutorOptions): PipelineExecutor { + const { + artifacts: globalArtifacts = [], + cacheStore, + onEvent, + } = options; + + let eventCounter = 0; + function generateEventId(): string { + return `evt_${Date.now()}_${++eventCounter}`; + } + + async function emit(event: PipelineEventInput): Promise { + await onEvent?.({ + ...event, + id: event.id ?? generateEventId(), + }); + } + + async function runSinglePipeline( + pipeline: PipelineDefinition, + runOptions: Omit = {}, + ): Promise { + const { cache: enableCache = true, versions: runVersions } = runOptions; + const useCache = enableCache && cacheStore != null; + const versionsToRun = runVersions ?? 
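+    // A per-run `versions` override wins; otherwise fall back to the versions
+    // declared on the pipeline definition itself.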
pipeline.versions; + + const effectiveSource = createSourceAdapter(pipeline); + + const startTime = performance.now(); + const graphNodes: PipelineGraphNode[] = []; + const graphEdges: PipelineGraphEdge[] = []; + const allOutputs: unknown[] = []; + const errors: PipelineRunResult["errors"] = []; + + let totalFiles = 0; + let matchedFiles = 0; + let skippedFiles = 0; + let fallbackFiles = 0; + + const dag = pipeline.dag; + + await emit({ id: generateEventId(), type: "pipeline:start", versions: versionsToRun, timestamp: performance.now() }); + + for (const version of versionsToRun) { + const versionStartTime = performance.now(); + await emit({ id: generateEventId(), type: "version:start", version, timestamp: performance.now() }); + + const sourceNodeId = `source:${version}`; + graphNodes.push({ id: sourceNodeId, type: "source", version }); + + const artifactsMap: Record = {}; + const globalArtifactsMap: Record = {}; + + for (const artifactDef of globalArtifacts) { + const artifactStartTime = performance.now(); + await emit({ + id: generateEventId(), + type: "artifact:start", + artifactId: artifactDef.id, + version, + timestamp: performance.now(), + }); + + const artifactNodeId = `artifact:${version}:${artifactDef.id}`; + graphNodes.push({ id: artifactNodeId, type: "artifact", artifactId: artifactDef.id }); + graphEdges.push({ from: sourceNodeId, to: artifactNodeId, type: "provides" }); + + try { + let rows: AsyncIterable | undefined; + + if (artifactDef.filter && artifactDef.parser) { + const files = await effectiveSource.listFiles(version); + for (const file of files) { + if (artifactDef.filter({ file })) { + const parseCtx = createParseContext(file, effectiveSource); + rows = artifactDef.parser(parseCtx); + break; + } + } + } + + const value = await artifactDef.build({ version }, rows); + artifactsMap[artifactDef.id] = value; + } catch (err) { + const pipelineError = { + scope: "artifact" as const, + message: err instanceof Error ? err.message : String(err), + error: err, + artifactId: artifactDef.id, + version, + }; + errors.push(pipelineError); + await emit({ + id: generateEventId(), + type: "error", + error: pipelineError, + timestamp: performance.now(), + }); + } + + await emit({ + id: generateEventId(), + type: "artifact:end", + artifactId: artifactDef.id, + version, + durationMs: performance.now() - artifactStartTime, + timestamp: performance.now(), + }); + } + + const files = await effectiveSource.listFiles(version); + totalFiles += files.length; + + const filesToProcess = pipeline.include + ? 
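+      // The pipeline-level `include` filter narrows the file set before any
+      // route-level filters run (the non-null assertion is safe in this branch).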
files.filter((file) => pipeline.include!({ file })) + : files; + + const executionLayers = getExecutionLayers(dag); + + for (const layer of executionLayers) { + const processingQueue = createProcessingQueue(pipeline.concurrency); + const layerRoutes = pipeline.routes.filter((r) => layer.includes(r.id)); + + for (const route of layerRoutes) { + const matchingFiles = filesToProcess.filter((file) => { + const sourceFile = file as SourceFileContext; + const filterCtx = { + file, + source: sourceFile.source, + }; + return route.filter(filterCtx); + }); + + for (const file of matchingFiles) { + await processingQueue.add(async () => { + const fileNodeId = `file:${version}:${file.path}`; + if (!graphNodes.some((n) => n.id === fileNodeId)) { + graphNodes.push({ id: fileNodeId, type: "file", file }); + graphEdges.push({ from: sourceNodeId, to: fileNodeId, type: "provides" }); + } + + matchedFiles++; + const routeNodeId = `route:${version}:${route.id}`; + + if (!graphNodes.some((n) => n.id === routeNodeId)) { + graphNodes.push({ id: routeNodeId, type: "route", routeId: route.id }); + } + + graphEdges.push({ from: fileNodeId, to: routeNodeId, type: "matched" }); + + await emit({ + type: "file:matched", + file, + routeId: route.id, + timestamp: performance.now(), + }); + + try { + const routeCacheEnabled = useCache && route.cache !== false; + let result: ProcessRouteResult | null = null; + let cacheHit = false; + + if (routeCacheEnabled && cacheStore) { + const fileContent = await effectiveSource.readFile(file); + const inputHash = defaultHashFn(fileContent); + + const partialKey: CacheKey = { + routeId: route.id, + version, + inputHash, + artifactHashes: {}, + }; + + const cachedEntry = await cacheStore.get(partialKey); + + if (cachedEntry) { + const currentArtifactHashes: Record = {}; + for (const id of Object.keys(cachedEntry.key.artifactHashes)) { + const combinedMap = { ...artifactsMap, ...globalArtifactsMap }; + if (id in combinedMap) { + currentArtifactHashes[id] = hashArtifact(combinedMap[id]); + } + } + + const artifactHashesMatch = Object.keys(cachedEntry.key.artifactHashes).every( + (id) => currentArtifactHashes[id] === cachedEntry.key.artifactHashes[id], + ); + + if (artifactHashesMatch) { + cacheHit = true; + result = { + outputs: cachedEntry.output, + emittedArtifacts: cachedEntry.producedArtifacts, + consumedArtifactIds: Object.keys(cachedEntry.key.artifactHashes), + }; + + await emit({ + type: "cache:hit", + routeId: route.id, + file, + version, + timestamp: performance.now(), + }); + } + } + + if (!cacheHit) { + await emit({ + type: "cache:miss", + routeId: route.id, + file, + version, + timestamp: performance.now(), + }); + } + } + + if (!result) { + result = await processRoute( + file, + route, + { ...artifactsMap, ...globalArtifactsMap }, + effectiveSource, + version, + emit, + ); + + if (routeCacheEnabled && cacheStore) { + const fileContent = await effectiveSource.readFile(file); + const combinedMap = { ...artifactsMap, ...globalArtifactsMap }; + const cacheKey = await buildCacheKey( + route.id, + version, + fileContent, + combinedMap, + result.consumedArtifactIds, + ); + + const cacheEntry: CacheEntry = { + key: cacheKey, + output: result.outputs, + producedArtifacts: result.emittedArtifacts, + createdAt: new Date().toISOString(), + }; + + await cacheStore.set(cacheEntry); + + await emit({ + type: "cache:store", + routeId: route.id, + file, + version, + timestamp: performance.now(), + }); + } + } + + for (const [artifactName, artifactValue] of 
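+            // Emitted artifacts are keyed as "<routeId>:<name>"; isGlobalArtifact
+            // decides whether the value lands in the global or the regular map.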
Object.entries(result.emittedArtifacts)) { + const prefixedKey = `${route.id}:${artifactName}`; + const artifactDef = route.emits?.[artifactName]; + + if (artifactDef && isGlobalArtifact(artifactDef)) { + globalArtifactsMap[prefixedKey] = artifactValue; + } else { + artifactsMap[prefixedKey] = artifactValue; + } + } + + for (const output of result.outputs) { + const outputIndex = allOutputs.length; + allOutputs.push(output); + + const outputNodeId = `output:${version}:${outputIndex}`; + graphNodes.push({ + id: outputNodeId, + type: "output", + outputIndex, + property: (output as { property?: string }).property, + }); + graphEdges.push({ from: routeNodeId, to: outputNodeId, type: "resolved" }); + } + } catch (err) { + const pipelineError = { + scope: "route" as const, + message: err instanceof Error ? err.message : String(err), + error: err, + file, + routeId: route.id, + version, + }; + errors.push(pipelineError); + await emit({ + type: "error", + error: pipelineError, + timestamp: performance.now(), + }); + } + }); + } + } + + await processingQueue.drain(); + } + + const processedFiles = new Set(); + for (const route of pipeline.routes) { + for (const file of filesToProcess) { + const sourceFile = file as SourceFileContext; + const filterCtx = { file, source: sourceFile.source }; + if (route.filter(filterCtx)) { + processedFiles.add(file.path); + } + } + } + + for (const file of filesToProcess) { + if (processedFiles.has(file.path)) continue; + + if (pipeline.fallback) { + const fallback = pipeline.fallback as FallbackRouteDefinition; + const shouldUseFallback = !fallback.filter || fallback.filter({ file }); + + if (shouldUseFallback) { + fallbackFiles++; + + const fileNodeId = `file:${version}:${file.path}`; + if (!graphNodes.some((n) => n.id === fileNodeId)) { + graphNodes.push({ id: fileNodeId, type: "file", file }); + graphEdges.push({ from: sourceNodeId, to: fileNodeId, type: "provides" }); + } + + await emit({ + type: "file:fallback", + file, + timestamp: performance.now(), + }); + + try { + const outputs = await processFallback( + file, + fallback, + { ...artifactsMap, ...globalArtifactsMap }, + effectiveSource, + version, + emit, + ); + + for (const output of outputs) { + const outputIndex = allOutputs.length; + allOutputs.push(output); + + const outputNodeId = `output:${version}:${outputIndex}`; + graphNodes.push({ + id: outputNodeId, + type: "output", + outputIndex, + property: (output as { property?: string }).property, + }); + graphEdges.push({ from: fileNodeId, to: outputNodeId, type: "resolved" }); + } + } catch (err) { + const pipelineError = { + scope: "file" as const, + message: err instanceof Error ? 
err.message : String(err), + error: err, + file, + version, + }; + errors.push(pipelineError); + await emit({ + type: "error", + error: pipelineError, + timestamp: performance.now(), + }); + } + } else { + skippedFiles++; + await emit({ + type: "file:skipped", + file, + reason: "filtered", + timestamp: performance.now(), + }); + } + } else { + skippedFiles++; + + if (pipeline.strict) { + const pipelineError = { + scope: "file" as const, + message: `No matching route for file: ${file.path}`, + file, + version, + }; + errors.push(pipelineError); + await emit({ + type: "error", + error: pipelineError, + timestamp: performance.now(), + }); + } else { + await emit({ + type: "file:skipped", + file, + reason: "no-match", + timestamp: performance.now(), + }); + } + } + } + + await emit({ + type: "version:end", + version, + durationMs: performance.now() - versionStartTime, + timestamp: performance.now(), + }); + } + + const durationMs = performance.now() - startTime; + + await emit({ + type: "pipeline:end", + durationMs, + timestamp: performance.now(), + }); + + const summary: PipelineSummary = { + versions: versionsToRun, + totalFiles, + matchedFiles, + skippedFiles, + fallbackFiles, + totalOutputs: allOutputs.length, + durationMs, + }; + + return { + data: allOutputs, + graph: { nodes: graphNodes, edges: graphEdges }, + errors, + summary, + }; + } + + async function run(pipelinesToRun: PipelineDefinition[], runOptions: PipelineExecutorRunOptions = {}): Promise { + const startTime = performance.now(); + + const results = new Map(); + let successfulPipelines = 0; + let failedPipelines = 0; + + for (const pipeline of pipelinesToRun) { + try { + const result = await runSinglePipeline(pipeline, runOptions); + results.set(pipeline.id, result); + if (result.errors.length === 0) { + successfulPipelines++; + } else { + failedPipelines++; + } + } catch (err) { + failedPipelines++; + results.set(pipeline.id, { + data: [], + graph: { nodes: [], edges: [] }, + errors: [{ + scope: "pipeline", + message: err instanceof Error ? 
err.message : String(err), + error: err, + }], + summary: { + versions: pipeline.versions, + totalFiles: 0, + matchedFiles: 0, + skippedFiles: 0, + fallbackFiles: 0, + totalOutputs: 0, + durationMs: 0, + }, + }); + } + } + + return { + results, + summary: { + totalPipelines: pipelinesToRun.length, + successfulPipelines, + failedPipelines, + durationMs: performance.now() - startTime, + }, + }; + } + + return { + run, + }; +} + +function createSourceAdapter(pipeline: PipelineDefinition): SourceAdapter { + if (pipeline.inputs.length === 0) { + throw new Error("Pipeline requires at least one input source"); + } + + const backends = new Map(); + for (const input of pipeline.inputs) { + backends.set(input.id, input.backend); + } + + return { + listFiles: async (version: string) => { + return resolveMultipleSourceFiles(pipeline.inputs as any, version); + }, + readFile: async (file: FileContext) => { + const sourceFile = file as SourceFileContext; + if (sourceFile.source) { + const backend = backends.get(sourceFile.source.id); + if (backend) { + return backend.readFile(file); + } + } + const firstBackend = backends.values().next().value; + if (firstBackend) { + return firstBackend.readFile(file); + } + throw new Error(`No backend found for file: ${file.path}`); + }, + }; +} + +function createParseContext(file: FileContext, source: SourceAdapter): ParseContext { + let cachedContent: string | null = null; + + return { + file, + readContent: async () => { + if (cachedContent === null) { + cachedContent = await source.readFile(file); + } + return cachedContent!; + }, + async* readLines() { + const content = await source.readFile(file); + const lines = content.split(/\r?\n/); + for (const line of lines) { + yield line; + } + }, + isComment: (line: string) => line.startsWith("#") || line.trim() === "", + }; +} + +interface ResolveContextOptions { + version: string; + file: FileContext; + routeId: string; + artifactsMap: Record; + emittedArtifacts: Record; + emitsDefinition?: Record; + onArtifactEmit?: (id: string, value: unknown) => void; + onArtifactGet?: (id: string) => void; +} + +function createRouteResolveContext( + options: ResolveContextOptions, +): RouteResolveContext> { + const { version, file, routeId, artifactsMap, emittedArtifacts, emitsDefinition, onArtifactEmit, onArtifactGet } = options; + + return { + version, + file, + getArtifact: (key: K): unknown => { + if (!(key in artifactsMap)) { + throw new Error(`Artifact "${key}" not found. Make sure a route that produces this artifact runs before route "${routeId}".`); + } + onArtifactGet?.(key); + return artifactsMap[key]; + }, + emitArtifact: (id: K, value: unknown): void => { + if (emitsDefinition) { + const def = emitsDefinition[id]; + if (def) { + const result = def.schema.safeParse(value); + if (!result.success) { + throw new Error(`Artifact "${id}" validation failed: ${result.error.message}`); + } + } + } + emittedArtifacts[id] = value; + onArtifactEmit?.(id, value); + }, + normalizeEntries: (entries) => { + return entries.sort((a, b) => { + const aStart = a.range?.split("..")[0] ?? a.codePoint ?? ""; + const bStart = b.range?.split("..")[0] ?? b.codePoint ?? 
""; + return aStart.localeCompare(bStart); + }); + }, + now: () => new Date().toISOString(), + }; +} + +interface ProcessRouteResult { + outputs: unknown[]; + emittedArtifacts: Record; + consumedArtifactIds: string[]; +} + +async function processRoute( + file: FileContext, + route: PipelineRouteDefinition, + artifactsMap: Record, + source: SourceAdapter, + version: string, + emit: (event: any) => Promise, +): Promise { + const parseStartTime = performance.now(); + await emit({ + type: "parse:start", + file, + routeId: route.id, + timestamp: performance.now(), + }); + + const parseCtx = createParseContext(file, source); + let rows: AsyncIterable = route.parser(parseCtx); + + const collectedRows: ParsedRow[] = []; + const filteredRows = filterRows(rows as AsyncIterable, file, route.filter, collectedRows); + + if (route.transforms && route.transforms.length > 0) { + rows = applyTransforms( + { version, file }, + filteredRows, + route.transforms, + ); + } else { + rows = filteredRows; + } + + await emit({ + type: "parse:end", + file, + routeId: route.id, + rowCount: collectedRows.length, + durationMs: performance.now() - parseStartTime, + timestamp: performance.now(), + }); + + const resolveStartTime = performance.now(); + await emit({ + type: "resolve:start", + file, + routeId: route.id, + timestamp: performance.now(), + }); + + const emittedArtifacts: Record = {}; + const consumedArtifactIds: string[] = []; + + const resolveCtx = createRouteResolveContext({ + version, + file, + routeId: route.id, + artifactsMap, + emittedArtifacts, + emitsDefinition: route.emits, + onArtifactEmit: async (id) => { + await emit({ + type: "artifact:produced", + artifactId: `${route.id}:${id}`, + routeId: route.id, + version, + timestamp: performance.now(), + }); + }, + onArtifactGet: async (id) => { + if (!consumedArtifactIds.includes(id)) { + consumedArtifactIds.push(id); + await emit({ + type: "artifact:consumed", + artifactId: id, + routeId: route.id, + version, + timestamp: performance.now(), + }); + } + }, + }); + + const outputs = await route.resolver(resolveCtx, rows); + + const outputArray = Array.isArray(outputs) ? 
outputs : [outputs]; + + await emit({ + type: "resolve:end", + file, + routeId: route.id, + outputCount: outputArray.length, + durationMs: performance.now() - resolveStartTime, + timestamp: performance.now(), + }); + + return { outputs: outputArray, emittedArtifacts, consumedArtifactIds }; +} + +interface FallbackRouteDefinition = Record, TOutput = unknown> { + filter?: PipelineFilter; + parser: (ctx: ParseContext) => AsyncIterable; + resolver: (ctx: { version: string; file: FileContext; getArtifact: (id: K) => TArtifacts[K]; emitArtifact: (id: K, value: V) => void; normalizeEntries: (entries: any[]) => any[]; now: () => string }, rows: AsyncIterable) => Promise; +} + +async function processFallback( + file: FileContext, + fallback: FallbackRouteDefinition, + artifactsMap: Record, + source: SourceAdapter, + version: string, + emit: (event: any) => Promise, +): Promise { + const parseStartTime = performance.now(); + await emit({ + type: "parse:start", + file, + routeId: "__fallback__", + timestamp: performance.now(), + }); + + const parseCtx = createParseContext(file, source); + const rows = fallback.parser(parseCtx); + + const collectedRows: ParsedRow[] = []; + const filteredRows = filterRows(rows, file, fallback.filter, collectedRows); + + await emit({ + type: "parse:end", + file, + routeId: "__fallback__", + rowCount: collectedRows.length, + durationMs: performance.now() - parseStartTime, + timestamp: performance.now(), + }); + + const resolveStartTime = performance.now(); + await emit({ + type: "resolve:start", + file, + routeId: "__fallback__", + timestamp: performance.now(), + }); + + const emittedArtifacts: Record = {}; + + const resolveCtx = { + version, + file, + getArtifact: (id: K): unknown => { + if (!(id in artifactsMap)) { + throw new Error(`Artifact "${String(id)}" not found.`); + } + return artifactsMap[id]; + }, + emitArtifact: (id: K, value: V): void => { + emittedArtifacts[id] = value; + }, + normalizeEntries: (entries: any[]) => { + return entries.sort((a: any, b: any) => { + const aStart = a.range?.split("..")[0] ?? a.codePoint ?? ""; + const bStart = b.range?.split("..")[0] ?? b.codePoint ?? ""; + return aStart.localeCompare(bStart); + }); + }, + now: () => new Date().toISOString(), + }; + const outputs = await fallback.resolver(resolveCtx, filteredRows); + + const outputArray = Array.isArray(outputs) ? 
outputs : [outputs]; + + await emit({ + type: "resolve:end", + file, + routeId: "__fallback__", + outputCount: outputArray.length, + durationMs: performance.now() - resolveStartTime, + timestamp: performance.now(), + }); + + return outputArray; +} + +async function* filterRows( + rows: AsyncIterable, + file: FileContext, + filter: PipelineFilter | undefined, + collector: ParsedRow[], +): AsyncIterable { + for await (const row of rows) { + collector.push(row); + + if (!filter) { + yield row; + continue; + } + + const shouldInclude = filter({ + file, + row: { property: row.property }, + }); + + if (shouldInclude) { + yield row; + } + } +} + +async function buildCacheKey( + routeId: string, + version: string, + fileContent: string, + artifactsMap: Record, + consumedArtifactIds: string[], +): Promise { + const artifactHashes: Record = {}; + for (const id of consumedArtifactIds) { + if (id in artifactsMap) { + artifactHashes[id] = hashArtifact(artifactsMap[id]); + } + } + + return { + routeId, + version, + inputHash: defaultHashFn(fileContent), + artifactHashes, + }; +} + +interface ProcessingQueue { + add: (task: () => Promise) => Promise; + drain: () => Promise; +} + +function createProcessingQueue(concurrency: number): ProcessingQueue { + const queue: (() => Promise)[] = []; + let running = 0; + let resolveIdle: (() => void) | null = null; + + async function runNext(): Promise { + if (running >= concurrency || queue.length === 0) { + if (running === 0 && queue.length === 0 && resolveIdle) { + resolveIdle(); + } + return; + } + + running++; + const task = queue.shift()!; + + try { + await task(); + } finally { + running--; + runNext(); + } + } + + return { + add: async (task) => { + queue.push(task); + runNext(); + }, + drain: () => { + if (running === 0 && queue.length === 0) { + return Promise.resolve(); + } + return new Promise((resolve) => { + resolveIdle = resolve; + }); + }, + }; +} diff --git a/packages/pipelines/pipeline-executor/src/index.ts b/packages/pipelines/pipeline-executor/src/index.ts new file mode 100644 index 000000000..df4193bd5 --- /dev/null +++ b/packages/pipelines/pipeline-executor/src/index.ts @@ -0,0 +1,28 @@ +export type { + CacheEntry, + CacheKey, + CacheOptions, + CacheStats, + CacheStore, +} from "./cache"; + +export { + createMemoryCacheStore, + defaultHashFn, + hashArtifact, + serializeCacheKey, +} from "./cache"; + +export type { + PipelineExecutor, + PipelineExecutorOptions, + PipelineExecutorRunOptions, +} from "./executor"; + +export { createPipelineExecutor } from "./executor"; + +export type { + MultiplePipelineRunResult, + PipelineRunResult, + PipelineSummary, +} from "./results"; diff --git a/packages/pipelines/pipeline-executor/src/results.ts b/packages/pipelines/pipeline-executor/src/results.ts new file mode 100644 index 000000000..eb4ac6325 --- /dev/null +++ b/packages/pipelines/pipeline-executor/src/results.ts @@ -0,0 +1,28 @@ +import type { PipelineError, PipelineGraph } from "@ucdjs/pipelines-core"; + +export interface PipelineSummary { + versions: string[]; + totalFiles: number; + matchedFiles: number; + skippedFiles: number; + fallbackFiles: number; + totalOutputs: number; + durationMs: number; +} + +export interface PipelineRunResult { + data: TData[]; + graph: PipelineGraph; + errors: PipelineError[]; + summary: PipelineSummary; +} + +export interface MultiplePipelineRunResult { + results: Map>; + summary: { + totalPipelines: number; + successfulPipelines: number; + failedPipelines: number; + durationMs: number; + }; +} diff --git 
a/packages/pipelines/pipeline-executor/test/cache.test.ts b/packages/pipelines/pipeline-executor/test/cache.test.ts new file mode 100644 index 000000000..a8fca00bb --- /dev/null +++ b/packages/pipelines/pipeline-executor/test/cache.test.ts @@ -0,0 +1,484 @@ +import type { CacheEntry, CacheKey, CacheStore } from "../src/cache"; +import { beforeEach, describe, expect, it } from "vitest"; +import { + + createMemoryCacheStore, + defaultHashFn, + hashArtifact, + serializeCacheKey, +} from "../src/cache"; + +describe("serializeCacheKey", () => { + it("should serialize a simple cache key", () => { + const key: CacheKey = { + routeId: "my-route", + version: "16.0.0", + inputHash: "abc123", + artifactHashes: {}, + }; + + const serialized = serializeCacheKey(key); + + expect(serialized).toBe("my-route|16.0.0|abc123|"); + }); + + it("should serialize cache key with artifact hashes", () => { + const key: CacheKey = { + routeId: "my-route", + version: "16.0.0", + inputHash: "abc123", + artifactHashes: { + "route1:artifact1": "hash1", + "route2:artifact2": "hash2", + }, + }; + + const serialized = serializeCacheKey(key); + + expect(serialized).toContain("my-route|16.0.0|abc123|"); + expect(serialized).toContain("route1:artifact1:hash1"); + expect(serialized).toContain("route2:artifact2:hash2"); + }); + + it("should sort artifact hashes alphabetically", () => { + const key: CacheKey = { + routeId: "route", + version: "16.0.0", + inputHash: "hash", + artifactHashes: { + zebra: "z", + alpha: "a", + beta: "b", + }, + }; + + const serialized = serializeCacheKey(key); + + expect(serialized).toBe("route|16.0.0|hash|alpha:a,beta:b,zebra:z"); + }); + + it("should produce same result for same key", () => { + const key: CacheKey = { + routeId: "route", + version: "16.0.0", + inputHash: "hash", + artifactHashes: { a: "1", b: "2" }, + }; + + expect(serializeCacheKey(key)).toBe(serializeCacheKey(key)); + }); + + it("should produce different results for different keys", () => { + const key1: CacheKey = { + routeId: "route1", + version: "16.0.0", + inputHash: "hash", + artifactHashes: {}, + }; + const key2: CacheKey = { + routeId: "route2", + version: "16.0.0", + inputHash: "hash", + artifactHashes: {}, + }; + + expect(serializeCacheKey(key1)).not.toBe(serializeCacheKey(key2)); + }); +}); + +describe("defaultHashFn", () => { + it("should hash a string", () => { + const hash = defaultHashFn("hello world"); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); + + it("should produce same hash for same input", () => { + const input = "test string"; + + expect(defaultHashFn(input)).toBe(defaultHashFn(input)); + }); + + it("should produce different hashes for different inputs", () => { + expect(defaultHashFn("hello")).not.toBe(defaultHashFn("world")); + }); + + it("should handle empty string", () => { + const hash = defaultHashFn(""); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); + + it("should handle long strings", () => { + const longString = "a".repeat(10000); + const hash = defaultHashFn(longString); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); + + it("should handle unicode strings", () => { + const hash = defaultHashFn("こんにちは世界"); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); +}); + +describe("hashArtifact", () => { + it("should hash null as 'null'", () => { + expect(hashArtifact(null)).toBe("null"); + }); + + it("should hash undefined as 'null'", () => { + expect(hashArtifact(undefined)).toBe("null"); + 
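+    // null and undefined deliberately collapse to the same sentinel hash.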
}); + + it("should hash strings using defaultHashFn", () => { + const value = "test string"; + expect(hashArtifact(value)).toBe(defaultHashFn(value)); + }); + + it("should hash numbers", () => { + const hash = hashArtifact(42); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); + + it("should hash booleans", () => { + const trueHash = hashArtifact(true); + const falseHash = hashArtifact(false); + + expect(trueHash).not.toBe(falseHash); + }); + + it("should hash arrays", () => { + const hash = hashArtifact([1, 2, 3]); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); + + it("should hash objects", () => { + const hash = hashArtifact({ a: 1, b: 2 }); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); + + it("should hash Maps", () => { + const map = new Map([ + ["key1", "value1"], + ["key2", "value2"], + ]); + const hash = hashArtifact(map); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); + + it("should hash Sets", () => { + const set = new Set(["a", "b", "c"]); + const hash = hashArtifact(set); + + expect(typeof hash).toBe("string"); + expect(hash.length).toBe(8); + }); + + it("should produce same hash for equivalent objects", () => { + const obj1 = { a: 1, b: 2 }; + const obj2 = { a: 1, b: 2 }; + + expect(hashArtifact(obj1)).toBe(hashArtifact(obj2)); + }); + + it("should produce different hashes for different objects", () => { + const obj1 = { a: 1 }; + const obj2 = { a: 2 }; + + expect(hashArtifact(obj1)).not.toBe(hashArtifact(obj2)); + }); + + it("should sort Map entries for consistent hashing", () => { + const map1 = new Map([ + ["b", "2"], + ["a", "1"], + ]); + const map2 = new Map([ + ["a", "1"], + ["b", "2"], + ]); + + expect(hashArtifact(map1)).toBe(hashArtifact(map2)); + }); + + it("should sort Set entries for consistent hashing", () => { + const set1 = new Set(["c", "a", "b"]); + const set2 = new Set(["a", "b", "c"]); + + expect(hashArtifact(set1)).toBe(hashArtifact(set2)); + }); +}); + +describe("createMemoryCacheStore", () => { + let store: CacheStore; + + beforeEach(() => { + store = createMemoryCacheStore(); + }); + + function createCacheKey(overrides: Partial = {}): CacheKey { + return { + routeId: "test-route", + version: "16.0.0", + inputHash: "testhash", + artifactHashes: {}, + ...overrides, + }; + } + + function createCacheEntry(key: CacheKey, output: unknown[] = []): CacheEntry { + return { + key, + output, + producedArtifacts: {}, + createdAt: new Date().toISOString(), + }; + } + + describe("get", () => { + it("should return undefined for non-existent key", async () => { + const key = createCacheKey(); + const result = await store.get(key); + + expect(result).toBeUndefined(); + }); + + it("should return entry for existing key", async () => { + const key = createCacheKey(); + const entry = createCacheEntry(key, ["output"]); + + await store.set(entry); + const result = await store.get(key); + + expect(result).toEqual(entry); + }); + }); + + describe("set", () => { + it("should store an entry", async () => { + const key = createCacheKey(); + const entry = createCacheEntry(key, ["data"]); + + await store.set(entry); + const result = await store.get(key); + + expect(result).toEqual(entry); + }); + + it("should overwrite existing entry", async () => { + const key = createCacheKey(); + const entry1 = createCacheEntry(key, ["first"]); + const entry2 = createCacheEntry(key, ["second"]); + + await store.set(entry1); + await store.set(entry2); + const result = await 
store.get(key); + + expect(result?.output).toEqual(["second"]); + }); + + it("should store entries with different keys separately", async () => { + const key1 = createCacheKey({ routeId: "route1" }); + const key2 = createCacheKey({ routeId: "route2" }); + const entry1 = createCacheEntry(key1, ["data1"]); + const entry2 = createCacheEntry(key2, ["data2"]); + + await store.set(entry1); + await store.set(entry2); + + expect((await store.get(key1))?.output).toEqual(["data1"]); + expect((await store.get(key2))?.output).toEqual(["data2"]); + }); + }); + + describe("has", () => { + it("should return false for non-existent key", async () => { + const key = createCacheKey(); + + expect(await store.has(key)).toBe(false); + }); + + it("should return true for existing key", async () => { + const key = createCacheKey(); + const entry = createCacheEntry(key); + + await store.set(entry); + + expect(await store.has(key)).toBe(true); + }); + }); + + describe("delete", () => { + it("should return false for non-existent key", async () => { + const key = createCacheKey(); + + expect(await store.delete(key)).toBe(false); + }); + + it("should return true and remove existing key", async () => { + const key = createCacheKey(); + const entry = createCacheEntry(key); + + await store.set(entry); + const deleted = await store.delete(key); + + expect(deleted).toBe(true); + expect(await store.has(key)).toBe(false); + }); + }); + + describe("clear", () => { + it("should remove all entries", async () => { + const key1 = createCacheKey({ routeId: "route1" }); + const key2 = createCacheKey({ routeId: "route2" }); + + await store.set(createCacheEntry(key1)); + await store.set(createCacheEntry(key2)); + await store.clear(); + + expect(await store.has(key1)).toBe(false); + expect(await store.has(key2)).toBe(false); + }); + + it("should reset stats", async () => { + const key = createCacheKey(); + await store.set(createCacheEntry(key)); + await store.get(key); + await store.get(createCacheKey({ routeId: "nonexistent" })); + + await store.clear(); + + const stats = await store.stats?.(); + expect(stats?.entries).toBe(0); + expect(stats?.hits).toBe(0); + expect(stats?.misses).toBe(0); + }); + }); + + describe("stats", () => { + it("should track entry count", async () => { + const key1 = createCacheKey({ routeId: "route1" }); + const key2 = createCacheKey({ routeId: "route2" }); + + await store.set(createCacheEntry(key1)); + await store.set(createCacheEntry(key2)); + + const stats = await store.stats?.(); + expect(stats?.entries).toBe(2); + }); + + it("should track cache hits", async () => { + const key = createCacheKey(); + await store.set(createCacheEntry(key)); + + await store.get(key); + await store.get(key); + + const stats = await store.stats?.(); + expect(stats?.hits).toBe(2); + }); + + it("should track cache misses", async () => { + await store.get(createCacheKey({ routeId: "missing1" })); + await store.get(createCacheKey({ routeId: "missing2" })); + + const stats = await store.stats?.(); + expect(stats?.misses).toBe(2); + }); + + it("should track hits and misses together", async () => { + const existingKey = createCacheKey({ routeId: "existing" }); + await store.set(createCacheEntry(existingKey)); + + await store.get(existingKey); + await store.get(createCacheKey({ routeId: "missing" })); + await store.get(existingKey); + + const stats = await store.stats?.(); + expect(stats?.hits).toBe(2); + expect(stats?.misses).toBe(1); + }); + }); +}); + +describe("cache key matching", () => { + it("should match keys with same 
artifact hashes", async () => { + const store = createMemoryCacheStore(); + + const key: CacheKey = { + routeId: "route", + version: "16.0.0", + inputHash: "input", + artifactHashes: { + dep1: "hash1", + dep2: "hash2", + }, + }; + + await store.set({ + key, + output: ["result"], + producedArtifacts: {}, + createdAt: new Date().toISOString(), + }); + + const sameKey: CacheKey = { + routeId: "route", + version: "16.0.0", + inputHash: "input", + artifactHashes: { + dep1: "hash1", + dep2: "hash2", + }, + }; + + const result = await store.get(sameKey); + expect(result?.output).toEqual(["result"]); + }); + + it("should not match keys with different artifact hashes", async () => { + const store = createMemoryCacheStore(); + + const key: CacheKey = { + routeId: "route", + version: "16.0.0", + inputHash: "input", + artifactHashes: { + dep1: "hash1", + }, + }; + + await store.set({ + key, + output: ["result"], + producedArtifacts: {}, + createdAt: new Date().toISOString(), + }); + + const differentKey: CacheKey = { + routeId: "route", + version: "16.0.0", + inputHash: "input", + artifactHashes: { + dep1: "different-hash", + }, + }; + + const result = await store.get(differentKey); + expect(result).toBeUndefined(); + }); +}); diff --git a/packages/pipelines/pipeline-executor/test/executor.test.ts b/packages/pipelines/pipeline-executor/test/executor.test.ts new file mode 100644 index 000000000..4959cb78c --- /dev/null +++ b/packages/pipelines/pipeline-executor/test/executor.test.ts @@ -0,0 +1,929 @@ +import type { + FileContext, + ParseContext, + ParsedRow, + PipelineEvent, + PipelineRouteDefinition, + PipelineSourceDefinition, + SourceBackend, +} from "@ucdjs/pipelines-core"; +import type { CacheStore } from "../src/cache"; +import type { PipelineExecutor } from "../src/executor"; +import { definePipeline, definePipelineRoute, definePipelineSource } from "@ucdjs/pipelines-core"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { createMemoryCacheStore } from "../src/cache"; +import { createPipelineExecutor } from "../src/executor"; + +function createMockFile(name: string, dir: string = "ucd"): FileContext { + return { + version: "16.0.0", + dir, + path: `${dir}/${name}`, + name, + ext: name.includes(".") ? `.${name.split(".").pop()}` : "", + }; +} + +function createMockBackend(files: FileContext[], contents: Record = {}): SourceBackend { + return { + listFiles: vi.fn().mockResolvedValue(files), + readFile: vi.fn().mockImplementation((file: FileContext) => { + return Promise.resolve(contents[file.path] ?? 
""); + }), + }; +} + +async function* mockParser(ctx: ParseContext): AsyncIterable { + const content = await ctx.readContent(); + const lines = content.split("\n").filter((line) => !ctx.isComment(line)); + + for (const line of lines) { + const [codePoint, value] = line.split(";").map((s) => s.trim()); + if (codePoint && value) { + yield { + sourceFile: ctx.file.path, + kind: "point", + codePoint, + value, + }; + } + } +} + +function createTestRoute( + id: string, + filter: (ctx: { file: FileContext }) => boolean, +): PipelineRouteDefinition { + return definePipelineRoute({ + id, + filter, + parser: mockParser, + resolver: async (ctx, rows) => { + const entries: Array<{ codePoint: string; value: string }> = []; + for await (const row of rows as AsyncIterable) { + entries.push({ + codePoint: row.codePoint!, + value: row.value as string, + }); + } + return { + version: ctx.version, + file: ctx.file.name, + entries, + }; + }, + }); +} + +function createTestSource(files: FileContext[], contents: Record = {}): PipelineSourceDefinition { + return definePipelineSource({ + id: "test-source", + backend: createMockBackend(files, contents), + }); +} + +describe("createPipelineExecutor", () => { + it("should create an executor with run method", () => { + const executor = createPipelineExecutor({}); + + expect(executor).toHaveProperty("run"); + expect(typeof executor.run).toBe("function"); + }); + + it("should accept pipelines and optional artifacts", () => { + const pipeline = definePipeline({ + id: "test", + versions: ["16.0.0"], + inputs: [createTestSource([])], + routes: [], + }); + + const executor = createPipelineExecutor({ + artifacts: [], + }); + + expect(executor).toBeDefined(); + }); + + it("should accept optional cache store", () => { + const cacheStore = createMemoryCacheStore(); + + const executor = createPipelineExecutor({ + cacheStore, + }); + + expect(executor).toBeDefined(); + }); + + it("should accept optional event handler", () => { + const onEvent = vi.fn(); + + const executor = createPipelineExecutor({ + onEvent, + }); + + expect(executor).toBeDefined(); + }); +}); + +describe("executor.run", () => { + let executor: PipelineExecutor; + let files: FileContext[]; + let contents: Record; + let pipeline: ReturnType; + + beforeEach(() => { + files = [ + createMockFile("LineBreak.txt"), + createMockFile("Scripts.txt"), + ]; + + contents = { + "ucd/LineBreak.txt": "0041;AL\n0042;AL", + "ucd/Scripts.txt": "0041;Latin\n0042;Latin", + }; + + const source = createTestSource(files, contents); + const routes = [ + createTestRoute("line-break", (ctx) => ctx.file.name === "LineBreak.txt"), + createTestRoute("scripts", (ctx) => ctx.file.name === "Scripts.txt"), + ]; + + pipeline = definePipeline({ + id: "test-pipeline", + versions: ["16.0.0"], + inputs: [source], + routes, + }); + + executor = createPipelineExecutor({}); + }); + + it("should run all pipelines and return results", async () => { + const result = await executor.run([pipeline as any]); + + expect(result.results).toBeInstanceOf(Map); + expect(result.results.size).toBe(1); + expect(result.results.has("test-pipeline")).toBe(true); + }); + + it("should return summary with pipeline counts", async () => { + const result = await executor.run([pipeline as any]); + + expect(result.summary).toEqual({ + totalPipelines: 1, + successfulPipelines: 1, + failedPipelines: 0, + durationMs: expect.any(Number), + }); + }); + + it("should process files matching routes", async () => { + const result = await executor.run([pipeline as any]); + const 
pipelineResult = result.results.get("test-pipeline")!; + + expect(pipelineResult.data.length).toBe(2); + }); + + it("should run provided pipelines", async () => { + const result = await executor.run([pipeline as any]); + + expect(result.results.has("test-pipeline")).toBe(true); + }); + + it("should return empty results when no pipelines provided", async () => { + const result = await executor.run([]); + + expect(result.results.size).toBe(0); + }); + + it("should filter versions when specified", async () => { + const result = await executor.run([pipeline as any], { versions: ["16.0.0"] }); + const pipelineResult = result.results.get("test-pipeline")!; + + expect(pipelineResult.summary.versions).toEqual(["16.0.0"]); + }); +}); + +describe("running single pipeline via run()", () => { + let executor: PipelineExecutor; + let files: FileContext[]; + let contents: Record; + let pipeline: ReturnType; + + beforeEach(() => { + files = [createMockFile("LineBreak.txt")]; + contents = { "ucd/LineBreak.txt": "0041;AL" }; + + const source = createTestSource(files, contents); + const route = createTestRoute("line-break", (ctx) => ctx.file.name === "LineBreak.txt"); + + pipeline = definePipeline({ + id: "test-pipeline", + versions: ["16.0.0"], + inputs: [source], + routes: [route], + }); + + executor = createPipelineExecutor({}); + }); + + it("should run a single pipeline", async () => { + const multi = await executor.run([pipeline]); + const result = multi.results.get("test-pipeline")!; + + expect(result).toBeDefined(); + expect(result.data).toBeDefined(); + expect(result.graph).toBeDefined(); + expect(result.errors).toBeDefined(); + expect(result.summary).toBeDefined(); + }); + + it("should return pipeline run result", async () => { + const multi = await executor.run([pipeline]); + const result = multi.results.get("test-pipeline")!; + + expect(result.data.length).toBeGreaterThan(0); + expect(result.errors).toEqual([]); + }); + + it("should accept version filter", async () => { + const multi = await executor.run([pipeline], { versions: ["16.0.0"] }); + const result = multi.results.get("test-pipeline")!; + + expect(result.summary.versions).toEqual(["16.0.0"]); + }); + + it("should respect cache option", async () => { + const cacheStore = createMemoryCacheStore(); + + const cachedPipeline = definePipeline({ + id: "cached-pipeline", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("line-break", (ctx) => ctx.file.name === "LineBreak.txt")], + }); + + const ex = createPipelineExecutor({ cacheStore }); + + await ex.run([cachedPipeline], { cache: true }); + const stats = await cacheStore.stats?.(); + + expect(stats?.entries).toBeGreaterThanOrEqual(0); + }); + + it("should return empty results for unknown pipeline", async () => { + const multi = await executor.run([]); + expect(multi.results.size).toBe(0); + }); +}); + +describe("pipeline events", () => { + it("should emit pipeline:start and pipeline:end events", async () => { + const events: PipelineEvent[] = []; + const onEvent = vi.fn((event: PipelineEvent) => { + events.push(event); + }); + + const pipeline = definePipeline({ + id: "event-test", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({ onEvent }); + + await executor.run([pipeline as any]); + + const eventTypes = events.map((e) => e.type); + expect(eventTypes).toContain("pipeline:start"); + 
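+    // Presence, not ordering, is asserted here: a run may interleave other
+    // events between the lifecycle bookends.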
expect(eventTypes).toContain("pipeline:end"); + }); + + it("should emit version:start and version:end events", async () => { + const events: PipelineEvent[] = []; + + const pipeline = definePipeline({ + id: "version-events", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({ + onEvent: (event) => { events.push(event); return undefined; }, + }); + + await executor.run([pipeline as any]); + + const eventTypes = events.map((e) => e.type); + expect(eventTypes).toContain("version:start"); + expect(eventTypes).toContain("version:end"); + }); + + it("should emit parse and resolve events", async () => { + const events: PipelineEvent[] = []; + + const pipeline = definePipeline({ + id: "parse-events", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({ + onEvent: (event) => { events.push(event); return undefined; }, + }); + + await executor.run([pipeline as any]); + + const eventTypes = events.map((e) => e.type); + expect(eventTypes).toContain("parse:start"); + expect(eventTypes).toContain("parse:end"); + expect(eventTypes).toContain("resolve:start"); + expect(eventTypes).toContain("resolve:end"); + }); + + it("should emit file:matched event when file matches route", async () => { + const events: PipelineEvent[] = []; + + const pipeline = definePipeline({ + id: "file-matched", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({ + onEvent: (event) => { events.push(event); return undefined; }, + }); + + await executor.run([pipeline as any]); + + const matchedEvents = events.filter((e) => e.type === "file:matched"); + expect(matchedEvents.length).toBeGreaterThan(0); + }); +}); + +describe("pipeline graph", () => { + it("should build graph with source nodes", async () => { + const pipeline = definePipeline({ + id: "graph-test", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("graph-test")!; + + const sourceNodes = result.graph.nodes.filter((n) => n.type === "source"); + expect(sourceNodes.length).toBe(1); + }); + + it("should build graph with file nodes", async () => { + const pipeline = definePipeline({ + id: "graph-files", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("graph-files")!; + + const fileNodes = result.graph.nodes.filter((n) => n.type === "file"); + expect(fileNodes.length).toBe(1); + }); + + it("should build graph with route nodes", async () => { + const pipeline = definePipeline({ + id: "graph-routes", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = 
createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("graph-routes")!; + + const routeNodes = result.graph.nodes.filter((n) => n.type === "route"); + expect(routeNodes.length).toBe(1); + }); + + it("should build graph with output nodes", async () => { + const pipeline = definePipeline({ + id: "graph-outputs", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("graph-outputs")!; + + const outputNodes = result.graph.nodes.filter((n) => n.type === "output"); + expect(outputNodes.length).toBeGreaterThan(0); + }); + + it("should build graph with edges", async () => { + const pipeline = definePipeline({ + id: "graph-edges", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("graph-edges")!; + + expect(result.graph.edges.length).toBeGreaterThan(0); + }); +}); + +describe("pipeline summary", () => { + it("should track total files", async () => { + const files = [ + createMockFile("File1.txt"), + createMockFile("File2.txt"), + createMockFile("File3.txt"), + ]; + const contents = { + "ucd/File1.txt": "0041;A", + "ucd/File2.txt": "0042;B", + "ucd/File3.txt": "0043;C", + }; + + const pipeline = definePipeline({ + id: "summary-total", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("all", () => true)], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("summary-total")!; + + expect(result.summary.totalFiles).toBe(3); + }); + + it("should track matched files", async () => { + const files = [ + createMockFile("Match1.txt"), + createMockFile("Match2.txt"), + createMockFile("NoMatch.txt"), + ]; + const contents = { + "ucd/Match1.txt": "0041;A", + "ucd/Match2.txt": "0042;B", + "ucd/NoMatch.txt": "0043;C", + }; + + const pipeline = definePipeline({ + id: "summary-matched", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("match", (ctx) => ctx.file.name.startsWith("Match"))], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("summary-matched")!; + + expect(result.summary.matchedFiles).toBe(2); + }); + + it("should track skipped files", async () => { + const files = [ + createMockFile("Process.txt"), + createMockFile("Skip.txt"), + ]; + const contents = { + "ucd/Process.txt": "0041;A", + "ucd/Skip.txt": "0042;B", + }; + + const pipeline = definePipeline({ + id: "summary-skipped", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("process", (ctx) => ctx.file.name === "Process.txt")], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("summary-skipped")!; + + expect(result.summary.skippedFiles).toBe(1); + }); + + it("should track duration", async () => { + const pipeline = definePipeline({ + id: "summary-duration", + versions: ["16.0.0"], + inputs: 
[createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("summary-duration")!; + + expect(result.summary.durationMs).toBeGreaterThanOrEqual(0); + }); + + it("should track total outputs", async () => { + const files = [ + createMockFile("File1.txt"), + createMockFile("File2.txt"), + ]; + const contents = { + "ucd/File1.txt": "0041;A", + "ucd/File2.txt": "0042;B", + }; + + const pipeline = definePipeline({ + id: "summary-outputs", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("all", () => true)], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("summary-outputs")!; + + expect(result.summary.totalOutputs).toBe(2); + }); +}); + +describe("error handling", () => { + it("should capture route errors without stopping execution", async () => { + const failingRoute = definePipelineRoute({ + id: "failing", + filter: () => true, + parser: mockParser, + resolver: async () => { + throw new Error("Route failed"); + }, + }); + + const pipeline = definePipeline({ + id: "error-test", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [failingRoute], + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("error-test")!; + + expect(result.errors.length).toBeGreaterThan(0); + const firstError = result.errors[0]!; + expect(firstError.scope).toBe("route"); + expect(firstError.message).toContain("Route failed"); + }); + + it("should emit error events", async () => { + const events: PipelineEvent[] = []; + + const failingRoute = definePipelineRoute({ + id: "failing", + filter: () => true, + parser: mockParser, + resolver: async () => { + throw new Error("Test error"); + }, + }); + + const pipeline = definePipeline({ + id: "error-events", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [failingRoute], + }); + + const executor = createPipelineExecutor({ + onEvent: (event) => { events.push(event); return undefined; }, + }); + + await executor.run([pipeline]); + + const errorEvents = events.filter((e) => e.type === "error"); + expect(errorEvents.length).toBeGreaterThan(0); + }); + + it("should handle pipeline without inputs gracefully", async () => { + const pipeline = definePipeline({ + id: "no-inputs", + versions: ["16.0.0"], + inputs: [], + routes: [], + }); + + const executor = createPipelineExecutor({}); + + const multi = await executor.run([pipeline as any]); + const result = multi.results.get("no-inputs")!; + expect(result.errors.length).toBeGreaterThan(0); + const firstError = result.errors[0]!; + expect(firstError.message).toContain("Pipeline requires at least one input source"); + }); +}); + +describe("caching", () => { + let cacheStore: CacheStore; + + beforeEach(() => { + cacheStore = createMemoryCacheStore(); + }); + + it("should use cache when enabled", async () => { + const events: PipelineEvent[] = []; + + const pipeline = definePipeline({ + id: "cache-test", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + 
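+ // with caching enabled, this run should emit at least one cache lifecycle event (cache:hit, cache:miss, or cache:store)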
const executor = createPipelineExecutor({ + cacheStore, + onEvent: (event) => { events.push(event); return undefined; }, + }); + + await executor.run([pipeline], { cache: true }); + + const cacheEvents = events.filter((e) => + e.type === "cache:hit" || e.type === "cache:miss" || e.type === "cache:store", + ); + expect(cacheEvents.length).toBeGreaterThan(0); + }); + + it("should hit cache on second run", async () => { + const events: PipelineEvent[] = []; + + const pipeline = definePipeline({ + id: "cache-hit-test", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({ + cacheStore, + onEvent: (event) => { events.push(event); return undefined; }, + }); + + await executor.run([pipeline], { cache: true }); + + events.length = 0; + + await executor.run([pipeline], { cache: true }); + + const hitEvents = events.filter((e) => e.type === "cache:hit"); + expect(hitEvents.length).toBeGreaterThan(0); + }); + + it("should skip cache when disabled", async () => { + const events: PipelineEvent[] = []; + + const pipeline = definePipeline({ + id: "cache-disabled", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Test.txt")], { "ucd/Test.txt": "0041;A" })], + routes: [createTestRoute("test", () => true)], + }); + + const executor = createPipelineExecutor({ + cacheStore, + onEvent: (event) => { events.push(event); return undefined; }, + }); + + await executor.run([pipeline as any], { cache: false }); + + const cacheEvents = events.filter((e) => + e.type === "cache:hit" || e.type === "cache:miss" || e.type === "cache:store", + ); + expect(cacheEvents).toEqual([]); + }); +}); + +describe("multiple pipelines", () => { + it("should run multiple pipelines", async () => { + const files = [createMockFile("Test.txt")]; + const contents = { "ucd/Test.txt": "0041;A" }; + + const pipeline1 = definePipeline({ + id: "pipeline-1", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("route-1", () => true)], + }); + + const pipeline2 = definePipeline({ + id: "pipeline-2", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("route-2", () => true)], + }); + + const executor = createPipelineExecutor({}); + + const result = await executor.run([pipeline1 as any, pipeline2 as any]); + + expect(result.results.size).toBe(2); + expect(result.results.has("pipeline-1")).toBe(true); + expect(result.results.has("pipeline-2")).toBe(true); + }); + + it("should track successful and failed pipelines", async () => { + const files = [createMockFile("Test.txt")]; + const contents = { "ucd/Test.txt": "0041;A" }; + + const successPipeline = definePipeline({ + id: "success", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("ok", () => true)], + }); + + const failPipeline = definePipeline({ + id: "fail", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [ + definePipelineRoute({ + id: "fail-route", + filter: () => true, + parser: mockParser, + resolver: async () => { + throw new Error("Intentional failure"); + }, + }), + ], + }); + + const executor = createPipelineExecutor({}); + + const result = await executor.run([successPipeline as any, failPipeline as any]); + + expect(result.summary.successfulPipelines).toBe(1); + expect(result.summary.failedPipelines).toBe(1); + }); +}); + +describe("strict 
mode", () => { + it("should error on unmatched files in strict mode", async () => { + const files = [ + createMockFile("Matched.txt"), + createMockFile("Unmatched.txt"), + ]; + const contents = { + "ucd/Matched.txt": "0041;A", + "ucd/Unmatched.txt": "0042;B", + }; + + const pipeline = definePipeline({ + id: "strict-test", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("matched", (ctx) => ctx.file.name === "Matched.txt")], + strict: true, + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("strict-test")!; + + const fileErrors = result.errors.filter((e) => e.scope === "file"); + expect(fileErrors.length).toBe(1); + const firstFileError = fileErrors[0]!; + expect(firstFileError.message).toContain("No matching route"); + }); + + it("should not error on unmatched files when not strict", async () => { + const files = [ + createMockFile("Matched.txt"), + createMockFile("Unmatched.txt"), + ]; + const contents = { + "ucd/Matched.txt": "0041;A", + "ucd/Unmatched.txt": "0042;B", + }; + + const pipeline = definePipeline({ + id: "non-strict-test", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("matched", (ctx) => ctx.file.name === "Matched.txt")], + strict: false, + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("non-strict-test")!; + + const fileErrors = result.errors.filter((e) => e.scope === "file"); + expect(fileErrors).toEqual([]); + }); +}); + +describe("fallback route", () => { + it("should use fallback for unmatched files", async () => { + const files = [ + createMockFile("Matched.txt"), + createMockFile("Fallback.txt"), + ]; + const contents = { + "ucd/Matched.txt": "0041;A", + "ucd/Fallback.txt": "0042;B", + }; + + const pipeline = definePipeline({ + id: "fallback-test", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("matched", (ctx) => ctx.file.name === "Matched.txt")], + fallback: { + parser: mockParser, + resolver: async (ctx: any, rows: AsyncIterable) => { + const entries: Array<{ codePoint: string; value: string }> = []; + for await (const row of rows) { + entries.push({ codePoint: row.codePoint!, value: row.value as string }); + } + return { type: "fallback", file: ctx.file.name, entries }; + }, + }, + }) as any; + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("fallback-test")!; + + expect(result.summary.fallbackFiles).toBe(1); + expect(result.data.length).toBe(2); + + const fallbackOutput = result.data.find((d: any) => d.type === "fallback"); + expect(fallbackOutput).toBeDefined(); + }); + + it("should emit file:fallback event", async () => { + const events: PipelineEvent[] = []; + + const pipeline = definePipeline({ + id: "fallback-event", + versions: ["16.0.0"], + inputs: [createTestSource([createMockFile("Unmatched.txt")], { "ucd/Unmatched.txt": "0041;A" })], + routes: [], + fallback: { + parser: mockParser, + resolver: async () => ({ fallback: true }), + }, + }) as any; + + const executor = createPipelineExecutor({ onEvent: (event) => { events.push(event); return undefined; } }); + + await executor.run([pipeline as any]); + + const fallbackEvents = events.filter((e) => e.type === "file:fallback"); + expect(fallbackEvents.length).toBe(1); + }); +}); + +describe("include 
filter", () => { + it("should only process files matching include filter", async () => { + const files = [ + createMockFile("Include.txt"), + createMockFile("Exclude.txt"), + ]; + const contents = { + "ucd/Include.txt": "0041;A", + "ucd/Exclude.txt": "0042;B", + }; + + const pipeline = definePipeline({ + id: "include-test", + versions: ["16.0.0"], + inputs: [createTestSource(files, contents)], + routes: [createTestRoute("all", () => true)], + include: (ctx) => ctx.file.name.startsWith("Include"), + }); + + const executor = createPipelineExecutor({}); + const multi = await executor.run([pipeline]); + const result = multi.results.get("include-test")!; + + expect(result.summary.matchedFiles).toBe(1); + expect(result.data.length).toBe(1); + }); +}); diff --git a/packages/pipelines/pipeline-executor/tsconfig.build.json b/packages/pipelines/pipeline-executor/tsconfig.build.json new file mode 100644 index 000000000..36c889e0c --- /dev/null +++ b/packages/pipelines/pipeline-executor/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "test"] +} diff --git a/packages/pipelines/pipeline-executor/tsconfig.json b/packages/pipelines/pipeline-executor/tsconfig.json new file mode 100644 index 000000000..07edf31d8 --- /dev/null +++ b/packages/pipelines/pipeline-executor/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + "include": [ + "src", + "test", + "playgrounds" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-executor/tsdown.config.ts b/packages/pipelines/pipeline-executor/tsdown.config.ts new file mode 100644 index 000000000..dee0149e6 --- /dev/null +++ b/packages/pipelines/pipeline-executor/tsdown.config.ts @@ -0,0 +1,7 @@ +import { createTsdownConfig } from "@ucdjs-tooling/tsdown-config"; + +export default createTsdownConfig({ + entry: [ + "./src/index.ts", + ], +}); diff --git a/packages/pipelines/pipeline-graph/README.md b/packages/pipelines/pipeline-graph/README.md new file mode 100644 index 000000000..b19a872fe --- /dev/null +++ b/packages/pipelines/pipeline-graph/README.md @@ -0,0 +1,27 @@ +# @ucdjs/pipelines-graph + +[![npm version][npm-version-src]][npm-version-href] +[![npm downloads][npm-downloads-src]][npm-downloads-href] +[![codecov][codecov-src]][codecov-href] + +> [!IMPORTANT] +> This is an internal package. It may change without warning and is not subject to semantic versioning. Use at your own risk. + +A collection of core pipeline functionalities for the UCD project. + +## Installation + +```bash +npm install @ucdjs/pipelines-graph +``` + +## 📄 License + +Published under [MIT License](./LICENSE). 
+ +[npm-version-src]: https://img.shields.io/npm/v/@ucdjs/pipelines-graph?style=flat&colorA=18181B&colorB=4169E1 +[npm-version-href]: https://npmjs.com/package/@ucdjs/pipelines-graph +[npm-downloads-src]: https://img.shields.io/npm/dm/@ucdjs/pipelines-graph?style=flat&colorA=18181B&colorB=4169E1 +[npm-downloads-href]: https://npmjs.com/package/@ucdjs/pipelines-graph +[codecov-src]: https://img.shields.io/codecov/c/gh/ucdjs/ucd?style=flat&colorA=18181B&colorB=4169E1 +[codecov-href]: https://codecov.io/gh/ucdjs/ucd diff --git a/packages/pipelines/pipeline-graph/eslint.config.js b/packages/pipelines/pipeline-graph/eslint.config.js new file mode 100644 index 000000000..d9c0ca1ec --- /dev/null +++ b/packages/pipelines/pipeline-graph/eslint.config.js @@ -0,0 +1,7 @@ +// @ts-check +import { luxass } from "@luxass/eslint-config"; + +export default luxass({ + type: "lib", + pnpm: true, +}); diff --git a/packages/pipelines/pipeline-graph/package.json b/packages/pipelines/pipeline-graph/package.json new file mode 100644 index 000000000..52e5b7149 --- /dev/null +++ b/packages/pipelines/pipeline-graph/package.json @@ -0,0 +1,54 @@ +{ + "name": "@ucdjs/pipelines-graph", + "version": "0.0.1", + "type": "module", + "author": { + "name": "Lucas Nørgård", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.27.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-graph" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "exports": { + ".": "./dist/index.mjs", + "./package.json": "./package.json" + }, + "types": "./dist/index.d.mts", + "files": [ + "dist" + ], + "engines": { + "node": ">=22.18" + }, + "scripts": { + "build": "tsdown --tsconfig=./tsconfig.build.json", + "dev": "tsdown --watch", + "clean": "git clean -xdf dist node_modules", + "lint": "eslint .", + "typecheck": "tsc --noEmit -p tsconfig.build.json" + }, + "dependencies": { + "@ucdjs/pipelines-core": "workspace:*" + }, + "devDependencies": { + "@luxass/eslint-config": "catalog:linting", + "@ucdjs-tooling/tsconfig": "workspace:*", + "@ucdjs-tooling/tsdown-config": "workspace:*", + "eslint": "catalog:linting", + "publint": "catalog:build", + "tsdown": "catalog:build", + "typescript": "catalog:build" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/pipelines/pipeline-graph/src/builder.ts b/packages/pipelines/pipeline-graph/src/builder.ts new file mode 100644 index 000000000..d63a8c053 --- /dev/null +++ b/packages/pipelines/pipeline-graph/src/builder.ts @@ -0,0 +1,106 @@ +import type { + DAG, + FileContext, + PipelineDefinition, + PipelineGraph, + PipelineGraphEdge, + PipelineGraphNode, +} from "@ucdjs/pipelines-core"; + +export interface GraphBuilderOptions { + includeArtifacts?: boolean; +} + +export class PipelineGraphBuilder { + private nodes: Map<string, PipelineGraphNode> = new Map(); + private edges: PipelineGraphEdge[] = []; + + addSourceNode(version: string): string { + const id = `source:${version}`; + if (!this.nodes.has(id)) { + this.nodes.set(id, { id, type: "source", version }); + } + return id; + } + + addFileNode(file: FileContext): string { + const id = `file:${file.version}:${file.path}`; + if (!this.nodes.has(id)) { + this.nodes.set(id, { id, type: "file", file }); + } + return id; + } + + addRouteNode(routeId: string, version: string): string { + const id = `route:${version}:${routeId}`; + if (!this.nodes.has(id)) { + 
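+ // same guard as the other add*Node helpers: repeated adds with the same id are idempotent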
this.nodes.set(id, { id, type: "route", routeId }); + } + return id; + } + + addArtifactNode(artifactId: string, version: string): string { + const id = `artifact:${version}:${artifactId}`; + if (!this.nodes.has(id)) { + this.nodes.set(id, { id, type: "artifact", artifactId }); + } + return id; + } + + addOutputNode(outputIndex: number, version: string, property?: string): string { + const id = `output:${version}:${outputIndex}`; + if (!this.nodes.has(id)) { + this.nodes.set(id, { id, type: "output", outputIndex, property }); + } + return id; + } + + addEdge(from: string, to: string, type: PipelineGraphEdge["type"]): void { + const exists = this.edges.some((e) => e.from === from && e.to === to && e.type === type); + if (!exists) { + this.edges.push({ from, to, type }); + } + } + + build(): PipelineGraph { + return { + nodes: Array.from(this.nodes.values()), + edges: [...this.edges], + }; + } + + clear(): void { + this.nodes.clear(); + this.edges.length = 0; + } +} + +export function buildRouteGraph( + pipeline: PipelineDefinition, + dag: DAG, +): PipelineGraph { + const builder = new PipelineGraphBuilder(); + + for (const route of pipeline.routes) { + const routeNode = dag.nodes.get(route.id); + if (!routeNode) continue; + + builder.addRouteNode(route.id, "static"); + + for (const depId of routeNode.dependencies) { + builder.addRouteNode(depId, "static"); + builder.addEdge(`route:static:${depId}`, `route:static:${route.id}`, "provides"); + } + + for (const artifactId of routeNode.emittedArtifacts) { + builder.addArtifactNode(artifactId, "static"); + builder.addEdge(`route:static:${route.id}`, `artifact:static:${artifactId}`, "resolved"); + } + } + + return builder.build(); +} + +export function createPipelineGraphBuilder(): PipelineGraphBuilder { + return new PipelineGraphBuilder(); +} diff --git a/packages/pipelines/pipeline-graph/src/index.ts b/packages/pipelines/pipeline-graph/src/index.ts new file mode 100644 index 000000000..3f4f3058a --- /dev/null +++ b/packages/pipelines/pipeline-graph/src/index.ts @@ -0,0 +1,9 @@ +export type { + GraphBuilderOptions, +} from "./builder"; + +export { + buildRouteGraph, + createPipelineGraphBuilder, + PipelineGraphBuilder, +} from "./builder"; diff --git a/packages/pipelines/pipeline-graph/tsconfig.build.json b/packages/pipelines/pipeline-graph/tsconfig.build.json new file mode 100644 index 000000000..36c889e0c --- /dev/null +++ b/packages/pipelines/pipeline-graph/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "test"] +} diff --git a/packages/pipelines/pipeline-graph/tsconfig.json b/packages/pipelines/pipeline-graph/tsconfig.json new file mode 100644 index 000000000..07edf31d8 --- /dev/null +++ b/packages/pipelines/pipeline-graph/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + "include": [ + "src", + "test", + "playgrounds" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-graph/tsdown.config.ts b/packages/pipelines/pipeline-graph/tsdown.config.ts new file mode 100644 index 000000000..dee0149e6 --- /dev/null +++ b/packages/pipelines/pipeline-graph/tsdown.config.ts @@ -0,0 +1,7 @@ +import { createTsdownConfig } from "@ucdjs-tooling/tsdown-config"; + +export default createTsdownConfig({ + entry: [ + "./src/index.ts", + ], +}); diff --git a/packages/pipelines/pipeline-loader/README.md b/packages/pipelines/pipeline-loader/README.md new file mode 100644 index 000000000..5f1710618 --- /dev/null +++ 
b/packages/pipelines/pipeline-loader/README.md @@ -0,0 +1,27 @@ +# @ucdjs/pipelines-loader + +[![npm version][npm-version-src]][npm-version-href] +[![npm downloads][npm-downloads-src]][npm-downloads-href] +[![codecov][codecov-src]][codecov-href] + +> [!IMPORTANT] +> This is an internal package. It may change without warning and is not subject to semantic versioning. Use at your own risk. + +A collection of core pipeline functionalities for the UCD project. + +## Installation + +```bash +npm install @ucdjs/pipelines-loader +``` + +## 📄 License + +Published under [MIT License](./LICENSE). + +[npm-version-src]: https://img.shields.io/npm/v/@ucdjs/pipelines-loader?style=flat&colorA=18181B&colorB=4169E1 +[npm-version-href]: https://npmjs.com/package/@ucdjs/pipelines-loader +[npm-downloads-src]: https://img.shields.io/npm/dm/@ucdjs/pipelines-loader?style=flat&colorA=18181B&colorB=4169E1 +[npm-downloads-href]: https://npmjs.com/package/@ucdjs/pipelines-loader +[codecov-src]: https://img.shields.io/codecov/c/gh/ucdjs/ucd?style=flat&colorA=18181B&colorB=4169E1 +[codecov-href]: https://codecov.io/gh/ucdjs/ucd diff --git a/packages/pipelines/pipeline-loader/eslint.config.js b/packages/pipelines/pipeline-loader/eslint.config.js new file mode 100644 index 000000000..d9c0ca1ec --- /dev/null +++ b/packages/pipelines/pipeline-loader/eslint.config.js @@ -0,0 +1,7 @@ +// @ts-check +import { luxass } from "@luxass/eslint-config"; + +export default luxass({ + type: "lib", + pnpm: true, +}); diff --git a/packages/pipelines/pipeline-loader/package.json b/packages/pipelines/pipeline-loader/package.json new file mode 100644 index 000000000..9967515a4 --- /dev/null +++ b/packages/pipelines/pipeline-loader/package.json @@ -0,0 +1,60 @@ +{ + "name": "@ucdjs/pipelines-loader", + "version": "0.0.1", + "type": "module", + "author": { + "name": "Lucas Nørgård", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.27.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-loader" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "exports": { + ".": "./dist/index.mjs", + "./insecure": "./dist/insecure.mjs", + "./remote": "./dist/remote.mjs", + "./package.json": "./package.json" + }, + "types": "./dist/index.d.mts", + "files": [ + "dist" + ], + "engines": { + "node": ">=22.18" + }, + "scripts": { + "build": "tsdown --tsconfig=./tsconfig.build.json", + "dev": "tsdown --watch", + "clean": "git clean -xdf dist node_modules", + "lint": "eslint .", + "typecheck": "tsc --noEmit -p tsconfig.build.json" + }, + "dependencies": { + "@ucdjs/pipelines-core": "workspace:*", + "oxc-transform": "catalog:prod", + "rolldown": "catalog:build", + "tinyglobby": "catalog:prod" + }, + "devDependencies": { + "@luxass/eslint-config": "catalog:linting", + "@ucdjs-tooling/tsconfig": "workspace:*", + "@ucdjs-tooling/tsdown-config": "workspace:*", + "eslint": "catalog:linting", + "publint": "catalog:build", + "tsdown": "catalog:build", + "typescript": "catalog:build", + "vitest-testdirs": "catalog:testing" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/pipelines/pipeline-loader/src/index.ts b/packages/pipelines/pipeline-loader/src/index.ts new file mode 100644 index 000000000..3bdc3e4b0 --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/index.ts @@ -0,0 +1,25 @@ +export { + findPipelineFiles, + 
loadPipelineFile, + loadPipelinesFromPaths, +} from "./loader"; +export type { FindPipelineFilesOptions, LoadPipelinesOptions } from "./loader"; + +export { + findRemotePipelineFiles, + loadRemotePipelines, +} from "./remote"; +export type { + FindRemotePipelineFilesOptions, + LoadRemotePipelinesOptions, +} from "./remote"; + +export type { + GitHubSource, + GitLabSource, + LoadedPipelineFile, + LoadPipelinesResult, + LocalSource, + PipelineLoadError, + PipelineSource, +} from "./types"; diff --git a/packages/pipelines/pipeline-loader/src/insecure.ts b/packages/pipelines/pipeline-loader/src/insecure.ts new file mode 100644 index 000000000..79c5f11d6 --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/insecure.ts @@ -0,0 +1,48 @@ +import type { PipelineDefinition } from "@ucdjs/pipelines-core"; +import type { LoadedPipelineFile } from "./types"; +import path from "node:path"; +import { isPipelineDefinition } from "@ucdjs/pipelines-core"; +import { bundleRemoteModule, createDataUrl, identifierForLocalFile } from "./remote/bundler"; + +export interface LoadPipelineFromContentOptions { + identifier?: string; + fetchFn?: typeof fetch; +} + +export async function loadPipelineFromContent( + content: string, + filename: string, + options: LoadPipelineFromContentOptions = {}, +): Promise<LoadedPipelineFile> { + const identifier = options.identifier ?? identifierForLocalFile(path.resolve(filename)); + const bundle = await bundleRemoteModule({ + content, + identifier, + fetchFn: options.fetchFn, + }); + + const dataUrl = createDataUrl(bundle); + const module = await import(/* @vite-ignore */ dataUrl); + + const pipelines: PipelineDefinition[] = []; + const exportNames: string[] = []; + + const exportedModule = module as Record<string, unknown>; + + for (const [name, value] of Object.entries(exportedModule)) { + if (name === "default") { + continue; + } + + if (isPipelineDefinition(value)) { + pipelines.push(value); + exportNames.push(name); + } + } + + return { + filePath: filename, + pipelines, + exportNames, + }; +} diff --git a/packages/pipelines/pipeline-loader/src/loader.ts b/packages/pipelines/pipeline-loader/src/loader.ts new file mode 100644 index 000000000..b1b5a5ab5 --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/loader.ts @@ -0,0 +1,132 @@ +import type { PipelineDefinition } from "@ucdjs/pipelines-core"; +import type { + LoadedPipelineFile, + LoadPipelinesResult, + PipelineLoadError, +} from "./types"; +import { isPipelineDefinition } from "@ucdjs/pipelines-core"; + +import { glob } from "tinyglobby"; + +export async function loadPipelineFile(filePath: string): Promise<LoadedPipelineFile> { + const module = await import(/* @vite-ignore */ filePath); + + const pipelines: PipelineDefinition[] = []; + const exportNames: string[] = []; + + for (const [name, value] of Object.entries(module)) { + if (isPipelineDefinition(value)) { + pipelines.push(value); + exportNames.push(name); + } + } + + return { + filePath, + pipelines, + exportNames, + }; +} + +export interface LoadPipelinesOptions { + throwOnError?: boolean; +} + +export async function loadPipelinesFromPaths( + filePaths: string[], + options: LoadPipelinesOptions = {}, +): Promise<LoadPipelinesResult> { + const { throwOnError = false } = options; + + if (throwOnError) { + const wrapped = filePaths.map((filePath) => + loadPipelineFile(filePath).catch((err) => { + const error = err instanceof Error ? 
err : new Error(String(err)); + throw new Error(`Failed to load pipeline file: ${filePath}`, { cause: error }); + }), + ); + + const results = await Promise.all(wrapped); + const pipelines = results.flatMap((r) => r.pipelines); + + return { + pipelines, + files: results, + errors: [], + }; + } + + const settled = await Promise.allSettled(filePaths.map((fp) => loadPipelineFile(fp))); + + const files: LoadedPipelineFile[] = []; + const errors: PipelineLoadError[] = []; + + for (const [i, result] of settled.entries()) { + if (result.status === "fulfilled") { + files.push(result.value); + continue; + } + + const error = result.reason instanceof Error + ? result.reason + : new Error(String(result.reason)); + errors.push({ filePath: filePaths[i]!, error }); + } + + const pipelines = files.flatMap((f) => f.pipelines); + + return { + pipelines, + files, + errors, + }; +} + +export interface FindPipelineFilesOptions { + /** + * Glob patterns to match pipeline files. + * Defaults to `**\/*.ucd-pipeline.ts` + */ + patterns?: string | string[]; + + /** + * Current working directory to resolve patterns against. + * Defaults to `process.cwd()` + */ + cwd?: string; +} + +/** + * Find pipeline files on disk. + * + * By default matches files named `*.ucd-pipeline.ts` (the repository standard). + * + * @param {FindPipelineFilesOptions} options options for finding pipeline files + * @returns {Promise<string[]>} Array of matched file paths + * + * @example + * ```ts + * const files = await findPipelineFiles({ cwd: "./pipelines" }); + * console.log(files); // Array of file paths + * ``` + */ +export async function findPipelineFiles( + options: FindPipelineFilesOptions = {}, +): Promise<string[]> { + let patterns: string[] = ["**/*.ucd-pipeline.ts"]; + // eslint-disable-next-line node/prefer-global/process + const resolvedCwd = options.cwd ?? process.cwd(); + + if (options.patterns) { + patterns = Array.isArray(options.patterns) + ? 
options.patterns + : [options.patterns]; + } + + return glob(patterns, { + cwd: resolvedCwd, + ignore: ["node_modules/**", "**/node_modules/**", "**/dist/**", "**/build/**", "**/.git/**"], + absolute: true, + onlyFiles: true, + }); +} diff --git a/packages/pipelines/pipeline-loader/src/remote.ts b/packages/pipelines/pipeline-loader/src/remote.ts new file mode 100644 index 000000000..9206dadc1 --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/remote.ts @@ -0,0 +1,137 @@ +import type { RemoteFileList } from "./remote/types"; +import type { + GitHubSource, + GitLabSource, + LoadedPipelineFile, + LoadPipelinesResult, + PipelineLoadError, +} from "./types"; +import { loadPipelineFromContent } from "./insecure"; +import * as github from "./remote/github"; +import * as gitlab from "./remote/gitlab"; + +export { github, gitlab }; + +export interface FindRemotePipelineFilesOptions { + pattern?: string; + customFetch?: typeof fetch; +} + +export async function findRemotePipelineFiles( + source: GitHubSource | GitLabSource, + options: FindRemotePipelineFilesOptions = {}, +): Promise<RemoteFileList> { + const { pattern = "**/*.ucd-pipeline.ts", customFetch = fetch } = options; + const { owner, repo, ref, path } = source; + + const repoRef = { owner, repo, ref, path }; + + let fileList: RemoteFileList; + if (source.type === "github") { + fileList = await github.listFiles(repoRef, { customFetch }); + } else { + fileList = await gitlab.listFiles(repoRef, { customFetch }); + } + + const matcher = new RegExp( + pattern + .replace(/\*\*/g, ".*") + .replace(/\*/g, "[^/]*") + .replace(/\?/g, "."), + ); + + const matchedFiles = fileList.files.filter((file) => matcher.test(file)); + + return { + files: matchedFiles, + truncated: fileList.truncated, + }; +} + +export interface LoadRemotePipelinesOptions { + throwOnError?: boolean; + customFetch?: typeof fetch; +} + +function buildRemoteIdentifier( + provider: "github" | "gitlab", + owner: string, + repo: string, + ref: string | undefined, + filePath: string, +): string { + const url = new URL(`${provider}://${owner}/${repo}`); + url.searchParams.set("ref", ref ?? "HEAD"); + url.searchParams.set("path", filePath); + return url.toString(); +} + +export async function loadRemotePipelines( + source: GitHubSource | GitLabSource, + filePaths: string[], + options: LoadRemotePipelinesOptions = {}, +): Promise<LoadPipelinesResult> { + const { throwOnError = false, customFetch = fetch } = options; + const { owner, repo, ref, type } = source; + + const repoRef = { owner, repo, ref }; + + if (throwOnError) { + const wrapped = filePaths.map((filePath) => + (type === "github" + ? github.fetchFile(repoRef, filePath, { customFetch }) + : gitlab.fetchFile(repoRef, filePath, { customFetch }) + ).then((content) => loadPipelineFromContent(content, filePath, { + identifier: buildRemoteIdentifier(type, owner, repo, ref, filePath), + fetchFn: customFetch, + })).catch((err) => { + const error = err instanceof Error ? err : new Error(String(err)); + throw new Error(`Failed to load pipeline file: ${filePath}`, { cause: error }); + }), + ); + + const results = await Promise.all(wrapped); + const pipelines = results.flatMap((r) => r.pipelines); + + return { + pipelines, + files: results, + errors: [], + }; + } + + const settled = await Promise.allSettled( + filePaths.map(async (filePath) => { + const content = type === "github" + ? 
await github.fetchFile(repoRef, filePath, { customFetch }) + : await gitlab.fetchFile(repoRef, filePath, { customFetch }); + return loadPipelineFromContent(content, filePath, { + identifier: buildRemoteIdentifier(type, owner, repo, ref, filePath), + fetchFn: customFetch, + }); + }), + ); + + const files: LoadedPipelineFile[] = []; + const errors: PipelineLoadError[] = []; + + for (const [i, result] of settled.entries()) { + if (result.status === "fulfilled") { + files.push(result.value); + continue; + } + + const error = result.reason instanceof Error + ? result.reason + : new Error(String(result.reason)); + errors.push({ filePath: filePaths[i]!, error }); + } + + const pipelines = files.flatMap((f) => f.pipelines); + + return { + pipelines, + files, + errors, + }; +} diff --git a/packages/pipelines/pipeline-loader/src/remote/bundler.ts b/packages/pipelines/pipeline-loader/src/remote/bundler.ts new file mode 100644 index 000000000..f6de9fb50 --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/remote/bundler.ts @@ -0,0 +1,261 @@ +import type { RolldownPlugin } from "rolldown"; +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { transform } from "oxc-transform"; +import { build } from "rolldown"; +import * as github from "./github"; +import * as gitlab from "./gitlab"; + +export interface BundleInput { + content: string; + identifier: string; + fetchFn?: typeof fetch; +} + +const EXTENSIONS = [".ts", ".mts", ".js", ".mjs"]; + +function isUrlLike(value: string): boolean { + return /^[a-z][a-z+.-]*:/i.test(value); +} + +type RemoteProvider = "github" | "gitlab"; + +interface RemoteIdentifier { + provider: RemoteProvider; + owner: string; + repo: string; + ref: string; + path: string; +} + +function parseRemoteIdentifier(identifier: string): RemoteIdentifier | null { + if (!identifier.startsWith("github://") && !identifier.startsWith("gitlab://")) { + return null; + } + + const url = new URL(identifier); + const provider = url.protocol.replace(":", "") as RemoteProvider; + const owner = url.hostname; + const repo = url.pathname.replace(/^\/+/, ""); + + if (!owner || !repo) { + throw new Error(`Invalid remote identifier: ${identifier}`); + } + + const ref = url.searchParams.get("ref") ?? "HEAD"; + const filePath = url.searchParams.get("path") ?? ""; + + return { + provider, + owner, + repo, + ref, + path: filePath, + }; +} + +function formatRemoteIdentifier(remote: RemoteIdentifier): string { + const url = new URL(`${remote.provider}://${remote.owner}/${remote.repo}`); + url.searchParams.set("ref", remote.ref); + url.searchParams.set("path", remote.path); + return url.toString(); +} + +function assertRelativeSpecifier(specifier: string): void { + if (isUrlLike(specifier)) { + throw new Error(`Unsupported import specifier: ${specifier}`); + } + + if (!specifier.startsWith("./") && !specifier.startsWith("../")) { + throw new Error(`Unsupported import specifier: ${specifier}`); + } +} + +function stripTrailingSlash(value: string): string { + return value.endsWith("/") ? 
value.slice(0, -1) : value; +} + +function appendSuffix(identifier: string, suffix: string): string { + const remote = parseRemoteIdentifier(identifier); + if (remote) { + return formatRemoteIdentifier({ + ...remote, + path: `${remote.path}${suffix}`, + }); + } + + if (isUrlLike(identifier)) { + const url = new URL(identifier); + url.pathname = `${url.pathname}${suffix}`; + return url.toString(); + } + + return `${identifier}${suffix}`; +} + +function resolveRelativeSpecifier(specifier: string, parentIdentifier: string): string { + const remote = parseRemoteIdentifier(parentIdentifier); + if (remote) { + const parentDir = remote.path ? path.posix.dirname(remote.path) : ""; + const resolvedPath = path.posix.normalize(path.posix.join(parentDir, specifier)); + const cleanPath = resolvedPath.replace(/^\/+/, ""); + return formatRemoteIdentifier({ + ...remote, + path: cleanPath, + }); + } + + if (isUrlLike(parentIdentifier)) { + const base = new URL(parentIdentifier); + return new URL(specifier, base).toString(); + } + + const parentDir = path.dirname(parentIdentifier); + return path.resolve(parentDir, specifier); +} + +function getSpecifierExtension(specifier: string): string { + return path.posix.extname(specifier); +} + +function buildCandidateIdentifiers( + specifier: string, + parentIdentifier: string, +): string[] { + const resolvedBase = resolveRelativeSpecifier(specifier, parentIdentifier); + const hasExtension = getSpecifierExtension(specifier) !== ""; + + if (hasExtension) { + return [resolvedBase]; + } + + const normalizedBase = stripTrailingSlash(resolvedBase); + const candidates: string[] = []; + + for (const ext of EXTENSIONS) { + candidates.push(appendSuffix(normalizedBase, ext)); + } + + for (const ext of EXTENSIONS) { + candidates.push(appendSuffix(`${normalizedBase}/index`, ext)); + } + + return candidates; +} + +async function loadRemoteSource( + identifier: string, + fetchFn: typeof fetch, +): Promise<string> { + const remote = parseRemoteIdentifier(identifier); + if (!remote) { + if (isUrlLike(identifier)) { + throw new Error(`Unsupported import specifier: ${identifier}`); + } + + return readFile(identifier, "utf-8"); + } + + const repoRef = { owner: remote.owner, repo: remote.repo, ref: remote.ref }; + if (remote.provider === "github") { + return github.fetchFile(repoRef, remote.path, { customFetch: fetchFn }); + } + return gitlab.fetchFile(repoRef, remote.path, { customFetch: fetchFn }); +} + +async function compileModuleSource(identifier: string, source: string): Promise<string> { + const result = await transform(identifier, source, { sourceType: "module" }); + + if (result.errors && result.errors.length > 0) { + const message = result.errors.map((error) => error.message).join("\n"); + throw new Error(`Failed to parse module ${identifier}: ${message}`); + } + + return result.code; +} + +function createRemotePlugin(input: BundleInput): RolldownPlugin { + const fetchFn = input.fetchFn ?? 
fetch; + const moduleCache = new Map<string, string>(); + + return { + name: "pipeline-remote-loader", + resolveId: async (specifier: string, importer?: string) => { + if (!importer) { + return input.identifier; + } + + assertRelativeSpecifier(specifier); + + const candidates = buildCandidateIdentifiers(specifier, importer); + for (const candidate of candidates) { + try { + const source = await loadRemoteSource(candidate, fetchFn); + moduleCache.set(candidate, source); + return candidate; + } catch (err) { + if (err instanceof Error && err.message.includes("Module not found")) { + continue; + } + if (err instanceof Error && err.message.includes("404")) { + continue; + } + throw err; + } + } + + throw new Error(`Module not found: ${specifier}`); + }, + load: async (id: string) => { + if (id === input.identifier) { + const code = await compileModuleSource(id, input.content); + return code; + } + + const source = moduleCache.get(id) ?? await loadRemoteSource(id, fetchFn); + const code = await compileModuleSource(id, source); + moduleCache.set(id, source); + return code; + }, + }; +} + +export async function bundleRemoteModule(input: BundleInput): Promise<string> { + // eslint-disable-next-line regexp/no-super-linear-backtracking + for (const match of input.content.matchAll(/\bimport\s*(?:[^"']*from\s*)?["']([^"']+)["']/g)) { + const specifier = match[1]; + if (!specifier) { + continue; + } + assertRelativeSpecifier(specifier); + } + + const result = await build({ + input: input.identifier, + plugins: [createRemotePlugin(input)], + write: false, + output: { + format: "esm", + }, + }); + + const outputs = Array.isArray(result) ? result : [result]; + const chunks = outputs.flatMap((output) => output.output ?? []); + const chunk = chunks.find((item: { type: string }) => item.type === "chunk"); + + if (!chunk || chunk.type !== "chunk") { + throw new Error("Failed to bundle remote module"); + } + + return chunk.code; +} + +export function createDataUrl(code: string): string { + // eslint-disable-next-line node/prefer-global/buffer + const encoded = Buffer.from(code, "utf-8").toString("base64"); + return `data:text/javascript;base64,${encoded}`; +} + +export function identifierForLocalFile(filePath: string): string { + return path.resolve(filePath); +} diff --git a/packages/pipelines/pipeline-loader/src/remote/github.ts b/packages/pipelines/pipeline-loader/src/remote/github.ts new file mode 100644 index 000000000..0f3c12013 --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/remote/github.ts @@ -0,0 +1,88 @@ +import type { RemoteFileList, RemoteRequestOptions } from "./types"; + +const GITHUB_API_BASE = "https://api.github.com"; +const GITHUB_ACCEPT_HEADER = "application/vnd.github.v3+json"; + +interface GitHubTreeItem { + path: string; + type: string; +} + +interface GitHubTreeResponse { + tree: GitHubTreeItem[]; + truncated: boolean; +} + +interface GitHubContentResponse { + content: string; + encoding: string; +} + +interface GitHubRepoRef { + owner: string; + repo: string; + ref?: string; + path?: string; +} + +export async function listFiles( + repoRef: GitHubRepoRef, + options: RemoteRequestOptions = {}, +): Promise<RemoteFileList> { + const { owner, repo, ref = "HEAD", path = "" } = repoRef; + const { customFetch = fetch } = options; + + const url = `${GITHUB_API_BASE}/repos/${owner}/${repo}/git/trees/${ref}?recursive=1`; + const response = await customFetch(url, { + headers: { + Accept: GITHUB_ACCEPT_HEADER, + }, + }); + + if (!response.ok) { + throw new Error(`GitHub API error: ${response.status} ${response.statusText}`); 
+ } + + const data = await response.json() as GitHubTreeResponse; + + const prefix = path ? `${path}/` : ""; + const files = data.tree + .filter((item) => item.type === "blob" && item.path.startsWith(prefix)) + .map((item) => item.path); + + return { + files, + truncated: data.truncated, + }; +} + +export async function fetchFile( + repoRef: GitHubRepoRef, + filePath: string, + options: RemoteRequestOptions = {}, +): Promise<string> { + const { owner, repo, ref = "HEAD" } = repoRef; + const { customFetch = fetch } = options; + + const encodedPath = encodeURIComponent(filePath); + const url = `${GITHUB_API_BASE}/repos/${owner}/${repo}/contents/${encodedPath}?ref=${ref}`; + + const response = await customFetch(url, { + headers: { + Accept: GITHUB_ACCEPT_HEADER, + }, + }); + + if (!response.ok) { + throw new Error(`GitHub API error: ${response.status} ${response.statusText}`); + } + + const data = await response.json() as GitHubContentResponse; + + if (data.encoding !== "base64") { + throw new Error(`Unexpected encoding: ${data.encoding}`); + } + + // eslint-disable-next-line node/prefer-global/buffer + return Buffer.from(data.content, "base64").toString("utf-8"); +} diff --git a/packages/pipelines/pipeline-loader/src/remote/gitlab.ts b/packages/pipelines/pipeline-loader/src/remote/gitlab.ts new file mode 100644 index 000000000..3f75e8d6a --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/remote/gitlab.ts @@ -0,0 +1,71 @@ +import type { RemoteFileList, RemoteRequestOptions } from "./types"; + +const GITLAB_API_BASE = "https://gitlab.com/api/v4"; + +interface GitLabTreeItem { + path: string; + type: string; +} + +export interface GitLabRepoRef { + owner: string; + repo: string; + ref?: string; + path?: string; +} + +function encodeProjectPath(owner: string, repo: string): string { + return encodeURIComponent(`${owner}/${repo}`); +} + +export async function listFiles( + repoRef: GitLabRepoRef, + options: RemoteRequestOptions = {}, +): Promise<RemoteFileList> { + const { owner, repo, ref, path } = repoRef; + const refValue = ref ?? "HEAD"; + const pathValue = path ?? ""; + const { customFetch = fetch } = options; + + const projectId = encodeProjectPath(owner, repo); + const url = `${GITLAB_API_BASE}/projects/${projectId}/repository/tree?recursive=true&ref=${refValue}&path=${encodeURIComponent(pathValue)}&per_page=100`; + + const response = await customFetch(url); + + if (!response.ok) { + throw new Error(`GitLab API error: ${response.status} ${response.statusText}`); + } + + const data = await response.json() as GitLabTreeItem[]; + + const files = data + .filter((item) => item.type === "blob") + .map((item) => item.path); + + return { + files, + truncated: false, + }; +} + +export async function fetchFile( + repoRef: GitLabRepoRef, + filePath: string, + options: RemoteRequestOptions = {}, +): Promise<string> { + const { owner, repo, ref } = repoRef; + const refValue = ref ?? 
"HEAD"; + const { customFetch = fetch } = options; + + const projectId = encodeProjectPath(owner, repo); + const encodedPath = encodeURIComponent(filePath); + const url = `${GITLAB_API_BASE}/projects/${projectId}/repository/files/${encodedPath}/raw?ref=${refValue}`; + + const response = await customFetch(url); + + if (!response.ok) { + throw new Error(`GitLab API error: ${response.status} ${response.statusText}`); + } + + return response.text(); +} diff --git a/packages/pipelines/pipeline-loader/src/remote/types.ts b/packages/pipelines/pipeline-loader/src/remote/types.ts new file mode 100644 index 000000000..be00426d0 --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/remote/types.ts @@ -0,0 +1,8 @@ +export interface RemoteFileList { + files: string[]; + truncated: boolean; +} + +export interface RemoteRequestOptions { + customFetch?: typeof fetch; +} diff --git a/packages/pipelines/pipeline-loader/src/types.ts b/packages/pipelines/pipeline-loader/src/types.ts new file mode 100644 index 000000000..82faa417b --- /dev/null +++ b/packages/pipelines/pipeline-loader/src/types.ts @@ -0,0 +1,44 @@ +import type { PipelineDefinition } from "@ucdjs/pipelines-core"; + +export interface LoadedPipelineFile { + filePath: string; + pipelines: PipelineDefinition[]; + exportNames: string[]; +} + +export interface LoadPipelinesResult { + pipelines: PipelineDefinition[]; + files: LoadedPipelineFile[]; + errors: PipelineLoadError[]; +} + +export interface PipelineLoadError { + filePath: string; + error: Error; +} + +export interface GitHubSource { + type: "github"; + id: string; + owner: string; + repo: string; + ref?: string; + path?: string; +} + +export interface GitLabSource { + type: "gitlab"; + id: string; + owner: string; + repo: string; + ref?: string; + path?: string; +} + +export interface LocalSource { + type: "local"; + id: string; + cwd: string; +} + +export type PipelineSource = LocalSource | GitHubSource | GitLabSource; diff --git a/packages/pipelines/pipeline-loader/test/insecure.test.ts b/packages/pipelines/pipeline-loader/test/insecure.test.ts new file mode 100644 index 000000000..3d41551e2 --- /dev/null +++ b/packages/pipelines/pipeline-loader/test/insecure.test.ts @@ -0,0 +1,119 @@ +import { encodeBase64 } from "#test-utils"; +import { HttpResponse, mockFetch } from "#test-utils/msw"; +import { createPipelineModuleSource } from "#test-utils/pipelines"; +import { describe, expect, it } from "vitest"; +import { loadPipelineFromContent } from "../src/insecure"; + +describe("loadPipelineFromContent", () => { + it("should load pipeline definitions and export names", async () => { + const content = createPipelineModuleSource({ + named: ["alpha"], + extraExports: "export const config = { name: \"pipeline\" };", + }); + + const result = await loadPipelineFromContent(content, "remote.ucd-pipeline.ts"); + + expect(result.filePath).toBe("remote.ucd-pipeline.ts"); + expect(result.exportNames).toEqual(["alpha"]); + expect(result.exportNames).toHaveLength(1); + expect(result.pipelines.map((pipeline) => pipeline.id)).toEqual(["alpha"]); + }); + + it("should return empty arrays when no pipelines are exported", async () => { + const content = "export const config = { ok: true };"; + + const result = await loadPipelineFromContent(content, "empty.ucd-pipeline.ts"); + + expect(result.pipelines).toEqual([]); + expect(result.exportNames).toEqual([]); + }); + + it("should load multiple named pipelines", async () => { + const content = createPipelineModuleSource({ + named: ["alpha", "beta"], + }); + + 
const result = await loadPipelineFromContent(content, "multi.ucd-pipeline.ts"); + + expect(result.pipelines.map((pipeline) => pipeline.id).sort()).toEqual(["alpha", "beta"]); + expect(result.exportNames.sort()).toEqual(["alpha", "beta"]); + }); + + it("should ignore non-pipeline named exports", async () => { + const content = createPipelineModuleSource({ + named: ["alpha"], + extraExports: "export const meta = { ok: true };", + }); + + const result = await loadPipelineFromContent(content, "extra.ucd-pipeline.ts"); + + expect(result.pipelines.map((pipeline) => pipeline.id)).toEqual(["alpha"]); + expect(result.exportNames).toEqual(["alpha"]); + }); + + it("should ignore default exports", async () => { + const content = createPipelineModuleSource({ + named: ["alpha"], + extraExports: "export default { _type: \"pipeline-definition\", id: \"default-only\", versions: [\"16.0.0\"], inputs: [], routes: [] };", + }); + + const result = await loadPipelineFromContent(content, "default.ucd-pipeline.ts"); + + expect(result.pipelines.map((pipeline) => pipeline.id)).toEqual(["alpha"]); + expect(result.exportNames).toEqual(["alpha"]); + }); + + it("should resolve relative imports without extensions", async () => { + const depSource = "export const helper = { ok: true };"; + const depEncoded = encodeBase64(depSource); + + mockFetch([ + [ + "GET", + "https://api.github.com/repos/ucdjs/demo-pipelines/contents/pipelines%2Fdep.ts", + () => HttpResponse.json({ content: depEncoded, encoding: "base64" }), + ], + ]); + + const content = [ + "import { helper } from './dep';", + "export const alpha = { _type: 'pipeline-definition', id: 'alpha', versions: ['16.0.0'], inputs: [], routes: [] };", + "export const meta = helper;", + ].join("\n"); + + const result = await loadPipelineFromContent(content, "pipelines/main.ucd-pipeline.ts", { + identifier: "github://ucdjs/demo-pipelines?ref=main&path=pipelines/main.ucd-pipeline.ts", + }); + + expect(result.pipelines.map((pipeline) => pipeline.id)).toEqual(["alpha"]); + expect(result.exportNames).toEqual(["alpha"]); + }); + + it("should reject full URL import specifiers", async () => { + const content = "import data from 'https://example.com/pipelines/dep.ts';\nexport const alpha = { _type: 'pipeline-definition', id: 'alpha', versions: ['16.0.0'], inputs: [], routes: [] };"; + + await expect( + loadPipelineFromContent(content, "pipelines/main.ucd-pipeline.ts", { + identifier: "github://ucdjs/demo-pipelines?ref=main&path=pipelines/main.ucd-pipeline.ts", + }), + ).rejects.toThrow("Unsupported import specifier"); + }); + + it("should reject bare import specifiers", async () => { + const content = "import zod from 'zod';\nexport const alpha = { _type: 'pipeline-definition', id: 'alpha', versions: ['16.0.0'], inputs: [], routes: [] };"; + + await expect( + loadPipelineFromContent(content, "pipelines/main.ucd-pipeline.ts", { + identifier: "github://ucdjs/demo-pipelines?ref=main&path=pipelines/main.ucd-pipeline.ts", + }), + ).rejects.toThrow("Unsupported import specifier"); + }); + + it("should throw on invalid source code", async () => { + const content = "export const broken = ;"; + + await expect( + loadPipelineFromContent(content, "broken.ucd-pipeline.ts"), + ).rejects.toBeInstanceOf(Error); + }); +}); diff --git a/packages/pipelines/pipeline-loader/test/loader.test.ts b/packages/pipelines/pipeline-loader/test/loader.test.ts new file mode 100644 index 000000000..acd475449 --- /dev/null +++ b/packages/pipelines/pipeline-loader/test/loader.test.ts @@ -0,0 +1,137 @@ +import path 
from "node:path"; +import { createPipelineModuleSource } from "#test-utils/pipelines"; +import { describe, expect, it } from "vitest"; +import { testdir } from "vitest-testdirs"; +import { findPipelineFiles, loadPipelineFile, loadPipelinesFromPaths } from "../src"; + +describe("findPipelineFiles", () => { + it("should find pipeline files and ignore node_modules and dist", async () => { + const root = await testdir({ + pipelines: { + "alpha.ucd-pipeline.ts": createPipelineModuleSource({ named: ["alpha"] }), + "nested": { + "beta.ucd-pipeline.ts": createPipelineModuleSource({ named: ["beta"] }), + }, + }, + node_modules: { + "ignored.ucd-pipeline.ts": createPipelineModuleSource({ named: ["ignored"] }), + }, + dist: { + "built.ucd-pipeline.ts": createPipelineModuleSource({ named: ["built"] }), + }, + }); + + const files = await findPipelineFiles({ + cwd: root, + }); + const expected = [ + path.join(root, "pipelines", "alpha.ucd-pipeline.ts"), + path.join(root, "pipelines", "nested", "beta.ucd-pipeline.ts"), + ]; + + expect(files.sort()).toEqual(expected.sort()); + expect(files.every((file: string) => path.isAbsolute(file))).toBe(true); + }); + + it("should support custom patterns with a cwd", async () => { + const root = await testdir({ + pipelines: { + "gamma.ucd-pipeline.ts": createPipelineModuleSource({ named: ["gamma"] }), + "notes.txt": "not a pipeline", + }, + other: { + "delta.ucd-pipeline.ts": createPipelineModuleSource({ named: ["delta"] }), + }, + }); + + const cwd = path.join(root, "pipelines"); + const files = await findPipelineFiles({ + cwd, + }); + + expect(files).toEqual([path.join(cwd, "gamma.ucd-pipeline.ts")]); + }); +}); + +describe("loadPipelineFile", () => { + it("should load pipeline definitions and export names", async () => { + const root = await testdir({ + "demo.ucd-pipeline.ts": createPipelineModuleSource({ + named: ["alpha"], + extraExports: "export const config = { name: \"pipeline\" };", + }), + }); + const filePath = path.join(root, "demo.ucd-pipeline.ts"); + + const result = await loadPipelineFile(filePath); + + expect(result.filePath).toBe(filePath); + expect(result.exportNames).toEqual(["alpha"]); + expect(result.exportNames).toHaveLength(1); + expect(result.pipelines.map((pipeline) => pipeline.id)).toEqual(["alpha"]); + }); + + it("should return empty arrays when no pipelines are exported", async () => { + const root = await testdir({ + "empty.ucd-pipeline.ts": "export const config = { ok: true };", + }); + const filePath = path.join(root, "empty.ucd-pipeline.ts"); + + const result = await loadPipelineFile(filePath); + + expect(result.pipelines).toEqual([]); + expect(result.exportNames).toEqual([]); + }); +}); + +describe("loadPipelinesFromPaths", () => { + it("should merge pipelines and file metadata", async () => { + const root = await testdir({ + "alpha.ucd-pipeline.ts": createPipelineModuleSource({ named: ["alpha"] }), + "beta.ucd-pipeline.ts": createPipelineModuleSource({ named: ["beta"] }), + }); + + const alphaPath = path.join(root, "alpha.ucd-pipeline.ts"); + const betaPath = path.join(root, "beta.ucd-pipeline.ts"); + + const result = await loadPipelinesFromPaths([alphaPath, betaPath]); + + expect(result.errors).toEqual([]); + expect(result.files.map((file) => file.filePath)).toEqual([alphaPath, betaPath]); + expect(result.pipelines.map((pipeline) => pipeline.id).sort()).toEqual(["alpha", "beta"]); + }); + + it("should collect errors when files fail to load", async () => { + const root = await testdir({ + "alpha.ucd-pipeline.ts": 
createPipelineModuleSource({ named: ["alpha"] }), + }); + + const alphaPath = path.join(root, "alpha.ucd-pipeline.ts"); + const missingPath = path.join(root, "missing.ucd-pipeline.ts"); + + const result = await loadPipelinesFromPaths([alphaPath, missingPath]); + + expect(result.files).toHaveLength(1); + expect(result.errors).toHaveLength(1); + expect(result.errors[0]?.filePath).toBe(missingPath); + expect(result.errors[0]?.error).toBeInstanceOf(Error); + expect(result.pipelines.map((pipeline) => pipeline.id)).toEqual(["alpha"]); + }); + + it("should throw when throwOnError is enabled", async () => { + const root = await testdir({ + "alpha.ucd-pipeline.ts": createPipelineModuleSource({ named: ["alpha"] }), + }); + + const missingPath = path.join(root, "missing.ucd-pipeline.ts"); + + try { + await loadPipelinesFromPaths([missingPath], { throwOnError: true }); + throw new Error("Expected loadPipelinesFromPaths to throw"); + } catch (error) { + expect(error).toBeInstanceOf(Error); + expect((error as Error).message).toContain(`Failed to load pipeline file: ${missingPath}`); + expect((error as Error).cause).toBeInstanceOf(Error); + } + }); +}); diff --git a/packages/pipelines/pipeline-loader/test/remote.test.ts b/packages/pipelines/pipeline-loader/test/remote.test.ts new file mode 100644 index 000000000..c6b76bd5a --- /dev/null +++ b/packages/pipelines/pipeline-loader/test/remote.test.ts @@ -0,0 +1,209 @@ +import { encodeBase64 } from "#test-utils"; +import { HttpResponse, mockFetch } from "#test-utils/msw"; +import { createPipelineModuleSource } from "#test-utils/pipelines"; +import { describe, expect, it } from "vitest"; +import { findRemotePipelineFiles, loadRemotePipelines } from "../src/remote"; + +describe("findRemotePipelineFiles", () => { + it("should list GitHub files and apply path + pattern filtering", async () => { + mockFetch([ + [ + "GET", + "https://api.github.com/repos/ucdjs/demo-pipelines/git/trees/main", + () => HttpResponse.json({ + tree: [ + { path: "pipelines/alpha.ucd-pipeline.ts", type: "blob" }, + { path: "pipelines/notes.txt", type: "blob" }, + { path: "other/beta.ucd-pipeline.ts", type: "blob" }, + { path: "pipelines/subdir", type: "tree" }, + ], + truncated: false, + }), + ], + ]); + + const result = await findRemotePipelineFiles({ + type: "github", + id: "demo-pipelines", + owner: "ucdjs", + repo: "demo-pipelines", + ref: "main", + path: "pipelines", + }); + + expect(result.files).toEqual(["pipelines/alpha.ucd-pipeline.ts"]); + expect(result.truncated).toBe(false); + }); + + it("should list GitLab files and apply pattern filtering", async () => { + mockFetch([ + [ + "GET", + "https://gitlab.com/api/v4/projects/ucdjs%2Fdemo-pipelines/repository/tree", + () => HttpResponse.json([ + { path: "pipelines/alpha.ucd-pipeline.ts", type: "blob" }, + { path: "pipelines/notes.txt", type: "blob" }, + { path: "pipelines/subdir", type: "tree" }, + ]), + ], + ]); + + const result = await findRemotePipelineFiles({ + type: "gitlab", + id: "demo-pipelines", + owner: "ucdjs", + repo: "demo-pipelines", + ref: "main", + path: "pipelines", + }); + + expect(result.files).toEqual(["pipelines/alpha.ucd-pipeline.ts"]); + expect(result.truncated).toBe(false); + }); +}); + +describe("loadRemotePipelines", () => { + it("should load GitHub pipeline files", async () => { + const alpha = createPipelineModuleSource({ named: ["alpha"] }); + const beta = createPipelineModuleSource({ named: ["beta"] }); + + mockFetch([ + [ + "GET", + 
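+ // GitHub contents API expects the file path as a single percent-encoded segment, hence "%2F" below.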
"https://api.github.com/repos/ucdjs/demo-pipelines/contents/pipelines%2Falpha.ucd-pipeline.ts", + () => HttpResponse.json({ content: encodeBase64(alpha), encoding: "base64" }), + ], + [ + "GET", + "https://api.github.com/repos/ucdjs/demo-pipelines/contents/pipelines%2Fbeta.ucd-pipeline.ts", + () => HttpResponse.json({ content: encodeBase64(beta), encoding: "base64" }), + ], + ]); + + const result = await loadRemotePipelines( + { type: "github", id: "demo-pipelines", owner: "ucdjs", repo: "demo-pipelines", ref: "main" }, + ["pipelines/alpha.ucd-pipeline.ts", "pipelines/beta.ucd-pipeline.ts"], + ); + + expect(result.errors).toEqual([]); + expect(result.files).toHaveLength(2); + expect(result.pipelines.map((p) => p.id).sort()).toEqual(["alpha", "beta"]); + }); + + it("should collect GitHub errors when files fail", async () => { + const alpha = createPipelineModuleSource({ named: ["alpha"] }); + + mockFetch([ + [ + "GET", + "https://api.github.com/repos/ucdjs/demo-pipelines/contents/pipelines%2Falpha.ucd-pipeline.ts", + () => HttpResponse.json({ content: encodeBase64(alpha), encoding: "base64" }), + ], + [ + "GET", + "https://api.github.com/repos/ucdjs/demo-pipelines/contents/pipelines%2Fbeta.ucd-pipeline.ts", + () => HttpResponse.text("Not found", { status: 404 }), + ], + ]); + + const result = await loadRemotePipelines( + { type: "github", id: "demo-pipelines", owner: "ucdjs", repo: "demo-pipelines", ref: "main" }, + ["pipelines/alpha.ucd-pipeline.ts", "pipelines/beta.ucd-pipeline.ts"], + ); + + expect(result.errors).toHaveLength(1); + expect(result.errors[0]?.filePath).toBe("pipelines/beta.ucd-pipeline.ts"); + expect(result.files).toHaveLength(1); + expect(result.pipelines.map((p) => p.id)).toEqual(["alpha"]); + }); + + it("should throw for GitHub when throwOnError is enabled", async () => { + mockFetch([ + [ + "GET", + "https://api.github.com/repos/ucdjs/demo-pipelines/contents/pipelines%2Fmissing.ucd-pipeline.ts", + () => HttpResponse.text("Not found", { status: 404 }), + ], + ]); + + await expect( + loadRemotePipelines( + { type: "github", id: "demo-pipelines", owner: "ucdjs", repo: "demo-pipelines", ref: "main" }, + ["pipelines/missing.ucd-pipeline.ts"], + { throwOnError: true }, + ), + ).rejects.toThrow("Failed to load pipeline file: pipelines/missing.ucd-pipeline.ts"); + }); + + it("should load GitLab pipeline files", async () => { + const alpha = createPipelineModuleSource({ named: ["alpha"] }); + const beta = createPipelineModuleSource({ named: ["beta"] }); + + mockFetch([ + [ + "GET", + "https://gitlab.com/api/v4/projects/ucdjs%2Fdemo-pipelines/repository/files/pipelines%2Falpha.ucd-pipeline.ts/raw", + () => HttpResponse.text(alpha), + ], + [ + "GET", + "https://gitlab.com/api/v4/projects/ucdjs%2Fdemo-pipelines/repository/files/pipelines%2Fbeta.ucd-pipeline.ts/raw", + () => HttpResponse.text(beta), + ], + ]); + + const result = await loadRemotePipelines( + { type: "gitlab", id: "demo-pipelines", owner: "ucdjs", repo: "demo-pipelines", ref: "main" }, + ["pipelines/alpha.ucd-pipeline.ts", "pipelines/beta.ucd-pipeline.ts"], + ); + + expect(result.errors).toEqual([]); + expect(result.files).toHaveLength(2); + expect(result.pipelines.map((p) => p.id).sort()).toEqual(["alpha", "beta"]); + }); + + it("should collect GitLab errors when files fail", async () => { + const alpha = createPipelineModuleSource({ named: ["alpha"] }); + + mockFetch([ + [ + "GET", + "https://gitlab.com/api/v4/projects/ucdjs%2Fdemo-pipelines/repository/files/pipelines%2Falpha.ucd-pipeline.ts/raw", + () => 
HttpResponse.text(alpha), + ], + [ + "GET", + "https://gitlab.com/api/v4/projects/ucdjs%2Fdemo-pipelines/repository/files/pipelines%2Fbeta.ucd-pipeline.ts/raw", + () => HttpResponse.text("Not found", { status: 404 }), + ], + ]); + + const result = await loadRemotePipelines( + { type: "gitlab", id: "demo-pipelines", owner: "ucdjs", repo: "demo-pipelines", ref: "main" }, + ["pipelines/alpha.ucd-pipeline.ts", "pipelines/beta.ucd-pipeline.ts"], + ); + + expect(result.errors).toHaveLength(1); + expect(result.errors[0]?.filePath).toBe("pipelines/beta.ucd-pipeline.ts"); + expect(result.files).toHaveLength(1); + expect(result.pipelines.map((p) => p.id)).toEqual(["alpha"]); + }); + + it("should throw for GitLab when throwOnError is enabled", async () => { + mockFetch([ + [ + "GET", + "https://gitlab.com/api/v4/projects/ucdjs%2Fdemo-pipelines/repository/files/pipelines%2Fmissing.ucd-pipeline.ts/raw", + () => HttpResponse.text("Not found", { status: 404 }), + ], + ]); + + await expect( + loadRemotePipelines( + { type: "gitlab", id: "demo-pipelines", owner: "ucdjs", repo: "demo-pipelines", ref: "main" }, + ["pipelines/missing.ucd-pipeline.ts"], + { throwOnError: true }, + ), + ).rejects.toThrow("Failed to load pipeline file: pipelines/missing.ucd-pipeline.ts"); + }); +}); diff --git a/packages/pipelines/pipeline-loader/tsconfig.build.json b/packages/pipelines/pipeline-loader/tsconfig.build.json new file mode 100644 index 000000000..36c889e0c --- /dev/null +++ b/packages/pipelines/pipeline-loader/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "test"] +} diff --git a/packages/pipelines/pipeline-loader/tsconfig.json b/packages/pipelines/pipeline-loader/tsconfig.json new file mode 100644 index 000000000..07edf31d8 --- /dev/null +++ b/packages/pipelines/pipeline-loader/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + "include": [ + "src", + "test", + "playgrounds" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-loader/tsdown.config.ts b/packages/pipelines/pipeline-loader/tsdown.config.ts new file mode 100644 index 000000000..ac92bb427 --- /dev/null +++ b/packages/pipelines/pipeline-loader/tsdown.config.ts @@ -0,0 +1,9 @@ +import { createTsdownConfig } from "@ucdjs-tooling/tsdown-config"; + +export default createTsdownConfig({ + entry: [ + "./src/index.ts", + "./src/remote.ts", + "./src/insecure.ts", + ], +}); diff --git a/packages/pipelines/pipeline-playground/package.json b/packages/pipelines/pipeline-playground/package.json new file mode 100644 index 000000000..32ea2c44f --- /dev/null +++ b/packages/pipelines/pipeline-playground/package.json @@ -0,0 +1,41 @@ +{ + "name": "@ucdjs/pipelines-playground", + "version": "0.0.0", + "private": true, + "type": "module", + "author": { + "name": "Lucas Nørgaard", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.28.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-playground" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "exports": { + "./package.json": "./package.json" + }, + "publishConfig": { + "access": "restricted" + }, + "files": [], + "engines": { + "node": ">=22.18" + }, + "scripts": { + "pipelines:list": "ucd pipelines list --cwd ." 
+  },
+  "dependencies": {
+    "@ucdjs/pipelines-core": "workspace:*",
+    "@ucdjs/pipelines-presets": "workspace:*",
+    "zod": "catalog:prod"
+  },
+  "devDependencies": {}
+}
diff --git a/packages/pipelines/pipeline-playground/src/advanced.ucd-pipeline.ts b/packages/pipelines/pipeline-playground/src/advanced.ucd-pipeline.ts
new file mode 100644
index 000000000..798078a54
--- /dev/null
+++ b/packages/pipelines/pipeline-playground/src/advanced.ucd-pipeline.ts
@@ -0,0 +1,375 @@
+import type { ParsedRow, PropertyJson, ResolvedEntry } from "@ucdjs/pipelines-core";
+import {
+  always,
+  and,
+  byDir,
+  byExt,
+  byName,
+  definePipeline,
+  definePipelineRoute,
+  definePipelineTransform,
+} from "@ucdjs/pipelines-core";
+import {
+  createDeduplicateTransform,
+  createExpandRangesTransform,
+  createMemorySource,
+  createNormalizeTransform,
+  createSortTransform,
+  propertyJsonResolver,
+  sequenceParser,
+  standardParser,
+  unicodeDataParser,
+} from "@ucdjs/pipelines-presets";
+
+// Drops rows without a usable value.
+const filterEmptyValues = definePipelineTransform({
+  id: "filter-empty-values",
+  async* fn(_ctx, rows) {
+    for await (const row of rows) {
+      if (row.value && row.value !== "") {
+        yield row;
+      }
+    }
+  },
+});
+
+// Stamps each row with its processing time and the source UCD version.
+const addMetadata = definePipelineTransform({
+  id: "add-metadata",
+  async* fn(ctx, rows) {
+    for await (const row of rows) {
+      yield {
+        ...row,
+        meta: {
+          ...row.meta,
+          processedAt: new Date().toISOString(),
+          sourceVersion: ctx.version,
+        },
+      };
+    }
+  },
+});
+
+const unicodeDataRoute = definePipelineRoute({
+  id: "unicode-data",
+  filter: byName("UnicodeData.txt"),
+  parser: unicodeDataParser,
+  transforms: [
+    createExpandRangesTransform(),
+    addMetadata,
+  ],
+  resolver: propertyJsonResolver,
+  out: {
+    fileName: (pj: PropertyJson) => `properties/${pj.property.toLowerCase().replace(/_/g, "-")}.json`,
+  },
+});
+
+const blocksRoute = definePipelineRoute({
+  id: "blocks",
+  filter: byName("Blocks.txt"),
+  parser: standardParser,
+  transforms: [
+    createSortTransform({ direction: "asc" }),
+  ],
+  resolver: async (ctx, rows) => {
+    const entries: ResolvedEntry[] = [];
+
+    for await (const row of rows) {
+      const range = row.start && row.end ? `${row.start}..${row.end}` : undefined;
+      entries.push({
+        range: range as `${string}..${string}` | undefined,
+        codePoint: row.codePoint,
+        value: row.value,
+      });
+    }
+
+    return [{
+      version: ctx.version,
+      property: "Blocks",
+      file: "Blocks.txt",
+      entries,
+    }];
+  },
+});
+
+const scriptsRoute = definePipelineRoute({
+  id: "scripts",
+  filter: byName("Scripts.txt"),
+  depends: ["route:blocks"] as unknown as readonly ["route:blocks"],
+  parser: standardParser,
+  resolver: async (ctx, rows) => {
+    const entries: ResolvedEntry[] = [];
+
+    for await (const row of rows) {
+      const range = row.start && row.end ? 
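+ // Range rows collapse into a "start..end" key; point rows keep their single codePoint.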
`${row.start}..${row.end}` : undefined; + entries.push({ + range: range as `${string}..${string}` | undefined, + codePoint: row.codePoint, + value: row.value, + }); + } + + return [{ + version: ctx.version, + property: "Scripts", + file: "Scripts.txt", + entries, + }]; + }, +}); + +const lineBreakRoute = definePipelineRoute({ + id: "line-break", + filter: byName("LineBreak.txt"), + depends: ["route:unicode-data"] as unknown as readonly ["route:unicode-data"], + parser: standardParser, + transforms: [ + createNormalizeTransform(), + createSortTransform({ direction: "asc" }), + ], + resolver: propertyJsonResolver, + cache: true, +}); + +const propListRoute = definePipelineRoute({ + id: "prop-list", + filter: byName("PropList.txt"), + parser: standardParser, + transforms: [ + filterEmptyValues, + createDeduplicateTransform({ strategy: "first" }), + ], + resolver: async (ctx, rows) => { + const propertyGroups = new Map(); + + for await (const row of rows) { + const prop = row.property || "Unknown"; + if (!propertyGroups.has(prop)) { + propertyGroups.set(prop, []); + } + const entries = propertyGroups.get(prop)!; + const range = row.start && row.end ? `${row.start}..${row.end}` : undefined; + entries.push({ + range: range as `${string}..${string}` | undefined, + codePoint: row.codePoint, + value: true, + }); + } + + return Array.from(propertyGroups.entries()).map(([property, entries]) => ({ + version: ctx.version, + property, + file: "PropList.txt", + entries, + })); + }, +}); + +const sequencesRoute = definePipelineRoute({ + id: "sequences", + filter: byName("Sequences.txt"), + parser: sequenceParser, + resolver: propertyJsonResolver, +}); + +const emojiDataRoute = definePipelineRoute({ + id: "emoji-data", + filter: and(byDir("emoji"), byExt(".txt")), + depends: ["route:prop-list"] as unknown as readonly ["route:prop-list"], + parser: standardParser, + resolver: async (ctx, rows) => { + const entries: ResolvedEntry[] = []; + + for await (const row of rows) { + const range = row.start && row.end ? 
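+ // Emoji rows may also carry multi-code-point sequences, which are preserved below alongside ranges.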
`${row.start}..${row.end}` : undefined; + entries.push({ + range: range as `${string}..${string}` | undefined, + codePoint: row.codePoint, + sequence: row.sequence, + value: row.value, + }); + } + + return [{ + version: ctx.version, + property: `Emoji_${ctx.file.name.replace(".txt", "")}`, + file: ctx.file.path, + entries, + meta: { + emojiVersion: "16.0", + }, + }]; + }, +}); + +const ucdGeneralRoute = definePipelineRoute({ + id: "ucd-general", + filter: and(byDir("ucd"), byExt(".txt")), + parser: standardParser, + resolver: propertyJsonResolver, +}); + +const auxiliaryRoute = definePipelineRoute({ + id: "auxiliary", + filter: byDir("auxiliary"), + depends: ["route:unicode-data", "route:blocks"] as unknown as readonly ["route:unicode-data", "route:blocks"], + parser: standardParser, + transforms: [ + createSortTransform({ direction: "asc" }), + ], + resolver: propertyJsonResolver, +}); + +const extractedRoute = definePipelineRoute({ + id: "extracted", + filter: byDir("extracted"), + parser: standardParser, + resolver: propertyJsonResolver, +}); + +export const playgroundAdvancedPipeline = definePipeline({ + id: "playground-advanced", + name: "Advanced Pipeline Playground", + description: "A comprehensive pipeline demonstrating all available features including dependencies, artifacts, transforms, and multiple sources", + versions: ["15.1.0", "16.0.0"], + inputs: [ + createMemorySource({ + id: "test-data", + files: { + "16.0.0": [ + { + path: "ucd/UnicodeData.txt", + content: `0041;LATIN CAPITAL LETTER A;Lu;0;L;;;;;N;;;;0061; +0042;LATIN CAPITAL LETTER B;Lu;0;L;;;;;N;;;;0062; +0061;LATIN SMALL LETTER A;Ll;0;L;;;;;N;;;0041;;0041 +0062;LATIN SMALL LETTER B;Ll;0;L;;;;;N;;;0042;;0042`, + }, + { + path: "ucd/Blocks.txt", + content: `0000..007F; Basic Latin +0080..00FF; Latin-1 Supplement`, + }, + { + path: "ucd/Scripts.txt", + content: `0000..007F; Latin +0080..00FF; Latin`, + }, + { + path: "ucd/LineBreak.txt", + content: `0000..0008; CM +0009; BA +000A; LF`, + }, + { + path: "ucd/PropList.txt", + content: `0000..001F; Control +007F..009F; Control`, + }, + { + path: "ucd/Sequences.txt", + content: `0041 0308; A_WITH_DIAERESIS +0061 0308; a_WITH_DIAERESIS`, + }, + ], + "15.1.0": [ + { + path: "ucd/UnicodeData.txt", + content: `0041;LATIN CAPITAL LETTER A;Lu;0;L;;;;;N;;;;0061; +0061;LATIN SMALL LETTER A;Ll;0;L;;;;;N;;;0041;;0041`, + }, + { + path: "ucd/Blocks.txt", + content: `0000..007F; Basic Latin`, + }, + ], + }, + }), + createMemorySource({ + id: "emoji-data", + files: { + "16.0.0": [ + { + path: "emoji/emoji-data.txt", + content: `231A..231B; Emoji +23E9..23EC; Emoji`, + }, + { + path: "emoji/emoji-sequences.txt", + content: `231A..231B; Emoji; Watch +23E9..23EC; Emoji; Arrow`, + }, + ], + }, + }), + ], + routes: [ + unicodeDataRoute, + blocksRoute, + scriptsRoute, + lineBreakRoute, + propListRoute, + sequencesRoute, + emojiDataRoute, + ucdGeneralRoute, + auxiliaryRoute, + extractedRoute, + ], + include: byExt(".txt"), + strict: false, + concurrency: 8, + fallback: { + filter: always(), + parser: async function* (ctx) { + const content = await ctx.readContent(); + yield { + sourceFile: ctx.file.path, + kind: "point" as const, + value: `fallback: ${content.length} bytes`, + }; + }, + resolver: async (ctx, rows) => { + const entries: ResolvedEntry[] = []; + for await (const row of rows) { + entries.push({ + codePoint: "0000", + value: row.value, + }); + } + return [{ + version: ctx.version, + property: "Fallback", + file: ctx.file.name, + entries, + }]; + }, + }, + onEvent: (event) => { + 
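+ // Minimal console reporter: each case maps one pipeline lifecycle event to a log line.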
switch (event.type) { + case "pipeline:start": + console.log(`[Pipeline] Started: ${event.id}`); + break; + case "pipeline:end": + console.log(`[Pipeline] Completed in ${event.durationMs}ms`); + break; + case "version:start": + console.log(`[Version] Processing ${event.version}`); + break; + case "version:end": + console.log(`[Version] ${event.version} completed in ${event.durationMs}ms`); + break; + case "file:matched": + console.log(`[File] Matched ${event.file.path} -> ${event.routeId}`); + break; + case "file:skipped": + console.log(`[File] Skipped ${event.file.path}: ${event.reason}`); + break; + case "artifact:produced": + console.log(`[Artifact] Produced ${event.artifactId} from ${event.routeId}`); + break; + case "artifact:consumed": + console.log(`[Artifact] Consumed ${event.artifactId} by ${event.routeId}`); + break; + case "error": + console.error(`[Error] ${event.error.scope}: ${event.error.message}`); + break; + } + }, +}); diff --git a/packages/pipelines/pipeline-playground/src/multiple.ucd-pipeline.ts b/packages/pipelines/pipeline-playground/src/multiple.ucd-pipeline.ts new file mode 100644 index 000000000..34845b6c7 --- /dev/null +++ b/packages/pipelines/pipeline-playground/src/multiple.ucd-pipeline.ts @@ -0,0 +1,17 @@ +import { definePipeline } from "@ucdjs/pipelines-core"; + +export const firstPipeline = definePipeline({ + id: "first-pipeline", + name: "First Pipeline", + versions: ["16.0.0"], + inputs: [], + routes: [], +}) + +export const secondPipeline = definePipeline({ + id: "second-pipeline", + name: "Second Pipeline", + versions: ["16.0.0"], + inputs: [], + routes: [], +}) diff --git a/packages/pipelines/pipeline-playground/src/sequence.ucd-pipeline.ts b/packages/pipelines/pipeline-playground/src/sequence.ucd-pipeline.ts new file mode 100644 index 000000000..2f271f56b --- /dev/null +++ b/packages/pipelines/pipeline-playground/src/sequence.ucd-pipeline.ts @@ -0,0 +1,28 @@ +import { byName, definePipeline, definePipelineRoute } from "@ucdjs/pipelines-core"; +import { createMemorySource, sequenceParser, propertyJsonResolver } from "@ucdjs/pipelines-presets"; + +export const sequencePipeline = definePipeline({ + id: "playground-sequence", + name: "Playground Sequence", + versions: ["16.0.0"], + inputs: [ + createMemorySource({ + files: { + "16.0.0": [ + { + path: "ucd/Sequences.txt", + content: "0041 0308; A_DIAERESIS\n006F 0308; O_DIAERESIS\n", + }, + ], + }, + }), + ], + routes: [ + definePipelineRoute({ + id: "sequence-route", + filter: byName("Sequences.txt"), + parser: sequenceParser, + resolver: propertyJsonResolver, + }), + ], +}); diff --git a/packages/pipelines/pipeline-playground/src/simple.ucd-pipeline.ts b/packages/pipelines/pipeline-playground/src/simple.ucd-pipeline.ts new file mode 100644 index 000000000..5ae75c5a7 --- /dev/null +++ b/packages/pipelines/pipeline-playground/src/simple.ucd-pipeline.ts @@ -0,0 +1,32 @@ +import { always, definePipeline, definePipelineRoute } from "@ucdjs/pipelines-core"; +import { createMemorySource, standardParser, propertyJsonResolver } from "@ucdjs/pipelines-presets"; + +export const simplePipeline = definePipeline({ + id: "simple", + name: "Playground Simple", + versions: ["16.0.0"], + tags: [ + "simple" + ], + inputs: [ + createMemorySource({ + files: { + "16.0.0": [ + { + path: "ucd/Hello.txt", + content: "0048; H\n0065; e\n006C; l\n006C; l\n006F; o\n", + }, + ], + }, + }), + ], + routes: [ + definePipelineRoute({ + id: "hello", + filter: always(), + parser: standardParser, + resolver: propertyJsonResolver, + }), + ], 
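+ // No include filter is set: the single route's always() filter accepts every file from the memory source.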
+}); + diff --git a/packages/pipelines/pipeline-playground/src/with-imports.ucd-pipeline.ts b/packages/pipelines/pipeline-playground/src/with-imports.ucd-pipeline.ts new file mode 100644 index 000000000..6488707eb --- /dev/null +++ b/packages/pipelines/pipeline-playground/src/with-imports.ucd-pipeline.ts @@ -0,0 +1,12 @@ +import { definePipeline } from "@ucdjs/pipelines-core"; +import { firstPipeline, secondPipeline } from "./multiple.ucd-pipeline"; + +export const mainPipeline = definePipeline({ + id: "main-pipeline", + name: "Main Pipeline", + versions: ["16.0.0"], + inputs: [], + routes: [], +}) + +export { firstPipeline, secondPipeline }; diff --git a/packages/pipelines/pipeline-playground/tsconfig.build.json b/packages/pipelines/pipeline-playground/tsconfig.build.json new file mode 100644 index 000000000..36c889e0c --- /dev/null +++ b/packages/pipelines/pipeline-playground/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "test"] +} diff --git a/packages/pipelines/pipeline-playground/tsconfig.json b/packages/pipelines/pipeline-playground/tsconfig.json new file mode 100644 index 000000000..ebce38306 --- /dev/null +++ b/packages/pipelines/pipeline-playground/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + "include": [ + "src" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-playground/tsdown.config.ts b/packages/pipelines/pipeline-playground/tsdown.config.ts new file mode 100644 index 000000000..324776b47 --- /dev/null +++ b/packages/pipelines/pipeline-playground/tsdown.config.ts @@ -0,0 +1,9 @@ +import { createTsdownConfig } from "@ucdjs-tooling/tsdown-config"; + +export default createTsdownConfig({ + entry: [ + "./src/simple.ucd-pipeline.ts", + "./src/memory.ucd-pipeline.ts", + "./src/sequence.ucd-pipeline.ts", + ], +}); diff --git a/packages/pipelines/pipeline-presets/eslint.config.js b/packages/pipelines/pipeline-presets/eslint.config.js new file mode 100644 index 000000000..d9c0ca1ec --- /dev/null +++ b/packages/pipelines/pipeline-presets/eslint.config.js @@ -0,0 +1,7 @@ +// @ts-check +import { luxass } from "@luxass/eslint-config"; + +export default luxass({ + type: "lib", + pnpm: true, +}); diff --git a/packages/pipelines/pipeline-presets/package.json b/packages/pipelines/pipeline-presets/package.json new file mode 100644 index 000000000..ff31cdbfc --- /dev/null +++ b/packages/pipelines/pipeline-presets/package.json @@ -0,0 +1,61 @@ +{ + "name": "@ucdjs/pipelines-presets", + "version": "0.0.1", + "type": "module", + "author": { + "name": "Lucas Nørgård", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.27.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-presets" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "exports": { + ".": "./dist/index.mjs", + "./parsers": "./dist/parsers/index.mjs", + "./pipelines": "./dist/pipelines/index.mjs", + "./resolvers": "./dist/resolvers/index.mjs", + "./routes": "./dist/routes/index.mjs", + "./sources": "./dist/sources/index.mjs", + "./transforms": "./dist/transforms/index.mjs", + "./package.json": "./package.json" + }, + "types": "./dist/index.d.mts", + "files": [ + "dist" + ], + "engines": { + "node": ">=22.18" + }, + "scripts": { + "build": "tsdown --tsconfig=./tsconfig.build.json", + "dev": "tsdown 
--watch", + "clean": "git clean -xdf dist node_modules", + "lint": "eslint .", + "typecheck": "tsc --noEmit -p tsconfig.build.json" + }, + "dependencies": { + "@ucdjs/pipelines-core": "workspace:*", + "zod": "catalog:prod" + }, + "devDependencies": { + "@luxass/eslint-config": "catalog:linting", + "@ucdjs-tooling/tsconfig": "workspace:*", + "@ucdjs-tooling/tsdown-config": "workspace:*", + "eslint": "catalog:linting", + "publint": "catalog:build", + "tsdown": "catalog:build", + "typescript": "catalog:build" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/pipelines/pipeline-presets/src/index.ts b/packages/pipelines/pipeline-presets/src/index.ts new file mode 100644 index 000000000..9b9ef99f6 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/index.ts @@ -0,0 +1,79 @@ +export { + createMultiPropertyParser, + createSequenceParser, + createStandardParser, + multiPropertyParser, + type MultiPropertyParserOptions, + sequenceParser, + type SequenceParserOptions, + standardParser, + type StandardParserOptions, + type UnicodeDataMeta, + unicodeDataParser, + type UnicodeDataRow, +} from "./parsers"; + +export { + type BasicPipelineOptions, + createBasicPipeline, + createEmojiPipeline, + createFullPipeline, + type EmojiPipelineOptions, + type FullPipelineOptions, +} from "./pipelines"; + +export { + createGroupedResolver, + createPropertyJsonResolver, + type GroupedResolverOptions, + propertyJsonResolver, + type PropertyJsonResolverOptions, +} from "./resolvers"; + +export { + allRoutes, + blocksRoute, + coreRoutes, + derivedCorePropertiesRoute, + emojiDataRoute, + emojiRoutes, + generalCategoryRoute, + lineBreakRoute, + propListRoute, + scriptsRoute, + unicodeDataRoute, +} from "./routes"; + +export { + createHttpBackend, + createHttpSource, + createMemoryBackend, + createMemorySource, + createUnicodeOrgSource, + type HttpBackendOptions, + type HttpSourceOptions, + type MemoryBackendOptions, + type MemoryFile, + type MemorySourceOptions, + UNICODE_ORG_BASE_URL, + unicodeOrgSource, +} from "./sources"; + +export { + createDeduplicateTransform, + createExpandRangesTransform, + createFilterByPipelineFilter, + createNormalizeTransform, + createRowFilter, + createSortTransform, + type DeduplicateOptions, + deduplicateRows, + type DeduplicateStrategy, + expandRanges, + type ExpandRangesOptions, + normalizeCodePoints, + type RowFilterOptions, + sortByCodePoint, + type SortDirection, + type SortOptions, +} from "./transforms"; diff --git a/packages/pipelines/pipeline-presets/src/parsers/index.ts b/packages/pipelines/pipeline-presets/src/parsers/index.ts new file mode 100644 index 000000000..740d4a2c7 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/parsers/index.ts @@ -0,0 +1,23 @@ +export { + createMultiPropertyParser, + multiPropertyParser, + type MultiPropertyParserOptions, +} from "./multi-property"; + +export { + createSequenceParser, + sequenceParser, + type SequenceParserOptions, +} from "./sequence"; + +export { + createStandardParser, + standardParser, + type StandardParserOptions, +} from "./standard"; + +export { + type UnicodeDataMeta, + unicodeDataParser, + type UnicodeDataRow, +} from "./unicode-data"; diff --git a/packages/pipelines/pipeline-presets/src/parsers/multi-property.ts b/packages/pipelines/pipeline-presets/src/parsers/multi-property.ts new file mode 100644 index 000000000..e32d5468f --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/parsers/multi-property.ts @@ -0,0 +1,71 @@ +import type { ParseContext, ParsedRow, ParserFn } 
from "@ucdjs/pipelines-core"; + +export interface MultiPropertyParserOptions { + delimiter?: string; + propertyMarker?: string; + trimFields?: boolean; +} + +function parseCodePointOrRange(field: string): { kind: ParsedRow["kind"]; start?: string; end?: string; codePoint?: string } { + const trimmed = field.trim(); + + if (trimmed.includes("..")) { + const [start, end] = trimmed.split(".."); + return { kind: "range", start: start.trim(), end: end.trim() }; + } + + return { kind: "point", codePoint: trimmed }; +} + +export function createMultiPropertyParser(options: MultiPropertyParserOptions = {}): ParserFn { + const { delimiter = ";", propertyMarker = "@", trimFields = true } = options; + + return async function* multiPropertyParser(ctx: ParseContext): AsyncIterable { + let currentProperty: string | undefined; + + for await (const line of ctx.readLines()) { + const trimmedLine = line.trim(); + + if (trimmedLine.startsWith(`# ${propertyMarker}`)) { + const match = trimmedLine.match(/# @(\w+)=(\w+)/); + if (match) { + currentProperty = match[2]; + } + continue; + } + + if (ctx.isComment(line) || trimmedLine === "") { + continue; + } + + const commentIndex = trimmedLine.indexOf("#"); + const dataLine = commentIndex >= 0 ? trimmedLine.slice(0, commentIndex) : trimmedLine; + + if (dataLine.trim() === "") { + continue; + } + + const fields = dataLine.split(delimiter); + if (fields.length < 2) { + continue; + } + + const codePointField = trimFields ? fields[0].trim() : fields[0]; + const valueField = trimFields ? fields[1].trim() : fields[1]; + + const { kind, start, end, codePoint } = parseCodePointOrRange(codePointField); + + yield { + sourceFile: ctx.file.path, + kind, + start, + end, + codePoint, + property: currentProperty || valueField, + value: valueField, + }; + } + }; +} + +export const multiPropertyParser = createMultiPropertyParser(); diff --git a/packages/pipelines/pipeline-presets/src/parsers/sequence.ts b/packages/pipelines/pipeline-presets/src/parsers/sequence.ts new file mode 100644 index 000000000..28a403df7 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/parsers/sequence.ts @@ -0,0 +1,63 @@ +import type { ParseContext, ParsedRow, ParserFn } from "@ucdjs/pipelines-core"; + +export interface SequenceParserOptions { + delimiter?: string; + sequenceDelimiter?: string; + trimFields?: boolean; +} + +export function createSequenceParser(options: SequenceParserOptions = {}): ParserFn { + const { delimiter = ";", sequenceDelimiter = " ", trimFields = true } = options; + + return async function* sequenceParser(ctx: ParseContext): AsyncIterable { + for await (const line of ctx.readLines()) { + if (ctx.isComment(line)) { + continue; + } + + const trimmedLine = line.trim(); + if (trimmedLine === "") { + continue; + } + + const commentIndex = trimmedLine.indexOf("#"); + const dataLine = commentIndex >= 0 ? trimmedLine.slice(0, commentIndex) : trimmedLine; + + if (dataLine.trim() === "") { + continue; + } + + const fields = dataLine.split(delimiter); + if (fields.length < 2) { + continue; + } + + const sequenceField = trimFields ? fields[0].trim() : fields[0]; + const valueField = trimFields ? 
fields[1].trim() : fields[1]; + + const codePoints = sequenceField.split(sequenceDelimiter).filter(Boolean); + + if (codePoints.length === 0) { + continue; + } + + if (codePoints.length === 1) { + yield { + sourceFile: ctx.file.path, + kind: "point", + codePoint: codePoints[0], + value: valueField, + }; + } else { + yield { + sourceFile: ctx.file.path, + kind: "sequence", + sequence: codePoints, + value: valueField, + }; + } + } + }; +} + +export const sequenceParser = createSequenceParser(); diff --git a/packages/pipelines/pipeline-presets/src/parsers/standard.ts b/packages/pipelines/pipeline-presets/src/parsers/standard.ts new file mode 100644 index 000000000..bb540fa21 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/parsers/standard.ts @@ -0,0 +1,63 @@ +import type { ParseContext, ParsedRow, ParserFn } from "@ucdjs/pipelines-core"; + +export interface StandardParserOptions { + delimiter?: string; + trimFields?: boolean; + skipEmpty?: boolean; +} + +function parseCodePointOrRange(field: string): { kind: ParsedRow["kind"]; start?: string; end?: string; codePoint?: string } { + const trimmed = field.trim(); + + if (trimmed.includes("..")) { + const [start, end] = trimmed.split(".."); + return { kind: "range", start: start!.trim(), end: end!.trim() }; + } + + return { kind: "point", codePoint: trimmed }; +} + +export function createStandardParser(options: StandardParserOptions = {}): ParserFn { + const { delimiter = ";", trimFields = true, skipEmpty = true } = options; + + return async function* standardParser(ctx: ParseContext): AsyncIterable { + for await (const line of ctx.readLines()) { + if (ctx.isComment(line)) { + continue; + } + + const trimmedLine = line.trim(); + if (skipEmpty && trimmedLine === "") { + continue; + } + + const commentIndex = trimmedLine.indexOf("#"); + const dataLine = commentIndex >= 0 ? trimmedLine.slice(0, commentIndex) : trimmedLine; + + if (dataLine.trim() === "") { + continue; + } + + const fields = dataLine.split(delimiter); + if (fields.length < 2) { + continue; + } + + const codePointField = trimFields ? fields[0]!.trim() : fields[0]; + const valueField = trimFields ? 
fields[1]!.trim() : fields[1]; + + const { kind, start, end, codePoint } = parseCodePointOrRange(codePointField!); + + yield { + sourceFile: ctx.file.path, + kind, + start, + end, + codePoint, + value: valueField, + }; + } + }; +} + +export const standardParser = createStandardParser(); diff --git a/packages/pipelines/pipeline-presets/src/parsers/unicode-data.ts b/packages/pipelines/pipeline-presets/src/parsers/unicode-data.ts new file mode 100644 index 000000000..a2e8d13e0 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/parsers/unicode-data.ts @@ -0,0 +1,106 @@ +import type { ParseContext, ParsedRow, ParserFn } from "@ucdjs/pipelines-core"; + +export interface UnicodeDataMeta { + characterName: string; + generalCategory: string; + canonicalCombiningClass: string; + bidiClass: string; + decompositionMapping: string; + numericType: string; + numericValue: string; + bidiMirrored: string; + unicode1Name: string; + isoComment: string; + simpleUppercaseMapping: string; + simpleLowercaseMapping: string; + simpleTitlecaseMapping: string; +} + +export type UnicodeDataRow = ParsedRow & { meta: UnicodeDataMeta }; + +export const unicodeDataParser: ParserFn = async function* ( + ctx: ParseContext, +): AsyncIterable { + let rangeStart: string | null = null; + let rangeName: string | null = null; + + for await (const line of ctx.readLines()) { + if (ctx.isComment(line)) { + continue; + } + + const trimmedLine = line.trim(); + if (trimmedLine === "") { + continue; + } + + const fields = trimmedLine.split(";"); + if (fields.length < 14) { + continue; + } + + const codePoint = fields[0].trim(); + const characterName = fields[1].trim(); + const generalCategory = fields[2].trim(); + + if (characterName.endsWith(", First>")) { + rangeStart = codePoint; + rangeName = characterName.replace(", First>", "").replace("<", ""); + continue; + } + + if (characterName.endsWith(", Last>") && rangeStart !== null) { + const row: UnicodeDataRow = { + sourceFile: ctx.file.path, + kind: "range", + start: rangeStart, + end: codePoint, + value: generalCategory, + meta: { + characterName: rangeName || "", + generalCategory, + canonicalCombiningClass: fields[3].trim(), + bidiClass: fields[4].trim(), + decompositionMapping: fields[5].trim(), + numericType: fields[6].trim(), + numericValue: fields[7].trim(), + bidiMirrored: fields[9].trim(), + unicode1Name: fields[10].trim(), + isoComment: fields[11].trim(), + simpleUppercaseMapping: fields[12].trim(), + simpleLowercaseMapping: fields[13].trim(), + simpleTitlecaseMapping: fields[14]?.trim() || "", + }, + }; + + rangeStart = null; + rangeName = null; + yield row; + continue; + } + + const row: UnicodeDataRow = { + sourceFile: ctx.file.path, + kind: "point", + codePoint, + value: generalCategory, + meta: { + characterName, + generalCategory, + canonicalCombiningClass: fields[3].trim(), + bidiClass: fields[4].trim(), + decompositionMapping: fields[5].trim(), + numericType: fields[6].trim(), + numericValue: fields[7].trim(), + bidiMirrored: fields[9].trim(), + unicode1Name: fields[10].trim(), + isoComment: fields[11].trim(), + simpleUppercaseMapping: fields[12].trim(), + simpleLowercaseMapping: fields[13].trim(), + simpleTitlecaseMapping: fields[14]?.trim() || "", + }, + }; + + yield row; + } +}; diff --git a/packages/pipelines/pipeline-presets/src/pipelines/basic.ts b/packages/pipelines/pipeline-presets/src/pipelines/basic.ts new file mode 100644 index 000000000..227b2ebf0 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/pipelines/basic.ts @@ -0,0 +1,36 @@ 
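+// Preset factory for a minimal UCD pipeline: the shared core routes plus a permissive fallback.
+// Usage sketch (inputs are intentionally empty; attach a source before executing):
+//   const pipeline = createBasicPipeline({ versions: ["16.0.0"] });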
+import { byExt, definePipeline } from "@ucdjs/pipelines-core"; +import { standardParser } from "../parsers/standard"; +import { propertyJsonResolver } from "../resolvers/property-json"; +import { coreRoutes } from "../routes/common"; + +export interface BasicPipelineOptions { + id?: string; + versions: string[]; + concurrency?: number; + strict?: boolean; +} + +export function createBasicPipeline(options: BasicPipelineOptions) { + const { + id = "basic-ucd", + versions, + concurrency = 4, + strict = false, + } = options; + + return definePipeline({ + id, + name: "Basic UCD Pipeline", + description: "Processes core Unicode Character Database files", + versions, + inputs: [], + routes: [...coreRoutes], + include: byExt(".txt"), + concurrency, + strict, + fallback: { + parser: standardParser, + resolver: propertyJsonResolver, + }, + }); +} diff --git a/packages/pipelines/pipeline-presets/src/pipelines/emoji.ts b/packages/pipelines/pipeline-presets/src/pipelines/emoji.ts new file mode 100644 index 000000000..949d304cf --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/pipelines/emoji.ts @@ -0,0 +1,36 @@ +import { and, byDir, byExt, definePipeline } from "@ucdjs/pipelines-core"; +import { sequenceParser } from "../parsers/sequence"; +import { propertyJsonResolver } from "../resolvers/property-json"; +import { emojiRoutes } from "../routes/common"; + +export interface EmojiPipelineOptions { + id?: string; + versions: string[]; + concurrency?: number; + strict?: boolean; +} + +export function createEmojiPipeline(options: EmojiPipelineOptions) { + const { + id = "emoji", + versions, + concurrency = 4, + strict = false, + } = options; + + return definePipeline({ + id, + name: "Emoji Pipeline", + description: "Processes Unicode emoji data files", + versions, + inputs: [], + routes: [...emojiRoutes], + include: and(byDir("emoji"), byExt(".txt")), + concurrency, + strict, + fallback: { + parser: sequenceParser, + resolver: propertyJsonResolver, + }, + }); +} diff --git a/packages/pipelines/pipeline-presets/src/pipelines/full.ts b/packages/pipelines/pipeline-presets/src/pipelines/full.ts new file mode 100644 index 000000000..256fae8d8 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/pipelines/full.ts @@ -0,0 +1,36 @@ +import { byExt, definePipeline } from "@ucdjs/pipelines-core"; +import { standardParser } from "../parsers/standard"; +import { propertyJsonResolver } from "../resolvers/property-json"; +import { allRoutes } from "../routes/common"; + +export interface FullPipelineOptions { + id?: string; + versions: string[]; + concurrency?: number; + strict?: boolean; +} + +export function createFullPipeline(options: FullPipelineOptions) { + const { + id = "full-ucd", + versions, + concurrency = 4, + strict = false, + } = options; + + return definePipeline({ + id, + name: "Full UCD Pipeline", + description: "Processes all Unicode Character Database files", + versions, + inputs: [], + routes: [...allRoutes], + include: byExt(".txt"), + concurrency, + strict, + fallback: { + parser: standardParser, + resolver: propertyJsonResolver, + }, + }); +} diff --git a/packages/pipelines/pipeline-presets/src/pipelines/index.ts b/packages/pipelines/pipeline-presets/src/pipelines/index.ts new file mode 100644 index 000000000..c7f2b4ae7 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/pipelines/index.ts @@ -0,0 +1,14 @@ +export { + type BasicPipelineOptions, + createBasicPipeline, +} from "./basic"; + +export { + createEmojiPipeline, + type EmojiPipelineOptions, +} from "./emoji"; + 
+export {
+  createFullPipeline,
+  type FullPipelineOptions,
+} from "./full";
diff --git a/packages/pipelines/pipeline-presets/src/resolvers/grouped.ts b/packages/pipelines/pipeline-presets/src/resolvers/grouped.ts
new file mode 100644
index 000000000..91d869f14
--- /dev/null
+++ b/packages/pipelines/pipeline-presets/src/resolvers/grouped.ts
@@ -0,0 +1,75 @@
+import type { ParsedRow, PropertyJson, ResolvedEntry, RouteResolveContext } from "@ucdjs/pipelines-core";
+
+export interface GroupedResolverOptions {
+  groupBy: "property" | "value" | ((row: ParsedRow) => string);
+  propertyNameFn?: (groupKey: string, ctx: RouteResolveContext) => string;
+}
+
+function rowToResolvedEntry(row: ParsedRow): ResolvedEntry | null {
+  const value = row.value;
+  if (value === undefined) {
+    return null;
+  }
+
+  if (row.kind === "point" && row.codePoint) {
+    return { codePoint: row.codePoint, value };
+  }
+
+  if (row.kind === "range" && row.start && row.end) {
+    return { range: `${row.start}..${row.end}`, value };
+  }
+
+  if (row.kind === "sequence" && row.sequence) {
+    return { sequence: row.sequence, value };
+  }
+
+  return null;
+}
+
+export function createGroupedResolver(options: GroupedResolverOptions) {
+  const { groupBy, propertyNameFn } = options;
+
+  const getGroupKey = typeof groupBy === "function"
+    ? groupBy
+    : groupBy === "property"
+      ? (row: ParsedRow) => row.property || "unknown"
+      : (row: ParsedRow) => {
+          const v = row.value;
+          return Array.isArray(v) ? v.join(",") : v || "unknown";
+        };
+
+  return async function groupedResolver(
+    ctx: RouteResolveContext,
+    rows: AsyncIterable<ParsedRow>,
+  ): Promise<PropertyJson[]> {
+    const groups = new Map<string, ResolvedEntry[]>();
+
+    for await (const row of rows) {
+      const key = getGroupKey(row);
+      const entry = rowToResolvedEntry(row);
+
+      if (entry) {
+        const existing = groups.get(key) || [];
+        existing.push(entry);
+        groups.set(key, existing);
+      }
+    }
+
+    const results: PropertyJson[] = [];
+
+    for (const [key, entries] of groups) {
+      const propertyName = propertyNameFn
+        ? propertyNameFn(key, ctx)
+        : key;
+
+      results.push({
+        version: ctx.version,
+        property: propertyName,
+        file: ctx.file.name,
+        entries: ctx.normalizeEntries(entries),
+      });
+    }
+
+    return results;
+  };
+}
diff --git a/packages/pipelines/pipeline-presets/src/resolvers/index.ts b/packages/pipelines/pipeline-presets/src/resolvers/index.ts
new file mode 100644
index 000000000..cba8b9d1b
--- /dev/null
+++ b/packages/pipelines/pipeline-presets/src/resolvers/index.ts
@@ -0,0 +1,10 @@
+export {
+  createGroupedResolver,
+  type GroupedResolverOptions,
+} from "./grouped";
+
+export {
+  createPropertyJsonResolver,
+  propertyJsonResolver,
+  type PropertyJsonResolverOptions,
+} from "./property-json";
diff --git a/packages/pipelines/pipeline-presets/src/resolvers/property-json.ts b/packages/pipelines/pipeline-presets/src/resolvers/property-json.ts
new file mode 100644
index 000000000..945bf436a
--- /dev/null
+++ b/packages/pipelines/pipeline-presets/src/resolvers/property-json.ts
@@ -0,0 +1,66 @@
+import type { ArtifactDefinition, ParsedRow, PropertyJson, ResolvedEntry, RouteResolveContext } from "@ucdjs/pipelines-core";
+
+export interface PropertyJsonResolverOptions {
+  property?: string;
+  includeDefaults?: boolean;
+}
+
+function rowToResolvedEntry(row: ParsedRow): ResolvedEntry | null {
+  const value = row.value;
+  if (value === undefined) {
+    return null;
+  }
+
+  if (row.kind === "point" && row.codePoint) {
+    return {
+      codePoint: row.codePoint,
+      value,
+    };
+  }
+
+  if (row.kind === "range" && row.start && row.end) {
+    return {
+      range: `${row.start}..${row.end}`,
+      value,
+    };
+  }
+
+  if (row.kind === "sequence" && row.sequence) {
+    return {
+      sequence: row.sequence,
+      value,
+    };
+  }
+
+  return null;
+}
+
+export function createPropertyJsonResolver(options: PropertyJsonResolverOptions = {}) {
+  return async function propertyJsonResolver<
+    TArtifactKeys extends string,
+    TEmits extends Record<TArtifactKeys, ArtifactDefinition>,
+  >(
+    ctx: RouteResolveContext,
+    rows: AsyncIterable<ParsedRow>,
+  ): Promise<PropertyJson[]> {
+    const entries: ResolvedEntry[] = [];
+
+    for await (const row of rows) {
+      const entry = rowToResolvedEntry(row);
+      if (entry) {
+        entries.push(entry);
+      }
+    }
+
+    const propertyName = options.property || ctx.file.name.replace(/\.txt$/, "");
+
+    return [{
+      version: ctx.version,
+      property: propertyName,
+      file: ctx.file.name,
+      entries: ctx.normalizeEntries(entries),
+    }];
+  };
+}
+
+export const propertyJsonResolver = createPropertyJsonResolver();
diff --git a/packages/pipelines/pipeline-presets/src/routes/common.ts b/packages/pipelines/pipeline-presets/src/routes/common.ts
new file mode 100644
index 000000000..1e307fcf7
--- /dev/null
+++ b/packages/pipelines/pipeline-presets/src/routes/common.ts
@@ -0,0 +1,104 @@
+import {
+  byGlob,
+  byName,
+  definePipelineRoute,
+} from "@ucdjs/pipelines-core";
+import { standardParser } from "../parsers/standard";
+import { unicodeDataParser } from "../parsers/unicode-data";
+import { createGroupedResolver } from "../resolvers/grouped";
+import { propertyJsonResolver } from "../resolvers/property-json";
+import { normalizeCodePoints } from "../transforms/normalize";
+import { sortByCodePoint } from "../transforms/sort";
+
+export const lineBreakRoute = definePipelineRoute({
+  id: "line-break",
+  filter: byName("LineBreak.txt"),
+  parser: standardParser,
+  transforms: [normalizeCodePoints, sortByCodePoint],
+  resolver: propertyJsonResolver,
+});
+
+export const scriptsRoute = definePipelineRoute({
+  id: "scripts",
+  filter: byName("Scripts.txt"),
+  parser: standardParser,
+  transforms: [normalizeCodePoints, sortByCodePoint],
+  resolver: propertyJsonResolver,
+});
+
+export const blocksRoute = definePipelineRoute({
+  id: "blocks",
+  filter: byName("Blocks.txt"),
+  parser: standardParser,
+  transforms: [normalizeCodePoints, sortByCodePoint],
+  resolver: propertyJsonResolver,
+});
+
+export const generalCategoryRoute = definePipelineRoute({
+  id: "general-category",
+  filter: byName("extracted/DerivedGeneralCategory.txt"),
+  parser: standardParser,
+  transforms: [normalizeCodePoints, sortByCodePoint],
+  resolver: propertyJsonResolver,
+});
+
+export const propListRoute = definePipelineRoute({
+  id: "prop-list",
+  filter: byName("PropList.txt"),
+  parser: standardParser,
+  transforms: [normalizeCodePoints, sortByCodePoint],
+  resolver: createGroupedResolver({
+    groupBy: "value",
+    propertyNameFn: (value) => value,
+  }),
+});
+
+export const derivedCorePropertiesRoute = definePipelineRoute({
+  id: "derived-core-properties",
+  filter: byName("DerivedCoreProperties.txt"),
+  parser: standardParser,
+  transforms: [normalizeCodePoints, sortByCodePoint],
+  resolver: createGroupedResolver({
+    groupBy: "value",
+    propertyNameFn: (value) => value,
+  }),
+});
+
+export const emojiDataRoute = definePipelineRoute({
+  id: "emoji-data",
+  filter: byGlob("emoji/emoji-data.txt"),
+  parser: standardParser,
+  transforms: [normalizeCodePoints, sortByCodePoint],
+  resolver: createGroupedResolver({
+    groupBy: "value",
+    propertyNameFn: (value) => `Emoji_${value}`,
+  }),
+});
+
+export const unicodeDataRoute = definePipelineRoute({
+  id: "unicode-data",
+  filter: byName("UnicodeData.txt"),
+  parser: unicodeDataParser,
+  transforms: [normalizeCodePoints],
+  resolver: propertyJsonResolver,
+});
+
+export const coreRoutes = [
+  lineBreakRoute,
+  scriptsRoute,
+  blocksRoute,
+  generalCategoryRoute,
+  propListRoute,
+  derivedCorePropertiesRoute,
+  unicodeDataRoute,
+] as const;
+
+export const emojiRoutes = [
+  emojiDataRoute,
+] as const;
+
+export const allRoutes = [
+  ...coreRoutes,
+  ...emojiRoutes,
+] as const;
diff --git a/packages/pipelines/pipeline-presets/src/routes/index.ts b/packages/pipelines/pipeline-presets/src/routes/index.ts
new file mode 100644
index 000000000..139b580b2
--- /dev/null
+++ b/packages/pipelines/pipeline-presets/src/routes/index.ts
@@ -0,0 +1,13 @@
+export {
+  allRoutes,
+  blocksRoute,
+  coreRoutes,
+  derivedCorePropertiesRoute,
+  emojiDataRoute,
+  emojiRoutes,
+  generalCategoryRoute,
+  lineBreakRoute,
+  propListRoute,
+  scriptsRoute,
+  unicodeDataRoute,
+} from "./common";
diff --git a/packages/pipelines/pipeline-presets/src/sources/http.ts b/packages/pipelines/pipeline-presets/src/sources/http.ts
new file mode 100644
index 000000000..91b569298
--- /dev/null
+++ b/packages/pipelines/pipeline-presets/src/sources/http.ts
@@ -0,0 +1,107 @@
+import type {
+  FileContext,
+  FileMetadata,
+  PipelineSourceDefinition,
+  SourceBackend,
+} from "@ucdjs/pipelines-core";
+import { definePipelineSource } from "@ucdjs/pipelines-core";
+
+export interface HttpBackendOptions {
+  baseUrl: string;
+  headers?: Record<string, string>;
+  timeout?: number;
+}
+
+export function createHttpBackend(options: HttpBackendOptions): SourceBackend {
+  const { baseUrl, headers = {}, timeout = 30000 } = options;
+
+  const normalizedBaseUrl = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`;
+
+  return {
+    async listFiles(version: string): Promise<FileContext[]> {
+      throw new Error(
+        `HTTP backend does not support listing files. `
+        + `Use a file manifest or provide explicit file list for version ${version}.`,
+      );
+    },
+
+    async readFile(file: FileContext): Promise<string> {
+      const url = `${normalizedBaseUrl}${file.version}/${file.path}`;
+
+      const controller = new AbortController();
+      const timeoutId = setTimeout(() => controller.abort(), timeout);
+
+      try {
+        const response = await fetch(url, {
+          headers,
+          signal: controller.signal,
+        });
+
+        if (!response.ok) {
+          throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
+        }
+
+        return response.text();
+      } finally {
+        clearTimeout(timeoutId);
+      }
+    },
+
+    async getMetadata(file: FileContext): Promise<FileMetadata> {
+      const url = `${normalizedBaseUrl}${file.version}/${file.path}`;
+
+      const controller = new AbortController();
+      const timeoutId = setTimeout(() => controller.abort(), timeout);
+
+      try {
+        const response = await fetch(url, {
+          method: "HEAD",
+          headers,
+          signal: controller.signal,
+        });
+
+        if (!response.ok) {
+          throw new Error(`Failed to get metadata for ${url}: ${response.status}`);
+        }
+
+        const contentLength = response.headers.get("content-length");
+        const lastModified = response.headers.get("last-modified");
+
+        return {
+          size: contentLength ? Number.parseInt(contentLength, 10) : 0,
+          lastModified: lastModified || undefined,
+        };
+      } finally {
+        clearTimeout(timeoutId);
+      }
+    },
+  };
+}
+
+export interface HttpSourceOptions extends HttpBackendOptions {
+  id?: string;
+}
+
+export function createHttpSource<TId extends string | undefined = undefined>(
+  options: HttpSourceOptions & { id?: TId },
+): PipelineSourceDefinition<TId extends undefined ? "http" : TId> {
+  const { id = "http" as TId, ...backendOptions } = options;
+
+  return definePipelineSource({
+    id: id as TId extends undefined ? "http" : TId,
+    backend: createHttpBackend(backendOptions),
+  });
+}
+
+export const UNICODE_ORG_BASE_URL = "https://www.unicode.org/Public/";
+
+export function createUnicodeOrgSource<TId extends string | undefined = undefined>(
+  id?: TId,
+): PipelineSourceDefinition<TId extends undefined ? "unicode-org" : TId> {
+  return createHttpSource({
+    id: (id ?? "unicode-org") as TId extends undefined ? "unicode-org" : TId,
+    baseUrl: UNICODE_ORG_BASE_URL,
+  });
+}
+
+export const unicodeOrgSource = createUnicodeOrgSource();
diff --git a/packages/pipelines/pipeline-presets/src/sources/index.ts b/packages/pipelines/pipeline-presets/src/sources/index.ts
new file mode 100644
index 000000000..e72bedba1
--- /dev/null
+++ b/packages/pipelines/pipeline-presets/src/sources/index.ts
@@ -0,0 +1,17 @@
+export {
+  createHttpBackend,
+  createHttpSource,
+  createUnicodeOrgSource,
+  type HttpBackendOptions,
+  type HttpSourceOptions,
+  UNICODE_ORG_BASE_URL,
+  unicodeOrgSource,
+} from "./http";
+
+export {
+  createMemoryBackend,
+  createMemorySource,
+  type MemoryBackendOptions,
+  type MemoryFile,
+  type MemorySourceOptions,
+} from "./memory";
diff --git a/packages/pipelines/pipeline-presets/src/sources/memory.ts b/packages/pipelines/pipeline-presets/src/sources/memory.ts
new file mode 100644
index 000000000..297ae29f6
--- /dev/null
+++ b/packages/pipelines/pipeline-presets/src/sources/memory.ts
@@ -0,0 +1,95 @@
+import type {
+  FileContext,
+  FileMetadata,
+  PipelineSourceDefinition,
+  SourceBackend,
+} from "@ucdjs/pipelines-core";
+import { definePipelineSource } from "@ucdjs/pipelines-core";
+
+export interface MemoryFile {
+  path: string;
+  content: string;
+  dir?: FileContext["dir"];
+}
+
+export interface MemoryBackendOptions {
+  files: Record<string, MemoryFile[]>;
+}
+
+function getFileContext(version: string, file: MemoryFile): FileContext {
+  const path = file.path;
+  const parts = path.split("/");
+  const name = parts[parts.length - 1];
+  const extIndex = name.lastIndexOf(".");
+  const ext = extIndex >= 0 ? name.slice(extIndex) : "";
+  const dir = file.dir || parts[0] || "ucd";
+
+  return {
+    version,
+    dir,
+    path,
+    name,
+    ext,
+  };
+}
+
+export function createMemoryBackend(options: MemoryBackendOptions): SourceBackend {
+  const { files } = options;
+
+  return {
+    async listFiles(version: string): Promise<FileContext[]> {
+      const versionFiles = files[version];
+      if (!versionFiles) {
+        return [];
+      }
+
+      return versionFiles.map((f) => getFileContext(version, f));
+    },
+
+    async readFile(file: FileContext): Promise<string> {
+      const versionFiles = files[file.version];
+      if (!versionFiles) {
+        throw new Error(`Version ${file.version} not found in memory backend`);
+      }
+
+      const memFile = versionFiles.find((f) => f.path === file.path);
+      if (!memFile) {
+        throw new Error(`File ${file.path} not found in version ${file.version}`);
+      }
+
+      return memFile.content;
+    },
+
+    async getMetadata(file: FileContext): Promise<FileMetadata> {
+      const versionFiles = files[file.version];
+      if (!versionFiles) {
+        throw new Error(`Version ${file.version} not found in memory backend`);
+      }
+
+      const memFile = versionFiles.find((f) => f.path === file.path);
+      if (!memFile) {
+        throw new Error(`File ${file.path} not found in version ${file.version}`);
+      }
+
+      return {
+        size: new TextEncoder().encode(memFile.content).length,
+      };
+    },
+  };
+}
+
+export interface MemorySourceOptions {
+  id?: string;
+  files: Record<string, MemoryFile[]>;
+}
+
+export function createMemorySource<TId extends string | undefined = undefined>(
+  options: MemorySourceOptions & { id?: TId },
+): PipelineSourceDefinition<TId extends undefined ? "memory" : TId> {
+  const { id = "memory" as TId, files } = options;
+
+  return definePipelineSource({
+    id: id as TId extends undefined ? 
"memory" : TId, + backend: createMemoryBackend({ files }), + }); +} diff --git a/packages/pipelines/pipeline-presets/src/transforms/deduplicate.ts b/packages/pipelines/pipeline-presets/src/transforms/deduplicate.ts new file mode 100644 index 000000000..dd82bb10b --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/transforms/deduplicate.ts @@ -0,0 +1,74 @@ +import type { ParsedRow } from "@ucdjs/pipelines-core"; +import { definePipelineTransform } from "@ucdjs/pipelines-core"; + +function getRowKey(row: ParsedRow): string { + if (row.kind === "point" && row.codePoint) { + return `point:${row.codePoint}`; + } + if (row.kind === "range" && row.start && row.end) { + return `range:${row.start}..${row.end}`; + } + if (row.kind === "sequence" && row.sequence) { + return `seq:${row.sequence.join("-")}`; + } + return `unknown:${JSON.stringify(row)}`; +} + +export const deduplicateRows = definePipelineTransform({ + id: "deduplicate-rows", + async* fn(_ctx, rows) { + const seen = new Set(); + + for await (const row of rows) { + const key = getRowKey(row); + + if (!seen.has(key)) { + seen.add(key); + yield row; + } + } + }, +}); + +export type DeduplicateStrategy = "first" | "last" | "merge"; + +export interface DeduplicateOptions { + strategy?: DeduplicateStrategy; + keyFn?: (row: ParsedRow) => string; +} + +export function createDeduplicateTransform(options: DeduplicateOptions = {}) { + const { strategy = "first", keyFn = getRowKey } = options; + + if (strategy === "last") { + return definePipelineTransform({ + id: "deduplicate-rows-last", + async* fn(_ctx, rows) { + const byKey = new Map(); + + for await (const row of rows) { + const key = keyFn(row); + byKey.set(key, row); + } + + yield* byKey.values(); + }, + }); + } + + return definePipelineTransform({ + id: "deduplicate-rows-first", + async* fn(_ctx, rows) { + const seen = new Set(); + + for await (const row of rows) { + const key = keyFn(row); + + if (!seen.has(key)) { + seen.add(key); + yield row; + } + } + }, + }); +} diff --git a/packages/pipelines/pipeline-presets/src/transforms/expand-ranges.ts b/packages/pipelines/pipeline-presets/src/transforms/expand-ranges.ts new file mode 100644 index 000000000..5068dfbbe --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/transforms/expand-ranges.ts @@ -0,0 +1,72 @@ +import type { ParsedRow } from "@ucdjs/pipelines-core"; +import { definePipelineTransform } from "@ucdjs/pipelines-core"; + +function hexToNumber(hex: string): number { + return Number.parseInt(hex, 16); +} + +function numberToHex(num: number): string { + return num.toString(16).toUpperCase().padStart(4, "0"); +} + +export const expandRanges = definePipelineTransform({ + id: "expand-ranges", + async* fn(_ctx, rows) { + for await (const row of rows) { + if (row.kind === "range" && row.start && row.end) { + const start = hexToNumber(row.start); + const end = hexToNumber(row.end); + + for (let i = start; i <= end; i++) { + yield { + ...row, + kind: "point", + codePoint: numberToHex(i), + start: undefined, + end: undefined, + }; + } + } else { + yield row; + } + } + }, +}); + +export interface ExpandRangesOptions { + maxExpansion?: number; +} + +export function createExpandRangesTransform(options: ExpandRangesOptions = {}) { + const { maxExpansion = 10000 } = options; + + return definePipelineTransform({ + id: "expand-ranges-limited", + async* fn(_ctx, rows) { + for await (const row of rows) { + if (row.kind === "range" && row.start && row.end) { + const start = hexToNumber(row.start); + const end = hexToNumber(row.end); + 
const size = end - start + 1; + + if (size > maxExpansion) { + yield row; + continue; + } + + for (let i = start; i <= end; i++) { + yield { + ...row, + kind: "point", + codePoint: numberToHex(i), + start: undefined, + end: undefined, + }; + } + } else { + yield row; + } + } + }, + }); +} diff --git a/packages/pipelines/pipeline-presets/src/transforms/filter.ts b/packages/pipelines/pipeline-presets/src/transforms/filter.ts new file mode 100644 index 000000000..ac1332dcd --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/transforms/filter.ts @@ -0,0 +1,71 @@ +import type { ParsedRow, PipelineFilter } from "@ucdjs/pipelines-core"; +import { definePipelineTransform } from "@ucdjs/pipelines-core"; + +export interface RowFilterOptions { + property?: string | RegExp; + value?: string | RegExp; + kind?: ParsedRow["kind"] | ParsedRow["kind"][]; +} + +export function createRowFilter(options: RowFilterOptions) { + return definePipelineTransform({ + id: "row-filter", + async* fn(ctx, rows) { + for await (const row of rows) { + if (options.property) { + if (!row.property) { + continue; + } + if (typeof options.property === "string") { + if (row.property !== options.property) { + continue; + } + } else if (!options.property.test(row.property)) { + continue; + } + } + + if (options.value) { + const rowValue = Array.isArray(row.value) ? row.value.join(",") : row.value; + if (!rowValue) { + continue; + } + if (typeof options.value === "string") { + if (rowValue !== options.value) { + continue; + } + } else if (!options.value.test(rowValue)) { + continue; + } + } + + if (options.kind) { + const kinds = Array.isArray(options.kind) ? options.kind : [options.kind]; + if (!kinds.includes(row.kind)) { + continue; + } + } + + yield row; + } + }, + }); +} + +export function createFilterByPipelineFilter(filter: PipelineFilter) { + return definePipelineTransform({ + id: "filter-by-pipeline-filter", + async* fn(ctx, rows) { + for await (const row of rows) { + const filterCtx = { + file: ctx.file, + row: row.property ? 
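Looping back to `expand-ranges.ts` above, the `maxExpansion` guard works out like this (row shapes follow the `ParsedRow` usage in that file):

```ts
// With createExpandRangesTransform({ maxExpansion: 4 }):
//
//   { kind: "range", start: "0041", end: "0043" }   // size 3 <= 4, expanded
//     -> { kind: "point", codePoint: "0041" }
//     -> { kind: "point", codePoint: "0042" }
//     -> { kind: "point", codePoint: "0043" }
//
//   { kind: "range", start: "4E00", end: "9FFF" }   // size 20992 > 4, passed through
//     -> { kind: "range", start: "4E00", end: "9FFF" }
//
// numberToHex pads to at least four digits, so 0x41 round-trips as "0041".
```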
{ property: row.property } : undefined, + }; + + if (filter(filterCtx)) { + yield row; + } + } + }, + }); +} diff --git a/packages/pipelines/pipeline-presets/src/transforms/index.ts b/packages/pipelines/pipeline-presets/src/transforms/index.ts new file mode 100644 index 000000000..53fddc6b3 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/transforms/index.ts @@ -0,0 +1,30 @@ +export { + createDeduplicateTransform, + type DeduplicateOptions, + deduplicateRows, + type DeduplicateStrategy, +} from "./deduplicate"; + +export { + createExpandRangesTransform, + expandRanges, + type ExpandRangesOptions, +} from "./expand-ranges"; + +export { + createFilterByPipelineFilter, + createRowFilter, + type RowFilterOptions, +} from "./filter"; + +export { + createNormalizeTransform, + normalizeCodePoints, +} from "./normalize"; + +export { + createSortTransform, + sortByCodePoint, + type SortDirection, + type SortOptions, +} from "./sort"; diff --git a/packages/pipelines/pipeline-presets/src/transforms/normalize.ts b/packages/pipelines/pipeline-presets/src/transforms/normalize.ts new file mode 100644 index 000000000..a9c80fcf7 --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/transforms/normalize.ts @@ -0,0 +1,66 @@ +import type { ParsedRow, PipelineTransformDefinition } from "@ucdjs/pipelines-core"; +import { definePipelineTransform } from "@ucdjs/pipelines-core"; + +function normalizeHex(hex: string): string { + return hex.toUpperCase().replace(/^0+/, "") || "0"; +} + +function padHex(hex: string, length: number = 4): string { + return hex.toUpperCase().padStart(length, "0"); +} + +export const normalizeCodePoints = definePipelineTransform({ + id: "normalize-code-points", + async* fn(_ctx, rows) { + for await (const row of rows) { + const normalized = { ...row }; + + if (normalized.codePoint) { + normalized.codePoint = padHex(normalizeHex(normalized.codePoint)); + } + + if (normalized.start) { + normalized.start = padHex(normalizeHex(normalized.start)); + } + + if (normalized.end) { + normalized.end = padHex(normalizeHex(normalized.end)); + } + + if (normalized.sequence) { + normalized.sequence = normalized.sequence.map((cp) => padHex(normalizeHex(cp))); + } + + yield normalized; + } + }, +}); + +export function createNormalizeTransform(padLength: number = 4): PipelineTransformDefinition { + return definePipelineTransform({ + id: `normalize-code-points-${padLength}`, + async* fn(_ctx, rows) { + for await (const row of rows) { + const normalized = { ...row }; + + if (normalized.codePoint) { + normalized.codePoint = padHex(normalizeHex(normalized.codePoint), padLength); + } + + if (normalized.start) { + normalized.start = padHex(normalizeHex(normalized.start), padLength); + } + + if (normalized.end) { + normalized.end = padHex(normalizeHex(normalized.end), padLength); + } + + if (normalized.sequence) { + normalized.sequence = normalized.sequence.map((cp) => padHex(normalizeHex(cp), padLength)); + } + + yield normalized; + } + }, + }); +} diff --git a/packages/pipelines/pipeline-presets/src/transforms/sort.ts b/packages/pipelines/pipeline-presets/src/transforms/sort.ts new file mode 100644 index 000000000..2174488ab --- /dev/null +++ b/packages/pipelines/pipeline-presets/src/transforms/sort.ts @@ -0,0 +1,61 @@ +import type { ParsedRow } from "@ucdjs/pipelines-core"; +import { definePipelineTransform } from "@ucdjs/pipelines-core"; + +function hexToNumber(hex: string): number { + return Number.parseInt(hex, 16); +} + +function getRowSortKey(row: ParsedRow): number { + if 
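On `normalize.ts` above, a few worked values show how the two helpers compose:

```ts
// normalizeHex strips leading zeros and uppercases; padHex re-pads to a fixed
// width (default 4). Net effect of normalizeCodePoints:
//
//   "41"      -> "0041"
//   "0041"    -> "0041"   (idempotent)
//   "10ffff"  -> "10FFFF" (already wider than 4, left unpadded)
//
// createNormalizeTransform(6) pads to six digits instead:
//   "41"     -> "000041"
//   "10FFFF" -> "10FFFF"
```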
(row.codePoint) { + return hexToNumber(row.codePoint); + } + if (row.start) { + return hexToNumber(row.start); + } + if (row.sequence && row.sequence.length > 0) { + return hexToNumber(row.sequence[0]); + } + return 0; +} + +export const sortByCodePoint = definePipelineTransform({ + id: "sort-by-code-point", + async* fn(_ctx, rows) { + const collected: ParsedRow[] = []; + + for await (const row of rows) { + collected.push(row); + } + + collected.sort((a, b) => getRowSortKey(a) - getRowSortKey(b)); + + yield* collected; + }, +}); + +export type SortDirection = "asc" | "desc"; + +export interface SortOptions { + direction?: SortDirection; + keyFn?: (row: ParsedRow) => number; +} + +export function createSortTransform(options: SortOptions = {}) { + const { direction = "asc", keyFn = getRowSortKey } = options; + const multiplier = direction === "asc" ? 1 : -1; + + return definePipelineTransform({ + id: `sort-${direction}`, + async* fn(_ctx, rows) { + const collected: ParsedRow[] = []; + + for await (const row of rows) { + collected.push(row); + } + + collected.sort((a, b) => multiplier * (keyFn(a) - keyFn(b))); + + yield* collected; + }, + }); +} diff --git a/packages/pipelines/pipeline-presets/tsconfig.build.json b/packages/pipelines/pipeline-presets/tsconfig.build.json new file mode 100644 index 000000000..36c889e0c --- /dev/null +++ b/packages/pipelines/pipeline-presets/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "test"] +} diff --git a/packages/pipelines/pipeline-presets/tsconfig.json b/packages/pipelines/pipeline-presets/tsconfig.json new file mode 100644 index 000000000..9c6dd744b --- /dev/null +++ b/packages/pipelines/pipeline-presets/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + "include": [ + "src", + "test" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-presets/tsdown.config.ts b/packages/pipelines/pipeline-presets/tsdown.config.ts new file mode 100644 index 000000000..27f9d71b5 --- /dev/null +++ b/packages/pipelines/pipeline-presets/tsdown.config.ts @@ -0,0 +1,13 @@ +import { createTsdownConfig } from "@ucdjs-tooling/tsdown-config"; + +export default createTsdownConfig({ + entry: [ + "./src/index.ts", + "./src/parsers/index.ts", + "./src/transforms/index.ts", + "./src/resolvers/index.ts", + "./src/sources/index.ts", + "./src/routes/index.ts", + "./src/pipelines/index.ts", + ], +}); diff --git a/packages/pipelines/pipeline-server/drizzle.config.ts b/packages/pipelines/pipeline-server/drizzle.config.ts new file mode 100644 index 000000000..e4999e8a9 --- /dev/null +++ b/packages/pipelines/pipeline-server/drizzle.config.ts @@ -0,0 +1,10 @@ +import { defineConfig } from "drizzle-kit"; + +export default defineConfig({ + out: "./src/server/db/migrations", + schema: "./src/server/db/schema.ts", + dialect: "sqlite", + dbCredentials: { + url: "file:./pipeline-server.db", + }, +}); diff --git a/packages/pipelines/pipeline-server/eslint.config.js b/packages/pipelines/pipeline-server/eslint.config.js new file mode 100644 index 000000000..7071b430a --- /dev/null +++ b/packages/pipelines/pipeline-server/eslint.config.js @@ -0,0 +1,35 @@ +// @ts-check +import { luxass } from "@luxass/eslint-config"; +import pkg from "./package.json" with { type: "json" }; + +const clientOnlyImports = Object.keys(pkg.imports).filter((key) => !key.startsWith("#server/")); +const serverOnlyImports = Object.keys(pkg.imports).filter((key) => key.startsWith("#server/")); + +export default 
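On the sort transforms above: both variants drain the whole async stream into an array before yielding, trading streaming for a total order. A hedged usage sketch (the subpath import is an assumption based on the tsdown entries in this diff):

```ts
import { createSortTransform } from "@ucdjs/pipelines-presets/transforms"; // assumed subpath export

// Descending by the default key (codePoint, then range start, then the first
// sequence element, per getRowSortKey above).
const sortDesc = createSortTransform({ direction: "desc" });

// Custom key: order ranges by their end point instead (illustrative).
const byRangeEnd = createSortTransform({
  keyFn: (row) => Number.parseInt(row.end ?? row.codePoint ?? "0", 16),
});
```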
luxass({ + type: "app", + pnpm: true, + react: true, +}).append({ + files: [ + "./src/server/**/*.ts", + ], + rules: { + "no-restricted-imports": ["error", { + patterns: [ + ...clientOnlyImports, + ], + }], + }, +}).append({ + files: [ + "./src/client/**/*.ts", + "./src/client/**/*.tsx", + ], + rules: { + "no-restricted-imports": ["error", { + patterns: [ + ...serverOnlyImports, + ], + }], + }, +}); diff --git a/packages/pipelines/pipeline-server/index.html b/packages/pipelines/pipeline-server/index.html new file mode 100644 index 000000000..29bf19dcc --- /dev/null +++ b/packages/pipelines/pipeline-server/index.html @@ -0,0 +1,12 @@ + + + + + + UCD Pipeline UI + + +
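The config above derives both restriction lists from the package.json `imports` map (shown next), so the client/server split is enforced mechanically; a sketch of what each side would flag (file paths illustrative):

```ts
// In src/client/components/example.tsx, client code must not touch #server/*:
import { db } from "#server/db"; // flagged by no-restricted-imports

// In src/server/routes/example.ts, server code must not touch client aliases:
import { PipelineHeader } from "#components/pipeline-header"; // flagged by no-restricted-imports
```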
+ + + diff --git a/packages/pipelines/pipeline-server/package.json b/packages/pipelines/pipeline-server/package.json new file mode 100644 index 000000000..534d25130 --- /dev/null +++ b/packages/pipelines/pipeline-server/package.json @@ -0,0 +1,100 @@ +{ + "name": "@ucdjs/pipelines-server", + "version": "0.0.1", + "type": "module", + "author": { + "name": "Lucas Nørgård", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.27.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-server" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "sideEffects": false, + "imports": { + "#routes/*": "./src/client/routes/*.ts", + "#components/*": "./src/client/components/*.tsx", + "#hooks/*": "./src/client/hooks/*.ts", + "#lib/*": "./src/client/lib/*.ts", + "#server/*": "./src/server/*.ts", + "#server/db": "./src/server/db/index.ts", + "#server/routes": "./src/server/routes/index.ts" + }, + "exports": { + ".": "./dist/server/app.js", + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">=22.18" + }, + "scripts": { + "build": "vite build", + "dev": "vite dev --port 3031", + "clean": "git clean -xdf dist node_modules", + "lint": "eslint .", + "typecheck": "tsc --noEmit -p tsconfig.build.json", + "db:generate": "drizzle-kit generate", + "db:push": "drizzle-kit push", + "db:migrate": "drizzle-kit migrate" + }, + "dependencies": { + "@libsql/client": "catalog:prod", + "@ucdjs-internal/shared-ui": "workspace:*", + "@ucdjs/pipelines-core": "workspace:*", + "@ucdjs/pipelines-executor": "workspace:*", + "@ucdjs/pipelines-loader": "workspace:*", + "@ucdjs/pipelines-ui": "workspace:*", + "chokidar": "catalog:prod", + "crossws": "catalog:prod", + "drizzle-orm": "catalog:prod", + "esrap": "catalog:prod", + "h3": "catalog:prod", + "lucide-react": "catalog:web", + "oxc-parser": "catalog:prod", + "pathe": "catalog:prod", + "zod": "catalog:prod" + }, + "devDependencies": { + "@eslint-react/eslint-plugin": "catalog:linting", + "@luxass/eslint-config": "catalog:linting", + "@tailwindcss/vite": "catalog:web", + "@tanstack/react-router": "catalog:web", + "@tanstack/router-plugin": "catalog:web", + "@types/node": "catalog:types", + "@types/react": "catalog:types", + "@types/react-dom": "catalog:types", + "@ucdjs-tooling/tsconfig": "workspace:*", + "@ucdjs-tooling/tsdown-config": "workspace:*", + "@vitejs/plugin-react": "catalog:web", + "clsx": "catalog:web", + "drizzle-kit": "catalog:prod", + "eslint": "catalog:linting", + "eslint-plugin-react-hooks": "catalog:linting", + "eslint-plugin-react-refresh": "catalog:linting", + "publint": "catalog:build", + "react": "catalog:web", + "react-dom": "catalog:web", + "tailwind-merge": "catalog:web", + "tailwindcss": "catalog:web", + "tsdown": "catalog:build", + "typescript": "catalog:build", + "vite": "catalog:web", + "vite-tsconfig-paths": "catalog:web", + "vitest": "catalog:testing", + "vitest-testdirs": "catalog:testing" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/pipelines/pipeline-server/src/client/components/pipeline-command-palette.tsx b/packages/pipelines/pipeline-server/src/client/components/pipeline-command-palette.tsx new file mode 100644 index 000000000..8c224342d --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/components/pipeline-command-palette.tsx @@ -0,0 +1,187 @@ 
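With vitest and h3 available per the manifest above, a first fetch-based test of the new file-id scheme could look like this sketch; the `app` import path is an assumption:

```ts
import { describe, expect, it } from "vitest";
// Assumed: the server module exposes its h3 app instance.
import { app } from "../src/server/app";

describe("GET /api/pipelines", () => {
  it("returns fileIds joined with ~ and without .ts suffixes", async () => {
    const res = await app.fetch(new Request("http://localhost/api/pipelines"));
    expect(res.status).toBe(200);

    const body = await res.json() as { files: { fileId: string; fileLabel: string }[] };
    for (const file of body.files) {
      expect(file.fileId).not.toMatch(/\.ts$/); // suffixes stripped
      expect(file.fileId).not.toContain("/");   // segments joined with ~
      expect(file.fileLabel.length).toBeGreaterThan(0);
    }
  });
});
```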
+import { useNavigate, useParams } from "@tanstack/react-router"; +import { + Command, + CommandDialog, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, + CommandShortcut, +} from "@ucdjs-internal/shared-ui/ui/command"; +import { useExecute, usePipelines } from "@ucdjs/pipelines-ui"; +import { FileCode, Loader2, Play, ScrollText, Terminal } from "lucide-react"; +import { useCallback, useEffect, useState } from "react"; + +const STORAGE_KEY_PREFIX = "ucd-versions-"; + +function getSelectedVersionsFromStorage(storageKey: string, allVersions: string[]): string[] { + if (typeof window === "undefined") { + return allVersions; + } + + try { + const stored = localStorage.getItem(`${STORAGE_KEY_PREFIX}${storageKey}`); + if (stored) { + const parsed = JSON.parse(stored) as string[]; + const validVersions = parsed.filter((v) => allVersions.includes(v)); + if (validVersions.length > 0) { + return validVersions; + } + } + } catch { + // Fall through to default + } + + return allVersions; +} + +export function PipelineCommandPalette() { + const [open, setOpen] = useState(false); + const [search, setSearch] = useState(""); + const { data } = usePipelines(); + const { execute, executing } = useExecute(); + const navigate = useNavigate(); + const { file, id: currentPipelineId } = useParams({ strict: false }); + + const files = data?.files ?? []; + const pipelines = files.flatMap((fileInfo) => + fileInfo.pipelines.map((pipeline) => ({ + ...pipeline, + fileId: fileInfo.fileId, + })), + ); + const currentPipeline = pipelines.find((p) => p.id === currentPipelineId && p.fileId === file); + + // Toggle command palette with ⌘K + useEffect(() => { + const down = (e: KeyboardEvent) => { + if (e.key === "k" && (e.metaKey || e.ctrlKey)) { + e.preventDefault(); + setOpen((open) => !open); + } + }; + + document.addEventListener("keydown", down); + return () => document.removeEventListener("keydown", down); + }, []); + + // Execute current pipeline with ⌘E when palette is open + useEffect(() => { + if (!open || !currentPipeline || executing) return; + + const down = async (e: KeyboardEvent) => { + if (e.key === "e" && (e.metaKey || e.ctrlKey)) { + e.preventDefault(); + try { + const versionsToExecute = getSelectedVersionsFromStorage( + `${currentPipeline.fileId}:${currentPipeline.id}`, + currentPipeline.versions, + ); + await execute(currentPipeline.fileId, currentPipeline.id, versionsToExecute); + setOpen(false); + } catch (err) { + console.error("Failed to execute pipeline:", err); + } + } + }; + + document.addEventListener("keydown", down); + return () => document.removeEventListener("keydown", down); + }, [open, currentPipeline, execute, executing]); + + useEffect(() => { + if (!open) { + setSearch(""); + } + }, [open]); + + const handleExecuteCurrent = useCallback(async () => { + if (!currentPipeline || executing) return; + try { + const versionsToExecute = getSelectedVersionsFromStorage( + `${currentPipeline.fileId}:${currentPipeline.id}`, + currentPipeline.versions, + ); + await execute(currentPipeline.fileId, currentPipeline.id, versionsToExecute); + setOpen(false); + } catch (err) { + console.error("Failed to execute pipeline:", err); + } + }, [currentPipeline, execute, executing]); + + const handleExecutePipeline = useCallback(async (fileId: string, pipelineId: string, versions: string[]) => { + if (executing) return; + try { + const versionsToExecute = getSelectedVersionsFromStorage(`${fileId}:${pipelineId}`, versions); + await execute(fileId, pipelineId, versionsToExecute); + 
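The reader above implies a writer with the same key shape; that counterpart is not part of this diff, so here is a hedged sketch:

```ts
// Hypothetical writer for the palette's version selection; the key shape
// `${STORAGE_KEY_PREFIX}${fileId}:${pipelineId}` must match the reader above.
function saveSelectedVersionsToStorage(storageKey: string, versions: string[]): void {
  if (typeof window === "undefined") return;

  try {
    localStorage.setItem(`${STORAGE_KEY_PREFIX}${storageKey}`, JSON.stringify(versions));
  } catch {
    // Quota/privacy-mode failures are non-fatal; the reader falls back to all versions.
  }
}
```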
setOpen(false); + } catch (err) { + console.error("Failed to execute pipeline:", err); + } + }, [execute, executing]); + + const handleNavigate = useCallback((to: string) => { + navigate({ to }); + setOpen(false); + }, [navigate]); + + return ( + + + + + No results found. + + {currentPipeline && ( + + + {executing + ? + : } + Execute Current Pipeline + ⌘E + + handleNavigate(`/pipelines/${currentPipeline.fileId}/${currentPipeline.id}/code`)}> + + View Current Pipeline Code + + handleNavigate(`/pipelines/${currentPipeline.fileId}/${currentPipeline.id}/executions`)}> + + View Pipeline Executions + + + )} + + + {pipelines.map((pipeline) => ( + handleExecutePipeline(pipeline.fileId, pipeline.id, pipeline.versions)} + value={`${pipeline.fileId}-${pipeline.id}`} + disabled={executing} + > + {executing + ? + : } + {pipeline.name || pipeline.id} + + {pipeline.versions.length} + {" "} + versions + + + ))} + + + + + ); +} diff --git a/packages/pipelines/pipeline-server/src/client/components/pipeline-header.tsx b/packages/pipelines/pipeline-server/src/client/components/pipeline-header.tsx new file mode 100644 index 000000000..0b2370a08 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/components/pipeline-header.tsx @@ -0,0 +1,99 @@ +import { Link, useLoaderData, useNavigate, useParams } from "@tanstack/react-router"; +import { Badge } from "@ucdjs-internal/shared-ui/ui/badge"; +import { Button } from "@ucdjs-internal/shared-ui/ui/button"; +import { useExecute, usePipelineVersions } from "@ucdjs/pipelines-ui"; +import { CheckCircle, Loader2, Play } from "lucide-react"; +import { useCallback } from "react"; + +export function PipelineHeader() { + const { file, id } = useParams({ from: "/pipelines/$file/$id" }); + const navigate = useNavigate(); + const data = useLoaderData({ from: "/pipelines/$file/$id" }) as { pipeline?: { versions: string[]; name?: string; id?: string; routeCount?: number; sourceCount?: number; description?: string } }; + const pipeline = data.pipeline; + const { execute, executing, executionId } = useExecute(); + const allVersions = pipeline?.versions ?? []; + const { selectedVersions } = usePipelineVersions(id, allVersions, `${file}:${id}`); + + const canExecute = selectedVersions.size > 0; + + const handleExecute = useCallback(async () => { + if (!canExecute) return; + const result = await execute(file, id, Array.from(selectedVersions)); + // Navigate to execution detail after successful execution + if (result.success && result.executionId) { + navigate({ + to: "/pipelines/$file/$id/executions/$executionId", + params: { file, id, executionId: result.executionId }, + }); + } + }, [execute, file, id, selectedVersions, canExecute, navigate]); + + return ( +
+
+
+
+

+ {pipeline?.name || pipeline?.id || "Untitled pipeline"}

+ + {pipeline?.versions.length ?? 0} + {" "} + versions + + + {pipeline?.routeCount ?? 0} + {" "} + routes + + + {pipeline?.sourceCount ?? 0} + {" "} + sources + +
+ +

+ {pipeline?.description ?? "No description provided."} +

+
+
+ {executionId && !executing && ( + +
+
+
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/components/pipeline-overview/quick-actions.tsx b/packages/pipelines/pipeline-server/src/client/components/pipeline-overview/quick-actions.tsx new file mode 100644 index 000000000..025a5336c --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/components/pipeline-overview/quick-actions.tsx @@ -0,0 +1,98 @@ +import { Link, useLoaderData, useNavigate, useParams } from "@tanstack/react-router"; +import { Button } from "@ucdjs-internal/shared-ui/ui/button"; +import { + Card, + CardContent, + CardDescription, + CardHeader, + CardTitle, +} from "@ucdjs-internal/shared-ui/ui/card"; +import { useExecute, usePipelineVersions } from "@ucdjs/pipelines-ui"; +import { ArrowRight, FileCode, Loader2, Play, Workflow } from "lucide-react"; +import { useCallback } from "react"; + +export function QuickActionsPanel() { + const { file, id } = useParams({ from: "/pipelines/$file/$id" }); + const { pipeline } = useLoaderData({ from: "/pipelines/$file/$id" }); + const navigate = useNavigate(); + const { execute, executing } = useExecute(); + const { selectedVersions } = usePipelineVersions(id, pipeline?.versions || [], `${file}:${id}`); + const canExecute = selectedVersions.size > 0 && Boolean(pipeline); + + const handleExecute = useCallback(async () => { + if (!canExecute) return; + const result = await execute(file, id, Array.from(selectedVersions)); + if (result.success && result.executionId) { + navigate({ + to: "/pipelines/$file/$id/executions/$executionId", + params: { file, id, executionId: result.executionId }, + }); + } + }, [execute, file, id, selectedVersions, canExecute, navigate]); + + return ( + + + Quick Actions + + Run, inspect, or view pipeline history + + + + + + + + + + + ); +} diff --git a/packages/pipelines/pipeline-server/src/client/index.css b/packages/pipelines/pipeline-server/src/client/index.css new file mode 100644 index 000000000..b0511814c --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/index.css @@ -0,0 +1,3 @@ +@import "tailwindcss"; +@import "@ucdjs-internal/shared-ui/styles.css"; +@import "@ucdjs/pipelines-ui/styles.css"; diff --git a/packages/pipelines/pipeline-server/src/client/lib/pipeline-executions.ts b/packages/pipelines/pipeline-server/src/client/lib/pipeline-executions.ts new file mode 100644 index 000000000..332599860 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/lib/pipeline-executions.ts @@ -0,0 +1,65 @@ +export type ExecutionStatus = "running" | "completed" | "failed"; + +export interface Execution { + id: string; + pipelineId: string; + status: ExecutionStatus; + startedAt: string; + completedAt: string | null; + versions: string[] | null; + summary: { + totalRoutes: number; + cached: number; + } | null; + error: string | null; +} + +export interface ExecutionsResponse { + executions: Execution[]; + pagination: { + total: number; + limit: number; + offset: number; + hasMore: boolean; + }; +} + +export async function fetchExecutions(fileId: string, pipelineId: string): Promise { + const response = await fetch(`/api/pipelines/${fileId}/${pipelineId}/executions?limit=10`); + if (!response.ok) { + throw new Error("Failed to fetch executions"); + } + return response.json(); +} + +export function formatDuration(startedAt: string, completedAt: string | null): string { + const start = new Date(startedAt).getTime(); + const end = completedAt ? 
new Date(completedAt).getTime() : Date.now();
+  const durationMs = end - start;
+
+  if (durationMs < 1000) {
+    return `${durationMs}ms`;
+  }
+
+  if (durationMs < 60000) {
+    return `${(durationMs / 1000).toFixed(1)}s`;
+  }
+
+  return `${Math.floor(durationMs / 60000)}m ${Math.floor((durationMs % 60000) / 1000)}s`;
+}
+
+export function formatTimeAgo(timestamp: string): string {
+  const date = new Date(timestamp);
+  const now = new Date();
+  const diffMs = now.getTime() - date.getTime();
+  const diffSecs = Math.floor(diffMs / 1000);
+  const diffMins = Math.floor(diffSecs / 60);
+  const diffHours = Math.floor(diffMins / 60);
+  const diffDays = Math.floor(diffHours / 24);
+
+  if (diffSecs < 60) return "just now";
+  if (diffMins < 60) return `${diffMins}m ago`;
+  if (diffHours < 24) return `${diffHours}h ago`;
+  if (diffDays < 7) return `${diffDays}d ago`;
+  return date.toLocaleDateString();
+}
diff --git a/packages/pipelines/pipeline-server/src/client/main.tsx b/packages/pipelines/pipeline-server/src/client/main.tsx
new file mode 100644
index 000000000..1ac67bde1
--- /dev/null
+++ b/packages/pipelines/pipeline-server/src/client/main.tsx
@@ -0,0 +1,19 @@
+import { createRouter, RouterProvider } from "@tanstack/react-router";
+import { StrictMode } from "react";
+import { createRoot } from "react-dom/client";
+import { routeTree } from "./routeTree.gen";
+import "./index.css";
+
+const router = createRouter({ routeTree });
+
+declare module "@tanstack/react-router" {
+  interface Register {
+    router: typeof router;
+  }
+}
+
+createRoot(document.getElementById("root")!).render(
+  <StrictMode>
+    <RouterProvider router={router} />
+  </StrictMode>,
+);
diff --git a/packages/pipelines/pipeline-server/src/client/routeTree.gen.ts b/packages/pipelines/pipeline-server/src/client/routeTree.gen.ts
new file mode 100644
index 000000000..8e0c25df8
--- /dev/null
+++ b/packages/pipelines/pipeline-server/src/client/routeTree.gen.ts
@@ -0,0 +1,292 @@
+/* eslint-disable */
+
+// @ts-nocheck
+
+// noinspection JSUnusedGlobalSymbols
+
+// This file was automatically generated by TanStack Router.
+// You should NOT make any changes in this file as it will be overwritten.
+// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.
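For quick reference, the duration helpers above resolve to these values:

```ts
// formatDuration(startedAt, completedAt):
//   950 ms elapsed  -> "950ms"
//   1_500 ms        -> "1.5s"
//   61_000 ms       -> "1m 1s"
//   completedAt === null measures against Date.now() (a still-running execution).
//
// formatTimeAgo(timestamp):
//   45 s   -> "just now"
//   5 min  -> "5m ago"
//   3 h    -> "3h ago"
//   2 d    -> "2d ago"
//   >= 7 d -> locale date string
```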
+ +import { Route as rootRouteImport } from './routes/__root' +import { Route as IndexRouteImport } from './routes/index' +import { Route as PipelinesFileRouteRouteImport } from './routes/pipelines/$file/route' +import { Route as PipelinesFileIndexRouteImport } from './routes/pipelines/$file/index' +import { Route as PipelinesFileIdRouteRouteImport } from './routes/pipelines/$file/$id/route' +import { Route as PipelinesFileIdIndexRouteImport } from './routes/pipelines/$file/$id/index' +import { Route as PipelinesFileIdInspectRouteImport } from './routes/pipelines/$file/$id/inspect' +import { Route as PipelinesFileIdGraphRouteImport } from './routes/pipelines/$file/$id/graph' +import { Route as PipelinesFileIdCodeRouteImport } from './routes/pipelines/$file/$id/code' +import { Route as PipelinesFileIdExecutionsIndexRouteImport } from './routes/pipelines/$file/$id/executions/index' +import { Route as PipelinesFileIdExecutionsExecutionIdIndexRouteImport } from './routes/pipelines/$file/$id/executions/$executionId/index' +import { Route as PipelinesFileIdExecutionsExecutionIdGraphRouteImport } from './routes/pipelines/$file/$id/executions/$executionId/graph' + +const IndexRoute = IndexRouteImport.update({ + id: '/', + path: '/', + getParentRoute: () => rootRouteImport, +} as any) +const PipelinesFileRouteRoute = PipelinesFileRouteRouteImport.update({ + id: '/pipelines/$file', + path: '/pipelines/$file', + getParentRoute: () => rootRouteImport, +} as any) +const PipelinesFileIndexRoute = PipelinesFileIndexRouteImport.update({ + id: '/', + path: '/', + getParentRoute: () => PipelinesFileRouteRoute, +} as any) +const PipelinesFileIdRouteRoute = PipelinesFileIdRouteRouteImport.update({ + id: '/$id', + path: '/$id', + getParentRoute: () => PipelinesFileRouteRoute, +} as any) +const PipelinesFileIdIndexRoute = PipelinesFileIdIndexRouteImport.update({ + id: '/', + path: '/', + getParentRoute: () => PipelinesFileIdRouteRoute, +} as any) +const PipelinesFileIdInspectRoute = PipelinesFileIdInspectRouteImport.update({ + id: '/inspect', + path: '/inspect', + getParentRoute: () => PipelinesFileIdRouteRoute, +} as any) +const PipelinesFileIdGraphRoute = PipelinesFileIdGraphRouteImport.update({ + id: '/graph', + path: '/graph', + getParentRoute: () => PipelinesFileIdRouteRoute, +} as any) +const PipelinesFileIdCodeRoute = PipelinesFileIdCodeRouteImport.update({ + id: '/code', + path: '/code', + getParentRoute: () => PipelinesFileIdRouteRoute, +} as any) +const PipelinesFileIdExecutionsIndexRoute = + PipelinesFileIdExecutionsIndexRouteImport.update({ + id: '/executions/', + path: '/executions/', + getParentRoute: () => PipelinesFileIdRouteRoute, + } as any) +const PipelinesFileIdExecutionsExecutionIdIndexRoute = + PipelinesFileIdExecutionsExecutionIdIndexRouteImport.update({ + id: '/executions/$executionId/', + path: '/executions/$executionId/', + getParentRoute: () => PipelinesFileIdRouteRoute, + } as any) +const PipelinesFileIdExecutionsExecutionIdGraphRoute = + PipelinesFileIdExecutionsExecutionIdGraphRouteImport.update({ + id: '/executions/$executionId/graph', + path: '/executions/$executionId/graph', + getParentRoute: () => PipelinesFileIdRouteRoute, + } as any) + +export interface FileRoutesByFullPath { + '/': typeof IndexRoute + '/pipelines/$file': typeof PipelinesFileRouteRouteWithChildren + '/pipelines/$file/$id': typeof PipelinesFileIdRouteRouteWithChildren + '/pipelines/$file/': typeof PipelinesFileIndexRoute + '/pipelines/$file/$id/code': typeof PipelinesFileIdCodeRoute + 
'/pipelines/$file/$id/graph': typeof PipelinesFileIdGraphRoute + '/pipelines/$file/$id/inspect': typeof PipelinesFileIdInspectRoute + '/pipelines/$file/$id/': typeof PipelinesFileIdIndexRoute + '/pipelines/$file/$id/executions/': typeof PipelinesFileIdExecutionsIndexRoute + '/pipelines/$file/$id/executions/$executionId/graph': typeof PipelinesFileIdExecutionsExecutionIdGraphRoute + '/pipelines/$file/$id/executions/$executionId/': typeof PipelinesFileIdExecutionsExecutionIdIndexRoute +} +export interface FileRoutesByTo { + '/': typeof IndexRoute + '/pipelines/$file': typeof PipelinesFileIndexRoute + '/pipelines/$file/$id/code': typeof PipelinesFileIdCodeRoute + '/pipelines/$file/$id/graph': typeof PipelinesFileIdGraphRoute + '/pipelines/$file/$id/inspect': typeof PipelinesFileIdInspectRoute + '/pipelines/$file/$id': typeof PipelinesFileIdIndexRoute + '/pipelines/$file/$id/executions': typeof PipelinesFileIdExecutionsIndexRoute + '/pipelines/$file/$id/executions/$executionId/graph': typeof PipelinesFileIdExecutionsExecutionIdGraphRoute + '/pipelines/$file/$id/executions/$executionId': typeof PipelinesFileIdExecutionsExecutionIdIndexRoute +} +export interface FileRoutesById { + __root__: typeof rootRouteImport + '/': typeof IndexRoute + '/pipelines/$file': typeof PipelinesFileRouteRouteWithChildren + '/pipelines/$file/$id': typeof PipelinesFileIdRouteRouteWithChildren + '/pipelines/$file/': typeof PipelinesFileIndexRoute + '/pipelines/$file/$id/code': typeof PipelinesFileIdCodeRoute + '/pipelines/$file/$id/graph': typeof PipelinesFileIdGraphRoute + '/pipelines/$file/$id/inspect': typeof PipelinesFileIdInspectRoute + '/pipelines/$file/$id/': typeof PipelinesFileIdIndexRoute + '/pipelines/$file/$id/executions/': typeof PipelinesFileIdExecutionsIndexRoute + '/pipelines/$file/$id/executions/$executionId/graph': typeof PipelinesFileIdExecutionsExecutionIdGraphRoute + '/pipelines/$file/$id/executions/$executionId/': typeof PipelinesFileIdExecutionsExecutionIdIndexRoute +} +export interface FileRouteTypes { + fileRoutesByFullPath: FileRoutesByFullPath + fullPaths: + | '/' + | '/pipelines/$file' + | '/pipelines/$file/$id' + | '/pipelines/$file/' + | '/pipelines/$file/$id/code' + | '/pipelines/$file/$id/graph' + | '/pipelines/$file/$id/inspect' + | '/pipelines/$file/$id/' + | '/pipelines/$file/$id/executions/' + | '/pipelines/$file/$id/executions/$executionId/graph' + | '/pipelines/$file/$id/executions/$executionId/' + fileRoutesByTo: FileRoutesByTo + to: + | '/' + | '/pipelines/$file' + | '/pipelines/$file/$id/code' + | '/pipelines/$file/$id/graph' + | '/pipelines/$file/$id/inspect' + | '/pipelines/$file/$id' + | '/pipelines/$file/$id/executions' + | '/pipelines/$file/$id/executions/$executionId/graph' + | '/pipelines/$file/$id/executions/$executionId' + id: + | '__root__' + | '/' + | '/pipelines/$file' + | '/pipelines/$file/$id' + | '/pipelines/$file/' + | '/pipelines/$file/$id/code' + | '/pipelines/$file/$id/graph' + | '/pipelines/$file/$id/inspect' + | '/pipelines/$file/$id/' + | '/pipelines/$file/$id/executions/' + | '/pipelines/$file/$id/executions/$executionId/graph' + | '/pipelines/$file/$id/executions/$executionId/' + fileRoutesById: FileRoutesById +} +export interface RootRouteChildren { + IndexRoute: typeof IndexRoute + PipelinesFileRouteRoute: typeof PipelinesFileRouteRouteWithChildren +} + +declare module '@tanstack/react-router' { + interface FileRoutesByPath { + '/': { + id: '/' + path: '/' + fullPath: '/' + preLoaderRoute: typeof IndexRouteImport + parentRoute: typeof rootRouteImport + 
} + '/pipelines/$file': { + id: '/pipelines/$file' + path: '/pipelines/$file' + fullPath: '/pipelines/$file' + preLoaderRoute: typeof PipelinesFileRouteRouteImport + parentRoute: typeof rootRouteImport + } + '/pipelines/$file/': { + id: '/pipelines/$file/' + path: '/' + fullPath: '/pipelines/$file/' + preLoaderRoute: typeof PipelinesFileIndexRouteImport + parentRoute: typeof PipelinesFileRouteRoute + } + '/pipelines/$file/$id': { + id: '/pipelines/$file/$id' + path: '/$id' + fullPath: '/pipelines/$file/$id' + preLoaderRoute: typeof PipelinesFileIdRouteRouteImport + parentRoute: typeof PipelinesFileRouteRoute + } + '/pipelines/$file/$id/': { + id: '/pipelines/$file/$id/' + path: '/' + fullPath: '/pipelines/$file/$id/' + preLoaderRoute: typeof PipelinesFileIdIndexRouteImport + parentRoute: typeof PipelinesFileIdRouteRoute + } + '/pipelines/$file/$id/inspect': { + id: '/pipelines/$file/$id/inspect' + path: '/inspect' + fullPath: '/pipelines/$file/$id/inspect' + preLoaderRoute: typeof PipelinesFileIdInspectRouteImport + parentRoute: typeof PipelinesFileIdRouteRoute + } + '/pipelines/$file/$id/graph': { + id: '/pipelines/$file/$id/graph' + path: '/graph' + fullPath: '/pipelines/$file/$id/graph' + preLoaderRoute: typeof PipelinesFileIdGraphRouteImport + parentRoute: typeof PipelinesFileIdRouteRoute + } + '/pipelines/$file/$id/code': { + id: '/pipelines/$file/$id/code' + path: '/code' + fullPath: '/pipelines/$file/$id/code' + preLoaderRoute: typeof PipelinesFileIdCodeRouteImport + parentRoute: typeof PipelinesFileIdRouteRoute + } + '/pipelines/$file/$id/executions/': { + id: '/pipelines/$file/$id/executions/' + path: '/executions' + fullPath: '/pipelines/$file/$id/executions/' + preLoaderRoute: typeof PipelinesFileIdExecutionsIndexRouteImport + parentRoute: typeof PipelinesFileIdRouteRoute + } + '/pipelines/$file/$id/executions/$executionId/': { + id: '/pipelines/$file/$id/executions/$executionId/' + path: '/executions/$executionId' + fullPath: '/pipelines/$file/$id/executions/$executionId/' + preLoaderRoute: typeof PipelinesFileIdExecutionsExecutionIdIndexRouteImport + parentRoute: typeof PipelinesFileIdRouteRoute + } + '/pipelines/$file/$id/executions/$executionId/graph': { + id: '/pipelines/$file/$id/executions/$executionId/graph' + path: '/executions/$executionId/graph' + fullPath: '/pipelines/$file/$id/executions/$executionId/graph' + preLoaderRoute: typeof PipelinesFileIdExecutionsExecutionIdGraphRouteImport + parentRoute: typeof PipelinesFileIdRouteRoute + } + } +} + +interface PipelinesFileIdRouteRouteChildren { + PipelinesFileIdCodeRoute: typeof PipelinesFileIdCodeRoute + PipelinesFileIdGraphRoute: typeof PipelinesFileIdGraphRoute + PipelinesFileIdInspectRoute: typeof PipelinesFileIdInspectRoute + PipelinesFileIdIndexRoute: typeof PipelinesFileIdIndexRoute + PipelinesFileIdExecutionsIndexRoute: typeof PipelinesFileIdExecutionsIndexRoute + PipelinesFileIdExecutionsExecutionIdGraphRoute: typeof PipelinesFileIdExecutionsExecutionIdGraphRoute + PipelinesFileIdExecutionsExecutionIdIndexRoute: typeof PipelinesFileIdExecutionsExecutionIdIndexRoute +} + +const PipelinesFileIdRouteRouteChildren: PipelinesFileIdRouteRouteChildren = { + PipelinesFileIdCodeRoute: PipelinesFileIdCodeRoute, + PipelinesFileIdGraphRoute: PipelinesFileIdGraphRoute, + PipelinesFileIdInspectRoute: PipelinesFileIdInspectRoute, + PipelinesFileIdIndexRoute: PipelinesFileIdIndexRoute, + PipelinesFileIdExecutionsIndexRoute: PipelinesFileIdExecutionsIndexRoute, + PipelinesFileIdExecutionsExecutionIdGraphRoute: + 
PipelinesFileIdExecutionsExecutionIdGraphRoute, + PipelinesFileIdExecutionsExecutionIdIndexRoute: + PipelinesFileIdExecutionsExecutionIdIndexRoute, +} + +const PipelinesFileIdRouteRouteWithChildren = + PipelinesFileIdRouteRoute._addFileChildren(PipelinesFileIdRouteRouteChildren) + +interface PipelinesFileRouteRouteChildren { + PipelinesFileIdRouteRoute: typeof PipelinesFileIdRouteRouteWithChildren + PipelinesFileIndexRoute: typeof PipelinesFileIndexRoute +} + +const PipelinesFileRouteRouteChildren: PipelinesFileRouteRouteChildren = { + PipelinesFileIdRouteRoute: PipelinesFileIdRouteRouteWithChildren, + PipelinesFileIndexRoute: PipelinesFileIndexRoute, +} + +const PipelinesFileRouteRouteWithChildren = + PipelinesFileRouteRoute._addFileChildren(PipelinesFileRouteRouteChildren) + +const rootRouteChildren: RootRouteChildren = { + IndexRoute: IndexRoute, + PipelinesFileRouteRoute: PipelinesFileRouteRouteWithChildren, +} +export const routeTree = rootRouteImport + ._addFileChildren(rootRouteChildren) + ._addFileTypes() diff --git a/packages/pipelines/pipeline-server/src/client/routes/__root.tsx b/packages/pipelines/pipeline-server/src/client/routes/__root.tsx new file mode 100644 index 000000000..36a04ba9e --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/__root.tsx @@ -0,0 +1,21 @@ +import { PipelineCommandPalette } from "#components/pipeline-command-palette"; +import { createRootRoute, Outlet } from "@tanstack/react-router"; +import { SidebarInset, SidebarProvider } from "@ucdjs-internal/shared-ui/ui/sidebar"; +import { PipelineSidebar } from "@ucdjs/pipelines-ui"; + +export const Route = createRootRoute({ + component: RootLayout, +}); + +function RootLayout() { + return ( + + + + + + + + + ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/index.tsx b/packages/pipelines/pipeline-server/src/client/routes/index.tsx new file mode 100644 index 000000000..183191fd3 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/index.tsx @@ -0,0 +1,84 @@ +import { createFileRoute } from "@tanstack/react-router"; +import { usePipelines } from "@ucdjs/pipelines-ui"; + +export const Route = createFileRoute("/")({ + component: HomePage, +}); + +function EmptyStateIcon() { + return ( +
+ +
+ ); +} + +function SelectPipelinePrompt() { + return ( + <> + +

+ Select a Pipeline +

+

+ Choose a pipeline from the sidebar to view its details and execute it. +

+ + ); +} + +function NoPipelinesFound() { + return ( + <> + +

+ No Pipelines Found +

+

+ Create a pipeline file to get started. +

+ + *.ucd-pipeline.ts + + + ); +} + +function LoadingState() { + return ( +
+

Loading pipelines...

+
+ ); +} + +function HomePage() { + const { data, loading } = usePipelines(); + + if (loading) { + return ; + } + + const pipelineCount = data?.pipelines?.length ?? 0; + + return ( +
+
+ {pipelineCount > 0 ? : } +
+
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/code.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/code.tsx new file mode 100644 index 000000000..187bf3414 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/code.tsx @@ -0,0 +1,189 @@ +import type { CodeResponse } from "../../../../types"; +import { createFileRoute, useSearch } from "@tanstack/react-router"; +import { ShikiCode } from "@ucdjs-internal/shared-ui/components/shiki-code"; +import { Badge } from "@ucdjs-internal/shared-ui/ui/badge"; +import { Button } from "@ucdjs-internal/shared-ui/ui/button"; +import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@ucdjs-internal/shared-ui/ui/card"; +import { Separator } from "@ucdjs-internal/shared-ui/ui/separator"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; + +interface CodeSearchParams { + route?: string; +} + +export const Route = createFileRoute("/pipelines/$file/$id/code")({ + loader: async ({ params }): Promise => { + const res = await fetch(`/api/pipelines/${params.file}/${params.id}/code`); + if (!res.ok) { + throw new Error(`Failed to load code (${res.status})`); + } + return res.json(); + }, + component: PipelineCodePage, +}); + +function CodeDisplay({ code, filePath, highlightRoute }: { code: string; filePath: string; highlightRoute?: string }) { + const codeRef = useRef(null); + const [copied, setCopied] = useState(false); + + const routeInfo = useMemo(() => { + if (!highlightRoute) { + return null; + } + + const escapedRoute = highlightRoute.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + const routePattern = new RegExp(`route\\s*:\\s*["\']${escapedRoute}["\']`, "i"); + const match = routePattern.exec(code); + if (!match || match.index == null) { + return null; + } + + const prefix = code.slice(0, match.index); + const lineIndex = Math.max(0, prefix.split("\n").length - 1); + const lines = code.split("\n"); + const lineText = lines[lineIndex] ?? ""; + + return { + lineIndex, + lineNumber: lineIndex + 1, + lineText, + matchIndex: match.index, + matchLength: match[0].length, + label: `Route: ${highlightRoute}`, + }; + }, [code, highlightRoute]); + + const decorations = useMemo(() => { + const items: { + start: number; + end: number; + properties: { class: string[]; "data-label": string }; + alwaysWrap?: boolean; + }[] = []; + + if (routeInfo) { + items.push({ + start: routeInfo.matchIndex, + end: routeInfo.matchIndex + routeInfo.matchLength, + properties: { + class: ["shiki-decor", "shiki-decor-route"], + "data-label": routeInfo.label, + }, + alwaysWrap: true, + }); + } + + return items.length ? items : undefined; + }, [code, routeInfo]); + + const stats = useMemo(() => { + const lines = code.split("\n").length; + return { + lines, + chars: code.length, + }; + }, [code]); + + const handleCopy = useCallback(async () => { + try { + await navigator.clipboard.writeText(code); + setCopied(true); + window.setTimeout(() => setCopied(false), 1500); + } catch { + setCopied(false); + } + }, [code]); + + useEffect(() => { + if (!routeInfo || !codeRef.current) { + return; + } + + const scrollContainer = codeRef.current.querySelector(".shiki") || codeRef.current; + const lineElement = scrollContainer.querySelector(`[data-line="${routeInfo.lineNumber}"]`); + if (lineElement) { + lineElement.scrollIntoView({ behavior: "smooth", block: "center" }); + } + }, [routeInfo]); + + return ( + + +
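The route-highlight lookup in `CodeDisplay` above reduces to a regex match plus a newline count; the same computation in isolation:

```ts
const code = "definePipeline({\n  routes: [{ route: \"casefolding\" }],\n})";
const highlightRoute = "casefolding";

// Escape regex metacharacters, then look for `route: "<id>"` (either quote style).
const escapedRoute = highlightRoute.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const routePattern = new RegExp(`route\\s*:\\s*["']${escapedRoute}["']`, "i");
const match = routePattern.exec(code);

if (match) {
  // Zero-based line: count newlines before the match; +1 gives the 1-based
  // line number used for the scroll-into-view lookup.
  const lineIndex = code.slice(0, match.index).split("\n").length - 1;
  console.log(lineIndex + 1); // -> 2
}
```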
+ + {filePath} + +
+ TS + +
+
+
+ {stats.lines} lines + + {stats.chars} chars +
+
+ + + Pipeline source code. Use arrow keys to navigate the content. + +
+ +
+
+
+ ); +} + +function EmptyCodeDisplay({ pipelineId }: { pipelineId: string }) { + return ( + + + Pipeline Code + {pipelineId} + + +
+

No code found.

+
+
+
+ ); +} + +function PipelineCodePage() { + const { file, id } = Route.useParams(); + const data = Route.useLoaderData(); + const search = useSearch({ from: "/pipelines/$file/$id/code" }) as CodeSearchParams; + const highlightRoute = search?.route; + + return ( +
+ {data.code + ? ( + + ) + : ( + + )} +
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/$executionId/graph.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/$executionId/graph.tsx new file mode 100644 index 000000000..514f86093 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/$executionId/graph.tsx @@ -0,0 +1,30 @@ +import { createFileRoute } from "@tanstack/react-router"; +import { PipelineGraph, useExecute } from "@ucdjs/pipelines-ui"; +import { fetchExecutionEvents } from "."; + +export const Route = createFileRoute( + "/pipelines/$file/$id/executions/$executionId/graph", +)({ + component: RouteComponent, + loader: async ({ params }) => { + const executionData = await fetchExecutionEvents(params.executionId); + return { executionData }; + }, +}); + +function RouteComponent() { + Route.useParams(); + Route.useLoaderData(); + const { result: currentExecution } = useExecute(); + return ( +
+ +
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/$executionId/index.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/$executionId/index.tsx new file mode 100644 index 000000000..39ebbe452 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/$executionId/index.tsx @@ -0,0 +1,236 @@ +import type { PipelineEvent } from "@ucdjs/pipelines-core"; +import { StatusBadge } from "#components/pipeline-overview/status-badge"; +import { StatusIcon } from "#components/pipeline-overview/status-icon"; +import { createFileRoute, Link } from "@tanstack/react-router"; +import { cn } from "@ucdjs-internal/shared-ui/lib/utils"; +import { Badge } from "@ucdjs-internal/shared-ui/ui/badge"; +import { Card, CardContent, CardHeader, CardTitle } from "@ucdjs-internal/shared-ui/ui/card"; +import { ScrollArea } from "@ucdjs-internal/shared-ui/ui/scroll-area"; +import { + EventDetailPanel, + formatHighPrecisionTime, + InlineJsonView, + useEventView, + ViewModeToggle, +} from "@ucdjs/pipelines-ui"; +import { ArrowLeft, CheckCircle2, ChevronRight, Clock, FileCode, XCircle } from "lucide-react"; + +interface ExecutionEvent { + id: string; + type: string; + timestamp: string; + data: PipelineEvent; +} + +interface ExecutionEventsResponse { + executionId: string; + pipelineId: string; + status: "running" | "completed" | "failed"; + events: ExecutionEvent[]; + pagination: { + total: number; + limit: number; + offset: number; + hasMore: boolean; + }; +} + +// eslint-disable-next-line react-refresh/only-export-components +export async function fetchExecutionEvents(executionId: string): Promise { + const response = await fetch(`/api/executions/${executionId}/events?limit=500`); + if (!response.ok) { + throw new Error("Failed to fetch execution events"); + } + return response.json(); +} + +function EventItem({ + event, + isJsonMode, + isExpanded, + isSelected, + onToggleExpand, + onSelect, +}: { + event: PipelineEvent; + isJsonMode: boolean; + isExpanded: boolean; + isSelected: boolean; + onToggleExpand: () => void; + onSelect: () => void; +}) { + const timestamp = formatHighPrecisionTime(event.timestamp); + const version = "version" in event ? event.version : undefined; + const routeId = "routeId" in event ? event.routeId : undefined; + const artifactId = "artifactId" in event ? event.artifactId : undefined; + const durationMs = "durationMs" in event ? event.durationMs : undefined; + + const handleClick = () => { + if (isJsonMode) { + onToggleExpand(); + } else { + onSelect(); + } + }; + + return ( +
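`EventItem` above probes optional fields with the `in` operator because `PipelineEvent` is a union; the same narrowing in miniature (member shapes hypothetical):

```ts
type Ev =
  | { type: "route:start"; routeId: string; timestamp: number }
  | { type: "artifact:written"; artifactId: string; durationMs: number; timestamp: number };

// `in` narrows the union member-by-member without a switch on `type`.
function durationOf(event: Ev): number | undefined {
  return "durationMs" in event ? event.durationMs : undefined;
}
```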
+ + + {isJsonMode && isExpanded && ( +
+ +
+ )} +
+ ); +} + +export const Route = createFileRoute("/pipelines/$file/$id/executions/$executionId/")({ + component: ExecutionDetailPage, + loader: async ({ params }) => { + const executionData = await fetchExecutionEvents(params.executionId); + return { executionData }; + }, +}); + +function ExecutionDetailPage() { + const { file, id: pipelineId, executionId } = Route.useParams(); + const { executionData } = Route.useLoaderData(); + + const { + isJsonMode, + selectedEventId, + isDetailPanelOpen, + toggleJsonMode, + openDetailPanel, + closeDetailPanel, + toggleInlineExpansion, + isInlineExpanded, + } = useEventView(); + + const events = executionData.events.map((e: ExecutionEvent) => e.data); + const selectedEvent = events.find((e: PipelineEvent) => e.id === selectedEventId) || null; + + return ( +
+ {/* Header */} +
+
+ + + + + + +
+
+

+ Execution + {" "} + {executionId.slice(0, 8)} +

+ +
+

+ {executionData.pagination.total} + {" "} + events • Pipeline: + {pipelineId} +

+
+ + +
+
+ + {/* Events List */} +
+ + {events.length === 0 + ? ( +
+

No events recorded for this execution

+
+ ) + : ( +
+ {events.map((event: PipelineEvent) => ( + toggleInlineExpansion(event.id)} + onSelect={() => openDetailPanel(event.id)} + /> + ))} +
+ )} +
+
+ + +
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/index.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/index.tsx new file mode 100644 index 000000000..96972c138 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/executions/index.tsx @@ -0,0 +1,252 @@ +import { createFileRoute, Link } from "@tanstack/react-router"; +import { Badge } from "@ucdjs-internal/shared-ui/ui/badge"; +import { Card, CardContent, CardHeader, CardTitle } from "@ucdjs-internal/shared-ui/ui/card"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@ucdjs-internal/shared-ui/ui/table"; +import { useExecute } from "@ucdjs/pipelines-ui"; +import { CheckCircle2, Circle, Clock, Play, XCircle } from "lucide-react"; + +interface Execution { + id: string; + pipelineId: string; + status: "running" | "completed" | "failed"; + startedAt: string; + completedAt: string | null; + versions: string[] | null; + summary: { + totalRoutes: number; + cached: number; + } | null; + error: string | null; +} + +interface ExecutionsResponse { + executions: Execution[]; + pagination: { + total: number; + limit: number; + offset: number; + hasMore: boolean; + }; +} + +async function fetchExecutions(fileId: string, pipelineId: string): Promise { + const response = await fetch(`/api/pipelines/${fileId}/${pipelineId}/executions?limit=50`); + if (!response.ok) { + throw new Error("Failed to fetch executions"); + } + return response.json(); +} + +function formatDuration(startedAt: string, completedAt: string | null): string { + const start = new Date(startedAt).getTime(); + const end = completedAt ? new Date(completedAt).getTime() : Date.now(); + const durationMs = end - start; + + if (durationMs < 1000) { + return `${durationMs}ms`; + } else if (durationMs < 60000) { + return `${(durationMs / 1000).toFixed(1)}s`; + } else { + return `${Math.floor(durationMs / 60000)}m ${Math.floor((durationMs % 60000) / 1000)}s`; + } +} + +function formatTimeAgo(timestamp: string): string { + const date = new Date(timestamp); + const now = new Date(); + const diffMs = now.getTime() - date.getTime(); + const diffSecs = Math.floor(diffMs / 1000); + const diffMins = Math.floor(diffSecs / 60); + const diffHours = Math.floor(diffMins / 60); + const diffDays = Math.floor(diffHours / 24); + + if (diffSecs < 60) return "just now"; + if (diffMins < 60) return `${diffMins}m ago`; + if (diffHours < 24) return `${diffHours}h ago`; + if (diffDays < 7) return `${diffDays}d ago`; + return date.toLocaleDateString(); +} + +function StatusIcon({ status }: { status: Execution["status"] }) { + switch (status) { + case "completed": + return ; + case "failed": + return ; + case "running": + return ; + default: + return ; + } +} + +function StatusBadge({ status }: { status: Execution["status"] }) { + switch (status) { + case "completed": + return ( + + Success + + ); + case "failed": + return ( + + Failed + + ); + case "running": + return ( + + Running + + ); + } +} + +export const Route = createFileRoute("/pipelines/$file/$id/executions/")({ + component: ExecutionsListPage, + loader: async ({ params }) => { + const executions = await fetchExecutions(params.file, params.id); + return { executions }; + }, +}); + +function ExecutionsListPage() { + const { file, id: pipelineId } = Route.useParams(); + const { executions } = Route.useLoaderData(); + const { result: currentExecution } = useExecute(); + + const 
allExecutions = currentExecution?.executionId + ? [ + { + id: currentExecution.executionId, + pipelineId, + status: "completed" as const, + startedAt: new Date().toISOString(), + completedAt: new Date().toISOString(), + versions: null, + summary: currentExecution.summary ?? null, + error: null, + }, + ...executions.executions, + ] + : executions.executions; + + return ( +
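One caveat in the merge above: the optimistic row is prepended with a hardcoded `completed` status and fresh timestamps, and it can duplicate a row the loader already returned. A hedged guard:

```ts
// Sketch: only prepend the in-memory execution when the loader hasn't listed it yet.
const alreadyListed = executions.executions.some(
  (execution) => execution.id === currentExecution?.executionId,
);

const allExecutions = currentExecution?.executionId && !alreadyListed
  ? [optimisticRow, ...executions.executions] // optimisticRow built as above
  : executions.executions;
```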
+ + +
+
+ Executions +

+ {allExecutions.length} + {" "} + total runs +

+
+
+
+ + {allExecutions.length === 0 + ? ( +
+ +

No executions yet

+

+ Execute the pipeline to see results here +

+
+ ) + : ( + + + + Status + ID + When + Duration + Versions + Routes + + + + + {allExecutions.map((execution) => ( + + +
+ + +
+
+ + + {execution.id} + + + + {formatTimeAgo(execution.startedAt)} + + + {formatDuration(execution.startedAt, execution.completedAt)} + + + {execution.versions + ? ( +
+ {execution.versions.map((v) => ( + + {v} + + ))} +
+ ) + : ( + - + )} +
+ + {execution.summary && "totalRoutes" in execution.summary + ? ( + + {execution.summary.totalRoutes} + + {" "} + ( + {execution.summary.cached} + {" "} + cached) + + + ) + : ( + - + )} + + + + View + + +
+ ))} +
+
+ )} +
+
+
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/graph.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/graph.tsx new file mode 100644 index 000000000..979a51a70 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/graph.tsx @@ -0,0 +1,51 @@ +import { createFileRoute } from "@tanstack/react-router"; +import { PipelineGraph, useExecute } from "@ucdjs/pipelines-ui"; + +export const Route = createFileRoute("/pipelines/$file/$id/graph")({ + component: PipelineGraphPage, +}); + +function EmptyGraphState() { + return ( +
+ Run the pipeline to generate the execution graph. +
+ ); +} + +function PipelineGraphPage() { + const { result } = useExecute(); + + const graph = result?.graph && result.graph.nodes.length > 0 + ? result.graph + : null; + + if (!graph) { + return ( +
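+ // nothing to draw until a run has produced graph nodes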
+ +
+ ); + } + + return ( +
+ +
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/index.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/index.tsx new file mode 100644 index 000000000..c98fbab2e --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/index.tsx @@ -0,0 +1,31 @@ +import { QuickActionsPanel } from "#components/pipeline-overview/quick-actions"; +import { RecentExecutionsPanel } from "#components/pipeline-overview/recent-executions-panel"; +import { RecentOutputsPanel } from "#components/pipeline-overview/recent-outputs-panel"; +import { fetchExecutions } from "#lib/pipeline-executions"; +import { createFileRoute } from "@tanstack/react-router"; + +export const Route = createFileRoute("/pipelines/$file/$id/")({ + component: PipelineOverviewPage, + loader: async ({ params }) => { + const executions = await fetchExecutions(params.file, params.id); + return { executions }; + }, +}); + +function PipelineOverviewPage() { + const { executions } = Route.useLoaderData(); + + return ( +
+
+
+ + +
+
+ +
+
+
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/inspect.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/inspect.tsx new file mode 100644 index 000000000..64abd185c --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/inspect.tsx @@ -0,0 +1,322 @@ +import type { PipelineDetails } from "@ucdjs/pipelines-ui"; +import { createFileRoute, useLoaderData } from "@tanstack/react-router"; +import { cn } from "@ucdjs-internal/shared-ui/lib/utils"; +import { Badge } from "@ucdjs-internal/shared-ui/ui/badge"; +import { Card, CardContent, CardHeader, CardTitle } from "@ucdjs-internal/shared-ui/ui/card"; +import { Input } from "@ucdjs-internal/shared-ui/ui/input"; +import { useEffect, useMemo, useState } from "react"; + +export const Route = createFileRoute("/pipelines/$file/$id/inspect")({ + component: PipelineInspectPage, +}); + +type RouteInfo = PipelineDetails["routes"][number]; +type Dependency = RouteInfo["depends"][number]; +type EmittedArtifact = RouteInfo["emits"][number]; +type OutputConfig = RouteInfo["outputs"][number]; + +interface RouteListProps { + routes: RouteInfo[]; + selectedRouteId: string | null; + onSelectRoute: (routeId: string) => void; + searchQuery: string; + onSearchChange: (query: string) => void; +} + +function RouteListCard({ + routes, + selectedRouteId, + onSelectRoute, + searchQuery, + onSearchChange, +}: RouteListProps) { + const filteredRoutes = useMemo(() => { + if (!searchQuery.trim()) return routes; + const value = searchQuery.trim().toLowerCase(); + return routes.filter((route) => route.id.toLowerCase().includes(value)); + }, [searchQuery, routes]); + + return ( + + + Routes + + + onSearchChange(event.target.value)} + placeholder="Search routes" + aria-label="Search routes" + /> +
+ {filteredRoutes.length === 0 + ? ( +
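+ // the case-insensitive substring filter on route ids matched nothing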

No routes match the search.

+ ) + : ( + filteredRoutes.map((route) => ( + onSelectRoute(route.id)} + /> + )) + )} +
+
+
+ ); +} + +interface RouteListItemProps { + route: RouteInfo; + isSelected: boolean; + onClick: () => void; +} + +function RouteListItem({ route, isSelected, onClick }: RouteListItemProps) { + return ( + + ); +} + +interface RouteDetailsProps { + route: RouteInfo; +} + +function RouteDetailsCard({ route }: RouteDetailsProps) { + return ( + + + Details + + + + + + + + + ); +} + +interface DependsSectionProps { + depends: readonly Dependency[]; +} + +function DependsSection({ depends }: DependsSectionProps) { + return ( +
+

Depends

+ {depends.length === 0 + ? ( +

No dependencies.

+ ) + : ( +
+ {depends.map((dep, index) => ( + + {dep.type === "route" + ? `route:${dep.routeId}` + : `artifact:${dep.routeId}:${dep.artifactName}`} + + ))} +
+ )} +
+ ); +} + +interface EmitsSectionProps { + emits: readonly EmittedArtifact[]; +} + +function EmitsSection({ emits }: EmitsSectionProps) { + return ( +
+

Emits

+ {emits.length === 0 + ? ( +

No emitted artifacts.

+ ) + : ( +
+ {emits.map((emit) => ( + + {emit.id} + {" "} + {emit.scope} + + ))} +
+ )} +
+ ); +} + +interface OutputsSectionProps { + outputs: readonly OutputConfig[]; +} + +function OutputsSection({ outputs }: OutputsSectionProps) { + return ( +
+

Outputs

+ {outputs.length === 0 + ? ( +

No output configuration.

+ ) + : ( +
+ {outputs.map((output, index) => ( +
+
+ dir: + {" "} + {output.dir ?? "default"} +
+
+ file: + {" "} + {output.fileName ?? "default"} +
+
+ ))} +
+ )} +
+ ); +} + +interface TransformsSectionProps { + transforms: readonly string[]; +} + +function TransformsSection({ transforms }: TransformsSectionProps) { + return ( +
+

Transforms

+ {transforms.length === 0 + ? ( +

No transforms.

+ ) + : ( +
+ {transforms.map((transform, index) => ( + + {transform} + + ))} +
+ )} +
+ ); +} + +function EmptyDetailsCard() { + return ( + + + Details + + +

Select a route to inspect.

+
+
+  );
+}
+
+function PipelineInspectPage() {
+  const { pipeline } = useLoaderData({ from: "/pipelines/$file/$id" });
+  const [searchQuery, setSearchQuery] = useState("");
+  const [selectedRouteId, setSelectedRouteId] = useState<string | null>(null);
+
+  // Use `?? []` instead of a non-null assertion so the hooks below always run,
+  // and the `!pipeline` guard after them can actually render the fallback.
+  const routes = pipeline?.routes ?? [];
+
+  const filteredRoutes = useMemo(() => {
+    if (!searchQuery.trim()) return routes;
+    const value = searchQuery.trim().toLowerCase();
+    return routes.filter((route) => route.id.toLowerCase().includes(value));
+  }, [searchQuery, routes]);
+
+  const selectedRoute = useMemo(() => {
+    return filteredRoutes.find((route) => route.id === selectedRouteId) ?? filteredRoutes[0] ?? null;
+  }, [filteredRoutes, selectedRouteId]);
+
+  useEffect(() => {
+    if (filteredRoutes.length === 0) {
+      setSelectedRouteId(null);
+    } else if (!selectedRouteId || !filteredRoutes.some((route) => route.id === selectedRouteId)) {
+      setSelectedRouteId(filteredRoutes[0]!.id);
+    }
+  }, [filteredRoutes, selectedRouteId]);
+
+  if (!pipeline) {
+    return
; + } + + return ( +
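+ // searchable route list plus a details card for the selected route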
+ + + {selectedRoute + ? ( + + ) + : ( + + )} +
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/route.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/route.tsx new file mode 100644 index 000000000..37de750af --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/$id/route.tsx @@ -0,0 +1,77 @@ +import type { PipelineResponse } from "@ucdjs/pipelines-ui"; +import { PipelineHeader } from "#components/pipeline-header"; +import { PipelineTabs } from "#components/pipeline-tabs"; +import { createFileRoute, notFound, Outlet, useLoaderData, useParams } from "@tanstack/react-router"; +import { usePipelineVersions, VersionSelector } from "@ucdjs/pipelines-ui"; + +export const Route = createFileRoute("/pipelines/$file/$id")({ + loader: async ({ params }) => { + const res = await fetch(`/api/pipelines/${params.file}/${params.id}`); + + if (!res.ok) { + if (res.status === 404) { + throw notFound(); + } + throw new Error(`Failed to load pipeline (${res.status})`); + } + + const data = await res.json(); + if (data.error) { + throw new Error(data.error); + } + + return data as PipelineResponse; + }, + notFoundComponent: NotFoundComponent, + component: PipelineDetailLayout, +}); + +function PipelineDetailLayout() { + const { file, id } = useParams({ from: "/pipelines/$file/$id" }); + const data = useLoaderData({ from: "/pipelines/$file/$id" }); + const pipeline = data.pipeline; + const { selectedVersions, toggleVersion, selectAll, deselectAll } = usePipelineVersions( + id, + pipeline?.versions || [], + ); + + return ( +
+
+ +
+ selectAll(pipeline?.versions || [])} + onDeselectAll={deselectAll} + /> +
+ +
+ +
+ +
+
+ ); +} + +function NotFoundComponent() { + const { file, id } = Route.useParams(); + + return ( +
+
+

Pipeline not found

+

+ Pipeline: + {file} + / + {id} +

+
+
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/index.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/index.tsx new file mode 100644 index 000000000..2f9577831 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/index.tsx @@ -0,0 +1,57 @@ +import { createFileRoute, Link } from "@tanstack/react-router"; +import { usePipelineFile } from "@ucdjs/pipelines-ui"; + +export const Route = createFileRoute("/pipelines/$file/")({ + component: PipelineFilePage, +}); + +function PipelineFilePage() { + const { file } = Route.useParams(); + const { file: fileInfo, loading, error } = usePipelineFile(file); + + if (loading) { + return ( +
+

Loading pipeline file...

+
+ ); + } + + if (error || !fileInfo) { + return ( +
+

+ {error ?? "Pipeline file not found"} +

+
+ ); + } + + return ( +
+
+

Pipeline File

+

{fileInfo.filePath}

+
+
+ {fileInfo.pipelines.map((pipeline) => ( + +
+ {pipeline.name || pipeline.id} +
+
+ {pipeline.versions.length} + {" "} + versions +
+ + ))} +
+
+ ); +} diff --git a/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/route.tsx b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/route.tsx new file mode 100644 index 000000000..9940893e0 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/routes/pipelines/$file/route.tsx @@ -0,0 +1,9 @@ +import { createFileRoute, Outlet } from "@tanstack/react-router"; + +export const Route = createFileRoute("/pipelines/$file")({ + component: PipelineFileLayout, +}); + +function PipelineFileLayout() { + return ; +} diff --git a/packages/pipelines/pipeline-server/src/client/types.ts b/packages/pipelines/pipeline-server/src/client/types.ts new file mode 100644 index 000000000..689ce064f --- /dev/null +++ b/packages/pipelines/pipeline-server/src/client/types.ts @@ -0,0 +1,5 @@ +export interface CodeResponse { + code?: string; + filePath?: string; + error?: string; +} diff --git a/packages/pipelines/pipeline-server/src/server/app.ts b/packages/pipelines/pipeline-server/src/server/app.ts new file mode 100644 index 000000000..0a704ea2c --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/app.ts @@ -0,0 +1,143 @@ +import type { Database } from "#server/db"; +import type { PipelineSource } from "@ucdjs/pipelines-loader"; +import fs from "node:fs"; +import path from "node:path"; +import process from "node:process"; +import { + pipelinesEventsRouter, + pipelinesExecutionRouter, + pipelinesFileRouter, + pipelinesIndexRouter, + pipelinesPipelineRouter, +} from "#server/routes"; +import { H3, serve, serveStatic } from "h3"; + +export interface AppOptions { + sources?: PipelineSource[]; + db?: Database; +} + +export interface ServerOptions extends AppOptions { + port?: number; +} + +declare module "h3" { + interface H3EventContext { + sources: PipelineSource[]; + db: Database; + } +} + +export function createApp(options: AppOptions = {}): H3 { + const { sources = [], db } = options; + + if (!db) { + throw new Error("Database is required. 
Pass db to createApp() or use startServer()"); + } + + const app = new H3({ debug: true }); + + // Default to pipeline-playground in development + let resolvedSources = sources; + if (sources.length === 0) { + const cwd = process.cwd(); + if (process.env.NODE_ENV === "development" || (import.meta as any).env.DEV) { + resolvedSources = [{ + type: "local", + id: "local", + cwd: path.join(import.meta.dirname, "../../../pipeline-playground"), + }]; + } else { + resolvedSources = [{ + type: "local", + id: "local", + cwd, + }]; + } + } + + app.use("/**", (event, next) => { + event.context.sources = resolvedSources; + event.context.db = db; + next(); + }); + + app.get("/api/hello", () => ({ + message: "Hello from H3!", + timestamp: Date.now(), + })); + + app.mount("/api/pipelines", pipelinesIndexRouter); + app.mount("/api/pipelines", pipelinesFileRouter); + app.mount("/api/pipelines", pipelinesPipelineRouter); + app.mount("/api/pipelines", pipelinesExecutionRouter); + app.mount("/api/pipelines", pipelinesEventsRouter); + + return app; +} + +export async function startServer(options: ServerOptions = {}): Promise { + const { port = 3030, sources } = options; + + // Initialize database with auto-migration + // NOTE: This will CRASH the server if database initialization fails + // This is intentional - we don't want to run with a misconfigured database + const { createDatabase, runMigrations } = await import("./db"); + const db = createDatabase(); + + try { + await runMigrations(db); + // eslint-disable-next-line no-console + console.info("Database migrations completed successfully"); + } catch (err) { + console.error("Failed to run database migrations:", err); + throw err; // CRASH - no fallback + } + + const app = createApp({ sources, db }); + + const clientDir = path.join(import.meta.dirname, "../client"); + + app.use((event) => { + const url = event.url.pathname; + + if (url.startsWith("/api")) { + return; + } + + return serveStatic(event, { + fallthrough: true, + getContents: (id) => fs.promises.readFile(path.join(clientDir, id)), + getMeta: async (id) => { + const filePath = path.join(clientDir, id); + try { + const stats = await fs.promises.stat(filePath); + if (!stats.isFile()) return; + return { size: stats.size, mtime: stats.mtime }; + } catch { + // File not found - fall through to SPA fallback + } + }, + }); + }); + + app.use((event) => { + const url = event.url.pathname; + + if (url.startsWith("/api")) { + return; + } + + const indexPath = path.join(clientDir, "index.html"); + return fs.promises.readFile(indexPath, "utf-8").then((html) => { + return new Response(html, { + headers: { "content-type": "text/html" }, + }); + }); + }); + + serve(app, { port, silent: true }); + + // eslint-disable-next-line no-console + console.info(`Pipeline UI running at http://localhost:${port}`); +} diff --git a/packages/pipelines/pipeline-server/src/server/code.ts b/packages/pipelines/pipeline-server/src/server/code.ts new file mode 100644 index 000000000..05ea19d48 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/code.ts @@ -0,0 +1,119 @@ +import { print } from "esrap"; +import ts from "esrap/languages/ts"; +import { parseSync } from "oxc-parser"; + +interface ExportTarget { + exportName: string; +} + +export function extractDefinePipelineCode(source: string, { exportName }: ExportTarget): string { + const { program } = parseSync("pipeline.ts", source, { + sourceType: "module", + lang: "ts", + } as any); + + const callExpression = findDefinePipelineExpression(program, exportName); + if 
(!callExpression) { + throw new Error(`definePipeline call not found for export "${exportName}"`); + } + + try { + const { code } = print( + { + type: "Program", + sourceType: "module", + body: [ + { + type: "ExpressionStatement", + expression: callExpression, + }, + ], + } as any, + ts(), + ); + + return code.trim(); + } catch { + const span = callExpression.span as { start: number; end: number } | undefined; + if (span) { + return source.slice(span.start, span.end).trim(); + } + throw new Error("Failed to print definePipeline call"); + } +} + +function findDefinePipelineExpression(program: any, exportName: string): any | null { + const body = program.body as any[]; + + if (exportName === "default") { + for (const node of body) { + if (node.type === "ExportDefaultDeclaration") { + if (isDefinePipelineCall(node.declaration)) { + return node.declaration; + } + if (node.declaration?.type === "Identifier") { + const expr = findVariableInitializer(body, node.declaration.name); + if (expr) return expr; + } + } + } + } + + for (const node of body) { + if (node.type === "ExportNamedDeclaration") { + if (node.declaration) { + const expr = findDefinePipelineInDeclaration(node.declaration, exportName); + if (expr) return expr; + } + + if (node.specifiers?.length) { + for (const spec of node.specifiers) { + if (spec.exported?.name === exportName) { + const localName = spec.local?.name ?? exportName; + const expr = findVariableInitializer(body, localName); + if (expr) return expr; + } + } + } + } + } + + return findVariableInitializer(body, exportName); +} + +function findDefinePipelineInDeclaration(declaration: any, exportName: string): any | null { + if (declaration.type === "VariableDeclaration") { + for (const decl of declaration.declarations ?? []) { + const id = decl.id; + if (id?.type === "Identifier" && id.name === exportName) { + if (isDefinePipelineCall(decl.init)) return decl.init; + } + } + } + + return null; +} + +function findVariableInitializer(body: any[], name: string): any | null { + for (const node of body) { + if (node.type === "VariableDeclaration") { + for (const decl of node.declarations ?? []) { + const id = decl.id; + if (id?.type === "Identifier" && id.name === name) { + if (isDefinePipelineCall(decl.init)) return decl.init; + } + } + } + } + + return null; +} + +function isDefinePipelineCall(node: any): boolean { + return ( + node + && node.type === "CallExpression" + && node.callee?.type === "Identifier" + && node.callee?.name === "definePipeline" + ); +} diff --git a/packages/pipelines/pipeline-server/src/server/db/index.ts b/packages/pipelines/pipeline-server/src/server/db/index.ts new file mode 100644 index 000000000..5c37922a7 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/db/index.ts @@ -0,0 +1,38 @@ +import type { LibSQLDatabase } from "drizzle-orm/libsql"; +import { existsSync } from "node:fs"; +import process from "node:process"; +import { createClient } from "@libsql/client"; +import { drizzle } from "drizzle-orm/libsql"; +import { migrate } from "drizzle-orm/libsql/migrator"; +import * as schema from "./schema"; + +export type Database = LibSQLDatabase; + +interface CreateDatabaseOptions { + url?: string; + authToken?: string; +} + +export function createDatabase(options: CreateDatabaseOptions = {}): Database { + const url = options.url ?? process.env.DB_URL ?? "file:./pipeline-server.db"; + const authToken = options.authToken ?? 
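+ // explicit option wins; otherwise fall back to the DB_AUTH_TOKEN environment variable (undefined is fine for local file: databases)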
process.env.DB_AUTH_TOKEN; + + const client = createClient({ url, authToken }); + + return drizzle(client, { schema }); +} + +export async function runMigrations(db: Database): Promise { + const migrationsFolder = new URL("./migrations", import.meta.url).pathname; + + if (!existsSync(migrationsFolder)) { + throw new Error( + `Migrations folder not found at ${migrationsFolder}. ` + + "Run 'pnpm db:generate' to create migrations.", + ); + } + + await migrate(db, { migrationsFolder }); +} + +export { schema }; diff --git a/packages/pipelines/pipeline-server/src/server/db/migrations/0000_amazing_kang.sql b/packages/pipelines/pipeline-server/src/server/db/migrations/0000_amazing_kang.sql new file mode 100644 index 000000000..5963676ae --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/db/migrations/0000_amazing_kang.sql @@ -0,0 +1,20 @@ +CREATE TABLE `events` ( + `id` text PRIMARY KEY NOT NULL, + `execution_id` text NOT NULL, + `type` text NOT NULL, + `timestamp` integer NOT NULL, + `data` text, + FOREIGN KEY (`execution_id`) REFERENCES `executions`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `executions` ( + `id` text PRIMARY KEY NOT NULL, + `pipeline_id` text NOT NULL, + `status` text NOT NULL, + `started_at` integer NOT NULL, + `completed_at` integer, + `versions` text, + `summary` text, + `graph` text, + `error` text +); diff --git a/packages/pipelines/pipeline-server/src/server/db/migrations/meta/0000_snapshot.json b/packages/pipelines/pipeline-server/src/server/db/migrations/meta/0000_snapshot.json new file mode 100644 index 000000000..ac1146b3f --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/db/migrations/meta/0000_snapshot.json @@ -0,0 +1,150 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "c007ad29-ee6b-4b0a-a33f-f2aac3b46262", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "events": { + "name": "events", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "execution_id": { + "name": "execution_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "timestamp": { + "name": "timestamp", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "data": { + "name": "data", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "events_execution_id_executions_id_fk": { + "name": "events_execution_id_executions_id_fk", + "tableFrom": "events", + "tableTo": "executions", + "columnsFrom": [ + "execution_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "executions": { + "name": "executions", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "pipeline_id": { + "name": "pipeline_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "started_at": { + "name": "started_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, 
+ "completed_at": { + "name": "completed_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "versions": { + "name": "versions", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "summary": { + "name": "summary", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "graph": { + "name": "graph", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "error": { + "name": "error", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} diff --git a/packages/pipelines/pipeline-server/src/server/db/migrations/meta/_journal.json b/packages/pipelines/pipeline-server/src/server/db/migrations/meta/_journal.json new file mode 100644 index 000000000..c51f840ba --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/db/migrations/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "7", + "dialect": "sqlite", + "entries": [ + { + "idx": 0, + "version": "6", + "when": 1770386712312, + "tag": "0000_amazing_kang", + "breakpoints": true + } + ] +} diff --git a/packages/pipelines/pipeline-server/src/server/db/schema.ts b/packages/pipelines/pipeline-server/src/server/db/schema.ts new file mode 100644 index 000000000..e75d99a38 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/db/schema.ts @@ -0,0 +1,24 @@ +import type { PipelineEvent, PipelineEventType, PipelineGraph } from "@ucdjs/pipelines-core"; +import type { PipelineSummary } from "@ucdjs/pipelines-executor"; +import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core"; + +export const executions = sqliteTable("executions", { + id: text("id").primaryKey(), + pipelineId: text("pipeline_id").notNull(), + status: text("status").$type<"running" | "completed" | "failed">().notNull(), + startedAt: integer("started_at", { mode: "timestamp" }).notNull(), + completedAt: integer("completed_at", { mode: "timestamp" }), + versions: text("versions", { mode: "json" }).$type(), + summary: text("summary", { mode: "json" }).$type(), + graph: text("graph", { mode: "json" }).$type(), + error: text("error"), +}); + +export const events = sqliteTable("events", { + id: text("id").primaryKey(), + executionId: text("execution_id").notNull() + .references(() => executions.id, { onDelete: "cascade" }), + type: text("type").$type().notNull(), + timestamp: integer("timestamp", { mode: "timestamp" }).notNull(), + data: text("data", { mode: "json" }).$type(), +}); diff --git a/packages/pipelines/pipeline-server/src/server/lib/files.ts b/packages/pipelines/pipeline-server/src/server/lib/files.ts new file mode 100644 index 000000000..7cc6e9a0e --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/lib/files.ts @@ -0,0 +1,162 @@ +import type { PipelineDefinition } from "@ucdjs/pipelines-core"; +import type { PipelineSource } from "@ucdjs/pipelines-loader"; +import type { PipelineInfo } from "@ucdjs/pipelines-ui"; +import { toPipelineInfo } from "@ucdjs/pipelines-ui"; +import { fileIdFromPath, fileLabelFromPath } from "./ids"; +import { getPipelines } from "./loader"; + +export interface FilePipelineEntry { + pipeline: PipelineDefinition; + exportName: string; +} + +export 
interface PipelineFileGroup { + fileId: string; + filePath: string; + fileLabel: string; + sourceId: string; + pipelines: PipelineInfo[]; + entries: FilePipelineEntry[]; +} + +export interface PipelineLoadErrorInfo { + filePath: string; + message: string; + sourceId: string; +} + +export interface PipelineSourceGroup { + sourceId: string; + source: PipelineSource; + fileGroups: PipelineFileGroup[]; + errors: PipelineLoadErrorInfo[]; +} + +export function buildFileGroups( + sourceId: string, + files: Array<{ filePath: string; pipelines: PipelineDefinition[]; exportNames: string[] }>, +): PipelineFileGroup[] { + return files.map((file) => { + const entries = file.pipelines.map((pipeline, index) => ({ + pipeline, + exportName: file.exportNames[index] ?? "default", + })); + + const fileId = fileIdFromPath(file.filePath); + const fileLabel = fileLabelFromPath(file.filePath); + + return { + fileId, + filePath: file.filePath, + fileLabel, + sourceId, + pipelines: entries.map((entry) => ({ + ...toPipelineInfo(entry.pipeline), + sourceId, + })), + entries, + }; + }); +} + +export function applySearchFilter( + groups: PipelineFileGroup[], + search: string, +): PipelineFileGroup[] { + if (!search) return groups; + + return groups + .map((group) => { + const entries = group.entries.filter(({ pipeline }) => { + const haystack = [ + pipeline.id, + pipeline.name ?? "", + pipeline.description ?? "", + ...pipeline.versions, + ...pipeline.routes.map((route) => route.id), + ...pipeline.inputs.map((input) => input.id), + ] + .join(" ") + .toLowerCase(); + return haystack.includes(search); + }); + + if (!entries.length) { + return null; + } + + return { + ...group, + pipelines: entries.map((entry) => ({ + ...toPipelineInfo(entry.pipeline), + sourceId: group.sourceId, + })), + entries, + }; + }) + .filter((group): group is PipelineFileGroup => Boolean(group)); +} + +export function findFileGroup( + fileId: string, + fileGroups: PipelineFileGroup[], +): PipelineFileGroup | null { + return fileGroups.find((group) => group.fileId === fileId) ?? null; +} + +export function findPipelineByFileId( + fileId: string, + fileGroups: PipelineFileGroup[], + pipelineId: string, +): { + fileGroup: PipelineFileGroup; + entry: FilePipelineEntry; +} | null { + const fileGroup = findFileGroup(fileId, fileGroups); + + if (!fileGroup) return null; + + const entry = fileGroup.entries.find(({ pipeline }) => pipeline.id === pipelineId); + if (!entry) return null; + + return { fileGroup, entry }; +} + +export async function loadPipelineFileGroups( + sources: PipelineSource[], +): Promise { + const results = await Promise.allSettled( + sources.map(async (source) => { + const result = await getPipelines(source); + const fileGroups = buildFileGroups(source.id, result.files); + const errors = result.errors.map((e) => ({ + filePath: e.filePath, + message: e.error.message, + sourceId: source.id, + })); + + return { + sourceId: source.id, + source, + fileGroups, + errors, + }; + }), + ); + + return results.map((result, index) => { + const source = sources[index]!; + + if (result.status === "fulfilled") { + return result.value; + } + + const message = result.reason instanceof Error ? 
result.reason.message : String(result.reason); + return { + sourceId: source.id, + source, + fileGroups: [], + errors: [{ filePath: "", message, sourceId: source.id }], + }; + }); +} diff --git a/packages/pipelines/pipeline-server/src/server/lib/ids.ts b/packages/pipelines/pipeline-server/src/server/lib/ids.ts new file mode 100644 index 000000000..0392c006a --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/lib/ids.ts @@ -0,0 +1,63 @@ +const PATH_SEPARATOR = "~"; + +export const PIPELINE_FILE_SUFFIXES = [".ucd-pipeline.ts", ".ts"] as const; + +function normalizePathSegments(filePath: string): string[] { + const normalized = filePath + .replace(/\\/g, "/") + .replace(/^\.\/+/, "") + .replace(/\/+/g, "/"); + + return normalized.split("/").filter(Boolean); +} + +export function stripSuffixes( + fileName: string, + suffixes: readonly string[] = PIPELINE_FILE_SUFFIXES, +): string { + for (const suffix of suffixes) { + if (fileName.endsWith(suffix)) { + return fileName.slice(0, -suffix.length); + } + } + + return fileName; +} + +export function sanitizeSegment(segment: string): string { + return segment.trim().replace(/~/g, "-").replace(/\s+/g, "-"); +} + +export function fileIdFromPath( + filePath: string, + suffixes: readonly string[] = PIPELINE_FILE_SUFFIXES, +): string { + const segments = normalizePathSegments(filePath); + + if (segments.length === 0) return ""; + + const lastSegment = segments[segments.length - 1] ?? ""; + const strippedLast = stripSuffixes(lastSegment, suffixes) || lastSegment; + + const idSegments = [ + ...segments.slice(0, -1).map(sanitizeSegment), + sanitizeSegment(strippedLast), + ].filter(Boolean); + + return idSegments.join(PATH_SEPARATOR); +} + +export function fileLabelFromPath( + filePath: string, + suffixes: readonly string[] = PIPELINE_FILE_SUFFIXES, +): string { + const segments = normalizePathSegments(filePath); + + if (segments.length === 0) return ""; + + const lastSegment = segments[segments.length - 1] ?? 
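+ // e.g. "pipelines/api/foo.ucd-pipeline.ts" -> id "pipelines~api~foo", label "pipelines/api/foo"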
""; + const strippedLast = stripSuffixes(lastSegment, suffixes) || lastSegment; + const labelSegments = [...segments.slice(0, -1), strippedLast].filter(Boolean); + + return labelSegments.join("/"); +} diff --git a/packages/pipelines/pipeline-server/src/server/lib/loader.ts b/packages/pipelines/pipeline-server/src/server/lib/loader.ts new file mode 100644 index 000000000..2db3a6517 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/lib/loader.ts @@ -0,0 +1,32 @@ +import type { LoadPipelinesResult, PipelineSource } from "@ucdjs/pipelines-loader"; +import path from "node:path"; +import { + findPipelineFiles, + findRemotePipelineFiles, + loadPipelinesFromPaths, + loadRemotePipelines, +} from "@ucdjs/pipelines-loader"; + +export async function getPipelines(source: PipelineSource): Promise { + if (source.type === "local") { + const files = await findPipelineFiles({ cwd: source.cwd }); + const result = await loadPipelinesFromPaths(files); + const normalize = (filePath: string) => + path.relative(source.cwd, filePath).replace(/\\/g, "/"); + + return { + ...result, + files: result.files.map((file) => ({ + ...file, + filePath: normalize(file.filePath), + })), + errors: result.errors.map((error) => ({ + ...error, + filePath: normalize(error.filePath), + })), + }; + } + + const fileList = await findRemotePipelineFiles(source); + return loadRemotePipelines(source, fileList.files); +} diff --git a/packages/pipelines/pipeline-server/src/server/lib/resolve.ts b/packages/pipelines/pipeline-server/src/server/lib/resolve.ts new file mode 100644 index 000000000..24c79bf3c --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/lib/resolve.ts @@ -0,0 +1,12 @@ +import path from "node:path"; + +export function resolveLocalFilePath(cwd: string, filePath: string): string { + const resolved = path.resolve(cwd, filePath); + const relative = path.relative(cwd, resolved); + + if (relative.startsWith("..") || path.isAbsolute(relative)) { + throw new Error("Invalid pipeline file path"); + } + + return resolved; +} diff --git a/packages/pipelines/pipeline-server/src/server/routes/index.ts b/packages/pipelines/pipeline-server/src/server/routes/index.ts new file mode 100644 index 000000000..a759349bc --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/routes/index.ts @@ -0,0 +1,5 @@ +export { pipelinesEventsRouter } from "./pipelines.events"; +export { pipelinesExecutionRouter } from "./pipelines.execution"; +export { pipelinesFileRouter } from "./pipelines.file"; +export { pipelinesIndexRouter } from "./pipelines.index"; +export { pipelinesPipelineRouter } from "./pipelines.pipeline"; diff --git a/packages/pipelines/pipeline-server/src/server/routes/pipelines.events.ts b/packages/pipelines/pipeline-server/src/server/routes/pipelines.events.ts new file mode 100644 index 000000000..3b868aeda --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/routes/pipelines.events.ts @@ -0,0 +1,69 @@ +import { schema } from "#server/db"; +import { asc, eq } from "drizzle-orm"; +import { getQuery, H3 } from "h3"; + +export const pipelinesEventsRouter = new H3(); + +pipelinesEventsRouter.get("/:file/:id/executions/:executionId/events", async (event) => { + const { db } = event.context; + const executionId = event.context.params?.executionId; + + if (!executionId) { + return { error: "Execution ID is required" }; + } + + const query = getQuery(event); + const limit = Math.min( + typeof query.limit === "string" ? 
Number.parseInt(query.limit, 10) : 100, + 500, + ); + const offset = typeof query.offset === "string" ? Number.parseInt(query.offset, 10) : 0; + + try { + const execution = await db.query.executions.findFirst({ + where: eq(schema.executions.id, executionId), + columns: { id: true, pipelineId: true, status: true }, + }); + + if (!execution) { + return { error: `Execution "${executionId}" not found` }; + } + + const events = await db.query.events.findMany({ + where: eq(schema.events.executionId, executionId), + orderBy: asc(schema.events.timestamp), + limit, + offset, + }); + + const countResult = await db.query.events.findMany({ + where: eq(schema.events.executionId, executionId), + columns: { id: true }, + }); + const total = countResult.length; + + return { + executionId, + pipelineId: execution.pipelineId, + status: execution.status, + events: events.map((evt) => ({ + id: evt.id, + type: evt.type, + timestamp: evt.timestamp.toISOString(), + data: evt.data, + })), + pagination: { + total, + limit, + offset, + hasMore: offset + limit < total, + }, + }; + } catch (err) { + console.error("Failed to fetch events:", err); + return { + error: "Failed to fetch events", + details: err instanceof Error ? err.message : String(err), + }; + } +}); diff --git a/packages/pipelines/pipeline-server/src/server/routes/pipelines.execution.ts b/packages/pipelines/pipeline-server/src/server/routes/pipelines.execution.ts new file mode 100644 index 000000000..ab9753dc2 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/routes/pipelines.execution.ts @@ -0,0 +1,61 @@ +import { schema } from "#server/db"; +import { desc, eq } from "drizzle-orm"; +import { getQuery, H3 } from "h3"; + +export const pipelinesExecutionRouter = new H3(); + +pipelinesExecutionRouter.get("/:file/:id/executions", async (event) => { + const { db } = event.context; + const id = event.context.params?.id; + + if (!id) { + return { error: "Pipeline ID is required" }; + } + + const query = getQuery(event); + const limit = Math.min( + typeof query.limit === "string" ? Number.parseInt(query.limit, 10) : 50, + 100, + ); + const offset = typeof query.offset === "string" ? Number.parseInt(query.offset, 10) : 0; + + try { + const executions = await db.query.executions.findMany({ + where: eq(schema.executions.pipelineId, id), + orderBy: desc(schema.executions.startedAt), + limit, + offset, + }); + + const countResult = await db.query.executions.findMany({ + where: eq(schema.executions.pipelineId, id), + columns: { id: true }, + }); + const total = countResult.length; + + return { + executions: executions.map((exec) => ({ + id: exec.id, + pipelineId: exec.pipelineId, + status: exec.status, + startedAt: exec.startedAt.toISOString(), + completedAt: exec.completedAt?.toISOString() ?? null, + versions: exec.versions, + summary: exec.summary, + error: exec.error, + })), + pagination: { + total, + limit, + offset, + hasMore: offset + limit < total, + }, + }; + } catch (err) { + console.error("Failed to fetch executions:", err); + return { + error: "Failed to fetch executions", + details: err instanceof Error ? 
err.message : String(err), + }; + } +}); diff --git a/packages/pipelines/pipeline-server/src/server/routes/pipelines.file.ts b/packages/pipelines/pipeline-server/src/server/routes/pipelines.file.ts new file mode 100644 index 000000000..6889af3e3 --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/routes/pipelines.file.ts @@ -0,0 +1,39 @@ +import type { PipelineLoadErrorInfo } from "#server/lib/files"; +import { + findFileGroup, + loadPipelineFileGroups, +} from "#server/lib/files"; +import { H3 } from "h3"; + +export const pipelinesFileRouter = new H3(); + +pipelinesFileRouter.get("/:file", async (event) => { + const { sources } = event.context; + const fileId = event.context.params?.file; + + if (!fileId) { + return { error: "File ID is required" }; + } + + const allErrors: PipelineLoadErrorInfo[] = []; + const groups = await loadPipelineFileGroups(sources); + + for (const group of groups) { + const fileGroup = findFileGroup(fileId, group.fileGroups); + if (fileGroup) { + return { + file: { + fileId: fileGroup.fileId, + filePath: fileGroup.filePath, + fileLabel: fileGroup.fileLabel, + sourceId: fileGroup.sourceId, + pipelines: fileGroup.pipelines, + }, + }; + } + + allErrors.push(...group.errors); + } + + return { error: `Pipeline file "${fileId}" not found`, errors: allErrors }; +}); diff --git a/packages/pipelines/pipeline-server/src/server/routes/pipelines.index.ts b/packages/pipelines/pipeline-server/src/server/routes/pipelines.index.ts new file mode 100644 index 000000000..cdbae28cd --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/routes/pipelines.index.ts @@ -0,0 +1,37 @@ +import type { PipelineFileGroup, PipelineLoadErrorInfo } from "#server/lib/files"; +import { + applySearchFilter, + loadPipelineFileGroups, +} from "#server/lib/files"; +import { getValidatedQuery, H3 } from "h3"; +import { z } from "zod"; + +export const pipelinesIndexRouter = new H3(); + +pipelinesIndexRouter.get("/", async (event) => { + const { sources } = event.context; + const query = await getValidatedQuery(event, z.object({ + search: z.string().optional().transform((s) => s?.trim().toLowerCase()).default(""), + })); + + const allFiles: PipelineFileGroup[] = []; + const allErrors: PipelineLoadErrorInfo[] = []; + + const groups = await loadPipelineFileGroups(sources); + for (const group of groups) { + const filteredGroups = applySearchFilter(group.fileGroups, query.search); + allFiles.push(...filteredGroups); + allErrors.push(...group.errors); + } + + return { + files: allFiles.map((file) => ({ + fileId: file.fileId, + filePath: file.filePath, + fileLabel: file.fileLabel, + sourceId: file.sourceId, + pipelines: file.pipelines, + })), + errors: allErrors, + }; +}); diff --git a/packages/pipelines/pipeline-server/src/server/routes/pipelines.pipeline.ts b/packages/pipelines/pipeline-server/src/server/routes/pipelines.pipeline.ts new file mode 100644 index 000000000..218133f8e --- /dev/null +++ b/packages/pipelines/pipeline-server/src/server/routes/pipelines.pipeline.ts @@ -0,0 +1,202 @@ +import type { PipelineEvent } from "@ucdjs/pipelines-core"; +import type { PipelineSource } from "@ucdjs/pipelines-loader"; +import { randomUUID } from "node:crypto"; +import fs from "node:fs"; +import { extractDefinePipelineCode } from "#server/code"; +import { schema } from "#server/db"; +import { findPipelineByFileId, loadPipelineFileGroups } from "#server/lib/files"; +import { resolveLocalFilePath } from "#server/lib/resolve"; +import { createPipelineExecutor } from 
"@ucdjs/pipelines-executor"; +import { github, gitlab } from "@ucdjs/pipelines-loader/remote"; +import { toPipelineDetails } from "@ucdjs/pipelines-ui"; +import { eq } from "drizzle-orm"; +import { H3, readValidatedBody } from "h3"; +import { z } from "zod"; + +export const pipelinesPipelineRouter = new H3(); + +async function getPipelineFileForSource( + source: PipelineSource, + filePath: string, +): Promise<{ content: string; filePath: string } | null> { + if (source.type === "local") { + const resolvedPath = resolveLocalFilePath(source.cwd, filePath); + const content = await fs.promises.readFile(resolvedPath, "utf-8"); + return { content, filePath }; + } + + const { owner, repo, ref } = source; + const content = source.type === "github" + ? await github.fetchFile({ owner, repo, ref }, filePath) + : await gitlab.fetchFile({ owner, repo, ref }, filePath); + + return { content, filePath }; +} + +pipelinesPipelineRouter.get("/:file/:id", async (event) => { + const { sources } = event.context; + const fileId = event.context.params?.file; + const id = event.context.params?.id; + + if (!fileId || !id) { + return { error: "File ID and pipeline ID are required" }; + } + + const groups = await loadPipelineFileGroups(sources); + for (const group of groups) { + const match = findPipelineByFileId(fileId, group.fileGroups, id); + if (match) { + return { + pipeline: toPipelineDetails(match.entry.pipeline), + fileId: match.fileGroup.fileId, + filePath: match.fileGroup.filePath, + fileLabel: match.fileGroup.fileLabel, + sourceId: match.fileGroup.sourceId, + }; + } + } + + return { error: `Pipeline "${id}" not found` }; +}); + +pipelinesPipelineRouter.get("/:file/:id/code", async (event) => { + const { sources } = event.context; + const fileId = event.context.params?.file; + const id = event.context.params?.id; + + if (!fileId || !id) { + return { error: "File ID and pipeline ID are required" }; + } + + const groups = await loadPipelineFileGroups(sources); + for (const group of groups) { + const match = findPipelineByFileId(fileId, group.fileGroups, id); + if (!match) { + continue; + } + + const file = await getPipelineFileForSource(group.source, match.fileGroup.filePath); + if (!file?.content) { + continue; + } + + return { + code: extractDefinePipelineCode(file.content, { exportName: match.entry.exportName }), + filePath: file.filePath, + fileLabel: match.fileGroup.fileLabel, + fileId: match.fileGroup.fileId, + sourceId: match.fileGroup.sourceId, + }; + } + + return { error: `Pipeline "${id}" not found` }; +}); + +pipelinesPipelineRouter.post("/:file/:id/execute", async (event) => { + const { sources, db } = event.context; + const fileId = event.context.params?.file; + const id = event.context.params?.id; + + if (!fileId || !id) { + return { error: "File ID and pipeline ID are required" }; + } + + const localSources = sources.filter((s) => s.type === "local"); + + if (localSources.length === 0) { + return { error: "No local sources configured for pipeline execution" }; + } + + const body = await readValidatedBody(event, z.object({ + versions: z.array(z.string()).optional(), + cache: z.boolean().optional(), + })); + + const groups = await loadPipelineFileGroups(localSources); + for (const group of groups) { + const match = findPipelineByFileId(fileId, group.fileGroups, id); + if (!match) { + continue; + } + + const pipeline = match.entry.pipeline; + const versions = body.versions ?? pipeline.versions; + const cache = body.cache ?? 
true; + + const executionId = randomUUID(); + const startedAt = new Date(); + + await db.insert(schema.executions).values({ + id: executionId, + pipelineId: id, + status: "running", + startedAt, + versions, + }); + + const events: PipelineEvent[] = []; + const executor = createPipelineExecutor({ + onEvent: async (evt) => { + events.push(evt); + + await db.insert(schema.events).values({ + id: randomUUID(), + executionId, + type: evt.type, + timestamp: new Date(evt.timestamp), + data: evt, + }); + }, + }); + + try { + const execResult = await executor.run([pipeline], { + versions, + cache, + }); + + const pipelineResult = execResult.results.get(id); + const completedAt = new Date(); + + await db.update(schema.executions) + .set({ + status: "completed", + completedAt, + summary: pipelineResult?.summary ?? null, + graph: pipelineResult?.graph ?? null, + }) + .where(eq(schema.executions.id, executionId)); + + // eslint-disable-next-line no-console + console.info("Pipeline execution completed:", { + executionId, + pipelineId: id, + summary: pipelineResult?.summary, + }); + + return { + success: true, + executionId, + }; + } catch (err) { + const completedAt = new Date(); + const errorMessage = err instanceof Error ? err.message : String(err); + + await db.update(schema.executions) + .set({ + status: "failed", + completedAt, + error: errorMessage, + }) + .where(eq(schema.executions.id, executionId)); + + return { + success: false, + executionId, + error: errorMessage, + }; + } + } + + return { error: `Pipeline "${id}" not found in local sources` }; +}); diff --git a/packages/pipelines/pipeline-server/tsconfig.build.json b/packages/pipelines/pipeline-server/tsconfig.build.json new file mode 100644 index 000000000..277eca3e1 --- /dev/null +++ b/packages/pipelines/pipeline-server/tsconfig.build.json @@ -0,0 +1,9 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base.build", + "include": [ + "src" + ], + "exclude": [ + "dist" + ] +} diff --git a/packages/pipelines/pipeline-server/tsconfig.json b/packages/pipelines/pipeline-server/tsconfig.json new file mode 100644 index 000000000..9c6dd744b --- /dev/null +++ b/packages/pipelines/pipeline-server/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + "include": [ + "src", + "test" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-server/turbo.json b/packages/pipelines/pipeline-server/turbo.json new file mode 100644 index 000000000..ca1c1e9f6 --- /dev/null +++ b/packages/pipelines/pipeline-server/turbo.json @@ -0,0 +1,15 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "tasks": { + "build": { + "outputs": ["dist/**"] + }, + "dev": { + "persistent": true + }, + "typecheck": { + "outputs": [".cache/tsbuildinfo.json"] + } + } +} diff --git a/packages/pipelines/pipeline-server/vite.config.ts b/packages/pipelines/pipeline-server/vite.config.ts new file mode 100644 index 000000000..132600e17 --- /dev/null +++ b/packages/pipelines/pipeline-server/vite.config.ts @@ -0,0 +1,143 @@ +import type { H3 } from "h3"; +import type { Plugin } from "vite"; +import tailwindcss from "@tailwindcss/vite"; +import { tanstackRouter } from "@tanstack/router-plugin/vite"; +import react from "@vitejs/plugin-react"; +import { defineConfig } from "vite"; +import viteTsConfigPaths from "vite-tsconfig-paths"; + +const appModuleId = "/src/server/app.ts"; +const dbModuleId = "/src/server/db/index.ts"; + +function h3DevServerPlugin(): Plugin { + return { + name: "h3-dev-server", + async configureServer(server) { + let 
appPromise: Promise<H3>
| null = null; + let db: import("./src/server/db").Database | null = null; + + // Initialize database before starting the server + try { + const dbMod = await server.ssrLoadModule(dbModuleId); + const { createDatabase, runMigrations } = dbMod as typeof import("./src/server/db"); + + db = createDatabase(); + await runMigrations(db); + console.log("[h3-dev-server] Database migrations completed successfully"); + } catch (err) { + console.error("[h3-dev-server] Failed to initialize database:", err); + // In dev, we still continue but log the error prominently + // The app will fail when trying to access db.context + } + + const getApp = async () => { + if (!appPromise) { + appPromise = server + .ssrLoadModule(appModuleId) + .then((mod) => (mod as typeof import("./src/server/app")).createApp({ db: db! })); + } + + return appPromise; + }; + + server.watcher.on("change", (file) => { + if (file.includes("/src/server/")) { + appPromise = null; + } + }); + + // Add middleware BEFORE Vite's internal middleware (no return = pre-hook) + // This ensures /api routes are handled before Vite's SPA fallback + server.middlewares.use(async (req, res, next) => { + if (!req.url?.startsWith("/api")) { + return next(); + } + + // If database failed to initialize, return error + if (!db) { + res.statusCode = 500; + res.setHeader("content-type", "application/json"); + res.end(JSON.stringify({ + error: "Database not initialized", + message: "The database failed to initialize. Check the server console for details.", + })); + return; + } + + try { + const app = await getApp(); + + // Collect request body for POST/PUT/PATCH + let body: string | undefined; + if (req.method && ["POST", "PUT", "PATCH"].includes(req.method)) { + // eslint-disable-next-line node/prefer-global/buffer + const chunks: Buffer[] = []; + for await (const chunk of req) { + chunks.push(chunk); + } + // eslint-disable-next-line node/prefer-global/buffer + body = Buffer.concat(chunks).toString(); + } + + const response = await app.fetch( + new Request(new URL(req.url, "http://localhost"), { + method: req.method, + headers: req.headers as HeadersInit, + body, + }), + ); + + res.statusCode = response.status; + response.headers.forEach((value, key) => { + res.setHeader(key, value); + }); + + const responseBody = await response.text(); + res.end(responseBody); + } catch (error) { + next(error); + } + }); + }, + }; +} + +export default defineConfig({ + clearScreen: false, + plugins: [ + viteTsConfigPaths({ + projects: ["./tsconfig.json"], + loose: true, + projectDiscovery: "lazy", + }), + tanstackRouter({ + routesDirectory: "./src/client/routes", + generatedRouteTree: "./src/client/routeTree.gen.ts", + }), + react(), + tailwindcss(), + h3DevServerPlugin(), + ], + environments: { + client: { + build: { + outDir: "dist/client", + }, + }, + server: { + build: { + outDir: "dist/server", + ssr: false, + rollupOptions: { + input: "src/server/app.ts", + }, + }, + }, + }, + builder: { + async buildApp(builder) { + await builder.build(builder.environments.client); + await builder.build(builder.environments.server); + }, + }, +}); diff --git a/packages/pipelines/pipeline-ui/README.md b/packages/pipelines/pipeline-ui/README.md new file mode 100644 index 000000000..5032f7fab --- /dev/null +++ b/packages/pipelines/pipeline-ui/README.md @@ -0,0 +1,27 @@ +# @ucdjs/pipelines-ui + +[![npm version][npm-version-src]][npm-version-href] +[![npm downloads][npm-downloads-src]][npm-downloads-href] +[![codecov][codecov-src]][codecov-href] + +> [!IMPORTANT] +> This is an internal 
package. It may change without warning and is not subject to semantic versioning. Use at your own risk. + +A collection of core pipeline functionalities for the UCD project. + +## Installation + +```bash +npm install @ucdjs/pipelines-ui +``` + +## 📄 License + +Published under [MIT License](./LICENSE). + +[npm-version-src]: https://img.shields.io/npm/v/@ucdjs/pipelines-ui?style=flat&colorA=18181B&colorB=4169E1 +[npm-version-href]: https://npmjs.com/package/@ucdjs/pipelines-ui +[npm-downloads-src]: https://img.shields.io/npm/dm/@ucdjs/pipelines-ui?style=flat&colorA=18181B&colorB=4169E1 +[npm-downloads-href]: https://npmjs.com/package/@ucdjs/pipelines-ui +[codecov-src]: https://img.shields.io/codecov/c/gh/ucdjs/ucd?style=flat&colorA=18181B&colorB=4169E1 +[codecov-href]: https://codecov.io/gh/ucdjs/ucd diff --git a/packages/pipelines/pipeline-ui/eslint.config.js b/packages/pipelines/pipeline-ui/eslint.config.js new file mode 100644 index 000000000..e08216c94 --- /dev/null +++ b/packages/pipelines/pipeline-ui/eslint.config.js @@ -0,0 +1,10 @@ +// @ts-check +import { luxass } from "@luxass/eslint-config"; + +export default luxass({ + type: "lib", + pnpm: true, + react: true, +}).overrideRules({ + "ts/explicit-function-return-type": "off", +}); diff --git a/packages/pipelines/pipeline-ui/package.json b/packages/pipelines/pipeline-ui/package.json new file mode 100644 index 000000000..047333b42 --- /dev/null +++ b/packages/pipelines/pipeline-ui/package.json @@ -0,0 +1,92 @@ +{ + "name": "@ucdjs/pipelines-ui", + "version": "0.0.1", + "type": "module", + "author": { + "name": "Lucas Nørgård", + "email": "lucasnrgaard@gmail.com", + "url": "https://luxass.dev" + }, + "packageManager": "pnpm@10.27.0", + "license": "MIT", + "homepage": "https://github.com/ucdjs/ucd", + "repository": { + "type": "git", + "url": "git+https://github.com/ucdjs/ucd.git", + "directory": "packages/pipelines/pipeline-ui" + }, + "bugs": { + "url": "https://github.com/ucdjs/ucd/issues" + }, + "sideEffects": false, + "imports": { + "#components/*": "./src/components/*.tsx", + "#lib/*": "./src/lib/*.ts", + "#hooks/*": "./src/hooks/*.ts" + }, + "exports": { + ".": "./dist/index.mjs", + "./components/pipeline-sidebar": "./dist/components/pipeline-sidebar.mjs", + "./hooks": "./dist/hooks/index.mjs", + "./lib": "./dist/lib/index.mjs", + "./lib/adapter": "./dist/lib/adapter.mjs", + "./lib/colors": "./dist/lib/colors.mjs", + "./lib/format-time": "./dist/lib/format-time.mjs", + "./lib/layout": "./dist/lib/layout.mjs", + "./lib/pipeline-utils": "./dist/lib/pipeline-utils.mjs", + "./lib/utils": "./dist/lib/utils.mjs", + "./styles.css": "./dist/styles/globals.css", + "./package.json": "./package.json" + }, + "types": "./dist/index.d.mts", + "files": [ + "dist" + ], + "engines": { + "node": ">=22.18" + }, + "scripts": { + "build": "tsdown --tsconfig=./tsconfig.build.json", + "dev": "tsdown --watch", + "clean": "git clean -xdf dist node_modules", + "lint": "eslint .", + "typecheck": "tsc --noEmit -p tsconfig.build.json" + }, + "peerDependencies": { + "@tanstack/react-router": "catalog:web", + "react": ">=19.2.0", + "react-dom": ">=19.2.0" + }, + "dependencies": { + "@icons-pack/react-simple-icons": "catalog:web", + "@ucdjs-internal/shared-ui": "workspace:*", + "@ucdjs/pipelines-core": "workspace:*", + "@xyflow/react": "catalog:web", + "clsx": "catalog:web", + "lucide-react": "catalog:web", + "tailwind-merge": "catalog:web" + }, + "devDependencies": { + "@eslint-react/eslint-plugin": "catalog:linting", + "@luxass/eslint-config": 
"catalog:linting", + "@rollup/plugin-babel": "catalog:build", + "@tanstack/react-router": "catalog:web", + "@types/react": "catalog:types", + "@types/react-dom": "catalog:types", + "@ucdjs-tooling/tsconfig": "workspace:*", + "@ucdjs-tooling/tsdown-config": "workspace:*", + "babel-plugin-react-compiler": "catalog:build", + "eslint": "catalog:linting", + "eslint-plugin-react-hooks": "catalog:linting", + "eslint-plugin-react-refresh": "catalog:linting", + "publint": "catalog:build", + "react": "catalog:web", + "react-dom": "catalog:web", + "tailwindcss": "catalog:web", + "tsdown": "catalog:build", + "typescript": "catalog:build" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/pipelines/pipeline-ui/src/components/detail/execution-result.tsx b/packages/pipelines/pipeline-ui/src/components/detail/execution-result.tsx new file mode 100644 index 000000000..2ef7e9fc1 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/detail/execution-result.tsx @@ -0,0 +1,187 @@ +import type { ExecuteResult } from "../../types"; +import { cn } from "#lib/utils"; + +export interface ExecutionResultProps { + result: ExecuteResult; + className?: string; +} + +export function ExecutionResult({ + result, + className, +}: ExecutionResultProps) { + const isSuccess = result.success; + + return ( +
+ {/* Status indicator */} +
+
+ + {isSuccess ? "Completed" : "Failed"} + +
+ + {/* Summary stats */} + {result.summary && ( +
+
+ Files + {result.summary.totalFiles} +
+
+ Matched + + {result.summary.matchedFiles} + +
+
+ Skipped + {result.summary.skippedFiles} +
+
+ Fallback + {result.summary.fallbackFiles} +
+
+ Outputs + {result.summary.totalOutputs} +
+
+ Time + + {Math.round(result.summary.durationMs)} + ms + +
+
+ )} + + {/* Top-level error */} + {result.error && ( +

{result.error}

+ )} + + {/* Detailed errors list */} + {result.errors && result.errors.length > 0 && ( +
+ {result.errors.map((err, i) => ( +
+ + [ + {err.scope} + ] + + {" "} + {err.message} +
+ ))} +
+ )} +
+ ); +} + +export interface ExecutionSummaryProps { + totalFiles: number; + matchedFiles: number; + skippedFiles: number; + fallbackFiles: number; + totalOutputs: number; + durationMs: number; + className?: string; +} + +/** + * Compact execution summary (without result wrapper) + */ +export function ExecutionSummary({ + totalFiles, + matchedFiles, + skippedFiles, + fallbackFiles, + totalOutputs, + durationMs, + className, +}: ExecutionSummaryProps) { + return ( +
+
+ Files + {totalFiles} +
+
+ Matched + {matchedFiles} +
+
+ Skipped + {skippedFiles} +
+
+ Fallback + {fallbackFiles} +
+
+ Outputs + {totalOutputs} +
+
+ Time + + {Math.round(durationMs)} + ms + +
+
+ ); +} + +export interface ExecutionErrorsProps { + errors: Array<{ scope: string; message: string }>; + className?: string; +} + +/** + * List of execution errors + */ +export function ExecutionErrors({ + errors, + className, +}: ExecutionErrorsProps) { + if (errors.length === 0) return null; + + return ( +
+ {errors.map((err, i) => ( +
+ + [ + {err.scope} + ] + + {" "} + {err.message} +
+ ))} +
+ ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/detail/route-list.tsx b/packages/pipelines/pipeline-ui/src/components/detail/route-list.tsx new file mode 100644 index 000000000..b5178578a --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/detail/route-list.tsx @@ -0,0 +1,91 @@ +import { cn } from "#lib/utils"; + +export interface RouteItemProps { + route: { id: string; cache: boolean }; + onClick?: () => void; + className?: string; +} + +export function RouteItem({ + route, + onClick, + className, +}: RouteItemProps) { + return ( + + ); +} + +export interface RouteListProps { + routes: Array<{ id: string; cache: boolean }>; + onRouteClick?: (routeId: string) => void; + className?: string; +} + +export function RouteList({ + routes, + onRouteClick, + className, +}: RouteListProps) { + return ( +
+
+

+ Routes +

+ + {routes.length} + +
+
+ {routes.map((route, index) => ( +
+ onRouteClick(route.id) : undefined} + /> +
+ ))} +
+
+ ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/detail/source-list.tsx b/packages/pipelines/pipeline-ui/src/components/detail/source-list.tsx new file mode 100644 index 000000000..7ee0ad80f --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/detail/source-list.tsx @@ -0,0 +1,28 @@ +export interface SourceListProps { + sources: Array<{ id: string }>; + className?: string; +} + +export function SourceList({ + sources, + className, +}: SourceListProps) { + return ( +
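
A hedged usage sketch for `RouteList`; the route ids are illustrative:

```tsx
import { RouteList } from "@ucdjs/pipelines-ui";

export function RoutesPanel() {
  return (
    <RouteList
      routes={[
        { id: "parse", cache: true },
        { id: "emit-json", cache: false },
      ]}
      onRouteClick={(routeId) => console.log("route selected:", routeId)}
    />
  );
}
```
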
+

+ Sources + {" "} + ( + {sources.length} + ) +

+
+ {sources.map((source) => ( +
+ {source.id} +
+ ))} +
+
+ ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/detail/version-selector.tsx b/packages/pipelines/pipeline-ui/src/components/detail/version-selector.tsx new file mode 100644 index 000000000..f3d372dfe --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/detail/version-selector.tsx @@ -0,0 +1,99 @@ +import { cn } from "#lib/utils"; +import { useMemo } from "react"; + +export interface VersionSelectorProps { + versions: string[]; + selectedVersions: Set; + onToggleVersion: (version: string) => void; + onSelectAll?: () => void; + onDeselectAll?: () => void; + className?: string; +} + +export function VersionSelector({ + versions, + selectedVersions, + onToggleVersion, + onSelectAll, + onDeselectAll, + className, +}: VersionSelectorProps) { + const versionToggles = useMemo(() => { + const map = new Map void>(); + for (const v of versions) { + map.set(v, () => onToggleVersion(v)); + } + return map; + }, [versions, onToggleVersion]); + + return ( +
+
+ + Versions ( + {selectedVersions.size} + / + {versions.length} + ) + + {(onSelectAll || onDeselectAll) && ( +
+ {onSelectAll && ( + + )} + {onDeselectAll && ( + + )} +
+ )} +
+
+ {versions.map((version) => ( + + ))} +
+
+ ); +} + +function VersionTag({ + version, + selected, + onToggle, +}: { + version: string; + selected: boolean; + onToggle?: () => void; +}) { + return ( + + ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/events/event-detail-panel.tsx b/packages/pipelines/pipeline-ui/src/components/events/event-detail-panel.tsx new file mode 100644 index 000000000..92bd92391 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/events/event-detail-panel.tsx @@ -0,0 +1,111 @@ +import type { PipelineEvent } from "@ucdjs/pipelines-core"; +import { cn } from "#lib/utils"; +import { Badge } from "@ucdjs-internal/shared-ui/ui/badge"; +import { Button } from "@ucdjs-internal/shared-ui/ui/button"; +import { Card, CardContent, CardHeader, CardTitle } from "@ucdjs-internal/shared-ui/ui/card"; + +export interface EventDetailPanelProps { + event: PipelineEvent | null; + isOpen: boolean; + onClose: () => void; + events: readonly PipelineEvent[]; +} + +export function EventDetailPanel({ event, isOpen, onClose, events }: EventDetailPanelProps) { + if (!event || !isOpen) return null; + + const jsonString = JSON.stringify(event, null, 2); + + const eventIndex = events.findIndex((e) => e.id === event.id); + const previousEvent = eventIndex > 0 ? events[eventIndex - 1] : null; + const nextEvent = eventIndex < events.length - 1 ? events[eventIndex + 1] : null; + + const timeDiff = (e1: PipelineEvent, e2: PipelineEvent) => { + const diff = e2.timestamp - e1.timestamp; + return diff > 1000 ? `${(diff / 1000).toFixed(1)}s` : `${diff}ms`; + }; + + const handleCopyJson = () => { + navigator.clipboard.writeText(jsonString); + }; + + return ( + <> +
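
`VersionSelector` is fully controlled; pairing it with `usePipelineVersions` (defined later in this diff) wires toggling and persistence together. A sketch, with an illustrative pipeline id:

```tsx
import { usePipelineVersions, VersionSelector } from "@ucdjs/pipelines-ui";

export function VersionsPanel({ versions }: { versions: string[] }) {
  // Selection persists in localStorage under "ucd-versions-my-pipeline".
  const { selectedVersions, toggleVersion, selectAll, deselectAll }
    = usePipelineVersions("my-pipeline", versions);

  return (
    <VersionSelector
      versions={versions}
      selectedVersions={selectedVersions}
      onToggleVersion={toggleVersion}
      onSelectAll={() => selectAll(versions)}
      onDeselectAll={deselectAll}
    />
  );
}
```
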
+ +
+ + +
+ Event Details +

+ ID: + {event.id} +

+
+ +
+ + +
+ +
+ +
+
+                {jsonString}
+              
+
+ +
+

Timeline Context

+ + {previousEvent && ( +
+ Previous: +
+ {previousEvent.type} + + ( + {timeDiff(previousEvent, event)} + {" "} + before) + +
+
+ )} + + {nextEvent && ( +
+ Next: +
+ {nextEvent.type} + + ( + {timeDiff(event, nextEvent)} + {" "} + after) + +
+
+ )} +
+
+
+
+ + ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/events/inline-json-view.tsx b/packages/pipelines/pipeline-ui/src/components/events/inline-json-view.tsx new file mode 100644 index 000000000..8fc5e0a52 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/events/inline-json-view.tsx @@ -0,0 +1,17 @@ +import type { PipelineEvent } from "@ucdjs/pipelines-core"; + +export interface InlineJsonViewProps { + event: PipelineEvent; +} + +export function InlineJsonView({ event }: InlineJsonViewProps) { + const jsonString = JSON.stringify(event, null, 2); + + return ( +
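
A hedged wiring sketch for `EventDetailPanel`, using the `useEventView` hook exported later in this diff to hold the selection state:

```tsx
import type { PipelineEvent } from "@ucdjs/pipelines-core";
import { EventDetailPanel, useEventView } from "@ucdjs/pipelines-ui";

export function EventsPane({ events }: { events: readonly PipelineEvent[] }) {
  const view = useEventView();
  const selected = events.find((e) => e.id === view.selectedEventId) ?? null;

  return (
    <EventDetailPanel
      event={selected}
      isOpen={view.isDetailPanelOpen}
      onClose={view.closeDetailPanel}
      events={events}
    />
  );
}
```
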
+
+        {jsonString}
+      
+
+ ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/events/simple-timeline.tsx b/packages/pipelines/pipeline-ui/src/components/events/simple-timeline.tsx new file mode 100644 index 000000000..412d52a3c --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/events/simple-timeline.tsx @@ -0,0 +1,175 @@ +import type { PipelineEvent } from "@ucdjs/pipelines-core"; +import { cn } from "#lib/utils"; + +export interface SimpleTimelineProps { + events: readonly PipelineEvent[]; + selectedEventId: string | null; + onSelectEvent: (eventId: string) => void; +} + +function getEventColor(type: string): string { + if (type.startsWith("pipeline:")) return "bg-purple-500"; + if (type.startsWith("version:")) return "bg-blue-500"; + if (type.startsWith("artifact:")) return "bg-orange-500"; + if (type.startsWith("file:")) return "bg-green-500"; + if (type.startsWith("parse:")) return "bg-cyan-500"; + if (type.startsWith("resolve:")) return "bg-pink-500"; + if (type.startsWith("cache:")) return "bg-yellow-500"; + if (type === "error") return "bg-red-500"; + return "bg-gray-500"; +} + +function getEventLevel(type: string): number { + if (type.startsWith("pipeline:")) return 0; + if (type.startsWith("version:")) return 1; + if (type.startsWith("artifact:")) return 2; + return 3; +} + +function formatDuration(ms: number): string { + if (ms > 1000) return `${(ms / 1000).toFixed(2)}s`; + if (ms > 1) return `${ms.toFixed(1)}ms`; + if (ms > 0.001) return `${(ms * 1000).toFixed(1)}μs`; + return `${(ms * 1000000).toFixed(0)}ns`; +} + +export function SimpleTimeline({ events, selectedEventId, onSelectEvent }: SimpleTimelineProps) { + if (events.length === 0) return null; + + const sortedEvents = [...events].sort((a, b) => a.timestamp - b.timestamp); + const timestamps = sortedEvents.map((e) => e.timestamp); + const startTime = Math.min(...timestamps); + const endTime = Math.max(...timestamps); + const totalDuration = Math.max(endTime - startTime, 1); + + const eventsByLevel: Record = {}; + for (const event of sortedEvents) { + const level = getEventLevel(event.type); + if (!eventsByLevel[level]) { + eventsByLevel[level] = []; + } + eventsByLevel[level].push(event); + } + + const levels = Object.keys(eventsByLevel) + .map(Number) + .sort((a, b) => a - b); + + return ( +
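
For reference, the mappings the module-private helpers above implement (expected outputs derived from the code as written; these functions are not exported):

```ts
getEventColor("version:start"); // "bg-blue-500"  — prefix match on "version:"
getEventLevel("artifact:built"); // 2             — pipeline=0, version=1, artifact=2, everything else=3
formatDuration(2500); // "2.50s"
formatDuration(0.5); // "500.0μs"
```
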
+
+

Execution Timeline

+ + {sortedEvents.length} + {" "} + events · + {formatDuration(totalDuration)} + +
+ +
+
+ {[0, 25, 50, 75, 100].map((pct) => ( +
+ ))} +
+ +
+ {levels.map((level) => { + const levelEvents = eventsByLevel[level] ?? []; + + return ( +
+
+ + {levelEvents.map((event, index) => { + const isSelected = event.id === selectedEventId; + const color = getEventColor(event.type); + + const hasDuration = "durationMs" in event + && typeof event.durationMs === "number" + && event.durationMs > 0; + + const isStartEvent = event.type.endsWith(":start"); + + const eventOffset = event.timestamp - startTime; + + let leftPercent: number; + let widthPercent: number; + + if (totalDuration < 0.1) { + leftPercent = (index / Math.max(levelEvents.length - 1, 1)) * 90; + widthPercent = hasDuration ? 8 : 6; + } else if (hasDuration) { + const endPercent = (eventOffset / totalDuration) * 100; + const durationPercent = (event.durationMs / totalDuration) * 100; + + leftPercent = Math.max(0, endPercent - durationPercent); + widthPercent = Math.min(durationPercent, endPercent); + } else { + leftPercent = (eventOffset / totalDuration) * 100; + widthPercent = 6; + } + + const clampedLeft = Math.max(0, Math.min(leftPercent, 94)); + const maxWidth = 100 - clampedLeft; + const clampedWidth = Math.max(3, Math.min(widthPercent, maxWidth)); + + return ( + + ); + })} +
+ ); + })} +
+
+ +
+ {[ + { color: "bg-purple-500", label: "Pipeline" }, + { color: "bg-blue-500", label: "Version" }, + { color: "bg-orange-500", label: "Artifact" }, + { color: "bg-green-500", label: "File" }, + { color: "bg-cyan-500", label: "Parse" }, + { color: "bg-pink-500", label: "Resolve" }, + { color: "bg-yellow-500", label: "Cache" }, + { color: "bg-red-500", label: "Error" }, + ].map(({ color, label }) => ( +
+
+ {label} +
+ ))} +
+
+ ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/events/view-mode-toggle.tsx b/packages/pipelines/pipeline-ui/src/components/events/view-mode-toggle.tsx new file mode 100644 index 000000000..1ca5da408 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/events/view-mode-toggle.tsx @@ -0,0 +1,36 @@ +import { cn } from "#lib/utils"; +import { Button } from "@ucdjs-internal/shared-ui/ui/button"; + +export interface ViewModeToggleProps { + isJsonMode: boolean; + onToggle: () => void; +} + +export function ViewModeToggle({ isJsonMode, onToggle }: ViewModeToggleProps) { + return ( +
+ + +
+ ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/graph/details.tsx b/packages/pipelines/pipeline-ui/src/components/graph/details.tsx new file mode 100644 index 000000000..06375114f --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/graph/details.tsx @@ -0,0 +1,176 @@ +import type { PipelineGraphNode } from "@ucdjs/pipelines-core"; +import type { CSSProperties } from "react"; + +export interface PipelineGraphDetailsProps { + node: PipelineGraphNode | null; + onClose: () => void; +} + +const containerStyle: CSSProperties = { + width: "280px", + backgroundColor: "#ffffff", + borderLeft: "1px solid #e5e7eb", + display: "flex", + flexDirection: "column", + height: "100%", + boxShadow: "-2px 0 8px rgba(0,0,0,0.05)", + fontFamily: "system-ui, -apple-system, sans-serif", +}; + +const headerStyle: CSSProperties = { + display: "flex", + alignItems: "center", + justifyContent: "space-between", + padding: "12px", + borderBottom: "1px solid #e5e7eb", +}; + +const closeButtonStyle: CSSProperties = { + padding: "4px", + color: "#9ca3af", + background: "none", + border: "none", + borderRadius: "4px", + cursor: "pointer", + display: "flex", + alignItems: "center", + justifyContent: "center", +}; + +const contentStyle: CSSProperties = { + flex: 1, + padding: "12px", + overflowY: "auto", +}; + +const detailsContainerStyle: CSSProperties = { + display: "flex", + flexDirection: "column", + gap: "12px", +}; + +const detailRowStyle: CSSProperties = { + display: "flex", + flexDirection: "column", + gap: "2px", +}; + +const detailLabelStyle: CSSProperties = { + fontSize: "11px", + fontWeight: 500, + color: "#6b7280", + textTransform: "uppercase", + letterSpacing: "0.025em", +}; + +const detailValueStyle: CSSProperties = { + fontSize: "13px", + color: "#111827", + fontFamily: "ui-monospace, monospace", + wordBreak: "break-all", +}; + +const badgeStyleCache = new Map(); + +function getBadgeStyle(type: string): CSSProperties { + let cached = badgeStyleCache.get(type); + if (!cached) { + const colors: Record = { + source: { bg: "#eef2ff", color: "#4f46e5" }, + file: { bg: "#ecfdf5", color: "#059669" }, + route: { bg: "#fffbeb", color: "#d97706" }, + artifact: { bg: "#f5f3ff", color: "#7c3aed" }, + output: { bg: "#f0f9ff", color: "#0284c7" }, + }; + const c = colors[type] ?? { bg: "#f3f4f6", color: "#6b7280" }; + cached = { + padding: "2px 8px", + fontSize: "11px", + fontWeight: 600, + borderRadius: "4px", + textTransform: "uppercase", + letterSpacing: "0.025em", + backgroundColor: c.bg, + color: c.color, + }; + badgeStyleCache.set(type, cached); + } + return cached; +} + +function DetailRow({ label, value }: { label: string; value: string }) { + return ( +
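
`ViewModeToggle` pairs naturally with `useEventView`, which persists the JSON-mode flag to localStorage; a minimal sketch:

```tsx
import { useEventView, ViewModeToggle } from "@ucdjs/pipelines-ui";

export function EventsToolbar() {
  const { isJsonMode, toggleJsonMode } = useEventView();
  return <ViewModeToggle isJsonMode={isJsonMode} onToggle={toggleJsonMode} />;
}
```
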
+ {label} + {value} +
+ ); +} + +function NodeDetails({ node }: { node: PipelineGraphNode }) { + switch (node.type) { + case "source": + return ; + case "file": + return ( + <> + + + + + + + ); + case "route": + return ; + case "artifact": + return ; + case "output": + return ( + <> + + {node.property && } + + ); + } +} + +const closeIcon = ( + + + +); + +export function PipelineGraphDetails({ + node, + onClose, +}: PipelineGraphDetailsProps) { + if (!node) { + return null; + } + + return ( +
+
+ + {node.type} + + +
+ +
+
+ + +
+
+
+ ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/graph/filters.tsx b/packages/pipelines/pipeline-ui/src/components/graph/filters.tsx new file mode 100644 index 000000000..c3dfd7e08 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/graph/filters.tsx @@ -0,0 +1,138 @@ +import type { PipelineGraphNodeType } from "@ucdjs/pipelines-core"; +import type { CSSProperties } from "react"; +import { memo, useCallback } from "react"; + +interface NodeTypeConfig { + label: string; + color: string; +} + +const nodeTypeLabels: Record = { + source: { label: "Source", color: "#6366f1" }, + file: { label: "File", color: "#10b981" }, + route: { label: "Route", color: "#f59e0b" }, + artifact: { label: "Artifact", color: "#8b5cf6" }, + output: { label: "Output", color: "#0ea5e9" }, +}; + +const allNodeTypes: readonly PipelineGraphNodeType[] = ["source", "file", "route", "artifact", "output"] as const; + +const containerStyle: CSSProperties = { + display: "flex", + alignItems: "center", + gap: "6px", + padding: "8px 12px", + backgroundColor: "#ffffff", + borderRadius: "10px", + boxShadow: "0 1px 3px rgba(0,0,0,0.1), 0 1px 2px rgba(0,0,0,0.06)", + fontFamily: "system-ui, -apple-system, sans-serif", + border: "1px solid #e5e7eb", +}; + +const labelStyle: CSSProperties = { + fontSize: "11px", + fontWeight: 500, + color: "#6b7280", + marginRight: "4px", + textTransform: "uppercase", + letterSpacing: "0.05em", +}; + +const buttonStyleCache = new Map(); +const dotStyleCache = new Map(); + +function getButtonStyle(color: string, isVisible: boolean): CSSProperties { + const key = `${color}-${isVisible}`; + let cached = buttonStyleCache.get(key); + if (!cached) { + cached = { + display: "flex", + alignItems: "center", + gap: "6px", + padding: "4px 10px", + fontSize: "12px", + fontWeight: 500, + borderRadius: "6px", + border: "none", + cursor: "pointer", + transition: "all 0.15s ease", + backgroundColor: isVisible ? `${color}15` : "#f3f4f6", + color: isVisible ? color : "#9ca3af", + opacity: isVisible ? 1 : 0.6, + }; + buttonStyleCache.set(key, cached); + } + return cached; +} + +function getDotStyle(color: string, isVisible: boolean): CSSProperties { + const key = `${color}-${isVisible}`; + let cached = dotStyleCache.get(key); + if (!cached) { + cached = { + width: "8px", + height: "8px", + borderRadius: "50%", + backgroundColor: color, + opacity: isVisible ? 1 : 0.3, + transition: "opacity 0.15s ease", + }; + dotStyleCache.set(key, cached); + } + return cached; +} + +export interface PipelineGraphFiltersProps { + visibleTypes: Set; + onToggleType: (type: PipelineGraphNodeType) => void; +} + +interface FilterButtonProps { + type: PipelineGraphNodeType; + config: NodeTypeConfig; + isVisible: boolean; + onToggle: (type: PipelineGraphNodeType) => void; +} + +const FilterButton = memo(({ + type, + config, + isVisible, + onToggle, +}: FilterButtonProps) => { + const handleClick = useCallback(() => { + onToggle(type); + }, [onToggle, type]); + + return ( + + ); +}); + +export function PipelineGraphFilters({ + visibleTypes, + onToggleType, +}: PipelineGraphFiltersProps) { + return ( +
+ Show: + {allNodeTypes.map((type) => ( + + ))} +
+ ); +} diff --git a/packages/pipelines/pipeline-ui/src/components/graph/node-types.ts b/packages/pipelines/pipeline-ui/src/components/graph/node-types.ts new file mode 100644 index 000000000..ce435e21e --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/graph/node-types.ts @@ -0,0 +1,16 @@ +import { + ArtifactNode, + FileNode, + OutputNode, + RouteNode, + SourceNode, +} from "./nodes"; + +// Hoisted outside component to maintain stable reference +export const nodeTypes = { + source: SourceNode, + file: FileNode, + route: RouteNode, + artifact: ArtifactNode, + output: OutputNode, +} as const; diff --git a/packages/pipelines/pipeline-ui/src/components/graph/nodes.tsx b/packages/pipelines/pipeline-ui/src/components/graph/nodes.tsx new file mode 100644 index 000000000..851c58e47 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/graph/nodes.tsx @@ -0,0 +1,203 @@ +import type { PipelineGraphNode } from "@ucdjs/pipelines-core"; +import type { NodeProps } from "@xyflow/react"; +import type { CSSProperties } from "react"; +import { Handle, Position } from "@xyflow/react"; +import { memo } from "react"; + +export interface PipelineNodeData { + pipelineNode: PipelineGraphNode; + label: string; +} + +interface NodeTypeStyle { + bg: string; + border: string; + iconBg: string; + icon: string; +} + +// Hoisted outside component - these never change +const nodeTypeStyles: Record = { + source: { + bg: "#eef2ff", + border: "#a5b4fc", + iconBg: "#6366f1", + icon: "S", + }, + file: { + bg: "#ecfdf5", + border: "#6ee7b7", + iconBg: "#10b981", + icon: "F", + }, + route: { + bg: "#fffbeb", + border: "#fcd34d", + iconBg: "#f59e0b", + icon: "R", + }, + artifact: { + bg: "#f5f3ff", + border: "#c4b5fd", + iconBg: "#8b5cf6", + icon: "A", + }, + output: { + bg: "#f0f9ff", + border: "#7dd3fc", + iconBg: "#0ea5e9", + icon: "O", + }, +}; + +const defaultStyle: NodeTypeStyle = { + bg: "#f9fafb", + border: "#d1d5db", + iconBg: "#6b7280", + icon: "?", +}; + +// Hoisted static styles - reused across all nodes +const flexCenterStyle: CSSProperties = { + display: "flex", + alignItems: "center", +}; + +const labelContainerStyle: CSSProperties = { + display: "flex", + flexDirection: "column", + overflow: "hidden", + marginLeft: "10px", +}; + +const typeStyle: CSSProperties = { + fontSize: "10px", + textTransform: "uppercase", + letterSpacing: "0.05em", + color: "#6b7280", + marginBottom: "1px", +}; + +const labelStyle: CSSProperties = { + fontSize: "13px", + fontWeight: 500, + color: "#111827", + whiteSpace: "nowrap", + overflow: "hidden", + textOverflow: "ellipsis", +}; + +// Cache for computed styles to avoid recreation +const containerStyleCache = new Map(); +const iconStyleCache = new Map(); +const handleStyleCache = new Map(); + +function getContainerStyle(styles: NodeTypeStyle, selected: boolean): CSSProperties { + const key = `${styles.bg}-${styles.border}-${selected}`; + let cached = containerStyleCache.get(key); + if (!cached) { + cached = { + backgroundColor: styles.bg, + border: `2px solid ${styles.border}`, + borderRadius: "10px", + padding: "10px 14px", + minWidth: "150px", + maxWidth: "220px", + boxShadow: selected + ? 
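
The hoisting comment above matters because React Flow treats a new `nodeTypes` object as a new renderer registry and remounts the custom nodes. A sketch of the intended consumption (mirrors `pipeline-graph.tsx` below):

```tsx
import type { PipelineFlowEdge, PipelineFlowNode } from "#lib/adapter";
import { ReactFlow } from "@xyflow/react";
import { nodeTypes } from "./node-types";

export function MiniGraph({ nodes, edges }: { nodes: PipelineFlowNode[]; edges: PipelineFlowEdge[] }) {
  // Module-level nodeTypes keeps the reference identical across renders.
  return <ReactFlow nodes={nodes} edges={edges} nodeTypes={nodeTypes} fitView />;
}
```
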
`0 0 0 2px #3b82f6, 0 1px 3px rgba(0,0,0,0.1)` + : "0 1px 3px rgba(0,0,0,0.1)", + transition: "box-shadow 0.15s ease", + fontFamily: "system-ui, -apple-system, sans-serif", + }; + containerStyleCache.set(key, cached); + } + return cached; +} + +function getIconStyle(iconBg: string): CSSProperties { + let cached = iconStyleCache.get(iconBg); + if (!cached) { + cached = { + display: "flex", + alignItems: "center", + justifyContent: "center", + width: "28px", + height: "28px", + borderRadius: "6px", + backgroundColor: iconBg, + color: "#ffffff", + fontSize: "12px", + fontWeight: 700, + flexShrink: 0, + }; + iconStyleCache.set(iconBg, cached); + } + return cached; +} + +function getHandleStyle(border: string): CSSProperties { + let cached = handleStyleCache.get(border); + if (!cached) { + cached = { + width: "8px", + height: "8px", + backgroundColor: border, + border: "none", + }; + handleStyleCache.set(border, cached); + } + return cached; +} + +// Base node component - memoized to prevent re-renders when parent updates +const BaseNode = memo(({ + data, + selected = false, + type, +}: NodeProps & { data: PipelineNodeData; type: string }) => { + const styles = nodeTypeStyles[type] ?? defaultStyle; + + return ( +
+ + +
+ + {styles.icon} + +
+ + {type} + + + {data.label} + +
+
+ + +
+ ); +}); + +// Factory function to create node type components +function createNodeComponent(type: string) { + return memo((props: NodeProps & { data: PipelineNodeData }) => ( + + )); +} + +// Individual node type components - created via factory +export const SourceNode = createNodeComponent("source"); +export const FileNode = createNodeComponent("file"); +export const RouteNode = createNodeComponent("route"); +export const ArtifactNode = createNodeComponent("artifact"); +export const OutputNode = createNodeComponent("output"); diff --git a/packages/pipelines/pipeline-ui/src/components/graph/pipeline-graph.tsx b/packages/pipelines/pipeline-ui/src/components/graph/pipeline-graph.tsx new file mode 100644 index 000000000..104db1581 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/graph/pipeline-graph.tsx @@ -0,0 +1,204 @@ +import type { PipelineFlowNode } from "#lib/adapter"; +import type { + PipelineGraphNode, + PipelineGraphNodeType, + PipelineGraph as PipelineGraphType, +} from "@ucdjs/pipelines-core"; +import type { NodeMouseHandler, OnNodesChange } from "@xyflow/react"; +import type { CSSProperties } from "react"; +import { filterNodesByType, pipelineGraphToFlow } from "#lib/adapter"; +import { getNodeColor } from "#lib/colors"; +import { applyLayout } from "#lib/layout"; +import { + applyNodeChanges, + Background, + Controls, + MiniMap, + ReactFlow, +} from "@xyflow/react"; +import { memo, useCallback, useMemo, useRef, useState } from "react"; +import { PipelineGraphDetails } from "./details"; +import { PipelineGraphFilters } from "./filters"; +import { nodeTypes } from "./node-types"; + +import "@xyflow/react/dist/style.css"; + +const defaultVisibleTypes: Set = new Set([ + "source", + "file", + "route", + "artifact", + "output", +]); + +const containerStyle: CSSProperties = { + display: "flex", + height: "100%", + width: "100%", +}; + +const graphContainerStyle: CSSProperties = { + flex: 1, + display: "flex", + flexDirection: "column", + position: "relative", +}; + +const filtersContainerStyle: CSSProperties = { + position: "absolute", + top: "12px", + left: "12px", + zIndex: 10, +}; + +const fitViewOptions = { padding: 0.2 } as const; +const proOptions = { hideAttribution: true } as const; +const minimapMaskColor = "rgba(0, 0, 0, 0.1)"; + +export interface PipelineGraphProps { + graph: PipelineGraphType; + onNodeSelect?: (node: PipelineGraphNode | null) => void; + showFilters?: boolean; + showDetails?: boolean; + showMinimap?: boolean; + className?: string; +} + +export const PipelineGraph = memo(({ + graph, + onNodeSelect, + showFilters = true, + showDetails = true, + showMinimap = true, + className, +}: PipelineGraphProps) => { + const { allNodes, allEdges } = useMemo(() => { + const { nodes, edges } = pipelineGraphToFlow(graph); + return { allNodes: nodes, allEdges: edges }; + }, [graph]); + + const [visibleTypes, setVisibleTypes] = useState>( + () => new Set(defaultVisibleTypes), + ); + + const [selectedNode, setSelectedNode] = useState(null); + + const { layoutedNodes, layoutedEdges } = useMemo(() => { + const { nodes: filteredNodes, edges: filteredEdges } = filterNodesByType( + allNodes, + allEdges, + visibleTypes, + ); + const positioned = applyLayout(filteredNodes, filteredEdges); + return { layoutedNodes: positioned, layoutedEdges: filteredEdges }; + }, [allNodes, allEdges, visibleTypes]); + + const [nodes, setNodes] = useState([]); + + const layoutKeyRef = useRef(""); + + const currentLayoutKey = layoutedNodes.map((n) => n.id).join(","); + if 
(currentLayoutKey !== layoutKeyRef.current) { + layoutKeyRef.current = currentLayoutKey; + setNodes(layoutedNodes); + } + + const onNodesChange: OnNodesChange = useCallback((changes) => { + setNodes((nds) => applyNodeChanges(changes, nds)); + }, []); + + const isDraggingRef = useRef(false); + + const handleToggleType = useCallback((type: PipelineGraphNodeType) => { + setVisibleTypes((prev) => { + const next = new Set(prev); + if (next.has(type)) { + if (next.size > 1) { + next.delete(type); + } + } else { + next.add(type); + } + return next; + }); + }, []); + + const handleNodeDragStart = useCallback(() => { + isDraggingRef.current = true; + }, []); + + const handleNodeDragStop = useCallback(() => { + setTimeout(() => { + isDraggingRef.current = false; + }, 0); + }, []); + + const handleNodeClick: NodeMouseHandler = useCallback( + (_event, node) => { + if (isDraggingRef.current) { + return; + } + const pipelineNode = node.data?.pipelineNode ?? null; + setSelectedNode(pipelineNode); + onNodeSelect?.(pipelineNode); + }, + [onNodeSelect], + ); + + const handlePaneClick = useCallback(() => { + setSelectedNode(null); + onNodeSelect?.(null); + }, [onNodeSelect]); + + const handleCloseDetails = useCallback(() => { + setSelectedNode(null); + onNodeSelect?.(null); + }, [onNodeSelect]); + + return ( +
+
+ {showFilters && ( +
+ +
+ )} + + + + + {showMinimap && ( + + )} + +
+ + {showDetails && selectedNode && ( + + )} +
+ ); +}); diff --git a/packages/pipelines/pipeline-ui/src/components/pipeline-sidebar.tsx b/packages/pipelines/pipeline-ui/src/components/pipeline-sidebar.tsx new file mode 100644 index 000000000..4917cd4ae --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/components/pipeline-sidebar.tsx @@ -0,0 +1,164 @@ +import { cn } from "#lib/utils"; +import { Link, useParams } from "@tanstack/react-router"; +import { + Sidebar, + SidebarContent, + SidebarFooter, + SidebarGroup, + SidebarGroupLabel, + SidebarHeader, + SidebarMenu, + SidebarMenuButton, + SidebarMenuItem, + SidebarMenuSub, + SidebarMenuSubButton, + SidebarMenuSubItem, +} from "@ucdjs-internal/shared-ui/ui/sidebar"; +import { usePipelines } from "@ucdjs/pipelines-ui"; +import { BookOpen, ExternalLink, Folder, FolderOpen } from "lucide-react"; +import { useMemo, useState } from "react"; + +export function PipelineSidebar() { + const { data, loading } = usePipelines(); + const params = useParams({ strict: false }) as { id?: string; file?: string }; + const currentPipelineId = params.id; + const currentFileSlug = params.file; + const [openFiles, setOpenFiles] = useState>({}); + + const files = useMemo(() => { + return data?.files ?? []; + }, [data?.files]); + + const toggleFile = (fileId: string) => { + setOpenFiles((prev) => ({ + ...prev, + [fileId]: !prev[fileId], + })); + }; + + return ( + + +
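
A minimal embedding sketch for `PipelineGraph`; the graph value would come from a pipeline response:

```tsx
import type { PipelineGraph as PipelineGraphType } from "@ucdjs/pipelines-core";
import { PipelineGraph } from "@ucdjs/pipelines-ui";

export function GraphPane({ graph }: { graph: PipelineGraphType }) {
  return (
    <PipelineGraph
      graph={graph}
      showMinimap={false}
      onNodeSelect={(node) => console.log(node?.type ?? "cleared")}
    />
  );
}
```
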
+

+ UCD Pipelines +

+

Pipeline files

+
+
+ + + + + {loading + ? ( +
+ Loading... +
+ ) + : ( + files.map((file) => { + const isFileActive = currentFileSlug === file.fileId; + const isOpen = openFiles[file.fileId] ?? isFileActive; + const fileName = file.filePath.split("/").pop() ?? file.filePath; + + return ( + + { + event.preventDefault(); + toggleFile(file.fileId); + }} + render={( + + {isOpen ? : } + {fileName} + {file.pipelines.length} + + )} + /> + {isOpen && ( + + {file.pipelines.map((pipeline) => { + const isActive = currentPipelineId === pipeline.id && currentFileSlug === file.fileId; + + return ( + + +
+ + {pipeline.name || pipeline.id} + + + )} + /> + + ); + })} + + )} + + ); + }) + )} + + + + + + + Documentation + + + + + Getting Started + + )} + /> + + + + + API Reference + + )} + /> + + + + {(data?.errors?.length || 0) > 0 && ( +
+

+ {data?.errors.length} + {" "} + error + {data?.errors.length !== 1 ? "s" : ""} +

+
+ )} +
+ + ); +} diff --git a/packages/pipelines/pipeline-ui/src/hooks/index.ts b/packages/pipelines/pipeline-ui/src/hooks/index.ts new file mode 100644 index 000000000..dc70ff769 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/hooks/index.ts @@ -0,0 +1,30 @@ +export { + useExecute, + type UseExecuteOptions, + type UseExecuteReturn, +} from "./use-execute"; +export { + type EventViewActions, + type EventViewState, + useEventView, +} from "./use-event-view"; +export { + usePipeline, + type UsePipelineOptions, + type UsePipelineReturn, +} from "./use-pipeline"; +export { + usePipelineFile, + type PipelineFileResponse, + type UsePipelineFileOptions, + type UsePipelineFileReturn, +} from "./use-pipeline-file"; +export { + usePipelineVersions, + type UsePipelineVersionsReturn, +} from "./use-pipeline-versions"; +export { + usePipelines, + type UsePipelinesOptions, + type UsePipelinesReturn, +} from "./use-pipelines"; diff --git a/packages/pipelines/pipeline-ui/src/hooks/use-event-view.ts b/packages/pipelines/pipeline-ui/src/hooks/use-event-view.ts new file mode 100644 index 000000000..8a037d8e0 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/hooks/use-event-view.ts @@ -0,0 +1,90 @@ +import type { PipelineEvent } from "@ucdjs/pipelines-core"; +import { useCallback, useState } from "react"; + +export interface EventViewState { + isJsonMode: boolean; + selectedEventId: string | null; + isDetailPanelOpen: boolean; + expandedInlineIds: Set; +} + +export interface EventViewActions { + toggleJsonMode: () => void; + setJsonMode: (value: boolean) => void; + selectEvent: (eventId: string | null) => void; + openDetailPanel: (eventId: string) => void; + closeDetailPanel: () => void; + toggleInlineExpansion: (eventId: string) => void; + isInlineExpanded: (eventId: string) => boolean; +} + +export function useEventView(): EventViewState & EventViewActions { + const [isJsonMode, setIsJsonMode] = useState(() => { + if (typeof window !== "undefined") { + const stored = localStorage.getItem("pipeline-events-json-mode"); + return stored === "true"; + } + return false; + }); + + const [selectedEventId, setSelectedEventId] = useState(null); + const [isDetailPanelOpen, setIsDetailPanelOpen] = useState(false); + const [expandedInlineIds, setExpandedInlineIds] = useState>(() => new Set()); + + const toggleJsonMode = useCallback(() => { + setIsJsonMode((prev) => { + const next = !prev; + localStorage.setItem("pipeline-events-json-mode", String(next)); + return next; + }); + }, []); + + const setJsonMode = useCallback((value: boolean) => { + setIsJsonMode(value); + localStorage.setItem("pipeline-events-json-mode", String(value)); + }, []); + + const selectEvent = useCallback((eventId: string | null) => { + setSelectedEventId(eventId); + }, []); + + const openDetailPanel = useCallback((eventId: string) => { + setSelectedEventId(eventId); + setIsDetailPanelOpen(true); + }, []); + + const closeDetailPanel = useCallback(() => { + setIsDetailPanelOpen(false); + setSelectedEventId(null); + }, []); + + const toggleInlineExpansion = useCallback((eventId: string) => { + setExpandedInlineIds((prev) => { + const next = new Set(prev); + if (next.has(eventId)) { + next.delete(eventId); + } else { + next.add(eventId); + } + return next; + }); + }, []); + + const isInlineExpanded = useCallback((eventId: string) => { + return expandedInlineIds.has(eventId); + }, [expandedInlineIds]); + + return { + isJsonMode, + selectedEventId, + isDetailPanelOpen, + expandedInlineIds, + toggleJsonMode, + setJsonMode, + selectEvent, + 
openDetailPanel, + closeDetailPanel, + toggleInlineExpansion, + isInlineExpanded, + }; +} diff --git a/packages/pipelines/pipeline-ui/src/hooks/use-execute.ts b/packages/pipelines/pipeline-ui/src/hooks/use-execute.ts new file mode 100644 index 000000000..c994cbc06 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/hooks/use-execute.ts @@ -0,0 +1,100 @@ +import type { ExecuteResult } from "../types"; +import { useCallback, useState } from "react"; + +export interface UseExecuteOptions { + /** + * Base URL for the API (default: "") + */ + baseUrl?: string; +} + +export interface UseExecuteReturn { + execute: (fileId: string, pipelineId: string, versions: string[]) => Promise; + executing: boolean; + result: ExecuteResult | null; + error: string | null; + executionId: string | null; + reset: () => void; +} + +interface ExecuteApiResponse { + success: boolean; + executionId?: string; + error?: string; +} + +export function useExecute(options: UseExecuteOptions = {}): UseExecuteReturn { + const { baseUrl = "" } = options; + + const [executing, setExecuting] = useState(false); + const [result, setResult] = useState(null); + const [error, setError] = useState(null); + const [executionId, setExecutionId] = useState(null); + + const execute = useCallback( + async (fileId: string, pipelineId: string, versions: string[]): Promise => { + setExecuting(true); + setError(null); + setResult(null); + setExecutionId(null); + + try { + const res = await fetch(`${baseUrl}/api/pipelines/${fileId}/${pipelineId}/execute`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ versions }), + }); + const data: ExecuteApiResponse = await res.json(); + + // Handle the new API response format + if (data.success && data.executionId) { + setExecutionId(data.executionId); + const successResult: ExecuteResult = { + success: true, + pipelineId, + executionId: data.executionId, + }; + setResult(successResult); + return successResult; + } else { + const errorResult: ExecuteResult = { + success: false, + pipelineId, + executionId: data.executionId, + error: data.error ?? "Execution failed", + }; + setResult(errorResult); + setError(errorResult.error ?? "Execution failed"); + return errorResult; + } + } catch (err) { + const errorResult: ExecuteResult = { + success: false, + pipelineId, + error: err instanceof Error ? err.message : String(err), + }; + setResult(errorResult); + setError(errorResult.error ?? 
"Execution failed"); + return errorResult; + } finally { + setExecuting(false); + } + }, + [baseUrl], + ); + + const reset = useCallback(() => { + setResult(null); + setError(null); + setExecutionId(null); + }, []); + + return { + execute, + executing, + result, + error, + executionId, + reset, + }; +} diff --git a/packages/pipelines/pipeline-ui/src/hooks/use-pipeline-file.ts b/packages/pipelines/pipeline-ui/src/hooks/use-pipeline-file.ts new file mode 100644 index 000000000..48ee36eb5 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/hooks/use-pipeline-file.ts @@ -0,0 +1,66 @@ +import type { PipelineFileInfo } from "../types"; +import { useCallback, useEffect, useState } from "react"; + +export interface PipelineFileResponse { + file?: PipelineFileInfo; + error?: string; +} + +export interface UsePipelineFileOptions { + baseUrl?: string; + fetchOnMount?: boolean; +} + +export interface UsePipelineFileReturn { + file: PipelineFileInfo | null; + loading: boolean; + error: string | null; + refetch: () => void; +} + +export function usePipelineFile( + fileId: string, + options: UsePipelineFileOptions = {}, +): UsePipelineFileReturn { + const { baseUrl = "", fetchOnMount = true } = options; + + const [file, setFile] = useState(null); + const [loading, setLoading] = useState(fetchOnMount); + const [error, setError] = useState(null); + + const fetchFile = useCallback(async () => { + setLoading(true); + setError(null); + try { + const res = await fetch(`${baseUrl}/api/pipelines/${fileId}`); + if (!res.ok) { + throw new Error(`HTTP ${res.status}`); + } + const json: PipelineFileResponse = await res.json(); + if (json.error) { + setError(json.error); + setFile(null); + } else { + setFile(json.file ?? null); + } + } catch (err) { + setError(err instanceof Error ? 
err.message : "Failed to load pipeline file"); + setFile(null); + } finally { + setLoading(false); + } + }, [baseUrl, fileId]); + + useEffect(() => { + if (fetchOnMount) { + fetchFile(); + } + }, [fetchOnMount, fetchFile]); + + return { + file, + loading, + error, + refetch: fetchFile, + }; +} diff --git a/packages/pipelines/pipeline-ui/src/hooks/use-pipeline-versions.ts b/packages/pipelines/pipeline-ui/src/hooks/use-pipeline-versions.ts new file mode 100644 index 000000000..c4e24c28b --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/hooks/use-pipeline-versions.ts @@ -0,0 +1,117 @@ +import { useCallback, useMemo, useState } from "react"; + +const STORAGE_KEY_PREFIX = "ucd-versions-"; + +export interface UsePipelineVersionsReturn { + selectedVersions: Set; + toggleVersion: (version: string) => void; + selectAll: (versions: string[]) => void; + deselectAll: () => void; +} + +function getStorageKey(storageKey: string): string { + return `${STORAGE_KEY_PREFIX}${storageKey}`; +} + +function loadVersionsFromStorage(storageKey: string, allVersions: string[]): Set { + if (typeof window === "undefined") { + return new Set(allVersions); + } + + try { + const stored = localStorage.getItem(getStorageKey(storageKey)); + if (stored) { + const parsed = JSON.parse(stored) as string[]; + // Filter to only valid versions + const validVersions = parsed.filter((v) => allVersions.includes(v)); + if (validVersions.length > 0) { + return new Set(validVersions); + } + } + } catch { + // Fall through to default + } + + return new Set(allVersions); +} + +function saveVersionsToStorage(storageKey: string, versions: Set): void { + if (typeof window === "undefined") return; + + try { + localStorage.setItem(getStorageKey(storageKey), JSON.stringify(Array.from(versions))); + } catch { + // Ignore storage errors + } +} + +function sanitizeVersions(versions: Iterable, allVersions: string[]): Set { + const valid = Array.from(versions).filter((v) => allVersions.includes(v)); + return new Set(valid.length > 0 ? valid : allVersions); +} + +export function usePipelineVersions( + pipelineId: string, + allVersions: string[], + storageKeyOverride?: string, +): UsePipelineVersionsReturn { + const [overridesByPipeline, setOverridesByPipeline] = useState>({}); + const storageKey = storageKeyOverride ?? pipelineId; + + const baseSelection = useMemo( + () => loadVersionsFromStorage(storageKey, allVersions), + [storageKey, allVersions], + ); + + const selectedVersions = useMemo(() => { + const override = overridesByPipeline[pipelineId]; + const source = override ? new Set(override) : baseSelection; + return sanitizeVersions(source, allVersions); + }, [allVersions, baseSelection, overridesByPipeline, pipelineId]); + + const toggleVersion = useCallback((version: string) => { + setOverridesByPipeline((prev) => { + const current = prev[pipelineId] ? 
new Set(prev[pipelineId]) : selectedVersions; + const next = new Set(current); + if (next.has(version)) { + next.delete(version); + } else { + next.add(version); + } + const sanitized = sanitizeVersions(next, allVersions); + saveVersionsToStorage(storageKey, sanitized); + return { + ...prev, + [pipelineId]: Array.from(sanitized), + }; + }); + }, [allVersions, pipelineId, selectedVersions, storageKey]); + + const selectAll = useCallback( + (versions: string[]) => { + const sanitized = sanitizeVersions(versions, allVersions); + saveVersionsToStorage(storageKey, sanitized); + setOverridesByPipeline((prev) => ({ + ...prev, + [pipelineId]: Array.from(sanitized), + })); + }, + [allVersions, pipelineId, storageKey], + ); + + const deselectAll = useCallback(() => { + const sanitized = sanitizeVersions([], allVersions); + saveVersionsToStorage(storageKey, sanitized); + setOverridesByPipeline((prev) => ({ + ...prev, + [pipelineId]: Array.from(sanitized), + })); + }, [allVersions, pipelineId, storageKey]); + + return { + selectedVersions, + toggleVersion, + selectAll, + deselectAll, + }; +} diff --git a/packages/pipelines/pipeline-ui/src/hooks/use-pipeline.ts b/packages/pipelines/pipeline-ui/src/hooks/use-pipeline.ts new file mode 100644 index 000000000..2c7a7faa8 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/hooks/use-pipeline.ts @@ -0,0 +1,67 @@ +import type { PipelineDetails, PipelineResponse } from "../types"; +import { useCallback, useEffect, useState } from "react"; + +export interface UsePipelineOptions { + /** Base URL for the API (default: "") */ + baseUrl?: string; + /** Whether to fetch on mount (default: true) */ + fetchOnMount?: boolean; +} + +export interface UsePipelineReturn { + pipeline: PipelineDetails | null; + loading: boolean; + error: string | null; + refetch: () => void; +} + +/** + * Hook to fetch and manage a single pipeline by ID + */ +export function usePipeline( + fileId: string, + pipelineId: string, + options: UsePipelineOptions = {}, +): UsePipelineReturn { + const { baseUrl = "", fetchOnMount = true } = options; + + const [pipeline, setPipeline] = useState(null); + const [loading, setLoading] = useState(fetchOnMount); + const [error, setError] = useState(null); + + const fetchPipeline = useCallback(async () => { + setLoading(true); + setError(null); + try { + const res = await fetch(`${baseUrl}/api/pipelines/${fileId}/${pipelineId}`); + if (!res.ok) { + throw new Error(`HTTP ${res.status}`); + } + const json: PipelineResponse = await res.json(); + if (json.error) { + setError(json.error); + setPipeline(null); + } else { + setPipeline(json.pipeline ?? null); + } + } catch (err) { + setError(err instanceof Error ? 
err.message : "Failed to load pipeline"); + setPipeline(null); + } finally { + setLoading(false); + } + }, [baseUrl, fileId, pipelineId]); + + useEffect(() => { + if (fetchOnMount) { + fetchPipeline(); + } + }, [fetchOnMount, fetchPipeline]); + + return { + pipeline, + loading, + error, + refetch: fetchPipeline, + }; +} diff --git a/packages/pipelines/pipeline-ui/src/hooks/use-pipelines.ts b/packages/pipelines/pipeline-ui/src/hooks/use-pipelines.ts new file mode 100644 index 000000000..0ada8c9c0 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/hooks/use-pipelines.ts @@ -0,0 +1,67 @@ +import type { PipelinesResponse } from "../types"; +import { useCallback, useEffect, useState } from "react"; + +export interface UsePipelinesOptions { + /** Base URL for the API (default: "") */ + baseUrl?: string; + /** Optional search query */ + search?: string; + /** Whether to fetch on mount (default: true) */ + fetchOnMount?: boolean; +} + +export interface UsePipelinesReturn { + data: PipelinesResponse | null; + loading: boolean; + error: string | null; + refetch: () => void; +} + +/** + * Hook to fetch and manage the list of all pipelines + */ +export function usePipelines(options: UsePipelinesOptions = {}): UsePipelinesReturn { + const { baseUrl = "", search, fetchOnMount = true } = options; + + const [data, setData] = useState(null); + const [loading, setLoading] = useState(fetchOnMount); + const [error, setError] = useState(null); + + const fetchPipelines = useCallback(async () => { + setLoading(true); + setError(null); + try { + const params = new URLSearchParams(); + if (search && search.trim()) { + params.set("search", search.trim()); + } + const queryString = params.toString(); + const url = queryString + ? `${baseUrl}/api/pipelines?${queryString}` + : `${baseUrl}/api/pipelines`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`HTTP ${res.status}`); + } + const json = await res.json(); + setData(json); + } catch (err) { + setError(err instanceof Error ? 
err.message : "Failed to load pipelines"); + } finally { + setLoading(false); + } + }, [baseUrl, search]); + + useEffect(() => { + if (fetchOnMount) { + fetchPipelines(); + } + }, [fetchOnMount, fetchPipelines]); + + return { + data, + loading, + error, + refetch: fetchPipelines, + }; +} diff --git a/packages/pipelines/pipeline-ui/src/index.ts b/packages/pipelines/pipeline-ui/src/index.ts new file mode 100644 index 000000000..99ae16ca2 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/index.ts @@ -0,0 +1,83 @@ +export { + ExecutionErrors, + type ExecutionErrorsProps, + ExecutionResult, + type ExecutionResultProps, + ExecutionSummary, + type ExecutionSummaryProps, +} from "./components/detail/execution-result"; +export { + RouteList, + type RouteListProps, +} from "./components/detail/route-list"; +export { + SourceList, + type SourceListProps, +} from "./components/detail/source-list"; +export { + VersionSelector, + type VersionSelectorProps, +} from "./components/detail/version-selector"; +export { + EventDetailPanel, + type EventDetailPanelProps, +} from "./components/events/event-detail-panel"; +export { + InlineJsonView, + type InlineJsonViewProps, +} from "./components/events/inline-json-view"; +export { + SimpleTimeline, + type SimpleTimelineProps, +} from "./components/events/simple-timeline"; +export { + ViewModeToggle, + type ViewModeToggleProps, +} from "./components/events/view-mode-toggle"; +export { + PipelineGraphDetails, + type PipelineGraphDetailsProps, +} from "./components/graph/details"; +export { + PipelineGraphFilters, + type PipelineGraphFiltersProps, +} from "./components/graph/filters"; +export { nodeTypes } from "./components/graph/node-types"; +export { + ArtifactNode, + FileNode, + OutputNode, + type PipelineNodeData, + RouteNode, + SourceNode, +} from "./components/graph/nodes"; +export { + PipelineGraph, + type PipelineGraphProps, +} from "./components/graph/pipeline-graph"; +export { PipelineSidebar } from "./components/pipeline-sidebar"; +export * from "./hooks"; +export { + filterNodesByType, + type PipelineFlowEdge, + type PipelineFlowNode, + pipelineGraphToFlow, +} from "./lib/adapter"; +export { getNodeColor, nodeTypeColors } from "./lib/colors"; +export { formatHighPrecisionTime } from "./lib/format-time"; +export { applyLayout, NODE_HEIGHT, NODE_WIDTH } from "./lib/layout"; +export { + toPipelineDetails, + toPipelineInfo, + toRouteDetails, +} from "./lib/pipeline-utils"; +export { cn } from "./lib/utils"; +export type { + ExecuteResult, + LoadError, + PipelineDetails, + PipelineFileInfo, + PipelineInfo, + PipelineResponse, + PipelinesResponse, +} from "./types"; diff --git a/packages/pipelines/pipeline-ui/src/lib/adapter.ts b/packages/pipelines/pipeline-ui/src/lib/adapter.ts new file mode 100644 index 000000000..cdac1bef0 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/lib/adapter.ts @@ -0,0 +1,101 @@ +import type { + PipelineGraph, + PipelineGraphEdge, + PipelineGraphNode, + PipelineGraphNodeType, +} from "@ucdjs/pipelines-core"; + +import type { Edge, Node } from "@xyflow/react"; + +export interface PipelineFlowNode extends Node { + data: { + pipelineNode: PipelineGraphNode; + label: string; + }; + + type: PipelineGraphNodeType; +} + +export interface PipelineFlowEdge extends Edge { + data: { + pipelineEdge: PipelineGraphEdge; + }; +} + +function getNodeLabel(node: PipelineGraphNode): string { + switch (node.type) { + case "source": + return `v${node.version}`; + case "file": + return node.file.name; + case "route": + return node.routeId; 
+ case "artifact": + return node.artifactId; + case "output": + return node.property + ? `Output[${node.outputIndex}].${node.property}` + : `Output[${node.outputIndex}]`; + } +} + +function getEdgeStyle(edgeType: PipelineGraphEdge["type"]): Pick { + const baseStyle = { strokeWidth: 2 }; + + switch (edgeType) { + case "provides": + return { style: { ...baseStyle, stroke: "#6366f1" } }; + case "matched": + return { style: { ...baseStyle, stroke: "#22c55e" } }; + case "parsed": + return { style: { ...baseStyle, stroke: "#f59e0b" } }; + case "resolved": + return { style: { ...baseStyle, stroke: "#3b82f6" } }; + case "uses-artifact": + return { style: { ...baseStyle, stroke: "#8b5cf6" }, animated: true }; + default: + return { style: baseStyle }; + } +} + +export function pipelineGraphToFlow( + graph: PipelineGraph, +): { nodes: PipelineFlowNode[]; edges: PipelineFlowEdge[] } { + const nodes: PipelineFlowNode[] = graph.nodes.map((node) => ({ + id: node.id, + type: node.type, + position: { x: 0, y: 0 }, + data: { + pipelineNode: node, + label: getNodeLabel(node), + }, + })); + + const edges: PipelineFlowEdge[] = graph.edges.map((edge, index) => ({ + id: `edge-${index}-${edge.from}-${edge.to}`, + source: edge.from, + target: edge.to, + label: edge.type, + ...getEdgeStyle(edge.type), + data: { + pipelineEdge: edge, + }, + })); + + return { nodes, edges }; +} + +export function filterNodesByType( + nodes: PipelineFlowNode[], + edges: PipelineFlowEdge[], + visibleTypes: Set, +): { nodes: PipelineFlowNode[]; edges: PipelineFlowEdge[] } { + const filteredNodes = nodes.filter((node) => visibleTypes.has(node.type as PipelineGraphNodeType)); + const filteredNodeIds = new Set(filteredNodes.map((n) => n.id)); + + const filteredEdges = edges.filter( + (edge) => filteredNodeIds.has(edge.source) && filteredNodeIds.has(edge.target), + ); + + return { nodes: filteredNodes, edges: filteredEdges }; +} diff --git a/packages/pipelines/pipeline-ui/src/lib/colors.ts b/packages/pipelines/pipeline-ui/src/lib/colors.ts new file mode 100644 index 000000000..662ae07b1 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/lib/colors.ts @@ -0,0 +1,13 @@ +export const nodeTypeColors: Record = { + source: "#6366f1", + file: "#10b981", + route: "#f59e0b", + artifact: "#8b5cf6", + output: "#0ea5e9", + default: "#6b7280", +}; + +export function getNodeColor(node: { type?: string }): string { + const color = nodeTypeColors[node.type ?? ""]; + return color ?? nodeTypeColors.default ?? 
"#6b7280"; +} diff --git a/packages/pipelines/pipeline-ui/src/lib/format-time.ts b/packages/pipelines/pipeline-ui/src/lib/format-time.ts new file mode 100644 index 000000000..f0a7f6e76 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/lib/format-time.ts @@ -0,0 +1,5 @@ +export function formatHighPrecisionTime(ms: number): string { + const seconds = Math.floor(ms / 1000); + const fractionalMs = ms % 1000; + return `${seconds}.${fractionalMs.toFixed(3).padStart(6, "0")}s`; +} diff --git a/packages/pipelines/pipeline-ui/src/lib/index.ts b/packages/pipelines/pipeline-ui/src/lib/index.ts new file mode 100644 index 000000000..59a989478 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/lib/index.ts @@ -0,0 +1,5 @@ +export { filterNodesByType, pipelineGraphToFlow } from "./adapter"; +export type { PipelineFlowEdge, PipelineFlowNode } from "./adapter"; +export { getNodeColor, nodeTypeColors } from "./colors"; +export { applyLayout, NODE_HEIGHT, NODE_WIDTH } from "./layout"; +export { cn } from "./utils"; diff --git a/packages/pipelines/pipeline-ui/src/lib/layout.ts b/packages/pipelines/pipeline-ui/src/lib/layout.ts new file mode 100644 index 000000000..676f4b206 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/lib/layout.ts @@ -0,0 +1,102 @@ +import type { PipelineFlowEdge, PipelineFlowNode } from "./adapter"; + +const NODE_WIDTH = 180; + +const NODE_HEIGHT = 60; + +const HORIZONTAL_GAP = 80; + +const VERTICAL_GAP = 40; + +export function applyLayout( + nodes: PipelineFlowNode[], + edges: PipelineFlowEdge[], +): PipelineFlowNode[] { + if (nodes.length === 0) { + return nodes; + } + + const incomingEdges = new Map>(); + const outgoingEdges = new Map>(); + + for (const node of nodes) { + incomingEdges.set(node.id, new Set()); + outgoingEdges.set(node.id, new Set()); + } + + const nodeIds = new Set(nodes.map((n) => n.id)); + + for (const edge of edges) { + if (nodeIds.has(edge.source) && nodeIds.has(edge.target)) { + incomingEdges.get(edge.target)?.add(edge.source); + outgoingEdges.get(edge.source)?.add(edge.target); + } + } + + const layers = new Map(); + const rootNodes = nodes.filter((n) => incomingEdges.get(n.id)?.size === 0); + + const firstNode = nodes[0]; + const queue: Array<{ id: string; layer: number }> = rootNodes.length > 0 + ? rootNodes.map((n) => ({ id: n.id, layer: 0 })) + : firstNode ? [{ id: firstNode.id, layer: 0 }] : []; + + while (queue.length > 0) { + const item = queue.shift(); + if (!item) { + continue; + } + const { id, layer } = item; + + if (!layers.has(id) || layers.get(id)! < layer) { + layers.set(id, layer); + + for (const childId of outgoingEdges.get(id) || []) { + queue.push({ id: childId, layer: layer + 1 }); + } + } + } + + for (const node of nodes) { + if (!layers.has(node.id)) { + layers.set(node.id, 0); + } + } + + const layerGroups = new Map(); + for (const node of nodes) { + const layer = layers.get(node.id) ?? 
0; + if (!layerGroups.has(layer)) { + layerGroups.set(layer, []); + } + layerGroups.get(layer)!.push(node); + } + + const sortedLayers = Array.from(layerGroups.entries()).sort((a, b) => a[0] - b[0]); + + const positionedNodes = new Map(); + + for (const [layerIndex, layerNodes] of sortedLayers) { + const x = layerIndex * (NODE_WIDTH + HORIZONTAL_GAP); + const layerHeight = layerNodes.length * (NODE_HEIGHT + VERTICAL_GAP) - VERTICAL_GAP; + const startY = -layerHeight / 2; + + for (let i = 0; i < layerNodes.length; i++) { + const node = layerNodes[i]; + if (!node) { + continue; + } + const y = startY + i * (NODE_HEIGHT + VERTICAL_GAP); + + const positionedNode: PipelineFlowNode = { + ...node, + position: { x, y }, + }; + positionedNodes.set(node.id, positionedNode); + } + } + + return nodes.map((n) => positionedNodes.get(n.id) ?? n); +} + +export { NODE_HEIGHT, NODE_WIDTH }; diff --git a/packages/pipelines/pipeline-ui/src/lib/pipeline-utils.ts b/packages/pipelines/pipeline-ui/src/lib/pipeline-utils.ts new file mode 100644 index 000000000..d7e3d99df --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/lib/pipeline-utils.ts @@ -0,0 +1,53 @@ +import type { PipelineDefinition, PipelineRouteDefinition } from "@ucdjs/pipelines-core"; +import type { PipelineDetails, PipelineInfo } from "../types"; +import { parseDependency } from "@ucdjs/pipelines-core"; + +export function toPipelineInfo(pipeline: PipelineDefinition): PipelineInfo { + return { + id: pipeline.id, + name: pipeline.name, + description: pipeline.description, + tags: pipeline.tags, + versions: pipeline.versions, + routeCount: pipeline.routes.length, + sourceCount: pipeline.inputs.length, + sourceId: pipeline.inputs[0]?.id ?? "local", + }; +} + +export function toPipelineDetails(pipeline: PipelineDefinition): PipelineDetails { + return { + ...toPipelineInfo(pipeline), + routes: pipeline.routes.map((route) => toRouteDetails(route)), + sources: pipeline.inputs.map((source) => ({ id: source.id })), + }; +} + +export function toRouteDetails( + route: PipelineRouteDefinition, +): PipelineDetails["routes"][number] { + const depends = (route.depends ?? []).map((dep) => parseDependency(dep)); + const emits = Object.entries(route.emits ?? {}).map(([id, def]) => { + const scope = def.scope === "global" ? "global" : "version"; + return { id, scope } as const; + }); + + const outputs = route.out + ? [{ dir: route.out.dir, fileName: typeof route.out.fileName === "function" ? "[fn]" : route.out.fileName }] + : []; + + const transformList = (route.transforms ?? []) as { id?: string }[]; + const transforms = transformList.map((transform, index) => { + const id = transform.id; + return id ?? 
`transform-${index + 1}`; + }); + + return { + id: route.id, + cache: route.cache !== false, + depends, + emits, + outputs, + transforms, + }; +} diff --git a/apps/web/src/lib/utils.ts b/packages/pipelines/pipeline-ui/src/lib/utils.ts similarity index 98% rename from apps/web/src/lib/utils.ts rename to packages/pipelines/pipeline-ui/src/lib/utils.ts index 88283f013..8b2811290 100644 --- a/apps/web/src/lib/utils.ts +++ b/packages/pipelines/pipeline-ui/src/lib/utils.ts @@ -1,5 +1,7 @@ import type { ClassValue } from "clsx"; + import { clsx } from "clsx"; + import { twMerge } from "tailwind-merge"; export function cn(...inputs: ClassValue[]) { diff --git a/packages/pipelines/pipeline-ui/src/styles/globals.css b/packages/pipelines/pipeline-ui/src/styles/globals.css new file mode 100644 index 000000000..6ee1b199d --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/styles/globals.css @@ -0,0 +1,3 @@ +@import "@ucdjs-internal/shared-ui/styles.css"; + +@source "../components/**/*.{ts,js,mjs,jsx,tsx}"; diff --git a/packages/pipelines/pipeline-ui/src/types.ts b/packages/pipelines/pipeline-ui/src/types.ts new file mode 100644 index 000000000..f9465a315 --- /dev/null +++ b/packages/pipelines/pipeline-ui/src/types.ts @@ -0,0 +1,78 @@ +import type { PipelineEvent, PipelineGraph } from "@ucdjs/pipelines-core"; + +export interface PipelineInfo { + id: string; + name: string; + description?: string; + tags?: string[]; + versions: string[]; + routeCount: number; + sourceCount: number; + sourceId: string; +} + +export interface PipelineFileInfo { + fileId: string; + filePath: string; + sourceId: string; + pipelines: PipelineInfo[]; +} + +export interface PipelineDetails { + id: string; + name?: string; + description?: string; + versions: string[]; + routeCount: number; + sourceCount: number; + routes: Array<{ + id: string; + cache: boolean; + depends: Array< + | { type: "route"; routeId: string } + | { type: "artifact"; routeId: string; artifactName: string } + >; + emits: Array<{ id: string; scope: "version" | "global" }>; + outputs: Array<{ dir?: string; fileName?: string }>; + transforms: string[]; + }>; + sources: Array<{ id: string }>; +} + +export interface LoadError { + filePath: string; + message: string; + sourceId?: string; +} + +export interface PipelinesResponse { + files: PipelineFileInfo[]; + errors: LoadError[]; +} + +export interface PipelineResponse { + pipeline?: PipelineDetails; + error?: string; + fileId?: string; + filePath?: string; + sourceId?: string; +} + +export interface ExecuteResult { + success: boolean; + pipelineId: string; + executionId?: string; + summary?: { + versions: string[]; + totalFiles: number; + matchedFiles: number; + skippedFiles: number; + fallbackFiles: number; + totalOutputs: number; + durationMs: number; + }; + graph?: PipelineGraph; + events?: PipelineEvent[]; + errors?: Array<{ scope: string; message: string }>; + error?: string; +} diff --git a/packages/pipelines/pipeline-ui/tsconfig.build.json b/packages/pipelines/pipeline-ui/tsconfig.build.json new file mode 100644 index 000000000..36c889e0c --- /dev/null +++ b/packages/pipelines/pipeline-ui/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "test"] +} diff --git a/packages/pipelines/pipeline-ui/tsconfig.json b/packages/pipelines/pipeline-ui/tsconfig.json new file mode 100644 index 000000000..07edf31d8 --- /dev/null +++ b/packages/pipelines/pipeline-ui/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@ucdjs-tooling/tsconfig/base", + 
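
The relocated `cn` helper combines `clsx` with `tailwind-merge` per the imports above (its one-line body is elided by the rename diff, so the exact return is assumed); the practical effect is that later conflicting Tailwind classes win:

```ts
cn("px-2 text-sm", false && "hidden", "px-4"); // -> "text-sm px-4"
```
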
"include": [ + "src", + "test", + "playgrounds" + ], + "exclude": ["dist"] +} diff --git a/packages/pipelines/pipeline-ui/tsdown.config.ts b/packages/pipelines/pipeline-ui/tsdown.config.ts new file mode 100644 index 000000000..82b6f4f59 --- /dev/null +++ b/packages/pipelines/pipeline-ui/tsdown.config.ts @@ -0,0 +1,44 @@ +import pluginBabel from "@rollup/plugin-babel"; +import { createTsdownConfig } from "@ucdjs-tooling/tsdown-config"; + +export default createTsdownConfig({ + entry: [ + "./src/index.ts", + "./src/lib/*.ts", + "./src/hooks/index.ts", + "./src/components/*.tsx", + ], + exports: { + customExports(exports) { + exports["./styles.css"] = "./dist/styles/globals.css"; + exports["./package.json"] = "./package.json"; + + return exports; + }, + packageJson: false, + }, + copy: [ + { from: "src/styles/globals.css", to: "dist/styles" }, + ], + format: "esm", + inputOptions: { + transform: { + jsx: "react-jsx", + }, + experimental: { + lazyBarrel: true, + }, + }, + unbundle: true, + plugins: [ + pluginBabel({ + babelHelpers: "bundled", + parserOpts: { + sourceType: "module", + plugins: ["jsx", "typescript"], + }, + plugins: ["babel-plugin-react-compiler"], + extensions: [".js", ".jsx", ".ts", ".tsx"], + }), + ], +}); diff --git a/packages/pipelines/pipeline-ui/turbo.json b/packages/pipelines/pipeline-ui/turbo.json new file mode 100644 index 000000000..6d43a828f --- /dev/null +++ b/packages/pipelines/pipeline-ui/turbo.json @@ -0,0 +1,15 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "tasks": { + "build": { + "outputs": ["dist/**"] + }, + "dev": { + "persistent": false + }, + "typecheck": { + "outputs": [".cache/tsbuildinfo.json"] + } + } +} diff --git a/packages/shared-ui/package.json b/packages/shared-ui/package.json index 3729aa8e8..508f3353c 100644 --- a/packages/shared-ui/package.json +++ b/packages/shared-ui/package.json @@ -36,6 +36,7 @@ "./ui/breadcrumb": "./dist/ui/breadcrumb.mjs", "./ui/button": "./dist/ui/button.mjs", "./ui/card": "./dist/ui/card.mjs", + "./ui/checkbox": "./dist/ui/checkbox.mjs", "./ui/collapsible": "./dist/ui/collapsible.mjs", "./ui/combobox": "./dist/ui/combobox.mjs", "./ui/command": "./dist/ui/command.mjs", @@ -46,11 +47,13 @@ "./ui/input": "./dist/ui/input.mjs", "./ui/input-group": "./dist/ui/input-group.mjs", "./ui/label": "./dist/ui/label.mjs", + "./ui/scroll-area": "./dist/ui/scroll-area.mjs", "./ui/select": "./dist/ui/select.mjs", "./ui/separator": "./dist/ui/separator.mjs", "./ui/sheet": "./dist/ui/sheet.mjs", "./ui/sidebar": "./dist/ui/sidebar.mjs", "./ui/skeleton": "./dist/ui/skeleton.mjs", + "./ui/table": "./dist/ui/table.mjs", "./ui/textarea": "./dist/ui/textarea.mjs", "./ui/tooltip": "./dist/ui/tooltip.mjs", "./styles.css": "./dist/styles/globals.css", @@ -58,8 +61,7 @@ }, "types": "./dist/index.d.mts", "files": [ - "dist", - "src/styles" + "dist" ], "engines": { "node": ">=22.18" diff --git a/packages/shared-ui/src/components/shiki-code.tsx b/packages/shared-ui/src/components/shiki-code.tsx index 340206589..d641208a1 100644 --- a/packages/shared-ui/src/components/shiki-code.tsx +++ b/packages/shared-ui/src/components/shiki-code.tsx @@ -1,5 +1,5 @@ -import type { BundledLanguage, BundledTheme } from "shiki"; -import { cache, memo, use } from "react"; +import type { BundledLanguage, BundledTheme, DecorationItem, ShikiTransformer } from "shiki"; +import { memo, use, useMemo } from "react"; import { createJavaScriptRegexEngine } from "shiki"; import { createHighlighterCore } from "shiki/core"; @@ -8,32 +8,67 @@ export 
interface ShikiCodeProps { * The code to highlight */ code: string; + /** * The language to use for syntax highlighting * @default "typescript" */ language?: BundledLanguage; + /** * The theme to use for syntax highlighting - * @default "github-dark" + * Used when `themes` is not provided. + * @default "github-dark-dimmed" */ theme?: BundledTheme; + + /** + * Multiple themes for light/dark mode. + * When provided, `theme` is ignored. + */ + themes?: { + light: BundledTheme; + dark: BundledTheme; + }; + + /** + * Default color behavior when using multiple themes. + * @default "light-dark()" + */ + defaultColor?: "light" | "dark" | "light-dark()" | false; + /** * Additional CSS class names */ className?: string; + + /** + * Additional CSS class names for the `
pre` element.
+   */
+  preClassName?: string;
+
+  /**
+   * Additional CSS class names for the `code` element.
+   */
+  codeClassName?: string;
+
+  /**
+   * Shiki decorations for inline annotations/highlights.
+   */
+  decorations?: DecorationItem[];
 }
 
-const getHighlighter = cache(async () => {
-  return await createHighlighterCore({
-    themes: [import("shiki/themes/github-dark.mjs")],
-    langs: [
-      import("shiki/langs/javascript.mjs"),
-      import("shiki/langs/typescript.mjs"),
-      import("shiki/langs/json.mjs"),
-    ],
-    engine: createJavaScriptRegexEngine(),
-  });
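+// Create the highlighter once at module scope; React's `use()` below suspends
+// rendering until this promise resolves, replacing the previous `cache()` wrapper.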
+const highlighterPromise = createHighlighterCore({
+  themes: [
+    import("shiki/themes/github-light.mjs"),
+    import("shiki/themes/github-dark-dimmed.mjs"),
+  ],
+  langs: [
+    import("shiki/langs/javascript.mjs"),
+    import("shiki/langs/typescript.mjs"),
+    import("shiki/langs/json.mjs"),
+  ],
+  engine: createJavaScriptRegexEngine(),
 });
 
 /**
@@ -48,16 +83,81 @@ const getHighlighter = cache(async () => {
 export const ShikiCode = memo(({
   code,
   language = "typescript",
-  theme = "github-dark",
+  theme,
+  themes,
+  defaultColor = "light-dark()",
   className,
+  preClassName,
+  codeClassName,
+  decorations,
 }) => {
-  const highlighter = use(getHighlighter());
+  const highlighter = use(highlighterPromise);
+  const rootClassName = className ? `shiki-root ${className}` : "shiki-root";
+
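+  // Theme precedence: an explicit `themes` pair wins, then a single `theme`,
+  // otherwise fall back to the default light/dark pair below.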
+  const shadcnThemes = useMemo(() => {
+    if (themes) {
+      return themes;
+    }
+    if (theme) {
+      return null;
+    }
+    return {
+      light: "github-light",
+      dark: "github-dark-dimmed",
+    } satisfies ShikiCodeProps["themes"];
+  }, [theme, themes]);
+
+  const transformers = useMemo(() => {
+    const preClasses = ["shiki", "shiki-code"];
+    if (preClassName) {
+      preClasses.push(preClassName);
+    }
+    const codeClasses = ["shiki-code__code"];
+    if (codeClassName) {
+      codeClasses.push(codeClassName);
+    }
+
+    return [
+      {
+        name: "shiki:shadcn",
+        pre(hast) {
+          this.addClassToHast(hast, preClasses);
+          hast.properties ||= {};
+          hast.properties["data-language"] = this.options.lang;
+        },
+        code(hast) {
+          this.addClassToHast(hast, codeClasses);
+        },
+        line(hast, line) {
+          this.addClassToHast(hast, "shiki-code__line");
+          hast.properties ||= {};
+          hast.properties["data-line"] = line;
+        },
+      },
+    ] satisfies ShikiTransformer[];
+  }, [codeClassName, preClassName]);
+
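+  // Re-highlight only when inputs change; in dual-theme mode both palettes are
+  // emitted and `defaultColor: "light-dark()"` lets CSS pick one at render time.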
+  const html = useMemo(() => {
+    if (shadcnThemes) {
+      return highlighter.codeToHtml(code, {
+        lang: language,
+        themes: shadcnThemes,
+        defaultColor,
+        rootStyle: false,
+        transformers,
+        decorations,
+      });
+    }
 
-  const html = highlighter.codeToHtml(code, {
-    lang: language,
-    theme,
-  });
+    return highlighter.codeToHtml(code, {
+      lang: language,
+      theme: theme ?? "github-dark-dimmed",
+      rootStyle: false,
+      transformers,
+      decorations,
+    });
+  }, [code, decorations, defaultColor, highlighter, language, shadcnThemes, theme, transformers]);
 
   // eslint-disable-next-line react-dom/no-dangerously-set-innerhtml
-  return <div className={className} dangerouslySetInnerHTML={{ __html: html }} />;
+  return <div className={rootClassName} dangerouslySetInnerHTML={{ __html: html }} />
; }); diff --git a/packages/shared-ui/src/styles/globals.css b/packages/shared-ui/src/styles/globals.css index f8dbbe39b..994060749 100644 --- a/packages/shared-ui/src/styles/globals.css +++ b/packages/shared-ui/src/styles/globals.css @@ -3,7 +3,8 @@ @import "shadcn/tailwind.css"; @import "@fontsource-variable/inter"; -@source "../ui/**/*.{ts,tsx,mjs,js,jsx}"; +@source "../ui/**/*.{ts,js,mjs,jsx,tsx}"; +@source "../components/**/*.{ts,js,mjs,jsx,tsx}"; @custom-variant dark (&:is(.dark *)); @@ -131,3 +132,59 @@ @apply font-sans; } } + +@layer components { + .shiki.shiki-code, + .shiki-root pre { + @apply text-foreground rounded-md text-[12px] leading-[13px] overflow-auto border; + white-space: pre; + word-break: normal; + overflow-wrap: normal; + background: var(--card); + box-shadow: none; + position: relative; + padding: 0.5rem 0.75rem 0.5rem 2.25rem; + counter-reset: shiki-line; + } + + .shiki.shiki-code code, + .shiki-root pre > code { + @apply font-mono; + white-space: inherit; + word-break: normal; + overflow-wrap: normal; + } + + .shiki-code__line { + @apply block px-1 -mx-1 rounded; + position: relative; + counter-increment: shiki-line; + } + + .shiki-code__line::before { + content: counter(shiki-line); + @apply absolute -left-8 top-0 w-6 text-right text-[0.65rem] text-muted-foreground/70; + } + + .shiki-decor { + @apply relative inline-flex items-center rounded-sm; + padding: 0 0.1rem; + background: color-mix(in oklab, var(--primary) 8%, transparent); + border-bottom: 1px solid color-mix(in oklab, var(--primary) 30%, transparent); + } + + .shiki-decor::after { + content: ""; + display: none; + } + + .shiki-decor-route { + border-bottom-color: color-mix(in oklab, var(--primary) 45%, transparent); + } + + .shiki-decor-route::after, + .shiki-decor-id::after, + .shiki-decor-name::after { + content: ""; + } +} diff --git a/packages/shared-ui/src/ui/checkbox.tsx b/packages/shared-ui/src/ui/checkbox.tsx new file mode 100644 index 000000000..871cf216e --- /dev/null +++ b/packages/shared-ui/src/ui/checkbox.tsx @@ -0,0 +1,25 @@ +import { cn } from "#lib/utils"; +import { Checkbox as CheckboxPrimitive } from "@base-ui/react/checkbox"; +import { CheckIcon } from "lucide-react"; + +function Checkbox({ className, ...props }: CheckboxPrimitive.Root.Props) { + return ( + + + + + + ); +} + +export { Checkbox }; diff --git a/packages/shared-ui/src/ui/command.tsx b/packages/shared-ui/src/ui/command.tsx index fa6b8e709..a186f8b7a 100644 --- a/packages/shared-ui/src/ui/command.tsx +++ b/packages/shared-ui/src/ui/command.tsx @@ -135,7 +135,7 @@ function CommandSeparator({ return ( ); @@ -150,7 +150,7 @@ function CommandItem({ + + {children} + + + + + ); +} + +function ScrollBar({ + className, + orientation = "vertical", + ...props +}: ScrollAreaPrimitive.Scrollbar.Props) { + return ( + + + + ); +} + +export { ScrollArea, ScrollBar }; diff --git a/packages/shared-ui/src/ui/table.tsx b/packages/shared-ui/src/ui/table.tsx new file mode 100644 index 000000000..7ffbae397 --- /dev/null +++ b/packages/shared-ui/src/ui/table.tsx @@ -0,0 +1,98 @@ +import { cn } from "#lib/utils"; +import * as React from "react"; + +function Table({ className, ...props }: React.ComponentProps<"table">) { + return ( +
+ + + ); +} + +function TableHeader({ className, ...props }: React.ComponentProps<"thead">) { + return ( + + ); +} + +function TableBody({ className, ...props }: React.ComponentProps<"tbody">) { + return ( + + ); +} + +function TableFooter({ className, ...props }: React.ComponentProps<"tfoot">) { + return ( + tr]:last:border-b-0", className)} + {...props} + /> + ); +} + +function TableRow({ className, ...props }: React.ComponentProps<"tr">) { + return ( + + ); +} + +function TableHead({ className, ...props }: React.ComponentProps<"th">) { + return ( +
+ ); +} + +function TableCell({ className, ...props }: React.ComponentProps<"td">) { + return ( + + ); +} + +function TableCaption({ + className, + ...props +}: React.ComponentProps<"caption">) { + return ( +
+ ); +} + +export { + Table, + TableBody, + TableCaption, + TableCell, + TableFooter, + TableHead, + TableHeader, + TableRow, +}; diff --git a/packages/shared-ui/tsdown.config.ts b/packages/shared-ui/tsdown.config.ts index 3e6f57b0e..9777df6c8 100644 --- a/packages/shared-ui/tsdown.config.ts +++ b/packages/shared-ui/tsdown.config.ts @@ -25,5 +25,9 @@ export default createTsdownConfig({ transform: { jsx: "react-jsx", }, + experimental: { + lazyBarrel: true, + }, }, + unbundle: true, }); diff --git a/packages/test-utils/package.json b/packages/test-utils/package.json index 728fd3a95..3a44c8b0e 100644 --- a/packages/test-utils/package.json +++ b/packages/test-utils/package.json @@ -26,6 +26,7 @@ "./mock-store": "./dist/mock-store.mjs", "./msw": "./dist/msw.mjs", "./msw/vitest-setup": "./dist/msw/vitest-setup.mjs", + "./pipelines": "./dist/pipelines.mjs", "./package.json": "./package.json" }, "types": "./dist/index.d.mts", diff --git a/packages/test-utils/src/async.ts b/packages/test-utils/src/async.ts new file mode 100644 index 000000000..548347d54 --- /dev/null +++ b/packages/test-utils/src/async.ts @@ -0,0 +1,84 @@ +/** + * Collects all values from an async iterable into an array. + * + * This function consumes an async iterable and collects all emitted values into + * a regular array. If the async iterable throws an error at any point, the error + * is propagated after the iterator's `return()` method is called (if available) + * to allow for cleanup. + * + * @typeParam T - The type of values yielded by the async iterable. + * @param {AsyncIterable} iterable - The async iterable to consume completely. + * @returns {Promise} A promise that resolves to an array containing all values in emission order. + * @throws {Error} Propagates any error thrown by the async iterable. + * + * @example + * ```ts + * const values = await collect(asyncFromArray([1, 2, 3])); + * console.assert(values.equals([1, 2, 3])); + * ``` + * + * @example Cleanup on error + * ```ts + * async function* source() { + * try { + * yield 1; + * throw new Error('boom'); + * } finally { + * console.log('Cleanup!'); // Called even though an error was thrown + * } + * } + * await collect(source()); // throws, but cleanup runs first + * ``` + */ +export async function collect(iterable: AsyncIterable): Promise { + const result: T[] = []; + for await (const value of iterable) { + result.push(value); + } + return result; +} + +/** + * Wraps a synchronous iterable as an async iterable. + * + * This is useful in tests when you want to simulate an async source but only have + * synchronous data. The resulting async iterable properly implements the async + * iterator protocol, including support for cleanup via `return()`. + * + * @typeParam T - The type of elements in the iterable. + * @param {Iterable} iterable - A synchronous iterable (array, Set, Map, etc.) to wrap. + * @param {object} [options] - Optional configuration. + * @param {number} [options.delay] - Number of milliseconds to wait before each value is yielded. + * Useful for simulating network latency or slow producers in tests. + * Must be a non-negative number. + * @returns {AsyncIterable} An async iterable that yields each element from the source iterable. 
+ * + * @example + * ```ts + * // Simple case: wrap an array + * for await (const value of asyncFromArray([1, 2, 3])) { + * console.log(value); + * } + * ``` + * + * @example + * ```ts + * // Simulate network latency (50ms between values) + * for await (const value of asyncFromArray(['a', 'b', 'c'], { delay: 50 })) { + * console.log(value); + * } + * ``` + */ +export function asyncFromArray( + iterable: Iterable, + options?: { readonly delay?: number }, +): AsyncIterable { + return (async function* () { + for (const value of iterable) { + if (options?.delay) { + await new Promise((resolve) => setTimeout(resolve, options.delay)); + } + yield value; + } + })(); +} diff --git a/packages/test-utils/src/index.ts b/packages/test-utils/src/index.ts index 81969ab2a..c1ccbf10a 100644 --- a/packages/test-utils/src/index.ts +++ b/packages/test-utils/src/index.ts @@ -1,5 +1,11 @@ +export { asyncFromArray, collect } from "./async"; export { mockStoreApi } from "./mock-store"; export type { MockStoreConfig, } from "./mock-store"; export { configure, unsafeResponse } from "./mock-store/helpers"; + +export function encodeBase64(content: string): string { + // eslint-disable-next-line node/prefer-global/buffer + return Buffer.from(content, "utf-8").toString("base64"); +} diff --git a/packages/test-utils/src/pipelines/index.ts b/packages/test-utils/src/pipelines/index.ts new file mode 100644 index 000000000..f6683856f --- /dev/null +++ b/packages/test-utils/src/pipelines/index.ts @@ -0,0 +1 @@ +export { createPipelineModuleSource } from "./source"; diff --git a/packages/test-utils/src/pipelines/source.ts b/packages/test-utils/src/pipelines/source.ts new file mode 100644 index 000000000..3ed9d29e9 --- /dev/null +++ b/packages/test-utils/src/pipelines/source.ts @@ -0,0 +1,75 @@ +export interface PipelineDefinition { + _type?: string; + id: string; + versions?: string[]; + inputs?: unknown[]; + routes?: unknown[]; + [key: string]: unknown; +} + +export type NamedExportValue = PipelineDefinition | string; + +export type NamedExportConfig = Record; + +export type PipelineModuleSourceNamed = string[] | NamedExportConfig; + +export interface PipelineModuleSourceOptions { + named?: PipelineModuleSourceNamed; + definitions?: Record>; + extraExports?: string; + prelude?: string; +} + +function isStringArray(value: PipelineModuleSourceNamed): value is string[] { + return Array.isArray(value); +} + +function isPipelineDefinition(value: NamedExportValue): value is PipelineDefinition { + return typeof value === "object" && value !== null && "id" in value; +} + +function buildDefinition( + id: string, + overrides?: Partial, +): string { + const def: PipelineDefinition = { + _type: "pipeline-definition", + id, + versions: ["16.0.0"], + inputs: [], + routes: [], + ...overrides, + }; + return JSON.stringify(def).replace(/"(\w+)":/g, "$1:"); +} + +function buildNamedExports( + named: PipelineModuleSourceNamed, + definitions: Record>, +): string { + if (isStringArray(named)) { + return named + .map((name) => `export const ${name} = ${buildDefinition(name, definitions[name])};`) + .join("\n\n"); + } + + return Object.entries(named) + .map(([name, value]) => { + const exportValue: string = isPipelineDefinition(value) + ? 
buildDefinition(value.id, value) + : value; + return `export const ${name} = ${exportValue};`; + }) + .join("\n\n"); +} + +export function createPipelineModuleSource( + options: PipelineModuleSourceOptions = {}, +): string { + const { named = [], definitions = {}, extraExports, prelude } = options; + + const namedExports: string = buildNamedExports(named, definitions); + const parts: string[] = [prelude, namedExports, extraExports].filter(Boolean) as string[]; + + return parts.join("\n\n"); +} diff --git a/packages/test-utils/test/async.test.ts b/packages/test-utils/test/async.test.ts new file mode 100644 index 000000000..93ea3382c --- /dev/null +++ b/packages/test-utils/test/async.test.ts @@ -0,0 +1,48 @@ +import { describe, expect, it } from "vitest"; +import { asyncFromArray, collect } from "../src/async"; + +describe("async helpers", () => { + it("collects values from an async iterable", async () => { + const source = asyncFromArray([1, 2, 3]); + const out = await collect(source); + expect(out).toEqual([1, 2, 3]); + }); + + it("works with empty iterables", async () => { + const out = await collect(asyncFromArray([])); + expect(out).toEqual([]); + }); + + it("calls cleanup when source throws (cleanup on error)", async () => { + let cleaned = false; + + async function* source() { + try { + yield 1; + throw new Error("boom"); + } finally { + cleaned = true; + } + } + + await expect(collect(source())).rejects.toThrow("boom"); + expect(cleaned).toBe(true); + }); + + it("propagates source errors (still throws)", async () => { + async function* source() { + yield 1; + throw new Error("boom"); + } + + await expect(collect(source())).rejects.toThrow("boom"); + }); + + it("asyncFromArray supports delay option (yields values)", async () => { + const out: number[] = []; + for await (const v of asyncFromArray([4, 5, 6], { delay: 1 })) { + out.push(v); + } + expect(out).toEqual([4, 5, 6]); + }); +}); diff --git a/packages/test-utils/tsdown.config.ts b/packages/test-utils/tsdown.config.ts index f7f666139..910b343c4 100644 --- a/packages/test-utils/tsdown.config.ts +++ b/packages/test-utils/tsdown.config.ts @@ -9,6 +9,7 @@ export default createTsdownConfig({ "fs-bridges/index": "./src/fs-bridges/index.ts", "matchers/vitest-setup": "./src/matchers/vitest-setup.ts", "matchers/types": "./src/matchers/types.d.ts", + "pipelines": "./src/pipelines/index.ts", }, external: [ "vitest", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 667fa01ac..7daf051c3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -19,6 +19,12 @@ catalogs: specifier: 4.11.7 version: 4.11.7 build: + '@rollup/plugin-babel': + specifier: 6.1.0 + version: 6.1.0 + babel-plugin-react-compiler: + specifier: 1.0.0 + version: 1.0.0 nanotar: specifier: 0.2.0 version: 0.2.0 @@ -89,6 +95,9 @@ catalogs: '@clack/prompts': specifier: 1.0.0 version: 1.0.0 + '@libsql/client': + specifier: 0.17.0 + version: 0.17.0 '@luxass/msw-utils': specifier: 0.6.0 version: 0.6.0 @@ -110,12 +119,30 @@ catalogs: cac: specifier: 6.7.14 version: 6.7.14 + chokidar: + specifier: 5.0.0 + version: 5.0.0 + crossws: + specifier: 0.4.4 + version: 0.4.4 defu: specifier: 6.1.4 version: 6.1.4 + drizzle-kit: + specifier: 0.31.8 + version: 0.31.8 + drizzle-orm: + specifier: 0.45.1 + version: 0.45.1 + esrap: + specifier: 2.2.2 + version: 2.2.2 farver: specifier: 0.4.2 version: 0.4.2 + h3: + specifier: 2.0.1-rc.11 + version: 2.0.1-rc.11 hookable: specifier: 6.0.1 version: 6.0.1 @@ -125,12 +152,21 @@ catalogs: obug: specifier: 2.1.1 version: 2.1.1 + oxc-parser: + specifier: 0.112.0 
+ version: 0.112.0 + oxc-transform: + specifier: 0.112.0 + version: 0.112.0 pathe: specifier: 2.0.3 version: 2.0.3 picomatch: specifier: 4.0.3 version: 4.0.3 + tinyglobby: + specifier: 0.2.15 + version: 0.2.15 yargs-parser: specifier: 22.0.0 version: 22.0.0 @@ -201,6 +237,9 @@ catalogs: '@fontsource-variable/inter': specifier: 5.2.8 version: 5.2.8 + '@icons-pack/react-simple-icons': + specifier: 13.8.0 + version: 13.8.0 '@tailwindcss/vite': specifier: 4.1.18 version: 4.1.18 @@ -240,6 +279,9 @@ catalogs: '@vitejs/plugin-react': specifier: 5.1.2 version: 5.1.2 + '@xyflow/react': + specifier: 12.10.0 + version: 12.10.0 babel-plugin-react-compiler: specifier: 1.0.0 version: 1.0.0 @@ -288,7 +330,7 @@ catalogs: overrides: '@asteasolutions/zod-to-openapi': 8.4.0 - rolldown: 1.0.0-rc.2 + rolldown: 1.0.0-rc.3 wrangler: 4.61.1 patchedDependencies: @@ -375,7 +417,7 @@ importers: devDependencies: '@cloudflare/vitest-pool-workers': specifier: catalog:testing - version: https://pkg.pr.new/@cloudflare/vitest-pool-workers@11632(@vitest/runner@4.1.0-beta.2)(@vitest/snapshot@4.1.0-beta.2)(vitest@4.1.0-beta.1) + version: https://pkg.pr.new/@cloudflare/vitest-pool-workers@11632(@vitest/runner@4.1.0-beta.1)(@vitest/snapshot@4.1.0-beta.1)(vitest@4.1.0-beta.1) '@luxass/eslint-config': specifier: catalog:linting version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) @@ -450,7 +492,7 @@ importers: version: 0.563.0(react@19.2.4) nitro: specifier: catalog:web - version: 3.0.1-alpha.2(better-sqlite3@12.5.0)(chokidar@5.0.0)(ioredis@5.8.2)(lru-cache@11.2.4)(rolldown@1.0.0-rc.2)(rollup@4.53.3)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + version: 3.0.1-alpha.2(@libsql/client@0.17.0)(better-sqlite3@12.5.0)(chokidar@5.0.0)(drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0))(ioredis@5.8.2)(lru-cache@11.2.4)(rolldown@1.0.0-rc.3)(rollup@4.53.3)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) react: specifier: catalog:web version: 19.2.4 @@ -517,7 +559,7 @@ importers: version: 0.9.4(@types/react-dom@19.2.3(@types/react@19.2.10))(@types/react@19.2.10)(csstype@3.2.3)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.9) '@tanstack/react-form': specifier: catalog:web - version: 1.28.0(@tanstack/react-start@1.157.17(crossws@0.4.3(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)))(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + version: 1.28.0(@tanstack/react-start@1.157.17(crossws@0.4.4(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)))(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@tanstack/react-query': specifier: catalog:web version: 5.90.20(react@19.2.4) @@ -535,10 +577,13 @@ importers: version: 
1.157.17(@tanstack/query-core@5.90.20)(@tanstack/react-query@5.90.20(react@19.2.4))(@tanstack/react-router@1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(@tanstack/router-core@1.157.16)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@tanstack/react-start': specifier: catalog:web - version: 1.157.17(crossws@0.4.3(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + version: 1.157.17(crossws@0.4.4(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) '@ucdjs-internal/shared-ui': specifier: workspace:* version: link:../../packages/shared-ui + '@ucdjs/pipelines-ui': + specifier: workspace:* + version: link:../../packages/pipelines/pipeline-ui '@ucdjs/schemas': specifier: workspace:* version: link:../../packages/schemas @@ -626,7 +671,7 @@ importers: version: 1.3.1(eslint@9.39.2(jiti@2.6.1)) nitro: specifier: catalog:web - version: 3.0.1-alpha.2(better-sqlite3@12.5.0)(chokidar@5.0.0)(ioredis@5.8.2)(lru-cache@11.2.4)(rolldown@1.0.0-rc.2)(rollup@4.53.3)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + version: 3.0.1-alpha.2(@libsql/client@0.17.0)(better-sqlite3@12.5.0)(chokidar@5.0.0)(drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0))(ioredis@5.8.2)(lru-cache@11.2.4)(rolldown@1.0.0-rc.3)(rollup@4.53.3)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) tailwindcss: specifier: catalog:web version: 4.1.18 @@ -666,6 +711,12 @@ importers: '@ucdjs/lockfile': specifier: workspace:* version: link:../lockfile + '@ucdjs/pipelines-loader': + specifier: workspace:* + version: link:../pipelines/pipeline-loader + '@ucdjs/pipelines-server': + specifier: workspace:* + version: link:../pipelines/pipeline-server '@ucdjs/schema-gen': specifier: workspace:* version: link:../schema-gen @@ -931,39 +982,24 @@ importers: specifier: catalog:testing version: 4.4.2(vitest@4.1.0-beta.1) - packages/schema-gen: + packages/pipelines/pipeline-artifacts: dependencies: - '@ai-sdk/openai': - specifier: catalog:prod - version: 3.0.23(zod@4.3.6) - '@luxass/unicode-utils-old': - specifier: catalog:prod - version: '@luxass/unicode-utils@0.11.0' - '@luxass/utils': - specifier: catalog:prod - version: 2.7.2 - '@ucdjs-internal/shared': + '@ucdjs/pipelines-core': specifier: workspace:* - version: link:../shared - ai: - specifier: catalog:prod - version: 6.0.64(zod@4.3.6) - knitwork: - specifier: catalog:prod - version: 1.3.0 + version: link:../pipeline-core zod: specifier: catalog:prod version: 4.3.6 devDependencies: '@luxass/eslint-config': specifier: catalog:linting - version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) + version: 
7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17) '@ucdjs-tooling/tsconfig': specifier: workspace:* - version: link:../../tooling/tsconfig + version: link:../../../tooling/tsconfig '@ucdjs-tooling/tsdown-config': specifier: workspace:* - version: link:../../tooling/tsdown-config + version: link:../../../tooling/tsdown-config eslint: specifier: catalog:linting version: 9.39.2(jiti@2.6.1) @@ -976,15 +1012,15 @@ importers: typescript: specifier: catalog:build version: 5.9.3 - vitest-testdirs: - specifier: catalog:testing - version: 4.4.2(vitest@4.1.0-beta.1) - packages/schemas: + packages/pipelines/pipeline-core: dependencies: - '@luxass/utils': + '@ucdjs-internal/shared': + specifier: workspace:* + version: link:../../shared + picomatch: specifier: catalog:prod - version: 2.7.2 + version: 4.0.3 zod: specifier: catalog:prod version: 4.3.6 @@ -992,12 +1028,15 @@ importers: '@luxass/eslint-config': specifier: catalog:linting version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17) + '@types/picomatch': + specifier: catalog:types + version: 4.0.2 '@ucdjs-tooling/tsconfig': specifier: workspace:* - version: link:../../tooling/tsconfig + version: link:../../../tooling/tsconfig '@ucdjs-tooling/tsdown-config': specifier: workspace:* - version: link:../../tooling/tsdown-config + version: link:../../../tooling/tsdown-config eslint: specifier: catalog:linting version: 9.39.2(jiti@2.6.1) @@ -1007,146 +1046,62 @@ importers: tsdown: specifier: catalog:build version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + tsx: + specifier: catalog:build + version: 4.21.0 typescript: specifier: catalog:build version: 5.9.3 - packages/shared: + packages/pipelines/pipeline-executor: dependencies: - '@luxass/msw-utils': - specifier: catalog:prod - version: 0.6.0(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3)) - '@luxass/utils': - specifier: catalog:prod - version: 2.7.2 - '@ucdjs/env': + '@ucdjs/pipelines-artifacts': specifier: workspace:* - version: link:../env - '@ucdjs/schemas': + version: link:../pipeline-artifacts + '@ucdjs/pipelines-core': specifier: workspace:* - version: link:../schemas - '@unicode-utils/core': - specifier: catalog:prod - version: 0.12.0-beta.19 - defu: - specifier: catalog:prod - version: 6.1.4 - obug: - specifier: catalog:prod - version: 2.1.1 - picomatch: - specifier: catalog:prod - version: 4.0.3 - zod: - specifier: catalog:prod - version: 4.3.6 + version: link:../pipeline-core devDependencies: '@luxass/eslint-config': specifier: catalog:linting - version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) - '@types/picomatch': - specifier: catalog:types - version: 4.0.2 + version: 
7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17) '@ucdjs-tooling/tsconfig': specifier: workspace:* - version: link:../../tooling/tsconfig + version: link:../../../tooling/tsconfig '@ucdjs-tooling/tsdown-config': specifier: workspace:* - version: link:../../tooling/tsdown-config + version: link:../../../tooling/tsdown-config eslint: specifier: catalog:linting version: 9.39.2(jiti@2.6.1) publint: specifier: catalog:build version: 0.3.17 - rolldown: - specifier: 1.0.0-rc.2 - version: 1.0.0-rc.2 tsdown: specifier: catalog:build version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) typescript: specifier: catalog:build version: 5.9.3 - vitest-testdirs: - specifier: catalog:testing - version: 4.4.2(vitest@4.1.0-beta.1) - packages/shared-ui: + packages/pipelines/pipeline-graph: dependencies: - '@base-ui/react': - specifier: catalog:web - version: 1.1.0(@types/react@19.2.10)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) - '@fontsource-variable/inter': - specifier: catalog:web - version: 5.2.8 - class-variance-authority: - specifier: catalog:web - version: 0.7.1 - clsx: - specifier: catalog:web - version: 2.1.1 - cmdk: - specifier: catalog:web - version: 1.1.1(@types/react-dom@19.2.3(@types/react@19.2.10))(@types/react@19.2.10)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) - lucide-react: - specifier: catalog:web - version: 0.563.0(react@19.2.4) - react: - specifier: catalog:web - version: 19.2.4 - react-dom: - specifier: catalog:web - version: 19.2.4(react@19.2.4) - shadcn: - specifier: catalog:web - version: 3.8.1(@types/node@24.3.1)(typescript@5.9.3) - shiki: - specifier: catalog:web - version: 3.22.0 - tailwind-merge: - specifier: catalog:web - version: 3.4.0 - tailwindcss: - specifier: catalog:web - version: 4.1.18 - tw-animate-css: - specifier: catalog:web - version: 1.4.0 - zod: - specifier: catalog:prod - version: 4.3.6 + '@ucdjs/pipelines-core': + specifier: workspace:* + version: link:../pipeline-core devDependencies: - '@eslint-react/eslint-plugin': - specifier: catalog:linting - version: 2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) '@luxass/eslint-config': specifier: catalog:linting version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17) - '@types/react': - specifier: catalog:types - version: 19.2.10 - '@types/react-dom': - specifier: catalog:types - version: 19.2.3(@types/react@19.2.10) '@ucdjs-tooling/tsconfig': specifier: workspace:* - version: link:../../tooling/tsconfig + version: link:../../../tooling/tsconfig '@ucdjs-tooling/tsdown-config': specifier: workspace:* - version: link:../../tooling/tsdown-config + version: link:../../../tooling/tsdown-config eslint: specifier: catalog:linting version: 9.39.2(jiti@2.6.1) - eslint-plugin-format: - specifier: catalog:linting - version: 1.3.1(eslint@9.39.2(jiti@2.6.1)) - eslint-plugin-react-hooks: - specifier: catalog:linting - version: 7.0.1(eslint@9.39.2(jiti@2.6.1)) - eslint-plugin-react-refresh: - specifier: catalog:linting - 
version: 0.4.26(eslint@9.39.2(jiti@2.6.1)) publint: specifier: catalog:build version: 0.3.17 @@ -1157,66 +1112,39 @@ importers: specifier: catalog:build version: 5.9.3 - packages/test-utils: + packages/pipelines/pipeline-loader: dependencies: - '@luxass/msw-utils': - specifier: catalog:prod - version: 0.6.0(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3)) - '@luxass/utils': - specifier: catalog:prod - version: 2.7.2 - '@ucdjs-internal/shared': - specifier: workspace:* - version: link:../shared - '@ucdjs/env': - specifier: workspace:* - version: link:../env - '@ucdjs/fs-bridge': - specifier: workspace:* - version: link:../fs-bridge - '@ucdjs/schemas': + '@ucdjs/pipelines-core': specifier: workspace:* - version: link:../schemas - '@unicode-utils/core': + version: link:../pipeline-core + oxc-transform: specifier: catalog:prod - version: 0.12.0-beta.19 - msw: - specifier: catalog:testing - version: 2.12.7(@types/node@24.3.1)(typescript@5.9.3) - zod: + version: 0.112.0 + rolldown: + specifier: 1.0.0-rc.3 + version: 1.0.0-rc.3 + tinyglobby: specifier: catalog:prod - version: 4.3.6 + version: 0.2.15 devDependencies: '@luxass/eslint-config': specifier: catalog:linting version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) '@ucdjs-tooling/tsconfig': specifier: workspace:* - version: link:../../tooling/tsconfig + version: link:../../../tooling/tsconfig '@ucdjs-tooling/tsdown-config': specifier: workspace:* - version: link:../../tooling/tsdown-config - '@ucdjs/ucd-store': - specifier: workspace:* - version: link:../ucd-store - '@vitest/expect': - specifier: catalog:testing - version: 4.1.0-beta.1 + version: link:../../../tooling/tsdown-config eslint: specifier: catalog:linting version: 9.39.2(jiti@2.6.1) - openapi-typescript: - specifier: catalog:build - version: 7.10.1(typescript@5.9.3) publint: specifier: catalog:build version: 0.3.17 tsdown: specifier: catalog:build version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) - tsx: - specifier: catalog:build - version: 4.21.0 typescript: specifier: catalog:build version: 5.9.3 @@ -1224,51 +1152,36 @@ importers: specifier: catalog:testing version: 4.4.2(vitest@4.1.0-beta.1) - packages/ucd-store: + packages/pipelines/pipeline-playground: dependencies: - '@luxass/utils': - specifier: catalog:prod - version: 2.7.2 - '@ucdjs-internal/shared': - specifier: workspace:* - version: link:../shared - '@ucdjs/client': - specifier: workspace:* - version: link:../client - '@ucdjs/env': - specifier: workspace:* - version: link:../env - '@ucdjs/fs-bridge': - specifier: workspace:* - version: link:../fs-bridge - '@ucdjs/lockfile': - specifier: workspace:* - version: link:../lockfile - '@ucdjs/path-utils': + '@ucdjs/pipelines-core': specifier: workspace:* - version: link:../path-utils - '@ucdjs/schemas': + version: link:../pipeline-core + '@ucdjs/pipelines-presets': specifier: workspace:* - version: link:../schemas - '@unicode-utils/core': - specifier: catalog:prod - version: 0.12.0-beta.19 - defu: + version: link:../pipeline-presets + zod: specifier: catalog:prod - version: 6.1.4 + version: 4.3.6 + + packages/pipelines/pipeline-presets: + dependencies: + '@ucdjs/pipelines-core': + specifier: workspace:* + version: link:../pipeline-core zod: specifier: 
catalog:prod version: 4.3.6 devDependencies: '@luxass/eslint-config': specifier: catalog:linting - version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) + version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17) '@ucdjs-tooling/tsconfig': specifier: workspace:* - version: link:../../tooling/tsconfig + version: link:../../../tooling/tsconfig '@ucdjs-tooling/tsdown-config': specifier: workspace:* - version: link:../../tooling/tsdown-config + version: link:../../../tooling/tsdown-config eslint: specifier: catalog:linting version: 9.39.2(jiti@2.6.1) @@ -1278,40 +1191,600 @@ importers: tsdown: specifier: catalog:build version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) - tsx: - specifier: catalog:build - version: 4.21.0 typescript: specifier: catalog:build version: 5.9.3 - vitest-testdirs: - specifier: catalog:testing - version: 4.4.2(vitest@4.1.0-beta.1) - packages/ucdjs-scripts: + packages/pipelines/pipeline-server: dependencies: - '@ucdjs-internal/shared': + '@libsql/client': + specifier: catalog:prod + version: 0.17.0 + '@ucdjs-internal/shared-ui': specifier: workspace:* - version: link:../shared - '@ucdjs/client': + version: link:../../shared-ui + '@ucdjs/pipelines-core': specifier: workspace:* - version: link:../client - '@ucdjs/schemas': + version: link:../pipeline-core + '@ucdjs/pipelines-executor': specifier: workspace:* - version: link:../schemas - '@unicode-utils/core': + version: link:../pipeline-executor + '@ucdjs/pipelines-loader': + specifier: workspace:* + version: link:../pipeline-loader + '@ucdjs/pipelines-ui': + specifier: workspace:* + version: link:../pipeline-ui + chokidar: specifier: catalog:prod - version: 0.12.0-beta.19 - apache-autoindex-parse: + version: 5.0.0 + crossws: specifier: catalog:prod - version: 5.0.2 - cac: + version: 0.4.4(srvx@0.10.1) + drizzle-orm: specifier: catalog:prod - version: 6.7.14 - nanotar: - specifier: catalog:build - version: 0.2.0 - wrangler: + version: 0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0) + esrap: + specifier: catalog:prod + version: 2.2.2 + h3: + specifier: catalog:prod + version: 2.0.1-rc.11(crossws@0.4.4(srvx@0.10.1)) + lucide-react: + specifier: catalog:web + version: 0.563.0(react@19.2.4) + oxc-parser: + specifier: catalog:prod + version: 0.112.0 + pathe: + specifier: catalog:prod + version: 2.0.3 + zod: + specifier: catalog:prod + version: 4.3.6 + devDependencies: + '@eslint-react/eslint-plugin': + specifier: catalog:linting + version: 2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@luxass/eslint-config': + specifier: catalog:linting + version: 
7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) + '@tailwindcss/vite': + specifier: catalog:web + version: 4.1.18(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + '@tanstack/react-router': + specifier: catalog:web + version: 1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@tanstack/router-plugin': + specifier: catalog:web + version: 1.157.17(@tanstack/react-router@1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + '@types/node': + specifier: catalog:types + version: 22.18.12 + '@types/react': + specifier: catalog:types + version: 19.2.10 + '@types/react-dom': + specifier: catalog:types + version: 19.2.3(@types/react@19.2.10) + '@ucdjs-tooling/tsconfig': + specifier: workspace:* + version: link:../../../tooling/tsconfig + '@ucdjs-tooling/tsdown-config': + specifier: workspace:* + version: link:../../../tooling/tsdown-config + '@vitejs/plugin-react': + specifier: catalog:web + version: 5.1.2(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + clsx: + specifier: catalog:web + version: 2.1.1 + drizzle-kit: + specifier: catalog:prod + version: 0.31.8 + eslint: + specifier: catalog:linting + version: 9.39.2(jiti@2.6.1) + eslint-plugin-react-hooks: + specifier: catalog:linting + version: 7.0.1(eslint@9.39.2(jiti@2.6.1)) + eslint-plugin-react-refresh: + specifier: catalog:linting + version: 0.4.26(eslint@9.39.2(jiti@2.6.1)) + publint: + specifier: catalog:build + version: 0.3.17 + react: + specifier: catalog:web + version: 19.2.4 + react-dom: + specifier: catalog:web + version: 19.2.4(react@19.2.4) + tailwind-merge: + specifier: catalog:web + version: 3.4.0 + tailwindcss: + specifier: catalog:web + version: 4.1.18 + tsdown: + specifier: catalog:build + version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + typescript: + specifier: catalog:build + version: 5.9.3 + vite: + specifier: catalog:web + version: 7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2) + vite-tsconfig-paths: + specifier: catalog:web + version: 6.0.5(typescript@5.9.3)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + vitest: + specifier: catalog:testing + version: 4.1.0-beta.1(@opentelemetry/api@1.9.0)(@types/node@22.18.12)(@vitest/ui@4.1.0-beta.1)(jiti@2.6.1)(jsdom@27.3.0(postcss@8.5.6))(lightningcss@1.30.2)(msw@2.12.7(@types/node@22.18.12)(typescript@5.9.3))(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2) + vitest-testdirs: + specifier: catalog:testing + version: 4.4.2(vitest@4.1.0-beta.1) + + packages/pipelines/pipeline-ui: + dependencies: + '@icons-pack/react-simple-icons': + specifier: catalog:web + version: 13.8.0(react@19.2.4) + '@ucdjs-internal/shared-ui': + specifier: workspace:* + version: link:../../shared-ui + '@ucdjs/pipelines-core': + specifier: workspace:* + version: link:../pipeline-core + '@xyflow/react': + specifier: catalog:web + version: 12.10.0(@types/react@19.2.10)(immer@9.0.21)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + clsx: + specifier: catalog:web + 
version: 2.1.1 + lucide-react: + specifier: catalog:web + version: 0.563.0(react@19.2.4) + tailwind-merge: + specifier: catalog:web + version: 3.4.0 + devDependencies: + '@eslint-react/eslint-plugin': + specifier: catalog:linting + version: 2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@luxass/eslint-config': + specifier: catalog:linting + version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17) + '@rollup/plugin-babel': + specifier: catalog:build + version: 6.1.0(@babel/core@7.28.5)(@types/babel__core@7.20.5)(rollup@4.53.3) + '@tanstack/react-router': + specifier: catalog:web + version: 1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@types/react': + specifier: catalog:types + version: 19.2.10 + '@types/react-dom': + specifier: catalog:types + version: 19.2.3(@types/react@19.2.10) + '@ucdjs-tooling/tsconfig': + specifier: workspace:* + version: link:../../../tooling/tsconfig + '@ucdjs-tooling/tsdown-config': + specifier: workspace:* + version: link:../../../tooling/tsdown-config + babel-plugin-react-compiler: + specifier: catalog:build + version: 1.0.0 + eslint: + specifier: catalog:linting + version: 9.39.2(jiti@2.6.1) + eslint-plugin-react-hooks: + specifier: catalog:linting + version: 7.0.1(eslint@9.39.2(jiti@2.6.1)) + eslint-plugin-react-refresh: + specifier: catalog:linting + version: 0.4.26(eslint@9.39.2(jiti@2.6.1)) + publint: + specifier: catalog:build + version: 0.3.17 + react: + specifier: catalog:web + version: 19.2.4 + react-dom: + specifier: catalog:web + version: 19.2.4(react@19.2.4) + tailwindcss: + specifier: catalog:web + version: 4.1.18 + tsdown: + specifier: catalog:build + version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + typescript: + specifier: catalog:build + version: 5.9.3 + + packages/schema-gen: + dependencies: + '@ai-sdk/openai': + specifier: catalog:prod + version: 3.0.23(zod@4.3.6) + '@luxass/unicode-utils-old': + specifier: catalog:prod + version: '@luxass/unicode-utils@0.11.0' + '@luxass/utils': + specifier: catalog:prod + version: 2.7.2 + '@ucdjs-internal/shared': + specifier: workspace:* + version: link:../shared + ai: + specifier: catalog:prod + version: 6.0.64(zod@4.3.6) + knitwork: + specifier: catalog:prod + version: 1.3.0 + zod: + specifier: catalog:prod + version: 4.3.6 + devDependencies: + '@luxass/eslint-config': + specifier: catalog:linting + version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) + '@ucdjs-tooling/tsconfig': + specifier: workspace:* + version: link:../../tooling/tsconfig + '@ucdjs-tooling/tsdown-config': + specifier: workspace:* + version: link:../../tooling/tsdown-config + eslint: + specifier: catalog:linting + version: 9.39.2(jiti@2.6.1) + publint: + specifier: catalog:build + version: 0.3.17 + tsdown: + specifier: catalog:build + version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + typescript: + specifier: catalog:build + version: 5.9.3 + vitest-testdirs: + specifier: 
catalog:testing + version: 4.4.2(vitest@4.1.0-beta.1) + + packages/schemas: + dependencies: + '@luxass/utils': + specifier: catalog:prod + version: 2.7.2 + zod: + specifier: catalog:prod + version: 4.3.6 + devDependencies: + '@luxass/eslint-config': + specifier: catalog:linting + version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17) + '@ucdjs-tooling/tsconfig': + specifier: workspace:* + version: link:../../tooling/tsconfig + '@ucdjs-tooling/tsdown-config': + specifier: workspace:* + version: link:../../tooling/tsdown-config + eslint: + specifier: catalog:linting + version: 9.39.2(jiti@2.6.1) + publint: + specifier: catalog:build + version: 0.3.17 + tsdown: + specifier: catalog:build + version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + typescript: + specifier: catalog:build + version: 5.9.3 + + packages/shared: + dependencies: + '@luxass/msw-utils': + specifier: catalog:prod + version: 0.6.0(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3)) + '@luxass/utils': + specifier: catalog:prod + version: 2.7.2 + '@ucdjs/env': + specifier: workspace:* + version: link:../env + '@ucdjs/schemas': + specifier: workspace:* + version: link:../schemas + '@unicode-utils/core': + specifier: catalog:prod + version: 0.12.0-beta.19 + defu: + specifier: catalog:prod + version: 6.1.4 + obug: + specifier: catalog:prod + version: 2.1.1 + picomatch: + specifier: catalog:prod + version: 4.0.3 + zod: + specifier: catalog:prod + version: 4.3.6 + devDependencies: + '@luxass/eslint-config': + specifier: catalog:linting + version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) + '@types/picomatch': + specifier: catalog:types + version: 4.0.2 + '@ucdjs-tooling/tsconfig': + specifier: workspace:* + version: link:../../tooling/tsconfig + '@ucdjs-tooling/tsdown-config': + specifier: workspace:* + version: link:../../tooling/tsdown-config + eslint: + specifier: catalog:linting + version: 9.39.2(jiti@2.6.1) + publint: + specifier: catalog:build + version: 0.3.17 + rolldown: + specifier: 1.0.0-rc.3 + version: 1.0.0-rc.3 + tsdown: + specifier: catalog:build + version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + typescript: + specifier: catalog:build + version: 5.9.3 + vitest-testdirs: + specifier: catalog:testing + version: 4.4.2(vitest@4.1.0-beta.1) + + packages/shared-ui: + dependencies: + '@base-ui/react': + specifier: catalog:web + version: 1.1.0(@types/react@19.2.10)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@fontsource-variable/inter': + specifier: catalog:web + version: 5.2.8 + class-variance-authority: + specifier: catalog:web + version: 0.7.1 + clsx: + specifier: catalog:web + version: 2.1.1 + cmdk: + specifier: catalog:web + version: 1.1.1(@types/react-dom@19.2.3(@types/react@19.2.10))(@types/react@19.2.10)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + lucide-react: + specifier: catalog:web + version: 0.563.0(react@19.2.4) + react: + specifier: catalog:web + version: 19.2.4 + 
react-dom: + specifier: catalog:web + version: 19.2.4(react@19.2.4) + shadcn: + specifier: catalog:web + version: 3.8.1(@types/node@24.3.1)(typescript@5.9.3) + shiki: + specifier: catalog:web + version: 3.22.0 + tailwind-merge: + specifier: catalog:web + version: 3.4.0 + tailwindcss: + specifier: catalog:web + version: 4.1.18 + tw-animate-css: + specifier: catalog:web + version: 1.4.0 + zod: + specifier: catalog:prod + version: 4.3.6 + devDependencies: + '@eslint-react/eslint-plugin': + specifier: catalog:linting + version: 2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@luxass/eslint-config': + specifier: catalog:linting + version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17) + '@types/react': + specifier: catalog:types + version: 19.2.10 + '@types/react-dom': + specifier: catalog:types + version: 19.2.3(@types/react@19.2.10) + '@ucdjs-tooling/tsconfig': + specifier: workspace:* + version: link:../../tooling/tsconfig + '@ucdjs-tooling/tsdown-config': + specifier: workspace:* + version: link:../../tooling/tsdown-config + eslint: + specifier: catalog:linting + version: 9.39.2(jiti@2.6.1) + eslint-plugin-format: + specifier: catalog:linting + version: 1.3.1(eslint@9.39.2(jiti@2.6.1)) + eslint-plugin-react-hooks: + specifier: catalog:linting + version: 7.0.1(eslint@9.39.2(jiti@2.6.1)) + eslint-plugin-react-refresh: + specifier: catalog:linting + version: 0.4.26(eslint@9.39.2(jiti@2.6.1)) + publint: + specifier: catalog:build + version: 0.3.17 + tsdown: + specifier: catalog:build + version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + typescript: + specifier: catalog:build + version: 5.9.3 + + packages/test-utils: + dependencies: + '@luxass/msw-utils': + specifier: catalog:prod + version: 0.6.0(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3)) + '@luxass/utils': + specifier: catalog:prod + version: 2.7.2 + '@ucdjs-internal/shared': + specifier: workspace:* + version: link:../shared + '@ucdjs/env': + specifier: workspace:* + version: link:../env + '@ucdjs/fs-bridge': + specifier: workspace:* + version: link:../fs-bridge + '@ucdjs/schemas': + specifier: workspace:* + version: link:../schemas + '@unicode-utils/core': + specifier: catalog:prod + version: 0.12.0-beta.19 + msw: + specifier: catalog:testing + version: 2.12.7(@types/node@24.3.1)(typescript@5.9.3) + zod: + specifier: catalog:prod + version: 4.3.6 + devDependencies: + '@luxass/eslint-config': + specifier: catalog:linting + version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) + '@ucdjs-tooling/tsconfig': + specifier: workspace:* + version: link:../../tooling/tsconfig + '@ucdjs-tooling/tsdown-config': + specifier: workspace:* + version: link:../../tooling/tsdown-config + '@ucdjs/ucd-store': + specifier: workspace:* + version: link:../ucd-store + '@vitest/expect': + specifier: catalog:testing + version: 4.1.0-beta.1 + eslint: + specifier: catalog:linting + version: 9.39.2(jiti@2.6.1) + openapi-typescript: + 
specifier: catalog:build + version: 7.10.1(typescript@5.9.3) + publint: + specifier: catalog:build + version: 0.3.17 + tsdown: + specifier: catalog:build + version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + tsx: + specifier: catalog:build + version: 4.21.0 + typescript: + specifier: catalog:build + version: 5.9.3 + vitest-testdirs: + specifier: catalog:testing + version: 4.4.2(vitest@4.1.0-beta.1) + + packages/ucd-store: + dependencies: + '@luxass/utils': + specifier: catalog:prod + version: 2.7.2 + '@ucdjs-internal/shared': + specifier: workspace:* + version: link:../shared + '@ucdjs/client': + specifier: workspace:* + version: link:../client + '@ucdjs/env': + specifier: workspace:* + version: link:../env + '@ucdjs/fs-bridge': + specifier: workspace:* + version: link:../fs-bridge + '@ucdjs/lockfile': + specifier: workspace:* + version: link:../lockfile + '@ucdjs/path-utils': + specifier: workspace:* + version: link:../path-utils + '@ucdjs/schemas': + specifier: workspace:* + version: link:../schemas + '@unicode-utils/core': + specifier: catalog:prod + version: 0.12.0-beta.19 + defu: + specifier: catalog:prod + version: 6.1.4 + zod: + specifier: catalog:prod + version: 4.3.6 + devDependencies: + '@luxass/eslint-config': + specifier: catalog:linting + version: 7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.1.0-beta.1) + '@ucdjs-tooling/tsconfig': + specifier: workspace:* + version: link:../../tooling/tsconfig + '@ucdjs-tooling/tsdown-config': + specifier: workspace:* + version: link:../../tooling/tsdown-config + eslint: + specifier: catalog:linting + version: 9.39.2(jiti@2.6.1) + publint: + specifier: catalog:build + version: 0.3.17 + tsdown: + specifier: catalog:build + version: 0.20.1(publint@0.3.17)(synckit@0.11.11)(typescript@5.9.3) + tsx: + specifier: catalog:build + version: 4.21.0 + typescript: + specifier: catalog:build + version: 5.9.3 + vitest-testdirs: + specifier: catalog:testing + version: 4.4.2(vitest@4.1.0-beta.1) + + packages/ucdjs-scripts: + dependencies: + '@ucdjs-internal/shared': + specifier: workspace:* + version: link:../shared + '@ucdjs/client': + specifier: workspace:* + version: link:../client + '@ucdjs/schemas': + specifier: workspace:* + version: link:../schemas + '@unicode-utils/core': + specifier: catalog:prod + version: 0.12.0-beta.19 + apache-autoindex-parse: + specifier: catalog:prod + version: 5.0.2 + cac: + specifier: catalog:prod + version: 6.7.14 + nanotar: + specifier: catalog:build + version: 0.2.0 + wrangler: specifier: 4.61.1 version: 4.61.1 devDependencies: @@ -1800,7 +2273,7 @@ packages: optional: true '@cloudflare/vitest-pool-workers@https://pkg.pr.new/@cloudflare/vitest-pool-workers@11632': - resolution: {integrity: sha512-NmzXfKgEcR1I9xQMeQGZODB9N4wBzTbl9PJMxLzeq5smYatje18ZRDxeN2P4bS3pRCa6XI/aRNTAEYyAR4C4ew==, tarball: https://pkg.pr.new/@cloudflare/vitest-pool-workers@11632} + resolution: {tarball: https://pkg.pr.new/@cloudflare/vitest-pool-workers@11632} version: 0.12.6 peerDependencies: '@vitest/runner': 4.1.0-beta.1 @@ -1918,6 +2391,9 @@ packages: '@dprint/toml@0.7.0': resolution: {integrity: sha512-eFaQTcfxKHB+YyTh83x7GEv+gDPuj9q5NFOTaoj5rZmQTbj6OgjjMxUicmS1R8zYcx8YAq5oA9J3YFa5U6x2gA==} + '@drizzle-team/brocli@0.10.2': + resolution: 
{integrity: sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w==} + '@ecies/ciphers@0.2.5': resolution: {integrity: sha512-GalEZH4JgOMHYYcYmVqnFirFsjZHeoGMDt9IxEnM9F7GRUUyUksJ7Ou53L83WHJq3RWKD3AcBpo0iQh0oMpf8A==} engines: {bun: '>=1', deno: '>=2', node: '>=16'} @@ -2011,6 +2487,20 @@ packages: resolution: {integrity: sha512-Q9hjxWI5xBM+qW2enxfe8wDKdFWMfd0Z29k5ZJnuBqD/CasY5Zryj09aCA6owbGATWz+39p5uIdaHXpopOcG8g==} engines: {node: '>=10'} + '@esbuild-kit/core-utils@3.3.2': + resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==} + deprecated: 'Merged into tsx: https://tsx.is' + + '@esbuild-kit/esm-loader@2.6.5': + resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} + deprecated: 'Merged into tsx: https://tsx.is' + + '@esbuild/aix-ppc64@0.25.12': + resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + '@esbuild/aix-ppc64@0.27.0': resolution: {integrity: sha512-KuZrd2hRjz01y5JK9mEBSD3Vj3mbCvemhT466rSuJYeE/hjuBrHfjjcjMdTm/sz7au+++sdbJZJmuBwQLuw68A==} engines: {node: '>=18'} @@ -2023,6 +2513,18 @@ packages: cpu: [ppc64] os: [aix] + '@esbuild/android-arm64@0.18.20': + resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm64@0.25.12': + resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm64@0.27.0': resolution: {integrity: sha512-CC3vt4+1xZrs97/PKDkl0yN7w8edvU2vZvAFGD16n9F0Cvniy5qvzRXjfO1l94efczkkQE6g1x0i73Qf5uthOQ==} engines: {node: '>=18'} @@ -2035,6 +2537,18 @@ packages: cpu: [arm64] os: [android] + '@esbuild/android-arm@0.18.20': + resolution: {integrity: sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + '@esbuild/android-arm@0.25.12': + resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + '@esbuild/android-arm@0.27.0': resolution: {integrity: sha512-j67aezrPNYWJEOHUNLPj9maeJte7uSMM6gMoxfPC9hOg8N02JuQi/T7ewumf4tNvJadFkvLZMlAq73b9uwdMyQ==} engines: {node: '>=18'} @@ -2047,6 +2561,18 @@ packages: cpu: [arm] os: [android] + '@esbuild/android-x64@0.18.20': + resolution: {integrity: sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/android-x64@0.25.12': + resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + '@esbuild/android-x64@0.27.0': resolution: {integrity: sha512-wurMkF1nmQajBO1+0CJmcN17U4BP6GqNSROP8t0X/Jiw2ltYGLHpEksp9MpoBqkrFR3kv2/te6Sha26k3+yZ9Q==} engines: {node: '>=18'} @@ -2059,6 +2585,18 @@ packages: cpu: [x64] os: [android] + '@esbuild/darwin-arm64@0.18.20': + resolution: {integrity: sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + 
'@esbuild/darwin-arm64@0.25.12': + resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-arm64@0.27.0': resolution: {integrity: sha512-uJOQKYCcHhg07DL7i8MzjvS2LaP7W7Pn/7uA0B5S1EnqAirJtbyw4yC5jQ5qcFjHK9l6o/MX9QisBg12kNkdHg==} engines: {node: '>=18'} @@ -2071,6 +2609,18 @@ packages: cpu: [arm64] os: [darwin] + '@esbuild/darwin-x64@0.18.20': + resolution: {integrity: sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + '@esbuild/darwin-x64@0.25.12': + resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + '@esbuild/darwin-x64@0.27.0': resolution: {integrity: sha512-8mG6arH3yB/4ZXiEnXof5MK72dE6zM9cDvUcPtxhUZsDjESl9JipZYW60C3JGreKCEP+p8P/72r69m4AZGJd5g==} engines: {node: '>=18'} @@ -2083,6 +2633,18 @@ packages: cpu: [x64] os: [darwin] + '@esbuild/freebsd-arm64@0.18.20': + resolution: {integrity: sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-arm64@0.25.12': + resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-arm64@0.27.0': resolution: {integrity: sha512-9FHtyO988CwNMMOE3YIeci+UV+x5Zy8fI2qHNpsEtSF83YPBmE8UWmfYAQg6Ux7Gsmd4FejZqnEUZCMGaNQHQw==} engines: {node: '>=18'} @@ -2095,6 +2657,18 @@ packages: cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-x64@0.18.20': + resolution: {integrity: sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.25.12': + resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + '@esbuild/freebsd-x64@0.27.0': resolution: {integrity: sha512-zCMeMXI4HS/tXvJz8vWGexpZj2YVtRAihHLk1imZj4efx1BQzN76YFeKqlDr3bUWI26wHwLWPd3rwh6pe4EV7g==} engines: {node: '>=18'} @@ -2107,6 +2681,18 @@ packages: cpu: [x64] os: [freebsd] + '@esbuild/linux-arm64@0.18.20': + resolution: {integrity: sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm64@0.25.12': + resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm64@0.27.0': resolution: {integrity: sha512-AS18v0V+vZiLJyi/4LphvBE+OIX682Pu7ZYNsdUHyUKSoRwdnOsMf6FDekwoAFKej14WAkOef3zAORJgAtXnlQ==} engines: {node: '>=18'} @@ -2119,6 +2705,18 @@ packages: cpu: [arm64] os: [linux] + '@esbuild/linux-arm@0.18.20': + resolution: {integrity: sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-arm@0.25.12': + resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + '@esbuild/linux-arm@0.27.0': 
resolution: {integrity: sha512-t76XLQDpxgmq2cNXKTVEB7O7YMb42atj2Re2Haf45HkaUpjM2J0UuJZDuaGbPbamzZ7bawyGFUkodL+zcE+jvQ==} engines: {node: '>=18'} @@ -2128,7 +2726,19 @@ packages: '@esbuild/linux-arm@0.27.2': resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==} engines: {node: '>=18'} - cpu: [arm] + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.18.20': + resolution: {integrity: sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-ia32@0.25.12': + resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==} + engines: {node: '>=18'} + cpu: [ia32] os: [linux] '@esbuild/linux-ia32@0.27.0': @@ -2143,6 +2753,18 @@ packages: cpu: [ia32] os: [linux] + '@esbuild/linux-loong64@0.18.20': + resolution: {integrity: sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-loong64@0.25.12': + resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-loong64@0.27.0': resolution: {integrity: sha512-QbEREjdJeIreIAbdG2hLU1yXm1uu+LTdzoq1KCo4G4pFOLlvIspBm36QrQOar9LFduavoWX2msNFAAAY9j4BDg==} engines: {node: '>=18'} @@ -2155,6 +2777,18 @@ packages: cpu: [loong64] os: [linux] + '@esbuild/linux-mips64el@0.18.20': + resolution: {integrity: sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-mips64el@0.25.12': + resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-mips64el@0.27.0': resolution: {integrity: sha512-sJz3zRNe4tO2wxvDpH/HYJilb6+2YJxo/ZNbVdtFiKDufzWq4JmKAiHy9iGoLjAV7r/W32VgaHGkk35cUXlNOg==} engines: {node: '>=18'} @@ -2167,6 +2801,18 @@ packages: cpu: [mips64el] os: [linux] + '@esbuild/linux-ppc64@0.18.20': + resolution: {integrity: sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-ppc64@0.25.12': + resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-ppc64@0.27.0': resolution: {integrity: sha512-z9N10FBD0DCS2dmSABDBb5TLAyF1/ydVb+N4pi88T45efQ/w4ohr/F/QYCkxDPnkhkp6AIpIcQKQ8F0ANoA2JA==} engines: {node: '>=18'} @@ -2179,6 +2825,18 @@ packages: cpu: [ppc64] os: [linux] + '@esbuild/linux-riscv64@0.18.20': + resolution: {integrity: sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-riscv64@0.25.12': + resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-riscv64@0.27.0': resolution: {integrity: sha512-pQdyAIZ0BWIC5GyvVFn5awDiO14TkT/19FTmFcPdDec94KJ1uZcmFs21Fo8auMXzD4Tt+diXu1LW1gHus9fhFQ==} engines: {node: '>=18'} @@ -2191,6 +2849,18 @@ 
packages: cpu: [riscv64] os: [linux] + '@esbuild/linux-s390x@0.18.20': + resolution: {integrity: sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-s390x@0.25.12': + resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-s390x@0.27.0': resolution: {integrity: sha512-hPlRWR4eIDDEci953RI1BLZitgi5uqcsjKMxwYfmi4LcwyWo2IcRP+lThVnKjNtk90pLS8nKdroXYOqW+QQH+w==} engines: {node: '>=18'} @@ -2203,6 +2873,18 @@ packages: cpu: [s390x] os: [linux] + '@esbuild/linux-x64@0.18.20': + resolution: {integrity: sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/linux-x64@0.25.12': + resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + '@esbuild/linux-x64@0.27.0': resolution: {integrity: sha512-1hBWx4OUJE2cab++aVZ7pObD6s+DK4mPGpemtnAORBvb5l/g5xFGk0vc0PjSkrDs0XaXj9yyob3d14XqvnQ4gw==} engines: {node: '>=18'} @@ -2215,6 +2897,12 @@ packages: cpu: [x64] os: [linux] + '@esbuild/netbsd-arm64@0.25.12': + resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + '@esbuild/netbsd-arm64@0.27.0': resolution: {integrity: sha512-6m0sfQfxfQfy1qRuecMkJlf1cIzTOgyaeXaiVaaki8/v+WB+U4hc6ik15ZW6TAllRlg/WuQXxWj1jx6C+dfy3w==} engines: {node: '>=18'} @@ -2227,6 +2915,18 @@ packages: cpu: [arm64] os: [netbsd] + '@esbuild/netbsd-x64@0.18.20': + resolution: {integrity: sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.25.12': + resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + '@esbuild/netbsd-x64@0.27.0': resolution: {integrity: sha512-xbbOdfn06FtcJ9d0ShxxvSn2iUsGd/lgPIO2V3VZIPDbEaIj1/3nBBe1AwuEZKXVXkMmpr6LUAgMkLD/4D2PPA==} engines: {node: '>=18'} @@ -2239,6 +2939,12 @@ packages: cpu: [x64] os: [netbsd] + '@esbuild/openbsd-arm64@0.25.12': + resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-arm64@0.27.0': resolution: {integrity: sha512-fWgqR8uNbCQ/GGv0yhzttj6sU/9Z5/Sv/VGU3F5OuXK6J6SlriONKrQ7tNlwBrJZXRYk5jUhuWvF7GYzGguBZQ==} engines: {node: '>=18'} @@ -2251,6 +2957,18 @@ packages: cpu: [arm64] os: [openbsd] + '@esbuild/openbsd-x64@0.18.20': + resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.25.12': + resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + '@esbuild/openbsd-x64@0.27.0': resolution: {integrity: sha512-aCwlRdSNMNxkGGqQajMUza6uXzR/U0dIl1QmLjPtRbLOx3Gy3otfFu/VjATy4yQzo9yFDGTxYDo1FfAD9oRD2A==} engines: {node: '>=18'} @@ -2263,6 +2981,12 @@ packages: cpu: 
[x64] os: [openbsd] + '@esbuild/openharmony-arm64@0.25.12': + resolution: {integrity: sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + '@esbuild/openharmony-arm64@0.27.0': resolution: {integrity: sha512-nyvsBccxNAsNYz2jVFYwEGuRRomqZ149A39SHWk4hV0jWxKM0hjBPm3AmdxcbHiFLbBSwG6SbpIcUbXjgyECfA==} engines: {node: '>=18'} @@ -2275,6 +2999,18 @@ packages: cpu: [arm64] os: [openharmony] + '@esbuild/sunos-x64@0.18.20': + resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/sunos-x64@0.25.12': + resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + '@esbuild/sunos-x64@0.27.0': resolution: {integrity: sha512-Q1KY1iJafM+UX6CFEL+F4HRTgygmEW568YMqDA5UV97AuZSm21b7SXIrRJDwXWPzr8MGr75fUZPV67FdtMHlHA==} engines: {node: '>=18'} @@ -2287,6 +3023,18 @@ packages: cpu: [x64] os: [sunos] + '@esbuild/win32-arm64@0.18.20': + resolution: {integrity: sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-arm64@0.25.12': + resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-arm64@0.27.0': resolution: {integrity: sha512-W1eyGNi6d+8kOmZIwi/EDjrL9nxQIQ0MiGqe/AWc6+IaHloxHSGoeRgDRKHFISThLmsewZ5nHFvGFWdBYlgKPg==} engines: {node: '>=18'} @@ -2299,6 +3047,18 @@ packages: cpu: [arm64] os: [win32] + '@esbuild/win32-ia32@0.18.20': + resolution: {integrity: sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-ia32@0.25.12': + resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + '@esbuild/win32-ia32@0.27.0': resolution: {integrity: sha512-30z1aKL9h22kQhilnYkORFYt+3wp7yZsHWus+wSKAJR8JtdfI76LJ4SBdMsCopTR3z/ORqVu5L1vtnHZWVj4cQ==} engines: {node: '>=18'} @@ -2311,6 +3071,18 @@ packages: cpu: [ia32] os: [win32] + '@esbuild/win32-x64@0.18.20': + resolution: {integrity: sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@esbuild/win32-x64@0.25.12': + resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + '@esbuild/win32-x64@0.27.0': resolution: {integrity: sha512-aIitBcjQeyOhMTImhLZmtxfdOcuNRpwlPNmlFKPcHQYPhEssw75Cl1TSXJXpMkzaua9FUetx/4OQKq7eJul5Cg==} engines: {node: '>=18'} @@ -2395,16 +3167,16 @@ packages: resolution: {integrity: sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/config-helpers@0.5.1': - resolution: {integrity: sha512-QN8067dXsXAl9HIvqws7STEviheRFojX3zek5OpC84oBxDGqizW9731ByF/ASxqQihbWrVDdZXS+Ihnsckm9dg==} + '@eslint/config-helpers@0.5.2': + resolution: {integrity: 
sha512-a5MxrdDXEvqnIq+LisyCX6tQMPF/dSJpCfBgBauY+pNZ28yCtSsTvyTYrMhaI+LK26bVyCJfJkT0u8KIj2i1dQ==} engines: {node: ^20.19.0 || ^22.13.0 || >=24} '@eslint/core@0.17.0': resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@1.0.1': - resolution: {integrity: sha512-r18fEAj9uCk+VjzGt2thsbOmychS+4kxI14spVNibUO2vqKX7obOG+ymZljAwuPZl+S3clPGwCwTDtrdqTiY6Q==} + '@eslint/core@1.1.0': + resolution: {integrity: sha512-/nr9K9wkr3P1EzFTdFdMoLuo1PmIxjmwvPozwoSodjNBdefGujXQUF93u1DDZpEaTuDvMsIQddsd35BwtrW9Xw==} engines: {node: ^20.19.0 || ^22.13.0 || >=24} '@eslint/eslintrc@3.3.1': @@ -2501,6 +3273,11 @@ packages: resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} engines: {node: '>=18.18'} + '@icons-pack/react-simple-icons@13.8.0': + resolution: {integrity: sha512-iZrhL1fSklfCCVn68IYHaAoKfcby3RakUTn2tRPyHBkhr2tkYqeQbjJWf+NizIYBzKBn2IarDJXmTdXd6CuEfw==} + peerDependencies: + react: ^16.13 || ^17 || ^18 || ^19 + '@img/colour@1.0.0': resolution: {integrity: sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==} engines: {node: '>=18'} @@ -2744,6 +3521,63 @@ packages: peerDependencies: jsep: ^0.4.0||^1.0.0 + '@libsql/client@0.17.0': + resolution: {integrity: sha512-TLjSU9Otdpq0SpKHl1tD1Nc9MKhrsZbCFGot3EbCxRa8m1E5R1mMwoOjKMMM31IyF7fr+hPNHLpYfwbMKNusmg==} + + '@libsql/core@0.17.0': + resolution: {integrity: sha512-hnZRnJHiS+nrhHKLGYPoJbc78FE903MSDrFJTbftxo+e52X+E0Y0fHOCVYsKWcg6XgB7BbJYUrz/xEkVTSaipw==} + + '@libsql/darwin-arm64@0.5.22': + resolution: {integrity: sha512-4B8ZlX3nIDPndfct7GNe0nI3Yw6ibocEicWdC4fvQbSs/jdq/RC2oCsoJxJ4NzXkvktX70C1J4FcmmoBy069UA==} + cpu: [arm64] + os: [darwin] + + '@libsql/darwin-x64@0.5.22': + resolution: {integrity: sha512-ny2HYWt6lFSIdNFzUFIJ04uiW6finXfMNJ7wypkAD8Pqdm6nAByO+Fdqu8t7sD0sqJGeUCiOg480icjyQ2/8VA==} + cpu: [x64] + os: [darwin] + + '@libsql/hrana-client@0.9.0': + resolution: {integrity: sha512-pxQ1986AuWfPX4oXzBvLwBnfgKDE5OMhAdR/5cZmRaB4Ygz5MecQybvwZupnRz341r2CtFmbk/BhSu7k2Lm+Jw==} + + '@libsql/isomorphic-ws@0.1.5': + resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} + + '@libsql/linux-arm-gnueabihf@0.5.22': + resolution: {integrity: sha512-3Uo3SoDPJe/zBnyZKosziRGtszXaEtv57raWrZIahtQDsjxBVjuzYQinCm9LRCJCUT5t2r5Z5nLDPJi2CwZVoA==} + cpu: [arm] + os: [linux] + + '@libsql/linux-arm-musleabihf@0.5.22': + resolution: {integrity: sha512-LCsXh07jvSojTNJptT9CowOzwITznD+YFGGW+1XxUr7fS+7/ydUrpDfsMX7UqTqjm7xG17eq86VkWJgHJfvpNg==} + cpu: [arm] + os: [linux] + + '@libsql/linux-arm64-gnu@0.5.22': + resolution: {integrity: sha512-KSdnOMy88c9mpOFKUEzPskSaF3VLflfSUCBwas/pn1/sV3pEhtMF6H8VUCd2rsedwoukeeCSEONqX7LLnQwRMA==} + cpu: [arm64] + os: [linux] + + '@libsql/linux-arm64-musl@0.5.22': + resolution: {integrity: sha512-mCHSMAsDTLK5YH//lcV3eFEgiR23Ym0U9oEvgZA0667gqRZg/2px+7LshDvErEKv2XZ8ixzw3p1IrBzLQHGSsw==} + cpu: [arm64] + os: [linux] + + '@libsql/linux-x64-gnu@0.5.22': + resolution: {integrity: sha512-kNBHaIkSg78Y4BqAdgjcR2mBilZXs4HYkAmi58J+4GRwDQZh5fIUWbnQvB9f95DkWUIGVeenqLRFY2pcTmlsew==} + cpu: [x64] + os: [linux] + + '@libsql/linux-x64-musl@0.5.22': + resolution: {integrity: sha512-UZ4Xdxm4pu3pQXjvfJiyCzZop/9j/eA2JjmhMaAhe3EVLH2g11Fy4fwyUp9sT1QJYR1kpc2JLuybPM0kuXv/Tg==} + cpu: [x64] + os: [linux] + + '@libsql/win32-x64-msvc@0.5.22': + resolution: {integrity: 
sha512-Fj0j8RnBpo43tVZUVoNK6BV/9AtDUM5S7DF3LB4qTYg1LMSZqi3yeCneUTLJD6XomQJlZzbI4mst89yspVSAnA==} + cpu: [x64] + os: [win32] + '@luxass/eslint-config@7.0.0': resolution: {integrity: sha512-kmryLHe8B48d3AQmoltX/1XPsv7ynJoMbaJ5hqgoRpZcD5bQa6fY56Jk0XKMOAUWV3lIsGa0rkBWSDo1CevfSg==} engines: {node: '>=22'} @@ -2843,6 +3677,9 @@ packages: '@napi-rs/wasm-runtime@1.1.1': resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==} + '@neon-rs/load@0.0.4': + resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + '@noble/ciphers@1.3.0': resolution: {integrity: sha512-2I0gnIVPtfnMw9ee9h1dJG7tp81+8Ob3OJb3Mv37rx5L40/b0i7djjCVvGOVqc9AEIQyvyu1i6ypKdFw8R8gQw==} engines: {node: ^14.21.3 || >=16} @@ -2879,156 +3716,283 @@ packages: resolution: {integrity: sha512-ZKfET8Ak1wsLAiLWNfFkZc/BraDccuTJKR6svTYc7sVjbR+Iu0vtXdiDMY4o6jaFl5TW2TlS7jbLl4VovtAJWQ==} engines: {node: '>=20.0'} - '@oozcitak/util@10.0.0': - resolution: {integrity: sha512-hAX0pT/73190NLqBPPWSdBVGtbY6VOhWYK3qqHqtXQ1gK7kS2yz4+ivsN07hpJ6I3aeMtKP6J6npsEKOAzuTLA==} - engines: {node: '>=20.0'} + '@oozcitak/util@10.0.0': + resolution: {integrity: sha512-hAX0pT/73190NLqBPPWSdBVGtbY6VOhWYK3qqHqtXQ1gK7kS2yz4+ivsN07hpJ6I3aeMtKP6J6npsEKOAzuTLA==} + engines: {node: '>=20.0'} + + '@open-draft/deferred-promise@2.2.0': + resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==} + + '@open-draft/logger@0.3.0': + resolution: {integrity: sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==} + + '@open-draft/until@2.1.0': + resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==} + + '@opentelemetry/api@1.9.0': + resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} + engines: {node: '>=8.0.0'} + + '@orama/orama@3.1.18': + resolution: {integrity: sha512-a61ljmRVVyG5MC/698C8/FfFDw5a8LOIvyOLW5fztgUXqUpc1jOfQzOitSCbge657OgXXThmY3Tk8fpiDb4UcA==} + engines: {node: '>= 20.0.0'} + + '@oxc-minify/binding-android-arm-eabi@0.110.0': + resolution: {integrity: sha512-43fMTO8/5bMlqfOiNSZNKUzIqeLIYuB9Hr1Ohyf58B1wU11S2dPGibTXOGNaWsfgHy99eeZ1bSgeIHy/fEYqbw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm] + os: [android] + + '@oxc-minify/binding-android-arm64@0.110.0': + resolution: {integrity: sha512-5oQrnn9eK/ccOp80PTrNj0Vq893NPNNRryjGpOIVsYNgWFuoGCfpnKg68oEFcN8bArizYAqw4nvgHljEnar69w==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [android] + + '@oxc-minify/binding-darwin-arm64@0.110.0': + resolution: {integrity: sha512-dqBDgTG9tF2z2lrZp9E8wU+Godz1i8gCGSei2eFKS2hRploBOD5dmOLp1j4IMornkPvSQmbwB3uSjPq7fjx4EA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [darwin] + + '@oxc-minify/binding-darwin-x64@0.110.0': + resolution: {integrity: sha512-U0AqabqaooDOpYmeeOye8wClv8PSScELXgOfYqyqgrwH9J9KrpCE1jL8Rlqgz68QbL4mPw3V6sKiiHssI4CLeQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [darwin] + + '@oxc-minify/binding-freebsd-x64@0.110.0': + resolution: {integrity: sha512-H0w8o/Wo1072WSdLfhwwrpFpwZnPpjQODlHuRYkTfsSSSJbTxQtjJd4uxk7YJsRv5RQp69y0I7zvdH6f8Xueyw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [freebsd] + + '@oxc-minify/binding-linux-arm-gnueabihf@0.110.0': + resolution: {integrity: 
sha512-qd6sW0AvEVYZhbVVMGtmKZw3b1zDYGIW+54Uh42moWRAj6i4Jhk/LGr6r9YNZpOINeuvZfkFuEeDD/jbu7xPUA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm] + os: [linux] + + '@oxc-minify/binding-linux-arm-musleabihf@0.110.0': + resolution: {integrity: sha512-7WXP0aXMrWSn0ScppUBi3jf68ebfBG0eri8kxLmBOVSBj6jw1repzkHMITJMBeLr5d0tT/51qFEptiAk2EP2iA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm] + os: [linux] + + '@oxc-minify/binding-linux-arm64-gnu@0.110.0': + resolution: {integrity: sha512-LYfADrq5x1W5gs+u9OIbMbDQNYkAECTXX0ufnAuf3oGmO51rF98kGFR5qJqC/6/csokDyT3wwTpxhE0TkcF/Og==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@oxc-minify/binding-linux-arm64-musl@0.110.0': + resolution: {integrity: sha512-53GjCVY8kvymk9P6qNDh6zyblcehF5QHstq9QgCjv13ONGRnSHjeds0PxIwiihD7h295bxsWs84DN39syLPH4Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@oxc-minify/binding-linux-ppc64-gnu@0.110.0': + resolution: {integrity: sha512-li8XcN81dxbJDMBESnTgGhoiAQ+CNIdM0QGscZ4duVPjCry1RpX+5FJySFbGqG3pk4s9ZzlL/vtQtbRzZIZOzg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [ppc64] + os: [linux] + libc: [glibc] + + '@oxc-minify/binding-linux-riscv64-gnu@0.110.0': + resolution: {integrity: sha512-SweKfsnLKShu6UFV8mwuj1d1wmlNoL/FlAxPUzwjEBgwiT2HQkY24KnjBH+TIA+//1O83kzmWKvvs4OuEhdIEQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [riscv64] + os: [linux] + libc: [glibc] + + '@oxc-minify/binding-linux-riscv64-musl@0.110.0': + resolution: {integrity: sha512-oH8G4aFMP8XyTsEpdANC5PQyHgSeGlopHZuW1rpyYcaErg5YaK0vXjQ4EM5HVvPm+feBV24JjxgakTnZoF3aOQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [riscv64] + os: [linux] + libc: [musl] + + '@oxc-minify/binding-linux-s390x-gnu@0.110.0': + resolution: {integrity: sha512-W9na+Vza7XVUlpf8wMt4QBfH35KeTENEmnpPUq3NSlbQHz8lSlSvhAafvo43NcKvHAXV3ckD/mUf2VkqSdbklg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@oxc-minify/binding-linux-x64-gnu@0.110.0': + resolution: {integrity: sha512-XJdA4mmmXOjJxSRgNJXsDP7Xe8h3gQhmb56hUcCrvq5d+h5UcEi2pR8rxsdIrS8QmkLuBA3eHkGK8E27D7DTgQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@oxc-minify/binding-linux-x64-musl@0.110.0': + resolution: {integrity: sha512-QqzvALuOTtSckI8x467R4GNArzYDb/yEh6aNzLoeaY1O7vfT7SPDwlOEcchaTznutpeS9Dy8gUS/AfqtUHaufw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [linux] + libc: [musl] - '@open-draft/deferred-promise@2.2.0': - resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==} + '@oxc-minify/binding-openharmony-arm64@0.110.0': + resolution: {integrity: sha512-gAMssLs2Q3+uhLZxanh1DF+27Kaug3cf4PXb9AB7XK81DR+LVcKySXaoGYoOs20Co0fFSphd6rRzKge2qDK3dA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [openharmony] - '@open-draft/logger@0.3.0': - resolution: {integrity: sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==} + '@oxc-minify/binding-wasm32-wasi@0.110.0': + resolution: {integrity: sha512-7Wqi5Zjl022bs2zXq+ICdalDPeDuCH/Nhbi8q2isLihAonMVIT0YH2hqqnNEylRNGYck+FJ6gRZwMpGCgrNxPg==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] - '@open-draft/until@2.1.0': - resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==} + '@oxc-minify/binding-win32-arm64-msvc@0.110.0': + resolution: {integrity: 
sha512-ZPx+0Tj4dqn41ecyoGotlvekQKy6JxJCixn9Rw7h/dafZ3eDuBcEVh3c2ZoldXXsyMIt5ywI8IWzFZsjNedd5Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [win32] - '@opentelemetry/api@1.9.0': - resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} - engines: {node: '>=8.0.0'} + '@oxc-minify/binding-win32-ia32-msvc@0.110.0': + resolution: {integrity: sha512-H0Oyd3RWBfpEyvJIrFK94RYiY7KKSQl11Ym7LMDwLEagelIAfRCkt1amHZhFa/S3ZRoaOJFXzEw4YKeSsjVFsg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [ia32] + os: [win32] - '@orama/orama@3.1.18': - resolution: {integrity: sha512-a61ljmRVVyG5MC/698C8/FfFDw5a8LOIvyOLW5fztgUXqUpc1jOfQzOitSCbge657OgXXThmY3Tk8fpiDb4UcA==} - engines: {node: '>= 20.0.0'} + '@oxc-minify/binding-win32-x64-msvc@0.110.0': + resolution: {integrity: sha512-Hr3nK90+qXKJ2kepXwFIcNfQQIOBecB4FFCyaMMypthoEEhVP08heRynj4eSXZ8NL9hLjs3fQzH8PJXfpznRnQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [win32] - '@oxc-minify/binding-android-arm-eabi@0.110.0': - resolution: {integrity: sha512-43fMTO8/5bMlqfOiNSZNKUzIqeLIYuB9Hr1Ohyf58B1wU11S2dPGibTXOGNaWsfgHy99eeZ1bSgeIHy/fEYqbw==} + '@oxc-parser/binding-android-arm-eabi@0.112.0': + resolution: {integrity: sha512-retxBzJ39Da7Lh/eZTn9+HJgTeDUxZIpuI0urOsmcFsBKXAth3lc1jIvwseQ9qbAI/VrsoFOXiGIzgclARbAHg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [android] - '@oxc-minify/binding-android-arm64@0.110.0': - resolution: {integrity: sha512-5oQrnn9eK/ccOp80PTrNj0Vq893NPNNRryjGpOIVsYNgWFuoGCfpnKg68oEFcN8bArizYAqw4nvgHljEnar69w==} + '@oxc-parser/binding-android-arm64@0.112.0': + resolution: {integrity: sha512-pRkbBRbuIIsufUWpOJ+JHWfJFNupkidy4sbjfcm37e6xwYrn9LSKMLubPHvNaL1Zf92ZRhGiwaYkEcmaFg2VcA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [android] - '@oxc-minify/binding-darwin-arm64@0.110.0': - resolution: {integrity: sha512-dqBDgTG9tF2z2lrZp9E8wU+Godz1i8gCGSei2eFKS2hRploBOD5dmOLp1j4IMornkPvSQmbwB3uSjPq7fjx4EA==} + '@oxc-parser/binding-darwin-arm64@0.112.0': + resolution: {integrity: sha512-fh6/KQL/cbH5DukT3VkdCqnULLuvVnszVKySD5IgSE0WZb32YZo/cPsPdEv052kk6w3N4agu+NTiMnZjcvhUIg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [darwin] - '@oxc-minify/binding-darwin-x64@0.110.0': - resolution: {integrity: sha512-U0AqabqaooDOpYmeeOye8wClv8PSScELXgOfYqyqgrwH9J9KrpCE1jL8Rlqgz68QbL4mPw3V6sKiiHssI4CLeQ==} + '@oxc-parser/binding-darwin-x64@0.112.0': + resolution: {integrity: sha512-vUBOOY1E30vlu/DoTGDoT1UbLlwu5Yv9tqeBabAwRzwNDz8Skho16VKhsBDUiyqddtpsR3//v6vNk38w4c+6IA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [darwin] - '@oxc-minify/binding-freebsd-x64@0.110.0': - resolution: {integrity: sha512-H0w8o/Wo1072WSdLfhwwrpFpwZnPpjQODlHuRYkTfsSSSJbTxQtjJd4uxk7YJsRv5RQp69y0I7zvdH6f8Xueyw==} + '@oxc-parser/binding-freebsd-x64@0.112.0': + resolution: {integrity: sha512-hnEtO/9AVnYWzrgnp6L+oPs/6UqlFeteUL6n7magkd2tttgmx1C01hyNNh6nTpZfLzEVJSNJ0S+4NTsK2q2CxA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [freebsd] - '@oxc-minify/binding-linux-arm-gnueabihf@0.110.0': - resolution: {integrity: sha512-qd6sW0AvEVYZhbVVMGtmKZw3b1zDYGIW+54Uh42moWRAj6i4Jhk/LGr6r9YNZpOINeuvZfkFuEeDD/jbu7xPUA==} + '@oxc-parser/binding-linux-arm-gnueabihf@0.112.0': + resolution: {integrity: sha512-WxJrUz3pcIc2hp4lvJbvt/sTL33oX9NPvkD3vDDybE6tc0V++rS+hNOJxwXdD2FDIFPkHs/IEn5asEZFVH+VKw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] - '@oxc-minify/binding-linux-arm-musleabihf@0.110.0': - resolution: {integrity: 
sha512-7WXP0aXMrWSn0ScppUBi3jf68ebfBG0eri8kxLmBOVSBj6jw1repzkHMITJMBeLr5d0tT/51qFEptiAk2EP2iA==} + '@oxc-parser/binding-linux-arm-musleabihf@0.112.0': + resolution: {integrity: sha512-jj8A8WWySaJQqM9XKAIG8U2Q3qxhFQKrXPWv98d1oC35at+L1h+C+V4M3l8BAKhpHKCu3dYlloaAbHd5q1Hw6A==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] - '@oxc-minify/binding-linux-arm64-gnu@0.110.0': - resolution: {integrity: sha512-LYfADrq5x1W5gs+u9OIbMbDQNYkAECTXX0ufnAuf3oGmO51rF98kGFR5qJqC/6/csokDyT3wwTpxhE0TkcF/Og==} + '@oxc-parser/binding-linux-arm64-gnu@0.112.0': + resolution: {integrity: sha512-G2F8H6FcAExVK5vvhpSh61tqWx5QoaXXUnSsj5FyuDiFT/K7AMMVSQVqnZREDc+YxhrjB0vnKjCcuobXK63kIw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] libc: [glibc] - '@oxc-minify/binding-linux-arm64-musl@0.110.0': - resolution: {integrity: sha512-53GjCVY8kvymk9P6qNDh6zyblcehF5QHstq9QgCjv13ONGRnSHjeds0PxIwiihD7h295bxsWs84DN39syLPH4Q==} + '@oxc-parser/binding-linux-arm64-musl@0.112.0': + resolution: {integrity: sha512-3R0iqjM3xYOZCnwgcxOQXH7hrz64/USDIuLbNTM1kZqQzRqaR4w7SwoWKU934zABo8d0op2oSwOp+CV3hZnM7A==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] libc: [musl] - '@oxc-minify/binding-linux-ppc64-gnu@0.110.0': - resolution: {integrity: sha512-li8XcN81dxbJDMBESnTgGhoiAQ+CNIdM0QGscZ4duVPjCry1RpX+5FJySFbGqG3pk4s9ZzlL/vtQtbRzZIZOzg==} + '@oxc-parser/binding-linux-ppc64-gnu@0.112.0': + resolution: {integrity: sha512-lAQf8PQxfgy7h0bmcfSVE3hg3qMueshPYULFsCrHM+8KefGZ9W+ZMvRyU33gLrB4w1O3Fz1orR0hmKMCRxXNrQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [ppc64] os: [linux] libc: [glibc] - '@oxc-minify/binding-linux-riscv64-gnu@0.110.0': - resolution: {integrity: sha512-SweKfsnLKShu6UFV8mwuj1d1wmlNoL/FlAxPUzwjEBgwiT2HQkY24KnjBH+TIA+//1O83kzmWKvvs4OuEhdIEQ==} + '@oxc-parser/binding-linux-riscv64-gnu@0.112.0': + resolution: {integrity: sha512-2QlvQBUhHuAE3ezD4X3CAEKMXdfgInggQ5Bj/7gb5NcYP3GyfLTj7c+mMu+BRwfC9B3AXBNyqHWbqEuuUvZyRQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [riscv64] os: [linux] libc: [glibc] - '@oxc-minify/binding-linux-riscv64-musl@0.110.0': - resolution: {integrity: sha512-oH8G4aFMP8XyTsEpdANC5PQyHgSeGlopHZuW1rpyYcaErg5YaK0vXjQ4EM5HVvPm+feBV24JjxgakTnZoF3aOQ==} + '@oxc-parser/binding-linux-riscv64-musl@0.112.0': + resolution: {integrity: sha512-v06iu0osHszgqJ1dLQRb6leWFU1sjG/UQk4MoVBtE6ZPewgfTkby6G9II1SpEAf2onnAuQceVYxQH9iuU3NJqw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [riscv64] os: [linux] libc: [musl] - '@oxc-minify/binding-linux-s390x-gnu@0.110.0': - resolution: {integrity: sha512-W9na+Vza7XVUlpf8wMt4QBfH35KeTENEmnpPUq3NSlbQHz8lSlSvhAafvo43NcKvHAXV3ckD/mUf2VkqSdbklg==} + '@oxc-parser/binding-linux-s390x-gnu@0.112.0': + resolution: {integrity: sha512-+5HhNHtxsdcd7+ljXFnn9FOoCNXJX3UPgIfIE6vdwS1HqdGNH6eAcVobuqGOp54l8pvcxDQA6F4cPswCgLrQfQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [s390x] os: [linux] libc: [glibc] - '@oxc-minify/binding-linux-x64-gnu@0.110.0': - resolution: {integrity: sha512-XJdA4mmmXOjJxSRgNJXsDP7Xe8h3gQhmb56hUcCrvq5d+h5UcEi2pR8rxsdIrS8QmkLuBA3eHkGK8E27D7DTgQ==} + '@oxc-parser/binding-linux-x64-gnu@0.112.0': + resolution: {integrity: sha512-jKwO7ZLNkjxwg7FoCLw+fJszooL9yXRZsDN0AQ1AQUTWq1l8GH/2e44k68N3fcP19jl8O8jGpqLAZcQTYk6skA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] libc: [glibc] - '@oxc-minify/binding-linux-x64-musl@0.110.0': - resolution: {integrity: sha512-QqzvALuOTtSckI8x467R4GNArzYDb/yEh6aNzLoeaY1O7vfT7SPDwlOEcchaTznutpeS9Dy8gUS/AfqtUHaufw==} + '@oxc-parser/binding-linux-x64-musl@0.112.0': + resolution: {integrity: 
sha512-TYqnuKV/p3eOc+N61E0961nA7DC+gaCeJ3+V2LcjJdTwFMdikqWL6uVk1jlrpUCBrozHDATVUKDZYH7r4FQYjQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] libc: [musl] - '@oxc-minify/binding-openharmony-arm64@0.110.0': - resolution: {integrity: sha512-gAMssLs2Q3+uhLZxanh1DF+27Kaug3cf4PXb9AB7XK81DR+LVcKySXaoGYoOs20Co0fFSphd6rRzKge2qDK3dA==} + '@oxc-parser/binding-openharmony-arm64@0.112.0': + resolution: {integrity: sha512-ZhrVmWFifVEFQX4XPwLoVFDHw9tAWH9p9vHsHFH+5uCKdfVR+jje4WxVo6YrokWCboGckoOzHq5KKMOcPZfkRg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [openharmony] - '@oxc-minify/binding-wasm32-wasi@0.110.0': - resolution: {integrity: sha512-7Wqi5Zjl022bs2zXq+ICdalDPeDuCH/Nhbi8q2isLihAonMVIT0YH2hqqnNEylRNGYck+FJ6gRZwMpGCgrNxPg==} + '@oxc-parser/binding-wasm32-wasi@0.112.0': + resolution: {integrity: sha512-Gr8X2PUU3hX1g3F5oLWIZB8DhzDmjr5TfOrmn5tlBOo9l8ojPGdKjnIBfObM7X15928vza8QRKW25RTR7jfivg==} engines: {node: '>=14.0.0'} cpu: [wasm32] - '@oxc-minify/binding-win32-arm64-msvc@0.110.0': - resolution: {integrity: sha512-ZPx+0Tj4dqn41ecyoGotlvekQKy6JxJCixn9Rw7h/dafZ3eDuBcEVh3c2ZoldXXsyMIt5ywI8IWzFZsjNedd5Q==} + '@oxc-parser/binding-win32-arm64-msvc@0.112.0': + resolution: {integrity: sha512-t5CDLbU70Ea88bGRhvU/dLJTc/Wcrtf2Jp534E8P3cgjAvHDjdKsfDDqBZrhybJ8Jv9v9vW5ngE40EK51BluDA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [win32] - '@oxc-minify/binding-win32-ia32-msvc@0.110.0': - resolution: {integrity: sha512-H0Oyd3RWBfpEyvJIrFK94RYiY7KKSQl11Ym7LMDwLEagelIAfRCkt1amHZhFa/S3ZRoaOJFXzEw4YKeSsjVFsg==} + '@oxc-parser/binding-win32-ia32-msvc@0.112.0': + resolution: {integrity: sha512-rZH0JynCCwnhe2HfRoyNOl/Kfd9pudoWxgpC5OZhj7j77pMK0UOAa35hYDfrtSOUk2HLzrikV5dPUOY2DpSBSA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [ia32] os: [win32] - '@oxc-minify/binding-win32-x64-msvc@0.110.0': - resolution: {integrity: sha512-Hr3nK90+qXKJ2kepXwFIcNfQQIOBecB4FFCyaMMypthoEEhVP08heRynj4eSXZ8NL9hLjs3fQzH8PJXfpznRnQ==} + '@oxc-parser/binding-win32-x64-msvc@0.112.0': + resolution: {integrity: sha512-oGHluohzmVFAuQrkEnl1OXAxMz2aYmimxUqIgKXpBgbr7PvFv0doELB273sX+5V3fKeggohKg1A2Qq21W9Z9cQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [win32] - '@oxc-project/types@0.111.0': - resolution: {integrity: sha512-bh54LJMafgRGl2cPQ/QM+tI5rWaShm/wK9KywEj/w36MhiPKXYM67H2y3q+9pr4YO7ufwg2AKdBAZkhHBD8ClA==} + '@oxc-project/types@0.112.0': + resolution: {integrity: sha512-m6RebKHIRsax2iCwVpYW2ErQwa4ywHJrE4sCK3/8JK8ZZAWOKXaRJFl/uP51gaVyyXlaS4+chU1nSCdzYf6QqQ==} '@oxc-transform/binding-android-arm-eabi@0.110.0': resolution: {integrity: sha512-sE9dxvqqAax1YYJ3t7j+h5ZSI9jl6dYuDfngl6ieZUrIy5P89/8JKVgAzgp8o3wQSo7ndpJvYsi1K4ZqrmbP7w==} @@ -3036,42 +4000,84 @@ packages: cpu: [arm] os: [android] + '@oxc-transform/binding-android-arm-eabi@0.112.0': + resolution: {integrity: sha512-r4LuBaPnOAi0eUOBNi880Fm2tO2omH7N1FRrL6+nyz/AjQ+QPPLtoyZJva0O+sKi1buyN/7IzM5p9m+5ANSDbg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm] + os: [android] + '@oxc-transform/binding-android-arm64@0.110.0': resolution: {integrity: sha512-nqtbP4aMCtsCZ6qpHlHaQoWVHSBtlKzwaAgwEOvR+9DWqHjk31BHvpGiDXlMeed6CVNpl3lCbWgygb3RcSjcfw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [android] + '@oxc-transform/binding-android-arm64@0.112.0': + resolution: {integrity: sha512-ve46vQcQrY8eGe8990VSlS9gkD+AogJqbtfOkeua+5sQGQTDgeIRRxOm7ktCo19uZc2bEBwXRJITgosd+NRVmQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [android] + '@oxc-transform/binding-darwin-arm64@0.110.0': resolution: {integrity: 
sha512-oeSeHnL4Z4cMXtc8V0/rwoVn0dgwlS9q0j6LcHn9dIhtFEdp3W0iSBF8YmMQA+E7sILeLDjsHmHE4Kp0sOScXw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [darwin] + '@oxc-transform/binding-darwin-arm64@0.112.0': + resolution: {integrity: sha512-ddbmLU3Tr+i7MOynfwAXxUXud3SjJKlv7XNjaq08qiI8Av/QvhXVGc2bMhXkWQSMSBUeTDoiughKjK+Zsb6y/A==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [darwin] + '@oxc-transform/binding-darwin-x64@0.110.0': resolution: {integrity: sha512-nL9K5x7OuZydobAGPylsEW9d4APs2qEkIBLMgQPA+kY8dtVD3IR87QsTbs4l4DBQYyun/+ay6qVCDlxqxdX2Jg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [darwin] + '@oxc-transform/binding-darwin-x64@0.112.0': + resolution: {integrity: sha512-TKvmNw96jQZPqYb4pRrzLFDailNB3YS14KNn+x2hwRbqc6CqY96S9PYwyOpVpYdxfoRjYO9WgX9SoS+62a1DPA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [darwin] + '@oxc-transform/binding-freebsd-x64@0.110.0': resolution: {integrity: sha512-GS29zXXirDQhZEUq8xKJ1azAWMuUy3Ih3W5Bc5ddk12LRthO5wRLFcKIyeHpAXCoXymQ+LmxbMtbPf84GPxouw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [freebsd] + '@oxc-transform/binding-freebsd-x64@0.112.0': + resolution: {integrity: sha512-YPMkSCDaelO8HHYRMYjm+Q+IfkfIbdtQzwPuasItYkq8UUkNeHNPheNh2JkvQa3c+io3E9ePOgHQ2yihpk7o/Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [freebsd] + '@oxc-transform/binding-linux-arm-gnueabihf@0.110.0': resolution: {integrity: sha512-glzDHak8ISyZJemCUi7RCvzNSl+MQ1ly9RceT2qRufhUsvNZ4C/2QLJ1HJwd2N6E88bO4laYn+RofdRzNnGGEA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] + '@oxc-transform/binding-linux-arm-gnueabihf@0.112.0': + resolution: {integrity: sha512-nA7kzQGNEpuTRknst/IJ3l8hqmDmEda3aun6jkXgp7gKxESjuHeaNH04mKISxvJ7fIacvP2g/wtTSnm4u5jL8Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm] + os: [linux] + '@oxc-transform/binding-linux-arm-musleabihf@0.110.0': resolution: {integrity: sha512-8JThvgJ2FRoTVfbp7e4wqeZqCZbtudM06SfZmNzND9kPNu/LVYygIR+72RWs+xm4bWkuYHg/islo/boNPtMT5Q==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] + '@oxc-transform/binding-linux-arm-musleabihf@0.112.0': + resolution: {integrity: sha512-w8GuLmckKlGc3YujaZKhtbFxziCcosvM2l9GnQjCb/yENWLGDiyQOy0BTAgPGdJwpYTiOeJblEXSuXYvlE1Ong==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm] + os: [linux] + '@oxc-transform/binding-linux-arm64-gnu@0.110.0': resolution: {integrity: sha512-IRh21Ub/g4bkHoErZ0AUWMlWfoZaS0A6EaOVtbcY70RSYIMlrsbjiFwJCzM+b/1DD1rXbH5tsGcH7GweTbfRqg==} engines: {node: ^20.19.0 || >=22.12.0} @@ -3079,6 +4085,13 @@ packages: os: [linux] libc: [glibc] + '@oxc-transform/binding-linux-arm64-gnu@0.112.0': + resolution: {integrity: sha512-9LwwGnJ8+WT0rXcrI8M0RJtDNt91eMqcDPPEvJxhRFHIMcHTy5D5xT+fOl3Us0yMqKo3HUWkbfUYqAp4GoZ3Jw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [linux] + libc: [glibc] + '@oxc-transform/binding-linux-arm64-musl@0.110.0': resolution: {integrity: sha512-e5JN94/oy+wevk76q+LMr+2klTTcO60uXa+Wkq558Ms7mdF2TvkKFI++d/JeiuIwJLTi/BxQ4qdT5FWcsHM/ug==} engines: {node: ^20.19.0 || >=22.12.0} @@ -3086,6 +4099,13 @@ packages: os: [linux] libc: [musl] + '@oxc-transform/binding-linux-arm64-musl@0.112.0': + resolution: {integrity: sha512-Lg6VOuSd3oXv7J0eGywgqh/086h+qQzIBOD+47pYKMTTJcbDe+f3h/RgGoMKJE5HhiwT5sH1aGEJfIfaYUiVSw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [linux] + libc: [musl] + '@oxc-transform/binding-linux-ppc64-gnu@0.110.0': resolution: {integrity: 
sha512-Y3/Tnnz1GvDpmv8FXBIKtdZPsdZklOEPdrL6NHrN5i2u54BOkybFaDSptgWF53wOrJlTrcmAVSE6fRKK9XCM2Q==} engines: {node: ^20.19.0 || >=22.12.0} @@ -3093,6 +4113,13 @@ packages: os: [linux] libc: [glibc] + '@oxc-transform/binding-linux-ppc64-gnu@0.112.0': + resolution: {integrity: sha512-PXzmj82o1moA4IGphYImTRgc2youTi4VRfyFX3CHwLjxPcQ5JtcsgbDt4QUdOzXZ+zC07s5jf2ZzhRapEOlj2w==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [ppc64] + os: [linux] + libc: [glibc] + '@oxc-transform/binding-linux-riscv64-gnu@0.110.0': resolution: {integrity: sha512-Y0E35iA9/v9jlkNcP6tMJ+ZFOS0rLsWDqG6rU9z+X2R3fBFJBO9UARIK6ngx8upxk81y1TFR2CmBFhupfYdH6Q==} engines: {node: ^20.19.0 || >=22.12.0} @@ -3100,6 +4127,13 @@ packages: os: [linux] libc: [glibc] + '@oxc-transform/binding-linux-riscv64-gnu@0.112.0': + resolution: {integrity: sha512-vhJsMsVH/6xwa3bt1LGts33FXUkGjaEGDwsRyp4lIfOjSfQVWMtCmWMFNaA0dW9FVWdD2Gt2fSFBSZ+azDxlpg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [riscv64] + os: [linux] + libc: [glibc] + '@oxc-transform/binding-linux-riscv64-musl@0.110.0': resolution: {integrity: sha512-JOUSYFfHjBUs7xp2FHmZHb8eTYD/oEu0NklS6JgUauqnoXZHiTLPLVW2o2uVCqldnabYHcomuwI2iqVFYJNhTw==} engines: {node: ^20.19.0 || >=22.12.0} @@ -3107,6 +4141,13 @@ packages: os: [linux] libc: [musl] + '@oxc-transform/binding-linux-riscv64-musl@0.112.0': + resolution: {integrity: sha512-cXWFb7z+2IjFUEcXtRwluq9oEG5qnyFCjiu3SWrgYNcWwPdHusv3I/7K5/CTbbi4StoZ5txbi7/iSfDHNyWuRw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [riscv64] + os: [linux] + libc: [musl] + '@oxc-transform/binding-linux-s390x-gnu@0.110.0': resolution: {integrity: sha512-7blgoXF9D3Ngzb7eun23pNrHJpoV/TtE6LObwlZ3Nmb4oZ6Z+yMvBVaoW68NarbmvNGfZ95zrOjgm6cVETLYBA==} engines: {node: ^20.19.0 || >=22.12.0} @@ -3114,6 +4155,13 @@ packages: os: [linux] libc: [glibc] + '@oxc-transform/binding-linux-s390x-gnu@0.112.0': + resolution: {integrity: sha512-eEFu4SRqJTJ20/88KRWmp+jpHKAw0Y1DsnSgpEeXyBIIcsOaLIUMU/TfYWUmqRbvbMV9rmOmI3kp5xWYUq6kSQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [s390x] + os: [linux] + libc: [glibc] + '@oxc-transform/binding-linux-x64-gnu@0.110.0': resolution: {integrity: sha512-YQ2joGWCVDZVEU2cD/r/w49hVjDm/Qu1BvC/7zs8LvprzdLS/HyMXGF2oA0puw0b+AqgYaz3bhwKB2xexHyITQ==} engines: {node: ^20.19.0 || >=22.12.0} @@ -3121,6 +4169,13 @@ packages: os: [linux] libc: [glibc] + '@oxc-transform/binding-linux-x64-gnu@0.112.0': + resolution: {integrity: sha512-ST1MDT+TlOyZ1c5btrGinRSUW2Jf4Pa+0gdKwsyjDSOC3dxy2ZNkN3mosTf4ywc3J+mxfYKqtjs7zSwHz03ILA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [linux] + libc: [glibc] + '@oxc-transform/binding-linux-x64-musl@0.110.0': resolution: {integrity: sha512-fkjr5qE632ULmNgvFXWDR/8668WxERz3tU7TQFp6JebPBneColitjSkdx6VKNVXEoMmQnOvBIGeP5tUNT384oA==} engines: {node: ^20.19.0 || >=22.12.0} @@ -3128,35 +4183,71 @@ packages: os: [linux] libc: [musl] + '@oxc-transform/binding-linux-x64-musl@0.112.0': + resolution: {integrity: sha512-ISQoA3pD4cyTGpf9sXXeerH6pL2L6EIpdy6oAy2ttkswyVFDyQNVOVIGIdLZDgbpmqGljxZnWqt/J/N68pQaig==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [linux] + libc: [musl] + '@oxc-transform/binding-openharmony-arm64@0.110.0': resolution: {integrity: sha512-HWH9Zj+lMrdSTqFRCZsvDWMz7OnMjbdGsm3xURXWfRZpuaz0bVvyuZNDQXc4FyyhRDsemICaJbU1bgeIpUJDGw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [openharmony] + '@oxc-transform/binding-openharmony-arm64@0.112.0': + resolution: {integrity: sha512-UOGVrGIv7yLJovyEXEyUTADuLq98vd/cbMHFLJweRXD+11I8Tn4jASi4WzdsN8C3BVYGRHrXH2NlSBmhz33a4g==} + engines: 
{node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [openharmony] + '@oxc-transform/binding-wasm32-wasi@0.110.0': resolution: {integrity: sha512-ejdxHmYfIcHDPhZUe3WklViLt9mDEJE5BzcW7+R1vc5i/5JFA8D0l7NUSsHBJ7FB8Bu9gF+5iMDm6cXGAgaghw==} engines: {node: '>=14.0.0'} cpu: [wasm32] + '@oxc-transform/binding-wasm32-wasi@0.112.0': + resolution: {integrity: sha512-XIX7Gpq9koAvzBVHDlVFHM79r5uOVK6kTEsdsN4qaajpjkgtv4tdsAOKIYK6l7fUbsbE6xS+6w1+yRFrDeC1kg==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + '@oxc-transform/binding-win32-arm64-msvc@0.110.0': resolution: {integrity: sha512-9VTwpXCZs7xkV+mKhQ62dVk7KLnLXtEUxNS2T4nLz3iMl1IJbA4h5oltK0JoobtiUAnbkV53QmMVGW8+Nh3bDQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [win32] + '@oxc-transform/binding-win32-arm64-msvc@0.112.0': + resolution: {integrity: sha512-EgXef9kOne9BNsbYBbuRqxk2hteT0xsAGcx/VbtCBMJYNj8fANFhT271DUSOgfa4DAgrQQmsyt/Kr1aV9mpU9w==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [win32] + '@oxc-transform/binding-win32-ia32-msvc@0.110.0': resolution: {integrity: sha512-5y0fzuNON7/F2hh2P94vANFaRPJ/3DI1hVl5rseCT8VUVqOGIjWaza0YS/D1g6t1WwycW2LWDMi2raOKoWU5GQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [ia32] os: [win32] + '@oxc-transform/binding-win32-ia32-msvc@0.112.0': + resolution: {integrity: sha512-6QaB0qjNaou2YR+blncHdw7j0e26IOwOIjLbhVGDeuf9+4rjJeiqRXJ2hOtCcS4zblnao/MjdgQuZ3fM0nl+Kw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [ia32] + os: [win32] + '@oxc-transform/binding-win32-x64-msvc@0.110.0': resolution: {integrity: sha512-QROrowwlrApI1fEScMknGWKM6GTM/Z2xwMnDqvSaEmzNazBsDUlE08Jasw610hFEsYAVU2K5sp/YaCa9ORdP4A==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [win32] + '@oxc-transform/binding-win32-x64-msvc@0.112.0': + resolution: {integrity: sha512-FRKYlY959QeqRPx9kXs0HjU2xuXPT1cdF+vvA200D9uAX/KLcC34MwRqUKTYml4kCc2Vf/P2pBR9cQuBm3zECQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [win32] + '@parcel/watcher-android-arm64@2.5.1': resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} engines: {node: '>= 10.0.0'} @@ -3649,83 +4740,83 @@ packages: resolution: {integrity: sha512-0EbE8LRbkogtcCXU7liAyC00n9uNG9hJ+eMyHFdUsy9lB/WGqnEBgwjA9q2cyzAVcdTkQqTBBU1XePNnN3OijA==} engines: {node: '>=18.17.0', npm: '>=9.5.0'} - '@rolldown/binding-android-arm64@1.0.0-rc.2': - resolution: {integrity: sha512-AGV80viZ4Hil4C16GFH+PSwq10jclV9oyRFhD+5HdowPOCJ+G+99N5AClQvMkUMIahTY8cX0SQpKEEWcCg6fSA==} + '@rolldown/binding-android-arm64@1.0.0-rc.3': + resolution: {integrity: sha512-0T1k9FinuBZ/t7rZ8jN6OpUKPnUjNdYHoj/cESWrQ3ZraAJ4OMm6z7QjSfCxqj8mOp9kTKc1zHK3kGz5vMu+nQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [android] - '@rolldown/binding-darwin-arm64@1.0.0-rc.2': - resolution: {integrity: sha512-PYR+PQu1mMmQiiKHN2JiOctvH32Xc/Mf+Su2RSmWtC9BbIqlqsVWjbulnShk0imjRim0IsbkMMCN5vYQwiuqaA==} + '@rolldown/binding-darwin-arm64@1.0.0-rc.3': + resolution: {integrity: sha512-JWWLzvcmc/3pe7qdJqPpuPk91SoE/N+f3PcWx/6ZwuyDVyungAEJPvKm/eEldiDdwTmaEzWfIR+HORxYWrCi1A==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [darwin] - '@rolldown/binding-darwin-x64@1.0.0-rc.2': - resolution: {integrity: sha512-X2G36Z6oh5ynoYpE2JAyG+uQ4kO/3N7XydM/I98FNk8VVgDKjajFF+v7TXJ2FMq6xa7Xm0UIUKHW2MRQroqoUA==} + '@rolldown/binding-darwin-x64@1.0.0-rc.3': + resolution: {integrity: sha512-MTakBxfx3tde5WSmbHxuqlDsIW0EzQym+PJYGF4P6lG2NmKzi128OGynoFUqoD5ryCySEY85dug4v+LWGBElIw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [darwin] - 
'@rolldown/binding-freebsd-x64@1.0.0-rc.2': - resolution: {integrity: sha512-XpiFTsl9qjiDfrmJF6CE3dgj1nmSbxUIT+p2HIbXV6WOj/32btO8FKkWSsOphUwVinEt3R8HVkVrcLtFNruMMQ==} + '@rolldown/binding-freebsd-x64@1.0.0-rc.3': + resolution: {integrity: sha512-jje3oopyOLs7IwfvXoS6Lxnmie5JJO7vW29fdGFu5YGY1EDbVDhD+P9vDihqS5X6fFiqL3ZQZCMBg6jyHkSVww==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [freebsd] - '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.2': - resolution: {integrity: sha512-zjYZ99e47Wlygs4hW+sQ+kshlO8ake9OoY2ecnJ9cwpDGiiIB9rQ3LgP3kt8j6IeVyMSksu//VEhc8Mrd1lRIw==} + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.3': + resolution: {integrity: sha512-A0n8P3hdLAaqzSFrQoA42p23ZKBYQOw+8EH5r15Sa9X1kD9/JXe0YT2gph2QTWvdr0CVK2BOXiK6ENfy6DXOag==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] - '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.2': - resolution: {integrity: sha512-Piso04EZ9IHV1aZSsLQVMOPTiCq4Ps2UPL3pchjNXHGJGFiB9U42s22LubPaEBFS+i6tCawS5EarIwex1zC4BA==} + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.3': + resolution: {integrity: sha512-kWXkoxxarYISBJ4bLNf5vFkEbb4JvccOwxWDxuK9yee8lg5XA7OpvlTptfRuwEvYcOZf+7VS69Uenpmpyo5Bjw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] libc: [glibc] - '@rolldown/binding-linux-arm64-musl@1.0.0-rc.2': - resolution: {integrity: sha512-OwJCeMZlmjKsN9pfJfTmqYpe3JC+L6RO87+hu9ajRLr1Lh6cM2FRQ8e48DLRyRDww8Ti695XQvqEANEMmsuzLw==} + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.3': + resolution: {integrity: sha512-Z03/wrqau9Bicfgb3Dbs6SYTHliELk2PM2LpG2nFd+cGupTMF5kanLEcj2vuuJLLhptNyS61rtk7SOZ+lPsTUA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] libc: [musl] - '@rolldown/binding-linux-x64-gnu@1.0.0-rc.2': - resolution: {integrity: sha512-uQqBmA8dTWbKvfqbeSsXNUssRGfdgQCc0hkGfhQN7Pf85wG2h0Fd/z2d+ykyT4YbcsjQdgEGxBNsg3v4ekOuEA==} + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.3': + resolution: {integrity: sha512-iSXXZsQp08CSilff/DCTFZHSVEpEwdicV3W8idHyrByrcsRDVh9sGC3sev6d8BygSGj3vt8GvUKBPCoyMA4tgQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] libc: [glibc] - '@rolldown/binding-linux-x64-musl@1.0.0-rc.2': - resolution: {integrity: sha512-ItZabVsICCYWHbP+jcAgNzjPAYg5GIVQp/NpqT6iOgWctaMYtobClc5m0kNtxwqfNrLXoyt998xUey4AvcxnGQ==} + '@rolldown/binding-linux-x64-musl@1.0.0-rc.3': + resolution: {integrity: sha512-qaj+MFudtdCv9xZo9znFvkgoajLdc+vwf0Kz5N44g+LU5XMe+IsACgn3UG7uTRlCCvhMAGXm1XlpEA5bZBrOcw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] libc: [musl] - '@rolldown/binding-openharmony-arm64@1.0.0-rc.2': - resolution: {integrity: sha512-U4UYANwafcMXSUC0VqdrqTAgCo2v8T7SiuTYwVFXgia0KOl8jiv3okwCFqeZNuw/G6EWDiqhT8kK1DLgyLsxow==} + '@rolldown/binding-openharmony-arm64@1.0.0-rc.3': + resolution: {integrity: sha512-U662UnMETyjT65gFmG9ma+XziENrs7BBnENi/27swZPYagubfHRirXHG2oMl+pEax2WvO7Kb9gHZmMakpYqBHQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [openharmony] - '@rolldown/binding-wasm32-wasi@1.0.0-rc.2': - resolution: {integrity: sha512-ZIWCjQsMon4tqRoao0Vzowjwx0cmFT3kublh2nNlgeasIJMWlIGHtr0d4fPypm57Rqx4o1h4L8SweoK2q6sMGA==} + '@rolldown/binding-wasm32-wasi@1.0.0-rc.3': + resolution: {integrity: sha512-gekrQ3Q2HiC1T5njGyuUJoGpK/l6B/TNXKed3fZXNf9YRTJn3L5MOZsFBn4bN2+UX+8+7hgdlTcEsexX988G4g==} engines: {node: '>=14.0.0'} cpu: [wasm32] - '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.2': - resolution: {integrity: sha512-NIo7vwRUPEzZ4MuZGr5YbDdjJ84xdiG+YYf8ZBfTgvIsk9wM0sZamJPEXvaLkzVIHpOw5uqEHXS85Gqqb7aaqQ==} + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.3': + resolution: {integrity: 
sha512-85y5JifyMgs8m5K2XzR/VDsapKbiFiohl7s5lEj7nmNGO0pkTXE7q6TQScei96BNAsoK7JC3pA7ukA8WRHVJpg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [win32] - '@rolldown/binding-win32-x64-msvc@1.0.0-rc.2': - resolution: {integrity: sha512-bLKzyLFbvngeNPZocuLo3LILrKwCrkyMxmRXs6fZYDrvh7cyZRw9v56maDL9ipPas0OOmQK1kAKYwvTs30G21Q==} + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.3': + resolution: {integrity: sha512-a4VUQZH7LxGbUJ3qJ/TzQG8HxdHvf+jOnqf7B7oFx1TEBm+j2KNL2zr5SQ7wHkNAcaPevF6gf9tQnVBnC4mD+A==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [win32] @@ -3736,8 +4827,21 @@ packages: '@rolldown/pluginutils@1.0.0-beta.53': resolution: {integrity: sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==} - '@rolldown/pluginutils@1.0.0-rc.2': - resolution: {integrity: sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==} + '@rolldown/pluginutils@1.0.0-rc.3': + resolution: {integrity: sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==} + + '@rollup/plugin-babel@6.1.0': + resolution: {integrity: sha512-dFZNuFD2YRcoomP4oYf+DvQNSUA9ih+A3vUqopQx5EdtPGo3WBnQcI/S8pwpz91UsGfL0HsMSOlaMld8HrbubA==} + engines: {node: '>=14.0.0'} + peerDependencies: + '@babel/core': ^7.0.0 + '@types/babel__core': ^7.1.9 + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + '@types/babel__core': + optional: true + rollup: + optional: true '@rollup/plugin-commonjs@22.0.2': resolution: {integrity: sha512-//NdP6iIwPbMTcazYsiBMbJW7gfmpHom33u1beiIoHDEM0Q9clvtQB1T0efvMqHeKsGohiHo97BCPCkBXdscwg==} @@ -3751,6 +4855,15 @@ packages: peerDependencies: rollup: ^1.20.0||^2.0.0 + '@rollup/pluginutils@5.3.0': + resolution: {integrity: sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==} + engines: {node: '>=14.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + '@rollup/rollup-android-arm-eabi@4.53.3': resolution: {integrity: sha512-mRSi+4cBjrRLoaal2PnqH82Wqyb+d3HsPUN/W+WslCXsZsyHa9ZeQQX/pQsZaVIWDkPcpV6jJ+3KLbTbgnwv8w==} cpu: [arm] @@ -4503,6 +5616,24 @@ packages: '@types/chai@5.2.2': resolution: {integrity: sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==} + '@types/d3-color@3.1.3': + resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + + '@types/d3-drag@3.0.7': + resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==} + + '@types/d3-interpolate@3.0.4': + resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} + + '@types/d3-selection@3.0.11': + resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==} + + '@types/d3-transition@3.0.9': + resolution: {integrity: sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==} + + '@types/d3-zoom@3.0.8': + resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==} + '@types/debug@4.1.12': resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} @@ -4583,6 +5714,9 @@ packages: '@types/vscode@1.108.1': resolution: {integrity: 
sha512-DerV0BbSzt87TbrqmZ7lRDIYaMiqvP8tmJTzW2p49ZBVtGUnGAu2RGQd1Wv4XMzEVUpaHbsemVM5nfuQJj7H6w==} + '@types/ws@8.18.1': + resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} + '@types/yargs-parser@21.0.3': resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} @@ -4726,27 +5860,18 @@ packages: '@vitest/pretty-format@4.1.0-beta.1': resolution: {integrity: sha512-CeI3uthjV/XKA6KBCr/B5HlCQaFdCgprdl7gBg/sUExQPary8BBhYoVWJeAPTeg9u+ppT9S4v/sYjjNjn3Qsrw==} - '@vitest/pretty-format@4.1.0-beta.2': - resolution: {integrity: sha512-Ms6NWhaLbJC8UaxLg+OLneAbGFoaAgTvN0RDSjso3QOwcwv6Aqy9QWXl1NT07+BRrK+EOSjgRJQNb8f1pkyLqw==} - '@vitest/runner@4.0.17': resolution: {integrity: sha512-JmuQyf8aMWoo/LmNFppdpkfRVHJcsgzkbCA+/Bk7VfNH7RE6Ut2qxegeyx2j3ojtJtKIbIGy3h+KxGfYfk28YQ==} '@vitest/runner@4.1.0-beta.1': resolution: {integrity: sha512-oE0nFu+0zT6IhhAu8Z9wWCWWy63a7btZLvq4zUkrGwJ9U4sabXHWzYakBE6ZDLXpI8aDv796+0AMej2AJ7m3tw==} - '@vitest/runner@4.1.0-beta.2': - resolution: {integrity: sha512-5ZU0gHEVEV0414RSj57ov1AWVLtTtQzH/BEDDafv94E92frTGoOLKHRPkLRiw7efBxD+R1L3tUphBvw0R5mNJw==} - '@vitest/snapshot@4.0.17': resolution: {integrity: sha512-npPelD7oyL+YQM2gbIYvlavlMVWUfNNGZPcu0aEUQXt7FXTuqhmgiYupPnAanhKvyP6Srs2pIbWo30K0RbDtRQ==} '@vitest/snapshot@4.1.0-beta.1': resolution: {integrity: sha512-wSt0PAy1QCZjzPUgpIYXBtZFTFXPw65GIQxz9mjhl0yjkAE+wnRU08+w3R3X5hrCKYVhTS3HHV8zs6Yin2K0Dw==} - '@vitest/snapshot@4.1.0-beta.2': - resolution: {integrity: sha512-wqepqIP9VDfD5dTDq2x3J1hW2Qd2BOlhBGrFS0Ye0pN0I3cFpX2+d10GbbRxN5NilofXaZnSo8avW/0zboDMtQ==} - '@vitest/spy@4.0.17': resolution: {integrity: sha512-I1bQo8QaP6tZlTomQNWKJE6ym4SHf3oLS7ceNjozxxgzavRAgZDc06T7kD8gb9bXKEgcLNt00Z+kZO6KaJ62Ew==} @@ -4769,9 +5894,6 @@ packages: '@vitest/utils@4.1.0-beta.1': resolution: {integrity: sha512-IUCsqDFj8E8WJq3wGRQ7MiMb2571tjTnjyrJ1oy+0HODutA2TpZGRqBA8ziLCIWTOL/e4RArE2k6eZh/jXgk9A==} - '@vitest/utils@4.1.0-beta.2': - resolution: {integrity: sha512-c5ZnkcNaSANcFn70YNwGNb8vXCWjFKGbIJ1tFVUf5+oBXNFfNqksTOqHDPydS2vi3IPVqYjA2n5UqKZus20qDw==} - '@vscode/vsce-sign-alpine-arm64@2.0.6': resolution: {integrity: sha512-wKkJBsvKF+f0GfsUuGT0tSW0kZL87QggEiqNqK6/8hvqsXvpx8OsTEc3mnE1kejkh5r+qUyQ7PtF8jZYN0mo8Q==} cpu: [arm64] @@ -4840,6 +5962,15 @@ packages: '@vue/shared@3.5.25': resolution: {integrity: sha512-AbOPdQQnAnzs58H2FrrDxYj/TJfmeS2jdfEEhgiKINy+bnOANmVizIEgq1r+C5zsbs6l1CCQxtcj71rwNQ4jWg==} + '@xyflow/react@12.10.0': + resolution: {integrity: sha512-eOtz3whDMWrB4KWVatIBrKuxECHqip6PfA8fTpaS2RUGVpiEAe+nqDKsLqkViVWxDGreq0lWX71Xth/SPAzXiw==} + peerDependencies: + react: '>=17' + react-dom: '>=17' + + '@xyflow/system@0.0.74': + resolution: {integrity: sha512-7v7B/PkiVrkdZzSbL+inGAo6tkR/WQHHG0/jhSvLQToCsfa8YubOGmBYd1s08tpKpihdHDZFwzQZeR69QSBb4Q==} + abort-controller@3.0.0: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} @@ -5183,6 +6314,9 @@ packages: class-variance-authority@0.7.1: resolution: {integrity: sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==} + classcat@5.0.5: + resolution: {integrity: sha512-JhZUT7JFcQy/EzW605k/ktHtncoo9vnyW/2GspNYwFlN1C/WmjuV/xtS04e9SOkL2sTdw0VAZ2UGCcQ9lR6p6w==} + clean-regexp@1.0.0: resolution: {integrity: sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==} engines: {node: '>=4'} @@ -5337,6 +6471,9 @@ 
packages: typescript: optional: true + cross-fetch@4.1.0: + resolution: {integrity: sha512-uKm5PU+MHTootlWEY+mZ4vvXoCn4fLQxT9dSc1sXVMSFkINTJVN8cAQROpwcKm8bJ/c7rgZVIBWzH5T78sNZZw==} + cross-spawn@7.0.6: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} @@ -5349,6 +6486,14 @@ packages: srvx: optional: true + crossws@0.4.4: + resolution: {integrity: sha512-w6c4OdpRNnudVmcgr7brb/+/HmYjMQvYToO/oTrprTwxRUiom3LYWU1PMWuD006okbUWpII1Ea9/+kwpUfmyRg==} + peerDependencies: + srvx: '>=0.7.1' + peerDependenciesMeta: + srvx: + optional: true + css-select@5.2.2: resolution: {integrity: sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==} @@ -5372,6 +6517,44 @@ packages: csstype@3.2.3: resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + d3-dispatch@3.0.1: + resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==} + engines: {node: '>=12'} + + d3-drag@3.0.0: + resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==} + engines: {node: '>=12'} + + d3-ease@3.0.1: + resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + engines: {node: '>=12'} + + d3-selection@3.0.0: + resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + + d3-transition@3.0.1: + resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==} + engines: {node: '>=12'} + peerDependencies: + d3-selection: 2 - 3 + + d3-zoom@3.0.0: + resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==} + engines: {node: '>=12'} + data-uri-to-buffer@2.0.2: resolution: {integrity: sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==} @@ -5504,6 +6687,10 @@ packages: engines: {node: '>=0.10'} hasBin: true + detect-libc@2.0.2: + resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} + engines: {node: '>=8'} + detect-libc@2.1.2: resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} @@ -5543,6 +6730,102 @@ packages: resolution: {integrity: sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==} engines: {node: '>=12'} + drizzle-kit@0.31.8: + resolution: {integrity: sha512-O9EC/miwdnRDY10qRxM8P3Pg8hXe3LyU4ZipReKOgTwn4OqANmftj8XJz1UPUAS6NMHf0E2htjsbQujUTkncCg==} + hasBin: true + + drizzle-orm@0.45.1: + resolution: {integrity: sha512-Te0FOdKIistGNPMq2jscdqngBRfBpC8uMFVwqjf6gtTVJHIQ/dosgV/CLBU2N4ZJBsXL5savCba9b0YJskKdcA==} 
+ peerDependencies: + '@aws-sdk/client-rds-data': '>=3' + '@cloudflare/workers-types': '>=4' + '@electric-sql/pglite': '>=0.2.0' + '@libsql/client': '>=0.10.0' + '@libsql/client-wasm': '>=0.10.0' + '@neondatabase/serverless': '>=0.10.0' + '@op-engineering/op-sqlite': '>=2' + '@opentelemetry/api': ^1.4.1 + '@planetscale/database': '>=1.13' + '@prisma/client': '*' + '@tidbcloud/serverless': '*' + '@types/better-sqlite3': '*' + '@types/pg': '*' + '@types/sql.js': '*' + '@upstash/redis': '>=1.34.7' + '@vercel/postgres': '>=0.8.0' + '@xata.io/client': '*' + better-sqlite3: '>=7' + bun-types: '*' + expo-sqlite: '>=14.0.0' + gel: '>=2' + knex: '*' + kysely: '*' + mysql2: '>=2' + pg: '>=8' + postgres: '>=3' + prisma: '*' + sql.js: '>=1' + sqlite3: '>=5' + peerDependenciesMeta: + '@aws-sdk/client-rds-data': + optional: true + '@cloudflare/workers-types': + optional: true + '@electric-sql/pglite': + optional: true + '@libsql/client': + optional: true + '@libsql/client-wasm': + optional: true + '@neondatabase/serverless': + optional: true + '@op-engineering/op-sqlite': + optional: true + '@opentelemetry/api': + optional: true + '@planetscale/database': + optional: true + '@prisma/client': + optional: true + '@tidbcloud/serverless': + optional: true + '@types/better-sqlite3': + optional: true + '@types/pg': + optional: true + '@types/sql.js': + optional: true + '@upstash/redis': + optional: true + '@vercel/postgres': + optional: true + '@xata.io/client': + optional: true + better-sqlite3: + optional: true + bun-types: + optional: true + expo-sqlite: + optional: true + gel: + optional: true + knex: + optional: true + kysely: + optional: true + mysql2: + optional: true + pg: + optional: true + postgres: + optional: true + prisma: + optional: true + sql.js: + optional: true + sqlite3: + optional: true + dts-resolver@2.1.3: resolution: {integrity: sha512-bihc7jPC90VrosXNzK0LTE2cuLP6jr0Ro8jk+kMugHReJVLIpHz/xadeq3MhuwyO4TD4OA3L1Q8pBBFRc08Tsw==} engines: {node: '>=20.19.0'} @@ -5665,6 +6948,21 @@ packages: esast-util-from-js@2.0.1: resolution: {integrity: sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==} + esbuild-register@3.6.0: + resolution: {integrity: sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==} + peerDependencies: + esbuild: '>=0.12 <1' + + esbuild@0.18.20: + resolution: {integrity: sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==} + engines: {node: '>=12'} + hasBin: true + + esbuild@0.25.12: + resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} + engines: {node: '>=18'} + hasBin: true + esbuild@0.27.0: resolution: {integrity: sha512-jd0f4NHbD6cALCyGElNpGAOtWxSq46l9X/sWB0Nzd5er4Kz2YTm+Vl0qKFT9KUJvD8+fiO8AvoHhFvEatfVixA==} engines: {node: '>=18'} @@ -5940,6 +7238,9 @@ packages: resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} engines: {node: '>=0.10'} + esrap@2.2.2: + resolution: {integrity: sha512-zA6497ha+qKvoWIK+WM9NAh5ni17sKZKhbS5B3PoYbBvaYHZWoS33zmFybmyqpn07RLUxSmn+RCls2/XF+d0oQ==} + esrecurse@4.3.0: resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} engines: {node: '>=4.0'} @@ -6817,6 +8118,9 @@ packages: jose@6.1.3: resolution: {integrity: sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==} + 
js-base64@3.7.8: + resolution: {integrity: sha512-hNngCeKxIUQiEUN3GPJOkz4wF/YvdUdbNL9hsBcMQTkKzboD7T/q3OYOuuPZLUE6dBxSGpwhk5mwuDud7JVAow==} + js-levenshtein@1.1.6: resolution: {integrity: sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==} engines: {node: '>=0.10.0'} @@ -6944,6 +8248,11 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} + libsql@0.5.22: + resolution: {integrity: sha512-NscWthMQt7fpU8lqd7LXMvT9pi+KhhmTHAJWUB/Lj6MWa0MKFv0F2V4C6WKKpjCVZl0VwcDz4nOI3CyaT1DDiA==} + cpu: [x64, arm64, wasm32, arm] + os: [darwin, linux, win32] + lightningcss-android-arm64@1.30.2: resolution: {integrity: sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==} engines: {node: '>= 12.0.0'} @@ -7472,7 +8781,7 @@ packages: engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: - rolldown: 1.0.0-rc.2 + rolldown: 1.0.0-rc.3 rollup: ^4 vite: ^7 || ^8 || >=8.0.0-0 xml2js: ^0.6.2 @@ -7641,10 +8950,18 @@ packages: resolution: {integrity: sha512-KWGTzPo83QmGrXC4ml83PM9HDwUPtZFfasiclUvTV4i3/0j7xRRqINVkrL77CbQnoWura3CMxkRofjQKVDuhBw==} engines: {node: ^20.19.0 || >=22.12.0} + oxc-parser@0.112.0: + resolution: {integrity: sha512-7rQ3QdJwobMQLMZwQaPuPYMEF2fDRZwf51lZ//V+bA37nejjKW5ifMHbbCwvA889Y4RLhT+/wLJpPRhAoBaZYw==} + engines: {node: ^20.19.0 || >=22.12.0} + oxc-transform@0.110.0: resolution: {integrity: sha512-/fymQNzzUoKZweH0nC5yvbI2eR0yWYusT9TEKDYVgOgYrf9Qmdez9lUFyvxKR9ycx+PTHi/reIOzqf3wkShQsw==} engines: {node: ^20.19.0 || >=22.12.0} + oxc-transform@0.112.0: + resolution: {integrity: sha512-cIRRvZgrHfsAHrkt8LWdAX4+Do8R0MzQSfeo9yzErzHeYiuyNiP4PCTPbOy/wBXL4MYzt3ebrBa5jt3akQkKAg==} + engines: {node: ^20.19.0 || >=22.12.0} + p-limit@3.1.0: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} engines: {node: '>=10'} @@ -7833,6 +9150,9 @@ packages: printable-characters@1.0.42: resolution: {integrity: sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==} + promise-limit@2.7.0: + resolution: {integrity: sha512-7nJ6v5lnJsXwGprnGXga4wx6d1POjvi5Qmf1ivTRxTjH4Z/9Czja/UCMLVmB9N93GeWOU93XaFaEt6jbuoagNw==} + prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} @@ -8093,7 +9413,7 @@ packages: peerDependencies: '@ts-macro/tsc': ^0.3.6 '@typescript/native-preview': '>=7.0.0-dev.20250601.1' - rolldown: 1.0.0-rc.2 + rolldown: 1.0.0-rc.3 typescript: ^5.0.0 vue-tsc: ~3.2.0 peerDependenciesMeta: @@ -8106,8 +9426,8 @@ packages: vue-tsc: optional: true - rolldown@1.0.0-rc.2: - resolution: {integrity: sha512-1g/8Us9J8sgJGn3hZfBecX1z4U3y5KO7V/aV2U1M/9UUzLNqHA8RfFQ/NPT7HLxOIldyIgrcjaYTRvA81KhJIg==} + rolldown@1.0.0-rc.3: + resolution: {integrity: sha512-Po/YZECDOqVXjIXrtC5h++a5NLvKAQNrd9ggrIG3sbDfGO5BqTUsrI6l8zdniKRp3r5Tp/2JTrXqx4GIguFCMw==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true @@ -8742,12 +10062,8 @@ packages: resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==} engines: {node: '>=16'} - type-fest@5.3.0: - resolution: {integrity: sha512-d9CwU93nN0IA1QL+GSNDdwLAu1Ew5ZjTwupvedwg3WdfoH6pIDvYQ2hV0Uc2nKBLPq7NB5apCx57MLS5qlmO5g==} - engines: {node: '>=20'} - - type-fest@5.4.2: - resolution: {integrity: 
sha512-FLEenlVYf7Zcd34ISMLo3ZzRE1gRjY1nMDTp+bQRBiPsaKyIW8K3Zr99ioHDUgA9OGuGGJPyYpNcffGmBhJfGg==} + type-fest@5.4.3: + resolution: {integrity: sha512-AXSAQJu79WGc79/3e9/CR77I/KQgeY1AhNvcShIH4PTcGYyC4xv6H4R4AUOwkPS5799KlVDAu8zExeCrkGquiA==} engines: {node: '>=20'} type-is@2.0.1: @@ -8800,10 +10116,6 @@ packages: undici-types@7.10.0: resolution: {integrity: sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==} - undici@7.16.0: - resolution: {integrity: sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g==} - engines: {node: '>=20.18.1'} - undici@7.18.2: resolution: {integrity: sha512-y+8YjDFzWdQlSE9N5nzKMT3g4a5UBX1HKowfdXh0uvAnTaqqwqB92Jt4UXBAeKekDs5IaDKyJFR4X1gYVCgXcw==} engines: {node: '>=20.18.1'} @@ -8840,9 +10152,6 @@ packages: unist-util-visit-parents@6.0.2: resolution: {integrity: sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==} - unist-util-visit@5.0.0: - resolution: {integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==} - unist-util-visit@5.1.0: resolution: {integrity: sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==} @@ -9395,12 +10704,24 @@ packages: zod@3.25.76: resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} - zod@4.3.5: - resolution: {integrity: sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==} - zod@4.3.6: resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==} + zustand@4.5.7: + resolution: {integrity: sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw==} + engines: {node: '>=12.7.0'} + peerDependencies: + '@types/react': '>=16.8' + immer: '>=9.0.6' + react: '>=16.8' + peerDependenciesMeta: + '@types/react': + optional: true + immer: + optional: true + react: + optional: true + zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} @@ -9828,10 +11149,10 @@ snapshots: optionalDependencies: workerd: 1.20260128.0 - '@cloudflare/vitest-pool-workers@https://pkg.pr.new/@cloudflare/vitest-pool-workers@11632(@vitest/runner@4.1.0-beta.2)(@vitest/snapshot@4.1.0-beta.2)(vitest@4.1.0-beta.1)': + '@cloudflare/vitest-pool-workers@https://pkg.pr.new/@cloudflare/vitest-pool-workers@11632(@vitest/runner@4.1.0-beta.1)(@vitest/snapshot@4.1.0-beta.1)(vitest@4.1.0-beta.1)': dependencies: - '@vitest/runner': 4.1.0-beta.2 - '@vitest/snapshot': 4.1.0-beta.2 + '@vitest/runner': 4.1.0-beta.1 + '@vitest/snapshot': 4.1.0-beta.1 cjs-module-lexer: 1.4.3 esbuild: 0.27.0 miniflare: https://pkg.pr.new/cloudflare/workers-sdk/miniflare@79a1932 @@ -9925,6 +11246,8 @@ snapshots: '@dprint/toml@0.7.0': {} + '@drizzle-team/brocli@0.10.2': {} + '@ecies/ciphers@0.2.5(@noble/ciphers@1.3.0)': dependencies: '@noble/ciphers': 1.3.0 @@ -9969,7 +11292,7 @@ snapshots: '@effect/sql': 0.48.6(@effect/experimental@0.57.11(@effect/platform@0.93.6(effect@3.19.9))(effect@3.19.9)(ioredis@5.8.2))(@effect/platform@0.93.6(effect@3.19.9))(effect@3.19.9) effect: 3.19.9 mime: 3.0.0 - undici: 7.16.0 + undici: 7.18.2 ws: 8.18.3 transitivePeerDependencies: - bufferutil @@ -10036,156 +11359,310 @@ snapshots: '@es-joy/resolve.exports@1.2.0': {} + '@esbuild-kit/core-utils@3.3.2': + dependencies: + esbuild: 
0.18.20 + source-map-support: 0.5.21 + + '@esbuild-kit/esm-loader@2.6.5': + dependencies: + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.13.0 + + '@esbuild/aix-ppc64@0.25.12': + optional: true + '@esbuild/aix-ppc64@0.27.0': optional: true '@esbuild/aix-ppc64@0.27.2': optional: true + '@esbuild/android-arm64@0.18.20': + optional: true + + '@esbuild/android-arm64@0.25.12': + optional: true + '@esbuild/android-arm64@0.27.0': optional: true '@esbuild/android-arm64@0.27.2': optional: true + '@esbuild/android-arm@0.18.20': + optional: true + + '@esbuild/android-arm@0.25.12': + optional: true + '@esbuild/android-arm@0.27.0': optional: true '@esbuild/android-arm@0.27.2': optional: true + '@esbuild/android-x64@0.18.20': + optional: true + + '@esbuild/android-x64@0.25.12': + optional: true + '@esbuild/android-x64@0.27.0': optional: true '@esbuild/android-x64@0.27.2': optional: true + '@esbuild/darwin-arm64@0.18.20': + optional: true + + '@esbuild/darwin-arm64@0.25.12': + optional: true + '@esbuild/darwin-arm64@0.27.0': optional: true '@esbuild/darwin-arm64@0.27.2': optional: true + '@esbuild/darwin-x64@0.18.20': + optional: true + + '@esbuild/darwin-x64@0.25.12': + optional: true + '@esbuild/darwin-x64@0.27.0': optional: true '@esbuild/darwin-x64@0.27.2': optional: true + '@esbuild/freebsd-arm64@0.18.20': + optional: true + + '@esbuild/freebsd-arm64@0.25.12': + optional: true + '@esbuild/freebsd-arm64@0.27.0': optional: true '@esbuild/freebsd-arm64@0.27.2': optional: true + '@esbuild/freebsd-x64@0.18.20': + optional: true + + '@esbuild/freebsd-x64@0.25.12': + optional: true + '@esbuild/freebsd-x64@0.27.0': optional: true '@esbuild/freebsd-x64@0.27.2': optional: true + '@esbuild/linux-arm64@0.18.20': + optional: true + + '@esbuild/linux-arm64@0.25.12': + optional: true + '@esbuild/linux-arm64@0.27.0': optional: true '@esbuild/linux-arm64@0.27.2': optional: true + '@esbuild/linux-arm@0.18.20': + optional: true + + '@esbuild/linux-arm@0.25.12': + optional: true + '@esbuild/linux-arm@0.27.0': optional: true '@esbuild/linux-arm@0.27.2': optional: true + '@esbuild/linux-ia32@0.18.20': + optional: true + + '@esbuild/linux-ia32@0.25.12': + optional: true + '@esbuild/linux-ia32@0.27.0': optional: true '@esbuild/linux-ia32@0.27.2': optional: true + '@esbuild/linux-loong64@0.18.20': + optional: true + + '@esbuild/linux-loong64@0.25.12': + optional: true + '@esbuild/linux-loong64@0.27.0': optional: true '@esbuild/linux-loong64@0.27.2': optional: true + '@esbuild/linux-mips64el@0.18.20': + optional: true + + '@esbuild/linux-mips64el@0.25.12': + optional: true + '@esbuild/linux-mips64el@0.27.0': optional: true '@esbuild/linux-mips64el@0.27.2': optional: true + '@esbuild/linux-ppc64@0.18.20': + optional: true + + '@esbuild/linux-ppc64@0.25.12': + optional: true + '@esbuild/linux-ppc64@0.27.0': optional: true '@esbuild/linux-ppc64@0.27.2': optional: true + '@esbuild/linux-riscv64@0.18.20': + optional: true + + '@esbuild/linux-riscv64@0.25.12': + optional: true + '@esbuild/linux-riscv64@0.27.0': optional: true '@esbuild/linux-riscv64@0.27.2': optional: true + '@esbuild/linux-s390x@0.18.20': + optional: true + + '@esbuild/linux-s390x@0.25.12': + optional: true + '@esbuild/linux-s390x@0.27.0': optional: true '@esbuild/linux-s390x@0.27.2': optional: true + '@esbuild/linux-x64@0.18.20': + optional: true + + '@esbuild/linux-x64@0.25.12': + optional: true + '@esbuild/linux-x64@0.27.0': optional: true '@esbuild/linux-x64@0.27.2': optional: true + '@esbuild/netbsd-arm64@0.25.12': + optional: true + 
'@esbuild/netbsd-arm64@0.27.0': optional: true '@esbuild/netbsd-arm64@0.27.2': optional: true + '@esbuild/netbsd-x64@0.18.20': + optional: true + + '@esbuild/netbsd-x64@0.25.12': + optional: true + '@esbuild/netbsd-x64@0.27.0': optional: true '@esbuild/netbsd-x64@0.27.2': optional: true + '@esbuild/openbsd-arm64@0.25.12': + optional: true + '@esbuild/openbsd-arm64@0.27.0': optional: true '@esbuild/openbsd-arm64@0.27.2': optional: true + '@esbuild/openbsd-x64@0.18.20': + optional: true + + '@esbuild/openbsd-x64@0.25.12': + optional: true + '@esbuild/openbsd-x64@0.27.0': optional: true '@esbuild/openbsd-x64@0.27.2': optional: true + '@esbuild/openharmony-arm64@0.25.12': + optional: true + '@esbuild/openharmony-arm64@0.27.0': optional: true '@esbuild/openharmony-arm64@0.27.2': optional: true + '@esbuild/sunos-x64@0.18.20': + optional: true + + '@esbuild/sunos-x64@0.25.12': + optional: true + '@esbuild/sunos-x64@0.27.0': optional: true '@esbuild/sunos-x64@0.27.2': optional: true + '@esbuild/win32-arm64@0.18.20': + optional: true + + '@esbuild/win32-arm64@0.25.12': + optional: true + '@esbuild/win32-arm64@0.27.0': optional: true '@esbuild/win32-arm64@0.27.2': optional: true + '@esbuild/win32-ia32@0.18.20': + optional: true + + '@esbuild/win32-ia32@0.25.12': + optional: true + '@esbuild/win32-ia32@0.27.0': optional: true '@esbuild/win32-ia32@0.27.2': optional: true + '@esbuild/win32-x64@0.18.20': + optional: true + + '@esbuild/win32-x64@0.25.12': + optional: true + '@esbuild/win32-x64@0.27.0': optional: true @@ -10294,15 +11771,15 @@ snapshots: dependencies: '@eslint/core': 0.17.0 - '@eslint/config-helpers@0.5.1': + '@eslint/config-helpers@0.5.2': dependencies: - '@eslint/core': 1.0.1 + '@eslint/core': 1.1.0 '@eslint/core@0.17.0': dependencies: '@types/json-schema': 7.0.15 - '@eslint/core@1.0.1': + '@eslint/core@1.1.0': dependencies: '@types/json-schema': 7.0.15 @@ -10345,7 +11822,7 @@ snapshots: '@eslint/plugin-kit@0.5.1': dependencies: - '@eslint/core': 1.0.1 + '@eslint/core': 1.1.0 levn: 0.4.1 '@floating-ui/core@1.7.3': @@ -10412,6 +11889,10 @@ snapshots: '@humanwhocodes/retry@0.4.3': {} + '@icons-pack/react-simple-icons@13.8.0(react@19.2.4)': + dependencies: + react: 19.2.4 + '@img/colour@1.0.0': {} '@img/sharp-darwin-arm64@0.34.5': @@ -10620,6 +12101,68 @@ snapshots: dependencies: jsep: 1.4.0 + '@libsql/client@0.17.0': + dependencies: + '@libsql/core': 0.17.0 + '@libsql/hrana-client': 0.9.0 + js-base64: 3.7.8 + libsql: 0.5.22 + promise-limit: 2.7.0 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + '@libsql/core@0.17.0': + dependencies: + js-base64: 3.7.8 + + '@libsql/darwin-arm64@0.5.22': + optional: true + + '@libsql/darwin-x64@0.5.22': + optional: true + + '@libsql/hrana-client@0.9.0': + dependencies: + '@libsql/isomorphic-ws': 0.1.5 + cross-fetch: 4.1.0 + js-base64: 3.7.8 + node-fetch: 3.3.2 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + '@libsql/isomorphic-ws@0.1.5': + dependencies: + '@types/ws': 8.18.1 + ws: 8.18.3 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + '@libsql/linux-arm-gnueabihf@0.5.22': + optional: true + + '@libsql/linux-arm-musleabihf@0.5.22': + optional: true + + '@libsql/linux-arm64-gnu@0.5.22': + optional: true + + '@libsql/linux-arm64-musl@0.5.22': + optional: true + + '@libsql/linux-x64-gnu@0.5.22': + optional: true + + '@libsql/linux-x64-musl@0.5.22': + optional: true + + '@libsql/win32-x64-msvc@0.5.22': + optional: true + 
'@luxass/eslint-config@7.0.0(@eslint-react/eslint-plugin@2.8.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(@vue/compiler-sfc@3.5.25)(eslint-plugin-format@1.3.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.17)': dependencies: '@antfu/install-pkg': 1.1.0 @@ -10755,7 +12298,7 @@ snapshots: unified: 11.0.5 unist-util-position-from-estree: 2.0.0 unist-util-stringify-position: 4.0.0 - unist-util-visit: 5.0.0 + unist-util-visit: 5.1.0 vfile: 6.0.3 transitivePeerDependencies: - supports-color @@ -10813,6 +12356,8 @@ snapshots: '@tybys/wasm-util': 0.10.1 optional: true + '@neon-rs/load@0.0.4': {} + '@noble/ciphers@1.3.0': {} '@noble/curves@1.9.7': @@ -10878,117 +12423,241 @@ snapshots: '@oxc-minify/binding-freebsd-x64@0.110.0': optional: true - '@oxc-minify/binding-linux-arm-gnueabihf@0.110.0': + '@oxc-minify/binding-linux-arm-gnueabihf@0.110.0': + optional: true + + '@oxc-minify/binding-linux-arm-musleabihf@0.110.0': + optional: true + + '@oxc-minify/binding-linux-arm64-gnu@0.110.0': + optional: true + + '@oxc-minify/binding-linux-arm64-musl@0.110.0': + optional: true + + '@oxc-minify/binding-linux-ppc64-gnu@0.110.0': + optional: true + + '@oxc-minify/binding-linux-riscv64-gnu@0.110.0': + optional: true + + '@oxc-minify/binding-linux-riscv64-musl@0.110.0': + optional: true + + '@oxc-minify/binding-linux-s390x-gnu@0.110.0': + optional: true + + '@oxc-minify/binding-linux-x64-gnu@0.110.0': + optional: true + + '@oxc-minify/binding-linux-x64-musl@0.110.0': + optional: true + + '@oxc-minify/binding-openharmony-arm64@0.110.0': + optional: true + + '@oxc-minify/binding-wasm32-wasi@0.110.0': + dependencies: + '@napi-rs/wasm-runtime': 1.1.1 + optional: true + + '@oxc-minify/binding-win32-arm64-msvc@0.110.0': + optional: true + + '@oxc-minify/binding-win32-ia32-msvc@0.110.0': + optional: true + + '@oxc-minify/binding-win32-x64-msvc@0.110.0': + optional: true + + '@oxc-parser/binding-android-arm-eabi@0.112.0': + optional: true + + '@oxc-parser/binding-android-arm64@0.112.0': + optional: true + + '@oxc-parser/binding-darwin-arm64@0.112.0': + optional: true + + '@oxc-parser/binding-darwin-x64@0.112.0': + optional: true + + '@oxc-parser/binding-freebsd-x64@0.112.0': + optional: true + + '@oxc-parser/binding-linux-arm-gnueabihf@0.112.0': optional: true - '@oxc-minify/binding-linux-arm-musleabihf@0.110.0': + '@oxc-parser/binding-linux-arm-musleabihf@0.112.0': optional: true - '@oxc-minify/binding-linux-arm64-gnu@0.110.0': + '@oxc-parser/binding-linux-arm64-gnu@0.112.0': optional: true - '@oxc-minify/binding-linux-arm64-musl@0.110.0': + '@oxc-parser/binding-linux-arm64-musl@0.112.0': optional: true - '@oxc-minify/binding-linux-ppc64-gnu@0.110.0': + '@oxc-parser/binding-linux-ppc64-gnu@0.112.0': optional: true - '@oxc-minify/binding-linux-riscv64-gnu@0.110.0': + '@oxc-parser/binding-linux-riscv64-gnu@0.112.0': optional: true - '@oxc-minify/binding-linux-riscv64-musl@0.110.0': + '@oxc-parser/binding-linux-riscv64-musl@0.112.0': optional: true - '@oxc-minify/binding-linux-s390x-gnu@0.110.0': + '@oxc-parser/binding-linux-s390x-gnu@0.112.0': optional: true - '@oxc-minify/binding-linux-x64-gnu@0.110.0': + '@oxc-parser/binding-linux-x64-gnu@0.112.0': optional: true - '@oxc-minify/binding-linux-x64-musl@0.110.0': + '@oxc-parser/binding-linux-x64-musl@0.112.0': optional: true - '@oxc-minify/binding-openharmony-arm64@0.110.0': + 
'@oxc-parser/binding-openharmony-arm64@0.112.0': optional: true - '@oxc-minify/binding-wasm32-wasi@0.110.0': + '@oxc-parser/binding-wasm32-wasi@0.112.0': dependencies: '@napi-rs/wasm-runtime': 1.1.1 optional: true - '@oxc-minify/binding-win32-arm64-msvc@0.110.0': + '@oxc-parser/binding-win32-arm64-msvc@0.112.0': optional: true - '@oxc-minify/binding-win32-ia32-msvc@0.110.0': + '@oxc-parser/binding-win32-ia32-msvc@0.112.0': optional: true - '@oxc-minify/binding-win32-x64-msvc@0.110.0': + '@oxc-parser/binding-win32-x64-msvc@0.112.0': optional: true - '@oxc-project/types@0.111.0': {} + '@oxc-project/types@0.112.0': {} '@oxc-transform/binding-android-arm-eabi@0.110.0': optional: true + '@oxc-transform/binding-android-arm-eabi@0.112.0': + optional: true + '@oxc-transform/binding-android-arm64@0.110.0': optional: true + '@oxc-transform/binding-android-arm64@0.112.0': + optional: true + '@oxc-transform/binding-darwin-arm64@0.110.0': optional: true + '@oxc-transform/binding-darwin-arm64@0.112.0': + optional: true + '@oxc-transform/binding-darwin-x64@0.110.0': optional: true + '@oxc-transform/binding-darwin-x64@0.112.0': + optional: true + '@oxc-transform/binding-freebsd-x64@0.110.0': optional: true + '@oxc-transform/binding-freebsd-x64@0.112.0': + optional: true + '@oxc-transform/binding-linux-arm-gnueabihf@0.110.0': optional: true + '@oxc-transform/binding-linux-arm-gnueabihf@0.112.0': + optional: true + '@oxc-transform/binding-linux-arm-musleabihf@0.110.0': optional: true + '@oxc-transform/binding-linux-arm-musleabihf@0.112.0': + optional: true + '@oxc-transform/binding-linux-arm64-gnu@0.110.0': optional: true + '@oxc-transform/binding-linux-arm64-gnu@0.112.0': + optional: true + '@oxc-transform/binding-linux-arm64-musl@0.110.0': optional: true + '@oxc-transform/binding-linux-arm64-musl@0.112.0': + optional: true + '@oxc-transform/binding-linux-ppc64-gnu@0.110.0': optional: true + '@oxc-transform/binding-linux-ppc64-gnu@0.112.0': + optional: true + '@oxc-transform/binding-linux-riscv64-gnu@0.110.0': optional: true + '@oxc-transform/binding-linux-riscv64-gnu@0.112.0': + optional: true + '@oxc-transform/binding-linux-riscv64-musl@0.110.0': optional: true + '@oxc-transform/binding-linux-riscv64-musl@0.112.0': + optional: true + '@oxc-transform/binding-linux-s390x-gnu@0.110.0': optional: true + '@oxc-transform/binding-linux-s390x-gnu@0.112.0': + optional: true + '@oxc-transform/binding-linux-x64-gnu@0.110.0': optional: true + '@oxc-transform/binding-linux-x64-gnu@0.112.0': + optional: true + '@oxc-transform/binding-linux-x64-musl@0.110.0': optional: true + '@oxc-transform/binding-linux-x64-musl@0.112.0': + optional: true + '@oxc-transform/binding-openharmony-arm64@0.110.0': optional: true + '@oxc-transform/binding-openharmony-arm64@0.112.0': + optional: true + '@oxc-transform/binding-wasm32-wasi@0.110.0': dependencies: '@napi-rs/wasm-runtime': 1.1.1 optional: true + '@oxc-transform/binding-wasm32-wasi@0.112.0': + dependencies: + '@napi-rs/wasm-runtime': 1.1.1 + optional: true + '@oxc-transform/binding-win32-arm64-msvc@0.110.0': optional: true + '@oxc-transform/binding-win32-arm64-msvc@0.112.0': + optional: true + '@oxc-transform/binding-win32-ia32-msvc@0.110.0': optional: true + '@oxc-transform/binding-win32-ia32-msvc@0.112.0': + optional: true + '@oxc-transform/binding-win32-x64-msvc@0.110.0': optional: true + '@oxc-transform/binding-win32-x64-msvc@0.112.0': + optional: true + '@parcel/watcher-android-arm64@2.5.1': optional: true @@ -11456,52 +13125,63 @@ snapshots: transitivePeerDependencies: - 
supports-color - '@rolldown/binding-android-arm64@1.0.0-rc.2': + '@rolldown/binding-android-arm64@1.0.0-rc.3': optional: true - '@rolldown/binding-darwin-arm64@1.0.0-rc.2': + '@rolldown/binding-darwin-arm64@1.0.0-rc.3': optional: true - '@rolldown/binding-darwin-x64@1.0.0-rc.2': + '@rolldown/binding-darwin-x64@1.0.0-rc.3': optional: true - '@rolldown/binding-freebsd-x64@1.0.0-rc.2': + '@rolldown/binding-freebsd-x64@1.0.0-rc.3': optional: true - '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.2': + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.3': optional: true - '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.2': + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.3': optional: true - '@rolldown/binding-linux-arm64-musl@1.0.0-rc.2': + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.3': optional: true - '@rolldown/binding-linux-x64-gnu@1.0.0-rc.2': + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.3': optional: true - '@rolldown/binding-linux-x64-musl@1.0.0-rc.2': + '@rolldown/binding-linux-x64-musl@1.0.0-rc.3': optional: true - '@rolldown/binding-openharmony-arm64@1.0.0-rc.2': + '@rolldown/binding-openharmony-arm64@1.0.0-rc.3': optional: true - '@rolldown/binding-wasm32-wasi@1.0.0-rc.2': + '@rolldown/binding-wasm32-wasi@1.0.0-rc.3': dependencies: '@napi-rs/wasm-runtime': 1.1.1 optional: true - '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.2': + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.3': optional: true - '@rolldown/binding-win32-x64-msvc@1.0.0-rc.2': + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.3': optional: true '@rolldown/pluginutils@1.0.0-beta.40': {} '@rolldown/pluginutils@1.0.0-beta.53': {} - '@rolldown/pluginutils@1.0.0-rc.2': {} + '@rolldown/pluginutils@1.0.0-rc.3': {} + + '@rollup/plugin-babel@6.1.0(@babel/core@7.28.5)(@types/babel__core@7.20.5)(rollup@4.53.3)': + dependencies: + '@babel/core': 7.28.5 + '@babel/helper-module-imports': 7.27.1 + '@rollup/pluginutils': 5.3.0(rollup@4.53.3) + optionalDependencies: + '@types/babel__core': 7.20.5 + rollup: 4.53.3 + transitivePeerDependencies: + - supports-color '@rollup/plugin-commonjs@22.0.2(rollup@2.79.2)': dependencies: @@ -11521,6 +13201,14 @@ snapshots: picomatch: 2.3.1 rollup: 2.79.2 + '@rollup/pluginutils@5.3.0(rollup@4.53.3)': + dependencies: + '@types/estree': 1.0.8 + estree-walker: 2.0.2 + picomatch: 4.0.3 + optionalDependencies: + rollup: 4.53.3 + '@rollup/rollup-android-arm-eabi@4.53.3': optional: true @@ -11602,7 +13290,7 @@ snapshots: dependencies: '@scalar/helpers': 0.2.10 nanoid: 5.1.6 - type-fest: 5.4.2 + type-fest: 5.4.3 zod: 4.3.6 '@sec-ant/readable-stream@0.4.1': {} @@ -12226,13 +13914,13 @@ snapshots: - solid-js - utf-8-validate - '@tanstack/react-form@1.28.0(@tanstack/react-start@1.157.17(crossws@0.4.3(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + '@tanstack/react-form@1.28.0(@tanstack/react-start@1.157.17(crossws@0.4.4(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@tanstack/form-core': 1.28.0 '@tanstack/react-store': 0.8.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react: 19.2.4 optionalDependencies: - '@tanstack/react-start': 
1.157.17(crossws@0.4.3(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + '@tanstack/react-start': 1.157.17(crossws@0.4.4(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) transitivePeerDependencies: - react-dom @@ -12302,6 +13990,18 @@ snapshots: transitivePeerDependencies: - crossws + '@tanstack/react-start-server@1.157.17(crossws@0.4.4(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@tanstack/history': 1.154.14 + '@tanstack/react-router': 1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@tanstack/router-core': 1.157.16 + '@tanstack/start-client-core': 1.157.16 + '@tanstack/start-server-core': 1.157.16(crossws@0.4.4(srvx@0.10.1)) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + transitivePeerDependencies: + - crossws + '@tanstack/react-start@1.157.17(crossws@0.4.3(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2))': dependencies: '@tanstack/react-router': 1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -12322,6 +14022,26 @@ snapshots: - vite-plugin-solid - webpack + '@tanstack/react-start@1.157.17(crossws@0.4.4(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2))': + dependencies: + '@tanstack/react-router': 1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@tanstack/react-start-client': 1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@tanstack/react-start-server': 1.157.17(crossws@0.4.4(srvx@0.10.1))(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@tanstack/router-utils': 1.154.7 + '@tanstack/start-client-core': 1.157.16 + '@tanstack/start-plugin-core': 1.157.17(@tanstack/react-router@1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(crossws@0.4.4(srvx@0.10.1))(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + '@tanstack/start-server-core': 1.157.16(crossws@0.4.4(srvx@0.10.1)) + pathe: 2.0.3 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + vite: 7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2) + transitivePeerDependencies: + - '@rsbuild/core' + - crossws + - supports-color + - vite-plugin-solid + - webpack + '@tanstack/react-store@0.8.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@tanstack/store': 0.8.0 @@ -12442,6 +14162,37 @@ snapshots: - vite-plugin-solid - webpack + '@tanstack/start-plugin-core@1.157.17(@tanstack/react-router@1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(crossws@0.4.4(srvx@0.10.1))(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2))': + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/core': 7.28.5 + '@babel/types': 7.28.5 + '@rolldown/pluginutils': 1.0.0-beta.40 + '@tanstack/router-core': 1.157.16 + '@tanstack/router-generator': 1.157.16 + '@tanstack/router-plugin': 1.157.17(@tanstack/react-router@1.157.17(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + '@tanstack/router-utils': 1.154.7 + '@tanstack/start-client-core': 1.157.16 + 
'@tanstack/start-server-core': 1.157.16(crossws@0.4.4(srvx@0.10.1)) + babel-dead-code-elimination: 1.0.11 + cheerio: 1.1.2 + exsolve: 1.0.8 + pathe: 2.0.3 + srvx: 0.10.1 + tinyglobby: 0.2.15 + ufo: 1.6.1 + vite: 7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2) + vitefu: 1.1.1(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + xmlbuilder2: 4.0.3 + zod: 3.25.76 + transitivePeerDependencies: + - '@rsbuild/core' + - '@tanstack/react-router' + - crossws + - supports-color + - vite-plugin-solid + - webpack + '@tanstack/start-server-core@1.157.16(crossws@0.4.3(srvx@0.10.1))': dependencies: '@tanstack/history': 1.154.14 @@ -12454,6 +14205,18 @@ snapshots: transitivePeerDependencies: - crossws + '@tanstack/start-server-core@1.157.16(crossws@0.4.4(srvx@0.10.1))': + dependencies: + '@tanstack/history': 1.154.14 + '@tanstack/router-core': 1.157.16 + '@tanstack/start-client-core': 1.157.16 + '@tanstack/start-storage-context': 1.157.16 + h3-v2: h3@2.0.1-rc.11(crossws@0.4.4(srvx@0.10.1)) + seroval: 1.5.0 + tiny-invariant: 1.3.3 + transitivePeerDependencies: + - crossws + '@tanstack/start-storage-context@1.157.16': dependencies: '@tanstack/router-core': 1.157.16 @@ -12534,6 +14297,27 @@ snapshots: dependencies: '@types/deep-eql': 4.0.2 + '@types/d3-color@3.1.3': {} + + '@types/d3-drag@3.0.7': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-interpolate@3.0.4': + dependencies: + '@types/d3-color': 3.1.3 + + '@types/d3-selection@3.0.11': {} + + '@types/d3-transition@3.0.9': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-zoom@3.0.8': + dependencies: + '@types/d3-interpolate': 3.0.4 + '@types/d3-selection': 3.0.11 + '@types/debug@4.1.12': dependencies: '@types/ms': 2.1.0 @@ -12604,6 +14388,10 @@ snapshots: '@types/vscode@1.108.1': {} + '@types/ws@8.18.1': + dependencies: + '@types/node': 24.3.1 + '@types/yargs-parser@21.0.3': {} '@typescript-eslint/eslint-plugin@8.54.0(@typescript-eslint/parser@8.54.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': @@ -12832,14 +14620,14 @@ snapshots: msw: 2.12.7(@types/node@22.18.12)(typescript@5.9.3) vite: 7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2) - '@vitest/mocker@4.1.0-beta.1(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3))(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2))': + '@vitest/mocker@4.1.0-beta.1(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3))(vite@7.3.1(@types/node@24.3.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2))': dependencies: '@vitest/spy': 4.1.0-beta.1 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: msw: 2.12.7(@types/node@24.3.1)(typescript@5.9.3) - vite: 7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2) + vite: 7.3.1(@types/node@24.3.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2) '@vitest/pretty-format@4.0.17': dependencies: @@ -12850,10 +14638,6 @@ snapshots: dependencies: tinyrainbow: 3.0.3 - '@vitest/pretty-format@4.1.0-beta.2': - dependencies: - tinyrainbow: 3.0.3 - '@vitest/runner@4.0.17': dependencies: '@vitest/utils': 4.0.17 @@ -12865,11 +14649,6 @@ snapshots: '@vitest/utils': 4.1.0-beta.1 pathe: 2.0.3 - '@vitest/runner@4.1.0-beta.2': - dependencies: - '@vitest/utils': 4.1.0-beta.2 - pathe: 2.0.3 - '@vitest/snapshot@4.0.17': 
dependencies: '@vitest/pretty-format': 4.0.17 @@ -12883,12 +14662,6 @@ snapshots: magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/snapshot@4.1.0-beta.2': - dependencies: - '@vitest/pretty-format': 4.1.0-beta.2 - magic-string: 0.30.21 - pathe: 2.0.3 - '@vitest/spy@4.0.17': optional: true @@ -12928,11 +14701,6 @@ snapshots: '@vitest/pretty-format': 4.1.0-beta.1 tinyrainbow: 3.0.3 - '@vitest/utils@4.1.0-beta.2': - dependencies: - '@vitest/pretty-format': 4.1.0-beta.2 - tinyrainbow: 3.0.3 - '@vscode/vsce-sign-alpine-arm64@2.0.6': optional: true @@ -13040,6 +14808,29 @@ snapshots: '@vue/shared@3.5.25': {} + '@xyflow/react@12.10.0(@types/react@19.2.10)(immer@9.0.21)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@xyflow/system': 0.0.74 + classcat: 5.0.5 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + zustand: 4.5.7(@types/react@19.2.10)(immer@9.0.21)(react@19.2.4) + transitivePeerDependencies: + - '@types/react' + - immer + + '@xyflow/system@0.0.74': + dependencies: + '@types/d3-drag': 3.0.7 + '@types/d3-interpolate': 3.0.4 + '@types/d3-selection': 3.0.11 + '@types/d3-transition': 3.0.9 + '@types/d3-zoom': 3.0.8 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-zoom: 3.0.0 + abort-controller@3.0.0: dependencies: event-target-shim: 5.0.1 @@ -13286,8 +15077,7 @@ snapshots: buffer-equal-constant-time@1.0.1: {} - buffer-from@1.1.2: - optional: true + buffer-from@1.1.2: {} buffer@5.7.1: dependencies: @@ -13369,7 +15159,7 @@ snapshots: parse5: 7.3.0 parse5-htmlparser2-tree-adapter: 7.1.0 parse5-parser-stream: 7.1.2 - undici: 7.16.0 + undici: 7.18.2 whatwg-mimetype: 4.0.0 chokidar@3.6.0: @@ -13399,6 +15189,8 @@ snapshots: dependencies: clsx: 2.1.1 + classcat@5.0.5: {} + clean-regexp@1.0.0: dependencies: escape-string-regexp: 1.0.5 @@ -13523,6 +15315,12 @@ snapshots: optionalDependencies: typescript: 5.9.3 + cross-fetch@4.1.0: + dependencies: + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + cross-spawn@7.0.6: dependencies: path-key: 3.1.1 @@ -13533,6 +15331,10 @@ snapshots: optionalDependencies: srvx: 0.10.1 + crossws@0.4.4(srvx@0.10.1): + optionalDependencies: + srvx: 0.10.1 + css-select@5.2.2: dependencies: boolbase: 1.0.0 @@ -13562,6 +15364,42 @@ snapshots: csstype@3.2.3: {} + d3-color@3.1.0: {} + + d3-dispatch@3.0.1: {} + + d3-drag@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-selection: 3.0.0 + + d3-ease@3.0.1: {} + + d3-interpolate@3.0.1: + dependencies: + d3-color: 3.1.0 + + d3-selection@3.0.0: {} + + d3-timer@3.0.1: {} + + d3-transition@3.0.1(d3-selection@3.0.0): + dependencies: + d3-color: 3.1.0 + d3-dispatch: 3.0.1 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-timer: 3.0.1 + + d3-zoom@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + data-uri-to-buffer@2.0.2: {} data-uri-to-buffer@4.0.1: {} @@ -13590,9 +15428,11 @@ snapshots: es-errors: 1.3.0 is-data-view: 1.0.2 - db0@0.3.4(better-sqlite3@12.5.0): + db0@0.3.4(@libsql/client@0.17.0)(better-sqlite3@12.5.0)(drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0)): optionalDependencies: + '@libsql/client': 0.17.0 better-sqlite3: 12.5.0 + drizzle-orm: 0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0) debug@4.4.3(supports-color@10.2.2): dependencies: @@ -13657,6 +15497,8 @@ snapshots: detect-libc@1.0.3: {} + detect-libc@2.0.2: {} + detect-libc@2.1.2: {} detect-node-es@1.1.0: {} @@ -13691,6 +15533,21 @@ 
snapshots: dotenv@17.2.3: {} + drizzle-kit@0.31.8: + dependencies: + '@drizzle-team/brocli': 0.10.2 + '@esbuild-kit/esm-loader': 2.6.5 + esbuild: 0.25.12 + esbuild-register: 3.6.0(esbuild@0.25.12) + transitivePeerDependencies: + - supports-color + + drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0): + optionalDependencies: + '@libsql/client': 0.17.0 + '@opentelemetry/api': 1.9.0 + better-sqlite3: 12.5.0 + dts-resolver@2.1.3: {} dunder-proto@1.0.1: @@ -13869,6 +15726,67 @@ snapshots: esast-util-from-estree: 2.0.0 vfile-message: 4.0.3 + esbuild-register@3.6.0(esbuild@0.25.12): + dependencies: + debug: 4.4.3(supports-color@10.2.2) + esbuild: 0.25.12 + transitivePeerDependencies: + - supports-color + + esbuild@0.18.20: + optionalDependencies: + '@esbuild/android-arm': 0.18.20 + '@esbuild/android-arm64': 0.18.20 + '@esbuild/android-x64': 0.18.20 + '@esbuild/darwin-arm64': 0.18.20 + '@esbuild/darwin-x64': 0.18.20 + '@esbuild/freebsd-arm64': 0.18.20 + '@esbuild/freebsd-x64': 0.18.20 + '@esbuild/linux-arm': 0.18.20 + '@esbuild/linux-arm64': 0.18.20 + '@esbuild/linux-ia32': 0.18.20 + '@esbuild/linux-loong64': 0.18.20 + '@esbuild/linux-mips64el': 0.18.20 + '@esbuild/linux-ppc64': 0.18.20 + '@esbuild/linux-riscv64': 0.18.20 + '@esbuild/linux-s390x': 0.18.20 + '@esbuild/linux-x64': 0.18.20 + '@esbuild/netbsd-x64': 0.18.20 + '@esbuild/openbsd-x64': 0.18.20 + '@esbuild/sunos-x64': 0.18.20 + '@esbuild/win32-arm64': 0.18.20 + '@esbuild/win32-ia32': 0.18.20 + '@esbuild/win32-x64': 0.18.20 + + esbuild@0.25.12: + optionalDependencies: + '@esbuild/aix-ppc64': 0.25.12 + '@esbuild/android-arm': 0.25.12 + '@esbuild/android-arm64': 0.25.12 + '@esbuild/android-x64': 0.25.12 + '@esbuild/darwin-arm64': 0.25.12 + '@esbuild/darwin-x64': 0.25.12 + '@esbuild/freebsd-arm64': 0.25.12 + '@esbuild/freebsd-x64': 0.25.12 + '@esbuild/linux-arm': 0.25.12 + '@esbuild/linux-arm64': 0.25.12 + '@esbuild/linux-ia32': 0.25.12 + '@esbuild/linux-loong64': 0.25.12 + '@esbuild/linux-mips64el': 0.25.12 + '@esbuild/linux-ppc64': 0.25.12 + '@esbuild/linux-riscv64': 0.25.12 + '@esbuild/linux-s390x': 0.25.12 + '@esbuild/linux-x64': 0.25.12 + '@esbuild/netbsd-arm64': 0.25.12 + '@esbuild/netbsd-x64': 0.25.12 + '@esbuild/openbsd-arm64': 0.25.12 + '@esbuild/openbsd-x64': 0.25.12 + '@esbuild/openharmony-arm64': 0.25.12 + '@esbuild/sunos-x64': 0.25.12 + '@esbuild/win32-arm64': 0.25.12 + '@esbuild/win32-ia32': 0.25.12 + '@esbuild/win32-x64': 0.25.12 + esbuild@0.27.0: optionalDependencies: '@esbuild/aix-ppc64': 0.27.0 @@ -13954,7 +15872,7 @@ snapshots: eslint-flat-config-utils@3.0.0: dependencies: - '@eslint/config-helpers': 0.5.1 + '@eslint/config-helpers': 0.5.2 pathe: 2.0.3 eslint-formatting-reporter@0.0.0(eslint@9.39.2(jiti@2.6.1)): @@ -14198,7 +16116,7 @@ snapshots: eslint-plugin-toml@1.0.3(eslint@9.39.2(jiti@2.6.1)): dependencies: - '@eslint/core': 1.0.1 + '@eslint/core': 1.1.0 '@eslint/plugin-kit': 0.5.1 debug: 4.4.3(supports-color@10.2.2) eslint: 9.39.2(jiti@2.6.1) @@ -14250,7 +16168,7 @@ snapshots: eslint-plugin-yml@3.0.0(eslint@9.39.2(jiti@2.6.1)): dependencies: - '@eslint/core': 1.0.1 + '@eslint/core': 1.1.0 '@eslint/plugin-kit': 0.5.1 debug: 4.4.3(supports-color@10.2.2) diff-sequences: 29.6.3 @@ -14342,6 +16260,10 @@ snapshots: dependencies: estraverse: 5.3.0 + esrap@2.2.2: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + esrecurse@4.3.0: dependencies: estraverse: 5.3.0 @@ -14674,9 +16596,9 @@ snapshots: tinyglobby: 0.2.15 unified: 11.0.5 unist-util-remove-position: 5.0.0 - 
unist-util-visit: 5.0.0 + unist-util-visit: 5.1.0 vfile: 6.0.3 - zod: 4.3.5 + zod: 4.3.6 optionalDependencies: '@types/react': 19.2.10 react: 19.2.4 @@ -14852,6 +16774,13 @@ snapshots: optionalDependencies: crossws: 0.4.3(srvx@0.10.1) + h3@2.0.1-rc.11(crossws@0.4.4(srvx@0.10.1)): + dependencies: + rou3: 0.7.12 + srvx: 0.10.1 + optionalDependencies: + crossws: 0.4.4(srvx@0.10.1) + has-bigints@1.1.0: {} has-flag@4.0.0: {} @@ -15286,6 +17215,8 @@ snapshots: jose@6.1.3: {} + js-base64@3.7.8: {} + js-levenshtein@1.1.6: {} js-tokens@4.0.0: {} @@ -15429,6 +17360,21 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 + libsql@0.5.22: + dependencies: + '@neon-rs/load': 0.0.4 + detect-libc: 2.0.2 + optionalDependencies: + '@libsql/darwin-arm64': 0.5.22 + '@libsql/darwin-x64': 0.5.22 + '@libsql/linux-arm-gnueabihf': 0.5.22 + '@libsql/linux-arm-musleabihf': 0.5.22 + '@libsql/linux-arm64-gnu': 0.5.22 + '@libsql/linux-arm64-musl': 0.5.22 + '@libsql/linux-x64-gnu': 0.5.22 + '@libsql/linux-x64-musl': 0.5.22 + '@libsql/win32-x64-msvc': 0.5.22 + lightningcss-android-arm64@1.30.2: optional: true @@ -15747,7 +17693,7 @@ snapshots: mdast-util-to-string: 4.0.0 micromark-util-classify-character: 2.0.1 micromark-util-decode-string: 2.0.1 - unist-util-visit: 5.0.0 + unist-util-visit: 5.1.0 zwitch: 2.0.4 mdast-util-to-string@4.0.0: @@ -16160,7 +18106,7 @@ snapshots: statuses: 2.0.2 strict-event-emitter: 0.5.1 tough-cookie: 6.0.0 - type-fest: 5.3.0 + type-fest: 5.4.3 until-async: 3.0.2 yargs: 17.7.2 optionalDependencies: @@ -16185,7 +18131,7 @@ snapshots: statuses: 2.0.2 strict-event-emitter: 0.5.1 tough-cookie: 6.0.0 - type-fest: 5.3.0 + type-fest: 5.4.3 until-async: 3.0.2 yargs: 17.7.2 optionalDependencies: @@ -16231,11 +18177,11 @@ snapshots: jsonpath-plus: 10.3.0 lodash.topath: 4.5.2 - nitro@3.0.1-alpha.2(better-sqlite3@12.5.0)(chokidar@5.0.0)(ioredis@5.8.2)(lru-cache@11.2.4)(rolldown@1.0.0-rc.2)(rollup@4.53.3)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)): + nitro@3.0.1-alpha.2(@libsql/client@0.17.0)(better-sqlite3@12.5.0)(chokidar@5.0.0)(drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0))(ioredis@5.8.2)(lru-cache@11.2.4)(rolldown@1.0.0-rc.3)(rollup@4.53.3)(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)): dependencies: consola: 3.4.2 crossws: 0.4.3(srvx@0.10.1) - db0: 0.3.4(better-sqlite3@12.5.0) + db0: 0.3.4(@libsql/client@0.17.0)(better-sqlite3@12.5.0)(drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0)) h3: 2.0.1-rc.11(crossws@0.4.3(srvx@0.10.1)) jiti: 2.6.1 nf3: 0.3.5 @@ -16246,9 +18192,9 @@ snapshots: srvx: 0.10.1 undici: 7.18.2 unenv: 2.0.0-rc.24 - unstorage: 2.0.0-alpha.5(chokidar@5.0.0)(db0@0.3.4(better-sqlite3@12.5.0))(ioredis@5.8.2)(lru-cache@11.2.4)(ofetch@2.0.0-alpha.3) + unstorage: 2.0.0-alpha.5(chokidar@5.0.0)(db0@0.3.4(@libsql/client@0.17.0)(better-sqlite3@12.5.0)(drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0)))(ioredis@5.8.2)(lru-cache@11.2.4)(ofetch@2.0.0-alpha.3) optionalDependencies: - rolldown: 1.0.0-rc.2 + rolldown: 1.0.0-rc.3 rollup: 4.53.3 vite: 7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2) transitivePeerDependencies: @@ -16473,6 +18419,31 @@ snapshots: '@oxc-minify/binding-win32-ia32-msvc': 0.110.0 '@oxc-minify/binding-win32-x64-msvc': 0.110.0 + oxc-parser@0.112.0: + dependencies: + 
'@oxc-project/types': 0.112.0 + optionalDependencies: + '@oxc-parser/binding-android-arm-eabi': 0.112.0 + '@oxc-parser/binding-android-arm64': 0.112.0 + '@oxc-parser/binding-darwin-arm64': 0.112.0 + '@oxc-parser/binding-darwin-x64': 0.112.0 + '@oxc-parser/binding-freebsd-x64': 0.112.0 + '@oxc-parser/binding-linux-arm-gnueabihf': 0.112.0 + '@oxc-parser/binding-linux-arm-musleabihf': 0.112.0 + '@oxc-parser/binding-linux-arm64-gnu': 0.112.0 + '@oxc-parser/binding-linux-arm64-musl': 0.112.0 + '@oxc-parser/binding-linux-ppc64-gnu': 0.112.0 + '@oxc-parser/binding-linux-riscv64-gnu': 0.112.0 + '@oxc-parser/binding-linux-riscv64-musl': 0.112.0 + '@oxc-parser/binding-linux-s390x-gnu': 0.112.0 + '@oxc-parser/binding-linux-x64-gnu': 0.112.0 + '@oxc-parser/binding-linux-x64-musl': 0.112.0 + '@oxc-parser/binding-openharmony-arm64': 0.112.0 + '@oxc-parser/binding-wasm32-wasi': 0.112.0 + '@oxc-parser/binding-win32-arm64-msvc': 0.112.0 + '@oxc-parser/binding-win32-ia32-msvc': 0.112.0 + '@oxc-parser/binding-win32-x64-msvc': 0.112.0 + oxc-transform@0.110.0: optionalDependencies: '@oxc-transform/binding-android-arm-eabi': 0.110.0 @@ -16496,6 +18467,29 @@ snapshots: '@oxc-transform/binding-win32-ia32-msvc': 0.110.0 '@oxc-transform/binding-win32-x64-msvc': 0.110.0 + oxc-transform@0.112.0: + optionalDependencies: + '@oxc-transform/binding-android-arm-eabi': 0.112.0 + '@oxc-transform/binding-android-arm64': 0.112.0 + '@oxc-transform/binding-darwin-arm64': 0.112.0 + '@oxc-transform/binding-darwin-x64': 0.112.0 + '@oxc-transform/binding-freebsd-x64': 0.112.0 + '@oxc-transform/binding-linux-arm-gnueabihf': 0.112.0 + '@oxc-transform/binding-linux-arm-musleabihf': 0.112.0 + '@oxc-transform/binding-linux-arm64-gnu': 0.112.0 + '@oxc-transform/binding-linux-arm64-musl': 0.112.0 + '@oxc-transform/binding-linux-ppc64-gnu': 0.112.0 + '@oxc-transform/binding-linux-riscv64-gnu': 0.112.0 + '@oxc-transform/binding-linux-riscv64-musl': 0.112.0 + '@oxc-transform/binding-linux-s390x-gnu': 0.112.0 + '@oxc-transform/binding-linux-x64-gnu': 0.112.0 + '@oxc-transform/binding-linux-x64-musl': 0.112.0 + '@oxc-transform/binding-openharmony-arm64': 0.112.0 + '@oxc-transform/binding-wasm32-wasi': 0.112.0 + '@oxc-transform/binding-win32-arm64-msvc': 0.112.0 + '@oxc-transform/binding-win32-ia32-msvc': 0.112.0 + '@oxc-transform/binding-win32-x64-msvc': 0.112.0 + p-limit@3.1.0: dependencies: yocto-queue: 0.1.0 @@ -16677,6 +18671,8 @@ snapshots: printable-characters@1.0.42: {} + promise-limit@2.7.0: {} + prompts@2.4.2: dependencies: kleur: 3.0.3 @@ -16992,7 +18988,7 @@ snapshots: reusify@1.1.0: {} - rolldown-plugin-dts@0.21.8(rolldown@1.0.0-rc.2)(typescript@5.9.3): + rolldown-plugin-dts@0.21.8(rolldown@1.0.0-rc.3)(typescript@5.9.3): dependencies: '@babel/generator': 8.0.0-beta.4 '@babel/parser': 8.0.0-beta.4 @@ -17002,30 +18998,30 @@ snapshots: dts-resolver: 2.1.3 get-tsconfig: 4.13.0 obug: 2.1.1 - rolldown: 1.0.0-rc.2 + rolldown: 1.0.0-rc.3 optionalDependencies: typescript: 5.9.3 transitivePeerDependencies: - oxc-resolver - rolldown@1.0.0-rc.2: + rolldown@1.0.0-rc.3: dependencies: - '@oxc-project/types': 0.111.0 - '@rolldown/pluginutils': 1.0.0-rc.2 + '@oxc-project/types': 0.112.0 + '@rolldown/pluginutils': 1.0.0-rc.3 optionalDependencies: - '@rolldown/binding-android-arm64': 1.0.0-rc.2 - '@rolldown/binding-darwin-arm64': 1.0.0-rc.2 - '@rolldown/binding-darwin-x64': 1.0.0-rc.2 - '@rolldown/binding-freebsd-x64': 1.0.0-rc.2 - '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.2 - '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.2 - 
'@rolldown/binding-linux-arm64-musl': 1.0.0-rc.2 - '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.2 - '@rolldown/binding-linux-x64-musl': 1.0.0-rc.2 - '@rolldown/binding-openharmony-arm64': 1.0.0-rc.2 - '@rolldown/binding-wasm32-wasi': 1.0.0-rc.2 - '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.2 - '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.2 + '@rolldown/binding-android-arm64': 1.0.0-rc.3 + '@rolldown/binding-darwin-arm64': 1.0.0-rc.3 + '@rolldown/binding-darwin-x64': 1.0.0-rc.3 + '@rolldown/binding-freebsd-x64': 1.0.0-rc.3 + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.3 + '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.3 + '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.3 + '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.3 + '@rolldown/binding-linux-x64-musl': 1.0.0-rc.3 + '@rolldown/binding-openharmony-arm64': 1.0.0-rc.3 + '@rolldown/binding-wasm32-wasi': 1.0.0-rc.3 + '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.3 + '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.3 rollup@2.79.2: optionalDependencies: @@ -17427,7 +19423,6 @@ snapshots: dependencies: buffer-from: 1.1.2 source-map: 0.6.1 - optional: true source-map@0.6.1: {} @@ -17750,8 +19745,8 @@ snapshots: import-without-cache: 0.2.5 obug: 2.1.1 picomatch: 4.0.3 - rolldown: 1.0.0-rc.2 - rolldown-plugin-dts: 0.21.8(rolldown@1.0.0-rc.2)(typescript@5.9.3) + rolldown: 1.0.0-rc.3 + rolldown-plugin-dts: 0.21.8(rolldown@1.0.0-rc.3)(typescript@5.9.3) semver: 7.7.3 tinyexec: 1.0.2 tinyglobby: 0.2.15 @@ -17823,11 +19818,7 @@ snapshots: type-fest@4.41.0: {} - type-fest@5.3.0: - dependencies: - tagged-tag: 1.0.0 - - type-fest@5.4.2: + type-fest@5.4.3: dependencies: tagged-tag: 1.0.0 @@ -17900,8 +19891,6 @@ snapshots: undici-types@7.10.0: {} - undici@7.16.0: {} - undici@7.18.2: {} unenv@2.0.0-rc.24: @@ -17937,7 +19926,7 @@ snapshots: unist-util-remove-position@5.0.0: dependencies: '@types/unist': 3.0.3 - unist-util-visit: 5.0.0 + unist-util-visit: 5.1.0 unist-util-stringify-position@4.0.0: dependencies: @@ -17948,12 +19937,6 @@ snapshots: '@types/unist': 3.0.3 unist-util-is: 6.0.0 - unist-util-visit@5.0.0: - dependencies: - '@types/unist': 3.0.3 - unist-util-is: 6.0.0 - unist-util-visit-parents: 6.0.2 - unist-util-visit@5.1.0: dependencies: '@types/unist': 3.0.3 @@ -17973,14 +19956,14 @@ snapshots: unrun@0.2.26(synckit@0.11.11): dependencies: - rolldown: 1.0.0-rc.2 + rolldown: 1.0.0-rc.3 optionalDependencies: synckit: 0.11.11 - unstorage@2.0.0-alpha.5(chokidar@5.0.0)(db0@0.3.4(better-sqlite3@12.5.0))(ioredis@5.8.2)(lru-cache@11.2.4)(ofetch@2.0.0-alpha.3): + unstorage@2.0.0-alpha.5(chokidar@5.0.0)(db0@0.3.4(@libsql/client@0.17.0)(better-sqlite3@12.5.0)(drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0)))(ioredis@5.8.2)(lru-cache@11.2.4)(ofetch@2.0.0-alpha.3): optionalDependencies: chokidar: 5.0.0 - db0: 0.3.4(better-sqlite3@12.5.0) + db0: 0.3.4(@libsql/client@0.17.0)(better-sqlite3@12.5.0)(drizzle-orm@0.45.1(@libsql/client@0.17.0)(@opentelemetry/api@1.9.0)(better-sqlite3@12.5.0)) ioredis: 5.8.2 lru-cache: 11.2.4 ofetch: 2.0.0-alpha.3 @@ -18193,7 +20176,7 @@ snapshots: vitest@4.1.0-beta.1(@opentelemetry/api@1.9.0)(@types/node@24.3.1)(@vitest/ui@4.1.0-beta.1)(jiti@2.6.1)(jsdom@27.3.0(postcss@8.5.6))(lightningcss@1.30.2)(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3))(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2): dependencies: '@vitest/expect': 4.1.0-beta.1 - '@vitest/mocker': 
4.1.0-beta.1(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3))(vite@7.3.1(@types/node@22.18.12)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) + '@vitest/mocker': 4.1.0-beta.1(msw@2.12.7(@types/node@24.3.1)(typescript@5.9.3))(vite@7.3.1(@types/node@24.3.1)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2)) '@vitest/pretty-format': 4.1.0-beta.1 '@vitest/runner': 4.1.0-beta.1 '@vitest/snapshot': 4.1.0-beta.1 @@ -18488,8 +20471,14 @@ snapshots: zod@3.25.76: {} - zod@4.3.5: {} - zod@4.3.6: {} + zustand@4.5.7(@types/react@19.2.10)(immer@9.0.21)(react@19.2.4): + dependencies: + use-sync-external-store: 1.6.0(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.10 + immer: 9.0.21 + react: 19.2.4 + zwitch@2.0.4: {} diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 87356dce8..0daf2aa2b 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -65,6 +65,16 @@ catalogs: "@luxass/msw-utils": 0.6.0 hookable: 6.0.1 cac: 6.7.14 + tinyglobby: 0.2.15 + h3: 2.0.1-rc.11 + chokidar: 5.0.0 + crossws: 0.4.4 + esrap: 2.2.2 + oxc-parser: 0.112.0 + oxc-transform: 0.112.0 + drizzle-orm: 0.45.1 + "@libsql/client": 0.17.0 + drizzle-kit: 0.31.8 build: tsdown: 0.20.1 @@ -73,7 +83,9 @@ catalogs: nanotar: 0.2.0 openapi-typescript: 7.10.1 tsx: 4.21.0 - rolldown: 1.0.0-rc.2 + rolldown: 1.0.0-rc.3 + "@rollup/plugin-babel": 6.1.0 + babel-plugin-react-compiler: 1.0.0 api: wrangler: 4.61.1 @@ -112,6 +124,7 @@ catalogs: "@tanstack/react-start": 1.157.17 "@tanstack/devtools-vite": 0.5.0 lucide-react: 0.563.0 + "@icons-pack/react-simple-icons": 13.8.0 nitro: 3.0.1-alpha.2 vite-tsconfig-paths: 6.0.5 babel-plugin-react-compiler: 1.0.0 @@ -124,6 +137,7 @@ catalogs: "@fontsource-variable/inter": 5.2.8 shiki: 3.22.0 cmdk: 1.1.1 + "@xyflow/react": 12.10.0 vscode: vscode-ext-gen: 1.5.1 diff --git a/tooling/tsconfig/base.json b/tooling/tsconfig/base.json index c2f5080f5..48b744c78 100644 --- a/tooling/tsconfig/base.json +++ b/tooling/tsconfig/base.json @@ -52,6 +52,19 @@ "./packages/fs-bridge/src/bridges/node.ts" ], + // pipeline packages + "@ucdjs/pipelines-core": ["./packages/pipelines/pipeline-core/src/index.ts"], + "@ucdjs/pipelines-artifacts": ["./packages/pipelines/pipeline-artifacts/src/index.ts"], + "@ucdjs/pipelines-graph": ["./packages/pipelines/pipeline-graph/src/index.ts"], + "@ucdjs/pipelines-executor": ["./packages/pipelines/pipeline-executor/src/index.ts"], + "@ucdjs/pipelines-loader": ["./packages/pipelines/pipeline-loader/src/index.ts"], + "@ucdjs/pipelines-ui": ["./packages/pipelines/pipeline-ui/src/index.ts"], + "@ucdjs/pipelines-ui/styles.css": [ + "./packages/pipelines/pipeline-ui/src/styles/globals.css" + ], + "@ucdjs/pipelines-server": ["./packages/pipelines/pipeline-server/src/index.ts"], + "@ucdjs/pipelines-presets": ["./packages/pipelines/pipeline-presets/src/index.ts"], + // Test utils "@ucdjs/test-utils": ["./packages/test-utils/src/index.ts"], // we use alias for test utils to avoid having to build it during development @@ -66,6 +79,9 @@ "#test-utils/matchers": [ "./packages/test-utils/src/matchers/types.d.ts" ], + "#test-utils/pipelines": [ + "./packages/test-utils/src/pipelines/index.ts" + ], // Internal Test Utils "#internal/test-utils/conditions": ["./test/utils/conditions.ts"] diff --git a/vitest.aliases.ts b/vitest.aliases.ts index 330b7fa07..7566ba4ee 100644 --- a/vitest.aliases.ts +++ b/vitest.aliases.ts @@ -27,6 +27,7 @@ export const aliases = readdirSync(fileURLToPath(new NodeURL("./packages", impor "#test-utils/msw": 
alias("test-utils") + "/msw.ts", "#test-utils/mock-store": alias("test-utils") + "/mock-store/index.ts", "#test-utils/fs-bridges": alias("test-utils") + "/fs-bridges/index.ts", + "#test-utils/pipelines": alias("test-utils") + "/pipelines/index.ts", "#test-utils": alias("test-utils") + "/index.ts", "#internal/test-utils/conditions": fileURLToPath(new NodeURL("./test/utils/conditions.ts", import.meta.url)), }); diff --git a/vitest.config.ts b/vitest.config.ts index cb2a0525e..573d9701c 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -48,6 +48,8 @@ async function createProjects(root: string): Promise } const packageProjects = await createProjects("packages"); +const pipelinePackageProjects = await createProjects("packages/pipelines"); + const appProjects = await createProjects("apps"); const hiddenLogs: string[] = []; @@ -72,7 +74,11 @@ export default defineConfig({ return false; }, - projects: [...packageProjects, ...appProjects], + projects: [ + ...packageProjects, + ...pipelinePackageProjects, + ...appProjects + ] }, esbuild: { target: "es2020" }, resolve: { alias: aliases },