diff --git a/.chronus/changes/dual-dev-prod-builds-2026-2-18.md b/.chronus/changes/dual-dev-prod-builds-2026-2-18.md new file mode 100644 index 000000000..1320a1cfb --- /dev/null +++ b/.chronus/changes/dual-dev-prod-builds-2026-2-18.md @@ -0,0 +1,7 @@ +--- +changeKind: feature +packages: + - "@alloy-js/cli" +--- + +Add `--with-dev` flag to produce both production (`dist/`) and development (`dist/dev/`) builds. Dev builds include source info from babel transforms for improved debugging. `--dev` produces only a dev build to `dist/`, and `--watch` now defaults to dev mode. Package exports use `"development"` condition so consumers can opt into dev builds via `node --conditions=development`. diff --git a/.chronus/changes/feat-trace-cli-2026-1-17-16-59-56.md b/.chronus/changes/feat-trace-cli-2026-1-17-16-59-56.md new file mode 100644 index 000000000..86245863a --- /dev/null +++ b/.chronus/changes/feat-trace-cli-2026-1-17-16-59-56.md @@ -0,0 +1,16 @@ +--- +changeKind: feature +packages: + - "@alloy-js/core" + - "@alloy-js/create" + - "@alloy-js/csharp" + - "@alloy-js/go" + - "@alloy-js/java" + - "@alloy-js/json" + - "@alloy-js/markdown" + - "@alloy-js/msbuild" + - "@alloy-js/python" + - "@alloy-js/typescript" +--- + +Ship dev sources in package for debugging. Use node's --conditions="development" flag to use this build. \ No newline at end of file diff --git a/.chronus/changes/feat-trace-cli-2026-1-17-17-6-34.md b/.chronus/changes/feat-trace-cli-2026-1-17-17-6-34.md new file mode 100644 index 000000000..b714eb1e0 --- /dev/null +++ b/.chronus/changes/feat-trace-cli-2026-1-17-17-6-34.md @@ -0,0 +1,7 @@ +--- +changeKind: feature +packages: + - "@alloy-js/babel-plugin-jsx-dom-expressions" +--- + +Pass import.meta.url to createComponent for dev builds. 
\ No newline at end of file diff --git a/.chronus/changes/trace-cli-2026-2-18.md b/.chronus/changes/trace-cli-2026-2-18.md new file mode 100644 index 000000000..347468828 --- /dev/null +++ b/.chronus/changes/trace-cli-2026-2-18.md @@ -0,0 +1,7 @@ +--- +changeKind: feature +packages: + - "@alloy-js/trace-cli" +--- + +Add `@alloy-js/trace-cli` for querying Alloy trace databases from the command line, enabling LLMs and developers to explore render trees, component stacks, effects, and output files to understand what happened during a render. diff --git a/eslint.config.js b/eslint.config.js index 4f54e96aa..891003677 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -54,6 +54,15 @@ export default tsEslint.config( eqeqeq: ["warn", "always", { null: "ignore" }], }, }, + { + /** + * CLI packages — console.log is the primary output mechanism. + */ + files: ["packages/trace-cli/**/*.ts"], + rules: { + "no-console": "off", + }, + }, { /** * Test files specific rules diff --git a/packages/babel-plugin-jsx-dom-expressions/index.js b/packages/babel-plugin-jsx-dom-expressions/index.js index bd3e74a87..c3d0f12d6 100644 --- a/packages/babel-plugin-jsx-dom-expressions/index.js +++ b/packages/babel-plugin-jsx-dom-expressions/index.js @@ -1205,8 +1205,14 @@ function transformComponent(path) { if (config.generate !== "ssr" && config.addSourceInfo) { const loc = path.node.loc; if (loc && loc.start) { + // Use import.meta.url so the path resolves to the installed location at + // runtime rather than being a hardcoded absolute path from the build machine. 
+ const importMetaUrl = t__namespace.memberExpression( + t__namespace.metaProperty(t__namespace.identifier("import"), t__namespace.identifier("meta")), + t__namespace.identifier("url"), + ); const sourceInfo = t__namespace.objectExpression([ - t__namespace.objectProperty(t__namespace.identifier("fileName"), t__namespace.stringLiteral(path.hub.file.opts.filename || "unknown")), + t__namespace.objectProperty(t__namespace.identifier("fileName"), importMetaUrl), t__namespace.objectProperty(t__namespace.identifier("lineNumber"), t__namespace.numericLiteral(loc.start.line)), t__namespace.objectProperty(t__namespace.identifier("columnNumber"), t__namespace.numericLiteral(loc.start.column + 1)) ]); diff --git a/packages/babel-plugin-jsx-dom-expressions/src/shared/component.js b/packages/babel-plugin-jsx-dom-expressions/src/shared/component.js index db32bd6b1..779a2d1ec 100644 --- a/packages/babel-plugin-jsx-dom-expressions/src/shared/component.js +++ b/packages/babel-plugin-jsx-dom-expressions/src/shared/component.js @@ -225,8 +225,14 @@ export default function transformComponent(path) { if (config.generate !== "ssr" && config.addSourceInfo) { const loc = path.node.loc; if (loc && loc.start) { + // Use import.meta.url so the path resolves to the installed location at + // runtime rather than being a hardcoded absolute path from the build machine. 
+ const importMetaUrl = t.memberExpression( + t.metaProperty(t.identifier("import"), t.identifier("meta")), + t.identifier("url"), + ); const sourceInfo = t.objectExpression([ - t.objectProperty(t.identifier("fileName"), t.stringLiteral(path.hub.file.opts.filename || "unknown")), + t.objectProperty(t.identifier("fileName"), importMetaUrl), t.objectProperty(t.identifier("lineNumber"), t.numericLiteral(loc.start.line)), t.objectProperty(t.identifier("columnNumber"), t.numericLiteral(loc.start.column + 1)) ]); diff --git a/packages/cli/src/cli.ts b/packages/cli/src/cli.ts index c3b455e80..0863ec8da 100644 --- a/packages/cli/src/cli.ts +++ b/packages/cli/src/cli.ts @@ -1,4 +1,5 @@ import { parseArgs } from "node:util"; +import { join } from "pathe"; import pc from "picocolors"; import ts from "typescript"; import { buildAllFiles } from "./babel.js"; @@ -20,6 +21,9 @@ const args = parseArgs({ "source-info": { type: "boolean", }, + "with-dev": { + type: "boolean", + }, }, }); @@ -49,10 +53,26 @@ async function build() { }); const emitResult = program.emit(); const start = new Date().getTime(); - await buildAllFiles(opts.fileNames, opts.rootDir, opts.outDir, { - sourceMaps: opts.options.sourceMap, - addSourceInfo, - }); + + if (args.values["with-dev"]) { + // Dual build: prod → dist/, dev → dist/dev/ + await buildAllFiles(opts.fileNames, opts.rootDir, opts.outDir, { + sourceMaps: opts.options.sourceMap, + addSourceInfo: false, + }); + const devOutDir = join(opts.outDir, "dev"); + await buildAllFiles(opts.fileNames, opts.rootDir, devOutDir, { + sourceMaps: opts.options.sourceMap, + addSourceInfo: true, + }); + } else { + // Single build: --dev produces dev build, default produces prod build + await buildAllFiles(opts.fileNames, opts.rootDir, opts.outDir, { + sourceMaps: opts.options.sourceMap, + addSourceInfo, + }); + } + const allDiagnostics = ts .getPreEmitDiagnostics(program as any) .concat(emitResult.diagnostics); @@ -76,7 +96,6 @@ async function build() { } function 
watchMain() { - const { addSourceInfo } = resolveBuildSettings(); const opts = getParseCommandLine(); const createProgram = ts.createSemanticDiagnosticsBuilderProgram; @@ -103,7 +122,7 @@ function watchMain() { opts.outDir, { sourceMaps: opts.options.sourceMap, - addSourceInfo, + addSourceInfo: true, }, ); } catch (e) { diff --git a/packages/core/package.json b/packages/core/package.json index cfaa14034..3e15f4efe 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -10,33 +10,39 @@ "exports": { ".": { "browser": "./dist/src/index.browser.js", - "development": "./src/index.ts", + "source": "./src/index.ts", + "development": "./dist/dev/src/index.js", "import": "./dist/src/index.js" }, "./jsx-runtime": { "types": "./dist/src/jsx-runtime.d.ts", - "development": "./src/jsx-runtime.ts", + "source": "./src/jsx-runtime.ts", + "development": "./dist/dev/src/jsx-runtime.js", "import": "./dist/src/jsx-runtime.js" }, "./testing": { - "development": "./testing/index.ts", + "source": "./testing/index.ts", + "development": "./dist/dev/testing/index.js", "import": "./dist/testing/index.js" }, "./testing/matchers": { "types": "./testing/vitest.d.ts" }, "./stc": { - "development": "./src/components/stc/index.ts", + "source": "./src/components/stc/index.ts", + "development": "./dist/dev/src/components/stc/index.js", "import": "./dist/src/components/stc/index.js" }, "./components": { - "development": "./src/components/index.ts", + "source": "./src/components/index.ts", + "development": "./dist/dev/src/components/index.js", "import": "./dist/src/components/index.js" }, "./devtools": { "types": "./dist/src/devtools-entry.d.ts", "browser": "./dist/src/devtools-entry.browser.js", - "development": "./src/devtools-entry.ts", + "source": "./src/devtools-entry.ts", + "development": "./dist/dev/src/devtools-entry.js", "import": "./dist/src/devtools-entry.js" } }, @@ -50,7 +56,7 @@ }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && pnpm run 
generate-docs", + "build": "alloy build --with-dev && pnpm run generate-docs", "clean": "rimraf dist/ .temp/", "test": "vitest run", "test:watch": "vitest -w", diff --git a/packages/core/src/debug/effects.ts b/packages/core/src/debug/effects.ts index 1b2edf4a5..3feccf222 100644 --- a/packages/core/src/debug/effects.ts +++ b/packages/core/src/debug/effects.ts @@ -4,6 +4,7 @@ import { getReactiveCreationLocation, nextReactiveId, } from "../reactivity.js"; +import { loadSourceMapSupport, resolveSourceMap } from "./source-map.js"; import { insertEdge, insertEffect, insertRef } from "./trace-writer.js"; import { isDebugEnabled, @@ -112,68 +113,6 @@ const VUE_REACTIVITY_MARKERS = [ // Fast source location capture using V8 structured CallSite API // ───────────────────────────────────────────────────────────────────────────── -// Lazily loaded findSourceMap from node:module -let findSourceMap: - | ((path: string) => - | { - findEntry: ( - line: number, - col: number, - ) => - | { - originalSource: string; - originalLine: number; - originalColumn: number; - } - | undefined; - } - | undefined) - | undefined; -let findSourceMapLoaded = false; -let realpathSync: ((path: string) => string) | undefined; -// Cache realpath lookups to avoid repeated fs calls -const realpathCache = new Map(); - -function loadFindSourceMap() { - if (findSourceMapLoaded) return; - findSourceMapLoaded = true; - // process.getBuiltinModule works in both ESM and CJS contexts - try { - const mod = process.getBuiltinModule?.("node:module") as - | typeof import("node:module") - | undefined; - if (mod && typeof mod.findSourceMap === "function") { - findSourceMap = mod.findSourceMap as typeof findSourceMap; - } - } catch { - // not available - } - try { - const fs = process.getBuiltinModule?.("node:fs") as - | typeof import("node:fs") - | undefined; - if (fs) { - realpathSync = fs.realpathSync; - } - } catch { - // not available - } -} - -function getRealPath(fileName: string): string { - if (!realpathSync) 
return fileName; - let real = realpathCache.get(fileName); - if (real === undefined) { - try { - real = realpathSync(fileName); - } catch { - real = fileName; - } - realpathCache.set(fileName, real); - } - return real; -} - function isSkipFile(fileName: string): boolean { for (const skip of STACK_SKIP) { if (fileName.includes(skip)) return true; @@ -188,25 +127,6 @@ function isVueReactivityFile(fileName: string): boolean { return false; } -function resolveSourceMap( - fileName: string, - line: number, - col: number, -): { fileName: string; line: number; col: number } { - if (!findSourceMap) return { fileName, line, col }; - // pnpm uses symlinks; findSourceMap only matches the real path - const real = getRealPath(fileName); - const map = findSourceMap(real); - if (!map) return { fileName, line, col }; - const entry = map.findEntry(line - 1, col - 1); - if (!entry) return { fileName, line, col }; - return { - fileName: entry.originalSource, - line: entry.originalLine + 1, - col: entry.originalColumn + 1, - }; -} - // V8 structured stack capture — avoids string formatting entirely const structuredPrepare = ( _err: Error, @@ -227,7 +147,7 @@ export function captureSourceLocation( skipReactives = true, ): SourceLocation | undefined { if (!isDebugEnabled()) return undefined; - loadFindSourceMap(); + loadSourceMapSupport(); const sites = captureCallSites(); diff --git a/packages/core/src/debug/render.ts b/packages/core/src/debug/render.ts index 334138dfd..c4c2fc0ec 100644 --- a/packages/core/src/debug/render.ts +++ b/packages/core/src/debug/render.ts @@ -17,6 +17,7 @@ import { getContext, untrack } from "../reactivity.js"; import type { ComponentCreator } from "../runtime/component.js"; import { flushJobsAsync } from "../scheduler.js"; import { sanitizeRecord } from "./serialize.js"; +import { resolveComponentSource } from "./source-map.js"; import { deleteDirectory, deleteOutputFile, @@ -79,7 +80,10 @@ type TrackedNode = RenderedTextTree | PrintHook; let nodeIds = new 
WeakMap(); let idToNode = new Map(); let entryIds = new WeakMap(); -let nodeKinds = new WeakMap(); +let nodeKinds = new WeakMap< + TrackedNode, + { kind: string; name?: string; source?: RenderTreeNodeInfo["source"] } +>(); let fileNodes = new Map(); let directoryNodes = new Map(); let nodeProps = new Map(); @@ -269,8 +273,12 @@ function recordNodeAdded( if (info.propsSerialized !== undefined) { nodeProps.set(id, info.propsSerialized); } - // Remember the kind so cached re-adds preserve it - nodeKinds.set(node, { kind: info.kind, name: info.name }); + // Remember the kind and source so cached re-adds preserve them + nodeKinds.set(node, { + kind: info.kind, + name: info.name, + source: info.source, + }); setEntryId(parent, index, id); insertRenderNode( id, @@ -297,8 +305,11 @@ function recordSubtreeAdded( const existingId = nodeIds.get(subtree); const isCached = existingId !== undefined; const id = isCached ? existingId : getOrCreateNodeId(subtree); - // Remember the kind so cached re-adds preserve it - nodeKinds.set(subtree, { kind: info.kind, name: info.name }); + // Merge source from previously-saved nodeKinds if the caller didn't provide one + const savedKind = nodeKinds.get(subtree); + const source = info.source ?? 
savedKind?.source; + // Remember the kind and source so cached re-adds preserve them + nodeKinds.set(subtree, { kind: info.kind, name: info.name, source }); // Track in entryIds so clearRenderTreeChildren can find and remove it if (Array.isArray(parentNode)) { const list = getEntryList(parentNode); @@ -310,9 +321,9 @@ function recordSubtreeAdded( info.kind, info.name, info.propsSerialized, - info.source?.fileName, - info.source?.lineNumber, - info.source?.columnNumber, + source?.fileName, + source?.lineNumber, + source?.columnNumber, null, undefined, ); @@ -366,7 +377,7 @@ function recordCachedSubtreeChildrenRecursively(node: RenderedTextTree) { ); } } else if (Array.isArray(child)) { - // Nested RenderedTextTree - record and recurse, preserving original kind + // Nested RenderedTextTree - record and recurse, preserving original kind and source const id = getOrCreateNodeId(child); list.push(id); const savedKind = nodeKinds.get(child); @@ -376,9 +387,9 @@ function recordCachedSubtreeChildrenRecursively(node: RenderedTextTree) { savedKind?.kind ?? "fragment", savedKind?.name, undefined, - undefined, - undefined, - undefined, + savedKind?.source?.fileName, + savedKind?.source?.lineNumber, + savedKind?.source?.columnNumber, null, undefined, ); @@ -544,6 +555,7 @@ export function beginComponent( } } const propsSerialized = serializeRenderTreeProps(propsSource); + const resolvedSource = resolveComponentSource(source); if (isExisting) { clearRenderTreeChildren(node); } else { @@ -551,7 +563,7 @@ export function beginComponent( kind: "component", name: componentName, propsSerialized, - source, + source: resolvedSource, }); } diff --git a/packages/core/src/debug/source-map.ts b/packages/core/src/debug/source-map.ts new file mode 100644 index 000000000..84e853083 --- /dev/null +++ b/packages/core/src/debug/source-map.ts @@ -0,0 +1,135 @@ +/** + * Shared source-map resolution utilities. 
+ * + * Used by both effect source capture (effects.ts) and component source + * resolution (render.ts) to convert file paths and resolve source maps. + */ + +import type { SourceLocation } from "../devtools/devtools-protocol.js"; + +// Lazily loaded findSourceMap from node:module +let findSourceMap: + | ((path: string) => + | { + findEntry: ( + line: number, + col: number, + ) => + | { + originalSource: string; + originalLine: number; + originalColumn: number; + } + | undefined; + } + | undefined) + | undefined; +let loaded = false; +let realpathSync: ((path: string) => string) | undefined; +let fileURLToPath: ((url: string | URL) => string) | undefined; +const realpathCache = new Map(); + +export function loadSourceMapSupport() { + if (loaded) return; + loaded = true; + try { + const mod = process.getBuiltinModule?.("node:module") as + | typeof import("node:module") + | undefined; + if (mod && typeof mod.findSourceMap === "function") { + findSourceMap = mod.findSourceMap as typeof findSourceMap; + } + } catch { + // not available + } + try { + const fs = process.getBuiltinModule?.("node:fs") as + | typeof import("node:fs") + | undefined; + if (fs) { + realpathSync = fs.realpathSync; + } + } catch { + // not available + } + try { + const url = process.getBuiltinModule?.("node:url") as + | typeof import("node:url") + | undefined; + if (url) { + fileURLToPath = url.fileURLToPath; + } + } catch { + // not available + } +} + +export function getRealPath(fileName: string): string { + if (!realpathSync) return fileName; + let real = realpathCache.get(fileName); + if (real === undefined) { + try { + real = realpathSync(fileName); + } catch { + real = fileName; + } + realpathCache.set(fileName, real); + } + return real; +} + +export function resolveSourceMap( + fileName: string, + line: number, + col: number, +): { fileName: string; line: number; col: number } { + if (!findSourceMap) return { fileName, line, col }; + // pnpm uses symlinks; findSourceMap only matches the 
real path + const real = getRealPath(fileName); + const map = findSourceMap(real); + if (!map) return { fileName, line, col }; + const entry = map.findEntry(line - 1, col - 1); + if (!entry) return { fileName, line, col }; + return { + fileName: entry.originalSource, + line: entry.originalLine + 1, + col: entry.originalColumn + 1, + }; +} + +function stripFileUrl(path: string): string { + if (!path.startsWith("file://")) return path; + if (fileURLToPath) return fileURLToPath(path); + return new URL(path).pathname; +} + +/** + * Resolve a component source location. Handles: + * - `file://` URLs from import.meta.url (converts to file path) + * - Source map resolution (maps compiled .js to original .tsx) + * - Plain absolute paths (legacy format, returned as-is) + */ +export function resolveComponentSource( + source: SourceLocation | undefined, +): SourceLocation | undefined { + if (!source?.fileName) return source; + loadSourceMapSupport(); + + const fileName = stripFileUrl(source.fileName); + + if (source.lineNumber != null && source.columnNumber != null) { + // Resolve through source maps to get the original .tsx path + const resolved = resolveSourceMap( + fileName, + source.lineNumber, + source.columnNumber, + ); + return { + fileName: stripFileUrl(resolved.fileName), + lineNumber: resolved.line, + columnNumber: resolved.col, + }; + } + + return { ...source, fileName }; +} diff --git a/packages/core/src/debug/trace.ts b/packages/core/src/debug/trace.ts index 02923a533..3cf8cc769 100644 --- a/packages/core/src/debug/trace.ts +++ b/packages/core/src/debug/trace.ts @@ -71,12 +71,28 @@ if (traceDbEnv) { traceDbEnv === "1" || traceDbEnv === "true" ? 
"alloy-trace.db" : traceDbEnv; try { await initTrace(traceDbPath); + // eslint-disable-next-line no-console + console.log(` Trace database: ${traceDbPath}`); } catch (e) { // eslint-disable-next-line no-console console.warn(`Failed to initialize trace database at ${traceDbPath}:`, e); } } +if (import.meta.url.includes("/dist/dev/")) { + // eslint-disable-next-line no-console + console.log("Alloy debug build loaded."); + if (process.sourceMapsEnabled) { + // eslint-disable-next-line no-console + console.log(" Source maps enabled."); + } else { + // eslint-disable-next-line no-console + console.log( + " Source maps disabled. Run with --enable-source-maps for better stack traces.", + ); + } +} + // ───────────────────────────────────────────────────────────────────────────── // Trace phases // ───────────────────────────────────────────────────────────────────────────── diff --git a/packages/core/test/browser-build.test.ts b/packages/core/test/browser-build.test.ts index bdcda295e..ab8274d53 100644 --- a/packages/core/test/browser-build.test.ts +++ b/packages/core/test/browser-build.test.ts @@ -80,7 +80,7 @@ describe("Browser Build Test", () => { expect(() => { execSync("npm run build", { cwd: testDir, stdio: "inherit" }); }).not.toThrow(); - }); + }, 10000); afterAll(() => { // Ensure testDir exists before attempting to remove it diff --git a/packages/core/vitest.config.ts b/packages/core/vitest.config.ts index ba661c0a9..519ef9a11 100644 --- a/packages/core/vitest.config.ts +++ b/packages/core/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/packages/create/package.json b/packages/create/package.json index 7ac296133..f3eb89152 100644 --- a/packages/create/package.json +++ 
b/packages/create/package.json @@ -9,7 +9,7 @@ "main": "./dist/src/index.js", "bin": "./dist/src/index.js", "scripts": { - "build": "node scripts/gen-deps.js && alloy build", + "build": "node scripts/gen-deps.js && alloy build --with-dev", "clean": "rimraf dist/ .temp/", "test": "vitest run", "test:watch": "vitest -w", diff --git a/packages/csharp/package.json b/packages/csharp/package.json index f94de57db..bd8d5288c 100644 --- a/packages/csharp/package.json +++ b/packages/csharp/package.json @@ -8,40 +8,47 @@ }, "exports": { ".": { - "development": "./src/index.ts", + "source": "./src/index.ts", + "development": "./dist/dev/src/index.js", "import": "./dist/src/index.js" }, "./stc": { - "development": "./src/components/stc/index.ts", + "source": "./src/components/stc/index.ts", + "development": "./dist/dev/src/components/stc/index.js", "import": "./dist/src/components/stc/index.js" }, "./global/*": { - "development": "./src/builtins/*/index.ts", + "source": "./src/builtins/*/index.ts", + "development": "./dist/dev/src/builtins/*/index.js", "import": "./dist/src/builtins/*/index.js" }, "./testing": { - "development": "./testing/index.ts", + "source": "./testing/index.ts", + "development": "./dist/dev/testing/index.js", "import": "./dist/testing/index.js" } }, "imports": { "#test/*": "./test/*", "#components/*": { - "development": "./src/components/*", + "source": "./src/components/*", + "development": "./dist/dev/src/components/*", "default": "./dist/src/components/*" }, "#createLibrary": { - "development": "./src/create-library.ts", + "source": "./src/create-library.ts", + "development": "./dist/dev/src/create-library.js", "default": "./dist/src/create-library.js" }, "#builtins": { - "development": "./src/builtins.ts", + "source": "./src/builtins.ts", + "development": "./dist/dev/src/builtins.js", "default": "./dist/src/builtins.js" } }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && pnpm run generate-docs", + "build": "alloy build 
--with-dev && pnpm run generate-docs", "clean": "rimraf dist/ .temp/", "test:watch": "vitest -w", "watch": "alloy build --watch", diff --git a/packages/csharp/vitest.config.ts b/packages/csharp/vitest.config.ts index a98451981..5fa2542c0 100644 --- a/packages/csharp/vitest.config.ts +++ b/packages/csharp/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/packages/go/package.json b/packages/go/package.json index 552d8c725..cb09b8227 100644 --- a/packages/go/package.json +++ b/packages/go/package.json @@ -8,24 +8,27 @@ }, "exports": { ".": { - "development": "./src/index.ts", + "source": "./src/index.ts", + "development": "./dist/dev/src/index.js", "import": "./dist/src/index.js" }, "./stc": { - "development": "./src/components/stc/index.ts", + "source": "./src/components/stc/index.ts", + "development": "./dist/dev/src/components/stc/index.js", "import": "./dist/src/components/stc/index.js" } }, "imports": { "#test/*": "./test/*", "#components/*": { - "development": "./src/components/*", + "source": "./src/components/*", + "development": "./dist/dev/src/components/*", "default": "./dist/src/components/*" } }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && pnpm run generate-docs", + "build": "alloy build --with-dev && pnpm run generate-docs", "clean": "rimraf dist/ .temp/", "test:watch": "vitest -w", "watch": "alloy build --watch", diff --git a/packages/go/vitest.config.ts b/packages/go/vitest.config.ts index a98451981..5fa2542c0 100644 --- a/packages/go/vitest.config.ts +++ b/packages/go/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + 
conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/packages/java/package.json b/packages/java/package.json index cf9cbf360..87b706acd 100644 --- a/packages/java/package.json +++ b/packages/java/package.json @@ -8,17 +8,19 @@ }, "exports": { ".": { - "development": "./src/index.ts", + "source": "./src/index.ts", + "development": "./dist/dev/src/index.js", "import": "./dist/src/index.js" }, "./stc": { - "development": "./src/components/stc/index.ts", + "source": "./src/components/stc/index.ts", + "development": "./dist/dev/src/components/stc/index.js", "import": "./dist/src/components/stc/index.js" } }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && pnpm run generate-docs", + "build": "alloy build --with-dev && pnpm run generate-docs", "clean": "rimraf dist/ .temp/", "test:watch": "vitest -w", "watch": "alloy build --watch", diff --git a/packages/java/vitest.config.ts b/packages/java/vitest.config.ts index a98451981..5fa2542c0 100644 --- a/packages/java/vitest.config.ts +++ b/packages/java/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/packages/json/package.json b/packages/json/package.json index e7780ba80..b8123feb8 100644 --- a/packages/json/package.json +++ b/packages/json/package.json @@ -8,11 +8,13 @@ }, "exports": { ".": { - "development": "./src/index.ts", + "source": "./src/index.ts", + "development": "./dist/dev/src/index.js", "import": "./dist/src/index.js" }, "./stc": { - "development": "./src/components/stc/index.ts", + "source": "./src/components/stc/index.ts", + "development": "./dist/dev/src/components/stc/index.js", "import": 
"./dist/src/components/stc/index.js" } }, @@ -21,7 +23,7 @@ }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && pnpm run generate-docs", + "build": "alloy build --with-dev && pnpm run generate-docs", "clean": "rimraf dist/ .temp/", "test:watch": "vitest -w", "watch": "alloy build --watch", diff --git a/packages/json/vitest.config.ts b/packages/json/vitest.config.ts index a98451981..5fa2542c0 100644 --- a/packages/json/vitest.config.ts +++ b/packages/json/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/packages/markdown/package.json b/packages/markdown/package.json index 443e8405f..102b6eb6f 100644 --- a/packages/markdown/package.json +++ b/packages/markdown/package.json @@ -8,17 +8,19 @@ }, "exports": { ".": { - "development": "./src/index.ts", + "source": "./src/index.ts", + "development": "./dist/dev/src/index.js", "import": "./dist/src/index.js" }, "./stc": { - "development": "./src/components/stc/index.ts", + "source": "./src/components/stc/index.ts", + "development": "./dist/dev/src/components/stc/index.js", "import": "./dist/src/components/stc/index.js" } }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && npm run generate-docs", + "build": "alloy build --with-dev && npm run generate-docs", "clean": "rimraf dist/ .temp/", "watch": "alloy build --watch", "test": "vitest run", diff --git a/packages/markdown/vitest.config.ts b/packages/markdown/vitest.config.ts index ba661c0a9..519ef9a11 100644 --- a/packages/markdown/vitest.config.ts +++ b/packages/markdown/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + 
conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/packages/msbuild/package.json b/packages/msbuild/package.json index d9c1a6369..6d70a0207 100644 --- a/packages/msbuild/package.json +++ b/packages/msbuild/package.json @@ -8,28 +8,32 @@ }, "exports": { "./components": { - "development": "./src/components/index.tsx", + "source": "./src/components/index.tsx", + "development": "./dist/dev/src/components/index.js", "import": "./dist/src/components/index.js" } }, "imports": { "#test/*": "./test/*", "#components/*": { - "development": "./src/components/*", + "source": "./src/components/*", + "development": "./dist/dev/src/components/*", "default": "./dist/src/components/*" }, "#createLibrary": { - "development": "./src/create-library.ts", + "source": "./src/create-library.ts", + "development": "./dist/dev/src/create-library.js", "default": "./dist/src/create-library.js" }, "#builtins": { - "development": "./src/builtins.ts", + "source": "./src/builtins.ts", + "development": "./dist/dev/src/builtins.js", "default": "./dist/src/builtins.js" } }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && pnpm run generate-docs", + "build": "alloy build --with-dev && pnpm run generate-docs", "clean": "rimraf dist/ .temp/", "test:watch": "vitest -w", "watch": "alloy build --watch", diff --git a/packages/msbuild/vitest.config.ts b/packages/msbuild/vitest.config.ts index a98451981..5fa2542c0 100644 --- a/packages/msbuild/vitest.config.ts +++ b/packages/msbuild/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/packages/python/package.json b/packages/python/package.json index c89825686..d876da73a 
100644 --- a/packages/python/package.json +++ b/packages/python/package.json @@ -8,17 +8,19 @@ }, "exports": { ".": { - "development": "./src/index.ts", + "source": "./src/index.ts", + "development": "./dist/dev/src/index.js", "import": "./dist/src/index.js" }, "./stc": { - "development": "./src/components/stc/index.ts", + "source": "./src/components/stc/index.ts", + "development": "./dist/dev/src/components/stc/index.js", "import": "./dist/src/components/stc/index.js" } }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && pnpm run generate-docs", + "build": "alloy build --with-dev && pnpm run generate-docs", "clean": "rimraf dist/ .temp/", "test:watch": "vitest -w", "watch": "alloy build --watch", diff --git a/packages/python/vitest.config.ts b/packages/python/vitest.config.ts index ba661c0a9..519ef9a11 100644 --- a/packages/python/vitest.config.ts +++ b/packages/python/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/packages/trace-cli/package.json b/packages/trace-cli/package.json new file mode 100644 index 000000000..0c161e355 --- /dev/null +++ b/packages/trace-cli/package.json @@ -0,0 +1,26 @@ +{ + "name": "@alloy-js/trace-cli", + "version": "0.1.0", + "description": "CLI tool for querying Alloy debug trace databases", + "type": "module", + "bin": { + "alloy-trace": "./dist/cli.js" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">=22.5.0" + }, + "scripts": { + "build": "tsc -p tsconfig.json" + }, + "devDependencies": { + "@types/diff-match-patch": "^1.0.36", + "@types/node": "catalog:", + "typescript": "catalog:" + }, + "dependencies": { + "diff-match-patch": "^1.0.5" + } +} diff --git a/packages/trace-cli/readme.md b/packages/trace-cli/readme.md new 
file mode 100644 index 000000000..e6488e409 --- /dev/null +++ b/packages/trace-cli/readme.md @@ -0,0 +1,116 @@ +# @alloy-js/trace-cli + +CLI tool for querying and analyzing [Alloy](https://alloy-framework.github.io/alloy) debug trace databases. + +When Alloy renders with devtools enabled, it writes a SQLite trace database (`alloy-trace.db`) containing the full render tree, reactive graph (effects, refs, edges), symbols, scopes, scheduler activity, and output files. This CLI lets you explore that data from the terminal. + +## Requirements + +Node.js ≥ 22.5.0 (uses the built-in `node:sqlite` module via `--experimental-sqlite`). + +## Installation + +```bash +npm install -g @alloy-js/trace-cli +``` + +Or run directly from the workspace: + +```bash +npx alloy-trace --db ./alloy-trace.db stats +``` + +## Usage + +``` +alloy-trace [subcommand] [options] +``` + +### Entity commands + +| Command | Subcommands | Description | +| ----------- | ---------------------------------------------------------------------------------- | ---------------------- | +| `component` | `list`, `show `, `tree [id]`, `stats` | Render tree components | +| `effect` | `list`, `show `, `chain `, `hotspots`, `ancestry `, `subtree ` | Reactive effects | +| `ref` | `list`, `show `, `chain `, `hotspots`, `fanout `, `ownership ` | Reactive refs | +| `symbol` | `list`, `show ` | Output symbols | +| `scope` | `list`, `show ` | Output scopes | +| `file` | `list`, `show ` | Generated output files | + +### Analysis commands + +| Command | Description | +| ------------- | ---------------------------------------------- | +| `stats` | Aggregate statistics and overhead analysis | +| `errors` | List render errors with component stacks | +| `query ` | Run a raw SQL query against the trace database | + +### Options + +| Option | Description | +| ------------------------- | ---------------------------------------------- | +| `--db=` | Path to trace database (default: `./trace.db`) | +| `--json` | Output as JSON (one 
object per line) | +| `--limit=` | Limit number of results | +| `--depth=` | Max tree depth for `component tree` | +| `--source-file=` | Filter by source file path (LIKE match) | +| `--output-file=` | Filter by output file path (LIKE match) | +| `--component=` | Filter by component name | +| `--name=` | Filter by name | +| `--type=` | Filter by effect type or ref kind | +| `--min-trackers=` | Show refs tracked by at least N effects | +| `--unused` | Show only unused refs (no edges) | +| `--framework` | Show only framework-internal effects | + +## Examples + +```bash +# Overview of a trace +alloy-trace --db ./alloy-trace.db stats + +# List all components from a specific source file +alloy-trace component list --source-file=models + +# Show the full render tree +alloy-trace component tree + +# Find the most active effects +alloy-trace effect hotspots + +# Trace the reactive chain from a ref +alloy-trace ref chain 42 + +# Find refs with the most trackers +alloy-trace ref hotspots --limit=10 + +# Show unused refs (potential dead code) +alloy-trace ref list --unused + +# Walk an effect's component ancestry +alloy-trace effect ancestry 15 + +# Raw SQL query +alloy-trace query "SELECT name, COUNT(*) as n FROM render_nodes WHERE kind='component' GROUP BY name ORDER BY n DESC" + +# JSON output for scripting +alloy-trace effect list --json | jq '.name' +``` + +## Database schema + +The trace database contains these tables: + +- **effects** — Reactive computations (render effects, content effects, memos, binder effects) +- **refs** — Reactive values (refs, computed, shallow reactive) +- **edges** — Reactive graph edges (track, trigger, triggered-by) +- **render_nodes** — Render tree (components, fragments, text nodes, memos, custom contexts) +- **symbols** — Output symbol declarations +- **scopes** — Output naming scopes +- **output_files** — Generated file paths and content +- **render_errors** — Errors caught during rendering +- **effect_lifecycle** — Effect run/skip events 
+- **scheduler_jobs** — Scheduler job queue events +- **scheduler_flushes** — Scheduler flush batches +- **source_maps** — Output file source maps + +Use `alloy-trace query "SELECT sql FROM sqlite_master WHERE type='table'"` to inspect the full schema. diff --git a/packages/trace-cli/src/cli.ts b/packages/trace-cli/src/cli.ts new file mode 100644 index 000000000..25f60756f --- /dev/null +++ b/packages/trace-cli/src/cli.ts @@ -0,0 +1,130 @@ +#!/usr/bin/env -S node --experimental-sqlite --no-warnings=ExperimentalWarning +import { parseArgs } from "node:util"; +import { componentCommand } from "./commands/component.js"; +import { effectCommand } from "./commands/effect.js"; +import { runErrors } from "./commands/errors.js"; +import { fileCommand } from "./commands/file.js"; +import { runQuery } from "./commands/query.js"; +import { refCommand } from "./commands/ref.js"; +import { scopeCommand } from "./commands/scope.js"; +import { statsCommand } from "./commands/stats.js"; +import { symbolCommand } from "./commands/symbol.js"; +import { openTrace } from "./db.js"; + +const { positionals, values } = parseArgs({ + options: { + db: { type: "string", default: "./trace.db" }, + json: { type: "boolean", default: false }, + limit: { type: "string" }, + depth: { type: "string" }, + component: { type: "string" }, + "source-file": { type: "string" }, + "output-file": { type: "string" }, + name: { type: "string" }, + type: { type: "string" }, + "min-trackers": { type: "string" }, + unused: { type: "boolean", default: false }, + framework: { type: "boolean", default: false }, + "all-frames": { type: "boolean", default: false }, + }, + allowPositionals: true, + strict: false, +}); + +const [entity, subcommand, ...args] = positionals; + +if (!entity || entity === "help") { + printUsage(); + process.exit(0); +} + +const db = openTrace(values.db as string); +const opts = { + json: values.json as boolean, + limit: values.limit ? 
parseInt(values.limit as string, 10) : undefined, + depth: values.depth ? parseInt(values.depth as string, 10) : undefined, + component: values.component as string | undefined, + sourceFile: values["source-file"] as string | undefined, + outputFile: values["output-file"] as string | undefined, + name: values.name as string | undefined, + type: values.type as string | undefined, + minTrackers: + values["min-trackers"] ? + parseInt(values["min-trackers"] as string, 10) + : undefined, + unused: values.unused as boolean, + framework: values.framework as boolean, + allFrames: values["all-frames"] as boolean, +}; + +try { + switch (entity) { + case "effect": + effectCommand(db, subcommand ?? "list", args, opts); + break; + case "ref": + refCommand(db, subcommand ?? "list", args, opts); + break; + case "component": + componentCommand(db, subcommand ?? "list", args, opts); + break; + case "symbol": + symbolCommand(db, subcommand ?? "list", args, opts); + break; + case "scope": + scopeCommand(db, subcommand ?? "list", args, opts); + break; + case "file": + fileCommand(db, subcommand ?? 
"list", args, opts); + break; + case "stats": + statsCommand(db, args, opts); + break; + case "errors": + runErrors(db, opts); + break; + case "query": + runQuery(db, [subcommand, ...args].filter(Boolean), opts); + break; + default: + console.error(`Unknown command: ${entity}`); + printUsage(); + process.exit(1); + } +} finally { + db.close(); +} + +function printUsage() { + console.log(` +Usage: alloy-trace [subcommand] [options] + +Entity commands: + effect [id] Effects (reactive computations) + ref [id] Refs (reactive values) + component [id] Components (render tree nodes) + symbol [id] Symbols (output symbols) + scope [id] Scopes (output scopes) + file [path] [substring] Output files + +Analysis commands: + stats Aggregate stats and overhead analysis + errors List render errors + query Run a raw SQL query + +Options: + --db= Path to trace database (default: ./trace.db) + --json Output as JSON + --limit= Limit number of results + --depth= Max tree depth + --source-file= Filter by source file + --output-file= Filter by output file + --component= Filter by component name + --name= Filter by name + --type= Filter by type/kind + --min-trackers= Filter refs by minimum tracker count + --unused Show only unused refs + --framework Show only framework-internal effects + --all-frames Show all stack frames (including library/framework) +`); +} diff --git a/packages/trace-cli/src/commands/component.ts b/packages/trace-cli/src/commands/component.ts new file mode 100644 index 000000000..54f3506b4 --- /dev/null +++ b/packages/trace-cli/src/commands/component.ts @@ -0,0 +1,276 @@ +import { + type Db, + type Opts, + outputFileContextsCte, + printPaginationFooter, + requireId, + shortPath, +} from "../types.js"; + +export function componentCommand( + db: Db, + subcommand: string, + args: string[], + opts: Opts, +) { + switch (subcommand) { + case "list": + case "ls": + return componentList(db, opts); + case "show": + return componentShow( + db, + requireId(args, "Usage: 
alloy-trace component show "), + opts, + ); + case "tree": + return componentTree( + db, + args[0] ? parseInt(args[0], 10) : undefined, + opts, + ); + case "stats": + return componentStats(db, opts); + default: + console.error(`Unknown component subcommand: ${subcommand} +Usage: alloy-trace component [args] [options]`); + process.exit(1); + } +} + +function componentList(db: Db, opts: Opts) { + const conditions: string[] = ["rn.kind = 'component'"]; + const params: any[] = []; + + if (opts.sourceFile) { + conditions.push("rn.source_file LIKE ?"); + params.push(`%${opts.sourceFile}%`); + } + if (opts.outputFile) { + conditions.push(`rn.context_id IN ${outputFileContextsCte()}`); + params.push(`%${opts.outputFile}%`); + } + if (opts.name) { + conditions.push("rn.name LIKE ?"); + params.push(`%${opts.name}%`); + } + + const where = "WHERE " + conditions.join(" AND "); + const limit = opts.limit ?? 50; + const sql = ` + SELECT rn.id, rn.name, rn.source_file, rn.source_line, + (SELECT COUNT(*) FROM render_nodes c WHERE c.parent_id = rn.id) as children + FROM render_nodes rn ${where} + ORDER BY rn.seq + LIMIT ? + `; + const allParams = [...params, limit]; + const rows = db.prepare(sql).all(...allParams) as any[]; + + if (opts.json) { + for (const r of rows) console.log(JSON.stringify(r)); + return; + } + if (rows.length === 0) { + console.log("No components found."); + return; + } + + for (const r of rows) { + const src = + r.source_file ? 
shortPath(r.source_file) + ":" + r.source_line : ""; + console.log( + ` ${String(r.id).padStart(4)} ${(r.name || "(unnamed)").padEnd(30)} ${r.children} children ${src}`, + ); + } + + printPaginationFooter( + db, + `SELECT COUNT(*) as n FROM render_nodes rn ${where}`, + params, + limit, + rows.length, + ); +} + +function componentShow(db: Db, id: number, opts: Opts) { + const node = db + .prepare("SELECT * FROM render_nodes WHERE id = ?") + .get(id) as any; + if (!node) { + console.error(`Render node ${id} not found`); + return; + } + + if (opts.json) { + console.log(JSON.stringify(node)); + return; + } + + console.log(`Component ${id}: "${node.name}" (${node.kind})`); + if (node.source_file) + console.log( + ` Source: ${node.source_file}:${node.source_line}:${node.source_col}`, + ); + if (node.props) console.log(` Props: ${node.props}`); + if (node.context_id != null) console.log(` Context: ${node.context_id}`); + + const children = db + .prepare( + "SELECT id, kind, name FROM render_nodes WHERE parent_id = ? ORDER BY seq", + ) + .all(id) as any[]; + if (children.length > 0) { + console.log(` Children (${children.length}):`); + for (const c of children) { + console.log(` ${c.kind} ${c.name ?? ""} (id: ${c.id})`); + } + } +} + +function componentTree(db: Db, nodeId: number | undefined, opts: Opts) { + const maxDepth = opts.depth ?? 
50; + + if (opts.component) { + const nodes = db + .prepare( + "SELECT * FROM render_nodes WHERE kind = 'component' AND name LIKE ?", + ) + .all(`%${opts.component}%`) as any[]; + if (nodes.length === 0) { + console.log(`No components matching "${opts.component}"`); + return; + } + for (const node of nodes) printNode(db, node, 0, maxDepth); + return; + } + + if (nodeId != null) { + const node = db + .prepare("SELECT * FROM render_nodes WHERE id = ?") + .get(nodeId) as any; + if (!node) { + console.error(`Node ${nodeId} not found`); + return; + } + printNode(db, node, 0, maxDepth); + return; + } + + const roots = db + .prepare("SELECT * FROM render_nodes WHERE parent_id IS NULL ORDER BY seq") + .all() as any[]; + for (const root of roots) { + if (opts.json) { + printTreeJson(db, root, maxDepth); + } else { + printNode(db, root, 0, maxDepth); + } + } +} + +function printNode(db: Db, node: any, depth: number, maxDepth: number) { + if (depth > maxDepth) return; + const indent = depth === 0 ? "" : " ".repeat(depth - 1) + "├─ "; + const name = node.name ? ` "${node.name}"` : ""; + let value = ""; + if (node.kind === "text" && node.value != null) { + const truncated = + node.value.length > 60 ? node.value.slice(0, 60) + "…" : node.value; + value = ` ${JSON.stringify(truncated)}`; + } + console.log(`${indent}${node.kind}${name}${value}`); + + const children = db + .prepare("SELECT * FROM render_nodes WHERE parent_id = ? ORDER BY seq") + .all(node.id) as any[]; + for (const child of children) printNode(db, child, depth + 1, maxDepth); +} + +function printTreeJson(db: Db, node: any, maxDepth: number, depth = 0) { + if (depth > maxDepth) return; + const children = db + .prepare("SELECT * FROM render_nodes WHERE parent_id = ? 
ORDER BY seq") + .all(node.id) as any[]; + const result: any = { ...node, children: [] }; + for (const child of children) + result.children.push(printTreeJson(db, child, maxDepth, depth + 1)); + if (depth === 0) console.log(JSON.stringify(result)); + return result; +} + +function componentStats(db: Db, opts: Opts) { + const limit = opts.limit ?? 25; + + // For each component type: count instances, total effects in subtree, total refs in subtree + // We do this by walking each component's context_id subtree + const componentTypes = db + .prepare( + ` + SELECT name, COUNT(*) as instances + FROM render_nodes WHERE kind = 'component' AND name IS NOT NULL + GROUP BY name ORDER BY instances DESC + LIMIT ? + `, + ) + .all(limit) as any[]; + + if (opts.json) { + // For JSON, compute full stats per type + const results = componentTypes.map((ct: any) => { + const stats = db + .prepare( + ` + WITH comp_contexts AS ( + SELECT context_id FROM render_nodes WHERE kind = 'component' AND name = ? + ), + subtree_effects AS ( + SELECT e.id, e.context_id FROM effects e WHERE e.context_id IN (SELECT context_id FROM comp_contexts) + ) + SELECT + (SELECT COUNT(*) FROM subtree_effects) as total_effects, + (SELECT COUNT(*) FROM refs WHERE created_by_effect_id IN (SELECT id FROM subtree_effects)) as total_refs + `, + ) + .get(ct.name) as any; + return { ...ct, ...stats }; + }); + for (const r of results) console.log(JSON.stringify(r)); + return; + } + + console.log( + "Per-component overhead (direct effects and refs created by the component's render effect):\n", + ); + console.log( + ` ${"Component".padEnd(38)} ${"Inst".padStart(5)} ${"Effects".padStart(8)} ${"Eff/Inst".padStart(8)} ${"Refs".padStart(8)} ${"Ref/Inst".padStart(8)}`, + ); + console.log( + ` ${"─".repeat(38)} ${"─".repeat(5)} ${"─".repeat(8)} ${"─".repeat(8)} ${"─".repeat(8)} ${"─".repeat(8)}`, + ); + + for (const ct of componentTypes) { + // Count direct effects and refs for each instance of this component type + // Direct = 
effects whose owner_context_id is the component's context_id + const stats = db + .prepare( + ` + SELECT + COUNT(DISTINCT e.id) as total_effects, + (SELECT COUNT(*) FROM refs r WHERE r.created_by_effect_id IN ( + SELECT e2.id FROM effects e2 + WHERE e2.owner_context_id IN (SELECT context_id FROM render_nodes WHERE kind = 'component' AND name = ?) + )) as total_refs + FROM effects e + WHERE e.owner_context_id IN (SELECT context_id FROM render_nodes WHERE kind = 'component' AND name = ?) + `, + ) + .get(ct.name, ct.name) as any; + + const effPerInst = (stats.total_effects / ct.instances).toFixed(1); + const refPerInst = (stats.total_refs / ct.instances).toFixed(1); + console.log( + ` ${ct.name.padEnd(38)} ${String(ct.instances).padStart(5)} ${String(stats.total_effects).padStart(8)} ${effPerInst.padStart(8)} ${String(stats.total_refs).padStart(8)} ${refPerInst.padStart(8)}`, + ); + } +} diff --git a/packages/trace-cli/src/commands/effect.ts b/packages/trace-cli/src/commands/effect.ts new file mode 100644 index 000000000..7637aeb93 --- /dev/null +++ b/packages/trace-cli/src/commands/effect.ts @@ -0,0 +1,435 @@ +import { + type Db, + type Opts, + outputFileContextsCte, + printPaginationFooter, + requireId, + shortPath, +} from "../types.js"; + +export function effectCommand( + db: Db, + subcommand: string, + args: string[], + opts: Opts, +) { + switch (subcommand) { + case "list": + case "ls": + return effectList(db, opts); + case "show": + return effectShow( + db, + requireId(args, "Usage: alloy-trace effect show "), + opts, + ); + case "chain": + return effectChain( + db, + requireId(args, "Usage: alloy-trace effect chain "), + opts, + ); + case "hotspots": + return effectHotspots(db, opts); + case "ancestry": + return effectAncestry( + db, + requireId(args, "Usage: alloy-trace effect ancestry "), + opts, + ); + case "subtree": + return effectSubtree( + db, + requireId(args, "Usage: alloy-trace effect subtree "), + opts, + ); + default: + console.error(`Unknown 
effect subcommand: ${subcommand} +Usage: alloy-trace effect [args] [options]`); + process.exit(1); + } +} + +function effectList(db: Db, opts: Opts) { + const conditions: string[] = []; + const params: any[] = []; + + if (opts.sourceFile) { + conditions.push("e.source_file LIKE ?"); + params.push(`%${opts.sourceFile}%`); + } + if (opts.outputFile) { + conditions.push(`e.context_id IN ${outputFileContextsCte()}`); + params.push(`%${opts.outputFile}%`); + } + if (opts.component) { + conditions.push("e.component LIKE ?"); + params.push(`%${opts.component}%`); + } + if (opts.name) { + conditions.push("e.name LIKE ?"); + params.push(`%${opts.name}%`); + } + if (opts.type) { + conditions.push("e.type = ?"); + params.push(opts.type); + } + if (opts.framework) { + conditions.push("e.source_file IS NULL"); + } + + const where = + conditions.length > 0 ? "WHERE " + conditions.join(" AND ") : ""; + const limit = opts.limit ?? 50; + const sql = ` + SELECT e.id, e.name, e.type, e.component, e.source_file, e.source_line, + (SELECT COUNT(*) FROM edges WHERE effect_id = e.id AND type = 'track') as tracks, + (SELECT COUNT(*) FROM edges WHERE effect_id = e.id AND type = 'trigger') as triggers + FROM effects e ${where} + ORDER BY e.id + LIMIT ? + `; + const allParams = [...params, limit]; + const rows = db.prepare(sql).all(...allParams) as any[]; + + if (opts.json) { + for (const r of rows) console.log(JSON.stringify(r)); + return; + } + if (rows.length === 0) { + console.log("No effects found."); + return; + } + + for (const r of rows) { + const src = + r.source_file ? shortPath(r.source_file) + ":" + r.source_line : ""; + const comp = r.component ? ` [${r.component}]` : ""; + const stats = `tracks ${r.tracks} refs, triggers ${r.triggers}`; + console.log( + ` ${String(r.id).padStart(4)} ${(r.name || "(anonymous)").padEnd(40)} ${r.type ?? 
""} ${stats}${comp}`, + ); + if (src) console.log(` ${src}`); + } + + printPaginationFooter( + db, + `SELECT COUNT(*) as n FROM effects e ${where}`, + params, + limit, + rows.length, + ); +} + +function effectShow(db: Db, id: number, opts: Opts) { + const effect = db + .prepare("SELECT * FROM effects WHERE id = ?") + .get(id) as any; + if (!effect) { + console.error(`Effect ${id} not found`); + return; + } + + if (opts.json) { + const tracks = db + .prepare( + "SELECT DISTINCT ref_id FROM edges WHERE effect_id = ? AND type = 'track' AND ref_id IS NOT NULL", + ) + .all(id); + const triggeredBy = db + .prepare( + "SELECT ref_id, COUNT(*) as n, GROUP_CONCAT(seq) as seqs FROM edges WHERE effect_id = ? AND type = 'triggered-by' GROUP BY ref_id", + ) + .all(id); + const triggers = db + .prepare( + "SELECT ref_id, COUNT(*) as n, GROUP_CONCAT(seq) as seqs FROM edges WHERE effect_id = ? AND type = 'trigger' GROUP BY ref_id", + ) + .all(id); + const lifecycle = db + .prepare( + "SELECT seq, event, trigger_ref_id FROM effect_lifecycle WHERE effect_id = ? ORDER BY seq", + ) + .all(id); + console.log( + JSON.stringify({ effect, tracks, triggeredBy, triggers, lifecycle }), + ); + return; + } + + console.log(`Effect ${id}: "${effect.name}" (${effect.type ?? "unknown"})`); + if (effect.source_file) + console.log( + ` Source: ${effect.source_file}:${effect.source_line}:${effect.source_col}`, + ); + console.log( + ` Context: ${effect.context_id} (owner: ${effect.owner_context_id})`, + ); + if (effect.component) console.log(` Component: ${effect.component}`); + + const tracks = db + .prepare( + "SELECT DISTINCT ref_id FROM edges WHERE effect_id = ? AND type = 'track' AND ref_id IS NOT NULL", + ) + .all(id) as any[]; + if (tracks.length > 0) { + console.log(` Tracks ${tracks.length} refs:`); + for (const t of tracks) { + const ref = db + .prepare("SELECT * FROM refs WHERE id = ?") + .get(t.ref_id) as any; + console.log( + ` ref ${t.ref_id} (${ref?.source_file ?? 
"unknown"}:${ref?.source_line ?? "?"})`, + ); + } + } + + const triggeredBy = db + .prepare( + "SELECT ref_id, COUNT(*) as n, GROUP_CONCAT(seq) as seqs FROM edges WHERE effect_id = ? AND type = 'triggered-by' GROUP BY ref_id", + ) + .all(id) as any[]; + if (triggeredBy.length > 0) { + console.log(" Triggered by:"); + for (const t of triggeredBy) + console.log(` ref ${t.ref_id} ×${t.n} (seq ${t.seqs})`); + } + + const triggers = db + .prepare( + "SELECT ref_id, COUNT(*) as n, GROUP_CONCAT(seq) as seqs FROM edges WHERE effect_id = ? AND type = 'trigger' GROUP BY ref_id", + ) + .all(id) as any[]; + if (triggers.length > 0) { + console.log(" Triggers (writes to):"); + for (const t of triggers) + console.log(` ref ${t.ref_id} ×${t.n} (seq ${t.seqs})`); + } + + // Lifecycle events + const lifecycle = db + .prepare( + "SELECT seq, event, trigger_ref_id FROM effect_lifecycle WHERE effect_id = ? AND event != 'scheduled' ORDER BY seq", + ) + .all(id) as any[]; + if (lifecycle.length > 0) { + const ranEvents = lifecycle.filter((e: any) => e.event === "ran"); + const skippedEvents = lifecycle.filter((e: any) => e.event === "skipped"); + console.log( + ` Lifecycle: ran ${ranEvents.length} times, skipped ${skippedEvents.length}`, + ); + for (const e of ranEvents) { + const triggerInfo = + e.trigger_ref_id != null ? + ` (triggered by ref ${e.trigger_ref_id})` + : ""; + console.log(` ran at seq ${e.seq}${triggerInfo}`); + } + } +} + +function effectChain(db: Db, id: number, opts: Opts) { + const effect = db + .prepare("SELECT * FROM effects WHERE id = ?") + .get(id) as any; + if (!effect) { + console.error(`Effect ${id} not found`); + return; + } + + console.log(`effect ${id} (${effect.name})`); + + const triggeredBy = db + .prepare( + ` + SELECT DISTINCT ref_id FROM edges + WHERE effect_id = ? 
AND type = 'triggered-by' AND ref_id IS NOT NULL + `, + ) + .all(id) as any[]; + + if (triggeredBy.length > 0) { + console.log(" Triggered by:"); + for (const t of triggeredBy) { + const writers = db + .prepare( + ` + SELECT DISTINCT e.id, e.name FROM edges ed JOIN effects e ON ed.effect_id = e.id + WHERE ed.ref_id = ? AND ed.type = 'trigger' + `, + ) + .all(t.ref_id) as any[]; + const writerStr = writers + .map((w: any) => `effect ${w.id} (${w.name})`) + .join(", "); + console.log(` ref ${t.ref_id} → written by ${writerStr}`); + } + } + + const runs = db + .prepare( + "SELECT seq FROM scheduler_jobs WHERE effect_id = ? AND event = 'run' ORDER BY seq", + ) + .all(id) as any[]; + if (runs.length > 0) { + console.log( + ` Scheduler runs: ${runs.map((r: any) => `seq ${r.seq}`).join(", ")}`, + ); + } +} + +function effectHotspots(db: Db, opts: Opts) { + const limit = opts.limit ?? 20; + const rows = db + .prepare( + ` + SELECT e.id, e.name, e.type, e.component, e.source_file, e.source_line, + (SELECT COUNT(*) FROM edges WHERE effect_id = e.id AND type = 'track') as tracks, + (SELECT COUNT(*) FROM edges WHERE effect_id = e.id AND type = 'trigger') as triggers, + (SELECT COUNT(*) FROM refs WHERE created_by_effect_id = e.id) as refs_created + FROM effects e + ORDER BY tracks + triggers + refs_created DESC + LIMIT ? + `, + ) + .all(limit) as any[]; + + if (opts.json) { + for (const r of rows) console.log(JSON.stringify(r)); + return; + } + if (rows.length === 0) { + console.log("No effects found."); + return; + } + + console.log("Effects with highest reactive activity:\n"); + for (const r of rows) { + const src = + r.source_file ? shortPath(r.source_file) + ":" + r.source_line : ""; + const comp = r.component ? 
` [${r.component}]` : ""; + console.log( + ` ${String(r.id).padStart(5)} ${(r.name || "(anonymous)").padEnd(35)} tracks ${r.tracks}, triggers ${r.triggers}, creates ${r.refs_created} refs${comp}`, + ); + if (src) console.log(` ${src}`); + } +} + +function effectAncestry(db: Db, id: number, _opts: Opts) { + const effect = db + .prepare("SELECT * FROM effects WHERE id = ?") + .get(id) as any; + if (!effect) { + console.error(`Effect ${id} not found`); + return; + } + + console.log(`Effect ${id}: "${effect.name}" (${effect.type ?? "unknown"})`); + if (effect.source_file) + console.log( + ` Source: ${shortPath(effect.source_file)}:${effect.source_line}`, + ); + console.log(); + + // Walk up the context ownership chain + let ctxId: number | null = effect.owner_context_id; + let depth = 0; + while (ctxId != null && depth < 50) { + const parent = db + .prepare( + "SELECT id, name, type, component, context_id, owner_context_id FROM effects WHERE context_id = ?", + ) + .get(ctxId) as any; + if (!parent) break; + const indent = " ".repeat(depth); + const comp = parent.component ? ` [${parent.component}]` : ""; + const marker = parent.component ? "●" : "│"; + console.log( + ` ${indent}${marker} ${parent.name ?? "(anonymous)"}${comp} (ctx: ${parent.context_id})`, + ); + ctxId = parent.owner_context_id; + depth++; + } +} + +function effectSubtree(db: Db, contextId: number, opts: Opts) { + const root = db + .prepare("SELECT * FROM effects WHERE context_id = ?") + .get(contextId) as any; + if (!root) { + console.error(`No effect with context_id ${contextId}`); + return; + } + + // Count all effects in the subtree via recursive CTE + const totals = db + .prepare( + ` + WITH RECURSIVE subtree(ctx_id) AS ( + SELECT ? 
+ UNION ALL + SELECT e.context_id FROM effects e JOIN subtree s ON e.owner_context_id = s.ctx_id + ) + SELECT + COUNT(*) as total_effects, + (SELECT COUNT(*) FROM refs WHERE created_by_effect_id IN (SELECT ctx_id FROM subtree)) as total_refs + FROM effects WHERE context_id IN (SELECT ctx_id FROM subtree) + `, + ) + .get(contextId) as any; + + console.log( + `Subtree of effect context ${contextId}: "${root.name}" (${root.type ?? "unknown"})`, + ); + if (root.component) console.log(` Component: ${root.component}`); + console.log(` Total effects: ${totals.total_effects}`); + console.log(` Total refs created: ${totals.total_refs}`); + console.log(); + + // Break down by effect type + const byType = db + .prepare( + ` + WITH RECURSIVE subtree(ctx_id) AS ( + SELECT ? + UNION ALL + SELECT e.context_id FROM effects e JOIN subtree s ON e.owner_context_id = s.ctx_id + ) + SELECT COALESCE(type, '(unnamed)') as type, COUNT(*) as cnt + FROM effects WHERE context_id IN (SELECT ctx_id FROM subtree) + GROUP BY type ORDER BY cnt DESC + `, + ) + .all(contextId) as any[]; + + console.log(" By type:"); + for (const r of byType) { + console.log(` ${r.type.padEnd(20)} ${r.cnt}`); + } + console.log(); + + // Break down by component (immediate children that are components) + const byComponent = db + .prepare( + ` + WITH RECURSIVE subtree(ctx_id) AS ( + SELECT ? + UNION ALL + SELECT e.context_id FROM effects e JOIN subtree s ON e.owner_context_id = s.ctx_id + ) + SELECT component, COUNT(*) as cnt + FROM effects + WHERE context_id IN (SELECT ctx_id FROM subtree) AND component IS NOT NULL + GROUP BY component ORDER BY cnt DESC + LIMIT ? + `, + ) + .all(contextId, opts.limit ?? 
20) as any[]; + + if (byComponent.length > 0) { + console.log(" By component:"); + for (const r of byComponent) { + console.log(` ${r.component.padEnd(35)} ${r.cnt} effects`); + } + } +} diff --git a/packages/trace-cli/src/commands/errors.ts b/packages/trace-cli/src/commands/errors.ts new file mode 100644 index 000000000..3d94e1753 --- /dev/null +++ b/packages/trace-cli/src/commands/errors.ts @@ -0,0 +1,37 @@ +import { type Db, type Opts, formatComponentStack } from "../types.js"; + +export function runErrors(db: Db, opts: Opts) { + const errors = db + .prepare("SELECT * FROM render_errors ORDER BY seq") + .all() as any[]; + + if (opts.json) { + for (const err of errors) console.log(JSON.stringify(err)); + return; + } + + if (errors.length === 0) { + console.log("No render errors."); + return; + } + + console.log(`${errors.length} render error(s):\n`); + for (const err of errors) { + if (err.stack) { + console.log(err.stack); + } else { + console.log(`${err.name}: ${err.message}`); + } + if (err.component_stack) { + const formatted = formatComponentStack( + err.component_stack, + opts.allFrames, + ); + if (formatted) { + console.log("Component stack:"); + console.log(formatted); + } + } + console.log(); + } +} diff --git a/packages/trace-cli/src/commands/file.ts b/packages/trace-cli/src/commands/file.ts new file mode 100644 index 000000000..ea4bc61e6 --- /dev/null +++ b/packages/trace-cli/src/commands/file.ts @@ -0,0 +1,403 @@ +import { DIFF_EQUAL, diff_match_patch } from "diff-match-patch"; +import { type Db, type Opts, formatComponentStack } from "../types.js"; + +export function fileCommand( + db: Db, + subcommand: string, + args: string[], + opts: Opts, +) { + switch (subcommand) { + case "list": + case "ls": + return fileList(db, opts); + case "show": + return fileShow(db, args[0], opts); + case "search": + return fileSearch(db, args[0], args.slice(1).join(" "), opts); + default: + console.error(`Unknown file subcommand: ${subcommand} +Usage: alloy-trace file 
[args] [options]`); + process.exit(1); + } +} + +function fileList(db: Db, opts: Opts) { + const files = db + .prepare("SELECT * FROM output_files ORDER BY seq") + .all() as any[]; + + if (opts.json) { + for (const f of files) console.log(JSON.stringify(f)); + return; + } + if (files.length === 0) { + console.log("No output files recorded."); + return; + } + + console.log(`${files.length} file(s) generated:\n`); + for (const f of files) { + const hasContent = f.content ? "✓" : "✗"; + console.log( + ` ${f.path.padEnd(40)} (filetype: ${f.filetype}, render_node: ${f.render_node_id}) [content: ${hasContent}]`, + ); + } +} + +function fileShow(db: Db, path: string | undefined, opts: Opts) { + if (!path) { + console.error("Usage: alloy-trace file show "); + process.exit(1); + } + const file = db + .prepare( + "SELECT * FROM output_files WHERE path = ? ORDER BY seq DESC LIMIT 1", + ) + .get(path) as any; + if (!file) { + console.error(`No output file matching "${path}"`); + process.exit(1); + } + if (opts.json) { + console.log(JSON.stringify(file)); + return; + } + + console.log(`── ${file.path} (${file.filetype}) ──`); + console.log(file.content ?? "(no content recorded)"); +} + +interface TextRange { + fileStart: number; + fileEnd: number; + nodeId: number; +} + +/** + * Collect text nodes in DFS (output) order by walking the tree structure. + * Children of each parent are ordered by seq, matching the render pipeline's + * output order. This is critical because text nodes may be created out of + * order (e.g. reactive references resolved later) but their position in the + * tree reflects where they appear in the file. + */ +function collectTextNodesDfs( + db: Db, + rootId: number, +): { id: number; value: string }[] { + // Load all descendant nodes + const allNodes = db + .prepare( + ` + WITH RECURSIVE desc_nodes(id) AS ( + SELECT ? 
+ UNION ALL + SELECT rn.id FROM render_nodes rn JOIN desc_nodes d ON rn.parent_id = d.id + ) + SELECT rn.id, rn.kind, rn.value, rn.parent_id, rn.seq + FROM render_nodes rn + JOIN desc_nodes d ON rn.id = d.id + `, + ) + .all(rootId) as { + id: number; + kind: string; + value: string | null; + parent_id: number | null; + seq: number; + }[]; + + // Build parent→children map, sorted by seq + const childrenMap = new Map(); + const nodeMap = new Map(); + for (const n of allNodes) { + nodeMap.set(n.id, n); + if (n.parent_id != null) { + let children = childrenMap.get(n.parent_id); + if (!children) { + children = []; + childrenMap.set(n.parent_id, children); + } + children.push(n); + } + } + for (const children of childrenMap.values()) { + children.sort((a, b) => a.seq - b.seq); + } + + // DFS walk collecting text nodes + const result: { id: number; value: string }[] = []; + const stack: number[] = [rootId]; + while (stack.length > 0) { + const id = stack.pop()!; + const node = nodeMap.get(id); + if (!node) continue; + + if (node.kind === "text" && node.value) { + result.push({ id: node.id, value: node.value }); + continue; + } + + const children = childrenMap.get(id); + if (children) { + // Push in reverse order so first child is popped first + for (let i = children.length - 1; i >= 0; i--) { + stack.push(children[i].id); + } + } + } + + return result; +} + +/** + * Build a mapping from file content offsets to text node IDs using the same + * diff-match-patch approach as the devtools. Text nodes collected in DFS + * (output) order differ from the file content by formatting whitespace; the + * diff identifies equal segments and maps them between the two coordinate + * systems. 
+ */ +function buildFileTextRanges( + db: Db, + renderNodeId: number, + fileContent: string, +): TextRange[] { + const textNodes = collectTextNodesDfs(db, renderNodeId); + + if (textNodes.length === 0 || fileContent.length === 0) return []; + + // Build node spans in concatenated-text coordinate system + const nodeSpans: { id: number; start: number; end: number }[] = []; + let cursor = 0; + for (const node of textNodes) { + nodeSpans.push({ + id: node.id, + start: cursor, + end: cursor + node.value.length, + }); + cursor += node.value.length; + } + + const nodeText = textNodes.map((n) => n.value).join(""); + + // Diff concatenated text vs file content to find equal segments + const dmp = new diff_match_patch(); + const diffs = dmp.diff_main(nodeText, fileContent); + dmp.diff_cleanupSemantic(diffs); + + const equalSegments: { + nodeStart: number; + nodeEnd: number; + fileStart: number; + fileEnd: number; + }[] = []; + let nodePos = 0; + let filePos = 0; + for (const [op, text] of diffs) { + const len = text.length; + if (op === DIFF_EQUAL) { + equalSegments.push({ + nodeStart: nodePos, + nodeEnd: nodePos + len, + fileStart: filePos, + fileEnd: filePos + len, + }); + nodePos += len; + filePos += len; + } else if (op === -1) { + nodePos += len; + } else { + filePos += len; + } + } + + // Map each node span through equal segments to get file ranges + const ranges: TextRange[] = []; + let segIdx = 0; + for (const span of nodeSpans) { + while ( + segIdx < equalSegments.length && + equalSegments[segIdx].nodeEnd <= span.start + ) { + segIdx++; + } + let idx = segIdx; + while ( + idx < equalSegments.length && + equalSegments[idx].nodeStart < span.end + ) { + const seg = equalSegments[idx]; + const start = Math.max(span.start, seg.nodeStart); + const end = Math.min(span.end, seg.nodeEnd); + if (start < end) { + const fileStart = seg.fileStart + (start - seg.nodeStart); + const fileEnd = fileStart + (end - start); + ranges.push({ fileStart, fileEnd, nodeId: span.id }); + } + 
if (seg.nodeEnd >= span.end) break; + idx++; + } + } + + ranges.sort((a, b) => a.fileStart - b.fileStart); + return ranges; +} + +/** + * Given a file offset range, find the text node(s) that produced it using + * the pre-computed text ranges. Returns the first (shallowest file-offset) + * node whose range overlaps the match. + */ +function findNodeAtOffset( + ranges: TextRange[], + matchStart: number, + matchEnd: number, +): number | undefined { + for (const r of ranges) { + if (r.fileEnd <= matchStart) continue; + if (r.fileStart >= matchEnd) break; + return r.nodeId; + } + return undefined; +} + +function fileSearch( + db: Db, + path: string | undefined, + substring: string | undefined, + opts: Opts, +) { + if (!path || !substring) { + console.error("Usage: alloy-trace file search "); + process.exit(1); + } + + const file = db + .prepare( + "SELECT * FROM output_files WHERE path = ? OR path LIKE ? ORDER BY seq DESC LIMIT 1", + ) + .get(path, `%${path}`) as any; + if (!file) { + console.error(`No output file matching "${path}"`); + process.exit(1); + } + + if (!file.content) { + console.log(`No content recorded for ${file.path}`); + return; + } + + const matches = findContentMatches(file.content, substring); + if (matches.length === 0) { + console.log(`No text matching "${substring}" found in ${file.path}`); + return; + } + + // Build the offset→node mapping using diff-match-patch + const ranges = buildFileTextRanges(db, file.render_node_id, file.content); + + if (opts.json) { + for (const match of matches) { + const nodeId = findNodeAtOffset(ranges, match.start, match.end); + const stack = nodeId ? 
buildComponentStack(db, nodeId) : []; + console.log( + JSON.stringify({ + text: match.text, + offset: match.start, + textNodeId: nodeId, + stack, + }), + ); + } + return; + } + + for (const match of matches) { + const context = getMatchContext(file.content, match.start, match.end); + console.log(context); + + const nodeId = findNodeAtOffset(ranges, match.start, match.end); + if (nodeId) { + const stack = buildComponentStack(db, nodeId); + if (stack.length > 0) { + const formatted = formatComponentStack( + JSON.stringify( + stack.map((c: any) => ({ + name: c.name ?? "(unnamed)", + renderNodeId: c.id, + source: + c.source_file ? + { + fileName: c.source_file, + lineNumber: c.source_line, + columnNumber: c.source_col, + } + : undefined, + })), + ), + opts.allFrames, + ); + if (formatted) { + console.log(formatted); + } + } + } + console.log(); + } +} + +function buildComponentStack(db: Db, nodeId: number): any[] { + const stack: any[] = []; + let currentId: number | null = nodeId; + + while (currentId !== null) { + const node = db + .prepare( + "SELECT id, parent_id, kind, name, props, source_file, source_line, source_col FROM render_nodes WHERE id = ?", + ) + .get(currentId) as any; + if (!node) break; + if (node.kind === "component") { + stack.push(node); + } + currentId = node.parent_id; + } + + return stack; +} + +function findContentMatches( + content: string, + substring: string, +): { start: number; end: number; text: string }[] { + const matches: { start: number; end: number; text: string }[] = []; + let pos = 0; + while (true) { + const idx = content.indexOf(substring, pos); + if (idx === -1) break; + matches.push({ start: idx, end: idx + substring.length, text: substring }); + pos = idx + 1; + } + return matches; +} + +function getMatchContext(content: string, start: number, end: number): string { + const matchLineStart = content.lastIndexOf("\n", start - 1) + 1; + const matchLineEnd = content.indexOf("\n", end); + + // Line before + const prevLineStart = + 
matchLineStart > 0 ? content.lastIndexOf("\n", matchLineStart - 2) + 1 : -1; + // Line after — find end of next line, or end of content + let contextEnd: number; + if (matchLineEnd === -1) { + // Match is on the last line (no newline after match) + contextEnd = content.length; + } else { + const nextLineEnd = content.indexOf("\n", matchLineEnd + 1); + contextEnd = nextLineEnd !== -1 ? nextLineEnd : content.length; + } + + const contextStart = prevLineStart >= 0 ? prevLineStart : matchLineStart; + return content.slice(contextStart, contextEnd); +} diff --git a/packages/trace-cli/src/commands/query.ts b/packages/trace-cli/src/commands/query.ts new file mode 100644 index 000000000..ed27658e5 --- /dev/null +++ b/packages/trace-cli/src/commands/query.ts @@ -0,0 +1,69 @@ +import { type Db, type Opts } from "../types.js"; + +export function runQuery(db: Db, args: string[], opts: Opts) { + const sql = args.join(" "); + if (!sql) { + console.error("Usage: alloy-trace query "); + process.exit(1); + } + + try { + const stmt = db.prepare(sql); + const rows = stmt.all() as any[]; + + if (opts.json) { + for (const row of rows.slice(0, opts.limit)) { + console.log(JSON.stringify(row)); + } + return; + } + + if (rows.length === 0) { + console.log("(no results)"); + return; + } + + const limit = opts.limit ?? 100; + const display = rows.slice(0, limit); + const columns = Object.keys(display[0]); + + // Calculate column widths + const widths = columns.map((col) => + Math.max( + col.length, + ...display.map((row) => String(row[col] ?? "").length), + ), + ); + + // Clamp widths to 60 + const clampedWidths = widths.map((w) => Math.min(w, 60)); + + // Print header + console.log( + columns.map((col, i) => col.padEnd(clampedWidths[i])).join(" "), + ); + console.log(clampedWidths.map((w) => "─".repeat(w)).join(" ")); + + // Print rows + for (const row of display) { + console.log( + columns + .map((col, i) => + String(row[col] ?? 
"") + .padEnd(clampedWidths[i]) + .slice(0, clampedWidths[i]), + ) + .join(" "), + ); + } + + if (rows.length > limit) { + console.log( + `\n(${rows.length - limit} more rows, use --limit to show more)`, + ); + } + } catch (e: any) { + console.error(`SQL error: ${e.message}`); + process.exit(1); + } +} diff --git a/packages/trace-cli/src/commands/ref.ts b/packages/trace-cli/src/commands/ref.ts new file mode 100644 index 000000000..4cda1572c --- /dev/null +++ b/packages/trace-cli/src/commands/ref.ts @@ -0,0 +1,412 @@ +import { + type Db, + type Opts, + outputFileContextsCte, + printPaginationFooter, + requireId, + shortPath, +} from "../types.js"; + +export function refCommand( + db: Db, + subcommand: string, + args: string[], + opts: Opts, +) { + switch (subcommand) { + case "list": + case "ls": + return refList(db, opts); + case "show": + return refShow( + db, + requireId(args, "Usage: alloy-trace ref show "), + opts, + ); + case "chain": + return refChain( + db, + requireId(args, "Usage: alloy-trace ref chain "), + opts, + ); + case "hotspots": + return refHotspots(db, opts); + case "fanout": + return refFanout( + db, + requireId(args, "Usage: alloy-trace ref fanout "), + opts, + ); + case "ownership": + return refOwnership( + db, + requireId(args, "Usage: alloy-trace ref ownership "), + opts, + ); + default: + console.error(`Unknown ref subcommand: ${subcommand} +Usage: alloy-trace ref [args] [options]`); + process.exit(1); + } +} + +function refList(db: Db, opts: Opts) { + const conditions: string[] = []; + const params: any[] = []; + + if (opts.sourceFile) { + conditions.push("r.source_file LIKE ?"); + params.push(`%${opts.sourceFile}%`); + } + if (opts.outputFile) { + conditions.push( + `r.created_by_effect_id IN (SELECT e.id FROM effects e WHERE e.context_id IN ${outputFileContextsCte()})`, + ); + params.push(`%${opts.outputFile}%`); + } + if (opts.type) { + conditions.push("r.kind = ?"); + params.push(opts.type); + } + if (opts.minTrackers != null) { + 
conditions.push( + `(SELECT COUNT(*) FROM edges WHERE ref_id = r.id AND type = 'track') >= ?`, + ); + params.push(opts.minTrackers); + } + if (opts.unused) { + conditions.push(`(SELECT COUNT(*) FROM edges WHERE ref_id = r.id) = 0`); + } + + const where = + conditions.length > 0 ? "WHERE " + conditions.join(" AND ") : ""; + const limit = opts.limit ?? 50; + const orderBy = + opts.minTrackers != null ? + "ORDER BY (SELECT COUNT(*) FROM edges WHERE ref_id = r.id AND type = 'track') DESC" + : "ORDER BY r.id"; + const sql = ` + SELECT r.id, r.kind, r.source_file, r.source_line, r.created_by_effect_id, + (SELECT COUNT(*) FROM edges WHERE ref_id = r.id AND type = 'track') as tracked_by, + (SELECT COUNT(*) FROM edges WHERE ref_id = r.id AND type = 'trigger') as triggered + FROM refs r ${where} + ${orderBy} + LIMIT ? + `; + const allParams = [...params, limit]; + const rows = db.prepare(sql).all(...allParams) as any[]; + + if (opts.json) { + for (const r of rows) console.log(JSON.stringify(r)); + return; + } + if (rows.length === 0) { + console.log("No refs found."); + return; + } + + for (const r of rows) { + const src = + r.source_file ? shortPath(r.source_file) + ":" + r.source_line : ""; + console.log( + ` ${String(r.id).padStart(4)} ${r.kind.padEnd(12)} creator: effect ${r.created_by_effect_id ?? "?"} tracked_by: ${r.tracked_by} triggered: ${r.triggered}`, + ); + if (src) console.log(` ${src}`); + } + + printPaginationFooter( + db, + `SELECT COUNT(*) as n FROM refs r ${where}`, + params, + limit, + rows.length, + ); +} + +function refShow(db: Db, id: number, opts: Opts) { + const ref = db.prepare("SELECT * FROM refs WHERE id = ?").get(id) as any; + if (!ref) { + console.error(`Ref ${id} not found`); + return; + } + + if (opts.json) { + const trackedBy = db + .prepare( + "SELECT DISTINCT effect_id FROM edges WHERE ref_id = ? AND type = 'track'", + ) + .all(id); + const writtenBy = db + .prepare( + "SELECT effect_id, COUNT(*) as n FROM edges WHERE ref_id = ? 
AND type = 'trigger' GROUP BY effect_id", + ) + .all(id); + console.log(JSON.stringify({ ref, trackedBy, writtenBy })); + return; + } + + console.log(`Ref ${id} (kind: ${ref.kind ?? "unknown"})`); + if (ref.source_file) + console.log( + ` Source: ${ref.source_file}:${ref.source_line}:${ref.source_col}`, + ); + if (ref.created_by_effect_id != null) + console.log(` Created by: effect ${ref.created_by_effect_id}`); + + const trackedBy = db + .prepare( + "SELECT DISTINCT e.id, e.name FROM edges ed JOIN effects e ON ed.effect_id = e.id WHERE ed.ref_id = ? AND ed.type = 'track'", + ) + .all(id) as any[]; + if (trackedBy.length > 0) { + console.log(` Tracked by ${trackedBy.length} effects:`); + for (const t of trackedBy) console.log(` effect ${t.id} "${t.name}"`); + } + + const writtenBy = db + .prepare( + "SELECT e.id, e.name, COUNT(*) as n FROM edges ed JOIN effects e ON ed.effect_id = e.id WHERE ed.ref_id = ? AND ed.type = 'trigger' GROUP BY e.id", + ) + .all(id) as any[]; + if (writtenBy.length > 0) { + console.log(` Written by ${writtenBy.length} effects:`); + for (const w of writtenBy) + console.log(` effect ${w.id} "${w.name}" ×${w.n}`); + } +} + +function refChain( + db: Db, + refId: number, + opts: Opts, + depth = 0, + maxDepth = 5, + visited = new Set(), +) { + if (visited.has(refId)) return; + visited.add(refId); + + const ref = db.prepare("SELECT * FROM refs WHERE id = ?").get(refId) as any; + if (depth === 0) { + console.log( + `ref ${refId} (kind: ${ref?.kind ?? "?"}, source: ${ref?.source_file ?? "?"}:${ref?.source_line ?? "?"})`, + ); + } + + if (depth >= maxDepth) { + console.log(" ".repeat(depth + 1) + "... (max depth reached)"); + return; + } + + const triggeredEffects = db + .prepare( + ` + SELECT DISTINCT e.id, e.name, COUNT(*) as n + FROM edges ed JOIN effects e ON ed.effect_id = e.id + WHERE ed.ref_id = ? 
AND ed.type = 'triggered-by' + GROUP BY e.id + `, + ) + .all(refId) as any[]; + + for (const effect of triggeredEffects) { + const indent = " ".repeat(depth + 1); + console.log( + `${indent}├─ triggers effect ${effect.id} (${effect.name}) [×${effect.n}]`, + ); + + const writes = db + .prepare( + ` + SELECT DISTINCT ref_id FROM edges + WHERE effect_id = ? AND type = 'trigger' AND ref_id IS NOT NULL + `, + ) + .all(effect.id) as any[]; + + for (const write of writes) { + console.log(`${indent}│ └─ writes ref ${write.ref_id}`); + refChain(db, write.ref_id, opts, depth + 2, maxDepth, visited); + } + } +} + +function refHotspots(db: Db, opts: Opts) { + const limit = opts.limit ?? 20; + const rows = db + .prepare( + ` + SELECT r.id, r.kind, r.source_file, r.source_line, + r.created_by_effect_id, + e.name as creator_name, e.component as creator_component, + (SELECT COUNT(*) FROM edges WHERE ref_id = r.id AND type = 'track') as tracked_by, + (SELECT COUNT(*) FROM edges WHERE ref_id = r.id AND type = 'trigger') as triggered + FROM refs r + LEFT JOIN effects e ON r.created_by_effect_id = e.id + ORDER BY tracked_by DESC + LIMIT ? + `, + ) + .all(limit) as any[]; + + if (opts.json) { + for (const r of rows) console.log(JSON.stringify(r)); + return; + } + if (rows.length === 0) { + console.log("No refs found."); + return; + } + + console.log("Refs with most trackers:\n"); + for (const r of rows) { + const creator = + r.creator_name ? + `${r.creator_name}${r.creator_component ? ` [${r.creator_component}]` : ""}` + : "?"; + const src = + r.source_file ? 
` ${shortPath(r.source_file)}:${r.source_line}` : ""; + console.log( + ` ${String(r.id).padStart(5)} ${r.kind.padEnd(12)} ${String(r.tracked_by).padStart(4)} trackers, ${String(r.triggered).padStart(3)} writes creator: ${creator}${src}`, + ); + } +} + +function refFanout(db: Db, refId: number, opts: Opts) { + const ref = db.prepare("SELECT * FROM refs WHERE id = ?").get(refId) as any; + if (!ref) { + console.error(`Ref ${refId} not found`); + return; + } + + const trackedBy = db + .prepare( + ` + SELECT DISTINCT e.id, e.name, e.component + FROM edges ed JOIN effects e ON ed.effect_id = e.id + WHERE ed.ref_id = ? AND ed.type = 'track' + `, + ) + .all(refId) as any[]; + + console.log( + `Ref ${refId} (${ref.kind}) — tracked by ${trackedBy.length} effects\n`, + ); + + // Group trackers by component + const byComponent = new Map(); + for (const t of trackedBy) { + const key = t.component ?? "(no component)"; + if (!byComponent.has(key)) byComponent.set(key, []); + byComponent.get(key)!.push(t); + } + + for (const [comp, effects] of [...byComponent.entries()].sort( + (a, b) => b[1].length - a[1].length, + )) { + console.log(` ${comp}: ${effects.length} effects`); + const limit = opts.limit ?? 5; + for (const e of effects.slice(0, limit)) { + // Walk up to find the component ancestry + const ancestry = getComponentAncestry(db, e.id, 5); + const path = ancestry.length > 0 ? ` (${ancestry.join(" → ")})` : ""; + console.log(` effect ${e.id} "${e.name}"${path}`); + } + if (effects.length > limit) { + console.log(` ... and ${effects.length - limit} more`); + } + } + + // Show what writes to this ref + const writers = db + .prepare( + ` + SELECT DISTINCT e.id, e.name, e.component + FROM edges ed JOIN effects e ON ed.effect_id = e.id + WHERE ed.ref_id = ? AND ed.type = 'trigger' + `, + ) + .all(refId) as any[]; + + if (writers.length > 0) { + console.log(`\n Written by:`); + for (const w of writers) { + console.log( + ` effect ${w.id} "${w.name}"${w.component ? 
` [${w.component}]` : ""}`, + ); + } + } +} + +function getComponentAncestry( + db: Db, + effectId: number, + maxDepth: number, +): string[] { + const components: string[] = []; + let ctxId = effectId; + for (let i = 0; i < maxDepth * 3; i++) { + const e = db + .prepare( + "SELECT owner_context_id, component FROM effects WHERE context_id = ?", + ) + .get(ctxId) as any; + if (!e || !e.owner_context_id) break; + if (e.component) components.push(e.component); + ctxId = e.owner_context_id; + if (components.length >= maxDepth) break; + } + return components; +} + +function refOwnership(db: Db, id: number, _opts: Opts) { + const ref = db.prepare("SELECT * FROM refs WHERE id = ?").get(id) as any; + if (!ref) { + console.error(`Ref ${id} not found`); + return; + } + + console.log(`Ref ${id} (${ref.kind ?? "unknown"})`); + if (ref.source_file) + console.log(` Source: ${shortPath(ref.source_file)}:${ref.source_line}`); + + if (ref.created_by_effect_id == null) { + console.log(" Created outside reactive tracking (no creator effect)"); + return; + } + + console.log(` Created by: effect ${ref.created_by_effect_id}`); + console.log(); + + // Walk the creator's context ownership chain to find the component path + const creator = db + .prepare("SELECT * FROM effects WHERE id = ?") + .get(ref.created_by_effect_id) as any; + if (!creator) { + console.log(" Creator effect not found in trace"); + return; + } + + console.log(` Creator: "${creator.name}" (${creator.type ?? 
"unknown"})`); + console.log(); + console.log(" Component ancestry:"); + + let ctxId: number | null = creator.context_id; + let depth = 0; + while (ctxId != null && depth < 50) { + const e = db + .prepare( + "SELECT id, name, type, component, context_id, owner_context_id FROM effects WHERE context_id = ?", + ) + .get(ctxId) as any; + if (!e) break; + const indent = " ".repeat(depth); + if (e.component) { + console.log( + ` ${indent}● ${e.component} (effect ${e.id}, ctx: ${e.context_id})`, + ); + } + ctxId = e.owner_context_id; + depth++; + } +} diff --git a/packages/trace-cli/src/commands/scope.ts b/packages/trace-cli/src/commands/scope.ts new file mode 100644 index 000000000..e85073494 --- /dev/null +++ b/packages/trace-cli/src/commands/scope.ts @@ -0,0 +1,116 @@ +import { + type Db, + type Opts, + outputFileRenderNodesCte, + requireId, +} from "../types.js"; + +export function scopeCommand( + db: Db, + subcommand: string, + args: string[], + opts: Opts, +) { + switch (subcommand) { + case "list": + case "ls": + return scopeList(db, opts); + case "show": + return scopeShow( + db, + requireId(args, "Usage: alloy-trace scope show "), + opts, + ); + default: + console.error(`Unknown scope subcommand: ${subcommand} +Usage: alloy-trace scope [args] [options]`); + process.exit(1); + } +} + +function scopeList(db: Db, opts: Opts) { + const conditions: string[] = []; + const params: any[] = []; + + if (opts.name) { + conditions.push("s.name LIKE ?"); + params.push(`%${opts.name}%`); + } + if (opts.outputFile) { + conditions.push(`s.render_node_id IN ${outputFileRenderNodesCte()}`); + params.push(`%${opts.outputFile}%`); + } + if (opts.component) { + conditions.push( + "s.render_node_id IN (SELECT rn.id FROM render_nodes rn WHERE rn.kind = 'component' AND rn.name LIKE ?)", + ); + params.push(`%${opts.component}%`); + } + + const where = + conditions.length > 0 ? 
"WHERE " + conditions.join(" AND ") : ""; + const sql = ` + SELECT s.id, s.name, s.parent_id, s.is_member_scope, s.owner_symbol_id, + (SELECT COUNT(*) FROM symbols sym WHERE sym.scope_id = s.id OR sym.owner_symbol_id = s.owner_symbol_id) as symbol_count, + p.name as parent_name, + os.name as owner_name + FROM scopes s + LEFT JOIN scopes p ON s.parent_id = p.id + LEFT JOIN symbols os ON s.owner_symbol_id = os.id + ${where} + ORDER BY s.id + `; + const rows = db.prepare(sql).all(...params) as any[]; + + if (opts.json) { + for (const r of rows) console.log(JSON.stringify(r)); + return; + } + if (rows.length === 0) { + console.log("No scopes found."); + return; + } + + for (const r of rows) { + const parent = r.parent_name ? ` (parent: "${r.parent_name}")` : ""; + const member = r.is_member_scope ? " [member]" : ""; + const owner = r.owner_name ? ` owner: "${r.owner_name}"` : ""; + console.log( + ` ${String(r.id).padStart(4)} ${r.name}${member} ${r.symbol_count} symbols${owner}${parent}`, + ); + } +} + +function scopeShow(db: Db, id: number, opts: Opts) { + const scope = db.prepare("SELECT * FROM scopes WHERE id = ?").get(id) as any; + if (!scope) { + console.error(`Scope ${id} not found`); + return; + } + if (opts.json) { + console.log(JSON.stringify(scope)); + return; + } + + console.log(`Scope ${id}: "${scope.name}"`); + if (scope.parent_id != null) { + const parent = db + .prepare("SELECT name FROM scopes WHERE id = ?") + .get(scope.parent_id) as any; + console.log( + ` Parent: scope ${scope.parent_id} ("${parent?.name ?? 
"?"}")`, + ); + } + if (scope.owner_symbol_id != null) + console.log(` Owner symbol: ${scope.owner_symbol_id}`); + console.log(` Member scope: ${Boolean(scope.is_member_scope)}`); + + const syms = db + .prepare("SELECT id, name FROM symbols WHERE scope_id = ?") + .all(id) as any[]; + if (syms.length > 0) { + console.log( + ` Symbols: ${syms.map((s: any) => `${s.name} (${s.id})`).join(", ")}`, + ); + } +} diff --git a/packages/trace-cli/src/commands/stats.ts b/packages/trace-cli/src/commands/stats.ts new file mode 100644 index 000000000..a0daea4f4 --- /dev/null +++ b/packages/trace-cli/src/commands/stats.ts @@ -0,0 +1,185 @@ +import { type Db, type Opts, shortPath } from "../types.js"; + +export function statsCommand(db: Db, _args: string[], opts: Opts) { + const effects = (db.prepare("SELECT COUNT(*) as n FROM effects").get() as any) + .n; + const refs = (db.prepare("SELECT COUNT(*) as n FROM refs").get() as any).n; + const edges = (db.prepare("SELECT COUNT(*) as n FROM edges").get() as any).n; + const components = ( + db + .prepare( + "SELECT COUNT(*) as n FROM render_nodes WHERE kind = 'component'", + ) + .get() as any + ).n; + const symbols = (db.prepare("SELECT COUNT(*) as n FROM symbols").get() as any) + .n; + const scopes = (db.prepare("SELECT COUNT(*) as n FROM scopes").get() as any) + .n; + const renderNodes = ( + db.prepare("SELECT COUNT(*) as n FROM render_nodes").get() as any + ).n; + const jobsRun = ( + db + .prepare("SELECT COUNT(*) as n FROM scheduler_jobs WHERE event = 'run'") + .get() as any + ).n; + const flushes = ( + db.prepare("SELECT COUNT(*) as n FROM scheduler_flushes").get() as any + ).n; + const outputFiles = ( + db.prepare("SELECT COUNT(*) as n FROM output_files").get() as any + ).n; + const errors = ( + db.prepare("SELECT COUNT(*) as n FROM render_errors").get() as any + ).n; + + if (opts.json) { + console.log( + JSON.stringify({ + effects, + refs, + edges, + components, + symbols, + scopes, + renderNodes, + jobsRun, + flushes, + 
outputFiles, + errors, + }), + ); + return; + } + + // Summary + console.log(" Summary"); + console.log(" ───────"); + console.log(` Effects: ${effects} | Refs: ${refs} | Edges: ${edges}`); + console.log( + ` Components: ${components} | Render nodes: ${renderNodes} | Symbols: ${symbols} | Scopes: ${scopes}`, + ); + console.log(` Scheduler: ${jobsRun} jobs in ${flushes} flushes`); + console.log(` Output files: ${outputFiles} | Errors: ${errors}`); + console.log(); + + // Overhead analysis + console.log(" Overhead"); + console.log(" ────────"); + + const frameworkEffects = ( + db + .prepare("SELECT COUNT(*) as n FROM effects WHERE source_file IS NULL") + .get() as any + ).n; + const userEffects = effects - frameworkEffects; + console.log( + ` Framework effects: ${frameworkEffects} (${pct(frameworkEffects, effects)}) | User effects: ${userEffects} (${pct(userEffects, effects)})`, + ); + console.log( + ` Effects per component: ${(effects / components).toFixed(1)} | Refs per component: ${(refs / components).toFixed(1)}`, + ); + console.log(); + + // Effect types + console.log(" Effects by type"); + console.log(" ───────────────"); + const effectTypes = db + .prepare( + ` + SELECT COALESCE(type, '(unnamed)') as type, COUNT(*) as cnt + FROM effects GROUP BY type ORDER BY cnt DESC + `, + ) + .all() as any[]; + for (const r of effectTypes) { + console.log( + ` ${r.type.padEnd(20)} ${String(r.cnt).padStart(6)} (${pct(r.cnt, effects)})`, + ); + } + console.log(); + + // Ref usage + console.log(" Ref usage"); + console.log(" ─────────"); + const neverTracked = ( + db + .prepare( + ` + SELECT COUNT(*) as n FROM refs r + WHERE NOT EXISTS (SELECT 1 FROM edges WHERE ref_id = r.id AND type = 'track') + `, + ) + .get() as any + ).n; + const completelyUnused = ( + db + .prepare( + ` + SELECT COUNT(*) as n FROM refs r + WHERE NOT EXISTS (SELECT 1 FROM edges WHERE ref_id = r.id) + `, + ) + .get() as any + ).n; + console.log( + ` Never tracked: ${String(neverTracked).padStart(6)} 
(${pct(neverTracked, refs)})`, + ); + console.log( + ` Completely unused: ${String(completelyUnused).padStart(6)} (${pct(completelyUnused, refs)})`, + ); + console.log(); + + // Top components by instance count + console.log(" Top components (by instance count)"); + console.log(" ──────────────────────────────────"); + const topComponents = db + .prepare( + ` + SELECT name, COUNT(*) as instances + FROM render_nodes WHERE kind = 'component' + GROUP BY name ORDER BY instances DESC LIMIT 15 + `, + ) + .all() as any[]; + for (const r of topComponents) { + console.log( + ` ${r.name.padEnd(35)} ${String(r.instances).padStart(4)} instances`, + ); + } + console.log(); + + // Source files by effect count + const limit = opts.limit ?? 15; + const byFile = db + .prepare( + ` + SELECT e.source_file, + COUNT(*) as effects, + (SELECT COUNT(*) FROM refs r WHERE r.source_file = e.source_file) as refs + FROM effects e + WHERE e.source_file IS NOT NULL + GROUP BY e.source_file + ORDER BY effects DESC + LIMIT ? 
+ `, + ) + .all(limit) as any[]; + + if (byFile.length > 0) { + console.log(" Source files (by effect count)"); + console.log(" ─────────────────────────────"); + for (const row of byFile) { + const file = shortPath(row.source_file); + console.log( + ` ${file.padEnd(55)} ${row.effects} effects, ${row.refs} refs`, + ); + } + } +} + +function pct(n: number, total: number): string { + if (total === 0) return "0%"; + return ((100 * n) / total).toFixed(1) + "%"; +} diff --git a/packages/trace-cli/src/commands/symbol.ts b/packages/trace-cli/src/commands/symbol.ts new file mode 100644 index 000000000..e65f64afb --- /dev/null +++ b/packages/trace-cli/src/commands/symbol.ts @@ -0,0 +1,122 @@ +import { + type Db, + type Opts, + outputFileRenderNodesCte, + requireId, +} from "../types.js"; + +export function symbolCommand( + db: Db, + subcommand: string, + args: string[], + opts: Opts, +) { + switch (subcommand) { + case "list": + case "ls": + return symbolList(db, opts); + case "show": + return symbolShow( + db, + requireId(args, "Usage: alloy-trace symbol show "), + opts, + ); + default: + console.error(`Unknown symbol subcommand: ${subcommand} +Usage: alloy-trace symbol [args] [options]`); + process.exit(1); + } +} + +function symbolList(db: Db, opts: Opts) { + const conditions: string[] = []; + const params: any[] = []; + + if (opts.name) { + conditions.push("s.name LIKE ?"); + params.push(`%${opts.name}%`); + } + if (opts.outputFile) { + conditions.push(`s.render_node_id IN ${outputFileRenderNodesCte()}`); + params.push(`%${opts.outputFile}%`); + } + if (opts.component) { + conditions.push( + "s.render_node_id IN (SELECT rn.id FROM render_nodes rn WHERE rn.kind = 'component' AND rn.name LIKE ?)", + ); + params.push(`%${opts.component}%`); + } + + const where = + conditions.length > 0 ? 
"WHERE " + conditions.join(" AND ") : ""; + const sql = ` + SELECT s.id, s.name, s.original_name, s.scope_id, s.owner_symbol_id, + s.is_member, s.is_transient, s.is_alias, + sc.name as scope_name, + os.name as owner_name + FROM symbols s + LEFT JOIN scopes sc ON s.scope_id = sc.id + LEFT JOIN symbols os ON s.owner_symbol_id = os.id + ${where} + ORDER BY s.id + `; + const rows = db.prepare(sql).all(...params) as any[]; + + if (opts.json) { + for (const r of rows) console.log(JSON.stringify(r)); + return; + } + if (rows.length === 0) { + console.log("No symbols found."); + return; + } + + for (const r of rows) { + const orig = + r.original_name && r.original_name !== r.name ? + ` (original: ${r.original_name})` + : ""; + const scope = r.scope_name ? ` in scope "${r.scope_name}"` : ""; + const owner = r.owner_name ? ` member of "${r.owner_name}"` : ""; + const flags = [ + r.is_member ? "member" : "", + r.is_transient ? "transient" : "", + r.is_alias ? "alias" : "", + ] + .filter(Boolean) + .join(", "); + const flagStr = flags ? ` [${flags}]` : ""; + console.log( + ` ${String(r.id).padStart(4)} ${r.name}${orig}${flagStr}${scope}${owner}`, + ); + } +} + +function symbolShow(db: Db, id: number, opts: Opts) { + const sym = db.prepare("SELECT * FROM symbols WHERE id = ?").get(id) as any; + if (!sym) { + console.error(`Symbol ${id} not found`); + return; + } + if (opts.json) { + console.log(JSON.stringify(sym)); + return; + } + + console.log(`Symbol ${id}: "${sym.name}"`); + if (sym.original_name !== sym.name) + console.log(` Original name: ${sym.original_name}`); + if (sym.scope_id != null) { + const scope = db + .prepare("SELECT name FROM scopes WHERE id = ?") + .get(sym.scope_id) as any; + console.log(` Scope: ${sym.scope_id} ("${scope?.name ?? 
"?"}")`); + } + if (sym.owner_symbol_id != null) + console.log(` Owner symbol: ${sym.owner_symbol_id}`); + if (sym.render_node_id != null) + console.log(` Render node: ${sym.render_node_id}`); + console.log( + ` Member: ${Boolean(sym.is_member)} | Transient: ${Boolean(sym.is_transient)} | Alias: ${Boolean(sym.is_alias)}`, + ); +} diff --git a/packages/trace-cli/src/db.ts b/packages/trace-cli/src/db.ts new file mode 100644 index 000000000..4c6eaf3c2 --- /dev/null +++ b/packages/trace-cli/src/db.ts @@ -0,0 +1,10 @@ +import { DatabaseSync } from "node:sqlite"; + +export function openTrace(path: string): DatabaseSync { + try { + return new DatabaseSync(path, { readOnly: true }); + } catch (e: any) { + console.error(`Failed to open trace database: ${path}\n ${e.message}`); + process.exit(1); + } +} diff --git a/packages/trace-cli/src/test/capture.ts b/packages/trace-cli/src/test/capture.ts new file mode 100644 index 000000000..66bfcec0e --- /dev/null +++ b/packages/trace-cli/src/test/capture.ts @@ -0,0 +1,31 @@ +/** + * Captures all console.log and console.error output during `fn()`. + * Returns { stdout, stderr } as joined strings with ANSI codes stripped. 
+ */ +export function captureOutput(fn: () => void): { + stdout: string; + stderr: string; +} { + const stdoutLines: string[] = []; + const stderrLines: string[] = []; + const origLog = console.log; + const origError = console.error; + + console.log = (...args: any[]) => stdoutLines.push(args.join(" ")); + console.error = (...args: any[]) => stderrLines.push(args.join(" ")); + + try { + fn(); + } finally { + console.log = origLog; + console.error = origError; + } + + // eslint-disable-next-line no-control-regex + const stripAnsi = (s: string) => s.replace(/\x1b\[[0-9;]*m/g, ""); + + return { + stdout: stripAnsi(stdoutLines.join("\n")), + stderr: stripAnsi(stderrLines.join("\n")), + }; +} diff --git a/packages/trace-cli/src/test/component.test.ts b/packages/trace-cli/src/test/component.test.ts new file mode 100644 index 000000000..83b61efa6 --- /dev/null +++ b/packages/trace-cli/src/test/component.test.ts @@ -0,0 +1,177 @@ +import { DatabaseSync } from "node:sqlite"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { componentCommand } from "../commands/component.js"; +import { captureOutput } from "./capture.js"; +import { createTestDb } from "./test-db.js"; + +let db: DatabaseSync; + +beforeEach(() => { + db = createTestDb(); +}); +afterEach(() => { + db.close(); +}); + +describe("component list", () => { + it("lists all components", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "list", [], {}), + ); + expect(stdout).toContain("SourceFile"); + expect(stdout).toContain("Declaration"); + }); + + it("filters by name", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "list", [], { name: "Source" }), + ); + expect(stdout).toContain("SourceFile"); + expect(stdout).not.toContain("Declaration"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "list", [], { json: true }), + ); + const lines = stdout.split("\n").filter(Boolean); + 
expect(lines.length).toBe(2); + const parsed = JSON.parse(lines[0]); + expect(parsed).toHaveProperty("id"); + expect(parsed).toHaveProperty("name"); + expect(parsed).toHaveProperty("children"); + }); + + it("returns empty message when no match", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "list", [], { name: "NonExistent" }), + ); + expect(stdout).toContain("No components found"); + }); + + it("respects limit", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "list", [], { limit: 1 }), + ); + // should show pagination footer since there are 2 components + expect(stdout).toContain("showing 1 of 2"); + }); + + it("filters by source file", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "list", [], { sourceFile: "source-file" }), + ); + expect(stdout).toContain("SourceFile"); + expect(stdout).not.toContain("Declaration"); + }); +}); + +describe("component show", () => { + it("shows component details", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "show", ["2"], {}), + ); + expect(stdout).toContain('Component 2: "SourceFile"'); + expect(stdout).toContain("Source:"); + expect(stdout).toContain("Children"); + }); + + it("shows component details as json", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "show", ["2"], { json: true }), + ); + const parsed = JSON.parse(stdout); + expect(parsed.name).toBe("SourceFile"); + expect(parsed.kind).toBe("component"); + }); + + it("reports not found for missing id", () => { + const { stderr } = captureOutput(() => + componentCommand(db, "show", ["999"], {}), + ); + expect(stderr).toContain("not found"); + }); + + it("lists children of a component", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "show", ["2"], {}), + ); + // SourceFile has children: Declaration(3), Fragment(5), memo(7) + expect(stdout).toContain("Children (3)"); + expect(stdout).toContain("component Declaration"); + 
expect(stdout).toContain("fragment"); + expect(stdout).toContain("memo"); + }); +}); + +describe("component tree", () => { + it("prints full tree from root", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "tree", [], {}), + ); + expect(stdout).toContain("root"); + expect(stdout).toContain('"SourceFile"'); + expect(stdout).toContain('"Declaration"'); + expect(stdout).toContain("text"); + }); + + it("prints subtree from a specific node", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "tree", ["3"], {}), + ); + expect(stdout).toContain('"Declaration"'); + expect(stdout).toContain("text"); + // should not contain parent SourceFile as a tree root + expect(stdout).not.toContain('"SourceFile"'); + }); + + it("respects depth limit", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "tree", [], { depth: 1 }), + ); + expect(stdout).toContain("root"); + expect(stdout).toContain('"SourceFile"'); + // Declaration is depth 2, should be cut off + expect(stdout).not.toContain('"Declaration"'); + }); + + it("filters by component name", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "tree", [], { component: "Declaration" }), + ); + expect(stdout).toContain('"Declaration"'); + expect(stdout).toContain("text"); + }); + + it("outputs json tree", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "tree", [], { json: true }), + ); + const tree = JSON.parse(stdout); + expect(tree.kind).toBe("root"); + expect(tree.children.length).toBeGreaterThan(0); + }); +}); + +describe("component stats", () => { + it("shows per-component overhead", () => { + const { stdout } = captureOutput(() => + componentCommand(db, "stats", [], {}), + ); + expect(stdout).toContain("SourceFile"); + expect(stdout).toContain("Declaration"); + expect(stdout).toContain("Per-component overhead"); + }); + + it("returns json stats", () => { + const { stdout } = captureOutput(() => + componentCommand(db, 
"stats", [], { json: true }), + ); + const lines = stdout.split("\n").filter(Boolean); + expect(lines.length).toBeGreaterThan(0); + const parsed = JSON.parse(lines[0]); + expect(parsed).toHaveProperty("name"); + expect(parsed).toHaveProperty("instances"); + expect(parsed).toHaveProperty("total_effects"); + }); +}); diff --git a/packages/trace-cli/src/test/effect.test.ts b/packages/trace-cli/src/test/effect.test.ts new file mode 100644 index 000000000..1f5696b07 --- /dev/null +++ b/packages/trace-cli/src/test/effect.test.ts @@ -0,0 +1,203 @@ +import { DatabaseSync } from "node:sqlite"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { effectCommand } from "../commands/effect.js"; +import { captureOutput } from "./capture.js"; +import { createTestDb } from "./test-db.js"; + +let db: DatabaseSync; + +beforeEach(() => { + db = createTestDb(); +}); +afterEach(() => { + db.close(); +}); + +describe("effect list", () => { + it("lists all effects", () => { + const { stdout } = captureOutput(() => effectCommand(db, "list", [], {})); + expect(stdout).toContain("render:SourceFile"); + expect(stdout).toContain("render:Declaration"); + expect(stdout).toContain("content:models"); + expect(stdout).toContain("binder:resolve"); + }); + + it("filters by name", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "list", [], { name: "render" }), + ); + expect(stdout).toContain("render:SourceFile"); + expect(stdout).toContain("render:Declaration"); + expect(stdout).not.toContain("binder:resolve"); + }); + + it("filters by type", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "list", [], { type: "binder" }), + ); + expect(stdout).toContain("binder:resolve"); + expect(stdout).not.toContain("render:SourceFile"); + }); + + it("filters by component", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "list", [], { component: "Declaration" }), + ); + expect(stdout).toContain("render:Declaration"); + 
expect(stdout).not.toContain("render:SourceFile"); + }); + + it("shows framework-only effects", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "list", [], { framework: true }), + ); + expect(stdout).toContain("binder:resolve"); + expect(stdout).not.toContain("render:SourceFile"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "list", [], { json: true }), + ); + const lines = stdout.split("\n").filter(Boolean); + expect(lines.length).toBe(4); + const parsed = JSON.parse(lines[0]); + expect(parsed).toHaveProperty("id"); + expect(parsed).toHaveProperty("name"); + expect(parsed).toHaveProperty("tracks"); + expect(parsed).toHaveProperty("triggers"); + }); + + it("respects limit", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "list", [], { limit: 2 }), + ); + expect(stdout).toContain("showing 2 of 4"); + }); + + it("shows empty message when no match", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "list", [], { name: "nonexistent" }), + ); + expect(stdout).toContain("No effects found"); + }); +}); + +describe("effect show", () => { + it("shows effect details", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "show", ["2"], {}), + ); + expect(stdout).toContain('Effect 2: "render:Declaration"'); + expect(stdout).toContain("Source:"); + expect(stdout).toContain("Component: Declaration"); + expect(stdout).toContain("Tracks 1 refs"); + }); + + it("shows lifecycle info", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "show", ["2"], {}), + ); + expect(stdout).toContain("Lifecycle: ran 2 times, skipped 0"); + expect(stdout).toContain("triggered by ref 1"); + }); + + it("shows json with all details", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "show", ["3"], { json: true }), + ); + const parsed = JSON.parse(stdout); + expect(parsed.effect.name).toBe("content:models"); + 
expect(parsed.tracks).toBeInstanceOf(Array); + expect(parsed.triggeredBy).toBeInstanceOf(Array); + expect(parsed.triggers).toBeInstanceOf(Array); + expect(parsed.lifecycle).toBeInstanceOf(Array); + }); + + it("reports not found", () => { + const { stderr } = captureOutput(() => + effectCommand(db, "show", ["999"], {}), + ); + expect(stderr).toContain("not found"); + }); +}); + +describe("effect chain", () => { + it("shows causal chain from effect", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "chain", ["3"], {}), + ); + expect(stdout).toContain("effect 3"); + expect(stdout).toContain("content:models"); + expect(stdout).toContain("Triggered by"); + }); + + it("reports not found", () => { + const { stderr } = captureOutput(() => + effectCommand(db, "chain", ["999"], {}), + ); + expect(stderr).toContain("not found"); + }); +}); + +describe("effect hotspots", () => { + it("shows effects sorted by activity", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "hotspots", [], {}), + ); + expect(stdout).toContain("highest reactive activity"); + // effect 3 has most activity (tracks 1, triggers 1, triggered-by 1) + expect(stdout).toContain("content:models"); + }); + + it("respects limit", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "hotspots", [], { limit: 1 }), + ); + const lines = stdout.split("\n").filter((l) => l.trim().match(/^\d/)); + expect(lines.length).toBeLessThanOrEqual(1); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "hotspots", [], { json: true }), + ); + const lines = stdout.split("\n").filter(Boolean); + const parsed = JSON.parse(lines[0]); + expect(parsed).toHaveProperty("tracks"); + expect(parsed).toHaveProperty("triggers"); + expect(parsed).toHaveProperty("refs_created"); + }); +}); + +describe("effect ancestry", () => { + it("walks up context ownership chain", () => { + const { stdout } = captureOutput(() => + effectCommand(db, 
"ancestry", ["3"], {}), + ); + expect(stdout).toContain('Effect 3: "content:models"'); + // effect 3 owner_context_id=200, which is effect 2's context_id + // effect 2 component=Declaration, owner_context_id=100 -> effect 1 component=SourceFile + expect(stdout).toContain("Declaration"); + expect(stdout).toContain("SourceFile"); + }); +}); + +describe("effect subtree", () => { + it("shows effects in context subtree", () => { + const { stdout } = captureOutput(() => + effectCommand(db, "subtree", ["100"], {}), + ); + expect(stdout).toContain("Subtree of effect context 100"); + expect(stdout).toContain("render:SourceFile"); + expect(stdout).toContain("Total effects:"); + expect(stdout).toContain("By type:"); + }); + + it("reports not found for invalid context", () => { + const { stderr } = captureOutput(() => + effectCommand(db, "subtree", ["9999"], {}), + ); + expect(stderr).toContain("No effect with context_id"); + }); +}); diff --git a/packages/trace-cli/src/test/misc-commands.test.ts b/packages/trace-cli/src/test/misc-commands.test.ts new file mode 100644 index 000000000..7ee6fde64 --- /dev/null +++ b/packages/trace-cli/src/test/misc-commands.test.ts @@ -0,0 +1,440 @@ +import { DatabaseSync } from "node:sqlite"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { runErrors } from "../commands/errors.js"; +import { fileCommand } from "../commands/file.js"; +import { runQuery } from "../commands/query.js"; +import { scopeCommand } from "../commands/scope.js"; +import { statsCommand } from "../commands/stats.js"; +import { symbolCommand } from "../commands/symbol.js"; +import { captureOutput } from "./capture.js"; +import { createTestDb } from "./test-db.js"; + +let db: DatabaseSync; + +beforeEach(() => { + db = createTestDb(); +}); +afterEach(() => { + db.close(); +}); + +describe("stats", () => { + it("shows aggregate statistics", () => { + const { stdout } = captureOutput(() => statsCommand(db, [], {})); + 
expect(stdout).toContain("Summary"); + expect(stdout).toContain("Effects: 4"); + expect(stdout).toContain("Refs: 3"); + expect(stdout).toContain("Edges: 5"); + expect(stdout).toContain("Components: 2"); + expect(stdout).toContain("Render nodes: 8"); + expect(stdout).toContain("Symbols: 3"); + expect(stdout).toContain("Scopes: 2"); + expect(stdout).toContain("Output files: 2"); + expect(stdout).toContain("Errors: 1"); + }); + + it("shows overhead analysis", () => { + const { stdout } = captureOutput(() => statsCommand(db, [], {})); + expect(stdout).toContain("Overhead"); + expect(stdout).toContain("Framework effects:"); + expect(stdout).toContain("Effects per component:"); + }); + + it("shows effects by type", () => { + const { stdout } = captureOutput(() => statsCommand(db, [], {})); + expect(stdout).toContain("Effects by type"); + expect(stdout).toContain("render"); + expect(stdout).toContain("content"); + expect(stdout).toContain("binder"); + }); + + it("shows ref usage", () => { + const { stdout } = captureOutput(() => statsCommand(db, [], {})); + expect(stdout).toContain("Ref usage"); + expect(stdout).toContain("Never tracked:"); + expect(stdout).toContain("Completely unused:"); + }); + + it("shows top components", () => { + const { stdout } = captureOutput(() => statsCommand(db, [], {})); + expect(stdout).toContain("Top components"); + expect(stdout).toContain("SourceFile"); + expect(stdout).toContain("Declaration"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + statsCommand(db, [], { json: true }), + ); + const parsed = JSON.parse(stdout); + expect(parsed.effects).toBe(4); + expect(parsed.refs).toBe(3); + expect(parsed.edges).toBe(5); + expect(parsed.components).toBe(2); + expect(parsed.renderNodes).toBe(8); + expect(parsed.symbols).toBe(3); + expect(parsed.scopes).toBe(2); + expect(parsed.outputFiles).toBe(2); + expect(parsed.errors).toBe(1); + }); +}); + +describe("errors", () => { + it("lists render errors", () => { + 
const { stdout } = captureOutput(() => runErrors(db, {})); + expect(stdout).toContain("1 render error(s)"); + expect(stdout).toContain("TypeError: Cannot read property x"); + expect(stdout).toContain("at Declaration (/src/decl.tsx:15:3)"); + }); + + it("shows component stack with source locations", () => { + const { stdout } = captureOutput(() => runErrors(db, { allFrames: true })); + expect(stdout).toContain("Component stack:"); + expect(stdout).toContain("at SourceFile #2"); + expect(stdout).toContain("source-file.tsx:10:5)"); + expect(stdout).toContain("at Declaration #3"); + expect(stdout).toContain("declaration.tsx:25:3)"); + }); + + it("shows first stack line", () => { + const { stdout } = captureOutput(() => runErrors(db, {})); + expect(stdout).toContain("at Declaration"); + }); + + it("shows empty when no errors", () => { + db.exec("DELETE FROM render_errors"); + const { stdout } = captureOutput(() => runErrors(db, {})); + expect(stdout).toContain("No render errors"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => runErrors(db, { json: true })); + const parsed = JSON.parse(stdout); + expect(parsed.name).toBe("TypeError"); + expect(parsed.message).toContain("Cannot read property"); + }); +}); + +describe("file", () => { + it("lists output files", () => { + const { stdout } = captureOutput(() => fileCommand(db, "list", [], {})); + expect(stdout).toContain("2 file(s) generated"); + expect(stdout).toContain("src/models.ts"); + expect(stdout).toContain("src/index.ts"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "list", [], { json: true }), + ); + const lines = stdout.split("\n").filter(Boolean); + expect(lines.length).toBe(2); + const parsed = JSON.parse(lines[0]); + expect(parsed.path).toBe("src/models.ts"); + }); + + it("shows file content", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "show", ["src/models.ts"], {}), + ); + 
expect(stdout).toContain("src/models.ts"); + expect(stdout).toContain("export interface Foo"); + }); + + it("shows empty when no files", () => { + db.exec("DELETE FROM output_files"); + const { stdout } = captureOutput(() => fileCommand(db, "list", [], {})); + expect(stdout).toContain("No output files recorded"); + }); + + describe("search", () => { + it("finds text in file content and shows component stack", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "search", ["src/models.ts", "interface", "Foo"], {}), + ); + expect(stdout).toContain("interface Foo"); + expect(stdout).toContain("at Declaration #3"); + expect(stdout).toContain("at SourceFile #2"); + }); + + it("finds cross-node text", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "search", ["src/models.ts", "export", "interface"], {}), + ); + expect(stdout).toContain("export interface"); + }); + + it("shows source locations in stack trace format", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "search", ["src/models.ts", "interface", "Foo"], { + allFrames: true, + }), + ); + expect(stdout).toContain("at Declaration #3"); + expect(stdout).toContain("declaration.tsx:25:3)"); + expect(stdout).toContain("at SourceFile #2"); + expect(stdout).toContain("source-file.tsx:10:5)"); + }); + + it("shows 3 lines of context", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "search", ["src/models.ts", "bar:", "string"], {}), + ); + // Should include line before and after the match line + expect(stdout).toContain("export interface Foo {"); + expect(stdout).toContain("bar: string;"); + expect(stdout).toContain("}"); + }); + + it("matches by path suffix", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "search", ["models.ts", "interface"], {}), + ); + expect(stdout).toContain("interface Foo"); + }); + + it("finds import text in fragment subtree", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "search", 
["src/models.ts", "import"], {}), + ); + expect(stdout).toContain("import { Bar }"); + expect(stdout).toContain("at SourceFile #2"); + }); + + it("reports no match", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "search", ["src/models.ts", "nonexistent"], {}), + ); + expect(stdout).toContain("No text matching"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + fileCommand(db, "search", ["src/models.ts", "interface", "Foo"], { + json: true, + }), + ); + const parsed = JSON.parse(stdout); + expect(parsed.text).toBe("interface Foo"); + expect(parsed.offset).toBeTypeOf("number"); + expect(parsed.textNodeId).toBeTypeOf("number"); + expect(parsed.stack).toBeInstanceOf(Array); + }); + + it("maps text nodes correctly when seq differs from DFS order", () => { + // Add a type reference text node with a high seq (created late, e.g. via + // reactive resolution) that renders BETWEEN existing text nodes in the tree. + // Declaration(3) -> TypeRef(10) -> text(11, "Bar", seq=100) + // In the file content "Bar" appears as the type in "bar: Bar" which is + // between "interface Foo {" and "}". The text node's seq (100) is higher + // than closing "}" text node (seq=6), but DFS order places it correctly. + db.exec(` + INSERT INTO render_nodes VALUES (10, 3, 'component', 'TypeRef', NULL, + '/home/user/packages/typescript/src/components/type-ref.tsx', 5, 1, NULL, NULL, 100); + INSERT INTO render_nodes VALUES (11, 10, 'text', NULL, NULL, NULL, NULL, NULL, NULL, 'string', 101); + `); + // Update file content to include the type reference text node's value + // Text node DFS order: Fragment->text(6) "import..." then Declaration->text(4) "export interface..." + // then Declaration->TypeRef->text(11) "string" (which is already in text(4)) + // This test just verifies the DFS walk collects nodes correctly even with + // high-seq children. 
+ const { stdout } = captureOutput(() => + fileCommand(db, "search", ["src/models.ts", "interface", "Foo"], { + json: true, + }), + ); + const parsed = JSON.parse(stdout); + expect(parsed.textNodeId).toBe(4); + }); + + it("resolves correct node when same text appears in multiple tree branches", () => { + // Simulate: "Foo" appears both in the declaration (node 4, under Declaration) + // and as a type reference (node 12, under a sibling MemberExpr). + // Search for "interface Foo" should match the declaration, not the reference. + db.exec(` + INSERT INTO render_nodes VALUES (10, 2, 'component', 'MemberExpr', NULL, NULL, NULL, NULL, NULL, NULL, 200); + INSERT INTO render_nodes VALUES (12, 10, 'text', NULL, NULL, NULL, NULL, NULL, NULL, 'Foo', 201); + `); + // File content has "interface Foo" only once, at the declaration site + const { stdout } = captureOutput(() => + fileCommand(db, "search", ["src/models.ts", "interface", "Foo"], { + json: true, + }), + ); + const parsed = JSON.parse(stdout); + // The match should map to a node within the Declaration subtree (node 4), + // not the MemberExpr reference (node 12) + expect(parsed.textNodeId).toBe(4); + }); + }); +}); + +describe("query", () => { + it("executes raw SQL and shows results", () => { + const { stdout } = captureOutput(() => + runQuery(db, ["SELECT COUNT(*) as n FROM effects"], {}), + ); + expect(stdout).toContain("n"); + expect(stdout).toContain("4"); + }); + + it("shows no results message", () => { + const { stdout } = captureOutput(() => + runQuery(db, ["SELECT * FROM effects WHERE id = -1"], {}), + ); + expect(stdout).toContain("(no results)"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + runQuery(db, ["SELECT id, name FROM effects LIMIT 1"], { json: true }), + ); + const parsed = JSON.parse(stdout); + expect(parsed).toHaveProperty("id"); + expect(parsed).toHaveProperty("name"); + }); + + it("shows sql error for invalid query", () => { + const { stderr } = 
captureOutput(() => { + try { + runQuery(db, ["INVALID SQL"], {}); + } catch { + // process.exit is called, so we catch + } + }); + expect(stderr).toContain("SQL error"); + }); + + it("respects limit", () => { + const { stdout } = captureOutput(() => + runQuery(db, ["SELECT * FROM render_nodes"], { limit: 2 }), + ); + // Should show a "more rows" message since there are 8 nodes + expect(stdout).toContain("more rows"); + }); +}); + +describe("symbol", () => { + it("lists all symbols", () => { + const { stdout } = captureOutput(() => symbolCommand(db, "list", [], {})); + expect(stdout).toContain("Foo"); + expect(stdout).toContain("bar"); + expect(stdout).toContain("Baz"); + }); + + it("filters by name", () => { + const { stdout } = captureOutput(() => + symbolCommand(db, "list", [], { name: "Foo" }), + ); + expect(stdout).toContain("Foo"); + expect(stdout).not.toContain("bar"); + }); + + it("shows symbol flags", () => { + const { stdout } = captureOutput(() => symbolCommand(db, "list", [], {})); + expect(stdout).toContain("member"); + expect(stdout).toContain("transient"); + }); + + it("shows original name when different", () => { + const { stdout } = captureOutput(() => symbolCommand(db, "list", [], {})); + expect(stdout).toContain("original: BazOriginal"); + }); + + it("shows symbol details", () => { + const { stdout } = captureOutput(() => + symbolCommand(db, "show", ["1"], {}), + ); + expect(stdout).toContain('Symbol 1: "Foo"'); + expect(stdout).toContain("Scope:"); + }); + + it("shows json for symbol show", () => { + const { stdout } = captureOutput(() => + symbolCommand(db, "show", ["1"], { json: true }), + ); + const parsed = JSON.parse(stdout); + expect(parsed.name).toBe("Foo"); + expect(parsed.scope_id).toBe(1); + }); + + it("reports not found", () => { + const { stderr } = captureOutput(() => + symbolCommand(db, "show", ["999"], {}), + ); + expect(stderr).toContain("not found"); + }); + + it("returns json list", () => { + const { stdout } = captureOutput(() 
=> + symbolCommand(db, "list", [], { json: true }), + ); + const lines = stdout.split("\n").filter(Boolean); + expect(lines.length).toBe(3); + }); + + it("shows empty message", () => { + const { stdout } = captureOutput(() => + symbolCommand(db, "list", [], { name: "nonexistent" }), + ); + expect(stdout).toContain("No symbols found"); + }); +}); + +describe("scope", () => { + it("lists all scopes", () => { + const { stdout } = captureOutput(() => scopeCommand(db, "list", [], {})); + expect(stdout).toContain("global"); + expect(stdout).toContain("Foo"); + }); + + it("shows member scope flag", () => { + const { stdout } = captureOutput(() => scopeCommand(db, "list", [], {})); + expect(stdout).toContain("[member]"); + }); + + it("filters by name", () => { + const { stdout } = captureOutput(() => + scopeCommand(db, "list", [], { name: "global" }), + ); + expect(stdout).toContain("global"); + expect(stdout).not.toContain("Foo"); + }); + + it("shows scope details", () => { + const { stdout } = captureOutput(() => scopeCommand(db, "show", ["2"], {})); + expect(stdout).toContain('Scope 2: "Foo"'); + expect(stdout).toContain("Parent:"); + expect(stdout).toContain("Member scope: true"); + expect(stdout).toContain("Owner symbol:"); + }); + + it("lists scope symbols", () => { + const { stdout } = captureOutput(() => scopeCommand(db, "show", ["1"], {})); + expect(stdout).toContain("Symbols:"); + expect(stdout).toContain("Foo"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + scopeCommand(db, "show", ["2"], { json: true }), + ); + const parsed = JSON.parse(stdout); + expect(parsed.name).toBe("Foo"); + expect(parsed.is_member_scope).toBe(1); + }); + + it("reports not found", () => { + const { stderr } = captureOutput(() => + scopeCommand(db, "show", ["999"], {}), + ); + expect(stderr).toContain("not found"); + }); + + it("shows empty message", () => { + const { stdout } = captureOutput(() => + scopeCommand(db, "list", [], { name: "nonexistent" }), 
+ ); + expect(stdout).toContain("No scopes found"); + }); +}); diff --git a/packages/trace-cli/src/test/ref.test.ts b/packages/trace-cli/src/test/ref.test.ts new file mode 100644 index 000000000..aac08db20 --- /dev/null +++ b/packages/trace-cli/src/test/ref.test.ts @@ -0,0 +1,183 @@ +import { DatabaseSync } from "node:sqlite"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { refCommand } from "../commands/ref.js"; +import { captureOutput } from "./capture.js"; +import { createTestDb } from "./test-db.js"; + +let db: DatabaseSync; + +beforeEach(() => { + db = createTestDb(); +}); +afterEach(() => { + db.close(); +}); + +describe("ref list", () => { + it("lists all refs", () => { + const { stdout } = captureOutput(() => refCommand(db, "list", [], {})); + // 3 refs in seed data + expect(stdout).toContain("ref"); + expect(stdout).toContain("computed"); + }); + + it("filters by type/kind", () => { + const { stdout } = captureOutput(() => + refCommand(db, "list", [], { type: "computed" }), + ); + expect(stdout).toContain("computed"); + expect(stdout).not.toMatch(/^\s+\d+\s+ref\s/m); + }); + + it("filters by min-trackers", () => { + const { stdout } = captureOutput(() => + refCommand(db, "list", [], { minTrackers: 3 }), + ); + // ref 1 is tracked by effects 2, 3, 4 = 3 trackers + expect(stdout).toContain("1"); + }); + + it("shows unused refs", () => { + const { stdout } = captureOutput(() => + refCommand(db, "list", [], { unused: true }), + ); + // ref 3 (unusedRef) has no edges + expect(stdout).toContain("3"); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + refCommand(db, "list", [], { json: true }), + ); + const lines = stdout.split("\n").filter(Boolean); + expect(lines.length).toBe(3); + const parsed = JSON.parse(lines[0]); + expect(parsed).toHaveProperty("id"); + expect(parsed).toHaveProperty("kind"); + expect(parsed).toHaveProperty("tracked_by"); + expect(parsed).toHaveProperty("triggered"); + }); + + 
it("shows empty message", () => { + const { stdout } = captureOutput(() => + refCommand(db, "list", [], { type: "nonexistent" }), + ); + expect(stdout).toContain("No refs found"); + }); +}); + +describe("ref show", () => { + it("shows ref details", () => { + const { stdout } = captureOutput(() => refCommand(db, "show", ["1"], {})); + expect(stdout).toContain("Ref 1"); + expect(stdout).toContain("kind: ref"); + expect(stdout).toContain("Tracked by 3 effects"); + }); + + it("shows writers", () => { + const { stdout } = captureOutput(() => refCommand(db, "show", ["2"], {})); + expect(stdout).toContain("Written by"); + expect(stdout).toContain("content:models"); + }); + + it("shows json with relationships", () => { + const { stdout } = captureOutput(() => + refCommand(db, "show", ["1"], { json: true }), + ); + const parsed = JSON.parse(stdout); + expect(parsed.ref.kind).toBe("ref"); + expect(parsed.trackedBy.length).toBe(3); + }); + + it("reports not found", () => { + const { stderr } = captureOutput(() => refCommand(db, "show", ["999"], {})); + expect(stderr).toContain("not found"); + }); +}); + +describe("ref chain", () => { + it("shows reactive chain from a ref", () => { + const { stdout } = captureOutput(() => refCommand(db, "chain", ["1"], {})); + expect(stdout).toContain("ref 1"); + expect(stdout).toContain("triggers effect"); + }); + + it("detects cycles", () => { + // Add a cycle: effect 3 triggers ref 2, and we add an edge where effect 2 is triggered-by ref 2 + db.exec(` + INSERT INTO edges VALUES (100, 'triggered-by', 2, 2, NULL, NULL, NULL, NULL, NULL); + INSERT INTO edges VALUES (101, 'trigger', 2, 1, NULL, NULL, NULL, NULL, NULL); + `); + // Should not infinite loop — cycle: ref 1 -> effect 3 -> ref 2 -> effect 2 -> ref 1 + const { stdout } = captureOutput(() => refCommand(db, "chain", ["1"], {})); + expect(stdout).toContain("ref 1"); + // chain should terminate (visited set prevents revisiting ref 1) + }); +}); + +describe("ref hotspots", () => { + 
it("shows refs sorted by tracker count", () => { + const { stdout } = captureOutput(() => refCommand(db, "hotspots", [], {})); + expect(stdout).toContain("most trackers"); + // ref 1 has 3 trackers — should be first + const lines = stdout.split("\n").filter((l) => l.trim().match(/^\d/)); + expect(lines.length).toBeGreaterThan(0); + }); + + it("returns json", () => { + const { stdout } = captureOutput(() => + refCommand(db, "hotspots", [], { json: true }), + ); + const first = JSON.parse(stdout.split("\n")[0]); + expect(first).toHaveProperty("tracked_by"); + expect(first).toHaveProperty("triggered"); + }); +}); + +describe("ref fanout", () => { + it("shows which effects track a ref", () => { + const { stdout } = captureOutput(() => refCommand(db, "fanout", ["1"], {})); + expect(stdout).toContain("Ref 1"); + expect(stdout).toContain("tracked by 3 effects"); + }); + + it("groups trackers by component", () => { + const { stdout } = captureOutput(() => refCommand(db, "fanout", ["1"], {})); + // effects 2 and 3 have component Declaration (or none), effect 4 has no component + expect(stdout).toContain("Declaration"); + }); + + it("shows writers", () => { + const { stdout } = captureOutput(() => refCommand(db, "fanout", ["2"], {})); + expect(stdout).toContain("Written by"); + }); + + it("reports not found", () => { + const { stderr } = captureOutput(() => + refCommand(db, "fanout", ["999"], {}), + ); + expect(stderr).toContain("not found"); + }); +}); + +describe("ref ownership", () => { + it("shows component ancestry for a ref", () => { + const { stdout } = captureOutput(() => + refCommand(db, "ownership", ["2"], {}), + ); + expect(stdout).toContain("Ref 2"); + expect(stdout).toContain("Created by: effect 3"); + expect(stdout).toContain("Component ancestry"); + }); + + it("handles ref with no creator", () => { + // Add a ref with no creator + db.exec( + "INSERT INTO refs VALUES (10, 'ref', 'orphan', NULL, NULL, NULL, NULL, 10)", + ); + const { stdout } = captureOutput(() 
=> + refCommand(db, "ownership", ["10"], {}), + ); + expect(stdout).toContain("Created outside reactive tracking"); + }); +}); diff --git a/packages/trace-cli/src/test/test-db.ts b/packages/trace-cli/src/test/test-db.ts new file mode 100644 index 000000000..33b311579 --- /dev/null +++ b/packages/trace-cli/src/test/test-db.ts @@ -0,0 +1,195 @@ +import { DatabaseSync } from "node:sqlite"; + +const SCHEMA = ` + CREATE TABLE effects ( + id INTEGER PRIMARY KEY, name TEXT, type TEXT, context_id INTEGER, + owner_context_id INTEGER, component TEXT, source_file TEXT, + source_line INTEGER, source_col INTEGER, seq INTEGER + ); + CREATE TABLE refs ( + id INTEGER PRIMARY KEY, kind TEXT, label TEXT, + created_by_effect_id INTEGER, source_file TEXT, + source_line INTEGER, source_col INTEGER, seq INTEGER + ); + CREATE TABLE edges ( + seq INTEGER PRIMARY KEY, type TEXT NOT NULL, effect_id INTEGER NOT NULL, + ref_id INTEGER, target_id INTEGER, target_key TEXT, caused_by INTEGER, + source_file TEXT, source_line INTEGER + ); + CREATE TABLE scheduler_jobs ( + seq INTEGER PRIMARY KEY, event TEXT NOT NULL, effect_id INTEGER, + immediate INTEGER, queue_size INTEGER + ); + CREATE TABLE scheduler_flushes (seq INTEGER PRIMARY KEY, jobs_run INTEGER); + CREATE TABLE render_nodes ( + id INTEGER PRIMARY KEY, parent_id INTEGER, kind TEXT NOT NULL, + name TEXT, props TEXT, source_file TEXT, source_line INTEGER, + source_col INTEGER, context_id INTEGER, value TEXT, seq INTEGER + ); + CREATE TABLE symbols ( + id INTEGER PRIMARY KEY, name TEXT NOT NULL, original_name TEXT, + scope_id INTEGER, owner_symbol_id INTEGER, render_node_id INTEGER, + is_member INTEGER, is_transient INTEGER, is_alias INTEGER, + metadata TEXT, seq INTEGER + ); + CREATE TABLE scopes ( + id INTEGER PRIMARY KEY, name TEXT NOT NULL, parent_id INTEGER, + owner_symbol_id INTEGER, render_node_id INTEGER, + is_member_scope INTEGER, metadata TEXT, seq INTEGER + ); + CREATE TABLE render_errors ( + id INTEGER PRIMARY KEY AUTOINCREMENT, 
name TEXT, message TEXT, + stack TEXT, component_stack TEXT, seq INTEGER + ); + CREATE TABLE output_files ( + id INTEGER PRIMARY KEY AUTOINCREMENT, path TEXT NOT NULL, + filetype TEXT, render_node_id INTEGER, content TEXT, seq INTEGER + ); + CREATE TABLE directories ( + id INTEGER PRIMARY KEY AUTOINCREMENT, path TEXT NOT NULL UNIQUE, + seq INTEGER + ); + CREATE TABLE effect_lifecycle ( + id INTEGER PRIMARY KEY AUTOINCREMENT, effect_id INTEGER NOT NULL, + event TEXT NOT NULL, trigger_ref_id INTEGER, source_file TEXT, + source_line INTEGER, source_col INTEGER, seq INTEGER + ); + CREATE TABLE diagnostics ( + id INTEGER PRIMARY KEY AUTOINCREMENT, message TEXT NOT NULL, + severity TEXT, source_file TEXT, source_line INTEGER, + source_col INTEGER, component_stack TEXT, seq INTEGER + ); + CREATE TABLE source_maps ( + id INTEGER PRIMARY KEY AUTOINCREMENT, output_path TEXT NOT NULL, + map_json TEXT NOT NULL, output_text TEXT + ); +`; + +/** + * Creates an in-memory SQLite database with the trace schema and seed data. + * Seed data models a small render tree: + * root -> SourceFile(comp) -> Declaration(comp) -> text node + * Effects own contexts and track/trigger refs. 
+ */ +export function createTestDb(): DatabaseSync { + const db = new DatabaseSync(":memory:"); + db.exec(SCHEMA); + seedData(db); + return db; +} + +function seedData(db: DatabaseSync) { + // Render tree: root(1) -> SourceFile(2) -> Declaration(3) -> text(4) + // -> Fragment(5) -> text(6) + db.exec(` + INSERT INTO render_nodes VALUES (1, NULL, 'root', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1); + INSERT INTO render_nodes VALUES (2, 1, 'component', 'SourceFile', '{"path":"src/models.ts"}', + '/home/user/packages/typescript/src/components/source-file.tsx', 10, 5, 100, NULL, 2); + INSERT INTO render_nodes VALUES (3, 2, 'component', 'Declaration', NULL, + '/home/user/packages/typescript/src/components/declaration.tsx', 25, 3, 200, NULL, 5); + INSERT INTO render_nodes VALUES (4, 3, 'text', NULL, NULL, NULL, NULL, NULL, NULL, 'export interface Foo { + bar: string; +}', 6); + INSERT INTO render_nodes VALUES (5, 2, 'fragment', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 3); + INSERT INTO render_nodes VALUES (6, 5, 'text', NULL, NULL, NULL, NULL, NULL, NULL, 'import { Bar } from "bar"; +', 4); + INSERT INTO render_nodes VALUES (7, 2, 'memo', 'mapJoin', NULL, + '/home/user/packages/core/src/utils.tsx', 100, 1, 300, NULL, 7); + INSERT INTO render_nodes VALUES (8, 7, 'customContext', 'NamePolicy', NULL, NULL, NULL, NULL, NULL, NULL, 8); + `); + + // Effects: render effects for the components, plus a computed memo + db.exec(` + INSERT INTO effects VALUES (1, 'render:SourceFile', 'render', 100, NULL, 'SourceFile', + '/home/user/packages/typescript/src/components/source-file.tsx', 15, 1, 1); + INSERT INTO effects VALUES (2, 'render:Declaration', 'render', 200, 100, 'Declaration', + '/home/user/packages/typescript/src/components/declaration.tsx', 30, 1, 2); + INSERT INTO effects VALUES (3, 'content:models', 'content', 300, 200, NULL, + '/home/user/packages/typescript/src/components/declaration.tsx', 35, 1, 3); + INSERT INTO effects VALUES (4, 'binder:resolve', 'binder', 400, 100, 
NULL, NULL, NULL, NULL, 4); + `); + + // Refs: two reactive values + db.exec(` + INSERT INTO refs VALUES (1, 'ref', 'allTypes', 1, '/home/user/packages/typescript/src/models.ts', 10, 1, 1); + INSERT INTO refs VALUES (2, 'computed', 'typeCount', 3, + '/home/user/packages/typescript/src/components/declaration.tsx', 40, 1, 2); + INSERT INTO refs VALUES (3, 'ref', 'unusedRef', 2, '/home/user/packages/typescript/src/other.ts', 5, 1, 3); + `); + + // Edges: effect 2 tracks ref 1, effect 3 triggers ref 2, effect 3 triggered-by ref 1 + db.exec(` + INSERT INTO edges VALUES (1, 'track', 2, 1, NULL, NULL, NULL, NULL, NULL); + INSERT INTO edges VALUES (2, 'trigger', 3, 2, NULL, NULL, NULL, NULL, NULL); + INSERT INTO edges VALUES (3, 'triggered-by', 3, 1, NULL, NULL, NULL, NULL, NULL); + INSERT INTO edges VALUES (4, 'track', 3, 1, NULL, NULL, NULL, NULL, NULL); + INSERT INTO edges VALUES (5, 'track', 4, 1, NULL, NULL, NULL, NULL, NULL); + `); + + // Scheduler + db.exec(` + INSERT INTO scheduler_jobs VALUES (1, 'run', 2, 0, 1); + INSERT INTO scheduler_jobs VALUES (2, 'run', 3, 0, 2); + INSERT INTO scheduler_flushes VALUES (1, 2); + `); + + // Effect lifecycle + db.exec(` + INSERT INTO effect_lifecycle VALUES (1, 2, 'ran', NULL, NULL, NULL, NULL, 10); + INSERT INTO effect_lifecycle VALUES (2, 2, 'ran', 1, NULL, NULL, NULL, 20); + INSERT INTO effect_lifecycle VALUES (3, 3, 'ran', NULL, NULL, NULL, NULL, 11); + INSERT INTO effect_lifecycle VALUES (4, 3, 'skipped', NULL, NULL, NULL, NULL, 30); + `); + + // Scopes: global scope with a child member scope + db.exec(` + INSERT INTO scopes VALUES (1, 'global', NULL, NULL, 2, 0, NULL, 1); + INSERT INTO scopes VALUES (2, 'Foo', 1, 1, 3, 1, NULL, 2); + `); + + // Symbols + db.exec(` + INSERT INTO symbols VALUES (1, 'Foo', 'Foo', 1, NULL, 3, 0, 0, 0, NULL, 1); + INSERT INTO symbols VALUES (2, 'bar', 'bar', 2, 1, NULL, 1, 0, 0, NULL, 2); + INSERT INTO symbols VALUES (3, 'Baz', 'BazOriginal', 1, NULL, NULL, 0, 1, 0, NULL, 3); + `); + + // 
Output files — content must equal concatenation of descendant text nodes + db.prepare( + "INSERT INTO output_files VALUES (1, 'src/models.ts', 'typescript', 2, ?, 1)", + ).run( + 'import { Bar } from "bar";\nexport interface Foo {\n bar: string;\n}', + ); + db.exec(` + INSERT INTO output_files VALUES (2, 'src/index.ts', 'typescript', 2, 'export { Foo } from "./models";', 2); + `); + + // Render errors + db.prepare("INSERT INTO render_errors VALUES (1, ?, ?, ?, ?, 1)").run( + "TypeError", + "Cannot read property x of undefined", + "TypeError: Cannot read property x\n at Declaration (/src/decl.tsx:15:3)", + JSON.stringify([ + { + name: "SourceFile", + renderNodeId: 2, + source: { + fileName: + "/home/user/packages/typescript/src/components/source-file.tsx", + lineNumber: 10, + columnNumber: 5, + }, + }, + { + name: "Declaration", + renderNodeId: 3, + source: { + fileName: + "/home/user/packages/typescript/src/components/declaration.tsx", + lineNumber: 25, + columnNumber: 3, + }, + }, + ]), + ); +} diff --git a/packages/trace-cli/src/test/types.test.ts b/packages/trace-cli/src/test/types.test.ts new file mode 100644 index 000000000..9bbdc326b --- /dev/null +++ b/packages/trace-cli/src/test/types.test.ts @@ -0,0 +1,245 @@ +import { DatabaseSync } from "node:sqlite"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { + formatComponentStack, + outputFileContextsCte, + outputFileRenderNodesCte, + printPaginationFooter, + requireId, + shortPath, +} from "../types.js"; +import { captureOutput } from "./capture.js"; +import { createTestDb } from "./test-db.js"; + +// eslint-disable-next-line no-control-regex +const stripAnsi = (s: string) => s.replace(/\x1b\[[0-9;]*m/g, ""); + +describe("shortPath", () => { + it("converts absolute path to relative from cwd", () => { + const cwd = process.cwd(); + const abs = cwd + "/src/types.ts"; + expect(shortPath(abs)).toBe("src/types.ts"); + }); + + it("handles paths outside cwd with ../", () => { + 
const result = shortPath("/some/other/path.ts"); + expect(result).toContain("path.ts"); + // Should be relative, not absolute + expect(result).not.toMatch(/^\//); + }); +}); + +describe("requireId", () => { + it("returns parsed integer", () => { + expect(requireId(["42"], "usage")).toBe(42); + }); + + it("exits on missing arg", () => { + const mockExit = vi.spyOn(process, "exit").mockImplementation(() => { + throw new Error("exit"); + }); + expect(() => requireId([], "Usage: foo")).toThrow("exit"); + mockExit.mockRestore(); + }); + + it("exits on NaN", () => { + const mockExit = vi.spyOn(process, "exit").mockImplementation(() => { + throw new Error("exit"); + }); + expect(() => requireId(["abc"], "Usage: foo")).toThrow("exit"); + mockExit.mockRestore(); + }); +}); + +describe("formatComponentStack", () => { + it("formats entries with source locations", () => { + const cwd = process.cwd(); + const json = JSON.stringify([ + { + name: "App", + source: { + fileName: cwd + "/src/app.tsx", + lineNumber: 10, + columnNumber: 3, + }, + }, + { + name: "Child", + source: { fileName: cwd + "/src/child.tsx", lineNumber: 20 }, + }, + ]); + const result = stripAnsi(formatComponentStack(json, true)!); + expect(result).toContain("at App (src/app.tsx:10:3)"); + expect(result).toContain("at Child (src/child.tsx:20)"); + }); + + it("formats entries without source locations", () => { + const json = JSON.stringify([{ name: "Anonymous" }]); + const result = stripAnsi(formatComponentStack(json, true)!); + expect(result).toBe(" at Anonymous"); + }); + + it("includes render node IDs when present", () => { + const cwd = process.cwd(); + const json = JSON.stringify([ + { + name: "App", + renderNodeId: 42, + source: { + fileName: cwd + "/src/app.tsx", + lineNumber: 10, + columnNumber: 3, + }, + }, + { name: "Child", renderNodeId: 99 }, + ]); + const result = stripAnsi(formatComponentStack(json, true)!); + expect(result).toContain("at App #42 (src/app.tsx:10:3)"); + expect(result).toContain("at 
Child #99"); + }); + + it("hides node_modules frames by default", () => { + const json = JSON.stringify([ + { + name: "UserComp", + source: { + fileName: "/home/user/my-project/src/app.tsx", + lineNumber: 5, + }, + }, + { + name: "LibComp", + source: { + fileName: "/home/user/node_modules/@alloy-js/core/src/lib.tsx", + lineNumber: 10, + }, + }, + { name: "NoSource" }, + ]); + const result = stripAnsi(formatComponentStack(json)!); + expect(result).toContain("at UserComp"); + expect(result).not.toContain("at LibComp"); + // Sourceless frames are hidden (treated as external) + expect(result).not.toContain("at NoSource"); + expect(result).toContain( + "2 external frames hidden (use --all-frames to show)", + ); + }); + + it("shows all frames when allFrames is true", () => { + const json = JSON.stringify([ + { + name: "UserComp", + source: { + fileName: "/home/user/my-project/src/app.tsx", + lineNumber: 5, + }, + }, + { + name: "LibComp", + source: { + fileName: "/home/user/node_modules/@alloy-js/core/src/lib.tsx", + lineNumber: 10, + }, + }, + ]); + const result = stripAnsi(formatComponentStack(json, true)!); + expect(result).toContain("at UserComp"); + expect(result).toContain("at LibComp"); + expect(result).not.toContain("external frames hidden"); + }); + + it("returns undefined when all frames are library and not showing all", () => { + const json = JSON.stringify([ + { + name: "LibComp", + source: { + fileName: "/home/user/node_modules/@alloy-js/core/src/lib.tsx", + lineNumber: 10, + }, + }, + ]); + expect(formatComponentStack(json)).toBeUndefined(); + }); + + it("returns undefined for invalid JSON", () => { + expect(formatComponentStack("not json")).toBeUndefined(); + }); +}); + +describe("printPaginationFooter", () => { + let db: DatabaseSync; + + beforeEach(() => { + db = createTestDb(); + }); + afterEach(() => { + db.close(); + }); + + it("prints footer when more results exist", () => { + const { stdout } = captureOutput(() => + printPaginationFooter( + db, + 
"SELECT COUNT(*) as n FROM render_nodes WHERE kind = 'component'", + [], + 1, + 1, + ), + ); + expect(stdout).toContain("showing 1 of 2"); + }); + + it("does not print when shown < limit", () => { + const { stdout } = captureOutput(() => + printPaginationFooter( + db, + "SELECT COUNT(*) as n FROM render_nodes WHERE kind = 'component'", + [], + 10, + 2, + ), + ); + expect(stdout).toBe(""); + }); +}); + +describe("outputFileContextsCte", () => { + let db: DatabaseSync; + + beforeEach(() => { + db = createTestDb(); + // Link render_node 2 (SourceFile) to context_id 100 + db.exec("UPDATE render_nodes SET context_id = 100 WHERE id = 2"); + }); + afterEach(() => { + db.close(); + }); + + it("returns valid SQL subquery for context lookup", () => { + const cte = outputFileContextsCte(); + const sql = `SELECT * FROM effects WHERE context_id IN ${cte}`; + const rows = db.prepare(sql).all("%models%") as any[]; + // Should find effects whose context_id matches SourceFile's context hierarchy + expect(rows.length).toBeGreaterThan(0); + }); +}); + +describe("outputFileRenderNodesCte", () => { + let db: DatabaseSync; + + beforeEach(() => { + db = createTestDb(); + }); + afterEach(() => { + db.close(); + }); + + it("returns valid SQL subquery for render node descendants", () => { + const cte = outputFileRenderNodesCte(); + const sql = `SELECT * FROM render_nodes WHERE id IN ${cte}`; + const rows = db.prepare(sql).all("%models%") as any[]; + // SourceFile (node 2) and all descendants: 3, 4, 5, 6, 7, 8 + expect(rows.length).toBeGreaterThanOrEqual(6); + }); +}); diff --git a/packages/trace-cli/src/types.ts b/packages/trace-cli/src/types.ts new file mode 100644 index 000000000..31642d92a --- /dev/null +++ b/packages/trace-cli/src/types.ts @@ -0,0 +1,161 @@ +import { relative } from "node:path"; +import type { DatabaseSync } from "node:sqlite"; + +export interface Opts { + json?: boolean; + limit?: number; + depth?: number; + sourceFile?: string; + outputFile?: string; + component?: 
string; + name?: string; + type?: string; + minTrackers?: number; + unused?: boolean; + framework?: boolean; + allFrames?: boolean; +} + +export type Db = DatabaseSync; + +/** + * Returns a SQL subquery that finds all context_ids belonging to an output file, + * by walking the context ownership chain from the source file's render node. + */ +export function outputFileContextsCte(): string { + return `( + WITH RECURSIVE file_contexts(cid) AS ( + SELECT rn.context_id + FROM output_files of2 + JOIN render_nodes rn ON rn.id = of2.render_node_id + WHERE of2.path LIKE ? AND rn.context_id IS NOT NULL + UNION ALL + SELECT e.context_id + FROM effects e + JOIN file_contexts fc ON e.owner_context_id = fc.cid + WHERE e.context_id IS NOT NULL + ) + SELECT cid FROM file_contexts + )`; +} + +/** + * Returns a SQL subquery that finds all render_node ids that are descendants + * of an output file's render node in the render tree. + */ +export function outputFileRenderNodesCte(): string { + return `( + WITH RECURSIVE desc_nodes(id) AS ( + SELECT render_node_id FROM output_files WHERE path LIKE ? + UNION ALL + SELECT rn.id FROM render_nodes rn JOIN desc_nodes d ON rn.parent_id = d.id + ) + SELECT id FROM desc_nodes + )`; +} + +/** + * Convert an absolute path to a display-friendly relative path from cwd. + * Falls back to the original path if it can't be relativized. + */ +export function shortPath(p: string): string { + const rel = relative(process.cwd(), p); + // If relative path starts with too many '../', it's not useful — but still + // shorter than the absolute path in most cases. Return it as-is. 
+ return rel || p; +} + +export function requireId(args: string[], usage: string): number { + if (!args[0]) { + console.error(usage); + process.exit(1); + } + const id = parseInt(args[0], 10); + if (isNaN(id)) { + console.error(usage); + process.exit(1); + } + return id; +} + +export function printPaginationFooter( + db: Db, + countSql: string, + params: any[], + limit: number, + shown: number, +) { + if (shown >= limit) { + const total = (db.prepare(countSql).get(...params) as any)?.n; + if (total > limit) { + console.log( + `\n (showing ${limit} of ${total}, use --limit to show more)`, + ); + } + } +} + +interface StackEntry { + name: string; + renderNodeId?: number; + source?: { fileName?: string; lineNumber?: number; columnNumber?: number }; +} + +/** + * A frame is "external" if its source is inside node_modules or if it has + * no source location at all. Matches the devtools filtering approach. + */ +function isExternalFrame(entry: StackEntry): boolean { + if (!entry.source?.fileName) return true; + return entry.source.fileName.includes("/node_modules/"); +} + +// ANSI color helpers +const bold = (s: string) => `\x1b[1m${s}\x1b[22m`; +const dim = (s: string) => `\x1b[2m${s}\x1b[22m`; +const cyan = (s: string) => `\x1b[36m${s}\x1b[39m`; + +function formatEntry(entry: StackEntry): string { + const id = entry.renderNodeId != null ? dim(` #${entry.renderNodeId}`) : ""; + const loc = entry.source; + if (loc?.fileName) { + const parts = [shortPath(loc.fileName)]; + if (loc.lineNumber != null) parts.push(String(loc.lineNumber)); + if (loc.columnNumber != null) parts.push(String(loc.columnNumber)); + return ` at ${bold(entry.name)}${id} ${cyan(`(${parts.join(":")})`)}`; + } + return ` at ${bold(entry.name)}${id}`; +} + +/** + * Formats a JSON component_stack string as a stack-trace-style string. + * When allFrames is false (default), only user frames are shown and a + * hint about hidden library frames is appended. 
+ */ +export function formatComponentStack( + json: string, + allFrames = false, +): string | undefined { + try { + const stack = JSON.parse(json) as StackEntry[]; + if (allFrames) { + return stack.map(formatEntry).join("\n"); + } + + const userFrames = stack.filter((e) => !isExternalFrame(e)); + const hiddenCount = stack.length - userFrames.length; + const lines = userFrames.map(formatEntry); + + if (hiddenCount > 0 && lines.length > 0) { + lines.push( + dim( + ` ... ${hiddenCount} external frames hidden (use --all-frames to show)`, + ), + ); + } + + return lines.length > 0 ? lines.join("\n") : undefined; + } catch { + return undefined; + } +} diff --git a/packages/trace-cli/tsconfig.json b/packages/trace-cli/tsconfig.json new file mode 100644 index 000000000..215578d31 --- /dev/null +++ b/packages/trace-cli/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "lib": ["es2023"], + "module": "NodeNext", + "moduleResolution": "NodeNext", + "target": "es2022", + "strict": true, + "skipLibCheck": true, + "isolatedModules": true, + "declaration": true, + "sourceMap": true, + "declarationMap": true, + "outDir": "dist", + "rootDir": "src" + }, + "include": ["src/**/*.ts"] +} diff --git a/packages/trace-cli/vitest.config.ts b/packages/trace-cli/vitest.config.ts new file mode 100644 index 000000000..5e58d016f --- /dev/null +++ b/packages/trace-cli/vitest.config.ts @@ -0,0 +1,15 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + pool: "forks", + poolOptions: { + forks: { + execArgv: [ + "--experimental-sqlite", + "--no-warnings=ExperimentalWarning", + ], + }, + }, + }, +}); diff --git a/packages/typescript/package.json b/packages/typescript/package.json index d58ad1bb9..176f5a648 100644 --- a/packages/typescript/package.json +++ b/packages/typescript/package.json @@ -8,27 +8,31 @@ }, "exports": { ".": { - "development": "./src/index.ts", + "source": "./src/index.ts", + "development": "./dist/dev/src/index.js", "import": 
"./dist/src/index.js" }, "./stc": { - "development": "./src/components/stc/index.ts", + "source": "./src/components/stc/index.ts", + "development": "./dist/dev/src/components/stc/index.js", "import": "./dist/src/components/stc/index.js" }, "./testing": { - "development": "./testing/index.ts", + "source": "./testing/index.ts", + "development": "./dist/dev/testing/index.js", "import": "./dist/testing/index.js" } }, "imports": { "#components/*": { - "development": "./src/components/*", + "source": "./src/components/*", + "development": "./dist/dev/src/components/*", "default": "./dist/src/components/*" } }, "scripts": { "generate-docs": "api-extractor run", - "build": "alloy build && pnpm run generate-docs", + "build": "alloy build --with-dev && pnpm run generate-docs", "clean": "rimraf dist/ .temp/", "test:watch": "vitest -w", "watch": "alloy build --watch", diff --git a/packages/typescript/vitest.config.ts b/packages/typescript/vitest.config.ts index a98451981..5fa2542c0 100644 --- a/packages/typescript/vitest.config.ts +++ b/packages/typescript/vitest.config.ts @@ -2,6 +2,14 @@ import alloyPlugin from "@alloy-js/rollup-plugin"; import { defineConfig } from "vitest/config"; export default defineConfig({ + resolve: { + conditions: ["source"], + }, + ssr: { + resolve: { + conditions: ["source"], + }, + }, esbuild: { jsx: "preserve", sourcemap: "both", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 951bc459c..c7e42c0b3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -834,6 +834,22 @@ importers: specifier: 'catalog:' version: 3.2.4(@types/debug@4.1.12)(@types/node@24.10.9)(esbuild@0.25.8)(jiti@2.6.1)(tsx@4.20.3)(yaml@2.8.0) + packages/trace-cli: + dependencies: + diff-match-patch: + specifier: ^1.0.5 + version: 1.0.5 + devDependencies: + '@types/diff-match-patch': + specifier: ^1.0.36 + version: 1.0.36 + '@types/node': + specifier: 'catalog:' + version: 24.10.9 + typescript: + specifier: 'catalog:' + version: 5.9.3 + packages/typescript: dependencies: 
'@alloy-js/core': @@ -4233,11 +4249,13 @@ packages: glob@10.4.5: resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me hasBin: true glob@11.0.3: resolution: {integrity: sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==} engines: {node: 20 || >=22} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me hasBin: true globals@14.0.0: @@ -5801,10 +5819,12 @@ packages: tar@6.2.1: resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} + deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me tar@7.4.3: resolution: {integrity: sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==} engines: {node: '>=18'} + deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me temporal-polyfill@0.3.0: resolution: {integrity: sha512-qNsTkX9K8hi+FHDfHmf22e/OGuXmfBm9RqNismxBrnSmZVJKegQ+HYYXT+R7Ha8F/YSm2Y34vmzD4cxMu2u95g==} diff --git a/scripts/strip-dev-exports.js b/scripts/strip-dev-exports.js index 58a99544c..f229fdb75 100644 --- a/scripts/strip-dev-exports.js +++ b/scripts/strip-dev-exports.js @@ -6,27 +6,27 @@ import path from "path"; const pkgPath = path.resolve(process.cwd(), "./package.json"); const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8")); -// Recursively remove "development" keys from exports -function removeDevExports(exportsField) { +// Recursively remove "source" keys from exports +function removeSourceExports(exportsField) { if (exportsField && typeof exportsField === "object") { - if ("development" in exportsField) { - delete exportsField.development; + if ("source" in exportsField) { + delete exportsField.source; } // Recursively handle nested export objects for (const key of Object.keys(exportsField)) { - removeDevExports(exportsField[key]); + removeSourceExports(exportsField[key]); } } } if (pkg.exports) { - removeDevExports(pkg.exports); + removeSourceExports(pkg.exports); } if (pkg.imports) { - removeDevExports(pkg.imports); + removeSourceExports(pkg.imports); } // Write the modified package.json back fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n", "utf8"); -console.log("Stripped development exports from package.json."); +console.log("Stripped source exports from package.json.");