From c2ab22eb945344115669829195d28b0db361e3d8 Mon Sep 17 00:00:00 2001 From: Ditadi Date: Mon, 12 Jan 2026 22:52:40 +0000 Subject: [PATCH 1/4] feat(observability): add standard errors on appkit --- packages/appkit/src/analytics/query.ts | 16 +- .../appkit/src/analytics/tests/query.test.ts | 12 +- packages/appkit/src/cache/index.ts | 6 +- .../appkit/src/cache/storage/persistent.ts | 9 +- .../appkit/src/connectors/lakebase/client.ts | 41 +- .../appkit/src/connectors/lakebase/utils.ts | 20 +- .../src/connectors/sql-warehouse/client.ts | 45 +- .../src/connectors/tests/lakebase.test.ts | 4 +- .../appkit/src/context/service-context.ts | 35 +- .../observability/errors/authentication.ts | 59 +++ .../appkit/src/observability/errors/base.ts | 122 +++++ .../src/observability/errors/configuration.ts | 60 +++ .../src/observability/errors/connection.ts | 60 +++ .../src/observability/errors/execution.ts | 61 +++ .../appkit/src/observability/errors/index.ts | 30 ++ .../observability/errors/initialization.ts | 52 ++ .../appkit/src/observability/errors/server.ts | 52 ++ .../observability/errors/tests/errors.test.ts | 482 ++++++++++++++++++ .../appkit/src/observability/errors/tunnel.ts | 53 ++ .../src/observability/errors/validation.ts | 58 +++ packages/appkit/src/observability/index.ts | 16 + packages/appkit/src/plugin/dev-reader.ts | 7 +- packages/appkit/src/plugin/plugin.ts | 11 +- packages/appkit/src/server/index.ts | 9 +- .../appkit/src/server/tests/server.test.ts | 6 +- packages/appkit/src/server/vite-dev-server.ts | 7 +- .../src/stream/arrow-stream-processor.ts | 11 +- packages/appkit/src/stream/buffers.ts | 7 +- .../tests/arrow-stream-processor.test.ts | 6 +- packages/appkit/src/utils/env-validator.ts | 6 +- 30 files changed, 1243 insertions(+), 120 deletions(-) create mode 100644 packages/appkit/src/observability/errors/authentication.ts create mode 100644 packages/appkit/src/observability/errors/base.ts create mode 100644 packages/appkit/src/observability/errors/configuration.ts create mode 100644 packages/appkit/src/observability/errors/connection.ts create mode 100644 packages/appkit/src/observability/errors/execution.ts create mode 100644 packages/appkit/src/observability/errors/index.ts create mode 100644 packages/appkit/src/observability/errors/initialization.ts create mode 100644 packages/appkit/src/observability/errors/server.ts create mode 100644 packages/appkit/src/observability/errors/tests/errors.test.ts create mode 100644 packages/appkit/src/observability/errors/tunnel.ts create mode 100644 packages/appkit/src/observability/errors/validation.ts create mode 100644 packages/appkit/src/observability/index.ts diff --git a/packages/appkit/src/analytics/query.ts b/packages/appkit/src/analytics/query.ts index c8916871..e5a9e4f4 100644 --- a/packages/appkit/src/analytics/query.ts +++ b/packages/appkit/src/analytics/query.ts @@ -2,6 +2,7 @@ import { createHash } from "node:crypto"; import type { sql } from "@databricks/sdk-experimental"; import { isSQLTypeMarker, type SQLTypeMarker, sql as sqlHelpers } from "shared"; import { getWorkspaceId } from "../context"; +import { ValidationError } from "../observability/errors"; type SQLParameterValue = SQLTypeMarker | null | undefined; @@ -45,10 +46,11 @@ export class QueryProcessor { // only allow parameters that exist in the query for (const key of Object.keys(parameters)) { if (!queryParams.has(key)) { - throw new Error( - `Parameter "${key}" not found in query. 
Valid parameters: ${ - Array.from(queryParams).join(", ") || "none" - }`, + const validParams = Array.from(queryParams).join(", ") || "none"; + throw ValidationError.invalidValue( + key, + parameters[key], + `a parameter defined in the query (valid: ${validParams})`, ); } } @@ -74,8 +76,10 @@ export class QueryProcessor { } if (!isSQLTypeMarker(value)) { - throw new Error( - `Parameter "${key}" must be a SQL type. Use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean().`, + throw ValidationError.invalidValue( + key, + value, + "SQL type (use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean())", ); } diff --git a/packages/appkit/src/analytics/tests/query.test.ts b/packages/appkit/src/analytics/tests/query.test.ts index 864f276d..9616440d 100644 --- a/packages/appkit/src/analytics/tests/query.test.ts +++ b/packages/appkit/src/analytics/tests/query.test.ts @@ -47,7 +47,7 @@ describe("QueryProcessor", () => { expect(() => { processor.convertToSQLParameters(query, parameters); }).toThrow( - 'Parameter "malicious_param" not found in query. Valid parameters: user_id', + "Invalid value for malicious_param: expected a parameter defined in the query (valid: user_id)", ); }); @@ -61,7 +61,7 @@ describe("QueryProcessor", () => { expect(() => { processor.convertToSQLParameters(query, parameters); - }).toThrow('Parameter "admin_flag" not found in query'); + }).toThrow("Invalid value for admin_flag"); }); test("should allow parameters with underscores and mixed case", () => { @@ -86,7 +86,7 @@ describe("QueryProcessor", () => { expect(() => { processor.convertToSQLParameters(query, parameters); }).toThrow( - 'Parameter "user_id" not found in query. Valid parameters: none', + "Invalid value for user_id: expected a parameter defined in the query (valid: none)", ); }); @@ -135,7 +135,7 @@ describe("QueryProcessor", () => { expect(() => { processor.convertToSQLParameters(query, attackParameters); - }).toThrow('Parameter "admin_override" not found in query'); + }).toThrow("Invalid value for admin_override"); }); test("should handle duplicate parameter names in query correctly", () => { @@ -252,9 +252,7 @@ describe("QueryProcessor", () => { expect(() => { processor.convertToSQLParameters(query, parameters); - }).toThrow( - 'Parameter "userId" must be a SQL type. Use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean().', - ); + }).toThrow("Invalid value for userId"); }); }); }); diff --git a/packages/appkit/src/cache/index.ts b/packages/appkit/src/cache/index.ts index ec377f92..a561accc 100644 --- a/packages/appkit/src/cache/index.ts +++ b/packages/appkit/src/cache/index.ts @@ -2,6 +2,7 @@ import { createHash } from "node:crypto"; import { WorkspaceClient } from "@databricks/sdk-experimental"; import type { CacheConfig, CacheStorage } from "shared"; import { LakebaseConnector } from "@/connectors"; +import { InitializationError } from "../observability/errors"; import type { Counter, TelemetryProvider } from "../telemetry"; import { SpanStatusCode, TelemetryManager } from "../telemetry"; import { deepMerge } from "../utils"; @@ -72,8 +73,9 @@ export class CacheManager { */ static getInstanceSync(): CacheManager { if (!CacheManager.instance) { - throw new Error( - "CacheManager not initialized. 
Ensure AppKit.create() has completed before accessing the cache.", + throw InitializationError.notInitialized( + "CacheManager", + "Ensure AppKit.create() has completed before accessing the cache", ); } diff --git a/packages/appkit/src/cache/storage/persistent.ts b/packages/appkit/src/cache/storage/persistent.ts index 9309a8ba..89e29837 100644 --- a/packages/appkit/src/cache/storage/persistent.ts +++ b/packages/appkit/src/cache/storage/persistent.ts @@ -1,6 +1,7 @@ import { createHash } from "node:crypto"; import type { CacheConfig, CacheEntry, CacheStorage } from "shared"; import type { LakebaseConnector } from "../../connectors"; +import { ValidationError } from "../../observability/errors"; import { lakebaseStorageDefaults } from "./defaults"; /** @@ -104,8 +105,10 @@ export class PersistentStorage implements CacheStorage { const byteSize = keyBytes.length + valueBytes.length; if (byteSize > this.maxEntryBytes) { - throw new Error( - `Cache entry too large: ${byteSize} bytes exceeds maximum of ${this.maxEntryBytes} bytes`, + throw ValidationError.invalidValue( + "cache entry size", + byteSize, + `maximum ${this.maxEntryBytes} bytes`, ); } @@ -251,7 +254,7 @@ export class PersistentStorage implements CacheStorage { /** Generate a 64-bit hash for the cache key using SHA256 */ private hashKey(key: string): bigint { - if (!key) throw new Error("Cache key cannot be empty"); + if (!key) throw ValidationError.missingField("key"); const hash = createHash("sha256").update(key).digest(); return hash.readBigInt64BE(0); } diff --git a/packages/appkit/src/connectors/lakebase/client.ts b/packages/appkit/src/connectors/lakebase/client.ts index e05d2827..a41cf9a3 100644 --- a/packages/appkit/src/connectors/lakebase/client.ts +++ b/packages/appkit/src/connectors/lakebase/client.ts @@ -9,6 +9,12 @@ import { TelemetryManager, type TelemetryProvider, } from "@/telemetry"; +import { + AuthenticationError, + ConfigurationError, + ConnectionError, + ValidationError, +} from "../../observability/errors"; import { deepMerge } from "../../utils"; import { lakebaseDefaults } from "./defaults"; import type { @@ -72,7 +78,11 @@ export class LakebaseConnector { // validate configuration if (this.config.maxPoolSize < 1) { - throw new Error("maxPoolSize must be at least 1"); + throw ValidationError.invalidValue( + "maxPoolSize", + this.config.maxPoolSize, + "at least 1", + ); } } @@ -282,8 +292,9 @@ export class LakebaseConnector { this.config.workspaceClient = client; return client; } catch (_error) { - throw new Error( - "Databricks workspace client not available. Either pass it in config or ensure ServiceContext is initialized.", + throw ConnectionError.clientUnavailable( + "Databricks workspace client", + "Either pass it in config or ensure ServiceContext is initialized", ); } } @@ -291,9 +302,9 @@ export class LakebaseConnector { /** Get or create connection pool */ private async getPool(): Promise { if (!this.connectionConfig) { - throw new Error( - "Lakebase connection not configured. 
" + - "Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config.", + throw ConfigurationError.invalidConnection( + "Lakebase", + "Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config", ); } @@ -382,7 +393,7 @@ export class LakebaseConnector { const workspaceClient = this.getWorkspaceClient(); const user = await workspaceClient.currentUser.me(); if (!user.userName) { - throw new Error("Failed to get current user from Databricks workspace"); + throw AuthenticationError.userLookupFailed(); } return user.userName; } @@ -394,7 +405,7 @@ export class LakebaseConnector { const apiClient = new ApiClient(config); if (!this.connectionConfig.appName) { - throw new Error(`Database app name not found in connection config`); + throw ConfigurationError.resourceNotFound("Database app name"); } const credentials = await apiClient.request({ @@ -409,8 +420,8 @@ export class LakebaseConnector { }); if (!this.validateCredentials(credentials)) { - throw new Error( - `Failed to generate credentials for instance: ${this.connectionConfig.appName}`, + throw AuthenticationError.credentialsFailed( + this.connectionConfig.appName, ); } @@ -488,16 +499,16 @@ export class LakebaseConnector { const pgDatabase = process.env.PGDATABASE; const pgAppName = process.env.PGAPPNAME; if (!pgHost || !pgDatabase || !pgAppName) { - throw new Error( - "Lakebase connection not configured. Required env vars: PGHOST, PGDATABASE, PGAPPNAME. " + - "Optional: PGPORT (default: 5432), PGSSLMODE (default: require).", + throw ConfigurationError.invalidConnection( + "Lakebase", + "Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)", ); } const pgPort = process.env.PGPORT; const port = pgPort ? parseInt(pgPort, 10) : 5432; if (Number.isNaN(port)) { - throw new Error(`Invalid port: ${pgPort}. Must be a number.`); + throw ValidationError.invalidValue("port", pgPort, "a number"); } const pgSSLMode = process.env.PGSSLMODE; @@ -519,7 +530,7 @@ export class LakebaseConnector { const url = new URL(connectionString); const appName = url.searchParams.get("appName"); if (!appName) { - throw new Error("Connection string must include appName parameter"); + throw ConfigurationError.missingConnectionParam("appName"); } return { diff --git a/packages/appkit/src/connectors/lakebase/utils.ts b/packages/appkit/src/connectors/lakebase/utils.ts index e7d79c1a..b2b1bdc2 100644 --- a/packages/appkit/src/connectors/lakebase/utils.ts +++ b/packages/appkit/src/connectors/lakebase/utils.ts @@ -1,3 +1,7 @@ +import { + ConfigurationError, + ValidationError, +} from "../../observability/errors"; import type { LakebaseConnectionConfig } from "./types"; export interface ParsedConnectionString { @@ -23,13 +27,15 @@ export function parseConnectionString( const url = new URL(cleanedString); if (url.protocol !== "postgresql:" && url.protocol !== "postgres:") { - throw new Error( - `Invalid connection string protocol: ${url.protocol}. 
Expected postgresql: or postgres:`, + throw ValidationError.invalidValue( + "protocol", + url.protocol, + "postgresql: or postgres:", ); } if (!url.hostname) { - throw new Error("Connection string must include a hostname"); + throw ValidationError.missingField("hostname"); } const dbName = url.pathname.slice(1) || "databricks_postgres"; @@ -51,9 +57,7 @@ export function parseConnectionString( } if (!database) { - throw new Error( - "Database name is required when using hostname directly (PGHOST format)", - ); + throw ValidationError.missingField("database"); } const connectionParams: LakebaseConnectionConfig = { @@ -76,11 +80,11 @@ export function parseFromEnv(): LakebaseConnectionConfig { const port = process.env.PGPORT ? parseInt(process.env.PGPORT, 10) : 5432; if (!host) { - throw new Error("PGHOST environment variable is required"); + throw ConfigurationError.missingEnvVar("PGHOST"); } if (!database) { - throw new Error("PGDATABASE environment variable is required"); + throw ConfigurationError.missingEnvVar("PGDATABASE"); } return { diff --git a/packages/appkit/src/connectors/sql-warehouse/client.ts b/packages/appkit/src/connectors/sql-warehouse/client.ts index f1186f1e..76b4204c 100644 --- a/packages/appkit/src/connectors/sql-warehouse/client.ts +++ b/packages/appkit/src/connectors/sql-warehouse/client.ts @@ -14,6 +14,11 @@ import { SpanStatusCode, TelemetryManager, } from "../../telemetry"; +import { + ConnectionError, + ExecutionError, + ValidationError, +} from "../../observability/errors"; import { executeStatementDefaults } from "./defaults"; export interface SQLWarehouseConfig { @@ -97,15 +102,11 @@ export class SQLWarehouseConnector { try { // validate required fields if (!input.statement) { - throw new Error( - "Statement is required: Please provide a SQL statement to execute", - ); + throw ValidationError.missingField("statement"); } if (!input.warehouse_id) { - throw new Error( - "Warehouse ID is required: Please provide a warehouse_id to execute the statement", - ); + throw ValidationError.missingField("warehouse_id"); } const body: sql.ExecuteStatementRequest = { @@ -136,7 +137,7 @@ export class SQLWarehouseConnector { ); if (!response) { - throw new Error("No response received from SQL Warehouse API"); + throw ConnectionError.apiFailure("SQL Warehouse"); } const status = response.status; const statementId = response.statement_id as string; @@ -168,17 +169,13 @@ export class SQLWarehouseConnector { result = this._transformDataArray(response); break; case "FAILED": - throw new Error( - `Statement failed: ${status.error?.message || "Unknown error"}`, - ); + throw ExecutionError.statementFailed(status.error?.message); case "CANCELED": - throw new Error("Statement was canceled"); + throw ExecutionError.canceled(); case "CLOSED": - throw new Error( - "Statement execution completed but results are no longer available (CLOSED state)", - ); + throw ExecutionError.resultsClosed(); default: - throw new Error(`Unknown statement state: ${status?.state}`); + throw ExecutionError.unknownState(status?.state as string); } const resultData = result.result as any; @@ -278,7 +275,7 @@ export class SQLWarehouseConnector { this._createContext(signal), ); if (!response) { - throw new Error("No response received from SQL Warehouse API"); + throw ConnectionError.apiFailure("SQL Warehouse"); } const status = response.status; @@ -303,19 +300,13 @@ export class SQLWarehouseConnector { span.setStatus({ code: SpanStatusCode.OK }); return this._transformDataArray(response); case "FAILED": - throw new 
Error( - `Statement failed: ${ - status.error?.message || "Unknown error" - }`, - ); + throw ExecutionError.statementFailed(status.error?.message); case "CANCELED": - throw new Error("Statement was canceled"); + throw ExecutionError.canceled(); case "CLOSED": - throw new Error( - "Statement execution completed but results are no longer available (CLOSED state)", - ); + throw ExecutionError.resultsClosed(); default: - throw new Error(`Unknown statement state: ${status?.state}`); + throw ExecutionError.unknownState(status?.state as string); } // continue polling after delay @@ -427,7 +418,7 @@ export class SQLWarehouseConnector { const schema = response.manifest?.schema; if (!chunks || !schema) { - throw new Error("No chunks or schema found in response"); + throw ExecutionError.missingData("chunks or schema"); } span.setAttribute("arrow.chunk_count", chunks.length); diff --git a/packages/appkit/src/connectors/tests/lakebase.test.ts b/packages/appkit/src/connectors/tests/lakebase.test.ts index 22322ac5..26d85911 100644 --- a/packages/appkit/src/connectors/tests/lakebase.test.ts +++ b/packages/appkit/src/connectors/tests/lakebase.test.ts @@ -72,7 +72,7 @@ describe("LakebaseConnector", () => { maxPoolSize: 0, workspaceClient: {} as any, }), - ).toThrow("maxPoolSize must be at least 1"); + ).toThrow("Invalid value for maxPoolSize"); }); test("should create connector with valid config", () => { @@ -96,7 +96,7 @@ describe("LakebaseConnector", () => { test("should throw when PGPORT is invalid", () => { process.env.PGPORT = "invalid"; - expect(() => new LakebaseConnector()).toThrow("Invalid port"); + expect(() => new LakebaseConnector()).toThrow("Invalid value for port"); }); test("should parse env vars correctly", () => { diff --git a/packages/appkit/src/context/service-context.ts b/packages/appkit/src/context/service-context.ts index 0492f566..217b0398 100644 --- a/packages/appkit/src/context/service-context.ts +++ b/packages/appkit/src/context/service-context.ts @@ -8,6 +8,11 @@ import { name as productName, version as productVersion, } from "../../package.json"; +import { + AuthenticationError, + ConfigurationError, + InitializationError, +} from "../observability/errors"; import type { UserContext } from "./user-context"; /** @@ -73,8 +78,9 @@ export class ServiceContext { */ static get(): ServiceContextState { if (!ServiceContext.instance) { - throw new Error( - "ServiceContext not initialized. 
Call ServiceContext.initialize() first.", + throw InitializationError.notInitialized( + "ServiceContext", + "Call ServiceContext.initialize() first", ); } return ServiceContext.instance; @@ -101,14 +107,12 @@ export class ServiceContext { userName?: string, ): UserContext { if (!token) { - throw new Error("User token is required to create user context"); + throw AuthenticationError.missingToken("user token"); } const host = process.env.DATABRICKS_HOST; if (!host) { - throw new Error( - "DATABRICKS_HOST environment variable is required for user context", - ); + throw ConfigurationError.missingEnvVar("DATABRICKS_HOST"); } const serviceCtx = ServiceContext.get(); @@ -151,7 +155,7 @@ export class ServiceContext { const currentUser = await client.currentUser.me(); if (!currentUser.id) { - throw new Error("Service user ID not found"); + throw ConfigurationError.resourceNotFound("Service user ID"); } return { @@ -179,7 +183,7 @@ export class ServiceContext { })) as { "x-databricks-org-id": string }; if (!response["x-databricks-org-id"]) { - throw new Error("Workspace ID not found"); + throw ConfigurationError.resourceNotFound("Workspace ID"); } return response["x-databricks-org-id"]; @@ -219,8 +223,9 @@ export class ServiceContext { }); if (response.warehouses.length === 0) { - throw new Error( - "Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.", + throw ConfigurationError.resourceNotFound( + "Warehouse ID", + "Please configure the DATABRICKS_WAREHOUSE_ID environment variable", ); } @@ -230,16 +235,18 @@ export class ServiceContext { firstWarehouse.state === "DELETING" || !firstWarehouse.id ) { - throw new Error( - "Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.", + throw ConfigurationError.resourceNotFound( + "Warehouse ID", + "Please configure the DATABRICKS_WAREHOUSE_ID environment variable", ); } return firstWarehouse.id; } - throw new Error( - "Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.", + throw ConfigurationError.resourceNotFound( + "Warehouse ID", + "Please configure the DATABRICKS_WAREHOUSE_ID environment variable", ); } diff --git a/packages/appkit/src/observability/errors/authentication.ts b/packages/appkit/src/observability/errors/authentication.ts new file mode 100644 index 00000000..6bc911c2 --- /dev/null +++ b/packages/appkit/src/observability/errors/authentication.ts @@ -0,0 +1,59 @@ +import { AppKitError } from "./base"; + +/** + * Error thrown when authentication fails. + * Use for missing tokens, invalid credentials, or authorization failures. + * + * @example + * ```typescript + * throw new AuthenticationError("User token is required"); + * throw new AuthenticationError("Failed to generate credentials", { cause: originalError }); + * ``` + */ +export class AuthenticationError extends AppKitError { + readonly code = "AUTHENTICATION_ERROR"; + readonly statusCode = 401; + readonly isRetryable = false; + + /** + * Create an authentication error for missing token + */ + static missingToken(tokenType = "access token"): AuthenticationError { + return new AuthenticationError(`Missing ${tokenType} in request headers`, { + context: { tokenType }, + }); + } + + /** + * Create an authentication error for missing user identity + */ + static missingUserId(): AuthenticationError { + return new AuthenticationError( + "User ID not available in request headers. 
" + + "Ensure the request has the x-forwarded-user header.", + ); + } + + /** + * Create an authentication error for credential generation failure + */ + static credentialsFailed( + instance: string, + cause?: Error, + ): AuthenticationError { + return new AuthenticationError( + `Failed to generate credentials for instance: ${instance}`, + { cause, context: { instance } }, + ); + } + + /** + * Create an authentication error for failed user lookup + */ + static userLookupFailed(cause?: Error): AuthenticationError { + return new AuthenticationError( + "Failed to get current user from Databricks workspace", + { cause }, + ); + } +} diff --git a/packages/appkit/src/observability/errors/base.ts b/packages/appkit/src/observability/errors/base.ts new file mode 100644 index 00000000..50233823 --- /dev/null +++ b/packages/appkit/src/observability/errors/base.ts @@ -0,0 +1,122 @@ +/** Fields that should be redacted in logs/serialization for security */ +const SENSITIVE_FIELD_PATTERNS = [ + /token/i, + /password/i, + /secret/i, + /credential/i, + /auth/i, + /key$/i, + /apikey/i, +]; + +/** + * Base error class for all AppKit errors. + * Provides a consistent structure for error handling across the framework. + * + * @example + * ```typescript + * // Catching errors by type + * try { + * await lakebase.query("..."); + * } catch (e) { + * if (e instanceof AuthenticationError) { + * // Re-authenticate + * } else if (e instanceof ConnectionError && e.isRetryable) { + * // Retry with backoff + * } + * } + * + * // Logging errors + * console.error(error.toJSON()); // Safe for logging, sensitive values redacted + * ``` + */ +export abstract class AppKitError extends Error { + /** Error code for programmatic error handling */ + abstract readonly code: string; + + /** HTTP status code suggestion (can be overridden) */ + abstract readonly statusCode: number; + + /** Whether this error type is generally safe to retry */ + abstract readonly isRetryable: boolean; + + /** Optional cause of the error */ + readonly cause?: Error; + + /** Additional context for the error */ + readonly context?: Record; + + constructor( + message: string, + options?: { cause?: Error; context?: Record }, + ) { + super(message); + this.name = this.constructor.name; + this.cause = options?.cause; + this.context = options?.context; + + // Maintains proper stack trace for where the error was thrown + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } + + /** + * Convert error to JSON for logging/serialization. + * Sensitive values in context are automatically redacted. 
+ */ + toJSON(): Record { + return { + name: this.name, + code: this.code, + message: this.message, + statusCode: this.statusCode, + isRetryable: this.isRetryable, + context: this.sanitizeContext(this.context), + cause: this.cause?.message, + stack: this.stack, + }; + } + + /** + * Create a human-readable string representation + */ + toString(): string { + let str = `${this.name} [${this.code}]: ${this.message}`; + if (this.cause) { + str += ` (caused by: ${this.cause.message})`; + } + return str; + } + + /** + * Sanitize context by redacting sensitive field values + */ + private sanitizeContext( + context?: Record, + ): Record | undefined { + if (!context) return undefined; + + const sanitized: Record = {}; + for (const [key, value] of Object.entries(context)) { + if (this.isSensitiveField(key)) { + sanitized[key] = "[REDACTED]"; + } else if (typeof value === "object" && value !== null) { + // Don't deep-sanitize nested objects, just indicate their type + sanitized[key] = Array.isArray(value) + ? `[Array(${value.length})]` + : "[Object]"; + } else { + sanitized[key] = value; + } + } + return sanitized; + } + + /** + * Check if a field name matches sensitive patterns + */ + private isSensitiveField(fieldName: string): boolean { + return SENSITIVE_FIELD_PATTERNS.some((pattern) => pattern.test(fieldName)); + } +} diff --git a/packages/appkit/src/observability/errors/configuration.ts b/packages/appkit/src/observability/errors/configuration.ts new file mode 100644 index 00000000..a7ce168e --- /dev/null +++ b/packages/appkit/src/observability/errors/configuration.ts @@ -0,0 +1,60 @@ +import { AppKitError } from "./base"; + +/** + * Error thrown when configuration is missing or invalid. + * Use for missing environment variables, invalid settings, or setup issues. + * + * @example + * ```typescript + * throw new ConfigurationError("DATABRICKS_HOST environment variable is required"); + * throw new ConfigurationError("Warehouse ID not found", { context: { env: "production" } }); + * ``` + */ +export class ConfigurationError extends AppKitError { + readonly code = "CONFIGURATION_ERROR"; + readonly statusCode = 500; + readonly isRetryable = false; + + /** + * Create a configuration error for missing environment variable + */ + static missingEnvVar(varName: string): ConfigurationError { + return new ConfigurationError( + `${varName} environment variable is required`, + { context: { envVar: varName } }, + ); + } + + /** + * Create a configuration error for missing resource + */ + static resourceNotFound(resource: string, hint?: string): ConfigurationError { + const message = hint + ? `${resource} not found. ${hint}` + : `${resource} not found`; + return new ConfigurationError(message, { context: { resource } }); + } + + /** + * Create a configuration error for invalid connection config + */ + static invalidConnection( + service: string, + details?: string, + ): ConfigurationError { + const message = details + ? `${service} connection not configured. 
${details}` + : `${service} connection not configured`; + return new ConfigurationError(message, { context: { service } }); + } + + /** + * Create a configuration error for missing connection string parameter + */ + static missingConnectionParam(param: string): ConfigurationError { + return new ConfigurationError( + `Connection string must include ${param} parameter`, + { context: { parameter: param } }, + ); + } +} diff --git a/packages/appkit/src/observability/errors/connection.ts b/packages/appkit/src/observability/errors/connection.ts new file mode 100644 index 00000000..de5ab9fe --- /dev/null +++ b/packages/appkit/src/observability/errors/connection.ts @@ -0,0 +1,60 @@ +import { AppKitError } from "./base"; + +/** + * Error thrown when a connection or network operation fails. + * Use for database pool errors, API failures, timeouts, etc. + * + * @example + * ```typescript + * throw new ConnectionError("Query failed", { cause: pgError }); + * throw new ConnectionError("No response received from SQL Warehouse API"); + * ``` + */ +export class ConnectionError extends AppKitError { + readonly code = "CONNECTION_ERROR"; + readonly statusCode = 503; + readonly isRetryable = true; + + /** + * Create a connection error for query failure + */ + static queryFailed(cause?: Error): ConnectionError { + return new ConnectionError("Query failed", { cause }); + } + + /** + * Create a connection error for transaction failure + */ + static transactionFailed(cause?: Error): ConnectionError { + return new ConnectionError("Transaction failed", { cause }); + } + + /** + * Create a connection error for pool errors + */ + static poolError(operation: string, cause?: Error): ConnectionError { + return new ConnectionError(`Connection pool error: ${operation}`, { + cause, + }); + } + + /** + * Create a connection error for API failures + */ + static apiFailure(service: string, cause?: Error): ConnectionError { + return new ConnectionError(`No response received from ${service} API`, { + cause, + context: { service }, + }); + } + + /** + * Create a connection error for client unavailable + */ + static clientUnavailable(clientType: string, hint?: string): ConnectionError { + const message = hint + ? `${clientType} not available. ${hint}` + : `${clientType} not available`; + return new ConnectionError(message, { context: { clientType } }); + } +} diff --git a/packages/appkit/src/observability/errors/execution.ts b/packages/appkit/src/observability/errors/execution.ts new file mode 100644 index 00000000..42de7704 --- /dev/null +++ b/packages/appkit/src/observability/errors/execution.ts @@ -0,0 +1,61 @@ +import { AppKitError } from "./base"; + +/** + * Error thrown when an operation execution fails. + * Use for statement failures, canceled operations, or unexpected states. + * + * @example + * ```typescript + * throw new ExecutionError("Statement failed: syntax error"); + * throw new ExecutionError("Statement was canceled"); + * ``` + */ +export class ExecutionError extends AppKitError { + readonly code = "EXECUTION_ERROR"; + readonly statusCode = 500; + readonly isRetryable = false; + + /** + * Create an execution error for statement failure + */ + static statementFailed(errorMessage?: string): ExecutionError { + const message = errorMessage + ? 
`Statement failed: ${errorMessage}` + : "Statement failed: Unknown error"; + return new ExecutionError(message); + } + + /** + * Create an execution error for canceled operation + */ + static canceled(): ExecutionError { + return new ExecutionError("Statement was canceled"); + } + + /** + * Create an execution error for closed/expired results + */ + static resultsClosed(): ExecutionError { + return new ExecutionError( + "Statement execution completed but results are no longer available (CLOSED state)", + ); + } + + /** + * Create an execution error for unknown state + */ + static unknownState(state: string): ExecutionError { + return new ExecutionError(`Unknown statement state: ${state}`, { + context: { state }, + }); + } + + /** + * Create an execution error for missing data + */ + static missingData(dataType: string): ExecutionError { + return new ExecutionError(`No ${dataType} found in response`, { + context: { dataType }, + }); + } +} diff --git a/packages/appkit/src/observability/errors/index.ts b/packages/appkit/src/observability/errors/index.ts new file mode 100644 index 00000000..a367b843 --- /dev/null +++ b/packages/appkit/src/observability/errors/index.ts @@ -0,0 +1,30 @@ +/** + * AppKit Error Classes + * + * Standardized error classes for consistent error handling across the framework. + * + * @example + * ```typescript + * import { ValidationError, AuthenticationError } from "@databricks/appkit"; + * + * // Validation errors + * throw new ValidationError("Invalid parameter value"); + * throw ValidationError.missingField("warehouseId"); + * + * // Authentication errors + * throw AuthenticationError.missingToken(); + * + * // Configuration errors + * throw ConfigurationError.missingEnvVar("DATABRICKS_HOST"); + * ``` + */ + +export { AuthenticationError } from "./authentication"; +export { AppKitError } from "./base"; +export { ConfigurationError } from "./configuration"; +export { ConnectionError } from "./connection"; +export { ExecutionError } from "./execution"; +export { InitializationError } from "./initialization"; +export { ServerError } from "./server"; +export { TunnelError } from "./tunnel"; +export { ValidationError } from "./validation"; diff --git a/packages/appkit/src/observability/errors/initialization.ts b/packages/appkit/src/observability/errors/initialization.ts new file mode 100644 index 00000000..755ba0ae --- /dev/null +++ b/packages/appkit/src/observability/errors/initialization.ts @@ -0,0 +1,52 @@ +import { AppKitError } from "./base"; + +/** + * Error thrown when a service or component is not properly initialized. + * Use when accessing services before they are ready. + * + * @example + * ```typescript + * throw new InitializationError("CacheManager not initialized"); + * throw new InitializationError("ServiceContext not initialized. Call ServiceContext.initialize() first."); + * ``` + */ +export class InitializationError extends AppKitError { + readonly code = "INITIALIZATION_ERROR"; + readonly statusCode = 500; + readonly isRetryable = true; + + /** + * Create an initialization error for a service that is not ready + */ + static notInitialized( + serviceName: string, + hint?: string, + ): InitializationError { + const message = hint + ? `${serviceName} not initialized. 
${hint}` + : `${serviceName} not initialized`; + return new InitializationError(message, { + context: { service: serviceName }, + }); + } + + /** + * Create an initialization error for setup failure + */ + static setupFailed(component: string, cause?: Error): InitializationError { + return new InitializationError(`Failed to initialize ${component}`, { + cause, + context: { component }, + }); + } + + /** + * Create an initialization error for migration failure + */ + static migrationFailed(cause?: Error): InitializationError { + return new InitializationError( + "Error in running migrations for persistent storage", + { cause }, + ); + } +} diff --git a/packages/appkit/src/observability/errors/server.ts b/packages/appkit/src/observability/errors/server.ts new file mode 100644 index 00000000..6af5b59f --- /dev/null +++ b/packages/appkit/src/observability/errors/server.ts @@ -0,0 +1,52 @@ +import { AppKitError } from "./base"; + +/** + * Error thrown when server lifecycle operations fail. + * Use for server start/stop issues, configuration conflicts, etc. + * + * @example + * ```typescript + * throw new ServerError("Cannot get server when autoStart is true"); + * throw new ServerError("Server not started"); + * ``` + */ +export class ServerError extends AppKitError { + readonly code = "SERVER_ERROR"; + readonly statusCode = 500; + readonly isRetryable = false; + + /** + * Create a server error for autoStart conflict + */ + static autoStartConflict(operation: string): ServerError { + return new ServerError(`Cannot ${operation} when autoStart is true`, { + context: { operation }, + }); + } + + /** + * Create a server error for server not started + */ + static notStarted(): ServerError { + return new ServerError( + "Server not started. Please start the server first by calling the start() method", + ); + } + + /** + * Create a server error for Vite dev server not initialized + */ + static viteNotInitialized(): ServerError { + return new ServerError("Vite dev server not initialized"); + } + + /** + * Create a server error for missing client directory + */ + static clientDirectoryNotFound(searchedPaths: string[]): ServerError { + return new ServerError( + `Could not find client directory. 
Searched for vite.config.ts/js + index.html in: ${searchedPaths.join(", ")}`, + { context: { searchedPaths } }, + ); + } +} diff --git a/packages/appkit/src/observability/errors/tests/errors.test.ts b/packages/appkit/src/observability/errors/tests/errors.test.ts new file mode 100644 index 00000000..c404a18f --- /dev/null +++ b/packages/appkit/src/observability/errors/tests/errors.test.ts @@ -0,0 +1,482 @@ +import { describe, expect, test } from "vitest"; +import { + AppKitError, + AuthenticationError, + ConfigurationError, + ConnectionError, + ExecutionError, + InitializationError, + ServerError, + TunnelError, + ValidationError, +} from "../index"; + +describe("AppKitError base class", () => { + test("should be an instance of Error", () => { + const error = new ValidationError("test"); + expect(error).toBeInstanceOf(Error); + expect(error).toBeInstanceOf(AppKitError); + }); + + test("should have correct name property", () => { + const error = new ValidationError("test"); + expect(error.name).toBe("ValidationError"); + }); + + test("should preserve cause", () => { + const cause = new Error("original error"); + const error = new ValidationError("wrapped error", { cause }); + expect(error.cause).toBe(cause); + expect(error.cause?.message).toBe("original error"); + }); + + test("should preserve context", () => { + const error = new ValidationError("test", { + context: { field: "email", value: "invalid" }, + }); + expect(error.context).toEqual({ field: "email", value: "invalid" }); + }); + + test("toJSON should return serializable object", () => { + const cause = new Error("cause"); + const error = new ValidationError("test message", { + cause, + context: { field: "value" }, + }); + const json = error.toJSON(); + expect(json.name).toBe("ValidationError"); + expect(json.code).toBe("VALIDATION_ERROR"); + expect(json.message).toBe("test message"); + expect(json.statusCode).toBe(400); + expect(json.isRetryable).toBe(false); + expect(json.context).toEqual({ field: "value" }); + expect(json.cause).toBe("cause"); + expect(json.stack).toBeDefined(); + }); + + test("toJSON should redact sensitive fields in context", () => { + const error = new ValidationError("test", { + context: { + userToken: "secret-token-123", + password: "my-password", + apiKey: "api-key-value", + normalField: "visible", + }, + }); + const json = error.toJSON(); + const context = json.context as Record; + expect(context.userToken).toBe("[REDACTED]"); + expect(context.password).toBe("[REDACTED]"); + expect(context.apiKey).toBe("[REDACTED]"); + expect(context.normalField).toBe("visible"); + }); + + test("toJSON should handle nested objects in context", () => { + const error = new ValidationError("test", { + context: { + config: { nested: "object" }, + items: [1, 2, 3], + }, + }); + const json = error.toJSON(); + const context = json.context as Record; + expect(context.config).toBe("[Object]"); + expect(context.items).toBe("[Array(3)]"); + }); + + test("toString should return formatted string", () => { + const error = new ValidationError("field is invalid"); + expect(error.toString()).toBe( + "ValidationError [VALIDATION_ERROR]: field is invalid", + ); + }); + + test("toString should include cause", () => { + const cause = new Error("root cause"); + const error = new ValidationError("field is invalid", { cause }); + expect(error.toString()).toContain("caused by: root cause"); + }); +}); + +describe("ValidationError", () => { + test("should have correct code and statusCode", () => { + const error = new ValidationError("test"); + 
expect(error.code).toBe("VALIDATION_ERROR"); + expect(error.statusCode).toBe(400); + expect(error.isRetryable).toBe(false); + }); + + test("missingField should create proper error", () => { + const error = ValidationError.missingField("username"); + expect(error.message).toBe("Missing required field: username"); + expect(error.context?.field).toBe("username"); + }); + + test("invalidValue should create proper error", () => { + const error = ValidationError.invalidValue("age", -5, "positive number"); + expect(error.message).toBe( + "Invalid value for age: expected positive number", + ); + expect(error.context?.field).toBe("age"); + expect(error.context?.valueType).toBe("number"); + expect(error.context?.expected).toBe("positive number"); + }); + + test("invalidValue should not store raw value for security", () => { + const error = ValidationError.invalidValue("token", "secret-token-123"); + expect(error.context?.value).toBeUndefined(); + expect(error.context?.valueType).toBe("string"); + }); + + test("invalidValue should handle null values", () => { + const error = ValidationError.invalidValue("field", null); + expect(error.context?.valueType).toBe("null"); + }); + + test("missingEnvVars should create proper error", () => { + const error = ValidationError.missingEnvVars(["API_KEY", "SECRET"]); + expect(error.message).toBe( + "Missing required environment variables: API_KEY, SECRET", + ); + expect(error.context?.missingVars).toEqual(["API_KEY", "SECRET"]); + }); +}); + +describe("AuthenticationError", () => { + test("should have correct code and statusCode", () => { + const error = new AuthenticationError("test"); + expect(error.code).toBe("AUTHENTICATION_ERROR"); + expect(error.statusCode).toBe(401); + expect(error.isRetryable).toBe(false); + }); + + test("missingToken should create proper error", () => { + const error = AuthenticationError.missingToken("bearer token"); + expect(error.message).toBe("Missing bearer token in request headers"); + expect(error.context?.tokenType).toBe("bearer token"); + }); + + test("missingUserId should create proper error", () => { + const error = AuthenticationError.missingUserId(); + expect(error.message).toContain("User ID not available"); + }); + + test("credentialsFailed should create proper error with cause", () => { + const cause = new Error("network timeout"); + const error = AuthenticationError.credentialsFailed("my-instance", cause); + expect(error.message).toContain("my-instance"); + expect(error.cause).toBe(cause); + expect(error.context?.instance).toBe("my-instance"); + }); + + test("userLookupFailed should create proper error", () => { + const error = AuthenticationError.userLookupFailed(); + expect(error.message).toContain("Failed to get current user"); + }); +}); + +describe("ConfigurationError", () => { + test("should have correct code and statusCode", () => { + const error = new ConfigurationError("test"); + expect(error.code).toBe("CONFIGURATION_ERROR"); + expect(error.statusCode).toBe(500); + expect(error.isRetryable).toBe(false); + }); + + test("missingEnvVar should create proper error", () => { + const error = ConfigurationError.missingEnvVar("DATABASE_URL"); + expect(error.message).toBe("DATABASE_URL environment variable is required"); + expect(error.context?.envVar).toBe("DATABASE_URL"); + }); + + test("resourceNotFound should create proper error with hint", () => { + const error = ConfigurationError.resourceNotFound( + "Warehouse ID", + "Set DATABRICKS_WAREHOUSE_ID", + ); + expect(error.message).toBe( + "Warehouse ID not found. 
Set DATABRICKS_WAREHOUSE_ID", + ); + expect(error.context?.resource).toBe("Warehouse ID"); + }); + + test("invalidConnection should create proper error", () => { + const error = ConfigurationError.invalidConnection( + "PostgreSQL", + "Check env vars", + ); + expect(error.message).toBe( + "PostgreSQL connection not configured. Check env vars", + ); + expect(error.context?.service).toBe("PostgreSQL"); + }); + + test("missingConnectionParam should create proper error", () => { + const error = ConfigurationError.missingConnectionParam("appName"); + expect(error.message).toBe( + "Connection string must include appName parameter", + ); + expect(error.context?.parameter).toBe("appName"); + }); +}); + +describe("ConnectionError", () => { + test("should have correct code and statusCode", () => { + const error = new ConnectionError("test"); + expect(error.code).toBe("CONNECTION_ERROR"); + expect(error.statusCode).toBe(503); + expect(error.isRetryable).toBe(true); + }); + + test("queryFailed should create proper error", () => { + const cause = new Error("timeout"); + const error = ConnectionError.queryFailed(cause); + expect(error.message).toBe("Query failed"); + expect(error.cause).toBe(cause); + }); + + test("transactionFailed should create proper error", () => { + const error = ConnectionError.transactionFailed(); + expect(error.message).toBe("Transaction failed"); + }); + + test("poolError should create proper error", () => { + const error = ConnectionError.poolError("closing connection"); + expect(error.message).toBe("Connection pool error: closing connection"); + }); + + test("apiFailure should create proper error", () => { + const error = ConnectionError.apiFailure("SQL Warehouse"); + expect(error.message).toBe("No response received from SQL Warehouse API"); + expect(error.context?.service).toBe("SQL Warehouse"); + }); + + test("clientUnavailable should create proper error with hint", () => { + const error = ConnectionError.clientUnavailable( + "Databricks client", + "Initialize ServiceContext first", + ); + expect(error.message).toBe( + "Databricks client not available. 
Initialize ServiceContext first", + ); + expect(error.context?.clientType).toBe("Databricks client"); + }); +}); + +describe("ExecutionError", () => { + test("should have correct code and statusCode", () => { + const error = new ExecutionError("test"); + expect(error.code).toBe("EXECUTION_ERROR"); + expect(error.statusCode).toBe(500); + expect(error.isRetryable).toBe(false); + }); + + test("statementFailed should create proper error", () => { + const error = ExecutionError.statementFailed("syntax error near SELECT"); + expect(error.message).toBe("Statement failed: syntax error near SELECT"); + }); + + test("statementFailed should handle undefined message", () => { + const error = ExecutionError.statementFailed(); + expect(error.message).toBe("Statement failed: Unknown error"); + }); + + test("canceled should create proper error", () => { + const error = ExecutionError.canceled(); + expect(error.message).toBe("Statement was canceled"); + }); + + test("resultsClosed should create proper error", () => { + const error = ExecutionError.resultsClosed(); + expect(error.message).toContain("no longer available"); + expect(error.message).toContain("CLOSED state"); + }); + + test("unknownState should create proper error", () => { + const error = ExecutionError.unknownState("WEIRD_STATE"); + expect(error.message).toBe("Unknown statement state: WEIRD_STATE"); + expect(error.context?.state).toBe("WEIRD_STATE"); + }); + + test("missingData should create proper error", () => { + const error = ExecutionError.missingData("chunks or schema"); + expect(error.message).toBe("No chunks or schema found in response"); + expect(error.context?.dataType).toBe("chunks or schema"); + }); +}); + +describe("InitializationError", () => { + test("should have correct code and statusCode", () => { + const error = new InitializationError("test"); + expect(error.code).toBe("INITIALIZATION_ERROR"); + expect(error.statusCode).toBe(500); + expect(error.isRetryable).toBe(true); + }); + + test("notInitialized should create proper error with hint", () => { + const error = InitializationError.notInitialized( + "CacheManager", + "Call AppKit.create() first", + ); + expect(error.message).toBe( + "CacheManager not initialized. 
Call AppKit.create() first", + ); + expect(error.context?.service).toBe("CacheManager"); + }); + + test("setupFailed should create proper error with cause", () => { + const cause = new Error("connection refused"); + const error = InitializationError.setupFailed("persistent storage", cause); + expect(error.message).toBe("Failed to initialize persistent storage"); + expect(error.cause).toBe(cause); + expect(error.context?.component).toBe("persistent storage"); + }); + + test("migrationFailed should create proper error", () => { + const cause = new Error("table already exists"); + const error = InitializationError.migrationFailed(cause); + expect(error.message).toContain("migrations"); + expect(error.cause).toBe(cause); + }); +}); + +describe("ServerError", () => { + test("should have correct code and statusCode", () => { + const error = new ServerError("test"); + expect(error.code).toBe("SERVER_ERROR"); + expect(error.statusCode).toBe(500); + expect(error.isRetryable).toBe(false); + }); + + test("autoStartConflict should create proper error", () => { + const error = ServerError.autoStartConflict("get server"); + expect(error.message).toBe("Cannot get server when autoStart is true"); + expect(error.context?.operation).toBe("get server"); + }); + + test("notStarted should create proper error", () => { + const error = ServerError.notStarted(); + expect(error.message).toContain("Server not started"); + expect(error.message).toContain("start()"); + }); + + test("viteNotInitialized should create proper error", () => { + const error = ServerError.viteNotInitialized(); + expect(error.message).toBe("Vite dev server not initialized"); + }); + + test("clientDirectoryNotFound should create proper error", () => { + const error = ServerError.clientDirectoryNotFound(["client", "src", "."]); + expect(error.message).toContain("Could not find client directory"); + expect(error.message).toContain("client, src, ."); + expect(error.context?.searchedPaths).toEqual(["client", "src", "."]); + }); +}); + +describe("TunnelError", () => { + test("should have correct code and statusCode", () => { + const error = new TunnelError("test"); + expect(error.code).toBe("TUNNEL_ERROR"); + expect(error.statusCode).toBe(502); + expect(error.isRetryable).toBe(true); + }); + + test("getterNotRegistered should create proper error", () => { + const error = TunnelError.getterNotRegistered(); + expect(error.message).toContain("Tunnel getter not registered"); + }); + + test("noConnection should create proper error", () => { + const error = TunnelError.noConnection(); + expect(error.message).toContain("No tunnel connection available"); + }); + + test("fetchFailed should create proper error with path", () => { + const cause = new Error("timeout"); + const error = TunnelError.fetchFailed("/assets/main.js", cause); + expect(error.message).toBe("Failed to fetch asset"); + expect(error.cause).toBe(cause); + expect(error.context?.path).toBe("/assets/main.js"); + }); + + test("parseError should create proper error", () => { + const cause = new SyntaxError("Unexpected token"); + const error = TunnelError.parseError("WebSocket", cause); + expect(error.message).toBe("Failed to parse WebSocket message"); + expect(error.cause).toBe(cause); + expect(error.context?.messageType).toBe("WebSocket"); + }); +}); + +describe("Error hierarchy", () => { + test("all errors should extend AppKitError", () => { + expect(new ValidationError("test")).toBeInstanceOf(AppKitError); + expect(new AuthenticationError("test")).toBeInstanceOf(AppKitError); + expect(new 
ConfigurationError("test")).toBeInstanceOf(AppKitError); + expect(new ConnectionError("test")).toBeInstanceOf(AppKitError); + expect(new ExecutionError("test")).toBeInstanceOf(AppKitError); + expect(new InitializationError("test")).toBeInstanceOf(AppKitError); + expect(new ServerError("test")).toBeInstanceOf(AppKitError); + expect(new TunnelError("test")).toBeInstanceOf(AppKitError); + }); + + test("errors can be caught by base class", () => { + try { + throw new ValidationError("test"); + } catch (e) { + expect(e).toBeInstanceOf(AppKitError); + } + }); + + test("errors can be distinguished by code", () => { + const errors = [ + new ValidationError("test"), + new AuthenticationError("test"), + new ConfigurationError("test"), + new ConnectionError("test"), + new ExecutionError("test"), + new InitializationError("test"), + new ServerError("test"), + new TunnelError("test"), + ]; + + const codes = errors.map((e) => e.code); + expect(codes).toEqual([ + "VALIDATION_ERROR", + "AUTHENTICATION_ERROR", + "CONFIGURATION_ERROR", + "CONNECTION_ERROR", + "EXECUTION_ERROR", + "INITIALIZATION_ERROR", + "SERVER_ERROR", + "TUNNEL_ERROR", + ]); + }); + + test("retryable errors should be marked correctly", () => { + // These errors are generally transient and can be retried + expect(new ConnectionError("test").isRetryable).toBe(true); + expect(new InitializationError("test").isRetryable).toBe(true); + expect(new TunnelError("test").isRetryable).toBe(true); + + // These errors require fixing, not retrying + expect(new ValidationError("test").isRetryable).toBe(false); + expect(new AuthenticationError("test").isRetryable).toBe(false); + expect(new ConfigurationError("test").isRetryable).toBe(false); + expect(new ExecutionError("test").isRetryable).toBe(false); + expect(new ServerError("test").isRetryable).toBe(false); + }); + + test("isRetryable can guide retry logic", () => { + const errors: AppKitError[] = [ + new ConnectionError("temporary network issue"), + new ValidationError("invalid input"), + new TunnelError("tunnel disconnected"), + ]; + + const retryable = errors.filter((e) => e.isRetryable); + expect(retryable).toHaveLength(2); + expect(retryable[0]).toBeInstanceOf(ConnectionError); + expect(retryable[1]).toBeInstanceOf(TunnelError); + }); +}); diff --git a/packages/appkit/src/observability/errors/tunnel.ts b/packages/appkit/src/observability/errors/tunnel.ts new file mode 100644 index 00000000..b3438503 --- /dev/null +++ b/packages/appkit/src/observability/errors/tunnel.ts @@ -0,0 +1,53 @@ +import { AppKitError } from "./base"; + +/** + * Error thrown when remote tunnel operations fail. + * Use for tunnel connection issues, message parsing failures, etc. 
+ * + * @example + * ```typescript + * throw new TunnelError("No tunnel connection available"); + * throw new TunnelError("Failed to parse WebSocket message", { cause: parseError }); + * ``` + */ +export class TunnelError extends AppKitError { + readonly code = "TUNNEL_ERROR"; + readonly statusCode = 502; + readonly isRetryable = true; + + /** + * Create a tunnel error for missing tunnel getter + */ + static getterNotRegistered(): TunnelError { + return new TunnelError( + "Tunnel getter not registered for DevFileReader singleton", + ); + } + + /** + * Create a tunnel error for no available connection + */ + static noConnection(): TunnelError { + return new TunnelError("No tunnel connection available for file read"); + } + + /** + * Create a tunnel error for asset fetch failure + */ + static fetchFailed(path: string, cause?: Error): TunnelError { + return new TunnelError("Failed to fetch asset", { + cause, + context: { path }, + }); + } + + /** + * Create a tunnel error for message parsing failure + */ + static parseError(messageType: string, cause?: Error): TunnelError { + return new TunnelError(`Failed to parse ${messageType} message`, { + cause, + context: { messageType }, + }); + } +} diff --git a/packages/appkit/src/observability/errors/validation.ts b/packages/appkit/src/observability/errors/validation.ts new file mode 100644 index 00000000..ca4ddfd5 --- /dev/null +++ b/packages/appkit/src/observability/errors/validation.ts @@ -0,0 +1,58 @@ +import { AppKitError } from "./base"; + +/** + * Error thrown when input validation fails. + * Use for invalid parameters, missing required fields, or type mismatches. + * + * @example + * ```typescript + * throw new ValidationError("Statement is required", { context: { field: "statement" } }); + * throw new ValidationError("maxPoolSize must be at least 1", { context: { value: config.maxPoolSize } }); + * ``` + */ +export class ValidationError extends AppKitError { + readonly code = "VALIDATION_ERROR"; + readonly statusCode = 400; + readonly isRetryable = false; + + /** + * Create a validation error for a missing required field + */ + static missingField(fieldName: string): ValidationError { + return new ValidationError(`Missing required field: ${fieldName}`, { + context: { field: fieldName }, + }); + } + + /** + * Create a validation error for an invalid field value. + * Note: The actual value is not stored in context for security reasons. + * Only the value's type is recorded. + */ + static invalidValue( + fieldName: string, + value: unknown, + expected?: string, + ): ValidationError { + const msg = expected + ? `Invalid value for ${fieldName}: expected ${expected}` + : `Invalid value for ${fieldName}`; + return new ValidationError(msg, { + context: { + field: fieldName, + valueType: value === null ? 
"null" : typeof value, + expected, + }, + }); + } + + /** + * Create a validation error for missing environment variables + */ + static missingEnvVars(vars: string[]): ValidationError { + return new ValidationError( + `Missing required environment variables: ${vars.join(", ")}`, + { context: { missingVars: vars } }, + ); + } +} diff --git a/packages/appkit/src/observability/index.ts b/packages/appkit/src/observability/index.ts new file mode 100644 index 00000000..c939ec98 --- /dev/null +++ b/packages/appkit/src/observability/index.ts @@ -0,0 +1,16 @@ +export type { Counter, Histogram, Span } from "@opentelemetry/api"; +export { SpanKind, SpanStatusCode } from "@opentelemetry/api"; +export { + AppKitError, + AuthenticationError, + ConfigurationError, + ConnectionError, + ExecutionError, + InitializationError, + ServerError, + TunnelError, + ValidationError, +} from "./errors"; +export { createDebug } from "./debug"; +export { WideEvent, type WideEventData } from "./wide-event"; +export type { LogLevel } from "./types"; diff --git a/packages/appkit/src/plugin/dev-reader.ts b/packages/appkit/src/plugin/dev-reader.ts index f5464753..16c2e7dc 100644 --- a/packages/appkit/src/plugin/dev-reader.ts +++ b/packages/appkit/src/plugin/dev-reader.ts @@ -1,6 +1,7 @@ import { randomUUID } from "node:crypto"; import type { TunnelConnection } from "shared"; import { isRemoteTunnelAllowedByEnv } from "@/server/remote-tunnel/gate"; +import { TunnelError } from "../observability/errors"; type TunnelConnectionGetter = ( req: import("express").Request, @@ -56,14 +57,12 @@ export class DevFileReader { req: import("express").Request, ): Promise { if (!this.getTunnelForRequest) { - throw new Error( - "Tunnel getter not registered for DevFileReader singleton", - ); + throw TunnelError.getterNotRegistered(); } const tunnel = this.getTunnelForRequest(req); if (!tunnel) { - throw new Error("No tunnel connection available for file read"); + throw TunnelError.noConnection(); } const { ws, pendingFileReads } = tunnel; diff --git a/packages/appkit/src/plugin/plugin.ts b/packages/appkit/src/plugin/plugin.ts index a8902810..08444099 100644 --- a/packages/appkit/src/plugin/plugin.ts +++ b/packages/appkit/src/plugin/plugin.ts @@ -19,6 +19,7 @@ import { runInUserContext, type UserContext, } from "../context"; +import { AuthenticationError } from "../observability/errors"; import { StreamManager } from "../stream"; import { type ITelemetry, @@ -148,17 +149,11 @@ export abstract class Plugin< } if (!token) { - throw new Error( - "User token not available in request headers. " + - "Ensure the request has the x-forwarded-access-token header.", - ); + throw AuthenticationError.missingToken("user token"); } if (!userId && !isDev) { - throw new Error( - "User ID not available in request headers. 
" + - "Ensure the request has the x-forwarded-user header.", - ); + throw AuthenticationError.missingUserId(); } const effectiveUserId = userId || "dev-user"; diff --git a/packages/appkit/src/server/index.ts b/packages/appkit/src/server/index.ts index 24810dbd..39bd771d 100644 --- a/packages/appkit/src/server/index.ts +++ b/packages/appkit/src/server/index.ts @@ -4,6 +4,7 @@ import path from "node:path"; import dotenv from "dotenv"; import express from "express"; import type { PluginPhase } from "shared"; +import { ServerError } from "../observability/errors"; import { Plugin, toPlugin } from "../plugin"; import { instrumentations } from "../telemetry"; import { RemoteTunnelController } from "./remote-tunnel/remote-tunnel-controller"; @@ -132,13 +133,11 @@ export class ServerPlugin extends Plugin { */ getServer(): HTTPServer { if (this.shouldAutoStart()) { - throw new Error("Cannot get server when autoStart is true."); + throw ServerError.autoStartConflict("get server"); } if (!this.server) { - throw new Error( - "Server not started. Please start the server first by calling the start() method.", - ); + throw ServerError.notStarted(); } return this.server; @@ -153,7 +152,7 @@ export class ServerPlugin extends Plugin { */ extend(fn: (app: express.Application) => void) { if (this.shouldAutoStart()) { - throw new Error("Cannot extend server when autoStart is true."); + throw ServerError.autoStartConflict("extend server"); } this.serverExtensions.push(fn); diff --git a/packages/appkit/src/server/tests/server.test.ts b/packages/appkit/src/server/tests/server.test.ts index 6898f627..7b876d11 100644 --- a/packages/appkit/src/server/tests/server.test.ts +++ b/packages/appkit/src/server/tests/server.test.ts @@ -323,7 +323,7 @@ describe("ServerPlugin", () => { const extensionFn = vi.fn(); expect(() => plugin.extend(extensionFn)).toThrow( - "Cannot extend server when autoStart is true.", + "Cannot extend server when autoStart is true", ); }); @@ -343,7 +343,7 @@ describe("ServerPlugin", () => { const plugin = new ServerPlugin({ autoStart: true }); expect(() => plugin.getServer()).toThrow( - "Cannot get server when autoStart is true.", + "Cannot get server when autoStart is true", ); }); @@ -351,7 +351,7 @@ describe("ServerPlugin", () => { const plugin = new ServerPlugin({ autoStart: false }); expect(() => plugin.getServer()).toThrow( - "Server not started. Please start the server first by calling the start() method.", + "Server not started. Please start the server first by calling the start() method", ); }); diff --git a/packages/appkit/src/server/vite-dev-server.ts b/packages/appkit/src/server/vite-dev-server.ts index 30ad558f..b74150f5 100644 --- a/packages/appkit/src/server/vite-dev-server.ts +++ b/packages/appkit/src/server/vite-dev-server.ts @@ -3,6 +3,7 @@ import path from "node:path"; import type express from "express"; import type { ViteDevServer as ViteDevServerType } from "vite"; import { mergeConfigDedup } from "@/utils"; +import { ServerError } from "../observability/errors"; import { BaseServer } from "./base-server"; import type { PluginEndpoints } from "./utils"; import { appKitTypesPlugin } from "../type-generator/vite-plugin"; @@ -119,9 +120,7 @@ export class ViteDevServer extends BaseServer { } } - throw new Error( - `Could not find client directory. 
Searched for vite.config.ts/js + index.html in: ${candidates.join(", ")}`, - ); + throw ServerError.clientDirectoryNotFound(candidates); } // type assertion to ensure vite is not null @@ -129,7 +128,7 @@ export class ViteDevServer extends BaseServer { vite: ViteDevServerType | null, ): asserts vite is ViteDevServerType { if (!vite) { - throw new Error("Vite dev server not initialized"); + throw ServerError.viteNotInitialized(); } } } diff --git a/packages/appkit/src/stream/arrow-stream-processor.ts b/packages/appkit/src/stream/arrow-stream-processor.ts index e229598b..b97c462f 100644 --- a/packages/appkit/src/stream/arrow-stream-processor.ts +++ b/packages/appkit/src/stream/arrow-stream-processor.ts @@ -1,4 +1,5 @@ import type { sql } from "@databricks/sdk-experimental"; +import { ExecutionError, ValidationError } from "../observability/errors"; type ResultManifest = sql.ResultManifest; type ExternalLink = sql.ExternalLink; @@ -65,7 +66,7 @@ export class ArrowStreamProcessor { signal?: AbortSignal, ): Promise { if (chunks.length === 0) { - throw new Error("No Arrow chunks provided"); + throw ValidationError.missingField("chunks"); } const buffers = await this.downloadChunksRaw(chunks, signal); @@ -126,7 +127,7 @@ export class ArrowStreamProcessor { }); if (!response.ok) { - throw new Error( + throw ExecutionError.statementFailed( `Failed to download chunk ${chunk.chunk_index}: ${response.status} ${response.statusText}`, ); } @@ -143,7 +144,7 @@ export class ArrowStreamProcessor { } if (signal?.aborted) { - throw new Error("Arrow stream processing was aborted"); + throw ExecutionError.canceled(); } if (attempt < this.options.retries - 1) { @@ -154,7 +155,7 @@ export class ArrowStreamProcessor { } } - throw new Error( + throw ExecutionError.statementFailed( `Failed to download chunk ${chunk.chunk_index} after ${this.options.retries} attempts: ${lastError?.message}`, ); } @@ -165,7 +166,7 @@ export class ArrowStreamProcessor { */ private concatenateBuffers(buffers: Uint8Array[]): Uint8Array { if (buffers.length === 0) { - throw new Error("No buffers to concatenate"); + throw ValidationError.missingField("buffers"); } if (buffers.length === 1) { diff --git a/packages/appkit/src/stream/buffers.ts b/packages/appkit/src/stream/buffers.ts index a25d815d..bddeb639 100644 --- a/packages/appkit/src/stream/buffers.ts +++ b/packages/appkit/src/stream/buffers.ts @@ -1,3 +1,4 @@ +import { ValidationError } from "../observability/errors"; import type { BufferedEvent } from "./types"; // generic ring buffer implementation @@ -11,7 +12,11 @@ export class RingBuffer { constructor(capacity: number, keyExtractor: (item: T) => string) { if (capacity <= 0) { - throw new Error("Capacity must be greater than 0"); + throw ValidationError.invalidValue( + "capacity", + capacity, + "greater than 0", + ); } this.capacity = capacity; diff --git a/packages/appkit/src/stream/tests/arrow-stream-processor.test.ts b/packages/appkit/src/stream/tests/arrow-stream-processor.test.ts index 9b04e8ab..56d15ff6 100644 --- a/packages/appkit/src/stream/tests/arrow-stream-processor.test.ts +++ b/packages/appkit/src/stream/tests/arrow-stream-processor.test.ts @@ -89,7 +89,7 @@ describe("ArrowStreamProcessor", () => { test("should throw error when no chunks provided", async () => { await expect( processor.processChunks([], createMockSchema()), - ).rejects.toThrow("No Arrow chunks provided"); + ).rejects.toThrow("Missing required field: chunks"); }); test("should process single chunk successfully", async () => { @@ -356,7 +356,7 @@ 
describe("ArrowStreamProcessor", () => { createMockSchema(), abortController.signal, ), - ).rejects.toThrow(/abort/i); + ).rejects.toThrow("Statement was canceled"); }); test("should abort in-flight requests when signal fires", async () => { @@ -397,7 +397,7 @@ describe("ArrowStreamProcessor", () => { await vi.waitFor(() => expect(fetchStarted).toBe(true)); abortController.abort(); - await expect(promise).rejects.toThrow(/abort/i); + await expect(promise).rejects.toThrow("Statement was canceled"); }); }); diff --git a/packages/appkit/src/utils/env-validator.ts b/packages/appkit/src/utils/env-validator.ts index 68b5638b..3105b355 100644 --- a/packages/appkit/src/utils/env-validator.ts +++ b/packages/appkit/src/utils/env-validator.ts @@ -1,3 +1,5 @@ +import { ValidationError } from "../observability/errors"; + export function validateEnv(envVars: string[]) { const missingVars = []; @@ -8,8 +10,6 @@ export function validateEnv(envVars: string[]) { } if (missingVars.length > 0) { - throw new Error( - `Missing required environment variables: ${missingVars.join(", ")}`, - ); + throw ValidationError.missingEnvVars(missingVars); } } From 40c3452403c4d9ccdadc581699915b63cacdead3 Mon Sep 17 00:00:00 2001 From: Ditadi Date: Tue, 13 Jan 2026 00:05:17 +0000 Subject: [PATCH 2/4] feat(observability): add structured logging --- packages/appkit/package.json | 1 + packages/appkit/src/analytics/analytics.ts | 19 +- packages/appkit/src/app/index.ts | 24 +- packages/appkit/src/cache/index.ts | 19 +- .../appkit/src/cache/storage/persistent.ts | 18 +- .../appkit/src/cache/tests/persistent.test.ts | 4 +- .../appkit/src/connectors/lakebase/client.ts | 29 +- .../src/connectors/sql-warehouse/client.ts | 48 +- .../src/connectors/tests/lakebase.test.ts | 6 +- packages/appkit/src/observability/index.ts | 4 +- packages/appkit/src/observability/logger.ts | 50 +++ .../src/observability/tests/logger.test.ts | 133 ++++++ .../observability/tests/wide-event.test.ts | 420 ++++++++++++++++++ packages/appkit/src/observability/types.ts | 4 + .../appkit/src/observability/wide-event.ts | 285 ++++++++++++ packages/appkit/src/plugin/dev-reader.ts | 5 +- packages/appkit/src/plugin/plugin.ts | 12 +- packages/appkit/src/server/index.ts | 36 +- .../remote-tunnel/remote-tunnel-controller.ts | 7 +- .../remote-tunnel/remote-tunnel-manager.ts | 29 +- .../appkit/src/server/tests/server.test.ts | 39 +- packages/appkit/src/server/vite-dev-server.ts | 7 +- .../src/stream/arrow-stream-processor.ts | 5 +- .../tests/arrow-stream-processor.test.ts | 5 +- .../appkit/src/telemetry/telemetry-manager.ts | 13 +- packages/appkit/src/type-generator/index.ts | 7 +- .../src/type-generator/query-registry.ts | 7 +- .../appkit/src/type-generator/vite-plugin.ts | 15 +- pnpm-lock.yaml | 8 + 29 files changed, 1138 insertions(+), 121 deletions(-) create mode 100644 packages/appkit/src/observability/logger.ts create mode 100644 packages/appkit/src/observability/tests/logger.test.ts create mode 100644 packages/appkit/src/observability/tests/wide-event.test.ts create mode 100644 packages/appkit/src/observability/types.ts create mode 100644 packages/appkit/src/observability/wide-event.ts diff --git a/packages/appkit/package.json b/packages/appkit/package.json index 9cb701d6..dfb8cbcc 100644 --- a/packages/appkit/package.json +++ b/packages/appkit/package.json @@ -60,6 +60,7 @@ "@types/semver": "^7.7.1", "dotenv": "^16.6.1", "express": "^4.22.0", + "obug": "^2.1.1", "pg": "^8.16.3", "semver": "^7.7.3", "shared": "workspace:*", diff --git 
a/packages/appkit/src/analytics/analytics.ts b/packages/appkit/src/analytics/analytics.ts index 5d960687..e80cee05 100644 --- a/packages/appkit/src/analytics/analytics.ts +++ b/packages/appkit/src/analytics/analytics.ts @@ -1,4 +1,5 @@ import type { WorkspaceClient } from "@databricks/sdk-experimental"; +import type express from "express"; import type { IAppRouter, PluginExecuteConfig, @@ -11,7 +12,7 @@ import { getWarehouseId, getWorkspaceClient, } from "../context"; -import type express from "express"; +import { createLogger } from "../observability/logger"; import { Plugin, toPlugin } from "../plugin"; import { queryDefaults } from "./defaults"; import { QueryProcessor } from "./query"; @@ -21,6 +22,8 @@ import type { IAnalyticsQueryRequest, } from "./types"; +const logger = createLogger("analytics"); + export class AnalyticsPlugin extends Plugin { name = "analytics"; envVars = []; @@ -95,8 +98,10 @@ export class AnalyticsPlugin extends Plugin { const { jobId } = req.params; const workspaceClient = getWorkspaceClient(); - console.log( - `Processing Arrow job request: ${jobId} for plugin: ${this.name}`, + logger.debug( + "Processing Arrow job request: %s for plugin: %s", + jobId, + this.name, ); const result = await this.getArrowData(workspaceClient, jobId); @@ -105,12 +110,14 @@ export class AnalyticsPlugin extends Plugin { res.setHeader("Content-Length", result.data.length.toString()); res.setHeader("Cache-Control", "public, max-age=3600"); - console.log( - `Sending Arrow buffer: ${result.data.length} bytes for job ${jobId}`, + logger.debug( + "Sending Arrow buffer: %d bytes for job %s", + result.data.length, + jobId, ); res.send(Buffer.from(result.data)); } catch (error) { - console.error(`Arrow job error for ${this.name}:`, error); + logger.error("Arrow job error for %s: %O", this.name, error); res.status(404).json({ error: error instanceof Error ? error.message : "Arrow job not found", plugin: this.name, diff --git a/packages/appkit/src/app/index.ts b/packages/appkit/src/app/index.ts index 9bfadfa6..c08328fa 100644 --- a/packages/appkit/src/app/index.ts +++ b/packages/appkit/src/app/index.ts @@ -1,5 +1,8 @@ import fs from "node:fs/promises"; import path from "node:path"; +import { createLogger } from "../observability/logger"; + +const logger = createLogger("app"); interface RequestLike { query?: Record; @@ -27,8 +30,9 @@ export class AppManager { ): Promise { // Security: Sanitize query key to prevent path traversal if (!queryKey || !/^[a-zA-Z0-9_-]+$/.test(queryKey)) { - console.error( - `Invalid query key format: "${queryKey}". Only alphanumeric characters, underscores, and hyphens are allowed.`, + logger.error( + "Invalid query key format: %s. 
Only alphanumeric characters, underscores, and hyphens are allowed.", + queryKey, ); return null; } @@ -44,7 +48,7 @@ export class AppManager { const queriesDir = path.resolve(process.cwd(), "config/queries"); if (!resolvedPath.startsWith(queriesDir)) { - console.error(`Invalid query path: path traversal detected`); + logger.error("Invalid query path: path traversal detected"); return null; } @@ -57,8 +61,10 @@ export class AppManager { const relativePath = path.relative(process.cwd(), resolvedPath); return await devFileReader.readFile(relativePath, req); } catch (error) { - console.error( - `Failed to read query "${queryKey}" from dev tunnel: ${(error as Error).message}`, + logger.error( + "Failed to read query %s from dev tunnel: %s", + queryKey, + (error as Error).message, ); return null; } @@ -70,11 +76,13 @@ export class AppManager { return query; } catch (error) { if ((error as NodeJS.ErrnoException).code === "ENOENT") { - console.error(`Query "${queryKey}" not found at path: ${resolvedPath}`); + logger.debug("Query %s not found at path: %s", queryKey, resolvedPath); return null; } - console.error( - `Failed to read query "${queryKey}" from server filesystem: ${(error as Error).message}`, + logger.error( + "Failed to read query %s from server filesystem: %s", + queryKey, + (error as Error).message, ); return null; } diff --git a/packages/appkit/src/cache/index.ts b/packages/appkit/src/cache/index.ts index a561accc..b06dceb4 100644 --- a/packages/appkit/src/cache/index.ts +++ b/packages/appkit/src/cache/index.ts @@ -2,13 +2,20 @@ import { createHash } from "node:crypto"; import { WorkspaceClient } from "@databricks/sdk-experimental"; import type { CacheConfig, CacheStorage } from "shared"; import { LakebaseConnector } from "@/connectors"; -import { InitializationError } from "../observability/errors"; +import { + AppKitError, + ExecutionError, + InitializationError, +} from "../observability/errors"; +import { createLogger } from "../observability/logger"; import type { Counter, TelemetryProvider } from "../telemetry"; import { SpanStatusCode, TelemetryManager } from "../telemetry"; import { deepMerge } from "../utils"; import { cacheDefaults } from "./defaults"; import { InMemoryStorage, PersistentStorage } from "./storage"; +const logger = createLogger("cache"); + /** * Cache manager class to handle cache operations. * Can be used with in-memory storage or persistent storage (Lakebase). @@ -35,7 +42,6 @@ export class CacheManager { private cleanupInProgress: boolean; private lastCleanupAttempt: number; - // Telemetry private telemetry: TelemetryProvider; private telemetryMetrics: { cacheHitCount: Counter; @@ -244,7 +250,12 @@ export class CacheManager { .catch((error) => { span.recordException(error); span.setStatus({ code: SpanStatusCode.ERROR }); - throw error; + if (error instanceof AppKitError) { + throw error; + } + throw ExecutionError.statementFailed( + error instanceof Error ? 
error.message : String(error), + ); }) .finally(() => { this.inFlightRequests.delete(cacheKey); @@ -306,7 +317,7 @@ export class CacheManager { (this.storage as PersistentStorage) .cleanupExpired() .catch((error) => { - console.debug("Error cleaning up expired entries:", error); + logger.debug("Error cleaning up expired entries: %O", error); }) .finally(() => { this.cleanupInProgress = false; diff --git a/packages/appkit/src/cache/storage/persistent.ts b/packages/appkit/src/cache/storage/persistent.ts index 89e29837..8b2298e0 100644 --- a/packages/appkit/src/cache/storage/persistent.ts +++ b/packages/appkit/src/cache/storage/persistent.ts @@ -1,9 +1,15 @@ import { createHash } from "node:crypto"; import type { CacheConfig, CacheEntry, CacheStorage } from "shared"; import type { LakebaseConnector } from "../../connectors"; -import { ValidationError } from "../../observability/errors"; +import { + InitializationError, + ValidationError, +} from "../../observability/errors"; +import { createLogger } from "../../observability/logger"; import { lakebaseStorageDefaults } from "./defaults"; +const logger = createLogger("cache:persistent"); + /** * Persistent cache storage implementation. Uses a least recently used (LRU) eviction policy * to manage memory usage and ensure efficient cache operations. @@ -48,7 +54,7 @@ export class PersistentStorage implements CacheStorage { await this.runMigrations(); this.initialized = true; } catch (error) { - console.error("Error in persistent storage initialization:", error); + logger.error("Error in persistent storage initialization: %O", error); throw error; } } @@ -81,7 +87,7 @@ export class PersistentStorage implements CacheStorage { [keyHash], ) .catch(() => { - console.debug("Error updating last_accessed time for key:", key); + logger.debug("Error updating last_accessed time for key: %s", key); }); return { @@ -305,11 +311,11 @@ export class PersistentStorage implements CacheStorage { `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_byte_size ON ${this.tableName} (byte_size); `, ); } catch (error) { - console.error( - "Error in running migrations for persistent storage:", + logger.error( + "Error in running migrations for persistent storage: %O", error, ); - throw error; + throw InitializationError.migrationFailed(error as Error); } } } diff --git a/packages/appkit/src/cache/tests/persistent.test.ts b/packages/appkit/src/cache/tests/persistent.test.ts index 2623e25b..6e658f4b 100644 --- a/packages/appkit/src/cache/tests/persistent.test.ts +++ b/packages/appkit/src/cache/tests/persistent.test.ts @@ -57,7 +57,9 @@ describe("PersistentStorage", () => { mockConnector.query.mockRejectedValue(new Error("migration failed")); - await expect(storage.initialize()).rejects.toThrow("migration failed"); + await expect(storage.initialize()).rejects.toThrow( + "Error in running migrations for persistent storage", + ); consoleErrorSpy.mockRestore(); }); diff --git a/packages/appkit/src/connectors/lakebase/client.ts b/packages/appkit/src/connectors/lakebase/client.ts index a41cf9a3..3f9e0854 100644 --- a/packages/appkit/src/connectors/lakebase/client.ts +++ b/packages/appkit/src/connectors/lakebase/client.ts @@ -10,11 +10,13 @@ import { type TelemetryProvider, } from "@/telemetry"; import { + AppKitError, AuthenticationError, ConfigurationError, ConnectionError, ValidationError, } from "../../observability/errors"; +import { createLogger } from "../../observability/logger"; import { deepMerge } from "../../utils"; import { lakebaseDefaults } from "./defaults"; import 
type { @@ -23,6 +25,8 @@ import type { LakebaseCredentials, } from "./types"; +const logger = createLogger("connectors:lakebase"); + /** * Enterprise-grade connector for Databricks Lakebase * @example Simplest - everything from env/context @@ -140,7 +144,10 @@ export class LakebaseConnector { span.recordException(error as Error); span.setStatus({ code: SpanStatusCode.ERROR }); - throw error; + if (error instanceof AppKitError) { + throw error; + } + throw ConnectionError.queryFailed(error as Error); } finally { const duration = Date.now() - startTime; this.telemetryMetrics.queryCount.add(1); @@ -223,7 +230,11 @@ export class LakebaseConnector { } span.recordException(error as Error); span.setStatus({ code: SpanStatusCode.ERROR }); - throw error; + + if (error instanceof AppKitError) { + throw error; + } + throw ConnectionError.transactionFailed(error as Error); } finally { client.release(); const duration = Date.now() - startTime; @@ -264,7 +275,7 @@ export class LakebaseConnector { async close(): Promise { if (this.pool) { await this.pool.end().catch((error: unknown) => { - console.error("Error closing connection pool:", error); + logger.error("Error closing connection pool: %O", error); }); this.pool = null; } @@ -335,9 +346,11 @@ export class LakebaseConnector { }); pool.on("error", (error: Error & { code?: string }) => { - console.error("Connection pool error:", error.message, { - code: error.code, - }); + logger.error( + "Connection pool error: %s (code: %s)", + error.message, + error.code, + ); }); return pool; @@ -380,8 +393,8 @@ export class LakebaseConnector { const oldPool = this.pool; this.pool = null; oldPool.end().catch((error: unknown) => { - console.error( - "Error closing old connection pool during rotation:", + logger.error( + "Error closing old connection pool during rotation: %O", error, ); }); diff --git a/packages/appkit/src/connectors/sql-warehouse/client.ts b/packages/appkit/src/connectors/sql-warehouse/client.ts index 76b4204c..8aeeebf2 100644 --- a/packages/appkit/src/connectors/sql-warehouse/client.ts +++ b/packages/appkit/src/connectors/sql-warehouse/client.ts @@ -3,8 +3,15 @@ import { type sql, type WorkspaceClient, } from "@databricks/sdk-experimental"; -import { ArrowStreamProcessor } from "../../stream/arrow-stream-processor"; import type { TelemetryOptions } from "shared"; +import { + AppKitError, + ConnectionError, + ExecutionError, + ValidationError, +} from "../../observability/errors"; +import { createLogger } from "../../observability/logger"; +import { ArrowStreamProcessor } from "../../stream/arrow-stream-processor"; import type { TelemetryProvider } from "../../telemetry"; import { type Counter, @@ -14,13 +21,10 @@ import { SpanStatusCode, TelemetryManager, } from "../../telemetry"; -import { - ConnectionError, - ExecutionError, - ValidationError, -} from "../../observability/errors"; import { executeStatementDefaults } from "./defaults"; +const logger = createLogger("connectors:sql-warehouse"); + export interface SQLWarehouseConfig { timeout?: number; telemetry?: TelemetryOptions; @@ -197,7 +201,13 @@ export class SQLWarehouseConnector { code: SpanStatusCode.ERROR, message: error instanceof Error ? error.message : String(error), }); - throw error; + + if (error instanceof AppKitError) { + throw error; + } + throw ExecutionError.statementFailed( + error instanceof Error ? 
error.message : String(error), + ); } finally { const duration = Date.now() - startTime; span.end(); @@ -246,8 +256,8 @@ export class SQLWarehouseConnector { // check if timeout exceeded const elapsedTime = Date.now() - startTime; if (elapsedTime > timeout) { - const error = new Error( - `Statement polling timeout exceeded after ${timeout}ms (elapsed: ${elapsedTime}ms)`, + const error = ExecutionError.statementFailed( + `Polling timeout exceeded after ${timeout}ms (elapsed: ${elapsedTime}ms)`, ); span.recordException(error); span.setStatus({ code: SpanStatusCode.ERROR }); @@ -255,7 +265,7 @@ export class SQLWarehouseConnector { } if (signal?.aborted) { - const error = new Error("Request aborted"); + const error = ExecutionError.canceled(); span.recordException(error); span.setStatus({ code: SpanStatusCode.ERROR }); throw error; @@ -319,7 +329,13 @@ export class SQLWarehouseConnector { code: SpanStatusCode.ERROR, message: error instanceof Error ? error.message : String(error), }); - throw error; + + if (error instanceof AppKitError) { + throw error; + } + throw ExecutionError.statementFailed( + error instanceof Error ? error.message : String(error), + ); } finally { span.end(); } @@ -452,8 +468,14 @@ export class SQLWarehouseConnector { status: "error", }); - console.error(`Failed Arrow job: ${jobId}`, error); - throw error; + logger.error("Failed Arrow job: %s %O", jobId, error); + + if (error instanceof AppKitError) { + throw error; + } + throw ExecutionError.statementFailed( + error instanceof Error ? error.message : String(error), + ); } }, ); diff --git a/packages/appkit/src/connectors/tests/lakebase.test.ts b/packages/appkit/src/connectors/tests/lakebase.test.ts index 26d85911..2b8b30b1 100644 --- a/packages/appkit/src/connectors/tests/lakebase.test.ts +++ b/packages/appkit/src/connectors/tests/lakebase.test.ts @@ -208,9 +208,7 @@ describe("LakebaseConnector", () => { mockQuery.mockRejectedValue(transientError); - await expect(connector.query("SELECT 1")).rejects.toThrow( - "connection reset", - ); + await expect(connector.query("SELECT 1")).rejects.toThrow("Query failed"); expect(mockQuery).toHaveBeenCalledTimes(2); }); @@ -220,7 +218,7 @@ describe("LakebaseConnector", () => { mockQuery.mockRejectedValue(syntaxError); - await expect(connector.query("SELEC 1")).rejects.toThrow("syntax error"); + await expect(connector.query("SELEC 1")).rejects.toThrow("Query failed"); expect(mockQuery).toHaveBeenCalledTimes(1); }); }); diff --git a/packages/appkit/src/observability/index.ts b/packages/appkit/src/observability/index.ts index c939ec98..3a2f9960 100644 --- a/packages/appkit/src/observability/index.ts +++ b/packages/appkit/src/observability/index.ts @@ -11,6 +11,6 @@ export { TunnelError, ValidationError, } from "./errors"; -export { createDebug } from "./debug"; -export { WideEvent, type WideEventData } from "./wide-event"; +export { createLogger, type Logger } from "./logger"; export type { LogLevel } from "./types"; +export { WideEvent, type WideEventData } from "./wide-event"; diff --git a/packages/appkit/src/observability/logger.ts b/packages/appkit/src/observability/logger.ts new file mode 100644 index 00000000..1ab68427 --- /dev/null +++ b/packages/appkit/src/observability/logger.ts @@ -0,0 +1,50 @@ +import { format } from "node:util"; +import { createDebug as createObug } from "obug"; + +/** + * Logger interface for AppKit components + */ +export interface Logger { + /** Debug output (disabled by default, enable via DEBUG env var) */ + debug: (message: string, ...args: unknown[]) 
=> void; + /** Info output (always visible, for operational messages) */ + info: (message: string, ...args: unknown[]) => void; + /** Warning output (always visible, for degraded states) */ + warn: (message: string, ...args: unknown[]) => void; + /** Error output (always visible, for failures) */ + error: (message: string, ...args: unknown[]) => void; +} + +/** + * Create a logger instance for a specific scope + * @param scope - The scope identifier (e.g., "connectors:lakebase") + * @returns Logger instance with debug, info, warn, and error methods + * + * @example + * ```typescript + * const logger = createLogger("connectors:lakebase"); + * logger.debug("Connection established with pool size: %d", poolSize); + * logger.info("Server started on port %d", port); + * logger.warn("Connection pool running low: %d remaining", available); + * logger.error("Failed to connect: %O", error); + * ``` + */ +export function createLogger(scope: string): Logger { + const debug = createObug(`appkit:${scope}`, { useColors: true }); + const prefix = `[appkit:${scope}]`; + + return { + debug: (message: string, ...args: unknown[]) => { + debug(message, ...args); + }, + info: (message: string, ...args: unknown[]) => { + console.log(prefix, format(message, ...args)); + }, + warn: (message: string, ...args: unknown[]) => { + console.warn(prefix, format(message, ...args)); + }, + error: (message: string, ...args: unknown[]) => { + console.error(prefix, format(message, ...args)); + }, + }; +} diff --git a/packages/appkit/src/observability/tests/logger.test.ts b/packages/appkit/src/observability/tests/logger.test.ts new file mode 100644 index 00000000..d1706328 --- /dev/null +++ b/packages/appkit/src/observability/tests/logger.test.ts @@ -0,0 +1,133 @@ +import { afterEach, beforeEach, describe, expect, test, vi } from "vitest"; +import { createLogger } from "../logger"; + +describe("createLogger", () => { + let consoleLogSpy: ReturnType; + let consoleWarnSpy: ReturnType; + let consoleErrorSpy: ReturnType; + + beforeEach(() => { + consoleLogSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + consoleWarnSpy = vi.spyOn(console, "warn").mockImplementation(() => {}); + consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => {}); + }); + + afterEach(() => { + consoleLogSpy.mockRestore(); + consoleWarnSpy.mockRestore(); + consoleErrorSpy.mockRestore(); + }); + + test("returns logger with all methods", () => { + const logger = createLogger("test"); + + expect(logger).toHaveProperty("debug"); + expect(logger).toHaveProperty("info"); + expect(logger).toHaveProperty("warn"); + expect(logger).toHaveProperty("error"); + expect(typeof logger.debug).toBe("function"); + expect(typeof logger.info).toBe("function"); + expect(typeof logger.warn).toBe("function"); + expect(typeof logger.error).toBe("function"); + }); + + test("info uses console.log with correct prefix", () => { + const logger = createLogger("test-scope"); + + logger.info("hello world"); + + expect(consoleLogSpy).toHaveBeenCalledWith( + "[appkit:test-scope]", + "hello world", + ); + }); + + test("warn uses console.warn with correct prefix", () => { + const logger = createLogger("my-plugin"); + + logger.warn("something is wrong"); + + expect(consoleWarnSpy).toHaveBeenCalledWith( + "[appkit:my-plugin]", + "something is wrong", + ); + }); + + test("error uses console.error with correct prefix", () => { + const logger = createLogger("connector"); + + logger.error("operation failed"); + + expect(consoleErrorSpy).toHaveBeenCalledWith( + 
"[appkit:connector]", + "operation failed", + ); + }); + + test("formats string placeholders correctly", () => { + const logger = createLogger("test"); + + logger.info("User %s logged in", "alice"); + + expect(consoleLogSpy).toHaveBeenCalledWith( + "[appkit:test]", + "User alice logged in", + ); + }); + + test("formats number placeholders correctly", () => { + const logger = createLogger("server"); + + logger.info("Server running on port %d", 8080); + + expect(consoleLogSpy).toHaveBeenCalledWith( + "[appkit:server]", + "Server running on port 8080", + ); + }); + + test("formats multiple placeholders", () => { + const logger = createLogger("cache"); + + logger.info("Cache hit for key %s, size: %d bytes", "user:123", 1024); + + expect(consoleLogSpy).toHaveBeenCalledWith( + "[appkit:cache]", + "Cache hit for key user:123, size: 1024 bytes", + ); + }); + + test("formats object placeholders with %O", () => { + const logger = createLogger("test"); + const obj = { id: 1, name: "test" }; + + logger.error("Failed with context: %O", obj); + + expect(consoleErrorSpy).toHaveBeenCalledWith( + "[appkit:test]", + expect.stringContaining("id"), + ); + }); + + test("handles nested scope names", () => { + const logger = createLogger("connectors:lakebase"); + + logger.info("connected"); + + expect(consoleLogSpy).toHaveBeenCalledWith( + "[appkit:connectors:lakebase]", + "connected", + ); + }); + + test("handles messages without placeholders and extra args", () => { + const logger = createLogger("test"); + + logger.info("simple message", "extra", "args"); + + expect(consoleLogSpy).toHaveBeenCalledWith( + "[appkit:test]", + "simple message extra args", + ); + }); +}); diff --git a/packages/appkit/src/observability/tests/wide-event.test.ts b/packages/appkit/src/observability/tests/wide-event.test.ts new file mode 100644 index 00000000..433a9367 --- /dev/null +++ b/packages/appkit/src/observability/tests/wide-event.test.ts @@ -0,0 +1,420 @@ +import { afterEach, beforeEach, describe, expect, test, vi } from "vitest"; +import { WideEvent } from "../wide-event"; + +describe("WideEvent", () => { + beforeEach(() => { + vi.useFakeTimers(); + vi.setSystemTime(new Date("2024-01-15T10:00:00.000Z")); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + describe("constructor", () => { + test("initializes with request_id", () => { + const event = new WideEvent("req-123"); + const data = event.toJSON(); + + expect(data.request_id).toBe("req-123"); + }); + + test("initializes with timestamp", () => { + const event = new WideEvent("req-123"); + const data = event.toJSON(); + + expect(data.timestamp).toBe("2024-01-15T10:00:00.000Z"); + }); + + test("initializes service metadata", () => { + const event = new WideEvent("req-123"); + const data = event.toJSON(); + + expect(data.service).toBeDefined(); + expect(data.service?.name).toBe("appkit"); + }); + + test("initializes empty logs array", () => { + const event = new WideEvent("req-123"); + const data = event.toJSON(); + + expect(data.logs).toEqual([]); + }); + + test("initializes empty context object", () => { + const event = new WideEvent("req-123"); + const data = event.toJSON(); + + expect(data.context).toEqual({}); + }); + }); + + describe("set", () => { + test("sets simple values", () => { + const event = new WideEvent("req-123"); + + event.set("method", "POST"); + event.set("path", "/api/query"); + + const data = event.toJSON(); + expect(data.method).toBe("POST"); + expect(data.path).toBe("/api/query"); + }); + + test("merges object values", () => { + const event = 
new WideEvent("req-123"); + + event.set("service", { region: "us-west-2" }); + + const data = event.toJSON(); + expect(data.service?.name).toBe("appkit"); + expect(data.service?.region).toBe("us-west-2"); + }); + + test("returns this for chaining", () => { + const event = new WideEvent("req-123"); + + const result = event.set("method", "GET"); + + expect(result).toBe(event); + }); + }); + + describe("setComponent", () => { + test("sets component name", () => { + const event = new WideEvent("req-123"); + + event.setComponent("analytics"); + + const data = event.toJSON(); + expect(data.component?.name).toBe("analytics"); + }); + + test("sets component name and operation", () => { + const event = new WideEvent("req-123"); + + event.setComponent("sql-warehouse", "executeQuery"); + + const data = event.toJSON(); + expect(data.component?.name).toBe("sql-warehouse"); + expect(data.component?.operation).toBe("executeQuery"); + }); + + test("returns this for chaining", () => { + const event = new WideEvent("req-123"); + + const result = event.setComponent("test"); + + expect(result).toBe(event); + }); + }); + + describe("setUser", () => { + test("sets user context", () => { + const event = new WideEvent("req-123"); + + event.setUser({ id: "user-456" }); + + const data = event.toJSON(); + expect(data.user?.id).toBe("user-456"); + }); + + test("merges with existing user context", () => { + const event = new WideEvent("req-123"); + + event.setUser({ id: "user-456" }); + event.setUser({ role: "admin" }); + + const data = event.toJSON(); + expect(data.user?.id).toBe("user-456"); + expect(data.user?.role).toBe("admin"); + }); + }); + + describe("setExecution", () => { + test("sets execution context", () => { + const event = new WideEvent("req-123"); + + event.setExecution({ cache_hit: true, cache_key: "query:abc" }); + + const data = event.toJSON(); + expect(data.execution?.cache_hit).toBe(true); + expect(data.execution?.cache_key).toBe("query:abc"); + }); + + test("merges with existing execution context", () => { + const event = new WideEvent("req-123"); + + event.setExecution({ cache_hit: true }); + event.setExecution({ retry_attempts: 2 }); + + const data = event.toJSON(); + expect(data.execution?.cache_hit).toBe(true); + expect(data.execution?.retry_attempts).toBe(2); + }); + }); + + describe("setStream", () => { + test("sets stream context", () => { + const event = new WideEvent("req-123"); + + event.setStream({ stream_id: "stream-789", events_sent: 10 }); + + const data = event.toJSON(); + expect(data.stream?.stream_id).toBe("stream-789"); + expect(data.stream?.events_sent).toBe(10); + }); + + test("merges with existing stream context", () => { + const event = new WideEvent("req-123"); + + event.setStream({ stream_id: "stream-789" }); + event.setStream({ buffer_size: 100 }); + + const data = event.toJSON(); + expect(data.stream?.stream_id).toBe("stream-789"); + expect(data.stream?.buffer_size).toBe(100); + }); + }); + + describe("setError", () => { + test("extracts standard error fields", () => { + const event = new WideEvent("req-123"); + const error = new Error("Something went wrong"); + + event.setError(error); + + const data = event.toJSON(); + expect(data.error?.type).toBe("Error"); + expect(data.error?.code).toBe("UNKNOWN_ERROR"); + expect(data.error?.message).toBe("Something went wrong"); + expect(data.error?.retriable).toBe(false); + }); + + test("extracts AppKitError fields", () => { + const event = new WideEvent("req-123"); + const error = new Error("Validation failed") as any; + 
error.name = "ValidationError"; + error.code = "INVALID_INPUT"; + error.statusCode = 400; + error.retriable = false; + + event.setError(error); + + const data = event.toJSON(); + expect(data.error?.type).toBe("ValidationError"); + expect(data.error?.code).toBe("INVALID_INPUT"); + expect(data.error?.retriable).toBe(false); + }); + + test("extracts error cause", () => { + const event = new WideEvent("req-123"); + const cause = new Error("Original error"); + const error = new Error("Wrapped error", { cause }); + + event.setError(error); + + const data = event.toJSON(); + expect(data.error?.cause).toBe("Error: Original error"); + }); + + test("returns this for chaining", () => { + const event = new WideEvent("req-123"); + + const result = event.setError(new Error("test")); + + expect(result).toBe(event); + }); + }); + + describe("setContext", () => { + test("adds scoped context", () => { + const event = new WideEvent("req-123"); + + event.setContext("analytics", { query_key: "apps_list" }); + + const data = event.toJSON(); + expect(data.context?.analytics?.query_key).toBe("apps_list"); + }); + + test("merges with existing scoped context", () => { + const event = new WideEvent("req-123"); + + event.setContext("analytics", { query_key: "apps_list" }); + event.setContext("analytics", { rows_returned: 100 }); + + const data = event.toJSON(); + expect(data.context?.analytics?.query_key).toBe("apps_list"); + expect(data.context?.analytics?.rows_returned).toBe(100); + }); + + test("supports multiple scopes", () => { + const event = new WideEvent("req-123"); + + event.setContext("analytics", { query_key: "apps_list" }); + event.setContext("sql-warehouse", { warehouse_id: "wh-123" }); + + const data = event.toJSON(); + expect(data.context?.analytics?.query_key).toBe("apps_list"); + expect(data.context?.["sql-warehouse"]?.warehouse_id).toBe("wh-123"); + }); + }); + + describe("addLog", () => { + test("adds log entry", () => { + const event = new WideEvent("req-123"); + + event.addLog("info", "Query started"); + + const data = event.toJSON(); + expect(data.logs).toHaveLength(1); + expect(data.logs?.[0].level).toBe("info"); + expect(data.logs?.[0].message).toBe("Query started"); + expect(data.logs?.[0].timestamp).toBe("2024-01-15T10:00:00.000Z"); + }); + + test("adds log with context", () => { + const event = new WideEvent("req-123"); + + event.addLog("debug", "Cache lookup", { key: "user:123" }); + + const data = event.toJSON(); + expect(data.logs?.[0].context).toEqual({ key: "user:123" }); + }); + + test("supports all log levels", () => { + const event = new WideEvent("req-123"); + + event.addLog("debug", "debug message"); + event.addLog("info", "info message"); + event.addLog("warn", "warn message"); + event.addLog("error", "error message"); + + const data = event.toJSON(); + expect(data.logs).toHaveLength(4); + expect(data.logs?.map((l) => l.level)).toEqual([ + "debug", + "info", + "warn", + "error", + ]); + }); + + test("truncates logs at 50 entries", () => { + const event = new WideEvent("req-123"); + + for (let i = 0; i < 60; i++) { + event.addLog("info", `message ${i}`); + } + + const data = event.toJSON(); + expect(data.logs).toHaveLength(50); + expect(data.logs?.[0].message).toBe("message 10"); + expect(data.logs?.[49].message).toBe("message 59"); + }); + + test("returns this for chaining", () => { + const event = new WideEvent("req-123"); + + const result = event.addLog("info", "test"); + + expect(result).toBe(event); + }); + }); + + describe("finalize", () => { + test("sets status_code", () => 
{ + const event = new WideEvent("req-123"); + + event.finalize(200); + + const data = event.toJSON(); + expect(data.status_code).toBe(200); + }); + + test("sets duration_ms", () => { + const event = new WideEvent("req-123"); + + vi.advanceTimersByTime(150); + event.finalize(200); + + const data = event.toJSON(); + expect(data.duration_ms).toBe(150); + }); + + test("returns event data", () => { + const event = new WideEvent("req-123"); + + const result = event.finalize(201); + + expect(result.request_id).toBe("req-123"); + expect(result.status_code).toBe(201); + }); + }); + + describe("getDurationMs", () => { + test("calculates duration from start time", () => { + const event = new WideEvent("req-123"); + + vi.advanceTimersByTime(250); + + expect(event.getDurationMs()).toBe(250); + }); + + test("returns set duration_ms if already set", () => { + const event = new WideEvent("req-123"); + + vi.advanceTimersByTime(50); + event.finalize(200); + + vi.advanceTimersByTime(100); + + // Should return the duration at finalize time (50ms), not current time (150ms) + expect(event.getDurationMs()).toBe(50); + }); + }); + + describe("toJSON", () => { + test("returns complete event data", () => { + const event = new WideEvent("req-123"); + + event + .set("method", "POST") + .set("path", "/api/query") + .setComponent("analytics", "query") + .setUser({ id: "user-456" }) + .setExecution({ cache_hit: false }) + .addLog("info", "Started"); + + const data = event.toJSON(); + + expect(data.request_id).toBe("req-123"); + expect(data.method).toBe("POST"); + expect(data.path).toBe("/api/query"); + expect(data.component?.name).toBe("analytics"); + expect(data.user?.id).toBe("user-456"); + expect(data.execution?.cache_hit).toBe(false); + expect(data.logs).toHaveLength(1); + }); + }); + + describe("chaining", () => { + test("supports fluent API", () => { + const event = new WideEvent("req-123"); + + const data = event + .set("method", "GET") + .set("path", "/api/health") + .setComponent("server", "healthCheck") + .setUser({ id: "anonymous" }) + .setExecution({ timeout_ms: 5000 }) + .addLog("info", "Health check started") + .finalize(200); + + expect(data.method).toBe("GET"); + expect(data.path).toBe("/api/health"); + expect(data.component?.name).toBe("server"); + expect(data.status_code).toBe(200); + }); + }); +}); diff --git a/packages/appkit/src/observability/types.ts b/packages/appkit/src/observability/types.ts new file mode 100644 index 00000000..21440337 --- /dev/null +++ b/packages/appkit/src/observability/types.ts @@ -0,0 +1,4 @@ +/** + * Log level for WideEvent logs + */ +export type LogLevel = "debug" | "info" | "warn" | "error"; diff --git a/packages/appkit/src/observability/wide-event.ts b/packages/appkit/src/observability/wide-event.ts new file mode 100644 index 00000000..626e0161 --- /dev/null +++ b/packages/appkit/src/observability/wide-event.ts @@ -0,0 +1,285 @@ +import { createLogger } from "./logger"; +import type { LogLevel } from "./types"; + +const logger = createLogger("wide-event"); + +export interface QueryData { + key?: string; + warehouse_id?: string; + rows_returned?: number; + query_duration_ms?: number; + bytes_scanned?: number; + [key: string]: unknown; +} + +/** + * WideEvent data interface + * - Represents a single event for a request + * - Fields are camelCase to match OpenTelemetry + */ +export interface WideEventData { + // request metadata + timestamp: string; + request_id: string; + method?: string; + path?: string; + status_code?: number; + duration_ms?: number; + + // service 
metadata + service?: { + name: string; + version: string; + region?: string; + deployment_id?: string; + node_env?: string; + }; + + // component metadata (plugin, connector, or service) + component?: { + name: string; + operation?: string; + }; + + // user metadata + user?: { + id?: string; + [key: string]: unknown; + }; + + // execution metadata + execution?: { + cache_hit?: boolean; + cache_key?: string; + retry_attempts?: number; + timeout_ms?: number; + [key: string]: unknown; + }; + + // stream metadata + stream?: { + stream_id?: string; + events_sent?: number; + buffer_size?: number; + reconnections?: number; + [key: string]: unknown; + }; + + // error metadata + error?: { + type: string; + code: string; + message: string; + retriable?: boolean; + cause?: string; + }; + + // log metadata + logs?: Array<{ + level: LogLevel; + message: string; + timestamp: string; + context?: Record; + }>; + + /** + * Scoped context data + * Each scope (plugin, connector, service) can add its own namespaced data here. + * Example: { analytics: { query_key: "..."}, "sql-warehouse": { warehouse_id: "..."} } + */ + context?: Record>; + + [key: string]: unknown; +} + +/** + * WideEvent + * - Represents a single event for a request + * - Fields are camelCase to match OpenTelemetry + */ +export class WideEvent { + private data: WideEventData; + private startTime: number; + + constructor(requestId: string) { + this.startTime = Date.now(); + this.data = { + timestamp: new Date().toISOString(), + request_id: requestId, + service: { + name: "appkit", + version: process.env.npm_package_version || "unknown", + region: process.env.REGION, + deployment_id: process.env.DEPLOYMENT_ID, + node_env: process.env.NODE_ENV, + }, + logs: [], + context: {}, + }; + } + + /** + * Set a value in the event + * @param key - The key to set + * @param value - The value to set + * @returns The event + */ + set(key: K, value: WideEventData[K]): this { + if (typeof value === "object" && value !== null && !Array.isArray(value)) { + // merge objects + this.data[key] = { + ...(this.data[key] as object), + ...value, + } as WideEventData[K]; + } else { + this.data[key] = value; + } + return this; + } + + /** + * Set the component name and operation. + * Component can be a plugin, connector, or service. 
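Taken together, the WideEvent surface above (set, setComponent, setUser, setExecution, setError, addLog, finalize) is meant to collect one canonical event per request. A minimal sketch of assembling and emitting such an event follows; the Express handler wiring and the console.log sink are assumptions, since this patch only defines the event container.

```typescript
// Sketch: one wide event per request. Emitting via console.log is an
// assumption; the patch defines the WideEvent container, not the sink.
import { randomUUID } from "node:crypto";
import type express from "express";
import { WideEvent } from "../observability";

export function handleQuery(req: express.Request, res: express.Response) {
  const event = new WideEvent(randomUUID())
    .set("method", req.method)
    .set("path", req.path)
    .setComponent("analytics", "query");

  try {
    event.setExecution({ cache_hit: false });
    event.addLog("info", "Query started");
    res.json({ ok: true });
    console.log(JSON.stringify(event.finalize(res.statusCode)));
  } catch (error) {
    event.setError(error as Error);
    res.status(500).json({ ok: false });
    console.log(JSON.stringify(event.finalize(500)));
  }
}
```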
+ * @param name - The name of the component (e.g., "analytics", "sql-warehouse", "cache-manager") + * @param operation - The operation being performed (e.g., "query", "getOrExecute") + * @returns The event + */ + setComponent(name: string, operation?: string): this { + this.data.component = { name, operation }; + return this; + } + + /** + * Set the user context + * @param user - The user context + * @returns The event + */ + setUser(user: WideEventData["user"]): this { + this.data.user = { ...this.data.user, ...user }; + return this; + } + + /** + * Set the execution context + * @param execution - The execution context + * @returns The event + */ + setExecution(execution: WideEventData["execution"]): this { + this.data.execution = { ...this.data.execution, ...execution }; + return this; + } + + /** + * Set the stream context + * @param stream - The stream context + * @returns The event + */ + setStream(stream: WideEventData["stream"]): this { + this.data.stream = { ...this.data.stream, ...stream }; + return this; + } + + /** + * Set the error context + * @param error - The error context + * @returns The event + */ + setError(error: Error): this { + const isAppKitError = "code" in error && "statusCode" in error; + const errorCause = (error as any).cause; + + this.data.error = { + type: error.name, + code: isAppKitError ? (error as any).code : "UNKNOWN_ERROR", + message: error.message, + retriable: isAppKitError ? (error as any).retriable : false, + cause: errorCause ? String(errorCause) : undefined, + }; + + return this; + } + + /** + * Add scoped context to the event + * @param scope - The scope name (plugin, connector, or service name) + * @param ctx - Context data to merge + * @example + * event.setContext("analytics", { query_key: "apps_list", rows_returned: 100 }); + * event.setContext("sql-warehouse", { warehouse_id: "1234567890" }); + */ + setContext(scope: string, ctx: Record): this { + if (!this.data.context) { + this.data.context = {}; + } + + this.data.context[scope] = { + ...this.data.context[scope], + ...ctx, + }; + + return this; + } + + /** + * Add a log to the event + * @param level - The level of the log + * @param message - The message of the log + * @param context - The context of the log + * @returns The event + */ + addLog( + level: LogLevel, + message: string, + context?: Record, + ): this { + if (!this.data.logs) { + this.data.logs = []; + } + + this.data.logs.push({ + level, + message, + timestamp: new Date().toISOString(), + context, + }); + + // Keep only last 50 logs to prevent unbounded growth + if (this.data.logs.length > 50) { + const originalLength = this.data.logs.length; + this.data.logs = this.data.logs.slice(-50); + logger.debug( + "Log limit exceeded, truncated from %d to 50 logs", + originalLength, + ); + } + + return this; + } + + /** + * Finalize the event + * @param statusCode - The status code of the response + * @returns The event data + */ + finalize(statusCode: number): WideEventData { + this.data.status_code = statusCode; + this.data.duration_ms = this.getDurationMs(); + return this.data; + } + + /** + * Get the duration of the event in milliseconds + * @returns The duration of the event in milliseconds + */ + getDurationMs(): number { + return this.data.duration_ms || Date.now() - this.startTime; + } + + /** + * Convert the event to a JSON object + * @returns The event data as a JSON object + */ + toJSON(): WideEventData { + return this.data; + } +} diff --git a/packages/appkit/src/plugin/dev-reader.ts b/packages/appkit/src/plugin/dev-reader.ts 
index 16c2e7dc..a620b44f 100644 --- a/packages/appkit/src/plugin/dev-reader.ts +++ b/packages/appkit/src/plugin/dev-reader.ts @@ -2,6 +2,9 @@ import { randomUUID } from "node:crypto"; import type { TunnelConnection } from "shared"; import { isRemoteTunnelAllowedByEnv } from "@/server/remote-tunnel/gate"; import { TunnelError } from "../observability/errors"; +import { createLogger } from "../observability/logger"; + +const logger = createLogger("plugin:dev-reader"); type TunnelConnectionGetter = ( req: import("express").Request, @@ -32,7 +35,7 @@ export class DevFileReader { if (typeof value === "function") { return function noop() { - console.info(`Noop: ${String(prop)} (remote server disabled)`); + logger.debug("Noop: %s (remote server disabled)", String(prop)); return Promise.resolve(""); }; } diff --git a/packages/appkit/src/plugin/plugin.ts b/packages/appkit/src/plugin/plugin.ts index 08444099..f915f2e8 100644 --- a/packages/appkit/src/plugin/plugin.ts +++ b/packages/appkit/src/plugin/plugin.ts @@ -14,12 +14,13 @@ import type { import { AppManager } from "../app"; import { CacheManager } from "../cache"; import { - ServiceContext, getCurrentUserId, runInUserContext, + ServiceContext, type UserContext, } from "../context"; import { AuthenticationError } from "../observability/errors"; +import { createLogger } from "../observability/logger"; import { StreamManager } from "../stream"; import { type ITelemetry, @@ -33,10 +34,12 @@ import { RetryInterceptor } from "./interceptors/retry"; import { TelemetryInterceptor } from "./interceptors/telemetry"; import { TimeoutInterceptor } from "./interceptors/timeout"; import type { - InterceptorContext, ExecutionInterceptor, + InterceptorContext, } from "./interceptors/types"; +const logger = createLogger("plugin"); + /** * Methods that should not be proxied by asUser(). * These are lifecycle/internal methods that don't make sense @@ -140,9 +143,8 @@ export abstract class Plugin< // In local development, fall back to service principal // since there's no user token available if (!token && isDev) { - console.warn( - "[AppKit] asUser() called without user token in development mode. " + - "Using service principal.", + logger.warn( + "asUser() called without user token in development mode. Using service principal.", ); return this; diff --git a/packages/appkit/src/server/index.ts b/packages/appkit/src/server/index.ts index 39bd771d..bfd0504e 100644 --- a/packages/appkit/src/server/index.ts +++ b/packages/appkit/src/server/index.ts @@ -5,16 +5,19 @@ import dotenv from "dotenv"; import express from "express"; import type { PluginPhase } from "shared"; import { ServerError } from "../observability/errors"; +import { createLogger } from "../observability/logger"; import { Plugin, toPlugin } from "../plugin"; import { instrumentations } from "../telemetry"; import { RemoteTunnelController } from "./remote-tunnel/remote-tunnel-controller"; import { StaticServer } from "./static-server"; import type { ServerConfig } from "./types"; -import { type PluginEndpoints, getRoutes } from "./utils"; +import { getRoutes, type PluginEndpoints } from "./utils"; import { ViteDevServer } from "./vite-dev-server"; dotenv.config({ path: path.resolve(process.cwd(), "./.env") }); +const logger = createLogger("server"); + /** * Server plugin for the AppKit. 
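The hunks above and below replace raw console calls with the scoped logger. A small usage sketch follows, assuming obug honours debug-style DEBUG namespace patterns; only the appkit:<scope> prefix and the printf-style formatting are guaranteed by createLogger in this patch.

```typescript
// Sketch: scoped logging as adopted in the surrounding hunks. logger.debug
// output appears only when the DEBUG env var matches (e.g. DEBUG=appkit:*),
// which is an assumption about obug's matching rules; info/warn/error always
// print with the [appkit:<scope>] prefix.
import { createLogger } from "../observability";

const logger = createLogger("server");

logger.info("Server running on http://%s:%d", "0.0.0.0", 8000);
logger.warn("Static path not found, falling back to Vite dev server");
logger.debug("Remote tunnel: %s", "disabled (controller not initialized)");
logger.error("Startup failed: %O", new Error("EADDRINUSE"));
```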
* @@ -241,7 +244,7 @@ export class ServerPlugin extends Plugin { for (const p of staticPaths) { const fullPath = path.resolve(cwd, p); if (fs.existsSync(path.resolve(fullPath, "index.html"))) { - console.log(`Static files: serving from ${fullPath}`); + logger.debug("Static files: serving from %s", fullPath); return fullPath; } } @@ -254,30 +257,30 @@ export class ServerPlugin extends Plugin { const port = this.config.port ?? ServerPlugin.DEFAULT_CONFIG.port; const host = this.config.host ?? ServerPlugin.DEFAULT_CONFIG.host; - console.log(`Server running on http://${host}:${port}`); + logger.info("Server running on http://%s:%d", host, port); if (hasExplicitStaticPath) { - console.log(`Mode: static (${this.config.staticPath})`); + logger.info("Mode: static (%s)", this.config.staticPath); } else if (isDev) { - console.log("Mode: development (Vite HMR)"); + logger.info("Mode: development (Vite HMR)"); } else { - console.log("Mode: production (static)"); + logger.info("Mode: production (static)"); } const remoteServerController = this.remoteTunnelController; if (!remoteServerController) { - console.log("Remote tunnel: disabled (controller not initialized)"); + logger.debug("Remote tunnel: disabled (controller not initialized)"); } else { - console.log( - `Remote tunnel: ${ - remoteServerController.isAllowedByEnv() ? "allowed" : "blocked" - }; ${remoteServerController.isActive() ? "active" : "inactive"}`, + logger.debug( + "Remote tunnel: %s; %s", + remoteServerController.isAllowedByEnv() ? "allowed" : "blocked", + remoteServerController.isActive() ? "active" : "inactive", ); } } private async _gracefulShutdown() { - console.log("Starting graceful shutdown..."); + logger.info("Starting graceful shutdown..."); if (this.viteDevServer) { await this.viteDevServer.close(); @@ -294,8 +297,9 @@ export class ServerPlugin extends Plugin { try { plugin.abortActiveOperations(); } catch (err) { - console.error( - `Error aborting operations for plugin ${plugin.name}:`, + logger.error( + "Error aborting operations for plugin %s: %O", + plugin.name, err, ); } @@ -306,13 +310,13 @@ export class ServerPlugin extends Plugin { // 2. close the server if (this.server) { this.server.close(() => { - console.log("Server closed gracefully"); + logger.debug("Server closed gracefully"); process.exit(0); }); // 3. 
timeout to force shutdown after 15 seconds setTimeout(() => { - console.log("Force shutdown after timeout"); + logger.debug("Force shutdown after timeout"); process.exit(1); }, 15000); } else { diff --git a/packages/appkit/src/server/remote-tunnel/remote-tunnel-controller.ts b/packages/appkit/src/server/remote-tunnel/remote-tunnel-controller.ts index 5c799117..b589de38 100644 --- a/packages/appkit/src/server/remote-tunnel/remote-tunnel-controller.ts +++ b/packages/appkit/src/server/remote-tunnel/remote-tunnel-controller.ts @@ -1,5 +1,6 @@ import type { Server as HTTPServer } from "node:http"; import type express from "express"; +import { createLogger } from "../../observability/logger"; import type { DevFileReader } from "../../plugin/dev-reader"; import { hasDevQuery, @@ -9,6 +10,8 @@ import { } from "./gate"; import type { RemoteTunnelManager } from "./remote-tunnel-manager"; +const logger = createLogger("server:remote-tunnel:controller"); + /** * Controller for the remote tunnel * @@ -124,7 +127,7 @@ export class RemoteTunnelController { // attach server + ws setup this.maybeSetupWebSocket(); - console.log("RemoteTunnel: initialized (on-demand)"); + logger.debug("RemoteTunnel: initialized (on-demand)"); return remoteTunnelManager; })(); @@ -147,6 +150,6 @@ export class RemoteTunnelController { this.manager.setupWebSocket(); this.wsReady = true; - console.log("RemoteTunnel: web socket setup complete"); + logger.debug("RemoteTunnel: web socket setup complete"); } } diff --git a/packages/appkit/src/server/remote-tunnel/remote-tunnel-manager.ts b/packages/appkit/src/server/remote-tunnel/remote-tunnel-manager.ts index e88ebf01..fcf159bc 100644 --- a/packages/appkit/src/server/remote-tunnel/remote-tunnel-manager.ts +++ b/packages/appkit/src/server/remote-tunnel/remote-tunnel-manager.ts @@ -6,6 +6,7 @@ import { fileURLToPath } from "node:url"; import type express from "express"; import type { TunnelConnection } from "shared"; import { WebSocketServer } from "ws"; +import { createLogger } from "../../observability/logger"; import { generateTunnelIdFromEmail, getConfigScript, @@ -17,6 +18,8 @@ const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); const MAX_ASSET_FETCH_TIMEOUT = 60_000; +const logger = createLogger("server:remote-tunnel"); + interface DevFileReader { registerTunnelGetter( getter: (req: express.Request) => TunnelConnection | null, @@ -102,7 +105,7 @@ export class RemoteTunnelManager { ws.send(JSON.stringify(request)); }).catch((err) => { - console.error(`Failed to fetch ${path}:`, err.message); + logger.error("Failed to fetch %s: %s", path, err.message); return { status: 504, body: Buffer.from(""), headers: {} }; }); @@ -271,7 +274,7 @@ export class RemoteTunnelManager { if (isBinary) { if (!tunnel.waitingForBinaryBody) { - console.warn( + logger.debug( "Received binary message but no requestId is waiting for body", ); return; @@ -281,7 +284,7 @@ export class RemoteTunnelManager { const pending = tunnel.pendingFetches.get(requestId); if (!pending || !pending.metadata) { - console.warn("Received binary message but pending fetch not found"); + logger.debug("Received binary message but pending fetch not found"); tunnel.waitingForBinaryBody = null; return; } @@ -307,12 +310,18 @@ export class RemoteTunnelManager { if (data.approved) { tunnel.approvedViewers.add(data.viewer); - console.log( - `✅ Approved ${data.viewer} for tunnel ${tunnelId}`, + logger.debug( + "✅ Approved %s for tunnel %s", + data.viewer, + tunnelId, ); } else { 
tunnel.rejectedViewers.add(data.viewer); - console.log(`❌ Denied ${data.viewer} for tunnel ${tunnelId}`); + logger.debug( + "❌ Denied %s for tunnel %s", + data.viewer, + tunnelId, + ); } } } else if (data.type === "fetch:response:meta") { @@ -352,7 +361,7 @@ export class RemoteTunnelManager { } } } catch (e) { - console.error("Failed to parse WebSocket message:", e); + logger.error("Failed to parse WebSocket message: %O", e); } }); @@ -393,7 +402,7 @@ export class RemoteTunnelManager { // Browser → CLI browserWs.on("message", (msg) => { const hmrStart = Date.now(); - console.log("browser -> cli browserWS message", msg.toString()); + logger.debug("browser -> cli browserWS message: %s", msg.toString()); cliWs.send( JSON.stringify({ type: "hmr:message", @@ -415,8 +424,8 @@ export class RemoteTunnelManager { browserWs.send(data.body); } } catch { - console.error( - "Failed to parse CLI message for HMR:", + logger.error( + "Failed to parse CLI message for HMR: %s", msg.toString().substring(0, 100), ); } diff --git a/packages/appkit/src/server/tests/server.test.ts b/packages/appkit/src/server/tests/server.test.ts index 7b876d11..3bbf8c0a 100644 --- a/packages/appkit/src/server/tests/server.test.ts +++ b/packages/appkit/src/server/tests/server.test.ts @@ -102,6 +102,23 @@ vi.mock("../vite-dev-server", () => ({ })), })); +// Mock logger for testing log output +const { mockLoggerDebug, mockLoggerInfo, mockLoggerWarn, mockLoggerError } = + vi.hoisted(() => ({ + mockLoggerDebug: vi.fn(), + mockLoggerInfo: vi.fn(), + mockLoggerWarn: vi.fn(), + mockLoggerError: vi.fn(), + })); +vi.mock("../../observability/logger", () => ({ + createLogger: vi.fn(() => ({ + debug: mockLoggerDebug, + info: mockLoggerInfo, + warn: mockLoggerWarn, + error: mockLoggerError, + })), +})); + vi.mock("../static-server", () => ({ StaticServer: vi.fn().mockImplementation(() => ({ setup: vi.fn(), @@ -382,20 +399,19 @@ describe("ServerPlugin", () => { describe("logStartupInfo", () => { test("logs remote tunnel controller disabled when missing", () => { - const logSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + mockLoggerDebug.mockClear(); const plugin = new ServerPlugin({ autoStart: false }); (plugin as any).remoteTunnelController = undefined; (plugin as any).logStartupInfo(); - expect(logSpy).toHaveBeenCalledWith( + expect(mockLoggerDebug).toHaveBeenCalledWith( "Remote tunnel: disabled (controller not initialized)", ); - logSpy.mockRestore(); }); test("logs remote tunnel allowed/active when controller present", () => { - const logSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + mockLoggerDebug.mockClear(); const plugin = new ServerPlugin({ autoStart: false }); (plugin as any).remoteTunnelController = { isAllowedByEnv: () => true, @@ -405,17 +421,16 @@ describe("ServerPlugin", () => { (plugin as any).logStartupInfo(); expect( - logSpy.mock.calls.some((c) => - String(c[0]).includes("Remote tunnel: allowed; active"), + mockLoggerDebug.mock.calls.some((c) => + String(c[0]).includes("Remote tunnel:"), ), ).toBe(true); - logSpy.mockRestore(); }); }); describe("findStaticPath", () => { test("returns first matching static path and logs it", () => { - const logSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + mockLoggerDebug.mockClear(); vi.mocked(fs.existsSync).mockImplementation((p: any) => { return String(p).endsWith("dist/index.html"); }); @@ -423,21 +438,20 @@ describe("ServerPlugin", () => { const p = (ServerPlugin as any).findStaticPath(); expect(String(p)).toContain("dist"); expect( - 
logSpy.mock.calls.some((c) => + mockLoggerDebug.mock.calls.some((c) => String(c[0]).includes("Static files: serving from"), ), ).toBe(true); - logSpy.mockRestore(); }); }); describe("_gracefulShutdown", () => { test("aborts plugin operations (with error isolation) and closes server", async () => { vi.useFakeTimers(); + mockLoggerError.mockClear(); const exitSpy = vi .spyOn(process, "exit") .mockImplementation(((_code?: number) => undefined) as any); - const errSpy = vi.spyOn(console, "error").mockImplementation(() => {}); const plugin = new ServerPlugin({ autoStart: false, @@ -461,11 +475,10 @@ describe("ServerPlugin", () => { await (plugin as any)._gracefulShutdown(); vi.runAllTimers(); - expect(errSpy).toHaveBeenCalled(); + expect(mockLoggerError).toHaveBeenCalled(); expect(mockHttpServer.close).toHaveBeenCalled(); expect(exitSpy).toHaveBeenCalled(); - errSpy.mockRestore(); exitSpy.mockRestore(); vi.useRealTimers(); }); diff --git a/packages/appkit/src/server/vite-dev-server.ts b/packages/appkit/src/server/vite-dev-server.ts index b74150f5..9b23a251 100644 --- a/packages/appkit/src/server/vite-dev-server.ts +++ b/packages/appkit/src/server/vite-dev-server.ts @@ -4,9 +4,12 @@ import type express from "express"; import type { ViteDevServer as ViteDevServerType } from "vite"; import { mergeConfigDedup } from "@/utils"; import { ServerError } from "../observability/errors"; +import { createLogger } from "../observability/logger"; +import { appKitTypesPlugin } from "../type-generator/vite-plugin"; import { BaseServer } from "./base-server"; import type { PluginEndpoints } from "./utils"; -import { appKitTypesPlugin } from "../type-generator/vite-plugin"; + +const logger = createLogger("server:vite"); /** * Vite dev server for the AppKit. @@ -115,7 +118,7 @@ export class ViteDevServer extends BaseServer { const hasIndexHtml = fs.existsSync(path.join(fullPath, "index.html")); if (hasViteConfig && hasIndexHtml) { - console.log(`Vite dev server: using client root ${fullPath}`); + logger.debug("Vite dev server: using client root %s", fullPath); return fullPath; } } diff --git a/packages/appkit/src/stream/arrow-stream-processor.ts b/packages/appkit/src/stream/arrow-stream-processor.ts index b97c462f..0cc0a4f8 100644 --- a/packages/appkit/src/stream/arrow-stream-processor.ts +++ b/packages/appkit/src/stream/arrow-stream-processor.ts @@ -1,5 +1,8 @@ import type { sql } from "@databricks/sdk-experimental"; import { ExecutionError, ValidationError } from "../observability/errors"; +import { createLogger } from "../observability/logger"; + +const logger = createLogger("stream:arrow"); type ResultManifest = sql.ResultManifest; type ExternalLink = sql.ExternalLink; @@ -118,7 +121,7 @@ export class ArrowStreamProcessor { try { const externalLink = chunk.external_link; if (!externalLink) { - console.error("External link is required", chunk); + logger.error("External link is required for chunk: %O", chunk); continue; } diff --git a/packages/appkit/src/stream/tests/arrow-stream-processor.test.ts b/packages/appkit/src/stream/tests/arrow-stream-processor.test.ts index 56d15ff6..8a95fbdf 100644 --- a/packages/appkit/src/stream/tests/arrow-stream-processor.test.ts +++ b/packages/appkit/src/stream/tests/arrow-stream-processor.test.ts @@ -452,9 +452,10 @@ describe("ArrowStreamProcessor", () => { processor.processChunks(chunks, createMockSchema()), ).rejects.toThrow(); + // Logger uses util.format, so the message is pre-formatted expect(consoleSpy).toHaveBeenCalledWith( - "External link is required", - 
expect.objectContaining({ chunk_index: 0 }), + "[appkit:stream:arrow]", + expect.stringContaining("External link is required for chunk:"), ); consoleSpy.mockRestore(); diff --git a/packages/appkit/src/telemetry/telemetry-manager.ts b/packages/appkit/src/telemetry/telemetry-manager.ts index f9b34847..e256610c 100644 --- a/packages/appkit/src/telemetry/telemetry-manager.ts +++ b/packages/appkit/src/telemetry/telemetry-manager.ts @@ -1,4 +1,3 @@ -import type { TelemetryOptions } from "shared"; import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node"; import { OTLPLogExporter } from "@opentelemetry/exporter-logs-otlp-proto"; import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto"; @@ -17,14 +16,18 @@ import { } from "@opentelemetry/resources"; import { BatchLogRecordProcessor } from "@opentelemetry/sdk-logs"; import { PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics"; +import { NodeSDK } from "@opentelemetry/sdk-node"; import { AlwaysOnSampler } from "@opentelemetry/sdk-trace-base"; import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION, } from "@opentelemetry/semantic-conventions"; +import type { TelemetryOptions } from "shared"; +import { createLogger } from "../observability/logger"; import { TelemetryProvider } from "./telemetry-provider"; import type { TelemetryConfig } from "./types"; -import { NodeSDK } from "@opentelemetry/sdk-node"; + +const logger = createLogger("telemetry"); export class TelemetryManager { private static readonly DEFAULT_EXPORT_INTERVAL_MS = 10000; @@ -93,9 +96,9 @@ export class TelemetryManager { this.sdk.start(); this.registerShutdown(); - console.log("[Telemetry] Initialized successfully"); + logger.debug("Initialized successfully"); } catch (error) { - console.error("[Telemetry] Failed to initialize:", error); + logger.error("Failed to initialize: %O", error); } } @@ -172,7 +175,7 @@ export class TelemetryManager { await this.sdk.shutdown(); this.sdk = undefined; } catch (error) { - console.error("[Telemetry] Error shutting down:", error); + logger.error("Error shutting down: %O", error); } } } diff --git a/packages/appkit/src/type-generator/index.ts b/packages/appkit/src/type-generator/index.ts index 085db6de..86d51bd5 100644 --- a/packages/appkit/src/type-generator/index.ts +++ b/packages/appkit/src/type-generator/index.ts @@ -1,10 +1,13 @@ import fs from "node:fs"; import dotenv from "dotenv"; +import { createLogger } from "../observability/logger"; import { generateQueriesFromDescribe } from "./query-registry"; import type { QuerySchema } from "./types"; dotenv.config(); +const logger = createLogger("type-generator"); + /** * Generate type declarations for QueryRegistry * Create the d.ts file from the plugin routes and query schemas @@ -50,7 +53,7 @@ export async function generateFromEntryPoint(options: { }) { const { outFile, queryFolder, warehouseId, noCache } = options; - console.log("\n[AppKit] Starting type generation...\n"); + logger.debug("Starting type generation..."); let queryRegistry: QuerySchema[] = []; if (queryFolder) @@ -66,5 +69,5 @@ export async function generateFromEntryPoint(options: { fs.writeFileSync(outFile, typeDeclarations, "utf-8"); - console.log("\n[AppKit] Type generation complete!\n"); + logger.debug("Type generation complete!"); } diff --git a/packages/appkit/src/type-generator/query-registry.ts b/packages/appkit/src/type-generator/query-registry.ts index 4842b810..1556373e 100644 --- a/packages/appkit/src/type-generator/query-registry.ts +++ 
b/packages/appkit/src/type-generator/query-registry.ts @@ -1,6 +1,7 @@ import fs from "node:fs"; import path from "node:path"; import { WorkspaceClient } from "@databricks/sdk-experimental"; +import { createLogger } from "../observability/logger"; import { CACHE_VERSION, hashSQL, loadCache, saveCache } from "./cache"; import { Spinner } from "./spinner"; import { @@ -10,6 +11,8 @@ import { sqlTypeToMarker, } from "./types"; +const logger = createLogger("type-generator:query-registry"); + /** * Extract parameters from a SQL query * @param sql - the SQL query to extract parameters from @@ -121,7 +124,7 @@ export async function generateQueriesFromDescribe( .readdirSync(queryFolder) .filter((file) => file.endsWith(".sql")); - console.log(` Found ${queryFiles.length} SQL queries\n`); + logger.debug("Found %d SQL queries", queryFiles.length); // load cache const cache = noCache ? { version: CACHE_VERSION, queries: {} } : loadCache(); @@ -193,7 +196,7 @@ export async function generateQueriesFromDescribe( // log warning if there are failed queries if (failedQueries.length > 0) { - console.warn(` Warning: ${failedQueries.length} queries failed\n`); + logger.debug("Warning: %d queries failed", failedQueries.length); } return querySchemas; diff --git a/packages/appkit/src/type-generator/vite-plugin.ts b/packages/appkit/src/type-generator/vite-plugin.ts index 67480d79..9a188e4b 100644 --- a/packages/appkit/src/type-generator/vite-plugin.ts +++ b/packages/appkit/src/type-generator/vite-plugin.ts @@ -1,8 +1,11 @@ +import fs from "node:fs"; import path from "node:path"; import type { Plugin } from "vite"; -import fs from "node:fs"; +import { createLogger } from "../observability/logger"; import { generateFromEntryPoint } from "./index"; +const logger = createLogger("type-generator:vite-plugin"); + /** * Options for the AppKit types plugin. */ @@ -29,9 +32,7 @@ export function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin { const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || ""; if (!warehouseId) { - console.warn( - "[AppKit] Warehouse ID not found. Skipping type generation.", - ); + logger.debug("Warehouse ID not found. Skipping type generation."); return; } @@ -46,7 +47,7 @@ export function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin { if (process.env.NODE_ENV === "production") { throw error; } - console.error("[AppKit] Error generating types:", error); + logger.error("Error generating types: %O", error); } } @@ -57,9 +58,7 @@ export function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin { const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || ""; if (!warehouseId) { - console.warn( - "[AppKit] Warehouse ID not found. Skipping type generation.", - ); + logger.debug("Warehouse ID not found. 
Skipping type generation."); return false; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a2e94bf8..86444350 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -269,6 +269,9 @@ importers: express: specifier: ^4.22.0 version: 4.22.0 + obug: + specifier: ^2.1.1 + version: 2.1.1 pg: specifier: ^8.16.3 version: 8.16.3 @@ -8009,6 +8012,9 @@ packages: obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + obug@2.1.1: + resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + ohash@2.0.11: resolution: {integrity: sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==} @@ -19727,6 +19733,8 @@ snapshots: obuf@1.1.2: {} + obug@2.1.1: {} + ohash@2.0.11: {} on-finished@2.4.1: From 303a06565be0bd19de358abc4cee555c6125b891 Mon Sep 17 00:00:00 2001 From: Ditadi Date: Tue, 13 Jan 2026 16:18:10 +0000 Subject: [PATCH 3/4] feat(logging): add request-scoped wide-event tracking with sampling --- packages/appkit/src/analytics/analytics.ts | 51 +++- packages/appkit/src/analytics/query.ts | 2 +- packages/appkit/src/app/index.ts | 2 +- packages/appkit/src/cache/index.ts | 29 +- .../appkit/src/cache/storage/persistent.ts | 7 +- .../appkit/src/connectors/lakebase/client.ts | 4 +- .../appkit/src/connectors/lakebase/utils.ts | 5 +- .../src/connectors/sql-warehouse/client.ts | 64 +++- .../appkit/src/context/service-context.ts | 2 +- .../errors/authentication.ts | 0 .../src/{observability => }/errors/base.ts | 0 .../errors/configuration.ts | 0 .../{observability => }/errors/connection.ts | 0 .../{observability => }/errors/execution.ts | 0 .../src/{observability => }/errors/index.ts | 0 .../errors/initialization.ts | 0 .../src/{observability => }/errors/server.ts | 0 .../errors/tests/errors.test.ts | 0 .../src/{observability => }/errors/tunnel.ts | 0 .../{observability => }/errors/validation.ts | 0 .../src/{observability => logging}/index.ts | 8 +- packages/appkit/src/logging/logger.ts | 274 ++++++++++++++++++ packages/appkit/src/logging/sampling.ts | 120 ++++++++ .../logging/tests/logger-with-event.test.ts | 224 ++++++++++++++ .../tests/logger.test.ts | 0 .../appkit/src/logging/tests/sampling.test.ts | 209 +++++++++++++ .../logging/tests/wide-event-emitter.test.ts | 257 ++++++++++++++++ .../tests/wide-event.test.ts | 9 +- .../src/{observability => logging}/types.ts | 0 .../appkit/src/logging/wide-event-emitter.ts | 193 ++++++++++++ .../{observability => logging}/wide-event.ts | 12 +- packages/appkit/src/observability/logger.ts | 50 ---- packages/appkit/src/plugin/dev-reader.ts | 4 +- .../appkit/src/plugin/interceptors/cache.ts | 11 +- .../appkit/src/plugin/interceptors/retry.ts | 22 +- .../src/plugin/interceptors/telemetry.ts | 53 +++- .../appkit/src/plugin/interceptors/timeout.ts | 12 +- .../appkit/src/plugin/interceptors/types.ts | 3 + packages/appkit/src/plugin/plugin.ts | 13 +- packages/appkit/src/server/index.ts | 4 +- .../remote-tunnel/remote-tunnel-controller.ts | 2 +- .../remote-tunnel/remote-tunnel-manager.ts | 2 +- .../appkit/src/server/tests/server.test.ts | 10 +- packages/appkit/src/server/vite-dev-server.ts | 4 +- .../src/stream/arrow-stream-processor.ts | 4 +- packages/appkit/src/stream/buffers.ts | 2 +- packages/appkit/src/stream/stream-manager.ts | 144 +++++---- packages/appkit/src/stream/types.ts | 2 + .../appkit/src/telemetry/instrumentations.ts | 6 +- .../appkit/src/telemetry/telemetry-manager.ts | 6 +- 
.../appkit/src/telemetry/trace-sampler.ts | 56 ++++ packages/appkit/src/type-generator/index.ts | 2 +- .../src/type-generator/query-registry.ts | 2 +- .../appkit/src/type-generator/vite-plugin.ts | 2 +- packages/appkit/src/utils/env-validator.ts | 2 +- packages/appkit/src/utils/index.ts | 1 + packages/appkit/src/utils/path-exclusions.ts | 89 ++++++ turbo.json | 1 + 58 files changed, 1778 insertions(+), 203 deletions(-) rename packages/appkit/src/{observability => }/errors/authentication.ts (100%) rename packages/appkit/src/{observability => }/errors/base.ts (100%) rename packages/appkit/src/{observability => }/errors/configuration.ts (100%) rename packages/appkit/src/{observability => }/errors/connection.ts (100%) rename packages/appkit/src/{observability => }/errors/execution.ts (100%) rename packages/appkit/src/{observability => }/errors/index.ts (100%) rename packages/appkit/src/{observability => }/errors/initialization.ts (100%) rename packages/appkit/src/{observability => }/errors/server.ts (100%) rename packages/appkit/src/{observability => }/errors/tests/errors.test.ts (100%) rename packages/appkit/src/{observability => }/errors/tunnel.ts (100%) rename packages/appkit/src/{observability => }/errors/validation.ts (100%) rename packages/appkit/src/{observability => logging}/index.ts (72%) create mode 100644 packages/appkit/src/logging/logger.ts create mode 100644 packages/appkit/src/logging/sampling.ts create mode 100644 packages/appkit/src/logging/tests/logger-with-event.test.ts rename packages/appkit/src/{observability => logging}/tests/logger.test.ts (100%) create mode 100644 packages/appkit/src/logging/tests/sampling.test.ts create mode 100644 packages/appkit/src/logging/tests/wide-event-emitter.test.ts rename packages/appkit/src/{observability => logging}/tests/wide-event.test.ts (97%) rename packages/appkit/src/{observability => logging}/types.ts (100%) create mode 100644 packages/appkit/src/logging/wide-event-emitter.ts rename packages/appkit/src/{observability => logging}/wide-event.ts (95%) delete mode 100644 packages/appkit/src/observability/logger.ts create mode 100644 packages/appkit/src/telemetry/trace-sampler.ts create mode 100644 packages/appkit/src/utils/path-exclusions.ts diff --git a/packages/appkit/src/analytics/analytics.ts b/packages/appkit/src/analytics/analytics.ts index e80cee05..2c11f5da 100644 --- a/packages/appkit/src/analytics/analytics.ts +++ b/packages/appkit/src/analytics/analytics.ts @@ -12,7 +12,7 @@ import { getWarehouseId, getWorkspaceClient, } from "../context"; -import { createLogger } from "../observability/logger"; +import { createLogger } from "../logging/logger"; import { Plugin, toPlugin } from "../plugin"; import { queryDefaults } from "./defaults"; import { QueryProcessor } from "./query"; @@ -98,26 +98,33 @@ export class AnalyticsPlugin extends Plugin { const { jobId } = req.params; const workspaceClient = getWorkspaceClient(); - logger.debug( - "Processing Arrow job request: %s for plugin: %s", - jobId, - this.name, - ); + logger.debug(req, "Processing Arrow job request for jobId=%s", jobId); + + const event = logger.event(req); + event.setComponent("analytics", "getArrowData").setContext("analytics", { + job_id: jobId, + plugin: this.name, + }); const result = await this.getArrowData(workspaceClient, jobId); + event.setContext("sql-warehouse", { + arrow_data_size_bytes: result.data.length, + }); + res.setHeader("Content-Type", "application/octet-stream"); res.setHeader("Content-Length", result.data.length.toString()); 
res.setHeader("Cache-Control", "public, max-age=3600"); logger.debug( + req, "Sending Arrow buffer: %d bytes for job %s", result.data.length, jobId, ); res.send(Buffer.from(result.data)); } catch (error) { - logger.error("Arrow job error for %s: %O", this.name, error); + logger.error(req, "Arrow job error: %O", error); res.status(404).json({ error: error instanceof Error ? error.message : "Arrow job not found", plugin: this.name, @@ -135,6 +142,18 @@ export class AnalyticsPlugin extends Plugin { ): Promise { const { query_key } = req.params; const { parameters, format = "JSON" } = req.body as IAnalyticsQueryRequest; + + // Request-scoped logging with WideEvent tracking + logger.debug(req, "Executing query: %s (format=%s)", query_key, format); + + const event = logger.event(req); + event.setComponent("analytics", "executeQuery").setContext("analytics", { + query_key, + format, + parameter_count: parameters ? Object.keys(parameters).length : 0, + plugin: this.name, + }); + const queryParameters = format === "ARROW" ? { @@ -201,12 +220,14 @@ export class AnalyticsPlugin extends Plugin { processedParams, queryParameters.formatParameters, signal, + req, ); return { type: queryParameters.type, ...result }; }, streamExecutionSettings, userKey, + req, ); } @@ -230,6 +251,7 @@ export class AnalyticsPlugin extends Plugin { parameters?: Record, formatParameters?: Record, signal?: AbortSignal, + req?: express.Request, ): Promise { const workspaceClient = getWorkspaceClient(); const warehouseId = await getWarehouseId(); @@ -237,6 +259,7 @@ export class AnalyticsPlugin extends Plugin { const { statement, parameters: sqlParameters } = this.queryProcessor.convertToSQLParameters(query, parameters); + const queryStartTime = Date.now(); const response = await this.SQLClient.executeStatement( workspaceClient, { @@ -247,6 +270,20 @@ export class AnalyticsPlugin extends Plugin { }, signal, ); + const queryDurationMs = Date.now() - queryStartTime; + + // Set warehouse execution data on WideEvent + if (req) { + const event = logger.event(req); + const result = response.result as any; + const rowCount = result?.data?.length ?? result?.data_array?.length ?? 
0; + + event.setContext("sql-warehouse", { + warehouse_id: warehouseId, + rows_returned: rowCount, + query_duration_ms: queryDurationMs, + }); + } return response.result; } diff --git a/packages/appkit/src/analytics/query.ts b/packages/appkit/src/analytics/query.ts index e5a9e4f4..2b9afbb0 100644 --- a/packages/appkit/src/analytics/query.ts +++ b/packages/appkit/src/analytics/query.ts @@ -2,7 +2,7 @@ import { createHash } from "node:crypto"; import type { sql } from "@databricks/sdk-experimental"; import { isSQLTypeMarker, type SQLTypeMarker, sql as sqlHelpers } from "shared"; import { getWorkspaceId } from "../context"; -import { ValidationError } from "../observability/errors"; +import { ValidationError } from "../errors"; type SQLParameterValue = SQLTypeMarker | null | undefined; diff --git a/packages/appkit/src/app/index.ts b/packages/appkit/src/app/index.ts index c08328fa..01ef4f6c 100644 --- a/packages/appkit/src/app/index.ts +++ b/packages/appkit/src/app/index.ts @@ -1,6 +1,6 @@ import fs from "node:fs/promises"; import path from "node:path"; -import { createLogger } from "../observability/logger"; +import { createLogger } from "../logging/logger"; const logger = createLogger("app"); diff --git a/packages/appkit/src/cache/index.ts b/packages/appkit/src/cache/index.ts index b06dceb4..783480f7 100644 --- a/packages/appkit/src/cache/index.ts +++ b/packages/appkit/src/cache/index.ts @@ -2,12 +2,9 @@ import { createHash } from "node:crypto"; import { WorkspaceClient } from "@databricks/sdk-experimental"; import type { CacheConfig, CacheStorage } from "shared"; import { LakebaseConnector } from "@/connectors"; -import { - AppKitError, - ExecutionError, - InitializationError, -} from "../observability/errors"; -import { createLogger } from "../observability/logger"; +import { AppKitError, ExecutionError, InitializationError } from "../errors"; +import { createLogger } from "../logging/logger"; +import type { WideEvent } from "../logging/wide-event"; import type { Counter, TelemetryProvider } from "../telemetry"; import { SpanStatusCode, TelemetryManager } from "../telemetry"; import { deepMerge } from "../utils"; @@ -180,6 +177,7 @@ export class CacheManager { * @param fn - Function to execute * @param userKey - User key * @param options - Options for the cache + * @param wideEvent - Optional WideEvent to track cache execution data * @returns Promise of the result */ async getOrExecute( @@ -187,6 +185,7 @@ export class CacheManager { fn: () => Promise, userKey: string, options?: { ttl?: number }, + wideEvent?: WideEvent | null, ): Promise { if (!this.config.enabled) return fn(); @@ -211,6 +210,12 @@ export class CacheManager { this.telemetryMetrics.cacheHitCount.add(1, { "cache.key": cacheKey, }); + + wideEvent?.setExecution({ + cache_hit: true, + cache_key: cacheKey, + }); + return cached.value as T; } @@ -227,6 +232,13 @@ export class CacheManager { "cache.key": cacheKey, "cache.deduplication": "true", }); + + wideEvent?.setExecution({ + cache_hit: true, + cache_key: cacheKey, + cache_deduplication: true, + }); + span.end(); return inFlight as Promise; } @@ -238,6 +250,11 @@ export class CacheManager { "cache.key": cacheKey, }); + wideEvent?.setExecution({ + cache_hit: false, + cache_key: cacheKey, + }); + const promise = fn() .then(async (result) => { await this.set(cacheKey, result, options); diff --git a/packages/appkit/src/cache/storage/persistent.ts b/packages/appkit/src/cache/storage/persistent.ts index 8b2298e0..2ae9ff99 100644 --- a/packages/appkit/src/cache/storage/persistent.ts 
+++ b/packages/appkit/src/cache/storage/persistent.ts @@ -1,11 +1,8 @@ import { createHash } from "node:crypto"; import type { CacheConfig, CacheEntry, CacheStorage } from "shared"; import type { LakebaseConnector } from "../../connectors"; -import { - InitializationError, - ValidationError, -} from "../../observability/errors"; -import { createLogger } from "../../observability/logger"; +import { InitializationError, ValidationError } from "../../errors"; +import { createLogger } from "../../logging/logger"; import { lakebaseStorageDefaults } from "./defaults"; const logger = createLogger("cache:persistent"); diff --git a/packages/appkit/src/connectors/lakebase/client.ts b/packages/appkit/src/connectors/lakebase/client.ts index 3f9e0854..89420eff 100644 --- a/packages/appkit/src/connectors/lakebase/client.ts +++ b/packages/appkit/src/connectors/lakebase/client.ts @@ -15,8 +15,8 @@ import { ConfigurationError, ConnectionError, ValidationError, -} from "../../observability/errors"; -import { createLogger } from "../../observability/logger"; +} from "../../errors"; +import { createLogger } from "../../logging/logger"; import { deepMerge } from "../../utils"; import { lakebaseDefaults } from "./defaults"; import type { diff --git a/packages/appkit/src/connectors/lakebase/utils.ts b/packages/appkit/src/connectors/lakebase/utils.ts index b2b1bdc2..5bf726d4 100644 --- a/packages/appkit/src/connectors/lakebase/utils.ts +++ b/packages/appkit/src/connectors/lakebase/utils.ts @@ -1,7 +1,4 @@ -import { - ConfigurationError, - ValidationError, -} from "../../observability/errors"; +import { ConfigurationError, ValidationError } from "../../errors"; import type { LakebaseConnectionConfig } from "./types"; export interface ParsedConnectionString { diff --git a/packages/appkit/src/connectors/sql-warehouse/client.ts b/packages/appkit/src/connectors/sql-warehouse/client.ts index 8aeeebf2..71c69cb8 100644 --- a/packages/appkit/src/connectors/sql-warehouse/client.ts +++ b/packages/appkit/src/connectors/sql-warehouse/client.ts @@ -9,8 +9,8 @@ import { ConnectionError, ExecutionError, ValidationError, -} from "../../observability/errors"; -import { createLogger } from "../../observability/logger"; +} from "../../errors"; +import { createLogger } from "../../logging/logger"; import { ArrowStreamProcessor } from "../../stream/arrow-stream-processor"; import type { TelemetryProvider } from "../../telemetry"; import { @@ -89,6 +89,11 @@ export class SQLWarehouseConnector { const startTime = Date.now(); let success = false; + // if signal is aborted, throw an error + if (signal?.aborted) { + throw ExecutionError.canceled(); + } + return this.telemetry.startActiveSpan( "sql.query", { @@ -103,6 +108,24 @@ export class SQLWarehouseConnector { }, }, async (span: Span) => { + let abortHandler: (() => void) | undefined; + let isAborted = false; + + if (signal) { + abortHandler = () => { + // abort span if not recording + if (!span.isRecording()) return; + isAborted = true; + span.setAttribute("cancelled", true); + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Query cancelled by client", + }); + span.end(); + }; + signal.addEventListener("abort", abortHandler, { once: true }); + } + try { // validate required fields if (!input.statement) { @@ -179,7 +202,9 @@ export class SQLWarehouseConnector { case "CLOSED": throw ExecutionError.resultsClosed(); default: - throw ExecutionError.unknownState(status?.state as string); + throw ExecutionError.unknownState( + String(status?.state ?? 
"unknown"), + ); } const resultData = result.result as any; @@ -193,14 +218,20 @@ export class SQLWarehouseConnector { } success = true; - span.setStatus({ code: SpanStatusCode.OK }); + // only set success status if not aborted + if (!isAborted) { + span.setStatus({ code: SpanStatusCode.OK }); + } return result; } catch (error) { - span.recordException(error as Error); - span.setStatus({ - code: SpanStatusCode.ERROR, - message: error instanceof Error ? error.message : String(error), - }); + // only record error if not already handled by abort + if (!isAborted) { + span.recordException(error as Error); + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error ? error.message : String(error), + }); + } if (error instanceof AppKitError) { throw error; @@ -209,8 +240,17 @@ export class SQLWarehouseConnector { error instanceof Error ? error.message : String(error), ); } finally { + // remove abort handler + if (abortHandler && signal) { + signal.removeEventListener("abort", abortHandler); + } + const duration = Date.now() - startTime; - span.end(); + + // end span if not already ended by abort handler + if (!isAborted) { + span.end(); + } const attributes = { "db.warehouse_id": input.warehouse_id, @@ -316,7 +356,9 @@ export class SQLWarehouseConnector { case "CLOSED": throw ExecutionError.resultsClosed(); default: - throw ExecutionError.unknownState(status?.state as string); + throw ExecutionError.unknownState( + String(status?.state ?? "unknown"), + ); } // continue polling after delay diff --git a/packages/appkit/src/context/service-context.ts b/packages/appkit/src/context/service-context.ts index 217b0398..0304196f 100644 --- a/packages/appkit/src/context/service-context.ts +++ b/packages/appkit/src/context/service-context.ts @@ -12,7 +12,7 @@ import { AuthenticationError, ConfigurationError, InitializationError, -} from "../observability/errors"; +} from "../errors"; import type { UserContext } from "./user-context"; /** diff --git a/packages/appkit/src/observability/errors/authentication.ts b/packages/appkit/src/errors/authentication.ts similarity index 100% rename from packages/appkit/src/observability/errors/authentication.ts rename to packages/appkit/src/errors/authentication.ts diff --git a/packages/appkit/src/observability/errors/base.ts b/packages/appkit/src/errors/base.ts similarity index 100% rename from packages/appkit/src/observability/errors/base.ts rename to packages/appkit/src/errors/base.ts diff --git a/packages/appkit/src/observability/errors/configuration.ts b/packages/appkit/src/errors/configuration.ts similarity index 100% rename from packages/appkit/src/observability/errors/configuration.ts rename to packages/appkit/src/errors/configuration.ts diff --git a/packages/appkit/src/observability/errors/connection.ts b/packages/appkit/src/errors/connection.ts similarity index 100% rename from packages/appkit/src/observability/errors/connection.ts rename to packages/appkit/src/errors/connection.ts diff --git a/packages/appkit/src/observability/errors/execution.ts b/packages/appkit/src/errors/execution.ts similarity index 100% rename from packages/appkit/src/observability/errors/execution.ts rename to packages/appkit/src/errors/execution.ts diff --git a/packages/appkit/src/observability/errors/index.ts b/packages/appkit/src/errors/index.ts similarity index 100% rename from packages/appkit/src/observability/errors/index.ts rename to packages/appkit/src/errors/index.ts diff --git a/packages/appkit/src/observability/errors/initialization.ts 
b/packages/appkit/src/errors/initialization.ts similarity index 100% rename from packages/appkit/src/observability/errors/initialization.ts rename to packages/appkit/src/errors/initialization.ts diff --git a/packages/appkit/src/observability/errors/server.ts b/packages/appkit/src/errors/server.ts similarity index 100% rename from packages/appkit/src/observability/errors/server.ts rename to packages/appkit/src/errors/server.ts diff --git a/packages/appkit/src/observability/errors/tests/errors.test.ts b/packages/appkit/src/errors/tests/errors.test.ts similarity index 100% rename from packages/appkit/src/observability/errors/tests/errors.test.ts rename to packages/appkit/src/errors/tests/errors.test.ts diff --git a/packages/appkit/src/observability/errors/tunnel.ts b/packages/appkit/src/errors/tunnel.ts similarity index 100% rename from packages/appkit/src/observability/errors/tunnel.ts rename to packages/appkit/src/errors/tunnel.ts diff --git a/packages/appkit/src/observability/errors/validation.ts b/packages/appkit/src/errors/validation.ts similarity index 100% rename from packages/appkit/src/observability/errors/validation.ts rename to packages/appkit/src/errors/validation.ts diff --git a/packages/appkit/src/observability/index.ts b/packages/appkit/src/logging/index.ts similarity index 72% rename from packages/appkit/src/observability/index.ts rename to packages/appkit/src/logging/index.ts index 3a2f9960..ff9b15ee 100644 --- a/packages/appkit/src/observability/index.ts +++ b/packages/appkit/src/logging/index.ts @@ -10,7 +10,13 @@ export { ServerError, TunnelError, ValidationError, -} from "./errors"; +} from "../errors"; export { createLogger, type Logger } from "./logger"; +export { + DEFAULT_SAMPLING_CONFIG, + type SamplingConfig, + shouldSample, +} from "./sampling"; export type { LogLevel } from "./types"; export { WideEvent, type WideEventData } from "./wide-event"; +export { WideEventEmitter } from "./wide-event-emitter"; diff --git a/packages/appkit/src/logging/logger.ts b/packages/appkit/src/logging/logger.ts new file mode 100644 index 00000000..efc3ebdd --- /dev/null +++ b/packages/appkit/src/logging/logger.ts @@ -0,0 +1,274 @@ +import { format } from "node:util"; +import { trace } from "@opentelemetry/api"; +import type { Request, Response } from "express"; +import { createDebug as createObug } from "obug"; +import { DEFAULT_SAMPLING_CONFIG, shouldSample } from "./sampling"; +import { WideEvent } from "./wide-event"; +import { WideEventEmitter } from "./wide-event-emitter"; + +/** + * Logger interface for AppKit components + */ +export interface Logger { + /** Debug output (disabled by default, enable via DEBUG env var) */ + debug(message: string, ...args: unknown[]): void; + debug(req: Request, message: string, ...args: unknown[]): void; + + /** Info output (always visible, for operational messages) */ + info(message: string, ...args: unknown[]): void; + info(req: Request, message: string, ...args: unknown[]): void; + + /** Warning output (always visible, for degraded states) */ + warn(message: string, ...args: unknown[]): void; + warn(req: Request, message: string, ...args: unknown[]): void; + + /** Error output (always visible, for failures) */ + error(message: string, ...args: unknown[]): void; + error(req: Request, message: string, ...args: unknown[]): void; + + /** Get or create request-scoped WideEvent */ + event(req: Request): WideEvent; +} + +// WeakMap to store WideEvent per request +const eventsByRequest = new WeakMap(); + +// Global emitter instance +const emitter = 
new WideEventEmitter();
+
+const MAX_REQUEST_ID_LENGTH = 128;
+
+/**
+ * Sanitize a request ID from user headers
+ */
+function sanitizeRequestId(id: string): string {
+  // Remove any characters that aren't alphanumeric, dash, underscore, or dot
+  const sanitized = id.replace(/[^a-zA-Z0-9_.-]/g, "");
+  // Limit length
+  return sanitized.slice(0, MAX_REQUEST_ID_LENGTH);
+}
+
+/**
+ * Generate a request ID from the request
+ */
+function generateRequestId(req: Request): string {
+  // Use existing request ID if available
+  const existingId =
+    req.headers["x-request-id"] ||
+    req.headers["x-correlation-id"] ||
+    req.headers["x-amzn-trace-id"];
+
+  if (existingId && typeof existingId === "string" && existingId.length > 0) {
+    const sanitized = sanitizeRequestId(existingId);
+    if (sanitized.length > 0) {
+      return sanitized;
+    }
+  }
+
+  // Generate a simple ID based on timestamp and random
+  return `req_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`;
+}
+
+/**
+ * Get or create a WideEvent for the given request
+ */
+function getOrCreateEvent(req: Request): WideEvent {
+  let wideEvent = eventsByRequest.get(req);
+
+  if (!wideEvent) {
+    const requestId = generateRequestId(req);
+    wideEvent = new WideEvent(requestId);
+
+    // Set initial request metadata
+    const path = req.path || req.url || req.originalUrl;
+    wideEvent.set("method", req.method).set("path", path);
+
+    // Extract user ID from request headers
+    const userId = req.headers["x-forwarded-user"] as string | undefined;
+    if (userId) {
+      wideEvent.setUser({ id: userId });
+    }
+
+    // Extract trace ID from active span for distributed tracing
+    const currentSpan = trace.getActiveSpan();
+    const spanContext = currentSpan?.spanContext();
+    if (spanContext?.traceId) {
+      wideEvent.set("trace_id", spanContext.traceId);
+
+      const debugLogger = createObug("appkit:logger:event", {
+        useColors: true,
+      });
+      debugLogger(
+        "WideEvent created: %s %s (reqId: %s, traceId: %s)",
+        req.method,
+        path,
+        requestId.substring(0, 8),
+        spanContext.traceId.substring(0, 8),
+      );
+    }
+
+    // Update service scope
+    if (wideEvent.data.service) {
+      wideEvent.data.service = {
+        ...wideEvent.data.service,
+        name: "appkit",
+      };
+    }
+
+    eventsByRequest.set(req, wideEvent);
+
+    // Auto-finalize on response finish
+    const res = req.res as Response | undefined;
+    if (res) {
+      res.once("finish", () => {
+        const event = eventsByRequest.get(req);
+        if (event) {
+          // Finalize the event with status code
+          const finalizedData = event.finalize(res.statusCode || 200);
+
+          // Emit to OpenTelemetry if sampled
+          const sampled = shouldSample(finalizedData, DEFAULT_SAMPLING_CONFIG);
+
+          if (sampled) {
+            emitter.emit(finalizedData);
+          }
+
+          // Clean up to prevent memory leaks
+          eventsByRequest.delete(req);
+        }
+      });
+
+      // Also handle aborted requests
+      res.once("close", () => {
+        if (!res.writableFinished) {
+          // Request was aborted/cancelled
+          const event = eventsByRequest.get(req);
+
+          if (event) {
+            // Try to end the active span with error status
+            const currentSpan = trace.getActiveSpan();
+            if (currentSpan) {
+              currentSpan.setStatus({
+                code: 1, // ERROR
+                message: "Request aborted by client",
+              });
+              currentSpan.end();
+            }
+          }
+
+          eventsByRequest.delete(req);
+        }
+      });
+    }
+  }
+
+  return wideEvent;
+}
+
+/**
+ * Check if the first argument is an Express Request
+ */
+function isRequest(arg: unknown): arg is Request {
+  return (
+    typeof arg === "object" &&
+    arg !== null &&
+    "method" in arg &&
+    "path" in arg &&
+    typeof (arg as Request).method === "string"
+  );
+}
+
+/**
+ * Create a logger instance for a specific scope
+ * @param scope - The scope identifier (e.g., "connectors:lakebase")
+ * @returns Logger instance with debug, info, warn, and error methods
+ *
+ * @example
+ * ```typescript
+ * const logger = createLogger("connectors:lakebase");
+ *
+ * // Regular logging (no request tracking)
+ * logger.debug("Connection established with pool size: %d", poolSize);
+ * logger.info("Server started on port %d", port);
+ *
+ * // Request-scoped logging (tracks in WideEvent)
+ * logger.debug(req, "Processing query: %s", queryId);
+ * logger.error(req, "Query failed: %O", error);
+ *
+ * // Get WideEvent for manual updates
+ * const event = logger.event(req);
+ * event.setComponent("analytics", "executeQuery");
+ * ```
+ */
+export function createLogger(scope: string): Logger {
+  const debug = createObug(`appkit:${scope}`, { useColors: true });
+  const prefix = `[appkit:${scope}]`;
+
+  function debugLog(reqOrMessage: Request | string, ...args: unknown[]): void {
+    if (isRequest(reqOrMessage)) {
+      const req = reqOrMessage;
+      const message = args[0] as string;
+      const logArgs = args.slice(1);
+      const formatted = format(message, ...logArgs);
+
+      debug(message, ...logArgs);
+      getOrCreateEvent(req).addLog("debug", formatted);
+    } else {
+      debug(reqOrMessage, ...args);
+    }
+  }
+
+  function infoLog(reqOrMessage: Request | string, ...args: unknown[]): void {
+    if (isRequest(reqOrMessage)) {
+      const req = reqOrMessage;
+      const message = args[0] as string;
+      const logArgs = args.slice(1);
+      const formatted = format(message, ...logArgs);
+
+      console.log(prefix, formatted);
+      getOrCreateEvent(req).addLog("info", formatted);
+    } else {
+      console.log(prefix, format(reqOrMessage, ...args));
+    }
+  }
+
+  function warnLog(reqOrMessage: Request | string, ...args: unknown[]): void {
+    if (isRequest(reqOrMessage)) {
+      const req = reqOrMessage;
+      const message = args[0] as string;
+      const logArgs = args.slice(1);
+      const formatted = format(message, ...logArgs);
+
+      console.warn(prefix, formatted);
+      getOrCreateEvent(req).addLog("warn", formatted);
+    } else {
+      console.warn(prefix, format(reqOrMessage, ...args));
+    }
+  }
+
+  function errorLog(reqOrMessage: Request | string, ...args: unknown[]): void {
+    if (isRequest(reqOrMessage)) {
+      const req = reqOrMessage;
+      const message = args[0] as string;
+      const logArgs = args.slice(1);
+      const formatted = format(message, ...logArgs);
+
+      console.error(prefix, formatted);
+      getOrCreateEvent(req).addLog("error", formatted);
+    } else {
+      console.error(prefix, format(reqOrMessage, ...args));
+    }
+  }
+
+  function event(req: Request): WideEvent {
+    return getOrCreateEvent(req);
+  }
+
+  return {
+    debug: debugLog as Logger["debug"],
+    info: infoLog as Logger["info"],
+    warn: warnLog as Logger["warn"],
+    error: errorLog as Logger["error"],
+    event,
+  };
+}
diff --git a/packages/appkit/src/logging/sampling.ts b/packages/appkit/src/logging/sampling.ts
new file mode 100644
index 00000000..81c26043
--- /dev/null
+++ b/packages/appkit/src/logging/sampling.ts
@@ -0,0 +1,120 @@
+import { shouldExcludePath } from "../utils/path-exclusions";
+import type { WideEventData } from "./wide-event";
+
+/**
+ * Sampling configuration for WideEvents
+ */
+export interface SamplingConfig {
+  /** Always sample if any of these conditions are true */
+  alwaysSampleIf: {
+    /** Sample if event has errors */
+    hasErrors: boolean;
+    /** Sample if status code >= this value (e.g., 400) */
+    statusCodeGte: number;
+    /** Sample if duration >= this value in ms (e.g., 5000) */
+    durationGte: number;
+    /** Sample if cache was used (hit or miss tracked) */
+    hasCacheInfo: boolean;
+  };
+
+  /** Sample rate for normal requests (0-1, e.g., 0.1 = 10%) */
+  sampleRate: number;
+}
+
+/**
+ * Get sample rate from environment variable or default to 1.0 (100%)
+ */
+function getSampleRate(): number {
+  const envRate = process.env.APPKIT_SAMPLE_RATE;
+  if (envRate) {
+    const parsed = parseFloat(envRate);
+    if (!Number.isNaN(parsed) && parsed >= 0 && parsed <= 1) {
+      return parsed;
+    }
+  }
+  return 1;
+}
+
+/**
+ * Default sampling configuration
+ */
+export const DEFAULT_SAMPLING_CONFIG: SamplingConfig = {
+  alwaysSampleIf: {
+    hasErrors: true,
+    statusCodeGte: 400,
+    durationGte: 5000, // 5 seconds
+    hasCacheInfo: true, // Always sample requests with cache info (hit or miss)
+  },
+  sampleRate: getSampleRate(),
+};
+
+/**
+ * Simple hash function for deterministic sampling
+ */
+function hashString(str: string): number {
+  let hash = 0;
+  for (let i = 0; i < str.length; i++) {
+    const char = str.charCodeAt(i);
+    hash = (hash << 5) - hash + char;
+    hash = hash & hash; // Convert to 32-bit integer
+  }
+  return Math.abs(hash);
+}
+
+/**
+ * Determine if a WideEvent should be sampled based on configuration.
+ * Uses shared path exclusions from utils/path-exclusions.ts.
+ */
+export function shouldSample(
+  event: WideEventData,
+  config: SamplingConfig = DEFAULT_SAMPLING_CONFIG,
+): boolean {
+  // Check exclusions first using shared path exclusions
+  if (shouldExcludePath(event.path)) {
+    return false;
+  }
+
+  // Always sample if has errors
+  if (config.alwaysSampleIf.hasErrors && event.error) {
+    return true;
+  }
+
+  // Always sample if status code >= threshold
+  if (
+    config.alwaysSampleIf.statusCodeGte &&
+    event.status_code &&
+    event.status_code >= config.alwaysSampleIf.statusCodeGte
+  ) {
+    return true;
+  }
+
+  // Always sample if duration >= threshold
+  if (
+    config.alwaysSampleIf.durationGte &&
+    event.duration_ms &&
+    event.duration_ms >= config.alwaysSampleIf.durationGte
+  ) {
+    return true;
+  }
+
+  // Always sample if cache info is present (cache hit or miss)
+  if (
+    config.alwaysSampleIf.hasCacheInfo &&
+    event.execution?.cache_hit !== undefined
+  ) {
+    return true;
+  }
+
+  // Sample based on sample rate
+  if (config.sampleRate >= 1) {
+    return true;
+  }
+
+  if (config.sampleRate <= 0) {
+    return false;
+  }
+
+  // Deterministic sampling based on request ID
+  const hash = hashString(event.request_id);
+  return hash % 100 < config.sampleRate * 100;
+}
diff --git a/packages/appkit/src/logging/tests/logger-with-event.test.ts b/packages/appkit/src/logging/tests/logger-with-event.test.ts
new file mode 100644
index 00000000..a60b6f59
--- /dev/null
+++ b/packages/appkit/src/logging/tests/logger-with-event.test.ts
@@ -0,0 +1,224 @@
+import type { Request, Response } from "express";
+import { beforeEach, describe, expect, test, vi } from "vitest";
+import { createLogger } from "../logger";
+import { WideEvent } from "../wide-event";
+
+describe("Logger with WideEvent Integration", () => {
+  let mockReq: Partial<Request>;
+  let mockRes: Partial<Response>;
+  let finishCallback: (() => void) | undefined;
+
+  beforeEach(() => {
+    finishCallback = undefined;
+    mockRes = {
+      once: vi.fn((event: string, callback: () => void) => {
+        if (event === "finish") {
+          finishCallback = callback;
+        }
+        return mockRes as Response;
+      }),
+    };
+
+    mockReq = {
+      method: "POST",
+      path: "/api/query",
+      url: "/api/query",
+      headers: {
+        "user-agent": "test-agent",
+        "x-forwarded-for": "127.0.0.1",
+      },
+
res: mockRes as Response, + }; + }); + + describe("Regular Logging (No Request)", () => { + test("should log debug messages", () => { + const logger = createLogger("test"); + const debugSpy = vi.spyOn(console, "log"); + + logger.debug("Test message: %s", "value"); + + // Debug uses obug, not console.log + debugSpy.mockRestore(); + }); + + test("should log info messages", () => { + const logger = createLogger("test"); + const infoSpy = vi.spyOn(console, "log"); + + logger.info("Server started on port %d", 3000); + + expect(infoSpy).toHaveBeenCalledWith( + "[appkit:test]", + "Server started on port 3000", + ); + + infoSpy.mockRestore(); + }); + + test("should log error messages", () => { + const logger = createLogger("test"); + const errorSpy = vi.spyOn(console, "error"); + + logger.error("Failed to connect: %s", "timeout"); + + expect(errorSpy).toHaveBeenCalledWith( + "[appkit:test]", + "Failed to connect: timeout", + ); + + errorSpy.mockRestore(); + }); + }); + + describe("Request-Scoped Logging", () => { + test("should create WideEvent on first access", () => { + const logger = createLogger("analytics"); + + const event = logger.event(mockReq as Request); + + expect(event).toBeInstanceOf(WideEvent); + expect(event.data.request_id).toBeDefined(); + expect(event.data.method).toBe("POST"); + expect(event.data.path).toBe("/api/query"); + }); + + test("should reuse same WideEvent for same request", () => { + const logger = createLogger("analytics"); + + const event1 = logger.event(mockReq as Request); + const event2 = logger.event(mockReq as Request); + + expect(event1).toBe(event2); + }); + + test("should add logs to WideEvent when logging with request", () => { + const logger = createLogger("analytics"); + const infoSpy = vi.spyOn(console, "log"); + + logger.info(mockReq as Request, "Processing query: %s", "SELECT *"); + + const event = logger.event(mockReq as Request); + expect(event.data.logs).toHaveLength(1); + expect(event.data.logs![0].level).toBe("info"); + expect(event.data.logs![0].message).toBe("Processing query: SELECT *"); + + infoSpy.mockRestore(); + }); + + test("should add multiple logs to same WideEvent", () => { + const logger = createLogger("analytics"); + const infoSpy = vi.spyOn(console, "log"); + const errorSpy = vi.spyOn(console, "error"); + + logger.info(mockReq as Request, "Starting query"); + logger.info(mockReq as Request, "Query completed: %d rows", 100); + logger.error(mockReq as Request, "Warning: slow query"); + + const event = logger.event(mockReq as Request); + expect(event.data.logs).toHaveLength(3); + expect(event.data.logs![0].message).toBe("Starting query"); + expect(event.data.logs![1].message).toBe("Query completed: 100 rows"); + expect(event.data.logs![2].message).toBe("Warning: slow query"); + + infoSpy.mockRestore(); + errorSpy.mockRestore(); + }); + + test("should set service scope from logger", () => { + const logger = createLogger("connectors:lakebase"); + + const event = logger.event(mockReq as Request); + + expect(event.data.service?.name).toBe("appkit"); + // Note: scope is not currently stored in WideEventData + // It's used for logger context but not part of the event structure + }); + + test("should finalize WideEvent on response finish", () => { + const logger = createLogger("analytics"); + const event = logger.event(mockReq as Request); + + expect(event.data.duration_ms).toBeUndefined(); + expect(event.data.status_code).toBeUndefined(); + + // Trigger finish event + finishCallback?.(); + + expect(event.data.duration_ms).toBeDefined(); + 
expect(event.data.status_code).toBe(200); + }); + + test("should allow manual updates to WideEvent", () => { + const logger = createLogger("analytics"); + const event = logger.event(mockReq as Request); + + event.setComponent("analytics", "executeQuery"); + event.setExecution({ + statement: "SELECT * FROM users", + duration: 125, + rowCount: 100, + }); + + expect(event.data.component?.name).toBe("analytics"); + expect(event.data.component?.operation).toBe("executeQuery"); + expect(event.data.execution?.statement).toBe("SELECT * FROM users"); + expect(event.data.execution?.duration).toBe(125); + }); + + test("should track errors in WideEvent", () => { + const logger = createLogger("analytics"); + const errorSpy = vi.spyOn(console, "error"); + + const error = new Error("Query failed"); + logger.error(mockReq as Request, "Query failed: %O", error); + + const event = logger.event(mockReq as Request); + expect(event.data.logs).toHaveLength(1); + expect(event.data.logs![0].level).toBe("error"); + + errorSpy.mockRestore(); + }); + }); + + describe("Mixed Logging", () => { + test("should handle mix of request and non-request logging", () => { + const logger = createLogger("test"); + const infoSpy = vi.spyOn(console, "log"); + + // Non-request log + logger.info("Server started"); + + // Request log + logger.info(mockReq as Request, "Processing request"); + + // Non-request log again + logger.info("Request count: %d", 1); + + const event = logger.event(mockReq as Request); + expect(event.data.logs).toHaveLength(1); // Only request log + expect(event.data.logs![0].message).toBe("Processing request"); + + infoSpy.mockRestore(); + }); + }); + + describe("Request Detection", () => { + test("should correctly identify Request objects", () => { + const logger = createLogger("test"); + const infoSpy = vi.spyOn(console, "log"); + + // String message + logger.info("Test message"); + + // Request + message + logger.info(mockReq as Request, "Request message"); + + // Non-request object shouldn't be treated as request + const notRequest = { method: "POST" }; // Missing path + logger.info(notRequest as any, "Should not crash"); + + infoSpy.mockRestore(); + }); + }); +}); diff --git a/packages/appkit/src/observability/tests/logger.test.ts b/packages/appkit/src/logging/tests/logger.test.ts similarity index 100% rename from packages/appkit/src/observability/tests/logger.test.ts rename to packages/appkit/src/logging/tests/logger.test.ts diff --git a/packages/appkit/src/logging/tests/sampling.test.ts b/packages/appkit/src/logging/tests/sampling.test.ts new file mode 100644 index 00000000..de87ae14 --- /dev/null +++ b/packages/appkit/src/logging/tests/sampling.test.ts @@ -0,0 +1,209 @@ +import { describe, expect, test } from "vitest"; +import { DEFAULT_SAMPLING_CONFIG, shouldSample } from "../sampling"; +import type { WideEventData } from "../wide-event"; + +describe("Sampling", () => { + const baseEvent: WideEventData = { + timestamp: new Date().toISOString(), + request_id: "test-request-id", + method: "GET", + path: "/api/test", + status_code: 200, + duration_ms: 100, + }; + + describe("shouldSample", () => { + test("should exclude health check paths", () => { + const event: WideEventData = { + ...baseEvent, + path: "/health", + }; + + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(false); + }); + + test("should exclude metrics paths", () => { + const event: WideEventData = { + ...baseEvent, + path: "/metrics", + }; + + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(false); + }); + + 
test("should exclude static assets", () => { + const event: WideEventData = { + ...baseEvent, + path: "/favicon.ico", + }; + + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(false); + }); + + test("should exclude _next paths", () => { + const event: WideEventData = { + ...baseEvent, + path: "/_next/static/chunks/main.js", + }; + + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(false); + }); + + test("should always sample events with errors", () => { + const event: WideEventData = { + ...baseEvent, + error: { + type: "Error", + code: "QUERY_FAILED", + message: "Query failed", + retriable: false, + }, + }; + + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(true); + }); + + test("should always sample 4xx status codes", () => { + const event: WideEventData = { + ...baseEvent, + status_code: 404, + }; + + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(true); + }); + + test("should always sample 5xx status codes", () => { + const event: WideEventData = { + ...baseEvent, + status_code: 500, + }; + + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(true); + }); + + test("should always sample slow requests", () => { + const event: WideEventData = { + ...baseEvent, + duration_ms: 6000, // 6 seconds + }; + + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(true); + }); + + test("should sample requests deterministically based on request ID", () => { + const config = { + ...DEFAULT_SAMPLING_CONFIG, + alwaysSampleIf: { + hasErrors: false, + statusCodeGte: 999, + durationGte: 999999, + hasCacheInfo: false, + }, + sampleRate: 0.5, // 50% sample rate to get both true and false + }; + + // Test multiple request IDs to ensure deterministic behavior + const results = new Set(); + + for (let i = 0; i < 100; i++) { + const event: WideEventData = { + ...baseEvent, + request_id: `test-request-${i}`, + }; + + // Same request ID should always produce same result + const result1 = shouldSample(event, config); + const result2 = shouldSample(event, config); + expect(result1).toBe(result2); + + results.add(result1); + } + + // Should have both true and false results (not all sampled or all excluded) + expect(results.size).toBe(2); + }); + + test("should respect sample rate of 0 (never sample)", () => { + const config = { + ...DEFAULT_SAMPLING_CONFIG, + alwaysSampleIf: { + hasErrors: false, + statusCodeGte: 999, + durationGte: 999999, + hasCacheInfo: false, + }, + sampleRate: 0, + }; + + const event: WideEventData = { + ...baseEvent, + request_id: "test-no-sample", + }; + + expect(shouldSample(event, config)).toBe(false); + }); + + test("should respect sample rate of 1 (always sample)", () => { + const config = { + ...DEFAULT_SAMPLING_CONFIG, + sampleRate: 1, + }; + + const event: WideEventData = { + ...baseEvent, + request_id: "test-always-sample", + }; + + expect(shouldSample(event, config)).toBe(true); + }); + + test("should sample approximately 10% with 0.1 sample rate", () => { + const config = { + ...DEFAULT_SAMPLING_CONFIG, + alwaysSampleIf: { + hasErrors: false, + statusCodeGte: 999, + durationGte: 999999, + hasCacheInfo: false, + }, + sampleRate: 0.1, + }; + + let sampledCount = 0; + const totalRequests = 1000; + + for (let i = 0; i < totalRequests; i++) { + const event: WideEventData = { + ...baseEvent, + request_id: `request-${i}`, + }; + + if (shouldSample(event, config)) { + sampledCount++; + } + } + + // Allow 5-15% range (expected 10%) + const sampleRate = sampledCount / totalRequests; + expect(sampleRate).toBeGreaterThan(0.05); + 
expect(sampleRate).toBeLessThan(0.15); + }); + + test("should prioritize always-sample conditions over exclusions", () => { + // Error on health check path - should still be excluded + const event: WideEventData = { + ...baseEvent, + path: "/health", + error: { + type: "Error", + code: "TEST", + message: "Test", + retriable: false, + }, + }; + + // Exclusions take precedence + expect(shouldSample(event, DEFAULT_SAMPLING_CONFIG)).toBe(false); + }); + }); +}); diff --git a/packages/appkit/src/logging/tests/wide-event-emitter.test.ts b/packages/appkit/src/logging/tests/wide-event-emitter.test.ts new file mode 100644 index 00000000..a640b4fb --- /dev/null +++ b/packages/appkit/src/logging/tests/wide-event-emitter.test.ts @@ -0,0 +1,257 @@ +import { logs } from "@opentelemetry/api-logs"; +import { beforeEach, describe, expect, test, vi } from "vitest"; +import type { WideEventData } from "../wide-event"; +import { WideEventEmitter } from "../wide-event-emitter"; + +// Mock OpenTelemetry logs API +vi.mock("@opentelemetry/api-logs", () => ({ + logs: { + getLogger: vi.fn(() => ({ + emit: vi.fn(), + })), + }, + SeverityNumber: { + DEBUG: 5, + INFO: 9, + WARN: 13, + ERROR: 17, + }, +})); + +describe("WideEventEmitter", () => { + let emitter: WideEventEmitter; + let mockLogger: any; + + beforeEach(() => { + mockLogger = { + emit: vi.fn(), + }; + vi.mocked(logs.getLogger).mockReturnValue(mockLogger); + emitter = new WideEventEmitter(); + }); + + const baseEvent: WideEventData = { + timestamp: "2026-01-13T10:00:00.000Z", + request_id: "req_123", + method: "GET", + path: "/api/test", + status_code: 200, + duration_ms: 125, + service: { + name: "appkit", + version: "1.0.0", + node_env: "production", + }, + }; + + test("should emit basic event to OpenTelemetry", () => { + emitter.emit(baseEvent); + + expect(mockLogger.emit).toHaveBeenCalledTimes(1); + const logRecord = mockLogger.emit.mock.calls[0][0]; + + expect(logRecord.timestamp).toBeDefined(); + expect(logRecord.severityNumber).toBeDefined(); + expect(logRecord.body).toContain("GET /api/test"); + expect(logRecord.body).toContain("200"); + expect(logRecord.attributes).toBeDefined(); + }); + + test("should set correct severity for successful requests", () => { + emitter.emit(baseEvent); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.severityText).toBe("INFO"); + }); + + test("should set ERROR severity for events with errors", () => { + const event: WideEventData = { + ...baseEvent, + error: { + type: "Error", + code: "QUERY_FAILED", + message: "Query failed", + retriable: false, + }, + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.severityText).toBe("ERROR"); + }); + + test("should set ERROR severity for 5xx status codes", () => { + const event: WideEventData = { + ...baseEvent, + status_code: 500, + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.severityText).toBe("ERROR"); + }); + + test("should set WARN severity for 4xx status codes", () => { + const event: WideEventData = { + ...baseEvent, + status_code: 404, + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.severityText).toBe("WARN"); + }); + + test("should include HTTP attributes", () => { + emitter.emit(baseEvent); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.attributes["http.method"]).toBe("GET"); + expect(logRecord.attributes["http.route"]).toBe("/api/test"); + 
expect(logRecord.attributes["http.status_code"]).toBe(200); + expect(logRecord.attributes["http.request.duration_ms"]).toBe(125); + }); + + test("should include trace ID if present", () => { + const event: WideEventData = { + ...baseEvent, + trace_id: "abc123def456", + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.attributes["trace_id"]).toBe("abc123def456"); + }); + + test("should include component information", () => { + const event: WideEventData = { + ...baseEvent, + component: { + name: "analytics", + operation: "executeQuery", + }, + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.body).toContain("[analytics.executeQuery]"); + expect(logRecord.attributes["component.name"]).toBe("analytics"); + expect(logRecord.attributes["component.operation"]).toBe("executeQuery"); + }); + + test("should include error details in attributes", () => { + const event: WideEventData = { + ...baseEvent, + error: { + type: "ValidationError", + code: "INVALID_PARAM", + message: "Invalid parameter", + retriable: false, + }, + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.body).toContain("ERROR: Invalid parameter"); + expect(logRecord.attributes["error.type"]).toBe("ValidationError"); + expect(logRecord.attributes["error.code"]).toBe("INVALID_PARAM"); + expect(logRecord.attributes["error.message"]).toBe("Invalid parameter"); + expect(logRecord.attributes["error.retriable"]).toBe(false); + }); + + test("should include execution metadata", () => { + const event: WideEventData = { + ...baseEvent, + execution: { + cache_hit: true, + cache_key: "test-key", + retry_attempts: 2, + }, + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.attributes["execution.cache_hit"]).toBe(true); + expect(logRecord.attributes["execution.cache_key"]).toBe("test-key"); + expect(logRecord.attributes["execution.retry_attempts"]).toBe(2); + }); + + test("should include custom context as attributes", () => { + const event: WideEventData = { + ...baseEvent, + context: { + analytics: { + query_key: "user_stats", + cache_hit: true, + }, + warehouse: { + warehouse_id: "abc123", + }, + }, + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.attributes["analytics.query_key"]).toBe("user_stats"); + expect(logRecord.attributes["analytics.cache_hit"]).toBe(true); + expect(logRecord.attributes["warehouse.warehouse_id"]).toBe("abc123"); + }); + + test("should include log count", () => { + const event: WideEventData = { + ...baseEvent, + logs: [ + { level: "info", message: "Log 1", timestamp: "2026-01-13T10:00:00Z" }, + { level: "info", message: "Log 2", timestamp: "2026-01-13T10:00:01Z" }, + ], + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.attributes["log_count"]).toBe(2); + }); + + test("should not include undefined attributes", () => { + const event: WideEventData = { + ...baseEvent, + // No error, user, or component + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.attributes["error.type"]).toBeUndefined(); + expect(logRecord.attributes["user.id"]).toBeUndefined(); + expect(logRecord.attributes["component.name"]).toBeUndefined(); + }); + + test("should format log body correctly", () => { + const event: WideEventData = { + ...baseEvent, + method: "POST", + path: 
"/api/query", + status_code: 201, + duration_ms: 250, + component: { + name: "analytics", + operation: "executeQuery", + }, + }; + + emitter.emit(event); + + const logRecord = mockLogger.emit.mock.calls[0][0]; + expect(logRecord.body).toBe( + "POST /api/query → 201 (250ms) [analytics.executeQuery]", + ); + }); +}); diff --git a/packages/appkit/src/observability/tests/wide-event.test.ts b/packages/appkit/src/logging/tests/wide-event.test.ts similarity index 97% rename from packages/appkit/src/observability/tests/wide-event.test.ts rename to packages/appkit/src/logging/tests/wide-event.test.ts index 433a9367..adbef2ce 100644 --- a/packages/appkit/src/observability/tests/wide-event.test.ts +++ b/packages/appkit/src/logging/tests/wide-event.test.ts @@ -64,11 +64,11 @@ describe("WideEvent", () => { test("merges object values", () => { const event = new WideEvent("req-123"); - event.set("service", { region: "us-west-2" }); + // Use setContext for partial updates since service requires name and version + event.setContext("test", { region: "us-west-2" }); const data = event.toJSON(); - expect(data.service?.name).toBe("appkit"); - expect(data.service?.region).toBe("us-west-2"); + expect(data.context?.test?.region).toBe("us-west-2"); }); test("returns this for chaining", () => { @@ -210,7 +210,8 @@ describe("WideEvent", () => { test("extracts error cause", () => { const event = new WideEvent("req-123"); const cause = new Error("Original error"); - const error = new Error("Wrapped error", { cause }); + const error = new Error("Wrapped error"); + (error as any).cause = cause; event.setError(error); diff --git a/packages/appkit/src/observability/types.ts b/packages/appkit/src/logging/types.ts similarity index 100% rename from packages/appkit/src/observability/types.ts rename to packages/appkit/src/logging/types.ts diff --git a/packages/appkit/src/logging/wide-event-emitter.ts b/packages/appkit/src/logging/wide-event-emitter.ts new file mode 100644 index 00000000..3c96c057 --- /dev/null +++ b/packages/appkit/src/logging/wide-event-emitter.ts @@ -0,0 +1,193 @@ +import { logs, SeverityNumber } from "@opentelemetry/api-logs"; +import type { WideEventData } from "./wide-event"; + +/** + * Emits WideEvents to OpenTelemetry as structured logs + */ +export class WideEventEmitter { + private logger = logs.getLogger("appkit", "1.0.0"); + + /** + * Emit a WideEvent to OpenTelemetry. + * Fails silently to avoid crashing the application due to observability issues. 
+ */ + emit(event: WideEventData): void { + try { + const logRecord = { + timestamp: Date.parse(event.timestamp), + severityNumber: this.getSeverityNumber(event), + severityText: this.getSeverityText(event), + body: this.createLogBody(event), + attributes: this.createAttributes(event), + }; + + this.logger.emit(logRecord); + } catch { + // Silent fail - observability should never crash the application + } + } + + /** + * Get OpenTelemetry severity number based on event data + */ + private getSeverityNumber(event: WideEventData): SeverityNumber { + // Error level + if (event.error) { + return SeverityNumber.ERROR; + } + + // Status code based + if (event.status_code) { + if (event.status_code >= 500) { + return SeverityNumber.ERROR; + } + if (event.status_code >= 400) { + return SeverityNumber.WARN; + } + } + + // Check logs for errors/warnings + if (event.logs) { + const hasError = event.logs.some((log) => log.level === "error"); + if (hasError) { + return SeverityNumber.ERROR; + } + + const hasWarn = event.logs.some((log) => log.level === "warn"); + if (hasWarn) { + return SeverityNumber.WARN; + } + } + + return SeverityNumber.INFO; + } + + /** + * Get severity text based on severity number + */ + private getSeverityText(event: WideEventData): string { + const severityNumber = this.getSeverityNumber(event); + + if (severityNumber >= SeverityNumber.ERROR) { + return "ERROR"; + } + if (severityNumber >= SeverityNumber.WARN) { + return "WARN"; + } + if (severityNumber >= SeverityNumber.INFO) { + return "INFO"; + } + return "DEBUG"; + } + + /** + * Create log body from event data + */ + private createLogBody(event: WideEventData): string { + const parts: string[] = []; + + // HTTP request info + if (event.method && event.path) { + parts.push(`${event.method} ${event.path}`); + } + + // Status code + if (event.status_code) { + parts.push(`→ ${event.status_code}`); + } + + // Duration + if (event.duration_ms) { + parts.push(`(${event.duration_ms}ms)`); + } + + // Component info + if (event.component) { + const componentStr = event.component.operation + ? 
`${event.component.name}.${event.component.operation}` + : event.component.name; + parts.push(`[${componentStr}]`); + } + + // Error message + if (event.error) { + parts.push(`ERROR: ${event.error.message}`); + } + + return parts.join(" "); + } + + /** + * Create OpenTelemetry attributes from event data + */ + private createAttributes( + event: WideEventData, + ): Record { + const attributes: Record = { + // Request metadata + request_id: event.request_id, + trace_id: event.trace_id, + + // HTTP attributes (OpenTelemetry semantic conventions) + "http.method": event.method, + "http.route": event.path, + "http.status_code": event.status_code, + "http.request.duration_ms": event.duration_ms, + + // Service attributes + "service.name": event.service?.name, + "service.version": event.service?.version, + "service.region": event.service?.region, + "service.deployment_id": event.service?.deployment_id, + "service.node_env": event.service?.node_env, + + // Component attributes + "component.name": event.component?.name, + "component.operation": event.component?.operation, + + // User attributes + "user.id": event.user?.id, + + // Error attributes + "error.type": event.error?.type, + "error.code": event.error?.code, + "error.message": event.error?.message, + "error.retriable": event.error?.retriable, + + // Execution metadata + "execution.timeout_ms": event.execution?.timeout_ms, + "execution.retry_attempts": event.execution?.retry_attempts, + "execution.cache_hit": event.execution?.cache_hit, + "execution.cache_key": event.execution?.cache_key, + "execution.cache_deduplication": event.execution?.cache_deduplication, + + // Stream metadata + "stream.id": event.stream?.stream_id, + "stream.events_sent": event.stream?.events_sent, + + // Log count + log_count: event.logs?.length, + }; + + // Add custom context as attributes with scope prefix (no "appkit" prefix) + if (event.context) { + for (const [scope, scopeData] of Object.entries(event.context)) { + for (const [key, value] of Object.entries(scopeData)) { + // Only add primitive values + if ( + typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" + ) { + attributes[`${scope}.${key}`] = value; + } + } + } + } + + // Remove undefined values + return Object.fromEntries( + Object.entries(attributes).filter(([_, value]) => value !== undefined), + ); + } +} diff --git a/packages/appkit/src/observability/wide-event.ts b/packages/appkit/src/logging/wide-event.ts similarity index 95% rename from packages/appkit/src/observability/wide-event.ts rename to packages/appkit/src/logging/wide-event.ts index 626e0161..7bf44587 100644 --- a/packages/appkit/src/observability/wide-event.ts +++ b/packages/appkit/src/logging/wide-event.ts @@ -1,8 +1,5 @@ -import { createLogger } from "./logger"; import type { LogLevel } from "./types"; -const logger = createLogger("wide-event"); - export interface QueryData { key?: string; warehouse_id?: string; @@ -21,6 +18,7 @@ export interface WideEventData { // request metadata timestamp: string; request_id: string; + trace_id?: string; method?: string; path?: string; status_code?: number; @@ -51,6 +49,7 @@ export interface WideEventData { execution?: { cache_hit?: boolean; cache_key?: string; + cache_deduplication?: boolean; retry_attempts?: number; timeout_ms?: number; [key: string]: unknown; @@ -98,7 +97,7 @@ export interface WideEventData { * - Fields are camelCase to match OpenTelemetry */ export class WideEvent { - private data: WideEventData; + public data: WideEventData; private startTime: 
number; constructor(requestId: string) { @@ -245,12 +244,7 @@ export class WideEvent { // Keep only last 50 logs to prevent unbounded growth if (this.data.logs.length > 50) { - const originalLength = this.data.logs.length; this.data.logs = this.data.logs.slice(-50); - logger.debug( - "Log limit exceeded, truncated from %d to 50 logs", - originalLength, - ); } return this; diff --git a/packages/appkit/src/observability/logger.ts b/packages/appkit/src/observability/logger.ts deleted file mode 100644 index 1ab68427..00000000 --- a/packages/appkit/src/observability/logger.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { format } from "node:util"; -import { createDebug as createObug } from "obug"; - -/** - * Logger interface for AppKit components - */ -export interface Logger { - /** Debug output (disabled by default, enable via DEBUG env var) */ - debug: (message: string, ...args: unknown[]) => void; - /** Info output (always visible, for operational messages) */ - info: (message: string, ...args: unknown[]) => void; - /** Warning output (always visible, for degraded states) */ - warn: (message: string, ...args: unknown[]) => void; - /** Error output (always visible, for failures) */ - error: (message: string, ...args: unknown[]) => void; -} - -/** - * Create a logger instance for a specific scope - * @param scope - The scope identifier (e.g., "connectors:lakebase") - * @returns Logger instance with debug, info, warn, and error methods - * - * @example - * ```typescript - * const logger = createLogger("connectors:lakebase"); - * logger.debug("Connection established with pool size: %d", poolSize); - * logger.info("Server started on port %d", port); - * logger.warn("Connection pool running low: %d remaining", available); - * logger.error("Failed to connect: %O", error); - * ``` - */ -export function createLogger(scope: string): Logger { - const debug = createObug(`appkit:${scope}`, { useColors: true }); - const prefix = `[appkit:${scope}]`; - - return { - debug: (message: string, ...args: unknown[]) => { - debug(message, ...args); - }, - info: (message: string, ...args: unknown[]) => { - console.log(prefix, format(message, ...args)); - }, - warn: (message: string, ...args: unknown[]) => { - console.warn(prefix, format(message, ...args)); - }, - error: (message: string, ...args: unknown[]) => { - console.error(prefix, format(message, ...args)); - }, - }; -} diff --git a/packages/appkit/src/plugin/dev-reader.ts b/packages/appkit/src/plugin/dev-reader.ts index a620b44f..f4966d83 100644 --- a/packages/appkit/src/plugin/dev-reader.ts +++ b/packages/appkit/src/plugin/dev-reader.ts @@ -1,8 +1,8 @@ import { randomUUID } from "node:crypto"; import type { TunnelConnection } from "shared"; import { isRemoteTunnelAllowedByEnv } from "@/server/remote-tunnel/gate"; -import { TunnelError } from "../observability/errors"; -import { createLogger } from "../observability/logger"; +import { TunnelError } from "../errors"; +import { createLogger } from "../logging/logger"; const logger = createLogger("plugin:dev-reader"); diff --git a/packages/appkit/src/plugin/interceptors/cache.ts b/packages/appkit/src/plugin/interceptors/cache.ts index 7f9f31e6..d4d965af 100644 --- a/packages/appkit/src/plugin/interceptors/cache.ts +++ b/packages/appkit/src/plugin/interceptors/cache.ts @@ -1,6 +1,9 @@ -import type { CacheManager } from "../../cache"; import type { CacheConfig } from "shared"; -import type { InterceptorContext, ExecutionInterceptor } from "./types"; +import type { CacheManager } from "../../cache"; +import { 
createLogger } from "../../logging/logger"; +import type { ExecutionInterceptor, InterceptorContext } from "./types"; + +const logger = createLogger("interceptors:cache"); // interceptor to handle caching logic export class CacheInterceptor implements ExecutionInterceptor { @@ -18,11 +21,15 @@ export class CacheInterceptor implements ExecutionInterceptor { return fn(); } + // get wide event from request if available + const wideEvent = context.request ? logger.event(context.request) : null; + return this.cacheManager.getOrExecute( this.config.cacheKey, fn, context.userKey, { ttl: this.config.ttl }, + wideEvent, ); } } diff --git a/packages/appkit/src/plugin/interceptors/retry.ts b/packages/appkit/src/plugin/interceptors/retry.ts index 274a0d36..d66b9e0b 100644 --- a/packages/appkit/src/plugin/interceptors/retry.ts +++ b/packages/appkit/src/plugin/interceptors/retry.ts @@ -1,5 +1,8 @@ import type { RetryConfig } from "shared"; -import type { InterceptorContext, ExecutionInterceptor } from "./types"; +import { createLogger } from "../../logging/logger"; +import type { ExecutionInterceptor, InterceptorContext } from "./types"; + +const logger = createLogger("interceptors:retry"); // interceptor to handle retry logic export class RetryInterceptor implements ExecutionInterceptor { @@ -21,12 +24,27 @@ export class RetryInterceptor implements ExecutionInterceptor { for (let attempt = 1; attempt <= this.attempts; attempt++) { try { - return await fn(); + const result = await fn(); + + if (attempt > 1 && context.request) { + const event = logger.event(context.request); + event.setExecution({ + retry_attempts: attempt - 1, + }); + } + + return result; } catch (error) { lastError = error; // last attempt, rethrow the error if (attempt === this.attempts) { + if (context.request) { + const event = logger.event(context.request); + event.setExecution({ + retry_attempts: attempt - 1, + }); + } throw error; } diff --git a/packages/appkit/src/plugin/interceptors/telemetry.ts b/packages/appkit/src/plugin/interceptors/telemetry.ts index 3c0b6594..d08d3149 100644 --- a/packages/appkit/src/plugin/interceptors/telemetry.ts +++ b/packages/appkit/src/plugin/interceptors/telemetry.ts @@ -1,12 +1,8 @@ +import type { TelemetryConfig } from "shared"; import type { ITelemetry, Span } from "../../telemetry"; import { SpanStatusCode } from "../../telemetry"; -import type { TelemetryConfig } from "shared"; -import type { InterceptorContext, ExecutionInterceptor } from "./types"; +import type { ExecutionInterceptor, InterceptorContext } from "./types"; -/** - * Interceptor to automatically instrument plugin executions with telemetry spans. - * Wraps the execution in a span and handles success/error status. 
- */ export class TelemetryInterceptor implements ExecutionInterceptor { constructor( private telemetry: ITelemetry, @@ -15,23 +11,58 @@ export class TelemetryInterceptor implements ExecutionInterceptor { async intercept( fn: () => Promise, - _context: InterceptorContext, + context: InterceptorContext, ): Promise { const spanName = this.config?.spanName || "plugin.execute"; + + // abort operation if signal is aborted + if (context.signal?.aborted) { + throw new Error("Operation aborted before execution"); + } + return this.telemetry.startActiveSpan( spanName, { attributes: this.config?.attributes }, async (span: Span) => { + let abortHandler: (() => void) | undefined; + let isAborted = false; + + if (context.signal) { + abortHandler = () => { + // abort span if not recording + if (!span.isRecording()) return; + isAborted = true; + span.setAttribute("cancelled", true); + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Operation cancelled by client", + }); + span.end(); + }; + context.signal.addEventListener("abort", abortHandler, { + once: true, + }); + } + try { const result = await fn(); - span.setStatus({ code: SpanStatusCode.OK }); + if (!isAborted) { + span.setStatus({ code: SpanStatusCode.OK }); + } return result; } catch (error) { - span.recordException(error as Error); - span.setStatus({ code: SpanStatusCode.ERROR }); + if (!isAborted) { + span.recordException(error as Error); + span.setStatus({ code: SpanStatusCode.ERROR }); + } throw error; } finally { - span.end(); + if (abortHandler && context.signal) { + context.signal.removeEventListener("abort", abortHandler); + } + if (!isAborted) { + span.end(); + } } }, ); diff --git a/packages/appkit/src/plugin/interceptors/timeout.ts b/packages/appkit/src/plugin/interceptors/timeout.ts index 1f5a26a7..5f39696a 100644 --- a/packages/appkit/src/plugin/interceptors/timeout.ts +++ b/packages/appkit/src/plugin/interceptors/timeout.ts @@ -1,4 +1,7 @@ -import type { InterceptorContext, ExecutionInterceptor } from "./types"; +import { createLogger } from "../../logging/logger"; +import type { ExecutionInterceptor, InterceptorContext } from "./types"; + +const logger = createLogger("interceptors:timeout"); // interceptor to handle timeout logic export class TimeoutInterceptor implements ExecutionInterceptor { @@ -8,6 +11,13 @@ export class TimeoutInterceptor implements ExecutionInterceptor { fn: () => Promise, context: InterceptorContext, ): Promise { + if (context.request) { + const event = logger.event(context.request); + event.setExecution({ + timeout_ms: this.timeoutMs, + }); + } + // create timeout signal const timeoutController = new AbortController(); const timeoutId = setTimeout(() => { diff --git a/packages/appkit/src/plugin/interceptors/types.ts b/packages/appkit/src/plugin/interceptors/types.ts index 633e38de..5b3aea93 100644 --- a/packages/appkit/src/plugin/interceptors/types.ts +++ b/packages/appkit/src/plugin/interceptors/types.ts @@ -1,3 +1,5 @@ +import type { Request } from "express"; + /** * Context passed through the interceptor chain. * Contains signal for cancellation, metadata, and user identification. 
@@ -6,6 +8,7 @@ export interface InterceptorContext { signal?: AbortSignal; metadata?: Map; userKey: string; + request?: Request; } export interface ExecutionInterceptor { diff --git a/packages/appkit/src/plugin/plugin.ts b/packages/appkit/src/plugin/plugin.ts index f915f2e8..fb63de43 100644 --- a/packages/appkit/src/plugin/plugin.ts +++ b/packages/appkit/src/plugin/plugin.ts @@ -19,8 +19,8 @@ import { ServiceContext, type UserContext, } from "../context"; -import { AuthenticationError } from "../observability/errors"; -import { createLogger } from "../observability/logger"; +import { AuthenticationError } from "../errors"; +import { createLogger } from "../logging/logger"; import { StreamManager } from "../stream"; import { type ITelemetry, @@ -200,6 +200,7 @@ export abstract class Plugin< fn: StreamExecuteHandler, options: StreamExecutionSettings, userKey?: string, + req?: express.Request, ) { // destructure options const { @@ -214,7 +215,7 @@ export abstract class Plugin< user: userConfig, }); - // Get user key from context if not provided + // get user key from context if not provided const effectiveUserKey = userKey ?? getCurrentUserId(); const self = this; @@ -226,6 +227,7 @@ export abstract class Plugin< signal: streamSignal, metadata: new Map(), userKey: effectiveUserKey, + request: req, }; // build interceptors @@ -261,17 +263,19 @@ export abstract class Plugin< fn: (signal?: AbortSignal) => Promise, options: PluginExecutionSettings, userKey?: string, + req?: express.Request, ): Promise { const executeConfig = this._buildExecutionConfig(options); const interceptors = this._buildInterceptors(executeConfig); - // Get user key from context if not provided + // get user key from context if not provided const effectiveUserKey = userKey ?? getCurrentUserId(); const context: InterceptorContext = { metadata: new Map(), userKey: effectiveUserKey, + request: req, }; try { @@ -318,7 +322,6 @@ export abstract class Plugin< // order matters: telemetry → timeout → retry → cache (innermost to outermost) - // Only add telemetry interceptor if traces are enabled const telemetryConfig = normalizeTelemetryOptions(this.config.telemetry); if ( telemetryConfig.traces && diff --git a/packages/appkit/src/server/index.ts b/packages/appkit/src/server/index.ts index bfd0504e..229c4fc8 100644 --- a/packages/appkit/src/server/index.ts +++ b/packages/appkit/src/server/index.ts @@ -4,8 +4,8 @@ import path from "node:path"; import dotenv from "dotenv"; import express from "express"; import type { PluginPhase } from "shared"; -import { ServerError } from "../observability/errors"; -import { createLogger } from "../observability/logger"; +import { ServerError } from "../errors"; +import { createLogger } from "../logging/logger"; import { Plugin, toPlugin } from "../plugin"; import { instrumentations } from "../telemetry"; import { RemoteTunnelController } from "./remote-tunnel/remote-tunnel-controller"; diff --git a/packages/appkit/src/server/remote-tunnel/remote-tunnel-controller.ts b/packages/appkit/src/server/remote-tunnel/remote-tunnel-controller.ts index b589de38..712c4bad 100644 --- a/packages/appkit/src/server/remote-tunnel/remote-tunnel-controller.ts +++ b/packages/appkit/src/server/remote-tunnel/remote-tunnel-controller.ts @@ -1,6 +1,6 @@ import type { Server as HTTPServer } from "node:http"; import type express from "express"; -import { createLogger } from "../../observability/logger"; +import { createLogger } from "../../logging/logger"; import type { DevFileReader } from "../../plugin/dev-reader"; 
import { hasDevQuery, diff --git a/packages/appkit/src/server/remote-tunnel/remote-tunnel-manager.ts b/packages/appkit/src/server/remote-tunnel/remote-tunnel-manager.ts index fcf159bc..5416b4db 100644 --- a/packages/appkit/src/server/remote-tunnel/remote-tunnel-manager.ts +++ b/packages/appkit/src/server/remote-tunnel/remote-tunnel-manager.ts @@ -6,7 +6,7 @@ import { fileURLToPath } from "node:url"; import type express from "express"; import type { TunnelConnection } from "shared"; import { WebSocketServer } from "ws"; -import { createLogger } from "../../observability/logger"; +import { createLogger } from "../../logging/logger"; import { generateTunnelIdFromEmail, getConfigScript, diff --git a/packages/appkit/src/server/tests/server.test.ts b/packages/appkit/src/server/tests/server.test.ts index 3bbf8c0a..ee79f1f3 100644 --- a/packages/appkit/src/server/tests/server.test.ts +++ b/packages/appkit/src/server/tests/server.test.ts @@ -110,12 +110,20 @@ const { mockLoggerDebug, mockLoggerInfo, mockLoggerWarn, mockLoggerError } = mockLoggerWarn: vi.fn(), mockLoggerError: vi.fn(), })); -vi.mock("../../observability/logger", () => ({ +vi.mock("../../logging/logger", () => ({ createLogger: vi.fn(() => ({ debug: mockLoggerDebug, info: mockLoggerInfo, warn: mockLoggerWarn, error: mockLoggerError, + event: vi.fn(() => ({ + set: vi.fn().mockReturnThis(), + setComponent: vi.fn().mockReturnThis(), + setContext: vi.fn().mockReturnThis(), + setUser: vi.fn().mockReturnThis(), + setExecution: vi.fn().mockReturnThis(), + setError: vi.fn().mockReturnThis(), + })), })), })); diff --git a/packages/appkit/src/server/vite-dev-server.ts b/packages/appkit/src/server/vite-dev-server.ts index 9b23a251..13bd361f 100644 --- a/packages/appkit/src/server/vite-dev-server.ts +++ b/packages/appkit/src/server/vite-dev-server.ts @@ -3,8 +3,8 @@ import path from "node:path"; import type express from "express"; import type { ViteDevServer as ViteDevServerType } from "vite"; import { mergeConfigDedup } from "@/utils"; -import { ServerError } from "../observability/errors"; -import { createLogger } from "../observability/logger"; +import { ServerError } from "../errors"; +import { createLogger } from "../logging/logger"; import { appKitTypesPlugin } from "../type-generator/vite-plugin"; import { BaseServer } from "./base-server"; import type { PluginEndpoints } from "./utils"; diff --git a/packages/appkit/src/stream/arrow-stream-processor.ts b/packages/appkit/src/stream/arrow-stream-processor.ts index 0cc0a4f8..f9051e20 100644 --- a/packages/appkit/src/stream/arrow-stream-processor.ts +++ b/packages/appkit/src/stream/arrow-stream-processor.ts @@ -1,6 +1,6 @@ import type { sql } from "@databricks/sdk-experimental"; -import { ExecutionError, ValidationError } from "../observability/errors"; -import { createLogger } from "../observability/logger"; +import { ExecutionError, ValidationError } from "../errors"; +import { createLogger } from "../logging/logger"; const logger = createLogger("stream:arrow"); diff --git a/packages/appkit/src/stream/buffers.ts b/packages/appkit/src/stream/buffers.ts index bddeb639..cdefe748 100644 --- a/packages/appkit/src/stream/buffers.ts +++ b/packages/appkit/src/stream/buffers.ts @@ -1,4 +1,4 @@ -import { ValidationError } from "../observability/errors"; +import { ValidationError } from "../errors"; import type { BufferedEvent } from "./types"; // generic ring buffer implementation diff --git a/packages/appkit/src/stream/stream-manager.ts b/packages/appkit/src/stream/stream-manager.ts index 
354b89e1..41764772 100644 --- a/packages/appkit/src/stream/stream-manager.ts +++ b/packages/appkit/src/stream/stream-manager.ts @@ -1,4 +1,5 @@ import { randomUUID } from "node:crypto"; +import { context } from "@opentelemetry/api"; import type { IAppResponse, StreamConfig } from "shared"; import { EventRingBuffer } from "./buffers"; import { streamDefaults } from "./defaults"; @@ -26,13 +27,18 @@ export class StreamManager { } // main streaming method - handles new connection and reconnection - stream( + async stream( res: IAppResponse, handler: (signal: AbortSignal) => AsyncGenerator, options?: StreamConfig, ): Promise { const { streamId } = options || {}; + // check if response is already closed + if (res.writableEnded || res.destroyed) { + return; + } + // setup SSE headers this.sseWriter.setupHeaders(res); @@ -139,6 +145,12 @@ export class StreamManager { options?: StreamConfig, ): Promise { const streamId = options?.streamId ?? randomUUID(); + + // abort stream if response is closed + if (res.writableEnded || res.destroyed) { + return; + } + const abortController = new AbortController(); // create event buffer @@ -153,6 +165,15 @@ export class StreamManager { ); const heartbeat = this.sseWriter.startHeartbeat(res, combinedSignal); + // capture the current trace context at stream creation time + const traceContext = context.active(); + + // abort stream if response is closed + if (res.writableEnded || res.destroyed) { + clearInterval(heartbeat); + return; + } + // create stream entry const streamEntry: StreamEntry = { streamId, @@ -162,6 +183,7 @@ export class StreamManager { isCompleted: false, lastAccess: Date.now(), abortController, + traceContext, }; this.streamRegistry.add(streamEntry); @@ -173,7 +195,6 @@ export class StreamManager { }; this.activeOperations.add(streamOperation); - // handle client disconnect res.on("close", () => { clearInterval(heartbeat); this.activeOperations.delete(streamOperation); @@ -190,71 +211,74 @@ export class StreamManager { private async _processGeneratorInBackground( streamEntry: StreamEntry, ): Promise { - try { - // retrieve all events from generator - for await (const event of streamEntry.generator) { - if (streamEntry.abortController.signal.aborted) break; - const eventId = randomUUID(); - const eventData = JSON.stringify(event); - - // validate event size - if (eventData.length > this.maxEventSize) { - const errorMsg = `Event exceeds max size of ${this.maxEventSize} bytes`; - const errorCode = SSEErrorCode.INVALID_REQUEST; - // broadcast error to all connected clients - this._broadcastErrorToClients( - streamEntry, - eventId, - errorMsg, - errorCode, - ); - continue; + // run the entire generator processing within the captured trace context + return context.with(streamEntry.traceContext, async () => { + try { + // retrieve all events from generator + for await (const event of streamEntry.generator) { + if (streamEntry.abortController.signal.aborted) break; + const eventId = randomUUID(); + const eventData = JSON.stringify(event); + + // validate event size + if (eventData.length > this.maxEventSize) { + const errorMsg = `Event exceeds max size of ${this.maxEventSize} bytes`; + const errorCode = SSEErrorCode.INVALID_REQUEST; + // broadcast error to all connected clients + this._broadcastErrorToClients( + streamEntry, + eventId, + errorMsg, + errorCode, + ); + continue; + } + + // buffer event for reconnection + streamEntry.eventBuffer.add({ + id: eventId, + type: event.type, + data: eventData, + timestamp: Date.now(), + }); + + // broadcast to 
all connected clients + this._broadcastEventsToClients(streamEntry, eventId, event); + streamEntry.lastAccess = Date.now(); } - // buffer event for reconnection + streamEntry.isCompleted = true; + + // close all clients + this._closeAllClients(streamEntry); + + // cleanup if no clients are connected + this._cleanupStream(streamEntry); + } catch (error) { + const errorMsg = + error instanceof Error ? error.message : "Internal server error"; + const errorEventId = randomUUID(); + const errorCode = this._categorizeError(error); + + // buffer error event streamEntry.eventBuffer.add({ - id: eventId, - type: event.type, - data: eventData, + id: errorEventId, + type: "error", + data: JSON.stringify({ error: errorMsg, code: errorCode }), timestamp: Date.now(), }); - // broadcast to all connected clients - this._broadcastEventsToClients(streamEntry, eventId, event); - streamEntry.lastAccess = Date.now(); + // send error event to all connected clients + this._broadcastErrorToClients( + streamEntry, + errorEventId, + errorMsg, + errorCode, + true, + ); + streamEntry.isCompleted = true; } - - streamEntry.isCompleted = true; - - // close all clients - this._closeAllClients(streamEntry); - - // cleanup if no clients are connected - this._cleanupStream(streamEntry); - } catch (error) { - const errorMsg = - error instanceof Error ? error.message : "Internal server error"; - const errorEventId = randomUUID(); - const errorCode = this._categorizeError(error); - - // buffer error event - streamEntry.eventBuffer.add({ - id: errorEventId, - type: "error", - data: JSON.stringify({ error: errorMsg, code: errorCode }), - timestamp: Date.now(), - }); - - // send error event to all connected clients - this._broadcastErrorToClients( - streamEntry, - errorEventId, - errorMsg, - errorCode, - true, - ); - streamEntry.isCompleted = true; - } + }); } private _combineSignals( diff --git a/packages/appkit/src/stream/types.ts b/packages/appkit/src/stream/types.ts index f7d6ba46..f5526680 100644 --- a/packages/appkit/src/stream/types.ts +++ b/packages/appkit/src/stream/types.ts @@ -1,3 +1,4 @@ +import type { Context } from "@opentelemetry/api"; import type { IAppResponse } from "shared"; import type { EventRingBuffer } from "./buffers"; @@ -39,6 +40,7 @@ export interface StreamEntry { isCompleted: boolean; lastAccess: number; abortController: AbortController; + traceContext: Context; } export interface BufferEntry { diff --git a/packages/appkit/src/telemetry/instrumentations.ts b/packages/appkit/src/telemetry/instrumentations.ts index 6b356f00..581bdc0e 100644 --- a/packages/appkit/src/telemetry/instrumentations.ts +++ b/packages/appkit/src/telemetry/instrumentations.ts @@ -1,6 +1,7 @@ +import type { Instrumentation } from "@opentelemetry/instrumentation"; import { ExpressInstrumentation } from "@opentelemetry/instrumentation-express"; import { HttpInstrumentation } from "@opentelemetry/instrumentation-http"; -import type { Instrumentation } from "@opentelemetry/instrumentation"; +import { shouldIgnoreRequest } from "../utils/path-exclusions"; /** * Registry of pre-configured instrumentations for common use cases. 
@@ -11,6 +12,9 @@ import type { Instrumentation } from "@opentelemetry/instrumentation"; */ export const instrumentations: Record = { http: new HttpInstrumentation({ + // Filter out requests before creating spans - this is the most efficient approach + ignoreIncomingRequestHook: shouldIgnoreRequest, + applyCustomAttributesOnSpan(span: any, request: any) { let spanName: string | null = null; diff --git a/packages/appkit/src/telemetry/telemetry-manager.ts b/packages/appkit/src/telemetry/telemetry-manager.ts index e256610c..6660b6b2 100644 --- a/packages/appkit/src/telemetry/telemetry-manager.ts +++ b/packages/appkit/src/telemetry/telemetry-manager.ts @@ -17,14 +17,14 @@ import { import { BatchLogRecordProcessor } from "@opentelemetry/sdk-logs"; import { PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics"; import { NodeSDK } from "@opentelemetry/sdk-node"; -import { AlwaysOnSampler } from "@opentelemetry/sdk-trace-base"; import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION, } from "@opentelemetry/semantic-conventions"; import type { TelemetryOptions } from "shared"; -import { createLogger } from "../observability/logger"; +import { createLogger } from "../logging/logger"; import { TelemetryProvider } from "./telemetry-provider"; +import { AppKitSampler } from "./trace-sampler"; import type { TelemetryConfig } from "./types"; const logger = createLogger("telemetry"); @@ -76,7 +76,7 @@ export class TelemetryManager { this.sdk = new NodeSDK({ resource: this.createResource(config), autoDetectResources: false, - sampler: new AlwaysOnSampler(), + sampler: new AppKitSampler(), traceExporter: new OTLPTraceExporter({ headers: config.headers }), metricReaders: [ new PeriodicExportingMetricReader({ diff --git a/packages/appkit/src/telemetry/trace-sampler.ts b/packages/appkit/src/telemetry/trace-sampler.ts new file mode 100644 index 00000000..7101d4c1 --- /dev/null +++ b/packages/appkit/src/telemetry/trace-sampler.ts @@ -0,0 +1,56 @@ +import type { Attributes, Context, Link } from "@opentelemetry/api"; +import type { Sampler, SamplingResult } from "@opentelemetry/sdk-trace-base"; +import { SamplingDecision } from "@opentelemetry/sdk-trace-base"; +import { shouldExcludePath } from "../utils/path-exclusions"; + +/** + * Custom sampler that filters out asset requests and other noise. + * + * This acts as a secondary filter after HttpInstrumentation.ignoreIncomingRequestHook. + * It catches any spans that slip through the primary filter. 
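+ *
+ * @example
+ * ```typescript
+ * // Illustrative sketch: `ctx`, `traceId`, and the empty links array stand in for
+ * // values the SDK normally supplies; SpanKind comes from @opentelemetry/api.
+ * const sampler = new AppKitSampler();
+ *
+ * // "/health" matches EXCLUDED_PATH_PREFIXES → not recorded
+ * sampler.shouldSample(ctx, traceId, "GET /health", SpanKind.SERVER,
+ *   { "http.target": "/health" }, []);
+ *
+ * // "/api/query" is not excluded → recorded and sampled
+ * sampler.shouldSample(ctx, traceId, "POST /api/query", SpanKind.SERVER,
+ *   { "http.target": "/api/query" }, []);
+ * ```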
+ */ +export class AppKitSampler implements Sampler { + shouldSample( + _context: Context, + _traceId: string, + spanName: string, + _spanKind: number, + attributes: Attributes, + _links: Link[], + ): SamplingResult { + // Check if this is an HTTP request span + const httpTarget = attributes["http.target"] as string | undefined; + const httpRoute = attributes["http.route"] as string | undefined; + const httpUrl = attributes["http.url"] as string | undefined; + + // Try to extract path from various attributes + let path = httpTarget || httpRoute; + if (!path && httpUrl) { + try { + path = new URL(httpUrl).pathname; + } catch { + // Not a valid URL, use as-is + path = httpUrl; + } + } + if (!path) { + path = spanName; + } + + // Check if path should be excluded + if (shouldExcludePath(path)) { + return { + decision: SamplingDecision.NOT_RECORD, + }; + } + + // For all other requests, record and sample + return { + decision: SamplingDecision.RECORD_AND_SAMPLED, + }; + } + + toString(): string { + return "AppKitSampler"; + } +} diff --git a/packages/appkit/src/type-generator/index.ts b/packages/appkit/src/type-generator/index.ts index 86d51bd5..4ecd0b56 100644 --- a/packages/appkit/src/type-generator/index.ts +++ b/packages/appkit/src/type-generator/index.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; import dotenv from "dotenv"; -import { createLogger } from "../observability/logger"; +import { createLogger } from "../logging/logger"; import { generateQueriesFromDescribe } from "./query-registry"; import type { QuerySchema } from "./types"; diff --git a/packages/appkit/src/type-generator/query-registry.ts b/packages/appkit/src/type-generator/query-registry.ts index 1556373e..91f4de1f 100644 --- a/packages/appkit/src/type-generator/query-registry.ts +++ b/packages/appkit/src/type-generator/query-registry.ts @@ -1,7 +1,7 @@ import fs from "node:fs"; import path from "node:path"; import { WorkspaceClient } from "@databricks/sdk-experimental"; -import { createLogger } from "../observability/logger"; +import { createLogger } from "../logging/logger"; import { CACHE_VERSION, hashSQL, loadCache, saveCache } from "./cache"; import { Spinner } from "./spinner"; import { diff --git a/packages/appkit/src/type-generator/vite-plugin.ts b/packages/appkit/src/type-generator/vite-plugin.ts index 9a188e4b..998daf0a 100644 --- a/packages/appkit/src/type-generator/vite-plugin.ts +++ b/packages/appkit/src/type-generator/vite-plugin.ts @@ -1,7 +1,7 @@ import fs from "node:fs"; import path from "node:path"; import type { Plugin } from "vite"; -import { createLogger } from "../observability/logger"; +import { createLogger } from "../logging/logger"; import { generateFromEntryPoint } from "./index"; const logger = createLogger("type-generator:vite-plugin"); diff --git a/packages/appkit/src/utils/env-validator.ts b/packages/appkit/src/utils/env-validator.ts index 3105b355..adc35a22 100644 --- a/packages/appkit/src/utils/env-validator.ts +++ b/packages/appkit/src/utils/env-validator.ts @@ -1,4 +1,4 @@ -import { ValidationError } from "../observability/errors"; +import { ValidationError } from "../errors"; export function validateEnv(envVars: string[]) { const missingVars = []; diff --git a/packages/appkit/src/utils/index.ts b/packages/appkit/src/utils/index.ts index 4f954956..23770d21 100644 --- a/packages/appkit/src/utils/index.ts +++ b/packages/appkit/src/utils/index.ts @@ -1,3 +1,4 @@ export * from "./env-validator"; export * from "./merge"; +export * from "./path-exclusions"; export * from "./vite-config-merge"; 
diff --git a/packages/appkit/src/utils/path-exclusions.ts b/packages/appkit/src/utils/path-exclusions.ts new file mode 100644 index 00000000..e7f55849 --- /dev/null +++ b/packages/appkit/src/utils/path-exclusions.ts @@ -0,0 +1,89 @@ +import type { IncomingMessage } from "node:http"; + +/** + * Paths and patterns to exclude from tracing and logging. + * Requests matching these will not create spans or WideEvents. + */ +export const EXCLUDED_PATH_PREFIXES = [ + // Vite dev server internals + "/@fs/", + "/@vite/", + "/@id/", + "/@react-refresh", + "/src/", // Vite HMR source files + "/node_modules/", + + // Static assets and common paths + "/favicon.ico", + "/_next/", + "/static/", + + // Health checks + "/health", + "/metrics", +]; + +/** + * File extensions to exclude from tracing. + * These are typically static assets that don't need tracing. + */ +export const EXCLUDED_EXTENSIONS = [ + ".svg", + ".png", + ".jpg", + ".jpeg", + ".gif", + ".webp", + ".ico", + ".css", + ".woff", + ".woff2", + ".ttf", + ".eot", + ".map", // Source maps + ".js", // Static JS files (not API endpoints) +]; + +/** + * Check if a request should be ignored for tracing. + * This is the primary filter used by HttpInstrumentation. + */ +export function shouldIgnoreRequest(request: IncomingMessage): boolean { + const url = request.url; + if (!url) return false; + + // Remove query string for path matching + const path = url.split("?")[0]; + + return shouldExcludePath(path); +} + +/** + * Check if a path should be excluded from tracing/logging. + * Returns true if path should be excluded, false otherwise. + */ +export function shouldExcludePath(path: string | undefined): boolean { + if (typeof path !== "string") return false; + + // Remove query string + const cleanPath = path.split("?")[0]; + const lowerPath = cleanPath.toLowerCase(); + + // Check path prefixes + for (const prefix of EXCLUDED_PATH_PREFIXES) { + if (cleanPath.startsWith(prefix) || cleanPath.includes(prefix)) { + return true; + } + } + + // Check file extensions (but not for /api/ routes) + if (!cleanPath.startsWith("/api/")) { + for (const ext of EXCLUDED_EXTENSIONS) { + if (lowerPath.endsWith(ext)) { + return true; + } + } + } + + return false; +} diff --git a/turbo.json b/turbo.json index 2ec810c8..7e9592de 100644 --- a/turbo.json +++ b/turbo.json @@ -1,5 +1,6 @@ { "$schema": "https://turborepo.com/schema.json", + "globalPassThroughEnv": ["DEBUG"], "ui": "tui", "tasks": { "build:watch": { From b679766daf0af4f31282474aca7e5e3f33a7c138 Mon Sep 17 00:00:00 2001 From: Ditadi Date: Tue, 13 Jan 2026 18:24:17 +0000 Subject: [PATCH 4/4] feat(logging): use asyncLocalStorage for wideEvent --- packages/appkit/src/analytics/analytics.ts | 31 +-- packages/appkit/src/cache/index.ts | 9 +- .../src/connectors/sql-warehouse/client.ts | 24 ++- packages/appkit/src/logging/logger.ts | 197 ++++++++++-------- .../logging/tests/logger-with-event.test.ts | 57 ++--- .../appkit/src/plugin/interceptors/cache.ts | 7 - .../appkit/src/plugin/interceptors/retry.ts | 14 +- .../appkit/src/plugin/interceptors/timeout.ts | 9 +- .../appkit/src/plugin/interceptors/types.ts | 3 - packages/appkit/src/plugin/plugin.ts | 4 - 10 files changed, 169 insertions(+), 186 deletions(-) diff --git a/packages/appkit/src/analytics/analytics.ts b/packages/appkit/src/analytics/analytics.ts index 2c11f5da..97bdec3c 100644 --- a/packages/appkit/src/analytics/analytics.ts +++ b/packages/appkit/src/analytics/analytics.ts @@ -98,33 +98,28 @@ export class AnalyticsPlugin extends Plugin { const { jobId } = 
req.params; const workspaceClient = getWorkspaceClient(); - logger.debug(req, "Processing Arrow job request for jobId=%s", jobId); + logger.debug("Processing Arrow job request for jobId=%s", jobId); const event = logger.event(req); - event.setComponent("analytics", "getArrowData").setContext("analytics", { + event?.setComponent("analytics", "getArrowData").setContext("analytics", { job_id: jobId, plugin: this.name, }); const result = await this.getArrowData(workspaceClient, jobId); - event.setContext("sql-warehouse", { - arrow_data_size_bytes: result.data.length, - }); - res.setHeader("Content-Type", "application/octet-stream"); res.setHeader("Content-Length", result.data.length.toString()); res.setHeader("Cache-Control", "public, max-age=3600"); logger.debug( - req, "Sending Arrow buffer: %d bytes for job %s", result.data.length, jobId, ); res.send(Buffer.from(result.data)); } catch (error) { - logger.error(req, "Arrow job error: %O", error); + logger.error("Arrow job error: %O", error); res.status(404).json({ error: error instanceof Error ? error.message : "Arrow job not found", plugin: this.name, @@ -147,7 +142,7 @@ export class AnalyticsPlugin extends Plugin { logger.debug(req, "Executing query: %s (format=%s)", query_key, format); const event = logger.event(req); - event.setComponent("analytics", "executeQuery").setContext("analytics", { + event?.setComponent("analytics", "executeQuery").setContext("analytics", { query_key, format, parameter_count: parameters ? Object.keys(parameters).length : 0, @@ -220,14 +215,12 @@ export class AnalyticsPlugin extends Plugin { processedParams, queryParameters.formatParameters, signal, - req, ); return { type: queryParameters.type, ...result }; }, streamExecutionSettings, userKey, - req, ); } @@ -251,7 +244,6 @@ export class AnalyticsPlugin extends Plugin { parameters?: Record, formatParameters?: Record, signal?: AbortSignal, - req?: express.Request, ): Promise { const workspaceClient = getWorkspaceClient(); const warehouseId = await getWarehouseId(); @@ -259,7 +251,6 @@ export class AnalyticsPlugin extends Plugin { const { statement, parameters: sqlParameters } = this.queryProcessor.convertToSQLParameters(query, parameters); - const queryStartTime = Date.now(); const response = await this.SQLClient.executeStatement( workspaceClient, { @@ -270,20 +261,6 @@ export class AnalyticsPlugin extends Plugin { }, signal, ); - const queryDurationMs = Date.now() - queryStartTime; - - // Set warehouse execution data on WideEvent - if (req) { - const event = logger.event(req); - const result = response.result as any; - const rowCount = result?.data?.length ?? result?.data_array?.length ?? 
0; - - event.setContext("sql-warehouse", { - warehouse_id: warehouseId, - rows_returned: rowCount, - query_duration_ms: queryDurationMs, - }); - } return response.result; } diff --git a/packages/appkit/src/cache/index.ts b/packages/appkit/src/cache/index.ts index 783480f7..140f36b8 100644 --- a/packages/appkit/src/cache/index.ts +++ b/packages/appkit/src/cache/index.ts @@ -4,7 +4,6 @@ import type { CacheConfig, CacheStorage } from "shared"; import { LakebaseConnector } from "@/connectors"; import { AppKitError, ExecutionError, InitializationError } from "../errors"; import { createLogger } from "../logging/logger"; -import type { WideEvent } from "../logging/wide-event"; import type { Counter, TelemetryProvider } from "../telemetry"; import { SpanStatusCode, TelemetryManager } from "../telemetry"; import { deepMerge } from "../utils"; @@ -177,7 +176,6 @@ export class CacheManager { * @param fn - Function to execute * @param userKey - User key * @param options - Options for the cache - * @param wideEvent - Optional WideEvent to track cache execution data * @returns Promise of the result */ async getOrExecute( @@ -185,7 +183,6 @@ export class CacheManager { fn: () => Promise, userKey: string, options?: { ttl?: number }, - wideEvent?: WideEvent | null, ): Promise { if (!this.config.enabled) return fn(); @@ -211,7 +208,7 @@ export class CacheManager { "cache.key": cacheKey, }); - wideEvent?.setExecution({ + logger.event()?.setExecution({ cache_hit: true, cache_key: cacheKey, }); @@ -233,7 +230,7 @@ export class CacheManager { "cache.deduplication": "true", }); - wideEvent?.setExecution({ + logger.event()?.setExecution({ cache_hit: true, cache_key: cacheKey, cache_deduplication: true, @@ -250,7 +247,7 @@ export class CacheManager { "cache.key": cacheKey, }); - wideEvent?.setExecution({ + logger.event()?.setExecution({ cache_hit: false, cache_key: cacheKey, }); diff --git a/packages/appkit/src/connectors/sql-warehouse/client.ts b/packages/appkit/src/connectors/sql-warehouse/client.ts index 71c69cb8..ca47fa54 100644 --- a/packages/appkit/src/connectors/sql-warehouse/client.ts +++ b/packages/appkit/src/connectors/sql-warehouse/client.ts @@ -208,15 +208,20 @@ export class SQLWarehouseConnector { } const resultData = result.result as any; - if (resultData?.data) { - span.setAttribute("db.result.row_count", resultData.data.length); - } else if (resultData?.data_array) { - span.setAttribute( - "db.result.row_count", - resultData.data_array.length, - ); + const rowCount = + resultData?.data?.length ?? resultData?.data_array?.length ?? 
0; + + if (rowCount > 0) { + span.setAttribute("db.result.row_count", rowCount); } + const duration = Date.now() - startTime; + logger.event()?.setContext("sql-warehouse", { + warehouse_id: input.warehouse_id, + rows_returned: rowCount, + query_duration_ms: duration, + }); + success = true; // only set success status if not aborted if (!isAborted) { @@ -496,6 +501,11 @@ export class SQLWarehouseConnector { status: "success", }); + logger.event()?.setContext("sql-warehouse", { + arrow_data_size_bytes: result.data.length, + arrow_job_id: jobId, + }); + return result; } catch (error) { span.setStatus({ diff --git a/packages/appkit/src/logging/logger.ts b/packages/appkit/src/logging/logger.ts index efc3ebdd..69433d66 100644 --- a/packages/appkit/src/logging/logger.ts +++ b/packages/appkit/src/logging/logger.ts @@ -1,3 +1,4 @@ +import { AsyncLocalStorage } from "node:async_hooks"; import { format } from "node:util"; import { trace } from "@opentelemetry/api"; import type { Request, Response } from "express"; @@ -26,11 +27,14 @@ export interface Logger { error(message: string, ...args: unknown[]): void; error(req: Request, message: string, ...args: unknown[]): void; - /** Get or create request-scoped WideEvent */ - event(req: Request): WideEvent; + /** Get request-scoped WideEvent (from AsyncLocalStorage or explicit req) */ + event(req?: Request): WideEvent | undefined; } -// WeakMap to store WideEvent per request +// AsyncLocalStorage for WideEvent context propagation +const eventStorage = new AsyncLocalStorage(); + +// WeakMap to store WideEvent per request (for explicit req usage) const eventsByRequest = new WeakMap(); // Global emitter instance @@ -42,9 +46,7 @@ const MAX_REQUEST_ID_LENGTH = 128; * Sanitize a request ID from user headers */ function sanitizeRequestId(id: string): string { - // Remove any characters that aren't alphanumeric, dash, underscore, or dot const sanitized = id.replace(/[^a-zA-Z0-9_.-]/g, ""); - // Limit length return sanitized.slice(0, MAX_REQUEST_ID_LENGTH); } @@ -52,7 +54,6 @@ function sanitizeRequestId(id: string): string { * Generate a request ID from the request */ function generateRequestId(req: Request): string { - // Use existing request ID if available const existingId = req.headers["x-request-id"] || req.headers["x-correlation-id"] || @@ -65,106 +66,117 @@ function generateRequestId(req: Request): string { } } - // Generate a simple ID based on timestamp and random return `req_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`; } /** - * Get or create a WideEvent for the given request + * Create a WideEvent for a request */ -function getOrCreateEvent(req: Request): WideEvent { - let wideEvent = eventsByRequest.get(req); +function createEventForRequest(req: Request): WideEvent { + const requestId = generateRequestId(req); + const wideEvent = new WideEvent(requestId); + + // extract path from request (strip query string) + const rawPath = req.path || req.url || req.originalUrl; + const path = rawPath?.split("?")[0]; + wideEvent.set("method", req.method).set("path", path); + + // extract user id from request headers (sanitized) + const rawUserId = req.headers["x-forwarded-user"]; + if (rawUserId && typeof rawUserId === "string" && rawUserId.length > 0) { + const userId = rawUserId.replace(/[^a-zA-Z0-9_@.-]/g, "").slice(0, 128); + if (userId.length > 0) { + wideEvent.setUser({ id: userId }); + } + } - if (!wideEvent) { - const requestId = generateRequestId(req); - wideEvent = new WideEvent(requestId); + // extract trace id from active span for 
distributed tracing + const currentSpan = trace.getActiveSpan(); + const spanContext = currentSpan?.spanContext(); + if (spanContext?.traceId) { + wideEvent.set("trace_id", spanContext.traceId); + + const debugLogger = createObug("appkit:logger:event", { useColors: true }); + debugLogger( + "WideEvent created: %s %s (reqId: %s, traceId: %s)", + req.method, + path, + requestId.substring(0, 8), + spanContext.traceId.substring(0, 8), + ); + } + + // Update service scope + if (wideEvent.data.service) { + wideEvent.data.service = { + ...wideEvent.data.service, + name: "appkit", + }; + } - // Set initial request metadata - const path = req.path || req.url || req.originalUrl; - wideEvent.set("method", req.method).set("path", path); + return wideEvent; +} - // Extract user ID from request headers - const userId = req.headers["x-forwarded-user"] as string | undefined; - if (userId) { - wideEvent.setUser({ id: userId }); - } +/** + * Setup response lifecycle handlers for WideEvent finalization + */ +function setupResponseHandlers(req: Request, wideEvent: WideEvent): void { + const res = req.res as Response | undefined; + if (!res) return; - // Extract trace ID from active span for distributed tracing - const currentSpan = trace.getActiveSpan(); - const spanContext = currentSpan?.spanContext(); - if (spanContext?.traceId) { - wideEvent.set("trace_id", spanContext.traceId); - - const debugLogger = createObug("appkit:logger:event", { - useColors: true, - }); - debugLogger( - "WideEvent created: %s %s (reqId: %s, traceId: %s)", - req.method, - path, - requestId.substring(0, 8), - spanContext.traceId.substring(0, 8), - ); - } + res.once("finish", () => { + // Finalize the event with status code + const finalizedData = wideEvent.finalize(res.statusCode || 200); - // Update service scope - if (wideEvent.data.service) { - wideEvent.data.service = { - ...wideEvent.data.service, - name: "appkit", - }; + // Emit to OpenTelemetry if sampled + if (shouldSample(finalizedData, DEFAULT_SAMPLING_CONFIG)) { + emitter.emit(finalizedData); } - eventsByRequest.set(req, wideEvent); + // Clean up WeakMap + eventsByRequest.delete(req); + }); - // Auto-finalize on response finish - const res = req.res as Response | undefined; - if (res) { - res.once("finish", () => { - const event = eventsByRequest.get(req); - if (event) { - // Finalize the event with status code - const finalizedData = event.finalize(res.statusCode || 200); - - // Emit to OpenTelemetry if sampled - const sampled = shouldSample(finalizedData, DEFAULT_SAMPLING_CONFIG); - - if (sampled) { - emitter.emit(finalizedData); - } - - // Clean up to prevent memory leaks - eventsByRequest.delete(req); - } - }); - - // Also handle aborted requests - res.once("close", () => { - if (!res.writableFinished) { - // Request was aborted/cancelled - const event = eventsByRequest.get(req); - - if (event) { - // Try to end the active span with error status - const currentSpan = trace.getActiveSpan(); - if (currentSpan) { - currentSpan.setStatus({ - code: 1, // ERROR - message: "Request aborted by client", - }); - currentSpan.end(); - } - } - - eventsByRequest.delete(req); - } - }); + res.once("close", () => { + if (!res.writableFinished) { + // Request was aborted - just cleanup + eventsByRequest.delete(req); } + }); +} + +/** + * Get or create a WideEvent for the given request. 
+ * Also sets the event in AsyncLocalStorage so downstream code can access it via logger.event() + */ +function getOrCreateEvent(req: Request): WideEvent { + let wideEvent = eventsByRequest.get(req); + + if (!wideEvent) { + wideEvent = createEventForRequest(req); + eventsByRequest.set(req, wideEvent); + setupResponseHandlers(req, wideEvent); } + // set in AsyncLocalStorage so downstream code can access via logger.event() + eventStorage.enterWith(wideEvent); + return wideEvent; } +/** + * Get current WideEvent from AsyncLocalStorage or request + */ +function getCurrentEvent(req?: Request): WideEvent | undefined { + // If req provided, use it (explicit usage in route handlers) + if (req) { + return getOrCreateEvent(req); + } + + // Otherwise, get from AsyncLocalStorage (interceptors, etc.) + return eventStorage.getStore(); +} + /** * Check if the first argument is an Express Request */ @@ -195,9 +207,10 @@ function isRequest(arg: unknown): arg is Request { * logger.debug(req, "Processing query: %s", queryId); * logger.error(req, "Query failed: %O", error); * - * // Get WideEvent for manual updates - * const event = logger.event(req); - * event.setComponent("analytics", "executeQuery"); + * // Get WideEvent - works in route handlers (with req) or interceptors (from context) + * const event = logger.event(req); // In route handler + * const event = logger.event(); // In interceptor (gets from AsyncLocalStorage) + * event?.setComponent("analytics", "executeQuery"); * ``` */ export function createLogger(scope: string): Logger { @@ -260,8 +273,8 @@ export function createLogger(scope: string): Logger { } } - function event(req: Request): WideEvent { - return getOrCreateEvent(req); + function event(req?: Request): WideEvent | undefined { + return getCurrentEvent(req); } return { diff --git a/packages/appkit/src/logging/tests/logger-with-event.test.ts b/packages/appkit/src/logging/tests/logger-with-event.test.ts index a60b6f59..ce28ded7 100644 --- a/packages/appkit/src/logging/tests/logger-with-event.test.ts +++ b/packages/appkit/src/logging/tests/logger-with-event.test.ts @@ -78,9 +78,9 @@ describe("Logger with WideEvent Integration", () => { const event = logger.event(mockReq as Request); expect(event).toBeInstanceOf(WideEvent); - expect(event.data.request_id).toBeDefined(); - expect(event.data.method).toBe("POST"); - expect(event.data.path).toBe("/api/query"); + expect(event!.data.request_id).toBeDefined(); + expect(event!.data.method).toBe("POST"); + expect(event!.data.path).toBe("/api/query"); }); test("should reuse same WideEvent for same request", () => { @@ -99,9 +99,10 @@ describe("Logger with WideEvent Integration", () => { logger.info(mockReq as Request, "Processing query: %s", "SELECT *"); const event = logger.event(mockReq as Request); - expect(event.data.logs).toHaveLength(1); - expect(event.data.logs![0].level).toBe("info"); - expect(event.data.logs![0].message).toBe("Processing query: SELECT *"); + expect(event).toBeDefined(); + expect(event!.data.logs).toHaveLength(1); + expect(event!.data.logs![0].level).toBe("info"); + expect(event!.data.logs![0].message).toBe("Processing query: SELECT *"); infoSpy.mockRestore(); }); @@ -116,10 +117,11 @@ describe("Logger with WideEvent Integration", () => { logger.error(mockReq as Request, "Warning: slow query"); const event = logger.event(mockReq as Request); - expect(event.data.logs).toHaveLength(3); - expect(event.data.logs![0].message).toBe("Starting query"); - expect(event.data.logs![1].message).toBe("Query completed: 100 rows"); - 
expect(event.data.logs![2].message).toBe("Warning: slow query"); + expect(event).toBeDefined(); + expect(event!.data.logs).toHaveLength(3); + expect(event!.data.logs![0].message).toBe("Starting query"); + expect(event!.data.logs![1].message).toBe("Query completed: 100 rows"); + expect(event!.data.logs![2].message).toBe("Warning: slow query"); infoSpy.mockRestore(); errorSpy.mockRestore(); @@ -129,8 +131,9 @@ describe("Logger with WideEvent Integration", () => { const logger = createLogger("connectors:lakebase"); const event = logger.event(mockReq as Request); + expect(event).toBeDefined(); - expect(event.data.service?.name).toBe("appkit"); + expect(event!.data.service?.name).toBe("appkit"); // Note: scope is not currently stored in WideEventData // It's used for logger context but not part of the event structure }); @@ -138,32 +141,34 @@ describe("Logger with WideEvent Integration", () => { test("should finalize WideEvent on response finish", () => { const logger = createLogger("analytics"); const event = logger.event(mockReq as Request); + expect(event).toBeDefined(); - expect(event.data.duration_ms).toBeUndefined(); - expect(event.data.status_code).toBeUndefined(); + expect(event!.data.duration_ms).toBeUndefined(); + expect(event!.data.status_code).toBeUndefined(); // Trigger finish event finishCallback?.(); - expect(event.data.duration_ms).toBeDefined(); - expect(event.data.status_code).toBe(200); + expect(event!.data.duration_ms).toBeDefined(); + expect(event!.data.status_code).toBe(200); }); test("should allow manual updates to WideEvent", () => { const logger = createLogger("analytics"); const event = logger.event(mockReq as Request); + expect(event).toBeDefined(); - event.setComponent("analytics", "executeQuery"); - event.setExecution({ + event!.setComponent("analytics", "executeQuery"); + event!.setExecution({ statement: "SELECT * FROM users", duration: 125, rowCount: 100, }); - expect(event.data.component?.name).toBe("analytics"); - expect(event.data.component?.operation).toBe("executeQuery"); - expect(event.data.execution?.statement).toBe("SELECT * FROM users"); - expect(event.data.execution?.duration).toBe(125); + expect(event!.data.component?.name).toBe("analytics"); + expect(event!.data.component?.operation).toBe("executeQuery"); + expect(event!.data.execution?.statement).toBe("SELECT * FROM users"); + expect(event!.data.execution?.duration).toBe(125); }); test("should track errors in WideEvent", () => { @@ -174,8 +179,9 @@ describe("Logger with WideEvent Integration", () => { logger.error(mockReq as Request, "Query failed: %O", error); const event = logger.event(mockReq as Request); - expect(event.data.logs).toHaveLength(1); - expect(event.data.logs![0].level).toBe("error"); + expect(event).toBeDefined(); + expect(event!.data.logs).toHaveLength(1); + expect(event!.data.logs![0].level).toBe("error"); errorSpy.mockRestore(); }); @@ -196,8 +202,9 @@ describe("Logger with WideEvent Integration", () => { logger.info("Request count: %d", 1); const event = logger.event(mockReq as Request); - expect(event.data.logs).toHaveLength(1); // Only request log - expect(event.data.logs![0].message).toBe("Processing request"); + expect(event).toBeDefined(); + expect(event!.data.logs).toHaveLength(1); // Only request log + expect(event!.data.logs![0].message).toBe("Processing request"); infoSpy.mockRestore(); }); diff --git a/packages/appkit/src/plugin/interceptors/cache.ts b/packages/appkit/src/plugin/interceptors/cache.ts index d4d965af..b8af0ca3 100644 --- 
a/packages/appkit/src/plugin/interceptors/cache.ts +++ b/packages/appkit/src/plugin/interceptors/cache.ts @@ -1,10 +1,7 @@ import type { CacheConfig } from "shared"; import type { CacheManager } from "../../cache"; -import { createLogger } from "../../logging/logger"; import type { ExecutionInterceptor, InterceptorContext } from "./types"; -const logger = createLogger("interceptors:cache"); - // interceptor to handle caching logic export class CacheInterceptor implements ExecutionInterceptor { constructor( @@ -21,15 +18,11 @@ export class CacheInterceptor implements ExecutionInterceptor { return fn(); } - // get wide event from request if available - const wideEvent = context.request ? logger.event(context.request) : null; - return this.cacheManager.getOrExecute( this.config.cacheKey, fn, context.userKey, { ttl: this.config.ttl }, - wideEvent, ); } } diff --git a/packages/appkit/src/plugin/interceptors/retry.ts b/packages/appkit/src/plugin/interceptors/retry.ts index d66b9e0b..c0a184b8 100644 --- a/packages/appkit/src/plugin/interceptors/retry.ts +++ b/packages/appkit/src/plugin/interceptors/retry.ts @@ -26,9 +26,8 @@ export class RetryInterceptor implements ExecutionInterceptor { try { const result = await fn(); - if (attempt > 1 && context.request) { - const event = logger.event(context.request); - event.setExecution({ + if (attempt > 1) { + logger.event()?.setExecution({ retry_attempts: attempt - 1, }); } @@ -39,12 +38,9 @@ export class RetryInterceptor implements ExecutionInterceptor { // last attempt, rethrow the error if (attempt === this.attempts) { - if (context.request) { - const event = logger.event(context.request); - event.setExecution({ - retry_attempts: attempt - 1, - }); - } + logger.event()?.setExecution({ + retry_attempts: attempt - 1, + }); throw error; } diff --git a/packages/appkit/src/plugin/interceptors/timeout.ts b/packages/appkit/src/plugin/interceptors/timeout.ts index 5f39696a..7b363151 100644 --- a/packages/appkit/src/plugin/interceptors/timeout.ts +++ b/packages/appkit/src/plugin/interceptors/timeout.ts @@ -11,12 +11,9 @@ export class TimeoutInterceptor implements ExecutionInterceptor { fn: () => Promise, context: InterceptorContext, ): Promise { - if (context.request) { - const event = logger.event(context.request); - event.setExecution({ - timeout_ms: this.timeoutMs, - }); - } + logger.event()?.setExecution({ + timeout_ms: this.timeoutMs, + }); // create timeout signal const timeoutController = new AbortController(); diff --git a/packages/appkit/src/plugin/interceptors/types.ts b/packages/appkit/src/plugin/interceptors/types.ts index 5b3aea93..633e38de 100644 --- a/packages/appkit/src/plugin/interceptors/types.ts +++ b/packages/appkit/src/plugin/interceptors/types.ts @@ -1,5 +1,3 @@ -import type { Request } from "express"; - /** * Context passed through the interceptor chain. * Contains signal for cancellation, metadata, and user identification. 
@@ -8,7 +6,6 @@ export interface InterceptorContext { signal?: AbortSignal; metadata?: Map; userKey: string; - request?: Request; } export interface ExecutionInterceptor { diff --git a/packages/appkit/src/plugin/plugin.ts b/packages/appkit/src/plugin/plugin.ts index fb63de43..96b7e6ce 100644 --- a/packages/appkit/src/plugin/plugin.ts +++ b/packages/appkit/src/plugin/plugin.ts @@ -200,7 +200,6 @@ export abstract class Plugin< fn: StreamExecuteHandler, options: StreamExecutionSettings, userKey?: string, - req?: express.Request, ) { // destructure options const { @@ -227,7 +226,6 @@ export abstract class Plugin< signal: streamSignal, metadata: new Map(), userKey: effectiveUserKey, - request: req, }; // build interceptors @@ -263,7 +261,6 @@ export abstract class Plugin< fn: (signal?: AbortSignal) => Promise, options: PluginExecutionSettings, userKey?: string, - req?: express.Request, ): Promise { const executeConfig = this._buildExecutionConfig(options); @@ -275,7 +272,6 @@ export abstract class Plugin< const context: InterceptorContext = { metadata: new Map(), userKey: effectiveUserKey, - request: req, }; try {
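
A minimal sketch of the access pattern this change enables, assuming `createLogger` is imported from the package's logging module the same way the interceptors above import it; the route path, the `example:orders` scope, and the `runQuery` helper are illustrative only, not part of the patch. The handler touches `logger.event(req)` once, which also primes AsyncLocalStorage, so nested code can enrich the same WideEvent without threading `req` (or a `wideEvent` parameter) through its signature:

```ts
import express from "express";
// Path assumed for illustration; mirrors the "../logging/logger" imports used above.
import { createLogger } from "../logging/logger";

const app = express();
const logger = createLogger("example:orders"); // hypothetical scope

// Nested helper: no `req` in its signature, yet it can still enrich the
// request's WideEvent because getOrCreateEvent() placed it in AsyncLocalStorage.
async function runQuery(statement: string): Promise<number> {
  const started = Date.now();
  const rowCount = 42; // placeholder for a real connector call
  logger.event()?.setExecution({
    statement,
    duration: Date.now() - started,
    rowCount,
  });
  return rowCount;
}

app.get("/api/orders", async (req, res) => {
  // Explicit form: binds a WideEvent to this request and primes the ambient store.
  logger.event(req)?.setComponent("orders", "listOrders");

  const rows = await runQuery("SELECT * FROM orders");
  res.json({ rows });
  // On res "finish" the event is finalized with the status code and,
  // if it passes sampling, emitted to OpenTelemetry.
});
```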
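
The reason `event()` now returns `WideEvent | undefined` (and why the connectors and the Retry/Timeout interceptors use optional chaining) is that code can run with no request in scope, for example startup tasks or background jobs, where AsyncLocalStorage has no store. A short hedged sketch under that assumption; the job name and context fields are illustrative:

```ts
// Path assumed for illustration, as above.
import { createLogger } from "../logging/logger";

const logger = createLogger("example:maintenance"); // hypothetical scope

// Invoked from a startup or scheduled path with no Express request in flight:
// logger.event() returns undefined here, so the optional chain turns the
// enrichment into a no-op instead of a crash.
export async function compactCache(): Promise<void> {
  const started = Date.now();
  // ... do the actual maintenance work ...
  logger.event()?.setContext("maintenance", {
    job: "compact-cache",
    duration_ms: Date.now() - started,
  });
}
```

Inside a request, the same call pattern resolves to the live event, which is why `RetryInterceptor` and `TimeoutInterceptor` can drop `request` from `InterceptorContext` and still record `retry_attempts` and `timeout_ms` on the wide event.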