From 4ea90d19325b6747d8be00f0a4c10ac10ef21d1c Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 16 Dec 2025 22:29:04 +0000 Subject: [PATCH 01/30] feat: experimental OpenAI Apps SDK compatibility MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add transparent support for OpenAI's Apps SDK environment alongside MCP. - `transport.ts` - OpenAITransport implementing MCP Transport interface - `types.ts` - TypeScript types for OpenAI Apps SDK (`window.openai`) - `transport.test.ts` - Comprehensive tests - Add `experimentalOAICompatibility` option (default: `true`) - Auto-detect platform: check for `window.openai` → use OpenAI, else MCP - `connect()` creates appropriate transport automatically - Add `experimentalOAICompatibility` prop to `UseAppOptions` - Pass through to App constructor Apps work transparently in both environments: ```typescript // Works in both MCP hosts and ChatGPT const app = new App(appInfo, capabilities); await app.connect(); // Auto-detects platform // Force MCP-only mode const app = new App(appInfo, capabilities, { experimentalOAICompatibility: false }); ``` 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/app.ts | 89 ++++-- src/openai/transport.test.ts | 354 +++++++++++++++++++++++ src/openai/transport.ts | 538 +++++++++++++++++++++++++++++++++++ src/openai/types.ts | 244 ++++++++++++++++ src/react/useApp.tsx | 62 ++-- 5 files changed, 1243 insertions(+), 44 deletions(-) create mode 100644 src/openai/transport.test.ts create mode 100644 src/openai/transport.ts create mode 100644 src/openai/types.ts diff --git a/src/app.ts b/src/app.ts index 129b5802..7d31858d 100644 --- a/src/app.ts +++ b/src/app.ts @@ -16,7 +16,6 @@ import { PingRequestSchema, } from "@modelcontextprotocol/sdk/types.js"; import { AppNotification, AppRequest, AppResult } from "./types"; -import { PostMessageTransport } from "./message-transport"; import { LATEST_PROTOCOL_VERSION, McpUiAppCapabilities, @@ -47,8 +46,12 @@ import { McpUiRequestDisplayModeResultSchema, } from "./types"; import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js"; +import { PostMessageTransport } from "./message-transport"; +import { OpenAITransport, isOpenAIEnvironment } from "./openai/transport.js"; export { PostMessageTransport } from "./message-transport"; +export { OpenAITransport, isOpenAIEnvironment } from "./openai/transport"; +export * from "./openai/types"; export * from "./types"; export { applyHostStyleVariables, @@ -101,7 +104,7 @@ export const RESOURCE_MIME_TYPE = "text/html;profile=mcp-app"; * * @see ProtocolOptions from @modelcontextprotocol/sdk for inherited options */ -type AppOptions = ProtocolOptions & { +export type AppOptions = ProtocolOptions & { /** * Automatically report size changes to the host using ResizeObserver. * @@ -112,6 +115,19 @@ type AppOptions = ProtocolOptions & { * @default true */ autoResize?: boolean; + + /** + * Enable experimental OpenAI compatibility. + * + * When enabled (default), the App will auto-detect the environment: + * - If `window.openai` exists → use OpenAI Apps SDK + * - Otherwise → use MCP Apps protocol via PostMessageTransport + * + * Set to `false` to force MCP-only mode. 
+ * + * @default true + */ + experimentalOAICompatibility?: boolean; }; type RequestHandlerExtra = Parameters< @@ -220,7 +236,10 @@ export class App extends Protocol { constructor( private _appInfo: Implementation, private _capabilities: McpUiAppCapabilities = {}, - private options: AppOptions = { autoResize: true }, + private options: AppOptions = { + autoResize: true, + experimentalOAICompatibility: true, + }, ) { super(options); @@ -989,50 +1008,73 @@ export class App extends Protocol { return () => resizeObserver.disconnect(); } + /** + * Create the default transport based on detected platform. + * @internal + */ + private createDefaultTransport(): Transport { + const experimentalOAI = this.options?.experimentalOAICompatibility ?? true; + if (experimentalOAI && isOpenAIEnvironment()) { + return new OpenAITransport(); + } + return new PostMessageTransport(window.parent, window.parent); + } + /** * Establish connection with the host and perform initialization handshake. * * This method performs the following steps: - * 1. Connects the transport layer - * 2. Sends `ui/initialize` request with app info and capabilities - * 3. Receives host capabilities and context in response - * 4. Sends `ui/notifications/initialized` notification - * 5. Sets up auto-resize using {@link setupSizeChangedNotifications} if enabled (default) + * 1. Auto-detects platform if no transport is provided + * 2. Connects the transport layer + * 3. Sends `ui/initialize` request with app info and capabilities + * 4. Receives host capabilities and context in response + * 5. Sends `ui/notifications/initialized` notification + * 6. Sets up auto-resize using {@link setupSizeChangedNotifications} if enabled (default) + * 7. For OpenAI mode: delivers initial tool input/result from window.openai * * If initialization fails, the connection is automatically closed and an error * is thrown. * - * @param transport - Transport layer (typically PostMessageTransport) + * @param transport - Optional transport layer. 
If not provided, auto-detects
+   * based on the `experimentalOAICompatibility` option:
+   * - enabled (default) and `window.openai` exists → uses {@link OpenAITransport}
+   * - disabled, or no `window.openai` → uses {@link PostMessageTransport}
    * @param options - Request options for the initialize request
    *
    * @throws {Error} If initialization fails or connection is lost
    *
-   * @example Connect with PostMessageTransport
+   * @example Auto-detect platform (recommended)
    * ```typescript
    * const app = new App(
    *   { name: "MyApp", version: "1.0.0" },
    *   {}
    * );
    *
-   * try {
-   *   await app.connect(new PostMessageTransport(window.parent));
-   *   console.log("Connected successfully!");
-   * } catch (error) {
-   *   console.error("Failed to connect:", error);
-   * }
+   * // Auto-detects: OpenAI if window.openai exists, MCP otherwise
+   * await app.connect();
+   * ```
+   *
+   * @example Explicit MCP transport
+   * ```typescript
+   * await app.connect(new PostMessageTransport(window.parent));
+   * ```
+   *
+   * @example Explicit OpenAI transport
+   * ```typescript
+   * await app.connect(new OpenAITransport());
    * ```
    *
    * @see {@link McpUiInitializeRequest} for the initialization request structure
    * @see {@link McpUiInitializedNotification} for the initialized notification
-   * @see {@link PostMessageTransport} for the typical transport implementation
+   * @see {@link PostMessageTransport} for MCP-compatible hosts
+   * @see {@link OpenAITransport} for OpenAI/ChatGPT hosts
    */
   override async connect(
-    transport: Transport = new PostMessageTransport(
-      window.parent,
-      window.parent,
-    ),
+    transport?: Transport,
     options?: RequestOptions,
   ): Promise<void> {
+    transport ??= this.createDefaultTransport();
+
     await super.connect(transport);
 
     try {
@@ -1064,6 +1106,11 @@ export class App extends Protocol {
       if (this.options?.autoResize) {
         this.setupSizeChangedNotifications();
       }
+
+      // For OpenAI mode: deliver initial state from window.openai
+      if (transport instanceof OpenAITransport) {
+        transport.deliverInitialState();
+      }
     } catch (error) {
       // Disconnect if initialization fails.
void this.close(); diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts new file mode 100644 index 00000000..01911e09 --- /dev/null +++ b/src/openai/transport.test.ts @@ -0,0 +1,354 @@ +import { describe, test, expect, beforeEach, afterEach, mock } from "bun:test"; +import { OpenAITransport, isOpenAIEnvironment } from "./transport"; +import type { OpenAIGlobal, WindowWithOpenAI } from "./types"; + +describe("isOpenAIEnvironment", () => { + const originalWindow = globalThis.window; + + afterEach(() => { + // Restore original window + if (originalWindow === undefined) { + delete (globalThis as { window?: unknown }).window; + } else { + (globalThis as { window?: unknown }).window = originalWindow; + } + }); + + test("returns false when window is undefined", () => { + delete (globalThis as { window?: unknown }).window; + expect(isOpenAIEnvironment()).toBe(false); + }); + + test("returns false when window.openai is undefined", () => { + (globalThis as { window?: unknown }).window = {}; + expect(isOpenAIEnvironment()).toBe(false); + }); + + test("returns true when window.openai is an object", () => { + (globalThis as { window?: unknown }).window = { + openai: {}, + }; + expect(isOpenAIEnvironment()).toBe(true); + }); +}); + +describe("OpenAITransport", () => { + let mockOpenAI: OpenAIGlobal; + + beforeEach(() => { + mockOpenAI = { + theme: "dark", + locale: "en-US", + displayMode: "inline", + maxHeight: 600, + toolInput: { location: "Tokyo" }, + toolOutput: { temperature: 22 }, + callTool: mock(() => + Promise.resolve({ content: { result: "success" } }), + ) as unknown as OpenAIGlobal["callTool"], + sendFollowUpMessage: mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["sendFollowUpMessage"], + openExternal: mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["openExternal"], + notifyIntrinsicHeight: mock( + () => {}, + ) as unknown as OpenAIGlobal["notifyIntrinsicHeight"], + }; + + (globalThis as { window?: unknown }).window = { + openai: mockOpenAI, + }; + }); + + afterEach(() => { + delete (globalThis as { window?: unknown }).window; + }); + + test("throws when window.openai is not available", () => { + delete (globalThis as { window?: unknown }).window; + expect(() => new OpenAITransport()).toThrow( + "OpenAITransport requires window.openai", + ); + }); + + test("constructs successfully when window.openai is available", () => { + const transport = new OpenAITransport(); + expect(transport).toBeDefined(); + }); + + test("start() completes without error", async () => { + const transport = new OpenAITransport(); + await expect(transport.start()).resolves.toBeUndefined(); + }); + + test("close() calls onclose callback", async () => { + const transport = new OpenAITransport(); + const onclose = mock(() => {}); + transport.onclose = onclose; + + await transport.close(); + + expect(onclose).toHaveBeenCalled(); + }); + + describe("ui/initialize request", () => { + test("returns synthesized host info from window.openai", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + // Wait for microtask to complete + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 1, + result: { + hostInfo: { name: 
"ChatGPT", version: "1.0.0" }, + hostContext: { + theme: "dark", + locale: "en-US", + displayMode: "inline", + }, + }, + }); + }); + }); + + describe("tools/call request", () => { + test("delegates to window.openai.callTool()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 2, + method: "tools/call", + params: { + name: "get_weather", + arguments: { location: "Tokyo" }, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.callTool).toHaveBeenCalledWith("get_weather", { + location: "Tokyo", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 2, + result: expect.any(Object), + }); + }); + + test("returns error when callTool is not available", async () => { + delete mockOpenAI.callTool; + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 3, + method: "tools/call", + params: { name: "test_tool" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 3, + error: { + code: -32601, + message: expect.stringContaining("not supported"), + }, + }); + }); + }); + + describe("ui/message request", () => { + test("delegates to window.openai.sendFollowUpMessage()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 4, + method: "ui/message", + params: { + role: "user", + content: [{ type: "text", text: "Hello!" }], + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.sendFollowUpMessage).toHaveBeenCalledWith({ + prompt: "Hello!", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 4, + result: {}, + }); + }); + }); + + describe("ui/open-link request", () => { + test("delegates to window.openai.openExternal()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 5, + method: "ui/open-link", + params: { url: "https://example.com" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.openExternal).toHaveBeenCalledWith({ + href: "https://example.com", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 5, + result: {}, + }); + }); + }); + + describe("ui/request-display-mode request", () => { + test("delegates to window.openai.requestDisplayMode()", async () => { + mockOpenAI.requestDisplayMode = mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["requestDisplayMode"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 6, + method: "ui/request-display-mode", + params: { mode: "fullscreen" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.requestDisplayMode).toHaveBeenCalledWith({ + mode: "fullscreen", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 6, + result: { mode: "fullscreen" }, + }); + }); + }); + + describe("ui/notifications/size-changed notification", () => { + test("delegates to window.openai.notifyIntrinsicHeight()", async () => { + const 
transport = new OpenAITransport(); + + await transport.send({ + jsonrpc: "2.0", + method: "ui/notifications/size-changed", + params: { width: 400, height: 300 }, + }); + + expect(mockOpenAI.notifyIntrinsicHeight).toHaveBeenCalledWith(300); + }); + }); + + describe("deliverInitialState", () => { + test("delivers tool input notification", async () => { + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolInputNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-input", + ); + expect(toolInputNotification).toMatchObject({ + jsonrpc: "2.0", + method: "ui/notifications/tool-input", + params: { arguments: { location: "Tokyo" } }, + }); + }); + + test("delivers tool result notification", async () => { + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + expect(toolResultNotification).toBeDefined(); + }); + + test("does not deliver notifications when data is missing", async () => { + delete mockOpenAI.toolInput; + delete mockOpenAI.toolOutput; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(messages).toHaveLength(0); + }); + }); +}); diff --git a/src/openai/transport.ts b/src/openai/transport.ts new file mode 100644 index 00000000..399ef949 --- /dev/null +++ b/src/openai/transport.ts @@ -0,0 +1,538 @@ +/** + * Transport adapter for OpenAI Apps SDK (window.openai) compatibility. + * + * This transport allows MCP Apps to run in OpenAI's ChatGPT environment by + * translating between the MCP Apps protocol and the OpenAI Apps SDK APIs. + * + * @see https://developers.openai.com/apps-sdk/build/chatgpt-ui/ + */ + +import { + JSONRPCMessage, + JSONRPCRequest, + JSONRPCNotification, + RequestId, +} from "@modelcontextprotocol/sdk/types.js"; +import { + Transport, + TransportSendOptions, +} from "@modelcontextprotocol/sdk/shared/transport.js"; +import { OpenAIGlobal, getOpenAIGlobal, isOpenAIEnvironment } from "./types.js"; +import { LATEST_PROTOCOL_VERSION, McpUiHostContext } from "../spec.types.js"; + +/** + * JSON-RPC success response message. + * @internal + */ +interface JSONRPCSuccessResponse { + jsonrpc: "2.0"; + id: RequestId; + result: Record; +} + +/** + * JSON-RPC error response message. + * @internal + */ +interface JSONRPCErrorResponse { + jsonrpc: "2.0"; + id: RequestId; + error: { code: number; message: string; data?: unknown }; +} + +/** + * Check if a message is a JSON-RPC request (has method and id). + */ +function isRequest(message: JSONRPCMessage): message is JSONRPCRequest { + return "method" in message && "id" in message; +} + +/** + * Check if a message is a JSON-RPC notification (has method but no id). 
+ */ +function isNotification( + message: JSONRPCMessage, +): message is JSONRPCNotification { + return "method" in message && !("id" in message); +} + +/** + * Transport implementation that bridges MCP Apps protocol to OpenAI Apps SDK. + * + * This transport enables MCP Apps to run seamlessly in ChatGPT by: + * - Synthesizing initialization responses from window.openai properties + * - Mapping tool calls to window.openai.callTool() + * - Mapping messages to window.openai.sendFollowUpMessage() + * - Mapping link opens to window.openai.openExternal() + * - Reporting size changes via window.openai.notifyIntrinsicHeight() + * + * ## Usage + * + * Typically you don't create this transport directly. The App will create + * it automatically when `experimentalOAICompatibility` is enabled (default) + * and `window.openai` is detected. + * + * ```typescript + * import { App } from '@modelcontextprotocol/ext-apps'; + * + * const app = new App({ name: "MyApp", version: "1.0.0" }, {}); + * await app.connect(); // Auto-detects OpenAI environment + * ``` + * + * ## Manual Usage + * + * For advanced use cases, you can create the transport directly: + * + * ```typescript + * import { App, OpenAITransport } from '@modelcontextprotocol/ext-apps'; + * + * const app = new App({ name: "MyApp", version: "1.0.0" }, {}); + * await app.connect(new OpenAITransport()); + * ``` + * + * @see {@link App.connect} for automatic transport selection + * @see {@link PostMessageTransport} for MCP-compatible hosts + */ +export class OpenAITransport implements Transport { + private openai: OpenAIGlobal; + private _closed = false; + + /** + * Create a new OpenAITransport. + * + * @throws {Error} If window.openai is not available + * + * @example + * ```typescript + * if (isOpenAIEnvironment()) { + * const transport = new OpenAITransport(); + * await app.connect(transport); + * } + * ``` + */ + constructor() { + const openai = getOpenAIGlobal(); + if (!openai) { + throw new Error( + "OpenAITransport requires window.openai to be available. " + + "This transport should only be used in OpenAI/ChatGPT environments.", + ); + } + this.openai = openai; + } + + /** + * Begin listening for messages. + * + * In OpenAI mode, there's no event-based message flow to start. + * The data is pre-populated in window.openai properties. + */ + async start(): Promise { + // Nothing to do - window.openai is already available and populated + } + + /** + * Send a JSON-RPC message. + * + * Requests are handled by mapping to window.openai methods. + * Notifications are handled for size changes; others are no-ops. + * + * @param message - JSON-RPC message to send + * @param _options - Send options (unused) + */ + async send( + message: JSONRPCMessage, + _options?: TransportSendOptions, + ): Promise { + if (this._closed) { + throw new Error("Transport is closed"); + } + + if (isRequest(message)) { + // Handle requests - map to window.openai methods and synthesize responses + const response = await this.handleRequest(message); + // Deliver response asynchronously to maintain message ordering + queueMicrotask(() => this.onmessage?.(response)); + } else if (isNotification(message)) { + // Handle notifications + this.handleNotification(message); + } + // Responses are ignored - we don't receive requests from OpenAI + } + + /** + * Handle an outgoing JSON-RPC request by mapping to window.openai. 
+ */ + private async handleRequest( + request: JSONRPCRequest, + ): Promise { + const { method, id, params } = request; + + try { + switch (method) { + case "ui/initialize": + return this.handleInitialize(id); + + case "tools/call": + return await this.handleToolCall( + id, + params as { name: string; arguments?: Record }, + ); + + case "ui/message": + return await this.handleMessage( + id, + params as { role: string; content: unknown[] }, + ); + + case "ui/open-link": + return await this.handleOpenLink(id, params as { url: string }); + + case "ui/request-display-mode": + return await this.handleRequestDisplayMode( + id, + params as { mode: string }, + ); + + case "ping": + return this.createSuccessResponse(id, {}); + + default: + return this.createErrorResponse( + id, + -32601, + `Method not supported in OpenAI mode: ${method}`, + ); + } + } catch (error) { + return this.createErrorResponse( + id, + -32603, + error instanceof Error ? error.message : String(error), + ); + } + } + + /** + * Handle ui/initialize request by synthesizing response from window.openai. + */ + private handleInitialize(id: RequestId): JSONRPCSuccessResponse { + // Safely extract userAgent - could be string or object + let userAgent: string | undefined; + if (typeof this.openai.userAgent === "string") { + userAgent = this.openai.userAgent; + } else if ( + this.openai.userAgent && + typeof this.openai.userAgent === "object" + ) { + userAgent = JSON.stringify(this.openai.userAgent); + } + + // Safely extract safeAreaInsets - only include if all values are present + let safeAreaInsets: McpUiHostContext["safeAreaInsets"]; + const sa = this.openai.safeArea; + if ( + sa && + typeof sa.top === "number" && + typeof sa.right === "number" && + typeof sa.bottom === "number" && + typeof sa.left === "number" + ) { + safeAreaInsets = sa; + } + + const hostContext: McpUiHostContext = { + theme: this.openai.theme, + locale: this.openai.locale, + displayMode: this.openai.displayMode, + viewport: this.openai.maxHeight + ? { width: 0, height: 0, maxHeight: this.openai.maxHeight } + : undefined, + safeAreaInsets, + userAgent, + }; + + return this.createSuccessResponse(id, { + protocolVersion: LATEST_PROTOCOL_VERSION, + hostInfo: { + name: "ChatGPT", + version: "1.0.0", + }, + hostCapabilities: { + serverTools: {}, + openLinks: {}, + logging: {}, + }, + hostContext, + }); + } + + /** + * Handle tools/call request by delegating to window.openai.callTool(). + */ + private async handleToolCall( + id: RequestId, + params: { name: string; arguments?: Record }, + ): Promise { + if (!this.openai.callTool) { + return this.createErrorResponse( + id, + -32601, + "Tool calls are not supported in this OpenAI environment", + ); + } + + const result = await this.openai.callTool(params.name, params.arguments); + + // Handle different response formats from OpenAI + // Could be { content: [...] }, { structuredContent: ... 
}, or the raw data + let content: { type: string; text: string }[]; + if (Array.isArray(result.content)) { + // Clean up content items - remove null values for annotations/_meta + content = result.content.map((item: unknown) => { + if ( + typeof item === "object" && + item !== null && + "type" in item && + "text" in item + ) { + const typedItem = item as { + type: string; + text: string; + annotations?: unknown; + _meta?: unknown; + }; + return { type: typedItem.type, text: typedItem.text }; + } + return { type: "text", text: JSON.stringify(item) }; + }); + } else if (result.structuredContent !== undefined) { + content = [ + { type: "text", text: JSON.stringify(result.structuredContent) }, + ]; + } else if (result.content !== undefined) { + content = [{ type: "text", text: JSON.stringify(result.content) }]; + } else { + // The result itself might be the structured content + content = [{ type: "text", text: JSON.stringify(result) }]; + } + + return this.createSuccessResponse(id, { + content, + isError: result.isError, + }); + } + + /** + * Handle ui/message request by delegating to window.openai.sendFollowUpMessage(). + */ + private async handleMessage( + id: RequestId, + params: { role: string; content: unknown[] }, + ): Promise { + if (!this.openai.sendFollowUpMessage) { + return this.createErrorResponse( + id, + -32601, + "Sending messages is not supported in this OpenAI environment", + ); + } + + // Extract text content from the message + const textContent = params.content + .filter( + (c): c is { type: "text"; text: string } => + typeof c === "object" && + c !== null && + (c as { type?: string }).type === "text", + ) + .map((c) => c.text) + .join("\n"); + + await this.openai.sendFollowUpMessage({ prompt: textContent }); + + return this.createSuccessResponse(id, {}); + } + + /** + * Handle ui/open-link request by delegating to window.openai.openExternal(). + */ + private async handleOpenLink( + id: RequestId, + params: { url: string }, + ): Promise { + if (!this.openai.openExternal) { + return this.createErrorResponse( + id, + -32601, + "Opening external links is not supported in this OpenAI environment", + ); + } + + await this.openai.openExternal({ href: params.url }); + + return this.createSuccessResponse(id, {}); + } + + /** + * Handle ui/request-display-mode by delegating to window.openai.requestDisplayMode(). + */ + private async handleRequestDisplayMode( + id: RequestId, + params: { mode: string }, + ): Promise { + if (!this.openai.requestDisplayMode) { + return this.createErrorResponse( + id, + -32601, + "Display mode changes are not supported in this OpenAI environment", + ); + } + + const mode = params.mode as "inline" | "pip" | "fullscreen"; + await this.openai.requestDisplayMode({ mode }); + + return this.createSuccessResponse(id, { mode }); + } + + /** + * Handle an outgoing notification. + */ + private handleNotification(notification: JSONRPCNotification): void { + const { method, params } = notification; + + switch (method) { + case "ui/notifications/size-changed": + this.handleSizeChanged(params as { width?: number; height?: number }); + break; + + case "ui/notifications/initialized": + // No-op - OpenAI doesn't need this notification + break; + + case "notifications/message": + // Log messages - could be sent to console in OpenAI mode + console.log("[MCP App Log]", params); + break; + + default: + // Ignore unknown notifications + break; + } + } + + /** + * Handle size changed notification by calling window.openai.notifyIntrinsicHeight(). 
+ */ + private handleSizeChanged(params: { width?: number; height?: number }): void { + if (this.openai.notifyIntrinsicHeight && params.height !== undefined) { + this.openai.notifyIntrinsicHeight(params.height); + } + } + + /** + * Create a success JSON-RPC response. + */ + private createSuccessResponse( + id: RequestId, + result: Record, + ): JSONRPCSuccessResponse { + return { + jsonrpc: "2.0", + id, + result, + }; + } + + /** + * Create an error JSON-RPC response. + */ + private createErrorResponse( + id: RequestId, + code: number, + message: string, + ): JSONRPCErrorResponse { + return { + jsonrpc: "2.0", + id, + error: { code, message }, + }; + } + + /** + * Deliver initial tool input and result notifications. + * + * Called by App after connection to deliver pre-populated data from + * window.openai as notifications that the app's handlers expect. + * + * @internal + */ + deliverInitialState(): void { + // Deliver tool input if available + if (this.openai.toolInput !== undefined) { + queueMicrotask(() => { + this.onmessage?.({ + jsonrpc: "2.0", + method: "ui/notifications/tool-input", + params: { arguments: this.openai.toolInput }, + } as JSONRPCNotification); + }); + } + + // Deliver tool output if available + if (this.openai.toolOutput !== undefined) { + queueMicrotask(() => { + this.onmessage?.({ + jsonrpc: "2.0", + method: "ui/notifications/tool-result", + params: { + content: Array.isArray(this.openai.toolOutput) + ? this.openai.toolOutput + : [ + { + type: "text", + text: JSON.stringify(this.openai.toolOutput), + }, + ], + }, + } as JSONRPCNotification); + }); + } + } + + /** + * Close the transport. + */ + async close(): Promise { + this._closed = true; + this.onclose?.(); + } + + /** + * Called when the transport is closed. + */ + onclose?: () => void; + + /** + * Called when an error occurs. + */ + onerror?: (error: Error) => void; + + /** + * Called when a message is received. + */ + onmessage?: (message: JSONRPCMessage) => void; + + /** + * Session identifier (unused in OpenAI mode). + */ + sessionId?: string; + + /** + * Callback to set the negotiated protocol version. + */ + setProtocolVersion?: (version: string) => void; +} + +// Re-export utility functions +export { isOpenAIEnvironment, getOpenAIGlobal }; diff --git a/src/openai/types.ts b/src/openai/types.ts new file mode 100644 index 00000000..435823f9 --- /dev/null +++ b/src/openai/types.ts @@ -0,0 +1,244 @@ +/** + * Type definitions for the OpenAI Apps SDK's window.openai object. + * + * These types describe the API surface that ChatGPT injects into widget iframes. + * When running in OpenAI mode, the {@link OpenAITransport} uses these APIs to + * communicate with the ChatGPT host. + * + * @see https://developers.openai.com/apps-sdk/build/chatgpt-ui/ + */ + +/** + * Display mode for the widget in ChatGPT. + */ +export type OpenAIDisplayMode = "inline" | "pip" | "fullscreen"; + +/** + * Theme setting from the ChatGPT host. + */ +export type OpenAITheme = "light" | "dark"; + +/** + * Safe area insets for the widget viewport. + */ +export interface OpenAISafeArea { + top: number; + right: number; + bottom: number; + left: number; +} + +/** + * Result of a tool call via window.openai.callTool(). + * + * Note: The exact return type isn't fully documented by OpenAI. + * Based on observed behavior, it returns structured content. 
+ */ +export interface OpenAIToolCallResult { + /** Structured content from the tool (may be any shape) */ + structuredContent?: unknown; + /** Legacy content field (for compatibility) */ + content?: unknown; + /** Whether the tool call resulted in an error */ + isError?: boolean; +} + +/** + * The window.openai object injected by ChatGPT into widget iframes. + * + * This interface describes the API surface available to widgets running + * in the ChatGPT environment. + */ +export interface OpenAIGlobal { + // ───────────────────────────────────────────────────────────────────────── + // State & Data Properties + // ───────────────────────────────────────────────────────────────────────── + + /** + * Tool arguments passed when invoking the tool. + * Pre-populated when the widget loads. + */ + toolInput?: Record; + + /** + * Structured content returned by the MCP server. + * Pre-populated when the widget loads (if tool has completed). + */ + toolOutput?: unknown; + + /** + * The `_meta` payload from tool response (widget-only, hidden from model). + */ + toolResponseMetadata?: Record; + + /** + * Persisted UI state snapshot between renders. + * Set via setWidgetState(), rehydrated on subsequent renders. + */ + widgetState?: unknown; + + /** + * Current theme setting. + */ + theme?: OpenAITheme; + + /** + * Current display mode of the widget. + */ + displayMode?: OpenAIDisplayMode; + + /** + * Maximum height available for the widget. + */ + maxHeight?: number; + + /** + * Safe area insets for the widget. + */ + safeArea?: OpenAISafeArea; + + /** + * Current view mode. + */ + view?: string; + + /** + * User agent string from the host. + */ + userAgent?: string; + + /** + * Locale setting (BCP 47 language tag). + */ + locale?: string; + + // ───────────────────────────────────────────────────────────────────────── + // State Management Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Persist UI state synchronously after interactions. + * State is scoped to this widget instance and rehydrated on re-renders. + * + * @param state - State object to persist + */ + setWidgetState?(state: unknown): void; + + // ───────────────────────────────────────────────────────────────────────── + // Tool & Chat Integration Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Invoke another MCP tool from the widget. + * + * @param name - Name of the tool to call + * @param args - Arguments to pass to the tool + * @returns Promise resolving to the tool result + */ + callTool?( + name: string, + args?: Record, + ): Promise; + + /** + * Inject a user message into the conversation. + * + * @param options - Message options + * @param options.prompt - The message text to send + */ + sendFollowUpMessage?(options: { prompt: string }): Promise; + + // ───────────────────────────────────────────────────────────────────────── + // File Operations + // ───────────────────────────────────────────────────────────────────────── + + /** + * Upload a user-selected file. + * + * @param file - File to upload + * @returns Promise resolving to the file ID + */ + uploadFile?(file: File): Promise<{ fileId: string }>; + + /** + * Retrieve a temporary download URL for a file. 
+ * + * @param options - File options + * @param options.fileId - ID of the file to download + * @returns Promise resolving to the download URL + */ + getFileDownloadUrl?(options: { fileId: string }): Promise<{ url: string }>; + + // ───────────────────────────────────────────────────────────────────────── + // Layout & Display Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Request a display mode change (inline, pip, fullscreen). + * + * @param options - Display mode options + * @param options.mode - Requested display mode + */ + requestDisplayMode?(options: { mode: OpenAIDisplayMode }): Promise; + + /** + * Spawn a ChatGPT-owned modal. + */ + requestModal?(options: unknown): Promise; + + /** + * Report dynamic widget height to the host. + * + * @param height - Height in pixels + */ + notifyIntrinsicHeight?(height: number): void; + + /** + * Close the widget from the UI. + */ + requestClose?(): void; + + // ───────────────────────────────────────────────────────────────────────── + // Navigation Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Open a vetted external link in a new tab. + * + * @param options - Link options + * @param options.href - URL to open + */ + openExternal?(options: { href: string }): Promise; +} + +/** + * Window type augmentation for OpenAI environment. + */ +export interface WindowWithOpenAI { + openai: OpenAIGlobal; +} + +/** + * Detect if the current environment has window.openai available. + * + * @returns true if running in OpenAI/ChatGPT environment + */ +export function isOpenAIEnvironment(): boolean { + return ( + typeof window !== "undefined" && + typeof (window as unknown as WindowWithOpenAI).openai === "object" && + (window as unknown as WindowWithOpenAI).openai !== null + ); +} + +/** + * Get the window.openai object if available. + * + * @returns The OpenAI global object, or undefined if not in OpenAI environment + */ +export function getOpenAIGlobal(): OpenAIGlobal | undefined { + if (isOpenAIEnvironment()) { + return (window as unknown as WindowWithOpenAI).openai; + } + return undefined; +} diff --git a/src/react/useApp.tsx b/src/react/useApp.tsx index 73f2812e..111f8591 100644 --- a/src/react/useApp.tsx +++ b/src/react/useApp.tsx @@ -1,16 +1,12 @@ import { useEffect, useState } from "react"; import { Implementation } from "@modelcontextprotocol/sdk/types.js"; import { Client } from "@modelcontextprotocol/sdk/client"; -import { App, McpUiAppCapabilities, PostMessageTransport } from "../app"; +import { App, McpUiAppCapabilities } from "../app"; export * from "../app"; /** * Options for configuring the useApp hook. * - * Note: This interface does NOT expose App options like `autoResize`. - * The hook creates the App with default options (autoResize: true). If you need - * custom App options, create the App manually instead of using this hook. - * * @see {@link useApp} for the hook that uses these options * @see {@link useAutoResize} for manual auto-resize control with custom App options */ @@ -19,6 +15,18 @@ export interface UseAppOptions { appInfo: Implementation; /** Features and capabilities this app provides */ capabilities: McpUiAppCapabilities; + /** + * Enable experimental OpenAI compatibility. + * + * When enabled (default), the App will auto-detect the environment: + * - If `window.openai` exists → use OpenAI Apps SDK + * - Otherwise → use MCP Apps protocol via PostMessageTransport + * + * Set to `false` to force MCP-only mode. 
+ * + * @default true + */ + experimentalOAICompatibility?: boolean; /** * Called after App is created but before connection. * @@ -60,14 +68,18 @@ export interface AppState { * React hook to create and connect an MCP App. * * This hook manages the complete lifecycle of an {@link App}: creation, connection, - * and cleanup. It automatically creates a {@link PostMessageTransport} to window.parent - * and handles initialization. + * and cleanup. It automatically detects the platform (MCP or OpenAI) and uses the + * appropriate transport. + * + * **Cross-Platform Support**: The hook supports both MCP-compatible hosts and + * OpenAI's ChatGPT environment. By default, it auto-detects the platform. + * Set `experimentalOAICompatibility: false` to force MCP-only mode. * * **Important**: The hook intentionally does NOT re-run when options change * to avoid reconnection loops. Options are only used during the initial mount. * * **Note**: This is part of the optional React integration. The core SDK - * (App, PostMessageTransport) is framework-agnostic and can be + * (App, PostMessageTransport, OpenAITransport) is framework-agnostic and can be * used with any UI framework or vanilla JavaScript. * * @param options - Configuration for the app @@ -75,22 +87,18 @@ export interface AppState { * initialization, the `error` field will contain the error (typically connection * timeouts, initialization handshake failures, or transport errors). * - * @example Basic usage + * @example Basic usage (auto-detects platform) * ```typescript - * import { useApp, McpUiToolInputNotificationSchema } from '@modelcontextprotocol/ext-apps/react'; + * import { useApp } from '@modelcontextprotocol/ext-apps/react'; * * function MyApp() { * const { app, isConnected, error } = useApp({ * appInfo: { name: "MyApp", version: "1.0.0" }, * capabilities: {}, * onAppCreated: (app) => { - * // Register handlers before connection - * app.setNotificationHandler( - * McpUiToolInputNotificationSchema, - * (notification) => { - * console.log("Tool input:", notification.params.arguments); - * } - * ); + * app.ontoolinput = (params) => { + * console.log("Tool input:", params.arguments); + * }; * }, * }); * @@ -100,12 +108,22 @@ export interface AppState { * } * ``` * + * @example Force MCP-only mode + * ```typescript + * const { app } = useApp({ + * appInfo: { name: "MyApp", version: "1.0.0" }, + * capabilities: {}, + * experimentalOAICompatibility: false, // Disable OpenAI auto-detection + * }); + * ``` + * * @see {@link App.connect} for the underlying connection method * @see {@link useAutoResize} for manual auto-resize control when using custom App options */ export function useApp({ appInfo, capabilities, + experimentalOAICompatibility = true, onAppCreated, }: UseAppOptions): AppState { const [app, setApp] = useState(null); @@ -117,16 +135,14 @@ export function useApp({ async function connect() { try { - const transport = new PostMessageTransport( - window.parent, - window.parent, - ); - const app = new App(appInfo, capabilities); + const app = new App(appInfo, capabilities, { + experimentalOAICompatibility, + }); // Register handlers BEFORE connecting onAppCreated?.(app); - await app.connect(transport); + await app.connect(); if (mounted) { setApp(app); From 28a39246495ee5b6efd563686e7ae741095476a5 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 16 Dec 2025 23:25:00 +0000 Subject: [PATCH 02/30] feat: add cross-platform support for OpenAI Apps SDK MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit - Dynamic capability detection based on window.openai availability - Report availableDisplayModes when requestDisplayMode is available - Include toolResponseMetadata as _meta in tool-result notification - registerAppTool adds openai/outputTemplate metadata automatically - registerAppResource registers both MCP and OpenAI (+skybridge) variants - Preserve custom MIME types in OpenAI resource callback 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/openai/transport.test.ts | 95 ++++++++++++++++++++ src/openai/transport.ts | 28 ++++-- src/server/index.test.ts | 163 +++++++++++++++++++++++++++++------ src/server/index.ts | 72 +++++++++++++++- 4 files changed, 323 insertions(+), 35 deletions(-) diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index 01911e09..800073ca 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -128,6 +128,75 @@ describe("OpenAITransport", () => { }, }); }); + + test("dynamically reports capabilities based on available methods", async () => { + // Remove callTool to test dynamic detection + delete mockOpenAI.callTool; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const result = (response as { result: { hostCapabilities: unknown } }) + .result.hostCapabilities as Record; + + // serverTools should NOT be present since callTool is missing + expect(result.serverTools).toBeUndefined(); + // openLinks should be present since openExternal exists + expect(result.openLinks).toBeDefined(); + // logging is always available + expect(result.logging).toBeDefined(); + }); + + test("includes availableDisplayModes when requestDisplayMode is available", async () => { + mockOpenAI.requestDisplayMode = mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["requestDisplayMode"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 1, + result: { + hostContext: { + availableDisplayModes: ["inline", "pip", "fullscreen"], + }, + }, + }); + }); }); describe("tools/call request", () => { @@ -334,6 +403,32 @@ describe("OpenAITransport", () => { expect(toolResultNotification).toBeDefined(); }); + test("includes _meta from toolResponseMetadata in tool result", async () => { + mockOpenAI.toolResponseMetadata = { widgetId: "abc123", version: 2 }; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + expect(toolResultNotification).toMatchObject({ + jsonrpc: "2.0", + method: 
"ui/notifications/tool-result", + params: { + _meta: { widgetId: "abc123", version: 2 }, + }, + }); + }); + test("does not deliver notifications when data is missing", async () => { delete mockOpenAI.toolInput; delete mockOpenAI.toolOutput; diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 399ef949..8c5cfb84 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -245,6 +245,10 @@ export class OpenAITransport implements Transport { theme: this.openai.theme, locale: this.openai.locale, displayMode: this.openai.displayMode, + // If requestDisplayMode is available, ChatGPT supports all three modes + availableDisplayModes: this.openai.requestDisplayMode + ? ["inline", "pip", "fullscreen"] + : undefined, viewport: this.openai.maxHeight ? { width: 0, height: 0, maxHeight: this.openai.maxHeight } : undefined, @@ -252,17 +256,29 @@ export class OpenAITransport implements Transport { userAgent, }; + // Dynamically determine capabilities based on what window.openai supports + const hostCapabilities: Record = { + // Logging is always available (we map to console.log) + logging: {}, + }; + + // Only advertise serverTools if callTool is available + if (this.openai.callTool) { + hostCapabilities.serverTools = {}; + } + + // Only advertise openLinks if openExternal is available + if (this.openai.openExternal) { + hostCapabilities.openLinks = {}; + } + return this.createSuccessResponse(id, { protocolVersion: LATEST_PROTOCOL_VERSION, hostInfo: { name: "ChatGPT", version: "1.0.0", }, - hostCapabilities: { - serverTools: {}, - openLinks: {}, - logging: {}, - }, + hostCapabilities, hostContext, }); } @@ -494,6 +510,8 @@ export class OpenAITransport implements Transport { text: JSON.stringify(this.openai.toolOutput), }, ], + // Include _meta from toolResponseMetadata if available + _meta: this.openai.toolResponseMetadata, }, } as JSONRPCNotification); }); diff --git a/src/server/index.test.ts b/src/server/index.test.ts index d5e0a80a..e4425583 100644 --- a/src/server/index.test.ts +++ b/src/server/index.test.ts @@ -4,6 +4,8 @@ import { registerAppResource, RESOURCE_URI_META_KEY, RESOURCE_MIME_TYPE, + OPENAI_RESOURCE_SUFFIX, + OPENAI_MIME_TYPE, } from "./index"; import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; @@ -53,6 +55,34 @@ describe("registerAppTool", () => { expect(capturedHandler).toBe(handler); }); + it("should add openai/outputTemplate metadata for cross-platform compatibility", () => { + let capturedConfig: Record | undefined; + + const mockServer = { + registerTool: mock( + (_name: string, config: Record, _handler: unknown) => { + capturedConfig = config; + }, + ), + }; + + registerAppTool( + mockServer as unknown as Pick, + "my-tool", + { + _meta: { + [RESOURCE_URI_META_KEY]: "ui://test/widget.html", + }, + }, + async () => ({ content: [{ type: "text" as const, text: "ok" }] }), + ); + + const meta = capturedConfig?._meta as Record; + expect(meta["openai/outputTemplate"]).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + }); + describe("backward compatibility", () => { it("should set legacy key when _meta.ui.resourceUri is provided", () => { let capturedConfig: Record | undefined; @@ -196,18 +226,18 @@ describe("registerAppTool", () => { }); describe("registerAppResource", () => { - it("should register a resource with default MIME type", () => { - let capturedName: string | undefined; - let capturedUri: string | undefined; - let capturedConfig: Record | undefined; + it("should register both MCP and OpenAI resources", () => { + 
const registrations: Array<{ + name: string; + uri: string; + config: Record; + }> = []; const mockServer = { registerTool: mock(() => {}), registerResource: mock( (name: string, uri: string, config: Record) => { - capturedName = name; - capturedUri = uri; - capturedConfig = config; + registrations.push({ name, uri, config }); }, ), }; @@ -233,21 +263,32 @@ describe("registerAppResource", () => { callback, ); - expect(mockServer.registerResource).toHaveBeenCalledTimes(1); - expect(capturedName).toBe("My Resource"); - expect(capturedUri).toBe("ui://test/widget.html"); - expect(capturedConfig?.mimeType).toBe(RESOURCE_MIME_TYPE); - expect(capturedConfig?.description).toBe("A test resource"); + // Should register TWO resources (MCP + OpenAI) + expect(mockServer.registerResource).toHaveBeenCalledTimes(2); + + // First: MCP resource + expect(registrations[0].name).toBe("My Resource"); + expect(registrations[0].uri).toBe("ui://test/widget.html"); + expect(registrations[0].config.mimeType).toBe(RESOURCE_MIME_TYPE); + expect(registrations[0].config.description).toBe("A test resource"); + + // Second: OpenAI resource + expect(registrations[1].name).toBe("My Resource (OpenAI)"); + expect(registrations[1].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + expect(registrations[1].config.mimeType).toBe(OPENAI_MIME_TYPE); + expect(registrations[1].config.description).toBe("A test resource"); }); - it("should allow custom MIME type to override default", () => { - let capturedConfig: Record | undefined; + it("should allow custom MIME type to override default for MCP resource", () => { + const registrations: Array<{ config: Record }> = []; const mockServer = { registerTool: mock(() => {}), registerResource: mock( (_name: string, _uri: string, config: Record) => { - capturedConfig = config; + registrations.push({ config }); }, ), }; @@ -271,12 +312,16 @@ describe("registerAppResource", () => { }), ); - // Custom mimeType should override the default - expect(capturedConfig?.mimeType).toBe("text/html"); + // MCP resource should use custom mimeType + expect(registrations[0].config.mimeType).toBe("text/html"); + // OpenAI resource should always use skybridge MIME type + expect(registrations[1].config.mimeType).toBe(OPENAI_MIME_TYPE); }); - it("should call the callback when handler is invoked", async () => { - let capturedHandler: (() => Promise) | undefined; + it("should transform OpenAI resource callback to use skybridge MIME type", async () => { + let mcpHandler: (() => Promise) | undefined; + let openaiHandler: (() => Promise) | undefined; + let callCount = 0; const mockServer = { registerTool: mock(() => {}), @@ -287,12 +332,17 @@ describe("registerAppResource", () => { _config: unknown, handler: () => Promise, ) => { - capturedHandler = handler; + if (callCount === 0) { + mcpHandler = handler; + } else { + openaiHandler = handler; + } + callCount++; }, ), }; - const expectedResult = { + const callback = mock(async () => ({ contents: [ { uri: "ui://test/widget.html", @@ -300,8 +350,7 @@ describe("registerAppResource", () => { text: "content", }, ], - }; - const callback = mock(async () => expectedResult); + })); registerAppResource( mockServer as unknown as Pick, @@ -311,10 +360,70 @@ describe("registerAppResource", () => { callback, ); - expect(capturedHandler).toBeDefined(); - const result = await capturedHandler!(); + // MCP handler should return original content + const mcpResult = (await mcpHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + 
expect(mcpResult.contents[0].mimeType).toBe(RESOURCE_MIME_TYPE); + + // OpenAI handler should return with skybridge MIME type + const openaiResult = (await openaiHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + expect(openaiResult.contents[0].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + expect(openaiResult.contents[0].mimeType).toBe(OPENAI_MIME_TYPE); + }); + + it("should preserve custom MIME types in OpenAI resource callback", async () => { + let openaiHandler: (() => Promise) | undefined; + let callCount = 0; + + const mockServer = { + registerTool: mock(() => {}), + registerResource: mock( + ( + _name: string, + _uri: string, + _config: unknown, + handler: () => Promise, + ) => { + if (callCount === 1) { + openaiHandler = handler; + } + callCount++; + }, + ), + }; + + // Callback returns custom MIME type (not the default MCP App type) + const callback = mock(async () => ({ + contents: [ + { + uri: "ui://test/widget.html", + mimeType: "application/json", + text: "{}", + }, + ], + })); - expect(callback).toHaveBeenCalledTimes(1); - expect(result).toEqual(expectedResult); + registerAppResource( + mockServer as unknown as Pick, + "My Resource", + "ui://test/widget.html", + { _meta: { ui: {} } }, + callback, + ); + + // OpenAI handler should preserve the custom MIME type + const openaiResult = (await openaiHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + expect(openaiResult.contents[0].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + // Custom MIME type should be preserved, not converted to skybridge + expect(openaiResult.contents[0].mimeType).toBe("application/json"); }); }); diff --git a/src/server/index.ts b/src/server/index.ts index 720cf658..a94281af 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,6 +1,16 @@ /** * Server Helpers for MCP Apps. * + * These utilities register tools and resources that work with both + * MCP-compatible hosts and OpenAI's ChatGPT Apps SDK. + * + * ## Cross-Platform Support + * + * | Feature | MCP Apps | OpenAI Apps SDK | + * |---------|----------|-----------------| + * | Tool metadata | `_meta.ui.resourceUri` | `_meta["openai/outputTemplate"]` | + * | Resource MIME | `text/html;profile=mcp-app` | `text/html+skybridge` | + * * @module server-helpers */ @@ -26,6 +36,17 @@ import type { ToolAnnotations } from "@modelcontextprotocol/sdk/types.js"; export { RESOURCE_URI_META_KEY, RESOURCE_MIME_TYPE }; export type { ResourceMetadata, ToolCallback, ReadResourceCallback }; +/** + * OpenAI skybridge URI suffix. + * Appended to resource URIs for OpenAI-specific resource registration. + */ +export const OPENAI_RESOURCE_SUFFIX = "+skybridge"; + +/** + * OpenAI skybridge MIME type. + */ +export const OPENAI_MIME_TYPE = "text/html+skybridge"; + /** * Tool configuration (same as McpServer.registerTool). */ @@ -50,7 +71,7 @@ export interface McpUiAppToolConfig extends ToolConfig { | { /** * URI of the UI resource to display for this tool. - * This is converted to `_meta["ui/resourceUri"]`. + * This is converted to `_meta.ui.resourceUri`. 
* * @example "ui://weather/widget.html" * @@ -125,15 +146,31 @@ export function registerAppTool< normalizedMeta = { ...meta, ui: { ...uiMeta, resourceUri: legacyUri } }; } + // Get the resource URI after normalization + const resourceUri = (normalizedMeta.ui as McpUiToolMeta | undefined) + ?.resourceUri; + + // Add OpenAI outputTemplate metadata for cross-platform compatibility + if (resourceUri) { + normalizedMeta = { + ...normalizedMeta, + "openai/outputTemplate": resourceUri + OPENAI_RESOURCE_SUFFIX, + }; + } + server.registerTool(name, { ...config, _meta: normalizedMeta }, handler); } /** - * Register an app resource with the MCP server. + * Register an app resource with dual MCP/OpenAI support. * * This is a convenience wrapper around `server.registerResource` that: * - Defaults the MIME type to "text/html;profile=mcp-app" - * - Provides a cleaner API matching the SDK's callback signature + * - Registers both MCP and OpenAI variants for cross-platform compatibility + * + * Registers two resources: + * 1. MCP resource at the base URI with `text/html;profile=mcp-app` MIME type + * 2. OpenAI resource at URI+skybridge with `text/html+skybridge` MIME type * * @param server - The MCP server instance * @param name - Human-readable resource name @@ -164,6 +201,9 @@ export function registerAppResource( config: McpUiAppResourceConfig, readCallback: ReadResourceCallback, ): void { + const openaiUri = uri + OPENAI_RESOURCE_SUFFIX; + + // Register MCP resource (text/html;profile=mcp-app) server.registerResource( name, uri, @@ -174,4 +214,30 @@ export function registerAppResource( }, readCallback, ); + + // Register OpenAI resource (text/html+skybridge) + // Re-uses the same callback but returns with OpenAI MIME type + server.registerResource( + name + " (OpenAI)", + openaiUri, + { + ...config, + // Force OpenAI MIME type + mimeType: OPENAI_MIME_TYPE, + }, + async (resourceUri, extra) => { + const result = await readCallback(resourceUri, extra); + // Transform contents to use OpenAI MIME type + return { + contents: result.contents.map((content) => ({ + ...content, + uri: content.uri + OPENAI_RESOURCE_SUFFIX, + mimeType: + content.mimeType === RESOURCE_MIME_TYPE + ? OPENAI_MIME_TYPE + : content.mimeType, + })), + }; + }, + ); } From 38e098cbcadcadd53184976f4654e46bb5218f63 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 13:22:01 +0000 Subject: [PATCH 03/30] test: update Three.js golden snapshot MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The learn_threejs tool was added in #173, which adds a second option in the Tool dropdown. This updates the golden snapshot to match. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- .../e2e/servers.spec.ts-snapshots/threejs.png | Bin 37725 -> 21343 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/tests/e2e/servers.spec.ts-snapshots/threejs.png b/tests/e2e/servers.spec.ts-snapshots/threejs.png index 683a77de457d63e0946b183339bbb72748251beb..fbbb8e7167f8b95ca197dc35baf2405661608017 100644 GIT binary patch literal 21343 zcmeIacU04P|1TIsWgG<@8zN1|f{K8ObOI_W7K(_9QgsxNUPFKof})_(Y>0q>bP#FM z2}wj$MtTc@KoWX@kc66$ge3ckKF{3w?cKZk+`D_uo^#LqqmXYxKIL6r@Ai6f+1y0( z*ImCtAP~t5=g(Y$K-PgD*L41}2K;wY>7p0}vKeyW%&DtEZ)Zu;Z|$6r>VhZ~<9w%m z*T>%+%H=k{JPF%SairmxtH%XN_JRFX7q%+9M(p42m2uMSAa1YUc>vu!MA&}0^Yj1(yc0GV>0_!#291a$I@0T@4 z!E2JIAnPHJcYEXyLyUy0i^63`;c~B)1(ZGH58edno~ap&(;s=bl{^=zEsvOerpwO9 zAlQll#;?Lu5}Ij_LhjV&5(J4RI*Dw2d557KjAFRhs*z<9Z#enpj#OR8GqIKL%IPs$ zsX?PpM1L}Ft>ueYEOy$=TW_i_E2{41w$v;v1w4S5JU;mC-0i^pBI7hU{*Zr%?xHN% z!rni9n-t)WPko44Bl$mC{V$)u$4{&k+X>zu`JZkXd`SL_Mu$O9PtPLwHvWt?ZdaFN zydifAY`M6$ma|-fLh9I21gvV@%2##8Qorf$9N#jJf#WG2AHN>}{}f=gH8nLc6RO@x zI=-XRdFhHT`~x82G6*jC8EltKCO^-moDX<871`C674fhUwmRP=0eN@-tderr!prnf zhJQLwz{V-P*b5Q&c_&=$5N;~zzb!wWLRp;zm#}@icj%a$IOOCnwpAV*zU#U%$ruUJ z+{##ddLYqNEc~05T4EHJel*lX0e02acJQ&6UyEw>859FL7Ne0XMD@gzXDuoqoi!oK ztT5m2GAKGBtf;bbneL6^J@!I8K1$nl3nI1D+`}AE*mn`vMPMSC2wXLXy#~_xDY@zd z2K>J9!q&fuOM%sq9b=;;W*GeZ`4?g22BYNpxp|B@1E2<%9C2hUpy?B%7qp-?!n}6?}Rqj9%Ad{^o?s zq^|b>JB!9aF^N7MxoH|MZAc5mf%+>09$FAcujz|vT5gKhuDA6yHbo9WBabZS6G}oS zrm`f?Erc$nP~_v*x}?JTjy1`@JnxceypLuQ*nYIp8zGhEf5R>pbwjD{jY|gSfHdF}R(;7l3vn3P>9V$JWhmumik2^xb zi6Qi?X>II$Y>{pCInviV-(m(({FfGnoT;q%I0b#y_)815#1_lz9$sF8EF*4KgF@I! zqtcG(ua1I+M9UWj)7dX0^#n)ZgA_Q9M@kp2QmJVOM!sd%dTRy=t^M>PuoQ%JBx?YU zT}*WWKg>5N2|q@06H%PRDcV^X`+!f}>iP-QmPB=76Kb_-%Hi`bgR4c}NE#53U;>)F zTnMiAk|VcU8^>vb*N-W5z4UL_rPj~KdJmyG@(YCA5@AU5p}bim-@fzGicsp_)bCsK z9h3Unll&X&Djl)HV&JHEmAEWh>9 z={--Ic}l1^XiP6pTye zP;un+81Z|=wI;Xqm~l<>J6vQ1QD;kvtZ@ z(AosrC{n&m##E1m5w!WWx&m3UnW_3A61hkCIF_!pI{H?e@|b%a5X)k`-1k6kMWXcFNN0?9~Q7Q)fIs*x>-Ky z>5Z`6Hn<)Usg9ogn3opf##xxK%#IA1@ zWyG5;SK+lKX()49iMUm#C3&DSC|)&6PTp9Ps61mCe}%h2`R&|ADXF6eFXB=dH-+>( zb3+v41G_?r_p%S>u4IR@&~a{mG-k8ORT`2_cJC7p6^S|O6jKg3Wt{;O5H1R;I^SQX z`BTI+yeUpWOcI<*cJza$dLV$#iWXUyja@lE-HyNh+plo>B5>iVm&EZhet^I*b_WbB*%>@8tdorUA1;DSFb8O?{A`V!^0Ex%$I$$DE`TQ7;A2y$g4N?KtnHl|1e)bUs)I$`xoFZ9&kf77 zNEb9qOLZ#uaNFUSyjo7Hzq#&hpKFfm4KoV!@JBl#CqHL?*e0_fZ5=;(-Ee~jOyaA{ zjN)*NMti?-kQqwV?;@x#Zdv_&;i4OMv#qU7hP{#BvzO0F7Y6qp6JY}(0yxq+C8D5+h@t^Sw9uzlm?#F1 zOBk&*0F)t^E$;{$0mhzB5%OymSGOz5zoV3S>y|I7N))%g?d}S`c>dcZlqLkSZ%R{t zUC0|~zJ%RDx%j=6_qsAM#*cre%)#Zb>o)FKUCJ}!i!84ZuiOX$46deIZ$2=;wgd+R zee`Q|S*nytR<< zT|sSY=0=|AOJp*d&jiXn*RCItv`l0^-L8@ux58jBY`rm>Gd%tmWy5(9KFCiwvxg(D zv~n`wWrSC3$W0)iDCkoB@znDvV-R`A_3}Bi*RNj#ze|~UtF$B`&V}wjm|Td zdEktpy{YFob3hDgcnmb#5tv@Vg2;c%>~Gb@xY?Xe(IPBe1T{IX~XR zarxjyErCcfn5*r~P@v}kFPUtc$&|h!Z7_H8mAM6Uxfk36EE|1xI-oI9a^zW~p{gTx z75E;?R~g4uxX2Q6^H5xO9LpUDoOEV~OU@P7ncf2Lke(E&E@D29O%c!mLkxKE8CE#8ynbn}H(eb@tN#3Fx_0=V za+H;CBFr$7@I-101R`bqAi_vmZ(+^cR9BX^*U*YcE`g=$n86ns3pad6Et#OU<r>7z|Sy6sC z;YO9mO}ir)XIBuu+4}k)Q#(i89{TtwVtg4Q{Zvm@Pfza!(X&$>xNrcOj~q7Fo!GtA zHf=~<{`FR#q)W&1s^BwjAwD21I`iscPR;o_yQ#f85M`Rka!^qMDgncQi4HysVgQ@D zs>~FtZywbhgh1TRKomEx8)kzDz?WZDjcKd73@O2e8X8@0+X#U?*%~eZu$uRes|M~t z9=X&7e4k)=QyqxIdL_hG*151YLh8>oPx#qYtTSE`gM0^3#QEsc8JlG;MOeNQd)GDL zSNrV5kASWBg3lcF)Ze1f-LR)$|FC-_A&TZgt{+YqJq=m^PRgl#00OzJ0p92Oyn`z7 zK3L@YO(fzx@bp6UwBx>`uadMqr=`FGHce>$@(Iz`u7K>dyrV|0QI*-L&3`N45n)?@8$2SFeuCU;TG3KonR1+{wQF?RL?pO6#hJdF9Njjk@jI5S?&cZDT&jpf;6!U$p!I@jRPpeDQq4}MS9 
z$Y~M}H_)jufeo5%&C0Tnbfh%mlO$7zUvgt<=}b|3MNIlpr%5MuS5}6S zwLyRX!;~ry6fj&--0EW=sGEUt_Df)G_~c<#NRP00S5M*HPteaiPJ{)(-XSFn zim{$Wh(Ko06hnBdJUQ5B`Rka*-hO2_hnAHmIGDpF%}o`nbB@u88ueoV-e@PrE8G-M z<#g|iZ&o2%5qYnPb(B66t;|5|h+@}p1#vzDucVD^pfrqzOS^2j=U8H-pxSq83}W)u zLrV^*+kt7tVW=&mW-Nm|$rjMrfu4gktT_GaB`BZ4;&fz@&+;qrg?kejJjGqeX>lVO zxzgQ&-Jw`B^P!WsTnV409g6+E9ZwjtcGdF+&kGq(0t*DQ_j9P4*BI!@8GSAyP{Z;Q zJ-Qz89)Hl0v7xP~Eu&f;V$J)*=NHS9ieB4xX)L~u1zCoh!mdMYdt%TlaT3*=f=Z3t zrO|B>RVBE|nFM{JI(OU}wPadBD~1kQcgREMY66RdQzltWu)b^QOiEQA50ReKqj;D- z6wq=@deVnA{W8gCU`9|?j4LRaC>gE{l5Ny8hYijYmOSZZ*65~l9C~NyU6)v`9oWkG zEcIE9p{DJpJK{{x9lUforJ|(G1S4TALEHINc4meB52i14_UPYJE-Swy|}c% zWljG<BoznZ&f+Zh9R%SlCE$(K)wZQ$Gsi;-G&(7y8-W+!dZ8@*VGaOSxPk+2GAh*7GqMQP zUA{6ty;M1vxXU-7n{@^1yzA&{IQyK!C(ZA;D7VG1WD259)9RekIE#d?@?bvUzEx%< z?$`>TGVXa^V{u%HQMF5dpXBfd&Qp!Ymw1X9u@=wrp}wACYCb&o{?g zk;;+Q-uIrSrSFl`e$a#u&8O9+9d*#UfFe3sPkysGBoQ%NHV0ZS_x>xbWe^Q7=|5~{|N(o*Z3Tn$xQW|H*>G{;H^IJMP}_UQ2;Rq(wb1UYEE4 z_l_R>9$D{sI=*m6hcpE8Oz$i5w4a+yxWtn5=pYd_&6pFv1kCMNmO&-IfgfSRu9B#} zcG{}qkl}n)j(Wh^QwM*wM~-M;Y_n7(V?7aYM~S)Ho9=HnnM}@o|N58}H!WI0x>!l? zzLZOPW5A7)ucdKYi;|8Wcxa+6MViW1w3bW<0B$$*A^^(!6vBZIw#Nw0y7V&!stlWp zZZA#!_BP?ItcKI%m)z1jbQ5o)&e39KTC6rQfF6ex%gx>Nc9KY-^>0xK8!T>}I;NU5 zHQ1Gurr<~+4f@@1ImwooDs|zppqj0pfPG5sQ(X7eRt@dUFK*x!fUj^GOnW}7apZfw z=qy?u*7mf|xG&+g>DuakLdxFLpW{`^M1nhOCx5+{$eTpH9&@h|c^8Uq6;hSy(E84|8>FE5&c6*<7E!Wj1=C=5qOy1UhZeGfptr+npZ7TlC2eUK)|SujxFgT9}Bf z*1W37?}oxDNQyB!Qh#Rg@!fA7_87W}^lFE$i)K_BNneo<@E=Lx8wO4kY*m(YNxyx5 zrgG<`@41e8r#p@*wkk-~IAVPkV(H5v%^%wXJP2; z7AbT0)hmyztiHx;m`H^0wXU^`i>_i-ff_`WleTJfbk3#3xbeWKU(26#h_@Dc-uh$6 zxD~tB=i$L^zxtM)h?!-}LoTEIYVFS3w3*xwQ@vJAIx(NW$E?QH7HXPpMXGcft4iw( z*r^<*{bdodid)j$flDD&CcaO8lBhPf^`3$-XZ-n zH4T3ISz0=2^QvW{=3ev;NO<;!Pmt`h8J8bN=X9+BhhnED^WuQ067@`PgOtGF{Lh-x zkE%_!dD)Gajf3;uq)z#a--4i9%k{Iwl`?Ps<{f2}zxEvl0(fB66F z5GJRdt-&j>)oQ0^Uz)_~!5T0LkdqP3%&h=6?v{jpBH1!+uzT!23d$a((v!eaA)3!3 zAw#wWOIq-N!1FjrY>}N>(^=aJkOESb6sy_w9&h?AOl+6^f4v^(%}2mTuP8S(q)X)N zhFBY0Xx_B}mjdZ+cUKHAL~EDWSbL=#*0$+(f)~GtBDCHXgHQP4iJo?x;CZgUcYjT3 z)_(BpB<@N9NRF)^CIl!Bn>J-ipcdLHH#R2ItUc&bKp1|SG>1j2cedZmFn0y-lW0}1 z)tMf?J5HS3+d^Fw$JYZvsoA#aLBjVFE~hI&LICnE&@^K0-vumZ55y(YeAq2tboc*t zC$GWod&zx6Xn!ihjjAIfCX%};aJ3FSmhU7g0$Lq)s%YK*WP_RBb?mwwqA1hm{8!CS zNxq9r4q1f6AbipGK-lK-^HB2sgS-Fyd`EbpzSf62+;J~Am;O3e6WY=v4TqehmoMrl zg!g=i$pgPyc^OZj56>!2m`0q~0-cal@vSg_+_1Y;8pP$)7$C5#MBQN?$I@|fq1#OxwfCE)-n;coTxsb`N+EX1$4b}gGYe0zp&d%%blDet05__LSTt@0jvUL z7NtSL8wGQuHs{jRG0-D6!Hwg6$U+-s$7(;z8h$rT8?*X_+_k7hoptTny0iNWZyjV< zlg$7=KCZ&*orY?WD#&*xncnk?YqqAZj$|{~m2RkB_E4qQ*Qb5H<+)Y*)7*sbF{!A* zZ~kb+;gr{oFzcG}zOz=gDYoBY+aB&IcYp`G)H-oJ;q6{T{v6-E&klFXg#9g6i8hPa zb6nZphvXUE?im+9q*ttIb5r3D{t$1$pv6k6b90KeM;)9NbAevcy^(~0 zwZ+OEFJ~srkMde7I&Pfc73;PTTJ_+TE-e{S&s@{>ghPYgcIC@2Qk0eanyeGp8zCp{ z^+wk>xOsWGF`}Pk>_42`Sa_g-hb$sme#&c_2G>+}y;C`_#w zTy?o^W#w+tlwF`;mDiruHE?30yDg;!-(H54<4S399u@aA$ZaYU+Zw>oaUPh3pV3*?4NThQpf&{FT^Cwb$roDl4OkgSxp=jNjA@vx-POA}))0wLhP)oPNxg_#ob6%IHx!?vaZ_ z2r8IruVFE5lo#0ClIj!EIVPW(DuE)fBaq;0lG}A?&`1M704uMr4Vh;H7sFj`PCb zcC4|&ypEM7aosV@Yw?22P$}#FpVr968reM`{9j&N_n=MR{)rN(c1e20wTqZJzFRR!sm+1wprH64G?GHjC$ejlHFTHgB z9_xFY!C~bV`$l`EZPz-2@iT6MsZh0zKDwD%PjoV)(~4STTJ6iwV+*#-THc*iE8S!(aElA}NbPQe><65C(=%R!|_2;OtE7)?+RmiZ|>BLJ4)v*)%P3m5<$lRy}R2 zw5rwOHfz%~B8DSSXj}9`FHT}LH1evhA81GFka6zD))r0M*+YHsH`AVIO8Ky$4J{Lg zNJ@H|+|+Kflu^~G%bFAK`IBgrg_`=1>t9miH8P$r9sF(eLQBakQ31KFeQB0_6Iica z-Qvpq73OJBuZ2(dZN9JqL7w6a;dADt^b$~h2 zGTE>nw^)I3jJ#*DRM+a*t9!YlCRC7Y<{o(cy+X#WWTLV~x?ZJ=6|IhBm1myZOB$%# z^J{d}D#wa`$hsY~T*y8$#Psy*9E<9Eg^B6Xk1csbA#iDFS+_DSrI)-oPfI*f;$Y~0 
zjye)g*7)KZ=|;WWVXG6QUx)`ckVS9;LGUku(FXYtIKGwSPx$$Rf>G&YbdrU?FY^tj@E8@hX=4kzJTXwW3 zuO#gRDzGJ|5E%3d)bIL#9sN*llr?gE4DLy+TYN&E8Y~;D%R;xE(5!}4Xh#+2PSv_U z5p;HwP>p0`;i{{f{+|^i5=MfCl;C}scmN%94cGX=^i2z#bZ)g9DOupYu-oj9@D z4%{@Fx>w3l#^7}a_>G8uDc`qyh`Qs8eJ9T7n{W+GlzQ}Sn;t#i`JL}M zd)*&CXNn@Kls%b=8EMpfm!Pi?E-^jrgBqdLfpL;4YN^3ItiIP1xtHJXCOg#NGbl1` zbOnjD1f%hJX~r+XN(n9EiP;s)!s#`j5efENIL~N?byaG1_0PLDSq`NFi%)#yHKdZ2 zE!RlN#9nqHtzj9{CCSt8Pd=VB`BJUskJ7OsR6XVSnP=98eBV3LAs_M)^hw?%gwiJ3KNy*+Dj@UTlUM$1Pdi#%%PO(90;NqD zdl)f&((*toS6n}mW5=@pC6uusNAO6+Q9{p4xLOd&>c$xL!|@+%%F&CFXqjbuH%t1A z;wsu?DO7RRdrefRzx%cwdSi_-o*VR2 zi2BO#m|-hhSX1OR_o~Qtjym*InHF8C?>Bv_W3^A)S+-$48sX+1+KROpvSh+{J`s=8 zxT`^|cJTZL!Gu1Y^WpP*ti&^$crP z@9iVOn8{`nomwyf_7c*V%^+bUIEl{-_%u9f%`9IUzASHWb~ylQ*M7!=CK!C55JmVf zaMd$~f0`#zX`XDPSw3Femhe3)S?3TCs6E83XXnz~{1N{0Fhroy$E>{SvCUDgyQVwR ze^bO2lTEaE?Mm>^$#73tQdNnZ0pK9V-QR&~8-8uw7^v60M zzn~y|leSB42|Xs{aB^Nd<8fbD>0bZEUdDE%o7N0Z2JS#v&Yixcz9c0_KV(bhv?oZC zr0RdpepK$LhG)&%Rj+QPeg4os5G5CG(l6+Q_WA56n(E$+}tfVUKe$}<-s+%KvVmm$J$mRnXPmg%4Rj&!$}nUvBF9k zYa%6W3K~FNIPaMF{O1Kgq6*A&Pr0RrG!j9YR+WOj%Xq8eJyh!LOzD$VLL&xxOF7(2 z>Xx-LK$Z&7mD{l4GaDIYDbjwQY7E%Nbu|he--V4hZA}Q$#Uumaf zvX9ev;Tp)UQ=*2QEEa===#-2%60Er9rS3Q<>U)(s8ByS2 zbDX+2p@n%<7lCu=o{2-K+m~i8-(e4}W=6Xe+E-hoRAnM3BH+?VW@t3So`5&J6Gkoc z;Xo_lp!Y8Ro)B~eeEr((ZYRD866-Z!#xvY`L}5T8!ua6;S7#mQCON}TS5Zk+t6BQi zhc?nW>IHM^S+-V{wjM$@-Z#FuJR6kVt+gC^A?lZERfTdH#TlFHGqzeOpl4V!OJZlZ zjjXHm+{U+}eWyXe*lk8lTj-9ZDifj7s_xN6@0-2k4BiqXw|R?J8bLA%pC&`JUK~jspYVq4u=vANotn zZP0;{j3Ri^%7P&EPrL%Hcy%~=rq|uN#IE<|&G`lu!dJ1-{AGwqAz*?h0mxLDc3EPC6!dRpBN#-7x)SJhx(N1lFH#mCL$nh_}Ig5&PD5j+3D zw8Y=vd*-j)oq{j)6MZ9d(2qUK0!^Xbwal|!#nmp_ngxMt(9A38Vw>{2&9X8@1B9rMXJgh{ zM|5DlNr_}faxkIQiW{#5(jLoBL|KYkZ^1^btDk+YYEdfPs5O@+(|M<#G7iD79lxVL zXv@`)vi4+54-DW5tr>kFapW_;Za8C!#tcO~1Pl)K=cVo^67St)%68#1_B_C2urI^3fMY5b<{UZ6=!mahU=)^LXJ$YJJ98ahOc58mKO|J5jse)RU3?G20z(F*P+!?8Nu@ zeuY-6&iwiCZ?OywJ*nMuyx8B*y(8JQCrqh4(5T+oA2!o;ovej}KAgUTp!km@gY^k_ zC#4p(rP$~nFiRUcUjG^Zv&t!B&kXRKPZ_tbw))LqvWtKp=+boq+OHft+ND$UBL$+?OV!#y9hYw11Fs`w#GR-uSbsf^C9=q$f-Euk zgFRt^ldkCsLq(nr-pprB`Pb2f_}1ZDR%XC3XYejJm6Ty=LFjMxew6Hk&v8>BBc@il zb%@5;7TY{>=;C-^>7nM)uJ_nL$udQdOyo|BNl9D?F-tN95w@N0U}l3@_VFTdK3d}C z_ll~!@1Z=kWIBDiK#{wm*=I$z4Eqr-7zF)3{53DN-g$z+NFjG8Pa`vl!Hby%MHzMI zD;ttneb~!}y&>Sw?==Mp^XRqc2Q*~}?rJV%`XugRy|9as|0*{~ogqeZ{T z=`=g8&3=Mhm596P`1wO5`KTRULGF6dz%D(i`di;A*~eLNL(9@vM>jhRfIxup-ocM1 zxd_@gO)Il7ob1VhRHQN-&K$BSu4OgxwUKSnV8&P(5&Dx(!YWJCcaVarCeKz0Y2435ut3_gXvX5oz)RSyxe@FQdDPgj` zJuxLvr12B!H)Nb)?O{p4W+@N`^wf$~641%X?-gDu$J1yHUg;-9 zBN^@SW-~YfP)~W;j{{)LB|j-7Y5nxD%>nhg#mQ+rLLIo^NB)T)`mf5KkH8nlo63V; z#zPuLw&0|%QR{;#tC%7UubM?;D(eGUu(z-|m0+)pLA{$yyzoplbx}QCl#BW4v)u!+ zhkF>#-`eWvRgcA(ao8^b-xH+oh&uhVfU_q+8p_S}`h@A&_$yAz@`{sqB&)i*tRcP!QO< zgwD3hEeY1thy42JwA~TOy*y*}e^r-~A*;U^S;>%~FiUWA!V#}j!*|ngpEc|CDSP5P86R+a# z=}HqBQRSubWh4-cloOShdN2GW>37j<&qqNDykJVqYr1|ll7ru_(Gj}%YQp|Bcy_W; zg}(L0%ct(3pj%uMW_F{7l|pFWN!S9pcllpa9lO@}RMx|K7*(8V^8`={vH8m@<=oje zby4$fvc}Ik${)M-0Zj1Uswdk;)e}p0iPWwCZUdr{3DA{+L4zvHA+W-e=|62v3H`+R z1cc06y_q(u9T8vV6BhMbDR`crN~n^kfw4$N$w)Z(J)FNC)SAwhs`T%^0e;OkFSz=2 zqu9=Hy;p6{U;wBMRK>E-{#+M<=h_eavPFJ1_}SlPzs{%r zQMOZNQVcwNmzDtf0>MOBzVXk~i2;Q^oAG~VWZwSusq@i)l`lZs9{AoL;hzm8{M+l6 ziPIwV^zZ7?UyBpn=%F3azPA-c4`*)vvn5c4{O6Yb5xjqN>0h&&|Ck5SLHuLH|232R zk6-$65dWBm|DDal?i690+){^v&iO;qt8fSp@vE_(K6~F{Qeu!gR7-l;$Kl` z#^ql?N1Uiz^`{oi?9*9U-2FGM4bZp^8b@12S@n}pm4u7Fdm!SMqkg7lWJU1iPwJv? 
zSjRfSivKF(0h91Bwwa+&%x+pDYU) zkOBo9oUEf9zPESV3sE7kqK1j(g>ie*Be7*_WLduYVZ*um(r*D>gynovN9g1e<+Og3 z%U={faoVC`d=5zLI;tgBds7sD{*gC2zJEY=>w;PES7urY87YA5q;|?3aPTk%3})oG zC|&mz44JcxRw9gaCSF;+fDUYUcN>fn5Duiob((b;@;vhIXGPYvG%2B0D|*Z~?ww!i zv5uh0XVJ{P-ddk*DJyn^3{LgRI}!0Y*P=kAO0nS1`zb^mSP_K#0jUq;;Z}37EwaySnW4oRlNeE*Tu#Jx#VxvexCF%qgb#K#7|?y zpSA$^9eWY7euk0?C{MufJBCm9HUh#U0p^E{y_wlm+q(7FW6sH1t+j6{MF z54FTa&MJ!MEu^OlrqjU?wIl%JGeg$cg#^GL(w1%{%zkMOW=%xQ6B0l<0VNRw*I|L; z_5co^6wx5QL&zdvkN^|!vzwbJXgE_unnNF~RtlhjR#Ha2J0$$o=J5M^)g|{1V}^31A+uECW++fk1$hOfc;Ls5)rpIq#3T%a=fItXv!c2(RBK{~_Nd?3qRfAq= zV4X(I5-v`N3<*{MILKJ#>3k7~1K>bF$=BoSU^GB|we`_Vtv~e#l~1ufI+N5_Y|q28 zG3xKfkXoL^-oZg$mJ)v|LTMG~#_n}%Z@p_jt57duzfcm4D0%gl-tPNw(=wisz6!{4 zcz|RC&`9N3e&MG8Dwf>b+^NB><~k#vH>%s!`ZSf*1y1O;dm?(X zPNU?2SAHU%uN}WW0`~zJuhhs!Gh$xe-tpJJQ5XZ|%5|JK$L@d|p1!4c>z&~29@f_M zl`~V!Ud%|O9N=WI;IA|p{U9PZxYI-h7{ysWTv0#J!>*pw=B@Zay=M3UAc{CNvlcu0 zR3)>-@|hvfeJqeN6N#9od~bnB3w?b74U~N821q#JHTaS=5sw=ea8T&4c)GjO0Nsce zmNS{JuRSB6)9JC-SxZZ^qz*u7HJU1-q>?5domrcv5BCb)u>_cn2J($`52pc|D!}CB z&5Qp=+YEmHJ{tz}Ats3`2<%&3*4iwg7;A8OOqym^eyVDeh1cwa=tUNF{+$05vO*$V4 z^Zf9^>k>NDxBl&Vv=5imYWK|upfmYdnVLYC++1C2Swwdc!_zs6;mU9XkXRk=JtJ!| z$dzUZO&}G|H80i^?EKBPnPm;@c@5RrscGwU<9{55$_miKr&?$Ebw4!6X)HrZ6V4bC zF`LYDI-Vcoix{i`QB(_{Jd-1>0(K3dh|UY}umHN5XBSPr+W!ujX8((=Zlv@~YK4eG zBm{w=aN6PD_|-ffaMQmj7!bEfwSbegEV@2OB(6C*7QEl+t(b!ou;;rziztSc`M{a< z=37RkhV8YK08U`{@{e-U{~e#<|Ft1|*A4~qVt^8;L&Tc`==mU@8cqNPKR`yvt#ss7 zis&%J-i3)SC%T&{9X1zXdceQ?f->i-#L-t%c@rW|KK>_BZI@rMcBa1zy!R{;XUT=m zf7yym6W4h-6?Jzh>p1s52e4wS4d?EMh%e6q<{|^YptuIoxS9WHmcfvMEVmr$JP8H8 zs(ALsqo!Tbrj7d{%VvKYiu+I8u0(hwyY|vI%=GX%zIZZ?KYG|E>I_9 z-QyXRdJhJdLV5n_tL4)#^@zOWA@VU-ZoM?J?uP95-#`)izT&NLpN4KtIm6CD>*Y|M zw^68P8GXjWeo?;-NzvBCVN|W8N-`(QIE5tzX@kkK49WNUM#%3=XC1$K_Dc-c7`y_x NaMt`x@#$L+{twT!p=|&F literal 37725 zcmeFZ2UL@5w=Ep70wRilB27i5sfbFEu7U_EH38`YA_#;cRUm*BL5hWf03lh6c_-a`xRUoU&_`|WeSbMC$8A9vg`#>d!0+~!U4zE4?et~uv={M1#I80inw zqfjVDDAW%4D{bR$TKFZ;qe6p1?L#RmTtd4=PV~@4-dxzMo}KPo`c@Kbcocoi z`c;kg9Xo|j=ynyqkYjXLG&Fu56sSF}sLTARlEv>eE--U?wPd|-bM^a5Pv5DIfT>^g zksVppgR&({HcKA$#UEE?qa926Ds!wm_Q3a{P|Ak%+mPQr4EucGt5K*ldDKqicOEUx zA>?;I{r20)Zy%w5f5D@4l;_VcPtB@>@G>6l$Tg^aoy`f?!!J7iZcWyzCdoI#*O*4e zQVJ~^qHKPAGOr2w_UC1)QPK~VXMPqvn6CUwUobn%{HNl;G&ubGzA)%gR`Hmm$AVI!91jk6`c+we?K)J7~`$|Ng(| z0w13P?Ck8^+}!4-4N)>!-czRnbn8sR{6>vcu3fukY001Uw(`{76M?!IqIRlgg8wY> z&ej0>pt77sHSsj)<6;|EU`)5uZ*K7HMWK$$Qnb~yUo6_3^T28)$!baV>cF{Db<3%vo zuVHNL^M8?F`#yWtt3X}&9{u<>myG*`Ac0J`*?~)!F0~}8 z8~gh|KW>m+ok-G%50IPaDO_rh+w9ITxiZzAe?3&td37>3Q6;nn8AKFwfZVnji)dVX zvBy&0seG?>szLsJQQPiL*~!k#!D=3F8``#+pXH%mZHHvsDdXv-8%%1K>)wkJv1F3V z<6cRpk+V)CjoJ5F4kMptFJrOC(*nKb9dtd$F6G9%bEH=yW>t|F9p6VdFO0Rpr^`@B z;+tC112`{!vT7p~x=fx!-siKuzk%NV(;bJv76;>V`wC9{34N7Njb=ng?u4v`_bC>b< zN&kb>k_)d-TraZd@mie}vHdwWR9ome9fZAYWw&cDr-aMItq{LNwdmeVGhdQ-d0Q~t zl|=*h0rAn6#Odz*`Nr5QW^1u?FbytK-O1G)wib17MN~b$zC5a*YrDQkw)0+J=#bl3 z=2Z&fbfH%G9S}AuvfR-1HItTbaQgXW$7KDFEQ`TU8NC9bDT8@uQQhhz&ailn&@AUO zM@)G4*cX_UKV$sA5^}Ows>N-P=s{*vs$ETX?O_Rx1S@+ApIl$hJ7r#H;o32JFS!aN%gN&W|@E z?zMbe=}i{zb>kg&=2MHxJ`hTEUl_xXY<{4VHCa#HQV$;cWu0E^0$<-ripM#1!CLWD zZ|pORwBlV~U26FGN(|fOJlK$y>PGD z(lumdx^!~`W;t1YG9}>+L%kl4=gem%9vsWSEOk7c1DWdGZgg%uw-Y`;+_tOBML|Jf zs3x>{Sg=cAs>@}tD$sj#-L*JwiFT+q+&IrZ_VwE?6@x|BEC?+>O1(?5;iq()?^cEz zx!>!|FlmySsW_l^XaKO1otv)o{iduAXtlH4KNRz^) zYoD%%pCRF3>9bzLpG`K0urQF4@SKC2+jhXF`Gc~m4|-s-G4}DBJ!TUmOxE3-iK!Ts zP{9P16w^BaW~My~zVV!#tbyZgL<{m*>U_@qjx-zr&<@4cFtf z5>+t^&G{SJEwE4KTGZr{uZ?e)GuMcq!?@!`t;6t1H5p?Kf;O-`gpEr)T}(GO)^A3R zCSb2znhiJhTKV-xP0XfKJ=0WCN`#bds}j*We6&5?c(YyKt}Rtx=Y#>PZyRQsSYnq} 
z-n&al4YsthMf;;23^ry>afG=rhZxI{uw*gI#v@YZ5FKFqnNd2d8e<+7 zd@x9*FG5m@kI%tl0cUP7&3R<4g^Nyf@Zrh6(9cqZ9JTnP$~e(?wc68Cte~JPG~)%>cy;f-fnS za6j;{=ozD;k=Az}OH-3hYJ7T;c_p4JbNs>|KCHLt7$@`N`^u4hB45Vv9F;;%)Ffhs z_DC)D$T$orgq&dgM$dL?L^|hZI{^a1$8lHyLpO5G@3h^+3p|+r?HoqownitY9uRjw zo!A3WU=RB_bdvg|A%jq($vpMU)TD52@Aeek`QH(Cl7*w{nR@2G%6*yo43M?cGzXC= zYZA7&0o)O!=RVvL9^VXK6e6JI9-N;*xPMZ(-g%TjxjH)bm`V1nAVzR2Gt==-!1AHl z@ALTa``L3DzCO(U1b$S4Rbe>lIc#bf%Ly2TkJ!QvD)yjBz;-m3p<%8JJ1E z%nS%)z40>D*FudFhT4caM5m(LpJ|7EYe;q^ZjS?8LHF%X@tw8q>4Vq+JI6`PG;6jh zkfS&}cs3_nJLEgnEf^WEBWDxN`su<-!Mg-=%S4(a1oCmR320WhRb6Q}8*NT-o$ke2 zTr9N>eQ{8jFlFU^izt}p$HA%!F|iwR-^|PmEc0Fu3Cm&?$W%*HJ>v!^vB^|-%7_M8 z+d=f^`wLbdu7{^R^F91{7Zb@5vxxKx5q`F)2PJ&_9SJr#VUkf<8rby#J#RBDY{u`Hqt8$jdM?dvqqY| zb}Z&yI|(z#1xtCU8$km1$g9U!iRJ$7YI5t8Jcq$nZEYQ=+!lS3&AsvrS2+$bn-Q6+ z_w=m*PSkmw$6Dbgd%eAfn!dnoE*r)K(Bd469L;iVdu+~xR=zlBQ6G7cSp8PStZwtF z`RUPYjKOzz{JQ8wSB^D@s1+oV1V}_x%W*Zi_33vo1Pr(KK3sFT?n&yj*>uso>aeyl z@6~`q68160Rkt3B9Xs}gy;g66>Fr?ePmcQ~>SuI6Rj@O3)bQ8SQvoq~Y`#2%Vh(cTS%|8N&Mn%7LixQ~YB;}sf=2k^BKM9O zDO+7Ylp<%?2ft*VzLyxlCFKIx3ijkuRNAr0_oCKT9chy{zP%S6hp1QJJkB=MkuO}b zJOIl8|4OIVZIF@dHkuF~az1casZ_%#8=z!Yo=OEFMDjqJ_M{W!chVCM!vd$#XG}aB zkYuiwvf4u&^`MLZm^GcIxjoQnGJVCwbia9>l|JjzVz*<%sD}kWq~fJ(=~9lr>mX>8 zWf#0^i^yAxf`7ZBnrr*wBpEQF4Rh%Ha0BrMAyPL+p2i|O@etg-O?41gO>Vy9a8Oh( zX1VR7&1k3FK$>H9&jCKwJ5a88`~-+YOe5X|ydgZ5IWyKqe8mk*86xyDqtd&%9uqmu zYJf`jAuAWJO1|ES+EA3qB*eK5j=Srj|l*T&Y>1~l`=ntb#zjN3$WWax*&FEVP zl%z@(jd~Hsh`e~j;TWn~8G(r=4wwWUu#_z1*{5SO*v3acICqr-@RLf)9aV|Y(A?C3h*@_7{Mntbjl)WOqw zt;b>i2Czd!ISjMw_4}QLsliE+sc>EiiB7TRL^V59zwyDthpDc_rvM0M3gIjJ+y(>T z=5nptYL0WSE>31K^6Y4?$TJRtv9Azf=%nE&+M|4=w4SskJX0u})XpLM{w zJM6SRva$p*gbz0(#A*Z~5dR=yBlbQD>#)9Pt>_Bt^7O~0V;BNj@0?w45p1Rxp90+7 zMjB%wuMkr613ARC%U6@GdN05fP5#JIB3immK}2@G3!AZ~ZY(2TcNY_PchQ3^Rgn*> zc)cAiTtvVTqi>3~^He4y;T@{j%2fZ&__2(rLV0(Hc@;h0pr6h<8E0R zC!BtO9!VRsi5JK@);idWRr!LX;ktJ%58MHoLV4^o-P=>hv@$o;!X5~BtwYqFOpNxr z|8e}R?B?1$^^lug22=)i#S3Bc2c`g*$}M#}l6s(=H}40iDalZ})a&7bKI%xH9KdkmytB&9XfJAVe)78i>+i4|5_%}Ar31f2POJffzp7kXbs*w;*r`!m)ul@1 z(CYXP#u}#!YoNdwsTATfRZWQlyBH56@rYmP*2Fj3;rQiR;Zlh#sCR$ky@p`PEN&PiZwr>C zT8_7;#`9b_fyK?S{zihTkNnUu60o#66z>510{BD2d<<5gGg45#U&FxWx-qeu-JN z`~qN7-0yMCM0;=O?r_1I=K1YO$(xEa&d9x~J&Y;Qo@#SYuf&-2)?>t6R4-V5gOc<6 z#ofRL1ajqE-=&mkz=-cr4>e*`bXPPU9y)pBZGV8;M>V(D%TMbzy~>;-Apxd6v+5z8 zNxfn$2<<;F;UK@<| z0L;;MvdrstaJv-vhxx0iqF<;k=#%YL-UI7JVw@(111z>|Nr&X3y+iz|+A2Ep``soe z&PJ18M~%G^+e73}4TN#4JTrbGK5*cqf*X&!ty)HQW~$g_e2yr zzV5v_nX?`_V%C-KXdz`deCk5aigOzOq7=5x3s13@7IiU4o8RA|F_VXQem!Dh>MX2fc(^6?U2Rc1ZJ#aL^4E=!QnoJAH;DOz1B`BJn z?d7r>n3mgIX?UY1n;h)BRI$J<7%<-B2~=I+@Ni9i_sHj*1YazVluO;pa`JxirRTGc zJckdkuJ3!`T6QsXA6KsW&JDU@yOY2JT%OtFZ(QnejnDCd5c{@Wn!_vbC@p>%W{uE$zbYBL8EcJ2&Pk#OyC z4qtb023R@4ny+Cw?av}$DqU%1F#rfA&Nz&dlhYFz;)rFSibGY>x9!}cz6&9)O)KeH zrSbWI#H9kO=9 zvX(w!sn0I9$+3U1awT>e($IuksfsR;8a=>WrQWe*etjS}j zg=bx-3=&jW$AWp}O57K$hUxDldX3|_KfrRV|9*K0m}DTdzd`9a-ymm~*bQ6;DfYWy zh@?{g!(H6!H`;XHPW2R4yH?zt-aC@@!B#3F)gXWF6|WK?e<^p0CBm~DQD3=;BKQ3e zWNS~=UjnYE2WWRF&ajI0XVd|dk=1v401&p(uO-$b9*S>{dvlyMf(%i(4dBD%9)k~l z?u64qk9!=dxl|>`n&N%eezb>;{3NNzKGM$3a;k|5QG!hkpmcP-88Q%NvV$}Z$T3;-M6oHr zgBNc68v<|0#=xsg7YllHonYM%)k&mts-RpRbA){oc1o9d=-h9KPfGd^jfF)ZH-&x% z)^!|rG_$4LEmJ&PbKzFC$|Oh}L@WGheKDyHfziZkufDJg-Vio~DsOTFnCp-0%~EMh zJ}2veMnBWfi7nm%ba1k9=?0K+EkzF;F{REU*{cLGJ2L6Jo7eFP{7>tSv{b$9Uf~3s zl;U(aTjT0b&%@`>cefRY78{p(=_y)VeShKJIe4mw!rh!cymOK;`Y%yy()1MAfI#Ao zm_20V5aZ7yGy&dl7_55GK+nXabNj{Ii|-H!LQm!IMl4i4rR*EDE&otk`p&NFkClkCgm zB|#c;_P>ZEvrZsKd3w(Sozv`Fud`cXJ7^; z5x|t0V0_VY+1@4#ts(Ht%)moS)i 
z0G^IICU~dD&$C81rgb2AAv7ZJzD+yr65OYEWSfUR#|OV0Dl*P=mRAE+N5**!-7E~d zZQs{50~)pyZJQ%E3&+=(cKK~WD(GW|BjQ6hNIJ^90r~M z{@dPKf?Gu&!KnM4uB?1flhIWO8Far;00-`(jMoPKX_fl(7 zGrI)KkcnoFp`F-71t=)2V4-@HjEFgute)CD0;qE0)DW3$TlgJSR{9)~UO7!3r+JHv zu$Kz&l>L&jY4`Yv!mnbeJq82C%X{5<4uH+XlwY0a z&&>re$;f`LqB|t^imz&C6`fHJODF8lUmYmoLLt$UkzphcYU3!G@T4;htmr zhmOaXls^MrfC>`;p*8C0m1hvh!-S1{^Zll7c0$^LI&&az21a}j%8l-C>Erq~`}nJY zg6O1u)lYGIu=wpy+z-h$X)KharrEjnK1kfs3A|ri-!?D>}0CMKkx)z-0eFtG5mC0x@gUfcaxZW}0^3W!MT zzuBvlxo@=60z!p$f;5c4U7!kp3qeNZ$gBlQn=0eI0qRB` znKc;1whHlWN>{ITU48yj52)pS?~OfWgmqvWr73bx5p~`=bY{k$JaTW%d(Z=-SKczv zJHB$;&g#f+LvgOD!P9R)yz}#xApe&YV4wY8l+FQQhu|j-@d|ABf4Og`Hx%h~$RgOy z5*r)qovVisC8fN76Xtibyn6MDwEs`alLv^Tef(d_%dzo^vzvj_G${Fjf$GoET>pvL zV0&an##!>rU{2%TFL-$D_><2dgv-+&gKwf$w6q-QOJ##wDVzRl4zAs$Er)6xYSej% z#b@-gzoJkE0)Iwq+63CQCM6!Isna1EfQATEvJQmm+`y=+P4N14%Y4XC9Qg?s$$|s}HOLk{!88h%`!yyQ^qAlhe0}kFFweSUk@L2+(+M=jsj}meW)a0qqBlU z0k?eW!Uf8-HUqr7-!->{mXn)%1KtjCV+iWn3&4v1aT1epdL#15D4@-pq4tarxBJW6 zaXAJJYiBo|$&3Z8I-fs?$G`D#^7-4TCk=|+92O^jKqQm}@4!-@w>On?&++;Rb??30 z(n+acH*ongL$dtFb z&7k&l(3O2i&9ZDdhP{b9bn*V3b+AmNx&W+!xjfW=7Y}3{L}9q&68oQDfbzI`yOEs; z5}49EH^?xh8`Rp0H|_32p*r=^a(2vg_^g&v>jQ_5r33efqC2vG7czw8>QlO(B-GYN zW~+Ioe`F0u$#|e~fwgmVtJq;!6Z|w-!sGu+0-ggM5EmZa-Jkll%G(HjG ztNTidv3-yV^=4yzx#x^z0dMH_L;89@R2(N>9QIqxwp*eqzb4yZ)E;QA`@{8yfR#bp z(cjcm2|Ja1$i`OGd=A)y4N8le#>Z%2vFfF!A-bpe1%o~+sHqwu_Wd|B5Zg?Vz0Ztc zI()_#it3luj_oaDKX<1w-ZVDw`J#wFqCZ8jGBayt-Ti*ZqVm>hzw6$%j{;lVTf^pm zf9DuHw|PfA9lR{*x&-Ud+TwiOlEN{MwCQ7^!OnKd&f|Tf!GcMWTIapimMx+lTve;~ zIMlGPTg4XOzQ!u7-4ylMFV}^OKivM92&3GAI7+%fqrh2$QD8N_gi3&&>CXuwLfqYZ zim7qBF16Y)_e(&@O4E^mGPd$u{9#dVAR_}`!01s3bDoir0aAt?Yz*Wp&d6oS@_lEFDl&o0NWkb=6sYhSf_%xqzH0d-$5oc#$}00UT3|? zI)2~1HC106)HBsh;6SrGzDI~}0_~YI1!6*)!Ux{pDS1$L7wQ%#1wB6Oy*BInktmHs zAO#tx0EM*TKIC+C#CoNJ_2?R>Y zZ^`teNhS`l8$?UR)D0-~s)l@j_=iAZ+Bo+fW^Ki2_Ty@NZnD3xcJ_6`FUv&Bu<3)C z6-t!H)V}P z)p@*=vDshci;BD6Y-Tlg%cAqe6J!S$2I?)VG}$os3l8U@BEnZR zT`ZD*Fo(#Z`}LwT0dkY`OXPM!u(ty$2+&Te2$zNVJ*o9)(Zd`YS#5P_p9Mp}Jq_xZ)9e*&#%i%8&(69%y={cN^gBA?Df@%|0vmp!R&@XkFyR77v>B zh-O+z+OdqVrfT5x%w58I$3H*bH6EYfWu=t8f3=_szmxH>Wh!8{vA~uB|30W(9Z1Pc zNa?c#BC%yyuPe4?$Wl!9*V|njCJQ{l@J6`mhXTK z9=o1+m^!wY?3;BH*U@@VDbx=r^jP6_*i$W4OVfRuP{#IU$W_gAp+&9`i9R34B2OQz zeri<;f*3kJ&iA~b@dY8Bv?7p{aICgJJ|&cp2=n!#&Hjbd65^AVQtwSbyOv56w6XHS zdY)xdoZ!g@iW_XvT)V!sZj7(4LlG7)OJ-SuvR52Xi|yt& zzUXYmVhUK(Z1WC65GX8hg!dX zM>GxCn=&7i#7|I2bo)r(3pq5-ARA0%FpE-tLRZhO;SIanwHmD?BP22=CTH*p&6Uo!sysre z0a_qOm@1vr;6GK*vmNJVv)?FLUjV%s7dTwuPmPK1fqVkRipFYEQohTi9`+7|kr!;I zRUj$XJB%>BZ%!G=bdO_2w3VlmFRZfcN>)C8AuQ@0$cdYuZM48dXbw6uNonxqQx2AO zKM!82b-PMi`-<1sTlf}0KihcWvjA9yS(RtRt_d=ZTE8o~BZ)AH60j85&EWj3!2F zFWd#?Sa6|YXt<;|f8gX7WhZa|s8cb^X!~dKMJ=jT6OZT4n022 zVy)$Vy7=WezmgZ^M-kIYwM!;CaqZzALl2=Su(DNGZv^{_4M{iC^l9^5P({$I&01|0 zEEhvsfW)_OJuQZ$D#7QnhPd;x$z^Vi>!Uq|xr@b+v1{C@N@+|{AIHCrt6OW87^A13 zo>L3)b9dfxb~yQ199E&!#gOOd4%W6sZ|W#tB_IB+dNe+NT_#|7&?(V(si0nQ!{16{ zz}SY;a8o$ZRed#0f}nNUT_1!*tSv>P@o00rZ1&>GOv%mH!p5>UTm88EqKEU=vM{q+ z!9XenW;XbT*HlpxS#Y4aCEJu2_D`sXw7szH%2wW_MEX&c0A3t-R(3YTlbd&4U59*C zqLK%KMysIYy&N#7M)`(a5Id_W_Qyka|(>~fjWQJ!BWwiQdO=F$Rs7VqPN zp-4g5Tc7xhO8F40)TV4>+7y2AsS$2i2&mMkF^0Gj2KR7t^t&~Pq?i}Ap+e!aA)+>x zt??xak~Fs#)CTj@)I_ROHMo?63EbrSVRrQXgoWh(l<*)gXUa`=Z_4R>AfXqNOTGx^ zS_=mZ>zQMmS;QuSo}YW>q7@pAmvZg8U@dG%a%5G)h-YEBPT+Rv>_(Th=UT9?qw7wA z>0)^6FoThdbG^+?M3 zKGJbA$=^~qxI?NrUWsX1*WW_r=Gf~;S`?ioJwp1}+vT(*j}|$Rw)1?dkx982-^-gN zEEyQ-RyI%1Jt2)?mLgvHiTkal7CIgxjqNjCm98F|H#(@GZHjc?zJ9) zh{m%972LSf#W-r>hk~R}<{`T79TRq|3~Z;8(eESCg_j^auD5%o*WYuo%AReWlW`pc*sA($s? 
z7Fc(j;qvU8)-l*yxL7Gx6#OhGm>Y2n4NF)mntuRfbvSx)s)u86I(u-Vw@1O7vfO2z zo@^P`enlC>T6&0~N&M2q?*hh3bY+B!u}N*NWk4+}0IFDuinU(O5sqmAJDFWTT?G6k z#4P0~di|#sopGqAxee>l;?2p5Yr{U-wOrC6FIw4$WLTv!VU$^sj8aw|btjb{RgRR9 zkjOjSUEYP`QKj0Q$0crLhU<;kIRCO)R+&sp1+q{wC5#11voC6w?6)s~GRY zJVc8qn2Qg)tLh)t#8i02w)(3I+yi?<{oMs;os@a1SX25NnnME?FEYj zGgdQJADs6dN;+cv081dL<2r}h?H9G{NrQ7M8cogYmV#GZe8xux3y4gJ8m}rSv)E}S zD5Yeashf3|pcl*~C8lyu=IWwFZr2dx7P0`fBe7H!k#6dFepq+0n&MlIfr&{ZO0AVK zw>#Zf4u2{m-kYPL0Vm}=_G?|d(d`+%W0(7CoWjTmJU(IF%GJpp;eULTyW{1nuT@@1 z=mrVZaKA0s9Krt5)B?8JA7T9y!DiqrNTeU8E{uZ{JQn?Ny3hX=@Esuk`G4}IKk5I1 z@$iL=c(rvP@&)TVc?NT^oKOi7~sd`Ia$> z`YhF;80H7c4R>DZT=JlIie;B=CfR-!R|Aj+d7=G>+A^BSkkpJd2NO z;w}Vc*}a9mEcyhD4l64#geFQv(%?2SgF5D2>ul3KTAZj|%g4aiQ8N2q?%PoIc0#ir znqZw%c`a!iquX1`wQ$MqWth%`b~n`vBe={K*1Cun)h^ejy z_|~~GER=RwRy~0wDB!3avAP7d4SW1~!VEA9ksbFHMql?P!*V8=x!7D(dbWq%X;JCn z3`kI8KI_ie#a^9D)$*CT(H3}VDr7ibD9OHjp`^F{7XKrGw+V1d9oM%}x^==-<|Z9g;ea?En1C;On+xcm=%b19BB z&iU8E*rJSRlz%~s8vbOXTmz)hdxMhIvF9zDs?&qMEUhBWac4~>a{iq3)L*m)KZ(b3 zR6o4rB9y0^p=>yT)}?F1z1M2$sP2RUU4yCsktiA+jAwhe9xf+v+RoDK zgD_<*cwNvKaQWoNlHe|(f&~(xK7Q^=wo@A8exqyz{TGud3z) zwOQ?OirU$Ym$HNiv14H+lH^y0lVJ;|J;Cj?I2s=;OL&}oU{^^YfvaOmDx&Q=V$kr{ zcS)OG5+aqSrOz#v$X1=N3ub;_%#G1kFH@H>jy5^ry=vQa<-?BbI`jMOQD`;<1;k=GjYi&8=sI?RHmsKs0jJX{~z>(bf> zhRjx?j+%+G?ltG45#bLnfx=63`}mR#mqtoHO(t!ypwkR=BbxnQ1CqE{JL2Rw5Iz4c z1)cm_R=R$qMpkwwwe?3+N2GI3SX_8<;Je_4EWh>n!L_Wmi^5nz()gP5m>`(Uslz2^p7zD+ou z{wb?hcyQ$wfk;*8nbD);nGUbVjW8w4n9i;<&Uo+g@C@Dm0lJflmvO<%!aOEo_jcaC zzTTvB0T=y31j{YwWzfb9Gwzky2ZbpG+q@AaCdrzfZ3`lW?eY#0No@P{==exed{+y2 zX%(Xr#C4SV!FAqbXE*p*`x{;=S4Ng_qe_CBNw`4`B7W&$6!vln(~)yl{RtTf7#aWB zPO4X_Ej45Ot#y|BXr6IORE+6(u#2(oS0i1*k;L4xi=!{EB~Ne;)fY(W#-G0ln5BuG z=BelOu@B|d)%{>+QW%xBDtGeFrO;N3=&`u}mxK}Sl`vosu z_m-#d@s~W-L%fz{-tcGHG+G5WnbW~?D!y8dwH%RE!4Yzr`Yr}E4K`sT=_+>#&RH2V zMWEeLo1rKg3A(+HcA#eGAf+NkNpRNnUUB&b+5p(c;-7O2I`1PK@(3122g?kx*s0W-L15b*Iqhb~4}z|22qX<;Q}5-UyKH^v z;NwwrM}Z~XwrejGCwJhE!S!{oD?8cIxNml_8hYD`E zJ_U3qpq{jNNz;nwKwca|E$b*40pSTG{;`tnl8|y2Mc_8-wH}Pb`m*?8zKCwh14RqG zOs~Y5NSJhh5|9S?++RBtEf2f^vTU>eF_URz1bi4m=;~*5_RNMWzd4Bm$_2Pnb*AD( zp>AN9G(YP*B2&TpyPIhRtQex!?T~AR!JyFxrd%MC$Kfl^qh8MZp@jSty9UNtJR&(9 zA3&w3CsqUFCm=~M;W^E&@Z5G%t9vnr3^`UiZQs(W? zkAQ(B=f6+WLzIU&-gnRn1wdjwr^Co`FCuoLj%)fYGh5yR(<^L(x#0%T!PooyZ%^yO zjG=oz!6$%@w*j(lQF-E6HfWA*r&8~C7`g_spTGL5?cFMH&^oh?dKyNmWKJ=gTkts? 
z7c6I>JA<#*|!J=2{g#c1>zD!Z0MMbGz*UD}WubQobmL-LD-dk}tKE?v%>*X8o)#fUj1)gF zgSk)2b2;Pnhm%3vGE|sN>%6n0V zENB6u*|xI|94Md33Q`h+ufc%QCEMoE_@%xZ-0K&xU9BYN_wvC|J0SW4YDSozfgy`g z&WHgYV)vK=QUaJoZ?+lk(A%NI;e+HmSaa7`NPy3)w5BmRT9-5)wr}_^CnrWV4}w$> zXM+Xg*!30SXQeZF&i28Ma7 zjbkM9{!mSQ2*_lqM#XwkL>I_#Y-bEc>biX>E-^%^{n%y(47jjS1*_yU^K`Apflx0f!`MMU>O?O zV3iQ}KF>-M{0aFdm8(_~0PGn%WQ84hbsZ@+08w4x5VviOU+0ztW55{JD++KA+!-CKDeJo_Wa|j3ikqIZ#&%PaH-i5R#Fjk@~yFmZJZLAEtBLPC{R`#PL zdIDk$G%UINlpYMeH!LkJ)z#G@w1RkY*!;0dt&9C#E+UYvzo7ur*l|l=tH^UAvl_fw z#0nzXhT+R#b+C@XDZ_$nLvZLPkUZVNGX_OB(gRrK_`n6Ewh(Cs7XE#{T<5@ms^z=D z46JU=8@^z_;KQO;NoCe#{~UN@0jJpYzqo)bY~{V-s#4HCkzU6oJrHxU=;RRm!|9{(u(gB z5ssqiEdFsa!q<-vqs7E2VL#C+g1Nas0jVRfV{BCV##&FeUP*%T8$^)|z)oSg0neaN zuj&0gX6NCb)|Ll@s~m9);7%f9hTa0LAesW&v&Et8AC4yeBhDK$cY6HkNr(K-+mjp)<>wDih>ptVZba&;7S9F9JI_F ze1Y`l?kmp$dsEq_Ee_=cU;r%q#-Mej>gNIx2WIAO!9QAJrnc?*vn599oFlOT>s4aDp%7l4n*UxpC7^%IS+p+W%v zbzOw`Gyu=70f9vcZK_eY#UIe>07f4i^k0JcL*?k7E0%wW2C=4cLjVGivL4P};6VAT zf=7I##|khTYVC|Vtv#&xX)rE8ukL%Pqo7~{>bWexy&q&cZh&l|Iz~s=X?-FT|L}?k zZh1wZQ*&~;0R&?(793>w5Bv}cC|P$(5;sVne|R0}2HS&ai@^!C)yxB~g{>AK6mwN- zItmu7N=_euBj_cEp@pB@H~z8+==1ng2lH$N7Sy%d5ASmvN1-lXKC}jJKYj#I=n?h5 zJxtKwj(0esuJCtr3(Tv0%QtTBn%n6Mf9v9cVf)X#7*c;bM&rNJ9k$uG%nj3Z#*1_9$0ejN`RB7kX)YyZUc|XvRba+@MW##VCv|BMfj(QSwZ0m)}H4! zK;L~^p3Y3L7m$&0Kp`EaTYmvV>c2ao>q!4!9@&$~m49Ban*||1;Q#2JZ5fI&gg-mG z|3dG7z& z&@@2R3JRS&muTQRI%KYPvjBse!-xREnZf>sy?;Z5@SgwnthB0s0(#58UEm|Hkz<8{ zX?=NnYei@z(Er2d4sYIamG9#}cI=qV<5ie`pC^B~$$>TL+KCJU;z<85X1yf690Fk7 zYiGAsM3Uyyf4IZp8@HxpHw(-o=`#)%iu~oRjamtH6ZSDuT&zhfXKnrE&+&lw{B?*N zoC3$jc9+Aei0%EqDMQeydTnso@k3iH0-gM&zX2*wD4@0=ciJD11ieG#e%Ug3;po=d z{U2{f`LcXHiq)r7#NBUC6jDqp;#f?>uFmQqBMep1lP6IBjEVdR+f*xsB2t6ReuFLN zn)hE^z-INSyZKgO8-`DOeLd7V5-2fX|=l?u(TVN94}5}#*P{q1H^2lgc(qb(AzII>i|doZE@(R0Odc*0r)uJYE?ADzFe4*Nhy zBpga%at@X}%$J6`4<-QfHW2%8YZ%0)fh+^@U&>=KWov?uWF#*VDG|^|+{re>`p2YS zJNLiyQT`qj;f|3N_wublp8ZcCPvF=^QIwDVX)v&XRt+Rp4vmk(u@Rs};W(dd35X_n z2XqcN5N;=|GO)hS0SN?*+KFx3uAPES%ZwbZLF|&J?(iv#-Qx+Cd{6{|GTZYS{y0wI zdI2Yd(I|R{t?1JGC_L%=TMUOg*@`Ie`>!4J-=f7|k@0`!1#m<5zHqbYx>>)0&qu1z zH0Y;!NkJ4DaB9{73ktvB;16SHo(Ae1;tMJdGMYTII0#^kjg8>BONQ=m6PdJInU|H9 zK_5Xr#P%|LNX5-4Y29Gp7ZIfhv?T2`gLfikSBHLrDEIb>!(^8zL^+=_$Q>^$zT>5I zf`RdbxQRcE918# z`iwg#BWo5&e@o;v!&YtO70_V7W@7_q8-ZsWu_cpTASj@0pB@opMKQksj-71%b`a^o zfT%h_()l~(YDwx3}pd$i~?CZRrIcp$`*yiEz zkR)(XjzT9e@s*sn7uEzQ`~?}%=HmT;+3SPDKMupTfG$uY%4E_$;K`Ho^5yDL_Se;E z;E|o-cnUYKCB()IXLb_7>QY0jztso^8SN2}6Dq-F37v6381It7ji=^DwuS1!*$hs= z{DQGWJLQ%LaP#lMDtv5EfJO@8(@xiZ@s$!BF;FFSIL$sF81~_kPdRVJo0mlY=M$)$~A+*wio(v z7WJ}K=9j}j1zp?2QV;<_tV{eO~`Qq7Uo5?Zd!b)d~d1so7?dJ6mCPDEn;bVqq;7?KReC&wKq!a%;h zGhobu6%DK2h4cpYHR#9p8%$u8qCTH!M7{n5rx|sCU;xJxz*RX~h`k5897K_WAfziz z29x{3DC$#~Ls8JM6q5O=NCuQ6ch8?s4+acVEjU7j&`B}XFu?qD)>1Xa@Q}a}cGM!A z-}arfXMH~0!1O_{N0plwodN6`uxKC$@@!1$Xi+-B0y&r;(F7+kHiMG}2>S)_oWo|5 zCH?rId_X2mFQ>+!bVE0L2m-CJ?x$aAg;Q402t!{91EC914V>9`;d}NS%G-Weo_Pt} zFJO}^go%Kx?&AM?Y&hE6DF42B4s_v4Sk!Yq#Yn}47A>^vrx!22&vO5CM?;N;4SN3I zSiCO1AkK@h^zZQ9ZHU6b>6VDicfk#8Kku92U_9tSlnnhfLPpkHX#fC}YZo@YrJjWI z|31MchB;vo(mn<8py{B*&?`uY-fz*uz~_5!Zom@zh@774!+-!{zum02QE4w>4vukW zo_^U>zGenV5+uD+_+La^7->; zXk}K0P($PM;RR~X5dkeKa_$TCfP#$22v>l^s+p&b`Gm^Lhcw4c08Jb>B&-Xr!xKXz zfoF5H%KGQl=$qS6$_F77YP!JchThhSR~xtI9`8pfbA-$P^WDMC{7-~*1eqV-Kim+F zcsl^nAtB2Yf+?y6mR-y(WJ;jN35~S-T-fv{aL?)u!I?IK5a?igGO!AUz|6qvK+aG_ z;_&8+>oGKC7p&WF&@__(ibQ~~6m%zYBEaZEm&x~sAvTF_`b$ma@fy{p>*b6=)BM+0 zyw%WEkY0Q%)3g%$reMvNf_Mh!{{Tco^-BVhddnB(_T?UOKF$f1kP(p4&YOdQ{Xlb~j*U@z_C15Gmt1aEcqmxL5p7ug3O z+8aTZfu5yfu%98qBPTLo__;+)pM*8Ne$UL!J!SK``jlORd*|Yxf#3RpOvz%GHJtPG 
zykrgh8w+m%G+ER@Z%#L~RCr9~S1*lU9rc8Ov~U5M`hL`dnE*s;r{8t#Inc;cjrcAX z6)bkaxnxOxh>cb-UMaY}3i`=-$lNr$_knukFFTu>+j|bxe;6Rc^Ec4mJqwu<yh z^;IdC2?;o$;HESLI8f&Fa&2*BY%cc4rxPIUf`elWaum2YcI`Rz681ev;`}^ar|dTj6`-H88F?B;G)i&JUZYC-6lzCaNKVB z)G`*M+=aFHMW*29ts3b?f*eag;E>btUA} zJ?c#flYY9(SrL+JDNl7p#?2=7EghvTd-2-ZoqDG-nwu%GJlet+p@Wq2F*+mC_W?k+ z-CZAk3{RQI%sC{~ZSUk$@)>7Fu6R)e1ywH2GJ3s$zYktmLo2M#!PE?yiXe2CX<`_r zgul)f4g>E-C-IysAC(iGVUeUzG~LVO#~hq}HJC>uE5u+gLf?%AH2<00TO83Svp9qa zPio~8AMYCKt$Fa&m3n6lrhlcvzrPFnD6ASI_naqLA+bKi-+xo*%f#OoH;&oE_+sRu zrUi8#-aoDo%c1jS$?QBY$v7XG_zbE*QhHXOaCI@yGiMnydE#(BhAu>nYI?kYB6<+d zMvDppnzZau+-*GE%gm0Jc7FHn5#>;E?w4?yvmWh0so@(e#3UnjF4A0O#f2}I&oEAB zc=Alnf;C4-6c$_<$#OjSYyd(FU&|0bt(y=?8=|!f*wgUna1Y#BoREp}0rt7nWIc^j z3?22_|Mcn~&s4W-9?rx0Lz5NC+E73K%bDE~p){-MNP8X4-Gh2L!|2(mYqyqTLSD%a z4McXKOa3!kiS-;SWVZrTj2pvlS-zoie|rdi=D&T5J%|IzH3JdjYJZHDz^SvXDy1!F z8g0r@_^2YzUY9IyNHXJ635q}9$-?ycp7`J3~_=&w=f9T4!IFh zf*FJ`%$cN&%nUTier=Zxg(1l{mu(i$9bq9roHHxF;>V}QK9}MY5aYS##D_d@fL1V6 z>jnf9oJF{iZ(yw46QcA-EUFy51wKe*`*q)uOl0ZATR6;Kn@kMMp$4@&dIi(F9||dO z1&BR+Knzo{;-sW;bab5LP0xO01W|&4j1|Zxo;O7o<34BYEv(0MxrYrpWa9qi-~@ zlHeC&bl6O)OqvP=r{PB$xX^?#`zgXx2AYU{`QeP@zqkOG$qzV}>S>1JFqk2^bm=r6 zs}!Z|4gUAe3;zXC_MdJ4%RrfMngDVDQwUJGAAQepX0cs}%HM7UxRQhC#UC43p2dS$NTEdEHHD*tyb4@vFOU~_Y2^h1EdZAhXJAhMRhQD<&!{}gg+(AvChtW4zt553GnDT_@B)Y6 zjJkl9>Yf2O2vB}VHfA{gc#d+lL9QYeZJS^YkadZI@A!nOX>v`bv=^f>ep*`qg+-`& zsX{4!4fwyv$#~U&-us}4qc*oeA=$)8K3X`xN~)3IY4V@{l|%nOxXU1dsJ``(DzNxQ zHlCZD1>(b1TH0HY{PG9fRuHU1klG)+ zeFR3fK{9*nKHwLPbPI(oQCWJ?ms0=XzHGB1xLxY|ejd#g4I5uetJlXMz{eAaQt`8P zGtxiZ+Y95g#dGds`IyD=vTQ{>xPANfnT6Tz%^bEUB%^dB425#|FBh~JyQ)v3SGhGpPP&)`cltsG4)`KW7-MDb# zwZ&{Q5(pfay9K4MghQRK-FWsxXJ{l>M9l{cCT2sjBe_QU;H0c%x^cc0{~Yl zDEdc3ZT7Ei0L}kwp@wl9qqPl5J!CpDT>-RRR@jTq_zx> z+IZvAh1CZNuL6}QM@qafL1g$IEr>tgqDz!_g*EoXg-(AidhX{%Kk>MQ#HHpWjkk`e z+O4BI#BZpNAU#|O249fmotz!Cp=B@g$s{jHfRX5)vnkr?2-;)tajb82 zusfgq4wDd504jjIDDX6sa&3vI7a*34Wg~|~5vY2wOVJMI5Pdy~9Wicrx(GuMwXD_> zE-l`^Wr(mHDp0@mZyF%3xvir^8s^Oj&s*sIoBvT$^$KmGnps>-HfT+q-chm}d9ws{}Vm&Dqn4RL>tL6WZ+#C6#A$C>%#+P(ZO zCz%ft>M)-8$k%*uG!Fwwt0k^RIUA-yr||%l4D;C;w@pp(1% zBh`GrpUJ_w7DFM6w)gs_8yu0Cn0Yma8)NUEj5bI?{_xIoFjzgTW#|M7mz&PlA=UgN zXds^TW;KBoh1THdn}Wb*ixVr!B*E1r4~e?K0MBQCUefb-N#R$lq?s`?|E=6B=b#cGeJjmSmgj;d*_Kh6|E+-A+PiiREqEFteXa(rG`7lCP{~JpOU5T&oifJ26;A`t19&&fZq{1!u3ptG>l}=qrV||;Kxz*bb--mq z@BlS-;UN^M2QJCM7)W>7D8c2&Q;@Fpk~%Iws}(Qg&^?H7Uz zK!Tii#wZuWr)MukJz#x=;$I-c#I{FjzS~fT0~?gWGrsA@^oVFftF|+0D6uc%E6&bqXrJ`!%_@ zCrKwt9c}2rkz)tEXf)j+%dR<%A%mdnCcii|xpCc|SK*cyB4*W>Bbv1Hy=cr8aKl;7 zb{EvjZ|1hDG6nSRz+MBhq!LRpe86i{#C;X1QsIFJ_at5He2A1lbqxcC{?DbQ8Oh39 zNgw!a&PW&d_m!E@mz}?p94!ty<^*|HeMbfJmEKnKSWmn^WamqT&+GI z(Q@4lCgY~Z)0XOfot4UF1w9us_mhlrJJW^WL_5N+JNT=m(HQpPuxIZfRJyR_T>qY8 zeJHJqWt|@cG)4!SBZX?L>CQCs^X@~l#HVK?O^aU3Gxgt}P}+BNC<9ws^tKwtc0D^m zJEd9i6%af@)H01-tEL#dALws%aFR=2UEx5+j3o^Qj$O%;WSr$NMYpwyja}3T=<7oh z;*`vo7Lp2BuP}VlToyyj;N{GzM05ZY%Un>;Nks*`zjMBw68ZwI8eXT4ug{?Nj!B6< zjq;soJ*v)9=NT(;jTae!)pNW_*V1Pe|HANTly1U-2_Q374~5m4FaSp-pRDlK`yMIE z6hpImfrTzJ&#O~@BIu`TzIZaW#I;7WM^F#;N%pS4lG;-4rJk{h9CS3VYUu=q>>mIV z(X7E{c3dk~>p&N>I@ipHPx-d-uV(?Ov9akyPyVSu0b|_skE3+}%MpeuqB~TD)~XUFdx65}DoF1+LHpA86mXbEvAQ4Di1f(0 zDXvg-Oa)OJ^k8VYRf7+?<|ay}>gsCNr5IsvRPVg+lfbP7B;APGky>Eq6DmRm)iC0K z#6Xq__#~@@AEkI0v2_xXRrx~+ZwpL!hru1!R@xL*b~>K^s=96tH$UlpQpb)SUh^>E zDvf$S2}RyP|2S+0>%QGq-VS=izE#SvL&lNpGxf#Vq*F=4(KbO zf2Z2+fYhA6*O*x2AeEdxo=9K@iV+AG0&h`&x%-b|;QxD;0FSDSnKZ>4 zpNLqqGcgIBFQa;Ro*y4;MGvACsl0A^(;af>97XT_M$xriZ}~$wN0zNAkN+f;5O%SV z8c!;UW+R{eRM8O4kgh5C;#qP4j^tCO?lE4H2MPgKYQBC`E~wx0uNQ)JB{tciPy 
z*6>wiCyAW6vJ`2{PVe{dn-j*XTPueGG*u0iH7Rot$p9}fwACC$+NX~@(R(G$9u(Td zhU>tr=tnseaXTrvd{-k7r8-ZeLyBE}sg<~!5&0-@a(yu_#LbhO4##$Ut9v=2nwBr3 zI8Y`8_YJ#I3%j3RP8{DC5-#D=fmg7T0?MXrnMJ_38ue?-Z}2?;<(6WhXZmV`-_|c; z9&jpQ0PLQFCX_Jwmcwv-CO>wT+)K9weA&AY-t*=dZXNfr(TI@?0Q)`f{ACTh<1@4z zrW-HG;*#R+rk)1tf`em)pWft;d!oOKi>vzbe#Pm}k}lJ!GTpX#aC%^6kN?DQ{yRYI zZQl_YHaH)!L!L%LVfHE3x(Sz6?`;QNF-QcUyV8q|A(|B;*T4c2Lr5DQp1<~^&IzNA z@N?i?Ehl^X4=Rv|IBcL0tJ1gfThZ)t+XmAOeln_}ihCe-i z-HPK%`wg;|k!H*17OF4SI6C52zqr#Cky9nV8I8*&VKkG-Lnd8G{LBBn7)0(HCNxn< zbi-Q=yv6YCJ9j3&*SwK(|2yEvoEf-|gMv)Sr<(0V51AiFX zk=AHU=v1n*{fmjs{}U&P2_f+QQac7V?vrGp_Rq3eM@X&z8&1&!t3CC{Fba%;GuD1(xq$!8zpgs+)Uqwyu)|xlqV-U8=cBDd`#wIq=)vS(gH6QG{iE59$KxJmgCYaa6rphlU=e*YbON*T zV;K`9;i&w+m7Ed40Sq`1g=V=AUaZJPBvEKT+Q`%*C^^`gARcu8Zh^>T!WkW8ZvMCX z&$_{R;@OMDjexZYfQ9)j2#!SBh1YOZ4R7(PAl_qC6@ppkU6;vdTq{C#;lD{Gl_s;R z232t0M_Lg>p6-!luuXOuuh>ScS`eu7xDlK3w*T#tc>!aLbyLq+DiCPiy#qN}25|#p z51+<~cS=)z``SkODSGvwoP6FcJp5DyleQvL1nnif)E}m^qe)}=huNp|Qihx>Rxw^$ zq()Y3DvWhulsuq%3~0(!{Pn7oge4f-{7n0=XmT)SRMYi?)u# z9PUN-N3uu|kIHimg%`)2Wx-{^5bun|j(&dBxR{CZyVd2pz$Ji3(R(#b}} z!CCtGeAGPjsI2ruG0BWBr1dpARuqkJp{_;!-HoV-pRR z_H+b5%<&nt6q-Hn@lg~aHH~B_>3bKFhl^`L8fy^xU>UBe^I?p-%be}~Ny>F0LosAYxSxQ{5A_|su}SZCxDt^?^rfE0k-X##GU_Zxk|X3Sv-z3_td)Zsit zk>lv#q5F*bjT@{+q*{STC@9lYcW$L4$^9at(;ip3_*@@6^+$h~68gLj42y!>7 z;^Vt_pR~nbW)7CpR9J8rxPT9ua)Ln~$0OYTS#8*UHbLj(vS7zK)t;=K9u?BOYYv@5 zavV{L(6byWD>JeB*6>4V!R8NlJr4+1KFBdqJE8{!Y;%@gEm5Ui#qDA+FT5R>+oWRk z<7?wj2H84g?taYk*xQNjJOWEHoQqU#AW;L>jU)bov_DY;CwC=;$MueQ^ISAf9eijr zo1f+#g20hOt*!UKkmErb>Pl{X?wdSEx2v#gKIs5rp)X5%2Kqk|F03sN`Uk+Q2TixM zVZHKTfr2KHPchu~{)+8Pb1-~p)?{y`GO-`xUX9drd%#R*7p?n?-9eb1N()^Uip#%f2Phww}1eUU8S%A@^dx*HY z9eWuKD4vSPqgc{1SxcOU3^ra*u*RxA9Oe8a^zICpUFS8E7*qW`C8FjwC4EV2S~cBx zBad#ShCpMkEHX&QCq5^!JnHVr``(vfl)wcQ!_%ZManG(KM`!{eGPx5Y{?+Q@m?kx| zgwtN&ql^#(3vkyr!Xa4jc69K59mCE%81&e>jd!&Svv9EaWaF~fV#iW~53C){Bgv)* z){llPD%`5_XZETU?m@@Fe8f~+yA82rIM?_^azT1T^1^`r@Qdh}f^>%4bkC6rRah$&DYxHfM`Pk#Po30s2|)ZJn8C4t;|xW!w2a2*{>w8n-7&mTi-X z^YMzOF7Ju;9%<4*tRgU%KLE8}(gv$bDs_Ns#tN4VbFus?w-ZA`h7WZF{>oY$rE#vK6M5VBFL zee=5ppl@mrq4QeYUyhH4-W+t|ptf}p!8)ajk>nn%OBa=>Hxd;90juWd_#1IAfoBMp z_&5}m_)#>SI~Qn^dck2sR^_GOuJDpa!VPo;*%+4w_bzx0;^Ak~k40ao8f`VI4@+PU zo{c_N!BzpgXrqM)hpx|sVf}vW8vTZ7&+Bf~sDPVx?6q{Hh#CM>cgbLW16_j1HKSgt z1z9i`cVLsRaTMKmL`!uCUbQ%%h%1Wsfz332p6y9aE4ii+owQ0cOk;^+p=L7%D?5jM zqn}GZCuVe?g_lRS%Z!1>EViianeambQEs{HeBnhz#^t7c8V5B7-eE~gQWru#9-vK8GB9&BJaBqd|Y_Lb1*u;O-* z@I+T}uMbujEyWl;QMCdHAyd(C3?tLUQ*ZE)I~c>`hA7KWX2)&`9{!bnR{(2WfND@3-~F&n`PB>?c&WFT*H5G6liH^q+3 zqOZ7}u_YuTy4AKo^3YnD9p#!7sah1ebT z`$-vu`%!zRk(#QL88=uq*oTr1eU|>k&Xdk284RR6{F}^T^JJZNKvkshfvjZ32>8^s z>6lYw)st^GF}t<)KG0%5!`zJQiCv+ktPOr;zWY^qE|x+!)^R0b-=jkpUL$BHs?knM zOP&0^fAIrF!$wp66mxJ-B_L zp4s%|By&3Ur16e?*BSqTKYwq<)k`X2sclv2Oe|6;aB_C2=+H7BPwtJ+ zzs;bcb&$Tam;ZQK@NOiSLZ&0Isk(V94y6N8u0+*}eyiYPKYupWPvT_{H%q+@@v!$D#87svEl}{By zqkWU7L6v^2MX_DiK~0sj=~J?WX@6Pl$@-PqP16@c+l48qotFveB*dsi^~X=Q;RxrO z52lu@4zeZi(OroR-hIv6Gp?FDmRr@Bv4yY3d&f07PH$!tV`Z79+LVEpdk=eR?^0kn zrp|DYT{om9r%|16%2^_H=1rfC_~@JL;|nF0)2>T4lBx4^`koD6|D2haC>R~mRM^8) z<{cIOssGeVCK<}#{{#u#qI(&|<^A_Rs{77TYyQXAYFCl(|M#ycJeG1>qm7^9=7z_W zBtb!-7h_kx`fJ&p{1S!4uV`}V=;&c-wGFE58$UuWe#2QjL&Wap-84|FR(Z}+hod_3F8PR8Jy?16Ouw`Z z#|`;%q2mQlbKyDYeds#Y4AtnuIKpY~rvs?RP}J}j4hwV}UaQ%=&A@JR`k~IQt8eH@ zU4meC%(`2wJ8;9>T4=~K-5G7Aj-?#q_PT`mic=>~YPVw}-0L@mqXoSF9f2pKOGvu` z&0$@TF6jGW1!&*x_56oeu}C4}RD^B)G3iJ5nk z)V=HB;fo3hoOb+WAvYk>`UzJwB>^r3@2YIGsY^-W^Ybo7>$u1PV-JP(;Q_NWaf~+y z=>vY>KUnf8AmD5IQ(FoLPeafx+eOqX>-rKmm|47kMxDKvmxvD9-4hY5+=H|pK`DqxtOpN|f9)Xb%|S{*<=CX0I3#WEW0_gfDKoyk6{aQ*P+ 
zi+mqGJAT->fxmEOxP1bu9er9Y1AFGqvu5o(xw!{&I<2B@``h4j=D0CS1-m64eXoEw z`@_h{v674FpX%*YK|&?&IuP5L|651i)3g2kR@>|1#rt_dKAUpmlq=XIQ1N=Jo(@py z?Dw=afox@JEMUV+kRSEIoR_7u`}rfAXM_A$jw{-{MjO{Z;M=iBK58Z|w?TGK6?>hW=XOxE(-zUsyl;D89L7#hbruuWo9Vd0M_SJu`9 zoG}jE&2yEkGkTvDRD4Nb@He){P|B%njI%f*Mq6L7n!g)G-5z}LoA+cJRMm+aD8hf% zP*)T{cp)3`+b#FunWtjybW}6rx|yc*_7|-@Si;u_b{}>8xpOG7(_@=qwW0b_T$t4j zKy=VzYzTdZBcGVq~ zzVS+95qsbA$NIZR&dmIrc|8Q~Hcx-RA$7}>J6U}l?qX(k;1pZ1xaGZA5IIH{Yfs6JNV^ z{_@+rnC>1NV6)j#x8hdWGohHo=KTD35JBG(QyfrJ5w93R7B^P1v$I3=idoAnudK{| z=)AkocRVn9dU`fVS26p_9SOEZM!R1SVs*3Q1K`MaK%R{3sw1E>TGP%&b_8-|9At!C z{=s+&?0T;K3=iK?uzJswmZd}bu#k#?Ep4D_NU`;|3Rk2{zdNw7t#)Ln;cR*1xVaQY zG&LFf4}t>PsZ#fP7oZ?*R0m+Wn%<9X)me}C_`CFuj)Pj!N6RkcK%4Xf3->L%b&FD8 z?uv|Odb?Y8?_SgG&M~(xvAb00#RNz^zt1r&+T!{PC8{3l~O3zkO3-~XhT=&75Ig&s(=D6KDxiLuI`NJ zW3@-?cK+aIVPfDH`QGp{peN5g!FeM`6`zCbjFnkQWkx&;q!}05dnzhw6zn2|8_k!D zGcq#=xb!1zjF~Vr3nk%|e_p#p!(vfNa8)fh=J@)}?{~3e_~<)XoH;o~7WcJhu8B3? ze#$oF*M*3MvsS88ZJWm;(5YaUKpmh+aChfff0ocOodKV*GhuQXJxo{JeMYT7Lxn`K zH+6Jv&Z}p}Z$tB><|JArJzno=@%9}Jlu(%#zA?ocw!dMjcBE$ALL(Sybj-?4$|&IG zLR0?X{43D0^J(SL)O;`gh$hExaJHG=Z_IMlEIX7p>txv~4r;2aH$%+@4)$qjA;iW$ z+v%e$Ot-L{>no-rS^e7z!tg$Kh-2Z)ieZl7sL7~uk-cEasm9_9zHpaEMi6#xs@8pSyEOz>K<8exu#i$D)F#c3FE3`Id`X%kF1F zfqLNB1iHMxe*Ic2=B4)`ld3d-WXsGr5t`bF{b0K4yiXzFE4aYEv9pb|zy7Rt)R|UV zs{gt*&wp)W{%d)cV@EUxT(aj^4NW`c3c4`gqnaOf*nW^-%j}f)KKdUUy(LcEO($-6 z8FlwC@SlEHQey?9(MYWZ2JFp}a>sm_5F9;yn)j^QcVP?f)1x-K>o2d;E_;P8mZ#(y zz!~t@r>WL{`)=3@islJzh8S)g!ofAUx2^fue=fJ=)$nZ1qiG03>Rdkl(o@<|3Z|*Z z(^46}WcX`;84I?jnV`0&OjzfO(6%3Ko@lf$p1HLa648^p{MZf}0&tkr?pZNVLNKbV}GvI(eKJGY5XhEgkp z4v31P;vR?2eB1r&ug#>HEcf$&yX9-qnKOcsU)1Q_{I3*z7$gJw-kBO3!UfHmu5j~$9fRYkh;3>*>`-rvV%lz;Aikb||PswSDLlPugFB`(MJxer_@ z1sV-dT@|K_t;<3Z670ddx1Qe6*wl1j{z`Jf=FOAnqX$^#U-@%C6K!2x9j7+w0d~0* zfW|B!>pkyrJopr^!m-(!MkEPc_xFfR-@BR>*pCNAM2w_4A9l!myawC>`fUYoON?(| z5=E%tv?-IYMceuIhI=Qax8DPL*PYIFJ1}o*|EmlDSXx%L=_f`Fu;(cBi6Yg5xkkzI zDM0#;3=>7L7!cW>^8BJ@x9?QL7gsP67EWb%&70PRL}b}|e2M*D_Sa8T@1~c&r(V8Q z-jdV^T9C93FcE%`J8`ObMg0rf-?zmLij5(MdK5CDKiMctC z;LbB^n!yK2*Wx6fba~u-Ue)VLSWdX|&NO_mK}Tv=|5Ce!kg}Ir?A-wSE%*CCp|iYu zHFq1?yVmZn-+SHYIqDLl{Bpmxd-*z{hc-2p@$?>`jxK*${s>F6s-(`!MoLzlR9btn zcuXRDcueHm{(Y}cDg}gK?Ej`;5k+&iFfO@?V&aR1t7PWrjI>YtbkXZ#L$vVXcWY+nRnpVbC*98&-rL>y zwQlu!zeYo!C;CC_Xqw-)kAN_}a0d;6$9H}HJVN&>Bo{nw+-DlE6obn3yso49eczKi zCfTb!SVMWk4KHzdDsmDrRO=eE&7UdV{05|L_^nKRU^!`17DUJ z;#8Esa+smyT!!plhN3`krthxE_dSFqVq*W3pWz@aGeEQdbxTG@1_{+pLQdz)XImPw z*M07$!{!ehPo=C(pRQ)!38}6mkf3Mq=hstQGQl-NN7HQV_v4xKaB-Cxh`U;OhZljs zuG<5-NLOz&XYtzo!d#LscC58y+gE4*%|K($GYTz`od&lgbG!}wd3A5`e+!qe&W?S0 z)EL&}^Y0qEr^ZDL)dQuo5OLzKnx)M<=1cJ?2G?D$Shjr>8Rh2K+xV+r`-e1gL1l2A zhSJ6S5We2J=QJr?we7y7q>H_qv8OC{?S@s7oTfpMd#>$%JObvYh{HJ8-(O01G-3m< z_piw4zrWvgrNBRC56Ls*mMtzzYn85DyQZ!ll)I=q9jXkp-{Q(l5a=vN^g8mZ|4_nH z0}Vgrnj-Y|Wy9y+8ceaAD_=4bSzdz?&}G|ijC^Px{y|PGEmdIF3C Date: Wed, 17 Dec 2025 15:26:54 +0000 Subject: [PATCH 04/30] Fix Unknown message type error for tool notifications MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Change _meta: null to _meta: undefined in OpenAI transport - Register default no-op handlers for all tool notifications in App constructor The SDK's Protocol class throws 'Unknown message type' for unhandled notifications. Now all tool-related notifications have default handlers. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/app.ts | 8 ++++++-- src/openai/transport.ts | 4 ++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/app.ts b/src/app.ts index 7d31858d..1935e374 100644 --- a/src/app.ts +++ b/src/app.ts @@ -248,9 +248,13 @@ export class App extends Protocol { return {}; }); - // Set up default handler to update _hostContext when notifications arrive. - // Users can override this by setting onhostcontextchanged. + // Set up default handlers for notifications. + // Users can override these by setting the corresponding on* properties. this.onhostcontextchanged = () => {}; + this.ontoolinput = () => {}; + this.ontoolinputpartial = () => {}; + this.ontoolresult = () => {}; + this.ontoolcancelled = () => {}; } /** diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 8c5cfb84..c05c6326 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -510,8 +510,8 @@ export class OpenAITransport implements Transport { text: JSON.stringify(this.openai.toolOutput), }, ], - // Include _meta from toolResponseMetadata if available - _meta: this.openai.toolResponseMetadata, + // Include _meta from toolResponseMetadata if available (use undefined not null) + _meta: this.openai.toolResponseMetadata ?? undefined, }, } as JSONRPCNotification); }); From 4e1380aab5dc2189b5ccb79a4e238dd8333a5a32 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:30:22 +0000 Subject: [PATCH 05/30] Add tests for notification handler fixes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Test that null _meta is converted to undefined in OpenAI transport - Test that default no-op handlers accept tool notifications without error 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/app-bridge.test.ts | 15 +++++++++++++++ src/openai/transport.test.ts | 23 +++++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/src/app-bridge.test.ts b/src/app-bridge.test.ts index 1e55f6bd..cbc9698a 100644 --- a/src/app-bridge.test.ts +++ b/src/app-bridge.test.ts @@ -215,6 +215,21 @@ describe("App <-> AppBridge integration", () => { expect(receivedCancellations[0]).toEqual({}); }); + it("tool notifications work with default no-op handlers", async () => { + // Don't set any custom handlers - use defaults + await app.connect(appTransport); + + // These should not throw (default handlers silently accept them) + // Just verify they complete without error + await bridge.sendToolInput({ arguments: {} }); + await bridge.sendToolInputPartial({ arguments: {} }); + await bridge.sendToolResult({ content: [{ type: "text", text: "ok" }] }); + await bridge.sendToolCancelled({}); + + // If we got here without throwing, the test passes + expect(true).toBe(true); + }); + it("setHostContext triggers app.onhostcontextchanged", async () => { const receivedContexts: unknown[] = []; app.onhostcontextchanged = (params) => { diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index 800073ca..37ecc6dd 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -429,6 +429,29 @@ describe("OpenAITransport", () => { }); }); + test("converts null _meta to undefined in tool result", async () => { + // Simulate null being set (e.g., from JSON parsing where null is valid) + (mockOpenAI as unknown as { toolResponseMetadata: null }).toolResponseMetadata = null; + + const transport = new OpenAITransport(); + 
const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ) as { params?: { _meta?: unknown } } | undefined; + expect(toolResultNotification).toBeDefined(); + // _meta should be undefined, not null (SDK rejects null) + expect(toolResultNotification?.params?._meta).toBeUndefined(); + }); + test("does not deliver notifications when data is missing", async () => { delete mockOpenAI.toolInput; delete mockOpenAI.toolOutput; From f10e1780e367ce1644aa549cdafca1d6e2668c48 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:50:03 +0000 Subject: [PATCH 06/30] Fix null toolOutput being sent as text 'null' MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Check for both null and undefined before delivering tool-result notification. Previously null passed through and was stringified. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/openai/transport.test.ts | 26 +++++++++++++++++++++++++- src/openai/transport.ts | 4 ++-- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index 37ecc6dd..962a0c6c 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -431,7 +431,9 @@ describe("OpenAITransport", () => { test("converts null _meta to undefined in tool result", async () => { // Simulate null being set (e.g., from JSON parsing where null is valid) - (mockOpenAI as unknown as { toolResponseMetadata: null }).toolResponseMetadata = null; + ( + mockOpenAI as unknown as { toolResponseMetadata: null } + ).toolResponseMetadata = null; const transport = new OpenAITransport(); const messages: unknown[] = []; @@ -452,6 +454,28 @@ describe("OpenAITransport", () => { expect(toolResultNotification?.params?._meta).toBeUndefined(); }); + test("does not deliver tool-result when toolOutput is null", async () => { + // Simulate null being set (e.g., from JSON parsing) + (mockOpenAI as unknown as { toolOutput: null }).toolOutput = null; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + // Should NOT deliver tool-result when toolOutput is null + expect(toolResultNotification).toBeUndefined(); + }); + test("does not deliver notifications when data is missing", async () => { delete mockOpenAI.toolInput; delete mockOpenAI.toolOutput; diff --git a/src/openai/transport.ts b/src/openai/transport.ts index c05c6326..55ca6272 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -495,8 +495,8 @@ export class OpenAITransport implements Transport { }); } - // Deliver tool output if available - if (this.openai.toolOutput !== undefined) { + // Deliver tool output if available (check for both null and undefined) + if (this.openai.toolOutput != null) { queueMicrotask(() => { this.onmessage?.({ jsonrpc: "2.0", From e554054c95bd0ef93da37bd9c95259f00f71c430 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:55:49 
+0000 Subject: [PATCH 07/30] Fix double-stringification of toolOutput in OpenAI transport MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Handle different shapes of toolOutput from ChatGPT: - Array of content blocks: use directly - Single content block {type, text}: wrap in array - Object with just {text}: extract and wrap - Other: stringify as fallback This prevents double-stringification when ChatGPT passes content in different formats. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/openai/transport.ts | 37 +++++++++++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 55ca6272..7ca34678 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -498,18 +498,39 @@ export class OpenAITransport implements Transport { // Deliver tool output if available (check for both null and undefined) if (this.openai.toolOutput != null) { queueMicrotask(() => { + // Normalize toolOutput to MCP content array format + let content: Array<{ type: string; text?: string; [key: string]: unknown }>; + const output = this.openai.toolOutput; + + if (Array.isArray(output)) { + // Already an array of content blocks + content = output; + } else if ( + typeof output === "object" && + output !== null && + "type" in output && + typeof (output as { type: unknown }).type === "string" + ) { + // Single content block object like {type: "text", text: "..."} + content = [output as { type: string; text?: string }]; + } else if ( + typeof output === "object" && + output !== null && + "text" in output && + typeof (output as { text: unknown }).text === "string" + ) { + // Object with just text field - treat as text content + content = [{ type: "text", text: (output as { text: string }).text }]; + } else { + // Unknown shape - stringify it + content = [{ type: "text", text: JSON.stringify(output) }]; + } + this.onmessage?.({ jsonrpc: "2.0", method: "ui/notifications/tool-result", params: { - content: Array.isArray(this.openai.toolOutput) - ? this.openai.toolOutput - : [ - { - type: "text", - text: JSON.stringify(this.openai.toolOutput), - }, - ], + content, // Include _meta from toolResponseMetadata if available (use undefined not null) _meta: this.openai.toolResponseMetadata ?? undefined, }, From a5ad9e47a645893559e87e9984cba4a9df29f00a Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:56:55 +0000 Subject: [PATCH 08/30] Add structuredContent support to OpenAI transport MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When toolOutput contains structuredContent, include it in the tool-result notification. Also auto-extract structuredContent from plain objects that aren't content arrays. This allows apps to access structured data directly without parsing JSON from text content. 
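Sketch of the resulting behavior (illustrative values; assumes the app's
tool-result handler receives the notification params as-is):

```typescript
// ChatGPT populates a plain object that is not a content array:
// window.openai.toolOutput = { temperature: 22, unit: "C" };

// Assumes the handler is invoked with the tool-result notification params.
app.ontoolresult = (params) => {
  // The object is surfaced directly as structuredContent...
  console.log(params.structuredContent); // { temperature: 22, unit: "C" }
  // ...and content still carries a JSON text block as a fallback.
  console.log(params.content[0].text); // '{"temperature":22,"unit":"C"}'
};
```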
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/openai/transport.ts | 38 +++++++++++++++++++++++++++++++++++--- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 7ca34678..bc87b7fe 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -498,11 +498,38 @@ export class OpenAITransport implements Transport { // Deliver tool output if available (check for both null and undefined) if (this.openai.toolOutput != null) { queueMicrotask(() => { - // Normalize toolOutput to MCP content array format - let content: Array<{ type: string; text?: string; [key: string]: unknown }>; + // Normalize toolOutput to MCP CallToolResult format + let content: Array<{ + type: string; + text?: string; + [key: string]: unknown; + }>; + let structuredContent: Record | undefined; const output = this.openai.toolOutput; - if (Array.isArray(output)) { + // Check if output is already a CallToolResult-like object with content/structuredContent + if ( + typeof output === "object" && + output !== null && + ("content" in output || "structuredContent" in output) + ) { + const result = output as { + content?: unknown; + structuredContent?: Record; + }; + // Prefer structuredContent if available + if (result.structuredContent !== undefined) { + structuredContent = result.structuredContent; + // Generate content from structuredContent if not provided + content = Array.isArray(result.content) + ? result.content + : [{ type: "text", text: JSON.stringify(result.structuredContent) }]; + } else if (Array.isArray(result.content)) { + content = result.content; + } else { + content = [{ type: "text", text: JSON.stringify(output) }]; + } + } else if (Array.isArray(output)) { // Already an array of content blocks content = output; } else if ( @@ -521,6 +548,10 @@ export class OpenAITransport implements Transport { ) { // Object with just text field - treat as text content content = [{ type: "text", text: (output as { text: string }).text }]; + } else if (typeof output === "object" && output !== null) { + // Plain object - use as structuredContent and generate text content + structuredContent = output as Record; + content = [{ type: "text", text: JSON.stringify(output) }]; } else { // Unknown shape - stringify it content = [{ type: "text", text: JSON.stringify(output) }]; @@ -531,6 +562,7 @@ export class OpenAITransport implements Transport { method: "ui/notifications/tool-result", params: { content, + structuredContent, // Include _meta from toolResponseMetadata if available (use undefined not null) _meta: this.openai.toolResponseMetadata ?? undefined, }, From 35dc00a748bbdfbb76655cc15d8fe32156e42fc7 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 7 Jan 2026 16:59:41 +0000 Subject: [PATCH 09/30] style: format OpenAI transport --- src/openai/transport.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/openai/transport.ts b/src/openai/transport.ts index bc87b7fe..f4c9c6aa 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -523,7 +523,12 @@ export class OpenAITransport implements Transport { // Generate content from structuredContent if not provided content = Array.isArray(result.content) ? 
result.content - : [{ type: "text", text: JSON.stringify(result.structuredContent) }]; + : [ + { + type: "text", + text: JSON.stringify(result.structuredContent), + }, + ]; } else if (Array.isArray(result.content)) { content = result.content; } else { From d6048d8b017641652c56a7a960df888f50c27c25 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Thu, 8 Jan 2026 16:08:44 +0000 Subject: [PATCH 10/30] fix: include autoResize option in useApp hook The React useApp hook was overriding the entire options object when passing experimentalOAICompatibility, causing autoResize to be undefined instead of true. This prevented automatic size notifications from being set up. --- src/react/useApp.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/src/react/useApp.tsx b/src/react/useApp.tsx index 111f8591..12bcf86d 100644 --- a/src/react/useApp.tsx +++ b/src/react/useApp.tsx @@ -137,6 +137,7 @@ export function useApp({ try { const app = new App(appInfo, capabilities, { experimentalOAICompatibility, + autoResize: true, }); // Register handlers BEFORE connecting From c261e6770602f3f249626f3eab26d7ce98bc8364 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 16 Dec 2025 22:29:04 +0000 Subject: [PATCH 11/30] feat: experimental OpenAI Apps SDK compatibility MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add transparent support for OpenAI's Apps SDK environment alongside MCP. - `transport.ts` - OpenAITransport implementing MCP Transport interface - `types.ts` - TypeScript types for OpenAI Apps SDK (`window.openai`) - `transport.test.ts` - Comprehensive tests - Add `experimentalOAICompatibility` option (default: `true`) - Auto-detect platform: check for `window.openai` → use OpenAI, else MCP - `connect()` creates appropriate transport automatically - Add `experimentalOAICompatibility` prop to `UseAppOptions` - Pass through to App constructor Apps work transparently in both environments: ```typescript // Works in both MCP hosts and ChatGPT const app = new App(appInfo, capabilities); await app.connect(); // Auto-detects platform // Force MCP-only mode const app = new App(appInfo, capabilities, { experimentalOAICompatibility: false }); ``` 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/app.ts | 89 ++++-- src/openai/transport.test.ts | 354 +++++++++++++++++++++++ src/openai/transport.ts | 538 +++++++++++++++++++++++++++++++++++ src/openai/types.ts | 244 ++++++++++++++++ src/react/useApp.tsx | 62 ++-- 5 files changed, 1243 insertions(+), 44 deletions(-) create mode 100644 src/openai/transport.test.ts create mode 100644 src/openai/transport.ts create mode 100644 src/openai/types.ts diff --git a/src/app.ts b/src/app.ts index 2b43cd5e..f02266b1 100644 --- a/src/app.ts +++ b/src/app.ts @@ -17,7 +17,6 @@ import { PingRequestSchema, } from "@modelcontextprotocol/sdk/types.js"; import { AppNotification, AppRequest, AppResult } from "./types"; -import { PostMessageTransport } from "./message-transport"; import { LATEST_PROTOCOL_VERSION, McpUiAppCapabilities, @@ -49,8 +48,12 @@ import { McpUiRequestDisplayModeResultSchema, } from "./types"; import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js"; +import { PostMessageTransport } from "./message-transport"; +import { OpenAITransport, isOpenAIEnvironment } from "./openai/transport.js"; export { PostMessageTransport } from "./message-transport"; +export { OpenAITransport, isOpenAIEnvironment } from "./openai/transport"; +export * from "./openai/types"; export * 
from "./types"; export { applyHostStyleVariables, @@ -103,7 +106,7 @@ export const RESOURCE_MIME_TYPE = "text/html;profile=mcp-app"; * * @see ProtocolOptions from @modelcontextprotocol/sdk for inherited options */ -type AppOptions = ProtocolOptions & { +export type AppOptions = ProtocolOptions & { /** * Automatically report size changes to the host using ResizeObserver. * @@ -114,6 +117,19 @@ type AppOptions = ProtocolOptions & { * @default true */ autoResize?: boolean; + + /** + * Enable experimental OpenAI compatibility. + * + * When enabled (default), the App will auto-detect the environment: + * - If `window.openai` exists → use OpenAI Apps SDK + * - Otherwise → use MCP Apps protocol via PostMessageTransport + * + * Set to `false` to force MCP-only mode. + * + * @default true + */ + experimentalOAICompatibility?: boolean; }; type RequestHandlerExtra = Parameters< @@ -222,7 +238,10 @@ export class App extends Protocol { constructor( private _appInfo: Implementation, private _capabilities: McpUiAppCapabilities = {}, - private options: AppOptions = { autoResize: true }, + private options: AppOptions = { + autoResize: true, + experimentalOAICompatibility: true, + }, ) { super(options); @@ -1037,50 +1056,73 @@ export class App extends Protocol { return () => resizeObserver.disconnect(); } + /** + * Create the default transport based on detected platform. + * @internal + */ + private createDefaultTransport(): Transport { + const experimentalOAI = this.options?.experimentalOAICompatibility ?? true; + if (experimentalOAI && isOpenAIEnvironment()) { + return new OpenAITransport(); + } + return new PostMessageTransport(window.parent, window.parent); + } + /** * Establish connection with the host and perform initialization handshake. * * This method performs the following steps: - * 1. Connects the transport layer - * 2. Sends `ui/initialize` request with app info and capabilities - * 3. Receives host capabilities and context in response - * 4. Sends `ui/notifications/initialized` notification - * 5. Sets up auto-resize using {@link setupSizeChangedNotifications} if enabled (default) + * 1. Auto-detects platform if no transport is provided + * 2. Connects the transport layer + * 3. Sends `ui/initialize` request with app info and capabilities + * 4. Receives host capabilities and context in response + * 5. Sends `ui/notifications/initialized` notification + * 6. Sets up auto-resize using {@link setupSizeChangedNotifications} if enabled (default) + * 7. For OpenAI mode: delivers initial tool input/result from window.openai * * If initialization fails, the connection is automatically closed and an error * is thrown. * - * @param transport - Transport layer (typically PostMessageTransport) + * @param transport - Optional transport layer. 
If not provided, auto-detects + * based on the `platform` option: + * - `'openai'` or `window.openai` exists → uses {@link OpenAITransport} + * - `'mcp'` or no `window.openai` → uses {@link PostMessageTransport} * @param options - Request options for the initialize request * * @throws {Error} If initialization fails or connection is lost * - * @example Connect with PostMessageTransport + * @example Auto-detect platform (recommended) * ```typescript * const app = new App( * { name: "MyApp", version: "1.0.0" }, * {} * ); * - * try { - * await app.connect(new PostMessageTransport(window.parent)); - * console.log("Connected successfully!"); - * } catch (error) { - * console.error("Failed to connect:", error); - * } + * // Auto-detects: OpenAI if window.openai exists, MCP otherwise + * await app.connect(); + * ``` + * + * @example Explicit MCP transport + * ```typescript + * await app.connect(new PostMessageTransport(window.parent)); + * ``` + * + * @example Explicit OpenAI transport + * ```typescript + * await app.connect(new OpenAITransport()); * ``` * * @see {@link McpUiInitializeRequest} for the initialization request structure * @see {@link McpUiInitializedNotification} for the initialized notification - * @see {@link PostMessageTransport} for the typical transport implementation + * @see {@link PostMessageTransport} for MCP-compatible hosts + * @see {@link OpenAITransport} for OpenAI/ChatGPT hosts */ override async connect( - transport: Transport = new PostMessageTransport( - window.parent, - window.parent, - ), + transport?: Transport, options?: RequestOptions, ): Promise { + transport ??= this.createDefaultTransport(); + await super.connect(transport); try { @@ -1112,6 +1154,11 @@ export class App extends Protocol { if (this.options?.autoResize) { this.setupSizeChangedNotifications(); } + + // For OpenAI mode: deliver initial state from window.openai + if (transport instanceof OpenAITransport) { + transport.deliverInitialState(); + } } catch (error) { // Disconnect if initialization fails. 
void this.close(); diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts new file mode 100644 index 00000000..01911e09 --- /dev/null +++ b/src/openai/transport.test.ts @@ -0,0 +1,354 @@ +import { describe, test, expect, beforeEach, afterEach, mock } from "bun:test"; +import { OpenAITransport, isOpenAIEnvironment } from "./transport"; +import type { OpenAIGlobal, WindowWithOpenAI } from "./types"; + +describe("isOpenAIEnvironment", () => { + const originalWindow = globalThis.window; + + afterEach(() => { + // Restore original window + if (originalWindow === undefined) { + delete (globalThis as { window?: unknown }).window; + } else { + (globalThis as { window?: unknown }).window = originalWindow; + } + }); + + test("returns false when window is undefined", () => { + delete (globalThis as { window?: unknown }).window; + expect(isOpenAIEnvironment()).toBe(false); + }); + + test("returns false when window.openai is undefined", () => { + (globalThis as { window?: unknown }).window = {}; + expect(isOpenAIEnvironment()).toBe(false); + }); + + test("returns true when window.openai is an object", () => { + (globalThis as { window?: unknown }).window = { + openai: {}, + }; + expect(isOpenAIEnvironment()).toBe(true); + }); +}); + +describe("OpenAITransport", () => { + let mockOpenAI: OpenAIGlobal; + + beforeEach(() => { + mockOpenAI = { + theme: "dark", + locale: "en-US", + displayMode: "inline", + maxHeight: 600, + toolInput: { location: "Tokyo" }, + toolOutput: { temperature: 22 }, + callTool: mock(() => + Promise.resolve({ content: { result: "success" } }), + ) as unknown as OpenAIGlobal["callTool"], + sendFollowUpMessage: mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["sendFollowUpMessage"], + openExternal: mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["openExternal"], + notifyIntrinsicHeight: mock( + () => {}, + ) as unknown as OpenAIGlobal["notifyIntrinsicHeight"], + }; + + (globalThis as { window?: unknown }).window = { + openai: mockOpenAI, + }; + }); + + afterEach(() => { + delete (globalThis as { window?: unknown }).window; + }); + + test("throws when window.openai is not available", () => { + delete (globalThis as { window?: unknown }).window; + expect(() => new OpenAITransport()).toThrow( + "OpenAITransport requires window.openai", + ); + }); + + test("constructs successfully when window.openai is available", () => { + const transport = new OpenAITransport(); + expect(transport).toBeDefined(); + }); + + test("start() completes without error", async () => { + const transport = new OpenAITransport(); + await expect(transport.start()).resolves.toBeUndefined(); + }); + + test("close() calls onclose callback", async () => { + const transport = new OpenAITransport(); + const onclose = mock(() => {}); + transport.onclose = onclose; + + await transport.close(); + + expect(onclose).toHaveBeenCalled(); + }); + + describe("ui/initialize request", () => { + test("returns synthesized host info from window.openai", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + // Wait for microtask to complete + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 1, + result: { + hostInfo: { name: 
"ChatGPT", version: "1.0.0" }, + hostContext: { + theme: "dark", + locale: "en-US", + displayMode: "inline", + }, + }, + }); + }); + }); + + describe("tools/call request", () => { + test("delegates to window.openai.callTool()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 2, + method: "tools/call", + params: { + name: "get_weather", + arguments: { location: "Tokyo" }, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.callTool).toHaveBeenCalledWith("get_weather", { + location: "Tokyo", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 2, + result: expect.any(Object), + }); + }); + + test("returns error when callTool is not available", async () => { + delete mockOpenAI.callTool; + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 3, + method: "tools/call", + params: { name: "test_tool" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 3, + error: { + code: -32601, + message: expect.stringContaining("not supported"), + }, + }); + }); + }); + + describe("ui/message request", () => { + test("delegates to window.openai.sendFollowUpMessage()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 4, + method: "ui/message", + params: { + role: "user", + content: [{ type: "text", text: "Hello!" }], + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.sendFollowUpMessage).toHaveBeenCalledWith({ + prompt: "Hello!", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 4, + result: {}, + }); + }); + }); + + describe("ui/open-link request", () => { + test("delegates to window.openai.openExternal()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 5, + method: "ui/open-link", + params: { url: "https://example.com" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.openExternal).toHaveBeenCalledWith({ + href: "https://example.com", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 5, + result: {}, + }); + }); + }); + + describe("ui/request-display-mode request", () => { + test("delegates to window.openai.requestDisplayMode()", async () => { + mockOpenAI.requestDisplayMode = mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["requestDisplayMode"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 6, + method: "ui/request-display-mode", + params: { mode: "fullscreen" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.requestDisplayMode).toHaveBeenCalledWith({ + mode: "fullscreen", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 6, + result: { mode: "fullscreen" }, + }); + }); + }); + + describe("ui/notifications/size-changed notification", () => { + test("delegates to window.openai.notifyIntrinsicHeight()", async () => { + const 
transport = new OpenAITransport(); + + await transport.send({ + jsonrpc: "2.0", + method: "ui/notifications/size-changed", + params: { width: 400, height: 300 }, + }); + + expect(mockOpenAI.notifyIntrinsicHeight).toHaveBeenCalledWith(300); + }); + }); + + describe("deliverInitialState", () => { + test("delivers tool input notification", async () => { + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolInputNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-input", + ); + expect(toolInputNotification).toMatchObject({ + jsonrpc: "2.0", + method: "ui/notifications/tool-input", + params: { arguments: { location: "Tokyo" } }, + }); + }); + + test("delivers tool result notification", async () => { + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + expect(toolResultNotification).toBeDefined(); + }); + + test("does not deliver notifications when data is missing", async () => { + delete mockOpenAI.toolInput; + delete mockOpenAI.toolOutput; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(messages).toHaveLength(0); + }); + }); +}); diff --git a/src/openai/transport.ts b/src/openai/transport.ts new file mode 100644 index 00000000..399ef949 --- /dev/null +++ b/src/openai/transport.ts @@ -0,0 +1,538 @@ +/** + * Transport adapter for OpenAI Apps SDK (window.openai) compatibility. + * + * This transport allows MCP Apps to run in OpenAI's ChatGPT environment by + * translating between the MCP Apps protocol and the OpenAI Apps SDK APIs. + * + * @see https://developers.openai.com/apps-sdk/build/chatgpt-ui/ + */ + +import { + JSONRPCMessage, + JSONRPCRequest, + JSONRPCNotification, + RequestId, +} from "@modelcontextprotocol/sdk/types.js"; +import { + Transport, + TransportSendOptions, +} from "@modelcontextprotocol/sdk/shared/transport.js"; +import { OpenAIGlobal, getOpenAIGlobal, isOpenAIEnvironment } from "./types.js"; +import { LATEST_PROTOCOL_VERSION, McpUiHostContext } from "../spec.types.js"; + +/** + * JSON-RPC success response message. + * @internal + */ +interface JSONRPCSuccessResponse { + jsonrpc: "2.0"; + id: RequestId; + result: Record; +} + +/** + * JSON-RPC error response message. + * @internal + */ +interface JSONRPCErrorResponse { + jsonrpc: "2.0"; + id: RequestId; + error: { code: number; message: string; data?: unknown }; +} + +/** + * Check if a message is a JSON-RPC request (has method and id). + */ +function isRequest(message: JSONRPCMessage): message is JSONRPCRequest { + return "method" in message && "id" in message; +} + +/** + * Check if a message is a JSON-RPC notification (has method but no id). 
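+ *
+ * For example, `{ "jsonrpc": "2.0", "id": 1, "method": "ping" }` is a request,
+ * while `{ "jsonrpc": "2.0", "method": "ui/notifications/initialized" }` is a
+ * notification.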
+ */ +function isNotification( + message: JSONRPCMessage, +): message is JSONRPCNotification { + return "method" in message && !("id" in message); +} + +/** + * Transport implementation that bridges MCP Apps protocol to OpenAI Apps SDK. + * + * This transport enables MCP Apps to run seamlessly in ChatGPT by: + * - Synthesizing initialization responses from window.openai properties + * - Mapping tool calls to window.openai.callTool() + * - Mapping messages to window.openai.sendFollowUpMessage() + * - Mapping link opens to window.openai.openExternal() + * - Reporting size changes via window.openai.notifyIntrinsicHeight() + * + * ## Usage + * + * Typically you don't create this transport directly. The App will create + * it automatically when `experimentalOAICompatibility` is enabled (default) + * and `window.openai` is detected. + * + * ```typescript + * import { App } from '@modelcontextprotocol/ext-apps'; + * + * const app = new App({ name: "MyApp", version: "1.0.0" }, {}); + * await app.connect(); // Auto-detects OpenAI environment + * ``` + * + * ## Manual Usage + * + * For advanced use cases, you can create the transport directly: + * + * ```typescript + * import { App, OpenAITransport } from '@modelcontextprotocol/ext-apps'; + * + * const app = new App({ name: "MyApp", version: "1.0.0" }, {}); + * await app.connect(new OpenAITransport()); + * ``` + * + * @see {@link App.connect} for automatic transport selection + * @see {@link PostMessageTransport} for MCP-compatible hosts + */ +export class OpenAITransport implements Transport { + private openai: OpenAIGlobal; + private _closed = false; + + /** + * Create a new OpenAITransport. + * + * @throws {Error} If window.openai is not available + * + * @example + * ```typescript + * if (isOpenAIEnvironment()) { + * const transport = new OpenAITransport(); + * await app.connect(transport); + * } + * ``` + */ + constructor() { + const openai = getOpenAIGlobal(); + if (!openai) { + throw new Error( + "OpenAITransport requires window.openai to be available. " + + "This transport should only be used in OpenAI/ChatGPT environments.", + ); + } + this.openai = openai; + } + + /** + * Begin listening for messages. + * + * In OpenAI mode, there's no event-based message flow to start. + * The data is pre-populated in window.openai properties. + */ + async start(): Promise { + // Nothing to do - window.openai is already available and populated + } + + /** + * Send a JSON-RPC message. + * + * Requests are handled by mapping to window.openai methods. + * Notifications are handled for size changes; others are no-ops. + * + * @param message - JSON-RPC message to send + * @param _options - Send options (unused) + */ + async send( + message: JSONRPCMessage, + _options?: TransportSendOptions, + ): Promise { + if (this._closed) { + throw new Error("Transport is closed"); + } + + if (isRequest(message)) { + // Handle requests - map to window.openai methods and synthesize responses + const response = await this.handleRequest(message); + // Deliver response asynchronously to maintain message ordering + queueMicrotask(() => this.onmessage?.(response)); + } else if (isNotification(message)) { + // Handle notifications + this.handleNotification(message); + } + // Responses are ignored - we don't receive requests from OpenAI + } + + /** + * Handle an outgoing JSON-RPC request by mapping to window.openai. 
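+ *
+ * Mapping used below: `ui/initialize` is synthesized from `window.openai`
+ * properties, `tools/call` → `callTool()`, `ui/message` → `sendFollowUpMessage()`,
+ * `ui/open-link` → `openExternal()`, `ui/request-display-mode` →
+ * `requestDisplayMode()`. `ping` is answered locally; any other method yields a
+ * -32601 (method not found) error.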
+ */ + private async handleRequest( + request: JSONRPCRequest, + ): Promise { + const { method, id, params } = request; + + try { + switch (method) { + case "ui/initialize": + return this.handleInitialize(id); + + case "tools/call": + return await this.handleToolCall( + id, + params as { name: string; arguments?: Record }, + ); + + case "ui/message": + return await this.handleMessage( + id, + params as { role: string; content: unknown[] }, + ); + + case "ui/open-link": + return await this.handleOpenLink(id, params as { url: string }); + + case "ui/request-display-mode": + return await this.handleRequestDisplayMode( + id, + params as { mode: string }, + ); + + case "ping": + return this.createSuccessResponse(id, {}); + + default: + return this.createErrorResponse( + id, + -32601, + `Method not supported in OpenAI mode: ${method}`, + ); + } + } catch (error) { + return this.createErrorResponse( + id, + -32603, + error instanceof Error ? error.message : String(error), + ); + } + } + + /** + * Handle ui/initialize request by synthesizing response from window.openai. + */ + private handleInitialize(id: RequestId): JSONRPCSuccessResponse { + // Safely extract userAgent - could be string or object + let userAgent: string | undefined; + if (typeof this.openai.userAgent === "string") { + userAgent = this.openai.userAgent; + } else if ( + this.openai.userAgent && + typeof this.openai.userAgent === "object" + ) { + userAgent = JSON.stringify(this.openai.userAgent); + } + + // Safely extract safeAreaInsets - only include if all values are present + let safeAreaInsets: McpUiHostContext["safeAreaInsets"]; + const sa = this.openai.safeArea; + if ( + sa && + typeof sa.top === "number" && + typeof sa.right === "number" && + typeof sa.bottom === "number" && + typeof sa.left === "number" + ) { + safeAreaInsets = sa; + } + + const hostContext: McpUiHostContext = { + theme: this.openai.theme, + locale: this.openai.locale, + displayMode: this.openai.displayMode, + viewport: this.openai.maxHeight + ? { width: 0, height: 0, maxHeight: this.openai.maxHeight } + : undefined, + safeAreaInsets, + userAgent, + }; + + return this.createSuccessResponse(id, { + protocolVersion: LATEST_PROTOCOL_VERSION, + hostInfo: { + name: "ChatGPT", + version: "1.0.0", + }, + hostCapabilities: { + serverTools: {}, + openLinks: {}, + logging: {}, + }, + hostContext, + }); + } + + /** + * Handle tools/call request by delegating to window.openai.callTool(). + */ + private async handleToolCall( + id: RequestId, + params: { name: string; arguments?: Record }, + ): Promise { + if (!this.openai.callTool) { + return this.createErrorResponse( + id, + -32601, + "Tool calls are not supported in this OpenAI environment", + ); + } + + const result = await this.openai.callTool(params.name, params.arguments); + + // Handle different response formats from OpenAI + // Could be { content: [...] }, { structuredContent: ... 
}, or the raw data + let content: { type: string; text: string }[]; + if (Array.isArray(result.content)) { + // Clean up content items - remove null values for annotations/_meta + content = result.content.map((item: unknown) => { + if ( + typeof item === "object" && + item !== null && + "type" in item && + "text" in item + ) { + const typedItem = item as { + type: string; + text: string; + annotations?: unknown; + _meta?: unknown; + }; + return { type: typedItem.type, text: typedItem.text }; + } + return { type: "text", text: JSON.stringify(item) }; + }); + } else if (result.structuredContent !== undefined) { + content = [ + { type: "text", text: JSON.stringify(result.structuredContent) }, + ]; + } else if (result.content !== undefined) { + content = [{ type: "text", text: JSON.stringify(result.content) }]; + } else { + // The result itself might be the structured content + content = [{ type: "text", text: JSON.stringify(result) }]; + } + + return this.createSuccessResponse(id, { + content, + isError: result.isError, + }); + } + + /** + * Handle ui/message request by delegating to window.openai.sendFollowUpMessage(). + */ + private async handleMessage( + id: RequestId, + params: { role: string; content: unknown[] }, + ): Promise { + if (!this.openai.sendFollowUpMessage) { + return this.createErrorResponse( + id, + -32601, + "Sending messages is not supported in this OpenAI environment", + ); + } + + // Extract text content from the message + const textContent = params.content + .filter( + (c): c is { type: "text"; text: string } => + typeof c === "object" && + c !== null && + (c as { type?: string }).type === "text", + ) + .map((c) => c.text) + .join("\n"); + + await this.openai.sendFollowUpMessage({ prompt: textContent }); + + return this.createSuccessResponse(id, {}); + } + + /** + * Handle ui/open-link request by delegating to window.openai.openExternal(). + */ + private async handleOpenLink( + id: RequestId, + params: { url: string }, + ): Promise { + if (!this.openai.openExternal) { + return this.createErrorResponse( + id, + -32601, + "Opening external links is not supported in this OpenAI environment", + ); + } + + await this.openai.openExternal({ href: params.url }); + + return this.createSuccessResponse(id, {}); + } + + /** + * Handle ui/request-display-mode by delegating to window.openai.requestDisplayMode(). + */ + private async handleRequestDisplayMode( + id: RequestId, + params: { mode: string }, + ): Promise { + if (!this.openai.requestDisplayMode) { + return this.createErrorResponse( + id, + -32601, + "Display mode changes are not supported in this OpenAI environment", + ); + } + + const mode = params.mode as "inline" | "pip" | "fullscreen"; + await this.openai.requestDisplayMode({ mode }); + + return this.createSuccessResponse(id, { mode }); + } + + /** + * Handle an outgoing notification. + */ + private handleNotification(notification: JSONRPCNotification): void { + const { method, params } = notification; + + switch (method) { + case "ui/notifications/size-changed": + this.handleSizeChanged(params as { width?: number; height?: number }); + break; + + case "ui/notifications/initialized": + // No-op - OpenAI doesn't need this notification + break; + + case "notifications/message": + // Log messages - could be sent to console in OpenAI mode + console.log("[MCP App Log]", params); + break; + + default: + // Ignore unknown notifications + break; + } + } + + /** + * Handle size changed notification by calling window.openai.notifyIntrinsicHeight(). 
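+ *
+ * Only the height is forwarded; the `window.openai` surface used here has no
+ * width equivalent, so `params.width` is ignored.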
+ */ + private handleSizeChanged(params: { width?: number; height?: number }): void { + if (this.openai.notifyIntrinsicHeight && params.height !== undefined) { + this.openai.notifyIntrinsicHeight(params.height); + } + } + + /** + * Create a success JSON-RPC response. + */ + private createSuccessResponse( + id: RequestId, + result: Record, + ): JSONRPCSuccessResponse { + return { + jsonrpc: "2.0", + id, + result, + }; + } + + /** + * Create an error JSON-RPC response. + */ + private createErrorResponse( + id: RequestId, + code: number, + message: string, + ): JSONRPCErrorResponse { + return { + jsonrpc: "2.0", + id, + error: { code, message }, + }; + } + + /** + * Deliver initial tool input and result notifications. + * + * Called by App after connection to deliver pre-populated data from + * window.openai as notifications that the app's handlers expect. + * + * @internal + */ + deliverInitialState(): void { + // Deliver tool input if available + if (this.openai.toolInput !== undefined) { + queueMicrotask(() => { + this.onmessage?.({ + jsonrpc: "2.0", + method: "ui/notifications/tool-input", + params: { arguments: this.openai.toolInput }, + } as JSONRPCNotification); + }); + } + + // Deliver tool output if available + if (this.openai.toolOutput !== undefined) { + queueMicrotask(() => { + this.onmessage?.({ + jsonrpc: "2.0", + method: "ui/notifications/tool-result", + params: { + content: Array.isArray(this.openai.toolOutput) + ? this.openai.toolOutput + : [ + { + type: "text", + text: JSON.stringify(this.openai.toolOutput), + }, + ], + }, + } as JSONRPCNotification); + }); + } + } + + /** + * Close the transport. + */ + async close(): Promise { + this._closed = true; + this.onclose?.(); + } + + /** + * Called when the transport is closed. + */ + onclose?: () => void; + + /** + * Called when an error occurs. + */ + onerror?: (error: Error) => void; + + /** + * Called when a message is received. + */ + onmessage?: (message: JSONRPCMessage) => void; + + /** + * Session identifier (unused in OpenAI mode). + */ + sessionId?: string; + + /** + * Callback to set the negotiated protocol version. + */ + setProtocolVersion?: (version: string) => void; +} + +// Re-export utility functions +export { isOpenAIEnvironment, getOpenAIGlobal }; diff --git a/src/openai/types.ts b/src/openai/types.ts new file mode 100644 index 00000000..435823f9 --- /dev/null +++ b/src/openai/types.ts @@ -0,0 +1,244 @@ +/** + * Type definitions for the OpenAI Apps SDK's window.openai object. + * + * These types describe the API surface that ChatGPT injects into widget iframes. + * When running in OpenAI mode, the {@link OpenAITransport} uses these APIs to + * communicate with the ChatGPT host. + * + * @see https://developers.openai.com/apps-sdk/build/chatgpt-ui/ + */ + +/** + * Display mode for the widget in ChatGPT. + */ +export type OpenAIDisplayMode = "inline" | "pip" | "fullscreen"; + +/** + * Theme setting from the ChatGPT host. + */ +export type OpenAITheme = "light" | "dark"; + +/** + * Safe area insets for the widget viewport. + */ +export interface OpenAISafeArea { + top: number; + right: number; + bottom: number; + left: number; +} + +/** + * Result of a tool call via window.openai.callTool(). + * + * Note: The exact return type isn't fully documented by OpenAI. + * Based on observed behavior, it returns structured content. 
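+ *
+ * @example
+ * ```typescript
+ * // Illustrative shape only (assumption; not an official OpenAI contract):
+ * const result: OpenAIToolCallResult = {
+ *   structuredContent: { temperature: 22 },
+ *   isError: false,
+ * };
+ * ```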
+ */ +export interface OpenAIToolCallResult { + /** Structured content from the tool (may be any shape) */ + structuredContent?: unknown; + /** Legacy content field (for compatibility) */ + content?: unknown; + /** Whether the tool call resulted in an error */ + isError?: boolean; +} + +/** + * The window.openai object injected by ChatGPT into widget iframes. + * + * This interface describes the API surface available to widgets running + * in the ChatGPT environment. + */ +export interface OpenAIGlobal { + // ───────────────────────────────────────────────────────────────────────── + // State & Data Properties + // ───────────────────────────────────────────────────────────────────────── + + /** + * Tool arguments passed when invoking the tool. + * Pre-populated when the widget loads. + */ + toolInput?: Record; + + /** + * Structured content returned by the MCP server. + * Pre-populated when the widget loads (if tool has completed). + */ + toolOutput?: unknown; + + /** + * The `_meta` payload from tool response (widget-only, hidden from model). + */ + toolResponseMetadata?: Record; + + /** + * Persisted UI state snapshot between renders. + * Set via setWidgetState(), rehydrated on subsequent renders. + */ + widgetState?: unknown; + + /** + * Current theme setting. + */ + theme?: OpenAITheme; + + /** + * Current display mode of the widget. + */ + displayMode?: OpenAIDisplayMode; + + /** + * Maximum height available for the widget. + */ + maxHeight?: number; + + /** + * Safe area insets for the widget. + */ + safeArea?: OpenAISafeArea; + + /** + * Current view mode. + */ + view?: string; + + /** + * User agent string from the host. + */ + userAgent?: string; + + /** + * Locale setting (BCP 47 language tag). + */ + locale?: string; + + // ───────────────────────────────────────────────────────────────────────── + // State Management Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Persist UI state synchronously after interactions. + * State is scoped to this widget instance and rehydrated on re-renders. + * + * @param state - State object to persist + */ + setWidgetState?(state: unknown): void; + + // ───────────────────────────────────────────────────────────────────────── + // Tool & Chat Integration Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Invoke another MCP tool from the widget. + * + * @param name - Name of the tool to call + * @param args - Arguments to pass to the tool + * @returns Promise resolving to the tool result + */ + callTool?( + name: string, + args?: Record, + ): Promise; + + /** + * Inject a user message into the conversation. + * + * @param options - Message options + * @param options.prompt - The message text to send + */ + sendFollowUpMessage?(options: { prompt: string }): Promise; + + // ───────────────────────────────────────────────────────────────────────── + // File Operations + // ───────────────────────────────────────────────────────────────────────── + + /** + * Upload a user-selected file. + * + * @param file - File to upload + * @returns Promise resolving to the file ID + */ + uploadFile?(file: File): Promise<{ fileId: string }>; + + /** + * Retrieve a temporary download URL for a file. 
+ * + * @param options - File options + * @param options.fileId - ID of the file to download + * @returns Promise resolving to the download URL + */ + getFileDownloadUrl?(options: { fileId: string }): Promise<{ url: string }>; + + // ───────────────────────────────────────────────────────────────────────── + // Layout & Display Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Request a display mode change (inline, pip, fullscreen). + * + * @param options - Display mode options + * @param options.mode - Requested display mode + */ + requestDisplayMode?(options: { mode: OpenAIDisplayMode }): Promise; + + /** + * Spawn a ChatGPT-owned modal. + */ + requestModal?(options: unknown): Promise; + + /** + * Report dynamic widget height to the host. + * + * @param height - Height in pixels + */ + notifyIntrinsicHeight?(height: number): void; + + /** + * Close the widget from the UI. + */ + requestClose?(): void; + + // ───────────────────────────────────────────────────────────────────────── + // Navigation Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Open a vetted external link in a new tab. + * + * @param options - Link options + * @param options.href - URL to open + */ + openExternal?(options: { href: string }): Promise; +} + +/** + * Window type augmentation for OpenAI environment. + */ +export interface WindowWithOpenAI { + openai: OpenAIGlobal; +} + +/** + * Detect if the current environment has window.openai available. + * + * @returns true if running in OpenAI/ChatGPT environment + */ +export function isOpenAIEnvironment(): boolean { + return ( + typeof window !== "undefined" && + typeof (window as unknown as WindowWithOpenAI).openai === "object" && + (window as unknown as WindowWithOpenAI).openai !== null + ); +} + +/** + * Get the window.openai object if available. + * + * @returns The OpenAI global object, or undefined if not in OpenAI environment + */ +export function getOpenAIGlobal(): OpenAIGlobal | undefined { + if (isOpenAIEnvironment()) { + return (window as unknown as WindowWithOpenAI).openai; + } + return undefined; +} diff --git a/src/react/useApp.tsx b/src/react/useApp.tsx index 73f2812e..111f8591 100644 --- a/src/react/useApp.tsx +++ b/src/react/useApp.tsx @@ -1,16 +1,12 @@ import { useEffect, useState } from "react"; import { Implementation } from "@modelcontextprotocol/sdk/types.js"; import { Client } from "@modelcontextprotocol/sdk/client"; -import { App, McpUiAppCapabilities, PostMessageTransport } from "../app"; +import { App, McpUiAppCapabilities } from "../app"; export * from "../app"; /** * Options for configuring the useApp hook. * - * Note: This interface does NOT expose App options like `autoResize`. - * The hook creates the App with default options (autoResize: true). If you need - * custom App options, create the App manually instead of using this hook. - * * @see {@link useApp} for the hook that uses these options * @see {@link useAutoResize} for manual auto-resize control with custom App options */ @@ -19,6 +15,18 @@ export interface UseAppOptions { appInfo: Implementation; /** Features and capabilities this app provides */ capabilities: McpUiAppCapabilities; + /** + * Enable experimental OpenAI compatibility. + * + * When enabled (default), the App will auto-detect the environment: + * - If `window.openai` exists → use OpenAI Apps SDK + * - Otherwise → use MCP Apps protocol via PostMessageTransport + * + * Set to `false` to force MCP-only mode. 
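+   * Passed through unchanged to the {@link App} constructor's
+   * `experimentalOAICompatibility` option.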
+ * + * @default true + */ + experimentalOAICompatibility?: boolean; /** * Called after App is created but before connection. * @@ -60,14 +68,18 @@ export interface AppState { * React hook to create and connect an MCP App. * * This hook manages the complete lifecycle of an {@link App}: creation, connection, - * and cleanup. It automatically creates a {@link PostMessageTransport} to window.parent - * and handles initialization. + * and cleanup. It automatically detects the platform (MCP or OpenAI) and uses the + * appropriate transport. + * + * **Cross-Platform Support**: The hook supports both MCP-compatible hosts and + * OpenAI's ChatGPT environment. By default, it auto-detects the platform. + * Set `experimentalOAICompatibility: false` to force MCP-only mode. * * **Important**: The hook intentionally does NOT re-run when options change * to avoid reconnection loops. Options are only used during the initial mount. * * **Note**: This is part of the optional React integration. The core SDK - * (App, PostMessageTransport) is framework-agnostic and can be + * (App, PostMessageTransport, OpenAITransport) is framework-agnostic and can be * used with any UI framework or vanilla JavaScript. * * @param options - Configuration for the app @@ -75,22 +87,18 @@ export interface AppState { * initialization, the `error` field will contain the error (typically connection * timeouts, initialization handshake failures, or transport errors). * - * @example Basic usage + * @example Basic usage (auto-detects platform) * ```typescript - * import { useApp, McpUiToolInputNotificationSchema } from '@modelcontextprotocol/ext-apps/react'; + * import { useApp } from '@modelcontextprotocol/ext-apps/react'; * * function MyApp() { * const { app, isConnected, error } = useApp({ * appInfo: { name: "MyApp", version: "1.0.0" }, * capabilities: {}, * onAppCreated: (app) => { - * // Register handlers before connection - * app.setNotificationHandler( - * McpUiToolInputNotificationSchema, - * (notification) => { - * console.log("Tool input:", notification.params.arguments); - * } - * ); + * app.ontoolinput = (params) => { + * console.log("Tool input:", params.arguments); + * }; * }, * }); * @@ -100,12 +108,22 @@ export interface AppState { * } * ``` * + * @example Force MCP-only mode + * ```typescript + * const { app } = useApp({ + * appInfo: { name: "MyApp", version: "1.0.0" }, + * capabilities: {}, + * experimentalOAICompatibility: false, // Disable OpenAI auto-detection + * }); + * ``` + * * @see {@link App.connect} for the underlying connection method * @see {@link useAutoResize} for manual auto-resize control when using custom App options */ export function useApp({ appInfo, capabilities, + experimentalOAICompatibility = true, onAppCreated, }: UseAppOptions): AppState { const [app, setApp] = useState(null); @@ -117,16 +135,14 @@ export function useApp({ async function connect() { try { - const transport = new PostMessageTransport( - window.parent, - window.parent, - ); - const app = new App(appInfo, capabilities); + const app = new App(appInfo, capabilities, { + experimentalOAICompatibility, + }); // Register handlers BEFORE connecting onAppCreated?.(app); - await app.connect(transport); + await app.connect(); if (mounted) { setApp(app); From 5014876e75792d23b35421422fff41bae7c4d706 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 16 Dec 2025 23:25:00 +0000 Subject: [PATCH 12/30] feat: add cross-platform support for OpenAI Apps SDK MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit - Dynamic capability detection based on window.openai availability - Report availableDisplayModes when requestDisplayMode is available - Include toolResponseMetadata as _meta in tool-result notification - registerAppTool adds openai/outputTemplate metadata automatically - registerAppResource registers both MCP and OpenAI (+skybridge) variants - Preserve custom MIME types in OpenAI resource callback 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/openai/transport.test.ts | 95 ++++++++++++++++++++ src/openai/transport.ts | 28 ++++-- src/server/index.test.ts | 163 +++++++++++++++++++++++++++++------ src/server/index.ts | 72 +++++++++++++++- 4 files changed, 323 insertions(+), 35 deletions(-) diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index 01911e09..800073ca 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -128,6 +128,75 @@ describe("OpenAITransport", () => { }, }); }); + + test("dynamically reports capabilities based on available methods", async () => { + // Remove callTool to test dynamic detection + delete mockOpenAI.callTool; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const result = (response as { result: { hostCapabilities: unknown } }) + .result.hostCapabilities as Record; + + // serverTools should NOT be present since callTool is missing + expect(result.serverTools).toBeUndefined(); + // openLinks should be present since openExternal exists + expect(result.openLinks).toBeDefined(); + // logging is always available + expect(result.logging).toBeDefined(); + }); + + test("includes availableDisplayModes when requestDisplayMode is available", async () => { + mockOpenAI.requestDisplayMode = mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["requestDisplayMode"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 1, + result: { + hostContext: { + availableDisplayModes: ["inline", "pip", "fullscreen"], + }, + }, + }); + }); }); describe("tools/call request", () => { @@ -334,6 +403,32 @@ describe("OpenAITransport", () => { expect(toolResultNotification).toBeDefined(); }); + test("includes _meta from toolResponseMetadata in tool result", async () => { + mockOpenAI.toolResponseMetadata = { widgetId: "abc123", version: 2 }; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + expect(toolResultNotification).toMatchObject({ + jsonrpc: "2.0", + method: 
"ui/notifications/tool-result", + params: { + _meta: { widgetId: "abc123", version: 2 }, + }, + }); + }); + test("does not deliver notifications when data is missing", async () => { delete mockOpenAI.toolInput; delete mockOpenAI.toolOutput; diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 399ef949..8c5cfb84 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -245,6 +245,10 @@ export class OpenAITransport implements Transport { theme: this.openai.theme, locale: this.openai.locale, displayMode: this.openai.displayMode, + // If requestDisplayMode is available, ChatGPT supports all three modes + availableDisplayModes: this.openai.requestDisplayMode + ? ["inline", "pip", "fullscreen"] + : undefined, viewport: this.openai.maxHeight ? { width: 0, height: 0, maxHeight: this.openai.maxHeight } : undefined, @@ -252,17 +256,29 @@ export class OpenAITransport implements Transport { userAgent, }; + // Dynamically determine capabilities based on what window.openai supports + const hostCapabilities: Record = { + // Logging is always available (we map to console.log) + logging: {}, + }; + + // Only advertise serverTools if callTool is available + if (this.openai.callTool) { + hostCapabilities.serverTools = {}; + } + + // Only advertise openLinks if openExternal is available + if (this.openai.openExternal) { + hostCapabilities.openLinks = {}; + } + return this.createSuccessResponse(id, { protocolVersion: LATEST_PROTOCOL_VERSION, hostInfo: { name: "ChatGPT", version: "1.0.0", }, - hostCapabilities: { - serverTools: {}, - openLinks: {}, - logging: {}, - }, + hostCapabilities, hostContext, }); } @@ -494,6 +510,8 @@ export class OpenAITransport implements Transport { text: JSON.stringify(this.openai.toolOutput), }, ], + // Include _meta from toolResponseMetadata if available + _meta: this.openai.toolResponseMetadata, }, } as JSONRPCNotification); }); diff --git a/src/server/index.test.ts b/src/server/index.test.ts index d5e0a80a..e4425583 100644 --- a/src/server/index.test.ts +++ b/src/server/index.test.ts @@ -4,6 +4,8 @@ import { registerAppResource, RESOURCE_URI_META_KEY, RESOURCE_MIME_TYPE, + OPENAI_RESOURCE_SUFFIX, + OPENAI_MIME_TYPE, } from "./index"; import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; @@ -53,6 +55,34 @@ describe("registerAppTool", () => { expect(capturedHandler).toBe(handler); }); + it("should add openai/outputTemplate metadata for cross-platform compatibility", () => { + let capturedConfig: Record | undefined; + + const mockServer = { + registerTool: mock( + (_name: string, config: Record, _handler: unknown) => { + capturedConfig = config; + }, + ), + }; + + registerAppTool( + mockServer as unknown as Pick, + "my-tool", + { + _meta: { + [RESOURCE_URI_META_KEY]: "ui://test/widget.html", + }, + }, + async () => ({ content: [{ type: "text" as const, text: "ok" }] }), + ); + + const meta = capturedConfig?._meta as Record; + expect(meta["openai/outputTemplate"]).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + }); + describe("backward compatibility", () => { it("should set legacy key when _meta.ui.resourceUri is provided", () => { let capturedConfig: Record | undefined; @@ -196,18 +226,18 @@ describe("registerAppTool", () => { }); describe("registerAppResource", () => { - it("should register a resource with default MIME type", () => { - let capturedName: string | undefined; - let capturedUri: string | undefined; - let capturedConfig: Record | undefined; + it("should register both MCP and OpenAI resources", () => { + 
const registrations: Array<{ + name: string; + uri: string; + config: Record; + }> = []; const mockServer = { registerTool: mock(() => {}), registerResource: mock( (name: string, uri: string, config: Record) => { - capturedName = name; - capturedUri = uri; - capturedConfig = config; + registrations.push({ name, uri, config }); }, ), }; @@ -233,21 +263,32 @@ describe("registerAppResource", () => { callback, ); - expect(mockServer.registerResource).toHaveBeenCalledTimes(1); - expect(capturedName).toBe("My Resource"); - expect(capturedUri).toBe("ui://test/widget.html"); - expect(capturedConfig?.mimeType).toBe(RESOURCE_MIME_TYPE); - expect(capturedConfig?.description).toBe("A test resource"); + // Should register TWO resources (MCP + OpenAI) + expect(mockServer.registerResource).toHaveBeenCalledTimes(2); + + // First: MCP resource + expect(registrations[0].name).toBe("My Resource"); + expect(registrations[0].uri).toBe("ui://test/widget.html"); + expect(registrations[0].config.mimeType).toBe(RESOURCE_MIME_TYPE); + expect(registrations[0].config.description).toBe("A test resource"); + + // Second: OpenAI resource + expect(registrations[1].name).toBe("My Resource (OpenAI)"); + expect(registrations[1].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + expect(registrations[1].config.mimeType).toBe(OPENAI_MIME_TYPE); + expect(registrations[1].config.description).toBe("A test resource"); }); - it("should allow custom MIME type to override default", () => { - let capturedConfig: Record | undefined; + it("should allow custom MIME type to override default for MCP resource", () => { + const registrations: Array<{ config: Record }> = []; const mockServer = { registerTool: mock(() => {}), registerResource: mock( (_name: string, _uri: string, config: Record) => { - capturedConfig = config; + registrations.push({ config }); }, ), }; @@ -271,12 +312,16 @@ describe("registerAppResource", () => { }), ); - // Custom mimeType should override the default - expect(capturedConfig?.mimeType).toBe("text/html"); + // MCP resource should use custom mimeType + expect(registrations[0].config.mimeType).toBe("text/html"); + // OpenAI resource should always use skybridge MIME type + expect(registrations[1].config.mimeType).toBe(OPENAI_MIME_TYPE); }); - it("should call the callback when handler is invoked", async () => { - let capturedHandler: (() => Promise) | undefined; + it("should transform OpenAI resource callback to use skybridge MIME type", async () => { + let mcpHandler: (() => Promise) | undefined; + let openaiHandler: (() => Promise) | undefined; + let callCount = 0; const mockServer = { registerTool: mock(() => {}), @@ -287,12 +332,17 @@ describe("registerAppResource", () => { _config: unknown, handler: () => Promise, ) => { - capturedHandler = handler; + if (callCount === 0) { + mcpHandler = handler; + } else { + openaiHandler = handler; + } + callCount++; }, ), }; - const expectedResult = { + const callback = mock(async () => ({ contents: [ { uri: "ui://test/widget.html", @@ -300,8 +350,7 @@ describe("registerAppResource", () => { text: "content", }, ], - }; - const callback = mock(async () => expectedResult); + })); registerAppResource( mockServer as unknown as Pick, @@ -311,10 +360,70 @@ describe("registerAppResource", () => { callback, ); - expect(capturedHandler).toBeDefined(); - const result = await capturedHandler!(); + // MCP handler should return original content + const mcpResult = (await mcpHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + 
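+      // RESOURCE_MIME_TYPE is the MCP App profile MIME type, "text/html;profile=mcp-app".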
expect(mcpResult.contents[0].mimeType).toBe(RESOURCE_MIME_TYPE); + + // OpenAI handler should return with skybridge MIME type + const openaiResult = (await openaiHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + expect(openaiResult.contents[0].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + expect(openaiResult.contents[0].mimeType).toBe(OPENAI_MIME_TYPE); + }); + + it("should preserve custom MIME types in OpenAI resource callback", async () => { + let openaiHandler: (() => Promise) | undefined; + let callCount = 0; + + const mockServer = { + registerTool: mock(() => {}), + registerResource: mock( + ( + _name: string, + _uri: string, + _config: unknown, + handler: () => Promise, + ) => { + if (callCount === 1) { + openaiHandler = handler; + } + callCount++; + }, + ), + }; + + // Callback returns custom MIME type (not the default MCP App type) + const callback = mock(async () => ({ + contents: [ + { + uri: "ui://test/widget.html", + mimeType: "application/json", + text: "{}", + }, + ], + })); - expect(callback).toHaveBeenCalledTimes(1); - expect(result).toEqual(expectedResult); + registerAppResource( + mockServer as unknown as Pick, + "My Resource", + "ui://test/widget.html", + { _meta: { ui: {} } }, + callback, + ); + + // OpenAI handler should preserve the custom MIME type + const openaiResult = (await openaiHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + expect(openaiResult.contents[0].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + // Custom MIME type should be preserved, not converted to skybridge + expect(openaiResult.contents[0].mimeType).toBe("application/json"); }); }); diff --git a/src/server/index.ts b/src/server/index.ts index df7a36e5..08cfcc34 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,6 +1,16 @@ /** * Server Helpers for MCP Apps. * + * These utilities register tools and resources that work with both + * MCP-compatible hosts and OpenAI's ChatGPT Apps SDK. + * + * ## Cross-Platform Support + * + * | Feature | MCP Apps | OpenAI Apps SDK | + * |---------|----------|-----------------| + * | Tool metadata | `_meta.ui.resourceUri` | `_meta["openai/outputTemplate"]` | + * | Resource MIME | `text/html;profile=mcp-app` | `text/html+skybridge` | + * * @module server-helpers */ @@ -28,6 +38,17 @@ import type { ToolAnnotations } from "@modelcontextprotocol/sdk/types.js"; export { RESOURCE_URI_META_KEY, RESOURCE_MIME_TYPE }; export type { ResourceMetadata, ToolCallback, ReadResourceCallback }; +/** + * OpenAI skybridge URI suffix. + * Appended to resource URIs for OpenAI-specific resource registration. + */ +export const OPENAI_RESOURCE_SUFFIX = "+skybridge"; + +/** + * OpenAI skybridge MIME type. + */ +export const OPENAI_MIME_TYPE = "text/html+skybridge"; + /** * Tool configuration (same as McpServer.registerTool). */ @@ -53,7 +74,7 @@ export interface McpUiAppToolConfig extends ToolConfig { | { /** * URI of the UI resource to display for this tool. - * This is converted to `_meta["ui/resourceUri"]`. + * This is converted to `_meta.ui.resourceUri`. 
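+     * When cross-platform support is enabled, `registerAppTool` also derives
+     * `_meta["openai/outputTemplate"]` from this URI by appending the
+     * `+skybridge` suffix.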
* * @example "ui://weather/widget.html" * @@ -130,15 +151,31 @@ export function registerAppTool< normalizedMeta = { ...meta, ui: { ...uiMeta, resourceUri: legacyUri } }; } + // Get the resource URI after normalization + const resourceUri = (normalizedMeta.ui as McpUiToolMeta | undefined) + ?.resourceUri; + + // Add OpenAI outputTemplate metadata for cross-platform compatibility + if (resourceUri) { + normalizedMeta = { + ...normalizedMeta, + "openai/outputTemplate": resourceUri + OPENAI_RESOURCE_SUFFIX, + }; + } + return server.registerTool(name, { ...config, _meta: normalizedMeta }, cb); } /** - * Register an app resource with the MCP server. + * Register an app resource with dual MCP/OpenAI support. * * This is a convenience wrapper around `server.registerResource` that: * - Defaults the MIME type to "text/html;profile=mcp-app" - * - Provides a cleaner API matching the SDK's callback signature + * - Registers both MCP and OpenAI variants for cross-platform compatibility + * + * Registers two resources: + * 1. MCP resource at the base URI with `text/html;profile=mcp-app` MIME type + * 2. OpenAI resource at URI+skybridge with `text/html+skybridge` MIME type * * @param server - The MCP server instance * @param name - Human-readable resource name @@ -169,6 +206,9 @@ export function registerAppResource( config: McpUiAppResourceConfig, readCallback: ReadResourceCallback, ): void { + const openaiUri = uri + OPENAI_RESOURCE_SUFFIX; + + // Register MCP resource (text/html;profile=mcp-app) server.registerResource( name, uri, @@ -179,4 +219,30 @@ export function registerAppResource( }, readCallback, ); + + // Register OpenAI resource (text/html+skybridge) + // Re-uses the same callback but returns with OpenAI MIME type + server.registerResource( + name + " (OpenAI)", + openaiUri, + { + ...config, + // Force OpenAI MIME type + mimeType: OPENAI_MIME_TYPE, + }, + async (resourceUri, extra) => { + const result = await readCallback(resourceUri, extra); + // Transform contents to use OpenAI MIME type + return { + contents: result.contents.map((content) => ({ + ...content, + uri: content.uri + OPENAI_RESOURCE_SUFFIX, + mimeType: + content.mimeType === RESOURCE_MIME_TYPE + ? OPENAI_MIME_TYPE + : content.mimeType, + })), + }; + }, + ); } From 1df66a3a8bff43f6f9d7ec218dc12a09c8b5665a Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 13:22:01 +0000 Subject: [PATCH 13/30] test: update Three.js golden snapshot MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The learn_threejs tool was added in #173, which adds a second option in the Tool dropdown. This updates the golden snapshot to match. 
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude
---
 .../e2e/servers.spec.ts-snapshots/threejs.png | Bin 32501 -> 21343 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/tests/e2e/servers.spec.ts-snapshots/threejs.png b/tests/e2e/servers.spec.ts-snapshots/threejs.png
index a71fa6f31bf3289682cc8cac18182cc3f6b492b2..fbbb8e7167f8b95ca197dc35baf2405661608017 100644
GIT binary patch
literal 21343
[base85-encoded PNG snapshot data omitted]

literal 32501
[base85-encoded PNG snapshot data omitted]
zCeKS#YR3XRR{AsXiKxG$wtj`tRWJh&x?!h{ih&q$h_Y<hUYC6FFXc6i~}b~ zEG>G0&fjU4nxh30hU=#JF5=B2{0`N#`&Ui~x6iSsnZ}q=qtK0s>avCX($dn-P8svT z;bGzZKJi-DEh02@16A>I0S3idel(O-3DvEzh?uL9C);C8HeRlpSH1>&a!G;iJ*z zi`CGrGGggqk%|>?QZ(|hv<%=q$_G{=$=tW6n}+hiiV58E2?x!z=i8-7b}Am53D<_b zq_#E*^F-+RiFTz;lz_+nZ^>{zwAZQi?dEVb+o~DAZGse!U1`ndjErpy4j`>^sfQQG z?IIH2?*E>;>9Lo9?&|1pJKJf`1cg2g62vU0Q|fz^3|U}-dEy{uc>!tkc7bl0myOZe zc>(LE-u-|;k^0pKrPr@tBWs=O)C-ey)k5{XPE24Ukj?5Pzmeyjn0???UjE9hVBDrw zo?x9)?_;VUrm%8~Wi71YU<-}#!BW`IQRS!9FmEk7|JJ{l5@Kayl>XsavYp$in5n2k zaz0sD>100@RAE2IsF1|_9vQ>|sy8>l{Q6m!(O2rbx4Q_-rP)7VM#}~kP3=y^pSZEu zAXVL@!^^@kvQsc?D{3WeR>_?iE%Pm^#~1sG;pLh9-0$ZPj@a(Q_qvz7j#M_juGz%L zlIgq`4L?LMq4E}&Dz>^A+>XwQUSZZqvwLVLc&v=~e7H9~SZrszV(PGx9&T1AkJyr288bPTOQruDcmtWGCox(UvKD#?CyG%s1sr^&gh6S0W_PZ zz1fERoLKM5oACl7gz86Z%g>D5JCwr7M~kXQY}O|P$saUvHW3;0@cYY^8peu6-S|z4#wsjhzVwSee$1ex>2tk@wWvyL^8#fFoBTKz zj6OfC94XLD7ps^ZD>Fk9FmOXpkov+ihi8YCh$vnV>u;|HZDr-KxVV!-ThDx-o^jP| z1;Euo7(UC0)?-%l8Tcye70uvz3!I*N-8>h-uR3h1PZkp@s#Sh_Vh=_#6InMTAGy8h zhgq#A@;R#I6tji0vO;%SDKo!Q+9N6|cPcV_qulYaTj*qnhv75J1zi!2k0d`#-W&`Q zL7b3|dlH=<%rjX~Go)i?g`2c}cxUuPS(Z!7XOK2UHh#<`xW;axN`Inkp0UH)URwGZ z5kj*TW}lL0^NouC1G+)$B)z0Y(^b2vx-ixJ6qM$%iW5fuh%JQd$@n-+_e`@k z^r-f=>LXUq)vPcOWwiTJcuZZ$8i9<@3teuvok4q7;iKbdUs9h>#dFoEAlY8+*kq>A zX9TibhbW>g2$k0D%b^R0#JJH9kf*G(f+CRq&G|ODJrm)b9 zBj}8P)a!Shv91&L{?W7N=!;V*7Z;a`{E$p3MbKM|)O3m9nv3((iqqZ{5fOIw%>bt4 zzVpbmWus6b#qF14B4RF4|@7#Q9@%{Lw)qNv{M5nzDGg0vn2;gV9v?o*_( zzWYjMv}d+JnK~$Cr6qtA2ryMH8>np^bJ;MD_0bX)iUuHXWxOfK)DMe{EdRJO8%)L1 zu-nPHu~B*WMdBiFQd4())B?41Q$8tXB=PB-UnbMR6u8zlWdefkp{%S+_mdao=D4rT zM5MZ91B%v_~jRVZEaWG=BeE`za0S(j?i;8_{fd^+nZMHqpTF zu!Fwy)(I}@au3AxPs{p!aW6k~$jI<|fpfBGJbwYfJ26Ox3$NV{rWs}uI_Pdh%9o2s zmK-t&;;;f%q0`)#D$c+u%XM6=b`H0p5%Kb<@-7h5jWoF@4bLN`-;wzl`SHQIdT{}W zg9GOC*8OM`bT9)2A~wm-%v)@6I)VM+?1sYqLPr~awr$c08@mi;>#8-Eo{3I6eMl!*pAnX# zzscn8{e}T#E>ky|jcKo_BkS`vu*VKHy#E#yA{$8c*WJc&NP2|C&HUj0Da~42QI&A^ zD^>M{1deiDR562?u+Q2~Rh7ouS7sRIGC-e-cfL3(&bL5HgOJ=B2+ewgtB$)$hF3?@ z*y2<(5b-!5XRM>aOE}|QUY_<(xHEDNL3nO&w=7R;6QqnUTC3=P;o!%UeEXJxk}TlGit!n7S;wDOk; zkvs%knS*pPkh#bzC?vl!@An}V9^HAY!lG6iUB!y*6yF*xnX`qSjsw{ava>UTzfcSe zS?$7Q5+k~SF=T=3YV~A2ltA&Uu&`S#=0=>*@Qr-Mu&zZBs{+{(8u_Ae+c+a6JeA{+$WWApzIn zVTtx&x&DRO>J6KytV#lxpCQlbjv@1fu6l+-@VN$$p#j=XMjpwAkfr3t+egOW7{ZNZ z7w8wSS?iGKcT|(DCP0$vt*Y3+h59+NdQ&IdQFoJPFH4>}ZOT*mhw4DQZT8X&}l+e70y2*cvO7Jg>(dj#r>NU0$aMd!FxvLZu82H!9K5qbGnD z2jcp2oLTY-ka&%idW~P{xTkX7-PLur3)t6e5J#TUjVd6uS&fq+3Jndt*h{%!yVHrl zt$4wmZPaj~Hjmrxd=IRU?sxMpg-A^0M|989qZFR4o;dwU6-*_?+~c?Y=N$cpspYjC zv91aDu>{yuhS~SG--&G{3YV{Bu1OZ;J?R8=P7rJFaq(2w!SJk$Ej{s%N}2}3{D z9zo8s3_y!FjZOlVTuk%%Cn9upxy#d!Hi9NNs!7KuW>`lHFRjZ-JK{*cx6sr2$UDff zh)TrC+qjt{I6gi*nbl*bl?%GISFh^%AYE4e_ZAm{=sJcompw>uq&}M$oBu53B*Sc@f zz}~{lIakGAtTh+KgH{W+S3O|Ha}6U)$KMB^s!jZ|pPN;1*p%k&5OVu$;v3Q7U zpH#sjB)wAAT_dCqETtc7Y3t$=q3ZfHFIPWbY486KX*9?+kcJe(Dq>B(EA%>h3)rH{ z&1s=$J9+B)KUJ@OYCPVVgCWk{&7mu@`tkhEt9!j7Fx?yAGRqNm<`j1Q2A$KgqN2?$ zpqhyWX`_lJmi7p#xUGQt48(*hA;76J6@kY68PY1@^&9YUB5K|~hnkFG>v4F8pR+^$ z(|-5YXy^sDG!M7TP%ZgIH8K`R#OdUg@acNlw$Nog zW(=Xqb7?r9En#g3lH^uO*Syp@MhlT1!Vnkr)5RXta@Cb&x%QsIXI!Ll1Rtt{7v1`Xe%y$8~ceC+FGi6x)WA#cRZZ z+m{g(Ne0q}vk3@5IwP4wg$_r7>f~wEc*@y4Q;HWRmxA%Frsfz=!w%tX6Ufn~b%jGh zHxK|k!Dq1mxHbO9{3OZqtpQv?8t7>h^jfo^AsvI8FAz$>f$7SQbf6P97?Z zSq37;R>SG0f{pqcI>``fVUJ@~MWLwh@bLKfJo$31LSJ9s(mYF8y+(4a)p$IYrPtXU zEmGXlGefbnp}&b(e^Ee2TIO}Z_ZPBTInz1d-@lt!mi<>V{2CuVx0)!PvK*e|w4G_F zGYGcb{FYp^owaW>9GWh<*p1ZQ?ap4(4_D(gF|^akZA-Ep&f@U0FtF>U=@t59v))0< zMkOT2y|%L@Siheamf6((@Gt7;?(;&E=tY}LOP*$XzmI2C@-Er)P?I@u3bIu{kgXVV z-{WKfyWj*Z@&&dhAok&NcDuteZKDJ(x5yK$isa_ 
zcGv78bwmg=5$;BpKx&h-ROHU|>Z+=flby%%MzA!KBO!frc{=B*vjHNWlz*Is_> zGEC?RI`^8>=%NkiGW$DmFRydA#z7EG(@j26VL;?ZGFI@QR8>_YCSDU{(-4<3(EIiP zRYQwM3Yxk6m7*EbhJ7o>#X4E=?SV!f1b5ua7`3&FT@B zmlDtGu-KCe1$kPqh$Q21H~x;fo$?qS0T_^jfH>#}AJBZ!Oux8U#Sjoi^UayJ6jnRH((9EV#E z6y-!FCl_t|1|-yEIJFB|OX>wa`ffZOXkl~^olT%$FhzNW-tfowgt?W&6nHsPIDj+# z;9#Ei1WMFA2)~;fO5vq4-XlJTc{c!Z=;j*HaUN3N+1Vk`Ph75=RuAua9$dj8+;F^u zx?*o{Plq@fon;^uxVYQ%>reW^%E)L6EMEY>BEEhVSeksfCXmB*a{#mh5Ur+dIbAzO z?9EHS%sa&SFH?E7D}PY52mkM(X#DuoSy@>=_{5L+oun>NQUFS#BqtxrR!jk)A{p@& z$zEsaW$f`>XfNMVqGPYnh+)Yy;5p#oDd;`^3#jo(p7#oD3DR)dDQ42*95x>AjU zt{<5tKK(Di!uQ`UYfl6sgA0-oykc)ta0z3If+cP@$`fCR6h1h}H~AZ3++4nj!#Ds8 z_1g7mMPXt0?ZzeO=FTJRmcLL>D7gNq?KqWfowB<6ki{%exTdA2t7q4rW7YtClv3A8 zIe!Ilf!%U+bi}`V|HtJZC++CzSvo~cMv_;Z1TY=Rjq6*!7gm@dPkBTgjPs7oL{&*v zmV|Q$+iz|7r?1jj)eidj}~5RIZ_nuSg&WBza~s8 z?D6qvbJ7c;@BFi!7CO-Z!QGU1gD`e~AUnkiHMjGunKxMTEXN|;YB;^!*9S7`ux5tP z%#+`^XU(@nNlYX3P)YvE5U0#RdAZhl|6k^3Kx$RDUW$DA@}*W!VPRp^ubA)O`*vUX zth;tZyVMGCAI^K}vfa_p&|qX_JX{-rE$jpF;@Mxfe(e)Q1IVSnv3d2mH23Rw zdavGJkohT1R7;(C1SZR?GJ6N#5E~<>=RW=-m?i(2fR5W2t|pxolIa*1Q(qiXS!~c;PKJ5+EA4Gi#$9bS zHTtG^hO0whee6KWxf;nFkOv5qiRCs-0gD)LQY7A6HETrx2x@UfB@I{0XiKN!(kmcY zV_f@Ib^O!!4i0$9%FNWYZzWH1tLpDpxt*dish+B0CnM9jJ=44(?Ys5EcjyQWMK3te z0C`1(qHtJLrH&6hG@#po;5}f?xP;?{;TO5cx%F-ybfgKxghH~QW>y^l)07rupn(i1 zBP*+uXsp=4hmFBppZnloL_a2Q|Ho?|Zkvf`%KFI-my2LCtbF(99ge$?1mB5C<`cp7 zGY0mlrqd;-pmVKrjL)2rpPFfDgf{goFW_bEpYjw6*hj(DX=$@m=wrXbY&G`<(#C%OERS2HLsdPFWfG@-|pO%79H`AvFWCkrISCef?#tSrWk zlv2|TMZU-MSa?lnt6?0!bDmlmxKOLYu$91#9dP=3NP%Cfwc9`MFjOoqEg@0#J9$dc z(P>A3E6Jb+wI>QHIXUg~AWphVjN0|w7NTtH52E1(WD&M?J8gH^t3)XhxvjM{G@=tZ zlyKgqPkn|x#{d_O2NXp>>O#)(iOB?Zq2d>wUUTDg)Y0=-XsjFCh#!i4G&1Tx4jKwr z#}#9Q7mtT$>~fS`9dm~*R=}Kg)1u5?c|EsNqWflTK0eR0YkzOu4a9DhNk)4t;DQ{G zM7`EGu6`;leJMQ$0QtD+g)@A>p!M_bTN*P-y#@QGIUM`9%+TV=Zbm()CMBgS(Nrn_ zVld{AOK<7OQL~(;SMQ#yw0<#1d+{tZJT`XJVF7mob}HrZIh{U*nj=tl&!u!Wgz^}4VhZcPSr!2kJf+Blv(EH^A~*PD+3dd!f83BZ{mMr2jLOCUTp$wp65_nnCprA;(>^OBgG9V&{#7cAyS`N7T5rh^R z%T*nt^4Z;|F<}wx>w9rWEeH0uAwVS;mX<>hWpS~eErQEhRQBp-16ecb8X!kDaeA7X z%;bYe_F6gT(CbpnnVspl77)+^jCQ=gy9Fex*->A4tVaK0zbAXI0uG0Bu0=*ZqNE(z z@jUZFw5)GSlQECN*=#I{kDBmlrREwxU9%i601^@3Nm=KUQJ>zff2&fc%(UBmIn|$n?cL)CTfjQ3I~dkhU#1q<)0+Ztk!KaE%%f8d zND#?}+7UfH{V(?nV;0zKlIs++YQ|?Mkh@KKoo@6u%=l5kZ6@<3Ie{({_X^;?qrh5J z;Ve&cTV$FJ&-QwW6;HOCiS;2HT;!|zSl}{JY0-ir7K6_!$FYN|ikgKQns54T^Jgl6 zcm~9tqb|-~DRRu3{zc8OV=ejZ4iy4+0Ej+gTeEV0VLT@L(sQ9RYWFXd<>u zI9WO0B5%;tWSqVVUbm_^GU2h;4M5zX8!o3zDIQN(YD|7bPSf1Ft|e4XrYwS4rgK~V zYF~b5zWM|Bxy2S%aS*U%&p}Z8YDY!=Dzlj)VBiINW5hv^HO1K8&b#+0NC4imT_=wyl<) zvaBVp7)4lLhWz(V*&KNI9#NaK!3)kk>km|Q?$KqSwM)hd+srT%nF=x2)tvjUa#6*= z@@PePI2a^G4o>S$Y$}_hOOd*hk@3k^0|N2snm(x}{ z)JP{7%O|;w6qJ9JOoll)8W`ADgNI{Yc?}o1g!I~o@adaq`Z_RAr&0fd1!Pvw56b>1 zN-IFchofdn=EoZ>OyVpt-<$;S6JIPtJWxpRnTJWWegertjm^w@iTyo2bH2OLrPHqog)jW;YRYmwNZE+?}tX^=*x=j5uGYbN?(O1u!A?Rp1jl7^7WWTD}> zJd#)aXmvLQmTPtXQE4ZFa_U9XgB-2X?Pl#dR7OT(N>6v-+EfB;5<543K3@vz;53u` z=q$)TCNisan|~*{KI0h zU+f~X+6_djr$A&g;q;h=tNX#giO(ygH9g&T>|524f%Q(>VlM+lCZ$u0nHpVlx7;S{ ziqNE5W2+Aq+b!n7bRS=`^bL}cr|=yeFOAY9jfvcIWdkCztfF)L@!G1oRd{;3R&=Xj zndU~giV-F$Ja+8hKvEjBVOEBs=TpUU-Ia|5u`@V2WYv4Mvm;hZyf6 zCa-AsMx_0y#KiVAY%xSAm*2cmTTf%Dt4CT9-afkSa!N^lO799oQ$$1w%^qLey^+B< zF)cSjuy$oAW6gZFXq^`4^#CWo*#+hfa{QUd*VHx<*&CA&V%4gLmt(vG<+~GvIE8?< zvS%@qcQEe4ENOALzoOaXE+gK(IK(;}b!yeDnnWnWx`|=op(vVEE0G=Z3r%@pB=&sh zjTS=1*@Tq>Rv?q)Ir1&i1>xUX<*-yU4;@$vxuqxor}45pi(PRv{Z=gfYplwBA#2y2 zT*PAc+XJJd==X|nFQd;fY2L4r28;$2U{B6$Rf-y{m41NOjA`cZf@P5M7+QI`lJb zcGm*Y9Tmsv+4JTHg?T3tulM)H&0nt7{JOx{cl> zL?wT6Xr-gw(Ei8f&_dA2aodC!WG}gdS7qR|kgagPTH=H2Z5Myg;h2%fl$QQub|KrW 
zH>*nitFuGC39M`CA|sJ}O@V3flT8a~0V}H$lRCJ@p<0_yO;2as3qcO5Sc^g zh&flmoj011plNHnjB2$7A{fZAZJ*6dbsS6Tp<<(*mF^x<+n++s7SQG{8x%AdrKcG; z6c?AklS*aA(6c|c$D2ydbwe?y-0i{-)1CJs_aK2g>eR5kJ1${Og)4Jyq}u1xn4xS> zXIOKYLf$EP2)(-c#(}n~gX!Y+n7a+pmct#&rt$y;FT^2qe^?9AaJb!jJNkUI@BrK6 zsA*9VFta({G<6NhD$$+9r7?ADMh}#CfWb~#AcR`oCOTR~V?tAYDGL7F8Bp3_-y`m| z^km~2j0E3I9P2T%6&vy$VI54V?%v^StvNWxl48g>v^1BCl%<*3-JV4F)E0Dy1jn47 z^G$Wm;_}KoV{}Z`u`EC`Hgif}(#R550 zH+JBt&IKKvmvLXjMONBpRM_$ zFUq!J!$_C|5ym$S{$Lwr1UFSM2?6tRea!PBhfG@$+S zN;`~w#YpUSXytXYw}dHPBZ*E48KE`1L$2sj{_>MK8w4v)|9M4qX**v`5s(>nPSkan zr+!Ddw0Ex67Uh&PTQB}B7hTCqJ1{94GqSeUN6SGouEA1G{YO?iZLnWJhx25}VFFw6 zz`}YTRR{AL$1(`Sy0he=N@Hby+l##r-)8J4lZMSy^2$TYy^bs3dkI2EpVT0GwHfg8 zoYA$;(-k?0^G;q`g~AeHZWDv$s&3ihRFoRH&f{QFrepHcuDrI#ZeAABEe`cCWhl+C ze@XDW(=2=6y~EJ;_u%0!wt#!iCi&5}Q#Gu;6gl~2AqWGHv53}#>M}+!nN41f;$Dn| zyHV$e=MhCzT-i20-K^LtdQ~ps%sl2t4SRWE(Jd;+Gnj<{Cb;r-Xz#Q#U@afLlZ>E!0Fg$k%%S5&-jx#+vIllc% zlRYYO?!!ST_eUhlwT|{(SR(A!Ah<}cZYVo3&W$rW!)dcCh--GosH74lvKvlDXBf7gXFZp-+*yaO(g~6Jrfg?C-_!jC`u)}_cnykHgng3S zOcl-+1DSNO6)&jl-|C`HS^ZWv+Lj%41L1%3tYRvw?^SIpf19R;-WGV{#(M~=KtD4# zf(*mH(AgswAOIisiw|U3=#-a*x99A4^%rXAt(DT_Nw(JX1N-mTu9$_(51n3%ey5FK z%;g=d>7TXgiFnZ6X6XD5t6d2mW(Y1GPwLoMr_~>4V!!fkEWADACe&x~dtr{%huu?) zv%?pp{o!+LhcAlyL3;X+8GdpMuA4jZS6eDnc`Zb~v~>1ND&=3#o6?>uYiZLIz)rF; zn(rp|;y)mew?HlEHAt~As=SQzlq{AS>svtMAzk}QIJcali&)t_KG?@;Y+BSCne=Lm zXzR?CW!RAal0%o4)(8WA^Flqe-&cG+(FPLMwQV?L)UfQ}jvD5yJM_tk!TN-LuyuuH zL|YYiAqPx4)RJh6qW$asjEk(C&wv5w2wzcAEk@RP9p60lN`)9}UOqc4lqMpE#4H9j zZN8~a&efpCcfT-XP%g#;1ePvQ=4$Zs7OVs1HKe66npl;BHtC7l;P`rW;-Q626e(7B z29FcwK@WKZnTIv*8M*SWzfh-GI66gn8u7O;7>Dv6I#2d{e96BL2^UvK!t>zvuNY0* zd=N>g{LQiLW=mezsC6_aLxx;0-ud2aihp79rCXn321`|AW`&w(HgDIUG;w_A5k)?< zLKIWSo7(F?{bGqaYV~W$yYPOU=(Nt7n_yMvdM*arZ>;Q$!j4t7Ic=0ugl|523bf8j z*-I1_Mmq9i^YX8S*oWLFA00BqN{3Nw?Cuy@@3qj;iZr>Vw)ATdk57YQF1)_1Av7el zhCjy8F^p~LRluuWb#tYvA`q3M{LW{ueraA=`?@YSMk!5N3fG9Kto@lYI(U1`Tmd4= zFb400@-OHYqYxbEU1vu2I&LS}V>bnQhAaH}R&Ou1|Y& zk7fnZrZ|+!VtkU03Y0!;`G}$&$s* z6H77Kx7B|5PoX}`EMqSbt;4%i3K6|%Co7rAw>|A6Cl$fpDm)vopBQgPxeZVS62wf^ zv$I=!xVY5hmpL=a>R`SJLJekS->yZped|sB&WKseu z^1w>2T$_~^Okv64eP}RG=jvC&LCD&f1{h&e|44Z!u53xF8)27OTbK2OAv58?GYI6i z{!KuqC<@6_ov&J!j#?iTwd=Kk82;woP_(o#{g6`O^%9eYx0_O$*eP0W*yUy#=* z(MUpbBvRd!(LXt()xz;ZyPR62RTLXF!)?^Vfv0*%cqftVc@iXY=&3e2KLMMIL{YP(R1Hy zxU1v*r@9E*iY8h338YSgsQeWzD@BkzOSab-4Pk@sxER*f$B=V@PW`RBrvO9Si-}aY zn%mmCI$@Ju56K>t7EE8^#yhnL#DSRuIF!~hfoZB|hjcyo(ZFgF`Ls&$Un`kXh9Z#R&FP~#(pLBSjEpS4XtIXzhyohM$7jjTKMN*Fow zcl=2s%lN9&T9+^X4ql_?+N(o*K$4lk^eE>xXu{zw zg{6o;t65usjAGK})ycSn5xxS#Aw5fv_yLXs>D;PTC)W4ZX&F0{1x>|dR#3YZ()aUD zGi>N@q{aJX^|=ntxfXalsb~-FxQ!h>jrFVHhS|nbDw_&TH z(-}Kzohh)YW8Q5kwD|=3cfebt*}s9GST=mLXaQ zQ8)0llu^)wXO`Mk3+!q*HQXuZ7YOOy4y5xFc;odC7SI6ofw`y+?oe5mgvP!!YdMZD zO;fi7kIycd=Di176h3}<>sZb1-PyAML+QC)t@CLy-7{+Q}hXIJ9mQ0+OFqhWVR z)7qu4Z15gx5#`;s+s9%TQQs%Gwmy6)|HagW%W4y|lz)H@>G{NIKk!z=)F^5#t18-b zwMeONR6aSb0A{jHa)TwmAOc1ll^G=;LI(NP9cEm}%zAY5s&f?VX8TtH=RLMmRqMN%`lr#P-=2~9@l&&Ofwk4C&Wb3P9)=mmbQuTKLHYMueDqb;WG2ZQ zMEz@-MsHv$Q)*!YnnL}U!ozp!ON66VF8OHfY-)_oiVT55PgXp^R!5!tr}$uE?6M?r zk@X`e-^aK86$z{so}c_`@+=S_O}%Jt0S!&Vh`9xFjyv)MQhr~nR$UGeLt)f&`MI`b z2|!7a0va>GdLV~|c(;1UO`Hoi&0RCwX`g{$mPsM2Uqee+vnWbU-IJ09~iObPY>z6#eZ@hHIk9X&WlPOh*~Y@ zqI9};KJ9NJ-UzN&mq}rV$bD;G*xwqZ#<1PVniFjq;b}1}d&QMFEk0o=8}4J9U#r4k zPWvfrm^LBaqR_^RlWXv1XcNcuaY+l~*j28nIhAafyJS5V^U9tYm51zg%I`c1z8r_6 zR7}$#4tDl@Wtzmi)j_~Hh0$POZ+@g^$sJyFtiQvx9n6L`O@cUu1S&jQXK7(EAjNo( z_KDj)83oL_zHft;V0T+;UOE;zj%83s2~SlxFNljc3PePNC#K4Tx5n|gdC32c5UE`d zjaZH0$A&0G!}&ps=4W3r7Ff4Z5LykGyD1GyG9=7nV^Nj_P#4+y))#`DW}i73XZuFv 
z4@`VzW85>XXo~6OPhLB|7HkU>ZqqEcIq~U?e`(SdDP=Tu^J?vcqF`NllZ7e}o$L5g zXBEe>1BU^U?_ye$lAcW83?68GwpF2_W2GjOPD zyrVSe@P?&*XcgyTIX0PmVKBVCh<=}CoE1$8^Y~?LZ>pOee0$>196lxzQf|-A_c=z( zx`fC-eNxOSdn%S2Y-)Of8IrB+sIlr9y(VLJlTUa9gx_Srr?PF|Ze~^~@)PT!o}j^2 zcsKaBnPSR|w;Fm>g5$qoS=&o{`NY>L@6v=ak45C)84(eLR_MQN^_Nf%F+asjhPZSa|`_c{8s&a62P8xyySPaM<8spsi>_d)^|Fe|Ln!vb*8s(n=`iY4E zItIlBtXgO3dEOwCpZhBckde@oTLMD3;-Xr`Xwu=(sih>DtaGVwJ8zy%5*co{(3VEN zFqC>Pe=olMd$^O{N~pZN;_CEW*)MZUN+T{8Ri!VDqS-Gr-`25qtOOo#ag1BEuR3f3 zNH+Dr-16`AduBk>3?JCYpusfPOe`XJakeMmW~M(Df$UTmNiBn8`p@xZDi>Wv9 zugzO1mq3`|P`~qV-Fs0b@AJx7T|eRU*N24*E!;S+Epmpu+qFpc0xyVgMRsHi9z5%-(Zxb20EGR>xC%(#)^)vV$eQ|K)Y z5J(-fV!yxq`zxm~u;r`C2R}ce#JD58*`FAA5@+%LMJp$(%(GGH9!MEISuLU%XfXjEU%lpIK5B%Zr`H4~f;nU4jP1+Qu8Zp1?#arL`ST2z^ad{WZ zdEwzCg$8|iI!a1ExF~*IZg|0?OTF_7_wj$t8ch85?QSLyFyCTgpK)y@ydUFE)MtEh zy?BW!Dd>s#EZqO_o|DyxUf%ZI_6-+HLLWW+(Yc)u!EdTBZ?aFFfq_A*`Nx&2A3sDb z*}hX<{zl@T5&xG2zyFh4U*SD=j7`46D(`%#-#zBJy!N**?0uJS{(~j?k9FAr9mL3~{kx5~Z{PlV zO$^v?FFgmIgqN3>qhHqBEs*!JRN@)h3#L3A?ccJ;^!@v{&$8hW>Y$}JJFFSh+JqjI` ziu^ORy6P$_XLWqxu z|No__8}zN=K?ENahCTZ;wKaJe8E-7_zrv#=Bl{41gY+pMGnDoe%!;gYmbjp9G)=6Dj zI$-$u2R!y?&-xzTnZL9G@34R7>7V)ipAG)Lf`o)$e*$3EPoBK~vtq3IiO#8kC(+Z} zYilEaNsYw2Pw=|NqYdXV|;D{tp%a_>uphum8K) z|BpoG-)R1CH2)tH*MFn=zv=qF>H5Fv`oHOV<7E-@ANA?K_4vQ__`mh|zxDXPfZP8D z;3lS+nF3HmEQ3WQME~F|T>7>RJvO|it75$b#gjJZdWb84V~5l^ygATd1s4ItGDW07 z7a#=#K-&H9|2v}m-@)bldd_7_w zzbw|-uZQ2`xcDp~HkQ}m=XC%~XKiGUSpx`V4TGO>v=Z-u4d9zyhQkl|*8qx-dbVij z;iFxEMR8y7WAP>IINkO8b*#z%X98;8%*)i?3GwlOZrsP^z9;7Ya619nFF@J>P>*9J z1ny#lA@yi3R74nHfc6L-=>)X`dnthzuiGcl#;qgSM!~JRJ*Tclp(wA6L)MiM)3UI? zA#lc2l{6CoB`2ol1*o28c*0K0--j&JFMsW%?d0SnF7DGG+N1j;7khd4PEW3=qGvMi z|BZhBSEyR-=j3!z*MTsa3*{8gKQJ}#jc+90p_EAWK)=yMesp}fnh^d&sGy8YV+ z|EkEZn6@x^s*p45`pZj++TJ`S+|Ewno_rxmV>5Vef$4^&kwmzAnY@9xLIDm@US6J- zeBvu}6(^_i)$$R2s1PILVpscOZIB_Qed25Q*I6XpJDKhsw zB2Q(lrYbDKOkj3nS^8WV6i&J>A~6wYNu>~N);X^Y0|Et9HdO|V-d9j$0a#$jMChg! zr%nW*)sifC0ceS^`K|%&zQx7GWXB%9QdYf>GdJR%om4|F&ga^K*X(N5OiVHbn@FcF zA#;B7i*qc%wyj@I3!?u}e)nZI5@1LOJrp@_R~2!qWO?VX?xrR$!j1JsqY+Gt)P>k2 z^N7<79Pu<#?PNJUROYX!H<+DvTC^+LQ!B*t{A7v88nus;l=*4w;^Ly95VHCze~2Ya z#rNqxAoOS49nOT)4gk2t_P@SAVNpX)mYT?FX`yi9et^=EVivNccC$uSWk2?Xky=jk zAwUd{!GHAeah*cPhAjdMAB;1>95o!(A6~V4{MYv=Sv%_?{91rD@8;$P%9d(~GCcTl zo0Pq7TkNE)yj(2}ma!kB=gtgJtCFkaCW;Njf$v#ZSmaBFH7h4;A@#;~vm?VyWRCBC z9*oW;n1tygq7zsuOqeNg>K)-5k@cY3$y<_~IIFl8eqBvn{Xhhup=y2O4_45|v%yr& zjzG^Uhoisp)kF6STzcc=OYy9NnPra&?h~v^wKgk$AGw>0lsU*m&EY! 
zmmbG%g9Cb<%_jiFyBBvEYCFV{pDy* z?wu*jHlvYN@Z}7k#pNn-eBcuT*prQb?4kKUmL?m;7*%*812K{yJw2Vl6q!;Su~XIt~FihvLnh5&!JUo6=@-u+Ec`w@HlI%y_gyyPJr>W~gV@ z;L#y1)%E<=`g66qxz4k^^rft1J5QC$f}ET_f?_e2dJ-LmvU)(K4B!F|%QOK5U7YqE z*?R$^#@5zWm9xk*X1Cq*(=9~>h3Eia5z%s4L<)X!Nr{b(y#OQ*RgR;+;-1G0AVd+f z?m8gP!;Jm);>k+o(3^lUmE@@GX@}~=jqyr{g-%<~HEosFC~pivpu79PgVPK=j{SI0aYGgS)e#^OHorZj(#5K?)poELnaBn(W!0nyC(vVS^>eLHWH;|Q8~`w?C;p{#iOIBq@+a4c!*JE!kSAW zriy7Q|Gpq@vA`WHb724FQs@4$bn1& z3X|j*8e3^;>2WUERCSVEez(#WC|H>KvQ?m91&(Ju%&4q^JRy0V`G}QCjlKd17QTRZ zQH@;WD|Yn><#dS(ou~=P)i#tn;D!<9HD2Ko8mw2mfM6b8js~2OxIa-vjkvvi$@)f1 z%NuJcp3994-f(($`S@2WfX>0je5Rv9(m+I=hyXySwYUW*I$>J+c1Wv#e`_q=-x;y> zYwG<(IDu)e=npW{FdsnL^?&640oc$GzCeG9(<5PV3VFi8u?4VtBS}h(d$et;TNfO1 zd&1)Lz5r?`JThUNJUn1ASb>gV@}MnrKb>|=43OL0ngAq{UMl~DlLeb~#Xh-%OAG)F zhnxw?wGy_sgBq5AtVfRVitimtuMJE61VJ~47Ri`cP%c*$KnHi9@ui%n4tV3Psgzd% z$TyJ4n-DTHGo!fT=HI&m0gk~-c13`TOZ_x#572sANX5l7oL<4$Cyk`_im?21K&C|| z_1$4OSzAR8I$X^iM+-Mgc4K5`R|rwd1^EZH{kPh_JF2N{;Wrjkbd<4CMMeP`x{45r zlo3%O6qOPnKoACyme4~FmXQHel-{vWq$Lm_5CRE^h!6-$uL0>DDWL_(w{z#-ch~yv zyYJoat@Zxlk8>6$=j^`suYHfd4(uxkB4roImdDLGGjZ*>*y$JNRPN_fzN4~o;nBn) zg5s&PG8;;|L~@bC;h_`Pcw0=vXHBQf38+Qix*pX4CtcFYbFt#`v|mm2`jqnGo{=~G z9{DN$ViE5*Gn9}Ow+)(Uxn5F?g!m22EAE2){L!&7GEtWx?nkaMZQGZp1LJk~Rkc+}r-C$?fRg z*go^^8P!GSvE9aDfmma{A}?R_Ps^EN=zVL78BPej!<_5E%Z5h8=X>4wznB2XUm^=Qm;cq@ z`QIVi`gLFSDc?m*GTz3dJ!-Z@SO!K8y?g%dZGNYDMgyf1yNq)ExwGHrIZgUQSU ze<6H>q5_iXkArBVzK7(0Koob*I~B4^Q9!~AF< zg@q*b%p9v-i66GEE~%y`tJ_CPk3t_%E9^=UFWS~fuyi?Y+vdwVjH?v`4`6*0IFz1& z5i$MdwL+^3FZVx;B%=UzEYQr0_1o0iQYH>honuXz;D_O*zTN>Y#P@EcZ3+2U$l~_) z0OBj$w92!?4DW7bLq7s*$d7lx%_y`ZGNO)bN)4vYbu`{44e>T6;SP>y4HZZyEZxae zmu_YFnawoEcO{pu+<&RIU#*0FKrt15W{ZHmJW;8=V63s7V^b9cH~RS{HO7Yn~Le2uP?NGn>g1l9Wr-d==>>xp=`hBkNtt9;^}RgY|1 zI&Kig$7c$4jxEenSW1cKT^4PElSswIm4kEe?yTif$MV%WK;Q6CO(#MI@78!)lYE=k zEO36(QTLPzy7JwW9A1=Im}0p8DR7n8%9i=~n75$olznMKH=@RjrW+E0Ug_ErK0TS{ zFIrWF4R$b+ll9%T?e1Mn+)t*kMHe#V&`B9AByU+Wd%(`Va9=_85EJ7^%kP$zT{B8f zor;{X-L+;EkBD~0;V{q_sVc5Fr+@XxZbbK)FB;IiEG=c6qe?d2iC>S#(Ph@-uAIO) z>!iEi|F<%PWoP>dd-snl0rj}by`jU6f}>s7KI z5I%m^eW|;1WVNpp2*POhNdeaPzEz1e9v2@l3xhN0E-D&S1nM;n%HG!)=b` zx>v88>c||J7l9@AVDmSOdV1tN@0mL|r!q~_c4k(#MdhdQtB2fl>GnofFG;GK#wVIV z&sAi}h_js%H-nG9+@C7l;rpg=Q0Spbi2@5r@XmQ}L+~CE-MZVkKAyeR-XF? zAe7Em`}gp~&=T+;&MBywW86PjRs?55T^D^^m)l3RyyhP@&st8TUG@%4P`IUNQe$au zVr8?CSR1PlqW58h-07(DW_rv>`Xbw~QMtiyv(-R5G`psQTL8v@Dj*E6CYENFpW$J!P!T=VwYM4+%+T$_sob_|?X` zl0>j!dK)C4mYD$d$(VA@?)Z49f||~#(;H%$hr>5Bd{=ym&%8&J2mdGK#;knB>@It+ zdf&w_ENbhc2Qb0dvb@TcRHX}6de6&7N}$EVf|`!oX#y*s_ERE9;|W^?c8B+b!9$+1 z5c_`FeJY`mN?ZqsI>ajyPHu&mg?b+JaFg>F3&EAHN8 zp`zmKnQ%lz=23tQkNZE)0=gS>#XlZ~Jzp_J$ykx|>=FsbMKtX{KSgCxw4Ay#&-`jl zs&?CKncp^r(atG2Wyj9eyS=FggvjxTro50$sR!PMv7Kb+!@H% zHb~CMqL&8btB*z!i;`l4H?>{_wN|BMRjWCY-1bqAhDPE0O)48QmF99w-$_N8@!>G^ zR@l))VfnR5q>45IoL5fTZ%GfB7PkfbGP9Y1qb$rt>ZP_j%IP1A!%0~pCsWkT95*qh z2n94U2_=aj1$!q$eQykKQTeU=G>LHnJQ}aOtc=FA^zvJ+ZDwH6rCo5d?et__r3q6? 
z%53c>>6p_&J^1x|WnGGD#-yl7OoL&v+R(k4(1~pWNnG_Ab?kYe2lW>h*vd}b{ypuZ zvhhvzG`$0Pq*MBKqlOE66%`>N2Ymm#`q5Y0a^QIBheEZ`_|B?hWs52MrScuNhU_re zYEW`TPfpoz{PoR)ky3W0QyQ;a(4mQ)3A|^6tOGRDo$+!ure)nT)fW`+5-FAK8e8kh zp7A=;-K@tYCJdXCCk{l_7)&o_+4>Rh_|aS3v4<5+c=0jNA1#^m*Ky~I52<!Ys~it@ zcyi>O&m-6BE=sstie%25)G!~#xLc7)aw>Y=%h>Ew<=Ra*X4!Gaj3(Raomq=2L_TbE@1s}zlw}HE(TXW?^MdoTaljS0%gf1+r zTp6`DG5%3e4S4!|Tuw|OMHWuT%IjQrJf6Zz=z7~Kf?-*cUa#X@n`Y?7>l8q_ zbSmjM8@9`2c9&gQme}CBbQf;%H!bH!ft?lp?NLjF^+L2r;$mc}BqD)UgCbX4klYj^ zj$g^6^GA8zVHCJMj4YEw`qnfkYDhOYANxHAZDC<8a^%p+2XpxLkqGF%Tp0`Oeeshp zt8QT!Ax@*J412Ly*$Q`ezoEC;;*W-xUQHMWd}K*(7I{9V;C}nL#T7whe)eayh{6R~ z6}p|snAFfHlk2`+f57N^nFUqPc!<}T-Du$^Ii@f~i!w8ny1hQ1)v?M{x{I_%8#D=x zMMg!mom-U4U*>9`AL=W{iSQryrSQYIm!F8kth>scLjnu}t%3yF;MR-f4tu}IUi)nF z(0DGNHfP^Bp8cs$cD?Z0Mt-CqcXhlCqJPaNph>vTzS-%Ozl+LgfQ^nlz_AL|F-wWDY^e0{psneGo+dxGa@p1=)%i*xsdS~Nb_ zH6#M%Lo>1Yu&LQ)NDwB@UAA2HYDe0Y&WL@GT3xl{U}gH3lZnaln6aUsF1EPGjy7y| zXvg=_2mYeFg6C`-<+)Bq6!N+j%5Dz*YRk>rmU0nKTU!06o-mdeV?Ho7$8#-D-lk{6 zNR0P(KF!R`=H3GS!!aexxZ0<9A9E zxq|ED@7VA8nP}n(qs&+88nTAvTecDY!TtTA#Rr$l?OTUdd!8f2Ejt)cB-Xe2_hxG>=ZuO;rKPIrG%bar33Wa`b|?Gm)qK?q}`dAD34~UZuLm)#Q{B z=)N(U2iHs~mX>E@WBOVPgmfBu?P5Pqw9?gaaXsnB=S~g$DRjOP8Zs35u3Zn=?8#*U z`@1Z#zwZRhqXmW4*47d}c8jA4{No0`dfZ@7%j9A0^azP(p=caUPfGCPd15%J^+R|us0THY5q8MiHFH|;$UV&iMOIVBZ)WF^&2)ALxkf%K{&`Ta{4;W~r^OQ+(gG zKg=a-VH|-(H@pjO<0PEnxIvPhlfm_(c=lvP-ZxoT;8t2E7+M5l-_(Zlg=5`cro5hV zhU0>_%|}G4!9H2cu;bUSyg@Ov2i(`vtg^q^VUAaHSHfH$i^eN$3O;_)t33AJFW$6U zx37RuMre9%hhBme2iNdSKGs_G*G%Xg_cxu9J60q;vRbaW>0H1yaQ4H{&ev*0CMI*(#q{ z=w0uMFWUSji^1h9&wR6s9xndE8itoN3Eyd@pPyPo`Sm>PP$jcNO!2&EsSTZUHzoI) zJ^pao%y0>*T1okB-PEzNiU)N^9A5`^N7!V^iSWmWS{b?;ziBm2ta90T)V@;nMY`-W zPju11EN(q-@<^VWVPY$S_ndtG+p5ArCyYuVZ#d0NLLLhQbJkVmm(I`&7eo#vRGzfA zd-9#SJN1?R=z(WWjzvMmGvcL>#y(lf6PJ%H%uj}wsalonjU?oSVH%b21~1$G{PqQC z69;}CyxQ|;mMvy)a+}z7(aBeJuwx(6z|>m%au@l2`r9iN1EIw}E30*<21^xDm~0|` zAZY3!S&O~yz0o>7@BH-6Tt%;4kwKl@*LCx#wz*Q(wJqmrMahQmc-&e>%WwU6N$YhY zyANk3zan%rOo?~|ne?uk@8O4n{PlsDh3-3e^rsE5#i-?*Y*C#}MFq7jFWJXk=rzGF6CRDoV zKm1ZiUtmA0(!+~ON;Ai!N{f2Xu%-+v>&m_FTLrsPQlW!sB+@eTB(!DpPOFcpo8_`? 
ziMUEoApK8yT*p79aamRQ;&QTT-UBPyQN%AIFFn1{ZN4fU5>8!bG_GR}-XQPy$5nTJ zU7|{4p={QF3_0lP;K@p7XG?I&-$)B2gx~sLD_ou6p_g^lS-*XgTbBlB zXkyEMFaF-ar&rXGq3RIq@I6weVpDcGuP?xP#oyGO@27))q)dN*=OiQcnD3_^gi0)pj8*NFuPRW^W z${g}~qsxS)hc>qOSVunw?V*3+9>pc9Ci7rRSc7B5>`E9fB8FyWb3YIln61<^CB5eiC*%Svzer~`%oyWnKT*UIhK8lQIY9aSo48JU_%~89{@*u zz<<-w<@Je=^UkyTg=KzyVPKpZs=>Z3-8>tYd@Z1&$jXN%D>186IJW29TV8WtJX@7M z>-x-m^fR5bIHk)mlk>H-Mu0a;{O2Qof&cXX73~HYtUwO~kd<>(1M=zq zdEKqFPf$=0cyzO7ax?$B`;-2=f`jG}C8rz*$VR}+J(>u`=jAm`tqQC9{7LysktJGc zE(79!UatwA?OHU43v=22^Bs$8pgg zB4eaMZ6%%E$6wor29iq9F8oMV0H{Q6HN%zAj6!hNRs+~>JCSlT0%s*Ox0*oE$`mtD z{k##7)%p3YKRYhOIq24Z1`0)H127uKCnpU7%LGsofD$i?S2jh(dyY*cAik;!gnhpl;MGHR*jMZ6^RwhP6mWSIy)ANNMB`J{yjZ?J{Kl<*?GU z&QOp81Mv8dYJds@lIaS>7zy4}ZJBIJ~F)==F0*BLvjvhM(9E8z&2y$DFLFx$+4J%Vq zMb~Z_&Zu?(bO2?V?jsCqeSw4GaqO@M2d(m^86Ne=LEuP4ON@`y$qj%UF9A2$oj%a0 zL6pWUhRRAO+Un?ldX&CzY#hX#jnB-Kf8+;35~Z%Clw}Q~gU5t`x)3~-)npX$^l=qJZM>>FjhHti1QQM8A4G z24=|!02BzzdQ?UD034Vg1Jl~|nIelRNq zU*-{bZ{HygB{i?{FTjLcc(3EThYW%^4?<+J3u#A3%ulhrp2dd2wo@0`Y@J^MEPCZu-r9};M}*B2K+Pr_{e z_#P!sF)tV<`T&lY1*zq}HNmMZ11QD;8p;Kk>`3Qvf^0s%a`YO+c)F_~u`_fabpQ-% za=FM0Tz`9Vy(r{wlHGr+P5l=#^#5+CQl^D$&*XAS7BKw4G!2HF*K1%B0z@fw(Cz*j zke%c?HIke)3_f07Rh1)D4=y+mw*Dn|0t^8Y2UdZ#@Oa0mT8lz32$zC)72gFPvJ!Fv z*zt zm|)whR^U-W|6^tTKjKlp8t)66IXk4hHuz-}7qBuEg4a~_yE{9dSTh?nBAKG(^Xj_G zn66AIyXf6*C!~Us?_H1B!`Uxu9Q)mks%j)kIa*YOH~W8r%rd8~ul3wuicmaKiFXxByCXKC*xOun_n=UnQ*X2YS8C_H6r zG`13JKOxP9ieIi>bw!_VPxJG})wGwhQ^e8Li_X>lN4P@3vMYlCGqrDAy?vu=B!*LeZ>F zIN=zAbKsl)jDcPc9NV;Qo2-0mk5As6re)k2O405J6saIZY_-L)=aPj|I?!s-K|7x{ zHycHwU%+ZS>!*ZkgoXosf(O42Iz8d(pYHp)I*}OZiV9knbwA5UbV4evrQC*I4;oO+ z%OgqjpoMDrK<{&~S@NV#zS0y4n!myY$pJG?*lE}6)s4HGqobc&0%&IJ2aE=^x?k!| z+q2KpmAF3(g)Uc0*fR*Xy{x{mkXX0CMR|tc)upWkwF*szHfDEC2ilGPhj2@z85U025j^%46xCCEuWejrv}&EQn}upR#31Ghb`7D6D5W+?nG1#HL8b><%WzQ;8p9@ zdx_AOAu6asALHQ3dWJ#*7|So!QC%AJJRxo&)2I&kXzlgJGakQKJa-bdS&$)m;bRH8!17t^AIUtWOIgFU{V4M*CL z-rlE;hV!W5LVmnX^}Z_a_X@l30C_US_0;Lp9Ku2Yp3*OL@%WZE2rUJHyC#4Cu&0dO z)8Xd|(Cdqu#n`3KyR7IgyTes-20y(>5@&Y8ZH5&tE3iiGsUm(>X#t+tHUv6ANG++0 zu6;{QPB^`CNe}6jr6RQ~!ZVh(??P~fFA5048LnYrH5I!JJ zNotq34F>)$AeC1|#p1>i^aiLMRB!Qbj*^hehY!{552H_mB^lX!djp*zkq1U-HgB-2 zd7bCRW;1p}WYIQZ{qz`dJDS4!!yr*(t2d^RFNE#6rS?5CQx>VDSvs_ox0|iMGZGKI zGaXVf%@CR@v|5RRO(CUA!UX57uEyIVxA0EU zf=Rva*d-=Ur&?LGXDUmS?|qqldzbPs1J~2UMlKlf?oP-Wz00j6U?y7=4~ggzus-OX zuITHph6!JVsvmZKHNCm0(lO*f&bl>yB1{TT>69#+ouKnrj4td>3_5sNxjJF`351bs|pcdekxq^EKU z-a`ii2Hwk4r3*YXgO=7x>~m#9>kA-hZ$TinShg*g?6tW-$&FX8)Rnu+hxkp{Lv7Zd zNvJNjgNARRE-!J$av)QjtE(Bxm|hqf{$T0%`}vSnfiZWSxgoELeDz;xI z!--qm#bdLFK_){xNW59^Q$qt5aL8L{B0kuuZ{_f6xX)(gNl_0UkFo;f;Kr@duAY|~ zW@#@CTLQ)d8j2^MTt;1e^+c-{kPP$ zW8icg3MQ?vd)FwY*I0;+-P{d0DkVUYZ{i?ePz!@*^!#Gr+75h!&RGsBEB?n@uMyQ+ z2;C9vbM`>k_*uxC)8hw>HI%>>vj#kZ?g-X@%Kksz?1%U~4z_J>%mE9G9yVCNvf1Q4 i6=m>Khd4XT1CU0Ic9aWuMg+J5(bX}$iMwI@_`d+24wDxE From 32cfe585eeb83fa533f61dea5e452875ef7b1a32 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:26:54 +0000 Subject: [PATCH 14/30] Fix Unknown message type error for tool notifications MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Change _meta: null to _meta: undefined in OpenAI transport - Register default no-op handlers for all tool notifications in App constructor The SDK's Protocol class throws 'Unknown message type' for unhandled notifications. Now all tool-related notifications have default handlers. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/app.ts | 8 ++++++-- src/openai/transport.ts | 4 ++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/app.ts b/src/app.ts index f02266b1..5ce106ef 100644 --- a/src/app.ts +++ b/src/app.ts @@ -250,9 +250,13 @@ export class App extends Protocol { return {}; }); - // Set up default handler to update _hostContext when notifications arrive. - // Users can override this by setting onhostcontextchanged. + // Set up default handlers for notifications. + // Users can override these by setting the corresponding on* properties. this.onhostcontextchanged = () => {}; + this.ontoolinput = () => {}; + this.ontoolinputpartial = () => {}; + this.ontoolresult = () => {}; + this.ontoolcancelled = () => {}; } /** diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 8c5cfb84..c05c6326 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -510,8 +510,8 @@ export class OpenAITransport implements Transport { text: JSON.stringify(this.openai.toolOutput), }, ], - // Include _meta from toolResponseMetadata if available - _meta: this.openai.toolResponseMetadata, + // Include _meta from toolResponseMetadata if available (use undefined not null) + _meta: this.openai.toolResponseMetadata ?? undefined, }, } as JSONRPCNotification); }); From c6d5a6dbd6ee3e76d5cd4c56dd2c6777f7ce0800 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:30:22 +0000 Subject: [PATCH 15/30] Add tests for notification handler fixes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Test that null _meta is converted to undefined in OpenAI transport - Test that default no-op handlers accept tool notifications without error 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/app-bridge.test.ts | 15 +++++++++++++++ src/openai/transport.test.ts | 23 +++++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/src/app-bridge.test.ts b/src/app-bridge.test.ts index 66d5f830..ad7785c9 100644 --- a/src/app-bridge.test.ts +++ b/src/app-bridge.test.ts @@ -215,6 +215,21 @@ describe("App <-> AppBridge integration", () => { expect(receivedCancellations[0]).toEqual({}); }); + it("tool notifications work with default no-op handlers", async () => { + // Don't set any custom handlers - use defaults + await app.connect(appTransport); + + // These should not throw (default handlers silently accept them) + // Just verify they complete without error + await bridge.sendToolInput({ arguments: {} }); + await bridge.sendToolInputPartial({ arguments: {} }); + await bridge.sendToolResult({ content: [{ type: "text", text: "ok" }] }); + await bridge.sendToolCancelled({}); + + // If we got here without throwing, the test passes + expect(true).toBe(true); + }); + it("setHostContext triggers app.onhostcontextchanged", async () => { const receivedContexts: unknown[] = []; app.onhostcontextchanged = (params) => { diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index 800073ca..37ecc6dd 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -429,6 +429,29 @@ describe("OpenAITransport", () => { }); }); + test("converts null _meta to undefined in tool result", async () => { + // Simulate null being set (e.g., from JSON parsing where null is valid) + (mockOpenAI as unknown as { toolResponseMetadata: null }).toolResponseMetadata = null; + + const transport = new OpenAITransport(); + 
const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ) as { params?: { _meta?: unknown } } | undefined; + expect(toolResultNotification).toBeDefined(); + // _meta should be undefined, not null (SDK rejects null) + expect(toolResultNotification?.params?._meta).toBeUndefined(); + }); + test("does not deliver notifications when data is missing", async () => { delete mockOpenAI.toolInput; delete mockOpenAI.toolOutput; From ca12b5b3a367f84ea93fd3d87347dc5852467824 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:50:03 +0000 Subject: [PATCH 16/30] Fix null toolOutput being sent as text 'null' MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Check for both null and undefined before delivering tool-result notification. Previously null passed through and was stringified. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/openai/transport.test.ts | 26 +++++++++++++++++++++++++- src/openai/transport.ts | 4 ++-- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index 37ecc6dd..962a0c6c 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -431,7 +431,9 @@ describe("OpenAITransport", () => { test("converts null _meta to undefined in tool result", async () => { // Simulate null being set (e.g., from JSON parsing where null is valid) - (mockOpenAI as unknown as { toolResponseMetadata: null }).toolResponseMetadata = null; + ( + mockOpenAI as unknown as { toolResponseMetadata: null } + ).toolResponseMetadata = null; const transport = new OpenAITransport(); const messages: unknown[] = []; @@ -452,6 +454,28 @@ describe("OpenAITransport", () => { expect(toolResultNotification?.params?._meta).toBeUndefined(); }); + test("does not deliver tool-result when toolOutput is null", async () => { + // Simulate null being set (e.g., from JSON parsing) + (mockOpenAI as unknown as { toolOutput: null }).toolOutput = null; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + // Should NOT deliver tool-result when toolOutput is null + expect(toolResultNotification).toBeUndefined(); + }); + test("does not deliver notifications when data is missing", async () => { delete mockOpenAI.toolInput; delete mockOpenAI.toolOutput; diff --git a/src/openai/transport.ts b/src/openai/transport.ts index c05c6326..55ca6272 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -495,8 +495,8 @@ export class OpenAITransport implements Transport { }); } - // Deliver tool output if available - if (this.openai.toolOutput !== undefined) { + // Deliver tool output if available (check for both null and undefined) + if (this.openai.toolOutput != null) { queueMicrotask(() => { this.onmessage?.({ jsonrpc: "2.0", From b320fdae932b9e7d1ae249d1ffdda316a00efbe1 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:55:49 
+0000 Subject: [PATCH 17/30] Fix double-stringification of toolOutput in OpenAI transport MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Handle different shapes of toolOutput from ChatGPT: - Array of content blocks: use directly - Single content block {type, text}: wrap in array - Object with just {text}: extract and wrap - Other: stringify as fallback This prevents double-stringification when ChatGPT passes content in different formats. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/openai/transport.ts | 37 +++++++++++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 55ca6272..7ca34678 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -498,18 +498,39 @@ export class OpenAITransport implements Transport { // Deliver tool output if available (check for both null and undefined) if (this.openai.toolOutput != null) { queueMicrotask(() => { + // Normalize toolOutput to MCP content array format + let content: Array<{ type: string; text?: string; [key: string]: unknown }>; + const output = this.openai.toolOutput; + + if (Array.isArray(output)) { + // Already an array of content blocks + content = output; + } else if ( + typeof output === "object" && + output !== null && + "type" in output && + typeof (output as { type: unknown }).type === "string" + ) { + // Single content block object like {type: "text", text: "..."} + content = [output as { type: string; text?: string }]; + } else if ( + typeof output === "object" && + output !== null && + "text" in output && + typeof (output as { text: unknown }).text === "string" + ) { + // Object with just text field - treat as text content + content = [{ type: "text", text: (output as { text: string }).text }]; + } else { + // Unknown shape - stringify it + content = [{ type: "text", text: JSON.stringify(output) }]; + } + this.onmessage?.({ jsonrpc: "2.0", method: "ui/notifications/tool-result", params: { - content: Array.isArray(this.openai.toolOutput) - ? this.openai.toolOutput - : [ - { - type: "text", - text: JSON.stringify(this.openai.toolOutput), - }, - ], + content, // Include _meta from toolResponseMetadata if available (use undefined not null) _meta: this.openai.toolResponseMetadata ?? undefined, }, From d0a909638f862e7482d9687dbeff98e1a620df50 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 17 Dec 2025 15:56:55 +0000 Subject: [PATCH 18/30] Add structuredContent support to OpenAI transport MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When toolOutput contains structuredContent, include it in the tool-result notification. Also auto-extract structuredContent from plain objects that aren't content arrays. This allows apps to access structured data directly without parsing JSON from text content. 
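A sketch of app-side consumption, assuming the notification schema passes `structuredContent` through to the handler; `render` stands in for the app's own UI update:

```typescript
app.ontoolresult = (params) => {
  // Prefer structuredContent when the transport supplies it;
  // otherwise fall back to parsing the text content block.
  const text = params.content?.find((c) => c.type === "text")?.text;
  const data = params.structuredContent ?? (text ? JSON.parse(text) : undefined);
  render(data);
};
```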
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/openai/transport.ts | 38 +++++++++++++++++++++++++++++++++++--- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 7ca34678..bc87b7fe 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -498,11 +498,38 @@ export class OpenAITransport implements Transport { // Deliver tool output if available (check for both null and undefined) if (this.openai.toolOutput != null) { queueMicrotask(() => { - // Normalize toolOutput to MCP content array format - let content: Array<{ type: string; text?: string; [key: string]: unknown }>; + // Normalize toolOutput to MCP CallToolResult format + let content: Array<{ + type: string; + text?: string; + [key: string]: unknown; + }>; + let structuredContent: Record | undefined; const output = this.openai.toolOutput; - if (Array.isArray(output)) { + // Check if output is already a CallToolResult-like object with content/structuredContent + if ( + typeof output === "object" && + output !== null && + ("content" in output || "structuredContent" in output) + ) { + const result = output as { + content?: unknown; + structuredContent?: Record; + }; + // Prefer structuredContent if available + if (result.structuredContent !== undefined) { + structuredContent = result.structuredContent; + // Generate content from structuredContent if not provided + content = Array.isArray(result.content) + ? result.content + : [{ type: "text", text: JSON.stringify(result.structuredContent) }]; + } else if (Array.isArray(result.content)) { + content = result.content; + } else { + content = [{ type: "text", text: JSON.stringify(output) }]; + } + } else if (Array.isArray(output)) { // Already an array of content blocks content = output; } else if ( @@ -521,6 +548,10 @@ export class OpenAITransport implements Transport { ) { // Object with just text field - treat as text content content = [{ type: "text", text: (output as { text: string }).text }]; + } else if (typeof output === "object" && output !== null) { + // Plain object - use as structuredContent and generate text content + structuredContent = output as Record; + content = [{ type: "text", text: JSON.stringify(output) }]; } else { // Unknown shape - stringify it content = [{ type: "text", text: JSON.stringify(output) }]; @@ -531,6 +562,7 @@ export class OpenAITransport implements Transport { method: "ui/notifications/tool-result", params: { content, + structuredContent, // Include _meta from toolResponseMetadata if available (use undefined not null) _meta: this.openai.toolResponseMetadata ?? undefined, }, From e3acd7057716182640bb71180c746cbf5a8f6738 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 7 Jan 2026 16:59:41 +0000 Subject: [PATCH 19/30] style: format OpenAI transport --- src/openai/transport.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/openai/transport.ts b/src/openai/transport.ts index bc87b7fe..f4c9c6aa 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -523,7 +523,12 @@ export class OpenAITransport implements Transport { // Generate content from structuredContent if not provided content = Array.isArray(result.content) ? 
result.content - : [{ type: "text", text: JSON.stringify(result.structuredContent) }]; + : [ + { + type: "text", + text: JSON.stringify(result.structuredContent), + }, + ]; } else if (Array.isArray(result.content)) { content = result.content; } else { From a1714473f8e4f1faa8abad8806e5b40169932432 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Thu, 8 Jan 2026 16:08:44 +0000 Subject: [PATCH 20/30] fix: include autoResize option in useApp hook The React useApp hook was overriding the entire options object when passing experimentalOAICompatibility, causing autoResize to be undefined instead of true. This prevented automatic size notifications from being set up. --- src/react/useApp.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/src/react/useApp.tsx b/src/react/useApp.tsx index 111f8591..12bcf86d 100644 --- a/src/react/useApp.tsx +++ b/src/react/useApp.tsx @@ -137,6 +137,7 @@ export function useApp({ try { const app = new App(appInfo, capabilities, { experimentalOAICompatibility, + autoResize: true, }); // Register handlers BEFORE connecting From 3862794c06c66e3822ee6c582912da65b04cfc56 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 13 Jan 2026 04:00:30 +0000 Subject: [PATCH 21/30] docs: add OpenAI to MCP Apps SDK migration guide --- docs/openai-migration.md | 198 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 198 insertions(+) create mode 100644 docs/openai-migration.md diff --git a/docs/openai-migration.md b/docs/openai-migration.md new file mode 100644 index 00000000..691b68a0 --- /dev/null +++ b/docs/openai-migration.md @@ -0,0 +1,198 @@ +# Migrating from OpenAI Apps SDK to MCP Apps SDK + +This guide helps you migrate from the OpenAI Apps SDK (`window.openai.*`) to the MCP Apps SDK (`@modelcontextprotocol/ext-apps`). + +## Quick Start Comparison + +| OpenAI Apps SDK | MCP Apps SDK | +|-----------------|--------------| +| Implicit global (`window.openai`) | Explicit instance (`new App(...)`) | +| Properties pre-populated on load | Async connection + notifications | +| Sync property access | Getters + event handlers | + +## Setup & Connection + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `window.openai` (auto-available) | `const app = new App({name, version}, {})` | MCP requires explicit instantiation | +| (implicit) | `await app.connect()` | MCP requires async connection; auto-detects OpenAI env | +| — | `await app.connect(new OpenAITransport())` | Force OpenAI mode explicitly | +| — | `await app.connect(new PostMessageTransport(...))` | Force MCP mode explicitly | + +## Host Context Properties + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `window.openai.theme` | `app.getHostContext()?.theme` | `"light"` \| `"dark"` | +| `window.openai.locale` | `app.getHostContext()?.locale` | BCP 47 language tag (e.g., `"en-US"`) | +| `window.openai.displayMode` | `app.getHostContext()?.displayMode` | `"inline"` \| `"pip"` \| `"fullscreen"` | +| `window.openai.maxHeight` | `app.getHostContext()?.viewport?.maxHeight` | Max container height in px | +| `window.openai.safeArea` | `app.getHostContext()?.safeAreaInsets` | `{ top, right, bottom, left }` | +| `window.openai.userAgent` | `app.getHostContext()?.userAgent` | Host user agent string | +| — | `app.getHostContext()?.availableDisplayModes` | MCP adds: which modes host supports | +| — | `app.getHostContext()?.toolInfo` | MCP adds: tool metadata during call | + +## Tool Data (Input/Output) + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `window.openai.toolInput` | 
`app.ontoolinput = (params) => { params.arguments }` | Tool arguments; MCP uses callback | +| `window.openai.toolOutput` | `app.ontoolresult = (params) => { params.content }` | Tool result; MCP uses callback | +| `window.openai.toolResponseMetadata` | `app.ontoolresult` → `params._meta` | Widget-only metadata from server | +| — | `app.ontoolinputpartial = (params) => {...}` | MCP adds: streaming partial args | +| — | `app.ontoolcancelled = (params) => {...}` | MCP adds: cancellation notification | + +## Calling Tools + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `await window.openai.callTool(name, args)` | `await app.callServerTool({ name, arguments: args })` | Call another MCP server tool | +| Returns `{ structuredContent?, content?, isError? }` | Returns `{ content, structuredContent?, isError? }` | Same shape, slightly different ordering | + +## Sending Messages + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `await window.openai.sendFollowUpMessage({ prompt })` | `await app.sendMessage({ role: "user", content: [{ type: "text", text: prompt }] })` | MCP uses structured content array | + +## External Links + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `await window.openai.openExternal({ href })` | `await app.openLink({ url: href })` | Different param name: `href` → `url` | + +## Display Mode + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `await window.openai.requestDisplayMode({ mode })` | `await app.requestDisplayMode({ mode })` | Same API | +| — | Check `app.getHostContext()?.availableDisplayModes` first | MCP lets you check what's available | + +## Size Reporting + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `window.openai.notifyIntrinsicHeight(height)` | `app.sendSizeChanged({ width, height })` | MCP includes width | +| Manual only | Auto via `{ autoResize: true }` (default) | MCP auto-reports via ResizeObserver | + +## State Persistence + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `window.openai.widgetState` | — | Not directly available in MCP | +| `window.openai.setWidgetState(state)` | — | Use framework state (React, localStorage, etc.) | + +## File Operations (Not Yet in MCP Apps) + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `await window.openai.uploadFile(file)` | — | Not yet implemented | +| `await window.openai.getFileDownloadUrl({ fileId })` | — | Not yet implemented | + +## Other (Not Yet in MCP Apps) + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `await window.openai.requestModal(options)` | — | Not yet implemented | +| `window.openai.requestClose()` | — | Not yet implemented | +| `window.openai.view` | — | Not yet mapped | + +## Event Handling + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| Read `window.openai.*` on load | `app.ontoolinput = (params) => {...}` | Register before `connect()` | +| Read `window.openai.*` on load | `app.ontoolresult = (params) => {...}` | Register before `connect()` | +| Poll or re-read properties | `app.onhostcontextchanged = (ctx) => {...}` | MCP pushes context changes | +| — | `app.onteardown = async () => {...}` | MCP adds: cleanup before unmount | + +## Logging + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| `console.log(...)` | `app.sendLog({ level: "info", data: "..." 
})` | MCP provides structured logging | + +## Host Info + +| OpenAI | MCP Apps | Notes | +|--------|----------|-------| +| — | `app.getHostVersion()` | Returns `{ name, version }` of host | +| — | `app.getHostCapabilities()` | Check `serverTools`, `openLinks`, `logging`, etc. | + +## Full Migration Example + +### Before (OpenAI) + +```typescript +// OpenAI Apps SDK +const theme = window.openai.theme; +const toolArgs = window.openai.toolInput; +const toolResult = window.openai.toolOutput; + +// Call a tool +const result = await window.openai.callTool("get_weather", { city: "Tokyo" }); + +// Send a message +await window.openai.sendFollowUpMessage({ prompt: "Weather updated!" }); + +// Report height +window.openai.notifyIntrinsicHeight(400); + +// Open link +await window.openai.openExternal({ href: "https://example.com" }); +``` + +### After (MCP Apps) + +```typescript +import { App } from "@modelcontextprotocol/ext-apps"; + +const app = new App( + { name: "MyApp", version: "1.0.0" }, + {}, + { autoResize: true } // auto height reporting +); + +// Register handlers BEFORE connect +app.ontoolinput = (params) => { + console.log("Tool args:", params.arguments); +}; + +app.ontoolresult = (params) => { + console.log("Tool result:", params.content); +}; + +app.onhostcontextchanged = (ctx) => { + if (ctx.theme) applyTheme(ctx.theme); +}; + +// Connect (auto-detects OpenAI vs MCP) +await app.connect(); + +// Access context +const theme = app.getHostContext()?.theme; + +// Call a tool +const result = await app.callServerTool({ + name: "get_weather", + arguments: { city: "Tokyo" } +}); + +// Send a message +await app.sendMessage({ + role: "user", + content: [{ type: "text", text: "Weather updated!" }] +}); + +// Open link (note: url not href) +await app.openLink({ url: "https://example.com" }); +``` + +## Key Differences Summary + +1. **Initialization**: OpenAI is implicit; MCP requires `new App()` + `await app.connect()` +2. **Data Flow**: OpenAI pre-populates; MCP uses async notifications (register handlers before `connect()`) +3. **Auto-resize**: MCP has built-in ResizeObserver support via `autoResize` option +4. **Structured Content**: MCP uses `{ type: "text", text: "..." }` arrays for messages +5. **Context Changes**: MCP pushes updates via `onhostcontextchanged`; no polling needed +6. 
**Capabilities**: MCP lets you check what the host supports before calling methods From 6d2bac5a7da392d2b66e579ca5e1e189ac6f5db2 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 13 Jan 2026 13:47:36 +0000 Subject: [PATCH 22/30] feat(openai): Add widget state management and file operations - Add StructuredWidgetState type with modelContent/privateContent/imageIds - Add onwidgetstate handler for receiving persisted state on load - Add updateModelContext method to persist state and update model context - Add uploadFile and getFileDownloadUrl methods for file handling - Handle image content blocks in sendMessage by uploading and adding to context - Wire all new functionality in OpenAITransport - Add comprehensive tests for all new features - Update migration doc with new feature mappings This enables MCP Apps to: - Persist UI state across widget renders - Provide context visible to ChatGPT for follow-up turns - Keep private UI state hidden from the model - Upload and reference images in model context --- docs/openai-migration.md | 144 ++++++++--------- src/app.ts | 180 +++++++++++++++++++++ src/generated/schema.json | 171 ++++++++++++++++++++ src/generated/schema.test.ts | 60 +++++++ src/generated/schema.ts | 136 ++++++++++++++++ src/openai/transport.test.ts | 299 +++++++++++++++++++++++++++++++++++ src/openai/transport.ts | 171 +++++++++++++++++++- src/openai/types.ts | 44 +++++- src/spec.types.ts | 109 +++++++++++++ src/types.ts | 24 +++ 10 files changed, 1261 insertions(+), 77 deletions(-) diff --git a/docs/openai-migration.md b/docs/openai-migration.md index 691b68a0..056c8619 100644 --- a/docs/openai-migration.md +++ b/docs/openai-migration.md @@ -4,120 +4,120 @@ This guide helps you migrate from the OpenAI Apps SDK (`window.openai.*`) to the ## Quick Start Comparison -| OpenAI Apps SDK | MCP Apps SDK | -|-----------------|--------------| +| OpenAI Apps SDK | MCP Apps SDK | +| --------------------------------- | ---------------------------------- | | Implicit global (`window.openai`) | Explicit instance (`new App(...)`) | -| Properties pre-populated on load | Async connection + notifications | -| Sync property access | Getters + event handlers | +| Properties pre-populated on load | Async connection + notifications | +| Sync property access | Getters + event handlers | ## Setup & Connection -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `window.openai` (auto-available) | `const app = new App({name, version}, {})` | MCP requires explicit instantiation | -| (implicit) | `await app.connect()` | MCP requires async connection; auto-detects OpenAI env | -| — | `await app.connect(new OpenAITransport())` | Force OpenAI mode explicitly | -| — | `await app.connect(new PostMessageTransport(...))` | Force MCP mode explicitly | +| OpenAI | MCP Apps | Notes | +| -------------------------------- | -------------------------------------------------- | ------------------------------------------------------ | +| `window.openai` (auto-available) | `const app = new App({name, version}, {})` | MCP requires explicit instantiation | +| (implicit) | `await app.connect()` | MCP requires async connection; auto-detects OpenAI env | +| — | `await app.connect(new OpenAITransport())` | Force OpenAI mode explicitly | +| — | `await app.connect(new PostMessageTransport(...))` | Force MCP mode explicitly | ## Host Context Properties -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `window.openai.theme` | `app.getHostContext()?.theme` | `"light"` \| `"dark"` | -| 
`window.openai.locale` | `app.getHostContext()?.locale` | BCP 47 language tag (e.g., `"en-US"`) | -| `window.openai.displayMode` | `app.getHostContext()?.displayMode` | `"inline"` \| `"pip"` \| `"fullscreen"` | -| `window.openai.maxHeight` | `app.getHostContext()?.viewport?.maxHeight` | Max container height in px | -| `window.openai.safeArea` | `app.getHostContext()?.safeAreaInsets` | `{ top, right, bottom, left }` | -| `window.openai.userAgent` | `app.getHostContext()?.userAgent` | Host user agent string | -| — | `app.getHostContext()?.availableDisplayModes` | MCP adds: which modes host supports | -| — | `app.getHostContext()?.toolInfo` | MCP adds: tool metadata during call | +| OpenAI | MCP Apps | Notes | +| --------------------------- | --------------------------------------------- | --------------------------------------- | +| `window.openai.theme` | `app.getHostContext()?.theme` | `"light"` \| `"dark"` | +| `window.openai.locale` | `app.getHostContext()?.locale` | BCP 47 language tag (e.g., `"en-US"`) | +| `window.openai.displayMode` | `app.getHostContext()?.displayMode` | `"inline"` \| `"pip"` \| `"fullscreen"` | +| `window.openai.maxHeight` | `app.getHostContext()?.viewport?.maxHeight` | Max container height in px | +| `window.openai.safeArea` | `app.getHostContext()?.safeAreaInsets` | `{ top, right, bottom, left }` | +| `window.openai.userAgent` | `app.getHostContext()?.userAgent` | Host user agent string | +| — | `app.getHostContext()?.availableDisplayModes` | MCP adds: which modes host supports | +| — | `app.getHostContext()?.toolInfo` | MCP adds: tool metadata during call | ## Tool Data (Input/Output) -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `window.openai.toolInput` | `app.ontoolinput = (params) => { params.arguments }` | Tool arguments; MCP uses callback | -| `window.openai.toolOutput` | `app.ontoolresult = (params) => { params.content }` | Tool result; MCP uses callback | -| `window.openai.toolResponseMetadata` | `app.ontoolresult` → `params._meta` | Widget-only metadata from server | -| — | `app.ontoolinputpartial = (params) => {...}` | MCP adds: streaming partial args | -| — | `app.ontoolcancelled = (params) => {...}` | MCP adds: cancellation notification | +| OpenAI | MCP Apps | Notes | +| ------------------------------------ | ---------------------------------------------------- | ----------------------------------- | +| `window.openai.toolInput` | `app.ontoolinput = (params) => { params.arguments }` | Tool arguments; MCP uses callback | +| `window.openai.toolOutput` | `app.ontoolresult = (params) => { params.content }` | Tool result; MCP uses callback | +| `window.openai.toolResponseMetadata` | `app.ontoolresult` → `params._meta` | Widget-only metadata from server | +| — | `app.ontoolinputpartial = (params) => {...}` | MCP adds: streaming partial args | +| — | `app.ontoolcancelled = (params) => {...}` | MCP adds: cancellation notification | ## Calling Tools -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `await window.openai.callTool(name, args)` | `await app.callServerTool({ name, arguments: args })` | Call another MCP server tool | -| Returns `{ structuredContent?, content?, isError? }` | Returns `{ content, structuredContent?, isError? 
}` | Same shape, slightly different ordering | +| OpenAI | MCP Apps | Notes | +| ---------------------------------------------------- | ----------------------------------------------------- | --------------------------------------- | +| `await window.openai.callTool(name, args)` | `await app.callServerTool({ name, arguments: args })` | Call another MCP server tool | +| Returns `{ structuredContent?, content?, isError? }` | Returns `{ content, structuredContent?, isError? }` | Same shape, slightly different ordering | ## Sending Messages -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| +| OpenAI | MCP Apps | Notes | +| ----------------------------------------------------- | ------------------------------------------------------------------------------------ | --------------------------------- | | `await window.openai.sendFollowUpMessage({ prompt })` | `await app.sendMessage({ role: "user", content: [{ type: "text", text: prompt }] })` | MCP uses structured content array | ## External Links -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| +| OpenAI | MCP Apps | Notes | +| -------------------------------------------- | ----------------------------------- | ------------------------------------ | | `await window.openai.openExternal({ href })` | `await app.openLink({ url: href })` | Different param name: `href` → `url` | ## Display Mode -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `await window.openai.requestDisplayMode({ mode })` | `await app.requestDisplayMode({ mode })` | Same API | -| — | Check `app.getHostContext()?.availableDisplayModes` first | MCP lets you check what's available | +| OpenAI | MCP Apps | Notes | +| -------------------------------------------------- | --------------------------------------------------------- | ----------------------------------- | +| `await window.openai.requestDisplayMode({ mode })` | `await app.requestDisplayMode({ mode })` | Same API | +| — | Check `app.getHostContext()?.availableDisplayModes` first | MCP lets you check what's available | ## Size Reporting -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `window.openai.notifyIntrinsicHeight(height)` | `app.sendSizeChanged({ width, height })` | MCP includes width | -| Manual only | Auto via `{ autoResize: true }` (default) | MCP auto-reports via ResizeObserver | +| OpenAI | MCP Apps | Notes | +| --------------------------------------------- | ----------------------------------------- | ----------------------------------- | +| `window.openai.notifyIntrinsicHeight(height)` | `app.sendSizeChanged({ width, height })` | MCP includes width | +| Manual only | Auto via `{ autoResize: true }` (default) | MCP auto-reports via ResizeObserver | ## State Persistence -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `window.openai.widgetState` | — | Not directly available in MCP | -| `window.openai.setWidgetState(state)` | — | Use framework state (React, localStorage, etc.) 
| +| OpenAI | MCP Apps | Notes | +| ------------------------------------- | --------------------------------------------------------------------- | -------------------------------------------------- | +| `window.openai.widgetState` | `app.onwidgetstate = (params) => { params.state }` | MCP uses notification callback | +| `window.openai.setWidgetState(state)` | `app.updateModelContext({ modelContent, privateContent, imageIds })` | MCP uses structured format | -## File Operations (Not Yet in MCP Apps) +## File Operations -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `await window.openai.uploadFile(file)` | — | Not yet implemented | -| `await window.openai.getFileDownloadUrl({ fileId })` | — | Not yet implemented | +| OpenAI | MCP Apps | Notes | +| ---------------------------------------------------- | ------------------------------------------------- | ------------------------------ | +| `await window.openai.uploadFile(file)` | `await app.uploadFile(file)` | Returns `{ fileId }` | +| `await window.openai.getFileDownloadUrl({ fileId })` | `await app.getFileDownloadUrl({ fileId })` | Returns `{ url }` | ## Other (Not Yet in MCP Apps) -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| `await window.openai.requestModal(options)` | — | Not yet implemented | -| `window.openai.requestClose()` | — | Not yet implemented | -| `window.openai.view` | — | Not yet mapped | +| OpenAI | MCP Apps | Notes | +| ------------------------------------------- | -------- | ------------------- | +| `await window.openai.requestModal(options)` | — | Not yet implemented | +| `window.openai.requestClose()` | — | Not yet implemented | +| `window.openai.view` | — | Not yet mapped | ## Event Handling -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| Read `window.openai.*` on load | `app.ontoolinput = (params) => {...}` | Register before `connect()` | -| Read `window.openai.*` on load | `app.ontoolresult = (params) => {...}` | Register before `connect()` | -| Poll or re-read properties | `app.onhostcontextchanged = (ctx) => {...}` | MCP pushes context changes | -| — | `app.onteardown = async () => {...}` | MCP adds: cleanup before unmount | +| OpenAI | MCP Apps | Notes | +| ------------------------------ | ------------------------------------------- | -------------------------------- | +| Read `window.openai.*` on load | `app.ontoolinput = (params) => {...}` | Register before `connect()` | +| Read `window.openai.*` on load | `app.ontoolresult = (params) => {...}` | Register before `connect()` | +| Poll or re-read properties | `app.onhostcontextchanged = (ctx) => {...}` | MCP pushes context changes | +| — | `app.onteardown = async () => {...}` | MCP adds: cleanup before unmount | ## Logging -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| +| OpenAI | MCP Apps | Notes | +| ------------------ | --------------------------------------------- | ------------------------------- | | `console.log(...)` | `app.sendLog({ level: "info", data: "..." })` | MCP provides structured logging | ## Host Info -| OpenAI | MCP Apps | Notes | -|--------|----------|-------| -| — | `app.getHostVersion()` | Returns `{ name, version }` of host | -| — | `app.getHostCapabilities()` | Check `serverTools`, `openLinks`, `logging`, etc. 
| +| OpenAI | MCP Apps | Notes | +| ------ | --------------------------- | ------------------------------------------------- | +| — | `app.getHostVersion()` | Returns `{ name, version }` of host | +| — | `app.getHostCapabilities()` | Check `serverTools`, `openLinks`, `logging`, etc. | ## Full Migration Example @@ -150,7 +150,7 @@ import { App } from "@modelcontextprotocol/ext-apps"; const app = new App( { name: "MyApp", version: "1.0.0" }, {}, - { autoResize: true } // auto height reporting + { autoResize: true }, // auto height reporting ); // Register handlers BEFORE connect @@ -175,13 +175,13 @@ const theme = app.getHostContext()?.theme; // Call a tool const result = await app.callServerTool({ name: "get_weather", - arguments: { city: "Tokyo" } + arguments: { city: "Tokyo" }, }); // Send a message await app.sendMessage({ role: "user", - content: [{ type: "text", text: "Weather updated!" }] + content: [{ type: "text", text: "Weather updated!" }], }); // Open link (note: url not href) diff --git a/src/app.ts b/src/app.ts index 5ce106ef..7abfbd90 100644 --- a/src/app.ts +++ b/src/app.ts @@ -44,6 +44,13 @@ import { McpUiToolInputPartialNotificationSchema, McpUiToolResultNotification, McpUiToolResultNotificationSchema, + McpUiWidgetStateNotification, + McpUiWidgetStateNotificationSchema, + McpUiUpdateModelContextNotification, + McpUiUploadFileRequest, + McpUiUploadFileResultSchema, + McpUiGetFileUrlRequest, + McpUiGetFileUrlResultSchema, McpUiRequestDisplayModeRequest, McpUiRequestDisplayModeResultSchema, } from "./types"; @@ -257,6 +264,7 @@ export class App extends Protocol { this.ontoolinputpartial = () => {}; this.ontoolresult = () => {}; this.ontoolcancelled = () => {}; + this.onwidgetstate = () => {}; } /** @@ -502,6 +510,47 @@ export class App extends Protocol { ); } + /** + * Convenience handler for receiving persisted widget state from the host. + * + * Set this property to register a handler that will be called when the host + * delivers previously persisted widget state. This is sent during initialization + * when running in OpenAI mode, allowing apps to hydrate their UI state. + * + * The state can be either a simple object or a StructuredWidgetState with + * modelContent/privateContent/imageIds separation. + * + * This setter is a convenience wrapper around `setNotificationHandler()` that + * automatically handles the notification schema and extracts the params for you. + * + * Register handlers before calling {@link connect} to avoid missing notifications. + * + * @param callback - Function called with the persisted widget state + * + * @example Hydrate app state from previous session + * ```typescript + * app.onwidgetstate = (params) => { + * if (params.state.selectedId) { + * setSelectedItem(params.state.selectedId); + * } + * if (params.state.privateContent?.viewMode) { + * setViewMode(params.state.privateContent.viewMode); + * } + * }; + * ``` + * + * @see {@link setNotificationHandler} for the underlying method + * @see {@link McpUiWidgetStateNotification} for the notification structure + * @see {@link updateModelContext} for persisting state updates + */ + set onwidgetstate( + callback: (params: McpUiWidgetStateNotification["params"]) => void, + ) { + this.setNotificationHandler(McpUiWidgetStateNotificationSchema, (n) => + callback(n.params), + ); + } + /** * Convenience handler for host context changes (theme, locale, etc.). * @@ -984,6 +1033,137 @@ export class App extends Protocol { }); } + /** + * Update model context and persist widget state. 
+ * + * This method allows apps to update what the model sees for follow-up turns + * and persist UI state. In OpenAI mode, this calls window.openai.setWidgetState(). + * + * Use the structured format with modelContent/privateContent/imageIds for fine-grained + * control over what the model sees vs. what stays private to the UI: + * - `modelContent`: Text or JSON visible to the model for follow-up reasoning + * - `privateContent`: UI-only state hidden from the model (view mode, selections, etc.) + * - `imageIds`: File IDs from uploadFile() for images the model should reason about + * + * @param params - Model context and widget state to persist + * + * @example Update model context with selection + * ```typescript + * app.updateModelContext({ + * modelContent: { selectedItem: item.name, quantity: 5 }, + * privateContent: { viewMode: "grid", scrollPosition: 150 }, + * }); + * ``` + * + * @example Update with uploaded images + * ```typescript + * const { fileId } = await app.uploadFile(imageFile); + * app.updateModelContext({ + * modelContent: "User uploaded an image for analysis", + * imageIds: [fileId], + * }); + * ``` + * + * @returns Promise that resolves when the notification is sent + * + * @see {@link McpUiUpdateModelContextNotification} for notification structure + * @see {@link onwidgetstate} for receiving persisted state on reload + */ + updateModelContext( + params: McpUiUpdateModelContextNotification["params"], + ) { + return this.notification({ + method: "ui/notifications/update-model-context", + params, + }); + } + + /** + * Upload a file for use in model context. + * + * This allows apps to upload images and other files that can be referenced + * in model context via imageIds in {@link updateModelContext}. + * + * In OpenAI mode, this delegates to window.openai.uploadFile(). + * + * @param file - The File object to upload + * @param options - Request options (timeout, etc.) + * @returns Promise resolving to the file ID + * + * @throws {Error} If file upload is not supported in this environment + * @throws {Error} If the upload fails + * + * @example Upload an image and add to model context + * ```typescript + * const file = new File([imageBlob], "screenshot.png", { type: "image/png" }); + * const { fileId } = await app.uploadFile(file); + * + * // Make the image available to the model + * app.updateModelContext({ + * modelContent: "User uploaded a screenshot", + * imageIds: [fileId], + * }); + * ``` + * + * @see {@link updateModelContext} for using uploaded files in model context + * @see {@link getFileDownloadUrl} for retrieving uploaded files + */ + async uploadFile(file: File, options?: RequestOptions) { + // Convert File to base64 + const arrayBuffer = await file.arrayBuffer(); + const base64 = btoa( + String.fromCharCode(...new Uint8Array(arrayBuffer)), + ); + + return this.request( + { + method: "ui/upload-file", + params: { + name: file.name, + mimeType: file.type, + data: base64, + }, + }, + McpUiUploadFileResultSchema, + options, + ); + } + + /** + * Get a temporary download URL for a previously uploaded file. + * + * In OpenAI mode, this delegates to window.openai.getFileDownloadUrl(). + * + * @param params - The file ID from a previous upload + * @param options - Request options (timeout, etc.) 
+ * @returns Promise resolving to the download URL + * + * @throws {Error} If file URL retrieval is not supported in this environment + * @throws {Error} If the file ID is invalid or expired + * + * @example Download a previously uploaded file + * ```typescript + * const { url } = await app.getFileDownloadUrl({ fileId }); + * const response = await fetch(url); + * const blob = await response.blob(); + * ``` + * + * @see {@link uploadFile} for uploading files + */ + getFileDownloadUrl( + params: McpUiGetFileUrlRequest["params"], + options?: RequestOptions, + ) { + return this.request( + { + method: "ui/get-file-url", + params, + }, + McpUiGetFileUrlResultSchema, + options, + ); + } + /** * Set up automatic size change notifications using ResizeObserver. * diff --git a/src/generated/schema.json b/src/generated/schema.json index 1d87d9c8..00f68fd3 100644 --- a/src/generated/schema.json +++ b/src/generated/schema.json @@ -46,6 +46,41 @@ ], "description": "Display mode for UI presentation." }, + "McpUiGetFileUrlRequest": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "method": { + "type": "string", + "const": "ui/get-file-url" + }, + "params": { + "type": "object", + "properties": { + "fileId": { + "type": "string", + "description": "The file ID from a previous upload" + } + }, + "required": ["fileId"], + "additionalProperties": false + } + }, + "required": ["method", "params"], + "additionalProperties": false + }, + "McpUiGetFileUrlResult": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "url": { + "type": "string", + "description": "Temporary download URL for the file" + } + }, + "required": ["url"], + "additionalProperties": {} + }, "McpUiHostCapabilities": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", @@ -5354,6 +5389,70 @@ ], "description": "Tool visibility scope - who can access the tool." }, + "McpUiUpdateModelContextNotification": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "method": { + "type": "string", + "const": "ui/notifications/update-model-context" + }, + "params": { + "type": "object", + "properties": { + "modelContent": { + "anyOf": [ + { + "description": "Text or JSON the model should see for follow-up reasoning.\nKeep focused and under 4k tokens.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "object", + "propertyNames": { + "type": "string" + }, + "additionalProperties": {} + } + ] + }, + { + "type": "null" + } + ] + }, + "privateContent": { + "anyOf": [ + { + "type": "object", + "propertyNames": { + "type": "string" + }, + "additionalProperties": { + "description": "UI-only state the model should NOT see.\nUse for ephemeral UI details like current view, filters, selections." + } + }, + { + "type": "null" + } + ], + "description": "UI-only state the model should NOT see.\nUse for ephemeral UI details like current view, filters, selections." 
+ }, + "imageIds": { + "description": "File IDs for images the model should reason about.\nUse file IDs from uploadFile() or received as file params.", + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + } + }, + "required": ["method", "params"], + "additionalProperties": false + }, "McpUiUpdateModelContextRequest": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", @@ -5698,6 +5797,78 @@ }, "required": ["method", "params"], "additionalProperties": false + }, + "McpUiUploadFileRequest": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "method": { + "type": "string", + "const": "ui/upload-file" + }, + "params": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "File name with extension" + }, + "mimeType": { + "type": "string", + "description": "MIME type of the file" + }, + "data": { + "type": "string", + "description": "Base64-encoded file data" + } + }, + "required": ["name", "mimeType", "data"], + "additionalProperties": false + } + }, + "required": ["method", "params"], + "additionalProperties": false + }, + "McpUiUploadFileResult": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "fileId": { + "type": "string", + "description": "The file ID to use in imageIds for model context" + } + }, + "required": ["fileId"], + "additionalProperties": {} + }, + "McpUiWidgetStateNotification": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "method": { + "type": "string", + "const": "ui/notifications/widget-state" + }, + "params": { + "type": "object", + "properties": { + "state": { + "type": "object", + "propertyNames": { + "type": "string" + }, + "additionalProperties": { + "description": "The persisted widget state from previous interaction." + }, + "description": "The persisted widget state from previous interaction." 
+ } + }, + "required": ["state"], + "additionalProperties": false + } + }, + "required": ["method", "params"], + "additionalProperties": false } } } diff --git a/src/generated/schema.test.ts b/src/generated/schema.test.ts index 727c28d2..6cb3e961 100644 --- a/src/generated/schema.test.ts +++ b/src/generated/schema.test.ts @@ -67,6 +67,14 @@ export type McpUiToolCancelledNotificationSchemaInferredType = z.infer< typeof generated.McpUiToolCancelledNotificationSchema >; +export type McpUiWidgetStateNotificationSchemaInferredType = z.infer< + typeof generated.McpUiWidgetStateNotificationSchema +>; + +export type McpUiUpdateModelContextNotificationSchemaInferredType = z.infer< + typeof generated.McpUiUpdateModelContextNotificationSchema +>; + export type McpUiHostCssSchemaInferredType = z.infer< typeof generated.McpUiHostCssSchema >; @@ -119,6 +127,22 @@ export type McpUiToolMetaSchemaInferredType = z.infer< typeof generated.McpUiToolMetaSchema >; +export type McpUiUploadFileRequestSchemaInferredType = z.infer< + typeof generated.McpUiUploadFileRequestSchema +>; + +export type McpUiUploadFileResultSchemaInferredType = z.infer< + typeof generated.McpUiUploadFileResultSchema +>; + +export type McpUiGetFileUrlRequestSchemaInferredType = z.infer< + typeof generated.McpUiGetFileUrlRequestSchema +>; + +export type McpUiGetFileUrlResultSchemaInferredType = z.infer< + typeof generated.McpUiGetFileUrlResultSchema +>; + export type McpUiMessageRequestSchemaInferredType = z.infer< typeof generated.McpUiMessageRequestSchema >; @@ -215,6 +239,18 @@ expectType( expectType( {} as spec.McpUiToolCancelledNotification, ); +expectType( + {} as McpUiWidgetStateNotificationSchemaInferredType, +); +expectType( + {} as spec.McpUiWidgetStateNotification, +); +expectType( + {} as McpUiUpdateModelContextNotificationSchemaInferredType, +); +expectType( + {} as spec.McpUiUpdateModelContextNotification, +); expectType({} as McpUiHostCssSchemaInferredType); expectType({} as spec.McpUiHostCss); expectType({} as McpUiHostStylesSchemaInferredType); @@ -277,6 +313,30 @@ expectType( ); expectType({} as McpUiToolMetaSchemaInferredType); expectType({} as spec.McpUiToolMeta); +expectType( + {} as McpUiUploadFileRequestSchemaInferredType, +); +expectType( + {} as spec.McpUiUploadFileRequest, +); +expectType( + {} as McpUiUploadFileResultSchemaInferredType, +); +expectType( + {} as spec.McpUiUploadFileResult, +); +expectType( + {} as McpUiGetFileUrlRequestSchemaInferredType, +); +expectType( + {} as spec.McpUiGetFileUrlRequest, +); +expectType( + {} as McpUiGetFileUrlResultSchemaInferredType, +); +expectType( + {} as spec.McpUiGetFileUrlResult, +); expectType( {} as McpUiMessageRequestSchemaInferredType, ); diff --git a/src/generated/schema.ts b/src/generated/schema.ts index 69abe496..4ca5b24f 100644 --- a/src/generated/schema.ts +++ b/src/generated/schema.ts @@ -315,6 +315,85 @@ export const McpUiToolCancelledNotificationSchema = z.object({ }), }); +/** + * @description Notification containing persisted widget state (Host -> Guest UI). + * + * This notification delivers previously persisted UI state on widget load. + * In OpenAI mode, this comes from window.openai.widgetState. Apps use this + * to hydrate their UI state from previous sessions. + * + * The state can be either a simple object or a StructuredWidgetState with + * separate modelContent/privateContent/imageIds fields. 
+ */ +export const McpUiWidgetStateNotificationSchema = z.object({ + method: z.literal("ui/notifications/widget-state"), + params: z.object({ + /** @description The persisted widget state from previous interaction. */ + state: z + .record( + z.string(), + z + .unknown() + .describe("The persisted widget state from previous interaction."), + ) + .describe("The persisted widget state from previous interaction."), + }), +}); + +/** + * @description Notification to update model context and persist widget state (Guest UI -> Host). + * + * This notification allows apps to update what the model sees for follow-up turns + * and persist UI state. In OpenAI mode, this calls window.openai.setWidgetState(). + * + * Use the structured format with modelContent/privateContent/imageIds for fine-grained + * control over what the model sees vs. what stays private to the UI. + */ +export const McpUiUpdateModelContextNotificationSchema = z.object({ + method: z.literal("ui/notifications/update-model-context"), + params: z.object({ + /** + * @description Text or JSON the model should see for follow-up reasoning. + * Keep focused and under 4k tokens. + */ + modelContent: z + .union([z.string(), z.record(z.string(), z.unknown())]) + .optional() + .describe( + "Text or JSON the model should see for follow-up reasoning.\nKeep focused and under 4k tokens.", + ) + .nullable(), + /** + * @description UI-only state the model should NOT see. + * Use for ephemeral UI details like current view, filters, selections. + */ + privateContent: z + .record( + z.string(), + z + .unknown() + .describe( + "UI-only state the model should NOT see.\nUse for ephemeral UI details like current view, filters, selections.", + ), + ) + .optional() + .nullable() + .describe( + "UI-only state the model should NOT see.\nUse for ephemeral UI details like current view, filters, selections.", + ), + /** + * @description File IDs for images the model should reason about. + * Use file IDs from uploadFile() or received as file params. + */ + imageIds: z + .array(z.string()) + .optional() + .describe( + "File IDs for images the model should reason about.\nUse file IDs from uploadFile() or received as file params.", + ), + }), +}); + /** * @description CSS blocks that can be injected by apps. */ @@ -568,6 +647,63 @@ export const McpUiToolMetaSchema = z.object({ ), }); +/** + * @description Request to upload a file for use in model context. + * + * This allows apps to upload images and other files that can be referenced + * in model context via imageIds in updateModelContext. + * + * @see {@link app.App.uploadFile} for the method that sends this request + */ +export const McpUiUploadFileRequestSchema = z.object({ + method: z.literal("ui/upload-file"), + params: z.object({ + /** @description File name with extension */ + name: z.string().describe("File name with extension"), + /** @description MIME type of the file */ + mimeType: z.string().describe("MIME type of the file"), + /** @description Base64-encoded file data */ + data: z.string().describe("Base64-encoded file data"), + }), +}); + +/** + * @description Result from uploading a file. + * @see {@link McpUiUploadFileRequest} + */ +export const McpUiUploadFileResultSchema = z + .object({ + /** @description The file ID to use in imageIds for model context */ + fileId: z + .string() + .describe("The file ID to use in imageIds for model context"), + }) + .passthrough(); + +/** + * @description Request to get a download URL for a previously uploaded file. 
+ * + * @see {@link app.App.getFileDownloadUrl} for the method that sends this request + */ +export const McpUiGetFileUrlRequestSchema = z.object({ + method: z.literal("ui/get-file-url"), + params: z.object({ + /** @description The file ID from a previous upload */ + fileId: z.string().describe("The file ID from a previous upload"), + }), +}); + +/** + * @description Result from getting a file download URL. + * @see {@link McpUiGetFileUrlRequest} + */ +export const McpUiGetFileUrlResultSchema = z + .object({ + /** @description Temporary download URL for the file */ + url: z.string().describe("Temporary download URL for the file"), + }) + .passthrough(); + /** * @description Request to send a message to the host's chat interface. * @see {@link app.App.sendMessage} for the method that sends this request diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index 962a0c6c..d9df4505 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -492,5 +492,304 @@ describe("OpenAITransport", () => { expect(messages).toHaveLength(0); }); + + test("delivers widget state notification when widgetState is present", async () => { + mockOpenAI.widgetState = { + modelContent: "Selected item: Widget A", + privateContent: { viewMode: "grid" }, + imageIds: ["file_123"], + }; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const widgetStateNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/widget-state", + ); + expect(widgetStateNotification).toMatchObject({ + jsonrpc: "2.0", + method: "ui/notifications/widget-state", + params: { + state: { + modelContent: "Selected item: Widget A", + privateContent: { viewMode: "grid" }, + imageIds: ["file_123"], + }, + }, + }); + }); + }); + + describe("ui/notifications/update-model-context notification", () => { + test("delegates to window.openai.setWidgetState()", async () => { + mockOpenAI.setWidgetState = mock(() => {}) as unknown as OpenAIGlobal["setWidgetState"]; + + const transport = new OpenAITransport(); + + await transport.send({ + jsonrpc: "2.0", + method: "ui/notifications/update-model-context", + params: { + modelContent: { selectedItem: "Widget B" }, + privateContent: { scrollPosition: 100 }, + imageIds: ["file_456"], + }, + }); + + expect(mockOpenAI.setWidgetState).toHaveBeenCalledWith({ + modelContent: { selectedItem: "Widget B" }, + privateContent: { scrollPosition: 100 }, + imageIds: ["file_456"], + }); + }); + + test("uses defaults when params are missing", async () => { + mockOpenAI.setWidgetState = mock(() => {}) as unknown as OpenAIGlobal["setWidgetState"]; + + const transport = new OpenAITransport(); + + await transport.send({ + jsonrpc: "2.0", + method: "ui/notifications/update-model-context", + params: { + modelContent: "Just text", + }, + }); + + expect(mockOpenAI.setWidgetState).toHaveBeenCalledWith({ + modelContent: "Just text", + privateContent: null, + imageIds: [], + }); + }); + + test("does nothing when setWidgetState is not available", async () => { + delete mockOpenAI.setWidgetState; + + const transport = new OpenAITransport(); + + // Should not throw + await transport.send({ + jsonrpc: "2.0", + method: "ui/notifications/update-model-context", + params: { modelContent: "test" }, + }); + }); + }); + + describe("ui/upload-file request", () => { + 
test("delegates to window.openai.uploadFile()", async () => { + mockOpenAI.uploadFile = mock(() => + Promise.resolve({ fileId: "file_abc123" }), + ) as unknown as OpenAIGlobal["uploadFile"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + // Base64 encoded "test" + const base64Data = btoa("test file content"); + + await transport.send({ + jsonrpc: "2.0", + id: 10, + method: "ui/upload-file", + params: { + name: "test.txt", + mimeType: "text/plain", + data: base64Data, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.uploadFile).toHaveBeenCalled(); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 10, + result: { fileId: "file_abc123" }, + }); + }); + + test("returns error when uploadFile is not available", async () => { + delete mockOpenAI.uploadFile; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 11, + method: "ui/upload-file", + params: { + name: "test.txt", + mimeType: "text/plain", + data: btoa("test"), + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 11, + error: { + code: -32601, + message: expect.stringContaining("not supported"), + }, + }); + }); + }); + + describe("ui/get-file-url request", () => { + test("delegates to window.openai.getFileDownloadUrl()", async () => { + mockOpenAI.getFileDownloadUrl = mock(() => + Promise.resolve({ url: "https://cdn.openai.com/files/file_abc123" }), + ) as unknown as OpenAIGlobal["getFileDownloadUrl"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 12, + method: "ui/get-file-url", + params: { fileId: "file_abc123" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.getFileDownloadUrl).toHaveBeenCalledWith({ + fileId: "file_abc123", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 12, + result: { url: "https://cdn.openai.com/files/file_abc123" }, + }); + }); + + test("returns error when getFileDownloadUrl is not available", async () => { + delete mockOpenAI.getFileDownloadUrl; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 13, + method: "ui/get-file-url", + params: { fileId: "file_xyz" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 13, + error: { + code: -32601, + message: expect.stringContaining("not supported"), + }, + }); + }); + }); + + describe("ui/message with image content", () => { + test("uploads images and adds to model context", async () => { + mockOpenAI.uploadFile = mock(() => + Promise.resolve({ fileId: "file_img123" }), + ) as unknown as OpenAIGlobal["uploadFile"]; + mockOpenAI.setWidgetState = mock(() => {}) as unknown as OpenAIGlobal["setWidgetState"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + // Base64 encoded minimal PNG (1x1 pixel) + const pngData = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="; + + await transport.send({ + jsonrpc: "2.0", + id: 
14, + method: "ui/message", + params: { + role: "user", + content: [ + { type: "text", text: "Check out this image" }, + { type: "image", data: pngData, mimeType: "image/png" }, + ], + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.uploadFile).toHaveBeenCalled(); + expect(mockOpenAI.setWidgetState).toHaveBeenCalledWith( + expect.objectContaining({ + imageIds: expect.arrayContaining(["file_img123"]), + }), + ); + expect(mockOpenAI.sendFollowUpMessage).toHaveBeenCalledWith({ + prompt: "Check out this image", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 14, + result: {}, + }); + }); + + test("merges new imageIds with existing ones", async () => { + mockOpenAI.widgetState = { imageIds: ["existing_file"] }; + mockOpenAI.uploadFile = mock(() => + Promise.resolve({ fileId: "new_file" }), + ) as unknown as OpenAIGlobal["uploadFile"]; + mockOpenAI.setWidgetState = mock(() => {}) as unknown as OpenAIGlobal["setWidgetState"]; + + const transport = new OpenAITransport(); + transport.onmessage = () => {}; + + const pngData = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="; + + await transport.send({ + jsonrpc: "2.0", + id: 15, + method: "ui/message", + params: { + role: "user", + content: [{ type: "image", data: pngData, mimeType: "image/png" }], + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.setWidgetState).toHaveBeenCalledWith( + expect.objectContaining({ + imageIds: ["existing_file", "new_file"], + }), + ); + }); }); }); diff --git a/src/openai/transport.ts b/src/openai/transport.ts index f4c9c6aa..d5ad76bc 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -194,6 +194,15 @@ export class OpenAITransport implements Transport { params as { mode: string }, ); + case "ui/upload-file": + return await this.handleUploadFile( + id, + params as { name: string; mimeType: string; data: string }, + ); + + case "ui/get-file-url": + return await this.handleGetFileUrl(id, params as { fileId: string }); + case "ping": return this.createSuccessResponse(id, {}); @@ -341,6 +350,9 @@ export class OpenAITransport implements Transport { /** * Handle ui/message request by delegating to window.openai.sendFollowUpMessage(). + * + * For image content, images are uploaded via uploadFile and added to model context + * via setWidgetState, since sendFollowUpMessage only accepts text. 
    */
   private async handleMessage(
     id: RequestId,
@@ -365,11 +377,76 @@ export class OpenAITransport implements Transport {
       .map((c) => c.text)
       .join("\n");
 
-    await this.openai.sendFollowUpMessage({ prompt: textContent });
+    // Extract image content blocks
+    const imageContent = params.content.filter(
+      (c): c is { type: "image"; data: string; mimeType: string } =>
+        typeof c === "object" &&
+        c !== null &&
+        (c as { type?: string }).type === "image",
+    );
+
+    // Upload images if present and uploadFile is available
+    const imageIds: string[] = [];
+    if (imageContent.length > 0 && this.openai.uploadFile) {
+      for (const image of imageContent) {
+        try {
+          // Convert base64 data to File object
+          const binaryString = atob(image.data);
+          const bytes = new Uint8Array(binaryString.length);
+          for (let i = 0; i < binaryString.length; i++) {
+            bytes[i] = binaryString.charCodeAt(i);
+          }
+          const blob = new Blob([bytes], { type: image.mimeType });
+          const file = new File([blob], `image.${this.getExtension(image.mimeType)}`, {
+            type: image.mimeType,
+          });
+
+          const result = await this.openai.uploadFile(file);
+          imageIds.push(result.fileId);
+        } catch (error) {
+          console.warn("[MCP App] Failed to upload image:", error);
+        }
+      }
+
+      // Add uploaded images to model context via setWidgetState
+      if (imageIds.length > 0 && this.openai.setWidgetState) {
+        // Get current state and merge with new imageIds
+        const currentState = this.openai.widgetState ?? {};
+        const existingImageIds = Array.isArray(
+          (currentState as { imageIds?: unknown }).imageIds,
+        )
+          ? ((currentState as { imageIds: string[] }).imageIds)
+          : [];
+
+        this.openai.setWidgetState({
+          ...currentState,
+          imageIds: [...existingImageIds, ...imageIds],
+        });
+      }
+    }
+
+    // Send the text follow-up; skip it only when the message was images-only and the images were uploaded
+    if (textContent || imageIds.length === 0) {
+      await this.openai.sendFollowUpMessage({ prompt: textContent });
+    }
 
     return this.createSuccessResponse(id, {});
   }
 
+  /**
+   * Get file extension from MIME type.
+   */
+  private getExtension(mimeType: string): string {
+    const mimeToExt: Record<string, string> = {
+      "image/png": "png",
+      "image/jpeg": "jpg",
+      "image/gif": "gif",
+      "image/webp": "webp",
+      "image/svg+xml": "svg",
+    };
+    return mimeToExt[mimeType] ?? "bin";
+  }
+
   /**
    * Handle ui/open-link request by delegating to window.openai.openExternal().
    */
@@ -411,6 +488,57 @@ export class OpenAITransport implements Transport {
     return this.createSuccessResponse(id, { mode });
   }
 
+  /**
+   * Handle ui/upload-file by delegating to window.openai.uploadFile().
+   */
+  private async handleUploadFile(
+    id: RequestId,
+    params: { name: string; mimeType: string; data: string },
+  ): Promise<JSONRPCSuccessResponse | JSONRPCErrorResponse> {
+    if (!this.openai.uploadFile) {
+      return this.createErrorResponse(
+        id,
+        -32601,
+        "File upload is not supported in this OpenAI environment",
+      );
+    }
+
+    // Convert base64 data back to File object
+    const binaryString = atob(params.data);
+    const bytes = new Uint8Array(binaryString.length);
+    for (let i = 0; i < binaryString.length; i++) {
+      bytes[i] = binaryString.charCodeAt(i);
+    }
+    const blob = new Blob([bytes], { type: params.mimeType });
+    const file = new File([blob], params.name, { type: params.mimeType });
+
+    const result = await this.openai.uploadFile(file);
+
+    return this.createSuccessResponse(id, { fileId: result.fileId });
+  }
+
+  /**
+   * Handle ui/get-file-url by delegating to window.openai.getFileDownloadUrl().
+   */
+  private async handleGetFileUrl(
+    id: RequestId,
+    params: { fileId: string },
+  ): Promise<JSONRPCSuccessResponse | JSONRPCErrorResponse> {
+    if (!this.openai.getFileDownloadUrl) {
+      return this.createErrorResponse(
+        id,
+        -32601,
+        "File URL retrieval is not supported in this OpenAI environment",
+      );
+    }
+
+    const result = await this.openai.getFileDownloadUrl({
+      fileId: params.fileId,
+    });
+
+    return this.createSuccessResponse(id, { url: result.url });
+  }
+
   /**
    * Handle an outgoing notification.
    */
@@ -422,6 +550,16 @@ export class OpenAITransport implements Transport {
         this.handleSizeChanged(params as { width?: number; height?: number });
         break;
 
+      case "ui/notifications/update-model-context":
+        this.handleUpdateModelContext(
+          params as {
+            modelContent?: string | Record<string, unknown> | null;
+            privateContent?: Record<string, unknown> | null;
+            imageIds?: string[];
+          },
+        );
+        break;
+
       case "ui/notifications/initialized":
         // No-op - OpenAI doesn't need this notification
         break;
@@ -446,6 +584,24 @@ export class OpenAITransport implements Transport {
     }
   }
 
+  /**
+   * Handle update model context notification by calling window.openai.setWidgetState().
+   */
+  private handleUpdateModelContext(params: {
+    modelContent?: string | Record<string, unknown> | null;
+    privateContent?: Record<string, unknown> | null;
+    imageIds?: string[];
+  }): void {
+    if (this.openai.setWidgetState) {
+      // Construct StructuredWidgetState format
+      this.openai.setWidgetState({
+        modelContent: params.modelContent ?? null,
+        privateContent: params.privateContent ?? null,
+        imageIds: params.imageIds ?? [],
+      });
+    }
+  }
+
   /**
    * Create a success JSON-RPC response.
    */
@@ -476,7 +632,7 @@ export class OpenAITransport implements Transport {
   }
 
   /**
-   * Deliver initial tool input and result notifications.
+   * Deliver initial tool input, result, and widget state notifications.
    *
    * Called by App after connection to deliver pre-populated data from
    * window.openai as notifications that the app's handlers expect.
@@ -495,6 +651,17 @@ export class OpenAITransport implements Transport {
       });
     }
 
+    // Deliver widget state if available (for state hydration)
+    if (this.openai.widgetState !== undefined) {
+      queueMicrotask(() => {
+        this.onmessage?.({
+          jsonrpc: "2.0",
+          method: "ui/notifications/widget-state",
+          params: { state: this.openai.widgetState },
+        } as JSONRPCNotification);
+      });
+    }
+
     // Deliver tool output if available (check for both null and undefined)
     if (this.openai.toolOutput != null) {
       queueMicrotask(() => {
diff --git a/src/openai/types.ts b/src/openai/types.ts
index 435823f9..e1252c3e 100644
--- a/src/openai/types.ts
+++ b/src/openai/types.ts
@@ -28,6 +28,36 @@ export interface OpenAISafeArea {
   left: number;
 }
 
+/**
+ * Structured widget state for OpenAI Apps SDK.
+ *
+ * This type allows separating model-visible context from private UI state.
+ * When using setWidgetState with this shape, the host will:
+ * - Expose `modelContent` and `imageIds` to the model for context awareness
+ * - Keep `privateContent` isolated from the conversation model
+ *
+ * @see https://developers.openai.com/apps-sdk/build/state-management/
+ */
+export interface StructuredWidgetState {
+  /**
+   * Text or JSON the model should see for follow-up reasoning.
+   * Keep focused and under 4k tokens.
+   */
+  modelContent: string | Record<string, unknown> | null;
+
+  /**
+   * UI-only state the model should NOT see.
+   * Use for ephemeral UI details like current view, filters, selections.
+   */
+  privateContent: Record<string, unknown> | null;
+
+  /**
+   * File IDs uploaded by the widget (via uploadFile) or received as file params.
+   * The model can reason about these images in follow-up turns.
+   */
+  imageIds: string[];
+}
+
 /**
  * Result of a tool call via window.openai.callTool().
  *
@@ -74,8 +104,11 @@ export interface OpenAIGlobal {
   /**
    * Persisted UI state snapshot between renders.
    * Set via setWidgetState(), rehydrated on subsequent renders.
+   *
+   * Can be either a simple object or a StructuredWidgetState with
+   * modelContent/privateContent/imageIds separation.
    */
-  widgetState?: unknown;
+  widgetState?: StructuredWidgetState | Record<string, unknown>;
 
   /**
    * Current theme setting.
@@ -120,9 +153,14 @@ export interface OpenAIGlobal {
    * Persist UI state synchronously after interactions.
    * State is scoped to this widget instance and rehydrated on re-renders.
    *
-   * @param state - State object to persist
+   * When using StructuredWidgetState shape:
+   * - `modelContent` is exposed to the model for follow-up reasoning
+   * - `privateContent` stays private to the UI
+   * - `imageIds` allows the model to reason about uploaded images
+   *
+   * @param state - State object to persist (simple or StructuredWidgetState)
    */
-  setWidgetState?(state: unknown): void;
+  setWidgetState?(state: StructuredWidgetState | Record<string, unknown>): void;
 
   // ─────────────────────────────────────────────────────────────────────────
   // Tool & Chat Integration Methods
diff --git a/src/spec.types.ts b/src/spec.types.ts
index 998e4d26..d5a945cb 100644
--- a/src/spec.types.ts
+++ b/src/spec.types.ts
@@ -283,6 +283,56 @@ export interface McpUiToolCancelledNotification {
   };
 }
 
+/**
+ * @description Notification containing persisted widget state (Host -> Guest UI).
+ *
+ * This notification delivers previously persisted UI state on widget load.
+ * In OpenAI mode, this comes from window.openai.widgetState. Apps use this
+ * to hydrate their UI state from previous sessions.
+ *
+ * The state can be either a simple object or a StructuredWidgetState with
+ * separate modelContent/privateContent/imageIds fields.
+ */
+export interface McpUiWidgetStateNotification {
+  method: "ui/notifications/widget-state";
+  params: {
+    /** @description The persisted widget state from previous interaction. */
+    state: Record<string, unknown>;
+  };
+}
+
+/**
+ * @description Notification to update model context and persist widget state (Guest UI -> Host).
+ *
+ * This notification allows apps to update what the model sees for follow-up turns
+ * and persist UI state. In OpenAI mode, this calls window.openai.setWidgetState().
+ *
+ * Use the structured format with modelContent/privateContent/imageIds for fine-grained
+ * control over what the model sees vs. what stays private to the UI.
+ */
+export interface McpUiUpdateModelContextNotification {
+  method: "ui/notifications/update-model-context";
+  params: {
+    /**
+     * @description Text or JSON the model should see for follow-up reasoning.
+     * Keep focused and under 4k tokens.
+     */
+    modelContent?: string | Record<string, unknown> | null;
+
+    /**
+     * @description UI-only state the model should NOT see.
+     * Use for ephemeral UI details like current view, filters, selections.
+     */
+    privateContent?: Record<string, unknown> | null;
+
+    /**
+     * @description File IDs for images the model should reason about.
+     * Use file IDs from uploadFile() or received as file params.
+     */
+    imageIds?: string[];
+  };
+}
+
 /**
  * @description CSS blocks that can be injected by apps.
  */
@@ -630,6 +680,65 @@ export interface McpUiToolMeta {
   visibility?: McpUiToolVisibility[];
 }
 
+/**
+ * @description Request to upload a file for use in model context.
+ * + * This allows apps to upload images and other files that can be referenced + * in model context via imageIds in updateModelContext. + * + * @see {@link app.App.uploadFile} for the method that sends this request + */ +export interface McpUiUploadFileRequest { + method: "ui/upload-file"; + params: { + /** @description File name with extension */ + name: string; + /** @description MIME type of the file */ + mimeType: string; + /** @description Base64-encoded file data */ + data: string; + }; +} + +/** + * @description Result from uploading a file. + * @see {@link McpUiUploadFileRequest} + */ +export interface McpUiUploadFileResult { + /** @description The file ID to use in imageIds for model context */ + fileId: string; + /** + * Index signature required for MCP SDK `Protocol` class compatibility. + */ + [key: string]: unknown; +} + +/** + * @description Request to get a download URL for a previously uploaded file. + * + * @see {@link app.App.getFileDownloadUrl} for the method that sends this request + */ +export interface McpUiGetFileUrlRequest { + method: "ui/get-file-url"; + params: { + /** @description The file ID from a previous upload */ + fileId: string; + }; +} + +/** + * @description Result from getting a file download URL. + * @see {@link McpUiGetFileUrlRequest} + */ +export interface McpUiGetFileUrlResult { + /** @description Temporary download URL for the file */ + url: string; + /** + * Index signature required for MCP SDK `Protocol` class compatibility. + */ + [key: string]: unknown; +} + /** * Method string constants for MCP Apps protocol messages. * diff --git a/src/types.ts b/src/types.ts index 5cbc0b48..5a41da17 100644 --- a/src/types.ts +++ b/src/types.ts @@ -61,6 +61,12 @@ export { type McpUiRequestDisplayModeResult, type McpUiToolVisibility, type McpUiToolMeta, + type McpUiWidgetStateNotification, + type McpUiUpdateModelContextNotification, + type McpUiUploadFileRequest, + type McpUiUploadFileResult, + type McpUiGetFileUrlRequest, + type McpUiGetFileUrlResult, } from "./spec.types.js"; // Import types needed for protocol type unions (not re-exported, just used internally) @@ -76,6 +82,8 @@ import type { McpUiToolInputPartialNotification, McpUiToolResultNotification, McpUiToolCancelledNotification, + McpUiWidgetStateNotification, + McpUiUpdateModelContextNotification, McpUiSandboxResourceReadyNotification, McpUiInitializedNotification, McpUiSizeChangedNotification, @@ -85,6 +93,10 @@ import type { McpUiMessageResult, McpUiResourceTeardownResult, McpUiRequestDisplayModeResult, + McpUiUploadFileRequest, + McpUiUploadFileResult, + McpUiGetFileUrlRequest, + McpUiGetFileUrlResult, } from "./spec.types.js"; // Re-export all schemas from generated/schema.ts (already PascalCase) @@ -122,6 +134,12 @@ export { McpUiRequestDisplayModeResultSchema, McpUiToolVisibilitySchema, McpUiToolMetaSchema, + McpUiWidgetStateNotificationSchema, + McpUiUpdateModelContextNotificationSchema, + McpUiUploadFileRequestSchema, + McpUiUploadFileResultSchema, + McpUiGetFileUrlRequestSchema, + McpUiGetFileUrlResultSchema, } from "./generated/schema.js"; // Re-export SDK types used in protocol type unions @@ -161,6 +179,8 @@ export type AppRequest = | McpUiUpdateModelContextRequest | McpUiResourceTeardownRequest | McpUiRequestDisplayModeRequest + | McpUiUploadFileRequest + | McpUiGetFileUrlRequest | CallToolRequest | ListToolsRequest | ListResourcesRequest @@ -189,6 +209,7 @@ export type AppNotification = | McpUiToolInputPartialNotification | McpUiToolResultNotification | 
McpUiToolCancelledNotification + | McpUiWidgetStateNotification | McpUiSandboxResourceReadyNotification | ToolListChangedNotification | ResourceListChangedNotification @@ -197,6 +218,7 @@ export type AppNotification = | McpUiInitializedNotification | McpUiSizeChangedNotification | McpUiSandboxProxyReadyNotification + | McpUiUpdateModelContextNotification | LoggingMessageNotification; /** @@ -208,6 +230,8 @@ export type AppResult = | McpUiMessageResult | McpUiResourceTeardownResult | McpUiRequestDisplayModeResult + | McpUiUploadFileResult + | McpUiGetFileUrlResult | CallToolResult | ListToolsResult | ListResourcesResult From e97a57177d03b0fa564fdc880c0e71a0c622c4fb Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 13 Jan 2026 14:14:56 +0000 Subject: [PATCH 23/30] fix(openai): Use main's request-based updateModelContext API - Wire ui/update-model-context request to setWidgetState in OpenAI transport - Translate MCP content/structuredContent to OpenAI's modelContent format - Update tests to use request-based API instead of notification - Keep widget state notification for hydration (onwidgetstate) --- src/app.ts | 46 --------------------- src/openai/transport.test.ts | 80 +++++++++++++++++++++++++++--------- src/openai/transport.ts | 77 +++++++++++++++++++++++----------- 3 files changed, 115 insertions(+), 88 deletions(-) diff --git a/src/app.ts b/src/app.ts index 7abfbd90..86ab971a 100644 --- a/src/app.ts +++ b/src/app.ts @@ -46,7 +46,6 @@ import { McpUiToolResultNotificationSchema, McpUiWidgetStateNotification, McpUiWidgetStateNotificationSchema, - McpUiUpdateModelContextNotification, McpUiUploadFileRequest, McpUiUploadFileResultSchema, McpUiGetFileUrlRequest, @@ -1033,51 +1032,6 @@ export class App extends Protocol { }); } - /** - * Update model context and persist widget state. - * - * This method allows apps to update what the model sees for follow-up turns - * and persist UI state. In OpenAI mode, this calls window.openai.setWidgetState(). - * - * Use the structured format with modelContent/privateContent/imageIds for fine-grained - * control over what the model sees vs. what stays private to the UI: - * - `modelContent`: Text or JSON visible to the model for follow-up reasoning - * - `privateContent`: UI-only state hidden from the model (view mode, selections, etc.) - * - `imageIds`: File IDs from uploadFile() for images the model should reason about - * - * @param params - Model context and widget state to persist - * - * @example Update model context with selection - * ```typescript - * app.updateModelContext({ - * modelContent: { selectedItem: item.name, quantity: 5 }, - * privateContent: { viewMode: "grid", scrollPosition: 150 }, - * }); - * ``` - * - * @example Update with uploaded images - * ```typescript - * const { fileId } = await app.uploadFile(imageFile); - * app.updateModelContext({ - * modelContent: "User uploaded an image for analysis", - * imageIds: [fileId], - * }); - * ``` - * - * @returns Promise that resolves when the notification is sent - * - * @see {@link McpUiUpdateModelContextNotification} for notification structure - * @see {@link onwidgetstate} for receiving persisted state on reload - */ - updateModelContext( - params: McpUiUpdateModelContextNotification["params"], - ) { - return this.notification({ - method: "ui/notifications/update-model-context", - params, - }); - } - /** * Upload a file for use in model context. 
* diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index d9df4505..1b3d58e0 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -528,59 +528,101 @@ describe("OpenAITransport", () => { }); }); - describe("ui/notifications/update-model-context notification", () => { - test("delegates to window.openai.setWidgetState()", async () => { - mockOpenAI.setWidgetState = mock(() => {}) as unknown as OpenAIGlobal["setWidgetState"]; + describe("ui/update-model-context request", () => { + test("delegates to window.openai.setWidgetState() with structuredContent", async () => { + mockOpenAI.setWidgetState = mock( + () => {}, + ) as unknown as OpenAIGlobal["setWidgetState"]; const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; await transport.send({ jsonrpc: "2.0", - method: "ui/notifications/update-model-context", + id: 20, + method: "ui/update-model-context", params: { - modelContent: { selectedItem: "Widget B" }, - privateContent: { scrollPosition: 100 }, - imageIds: ["file_456"], + structuredContent: { selectedItem: "Widget B", quantity: 5 }, }, }); + await new Promise((resolve) => setTimeout(resolve, 0)); + expect(mockOpenAI.setWidgetState).toHaveBeenCalledWith({ - modelContent: { selectedItem: "Widget B" }, - privateContent: { scrollPosition: 100 }, - imageIds: ["file_456"], + modelContent: { selectedItem: "Widget B", quantity: 5 }, + privateContent: null, + imageIds: [], + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 20, + result: {}, }); }); - test("uses defaults when params are missing", async () => { - mockOpenAI.setWidgetState = mock(() => {}) as unknown as OpenAIGlobal["setWidgetState"]; + test("extracts text from content blocks", async () => { + mockOpenAI.setWidgetState = mock( + () => {}, + ) as unknown as OpenAIGlobal["setWidgetState"]; const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; await transport.send({ jsonrpc: "2.0", - method: "ui/notifications/update-model-context", + id: 21, + method: "ui/update-model-context", params: { - modelContent: "Just text", + content: [ + { type: "text", text: "Line one" }, + { type: "text", text: "Line two" }, + ], }, }); + await new Promise((resolve) => setTimeout(resolve, 0)); + expect(mockOpenAI.setWidgetState).toHaveBeenCalledWith({ - modelContent: "Just text", + modelContent: "Line one\nLine two", privateContent: null, imageIds: [], }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 21, + result: {}, + }); }); - test("does nothing when setWidgetState is not available", async () => { + test("returns success when setWidgetState is not available", async () => { delete mockOpenAI.setWidgetState; const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; - // Should not throw await transport.send({ jsonrpc: "2.0", - method: "ui/notifications/update-model-context", - params: { modelContent: "test" }, + id: 22, + method: "ui/update-model-context", + params: { structuredContent: { test: true } }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + // Should still return success (no-op if not supported) + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 22, + result: {}, }); }); }); diff --git a/src/openai/transport.ts b/src/openai/transport.ts index d5ad76bc..89640fa6 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -203,6 
+203,15 @@ export class OpenAITransport implements Transport {
       case "ui/get-file-url":
         return await this.handleGetFileUrl(id, params as { fileId: string });
 
+      case "ui/update-model-context":
+        return this.handleUpdateModelContextRequest(
+          id,
+          params as {
+            content?: unknown[];
+            structuredContent?: Record<string, unknown>;
+          },
+        );
+
       case "ping":
         return this.createSuccessResponse(id, {});
 
@@ -550,16 +559,6 @@ export class OpenAITransport implements Transport {
         this.handleSizeChanged(params as { width?: number; height?: number });
         break;
 
-      case "ui/notifications/update-model-context":
-        this.handleUpdateModelContext(
-          params as {
-            modelContent?: string | Record<string, unknown> | null;
-            privateContent?: Record<string, unknown> | null;
-            imageIds?: string[];
-          },
-        );
-        break;
-
       case "ui/notifications/initialized":
         // No-op - OpenAI doesn't need this notification
         break;
@@ -585,21 +584,53 @@ export class OpenAITransport implements Transport {
   }
 
   /**
-   * Handle update model context notification by calling window.openai.setWidgetState().
+   * Handle ui/update-model-context request by calling window.openai.setWidgetState().
+   *
+   * Translates MCP's content/structuredContent format to OpenAI's setWidgetState format.
    */
-  private handleUpdateModelContext(params: {
-    modelContent?: string | Record<string, unknown> | null;
-    privateContent?: Record<string, unknown> | null;
-    imageIds?: string[];
-  }): void {
-    if (this.openai.setWidgetState) {
-      // Construct StructuredWidgetState format
-      this.openai.setWidgetState({
-        modelContent: params.modelContent ?? null,
-        privateContent: params.privateContent ?? null,
-        imageIds: params.imageIds ?? [],
-      });
+  private handleUpdateModelContextRequest(
+    id: RequestId,
+    params: {
+      content?: unknown[];
+      structuredContent?: Record<string, unknown>;
+    },
+  ): JSONRPCSuccessResponse | JSONRPCErrorResponse {
+    if (!this.openai.setWidgetState) {
+      // No-op if setWidgetState is not available, but still return success
+      // since the host may not need to persist state
+      return this.createSuccessResponse(id, {});
     }
+
+    // Translate MCP format to OpenAI's setWidgetState format
+    // content is ContentBlock[], structuredContent is freeform JSON
+    let modelContent: string | Record<string, unknown> | null = null;
+
+    // If structuredContent is provided, use it as modelContent
+    if (params.structuredContent) {
+      modelContent = params.structuredContent;
+    } else if (params.content && params.content.length > 0) {
+      // Extract text from content blocks
+      const textParts = params.content
+        .filter(
+          (c): c is { type: "text"; text: string } =>
+            typeof c === "object" &&
+            c !== null &&
+            (c as { type?: string }).type === "text",
+        )
+        .map((c) => c.text);
+
+      if (textParts.length > 0) {
+        modelContent = textParts.join("\n");
+      }
+    }
+
+    this.openai.setWidgetState({
+      modelContent,
+      privateContent: null,
+      imageIds: [],
+    });
+
+    return this.createSuccessResponse(id, {});
   }
 
   /**

From e119d6455994df023eafa9faede9db33f7cf3e4c Mon Sep 17 00:00:00 2001
From: Olivier Chafik
Date: Wed, 14 Jan 2026 22:56:05 +0000
Subject: [PATCH 24/30] feat(openai): Forward runtime context changes and add safe area handling

- Add event listener for 'openai:set_globals' in OpenAITransport.start() to
  forward runtime property changes (theme, displayMode, safeArea, maxHeight,
  locale, userAgent) as ui/notifications/host-context-changed
- Clean up event listener in OpenAITransport.close()
- Add unit tests for the new event forwarding functionality
- Update map-server to handle safe area insets:
  - Adjust fullscreen button position based on insets
  - Keep map full-bleed while ensuring controls aren't obscured
  - Listen for 
runtime inset changes via onhostcontextchanged --- examples/map-server/src/mcp-app.ts | 50 +++++++++++- src/openai/transport.test.ts | 85 +++++++++++++++++++- src/openai/transport.ts | 124 +++++++++++++++++++++++++++-- 3 files changed, 245 insertions(+), 14 deletions(-) diff --git a/examples/map-server/src/mcp-app.ts b/examples/map-server/src/mcp-app.ts index 9b2a068f..46598768 100644 --- a/examples/map-server/src/mcp-app.ts +++ b/examples/map-server/src/mcp-app.ts @@ -713,6 +713,41 @@ const PREFERRED_INLINE_HEIGHT = 400; // Current display mode let currentDisplayMode: "inline" | "fullscreen" | "pip" = "inline"; +// Default button offset from edge (matches CSS) +const BUTTON_EDGE_OFFSET = 10; + +/** + * Safe area insets from host context. + * Used to offset fixed UI elements on mobile devices with notches/etc. + */ +interface SafeAreaInsets { + top: number; + right: number; + bottom: number; + left: number; +} + +/** + * Update fixed UI element positions based on safe area insets. + * This keeps the map full-bleed while ensuring controls aren't obscured + * by device notches, status bars, or navigation bars. + */ +function applySafeAreaInsets(insets?: SafeAreaInsets): void { + const btn = document.getElementById("fullscreen-btn"); + if (btn) { + // Offset button from top-right corner, accounting for safe area + btn.style.top = `${BUTTON_EDGE_OFFSET + (insets?.top ?? 0)}px`; + btn.style.right = `${BUTTON_EDGE_OFFSET + (insets?.right ?? 0)}px`; + } + + // Also adjust loading indicator if visible + const loadingEl = document.getElementById("loading"); + if (loadingEl && insets) { + // Center with safe area awareness (only affects vertical position) + loadingEl.style.top = `calc(50% + ${(insets.top - insets.bottom) / 2}px)`; + } +} + // Create App instance with tool capabilities // autoResize: false - we manually send size since map fills its container const app = new App( @@ -823,7 +858,7 @@ app.onteardown = async () => { app.onerror = log.error; -// Listen for host context changes (display mode, theme, etc.) +// Listen for host context changes (display mode, theme, safe area, etc.) 
app.onhostcontextchanged = (params) => { log.info("Host context changed:", params); @@ -837,6 +872,11 @@ app.onhostcontextchanged = (params) => { if (params.availableDisplayModes) { updateFullscreenButton(); } + + // Update UI element positions if safe area insets changed + if (params.safeAreaInsets) { + applySafeAreaInsets(params.safeAreaInsets); + } }; // Handle initial tool input (bounding box from show-map tool) @@ -984,7 +1024,7 @@ async function init() { await app.connect(); log.info("Connected to host"); - // Get initial display mode from host context + // Get initial context from host const context = app.getHostContext(); if (context?.displayMode) { currentDisplayMode = context.displayMode as @@ -994,6 +1034,12 @@ async function init() { } log.info("Initial display mode:", currentDisplayMode); + // Apply initial safe area insets for mobile devices + if (context?.safeAreaInsets) { + applySafeAreaInsets(context.safeAreaInsets); + log.info("Applied safe area insets:", context.safeAreaInsets); + } + // Tell host our preferred size for inline mode if (currentDisplayMode === "inline") { app.sendSizeChanged({ height: PREFERRED_INLINE_HEIGHT }); diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index 1b3d58e0..ad2c58b1 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -59,6 +59,9 @@ describe("OpenAITransport", () => { (globalThis as { window?: unknown }).window = { openai: mockOpenAI, + // Mock event listener methods for start()/close() tests + addEventListener: mock(() => {}), + removeEventListener: mock(() => {}), }; }); @@ -93,6 +96,74 @@ describe("OpenAITransport", () => { expect(onclose).toHaveBeenCalled(); }); + test("start() sets up event listener and close() removes it", async () => { + const mockWindow = globalThis.window as unknown as { + addEventListener: ReturnType; + removeEventListener: ReturnType; + }; + + const transport = new OpenAITransport(); + + await transport.start(); + expect(mockWindow.addEventListener).toHaveBeenCalledWith( + "openai:set_globals", + expect.any(Function), + { passive: true }, + ); + + await transport.close(); + expect(mockWindow.removeEventListener).toHaveBeenCalledWith( + "openai:set_globals", + expect.any(Function), + ); + }); + + test("start() forwards host context changes as notifications", async () => { + // Capture the event handler when addEventListener is called + let capturedHandler: ((event: Event) => void) | undefined; + const mockWindow = globalThis.window as unknown as { + addEventListener: ReturnType; + }; + mockWindow.addEventListener = mock((event: string, handler: (event: Event) => void) => { + if (event === "openai:set_globals") { + capturedHandler = handler; + } + }); + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + await transport.start(); + expect(capturedHandler).toBeDefined(); + + // Simulate ChatGPT sending a globals change event + const mockEvent = new CustomEvent("openai:set_globals", { + detail: { + theme: "light", + safeArea: { top: 44, right: 0, bottom: 34, left: 0 }, + displayMode: "fullscreen", + }, + }); + capturedHandler!(mockEvent); + + // Wait for microtask + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(messages).toHaveLength(1); + expect(messages[0]).toMatchObject({ + jsonrpc: "2.0", + method: "ui/notifications/host-context-changed", + params: { + theme: "light", + safeAreaInsets: { top: 44, right: 0, bottom: 34, left: 0 }, + displayMode: 
"fullscreen", + }, + }); + }); + describe("ui/initialize request", () => { test("returns synthesized host info from window.openai", async () => { const transport = new OpenAITransport(); @@ -761,7 +832,9 @@ describe("OpenAITransport", () => { mockOpenAI.uploadFile = mock(() => Promise.resolve({ fileId: "file_img123" }), ) as unknown as OpenAIGlobal["uploadFile"]; - mockOpenAI.setWidgetState = mock(() => {}) as unknown as OpenAIGlobal["setWidgetState"]; + mockOpenAI.setWidgetState = mock( + () => {}, + ) as unknown as OpenAIGlobal["setWidgetState"]; const transport = new OpenAITransport(); let response: unknown; @@ -770,7 +843,8 @@ describe("OpenAITransport", () => { }; // Base64 encoded minimal PNG (1x1 pixel) - const pngData = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="; + const pngData = + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="; await transport.send({ jsonrpc: "2.0", @@ -808,12 +882,15 @@ describe("OpenAITransport", () => { mockOpenAI.uploadFile = mock(() => Promise.resolve({ fileId: "new_file" }), ) as unknown as OpenAIGlobal["uploadFile"]; - mockOpenAI.setWidgetState = mock(() => {}) as unknown as OpenAIGlobal["setWidgetState"]; + mockOpenAI.setWidgetState = mock( + () => {}, + ) as unknown as OpenAIGlobal["setWidgetState"]; const transport = new OpenAITransport(); transport.onmessage = () => {}; - const pngData = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="; + const pngData = + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="; await transport.send({ jsonrpc: "2.0", diff --git a/src/openai/transport.ts b/src/openai/transport.ts index 89640fa6..d5cb9089 100644 --- a/src/openai/transport.ts +++ b/src/openai/transport.ts @@ -17,9 +17,35 @@ import { Transport, TransportSendOptions, } from "@modelcontextprotocol/sdk/shared/transport.js"; -import { OpenAIGlobal, getOpenAIGlobal, isOpenAIEnvironment } from "./types.js"; +import { + OpenAIGlobal, + OpenAISafeArea, + OpenAIDisplayMode, + OpenAITheme, + getOpenAIGlobal, + isOpenAIEnvironment, +} from "./types.js"; import { LATEST_PROTOCOL_VERSION, McpUiHostContext } from "../spec.types.js"; +/** + * Event name dispatched by ChatGPT host when window.openai properties change. + * @see https://developers.openai.com/apps-sdk/build/chatgpt-ui/ + */ +const SET_GLOBALS_EVENT_TYPE = "openai:set_globals"; + +/** + * Partial update payload from the openai:set_globals event. + * Contains only the properties that changed. + */ +interface OpenAIGlobalsChangeDetail { + theme?: OpenAITheme; + displayMode?: OpenAIDisplayMode; + maxHeight?: number; + safeArea?: OpenAISafeArea; + locale?: string; + userAgent?: string; +} + /** * JSON-RPC success response message. * @internal @@ -96,6 +122,7 @@ function isNotification( export class OpenAITransport implements Transport { private openai: OpenAIGlobal; private _closed = false; + private _globalsChangeHandler?: (event: Event) => void; /** * Create a new OpenAITransport. @@ -124,11 +151,78 @@ export class OpenAITransport implements Transport { /** * Begin listening for messages. * - * In OpenAI mode, there's no event-based message flow to start. - * The data is pre-populated in window.openai properties. + * Sets up a listener for the `openai:set_globals` event that ChatGPT + * dispatches when host context properties change (theme, displayMode, + * safeArea, maxHeight, etc.). 
Changes are forwarded to the app as + * `ui/notifications/host-context-changed` notifications. */ async start(): Promise { - // Nothing to do - window.openai is already available and populated + // Listen for OpenAI global property changes + this._globalsChangeHandler = (event: Event) => { + if (this._closed) return; + + const detail = (event as CustomEvent).detail; + if (!detail) return; + + const changes: Partial = {}; + let hasChanges = false; + + // Map OpenAI properties to MCP host context + if (detail.safeArea !== undefined) { + const sa = detail.safeArea; + if ( + typeof sa.top === "number" && + typeof sa.right === "number" && + typeof sa.bottom === "number" && + typeof sa.left === "number" + ) { + changes.safeAreaInsets = sa; + hasChanges = true; + } + } + + if (detail.theme !== undefined) { + changes.theme = detail.theme; + hasChanges = true; + } + + if (detail.displayMode !== undefined) { + changes.displayMode = detail.displayMode; + hasChanges = true; + } + + if (detail.maxHeight !== undefined) { + changes.containerDimensions = { maxHeight: detail.maxHeight }; + hasChanges = true; + } + + if (detail.locale !== undefined) { + changes.locale = detail.locale; + hasChanges = true; + } + + if (detail.userAgent !== undefined) { + changes.userAgent = detail.userAgent; + hasChanges = true; + } + + // Forward changes as host-context-changed notification + if (hasChanges) { + queueMicrotask(() => { + this.onmessage?.({ + jsonrpc: "2.0", + method: "ui/notifications/host-context-changed", + params: changes, + } as JSONRPCNotification); + }); + } + }; + + window.addEventListener( + SET_GLOBALS_EVENT_TYPE, + this._globalsChangeHandler, + { passive: true }, + ); } /** @@ -406,9 +500,13 @@ export class OpenAITransport implements Transport { bytes[i] = binaryString.charCodeAt(i); } const blob = new Blob([bytes], { type: image.mimeType }); - const file = new File([blob], `image.${this.getExtension(image.mimeType)}`, { - type: image.mimeType, - }); + const file = new File( + [blob], + `image.${this.getExtension(image.mimeType)}`, + { + type: image.mimeType, + }, + ); const result = await this.openai.uploadFile(file); imageIds.push(result.fileId); @@ -424,7 +522,7 @@ export class OpenAITransport implements Transport { const existingImageIds = Array.isArray( (currentState as { imageIds?: unknown }).imageIds, ) - ? ((currentState as { imageIds: string[] }).imageIds) + ? 
(currentState as { imageIds: string[] }).imageIds : []; this.openai.setWidgetState({ @@ -779,6 +877,16 @@ export class OpenAITransport implements Transport { */ async close(): Promise { this._closed = true; + + // Clean up the globals change listener + if (this._globalsChangeHandler) { + window.removeEventListener( + SET_GLOBALS_EVENT_TYPE, + this._globalsChangeHandler, + ); + this._globalsChangeHandler = undefined; + } + this.onclose?.(); } From 7b4f76d2113cbc5dc76ea584b74498ac7f94ed9b Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 14 Jan 2026 23:03:04 +0000 Subject: [PATCH 25/30] feat(examples): Add debug-server for comprehensive SDK testing Add a debug-server example that exercises all MCP Apps SDK capabilities: Server (server.ts): - debug-tool: Configurable tool testing all content types (text, image, audio, resource, resourceLink, mixed), with options for multiple blocks, structuredContent, _meta, error simulation, and delays - debug-refresh: App-only tool (hidden from model) for polling server state Guest UI (src/mcp-app.ts): - Event Log: Real-time log of all SDK events with filtering and timestamps - Host Info: Display of context, capabilities, container dimensions, styles - Callback Status: Table showing all registered callbacks with call counts - Action Buttons: Test every SDK method: - Messages (text and image) - Logging (debug/info/warning/error) - Model context updates (text and structured) - Display mode requests (inline/fullscreen/pip) - Link opening - Manual/auto resize controls - Server tool calls with full configuration - File upload and URL retrieval This example serves as both a testing tool and reference implementation for all SDK features. --- examples/debug-server/mcp-app.html | 225 ++++++++++ examples/debug-server/package.json | 43 ++ examples/debug-server/server-utils.ts | 72 ++++ examples/debug-server/server.ts | 219 ++++++++++ examples/debug-server/src/global.css | 33 ++ examples/debug-server/src/mcp-app.css | 332 +++++++++++++++ examples/debug-server/src/mcp-app.ts | 585 ++++++++++++++++++++++++++ examples/debug-server/tsconfig.json | 19 + examples/debug-server/vite.config.ts | 24 ++ 9 files changed, 1552 insertions(+) create mode 100644 examples/debug-server/mcp-app.html create mode 100644 examples/debug-server/package.json create mode 100644 examples/debug-server/server-utils.ts create mode 100644 examples/debug-server/server.ts create mode 100644 examples/debug-server/src/global.css create mode 100644 examples/debug-server/src/mcp-app.css create mode 100644 examples/debug-server/src/mcp-app.ts create mode 100644 examples/debug-server/tsconfig.json create mode 100644 examples/debug-server/vite.config.ts diff --git a/examples/debug-server/mcp-app.html b/examples/debug-server/mcp-app.html new file mode 100644 index 00000000..6a5ca667 --- /dev/null +++ b/examples/debug-server/mcp-app.html @@ -0,0 +1,225 @@ + + + + + + + Debug App + + +
[mcp-app.html body markup elided; recoverable structure of the page, in order:]
[Event Log section: filter select, Clear button, scrolling log list]
[Host Info section: Context, Capabilities, Container, Styles Sample groups]
[Callback Status section: table with columns Callback / Registered / Count / Last Payload]
[Actions section: Messages, Logging, Model Context, Display Mode, Links, Size ("Current: measuring..."), Server Tools, Files ("Last fileId: none")]
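A minimal sketch, assuming the element IDs that `src/mcp-app.ts` (added later in this series) resolves at startup; the list is derived from that script, and the check loop is illustrative only rather than part of the app or server code.

```typescript
// Illustrative only: IDs that src/mcp-app.ts looks up via document.getElementById().
// The real page groups them under the sections summarized above.
const expectedIds = [
  "event-log", "log-filter", "clear-log-btn",
  "host-context-info", "host-capabilities-info", "host-container-info", "host-styles-sample",
  "callback-table-body",
  "message-text", "send-message-text-btn", "send-message-image-btn",
  "log-data", "log-debug-btn", "log-info-btn", "log-warning-btn", "log-error-btn",
  "context-text", "update-context-text-btn", "update-context-structured-btn",
  "display-inline-btn", "display-fullscreen-btn", "display-pip-btn",
  "link-url", "open-link-btn",
  "auto-resize-toggle", "resize-200x100-btn", "resize-400x300-btn", "resize-800x600-btn", "current-size",
  "tool-content-type", "tool-multiple-blocks", "tool-structured-content", "tool-include-meta",
  "tool-simulate-error", "tool-delay-ms", "call-debug-tool-btn", "call-debug-refresh-btn",
  "file-input", "upload-file-btn", "last-file-id", "get-file-url-btn",
];

// Quick sanity check to run in the browser console against a built page.
for (const id of expectedIds) {
  if (!document.getElementById(id)) console.warn(`debug app: missing #${id}`);
}
```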
+ + + diff --git a/examples/debug-server/package.json b/examples/debug-server/package.json new file mode 100644 index 00000000..c2d85c3a --- /dev/null +++ b/examples/debug-server/package.json @@ -0,0 +1,43 @@ +{ + "name": "@modelcontextprotocol/server-debug", + "version": "0.4.0", + "type": "module", + "description": "Debug MCP App Server for testing all SDK capabilities", + "repository": { + "type": "git", + "url": "https://github.com/modelcontextprotocol/ext-apps", + "directory": "examples/debug-server" + }, + "license": "MIT", + "main": "server.ts", + "files": [ + "server.ts", + "server-utils.ts", + "dist" + ], + "scripts": { + "build": "tsc --noEmit && cross-env INPUT=mcp-app.html vite build", + "watch": "cross-env INPUT=mcp-app.html vite build --watch", + "serve": "bun --watch server.ts", + "start": "cross-env NODE_ENV=development npm run build && npm run serve", + "dev": "cross-env NODE_ENV=development concurrently 'npm run watch' 'npm run serve'", + "prepublishOnly": "npm run build" + }, + "dependencies": { + "@modelcontextprotocol/ext-apps": "^0.4.0", + "@modelcontextprotocol/sdk": "^1.24.0", + "zod": "^4.1.13" + }, + "devDependencies": { + "@types/cors": "^2.8.19", + "@types/express": "^5.0.0", + "@types/node": "^22.0.0", + "concurrently": "^9.2.1", + "cors": "^2.8.5", + "cross-env": "^10.1.0", + "express": "^5.1.0", + "typescript": "^5.9.3", + "vite": "^6.0.0", + "vite-plugin-singlefile": "^2.3.0" + } +} diff --git a/examples/debug-server/server-utils.ts b/examples/debug-server/server-utils.ts new file mode 100644 index 00000000..9fe9745a --- /dev/null +++ b/examples/debug-server/server-utils.ts @@ -0,0 +1,72 @@ +/** + * Shared utilities for running MCP servers with Streamable HTTP transport. + */ + +import { createMcpExpressApp } from "@modelcontextprotocol/sdk/server/express.js"; +import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js"; +import cors from "cors"; +import type { Request, Response } from "express"; + +export interface ServerOptions { + port: number; + name?: string; +} + +/** + * Starts an MCP server with Streamable HTTP transport in stateless mode. + * + * @param createServer - Factory function that creates a new McpServer instance per request. + * @param options - Server configuration options. 
+ */ +export async function startServer( + createServer: () => McpServer, + options: ServerOptions, +): Promise { + const { port, name = "MCP Server" } = options; + + const app = createMcpExpressApp({ host: "0.0.0.0" }); + app.use(cors()); + + app.all("/mcp", async (req: Request, res: Response) => { + const server = createServer(); + const transport = new StreamableHTTPServerTransport({ + sessionIdGenerator: undefined, + }); + + res.on("close", () => { + transport.close().catch(() => {}); + server.close().catch(() => {}); + }); + + try { + await server.connect(transport); + await transport.handleRequest(req, res, req.body); + } catch (error) { + console.error("MCP error:", error); + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: "2.0", + error: { code: -32603, message: "Internal server error" }, + id: null, + }); + } + } + }); + + const httpServer = app.listen(port, (err) => { + if (err) { + console.error("Failed to start server:", err); + process.exit(1); + } + console.log(`${name} listening on http://localhost:${port}/mcp`); + }); + + const shutdown = () => { + console.log("\nShutting down..."); + httpServer.close(() => process.exit(0)); + }; + + process.on("SIGINT", shutdown); + process.on("SIGTERM", shutdown); +} diff --git a/examples/debug-server/server.ts b/examples/debug-server/server.ts new file mode 100644 index 00000000..9a67fd46 --- /dev/null +++ b/examples/debug-server/server.ts @@ -0,0 +1,219 @@ +import { registerAppResource, registerAppTool, RESOURCE_MIME_TYPE } from "@modelcontextprotocol/ext-apps/server"; +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; +import type { CallToolResult, ReadResourceResult } from "@modelcontextprotocol/sdk/types.js"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { z } from "zod"; +import { startServer } from "./server-utils.js"; + +const DIST_DIR = path.join(import.meta.dirname, "dist"); + +// Track call counter across requests (stateful for demo purposes) +let callCounter = 0; + +// Minimal 1x1 blue PNG (base64) +const BLUE_PNG_1X1 = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPj/HwADBwIAMCbHYQAAAABJRU5ErkJggg=="; + +// Minimal silent WAV (base64) - 44 byte header + 1 sample +const SILENT_WAV = "UklGRiYAAABXQVZFZm10IBAAAAABAAEARKwAAIhYAQACABAAZGF0YQIAAAAAAA=="; + +/** + * Input schema for the debug-tool + */ +const DebugInputSchema = z.object({ + // Content configuration + contentType: z.enum(["text", "image", "audio", "resource", "resourceLink", "mixed"]).default("text"), + multipleBlocks: z.boolean().default(false), + includeStructuredContent: z.boolean().default(true), + includeMeta: z.boolean().default(false), + + // Streaming test (large input) + largeInput: z.string().optional(), + + // Error/delay simulation + simulateError: z.boolean().default(false), + delayMs: z.number().optional(), +}); + +type DebugInput = z.infer; + +/** + * Output schema for structured content + */ +const DebugOutputSchema = z.object({ + config: z.record(z.string(), z.unknown()), + timestamp: z.string(), + counter: z.number(), + largeInputLength: z.number().optional(), +}); + +/** + * Builds content blocks based on configuration + */ +function buildContent(args: DebugInput): CallToolResult["content"] { + const count = args.multipleBlocks ? 3 : 1; + const content: CallToolResult["content"] = []; + + for (let i = 0; i < count; i++) { + const suffix = args.multipleBlocks ? 
` #${i + 1}` : ""; + + switch (args.contentType) { + case "text": + content.push({ type: "text", text: `Debug text content${suffix}` }); + break; + case "image": + content.push({ type: "image", data: BLUE_PNG_1X1, mimeType: "image/png" }); + break; + case "audio": + content.push({ type: "audio", data: SILENT_WAV, mimeType: "audio/wav" }); + break; + case "resource": + content.push({ + type: "resource", + resource: { + uri: `debug://embedded-resource${suffix.replace(/\s/g, "-")}`, + text: `Embedded resource content${suffix}`, + mimeType: "text/plain", + }, + }); + break; + case "resourceLink": + content.push({ + type: "resource_link", + uri: `debug://linked-resource${suffix.replace(/\s/g, "-")}`, + name: `Linked Resource${suffix}`, + mimeType: "text/plain", + }); + break; + case "mixed": + // Return one of each type (ignore multipleBlocks for mixed) + return [ + { type: "text", text: "Mixed content: text block" }, + { type: "image", data: BLUE_PNG_1X1, mimeType: "image/png" }, + { type: "audio", data: SILENT_WAV, mimeType: "audio/wav" }, + ]; + } + } + + return content; +} + +/** + * Creates a new MCP server instance with debug tools registered. + */ +export function createServer(): McpServer { + const server = new McpServer({ + name: "Debug MCP App Server", + version: "1.0.0", + }); + + const resourceUri = "ui://debug-tool/mcp-app.html"; + + // Main debug tool - exercises all result variations + registerAppTool(server, + "debug-tool", + { + title: "Debug Tool", + description: "Comprehensive debug tool for testing MCP Apps SDK. Configure content types, error simulation, delays, and more.", + inputSchema: DebugInputSchema, + outputSchema: DebugOutputSchema, + _meta: { ui: { resourceUri } }, + }, + async (args): Promise => { + // Apply delay if requested + if (args.delayMs && args.delayMs > 0) { + await new Promise(resolve => setTimeout(resolve, args.delayMs)); + } + + // Build content based on config + const content = buildContent(args); + + // Build result + const result: CallToolResult = { content }; + + // Add structured content if requested + if (args.includeStructuredContent) { + result.structuredContent = { + config: args, + timestamp: new Date().toISOString(), + counter: ++callCounter, + ...(args.largeInput ? { largeInputLength: args.largeInput.length } : {}), + }; + } + + // Add _meta if requested + if (args.includeMeta) { + result._meta = { + debugInfo: { + processedAt: Date.now(), + serverVersion: "1.0.0", + }, + }; + } + + // Set error flag if requested + if (args.simulateError) { + result.isError = true; + } + + return result; + }, + ); + + // App-only refresh tool (hidden from model) + registerAppTool(server, + "debug-refresh", + { + title: "Refresh Debug Info", + description: "App-only tool for polling server state. 
Not visible to the model.", + inputSchema: z.object({}), + outputSchema: z.object({ timestamp: z.string(), counter: z.number() }), + _meta: { + ui: { + resourceUri, + visibility: ["app"], + }, + }, + }, + async (): Promise => { + const timestamp = new Date().toISOString(); + return { + content: [{ type: "text", text: `Server timestamp: ${timestamp}` }], + structuredContent: { timestamp, counter: callCounter }, + }; + }, + ); + + // Register the resource which returns the bundled HTML/JavaScript for the UI + registerAppResource(server, + resourceUri, + resourceUri, + { mimeType: RESOURCE_MIME_TYPE }, + async (): Promise => { + const html = await fs.readFile(path.join(DIST_DIR, "mcp-app.html"), "utf-8"); + + return { + contents: [ + { uri: resourceUri, mimeType: RESOURCE_MIME_TYPE, text: html }, + ], + }; + }, + ); + + return server; +} + +async function main() { + if (process.argv.includes("--stdio")) { + await createServer().connect(new StdioServerTransport()); + } else { + const port = parseInt(process.env.PORT ?? "3102", 10); + await startServer(createServer, { port, name: "Debug MCP App Server" }); + } +} + +main().catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/examples/debug-server/src/global.css b/examples/debug-server/src/global.css new file mode 100644 index 00000000..18863262 --- /dev/null +++ b/examples/debug-server/src/global.css @@ -0,0 +1,33 @@ +* { + box-sizing: border-box; +} + +html, body { + font-family: system-ui, -apple-system, sans-serif; + font-size: 14px; + margin: 0; + padding: 0; +} + +code { + font-family: ui-monospace, "SF Mono", Monaco, "Cascadia Code", Consolas, monospace; + font-size: 0.9em; + background: rgba(0, 0, 0, 0.05); + padding: 0.1em 0.3em; + border-radius: 3px; +} + +@media (prefers-color-scheme: dark) { + code { + background: rgba(255, 255, 255, 0.1); + } +} + +button { + cursor: pointer; +} + +input, select, button { + font-family: inherit; + font-size: inherit; +} diff --git a/examples/debug-server/src/mcp-app.css b/examples/debug-server/src/mcp-app.css new file mode 100644 index 00000000..c9d4509c --- /dev/null +++ b/examples/debug-server/src/mcp-app.css @@ -0,0 +1,332 @@ +.main { + --color-primary: #2563eb; + --color-primary-hover: #1d4ed8; + --color-success: #16a34a; + --color-warning: #ca8a04; + --color-error: #dc2626; + --color-border: #e5e7eb; + --color-bg-subtle: #f9fafb; + + width: 100%; + max-width: 800px; + padding: 1rem; + margin: 0 auto; +} + +@media (prefers-color-scheme: dark) { + .main { + --color-border: #374151; + --color-bg-subtle: #1f2937; + } +} + +/* Section styling */ +.section { + border: 1px solid var(--color-border); + border-radius: 8px; + margin-bottom: 1rem; + overflow: hidden; +} + +.section-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 0.75rem 1rem; + margin: 0; + font-size: 1rem; + font-weight: 600; + background: var(--color-bg-subtle); + border-bottom: 1px solid var(--color-border); +} + +.collapsible .section-header { + cursor: pointer; + user-select: none; +} + +.collapsible .section-header:hover { + background: var(--color-border); +} + +.toggle-icon { + transition: transform 0.2s; +} + +.collapsed .toggle-icon { + transform: rotate(-90deg); +} + +.collapsed .section-content { + display: none; +} + +.section-content { + padding: 1rem; +} + +.header-actions { + display: flex; + gap: 0.5rem; + align-items: center; +} + +/* Event log */ +.event-log { + max-height: 200px; + overflow-y: auto; + padding: 0.5rem; + font-family: ui-monospace, 
monospace; + font-size: 0.85rem; + background: var(--color-bg-subtle); +} + +.log-entry { + padding: 0.25rem 0; + border-bottom: 1px solid var(--color-border); +} + +.log-entry:last-child { + border-bottom: none; +} + +.log-time { + color: #6b7280; + margin-right: 0.5rem; +} + +.log-type { + font-weight: 600; + margin-right: 0.5rem; +} + +.log-type.tool-input { color: var(--color-primary); } +.log-type.tool-input-partial { color: #8b5cf6; } +.log-type.tool-result { color: var(--color-success); } +.log-type.tool-cancelled { color: var(--color-warning); } +.log-type.widget-state { color: #0891b2; } +.log-type.host-context-changed { color: #7c3aed; } +.log-type.teardown { color: #f97316; } +.log-type.call-tool { color: #ec4899; } +.log-type.list-tools { color: #14b8a6; } +.log-type.error { color: var(--color-error); } + +.log-payload { + color: #6b7280; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + max-width: 400px; + display: inline-block; + vertical-align: bottom; + cursor: pointer; +} + +.log-payload:hover { + white-space: normal; + word-break: break-all; +} + +/* Info grid */ +.info-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); + gap: 1rem; +} + +.info-group h3 { + margin: 0 0 0.5rem 0; + font-size: 0.9rem; + color: #6b7280; +} + +.info-group dl { + margin: 0; + font-size: 0.85rem; +} + +.info-group dt { + font-weight: 600; + color: #374151; +} + +.info-group dd { + margin: 0 0 0.5rem 0; + color: #6b7280; +} + +@media (prefers-color-scheme: dark) { + .info-group dt { color: #d1d5db; } + .info-group dd { color: #9ca3af; } +} + +.styles-sample { + display: flex; + flex-wrap: wrap; + gap: 0.5rem; +} + +.style-swatch { + width: 24px; + height: 24px; + border-radius: 4px; + border: 1px solid var(--color-border); + display: flex; + align-items: center; + justify-content: center; + font-size: 10px; + color: white; + text-shadow: 0 0 2px black; +} + +/* Callback table */ +.callback-table { + width: 100%; + border-collapse: collapse; + font-size: 0.85rem; +} + +.callback-table th, +.callback-table td { + padding: 0.5rem; + text-align: left; + border-bottom: 1px solid var(--color-border); +} + +.callback-table th { + background: var(--color-bg-subtle); + font-weight: 600; +} + +.callback-table .registered-yes { + color: var(--color-success); +} + +.callback-table .registered-no { + color: #9ca3af; +} + +.callback-table .payload-preview { + max-width: 200px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + cursor: pointer; + font-family: ui-monospace, monospace; + font-size: 0.8rem; +} + +.callback-table .payload-preview:hover { + white-space: normal; + word-break: break-all; +} + +/* Action groups */ +.action-group { + margin-bottom: 1.5rem; +} + +.action-group:last-child { + margin-bottom: 0; +} + +.action-group h3 { + margin: 0 0 0.75rem 0; + font-size: 0.9rem; + font-weight: 600; + color: #374151; + border-bottom: 1px solid var(--color-border); + padding-bottom: 0.25rem; +} + +@media (prefers-color-scheme: dark) { + .action-group h3 { color: #d1d5db; } +} + +.action-row { + display: flex; + gap: 0.5rem; + align-items: center; + margin-bottom: 0.5rem; +} + +.action-row:last-child { + margin-bottom: 0; +} + +.action-row input[type="text"], +.action-row input[type="url"], +.action-row input[type="number"] { + flex: 1; + padding: 0.5rem; + border: 1px solid var(--color-border); + border-radius: 4px; + background: transparent; +} + +.action-row input[type="file"] { + flex: 1; +} + +.action-row button { + 
padding: 0.5rem 1rem; + border: none; + border-radius: 4px; + background: var(--color-primary); + color: white; + font-weight: 500; +} + +.action-row button:hover { + background: var(--color-primary-hover); +} + +.btn-row { + flex-wrap: wrap; +} + +.btn-small { + padding: 0.375rem 0.75rem !important; + font-size: 0.85rem; +} + +/* Tool config */ +.tool-config { + background: var(--color-bg-subtle); + padding: 0.75rem; + border-radius: 4px; + margin-bottom: 0.75rem; +} + +.config-row { + display: flex; + align-items: center; + gap: 0.5rem; + margin-bottom: 0.5rem; +} + +.config-row:last-child { + margin-bottom: 0; +} + +.config-row label { + min-width: 120px; + font-size: 0.85rem; +} + +.config-row select, +.config-row input[type="number"] { + padding: 0.25rem 0.5rem; + border: 1px solid var(--color-border); + border-radius: 4px; + background: transparent; +} + +/* Filter select */ +#log-filter { + padding: 0.25rem 0.5rem; + border: 1px solid var(--color-border); + border-radius: 4px; + background: transparent; + font-size: 0.85rem; +} diff --git a/examples/debug-server/src/mcp-app.ts b/examples/debug-server/src/mcp-app.ts new file mode 100644 index 00000000..1d0fbd77 --- /dev/null +++ b/examples/debug-server/src/mcp-app.ts @@ -0,0 +1,585 @@ +/** + * @file Debug App - Comprehensive testing/debugging tool for the MCP Apps SDK. + * + * This app exercises every capability, callback, and result format combination. + */ +import { App, type McpUiHostContext } from "@modelcontextprotocol/ext-apps"; +import "./global.css"; +import "./mcp-app.css"; + +// ============================================================================ +// Types +// ============================================================================ + +interface LogEntry { + time: number; + type: string; + payload: unknown; +} + +interface AppState { + eventLog: LogEntry[]; + callbackCounts: Map; + lastPayloads: Map; + uploadedFileId: string | null; + autoResizeCleanup: (() => void) | null; + logFilter: string; +} + +// ============================================================================ +// State +// ============================================================================ + +const state: AppState = { + eventLog: [], + callbackCounts: new Map(), + lastPayloads: new Map(), + uploadedFileId: null, + autoResizeCleanup: null, + logFilter: "all", +}; + +// Callbacks we track +const CALLBACKS = [ + "ontoolinput", + "ontoolinputpartial", + "ontoolresult", + "ontoolcancelled", + "onwidgetstate", + "onhostcontextchanged", + "onteardown", + "oncalltool", + "onlisttools", + "onerror", +] as const; + +// ============================================================================ +// DOM Elements +// ============================================================================ + +const mainEl = document.querySelector(".main") as HTMLElement; +const eventLogEl = document.getElementById("event-log")!; +const logFilterEl = document.getElementById("log-filter") as HTMLSelectElement; +const clearLogBtn = document.getElementById("clear-log-btn")!; + +// Host info +const hostContextInfoEl = document.getElementById("host-context-info")!; +const hostCapabilitiesInfoEl = document.getElementById("host-capabilities-info")!; +const hostContainerInfoEl = document.getElementById("host-container-info")!; +const hostStylesSampleEl = document.getElementById("host-styles-sample")!; + +// Callback status +const callbackTableBodyEl = document.getElementById("callback-table-body")!; + +// Action elements +const messageTextEl = 
document.getElementById("message-text") as HTMLInputElement; +const sendMessageTextBtn = document.getElementById("send-message-text-btn")!; +const sendMessageImageBtn = document.getElementById("send-message-image-btn")!; + +const logDataEl = document.getElementById("log-data") as HTMLInputElement; +const logDebugBtn = document.getElementById("log-debug-btn")!; +const logInfoBtn = document.getElementById("log-info-btn")!; +const logWarningBtn = document.getElementById("log-warning-btn")!; +const logErrorBtn = document.getElementById("log-error-btn")!; + +const contextTextEl = document.getElementById("context-text") as HTMLInputElement; +const updateContextTextBtn = document.getElementById("update-context-text-btn")!; +const updateContextStructuredBtn = document.getElementById("update-context-structured-btn")!; + +const displayInlineBtn = document.getElementById("display-inline-btn")!; +const displayFullscreenBtn = document.getElementById("display-fullscreen-btn")!; +const displayPipBtn = document.getElementById("display-pip-btn")!; + +const linkUrlEl = document.getElementById("link-url") as HTMLInputElement; +const openLinkBtn = document.getElementById("open-link-btn")!; + +const autoResizeToggleEl = document.getElementById("auto-resize-toggle") as HTMLInputElement; +const resize200x100Btn = document.getElementById("resize-200x100-btn")!; +const resize400x300Btn = document.getElementById("resize-400x300-btn")!; +const resize800x600Btn = document.getElementById("resize-800x600-btn")!; +const currentSizeEl = document.getElementById("current-size")!; + +// Tool config elements +const toolContentTypeEl = document.getElementById("tool-content-type") as HTMLSelectElement; +const toolMultipleBlocksEl = document.getElementById("tool-multiple-blocks") as HTMLInputElement; +const toolStructuredContentEl = document.getElementById("tool-structured-content") as HTMLInputElement; +const toolIncludeMetaEl = document.getElementById("tool-include-meta") as HTMLInputElement; +const toolSimulateErrorEl = document.getElementById("tool-simulate-error") as HTMLInputElement; +const toolDelayMsEl = document.getElementById("tool-delay-ms") as HTMLInputElement; +const callDebugToolBtn = document.getElementById("call-debug-tool-btn")!; +const callDebugRefreshBtn = document.getElementById("call-debug-refresh-btn")!; + +// File elements +const fileInputEl = document.getElementById("file-input") as HTMLInputElement; +const uploadFileBtn = document.getElementById("upload-file-btn")!; +const lastFileIdEl = document.getElementById("last-file-id")!; +const getFileUrlBtn = document.getElementById("get-file-url-btn")!; + +// ============================================================================ +// Utility Functions +// ============================================================================ + +function formatTime(timestamp: number): string { + const date = new Date(timestamp); + const h = date.getHours().toString().padStart(2, "0"); + const m = date.getMinutes().toString().padStart(2, "0"); + const s = date.getSeconds().toString().padStart(2, "0"); + const ms = date.getMilliseconds().toString().padStart(3, "0"); + return `${h}:${m}:${s}.${ms}`; +} + +function truncatePayload(payload: unknown): string { + const str = JSON.stringify(payload); + if (str.length > 100) { + return str.slice(0, 100) + "..."; + } + return str; +} + +// ============================================================================ +// Rendering Functions +// ============================================================================ + 
+function renderEventLog(): void { + const filtered = state.logFilter === "all" + ? state.eventLog + : state.eventLog.filter(e => e.type === state.logFilter); + + eventLogEl.innerHTML = filtered.map(entry => ` +
<div class="log-entry">
+      <span class="log-time">[${formatTime(entry.time)}]</span>
+      <span class="log-type ${entry.type}">${entry.type}:</span>
+      <span class="log-payload">${truncatePayload(entry.payload)}</span>
+    </div>
+ `).join(""); + + // Auto-scroll to bottom + eventLogEl.scrollTop = eventLogEl.scrollHeight; +} + +function renderCallbackStatus(): void { + callbackTableBodyEl.innerHTML = CALLBACKS.map(name => { + const count = state.callbackCounts.get(name) ?? 0; + const lastPayload = state.lastPayloads.get(name); + const registered = name !== "onerror"; // All callbacks are registered + + return ` + + ${name} + ${registered ? "✓" : "✗"} + ${count} + ${lastPayload ? truncatePayload(lastPayload) : "-"} + + `; + }).join(""); +} + +function renderHostInfo(): void { + const ctx = app.getHostContext(); + const caps = app.getHostCapabilities(); + const version = app.getHostVersion(); + + // Context info + if (ctx) { + hostContextInfoEl.innerHTML = ` +
<dt>Theme</dt> <dd>${ctx.theme ?? "unknown"}</dd>
+      <dt>Locale</dt> <dd>${ctx.locale ?? "unknown"}</dd>
+      <dt>TimeZone</dt> <dd>${ctx.timeZone ?? "unknown"}</dd>
+      <dt>Platform</dt> <dd>${ctx.platform ?? "unknown"}</dd>
+      <dt>Display Mode</dt> <dd>${ctx.displayMode ?? "unknown"}</dd>
+      <dt>Host</dt> <dd>${version?.name ?? "unknown"} v${version?.version ?? "?"}</dd>
+ `; + } else { + hostContextInfoEl.innerHTML = "
No context available
"; + } + + // Capabilities + if (caps) { + hostCapabilitiesInfoEl.innerHTML = ` +
<dt>openLinks</dt> <dd>${caps.openLinks ? "✓" : "✗"}</dd>
+      <dt>serverTools</dt> <dd>${caps.serverTools ? "✓" : "✗"}</dd>
+      <dt>serverResources</dt> <dd>${caps.serverResources ? "✓" : "✗"}</dd>
+      <dt>logging</dt> <dd>${caps.logging ? "✓" : "✗"}</dd>
+      <dt>message</dt> <dd>${caps.message ? "✓" : "✗"}</dd>
+      <dt>updateModelContext</dt> <dd>${caps.updateModelContext ? "✓" : "✗"}</dd>
+ `; + } else { + hostCapabilitiesInfoEl.innerHTML = "
No capabilities available
"; + } + + // Container info + if (ctx?.containerDimensions) { + const dims = ctx.containerDimensions; + hostContainerInfoEl.innerHTML = ` +
<dt>Width</dt> <dd>${"width" in dims ? dims.width + "px" : `max ${dims.maxWidth ?? "?"}px`}</dd>
+      <dt>Height</dt> <dd>${"height" in dims ? dims.height + "px" : `max ${dims.maxHeight ?? "?"}px`}</dd>
+      <dt>Safe Area</dt> <dd>${ctx.safeAreaInsets ? `T${ctx.safeAreaInsets.top} R${ctx.safeAreaInsets.right} B${ctx.safeAreaInsets.bottom} L${ctx.safeAreaInsets.left}` : "none"}</dd>
+ `; + } else { + hostContainerInfoEl.innerHTML = "
No container info
"; + } + + // Styles sample + if (ctx?.styles) { + const styleVars = Object.entries(ctx.styles).slice(0, 6); + hostStylesSampleEl.innerHTML = styleVars.map(([key, value]) => { + const color = String(value); + return `
`; + }).join(""); + } else { + hostStylesSampleEl.innerHTML = "No styles"; + } +} + +function updateCurrentSize(): void { + const w = document.documentElement.scrollWidth; + const h = document.documentElement.scrollHeight; + currentSizeEl.textContent = `${w}x${h}`; +} + +// ============================================================================ +// Event Logging +// ============================================================================ + +function logEvent(type: string, payload: unknown): void { + const count = (state.callbackCounts.get(type) ?? 0) + 1; + state.callbackCounts.set(type, count); + state.lastPayloads.set(type, payload); + state.eventLog.push({ time: Date.now(), type, payload }); + + // Keep log manageable (max 100 entries) + if (state.eventLog.length > 100) { + state.eventLog.shift(); + } + + renderEventLog(); + renderCallbackStatus(); +} + +// ============================================================================ +// Safe Area Handling +// ============================================================================ + +function handleHostContextChanged(ctx: McpUiHostContext): void { + if (ctx.safeAreaInsets) { + mainEl.style.paddingTop = `${ctx.safeAreaInsets.top}px`; + mainEl.style.paddingRight = `${ctx.safeAreaInsets.right}px`; + mainEl.style.paddingBottom = `${ctx.safeAreaInsets.bottom}px`; + mainEl.style.paddingLeft = `${ctx.safeAreaInsets.left}px`; + } + renderHostInfo(); +} + +// ============================================================================ +// App Instance & Callbacks +// ============================================================================ + +const app = new App( + { name: "Debug App", version: "1.0.0" }, + {}, // capabilities + { autoResize: false }, // We'll manage auto-resize ourselves for toggle demo +); + +// Register ALL callbacks BEFORE connecting +app.ontoolinput = (params) => { + logEvent("tool-input", params); +}; + +app.ontoolinputpartial = (params) => { + logEvent("tool-input-partial", params); +}; + +app.ontoolresult = (result) => { + logEvent("tool-result", result); +}; + +app.ontoolcancelled = (params) => { + logEvent("tool-cancelled", params); +}; + +app.onwidgetstate = (params) => { + logEvent("widget-state", params); +}; + +app.onhostcontextchanged = (ctx) => { + logEvent("host-context-changed", ctx); + handleHostContextChanged(ctx); +}; + +app.onteardown = async (params) => { + logEvent("teardown", params); + return {}; +}; + +app.oncalltool = async (params) => { + logEvent("call-tool", params); + return { + content: [{ type: "text", text: "App handled tool call" }], + }; +}; + +app.onlisttools = async (params) => { + logEvent("list-tools", params); + return { tools: [] }; +}; + +app.onerror = (error) => { + logEvent("error", error); +}; + +// ============================================================================ +// Section Collapsing +// ============================================================================ + +document.querySelectorAll(".section-header[data-toggle]").forEach(header => { + header.addEventListener("click", () => { + const section = header.closest(".section"); + section?.classList.toggle("collapsed"); + }); +}); + +// ============================================================================ +// Event Log Controls +// ============================================================================ + +logFilterEl.addEventListener("change", () => { + state.logFilter = logFilterEl.value; + renderEventLog(); +}); + +clearLogBtn.addEventListener("click", () => { + state.eventLog = []; + 
renderEventLog(); +}); + +// ============================================================================ +// Message Actions +// ============================================================================ + +sendMessageTextBtn.addEventListener("click", async () => { + try { + const result = await app.sendMessage({ + role: "user", + content: [{ type: "text", text: messageTextEl.value }], + }); + logEvent("send-message-result", result); + } catch (e) { + logEvent("error", e); + } +}); + +sendMessageImageBtn.addEventListener("click", async () => { + // 1x1 red PNG for testing + const redPng = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg=="; + try { + const result = await app.sendMessage({ + role: "user", + content: [{ type: "image", data: redPng, mimeType: "image/png" }], + }); + logEvent("send-message-result", result); + } catch (e) { + logEvent("error", e); + } +}); + +// ============================================================================ +// Logging Actions +// ============================================================================ + +function sendLog(level: "debug" | "info" | "warning" | "error"): void { + app.sendLog({ level, data: logDataEl.value }); + logEvent("send-log", { level, data: logDataEl.value }); +} + +logDebugBtn.addEventListener("click", () => sendLog("debug")); +logInfoBtn.addEventListener("click", () => sendLog("info")); +logWarningBtn.addEventListener("click", () => sendLog("warning")); +logErrorBtn.addEventListener("click", () => sendLog("error")); + +// ============================================================================ +// Model Context Actions +// ============================================================================ + +updateContextTextBtn.addEventListener("click", async () => { + try { + await app.updateModelContext({ + content: [{ type: "text", text: contextTextEl.value }], + }); + logEvent("update-context", { type: "text", value: contextTextEl.value }); + } catch (e) { + logEvent("error", e); + } +}); + +updateContextStructuredBtn.addEventListener("click", async () => { + try { + await app.updateModelContext({ + structuredContent: { + debugState: { + eventCount: state.eventLog.length, + timestamp: new Date().toISOString(), + uploadedFileId: state.uploadedFileId, + }, + }, + }); + logEvent("update-context", { type: "structured" }); + } catch (e) { + logEvent("error", e); + } +}); + +// ============================================================================ +// Display Mode Actions +// ============================================================================ + +async function requestDisplayMode(mode: "inline" | "fullscreen" | "pip"): Promise { + try { + const result = await app.requestDisplayMode({ mode }); + logEvent("display-mode-result", { mode, result }); + } catch (e) { + logEvent("error", e); + } +} + +displayInlineBtn.addEventListener("click", () => requestDisplayMode("inline")); +displayFullscreenBtn.addEventListener("click", () => requestDisplayMode("fullscreen")); +displayPipBtn.addEventListener("click", () => requestDisplayMode("pip")); + +// ============================================================================ +// Link Action +// ============================================================================ + +openLinkBtn.addEventListener("click", async () => { + try { + const result = await app.openLink({ url: linkUrlEl.value }); + logEvent("open-link-result", result); + } catch (e) { + logEvent("error", e); + } +}); + +// 
============================================================================ +// Size Controls +// ============================================================================ + +autoResizeToggleEl.addEventListener("change", () => { + if (autoResizeToggleEl.checked) { + if (!state.autoResizeCleanup) { + state.autoResizeCleanup = app.setupSizeChangedNotifications(); + } + } else { + if (state.autoResizeCleanup) { + state.autoResizeCleanup(); + state.autoResizeCleanup = null; + } + } + logEvent("auto-resize-toggle", { enabled: autoResizeToggleEl.checked }); +}); + +function manualResize(width: number, height: number): void { + app.sendSizeChanged({ width, height }); + logEvent("manual-resize", { width, height }); +} + +resize200x100Btn.addEventListener("click", () => manualResize(200, 100)); +resize400x300Btn.addEventListener("click", () => manualResize(400, 300)); +resize800x600Btn.addEventListener("click", () => manualResize(800, 600)); + +// Update current size periodically +setInterval(updateCurrentSize, 1000); + +// ============================================================================ +// Server Tool Actions +// ============================================================================ + +callDebugToolBtn.addEventListener("click", async () => { + const args = { + contentType: toolContentTypeEl.value, + multipleBlocks: toolMultipleBlocksEl.checked, + includeStructuredContent: toolStructuredContentEl.checked, + includeMeta: toolIncludeMetaEl.checked, + simulateError: toolSimulateErrorEl.checked, + delayMs: parseInt(toolDelayMsEl.value, 10) || undefined, + }; + + try { + logEvent("call-server-tool", { name: "debug-tool", arguments: args }); + const result = await app.callServerTool({ name: "debug-tool", arguments: args }); + logEvent("server-tool-result", result); + } catch (e) { + logEvent("error", e); + } +}); + +callDebugRefreshBtn.addEventListener("click", async () => { + try { + logEvent("call-server-tool", { name: "debug-refresh", arguments: {} }); + const result = await app.callServerTool({ name: "debug-refresh", arguments: {} }); + logEvent("server-tool-result", result); + } catch (e) { + logEvent("error", e); + } +}); + +// ============================================================================ +// File Operations +// ============================================================================ + +uploadFileBtn.addEventListener("click", async () => { + const file = fileInputEl.files?.[0]; + if (!file) { + logEvent("error", { message: "No file selected" }); + return; + } + + try { + logEvent("upload-file", { name: file.name, size: file.size, type: file.type }); + const result = await app.uploadFile(file); + state.uploadedFileId = result.fileId; + lastFileIdEl.textContent = result.fileId; + logEvent("upload-file-result", result); + } catch (e) { + logEvent("error", e); + } +}); + +getFileUrlBtn.addEventListener("click", async () => { + if (!state.uploadedFileId) { + logEvent("error", { message: "No file uploaded yet" }); + return; + } + + try { + logEvent("get-file-url", { fileId: state.uploadedFileId }); + const result = await app.getFileDownloadUrl({ fileId: state.uploadedFileId }); + logEvent("get-file-url-result", result); + } catch (e) { + logEvent("error", e); + } +}); + +// ============================================================================ +// Initialization +// ============================================================================ + +// Initial render +renderCallbackStatus(); + +// Connect to host +app.connect().then(() => { + 
logEvent("connected", { success: true }); + + const ctx = app.getHostContext(); + if (ctx) { + handleHostContextChanged(ctx); + } + + renderHostInfo(); + updateCurrentSize(); + + // Auto-resize is enabled by default in App, capture cleanup if we want to toggle + // We'll set it up ourselves since we want toggle control + state.autoResizeCleanup = app.setupSizeChangedNotifications(); +}).catch(e => { + logEvent("error", e); +}); diff --git a/examples/debug-server/tsconfig.json b/examples/debug-server/tsconfig.json new file mode 100644 index 00000000..535267b2 --- /dev/null +++ b/examples/debug-server/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["ESNext", "DOM", "DOM.Iterable"], + "module": "ESNext", + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "verbatimModuleSyntax": true, + "noEmit": true, + "strict": true, + "skipLibCheck": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "server.ts"] +} diff --git a/examples/debug-server/vite.config.ts b/examples/debug-server/vite.config.ts new file mode 100644 index 00000000..6ff6d997 --- /dev/null +++ b/examples/debug-server/vite.config.ts @@ -0,0 +1,24 @@ +import { defineConfig } from "vite"; +import { viteSingleFile } from "vite-plugin-singlefile"; + +const INPUT = process.env.INPUT; +if (!INPUT) { + throw new Error("INPUT environment variable is not set"); +} + +const isDevelopment = process.env.NODE_ENV === "development"; + +export default defineConfig({ + plugins: [viteSingleFile()], + build: { + sourcemap: isDevelopment ? "inline" : undefined, + cssMinify: !isDevelopment, + minify: !isDevelopment, + + rollupOptions: { + input: INPUT, + }, + outDir: "dist", + emptyOutDir: false, + }, +}); From 439d562432293d1c0f65b6918c0dc0d4b1d37d13 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 14 Jan 2026 23:05:34 +0000 Subject: [PATCH 26/30] docs(examples): Add README for debug-server --- examples/debug-server/README.md | 55 +++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 examples/debug-server/README.md diff --git a/examples/debug-server/README.md b/examples/debug-server/README.md new file mode 100644 index 00000000..eef1ae6e --- /dev/null +++ b/examples/debug-server/README.md @@ -0,0 +1,55 @@ +# Debug Server + +A comprehensive testing/debugging tool for the MCP Apps SDK that exercises every capability, callback, and result format combination. + +## Tools + +### debug-tool + +Configurable tool for testing all result variations: + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `contentType` | `"text"` \| `"image"` \| `"audio"` \| `"resource"` \| `"resourceLink"` \| `"mixed"` | `"text"` | Content block type to return | +| `multipleBlocks` | boolean | `false` | Return 3 content blocks | +| `includeStructuredContent` | boolean | `true` | Include structuredContent in result | +| `includeMeta` | boolean | `false` | Include _meta in result | +| `largeInput` | string | - | Large text input (tests tool-input-partial) | +| `simulateError` | boolean | `false` | Return isError: true | +| `delayMs` | number | - | Delay before response (ms) | + +### debug-refresh + +App-only tool (hidden from model) for polling server state. Returns current timestamp and call counter. 
+ +## App UI + +The debug app provides a dashboard with: + +- **Event Log**: Real-time log of all SDK events with filtering +- **Host Info**: Context, capabilities, container dimensions, styles +- **Callback Status**: Table of all callbacks with call counts +- **Actions**: Buttons to test every SDK method: + - Send messages (text/image) + - Logging (debug/info/warning/error) + - Model context updates + - Display mode requests + - Link opening + - Resize controls + - Server tool calls + - File operations + +## Usage + +```bash +# Build +npm run --workspace examples/debug-server build + +# Run standalone +npm run --workspace examples/debug-server serve + +# Run with all examples +npm start +``` + +Then open `http://localhost:8080/basic-host/` and select "Debug MCP App Server" from the dropdown. From e1fd055059e45eb8fade8894493277aa390578b8 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 14 Jan 2026 23:46:42 +0000 Subject: [PATCH 27/30] feat(debug-server): Add file logging with debug-log tool - Add --log-file argument (default: /tmp/mcp-apps-debug-server.log) - Add debug-log app-private tool for app to send logs to file - App now logs all events to console AND server log file - Wrap server log calls in try/catch to prevent failures from breaking app --- examples/debug-server/server.ts | 125 ++++++++++++++++--- examples/debug-server/src/mcp-app.ts | 172 +++++++++++++++++++-------- 2 files changed, 235 insertions(+), 62 deletions(-) diff --git a/examples/debug-server/server.ts b/examples/debug-server/server.ts index 9a67fd46..32b89f3b 100644 --- a/examples/debug-server/server.ts +++ b/examples/debug-server/server.ts @@ -1,8 +1,16 @@ -import { registerAppResource, registerAppTool, RESOURCE_MIME_TYPE } from "@modelcontextprotocol/ext-apps/server"; +import { + registerAppResource, + registerAppTool, + RESOURCE_MIME_TYPE, +} from "@modelcontextprotocol/ext-apps/server"; import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; -import type { CallToolResult, ReadResourceResult } from "@modelcontextprotocol/sdk/types.js"; +import type { + CallToolResult, + ReadResourceResult, +} from "@modelcontextprotocol/sdk/types.js"; import fs from "node:fs/promises"; +import { appendFileSync } from "node:fs"; import path from "node:path"; import { z } from "zod"; import { startServer } from "./server-utils.js"; @@ -12,18 +20,50 @@ const DIST_DIR = path.join(import.meta.dirname, "dist"); // Track call counter across requests (stateful for demo purposes) let callCounter = 0; +// Parse --log-file argument or use default +const DEFAULT_LOG_FILE = "/tmp/mcp-apps-debug-server.log"; +function getLogFilePath(): string { + const logFileArg = process.argv.find((arg) => arg.startsWith("--log-file=")); + if (logFileArg) { + return logFileArg.split("=")[1]; + } + return process.env.DEBUG_LOG_FILE ?? 
DEFAULT_LOG_FILE; +} + +const logFilePath = getLogFilePath(); + +/** + * Append a log entry to the log file + */ +function appendToLogFile(entry: { + timestamp: string; + type: string; + payload: unknown; +}): void { + try { + const line = JSON.stringify(entry) + "\n"; + appendFileSync(logFilePath, line, "utf-8"); + } catch (e) { + console.error("[debug-server] Failed to write to log file:", e); + } +} + // Minimal 1x1 blue PNG (base64) -const BLUE_PNG_1X1 = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPj/HwADBwIAMCbHYQAAAABJRU5ErkJggg=="; +const BLUE_PNG_1X1 = + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPj/HwADBwIAMCbHYQAAAABJRU5ErkJggg=="; // Minimal silent WAV (base64) - 44 byte header + 1 sample -const SILENT_WAV = "UklGRiYAAABXQVZFZm10IBAAAAABAAEARKwAAIhYAQACABAAZGF0YQIAAAAAAA=="; +const SILENT_WAV = + "UklGRiYAAABXQVZFZm10IBAAAAABAAEARKwAAIhYAQACABAAZGF0YQIAAAAAAA=="; /** * Input schema for the debug-tool */ const DebugInputSchema = z.object({ // Content configuration - contentType: z.enum(["text", "image", "audio", "resource", "resourceLink", "mixed"]).default("text"), + contentType: z + .enum(["text", "image", "audio", "resource", "resourceLink", "mixed"]) + .default("text"), multipleBlocks: z.boolean().default(false), includeStructuredContent: z.boolean().default(true), includeMeta: z.boolean().default(false), @@ -63,10 +103,18 @@ function buildContent(args: DebugInput): CallToolResult["content"] { content.push({ type: "text", text: `Debug text content${suffix}` }); break; case "image": - content.push({ type: "image", data: BLUE_PNG_1X1, mimeType: "image/png" }); + content.push({ + type: "image", + data: BLUE_PNG_1X1, + mimeType: "image/png", + }); break; case "audio": - content.push({ type: "audio", data: SILENT_WAV, mimeType: "audio/wav" }); + content.push({ + type: "audio", + data: SILENT_WAV, + mimeType: "audio/wav", + }); break; case "resource": content.push({ @@ -111,11 +159,13 @@ export function createServer(): McpServer { const resourceUri = "ui://debug-tool/mcp-app.html"; // Main debug tool - exercises all result variations - registerAppTool(server, + registerAppTool( + server, "debug-tool", { title: "Debug Tool", - description: "Comprehensive debug tool for testing MCP Apps SDK. Configure content types, error simulation, delays, and more.", + description: + "Comprehensive debug tool for testing MCP Apps SDK. Configure content types, error simulation, delays, and more.", inputSchema: DebugInputSchema, outputSchema: DebugOutputSchema, _meta: { ui: { resourceUri } }, @@ -123,7 +173,7 @@ export function createServer(): McpServer { async (args): Promise => { // Apply delay if requested if (args.delayMs && args.delayMs > 0) { - await new Promise(resolve => setTimeout(resolve, args.delayMs)); + await new Promise((resolve) => setTimeout(resolve, args.delayMs)); } // Build content based on config @@ -138,7 +188,9 @@ export function createServer(): McpServer { config: args, timestamp: new Date().toISOString(), counter: ++callCounter, - ...(args.largeInput ? { largeInputLength: args.largeInput.length } : {}), + ...(args.largeInput + ? { largeInputLength: args.largeInput.length } + : {}), }; } @@ -162,11 +214,13 @@ export function createServer(): McpServer { ); // App-only refresh tool (hidden from model) - registerAppTool(server, + registerAppTool( + server, "debug-refresh", { title: "Refresh Debug Info", - description: "App-only tool for polling server state. Not visible to the model.", + description: + "App-only tool for polling server state. 
Not visible to the model.", inputSchema: z.object({}), outputSchema: z.object({ timestamp: z.string(), counter: z.number() }), _meta: { @@ -185,13 +239,47 @@ export function createServer(): McpServer { }, ); + // App-only log tool - writes events to log file + registerAppTool( + server, + "debug-log", + { + title: "Log to File", + description: + "App-only tool for logging events to the server log file. Not visible to the model.", + inputSchema: z.object({ + type: z.string(), + payload: z.unknown(), + }), + outputSchema: z.object({ logged: z.boolean(), logFile: z.string() }), + _meta: { + ui: { + resourceUri, + visibility: ["app"], + }, + }, + }, + async (args): Promise => { + const timestamp = new Date().toISOString(); + appendToLogFile({ timestamp, type: args.type, payload: args.payload }); + return { + content: [{ type: "text", text: `Logged to ${logFilePath}` }], + structuredContent: { logged: true, logFile: logFilePath }, + }; + }, + ); + // Register the resource which returns the bundled HTML/JavaScript for the UI - registerAppResource(server, + registerAppResource( + server, resourceUri, resourceUri, { mimeType: RESOURCE_MIME_TYPE }, async (): Promise => { - const html = await fs.readFile(path.join(DIST_DIR, "mcp-app.html"), "utf-8"); + const html = await fs.readFile( + path.join(DIST_DIR, "mcp-app.html"), + "utf-8", + ); return { contents: [ @@ -205,6 +293,13 @@ export function createServer(): McpServer { } async function main() { + console.log(`[debug-server] Log file: ${logFilePath}`); + appendToLogFile({ + timestamp: new Date().toISOString(), + type: "server-start", + payload: { logFilePath, pid: process.pid }, + }); + if (process.argv.includes("--stdio")) { await createServer().connect(new StdioServerTransport()); } else { diff --git a/examples/debug-server/src/mcp-app.ts b/examples/debug-server/src/mcp-app.ts index 1d0fbd77..0b36e984 100644 --- a/examples/debug-server/src/mcp-app.ts +++ b/examples/debug-server/src/mcp-app.ts @@ -64,7 +64,9 @@ const clearLogBtn = document.getElementById("clear-log-btn")!; // Host info const hostContextInfoEl = document.getElementById("host-context-info")!; -const hostCapabilitiesInfoEl = document.getElementById("host-capabilities-info")!; +const hostCapabilitiesInfoEl = document.getElementById( + "host-capabilities-info", +)!; const hostContainerInfoEl = document.getElementById("host-container-info")!; const hostStylesSampleEl = document.getElementById("host-styles-sample")!; @@ -72,7 +74,9 @@ const hostStylesSampleEl = document.getElementById("host-styles-sample")!; const callbackTableBodyEl = document.getElementById("callback-table-body")!; // Action elements -const messageTextEl = document.getElementById("message-text") as HTMLInputElement; +const messageTextEl = document.getElementById( + "message-text", +) as HTMLInputElement; const sendMessageTextBtn = document.getElementById("send-message-text-btn")!; const sendMessageImageBtn = document.getElementById("send-message-image-btn")!; @@ -82,9 +86,15 @@ const logInfoBtn = document.getElementById("log-info-btn")!; const logWarningBtn = document.getElementById("log-warning-btn")!; const logErrorBtn = document.getElementById("log-error-btn")!; -const contextTextEl = document.getElementById("context-text") as HTMLInputElement; -const updateContextTextBtn = document.getElementById("update-context-text-btn")!; -const updateContextStructuredBtn = document.getElementById("update-context-structured-btn")!; +const contextTextEl = document.getElementById( + "context-text", +) as HTMLInputElement; +const 
updateContextTextBtn = document.getElementById( + "update-context-text-btn", +)!; +const updateContextStructuredBtn = document.getElementById( + "update-context-structured-btn", +)!; const displayInlineBtn = document.getElementById("display-inline-btn")!; const displayFullscreenBtn = document.getElementById("display-fullscreen-btn")!; @@ -93,19 +103,33 @@ const displayPipBtn = document.getElementById("display-pip-btn")!; const linkUrlEl = document.getElementById("link-url") as HTMLInputElement; const openLinkBtn = document.getElementById("open-link-btn")!; -const autoResizeToggleEl = document.getElementById("auto-resize-toggle") as HTMLInputElement; +const autoResizeToggleEl = document.getElementById( + "auto-resize-toggle", +) as HTMLInputElement; const resize200x100Btn = document.getElementById("resize-200x100-btn")!; const resize400x300Btn = document.getElementById("resize-400x300-btn")!; const resize800x600Btn = document.getElementById("resize-800x600-btn")!; const currentSizeEl = document.getElementById("current-size")!; // Tool config elements -const toolContentTypeEl = document.getElementById("tool-content-type") as HTMLSelectElement; -const toolMultipleBlocksEl = document.getElementById("tool-multiple-blocks") as HTMLInputElement; -const toolStructuredContentEl = document.getElementById("tool-structured-content") as HTMLInputElement; -const toolIncludeMetaEl = document.getElementById("tool-include-meta") as HTMLInputElement; -const toolSimulateErrorEl = document.getElementById("tool-simulate-error") as HTMLInputElement; -const toolDelayMsEl = document.getElementById("tool-delay-ms") as HTMLInputElement; +const toolContentTypeEl = document.getElementById( + "tool-content-type", +) as HTMLSelectElement; +const toolMultipleBlocksEl = document.getElementById( + "tool-multiple-blocks", +) as HTMLInputElement; +const toolStructuredContentEl = document.getElementById( + "tool-structured-content", +) as HTMLInputElement; +const toolIncludeMetaEl = document.getElementById( + "tool-include-meta", +) as HTMLInputElement; +const toolSimulateErrorEl = document.getElementById( + "tool-simulate-error", +) as HTMLInputElement; +const toolDelayMsEl = document.getElementById( + "tool-delay-ms", +) as HTMLInputElement; const callDebugToolBtn = document.getElementById("call-debug-tool-btn")!; const callDebugRefreshBtn = document.getElementById("call-debug-refresh-btn")!; @@ -141,24 +165,29 @@ function truncatePayload(payload: unknown): string { // ============================================================================ function renderEventLog(): void { - const filtered = state.logFilter === "all" - ? state.eventLog - : state.eventLog.filter(e => e.type === state.logFilter); - - eventLogEl.innerHTML = filtered.map(entry => ` + const filtered = + state.logFilter === "all" + ? state.eventLog + : state.eventLog.filter((e) => e.type === state.logFilter); + + eventLogEl.innerHTML = filtered + .map( + (entry) => `
[${formatTime(entry.time)}] ${entry.type}: ${truncatePayload(entry.payload)}
- `).join(""); + `, + ) + .join(""); // Auto-scroll to bottom eventLogEl.scrollTop = eventLogEl.scrollHeight; } function renderCallbackStatus(): void { - callbackTableBodyEl.innerHTML = CALLBACKS.map(name => { + callbackTableBodyEl.innerHTML = CALLBACKS.map((name) => { const count = state.callbackCounts.get(name) ?? 0; const lastPayload = state.lastPayloads.get(name); const registered = name !== "onerror"; // All callbacks are registered @@ -222,10 +251,12 @@ function renderHostInfo(): void { // Styles sample if (ctx?.styles) { const styleVars = Object.entries(ctx.styles).slice(0, 6); - hostStylesSampleEl.innerHTML = styleVars.map(([key, value]) => { - const color = String(value); - return `
`; - }).join(""); + hostStylesSampleEl.innerHTML = styleVars + .map(([key, value]) => { + const color = String(value); + return `
`; + }) + .join(""); } else { hostStylesSampleEl.innerHTML = "No styles"; } @@ -241,11 +272,32 @@ function updateCurrentSize(): void { // Event Logging // ============================================================================ +/** + * Send a log entry to the server's debug-log tool (writes to file) + */ +async function sendToServerLog(type: string, payload: unknown): Promise { + try { + await app.callServerTool({ + name: "debug-log", + arguments: { type, payload }, + }); + } catch (e) { + // Log to console only - don't call logEvent to avoid infinite loop + console.error("[debug-app] Failed to send log to server:", e); + } +} + function logEvent(type: string, payload: unknown): void { + const time = Date.now(); + + // Log to console + console.log(`[debug-app] ${type}:`, payload); + + // Update state const count = (state.callbackCounts.get(type) ?? 0) + 1; state.callbackCounts.set(type, count); state.lastPayloads.set(type, payload); - state.eventLog.push({ time: Date.now(), type, payload }); + state.eventLog.push({ time, type, payload }); // Keep log manageable (max 100 entries) if (state.eventLog.length > 100) { @@ -254,6 +306,12 @@ function logEvent(type: string, payload: unknown): void { renderEventLog(); renderCallbackStatus(); + + // Send to server log file (async, fire-and-forget) + // Skip sending debug-log results to avoid noise + if (type !== "server-tool-result" || (payload as { name?: string })?.name !== "debug-log") { + sendToServerLog(type, payload); + } } // ============================================================================ @@ -331,7 +389,7 @@ app.onerror = (error) => { // Section Collapsing // ============================================================================ -document.querySelectorAll(".section-header[data-toggle]").forEach(header => { +document.querySelectorAll(".section-header[data-toggle]").forEach((header) => { header.addEventListener("click", () => { const section = header.closest(".section"); section?.classList.toggle("collapsed"); @@ -370,7 +428,8 @@ sendMessageTextBtn.addEventListener("click", async () => { sendMessageImageBtn.addEventListener("click", async () => { // 1x1 red PNG for testing - const redPng = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg=="; + const redPng = + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg=="; try { const result = await app.sendMessage({ role: "user", @@ -432,7 +491,9 @@ updateContextStructuredBtn.addEventListener("click", async () => { // Display Mode Actions // ============================================================================ -async function requestDisplayMode(mode: "inline" | "fullscreen" | "pip"): Promise { +async function requestDisplayMode( + mode: "inline" | "fullscreen" | "pip", +): Promise { try { const result = await app.requestDisplayMode({ mode }); logEvent("display-mode-result", { mode, result }); @@ -442,7 +503,9 @@ async function requestDisplayMode(mode: "inline" | "fullscreen" | "pip"): Promis } displayInlineBtn.addEventListener("click", () => requestDisplayMode("inline")); -displayFullscreenBtn.addEventListener("click", () => requestDisplayMode("fullscreen")); +displayFullscreenBtn.addEventListener("click", () => + requestDisplayMode("fullscreen"), +); displayPipBtn.addEventListener("click", () => requestDisplayMode("pip")); // ============================================================================ @@ -504,7 +567,10 @@ callDebugToolBtn.addEventListener("click", async () => { try 
{ logEvent("call-server-tool", { name: "debug-tool", arguments: args }); - const result = await app.callServerTool({ name: "debug-tool", arguments: args }); + const result = await app.callServerTool({ + name: "debug-tool", + arguments: args, + }); logEvent("server-tool-result", result); } catch (e) { logEvent("error", e); @@ -514,7 +580,10 @@ callDebugToolBtn.addEventListener("click", async () => { callDebugRefreshBtn.addEventListener("click", async () => { try { logEvent("call-server-tool", { name: "debug-refresh", arguments: {} }); - const result = await app.callServerTool({ name: "debug-refresh", arguments: {} }); + const result = await app.callServerTool({ + name: "debug-refresh", + arguments: {}, + }); logEvent("server-tool-result", result); } catch (e) { logEvent("error", e); @@ -533,7 +602,11 @@ uploadFileBtn.addEventListener("click", async () => { } try { - logEvent("upload-file", { name: file.name, size: file.size, type: file.type }); + logEvent("upload-file", { + name: file.name, + size: file.size, + type: file.type, + }); const result = await app.uploadFile(file); state.uploadedFileId = result.fileId; lastFileIdEl.textContent = result.fileId; @@ -551,7 +624,9 @@ getFileUrlBtn.addEventListener("click", async () => { try { logEvent("get-file-url", { fileId: state.uploadedFileId }); - const result = await app.getFileDownloadUrl({ fileId: state.uploadedFileId }); + const result = await app.getFileDownloadUrl({ + fileId: state.uploadedFileId, + }); logEvent("get-file-url-result", result); } catch (e) { logEvent("error", e); @@ -566,20 +641,23 @@ getFileUrlBtn.addEventListener("click", async () => { renderCallbackStatus(); // Connect to host -app.connect().then(() => { - logEvent("connected", { success: true }); - - const ctx = app.getHostContext(); - if (ctx) { - handleHostContextChanged(ctx); - } +app + .connect() + .then(() => { + logEvent("connected", { success: true }); + + const ctx = app.getHostContext(); + if (ctx) { + handleHostContextChanged(ctx); + } - renderHostInfo(); - updateCurrentSize(); + renderHostInfo(); + updateCurrentSize(); - // Auto-resize is enabled by default in App, capture cleanup if we want to toggle - // We'll set it up ourselves since we want toggle control - state.autoResizeCleanup = app.setupSizeChangedNotifications(); -}).catch(e => { - logEvent("error", e); -}); + // Auto-resize is enabled by default in App, capture cleanup if we want to toggle + // We'll set it up ourselves since we want toggle control + state.autoResizeCleanup = app.setupSizeChangedNotifications(); + }) + .catch((e) => { + logEvent("error", e); + }); From 2d0f1cc2262f34871793214fe032e6f1cbc51b17 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Wed, 14 Jan 2026 23:50:33 +0000 Subject: [PATCH 28/30] update --- examples/debug-server/README.md | 18 +++++++++--------- examples/debug-server/src/mcp-app.ts | 5 ++++- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/examples/debug-server/README.md b/examples/debug-server/README.md index eef1ae6e..89d9a7d2 100644 --- a/examples/debug-server/README.md +++ b/examples/debug-server/README.md @@ -8,15 +8,15 @@ A comprehensive testing/debugging tool for the MCP Apps SDK that exercises every Configurable tool for testing all result variations: -| Parameter | Type | Default | Description | -|-----------|------|---------|-------------| -| `contentType` | `"text"` \| `"image"` \| `"audio"` \| `"resource"` \| `"resourceLink"` \| `"mixed"` | `"text"` | Content block type to return | -| `multipleBlocks` | boolean | `false` | Return 3 
content blocks | -| `includeStructuredContent` | boolean | `true` | Include structuredContent in result | -| `includeMeta` | boolean | `false` | Include _meta in result | -| `largeInput` | string | - | Large text input (tests tool-input-partial) | -| `simulateError` | boolean | `false` | Return isError: true | -| `delayMs` | number | - | Delay before response (ms) | +| Parameter | Type | Default | Description | +| -------------------------- | ----------------------------------------------------------------------------------- | -------- | ------------------------------------------- | +| `contentType` | `"text"` \| `"image"` \| `"audio"` \| `"resource"` \| `"resourceLink"` \| `"mixed"` | `"text"` | Content block type to return | +| `multipleBlocks` | boolean | `false` | Return 3 content blocks | +| `includeStructuredContent` | boolean | `true` | Include structuredContent in result | +| `includeMeta` | boolean | `false` | Include \_meta in result | +| `largeInput` | string | - | Large text input (tests tool-input-partial) | +| `simulateError` | boolean | `false` | Return isError: true | +| `delayMs` | number | - | Delay before response (ms) | ### debug-refresh diff --git a/examples/debug-server/src/mcp-app.ts b/examples/debug-server/src/mcp-app.ts index 0b36e984..1c084d2b 100644 --- a/examples/debug-server/src/mcp-app.ts +++ b/examples/debug-server/src/mcp-app.ts @@ -309,7 +309,10 @@ function logEvent(type: string, payload: unknown): void { // Send to server log file (async, fire-and-forget) // Skip sending debug-log results to avoid noise - if (type !== "server-tool-result" || (payload as { name?: string })?.name !== "debug-log") { + if ( + type !== "server-tool-result" || + (payload as { name?: string })?.name !== "debug-log" + ) { sendToServerLog(type, payload); } } From 06af4d324615112ec041c71adeadef22c9a6c3d5 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Thu, 15 Jan 2026 01:06:20 +0000 Subject: [PATCH 29/30] style: formatting fixes --- docs/openai-migration.md | 16 ++++++++-------- src/app.ts | 4 +--- src/openai/transport.test.ts | 12 +++++++----- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/openai-migration.md b/docs/openai-migration.md index 056c8619..4d534f94 100644 --- a/docs/openai-migration.md +++ b/docs/openai-migration.md @@ -77,17 +77,17 @@ This guide helps you migrate from the OpenAI Apps SDK (`window.openai.*`) to the ## State Persistence -| OpenAI | MCP Apps | Notes | -| ------------------------------------- | --------------------------------------------------------------------- | -------------------------------------------------- | -| `window.openai.widgetState` | `app.onwidgetstate = (params) => { params.state }` | MCP uses notification callback | -| `window.openai.setWidgetState(state)` | `app.updateModelContext({ modelContent, privateContent, imageIds })` | MCP uses structured format | +| OpenAI | MCP Apps | Notes | +| ------------------------------------- | -------------------------------------------------------------------- | ------------------------------ | +| `window.openai.widgetState` | `app.onwidgetstate = (params) => { params.state }` | MCP uses notification callback | +| `window.openai.setWidgetState(state)` | `app.updateModelContext({ modelContent, privateContent, imageIds })` | MCP uses structured format | ## File Operations -| OpenAI | MCP Apps | Notes | -| ---------------------------------------------------- | ------------------------------------------------- | ------------------------------ | -| `await 
window.openai.uploadFile(file)` | `await app.uploadFile(file)` | Returns `{ fileId }` | -| `await window.openai.getFileDownloadUrl({ fileId })` | `await app.getFileDownloadUrl({ fileId })` | Returns `{ url }` | +| OpenAI | MCP Apps | Notes | +| ---------------------------------------------------- | ------------------------------------------ | -------------------- | +| `await window.openai.uploadFile(file)` | `await app.uploadFile(file)` | Returns `{ fileId }` | +| `await window.openai.getFileDownloadUrl({ fileId })` | `await app.getFileDownloadUrl({ fileId })` | Returns `{ url }` | ## Other (Not Yet in MCP Apps) diff --git a/src/app.ts b/src/app.ts index 86ab971a..26341b76 100644 --- a/src/app.ts +++ b/src/app.ts @@ -1065,9 +1065,7 @@ export class App extends Protocol { async uploadFile(file: File, options?: RequestOptions) { // Convert File to base64 const arrayBuffer = await file.arrayBuffer(); - const base64 = btoa( - String.fromCharCode(...new Uint8Array(arrayBuffer)), - ); + const base64 = btoa(String.fromCharCode(...new Uint8Array(arrayBuffer))); return this.request( { diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts index ad2c58b1..dc266201 100644 --- a/src/openai/transport.test.ts +++ b/src/openai/transport.test.ts @@ -124,11 +124,13 @@ describe("OpenAITransport", () => { const mockWindow = globalThis.window as unknown as { addEventListener: ReturnType; }; - mockWindow.addEventListener = mock((event: string, handler: (event: Event) => void) => { - if (event === "openai:set_globals") { - capturedHandler = handler; - } - }); + mockWindow.addEventListener = mock( + (event: string, handler: (event: Event) => void) => { + if (event === "openai:set_globals") { + capturedHandler = handler; + } + }, + ); const transport = new OpenAITransport(); const messages: unknown[] = []; From a7e8dd45baab852d3c5c006dbb6368d78d538de4 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Thu, 15 Jan 2026 11:08:10 +0000 Subject: [PATCH 30/30] chore: update package-lock.json after merge --- package-lock.json | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/package-lock.json b/package-lock.json index 4930413f..a789865d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -470,6 +470,45 @@ "dev": true, "license": "MIT" }, + "examples/debug-server": { + "name": "@modelcontextprotocol/server-debug", + "version": "0.4.0", + "license": "MIT", + "dependencies": { + "@modelcontextprotocol/ext-apps": "^0.4.0", + "@modelcontextprotocol/sdk": "^1.24.0", + "zod": "^4.1.13" + }, + "devDependencies": { + "@types/cors": "^2.8.19", + "@types/express": "^5.0.0", + "@types/node": "^22.0.0", + "concurrently": "^9.2.1", + "cors": "^2.8.5", + "cross-env": "^10.1.0", + "express": "^5.1.0", + "typescript": "^5.9.3", + "vite": "^6.0.0", + "vite-plugin-singlefile": "^2.3.0" + } + }, + "examples/debug-server/node_modules/@types/node": { + "version": "22.19.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.6.tgz", + "integrity": "sha512-qm+G8HuG6hOHQigsi7VGuLjUVu6TtBo/F05zvX04Mw2uCg9Dv0Qxy3Qw7j41SidlTcl5D/5yg0SEZqOB+EqZnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "examples/debug-server/node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, 
"examples/integration-server": { "version": "1.0.0", "dependencies": { @@ -2420,6 +2459,10 @@ "resolved": "examples/customer-segmentation-server", "link": true }, + "node_modules/@modelcontextprotocol/server-debug": { + "resolved": "examples/debug-server", + "link": true + }, "node_modules/@modelcontextprotocol/server-map": { "resolved": "examples/map-server", "link": true