@@ -21,6 +21,7 @@ import { type Attributes, trace } from "@opentelemetry/api";
2121import { auth } from "./auth.js" ;
2222import { locals } from "./locals.js" ;
2323import { metadata } from "./metadata.js" ;
24+ import type { ResolvedPrompt } from "./prompt.js" ;
2425import { streams } from "./streams.js" ;
2526import { createTask } from "./shared.js" ;
2627import { tracer } from "./tracer.js" ;
@@ -404,6 +405,118 @@ const chatUIStreamStaticKey = locals.create<ChatUIMessageStreamOptions>("chat.ui
/** Per-turn UIMessageStream options, set via chat.setUIMessageStreamOptions(). @internal */
const chatUIStreamPerTurnKey = locals.create<ChatUIMessageStreamOptions>("chat.uiMessageStreamOptions.perTurn");
406407
// ---------------------------------------------------------------------------
// chat.prompt — store and retrieve a resolved prompt for the current run
// ---------------------------------------------------------------------------

/**
 * A resolved prompt stored via `chat.prompt.set()`. Either a full `ResolvedPrompt`
 * from `prompts.define().resolve()`, or a lightweight wrapper around a plain string.
 *
 * The wrapper branch mirrors the `ResolvedPrompt` surface that
 * `toStreamTextOptions` reads (`text`, `model`, `config`, `toAISDKTelemetry`),
 * so callers can treat both variants uniformly.
 */
export type ChatPromptValue = ResolvedPrompt | {
  /** The prompt text (the plain string passed to `chat.prompt.set()`). */
  text: string;
  /** No model association for plain-string prompts. */
  model: undefined;
  /** No config fields (temperature, maxTokens, …) for plain-string prompts. */
  config: undefined;
  /** Empty string for plain-string prompts — there is no registered prompt id. */
  promptId: string;
  /** `0` for plain-string prompts (see `setChatPrompt`). */
  version: number;
  /** Empty for plain-string prompts (see `setChatPrompt`). */
  labels: string[];
  /** Builds AI SDK `experimental_telemetry` options for `streamText`. */
  toAISDKTelemetry: (additionalMetadata?: Record<string, string>) => {
    experimental_telemetry: { isEnabled: true; metadata: Record<string, string> };
  };
};

/** Locals key holding the prompt stored for the current run. @internal */
const chatPromptKey = locals.create<ChatPromptValue>("chat.prompt");
430+
431+ /**
432+ * Store a resolved prompt (or plain string) for the current run.
433+ * Call from any hook (`onPreload`, `onChatStart`, `onTurnStart`) or `run()`.
434+ */
435+ function setChatPrompt ( resolved : ResolvedPrompt | string ) : void {
436+ if ( typeof resolved === "string" ) {
437+ locals . set ( chatPromptKey , {
438+ text : resolved ,
439+ model : undefined ,
440+ config : undefined ,
441+ promptId : "" ,
442+ version : 0 ,
443+ labels : [ ] ,
444+ toAISDKTelemetry : ( ) => ( {
445+ experimental_telemetry : { isEnabled : true , metadata : { } } ,
446+ } ) ,
447+ } ) ;
448+ } else {
449+ locals . set ( chatPromptKey , resolved ) ;
450+ }
451+ }
452+
453+ /**
454+ * Read the stored prompt. Throws if `chat.prompt.set()` has not been called.
455+ */
456+ function getChatPrompt ( ) : ChatPromptValue {
457+ const prompt = locals . get ( chatPromptKey ) ;
458+ if ( ! prompt ) {
459+ throw new Error (
460+ "chat.prompt() called before chat.prompt.set(). Set a prompt in onPreload, onChatStart, onTurnStart, or run() first."
461+ ) ;
462+ }
463+ return prompt ;
464+ }
465+
/**
 * Options for {@link toStreamTextOptions}.
 */
export type ToStreamTextOptionsOptions = {
  /** Additional telemetry metadata merged into `experimental_telemetry.metadata`. */
  telemetry?: Record<string, string>;
  /**
   * An AI SDK provider registry (from `createProviderRegistry`) or any object
   * with a `languageModel(id)` method. When provided and the stored prompt has
   * a `model` string, the resolved `LanguageModel` is included in the returned
   * options so `streamText` uses it directly.
   *
   * The model string should use the `"provider:model-id"` format
   * (e.g. `"openai:gpt-4o"`, `"anthropic:claude-sonnet-4-6"`).
   */
  registry?: { languageModel(modelId: string): unknown };
};
483+
484+ /**
485+ * Returns an options object ready to spread into `streamText()`.
486+ *
487+ * Includes `system`, `experimental_telemetry`, and any config fields
488+ * (temperature, maxTokens, etc.) from the stored prompt.
489+ *
490+ * When a `registry` is provided and the prompt has a `model` string,
491+ * the resolved `LanguageModel` is included as `model`.
492+ *
493+ * If no prompt has been set, returns `{}` (no-op spread).
494+ */
495+ function toStreamTextOptions ( options ?: ToStreamTextOptionsOptions ) : Record < string , unknown > {
496+ const prompt = locals . get ( chatPromptKey ) ;
497+ if ( ! prompt ) return { } ;
498+
499+ const result : Record < string , unknown > = {
500+ system : prompt . text ,
501+ } ;
502+
503+ // Resolve model via registry if both are present
504+ if ( options ?. registry && prompt . model ) {
505+ result . model = options . registry . languageModel ( prompt . model ) ;
506+ }
507+
508+ // Spread config (temperature, maxTokens, etc.)
509+ if ( prompt . config ) {
510+ Object . assign ( result , prompt . config ) ;
511+ }
512+
513+ // Add telemetry (forward additional metadata from caller)
514+ const telemetry = prompt . toAISDKTelemetry ( options ?. telemetry ) ;
515+ Object . assign ( result , telemetry ) ;
516+
517+ return result ;
518+ }
519+
407520/**
408521 * Options for `pipeChat`.
409522 */
@@ -2302,6 +2415,19 @@ export const chat = {
23022415 MessageAccumulator : ChatMessageAccumulator ,
23032416 /** Create a chat session (async iterator). See {@link createChatSession}. */
23042417 createSession : createChatSession ,
2418+ /**
2419+ * Store and retrieve a resolved prompt for the current run.
2420+ *
2421+ * - `chat.prompt.set(resolved)` — store a `ResolvedPrompt` or plain string
2422+ * - `chat.prompt()` — read the stored prompt (throws if not set)
2423+ */
2424+ prompt : Object . assign ( getChatPrompt , { set : setChatPrompt } ) ,
2425+ /**
2426+ * Returns an options object ready to spread into `streamText()`.
2427+ * Reads the stored prompt and returns `{ system, experimental_telemetry, ...config }`.
2428+ * Returns `{}` if no prompt has been set.
2429+ */
2430+ toStreamTextOptions,
23052431} ;
23062432
23072433/**
0 commit comments