diff --git a/core/llm/llms/OpenRouter.ts b/core/llm/llms/OpenRouter.ts
index b277282458..0c389f7bd7 100644
--- a/core/llm/llms/OpenRouter.ts
+++ b/core/llm/llms/OpenRouter.ts
@@ -1,5 +1,7 @@
 import { ChatCompletionCreateParams } from "openai/resources/index";
 
+import { OPENROUTER_HEADERS } from "@continuedev/openai-adapters";
+
 import { LLMOptions } from "../../index.js";
 import { osModelsEditPrompt } from "../templates/edit.js";
 
@@ -18,6 +20,19 @@ class OpenRouter extends OpenAI {
     useLegacyCompletionsEndpoint: false,
   };
 
+  constructor(options: LLMOptions) {
+    super({
+      ...options,
+      requestOptions: {
+        ...options.requestOptions,
+        headers: {
+          ...OPENROUTER_HEADERS,
+          ...options.requestOptions?.headers,
+        },
+      },
+    });
+  }
+
   private isAnthropicModel(model?: string): boolean {
     if (!model) return false;
     const modelLower = model.toLowerCase();
diff --git a/core/llm/openaiTypeConverters.test.ts b/core/llm/openaiTypeConverters.test.ts
index f597f00226..e217b7b586 100644
--- a/core/llm/openaiTypeConverters.test.ts
+++ b/core/llm/openaiTypeConverters.test.ts
@@ -489,6 +489,55 @@ describe("openaiTypeConverters", () => {
     expect(functionCalls[0].id).toBe("fc_001");
   });
 
+  it("should emit assistant message before function_call when reasoning and tool calls share a turn", () => {
+    const messages: ChatMessage[] = [
+      {
+        role: "thinking",
+        content: "",
+        reasoning_details: [
+          { type: "reasoning_id", id: "rs_001" },
+          {
+            type: "encrypted_content",
+            encrypted_content: "encrypted_data_here",
+          },
+        ],
+        metadata: { reasoningId: "rs_001" },
+      } as ChatMessage,
+      {
+        role: "assistant",
+        content: "I'll inspect the file first.",
+        toolCalls: [
+          {
+            id: "call_001",
+            type: "function",
+            function: { name: "read_file", arguments: '{"path":"a.txt"}' },
+          },
+        ],
+        metadata: {
+          responsesOutputItemIds: ["msg_001", "fc_001"],
+          responsesOutputItemId: "fc_001",
+        },
+      } as ChatMessage,
+    ];
+
+    const result = toResponsesInput(messages);
+
+    expect(result[0]).toMatchObject({
+      type: "reasoning",
+      id: "rs_001",
+    });
+    expect(result[1]).toMatchObject({
+      type: "message",
+      role: "assistant",
+      id: "msg_001",
+    });
+    expect(result[2]).toMatchObject({
+      type: "function_call",
+      id: "fc_001",
+      call_id: "call_001",
+    });
+  });
+
   it("should strip fc_ id from function_calls after removed reasoning", () => {
     const messages: ChatMessage[] = [
       {
diff --git a/core/llm/openaiTypeConverters.ts b/core/llm/openaiTypeConverters.ts
index fb4673e11b..b17fe3fa90 100644
--- a/core/llm/openaiTypeConverters.ts
+++ b/core/llm/openaiTypeConverters.ts
@@ -1038,8 +1038,6 @@ export function toResponsesInput(messages: ChatMessage[]): ResponseInput {
         (respId?.startsWith("msg_") ? respId : undefined);
 
       if (Array.isArray(toolCalls) && toolCalls.length > 0) {
-        emitFunctionCallsFromToolCalls(toolCalls, fcIds, input);
-
         if (text && text.trim()) {
           if (msgId) {
             const outputMessageItem: ResponseOutputMessage = {
@@ -1060,6 +1058,8 @@ export function toResponsesInput(messages: ChatMessage[]): ResponseInput {
             pushMessage("assistant", text);
           }
         }
+
+        emitFunctionCallsFromToolCalls(toolCalls, fcIds, input);
       } else if (msgId) {
         const outputMessageItem: ResponseOutputMessage = {
           id: msgId,
diff --git a/packages/openai-adapters/src/apis/OpenRouter.ts b/packages/openai-adapters/src/apis/OpenRouter.ts
index 7c45fddeed..542699d20c 100644
--- a/packages/openai-adapters/src/apis/OpenRouter.ts
+++ b/packages/openai-adapters/src/apis/OpenRouter.ts
@@ -10,9 +10,10 @@ export interface OpenRouterConfig extends OpenAIConfig {
 }
 
 // TODO: Extract detailed error info from OpenRouter's error.metadata.raw to surface better messages
-const OPENROUTER_HEADERS: Record<string, string> = {
+export const OPENROUTER_HEADERS: Record<string, string> = {
   "HTTP-Referer": "https://www.continue.dev/",
-  "X-Title": "Continue",
+  "X-OpenRouter-Title": "Continue",
+  "X-OpenRouter-Categories": "ide-extension",
 };
 
 export class OpenRouterApi extends OpenAIApi {
diff --git a/packages/openai-adapters/src/index.ts b/packages/openai-adapters/src/index.ts
index 467c7a71ae..c9eb4da00f 100644
--- a/packages/openai-adapters/src/index.ts
+++ b/packages/openai-adapters/src/index.ts
@@ -243,4 +243,5 @@
 } from "./apis/AnthropicUtils.js";
 
 export { isResponsesModel } from "./apis/openaiResponses.js";
+export { OPENROUTER_HEADERS } from "./apis/OpenRouter.js";
 export { extractBase64FromDataUrl, parseDataUrl } from "./util/url.js";