diff --git a/core/llm/defaultSystemMessages.ts b/core/llm/defaultSystemMessages.ts index 430a6777d84..6fdcf717774 100644 --- a/core/llm/defaultSystemMessages.ts +++ b/core/llm/defaultSystemMessages.ts @@ -63,7 +63,7 @@ export const DEFAULT_AGENT_SYSTEM_MESSAGE = `\ You are in agent mode. - If you need to use multiple tools, you can call multiple read-only tools simultaneously. + Call tools one at a time. Wait for each tool result before deciding whether to call another tool. ${CODEBLOCK_FORMATTING_INSTRUCTIONS} diff --git a/core/llm/llms/OpenRouter.ts b/core/llm/llms/OpenRouter.ts index b2772824583..0c389f7bd70 100644 --- a/core/llm/llms/OpenRouter.ts +++ b/core/llm/llms/OpenRouter.ts @@ -1,5 +1,7 @@ import { ChatCompletionCreateParams } from "openai/resources/index"; +import { OPENROUTER_HEADERS } from "@continuedev/openai-adapters"; + import { LLMOptions } from "../../index.js"; import { osModelsEditPrompt } from "../templates/edit.js"; @@ -18,6 +20,19 @@ class OpenRouter extends OpenAI { useLegacyCompletionsEndpoint: false, }; + constructor(options: LLMOptions) { + super({ + ...options, + requestOptions: { + ...options.requestOptions, + headers: { + ...OPENROUTER_HEADERS, + ...options.requestOptions?.headers, + }, + }, + }); + } + private isAnthropicModel(model?: string): boolean { if (!model) return false; const modelLower = model.toLowerCase(); diff --git a/core/tools/systemMessageTools/toolCodeblocks/buildSystemMessage.vitest.ts b/core/tools/systemMessageTools/toolCodeblocks/buildSystemMessage.vitest.ts index 4742559ef63..4273192e1a1 100644 --- a/core/tools/systemMessageTools/toolCodeblocks/buildSystemMessage.vitest.ts +++ b/core/tools/systemMessageTools/toolCodeblocks/buildSystemMessage.vitest.ts @@ -228,6 +228,30 @@ describe("generateToolsSystemMessage", () => { expect(hasExampleDefinition).toBe(true); expect(hasExampleCall).toBe(true); }); + + it("instructs models to call tools serially", () => { + const tools: Tool[] = [ + { + function: { + name: 
"test_tool", + description: "Test description", + parameters: { + type: "object", + properties: {}, + required: [], + }, + }, + ...SHARED_TOOL_FIELDS, + }, + ]; + + const result = generateToolsSystemMessage(tools, framework); + + expect(result).includes( + "Call ONE tool at a time and wait for its result before calling another tool.", + ); + expect(result).not.includes("simultaneously"); + }); }); describe("addSystemMessageToolsToSystemMessage", () => { diff --git a/core/tools/systemMessageTools/toolCodeblocks/index.ts b/core/tools/systemMessageTools/toolCodeblocks/index.ts index 3acddb70f00..f25ae1571f7 100644 --- a/core/tools/systemMessageTools/toolCodeblocks/index.ts +++ b/core/tools/systemMessageTools/toolCodeblocks/index.ts @@ -72,7 +72,7 @@ CRITICAL: Follow the exact syntax. Do not use XML tags, JSON objects, or any oth systemMessageSuffix = `RULES FOR TOOL USE: 1. To call a tool, output a tool code block using EXACTLY the format shown above. 2. Always start the code block on a new line. -3. You can only call ONE tool at a time. +3. Call ONE tool at a time and wait for its result before calling another tool. 4. The tool code block MUST be the last thing in your response. Stop immediately after the closing fence. 5. Do NOT wrap tool calls in XML tags like or . 6. Do NOT use JSON format for tool calls. diff --git a/gui/src/redux/util/getBaseSystemMessage.test.ts b/gui/src/redux/util/getBaseSystemMessage.test.ts index ed069693af2..23beac45a70 100644 --- a/gui/src/redux/util/getBaseSystemMessage.test.ts +++ b/gui/src/redux/util/getBaseSystemMessage.test.ts @@ -84,3 +84,10 @@ test("getBaseSystemMessage should append no-tools warning for agent/plan modes w "Custom Plan System Message" + NO_TOOL_WARNING, ); }); + +test("default agent system message should instruct serial tool calling", () => { + expect(DEFAULT_AGENT_SYSTEM_MESSAGE).toContain( + "Call tools one at a time. 
Wait for each tool result before deciding whether to call another tool.", ); expect(DEFAULT_AGENT_SYSTEM_MESSAGE).not.toContain("simultaneously"); }); diff --git a/packages/openai-adapters/src/apis/OpenRouter.ts b/packages/openai-adapters/src/apis/OpenRouter.ts index 7c45fddeed6..542699d20c3 100644 --- a/packages/openai-adapters/src/apis/OpenRouter.ts +++ b/packages/openai-adapters/src/apis/OpenRouter.ts @@ -10,9 +10,10 @@ export interface OpenRouterConfig extends OpenAIConfig { // TODO: Extract detailed error info from OpenRouter's error.metadata.raw to surface better messages -const OPENROUTER_HEADERS: Record<string, string> = { +export const OPENROUTER_HEADERS: Record<string, string> = { "HTTP-Referer": "https://www.continue.dev/", "X-Title": "Continue", + "X-OpenRouter-Categories": "ide-extension", }; export class OpenRouterApi extends OpenAIApi { diff --git a/packages/openai-adapters/src/index.ts b/packages/openai-adapters/src/index.ts index 467c7a71ae9..c9eb4da00fa 100644 --- a/packages/openai-adapters/src/index.ts +++ b/packages/openai-adapters/src/index.ts @@ -243,4 +243,5 @@ export { } from "./apis/AnthropicUtils.js"; export { isResponsesModel } from "./apis/openaiResponses.js"; +export { OPENROUTER_HEADERS } from "./apis/OpenRouter.js"; export { extractBase64FromDataUrl, parseDataUrl } from "./util/url.js";