openclaw/src/agents/pi-embedded-runner/extra-params.openrouter-cache-control.test.ts

import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Context, Model } from "@mariozechner/pi-ai";
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import { applyExtraParamsToAgent } from "./extra-params.js";
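
// applyExtraParamsToAgent wraps agent.streamFn; for Anthropic models routed
// through OpenRouter it should rewrite the system message so its final text
// block carries cache_control: { type: "ephemeral" }. Each test stubs the
// underlying StreamFn so the payload handed to onPayload can be inspected
// after the wrapped function runs.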
describe("extra-params: OpenRouter Anthropic cache_control", () => {
it("injects cache_control into system message for OpenRouter Anthropic models", () => {
const payload = {
messages: [
{ role: "system", content: "You are a helpful assistant." },
{ role: "user", content: "Hello" },
],
};
const baseStreamFn: StreamFn = (_model, _context, options) => {
options?.onPayload?.(payload);
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
applyExtraParamsToAgent(agent, undefined, "openrouter", "anthropic/claude-opus-4-6");
const model = {
api: "openai-completions",
provider: "openrouter",
id: "anthropic/claude-opus-4-6",
} as Model<"openai-completions">;
const context: Context = { messages: [] };
void agent.streamFn?.(model, context, {});
expect(payload.messages[0].content).toEqual([
{ type: "text", text: "You are a helpful assistant.", cache_control: { type: "ephemeral" } },
]);
expect(payload.messages[1].content).toBe("Hello");
});
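
	// The system message may already be an array of text blocks; only the last
	// block should gain cache_control while earlier blocks stay untouched.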
it("adds cache_control to last content block when system message is already array", () => {
const payload = {
messages: [
{
role: "system",
content: [
{ type: "text", text: "Part 1" },
{ type: "text", text: "Part 2" },
],
},
],
};
const baseStreamFn: StreamFn = (_model, _context, options) => {
options?.onPayload?.(payload);
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
applyExtraParamsToAgent(agent, undefined, "openrouter", "anthropic/claude-opus-4-6");
const model = {
api: "openai-completions",
provider: "openrouter",
id: "anthropic/claude-opus-4-6",
} as Model<"openai-completions">;
const context: Context = { messages: [] };
void agent.streamFn?.(model, context, {});
const content = payload.messages[0].content as Array<Record<string, unknown>>;
expect(content[0]).toEqual({ type: "text", text: "Part 1" });
expect(content[1]).toEqual({
type: "text",
text: "Part 2",
cache_control: { type: "ephemeral" },
});
});
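
	// Non-Anthropic OpenRouter models must pass through unchanged: the system
	// message stays a plain string and gains no cache_control.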
it("does not inject cache_control for OpenRouter non-Anthropic models", () => {
const payload = {
messages: [{ role: "system", content: "You are a helpful assistant." }],
};
const baseStreamFn: StreamFn = (_model, _context, options) => {
options?.onPayload?.(payload);
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
applyExtraParamsToAgent(agent, undefined, "openrouter", "google/gemini-3-pro");
const model = {
api: "openai-completions",
provider: "openrouter",
id: "google/gemini-3-pro",
} as Model<"openai-completions">;
const context: Context = { messages: [] };
void agent.streamFn?.(model, context, {});
expect(payload.messages[0].content).toBe("You are a helpful assistant.");
});
});