refactor(tests): dedupe pi embedded test harness

This commit is contained in:
Peter Steinberger
2026-03-03 01:14:32 +00:00
parent 3e4dd84511
commit 1b4062defd
2 changed files with 72 additions and 172 deletions

View File

@@ -11,10 +11,6 @@ import {
} from "./pi-embedded-subscribe.e2e-harness.js";
import { subscribeEmbeddedPiSession } from "./pi-embedded-subscribe.js";
// Minimal stand-in for a pi session as seen by subscribeEmbeddedPiSession:
// `subscribe` registers an event handler and returns an unsubscribe callback.
type StubSession = {
subscribe: (fn: (evt: unknown) => void) => () => void;
};
describe("subscribeEmbeddedPiSession", () => {
function createAgentEventHarness(options?: { runId?: string; sessionKey?: string }) {
const { session, emit } = createStubSessionHarness();
@@ -41,6 +37,32 @@ describe("subscribeEmbeddedPiSession", () => {
return { emit, subscription };
}
// Builds a stub session, subscribes it via subscribeEmbeddedPiSession with the
// caller-supplied options, and returns the emitter used to drive fake events.
function createSubscribedHarness(
  options: Omit<Parameters<typeof subscribeEmbeddedPiSession>[0], "session">,
) {
  const harness = createStubSessionHarness();
  subscribeEmbeddedPiSession({ session: harness.session, ...options });
  return { emit: harness.emit };
}
// Convenience wrapper: emits a single message_update event carrying one
// assistant text_delta chunk. The message payload defaults to a bare
// assistant-role object unless the caller supplies its own.
function emitAssistantTextDelta(
  emit: (evt: unknown) => void,
  delta: string,
  message: Record<string, unknown> = { role: "assistant" },
) {
  const assistantMessageEvent = { type: "text_delta", delta };
  emit({ type: "message_update", message, assistantMessageEvent });
}
function createWriteFailureHarness(params: {
runId: string;
path: string;
@@ -85,19 +107,10 @@ describe("subscribeEmbeddedPiSession", () => {
it.each(THINKING_TAG_CASES)(
"streams <%s> reasoning via onReasoningStream without leaking into final text",
({ open, close }) => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
handler = fn;
return () => {};
},
};
const onReasoningStream = vi.fn();
const onBlockReply = vi.fn();
subscribeEmbeddedPiSession({
session: session as unknown as Parameters<typeof subscribeEmbeddedPiSession>[0]["session"],
const { emit } = createSubscribedHarness({
runId: "run",
onReasoningStream,
onBlockReply,
@@ -105,23 +118,8 @@ describe("subscribeEmbeddedPiSession", () => {
reasoningMode: "stream",
});
handler?.({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: {
type: "text_delta",
delta: `${open}\nBecause`,
},
});
handler?.({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: {
type: "text_delta",
delta: ` it helps\n${close}\n\nFinal answer`,
},
});
emitAssistantTextDelta(emit, `${open}\nBecause`);
emitAssistantTextDelta(emit, ` it helps\n${close}\n\nFinal answer`);
const assistantMessage = {
role: "assistant",
@@ -133,7 +131,7 @@ describe("subscribeEmbeddedPiSession", () => {
],
} as AssistantMessage;
handler?.({ type: "message_end", message: assistantMessage });
emit({ type: "message_end", message: assistantMessage });
expect(onBlockReply).toHaveBeenCalledTimes(1);
expect(onBlockReply.mock.calls[0][0].text).toBe("Final answer");
@@ -152,18 +150,9 @@ describe("subscribeEmbeddedPiSession", () => {
it.each(THINKING_TAG_CASES)(
"suppresses <%s> blocks across chunk boundaries",
({ open, close }) => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
handler = fn;
return () => {};
},
};
const onBlockReply = vi.fn();
subscribeEmbeddedPiSession({
session: session as unknown as Parameters<typeof subscribeEmbeddedPiSession>[0]["session"],
const { emit } = createSubscribedHarness({
runId: "run",
onBlockReply,
blockReplyBreak: "text_end",
@@ -174,29 +163,13 @@ describe("subscribeEmbeddedPiSession", () => {
},
});
handler?.({ type: "message_start", message: { role: "assistant" } });
handler?.({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: {
type: "text_delta",
delta: `${open}Reasoning chunk that should not leak`,
},
});
emit({ type: "message_start", message: { role: "assistant" } });
emitAssistantTextDelta(emit, `${open}Reasoning chunk that should not leak`);
expect(onBlockReply).not.toHaveBeenCalled();
handler?.({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: {
type: "text_delta",
delta: `${close}\n\nFinal answer`,
},
});
handler?.({
emitAssistantTextDelta(emit, `${close}\n\nFinal answer`);
emit({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: { type: "text_end" },
@@ -216,26 +189,17 @@ describe("subscribeEmbeddedPiSession", () => {
);
it("streams native thinking_delta events and signals reasoning end", () => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
handler = fn;
return () => {};
},
};
const onReasoningStream = vi.fn();
const onReasoningEnd = vi.fn();
subscribeEmbeddedPiSession({
session: session as unknown as Parameters<typeof subscribeEmbeddedPiSession>[0]["session"],
const { emit } = createSubscribedHarness({
runId: "run",
reasoningMode: "stream",
onReasoningStream,
onReasoningEnd,
});
handler?.({
emit({
type: "message_update",
message: {
role: "assistant",
@@ -247,7 +211,7 @@ describe("subscribeEmbeddedPiSession", () => {
},
});
handler?.({
emit({
type: "message_update",
message: {
role: "assistant",
@@ -266,36 +230,18 @@ describe("subscribeEmbeddedPiSession", () => {
});
it("emits reasoning end once when native and tagged reasoning end overlap", () => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
handler = fn;
return () => {};
},
};
const onReasoningEnd = vi.fn();
subscribeEmbeddedPiSession({
session: session as unknown as Parameters<typeof subscribeEmbeddedPiSession>[0]["session"],
const { emit } = createSubscribedHarness({
runId: "run",
reasoningMode: "stream",
onReasoningStream: vi.fn(),
onReasoningEnd,
});
handler?.({ type: "message_start", message: { role: "assistant" } });
handler?.({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: {
type: "text_delta",
delta: "<think>Checking",
},
});
handler?.({
emit({ type: "message_start", message: { role: "assistant" } });
emitAssistantTextDelta(emit, "<think>Checking");
emit({
type: "message_update",
message: {
role: "assistant",
@@ -306,14 +252,7 @@ describe("subscribeEmbeddedPiSession", () => {
},
});
handler?.({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: {
type: "text_delta",
delta: " files</think>\nFinal answer",
},
});
emitAssistantTextDelta(emit, " files</think>\nFinal answer");
expect(onReasoningEnd).toHaveBeenCalledTimes(1);
});
@@ -374,16 +313,8 @@ describe("subscribeEmbeddedPiSession", () => {
const { emit, onAgentEvent } = createAgentEventHarness();
emit({ type: "message_start", message: { role: "assistant" } });
emit({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: { type: "text_delta", delta: "MEDIA:" },
});
emit({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: { type: "text_delta", delta: " https://example.com/a.png\nCaption" },
});
emitAssistantTextDelta(emit, "MEDIA:");
emitAssistantTextDelta(emit, " https://example.com/a.png\nCaption");
const payloads = extractAgentEventPayloads(onAgentEvent.mock.calls);
expect(payloads).toHaveLength(1);
@@ -394,11 +325,7 @@ describe("subscribeEmbeddedPiSession", () => {
const { emit, onAgentEvent } = createAgentEventHarness();
emit({ type: "message_start", message: { role: "assistant" } });
emit({
type: "message_update",
message: { role: "assistant" },
assistantMessageEvent: { type: "text_delta", delta: "MEDIA: https://example.com/a.png" },
});
emitAssistantTextDelta(emit, "MEDIA: https://example.com/a.png");
const payloads = extractAgentEventPayloads(onAgentEvent.mock.calls);
expect(payloads).toHaveLength(1);

View File

@@ -48,16 +48,10 @@ function makeImageToolResult(params: {
toolName: string;
text: string;
}): AgentMessage {
const base = makeToolResult(params);
return {
role: "toolResult",
toolCallId: params.toolCallId,
toolName: params.toolName,
content: [
{ type: "image", data: "AA==", mimeType: "image/png" },
{ type: "text", text: params.text },
],
isError: false,
timestamp: Date.now(),
...base,
content: [{ type: "image", data: "AA==", mimeType: "image/png" }, ...base.content],
};
}
@@ -121,6 +115,23 @@ function pruneWithAggressiveDefaults(
});
}
// Fabricates an oversized "exec" tool result (20k repetitions of textChar) so
// context-pruning size thresholds are guaranteed to trigger in tests.
function makeLargeExecToolResult(toolCallId: string, textChar: string): AgentMessage {
  const text = textChar.repeat(20_000);
  return makeToolResult({ toolCallId, toolName: "exec", text });
}
// Standard fixture for pruning tests: a user turn, an assistant turn, and one
// large exec tool result, optionally followed by a trailing assistant message.
function makeSimpleToolPruningMessages(includeTrailingAssistant = false): AgentMessage[] {
  const messages: AgentMessage[] = [
    makeUser("u1"),
    makeAssistant("a1"),
    makeLargeExecToolResult("t1", "x"),
  ];
  if (includeTrailingAssistant) {
    messages.push(makeAssistant("a2"));
  }
  return messages;
}
type ContextHandler = (
event: { messages: AgentMessage[] },
ctx: ExtensionContext,
@@ -235,23 +246,11 @@ describe("context-pruning", () => {
const messages: AgentMessage[] = [
makeUser("u1"),
makeAssistant("a1"),
makeToolResult({
toolCallId: "t1",
toolName: "exec",
text: "x".repeat(20_000),
}),
makeToolResult({
toolCallId: "t2",
toolName: "exec",
text: "y".repeat(20_000),
}),
makeLargeExecToolResult("t1", "x"),
makeLargeExecToolResult("t2", "y"),
makeUser("u2"),
makeAssistant("a2"),
makeToolResult({
toolCallId: "t3",
toolName: "exec",
text: "z".repeat(20_000),
}),
makeLargeExecToolResult("t3", "z"),
];
const next = pruneWithAggressiveDefaults(messages, {
@@ -267,16 +266,7 @@ describe("context-pruning", () => {
});
it("uses contextWindow override when ctx.model is missing", () => {
const messages: AgentMessage[] = [
makeUser("u1"),
makeAssistant("a1"),
makeToolResult({
toolCallId: "t1",
toolName: "exec",
text: "x".repeat(20_000),
}),
makeAssistant("a2"),
];
const messages = makeSimpleToolPruningMessages(true);
const next = pruneContextMessages({
messages,
@@ -298,16 +288,7 @@ describe("context-pruning", () => {
lastCacheTouchAt: Date.now() - DEFAULT_CONTEXT_PRUNING_SETTINGS.ttlMs - 1000,
});
const messages: AgentMessage[] = [
makeUser("u1"),
makeAssistant("a1"),
makeToolResult({
toolCallId: "t1",
toolName: "exec",
text: "x".repeat(20_000),
}),
makeAssistant("a2"),
];
const messages = makeSimpleToolPruningMessages(true);
const handler = createContextHandler();
const result = runContextHandler(handler, messages, sessionManager);
@@ -329,15 +310,7 @@ describe("context-pruning", () => {
lastCacheTouchAt: lastTouch,
});
const messages: AgentMessage[] = [
makeUser("u1"),
makeAssistant("a1"),
makeToolResult({
toolCallId: "t1",
toolName: "exec",
text: "x".repeat(20_000),
}),
];
const messages = makeSimpleToolPruningMessages();
const handler = createContextHandler();
const first = runContextHandler(handler, messages, sessionManager);