feat: surface ask_user questions in chat transcript

bryan
2026-04-28 18:16:47 -07:00
parent 4ef951447d
commit fe9a903928
3 changed files with 81 additions and 6 deletions
+35 -3
@@ -309,12 +309,44 @@ describe("sseEventToChatMessage", () => {
     expect(result!.id).toMatch(/^stream-t-\d+-chat$/);
   });
-  it("returns null for client_input_requested (handled in workspace.tsx)", () => {
+  it("converts single client_input_requested question to a queen-style bubble", () => {
     const event = makeEvent({
       type: "client_input_requested",
-      node_id: "chat",
+      node_id: "queen",
       execution_id: "abc",
-      data: { prompt: "What next?" },
+      data: {
+        questions: [{ id: "q0", prompt: "Which folder?" }],
+      },
     });
     const result = sseEventToChatMessage(event, "t");
+    expect(result).not.toBeNull();
+    expect(result!.content).toBe("Which folder?");
+    expect(result!.id).toMatch(/^ask-user-abc-/);
+  });
+  it("converts multi-question client_input_requested to a numbered list", () => {
+    const event = makeEvent({
+      type: "client_input_requested",
+      node_id: "queen",
+      execution_id: "abc",
+      data: {
+        questions: [
+          { id: "q0", prompt: "Which folder?" },
+          { id: "q1", prompt: "Which date range?" },
+        ],
+      },
+    });
+    const result = sseEventToChatMessage(event, "t");
+    expect(result).not.toBeNull();
+    expect(result!.content).toBe("1. Which folder?\n2. Which date range?");
+  });
+  it("returns null for client_input_requested with no questions (auto-wait park)", () => {
+    const event = makeEvent({
+      type: "client_input_requested",
+      node_id: "queen",
+      execution_id: "abc",
+      data: {},
+    });
     expect(sseEventToChatMessage(event, "t")).toBeNull();
   });
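
Note: the tests above lean on a makeEvent helper defined earlier in this spec file and not included in the diff. A minimal sketch of what such a helper plausibly looks like, assuming a loose event shape built from the fields the tests set explicitly; the interface name and the default values here are illustrative, not code from this repo:

// Hypothetical sketch; the real makeEvent lives elsewhere in the spec file.
interface SSEEventLike {
  type: string;
  node_id?: string;
  execution_id?: string;
  stream_id?: string;
  timestamp?: string;
  data?: Record<string, unknown>;
}

function makeEvent(overrides: Partial<SSEEventLike> & { type: string }): SSEEventLike {
  // Defaults keep each test focused on only the fields it overrides.
  return { node_id: "queen", execution_id: "exec-1", data: {}, ...overrides };
}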
+41 -3
@@ -140,9 +140,47 @@ export function sseEventToChatMessage(
       };
     }
-    case "client_input_requested":
-      // Handled explicitly in handleSSEEvent (workspace.tsx) for queen input widgets.
-      return null;
+    case "client_input_requested": {
+      // Surface the question(s) as a queen bubble in the chat history so the
+      // transcript records what was asked alongside the user's answer. The
+      // input widget at the bottom of the panel still drives the actual
+      // answer flow — this bubble is read-only context.
+      const rawQuestions = event.data?.questions;
+      if (!Array.isArray(rawQuestions) || rawQuestions.length === 0) return null;
+      const prompts: string[] = [];
+      for (const q of rawQuestions) {
+        if (!q || typeof q !== "object") continue;
+        const qo = q as Record<string, unknown>;
+        const prompt =
+          typeof qo.prompt === "string"
+            ? qo.prompt
+            : typeof qo.question === "string"
+              ? (qo.question as string)
+              : null;
+        if (prompt) prompts.push(prompt);
+      }
+      if (prompts.length === 0) return null;
+      const content =
+        prompts.length === 1
+          ? prompts[0]
+          : prompts.map((p, i) => `${i + 1}. ${p}`).join("\n");
+      return {
+        // Stable per-request id so live + replay paths upsert the same row.
+        id: `ask-user-${event.execution_id ?? ""}-${event.timestamp ?? createdAt}`,
+        agent: agentDisplayName || event.node_id || "Agent",
+        agentColor: "",
+        content,
+        timestamp: "",
+        // Default to worker; the replayEvent wrapper upgrades to "queen"
+        // when stream_id === "queen". Mirrors llm_text_delta's pattern.
+        role: "worker",
+        thread,
+        createdAt,
+        nodeId: event.node_id || undefined,
+        executionId: event.execution_id || undefined,
+        streamId: event.stream_id || undefined,
+      };
+    }
     case "client_input_received": {
       const userContent = (event.data?.content as string) || "";
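
Note: for orientation, this is roughly how the new branch behaves when called directly. A usage sketch mirroring the multi-question test; the event literal, the timestamp value, and the "t" thread argument are illustrative, and the real parameter types come from this file's definitions, not from this sketch:

// Usage sketch only; assumes the event shape exercised by the tests above.
const msg = sseEventToChatMessage(
  {
    type: "client_input_requested",
    node_id: "queen",
    execution_id: "abc",
    timestamp: "1714353407",
    data: {
      questions: [
        { id: "q0", prompt: "Which folder?" },
        { id: "q1", prompt: "Which date range?" },
      ],
    },
  },
  "t",
);
// msg!.content => "1. Which folder?\n2. Which date range?"
// msg!.id      => "ask-user-abc-1714353407"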
+5
@@ -662,6 +662,11 @@ export default function QueenDM() {
         queenAboutToResumeRef.current = false;
         break;
       }
+      // Drop the queen's question into the transcript so it lives
+      // alongside the user's answer when scrolling back. Synthesized
+      // by replayEvent above; upsert by id so cold-replay doesn't
+      // duplicate it.
+      for (const m of emittedMessages) upsertMessage(m);
       setAwaitingInput(true);
       setIsTyping(false);
       setIsStreaming(false);
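
Note: the de-duplication above depends on upsertMessage keying messages by id; its implementation is outside this diff. A minimal sketch of the keyed-upsert idea as a pure function, under the assumption that messages are held in an ordered array (ChatMessage here is a stand-in type, not the app's real one):

type ChatMessage = { id: string; content: string };

// Hypothetical sketch; the real upsertMessage is not shown in this commit.
// Live and cold-replay paths synthesize the ask-user bubble with the same
// stable id, so the second arrival overwrites instead of appending.
function upsertById(list: ChatMessage[], msg: ChatMessage): ChatMessage[] {
  const i = list.findIndex((m) => m.id === msg.id);
  if (i === -1) return [...list, msg]; // first arrival: append
  const next = list.slice();
  next[i] = msg; // replay: replace in place
  return next;
}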