Merge branch 'feat/ask-user-chat-display'

This commit is contained in:
Richard Tang
2026-04-29 19:22:51 -07:00
8 changed files with 270 additions and 82 deletions
+2
View File
@@ -3,6 +3,7 @@ import AppLayout from "./layouts/AppLayout";
import Home from "./pages/home";
import ColonyChat from "./pages/colony-chat";
import QueenDM from "./pages/queen-dm";
import QueenRouting from "./pages/queen-routing";
import OrgChart from "./pages/org-chart";
import PromptLibrary from "./pages/prompt-library";
import SkillsLibrary from "./pages/skills-library";
@@ -16,6 +17,7 @@ function App() {
<Route element={<AppLayout />}>
<Route path="/" element={<Home />} />
<Route path="/colony/:colonyId" element={<ColonyChat />} />
<Route path="/queen-routing" element={<QueenRouting />} />
<Route path="/queen/:queenId" element={<QueenDM />} />
<Route path="/org-chart" element={<OrgChart />} />
<Route path="/skills-library" element={<SkillsLibrary />} />
+22 -9
View File
@@ -91,6 +91,10 @@ interface ChatPanelProps {
activeThread: string;
/** When true, the input is disabled (e.g. during loading) */
disabled?: boolean;
/** When true, only the send button is locked — the textarea stays typable.
* Used during new-session bootstrap so the user can compose a follow-up
* while the queen finishes warming up / streaming her first reply. */
sendLocked?: boolean;
/** When false, the image attach button is hidden (model lacks vision support) */
supportsImages?: boolean;
/** Called when user clicks the stop button to cancel the queen's current turn */
@@ -916,6 +920,7 @@ export default function ChatPanel({
isBusy,
activeThread,
disabled,
sendLocked,
onCancel,
onSteer,
onCancelQueued,
@@ -1401,8 +1406,10 @@ export default function ChatPanel({
);
})}
{/* Show typing indicator while waiting for first queen response (disabled + empty chat) */}
{(isWaiting || (disabled && threadMessages.length === 0)) && (
{/* Show typing indicator while waiting for first queen response
(disabled / sendLocked + empty chat counts as warm-up). */}
{(isWaiting ||
((disabled || sendLocked) && threadMessages.length === 0)) && (
<div className="flex gap-3">
<div
className="flex-shrink-0 w-9 h-9 rounded-xl flex items-center justify-center overflow-hidden"
@@ -1669,9 +1676,11 @@ export default function ChatPanel({
placeholder={
disabled
? "Connecting to agent..."
: isBusy
? "Queue a message — or click Steer to inject now..."
: "Message Queen Bee..."
: sendLocked
? "Type ahead — send unlocks once the queen is ready..."
: isBusy
? "Queue a message — or click Steer to inject now..."
: "Message Queen Bee..."
}
disabled={disabled}
className="flex-1 bg-transparent text-sm text-foreground outline-none placeholder:text-muted-foreground disabled:opacity-50 disabled:cursor-not-allowed resize-none overflow-y-auto"
@@ -1689,12 +1698,16 @@ export default function ChatPanel({
<button
type="submit"
disabled={
(!input.trim() && pendingImages.length === 0) || disabled
(!input.trim() && pendingImages.length === 0) ||
disabled ||
sendLocked
}
title={
isBusy
? "Queue message — sent after the current turn, or click Steer on the bubble to send now"
: "Send"
sendLocked
? "Hold tight — the queen is starting up. Send unlocks once she's ready."
: isBusy
? "Queue message — sent after the current turn, or click Steer on the bubble to send now"
: "Send"
}
className={`p-2 rounded-lg disabled:opacity-30 hover:opacity-90 transition-opacity ${
isBusy
+35 -3
View File
@@ -309,12 +309,44 @@ describe("sseEventToChatMessage", () => {
expect(result!.id).toMatch(/^stream-t-\d+-chat$/);
});
it("returns null for client_input_requested (handled in workspace.tsx)", () => {
it("converts single client_input_requested question to a queen-style bubble", () => {
const event = makeEvent({
type: "client_input_requested",
node_id: "chat",
node_id: "queen",
execution_id: "abc",
data: { prompt: "What next?" },
data: {
questions: [{ id: "q0", prompt: "Which folder?" }],
},
});
const result = sseEventToChatMessage(event, "t");
expect(result).not.toBeNull();
expect(result!.content).toBe("Which folder?");
expect(result!.id).toMatch(/^ask-user-abc-/);
});
it("converts multi-question client_input_requested to a numbered list", () => {
const event = makeEvent({
type: "client_input_requested",
node_id: "queen",
execution_id: "abc",
data: {
questions: [
{ id: "q0", prompt: "Which folder?" },
{ id: "q1", prompt: "Which date range?" },
],
},
});
const result = sseEventToChatMessage(event, "t");
expect(result).not.toBeNull();
expect(result!.content).toBe("1. Which folder?\n2. Which date range?");
});
it("returns null for client_input_requested with no questions (auto-wait park)", () => {
const event = makeEvent({
type: "client_input_requested",
node_id: "queen",
execution_id: "abc",
data: {},
});
expect(sseEventToChatMessage(event, "t")).toBeNull();
});
+41 -3
View File
@@ -140,9 +140,47 @@ export function sseEventToChatMessage(
};
}
case "client_input_requested":
// Handled explicitly in handleSSEEvent (workspace.tsx) for queen input widgets.
return null;
case "client_input_requested": {
// Surface the question(s) as a queen bubble in the chat history so the
// transcript records what was asked alongside the user's answer. The
// input widget at the bottom of the panel still drives the actual
// answer flow — this bubble is read-only context.
const rawQuestions = event.data?.questions;
if (!Array.isArray(rawQuestions) || rawQuestions.length === 0) return null;
const prompts: string[] = [];
for (const q of rawQuestions) {
if (!q || typeof q !== "object") continue;
const qo = q as Record<string, unknown>;
const prompt =
typeof qo.prompt === "string"
? qo.prompt
: typeof qo.question === "string"
? (qo.question as string)
: null;
if (prompt) prompts.push(prompt);
}
if (prompts.length === 0) return null;
const content =
prompts.length === 1
? prompts[0]
: prompts.map((p, i) => `${i + 1}. ${p}`).join("\n");
return {
// Stable per-request id so live + replay paths upsert the same row.
id: `ask-user-${event.execution_id ?? ""}-${event.timestamp ?? createdAt}`,
agent: agentDisplayName || event.node_id || "Agent",
agentColor: "",
content,
timestamp: "",
// Default to worker; the replayEvent wrapper upgrades to "queen"
// when stream_id === "queen". Mirrors llm_text_delta's pattern.
role: "worker",
thread,
createdAt,
nodeId: event.node_id || undefined,
executionId: event.execution_id || undefined,
streamId: event.stream_id || undefined,
};
}
case "client_input_received": {
const userContent = (event.data?.content as string) || "";
+31
View File
@@ -326,6 +326,11 @@ export default function ColonyChat() {
// client_input_requested so we don't flicker the typing bubble off while
// the queen is about to resume on the flushed input.
const queenAboutToResumeRef = useRef(false);
// Question bubble for an ask_user that's actively awaiting an answer.
// Stashed instead of pushed into messages so the user only sees ONE copy
// of the question (the popup widget) while answering. Committed to the
// transcript on client_input_received so it lands above the user's reply.
const pendingAskUserBubbleRef = useRef<ChatMessage | null>(null);
const suppressIntroRef = useRef(false);
const loadingRef = useRef(false);
@@ -710,8 +715,34 @@ export default function ColonyChat() {
case "client_input_received":
case "client_input_requested":
case "llm_text_delta": {
// Defer the queen's ask_user bubble so it doesn't render alongside
// the popup widget. Stash on request, commit on receive — see
// pendingAskUserBubbleRef declaration above for rationale.
let stashedAskUserBubble: ChatMessage | null = null;
if (
event.type === "client_input_requested" &&
isQueen &&
emittedMessages.length > 0
) {
const rawQuestions = event.data?.questions;
if (Array.isArray(rawQuestions) && rawQuestions.length > 0) {
stashedAskUserBubble = emittedMessages[0];
pendingAskUserBubbleRef.current = stashedAskUserBubble;
}
}
if (
event.type === "client_input_received" &&
pendingAskUserBubbleRef.current &&
!suppressQueenMessages
) {
// Commit the stashed bubble first; createdAt predates this
// event so timestamp-ordered insert places it above the answer.
upsertMessage(pendingAskUserBubbleRef.current);
pendingAskUserBubbleRef.current = null;
}
if (!suppressQueenMessages) {
for (const msg of emittedMessages) {
if (msg === stashedAskUserBubble) continue;
if (isQueen) {
msg.phase = queenPhaseRef.current as ChatMessage["phase"];
}
+15 -39
View File
@@ -1,8 +1,8 @@
import { useState, useRef } from "react";
import { useNavigate } from "react-router-dom";
import { Loader2, Send } from "lucide-react";
import { messagesApi } from "@/api/messages";
import { Send } from "lucide-react";
import { useColony } from "@/context/ColonyContext";
import { PENDING_CLASSIFY_KEY } from "./queen-routing";
const promptHints = [
"Check my inbox for urgent emails",
@@ -13,32 +13,25 @@ const promptHints = [
export default function Home() {
const navigate = useNavigate();
const { userProfile, refresh } = useColony();
const { userProfile } = useColony();
const [inputValue, setInputValue] = useState("");
const [submitting, setSubmitting] = useState(false);
const [activePrompt, setActivePrompt] = useState<string | null>(null);
const textareaRef = useRef<HTMLTextAreaElement>(null);
const displayName = userProfile.displayName || "there";
const startQueenSession = async (text: string) => {
// Stash the prompt and bounce to /queen-routing immediately. The classify
// LLM call (2-5s) runs on the routing screen rather than blocking nav, so
// the user never watches a spinner on the home page.
const startQueenSession = (text: string) => {
const trimmed = text.trim();
if (!trimmed || submitting) return;
setSubmitting(true);
setActivePrompt(trimmed);
if (!trimmed) return;
try {
const { queen_id } = await messagesApi.classify(trimmed);
// Hand the first message to queen-dm via sessionStorage so it
// survives the navigation without leaking into the URL/history.
sessionStorage.setItem(`queenFirstMessage:${queen_id}`, trimmed);
refresh();
navigate(`/queen/${queen_id}?new=1`);
sessionStorage.setItem(PENDING_CLASSIFY_KEY, trimmed);
} catch {
// Keep the user on home if bootstrap fails.
} finally {
setSubmitting(false);
setActivePrompt(null);
// sessionStorage disabled — fall through; the routing page will
// redirect back to home when the key is missing.
}
navigate("/queen-routing");
};
const handlePromptHint = (text: string) => {
@@ -97,14 +90,10 @@ export default function Home() {
<div className="absolute right-3 bottom-2.5">
<button
type="submit"
disabled={!inputValue.trim() || submitting}
disabled={!inputValue.trim()}
className="w-8 h-8 rounded-lg bg-primary/90 hover:bg-primary text-primary-foreground flex items-center justify-center transition-colors disabled:opacity-30 disabled:cursor-not-allowed"
>
{submitting && !activePrompt ? (
<Loader2 className="w-3.5 h-3.5 animate-spin" />
) : (
<Send className="w-3.5 h-3.5" />
)}
<Send className="w-3.5 h-3.5" />
</button>
</div>
</div>
@@ -116,25 +105,12 @@ export default function Home() {
<button
key={hint}
onClick={() => handlePromptHint(hint)}
disabled={submitting}
className="text-xs text-muted-foreground hover:text-foreground border border-border/50 hover:border-primary/30 rounded-full px-3.5 py-1.5 transition-all hover:bg-primary/[0.03] disabled:opacity-60 disabled:cursor-not-allowed"
className="text-xs text-muted-foreground hover:text-foreground border border-border/50 hover:border-primary/30 rounded-full px-3.5 py-1.5 transition-all hover:bg-primary/[0.03]"
>
{hint}
</button>
))}
</div>
{submitting && activePrompt && (
<p className="mt-4 text-center text-xs">
<span className="queen-debate-line">
<span>The queens are debating who should take this on</span>
<span aria-hidden="true">
{[0, 1, 2].map((dot) => (
<span key={dot}>.</span>
))}
</span>
</span>
</p>
)}
</div>
</div>
);
+32 -28
View File
@@ -1,6 +1,6 @@
import { useState, useCallback, useRef, useEffect, useMemo } from "react";
import { useParams, useSearchParams } from "react-router-dom";
import { Loader2, Minus, Plus } from "lucide-react";
import { Minus, Plus } from "lucide-react";
import ChatPanel, {
type ChatMessage,
type ImageContent,
@@ -117,6 +117,12 @@ export default function QueenDM() {
// client_input_requested so we don't flicker the typing bubble off while
// the queen is about to resume on the flushed input.
const queenAboutToResumeRef = useRef(false);
// Question bubble for an ask_user that's actively awaiting an answer. We
// stash it here instead of pushing it into messages so the user only sees
// ONE copy of the question (the popup widget) while answering. Committed
// to the transcript on client_input_received so the bubble lands right
// above the user's answer for scroll-back context.
const pendingAskUserBubbleRef = useRef<ChatMessage | null>(null);
const [queenPhase, setQueenPhase] = useState<
"independent" | "incubating" | "working" | "reviewing"
>("independent");
@@ -541,19 +547,11 @@ export default function QueenDM() {
const handleCreateNewSession = useCallback(() => {
if (!queenId) return;
setCreatingNewSession(true);
const request = queensApi.createNewSession(
queenId,
undefined,
"independent",
);
request
.then((result) => {
setSearchParams({ session: result.session_id });
})
.catch(() => {
setCreatingNewSession(false);
});
// Bounce through the ?new=1 bootstrap path so the chat shell appears
// immediately with a typing indicator while createNewSession runs in
// the background. URL is replaced with ?session=<id> when it resolves.
// Avoids the 5s "nothing happens, then chat appears" dead window.
setSearchParams({ new: "1" });
}, [queenId, setSearchParams]);
useEffect(() => {
@@ -662,6 +660,14 @@ export default function QueenDM() {
queenAboutToResumeRef.current = false;
break;
}
// Stash the question bubble (synthesized by replayEvent) instead
// of upserting now: while the popup widget is open the user only
// wants to see ONE copy of the question. We commit the bubble on
// client_input_received so it lands right above the user's
// answer in the transcript.
if (emittedMessages.length > 0) {
pendingAskUserBubbleRef.current = emittedMessages[0];
}
setAwaitingInput(true);
setIsTyping(false);
setIsStreaming(false);
@@ -670,6 +676,14 @@ export default function QueenDM() {
}
case "client_input_received": {
// Commit the stashed ask_user bubble first so it appears above
// the user's reply in scroll-back. Its createdAt predates this
// event's, so the timestamp-ordered insert in upsertMessage
// places it correctly.
if (pendingAskUserBubbleRef.current) {
upsertMessage(pendingAskUserBubbleRef.current);
pendingAskUserBubbleRef.current = null;
}
for (const msg of emittedMessages) {
upsertMessage(msg, { reconcileOptimisticUser: true });
}
@@ -918,19 +932,6 @@ export default function QueenDM() {
<div className="flex flex-col h-full">
{/* Chat */}
<div className="flex-1 min-h-0 relative">
{loading && (
<div className="absolute inset-0 z-10 flex items-center justify-center bg-background/60 backdrop-blur-sm">
<div className="flex items-center gap-3 text-muted-foreground">
<Loader2 className="w-5 h-5 animate-spin" />
<span className="text-sm">
{selectedSessionParam?.startsWith("session_")
? "Connecting to session..."
: `Connecting to ${queenName}...`}
</span>
</div>
</div>
)}
<ChatPanel
messages={messages}
onSend={handleSend}
@@ -940,7 +941,10 @@ export default function QueenDM() {
activeThread="queen-dm"
isWaiting={isTyping && !isStreaming}
isBusy={isTyping}
disabled={loading || !queenReady}
// Keep the textarea typable while the queen is warming up so the
// user can compose a follow-up immediately. Send stays locked
// until the session is live and the queen is ready.
sendLocked={loading || !queenReady}
queenPhase={queenPhase}
showQueenPhaseBadge
pendingQuestions={awaitingInput ? pendingQuestions : null}
+92
View File
@@ -0,0 +1,92 @@
import { useEffect, useRef, useState } from "react";
import { useNavigate } from "react-router-dom";
import { Loader2 } from "lucide-react";
import { messagesApi } from "@/api/messages";
import { useColony } from "@/context/ColonyContext";
/**
* Transient routing screen the user lands on right after submitting from the
* home page. Reads the pending prompt from sessionStorage, runs queen
* classification, and redirects (replace) to the resolved queen DM with
* ?new=1 so the existing bootstrap flow takes over.
*
* The point of this page is to get the user out of the home screen *before*
* the classify LLM call runs — they should never sit on the home page
* watching a spinner.
*/
export const PENDING_CLASSIFY_KEY = "hive:pendingClassifyMessage";
export default function QueenRouting() {
  const navigate = useNavigate();
  const { refresh } = useColony();
  const [error, setError] = useState<string | null>(null);
  // Re-runs of this effect (StrictMode, fast re-mounts) must not re-fire the
  // classify call — once we've grabbed the pending message we own it.
  const startedRef = useRef(false);
  // Tracks whether the component is currently mounted. A per-run `cancelled`
  // closure flag would break under React 18 StrictMode's mount → cleanup →
  // re-mount cycle: the first run's in-flight classify() would see a stale
  // `cancelled = true` while the re-run bails on startedRef, so navigation
  // never fires and the spinner hangs forever in dev. A ref re-armed on
  // every effect run survives that cycle.
  const mountedRef = useRef(false);
  useEffect(() => {
    // Re-arm on every run (including the startedRef-guarded re-run) so the
    // single in-flight classify() sees the component as mounted again.
    mountedRef.current = true;
    const cleanup = () => {
      mountedRef.current = false;
    };
    if (startedRef.current) return cleanup;
    startedRef.current = true;
    let pending: string | null = null;
    try {
      pending = sessionStorage.getItem(PENDING_CLASSIFY_KEY);
      // Consume the key immediately so a refresh/back-nav can't replay it.
      if (pending) sessionStorage.removeItem(PENDING_CLASSIFY_KEY);
    } catch {
      // sessionStorage can throw in private/locked-down browsing modes.
      pending = null;
    }
    if (!pending || !pending.trim()) {
      // Deep link or refresh with nothing queued — bounce back home.
      navigate("/", { replace: true });
      return cleanup;
    }
    const trimmed = pending.trim();
    (async () => {
      try {
        const { queen_id } = await messagesApi.classify(trimmed);
        if (!mountedRef.current) return;
        // Hand the prompt off to queen-dm via the same key its bootstrap
        // path already consumes. Avoids leaking the message into the URL.
        sessionStorage.setItem(`queenFirstMessage:${queen_id}`, trimmed);
        refresh();
        navigate(`/queen/${queen_id}?new=1`, { replace: true });
      } catch {
        if (!mountedRef.current) return;
        setError("Couldn't route your request. Try again from the home screen.");
      }
    })();
    return cleanup;
  }, [navigate, refresh]);
  return (
    <div className="flex-1 flex flex-col items-center justify-center p-6">
      <div className="flex items-center gap-3 text-muted-foreground">
        <Loader2 className="w-5 h-5 animate-spin" />
        <span className="queen-debate-line text-sm">
          <span>The queens are debating who should take this on</span>
          <span aria-hidden="true">
            {[0, 1, 2].map((dot) => (
              <span key={dot}>.</span>
            ))}
          </span>
        </span>
      </div>
      {error && (
        <div className="mt-6 flex flex-col items-center gap-3">
          <p className="text-sm text-destructive">{error}</p>
          <button
            onClick={() => navigate("/", { replace: true })}
            className="text-xs text-muted-foreground hover:text-foreground border border-border/50 hover:border-primary/30 rounded-full px-3.5 py-1.5 transition-all"
          >
            Back to home
          </button>
        </div>
      )}
    </div>
  );
}