feat(chat): Implement real-time SSE streaming with reasoning steps and improved UI indicators.

This commit is contained in:
Yunxiao Xu
2026-02-13 00:00:50 -08:00
parent af731413af
commit 339f69a2a3
14 changed files with 777 additions and 17 deletions

View File

@@ -0,0 +1,22 @@
county,voters,pct_total,rank_by_size
Bergen,637753,10.46,1
Middlesex,531951,8.72,2
Essex,499446,8.19,3
Monmouth,472627,7.75,4
Ocean,453981,7.45,5
Hudson,374651,6.14,6
Morris,368252,6.04,7
Camden,359742,5.90,8
Union,354205,5.81,9
Burlington,340761,5.59,10
Passaic,313061,5.13,11
Somerset,241463,3.96,12
Mercer,241236,3.96,13
Gloucester,217083,3.56,14
Atlantic,189627,3.11,15
Sussex,110789,1.82,16
Hunterdon,100606,1.65,17
Cumberland,90934,1.49,18
Warren,81642,1.34,19
Cape May,72299,1.19,20
Salem,45018,0.74,21
1 county voters pct_total rank_by_size
2 Bergen 637753 10.46 1
3 Middlesex 531951 8.72 2
4 Essex 499446 8.19 3
5 Monmouth 472627 7.75 4
6 Ocean 453981 7.45 5
7 Hudson 374651 6.14 6
8 Morris 368252 6.04 7
9 Camden 359742 5.90 8
10 Union 354205 5.81 9
11 Burlington 340761 5.59 10
12 Passaic 313061 5.13 11
13 Somerset 241463 3.96 12
14 Mercer 241236 3.96 13
15 Gloucester 217083 3.56 14
16 Atlantic 189627 3.11 15
17 Sussex 110789 1.82 16
18 Hunterdon 100606 1.65 17
19 Cumberland 90934 1.49 18
20 Warren 81642 1.34 19
21 Cape May 72299 1.19 20
22 Salem 45018 0.74 21

View File

@@ -11,7 +11,7 @@ from ea_chatbot.history.models import User as UserDB, Conversation
from ea_chatbot.api.schemas import ChatRequest
import io
import base64
from langchain_core.messages import BaseMessage
from langchain_core.runnables.config import RunnableConfig
router = APIRouter(prefix="/chat", tags=["agent"])
@@ -39,7 +39,7 @@ async def stream_agent_events(
"dfs": {}
}
config = {"configurable": {"thread_id": thread_id}}
config: RunnableConfig = {"configurable": {"thread_id": thread_id}}
assistant_chunks: List[str] = []
assistant_plots: List[bytes] = []
@@ -56,23 +56,26 @@ async def stream_agent_events(
):
kind = event.get("event")
name = event.get("name")
node_name = event.get("metadata", {}).get("langgraph_node", name)
data = event.get("data", {})
# Standardize event for frontend
output_event = {
"type": kind,
"name": name,
"node": node_name,
"data": data
}
# Buffer assistant chunks (summarizer and researcher might stream)
if kind == "on_chat_model_stream" and name in ["summarizer", "researcher"]:
if kind == "on_chat_model_stream" and node_name in ["summarizer", "researcher", "clarification"]:
chunk = data.get("chunk", "")
# Use utility to safely extract text content from the chunk
chunk_data = convert_to_json_compatible(chunk)
if isinstance(chunk_data, dict) and "content" in chunk_data:
assistant_chunks.append(str(chunk_data["content"]))
else:
# TODO: need better way to handle this
assistant_chunks.append(str(chunk_data))
# Buffer and encode plots
@@ -80,7 +83,7 @@ async def stream_agent_events(
output = data.get("output", {})
if isinstance(output, dict) and "plots" in output:
plots = output["plots"]
encoded_plots = []
encoded_plots: list[str] = []
for fig in plots:
buf = io.BytesIO()
fig.savefig(buf, format="png")
@@ -131,7 +134,7 @@ async def stream_agent_events(
except Exception as e:
error_msg = f"Agent execution failed: {str(e)}"
history_manager.add_message(thread_id, "assistant", error_msg)
yield f"data: {json.dumps({'type': 'error', 'message': error_msg})}\n\n"
yield f"data: {json.dumps({'type': 'error', 'data': {'message': error_msg}})}\n\n"
@router.post("/stream")
async def chat_stream(

View File

@@ -52,6 +52,11 @@ def decode_access_token(token: str) -> Optional[dict]:
def convert_to_json_compatible(obj: Any) -> Any:
"""Recursively convert LangChain objects, Pydantic models, and others to JSON compatible formats."""
# Handle known non-serializable types first to avoid recursion
type_name = type(obj).__name__
if type_name == "Figure" or type_name == "DataFrame":
return f"<{type_name} object>"
if isinstance(obj, list):
return [convert_to_json_compatible(item) for item in obj]
elif isinstance(obj, dict):
@@ -91,4 +96,11 @@ def convert_to_json_compatible(obj: Any) -> Any:
return str(obj.content)
elif isinstance(obj, (datetime, timezone)):
return obj.isoformat()
return obj
# Final fallback for any other types that might not be JSON serializable
import json
try:
json.dumps(obj)
return obj
except (TypeError, OverflowError):
return str(obj)

View File

@@ -4,7 +4,9 @@ import { MainLayout } from "./components/layout/MainLayout"
import { LoginForm } from "./components/auth/LoginForm"
import { RegisterForm } from "./components/auth/RegisterForm"
import { AuthCallback } from "./components/auth/AuthCallback"
import { ChatInterface } from "./components/chat/ChatInterface"
import { AuthService, type UserResponse } from "./services/auth"
import { ChatService } from "./services/chat"
import { registerUnauthorizedCallback } from "./services/api"
function App() {
@@ -12,6 +14,7 @@ function App() {
const [user, setUser] = useState<UserResponse | null>(null)
const [authMode, setAuthMode] = useState<"login" | "register">("login")
const [isLoading, setIsLoading] = useState(true)
const [selectedThreadId, setSelectedThreadId] = useState<string | null>(null)
useEffect(() => {
// Register callback to handle session expiration from anywhere in the app
@@ -55,6 +58,17 @@ function App() {
} finally {
setIsAuthenticated(false)
setUser(null)
setSelectedThreadId(null)
}
}
const handleCreateTempChat = async () => {
try {
const conv = await ChatService.createConversation("Temporary Chat")
setSelectedThreadId(conv.id)
} catch (err) {
console.error("Failed to create conversation:", err)
alert("Failed to start chat session. Please try again.")
}
}
@@ -88,13 +102,12 @@ function App() {
</div>
) : (
<MainLayout>
<div className="flex flex-col gap-4">
<div className="flex justify-between items-center">
<div className="flex flex-col h-full gap-4">
<div className="flex justify-between items-center shrink-0">
<div>
<h1 className="text-2xl font-bold">
<h1 className="text-xl font-bold">
Welcome, {user?.display_name || user?.email || "User"}!
</h1>
<p className="text-sm text-muted-foreground">{user?.email}</p>
</div>
<button
onClick={handleLogout}
@@ -103,9 +116,43 @@ function App() {
Logout
</button>
</div>
<p className="text-muted-foreground mt-4">
Select a conversation from the sidebar or start a new one to begin your analysis.
</p>
<div className="flex-1 min-h-0">
{selectedThreadId ? (
<ChatInterface threadId={selectedThreadId} />
) : (
<div className="flex flex-col items-center justify-center h-full text-center space-y-4 bg-muted/30 rounded-xl border border-dashed p-12">
<div className="p-4 bg-background rounded-full shadow-sm">
<svg
className="w-12 h-12 text-primary"
fill="none"
stroke="currentColor"
viewBox="0 0 24 24"
xmlns="http://www.w3.org/2000/svg"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M8 10h.01M12 10h.01M16 10h.01M9 16H5a2 2 0 01-2-2V6a2 2 0 012-2h14a2 2 0 012 2v8a2 2 0 01-2 2h-5l-5 5v-5z"
/>
</svg>
</div>
<div className="max-w-xs space-y-2">
<h2 className="text-lg font-semibold">Ready to analyze election data?</h2>
<p className="text-sm text-muted-foreground">
Create a new conversation in the sidebar to start asking questions.
</p>
<button
onClick={handleCreateTempChat}
className="mt-4 px-4 py-2 bg-primary text-primary-foreground rounded-md text-sm font-medium hover:bg-primary/90"
>
Start Temporary Chat
</button>
</div>
</div>
)}
</div>
</div>
</MainLayout>
)

View File

@@ -0,0 +1,38 @@
import * as React from "react"
import { SendIcon } from "lucide-react"
import { Button } from "@/components/ui/button"
import { Input } from "@/components/ui/input"
interface ChatInputProps {
onSendMessage: (message: string) => void
disabled?: boolean
}
/**
 * Text input row for the chat: a single-line field plus a send button.
 * Invokes onSendMessage with the raw (untrimmed) draft and clears the field;
 * submission is blocked while disabled or when the draft is only whitespace.
 */
export function ChatInput({ onSendMessage, disabled }: ChatInputProps) {
  const [draft, setDraft] = React.useState("")

  const handleSubmit = (e: React.FormEvent) => {
    e.preventDefault()
    // Guard clause: nothing meaningful to send, or sending is disallowed.
    if (disabled || draft.trim().length === 0) return
    onSendMessage(draft)
    setDraft("")
  }

  return (
    <form onSubmit={handleSubmit} className="flex w-full items-center space-x-2 p-4 border-t bg-background">
      <Input
        type="text"
        placeholder="Type your question about election data..."
        value={draft}
        onChange={(e) => setDraft(e.target.value)}
        disabled={disabled}
        className="flex-1"
      />
      <Button type="submit" size="icon" disabled={disabled || draft.trim().length === 0}>
        <SendIcon className="h-4 w-4" />
        <span className="sr-only">Send</span>
      </Button>
    </form>
  )
}

View File

@@ -0,0 +1,62 @@
import { render, screen, fireEvent, waitFor } from "@testing-library/react"
import { describe, it, expect, vi, beforeEach } from "vitest"
import { ChatInterface } from "./ChatInterface"
import { ChatService, type StreamCallbacks } from "@/services/chat"
vi.mock("@/services/chat", () => ({
ChatService: {
streamChat: vi.fn(),
}
}))
// Component tests for ChatInterface. ChatService is mocked above, so these
// exercise only rendering and the wiring between input, service, and error UI.
describe("ChatInterface", () => {
  beforeEach(() => {
    // Clear mock call history and implementations between tests.
    vi.resetAllMocks()
  })

  it("renders correctly with initial messages", () => {
    const initialMessages = [
      { id: "1", role: "user" as const, content: "Hello", created_at: new Date().toISOString() }
    ]
    render(<ChatInterface threadId="test-thread" initialMessages={initialMessages} />)
    expect(screen.getByText("Hello")).toBeInTheDocument()
  })

  it("calls streamChat when a message is sent", async () => {
    render(<ChatInterface threadId="test-thread" />)
    const input = screen.getByPlaceholderText(/Type your question/i)
    const sendButton = screen.getByRole("button", { name: /send/i })
    fireEvent.change(input, { target: { value: "Tell me about New Jersey" } })
    fireEvent.click(sendButton)
    // Third argument is the current message history (empty before first send).
    expect(ChatService.streamChat).toHaveBeenCalledWith(
      "Tell me about New Jersey",
      "test-thread",
      [],
      expect.any(Object)
    )
  })

  it("displays error message when stream fails", async () => {
    const mockedStreamChat = vi.mocked(ChatService.streamChat)
    // Simulate the service reporting a transport failure via its callback.
    mockedStreamChat.mockImplementation((_msg: string, _id: string, _msgs: any[], callbacks: StreamCallbacks) => {
      if (callbacks.onError) {
        callbacks.onError("Connection failed")
      }
      return Promise.resolve()
    })
    render(<ChatInterface threadId="test-thread" />)
    const input = screen.getByPlaceholderText(/Type your question/i)
    fireEvent.change(input, { target: { value: "test" } })
    fireEvent.click(screen.getByRole("button", { name: /send/i }))
    await waitFor(() => {
      expect(screen.getByText("Connection failed")).toBeInTheDocument()
    })
  })
})

View File

@@ -0,0 +1,79 @@
import * as React from "react"
import { MessageList } from "./MessageList"
import { ChatInput } from "./ChatInput"
import { ChatService, type MessageResponse } from "@/services/chat"
import { AlertCircle } from "lucide-react"
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"
interface ChatInterfaceProps {
threadId: string
initialMessages?: MessageResponse[]
}
const EMPTY_MESSAGES: MessageResponse[] = []
/**
 * Chat panel for a single conversation thread.
 * Holds the message list in local state, streams assistant replies through
 * ChatService.streamChat, and surfaces stream errors in an alert banner.
 * Input is disabled while a response is streaming.
 */
export function ChatInterface({ threadId, initialMessages = EMPTY_MESSAGES }: ChatInterfaceProps) {
  const [messages, setMessages] = React.useState<MessageResponse[]>(initialMessages)
  const [isStreaming, setIsStreaming] = React.useState(false)
  const [error, setError] = React.useState<string | null>(null)

  // Reset local state when the thread (or its loaded history) changes.
  React.useEffect(() => {
    setMessages(initialMessages)
    setError(null)
  }, [threadId, initialMessages])

  /** Kick off a streaming exchange; callbacks re-enable input when it settles. */
  const handleSendMessage = async (text: string) => {
    setError(null)
    setIsStreaming(true)
    try {
      await ChatService.streamChat(
        text,
        threadId,
        messages,
        {
          onMessageUpdate: (updatedMessages) => {
            setMessages(updatedMessages)
          },
          onDone: () => {
            setIsStreaming(false)
          },
          onError: (err) => {
            setError(err)
            setIsStreaming(false)
          }
        }
      )
    } catch (err: unknown) {
      // streamChat handles its own transport errors; this catches setup failures.
      const errorMessage = err instanceof Error ? err.message : "Failed to start chat"
      setError(errorMessage)
      setIsStreaming(false)
    }
  }

  return (
    <div className="flex flex-col h-full bg-background rounded-xl border shadow-lg overflow-hidden">
      {error && (
        <Alert variant="destructive" className="m-4">
          <AlertCircle className="h-4 w-4" />
          <AlertTitle>Error</AlertTitle>
          <AlertDescription>{error}</AlertDescription>
        </Alert>
      )}
      <MessageList messages={messages} />
      <ChatInput
        onSendMessage={handleSendMessage}
        disabled={isStreaming}
      />
    </div>
  )
}

View File

@@ -0,0 +1,76 @@
import { cn } from "@/lib/utils"
import { type MessageResponse } from "@/services/chat"
interface MessageBubbleProps {
message: MessageResponse
}
/**
 * Renders a single chat message bubble.
 * Assistant messages are left-aligned with secondary styling and may show
 * reasoning steps, an animated typing indicator, and base64-encoded plot
 * images; user messages are right-aligned with primary styling.
 */
export function MessageBubble({ message }: MessageBubbleProps) {
  const isAssistant = message.role === "assistant"
  // Local aliases avoid repeated optional chaining and non-null assertions.
  const steps = message.steps ?? []
  const plots = message.plots ?? []
  // Typing dots show only while an assistant bubble has no text and no plots yet.
  const showTypingIndicator = isAssistant && !message.content && plots.length === 0
  return (
    <div
      className={cn(
        "flex w-full mb-4",
        isAssistant ? "justify-start" : "justify-end"
      )}
    >
      <div
        className={cn(
          "max-w-[80%] rounded-lg p-4 shadow-sm",
          isAssistant
            ? "bg-secondary text-secondary-foreground"
            : "bg-primary text-primary-foreground"
        )}
      >
        {isAssistant && steps.length > 0 && (
          <div className="mb-3 space-y-1 border-b border-secondary-foreground/10 pb-2">
            {steps.map((step, index) => {
              // The most recent step is emphasized; older steps fade out.
              const isLast = index === steps.length - 1
              return (
                <div
                  key={index}
                  className={cn(
                    "flex items-center gap-2 transition-all duration-300",
                    isLast ? "text-xs font-medium opacity-80" : "text-[10px] opacity-40"
                  )}
                >
                  <div className={cn("rounded-full bg-current", isLast ? "h-1 w-1" : "h-0.5 w-0.5")} />
                  {step}
                </div>
              )
            })}
          </div>
        )}
        <div className="whitespace-pre-wrap break-words text-sm">
          {message.content || (showTypingIndicator ? (
            <div className="flex items-center gap-1 py-1">
              <div className="flex gap-1">
                <div className="h-1.5 w-1.5 rounded-full bg-current animate-bounce [animation-delay:-0.3s]" />
                <div className="h-1.5 w-1.5 rounded-full bg-current animate-bounce [animation-delay:-0.15s]" />
                <div className="h-1.5 w-1.5 rounded-full bg-current animate-bounce" />
              </div>
            </div>
          ) : "")}
        </div>
        {plots.length > 0 && (
          <div className="mt-4 grid grid-cols-1 gap-2">
            {plots.map((plot, index) => (
              <img
                key={index}
                src={`data:image/png;base64,${plot}`}
                alt="Analysis Plot"
                className="rounded-md border bg-white w-full h-auto cursor-pointer hover:opacity-90 transition-opacity"
                onClick={() => {
                  // TODO: Open in modal (Phase 5)
                }}
              />
            ))}
          </div>
        )}
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,34 @@
import * as React from "react"
import { MessageBubble } from "./MessageBubble"
import { type MessageResponse } from "@/services/chat"
interface MessageListProps {
messages: MessageResponse[]
}
export function MessageList({ messages }: MessageListProps) {
const scrollRef = React.useRef<HTMLDivElement>(null)
React.useEffect(() => {
if (scrollRef.current) {
scrollRef.current.scrollTop = scrollRef.current.scrollHeight
}
}, [messages])
return (
<div
ref={scrollRef}
className="flex-1 overflow-y-auto p-4 flex flex-col"
>
{messages.length === 0 ? (
<div className="flex-1 flex items-center justify-center text-muted-foreground text-sm">
No messages yet. Ask a question to get started!
</div>
) : (
messages.map((msg) => (
<MessageBubble key={msg.id} message={msg} />
))
)}
</div>
)
}

View File

@@ -8,7 +8,7 @@ interface MainLayoutProps {
export function MainLayout({ children }: MainLayoutProps) {
return (
<SidebarProvider>
<div className="flex min-h-screen w-full">
<div className="flex h-screen w-full overflow-hidden">
<Sidebar role="complementary">
<SidebarHeader>
<div className="p-4 font-bold text-xl">EA Chatbot</div>
@@ -20,12 +20,12 @@ export function MainLayout({ children }: MainLayoutProps) {
<div className="p-4 text-xs text-muted-foreground">© 2026 Election Analytics</div>
</SidebarFooter>
</Sidebar>
<SidebarInset className="flex flex-col flex-1">
<SidebarInset className="flex flex-col flex-1 h-full overflow-hidden">
<header className="flex h-16 shrink-0 items-center gap-2 border-b px-4" role="navigation">
<SidebarTrigger />
<div className="font-semibold">Chat</div>
</header>
<main className="flex-1 overflow-auto p-6">
<main className="flex-1 flex flex-col p-6 overflow-hidden bg-muted/10">
{children}
</main>
</SidebarInset>

View File

@@ -0,0 +1,58 @@
import * as React from "react"
import { cva, type VariantProps } from "class-variance-authority"
import { cn } from "@/lib/utils"
// Tailwind class recipe for the Alert container.
// The [&>svg] selectors absolutely position a leading icon and pad sibling
// content so the alert text clears it; variants switch the color scheme.
const alertVariants = cva(
  "relative w-full rounded-lg border p-4 [&>svg~*]:pl-7 [&>svg+div]:translate-y-[-3px] [&>svg]:absolute [&>svg]:left-4 [&>svg]:top-4 [&>svg]:text-foreground",
  {
    variants: {
      variant: {
        default: "bg-background text-foreground",
        destructive:
          "border-destructive/50 text-destructive dark:border-destructive [&>svg]:text-destructive",
      },
    },
    defaultVariants: {
      variant: "default",
    },
  }
)
/**
 * Root alert container. Forwards its ref to the rendered <div> and merges
 * caller classes on top of the variant classes; extra props spread last so
 * callers can override defaults.
 */
const Alert = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement> & VariantProps<typeof alertVariants>
>(({ className, variant, ...rest }, forwardedRef) => (
  <div
    ref={forwardedRef}
    role="alert"
    className={cn(alertVariants({ variant }), className)}
    {...rest}
  />
))
Alert.displayName = "Alert"
/**
 * Bold heading line of an Alert. Renders an <h5>, so the ref and attribute
 * generics use HTMLHeadingElement (the previous HTMLParagraphElement types
 * mis-described the rendered element and gave callers an unsound ref type).
 */
const AlertTitle = React.forwardRef<
  HTMLHeadingElement,
  React.HTMLAttributes<HTMLHeadingElement>
>(({ className, ...props }, ref) => (
  <h5
    ref={ref}
    className={cn("mb-1 font-medium leading-none tracking-tight", className)}
    {...props}
  />
))
AlertTitle.displayName = "AlertTitle"
/**
 * Body text of an Alert. Renders a <div> (so nested <p> tags stay valid —
 * see the [&_p] selector), therefore the ref and attribute generics use
 * HTMLDivElement; the previous HTMLParagraphElement ref type was unsound.
 */
const AlertDescription = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn("text-sm [&_p]:leading-relaxed", className)}
    {...props}
  />
))
AlertDescription.displayName = "AlertDescription"
export { Alert, AlertTitle, AlertDescription }

View File

@@ -0,0 +1,67 @@
import { describe, it, expect, vi } from "vitest"
import { ChatService, type ChatEvent } from "./chat"
// Unit tests for ChatService's pure helpers: SSE chunk parsing and
// immutable message-state updates. No network or React rendering involved.
describe("ChatService SSE Parsing", () => {
  it("should correctly parse a text stream chunk", () => {
    const rawChunk = `data: {"type": "on_chat_model_stream", "name": "summarizer", "data": {"chunk": "Hello"}}\n\n`
    const events = ChatService.parseSSEChunk(rawChunk)
    expect(events).toHaveLength(1)
    expect(events[0]).toEqual({
      type: "on_chat_model_stream",
      name: "summarizer",
      data: { chunk: "Hello" }
    })
  })

  it("should handle multiple events in one chunk", () => {
    // Two complete "data:" frames can arrive in a single network read.
    const rawChunk =
      `data: {"type": "on_chat_model_stream", "name": "summarizer", "data": {"chunk": "Hello"}}\n\n` +
      `data: {"type": "on_chat_model_stream", "name": "summarizer", "data": {"chunk": " World"}}\n\n`
    const events = ChatService.parseSSEChunk(rawChunk)
    expect(events).toHaveLength(2)
    expect(events[1].data.chunk).toBe(" World")
  })

  it("should parse encoded plots from executor node", () => {
    const rawChunk = `data: {"type": "on_chain_end", "name": "executor", "data": {"encoded_plots": ["base64data"]}}\n\n`
    const events = ChatService.parseSSEChunk(rawChunk)
    expect(events[0].data.encoded_plots).toEqual(["base64data"])
  })

  it("should identify the done event", () => {
    const rawChunk = `data: {"type": "done"}\n\n`
    const events = ChatService.parseSSEChunk(rawChunk)
    expect(events[0].type).toBe("done")
  })
})

describe("ChatService Message State Management", () => {
  it("should append text chunks to the last message content", () => {
    const messages = [{ id: "1", role: "assistant", content: "Initial", created_at: new Date().toISOString() }]
    // Stream chunks carry the originating graph node in `node`.
    const event: ChatEvent = {
      type: "on_chat_model_stream",
      node: "summarizer",
      data: { chunk: { content: " text" } }
    }
    const updatedMessages = ChatService.updateMessagesWithEvent(messages as any, event)
    expect(updatedMessages[0].content).toBe("Initial text")
  })

  it("should add plots to the message state", () => {
    const messages = [{ id: "1", role: "assistant", content: "Analysis", created_at: new Date().toISOString(), plots: [] }]
    // Chain-end events carry the node name in `name` instead of `node`.
    const event: ChatEvent = {
      type: "on_chain_end",
      name: "executor",
      data: { encoded_plots: ["plot1"] }
    }
    const updatedMessages = ChatService.updateMessagesWithEvent(messages as any, event)
    expect(updatedMessages[0].plots).toEqual(["plot1"])
  })
})

View File

@@ -0,0 +1,263 @@
import api from "./api"
/** A single chat message as held in UI state and rendered by MessageBubble. */
export interface MessageResponse {
  id: string
  role: "user" | "assistant"
  content: string
  created_at: string
  plots?: string[] // base64 encoded plots
  steps?: string[] // reasoning steps
}

/** One server-sent event forwarded from the agent stream endpoint. */
export interface ChatEvent {
  type: string
  name?: string // runnable/event name as reported by the backend
  node?: string // graph node the event originated from (langgraph_node metadata)
  data?: any // payload; shape varies per event type
}

/** Observer hooks invoked while a chat response streams in. */
export interface StreamCallbacks {
  onMessageUpdate: (messages: MessageResponse[]) => void
  onDone?: () => void
  onError?: (error: string) => void
}
// Nodes whose LLM token stream is shown directly to the user.
const TERMINAL_NODES = new Set(["summarizer", "researcher", "clarification"])

// Progress label appended when a graph node starts executing.
// Shared between start and end handling so the strings cannot drift apart.
const NODE_START_STEPS: Record<string, string> = {
  "query_analyzer": "Analyzing query...",
  "planner": "Generating strategic plan...",
  "coder": "Writing analysis code...",
  "executor": "Performing data analysis..."
}

// Completion label that replaces the corresponding start label for
// intermediate nodes (executor completion is handled with the plots).
const NODE_DONE_STEPS: Record<string, string> = {
  "query_analyzer": "Query analysis complete.",
  "planner": "Strategic plan generated.",
  "coder": "Analysis code generated."
}

export const ChatService = {
  /**
   * Parse a raw SSE buffer into ChatEvent objects.
   *
   * Handles partial lines: if the buffer does not end with a newline, the
   * trailing fragment is returned as `remaining` so the caller can prepend
   * it to the next network chunk. Malformed JSON frames are logged and
   * skipped so one bad event does not kill the stream.
   */
  parseSSEBuffer(buffer: string): { events: ChatEvent[], remaining: string } {
    const events: ChatEvent[] = []
    const lines = buffer.split("\n")
    // The last element might be a partial line if it doesn't end with \n
    const remaining = buffer.endsWith("\n") ? "" : lines.pop() || ""
    for (const line of lines) {
      if (!line.startsWith("data: ")) continue
      const dataStr = line.slice(6).trim()
      if (!dataStr) continue
      try {
        events.push(JSON.parse(dataStr))
      } catch (err) {
        console.error("Failed to parse SSE event JSON:", err, dataStr)
      }
    }
    return { events, remaining }
  },

  /** Legacy method for backward compatibility in tests. */
  parseSSEChunk(chunk: string): ChatEvent[] {
    return this.parseSSEBuffer(chunk).events
  },

  /**
   * Update a list of messages based on a new ChatEvent.
   *
   * Pure function: returns a new array when anything changed and the input
   * array untouched otherwise, so it is safe for React state updates.
   * Events only ever mutate the trailing assistant placeholder message.
   */
  updateMessagesWithEvent(messages: MessageResponse[], event: ChatEvent): MessageResponse[] {
    const { type, name, node, data } = event

    // 1. Incremental LLM token chunks from user-facing (terminal) nodes.
    if (type === "on_chat_model_stream" && TERMINAL_NODES.has(node ?? "")) {
      const chunk = data?.chunk?.content || ""
      if (!chunk) return messages
      const newMessages = [...messages]
      const lastIndex = newMessages.length - 1
      const lastMsg = { ...newMessages[lastIndex] }
      if (lastMsg.role === "assistant") {
        lastMsg.content = (lastMsg.content || "") + chunk
        newMessages[lastIndex] = lastMsg
      }
      return newMessages
    }

    // 2. Final outputs when a graph node finishes.
    if (type === "on_chain_end") {
      const lastIndex = messages.length - 1
      const lastMsg = { ...messages[lastIndex] }
      if (lastMsg.role !== "assistant") return messages
      const newMessages = [...messages]

      // Terminal nodes: replace streamed text with the node's final message.
      if (name && TERMINAL_NODES.has(name)) {
        const outputMessages = data?.output?.messages
        const finalText = outputMessages ? outputMessages[outputMessages.length - 1]?.content : null
        if (finalText) {
          lastMsg.content = finalText
          newMessages[lastIndex] = lastMsg
          return newMessages
        }
      }

      // Executor: attach generated plots and mark the analysis step done.
      if (name === "executor" && data?.encoded_plots) {
        lastMsg.plots = [...(lastMsg.plots || []), ...data.encoded_plots]
        const filteredSteps = (lastMsg.steps || []).filter(s => s !== NODE_START_STEPS["executor"])
        lastMsg.steps = [...filteredSteps, "Data analysis and visualization complete."]
        newMessages[lastIndex] = lastMsg
        return newMessages
      }

      // Intermediate nodes: swap the "in progress" step for its "done" label.
      if (name && NODE_DONE_STEPS[name]) {
        const activeStep = NODE_START_STEPS[name]
        const filteredSteps = (lastMsg.steps || []).filter(s => s !== activeStep)
        lastMsg.steps = [...filteredSteps, NODE_DONE_STEPS[name]]
        newMessages[lastIndex] = lastMsg
        return newMessages
      }
    }

    // 3. Node start events: append a progress step to the assistant bubble.
    if (type === "on_chain_start" && name && NODE_START_STEPS[name]) {
      const step = NODE_START_STEPS[name]
      const lastIndex = messages.length - 1
      const lastMsg = { ...messages[lastIndex] }
      // Avoid duplicate start messages for re-entered nodes.
      if (lastMsg.role === "assistant" && !(lastMsg.steps || []).includes(step)) {
        lastMsg.steps = [...(lastMsg.steps || []), step]
        const newMessages = [...messages]
        newMessages[lastIndex] = lastMsg
        return newMessages
      }
    }

    return messages
  },

  /**
   * Stream agent execution events via SSE.
   *
   * Uses fetch + ReadableStream because the backend endpoint is a POST.
   * Optimistically appends the user message plus an empty assistant
   * placeholder, then folds incoming events into that placeholder.
   */
  async streamChat(
    message: string,
    threadId: string,
    currentMessages: MessageResponse[],
    callbacks: StreamCallbacks
  ) {
    const { onMessageUpdate, onDone, onError } = callbacks
    let activeMessages: MessageResponse[] = [
      ...currentMessages,
      {
        id: `user-${Date.now()}`,
        role: "user",
        content: message,
        created_at: new Date().toISOString()
      },
      {
        id: `assistant-${Date.now()}`,
        role: "assistant",
        content: "",
        created_at: new Date().toISOString(),
        plots: []
      }
    ]
    onMessageUpdate(activeMessages)

    // Routes one parsed event to the right callback / state update.
    const handleEvent = (event: ChatEvent) => {
      if (event.type === "done") {
        if (onDone) onDone()
        return
      }
      if (event.type === "error") {
        if (onError) onError(event.data?.message || "Unknown error")
        return
      }
      activeMessages = this.updateMessagesWithEvent(activeMessages, event)
      onMessageUpdate(activeMessages)
    }

    let buffer = ""
    try {
      const API_URL = import.meta.env.VITE_API_URL || ""
      const response = await fetch(`${API_URL}/api/v1/chat/stream`, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          message,
          thread_id: threadId
        }),
        credentials: "include"
      })
      if (!response.ok) {
        throw new Error(`Streaming failed: ${response.statusText}`)
      }
      const reader = response.body?.getReader()
      if (!reader) throw new Error("No readable stream in response body")
      const decoder = new TextDecoder()
      while (true) {
        const { done, value } = await reader.read()
        if (done) break
        buffer += decoder.decode(value, { stream: true })
        const { events, remaining } = this.parseSSEBuffer(buffer)
        buffer = remaining
        events.forEach(handleEvent)
      }
      // Flush any multi-byte sequence still held by the decoder and process
      // a trailing event that arrived without a final newline (previously
      // such an event was silently dropped).
      buffer += decoder.decode()
      if (buffer) {
        this.parseSSEBuffer(buffer + "\n").events.forEach(handleEvent)
      }
    } catch (err: unknown) {
      console.error("Streaming error:", err)
      const errorMessage = err instanceof Error ? err.message : "Connection failed"
      if (onError) onError(errorMessage)
    }
  },

  /** List all conversations for the current user. */
  async listConversations() {
    const response = await api.get("/conversations")
    return response.data
  },

  /** Create a new conversation; the name defaults to "New Conversation". */
  async createConversation(name: string = "New Conversation") {
    const response = await api.post("/conversations", { name })
    return response.data
  },

  /** Fetch the stored messages for one conversation. */
  async getMessages(conversationId: string) {
    const response = await api.get(`/conversations/${conversationId}/messages`)
    return response.data
  }
}

View File

@@ -1,6 +1,5 @@
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@/*": [
"./src/*"