"""Integration tests for the ea_chatbot LangGraph workflow.

Every LLM factory used by the graph nodes is patched with a ``MagicMock``,
then the compiled graph (``app``) is driven end-to-end.  Assertions cover
the routing decision (``next_action``) and the content of the final message.
"""

import pytest
from unittest.mock import MagicMock, patch

from langchain_core.messages import AIMessage

from ea_chatbot.graph.workflow import app
from ea_chatbot.graph.nodes.query_analyzer import QueryAnalysis
from ea_chatbot.schemas import TaskPlanResponse, TaskPlanContext, CodeGenerationResponse


def _make_initial_state(question):
    """Return the empty graph state shared by every test, varying only the question."""
    return {
        "messages": [],
        "question": question,
        "analysis": None,
        "next_action": "",
        "plan": None,
        "code": None,
        "error": None,
        "plots": [],
        "dfs": {},
    }


def _mock_structured_llm(factory_mock, response):
    """Wire a get_llm_model mock so ``.with_structured_output(...).invoke(...)`` yields *response*.

    Returns the mocked LLM instance in case the caller needs further wiring.
    """
    instance = MagicMock()
    factory_mock.return_value = instance
    instance.with_structured_output.return_value.invoke.return_value = response
    return instance


@pytest.fixture
def mock_llms():
    """Patch every node's LLM factory plus the data-summary helper.

    Yields a dict of the patched ``get_llm_model`` mocks keyed by node name so
    each test can configure only the nodes its flow exercises.  The
    conversation-summary LLM is pre-wired here because every flow ends by
    summarizing the turn.
    """
    with patch("ea_chatbot.graph.nodes.query_analyzer.get_llm_model") as mock_qa_llm, \
         patch("ea_chatbot.graph.nodes.planner.get_llm_model") as mock_planner_llm, \
         patch("ea_chatbot.graph.nodes.coder.get_llm_model") as mock_coder_llm, \
         patch("ea_chatbot.graph.nodes.summarizer.get_llm_model") as mock_summarizer_llm, \
         patch("ea_chatbot.graph.nodes.researcher.get_llm_model") as mock_researcher_llm, \
         patch("ea_chatbot.graph.nodes.summarize_conversation.get_llm_model") as mock_summary_llm, \
         patch("ea_chatbot.utils.database_inspection.get_data_summary") as mock_get_summary:

        mock_get_summary.return_value = "Data summary"

        # The turn-summary LLM is invoked in every flow; give it a canned reply.
        mock_summary_instance = MagicMock()
        mock_summary_llm.return_value = mock_summary_instance
        mock_summary_instance.invoke.return_value = AIMessage(content="Turn summary")

        yield {
            "qa": mock_qa_llm,
            "planner": mock_planner_llm,
            "coder": mock_coder_llm,
            "summarizer": mock_summarizer_llm,
            "researcher": mock_researcher_llm,
            "summary": mock_summary_llm,
        }


def test_workflow_data_analysis_flow(mock_llms):
    """Test full flow: QueryAnalyzer -> Planner -> Coder -> Executor -> Summarizer."""
    # 1. Query Analyzer routes to "plan".
    _mock_structured_llm(
        mock_llms["qa"],
        QueryAnalysis(
            data_required=["2024 results"],
            unknowns=[],
            ambiguities=[],
            conditions=[],
            next_action="plan",
        ),
    )

    # 2. Planner emits a one-step plan.
    _mock_structured_llm(
        mock_llms["planner"],
        TaskPlanResponse(
            goal="Get results",
            reflection="Reflect",
            context=TaskPlanContext(initial_context="Ctx", assumptions=[], constraints=[]),
            steps=["Step 1"],
        ),
    )

    # 3. Coder emits code whose execution output we can assert on.
    _mock_structured_llm(
        mock_llms["coder"],
        CodeGenerationResponse(
            code="print('Execution Success')",
            explanation="Explain",
        ),
    )

    # 4. Summarizer produces the final user-facing message.
    mock_summarizer_instance = MagicMock()
    mock_llms["summarizer"].return_value = mock_summarizer_instance
    mock_summarizer_instance.invoke.return_value = AIMessage(content="Final Summary: Success")

    # Run the graph.
    result = app.invoke(
        _make_initial_state("Show me 2024 results"),
        config={"recursion_limit": 15},
    )

    assert result["next_action"] == "plan"
    assert "Execution Success" in result["code_output"]
    assert "Final Summary: Success" in result["messages"][-1].content


def test_workflow_research_flow(mock_llms):
    """Test flow: QueryAnalyzer -> Researcher -> Summarizer."""
    # 1. Query Analyzer routes to "research".
    _mock_structured_llm(
        mock_llms["qa"],
        QueryAnalysis(
            data_required=[],
            unknowns=[],
            ambiguities=[],
            conditions=[],
            next_action="research",
        ),
    )

    # 2. Researcher.  The node binds tools only for real ChatOpenAI /
    # ChatGoogleGenerativeAI models; with a MagicMock it falls back to the
    # base instance, so both paths are wired to the same canned reply.
    mock_researcher_instance = MagicMock()
    mock_llms["researcher"].return_value = mock_researcher_instance
    mock_researcher_instance.invoke.return_value = AIMessage(content="Research Results")
    mock_llm_with_tools = MagicMock()
    mock_researcher_instance.bind_tools.return_value = mock_llm_with_tools
    mock_llm_with_tools.invoke.return_value = AIMessage(content="Research Results")

    # 3. Summarizer (not used in this flow, but kept for completeness).
    mock_summarizer_instance = MagicMock()
    mock_llms["summarizer"].return_value = mock_summarizer_instance
    mock_summarizer_instance.invoke.return_value = AIMessage(content="Final Summary: Research Success")

    # Run the graph.
    result = app.invoke(
        _make_initial_state("Who is the governor of Florida?"),
        config={"recursion_limit": 10},
    )

    assert result["next_action"] == "research"
    assert "Research Results" in result["messages"][-1].content