# Unit tests for the researcher graph node (ea_chatbot.graph.nodes.researcher).
from unittest.mock import MagicMock, patch

import pytest
from langchain_core.messages import AIMessage
from langchain_openai import ChatOpenAI

from ea_chatbot.graph.nodes.researcher import researcher_node

@pytest.fixture
def mock_llm():
    """Patch the researcher module's LLM factory for the duration of a test.

    Replaces ``get_llm_model`` in the researcher module with a factory that
    returns a ``MagicMock`` specced as ``ChatOpenAI``, and yields that mock so
    tests can stub its methods and assert on calls. The patch is undone when
    the test finishes.
    """
    patcher = patch("ea_chatbot.graph.nodes.researcher.get_llm_model")
    factory = patcher.start()
    fake_model = MagicMock(spec=ChatOpenAI)
    factory.return_value = fake_model
    try:
        yield fake_model
    finally:
        # Always undo the patch, even if the test raised.
        patcher.stop()


def test_researcher_node_success(mock_llm):
    """researcher_node binds the web_search tool, invokes the LLM, and returns its reply.

    Uses the ``mock_llm`` fixture so no real OpenAI client is constructed.
    """
    state = {"question": "What is the capital of France?", "messages": []}

    # Stub the tool-bound model that bind_tools() hands back to the node.
    bound_model = MagicMock()
    mock_llm.bind_tools.return_value = bound_model
    bound_model.invoke.return_value = AIMessage(content="The capital of France is Paris.")

    result = researcher_node(state)

    # The node must bind tools, and specifically include the web_search tool.
    assert mock_llm.bind_tools.called
    bound_args, _bound_kwargs = mock_llm.bind_tools.call_args
    assert {"type": "web_search"} in bound_args[0]

    # The bound model was invoked and its answer is surfaced under "messages".
    assert bound_model.invoke.called
    assert "messages" in result
    assert result["messages"][0].content == "The capital of France is Paris."