feat: Add intelligent auto-router and enhanced integrations

- Add intelligent-router.sh hook for automatic agent routing
- Add AUTO-TRIGGER-SUMMARY.md documentation
- Add FINAL-INTEGRATION-SUMMARY.md documentation
- Complete Prometheus integration (6 commands + 4 tools)
- Complete Dexto integration (12 commands + 5 tools)
- Enhanced Ralph with access to all agents
- Fix /clawd command (removed disable-model-invocation)
- Update hooks.json to v5 with intelligent routing
- 291 total skills now available
- All 21 commands with automatic routing

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
admin
2026-01-28 00:27:56 +04:00
Unverified
parent 3b128ba3bd
commit b52318eeae
1724 changed files with 351216 additions and 0 deletions

View File

View File

@@ -0,0 +1,60 @@
from unittest.mock import Mock
import pytest
from langchain_core.language_models.chat_models import BaseChatModel
from prometheus.docker.base_container import BaseContainer
from prometheus.git.git_repository import GitRepository
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.graphs.issue_graph import IssueGraph
@pytest.fixture
def mock_advanced_model():
    """Mock standing in for the advanced BaseChatModel."""
    fake_model = Mock(spec=BaseChatModel)
    return fake_model
@pytest.fixture
def mock_base_model():
    """Mock standing in for the base BaseChatModel."""
    fake_model = Mock(spec=BaseChatModel)
    return fake_model
@pytest.fixture
def mock_kg():
    """Mocked KnowledgeGraph with a fixed AST node-type list and root id 0."""
    graph = Mock(spec=KnowledgeGraph)
    node_types = ["FunctionDef", "ClassDef", "Module", "Import", "Call"]
    graph.get_all_ast_node_types.return_value = node_types
    graph.root_node_id = 0
    return graph
@pytest.fixture
def mock_git_repo():
    """Mocked GitRepository exposing only a playground path."""
    repo = Mock(spec=GitRepository)
    repo.playground_path = "mock/playground/path"
    return repo
@pytest.fixture
def mock_container():
    """Mock standing in for a BaseContainer."""
    fake_container = Mock(spec=BaseContainer)
    return fake_container
def test_issue_graph_basic_initialization(
    mock_advanced_model,
    mock_base_model,
    mock_kg,
    mock_git_repo,
    mock_container,
):
    """IssueGraph should build its internal graph and keep the git repo reference."""
    issue_graph = IssueGraph(
        advanced_model=mock_advanced_model,
        base_model=mock_base_model,
        kg=mock_kg,
        git_repo=mock_git_repo,
        container=mock_container,
        repository_id=1,
    )
    assert issue_graph.graph is not None
    assert issue_graph.git_repo == mock_git_repo

View File

@@ -0,0 +1,137 @@
from langchain_core.messages import HumanMessage
from prometheus.lang_graph.nodes.add_context_refined_query_message_node import (
AddContextRefinedQueryMessageNode,
)
from prometheus.models.query import Query
def test_add_context_refined_query_message_node_with_all_fields():
    """All three populated query fields should appear in the produced message."""
    node = AddContextRefinedQueryMessageNode()
    query = Query(
        essential_query="Find all authentication logic",
        extra_requirements="Include error handling and validation",
        purpose="Security audit",
    )
    result = node({"refined_query": query})
    assert "context_provider_messages" in result
    messages = result["context_provider_messages"]
    assert len(messages) == 1
    assert isinstance(messages[0], HumanMessage)
    content = messages[0].content
    assert "Essential query: Find all authentication logic" in content
    assert "Extra requirements: Include error handling and validation" in content
    assert "Purpose: Security audit" in content
def test_add_context_refined_query_message_node_essential_query_only():
    """Empty optional fields should be omitted from the message entirely."""
    node = AddContextRefinedQueryMessageNode()
    query = Query(
        essential_query="Locate the main entry point",
        extra_requirements="",
        purpose="",
    )
    result = node({"refined_query": query})
    assert "context_provider_messages" in result
    messages = result["context_provider_messages"]
    assert len(messages) == 1
    content = messages[0].content
    assert "Essential query: Locate the main entry point" in content
    # Labels for blank fields must not leak into the message.
    assert "Extra requirements:" not in content
    assert "Purpose:" not in content
def test_add_context_refined_query_message_node_with_extra_requirements_only():
    """Only essential_query and extra_requirements should be rendered."""
    node = AddContextRefinedQueryMessageNode()
    query = Query(
        essential_query="Find database queries",
        extra_requirements="Focus on SQL injection vulnerabilities",
        purpose="",
    )
    result = node({"refined_query": query})
    assert "context_provider_messages" in result
    content = result["context_provider_messages"][0].content
    assert "Essential query: Find database queries" in content
    assert "Extra requirements: Focus on SQL injection vulnerabilities" in content
    assert "Purpose:" not in content
def test_add_context_refined_query_message_node_with_purpose_only():
    """Only essential_query and purpose should be rendered."""
    node = AddContextRefinedQueryMessageNode()
    query = Query(
        essential_query="Identify all API endpoints",
        extra_requirements="",
        purpose="Documentation generation",
    )
    result = node({"refined_query": query})
    assert "context_provider_messages" in result
    content = result["context_provider_messages"][0].content
    assert "Essential query: Identify all API endpoints" in content
    assert "Extra requirements:" not in content
    assert "Purpose: Documentation generation" in content
def test_add_context_refined_query_message_node_returns_list():
    """The node must return a one-element list holding a HumanMessage."""
    node = AddContextRefinedQueryMessageNode()
    query = Query(
        essential_query="Test query",
        extra_requirements="Test requirements",
        purpose="Test purpose",
    )
    messages = node({"refined_query": query})["context_provider_messages"]
    assert isinstance(messages, list)
    assert len(messages) == 1
    assert isinstance(messages[0], HumanMessage)
def test_add_context_refined_query_message_node_message_format():
    """The message lines are newline-joined in a fixed field order."""
    node = AddContextRefinedQueryMessageNode()
    query = Query(
        essential_query="Query text",
        extra_requirements="Requirements text",
        purpose="Purpose text",
    )
    result = node({"refined_query": query})
    expected = "\n".join(
        [
            "Essential query: Query text",
            "Extra requirements: Requirements text",
            "Purpose: Purpose text",
        ]
    )
    assert result["context_provider_messages"][0].content == expected

View File

@@ -0,0 +1,52 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import AIMessage, HumanMessage
from prometheus.docker.base_container import BaseContainer
from prometheus.lang_graph.nodes.bug_fix_verify_node import BugFixVerifyNode
from prometheus.lang_graph.subgraphs.bug_fix_verification_state import BugFixVerificationState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Mock standing in for a BaseContainer."""
    fake_container = Mock(spec=BaseContainer)
    return fake_container
@pytest.fixture
def test_state():
    """BugFixVerificationState describing an already-reproduced bug."""
    raw_state = {
        "reproduced_bug_file": "test_bug.py",
        "reproduced_bug_commands": ["python test_bug.py", "./run_test.sh"],
        "bug_fix_verify_messages": [AIMessage(content="Previous verification result")],
    }
    return BugFixVerificationState(raw_state)
@pytest.fixture
def fake_llm():
    """Scripted chat model that always replies with a fixed string."""
    canned = ["Test execution completed"]
    return FakeListChatWithToolsModel(responses=canned)
def test_format_human_message(mock_container, fake_llm, test_state):
    """The formatted human message should mention the bug file and commands."""
    verify_node = BugFixVerifyNode(fake_llm, mock_container)
    human_message = verify_node.format_human_message(test_state)
    assert isinstance(human_message, HumanMessage)
    for expected in ("test_bug.py", "python test_bug.py", "./run_test.sh"):
        assert expected in human_message.content
def test_call_method(mock_container, fake_llm, test_state):
    """__call__ should append exactly one scripted model response."""
    verify_node = BugFixVerifyNode(fake_llm, mock_container)
    output = verify_node(test_state)
    assert "bug_fix_verify_messages" in output
    replies = output["bug_fix_verify_messages"]
    assert len(replies) == 1
    assert replies[0].content == "Test execution completed"

View File

@@ -0,0 +1,74 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import AIMessage, HumanMessage
from prometheus.docker.base_container import BaseContainer
from prometheus.lang_graph.nodes.bug_reproducing_execute_node import BugReproducingExecuteNode
from prometheus.lang_graph.subgraphs.bug_reproduction_state import BugReproductionState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Mock standing in for a BaseContainer."""
    fake_container = Mock(spec=BaseContainer)
    return fake_container
@pytest.fixture
def test_state():
    """BugReproductionState carrying an issue plus a pending reproduction patch."""
    raw_state = {
        "issue_title": "Test Bug",
        "issue_body": "Bug description",
        "issue_comments": ["Comment 1", "Comment 2"],
        "bug_context": "Context of the bug",
        "bug_reproducing_write_messages": [AIMessage(content="patch")],
        "bug_reproducing_file_messages": [AIMessage(content="path")],
        "bug_reproducing_execute_messages": [],
        "bug_reproducing_patch": "--- /dev/null\n+++ b/newfile\n@@ -0,0 +1 @@\n+content",
    }
    return BugReproductionState(raw_state)
def test_format_human_message_with_test_commands(mock_container, test_state):
    """User-supplied test commands and the bug file path must be included."""
    scripted_llm = FakeListChatWithToolsModel(responses=["test"])
    node = BugReproducingExecuteNode(
        scripted_llm, mock_container, ["pytest", "python -m unittest"]
    )
    message = node.format_human_message(test_state, "/foo/bar/test.py")
    assert isinstance(message, HumanMessage)
    for fragment in (
        "Test Bug",
        "Bug description",
        "Comment 1",
        "Comment 2",
        "pytest",
        "/foo/bar/test.py",
    ):
        assert fragment in message.content
def test_format_human_message_without_test_commands(mock_container, test_state):
    """Without commands, the commands header still renders with an empty body."""
    scripted_llm = FakeListChatWithToolsModel(responses=["test"])
    node = BugReproducingExecuteNode(scripted_llm, mock_container)
    message = node.format_human_message(test_state, "/foo/bar/test.py")
    assert isinstance(message, HumanMessage)
    for fragment in (
        "Test Bug",
        "Bug description",
        "User provided test commands:\n",
        "/foo/bar/test.py",
    ):
        assert fragment in message.content
def test_call_method(mock_container, test_state):
    """__call__ should append exactly one response from the scripted model."""
    canned_reply = "Test execution completed"
    node = BugReproducingExecuteNode(
        FakeListChatWithToolsModel(responses=[canned_reply]), mock_container
    )
    result = node(test_state)
    assert "bug_reproducing_execute_messages" in result
    replies = result["bug_reproducing_execute_messages"]
    assert len(replies) == 1
    assert replies[0].content == canned_reply

View File

@@ -0,0 +1,68 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.nodes.bug_reproducing_file_node import BugReproducingFileNode
from prometheus.lang_graph.subgraphs.bug_reproduction_state import BugReproductionState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_kg():
    """Mocked KnowledgeGraph returning a tiny canned file tree."""
    graph = Mock(spec=KnowledgeGraph)
    graph.get_file_tree.return_value = "test_dir/\n test_file.py"
    return graph
@pytest.fixture
def fake_llm():
    """Scripted chat model that always replies with a file name."""
    canned = ["test_output.py"]
    return FakeListChatWithToolsModel(responses=canned)
@pytest.fixture
def basic_state():
    """BugReproductionState whose write step already produced a failing test."""
    fields = dict(
        issue_title="mock issue title",
        issue_body="mock issue body",
        issue_comments=[],
        max_refined_query_loop=3,
        bug_reproducing_query="mock query",
        bug_reproducing_context=[],
        bug_reproducing_patch="",
        bug_reproducing_write_messages=[AIMessage(content="def test_bug():\n assert 1 == 2")],
        bug_reproducing_file_messages=[],
        bug_reproducing_execute_messages=[],
        reproduced_bug=False,
        reproduced_bug_failure_log="",
        reproduced_bug_file="",
        reproduced_bug_commands=[],
    )
    return BugReproductionState(**fields)
def test_initialization(mock_kg, fake_llm):
    """Node construction should set a system prompt and register two file tools."""
    node = BugReproducingFileNode(fake_llm, mock_kg, "test/path")
    assert isinstance(node.system_prompt, SystemMessage)
    # Expected tools: read_file and create_file.
    assert len(node.tools) == 2
def test_format_human_message(mock_kg, fake_llm, basic_state):
    """The formatted message should embed the previously written test code."""
    node = BugReproducingFileNode(fake_llm, mock_kg, "test/path")
    human_message = node.format_human_message(basic_state)
    assert isinstance(human_message, HumanMessage)
    assert "def test_bug():" in human_message.content
def test_call_method(mock_kg, fake_llm, basic_state):
    """__call__ should append the scripted model reply as one message."""
    node = BugReproducingFileNode(fake_llm, mock_kg, "test/path")
    output = node(basic_state)
    assert "bug_reproducing_file_messages" in output
    file_messages = output["bug_reproducing_file_messages"]
    assert len(file_messages) == 1
    assert file_messages[0].content == "test_output.py"

View File

@@ -0,0 +1,49 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import HumanMessage
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.nodes.bug_reproducing_write_node import BugReproducingWriteNode
from prometheus.lang_graph.subgraphs.bug_reproduction_state import BugReproductionState
from tests.test_utils.fixtures import temp_test_dir # noqa: F401
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_kg():
    """Bare mocked KnowledgeGraph with no behavior configured."""
    return Mock(spec=KnowledgeGraph)
@pytest.fixture
def test_state():
    """BugReproductionState with issue comments keyed by username."""
    fields = dict(
        issue_title="Test Bug",
        issue_body="Bug description",
        issue_comments=[{"user1": "Comment 1"}, {"user2": "Comment 2"}],
        max_refined_query_loop=3,
        bug_reproducing_query="mock query",
        bug_reproducing_context=[],
        bug_reproducing_write_messages=[HumanMessage("assert x == 10")],
        bug_reproducing_file_messages=[],
        bug_reproducing_execute_messages=[],
        bug_reproducing_patch="",
        reproduced_bug=False,
        reproduced_bug_failure_log="Test failure log",
        reproduced_bug_file="test/file.py",
        reproduced_bug_commands=[],
    )
    return BugReproductionState(**fields)
def test_call_method(mock_kg, test_state, temp_test_dir):  # noqa: F811
    """__call__ should append the scripted model reply as one message."""
    canned_reply = "Created test file"
    writer = BugReproducingWriteNode(
        FakeListChatWithToolsModel(responses=[canned_reply]), temp_test_dir, mock_kg
    )
    output = writer(test_state)
    assert "bug_reproducing_write_messages" in output
    write_messages = output["bug_reproducing_write_messages"]
    assert len(write_messages) == 1
    assert write_messages[0].content == canned_reply

View File

@@ -0,0 +1,41 @@
import pytest
from langchain_core.messages import AIMessage, ToolMessage
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.nodes.context_provider_node import ContextProviderNode
from tests.test_utils import test_project_paths
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture(scope="function")
async def knowledge_graph_fixture():
    """Build a real KnowledgeGraph over the bundled test project.

    NOTE(review): async fixture — presumably an asyncio-aware pytest plugin is
    configured for this project; confirm, otherwise the fixture yields a coroutine.
    """
    graph = KnowledgeGraph(1000, 100, 10, 0)
    await graph.build_graph(test_project_paths.TEST_PROJECT_PATH)
    return graph
@pytest.mark.slow
async def test_context_provider_node_basic_query(knowledge_graph_fixture):
    """The node should reply with the scripted model response for a basic query."""
    canned_reply = "Fake response"
    node = ContextProviderNode(
        model=FakeListChatWithToolsModel(responses=[canned_reply]),
        kg=knowledge_graph_fixture,
        local_path=test_project_paths.TEST_PROJECT_PATH,
    )
    history = [
        AIMessage(content="This code handles file processing"),
        ToolMessage(content="Found implementation in utils.py", tool_call_id="test_tool_call_1"),
    ]
    result = node(
        {
            "original_query": "How does the error handling work?",
            "context_provider_messages": history,
        }
    )
    assert "context_provider_messages" in result
    replies = result["context_provider_messages"]
    assert len(replies) == 1
    assert replies[0].content == canned_reply

View File

@@ -0,0 +1,127 @@
from unittest.mock import patch
import pytest
from langchain_core.messages import HumanMessage
from prometheus.lang_graph.nodes.edit_message_node import EditMessageNode
from prometheus.models.context import Context
@pytest.fixture
def edit_node():
    """EditMessageNode wired to the bug-fix context and analyzer message keys."""
    return EditMessageNode(
        context_key="bug_fix_context",
        analyzer_message_key="issue_bug_analyzer_messages",
    )
@pytest.fixture
def base_state():
    """Issue state with one context snippet and one analyzer message."""
    context_entry = Context(
        relative_path="foobar.py",
        content="# Context 1",
        start_line_number=1,
        end_line_number=1,
    )
    return {
        "issue_title": "Test Bug",
        "issue_body": "This is a test bug description",
        "issue_comments": [
            {"username": "user1", "comment": "Comment 1"},
            {"username": "user2", "comment": "Comment 2"},
        ],
        "bug_fix_context": [context_entry],
        "issue_bug_analyzer_messages": ["Analysis message"],
    }
def test_first_message_formatting(edit_node, base_state):
    """The first edit message combines issue info, context and the last analysis."""
    with patch(
        "prometheus.lang_graph.nodes.edit_message_node.format_issue_info"
    ) as mock_format_issue, patch(
        "prometheus.lang_graph.nodes.edit_message_node.get_last_message_content"
    ) as mock_last_message:
        mock_format_issue.return_value = "Formatted Issue Info"
        mock_last_message.return_value = "Last Analysis Message"
        result = edit_node(base_state)
    assert isinstance(result, dict)
    assert "edit_messages" in result
    edit_messages = result["edit_messages"]
    assert len(edit_messages) == 1
    assert isinstance(edit_messages[0], HumanMessage)
    content = edit_messages[0].content
    for fragment in ("Formatted Issue Info", "# Context 1", "Last Analysis Message"):
        assert fragment in content
def test_followup_message_with_build_fail(edit_node, base_state):
    """A build failure log should be echoed with the revision instruction."""
    base_state["build_fail_log"] = "Build failed: error in compilation"
    with patch(
        "prometheus.lang_graph.nodes.edit_message_node.get_last_message_content"
    ) as mock_last_message:
        mock_last_message.return_value = "Last Analysis Message"
        result = edit_node(base_state)
    content = result["edit_messages"][0].content
    assert "Build failed: error in compilation" in content
    assert "Please implement these revised changes carefully" in content
def test_followup_message_with_test_fail(edit_node, base_state):
    """A reproducing-test failure log should be echoed with the revision instruction."""
    base_state["reproducing_test_fail_log"] = "Test failed: assertion error"
    with patch(
        "prometheus.lang_graph.nodes.edit_message_node.get_last_message_content"
    ) as mock_last_message:
        mock_last_message.return_value = "Last Analysis Message"
        result = edit_node(base_state)
    content = result["edit_messages"][0].content
    assert "Test failed: assertion error" in content
    assert "Please implement these revised changes carefully" in content
def test_followup_message_with_existing_test_fail(edit_node, base_state):
    """An existing-test failure log should be echoed with the revision instruction."""
    base_state["existing_test_fail_log"] = "Existing test failed"
    with patch(
        "prometheus.lang_graph.nodes.edit_message_node.get_last_message_content"
    ) as mock_last_message:
        mock_last_message.return_value = "Last Analysis Message"
        result = edit_node(base_state)
    content = result["edit_messages"][0].content
    assert "Existing test failed" in content
    assert "Please implement these revised changes carefully" in content
def test_error_priority(edit_node, base_state):
    """Reproducing-test failures take priority over build and existing-test logs."""
    base_state["reproducing_test_fail_log"] = "Test failed"
    base_state["build_fail_log"] = "Build failed"
    base_state["existing_test_fail_log"] = "Existing test failed"
    with patch(
        "prometheus.lang_graph.nodes.edit_message_node.get_last_message_content"
    ) as mock_last_message:
        mock_last_message.return_value = "Last Analysis Message"
        result = edit_node(base_state)
    content = result["edit_messages"][0].content
    # Only the highest-priority log should be present.
    assert "Test failed" in content
    assert "Build failed" not in content
    assert "Existing test failed" not in content

View File

@@ -0,0 +1,41 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import HumanMessage, SystemMessage
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.nodes.edit_node import EditNode
from tests.test_utils.fixtures import temp_test_dir # noqa: F401
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_kg():
    """Bare mocked KnowledgeGraph with no behavior configured."""
    return Mock(spec=KnowledgeGraph)
@pytest.fixture
def fake_llm():
    """Scripted chat model that always reports a successful edit."""
    canned = ["File edit completed successfully"]
    return FakeListChatWithToolsModel(responses=canned)
def test_init_edit_node(mock_kg, fake_llm, temp_test_dir):  # noqa: F811
    """EditNode should expose a system prompt, five tools, and a bound model."""
    node = EditNode(fake_llm, temp_test_dir, mock_kg)
    assert isinstance(node.system_prompt, SystemMessage)
    # Five file-operation tools are expected to be registered.
    assert len(node.tools) == 5
    assert node.model_with_tools is not None
def test_call_method_basic(mock_kg, fake_llm, temp_test_dir):  # noqa: F811
    """A plain call should append the scripted model response."""
    node = EditNode(fake_llm, temp_test_dir, mock_kg)
    request = HumanMessage(content="Make the following changes: ...")
    result = node({"edit_messages": [request]})
    assert "edit_messages" in result
    edit_messages = result["edit_messages"]
    assert len(edit_messages) == 1
    assert edit_messages[0].content == "File edit completed successfully"

View File

@@ -0,0 +1,64 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import HumanMessage
from prometheus.docker.base_container import BaseContainer
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.nodes.general_build_node import GeneralBuildNode
from prometheus.lang_graph.subgraphs.build_and_test_state import BuildAndTestState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Mock standing in for a BaseContainer."""
    fake_container = Mock(spec=BaseContainer)
    return fake_container
@pytest.fixture
def mock_kg():
    """Mocked KnowledgeGraph returning a gradle-flavored file tree."""
    graph = Mock(spec=KnowledgeGraph)
    graph.get_file_tree.return_value = ".\n├── src\n│ └── main.py\n└── build.gradle"
    return graph
@pytest.fixture
def fake_llm():
    """Scripted chat model that always reports a successful build."""
    canned = ["Build command executed successfully"]
    return FakeListChatWithToolsModel(responses=canned)
def test_format_human_message_basic(mock_container, mock_kg, fake_llm):
    """The message should embed the project file tree."""
    node = GeneralBuildNode(fake_llm, mock_container, mock_kg)
    message = node.format_human_message(BuildAndTestState({}))
    assert isinstance(message, HumanMessage)
    assert "project structure is:" in message.content
    assert mock_kg.get_file_tree() in message.content
def test_format_human_message_with_build_summary(mock_container, mock_kg, fake_llm):
    """A prior build summary should be quoted in the message."""
    node = GeneralBuildNode(fake_llm, mock_container, mock_kg)
    state = BuildAndTestState({"build_command_summary": "Previous build used gradle"})
    content = node.format_human_message(state).content
    assert "Previous build used gradle" in content
    assert "The previous build summary is:" in content
def test_call_method_with_no_build(mock_container, mock_kg, fake_llm):
    """When exist_build is False the node short-circuits with a notice."""
    node = GeneralBuildNode(fake_llm, mock_container, mock_kg)
    result = node(BuildAndTestState({"exist_build": False}))
    assert "build_messages" in result
    build_messages = result["build_messages"]
    assert len(build_messages) == 1
    assert "Previous agent determined there is no build system" in build_messages[0].content

View File

@@ -0,0 +1,75 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import HumanMessage
from prometheus.docker.base_container import BaseContainer
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.nodes.general_test_node import GeneralTestNode
from prometheus.lang_graph.subgraphs.build_and_test_state import BuildAndTestState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Mock standing in for a BaseContainer."""
    fake_container = Mock(spec=BaseContainer)
    return fake_container
@pytest.fixture
def mock_kg():
    """Mocked KnowledgeGraph returning a test-directory file tree."""
    graph = Mock(spec=KnowledgeGraph)
    graph.get_file_tree.return_value = "./\n├── tests/\n│ └── test_main.py"
    return graph
@pytest.fixture
def fake_llm():
    """Scripted chat model that always reports a successful test run."""
    canned = ["Tests executed successfully"]
    return FakeListChatWithToolsModel(responses=canned)
@pytest.fixture
def basic_state():
    """State marking that tests exist but none have been run yet."""
    fields = {"exist_test": True, "test_messages": [], "test_command_summary": None}
    return BuildAndTestState(fields)
def test_format_human_message(mock_container, mock_kg, fake_llm, basic_state):
    """The file tree should be embedded in the formatted message."""
    node = GeneralTestNode(fake_llm, mock_container, mock_kg)
    human_message = node.format_human_message(basic_state)
    assert isinstance(human_message, HumanMessage)
    assert mock_kg.get_file_tree() in human_message.content
def test_call_with_no_tests(mock_container, mock_kg, fake_llm):
    """With exist_test False the node reports the absence of a test framework."""
    node = GeneralTestNode(fake_llm, mock_container, mock_kg)
    result = node(BuildAndTestState({"exist_test": False}))
    # NOTE(review): the notice lands under "build_messages", not "test_messages" —
    # confirm this mirrors GeneralTestNode's intended output key.
    assert "build_messages" in result
    assert "no test framework" in result["build_messages"][0].content
def test_call_normal_execution(mock_container, mock_kg, fake_llm, basic_state):
    """Normal execution appends the scripted model reply to test_messages."""
    node = GeneralTestNode(fake_llm, mock_container, mock_kg)
    result = node(basic_state)
    assert "test_messages" in result
    test_messages = result["test_messages"]
    assert len(test_messages) == 1
    assert test_messages[0].content == "Tests executed successfully"
def test_format_human_message_with_summary(mock_container, mock_kg, fake_llm):
    """A previous test summary should be quoted in the message."""
    node = GeneralTestNode(fake_llm, mock_container, mock_kg)
    state = BuildAndTestState({"test_command_summary": "Previous test used pytest"})
    assert "Previous test used pytest" in node.format_human_message(state).content

View File

@@ -0,0 +1,35 @@
from unittest.mock import Mock
import pytest
from prometheus.git.git_repository import GitRepository
from prometheus.lang_graph.nodes.git_diff_node import GitDiffNode
@pytest.fixture
def mock_git_repo():
    """Mocked GitRepository whose get_diff always yields a fixed diff."""
    repo = Mock(spec=GitRepository)
    repo.get_diff.return_value = "sample diff content"
    return repo
def test_git_diff_node(mock_git_repo):
    """GitDiffNode should fetch the diff once and store it under its key."""
    node = GitDiffNode(mock_git_repo, "patch")
    result = node({})
    assert result == {"patch": "sample diff content"}
    # assert_called_once_with is stricter than assert_called_with: it also
    # fails if the node fetched the diff more than once.
    mock_git_repo.get_diff.assert_called_once_with(None)
def test_git_diff_node_with_excluded_files(mock_git_repo):
    """Excluded files read from the state should be forwarded to get_diff."""
    node = GitDiffNode(mock_git_repo, "patch", "excluded_file")
    result = node({"excluded_file": "/foo/bar.py"})
    assert result == {"patch": "sample diff content"}
    # Stricter single-call check; the excluded path must arrive wrapped in a list.
    mock_git_repo.get_diff.assert_called_once_with(["/foo/bar.py"])

View File

@@ -0,0 +1,135 @@
import pytest
from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
from langchain_core.messages.tool import ToolCall
from prometheus.lang_graph.nodes.issue_bug_analyzer_node import IssueBugAnalyzerNode
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def fake_llm():
    """Scripted chat model replying with a completed-analysis message."""
    canned = ["Bug analysis completed successfully"]
    return FakeListChatWithToolsModel(responses=canned)
@pytest.fixture
def fake_llm_with_tool_call():
    """Scripted model whose reply suggests it wants to run a web search."""
    reply = "I need to search for information about this error."
    return FakeListChatWithToolsModel(responses=[reply])
def test_init_issue_bug_analyzer_node(fake_llm):
    """Node construction should register exactly the web_search tool."""
    node = IssueBugAnalyzerNode(fake_llm)
    assert node.system_prompt is not None
    assert len(node.tools) == 1
    assert node.tools[0].name == "web_search"
    assert node.model_with_tools is not None
def test_call_method_basic(fake_llm):
    """A plain call should append the scripted analysis response."""
    node = IssueBugAnalyzerNode(fake_llm)
    prompt = HumanMessage(content="Please analyze this bug: ...")
    result = node({"issue_bug_analyzer_messages": [prompt]})
    assert "issue_bug_analyzer_messages" in result
    replies = result["issue_bug_analyzer_messages"]
    assert len(replies) == 1
    assert replies[0].content == "Bug analysis completed successfully"
def test_web_search_tool_integration(fake_llm_with_tool_call):
    """The node should pass through the model reply that asks for a search."""
    node = IssueBugAnalyzerNode(fake_llm_with_tool_call)
    question = HumanMessage(
        content="I'm getting a ValueError in my Python code. Can you help analyze it?"
    )
    result = node({"issue_bug_analyzer_messages": [question]})
    assert "issue_bug_analyzer_messages" in result
    replies = result["issue_bug_analyzer_messages"]
    assert len(replies) == 1
    assert replies[0].content == "I need to search for information about this error."
def test_web_search_tool_call_with_correct_parameters(fake_llm):
    """The registered web_search tool should carry a name, description and schema."""
    node = IssueBugAnalyzerNode(fake_llm)
    search_tool = node.tools[0]
    assert search_tool.name == "web_search"
    assert "technical information" in search_tool.description.lower()
    # The tool must declare an input schema for argument validation.
    assert hasattr(search_tool, "args_schema")
    assert search_tool.args_schema is not None
def test_system_prompt_contains_web_search_info(fake_llm):
    """The system prompt should advertise the web_search tool."""
    node = IssueBugAnalyzerNode(fake_llm)
    prompt_text = node.system_prompt.content.lower()
    assert "web_search" in prompt_text
    assert "technical information" in prompt_text
def test_web_search_tool_schema_validation(fake_llm):
    """The web_search tool should validate its input through an args schema."""
    node = IssueBugAnalyzerNode(fake_llm)
    search_tool = node.tools[0]
    assert hasattr(search_tool, "args_schema")
    assert search_tool.args_schema is not None
    # Constructing the schema with a valid query must succeed and keep the value.
    validated = search_tool.args_schema(query="Python debugging techniques")
    assert validated.query == "Python debugging techniques"
def test_multiple_tool_calls_in_conversation(fake_llm):
    """A conversation already containing tool calls still yields one new reply."""
    node = IssueBugAnalyzerNode(fake_llm)
    search_call = ToolCall(
        name="web_search",
        args={"query": "Python ImportError debugging"},
        id="call_1",
    )
    conversation = [
        HumanMessage(content="Analyze this bug: ImportError in my application"),
        AIMessage(
            content="Let me search for information about this error.",
            tool_calls=[search_call],
        ),
        ToolMessage(
            content="Search results: ImportError occurs when...", tool_call_id="call_1"
        ),
        HumanMessage(content="The error still persists after trying the suggested fixes"),
    ]
    result = node({"issue_bug_analyzer_messages": conversation})
    assert "issue_bug_analyzer_messages" in result
    replies = result["issue_bug_analyzer_messages"]
    assert len(replies) == 1
    assert replies[0].content == "Bug analysis completed successfully"

View File

@@ -0,0 +1,113 @@
import pytest
from prometheus.lang_graph.nodes.issue_bug_responder_node import IssueBugResponderNode
from prometheus.lang_graph.subgraphs.issue_bug_state import IssueBugState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def fake_llm():
    """Scripted model replying with a canned issue response."""
    reply = "Thank you for reporting this issue. The fix has been implemented and verified."
    return FakeListChatWithToolsModel(responses=[reply])
@pytest.fixture
def basic_state():
    """Fully verified issue state: fix applied and every check has passed."""
    fields = dict(
        issue_title="Test Bug",
        issue_body="Found a bug in the code",
        issue_comments=[
            {"username": "user1", "comment": "This affects my workflow"},
            {"username": "user2", "comment": "Same issue here"},
        ],
        edit_patch="Fixed array index calculation",
        passed_reproducing_test=True,
        passed_regression_test=True,
        passed_existing_test=True,
        run_build=True,
        run_existing_test=True,
        run_regression_test=True,
        run_reproduce_test=True,
        number_of_candidate_patch=6,
        reproduced_bug=True,
        reproduced_bug_file="mock.py",
        reproduced_bug_patch="mock patch to reproduce the bug",
        # NOTE(review): sibling fixtures pass a list here — confirm a bare
        # string is an accepted shape for reproduced_bug_commands.
        reproduced_bug_commands="pytest test_bug.py",
        selected_regression_tests=["tests:tests"],
        issue_response="Mock Response",
    )
    return IssueBugState(**fields)
def test_format_human_message_basic(fake_llm, basic_state):
    """Issue title, body, commenters and the patch should all be mentioned."""
    responder = IssueBugResponderNode(fake_llm)
    content = responder.format_human_message(basic_state).content
    for fragment in (
        "Test Bug",
        "Found a bug in the code",
        "user1",
        "user2",
        "Fixed array index",
    ):
        assert fragment in content
def test_format_human_message_verification(fake_llm, basic_state):
"""Test verification message formatting."""
node = IssueBugResponderNode(fake_llm)
message = node.format_human_message(basic_state)
assert "✓ The bug reproducing test passed" in message.content
assert "✓ All selected regression tests passes successfully" in message.content
assert "✓ All existing tests pass successfully" in message.content
def test_format_human_message_no_verification(fake_llm):
"""Test message formatting without verifications."""
state = IssueBugState(
issue_title="Test Bug",
issue_body="Bug description",
issue_comments=[],
edit_patch="Fixed array index calculation",
passed_reproducing_test=False,
passed_existing_test=False,
passed_regression_test=False,
)
node = IssueBugResponderNode(fake_llm)
message = node.format_human_message(state)
assert "✓ The bug reproducing test passed" not in message.content
assert "✓ All selected regression tests passes successfully" not in message.content
assert "✓ All existing tests pass successfully" not in message.content
def test_format_human_message_partial_verification(fake_llm):
"""Test message formatting with partial verifications."""
state = IssueBugState(
issue_title="Test Bug",
issue_body="Bug description",
issue_comments=[],
edit_patch="Fixed array index calculation",
passed_reproducing_test=True,
passed_existing_test=True,
passed_regression_test=True,
)
node = IssueBugResponderNode(fake_llm)
message = node.format_human_message(state)
assert "✓ The bug reproducing test passed" in message.content
assert "✓ Build passes successfully" not in message.content
assert "✓ All existing tests pass successfully" in message.content
def test_call_method(fake_llm, basic_state):
"""Test the call method execution."""
node = IssueBugResponderNode(fake_llm)
result = node(basic_state)
assert "issue_response" in result
assert (
result["issue_response"]
== "Thank you for reporting this issue. The fix has been implemented and verified."
)

View File

@@ -0,0 +1,98 @@
import pytest
from langchain_core.messages import HumanMessage
from prometheus.lang_graph.nodes.issue_documentation_analyzer_message_node import (
IssueDocumentationAnalyzerMessageNode,
)
from prometheus.lang_graph.subgraphs.issue_documentation_state import IssueDocumentationState
from prometheus.models.context import Context
@pytest.fixture
def basic_state():
    """IssueDocumentationState seeded with one comment and one documentation Context."""
    return IssueDocumentationState(
        issue_title="Update API documentation",
        issue_body="The API documentation needs to be updated",
        issue_comments=[
            {"username": "user1", "comment": "Please add examples"},
        ],
        max_refined_query_loop=3,
        documentation_query="Find API docs",
        documentation_context=[
            Context(
                relative_path="/docs/api.md",
                content="# API Documentation\n\nAPI Documentation content",
            )
        ],
        issue_documentation_analyzer_messages=[],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )


def test_init_issue_documentation_analyzer_message_node():
    """Test IssueDocumentationAnalyzerMessageNode initialization."""
    node = IssueDocumentationAnalyzerMessageNode()
    assert node is not None


def test_call_method_creates_message(basic_state):
    """Test that the node creates a human message."""
    node = IssueDocumentationAnalyzerMessageNode()
    result = node(basic_state)
    # Exactly one HumanMessage is appended to the analyzer conversation.
    assert "issue_documentation_analyzer_messages" in result
    assert len(result["issue_documentation_analyzer_messages"]) == 1
    assert isinstance(result["issue_documentation_analyzer_messages"][0], HumanMessage)


def test_message_contains_issue_info(basic_state):
    """Test that the message contains issue information."""
    node = IssueDocumentationAnalyzerMessageNode()
    result = node(basic_state)
    message_content = result["issue_documentation_analyzer_messages"][0].content
    assert "Update API documentation" in message_content


def test_message_contains_context(basic_state):
    """Test that the message includes documentation context."""
    node = IssueDocumentationAnalyzerMessageNode()
    result = node(basic_state)
    message_content = result["issue_documentation_analyzer_messages"][0].content
    # Should include context or reference to it
    assert "context" in message_content.lower() or "API Documentation" in message_content


def test_message_includes_analysis_instructions(basic_state):
    """Test that the message includes analysis instructions."""
    node = IssueDocumentationAnalyzerMessageNode()
    result = node(basic_state)
    message_content = result["issue_documentation_analyzer_messages"][0].content
    # Should include instructions for analysis
    assert "plan" in message_content.lower() or "analyze" in message_content.lower()


def test_call_with_empty_context():
    """Test the node with empty documentation context."""
    state = IssueDocumentationState(
        issue_title="Create new docs",
        issue_body="Create documentation for new feature",
        issue_comments=[],
        max_refined_query_loop=3,
        documentation_query="",
        documentation_context=[],
        issue_documentation_analyzer_messages=[],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )
    node = IssueDocumentationAnalyzerMessageNode()
    result = node(state)
    # The node should still emit a message even with no retrieved context.
    assert "issue_documentation_analyzer_messages" in result
    assert len(result["issue_documentation_analyzer_messages"]) == 1

View File

@@ -0,0 +1,81 @@
import pytest
from langchain_core.messages import HumanMessage
from prometheus.lang_graph.nodes.issue_documentation_analyzer_node import (
IssueDocumentationAnalyzerNode,
)
from prometheus.lang_graph.subgraphs.issue_documentation_state import IssueDocumentationState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def fake_llm():
    """Chat-model stub returning a canned multi-step documentation plan."""
    return FakeListChatWithToolsModel(
        responses=[
            "Documentation Plan:\n1. Update README.md with new API documentation\n"
            "2. Add code examples\n3. Update table of contents"
        ]
    )


@pytest.fixture
def basic_state():
    """IssueDocumentationState with two comments and one prior analyzer message."""
    return IssueDocumentationState(
        issue_title="Update API documentation",
        issue_body="The API documentation needs to be updated with the new endpoints",
        issue_comments=[
            {"username": "user1", "comment": "Please include examples"},
            {"username": "user2", "comment": "Add authentication details"},
        ],
        max_refined_query_loop=3,
        documentation_query="Find API documentation files",
        documentation_context=[],
        issue_documentation_analyzer_messages=[
            HumanMessage(content="Please analyze the documentation request and provide a plan")
        ],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )


def test_init_issue_documentation_analyzer_node(fake_llm):
    """Test IssueDocumentationAnalyzerNode initialization."""
    node = IssueDocumentationAnalyzerNode(fake_llm)
    assert node.system_prompt is not None
    assert node.web_search_tool is not None
    assert len(node.tools) == 1  # Should have web search tool
    assert node.model_with_tools is not None


def test_call_method_basic(fake_llm, basic_state):
    """Test basic call functionality."""
    node = IssueDocumentationAnalyzerNode(fake_llm)
    result = node(basic_state)
    # The canned LLM plan is appended as a single new analyzer message.
    assert "issue_documentation_analyzer_messages" in result
    assert len(result["issue_documentation_analyzer_messages"]) == 1
    assert "Documentation Plan" in result["issue_documentation_analyzer_messages"][0].content


def test_call_method_with_empty_messages(fake_llm):
    """Test call method with empty message history."""
    state = IssueDocumentationState(
        issue_title="Test",
        issue_body="Test body",
        issue_comments=[],
        max_refined_query_loop=3,
        documentation_query="",
        documentation_context=[],
        issue_documentation_analyzer_messages=[],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )
    node = IssueDocumentationAnalyzerNode(fake_llm)
    result = node(state)
    assert "issue_documentation_analyzer_messages" in result
    assert len(result["issue_documentation_analyzer_messages"]) == 1

View File

@@ -0,0 +1,80 @@
import pytest
from prometheus.lang_graph.nodes.issue_documentation_context_message_node import (
IssueDocumentationContextMessageNode,
)
from prometheus.lang_graph.subgraphs.issue_documentation_state import IssueDocumentationState
@pytest.fixture
def basic_state():
    """IssueDocumentationState with an empty query, ready for query generation."""
    return IssueDocumentationState(
        issue_title="Update API documentation",
        issue_body="The API documentation needs to be updated with new endpoints",
        issue_comments=[
            {"username": "user1", "comment": "Please include examples"},
        ],
        max_refined_query_loop=3,
        documentation_query="",
        documentation_context=[],
        issue_documentation_analyzer_messages=[],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )


def test_init_issue_documentation_context_message_node():
    """Test IssueDocumentationContextMessageNode initialization."""
    node = IssueDocumentationContextMessageNode()
    assert node is not None


def test_call_method_generates_query(basic_state):
    """Test that the node generates a documentation query."""
    node = IssueDocumentationContextMessageNode()
    result = node(basic_state)
    assert "documentation_query" in result
    assert len(result["documentation_query"]) > 0


def test_query_contains_issue_info(basic_state):
    """Test that the query contains issue information."""
    node = IssueDocumentationContextMessageNode()
    result = node(basic_state)
    query = result["documentation_query"]
    assert "Update API documentation" in query or "API documentation" in query


def test_query_includes_instructions(basic_state):
    """Test that the query includes documentation finding instructions."""
    node = IssueDocumentationContextMessageNode()
    result = node(basic_state)
    query = result["documentation_query"]
    # Should include instructions about finding documentation
    assert "documentation" in query.lower() or "find" in query.lower()


def test_call_with_empty_comments():
    """Test the node with empty comments."""
    state = IssueDocumentationState(
        issue_title="Test title",
        issue_body="Test body",
        issue_comments=[],
        max_refined_query_loop=3,
        documentation_query="",
        documentation_context=[],
        issue_documentation_analyzer_messages=[],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )
    node = IssueDocumentationContextMessageNode()
    result = node(state)
    # A non-empty query must be produced even when there are no comments.
    assert "documentation_query" in result
    assert len(result["documentation_query"]) > 0

View File

@@ -0,0 +1,117 @@
import pytest
from langchain_core.messages import AIMessage, HumanMessage
from prometheus.lang_graph.nodes.issue_documentation_edit_message_node import (
IssueDocumentationEditMessageNode,
)
from prometheus.lang_graph.subgraphs.issue_documentation_state import IssueDocumentationState
from prometheus.models.context import Context
@pytest.fixture
def basic_state():
    """IssueDocumentationState carrying a documentation Context and an analyzer plan."""
    return IssueDocumentationState(
        issue_title="Update API documentation",
        issue_body="The API documentation needs to be updated",
        issue_comments=[],
        max_refined_query_loop=3,
        documentation_query="Find API docs",
        documentation_context=[
            Context(
                relative_path="/docs/api.md",
                content="# API Documentation\n\nAPI Documentation content",
            )
        ],
        issue_documentation_analyzer_messages=[
            AIMessage(content="Plan:\n1. Update README.md\n2. Add new examples\n3. Fix typos")
        ],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )


def test_init_issue_documentation_edit_message_node():
    """Test IssueDocumentationEditMessageNode initialization."""
    node = IssueDocumentationEditMessageNode()
    assert node is not None


def test_call_method_creates_message(basic_state):
    """Test that the node creates a human message."""
    node = IssueDocumentationEditMessageNode()
    result = node(basic_state)
    # Exactly one HumanMessage is appended to the edit conversation.
    assert "edit_messages" in result
    assert len(result["edit_messages"]) == 1
    assert isinstance(result["edit_messages"][0], HumanMessage)


def test_message_contains_plan(basic_state):
    """Test that the message contains the documentation plan."""
    node = IssueDocumentationEditMessageNode()
    result = node(basic_state)
    message_content = result["edit_messages"][0].content
    assert "Plan:" in message_content or "Update README.md" in message_content


def test_message_contains_context(basic_state):
    """Test that the message includes documentation context."""
    node = IssueDocumentationEditMessageNode()
    result = node(basic_state)
    message_content = result["edit_messages"][0].content
    # Should include context
    assert "context" in message_content.lower() or "API Documentation" in message_content


def test_message_includes_edit_instructions(basic_state):
    """Test that the message includes editing instructions."""
    node = IssueDocumentationEditMessageNode()
    result = node(basic_state)
    message_content = result["edit_messages"][0].content
    # Should include instructions for implementing changes
    assert any(
        keyword in message_content.lower() for keyword in ["implement", "edit", "changes", "file"]
    )


def test_call_with_empty_context():
    """Test the node with empty documentation context."""
    state = IssueDocumentationState(
        issue_title="Create docs",
        issue_body="Create new documentation",
        issue_comments=[],
        max_refined_query_loop=3,
        documentation_query="",
        documentation_context=[],
        issue_documentation_analyzer_messages=[AIMessage(content="Create new documentation files")],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )
    node = IssueDocumentationEditMessageNode()
    result = node(state)
    assert "edit_messages" in result
    assert len(result["edit_messages"]) == 1


def test_extracts_last_analyzer_message(basic_state):
    """Test that the node extracts the last message from analyzer history."""
    # Add multiple messages to analyzer history
    basic_state["issue_documentation_analyzer_messages"] = [
        AIMessage(content="First message"),
        AIMessage(content="Second message"),
        AIMessage(content="Final plan: Update docs"),
    ]
    node = IssueDocumentationEditMessageNode()
    result = node(basic_state)
    message_content = result["edit_messages"][0].content
    # Should contain the final plan
    assert "Final plan" in message_content

View File

@@ -0,0 +1,96 @@
import pytest
from langchain_core.messages import AIMessage
from prometheus.lang_graph.nodes.issue_documentation_responder_node import (
IssueDocumentationResponderNode,
)
from prometheus.lang_graph.subgraphs.issue_documentation_state import IssueDocumentationState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def fake_llm():
    """Chat-model stub returning a canned documentation-update response."""
    return FakeListChatWithToolsModel(
        responses=[
            "The documentation has been successfully updated. "
            "I've added new API endpoint documentation and included examples as requested."
        ]
    )


@pytest.fixture
def basic_state():
    """IssueDocumentationState with an analyzer plan and a non-empty edit patch."""
    return IssueDocumentationState(
        issue_title="Update API documentation",
        issue_body="The API documentation needs to be updated with the new endpoints",
        issue_comments=[
            {"username": "user1", "comment": "Please include examples"},
        ],
        max_refined_query_loop=3,
        documentation_query="Find API documentation",
        documentation_context=[],
        issue_documentation_analyzer_messages=[
            AIMessage(content="Plan: Update README.md with new API endpoints and add examples")
        ],
        edit_messages=[],
        edit_patch="diff --git a/README.md b/README.md\n+New API documentation",
        issue_response="",
    )


def test_init_issue_documentation_responder_node(fake_llm):
    """Test IssueDocumentationResponderNode initialization."""
    node = IssueDocumentationResponderNode(fake_llm)
    assert node.model is not None


def test_call_method_basic(fake_llm, basic_state):
    """Test basic call functionality."""
    node = IssueDocumentationResponderNode(fake_llm)
    result = node(basic_state)
    # The canned LLM reply should flow through to issue_response.
    assert "issue_response" in result
    assert "successfully updated" in result["issue_response"]
    assert len(result["issue_response"]) > 0


def test_call_method_with_patch(fake_llm, basic_state):
    """Test response generation with patch."""
    node = IssueDocumentationResponderNode(fake_llm)
    result = node(basic_state)
    assert "issue_response" in result
    assert isinstance(result["issue_response"], str)


def test_call_method_without_patch(fake_llm):
    """Test response generation without patch."""
    state = IssueDocumentationState(
        issue_title="Update docs",
        issue_body="Please update the documentation",
        issue_comments=[],
        max_refined_query_loop=3,
        documentation_query="",
        documentation_context=[],
        issue_documentation_analyzer_messages=[AIMessage(content="Documentation plan created")],
        edit_messages=[],
        edit_patch="",
        issue_response="",
    )
    node = IssueDocumentationResponderNode(fake_llm)
    result = node(state)
    # A response must still be produced when no patch was generated.
    assert "issue_response" in result
    assert len(result["issue_response"]) > 0


def test_response_includes_issue_details(fake_llm, basic_state):
    """Test that the generated response is relevant to the issue."""
    node = IssueDocumentationResponderNode(fake_llm)
    result = node(basic_state)
    assert "issue_response" in result
    # The response should be a string with meaningful content
    assert len(result["issue_response"]) > 10

View File

@@ -0,0 +1,89 @@
import pytest
from prometheus.lang_graph.nodes.issue_feature_responder_node import IssueFeatureResponderNode
from prometheus.lang_graph.subgraphs.issue_feature_state import IssueFeatureState
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def fake_llm():
    """Chat-model stub returning a canned feature-completion reply."""
    return FakeListChatWithToolsModel(
        responses=[
            "Thank you for requesting this feature. The implementation has been completed and is ready for review."
        ]
    )


@pytest.fixture
def basic_state():
    """IssueFeatureState with comments, a final patch, and regression tests enabled."""
    return IssueFeatureState(
        issue_title="Add dark mode support",
        issue_body="Please add dark mode to the application",
        issue_comments=[
            {"username": "user1", "comment": "This would be great!"},
            {"username": "user2", "comment": "I need this feature"},
        ],
        final_patch="Added dark mode theme switching functionality",
        run_regression_test=True,
        number_of_candidate_patch=3,
        selected_regression_tests=["tests:tests"],
        issue_response="Mock Response",
    )


def test_format_human_message_basic(fake_llm, basic_state):
    """Test basic human message formatting."""
    node = IssueFeatureResponderNode(fake_llm)
    message = node.format_human_message(basic_state)
    # Issue metadata, commenter names, and the final patch all appear in the prompt.
    assert "Add dark mode support" in message.content
    assert "Please add dark mode to the application" in message.content
    assert "user1" in message.content
    assert "user2" in message.content
    assert "Added dark mode theme switching functionality" in message.content


def test_format_human_message_with_regression_tests(fake_llm, basic_state):
    """Test message formatting with regression tests."""
    # Add tested_patch_result to simulate passed tests
    from prometheus.models.test_patch_result import TestedPatchResult

    basic_state["tested_patch_result"] = [
        TestedPatchResult(patch="test patch", passed=True, regression_test_failure_log="")
    ]
    node = IssueFeatureResponderNode(fake_llm)
    message = node.format_human_message(basic_state)
    assert "✓ All selected regression tests passed successfully" in message.content


def test_format_human_message_no_tests(fake_llm):
    """Test message formatting without tests."""
    state = IssueFeatureState(
        issue_title="Add feature",
        issue_body="Feature description",
        issue_comments=[],
        final_patch="Implementation patch",
        run_regression_test=False,
        number_of_candidate_patch=1,
        selected_regression_tests=[],
        issue_response="",
    )
    node = IssueFeatureResponderNode(fake_llm)
    message = node.format_human_message(state)
    assert "No automated tests were run for this feature implementation." in message.content


def test_call_method(fake_llm, basic_state):
    """Test the call method execution."""
    node = IssueFeatureResponderNode(fake_llm)
    result = node(basic_state)
    # The node should surface the LLM's reply verbatim as the issue response.
    assert "issue_response" in result
    assert (
        result["issue_response"]
        == "Thank you for requesting this feature. The implementation has been completed and is ready for review."
    )

View File

@@ -0,0 +1,20 @@
from langchain_core.messages import HumanMessage
from prometheus.lang_graph.nodes.reset_messages_node import ResetMessagesNode
def test_reset_messages_node():
    """ResetMessagesNode clears only its configured message key, leaving others intact."""
    node = ResetMessagesNode("build_messages")
    initial_state = {
        "build_messages": [HumanMessage(content="message 1"), HumanMessage(content="message 2")],
        "test_messages": [HumanMessage(content="message 3"), HumanMessage(content="message 4")],
    }
    node(initial_state)
    # The targeted key is emptied.
    assert "build_messages" in initial_state
    assert initial_state["build_messages"] == []
    # Unrelated message lists are untouched.
    assert "test_messages" in initial_state
    assert len(initial_state["test_messages"]) == 2

View File

@@ -0,0 +1,24 @@
from pathlib import Path
from unittest.mock import Mock
from prometheus.docker.general_container import GeneralContainer
from prometheus.git.git_repository import GitRepository
from prometheus.lang_graph.nodes.update_container_node import UpdateContainerNode
def test_update_container_node():
    """UpdateContainerNode copies files added in the git diff into a running container."""
    container = Mock(spec=GeneralContainer)
    container.is_running.return_value = True
    git_repo = Mock(spec=GitRepository)
    # Diff describing a single newly added file named "newfile".
    git_repo.get_diff.return_value = "--- /dev/null\n+++ b/newfile\n@@ -0,0 +1 @@\n+content"
    git_repo.get_working_directory.return_value = Path("/test/working/dir/repositories/repo")
    node = UpdateContainerNode(container, git_repo)
    node(None)
    # Each collaborator is consulted exactly once.
    git_repo.get_diff.assert_called_once()
    container.is_running.assert_called_once()
    container.update_files.assert_called_once()
    # The added file is copied over; nothing is removed.
    container.update_files.assert_called_with(
        Path("/test/working/dir/repositories/repo"), [Path("newfile")], []
    )

View File

@@ -0,0 +1,32 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import ToolMessage
from prometheus.docker.base_container import BaseContainer
from prometheus.lang_graph.nodes.user_defined_build_node import UserDefinedBuildNode
@pytest.fixture
def mock_container():
    """Container double whose build command reports success."""
    fake = Mock(spec=BaseContainer)
    fake.run_build.return_value = "Build successful"
    return fake


@pytest.fixture
def build_node(mock_container):
    """UserDefinedBuildNode wired to the mocked container."""
    return UserDefinedBuildNode(container=mock_container)


def test_successful_build(build_node, mock_container):
    """A successful build yields a single ToolMessage carrying the build output."""
    result = build_node(None)
    assert isinstance(result, dict)
    assert "build_messages" in result
    messages = result["build_messages"]
    assert len(messages) == 1
    first = messages[0]
    assert isinstance(first, ToolMessage)
    assert first.content == "Build successful"
    mock_container.run_build.assert_called_once()

View File

@@ -0,0 +1,32 @@
from unittest.mock import Mock
import pytest
from langchain_core.messages import ToolMessage
from prometheus.docker.base_container import BaseContainer
from prometheus.lang_graph.nodes.user_defined_test_node import UserDefinedTestNode
@pytest.fixture
def mock_container():
    """Container double whose test command reports success."""
    fake = Mock(spec=BaseContainer)
    fake.run_test.return_value = "Test successful"
    return fake


@pytest.fixture
def test_node(mock_container):
    """UserDefinedTestNode wired to the mocked container."""
    return UserDefinedTestNode(container=mock_container)


def test_successful_test(test_node, mock_container):
    """A successful test run yields a single ToolMessage carrying the test output."""
    result = test_node(None)
    assert isinstance(result, dict)
    assert "test_messages" in result
    messages = result["test_messages"]
    assert len(messages) == 1
    first = messages[0]
    assert isinstance(first, ToolMessage)
    assert first.content == "Test successful"
    mock_container.run_test.assert_called_once()

View File

@@ -0,0 +1,38 @@
from unittest.mock import Mock
import pytest
from langgraph.checkpoint.base import BaseCheckpointSaver
from prometheus.docker.base_container import BaseContainer
from prometheus.git.git_repository import GitRepository
from prometheus.lang_graph.subgraphs.bug_fix_verification_subgraph import BugFixVerificationSubgraph
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Plain container double; no behaviour is stubbed."""
    return Mock(spec=BaseContainer)


@pytest.fixture
def mock_checkpointer():
    """Checkpoint-saver double (currently unused by the test below)."""
    return Mock(spec=BaseCheckpointSaver)


@pytest.fixture
def mock_git_repo():
    """Git repository double exposing a fake playground path."""
    repo = Mock(spec=GitRepository)
    repo.playground_path = "mock/playground/path"
    return repo


def test_bug_fix_verification_subgraph_basic_initialization(
    mock_container,
    mock_git_repo,
):
    """Test that BugFixVerificationSubgraph initializes correctly with basic components."""
    stub_model = FakeListChatWithToolsModel(responses=[])
    graph = BugFixVerificationSubgraph(stub_model, mock_container, mock_git_repo)
    assert graph.subgraph is not None

View File

@@ -0,0 +1,49 @@
from unittest.mock import Mock
import pytest
from prometheus.docker.base_container import BaseContainer
from prometheus.git.git_repository import GitRepository
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.subgraphs.bug_reproduction_subgraph import BugReproductionSubgraph
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Plain container double; no behaviour is stubbed."""
    return Mock(spec=BaseContainer)


@pytest.fixture
def mock_kg():
    """Knowledge-graph double exposing AST node types and a root node id."""
    graph_double = Mock(spec=KnowledgeGraph)
    graph_double.get_all_ast_node_types.return_value = ["FunctionDef", "ClassDef", "Module", "Import", "Call"]
    graph_double.root_node_id = 0
    return graph_double


@pytest.fixture
def mock_git_repo():
    """Git repository double exposing a fake playground path."""
    repo = Mock(spec=GitRepository)
    repo.playground_path = "mock/playground/path"
    return repo


def test_bug_reproduction_subgraph_basic_initialization(mock_container, mock_kg, mock_git_repo):
    """Test that BugReproductionSubgraph initializes correctly with basic components."""
    stub_advanced = FakeListChatWithToolsModel(responses=[])
    stub_base = FakeListChatWithToolsModel(responses=[])
    graph = BugReproductionSubgraph(
        stub_advanced,
        stub_base,
        mock_container,
        mock_kg,
        mock_git_repo,
        None,
    )
    # Construction alone should produce a compiled subgraph.
    assert graph.subgraph is not None

View File

@@ -0,0 +1,47 @@
from unittest.mock import Mock
import pytest
from prometheus.docker.base_container import BaseContainer
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.subgraphs.build_and_test_subgraph import BuildAndTestSubgraph
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Plain container double; no behaviour is stubbed."""
    return Mock(spec=BaseContainer)


@pytest.fixture
def mock_kg():
    """Plain knowledge-graph double; no behaviour is stubbed."""
    return Mock(spec=KnowledgeGraph)


def test_build_and_test_subgraph_basic_initialization(mock_container, mock_kg):
    """Test that BuildAndTestSubgraph initializes correctly with basic components."""
    stub_model = FakeListChatWithToolsModel(responses=[])
    graph = BuildAndTestSubgraph(container=mock_container, model=stub_model, kg=mock_kg)
    assert graph.subgraph is not None


def test_build_and_test_subgraph_with_commands(mock_container, mock_kg):
    """Test that BuildAndTestSubgraph initializes correctly with build and test commands."""
    stub_model = FakeListChatWithToolsModel(responses=[])
    graph = BuildAndTestSubgraph(
        container=mock_container,
        model=stub_model,
        kg=mock_kg,
        build_commands=["make build"],
        test_commands=["make test"],
    )
    assert graph.subgraph is not None

View File

@@ -0,0 +1,69 @@
from unittest.mock import Mock
import pytest
from prometheus.docker.base_container import BaseContainer
from prometheus.git.git_repository import GitRepository
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.subgraphs.issue_bug_subgraph import IssueBugSubgraph
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Plain container double; no behaviour is stubbed."""
    return Mock(spec=BaseContainer)


@pytest.fixture
def mock_kg():
    """Knowledge-graph double exposing AST node types and a root node id."""
    graph_double = Mock(spec=KnowledgeGraph)
    graph_double.get_all_ast_node_types.return_value = ["FunctionDef", "ClassDef", "Module", "Import", "Call"]
    graph_double.root_node_id = 0
    return graph_double


@pytest.fixture
def mock_git_repo():
    """Git repository double exposing a fake playground path."""
    repo = Mock(spec=GitRepository)
    repo.playground_path = "mock/playground/path"
    return repo


def test_issue_bug_subgraph_basic_initialization(mock_container, mock_kg, mock_git_repo):
    """Test that IssueBugSubgraph initializes correctly with basic components."""
    stub_advanced = FakeListChatWithToolsModel(responses=[])
    stub_base = FakeListChatWithToolsModel(responses=[])
    graph = IssueBugSubgraph(
        advanced_model=stub_advanced,
        base_model=stub_base,
        container=mock_container,
        kg=mock_kg,
        git_repo=mock_git_repo,
        repository_id=1,
    )
    assert graph.subgraph is not None


def test_issue_bug_subgraph_with_commands(mock_container, mock_kg, mock_git_repo):
    """Test that IssueBugSubgraph initializes correctly with build and test commands."""
    stub_advanced = FakeListChatWithToolsModel(responses=[])
    stub_base = FakeListChatWithToolsModel(responses=[])
    graph = IssueBugSubgraph(
        advanced_model=stub_advanced,
        base_model=stub_base,
        container=mock_container,
        kg=mock_kg,
        git_repo=mock_git_repo,
        repository_id=1,
        test_commands=["make test"],
    )
    assert graph.subgraph is not None

View File

@@ -0,0 +1,45 @@
from unittest.mock import Mock
import pytest
from prometheus.git.git_repository import GitRepository
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.subgraphs.issue_classification_subgraph import (
IssueClassificationSubgraph,
)
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_kg():
    """Knowledge-graph double exposing AST node types and a root node id."""
    graph_double = Mock(spec=KnowledgeGraph)
    graph_double.get_all_ast_node_types.return_value = ["FunctionDef", "ClassDef", "Module", "Import", "Call"]
    graph_double.root_node_id = 0
    return graph_double


@pytest.fixture
def mock_git_repo():
    """Git repository double exposing a fake playground path."""
    repo = Mock(spec=GitRepository)
    repo.playground_path = "mock/playground/path"
    return repo


def test_issue_classification_subgraph_basic_initialization(mock_kg, mock_git_repo):
    """Test that IssueClassificationSubgraph initializes correctly with basic components."""
    stub_model = FakeListChatWithToolsModel(responses=[])
    stub_advanced = FakeListChatWithToolsModel(responses=[])
    graph = IssueClassificationSubgraph(
        advanced_model=stub_advanced,
        model=stub_model,
        kg=mock_kg,
        local_path=mock_git_repo.playground_path,
        repository_id=1,
    )
    assert graph.subgraph is not None

View File

@@ -0,0 +1,51 @@
from unittest.mock import Mock
import pytest
from prometheus.git.git_repository import GitRepository
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.subgraphs.issue_documentation_subgraph import (
IssueDocumentationSubgraph,
)
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_kg():
    """Knowledge-graph double exposing AST node types and a root node id."""
    graph_double = Mock(spec=KnowledgeGraph)
    graph_double.get_all_ast_node_types.return_value = ["FunctionDef", "ClassDef", "Module", "Import", "Call"]
    graph_double.root_node_id = 0
    return graph_double


@pytest.fixture
def mock_git_repo():
    """Git repository double exposing a fake playground path."""
    repo = Mock(spec=GitRepository)
    repo.playground_path = "mock/playground/path"
    return repo


def test_issue_documentation_subgraph_basic_initialization(mock_kg, mock_git_repo):
    """Test that IssueDocumentationSubgraph initializes correctly with basic components."""
    stub_advanced = FakeListChatWithToolsModel(responses=[])
    stub_base = FakeListChatWithToolsModel(responses=[])
    graph = IssueDocumentationSubgraph(
        advanced_model=stub_advanced,
        base_model=stub_base,
        kg=mock_kg,
        git_repo=mock_git_repo,
        repository_id=1,
    )
    assert graph.subgraph is not None

View File

@@ -0,0 +1,49 @@
from unittest.mock import Mock
import pytest
from prometheus.docker.base_container import BaseContainer
from prometheus.git.git_repository import GitRepository
from prometheus.graph.knowledge_graph import KnowledgeGraph
from prometheus.lang_graph.subgraphs.issue_question_subgraph import IssueQuestionSubgraph
from tests.test_utils.util import FakeListChatWithToolsModel
@pytest.fixture
def mock_container():
    """Plain container double (currently unused by the test below)."""
    return Mock(spec=BaseContainer)


@pytest.fixture
def mock_kg():
    """Knowledge-graph double exposing AST node types and a root node id."""
    graph_double = Mock(spec=KnowledgeGraph)
    graph_double.get_all_ast_node_types.return_value = ["FunctionDef", "ClassDef", "Module", "Import", "Call"]
    graph_double.root_node_id = 0
    return graph_double


@pytest.fixture
def mock_git_repo():
    """Git repository double exposing a fake playground path."""
    repo = Mock(spec=GitRepository)
    repo.playground_path = "mock/playground/path"
    return repo


def test_issue_question_subgraph_basic_initialization(mock_container, mock_kg, mock_git_repo):
    """Test that IssueQuestionSubgraph initializes correctly with basic components."""
    stub_advanced = FakeListChatWithToolsModel(responses=[])
    stub_base = FakeListChatWithToolsModel(responses=[])
    graph = IssueQuestionSubgraph(
        advanced_model=stub_advanced,
        base_model=stub_base,
        kg=mock_kg,
        git_repo=mock_git_repo,
        repository_id=1,
    )
    assert graph.subgraph is not None