Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,7 @@ llama-index-llms-ollama==0.9.0
# via code-graph (pyproject.toml)
llama-index-workflows==2.11.0
# via llama-index-core
# via code-graph (pyproject.toml)
loguru==0.7.3
# via code-graph (pyproject.toml)
markupsafe==3.0.3
Expand Down
111 changes: 111 additions & 0 deletions src/codebase_rag/api/agent_routes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
"""FastAPI routes exposing the unified LlamaIndex agent workflow."""

from __future__ import annotations

from typing import Any, Dict, List, Optional

from fastapi import APIRouter, HTTPException, Path
from pydantic import BaseModel, Field

from codebase_rag.services.agents import agent_session_manager


# Router for agent-orchestration endpoints. It is mounted by core.routes under
# the "/api/v1" prefix, so every route here lives beneath /api/v1/agent.
router = APIRouter(prefix="/agent", tags=["Agent Orchestration"])


class SessionSummary(BaseModel):
    """Compact view of an agent session, used for listing and creation responses."""

    # Identifier assigned by the session manager at creation time.
    session_id: str
    # Project the session is scoped to (drives retrieval and memory scoping).
    project_id: str
    # Arbitrary caller-supplied metadata stored alongside the session.
    metadata: Dict[str, Any] = Field(default_factory=dict)
    # Presumably the number of conversational exchanges so far — confirm
    # against the session manager's accounting.
    turns: int = 0
    # Count of tool invocations recorded for this session.
    tool_events: int = 0


class CreateSessionRequest(BaseModel):
    """Request body for POST /agent/sessions."""

    project_id: str = Field(..., description="Project identifier used for retrieval and memory scoping.")
    metadata: Optional[Dict[str, Any]] = Field(default=None, description="Optional metadata stored alongside the session.")


class CreateSessionResponse(SessionSummary):
    """Payload returned after creating a session.

    Inherits every field from :class:`SessionSummary` unchanged; the distinct
    class name keeps the OpenAPI schema self-documenting.
    """


class AgentMessageRequest(BaseModel):
    """Request body for POST /agent/sessions/{session_id}/messages."""

    message: str = Field(..., description="User message to send to the orchestrator agent.")
    # Opt-in flag: persistence of extracted memories is off by default.
    auto_save_memories: bool = Field(
        default=False,
        description="If True, the memory extraction tool will persist high-confidence memories automatically.",
    )


class AgentMessageResponse(BaseModel):
    """Response returned after the orchestrator agent processes a message."""

    # Session the message was routed to.
    session_id: str
    # The agent's textual answer.
    reply: str
    # Tool invocations that occurred while producing this reply.
    tool_events: List[Dict[str, Any]] = Field(default_factory=list)
    # Task-tracking information emitted by the workflow — shape is defined by
    # the session manager; treated as an opaque mapping here.
    task: Dict[str, Any] = Field(default_factory=dict)
    # Rolling chat transcript (role/content pairs, per the str->str typing).
    chat_history: List[Dict[str, str]] = Field(default_factory=list)


class SessionStateResponse(BaseModel):
    """Full recorded state of a session, as returned by GET /agent/sessions/{id}."""

    session_id: str
    project_id: str
    # Unlike SessionSummary, these fields have no defaults: the session
    # manager is expected to always supply them for an existing session.
    metadata: Dict[str, Any]
    chat_history: List[Dict[str, str]]
    tool_events: List[Dict[str, Any]]
    task_trace: List[Dict[str, Any]]


@router.post("/sessions", response_model=CreateSessionResponse)
async def create_agent_session(payload: CreateSessionRequest) -> Dict[str, Any]:
    """Open a fresh agent session bound to ``payload.project_id``.

    Delegates to the shared session manager; FastAPI validates the returned
    mapping against ``CreateSessionResponse``.
    """

    session = await agent_session_manager.create_session(
        project_id=payload.project_id,
        metadata=payload.metadata,
    )
    return session


@router.get("/sessions", response_model=Dict[str, List[SessionSummary]])
async def list_agent_sessions() -> Dict[str, List[SessionSummary]]:
    """Return every active agent session, wrapped under a "sessions" key."""

    return {"sessions": await agent_session_manager.list_sessions()}


@router.get("/sessions/{session_id}", response_model=SessionStateResponse)
async def get_agent_session(session_id: str = Path(..., description="Session identifier")) -> Dict[str, Any]:
    """Return the full recorded state of one session, or 404 if unknown."""

    try:
        state = await agent_session_manager.get_session_state(session_id)
    except KeyError as exc:  # pragma: no cover - defensive
        # The manager signals an unknown session with KeyError; map it to 404.
        raise HTTPException(status_code=404, detail=str(exc)) from exc
    return state


@router.delete("/sessions/{session_id}")
async def close_agent_session(session_id: str = Path(..., description="Session identifier")) -> Dict[str, str]:
    """Terminate an existing agent session.

    Mirrors the error handling of the other ``{session_id}`` routes: if the
    session manager reports an unknown session via ``KeyError``, surface it
    as a 404 rather than letting it bubble up as an opaque 500.

    Returns:
        A small status mapping, e.g. ``{"status": "closed", "session_id": ...}``.
    """

    try:
        await agent_session_manager.close_session(session_id)
    except KeyError as exc:  # pragma: no cover - defensive, matches sibling routes
        raise HTTPException(status_code=404, detail=str(exc)) from exc
    return {"status": "closed", "session_id": session_id}


@router.post("/sessions/{session_id}/messages", response_model=AgentMessageResponse)
async def send_agent_message(
    payload: AgentMessageRequest,
    session_id: str = Path(..., description="Session identifier"),
) -> Dict[str, Any]:
    """Relay a user message to the orchestrator agent and return its reply.

    An unknown session (``KeyError``) maps to 404; an unexpectedly shaped
    agent object (``AttributeError``) maps to 500, with the error text as
    the response detail in both cases.
    """

    try:
        result = await agent_session_manager.process_message(
            session_id=session_id,
            message=payload.message,
            auto_save_memories=payload.auto_save_memories,
        )
    except KeyError as exc:
        raise HTTPException(status_code=404, detail=str(exc)) from exc
    except AttributeError as exc:  # pragma: no cover - unexpected agent shape
        raise HTTPException(status_code=500, detail=str(exc)) from exc
    return result

2 changes: 2 additions & 0 deletions src/codebase_rag/core/routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from codebase_rag.api.websocket_routes import router as ws_router
from codebase_rag.api.sse_routes import router as sse_router
from codebase_rag.api.memory_routes import router as memory_router
from codebase_rag.api.agent_routes import router as agent_router


def setup_routes(app: FastAPI) -> None:
Expand All @@ -21,4 +22,5 @@ def setup_routes(app: FastAPI) -> None:
app.include_router(task_router, prefix="/api/v1", tags=["Task Management"])
app.include_router(sse_router, prefix="/api/v1", tags=["Real-time Updates"])
app.include_router(memory_router, tags=["Memory Management"])
app.include_router(agent_router, prefix="/api/v1", tags=["Agent Orchestration"])

1 change: 1 addition & 0 deletions src/codebase_rag/services/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,4 +28,5 @@
"utils",
"pipeline",
"graph",
"agents",
]
17 changes: 17 additions & 0 deletions src/codebase_rag/services/agents/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
"""Agent orchestration services built on top of LlamaIndex workflows."""

from .base import create_default_agent
from .session_manager import AgentSessionManager
from .tools import AGENT_TOOLS, KNOWLEDGE_TOOLS, MEMORY_TOOLS

# Public surface of the package. Note that ``agent_session_manager`` — the
# module-level singleton created below — is exported alongside the factory,
# class, and tool registries.
__all__ = [
    "create_default_agent",
    "AgentSessionManager",
    "agent_session_manager",
    "AGENT_TOOLS",
    "KNOWLEDGE_TOOLS",
    "MEMORY_TOOLS",
]


# Shared singleton used by the API layer; instantiated at import time.
agent_session_manager = AgentSessionManager()
44 changes: 44 additions & 0 deletions src/codebase_rag/services/agents/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
"""Factories for constructing LlamaIndex workflow agents."""

from typing import Sequence

from llama_index.core import Settings
from llama_index.core.agent.workflow import FunctionAgent

from codebase_rag.config import settings

from .tools import AGENT_TOOLS


def create_default_agent(*, tools: Sequence = AGENT_TOOLS) -> FunctionAgent:
    """Build the default ``FunctionAgent`` for the CodebaseRAG workflow.

    Args:
        tools: Tool sequence to expose to the agent; defaults to the
            package-level ``AGENT_TOOLS`` registry. Copied into a list, so
            the caller's sequence is never mutated.

    Returns:
        A ``FunctionAgent`` driven by the globally configured
        ``Settings.llm``.

    Raises:
        ValueError: If ``Settings.llm`` has not been configured yet.
    """

    if Settings.llm is None:
        raise ValueError(
            "Settings.llm is not configured. Initialize the Neo4j knowledge service "
            "or configure Settings.llm before creating agents."
        )

    return FunctionAgent(
        name=settings.app_name or "codebase-rag-agent",
        description=(
            "Project knowledge orchestrator capable of looking up graph knowledge, "
            "searching vector similarities, extracting new memories and persisting them."
        ),
        system_prompt=(
            "You are the CodebaseRAG coordinator. Always inspect the available tools to "
            "answer user questions, retrieve supporting context from Neo4j, and store new "
            "memories when relevant. Make sure responses explain which tools were used."
        ),
        tools=list(tools),
        llm=Settings.llm,
    )
Loading
Loading