55"""
66
77import json
8- from typing import Dict , Any , Optional
8+ from typing import Dict , Any , Optional , List
99from models .state import AgentState
1010from models .responses import ArchitectureDesignResponse
1111from models .simplified_responses import SimplifiedComponent , SimplifiedArchitectureResponse , create_simplified_architecture_response
12- from . .core .base_agent import BaseAgent
12+ from agents .core .base_agent import BaseAgent
1313from prompts import get_agent_prompt_loader
1414import google .generativeai as genai
1515
16+
17+ # LangGraph integration check
18+ try :
19+ from langgraph .graph import StateGraph , END
20+ from langgraph .checkpoint .memory import MemorySaver
21+ from pydantic import BaseModel , Field
22+ LANGGRAPH_AVAILABLE = True
23+ except ImportError :
24+ LANGGRAPH_AVAILABLE = False
25+ logging .warning ("LangGraph not available - agent will work in legacy mode only" )
26+
try:
    from langchain_core.output_parsers import JsonOutputParser
    from langchain_core.prompts import PromptTemplate
    LANGCHAIN_AVAILABLE = True
except ImportError:
    LANGCHAIN_AVAILABLE = False


+
+
+class ArchitectureDesignerState(BaseModel):
+    """State for ArchitectureDesigner LangGraph workflow using Pydantic BaseModel."""
+
+    # Input fields
+    input_data: Dict[str, Any] = Field(default_factory=dict, description="Input data")
+
+    # Output fields
+    output_data: Dict[str, Any] = Field(default_factory=dict, description="Output data")
+
+    # Control fields
+    errors: List[str] = Field(default_factory=list, description="Error messages")
+    status: str = Field(default="initialized", description="Current status")
+    metrics: Dict[str, float] = Field(default_factory=dict, description="Execution metrics")
+
+    class Config:
+        """Pydantic configuration."""
+        arbitrary_types_allowed = True
+
class ArchitectureDesigner(BaseAgent):
    """
    Agent responsible for designing system architecture.
@@ -38,6 +69,17 @@ def __init__(self, config, gemini_client):
        else:
            self.json_parser = None

+
+        # Build LangGraph workflow if available
+        if LANGGRAPH_AVAILABLE:
+            self.workflow = self._build_langgraph_workflow()
+            self.app = self.workflow.compile()
+            self.logger.info("✅ LangGraph workflow compiled and ready")
+        else:
+            self.workflow = None
+            self.app = None
+            self.logger.info("⚠️ LangGraph not available - using legacy mode")
+
    def validate_task(self, task: Any) -> bool:
        """
        Validate that the task is appropriate for architecture design.
@@ -542,3 +584,61 @@ def validate_input(self, state: AgentState) -> bool:
            # Don't fail, just warn - we can still design basic architecture

        return True
+
+
+    def _build_langgraph_workflow(self) -> StateGraph:
+        """Build LangGraph workflow for ArchitectureDesigner."""
+        workflow = StateGraph(ArchitectureDesignerState)
+
+        # Simple workflow: just execute the agent
+        workflow.add_node("execute", self._langgraph_execute_node)
+        workflow.set_entry_point("execute")
+        workflow.add_edge("execute", END)
+
+        return workflow
+
+    async def _langgraph_execute_node(self, state: ArchitectureDesignerState) -> ArchitectureDesignerState:
+        """Execute agent in LangGraph workflow."""
+        import time
+        start = time.time()
+
+        try:
+            # Call the agent's execute method
+            result = await self.execute(state.input_data)
+
+            # Update state with results
+            state.output_data = result
+            state.status = "completed"
+            state.metrics["execution_time"] = time.time() - start
+
+        except Exception as e:
+            self.logger.error(f"LangGraph execution failed: {e}")
+            state.errors.append(str(e))
+            state.status = "failed"
+            state.metrics["execution_time"] = time.time() - start
+
+        return state
+
+
+# Export for LangGraph Studio
+_default_instance = None
+
+def get_graph():
+    """Get the compiled graph for LangGraph Studio."""
+    global _default_instance
+    if _default_instance is None and LANGGRAPH_AVAILABLE:
+        from models.config import AgentConfig
+        from utils.llm.gemini_client_factory import get_gemini_client
+
+        config = AgentConfig(
+            agent_id='architecture_designer',
+            name='ArchitectureDesigner',
+            description='ArchitectureDesigner agent',
+            model_name='gemini-2.5-flash'
+        )
+        client = get_gemini_client(agent_name='architecture_designer')
+        _default_instance = ArchitectureDesigner(config, gemini_client=client)
+    return _default_instance.app if _default_instance else None
+
+# Studio expects 'graph' variable
+graph = get_graph()
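
For context, a minimal usage sketch of the compiled graph exported above; it assumes LangGraph's standard ainvoke API, and the import path and requirements payload shown are illustrative placeholders rather than names taken from this repository.

# Minimal usage sketch for the compiled graph exported above.
# Assumptions: LangGraph's standard ainvoke API; the import path and the
# requirements payload below are illustrative placeholders.
import asyncio

from agents.architecture_designer import graph  # hypothetical import path

async def main():
    # The Pydantic state schema accepts a plain dict; unset fields fall back to their defaults.
    result = await graph.ainvoke(
        {"input_data": {"requirements": "Design a REST API for a task tracker"}}
    )
    # result holds the final state values (output_data, status, errors, metrics).
    print(result)

if __name__ == "__main__":
    asyncio.run(main())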