# Multi-agent sysadmin assistant using LangChain + LangGraph Supervisor
# Requires: `pip install langchain-openai langgraph langgraph-supervisor`

from __future__ import annotations

from supervisor import create_sysadmin_supervisor
from utils import print_step_info, explain_supervisor_pattern
|
def main() -> None:
    """Demo driver: ask the sysadmin supervisor one question, two ways.

    Runs the same user query first via ``invoke()`` (single final answer),
    then via ``stream()`` (step-by-step chunks with per-step debug output).
    Streaming is capped at ``max_steps`` chunks as a guard against agent
    hand-off loops.
    """
    # Create the supervisor graph that routes between the specialist agents.
    supervisor = create_sysadmin_supervisor()

    # Example run - demonstrating both invoke and streaming with debug output
    query = {
        "messages": [
            {
                "role": "user",
                "content": "Nginx returns 502 Bad Gateway on my server. What can I do?",
            }
        ]
    }

    print("🚀 Starting multi-agent sysadmin analysis...")
    print(f"📝 User Query: {query['messages'][0]['content']}")
    print("=" * 80)

    # Show explanation of the supervisor pattern
    explain_supervisor_pattern()

    print("\n=== Using invoke() method ===")
    result = supervisor.invoke(query)

    print("\n📊 FINAL RESULT:")
    print("-" * 40)
    print(result["messages"][-1].content)
    print("-" * 40)

    print(f"\n📈 Total messages exchanged: {len(result['messages'])}")

    print("\n=== Using stream() method for detailed step-by-step analysis ===")
    step_count = 0
    max_steps = 20  # Prevent infinite loops from endless agent hand-offs.

    try:
        chunks_processed = []
        for chunk in supervisor.stream(query):
            step_count += 1
            chunks_processed.append(chunk)
            print_step_info(step_count, chunk)

            # Safety check to prevent infinite loops
            if step_count >= max_steps:
                print(f"\n⚠️ Reached maximum steps ({max_steps}), stopping stream...")
                break

        print(f"\n✅ Streaming completed successfully with {step_count} steps")
        print(f"📊 Total chunks processed: {len(chunks_processed)}")

        # Check if the last chunk contains a complete final response
        if chunks_processed:
            last_chunk = chunks_processed[-1]
            print(f"🔍 Last chunk keys: {list(last_chunk.keys()) if isinstance(last_chunk, dict) else type(last_chunk)}")

    except Exception as e:
        # Demo-level boundary: invoke() already succeeded above, so report the
        # streaming failure with a full traceback instead of crashing the demo.
        print(f"\n❌ Streaming error after {step_count} steps: {e}")
        print("💡 The invoke() method worked fine, so the supervisor itself is functional.")
        import traceback

        traceback.print_exc()


if __name__ == "__main__":
    main()
|