From c20be407d5c77e7df87a08dc56abf5fd5a52394f Mon Sep 17 00:00:00 2001
From: Gaetan Hurel
Date: Thu, 26 Jun 2025 15:04:54 +0200
Subject: [PATCH] multi-round chat

---
 multi-agent-supervisor/main-multi-agent.py | 117 +++++++++++----------
 1 file changed, 60 insertions(+), 57 deletions(-)

diff --git a/multi-agent-supervisor/main-multi-agent.py b/multi-agent-supervisor/main-multi-agent.py
index d13b92e..0aeb53c 100644
--- a/multi-agent-supervisor/main-multi-agent.py
+++ b/multi-agent-supervisor/main-multi-agent.py
@@ -9,60 +9,63 @@ from utils import print_step_info, explain_supervisor_pattern
 if __name__ == "__main__":
     # Create the supervisor
     supervisor = create_sysadmin_supervisor()
-
-    # Example run - demonstrating both invoke and streaming with debug output
-    query = {
-        "messages": [
-            {
-                "role": "user",
-                "content": "Nginx returns 502 Bad Gateway on my server. What can I do?",
-            }
-        ]
-    }
-
-    print("šŸš€ Starting multi-agent sysadmin analysis...")
-    print(f"šŸ“ User Query: {query['messages'][0]['content']}")
-    print("=" * 80)
-
-    # Show explanation of the supervisor pattern
-    explain_supervisor_pattern()
-
-    print("\n=== Using invoke() method ===")
-    result = supervisor.invoke(query)
-
-    print("\nšŸ“Š FINAL RESULT:")
-    print("-" * 40)
-    print(result["messages"][-1].content)
-    print("-" * 40)
-
-    print(f"\nšŸ“ˆ Total messages exchanged: {len(result['messages'])}")
-
-    print("\n=== Using stream() method for detailed step-by-step analysis ===")
-    step_count = 0
-    max_steps = 20  # Prevent infinite loops
-
-    try:
-        chunks_processed = []
-        for chunk in supervisor.stream(query):
-            step_count += 1
-            chunks_processed.append(chunk)
-            print_step_info(step_count, chunk)
-
-            # Safety check to prevent infinite loops
-            if step_count >= max_steps:
-                print(f"\nāš ļø Reached maximum steps ({max_steps}), stopping stream...")
-                break
-
-        print(f"\nāœ… Streaming completed successfully with {step_count} steps")
-        print(f"šŸ“Š Total chunks processed: {len(chunks_processed)}")
-
-        # Check if the last chunk contains a complete final response
-        if chunks_processed:
-            last_chunk = chunks_processed[-1]
-            print(f"šŸ” Last chunk keys: {list(last_chunk.keys()) if isinstance(last_chunk, dict) else type(last_chunk)}")
-
-    except Exception as e:
-        print(f"\nāŒ Streaming error after {step_count} steps: {e}")
-        print("šŸ’” The invoke() method worked fine, so the supervisor itself is functional.")
-        import traceback
-        traceback.print_exc()
+
+    # Interactive conversation loop
+    messages = []
+    print("Welcome to the multi-agent sysadmin assistant!")
+    print("Type your sysadmin question below. Type 'exit' to quit.")
+    while True:
+        user_input = input("\nšŸ“ User: ")
+        if user_input.strip().lower() == 'exit':
+            print("Goodbye!")
+            break
+        messages.append({"role": "user", "content": user_input})
+        query = {"messages": messages}
+
+        print("\n=== Using invoke() method ===")
+        result = supervisor.invoke(query)
+
+        print("\nšŸ“Š FINAL RESULT:")
+        print("-" * 40)
+        print(result["messages"][-1].content)
+        print("-" * 40)
+        print(f"\nšŸ“ˆ Total messages exchanged: {len(result['messages'])}")
+
+        # Add the assistant's reply to the conversation history
+        messages.append({"role": "assistant", "content": result["messages"][-1].content})
+
+        # Ask if the user wants to continue
+        cont = input("\nWould you like to continue the conversation? (y/n): ")
(y/n): ") + if cont.strip().lower() not in ('y', 'yes'): + print("Session ended.") + break + + print("\n=== Using stream() method for detailed step-by-step analysis ===") + step_count = 0 + max_steps = 20 # Prevent infinite loops + + try: + chunks_processed = [] + for chunk in supervisor.stream(query): + step_count += 1 + chunks_processed.append(chunk) + print_step_info(step_count, chunk) + + # Safety check to prevent infinite loops + if step_count >= max_steps: + print(f"\nāš ļø Reached maximum steps ({max_steps}), stopping stream...") + break + + print(f"\nāœ… Streaming completed successfully with {step_count} steps") + print(f"šŸ“Š Total chunks processed: {len(chunks_processed)}") + + # Check if the last chunk contains a complete final response + if chunks_processed: + last_chunk = chunks_processed[-1] + print(f"šŸ” Last chunk keys: {list(last_chunk.keys()) if isinstance(last_chunk, dict) else type(last_chunk)}") + + except Exception as e: + print(f"\nāŒ Streaming error after {step_count} steps: {e}") + print("šŸ’” The invoke() method worked fine, so the supervisor itself is functional.") + import traceback + traceback.print_exc()