agent-pard0x/main.py
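"""Interactive LangGraph chatbot with shell access and a log-analysis tool.

Builds a small graph with an LLM node and a tool node, keeps the conversation
history across turns, and streams assistant replies to the terminal.
"""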

import os
from typing import Annotated
from typing_extensions import TypedDict
from langchain.chat_models import init_chat_model
from langchain_community.tools.shell.tool import ShellTool
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from log_analyzer import analyze_log_file


class State(TypedDict):
    # Messages have the type "list". The `add_messages` function in the
    # annotation defines how this state key should be updated (in this case,
    # it appends messages to the list rather than overwriting them).
    messages: Annotated[list, add_messages]


def create_chatbot():
    """Create and return a compiled chatbot graph with shell capabilities."""
    # Initialize the StateGraph
    graph_builder = StateGraph(State)

    # Initialize the chat model (OpenAI gpt-4o-mini).
    # Make sure you have set your OPENAI_API_KEY environment variable.
    llm = init_chat_model("openai:gpt-4o-mini")

    # Define the tools: a shell tool plus the custom log-analysis tool
    shell_tool = ShellTool()
    tools = [shell_tool, analyze_log_file]

    # Bind the tools to the LLM so it knows how to call them
    llm_with_tools = llm.bind_tools(tools)

    def chatbot(state: State):
        """Chatbot node function that processes messages."""
        # Print the messages being processed
        print("Current messages:", state["messages"])
        return {"messages": [llm_with_tools.invoke(state["messages"])]}

    # Add the chatbot node to the graph
    graph_builder.add_node("chatbot", chatbot)

    # Add the tool node to handle tool calls
    tool_node = ToolNode(tools=tools)
    graph_builder.add_node("tools", tool_node)

    # Route from the chatbot: tools_condition sends the run to the "tools" node
    # when the last message contains tool calls, and to END otherwise.
    graph_builder.add_conditional_edges(
        "chatbot",
        tools_condition,
    )

    # Add the remaining edges
    graph_builder.add_edge(START, "chatbot")
    graph_builder.add_edge("tools", "chatbot")

    # Compile the graph
    graph = graph_builder.compile()
    return graph


def stream_graph_updates(graph, user_input: str, conversation_state: dict):
    """Stream graph updates for a user input while maintaining conversation history."""
    # Add the new user message to the existing conversation
    conversation_state["messages"].append({"role": "user", "content": user_input})

    # Stream the graph with the full conversation history. In the default
    # "updates" stream mode each event maps a node name to the messages that
    # node just produced, so extend the history instead of overwriting it.
    for event in graph.stream(conversation_state):
        for value in event.values():
            conversation_state["messages"].extend(value["messages"])
            print("Assistant:", value["messages"][-1].content)


def main():
    # Check that the OpenAI API key is set
    if not os.getenv("OPENAI_API_KEY"):
        print("Please set your OPENAI_API_KEY environment variable.")
        print("You can set it by running: export OPENAI_API_KEY='your-api-key-here'")
        return

    print("🤖 LangGraph Chatbot with Shell Access")
    print("Type 'quit', 'exit', or 'q' to exit the chat.")
    print("⚠️ WARNING: This bot has shell access - use with caution!")
    print("-" * 50)

    # Create the chatbot
    try:
        graph = create_chatbot()
        print("✅ Chatbot with shell tool initialized successfully!")
    except Exception as e:
        print(f"❌ Error initializing chatbot: {e}")
        return

    # Initialize conversation state to maintain history across turns
    conversation_state = {"messages": []}

    # Start the chat loop
    while True:
        try:
            user_input = input("\nUser: ")
            if user_input.lower() in ["quit", "exit", "q"]:
                print("👋 Goodbye!")
                break
            if user_input.strip():
                stream_graph_updates(graph, user_input, conversation_state)
            else:
                print("Please enter a message.")
        except KeyboardInterrupt:
            print("\n👋 Goodbye!")
            break
        except Exception as e:
            print(f"❌ Error: {e}")


if __name__ == "__main__":
    main()