import asyncio
from datetime import datetime, timezone
from typing import Annotated

from langchain_core.messages import AIMessage, SystemMessage
from langchain_openai import ChatOpenAI
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph, add_messages
from langgraph.prebuilt import ToolNode
from typing_extensions import TypedDict
class State(TypedDict):
    """Shared LangGraph state threaded through every node in the graph."""

    # Conversation history; the `add_messages` reducer appends incoming
    # messages to the list instead of overwriting it.
    messages: Annotated[list, add_messages]
    # Display name of the human user, used to attribute their messages.
    user_name: str
    # UUID of the user's node in Graphiti; used as the center for
    # reranked fact search — TODO confirm against the calling code.
    user_node_uuid: str
# Strong references to fire-and-forget persistence tasks. asyncio keeps only
# weak references to scheduled tasks, so without this set a pending
# `add_episode` task could be garbage-collected before it ever runs.
_background_tasks: set = set()


async def chatbot(state: State):
    """Agent node that retrieves context from Graphiti and generates responses.

    Reads ``messages``, ``user_name`` and ``user_node_uuid`` from the graph
    state, searches Graphiti for facts centered on the user's node, answers
    with the LLM, and persists the exchange back to Graphiti in the
    background. Returns the new AI message (merged via ``add_messages``).
    """
    facts_string = None
    if state['messages']:
        last_message = state['messages'][-1]
        # Attribute the message to the bot or the user so the search query
        # carries the speaker's identity.
        speaker = 'SalesBot' if isinstance(last_message, AIMessage) else state['user_name']
        query = f'{speaker}: {last_message.content}'
        # Search Graphiti using user's node as center:
        # facts closer to the user node are ranked higher.
        edge_results = await client.search(
            query,
            center_node_uuid=state['user_node_uuid'],
            num_results=5,
        )
        facts_string = edges_to_facts_string(edge_results)

    # Build system message with retrieved facts.
    system_message = SystemMessage(
        content=f"""You are a skillful shoe salesperson working for ManyBirds.
Review information about the user and their conversation below.
Keep responses short and concise. Always be selling and helpful!
Things you need to know to close a sale:
- User's shoe size
- Any special needs (wide feet, arch support, etc.)
- Preferred colors and styles
- Budget
Facts about the user:
{facts_string or 'No facts about the user yet'}"""
    )

    messages = [system_message] + state['messages']
    response = await llm.ainvoke(messages)

    # Asynchronously persist the interaction to Graphiti. Guarded so an empty
    # history cannot raise IndexError on `messages[-1]` (the search above is
    # guarded the same way). NOTE(review): the last message is labeled with
    # the user's name even if it was an AIMessage — confirm callers always
    # invoke this node right after a human turn.
    if state['messages']:
        task = asyncio.create_task(
            client.add_episode(
                name='Chatbot Response',
                episode_body=f'{state["user_name"]}: {state["messages"][-1].content}\nSalesBot: {response.content}',
                source=EpisodeType.message,
                reference_time=datetime.now(timezone.utc),
                source_description='Chatbot',
            )
        )
        # Hold a strong reference until the task completes, then drop it.
        _background_tasks.add(task)
        task.add_done_callback(_background_tasks.discard)

    return {'messages': [response]}
# Initialize LLM with tools.
# Temperature 0 keeps responses deterministic; binding the tool schemas lets
# the model emit tool calls that `should_continue` routes on. `tools` is
# presumably defined elsewhere in this module — TODO confirm.
llm = ChatOpenAI(model='gpt-4.1-mini', temperature=0).bind_tools(tools)
# Prebuilt node that executes any tool calls found on the last AI message.
tool_node = ToolNode(tools)
# Define conditional logic for routing after the agent node.
async def should_continue(state, config):
    """Return 'continue' if the last message requested tool calls, else 'end'.

    Used as the conditional-edge router after the agent node: 'continue'
    sends control to the tool node, 'end' finishes the turn.
    """
    last_message = state['messages'][-1]
    # getattr guards against message types that lack a `tool_calls`
    # attribute (only AI messages carry it); missing or empty means end.
    return 'continue' if getattr(last_message, 'tool_calls', None) else 'end'
# Build the graph: START -> agent -> (tools -> agent)* -> END.
graph_builder = StateGraph(State)
# In-memory checkpointer: per-thread conversation state persists only for
# the lifetime of this process.
memory = MemorySaver()
graph_builder.add_node('agent', chatbot)
graph_builder.add_node('tools', tool_node)
graph_builder.add_edge(START, 'agent')
# After the agent runs, either execute requested tools or finish the turn,
# as decided by `should_continue`.
graph_builder.add_conditional_edges(
    'agent',
    should_continue,
    {'continue': 'tools', 'end': END}
)
# Tool output loops back to the agent so it can incorporate the results.
graph_builder.add_edge('tools', 'agent')
graph = graph_builder.compile(checkpointer=memory)