from fibonacci import Workflow, LLMNode, MemoryConfig
# Workflow wired to a Redis-backed memory store so per-user state
# survives across process restarts.
memory_config = MemoryConfig(
    backend="redis",
    connection_url="redis://localhost:6379/0",
    key_prefix="chatbot:",
)
workflow = Workflow(name="chatbot", memory_config=memory_config)

# Single LLM node; it declares a read dependency on the "history" memory
# key so the template's {{memory.history}} placeholder is populated.
chat = LLMNode(
    id="chat",
    model="claude-sonnet-4-5-20250929",
    prompt="""Conversation history:
{{memory.history}}
User: {{input.message}}
Respond naturally.""",
    memory_read=["history"],
)
workflow.add_node(chat)
def chat_with_user(user_id: str, message: str):
    """Run one chat turn for *user_id* and persist the updated history.

    Reads the per-user "history" list from workflow memory, executes the
    workflow with the incoming message, then writes the history back with
    the new user/assistant exchange appended, trimmed to the last 20
    entries.

    NOTE(review): this read-modify-write on "history" is not atomic — two
    concurrent calls for the same user can drop an exchange. Confirm
    whether the memory backend offers an atomic append/transaction.
    """
    # Prior exchanges for this user; empty list on first contact.
    transcript = workflow.memory.get(
        "history",
        scope="user",
        user_id=user_id,
        default=[],
    )

    # Execute the workflow. The current message is passed via inputs and
    # rendered directly in the prompt, so it need not be in memory yet.
    outputs = workflow.execute(
        inputs={"message": message},
        user_id=user_id,
    )
    reply = outputs["chat"]

    # Record this exchange and cap stored history at the 20 most recent
    # entries (10 user/assistant turns).
    transcript.extend(
        (
            {"role": "user", "content": message},
            {"role": "assistant", "content": reply},
        )
    )
    workflow.memory.set(
        "history",
        transcript[-20:],
        scope="user",
        user_id=user_id,
    )
    return reply