Learn how to use HippocampAI with practical, real-world examples.
Build a chatbot that remembers user preferences and conversation history across sessions.
Initialize HippocampAI with your configuration.
from hippocamp import MemoryEngine
import os

# Initialize the memory engine.
# The OpenAI key is read from the environment rather than hard-coded;
# Qdrant is assumed to be running locally on its default port (6333).
engine = MemoryEngine(
    api_key=os.getenv("OPENAI_API_KEY"),
    qdrant_host="localhost",
    qdrant_port=6333,
)
print("Memory engine initialized!")
Start a new conversation session for a user.
# Create a session for the user.
# A session groups conversation turns under one (user_id, session_id)
# pair so history can be resumed across connections.
user_id = "user_123"
session_id = "chat_session_1"
session = engine.create_session(
    user_id=user_id,
    session_id=session_id,
)
print(f"Session created: {session_id}")
Save messages to build conversation history.
# User sends a message
user_message = "Hi! I prefer dark mode and I'm interested in AI."

# Record the turn in the session's conversation history.
session.add_message(
    role="user",
    content=user_message,
)

# Store user preferences as a separate long-term memory.
# NOTE(review): importance presumably weights retrieval ranking
# (0.0-1.0) — confirm against the HippocampAI API reference.
engine.store(
    content="User prefers dark mode interface",
    user_id=user_id,
    metadata={"type": "preference", "category": "ui"},
    importance=0.8,
)

# Tags allow memories to be filtered or grouped at search time.
engine.store(
    content="User is interested in AI and machine learning",
    user_id=user_id,
    metadata={"type": "interest", "category": "topics"},
    tags=["ai", "ml", "technology"],
)
print("Messages and preferences stored!")
Get relevant memories to personalize the response.
# When generating a response, retrieve context relevant to the query.
query = "What does the user like?"

# Semantic search over this user's stored memories, capped at 5 hits.
memories = engine.search(
    query=query,
    user_id=user_id,
    limit=5,
)

print("Relevant memories found:")
for memory in memories:
    # Each hit exposes its text and a relevance score.
    print(f"  - {memory.content} (relevance: {memory.relevance:.2f})")

# Recent conversation turns (most recent 10) for prompt context.
context = session.get_context(limit=10)
print(f"\nConversation context: {len(context)} messages")
A full working chatbot with memory integration.
from hippocamp import MemoryEngine
from openai import OpenAI
class MemoryEnabledChatbot:
    """Chatbot that augments GPT-4 replies with per-user memories.

    Each user turn is stored in the session, relevant memories are
    retrieved and injected into the system prompt, and the assistant's
    reply is stored back so future turns see the full history.
    """

    def __init__(self, api_key: str):
        # The same key is reused for both HippocampAI and OpenAI here;
        # split the credentials if your deployment uses separate keys.
        self.engine = MemoryEngine(api_key=api_key)
        self.openai = OpenAI(api_key=api_key)
        self.user_id = None   # set by start_session()
        self.session = None   # set by start_session()

    def start_session(self, user_id: str, session_id: str) -> None:
        """Bind the bot to a user and open (or resume) a chat session."""
        self.user_id = user_id
        self.session = self.engine.create_session(user_id, session_id)

    def chat(self, user_message: str) -> str:
        """Store the user turn, answer with memory context, store the reply.

        Raises:
            RuntimeError: if called before start_session().
        """
        if self.session is None:
            # Fail fast with a clear message instead of an AttributeError
            # deep inside add_message().
            raise RuntimeError("call start_session() before chat()")

        # Store user message in the conversation history.
        self.session.add_message(role="user", content=user_message)

        # Retrieve the memories most relevant to this message.
        memories = self.engine.search(
            query=user_message,
            user_id=self.user_id,
            limit=5,
        )

        # Inject the retrieved memories into the system prompt.
        memory_context = "\n".join(m.content for m in memories)
        response = self.openai.chat.completions.create(
            model="gpt-4",
            messages=[
                {"role": "system", "content": f"User context:\n{memory_context}"},
                {"role": "user", "content": user_message},
            ],
        )
        assistant_message = response.choices[0].message.content

        # Persist the assistant turn so future retrievals can see it.
        self.session.add_message(role="assistant", content=assistant_message)
        return assistant_message
# Usage example: one round-trip through the memory-enabled bot.
chatbot = MemoryEnabledChatbot(api_key="your_key")
chatbot.start_session("user_123", "chat_1")
reply = chatbot.chat("What's the weather like?")
print(reply)