Stable checkpoint with chat interface

This commit is contained in:
Pratik Narola 2025-09-04 17:07:49 +05:30
parent f929165a89
commit 28a8953ac5
7 changed files with 1181 additions and 22 deletions

View file

@ -14,6 +14,8 @@ class Settings(BaseSettings):
openai_base_url: str = Field(..., env="OPENAI_BASE_URL")
embedder_api_key: str = Field(..., env="EMBEDDER_API_KEY")
# ollama_base_url: str = Field(..., env="OLLAMA_BASE_URL")
# Database Configuration
postgres_host: str = Field("localhost", env="POSTGRES_HOST")
postgres_port: int = Field(5432, env="POSTGRES_PORT")

View file

@ -71,11 +71,11 @@ app = FastAPI(
lifespan=lifespan
)
# Add CORS middleware
# Add CORS middleware - Allow all origins for development
app.add_middleware(
CORSMiddleware,
allow_origins=settings.cors_origins_list,
allow_credentials=True,
allow_origins=["*"], # Allow all origins for development
allow_credentials=False, # Must be False when allow_origins=["*"]
allow_methods=["*"],
allow_headers=["*"],
)
@ -203,11 +203,17 @@ async def chat_with_memory(request: ChatRequest):
try:
logger.info(f"Processing chat request for user: {request.user_id}")
# Convert ChatMessage objects to dict format if context provided
context_dict = None
if request.context:
context_dict = [{"role": msg.role, "content": msg.content} for msg in request.context]
result = await mem0_manager.chat_with_memory(
message=request.message,
user_id=request.user_id,
agent_id=request.agent_id,
run_id=request.run_id,
context=context_dict
)
return result
@ -341,7 +347,7 @@ async def get_graph_relationships(user_id: str):
"""Get graph relationships - pure Mem0 passthrough."""
try:
logger.info(f"Retrieving graph relationships for user: {user_id}")
result = await mem0_manager.get_graph_relationships(user_id=user_id, agent_id=None, run_id=None)
result = await mem0_manager.get_graph_relationships(user_id=user_id, agent_id=None, run_id=None, limit=10000)
return result

View file

@ -32,10 +32,10 @@ class Mem0Manager:
"embedder": {
"provider": "ollama",
"config": {
"model": "hf.co/Qwen/Qwen3-Embedding-8B-GGUF:Q8_0",
"model": "hf.co/Qwen/Qwen3-Embedding-4B-GGUF:Q8_0",
# "api_key": settings.embedder_api_key,
"ollama_base_url": "http://host.docker.internal:11434",
"embedding_dims": 4096
"embedding_dims": 2560
}
},
"vector_store": {
@ -46,7 +46,7 @@ class Mem0Manager:
"password": settings.postgres_password,
"host": settings.postgres_host,
"port": settings.postgres_port,
"embedding_model_dims": 4096
"embedding_model_dims": 2560
}
},
"graph_store": {
@ -196,7 +196,7 @@ class Mem0Manager:
raise e
async def get_graph_relationships(self, user_id: Optional[str], agent_id: Optional[str], run_id: Optional[str]) -> Dict[str, Any]:
async def get_graph_relationships(self, user_id: Optional[str], agent_id: Optional[str], run_id: Optional[str], limit: int = 50) -> Dict[str, Any]:
"""Get graph relationships - using correct Mem0 get_all() method."""
try:
# Use get_all() to retrieve memories with graph relationships
@ -204,7 +204,7 @@ class Mem0Manager:
user_id=user_id,
agent_id=agent_id,
run_id=run_id,
limit=50
limit=limit
)
# Extract relationships from Mem0's response structure
@ -256,30 +256,68 @@ class Mem0Manager:
user_id: Optional[str] = None,
agent_id: Optional[str] = None,
run_id: Optional[str] = None,
# context: Optional[List[Dict[str, str]]] = None,
context: Optional[List[Dict[str, str]]] = None,
# metadata: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Chat with memory - native Mem0 pattern (15 lines vs 95)."""
"""Chat with memory - native Mem0 pattern with detailed timing."""
import time
try:
# Retrieve relevant memories using direct Mem0 search
total_start_time = time.time()
print(f"\n🚀 Starting chat request for user: {user_id}")
# Stage 1: Memory Search
search_start_time = time.time()
search_result = self.memory.search(query=message, user_id=user_id, agent_id=agent_id, run_id=run_id, limit=10, threshold=0.3)
relevant_memories = search_result.get("results", [])
memories_str = "\n".join(f"- {entry['memory']}" for entry in relevant_memories)
search_time = time.time() - search_start_time
print(f"🔍 Memory search took: {search_time:.2f}s (found {len(relevant_memories)} memories)")
# Generate Assistant response using Mem0's standard pattern
# Stage 2: Prepare LLM messages
prep_start_time = time.time()
system_prompt = f"You are a helpful AI. Answer the question based on query and memories.\nUser Memories:\n{memories_str}"
messages = [{"role": "system", "content": system_prompt}, {"role": "user", "content": message}]
messages = [{"role": "system", "content": system_prompt}]
# Add conversation context if provided (last 50 messages)
if context:
messages.extend(context)
print(f"📝 Added {len(context)} context messages")
# Add current user message
messages.append({"role": "user", "content": message})
prep_time = time.time() - prep_start_time
print(f"📋 Message preparation took: {prep_time:.3f}s")
# Stage 3: LLM Call
llm_start_time = time.time()
response = self.openai_client.chat.completions.create(model=settings.default_model, messages=messages)
assistant_response = response.choices[0].message.content
llm_time = time.time() - llm_start_time
print(f"🤖 LLM call took: {llm_time:.2f}s (model: {settings.default_model})")
# Create new memories from the conversation
messages.append({"role": "assistant", "content": assistant_response})
self.memory.add(messages, user_id=user_id)
# Stage 4: Memory Add
add_start_time = time.time()
memory_messages = [{"role": "user", "content": message}, {"role": "assistant", "content": assistant_response}]
self.memory.add(memory_messages, user_id=user_id)
add_time = time.time() - add_start_time
print(f"💾 Memory add took: {add_time:.2f}s")
# Total timing summary
total_time = time.time() - total_start_time
print(f"⏱️ TOTAL: {total_time:.2f}s | Search: {search_time:.2f}s | LLM: {llm_time:.2f}s | Add: {add_time:.2f}s | Prep: {prep_time:.3f}s")
print(f"📊 Breakdown: Search {(search_time/total_time)*100:.1f}% | LLM {(llm_time/total_time)*100:.1f}% | Add {(add_time/total_time)*100:.1f}%\n")
return {
"response": assistant_response,
"memories_used": len(relevant_memories),
"model_used": settings.default_model
"model_used": settings.default_model,
"timing": {
"total": round(total_time, 2),
"search": round(search_time, 2),
"llm": round(llm_time, 2),
"add": round(add_time, 2)
}
}
except Exception as e:

View file

@ -25,7 +25,7 @@ CREATE TABLE IF NOT EXISTS embeddings (
id SERIAL PRIMARY KEY,
user_id VARCHAR(255),
content TEXT,
embedding VECTOR(4096), -- OpenAI embedding dimension
embedding VECTOR(2560), -- OpenAI embedding dimension
metadata JSONB,
created_at TIMESTAMP DEFAULT NOW()
);

60
frontend/README.md Normal file
View file

@ -0,0 +1,60 @@
# Mem0 Chat Frontend
A simple, clean frontend for chatting with your Mem0 memories.
## Features
- **Chat Interface**: Send messages and get AI responses with memory context
- **Memory Sidebar**: View all your memories with timestamps
- **Memory Management**: Delete individual memories with confirmation
- **Persistent Chat**: Chat history saved in localStorage (survives page refresh)
- **Real-time Updates**: Memories automatically refresh after each chat
- **Simple & Clean**: No fancy animations, just working functionality
## Setup
1. **Make sure the backend is running** on `http://localhost:8000`
2. **Open the frontend** by simply opening `index.html` in your browser:
```bash
open frontend/index.html
```
Or serve it with a simple HTTP server:
```bash
cd frontend
python3 -m http.server 8080
# Then open http://localhost:8080
```
## Usage
1. **Start chatting** - Type messages in the input field and press Enter or click Send
2. **View memories** - All extracted memories appear in the right sidebar
3. **Delete memories** - Click the "Delete" button on any memory (with confirmation)
4. **Refresh memories** - Click the 🔄 button to manually refresh the memories list
5. **Persistent history** - Your chat history is saved and will reload when you refresh the page
## Configuration
- **User ID**: Hardcoded as "pratik" in the JavaScript
- **Backend URL**: `http://localhost:8000` (change in the JavaScript if needed)
- **Memory Limit**: Loads up to 50 memories
## Debugging
Open browser console and use:
- `clearChatHistory()` - Clear all stored chat history
## Files
- `index.html` - Complete frontend (HTML + CSS + JavaScript all in one file)
- `README.md` - This file
## API Integration
The frontend uses these backend endpoints:
- `POST /chat` - Send messages and get responses
- `GET /memories/pratik` - Load user memories
- `DELETE /memories/{id}` - Delete specific memory
Simple, functional, and clean - exactly as requested!

545
frontend/graph.html Normal file
View file

@ -0,0 +1,545 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Memory Graph Visualization</title>
<script src="https://d3js.org/d3.v7.min.js"></script>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
background-color: #f5f5f5;
height: 100vh;
overflow: hidden;
}
.container {
width: 100%;
height: 100vh;
display: flex;
flex-direction: column;
}
.header {
background: white;
padding: 15px 20px;
border-bottom: 1px solid #e0e0e0;
display: flex;
justify-content: space-between;
align-items: center;
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
}
.header h1 {
font-size: 24px;
color: #333;
margin: 0;
}
.stats {
display: flex;
gap: 20px;
font-size: 14px;
color: #666;
}
.stat-item {
background: #f8f9fa;
padding: 5px 10px;
border-radius: 4px;
border: 1px solid #e9ecef;
}
.controls {
display: flex;
gap: 10px;
align-items: center;
}
.control-btn {
background: #007bff;
color: white;
border: none;
padding: 8px 16px;
border-radius: 4px;
cursor: pointer;
font-size: 12px;
font-weight: 500;
}
.control-btn:hover {
background: #0056b3;
}
.control-btn:disabled {
background: #6c757d;
cursor: not-allowed;
}
.graph-container {
flex: 1;
background: white;
position: relative;
overflow: hidden;
}
.graph-svg {
width: 100%;
height: 100%;
cursor: grab;
}
.graph-svg:active {
cursor: grabbing;
}
.node {
stroke: #333;
stroke-width: 1.5px;
cursor: pointer;
}
.node:hover {
stroke-width: 3px;
stroke: #007bff;
}
.link {
stroke: #999;
stroke-opacity: 0.6;
stroke-width: 1.5px;
}
.link:hover {
stroke: #007bff;
stroke-opacity: 1;
stroke-width: 3px;
}
.node-label {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
font-size: 11px;
fill: #333;
text-anchor: middle;
pointer-events: none;
user-select: none;
}
.relationship-label {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
font-size: 9px;
fill: #666;
text-anchor: middle;
pointer-events: none;
user-select: none;
opacity: 0;
transition: opacity 0.3s;
}
.status-indicator {
position: absolute;
top: 20px;
right: 20px;
background: #28a745;
color: white;
padding: 5px 10px;
border-radius: 4px;
font-size: 12px;
font-weight: 500;
}
.status-indicator.loading {
background: #ffc107;
color: #333;
}
.status-indicator.error {
background: #dc3545;
}
.loading-overlay {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: rgba(255, 255, 255, 0.8);
display: flex;
justify-content: center;
align-items: center;
font-size: 18px;
color: #666;
z-index: 1000;
}
.tooltip {
position: absolute;
background: rgba(0, 0, 0, 0.8);
color: white;
padding: 8px 12px;
border-radius: 4px;
font-size: 12px;
pointer-events: none;
z-index: 1001;
opacity: 0;
transition: opacity 0.3s;
}
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>Memory Graph Visualization</h1>
<div class="stats">
<div class="stat-item">
Memories: <span id="totalMemories">0</span>
</div>
<div class="stat-item">
Relationships: <span id="totalRelationships">0</span>
</div>
<div class="stat-item">
Entities: <span id="totalEntities">0</span>
</div>
</div>
<div class="controls">
<button class="control-btn" id="pauseBtn">Pause</button>
<button class="control-btn" id="refreshBtn">Refresh</button>
<button class="control-btn" id="resetBtn">Reset View</button>
</div>
</div>
<div class="graph-container">
<div class="loading-overlay" id="loadingOverlay">
Loading graph data...
</div>
<div class="status-indicator" id="statusIndicator">Connecting...</div>
<div class="tooltip" id="tooltip"></div>
<svg class="graph-svg" id="graphSvg"></svg>
</div>
</div>
<script>
// Configuration
const API_BASE = 'http://localhost:8000';
const USER_ID = 'pratik';
const POLL_INTERVAL = 30000; // 30 seconds
// Global variables
let simulation;
let svg, g;
let nodes = [];
let links = [];
let isPolling = true;
let pollTimer;
let currentData = null;
// DOM elements
const loadingOverlay = document.getElementById('loadingOverlay');
const statusIndicator = document.getElementById('statusIndicator');
const totalMemories = document.getElementById('totalMemories');
const totalRelationships = document.getElementById('totalRelationships');
const totalEntities = document.getElementById('totalEntities');
const pauseBtn = document.getElementById('pauseBtn');
const refreshBtn = document.getElementById('refreshBtn');
const resetBtn = document.getElementById('resetBtn');
const tooltip = document.getElementById('tooltip');
// Initialize on page load
document.addEventListener('DOMContentLoaded', function() {
initializeGraph();
setupEventListeners();
loadGraphData();
startPolling();
});
// Wire the header control buttons to their actions.
function setupEventListeners() {
    const bindings = [
        [pauseBtn, togglePolling],
        [refreshBtn, loadGraphData],
        [resetBtn, resetView],
    ];
    for (const [button, handler] of bindings) {
        button.addEventListener('click', handler);
    }
}
// Build the SVG canvas, zoom/pan behavior and the force simulation.
// Mutates the module-level `svg`, `g` and `simulation` variables.
function initializeGraph() {
    const container = document.querySelector('.graph-container');
    const w = container.clientWidth;
    const h = container.clientHeight;

    svg = d3.select('#graphSvg')
        .attr('width', w)
        .attr('height', h);

    // All graph elements live in a single group so zoom/pan is one transform.
    g = svg.append('g');

    const zoomBehavior = d3.zoom()
        .scaleExtent([0.1, 10])
        .on('zoom', (event) => g.attr('transform', event.transform));
    svg.call(zoomBehavior);

    simulation = d3.forceSimulation()
        .force('link', d3.forceLink().id((d) => d.id).distance(80))
        .force('charge', d3.forceManyBody().strength(-300))
        .force('center', d3.forceCenter(w / 2, h / 2))
        .force('collision', d3.forceCollide().radius(20));
}
// Fetch the user's graph data, refresh the stats header, and redraw.
async function loadGraphData() {
    try {
        showStatus('Loading...', 'loading');
        const response = await fetch(`${API_BASE}/graph/relationships/${USER_ID}`);
        if (!response.ok) {
            throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }
        const payload = await response.json();
        currentData = payload;

        // Header counters; fall back to 0 when a field is absent.
        totalMemories.textContent = payload.total_memories || 0;
        totalRelationships.textContent = payload.total_relationships || 0;
        totalEntities.textContent = payload.entities ? payload.entities.length : 0;

        processDataAndUpdateGraph(payload);
        showStatus('Connected', 'success');
    } catch (error) {
        console.error('Error loading graph data:', error);
        showStatus('Error: ' + error.message, 'error');
    } finally {
        // The initial loading overlay goes away on success and failure alike.
        hideLoading();
    }
}
// Convert the API payload into D3 node/link arrays and trigger a redraw.
function processDataAndUpdateGraph(data) {
    const entityNames = new Set();

    // Entities the API reports explicitly...
    (data.entities || []).forEach((entity) => entityNames.add(entity.name));

    // ...plus any endpoint that only appears inside a relationship.
    (data.relationships || []).forEach((rel) => {
        entityNames.add(rel.source);
        entityNames.add(rel.target);
    });

    nodes = [...entityNames].map((name) => ({ id: name, name: name }));

    links = (data.relationships || []).map((rel) => ({
        source: rel.source,
        target: rel.target,
        relationship: rel.relationship,
    }));

    updateGraph();
}
// Full redraw of the graph: remove last frame's elements, rebind the new
// node/link data, then restart the simulation so positions settle again.
// NOTE: draw order matters — links are appended first so nodes/labels
// render on top of the edges.
function updateGraph() {
    // Clear existing elements
    g.selectAll('.link').remove();
    g.selectAll('.node').remove();
    g.selectAll('.node-label').remove();
    // Create links (edges); tooltips show the relationship on hover
    const link = g.selectAll('.link')
        .data(links)
        .enter().append('line')
        .attr('class', 'link')
        .on('mouseover', showLinkTooltip)
        .on('mouseout', hideTooltip);
    // Create nodes: draggable circles with hover tooltips
    const node = g.selectAll('.node')
        .data(nodes)
        .enter().append('circle')
        .attr('class', 'node')
        .attr('r', 8)
        .attr('fill', '#69b3ff')
        .on('mouseover', showNodeTooltip)
        .on('mouseout', hideTooltip)
        .call(d3.drag()
            .on('start', dragStarted)
            .on('drag', dragged)
            .on('end', dragEnded));
    // Create node labels, offset above each circle (dy -12)
    const label = g.selectAll('.node-label')
        .data(nodes)
        .enter().append('text')
        .attr('class', 'node-label')
        .text(d => d.name)
        .attr('dy', -12);
    // Update simulation: forceLink resolves the string source/target ids in
    // `links` into references to the node objects during this call
    simulation.nodes(nodes);
    simulation.force('link').links(links);
    simulation.alpha(1).restart();
    // Update positions on each tick (replaces any previous tick handler)
    simulation.on('tick', () => {
        link
            .attr('x1', d => d.source.x)
            .attr('y1', d => d.source.y)
            .attr('x2', d => d.target.x)
            .attr('y2', d => d.target.y);
        node
            .attr('cx', d => d.x)
            .attr('cy', d => d.y);
        label
            .attr('x', d => d.x)
            .attr('y', d => d.y);
    });
}
// Show a tooltip with the hovered node's name and connection count.
//
// Fix: entity names come from user-generated memories; the old code
// interpolated them into innerHTML, allowing HTML/script injection. The
// tooltip is now built with DOM nodes and textContent, which cannot be
// parsed as markup.
function showNodeTooltip(event, d) {
    const connections = links.filter(link =>
        link.source.id === d.id || link.target.id === d.id
    ).length;
    const name = document.createElement('strong');
    name.textContent = d.name;
    tooltip.replaceChildren(
        name,
        document.createElement('br'),
        document.createTextNode(`Connections: ${connections}`)
    );
    tooltip.style.opacity = 1;
    tooltip.style.left = (event.pageX + 10) + 'px';
    tooltip.style.top = (event.pageY - 10) + 'px';
}
// Show a tooltip describing a relationship edge: source, predicate, target.
//
// Fix: entity and relationship names are user-derived; the old code
// interpolated them into innerHTML (HTML/script injection). Everything is
// now built with DOM nodes and textContent instead.
function showLinkTooltip(event, d) {
    const source = document.createElement('strong');
    source.textContent = d.source.name;
    const predicate = document.createElement('em');
    predicate.textContent = d.relationship;
    const target = document.createElement('strong');
    target.textContent = d.target.name;
    tooltip.replaceChildren(
        source, document.createElement('br'),
        predicate, document.createElement('br'),
        target
    );
    tooltip.style.opacity = 1;
    tooltip.style.left = (event.pageX + 10) + 'px';
    tooltip.style.top = (event.pageY - 10) + 'px';
}
// Fade the shared tooltip out (the CSS opacity transition animates it).
function hideTooltip() {
    tooltip.style.setProperty('opacity', 0);
}
// --- Drag handlers: pin the node to the pointer while dragging, then
// --- hand it back to the simulation on release (fx/fy = null unpins).
function dragStarted(event, d) {
    // Warm the simulation back up so neighbors react to the drag.
    if (!event.active) {
        simulation.alphaTarget(0.3).restart();
    }
    d.fx = d.x;
    d.fy = d.y;
}
function dragged(event, d) {
    d.fx = event.x;
    d.fy = event.y;
}
function dragEnded(event, d) {
    // Let the simulation cool down again once no drag is active.
    if (!event.active) {
        simulation.alphaTarget(0);
    }
    d.fx = null;
    d.fy = null;
}
// (Re)start the periodic refresh timer; any previous timer is cancelled
// first so at most one interval is ever active.
function startPolling() {
    if (pollTimer) {
        clearInterval(pollTimer);
    }
    pollTimer = setInterval(() => {
        if (!isPolling) return;
        loadGraphData();
    }, POLL_INTERVAL);
}
// Pause/resume the auto-refresh and update the button label + status pill.
function togglePolling() {
    isPolling = !isPolling;
    if (isPolling) {
        pauseBtn.textContent = 'Pause';
        startPolling();
        showStatus('Connected', 'success');
    } else {
        pauseBtn.textContent = 'Resume';
        clearInterval(pollTimer);
        showStatus('Paused', 'loading');
    }
}
// Reset pan/zoom to identity and recenter the simulation.
//
// Fix: the old code called `svg.transition().call(d3.zoom().transform, ...)`
// on a brand-new zoom behavior. d3.zoom fires the 'zoom' event on the
// behavior that set the transform, and the new behavior has no listeners —
// so the <g> transform was never updated and the button did nothing
// visible. We now reset d3's stored per-element zoom state AND animate the
// <g> transform back to identity explicitly.
function resetView() {
    const container = document.querySelector('.graph-container');
    const width = container.clientWidth;
    const height = container.clientHeight;
    // d3 stores the current transform on the element itself, shared by all
    // zoom behaviors, so this keeps future wheel/drag zooms consistent.
    svg.call(d3.zoom().transform, d3.zoomIdentity);
    g.transition()
        .duration(750)
        .attr('transform', d3.zoomIdentity.toString());
    // Restart simulation to recenter
    if (simulation) {
        simulation.force('center', d3.forceCenter(width / 2, height / 2));
        simulation.alpha(1).restart();
    }
}
// Update the status pill; `type` is an optional CSS modifier class
// ('loading' = yellow, 'error' = red, anything else keeps the default).
function showStatus(message, type) {
    statusIndicator.textContent = message;
    statusIndicator.className = type
        ? `status-indicator ${type}`
        : 'status-indicator';
}
// Dismiss the initial "Loading graph data..." overlay.
function hideLoading() {
    loadingOverlay.style.display = 'none';
}
// Keep the SVG size and centering force in sync with the window size.
window.addEventListener('resize', () => {
    const container = document.querySelector('.graph-container');
    const w = container.clientWidth;
    const h = container.clientHeight;
    svg.attr('width', w).attr('height', h);
    if (simulation) {
        simulation.force('center', d3.forceCenter(w / 2, h / 2));
        simulation.alpha(1).restart();
    }
});
// Suspend polling while the tab is hidden; resume on return — but only
// when the user has not paused manually (isPolling still true).
document.addEventListener('visibilitychange', () => {
    if (!isPolling) return;
    if (document.hidden) {
        clearInterval(pollTimer);
    } else {
        startPolling();
    }
});
</script>
</body>
</html>

508
frontend/index.html Normal file
View file

@ -0,0 +1,508 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Mem0 Chat Interface</title>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
background-color: #f5f5f5;
height: 100vh;
display: flex;
}
.container {
display: flex;
width: 100%;
height: 100vh;
}
/* Chat Section */
.chat-section {
flex: 1;
display: flex;
flex-direction: column;
background: white;
border-right: 1px solid #e0e0e0;
}
.chat-header {
padding: 20px;
background: #fafafa;
border-bottom: 1px solid #e0e0e0;
text-align: center;
}
.chat-header h1 {
font-size: 24px;
color: #333;
margin-bottom: 5px;
}
.chat-header p {
color: #666;
font-size: 14px;
}
.chat-messages {
flex: 1;
overflow-y: auto;
padding: 20px;
display: flex;
flex-direction: column;
gap: 15px;
}
.message {
max-width: 80%;
padding: 12px 16px;
border-radius: 18px;
word-wrap: break-word;
}
.message.user {
align-self: flex-end;
background: #007bff;
color: white;
}
.message.assistant {
align-self: flex-start;
background: #f1f1f1;
color: #333;
border: 1px solid #e0e0e0;
}
.message-metadata {
font-size: 11px;
color: #888;
margin-top: 6px;
font-style: italic;
}
.chat-input {
padding: 20px;
background: white;
border-top: 1px solid #e0e0e0;
display: flex;
gap: 10px;
}
.chat-input input {
flex: 1;
padding: 12px 16px;
border: 1px solid #ddd;
border-radius: 20px;
font-size: 14px;
outline: none;
}
.chat-input input:focus {
border-color: #007bff;
}
.chat-input button {
padding: 12px 20px;
background: #007bff;
color: white;
border: none;
border-radius: 20px;
cursor: pointer;
font-size: 14px;
font-weight: 500;
}
.chat-input button:hover {
background: #0056b3;
}
.chat-input button:disabled {
background: #ccc;
cursor: not-allowed;
}
/* Memories Section */
.memories-section {
width: 350px;
background: white;
display: flex;
flex-direction: column;
}
.memories-header {
padding: 20px;
background: #fafafa;
border-bottom: 1px solid #e0e0e0;
display: flex;
justify-content: space-between;
align-items: center;
}
.memories-header h2 {
font-size: 18px;
color: #333;
}
.refresh-btn {
background: none;
border: none;
cursor: pointer;
font-size: 16px;
color: #666;
padding: 5px;
border-radius: 4px;
}
.refresh-btn:hover {
background: #f0f0f0;
}
.memories-list {
flex: 1;
overflow-y: auto;
padding: 10px;
}
.memory-item {
background: #f9f9f9;
border: 1px solid #e0e0e0;
border-radius: 8px;
padding: 12px;
margin-bottom: 10px;
position: relative;
}
.memory-content {
font-size: 14px;
color: #333;
margin-bottom: 8px;
line-height: 1.4;
}
.memory-meta {
display: flex;
justify-content: space-between;
align-items: center;
font-size: 12px;
color: #666;
}
.memory-timestamp {
font-size: 11px;
}
.delete-btn {
background: #ff4757;
color: white;
border: none;
border-radius: 4px;
padding: 4px 8px;
cursor: pointer;
font-size: 11px;
}
.delete-btn:hover {
background: #ff3742;
}
.loading {
text-align: center;
padding: 20px;
color: #666;
font-style: italic;
}
.error {
background: #ffe6e6;
border: 1px solid #ffcccc;
color: #cc0000;
padding: 10px;
border-radius: 4px;
margin: 10px;
font-size: 14px;
}
/* Responsive */
@media (max-width: 768px) {
.memories-section {
width: 280px;
}
}
</style>
</head>
<body>
<div class="container">
<!-- Chat Section -->
<div class="chat-section">
<div class="chat-header">
<h1>What can I help you with?</h1>
<p>Chat with your memories - User: pratik</p>
</div>
<div class="chat-messages" id="chatMessages">
<!-- Messages will be loaded here -->
</div>
<div class="chat-input">
<input type="text" id="messageInput" placeholder="Type a message..." maxlength="1000">
<button id="sendButton">Send</button>
</div>
</div>
<!-- Memories Section -->
<div class="memories-section">
<div class="memories-header">
<h2>Your Memories (<span id="memoryCount">0</span>)</h2>
<button class="refresh-btn" id="refreshMemories" title="Refresh memories">🔄</button>
</div>
<div class="memories-list" id="memoriesList">
<div class="loading">Loading memories...</div>
</div>
</div>
</div>
<script>
// Configuration
const API_BASE = 'http://localhost:8000';
const USER_ID = 'pratik';
// DOM Elements
const chatMessages = document.getElementById('chatMessages');
const messageInput = document.getElementById('messageInput');
const sendButton = document.getElementById('sendButton');
const memoriesList = document.getElementById('memoriesList');
const memoryCount = document.getElementById('memoryCount');
const refreshButton = document.getElementById('refreshMemories');
// Chat history in localStorage
let chatHistory = JSON.parse(localStorage.getItem('chatHistory') || '[]');
// Initialize
document.addEventListener('DOMContentLoaded', function() {
loadChatHistory();
loadMemories();
// Event listeners
sendButton.addEventListener('click', sendMessage);
messageInput.addEventListener('keypress', function(e) {
if (e.key === 'Enter') sendMessage();
});
refreshButton.addEventListener('click', loadMemories);
});
// Replay the persisted chat history into the (emptied) message pane.
function loadChatHistory() {
    chatMessages.innerHTML = '';
    for (const entry of chatHistory) {
        displayMessage(entry.message, entry.isUser);
    }
    scrollToBottom();
}
// Append one message to the in-memory history and persist the whole
// history to localStorage.
function saveMessage(message, isUser) {
    const entry = { message, isUser, timestamp: Date.now() };
    chatHistory.push(entry);
    localStorage.setItem('chatHistory', JSON.stringify(chatHistory));
}
// Render one chat bubble. Assistant bubbles may carry a small metadata
// footer (memories used + model name) when `metadata` is provided.
function displayMessage(message, isUser, metadata = null) {
    const bubble = document.createElement('div');
    bubble.className = `message ${isUser ? 'user' : 'assistant'}`;
    bubble.textContent = message;
    if (metadata && !isUser) {
        const memories = metadata.memories_used || 0;
        const model = metadata.model_used || 'unknown model';
        const footer = document.createElement('div');
        footer.className = 'message-metadata';
        footer.textContent = `📊 ${memories} memories used • ${model}`;
        bubble.appendChild(footer);
    }
    chatMessages.appendChild(bubble);
    scrollToBottom();
}
// Pin the chat viewport to the newest message.
function scrollToBottom() {
    const pane = chatMessages;
    pane.scrollTop = pane.scrollHeight;
}
// Send the current input to the backend /chat endpoint and render the reply.
//
// Fix: the conversation context is now captured BEFORE the new message is
// pushed into chatHistory. The old code saved the message first and then
// sliced the history, so the current message was sent to the backend twice —
// once inside `context` and once as the `message` field, which the backend
// appends to the LLM prompt itself.
async function sendMessage() {
    const message = messageInput.value.trim();
    if (!message) return;
    // Disable input while the request is in flight.
    sendButton.disabled = true;
    messageInput.disabled = true;
    // Last 50 prior messages as context (excludes the message being sent).
    const context = chatHistory.slice(-50).map(item => ({
        role: item.isUser ? "user" : "assistant",
        content: item.message
    }));
    // Display and persist the user message.
    displayMessage(message, true);
    saveMessage(message, true);
    messageInput.value = '';
    try {
        // Send to backend with context
        const response = await fetch(`${API_BASE}/chat`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                message: message,
                user_id: USER_ID,
                context: context
            })
        });
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        // Display assistant response with its metadata footer.
        if (data.response) {
            displayMessage(data.response, false, {
                memories_used: data.memories_used,
                model_used: data.model_used
            });
            saveMessage(data.response, false);
        }
        // Give the backend a moment to extract memories, then refresh.
        setTimeout(() => loadMemories(), 500);
    } catch (error) {
        console.error('Error sending message:', error);
        displayMessage('Sorry, there was an error processing your message. Please try again.', false);
        showError('Failed to send message: ' + error.message);
    } finally {
        // Re-enable input whether the request succeeded or failed.
        sendButton.disabled = false;
        messageInput.disabled = false;
        messageInput.focus();
    }
}
// Fetch up to 50 memories for the current user and render the sidebar.
async function loadMemories() {
    try {
        const response = await fetch(`${API_BASE}/memories/${USER_ID}?limit=50`);
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        displayMemories(await response.json());
    } catch (error) {
        console.error('Error loading memories:', error);
        memoriesList.innerHTML = `<div class="error">Failed to load memories: ${error.message}</div>`;
    }
}
// Render the memories sidebar.
//
// Fix: memory content is user/LLM-generated text; the old code interpolated
// it (and the memory id) straight into innerHTML with an inline onclick
// string, which allows HTML/script injection from memory content. Items are
// now built with DOM APIs and textContent, and the delete handler is
// attached with addEventListener, closing over the real memory id.
function displayMemories(memories) {
    memoryCount.textContent = memories.length;
    if (memories.length === 0) {
        memoriesList.innerHTML = '<div class="loading">No memories yet. Start chatting to create some!</div>';
        return;
    }
    memoriesList.innerHTML = '';
    memories.forEach(memory => {
        const item = document.createElement('div');
        item.className = 'memory-item';

        const content = document.createElement('div');
        content.className = 'memory-content';
        content.textContent = memory.memory || memory.content || 'No content';

        const timestamp = document.createElement('span');
        timestamp.className = 'memory-timestamp';
        timestamp.textContent = memory.created_at
            ? new Date(memory.created_at).toLocaleString()
            : 'Unknown time';

        const deleteBtn = document.createElement('button');
        deleteBtn.className = 'delete-btn';
        deleteBtn.textContent = 'Delete';
        deleteBtn.addEventListener('click', () => deleteMemory(memory.id));

        const meta = document.createElement('div');
        meta.className = 'memory-meta';
        meta.append(timestamp, deleteBtn);

        item.append(content, meta);
        memoriesList.appendChild(item);
    });
}
// Delete a single memory (after user confirmation) and refresh the list.
async function deleteMemory(memoryId) {
    const confirmed = confirm('Are you sure you want to delete this memory?');
    if (!confirmed) return;
    try {
        const response = await fetch(`${API_BASE}/memories/${memoryId}`, {
            method: 'DELETE'
        });
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        // Re-fetch so the sidebar reflects the deletion.
        loadMemories();
    } catch (error) {
        console.error('Error deleting memory:', error);
        showError('Failed to delete memory: ' + error.message);
    }
}
// Surface a transient error banner at the top of the memories sidebar.
function showError(message) {
    const banner = document.createElement('div');
    banner.className = 'error';
    banner.textContent = message;
    memoriesList.insertBefore(banner, memoriesList.firstChild);
    // Auto-dismiss after 5 seconds if the banner is still attached.
    setTimeout(() => {
        if (banner.parentNode) {
            banner.parentNode.removeChild(banner);
        }
    }, 5000);
}
// Wipe persisted and on-screen chat history. Debug helper, exposed on
// `window` so it can be called from the browser console.
function clearChatHistory() {
    chatHistory = [];
    localStorage.removeItem('chatHistory');
    chatMessages.innerHTML = '';
    console.log('Chat history cleared');
}
window.clearChatHistory = clearChatHistory;
</script>
</body>
</html>