knowledge-base/.env.example

# API Configuration - UPDATE THESE WITH YOUR VALUES
OPENAI_API_KEY=sk-your-openai-api-key-here
OPENAI_COMPAT_API_KEY=sk-your-openai-compatible-api-key-here
OPENAI_BASE_URL=https://your-openai-compatible-endpoint.com/v1
EMBEDDER_API_KEY=AIzaSy-your-google-gemini-api-key-here
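
# The Gemini key can be sanity-checked before starting the stack. This is an
# illustrative call against the public Generative Language list-models endpoint
# (assumed here; your account or proxy may use a different route):
#   curl "https://generativelanguage.googleapis.com/v1beta/models?key=$EMBEDDER_API_KEY"
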
# Database Configuration
POSTGRES_DB=mem0_db
POSTGRES_USER=mem0_user
POSTGRES_PASSWORD=mem0_password
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
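
# For reference, the values above resolve to a DSN of the form (illustrative only):
#   postgresql://mem0_user:mem0_password@postgres:5432/mem0_db
# One way to confirm pgvector is installed, assuming a Compose service named "postgres":
#   docker compose exec postgres psql -U mem0_user -d mem0_db -c "CREATE EXTENSION IF NOT EXISTS vector;"
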
# Neo4j Configuration
NEO4J_AUTH=neo4j/mem0_neo4j_password
NEO4J_URI=bolt://neo4j:7687
NEO4J_USERNAME=neo4j
NEO4J_PASSWORD=mem0_neo4j_password
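
# NEO4J_AUTH uses the Neo4j Docker image's <username>/<password> form and must match
# NEO4J_USERNAME and NEO4J_PASSWORD above. A quick connectivity/version check, assuming
# a Compose service named "neo4j" with cypher-shell available in the image:
#   docker compose exec neo4j cypher-shell -u neo4j -p mem0_neo4j_password \
#     "CALL dbms.components() YIELD name, versions RETURN name, versions"
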
# Application Configuration
BACKEND_PORT=8000
FRONTEND_PORT=3000
LOG_LEVEL=INFO
CORS_ORIGINS=http://localhost:3000
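
# If the frontend is served from more than one origin, a comma-separated list is a
# common convention (assumption: the backend splits CORS_ORIGINS on commas), e.g.:
#   CORS_ORIGINS=http://localhost:3000,http://127.0.0.1:3000
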
# Model Configuration - Intelligent Routing
# These models are automatically selected based on task complexity
DEFAULT_MODEL=claude-sonnet-4 # General purpose model
EXTRACTION_MODEL=claude-sonnet-4 # Memory extraction and processing
FAST_MODEL=o4-mini # Simple/quick responses
ANALYTICAL_MODEL=gemini-2.5-pro # Analysis and comparison tasks
REASONING_MODEL=claude-sonnet-4 # Complex reasoning tasks
EXPERT_MODEL=o3 # Expert-level comprehensive analysis

# IMPORTANT NOTES:
# - Ensure all models listed above are available on your OpenAI-compatible endpoint
# - Verify model availability: curl -H "Authorization: Bearer $OPENAI_COMPAT_API_KEY" "$OPENAI_BASE_URL/models"
# - Neo4j must be version 5.18+ for vector.similarity.cosine() function
# - Ollama must be running locally with nomic-embed-text:latest model
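# - Example host-side check for the Ollama embedding model (illustrative commands):
#     ollama pull nomic-embed-text:latest
#     ollama list | grep nomic-embed-text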