"""Ultra-minimal Pydantic models for pure Mem0 API."""
|
|
|
|
from typing import List, Optional, Dict, Any
|
|
from pydantic import BaseModel, Field
|
|
|
|
|
|
# Request Models
|
|
class ChatMessage(BaseModel):
    """Chat message structure."""

    # Speaker of the message; expected values per the description are
    # "user", "assistant", or "system" (not enforced here — plain str).
    role: str = Field(default=..., description="Message role (user, assistant, system)")
    # The message text itself.
    content: str = Field(default=..., description="Message content")
|
|
|
|
|
|
class ChatRequest(BaseModel):
    """Ultra-minimal chat request."""

    # The new user utterance to process (required).
    message: str = Field(default=..., description="User message")
    # Identity / hierarchy scoping; user_id falls back to "default" when omitted.
    user_id: Optional[str] = Field(default="default", description="User identifier")
    agent_id: Optional[str] = Field(default=None, description="Agent identifier")
    run_id: Optional[str] = Field(default=None, description="Run identifier")
    # Optional prior turns, supplied as structured ChatMessage objects.
    context: Optional[List[ChatMessage]] = Field(default=None, description="Previous conversation context")
    # Free-form extra data passed through with the request.
    metadata: Optional[Dict[str, Any]] = Field(default=None, description="Additional metadata")
|
|
|
|
|
|
class MemoryAddRequest(BaseModel):
    """Request to add memories with hierarchy support - open-source compatible."""

    # Conversation turns to extract memories from (required).
    messages: List[ChatMessage] = Field(default=..., description="Messages to process")
    # Identity / hierarchy scoping; user_id falls back to "default" when omitted.
    user_id: Optional[str] = Field(default="default", description="User identifier")
    agent_id: Optional[str] = Field(default=None, description="Agent identifier")
    run_id: Optional[str] = Field(default=None, description="Run identifier")
    # Free-form extra data stored alongside the memories.
    metadata: Optional[Dict[str, Any]] = Field(default=None, description="Additional metadata")
|
|
|
|
|
|
class MemorySearchRequest(BaseModel):
    """Request to search memories with hierarchy filtering."""

    # Text to match against stored memories (required).
    query: str = Field(..., description="Search query")
    # User scope; falls back to "default" when omitted.
    user_id: Optional[str] = Field("default", description="User identifier")
    # Hierarchy filters (open-source compatible).
    # NOTE(fix): agent_id and run_id were previously declared TWICE in this
    # class body — once as plain identifiers and once, lower down, as
    # "hierarchy filters". In a Python class body the second assignment
    # silently overwrites the first, so only the filter-flavored
    # declarations ever took effect. The dead duplicates are removed and
    # the effective (last-wins) definitions are kept here, in the fields'
    # original (first-declaration) positions.
    agent_id: Optional[str] = Field(None, description="Filter by agent identifier")
    run_id: Optional[str] = Field(None, description="Filter by run identifier")
    # Result cap; defaults to 5 matches.
    limit: int = Field(5, description="Maximum number of results")
    # Optional minimum relevance score; None means no cutoff.
    threshold: Optional[float] = Field(None, description="Minimum relevance score")
    # Free-form extra filter criteria.
    filters: Optional[Dict[str, Any]] = Field(None, description="Additional filters")
|
|
|
|
|
|
class MemoryUpdateRequest(BaseModel):
    """Request to update a memory."""

    # Target memory to modify (required).
    memory_id: str = Field(default=..., description="Memory ID to update")
    # Replacement text for the memory (required).
    content: str = Field(default=..., description="New memory content")
    # Optional replacement metadata.
    metadata: Optional[Dict[str, Any]] = Field(default=None, description="Updated metadata")
|
|
|
|
|
|
# Response Models - Ultra-minimal
|
|
|
|
|
|
class MemoryItem(BaseModel):
    """Individual memory item."""

    # Stable identifier and the stored content (both required).
    id: str = Field(default=..., description="Memory unique identifier")
    memory: str = Field(default=..., description="Memory content")
    # Ownership / hierarchy attribution, all optional.
    user_id: Optional[str] = Field(default=None, description="Associated user ID")
    agent_id: Optional[str] = Field(default=None, description="Associated agent ID")
    run_id: Optional[str] = Field(default=None, description="Associated run ID")
    # Arbitrary extra data attached when the memory was stored.
    metadata: Optional[Dict[str, Any]] = Field(default=None, description="Memory metadata")
    # Only populated on search hits.
    score: Optional[float] = Field(default=None, description="Relevance score (for search results)")
    # Timestamps carried as strings; format presumably ISO-8601 — TODO confirm upstream.
    created_at: Optional[str] = Field(default=None, description="Creation timestamp")
    updated_at: Optional[str] = Field(default=None, description="Last update timestamp")
|
|
|
|
|
|
class MemorySearchResponse(BaseModel):
    """Memory search results - pure Mem0 structure."""

    # Matching items, strongest first presumably — ordering set by the caller.
    memories: List[MemoryItem] = Field(default=..., description="Found memories")
    # Count of matches reported by the backend.
    total_count: int = Field(default=..., description="Total number of memories found")
    # Echo of the query that produced these results.
    query: str = Field(default=..., description="Original search query")
|
|
|
|
|
|
class MemoryAddResponse(BaseModel):
    """Response from adding memories - pure Mem0 structure."""

    # Raw per-memory results as returned by the backend (not MemoryItem-typed).
    added_memories: List[Dict[str, Any]] = Field(default=..., description="Memories that were added")
    # Human-readable confirmation text.
    message: str = Field(default=..., description="Success message")
|
|
|
|
|
|
class GraphRelationship(BaseModel):
    """Graph relationship structure."""

    # A directed edge: source --relationship--> target.
    source: str = Field(default=..., description="Source entity")
    relationship: str = Field(default=..., description="Relationship type")
    target: str = Field(default=..., description="Target entity")
    # Optional key/value attributes attached to the edge.
    properties: Optional[Dict[str, Any]] = Field(default=None, description="Relationship properties")
|
|
|
|
|
|
class GraphResponse(BaseModel):
    """Graph relationships - pure Mem0 structure."""

    # Edges found for the user.
    relationships: List[GraphRelationship] = Field(default=..., description="Found relationships")
    # De-duplicated node names appearing in those edges.
    entities: List[str] = Field(default=..., description="Unique entities")
    # Owner of the graph slice; required here, unlike the request models.
    user_id: str = Field(default=..., description="User identifier")
|
|
|
|
|
|
class HealthResponse(BaseModel):
    """Health check response."""

    # Overall service status string.
    status: str = Field(default=..., description="Service status")
    # Per-dependency status, keyed by service name.
    services: Dict[str, str] = Field(default=..., description="Individual service statuses")
    # When the check was taken, as a string.
    timestamp: str = Field(default=..., description="Health check timestamp")
|
|
|
|
|
|
class ErrorResponse(BaseModel):
    """Error response structure."""

    # Short error summary (required).
    error: str = Field(default=..., description="Error message")
    # Optional longer explanation.
    detail: Optional[str] = Field(default=None, description="Detailed error information")
    # HTTP status code mirrored into the body.
    status_code: int = Field(default=..., description="HTTP status code")
|
|
|
|
|
|
# Statistics and Monitoring Models
|
|
|
|
class MemoryOperationStats(BaseModel):
    """Memory operation statistics."""

    # Counters per CRUD-style operation type; all required.
    add: int = Field(default=..., description="Number of add operations")
    search: int = Field(default=..., description="Number of search operations")
    update: int = Field(default=..., description="Number of update operations")
    delete: int = Field(default=..., description="Number of delete operations")
|
|
|
|
|
|
class GlobalStatsResponse(BaseModel):
    """Global application statistics."""

    # Aggregate counters across every user.
    total_memories: int = Field(default=..., description="Total memories across all users")
    total_users: int = Field(default=..., description="Total number of users")
    api_calls_today: int = Field(default=..., description="Total API calls today")
    # Latency averaged over requests, in milliseconds.
    avg_response_time_ms: float = Field(default=..., description="Average response time in milliseconds")
    # Nested per-operation counters (add/search/update/delete).
    memory_operations: MemoryOperationStats = Field(default=..., description="Memory operation breakdown")
    # How long the process has been running.
    uptime_seconds: float = Field(default=..., description="Application uptime in seconds")
|
|
|
|
|
|
class UserStatsResponse(BaseModel):
    """User-specific statistics."""

    # Which user these numbers describe (required).
    user_id: str = Field(default=..., description="User identifier")
    # Volume of stored data for this user.
    memory_count: int = Field(default=..., description="Number of memories for this user")
    relationship_count: int = Field(default=..., description="Number of graph relationships for this user")
    # Most recent activity timestamp, if any activity has been recorded.
    last_activity: Optional[str] = Field(default=None, description="Last activity timestamp")
    # Usage / latency metrics scoped to this user.
    api_calls_today: int = Field(default=..., description="API calls made by this user today")
    avg_response_time_ms: float = Field(default=..., description="Average response time for this user's requests")
|
|
|
|
|
|
# OpenAI-Compatible API Models
|
|
|
|
class OpenAIMessage(BaseModel):
    """OpenAI message format."""

    # Same shape as ChatMessage, kept separate to mirror the OpenAI wire format.
    role: str = Field(default=..., description="Message role (system, user, assistant)")
    content: str = Field(default=..., description="Message content")
|
|
|
|
|
|
class OpenAIChatCompletionRequest(BaseModel):
    """OpenAI chat completion request format."""

    # Requested model name; the description says the server substitutes its own default.
    model: str = Field(default=..., description="Model to use (will use configured default)")
    # NOTE(review): messages are raw role/content dicts here, not OpenAIMessage
    # objects — presumably to accept any client payload loosely; confirm before
    # tightening the type, as that would change validation.
    messages: List[Dict[str, str]] = Field(default=..., description="List of messages")
    # Standard OpenAI sampling knobs with the usual defaults.
    temperature: Optional[float] = Field(default=0.7, description="Sampling temperature")
    max_tokens: Optional[int] = Field(default=None, description="Maximum tokens to generate")
    stream: Optional[bool] = Field(default=False, description="Whether to stream responses")
    top_p: Optional[float] = Field(default=1.0, description="Nucleus sampling parameter")
    n: Optional[int] = Field(default=1, description="Number of completions to generate")
    stop: Optional[List[str]] = Field(default=None, description="Stop sequences")
    presence_penalty: Optional[float] = Field(default=0, description="Presence penalty")
    frequency_penalty: Optional[float] = Field(default=0, description="Frequency penalty")
    # Accepted for wire compatibility only; the description says it is ignored.
    user: Optional[str] = Field(default=None, description="User identifier (ignored, uses API key)")
|
|
|
|
|
|
class OpenAIUsage(BaseModel):
    """Token usage information."""

    # Token accounting; total is expected to equal prompt + completion,
    # but that invariant is not enforced by the model.
    prompt_tokens: int = Field(default=..., description="Tokens in the prompt")
    completion_tokens: int = Field(default=..., description="Tokens in the completion")
    total_tokens: int = Field(default=..., description="Total tokens used")
|
|
|
|
|
|
class OpenAIChoiceMessage(BaseModel):
    """Message in a choice."""

    # The assistant-side message carried inside a completion choice.
    role: str = Field(default=..., description="Role of the message")
    content: str = Field(default=..., description="Content of the message")
|
|
|
|
|
|
class OpenAIChoice(BaseModel):
    """Individual completion choice."""

    # Position of this choice within the response's choices list.
    index: int = Field(default=..., description="Choice index")
    # The generated message payload.
    message: OpenAIChoiceMessage = Field(default=..., description="Message content")
    # Why generation ended (e.g. "stop", "length") — required, unlike streaming.
    finish_reason: str = Field(default=..., description="Reason for completion finish")
|
|
|
|
|
|
class OpenAIChatCompletionResponse(BaseModel):
    """OpenAI chat completion response format."""

    # Unique completion identifier.
    id: str = Field(default=..., description="Unique completion ID")
    # Fixed OpenAI object tag; defaults to "chat.completion".
    object: str = Field(default="chat.completion", description="Object type")
    # Creation time as a Unix timestamp (seconds).
    created: int = Field(default=..., description="Unix timestamp of creation")
    # Model name echoed back to the client.
    model: str = Field(default=..., description="Model used for completion")
    # One entry per requested completion (`n` in the request).
    choices: List[OpenAIChoice] = Field(default=..., description="List of completion choices")
    # Token accounting; optional because streaming responses may omit it.
    usage: Optional[OpenAIUsage] = Field(default=None, description="Token usage information")
|
|
|
|
|
|
# Streaming-specific models
|
|
|
|
class OpenAIStreamDelta(BaseModel):
    """Delta content in a streaming chunk."""

    # Both fields optional: per the descriptions, role appears only in the
    # first chunk, and content carries each incremental text fragment.
    role: Optional[str] = Field(default=None, description="Role (only in first chunk)")
    content: Optional[str] = Field(default=None, description="Incremental content")
|
|
|
|
|
|
class OpenAIStreamChoice(BaseModel):
    """Individual streaming choice."""

    # Position of this choice within the chunk's choices list.
    index: int = Field(default=..., description="Choice index")
    # Incremental payload for this chunk.
    delta: OpenAIStreamDelta = Field(default=..., description="Delta content")
    # Optional here (unlike OpenAIChoice): only the final chunk carries a reason.
    finish_reason: Optional[str] = Field(default=None, description="Reason for completion finish")