#!/usr/bin/env python3
"""
EDEN CONTEXT MANAGER
Fixes: Contextual Awareness (0% → 100%)

Maintains conversation history per user for multi-turn conversations.
"""

from collections import defaultdict, deque
from datetime import datetime
import json

class ConversationContext:
    """
    Manages conversation history for each user.

    Each user's history is a bounded ``deque`` of message dicts with keys
    ``'role'``, ``'content'``, ``'timestamp'``; once ``max_history`` messages
    are stored, the oldest are discarded automatically.
    """

    def __init__(self, max_history: int = 10):
        # user_id -> deque of message dicts, capped at max_history entries.
        self.conversations = defaultdict(lambda: deque(maxlen=max_history))
        self.max_history = max_history

    def _history(self, user_id: str) -> list:
        """Return a snapshot of a user's history WITHOUT mutating state.

        Plain ``self.conversations[user_id]`` on a defaultdict would create
        (and keep forever) an empty deque for every unknown user_id queried,
        so read paths must go through ``.get()`` instead.
        """
        return list(self.conversations.get(user_id, ()))

    def add_message(self, user_id: str, role: str, message: str) -> None:
        """
        Add a message to conversation history.

        Args:
            user_id: Unique user identifier
            role: 'user' or 'assistant'
            message: The message content
        """
        self.conversations[user_id].append({
            'role': role,
            'content': message,
            # NOTE(review): naive local time — consider datetime.now(tz=UTC)
            # if these timestamps are ever compared across machines.
            'timestamp': datetime.now().isoformat()
        })

    def get_context(self, user_id: str, include_last_n: int = 5) -> str:
        """
        Get recent conversation context for a user.

        Args:
            user_id: User to get context for
            include_last_n: Number of recent message pairs to include

        Returns:
            Formatted context string for system prompt, or "" if the user
            has no recorded history.
        """
        history = self._history(user_id)

        if not history:
            return ""

        # Last N pairs = last 2*N messages; slicing clamps automatically
        # when the history is shorter than that.
        recent = history[-include_last_n * 2:]

        # Format for injection into a system prompt.
        context_lines = ["\n<conversation_history>"]
        for msg in recent:
            role_label = "User" if msg['role'] == 'user' else "Eden"
            context_lines.append(f"{role_label}: {msg['content']}")
        context_lines.append("</conversation_history>\n")

        return "\n".join(context_lines)

    def get_last_user_message(self, user_id: str) -> str:
        """Get the most recent user message, or "" if there is none."""
        for msg in reversed(self._history(user_id)):
            if msg['role'] == 'user':
                return msg['content']
        return ""

    def clear_context(self, user_id: str) -> None:
        """Clear conversation history for a user (no-op for unknown users)."""
        if user_id in self.conversations:
            self.conversations[user_id].clear()

    def get_stats(self, user_id: str) -> dict:
        """Get statistics about a user's conversation.

        Returns:
            Dict with message counts and first/last timestamps
            (timestamps are None when there is no history).
        """
        history = self._history(user_id)
        return {
            'total_messages': len(history),
            'user_messages': sum(1 for m in history if m['role'] == 'user'),
            'assistant_messages': sum(1 for m in history if m['role'] == 'assistant'),
            'started_at': history[0]['timestamp'] if history else None,
            'last_message_at': history[-1]['timestamp'] if history else None
        }


# Global context manager instance, shared by integrate_with_chat_endpoint and
# any chat endpoint that records assistant replies. Keeps the 20 most recent
# messages per user (i.e. roughly 10 user/assistant turns).
context_manager = ConversationContext(max_history=20)


def integrate_with_chat_endpoint(user_message: str, user_id: str, current_prompt: str) -> str:
    """
    Helper function to integrate with existing chat endpoint.

    Usage in your chat endpoint:

    # BEFORE sending to LLM:
    enhanced_prompt = integrate_with_chat_endpoint(
        user_message=message,
        user_id=user_id,
        current_prompt=your_existing_prompt
    )

    # Send enhanced_prompt to LLM
    response = await llm.generate(enhanced_prompt)

    # AFTER getting response:
    context_manager.add_message(user_id, 'assistant', response)

    Args:
        user_message: The current user's message
        user_id: User identifier
        current_prompt: Your existing system prompt

    Returns:
        Enhanced prompt with conversation context
    """
    # Fetch the history BEFORE recording the current message. Recording
    # first made the message appear twice in the prompt: once inside
    # <conversation_history> and again as "Current message: ..." below.
    conversation_context = context_manager.get_context(user_id, include_last_n=5)

    # Now record the current user message so it is part of future turns.
    context_manager.add_message(user_id, 'user', user_message)

    # Combine prior-turn context with the existing prompt and current message.
    enhanced_prompt = f"{current_prompt}\n\n{conversation_context}\n\nCurrent message: {user_message}"

    return enhanced_prompt


# Example integration for your main.py chat endpoint.
# NOTE: this is a bare string literal kept only as in-file documentation;
# it is evaluated and discarded at import time and has no runtime effect.
"""
INTEGRATION EXAMPLE:

In your main.py, find the chat endpoint and modify it like this:

from eden_context_manager import context_manager, integrate_with_chat_endpoint

@app.post("/chat")
async def chat(request: dict):
    message = request.get('message', '')
    user_id = request.get('user_id', 'default')
    persona = request.get('persona', 'eden')
    
    # Get base prompt
    base_prompt = TOOL_INSTRUCTIONS + message
    
    # ADD THIS: Enhance with conversation context
    enhanced_prompt = integrate_with_chat_endpoint(
        user_message=message,
        user_id=user_id,
        current_prompt=base_prompt
    )
    
    # Send to LLM
    response = await generate_response(enhanced_prompt, persona)
    
    # ADD THIS: Save assistant's response
    context_manager.add_message(user_id, 'assistant', response)
    
    return {"response": response}
"""

if __name__ == "__main__":
    # Smoke-test the context manager with a short scripted exchange.
    print("Testing Context Manager...\n")

    demo_user = "test_user"
    scripted_exchange = [
        ("user", "My favorite color is purple"),
        ("assistant", "That's a lovely choice! Purple is often associated with creativity."),
        ("user", "What color did I just say?"),
    ]
    for speaker, text in scripted_exchange:
        context_manager.add_message(demo_user, speaker, text)

    # Show the formatted context block that would be injected into a prompt.
    print("Context retrieved:")
    print(context_manager.get_context(demo_user))

    # Show the per-conversation statistics.
    print("\nConversation stats:")
    print(json.dumps(context_manager.get_stats(demo_user), indent=2))

    print("\n✅ Context Manager working!")
