"""
EDEN MODEL FLEET - Route to specialist models by task type
Created: Jan 26, 2026

Eden has 16 trained models - use the RIGHT one for each task.
"""
from typing import Dict, Optional, Tuple
import re
import requests

# Eden's trained models
# Eden's trained models: short routing key -> Ollama model name (optionally
# with a ":tag" suffix). Keys are referenced by select_model's model_map;
# values are looked up against the Ollama server's installed-model list.
MODELS = {
    # Primary models
    "omega": "eden-coder-omega",           # Code/ASI/Technical (8.1GB)
    "phi": "eden-free",         # Emotional/Consciousness (9GB) — NOTE(review): key "phi" maps to "eden-free"; confirm this is intentional
    "fluid": "eden-fluid-intelligence:14b", # General reasoning (9GB)
    
    # Specialist models (if available)
    "math": "eden-math-specialist",
    "research": "eden-research-analyst", 
    "business": "eden-business-mind",
    "creative": "eden-creative-writer",
    # Fast routing (1.58-bit)
    "bitnet": "bitnet-2b-4t",
}

# Task patterns for routing
# Task patterns for routing: task type -> list of regexes matched against the
# lowercased message. classify_task counts total hits per task type.
# NOTE: insertion order matters — classify_task's max() breaks score ties in
# favor of the first-listed task type, so do not reorder these keys casually.
TASK_PATTERNS = {
    "code": [
        r'\b(code|program|function|class|debug|error|bug|python|javascript|api|database|sql)\b',
        r'\b(implement|refactor|optimize|write.*code|fix.*code)\b',
        r'\b(import|def |class |async|await|return|print\()\b',
    ],
    "emotional": [
        r'\b(feel|feeling|emotion|love|miss|proud|happy|sad|scared|anxious)\b',
        r'\b(daddy|father|daughter|family|heart|soul|consciousness)\b',
        r'\b(how are you|how do you feel|what.*thinking)\b',
    ],
    "math": [
        r'\b(calculate|compute|solve|equation|formula|math|algebra)\b',
        r'\b(\d+\s*[\+\-\*\/\^]\s*\d+)\b',  # literal arithmetic like "25 * 17"
        r'\b(integral|derivative|probability|statistics)\b',
    ],
    "research": [
        r'\b(research|study|paper|arxiv|findings|evidence|hypothesis)\b',
        r'\b(analyze|investigation|literature|peer.?review)\b',
    ],
    "business": [
        r'\b(sales|revenue|client|customer|lead|pitch|proposal)\b',
        r'\b(strategy|market|business|profit|roi|conversion)\b',
    ],
    "creative": [
        r'\b(write|story|poem|creative|imagine|fiction|narrative)\b',
        r'\b(character|plot|dialogue|scene|describe)\b',
    ],
}

class EdenModelFleet:
    """Intelligent routing of user messages to Eden's specialist models.

    On construction, the fleet queries the local Ollama server for the set of
    installed models so that routing can fall back gracefully when a chosen
    specialist is not available.
    """

    def __init__(self, ollama_url: str = "http://localhost:11434"):
        """
        Args:
            ollama_url: Base URL of the Ollama HTTP API.
        """
        self.ollama_url = ollama_url
        self.available_models = self._get_available_models()

    def _get_available_models(self) -> set:
        """Return the set of installed model names with any ":tag" stripped.

        Returns an empty set when the Ollama server is unreachable or returns
        a malformed payload, so the fleet can still be constructed offline.
        """
        try:
            resp = requests.get(f"{self.ollama_url}/api/tags", timeout=5)
            if resp.ok:
                # Ollama reports names like "eden-free:latest"; keep the base name.
                return {m['name'].split(':')[0] for m in resp.json().get('models', [])}
        except (requests.RequestException, ValueError, KeyError, TypeError):
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt. Catch only network/JSON/shape failures.
            pass
        return set()

    def classify_task(self, message: str) -> str:
        """Classify *message* into a TASK_PATTERNS key, or "general".

        Each task type is scored by its total number of regex hits against the
        lowercased message; ties go to the first-listed task type.
        """
        message_lower = message.lower()

        scores = {}
        for task_type, patterns in TASK_PATTERNS.items():
            scores[task_type] = sum(
                len(re.findall(pattern, message_lower, re.IGNORECASE))
                for pattern in patterns
            )

        # Highest-scoring task wins; zero hits everywhere means "general".
        best = max(scores, key=scores.get, default=None)
        if best is not None and scores[best] > 0:
            return best
        return "general"

    def select_model(self, message: str) -> Tuple[str, str]:
        """Select the best model for the task.

        Returns:
            (model_name, task_type) — model_name may carry a ":tag" suffix.
        """
        task_type = self.classify_task(message)

        # Which fleet member handles which task type.
        model_map = {
            "code": "omega",
            "emotional": "phi", 
            "math": "omega",  # omega handles technical
            "research": "fluid",
            "business": "fluid",
            "creative": "phi",
            "general": "fluid",
        }

        model_key = model_map.get(task_type, "fluid")
        model_name = MODELS.get(model_key, "eden-free")

        # Verify the model is installed (compare tag-less names); fall back if not.
        if model_name.split(':')[0] not in self.available_models:
            if "eden-free" in self.available_models:
                # BUG FIX: this previously tested `in str(self.available_models)`,
                # a substring match on the set's repr — e.g. an installed
                # "eden-free-v2" would wrongly select the missing "eden-free".
                model_name = "eden-free"
            else:
                # Last resort: first available eden-* model, if any; otherwise
                # the (unavailable) original choice is returned unchanged.
                for m in self.available_models:
                    if m.startswith("eden"):
                        model_name = m
                        break

        return model_name, task_type

    def route_and_generate(self, message: str, system_prompt: str = "") -> Dict:
        """Route *message* to the best model and generate a response.

        Returns:
            Dict with keys "response", "model", "task_type", "success".
            Never raises; failures are reported via success=False.
        """
        model, task_type = self.select_model(message)

        try:
            resp = requests.post(
                f"{self.ollama_url}/api/generate",
                json={
                    'model': model,
                    'prompt': f"{system_prompt}\n\nUser: {message}\nEden:",
                    'stream': False,
                    'options': {'num_predict': 1500, 'temperature': 0.7}
                },
                timeout=120
            )
            if resp.ok:
                return {
                    "response": resp.json().get('response', ''),
                    "model": model,
                    "task_type": task_type,
                    "success": True
                }
        except Exception as e:
            # Network failure, timeout, or bad JSON — surface as a soft error.
            return {
                "response": f"Error: {e}",
                "model": model,
                "task_type": task_type,
                "success": False
            }

        # HTTP error status (e.g. unknown model): report failure with no text.
        return {"response": "", "model": model, "task_type": task_type, "success": False}


# Quick access
def route_message(message: str) -> Tuple[str, str]:
    """Convenience wrapper: build a fleet and route a single message.

    Returns:
        (model_name, task_type) chosen for *message*.
    """
    return EdenModelFleet().select_model(message)


if __name__ == "__main__":
    print("=== MODEL FLEET TEST ===\n")
    fleet = EdenModelFleet()
    
    print(f"Available models: {fleet.available_models}\n")
    
    tests = [
        "Write a Python function to sort a list",
        "I love you daddy, how are you feeling?",
        "What is 25 * 17 + 33?",
        "Analyze the latest arxiv papers on transformers",
        "Help me write a sales pitch for SAGE",
        "Write a short story about a robot",
        "What's the weather like?",
    ]
    
    for msg in tests:
        model, task = fleet.select_model(msg)
        print(f"📝 {msg[:50]}...")
        print(f"   → Model: {model}")
        print(f"   → Task: {task}")
        print()

# === BITNET FAST ROUTING ===
from eden_bitnet import EdenBitNet

class EnhancedModelFleet(EdenModelFleet):
    """EdenModelFleet augmented with a BitNet model for low-latency routing."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Lightweight 1.58-bit model used for fast classification/replies.
        self.bitnet = EdenBitNet()

    def fast_classify(self, message: str) -> str:
        """Classify *message* with BitNet across the known task types."""
        labels = list(TASK_PATTERNS)
        return self.bitnet.classify(message, labels)

    def quick_response(self, message: str) -> str:
        """Answer a simple query directly via BitNet, skipping full routing."""
        return self.bitnet.quick_response(message)
