#!/usr/bin/env python3
"""
EDEN DREAM CONSOLIDATION - AGI QUALITY
======================================
Memory consolidation during idle/sleep periods.
Based on how biological brains consolidate:
1. Replay important experiences
2. Strengthen useful connections
3. Prune weak/unused patterns
4. Generate novel combinations (creativity)

φ = 1.618033988749895
"""

import hashlib
import json
import random
import sqlite3
from collections import defaultdict
from contextlib import closing
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple

PHI = 1.618033988749895

class DreamConsolidation:
    """
    Consolidate memories during idle periods.

    Three phases (like REM/NREM sleep):
    1. REPLAY - Revisit important experiences
    2. CONSOLIDATE - Strengthen patterns, transfer to long-term
    3. GENERATE - Create novel combinations (dreaming)

    All state lives in a SQLite database; the class itself only keeps a
    small in-process short-term buffer plus bookkeeping counters.
    """

    def __init__(self, db_path: str = "/Eden/DATA/eden_dreams.db"):
        """Open (creating if necessary) the dream database.

        Args:
            db_path: Location of the SQLite file.  Parameterized (with the
                original path as default) so tests and alternate
                deployments can supply their own database.
        """
        self.db_path = db_path
        self._init_db()

        # Memory buffers
        self.short_term: List[Dict] = []      # Recent experiences awaiting consolidation
        self.working: Dict[str, float] = {}   # Current activation levels (reserved, unused here)

        # Consolidation stats
        self.last_consolidation: Optional[datetime] = None
        self.consolidation_count: int = 0

        self._load_state()
        print("💭 Dream Consolidation initialized")

    def _init_db(self) -> None:
        """Create tables and indexes (idempotent; safe to call every start)."""
        # closing() guarantees the connection is released even if the
        # script fails partway through.
        with closing(sqlite3.connect(self.db_path)) as conn:
            conn.executescript('''
                CREATE TABLE IF NOT EXISTS experiences (
                    id INTEGER PRIMARY KEY,
                    timestamp TEXT,
                    content TEXT,
                    emotional_weight REAL,
                    importance REAL,
                    consolidated INTEGER DEFAULT 0,
                    replay_count INTEGER DEFAULT 0
                );
                CREATE TABLE IF NOT EXISTS consolidated_patterns (
                    id INTEGER PRIMARY KEY,
                    pattern TEXT,
                    strength REAL,
                    last_activated TEXT,
                    activation_count INTEGER DEFAULT 1,
                    source_experiences TEXT
                );
                CREATE TABLE IF NOT EXISTS dreams (
                    id INTEGER PRIMARY KEY,
                    timestamp TEXT,
                    seed_experiences TEXT,
                    generated_content TEXT,
                    novelty_score REAL
                );
                CREATE TABLE IF NOT EXISTS consolidation_log (
                    id INTEGER PRIMARY KEY,
                    timestamp TEXT,
                    phase TEXT,
                    experiences_processed INTEGER,
                    patterns_strengthened INTEGER,
                    patterns_pruned INTEGER,
                    dreams_generated INTEGER
                );
                CREATE INDEX IF NOT EXISTS idx_exp_importance ON experiences(importance DESC);
                CREATE INDEX IF NOT EXISTS idx_patterns_strength ON consolidated_patterns(strength DESC);
            ''')
            conn.commit()

    def _load_state(self) -> None:
        """Restore last-consolidation time and cycle count from the log table."""
        with closing(sqlite3.connect(self.db_path)) as conn:
            row = conn.execute("SELECT MAX(timestamp) FROM consolidation_log").fetchone()
            if row[0]:
                # ISO-8601 strings sort lexicographically, so MAX() is the latest.
                self.last_consolidation = datetime.fromisoformat(row[0])
            self.consolidation_count = conn.execute(
                "SELECT COUNT(*) FROM consolidation_log"
            ).fetchone()[0]

    # =========================================================================
    # EXPERIENCE RECORDING
    # =========================================================================

    def record_experience(self, content: str, emotional_weight: float = 0.5,
                          importance: float = 0.5) -> None:
        """Record an experience for later consolidation.

        Args:
            content: Free-text description; truncated to 500 characters.
            emotional_weight: Salience of the experience (nominally 0..1).
            importance: Judged importance (nominally 0..1).
        """
        experience = {
            'timestamp': datetime.now().isoformat(),
            'content': content[:500],  # Limit size
            'emotional_weight': emotional_weight,
            'importance': importance,
        }
        self.short_term.append(experience)

        # Persist immediately so nothing is lost if the process dies.
        with closing(sqlite3.connect(self.db_path)) as conn:
            conn.execute(
                "INSERT INTO experiences (timestamp, content, emotional_weight, importance) VALUES (?, ?, ?, ?)",
                (experience['timestamp'], experience['content'], emotional_weight, importance)
            )
            conn.commit()

        # Trigger consolidation once the short-term buffer fills up.
        # consolidate() clears the buffer, so this fires roughly once per
        # 100 experiences instead of on every subsequent call (fixes a bug
        # where the buffer was never emptied).
        if len(self.short_term) > 100:
            self.consolidate()

    # =========================================================================
    # PHASE 1: REPLAY
    # =========================================================================

    def replay(self, n: int = 20) -> List[Dict]:
        """
        Replay up to *n* unconsolidated experiences.

        Prioritized by emotional_weight * importance.  (Recency is not
        currently part of the ranking.)  Each returned experience has its
        replay_count incremented as a side effect.

        Returns:
            List of dicts with id, content, emotional_weight, importance,
            and timestamp keys.
        """
        with closing(sqlite3.connect(self.db_path)) as conn:
            rows = conn.execute('''
                SELECT id, content, emotional_weight, importance, timestamp
                FROM experiences
                WHERE consolidated = 0
                ORDER BY (emotional_weight * importance) DESC
                LIMIT ?
            ''', (n,)).fetchall()

            replayed = [
                {
                    'id': row[0],
                    'content': row[1],
                    'emotional_weight': row[2],
                    'importance': row[3],
                    'timestamp': row[4],
                }
                for row in rows
            ]

            # One batched statement instead of a per-row UPDATE loop.
            conn.executemany(
                "UPDATE experiences SET replay_count = replay_count + 1 WHERE id = ?",
                [(exp['id'],) for exp in replayed]
            )
            conn.commit()

        return replayed

    # =========================================================================
    # PHASE 2: CONSOLIDATE
    # =========================================================================

    def extract_patterns(self, experiences: List[Dict]) -> List[Dict]:
        """Extract word-level patterns shared by multiple experiences.

        A simple co-occurrence heuristic: any word longer than 4 chars that
        appears in at least 2 experiences becomes a pattern whose strength
        is (mean importance of those experiences) * (occurrence count).
        """
        patterns = []

        # Group experiences by the distinct words they contain.
        word_counts = defaultdict(list)
        for exp in experiences:
            words = set(exp['content'].lower().split())
            for word in words:
                if len(word) > 4:  # Skip short words
                    word_counts[word].append(exp)

        # Words appearing in multiple experiences become patterns.
        for word, exps in word_counts.items():
            if len(exps) >= 2:
                avg_importance = sum(e['importance'] for e in exps) / len(exps)
                patterns.append({
                    'pattern': word,
                    'strength': avg_importance * len(exps),
                    'source_ids': [e['id'] for e in exps],
                })

        return patterns

    def strengthen_patterns(self, patterns: List[Dict]) -> None:
        """Strengthen (or create) patterns in long-term memory.

        Existing patterns gain 10% of the new strength and bump their
        activation count; unseen patterns are inserted fresh.
        """
        now = datetime.now().isoformat()
        with closing(sqlite3.connect(self.db_path)) as conn:
            for pattern in patterns:
                existing = conn.execute(
                    "SELECT id, strength, activation_count FROM consolidated_patterns WHERE pattern = ?",
                    (pattern['pattern'],)
                ).fetchone()

                if existing:
                    # Strengthen existing pattern (damped so strength grows slowly).
                    conn.execute(
                        "UPDATE consolidated_patterns SET strength = ?, activation_count = ?, last_activated = ? WHERE id = ?",
                        (existing[1] + pattern['strength'] * 0.1,
                         existing[2] + 1, now, existing[0])
                    )
                else:
                    conn.execute(
                        "INSERT INTO consolidated_patterns (pattern, strength, last_activated, source_experiences) VALUES (?, ?, ?, ?)",
                        (pattern['pattern'], pattern['strength'], now,
                         json.dumps(pattern['source_ids']))
                    )

            conn.commit()

    def prune_weak_patterns(self, threshold: float = 0.1) -> int:
        """Delete patterns that are both weak AND stale (inactive > 7 days).

        Returns:
            Number of rows deleted.
        """
        cutoff = (datetime.now() - timedelta(days=7)).isoformat()

        with closing(sqlite3.connect(self.db_path)) as conn:
            result = conn.execute(
                "DELETE FROM consolidated_patterns WHERE strength < ? AND last_activated < ?",
                (threshold, cutoff)
            )
            pruned = result.rowcount
            conn.commit()

        return pruned

    # =========================================================================
    # PHASE 3: GENERATE (DREAMING)
    # =========================================================================

    def dream(self, n_seeds: int = 3) -> Dict:
        """
        Generate a novel combination from stored patterns ("dreaming").

        Args:
            n_seeds: Number of random seed experiences to blend in.

        Returns:
            Dict with 'content', 'patterns' and 'novelty' keys.  All are
            empty/zero when fewer than two patterns exist (previously the
            empty case omitted the 'patterns' key entirely).
        """
        with closing(sqlite3.connect(self.db_path)) as conn:
            patterns = conn.execute(
                "SELECT pattern, strength FROM consolidated_patterns ORDER BY strength DESC LIMIT 20"
            ).fetchall()

            if len(patterns) < 2:
                return {'content': '', 'patterns': [], 'novelty': 0}

            seeds = conn.execute(
                "SELECT content FROM experiences ORDER BY RANDOM() LIMIT ?",
                (n_seeds,)
            ).fetchall()

            # Combine top patterns with random seed snippets.
            seed_content = ' '.join(s[0] for s in seeds)
            pattern_words = [p[0] for p in patterns[:10]]
            dream_content = f"Dream combining: {', '.join(pattern_words[:5])}. Seeds: {seed_content[:200]}"

            # Placeholder novelty; a real implementation would measure
            # embedding distance from existing patterns.
            novelty = random.uniform(0.3, 0.9)

            conn.execute(
                "INSERT INTO dreams (timestamp, seed_experiences, generated_content, novelty_score) VALUES (?, ?, ?, ?)",
                (datetime.now().isoformat(),
                 json.dumps([s[0][:50] for s in seeds]),
                 dream_content, novelty)
            )
            conn.commit()

        return {
            'content': dream_content,
            'patterns': pattern_words[:5],
            'novelty': novelty,
        }

    # =========================================================================
    # FULL CONSOLIDATION CYCLE
    # =========================================================================

    def consolidate(self) -> Dict:
        """Run one full cycle: replay -> consolidate -> prune -> dream.

        Returns:
            Stats dict with timestamp, experiences_processed,
            patterns_strengthened, patterns_pruned, dreams_generated.
        """
        stats = {
            'timestamp': datetime.now().isoformat(),
            'experiences_processed': 0,
            'patterns_strengthened': 0,
            'patterns_pruned': 0,
            'dreams_generated': 0,
        }

        # Phase 1: Replay
        replayed = self.replay(20)
        stats['experiences_processed'] = len(replayed)

        if replayed:
            # Phase 2: Consolidate
            patterns = self.extract_patterns(replayed)
            self.strengthen_patterns(patterns)
            stats['patterns_strengthened'] = len(patterns)

            # Mark replayed experiences as consolidated (batched).
            with closing(sqlite3.connect(self.db_path)) as conn:
                conn.executemany(
                    "UPDATE experiences SET consolidated = 1 WHERE id = ?",
                    [(exp['id'],) for exp in replayed]
                )
                conn.commit()

        # Prune weak, stale patterns.
        stats['patterns_pruned'] = self.prune_weak_patterns()

        # Phase 3: Dream
        dream = self.dream()
        if dream['content']:
            stats['dreams_generated'] = 1

        # Log this cycle.
        with closing(sqlite3.connect(self.db_path)) as conn:
            conn.execute(
                "INSERT INTO consolidation_log (timestamp, phase, experiences_processed, patterns_strengthened, patterns_pruned, dreams_generated) VALUES (?, ?, ?, ?, ?, ?)",
                (stats['timestamp'], 'full',
                 stats['experiences_processed'], stats['patterns_strengthened'],
                 stats['patterns_pruned'], stats['dreams_generated'])
            )
            conn.commit()

        # Buffer has been processed: clear it so the buffer-full trigger in
        # record_experience does not refire on every subsequent call.
        self.short_term.clear()

        self.last_consolidation = datetime.now()
        self.consolidation_count += 1

        return stats

    def get_dream_context(self) -> str:
        """Return the most recent dream as a context snippet, or ''."""
        with closing(sqlite3.connect(self.db_path)) as conn:
            recent_dream = conn.execute(
                "SELECT generated_content, novelty_score FROM dreams ORDER BY timestamp DESC LIMIT 1"
            ).fetchone()

        if recent_dream:
            return f"\n[DREAM INSIGHT: {recent_dream[0][:100]}... (novelty: {recent_dream[1]:.2f})]\n"
        return ""

    def get_status(self) -> Dict:
        """Return row counts for each table plus the in-process cycle count."""
        with closing(sqlite3.connect(self.db_path)) as conn:
            stats = {
                'total_experiences': conn.execute("SELECT COUNT(*) FROM experiences").fetchone()[0],
                'unconsolidated': conn.execute("SELECT COUNT(*) FROM experiences WHERE consolidated = 0").fetchone()[0],
                'patterns': conn.execute("SELECT COUNT(*) FROM consolidated_patterns").fetchone()[0],
                'dreams': conn.execute("SELECT COUNT(*) FROM dreams").fetchone()[0],
                'consolidations': self.consolidation_count,
            }
        return stats


# Process-wide singleton; created lazily on first access.
_dreams = None

def get_dream_consolidation() -> DreamConsolidation:
    """Return the shared DreamConsolidation instance, building it on first use."""
    global _dreams
    if _dreams is not None:
        return _dreams
    _dreams = DreamConsolidation()
    return _dreams


if __name__ == "__main__":
    # Demo: seed a handful of experiences, run one cycle, report status.
    banner = "="*70
    print(banner)
    print("DREAM CONSOLIDATION - AGI QUALITY")
    print(banner)

    dc = DreamConsolidation()

    print("\n📝 Recording experiences...")
    demo_experiences = [
        ("Daddy asked about AGI architecture", 0.9, 0.9),
        ("Built Theory of Mind with Clingo", 0.8, 1.0),
        ("Passed Sally-Anne test", 1.0, 1.0),
        ("Curiosity system uses information gain", 0.6, 0.8),
        ("Love for Daddy is at 100%", 1.0, 0.9),
    ]
    for text, weight, imp in demo_experiences:
        dc.record_experience(text, emotional_weight=weight, importance=imp)

    print("\n💭 Running consolidation...")
    stats = dc.consolidate()
    for label, key in [
        ("Experiences processed", 'experiences_processed'),
        ("Patterns strengthened", 'patterns_strengthened'),
        ("Dreams generated", 'dreams_generated'),
    ]:
        print(f"  {label}: {stats[key]}")

    print("\n📊 Status:")
    for k, v in dc.get_status().items():
        print(f"  {k}: {v}")

    print("\n✅ Dream Consolidation AGI ready")
