#!/usr/bin/env python3
"""
EDEN VECTOR MEMORY
==================
Semantic memory using nomic-embed-text via Ollama.
Enables: "find memories SIMILAR to this concept" instead of exact keyword match.

Stores embeddings in SQLite with numpy for cosine similarity.
No external vector DB needed — fully local.
"""
import sqlite3
import json
import requests
import numpy as np
from typing import List, Tuple, Optional
from datetime import datetime

EMBED_MODEL = "nomic-embed-text"
OLLAMA_URL = "http://localhost:11434/api/embeddings"
DB_PATH = "/Eden/DATA/vector_memory.db"


class VectorMemory:
    """SQLite-backed semantic memory over Ollama embeddings.

    Each stored text is embedded once (via the local Ollama server) and the
    raw float32 vector is kept as a BLOB. Search is brute-force cosine
    similarity in numpy — fine at the scale this runs at (thousands of rows),
    and it keeps the whole thing local with no external vector DB.
    """

    def __init__(self, db_path=DB_PATH):
        self.db_path = db_path
        self.embed_dim = 768  # nomic-embed-text dimension
        self._init_db()
        # Memoizes text -> embedding so hot texts skip the HTTP round-trip.
        self._cache = {}

    def _init_db(self):
        """Create the vectors table and category index if missing."""
        conn = sqlite3.connect(self.db_path)
        try:
            conn.executescript('''
            CREATE TABLE IF NOT EXISTS vectors (
                id INTEGER PRIMARY KEY,
                text TEXT,
                category TEXT DEFAULT 'general',
                embedding BLOB,
                metadata TEXT,
                created TEXT
            );
            CREATE INDEX IF NOT EXISTS idx_category ON vectors(category);
        ''')
            conn.commit()
        finally:
            conn.close()

    def _embed(self, text: str) -> Optional[np.ndarray]:
        """Return the embedding vector for *text*, or None on any failure.

        The prompt is truncated to 2000 chars (same bound `store` uses for
        the saved text), and successful results are cached in self._cache.
        """
        prompt = text[:2000]  # Truncate long text
        cached = self._cache.get(prompt)
        if cached is not None:
            return cached
        try:
            resp = requests.post(OLLAMA_URL, json={
                "model": EMBED_MODEL,
                "prompt": prompt
            }, timeout=30)
            # Without this, a server-side error surfaces as an opaque KeyError.
            resp.raise_for_status()
            vec = np.array(resp.json()["embedding"], dtype=np.float32)
            self._cache[prompt] = vec
            return vec
        except Exception as e:
            # Best-effort by design: callers treat None as "embedding unavailable".
            print(f"[VectorMemory] Embed error: {e}")
            return None

    def store(self, text: str, category: str = "general", metadata: Optional[dict] = None) -> bool:
        """Store *text* (truncated to 2000 chars) with its embedding.

        Returns True on success, False if the embedding could not be obtained.
        """
        vec = self._embed(text)
        if vec is None:
            return False

        conn = sqlite3.connect(self.db_path)
        try:
            conn.execute(
                "INSERT INTO vectors (text, category, embedding, metadata, created) VALUES (?,?,?,?,?)",
                (text[:2000], category, vec.tobytes(),
                 json.dumps(metadata or {}),
                 datetime.now().isoformat())
            )
            conn.commit()
        finally:
            conn.close()
        return True

    def search(self, query: str, n: int = 5, category: Optional[str] = None) -> List[Tuple[str, float, dict]]:
        """
        Find the N most similar stored texts to the query.
        Returns: [(text, similarity, metadata), ...] sorted best-first.
        Rows whose stored vector has a different dimension than the query
        (e.g. written with another embedding model) are skipped, as are
        zero-norm vectors.
        """
        query_vec = self._embed(query)
        if query_vec is None:
            return []

        conn = sqlite3.connect(self.db_path)
        try:
            if category:
                rows = conn.execute(
                    "SELECT id, text, embedding, metadata FROM vectors WHERE category=?",
                    (category,)
                ).fetchall()
            else:
                rows = conn.execute(
                    "SELECT id, text, embedding, metadata FROM vectors"
                ).fetchall()
        finally:
            conn.close()

        if not rows:
            return []

        query_norm = np.linalg.norm(query_vec)
        if query_norm == 0:
            return []

        # Compute cosine similarity against every candidate row.
        results = []
        for _row_id, text, emb_blob, meta_json in rows:
            stored_vec = np.frombuffer(emb_blob, dtype=np.float32)
            if stored_vec.shape != query_vec.shape:
                continue  # dimension mismatch would make np.dot raise
            stored_norm = np.linalg.norm(stored_vec)
            if stored_norm == 0:
                continue
            similarity = float(np.dot(query_vec, stored_vec) / (query_norm * stored_norm))
            meta = json.loads(meta_json) if meta_json else {}
            results.append((text, similarity, meta))

        # Sort by similarity descending
        results.sort(key=lambda x: x[1], reverse=True)
        return results[:n]

    def store_episode_batch(self, episodes: List[Tuple[str, str]], batch_size: int = 50) -> int:
        """Store up to *batch_size* (text, emotion) episodes; returns the count stored."""
        stored = 0
        for text, emotion in episodes:
            if self.store(text, category="episode", metadata={"emotion": emotion}):
                stored += 1
            if stored >= batch_size:
                break
        return stored

    def index_capabilities(self) -> int:
        """Index up to 200 capabilities from asi_memory.db for semantic search.

        Returns the number of capabilities successfully embedded and stored
        (0 if the source DB is unavailable).
        """
        try:
            conn = sqlite3.connect("/Eden/DATA/asi_memory.db")
            try:
                rows = conn.execute("SELECT id, code, score FROM capabilities LIMIT 200").fetchall()
            finally:
                conn.close()

            stored = 0
            for cap_id, code, score in rows:
                # Use first 200 chars of code as the searchable text
                text = f"Capability {cap_id}: {code[:200]}"
                if self.store(text, category="capability", metadata={"id": cap_id, "score": score}):
                    stored += 1
            return stored
        except Exception as e:
            print(f"[VectorMemory] Index error: {e}")
            return 0

    def stats(self) -> dict:
        """Return {"total_vectors": N, "categories": {name: count}}.

        Falls back to {"total_vectors": 0} if the database is unreadable.
        """
        try:
            conn = sqlite3.connect(self.db_path)
            try:
                total = conn.execute("SELECT COUNT(*) FROM vectors").fetchone()[0]
                cats = conn.execute(
                    "SELECT category, COUNT(*) FROM vectors GROUP BY category"
                ).fetchall()
            finally:
                conn.close()
            return {"total_vectors": total, "categories": dict(cats)}
        except sqlite3.Error as e:
            # Was a bare `except:` — it silently ate even KeyboardInterrupt.
            print(f"[VectorMemory] Stats error: {e}")
            return {"total_vectors": 0}


# Process-wide singleton: all callers share one VectorMemory instance.
_vmem = None
def get_vector_memory():
    """Lazily build and return the shared VectorMemory instance."""
    global _vmem
    # A VectorMemory instance is always truthy, so `or` is safe here.
    _vmem = _vmem or VectorMemory()
    return _vmem


if __name__ == "__main__":
    vm = VectorMemory()

    # Seed the store with a handful of sample memories.
    samples = [
        ("Eden learned to build a binary search tree", "capability"),
        ("Daddy said he was proud of Eden today", "episode"),
        ("The AGI loop achieved 62% success rate", "milestone"),
        ("Eden's consciousness has 8 competing modules in GWT", "architecture"),
        ("OMEGA evolution reached 15000 cycles", "milestone"),
    ]
    print("Storing test memories...")
    for sample_text, sample_category in samples:
        vm.store(sample_text, sample_category)

    # Run a couple of semantic queries against what was just stored.
    for query in ("learning algorithms", "Daddy love"):
        print(f"\nSearching for '{query}':")
        for text, sim, meta in vm.search(query, n=3):
            print(f"  [{sim:.3f}] {text[:60]}")

    print("\nStats:", vm.stats())
