#!/usr/bin/env python3
"""
Eden's RAG Knowledge Base System
Self-designed enhancement for autonomous operation
"""
import json
import uuid
from pathlib import Path

import chromadb
from sentence_transformers import SentenceTransformer

class EdenRAG:
    """RAG system for Eden's knowledge enhancement.

    Wraps a persistent ChromaDB collection and a local SentenceTransformer
    embedder, exposing add / retrieve / prompt-augmentation operations.
    """

    def __init__(self):
        """Open (or create) the vector store and load the embedding model.

        Side effects: creates /Eden/KNOWLEDGE on disk if missing and
        downloads the embedding model on first use.
        """
        print("Initializing RAG system...")
        # Setup vector database
        self.db_path = Path("/Eden/KNOWLEDGE/vector_db")
        # Ensure the parent directory exists; ChromaDB creates db_path itself.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)

        self.client = chromadb.PersistentClient(path=str(self.db_path))
        self.collection = self.client.get_or_create_collection("eden_knowledge")

        # Setup embedding model (local, fast)
        print("Loading embedding model...")
        self.embedder = SentenceTransformer('all-MiniLM-L6-v2')
        print("✅ RAG system ready")

    def add_knowledge(self, texts, metadatas=None):
        """Embed *texts* and add them to the collection.

        Args:
            texts: list of document strings; no-op when empty/None.
            metadatas: optional list of per-document metadata dicts, one
                per text. When omitted, default metadata is generated.

        Raises:
            ValueError: if metadatas is provided but its length does not
                match len(texts) (ChromaDB would otherwise fail with an
                opaque error at add() time).
        """
        if not texts:
            return

        if metadatas is not None and len(metadatas) != len(texts):
            raise ValueError(
                f"metadatas length {len(metadatas)} does not match texts length {len(texts)}"
            )

        print(f"Adding {len(texts)} documents...")
        embeddings = self.embedder.encode(texts).tolist()

        # Generate unique IDs. NOTE: the previous count-based scheme
        # ("doc_<count+i>") collides with existing IDs after any deletion
        # (count shrinks) or concurrent writer; random UUIDs are unique
        # regardless of collection history.
        ids = [f"doc_{uuid.uuid4().hex}" for _ in texts]

        # Create proper metadata (ChromaDB requires non-empty dicts)
        if metadatas is None:
            metadatas = [{"source": "initial_load", "index": i} for i in range(len(texts))]

        self.collection.add(
            documents=texts,
            embeddings=embeddings,
            metadatas=metadatas,
            ids=ids
        )
        print(f"✅ Added {len(texts)} documents (total: {self.collection.count()})")

    def retrieve(self, query, n=3):
        """Return up to *n* document strings most similar to *query*.

        Returns an empty list when the collection has no matches.
        """
        query_embedding = self.embedder.encode([query]).tolist()

        results = self.collection.query(
            query_embeddings=query_embedding,
            n_results=n
        )

        # query() returns lists-of-lists (one inner list per query).
        return results['documents'][0] if results['documents'] else []

    def augmented_prompt(self, query):
        """Build a prompt that prepends retrieved knowledge to *query*.

        Falls back to the bare query when nothing relevant is retrieved.
        """
        relevant_docs = self.retrieve(query, n=3)

        if not relevant_docs:
            return query

        context = "\n\n".join(relevant_docs)

        augmented = f"""Relevant knowledge:
{context}

Query: {query}

Answer using the knowledge above:"""

        return augmented

    def stats(self):
        """Return a summary dict: document count, model name, DB path."""
        count = self.collection.count()
        return {
            'total_documents': count,
            'embedding_model': 'all-MiniLM-L6-v2',
            'db_path': str(self.db_path)
        }

if __name__ == "__main__":
    # Smoke test: ingest sample documents, run a retrieval, build a prompt.
    print("Testing RAG system...")
    print()

    rag = EdenRAG()

    # Seed the knowledge base with a handful of sample facts.
    sample_docs = [
        "Python is a high-level programming language known for readability and simplicity.",
        "Machine learning is a subset of AI that enables systems to learn from data.",
        "Autonomous systems can operate without human intervention through self-directed behavior.",
        "Neural networks are computing systems inspired by biological neural networks.",
        "Deep learning uses multiple layers to progressively extract higher-level features.",
    ]
    rag.add_knowledge(sample_docs)

    print()
    print("Testing retrieval...")
    query = "What is autonomous operation?"
    results = rag.retrieve(query)

    print(f"Query: {query}")
    print(f"Retrieved {len(results)} documents:")
    for rank, snippet in enumerate(results, start=1):
        print(f"  {rank}. {snippet[:80]}...")

    print()
    print("Testing augmented prompt...")
    augmented = rag.augmented_prompt(query)
    print(f"Augmented prompt length: {len(augmented)} chars")

    print()
    print("✅ RAG system working!")
    print()
    print(f"Stats: {rag.stats()}")