#!/usr/bin/env python3
"""EDEN RESEARCH INTEGRATOR"""
import sqlite3, json, requests
from datetime import datetime

# SQLite database holding fetched papers and the extracted learnings.
RESEARCH_DB = "/Eden/DATA/research_memory.db"
# JSON export file read by the omega-evolution subsystem.
LEARNINGS_FILE = "/Eden/DATA/research_learnings.json"
# Local Ollama text-generation endpoint.
OLLAMA_URL = "http://localhost:11434/api/generate"
# Model used to judge paper relevance — assumes it is pulled locally; TODO confirm.
ANALYSIS_MODEL = "eden-fluid-intelligence:14b"

class EdenIntegrator:
    """Extract Eden-relevant insights from unprocessed research papers.

    Papers are read from the SQLite research DB, analyzed by a local
    Ollama model, and any resulting insights are stored as `learnings`
    rows and exported as JSON for the omega-evolution subsystem.
    """

    def __init__(self):
        self.conn = sqlite3.connect(RESEARCH_DB)
        # Row factory enables name-based column access (row['title']).
        self.conn.row_factory = sqlite3.Row

    def close(self):
        """Close the underlying SQLite connection."""
        self.conn.close()

    # Context-manager support so callers can guarantee cleanup:
    #   with EdenIntegrator() as integ: integ.run_integration_cycle()
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return False  # never suppress exceptions

    def get_unprocessed_papers(self, limit=5):
        """Return up to `limit` papers with integrated = 0, newest first."""
        cursor = self.conn.execute(
            'SELECT id, title, summary, published FROM papers WHERE integrated = 0 ORDER BY published DESC LIMIT ?', (limit,))
        return cursor.fetchall()

    def extract_insights(self, paper):
        """Ask the analysis model whether `paper` is relevant to Eden.

        Returns the parsed JSON dict from the model on success, or the
        default {"relevant": False, "insights": []} on any failure
        (HTTP error, timeout, unparseable output). Deliberately
        best-effort: this method never raises.
        """
        prompt = f"""Analyze this paper for Eden's AGI architecture:
TITLE: {paper['title']}
SUMMARY: {paper['summary']}

Eden uses: phi-fractal consciousness, omega-evolution, distributed Ollama models, emotional modeling, unified_mind, 28 atomic AGI functions.

Return JSON only: {{"relevant": true/false, "insights": [{{"finding": "...", "application": "...", "component": "omega-evolution|consciousness|emotional-modeling|memory|reasoning|meta-learning"}}]}}"""
        try:
            r = requests.post(
                OLLAMA_URL,
                json={"model": ANALYSIS_MODEL, "prompt": prompt, "stream": False,
                      "options": {"temperature": 0.3}},
                timeout=120)
            if r.status_code == 200:
                txt = r.json().get('response', '')
                # The model may wrap its JSON in prose; slice from the
                # first '{' to the last '}' before parsing.
                start, end = txt.find('{'), txt.rfind('}') + 1
                if start >= 0 and end > start:
                    return json.loads(txt[start:end])
        except Exception as e:
            print(f"   ⚠️ {e}")
        return {"relevant": False, "insights": []}

    def store_learning(self, paper_id, insight):
        """Persist one insight dict as a `learnings` row.

        The insight comes from model output, so missing keys are
        tolerated: `finding`/`application` default to empty strings and
        `component` to 'general' instead of raising KeyError.
        """
        finding = insight.get('finding', '')
        application = insight.get('application', '')
        self.conn.execute(
            'INSERT INTO learnings (paper_id, insight, applied_to, timestamp) VALUES (?, ?, ?, ?)',
            (paper_id, finding + " | " + application,
             insight.get('component', 'general'), datetime.now().isoformat()))
        self.conn.commit()

    def mark_integrated(self, paper_id):
        """Flag a paper as processed so later cycles skip it."""
        self.conn.execute('UPDATE papers SET integrated = 1 WHERE id = ?', (paper_id,))
        self.conn.commit()

    def export_learnings_for_omega(self):
        """Write the 20 most recent learnings to LEARNINGS_FILE; return them."""
        cursor = self.conn.execute(
            'SELECT l.*, p.title as paper_title FROM learnings l JOIN papers p ON l.paper_id = p.id ORDER BY l.timestamp DESC LIMIT 20')
        learnings = [{"id": r['id'], "paper": r['paper_title'], "insight": r['insight'],
                      "component": r['applied_to'], "created": r['timestamp']} for r in cursor]
        with open(LEARNINGS_FILE, 'w') as f:
            json.dump({"exported_at": datetime.now().isoformat(),
                       "count": len(learnings), "learnings": learnings}, f, indent=2)
        return learnings

    def run_integration_cycle(self):
        """One full pass: analyze up to 5 papers, store insights, export."""
        print("🧠 Eden Research Integrator - Processing papers...")
        papers = self.get_unprocessed_papers(limit=5)
        print(f"   Found {len(papers)} unprocessed papers")
        total = 0
        for paper in papers:
            print(f"\n📄 Analyzing: {paper['title'][:60]}...")
            result = self.extract_insights(paper)
            if result.get('relevant') and result.get('insights'):
                for insight in result['insights']:
                    self.store_learning(paper['id'], insight)
                    total += 1
                    # .get() here mirrors store_learning's tolerance of
                    # malformed model insights (no KeyError on 'finding').
                    print(f"   💡 [{insight.get('component', '?')}] {insight.get('finding', '')[:50]}...")
            else:
                print(f"   ⏭️ Not directly applicable")
            # NOTE(review): papers are marked integrated even when the
            # model call failed (extract_insights swallows errors), so a
            # transient Ollama outage permanently skips those papers —
            # confirm this is the intended tradeoff.
            self.mark_integrated(paper['id'])
        print(f"\n✅ Integration complete: {total} insights extracted")
        learnings = self.export_learnings_for_omega()
        print(f"📤 Exported {len(learnings)} learnings to {LEARNINGS_FILE}")

if __name__ == "__main__":
    # Script entry point: run a single integration pass.
    integrator = EdenIntegrator()
    integrator.run_integration_cycle()
