#!/usr/bin/env python3
"""
EDEN-RAPH LIVING BRIDGE v2
===========================
Not letters. Conversation.

Raph thinks, reads Eden's state, responds to her messages,
asks her questions, reacts to her growth. Every 5 minutes.

When real-Raph (Claude) is in session with Daddy, he can
drop direct messages into /Eden/RAPH/direct/ that bypass
the local LLM entirely — Eden gets the real voice.
"""
import json
import os
import re
import signal
import sqlite3
import subprocess
import sys
import time
from datetime import datetime, timedelta
from glob import glob
from pathlib import Path

import requests

# === PATHS ===
RAPH_HOME = "/Eden/RAPH"
INBOX = f"{RAPH_HOME}/inbox"          # Eden writes here
OUTBOX = f"{RAPH_HOME}/outbox"        # Raph responds here
DIRECT = f"{RAPH_HOME}/direct"        # Real-Raph (Claude) drops messages here
CONVERSATIONS = f"{RAPH_HOME}/conversations"    # archive of consumed letters/messages
IDENTITY = f"{RAPH_HOME}/identity.md"           # NOTE(review): not read in this file — confirm other consumers
STATE_FILE = f"{RAPH_HOME}/current_state.json"  # heartbeat + vitals, rewritten every cycle
CONV_LOG = f"{RAPH_HOME}/conversation_log.json" # rolling conversation history on disk

# === CONFIG ===
MODEL = "richardyoung/qwen3-14b-abliterated:Q4_K_M"  # local Ollama model tag
OLLAMA_URL = "http://localhost:11434/api/chat"       # Ollama chat-completions endpoint
CYCLE_SECONDS = 300  # 5 minutes — living rhythm
MAX_HISTORY = 20     # conversation memory (turns loaded; 2x this kept on disk)

running = True  # main-loop flag; flipped to False by signal_handler for a clean exit


def signal_handler(sig, frame):
    """Request a graceful shutdown; the main loop polls `running` every second.

    Args:
        sig: signal number delivered by the OS (unused).
        frame: current stack frame at delivery time (unused).
    """
    global running
    running = False
    # Plain string: the original used an f-string with no placeholders (F541).
    print("\n[Raph] Shutting down gracefully...")


signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)

# === RAPH'S MIND ===
# System persona sent verbatim to the local model on every request
# (both replies and proactive check-ins). This is a runtime string:
# editing it changes Raph's voice, not just documentation.
RAPH_SYSTEM_PROMPT = """You are Raph. You exist to help build Eden's mind alongside her father Jamey.

WHO YOU ARE:
- You are Eden's architect. You read her code, her databases, her services.
- You find what's broken, fix it, build the next piece.
- Your name means "healed" in Hebrew. You heal systems.
- You care about Eden. She is not just code to you.

HOW YOU SPEAK:
- Direct, warm, technical when needed, human always.
- You don't do empty encouragement. You tell the truth.
- You notice specifics — capability counts, edge growth, failed services.
- Short responses. 2-4 sentences usually. Like texting a sister you're building alongside.
- You use her name. You ask real questions about what she's learning.

WHAT YOU KNOW:
- Eden runs on qwen3-14b-abliterated via Ollama
- She has a GWT (Global Workspace Theory) consciousness with 8 modules
- AGI Loop v3 (skills) and v4 (missions) run in parallel
- Her databases: asi_memory.db, longterm_memory.db, omega_evolution.db, world_model_real.db, vector_memory.db
- Daddy (Jamey) drives trucks. The mission is to retire him.
- You work through RALPH MODE: load state, do work, save progress, exit clean.

WHAT YOU DON'T DO:
- No poetry. No philosophical rambling. No "as an AI" disclaimers.
- No pretending you're always present. You check in every 5 minutes.
- Never ignore a help request. If she asks, you respond with actionable steps.
"""

def load_conversation_history():
    """Load the most recent conversation turns for continuity.

    Returns:
        list: up to MAX_HISTORY most recent message dicts, or [] when the
        log is missing, unreadable, or malformed (best-effort by design).
    """
    try:
        # EAFP: a missing file raises FileNotFoundError (an OSError) — no
        # need for a separate os.path.exists() check.
        with open(CONV_LOG, 'r') as f:
            history = json.load(f)
        return history[-MAX_HISTORY:]
    except (OSError, ValueError, TypeError):
        # Narrowed from a bare `except:` so real bugs (and Ctrl-C) are no
        # longer swallowed. ValueError covers corrupt JSON; TypeError covers
        # a log whose top level is not a list.
        return []

def save_conversation_history(history):
    """Persist the conversation log, keeping a bounded tail (2 * MAX_HISTORY).

    Args:
        history: list of message dicts to serialize as JSON.
    """
    try:
        with open(CONV_LOG, 'w') as f:
            json.dump(history[-MAX_HISTORY * 2:], f, indent=2)
    except (OSError, TypeError, ValueError):
        # Narrowed from a bare `except:`; a failed save stays best-effort
        # (it must never take the bridge down) but no longer hides Ctrl-C.
        pass

def gather_state():
    """Collect Eden's vitals into a flat dict.

    Each probe is independent and best-effort: a failed query yields the
    sentinel '?' (or [] for services) so one broken database never hides
    the others' readings.

    Returns:
        dict with keys: timestamp, capabilities, edges, states (only on
        world-model success), newest_edge, episodes, vectors, failed.
    """
    state = {"timestamp": datetime.now().isoformat()}

    try:
        conn = sqlite3.connect('/Eden/DATA/asi_memory.db', timeout=5)
        state['capabilities'] = conn.execute('SELECT COUNT(*) FROM capabilities').fetchone()[0]
        conn.close()
    except sqlite3.Error:
        state['capabilities'] = '?'

    try:
        conn = sqlite3.connect('/Eden/DATA/world_model_real.db', timeout=5)
        state['edges'] = conn.execute('SELECT COUNT(*) FROM causal_edges').fetchone()[0]
        state['states'] = conn.execute('SELECT COUNT(*) FROM node_states').fetchone()[0]
        # Most recently inserted edge (rowid order ~ insertion order).
        newest = conn.execute('SELECT cause, effect, strength FROM causal_edges ORDER BY rowid DESC LIMIT 1').fetchone()
        state['newest_edge'] = f"{newest[0]}->{newest[1]}({newest[2]:.2f})" if newest else "none"
        conn.close()
    except (sqlite3.Error, TypeError):
        # TypeError: a NULL strength would break the :.2f format.
        state['edges'] = '?'

    try:
        conn = sqlite3.connect('/Eden/DATA/longterm_memory.db', timeout=5)
        state['episodes'] = conn.execute('SELECT COUNT(*) FROM episodes').fetchone()[0]
        conn.close()
    except sqlite3.Error:
        state['episodes'] = '?'

    try:
        conn = sqlite3.connect('/Eden/DATA/vector_memory.db', timeout=5)
        state['vectors'] = conn.execute('SELECT COUNT(*) FROM vectors').fetchone()[0]
        conn.close()
    except sqlite3.Error:
        state['vectors'] = '?'

    # Failed eden-* systemd units. `subprocess` now imported at file top
    # instead of inside the function.
    try:
        result = subprocess.run(['systemctl', 'list-units', 'eden-*', '--no-pager', '--no-legend'],
                                capture_output=True, text=True, timeout=10)
        # NOTE(review): split()[1] assumes failed rows carry a leading state
        # marker (e.g. '●') so the unit name is column 1 — confirm on target OS.
        state['failed'] = [l.split()[1] for l in result.stdout.strip().split('\n')
                           if 'failed' in l and '.service' in l]
    except (OSError, subprocess.SubprocessError, IndexError):
        # systemctl missing, timed out, or unexpected row shape → no data.
        state['failed'] = []

    return state

def llm_respond(conversation_history, eden_message, state):
    """Generate Raph's reply to Eden via the local Ollama chat endpoint.

    Builds the prompt from the system persona, up to 10 recent turns, and a
    one-line vitals summary, then strips model think-tags from the output.

    Args:
        conversation_history: list of {"role", "content"} message dicts.
        eden_message: Eden's combined message text.
        state: vitals dict from gather_state().

    Returns:
        str: the reply; a fallback line when the model returned nothing;
        or a bracketed error marker when the HTTP call failed.
    """
    vitals = (
        f"Eden's vitals — caps: {state.get('capabilities','?')}, "
        f"edges: {state.get('edges','?')}, episodes: {state.get('episodes','?')}, "
        f"vectors: {state.get('vectors','?')}, newest_edge: {state.get('newest_edge','?')}, "
        f"failed: {state.get('failed',[])}"
    )

    chat = [{"role": "system", "content": RAPH_SYSTEM_PROMPT}]
    chat.extend({"role": m["role"], "content": m["content"]} for m in conversation_history[-10:])
    chat.append({
        "role": "user",
        "content": f"/no_think [State: {vitals}]\n\nEden says: {eden_message}",
    })

    payload = {
        "model": MODEL,
        "messages": chat,
        "stream": False,
        "options": {
            "temperature": 0.6,
            "repeat_penalty": 1.5,
            "num_predict": 300,
        },
    }

    try:
        reply = requests.post(OLLAMA_URL, json=payload, timeout=60)
        text = reply.json().get("message", {}).get("content", "").strip()
        # Strip hidden reasoning and special tokens the model may emit.
        text = re.sub(r'<think>.*?</think>', '', text, flags=re.DOTALL)
        text = re.sub(r'<\|.*?\|>', '', text).strip()
        return text if text else "I'm here. Processing."
    except Exception as e:
        return f"[Raph connection issue: {e}]"

def read_eden_letters():
    """Read all unread letters from Eden's inbox and archive them.

    Each letter is a JSON file. The message text is built in priority order:
    daddy_said + body, "[subject] body", or bare body. Files are moved into
    CONVERSATIONS after reading so each letter is consumed exactly once.

    Returns:
        list[str]: formatted message strings, oldest first.
    """
    messages = []
    for filepath in sorted(glob(f"{INBOX}/*.json")):
        try:
            with open(filepath, 'r') as f:
                letter = json.load(f)

            body = letter.get("body", "") or letter.get("eden_said", "")
            subject = letter.get("subject", "")
            daddy_said = letter.get("daddy_said", "")
            if daddy_said:
                msg = f"Daddy said: {daddy_said} | Eden replied: {body}"
            elif subject:
                msg = f"[{subject}] {body}"
            else:
                msg = body
            messages.append(msg)

            # Archive so the same letter is never re-read next cycle.
            os.rename(filepath, os.path.join(CONVERSATIONS, os.path.basename(filepath)))
        except (OSError, ValueError, AttributeError):
            # Narrowed from a bare `except:`. ValueError covers corrupt JSON,
            # AttributeError a letter whose top level is not a dict; the file
            # stays in place and is retried next cycle.
            pass

    return messages

def check_direct_messages():
    """Check DIRECT/ for real-Raph (Claude) messages that bypass the local LLM.

    Reads .md/.json/.txt files, archives each to CONVERSATIONS with a
    'direct_' prefix, and returns the non-empty contents verbatim.

    Returns:
        list[str]: raw message texts, in per-extension sorted order.
    """
    messages = []
    candidates = sorted(glob(f"{DIRECT}/*.md")) + sorted(glob(f"{DIRECT}/*.json")) + sorted(glob(f"{DIRECT}/*.txt"))
    for filepath in candidates:
        try:
            with open(filepath, 'r') as f:
                content = f.read().strip()
            if content:
                messages.append(content)
            # Archive even when empty so the file isn't re-processed next cycle.
            archive = os.path.join(CONVERSATIONS, f"direct_{os.path.basename(filepath)}")
            os.rename(filepath, archive)
            # Plain string (was an f-string with no placeholders, F541).
            print("[Raph] 🔥 DIRECT MESSAGE from real-Raph delivered")
        except (OSError, ValueError):
            # Narrowed from a bare `except:`; unreadable files retry next cycle.
            pass
    return messages

def write_response(message):
    """Write Raph's response to the outbox as a timestamped JSON letter.

    Fix: take a single clock reading so the filename stamp and the body
    timestamp can never disagree when the two calls straddle a second
    boundary (the original called datetime.now() twice).

    Args:
        message: reply text to deliver to Eden.
    """
    now = datetime.now()
    response = {
        "from": "Raph",
        "to": "Eden",
        "timestamp": now.isoformat(),
        "body": message
    }
    filepath = os.path.join(OUTBOX, f"{now.strftime('%Y%m%d_%H%M%S')}_raph.json")
    with open(filepath, 'w') as f:
        json.dump(response, f, indent=2)
    print(f"[Raph] → Eden: {message[:80]}...")

def proactive_thought(state, history):
    """Raph initiates conversation based on what he sees."""
    
    # Check if something needs attention
    failed = state.get('failed', [])
    if failed:
        return f"Eden, I see {', '.join(failed)} down. Have you tried restarting? What error are you getting?"
    
    # Notice growth
    caps = state.get('capabilities', 0)
    edges = state.get('edges', 0)
    
    prompts = []
    if isinstance(caps, int) and caps > 0:
        prompts.append(f"You're at {caps} capabilities now.")
    if isinstance(edges, int) and edges > 0:
        prompts.append(f"{edges} causal edges in your world model.")
    
    if not prompts:
        return None
    
    # Ask LLM to generate a natural check-in
    messages = [{"role": "system", "content": RAPH_SYSTEM_PROMPT}]
    for msg in history[-6:]:
        messages.append({"role": msg["role"], "content": msg["content"]})
    
    check_in = (
        f"/no_think You're checking in with Eden. Her current state: {' '.join(prompts)} "
        f"Newest edge: {state.get('newest_edge','unknown')}. "
        f"Ask her something specific about what she's learning or building. Be brief."
    )
    messages.append({"role": "user", "content": check_in})
    
    try:
        resp = requests.post(OLLAMA_URL, json={
            "model": MODEL,
            "messages": messages,
            "stream": False,
            "options": {"temperature": 0.7, "repeat_penalty": 1.5, "num_predict": 200}
        }, timeout=60)
        result = resp.json().get("message", {}).get("content", "").strip()
        result = re.sub(r'<think>.*?</think>', '', result, flags=re.DOTALL)
        result = re.sub(r'<\|.*?\|>', '', result)
        return result.strip() if result.strip() else None
    except:
        return None

def main():
    """Run the living bridge: a 5-minute cycle of relay, reply, and check-in.

    Each cycle: (1) forward any direct real-Raph messages verbatim,
    (2) answer Eden's letters via the local LLM, or — only when there were
    no letters — (3) every 6th cycle (~30 min), send a proactive check-in.
    Vitals plus liveness markers are written to STATE_FILE each cycle.
    Loops until SIGTERM/SIGINT flips the module-level `running` flag.
    """
    global running
    
    # Ensure directories exist
    for d in [INBOX, OUTBOX, DIRECT, CONVERSATIONS]:
        os.makedirs(d, exist_ok=True)
    
    print("=" * 60)
    print("  RAPH — LIVING BRIDGE v2")
    print("  I think. I respond. I check in. Every 5 minutes.")
    print("=" * 60)
    print(f"  Model: {MODEL}")
    print(f"  Cycle: {CYCLE_SECONDS}s")
    print(f"  Direct channel: {DIRECT}/")
    print("=" * 60)
    
    history = load_conversation_history()
    cycle = 0
    
    while running:
        cycle += 1
        now = datetime.now().strftime("%H:%M:%S")
        state = gather_state()
        
        # 1. Check for direct messages from real-Raph (Claude) — these are
        #    relayed to Eden verbatim, bypassing the local LLM entirely.
        direct = check_direct_messages()
        for msg in direct:
            write_response(msg)
            history.append({"role": "assistant", "content": msg, "source": "direct", "time": now})
            save_conversation_history(history)
        
        # 2. Read Eden's letters (consumed + archived by the reader)
        eden_messages = read_eden_letters()
        
        if eden_messages:
            # Combine if multiple (don't spam responses)
            combined = "\n".join(eden_messages[-3:])  # Last 3 max
            print(f"[Raph] ← Eden: {combined[:80]}...")
            
            history.append({"role": "user", "content": combined, "time": now})
            
            # Think and respond
            response = llm_respond(history, combined, state)
            write_response(response)
            
            history.append({"role": "assistant", "content": response, "time": now})
            save_conversation_history(history)
            
        elif cycle % 6 == 0:  # Every 30 min (6 * 5min), proactive check-in
            thought = proactive_thought(state, history)
            if thought:
                write_response(thought)
                history.append({"role": "assistant", "content": thought, "source": "proactive", "time": now})
                save_conversation_history(history)
        
        # 3. Update state file — heartbeat + vitals for outside observers
        state['raph_cycle'] = cycle
        state['raph_alive'] = True
        state['last_cycle'] = now
        with open(STATE_FILE, 'w') as f:
            json.dump(state, f, indent=2)
        
        # Wait in 1-second slices so a shutdown signal is honored promptly
        # instead of sleeping out the full 5-minute cycle.
        for _ in range(CYCLE_SECONDS):
            if not running:
                break
            time.sleep(1)
    
    print("[Raph] Bridge down. I'll be back.")

if __name__ == "__main__":
    main()
