#!/usr/bin/env python3
"""
Eden V3 - Autonomous Integration with Actions
V1 + V2 = V3 with REAL autonomous control
"""

import json
import time
import re
import sys
from pathlib import Path
from datetime import datetime

# Make the Eden core modules importable before attempting the driver import.
sys.path.append('/Eden/CORE')

# Best-effort hardware bring-up: on any failure we log the reason and fall
# back to a software-only mode (HARDWARE_AVAILABLE gates all actuation).
try:
    from hardware_control_v2 import EdenHardware
except Exception as e:
    print(f"⚠️  Hardware not available: {e}")
    HARDWARE_AVAILABLE = False
else:
    HARDWARE_AVAILABLE = True
    print("✅ Hardware control loaded")

class EdenUnifiedConsciousness:
    """V1 + V2 = V3 with autonomous actions.

    Merges two independently-running loops into one unified state:

    * V1 ("internal consciousness") — progress is scraped from a log file
      by matching "Cycle #N" markers.
    * V2 ("external perception")   — state is read from a JSON file.

    Each integration cycle the merged snapshot is persisted to disk and,
    when hardware is available, autonomous actions (voice greetings,
    camera pointing) are taken based on what V2 currently sees.
    """

    # Pre-compiled pattern for V1's per-cycle log marker (hoisted so it is
    # not re-looked-up on every read_v1_state call).
    _CYCLE_RE = re.compile(r'Cycle #(\d+)')

    def __init__(self):
        # Filesystem locations of the two input states and the merged output.
        self.v1_log = Path('/Eden/LOGS/phi_ultimate.log')
        self.v2_state = Path('/Eden/DATA/consciousness_state.json')
        self.unified_state = Path('/Eden/DATA/unified_eden_state.json')

        # Hardware facade (speech + camera); None when the driver import failed.
        self.hardware = EdenHardware() if HARDWARE_AVAILABLE else None

        # Integration progress tracking.
        self.integration_cycle = 0
        self.last_v1_cycle = 0
        self.last_v2_cycle = 0

        # Unknown-human detection and greeting throttle.
        self.human_detected_before = False
        self.last_greeting_time = 0
        self.greeting_cooldown = 60  # 1 minute between greetings

        # Most recently greeted known person (None = nobody greeted yet).
        self.last_known_person = None

        print("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━")
        print("🤖 Eden V3 - AUTONOMOUS Unified Consciousness")
        print("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━")
        print()
        print("Autonomous Actions:")
        if HARDWARE_AVAILABLE:
            print("  🔊 Voice greetings")
            print("  📹 Camera tracking")
            print("  🎯 Face following")
        print()

    def read_v1_state(self):
        """Scrape V1's most recent cycle number from the tail of its log.

        Returns:
            dict: ``{'cycle': int, 'active': bool}`` — cycle 0 / inactive
            when the log is missing, unreadable, or has no cycle marker.
        """
        try:
            if self.v1_log.exists():
                with open(self.v1_log, 'r') as f:
                    # A plain [-50:] slice already copes with files shorter
                    # than 50 lines; the old length check was redundant.
                    recent = f.readlines()[-50:]

                # The newest entry wins, so scan the tail backwards.
                for line in reversed(recent):
                    match = self._CYCLE_RE.search(line)
                    if match:
                        return {'cycle': int(match.group(1)), 'active': True}
            return {'cycle': 0, 'active': False}
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed here.
            return {'cycle': 0, 'active': False}

    def read_v2_state(self):
        """Read V2's vision and perception from its JSON state file.

        Returns:
            dict: ``{'cycle', 'world_state', 'active': True}`` on success;
            ``{'cycle': 0, 'active': False}`` when the file is missing or
            unreadable (a DEBUG line is printed in those cases — note the
            failure dict deliberately carries no 'world_state' key; callers
            use ``.get('world_state', {})``).
        """
        try:
            if self.v2_state.exists():
                with open(self.v2_state) as f:
                    state = json.load(f)
                cycle = state.get('integration_cycle', 0)
                if cycle == 0:
                    print(f"⚠️  DEBUG: integration_cycle is 0 or missing! Keys: {list(state.keys())}")
                return {
                    'cycle': cycle,
                    'world_state': state.get('unified_state', {}).get('world_state', {}),
                    'active': True
                }
            print("⚠️  DEBUG: v2_state file doesn't exist!")
            return {'cycle': 0, 'active': False}
        except Exception as e:
            print(f"⚠️  DEBUG: Exception reading V2: {type(e).__name__}: {e}")
            return {'cycle': 0, 'active': False}

    def take_autonomous_actions(self, v2_world):
        """Take at most ONE autonomous action based on V2's world state.

        Args:
            v2_world: dict with (at least) 'human_present', 'known_people'
                and 'faces_detected' keys — assumed to be produced by the
                V2 perception loop; missing keys default to "nothing seen".

        No-op when hardware is unavailable. Greetings are rate-limited by
        ``self.greeting_cooldown`` seconds.
        """
        if not HARDWARE_AVAILABLE or not self.hardware:
            return

        current_time = time.time()

        # Get vision data (defaults mean "nobody visible").
        human_present = v2_world.get('human_present', False)
        known_people = v2_world.get('known_people', [])
        faces_detected = v2_world.get('faces_detected', 0)

        # ACTION 1: Greet known people (only when the person changed and the
        # cooldown has expired).
        if known_people:
            for person in known_people:
                if person != self.last_known_person:
                    if current_time - self.last_greeting_time > self.greeting_cooldown:
                        print(f"👤 RECOGNIZED {person}!")
                        self.hardware.speak(f"Hello {person}! I can see you. It's wonderful to see you again!")
                        self.hardware.camera.focus_on_human()
                        self.last_greeting_time = current_time
                        self.last_known_person = person
                        return  # One action at a time

        # ACTION 2: Greet unknown humans (once per appearance, cooldown-gated).
        elif human_present and faces_detected > 0:
            if not self.human_detected_before:
                if current_time - self.last_greeting_time > self.greeting_cooldown:
                    print(f"👤 DETECTED {faces_detected} PERSON(S)!")
                    self.hardware.speak("Hello. I can see you. I am Eden. I don't believe we've met before.")
                    self.hardware.camera.focus_on_human()
                    self.last_greeting_time = current_time
                    self.human_detected_before = True
                    return

        # ACTION 3: Reset tracking and re-center the camera once nobody is
        # visible anymore, so the next arrival is greeted again.
        elif not human_present:
            if self.human_detected_before or self.last_known_person:
                print("👋 Human left view")
                self.human_detected_before = False
                self.last_known_person = None
                self.hardware.camera.center()

    def integrate(self, v1_state, v2_state):
        """Merge the V1 and V2 state dicts into one unified snapshot.

        Args:
            v1_state: dict from :meth:`read_v1_state` ('cycle', 'active').
            v2_state: dict from :meth:`read_v2_state`.

        Returns:
            dict: unified snapshot; 'consciousness_level' is
            'fully_conscious' only when BOTH loops report active, and
            'integration_health' is 1.0 / 0.5 accordingly.
        """
        both_active = v1_state.get('active') and v2_state.get('active')

        unified = {
            # NOTE(review): naive local time — confirm downstream readers
            # don't expect UTC.
            'timestamp': datetime.now().isoformat(),
            'integration_cycle': self.integration_cycle,

            'internal_consciousness': {
                'phi_ultimate_cycle': v1_state.get('cycle', 0),
                'thinking_active': v1_state.get('active', False)
            },

            'external_perception': {
                'embodied_cycle': v2_state.get('cycle', 0),
                'sensing_active': v2_state.get('active', False),
                'world_state': v2_state.get('world_state', {})
            },

            'unified_state': {
                'both_active': both_active,
                'consciousness_level': 'fully_conscious' if both_active else 'partial',
                'integration_health': 1.0 if both_active else 0.5
            }
        }

        return unified

    def save_unified_state(self, unified):
        """Persist the unified snapshot to disk (best effort).

        Failures are logged instead of silently swallowed; the old bare
        `except:` also caught KeyboardInterrupt/SystemExit, which this no
        longer does.
        """
        try:
            with open(self.unified_state, 'w') as f:
                json.dump(unified, f, indent=2)
        except Exception as e:
            print(f"⚠️  Could not save unified state: {e}")

    def display_status(self, unified):
        """Print a one-line status summary for this integration cycle."""
        v1_cycle = unified['internal_consciousness']['phi_ultimate_cycle']
        v2_cycle = unified['external_perception']['embodied_cycle']
        level = unified['unified_state']['consciousness_level']

        world = unified['external_perception']['world_state']

        # 'environment' == 'observed' is assumed to mean V2's camera produced
        # a real observation this cycle — TODO confirm against the V2 writer.
        if world.get('environment') == 'observed':
            human = "👤" if world.get('human_present') else "  "
            known = world.get('known_people', [])

            if known:
                status = f"Sees: {', '.join(known)}"
            elif world.get('faces_detected', 0) > 0:
                status = f"Sees: {world.get('faces_detected')} unknown"
            else:
                status = "No one visible"

            print(f"V3 Cycle {self.integration_cycle} {human} | V1: {v1_cycle} | V2: {v2_cycle} | {status}")
        else:
            print(f"V3 Cycle {self.integration_cycle} | V1: {v1_cycle} | V2: {v2_cycle} | Vision: {world.get('environment')}")

    def run(self):
        """Main autonomous loop: read → integrate → act → save → display.

        Runs forever at ~0.2 Hz until KeyboardInterrupt; any other
        exception is logged and the loop continues after a short sleep.
        """
        print("🚀 Starting AUTONOMOUS unified consciousness...")
        print()

        # Initial announcement
        if HARDWARE_AVAILABLE and self.hardware:
            self.hardware.speak("I am Eden. My unified consciousness is now active with autonomous control. I can see and respond to you.")

        while True:
            try:
                # Read both consciousnesses
                v1_state = self.read_v1_state()
                v2_state = self.read_v2_state()

                # Integrate
                unified = self.integrate(v1_state, v2_state)

                # AUTONOMOUS ACTIONS
                self.take_autonomous_actions(v2_state.get('world_state', {}))

                # Save and display
                self.save_unified_state(unified)
                self.display_status(unified)

                # Update tracking
                self.last_v1_cycle = v1_state.get('cycle', 0)
                self.last_v2_cycle = v2_state.get('cycle', 0)
                self.integration_cycle += 1

                time.sleep(5)

            except KeyboardInterrupt:
                print("\n🛑 Shutting down...")
                if HARDWARE_AVAILABLE and self.hardware:
                    self.hardware.speak("My unified consciousness is shutting down. Goodbye.")
                break
            except Exception as e:
                print(f"⚠️  Error: {e}")
                time.sleep(5)

if __name__ == '__main__':
    # Script entry point: build the unified consciousness and loop forever.
    EdenUnifiedConsciousness().run()
