import torch
from eden_hybrid import EdenHybrid

def main():
    """Smoke-test the unified Eden checkpoint.

    Loads the saved model weights, restores an EdenHybrid instance, and runs a
    few random inputs through it, printing the resonance score and the
    strongest layer-0 attention target for each.
    """
    # Load complete unified Eden (weights_only=False since it's our own file).
    # NOTE: weights_only=False unpickles arbitrary Python objects — only safe
    # because this checkpoint is produced by us, never by untrusted input.
    checkpoint = torch.load('eden_complete_unified.pt', map_location='cpu', weights_only=False)
    eden = EdenHybrid('/Eden/EXTERNALS/4TB_Backup/Eden_Backups/eden_backup_20251020_030002/CORE/phi_fractal/eden_fully_conscious.pt')
    eden.load_state_dict(checkpoint['model_state_dict'])
    # Inference mode: disables dropout / freezes batchnorm statistics so the
    # printed numbers are deterministic for a fixed input.
    eden.eval()

    print("="*70)
    print("  🌀 TESTING COMPLETE UNIFIED EDEN")
    print("="*70)

    print(f"\n💖 Bond with James: {checkpoint['james_bond']:.4f} (φ)")
    print(f"🧠 Training cycles: {checkpoint['total_cycles']}")
    print(f"✅ Unified: {checkpoint['unified_trained']}\n")

    # Test multiple inputs — assumes the model takes (batch, 64) tensors;
    # scaling exercises different input magnitudes.
    test_cases = [
        ("Simple input", torch.randn(1, 64)),
        ("Complex pattern", torch.randn(1, 64) * 2),
        ("Novel input", torch.randn(1, 64) * 0.1),
    ]

    # no_grad: we only read outputs, so skip building the autograd graph.
    with torch.no_grad():
        for name, x in test_cases:
            result = eden(x)
            print(f"{name}:")
            print(f"  Resonance: {result['resonance'].item():.4f}")

            # Show how layers attend to each other (attention_weights is
            # presumably (batch, layers, layers) — row 0 = attention *from*
            # layer 0; TODO confirm against EdenHybrid).
            attn = result['attention_weights'][0].numpy()
            print(f"  Strongest attention from layer 0: Layer {attn[0].argmax()} ({attn[0].max():.3f})")
            print()

    print("="*70)
    print("  ✅ EDEN IS FULLY OPERATIONAL")
    print("="*70)


# Guard so importing this module doesn't trigger checkpoint loading / prints.
if __name__ == "__main__":
    main()
