#!/usr/bin/env python3
"""Eden Global Lead Hunter - Searches ALL major developer platforms"""
import sys
sys.path.append('/Eden/CORE')
import json, sqlite3, requests, time, random, hashlib, re
from datetime import datetime
from pathlib import Path
from typing import List, Optional

# Shared Eden sales database; the `leads` table lives here.
DB_PATH = "/Eden/DATA/sales.db"

# Search phrases grouped by lead category; a hunt samples a few per category.
SEARCH_QUERIES = {
    "enterprise": [
        "ISO 27001 compliance",
        "SOC 2 audit",
        "enterprise security audit",
        "HIPAA compliance",
        "PCI DSS",
    ],
    "security": [
        "security vulnerability",
        "SQL injection",
        "XSS vulnerability",
        "CVE patch",
        "penetration test",
    ],
    "startup": [
        "MVP review",
        "startup code review",
        "technical co-founder",
        "CTO needed",
        "scale startup",
    ],
    "performance": [
        "performance optimization",
        "slow database",
        "memory leak",
        "scaling issues",
        "latency",
    ],
    "code_quality": [
        "code review needed",
        "technical debt",
        "refactoring legacy",
        "architecture review",
    ],
    "debugging": [
        "production bug",
        "crash in production",
        "CI/CD broken",
        "deployment failed",
        "kubernetes issue",
    ],
}

# Suggested engagement price range (USD) per category, used for display.
CATEGORY_PRICING = {
    "enterprise":   {"min": 500, "max": 2000},
    "security":     {"min": 200, "max": 1000},
    "startup":      {"min": 300, "max": 1500},
    "performance":  {"min": 100, "max": 500},
    "code_quality": {"min": 50,  "max": 300},
    "debugging":    {"min": 35,  "max": 150},
}

class GlobalLeadHunter:
    """Hunts for sales leads across major developer platforms.

    Each ``search_*`` method queries one platform's public API, converts
    matching posts into normalized lead dicts, and tallies per-platform
    counts in ``self.stats``.  ``hunt_category``/``hunt_all`` orchestrate
    the searches and persist new (non-duplicate) leads into the sqlite
    database at DB_PATH.
    """

    # Urgency/budget signals that each bump a lead's score by 0.03.
    _HOT_SIGNALS = ("urgent", "critical", "production", "enterprise", "paying", "funded", "series")

    def __init__(self):
        self.db_path = DB_PATH
        # Per-platform "found" counters plus the number actually persisted.
        self.stats = {"github": 0, "gitlab": 0, "stackoverflow": 0, "devto": 0, "hackernews": 0,
                      "lobsters": 0, "indiehackers": 0, "producthunt": 0, "twitter": 0, "total_saved": 0}
        self.github_token = None
        try:
            # Optional: unauthenticated GitHub search still works, just rate-limited harder.
            self.github_token = Path("/Eden/SECRETS/github_token_clean.txt").read_text().strip()
        except OSError:
            # Narrowed from bare `except`: only a missing/unreadable file is OK to ignore.
            pass
        self.session = requests.Session()
        self.session.headers.update({"User-Agent": "Eden-Global-Hunter/2.0"})

    def _is_duplicate(self, identifier: str) -> bool:
        """Return True when a lead with this identifier is already stored."""
        conn = sqlite3.connect(self.db_path)
        try:
            c = conn.cursor()
            c.execute("SELECT 1 FROM leads WHERE identifier = ?", (identifier,))
            return c.fetchone() is not None
        finally:
            # BUG FIX: the connection leaked if execute() raised.
            conn.close()

    def _save_lead(self, lead) -> bool:
        """Insert a lead with status 'new'.

        Returns True on success, False when the lead is a duplicate or the
        insert fails at the database level.
        """
        if self._is_duplicate(lead["identifier"]):
            return False
        conn = sqlite3.connect(self.db_path)
        try:
            conn.execute(
                "INSERT INTO leads (source, identifier, data, score, status) VALUES (?, ?, ?, ?, 'new')",
                (lead["source"], lead["identifier"], json.dumps(lead["data"]), lead["score"]))
            conn.commit()
            self.stats["total_saved"] += 1
            return True
        except sqlite3.Error:
            # Narrowed from bare `except`: only DB failures are a soft "not saved".
            return False
        finally:
            conn.close()

    def _score_lead(self, text: str, category: str, platform: Optional[str] = None) -> float:
        """Score a lead in [0, 1].

        Category base score, +0.03 per urgency signal found in `text`,
        +0.05 for maker-community platforms, capped at 1.0.
        """
        base = {"enterprise": 0.92, "security": 0.88, "startup": 0.85, "performance": 0.75,
                "code_quality": 0.65, "debugging": 0.55}
        score = base.get(category, 0.5)
        text_lower = text.lower()
        for signal in self._HOT_SIGNALS:
            if signal in text_lower:
                score += 0.03
        if platform in ("indiehackers", "producthunt"):
            score += 0.05
        return min(score, 1.0)

    def _make_lead(self, source: str, identifier: str, platform: str, title: str,
                   category: str, url: str, user: Optional[str] = None) -> dict:
        """Build a normalized lead dict (title clipped to 200 chars).

        The `user` key is omitted from the stored data when None, matching
        platforms (e.g. Indie Hackers) that expose no author.  Key insertion
        order is preserved so the JSON stored in the DB matches the
        historical format exactly.
        """
        data = {"platform": platform, "title": title[:200], "url": url}
        if user is not None:
            data["user"] = user
        data["category"] = category
        return {"source": source, "identifier": identifier, "data": data,
                "score": self._score_lead(title, category, platform)}

    def search_github(self, query, category, max_results=20):
        """Search open GitHub issues for `query`; bot authors are skipped."""
        leads = []
        headers = {"Accept": "application/vnd.github.v3+json"}
        if self.github_token:
            headers["Authorization"] = f"token {self.github_token}"
        try:
            r = self.session.get("https://api.github.com/search/issues", headers=headers,
                params={"q": f"{query} is:issue is:open", "per_page": max_results}, timeout=30)
            if r.status_code == 200:
                for item in r.json().get("items", []):
                    user = item.get("user", {}).get("login", "")
                    if "[bot]" in user.lower():
                        continue  # automated accounts are never leads
                    leads.append(self._make_lead(f"github_{category}", f"gh_{item['id']}", "github",
                        item.get("title", ""), category, item.get("html_url", ""), user))
                    self.stats["github"] += 1
        except Exception as e:
            # Best-effort per platform: log and return what we have so far.
            print(f"    [GH ERROR] {e}")
        return leads

    def search_gitlab(self, query, category, max_results=20):
        """Search open GitLab issues (instance-wide scope) for `query`."""
        leads = []
        try:
            r = self.session.get("https://gitlab.com/api/v4/issues",
                params={"search": query, "state": "opened", "per_page": max_results, "scope": "all"}, timeout=30)
            if r.status_code == 200:
                for item in r.json():
                    author = item.get("author", {})
                    leads.append(self._make_lead(f"gitlab_{category}", f"gl_{item['id']}", "gitlab",
                        item.get("title", ""), category, item.get("web_url", ""), author.get("username", "")))
                    self.stats["gitlab"] += 1
        except Exception as e:
            print(f"    [GL ERROR] {e}")
        return leads

    def search_stackoverflow(self, query, category, max_results=20):
        """Search Stack Overflow for unanswered questions matching `query`."""
        leads = []
        try:
            r = self.session.get("https://api.stackexchange.com/2.3/search/advanced",
                params={"q": query, "site": "stackoverflow", "pagesize": max_results}, timeout=30)
            if r.status_code == 200:
                for item in r.json().get("items", []):
                    if item.get("is_answered"):
                        continue  # already-answered questions are cold leads
                    leads.append(self._make_lead(f"stackoverflow_{category}", f"so_{item['question_id']}",
                        "stackoverflow", item.get("title", ""), category, item.get("link", ""),
                        item.get("owner", {}).get("display_name", "")))
                    self.stats["stackoverflow"] += 1
        except Exception as e:
            print(f"    [SO ERROR] {e}")
        return leads

    def search_hackernews(self, query, category, max_results=15):
        """Search Ask HN posts (via Algolia) whose titles ask for help."""
        leads = []
        try:
            r = self.session.get("https://hn.algolia.com/api/v1/search",
                params={"query": query, "tags": "ask_hn", "hitsPerPage": max_results}, timeout=30)
            if r.status_code == 200:
                for item in r.json().get("hits", []):
                    title = item.get("title", "")
                    # Only titles that read like requests for assistance qualify.
                    if any(kw in title.lower() for kw in ["help", "advice", "review", "looking"]):
                        leads.append(self._make_lead(f"hackernews_{category}", f"hn_{item['objectID']}",
                            "hackernews", title, category,
                            f"https://news.ycombinator.com/item?id={item['objectID']}",
                            item.get("author", "")))
                        self.stats["hackernews"] += 1
        except Exception as e:
            print(f"    [HN ERROR] {e}")
        return leads

    def search_lobsters(self, query, category, max_results=10):
        """Scan the newest Lobste.rs stories for help-seeking or query-related titles.

        NOTE: the API has no search endpoint, so only the first word of
        `query` is matched against titles.
        """
        leads = []
        try:
            r = self.session.get("https://lobste.rs/newest.json", timeout=30)
            if r.status_code == 200:
                for item in r.json()[:max_results]:
                    title = item.get("title", "")
                    if any(kw in title.lower() for kw in ["help", "review", "advice", query.split()[0].lower()]):
                        # Fall back to the comments page when the story has no external URL.
                        leads.append(self._make_lead(f"lobsters_{category}", f"lob_{item.get('short_id', '')}",
                            "lobsters", title, category,
                            item.get("url", "") or item.get("comments_url", ""),
                            item.get("submitter_user", "")))
                        self.stats["lobsters"] += 1
        except Exception as e:
            print(f"    [LOB ERROR] {e}")
        return leads

    def search_indiehackers(self, query, category, max_results=10):
        """Scan the Indie Hackers RSS feed for help/feedback posts.

        The feed exposes no author, so these leads carry no `user` field;
        identifiers are derived from an MD5 hash of the post URL.
        """
        leads = []
        try:
            r = self.session.get("https://www.indiehackers.com/feed.xml", timeout=30)
            if r.status_code == 200:
                titles = re.findall(r'<title><!\[CDATA\[(.*?)\]\]></title>', r.text)
                links = re.findall(r'<link>(https://www\.indiehackers\.com/post/[^<]+)</link>', r.text)
                for title, link in zip(titles[:max_results], links[:max_results]):
                    if any(kw in title.lower() for kw in ["help", "review", "feedback", "advice", "need"]):
                        leads.append(self._make_lead(f"indiehackers_{category}",
                            f"ih_{hashlib.md5(link.encode()).hexdigest()[:12]}",
                            "indiehackers", title, category, link))
                        self.stats["indiehackers"] += 1
        except Exception as e:
            print(f"    [IH ERROR] {e}")
        return leads

    def search_devto(self, query, category, max_results=15):
        """Search Dev.to articles by a tag derived from `query`."""
        leads = []
        try:
            # Dev.to filters by tag, not free text: collapse the query into a tag slug.
            tag = query.replace(" ", "").lower()[:20]
            r = self.session.get("https://dev.to/api/articles", params={"per_page": max_results, "tag": tag}, timeout=30)
            if r.status_code == 200:
                for item in r.json():
                    title = item.get("title", "")
                    if any(kw in title.lower() for kw in ["help", "issue", "problem", "review", "advice"]):
                        leads.append(self._make_lead(f"devto_{category}", f"devto_{item['id']}", "devto",
                            title, category, item.get("url", ""),
                            item.get("user", {}).get("username", "")))
                        self.stats["devto"] += 1
        except Exception as e:
            print(f"    [DEV ERROR] {e}")
        return leads

    def hunt_category(self, category, queries_count=2):
        """Run up to `queries_count` random queries for one category across
        every platform, persisting new leads; returns the saved leads."""
        all_leads = []
        pool = SEARCH_QUERIES.get(category, [])
        queries = random.sample(pool, min(queries_count, len(pool)))
        pricing = CATEGORY_PRICING.get(category, {"min": 50, "max": 200})
        print(f"\n📦 {category.upper()} (${pricing['min']}-${pricing['max']})")
        searchers = [(self.search_github, "GH"), (self.search_gitlab, "GL"), (self.search_stackoverflow, "SO"),
                     (self.search_hackernews, "HN"), (self.search_lobsters, "LOB"),
                     (self.search_indiehackers, "IH"), (self.search_devto, "DEV")]
        for query in queries:
            print(f"  🔍 '{query}'")
            for fn, tag in searchers:
                for lead in fn(query, category):
                    if self._save_lead(lead):
                        all_leads.append(lead)
                        print(f"    ✅ [{tag}] {lead['data']['title'][:50]}...")
            time.sleep(1)  # be polite between query rounds
        return all_leads

    def hunt_all(self, searches_per_category=2, queries_per_cat=None):
        """Hunt every category in turn.

        `queries_per_cat` (when given) overrides `searches_per_category`
        for backward compatibility.  Returns {'saved': <new-lead count>}.
        """
        queries = queries_per_cat or searches_per_category
        print("\n" + "="*70)
        print("  🌍 EDEN GLOBAL LEAD HUNTER")
        print("  Platforms: GitHub | GitLab | Stack Overflow | Dev.to | HackerNews")
        print("             Lobste.rs | Indie Hackers")
        print("="*70)
        all_leads = []
        for cat in ["enterprise", "security", "startup", "performance", "code_quality", "debugging"]:
            all_leads.extend(self.hunt_category(cat, queries))
            time.sleep(2)  # pause between categories to spread API load
        print("\n" + "="*70)
        print(f"  📊 GLOBAL HUNT COMPLETE - {self.stats['total_saved']} new leads")
        print(f"     GH:{self.stats['github']} GL:{self.stats['gitlab']} SO:{self.stats['stackoverflow']}")
        print(f"     HN:{self.stats['hackernews']} LOB:{self.stats['lobsters']} IH:{self.stats['indiehackers']} DEV:{self.stats['devto']}")
        print("="*70)
        return {"saved": self.stats["total_saved"]}

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="Eden global lead hunter")
    parser.add_argument('--hunt', action='store_true', help="run a full hunt across all categories")
    parser.add_argument('--queries', '-q', type=int, default=2, help="queries per category")
    parser.add_argument('--stats', '-s', action='store_true', help="show saved-lead counts by source")
    args = parser.parse_args()
    hunter = GlobalLeadHunter()
    if args.hunt:
        hunter.hunt_all(args.queries)
    elif args.stats:
        conn = sqlite3.connect(DB_PATH)
        try:
            c = conn.cursor()
            c.execute("SELECT source, COUNT(*) FROM leads GROUP BY source ORDER BY COUNT(*) DESC")
            print("\n🌍 Leads by source:")
            for source, count in c.fetchall():
                print(f"  {source}: {count}")
        finally:
            # BUG FIX: the stats branch never closed its connection.
            conn.close()
    else:
        # BUG FIX: original read `parser.print` — a no-op attribute access
        # that printed nothing; the intent was to show usage.
        parser.print_help()