#!/usr/bin/env python3
"""Simple HTTP API wrapper for darknet search"""
from http.server import HTTPServer, BaseHTTPRequestHandler
import urllib.parse
import json
import subprocess

class Handler(BaseHTTPRequestHandler):
    """Minimal JSON API over ``BaseHTTPRequestHandler``.

    GET routes:
        /search      ?q=...    run darknet-search.py <q> as a subprocess
        /search2     ?q=...    in-process aggregate search (SearXNG + Torch)
        /reputation  ?url=...  darknet-search.py --reputation <url>
        /screenshot  ?url=...  darknet-screenshot.py, result served via /img
        /img         ?f=...    serve a previously captured PNG from /tmp
        /fetch       ?url=...  darknet-search.py --fetch <url>

    All responses are JSON except /img, which streams image/png.
    """

    # CLI tool that the subprocess-backed routes shell out to.
    SEARCH_SCRIPT = "/home/serv3090/darknet-search.py"

    def do_GET(self):
        """Dispatch the request path to the matching route handler."""
        parsed = urllib.parse.urlparse(self.path)
        params = urllib.parse.parse_qs(parsed.query)
        routes = {
            "/search": self._handle_search,
            "/search2": self._handle_search2,
            "/reputation": self._handle_reputation,
            "/screenshot": self._handle_screenshot,
            "/img": self._handle_img,
            "/fetch": self._handle_fetch,
        }
        route = routes.get(parsed.path)
        if route is None:
            self._respond(404, {"error": "not found"})
        else:
            route(params)

    def _require_param(self, params, name):
        """Return the first value of query param *name*, or None after a 400."""
        value = params.get(name, [""])[0]
        if not value:
            self._respond(400, {"error": f"missing {name} param"})
            return None
        return value

    def _run_script(self, args, timeout):
        """Run SEARCH_SCRIPT with *args* and relay stdout/stderr as JSON.

        Any failure (including subprocess timeout) is reported as a 500.
        """
        try:
            result = subprocess.run(
                ["python3", self.SEARCH_SCRIPT, *args],
                capture_output=True, text=True, timeout=timeout,
            )
            self._respond(200, {"output": result.stdout, "error": result.stderr})
        except Exception as e:
            self._respond(500, {"error": str(e)})

    def _handle_search(self, params):
        """GET /search?q=... — plain subprocess search."""
        query = self._require_param(params, "q")
        if query is not None:
            self._run_script([query], timeout=120)

    def _handle_reputation(self, params):
        """GET /reputation?url=... — reputation lookup via the CLI tool."""
        url = self._require_param(params, "url")
        if url is not None:
            self._run_script(["--reputation", url], timeout=180)

    def _handle_fetch(self, params):
        """GET /fetch?url=... — page fetch via the CLI tool."""
        url = self._require_param(params, "url")
        if url is not None:
            self._run_script(["--fetch", url], timeout=60)

    def _handle_search2(self, params):
        """GET /search2?q=... — aggregate SearXNG (clearnet + onions) and Torch.

        Each engine failure is collected into an errors list rather than
        aborting, so partial results are still returned.
        """
        query = self._require_param(params, "q")
        if query is None:
            return
        try:
            # Third-party deps imported lazily so the rest of the API works
            # even if requests/bs4 are unavailable.
            import requests as req
            from bs4 import BeautifulSoup
            from urllib.parse import quote

            TOR_PROXY = {"http": "socks5h://127.0.0.1:9050", "https": "socks5h://127.0.0.1:9050"}
            all_results = []
            errors = []

            def searx(extra_params, engine_label, limit):
                # Query the local SearXNG instance; keep at most *limit* hits.
                resp = req.get(
                    "http://127.0.0.1:8890/search",
                    params={"q": query, "format": "json", **extra_params},
                    timeout=30,
                )
                for item in resp.json().get("results", [])[:limit]:
                    all_results.append({
                        "engine": engine_label,
                        "title": item.get("title", ""),
                        "url": item.get("url", ""),
                        "description": item.get("content", "")[:200],
                    })

            # 1. SearXNG (clearnet search, many engines)
            try:
                searx({}, "SearXNG", 20)
            except Exception as e:
                errors.append(f"SearXNG: {e}")

            # 2. Torch (.onion, unfiltered) — scrape result anchors from HTML.
            try:
                torch_url = (
                    "http://xmh57jrknzkhv6y3ls3ubitzfqnkrwxhopf5aygthi7d6rplyvk3noyd.onion"
                    f"/cgi-bin/omega/omega?P={quote(query)}"
                )
                resp = req.get(torch_url, proxies=TOR_PROXY, timeout=45)
                soup = BeautifulSoup(resp.text, "html.parser")
                seen_hrefs = set()
                torch_count = 0  # counter replaces O(n) recount per result
                for link in soup.find_all("a"):
                    href = link.get("href", "")
                    title = link.get_text(strip=True)
                    # Heuristic: onion link with a non-trivial anchor text.
                    if ".onion" in href and href not in seen_hrefs and title and len(title) > 5:
                        seen_hrefs.add(href)
                        all_results.append({"engine": "Torch", "title": title[:200], "url": href, "description": ""})
                        torch_count += 1
                        if torch_count >= 15:
                            break
            except Exception as e:
                errors.append(f"Torch: {e}")

            # 3. SearXNG onions category
            try:
                searx({"categories": "onions"}, "SearXNG-onions", 10)
            except Exception as e:
                errors.append(f"SearXNG-onions: {e}")

            # Deduplicate by URL, preserving first-seen order.
            seen_urls = set()
            unique = []
            for entry in all_results:
                if entry["url"] not in seen_urls:
                    seen_urls.add(entry["url"])
                    unique.append(entry)

            # Build the text report with join (avoids quadratic += growth).
            parts = [f"Found {len(unique)} results:\n\n"]
            for i, entry in enumerate(unique, 1):
                parts.append(f"{i}. [{entry['engine']}] {entry['title']}\n   URL: {entry['url']}\n")
                if entry.get("description"):
                    parts.append(f"   {entry['description']}\n")
                parts.append("\n")
            output = "".join(parts)
            if errors:
                output += f"\nErrors: {'; '.join(errors)}"
            self._respond(200, {"output": output})
        except Exception as e:
            self._respond(500, {"error": str(e)})

    def _handle_screenshot(self, params):
        """GET /screenshot?url=... — capture a PNG and return a link to /img."""
        url = self._require_param(params, "url")
        if url is None:
            return
        try:
            import time
            fname = f"screenshot-{int(time.time())}.png"
            outfile = f"/tmp/{fname}"
            result = subprocess.run(
                ["python3", "/home/serv3090/darknet-screenshot.py", url, outfile],
                capture_output=True, text=True, timeout=90,
            )
            # The screenshot tool signals success by printing "OK:...".
            if result.stdout.startswith("OK:"):
                img_url = f"http://127.0.0.1:8891/img?f={fname}"
                self._respond(200, {"url": img_url, "path": outfile})
            else:
                self._respond(500, {"error": result.stdout + result.stderr})
        except Exception as e:
            self._respond(500, {"error": str(e)})

    def _handle_img(self, params):
        """GET /img?f=... — serve a captured PNG from /tmp.

        Rejects empty names, slashes and '..' to block path traversal.
        """
        fname = params.get("f", [""])[0]
        if not fname or "/" in fname or ".." in fname:
            self._respond(400, {"error": "invalid filename"})
            return
        # Read fully before sending headers so a read failure cannot corrupt
        # an already-started response.
        try:
            with open(f"/tmp/{fname}", "rb") as f:
                data = f.read()
        except FileNotFoundError:
            self._respond(404, {"error": "image not found"})
            return
        self.send_response(200)
        self.send_header("Content-Type", "image/png")
        self.send_header("Content-Length", str(len(data)))
        self.end_headers()
        self.wfile.write(data)

    def _respond(self, code, data):
        """Send *data* as a JSON body with HTTP status *code*."""
        body = json.dumps(data, ensure_ascii=False).encode()
        self.send_response(code)
        self.send_header("Content-Type", "application/json")
        # Fix: Content-Length was missing, leaving clients to rely on
        # connection close for response framing.
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)

    def log_message(self, format, *args):
        # Intentionally silent: no per-request access logging.
        pass

if __name__ == "__main__":
    # Bind loopback-only so the API is unreachable from other hosts.
    bind_address = ("127.0.0.1", 8891)
    httpd = HTTPServer(bind_address, Handler)
    print("Darknet API listening on :8891")
    httpd.serve_forever()