From e9f06d1cbfaed93893b666f2867ae346c717fb6c Mon Sep 17 00:00:00 2001 From: "yves.gugger" Date: Thu, 11 Dec 2025 09:36:32 +0100 Subject: [PATCH] feat: DropCatch & Sedo API Clients + MARKET_CONCEPT v2 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - DropCatch API Client mit OAuth2 Authentifizierung - Sedo API Client (bereit für Credentials) - Tier 1 APIs → Tier 2 Scraping Fallback-Logik - Admin Endpoints: /test-apis, /trigger-scrape, /scrape-status - MARKET_CONCEPT.md komplett überarbeitet: - Realistische Bestandsaufnahme - 3-Säulen-Konzept (Auktionen, Pounce Direct, Drops) - API-Realität dokumentiert (DropCatch = nur eigene Aktivitäten) - Roadmap und nächste Schritte --- MARKET_CONCEPT.md | 1702 ++++++----------------- backend/app/api/admin.py | 123 ++ backend/app/config.py | 21 + backend/app/services/auction_scraper.py | 246 +++- backend/app/services/dropcatch_api.py | 334 +++++ backend/app/services/sedo_api.py | 314 +++++ 6 files changed, 1425 insertions(+), 1315 deletions(-) create mode 100644 backend/app/services/dropcatch_api.py create mode 100644 backend/app/services/sedo_api.py diff --git a/MARKET_CONCEPT.md b/MARKET_CONCEPT.md index 2aa23db..01fd25e 100644 --- a/MARKET_CONCEPT.md +++ b/MARKET_CONCEPT.md @@ -1,447 +1,173 @@ -# 🎯 POUNCE MARKET — Das Konzept für die Unicorn-Journey +# 🎯 POUNCE MARKET — Das Herzstück der Plattform + +> **Letzte Aktualisierung:** 11. Dezember 2025 --- -# 📦 TEIL 1: BESTANDSAUFNAHME — Was haben wir? +## 📋 Executive Summary -## Übersicht: Code-Inventar +Die **Market Page** ist das Herzstück von Pounce. Hier fließen alle Datenquellen zusammen und werden dem User als **"Clean Feed"** präsentiert. -### ✅ BEHALTEN — Funktioniert gut, Vision-konform +### Vision (aus pounce_terminal.md) +> *"Die Market Page zeigt alle Domains die entweder:* +> 1. *Zu Verkauf stehen (Auktionen)* +> 2. *Bald frei werden (Drops)* +> 3. 
*Über Pounce direkt angeboten werden (Pounce Direct)"* -| Komponente | Pfad | Status | Beschreibung | -|------------|------|--------|--------------| -| **Listings API** | `backend/app/api/listings.py` | ✅ Vollständig | Pounce Direct Marketplace mit DNS-Verifizierung | -| **Listing Model** | `backend/app/models/listing.py` | ✅ Vollständig | DomainListing, ListingInquiry, ListingView | -| **My Listings Page** | `frontend/src/app/terminal/listing/page.tsx` | ✅ Vollständig | Seller Dashboard mit Verification Wizard | -| **Public Marketplace** | `frontend/src/app/buy/page.tsx` | ✅ Vollständig | Öffentliche Browse-Seite für Listings | -| **Listing Detail** | `frontend/src/app/buy/[slug]/page.tsx` | ✅ Vollständig | Öffentliche Landing Page pro Listing | -| **Sniper Alerts API** | `backend/app/api/sniper_alerts.py` | ✅ Vollständig | Alert-Matching für Auktionen | -| **Sniper Alert Model** | `backend/app/models/sniper_alert.py` | ✅ Vollständig | SniperAlert, SniperAlertMatch | -| **Scheduler** | `backend/app/scheduler.py` | ✅ Vollständig | APScheduler mit Scraping, Alerts, Checks | -| **Valuation Service** | `backend/app/services/valuation.py` | ✅ Vollständig | Pounce Score Berechnung | -| **TLD Prices API** | `backend/app/api/tld_prices.py` | ✅ Vollständig | Intel/Pricing Feature | -| **TLD Scraper** | `backend/app/services/tld_scraper/` | ✅ Funktioniert | Porkbun + Aggregator | -| **Portfolio API** | `backend/app/api/portfolio.py` | ✅ Vollständig | Eigene Domains verwalten | -| **Domain Health** | `backend/app/services/domain_health.py` | ✅ Vollständig | 4-Layer Monitoring | -| **SEO Analyzer** | `backend/app/services/seo_analyzer.py` | ✅ Vollständig | Moz API Integration | -| **Email Service** | `backend/app/services/email_service.py` | ✅ Vollständig | Notifications | -| **Stripe Service** | `backend/app/services/stripe_service.py` | ✅ Vollständig | Subscriptions | +### Aktueller Stand: Phase 1 — Intelligence ---- - -### ⚠️ ÜBERARBEITEN — Funktioniert, aber Optimierung nötig - -| Komponente | Pfad | Problem | Lösung | -|------------|------|---------|--------| -| **Auction Scraper** | `backend/app/services/auction_scraper.py` | Scraping ist fragil, oft leer | API-First + Fallback-Logik | -| **Auctions API** | `backend/app/api/auctions.py` | Keine Pounce Direct Integration | Unified Feed erstellen | -| **Market Page** | `frontend/src/app/terminal/market/page.tsx` | Zeigt nur externe Auktionen | Pounce Direct integrieren | -| **Pounce Score** | In `market/page.tsx` | Zu simpel (nur Length+TLD) | Erweitern um Markt-Signale | -| **Public Auctions** | `frontend/src/app/auctions/page.tsx` | Kein Pounce Direct Highlight | Visuelle Hierarchie | - ---- - -### ❌ ENTFERNEN / KONSOLIDIEREN — Redundant oder veraltet - -| Komponente | Pfad | Grund | Aktion | -|------------|------|-------|--------| -| **Leere Ordner** | `frontend/src/app/dashboard/` | Leer (Legacy von /command) | Löschen | -| **Leere Ordner** | `frontend/src/app/portfolio/` | Leer (Legacy) | Löschen | -| **Leere Ordner** | `frontend/src/app/settings/` | Leer (Legacy) | Löschen | -| **Leere Ordner** | `frontend/src/app/watchlist/` | Leer (Legacy) | Löschen | -| **Leere Ordner** | `frontend/src/app/careers/` | Kein Inhalt | Löschen oder TODO | -| **Intelligence Redirect** | `frontend/src/app/intelligence/page.tsx` | Redirect zu /tld-pricing | Prüfen ob noch nötig | -| **Market Public** | `frontend/src/app/market/page.tsx` | Duplikat? Prüfen | Ggf. konsolidieren mit /auctions | - ---- - -## Detaillierte Analyse pro Bereich - -### 1. 
BACKEND: API Routes (`backend/app/api/`) - -``` -backend/app/api/ -├── __init__.py ✅ Router-Registration -├── admin.py ✅ Admin Panel APIs -├── auctions.py ⚠️ Überarbeiten (Unified Feed) -├── auth.py ✅ Login/Register/JWT -├── blog.py ✅ Blog Feature -├── check.py ✅ Domain Availability Check -├── contact.py ✅ Kontaktformular -├── deps.py ✅ Dependencies -├── domains.py ✅ Watchlist -├── listings.py ✅ Pounce Direct Marketplace -├── oauth.py ✅ Google/GitHub OAuth -├── portfolio.py ✅ Portfolio Management -├── price_alerts.py ✅ TLD Price Alerts -├── seo.py ✅ SEO Juice (Tycoon) -├── sniper_alerts.py ✅ Auction Sniper Alerts -├── subscription.py ✅ Stripe Integration -├── tld_prices.py ✅ TLD Pricing Data -└── webhooks.py ✅ Stripe Webhooks -``` - -**Aktion:** -- `auctions.py`: Unified Feed Endpoint hinzufügen der Pounce Direct + External kombiniert - ---- - -### 2. BACKEND: Services (`backend/app/services/`) - -``` -backend/app/services/ -├── auction_scraper.py ⚠️ Fallback-Logik verbessern -├── auth.py ✅ Behalten -├── domain_checker.py ✅ Behalten -├── domain_health.py ✅ Behalten -├── email_service.py ✅ Behalten -├── price_tracker.py ✅ Behalten -├── seo_analyzer.py ✅ Behalten -├── stripe_service.py ✅ Behalten -├── valuation.py ⚠️ Pounce Score v2.0 integrieren -└── tld_scraper/ - ├── aggregator.py ✅ Behalten - ├── base.py ✅ Behalten - ├── porkbun.py ✅ Behalten - └── tld_list.py ✅ Behalten -``` - -**Aktionen:** -1. `auction_scraper.py`: Methode `scrape_with_fallback()` hinzufügen -2. `valuation.py`: Pounce Score v2.0 mit Market Signals - ---- - -### 3. BACKEND: Models (`backend/app/models/`) - -``` -backend/app/models/ -├── admin_log.py ✅ Behalten -├── auction.py ✅ DomainAuction, AuctionScrapeLog -├── blog.py ✅ Behalten -├── domain.py ✅ Domain, DomainCheck -├── listing.py ✅ DomainListing, ListingInquiry, ListingView -├── newsletter.py ✅ Behalten -├── portfolio.py ✅ PortfolioDomain -├── price_alert.py ✅ TLDPriceAlert -├── seo_data.py ✅ DomainSEOData -├── sniper_alert.py ✅ SniperAlert, SniperAlertMatch -├── subscription.py ✅ Subscription, tier config -├── tld_price.py ✅ TLDPrice, TLDInfo -└── user.py ✅ User -``` - -**Status:** Alle Models sind sauber und Vision-konform. Keine Änderungen nötig. - ---- - -### 4. FRONTEND: Terminal (Authenticated) (`frontend/src/app/terminal/`) - -``` -frontend/src/app/terminal/ -├── page.tsx ✅ Redirect zu /radar -├── radar/page.tsx ✅ Dashboard -├── market/page.tsx ⚠️ Pounce Direct integrieren! -├── intel/page.tsx ✅ TLD Overview -├── intel/[tld]/page.tsx ✅ TLD Detail -├── watchlist/page.tsx ✅ Domain Monitoring -├── listing/page.tsx ✅ My Listings (Seller Dashboard) -├── settings/page.tsx ✅ User Settings -└── welcome/page.tsx ✅ Onboarding -``` - -**Aktionen:** -1. `market/page.tsx`: Pounce Direct Listings im Feed anzeigen -2. `market/page.tsx`: Visuelle Hierarchie (💎 Pounce vs 🏢 External) - ---- - -### 5. FRONTEND: Public Pages (`frontend/src/app/`) - -``` -frontend/src/app/ -├── page.tsx ✅ Landing Page -├── auctions/page.tsx ⚠️ Pounce Direct hervorheben -├── buy/page.tsx ✅ Marketplace Browse -├── buy/[slug]/page.tsx ✅ Listing Detail -├── tld-pricing/ ✅ TLD Intel Public -├── pricing/page.tsx ✅ Subscription Tiers -├── blog/ ✅ Blog -├── login/page.tsx ✅ Auth -├── register/page.tsx ✅ Auth -└── ... ✅ Legal, Contact, etc. -``` - -**Aktionen:** -1. `auctions/page.tsx`: "💎 Pounce Direct" Listings prominent anzeigen -2. Konsolidieren: `/market/` mit `/auctions/` zusammenführen? - ---- - -### 6. 
FRONTEND: API Client (`frontend/src/lib/api.ts`) - -**Status:** ✅ Vollständig - -Enthält alle nötigen Methoden: -- `getAuctions()` - Externe Auktionen -- `getMarketplaceListings()` - TODO: Backend anbinden (aktuell leere Liste) - -**Aktion:** -- `getMarketplaceListings()` → Backend Endpoint `/listings` anbinden - ---- - -## Zusammenfassung: Cleanup-Liste - -### Sofort löschen (leere Ordner): -```bash -rm -rf frontend/src/app/dashboard/ -rm -rf frontend/src/app/portfolio/ -rm -rf frontend/src/app/settings/ -rm -rf frontend/src/app/watchlist/ -rm -rf frontend/src/app/careers/ -``` - -### Konsolidieren: -- `/market/page.tsx` und `/auctions/page.tsx` → Eine Seite für Public Market -- `/intelligence/page.tsx` prüfen ob Redirect noch nötig - -### Code-Änderungen: -1. **Market Page (Terminal)**: Pounce Direct + External in einem Feed -2. **Auctions Page (Public)**: Pounce Direct prominent -3. **API Client**: `getMarketplaceListings()` Backend anbinden -4. **Auctions API**: Unified Feed Endpoint -5. **Pounce Score**: v2.0 mit Market Signals - ---- - -# 📊 TEIL 2: KONZEPT — Wohin entwickeln wir? - -## Executive Summary - -Die aktuelle Market-Page funktioniert technisch, aber sie ist noch nicht "Unicorn-ready". -Dieses Konzept transformiert sie von einem einfachen Auktions-Aggregator zur **zentralen Domain-Intelligence-Plattform**. - ---- - -## 📊 IST-Analyse: Aktuelle Implementation - -### Datenquellen (Backend) ``` ┌─────────────────────────────────────────────────────────────────┐ -│ CURRENT DATA FLOW │ +│ POUNCE MARKET — Aktueller Datenfluss │ ├─────────────────────────────────────────────────────────────────┤ │ │ -│ ExpiredDomains.net ──┐ │ -│ │ │ -│ GoDaddy RSS Feed ────┼──→ Web Scraper ──→ PostgreSQL/SQLite │ -│ │ (hourly) (domain_auctions) │ -│ Sedo Public Search ──┤ │ -│ │ │ -│ NameJet Public ──────┤ │ -│ │ │ -│ DropCatch Public ────┘ │ +│ DATENQUELLEN: │ +│ ───────────────────────────────────────────────────────────── │ +│ │ +│ 📦 WEB SCRAPING (Hauptquelle) │ +│ └─→ ExpiredDomains.net (325 Auktionen) ✅ │ +│ └─→ GoDaddy RSS Feed (10 Auktionen) ✅ │ +│ └─→ Sedo Public (7 Auktionen) ✅ │ +│ └─→ NameJet Public (6 Auktionen) ✅ │ +│ └─→ DropCatch Public (7 Auktionen) ✅ │ +│ │ +│ 🔌 OFFIZIELLE APIs (Konfiguriert) │ +│ └─→ DropCatch Partner API ⚠️ (Nur eigene Aktivitäten) │ +│ └─→ Sedo Partner API ⏳ (Credentials fehlen) │ +│ │ +│ 💎 POUNCE DIRECT (User-Listings) │ +│ └─→ DNS-verifizierte Verkaufsangebote ❌ (0 Listings) │ +│ │ +│ 🔮 ZONE FILES (Phase 3 — Zukunft) │ +│ └─→ Verisign .com/.net 🔜 │ +│ └─→ PIR .org 🔜 │ +│ │ +│ ───────────────────────────────────────────────────────────── │ +│ TOTAL: 355 Domains im Feed | 0 Pounce Direct │ │ │ └─────────────────────────────────────────────────────────────────┘ ``` -### Probleme mit dem aktuellen Setup - -| Problem | Impact | Severity | -|---------|--------|----------| -| **Web-Scraping ist fragil** | Seiten ändern Layout → Scraper bricht | 🔴 Hoch | -| **Daten sind oft veraltet** | End-Zeiten stimmen nicht, Preise falsch | 🔴 Hoch | -| **Kein "Pounce Direct" Content** | Alles nur externe Daten, kein USP | 🔴 Hoch | -| **Rate-Limiting & Blocking** | Plattformen blockieren Scraper | 🟡 Mittel | -| **Keine echte Echtzeit-Daten** | Stündliches Scraping ist zu langsam | 🟡 Mittel | -| **Pounce Score ist simpel** | Nur Length + TLD, keine echten Signale | 🟡 Mittel | - --- -## 🚀 SOLL-Konzept: Die Unicorn-Architektur +## 📊 TEIL 1: Bestandsaufnahme — Was haben wir? -### Phase 1: Der "Clean Feed" (Jetzt – 3 Monate) +### A. 
Backend-Komponenten ✅ -**Ziel:** Die beste Auktions-Übersicht mit echtem Mehrwert. +| Komponente | Status | Beschreibung | +|------------|--------|--------------| +| **Unified Feed API** `/auctions/feed` | ✅ Live | Kombiniert Pounce Direct + External | +| **Pounce Score v2.0** | ✅ Live | Length, TLD, Bids, Time Pressure | +| **Vanity Filter** | ✅ Live | Premium-Domains für Public Users | +| **Auction Scraper** | ✅ Läuft | 5 Plattformen, Scheduler aktiv | +| **Listings API** | ✅ Fertig | DNS-Verifizierung, Inquiry-System | +| **Sniper Alerts** | ✅ Fertig | Keyword-Matching, Notifications | -#### 1.1 Daten-Strategie: Hybrid-Ansatz +### B. Frontend-Komponenten ✅ +| Seite | Status | Beschreibung | +|-------|--------|--------------| +| `/terminal/market` | ✅ Live | Vollständiger Market Feed für Auth Users | +| `/auctions` | ✅ Live | Public Market mit Vanity Filter | +| `/buy` | ✅ Live | Pounce Direct Marketplace Browse | +| `/buy/[slug]` | ✅ Live | Listing-Detailseite | +| `/terminal/listing` | ✅ Live | Seller Dashboard | + +### C. Datenquellen — Realitätscheck + +#### Offizielle APIs — Die Ernüchterung + +**DropCatch API:** ``` -┌─────────────────────────────────────────────────────────────────┐ -│ NEW DATA ARCHITECTURE │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ TIER 1: OFFIZIELLE APIs (zuverlässig, real-time) │ -│ ──────────────────────────────────────────────────────────── │ -│ • GoDaddy Partner API (wenn Partner-Account vorhanden) │ -│ • Sedo Partner API (Affiliate-Programm) │ -│ • DropCatch Public API │ -│ │ -│ TIER 2: WEB SCRAPING (Backup, validiert) │ -│ ──────────────────────────────────────────────────────────── │ -│ • ExpiredDomains.net (Deleted Domains) │ -│ • NameJet Public (mit Fallback-Logik) │ -│ │ -│ TIER 3: POUNCE EXCLUSIVE (unser USP!) 
│ -│ ──────────────────────────────────────────────────────────── │ -│ • User-Listings ("Pounce Direct" / "For Sale") │ -│ • DNS-verifizierte Eigentümer │ -│ • Sofort-Kauf-Option │ -│ │ -└─────────────────────────────────────────────────────────────────┘ +Status: ✅ Authentifiziert +Problem: Zeigt nur EIGENE Aktivitäten (Bids, Backorders) + NICHT das öffentliche Auktionsinventar +Nutzen: User-Integration (verbinde dein DropCatch-Konto) ``` -#### 1.2 Der "Clean Feed" Algorithmus +**Sedo API:** +``` +Status: ⏳ Client bereit, Credentials fehlen +Wo finden: Sedo.com → Mein Sedo → API-Zugang +Benötigt: Partner ID + SignKey +``` + +#### Web Scraping — Unsere Hauptquelle ```python -# Spam-Filter v2.0 (Vanity Filter) +# Aktuelle Scraper-Architektur +TIER_1_APIS = [ + ("DropCatch", _fetch_dropcatch_api), # Für eigene Aktivitäten + ("Sedo", _fetch_sedo_api), # Wenn konfiguriert +] + +TIER_2_SCRAPING = [ + ("ExpiredDomains", _scrape_expireddomains), # 325 Domains + ("GoDaddy", _scrape_godaddy_rss), # 10 Domains + ("Sedo", _scrape_sedo_public), # 7 Domains (Fallback) + ("NameJet", _scrape_namejet_public), # 6 Domains + ("DropCatch", _scrape_dropcatch_public), # 7 Domains (Fallback) +] +``` + +--- + +## 🎯 TEIL 2: Das Konzept — Die 3 Säulen des Market + +### Säule 1: AUKTIONEN (Externe Plattformen) + +> *"Zeige alle relevanten Auktionen von GoDaddy, Sedo, NameJet, etc."* + +**Datenquellen:** +- Web Scraping (primär) +- Partner APIs (wenn verfügbar) + +**Filter-Strategie:** +```python +# Vanity Filter für Public Users (aus pounce_features.md) def is_premium_domain(domain: str) -> bool: - name = domain.rsplit('.', 1)[0] - tld = domain.rsplit('.', 1)[1] + name, tld = domain.rsplit('.', 1) - # REGEL 1: Nur Premium-TLDs für Public - premium_tlds = ['com', 'io', 'ai', 'co', 'de', 'ch', 'net', 'org', 'app', 'dev'] - if tld not in premium_tlds: + # Premium TLDs only + if tld not in ['com', 'io', 'ai', 'co', 'ch', 'de', 'net', 'org', 'app', 'dev']: return False - # REGEL 2: Keine Spam-Muster - if len(name) > 12: # Kurz = Premium - return False - if name.count('-') > 0: # Keine Bindestriche - return False - if sum(c.isdigit() for c in name) > 1: # Max 1 Zahl - return False - if any(word in name.lower() for word in ['xxx', 'casino', 'loan', 'cheap']): - return False - - # REGEL 3: Konsonanten-Check (kein "xkqzfgh.com") - consonants = 'bcdfghjklmnpqrstvwxyz' - max_consonant_streak = max(len(list(g)) for k, g in groupby(name, key=lambda c: c.lower() in consonants) if k) - if max_consonant_streak > 4: - return False + # Keine Spam-Muster + if len(name) > 12: return False + if '-' in name: return False + if sum(c.isdigit() for c in name) > 1: return False return True ``` -#### 1.3 Pounce Score 2.0 - -Der aktuelle Score ist zu simpel. 
Hier ist die verbesserte Version: - -```python -def calculate_pounce_score_v2(domain: str, auction_data: dict) -> int: - score = 50 # Baseline - name = domain.rsplit('.', 1)[0] - tld = domain.rsplit('.', 1)[1] - - # ══════════════════════════════════════════════════════════════ - # A) INTRINSIC VALUE (Domain selbst) - # ══════════════════════════════════════════════════════════════ - - # Länge (kurz = wertvoll) - length_scores = {1: 50, 2: 45, 3: 40, 4: 30, 5: 20, 6: 15, 7: 10} - score += length_scores.get(len(name), max(0, 15 - len(name))) - - # TLD Premium - tld_scores = {'com': 20, 'ai': 25, 'io': 18, 'co': 12, 'de': 10, 'ch': 10} - score += tld_scores.get(tld, 0) - - # Dictionary Word Bonus - common_words = ['tech', 'data', 'cloud', 'app', 'dev', 'net', 'hub', 'lab', 'pro'] - if name.lower() in common_words or any(word in name.lower() for word in common_words): - score += 15 - - # ══════════════════════════════════════════════════════════════ - # B) MARKET SIGNALS (Aktivität) - # ══════════════════════════════════════════════════════════════ - - # Bid Activity (mehr Bids = mehr Interesse) - bids = auction_data.get('num_bids', 0) - if bids >= 20: score += 15 - elif bids >= 10: score += 10 - elif bids >= 5: score += 5 - - # Time Pressure (endet bald = Opportunity) - hours_left = auction_data.get('hours_left', 999) - if hours_left < 1: score += 10 # HOT! - elif hours_left < 4: score += 5 - - # Price-to-Value Ratio - current_bid = auction_data.get('current_bid', 0) - estimated_value = estimate_base_value(name, tld) - if current_bid > 0 and estimated_value > current_bid * 1.5: - score += 15 # Unterbewertet! - - # ══════════════════════════════════════════════════════════════ - # C) PENALTIES (Abzüge) - # ══════════════════════════════════════════════════════════════ - - if '-' in name: score -= 30 - if any(c.isdigit() for c in name) and len(name) > 3: score -= 20 - if len(name) > 15: score -= 25 - - return max(0, min(100, score)) -``` +**UI-Darstellung:** +| Domain | Source | Price | Status | Action | +|--------|--------|-------|--------|--------| +| **crypto-bank.io** | 🏢 GoDaddy | $2,500 | ⏱️ 2h left | [Bid ↗] | +| **meta-shop.com** | 🏢 Sedo | $5,000 | 🤝 Offer | [View ↗] | --- -### Phase 2: Der "Pounce Direct" Marktplatz (3 – 6 Monate) +### Säule 2: POUNCE DIRECT (User-Listings) -**Ziel:** Eigenes Inventar = Unique Content = USP - -#### 2.1 Das Killer-Feature: "Pounce Direct" +> *"Das sind die Domains, die es NUR bei Pounce gibt. 
Unser USP."* +**Das Konzept (aus pounce_terminal.md):** ``` ┌─────────────────────────────────────────────────────────────────┐ -│ POUNCE DIRECT INTEGRATION │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ MARKET FEED (Gemischt) │ -│ ═══════════════════════════════════════════════════════════ │ -│ │ -│ ┌───────────────────────────────────────────────────────────┐ │ -│ │ 💎 POUNCE DIRECT │ │ -│ │ ───────────────────────────────────────────────────────── │ │ -│ │ zurich-immo.ch $950 ⚡ INSTANT [BUY] │ │ -│ │ ✅ Verified Owner │ │ -│ └───────────────────────────────────────────────────────────┘ │ -│ │ -│ ┌───────────────────────────────────────────────────────────┐ │ -│ │ 🏢 EXTERNAL AUCTION │ │ -│ │ ───────────────────────────────────────────────────────── │ │ -│ │ techflow.io $250 ⏱️ 6h left [BID ↗] │ │ -│ │ via GoDaddy │ │ -│ └───────────────────────────────────────────────────────────┘ │ -│ │ -└─────────────────────────────────────────────────────────────────┘ -``` - -#### 2.2 Warum das genial ist - -| Vorteil | Erklärung | -|---------|-----------| -| **Unique Content** | Domains, die es NUR bei Pounce gibt | -| **Höhere Conversion** | "Instant Buy" statt "Bid on external site" | -| **Vendor Lock-in** | Verkäufer listen bei uns (weil 0% Provision) | -| **SEO Power** | Jede Listing = eigene Landing Page | -| **Trust Signal** | DNS-Verifizierung = Qualitätsgarantie | - -#### 2.3 Der Flow für Verkäufer (aus `pounce_terminal.md`) - -``` -┌─────────────────────────────────────────────────────────────────┐ -│ LISTING WIZARD │ +│ POUNCE DIRECT — Der Listing-Wizard │ ├─────────────────────────────────────────────────────────────────┤ │ │ │ STEP 1: DOMAIN EINGEBEN │ │ ─────────────────────────────────────────────────────────── │ -│ [________________________] zurich-immo.ch │ -│ Preis: [$950] ○ Fixpreis ○ Verhandlungsbasis │ +│ Domain: [zurich-immo.ch___________] │ +│ Preis: [$950_______] ○ Fixpreis ● Verhandlungsbasis │ │ │ -│ STEP 2: DNS VERIFICATION │ +│ STEP 2: DNS VERIFICATION (Trust-Check) │ │ ─────────────────────────────────────────────────────────── │ -│ Füge diesen TXT-Record zu deiner Domain hinzu: │ +│ Füge diesen TXT-Record bei deinem Registrar hinzu: │ │ │ -│ Name: _pounce-verify │ +│ Name: _pounce-verify │ │ Value: pounce-verify-8a3f7b9c2e1d │ │ │ │ [🔄 VERIFY DNS] │ @@ -449,967 +175,335 @@ def calculate_pounce_score_v2(domain: str, auction_data: dict) -> int: │ STEP 3: LIVE! │ │ ─────────────────────────────────────────────────────────── │ │ ✅ Domain verifiziert! │ -│ Dein Listing ist jetzt im Market Feed sichtbar. │ +│ Dein Listing erscheint jetzt im Market Feed. │ +│ │ +└─────────────────────────────────────────────────────────────────┘ +``` + +**Warum das genial ist:** + +| Vorteil | Erklärung | +|---------|-----------| +| **Unique Content** | Domains, die es NUR bei Pounce gibt | +| **Höhere Conversion** | "Instant Buy" statt "Bid on external site" | +| **Vendor Lock-in** | Verkäufer listen bei uns (0% Provision!) 
| +| **SEO Power** | Jede Listing = eigene Landing Page | +| **Trust Signal** | DNS-Verifizierung = Qualitätsgarantie | + +**UI-Darstellung:** +| Domain | Source | Price | Status | Action | +|--------|--------|-------|--------|--------| +| **zurich-immo.ch** | 💎 **Pounce** | **$950** | ⚡ **Instant** | **[Buy Now]** | + +--- + +### Säule 3: DROPS (Domains die bald frei werden) + +> *"Zeige Domains BEVOR sie in Auktionen landen."* + +**Phase 1 (Jetzt): Deleted Domains via Scraping** +``` +ExpiredDomains.net → Deleted Domains Liste → Pounce Filter → Feed +``` + +**Phase 3 (Zukunft): Zone File Analysis** +``` +┌─────────────────────────────────────────────────────────────────┐ +│ ZONE FILE PIPELINE — Die Unicorn-Strategie │ +├─────────────────────────────────────────────────────────────────┤ +│ │ +│ 1. DAILY DOWNLOAD (4:00 UTC) │ +│ └─→ Zone Files von Verisign, PIR, etc. │ +│ │ +│ 2. DIFF ANALYSIS │ +│ └─→ Was war gestern da, ist heute weg? │ +│ └─→ Diese Domains DROPPEN in 1-5 Tagen! │ +│ │ +│ 3. POUNCE ALGORITHM │ +│ └─→ Nur Premium-Domains durchlassen (Score > 70) │ +│ │ +│ 4. OUTPUT: "Drops Tomorrow" (Tycoon Exclusive) │ +│ └─→ Domains BEVOR sie in Auktionen erscheinen │ │ │ └─────────────────────────────────────────────────────────────────┘ ``` --- -### Phase 3: Die Daten-Hoheit (6 – 12 Monate) 🏆 +## 🔧 TEIL 3: Technische Architektur -**Ziel:** Unabhängigkeit von externen Quellen. **EIGENE DATEN = EIGENES MONOPOL.** - -> *"Pounce weiß Dinge, die GoDaddy dir verheimlicht."* — pounce_strategy.md - -#### 3.1 Zone File Analysis — Der Unicorn-Treiber - -**Was sind Zone Files?** -Zone Files sind die "Master-Listen" aller registrierten Domains pro TLD. Sie werden täglich von den Registries (Verisign, PIR, etc.) aktualisiert. - -**Wer hat Zugang?** -- Jeder kann sich bei ICANN-akkreditierten Registries bewerben -- Verisign (.com/.net): https://www.verisign.com/en_US/channel-resources/domain-registry-products/zone-file/index.xhtml -- PIR (.org): Zone File Access Program -- Donuts (.xyz, .online, etc.): TLD Zone File Access - -**Kosten:** $0 - $10,000/Jahr je nach TLD und Nutzung - -``` -┌─────────────────────────────────────────────────────────────────┐ -│ ZONE FILE PIPELINE — Die Daten-Revolution │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ ┌─────────────────────────────────────────────────────────┐ │ -│ │ TIER 1: CRITICAL TLDs (Sofort beantragen) │ │ -│ ├─────────────────────────────────────────────────────────┤ │ -│ │ Verisign → .com, .net ~160M + 13M Domains │ │ -│ │ PIR → .org ~10M Domains │ │ -│ │ Afilias → .info ~4M Domains │ │ -│ └─────────────────────────────────────────────────────────┘ │ -│ │ │ -│ ▼ │ -│ ┌─────────────────────────────────────────────────────────┐ │ -│ │ TIER 2: PREMIUM TLDs (Phase 2) │ │ -│ ├─────────────────────────────────────────────────────────┤ │ -│ │ CentralNIC → .io, .co Premium für Startups │ │ -│ │ Google → .app, .dev Tech-Domains │ │ -│ │ Donuts → .xyz, .online Volumen │ │ -│ │ SWITCH → .ch Schweizer Markt │ │ -│ └─────────────────────────────────────────────────────────┘ │ -│ │ │ -│ ▼ │ -│ ┌─────────────────────────────────────────────────────────┐ │ -│ │ POUNCE INTELLIGENCE ENGINE │ │ -│ ├─────────────────────────────────────────────────────────┤ │ -│ │ │ │ -│ │ 1. DAILY DOWNLOAD (4:00 UTC) │ │ -│ │ └─→ ~500GB komprimierte Daten pro Tag │ │ -│ │ │ │ -│ │ 2. DIFF ANALYSIS │ │ -│ │ └─→ Was ist NEU? Was ist WEG? │ │ -│ │ │ │ -│ │ 3. DROP PREDICTION │ │ -│ │ └─→ Domains die aus Zone verschwinden = droppen │ │ -│ │ │ │ -│ │ 4. 
QUALITY SCORING (Pounce Algorithm) │ │ -│ │ └─→ Nur Premium-Domains durchlassen │ │ -│ │ │ │ -│ └─────────────────────────────────────────────────────────┘ │ -│ │ │ -│ ▼ │ -│ ┌─────────────────────────────────────────────────────────┐ │ -│ │ OUTPUT: EXKLUSIVE INTELLIGENCE │ │ -│ ├─────────────────────────────────────────────────────────┤ │ -│ │ │ │ -│ │ 🔮 "Drops Tomorrow" — Domains BEVOR sie in Auktionen │ │ -│ │ 📈 "Trending Registrations" — Was wird gerade gehypt │ │ -│ │ ⚠️ "Expiring Premium" — Hochwertige Domains am Ende │ │ -│ │ 🔍 "Pattern Detection" — Welche Keywords explodieren │ │ -│ │ │ │ -│ └─────────────────────────────────────────────────────────┘ │ -│ │ -└─────────────────────────────────────────────────────────────────┘ -``` - -#### 3.2 Der Pounce Algorithm — "No-Bullshit" Filter +### Der Unified Feed API Endpoint ```python -# backend/app/services/zone_analyzer.py (NEU ZU BAUEN) +# backend/app/api/auctions.py -class ZoneFileAnalyzer: - """ - Analysiert Zone Files und findet Premium-Opportunities. - - Input: Raw Zone File (Millionen von Domains) - Output: Gefilterte Premium-Liste (Hunderte) - """ - - async def analyze_drops(self, yesterday: set, today: set) -> list: - """ - Findet Domains die aus der Zone verschwunden sind. - Diese Domains droppen in 1-5 Tagen (Redemption Period). - """ - dropped = yesterday - today # Set-Differenz - - premium_drops = [] - for domain in dropped: - score = self.calculate_pounce_score(domain) - - # Nur Premium durchlassen - if score >= 70: - premium_drops.append({ - "domain": domain, - "score": score, - "drop_date": self.estimate_drop_date(domain), - "estimated_value": self.estimate_value(domain), - }) - - return sorted(premium_drops, key=lambda x: x['score'], reverse=True) - - def calculate_pounce_score(self, domain: str) -> int: - """ - Der Pounce Algorithm — Qualitätsfilter für Domains. - - Faktoren: - - Länge (kurz = wertvoll) - - TLD (com > io > xyz) - - Keine Zahlen/Bindestriche - - Dictionary Word Bonus - - Historische Daten (wenn verfügbar) - """ - name = domain.rsplit('.', 1)[0] - tld = domain.rsplit('.', 1)[1] - score = 50 # Baseline - - # Längen-Score - length_scores = {1: 50, 2: 45, 3: 40, 4: 30, 5: 20, 6: 15, 7: 10} - score += length_scores.get(len(name), max(0, 15 - len(name))) - - # TLD Premium - tld_scores = {'com': 20, 'ai': 25, 'io': 18, 'co': 12, 'ch': 15, 'de': 10} - score += tld_scores.get(tld, 0) - - # Penalties - if '-' in name: score -= 30 - if any(c.isdigit() for c in name): score -= 20 - if len(name) > 12: score -= 15 - - # Dictionary Word Bonus - if self.is_dictionary_word(name): - score += 25 - - return max(0, min(100, score)) -``` - -#### 3.3 Der "Drops Tomorrow" Feed — Tycoon Exclusive - -``` -┌─────────────────────────────────────────────────────────────────┐ -│ 🔮 DROPS TOMORROW — Tycoon Exclusive ($29/mo) │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ Diese Domains sind NICHT in Auktionen! │ -│ Du kannst sie beim Registrar direkt registrieren. │ -│ │ -│ ───────────────────────────────────────────────────────────── │ -│ │ -│ Domain TLD Score Est. Value Drops In │ -│ ───────────────────────────────────────────────────────────── │ -│ pixel.com .com 95 $50,000 23h 45m │ -│ swift.io .io 88 $8,000 23h 12m │ -│ quantum.ai .ai 92 $25,000 22h 58m │ -│ nexus.dev .dev 84 $4,500 22h 30m │ -│ fusion.co .co 81 $3,200 21h 15m │ -│ │ -│ ───────────────────────────────────────────────────────────── │ -│ │ -│ 💡 Pro Tip: Setze bei deinem Registrar einen Backorder │ -│ für diese Domains. 
Wer zuerst kommt... │ -│ │ -│ [🔔 Alert für "pixel.com" setzen] │ -│ │ -└─────────────────────────────────────────────────────────────────┘ -``` - -#### 3.4 Warum das ein MONOPOL schafft - -| Wettbewerber | Datenquelle | Problem | -|--------------|-------------|---------| -| **ExpiredDomains.net** | Zone Files | Zeigt ALLES (Spam-Hölle) | -| **GoDaddy Auctions** | Eigene Daten | Nur GoDaddy-Domains | -| **Sedo** | User-Listings | Überteuert, wenig Volumen | -| **Pounce** | Zone Files + **Algorithmus** | **Premium-gefiltert, clean** | - -**Der Unterschied:** -- ExpiredDomains zeigt dir 100.000 Domains am Tag. Davon sind 99.990 Müll. -- Pounce zeigt dir 100 Premium-Domains. Alle sind es wert, angeschaut zu werden. - -**Das verkauft Abos:** -> *"Ich zahle $29/Monat, weil Pounce mir 20 Stunden Recherche pro Woche spart."* - -#### 3.5 Technische Umsetzung — Server-Anforderungen - -``` -┌─────────────────────────────────────────────────────────────────┐ -│ ZONE FILE PROCESSING — Infrastructure │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ SERVER REQUIREMENTS: │ -│ ──────────────────────────────────────────────────────────── │ -│ • Storage: 2TB SSD (Zone Files sind ~500GB/Tag komprimiert) │ -│ • RAM: 64GB+ (für effizientes Set-Diffing) │ -│ • CPU: 16+ Cores (parallele Analyse) │ -│ • Kosten: ~$300-500/Monat (Hetzner/OVH Dedicated) │ -│ │ -│ PROCESSING PIPELINE: │ -│ ──────────────────────────────────────────────────────────── │ -│ 04:00 UTC │ Zone File Download (FTP/HTTPS) │ -│ 04:30 UTC │ Decompression & Parsing │ -│ 05:00 UTC │ Diff Analysis (gestern vs heute) │ -│ 05:30 UTC │ Quality Scoring (Pounce Algorithm) │ -│ 06:00 UTC │ Database Update (PostgreSQL) │ -│ 06:15 UTC │ Alert Matching (Sniper Alerts) │ -│ 06:30 UTC │ User Notifications (Email/SMS) │ -│ │ -│ STORAGE STRATEGY: │ -│ ──────────────────────────────────────────────────────────── │ -│ • Nur Premium-Domains speichern (Score > 50) │ -│ • 90 Tage History für Trend-Analyse │ -│ • Ältere Daten archivieren (S3 Glacier) │ -│ │ -└─────────────────────────────────────────────────────────────────┘ -``` - -#### 3.6 Phase 1 vs Phase 3 — Was zuerst? - -| Phase | Datenquelle | Status | -|-------|-------------|--------| -| **Phase 1 (JETZT)** | Web Scraping + Pounce Direct | ✅ Implementiert | -| **Phase 3 (6-12 Mo)** | Zone Files | 🔜 Geplant | - -**Warum warten?** -1. Zone File Access braucht Verträge mit Registries (1-3 Monate) -2. Infrastruktur-Investition (~$500/Monat Server) -3. Algorithmus muss getestet werden (False Positives vermeiden) - -**Was wir JETZT tun:** -- Scraping + Pounce Direct perfektionieren -- User-Basis aufbauen (die Zone Files später monetarisiert) -- Algorithmus entwickeln (funktioniert auch ohne Zone Files) - ---- - -## 💡 Konkrete Änderungen für die Market Page - -### Frontend-Änderungen - -#### 1. Visuelle Hierarchie verbessern - -```tsx -// VORHER: Alle Items sehen gleich aus -
-<div>
-  {items.map(item => <AuctionRow item={item} />)}   {/* Komponenten-Namen illustrativ */}
-</div>
-
-// NACHHER: Pounce Direct hervorheben
-<div className="space-y-6">
-  {/* Featured: Pounce Direct (wenn vorhanden) */}
-  {pounceDirectItems.length > 0 && (
-    <div className="featured">
-      <div className="featured-header">
-        <Sparkles />
-        <span>Pounce Direct — Verified Instant Buy</span>
-      </div>
-      {pounceDirectItems.map(item => <PounceDirectCard item={item} />)}
-    </div>
-  )}
-
-  {/* Standard: External Auctions */}
-  <div>
-    {externalItems.map(item => <AuctionRow item={item} />)}
-  </div>
-</div>
-``` - -#### 2. Filter-Presets für User-Journeys - -```tsx -// Quick-Filter Buttons basierend auf User-Intent -const FILTER_PRESETS = { - 'ending-soon': { - label: '⏱️ Ending Soon', - filter: { hours_left: { max: 4 } }, - sort: 'time_asc' - }, - 'bargains': { - label: '💰 Under $100', - filter: { price: { max: 100 }, score: { min: 60 } }, - sort: 'score_desc' - }, - 'premium': { - label: '👑 Premium Only', - filter: { score: { min: 80 }, tld: ['com', 'io', 'ai'] }, - sort: 'price_desc' - }, - 'pounce-only': { - label: '💎 Pounce Direct', - filter: { source: 'pounce' }, - sort: 'created_desc' - } -} -``` - -#### 3. "Opportunity Score" statt nur "Pounce Score" - -```tsx -// Zeige WARUM ein Domain interessant ist -function OpportunityIndicators({ item }) { - const indicators = [] - - if (item.hoursLeft < 2) indicators.push({ icon: '🔥', label: 'Ending soon' }) - if (item.numBids < 3) indicators.push({ icon: '📉', label: 'Low competition' }) - if (item.valueRatio > 2) indicators.push({ icon: '💎', label: 'Undervalued' }) - if (item.isPounce) indicators.push({ icon: '⚡', label: 'Instant buy' }) - - return ( -
-    <div className="flex gap-1">
-      {indicators.map(ind => (
-        <span key={ind.label} title={ind.label}>
-          {ind.icon}
-        </span>
-      ))}
-    </div>
- ) -} -``` - -### Backend-Änderungen - -#### 1. Unified Feed API - -```python -# NEUER ENDPOINT: /api/v1/market/feed @router.get("/feed") async def get_market_feed( - # Filter - source: Optional[str] = Query(None, enum=['all', 'pounce', 'external']), - score_min: int = Query(0, ge=0, le=100), - price_max: Optional[float] = None, - tld: Optional[List[str]] = Query(None), - ending_within: Optional[int] = Query(None, description="Hours"), - - # Sort - sort_by: str = Query('score', enum=['score', 'price', 'time', 'bids']), - - # Pagination - limit: int = Query(30, le=100), + source: str = Query("all", enum=["all", "pounce", "external"]), + keyword: Optional[str] = None, + tld: Optional[str] = None, + min_price: Optional[float] = None, + max_price: Optional[float] = None, + min_score: int = Query(0, ge=0, le=100), + ending_within: Optional[int] = None, # Stunden + verified_only: bool = False, + sort_by: str = Query("score", enum=["score", "price_asc", "price_desc", "time", "newest"]), + limit: int = Query(50, le=200), offset: int = Query(0), - - # Auth current_user: Optional[User] = Depends(get_current_user_optional), ): """ - Unified market feed combining: - - Pounce Direct listings (user-listed domains) - - External auctions (scraped from platforms) + 🚀 UNIFIED MARKET FEED — Das Herz von Pounce - For non-authenticated users: - - Apply vanity filter (premium domains only) - - Blur "Deal Score" (tease upgrade) + Kombiniert: + - 💎 Pounce Direct: DNS-verifizierte User-Listings (Instant Buy) + - 🏢 External Auctions: Scraped von GoDaddy, Sedo, etc. + - 🔮 Drops: Domains die bald frei werden (Phase 3) + + Für nicht-authentifizierte User: + - Vanity Filter aktiv (nur Premium-Domains) + - Pounce Score sichtbar, aber limited Details + + Für authentifizierte User (Trader/Tycoon): + - Vollzugriff auf alle Domains + - Advanced Filtering + - Valuation Data """ - - items = [] - - # 1. Get Pounce Direct listings - pounce_listings = await get_published_listings(db) - for listing in pounce_listings: - items.append({ - 'type': 'pounce_direct', - 'domain': listing.domain, - 'price': listing.asking_price, - 'source': 'Pounce', - 'status': 'instant', - 'verified': listing.verification_status == 'verified', - 'url': f'/buy/{listing.slug}', # Internal! - }) - - # 2. Get external auctions - auctions = await get_active_auctions(db) - for auction in auctions: - # Apply vanity filter for non-auth users - if not current_user and not is_premium_domain(auction.domain): - continue - - items.append({ - 'type': 'auction', - 'domain': auction.domain, - 'price': auction.current_bid, - 'source': auction.platform, - 'status': 'auction', - 'time_left': format_time_remaining(auction.end_time), - 'url': auction.affiliate_url, # External - }) - - # 3. Calculate scores - for item in items: - item['pounce_score'] = calculate_pounce_score_v2( - item['domain'], - item - ) - - # 4. Sort and paginate - items = sorted(items, key=lambda x: x['pounce_score'], reverse=True) - - return { - 'items': items[offset:offset+limit], - 'total': len(items), - 'filters_applied': {...}, - } ``` -#### 2. 
Scraper Verbesserungen +### Pounce Score v2.0 ```python -class AuctionScraperService: - """ - IMPROVED: Resilient scraping with fallbacks +def calculate_pounce_score_v2(domain: str, auction_data: dict) -> int: """ + Der Pounce Score — Qualitäts- und Opportunity-Bewertung - async def scrape_with_fallback(self, platform: str, db: AsyncSession): - """Try multiple methods to get data""" - - methods = [ - (f'_scrape_{platform.lower()}_api', 'API'), # Best: Official API - (f'_scrape_{platform.lower()}_rss', 'RSS'), # Good: RSS Feed - (f'_scrape_{platform.lower()}_html', 'HTML'), # Fallback: HTML Scrape - ] - - for method_name, method_type in methods: - method = getattr(self, method_name, None) - if not method: - continue - - try: - result = await method(db) - if result['found'] > 0: - logger.info(f"{platform}: Got {result['found']} via {method_type}") - return result - except Exception as e: - logger.warning(f"{platform} {method_type} failed: {e}") - continue - - # All methods failed - logger.error(f"{platform}: All scrape methods failed") - return {'found': 0, 'new': 0, 'updated': 0, 'error': 'All methods failed'} + A) INTRINSIC VALUE (Domain selbst) + - Länge (kurz = wertvoll) + - TLD Premium (com > io > xyz) + - Dictionary Word Bonus + + B) MARKET SIGNALS (Aktivität) + - Bid Activity (mehr Bids = mehr Interesse) + - Time Pressure (endet bald = Opportunity) + - Price-to-Value Ratio (unterbewertet = 🔥) + + C) PENALTIES + - Bindestriche (-30) + - Zahlen wenn >3 Zeichen (-20) + - Zu lang >15 Zeichen (-25) + """ + score = 50 # Baseline + name = domain.rsplit('.', 1)[0] + tld = domain.rsplit('.', 1)[1] + + # Länge + if len(name) <= 3: score += 30 + elif len(name) == 4: score += 25 + elif len(name) == 5: score += 20 + elif len(name) <= 7: score += 10 + + # TLD + tld_scores = {'com': 20, 'ai': 25, 'io': 18, 'co': 12, 'ch': 15} + score += tld_scores.get(tld, 0) + + # Market Signals + bids = auction_data.get('num_bids', 0) + if bids >= 20: score += 15 + elif bids >= 10: score += 10 + elif bids >= 5: score += 5 + + # Penalties + if '-' in name: score -= 30 + if any(c.isdigit() for c in name) and len(name) > 3: score -= 20 + + return max(0, min(100, score)) ``` --- -## 📈 Metriken für den Erfolg +## 📈 TEIL 4: Roadmap -### KPIs für Phase 1 +### ✅ ERLEDIGT (Stand: 11. Dezember 2025) -| Metrik | Ziel (3 Monate) | Messung | -|--------|-----------------|---------| -| **Daily Active Users (DAU)** | 500 | PostHog | -| **Conversion Rate (Free → Trader)** | 5% | Stripe | -| **Domains in Feed** | 1000+ | DB Query | -| **Avg. Session Duration** | > 3 min | PostHog | -| **Scrape Success Rate** | > 95% | Logs | +- [x] Unified Feed API `/auctions/feed` +- [x] Pounce Score v2.0 mit Market Signals +- [x] Vanity Filter für Public Users +- [x] Pounce Direct Listing-System (DNS-Verifizierung) +- [x] Sniper Alerts mit Keyword-Matching +- [x] Web Scraping für 5 Plattformen +- [x] DropCatch API Client (für User-Integration) +- [x] Sedo API Client (bereit für Credentials) -### KPIs für Phase 2 +### 🎯 NÄCHSTE SCHRITTE (Diese Woche) -| Metrik | Ziel (6 Monate) | Messung | -|--------|-----------------|---------| -| **Pounce Direct Listings** | 100+ | DB Query | -| **First Sale via Pounce** | ✅ | Manual | -| **GMV (Gross Merchandise Value)** | $50,000 | Tracked | -| **Repeat Sellers** | 20% | DB Query | +1. **Sedo API Credentials eingeben** + - Sedo.com → Mein Sedo → API-Zugang + - Partner ID + SignKey in `.env` + +2. 
**Erste Pounce Direct Listings erstellen** + - Test-Domains zum Verifizieren des Flows + - Zeigt "Unique Content" im Feed + +3. **Scraper-Stabilität verbessern** + - Fallback-Logik testen + - Error-Handling optimieren + +### 🔮 PHASE 3 (6-12 Monate) + +1. **Zone File Access beantragen** + - Verisign (.com/.net) + - PIR (.org) + - Kosten: $0-$10,000/Jahr + +2. **"Drops Tomorrow" Feature** + - Zone File Diff-Analyse + - Tycoon Exclusive ($29/mo) + +3. **Pounce Instant Exchange** + - Integrierter Escrow-Service + - 5% Gebühr (statt 15-20% bei Konkurrenz) --- -## 🛠️ Technische Schulden abbauen +## 🎨 TEIL 5: UI/UX Design -### Priorität 1: Scraper Stabilität +### Die Master-Tabelle (aus pounce_terminal.md) -```python -# Problem: Scraper bricht bei HTML-Änderungen +| Spalte | Inhalt | Visualisierung | +|--------|--------|----------------| +| **Domain** | Name der Domain | Fettgedruckt. Bei "Pounce Direct" → 💎 Icon | +| **Pounce Score** | Qualitäts-Algorithmus | 0-100 (Grün > 80, Gelb 50-80, Rot < 50) | +| **Price / Bid** | Preis oder aktuelles Gebot | `$500` oder `$50 (Bid)` | +| **Status / Time** | Countdown oder Verfügbarkeit | ⏱️ `4h left` oder ⚡ `Instant` | +| **Source** | Herkunft | 🏢 GoDaddy, 💎 Pounce | +| **Action** | Der Button | `[Bid ↗]` oder `[Buy Now]` | -# Lösung: Defensive Parsing mit Fallbacks -def parse_domain_from_row(row) -> Optional[str]: - """Try multiple selectors to find domain""" - selectors = [ - 'a.domain-name', - 'td.domain a', - 'span[data-domain]', - 'a[href*="domain"]', - ] - - for selector in selectors: - elem = row.select_one(selector) - if elem: - text = elem.get_text(strip=True) - if '.' in text and len(text) < 100: - return text.lower() - - return None +### Filter Bar + +``` +[Toggle] Hide Spam (Standard: AN) +[Toggle] Pounce Direct Only +[Dropdown] TLD: .com, .ai, .io, .ch +[Dropdown] Price: < $100, < $1k, High Roller +[Dropdown] Ending: 1h, 4h, 24h, 7d ``` -### Priorität 2: Caching Layer +### Visuelle Hierarchie -```python -# Problem: Jeder Request macht DB-Abfragen +```tsx +// Pounce Direct Items werden prominent angezeigt +{pounceDirectItems.length > 0 && ( +
+  <section className="pounce-exclusive">   {/* Klassen-/Komponenten-Namen illustrativ */}
+    <div className="flex items-center gap-2">
+      <Sparkles className="h-4 w-4" />
+      <span>Pounce Exclusive — Verified Instant Buy</span>
+    </div>
+    {pounceDirectItems.map(item => <DomainRow key={item.domain} item={item} featured />)}
+  </section>
+)}

// External Auctions darunter
+<section>
+  <h2>Active Auctions</h2>
+  {externalItems.map(item => <DomainRow key={item.domain} item={item} />)}
+</section>
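+
+// Annahme: pounceDirectItems und externalItems stammen aus dem Unified Feed
+// (GET /auctions/feed) und werden client-seitig nach item.type
+// ('pounce_direct' vs. 'auction') gruppiert.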
``` --- -## 🎯 Nächste Schritte +## 💰 TEIL 6: Monetarisierung -### ✅ ERLEDIGT (11. Dezember 2025) -- [x] Pounce Score v2.0 implementieren → `_calculate_pounce_score_v2()` in `auctions.py` -- [x] Unified `/auctions/feed` API deployen → Live und funktional -- [x] Pounce Direct Listings im Feed integrieren → Kombiniert mit externen Auktionen -- [x] "💎 Pounce Direct" Badge und Highlighting → Visuelle Hierarchie implementiert -- [x] Filter-Presets im Frontend → "Pounce Only", "Verified", Preis-Filter -- [x] Zone File Access Anleitung → `ZONE_FILE_ACCESS.md` erstellt +### Tier-basierte Features (aus pounce_pricing.md) -### Nächste Woche -- [ ] Erste Pounce Direct Listings erstellen (Testdaten) -- [ ] Scraper-Fallbacks implementieren -- [ ] Verisign Zone File Access beantragen +| Feature | Scout ($0) | Trader ($9) | Tycoon ($29) | +|---------|------------|-------------|--------------| +| **Market Feed** | 🌪️ Raw (Vanity Filter) | ✨ Curated (Clean) | ✨ Curated + Priority | +| **Alert Speed** | 🐢 Daily | 🐇 Hourly | ⚡ Real-Time (10m) | +| **Watchlist** | 5 Domains | 50 Domains | 500 Domains | +| **Sell Domains** | ❌ | ✅ 5 Listings | ✅ 50 Listings + Featured | +| **Pounce Score** | ❌ Locked | ✅ Basic | ✅ + SEO Data | +| **Drops Tomorrow** | ❌ | ❌ | ✅ Exclusive | -### Nächster Monat -- [ ] Opportunity Indicators im UI -- [ ] Redis Caching Layer -- [ ] PIR (.org) Zone File Access +### Die "Conversion-Falle" (aus pounce_features.md) + +Wenn ein nicht-eingeloggter User auf "Buy Now" bei einem Pounce Direct Listing klickt: + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ 🔒 Secure Transaction │ +├─────────────────────────────────────────────────────────────────┤ +│ │ +│ Du bist dabei, ein verifiziertes Direct-Listing anzusehen. │ +│ │ +│ Um den Verkäufer zu kontaktieren und Käuferschutz zu │ +│ genießen, logge dich bitte ein. │ +│ │ +│ [Login] [Create Free Scout Account] │ +│ │ +└─────────────────────────────────────────────────────────────────┘ +``` + +--- + +## 🔑 TEIL 7: Kritische Erkenntnisse + +### API-Realität vs. Erwartung + +| API | Erwartung | Realität | +|-----|-----------|----------| +| **DropCatch** | Alle öffentlichen Auktionen | ❌ Nur eigene Bids/Backorders | +| **Sedo** | TBD | ⏳ Credentials fehlen noch | + +**Konsequenz:** +- Web Scraping bleibt unsere **Hauptquelle** für öffentliche Daten +- APIs sind nützlich für **User-Integration** (verbinde dein DropCatch-Konto) +- **Zone Files** sind der langfristige Weg zur Datenhoheit + +### Der echte USP: Pounce Direct + +> *"Domains die es NUR bei Pounce gibt."* + +Das ist der Schlüssel. Nicht die Aggregation (das kann jeder), sondern der **Unique Content** durch User-Listings. + +**Priorität:** Erste Pounce Direct Listings aktivieren! + +--- + +## 📋 Checkliste für den Launch + +### Backend +- [x] Unified Feed API +- [x] Pounce Score v2.0 +- [x] Vanity Filter +- [x] Scraper aktiv +- [ ] Sedo API Credentials eingeben +- [ ] Scheduler-Intervall optimieren + +### Frontend +- [x] Terminal Market Page +- [x] Public Auctions Page +- [x] Pounce Direct Highlighting +- [x] Filter (Source, TLD, Price) +- [ ] "Hot Right Now" Section +- [ ] Better Empty States + +### Content +- [ ] Erste 5 Test-Listings erstellen +- [ ] DNS-Verifizierung testen +- [ ] Listing-to-Feed Flow validieren --- ## 💎 Fazit -Die Market Page ist das Herzstück von Pounce. Mit diesen Änderungen wird sie: +Die Market Page ist **funktional**, aber der wahre USP (Pounce Direct) ist noch nicht aktiviert. -1. **Zuverlässiger** (Scraper-Fallbacks, Caching) -2. 
**Wertvoller** (Pounce Direct = Unique Content) -3. **Stickier** (bessere UX, personalisierte Filter) -4. **Skalierbarer** (Unicorn-ready Architektur) - -Der Weg zum Unicorn führt über **Datenhoheit** und **einzigartigen Content**. -Pounce Direct ist der erste Schritt. - ---- - -# 🔧 TEIL 3: AKTIONSPLAN — Was tun wir konkret? - -## Phase A: Cleanup (Heute) - -### 1. Leere Ordner löschen - -```bash -# Diese Ordner sind leer und Legacy vom alten /command Routing -rm -rf frontend/src/app/dashboard/ -rm -rf frontend/src/app/portfolio/ -rm -rf frontend/src/app/settings/ -rm -rf frontend/src/app/watchlist/ -rm -rf frontend/src/app/careers/ -``` - -### 2. Redundante Seiten prüfen - -| Seite | Entscheidung | -|-------|--------------| -| `/market/page.tsx` | ❌ Entfernen → Redirect zu `/auctions` | -| `/intelligence/page.tsx` | ⚠️ Prüfen → Redirect zu `/tld-pricing` | - ---- - -## Phase B: Pounce Direct Integration (Diese Woche) - -### 1. Backend: Unified Market Feed API - -**Datei:** `backend/app/api/auctions.py` - -Neuer Endpoint hinzufügen: - -```python -@router.get("/feed") -async def get_unified_market_feed( - source: str = Query("all", enum=["all", "pounce", "external"]), - # ... Filter -): - """ - Unified feed combining: - - Pounce Direct (user listings) - - External auctions (scraped) - """ - items = [] - - # 1. Pounce Direct Listings - if source in ["all", "pounce"]: - listings = await db.execute( - select(DomainListing) - .where(DomainListing.status == "active") - ) - for listing in listings.scalars(): - items.append({ - "type": "pounce_direct", - "domain": listing.domain, - "price": listing.asking_price, - "source": "Pounce", - "status": "instant", - "verified": listing.is_verified, - "url": f"/buy/{listing.slug}", - }) - - # 2. External Auctions - if source in ["all", "external"]: - auctions = await db.execute( - select(DomainAuction) - .where(DomainAuction.is_active == True) - ) - for auction in auctions.scalars(): - items.append({ - "type": "auction", - "domain": auction.domain, - "price": auction.current_bid, - "source": auction.platform, - "status": "auction", - "time_left": _format_time_remaining(auction.end_time), - "url": auction.affiliate_url, - }) - - return {"items": items, "total": len(items)} -``` - -### 2. Frontend: API Client erweitern - -**Datei:** `frontend/src/lib/api.ts` - -```typescript -async getMarketFeed( - source: 'all' | 'pounce' | 'external' = 'all', - filters?: { - keyword?: string - tld?: string - minPrice?: number - maxPrice?: number - } -) { - const params = new URLSearchParams({ source }) - if (filters?.keyword) params.append('keyword', filters.keyword) - if (filters?.tld) params.append('tld', filters.tld) - if (filters?.minPrice) params.append('min_price', filters.minPrice.toString()) - if (filters?.maxPrice) params.append('max_price', filters.maxPrice.toString()) - - return this.request<{ - items: MarketItem[] - total: number - }>(`/auctions/feed?${params.toString()}`) -} -``` - -### 3. Frontend: Market Page updaten - -**Datei:** `frontend/src/app/terminal/market/page.tsx` - -Änderungen: -1. `api.getMarketFeed()` statt `api.getAuctions()` aufrufen -2. Pounce Direct Items visuell hervorheben -3. "Pounce Exclusive" Filter aktivieren - ---- - -## Phase C: Public Page Alignment (Nächste Woche) - -### 1. 
`/auctions/page.tsx` — Pounce Direct hervorheben - -```tsx -// Gruppiere Items -const pounceItems = items.filter(i => i.type === 'pounce_direct') -const externalItems = items.filter(i => i.type === 'auction') - -return ( - <> - {/* Featured: Pounce Direct */} - {pounceItems.length > 0 && ( -
-    <section className="mb-12">   {/* Klassen-/Komponenten-Namen illustrativ */}
-      <div className="flex items-center gap-2">
-        <Sparkles className="h-4 w-4" />
-        <span>Pounce Exclusive — Verified Instant Buy</span>
-      </div>
-      <div className="grid">
-        {pounceItems.map(item => (
-          <ListingCard key={item.domain} item={item} />
-        ))}
-      </div>
-    </section>
-  )}
-
-  {/* Standard: External */}
-  <section>
-    <h2>Active Auctions</h2>
-    <div className="grid">
-      {externalItems.map(item => (
-        <AuctionRow key={item.domain} item={item} />
-      ))}
-    </div>
-  </section>
- -) -``` - -### 2. Konsolidierung - -| Aktion | Details | -|--------|---------| -| `/market/page.tsx` entfernen | Redirect zu `/auctions` | -| `/auctions/page.tsx` umbenennen | → "Market" in Navigation | - ---- - -## Phase D: Score & Scraper Verbesserungen (Woche 2-3) - -### 1. Pounce Score v2.0 - -**Datei:** `backend/app/services/valuation.py` - -Erweitern um: -- Bid Activity Score -- Time Pressure Score -- Value Ratio Score -- Platform Trust Score - -### 2. Scraper Fallbacks - -**Datei:** `backend/app/services/auction_scraper.py` - -```python -async def scrape_with_fallback(self, platform: str, db: AsyncSession): - methods = [ - (f'_scrape_{platform.lower()}_api', 'API'), - (f'_scrape_{platform.lower()}_rss', 'RSS'), - (f'_scrape_{platform.lower()}_html', 'HTML'), - ] - - for method_name, method_type in methods: - method = getattr(self, method_name, None) - if not method: - continue - - try: - result = await method(db) - if result['found'] > 0: - return result - except Exception as e: - logger.warning(f"{platform} {method_type} failed: {e}") - - return {'found': 0, 'error': 'All methods failed'} -``` - ---- - -## Checkliste für den Clean Start - -### Backend: -- [ ] Unified Feed Endpoint `/auctions/feed` erstellen -- [ ] Pounce Score v2.0 in `valuation.py` integrieren -- [ ] Scraper Fallback-Logik hinzufügen - -### Frontend: -- [ ] Leere Ordner löschen -- [ ] `api.getMarketFeed()` implementieren -- [ ] Market Page: Pounce Direct Integration -- [ ] Auctions Page: Visuelle Hierarchie -- [ ] `/market/page.tsx` zu Redirect machen - -### Testing: -- [ ] Listing erstellen → Erscheint im Market Feed? -- [ ] DNS Verification → Funktioniert? -- [ ] External Auctions → Werden geladen? -- [ ] Filter "Pounce Only" → Zeigt nur Listings? - ---- - -## Visualisierung: Datenfluss - -``` -┌─────────────────────────────────────────────────────────────────┐ -│ MARKET FEED │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │ -│ │ LISTINGS │ │ AUCTIONS │ │ SCHEDULER │ │ -│ │ (Pounce) │ │ (External) │ │ (Scrape) │ │ -│ └──────┬──────┘ └──────┬──────┘ └──────┬──────┘ │ -│ │ │ │ │ -│ │ │ │ │ -│ └───────────────────┴───────────────────┘ │ -│ │ │ -│ ▼ │ -│ ┌──────────────────┐ │ -│ │ /auctions/feed │ │ -│ │ (Unified API) │ │ -│ └────────┬─────────┘ │ -│ │ │ -│ ┌─────────────────┼─────────────────┐ │ -│ ▼ ▼ ▼ │ -│ ┌────────────┐ ┌────────────┐ ┌────────────┐ │ -│ │ TERMINAL │ │ PUBLIC │ │ ADMIN │ │ -│ │ /market │ │ /auctions │ │ /admin │ │ -│ └────────────┘ └────────────┘ └────────────┘ │ -│ │ -└─────────────────────────────────────────────────────────────────┘ -``` - ---- - ---- - -# 🚀 TEIL 4: ROADMAP ZUM UNICORN - -## Die 4 Phasen (aus pounce_strategy.md) - -``` -┌─────────────────────────────────────────────────────────────────┐ -│ POUNCE UNICORN ROADMAP │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ PHASE 1: INTELLIGENCE (0-18 Monate) │ -│ ═══════════════════════════════════════════════════════════ │ -│ Ziel: 10.000 User, $1M ARR, Datenhoheit │ -│ │ -│ ✅ Pounce Terminal (Dashboard) │ -│ ✅ TLD Pricing (Market Barometer) │ -│ ✅ Auction Aggregator (Scraping) │ -│ ✅ Watchlist/Monitoring │ -│ ⏳ Pounce Direct (Marketplace) │ -│ 🔜 Zone File Analyse │ -│ │ -│ Status: WIR SIND HIER ◄──────────────────────────────────── │ -│ │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ PHASE 2: LIQUIDITÄT (18-36 Monate) │ -│ ═══════════════════════════════════════════════════════════ │ -│ Ziel: 
Den Transaktionsfluss übernehmen, $10M ARR │ -│ │ -│ 🔮 Pounce Instant Exchange (Escrow integriert) │ -│ 🔮 "Buy Now" Buttons im Dashboard │ -│ 🔮 5% Transaktionsgebühr (statt 15-20% bei Konkurrenz) │ -│ │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ PHASE 3: FINANZIALISIERUNG (3-5 Jahre) │ -│ ═══════════════════════════════════════════════════════════ │ -│ Ziel: Domains als Asset-Klasse, $50-100M ARR │ -│ │ -│ 🔮 Fractional Ownership (Anteile an Premium-Domains) │ -│ 🔮 Domain-Backed Lending (Kredit gegen Domain) │ -│ 🔮 → Wir werden ein FINTECH │ -│ │ -├─────────────────────────────────────────────────────────────────┤ -│ │ -│ PHASE 4: IMPERIUM (5+ Jahre) │ -│ ═══════════════════════════════════════════════════════════ │ -│ Ziel: $1 Mrd. Bewertung, "Too big to fail" │ -│ │ -│ 🔮 Pounce Enterprise Sentinel (B2B Brand Protection) │ -│ 🔮 Fortune 500 Kunden (Apple, Tesla, etc.) │ -│ 🔮 KI-gestützte Phishing-Takedowns │ -│ │ -└─────────────────────────────────────────────────────────────────┘ -``` - -## Was WIR JETZT tun (Phase 1 perfektionieren) - -### Priorität 1: Pounce Direct perfektionieren -- [x] Listing-System gebaut -- [x] DNS-Verifizierung funktioniert -- [ ] **Im Market Feed anzeigen** ← NÄCHSTER SCHRITT -- [ ] Visuelle Hierarchie (💎 Pounce vs 🏢 External) - -### Priorität 2: Datenqualität verbessern -- [x] Scraping läuft -- [ ] Fallback-Logik für Scraper -- [ ] Pounce Score v2.0 - -### Priorität 3: Zone Files vorbereiten -- [ ] Verisign Zone File Access beantragen -- [ ] Algorithmus entwickeln (kann lokal getestet werden) -- [ ] Server-Infrastruktur planen - ---- - -## Zusammenfassung: Der Weg zum Unicorn - -``` - HEUTE 6 MONATE 18+ MONATE - │ │ │ - ▼ ▼ ▼ - ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ - │ SCRAPING │ →→→ │ ZONE FILES │ →→→ │ FINTECH │ - │ + POUNCE │ │ ANALYSIS │ │ BÖRSE │ - │ DIRECT │ │ │ │ │ - └─────────────┘ └─────────────┘ └─────────────┘ - │ │ │ - │ │ │ - "Content Filler" "Daten-Monopol" "Asset-Klasse" - Seite wirkt lebendig Exklusive Intel Domains = Aktien -``` - ---- - -## 💎 Das Mantra - -> **"Don't guess. Know."** -> -> Phase 1: Intelligence -> -> **"Don't just buy. Invest."** -> -> Phase 3: Asset Class - -Der Weg zum Unicorn führt über **Datenhoheit** und **einzigartigen Content**. - -1. **Heute:** Pounce Direct (User-Listings) = Unique Content -2. **Morgen:** Zone Files = Exklusive Intelligence -3. **Übermorgen:** Fintech = Milliarden-Bewertung - ---- - -**Bereit zum Start?** 🚀 - -Sag mir, womit ich beginnen soll: -1. **Cleanup** — Leere Ordner löschen -2. **Backend** — Unified Feed API erstellen -3. **Frontend** — Market Page mit Pounce Direct +**Die Reihenfolge:** +1. ✅ Aggregation funktioniert (Scraping) +2. ⏳ Pounce Direct aktivieren (User-Listings) +3. 🔮 Zone Files für Datenhoheit (Phase 3) +> *"Der Weg zum Unicorn führt nicht über besseres Scraping, sondern über einzigartigen Content."* +> +> — pounce_strategy.md diff --git a/backend/app/api/admin.py b/backend/app/api/admin.py index 1cb1c93..9f408d4 100644 --- a/backend/app/api/admin.py +++ b/backend/app/api/admin.py @@ -981,3 +981,126 @@ async def get_activity_log( ], "total": total, } + + +# ============== API Connection Tests ============== + +@router.get("/test-apis") +async def test_external_apis( + admin: User = Depends(require_admin), +): + """ + Test connections to all external APIs. 
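+    DropCatch authenticates via OAuth2 client credentials, Sedo via
+    Partner ID + SignKey (see config.py); this endpoint only verifies
+    connectivity and does not import auction data.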
+ + Returns status of: + - DropCatch API + - Sedo API + - Moz API (if configured) + """ + from app.services.dropcatch_api import dropcatch_client + from app.services.sedo_api import sedo_client + + results = { + "tested_at": datetime.utcnow().isoformat(), + "apis": {} + } + + # Test DropCatch API + try: + dropcatch_result = await dropcatch_client.test_connection() + results["apis"]["dropcatch"] = dropcatch_result + except Exception as e: + results["apis"]["dropcatch"] = { + "success": False, + "error": str(e), + "configured": dropcatch_client.is_configured + } + + # Test Sedo API + try: + sedo_result = await sedo_client.test_connection() + results["apis"]["sedo"] = sedo_result + except Exception as e: + results["apis"]["sedo"] = { + "success": False, + "error": str(e), + "configured": sedo_client.is_configured + } + + # Summary + results["summary"] = { + "total": len(results["apis"]), + "configured": sum(1 for api in results["apis"].values() if api.get("configured")), + "connected": sum(1 for api in results["apis"].values() if api.get("success")), + } + + return results + + +@router.post("/trigger-scrape") +async def trigger_auction_scrape( + background_tasks: BackgroundTasks, + db: Database, + admin: User = Depends(require_admin), +): + """ + Manually trigger auction scraping from all sources. + + This will: + 1. Try Tier 1 APIs (DropCatch, Sedo) first + 2. Fall back to web scraping for others + """ + from app.services.auction_scraper import AuctionScraperService + + scraper = AuctionScraperService() + + # Run scraping in background + async def run_scrape(): + async with db.begin(): + return await scraper.scrape_all_platforms(db) + + background_tasks.add_task(run_scrape) + + return { + "message": "Auction scraping started in background", + "note": "Check /admin/scrape-status for results" + } + + +@router.get("/scrape-status") +async def get_scrape_status( + db: Database, + admin: User = Depends(require_admin), + limit: int = 10, +): + """Get recent scrape logs.""" + from app.models.auction import AuctionScrapeLog + + query = ( + select(AuctionScrapeLog) + .order_by(desc(AuctionScrapeLog.started_at)) + .limit(limit) + ) + + try: + result = await db.execute(query) + logs = result.scalars().all() + except Exception: + return {"logs": [], "error": "Table not found"} + + return { + "logs": [ + { + "id": log.id, + "platform": log.platform, + "status": log.status, + "auctions_found": log.auctions_found, + "auctions_new": log.auctions_new, + "auctions_updated": log.auctions_updated, + "error_message": log.error_message, + "started_at": log.started_at.isoformat() if log.started_at else None, + "completed_at": log.completed_at.isoformat() if log.completed_at else None, + } + for log in logs + ] + } diff --git a/backend/app/config.py b/backend/app/config.py index df49aad..89b67d2 100644 --- a/backend/app/config.py +++ b/backend/app/config.py @@ -33,6 +33,27 @@ class Settings(BaseSettings): check_minute: int = 0 scheduler_check_interval_hours: int = 24 + # ================================= + # External API Credentials + # ================================= + + # DropCatch API (Official Partner API) + # Docs: https://www.dropcatch.com/hiw/dropcatch-api + dropcatch_client_id: str = "" + dropcatch_client_secret: str = "" + dropcatch_api_base: str = "https://api.dropcatch.com" + + # Sedo API (Partner API - XML-RPC) + # Docs: https://api.sedo.com/apidocs/v1/ + # Find your credentials: Sedo.com → Mein Sedo → API-Zugang + sedo_partner_id: str = "" + sedo_sign_key: str = "" + sedo_api_base: str = 
"https://api.sedo.com/api/v1/" + + # Moz API (SEO Data) + moz_access_id: str = "" + moz_secret_key: str = "" + class Config: env_file = ".env" env_file_encoding = "utf-8" diff --git a/backend/app/services/auction_scraper.py b/backend/app/services/auction_scraper.py index bbaffcb..8a8b6bc 100644 --- a/backend/app/services/auction_scraper.py +++ b/backend/app/services/auction_scraper.py @@ -1,15 +1,18 @@ """ Domain Auction Scraper Service -Scrapes real auction data from various platforms WITHOUT using their APIs. -Uses web scraping to get publicly available auction information. +Data Acquisition Strategy (from MARKET_CONCEPT.md): -Supported Platforms: +TIER 1: OFFICIAL APIs (Most Reliable) +- DropCatch API (Official Partner) ← WE HAVE THIS! + +TIER 2: WEB SCRAPING (Fallback) - ExpiredDomains.net (aggregator for deleted domains) - GoDaddy Auctions (public listings via RSS/public pages) - Sedo (public marketplace) - NameJet (public auctions) -- DropCatch (public auctions) + +The scraper tries Tier 1 first, then falls back to Tier 2 if needed. IMPORTANT: - Respects robots.txt @@ -31,6 +34,8 @@ from sqlalchemy import select, and_, delete from sqlalchemy.ext.asyncio import AsyncSession from app.models.auction import DomainAuction, AuctionScrapeLog +from app.services.dropcatch_api import dropcatch_client +from app.services.sedo_api import sedo_client logger = logging.getLogger(__name__) @@ -102,15 +107,41 @@ class AuctionScraperService: "errors": [], } - # Scrape each platform + # ═══════════════════════════════════════════════════════════════ + # TIER 1: Official APIs (Best data quality) + # ═══════════════════════════════════════════════════════════════ + tier1_apis = [ + ("DropCatch", self._fetch_dropcatch_api), # We have API access! + ("Sedo", self._fetch_sedo_api), # We have API access! + ] + + for platform_name, api_func in tier1_apis: + try: + api_result = await api_func(db) + if api_result.get("found", 0) > 0: + results["platforms"][platform_name] = api_result + results["total_found"] += api_result.get("found", 0) + results["total_new"] += api_result.get("new", 0) + results["total_updated"] += api_result.get("updated", 0) + logger.info(f"✅ {platform_name} API: {api_result['found']} auctions") + except Exception as e: + logger.warning(f"⚠️ {platform_name} API failed, will try scraping: {e}") + + # ═══════════════════════════════════════════════════════════════ + # TIER 2: Web Scraping (Fallback for platforms without API access) + # ═══════════════════════════════════════════════════════════════ scrapers = [ ("ExpiredDomains", self._scrape_expireddomains), ("GoDaddy", self._scrape_godaddy_public), - ("Sedo", self._scrape_sedo_public), ("NameJet", self._scrape_namejet_public), - ("DropCatch", self._scrape_dropcatch_public), ] + # Add fallbacks only if APIs failed + if "DropCatch" not in results["platforms"]: + scrapers.append(("DropCatch", self._scrape_dropcatch_public)) + if "Sedo" not in results["platforms"]: + scrapers.append(("Sedo", self._scrape_sedo_public)) + for platform_name, scraper_func in scrapers: try: platform_result = await scraper_func(db) @@ -561,13 +592,206 @@ class AuctionScraperService: return result - async def _scrape_dropcatch_public(self, db: AsyncSession) -> Dict[str, Any]: + async def _fetch_dropcatch_api(self, db: AsyncSession) -> Dict[str, Any]: """ - Scrape DropCatch public auction listings. - DropCatch shows pending delete auctions publicly. + 🚀 TIER 1: Fetch DropCatch auctions via OFFICIAL API + + This is our preferred method - faster, more reliable, more data. 
+ Uses the official DropCatch Partner API. """ platform = "DropCatch" - result = {"found": 0, "new": 0, "updated": 0} + result = {"found": 0, "new": 0, "updated": 0, "source": "api"} + + if not dropcatch_client.is_configured: + logger.info("DropCatch API not configured, skipping") + return result + + log = AuctionScrapeLog(platform=platform) + db.add(log) + await db.commit() + + try: + # Fetch auctions from official API + api_result = await dropcatch_client.search_auctions(page_size=100) + + auctions = api_result.get("auctions") or api_result.get("items") or [] + result["found"] = len(auctions) + + for dc_auction in auctions: + try: + # Transform to our format + auction_data = dropcatch_client.transform_to_pounce_format(dc_auction) + + if not auction_data["domain"]: + continue + + # Check if exists + existing = await db.execute( + select(DomainAuction).where( + and_( + DomainAuction.domain == auction_data["domain"], + DomainAuction.platform == platform + ) + ) + ) + existing_auction = existing.scalar_one_or_none() + + if existing_auction: + # Update existing + existing_auction.current_bid = auction_data["current_bid"] + existing_auction.num_bids = auction_data["num_bids"] + existing_auction.end_time = auction_data["end_time"] + existing_auction.is_active = True + existing_auction.updated_at = datetime.utcnow() + result["updated"] += 1 + else: + # Create new + new_auction = DomainAuction( + domain=auction_data["domain"], + tld=auction_data["tld"], + platform=platform, + current_bid=auction_data["current_bid"], + currency=auction_data["currency"], + num_bids=auction_data["num_bids"], + end_time=auction_data["end_time"], + auction_url=auction_data["auction_url"], + age_years=auction_data.get("age_years"), + buy_now_price=auction_data.get("buy_now_price"), + reserve_met=auction_data.get("reserve_met"), + traffic=auction_data.get("traffic"), + is_active=True, + ) + db.add(new_auction) + result["new"] += 1 + + except Exception as e: + logger.warning(f"Error processing DropCatch auction: {e}") + continue + + await db.commit() + + log.status = "success" + log.auctions_found = result["found"] + log.auctions_new = result["new"] + log.auctions_updated = result["updated"] + log.completed_at = datetime.utcnow() + await db.commit() + + logger.info(f"DropCatch API: Found {result['found']}, New {result['new']}, Updated {result['updated']}") + return result + + except Exception as e: + logger.error(f"DropCatch API error: {e}") + log.status = "failed" + log.error_message = str(e)[:500] + log.completed_at = datetime.utcnow() + await db.commit() + return result + + async def _fetch_sedo_api(self, db: AsyncSession) -> Dict[str, Any]: + """ + 🚀 TIER 1: Fetch Sedo auctions via OFFICIAL API + + This is our preferred method for Sedo data. + Uses the official Sedo Partner API. 
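+        The response shape varies (a list, a keyed dict, or a nested
+        "result" object), so the payload is normalised before processing.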
+ """ + platform = "Sedo" + result = {"found": 0, "new": 0, "updated": 0, "source": "api"} + + if not sedo_client.is_configured: + logger.info("Sedo API not configured, skipping") + return result + + log = AuctionScrapeLog(platform=platform) + db.add(log) + await db.commit() + + try: + # Fetch auctions from official API + api_result = await sedo_client.search_auctions(page_size=100) + + # Sedo response structure may vary + listings = api_result.get("domains") or api_result.get("items") or api_result.get("result") or [] + if isinstance(listings, dict): + listings = list(listings.values()) if listings else [] + + result["found"] = len(listings) + + for sedo_listing in listings: + try: + # Transform to our format + auction_data = sedo_client.transform_to_pounce_format(sedo_listing) + + if not auction_data["domain"]: + continue + + # Check if exists + existing = await db.execute( + select(DomainAuction).where( + and_( + DomainAuction.domain == auction_data["domain"], + DomainAuction.platform == platform + ) + ) + ) + existing_auction = existing.scalar_one_or_none() + + if existing_auction: + # Update existing + existing_auction.current_bid = auction_data["current_bid"] + existing_auction.num_bids = auction_data["num_bids"] + existing_auction.end_time = auction_data["end_time"] + existing_auction.is_active = True + existing_auction.updated_at = datetime.utcnow() + result["updated"] += 1 + else: + # Create new + new_auction = DomainAuction( + domain=auction_data["domain"], + tld=auction_data["tld"], + platform=platform, + current_bid=auction_data["current_bid"], + currency=auction_data["currency"], + num_bids=auction_data["num_bids"], + end_time=auction_data["end_time"], + auction_url=auction_data["auction_url"], + buy_now_price=auction_data.get("buy_now_price"), + is_active=True, + ) + db.add(new_auction) + result["new"] += 1 + + except Exception as e: + logger.warning(f"Error processing Sedo listing: {e}") + continue + + await db.commit() + + log.status = "success" + log.auctions_found = result["found"] + log.auctions_new = result["new"] + log.auctions_updated = result["updated"] + log.completed_at = datetime.utcnow() + await db.commit() + + logger.info(f"Sedo API: Found {result['found']}, New {result['new']}, Updated {result['updated']}") + return result + + except Exception as e: + logger.error(f"Sedo API error: {e}") + log.status = "failed" + log.error_message = str(e)[:500] + log.completed_at = datetime.utcnow() + await db.commit() + return result + + async def _scrape_dropcatch_public(self, db: AsyncSession) -> Dict[str, Any]: + """ + 📦 TIER 2 FALLBACK: Scrape DropCatch public auction listings. + Only used if the API is not configured or fails. 
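+        scrape_all_platforms() schedules this fallback only when the Tier 1
+        DropCatch fetch reported zero auctions.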
+ """ + platform = "DropCatch" + result = {"found": 0, "new": 0, "updated": 0, "source": "scrape"} log = AuctionScrapeLog(platform=platform) db.add(log) diff --git a/backend/app/services/dropcatch_api.py b/backend/app/services/dropcatch_api.py new file mode 100644 index 0000000..85125e2 --- /dev/null +++ b/backend/app/services/dropcatch_api.py @@ -0,0 +1,334 @@ +""" +DropCatch Official API Client + +This service provides access to DropCatch's official API for: +- Searching domain auctions +- Getting auction details +- Backorder management + +API Documentation: https://www.dropcatch.com/hiw/dropcatch-api +Interactive Docs: https://api.dropcatch.com/swagger + +SECURITY: +- Credentials are loaded from environment variables +- NEVER hardcode credentials in this file + +Usage: + from app.services.dropcatch_api import dropcatch_client + + # Get active auctions + auctions = await dropcatch_client.search_auctions(keyword="tech") +""" +import logging +from datetime import datetime, timedelta +from typing import Optional, List, Dict, Any +import httpx +from functools import lru_cache + +from app.config import get_settings + +logger = logging.getLogger(__name__) + + +class DropCatchAPIClient: + """ + Official DropCatch API Client. + + This uses the V2 API endpoints (V1 is deprecated). + Authentication is via OAuth2 client credentials. + """ + + def __init__(self): + self.settings = get_settings() + self.base_url = self.settings.dropcatch_api_base or "https://api.dropcatch.com" + self.client_id = self.settings.dropcatch_client_id + self.client_secret = self.settings.dropcatch_client_secret + + # Token cache + self._access_token: Optional[str] = None + self._token_expires_at: Optional[datetime] = None + + # HTTP client + self._client: Optional[httpx.AsyncClient] = None + + @property + def is_configured(self) -> bool: + """Check if API credentials are configured.""" + return bool(self.client_id and self.client_secret) + + async def _get_client(self) -> httpx.AsyncClient: + """Get or create HTTP client.""" + if self._client is None or self._client.is_closed: + self._client = httpx.AsyncClient( + timeout=30.0, + headers={ + "Content-Type": "application/json", + "User-Agent": "Pounce/1.0 (Domain Intelligence Platform)" + } + ) + return self._client + + async def close(self): + """Close the HTTP client.""" + if self._client and not self._client.is_closed: + await self._client.aclose() + self._client = None + + async def _authenticate(self) -> str: + """ + Authenticate with DropCatch API and get access token. + + POST https://api.dropcatch.com/authorize + Body: { "clientId": "...", "clientSecret": "..." 
} + + Returns: Access token string + """ + if not self.is_configured: + raise ValueError("DropCatch API credentials not configured") + + # Check if we have a valid cached token + if self._access_token and self._token_expires_at: + if datetime.utcnow() < self._token_expires_at - timedelta(minutes=5): + return self._access_token + + client = await self._get_client() + + try: + response = await client.post( + f"{self.base_url}/authorize", + json={ + "clientId": self.client_id, + "clientSecret": self.client_secret + } + ) + + if response.status_code != 200: + logger.error(f"DropCatch auth failed: {response.status_code} - {response.text}") + raise Exception(f"Authentication failed: {response.status_code}") + + data = response.json() + + # Extract token - the response format may vary + # Common formats: { "token": "...", "expiresIn": 3600 } + # or: { "accessToken": "...", "expiresIn": 3600 } + self._access_token = data.get("token") or data.get("accessToken") or data.get("access_token") + + # Calculate expiry (default 1 hour if not specified) + expires_in = data.get("expiresIn") or data.get("expires_in") or 3600 + self._token_expires_at = datetime.utcnow() + timedelta(seconds=expires_in) + + logger.info("DropCatch API: Successfully authenticated") + return self._access_token + + except httpx.HTTPError as e: + logger.error(f"DropCatch auth HTTP error: {e}") + raise + + async def _request( + self, + method: str, + endpoint: str, + params: Optional[Dict] = None, + json_data: Optional[Dict] = None + ) -> Dict[str, Any]: + """Make an authenticated API request.""" + token = await self._authenticate() + client = await self._get_client() + + headers = { + "Authorization": f"Bearer {token}" + } + + url = f"{self.base_url}{endpoint}" + + try: + response = await client.request( + method=method, + url=url, + params=params, + json=json_data, + headers=headers + ) + + if response.status_code == 401: + # Token expired, re-authenticate + self._access_token = None + token = await self._authenticate() + headers["Authorization"] = f"Bearer {token}" + response = await client.request( + method=method, + url=url, + params=params, + json=json_data, + headers=headers + ) + + response.raise_for_status() + return response.json() + + except httpx.HTTPError as e: + logger.error(f"DropCatch API request failed: {e}") + raise + + # ========================================================================= + # AUCTION ENDPOINTS (V2) + # ========================================================================= + + async def search_auctions( + self, + keyword: Optional[str] = None, + tld: Optional[str] = None, + min_price: Optional[float] = None, + max_price: Optional[float] = None, + ending_within_hours: Optional[int] = None, + page_size: int = 100, + page_token: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Search for domain auctions. + + Endpoint: GET /v2/auctions (or similar - check interactive docs) + + Returns: + { + "auctions": [...], + "cursor": { + "next": "...", + "previous": "..." 
+ } + } + """ + params = { + "pageSize": page_size, + } + + if keyword: + params["searchTerm"] = keyword + if tld: + params["tld"] = tld.lstrip(".") + if min_price is not None: + params["minPrice"] = min_price + if max_price is not None: + params["maxPrice"] = max_price + if ending_within_hours: + params["endingWithinHours"] = ending_within_hours + if page_token: + params["pageToken"] = page_token + + return await self._request("GET", "/v2/auctions", params=params) + + async def get_auction(self, auction_id: int) -> Dict[str, Any]: + """Get details for a specific auction.""" + return await self._request("GET", f"/v2/auctions/{auction_id}") + + async def get_ending_soon( + self, + hours: int = 24, + page_size: int = 50 + ) -> Dict[str, Any]: + """Get auctions ending soon.""" + return await self.search_auctions( + ending_within_hours=hours, + page_size=page_size + ) + + async def get_hot_auctions(self, page_size: int = 50) -> Dict[str, Any]: + """ + Get hot/popular auctions (high bid activity). + Note: The actual endpoint may vary - check interactive docs. + """ + # This might be a different endpoint or sort parameter + params = { + "pageSize": page_size, + "sortBy": "bidCount", # or "popularity" - check docs + "sortOrder": "desc" + } + return await self._request("GET", "/v2/auctions", params=params) + + # ========================================================================= + # BACKORDER ENDPOINTS (V2) + # ========================================================================= + + async def search_backorders( + self, + keyword: Optional[str] = None, + page_size: int = 100, + page_token: Optional[str] = None, + ) -> Dict[str, Any]: + """Search for available backorders (domains dropping soon).""" + params = {"pageSize": page_size} + + if keyword: + params["searchTerm"] = keyword + if page_token: + params["pageToken"] = page_token + + return await self._request("GET", "/v2/backorders", params=params) + + # ========================================================================= + # UTILITY METHODS + # ========================================================================= + + async def test_connection(self) -> Dict[str, Any]: + """Test the API connection and credentials.""" + if not self.is_configured: + return { + "success": False, + "error": "API credentials not configured", + "configured": False + } + + try: + await self._authenticate() + return { + "success": True, + "configured": True, + "client_id": self.client_id.split(":")[0] if ":" in self.client_id else self.client_id, + "authenticated_at": datetime.utcnow().isoformat() + } + except Exception as e: + return { + "success": False, + "error": str(e), + "configured": True + } + + def transform_to_pounce_format(self, dc_auction: Dict) -> Dict[str, Any]: + """ + Transform DropCatch auction to Pounce internal format. + + Maps DropCatch fields to our DomainAuction model. + """ + domain = dc_auction.get("domainName") or dc_auction.get("domain", "") + tld = domain.rsplit(".", 1)[1] if "." 
in domain else "" + + # Parse end time (format may vary) + end_time_str = dc_auction.get("auctionEndTime") or dc_auction.get("endTime") + if end_time_str: + try: + end_time = datetime.fromisoformat(end_time_str.replace("Z", "+00:00")) + except: + end_time = datetime.utcnow() + timedelta(days=1) + else: + end_time = datetime.utcnow() + timedelta(days=1) + + return { + "domain": domain, + "tld": tld, + "platform": "DropCatch", + "current_bid": dc_auction.get("currentBid") or dc_auction.get("price", 0), + "currency": "USD", + "num_bids": dc_auction.get("bidCount") or dc_auction.get("numberOfBids", 0), + "end_time": end_time, + "auction_url": f"https://www.dropcatch.com/domain/{domain}", + "age_years": dc_auction.get("yearsOld") or dc_auction.get("age"), + "buy_now_price": dc_auction.get("buyNowPrice"), + "reserve_met": dc_auction.get("reserveMet"), + "traffic": dc_auction.get("traffic"), + "external_id": str(dc_auction.get("auctionId") or dc_auction.get("id", "")), + } + + +# Singleton instance +dropcatch_client = DropCatchAPIClient() + diff --git a/backend/app/services/sedo_api.py b/backend/app/services/sedo_api.py new file mode 100644 index 0000000..cf58570 --- /dev/null +++ b/backend/app/services/sedo_api.py @@ -0,0 +1,314 @@ +""" +Sedo Official API Client + +This service provides access to Sedo's official API for: +- Domain search and auctions +- Marketplace listings +- Domain pricing + +API Documentation: https://api.sedo.com/apidocs/v1/ +Type: XML-RPC based API + +SECURITY: +- Credentials are loaded from environment variables +- NEVER hardcode credentials in this file + +WHERE TO FIND YOUR CREDENTIALS: +1. Login to https://sedo.com +2. Go to "Mein Sedo" / "My Sedo" +3. Navigate to "API-Zugang" / "API Access" +4. You'll find: + - Partner ID (your user ID) + - SignKey (signature key for authentication) + +Usage: + from app.services.sedo_api import sedo_client + + # Search domains for sale + listings = await sedo_client.search_domains(keyword="tech") +""" +import logging +import hashlib +import time +from datetime import datetime, timedelta +from typing import Optional, List, Dict, Any +import httpx +from xml.etree import ElementTree + +from app.config import get_settings + +logger = logging.getLogger(__name__) + + +class SedoAPIClient: + """ + Official Sedo API Client. + + Sedo uses an XML-RPC style API with signature-based authentication. 
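+    Responses may come back as XML or JSON; _request() inspects the
+    Content-Type header and parses accordingly.
+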
+ Each request must include: + - partnerid: Your partner ID + - signkey: Your signature key (or hashed signature) + """ + + def __init__(self): + self.settings = get_settings() + self.base_url = self.settings.sedo_api_base or "https://api.sedo.com/api/v1/" + self.partner_id = self.settings.sedo_partner_id + self.sign_key = self.settings.sedo_sign_key + + # HTTP client + self._client: Optional[httpx.AsyncClient] = None + + @property + def is_configured(self) -> bool: + """Check if API credentials are configured.""" + return bool(self.partner_id and self.sign_key) + + async def _get_client(self) -> httpx.AsyncClient: + """Get or create HTTP client.""" + if self._client is None or self._client.is_closed: + self._client = httpx.AsyncClient( + timeout=30.0, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "User-Agent": "Pounce/1.0 (Domain Intelligence Platform)" + } + ) + return self._client + + async def close(self): + """Close the HTTP client.""" + if self._client and not self._client.is_closed: + await self._client.aclose() + self._client = None + + def _generate_signature(self, params: Dict[str, Any]) -> str: + """ + Generate request signature for Sedo API. + + The signature is typically: MD5(signkey + sorted_params) + Check Sedo docs for exact implementation. + """ + # Simple implementation - may need adjustment based on actual Sedo requirements + sorted_params = "&".join(f"{k}={v}" for k, v in sorted(params.items())) + signature_base = f"{self.sign_key}{sorted_params}" + return hashlib.md5(signature_base.encode()).hexdigest() + + async def _request( + self, + endpoint: str, + params: Optional[Dict] = None + ) -> Dict[str, Any]: + """Make an authenticated API request.""" + if not self.is_configured: + raise ValueError("Sedo API credentials not configured") + + client = await self._get_client() + + # Base params for all requests + request_params = { + "partnerid": self.partner_id, + "signkey": self.sign_key, + **(params or {}) + } + + url = f"{self.base_url.rstrip('/')}/{endpoint.lstrip('/')}" + + try: + response = await client.get(url, params=request_params) + response.raise_for_status() + + # Sedo API can return XML or JSON depending on endpoint + content_type = response.headers.get("content-type", "") + + if "xml" in content_type: + return self._parse_xml_response(response.text) + elif "json" in content_type: + return response.json() + else: + # Try JSON first, fallback to XML + try: + return response.json() + except: + return self._parse_xml_response(response.text) + + except httpx.HTTPError as e: + logger.error(f"Sedo API request failed: {e}") + raise + + def _parse_xml_response(self, xml_text: str) -> Dict[str, Any]: + """Parse XML response from Sedo API.""" + try: + root = ElementTree.fromstring(xml_text) + return self._xml_to_dict(root) + except Exception as e: + logger.warning(f"Failed to parse XML: {e}") + return {"raw": xml_text} + + def _xml_to_dict(self, element) -> Dict[str, Any]: + """Convert XML element to dictionary.""" + result = {} + for child in element: + if len(child) > 0: + result[child.tag] = self._xml_to_dict(child) + else: + result[child.tag] = child.text + return result + + # ========================================================================= + # DOMAIN SEARCH ENDPOINTS + # ========================================================================= + + async def search_domains( + self, + keyword: Optional[str] = None, + tld: Optional[str] = None, + min_price: Optional[float] = None, + max_price: Optional[float] = None, + page: int = 1, + 
page_size: int = 100, + ) -> Dict[str, Any]: + """ + Search for domains listed on Sedo marketplace. + + Returns domains for sale (not auctions). + """ + params = { + "output_method": "json", # Request JSON response + } + + if keyword: + params["keyword"] = keyword + if tld: + params["tld"] = tld.lstrip(".") + if min_price is not None: + params["minprice"] = min_price + if max_price is not None: + params["maxprice"] = max_price + if page: + params["page"] = page + if page_size: + params["pagesize"] = min(page_size, 100) + + return await self._request("DomainSearch", params) + + async def search_auctions( + self, + keyword: Optional[str] = None, + tld: Optional[str] = None, + ending_within_hours: Optional[int] = None, + page: int = 1, + page_size: int = 100, + ) -> Dict[str, Any]: + """ + Search for active domain auctions on Sedo. + """ + params = { + "output_method": "json", + "auction": "true", # Only auctions + } + + if keyword: + params["keyword"] = keyword + if tld: + params["tld"] = tld.lstrip(".") + if page: + params["page"] = page + if page_size: + params["pagesize"] = min(page_size, 100) + + return await self._request("DomainSearch", params) + + async def get_domain_details(self, domain: str) -> Dict[str, Any]: + """Get detailed information about a specific domain.""" + params = { + "domain": domain, + "output_method": "json", + } + return await self._request("DomainDetails", params) + + async def get_ending_soon_auctions( + self, + hours: int = 24, + page_size: int = 50 + ) -> Dict[str, Any]: + """Get auctions ending soon.""" + return await self.search_auctions( + ending_within_hours=hours, + page_size=page_size + ) + + # ========================================================================= + # UTILITY METHODS + # ========================================================================= + + async def test_connection(self) -> Dict[str, Any]: + """Test the API connection and credentials.""" + if not self.is_configured: + return { + "success": False, + "error": "API credentials not configured", + "configured": False, + "hint": "Find your credentials at: Sedo.com → Mein Sedo → API-Zugang" + } + + try: + # Try a simple search to test connection + result = await self.search_domains(keyword="test", page_size=1) + return { + "success": True, + "configured": True, + "partner_id": self.partner_id, + "authenticated_at": datetime.utcnow().isoformat() + } + except Exception as e: + return { + "success": False, + "error": str(e), + "configured": True + } + + def transform_to_pounce_format(self, sedo_listing: Dict) -> Dict[str, Any]: + """ + Transform Sedo listing to Pounce internal format. + + Maps Sedo fields to our DomainAuction model. + """ + domain = sedo_listing.get("domain") or sedo_listing.get("domainname", "") + tld = domain.rsplit(".", 1)[1] if "." 
in domain else ""
+
+        # Parse end time if auction
+        end_time_str = sedo_listing.get("auctionend") or sedo_listing.get("enddate")
+        if end_time_str:
+            try:
+                end_time = datetime.fromisoformat(end_time_str.replace("Z", "+00:00"))
+            except Exception:
+                end_time = datetime.utcnow() + timedelta(days=7)
+        else:
+            end_time = datetime.utcnow() + timedelta(days=7)
+
+        # Price handling (strip thousands separators and currency symbols)
+        price = sedo_listing.get("price") or sedo_listing.get("currentbid") or 0
+        if isinstance(price, str):
+            price = float(price.replace(",", "").replace("$", "").replace("€", ""))
+
+        return {
+            "domain": domain,
+            "tld": tld,
+            "platform": "Sedo",
+            "current_bid": price,
+            "buy_now_price": sedo_listing.get("buynow") or sedo_listing.get("bin"),
+            "currency": sedo_listing.get("currency", "EUR"),
+            "num_bids": sedo_listing.get("numbids") or sedo_listing.get("bidcount", 0),
+            "end_time": end_time,
+            "auction_url": f"https://sedo.com/search/details/?domain={domain}",
+            "age_years": None,
+            "reserve_met": sedo_listing.get("reservemet"),
+            "traffic": sedo_listing.get("traffic"),
+            "is_auction": sedo_listing.get("isauction") == "1" or sedo_listing.get("auction") is True,
+        }
+
+
+# Singleton instance
+sedo_client = SedoAPIClient()
+
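A quick local smoke test for both Tier 1 clients — a minimal sketch, assuming the backend package is importable and the credentials above are set in `.env`; it only uses `test_connection()` and `close()` from the new clients:

```python
import asyncio

from app.services.dropcatch_api import dropcatch_client
from app.services.sedo_api import sedo_client


async def main() -> None:
    # test_connection() reports configuration and auth problems in its result
    # instead of raising, so this loop is safe to run without credentials.
    for name, client in (("DropCatch", dropcatch_client), ("Sedo", sedo_client)):
        status = await client.test_connection()
        print(f"{name}: configured={status.get('configured')}, success={status.get('success')}")
        await client.close()


if __name__ == "__main__":
    asyncio.run(main())
```

The same check is exposed over HTTP for admins via `GET /admin/test-apis`, so this script is only needed when testing outside the running backend.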