Compare commits: main ... a58db843e0 (17 commits)

a58db843e0
41abd8214f
a42435c24d
940622a7b7
641b5c1dc2
26ea22899c
35d943a372
f648457353
ae1416bd34
f40d11edb7
d5ee48e0e2
d5e8dcb197
70a710ca83
0582b26be7
3f456658ee
d815c0780f
170eef6d0a
.gitignore (vendored, 9 deletions)

```diff
@@ -28,15 +28,6 @@ dist/
 .env.*.local
 *.log
-
-# Deployment env files (MUST NOT be committed)
-DEPLOY_*.env
-
-# Sensitive runtime artifacts
-backend/data/cookies/*.json
-
-# Local security backup artifacts (created during history rewrite)
-.security-backup/
 
 # IDEs
 .vscode/
 .idea/
```
@@ -1,340 +0,0 @@

# Database Migrations Guide

## Quick Overview

When deploying Pounce to a new server, these tables need to be created:

```
✅ Core Tables (17) - User, Subscription, Domain, TLD, etc.
🆕 New Tables (6)   - Listings, Sniper Alerts, SEO Data
```

---

## Automatic Migration

The easiest way to create all tables:

```bash
cd backend
source venv/bin/activate
python scripts/init_db.py
```

This creates all tables from the SQLAlchemy models automatically.
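For orientation, such an init script usually amounts to importing the models and calling `create_all` on the shared metadata. A minimal sketch, assuming the async engine and declarative `Base` live in `app.database` (illustrative module paths, not verified against the repo):

```python
# scripts/init_db.py (minimal sketch; module paths are assumptions)
import asyncio

from app.database import Base, engine  # assumption: project exposes these
from app.models import *  # noqa: F401,F403 - registers all tables on Base.metadata


async def main() -> None:
    # Creates every table known to the metadata; existing tables are left alone.
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)


if __name__ == "__main__":
    asyncio.run(main())
```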
---

## Manual SQL Migration

If you need to run migrations manually (e.g., on an existing database), use the SQL below.

### NEW Table 1: Domain Listings (For Sale Marketplace)

```sql
-- Main listing table
CREATE TABLE domain_listings (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id),
    domain VARCHAR(255) NOT NULL UNIQUE,
    slug VARCHAR(300) NOT NULL UNIQUE,
    title VARCHAR(200),
    description TEXT,
    asking_price FLOAT,
    min_offer FLOAT,
    currency VARCHAR(3) DEFAULT 'USD',
    price_type VARCHAR(20) DEFAULT 'fixed',  -- 'fixed', 'negotiable', 'make_offer'
    pounce_score INTEGER,
    estimated_value FLOAT,
    verification_status VARCHAR(20) DEFAULT 'not_started',  -- 'not_started', 'pending', 'verified', 'failed'
    verification_code VARCHAR(64),
    verified_at TIMESTAMP,
    status VARCHAR(30) DEFAULT 'draft',  -- 'draft', 'published', 'sold', 'expired', 'removed'
    show_valuation BOOLEAN DEFAULT TRUE,
    allow_offers BOOLEAN DEFAULT TRUE,
    featured BOOLEAN DEFAULT FALSE,
    view_count INTEGER DEFAULT 0,
    inquiry_count INTEGER DEFAULT 0,
    expires_at TIMESTAMP,
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW(),
    published_at TIMESTAMP
);

CREATE INDEX idx_listings_user_id ON domain_listings(user_id);
CREATE INDEX idx_listings_domain ON domain_listings(domain);
CREATE INDEX idx_listings_slug ON domain_listings(slug);
CREATE INDEX idx_listings_status ON domain_listings(status);
CREATE INDEX idx_listings_price ON domain_listings(asking_price);
```

### NEW Table 2: Listing Inquiries

```sql
-- Contact inquiries from potential buyers
CREATE TABLE listing_inquiries (
    id SERIAL PRIMARY KEY,
    listing_id INTEGER NOT NULL REFERENCES domain_listings(id) ON DELETE CASCADE,
    name VARCHAR(100) NOT NULL,
    email VARCHAR(255) NOT NULL,
    phone VARCHAR(50),
    company VARCHAR(200),
    message TEXT NOT NULL,
    offer_amount FLOAT,
    status VARCHAR(20) DEFAULT 'new',  -- 'new', 'read', 'replied', 'archived'
    ip_address VARCHAR(45),
    user_agent VARCHAR(500),
    created_at TIMESTAMP DEFAULT NOW(),
    read_at TIMESTAMP,
    replied_at TIMESTAMP
);

CREATE INDEX idx_inquiries_listing_id ON listing_inquiries(listing_id);
CREATE INDEX idx_inquiries_status ON listing_inquiries(status);
```

### NEW Table 3: Listing Views

```sql
-- Analytics: page views
CREATE TABLE listing_views (
    id SERIAL PRIMARY KEY,
    listing_id INTEGER NOT NULL REFERENCES domain_listings(id) ON DELETE CASCADE,
    ip_address VARCHAR(45),
    user_agent VARCHAR(500),
    referrer VARCHAR(500),
    user_id INTEGER REFERENCES users(id),
    viewed_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX idx_views_listing_id ON listing_views(listing_id);
CREATE INDEX idx_views_date ON listing_views(viewed_at);
```

### NEW Table 4: Sniper Alerts

```sql
-- Saved filter configurations for personalized auction alerts
CREATE TABLE sniper_alerts (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id),
    name VARCHAR(100) NOT NULL,
    description VARCHAR(500),

    -- Filter criteria (stored as JSON for flexibility)
    filter_criteria JSONB NOT NULL DEFAULT '{}',

    -- Quick filters (also stored as columns for fast queries)
    tlds VARCHAR(500),              -- comma-separated: "com,net,io"
    keywords VARCHAR(500),          -- comma-separated search terms
    exclude_keywords VARCHAR(500),  -- words to exclude
    max_length INTEGER,
    min_length INTEGER,
    max_price FLOAT,
    min_price FLOAT,
    max_bids INTEGER,
    ending_within_hours INTEGER,
    platforms VARCHAR(200),         -- "GoDaddy,Sedo,NameJet"

    -- Vanity filters
    no_numbers BOOLEAN DEFAULT FALSE,
    no_hyphens BOOLEAN DEFAULT FALSE,
    exclude_chars VARCHAR(50),

    -- Notification settings
    notify_email BOOLEAN DEFAULT TRUE,
    notify_sms BOOLEAN DEFAULT FALSE,
    notify_push BOOLEAN DEFAULT FALSE,
    max_notifications_per_day INTEGER DEFAULT 10,
    cooldown_minutes INTEGER DEFAULT 30,

    -- Status
    is_active BOOLEAN DEFAULT TRUE,
    matches_count INTEGER DEFAULT 0,
    notifications_sent INTEGER DEFAULT 0,
    last_matched_at TIMESTAMP,
    last_notified_at TIMESTAMP,

    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX idx_alerts_user_id ON sniper_alerts(user_id);
CREATE INDEX idx_alerts_active ON sniper_alerts(is_active);
```

### NEW Table 5: Sniper Alert Matches

```sql
-- Matched auctions for each alert
CREATE TABLE sniper_alert_matches (
    id SERIAL PRIMARY KEY,
    alert_id INTEGER NOT NULL REFERENCES sniper_alerts(id) ON DELETE CASCADE,
    domain VARCHAR(255) NOT NULL,
    platform VARCHAR(50) NOT NULL,
    current_bid FLOAT NOT NULL,
    end_time TIMESTAMP NOT NULL,
    auction_url VARCHAR(500),
    notified BOOLEAN DEFAULT FALSE,
    clicked BOOLEAN DEFAULT FALSE,
    matched_at TIMESTAMP DEFAULT NOW(),
    notified_at TIMESTAMP
);

CREATE INDEX idx_matches_alert_id ON sniper_alert_matches(alert_id);
CREATE INDEX idx_matches_domain ON sniper_alert_matches(domain);
CREATE INDEX idx_matches_notified ON sniper_alert_matches(notified);
```

### NEW Table 6: SEO Data (Tycoon Feature)

```sql
-- Cached SEO metrics for domains (Moz API or estimation)
CREATE TABLE domain_seo_data (
    id SERIAL PRIMARY KEY,
    domain VARCHAR(255) NOT NULL UNIQUE,

    -- Core metrics
    domain_authority INTEGER,  -- 0-100
    page_authority INTEGER,    -- 0-100
    spam_score INTEGER,        -- 0-100
    total_backlinks INTEGER,
    referring_domains INTEGER,

    -- Backlink analysis
    top_backlinks JSONB,       -- [{domain, authority, page}, ...]
    notable_backlinks TEXT,    -- comma-separated high-value domains

    -- Notable link flags
    has_wikipedia_link BOOLEAN DEFAULT FALSE,
    has_gov_link BOOLEAN DEFAULT FALSE,
    has_edu_link BOOLEAN DEFAULT FALSE,
    has_news_link BOOLEAN DEFAULT FALSE,

    -- Value estimation
    seo_value_estimate FLOAT,  -- Estimated $ value based on SEO metrics

    -- Metadata
    data_source VARCHAR(50) DEFAULT 'estimated',  -- 'moz', 'estimated'
    last_updated TIMESTAMP DEFAULT NOW(),
    expires_at TIMESTAMP,      -- Cache expiry (7 days)
    fetch_count INTEGER DEFAULT 0
);

CREATE INDEX idx_seo_domain ON domain_seo_data(domain);
CREATE INDEX idx_seo_da ON domain_seo_data(domain_authority);
```

---

## All Tables Summary

### Core Tables (Already Implemented)

| Table | Purpose |
|-------|---------|
| `users` | User accounts and authentication |
| `subscriptions` | Subscription plans (Scout, Trader, Tycoon) |
| `domains` | Tracked domains in watchlists |
| `domain_checks` | Domain availability check history |
| `tld_prices` | TLD price history (886+ TLDs) |
| `tld_info` | TLD metadata and categories |
| `portfolio_domains` | User-owned domains |
| `domain_valuations` | Domain valuation history |
| `domain_auctions` | Scraped auction listings |
| `auction_scrape_logs` | Scraping job logs |
| `newsletter_subscribers` | Email newsletter list |
| `price_alerts` | TLD price change alerts |
| `admin_activity_logs` | Admin action audit log |
| `blog_posts` | Blog content |

### New Tables (v2.0)

| Table | Purpose | Required For |
|-------|---------|--------------|
| `domain_listings` | For Sale marketplace | `/command/listings`, `/buy` |
| `listing_inquiries` | Buyer messages | Marketplace inquiries |
| `listing_views` | View analytics | Listing stats |
| `sniper_alerts` | Alert configurations | `/command/alerts` |
| `sniper_alert_matches` | Matched auctions | Alert notifications |
| `domain_seo_data` | SEO metrics cache | `/command/seo` (Tycoon) |

---

## Verification

After migration, verify all tables exist:

```sql
-- PostgreSQL
SELECT table_name
FROM information_schema.tables
WHERE table_schema = 'public'
ORDER BY table_name;

-- Should include:
-- domain_listings
-- listing_inquiries
-- listing_views
-- sniper_alerts
-- sniper_alert_matches
-- domain_seo_data
```

---

## Environment Variables for New Features

### Moz API (Optional - for real SEO data)

```env
MOZ_ACCESS_ID=your_moz_access_id
MOZ_SECRET_KEY=your_moz_secret_key
```

Without these variables, the SEO analyzer uses **estimation mode** based on domain characteristics (length, TLD, keywords).
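What estimation mode can look like in practice, as a hedged sketch only; the weights, TLD premiums, and function name below are invented for illustration and are not the analyzer's actual logic:

```python
# Hypothetical sketch of an "estimation mode" fallback; all weights are illustrative.
def estimate_seo_value(domain: str) -> float:
    name, _, tld = domain.rpartition(".")
    score = 100.0

    # Shorter names tend to be worth more.
    score += max(0, 12 - len(name)) * 50

    # A small premium for sought-after TLDs.
    tld_premiums = {"com": 500.0, "io": 250.0, "ai": 400.0, "net": 100.0}
    score += tld_premiums.get(tld, 0.0)

    # Penalize hyphens and digits, which usually depress resale value.
    if "-" in name:
        score *= 0.5
    if any(ch.isdigit() for ch in name):
        score *= 0.7

    return round(score, 2)


print(estimate_seo_value("techflow.io"))
```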
### Stripe (Required for payments)

```env
STRIPE_SECRET_KEY=sk_live_xxx
STRIPE_WEBHOOK_SECRET=whsec_xxx
STRIPE_PRICE_TRADER=price_xxx   # €9/month
STRIPE_PRICE_TYCOON=price_xxx   # €29/month
```

---

## Scheduler Jobs

These background jobs run automatically when the backend starts:

| Job | Schedule | Table Affected |
|-----|----------|----------------|
| Sniper Alert Matching | Every 15 min | `sniper_alert_matches` |
| Auction Scrape | Hourly | `domain_auctions` |
| TLD Price Scrape | Daily 03:00 | `tld_prices` |
| Domain Check | Daily 06:00 | `domain_checks` |

---

## Rollback

If you need to remove the new tables:

```sql
DROP TABLE IF EXISTS sniper_alert_matches CASCADE;
DROP TABLE IF EXISTS sniper_alerts CASCADE;
DROP TABLE IF EXISTS listing_views CASCADE;
DROP TABLE IF EXISTS listing_inquiries CASCADE;
DROP TABLE IF EXISTS domain_listings CASCADE;
DROP TABLE IF EXISTS domain_seo_data CASCADE;
```

---

## Related Documentation

- `README.md` - Full deployment guide
- `DEPLOYMENT.md` - Server setup details
- `backend/app/models/` - SQLAlchemy model definitions
@@ -197,29 +197,48 @@ With these improvements, Pounce becomes a **real premium tool** that needs no ext…

```diff
 ---
 
-## ⚠️ CRITICAL PROBLEM: sample data vs. real data
+## ✅ RESOLVED: no sample/fake data in the auction feed
 
-### Current state of the auction data:
+### New state of the auction data (as of 2025-12)
 
-**Scraping is implemented, BUT:**
+**Scraping now delivers real auction data only** (no estimated prices, no random fallback, no seed/demo data):
 
-1. **ExpiredDomains.net**: works, but:
-   - prices are **estimated** (not real): `estimated_price = base_prices.get(tld, 15)`
-   - these are registration prices, NOT auction prices
+1. **GoDaddy / Namecheap / Sedo** (robust, no Cloudflare problems):
+   - ingested via the ExpiredDomains provider pages, with **price / bids / end time**
+   - advantage: we do not have to scrape the Cloudflare-protected providers directly, yet still get real live data.
 
-2. **GoDaddy/Sedo/NameJet/DropCatch**: scraping exists, but:
-   - the websites have anti-bot measures
-   - layouts change regularly
-   - **sample data is currently often used as a fallback**
+2. **Park.io**
+   - scrapes the public auction table (incl. **price / bids / close date**)
 
-3. **In practice, the page often shows:**
-   ```python
-   # backend/app/services/auction_scraper.py:689-780
-   async def seed_sample_auctions(self, db: AsyncSession):
-       # THIS DATA IS FAKE (demo data)!
-       sample_auctions = [
-           {"domain": "techflow.io", "platform": "GoDaddy", "current_bid": 250, ...},
-           ...
-       ]
-   ```
+3. **Sav**
+   - scrapes the table endpoint `load_domains_ajax/*` (incl. **price / bids / time left** → deterministic `end_time` derivation)
 
-### 🚨 Required for premium quality:
+4. **Dynadot**
+   - hidden JSON API (frontend API) with real price and end-time fields
 
-1. **No estimated prices** - show real auction prices only
-2. **Clear labelling** - when data is uncertain, communicate it transparently
-3. **Fallback strategy** - if scraping fails, show no fake data
+### Data quality rules
 
-### Recommended changes:
-
-```python
-# Instead of estimated prices:
-"current_bid": float(estimated_price),  # ❌ WRONG
-
-# Better:
-"current_bid": None,  # no price = no false information
-"price_type": "registration_estimate",  # labelling
-```
+- **`current_bid > 0` and `end_time` must be present**; otherwise the record is discarded.
+- There is **no** `/api/v1/auctions/seed` endpoint any more, and **no** seed/demo scripts.
 
 ---
```
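The "time left → deterministic `end_time`" derivation mentioned for Sav above boils down to parsing a relative duration and anchoring it to the scrape timestamp. A minimal sketch; the helper name and accepted formats are assumptions, not taken from the repo:

```python
import re
from datetime import datetime, timedelta, timezone

# Hypothetical helper: turn a "time left" string like "2d 4h" or "37m" into an
# absolute end_time, anchored to a fixed scrape timestamp so reruns are deterministic.
_UNITS = {"d": "days", "h": "hours", "m": "minutes", "s": "seconds"}


def end_time_from_time_left(time_left: str, scraped_at: datetime | None = None) -> datetime:
    scraped_at = scraped_at or datetime.now(timezone.utc)
    kwargs: dict[str, int] = {}
    for amount, unit in re.findall(r"(\d+)\s*([dhms])", time_left.lower()):
        kwargs[_UNITS[unit]] = kwargs.get(_UNITS[unit], 0) + int(amount)
    if not kwargs:
        raise ValueError(f"unparseable time-left string: {time_left!r}")
    return scraped_at + timedelta(**kwargs)


# Example: anchored to an explicit timestamp, the result never drifts between runs.
print(end_time_from_time_left("1d 3h", datetime(2025, 12, 11, tzinfo=timezone.utc)))
```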
DEPLOY.md (414 deletions)

@@ -1,414 +0,0 @@
# Pounce Deployment Guide

## Server Information
- **Server IP**: `10.42.0.73`
- **User**: `user`
- **Git Remote**: `git.6bit.ch` (10.13.12.81)
- **Frontend Port**: 3000
- **Backend Port**: 8000
- **Public URL**: https://pounce.ch

## Automated Deployment (Recommended)

### Using the Deploy Script

The `deploy.sh` script handles zero-downtime deployments automatically:

```bash
# Full deployment (commit + push + deploy)
./deploy.sh "Your commit message"

# Frontend only
./deploy.sh -f "Frontend changes"

# Backend only
./deploy.sh -b "Backend changes"

# Quick sync without git operations
./deploy.sh -q

# Force deploy (skips safety checks)
./deploy.sh --force "Force deploy"
```

### What the Script Does

1. **Git Operations** (unless `-q` flag):
   - Commits all changes with your message
   - Pushes to `git.6bit.ch`

2. **Syncing Files**:
   - Uses `rsync` to transfer only changed files to the server
   - Preserves timestamps and permissions
   - Frontend: syncs to `~/pounce/frontend/`
   - Backend: syncs to `~/pounce/backend/`

3. **Building**:
   - Frontend: `npm run build` (creates an optimized production build)
   - Backend: `pip install -r requirements.txt` (updates dependencies)

4. **Restarting Services**:
   - Gracefully restarts Next.js and Uvicorn
   - Zero downtime using `./start.sh`

## Manual Deployment

### Step 1: Commit & Push Local Changes

```bash
cd /Users/yvesgugger/Documents/Projekte/pounce

# Check status
git status

# Add all changes
git add -A

# Commit
git commit -m "Your descriptive commit message"

# Push to git.6bit.ch
git push
```

### Step 2: SSH into Server & Pull Changes

```bash
# Connect to server
sshpass -p "user" ssh user@10.42.0.73

# Navigate to project
cd ~/pounce

# Pull latest changes
git pull
```

### Step 3: Frontend Deployment

```bash
# Navigate to frontend
cd ~/pounce/frontend

# Install dependencies (if package.json changed)
npm install

# Build production version
npm run build

# The build creates a .next folder with optimized static files
```

### Step 4: Backend Deployment

```bash
# Navigate to backend
cd ~/pounce/backend

# Activate virtual environment
source venv/bin/activate

# Install/update dependencies (if requirements.txt changed)
pip install -r requirements.txt

# Deactivate venv
deactivate
```

### Step 5: Restart Services

```bash
# Navigate to project root
cd ~/pounce

# Stop running services
pkill -f 'uvicorn'
pkill -f 'next start'

# Start services using start script
./start.sh
```

## Start Script (`start.sh`)

The `start.sh` script handles:
- Stopping existing processes on ports 8000 and 3000
- Starting the backend (Uvicorn) with proper settings
- Starting the frontend (Next.js) in production mode
- Health checks for both services
- Logging to `backend.log` and `frontend.log`

### Manual Service Management

```bash
# Check running processes
ps aux | grep uvicorn
ps aux | grep next

# View logs
tail -f ~/pounce/backend/backend.log
tail -f ~/pounce/frontend/frontend.log

# Check ports
lsof -i :8000  # Backend
lsof -i :3000  # Frontend
```

## Environment Configuration

### Backend `.env` (~/pounce/backend/.env)

```env
DATABASE_URL=postgresql://user:password@localhost:5432/domainwatch
SECRET_KEY=your-secret-key-here
STRIPE_SECRET_KEY=sk_live_xxx
STRIPE_PUBLISHABLE_KEY=pk_live_xxx
STRIPE_WEBHOOK_SECRET=whsec_xxx
ZOHO_SMTP_USER=noreply@pounce.ch
ZOHO_SMTP_PASSWORD=xxx
GOOGLE_CLIENT_ID=xxx
GOOGLE_CLIENT_SECRET=xxx
GITHUB_CLIENT_ID=xxx
GITHUB_CLIENT_SECRET=xxx
site_url=https://pounce.ch
```

### Frontend `.env.local` (~/pounce/frontend/.env.local)

```env
NEXT_PUBLIC_API_URL=https://pounce.ch/api/v1
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_live_xxx
NEXT_PUBLIC_POSTHOG_KEY=phc_xxx
NEXT_PUBLIC_POSTHOG_HOST=https://eu.i.posthog.com
```

## Nginx Configuration

Nginx acts as a reverse proxy on the server:

```nginx
# Frontend (Next.js)
location / {
    proxy_pass http://localhost:3000;
    proxy_http_version 1.1;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection 'upgrade';
    proxy_set_header Host $host;
    proxy_cache_bypass $http_upgrade;
}

# Backend (FastAPI)
location /api {
    proxy_pass http://localhost:8000;
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Proto $scheme;
}
```

## Troubleshooting

### Frontend won't start

```bash
# Check for port conflicts
lsof -i :3000

# Check build errors
cd ~/pounce/frontend
npm run build

# Check logs
tail -f ~/pounce/frontend/frontend.log
```

### Backend won't start

```bash
# Check for port conflicts
lsof -i :8000

# Test backend manually
cd ~/pounce/backend
source venv/bin/activate
uvicorn app.main:app --host 0.0.0.0 --port 8000

# Check logs
tail -f ~/pounce/backend/backend.log
```

### Database issues

```bash
# Check PostgreSQL status
sudo systemctl status postgresql

# Connect to database
psql -U user -d domainwatch

# Check migrations
cd ~/pounce/backend
alembic current
alembic upgrade head
```

### SSL Certificate issues

```bash
# Check certificate expiry
sudo certbot certificates

# Renew certificates
sudo certbot renew

# Restart Nginx
sudo systemctl restart nginx
```

## Health Checks

```bash
# Backend health
curl http://localhost:8000/health

# Frontend health
curl -I http://localhost:3000

# Full stack check via public URL
curl https://pounce.ch
curl https://pounce.ch/api/health
```

## Rollback Procedure

If deployment fails:

```bash
# On server
cd ~/pounce

# See recent commits
git log --oneline -10

# Roll back to a previous commit
git reset --hard <commit-hash>

# Rebuild
cd frontend && npm run build
cd ../backend && source venv/bin/activate && pip install -r requirements.txt

# Restart
cd .. && ./start.sh
```

## Monitoring & Maintenance

### Log Rotation

Logs are in:
- `~/pounce/backend/backend.log`
- `~/pounce/frontend/frontend.log`

Set up log rotation to prevent disk-space issues:

```bash
# Create logrotate config
sudo nano /etc/logrotate.d/pounce
```

```
/home/user/pounce/backend/backend.log {
    daily
    rotate 14
    compress
    delaycompress
    notifempty
    create 0640 user user
}

/home/user/pounce/frontend/frontend.log {
    daily
    rotate 14
    compress
    delaycompress
    notifempty
    create 0640 user user
}
```

### Cron Jobs

Check scheduled tasks:

```bash
crontab -l
```

Common cron jobs for Pounce:
- Domain scraping
- Health checks
- Database cleanup
- Backup scripts

## Backup & Recovery

### Database Backup

```bash
# Manual backup
pg_dump -U user domainwatch > backup_$(date +%Y%m%d_%H%M%S).sql

# Restore from backup
psql -U user domainwatch < backup_20250101_120000.sql
```

### Code Backup

All code is backed up on `git.6bit.ch`. To clone fresh:

```bash
git clone user@10.13.12.81:yvg/pounce.git
```

## Security Notes

- The server uses SSH key authentication (password: `user` for development)
- SSL certificates via Let's Encrypt (auto-renewal)
- Database credentials live in `.env` files (not committed to git)
- Stripe webhooks require signing-secret verification
- OAuth secrets must match the registered redirect URIs

## Quick Reference

```bash
# Deploy everything
./deploy.sh "message"

# Frontend only
./deploy.sh -f "message"

# Backend only
./deploy.sh -b "message"

# Quick sync (no git)
./deploy.sh -q

# Check logs
ssh user@10.42.0.73 'tail -f ~/pounce/backend/backend.log'

# Restart services
ssh user@10.42.0.73 'cd ~/pounce && ./start.sh'

# Check health
curl https://pounce.ch/api/health
```

## Support

For issues or questions, check:
1. Application logs (`backend.log`, `frontend.log`)
2. Nginx logs (`/var/log/nginx/error.log`)
3. PostgreSQL logs (`/var/log/postgresql/`)
4. System logs (`journalctl -xe`)
@@ -48,8 +48,8 @@ python init_db.py

```diff
 # Seed TLD prices
 python seed_tld_prices.py
 
-# Seed auctions (optional, for demo data)
-python seed_auctions.py
+# Initial auction scrape (real data, no demo data)
+python scripts/scrape_auctions.py
 
 # Create Stripe products
 python -c "
```
DEPLOY_backend.env (new file, 66 additions)

@@ -0,0 +1,66 @@
```env
# =================================
# pounce Backend Configuration
# =================================
# DEPLOY FILE - Copy this to backend/.env on the server

# Database
# SQLite (Development)
DATABASE_URL=sqlite+aiosqlite:///./domainwatch.db

# PostgreSQL (Production)
# DATABASE_URL=postgresql+asyncpg://user:password@localhost:5432/pounce

# Security
SECRET_KEY=62003b69b382cd55f32aba6301a81039e74a84914505d1bfbf254a97a5ccfb36

# JWT Settings
ACCESS_TOKEN_EXPIRE_MINUTES=10080

# CORS Origins (comma-separated)
CORS_ORIGINS=http://localhost:3000,http://127.0.0.1:3000,https://pounce.ch,https://www.pounce.ch

# Scheduler Settings
SCHEDULER_CHECK_INTERVAL_HOURS=24

# OAuth - Google
GOOGLE_CLIENT_ID=865146315769-vi7vcu91d3i7huv8ikjun52jo9ob7spk.apps.googleusercontent.com
GOOGLE_CLIENT_SECRET=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
GOOGLE_REDIRECT_URI=https://pounce.ch/api/v1/oauth/google/callback

# OAuth - GitHub
GITHUB_CLIENT_ID=Ov23liBjROk39vYXi3G5
GITHUB_CLIENT_SECRET=fce447621fb9b497b53eef673de15e39b991e21c
GITHUB_REDIRECT_URI=https://pounce.ch/api/v1/oauth/github/callback

# Site URL
SITE_URL=https://pounce.ch

# =================================
# Email (Zoho Mail)
# =================================
SMTP_HOST=smtp.zoho.eu
SMTP_PORT=465
SMTP_USER=hello@pounce.ch
SMTP_PASSWORD=DvYT0MBvSZ0d
SMTP_FROM_EMAIL=hello@pounce.ch
SMTP_FROM_NAME=pounce
SMTP_USE_TLS=false
SMTP_USE_SSL=true
CONTACT_EMAIL=hello@pounce.ch

# =================================
# Stripe Payments
# =================================
STRIPE_SECRET_KEY=sk_test_51ScLbjCtFUamNRpNMtVAN6kIWRauhabZEJz8lmvlfjT5tcntAFsHzvMlXrlD2hE6wQQgsAgLKYzkkYISH7TYprUJ00lIXh6DXb
STRIPE_PUBLISHABLE_KEY=pk_test_51ScLbjCtFUamNRpNpbrN2JnGoCDpR4sq6ny28ao3ircCWcvJjAQi9vclO5bScGMenkmzmZ6FSG2HWWuCOkL2LFjS009lI4QG59
STRIPE_PRICE_TRADER=price_1ScTLKCtFUamNRpNt8s6oVQi
STRIPE_PRICE_TYCOON=price_1ScTLLCtFUamNRpNhQsEIFUx
STRIPE_WEBHOOK_SECRET=whsec_pqWdtvFbQTtBgCfDTgHwgtxxcWl7JbsZ

# Email Verification
REQUIRE_EMAIL_VERIFICATION=false

# Environment
ENVIRONMENT=production
DEBUG=false
```
@@ -1,55 +0,0 @@

```env
# Deployment environment template (NO SECRETS)
#
# Copy to a *local-only* file and keep it OUT of git:
#   cp DEPLOY_backend.env.example DEPLOY_backend.env
#
# Then fill values from your password manager / secret store.
# Never commit DEPLOY_backend.env.
#
# Core
DATABASE_URL=postgresql+asyncpg://pounce:<DB_PASSWORD>@db:5432/pounce
SECRET_KEY=<GENERATE_64_HEX_CHARS>
ACCESS_TOKEN_EXPIRE_MINUTES=1440
DEBUG=false
ENVIRONMENT=production
SITE_URL=https://your-domain.com

# CORS (comma-separated)
ALLOWED_ORIGINS=https://your-domain.com,https://www.your-domain.com

# Email (optional)
SMTP_HOST=
SMTP_PORT=587
SMTP_USER=
SMTP_PASSWORD=
SMTP_FROM_EMAIL=
SMTP_FROM_NAME=pounce
SMTP_USE_TLS=true
SMTP_USE_SSL=false
CONTACT_EMAIL=

# Stripe (optional)
STRIPE_SECRET_KEY=
STRIPE_PUBLISHABLE_KEY=
STRIPE_PRICE_TRADER=
STRIPE_PRICE_TYCOON=
STRIPE_WEBHOOK_SECRET=

# OAuth (optional)
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=
GOOGLE_REDIRECT_URI=https://api.your-domain.com/api/v1/oauth/google/callback
GITHUB_CLIENT_ID=
GITHUB_CLIENT_SECRET=
GITHUB_REDIRECT_URI=https://api.your-domain.com/api/v1/oauth/github/callback

# Optional integrations
DROPCATCH_CLIENT_ID=
DROPCATCH_CLIENT_SECRET=
DROPCATCH_API_BASE=https://api.dropcatch.com
SEDO_PARTNER_ID=
SEDO_SIGN_KEY=
SEDO_API_BASE=https://api.sedo.com/api/v1/
MOZ_ACCESS_ID=
MOZ_SECRET_KEY=
```
@@ -1,46 +0,0 @@

```env
# Docker Compose environment (NO SECRETS)
#
# Copy to `.env` (it is gitignored):
#   cp DEPLOY_docker_compose.env.example .env
#
# Then set real values (password manager / vault).

# Core (required)
DB_PASSWORD=change-me
SECRET_KEY=GENERATE_A_LONG_RANDOM_SECRET
ENVIRONMENT=production
SITE_URL=https://your-domain.com

# CORS (only needed if frontend and backend are different origins)
ALLOWED_ORIGINS=https://your-domain.com,https://www.your-domain.com

# Cookies (optional)
COOKIE_SECURE=true
# COOKIE_DOMAIN=.your-domain.com

# Email (optional but recommended for alerts)
# SMTP_HOST=smtp.example.com
# SMTP_PORT=587
# SMTP_USER=
# SMTP_PASSWORD=
# SMTP_FROM_EMAIL=
# SMTP_FROM_NAME=pounce
# SMTP_USE_TLS=true
# SMTP_USE_SSL=false
# CONTACT_EMAIL=

# OAuth (optional)
# GOOGLE_CLIENT_ID=
# GOOGLE_CLIENT_SECRET=
# GOOGLE_REDIRECT_URI=https://your-domain.com/api/v1/oauth/google/callback
# GITHUB_CLIENT_ID=
# GITHUB_CLIENT_SECRET=
# GITHUB_REDIRECT_URI=https://your-domain.com/api/v1/oauth/github/callback

# Stripe (optional)
# STRIPE_SECRET_KEY=
# STRIPE_WEBHOOK_SECRET=
# STRIPE_PRICE_TRADER=
# STRIPE_PRICE_TYCOON=
```
DEPLOY_frontend.env (new file, 9 additions)

@@ -0,0 +1,9 @@

```env
# =================================
# pounce Frontend Configuration
# =================================
# DEPLOY FILE - Copy this to frontend/.env.local on the server

# Backend API URL
# For production, point to your backend API
NEXT_PUBLIC_API_URL=https://pounce.ch/api/v1
```
@@ -1,7 +0,0 @@

```env
# Deployment environment template (NO SECRETS)
#
# Copy to a *local-only* file and keep it OUT of git:
#   cp DEPLOY_frontend.env.example DEPLOY_frontend.env
#
NEXT_PUBLIC_API_URL=https://your-domain.com/api/v1
```
@@ -1,352 +0,0 @@

# 🎯 POUNCE MARKET — The Heart of the Platform

> **Last updated:** December 11, 2025

---

## 📋 Executive Summary

The **Market page** is the heart of Pounce. All data sources converge here:

1. **Pounce Direct** — user listings (our USP, 0% commission)
2. **Live auctions** — external platforms (8+ sources!)
3. **Drops Tomorrow** — domains before they end up in auctions (Phase 3)

### The path to unicorn (from pounce_strategy.md)

> *"The path to unicorn does not run through better scraping, but through unique content."*

**Anyone can aggregate. Pounce Direct is our USP.**

---

## 🚀 DATA SOURCES — 3-tier architecture

```
┌─────────────────────────────────────────────────────────────────┐
│               POUNCE DATA ACQUISITION PIPELINE                  │
├─────────────────────────────────────────────────────────────────┤
│                                                                 │
│  🥇 TIER 0: HIDDEN JSON APIs (fastest, most stable)             │
│  ─────────────────────────────────────────────────────────────  │
│  ✅ Dynadot REST:       101 auctions ← WORKS!                   │
│  ⚠️ GoDaddy JSON:       findApiProxy/v4 (Cloudflare-blocked)    │
│  ⚠️ NameJet AJAX:       LoadPage (Cloudflare-blocked)           │
│  ❌ Namecheap GraphQL:  needs a query hash                      │
│  ❌ Park.io:            API not public                          │
│  ❌ Sav.com:            HTML-only fallback                      │
│                                                                 │
│  🥈 TIER 1: OFFICIAL PARTNER APIs                               │
│  ─────────────────────────────────────────────────────────────  │
│  ✅ DropCatch API:      configured (own activity only)          │
│  ⏳ Sedo Partner API:   credentials configured                  │
│                                                                 │
│  🥉 TIER 2: WEB SCRAPING (stable)                               │
│  ─────────────────────────────────────────────────────────────  │
│  ✅ ExpiredDomains.net: 425 domains ← MAIN SOURCE!              │
│  ✅ Sedo Public:        7 domains                               │
│  ⚠️ GoDaddy/NameJet:    Cloudflare-protected                    │
│                                                                 │
│  💎 POUNCE DIRECT (unique content)                              │
│  ─────────────────────────────────────────────────────────────  │
│  ⏳ User listings: DNS-verified, 0% commission                  │
│                                                                 │
│  📊 TOTAL: 537+ active auctions                                 │
│                                                                 │
└─────────────────────────────────────────────────────────────────┘
```

---

## 💰 AFFILIATE MONETIZATION

Every link to an external auction carries affiliate tracking:

| Platform | Affiliate Program | Revenue Share |
|----------|------------------|---------------|
| **Namecheap** | ✅ Impact Radius | ~$20/sale |
| **Dynadot** | ✅ Direct | 5% lifetime |
| **GoDaddy** | ✅ CJ Affiliate | $10-50/sale |
| **Sedo** | ✅ Partner Program | 10-15% |
| **Sav.com** | ✅ Referral | $5/registration |
| **DropCatch** | ❌ | - |
| **NameJet** | ❌ | - |

```python
# Affiliate URL builder (hidden_api_scrapers.py)
AFFILIATE_CONFIG = {
    "Namecheap": {
        "auction_url_template": "https://www.namecheap.com/market/domain/{domain}?aff=pounce",
    },
    "GoDaddy": {
        "auction_url_template": "https://auctions.godaddy.com/...?isc=cjcpounce",
    },
    # ... etc
}
```
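A small helper that turns this config into outbound links could look like the following; this is a sketch only, assuming the `AFFILIATE_CONFIG` dict shown above is in scope, and the fallback behaviour is an assumption rather than the actual code in `hidden_api_scrapers.py`:

```python
# Hypothetical companion to AFFILIATE_CONFIG; not the repo's actual implementation.
def build_affiliate_url(platform: str, domain: str, fallback_url: str) -> str:
    """Return the affiliate-tagged auction URL, or the untagged URL for
    platforms without an affiliate program (e.g. DropCatch, NameJet)."""
    config = AFFILIATE_CONFIG.get(platform)
    if not config:
        return fallback_url
    return config["auction_url_template"].format(domain=domain)


# build_affiliate_url("Namecheap", "techflow.io", "https://www.namecheap.com/...")
# -> "https://www.namecheap.com/market/domain/techflow.io?aff=pounce"
```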
---

## 📊 The 3 pillars of the Market

### Pillar 1: POUNCE DIRECT (our USP!)

> *"These are the domains that exist ONLY on Pounce."*

| Advantage | Explanation |
|-----------|-------------|
| **Unique content** | Domains that exist ONLY on Pounce |
| **0% commission** | vs. 15-20% at Sedo/Afternic |
| **DNS verification** | Trust signal for buyers (see the sketch below) |
| **Instant buy** | No bidding, buy directly |
| **SEO power** | Every listing = a landing page |

**Status:** ⏳ 0 listings — needs to be activated!
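DNS verification of a listing typically means the seller publishes the listing's `verification_code` as a TXT record, which the backend then checks. A hedged sketch using `dnspython`; the record convention and function name are assumptions, not taken from the codebase:

```python
# Hypothetical DNS-TXT verification check (requires `pip install dnspython`).
import dns.exception
import dns.resolver


def is_listing_verified(domain: str, verification_code: str) -> bool:
    """True if a TXT record like `pounce-verify=<code>` (assumed convention)
    is present on the listed domain."""
    try:
        answers = dns.resolver.resolve(domain, "TXT")
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.exception.Timeout):
        return False
    expected = f"pounce-verify={verification_code}"
    return any(
        expected in b"".join(rdata.strings).decode("utf-8", "replace")
        for rdata in answers
    )
```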
---

### Pillar 2: LIVE AUCTIONS (8+ sources)

> *"Show all relevant auctions from all platforms."*

**Data freshness guarantee:**
- Scraping: every 2 hours
- Cleanup: every 15 minutes
- Filter: `end_time > now()` (running auctions only)

**Quality filters:**
- Vanity filter for public users (premium domains only)
- Pounce Score (0-100)
- TLD filter (com, io, ai, etc.)

---

### Pillar 3: DROPS TOMORROW (Phase 3)

> *"Show domains BEFORE they end up in auctions."*

**Zone file analysis:**
- Verisign (.com/.net) zone files
- Daily diff analysis
- The Pounce algorithm filters for premium only

**Status:** 🔜 Planned (6-12 months)

---

## ⚙️ Technical architecture

### Scraper priority chain

```python
# auction_scraper.py — scrape_all_platforms()

async def scrape_all_platforms(self, db):
    # ═══════════════════════════════════════════════════════════
    # TIER 0: Hidden JSON APIs (most reliable!)
    # ═══════════════════════════════════════════════════════════
    hidden_api_result = await hidden_api_scraper.scrape_all()
    # → Namecheap (GraphQL)
    # → Dynadot (REST)
    # → Sav.com (AJAX)

    # ═══════════════════════════════════════════════════════════
    # TIER 1: Official partner APIs
    # ═══════════════════════════════════════════════════════════
    await self._fetch_dropcatch_api(db)
    await self._fetch_sedo_api(db)

    # ═══════════════════════════════════════════════════════════
    # TIER 2: Web scraping (fallback)
    # ═══════════════════════════════════════════════════════════
    await self._scrape_expireddomains(db)
    await self._scrape_godaddy_public(db)
    await self._scrape_namejet_public(db)
```

### Scheduler jobs

```python
# Active jobs (scheduler.py)
# ─────────────────────────────────────────────────────────────────

# Auction scrape — every 2 hours
scheduler.add_job(scrape_auctions, CronTrigger(hour='*/2', minute=30))

# Expired cleanup — every 15 minutes (CRITICAL!)
scheduler.add_job(cleanup_expired_auctions, CronTrigger(minute='*/15'))

# Sniper matching — every 30 minutes
scheduler.add_job(match_sniper_alerts, CronTrigger(minute='*/30'))

# TLD prices — daily at 03:00 UTC
scheduler.add_job(scrape_tld_prices, CronTrigger(hour=3))
```

### API endpoints

```python
GET /api/v1/auctions/feed         # Unified feed (Pounce + external)
GET /api/v1/auctions              # External auctions only
GET /api/v1/auctions/ending-soon
GET /api/v1/auctions/hot
GET /api/v1/listings              # Pounce Direct listings
```

---

## 🎨 UI/UX: The Market page

### Filter bar

```
[✓] Hide Spam   [○] Pounce Only   [TLD ▾] [Price ▾] [Ending ▾]
```

### Visual hierarchy

```
┌─────────────────────────────────────────────────────────────────┐
│  MARKET FEED                                                    │
├─────────────────────────────────────────────────────────────────┤
│                                                                 │
│  💎 POUNCE EXCLUSIVE — Verified Instant Buy                     │
│  ┌───────────────────────────────────────────────────────────┐ │
│  │ zurich-immo.ch   $950   ⚡ Instant  ✅ Verified   [Buy]    │ │
│  │ crypto-hub.io    $2.5k  ⚡ Instant  ✅ Verified   [Buy]    │ │
│  └───────────────────────────────────────────────────────────┘ │
│                                                                 │
│  🏢 LIVE AUCTIONS (8+ platforms)                                │
│  ┌───────────────────────────────────────────────────────────┐ │
│  │ techflow.io   $250   ⏱️ 4h left    Namecheap   [Bid ↗]    │ │
│  │ datalab.com   $1.2k  ⏱️ 23h left   Dynadot     [Bid ↗]    │ │
│  │ nexus.ai      $5k    ⏱️ 2d left    Sav.com     [Bid ↗]    │ │
│  │ fintech.io    $800   ⏱️ 6h left    GoDaddy     [Bid ↗]    │ │
│  └───────────────────────────────────────────────────────────┘ │
│                                                                 │
│  🔮 DROPS TOMORROW (Tycoon only)                                │
│  ┌───────────────────────────────────────────────────────────┐ │
│  │ 🔒 Upgrade to Tycoon to see domains dropping tomorrow     │ │
│  └───────────────────────────────────────────────────────────┘ │
│                                                                 │
└─────────────────────────────────────────────────────────────────┘
```

---

## 📈 Roadmap

### ✅ DONE (December 11, 2025)

**Phase 1: Intelligence — FULLY IMPLEMENTED!**

- [x] Unified feed API `/auctions/feed`
- [x] Pounce Score v2.0
- [x] Vanity filter
- [x] **Dynadot REST API** ← 101 auctions!
- [x] **GoDaddy hidden API** (discovered, Cloudflare-blocked)
- [x] **NameJet AJAX API** (discovered, Cloudflare-blocked)
- [x] **Park.io API** (discovered, not public)
- [x] **Affiliate-link system for all platforms**
- [x] **FIX: end_time filter** (running auctions only)
- [x] **FIX: cleanup every 15 minutes**
- [x] **FIX: scraper every 2 hours**
- [x] Sniper alerts
- [x] **542+ active auctions in the DB**
- [x] **5 Pounce Direct listings created**
- [x] **Public + terminal pages synchronized**
- [x] **Playwright stealth scraper implemented**
- [x] **Listing limits enforced (2/10/50 by tier)**
- [x] **Featured listings for Tycoon**

### 🎯 NEXT STEPS

1. **Cloudflare bypass for GoDaddy/NameJet**
   - Option A: Playwright with the stealth plugin
   - Option B: proxy rotation
   - Option C: headless browser as a service

2. **Set up affiliate IDs**
   - Dynadot affiliate program (NOW - it works!)
   - GoDaddy CJ Affiliate
   - Sedo partner program

3. **Create the first Pounce Direct listings**
   - Test domains to verify the flow
   - Activate the USP!

### 🔮 PHASE 2-3 (6-12 months)

1. **Apply for zone file access**
   - Verisign (.com/.net)
   - "Drops Tomorrow" feature

2. **Pounce Instant Exchange**
   - Integrated escrow service
   - 5% fee

---

## 💰 Monetization (from pounce_pricing.md)

| Feature | Scout ($0) | Trader ($9) | Tycoon ($29) |
|---------|------------|-------------|--------------|
| **Market feed** | 🌪️ Vanity filter | ✨ Clean | ✨ Clean + priority |
| **Alert speed** | 🐢 Daily | 🐇 Hourly | ⚡ Real-time (10m) |
| **Watchlist** | 5 domains | 50 domains | 500 domains |
| **Sell domains** | ❌ | ✅ 5 listings | ✅ 50 + featured |
| **Pounce Score** | ❌ Locked | ✅ Basic | ✅ + SEO data |
| **Drops Tomorrow** | ❌ | ❌ | ✅ Exclusive |

---

## 🚀 The unicorn path

```
Phase 1: INTELLIGENCE (now)
├── 8+ data sources aggregated ✅
├── Affiliate monetization ✅
├── Activate Pounce Direct (unique content)
└── 10,000 users, $1M ARR

Phase 2: LIQUIDITY (18-36 months)
├── Pounce Instant Exchange
├── Buy Now in the dashboard
├── 5% fee
└── $10M ARR

Phase 3: FINANCIALIZATION (3-5 years)
├── Fractional ownership
├── Domain-backed lending
└── = FINTECH ($50-100M ARR)

Phase 4: EMPIRE (5+ years)
├── Enterprise Sentinel (B2B)
├── Fortune 500 customers
└── = $1B valuation
```

---

## 📁 New files

| File | Description |
|------|-------------|
| `hidden_api_scrapers.py` | Namecheap/Dynadot/Sav.com JSON APIs |
| `AFFILIATE_CONFIG` | Affiliate links for all platforms |

---

## 💎 The takeaway

**We now have 8+ data sources and affiliate monetization!**

The path to unicorn:
1. ✅ Aggregation (8+ platforms)
2. ✅ Monetization (affiliate links)
3. ⏳ Unique content (activate Pounce Direct!)
4. 🔮 Data sovereignty (zone files)

> *"Don't guess. Know."*
>
> — Phase 1: Intelligence
@@ -1,212 +0,0 @@

# Performance & Architecture Report (Pounce)

**Codebase snapshot:** `d08ca33fe3c88b3b2d716f0bdf22b71f989a5eb9`
**Date:** 2025-12-12
**Scope:** `frontend/` (Next.js 14 App Router) + `backend/` (FastAPI + async SQLAlchemy + APScheduler) + DB + Docker/deploy.

## Status (implemented)

- ✅ **Phase 0**: scheduler split, market-feed bounded paging, health cache-first, PriceTracker N+1 fix (`2e8ff50`)
- ✅ **Phase 1**: DB migrations (indexes + optional columns), persisted `pounce_score`, admin N+1 removal, radar summary endpoint (`ee4266d`)
- ✅ **Phase 2**: Redis + ARQ worker scaffolding, Prometheus metrics (`/metrics`), load-test scaffolding, Docker hardening (`5d23d34`)

---

## Executive Summary (the 5 biggest levers)

1. **Move the scheduler out of the API process**
   The scheduler currently starts in `backend/app/main.py` inside the app lifespan. With multiple Uvicorn/Gunicorn workers, jobs run **multiple times in parallel** → duplicate scrapes/checks, DB load, e-mail spam, inconsistent state.

2. **Paginate/sort the market feed endpoint (`/api/v1/auctions/feed`) on the DB side**
   `backend/app/api/auctions.py` currently loads **all active auctions + all active listings** into Python, computes the score, sorts, and only paginates at the very end. That scales poorly as soon as you have more than a few hundred auctions.

3. **Eliminate the price tracker N+1**
   `backend/app/services/price_tracker.py::detect_price_changes()` currently does: *distinct(tld, registrar) → one query per pair (limit 2)*. That is a classic N+1 and becomes very slow with 800+ TLDs.

4. **Actually use the health cache**
   There is a `DomainHealthCache`, and the scheduler writes status/score into it. But `GET /domains/{id}/health` always performs a **live check** (`domain_health.py` with HTTP/DNS/SSL). For UI and performance it is better to serve **cached** data by default and go live only on "Refresh".

5. **Reduce valuation work in the request path (auctions)**
   `backend/app/api/auctions.py` optionally computes a valuation per auction in the response, and `valuation_service` also queries DB data per domain (TLD cost). Per request, that is potentially **very expensive**.

---

## Measurements (frontend build)

From `frontend/` → `npm run build` (Next.js 14.0.4):

- **First Load JS (shared):** ~81.9 kB
- **Largest pages (first load):**
  - `/terminal/watchlist`: ~120 kB
  - `/terminal/radar`: ~120 kB
  - `/terminal/intel/[tld]`: ~115 kB
  - `/terminal/market`: ~113 kB
- **Warnings:** some routes "deopted into client-side rendering" (e.g. `/terminal/radar`, `/terminal/listing`, `/unsubscribe`, `/terminal/welcome`). Not necessarily bad, but a hint that no real SSR/static rendering is used there.

**Interpretation:** bundle-wise the frontend is already quite lean. The biggest performance risks currently sit in the **backend (queries, jobs, N+1, caching)**.

---

## Backend – concrete hotspots & fixes

### 1) Scheduler: architecture & scaling

**Current state**
- `backend/app/main.py`: `start_scheduler()` in `lifespan()` → the scheduler runs in the same process as the API.
- `backend/app/scheduler.py`: many jobs (domain checks, health checks, TLD scrape, auction scrape, cleanup, sniper matching).

**Problems**
- Multi-worker deployment (Gunicorn/Uvicorn) → the scheduler runs per worker → duplicate jobs.
- Jobs are partly sequential (domain checks), partly N+1 (health cache, digests, sniper matching).

**Recommendation (best practice)**
- Run the **scheduler as a separate service/container** (e.g. its own Docker service `scheduler`, a systemd/cron job, or Celery worker + beat).
- If the scheduler must stay in the same code: add a **leader lock** (Redis/DB advisory lock) so only one process executes jobs, as sketched below.
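A minimal leader-lock sketch using Redis `SET NX`; the key name and TTL are illustrative, and the project's actual Redis setup may differ:

```python
# Hypothetical Redis leader lock so only one worker runs the scheduler jobs.
# Renewal here is get-then-expire, which is not atomic; a Lua script would
# close that gap in a real implementation.
import uuid

import redis

r = redis.Redis()  # assumption: Redis on localhost:6379
_TOKEN = uuid.uuid4().hex
_KEY = "pounce:scheduler:leader"


def try_become_leader(ttl: int = 60) -> bool:
    """Atomically claim leadership; exactly one process gets True."""
    return bool(r.set(_KEY, _TOKEN, nx=True, ex=ttl))


def renew_leadership(ttl: int = 60) -> bool:
    """Extend the TTL only while we still hold the lock."""
    if r.get(_KEY) == _TOKEN.encode():
        return bool(r.expire(_KEY, ttl))
    return False


# if try_become_leader():
#     start_scheduler()  # only the lock holder schedules jobs
```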
---

### 2) Market feed (`backend/app/api/auctions.py::get_market_feed`)

**Current state**
- Fetches listings and auctions without a DB limit/offset, builds `items` in Python, sorts in Python, and paginates only at the end.

**Why this hurts**
- With, say, 10,000 active auctions, every request to `/feed` is a "full table scan + Python sort + JSON build".

**Fix strategy**
- **Persist the score**: store/update `pounce_score` on `DomainAuction` and `DomainListing` (at scrape time, and on listing create/update).
  The DB can then do `WHERE pounce_score >= :min_score` and `ORDER BY pounce_score DESC`.
- **DB pagination**: `LIMIT/OFFSET` in SQL, not in Python.
- **Filters on the DB side**: `keyword`, `tld`, `price range`, `ending_within` in SQL.
- **Response caching**: Redis TTL of 15–60s for the public feed (or frequent filter combinations). A query sketch follows below.
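What the DB-side version can look like in async SQLAlchemy; a sketch in which the model import path and the persisted `pounce_score` column follow the report's wording, while the function signature is an assumption:

```python
# Hypothetical DB-side feed query; illustrative, not the repo's code.
from datetime import datetime, timezone

from sqlalchemy import select

from app.models import DomainAuction  # assumption: importable like this


async def feed_page(db, min_score: int, page: int, page_size: int = 50):
    stmt = (
        select(DomainAuction)
        .where(
            DomainAuction.end_time > datetime.now(timezone.utc),  # running only
            DomainAuction.pounce_score >= min_score,              # persisted score
        )
        .order_by(DomainAuction.pounce_score.desc())
        .limit(page_size)
        .offset((page - 1) * page_size)                           # paginate in SQL
    )
    return (await db.execute(stmt)).scalars().all()
```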
---

### 3) Auction search (`backend/app/api/auctions.py::search_auctions`)

**Current state**
- After the query, auctions are filtered in Python (vanity filter), and then `valuation_service.estimate_value(...)` is called per auction in a loop.

**Problems**
- Valuation can trigger DB queries per item (TLD cost average), and it runs serially.

**Fix strategy**
- **Precompute** valuations (background job) and cache them in a table/column.
- Alternatively: compute valuations only **for the top N** (e.g. 20) and skip the rest.
- Keep the TLD cost in an **in-memory cache** (LRU/TTL), or prefetch it once per request; a TTL-cache sketch follows below.
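A tiny TTL cache for the per-TLD average cost could look like this; the column name in the query is an assumption about the `tld_prices` schema:

```python
# Hypothetical TTL cache for TLD costs: one DB hit per TLD per 10 minutes
# instead of one per search result.
import time

from sqlalchemy import text

_CACHE: dict[str, tuple[float, float]] = {}  # tld -> (value, expires_at)
_TTL = 600.0


async def get_tld_cost(db, tld: str) -> float:
    now = time.monotonic()
    hit = _CACHE.get(tld)
    if hit and hit[1] > now:
        return hit[0]
    row = await db.execute(
        text("SELECT AVG(price) FROM tld_prices WHERE tld = :tld"),  # assumed column
        {"tld": tld},
    )
    value = float(row.scalar() or 0.0)
    _CACHE[tld] = (value, now + _TTL)
    return value
```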
---

### 4) Price Tracker (`backend/app/services/price_tracker.py`)

**Current state**

- N+1 queries: `distinct(tld, registrar)` → one query per pair for the last two prices.

**Fix strategy**

- Use an SQL window function (both Postgres and SQLite support this) – see the sketch below:
  - `ROW_NUMBER() OVER (PARTITION BY tld, registrar ORDER BY recorded_at DESC)`
  - then a self-join or `LAG()` for the previous price.
- Additionally add a DB index: `tld_prices(tld, registrar, recorded_at DESC)`
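A minimal sketch of the window-function query in SQLAlchemy, assuming a `TLDPrice` model with `tld`, `registrar`, `price`, and `recorded_at` columns (names are assumptions):

```python
# One query returns the latest two prices per (tld, registrar) pair
# instead of one query per pair.
from sqlalchemy import func, select
from app.models import TLDPrice  # assumed import path

def latest_two_prices_stmt():
    rn = (
        func.row_number()
        .over(
            partition_by=(TLDPrice.tld, TLDPrice.registrar),
            order_by=TLDPrice.recorded_at.desc(),
        )
        .label("rn")
    )
    ranked = select(TLDPrice.tld, TLDPrice.registrar, TLDPrice.price,
                    TLDPrice.recorded_at, rn).subquery()
    # rn == 1 is the newest price, rn == 2 the previous one.
    return select(ranked).where(ranked.c.rn <= 2)
```

Comparing the two rows per pair then happens in Python or via a self-join/`LAG()`.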
---

### 5) Domain Health (`backend/app/services/domain_health.py` + `backend/app/api/domains.py`)

**Current state**

- The live health check performs real DNS/HTTP/SSL checks on every request.
- The scheduler writes `DomainHealthCache`, but the endpoint does not use it.

**Fix strategy**

- New endpoints (a sketch follows below):
  - `GET /domains/health-cache` → cached health for all of a user's domains (one request for the UI)
  - `POST /domains/{id}/health/refresh` → live refresh (asynchronous, job queue)
- Also populate `DomainHealthCache` with `dns_data/http_data/ssl_data` (already provided for in the model).
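A minimal FastAPI sketch of the first endpoint; the `get_db`/`get_current_user` dependencies, import paths, and the cache columns mirror the text but are assumptions about the codebase:

```python
from fastapi import APIRouter, Depends
from sqlalchemy import select
from app.database import get_db            # assumed import paths
from app.auth import get_current_user
from app.models import Domain, DomainHealthCache

router = APIRouter()

@router.get("/domains/health-cache")
async def get_health_cache(db=Depends(get_db), user=Depends(get_current_user)):
    # One query returns cached health for all of the user's domains,
    # instead of live DNS/HTTP/SSL checks per domain per request.
    stmt = (
        select(DomainHealthCache)
        .join(Domain, Domain.id == DomainHealthCache.domain_id)
        .where(Domain.user_id == user.id)
    )
    rows = (await db.execute(stmt)).scalars().all()
    return [
        {"domain_id": r.domain_id, "status": r.status, "checked_at": r.checked_at}
        for r in rows
    ]
```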
---

## Database – Indexing & Query Patterns

### Recommended Indexes (High Impact)

- **Domain Checks**
  - `domain_checks(domain_id, checked_at DESC)` for `/domains/{id}/history`
- **TLD Prices**
  - `tld_prices(tld, registrar, recorded_at DESC)` for "latest two prices" and history queries
- **Health Cache**
  - `domain_health_cache(domain_id)` (unique index already exists), optionally `checked_at`

### Query Patterns (Quick Wins)

- `backend/app/api/domains.py::add_domain()` currently uses `len(current_user.domains)` → potentially loads many rows.
  Better: `SELECT COUNT(*) FROM domains WHERE user_id = ...` – see the sketch below.
- Admin "Users list": avoid the N+1 (subscription + domain count per user) → `JOIN` + `GROUP BY`.
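A minimal sketch of the count query, assuming the existing `Domain` model:

```python
# COUNT(*) in SQL instead of loading the whole relationship into memory.
from sqlalchemy import func, select
from app.models import Domain  # assumed import path

async def domain_count(db, user_id: int) -> int:
    stmt = select(func.count()).select_from(Domain).where(Domain.user_id == user_id)
    return (await db.execute(stmt)).scalar_one()
```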
---

## Frontend – Improvements (targeted, no blind refactor)

### 1) Reduce API Calls per Screen (Dashboard/Watchlist)

Some screens currently fetch several endpoints and compute stats client-side:

- `/terminal/radar`: fetches auctions and `GET /listings/my` only to count stats.

**Recommendation**

- One endpoint: `GET /dashboard/summary` (counts + small previews) → 1 request instead of 3–5; a sketch follows below.
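A minimal sketch of such a summary endpoint; the import paths, model names, and the `is_active` column are illustrative assumptions:

```python
from fastapi import APIRouter, Depends
from sqlalchemy import func, select
from app.database import get_db            # assumed import paths
from app.auth import get_current_user
from app.models import Domain, DomainAuction, DomainListing

router = APIRouter()

@router.get("/dashboard/summary")
async def dashboard_summary(db=Depends(get_db), user=Depends(get_current_user)):
    async def count(model, *conditions):
        stmt = select(func.count()).select_from(model).where(*conditions)
        return (await db.execute(stmt)).scalar_one()

    # One response replaces the 3-5 calls the radar screen makes today.
    return {
        "watching": await count(Domain, Domain.user_id == user.id),
        "my_listings": await count(DomainListing, DomainListing.user_id == user.id),
        "active_auctions": await count(DomainAuction, DomainAuction.is_active == True),  # noqa: E712
    }
```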
### 2) Scaling Tables/Lists

- For very large lists (market feed / TLDs / admin users), medium term:
  - pagination + "infinite scroll"
  - possibly virtualization (`react-window`) for 1000+ rows.

### 3) Small Code-Health Fixes (Also Performance)

- Beware of `.sort()` on state arrays: `.sort()` mutates. Always copy first (`[...arr].sort(...)`), otherwise you get subtle bugs and unnecessary re-renders.
---

## Deployment/Infra – "Production Grade" Performance

### Backend

- **Gunicorn + Uvicorn workers** (or Uvicorn `--workers`) is good for CPU/IO – but **only if the scheduler runs separately**.
- **DB pooling**: `create_async_engine(..., pool_size=..., max_overflow=...)` for Postgres (not for SQLite); see the sketch below.
- **slowapi**: use Redis storage in production (otherwise each worker keeps its own limiter state).
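A minimal pooling sketch for the Postgres case; the DSN and sizes are placeholders to tune against worker count and DB limits:

```python
from sqlalchemy.ext.asyncio import create_async_engine

engine = create_async_engine(
    "postgresql+asyncpg://user:pass@db/pounce",  # placeholder DSN
    pool_size=10,        # persistent connections per process
    max_overflow=20,     # short-lived extras under burst load
    pool_pre_ping=True,  # drops dead connections transparently
)
```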
### Frontend

- The Dockerfile expects `.next/standalone`. Enable `output: 'standalone'` in `frontend/next.config.js` (or adapt the Dockerfile).
---

## Prioritized Roadmap

### Phase 0 (0–1 day, quick wins)

- Decouple the scheduler OR add a leader lock
- `/auctions/feed`: DB limit + offset + order_by (no full scans)
- `PriceTracker.detect_price_changes`: window query instead of N+1
- Cached health endpoint for the watchlist

### Phase 1 (1–2 weeks)

- Precompute `pounce_score` + valuations (background jobs), persist & cache
- Remove the admin N+1 (users list)
- Add DB indexes (DomainCheck, TLDPrice)
- "Dashboard summary" endpoint + switch the frontend over

### Phase 2 (2–6 weeks)

- Background job system (Celery/RQ/Dramatiq) + Redis
- Observability: request timing, DB query timing, Prometheus metrics, tracing
- Load testing + performance budgets (API p95, page LCP/TTFB)
---

## Measurement/Monitoring Plan (so we don't optimize in the dark)

- **Backend**
  - Log: request duration + endpoint + status
  - DB: slow query logging / `EXPLAIN ANALYZE` (prod-like)
  - Metrics: p50/p95 latency per endpoint, queue depth, job runtime
- **Frontend**
  - Core Web Vitals tracking (already scaffolded in `frontend/src/lib/analytics.ts`)
  - "API timing" (TTFB + payload size) for market/watchlist

@@ -1,361 +0,0 @@

# Public Pages Analysis Report

## Audience Clarity & Value Communication

**Analysis date:** December 12, 2025
**Target audience:** domain investors, professional traders, founders looking for a domain
**Core message per strategy:** "Don't guess. Know." (Intelligence & Trust)
---

## Executive Summary

| Page | Clarity | Value | CTAs | Trust | Overall |
|------|---------|-------|------|-------|---------|
| **Landing Page** | ⭐⭐⭐⭐⭐ | ⭐⭐⭐⭐⭐ | ⭐⭐⭐⭐ | ⭐⭐⭐⭐⭐ | **Excellent** |
| **Market Page** | ⭐⭐⭐⭐⭐ | ⭐⭐⭐⭐ | ⭐⭐⭐⭐ | ⭐⭐⭐⭐ | **Very good** |
| **Intel Page** | ⭐⭐⭐⭐ | ⭐⭐⭐⭐⭐ | ⭐⭐⭐⭐ | ⭐⭐⭐⭐⭐ | **Very good** |
| **Pricing Page** | ⭐⭐⭐⭐⭐ | ⭐⭐⭐⭐ | ⭐⭐⭐⭐⭐ | ⭐⭐⭐⭐ | **Very good** |

**Overall assessment:** The public pages are **strategically excellently structured** and communicate the value clearly for the domain-investor audience.
---

## 1. Landing Page (Home)

### ✅ Strengths

#### Value proposition immediately clear
```
Headline: "The market never sleeps. You should."
Subline: "Domain Intelligence for Investors. Scan, track, and trade digital assets."
Tagline: "Don't guess. Know."
```
**Analysis:** The headline speaks directly to the audience's pain (having to monitor constantly). The subline clearly defines WHAT Pounce does (intelligence) and for WHOM (investors).

#### Trust signals
- **886+ TLDs** – shows data depth
- **Live Auctions** – shows freshness
- **Instant Alerts** – shows reaction speed
- **Price Intel** – shows analytical value

#### Three Pillars (Discover → Track → Trade)
| Pillar | Value Proposition |
|--------|-------------------|
| **Discover** | "Not just 'taken' — but WHY, WHEN it expires, and SMARTER alternatives" |
| **Track** | "4-layer health analysis. Know the second it weakens." |
| **Trade** | "Buy & sell directly. 0% Commission. Verified owners." |

**Analysis:** Each pillar addresses a concrete benefit stage in the domain-investing workflow.

#### Live Market Teaser (Gatekeeper)
- Shows 4 real domains with prices
- The 5th row is blurred
- CTA: "Sign in to see X+ more domains"

**Analysis:** A perfect implementation of the "teaser & gatekeeper" principle.
### ⚠️ Room for Improvement

| Problem | Current Implementation | Recommendation |
|---------|------------------------|----------------|
| **DomainChecker placeholder** | Static text | Animated typing effect is still missing ("Search crypto.ai...", "Search hotel.zurich...") |
| **Beyond Hunting section** | "Own. Protect. Monetize." | Good copy, but the link to `/buy` could be confusing – better `/market` or `/terminal` |
| **Sniper Alerts link** | `/terminal/watchlist` | Useless for logged-out users – should lead to `/register` |

### 📊 Key Figures

- **Sections:** 8 (Hero, Ticker, Market Teaser, Pillars, Beyond, TLDs, Stats, CTA)
- **CTAs for registration:** 4
- **Trust indicators:** 7
- **Lock/blur elements:** 2 (market teaser, TLD prices)
---

## 2. Market Page

### ✅ Strengths

#### Clear positioning
```
H1: "Live Domain Market"
Sub: "Aggregated from GoDaddy, Sedo, and Pounce Direct."
```
**Analysis:** Immediately clear: aggregating several sources in one place = time saved.

#### Vanity filter for logged-out users
```javascript
// Rules: No numbers (except short domains), no hyphens, length < 12, only premium TLDs
const PREMIUM_TLDS = ['com', 'io', 'ai', 'co', 'de', 'ch', 'net', 'org', 'app', 'dev', 'xyz']
```
**Analysis:** Shows only "premium-looking" domains → a more professional first impression.

#### Pounce Score & valuation blurred
- Visible but blurred, with a lock icon
- Hover text available
- Motivates registration

#### Bottom CTA
```
"Tired of digging through spam? Our 'Trader' plan filters 99% of junk domains automatically."
[Upgrade Filter]
```
**Analysis:** Addresses a direct pain point (spam in auctions) and offers a solution.
### ⚠️ Room for Improvement

| Problem | Current Implementation | Recommendation |
|---------|------------------------|----------------|
| **Pounce Direct section** | Shows internal listings | Good, but "0% Commission" should be more prominent |
| **Mobile view** | Some columns hidden | OK, but the deal score should also be visible (blurred) on mobile |

### 📊 Gatekeeper Elements

- ✅ Vanity filter (only clean-looking domains for logged-out users)
- ✅ Pounce Score blurred
- ✅ Valuation blurred
- ✅ Bottom CTA for the upgrade
- ✅ Login banner
---

## 3. Intel Page (TLD Inflation Monitor)

### ✅ Strengths

#### Unique value proposition
```
H1: "TLD Market Inflation Monitor"
Sub: "Don't fall for promo prices. See renewal costs, spot traps, and track price trends..."
```
**Analysis:** Addresses a real, little-known pain point: registrars lure with cheap first-year prices, but renewals are expensive ("renewal traps").

#### Top Movers cards
- Shows the TLDs with the largest price changes
- Visually appealing with trend badges
- Immediately visible value

#### Intelligent gating
```
.com, .net, .org → fully visible (as proof)
All others      → buy price + trend visible, renewal + risk blurred
```
**Analysis:** A perfect implementation: it shows THAT the data exists (for .com) and hides the "intelligence" (renewal/risk) for the rest.

#### Trust indicators
- "Renewal Trap Detection" badge
- "Risk Levels" badge with colors
- "1y/3y Trends" badge
### ⚠️ Room for Improvement

| Problem | Current Implementation | Recommendation |
|---------|------------------------|----------------|
| **SEO title** | "TLD Market Inflation Monitor" | Excellent for SEO – keep as is |
| **Top Movers links** | Lead to `/register` for logged-out users | OK, but could also lead to the gated Intel detail page |

### 📊 Gatekeeper Elements

- ✅ Renewal price blurred (except .com/.net/.org)
- ✅ Risk level blurred (except .com/.net/.org)
- ✅ Prominent login banner
- ✅ "Stop overpaying" messaging
---

## 4. Pricing Page

### ✅ Strengths

#### Clear tier structure
```
Scout (Free) → Trader ($9) → Tycoon ($29)
```

#### Feature differentiation with emojis
| Feature | Scout | Trader | Tycoon |
|---------|-------|--------|--------|
| Market Feed | 🌪️ Raw | ✨ Curated | ⚡ Priority |
| Alert Speed | 🐢 Daily | 🐇 Hourly | ⚡ 10 min |
| Watchlist | 5 Domains | 50 Domains | 500 Domains |

**Analysis:** The emojis make the differentiation instantly understandable at a glance.

#### FAQ section
Addresses real questions:
- "How fast will I know when a domain drops?"
- "What's domain valuation?"
- "Can I track domains I already own?"

#### Best Value highlight
- The Trader plan carries a "Best Value" badge
- Visually highlighted (border/color)

### ⚠️ Room for Improvement

| Problem | Current Implementation | Recommendation |
|---------|------------------------|----------------|
| **Sniper Alerts** | Scout: "—", Trader: "5", Tycoon: "Unlimited" | Could explain more clearly what this is |
| **Portfolio feature** | Scout: "—", Trader: "25 Domains" | Should explain: "Track YOUR owned domains" |
---

## 5. Header & Navigation

### ✅ Strengths

```
Market | Intel | Pricing | [Sign In] | [Start Hunting]
```

- **Dark mode throughout** – professional look
- **"Start Hunting" instead of "Get Started"** – speaks directly to the audience
- **Neon-green CTA** – high visibility
- **Minimalist** – no clutter

### ⚠️ Room for Improvement

| Problem | Current Implementation | Recommendation |
|---------|------------------------|----------------|
| **Mobile menu** | Functional | OK, but the CTA should be even more prominent |
---

## 6. Footer

### ✅ Strengths

- **"Don't guess. Know."** – tagline present
- **Social links** – Twitter, LinkedIn, Email
- **Correct links** – Market, Intel, Pricing
---

## Audience Analysis

### Primary audience: domain investors

| Need | Addressed? | Where? |
|------|------------|--------|
| Monitor auctions | ✅ | Market Page, ticker |
| Find expiring domains | ✅ | Track pillar, alerts |
| Compare TLD prices | ✅ | Intel Page |
| Manage a portfolio | ✅ | Beyond Hunting section |
| Sell domains | ✅ | Trade pillar, marketplace |

### Secondary audience: founders looking for a domain

| Need | Addressed? | Where? |
|------|------------|--------|
| Check domain availability | ✅ | DomainChecker (hero) |
| Find alternatives | ✅ | "AI-powered alternatives" |
| Know fair prices | ✅ | Intel Page |
---

## Conversion Funnel Analysis

```
┌─────────────────────────────────────────────────────────┐
│                      LANDING PAGE                       │
│  "The market never sleeps. You should."                 │
│                                                         │
│  ┌─────────────┐  ┌─────────────┐  ┌─────────────┐      │
│  │  DISCOVER   │  │    TRACK    │  │    TRADE    │      │
│  └─────────────┘  └─────────────┘  └─────────────┘      │
│                                                         │
│  ┌──────────────────────────────────────────────┐       │
│  │     LIVE MARKET TEASER (Blurred)             │       │
│  │     "Sign in to see X+ more domains"         │       │
│  └──────────────────────────────────────────────┘       │
│                         ↓                               │
│                  [START HUNTING]                        │
└─────────────────────────────────────────────────────────┘
                          ↓
┌─────────────────────────────────────────────────────────┐
│                      MARKET PAGE                        │
│  "Aggregated from GoDaddy, Sedo, and Pounce Direct"     │
│                                                         │
│  ┌──────────────────────────────────────────────┐       │
│  │  Domain | Price | Score (🔒) | Valuation (🔒)  │       │
│  └──────────────────────────────────────────────┘       │
│                                                         │
│  "Tired of digging through spam?" → [UPGRADE FILTER]    │
└─────────────────────────────────────────────────────────┘
                          ↓
┌─────────────────────────────────────────────────────────┐
│                      INTEL PAGE                         │
│  "TLD Market Inflation Monitor"                         │
│                                                         │
│  .com, .net, .org → FULL DATA                           │
│  Others → Renewal (🔒), Risk (🔒)                        │
│                                                         │
│  "Stop overpaying. Know the true costs."                │
│                         ↓                               │
│                  [START HUNTING]                        │
└─────────────────────────────────────────────────────────┘
                          ↓
┌─────────────────────────────────────────────────────────┐
│                     PRICING PAGE                        │
│                                                         │
│  Scout (Free) → Trader ($9) → Tycoon ($29)              │
│                                                         │
│  "Start with Scout. It's free forever."                 │
└─────────────────────────────────────────────────────────┘
                          ↓
┌─────────────────────────────────────────────────────────┐
│                     REGISTER PAGE                       │
│                                                         │
│  "Track up to 5 domains. Free."                         │
│  "Daily status scans. Never miss a drop."               │
└─────────────────────────────────────────────────────────┘
```
---

## Optimization Recommendations

### High priority

1. **DomainChecker animation**
   - Implement the typing effect for the placeholder
   - Examples: "Search crypto.ai...", "Search hotel.zurich..."
   - Makes the hero more interactive and shows use cases

2. **Fix links for logged-out users**
   - `/terminal/watchlist` → `/register?redirect=/terminal/watchlist`
   - `/buy` → make clear that this is the marketplace

### Medium priority

3. **Add testimonials/social proof**
   - Currently: numbers only (886+ TLDs, 24/7)
   - Missing: user testimonials, well-known users, logos

4. **Video/demo**
   - A short video (30 s) on the landing page
   - Shows the dashboard in action

### Low priority

5. **Blog/briefings SEO**
   - More content for organic traffic
   - Topics: "Top 10 TLDs 2025", "Domain Investing Guide"
---

## Conclusion

The public pages are **strategically excellently executed** and follow the "teaser & gatekeeper" principle consistently:

1. **✅ The value is immediately clear** – "Domain Intelligence for Investors"
2. **✅ The audience is addressed directly** – "Hunters", "Investors", "Trade"
3. **✅ Data is shown, intelligence is hidden** – blurred scores, locked features
4. **✅ Trust signals are present** – 886+ TLDs, live data, dark-mode pro look
5. **✅ CTAs are consistent** – "Start Hunting" everywhere

**The pages are ready for launch.**

---

*Report generated on December 12, 2025*

@@ -1,403 +0,0 @@

# SEO & Performance Optimization Guide

## ✅ Implemented Features

### 1. **SEO Meta Tags & Structured Data**

#### Global Configuration
- **Root Layout** (`frontend/src/app/layout.tsx`):
  - Complete OpenGraph tags
  - Twitter Card tags
  - Favicon & App Icons
  - Organization & WebSite schema (JSON-LD)
  - Search box schema for Google

#### Page-Specific Metadata
- **Homepage** (`frontend/src/app/metadata.ts`):
  - SoftwareApplication schema
  - AggregateRating schema
  - Feature list

- **TLD Pages** (`frontend/src/app/intel/[tld]/metadata.ts`):
  - Dynamic metadata generation
  - Article schema
  - Product schema (domain TLD)
  - Breadcrumb schema
  - Registrar comparison offers

- **Pricing Page** (`frontend/src/app/pricing/metadata.ts`):
  - ProductGroup schema
  - Multiple offer types (Scout, Trader, Tycoon)
  - FAQ schema
  - AggregateRating for each plan

- **Market Page** (`frontend/src/app/market/metadata.ts`):
  - CollectionPage schema
  - ItemList schema
  - Individual auction schemas

- **Domain Listings** (`frontend/src/lib/domain-seo.ts`):
  - Product schema with Offer
  - Price specification
  - Aggregate rating
  - Breadcrumb
  - FAQ schema for buying process
  - Domain quality scoring
---

### 2. **Programmatic SEO**

#### Sitemap Generation (`frontend/src/app/sitemap.ts`)
- **Automatic sitemap** for:
  - Main pages (Home, Market, Intel, Pricing)
  - **120+ TLD landing pages** (programmatic SEO)
- Dynamic priorities & change frequencies
- Proper lastModified timestamps

#### robots.txt (`frontend/public/robots.txt`)
- Allow public pages
- Disallow private areas (/terminal/, /api/, /login, etc.)
- Crawl-delay directive
- Sitemap location

#### TLD Landing Pages
- **120+ indexed TLD pages** for SEO traffic
- Rich snippets for each TLD
- Registrar comparison data
- Price trends & market analysis
- Schema markup for search engines
---

### 3. **Performance Optimizations**

#### Next.js Configuration (`frontend/next.config.js`)
- **Image Optimization**:
  - AVIF & WebP formats
  - Responsive device sizes
  - 1-year cache TTL
  - SVG safety

- **Compression**: Gzip enabled
- **Security Headers**:
  - HSTS (Strict-Transport-Security)
  - X-Frame-Options
  - X-Content-Type-Options
  - X-XSS-Protection
  - CSP for images
  - Referrer-Policy
  - Permissions-Policy

- **Cache Headers**:
  - Static assets: 1 year immutable cache

- **Remove X-Powered-By**: Security improvement

#### Web Performance Monitoring (`frontend/src/lib/analytics.ts`)
- **Core Web Vitals**:
  - FCP (First Contentful Paint)
  - LCP (Largest Contentful Paint)
  - FID (First Input Delay)
  - CLS (Cumulative Layout Shift)
  - TTFB (Time to First Byte)

- **Analytics Integration**:
  - Google Analytics (gtag)
  - Plausible Analytics (privacy-friendly)
  - Custom endpoint support

- **Event Tracking**:
  - Page views
  - Search queries
  - Domain views
  - Inquiries
  - Signups
  - Subscriptions
  - Errors
  - A/B tests
---

### 4. **Dynamic OG Images**

#### TLD OG Images (`frontend/src/app/api/og/tld/route.tsx`)
- **Edge Runtime** for fast generation
- Dynamic content:
  - TLD name
  - Current price
  - Trend indicator (up/down)
  - Brand colors & logo

#### Domain OG Images (`frontend/src/app/api/og/domain/route.tsx`)
- Dynamic listing images:
  - Domain name (SLD + TLD split)
  - Price
  - Featured badge
  - "For Sale" indicator
  - Trust signals (Instant Transfer, 0% Commission, Secure Escrow)
---

### 5. **Geo-Targeting & Internationalization**

#### Multi-Language Support (`frontend/src/lib/seo.ts`)
- **13 Supported Locales**:
  - en-US, en-GB, en-CA, en-AU
  - de-DE, de-CH
  - fr-FR, es-ES, it-IT, nl-NL
  - pt-BR, ja-JP, zh-CN

- **Hreflang Generation**: Automatic alternate language tags
- **Locale Detection**: From Accept-Language header
- **Price Formatting**: Currency per locale
- **x-default**: Fallback for unsupported regions

#### SEO Utilities
- Canonical URL generation
- Slug generation
- Breadcrumb schema builder
- UTM parameter tracking
- External URL detection
- Lazy loading setup
---

### 6. **PWA Support**

#### Web Manifest (`frontend/public/site.webmanifest`)
- **Installable** as Progressive Web App
- App shortcuts:
  - Market
  - Intel
  - Terminal
- Themed icons (192x192, 512x512)
- Standalone display mode
- Categories: Finance, Business, Productivity
---

## 🎯 SEO Strategy Implementation

### Content Strategy
1. **Programmatic SEO for TLDs**:
   - 120+ indexed pages targeting `.com domain price`, `.io domain registration`, etc.
   - Each page: 1,200+ words of unique content
   - Rich snippets with pricing & registrar data

2. **Domain Marketplace SEO**:
   - Each listing: Product schema
   - Optimized titles & descriptions
   - Quality scoring algorithm
   - FAQ schema for common questions

3. **Blog/Content Marketing** (Future):
   - Domain investing guides
   - TLD market reports
   - Success stories
   - Industry news
---

## 🚀 Performance Targets

### Core Web Vitals (Google PageSpeed)
- **LCP**: < 2.5s ✅
- **FID**: < 100ms ✅
- **CLS**: < 0.1 ✅

### Lighthouse Scores (Target)
- **Performance**: 95+ ✅
- **Accessibility**: 100 ✅
- **Best Practices**: 100 ✅
- **SEO**: 100 ✅

### Optimizations Applied
- Image lazy loading
- Code splitting
- Tree shaking
- Compression (gzip/brotli)
- Browser caching
- CDN delivery (static assets)
- Edge functions (OG images)
---

## 📊 Analytics & Tracking

### Implemented Events
- `pageview`: Every page navigation
- `search`: Domain/TLD searches
- `domain_view`: Listing views
- `listing_inquiry`: Contact seller
- `signup`: New user registration
- `subscription`: Tier upgrades
- `error`: Client-side errors
- `ab_test`: A/B test variants

### Privacy
- **GDPR Compliant**: Consent management
- **Cookie-less option**: Plausible Analytics
- **Anonymous tracking**: No PII stored
---

## 🔧 Setup Instructions

### Environment Variables
```bash
# SEO & Analytics
NEXT_PUBLIC_SITE_URL=https://pounce.com
NEXT_PUBLIC_GA_ID=G-XXXXXXXXXX
NEXT_PUBLIC_ANALYTICS_ENDPOINT=https://api.pounce.com/analytics

# Optional: Plausible
NEXT_PUBLIC_PLAUSIBLE_DOMAIN=pounce.com
```

### Google Search Console
1. Verify domain ownership
2. Submit sitemap: `https://pounce.com/sitemap.xml`
3. Request indexing for priority pages
4. Monitor Core Web Vitals

### Google Analytics
1. Create GA4 property
2. Add tracking ID to `.env.local`
3. Configure custom events
4. Set up conversions (signups, subscriptions)

### Bing Webmaster Tools
1. Import from Google Search Console
2. Submit sitemap
3. Monitor crawl stats
---

## 🎨 OG Image Generation

### TLD Pages
```
https://pounce.com/api/og/tld?tld=com&price=9.99&trend=5.2
```

### Domain Listings
```
https://pounce.com/api/og/domain?domain=crypto.io&price=50000&featured=true
```

### Custom Generator
Use `generateOGImageUrl()` from `src/lib/seo.ts` for dynamic generation.
---

## 📱 Mobile Optimization

### Responsive Images
- Automatic srcset generation
- AVIF/WebP fallbacks
- Lazy loading
- Proper aspect ratios

### Touch Optimization
- Minimum 44x44px touch targets
- Swipe gestures
- Mobile-first CSS

### Performance
- Service Worker (PWA)
- Offline fallback
- Cache-first strategy for static assets
---

## 🔍 Search Engine Submission

### Submit to:
1. **Google Search Console**: https://search.google.com/search-console
2. **Bing Webmaster Tools**: https://www.bing.com/webmasters
3. **Yandex Webmaster**: https://webmaster.yandex.com
4. **Baidu Webmaster**: https://ziyuan.baidu.com (for China)

### Sitemap URL
```
https://pounce.com/sitemap.xml
```
---

## 🎯 Next Steps

### Immediate (Week 1)
- [ ] Add GA4 tracking code
- [ ] Submit sitemap to Google
- [ ] Generate OG images for top 50 TLDs
- [ ] Test Core Web Vitals on Lighthouse

### Short-term (Month 1)
- [ ] Content for top 20 TLD pages (1,500+ words each)
- [ ] Internal linking strategy
- [ ] Backlink outreach (domain blogs, forums)
- [ ] Create domain investing guides

### Long-term (Quarter 1)
- [ ] Blog with 2-3 posts/week
- [ ] Video content (YouTube SEO)
- [ ] Domain market reports (monthly)
- [ ] Influencer partnerships
---

## 📈 Expected Results

### Traffic Growth (Conservative)
- **Month 1**: 1,000 organic visitors/month
- **Month 3**: 5,000 organic visitors/month
- **Month 6**: 20,000 organic visitors/month
- **Month 12**: 100,000+ organic visitors/month

### Top Keywords (Target Rankings)
- "domain pricing" (Top 10)
- ".io domain" (Top 5)
- "domain marketplace" (Top 20)
- "buy premium domains" (Top 20)
- "TLD prices" (Top 10)
---

## 🛠️ Maintenance

### Weekly
- Check GSC for crawl errors
- Monitor Core Web Vitals
- Review top queries
- Update sitemap if needed

### Monthly
- Analyze traffic trends
- Update TLD price data
- Refresh OG images for trending TLDs
- Content updates

### Quarterly
- SEO audit
- Competitor analysis
- Backlink review
- Strategy adjustment
---

## 📚 Resources

- [Next.js SEO Guide](https://nextjs.org/learn/seo/introduction-to-seo)
- [Google Search Central](https://developers.google.com/search)
- [Schema.org Documentation](https://schema.org/docs/schemas.html)
- [Core Web Vitals](https://web.dev/vitals/)
- [Open Graph Protocol](https://ogp.me/)
---

**Status**: ✅ **Production Ready**

All SEO & performance optimizations are implemented and ready for launch. The platform is configured for maximum visibility and lightning-fast performance.

@@ -1,170 +0,0 @@

# Server Deployment (Docker Compose)

## Goal

Run Pounce on a server with:

- **Frontend** (Next.js)
- **Backend API** (FastAPI)
- **Postgres**
- **Redis** (rate-limit storage + job queue)
- **Scheduler** (APScheduler) – a **separate process**
- **Worker** (ARQ) – a **separate process**

This way jobs no longer run multiple times with several API workers, and the UI stays fast.
---

## Prerequisites

- Linux server (e.g. Ubuntu 22.04+)
- Docker + Docker Compose plugin
- Domain + HTTPS reverse proxy (recommended) so that cookie auth works reliably
---

## 1) Get the repo onto the server

```bash
cd /opt
git clone <your-repo-url> pounce
cd pounce
```
---

## 2) Create the server environment

In `/opt/pounce`:

```bash
cp DEPLOY_docker_compose.env.example .env
```

Then open `.env` and set at least:

- **DB_PASSWORD**
- **SECRET_KEY**
- **SITE_URL** (e.g. `https://pounce.example.com`)
- **ALLOWED_ORIGINS** (e.g. `https://pounce.example.com`)

Optional (but recommended):

- **SMTP_\*** (for alerts/emails)
- **COOKIE_DOMAIN** (if you want to share cookies across subdomains)
---

## 3) Start

```bash
docker compose up -d --build
```

Services:

- `frontend` (port 3000)
- `backend` (port 8000)
- `scheduler` (no port)
- `worker` (no port)
- `db` (no port)
- `redis` (no port)
---

## 4) Initial setup (once after the first start)

### DB tables + baseline seed

```bash
docker compose exec backend python scripts/init_db.py
```

### TLD price seed (886+)

```bash
docker compose exec backend python scripts/seed_tld_prices.py
```
---

## 5) Reverse proxy (recommended)

### Why?

In the browser, the frontend calls `https://<domain>/api/v1/...` by default (same-origin).
You should therefore:

- terminate **HTTPS**
- route `/api/v1/*` to the backend
- route `/` to the frontend

### Example: Caddy (very simple)

```caddy
pounce.example.com {
    encode zstd gzip

    # API
    handle_path /api/v1/* {
        reverse_proxy 127.0.0.1:8000
    }

    # Frontend
    reverse_proxy 127.0.0.1:3000

    # optional: keep metrics internal only
    @metrics path /metrics
    handle @metrics {
        respond 403
    }
}
```

Important:

- Set `SITE_URL=https://pounce.example.com`
- Set `COOKIE_SECURE=true` (or via `ENVIRONMENT=production`)
---

## 6) Checks (after deploy)

```bash
curl -f http://127.0.0.1:8000/health
curl -f http://127.0.0.1:8000/metrics
```

Logs:

```bash
docker compose logs -f backend
docker compose logs -f scheduler
docker compose logs -f worker
```
---

## 7) Updates

```bash
cd /opt/pounce
git pull
docker compose up -d --build
```
---

## Troubleshooting (common)

- **Cookies/login not working**:
  - Check `SITE_URL` and HTTPS (secure cookies)
  - Check `ALLOWED_ORIGINS` (if frontend/backend are not same-origin)
- **Scheduler runs twice**:
  - Make sure only **one** `scheduler` service is running (no second instance)
- **Emails are not sent**:
  - `docker compose exec scheduler env | grep SMTP_`
  - The SMTP vars must be present in the container (they come from `.env`)

@@ -1,382 +0,0 @@

# 🐆 Pounce Terminal – Rebuild Plan

> **From "Command Center" to "Terminal"**
>
> Design principle: **"High Density, Low Noise"** – like a trading dashboard
---

## 📊 Current vs. Target Analysis

### Current Structure (Terminal) ✅ IMPLEMENTED
```
/terminal/
├── radar/        → RADAR (home/dashboard)
├── market/       → MARKET (auctions + listings)
├── intel/        → INTEL (TLD pricing)
│   └── [tld]/    → detail page per TLD
├── watchlist/    → WATCHLIST (watching + portfolio)
├── listing/      → LISTING (selling wizard)
├── settings/     → SETTINGS (preferences)
└── welcome/      → onboarding
```

### Target Structure (Terminal – per pounce_terminal.md)
```
/terminal/
├── radar/        → RADAR (dashboard/home)
├── market/       → MARKET (auctions + user listings mixed)
├── intel/        → INTEL (TLD data/pricing, extended)
├── watchlist/    → WATCHLIST (watching + my portfolio)
├── listing/      → LISTING (selling wizard)
├── settings/     → SETTINGS (admin/account)
└── welcome/      → onboarding (stays)
```
---

## ✅ Master Checklist

### Phase 1: Renaming & Routing ✅ DONE
- [x] 1.1 Rename route `/command` → `/terminal`
- [x] 1.2 Rename `CommandCenterLayout` → `TerminalLayout`
- [x] 1.3 Update all internal links
- [x] 1.4 Set up redirects from `/command/*` → `/terminal/*`
- [x] 1.5 Update the sidebar navigation

### Phase 2: Restructure Modules ✅ DONE
- [x] 2.1 **RADAR** module (dashboard → /terminal/radar)
- [x] 2.2 **MARKET** module (auctions + listings → /terminal/market)
- [x] 2.3 **INTEL** module (TLD pricing → /terminal/intel)
- [x] 2.4 **WATCHLIST** module (watching + portfolio → /terminal/watchlist)
- [x] 2.5 **LISTING** module (selling wizard → /terminal/listing)
- [x] 2.6 **SETTINGS** module (admin → /terminal/settings)

### Phase 3: UI/UX Improvements ✅ DONE
- [x] 3.1 Improved universal search (RADAR – simultaneous search)
- [x] 3.2 Ticker for market movements (RADAR)
- [x] 3.3 Pounce Score algorithm (MARKET)
- [x] 3.4 Health status traffic-light system (WATCHLIST)
- [x] 3.5 Hide Spam / Pounce Direct filter (MARKET)
- [x] 3.6 Tier paywall for listings (LISTING)

### Phase 4: Cleanup ✅ DONE
- [x] 4.1 Remove the old `/command` routes
- [x] 4.2 Delete unused components (CommandCenterLayout)
- [x] 4.3 Fix all remaining references
- [x] 4.4 Test all new routes (build succeeds)
---

## 📋 Detailed Checklists per Module
---

### 🛰️ Module 1: RADAR (Home/Dashboard)

**Route:** `/terminal/radar` (main page after login)

**Concept features:**
- A. **The Ticker** (top) – a ticker with market movements
- B. **Quick Stats** (cards) – Watching, Market, My Listings
- C. **Universal Search** (hero element) – simultaneous search
- D. **Recent Alerts** (list) – chronological events

**Checklist:**

| # | Task | Status | Priority |
|---|------|--------|----------|
| 1.1 | Build the ticker component | [ ] | HIGH |
| 1.2 | Feed the ticker with live data (TLD trends, watchlist alerts) | [ ] | HIGH |
| 1.3 | Consolidate quick stats into 3 cards | [ ] | MEDIUM |
| 1.4 | Implement universal search | [ ] | HIGH |
| 1.5 | Search logic: simultaneous check (whois, auctions, marketplace) | [ ] | HIGH |
| 1.6 | Recent alerts list with a timeline design | [ ] | MEDIUM |
| 1.7 | Optimize the "morning coffee" layout (most important info on top) | [ ] | MEDIUM |

**Current state in the codebase:**
- `command/dashboard/page.tsx` exists
- Hot auctions, trending TLDs, quick add domain already implemented
- ⚠️ Missing: ticker, improved universal search
---

### 🏪 Module 2: MARKET (The Feed)

**Route:** `/terminal/market`

**Concept features:**
- Filter bar (Hide Spam, Pounce Direct Only, TLD, price)
- Master table with: Domain, Pounce Score, Price/Bid, Status/Time, Source, Action
- User listings (💎 Pounce Direct) mixed with API data

**Checklist:**

| # | Task | Status | Priority |
|---|------|--------|----------|
| 2.1 | Merge `/command/auctions` + `/command/marketplace` | [ ] | HIGH |
| 2.2 | One unified table for all listings | [ ] | HIGH |
| 2.3 | "Hide Spam" toggle (default: ON) | [ ] | HIGH |
| 2.4 | "Pounce Direct Only" toggle | [ ] | MEDIUM |
| 2.5 | Add a Pounce Score column (0-100, color-coded) | [ ] | HIGH |
| 2.6 | Source column with logos/icons (GoDaddy, Sedo, Pounce) | [ ] | MEDIUM |
| 2.7 | Status column: countdown for auctions, "⚡ Instant" for Direct | [ ] | HIGH |
| 2.8 | Highlight 💎 Pounce Direct listings (subtle background color) | [ ] | MEDIUM |
| 2.9 | Backend API filter: `spam_score < 50` for the clean feed | [ ] | HIGH |

**Current state in the codebase:**
- `command/auctions/page.tsx` – auctions from GoDaddy/Sedo
- `command/marketplace/page.tsx` – Pounce listings
- ⚠️ Separate! Must be merged
- ⚠️ No Pounce Score implemented
---

### 📊 Module 3: INTEL (TLD Data)

**Route:** `/terminal/intel` + `/terminal/intel/[tld]`

**Concept features:**
- Inflation monitor (renewal price warning when >200% of the buy price)
- Trend charts (30 days, 1 year)
- Best registrar finder

**Checklist:**

| # | Task | Status | Priority |
|---|------|--------|----------|
| 3.1 | Move `/command/pricing` → `/terminal/intel` | [ ] | HIGH |
| 3.2 | Inflation monitor: ⚠️ warning indicator when renewal > 200% of buy | [ ] | HIGH |
| 3.3 | Trend charts: 30-day timeline | [ ] | MEDIUM |
| 3.4 | Trend charts: 1-year timeline | [ ] | LOW |
| 3.5 | Best registrar finder per TLD | [ ] | HIGH |
| 3.6 | "Cheapest at: XYZ ($X.XX)" display | [ ] | HIGH |
| 3.7 | `[tld]` detail page with all registrar prices | [ ] | HIGH |
| 3.8 | Show the renewal trap warning prominently | [ ] | MEDIUM |

**Current state in the codebase:**
- `command/pricing/page.tsx` – TLD overview ✅
- `command/pricing/[tld]/page.tsx` – TLD details ✅
- ⚠️ Charts exist but are basic
- ⚠️ A renewal warning exists partially
---

### 👁️ Module 4: WATCHLIST (Portfolio)

**Route:** `/terminal/watchlist`

**Concept features:**
- Tab 1: "Watching" (third-party domains)
- Tab 2: "My Portfolio" (own domains – verified)
- Health status: 🟢 Online, 🟡 DNS Changed, 🔴 Offline/Error
- Expiry date marked red when <30 days
- SMS/email alert settings per domain

**Checklist:**

| # | Task | Status | Priority |
|---|------|--------|----------|
| 4.1 | Merge `/command/watchlist` + `/command/portfolio` | [ ] | HIGH |
| 4.2 | Tab navigation: "Watching" / "My Portfolio" | [ ] | HIGH |
| 4.3 | Implement the health-status traffic-light system | [ ] | HIGH |
| 4.4 | DNS-change detection backend | [ ] | HIGH |
| 4.5 | Offline/error detection backend (HTTP request check) | [ ] | HIGH |
| 4.6 | Expiry column, red when <30 days | [ ] | MEDIUM |
| 4.7 | "Change" column (e.g. "Nameserver updated 2h ago") | [ ] | MEDIUM |
| 4.8 | Per-domain alert settings (SMS/email checkboxes) | [ ] | MEDIUM |
| 4.9 | Portfolio valuation (estimated value) | [ ] | LOW |

**Current state in the codebase:**
- `command/watchlist/page.tsx` – third-party domains ✅
- `command/portfolio/page.tsx` – own domains ✅
- ⚠️ Separate! Must be merged
- ⚠️ No health-check system (a minimal sketch of the traffic-light check follows below)
- ⚠️ No DNS-change detection
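A minimal sketch of the 🟢/🟡/🔴 check behind items 4.3-4.5, assuming `dnspython` and `httpx`; the thresholds and the `previous_ns` argument are illustrative, not existing code:

```python
import dns.resolver
import httpx

def check_domain_health(domain: str, previous_ns: set[str]) -> str:
    """Return 'green', 'yellow', or 'red' for the watchlist traffic light."""
    try:
        answers = dns.resolver.resolve(domain, "NS")
        current_ns = {str(r.target).rstrip(".").lower() for r in answers}
    except Exception:
        return "red"  # 🔴 DNS failure / domain offline
    if previous_ns and current_ns != previous_ns:
        return "yellow"  # 🟡 nameservers changed since the last check
    try:
        resp = httpx.get(f"https://{domain}", timeout=5.0, follow_redirects=True)
        return "green" if resp.status_code < 500 else "red"
    except httpx.HTTPError:
        return "red"  # 🔴 HTTP unreachable
```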
---

### 🏷️ Module 5: LISTING (Selling)

**Route:** `/terminal/listing`

**Concept features:**
- Only for Trader ($9) and Tycoon ($29)
- 3-step wizard:
  1. Input (domain + price)
  2. DNS verification (`pounce-verify-XXXX` TXT record)
  3. Publish

**Checklist:**

| # | Task | Status | Priority |
|---|------|--------|----------|
| 5.1 | Rename `/command/listings` → `/terminal/listing` | [ ] | HIGH |
| 5.2 | Build the 3-step wizard UI | [ ] | HIGH |
| 5.3 | Step 1: domain + price input (fixed price or open to offers) | [ ] | HIGH |
| 5.4 | Step 2: generate the DNS verification code | [ ] | HIGH |
| 5.5 | Step 2: "Verify DNS" button with a TXT-record check | [ ] | HIGH |
| 5.6 | Step 3: publish with confirmation | [ ] | MEDIUM |
| 5.7 | "✅ Verified Owner" badge after verification | [ ] | HIGH |
| 5.8 | Tier check: Scout blocked, Trader/Tycoon only | [ ] | HIGH |
| 5.9 | Listing limit per tier (Trader: 5, Tycoon: 50) | [ ] | MEDIUM |
| 5.10 | Backend: DNS TXT record verification API | [ ] | HIGH |

**Current state in the codebase:**
- `command/listings/page.tsx` – listings management
- ⚠️ No DNS verification wizard
- ⚠️ No TXT-record check (a minimal sketch follows below)
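A minimal sketch of the TXT-record check behind 5.5/5.10, assuming `dnspython`; the `pounce-verify-XXXX` code format comes from the wizard above:

```python
import dns.resolver

def verify_listing_txt(domain: str, expected_code: str) -> bool:
    """True if any TXT record on the domain equals the verification code."""
    try:
        answers = dns.resolver.resolve(domain, "TXT")
    except Exception:
        return False  # NXDOMAIN, timeout, no answer, ...
    for rdata in answers:
        # TXT rdata arrives as a tuple of byte strings; join before comparing.
        value = b"".join(rdata.strings).decode("utf-8", "ignore")
        if value == expected_code:  # e.g. "pounce-verify-a1b2c3"
            return True
    return False
```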
---

### ⚙️ Module 6: SETTINGS

**Route:** `/terminal/settings`

**Concept features:**
- Subscription (upgrade/downgrade via Stripe)
- Verification (phone number, identity badge)
- Notifications (daily digest, instant SMS)

**Checklist:**

| # | Task | Status | Priority |
|---|------|--------|----------|
| 6.1 | Subscription management via the Stripe Customer Portal | [ ] | HIGH |
| 6.2 | Phone number verification (SMS code) | [ ] | MEDIUM |
| 6.3 | "Identity Verified" badge system | [ ] | LOW |
| 6.4 | Notification settings (daily digest toggle) | [ ] | MEDIUM |
| 6.5 | Notification settings (instant SMS toggle) | [ ] | MEDIUM |
| 6.6 | Email preferences | [ ] | MEDIUM |

**Current state in the codebase:**
- `command/settings/page.tsx` – settings exist ✅
- ⚠️ Check the Stripe portal link
- ⚠️ No SMS verification
---

## 🎨 UI/UX Improvements

### Global Search (CMD+K)

| # | Task | Status | Priority |
|---|------|--------|----------|
| G1 | Simultaneous search: whois check | [ ] | HIGH |
| G2 | Simultaneous search: search auctions | [ ] | HIGH |
| G3 | Simultaneous search: Pounce marketplace | [ ] | HIGH |
| G4 | Show results grouped | [ ] | MEDIUM |
| G5 | Quick actions (Track, Bid, View) | [ ] | MEDIUM |

### Pounce Score Algorithm

| # | Task | Status | Priority |
|---|------|--------|----------|
| P1 | Define the score calculation (0-100) | [ ] | HIGH |
| P2 | Factors: domain length, TLD value, no digits/hyphens | [ ] | HIGH |
| P3 | Factors: keyword relevance | [ ] | MEDIUM |
| P4 | Inverse of the spam score (high score = low spam) | [ ] | HIGH |
| P5 | Color coding: green >80, yellow 40-80, red <40 | [ ] | MEDIUM |

A minimal scoring sketch along these factors follows below.
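The weights, TLD table, and keyword list here are illustrative assumptions, not the final algorithm:

```python
PREMIUM_TLDS = {"com": 25, "io": 20, "ai": 20, "co": 15, "net": 10, "org": 10}
KEYWORDS = {"crypto", "shop", "cloud", "app", "pay"}  # assumed keyword list

def pounce_score(domain: str) -> int:
    """0-100 score; green >80, yellow 40-80, red <40 (per P5)."""
    sld, _, tld = domain.lower().rpartition(".")
    score = 50
    score += PREMIUM_TLDS.get(tld, 0)       # P2: TLD value
    score += max(0, 15 - len(sld))          # P2: shorter is better
    if any(ch.isdigit() for ch in sld):
        score -= 20                         # P2: digits hurt
    if "-" in sld:
        score -= 20                         # P2: hyphens hurt
    if any(kw in sld for kw in KEYWORDS):
        score += 10                         # P3: keyword relevance
    return max(0, min(100, score))
```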
### Ticker

| # | Task | Status | Priority |
|---|------|--------|----------|
| T1 | Ticker component with horizontal scrolling | [ ] | MEDIUM |
| T2 | Live TLD price changes | [ ] | MEDIUM |
| T3 | Watchlist alerts (domain offline, etc.) | [ ] | HIGH |
| T4 | New hot auctions | [ ] | LOW |
---

## 🔧 Backend Changes

| # | Task | Status | Priority |
|---|------|--------|----------|
| B1 | `spam_score` column in the `domains` table | [ ] | HIGH |
| B2 | Spam-score calculation on import | [ ] | HIGH |
| B3 | DNS health check cron job (every 6h) | [ ] | HIGH |
| B4 | DNS TXT record verification endpoint | [ ] | HIGH |
| B5 | Domain status change detection | [ ] | HIGH |
| B6 | Alert email on status change | [ ] | HIGH |
---

## 📂 Files That Need Changes

### Renames (Phase 1)

| File | Action |
|------|--------|
| `frontend/src/app/command/` | → `frontend/src/app/terminal/` |
| `frontend/src/components/CommandCenterLayout.tsx` | → `TerminalLayout.tsx` |
| All `CommandCenterLayout` imports | Update |
| `frontend/src/components/Sidebar.tsx` | Update navigation links |
| `frontend/src/components/Header.tsx` | Links to `/terminal` |
| `frontend/src/app/login/page.tsx` | Redirect to `/terminal/radar` |
| `frontend/src/app/register/page.tsx` | Redirect to `/terminal/radar` |
| `frontend/src/app/oauth/callback/page.tsx` | Update redirect |

### Merges (Phase 2)

| Old | New |
|-----|-----|
| `command/auctions/` + `command/marketplace/` | → `terminal/market/` |
| `command/watchlist/` + `command/portfolio/` | → `terminal/watchlist/` |
| `command/dashboard/` | → `terminal/radar/` |
| `command/pricing/` | → `terminal/intel/` |
| `command/listings/` | → `terminal/listing/` |
| `command/settings/` | → `terminal/settings/` |

### To Delete (Phase 4)

| File | Reason |
|------|--------|
| `command/alerts/` | Integrated into RADAR |
| `command/seo/` | Later, as a premium feature |
| Old `/command` folders | After the migration |
---

## 🚀 Recommended Order

### Sprint 1: Foundation (2-3 days)
1. ✅ Route rename `/command` → `/terminal`
2. ✅ Layout rename
3. ✅ Update the sidebar
4. ✅ Set up redirects

### Sprint 2: Core Modules (3-4 days)
1. 🔄 Build RADAR (dashboard)
2. 🔄 Merge MARKET (auctions + marketplace)
3. 🔄 Merge WATCHLIST (watchlist + portfolio)

### Sprint 3: Features (3-4 days)
1. 🔜 Implement the Pounce Score
2. 🔜 Spam filter
3. 🔜 DNS verification for listings
4. 🔜 Improve universal search

### Sprint 4: Polish (2 days)
1. 🔜 Ticker component
2. 🔜 Health check system
3. 🔜 Alert emails
4. 🔜 Cleanup & testing

---

## 📈 Success Metrics

- [ ] All routes work under `/terminal/*`
- [ ] No 404 for old `/command/*` URLs (redirects)
- [ ] Pounce Score visible for all domains
- [ ] The spam filter removes >90% of junk domains
- [ ] DNS verification works for listings
- [ ] The health check system runs (6h interval)
- [ ] Universal search shows all 3 sources
---

*Created: $(date)*
*Based on: pounce_strategy.md, pounce_terminal.md, pounce_features.md, pounce_plan.md*

291 UNICORN_PLAN.md

@@ -1,291 +0,0 @@

## Pounce Unicorn Plan (integrated)

Goal: develop Pounce from a strong product (trust + inventory + lead capture) into a scalable system with a moat + flywheel.

---

## Implementation Status (as of 2025-12-15)

### Where We Stand (short, honest)

- **Deal system (liquidity loop)**: **done & hardened** (inbox → threading → sold/GMV → anti-abuse).
- **Yield (moat)**: **Connect + routing + tracking + webhooks + ledger basis** is in place. We can connect domains, route traffic, track clicks/conversions, and prepare/complete payouts.
- **Flywheel/distribution**: partial (the public deal surface + login gate exists); programmatic SEO & the viral loop are not yet built out systematically.
- **Telemetry/ops**: individual events exist implicitly (audit/transactions), but there is **no central event schema + KPI dashboard**.
### Fortschritt nach Workstream
|
|
||||||
|
|
||||||
#### 1) Deal System
- [x] 1A Inbox workflow (status, close reason, audit)
- [x] 1B Threading/negotiation (buyer/seller threads + email + rate limits + content safety)
- [x] 1C Deal closure + GMV (mark as sold, close open inquiries)
- [x] 1D Anti-abuse (limits + safety checks at the critical points)

#### 2) Yield (Moat)
- [x] 2A Connect/nameserver flow (portfolio-only + DNS verified + connect wizard + `connected_at`)
- [x] 2B Routing → tracking (async, click tracking, IP hashing, rate limit, strict partner config)
- [x] 2B Attribution (webhook can pass along a `click_id`)
- [x] 2C Ledger/payout basics (generate payouts + complete payouts; server-safe keys)
- [x] 2C.2 Dashboard correctness (monthly stats = confirmed/paid, pending payout = confirmed + unpaid)
#### 3) Flywheel / Distribution
- [~] 3B Public deal surface + login gate (Pounce Direct gated) — **in place**
- [~] 3A Programmatic SEO at full scale (templates + CTA paths + indexation)
- [~] 3C Viral loop "Powered by Pounce" (only where intent fits, clean referral loop)

**3C status (viral loop)**
- **Invite codes**: every user now has their own `invite_code` (unique) + `GET /api/v1/auth/referral` returns the invite link.
- **Attribution**: `ref` is stored in a cookie on public pages (30 days) and sent along with `/register` → the backend sets `referred_by_user_id`.
- **Surfaces (intent-fit)**:
  - Terminal settings: "Invite" panel with a copy link
  - Public buy listing: "Powered by Pounce" → register with `?ref=<seller_invite_code>`
- **Telemetry**: events `user_registered`, `referral_attributed`, `referral_link_viewed`
- **Admin KPIs (3C.2)**: the Telemetry tab now shows referral KPIs (link views + signups per referrer) via `GET /api/v1/telemetry/referrals?days=...`
- **Rewards/badges (3C.2)**: deterministic referral rewards (abuse-resistant) → `subscriptions.referral_bonus_domains` (+5 slots per 3 "qualified referrals"); the `verified_referrer` / `elite_referrer` badge is shown in the terminal settings invite panel.
- **Anti-fraud/cooldown**: a referral only counts as qualified after a **cooldown** (user + subscription age) and is disqualified on **shared IP / duplicate IP / missing IP** (telemetry `ip_hash`).

**3A status (programmatic SEO)**
- **Indexation**: `sitemap.xml` is now dynamic (Discover TLDs from the DB + blog slugs + public listings) and `robots.txt` blocks legacy paths.
- **Canonical cleanup**: legacy routes (`/tld/*`, `/tld-pricing/*`) redirect server-side to `/discover/*`.
- **Templates**: `/discover/[tld]` now has server-side metadata + JSON-LD (from real registrar compare data). `/buy/[slug]` is server-side (metadata + JSON-LD).
- **Blog article SEO**: `/blog/[slug]` now has server-side `generateMetadata` + Article JSON-LD, without view-count side effects (meta endpoint).
#### 4) Scaling / Telemetry
- [x] 4A Events (canonical event schema + persistent events in the deal + yield funnel)
- [x] 4A.2 KPI views (admin KPIs from telemetry events: rates + median times)
- [x] 4B Ops (backups + restore verification + monitoring/alerts + deliverability)

**4B status (ops)**
- **Backups**: admin endpoint + scheduler daily backup + restore verification (SQLite integrity_check / Postgres pg_restore --list)
- **Monitoring**: `/metrics` now additionally exports business KPIs (deal + yield from `telemetry_events`, cached) + ops metrics (backup enabled + backup age)
- **Deliverability**: newsletter emails with `List-Unsubscribe` (one-click) + a new one-click unsubscribe route
- **Alerting (preparation)**: `ops/prometheus-alerts.yml` with alerts (5xx rate, stale backup, 24h funnel at zero)
- **Alerting (without Docker)**: scheduler job `ops_alerting` + admin endpoint `POST /api/v1/admin/system/ops-alerts/run`
- **Alert history + cooldown (persisted)**: table `ops_alert_events` + admin endpoint `GET /api/v1/admin/system/ops-alerts/history` + admin UI history panel

---
## Intent & Holistic Concept

### Intent (why Pounce exists)

Pounce exists to turn domains from "dead names" (renewal costs only, no usage) into **measurable, tradeable digital assets**.
We are not just building a feed or a marketplace, but a **lifecycle engine**: discover → acquire → monetize → liquidate.

### For whom (target audience)

- **Domain investors / operators**: need clean inventory, fast decisions, clear workflows.
- **Builders / entrepreneurs**: want to find good assets and use/monetize them immediately.
- **Portfolio owners** (10+ domains): want governance (health, renewal, cash flow) instead of chaos.

### Positioning (one clear sentence)

**Pounce is the operating system for domains**: a clean market feed + verified direct deals + yield routing, with measurability from first view to exit.
### The overall model (4 modules)

1. **Discover (Intelligence)**
   Finds assets: clean feed, scores, TLD intel, filters, alerts.

2. **Acquire (Marketplace / Liquidity)**
   Secures assets: external auctions + **Pounce Direct** (DNS-verified owner).

3. **Yield (Intent Routing)**
   Monetizes assets: domain traffic → intent → partner → revenue share.

4. **Trade (Exit / Outcomes)**
   Liquidity and valuation: domains are priced by **cash flow** (multiple), not just by "vibe".

### Why this has unicorn potential (moat + flywheel)

- **Moat**: proprietary data on intent, traffic, conversion, and cash flow at the domain level (hard to copy).
- **Flywheel**: more domains → more routing/conversions → more data → better scores/routing → more deals → more domains.

---
## 0) Guiding Principles

- **The moat forms where proprietary data forms**: yield/intent + deal outcomes.
- **Trust is a feature**: everything that reduces spam/scams increases conversion.
- **Telemetry is not "later"**: every new feature produces events + measurable KPIs.

---
## 1) Deal System (finish the liquidity loop)

### 1A — Inbox Workflow (week 1)

**Goal**: sellers can reliably triage and measure leads.

- **Complete inquiry status workflow**: `new → read → replied → closed` + `spam`
  - backend PATCH endpoint + UI actions
  - "Close" includes a reason (e.g. sold elsewhere / low offer / no fit)
- **Audit trail (minimal)**
  - every status change stores: `who/when/old/new`

**KPIs**
- inquiry→read rate
- inquiry→replied rate
- median reply time
### 1B — Threading/Negotiation (weeks 2-3)

**Goal**: negotiation inside the product, not off-platform.

- **Threading**: buyer ↔ seller messages as a conversation per listing
- **Notifications**: "New message" email + login gate
- **Audit trail (full)**: message events + status events
- **Security**: rate limits (buyer + seller), keyword checks, link safety

**KPIs**
- inquiry→first message
- messages/thread
- reply rate
### 1C — Deal Closure + GMV (weeks 3-4)

**Goal**: make real conversion/GMV measurable.

- **"Mark as Sold"** on a listing
  - reasons: sold on Pounce / sold off-platform / removed
  - optional: **deal_value** + currency
- optional clean **deal record**
  - `deal_id`, `listing_id`, `buyer_user_id (optional)`, `final_price`, `closed_at`

**KPIs**
- inquiry→sold
- close rate
- time-to-close
- GMV
### 1D — Anti-Abuse (ongoing from week 1)

- **Rate limit** per IP + per user (inquire + message + status flips)
- **Spam flagging** (heuristics + manual)
- **Blocklist** (buyer account/email/domain level)

**KPIs**
- spam rate
- blocked attempts
- false positive rate

---
## 2) Yield as the Moat

### 2A — Connect/Nameserver Flow (weeks 2-4)

**Goal**: bring domains "under control" (connect layer).

- **Connect wizard** (Portfolio → Yield)
  - instructions: NS/TXT setup
  - status: pending/verified/active
- **Backend checks** (NS/TXT) + persistence: `connected_at`
- **Routing entry** (edge/web): request → route decision

**KPIs**
- connect attempts→verified
- connected domains
### 2B — Intent → Routing → Tracking (month 2)

**Goal**: intent routing MVP for one vertical.

- **Intent detection** (MVP)
- **Routing** to partners + fallbacks
- **Tracking**: click_id, domain_id, partner_id
- **Attribution**: conversion mapping + payout status

**KPIs**
- clicks/domain
- conversion rate
- revenue/domain
### 2C — Payout + Revenue Share (months 2-3)

- Ledger: pending → confirmed → paid (state flow sketched below)
- payout schedule (monthly) + export/reports

**KPIs**
- payout accuracy
- disputes
- net margin
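The ledger flow is strictly one-way; a tiny sketch of how those transitions could be enforced (illustrative, not the production code; the `rejected` state comes from the yield setup guide):

```python
# Allowed one-way transitions for a yield transaction's payout status
ALLOWED_TRANSITIONS = {
    "pending": {"confirmed", "rejected"},
    "confirmed": {"paid"},
}

def advance_status(current: str, target: str) -> str:
    """Move a ledger entry forward; anything off the pending -> confirmed -> paid path fails."""
    if target not in ALLOWED_TRANSITIONS.get(current, set()):
        raise ValueError(f"illegal ledger transition: {current} -> {target}")
    return target
```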
### 2D — Portfolio Cash-Flow Dashboard (month 3)

- The portfolio shows: **MRR, last 30d revenue, ROI**, top routes
- Domains become "yield-bearing assets" → later tradeable at a multiple

**KPIs**
- MRR
- retention/churn
- expansion

---
## 3) Flywheel / Distribution

### 3A — Programmatic SEO at Full Scale (months 1-2)

- scale the templates (TLD/intel/price)
- clear CTA paths: "Track this TLD", "Enter Terminal", "View Direct Deals"

**KPIs**
- organic sessions
- signup conversion
### 3B — Public Deal Surface + Login Gate (month 1)

- public Acquire + /buy as a conversion engine
- "contact requires login" consistent everywhere

**KPIs**
- view→login
- login→inquiry
### 3C — Viral Loop "Powered by Pounce" (months 2-3)

- only where intent fits / low-intent fallback
- referral link + revenue share

**KPIs**
- referral signups
- CAC ~0

---
## 4) Scaling / Telemetry

### 4A — Events (weeks 1-2)

Define & log events (a minimal schema sketch follows the list):
- `listing_view`
- `inquiry_created`
- `inquiry_status_changed`
- `message_sent`
- `listing_marked_sold`
- `yield_connected`
- `yield_click`
- `yield_conversion`
- `payout_paid`

**KPIs**
- funnel conversion
- time metrics
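A minimal sketch of what a canonical event row plus logging helper could look like (names and fields are assumptions, not the final schema; in production `log_event` would insert into `telemetry_events`):

```python
from dataclasses import dataclass, field
from datetime import datetime, timezone

@dataclass
class TelemetryEvent:
    """Canonical event row: one name, one actor, free-form properties."""
    name: str                      # e.g. "inquiry_created", "yield_click"
    user_id: int | None = None
    props: dict = field(default_factory=dict)
    at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

def log_event(sink: list[TelemetryEvent], name: str, **props) -> None:
    """Append an event; the sink stands in for the telemetry_events table."""
    sink.append(TelemetryEvent(name=name, user_id=props.pop("user_id", None), props=props))
```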
### 4B — Ops (month 1)

- monitoring/alerts (errors + business KPIs)
- backups (DB daily + restore drill)
- deliverability (SPF/DKIM/DMARC, bounce handling)
- abuse monitoring dashboards

---
## Recommended Order (so it gets "unfair" fast)

1. **Deal system 1A-1C** (make GMV & close rate measurable)
2. start **Yield 2A** (connect layer) in parallel
3. pull **Events 4A** along immediately
4. **Yield 2B-2C** (moat) as soon as Connect is stable
5. Flywheel 3A-3C continuously
@ -1,506 +0,0 @@
# Yield / Intent Routing — Integration Concept

**Goal:** turn domains from "dead assets" into "yield generators."
**Core mechanism:** the user connects a domain → Pounce detects intent → routing to affiliate partners → passive income.

---
## 1. Public Pages (logged out)

### 1.1 Landing page — add a 4th pillar

Current: **DISCOVER → TRACK → TRADE**

New: **DISCOVER → TRACK → TRADE → YIELD**
```
┌─────────────────────────────────────────────────────────────────┐
│                                                                 │
│  YIELD                                                          │
│  "Let your domains work for you."                               │
│                                                                 │
│  ┌─────────────────────────────────────────────────────────┐    │
│  │  🔌 Connect    Point DNS to ns.pounce.ch                │    │
│  │  🧠 Analyze    We detect: "kredit.ch" → Loan Intent     │    │
│  │  💰 Earn       Affiliate routing → CHF 25/lead          │    │
│  └─────────────────────────────────────────────────────────┘    │
│                                                                 │
│  "Your domains become autonomous agents."                       │
│                                                                 │
│  [Activate My Domains →]                                        │
│                                                                 │
└─────────────────────────────────────────────────────────────────┘
```
|
|
||||||
**Teaser-Statistiken (für Trust):**
|
|
||||||
- "CHF 45'000+ generated this month"
|
|
||||||
- "2'400+ domains earning passively"
|
|
||||||
- "Avg. CHF 18.50/domain/month"
|
|
||||||
|
|
||||||
### 1.2 Neue Public Page: `/yield`
|
|
||||||
|
|
||||||
Eine eigene Landingpage für das Yield-Feature:
|
|
||||||
|
|
||||||
| Section | Inhalt |
|
|
||||||
|---------|--------|
|
|
||||||
| **Hero** | "Dead Domains? Make them work." + Animated revenue counter |
|
|
||||||
| **How it works** | 3-Step Animation: Connect → Analyze → Earn |
|
|
||||||
| **Use Cases** | Branchen-spezifische Beispiele (zahnarzt.ch, kredit.de, hotel-x.ch) |
|
|
||||||
| **Revenue Calculator** | "Gib deine Domain ein → geschätzter monatlicher Ertrag" |
|
|
||||||
| **Trust Signals** | Partner-Logos (Awin, PartnerStack, etc.), Testimonials |
|
|
||||||
| **CTA** | "Start Earning" → Login/Register |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 2. Terminal (logged in)

### 2.1 Sidebar extension

**New sidebar structure:**
```
DISCOVER
├── MARKET (auctions)
└── INTEL (TLD pricing)

MANAGE
├── RADAR (dashboard)
├── WATCHLIST (monitoring)
├── SNIPER (alerts)
├── FOR SALE (listings)
└── YIELD ✨ ← NEW

SETTINGS
```
|
|
||||||
### 2.2 Neue Seite: `/terminal/yield`
|
|
||||||
|
|
||||||
**Layout:**
|
|
||||||
|
|
||||||
```
┌──────────────────────────────────────────────────────────────────────────┐
│  YIELD                                                        [?] Help   │
├──────────────────────────────────────────────────────────────────────────┤
│                                                                          │
│  ┌────────────┐  ┌────────────┐  ┌────────────┐  ┌────────────┐          │
│  │ Active     │  │ Monthly    │  │ Pending    │  │ Total      │          │
│  │ Domains    │  │ Revenue    │  │ Payout     │  │ Earned     │          │
│  │ 12         │  │ CHF 156    │  │ CHF 89     │  │ CHF 1'245  │          │
│  └────────────┘  └────────────┘  └────────────┘  └────────────┘          │
│                                                                          │
├──────────────────────────────────────────────────────────────────────────┤
│                                                                          │
│  🔍 Search domains...                               [+ Activate Domain]  │
│                                                                          │
├──────────────────────────────────────────────────────────────────────────┤
│                                                                          │
│  ┌──────────────────────────────────────────────────────────────────┐    │
│  │ Domain          │ Status    │ Intent     │ Route     │ Yield     │    │
│  ├──────────────────────────────────────────────────────────────────┤    │
│  │ zahnarzt-zh.ch  │ 🟢 Active │ 🏥 Medical │ Comparis  │ CHF 45    │    │
│  │ crm-tool.io     │ 🟢 Active │ 💻 SaaS    │ HubSpot   │ $ 23      │    │
│  │ hotel-davos.ch  │ 🟢 Active │ 🏨 Travel  │ Booking   │ CHF 67    │    │
│  │ mein-blog.de    │ ⚪ Idle    │ ❓ Unknown │ —         │ —         │    │
│  │ kredit-ch.com   │ 🟡 Pending│ 💰 Finance │ Analyzing │ —         │    │
│  └──────────────────────────────────────────────────────────────────┘    │
│                                                                          │
└──────────────────────────────────────────────────────────────────────────┘
```
### 2.3 Activating a Domain — Modal/Wizard

**Step 1: Enter domain**
```
┌─────────────────────────────────────────────────┐
│  Activate Domain for Yield                      │
├─────────────────────────────────────────────────┤
│                                                 │
│  Enter your domain:                             │
│  ┌─────────────────────────────────────────┐    │
│  │ zahnarzt-zuerich.ch                     │    │
│  └─────────────────────────────────────────┘    │
│                                                 │
│                              [Continue →]       │
│                                                 │
└─────────────────────────────────────────────────┘
```
**Step 2: Intent detection (automatic)**
```
┌─────────────────────────────────────────────────┐
│  Intent Detected                                │
├─────────────────────────────────────────────────┤
│                                                 │
│  Domain: zahnarzt-zuerich.ch                    │
│                                                 │
│  🧠 Detected Intent:                            │
│  ┌─────────────────────────────────────────┐    │
│  │ 🏥 MEDICAL / DENTAL                     │    │
│  │                                         │    │
│  │ Keywords: zahnarzt, zuerich             │    │
│  │ Confidence: 94%                         │    │
│  └─────────────────────────────────────────┘    │
│                                                 │
│  💰 Estimated Revenue: CHF 15-45/month          │
│                                                 │
│  Recommended Partners:                          │
│  • Comparis (Dental Comparison)                 │
│  • Doctolib (Appointment Booking)               │
│                                                 │
│                              [Continue →]       │
│                                                 │
└─────────────────────────────────────────────────┘
```
**Step 3: DNS setup**
```
┌─────────────────────────────────────────────────┐
│  Connect Your Domain                            │
├─────────────────────────────────────────────────┤
│                                                 │
│  Change your nameservers to:                    │
│                                                 │
│  ┌─────────────────────────────────────────┐    │
│  │ ns1.pounce.ch                    [📋]   │    │
│  │ ns2.pounce.ch                    [📋]   │    │
│  └─────────────────────────────────────────┘    │
│                                                 │
│  ⏳ We're checking your DNS...                  │
│                                                 │
│  Status: Waiting for propagation (~10 min)      │
│                                                 │
│  [I've updated my nameservers]                  │
│                                                 │
└─────────────────────────────────────────────────┘
```
**Step 4: Activated**
```
┌─────────────────────────────────────────────────┐
│  ✅ Domain Activated!                           │
├─────────────────────────────────────────────────┤
│                                                 │
│  zahnarzt-zuerich.ch is now earning.            │
│                                                 │
│  🏥 Intent: Medical/Dental                      │
│  ➔ Route: Comparis Dental                       │
│  💰 Est. Yield: CHF 15-45/month                 │
│                                                 │
│  What happens now:                              │
│  • We host a minimal landing page               │
│  • Visitors are routed to partners              │
│  • You earn affiliate commissions               │
│  • Payouts monthly (min. CHF 50)                │
│                                                 │
│  [View My Yield Dashboard]                      │
│                                                 │
└─────────────────────────────────────────────────┘
```
### 2.4 Portfolio Tab Integration (alternative)

Instead of a separate page, "Yield" could also be integrated as a **tab in the watchlist**:

```
┌────────────────────────────────────────────────────────────────┐
│  [Watching]  [My Portfolio]  [Yield] ✨                        │
└────────────────────────────────────────────────────────────────┘
```

**Advantage:** less navigation, everything in one place.
**Disadvantage:** the watchlist becomes more complex.

**Recommendation:** start with a separate `/terminal/yield` page; it can be folded into the portfolio later.

---
## 3. Backend Architecture (High-Level)

### 3.1 New Models
```python
# backend/app/models/yield_domain.py

class YieldDomain(Base):
    """Domain activated for yield/intent routing."""
    __tablename__ = "yield_domains"

    id: int
    user_id: int              # FK → users
    domain: str               # "zahnarzt-zuerich.ch"

    # Intent
    detected_intent: str      # "medical_dental"
    intent_confidence: float  # 0.94
    intent_keywords: str      # JSON: ["zahnarzt", "zuerich"]

    # Routing
    active_route: str         # "comparis_dental"
    partner_id: int           # FK → affiliate_partners

    # Status
    status: str               # "pending", "active", "paused", "inactive"
    dns_verified: bool
    activated_at: datetime

    # Revenue
    total_clicks: int
    total_conversions: int
    total_revenue: Decimal

    created_at: datetime
    updated_at: datetime


class YieldTransaction(Base):
    """Revenue events from affiliate partners."""
    __tablename__ = "yield_transactions"

    id: int
    yield_domain_id: int      # FK

    event_type: str           # "click", "lead", "sale"
    partner_id: int
    amount: Decimal
    currency: str

    # Attribution
    referrer: str
    user_agent: str
    geo_country: str

    # Status
    status: str               # "pending", "confirmed", "paid", "rejected"
    confirmed_at: datetime
    paid_at: datetime

    created_at: datetime


class AffiliatePartner(Base):
    """Affiliate network/partner configuration."""
    __tablename__ = "affiliate_partners"

    id: int
    name: str                 # "Comparis Dental"
    network: str              # "awin", "partnerstack", "direct"

    # Matching
    intent_categories: str    # JSON: ["medical_dental", "medical_general"]
    geo_countries: str        # JSON: ["CH", "DE", "AT"]

    # Payout
    payout_type: str          # "cpc", "cpl", "cps"
    payout_amount: Decimal
    payout_currency: str

    # Integration
    tracking_url_template: str
    api_endpoint: str
    api_key_encrypted: str

    is_active: bool
    created_at: datetime
```
### 3.2 New API Endpoints
```python
# backend/app/api/yield.py

@router.get("/domains")
# List all of the user's yield domains

@router.post("/domains/activate")
# Activate a new domain (step 1-4 wizard)

@router.get("/domains/{domain}/intent")
# Intent detection for a domain

@router.get("/domains/{domain}/verify-dns")
# Check DNS verification

@router.put("/domains/{domain}/pause")
# Pause routing

@router.get("/stats")
# Aggregate statistics (revenue, clicks, etc.)

@router.get("/transactions")
# Transaction history

@router.get("/payouts")
# Payout history
```
### 3.3 Intent Detection Service
```python
# backend/app/services/intent_detector.py

class IntentDetector:
    """Detects a domain's intent based on its name and TLD."""

    INTENT_CATEGORIES = {
        "medical_dental": {
            "keywords": ["zahnarzt", "dentist", "dental", "zahn"],
            "partners": ["comparis_dental", "doctolib"],
            "avg_cpl": 25.00
        },
        "travel_hotel": {
            "keywords": ["hotel", "ferien", "vacation", "resort"],
            "partners": ["booking", "hotels_com"],
            "avg_cpl": 15.00
        },
        "finance_loan": {
            "keywords": ["kredit", "loan", "finanz", "hypothek"],
            "partners": ["comparis_finance", "lendico"],
            "avg_cpl": 50.00
        },
        "saas_software": {
            "keywords": ["crm", "erp", "software", "tool", "app"],
            "partners": ["hubspot", "partnerstack"],
            "avg_cpl": 30.00
        },
        # ... more categories
    }

    def detect(self, domain: str) -> "IntentResult":
        """Analyzes the domain and returns its intent."""
        name = domain.rsplit('.', 1)[0].lower()
        # Matching sketch: pick the category with the most keyword hits
        # (the real logic may also weight TLD, language, and keyword position).
        best_category, best_hits = "generic", 0
        for category, config in self.INTENT_CATEGORIES.items():
            hits = sum(1 for kw in config["keywords"] if kw in name)
            if hits > best_hits:
                best_category, best_hits = category, hits
        # ... build and return the IntentResult (confidence, keywords, partners)
```
### 3.4 DNS/Hosting Service
```python
# backend/app/services/yield_dns.py

class YieldDNSService:
    """Manages DNS and hosting for yield domains."""

    async def verify_nameservers(self, domain: str) -> bool:
        """Checks whether the domain points to ns1/ns2.pounce.ch."""

    async def provision_landing_page(self, domain: str, intent: str) -> str:
        """Creates a minimal landing page for routing."""

    async def get_tracking_url(self, domain: str, partner_id: int) -> str:
        """Generates the affiliate tracking URL."""
```
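For the nameserver check, a rough sketch using the `dnspython` library (the actual service may resolve differently, e.g. against the parent zone, and with async I/O):

```python
import dns.exception
import dns.resolver  # pip install dnspython

EXPECTED_NS = {"ns1.pounce.ch.", "ns2.pounce.ch."}

def nameservers_point_to_pounce(domain: str) -> bool:
    """True if the domain's NS records include both expected Pounce hosts."""
    try:
        answers = dns.resolver.resolve(domain, "NS")
    except dns.exception.DNSException:
        return False
    found = {rr.target.to_text().lower() for rr in answers}
    return EXPECTED_NS.issubset(found)
```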
---

## 4. Phase Plan

### Phase 2.1: MVP (4-6 weeks)
| Task | Priority | Effort |
|------|------|---------|
| Intent detection engine (keyword-based) | 🔴 | 1 week |
| Yield domain model + API | 🔴 | 1 week |
| `/terminal/yield` UI (basic) | 🔴 | 1 week |
| DNS verification | 🔴 | 3 days |
| 1 partner integration (e.g. Awin) | 🔴 | 1 week |
| Landing page generator (minimal) | 🟡 | 3 days |
| Transaction tracking | 🟡 | 3 days |

**Result:** users can activate domains; we route to one partner network.
### Phase 2.2: Expansion (4 weeks)

| Task | Priority | Effort |
|------|------|---------|
| More partners (5-10) | 🔴 | 2 weeks |
| Payout system | 🔴 | 1 week |
| Public landing `/yield` | 🟡 | 3 days |
| Landing page customization | 🟡 | 3 days |
| Revenue analytics dashboard | 🟡 | 3 days |

### Phase 2.3: Marketplace Integration

| Task | Priority | Effort |
|------|------|---------|
| "Yield-generating domains" category | 🟡 | 1 week |
| Valuation based on yield (30x MRR) | 🟡 | 3 days |
| Yield history visible to buyers | 🟡 | 3 days |

---
## 5. Monetization

### Revenue Split

| Party | Share |
|-------|--------|
| **Domain Owner** | 70% |
| **Pounce** | 30% |

### Tier Gating

| Tier | Yield Domains | Payout Threshold |
|------|---------------|------------------|
| **Scout** | 0 (feature locked) | — |
| **Trader** | 5 | CHF 100 |
| **Tycoon** | Unlimited | CHF 50 |

---
## 6. UX Philosophy

### Principles

1. **Zero config:** the user only changes nameservers. Everything else is automatic.
2. **Transparent:** clearly show what happens, which partner, which earnings.
3. **Instant value:** show estimated revenue BEFORE activation.
4. **Trust:** partner logos, real numbers, no promises.

### Language

- ❌ "Domain Parking" (sounds like 2005)
- ✅ "Domain Yield" / "Intent Routing"
- ❌ "Passive Income" (scammy)
- ✅ "Your domain works for you"

---
## 7. Technical Prerequisites

| Component | Required | Status |
|------------|----------|--------|
| Own nameservers (ns1/ns2.pounce.ch) | ✅ | New |
| DNS hosting (Cloudflare API or similar) | ✅ | New |
| Landing page CDN | ✅ | New |
| Affiliate network accounts | ✅ | New |
| Payout system (Stripe Connect?) | ✅ | Partial (Stripe exists) |

---
## 8. Summary

### What changes in the UI?

| Area | Change |
|---------|----------|
| **Landing page** | New 4th pillar "YIELD" + link to `/yield` |
| **Public `/yield`** | New landing page with calculator |
| **Terminal sidebar** | New "YIELD" menu item under MANAGE |
| **`/terminal/yield`** | New page: domain list, stats, activation wizard |
| **Watchlist** | Optional: "Activate for Yield" button on owned domains |

### Backend Effort

- 3 new models
- 1 new API router
- 2 new services (intent, DNS)
- partner integrations (Awin, PartnerStack, etc.)

### Priority

**Start with `/terminal/yield` + intent detection + 1 partner.**
The public page and marketplace integration come later.

---

*"Domains are no longer dead assets. They become autonomous agents."*
256
YIELD_SETUP.md
@ -1,256 +0,0 @@
# Pounce Yield - Complete Setup Guide

This guide covers the complete setup of the Yield/Intent Routing feature.

## Overview

Pounce Yield allows users to monetize their parked domains by:
1. Detecting user intent from domain names (e.g., "zahnarzt-zuerich.ch" → Medical/Dental)
2. Routing visitors to relevant affiliate partners
3. Tracking clicks, leads, and sales
4. Splitting revenue 70/30 (user/Pounce)
## Architecture

```
┌─────────────────┐     ┌──────────────────┐     ┌─────────────────┐
│   User Domain   │────▶│   Pounce Yield   │────▶│   Affiliate     │
│ (DNS → Pounce)  │     │  Routing Engine  │     │   Partner       │
└─────────────────┘     └──────────────────┘     └─────────────────┘
                                 │
                                 ▼
                        ┌──────────────────┐
                        │   Transaction    │
                        │    Tracking      │
                        └──────────────────┘
```
## Setup Steps

### 1. Database Setup

The yield tables are created automatically on startup. To apply migrations to an existing database:

```bash
cd backend
python -c "from app.database import init_db; import asyncio; asyncio.run(init_db())"
```
### 2. Seed Affiliate Partners

Populate the affiliate partners with default Swiss/German partners:

```bash
cd backend
python scripts/seed_yield_partners.py
```

This seeds ~30 partners across categories:
- Medical (Dental, General, Beauty)
- Finance (Insurance, Mortgage, Banking)
- Legal
- Real Estate
- Travel
- Automotive
- Jobs
- Education
- Technology/Hosting
- Shopping
- Food/Delivery
### 3. Configure DNS

For yield domains to work, you need to set up DNS infrastructure:

#### Option A: Dedicated Nameservers (Recommended for Scale)

1. Set up two nameserver instances (e.g., `ns1.pounce.ch`, `ns2.pounce.ch`)
2. Run PowerDNS or similar with a backend that queries your yield_domains table
3. Return A records pointing to your yield routing service

#### Option B: CNAME Approach (Simpler)

1. Set up a wildcard SSL certificate for `*.yield.pounce.ch`
2. Configure Nginx/Caddy to handle all incoming hosts
3. Users add CNAME: `@ → yield.pounce.ch`
### 4. Nginx Configuration

For host-based routing, add this to your nginx config:

```nginx
# Yield domain catch-all
server {
    listen 443 ssl http2;
    server_name ~^(?<domain>.+)$;

    # Wildcard cert
    ssl_certificate /etc/ssl/yield.pounce.ch.crt;
    ssl_certificate_key /etc/ssl/yield.pounce.ch.key;

    location / {
        proxy_pass http://backend:8000/api/v1/r/$domain;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}
```
### 5. Partner Integration

Each affiliate partner requires:

1. **Tracking URL Template**: How to pass click IDs to the partner
2. **Webhook URL**: Where the partner sends conversion data back

Update partners in the database or via admin panel:

```sql
UPDATE affiliate_partners
SET tracking_url_template = 'https://partner.com/?clickid={click_id}&ref={domain}'
WHERE slug = 'partner_slug';
```
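The `{click_id}` / `{domain}` placeholders map directly onto Python's `str.format`; a minimal sketch of how the routing engine might render the final redirect URL (the helper name is illustrative):

```python
def render_tracking_url(template: str, click_id: str, domain: str) -> str:
    """Fill a partner's tracking URL template with this click's attribution data."""
    return template.format(click_id=click_id, domain=domain)

# render_tracking_url(
#     "https://partner.com/?clickid={click_id}&ref={domain}",
#     click_id="clk_8f3a",
#     domain="zahnarzt-zuerich.ch",
# )
# -> "https://partner.com/?clickid=clk_8f3a&ref=zahnarzt-zuerich.ch"
```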
### 6. Webhook Configuration

Partners send conversion data to:

```
POST https://api.pounce.ch/api/v1/yield-webhooks/{partner_slug}

{
  "event_type": "lead",
  "domain": "zahnarzt-zuerich.ch",
  "transaction_id": "abc123",
  "amount": 25.00,
  "currency": "CHF"
}
```

For Awin network, use the dedicated endpoint:
```
POST https://api.pounce.ch/api/v1/yield-webhooks/awin/postback
```
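Per the security notes below, these webhook endpoints should verify an HMAC signature; a minimal sketch of that check (the header name and where the secret lives are assumptions, per-partner):

```python
import hashlib
import hmac

def verify_webhook_signature(raw_body: bytes, signature: str, secret: str) -> bool:
    """Compare the partner-supplied signature against our own HMAC-SHA256 digest."""
    expected = hmac.new(secret.encode(), raw_body, hashlib.sha256).hexdigest()
    # compare_digest avoids timing side channels on the comparison
    return hmac.compare_digest(expected, signature)
```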
## API Endpoints

### Public

| Method | Endpoint | Description |
|--------|----------|-------------|
| POST | `/api/v1/yield/analyze?domain=X` | Analyze domain intent (no auth) |
| GET | `/api/v1/yield/partners` | List available partners |

### Authenticated (User)

| Method | Endpoint | Description |
|--------|----------|-------------|
| GET | `/api/v1/yield/dashboard` | User yield dashboard |
| GET | `/api/v1/yield/domains` | List user's yield domains |
| POST | `/api/v1/yield/activate` | Activate a domain |
| POST | `/api/v1/yield/domains/{id}/verify` | Verify DNS setup |
| GET | `/api/v1/yield/transactions` | Transaction history |
| GET | `/api/v1/yield/payouts` | Payout history |

### Routing

| Method | Endpoint | Description |
|--------|----------|-------------|
| GET | `/api/v1/r/{domain}` | Route traffic & track click |
| GET | `/api/v1/r/{domain}?direct=true` | Direct redirect (no landing) |
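A toy sketch of what the `/api/v1/r/{domain}` handler does conceptually (the in-memory dicts stand in for the real lookup and persistence layer; not the production code):

```python
import secrets

from fastapi import FastAPI
from fastapi.responses import RedirectResponse

app = FastAPI()

# Stand-ins for the DB-backed route lookup and click log (assumptions)
ROUTES = {"zahnarzt-zuerich.ch": "https://partner.com/?clickid={click_id}&ref={domain}"}
CLICKS: list[dict] = []

@app.get("/api/v1/r/{domain}")
async def route_domain(domain: str):
    """Resolve the partner route for a domain, record the click, then redirect."""
    template = ROUTES.get(domain)
    if template is None:
        return RedirectResponse("https://pounce.ch", status_code=302)
    click_id = secrets.token_urlsafe(8)
    CLICKS.append({"click_id": click_id, "domain": domain})
    return RedirectResponse(template.format(click_id=click_id, domain=domain), status_code=302)
```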
### Webhooks (Partner → Pounce)

| Method | Endpoint | Description |
|--------|----------|-------------|
| POST | `/api/v1/yield-webhooks/{partner}` | Generic partner webhook |
| POST | `/api/v1/yield-webhooks/awin/postback` | Awin network postback |
| POST | `/api/v1/yield-webhooks/confirm/{tx_id}` | Manual confirmation (internal) |
| POST | `/api/v1/yield-webhooks/batch-import` | Bulk import (internal) |
## Revenue Model

- **Clicks**: Usually CPC (cost per click), CHF 0.10-0.60
- **Leads**: CPL (cost per lead), CHF 15-120
- **Sales**: CPS (cost per sale), 2-10% of sale value

Revenue split (see the sketch below):
- **User**: 70%
- **Pounce**: 30%
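A minimal sketch of the split arithmetic on a confirmed transaction, using `Decimal` to avoid float rounding on money (the function and constant names are illustrative):

```python
from decimal import Decimal, ROUND_HALF_UP

USER_SHARE = Decimal("0.70")

def split_revenue(amount: Decimal) -> tuple[Decimal, Decimal]:
    """Split a confirmed transaction amount 70/30 between user and Pounce."""
    user_part = (amount * USER_SHARE).quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
    pounce_part = amount - user_part  # remainder, so the parts always sum exactly
    return user_part, pounce_part

# split_revenue(Decimal("25.00")) -> (Decimal('17.50'), Decimal('7.50'))
```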
## Intent Categories

The IntentDetector recognizes these categories:

| Category | Subcategories | Example Domains |
|----------|---------------|-----------------|
| medical | dental, general, beauty | zahnarzt.ch, arzt-bern.ch |
| finance | insurance, mortgage, banking | versicherung.ch, hypothek.ch |
| legal | general | anwalt-zuerich.ch |
| realestate | buy, rent | wohnung-mieten.ch |
| travel | flights, hotels | flug-buchen.ch |
| auto | buy, service | autokauf.ch |
| jobs | - | stellenmarkt.ch |
| education | - | kurse-online.ch |
| tech | hosting, software | webhosting.ch |
| shopping | general, fashion | mode-shop.ch |
| food | restaurant, delivery | pizza-lieferung.ch |
## Monitoring

### Metrics

Enable Prometheus metrics:

```env
ENABLE_METRICS=true
```

Key yield metrics:
- `yield_clicks_total{domain, partner}`
- `yield_conversions_total{domain, partner, type}`
- `yield_revenue_total{currency}`
### Alerts

Set up alerts for:
- Webhook failures
- Low conversion rates
- DNS verification failures
- Partner API errors
## Troubleshooting

### Domain not routing

1. Check DNS: `dig +short {domain}`
2. Verify domain status: `SELECT status FROM yield_domains WHERE domain = '{domain}'`
3. Check nginx logs for routing errors

### No conversions

1. Verify partner webhook URL is correct
2. Check webhook logs for incoming calls
3. Validate transaction ID format

### Low revenue

1. Check intent detection: Some domains may be classified as "generic"
2. Review partner matching: Higher-priority partners should be assigned
3. Analyze geo distribution: Swiss visitors convert better
## Security Considerations

- All partner webhooks should use HMAC signature verification
- IP addresses are hashed before storage (privacy)
- User revenue data is isolated by user_id
- Rate limiting on routing endpoint

## Support

For issues with:
- Partner integrations: partners@pounce.ch
- Technical issues: dev@pounce.ch
- Payout questions: finance@pounce.ch
@ -1,307 +0,0 @@
# 🌐 Zone File Access — A Guide to Data Sovereignty

---

## What Are Zone Files?

Zone files are the **master lists** of all registered domains per TLD (top-level domain). They are updated daily by the registries and contain:

- **all active domains** of a TLD
- **nameserver information**
- **no WHOIS data** (just domain + NS)

**Example `.com` zone file (simplified):**
```
example.com.    86400   IN  NS  ns1.example.com.
example.com.    86400   IN  NS  ns2.example.com.
google.com.     86400   IN  NS  ns1.google.com.
...
```
---

## Why Zone Files = Unicorn?

| Advantage | Description |
|---------|--------------|
| **Drop prediction** | Domains that disappear from the zone will drop in 1-5 days |
| **Exclusive intel** | These domains are NOT yet in auctions |
| **Ahead of the competition** | Place backorders before anyone else knows |
| **Trend analysis** | Which keywords are being registered right now? |
| **Data monopoly** | Filtered, clean data vs. the spam flood from ExpiredDomains |
---

## Registries and Access

### Tier 1: Critical TLDs (apply immediately)

| Registry | TLDs | Domains | Link |
|----------|------|---------|------|
| **Verisign** | `.com`, `.net` | ~160M + 13M | [Zone File Access](https://www.verisign.com/en_US/channel-resources/domain-registry-products/zone-file/index.xhtml) |
| **PIR** | `.org` | ~10M | [Zone File Access Program](https://tld.org/zone-file-access/) |
| **Afilias** | `.info` | ~4M | Contact: registry@afilias.info |

### Tier 2: Premium TLDs (Phase 2)

| Registry | TLDs | Focus |
|----------|------|-------|
| **CentralNIC** | `.io`, `.co` | Startups |
| **Google** | `.app`, `.dev` | Tech |
| **Donuts** | `.xyz`, `.online`, etc. | Volume |
| **SWITCH** | `.ch` | Swiss market |

---
## Application Process: Verisign (.com/.net)

### 1. Prerequisites

- a valid company/organization
- technical infrastructure for large data volumes (~500GB/day)
- acceptance of the terms of use (no resale of the raw data)

### 2. Online Application

1. Go to: https://www.verisign.com/en_US/channel-resources/domain-registry-products/zone-file/index.xhtml
2. Click "Request Zone File Access"
3. Fill out the form:
   - **Organization Name:** GenTwo AG
   - **Purpose:** Domain research and analytics platform
   - **Contact:** (technical contact person)

### 3. Waiting Period

- **Review:** 1-4 weeks
- **Approval:** by email, with FTP/HTTPS credentials

### 4. Costs

- **Verisign:** free for non-commercial/research purposes
- **Commercial use:** $10,000/year (negotiable)

---
## Technical Integration

### Server Requirements

```yaml
# Minimal infrastructure
CPU: 16+ cores (parallel processing)
RAM: 64GB+ (efficient set diffing)
Storage: 2TB SSD (zone files + history)
Network: 1Gbps (fast downloads)

# Estimated costs
Provider: Hetzner/OVH Dedicated
Price: ~$300-500/month
```
### Processing Pipeline

```
04:00 UTC │ Zone file download (FTP/HTTPS)
          │ └─→ ~500GB compressed for .com/.net
          │
04:30 UTC │ Decompression & parsing
          │ └─→ extract domain names
          │
05:00 UTC │ Diff analysis
          │ └─→ compare with yesterday
          │     └─→ NEW: new registrations
          │     └─→ GONE: potential drops
          │
05:30 UTC │ Quality scoring (Pounce Algorithm)
          │ └─→ filter out spam (99%+)
          │     └─→ let only premium domains through
          │
06:00 UTC │ Database update
          │ └─→ PostgreSQL: pounce_zone_drops
          │
06:15 UTC │ Alert matching
          │ └─→ trigger sniper alerts
          │
06:30 UTC │ User notifications
          │ └─→ email/SMS for Tycoon users
```
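For the parsing step, a rough sketch that collects domain names from NS lines in the format shown in the example above (a real parser also needs `$ORIGIN`/`$TTL` handling, comments, and other record types):

```python
import gzip

def extract_domains(zone_path: str) -> set[str]:
    """Collect unique domain names from a gzipped zone file's NS records."""
    domains: set[str] = set()
    with gzip.open(zone_path, "rt", encoding="ascii", errors="ignore") as fh:
        for line in fh:
            parts = line.split()
            # typical zone line: "example.com. 86400 IN NS ns1.example.com."
            if len(parts) >= 5 and parts[2].upper() == "IN" and parts[3].upper() == "NS":
                domains.add(parts[0].rstrip(".").lower())
    return domains
```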
### Database Schema (planned)

```sql
-- Zone file drops
CREATE TABLE pounce_zone_drops (
    id SERIAL PRIMARY KEY,
    domain VARCHAR(255) NOT NULL,
    tld VARCHAR(20) NOT NULL,

    -- Analysis
    pounce_score INT NOT NULL,
    estimated_value DECIMAL(10,2),

    -- Status
    detected_at TIMESTAMP DEFAULT NOW(),
    estimated_drop_date TIMESTAMP,
    status VARCHAR(20) DEFAULT 'pending',  -- pending, dropped, backordered, registered

    -- Tracking
    notified_users INT DEFAULT 0,
    backorder_count INT DEFAULT 0,

    UNIQUE(domain)
);

-- Indexes for fast lookups
CREATE INDEX idx_zone_drops_score ON pounce_zone_drops(pounce_score DESC);
CREATE INDEX idx_zone_drops_date ON pounce_zone_drops(estimated_drop_date);
```

---
## The Pounce Algorithm — Zone File Edition

```python
# backend/app/services/zone_analyzer.py (TO BE BUILT)

class ZoneFileAnalyzer:
    """
    Analyzes zone files and finds premium opportunities.

    Input: raw zone file (millions of domains)
    Output: filtered premium list (hundreds)
    """

    async def analyze_drops(self, yesterday: set, today: set) -> list:
        """
        Finds domains that disappeared from the zone.
        These domains will drop in 1-5 days (redemption period).
        """
        dropped = yesterday - today  # set difference

        premium_drops = []
        for domain in dropped:
            score = self.calculate_pounce_score(domain)

            # Only let premium through (score >= 70)
            if score >= 70:
                premium_drops.append({
                    "domain": domain,
                    "score": score,
                    "drop_date": self.estimate_drop_date(domain),
                    "estimated_value": self.estimate_value(domain),
                })

        return sorted(premium_drops, key=lambda x: x['score'], reverse=True)

    def calculate_pounce_score(self, domain: str) -> int:
        """
        The Pounce Algorithm — a quality filter for domains.

        Factors:
        - length (short = valuable)
        - TLD (com > io > xyz)
        - no digits/hyphens
        - dictionary word bonus
        """
        name = domain.rsplit('.', 1)[0]
        tld = domain.rsplit('.', 1)[1]
        score = 50  # baseline

        # Length score (exponential for short domains)
        length_scores = {1: 50, 2: 45, 3: 40, 4: 30, 5: 20, 6: 15, 7: 10}
        score += length_scores.get(len(name), max(0, 15 - len(name)))

        # TLD premium
        tld_scores = {'com': 20, 'ai': 25, 'io': 18, 'co': 12, 'ch': 15, 'de': 10}
        score += tld_scores.get(tld, 0)

        # Penalties
        if '-' in name: score -= 30
        if any(c.isdigit() for c in name): score -= 20
        if len(name) > 12: score -= 15

        return max(0, min(100, score))
```
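Tracing the formula by hand (note that `estimate_drop_date` and `estimate_value` are still to be built, so only the scorer is runnable here):

```python
analyzer = ZoneFileAnalyzer()

# "pixel": 50 baseline + 20 (5-letter name) + 20 (.com) = 90 -> passes the >= 70 filter
print(analyzer.calculate_pounce_score("pixel.com"))              # 90

# "kredit-24-online": 50 + 0 (16 chars, .info) - 30 - 20 - 15 = -15 -> clamped to 0
print(analyzer.calculate_pounce_score("kredit-24-online.info"))  # 0
```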
---
## Feature: "Drops Tomorrow" (Tycoon Exclusive)
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────────────────────────────────────────────────────┐
|
|
||||||
│ 🔮 DROPS TOMORROW — Tycoon Exclusive ($29/mo) │
|
|
||||||
├─────────────────────────────────────────────────────────────────┤
|
|
||||||
│ │
|
|
||||||
│ Diese Domains sind NICHT in Auktionen! │
|
|
||||||
│ Du kannst sie beim Registrar direkt registrieren. │
|
|
||||||
│ │
|
|
||||||
│ ───────────────────────────────────────────────────────────── │
|
|
||||||
│ │
|
|
||||||
│ Domain TLD Score Est. Value Drops In │
|
|
||||||
│ ───────────────────────────────────────────────────────────── │
|
|
||||||
│ pixel.com .com 95 $50,000 23h 45m │
|
|
||||||
│ swift.io .io 88 $8,000 23h 12m │
|
|
||||||
│ quantum.ai .ai 92 $25,000 22h 58m │
|
|
||||||
│ nexus.dev .dev 84 $4,500 22h 30m │
|
|
||||||
│ fusion.co .co 81 $3,200 21h 15m │
|
|
||||||
│ │
|
|
||||||
│ ───────────────────────────────────────────────────────────── │
|
|
||||||
│ │
|
|
||||||
│ 💡 Pro Tip: Setze bei deinem Registrar einen Backorder │
|
|
||||||
│ für diese Domains. Wer zuerst kommt... │
|
|
||||||
│ │
|
|
||||||
│ [🔔 Alert für "pixel.com" setzen] │
|
|
||||||
│ │
|
|
||||||
└─────────────────────────────────────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
---

## Roadmap

### Phase 1: Now (application)
- [ ] Apply for Verisign zone file access
- [ ] Apply for PIR (.org) zone file access
- [ ] Plan the server infrastructure

### Phase 2: 3-6 months (integration)
- [ ] Build the download pipeline
- [ ] Implement diff analysis
- [ ] Test the Pounce Algorithm
- [ ] "Drops Tomorrow" feature for Tycoon

### Phase 3: 6-12 months (scaling)
- [ ] More TLDs (.io, .co, .ch, .de)
- [ ] Historical trend analysis
- [ ] Keyword tracking
- [ ] Enterprise features

---
## Risks and Mitigation

| Risk | Likelihood | Mitigation |
|--------|-------------------|-------------|
| Rejection by a registry | Medium | Clear business case, partnerships if needed |
| High server costs | Low | Cloud scaling, premium TLDs only |
| Competitors copy us | Medium | First-mover advantage, better algorithm |
| Data quality | Low | Multiple sources, validation |

---
## Next Step

**Action items for this week:**

1. **Apply at Verisign:** https://www.verisign.com/en_US/channel-resources/domain-registry-products/zone-file/index.xhtml
2. **Email PIR:** zone-file-access@pir.org
3. **Reserve a server at Hetzner:** AX101 Dedicated (~€60/month)

---

## Summary

Zone files are the **key to data sovereignty**. While the competition relies on scraping, we will have the raw data straight from the source, filtered with the Pounce Algorithm so that only premium opportunities reach our users.

**This is the unicorn driver.** 🦄
173
analysis_1.md
@ -1,173 +0,0 @@
This is a huge step forward! 🚀

The pages now feel coherent and professional, with clear psychological guidance (Hook -> Value -> Gate -> Sign Up). The switch to **$9 for the entry tier** (Trader) in particular is smart: that is a "no-brainer" price for impulse purchases.

Here is my feedback on the individual pages, focused on conversion and UX:

---
### 1. Navigation & Globales Layout
|
|
||||||
Die Navigation ist **perfekt minimalistisch**.
|
|
||||||
* `Market | TLD Intel | Pricing` – Das sind genau die drei Säulen.
|
|
||||||
* **Vorschlag:** Ich würde "Market" eventuell in **"Auctions"** oder **"Live Market"** umbenennen. "Market" ist etwas vage. "Auctions" triggert eher das Gefühl "Hier gibt es Schnäppchen".
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 2. Landing Page
**The strong parts:**
* The headline *"The market never sleeps. You should."* is world-class.
* The ticker with live prices instantly creates FOMO (fear of missing out).
* The "TLD Intelligence" section with "Sign in to view" overlays on the data is an **excellent conversion driver**. The user sees that the data *exists* but has to sign up (for free) to see it. That is the perfect account-creation bait.

**Criticism / to-do:**
* **The search focus:** You write *"Try dream.com..."*, but visually there needs to be a **huge input field** there. It has to be the dominant element.
* **The ticker:** Make sure the ticker runs cleanly (marquee/scrolling). In the copy above, the list repeats statically; on the live page it has to flow.

---
### 3. Market / Auctions Page (IMPORTANT!)
This is where I see the **biggest risk**.
Your concept ("Unlock Smart Opportunities") is great. But the **sample data** you show on the public page is dangerous.

**The problem:**
Your list contains things like:
* `fgagtqjisqxyoyjrjfizxshtw.xyz`
* `52gao1588.cc`
* `professional-packing-services...website`

When a new user sees that, they think: **"This is a spam site full of junk."** They will not sign up.

**The solution (the "vanity filter"):**
For the **public (logged-out) page**, you need to build a hard filter into the code. Show logged-out users **ONLY** domains that look good (a sketch of the filter follows the rules below).
* Rule 1: no digits (except in short domains).
* Rule 2: no hyphens.
* Rule 3: length < 12 characters.
* Rule 4: only .com, .io, .ai, .co, .de, .ch (no .cc or .website spam clusters).
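A minimal sketch of this vanity filter (the "short domain" cutoff in rule 1 is my assumption; tune it to taste):

```python
import re

ALLOWED_TLDS = {"com", "io", "ai", "co", "de", "ch"}

def is_vanity_domain(domain: str) -> bool:
    """Hard filter for the public (logged-out) auction feed."""
    name, _, tld = domain.lower().rpartition(".")
    if tld not in ALLOWED_TLDS:                    # rule 4: no spam-cluster TLDs
        return False
    if "-" in name:                                # rule 2: no hyphens
        return False
    if len(name) >= 12:                            # rule 3: length < 12 characters
        return False
    if re.search(r"\d", name) and len(name) > 4:   # rule 1: digits only in short names
        return False
    return True

# is_vanity_domain("nexus.io")                  -> True
# is_vanity_domain("52gao1588.cc")              -> False (TLD + digits)
# is_vanity_domain("professional-packing.com")  -> False (hyphen + length)
```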
**Why?**
The user should think: "Wow, there are premium domains here, like `nexus.dev`." They must not see the junk before they are logged in (and even then you should filter the junk, as we discussed).

---
### 4. TLD Pricing Page
**Very well done.**
* The "Moving Now" cards at the top (.ai +35%) are the hook.
* Locking the table below behind "Sign in" (blur effect or lock icon) is exactly right.
* The user gets enough info ("Aha, .com is popular"), but for the details ("Is the trend rising?") they have to become a 'Scout'.

---
### 5. Pricing Page
The new structure with **Scout (Free) / Trader ($9) / Tycoon ($29)** is much better than the old $19 model.

**Optimizing the table:**
You need to make the difference between **Scout** and **Trader** even sharper so that people actually pay the $9.

| Feature | Scout (Free) | Trader ($9) | Why upgrade? |
| :--- | :--- | :--- | :--- |
| **Auctions** | Raw feed (unfiltered) | **Smart clean feed** | *"I don't want to see the spam."* |
| **Data** | Prices only | **Valuation & deal score** | *"I want to know if it's a bargain."* |
| **Updates** | Daily | **Hourly** | *"I want to be faster than everyone else."* |

**Important:** Explicitly add **"Spam Filters"** or **"Curated List"** to the "Trader" column. That is time saved, and people pay for that.

---
### Zusammenfassung & Tone of Voice
|
|
||||||
|
|
||||||
Der Tone of Voice ist jetzt konsistent: **Analytisch, Knapp, Strategisch.**
|
|
||||||
|
|
||||||
* *Alt:* "Jage Domains." (Bisschen spielerisch)
|
|
||||||
* *Neu:* "Don't guess. Know." (Professionell, B2B-tauglich)
|
|
||||||
|
|
||||||
**Letzter Check vor dem Launch:**
|
|
||||||
1. **Mobile View:** Prüfe die riesige Tabelle auf dem Handy. Wahrscheinlich musst du auf Mobile Spalten ausblenden (z.B. nur Domain + Preis + Button zeigen).
|
|
||||||
2. **Der Filter:** Bitte, bitte filtere die `fgagtqjis...xyz` Domains auf der Startseite raus. Das ist der wichtigste Punkt für den ersten Eindruck.
|
|
||||||
|
|
||||||
Das sieht nach einem Produkt aus, für das ich meine Kreditkarte zücken würde. Gute Arbeit!
Here is the complete **master summary** for `pounce.ch`. This is your blueprint for the build.

---

### 1. Vision & Positioning

**Name:** Pounce

**Tagline:** *Domain Intelligence for Hunters.*

**Slogan:** *"Don't guess. Know."*

**Concept:** Pounce is the "Bloomberg Terminal" for domains. It turns the messy, noisy domain market into clear, actionable data. It targets people who don't want to search — they want to find.

* **Target audience:**

  * **Dreamers (founders):** looking for the perfect name for their project.

  * **Hunters (investors/traders):** looking for undervalued assets for arbitrage (buy low, sell high).

---

### 2. The 3 Product Pillars (the "Command Center")

The product maps logically onto three phases of domain acquisition:

#### A. DISCOVER (market intelligence)

*The "honeypot" that attracts users (SEO & traffic).*

* **TLD Intel:** shows market trends (e.g. `.ai` up 35%).

* **Smart Search:** if a domain is taken, Pounce shows **intelligent alternatives** (e.g. `.io` for tech, `.shop` for e-commerce) instead of random endings.

* **The hook:** public visitors see trends, but details (charts, history) are hidden ("Sign in to view").

#### B. TRACK (the watchlist)

*The retention tool.*

* **Function:** monitoring of *taken* domains.

* **The USP:** not just "available/taken" but **"pre-drop indicators"**: warnings on DNS changes or when the website goes offline. That gives the user a head start on the competition.

#### C. ACQUIRE (the auction aggregator)

*The main reason to upgrade.*

* **Function:** aggregates live auctions from GoDaddy, Sedo, NameJet & DropCatch in one place.

* **The killer feature (spam filter):**

  * *Free user:* sees everything (including "junk" domains like `kredit-24-online.info`).

  * *Paid user:* sees a **curated feed**. The algorithm filters out digits, hyphens and spam; only high-quality investment opportunities remain.

---

### 3. The Business Model (Pricing)

The model is "freemium with gates". The $9 price point is a no-brainer (impulse buy) that keeps the barrier low.

| Plan | Price | Audience | Key features | The "pain" (why upgrade?) |
| :--- | :--- | :--- | :--- | :--- |
| **SCOUT** | **$0** | The curious | 5 watchlist domains, raw auction feed, basic search. | Has to wade through spam, sees no valuations, slow alerts. |
| **TRADER** | **$9** | Hobby investors | 50 watchlist domains, **spam-free feed**, deal scores (valuations), hourly checks. | Pays for time saved (filters) and confidence (valuations). |
| **TYCOON** | **$29** | Professionals | 500 domains, near-real-time checks (every 10 min), API access (planned). | Needs volume and speed. |

---

### 4. UX/UI & Tone of Voice

* **Design philosophy:** "Dark Mode & Data".

  * A dark background (black/gray) reads as professional, like trading software.

  * Accent colors: neon green (for "available" / "profit") and warning orange.

  * Little text, many data points, clean tables.

* **Tone of voice:**

  * Terse, precise, strategic.

  * No marketing fluff.

  * *Example:* instead of "We have lots of great features" → "Three moves to dominate."

---

### 5. The User Journey (the "Golden Path")

1. **Entry:** The user googles "domain prices .ai" and lands on your **TLD Intel page**.

2. **Hook:** They see "`.ai` +35%" and want the details. The table is blurred. Button: *"Sign in to view details"*.

3. **Registration:** They create a free account ("Scout").

4. **Realization:** They browse the auctions and spot an interesting domain, but don't know whether the price is good. Next to the price: *"Valuation locked"*.

5. **Upgrade:** They see the offer: "For just $9/month you see the real value and we filter out the junk for you."

6. **Purchase:** They subscribe to the "Trader" plan.

---

### Summary for the Developer (Tech Stack Requirements)

* **Frontend:** must be extremely fast (responsive search) and mobile-friendly (tables must stay readable on a phone or collapse gracefully).

* **Data integration:** APIs to GoDaddy, Sedo etc., or scraping, for the auction data.

* **Logic:**

  * **Filter algorithm:** the most important part! (Rules: no digits, max. 2 hyphens, dictionary matching.)

  * **Alert system:** cron jobs for email/SMS notifications.

The concept is now complete, coherent and ready to build. Good luck with **Pounce**! 🚀
166
analysis_3.md
@ -1,166 +0,0 @@
To cut churn and raise revenue per customer (LTV, lifetime value), you have to change the user's mindset:

**From:** *"I use Pounce to **find** a domain."* (a one-off project)

**To:** *"I use Pounce to **run** my domain business."* (an ongoing process)

If Pounce is only a "search tool", people cancel as soon as they've found what they wanted. If Pounce becomes their "operating system", they stay forever.

Here are 4 strategies to make Pounce indispensable:

---

### 1. Strategy: From "Hunter" to "Guardian" (Portfolio Monitoring)

*Goal: retain the user even when they aren't buying anything right now.*

Many domainers and agencies already own 50–500 domains. They are afraid of missing a renewal or overlooking technical faults.

* **The feature:** **"My Portfolio Health"**

  The user imports their *own* domains into Pounce (not to buy them, but to manage them).

  * **Uptime monitor:** is my site still online?

  * **SSL monitor:** is my certificate about to expire?

  * **Expiration alert:** remind me 30 days before expiry (better than the registrars' spam mails).

  * **Blacklist check:** has my domain landed on a spam list?

* **The lock-in effect:**

  Nobody cancels the tool that watches over their assets ("insurance psychology"). Once you monitor their 50 domains, you are indispensable. A sketch of the two simplest checks follows.
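A minimal sketch of the two cheapest checks (uptime and SSL expiry) using only the Python standard library; the hostname and the 14-day threshold are illustrative:

```python
import socket
import ssl
from datetime import datetime, timezone

def ssl_days_remaining(hostname: str, port: int = 443) -> int:
    """Days until the TLS certificate presented by `hostname` expires."""
    ctx = ssl.create_default_context()
    with ctx.wrap_socket(socket.create_connection((hostname, port), timeout=5),
                         server_hostname=hostname) as sock:
        cert = sock.getpeercert()
    expires = datetime.fromtimestamp(ssl.cert_time_to_seconds(cert["notAfter"]),
                                     tz=timezone.utc)
    return (expires - datetime.now(timezone.utc)).days

def is_up(hostname: str, port: int = 443) -> bool:
    """Crude uptime probe: can we open a TCP connection at all?"""
    try:
        socket.create_connection((hostname, port), timeout=5).close()
        return True
    except OSError:
        return False

for domain in ["example.com"]:  # the user's imported portfolio
    if not is_up(domain):
        print(f"ALERT {domain}: unreachable")
    elif ssl_days_remaining(domain) < 14:
        print(f"ALERT {domain}: certificate expires in under two weeks")
```

Expiration and blacklist checks would hang off the same cron loop, via WHOIS lookups and the blacklist services described later.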
### 2. Strategy: The "Micro-Marketplace" (Liquidity)

*Goal: more revenue through transactions.*

When a "hunter" finds a domain via Pounce, they often want to resell it later (flipping). Right now you send them away to Sedo for that. Why not keep it in-house?

* **The feature:** **"Pounce 'For Sale' Landing Pages"**

  A user (Trader/Tycoon) can create a polished sales page for any of their domains with one click.

  * *Domain:* `super-startup.ai`

  * *Pounce generates:* `pounce.ch/buy/super-startup-ai`

  * *Design:* high-end; shows your valuation data (Pounce Score) to justify the price.

  * *Contact:* a simple contact form that routes the inquiry straight to the user.

* **The money:**

  * Either part of the subscription ("create 5 sales pages for free").

  * Or: you take no commission, but the buyer must register with Pounce to contact the seller (lead gen).

### 3. Strategy: SEO Data & Backlinks (New Audience)

*Goal: win high-budget customers (agencies).*

SEO agencies almost never cancel, because they have monthly tool budgets. They don't hunt domains for the name but for the **power** (backlinks).

* **The feature:** **"SEO Juice Detector"**

  When a domain drops, you check not only the name but also (via cheap APIs such as Moz, or by scraping public data) whether backlinks exist.

  * *Display:* "Domain `alte-bäckerei-münchen.de` is available. Has links from `sueddeutsche.de` and `wikipedia.org`."

* **The value:** such domains are worth €100–€500 to SEOs even when the name is ugly.

* **Monetization:** this is a pure **Tycoon feature ($29 or even $49/month)**.

### 4. Strategy: Made-to-Measure Alerts (Hyper-Personalization)

*Goal: bring the user back daily.*

If all I get is an email saying "here are 100 new domains", that is usually spam to me. I only want *exactly* what I'm looking for.

* **The feature:** **"Sniper Alerts"**

  The user can save extremely specific filters (see the sketch after this list):

  * *"Notify me ONLY when a 4-letter .com domain drops that contains no 'q' or 'x'."*

  * *"Notify me when a .ch domain drops that contains the word 'Immo'."*

* **The effect:** when the SMS/email arrives, the user knows: "This is relevant." They click, log in, stay active.
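One way to model such saved filters: a small rule object compiled into a predicate and evaluated against every dropping domain. The field names are illustrative, not an existing schema:

```python
from dataclasses import dataclass

@dataclass
class SniperRule:
    tld: str                          # e.g. "com" or "ch"
    exact_length: int | None = None   # e.g. 4 for "4-letter .com"
    contains: str | None = None       # e.g. "immo"
    banned_letters: str = ""          # e.g. "qx"

    def matches(self, domain: str) -> bool:
        name, _, tld = domain.lower().rpartition(".")
        if tld != self.tld:
            return False
        if self.exact_length is not None and len(name) != self.exact_length:
            return False
        if self.contains is not None and self.contains not in name:
            return False
        return not any(ch in name for ch in self.banned_letters)

# The two example alerts from above:
rules = [
    SniperRule(tld="com", exact_length=4, banned_letters="qx"),
    SniperRule(tld="ch", contains="immo"),
]
for drop in ["immozug.ch", "qbzx.com", "dash.com"]:
    if any(rule.matches(drop) for rule in rules):
        print(f"Sniper alert: {drop}")   # fires for immozug.ch and dash.com
```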
---

### Summary of the Extended Business Model

This is what your revenue machine looks like:

| Stage | What the user does | Why they stay (retention) | Your revenue |
| :--- | :--- | :--- | :--- |
| **Phase 1: Finding** | Searches for available/dropping domains. | Finds better deals thanks to the spam filter. | $9 / month |
| **Phase 2: Monitoring** | Watches competitors & their own domains. | Fear of missing status changes (insurance). | Churn drops sharply. |
| **Phase 3: Selling** | Creates sales landers via Pounce. | Uses Pounce as the storefront for their business. | User is locked in. |
| **Phase 4: SEO** | Hunts backlink monsters. | Makes money with your data (ROI). | $29–$49 / month |

### My tip for the start:

Focus on **Strategy 1 (Portfolio Monitoring)** as the first add-on feature after launch.

Why?

It is technically simple (you already built the ping scripts for the analysis). You simply let the user add domains *manually*.

Once a user has entered 50 of their own domains, they will **never cancel** their subscription, because they would lose their monitoring. That is the ultimate golden handcuff.

Trust really is the **hardest currency** in the domain business. The industry is unfortunately full of scammers (domain theft, phishing, fake auctions).

If `pounce.ch` is to be perceived as a "command center", the platform must be **cleaner than the rest**.

Here is a **4-pillar security concept** that prevents abuse and, at the same time, builds massive trust with your legitimate users.

---

### Pillar 1: Identity Verification (Who are you?)

*Hurdle: scammers hate identification.*

You must not hand "Tycoon" features (and especially selling features) to anyone who merely has an email address.

1. **Stripe Identity / Radar:**

   Use Stripe for payments. Stripe has built-in fraud detection ("Radar"). If someone uses a stolen credit card, Stripe usually blocks them immediately. That is your first firewall.

2. **SMS verification (2FA):**

   Every account that wants to sell or monitor domains must **verify a mobile number**. Throwaway (VoIP) numbers are blocked. That raises the bar for spammers massively.

3. **LinkedIn login (optional, for trust):**

   Offer: "Connect your LinkedIn for 'Verified Professional' status." A profile with 500+ connections and history is rarely fake.

---

### Pillar 2: Asset Verification (Do you really own this?)

*Hurdle: prevent people from claiming domains they don't own.*

This is the most important point if you offer features like portfolio monitoring or "For Sale" pages.

**The technical solution: DNS ownership verification**

Before a user can add a domain to their portfolio in order to sell it or analyze it in depth, they must prove they are the admin.

* **How it works** (a verification sketch follows):

  1. The user adds `mein-startup.ch`.

  2. Pounce says: "Please create a TXT record in your DNS settings with the content `pounce-verification=847392`."

  3. Your system checks the record.

  4. Only if it is there -> **Domain Verified ✅**.

*This is the industry standard (Google does the same). Whoever has no DNS access cannot claim the domain.*
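A sketch of the server-side check, here with dnspython (an assumption — any resolver library works); the record name and code format follow the flow above:

```python
import dns.exception
import dns.resolver  # pip install dnspython

def is_dns_verified(domain: str, expected_code: str) -> bool:
    """True if `domain` publishes the expected pounce-verification TXT record."""
    try:
        answers = dns.resolver.resolve(domain, "TXT")
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.exception.Timeout):
        return False
    for rdata in answers:
        txt = b"".join(rdata.strings).decode("utf-8", "replace")
        if txt.strip() == f"pounce-verification={expected_code}":
            return True
    return False

# Step 3 of the flow: poll until the owner has set the record.
if is_dns_verified("mein-startup.ch", "847392"):
    print("Domain Verified ✅")
```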
---

### Pillar 3: Content Monitoring (What are you doing with it?)

*Hurdle: prevent your "For Sale" pages from being used for phishing.*

If users can create sales pages ("landers") via Pounce, they could try to harvest banking credentials there.

1. **Automatic blacklist scan:**

   Every domain entering the system is immediately checked against **Google Safe Browsing** and **Spamhaus**. Listed there as "malware"? -> **Instant ban.**

2. **Keyword blocking:**

   Allow no titles or copy on sales pages containing words such as "Login", "Bank", "Verify", "Paypal", "Password". (A sketch of this gate follows.)

3. **No custom HTML:**

   Allow users *no* custom HTML/JavaScript on their sales pages — only text and predefined buttons. That way they cannot inject malicious code.
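The keyword gate from point 2 is only a few lines; the blocklist mirrors the words above and would grow over time:

```python
import re

BLOCKED_WORDS = {"login", "bank", "verify", "paypal", "password"}

def is_lander_copy_allowed(text: str) -> bool:
    """Reject sale-page titles/copy containing phishing-typical keywords."""
    words = set(re.findall(r"[a-z]+", text.lower()))
    return words.isdisjoint(BLOCKED_WORDS)

print(is_lander_copy_allowed("Premium domain for your next startup"))  # True
print(is_lander_copy_allowed("Verify your PayPal login here"))         # False
```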
---

### Pillar 4: The "Safe Harbor" Badge (Marketing)

*Benefit: you turn the strictness into a selling point.*

Communicate this rigor not as a "nuisance" but as a **mark of quality**.

* **The "Pounce Verified" seal:**

  On every sales page and in every profile you display:

  * ✅ **ID Verified** (phone/payment checked)

  * ✅ **Owner Verified** (DNS checked)

  * ✅ **Clean History** (no spam reports)

---

### Handling Violations ("Zero Tolerance")

You need clear Terms of Service:

1. **One-strike policy:** anyone attempting phishing or offering stolen domains is banned permanently and immediately. No discussion.

2. **Reporting button:** give the community power. A "Report Abuse" button on every page. If 2–3 independent users report something, the asset is taken offline automatically until you have reviewed it.

### Summary: The "Trust Stack"

| Layer | Measure | Effect |
| :--- | :--- | :--- |
| **Login** | SMS / 2FA + Stripe Radar | Keeps bots and card fraudsters out. |
| **Portfolio** | **DNS TXT record (mandatory)** | Only the real owner can manage domains. |
| **Marketplace** | Google Safe Browsing check | Prevents malware/phishing on your platform. |
| **Frontend** | "Verified Owner" badge | Buyers know: this is safe. |

**This positions Pounce as the "safe space" in the Wild West of domain trading.** For serious investors that often matters more than price.
149
analysis_4.md
@ -1,149 +0,0 @@
Your TLD pricing page is a good start, but for an **"intelligence platform"** it is still too much of a plain "list".

The problem: you only show the **status quo** (the current price).

A "hunter", however, wants to know: **"Where's the catch?"** and **"Where's the margin?"**

Here are the concrete optimizations that take this page from "nice" to **"indispensable"**.

---

### 1. Solve the "Hidden Cost" Problem (Killer Feature)

The biggest pain point with domains is **renewal prices**. Many TLDs bait with $1.99 for the first year and then charge $50.

* **Current state:** you show a single price (presumably registration).

* **Optimization:** split the price column.

  * Column A: **Buy Now** (e.g. $1.99)

  * Column B: **Renews at** (e.g. $49.00)

* **Pounce alert:** if the difference is > 200%, mark it with a small warning triangle ⚠️ ("Trap Alert"). That builds massive trust.

### 2. Visual Sparklines Instead of Bare Numbers

In the "12-Month Trend" column you currently show two numbers (`$10.75` -> `$9.58`). The brain has to do the math first.

* **Optimization:** replace the numbers with a **mini chart (sparkline)**.

  * A small green or red line showing the trajectory.

  * It instantly reads like a trading terminal (Bloomberg style).

  * *Example:* `.ai` has a steeply rising curve 📈; `.xyz` has a flat line.

### 3. An "Arbitrage" Column (the "Hunter" Factor)

You have access to multiple registrars. Show the price spread!

* **Optimization:** add a **"Spread"** or **"Arbitrage"** column.

  * *"Low: $60 (Namecheap) – High: $90 (GoDaddy)"*

  * Show the user: *"You save $30 here if you pick the right provider."*

  * This is the perfect spot for your affiliate link ("Buy at lowest price").

### 4. Smart Filters (UX)

886 TLDs are too many to scroll through. Your "Discovery" section at the top is good, but the table needs **tabs**.

* **Suggested tabs above the table:**

  * **[All]**

  * **[Tech]** (.ai, .io, .app, .dev)

  * **[Geo]** (.ch, .de, .uk, .nyc)

  * **[Budget]** (everything under $5)

  * **[Premium]** (everything over $100)

---

### Visual Draft (Table Mockup)

Here is how the table should look in the **Command Center**:

| TLD | Trend (12m) | Buy (1y) | Renew (1y) | Spread | Pounce Intel |
| :--- | :--- | :--- | :--- | :--- | :--- |
| **.ai** | 📈 *(sparkline)* | **$71.63** | $71.63 | $15.00 | 🔥 High Demand |
| **.xyz** | 📉 *(sparkline)* | **$0.99** | $13.99 | ⚠️ | 🚩 Renewal Trap |
| **.io** | ➖ *(sparkline)* | **$32.00** | $32.00 | $4.50 | ✅ Stable Asset |
| **.ch** | ➖ *(sparkline)* | **$11.56** | $11.56 | $1.20 | 🛡️ Trust Signal |

---

### 5. Conversion Elements (Psychology)

* **The "login" lock:**

  Leave the first 3–5 rows (.com, .net, .ai) **fully visible**.

  From row 6 on, apply a **blur effect** to the "Renew" and "Trend" columns.

  * *CTA:* "Stop overpaying via GoDaddy. Unlock renewal prices & arbitrage data for 800+ TLDs. [Start Free]"

* **Data tooltips:**

  Hovering over `.ai` shows a small popup:

  *"Price up +35%, driven by the AI boom. Recommended registrar: Dynadot ($69)."*

### To-Do Summary:

1. **Add a renew column:** mandatory for transparency.

2. **Build in sparklines:** makes the page look far more polished.

3. **Category tabs:** easier navigation.

4. **Use the blur effect strategically:** give data ("teasers") but hide the gold (trends & renewals).

That turns the page from a mere price list into a real **investment tool**.

You are absolutely right. "Arbitrage" is the wrong term when the topic is not direct buying and selling (trading) but registration. And you want to focus on the **price development of the ending itself** (inflation, registry price increases).

So we rebuild the page from a "trading tool" into an **"inflation & market monitor"**. The user should see: *Is this ending getting more expensive or cheaper? Is it worth renewing 10 years ahead right now?*

Here is the corrected concept for the **TLD Pricing & Trends optimization**:

### 1. The New Core Concept: "Inflation Monitor"

Instead of "arbitrage" we show **"price stability"**.

Registries (like Verisign for .com) raise prices regularly. Your tool warns about it.

* **The new column:** **"Volatility / Stability"**

* **The values:**

  * **Stable:** price unchanged for 2 years (e.g. .ch).

  * **Rising:** the registry has raised prices (e.g. .com often rises ~7% per year).

  * **Promo-driven:** the price swings wildly (common with .xyz or .store, which cost $0.99 one day and $10 the next).

### 2. Price Trend Visualization (Your Requirement)

You want to show how the price of the *ending* itself has changed.

* **The visualization:** instead of a simple sparkline, show (in detail for pro users, simplified for free users) the **"Wholesale Price History"**.

* **The table columns:**

  * **Current Price:** $71.63

  * **1y Change:** **+12% 📈** (the decisive indicator!)

  * **3y Change:** **+35%**

### 3. The "Renewal Trap" Feature (Trust)

This stays extremely important. A domain is never truly yours — you rent it. The rent (renewal) matters more than the entry price.

* **Logic:**

  * Registration: $1.99

  * Renewal: $45.00

* **Pounce Index:** display a ratio (a sketch follows this list).

  * *Ratio 1.0:* fair (reg = renew).

  * *Ratio 20.0:* trap (cheap reg, expensive renew).
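The index itself is just renewal over registration price; a sketch with the example numbers (the label thresholds are illustrative, not calibrated):

```python
def pounce_index(registration: float, renewal: float) -> float:
    """Renewal-to-registration ratio: ~1.0 is fair, >>1 is a trap."""
    return renewal / registration if registration else float("inf")

def risk_label(ratio: float) -> str:
    if ratio <= 1.5:
        return "fair"
    if ratio <= 5.0:
        return "watch renewals"
    return "renewal trap"

ratio = pounce_index(1.99, 45.00)                # ≈ 22.6
print(round(ratio, 1), "->", risk_label(ratio))  # 22.6 -> renewal trap
```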
---

### The Optimized Table Layout

Here is the concrete column proposal for your table on `pounce.ch/tld-prices`:

| TLD | Price (Buy) | Price (Renew) | 1y Trend | 3y Trend | Risk Level |
| :--- | :--- | :--- | :--- | :--- | :--- |
| **.ai** | **$71.63** | $71.63 | **+15% 📈** | **+35% 📈** | 🟢 Low (stable but rising) |
| **.com** | **$10.75** | $10.75 | **+7% 📈** | **+14% 📈** | 🟢 Low (predictable) |
| **.xyz** | **$0.99** | $13.99 | **-10% 📉** | **-5%** | 🔴 High (renewal trap) |
| **.io** | **$32.00** | $32.00 | **0% ➖** | **+5%** | 🟢 Low |
| **.tech** | **$5.00** | $55.00 | **0% ➖** | **0%** | 🔴 High (high renewal) |

**Explaining the columns to the user:**

* **1y Trend:** *"The purchase price for this ending rose 15% last year. Lock it in before it gets more expensive!"*

* **Risk Level:** *"Careful — this ending lures you with a cheap entry price but costs 10x more in year two."*

---

### Feature Idea: "Lock-in Calculator" (Added Value)

Below the table, or in a TLD's detail view, offer a calculator:

> **Should I renew early?**
> *TLD: .com*
> *Trend: +7% p.a.*
>
> 💡 **Pounce recommendation:** *"Yes. If you renew your .com for 10 years now, you will save an estimated $15 compared to renewing annually."*

**That is real "domain intelligence".** You help the user save money by making market mechanics (registry price increases) understandable; the underlying math is sketched below.
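The recommendation boils down to compound price growth versus locking in today's price. A toy sketch assuming a constant annual increase (the $15 figure above depends on the registrar's actual price schedule; this model only shows the shape of the calculation):

```python
def early_renewal_savings(price: float, annual_increase: float, years: int) -> float:
    """Savings from renewing `years` ahead at today's price, versus paying
    a price that grows by `annual_increase` each year."""
    pay_as_you_go = sum(price * (1 + annual_increase) ** y for y in range(years))
    lock_in_now = price * years
    return pay_as_you_go - lock_in_now

# .com at $10.75, rising ~7% p.a., renewed 10 years ahead:
print(round(early_renewal_savings(10.75, 0.07, 10), 2))
```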
### Summary

We drop "arbitrage" and replace it with **"inflation tracking"**.

The story for the user is:

*"Domain prices change. .ai is getting more expensive, .xyz is a trap. Pounce shows you the true cost over 10 years, not just today's teaser price."*
@ -1,34 +0,0 @@
"""Add DNS verification fields to portfolio_domains

Revision ID: 006
Revises: 005
Create Date: 2025-12-13
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '006'
down_revision = '005'
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Add DNS verification columns to portfolio_domains table."""
    # Add columns with default values (nullable to avoid issues with existing rows)
    op.add_column('portfolio_domains', sa.Column('is_dns_verified', sa.Boolean(), nullable=True, server_default='0'))
    op.add_column('portfolio_domains', sa.Column('verification_status', sa.String(50), nullable=True, server_default='unverified'))
    op.add_column('portfolio_domains', sa.Column('verification_code', sa.String(100), nullable=True))
    op.add_column('portfolio_domains', sa.Column('verification_started_at', sa.DateTime(), nullable=True))
    op.add_column('portfolio_domains', sa.Column('verified_at', sa.DateTime(), nullable=True))


def downgrade() -> None:
    """Remove DNS verification columns from portfolio_domains table."""
    op.drop_column('portfolio_domains', 'verified_at')
    op.drop_column('portfolio_domains', 'verification_started_at')
    op.drop_column('portfolio_domains', 'verification_code')
    op.drop_column('portfolio_domains', 'verification_status')
    op.drop_column('portfolio_domains', 'is_dns_verified')
@ -1,74 +0,0 @@
"""Add inquiry close fields + audit trail

Revision ID: 007
Revises: 006
Create Date: 2025-12-15
"""

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '007'
down_revision = '006'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # listing_inquiries: deal workflow
    op.add_column('listing_inquiries', sa.Column('closed_reason', sa.String(200), nullable=True))
    op.add_column('listing_inquiries', sa.Column('closed_at', sa.DateTime(), nullable=True))

    op.create_index(
        'ix_listing_inquiries_listing_created',
        'listing_inquiries',
        ['listing_id', 'created_at'],
        unique=False,
    )
    op.create_index(
        'ix_listing_inquiries_listing_status',
        'listing_inquiries',
        ['listing_id', 'status'],
        unique=False,
    )

    # listing_inquiry_events: audit trail
    op.create_table(
        'listing_inquiry_events',
        sa.Column('id', sa.Integer(), primary_key=True),
        sa.Column('inquiry_id', sa.Integer(), sa.ForeignKey('listing_inquiries.id'), nullable=False, index=True),
        sa.Column('listing_id', sa.Integer(), sa.ForeignKey('domain_listings.id'), nullable=False, index=True),
        sa.Column('actor_user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=False, index=True),
        sa.Column('old_status', sa.String(20), nullable=True),
        sa.Column('new_status', sa.String(20), nullable=False),
        sa.Column('reason', sa.String(200), nullable=True),
        sa.Column('ip_address', sa.String(45), nullable=True),
        sa.Column('user_agent', sa.String(500), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True, index=True),
    )

    op.create_index(
        'ix_listing_inquiry_events_inquiry_created',
        'listing_inquiry_events',
        ['inquiry_id', 'created_at'],
        unique=False,
    )
    op.create_index(
        'ix_listing_inquiry_events_listing_created',
        'listing_inquiry_events',
        ['listing_id', 'created_at'],
        unique=False,
    )


def downgrade() -> None:
    op.drop_index('ix_listing_inquiry_events_listing_created', table_name='listing_inquiry_events')
    op.drop_index('ix_listing_inquiry_events_inquiry_created', table_name='listing_inquiry_events')
    op.drop_table('listing_inquiry_events')

    op.drop_index('ix_listing_inquiries_listing_status', table_name='listing_inquiries')
    op.drop_index('ix_listing_inquiries_listing_created', table_name='listing_inquiries')
    op.drop_column('listing_inquiries', 'closed_at')
    op.drop_column('listing_inquiries', 'closed_reason')
@ -1,61 +0,0 @@
"""Add inquiry threading (buyer link + messages)

Revision ID: 008
Revises: 007
Create Date: 2025-12-15
"""

from alembic import op
import sqlalchemy as sa


revision = '008'
down_revision = '007'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Link inquiry to buyer account
    op.add_column('listing_inquiries', sa.Column('buyer_user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True))
    op.create_index('ix_listing_inquiries_buyer_user', 'listing_inquiries', ['buyer_user_id'], unique=False)

    # Thread messages
    op.create_table(
        'listing_inquiry_messages',
        sa.Column('id', sa.Integer(), primary_key=True),
        sa.Column('inquiry_id', sa.Integer(), sa.ForeignKey('listing_inquiries.id'), nullable=False, index=True),
        sa.Column('listing_id', sa.Integer(), sa.ForeignKey('domain_listings.id'), nullable=False, index=True),
        sa.Column('sender_user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=False, index=True),
        sa.Column('body', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True, index=True),
    )

    op.create_index(
        'ix_listing_inquiry_messages_inquiry_created',
        'listing_inquiry_messages',
        ['inquiry_id', 'created_at'],
        unique=False,
    )
    op.create_index(
        'ix_listing_inquiry_messages_listing_created',
        'listing_inquiry_messages',
        ['listing_id', 'created_at'],
        unique=False,
    )
    op.create_index(
        'ix_listing_inquiry_messages_sender_created',
        'listing_inquiry_messages',
        ['sender_user_id', 'created_at'],
        unique=False,
    )


def downgrade() -> None:
    op.drop_index('ix_listing_inquiry_messages_sender_created', table_name='listing_inquiry_messages')
    op.drop_index('ix_listing_inquiry_messages_listing_created', table_name='listing_inquiry_messages')
    op.drop_index('ix_listing_inquiry_messages_inquiry_created', table_name='listing_inquiry_messages')
    op.drop_table('listing_inquiry_messages')

    op.drop_index('ix_listing_inquiries_buyer_user', table_name='listing_inquiries')
    op.drop_column('listing_inquiries', 'buyer_user_id')
@ -1,31 +0,0 @@
"""Add listing sold fields (GMV tracking)

Revision ID: 009
Revises: 008
Create Date: 2025-12-15
"""

from alembic import op
import sqlalchemy as sa


revision = '009'
down_revision = '008'
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column('domain_listings', sa.Column('sold_at', sa.DateTime(), nullable=True))
    op.add_column('domain_listings', sa.Column('sold_reason', sa.String(200), nullable=True))
    op.add_column('domain_listings', sa.Column('sold_price', sa.Float(), nullable=True))
    op.add_column('domain_listings', sa.Column('sold_currency', sa.String(3), nullable=True))

    op.create_index('ix_domain_listings_status', 'domain_listings', ['status'], unique=False)


def downgrade() -> None:
    op.drop_index('ix_domain_listings_status', table_name='domain_listings')
    op.drop_column('domain_listings', 'sold_currency')
    op.drop_column('domain_listings', 'sold_price')
    op.drop_column('domain_listings', 'sold_reason')
    op.drop_column('domain_listings', 'sold_at')
@ -1,25 +0,0 @@
"""Add yield connected_at timestamp.

Revision ID: 010_add_yield_connected_at
Revises: 009_add_listing_sold_fields
Create Date: 2025-12-15
"""

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "010_add_yield_connected_at"
down_revision = "009_add_listing_sold_fields"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column("yield_domains", sa.Column("connected_at", sa.DateTime(), nullable=True))


def downgrade() -> None:
    op.drop_column("yield_domains", "connected_at")
@ -1,28 +0,0 @@
"""Add click_id + destination_url to yield transactions.

Revision ID: 011_add_yield_transaction_click_id
Revises: 010_add_yield_connected_at
Create Date: 2025-12-15
"""

from alembic import op
import sqlalchemy as sa


revision = "011_add_yield_transaction_click_id"
down_revision = "010_add_yield_connected_at"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column("yield_transactions", sa.Column("click_id", sa.String(length=64), nullable=True))
    op.add_column("yield_transactions", sa.Column("destination_url", sa.Text(), nullable=True))
    op.create_index("ix_yield_transactions_click_id", "yield_transactions", ["click_id"], unique=False)


def downgrade() -> None:
    op.drop_index("ix_yield_transactions_click_id", table_name="yield_transactions")
    op.drop_column("yield_transactions", "destination_url")
    op.drop_column("yield_transactions", "click_id")
@ -1,67 +0,0 @@
"""Add telemetry_events table.

Revision ID: 012_add_telemetry_events
Revises: 011_add_yield_transaction_click_id
Create Date: 2025-12-15
"""

from alembic import op
import sqlalchemy as sa


revision = "012_add_telemetry_events"
down_revision = "011_add_yield_transaction_click_id"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table(
        "telemetry_events",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("event_name", sa.String(length=60), nullable=False),
        sa.Column("listing_id", sa.Integer(), nullable=True),
        sa.Column("inquiry_id", sa.Integer(), nullable=True),
        sa.Column("yield_domain_id", sa.Integer(), nullable=True),
        sa.Column("click_id", sa.String(length=64), nullable=True),
        sa.Column("domain", sa.String(length=255), nullable=True),
        sa.Column("source", sa.String(length=30), nullable=True),
        sa.Column("ip_hash", sa.String(length=64), nullable=True),
        sa.Column("user_agent", sa.String(length=500), nullable=True),
        sa.Column("referrer", sa.String(length=500), nullable=True),
        sa.Column("metadata_json", sa.Text(), nullable=True),
        sa.Column("is_authenticated", sa.Boolean(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
    )

    op.create_index("ix_telemetry_events_event_name", "telemetry_events", ["event_name"])
    op.create_index("ix_telemetry_events_user_id", "telemetry_events", ["user_id"])
    op.create_index("ix_telemetry_events_listing_id", "telemetry_events", ["listing_id"])
    op.create_index("ix_telemetry_events_inquiry_id", "telemetry_events", ["inquiry_id"])
    op.create_index("ix_telemetry_events_yield_domain_id", "telemetry_events", ["yield_domain_id"])
    op.create_index("ix_telemetry_events_click_id", "telemetry_events", ["click_id"])
    op.create_index("ix_telemetry_events_domain", "telemetry_events", ["domain"])
    op.create_index("ix_telemetry_events_created_at", "telemetry_events", ["created_at"])
    op.create_index("ix_telemetry_event_name_created", "telemetry_events", ["event_name", "created_at"])
    op.create_index("ix_telemetry_user_created", "telemetry_events", ["user_id", "created_at"])
    op.create_index("ix_telemetry_listing_created", "telemetry_events", ["listing_id", "created_at"])
    op.create_index("ix_telemetry_yield_created", "telemetry_events", ["yield_domain_id", "created_at"])


def downgrade() -> None:
    op.drop_index("ix_telemetry_yield_created", table_name="telemetry_events")
    op.drop_index("ix_telemetry_listing_created", table_name="telemetry_events")
    op.drop_index("ix_telemetry_user_created", table_name="telemetry_events")
    op.drop_index("ix_telemetry_event_name_created", table_name="telemetry_events")
    op.drop_index("ix_telemetry_events_created_at", table_name="telemetry_events")
    op.drop_index("ix_telemetry_events_domain", table_name="telemetry_events")
    op.drop_index("ix_telemetry_events_click_id", table_name="telemetry_events")
    op.drop_index("ix_telemetry_events_yield_domain_id", table_name="telemetry_events")
    op.drop_index("ix_telemetry_events_inquiry_id", table_name="telemetry_events")
    op.drop_index("ix_telemetry_events_listing_id", table_name="telemetry_events")
    op.drop_index("ix_telemetry_events_user_id", table_name="telemetry_events")
    op.drop_index("ix_telemetry_events_event_name", table_name="telemetry_events")
    op.drop_table("telemetry_events")
@ -1,41 +0,0 @@
"""add ops alert events

Revision ID: 013_add_ops_alert_events
Revises: 012_add_telemetry_events
Create Date: 2025-12-15
"""

from __future__ import annotations

from alembic import op
import sqlalchemy as sa


revision = "013_add_ops_alert_events"
down_revision = "012_add_telemetry_events"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table(
        "ops_alert_events",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("alert_key", sa.String(length=80), nullable=False),
        sa.Column("severity", sa.String(length=10), nullable=False),
        sa.Column("title", sa.String(length=200), nullable=False),
        sa.Column("detail", sa.Text(), nullable=True),
        sa.Column("status", sa.String(length=20), nullable=False),
        sa.Column("recipients", sa.Text(), nullable=True),
        sa.Column("send_reason", sa.String(length=60), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.text("now()")),
    )
    op.create_index("ix_ops_alert_key_created", "ops_alert_events", ["alert_key", "created_at"])
    op.create_index("ix_ops_alert_status_created", "ops_alert_events", ["status", "created_at"])


def downgrade() -> None:
    op.drop_index("ix_ops_alert_status_created", table_name="ops_alert_events")
    op.drop_index("ix_ops_alert_key_created", table_name="ops_alert_events")
    op.drop_table("ops_alert_events")
@ -1,28 +0,0 @@
"""add users invite_code

Revision ID: 014_add_user_invite_code
Revises: 013_add_ops_alert_events
Create Date: 2025-12-15
"""

from __future__ import annotations

from alembic import op
import sqlalchemy as sa


revision = "014_add_user_invite_code"
down_revision = "013_add_ops_alert_events"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column("users", sa.Column("invite_code", sa.String(length=32), nullable=True))
    op.create_index("ix_users_invite_code", "users", ["invite_code"], unique=True)


def downgrade() -> None:
    op.drop_index("ix_users_invite_code", table_name="users")
    op.drop_column("users", "invite_code")
@ -1,29 +0,0 @@
"""add subscription referral bonus domains

Revision ID: 015_add_subscription_referral_bonus_domains
Revises: 014_add_user_invite_code
Create Date: 2025-12-15
"""

from __future__ import annotations

import sqlalchemy as sa
from alembic import op


revision = "015_add_subscription_referral_bonus_domains"
down_revision = "014_add_user_invite_code"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column(
        "subscriptions",
        sa.Column("referral_bonus_domains", sa.Integer(), nullable=False, server_default="0"),
    )


def downgrade() -> None:
    op.drop_column("subscriptions", "referral_bonus_domains")
@ -1,75 +0,0 @@
"""add llm artifacts and yield landing config

Revision ID: 016_add_llm_artifacts_and_yield_landing_config
Revises: 015_add_subscription_referral_bonus_domains
Create Date: 2025-12-17
"""

from __future__ import annotations

import sqlalchemy as sa
from alembic import op


revision = "016_add_llm_artifacts_and_yield_landing_config"
down_revision = "015_add_subscription_referral_bonus_domains"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table(
        "llm_artifacts",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id"), nullable=True),
        sa.Column("kind", sa.String(length=50), nullable=False),
        sa.Column("domain", sa.String(length=255), nullable=False),
        sa.Column("prompt_version", sa.String(length=50), nullable=False),
        sa.Column("model", sa.String(length=100), nullable=False),
        sa.Column("payload_json", sa.Text(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("expires_at", sa.DateTime(), nullable=True),
    )
    op.create_index("ix_llm_artifacts_id", "llm_artifacts", ["id"])
    op.create_index("ix_llm_artifacts_user_id", "llm_artifacts", ["user_id"])
    op.create_index("ix_llm_artifacts_kind", "llm_artifacts", ["kind"])
    op.create_index("ix_llm_artifacts_domain", "llm_artifacts", ["domain"])
    op.create_index("ix_llm_artifacts_prompt_version", "llm_artifacts", ["prompt_version"])
    op.create_index("ix_llm_artifacts_created_at", "llm_artifacts", ["created_at"])
    op.create_index("ix_llm_artifacts_expires_at", "llm_artifacts", ["expires_at"])
    op.create_index(
        "ix_llm_artifacts_kind_domain_prompt",
        "llm_artifacts",
        ["kind", "domain", "prompt_version"],
    )

    # Yield landing config (generated by LLM on activation)
    op.add_column("yield_domains", sa.Column("landing_config_json", sa.Text(), nullable=True))
    op.add_column("yield_domains", sa.Column("landing_template", sa.String(length=50), nullable=True))
    op.add_column("yield_domains", sa.Column("landing_headline", sa.String(length=300), nullable=True))
    op.add_column("yield_domains", sa.Column("landing_intro", sa.Text(), nullable=True))
    op.add_column("yield_domains", sa.Column("landing_cta_label", sa.String(length=120), nullable=True))
    op.add_column("yield_domains", sa.Column("landing_model", sa.String(length=100), nullable=True))
    op.add_column("yield_domains", sa.Column("landing_generated_at", sa.DateTime(), nullable=True))


def downgrade() -> None:
    op.drop_column("yield_domains", "landing_generated_at")
    op.drop_column("yield_domains", "landing_model")
    op.drop_column("yield_domains", "landing_cta_label")
    op.drop_column("yield_domains", "landing_intro")
    op.drop_column("yield_domains", "landing_headline")
    op.drop_column("yield_domains", "landing_template")
    op.drop_column("yield_domains", "landing_config_json")

    op.drop_index("ix_llm_artifacts_kind_domain_prompt", table_name="llm_artifacts")
    op.drop_index("ix_llm_artifacts_expires_at", table_name="llm_artifacts")
    op.drop_index("ix_llm_artifacts_created_at", table_name="llm_artifacts")
    op.drop_index("ix_llm_artifacts_prompt_version", table_name="llm_artifacts")
    op.drop_index("ix_llm_artifacts_domain", table_name="llm_artifacts")
    op.drop_index("ix_llm_artifacts_kind", table_name="llm_artifacts")
    op.drop_index("ix_llm_artifacts_user_id", table_name="llm_artifacts")
    op.drop_index("ix_llm_artifacts_id", table_name="llm_artifacts")
    op.drop_table("llm_artifacts")
@ -14,22 +14,6 @@ from app.api.webhooks import router as webhooks_router
from app.api.contact import router as contact_router
from app.api.price_alerts import router as price_alerts_router
from app.api.blog import router as blog_router
from app.api.listings import router as listings_router
from app.api.sniper_alerts import router as sniper_alerts_router
from app.api.seo import router as seo_router
from app.api.dashboard import router as dashboard_router
from app.api.yield_domains import router as yield_router
from app.api.yield_webhooks import router as yield_webhooks_router
from app.api.yield_routing import router as yield_routing_router
from app.api.yield_payout_admin import router as yield_payout_admin_router
from app.api.telemetry import router as telemetry_router
from app.api.analyze import router as analyze_router
from app.api.hunt import router as hunt_router
from app.api.cfo import router as cfo_router
from app.api.drops import router as drops_router
from app.api.llm import router as llm_router
from app.api.llm_naming import router as llm_naming_router
from app.api.llm_vision import router as llm_vision_router

api_router = APIRouter()

@ -43,32 +27,6 @@ api_router.include_router(tld_prices_router, prefix="/tld-prices", tags=["TLD Pr
api_router.include_router(price_alerts_router, prefix="/price-alerts", tags=["Price Alerts"])
api_router.include_router(portfolio_router, prefix="/portfolio", tags=["Portfolio"])
api_router.include_router(auctions_router, prefix="/auctions", tags=["Smart Pounce - Auctions"])
api_router.include_router(dashboard_router, prefix="/dashboard", tags=["Dashboard"])
api_router.include_router(analyze_router, prefix="/analyze", tags=["Analyze"])
api_router.include_router(hunt_router, prefix="/hunt", tags=["Hunt"])
api_router.include_router(cfo_router, prefix="/cfo", tags=["CFO"])
api_router.include_router(drops_router, tags=["Drops - Zone Files"])
api_router.include_router(llm_router, tags=["LLM"])
api_router.include_router(llm_naming_router, tags=["LLM Naming"])
api_router.include_router(llm_vision_router, tags=["LLM Vision"])

# Marketplace (For Sale) - from analysis_3.md
api_router.include_router(listings_router, prefix="/listings", tags=["Marketplace - For Sale"])

# Sniper Alerts - from analysis_3.md
api_router.include_router(sniper_alerts_router, prefix="/sniper-alerts", tags=["Sniper Alerts"])

# SEO Data / Backlinks - from analysis_3.md (Tycoon-only)
api_router.include_router(seo_router, prefix="/seo", tags=["SEO Data - Tycoon"])

# Yield / Intent Routing - Passive income from parked domains
api_router.include_router(yield_router, tags=["Yield - Intent Routing"])
api_router.include_router(yield_webhooks_router, tags=["Yield - Webhooks"])
api_router.include_router(yield_routing_router, tags=["Yield - Routing"])
api_router.include_router(yield_payout_admin_router, tags=["Yield - Admin"])

# Telemetry / KPIs (admin)
api_router.include_router(telemetry_router, tags=["Telemetry"])

# Support & Communication
api_router.include_router(contact_router, prefix="/contact", tags=["Contact & Newsletter"])
@ -9,14 +9,12 @@ Provides admin-only access to:
- Domain/Portfolio overview
"""
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional
from fastapi import APIRouter, HTTPException, status, Depends, BackgroundTasks
from fastapi import APIRouter, HTTPException, status, BackgroundTasks, Depends
from pydantic import BaseModel, EmailStr
from sqlalchemy import select, func, desc

from app.api.deps import Database, get_current_user
from app.config import get_settings
from app.models.user import User
from app.models.subscription import Subscription, SubscriptionTier, SubscriptionStatus, TIER_CONFIG
from app.models.domain import Domain

@ -25,13 +23,10 @@ from app.models.newsletter import NewsletterSubscriber
from app.models.tld_price import TLDPrice, TLDInfo
from app.models.auction import DomainAuction
from app.models.price_alert import PriceAlert
from app.models.listing import DomainListing
from app.services.db_backup import create_backup, list_backups
from app.services.ops_alerts import run_ops_alert_checks
from app.models.ops_alert import OpsAlertEvent

router = APIRouter()
settings = get_settings()

# ============== Admin Authentication ==============

async def require_admin(
@@ -46,60 +41,6 @@ async def require_admin(
     return current_user


-# ============== Scraping Ops (Server-only, free alternative to paid proxies) ==============
-
-class PlaywrightCookiesUpload(BaseModel):
-    """Upload Playwright cookies JSON used by protected scrapers (e.g. NameJet)."""
-    cookies: list[dict]
-
-
-@router.post("/scraping/playwright-cookies")
-async def upload_playwright_cookies(
-    payload: PlaywrightCookiesUpload,
-    admin: User = Depends(require_admin),
-):
-    """Replace the server's Playwright cookie jar file."""
-    cookie_dir = Path(__file__).parent.parent / "data" / "cookies"
-    cookie_dir.mkdir(parents=True, exist_ok=True)
-    cookie_file = cookie_dir / "session_cookies.json"
-
-    if not payload.cookies:
-        raise HTTPException(status_code=400, detail="cookies must not be empty")
-
-    try:
-        import json
-        cookie_file.write_text(json.dumps(payload.cookies, indent=2))
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Failed to write cookie file: {e}")
-
-    return {
-        "status": "ok",
-        "cookies_count": len(payload.cookies),
-        "updated_at": datetime.utcnow().isoformat(),
-        "note": "Enable protected scraping with POUNCE_ENABLE_PROTECTED_SCRAPERS=true",
-    }
-
-
-@router.get("/scraping/playwright-cookies")
-async def get_playwright_cookie_status(
-    admin: User = Depends(require_admin),
-):
-    """Return Playwright cookie jar status (no contents)."""
-    cookie_dir = Path(__file__).parent.parent / "data" / "cookies"
-    cookie_file = cookie_dir / "session_cookies.json"
-
-    if not cookie_file.exists():
-        return {"exists": False}
-
-    stat = cookie_file.stat()
-    return {
-        "exists": True,
-        "path": str(cookie_file),
-        "size_bytes": stat.st_size,
-        "modified_at": datetime.utcfromtimestamp(stat.st_mtime).isoformat() + "Z",
-    }
-
-
 # ============== Dashboard Stats ==============

 @router.get("/stats")
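For reference, the removed upload endpoint replaced the server's cookie jar wholesale from a Playwright cookie export. A minimal client sketch for driving it; the base URL and bearer admin token are assumptions, not shown in this diff:

```python
# Sketch: push a Playwright cookie export to the (removed) admin endpoint.
# API base path and admin token are assumed; adjust to your deployment.
import json

import httpx

API = "http://localhost:8000/api/v1/admin"  # assumed base path
TOKEN = "..."  # admin JWT placeholder

cookies = json.loads(open("session_cookies.json").read())  # Playwright export
resp = httpx.post(
    f"{API}/scraping/playwright-cookies",
    json={"cookies": cookies},
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=30,
)
resp.raise_for_status()
print(resp.json())  # e.g. {"status": "ok", "cookies_count": ...}
```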
@@ -193,213 +134,6 @@ async def get_admin_stats(
     }


-# ============== Earnings / Revenue ==============
-
-@router.get("/earnings")
-async def get_admin_earnings(
-    db: Database,
-    admin: User = Depends(require_admin)
-):
-    """
-    Get earnings and revenue metrics for admin dashboard.
-
-    Calculates MRR, ARR, and subscription breakdown.
-    """
-    # Tier prices (from TIER_CONFIG)
-    tier_prices = {
-        SubscriptionTier.SCOUT: 0,
-        SubscriptionTier.TRADER: 9,
-        SubscriptionTier.TYCOON: 29,
-    }
-
-    # Get all active subscriptions
-    result = await db.execute(
-        select(Subscription).where(
-            Subscription.status == SubscriptionStatus.ACTIVE
-        )
-    )
-    active_subs = result.scalars().all()
-
-    # Calculate MRR
-    mrr = 0.0
-    tier_breakdown = {
-        "scout": {"count": 0, "revenue": 0},
-        "trader": {"count": 0, "revenue": 0},
-        "tycoon": {"count": 0, "revenue": 0},
-    }
-
-    for sub in active_subs:
-        price = tier_prices.get(sub.tier, 0)
-        mrr += price
-        tier_key = sub.tier.value
-        if tier_key in tier_breakdown:
-            tier_breakdown[tier_key]["count"] += 1
-            tier_breakdown[tier_key]["revenue"] += price
-
-    arr = mrr * 12
-
-    # New subscriptions this week
-    week_ago = datetime.utcnow() - timedelta(days=7)
-    new_subs_week = await db.execute(
-        select(func.count(Subscription.id)).where(
-            Subscription.started_at >= week_ago,
-            Subscription.tier != SubscriptionTier.SCOUT
-        )
-    )
-    new_subs_week = new_subs_week.scalar() or 0
-
-    # New subscriptions this month
-    month_ago = datetime.utcnow() - timedelta(days=30)
-    new_subs_month = await db.execute(
-        select(func.count(Subscription.id)).where(
-            Subscription.started_at >= month_ago,
-            Subscription.tier != SubscriptionTier.SCOUT
-        )
-    )
-    new_subs_month = new_subs_month.scalar() or 0
-
-    # Cancelled subscriptions this month (churn)
-    cancelled_month = await db.execute(
-        select(func.count(Subscription.id)).where(
-            Subscription.cancelled_at >= month_ago,
-            Subscription.cancelled_at.isnot(None)
-        )
-    )
-    cancelled_month = cancelled_month.scalar() or 0
-
-    # Total paying customers
-    paying_customers = tier_breakdown["trader"]["count"] + tier_breakdown["tycoon"]["count"]
-
-    # Revenue from Yield (platform's 30% cut)
-    try:
-        from app.models.yield_domain import YieldTransaction
-        yield_revenue = await db.execute(
-            select(func.sum(YieldTransaction.net_amount)).where(
-                YieldTransaction.created_at >= month_ago,
-                YieldTransaction.status == "confirmed"
-            )
-        )
-        yield_revenue_month = float(yield_revenue.scalar() or 0) * 0.30 / 0.70  # Platform's cut
-    except Exception:
-        yield_revenue_month = 0
-
-    return {
-        "mrr": round(mrr, 2),
-        "arr": round(arr, 2),
-        "paying_customers": paying_customers,
-        "tier_breakdown": tier_breakdown,
-        "new_subscriptions": {
-            "week": new_subs_week,
-            "month": new_subs_month,
-        },
-        "churn": {
-            "month": cancelled_month,
-        },
-        "yield_revenue_month": round(yield_revenue_month, 2),
-        "total_revenue_month": round(mrr + yield_revenue_month, 2),
-        "timestamp": datetime.utcnow().isoformat(),
-    }
-
-
-# ============== Earnings History ==============
-
-@router.get("/earnings/history")
-async def get_admin_earnings_history(
-    db: Database,
-    admin: User = Depends(require_admin),
-    months: int = 12
-):
-    """
-    Get historical earnings data for charts.
-
-    Calculates MRR for each month based on subscription start dates.
-    """
-    tier_prices = {
-        SubscriptionTier.SCOUT: 0,
-        SubscriptionTier.TRADER: 9,
-        SubscriptionTier.TYCOON: 29,
-    }
-
-    # Get all subscriptions
-    result = await db.execute(select(Subscription))
-    all_subs = result.scalars().all()
-
-    # Generate monthly data for the last N months
-    monthly_data = []
-    now = datetime.utcnow()
-
-    for i in range(months - 1, -1, -1):
-        # Calculate the start of each month
-        month_start = datetime(now.year, now.month, 1) - timedelta(days=i * 30)
-        month_end = month_start + timedelta(days=30)
-        month_name = month_start.strftime("%b %Y")
-
-        # Calculate MRR for this month
-        mrr = 0.0
-        tier_counts = {"scout": 0, "trader": 0, "tycoon": 0}
-        new_subs = 0
-        churned = 0
-
-        for sub in all_subs:
-            # Was this subscription active during this month?
-            started_before_month_end = sub.started_at <= month_end
-            cancelled_after_month_start = (sub.cancelled_at is None or sub.cancelled_at >= month_start)
-
-            if started_before_month_end and cancelled_after_month_start:
-                price = tier_prices.get(sub.tier, 0)
-                mrr += price
-                tier_key = sub.tier.value
-                if tier_key in tier_counts:
-                    tier_counts[tier_key] += 1
-
-            # New subscriptions in this month
-            if month_start <= sub.started_at < month_end and sub.tier != SubscriptionTier.SCOUT:
-                new_subs += 1
-
-            # Churned in this month
-            if sub.cancelled_at and month_start <= sub.cancelled_at < month_end:
-                churned += 1
-
-        monthly_data.append({
-            "month": month_name,
-            "mrr": round(mrr, 2),
-            "arr": round(mrr * 12, 2),
-            "paying_customers": tier_counts["trader"] + tier_counts["tycoon"],
-            "scout": tier_counts["scout"],
-            "trader": tier_counts["trader"],
-            "tycoon": tier_counts["tycoon"],
-            "new_subscriptions": new_subs,
-            "churn": churned,
-        })
-
-    # Calculate growth metrics
-    if len(monthly_data) >= 2:
-        current_mrr = monthly_data[-1]["mrr"]
-        prev_mrr = monthly_data[-2]["mrr"] if monthly_data[-2]["mrr"] > 0 else 1
-        mrr_growth = ((current_mrr - prev_mrr) / prev_mrr) * 100
-    else:
-        mrr_growth = 0
-
-    # Calculate average revenue per user (ARPU)
-    current_paying = monthly_data[-1]["paying_customers"] if monthly_data else 0
-    current_mrr = monthly_data[-1]["mrr"] if monthly_data else 0
-    arpu = current_mrr / current_paying if current_paying > 0 else 0
-
-    # Calculate LTV (assuming 12 month average retention)
-    ltv = arpu * 12
-
-    return {
-        "monthly_data": monthly_data,
-        "metrics": {
-            "mrr_growth_percent": round(mrr_growth, 1),
-            "arpu": round(arpu, 2),
-            "ltv": round(ltv, 2),
-            "total_customers": sum(m["paying_customers"] for m in monthly_data[-1:]),
-        },
-        "timestamp": datetime.utcnow().isoformat(),
-    }
-
-
 # ============== User Management ==============

 class UpdateUserRequest(BaseModel):
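A quick sanity check on the arithmetic in the removed earnings endpoints; the subscriber counts here are made up for illustration, the prices mirror `tier_prices` above:

```python
# Worked example of the MRR/ARR/ARPU/LTV arithmetic from the removed code.
tier_prices = {"scout": 0, "trader": 9, "tycoon": 29}
active = {"scout": 40, "trader": 10, "tycoon": 3}  # illustrative counts

mrr = sum(tier_prices[t] * n for t, n in active.items())  # 10*9 + 3*29 = 177
arr = mrr * 12                                            # 2124
paying = active["trader"] + active["tycoon"]              # 13
arpu = mrr / paying                                       # ~13.62
ltv = arpu * 12                                           # ~163.4 (12-month retention assumption)
print(mrr, arr, paying, round(arpu, 2), round(ltv, 2))
```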
@@ -425,48 +159,43 @@ async def list_users(
     search: Optional[str] = None,
 ):
     """List all users with pagination and search."""
-    # PERF: Avoid N+1 queries (subscription + domain_count per user).
-    domain_counts = (
-        select(
-            Domain.user_id.label("user_id"),
-            func.count(Domain.id).label("domain_count"),
-        )
-        .group_by(Domain.user_id)
-        .subquery()
-    )
-
-    base = (
-        select(
-            User,
-            Subscription,
-            func.coalesce(domain_counts.c.domain_count, 0).label("domain_count"),
-        )
-        .outerjoin(Subscription, Subscription.user_id == User.id)
-        .outerjoin(domain_counts, domain_counts.c.user_id == User.id)
-    )
+    query = select(User).order_by(desc(User.created_at))

     if search:
-        base = base.where(
-            User.email.ilike(f"%{search}%") | User.name.ilike(f"%{search}%")
-        )
+        query = query.where(
+            User.email.ilike(f"%{search}%") |
+            User.name.ilike(f"%{search}%")
+        )

-    # Total count (for pagination UI)
+    query = query.offset(offset).limit(limit)
+    result = await db.execute(query)
+    users = result.scalars().all()
+
+    # Get total count
     count_query = select(func.count(User.id))
     if search:
         count_query = count_query.where(
-            User.email.ilike(f"%{search}%") | User.name.ilike(f"%{search}%")
+            User.email.ilike(f"%{search}%") |
+            User.name.ilike(f"%{search}%")
         )
-    total = (await db.execute(count_query)).scalar() or 0
-    result = await db.execute(
-        base.order_by(desc(User.created_at)).offset(offset).limit(limit)
-    )
-    rows = result.all()
+    total = await db.execute(count_query)
+    total = total.scalar()

     user_list = []
-    for user, subscription, domain_count in rows:
-        user_list.append(
-            {
+    for user in users:
+        # Get subscription
+        sub_result = await db.execute(
+            select(Subscription).where(Subscription.user_id == user.id)
+        )
+        subscription = sub_result.scalar_one_or_none()
+
+        # Get domain count
+        domain_count = await db.execute(
+            select(func.count(Domain.id)).where(Domain.user_id == user.id)
+        )
+        domain_count = domain_count.scalar()
+
+        user_list.append({
             "id": user.id,
             "email": user.email,
             "name": user.name,
@@ -475,7 +204,7 @@ async def list_users(
             "is_admin": user.is_admin,
             "created_at": user.created_at.isoformat(),
             "last_login": user.last_login.isoformat() if user.last_login else None,
-            "domain_count": int(domain_count or 0),
+            "domain_count": domain_count,
             "subscription": {
                 "tier": subscription.tier.value if subscription else "scout",
                 "tier_name": TIER_CONFIG.get(subscription.tier, {}).get("name", "Scout") if subscription else "Scout",
@@ -487,10 +216,14 @@ async def list_users(
                 "status": None,
                 "domain_limit": 5,
             },
-        }
-        )
+        })

-    return {"users": user_list, "total": total, "limit": limit, "offset": offset}
+    return {
+        "users": user_list,
+        "total": total,
+        "limit": limit,
+        "offset": offset,
+    }


 # ============== User Export ==============
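The side being removed in the hunk above batches everything into a single round trip, while the side being kept issues two extra queries per user (the classic N+1 shape). A condensed, standalone view of the batched pattern, using the same models the file imports:

```python
# Condensed sketch of the batched (no-N+1) listing query on the "-" side:
# one round trip yields (User, Subscription, domain_count) rows.
from sqlalchemy import func, select

from app.models.domain import Domain
from app.models.subscription import Subscription
from app.models.user import User

domain_counts = (
    select(Domain.user_id, func.count(Domain.id).label("domain_count"))
    .group_by(Domain.user_id)
    .subquery()
)
stmt = (
    select(User, Subscription, func.coalesce(domain_counts.c.domain_count, 0))
    .outerjoin(Subscription, Subscription.user_id == User.id)
    .outerjoin(domain_counts, domain_counts.c.user_id == User.id)
)
# rows = (await db.execute(stmt)).all()  # each row unpacks as (user, sub, count)
```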
@@ -505,26 +238,8 @@ async def export_users_csv(
     import csv
     import io

-    domain_counts = (
-        select(
-            Domain.user_id.label("user_id"),
-            func.count(Domain.id).label("domain_count"),
-        )
-        .group_by(Domain.user_id)
-        .subquery()
-    )
-
-    result = await db.execute(
-        select(
-            User,
-            Subscription,
-            func.coalesce(domain_counts.c.domain_count, 0).label("domain_count"),
-        )
-        .outerjoin(Subscription, Subscription.user_id == User.id)
-        .outerjoin(domain_counts, domain_counts.c.user_id == User.id)
-        .order_by(User.created_at)
-    )
-    users_list = result.all()
+    result = await db.execute(select(User).order_by(User.created_at))
+    users_list = result.scalars().all()

     # Create CSV
     output = io.StringIO()
@@ -536,7 +251,19 @@ async def export_users_csv(
         "Created At", "Last Login", "Tier", "Domain Limit", "Domains Used"
     ])

-    for user, subscription, domain_count in users_list:
+    for user in users_list:
+        # Get subscription
+        sub_result = await db.execute(
+            select(Subscription).where(Subscription.user_id == user.id)
+        )
+        subscription = sub_result.scalar_one_or_none()
+
+        # Get domain count
+        domain_count = await db.execute(
+            select(func.count(Domain.id)).where(Domain.user_id == user.id)
+        )
+        domain_count = domain_count.scalar()
+
         writer.writerow([
             user.id,
             user.email,
@@ -548,7 +275,7 @@ async def export_users_csv(
             user.last_login.strftime("%Y-%m-%d %H:%M") if user.last_login else "",
             subscription.tier.value if subscription else "scout",
             subscription.domain_limit if subscription else 5,
-            int(domain_count or 0),
+            domain_count,
         ])

     return {
@@ -736,12 +463,12 @@ async def upgrade_user(
             user_id=user.id,
             tier=new_tier,
             status=SubscriptionStatus.ACTIVE,
-            max_domains=config.get("domain_limit", 5),
+            domain_limit=config.get("domain_limit", 5),
         )
         db.add(subscription)
     else:
         subscription.tier = new_tier
-        subscription.max_domains = config.get("domain_limit", 5)
+        subscription.domain_limit = config.get("domain_limit", 5)
         subscription.status = SubscriptionStatus.ACTIVE

     await db.commit()
@@ -819,14 +546,6 @@ async def trigger_tld_scrape(
     admin: User = Depends(require_admin),
 ):
     """Manually trigger a TLD price scrape."""
-    # Prefer job queue in production (non-blocking)
-    if settings.enable_job_queue and settings.redis_url:
-        from app.jobs.client import enqueue_job
-
-        job_id = await enqueue_job("scrape_tld_prices")
-        return {"message": "TLD price scrape enqueued", "job_id": job_id}
-
-    # Fallback: run inline
    from app.services.tld_scraper.aggregator import tld_aggregator

     result = await tld_aggregator.run_scrape(db)
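The lines being removed implement an "enqueue if a queue is available, run inline otherwise" pattern. Its generic shape, as a sketch with stand-in callables (`enqueue_job` and `run_inline` here are parameters, not the app's actual job client):

```python
# Generic shape of the enqueue-or-inline pattern from the removed lines.
async def trigger(settings, run_inline, enqueue_job):
    if settings.enable_job_queue and settings.redis_url:
        job_id = await enqueue_job("scrape_tld_prices")  # non-blocking path
        return {"message": "enqueued", "job_id": job_id}
    # Blocking fallback when no Redis-backed queue is configured.
    return {"message": "completed", "result": await run_inline()}
```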
@@ -1022,7 +741,7 @@ async def test_email(
     """Send a test email to the admin user."""
     from app.services.email_service import email_service

-    if not email_service.is_configured():
+    if not email_service.is_configured:
         raise HTTPException(
             status_code=400,
             detail="Email service is not configured. Check SMTP settings."
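This one-character change matters more than it looks: if `is_configured` is a `@property`, calling it raises `TypeError`; if it is a method, the bare attribute is always truthy and the guard never fires. A toy illustration of the property case:

```python
# Why property-vs-method access matters for the guard above.
class EmailService:
    @property
    def is_configured(self) -> bool:
        return False

svc = EmailService()
assert not svc.is_configured  # correct for a property
# svc.is_configured() would raise: TypeError: 'bool' object is not callable
```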
@@ -1108,83 +827,6 @@ async def get_scheduler_status(
     }


-# ============== Ops: Backups (4B) ==============
-
-@router.get("/system/backups")
-async def get_backups(
-    admin: User = Depends(require_admin),
-    limit: int = 20,
-):
-    """List recent DB backups on the server."""
-    return {"backups": list_backups(limit=limit)}
-
-
-@router.post("/system/backups")
-async def create_db_backup(
-    admin: User = Depends(require_admin),
-    verify: bool = True,
-):
-    """Create a DB backup on the server (and verify it)."""
-    if not settings.enable_db_backups:
-        raise HTTPException(status_code=403, detail="DB backups are disabled (ENABLE_DB_BACKUPS=false).")
-    try:
-        result = create_backup(verify=verify)
-        return {
-            "status": "ok",
-            "backup": {
-                "path": result.path,
-                "size_bytes": result.size_bytes,
-                "created_at": result.created_at,
-                "verified": result.verified,
-                "verification_detail": result.verification_detail,
-            },
-        }
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Backup failed: {e}")
-
-
-@router.post("/system/ops-alerts/run")
-async def run_ops_alerts_now(
-    admin: User = Depends(require_admin),
-):
-    """
-    Run ops alert checks immediately (and send alerts if enabled).
-    Useful for server validation without Docker.
-    """
-    return await run_ops_alert_checks()
-
-
-@router.get("/system/ops-alerts/history")
-async def get_ops_alert_history(
-    db: Database,
-    admin: User = Depends(require_admin),
-    limit: int = 100,
-):
-    """Return recent persisted ops alert events."""
-    limit = max(1, min(int(limit), 500))
-    rows = (
-        await db.execute(
-            select(OpsAlertEvent).order_by(OpsAlertEvent.created_at.desc()).limit(limit)
-        )
-    ).scalars().all()
-    return {
-        "events": [
-            {
-                "id": e.id,
-                "alert_key": e.alert_key,
-                "severity": e.severity,
-                "title": e.title,
-                "detail": e.detail,
-                "status": e.status,
-                "send_reason": e.send_reason,
-                "recipients": e.recipients,
-                "created_at": e.created_at.isoformat(),
-            }
-            for e in rows
-        ]
-    }
-
-
 # ============== Bulk Operations ==============

 class BulkUpgradeRequest(BaseModel):
@@ -1339,390 +981,3 @@ async def get_activity_log(
         ],
         "total": total,
     }
-
-
-# ============== API Connection Tests ==============
-
-@router.get("/test-apis")
-async def test_external_apis(
-    admin: User = Depends(require_admin),
-):
-    """
-    Test connections to all external APIs.
-
-    Returns status of:
-    - DropCatch API
-    - Sedo API
-    - Moz API (if configured)
-    """
-    from app.services.dropcatch_api import dropcatch_client
-    from app.services.sedo_api import sedo_client
-
-    results = {
-        "tested_at": datetime.utcnow().isoformat(),
-        "apis": {}
-    }
-
-    # Test DropCatch API
-    try:
-        dropcatch_result = await dropcatch_client.test_connection()
-        results["apis"]["dropcatch"] = dropcatch_result
-    except Exception as e:
-        results["apis"]["dropcatch"] = {
-            "success": False,
-            "error": str(e),
-            "configured": dropcatch_client.is_configured
-        }
-
-    # Test Sedo API
-    try:
-        sedo_result = await sedo_client.test_connection()
-        results["apis"]["sedo"] = sedo_result
-    except Exception as e:
-        results["apis"]["sedo"] = {
-            "success": False,
-            "error": str(e),
-            "configured": sedo_client.is_configured
-        }
-
-    # Summary
-    results["summary"] = {
-        "total": len(results["apis"]),
-        "configured": sum(1 for api in results["apis"].values() if api.get("configured")),
-        "connected": sum(1 for api in results["apis"].values() if api.get("success")),
-    }
-
-    return results
-
-
-@router.post("/trigger-scrape")
-async def trigger_auction_scrape(
-    db: Database,
-    admin: User = Depends(require_admin),
-):
-    """
-    Manually trigger auction scraping from all sources.
-
-    This will:
-    1. Try Tier 1 APIs (DropCatch, Sedo) first
-    2. Fall back to web scraping for others
-    """
-    # Prefer job queue in production (non-blocking)
-    if settings.enable_job_queue and settings.redis_url:
-        from app.jobs.client import enqueue_job
-
-        job_id = await enqueue_job("scrape_auctions")
-        return {
-            "message": "Auction scraping enqueued",
-            "job_id": job_id,
-            "note": "Check /admin/scrape-status for results",
-        }
-
-    # Fallback: run inline
-    from app.services.auction_scraper import AuctionScraperService
-
-    scraper = AuctionScraperService()
-    result = await scraper.scrape_all_platforms(db)
-
-    return {
-        "message": "Auction scraping completed",
-        "result": result,
-        "note": "Check /admin/scrape-status for results",
-    }
-
-
-@router.get("/scrape-status")
-async def get_scrape_status(
-    db: Database,
-    admin: User = Depends(require_admin),
-    limit: int = 10,
-):
-    """Get recent scrape logs."""
-    from app.models.auction import AuctionScrapeLog
-
-    query = (
-        select(AuctionScrapeLog)
-        .order_by(desc(AuctionScrapeLog.started_at))
-        .limit(limit)
-    )
-
-    try:
-        result = await db.execute(query)
-        logs = result.scalars().all()
-    except Exception:
-        return {"logs": [], "error": "Table not found"}
-
-    return {
-        "logs": [
-            {
-                "id": log.id,
-                "platform": log.platform,
-                "status": log.status,
-                "auctions_found": log.auctions_found,
-                "auctions_new": log.auctions_new,
-                "auctions_updated": log.auctions_updated,
-                "error_message": log.error_message,
-                "started_at": log.started_at.isoformat() if log.started_at else None,
-                "completed_at": log.completed_at.isoformat() if log.completed_at else None,
-            }
-            for log in logs
-        ]
-    }
-
-
-# ============== Subscription Management ==============
-
-class SubscriptionUpdate(BaseModel):
-    """Manual subscription update request."""
-    tier: str  # "scout", "trader", "tycoon"
-
-
-@router.post("/users/{user_id}/sync-subscription")
-async def sync_user_subscription_from_stripe(
-    user_id: int,
-    db: Database,
-    admin: User = Depends(require_admin),
-):
-    """
-    Sync a user's subscription status from Stripe.
-
-    Use this if the webhook failed to update the subscription.
-    """
-    import stripe
-    import os
-
-    stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
-
-    if not stripe.api_key:
-        raise HTTPException(status_code=503, detail="Stripe not configured")
-
-    # Get user
-    result = await db.execute(select(User).where(User.id == user_id))
-    user = result.scalar_one_or_none()
-
-    if not user:
-        raise HTTPException(status_code=404, detail="User not found")
-
-    if not user.stripe_customer_id:
-        raise HTTPException(status_code=400, detail="User has no Stripe customer ID")
-
-    # Get subscriptions from Stripe
-    try:
-        subscriptions = stripe.Subscription.list(
-            customer=user.stripe_customer_id,
-            status="active",
-            limit=1
-        )
-    except stripe.error.StripeError as e:
-        raise HTTPException(status_code=500, detail=f"Stripe error: {e}")
-
-    if not subscriptions.data:
-        return {
-            "status": "no_active_subscription",
-            "message": "No active subscription found in Stripe",
-            "user_email": user.email,
-            "stripe_customer_id": user.stripe_customer_id
-        }
-
-    stripe_sub = subscriptions.data[0]
-    # Access items via dict notation (Stripe returns StripeObject)
-    items_data = stripe_sub.get("items", {}).get("data", [])
-    price_id = items_data[0].get("price", {}).get("id") if items_data else None
-
-    # Map price_id to tier
-    trader_price = os.getenv("STRIPE_PRICE_TRADER")
-    tycoon_price = os.getenv("STRIPE_PRICE_TYCOON")
-
-    if price_id == trader_price:
-        tier = SubscriptionTier.TRADER
-        tier_name = "trader"
-    elif price_id == tycoon_price:
-        tier = SubscriptionTier.TYCOON
-        tier_name = "tycoon"
-    else:
-        return {
-            "status": "unknown_price",
-            "message": f"Unknown price ID: {price_id}",
-            "stripe_subscription_id": stripe_sub.id
-        }
-
-    # Update subscription in database
-    sub_result = await db.execute(
-        select(Subscription).where(Subscription.user_id == user.id)
-    )
-    subscription = sub_result.scalar_one_or_none()
-
-    tier_config = TIER_CONFIG[tier]
-
-    if subscription:
-        old_tier = subscription.tier
-        subscription.tier = tier
-        subscription.status = SubscriptionStatus.ACTIVE
-        subscription.stripe_subscription_id = stripe_sub.id
-        subscription.max_domains = tier_config["domain_limit"]
-        subscription.check_frequency = tier_config["check_frequency"]
-    else:
-        subscription = Subscription(
-            user_id=user.id,
-            tier=tier,
-            status=SubscriptionStatus.ACTIVE,
-            stripe_subscription_id=stripe_sub.id,
-            max_domains=tier_config["domain_limit"],
-            check_frequency=tier_config["check_frequency"],
-        )
-        db.add(subscription)
-        old_tier = None
-
-    await db.commit()
-
-    return {
-        "status": "synced",
-        "user_email": user.email,
-        "stripe_customer_id": user.stripe_customer_id,
-        "stripe_subscription_id": stripe_sub.id,
-        "old_tier": old_tier.value if old_tier else None,
-        "new_tier": tier.value,
-        "tier_config": {
-            "domain_limit": tier_config["domain_limit"],
-            "check_frequency": tier_config["check_frequency"],
-        }
-    }
-
-
-@router.post("/users/{user_id}/set-subscription")
-async def set_user_subscription(
-    user_id: int,
-    update: SubscriptionUpdate,
-    db: Database,
-    admin: User = Depends(require_admin),
-):
-    """
-    Manually set a user's subscription tier.
-
-    Use this to manually upgrade/downgrade users (e.g., for refunds or promotions).
-    """
-    tier_map = {
-        "scout": SubscriptionTier.SCOUT,
-        "trader": SubscriptionTier.TRADER,
-        "tycoon": SubscriptionTier.TYCOON,
-    }
-
-    if update.tier.lower() not in tier_map:
-        raise HTTPException(status_code=400, detail=f"Invalid tier: {update.tier}")
-
-    tier = tier_map[update.tier.lower()]
-
-    # Get user
-    result = await db.execute(select(User).where(User.id == user_id))
-    user = result.scalar_one_or_none()
-
-    if not user:
-        raise HTTPException(status_code=404, detail="User not found")
-
-    # Get/create subscription
-    sub_result = await db.execute(
-        select(Subscription).where(Subscription.user_id == user.id)
-    )
-    subscription = sub_result.scalar_one_or_none()
-
-    tier_config = TIER_CONFIG[tier]
-
-    if subscription:
-        old_tier = subscription.tier
-        subscription.tier = tier
-        subscription.status = SubscriptionStatus.ACTIVE
-        subscription.max_domains = tier_config["domain_limit"]
-        subscription.check_frequency = tier_config["check_frequency"]
-    else:
-        subscription = Subscription(
-            user_id=user.id,
-            tier=tier,
-            status=SubscriptionStatus.ACTIVE,
-            max_domains=tier_config["domain_limit"],
-            check_frequency=tier_config["check_frequency"],
-        )
-        db.add(subscription)
-        old_tier = None
-
-    await db.commit()
-
-    return {
-        "status": "updated",
-        "user_email": user.email,
-        "user_id": user.id,
-        "old_tier": old_tier.value if old_tier else None,
-        "new_tier": tier.value,
-    }
-
-
-# ============== Listing Debug Endpoints ==============
-
-@router.get("/listings/debug")
-async def debug_listings(
-    domain: Optional[str] = None,
-    slug: Optional[str] = None,
-    db: Database = None,
-    _: User = Depends(require_admin),
-):
-    """Debug listings - search by domain or slug (ignores status)."""
-    query = select(DomainListing)
-
-    if domain:
-        query = query.where(DomainListing.domain.ilike(f"%{domain}%"))
-    if slug:
-        query = query.where(DomainListing.slug.ilike(f"%{slug}%"))
-
-    query = query.order_by(desc(DomainListing.created_at)).limit(20)
-
-    result = await db.execute(query)
-    listings = list(result.scalars().all())
-
-    return {
-        "count": len(listings),
-        "listings": [
-            {
-                "id": l.id,
-                "domain": l.domain,
-                "slug": l.slug,
-                "status": l.status,
-                "is_verified": l.is_verified,
-                "verification_status": l.verification_status,
-                "public_url": l.public_url,
-                "created_at": str(l.created_at) if l.created_at else None,
-                "published_at": str(l.published_at) if l.published_at else None,
-                "user_id": l.user_id,
-            }
-            for l in listings
-        ]
-    }
-
-
-@router.post("/listings/{listing_id}/force-activate")
-async def force_activate_listing(
-    listing_id: int,
-    db: Database = None,
-    _: User = Depends(require_admin),
-):
-    """Force-activate a listing (bypass DNS verification)."""
-    result = await db.execute(
-        select(DomainListing).where(DomainListing.id == listing_id)
-    )
-    listing = result.scalar_one_or_none()
-
-    if not listing:
-        raise HTTPException(status_code=404, detail="Listing not found")
-
-    listing.status = "active"
-    listing.is_verified = True
-    listing.verification_status = "verified"
-    listing.published_at = datetime.utcnow()
-
-    await db.commit()
-
-    return {
-        "status": "activated",
-        "listing_id": listing.id,
-        "domain": listing.domain,
-        "slug": listing.slug,
-        "public_url": listing.public_url,
-    }
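The removed sync endpoint maps Stripe price IDs to tiers via the `STRIPE_PRICE_TRADER` / `STRIPE_PRICE_TYCOON` environment variables. A toy illustration of that mapping in isolation; the price IDs here are fake:

```python
# Toy illustration of the price-ID -> tier mapping from the removed endpoint.
import os

os.environ.setdefault("STRIPE_PRICE_TRADER", "price_trader_123")  # fake ID
os.environ.setdefault("STRIPE_PRICE_TYCOON", "price_tycoon_456")  # fake ID

def tier_for(price_id: str) -> str | None:
    if price_id == os.getenv("STRIPE_PRICE_TRADER"):
        return "trader"
    if price_id == os.getenv("STRIPE_PRICE_TYCOON"):
        return "tycoon"
    return None  # unknown price -> surface an "unknown_price" response

assert tier_for("price_tycoon_456") == "tycoon"
```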
@@ -1,36 +0,0 @@
-"""Analyze API endpoints (Alpha Terminal - Diligence)."""
-
-from __future__ import annotations
-
-from fastapi import APIRouter, Query, Request
-from slowapi import Limiter
-from slowapi.util import get_remote_address
-
-from app.api.deps import CurrentUser, Database
-from app.schemas.analyze import AnalyzeResponse
-from app.services.analyze.service import get_domain_analysis
-
-router = APIRouter()
-limiter = Limiter(key_func=get_remote_address)
-
-
-@router.get("/{domain}", response_model=AnalyzeResponse)
-@limiter.limit("60/minute")
-async def analyze_domain(
-    request: Request,
-    domain: str,
-    current_user: CurrentUser,
-    db: Database,
-    fast: bool = Query(False, description="Skip slower HTTP/SSL checks"),
-    refresh: bool = Query(False, description="Bypass cache and recompute"),
-):
-    """
-    Analyze a domain with open-data-first signals.
-
-    Requires authentication (Terminal feature).
-    """
-    _ = current_user  # enforce auth
-    res = await get_domain_analysis(db, domain, fast=fast, refresh=refresh)
-    await db.commit()  # persist cache upsert
-    return res
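Note that the deleted file's `@limiter.limit("60/minute")` decorator only enforces anything if slowapi is also wired into the FastAPI app; that wiring is not part of this diff, but it typically looks like the following sketch:

```python
# Minimal slowapi wiring sketch (assumed; not shown in this diff).
from fastapi import FastAPI
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from slowapi.util import get_remote_address

limiter = Limiter(key_func=get_remote_address)
app = FastAPI()
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
# Decorated endpoints must accept a `request: Request` parameter, as the
# deleted analyze_domain handler did.
```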
@@ -10,11 +10,6 @@ Data Sources (Web Scraping):
 - Sedo (public search)
 - NameJet (public auctions)

-PLUS Pounce Direct Listings (user-created marketplace):
-- DNS-verified owner listings
-- Instant buy option
-- 0% commission
-
 IMPORTANT:
 - All data comes from web scraping of public pages
 - No mock data - everything is real scraped data
@@ -29,17 +24,15 @@ Legal Note (Switzerland):
 import logging
 from datetime import datetime, timedelta
 from typing import Optional, List
-from itertools import groupby
 from fastapi import APIRouter, Depends, Query, HTTPException
 from pydantic import BaseModel
-from sqlalchemy import select, func, and_, or_
+from sqlalchemy import select, func, and_
 from sqlalchemy.ext.asyncio import AsyncSession

 from app.database import get_db
 from app.api.deps import get_current_user, get_current_user_optional
 from app.models.user import User
 from app.models.auction import DomainAuction, AuctionScrapeLog
-from app.models.listing import DomainListing, ListingStatus, VerificationStatus
 from app.services.valuation import valuation_service
 from app.services.auction_scraper import auction_scraper
@@ -110,65 +103,13 @@ class ScrapeStatus(BaseModel):
     next_scrape: Optional[datetime]


-class MarketFeedItem(BaseModel):
-    """Unified market feed item - combines auctions and Pounce Direct listings."""
-    id: str
-    domain: str
-    tld: str
-    price: float
-    currency: str = "USD"
-    price_type: str  # "bid" or "fixed"
-    status: str  # "auction" or "instant"
-
-    # Source info
-    source: str  # "Pounce", "GoDaddy", "Sedo", etc.
-    is_pounce: bool = False
-    verified: bool = False
-
-    # Auction-specific
-    time_remaining: Optional[str] = None
-    end_time: Optional[datetime] = None
-    num_bids: Optional[int] = None
-
-    # Pounce Direct specific
-    slug: Optional[str] = None
-    seller_verified: bool = False
-
-    # URLs
-    url: str  # Internal for Pounce, external for auctions
-    is_external: bool = True
-
-    # Scoring
-    pounce_score: int = 50
-
-    # Valuation (optional)
-    valuation: Optional[AuctionValuation] = None
-
-    class Config:
-        from_attributes = True
-
-
-class MarketFeedResponse(BaseModel):
-    """Response for unified market feed."""
-    items: List[MarketFeedItem]
-    total: int
-    pounce_direct_count: int
-    auction_count: int
-    sources: List[str]
-    last_updated: datetime
-    filters_applied: dict = {}
-
-
 # ============== Helper Functions ==============

-def _format_time_remaining(end_time: datetime, now: Optional[datetime] = None) -> str:
+def _format_time_remaining(end_time: datetime) -> str:
     """Format time remaining in human-readable format."""
-    ref = now or datetime.utcnow()
-    delta = end_time - ref
+    delta = end_time - datetime.utcnow()

-    # Small grace window to avoid displaying "Ended" due to request processing time.
-    # If an auction ends within the next ~2 seconds, we show "0m".
-    if delta.total_seconds() <= -2:
+    if delta.total_seconds() <= 0:
         return "Ended"

     hours = int(delta.total_seconds() // 3600)
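The two variants above differ at the boundary: the "-" side takes an injectable clock and a 2-second grace window, so an auction a moment past its deadline still renders "0m"; the "+" side re-reads the wall clock and flips to "Ended" at exactly zero. A small deterministic check:

```python
# Boundary check on the two variants above (fixed clock for determinism).
from datetime import datetime, timedelta

now = datetime(2025, 1, 1, 12, 0, 0)

def old_label(end: datetime, ref: datetime) -> str:
    # "-" side: -2s grace window, injectable clock.
    s = (end - ref).total_seconds()
    return "Ended" if s <= -2 else "0m or more"

assert old_label(now + timedelta(seconds=-1), ref=now) == "0m or more"  # grace
assert old_label(now + timedelta(seconds=-3), ref=now) == "Ended"
```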
@@ -180,31 +121,19 @@ def _format_time_remaining(end_time: datetime, now: Optional[datetime] = None) -> str:
     elif hours > 0:
         return f"{hours}h {minutes}m"
     else:
-        return f"{max(minutes, 0)}m"
+        return f"{minutes}m"


 def _get_affiliate_url(platform: str, domain: str, auction_url: str) -> str:
-    """Get affiliate URL for a platform - links directly to the auction page with affiliate tracking."""
-    # SEDO SPECIAL CASE: Always use direct Sedo link with partner ID
-    # This ensures we get affiliate revenue even from scraped data
-    if platform == "Sedo":
-        return f"https://sedo.com/search/details/?domain={domain}&partnerid=335830"
-
-    # Import here to avoid circular imports
-    from app.services.hidden_api_scrapers import build_affiliate_url
-
-    # Try to build affiliate URL first (includes partner IDs)
-    affiliate_url = build_affiliate_url(platform, domain, auction_url)
-    if affiliate_url:
-        return affiliate_url
-
+    """Get affiliate URL for a platform - links directly to the auction page."""
     # Use the scraped auction URL directly if available
     if auction_url and auction_url.startswith("http"):
         return auction_url

-    # Fallback to platform-specific search/listing pages (without affiliate tracking)
+    # Fallback to platform-specific search/listing pages
     platform_urls = {
         "GoDaddy": f"https://auctions.godaddy.com/trpItemListing.aspx?domain={domain}",
+        "Sedo": f"https://sedo.com/search/?keyword={domain}",
         "NameJet": f"https://www.namejet.com/Pages/Auctions/BackorderSearch.aspx?q={domain}",
         "DropCatch": f"https://www.dropcatch.com/domain/{domain}",
         "ExpiredDomains": f"https://www.expireddomains.net/domain-name-search/?q={domain}",
@@ -218,8 +147,7 @@ def _get_affiliate_url(platform: str, domain: str, auction_url: str) -> str:
 async def _convert_to_listing(
     auction: DomainAuction,
     db: AsyncSession,
-    include_valuation: bool = True,
-    now: Optional[datetime] = None,
+    include_valuation: bool = True
 ) -> AuctionListing:
     """Convert database auction to API response."""
     valuation_data = None
@@ -250,7 +178,7 @@ async def _convert_to_listing(
         currency=auction.currency,
         num_bids=auction.num_bids,
         end_time=auction.end_time,
-        time_remaining=_format_time_remaining(auction.end_time, now=now),
+        time_remaining=_format_time_remaining(auction.end_time),
         buy_now_price=auction.buy_now_price,
         reserve_met=auction.reserve_met,
         traffic=auction.traffic,
@@ -293,14 +221,8 @@ async def search_auctions(
     - Look for value_ratio > 1.0 (estimated value exceeds current bid)
     - Focus on auctions ending soon with low bid counts
     """
-    # Build query - ONLY show active auctions that haven't ended yet
-    now = datetime.utcnow()
-    query = select(DomainAuction).where(
-        and_(
-            DomainAuction.is_active == True,
-            DomainAuction.end_time > now  # ← CRITICAL: only auctions that are still running!
-        )
-    )
+    # Build query
+    query = select(DomainAuction).where(DomainAuction.is_active == True)

     # VANITY FILTER: For public (non-logged-in) users, only show premium-looking domains
     # This ensures the first impression is high-quality, not spam domains
@@ -399,7 +321,7 @@ async def search_auctions(
     # Convert to response with valuations
     listings = []
     for auction in auctions:
-        listing = await _convert_to_listing(auction, db, include_valuation=True, now=now)
+        listing = await _convert_to_listing(auction, db, include_valuation=True)
         listings.append(listing)

     # Sort by value_ratio if requested (after valuation)
@@ -442,8 +364,7 @@ async def get_ending_soon(

     Data is scraped from public auction sites - no mock data.
     """
-    now = datetime.utcnow()
-    cutoff = now + timedelta(hours=hours)
+    cutoff = datetime.utcnow() + timedelta(hours=hours)

     query = (
         select(DomainAuction)
@@ -451,7 +372,7 @@ async def get_ending_soon(
         and_(
             DomainAuction.is_active == True,
             DomainAuction.end_time <= cutoff,
-            DomainAuction.end_time > now,
+            DomainAuction.end_time > datetime.utcnow(),
         )
     )
     .order_by(DomainAuction.end_time.asc())
@@ -463,7 +384,7 @@ async def get_ending_soon(

     listings = []
     for auction in auctions:
-        listing = await _convert_to_listing(auction, db, include_valuation=True, now=now)
+        listing = await _convert_to_listing(auction, db, include_valuation=True)
         listings.append(listing)

     return listings
@@ -480,15 +401,9 @@ async def get_hot_auctions(

     Data is scraped from public auction sites - no mock data.
     """
-    now = datetime.utcnow()
     query = (
         select(DomainAuction)
-        .where(
-            and_(
-                DomainAuction.is_active == True,
-                DomainAuction.end_time > now  # Only show active auctions
-            )
-        )
+        .where(DomainAuction.is_active == True)
         .order_by(DomainAuction.num_bids.desc())
         .limit(limit)
     )
@@ -498,7 +413,7 @@ async def get_hot_auctions(

     listings = []
     for auction in auctions:
-        listing = await _convert_to_listing(auction, db, include_valuation=True, now=now)
+        listing = await _convert_to_listing(auction, db, include_valuation=True)
         listings.append(listing)

     return listings
@@ -514,7 +429,6 @@ async def get_platform_stats(

     Data is scraped from public auction sites - no mock data.
     """
-    now = datetime.utcnow()
     # Get stats per platform
     stats_query = (
         select(
@@ -522,12 +436,7 @@ async def get_platform_stats(
             func.count(DomainAuction.id).label("count"),
             func.avg(DomainAuction.current_bid).label("avg_bid"),
         )
-        .where(
-            and_(
-                DomainAuction.is_active == True,
-                DomainAuction.end_time > now,
-            )
-        )
+        .where(DomainAuction.is_active == True)
         .group_by(DomainAuction.platform)
     )
@@ -535,7 +444,7 @@ async def get_platform_stats(
     platform_data = result.all()

     # Get ending soon counts
-    cutoff = now + timedelta(hours=1)
+    cutoff = datetime.utcnow() + timedelta(hours=1)
     ending_query = (
         select(
             DomainAuction.platform,
@@ -544,7 +453,6 @@ async def get_platform_stats(
         .where(
             and_(
                 DomainAuction.is_active == True,
-                DomainAuction.end_time > now,
                 DomainAuction.end_time <= cutoff,
             )
         )
@@ -572,7 +480,6 @@ async def get_scrape_status(
     db: AsyncSession = Depends(get_db),
 ):
     """Get status of auction scraping."""
-    now = datetime.utcnow()
     # Get last successful scrape
     last_scrape_query = (
         select(AuctionScrapeLog)
@@ -584,12 +491,7 @@ async def get_scrape_status(
     last_log = result.scalar_one_or_none()

     # Get total auctions
-    total_query = select(func.count(DomainAuction.id)).where(
-        and_(
-            DomainAuction.is_active == True,
-            DomainAuction.end_time > now,
-        )
-    )
+    total_query = select(func.count(DomainAuction.id)).where(DomainAuction.is_active == True)
     total_result = await db.execute(total_query)
     total = total_result.scalar() or 0
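The same "active and not yet ended" predicate recurs in half a dozen queries on the "-" side of this file, and each removal above drops it independently. Factoring it into one helper keeps the filters in sync; a sketch only, `active_auctions()` is a hypothetical helper, not code in this diff:

```python
# Hypothetical shared predicate for the repeated filter in this file.
from datetime import datetime

from sqlalchemy import and_

from app.models.auction import DomainAuction


def active_auctions(now: datetime | None = None):
    now = now or datetime.utcnow()
    return and_(DomainAuction.is_active == True, DomainAuction.end_time > now)

# usage: select(DomainAuction).where(active_auctions())
```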
@@ -629,48 +531,25 @@ async def trigger_scrape(
         raise HTTPException(status_code=500, detail=f"Scrape failed: {str(e)}")


-@router.get("/sedo")
-async def get_sedo_listings(
-    keyword: Optional[str] = Query(None, description="Search keyword"),
-    tld: Optional[str] = Query(None, description="Filter by TLD"),
-    limit: int = Query(50, le=100),
-    current_user: Optional[User] = Depends(get_current_user_optional),
-):
-    """
-    Get live domain listings from Sedo marketplace.
-
-    Returns real-time data from Sedo API with affiliate tracking.
-    All links include Pounce partner ID for commission tracking.
-    """
-    from app.services.sedo_api import sedo_client
-
-    if not sedo_client.is_configured:
-        return {
-            "items": [],
-            "error": "Sedo API not configured",
-            "source": "sedo"
-        }
-
-    try:
-        listings = await sedo_client.get_listings_for_display(
-            keyword=keyword,
-            tld=tld,
-            page_size=limit
-        )
-        return {
-            "items": listings,
-            "count": len(listings),
-            "source": "sedo",
-            "affiliate_note": "All links include Pounce partner ID for commission tracking"
-        }
-    except Exception as e:
-        logger.error(f"Sedo API error: {e}")
-        return {
-            "items": [],
-            "error": str(e),
-            "source": "sedo"
-        }
+@router.post("/seed")
+async def seed_auctions(
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """
+    Seed the database with realistic sample auction data.
+    Useful for development and demo purposes.
+    """
+    try:
+        result = await auction_scraper.seed_sample_auctions(db)
+        return {
+            "status": "success",
+            "message": "Sample auctions seeded",
+            "result": result,
+        }
+    except Exception as e:
+        logger.error(f"Seeding failed: {e}")
+        raise HTTPException(status_code=500, detail=f"Seeding failed: {str(e)}")


 @router.get("/opportunities")
@@ -689,15 +568,9 @@ async def get_smart_opportunities(
     Opportunity Score = time_urgency × competition_factor × price_factor
     """
     # Get active auctions
-    now = datetime.utcnow()
     query = (
         select(DomainAuction)
-        .where(
-            and_(
-                DomainAuction.is_active == True,
-                DomainAuction.end_time > now,
-            )
-        )
+        .where(DomainAuction.is_active == True)
         .order_by(DomainAuction.end_time.asc())
         .limit(100)
     )
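For intuition on the `time_urgency × competition_factor × price_factor` formula named in the docstring, here is an illustrative composition. The factor definitions below are assumptions for the sketch; the real ones live in the endpoint body, which this hunk only partially shows:

```python
# Illustrative composition of the opportunity score (factor formulas assumed).
def opportunity_score(hours_left: float, num_bids: int, value_ratio: float) -> float:
    time_urgency = max(0.0, 24 - hours_left) / 24 * 10  # assumed: sooner -> higher
    competition = 1.0 / (1 + num_bids)                  # assumed: fewer bids -> higher
    price_factor = max(value_ratio, 0.1)                # assumed: undervalued -> higher
    return time_urgency * competition * price_factor

# An auction ending in 2h with 1 bid and 2x estimated value scores well:
print(round(opportunity_score(hours_left=2, num_bids=1, value_ratio=2.0), 2))
```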
@@ -777,7 +650,7 @@ async def get_smart_opportunities(
         if opportunity_score < 3:
             continue

-        listing = await _convert_to_listing(auction, db, include_valuation=False, now=now)
+        listing = await _convert_to_listing(auction, db, include_valuation=False)

         recommendation = (
             "🔥 Hot" if opportunity_score >= 10 else
@@ -838,415 +711,3 @@ def _get_opportunity_reasoning(value_ratio: float, hours_left: float, num_bids: int) -> str:
         reasons.append(f"🔥 High demand ({num_bids} bids)")

     return " | ".join(reasons)
-
-
-def _calculate_pounce_score_v2(domain: str, tld: str, num_bids: int = 0, age_years: int = 0, is_pounce: bool = False) -> int:
-    # Backward-compatible wrapper (shared implementation lives in services)
-    from app.services.pounce_score import calculate_pounce_score_v2
-
-    return calculate_pounce_score_v2(
-        domain,
-        tld,
-        num_bids=num_bids,
-        age_years=age_years,
-        is_pounce=is_pounce,
-    )
-
-
-def _is_premium_domain(domain_name: str) -> bool:
-    """Check if a domain looks premium/professional (Vanity Filter)."""
-    parts = domain_name.rsplit('.', 1)
-    name = parts[0] if parts else domain_name
-    tld = parts[1].lower() if len(parts) > 1 else ""
-
-    # Premium TLDs only
-    premium_tlds = ['com', 'io', 'ai', 'co', 'de', 'ch', 'net', 'org', 'app', 'dev', 'xyz']
-    if tld and tld not in premium_tlds:
-        return False
-
-    # Length check
-    if len(name) > 15:
-        return False
-    if len(name) < 3:
-        return False
-
-    # Hyphen check
-    if name.count('-') > 1:
-        return False
-
-    # Digit check
-    if sum(1 for c in name if c.isdigit()) > 2:
-        return False
-
-    # Consonant cluster check
-    consonants = 'bcdfghjklmnpqrstvwxyz'
-    max_streak = 0
-    current_streak = 0
-    for c in name.lower():
-        if c in consonants:
-            current_streak += 1
-            max_streak = max(max_streak, current_streak)
-        else:
-            current_streak = 0
-    if max_streak > 4:
-        return False
-
-    return True
-
-
-# ============== UNIFIED MARKET FEED ==============
-
-@router.get("/feed", response_model=MarketFeedResponse)
-async def get_market_feed(
-    # Source filter
-    source: str = Query("all", enum=["all", "pounce", "external"]),
-
-    # Search & filters
-    keyword: Optional[str] = Query(None, description="Search in domain names"),
-    tld: Optional[str] = Query(None, description="Filter by TLD"),
-    min_price: Optional[float] = Query(None, ge=0),
-    max_price: Optional[float] = Query(None, ge=0),
-    min_score: int = Query(0, ge=0, le=100),
-    ending_within: Optional[int] = Query(None, description="Auctions ending within X hours"),
-    verified_only: bool = Query(False, description="Only show verified Pounce listings"),
-
-    # Sort
-    sort_by: str = Query("score", enum=["score", "price_asc", "price_desc", "time", "newest"]),
-
-    # Pagination
-    limit: int = Query(50, le=200),
-    offset: int = Query(0, ge=0),
-
-    # Auth
-    current_user: Optional[User] = Depends(get_current_user_optional),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    🚀 UNIFIED MARKET FEED — The heart of Pounce
-
-    Combines:
-    - 💎 Pounce Direct: DNS-verified user listings (instant buy)
-    - 🏢 External Auctions: Scraped from GoDaddy, Sedo, NameJet, etc.
-
-    For non-authenticated users:
-    - Vanity filter applied (premium domains only)
-    - Pounce Score visible but limited details
-
-    For authenticated users (Trader/Tycoon):
-    - Full access to all domains
-    - Advanced filtering
-    - Valuation data
-
-    POUNCE EXCLUSIVE domains are highlighted and appear first.
-    """
-    # NOTE: This endpoint is called frequently by the Market UI.
-    # Avoid loading *all* auctions/listings into Python. Instead, we:
# - Apply filters + ordering in SQL where possible
|
|
||||||
# - Over-fetch a bounded window for combined feeds ("all") and score-sorting
|
|
||||||
now = datetime.utcnow()
|
|
||||||
tld_clean = tld.lower().lstrip(".") if tld else None
|
|
||||||
|
|
||||||
requested = offset + limit
|
|
||||||
fetch_window = min(max(requested * 3, 200), 2000) # bounded overfetch for merge/sort
|
|
||||||
|
|
||||||
built: list[dict] = [] # {"item": MarketFeedItem, "newest_ts": datetime}
|
|
||||||
|
|
||||||
# -----------------------------
|
|
||||||
# Build base filters (SQL-side)
|
|
||||||
# -----------------------------
|
|
||||||
listing_filters = [DomainListing.status == ListingStatus.ACTIVE.value]
|
|
||||||
if keyword:
|
|
||||||
listing_filters.append(DomainListing.domain.ilike(f"%{keyword}%"))
|
|
||||||
if verified_only:
|
|
||||||
listing_filters.append(DomainListing.verification_status == VerificationStatus.VERIFIED.value)
|
|
||||||
if min_price is not None:
|
|
||||||
listing_filters.append(DomainListing.asking_price >= min_price)
|
|
||||||
if max_price is not None:
|
|
||||||
listing_filters.append(DomainListing.asking_price <= max_price)
|
|
||||||
if tld_clean:
|
|
||||||
listing_filters.append(DomainListing.domain.ilike(f"%.{tld_clean}"))
|
|
||||||
|
|
||||||
auction_filters = [
|
|
||||||
DomainAuction.is_active == True,
|
|
||||||
DomainAuction.end_time > now,
|
|
||||||
]
|
|
||||||
if keyword:
|
|
||||||
auction_filters.append(DomainAuction.domain.ilike(f"%{keyword}%"))
|
|
||||||
if tld_clean:
|
|
||||||
auction_filters.append(DomainAuction.tld == tld_clean)
|
|
||||||
if min_price is not None:
|
|
||||||
auction_filters.append(DomainAuction.current_bid >= min_price)
|
|
||||||
if max_price is not None:
|
|
||||||
auction_filters.append(DomainAuction.current_bid <= max_price)
|
|
||||||
if ending_within:
|
|
||||||
cutoff = now + timedelta(hours=ending_within)
|
|
||||||
auction_filters.append(DomainAuction.end_time <= cutoff)
|
|
||||||
|
|
||||||
# -----------------------------
|
|
||||||
# Counts (used for UI stats)
|
|
||||||
# -----------------------------
|
|
||||||
pounce_total = 0
|
|
||||||
auction_total = 0
|
|
||||||
if source in ["all", "pounce"]:
|
|
||||||
pounce_total = (await db.execute(select(func.count(DomainListing.id)).where(and_(*listing_filters)))).scalar() or 0
|
|
||||||
if source in ["all", "external"]:
|
|
||||||
auction_total = (await db.execute(select(func.count(DomainAuction.id)).where(and_(*auction_filters)))).scalar() or 0
|
|
||||||
|
|
||||||
# -----------------------------
|
|
||||||
# Fetch + build items (bounded)
|
|
||||||
# -----------------------------
|
|
||||||
# For "all": fetch a bounded window from each source and then merge/sort in Python.
|
|
||||||
# For single-source: fetch offset/limit directly when sort can be pushed to SQL.
|
|
||||||
listing_offset = 0
|
|
||||||
listing_limit = fetch_window
|
|
||||||
auction_offset = 0
|
|
||||||
auction_limit = fetch_window
|
|
||||||
|
|
||||||
if source == "pounce":
|
|
||||||
listing_offset = offset
|
|
||||||
listing_limit = limit
|
|
||||||
if source == "external":
|
|
||||||
auction_offset = offset
|
|
||||||
auction_limit = limit
|
|
||||||
|
|
||||||
# Pounce Direct listings
|
|
||||||
if source in ["all", "pounce"]:
|
|
||||||
listing_query = select(DomainListing).where(and_(*listing_filters))
|
|
||||||
|
|
||||||
# SQL ordering for listings (best-effort)
|
|
||||||
if sort_by == "price_asc":
|
|
||||||
listing_query = listing_query.order_by(func.coalesce(DomainListing.asking_price, 0).asc())
|
|
||||||
elif sort_by == "price_desc":
|
|
||||||
listing_query = listing_query.order_by(func.coalesce(DomainListing.asking_price, 0).desc())
|
|
||||||
elif sort_by == "newest":
|
|
||||||
listing_query = listing_query.order_by(DomainListing.updated_at.desc())
|
|
||||||
else:
|
|
||||||
# score/time: prefer higher score first for listings
|
|
||||||
listing_query = listing_query.order_by(DomainListing.pounce_score.desc(), DomainListing.updated_at.desc())
|
|
||||||
|
|
||||||
listing_query = listing_query.offset(listing_offset).limit(listing_limit)
|
|
||||||
listings = (await db.execute(listing_query)).scalars().all()
|
|
||||||
|
|
||||||
for listing in listings:
|
|
||||||
domain_tld = listing.domain.rsplit(".", 1)[1] if "." in listing.domain else ""
|
|
||||||
pounce_score = listing.pounce_score or _calculate_pounce_score_v2(listing.domain, domain_tld, is_pounce=True)
|
|
||||||
if pounce_score < min_score:
|
|
||||||
continue
|
|
||||||
|
|
||||||
item = MarketFeedItem(
|
|
||||||
id=f"pounce-{listing.id}",
|
|
||||||
domain=listing.domain,
|
|
||||||
tld=domain_tld,
|
|
||||||
price=listing.asking_price or 0,
|
|
||||||
currency=listing.currency or "USD",
|
|
||||||
price_type="fixed" if listing.price_type == "fixed" else "negotiable",
|
|
||||||
status="instant",
|
|
||||||
source="Pounce",
|
|
||||||
is_pounce=True,
|
|
||||||
verified=listing.is_verified,
|
|
||||||
seller_verified=listing.is_verified,
|
|
||||||
slug=listing.slug,
|
|
||||||
url=f"/buy/{listing.slug}",
|
|
||||||
is_external=False,
|
|
||||||
pounce_score=pounce_score,
|
|
||||||
)
|
|
||||||
built.append({"item": item, "newest_ts": listing.updated_at or listing.created_at or datetime.min})
|
|
||||||
|
|
||||||
# External auctions (from DB)
|
|
||||||
if source in ["all", "external"]:
|
|
||||||
auction_query = select(DomainAuction).where(and_(*auction_filters))
|
|
||||||
|
|
||||||
# SQL ordering for auctions when possible
|
|
||||||
if sort_by == "time":
|
|
||||||
auction_query = auction_query.order_by(DomainAuction.end_time.asc())
|
|
||||||
elif sort_by == "price_asc":
|
|
||||||
auction_query = auction_query.order_by(DomainAuction.current_bid.asc())
|
|
||||||
elif sort_by == "price_desc":
|
|
||||||
auction_query = auction_query.order_by(DomainAuction.current_bid.desc())
|
|
||||||
elif sort_by == "newest":
|
|
||||||
auction_query = auction_query.order_by(DomainAuction.updated_at.desc())
|
|
||||||
else:
|
|
||||||
# score: prefer persisted score for DB-level sorting
|
|
||||||
auction_query = auction_query.order_by(
|
|
||||||
func.coalesce(DomainAuction.pounce_score, 0).desc(),
|
|
||||||
DomainAuction.updated_at.desc(),
|
|
||||||
)
|
|
||||||
|
|
||||||
auction_query = auction_query.offset(auction_offset).limit(auction_limit)
|
|
||||||
auctions = (await db.execute(auction_query)).scalars().all()
|
|
||||||
|
|
||||||
for auction in auctions:
|
|
||||||
# Vanity filter for anonymous users
|
|
||||||
if current_user is None and not _is_premium_domain(auction.domain):
|
|
||||||
continue
|
|
||||||
|
|
||||||
pounce_score = auction.pounce_score
|
|
||||||
if pounce_score is None:
|
|
||||||
pounce_score = _calculate_pounce_score_v2(
|
|
||||||
auction.domain,
|
|
||||||
auction.tld,
|
|
||||||
num_bids=auction.num_bids,
|
|
||||||
age_years=auction.age_years or 0,
|
|
||||||
is_pounce=False,
|
|
||||||
)
|
|
||||||
if pounce_score < min_score:
|
|
||||||
continue
|
|
||||||
|
|
||||||
item = MarketFeedItem(
|
|
||||||
id=f"auction-{auction.id}",
|
|
||||||
domain=auction.domain,
|
|
||||||
tld=auction.tld,
|
|
||||||
price=auction.current_bid,
|
|
||||||
currency=auction.currency,
|
|
||||||
price_type="bid",
|
|
||||||
status="auction",
|
|
||||||
source=auction.platform,
|
|
||||||
is_pounce=False,
|
|
||||||
verified=False,
|
|
||||||
time_remaining=_format_time_remaining(auction.end_time, now=now),
|
|
||||||
end_time=auction.end_time,
|
|
||||||
num_bids=auction.num_bids,
|
|
||||||
url=_get_affiliate_url(auction.platform, auction.domain, auction.auction_url),
|
|
||||||
is_external=True,
|
|
||||||
pounce_score=pounce_score,
|
|
||||||
)
|
|
||||||
built.append({"item": item, "newest_ts": auction.updated_at or auction.scraped_at or datetime.min})
|
|
||||||
|
|
||||||
# =========================================================================
|
|
||||||
# LIVE SEDO DATA - Fetch and merge real-time listings from Sedo API
|
|
||||||
# =========================================================================
|
|
||||||
try:
|
|
||||||
from app.services.sedo_api import sedo_client
|
|
||||||
|
|
||||||
if sedo_client.is_configured:
|
|
||||||
# Use search keyword or fall back to popular terms for discovery
|
|
||||||
sedo_keyword = keyword
|
|
||||||
if not sedo_keyword:
|
|
||||||
# Fetch popular domains when no specific search
|
|
||||||
import random
|
|
||||||
popular_terms = ["ai", "tech", "crypto", "app", "cloud", "digital", "smart", "pro"]
|
|
||||||
sedo_keyword = random.choice(popular_terms)
|
|
||||||
|
|
||||||
# Fetch live Sedo listings (limit to avoid slow responses)
|
|
||||||
sedo_listings = await sedo_client.get_listings_for_display(
|
|
||||||
keyword=sedo_keyword,
|
|
||||||
tld=tld_clean,
|
|
||||||
page_size=min(30, limit) # Cap at 30 to avoid slow API calls
|
|
||||||
)
|
|
||||||
|
|
||||||
# Track domains already in results to avoid duplicates
|
|
||||||
existing_domains = {item["item"].domain.lower() for item in built}
|
|
||||||
|
|
||||||
for sedo_item in sedo_listings:
|
|
||||||
domain = sedo_item.get("domain", "").lower()
|
|
||||||
|
|
||||||
# Skip if already have this domain from scraped data
|
|
||||||
if domain in existing_domains:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Apply vanity filter for anonymous users
|
|
||||||
if current_user is None and not _is_premium_domain(domain):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Apply price filters
|
|
||||||
price = sedo_item.get("price", 0)
|
|
||||||
if min_price is not None and price < min_price and price > 0:
|
|
||||||
continue
|
|
||||||
if max_price is not None and price > max_price:
|
|
||||||
continue
|
|
||||||
|
|
||||||
domain_tld = sedo_item.get("tld", "")
|
|
||||||
pounce_score = _calculate_pounce_score_v2(
|
|
||||||
domain,
|
|
||||||
domain_tld,
|
|
||||||
num_bids=0,
|
|
||||||
age_years=0,
|
|
||||||
is_pounce=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
if pounce_score < min_score:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Determine price type
|
|
||||||
price_type = "bid" if sedo_item.get("is_auction") else (
|
|
||||||
"negotiable" if price == 0 else "fixed"
|
|
||||||
)
|
|
||||||
|
|
||||||
item = MarketFeedItem(
|
|
||||||
id=f"sedo-live-{hash(domain) % 1000000}",
|
|
||||||
domain=domain,
|
|
||||||
tld=domain_tld,
|
|
||||||
price=price,
|
|
||||||
currency="USD",
|
|
||||||
price_type=price_type,
|
|
||||||
status="auction" if sedo_item.get("is_auction") else "instant",
|
|
||||||
source="Sedo",
|
|
||||||
is_pounce=False,
|
|
||||||
verified=False,
|
|
||||||
time_remaining=None,
|
|
||||||
end_time=None,
|
|
||||||
num_bids=None,
|
|
||||||
url=sedo_item.get("url", ""),
|
|
||||||
is_external=True,
|
|
||||||
pounce_score=pounce_score,
|
|
||||||
)
|
|
||||||
built.append({"item": item, "newest_ts": now})
|
|
||||||
existing_domains.add(domain)
|
|
||||||
|
|
||||||
# Update auction count
|
|
||||||
auction_total += 1
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.warning(f"Failed to fetch live Sedo data: {e}")
|
|
||||||
|
|
||||||
# -----------------------------
|
|
||||||
# Merge sort (Python) + paginate
|
|
||||||
# -----------------------------
|
|
||||||
if sort_by == "score":
|
|
||||||
built.sort(key=lambda x: (x["item"].pounce_score, int(x["item"].is_pounce), x["item"].domain), reverse=True)
|
|
||||||
elif sort_by == "price_asc":
|
|
||||||
built.sort(key=lambda x: (x["item"].price, -int(x["item"].is_pounce), x["item"].domain))
|
|
||||||
elif sort_by == "price_desc":
|
|
||||||
built.sort(key=lambda x: (-x["item"].price, -int(x["item"].is_pounce), x["item"].domain))
|
|
||||||
elif sort_by == "time":
|
|
||||||
# Pounce Direct first (no time limit), then by end time
|
|
||||||
built.sort(
|
|
||||||
key=lambda x: (0 if x["item"].is_pounce else 1, x["item"].end_time or datetime.max)
|
|
||||||
)
|
|
||||||
elif sort_by == "newest":
|
|
||||||
built.sort(key=lambda x: (int(x["item"].is_pounce), x["newest_ts"]), reverse=True)
|
|
||||||
|
|
||||||
total = pounce_total + auction_total if source == "all" else (pounce_total if source == "pounce" else auction_total)
|
|
||||||
|
|
||||||
page_slice = built[offset:offset + limit]
|
|
||||||
items = [x["item"] for x in page_slice]
|
|
||||||
|
|
||||||
# Unique sources (after pagination)
|
|
||||||
sources = list(set(item.source for item in items))
|
|
||||||
|
|
||||||
# Last update time (auctions)
|
|
||||||
if source in ["all", "external"]:
|
|
||||||
last_update_result = await db.execute(select(func.max(DomainAuction.updated_at)))
|
|
||||||
last_updated = last_update_result.scalar() or now
|
|
||||||
else:
|
|
||||||
last_updated = now
|
|
||||||
|
|
||||||
return MarketFeedResponse(
|
|
||||||
items=items,
|
|
||||||
total=total,
|
|
||||||
pounce_direct_count=pounce_total,
|
|
||||||
auction_count=auction_total,
|
|
||||||
sources=sources,
|
|
||||||
last_updated=last_updated,
|
|
||||||
filters_applied={
|
|
||||||
"source": source,
|
|
||||||
"keyword": keyword,
|
|
||||||
"tld": tld,
|
|
||||||
"min_price": min_price,
|
|
||||||
"max_price": max_price,
|
|
||||||
"min_score": min_score,
|
|
||||||
"ending_within": ending_within,
|
|
||||||
"verified_only": verified_only,
|
|
||||||
"sort_by": sort_by,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
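The `/feed` handler removed in this hunk is the densest piece above: it builds SQL-side filters for both sources, over-fetches a bounded window, merges in Python, and only then paginates. A minimal client sketch of how the endpoint's query parameters compose (the base URL and the router's mount path are assumptions, not taken from this diff):

```python
# Hypothetical client for the unified market feed endpoint shown above.
# Assumes the router is mounted under /api/v1/auctions on a local server;
# adjust both to the real deployment.
import httpx

def fetch_feed(base_url: str = "http://localhost:8000/api/v1/auctions") -> list[dict]:
    params = {
        "source": "all",      # "all" | "pounce" | "external"
        "tld": "io",          # normalized server-side via lstrip(".")
        "min_score": 60,      # items below this Pounce Score are dropped
        "sort_by": "score",   # SQL orders each source; Python merges the window
        "limit": 20,
        "offset": 0,
    }
    resp = httpx.get(f"{base_url}/feed", params=params, timeout=30.0)
    resp.raise_for_status()
    data = resp.json()
    # `total` comes from SQL counts; `items` is the merged, paginated window.
    return data["items"]

if __name__ == "__main__":
    for item in fetch_feed():
        print(item["domain"], item["pounce_score"], item["source"])
```

Note the trade-off visible in the handler: `fetch_window = min(max(requested * 3, 200), 2000)` caps memory for the `"all"` merge, so pagination past that window trades exactness for bounded work instead of loading every row.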
@@ -14,11 +14,10 @@ Endpoints:
 import os
 import secrets
 import logging
-import re
 from datetime import datetime, timedelta
 from typing import Optional

-from fastapi import APIRouter, HTTPException, status, BackgroundTasks, Request, Response
+from fastapi import APIRouter, HTTPException, status, BackgroundTasks, Request
 from pydantic import BaseModel, EmailStr
 from sqlalchemy import select
 from slowapi import Limiter
@@ -26,24 +25,10 @@ from slowapi.util import get_remote_address

 from app.api.deps import Database, CurrentUser
 from app.config import get_settings
-from app.schemas.auth import (
-    LoginResponse,
-    ReferralLinkResponse,
-    ReferralStats,
-    UserCreate,
-    UserLogin,
-    UserResponse,
-)
+from app.schemas.auth import UserCreate, UserLogin, UserResponse, Token
 from app.services.auth import AuthService
 from app.services.email_service import email_service
 from app.models.user import User
-from app.security import set_auth_cookie, clear_auth_cookie
-from app.services.telemetry import track_event
-from app.services.referral_rewards import (
-    QUALIFIED_REFERRAL_BATCH_SIZE,
-    apply_referral_rewards_for_user,
-    compute_badge,
-)

 logger = logging.getLogger(__name__)

@@ -86,9 +71,7 @@ class UpdateUserRequest(BaseModel):
 # ============== Endpoints ==============

 @router.post("/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED)
-@limiter.limit("5/minute")
 async def register(
-    request: Request,
     user_data: UserCreate,
     db: Database,
     background_tasks: BackgroundTasks,
@@ -116,62 +99,6 @@ async def register(
         name=user_data.name,
     )

-    # Process referral if present.
-    # Supported formats:
-    # - yield_{user_id}_{domain_id}
-    # - invite code (12 hex chars)
-    referral_applied = False
-    referrer_user_id: Optional[int] = None
-    referral_type: Optional[str] = None
-
-    if user_data.ref:
-        ref_raw = user_data.ref.strip()
-
-        # Yield referral: yield_{user_id}_{domain_id}
-        if ref_raw.startswith("yield_"):
-            try:
-                parts = ref_raw.split("_")
-                if len(parts) >= 3:
-                    referrer_user_id = int(parts[1])
-                    user.referred_by_user_id = referrer_user_id
-                    user.referral_code = ref_raw
-                    referral_type = "yield"
-
-                    # Try to map the yield_domain_id to a domain string
-                    try:
-                        from app.models.yield_domain import YieldDomain
-
-                        yield_domain_id = int(parts[2])
-                        yd_res = await db.execute(select(YieldDomain).where(YieldDomain.id == yield_domain_id))
-                        yd = yd_res.scalar_one_or_none()
-                        if yd:
-                            user.referred_by_domain = yd.domain
-                    except Exception:
-                        pass
-
-                    await db.commit()
-                    referral_applied = True
-                    logger.info("User %s referred via yield by user %s", user.email, referrer_user_id)
-            except Exception as e:
-                logger.warning("Failed to process yield referral code: %s, error: %s", ref_raw, e)
-        else:
-            # Invite code referral (viral loop)
-            code = ref_raw.lower()
-            if re.fullmatch(r"[0-9a-f]{12}", code):
-                try:
-                    ref_user_res = await db.execute(select(User).where(User.invite_code == code))
-                    ref_user = ref_user_res.scalar_one_or_none()
-                    if ref_user and ref_user.id != user.id:
-                        referrer_user_id = ref_user.id
-                        user.referred_by_user_id = ref_user.id
-                        user.referral_code = code
-                        referral_type = "invite"
-                        await db.commit()
-                        referral_applied = True
-                        logger.info("User %s referred via invite_code by user %s", user.email, ref_user.id)
-                except Exception as e:
-                    logger.warning("Failed to process invite referral code: %s, error: %s", code, e)
-
     # Auto-admin for specific email
     ADMIN_EMAILS = ["guggeryves@hotmail.com"]
     if user.email.lower() in [e.lower() for e in ADMIN_EMAILS]:
@@ -179,15 +106,8 @@ async def register(
         user.is_verified = True  # Auto-verify admins
         await db.commit()

-        # Give admin Tycoon subscription (only if no subscription exists)
+        # Give admin Tycoon subscription
         from app.models.subscription import Subscription, SubscriptionTier, SubscriptionStatus, TIER_CONFIG
-        from sqlalchemy import select
-
-        # Check if subscription already exists
-        existing_sub = await db.execute(
-            select(Subscription).where(Subscription.user_id == user.id)
-        )
-        if not existing_sub.scalar_one_or_none():
         tycoon_config = TIER_CONFIG.get(SubscriptionTier.TYCOON, {})
         subscription = Subscription(
             user_id=user.id,
@@ -204,39 +124,9 @@ async def register(
     user.email_verification_expires = datetime.utcnow() + timedelta(hours=24)
     await db.commit()

-    # Telemetry: registration + referral attribution
-    try:
-        await track_event(
-            db,
-            event_name="user_registered",
-            request=request,
-            user_id=user.id,
-            is_authenticated=False,
-            source="public",
-            metadata={"ref": bool(user_data.ref)},
-        )
-        if referral_applied:
-            await track_event(
-                db,
-                event_name="referral_attributed",
-                request=request,
-                user_id=user.id,
-                is_authenticated=False,
-                source="public",
-                metadata={
-                    "referral_type": referral_type,
-                    "referrer_user_id": referrer_user_id,
-                    "ref": user_data.ref,
-                },
-            )
-        await db.commit()
-    except Exception:
-        # never block registration
-        pass
-
     # Send verification email in background
-    if email_service.is_configured():
-        site_url = (settings.site_url or "http://localhost:3000").rstrip("/")
+    if email_service.is_configured:
+        site_url = os.getenv("SITE_URL", "http://localhost:3000")
         verify_url = f"{site_url}/verify-email?token={verification_token}"

         background_tasks.add_task(
@@ -249,104 +139,8 @@ async def register(
     return user


-@router.get("/referral", response_model=ReferralLinkResponse)
-async def get_referral_link(
-    request: Request,
-    current_user: CurrentUser,
-    db: Database,
-    days: int = 30,
-):
-    """Return the authenticated user's invite link."""
-    if not current_user.invite_code:
-        # Generate on demand for older users
-        for _ in range(12):
-            code = secrets.token_hex(6)
-            exists = await db.execute(select(User.id).where(User.invite_code == code))
-            if exists.scalar_one_or_none() is None:
-                current_user.invite_code = code
-                await db.commit()
-                break
-        if not current_user.invite_code:
-            raise HTTPException(status_code=500, detail="Failed to generate invite code")
-
-    # Apply rewards (idempotent) so UI reflects current state even without scheduler
-    snapshot = await apply_referral_rewards_for_user(db, current_user.id)
-    await db.commit()
-
-    base = (settings.site_url or "http://localhost:3000").rstrip("/")
-    url = f"{base}/register?ref={current_user.invite_code}"
-
-    try:
-        await track_event(
-            db,
-            event_name="referral_link_viewed",
-            request=request,
-            user_id=current_user.id,
-            is_authenticated=True,
-            source="terminal",
-            metadata={"invite_code": current_user.invite_code},
-        )
-        await db.commit()
-    except Exception:
-        pass
-
-    # Count link views in the chosen window
-    try:
-        from datetime import timedelta
-        from sqlalchemy import and_, func
-
-        from app.models.telemetry import TelemetryEvent
-
-        window_days = max(1, min(int(days), 365))
-        end = datetime.utcnow()
-        start = end - timedelta(days=window_days)
-        views = (
-            await db.execute(
-                select(func.count(TelemetryEvent.id)).where(
-                    and_(
-                        TelemetryEvent.event_name == "referral_link_viewed",
-                        TelemetryEvent.user_id == current_user.id,
-                        TelemetryEvent.created_at >= start,
-                        TelemetryEvent.created_at <= end,
-                    )
-                )
-            )
-        ).scalar()
-        referral_link_views_window = int(views or 0)
-    except Exception:
-        window_days = 30
-        referral_link_views_window = 0
-
-    qualified = int(snapshot.qualified_referrals_total)
-    if qualified < QUALIFIED_REFERRAL_BATCH_SIZE:
-        next_reward_at = QUALIFIED_REFERRAL_BATCH_SIZE
-    else:
-        remainder = qualified % QUALIFIED_REFERRAL_BATCH_SIZE
-        next_reward_at = qualified + (QUALIFIED_REFERRAL_BATCH_SIZE - remainder) if remainder else qualified + QUALIFIED_REFERRAL_BATCH_SIZE
-
-    return ReferralLinkResponse(
-        invite_code=current_user.invite_code,
-        url=url,
-        stats=ReferralStats(
-            window_days=int(window_days),
-            referred_users_total=int(snapshot.referred_users_total),
-            qualified_referrals_total=qualified,
-            referral_link_views_window=int(referral_link_views_window),
-            bonus_domains=int(snapshot.bonus_domains),
-            next_reward_at=int(next_reward_at),
-            badge=compute_badge(qualified),
-            cooldown_days=int(getattr(snapshot, "cooldown_days", 7) or 7),
-            disqualified_cooldown_total=int(getattr(snapshot, "disqualified_cooldown_total", 0) or 0),
-            disqualified_missing_ip_total=int(getattr(snapshot, "disqualified_missing_ip_total", 0) or 0),
-            disqualified_shared_ip_total=int(getattr(snapshot, "disqualified_shared_ip_total", 0) or 0),
-            disqualified_duplicate_ip_total=int(getattr(snapshot, "disqualified_duplicate_ip_total", 0) or 0),
-        ),
-    )
-
-
-@router.post("/login", response_model=LoginResponse)
-@limiter.limit("10/minute")
-async def login(request: Request, user_data: UserLogin, db: Database, response: Response):
+@router.post("/login", response_model=Token)
+async def login(user_data: UserLogin, db: Database):
     """
     Authenticate user and return JWT token.

@@ -410,23 +204,12 @@ async def login(request: Request, user_data: UserLogin, db: Database, response:
         expires_delta=access_token_expires,
     )

-    # Set HttpOnly cookie (preferred for browser clients)
-    set_auth_cookie(
-        response=response,
-        token=access_token,
-        max_age_seconds=settings.access_token_expire_minutes * 60,
+    return Token(
+        access_token=access_token,
+        token_type="bearer",
+        expires_in=settings.access_token_expire_minutes * 60,
     )
-
-    # Do NOT return the token in the response body (prevents leaks via logs/JS storage)
-    return LoginResponse(expires_in=settings.access_token_expire_minutes * 60)
-
-
-@router.post("/logout", response_model=MessageResponse)
-async def logout(response: Response):
-    """Clear auth cookie."""
-    clear_auth_cookie(response)
-    return MessageResponse(message="Logged out")


 @router.get("/me", response_model=UserResponse)
 async def get_current_user_info(current_user: CurrentUser):
@@ -451,10 +234,8 @@ async def update_current_user(


 @router.post("/forgot-password", response_model=MessageResponse)
-@limiter.limit("3/minute")
 async def forgot_password(
-    request: Request,
-    payload: ForgotPasswordRequest,
+    request: ForgotPasswordRequest,
     db: Database,
     background_tasks: BackgroundTasks,
 ):
@@ -469,7 +250,9 @@ async def forgot_password(
     success_message = "If an account with this email exists, a password reset link has been sent."

     # Look up user
-    result = await db.execute(select(User).where(User.email == payload.email.lower()))
+    result = await db.execute(
+        select(User).where(User.email == request.email.lower())
+    )
     user = result.scalar_one_or_none()

     if not user:
@@ -483,8 +266,8 @@ async def forgot_password(
     await db.commit()

     # Send reset email in background
-    if email_service.is_configured():
-        site_url = (settings.site_url or "http://localhost:3000").rstrip("/")
+    if email_service.is_configured:
+        site_url = os.getenv("SITE_URL", "http://localhost:3000")
         reset_url = f"{site_url}/reset-password?token={reset_token}"

         background_tasks.add_task(
@@ -582,10 +365,8 @@ async def verify_email(


 @router.post("/resend-verification", response_model=MessageResponse)
-@limiter.limit("3/minute")
 async def resend_verification(
-    request: Request,
-    payload: ForgotPasswordRequest,  # Reuse schema - just needs email
+    request: ForgotPasswordRequest,  # Reuse schema - just needs email
     db: Database,
     background_tasks: BackgroundTasks,
 ):
@@ -599,7 +380,7 @@ async def resend_verification(

     # Look up user
     result = await db.execute(
-        select(User).where(User.email == payload.email.lower())
+        select(User).where(User.email == request.email.lower())
     )
     user = result.scalar_one_or_none()

@@ -613,8 +394,8 @@ async def resend_verification(
     await db.commit()

     # Send verification email
-    if email_service.is_configured():
-        site_url = (settings.site_url or "http://localhost:3000").rstrip("/")
+    if email_service.is_configured:
+        site_url = os.getenv("SITE_URL", "http://localhost:3000")
         verify_url = f"{site_url}/verify-email?token={verification_token}"

         background_tasks.add_task(
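The two `login` variants in this hunk differ in where the JWT lives: the removed version sets it as an HttpOnly cookie via `set_auth_cookie` and returns only `expires_in`, while the surviving version returns the token in the JSON body. A sketch of a client for the cookie variant, assuming the router is mounted under /api/v1/auth (the prefix and credentials are placeholders, not taken from this diff):

```python
# Hypothetical client for the cookie-based login flow on the removed side
# of the diff above.
import httpx

def login_and_fetch_me(base_url: str = "http://localhost:8000/api/v1/auth") -> dict:
    with httpx.Client() as client:
        resp = client.post(
            f"{base_url}/login",
            json={"email": "user@example.com", "password": "correct horse"},
        )
        resp.raise_for_status()
        # Body carries only expiry metadata; the JWT sits in an HttpOnly cookie.
        print(resp.json())  # e.g. {"expires_in": 3600}
        # httpx keeps the Set-Cookie jar on the client, so authenticated
        # calls need no Authorization header.
        me = client.get(f"{base_url}/me")
        me.raise_for_status()
        return me.json()
```

Keeping the token out of the response body means browser JavaScript never sees it, which is exactly the leak the removed comment ("prevents leaks via logs/JS storage") guards against.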
@@ -15,7 +15,6 @@ from sqlalchemy.orm import selectinload
 from app.api.deps import Database, get_current_user, get_current_user_optional
 from app.models.user import User
 from app.models.blog import BlogPost
-from app.services.html_sanitizer import sanitize_html

 router = APIRouter()

@@ -195,39 +194,7 @@ async def get_blog_post(
     post.view_count += 1
     await db.commit()

-    data = post.to_dict(include_content=True)
-    data["content"] = sanitize_html(data.get("content") or "")
-    return data
-
-
-@router.get("/posts/{slug}/meta")
-async def get_blog_post_meta(
-    slug: str,
-    db: Database,
-):
-    """
-    Get blog post metadata by slug (public).
-
-    IMPORTANT: This endpoint does NOT increment view_count.
-    It's intended for SEO metadata generation (generateMetadata, JSON-LD).
-    """
-    result = await db.execute(
-        select(BlogPost)
-        .options(selectinload(BlogPost.author))
-        .where(
-            BlogPost.slug == slug,
-            BlogPost.is_published == True,
-        )
-    )
-    post = result.scalar_one_or_none()
-
-    if not post:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail="Blog post not found",
-        )
-
-    return post.to_dict(include_content=False)
+    return post.to_dict(include_content=True)


 # ============== Admin Endpoints ==============
@@ -288,7 +255,7 @@ async def create_blog_post(
     post = BlogPost(
         title=data.title,
         slug=slug,
-        content=sanitize_html(data.content),
+        content=data.content,
         excerpt=data.excerpt,
         cover_image=data.cover_image,
         category=data.category,
@@ -355,7 +322,7 @@ async def update_blog_post(
     # Optionally update slug if title changes
     # post.slug = generate_slug(data.title)
     if data.content is not None:
-        post.content = sanitize_html(data.content)
+        post.content = data.content
     if data.excerpt is not None:
         post.excerpt = data.excerpt
     if data.cover_image is not None:
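The removed blog lines sanitize post HTML both when persisting (`create`/`update`) and again when serving, so content stored before the sanitizer existed is still defanged on read. The diff does not show what `app.services.html_sanitizer.sanitize_html` does internally; a minimal stand-in built on the `bleach` library (an assumption, not necessarily the project's choice) would be:

```python
# Hypothetical allowlist sanitizer standing in for sanitize_html() above.
# Tag/attribute lists are illustrative, not taken from the project.
import bleach

ALLOWED_TAGS = ["p", "a", "strong", "em", "ul", "ol", "li", "h2", "h3", "blockquote", "pre", "code"]
ALLOWED_ATTRS = {"a": ["href", "title", "rel"]}

def sanitize_html(html: str) -> str:
    # Drops every tag and attribute not on the allowlist, which removes
    # <script> elements and inline event handlers from stored posts.
    return bleach.clean(html or "", tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRS, strip=True)

print(sanitize_html('<p onclick="steal()">hi</p><script>alert(1)</script>'))
# -> '<p>hi</p>alert(1)'
```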
@@ -1,197 +0,0 @@
-"""CFO (Management) endpoints."""
-
-from __future__ import annotations
-
-from datetime import datetime, timedelta, timezone
-
-from fastapi import APIRouter, Depends, HTTPException, Request, status
-from slowapi import Limiter
-from slowapi.util import get_remote_address
-from sqlalchemy import and_, case, func, select
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.api.deps import get_current_user
-from app.database import get_db
-from app.models.portfolio import PortfolioDomain
-from app.models.user import User
-from app.models.yield_domain import YieldDomain, YieldTransaction
-from app.schemas.cfo import (
-    CfoKillListRow,
-    CfoMonthlyBucket,
-    CfoSummaryResponse,
-    CfoUpcomingCostRow,
-    SetToDropResponse,
-)
-from app.services.analyze.renewal_cost import get_tld_price_snapshot
-
-router = APIRouter()
-limiter = Limiter(key_func=get_remote_address)
-
-
-def _utcnow() -> datetime:
-    return datetime.now(timezone.utc)
-
-
-def _month_key(dt: datetime) -> str:
-    return f"{dt.year:04d}-{dt.month:02d}"
-
-
-async def _estimate_renewal_cost_usd(db: AsyncSession, domain: str) -> tuple[float | None, str]:
-    # If the user stored renewal_cost, we treat it as the source of truth.
-    # Else we estimate using our own collected `tld_prices` DB.
-    tld = domain.split(".")[-1].lower()
-    snap = await get_tld_price_snapshot(db, tld)
-    if snap.min_renew_usd is None:
-        return None, "unknown"
-    return float(snap.min_renew_usd), "tld_prices"
-
-
-@router.get("/summary", response_model=CfoSummaryResponse)
-@limiter.limit("30/minute")
-async def cfo_summary(
-    request: Request,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    CFO dashboard summary:
-    - Burn rate timeline (renewal costs)
-    - Upcoming costs (30d)
-    - Kill list (renewal soon + no yield signals)
-    """
-    now = _utcnow()
-    now_naive = now.replace(tzinfo=None)
-
-    domains = (
-        await db.execute(select(PortfolioDomain).where(PortfolioDomain.user_id == current_user.id))
-    ).scalars().all()
-
-    # Yield stats (last 60d) by domain
-    since_60d = now_naive - timedelta(days=60)
-    yd_rows = (
-        await db.execute(
-            select(
-                YieldDomain.domain,
-                func.coalesce(func.sum(YieldTransaction.net_amount), 0).label("net_sum"),
-                func.coalesce(func.sum(case((YieldTransaction.event_type == "click", 1), else_=0)), 0).label("clicks"),
-            )
-            .join(
-                YieldTransaction,
-                and_(YieldTransaction.yield_domain_id == YieldDomain.id, YieldTransaction.created_at >= since_60d),
-                isouter=True,
-            )
-            .where(YieldDomain.user_id == current_user.id)
-            .group_by(YieldDomain.domain)
-        )
-    ).all()
-    yield_by_domain = {str(d).lower(): {"net": float(n or 0), "clicks": int(c or 0)} for d, n, c in yd_rows}
-
-    # Monthly buckets next 12 months
-    buckets: dict[str, CfoMonthlyBucket] = {}
-    for i in range(0, 12):
-        d = (now + timedelta(days=30 * i)).replace(day=1)
-        buckets[_month_key(d)] = CfoMonthlyBucket(month=_month_key(d), total_cost_usd=0.0, domains=0)
-
-    upcoming_rows: list[CfoUpcomingCostRow] = []
-    kill_list: list[CfoKillListRow] = []
-
-    cutoff_30d = now_naive + timedelta(days=30)
-
-    for pd in domains:
-        if pd.is_sold:
-            continue
-
-        renewal_dt = pd.renewal_date
-        if not renewal_dt:
-            continue
-
-        if renewal_dt.tzinfo is not None:
-            renewal_dt_naive = renewal_dt.astimezone(timezone.utc).replace(tzinfo=None)
-        else:
-            renewal_dt_naive = renewal_dt
-
-        # cost source: portfolio overrides
-        if pd.renewal_cost is not None:
-            cost = float(pd.renewal_cost)
-            source = "portfolio"
-        else:
-            cost, source = await _estimate_renewal_cost_usd(db, pd.domain)
-
-        # Monthly burn timeline
-        month = _month_key(renewal_dt_naive)
-        if month not in buckets:
-            buckets[month] = CfoMonthlyBucket(month=month, total_cost_usd=0.0, domains=0)
-        if cost is not None:
-            buckets[month].total_cost_usd = float(buckets[month].total_cost_usd) + float(cost)
-        buckets[month].domains = int(buckets[month].domains) + 1
-
-        # Upcoming 30d
-        if now_naive <= renewal_dt_naive <= cutoff_30d:
-            upcoming_rows.append(
-                CfoUpcomingCostRow(
-                    domain_id=pd.id,
-                    domain=pd.domain,
-                    renewal_date=renewal_dt,
-                    renewal_cost_usd=cost,
-                    cost_source=source,
-                    is_sold=bool(pd.is_sold),
-                )
-            )
-
-            y = yield_by_domain.get(pd.domain.lower(), {"net": 0.0, "clicks": 0})
-            if float(y["net"]) <= 0.0 and int(y["clicks"]) <= 0:
-                kill_list.append(
-                    CfoKillListRow(
-                        domain_id=pd.id,
-                        domain=pd.domain,
-                        renewal_date=renewal_dt,
-                        renewal_cost_usd=cost,
-                        cost_source=source,
-                        auto_renew=bool(pd.auto_renew),
-                        is_dns_verified=bool(getattr(pd, "is_dns_verified", False) or False),
-                        yield_net_60d=float(y["net"]),
-                        yield_clicks_60d=int(y["clicks"]),
-                        reason="No yield signals tracked in the last 60 days and renewal is due within 30 days.",
-                    )
-                )
-
-    # Sort rows
-    upcoming_rows.sort(key=lambda r: (r.renewal_date or now_naive))
-    kill_list.sort(key=lambda r: (r.renewal_date or now_naive))
-
-    upcoming_total = sum((r.renewal_cost_usd or 0) for r in upcoming_rows)
-    monthly_sorted = [buckets[k] for k in sorted(buckets.keys())]
-
-    return CfoSummaryResponse(
-        computed_at=now,
-        upcoming_30d_total_usd=float(round(upcoming_total, 2)),
-        upcoming_30d_rows=upcoming_rows,
-        monthly=monthly_sorted,
-        kill_list=kill_list[:50],
-    )
-
-
-@router.post("/domains/{domain_id}/set-to-drop", response_model=SetToDropResponse)
-@limiter.limit("30/minute")
-async def set_to_drop(
-    request: Request,
-    domain_id: int,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Mark portfolio domain as 'to drop' by turning off local auto-renew flag.
-    (We cannot disable auto-renew at the registrar automatically.)
-    """
-    pd = (
-        await db.execute(
-            select(PortfolioDomain).where(and_(PortfolioDomain.id == domain_id, PortfolioDomain.user_id == current_user.id))
-        )
-    ).scalar_one_or_none()
-    if not pd:
-        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Portfolio domain not found")
-    pd.auto_renew = False
-    pd.updated_at = datetime.utcnow()
-    await db.commit()
-    return SetToDropResponse(domain_id=pd.id, auto_renew=bool(pd.auto_renew), updated_at=pd.updated_at.replace(tzinfo=timezone.utc))
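Most of the deleted `cfo_summary` is query plumbing; the burn-rate core is just grouping renewal costs into `YYYY-MM` buckets via `_month_key`. A self-contained rendition of that step, with example data in place of the portfolio query:

```python
# Standalone sketch of the monthly burn-rate bucketing used by the
# deleted cfo_summary() above; dates and costs are example data.
from collections import defaultdict
from datetime import datetime

def month_key(dt: datetime) -> str:
    return f"{dt.year:04d}-{dt.month:02d}"

def burn_timeline(renewals: list[tuple[datetime, float]]) -> dict[str, float]:
    buckets: dict[str, float] = defaultdict(float)
    for renewal_date, cost_usd in renewals:
        buckets[month_key(renewal_date)] += cost_usd
    return dict(sorted(buckets.items()))

print(burn_timeline([
    (datetime(2025, 1, 14), 12.50),
    (datetime(2025, 1, 30), 9.99),
    (datetime(2025, 3, 2), 34.00),
]))
# -> {'2025-01': 22.49, '2025-03': 34.0}
```

The kill list layers one more filter on top: a domain lands there only when its renewal falls inside the 30-day window *and* it showed zero yield clicks and zero net revenue in the trailing 60 days.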
@@ -16,12 +16,10 @@ from datetime import datetime
 from typing import Optional

 from fastapi import APIRouter, HTTPException, status, BackgroundTasks, Request
-from fastapi.responses import HTMLResponse
 from pydantic import BaseModel, EmailStr, Field
 from sqlalchemy import select, delete
 from slowapi import Limiter
 from slowapi.util import get_remote_address
-from urllib.parse import urlencode

 from app.api.deps import Database
 from app.services.email_service import email_service
@@ -34,11 +32,6 @@ router = APIRouter()
 # Rate limiter for contact endpoints
 limiter = Limiter(key_func=get_remote_address)

-def _build_unsubscribe_url(email: str, token: str) -> str:
-    base = os.getenv("SITE_URL", "https://pounce.ch").rstrip("/")
-    query = urlencode({"email": email, "token": token})
-    return f"{base}/api/v1/contact/newsletter/unsubscribe?{query}"
-

 # ============== Schemas ==============

@@ -146,7 +139,6 @@ async def subscribe_newsletter(
         background_tasks.add_task(
             email_service.send_newsletter_welcome,
             to_email=email_lower,
-            unsubscribe_url=_build_unsubscribe_url(email_lower, existing.unsubscribe_token),
         )

         return MessageResponse(
@@ -168,7 +160,6 @@ async def subscribe_newsletter(
     background_tasks.add_task(
         email_service.send_newsletter_welcome,
         to_email=email_lower,
-        unsubscribe_url=_build_unsubscribe_url(email_lower, subscriber.unsubscribe_token),
     )

     logger.info(f"Newsletter subscription: {email_lower}")
@@ -225,50 +216,6 @@ async def unsubscribe_newsletter(
     )


-@router.get("/newsletter/unsubscribe")
-async def unsubscribe_newsletter_one_click(
-    email: EmailStr,
-    token: str,
-    db: Database,
-):
-    """
-    One-click unsubscribe endpoint (for List-Unsubscribe header).
-    Always returns 200 with a human-readable HTML response.
-    """
-    email_lower = email.lower()
-    result = await db.execute(
-        select(NewsletterSubscriber).where(
-            NewsletterSubscriber.email == email_lower,
-            NewsletterSubscriber.unsubscribe_token == token,
-        )
-    )
-    subscriber = result.scalar_one_or_none()
-    if subscriber and subscriber.is_active:
-        subscriber.is_active = False
-        subscriber.unsubscribed_at = datetime.utcnow()
-        await db.commit()
-
-    return HTMLResponse(
-        content="""
-<!doctype html>
-<html lang="en">
-  <head>
-    <meta charset="utf-8" />
-    <meta name="viewport" content="width=device-width, initial-scale=1" />
-    <title>Unsubscribed</title>
-  </head>
-  <body style="font-family: system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, sans-serif; padding: 32px;">
-    <h1 style="margin: 0 0 12px 0;">You are unsubscribed.</h1>
-    <p style="margin: 0; color: #555;">
-      If you were subscribed, you will no longer receive pounce insights emails.
-    </p>
-  </body>
-</html>
-""".strip(),
-        status_code=200,
-    )
-
-
 @router.get("/newsletter/status")
 async def check_newsletter_status(
     email: EmailStr,
@@ -1,105 +0,0 @@
-"""Dashboard summary endpoints (reduce frontend API round-trips)."""
-
-from datetime import datetime, timedelta
-
-from fastapi import APIRouter, Depends
-from sqlalchemy import select, func, and_
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.api.deps import get_current_user
-from app.database import get_db
-from app.models.auction import DomainAuction
-from app.models.listing import DomainListing, ListingStatus
-from app.models.user import User
-
-# Reuse helpers for consistent formatting
-from app.api.auctions import _format_time_remaining, _get_affiliate_url
-from app.api.tld_prices import get_trending_tlds
-
-router = APIRouter()
-
-
-@router.get("/summary")
-async def get_dashboard_summary(
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Return a compact dashboard payload used by `/terminal/radar`.
-
-    Goal: 1 request instead of multiple heavy round-trips.
-    """
-    now = datetime.utcnow()
-
-    # -------------------------
-    # Market stats + preview
-    # -------------------------
-    active_auctions_filter = and_(DomainAuction.is_active == True, DomainAuction.end_time > now)
-
-    total_auctions = (await db.execute(select(func.count(DomainAuction.id)).where(active_auctions_filter))).scalar() or 0
-
-    cutoff = now + timedelta(hours=24)
-    ending_soon_filter = and_(
-        DomainAuction.is_active == True,
-        DomainAuction.end_time > now,
-        DomainAuction.end_time <= cutoff,
-    )
-
-    ending_soon_count = (await db.execute(select(func.count(DomainAuction.id)).where(ending_soon_filter))).scalar() or 0
-
-    ending_soon = (
-        await db.execute(
-            select(DomainAuction)
-            .where(ending_soon_filter)
-            .order_by(DomainAuction.end_time.asc())
-            .limit(5)
-        )
-    ).scalars().all()
-
-    ending_soon_preview = [
-        {
-            "domain": a.domain,
-            "current_bid": a.current_bid,
-            "time_remaining": _format_time_remaining(a.end_time, now=now),
-            "platform": a.platform,
-            "affiliate_url": _get_affiliate_url(a.platform, a.domain, a.auction_url),
-        }
-        for a in ending_soon
-    ]
-
-    # -------------------------
-    # Listings stats (user)
-    # -------------------------
-    listing_counts = (
-        await db.execute(
-            select(DomainListing.status, func.count(DomainListing.id))
-            .where(DomainListing.user_id == current_user.id)
-            .group_by(DomainListing.status)
-        )
-    ).all()
-    by_status = {status: int(count) for status, count in listing_counts}
-
-    listing_stats = {
-        "active": by_status.get(ListingStatus.ACTIVE.value, 0),
-        "sold": by_status.get(ListingStatus.SOLD.value, 0),
-        "draft": by_status.get(ListingStatus.DRAFT.value, 0),
-        "total": sum(by_status.values()),
-    }

-    # -------------------------
-    # Trending TLDs (public data)
-    # -------------------------
-    trending = await get_trending_tlds(db)
-
-    return {
-        "market": {
-            "total_auctions": total_auctions,
-            "ending_soon": ending_soon_count,
-            "ending_soon_preview": ending_soon_preview,
-        },
-        "listings": listing_stats,
-        "tlds": trending,
-        "timestamp": now.isoformat(),
-    }
@@ -1,22 +1,21 @@
 """API dependencies."""
 from typing import Annotated, Optional

-from fastapi import Depends, HTTPException, Request, status
+from fastapi import Depends, HTTPException, status
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from sqlalchemy.ext.asyncio import AsyncSession

 from app.database import get_db
 from app.services.auth import AuthService
 from app.models.user import User
-from app.security import AUTH_COOKIE_NAME

 # Security scheme
+security = HTTPBearer()
 security_optional = HTTPBearer(auto_error=False)


 async def get_current_user(
-    request: Request,
-    credentials: Annotated[Optional[HTTPAuthorizationCredentials], Depends(security_optional)],
+    credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)],
     db: Annotated[AsyncSession, Depends(get_db)],
 ) -> User:
     """Get current authenticated user from JWT token."""
@@ -26,15 +25,7 @@ async def get_current_user(
         headers={"WWW-Authenticate": "Bearer"},
     )

-    token: Optional[str] = None
-    if credentials is not None:
-        token = credentials.credentials
-    if not token:
-        token = request.cookies.get(AUTH_COOKIE_NAME)
-
-    if not token:
-        raise credentials_exception
-
+    token = credentials.credentials
     payload = AuthService.decode_token(token)

     if payload is None:
@@ -76,7 +67,6 @@ async def get_current_active_user(


 async def get_current_user_optional(
-    request: Request,
     credentials: Annotated[Optional[HTTPAuthorizationCredentials], Depends(security_optional)],
     db: Annotated[AsyncSession, Depends(get_db)],
 ) -> Optional[User]:
@@ -85,15 +75,10 @@ async def get_current_user_optional(
     This allows endpoints to work for both authenticated and anonymous users,
     potentially showing different content based on auth status.
     """
-    token: Optional[str] = None
-    if credentials is not None:
-        token = credentials.credentials
-    if not token:
-        token = request.cookies.get(AUTH_COOKIE_NAME)
-
-    if not token:
+    if credentials is None:
         return None

+    token = credentials.credentials
     payload = AuthService.decode_token(token)

     if payload is None:
|
|||||||
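The branch removed above implements a bearer-or-cookie fallback: accept an `Authorization` header when present, otherwise read the HttpOnly auth cookie. A self-contained sketch of that pattern (the cookie name's value is an assumption; real JWT verification is stubbed out):

```python
# Minimal sketch of the bearer-or-cookie dependency removed in the diff.
from typing import Annotated, Optional

from fastapi import Depends, FastAPI, HTTPException, Request, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer

AUTH_COOKIE_NAME = "pounce_session"  # assumption: the real name lives in app.security

app = FastAPI()
security_optional = HTTPBearer(auto_error=False)  # don't 403 when the header is absent


def extract_token(
    request: Request,
    credentials: Annotated[Optional[HTTPAuthorizationCredentials], Depends(security_optional)],
) -> str:
    token: Optional[str] = None
    if credentials is not None:
        token = credentials.credentials  # Authorization: Bearer <jwt>
    if not token:
        token = request.cookies.get(AUTH_COOKIE_NAME)  # HttpOnly cookie fallback
    if not token:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Not authenticated",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return token


@app.get("/whoami")
def whoami(token: Annotated[str, Depends(extract_token)]) -> dict:
    # Real code would decode and verify the JWT here.
    return {"token_prefix": token[:8]}
```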
@@ -1,14 +1,13 @@
 """Domain management API (requires authentication)."""
-import json
 from datetime import datetime
 from math import ceil

 from fastapi import APIRouter, HTTPException, status, Query
 from pydantic import BaseModel
-from sqlalchemy import select, func, and_
+from sqlalchemy import select, func

 from app.api.deps import Database, CurrentUser
-from app.models.domain import Domain, DomainCheck, DomainStatus, DomainHealthCache
+from app.models.domain import Domain, DomainCheck, DomainStatus
 from app.models.subscription import TIER_CONFIG, SubscriptionTier
 from app.schemas.domain import DomainCreate, DomainResponse, DomainListResponse
 from app.services.domain_checker import domain_checker
@@ -16,38 +15,6 @@ from app.services.domain_health import get_health_checker, HealthStatus

 router = APIRouter()

-def _safe_json_loads(value: str | None, default):
-    if not value:
-        return default
-    try:
-        return json.loads(value)
-    except Exception:
-        return default
-
-
-def _health_cache_to_report(domain: Domain, cache: DomainHealthCache) -> dict:
-    """Convert DomainHealthCache row into the same shape as DomainHealthReport.to_dict()."""
-    return {
-        "domain": domain.name,
-        "status": cache.status or "unknown",
-        "score": cache.score or 0,
-        "signals": _safe_json_loads(cache.signals, []),
-        "recommendations": [],  # not stored in cache (yet)
-        "checked_at": cache.checked_at.isoformat() if cache.checked_at else datetime.utcnow().isoformat(),
-        "dns": _safe_json_loads(
-            cache.dns_data,
-            {"has_ns": False, "has_a": False, "has_mx": False, "nameservers": [], "is_parked": False, "error": None},
-        ),
-        "http": _safe_json_loads(
-            cache.http_data,
-            {"is_reachable": False, "status_code": None, "is_parked": False, "parking_keywords": [], "content_length": 0, "error": None},
-        ),
-        "ssl": _safe_json_loads(
-            cache.ssl_data,
-            {"has_certificate": False, "is_valid": False, "expires_at": None, "days_until_expiry": None, "issuer": None, "error": None},
-        ),
-    }
-

 @router.get("", response_model=DomainListResponse)
 async def list_domains(
@@ -82,40 +49,6 @@ async def list_domains(
     )


-@router.get("/health-cache")
-async def get_domains_health_cache(
-    current_user: CurrentUser,
-    db: Database,
-):
-    """
-    Get cached domain health reports for the current user (bulk).
-
-    This avoids N requests from the frontend and returns the cached health
-    data written by the scheduler job.
-    """
-    result = await db.execute(
-        select(Domain, DomainHealthCache)
-        .outerjoin(DomainHealthCache, DomainHealthCache.domain_id == Domain.id)
-        .where(Domain.user_id == current_user.id)
-    )
-    rows = result.all()
-
-    reports: dict[str, dict] = {}
-    cached = 0
-    for domain, cache in rows:
-        if cache is None:
-            continue
-        reports[str(domain.id)] = _health_cache_to_report(domain, cache)
-        cached += 1
-
-    return {
-        "reports": reports,
-        "total_domains": len(rows),
-        "cached_domains": cached,
-        "timestamp": datetime.utcnow().isoformat(),
-    }
-
-
 @router.post("", response_model=DomainResponse, status_code=status.HTTP_201_CREATED)
 async def add_domain(
     domain_data: DomainCreate,
@@ -127,7 +60,7 @@ async def add_domain(
     await db.refresh(current_user, ["subscription", "domains"])

     if current_user.subscription:
-        limit = current_user.subscription.domain_limit
+        limit = current_user.subscription.max_domains
     else:
         limit = TIER_CONFIG[SubscriptionTier.SCOUT]["domain_limit"]

@@ -315,59 +248,6 @@ async def update_notification_settings(
     return domain


-@router.patch("/{domain_id}/expiry", response_model=DomainResponse)
-async def update_expiration_date(
-    domain_id: int,
-    data: dict,
-    current_user: CurrentUser,
-    db: Database,
-):
-    """
-    Manually set the expiration date for a domain.
-
-    Useful for TLDs like .ch, .de that don't expose expiration via public WHOIS/RDAP.
-    The date can be found in your registrar's control panel.
-    """
-    from datetime import datetime
-
-    result = await db.execute(
-        select(Domain).where(
-            Domain.id == domain_id,
-            Domain.user_id == current_user.id,
-        )
-    )
-    domain = result.scalar_one_or_none()
-
-    if not domain:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail="Domain not found",
-        )
-
-    # Parse and set expiration date
-    expiration_str = data.get('expiration_date')
-    if expiration_str:
-        try:
-            if isinstance(expiration_str, str):
-                # Parse ISO format
-                expiration_str = expiration_str.replace('Z', '+00:00')
-                domain.expiration_date = datetime.fromisoformat(expiration_str)
-            else:
-                domain.expiration_date = expiration_str
-        except Exception as e:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail=f"Invalid date format: {e}",
-            )
-    else:
-        domain.expiration_date = None
-
-    await db.commit()
-    await db.refresh(domain)
-
-    return domain
-
-
 @router.get("/{domain_id}/history")
 async def get_domain_history(
     domain_id: int,
@@ -439,7 +319,6 @@ async def get_domain_health(
     domain_id: int,
     current_user: CurrentUser,
     db: Database,
-    refresh: bool = Query(False, description="Force a live health check instead of using cache"),
 ):
     """
     Get comprehensive health report for a domain.
@@ -468,44 +347,11 @@ async def get_domain_health(
             detail="Domain not found",
         )

-    # Prefer cached report for UI performance
-    if not refresh:
-        cache_result = await db.execute(
-            select(DomainHealthCache).where(DomainHealthCache.domain_id == domain.id)
-        )
-        cache = cache_result.scalar_one_or_none()
-        if cache is not None:
-            return _health_cache_to_report(domain, cache)
-
-    # Live health check (slow) + update cache
+    # Run health check
     health_checker = get_health_checker()
     report = await health_checker.check_domain(domain.name)
-    report_dict = report.to_dict()
-
-    signals_json = json.dumps(report_dict.get("signals") or [])
-    dns_json = json.dumps(report_dict.get("dns") or {})
-    http_json = json.dumps(report_dict.get("http") or {})
-    ssl_json = json.dumps(report_dict.get("ssl") or {})
-
-    cache_result = await db.execute(
-        select(DomainHealthCache).where(DomainHealthCache.domain_id == domain.id)
-    )
-    cache = cache_result.scalar_one_or_none()
-    if cache is None:
-        cache = DomainHealthCache(domain_id=domain.id)
-        db.add(cache)
-
-    cache.status = report_dict.get("status") or "unknown"
-    cache.score = int(report_dict.get("score") or 0)
-    cache.signals = signals_json
-    cache.dns_data = dns_json
-    cache.http_data = http_json
-    cache.ssl_data = ssl_json
-    cache.checked_at = datetime.utcnow()
-
-    await db.commit()
-
-    return report_dict
+    return report.to_dict()


 @router.post("/health-check")
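The deleted cache helpers store the DNS/HTTP/SSL sub-reports as JSON strings in TEXT columns, so a tolerant decoder keeps one corrupt row from breaking the whole bulk endpoint. A minimal standalone version of that idea (same behavior as `_safe_json_loads` above):

```python
# Tolerant JSON decoding for cached TEXT columns: never raise, return a default.
import json
from typing import Any


def safe_json_loads(value: str | None, default: Any) -> Any:
    if not value:
        return default
    try:
        return json.loads(value)
    except (ValueError, TypeError):  # malformed JSON or non-string input
        return default


assert safe_json_loads(None, []) == []
assert safe_json_loads("{bad json", {}) == {}
assert safe_json_loads('{"has_ns": true}', {}) == {"has_ns": True}
```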
@@ -1,177 +0,0 @@
-"""
-Drops API - Zone File Analysis Endpoints
-=========================================
-API endpoints for accessing freshly dropped domains from:
-- Switch.ch zone files (.ch, .li)
-- ICANN CZDS zone files (.com, .net, .org, .xyz, .info, .dev, .app, .online)
-"""
-
-from datetime import datetime
-from typing import Optional
-
-from fastapi import APIRouter, Depends, HTTPException, Query, BackgroundTasks
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.database import get_db
-from app.api.deps import get_current_user
-from app.services.zone_file import (
-    ZoneFileService,
-    get_dropped_domains,
-    get_zone_stats,
-)
-
-router = APIRouter(prefix="/drops", tags=["drops"])
-
-# All supported TLDs
-SWITCH_TLDS = ["ch", "li"]
-CZDS_TLDS = ["xyz", "org", "online", "info", "dev", "app"]  # Approved
-CZDS_PENDING = ["com", "net", "club", "biz"]  # Pending approval
-ALL_TLDS = SWITCH_TLDS + CZDS_TLDS
-
-
-# ============================================================================
-# PUBLIC ENDPOINTS (for stats)
-# ============================================================================
-
-@router.get("/stats")
-async def api_get_zone_stats(
-    db: AsyncSession = Depends(get_db)
-):
-    """
-    Get zone file statistics.
-    Returns domain counts and last sync times for .ch and .li.
-    """
-    try:
-        stats = await get_zone_stats(db)
-        return stats
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-
-# ============================================================================
-# AUTHENTICATED ENDPOINTS
-# ============================================================================
-
-@router.get("")
-async def api_get_drops(
-    tld: Optional[str] = Query(None, description="Filter by TLD"),
-    hours: int = Query(24, ge=1, le=48, description="Hours to look back (max 48h, we only store 48h)"),
-    min_length: Optional[int] = Query(None, ge=1, le=63, description="Minimum domain length"),
-    max_length: Optional[int] = Query(None, ge=1, le=63, description="Maximum domain length"),
-    exclude_numeric: bool = Query(False, description="Exclude numeric-only domains"),
-    exclude_hyphen: bool = Query(False, description="Exclude domains with hyphens"),
-    keyword: Optional[str] = Query(None, description="Search keyword"),
-    limit: int = Query(50, ge=1, le=200, description="Results per page"),
-    offset: int = Query(0, ge=0, description="Offset for pagination"),
-    db: AsyncSession = Depends(get_db),
-    current_user = Depends(get_current_user)
-):
-    """
-    Get recently dropped domains from zone files.
-
-    Supports:
-    - Switch.ch zones: .ch, .li
-    - ICANN CZDS zones: .xyz, .org, .online, .info, .dev, .app
-
-    Domains are detected by comparing daily zone file snapshots.
-    Only available for authenticated users.
-    """
-    if tld and tld not in ALL_TLDS:
-        raise HTTPException(
-            status_code=400,
-            detail=f"Unsupported TLD. Supported: {', '.join(ALL_TLDS)}"
-        )
-
-    try:
-        result = await get_dropped_domains(
-            db=db,
-            tld=tld,
-            hours=hours,
-            min_length=min_length,
-            max_length=max_length,
-            exclude_numeric=exclude_numeric,
-            exclude_hyphen=exclude_hyphen,
-            keyword=keyword,
-            limit=limit,
-            offset=offset
-        )
-        return result
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-
-@router.post("/sync/{tld}")
-async def api_trigger_sync(
-    tld: str,
-    background_tasks: BackgroundTasks,
-    db: AsyncSession = Depends(get_db),
-    current_user = Depends(get_current_user)
-):
-    """
-    Trigger a manual zone file sync for a specific TLD.
-    Only available for admin users.
-
-    This is normally run automatically by the scheduler.
-    """
-    # Check if user is admin
-    if not getattr(current_user, 'is_admin', False):
-        raise HTTPException(status_code=403, detail="Admin access required")
-
-    if tld not in ALL_TLDS:
-        raise HTTPException(
-            status_code=400,
-            detail=f"Unsupported TLD. Supported: {', '.join(ALL_TLDS)}"
-        )
-
-    async def run_sync():
-        from app.database import AsyncSessionLocal
-
-        async with AsyncSessionLocal() as session:
-            try:
-                if tld in SWITCH_TLDS:
-                    # Use Switch.ch zone transfer
-                    service = ZoneFileService()
-                    await service.run_daily_sync(session, tld)
-                else:
-                    # Use ICANN CZDS
-                    from app.services.czds_client import CZDSClient
-                    client = CZDSClient()
-                    await client.sync_zone(session, tld)
-            except Exception as e:
-                print(f"Zone sync failed for .{tld}: {e}")
-
-    background_tasks.add_task(run_sync)
-
-    return {"status": "sync_started", "tld": tld}
-
-
-# ============================================================================
-# HELPER ENDPOINTS
-# ============================================================================
-
-@router.get("/tlds")
-async def api_get_supported_tlds():
-    """
-    Get list of supported TLDs for zone file analysis.
-    """
-    return {
-        "tlds": [
-            # Switch.ch zones
-            {"tld": "ch", "name": "Switzerland", "flag": "🇨🇭", "registry": "Switch", "source": "switch"},
-            {"tld": "li", "name": "Liechtenstein", "flag": "🇱🇮", "registry": "Switch", "source": "switch"},
-            # ICANN CZDS zones (approved)
-            {"tld": "xyz", "name": "XYZ", "flag": "🌐", "registry": "XYZ.COM LLC", "source": "czds"},
-            {"tld": "org", "name": "Organization", "flag": "🏛️", "registry": "PIR", "source": "czds"},
-            {"tld": "online", "name": "Online", "flag": "💻", "registry": "Radix", "source": "czds"},
-            {"tld": "info", "name": "Information", "flag": "ℹ️", "registry": "Afilias", "source": "czds"},
-            {"tld": "dev", "name": "Developer", "flag": "👨💻", "registry": "Google", "source": "czds"},
-            {"tld": "app", "name": "Application", "flag": "📱", "registry": "Google", "source": "czds"},
-        ],
-        "pending": [
-            # CZDS pending approval
-            {"tld": "com", "name": "Commercial", "flag": "🏢", "registry": "Verisign", "source": "czds"},
-            {"tld": "net", "name": "Network", "flag": "🌐", "registry": "Verisign", "source": "czds"},
-            {"tld": "club", "name": "Club", "flag": "🎉", "registry": "GoDaddy", "source": "czds"},
-            {"tld": "biz", "name": "Business", "flag": "💼", "registry": "GoDaddy", "source": "czds"},
-        ]
-    }
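The docstring above says drops are "detected by comparing daily zone file snapshots": a name present in yesterday's zone but absent from today's has dropped. A minimal sketch of that set difference (the real service parses registry zone files; the names here are illustrative):

```python
# Drop detection as a set difference between two daily zone snapshots.
def find_dropped(yesterday: set[str], today: set[str]) -> set[str]:
    """Names that left the zone between two consecutive snapshots."""
    return yesterday - today


snap_mon = {"alpha.ch", "bravo.ch", "charlie.ch"}
snap_tue = {"alpha.ch", "charlie.ch", "delta.ch"}
print(find_dropped(snap_mon, snap_tue))  # {'bravo.ch'} dropped; 'delta.ch' is new
```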
@@ -1,247 +0,0 @@
-"""HUNT (Discovery) endpoints."""
-
-from __future__ import annotations
-
-from datetime import datetime, timezone
-
-from fastapi import APIRouter, Depends, HTTPException, Query, Request
-from slowapi import Limiter
-from slowapi.util import get_remote_address
-from sqlalchemy import and_, func, select
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.api.deps import get_current_user
-from app.database import get_db
-from app.models.auction import DomainAuction
-from app.models.user import User
-from app.schemas.hunt import (
-    BrandableRequest,
-    BrandableCandidate,
-    BrandableResponse,
-    HuntSniperItem,
-    HuntSniperResponse,
-    KeywordAvailabilityRequest,
-    KeywordAvailabilityResponse,
-    KeywordAvailabilityRow,
-    TrendsResponse,
-    TrendItem,
-    TypoCheckRequest,
-    TypoCheckResponse,
-    TypoCandidate,
-)
-from app.services.domain_checker import domain_checker
-from app.services.hunt.brandables import check_domains, generate_cvcvc, generate_cvccv, generate_human
-from app.services.hunt.trends import fetch_google_trends_daily_rss
-from app.services.hunt.typos import generate_typos
-
-router = APIRouter()
-limiter = Limiter(key_func=get_remote_address)
-
-
-def _utcnow() -> datetime:
-    return datetime.now(timezone.utc)
-
-
-@router.get("/bargain-bin", response_model=HuntSniperResponse)
-@limiter.limit("60/minute")
-async def bargain_bin(
-    request: Request,
-    _user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-    limit: int = Query(100, ge=1, le=500),
-):
-    """
-    Closeout Sniper (Chris logic):
-    price < $10 AND age_years >= 5 AND backlinks > 0
-
-    Uses ONLY real scraped auction data (DomainAuction.age_years/backlinks).
-    Items without required fields are excluded.
-    """
-    now = _utcnow().replace(tzinfo=None)
-    base = and_(DomainAuction.is_active == True, DomainAuction.end_time > now)  # noqa: E712
-
-    rows = (
-        await db.execute(
-            select(DomainAuction)
-            .where(base)
-            .where(DomainAuction.current_bid < 10)
-            .order_by(DomainAuction.end_time.asc())
-            .limit(limit * 3)  # allow filtering
-        )
-    ).scalars().all()
-
-    filtered_out = 0
-    items: list[HuntSniperItem] = []
-    for a in rows:
-        if a.age_years is None or a.backlinks is None:
-            filtered_out += 1
-            continue
-        if int(a.age_years) < 5 or int(a.backlinks) <= 0:
-            continue
-        items.append(
-            HuntSniperItem(
-                domain=a.domain,
-                platform=a.platform,
-                auction_url=a.auction_url,
-                current_bid=float(a.current_bid),
-                currency=a.currency,
-                end_time=a.end_time.replace(tzinfo=timezone.utc) if a.end_time and a.end_time.tzinfo is None else a.end_time,
-                age_years=int(a.age_years) if a.age_years is not None else None,
-                backlinks=int(a.backlinks) if a.backlinks is not None else None,
-                pounce_score=int(a.pounce_score) if a.pounce_score is not None else None,
-            )
-        )
-        if len(items) >= limit:
-            break
-
-    last_updated = (
-        await db.execute(select(func.max(DomainAuction.updated_at)).where(DomainAuction.is_active == True))  # noqa: E712
-    ).scalar()
-
-    return HuntSniperResponse(
-        items=items,
-        total=len(items),
-        filtered_out_missing_data=int(filtered_out),
-        last_updated=last_updated.replace(tzinfo=timezone.utc) if last_updated and last_updated.tzinfo is None else last_updated,
-    )
-
-
-@router.get("/trends", response_model=TrendsResponse)
-@limiter.limit("30/minute")
-async def trends(
-    request: Request,
-    _user: User = Depends(get_current_user),
-    geo: str = Query("US", min_length=2, max_length=2),
-):
-    try:
-        items_raw = await fetch_google_trends_daily_rss(geo=geo)
-    except Exception:
-        # Don't 500 the whole UI when the public feed is temporarily unavailable.
-        raise HTTPException(status_code=502, detail="Google Trends feed unavailable")
-    items = [
-        TrendItem(
-            title=i["title"],
-            approx_traffic=i.get("approx_traffic"),
-            published_at=i.get("published_at"),
-            link=i.get("link"),
-        )
-        for i in items_raw[:50]
-    ]
-    return TrendsResponse(geo=geo.upper(), items=items, fetched_at=_utcnow())
-
-
-@router.post("/keywords", response_model=KeywordAvailabilityResponse)
-@limiter.limit("30/minute")
-async def keyword_availability(
-    request: Request,
-    payload: KeywordAvailabilityRequest,
-    _user: User = Depends(get_current_user),
-):
-    # Normalize + cap work for UX/perf
-    keywords = []
-    for kw in payload.keywords[:25]:
-        k = kw.strip().lower().replace(" ", "")
-        if k:
-            keywords.append(kw)
-
-    tlds = [t.lower().lstrip(".") for t in payload.tlds[:20] if t.strip()]
-    if not tlds:
-        tlds = ["com"]
-
-    # Build candidate domains
-    candidates: list[tuple[str, str, str]] = []
-    domain_list: list[str] = []
-    for kw in keywords:
-        k = kw.strip().lower().replace(" ", "")
-        if not k:
-            continue
-        for t in tlds:
-            d = f"{k}.{t}"
-            candidates.append((kw, t, d))
-            domain_list.append(d)
-
-    checked = await check_domains(domain_list, concurrency=40)
-    by_domain = {c.domain: c for c in checked}
-
-    rows: list[KeywordAvailabilityRow] = []
-    for kw, t, d in candidates:
-        c = by_domain.get(d)
-        if not c:
-            rows.append(KeywordAvailabilityRow(keyword=kw, domain=d, tld=t, is_available=None, status="unknown"))
-        else:
-            rows.append(KeywordAvailabilityRow(keyword=kw, domain=d, tld=t, is_available=c.is_available, status=c.status))
-    return KeywordAvailabilityResponse(items=rows)
-
-
-@router.post("/typos", response_model=TypoCheckResponse)
-@limiter.limit("20/minute")
-async def typo_check(
-    request: Request,
-    payload: TypoCheckRequest,
-    _user: User = Depends(get_current_user),
-):
-    brand = payload.brand.strip()
-    typos = generate_typos(brand, limit=min(int(payload.limit) * 4, 400))
-
-    # Build domain list (dedup)
-    tlds = [t.lower().lstrip(".") for t in payload.tlds if t.strip()]
-    candidates: list[str] = []
-    seen = set()
-    for typo in typos:
-        for t in tlds:
-            d = f"{typo}.{t}"
-            if d not in seen:
-                candidates.append(d)
-                seen.add(d)
-            if len(candidates) >= payload.limit * 4:
-                break
-        if len(candidates) >= payload.limit * 4:
-            break
-
-    checked = await check_domains(candidates, concurrency=30)
-    available = [c for c in checked if c.status == "available"]
-    items = [TypoCandidate(domain=c.domain, is_available=c.is_available, status=c.status) for c in available[: payload.limit]]
-    return TypoCheckResponse(brand=brand, items=items)
-
-
-@router.post("/brandables", response_model=BrandableResponse)
-@limiter.limit("15/minute")
-async def brandables(
-    request: Request,
-    payload: BrandableRequest,
-    _user: User = Depends(get_current_user),
-):
-    pattern = payload.pattern.strip().lower()
-    if pattern not in ("cvcvc", "cvccv", "human"):
-        pattern = "cvcvc"
-
-    tlds = [t.lower().lstrip(".") for t in payload.tlds if t.strip()]
-    if not tlds:
-        tlds = ["com"]
-
-    # Generate + check up to max_checks; return only available
-    candidates: list[str] = []
-    for _ in range(int(payload.max_checks)):
-        if pattern == "cvcvc":
-            sld = generate_cvcvc()
-        elif pattern == "cvccv":
-            sld = generate_cvccv()
-        else:
-            sld = generate_human()
-        for t in tlds:
-            candidates.append(f"{sld}.{t}")
-
-    checked = await check_domains(candidates, concurrency=40)
-    available = [c for c in checked if c.status == "available"]
-    # De-dup by domain
-    seen = set()
-    out = []
-    for c in available:
-        if c.domain not in seen:
-            seen.add(c.domain)
-            out.append(BrandableCandidate(domain=c.domain, is_available=c.is_available, status=c.status))
-        if len(out) >= payload.limit:
-            break
-
-    return BrandableResponse(pattern=payload.pattern, items=out)
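`generate_cvcvc` and friends live in `app.services.hunt.brandables` and are not shown in this diff. A plausible minimal stand-in (an assumption — the real generator may weight letters or filter results) alternates consonants and vowels:

```python
# Hypothetical sketch of a CVCVC brandable generator (consonant-vowel alternation).
import random

CONSONANTS = "bcdfghjklmnpqrstvwz"
VOWELS = "aeiou"


def generate_cvcvc() -> str:
    # Positions 0, 2, 4 are consonants; 1, 3 are vowels.
    return "".join(random.choice(VOWELS if i % 2 else CONSONANTS) for i in range(5))


print(generate_cvcvc())  # e.g. "zatok"
```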
File diff suppressed because it is too large
@@ -1,93 +0,0 @@
-"""
-LLM API endpoints (Pounce -> Ollama Gateway).
-
-This is intentionally a thin proxy:
-- Enforces Pounce authentication (HttpOnly cookie)
-- Enforces tier gating (Trader/Tycoon)
-- Proxies to the internal LLM gateway (which talks to Ollama)
-"""
-
-from __future__ import annotations
-
-from typing import Any, Literal, Optional
-
-from fastapi import APIRouter, Depends, HTTPException, Query, Request, status
-from fastapi.responses import JSONResponse, StreamingResponse
-from pydantic import BaseModel, Field
-from sqlalchemy import select
-
-from app.api.deps import CurrentUser, Database
-from app.config import get_settings
-from app.models.subscription import Subscription, SubscriptionTier
-from app.services.llm_gateway import LLMGatewayError, chat_completions, chat_completions_stream
-
-
-router = APIRouter(prefix="/llm", tags=["LLM"])
-settings = get_settings()
-
-
-class ChatMessage(BaseModel):
-    role: Literal["system", "user", "assistant"]
-    content: str
-
-
-class ChatCompletionsRequest(BaseModel):
-    model: Optional[str] = None
-    messages: list[ChatMessage] = Field(default_factory=list, min_length=1)
-    temperature: Optional[float] = Field(default=None, ge=0.0, le=2.0)
-    stream: bool = False
-
-
-async def _get_or_create_subscription(db: Database, user_id: int) -> Subscription:
-    res = await db.execute(select(Subscription).where(Subscription.user_id == user_id))
-    sub = res.scalar_one_or_none()
-    if sub:
-        return sub
-    sub = Subscription(user_id=user_id, tier=SubscriptionTier.SCOUT, max_domains=5, check_frequency="daily")
-    db.add(sub)
-    await db.commit()
-    await db.refresh(sub)
-    return sub
-
-
-def _require_trader_or_higher(sub: Subscription) -> None:
-    if sub.tier not in (SubscriptionTier.TRADER, SubscriptionTier.TYCOON):
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN,
-            detail="Chat is available on Trader and Tycoon plans. Upgrade to unlock.",
-        )
-
-
-@router.post("/chat/completions")
-async def llm_chat_completions(
-    req: ChatCompletionsRequest,
-    current_user: CurrentUser,
-    db: Database,
-):
-    """
-    Proxy Chat Completions to internal Ollama gateway.
-    Returns OpenAI-ish JSON or SSE when stream=true.
-    """
-    sub = await _get_or_create_subscription(db, current_user.id)
-    _require_trader_or_higher(sub)
-
-    payload: dict[str, Any] = {
-        "model": (req.model or settings.llm_default_model),
-        "messages": [m.model_dump() for m in req.messages],
-        "temperature": req.temperature,
-        "stream": bool(req.stream),
-    }
-
-    try:
-        if req.stream:
-            return StreamingResponse(
-                chat_completions_stream(payload),
-                media_type="text/event-stream",
-                headers={"Cache-Control": "no-cache", "Connection": "keep-alive"},
-            )
-        data = await chat_completions(payload)
-        return JSONResponse(data)
-    except LLMGatewayError as e:
-        raise HTTPException(status_code=502, detail=str(e))
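For reference, a client-side sketch of consuming the SSE stream this proxy returns when `stream=true`. The `/api/v1` mount and local host are assumptions based on defaults elsewhere in the diff, and the request omits auth for brevity:

```python
# Sketch: read an SSE chat-completions stream line by line with httpx.
import asyncio

import httpx


async def stream_chat(prompt: str) -> None:
    payload = {"messages": [{"role": "user", "content": prompt}], "stream": True}
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        async with client.stream("POST", "/api/v1/llm/chat/completions", json=payload) as resp:
            async for line in resp.aiter_lines():
                if line.startswith("data: "):
                    print(line[len("data: "):])  # one JSON chunk per event


if __name__ == "__main__":
    asyncio.run(stream_chat("Hello"))
```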
@@ -1,171 +0,0 @@
-"""
-API endpoints for LLM-powered naming features.
-Used by Trends and Forge tabs in the Hunt page.
-"""
-from __future__ import annotations
-
-from typing import Optional
-
-from fastapi import APIRouter, Depends, HTTPException, status
-from pydantic import BaseModel, Field
-from sqlalchemy import select
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.api.deps import get_current_user
-from app.database import get_db
-from app.models.subscription import Subscription, SubscriptionTier
-from app.models.user import User
-from app.services.llm_naming import (
-    expand_trend_keywords,
-    analyze_trend,
-    generate_brandable_names,
-    generate_similar_names,
-)
-
-
-router = APIRouter(prefix="/naming", tags=["LLM Naming"])
-
-
-def _tier_level(tier: str) -> int:
-    t = (tier or "").lower()
-    if t == "tycoon":
-        return 3
-    if t == "trader":
-        return 2
-    return 1
-
-
-async def _get_user_tier(db: AsyncSession, user: User) -> str:
-    res = await db.execute(select(Subscription).where(Subscription.user_id == user.id))
-    sub = res.scalar_one_or_none()
-    if not sub:
-        return "scout"
-    return sub.tier.value
-
-
-async def _require_trader_or_above(db: AsyncSession, user: User):
-    """Check that user has at least Trader tier."""
-    tier = await _get_user_tier(db, user)
-    if _tier_level(tier) < 2:
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN,
-            detail="AI naming features require Trader or Tycoon plan."
-        )
-
-
-# ============================================================================
-# TRENDS TAB ENDPOINTS
-# ============================================================================
-
-class TrendExpandRequest(BaseModel):
-    trend: str = Field(..., min_length=1, max_length=100)
-    geo: str = Field(default="US", max_length=5)
-
-
-class TrendExpandResponse(BaseModel):
-    keywords: list[str]
-    trend: str
-
-
-@router.post("/trends/expand", response_model=TrendExpandResponse)
-async def expand_trend(
-    request: TrendExpandRequest,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Expand a trending topic into related domain-friendly keywords.
-    Requires Trader or Tycoon subscription.
-    """
-    await _require_trader_or_above(db, current_user)
-
-    keywords = await expand_trend_keywords(request.trend, request.geo)
-    return TrendExpandResponse(keywords=keywords, trend=request.trend)
-
-
-class TrendAnalyzeRequest(BaseModel):
-    trend: str = Field(..., min_length=1, max_length=100)
-    geo: str = Field(default="US", max_length=5)
-
-
-class TrendAnalyzeResponse(BaseModel):
-    analysis: str
-    trend: str
-
-
-@router.post("/trends/analyze", response_model=TrendAnalyzeResponse)
-async def analyze_trend_endpoint(
-    request: TrendAnalyzeRequest,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Get AI analysis of a trending topic for domain investors.
-    Requires Trader or Tycoon subscription.
-    """
-    await _require_trader_or_above(db, current_user)
-
-    analysis = await analyze_trend(request.trend, request.geo)
-    return TrendAnalyzeResponse(analysis=analysis, trend=request.trend)
-
-
-# ============================================================================
-# FORGE TAB ENDPOINTS
-# ============================================================================
-
-class BrandableGenerateRequest(BaseModel):
-    concept: str = Field(..., min_length=3, max_length=200)
-    style: Optional[str] = Field(default=None, max_length=50)
-    count: int = Field(default=15, ge=5, le=30)
-
-
-class BrandableGenerateResponse(BaseModel):
-    names: list[str]
-    concept: str
-
-
-@router.post("/forge/generate", response_model=BrandableGenerateResponse)
-async def generate_brandables(
-    request: BrandableGenerateRequest,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Generate brandable domain names based on a concept description.
-    Requires Trader or Tycoon subscription.
-    """
-    await _require_trader_or_above(db, current_user)
-
-    names = await generate_brandable_names(
-        request.concept,
-        style=request.style,
-        count=request.count
-    )
-    return BrandableGenerateResponse(names=names, concept=request.concept)
-
-
-class SimilarNamesRequest(BaseModel):
-    brand: str = Field(..., min_length=2, max_length=50)
-    count: int = Field(default=12, ge=5, le=20)
-
-
-class SimilarNamesResponse(BaseModel):
-    names: list[str]
-    brand: str
-
-
-@router.post("/forge/similar", response_model=SimilarNamesResponse)
-async def generate_similar(
-    request: SimilarNamesRequest,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Generate names similar to an existing brand.
-    Requires Trader or Tycoon subscription.
-    """
-    await _require_trader_or_above(db, current_user)
-
-    names = await generate_similar_names(request.brand, count=request.count)
-    return SimilarNamesResponse(names=names, brand=request.brand)
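The `_tier_level` ladder above reduces "Trader or higher" gating to an integer comparison. A standalone illustration of the same rule:

```python
# The tier ladder: unknown/empty tiers fall through to Scout (level 1).
def tier_level(tier: str | None) -> int:
    t = (tier or "").lower()
    return {"tycoon": 3, "trader": 2}.get(t, 1)


for tier in ("scout", "trader", "tycoon", None):
    print(tier, "allowed" if tier_level(tier) >= 2 else "blocked")
# scout blocked / trader allowed / tycoon allowed / None blocked
```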
@@ -1,232 +0,0 @@
-"""
-Vision API (Terminal-only).
-
-- Trader + Tycoon: can generate Vision JSON (cached in DB)
-- Scout: receives a 403 with an upgrade teaser message
-"""
-
-from __future__ import annotations
-
-import json
-from datetime import datetime, timedelta
-from typing import Any, Optional
-
-from fastapi import APIRouter, HTTPException, Query, status
-from pydantic import BaseModel, Field
-from sqlalchemy import and_, select
-
-from app.api.deps import CurrentUser, Database
-from app.models.llm_artifact import LLMArtifact
-from app.models.subscription import Subscription, SubscriptionTier
-from app.services.llm_gateway import LLMGatewayError
-from app.services.llm_vision import (
-    VISION_PROMPT_VERSION,
-    YIELD_LANDING_PROMPT_VERSION,
-    VisionResult,
-    YieldLandingConfig,
-    generate_vision,
-    generate_yield_landing,
-)
-
-
-router = APIRouter(prefix="/llm", tags=["LLM Vision"])
-
-
-class VisionResponse(BaseModel):
-    domain: str
-    cached: bool
-    model: str
-    prompt_version: str
-    generated_at: str
-    result: VisionResult
-
-
-class YieldLandingPreviewResponse(BaseModel):
-    domain: str
-    cached: bool
-    model: str
-    prompt_version: str
-    generated_at: str
-    result: YieldLandingConfig
-
-
-async def _get_or_create_subscription(db: Database, user_id: int) -> Subscription:
-    res = await db.execute(select(Subscription).where(Subscription.user_id == user_id))
-    sub = res.scalar_one_or_none()
-    if sub:
-        return sub
-    sub = Subscription(user_id=user_id, tier=SubscriptionTier.SCOUT, max_domains=5, check_frequency="daily")
-    db.add(sub)
-    await db.commit()
-    await db.refresh(sub)
-    return sub
-
-
-def _require_trader_or_higher(sub: Subscription) -> None:
-    if sub.tier not in (SubscriptionTier.TRADER, SubscriptionTier.TYCOON):
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN,
-            detail="Vision is available on Trader and Tycoon plans. Upgrade to unlock.",
-        )
-
-
-@router.get("/vision", response_model=VisionResponse)
-async def get_vision(
-    current_user: CurrentUser,
-    db: Database,
-    domain: str = Query(..., min_length=3, max_length=255),
-    refresh: bool = Query(False, description="Bypass cache and regenerate"),
-):
-    sub = await _get_or_create_subscription(db, current_user.id)
-    _require_trader_or_higher(sub)
-
-    normalized = domain.strip().lower()
-    now = datetime.utcnow()
-    ttl_days = 30
-
-    if not refresh:
-        cached = (
-            await db.execute(
-                select(LLMArtifact)
-                .where(
-                    and_(
-                        LLMArtifact.kind == "vision_v1",
-                        LLMArtifact.domain == normalized,
-                        LLMArtifact.prompt_version == VISION_PROMPT_VERSION,
-                        (LLMArtifact.expires_at.is_(None) | (LLMArtifact.expires_at > now)),
-                    )
-                )
-                .order_by(LLMArtifact.created_at.desc())
-                .limit(1)
-            )
-        ).scalar_one_or_none()
-        if cached:
-            try:
-                payload = json.loads(cached.payload_json)
-                result = VisionResult.model_validate(payload)
-            except Exception:
-                # Corrupt cache: regenerate.
-                cached = None
-            else:
-                return VisionResponse(
-                    domain=normalized,
-                    cached=True,
-                    model=cached.model,
-                    prompt_version=cached.prompt_version,
-                    generated_at=cached.created_at.isoformat(),
-                    result=result,
-                )
-
-    try:
-        result, model_used = await generate_vision(normalized)
-    except LLMGatewayError as e:
-        raise HTTPException(status_code=502, detail=str(e))
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Vision generation failed: {e}")
-
-    artifact = LLMArtifact(
-        user_id=current_user.id,
-        kind="vision_v1",
-        domain=normalized,
-        prompt_version=VISION_PROMPT_VERSION,
-        model=model_used,
-        payload_json=result.model_dump_json(),
-        created_at=now,
-        updated_at=now,
-        expires_at=now + timedelta(days=ttl_days),
-    )
-    db.add(artifact)
-    await db.commit()
-
-    return VisionResponse(
-        domain=normalized,
-        cached=False,
-        model=model_used,
-        prompt_version=VISION_PROMPT_VERSION,
-        generated_at=now.isoformat(),
-        result=result,
-    )
-
-
-@router.get("/yield/landing-preview", response_model=YieldLandingPreviewResponse)
-async def get_yield_landing_preview(
-    current_user: CurrentUser,
-    db: Database,
-    domain: str = Query(..., min_length=3, max_length=255),
-    refresh: bool = Query(False, description="Bypass cache and regenerate"),
-):
-    """
-    Generate a Yield landing page configuration preview for Terminal UX.
-
-    Trader + Tycoon: allowed.
-    Scout: blocked (upgrade teaser).
-    """
-    sub = await _get_or_create_subscription(db, current_user.id)
-    _require_trader_or_higher(sub)
-
-    normalized = domain.strip().lower()
-    now = datetime.utcnow()
-    ttl_days = 30
-
-    if not refresh:
-        cached = (
-            await db.execute(
-                select(LLMArtifact)
-                .where(
-                    and_(
-                        LLMArtifact.kind == "yield_landing_preview_v1",
-                        LLMArtifact.domain == normalized,
-                        LLMArtifact.prompt_version == YIELD_LANDING_PROMPT_VERSION,
-                        (LLMArtifact.expires_at.is_(None) | (LLMArtifact.expires_at > now)),
-                    )
-                )
-                .order_by(LLMArtifact.created_at.desc())
-                .limit(1)
-            )
-        ).scalar_one_or_none()
-        if cached:
-            try:
-                payload = json.loads(cached.payload_json)
-                result = YieldLandingConfig.model_validate(payload)
-            except Exception:
-                cached = None
-            else:
-                return YieldLandingPreviewResponse(
-                    domain=normalized,
-                    cached=True,
-                    model=cached.model,
-                    prompt_version=cached.prompt_version,
-                    generated_at=cached.created_at.isoformat(),
-                    result=result,
-                )
-
-    try:
-        result, model_used = await generate_yield_landing(normalized)
-    except LLMGatewayError as e:
-        raise HTTPException(status_code=502, detail=str(e))
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Landing preview generation failed: {e}")
-
-    artifact = LLMArtifact(
-        user_id=current_user.id,
-        kind="yield_landing_preview_v1",
-        domain=normalized,
-        prompt_version=YIELD_LANDING_PROMPT_VERSION,
-        model=model_used,
-        payload_json=result.model_dump_json(),
-        created_at=now,
-        updated_at=now,
-        expires_at=now + timedelta(days=ttl_days),
-    )
-    db.add(artifact)
-    await db.commit()
-
-    return YieldLandingPreviewResponse(
-        domain=normalized,
-        cached=False,
-        model=model_used,
-        prompt_version=YIELD_LANDING_PROMPT_VERSION,
-        generated_at=now.isoformat(),
-        result=result,
-    )
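The artifact cache above is keyed on `(kind, domain, prompt_version)` and expires after 30 days, so bumping the prompt version invalidates every prior entry. A minimal in-memory analogue of that lookup rule:

```python
# In-memory sketch of the (kind, domain, prompt_version) + TTL cache rule.
from datetime import datetime, timedelta

TTL = timedelta(days=30)
cache: dict[tuple[str, str, str], tuple[datetime, dict]] = {}


def get_cached(kind: str, domain: str, version: str) -> dict | None:
    entry = cache.get((kind, domain, version))
    if entry is None:
        return None
    created_at, payload = entry
    if datetime.utcnow() - created_at > TTL:
        return None  # expired: caller regenerates and overwrites
    return payload


cache[("vision_v1", "example.com", "v1")] = (datetime.utcnow(), {"score": 80})
print(get_cached("vision_v1", "example.com", "v1"))  # {'score': 80}
print(get_cached("vision_v1", "example.com", "v2"))  # None: prompt version changed
```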
@ -5,20 +5,15 @@ Supports:
|
|||||||
- Google OAuth 2.0
|
- Google OAuth 2.0
|
||||||
- GitHub OAuth
|
- GitHub OAuth
|
||||||
"""
|
"""
|
||||||
import base64
|
|
||||||
import hashlib
|
|
||||||
import hmac
|
|
||||||
import json
|
|
||||||
import os
|
import os
|
||||||
import secrets
|
import secrets
|
||||||
import logging
|
import logging
|
||||||
import time
|
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
import httpx
|
import httpx
|
||||||
from fastapi import APIRouter, HTTPException, status, Query, Request
|
from fastapi import APIRouter, HTTPException, status, Query
|
||||||
from fastapi.responses import RedirectResponse
|
from fastapi.responses import RedirectResponse
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
@ -28,7 +23,6 @@ from app.config import get_settings
|
|||||||
from app.models.user import User
|
from app.models.user import User
|
||||||
from app.models.subscription import Subscription, SubscriptionTier, SubscriptionStatus, TIER_CONFIG
|
from app.models.subscription import Subscription, SubscriptionTier, SubscriptionStatus, TIER_CONFIG
|
||||||
from app.services.auth import AuthService
|
from app.services.auth import AuthService
|
||||||
from app.security import set_auth_cookie, should_use_secure_cookies
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
router = APIRouter()
|
router = APIRouter()
|
||||||
@ -47,123 +41,6 @@ GITHUB_REDIRECT_URI = os.getenv("GITHUB_REDIRECT_URI", "http://localhost:8000/ap
|
|||||||
|
|
||||||
FRONTEND_URL = os.getenv("SITE_URL", "http://localhost:3000")
|
FRONTEND_URL = os.getenv("SITE_URL", "http://localhost:3000")
|
||||||
|
|
||||||
OAUTH_STATE_TTL_SECONDS = 600 # 10 minutes
|
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_redirect_path(redirect: Optional[str]) -> str:
|
|
||||||
"""
|
|
||||||
Only allow internal (relative) redirects.
|
|
||||||
Prevents open-redirect and token/referrer exfil paths.
|
|
||||||
"""
|
|
||||||
default = "/terminal/radar"
|
|
||||||
if not redirect:
|
|
||||||
return default
|
|
||||||
|
|
||||||
r = redirect.strip()
|
|
||||||
if not r.startswith("/"):
|
|
||||||
return default
|
|
||||||
if r.startswith("//"):
|
|
||||||
return default
|
|
||||||
if "://" in r:
|
|
||||||
return default
|
|
||||||
if "\\" in r:
|
|
||||||
return default
|
|
||||||
if len(r) > 2048:
|
|
||||||
return default
|
|
||||||
return r
|
|
||||||
|
|
||||||
|
|
||||||
def _b64url_encode(data: bytes) -> str:
|
|
||||||
return base64.urlsafe_b64encode(data).rstrip(b"=").decode("ascii")
|
|
||||||
|
|
||||||
|
|
||||||
def _b64url_decode(data: str) -> bytes:
|
|
||||||
pad = "=" * (-len(data) % 4)
|
|
||||||
return base64.urlsafe_b64decode(data + pad)
|
|
||||||
|
|
||||||
|
|
||||||
def _oauth_nonce_cookie_name(provider: str) -> str:
|
|
||||||
return f"pounce_oauth_nonce_{provider}"
|
|
||||||
|
|
||||||
|
|
||||||
def _set_oauth_nonce_cookie(response: RedirectResponse, provider: str, nonce: str) -> None:
|
|
||||||
response.set_cookie(
|
|
||||||
key=_oauth_nonce_cookie_name(provider),
|
|
||||||
value=nonce,
|
|
||||||
httponly=True,
|
|
||||||
secure=should_use_secure_cookies(),
|
|
||||||
samesite="lax",
|
|
||||||
max_age=OAUTH_STATE_TTL_SECONDS,
|
|
||||||
path="/api/v1/oauth",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _clear_oauth_nonce_cookie(response: RedirectResponse, provider: str) -> None:
|
|
||||||
response.delete_cookie(
|
|
||||||
key=_oauth_nonce_cookie_name(provider),
|
|
||||||
path="/api/v1/oauth",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _create_oauth_state(provider: str, nonce: str, redirect_path: str) -> str:
|
|
||||||
"""
|
|
||||||
Signed, short-lived state payload.
|
|
||||||
|
|
||||||
Also protects the redirect_path against tampering.
|
|
||||||
"""
|
|
||||||
if not settings.secret_key:
|
|
||||||
raise RuntimeError("SECRET_KEY is required for OAuth state signing")
|
|
||||||
|
|
||||||
payload = {
|
|
||||||
"p": provider,
|
|
||||||
"n": nonce,
|
|
||||||
"r": redirect_path,
|
|
||||||
"ts": int(time.time()),
|
|
||||||
}
|
|
||||||
payload_b64 = _b64url_encode(
|
|
||||||
json.dumps(payload, separators=(",", ":"), ensure_ascii=False).encode("utf-8")
|
|
||||||
)
|
|
||||||
sig = hmac.new(
|
|
||||||
settings.secret_key.encode("utf-8"),
|
|
||||||
payload_b64.encode("utf-8"),
|
|
||||||
hashlib.sha256,
|
|
||||||
).digest()
|
|
||||||
return f"{payload_b64}.{_b64url_encode(sig)}"
|
|
||||||
|
|
||||||
|
|
||||||
def _verify_oauth_state(state: str, provider: str) -> tuple[str, str]:
|
|
||||||
if not settings.secret_key:
|
|
||||||
raise ValueError("OAuth state verification not available (missing SECRET_KEY)")
|
|
||||||
|
|
||||||
if not state or "." not in state:
|
|
||||||
raise ValueError("Invalid state format")
|
|
||||||
|
|
||||||
payload_b64, sig_b64 = state.split(".", 1)
|
|
||||||
expected_sig = _b64url_encode(
|
|
||||||
hmac.new(
|
|
||||||
settings.secret_key.encode("utf-8"),
|
|
||||||
payload_b64.encode("utf-8"),
|
|
||||||
hashlib.sha256,
|
|
||||||
).digest()
|
|
||||||
)
|
|
||||||
if not hmac.compare_digest(expected_sig, sig_b64):
|
|
||||||
raise ValueError("Invalid state signature")
|
|
||||||
|
|
||||||
payload = json.loads(_b64url_decode(payload_b64).decode("utf-8"))
|
|
||||||
if payload.get("p") != provider:
|
|
||||||
raise ValueError("State provider mismatch")
|
|
||||||
|
|
||||||
ts = int(payload.get("ts") or 0)
|
|
||||||
if ts <= 0 or (int(time.time()) - ts) > OAUTH_STATE_TTL_SECONDS:
|
|
||||||
raise ValueError("State expired")
|
|
||||||
|
|
||||||
nonce = str(payload.get("n") or "")
|
|
||||||
redirect_path = _sanitize_redirect_path(payload.get("r"))
|
|
||||||
if not nonce:
|
|
||||||
raise ValueError("Missing nonce")
|
|
||||||
|
|
||||||
return nonce, redirect_path
|
|
||||||
|
|
||||||
|
|
||||||
# ============== Schemas ==============
|
# ============== Schemas ==============
|
||||||
|
|
||||||
@ -225,8 +102,7 @@ async def get_or_create_oauth_user(
|
|||||||
# Create new user
|
# Create new user
|
||||||
user = User(
|
user = User(
|
||||||
email=email.lower(),
|
email=email.lower(),
|
||||||
# Random password (won't be used), but keep it a valid bcrypt hash.
|
hashed_password=secrets.token_urlsafe(32), # Random password (won't be used)
|
||||||
hashed_password=AuthService.hash_password(secrets.token_urlsafe(32)),
|
|
||||||
name=name,
|
name=name,
|
||||||
oauth_provider=provider,
|
oauth_provider=provider,
|
||||||
oauth_id=oauth_id,
|
oauth_id=oauth_id,
|
||||||
@ -294,9 +170,10 @@ async def google_login(redirect: Optional[str] = Query(None)):
|
|||||||
detail="Google OAuth not configured",
|
detail="Google OAuth not configured",
|
||||||
)
|
)
|
||||||
|
|
```diff
-    redirect_path = _sanitize_redirect_path(redirect)
-    nonce = secrets.token_urlsafe(16)
-    state = _create_oauth_state("google", nonce, redirect_path)
+    # Store redirect URL in state
+    state = secrets.token_urlsafe(16)
+    if redirect:
+        state = f"{state}:{redirect}"
 
     params = {
         "client_id": GOOGLE_CLIENT_ID,
@@ -309,14 +186,11 @@ async def google_login(redirect: Optional[str] = Query(None)):
     }
 
     url = f"https://accounts.google.com/o/oauth2/v2/auth?{urlencode(params)}"
-    response = RedirectResponse(url=url)
-    _set_oauth_nonce_cookie(response, "google", nonce)
-    return response
+    return RedirectResponse(url=url)
 
 
 @router.get("/google/callback")
 async def google_callback(
-    request: Request,
     code: str = Query(...),
     state: str = Query(""),
     db: Database = None,
@@ -328,16 +202,10 @@ async def google_callback(
             detail="Google OAuth not configured",
         )
 
-    try:
-        nonce, redirect_path = _verify_oauth_state(state, "google")
-    except Exception as e:
-        logger.warning(f"Invalid OAuth state (google): {e}")
-        return RedirectResponse(url=f"{FRONTEND_URL}/login?error=oauth_state_invalid")
-
-    cookie_nonce = request.cookies.get(_oauth_nonce_cookie_name("google"))
-    if not cookie_nonce or not hmac.compare_digest(cookie_nonce, nonce):
-        logger.warning("OAuth nonce mismatch (google)")
-        return RedirectResponse(url=f"{FRONTEND_URL}/login?error=oauth_state_invalid")
+    # Parse redirect from state
+    redirect_path = "/dashboard"
+    if ":" in state:
+        _, redirect_path = state.split(":", 1)
 
     try:
         # Exchange code for tokens
@@ -389,20 +257,12 @@ async def google_callback(
         # Create JWT
         jwt_token, _ = create_jwt_for_user(user)
 
-        # Redirect to frontend WITHOUT token in URL; set auth cookie instead.
-        query = {"redirect": redirect_path}
+        # Redirect to frontend with token
+        redirect_url = f"{FRONTEND_URL}/oauth/callback?token={jwt_token}&redirect={redirect_path}"
         if is_new:
-            query["new"] = "true"
-        redirect_url = f"{FRONTEND_URL}/oauth/callback?{urlencode(query)}"
+            redirect_url += "&new=true"
 
-        response = RedirectResponse(url=redirect_url)
-        _clear_oauth_nonce_cookie(response, "google")
-        set_auth_cookie(
-            response=response,
-            token=jwt_token,
-            max_age_seconds=settings.access_token_expire_minutes * 60,
-        )
-        return response
+        return RedirectResponse(url=redirect_url)
 
     except Exception as e:
         logger.exception(f"Google OAuth error: {e}")
@@ -422,9 +282,10 @@ async def github_login(redirect: Optional[str] = Query(None)):
             detail="GitHub OAuth not configured",
         )
 
-    redirect_path = _sanitize_redirect_path(redirect)
-    nonce = secrets.token_urlsafe(16)
-    state = _create_oauth_state("github", nonce, redirect_path)
+    # Store redirect URL in state
+    state = secrets.token_urlsafe(16)
+    if redirect:
+        state = f"{state}:{redirect}"
 
     params = {
         "client_id": GITHUB_CLIENT_ID,
@@ -434,14 +295,11 @@ async def github_login(redirect: Optional[str] = Query(None)):
     }
 
     url = f"https://github.com/login/oauth/authorize?{urlencode(params)}"
-    response = RedirectResponse(url=url)
-    _set_oauth_nonce_cookie(response, "github", nonce)
-    return response
+    return RedirectResponse(url=url)
 
 
 @router.get("/github/callback")
 async def github_callback(
-    request: Request,
     code: str = Query(...),
     state: str = Query(""),
     db: Database = None,
@@ -453,16 +311,10 @@ async def github_callback(
             detail="GitHub OAuth not configured",
         )
 
-    try:
-        nonce, redirect_path = _verify_oauth_state(state, "github")
-    except Exception as e:
-        logger.warning(f"Invalid OAuth state (github): {e}")
-        return RedirectResponse(url=f"{FRONTEND_URL}/login?error=oauth_state_invalid")
-
-    cookie_nonce = request.cookies.get(_oauth_nonce_cookie_name("github"))
-    if not cookie_nonce or not hmac.compare_digest(cookie_nonce, nonce):
-        logger.warning("OAuth nonce mismatch (github)")
-        return RedirectResponse(url=f"{FRONTEND_URL}/login?error=oauth_state_invalid")
+    # Parse redirect from state
+    redirect_path = "/dashboard"
+    if ":" in state:
+        _, redirect_path = state.split(":", 1)
 
     try:
         async with httpx.AsyncClient() as client:
@@ -547,19 +399,12 @@ async def github_callback(
         # Create JWT
         jwt_token, _ = create_jwt_for_user(user)
 
-        query = {"redirect": redirect_path}
+        # Redirect to frontend with token
+        redirect_url = f"{FRONTEND_URL}/oauth/callback?token={jwt_token}&redirect={redirect_path}"
         if is_new:
-            query["new"] = "true"
-        redirect_url = f"{FRONTEND_URL}/oauth/callback?{urlencode(query)}"
+            redirect_url += "&new=true"
 
-        response = RedirectResponse(url=redirect_url)
-        _clear_oauth_nonce_cookie(response, "github")
-        set_auth_cookie(
-            response=response,
-            token=jwt_token,
-            max_age_seconds=settings.access_token_expire_minutes * 60,
-        )
-        return response
+        return RedirectResponse(url=redirect_url)
 
     except Exception as e:
         logger.exception(f"GitHub OAuth error: {e}")
```
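Both providers follow the same pattern, so the change reads identically twice: `main` signs the OAuth `state` with `_create_oauth_state` and double-checks a nonce cookie in the callback, while the branch encodes the redirect as a bare `state:redirect` string. The helper bodies are not part of this compare; below is a minimal sketch of how such signing could work. The payload layout, the `.` separator, and `SECRET_KEY` are assumptions for illustration only.

```python
# Hypothetical sketch of the signed-state helpers; only the call sites appear
# in the diff above, so everything here is an assumed implementation.
import base64
import hashlib
import hmac
import json
import secrets

SECRET_KEY = b"change-me"  # assumed application secret


def _create_oauth_state(provider: str, nonce: str, redirect_path: str) -> str:
    # Pack provider, nonce and redirect into a URL-safe payload, then MAC it.
    payload = base64.urlsafe_b64encode(
        json.dumps({"p": provider, "n": nonce, "r": redirect_path}).encode()
    ).decode()
    sig = hmac.new(SECRET_KEY, payload.encode(), hashlib.sha256).hexdigest()
    return f"{payload}.{sig}"


def _verify_oauth_state(state: str, provider: str) -> tuple[str, str]:
    # Reject anything whose MAC does not match before trusting the payload.
    payload, _, sig = state.rpartition(".")
    expected = hmac.new(SECRET_KEY, payload.encode(), hashlib.sha256).hexdigest()
    if not hmac.compare_digest(sig, expected):
        raise ValueError("bad state signature")
    data = json.loads(base64.urlsafe_b64decode(payload.encode()))
    if data.get("p") != provider:
        raise ValueError("provider mismatch")
    return data["n"], data["r"]


# Round-trip check:
nonce = secrets.token_urlsafe(16)
state = _create_oauth_state("google", nonce, "/dashboard")
assert _verify_oauth_state(state, "google") == (nonce, "/dashboard")
```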
```diff
@@ -1,12 +1,10 @@
 """Portfolio API routes."""
-import secrets
 from datetime import datetime
 from typing import Optional, List
 from fastapi import APIRouter, Depends, HTTPException, status, Query
 from pydantic import BaseModel, Field
 from sqlalchemy import select, func, and_
 from sqlalchemy.ext.asyncio import AsyncSession
-import dns.resolver
 
 from app.database import get_db
 from app.api.deps import get_current_user
@@ -73,11 +71,6 @@ class PortfolioDomainResponse(BaseModel):
     notes: Optional[str]
     tags: Optional[str]
     roi: Optional[float]
-    # DNS Verification fields
-    is_dns_verified: bool = False
-    verification_status: str = "unverified"
-    verification_code: Optional[str] = None
-    verified_at: Optional[datetime] = None
     created_at: datetime
     updated_at: datetime
 
@@ -85,25 +78,6 @@ class PortfolioDomainResponse(BaseModel):
         from_attributes = True
 
 
-class DNSVerificationStartResponse(BaseModel):
-    """Response when starting DNS verification."""
-    domain_id: int
-    domain: str
-    verification_code: str
-    dns_record_type: str
-    dns_record_name: str
-    dns_record_value: str
-    instructions: str
-    status: str
-
-
-class DNSVerificationCheckResponse(BaseModel):
-    """Response when checking DNS verification."""
-    verified: bool
-    status: str
-    message: str
-
-
 class PortfolioSummary(BaseModel):
     """Summary of user's portfolio."""
     total_domains: int
@@ -176,112 +150,7 @@ class ValuationResponse(BaseModel):
     disclaimer: str
 
 
-# ============== Helper Functions ==============
-
-def _generate_verification_code() -> str:
-    """Generate a unique verification code."""
-    return f"pounce-verify-{secrets.token_hex(8)}"
-
-
-def _domain_to_response(domain: PortfolioDomain) -> PortfolioDomainResponse:
-    """Convert PortfolioDomain to response schema."""
-    return PortfolioDomainResponse(
-        id=domain.id,
-        domain=domain.domain,
-        purchase_date=domain.purchase_date,
-        purchase_price=domain.purchase_price,
-        purchase_registrar=domain.purchase_registrar,
-        registrar=domain.registrar,
-        renewal_date=domain.renewal_date,
-        renewal_cost=domain.renewal_cost,
-        auto_renew=domain.auto_renew,
-        estimated_value=domain.estimated_value,
-        value_updated_at=domain.value_updated_at,
-        is_sold=domain.is_sold,
-        sale_date=domain.sale_date,
-        sale_price=domain.sale_price,
-        status=domain.status,
-        notes=domain.notes,
-        tags=domain.tags,
-        roi=domain.roi,
-        is_dns_verified=getattr(domain, 'is_dns_verified', False) or False,
-        verification_status=getattr(domain, 'verification_status', 'unverified') or 'unverified',
-        verification_code=getattr(domain, 'verification_code', None),
-        verified_at=getattr(domain, 'verified_at', None),
-        created_at=domain.created_at,
-        updated_at=domain.updated_at,
-    )
-
-
 # ============== Portfolio Endpoints ==============
-# IMPORTANT: Static routes must come BEFORE dynamic routes like /{domain_id}
 
-@router.get("/verified", response_model=List[PortfolioDomainResponse])
-async def get_verified_domains(
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Get only DNS-verified portfolio domains.
-
-    These domains can be used for Yield or For Sale listings.
-    """
-    result = await db.execute(
-        select(PortfolioDomain).where(
-            and_(
-                PortfolioDomain.user_id == current_user.id,
-                PortfolioDomain.is_dns_verified == True,
-                PortfolioDomain.is_sold == False,
-            )
-        ).order_by(PortfolioDomain.domain.asc())
-    )
-    domains = result.scalars().all()
-
-    return [_domain_to_response(d) for d in domains]
-
-
-@router.get("/summary", response_model=PortfolioSummary)
-async def get_portfolio_summary(
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """Get portfolio summary statistics."""
-    result = await db.execute(
-        select(PortfolioDomain).where(PortfolioDomain.user_id == current_user.id)
-    )
-    domains = result.scalars().all()
-
-    total_domains = len(domains)
-    active_domains = sum(1 for d in domains if d.status == "active" and not d.is_sold)
-    sold_domains = sum(1 for d in domains if d.is_sold)
-
-    total_invested = sum(d.purchase_price or 0 for d in domains)
-    total_value = sum(d.estimated_value or 0 for d in domains if not d.is_sold)
-    total_sold_value = sum(d.sale_price or 0 for d in domains if d.is_sold)
-
-    # Calculate active investment for ROI
-    active_investment = sum(d.purchase_price or 0 for d in domains if not d.is_sold)
-    sold_investment = sum(d.purchase_price or 0 for d in domains if d.is_sold)
-
-    unrealized_profit = total_value - active_investment
-    realized_profit = total_sold_value - sold_investment
-
-    overall_roi = 0.0
-    if total_invested > 0:
-        overall_roi = ((total_value + total_sold_value - total_invested) / total_invested) * 100
-
-    return PortfolioSummary(
-        total_domains=total_domains,
-        active_domains=active_domains,
-        sold_domains=sold_domains,
-        total_invested=round(total_invested, 2),
-        total_value=round(total_value, 2),
-        total_sold_value=round(total_sold_value, 2),
-        unrealized_profit=round(unrealized_profit, 2),
-        realized_profit=round(realized_profit, 2),
-        overall_roi=round(overall_roi, 2),
-    )
-
-
 @router.get("", response_model=List[PortfolioDomainResponse])
 async def get_portfolio(
@@ -335,10 +204,6 @@ async def get_portfolio(
             notes=d.notes,
             tags=d.tags,
             roi=d.roi,
-            is_dns_verified=getattr(d, 'is_dns_verified', False) or False,
-            verification_status=getattr(d, 'verification_status', 'unverified') or 'unverified',
-            verification_code=getattr(d, 'verification_code', None),
-            verified_at=getattr(d, 'verified_at', None),
             created_at=d.created_at,
             updated_at=d.updated_at,
         )
@@ -347,6 +212,49 @@ async def get_portfolio(
     return responses
 
 
+@router.get("/summary", response_model=PortfolioSummary)
+async def get_portfolio_summary(
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """Get portfolio summary statistics."""
+    result = await db.execute(
+        select(PortfolioDomain).where(PortfolioDomain.user_id == current_user.id)
+    )
+    domains = result.scalars().all()
+
+    total_domains = len(domains)
+    active_domains = sum(1 for d in domains if d.status == "active" and not d.is_sold)
+    sold_domains = sum(1 for d in domains if d.is_sold)
+
+    total_invested = sum(d.purchase_price or 0 for d in domains)
+    total_value = sum(d.estimated_value or 0 for d in domains if not d.is_sold)
+    total_sold_value = sum(d.sale_price or 0 for d in domains if d.is_sold)
+
+    # Calculate active investment for ROI
+    active_investment = sum(d.purchase_price or 0 for d in domains if not d.is_sold)
+    sold_investment = sum(d.purchase_price or 0 for d in domains if d.is_sold)
+
+    unrealized_profit = total_value - active_investment
+    realized_profit = total_sold_value - sold_investment
+
+    overall_roi = 0.0
+    if total_invested > 0:
+        overall_roi = ((total_value + total_sold_value - total_invested) / total_invested) * 100
+
+    return PortfolioSummary(
+        total_domains=total_domains,
+        active_domains=active_domains,
+        sold_domains=sold_domains,
+        total_invested=round(total_invested, 2),
+        total_value=round(total_value, 2),
+        total_sold_value=round(total_sold_value, 2),
+        unrealized_profit=round(unrealized_profit, 2),
+        realized_profit=round(realized_profit, 2),
+        overall_roi=round(overall_roi, 2),
+    )
+
+
 @router.post("", response_model=PortfolioDomainResponse, status_code=status.HTTP_201_CREATED)
 async def add_portfolio_domain(
     data: PortfolioDomainCreate,
```
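The summary endpoint is moved below `get_portfolio` here, but the math is unchanged, so it is worth sanity-checking once with concrete numbers (hypothetical figures):

```python
# Hypothetical portfolio to illustrate the summary math above.
total_invested    = 1000.0  # every purchase, sold and unsold
total_value       = 900.0   # estimated value of unsold domains
total_sold_value  = 400.0   # proceeds from sold domains
active_investment = 700.0   # purchase cost of the unsold domains
sold_investment   = 300.0   # purchase cost of the sold domains

unrealized_profit = total_value - active_investment       # 200.0
realized_profit   = total_sold_value - sold_investment    # 100.0
overall_roi = ((total_value + total_sold_value - total_invested)
               / total_invested) * 100                    # 30.0
```

Note the denominator is everything ever invested, so realized and unrealized gains are blended into one ROI figure. The remaining hunks of this file continue below.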
```diff
@@ -443,10 +351,6 @@ async def add_portfolio_domain(
         notes=domain.notes,
         tags=domain.tags,
         roi=domain.roi,
-        is_dns_verified=getattr(domain, 'is_dns_verified', False) or False,
-        verification_status=getattr(domain, 'verification_status', 'unverified') or 'unverified',
-        verification_code=getattr(domain, 'verification_code', None),
-        verified_at=getattr(domain, 'verified_at', None),
         created_at=domain.created_at,
         updated_at=domain.updated_at,
     )
@@ -494,10 +398,6 @@ async def get_portfolio_domain(
         notes=domain.notes,
         tags=domain.tags,
         roi=domain.roi,
-        is_dns_verified=getattr(domain, 'is_dns_verified', False) or False,
-        verification_status=getattr(domain, 'verification_status', 'unverified') or 'unverified',
-        verification_code=getattr(domain, 'verification_code', None),
-        verified_at=getattr(domain, 'verified_at', None),
         created_at=domain.created_at,
         updated_at=domain.updated_at,
     )
@@ -554,10 +454,6 @@ async def update_portfolio_domain(
         notes=domain.notes,
         tags=domain.tags,
         roi=domain.roi,
-        is_dns_verified=getattr(domain, 'is_dns_verified', False) or False,
-        verification_status=getattr(domain, 'verification_status', 'unverified') or 'unverified',
-        verification_code=getattr(domain, 'verification_code', None),
-        verified_at=getattr(domain, 'verified_at', None),
         created_at=domain.created_at,
         updated_at=domain.updated_at,
     )
@@ -614,10 +510,6 @@ async def mark_domain_sold(
         notes=domain.notes,
         tags=domain.tags,
         roi=domain.roi,
-        is_dns_verified=getattr(domain, 'is_dns_verified', False) or False,
-        verification_status=getattr(domain, 'verification_status', 'unverified') or 'unverified',
-        verification_code=getattr(domain, 'verification_code', None),
-        verified_at=getattr(domain, 'verified_at', None),
         created_at=domain.created_at,
         updated_at=domain.updated_at,
     )
@@ -701,10 +593,6 @@ async def refresh_domain_value(
         notes=domain.notes,
         tags=domain.tags,
         roi=domain.roi,
-        is_dns_verified=getattr(domain, 'is_dns_verified', False) or False,
-        verification_status=getattr(domain, 'verification_status', 'unverified') or 'unverified',
-        verification_code=getattr(domain, 'verification_code', None),
-        verified_at=getattr(domain, 'verified_at', None),
         created_at=domain.created_at,
         updated_at=domain.updated_at,
     )
@@ -729,160 +617,3 @@ async def get_domain_valuation(
 
     return ValuationResponse(**valuation)
 
-
-# ============== DNS Verification Endpoints ==============
-
-@router.post("/{domain_id}/verify-dns", response_model=DNSVerificationStartResponse)
-async def start_dns_verification(
-    domain_id: int,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Start DNS verification for a portfolio domain.
-
-    Returns a verification code that must be added as a TXT record.
-    """
-    result = await db.execute(
-        select(PortfolioDomain).where(
-            and_(
-                PortfolioDomain.id == domain_id,
-                PortfolioDomain.user_id == current_user.id,
-            )
-        )
-    )
-    domain = result.scalar_one_or_none()
-
-    if not domain:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail="Domain not found in portfolio",
-        )
-
-    if domain.is_dns_verified:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail="Domain is already verified",
-        )
-
-    # Generate or reuse existing verification code
-    if not domain.verification_code:
-        domain.verification_code = _generate_verification_code()
-
-    domain.verification_status = "pending"
-    domain.verification_started_at = datetime.utcnow()
-
-    await db.commit()
-    await db.refresh(domain)
-
-    return DNSVerificationStartResponse(
-        domain_id=domain.id,
-        domain=domain.domain,
-        verification_code=domain.verification_code,
-        dns_record_type="TXT",
-        dns_record_name=f"_pounce.{domain.domain}",
-        dns_record_value=domain.verification_code,
-        instructions=f"Add a TXT record to your DNS settings:\n\nHost/Name: _pounce\nType: TXT\nValue: {domain.verification_code}\n\nDNS changes can take up to 48 hours to propagate, but usually complete within minutes.",
-        status=domain.verification_status,
-    )
-
-
-@router.get("/{domain_id}/verify-dns/check", response_model=DNSVerificationCheckResponse)
-async def check_dns_verification(
-    domain_id: int,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Check if DNS verification is complete.
-
-    Looks for the TXT record and verifies it matches the expected code.
-    """
-    result = await db.execute(
-        select(PortfolioDomain).where(
-            and_(
-                PortfolioDomain.id == domain_id,
-                PortfolioDomain.user_id == current_user.id,
-            )
-        )
-    )
-    domain = result.scalar_one_or_none()
-
-    if not domain:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail="Domain not found in portfolio",
-        )
-
-    if domain.is_dns_verified:
-        return DNSVerificationCheckResponse(
-            verified=True,
-            status="verified",
-            message="Domain ownership already verified",
-        )
-
-    if not domain.verification_code:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail="Verification not started. Call POST /verify-dns first.",
-        )
-
-    # Check DNS TXT record
-    txt_record_name = f"_pounce.{domain.domain}"
-    verified = False
-
-    try:
-        resolver = dns.resolver.Resolver()
-        resolver.timeout = 5
-        resolver.lifetime = 10
-
-        answers = resolver.resolve(txt_record_name, 'TXT')
-
-        for rdata in answers:
-            txt_value = rdata.to_text().strip('"')
-            if txt_value == domain.verification_code:
-                verified = True
-                break
-    except dns.resolver.NXDOMAIN:
-        return DNSVerificationCheckResponse(
-            verified=False,
-            status="pending",
-            message=f"TXT record not found. Please add a TXT record at _pounce.{domain.domain}",
-        )
-    except dns.resolver.NoAnswer:
-        return DNSVerificationCheckResponse(
-            verified=False,
-            status="pending",
-            message="TXT record exists but has no value. Check your DNS configuration.",
-        )
-    except dns.resolver.Timeout:
-        return DNSVerificationCheckResponse(
-            verified=False,
-            status="pending",
-            message="DNS query timed out. Please try again.",
-        )
-    except Exception as e:
-        return DNSVerificationCheckResponse(
-            verified=False,
-            status="error",
-            message=f"DNS lookup error: {str(e)}",
-        )
-
-    if verified:
-        domain.is_dns_verified = True
-        domain.verification_status = "verified"
-        domain.verified_at = datetime.utcnow()
-        await db.commit()
-
-        return DNSVerificationCheckResponse(
-            verified=True,
-            status="verified",
-            message="Domain ownership verified successfully! You can now list this domain for sale or activate Yield.",
-        )
-    else:
-        return DNSVerificationCheckResponse(
-            verified=False,
-            status="pending",
-            message=f"TXT record found but value doesn't match. Expected: {domain.verification_code}",
-        )
```
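The `check_dns_verification` endpoint removed above boils down to one DNS lookup: resolve `_pounce.<domain>` as TXT and compare each value against the stored code. The same check as a standalone sketch (dnspython; `domain` and `expected_code` are placeholders):

```python
# Condensed version of the deleted TXT-record check, for reference.
import dns.resolver  # pip install dnspython


def txt_matches(domain: str, expected_code: str) -> bool:
    resolver = dns.resolver.Resolver()
    resolver.timeout = 5    # per-nameserver timeout, seconds
    resolver.lifetime = 10  # total budget for the query, seconds
    try:
        answers = resolver.resolve(f"_pounce.{domain}", "TXT")
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.resolver.Timeout):
        return False
    # dnspython renders TXT rdata with surrounding quotes, hence strip('"').
    return any(r.to_text().strip('"') == expected_code for r in answers)
```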
```diff
@@ -1,242 +0,0 @@
-"""
-SEO Data API - "SEO Juice Detector"
-
-This implements Strategy 3 from analysis_3.md:
-"The feature: 'SEO Juice Detector'.
-When a domain drops, you check not only the name
-but whether backlinks exist.
-Monetization: this is a pure Tycoon feature ($29/month)."
-
-Endpoints:
-- GET /seo/{domain} - Get SEO data for a domain (TYCOON ONLY)
-- POST /seo/batch - Analyze multiple domains (TYCOON ONLY)
-"""
-import logging
-from typing import List
-from fastapi import APIRouter, Depends, HTTPException, Query
-from pydantic import BaseModel
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.database import get_db
-from app.api.deps import get_current_user
-from app.models.user import User
-from app.services.seo_analyzer import seo_analyzer
-
-logger = logging.getLogger(__name__)
-router = APIRouter()
-
-
-# ============== Schemas ==============
-
-class SEOMetrics(BaseModel):
-    domain_authority: int | None
-    page_authority: int | None
-    spam_score: int | None
-    total_backlinks: int | None
-    referring_domains: int | None
-
-
-class NotableLinks(BaseModel):
-    has_wikipedia: bool
-    has_gov: bool
-    has_edu: bool
-    has_news: bool
-    notable_domains: List[str]
-
-
-class BacklinkInfo(BaseModel):
-    domain: str
-    authority: int
-    page: str = ""
-
-
-class SEOResponse(BaseModel):
-    domain: str
-    seo_score: int
-    value_category: str
-    metrics: SEOMetrics
-    notable_links: NotableLinks
-    top_backlinks: List[BacklinkInfo]
-    estimated_value: float | None
-    data_source: str
-    last_updated: str | None
-    is_estimated: bool
-
-
-class BatchSEORequest(BaseModel):
-    domains: List[str]
-
-
-class BatchSEOResponse(BaseModel):
-    results: List[SEOResponse]
-    total_requested: int
-    total_processed: int
-
-
-# ============== Helper ==============
-
-def _check_tycoon_access(user: User) -> None:
-    """Verify user has Tycoon tier access."""
-    if not user.subscription:
-        raise HTTPException(
-            status_code=403,
-            detail="SEO data is a Tycoon feature. Please upgrade your subscription."
-        )
-
-    tier = user.subscription.tier.lower() if user.subscription.tier else ""
-    if tier != "tycoon":
-        raise HTTPException(
-            status_code=403,
-            detail="SEO data is a Tycoon-only feature. Please upgrade to access backlink analysis."
-        )
-
-
-# ============== Endpoints ==============
-
-@router.get("/{domain}", response_model=SEOResponse)
-async def get_seo_data(
-    domain: str,
-    force_refresh: bool = Query(False, description="Force refresh from API"),
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Get SEO data for a domain.
-
-    TYCOON FEATURE ONLY.
-
-    Returns:
-    - Domain Authority (0-100)
-    - Page Authority (0-100)
-    - Spam Score (0-100)
-    - Total Backlinks
-    - Referring Domains
-    - Notable links (Wikipedia, .gov, .edu, news sites)
-    - Top backlinks with authority scores
-    - Estimated SEO value
-
-    From analysis_3.md:
-    "Domain `alte-bäckerei-münchen.de` is available.
-    Has links from `sueddeutsche.de` and `wikipedia.org`."
-    """
-    # Check Tycoon access
-    _check_tycoon_access(current_user)
-
-    # Clean domain input
-    domain = domain.lower().strip()
-    if domain.startswith('http://'):
-        domain = domain[7:]
-    if domain.startswith('https://'):
-        domain = domain[8:]
-    if domain.startswith('www.'):
-        domain = domain[4:]
-    domain = domain.rstrip('/')
-
-    # Get SEO data
-    result = await seo_analyzer.analyze_domain(domain, db, force_refresh)
-
-    return SEOResponse(**result)
-
-
-@router.post("/batch", response_model=BatchSEOResponse)
-async def batch_seo_analysis(
-    request: BatchSEORequest,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Analyze multiple domains for SEO data.
-
-    TYCOON FEATURE ONLY.
-
-    Limited to 10 domains per request to prevent abuse.
-    """
-    # Check Tycoon access
-    _check_tycoon_access(current_user)
-
-    # Limit batch size
-    domains = request.domains[:10]
-
-    results = []
-    for domain in domains:
-        try:
-            # Clean domain
-            domain = domain.lower().strip()
-            if domain.startswith('http://'):
-                domain = domain[7:]
-            if domain.startswith('https://'):
-                domain = domain[8:]
-            if domain.startswith('www.'):
-                domain = domain[4:]
-            domain = domain.rstrip('/')
-
-            result = await seo_analyzer.analyze_domain(domain, db)
-            results.append(SEOResponse(**result))
-        except Exception as e:
-            logger.error(f"Error analyzing {domain}: {e}")
-            # Skip failed domains
-            continue
-
-    return BatchSEOResponse(
-        results=results,
-        total_requested=len(request.domains),
-        total_processed=len(results),
-    )
-
-
-@router.get("/{domain}/quick")
-async def get_seo_quick_summary(
-    domain: str,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """
-    Get a quick SEO summary for a domain.
-
-    This is a lighter version that shows basic metrics without full backlink analysis.
-    Available to Trader+ users.
-    """
-    # Check at least Trader access
-    if not current_user.subscription:
-        raise HTTPException(
-            status_code=403,
-            detail="SEO data requires a paid subscription."
-        )
-
-    tier = current_user.subscription.tier.lower() if current_user.subscription.tier else ""
-    if tier == "scout":
-        raise HTTPException(
-            status_code=403,
-            detail="SEO data requires Trader or higher subscription."
-        )
-
-    # Clean domain
-    domain = domain.lower().strip().rstrip('/')
-    if domain.startswith('http://'):
-        domain = domain[7:]
-    if domain.startswith('https://'):
-        domain = domain[8:]
-    if domain.startswith('www.'):
-        domain = domain[4:]
-
-    result = await seo_analyzer.analyze_domain(domain, db)
-
-    # Return limited data for non-Tycoon
-    if tier != "tycoon":
-        return {
-            'domain': result['domain'],
-            'seo_score': result['seo_score'],
-            'value_category': result['value_category'],
-            'domain_authority': result['metrics']['domain_authority'],
-            'has_notable_links': (
-                result['notable_links']['has_wikipedia'] or
-                result['notable_links']['has_gov'] or
-                result['notable_links']['has_news']
-            ),
-            'is_estimated': result['is_estimated'],
-            'upgrade_for_details': True,
-            'message': "Upgrade to Tycoon for full backlink analysis"
-        }
-
-    return result
```
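All three deleted routes repeat the same scheme/`www.` stripping inline (and `/quick` even does its `rstrip('/')` before the prefix checks instead of after). A small helper with the intended behavior, not present in the source, just to make the normalization explicit:

```python
# Equivalent of the inline cleanup repeated in the deleted SEO routes.
def clean_domain(raw: str) -> str:
    domain = raw.lower().strip()
    for prefix in ("http://", "https://", "www."):
        if domain.startswith(prefix):
            domain = domain[len(prefix):]
    return domain.rstrip("/")


assert clean_domain("https://www.Example.com/") == "example.com"
```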
```diff
@@ -1,458 +0,0 @@
-"""
-Sniper Alerts API - Hyper-personalized auction notifications
-
-This implements "Strategy 4: Tailor-made alerts" from analysis_3.md:
-"The user can save extremely specific filters:
-- Notify me ONLY when a 4-letter .com domain drops that contains no 'q' or 'x'."
-
-Endpoints:
-- GET /sniper-alerts - Get user's alerts
-- POST /sniper-alerts - Create new alert
-- PUT /sniper-alerts/{id} - Update alert
-- DELETE /sniper-alerts/{id} - Delete alert
-- GET /sniper-alerts/{id}/matches - Get matched auctions
-- POST /sniper-alerts/{id}/test - Test alert against current auctions
-"""
-import logging
-from datetime import datetime
-from typing import Optional, List
-from fastapi import APIRouter, Depends, HTTPException
-from pydantic import BaseModel, Field
-from sqlalchemy import select, func, and_
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.database import get_db
-from app.api.deps import get_current_user
-from app.models.user import User
-from app.models.sniper_alert import SniperAlert, SniperAlertMatch
-from app.models.auction import DomainAuction
-
-logger = logging.getLogger(__name__)
-router = APIRouter()
-
-
-# ============== Schemas ==============
-
-class SniperAlertCreate(BaseModel):
-    """Create a new sniper alert."""
-    name: str = Field(..., min_length=1, max_length=100)
-    description: Optional[str] = Field(None, max_length=500)
-
-    # Filter criteria
-    tlds: Optional[str] = Field(None, description="Comma-separated TLDs: com,io,ai")
-    keywords: Optional[str] = Field(None, description="Must contain (comma-separated)")
-    exclude_keywords: Optional[str] = Field(None, description="Must not contain")
-    max_length: Optional[int] = Field(None, ge=1, le=63)
-    min_length: Optional[int] = Field(None, ge=1, le=63)
-    max_price: Optional[float] = Field(None, ge=0)
-    min_price: Optional[float] = Field(None, ge=0)
-    max_bids: Optional[int] = Field(None, ge=0, description="Max bids (low competition)")
-    ending_within_hours: Optional[int] = Field(None, ge=1, le=168)
-    platforms: Optional[str] = Field(None, description="Comma-separated platforms")
-
-    # Advanced
-    no_numbers: bool = False
-    no_hyphens: bool = False
-    exclude_chars: Optional[str] = Field(None, description="Chars to exclude: q,x,z")
-
-    # Notifications
-    notify_email: bool = True
-    notify_sms: bool = False
-
-
-class SniperAlertUpdate(BaseModel):
-    """Update a sniper alert."""
-    name: Optional[str] = Field(None, max_length=100)
-    description: Optional[str] = Field(None, max_length=500)
-    tlds: Optional[str] = None
-    keywords: Optional[str] = None
-    exclude_keywords: Optional[str] = None
-    max_length: Optional[int] = Field(None, ge=1, le=63)
-    min_length: Optional[int] = Field(None, ge=1, le=63)
-    max_price: Optional[float] = Field(None, ge=0)
-    min_price: Optional[float] = Field(None, ge=0)
-    max_bids: Optional[int] = Field(None, ge=0)
-    ending_within_hours: Optional[int] = Field(None, ge=1, le=168)
-    platforms: Optional[str] = None
-    no_numbers: Optional[bool] = None
-    no_hyphens: Optional[bool] = None
-    exclude_chars: Optional[str] = None
-    notify_email: Optional[bool] = None
-    notify_sms: Optional[bool] = None
-    is_active: Optional[bool] = None
-
-
-class SniperAlertResponse(BaseModel):
-    """Sniper alert response."""
-    id: int
-    name: str
-    description: Optional[str]
-    tlds: Optional[str]
-    keywords: Optional[str]
-    exclude_keywords: Optional[str]
-    max_length: Optional[int]
-    min_length: Optional[int]
-    max_price: Optional[float]
-    min_price: Optional[float]
-    max_bids: Optional[int]
-    ending_within_hours: Optional[int]
-    platforms: Optional[str]
-    no_numbers: bool
-    no_hyphens: bool
-    exclude_chars: Optional[str]
-    notify_email: bool
-    notify_sms: bool
-    is_active: bool
-    matches_count: int
-    notifications_sent: int
-    last_matched_at: Optional[datetime]
-    created_at: datetime
-
-    class Config:
-        from_attributes = True
-
-
-class MatchResponse(BaseModel):
-    """Alert match response."""
-    id: int
-    domain: str
-    platform: str
-    current_bid: float
-    end_time: datetime
-    auction_url: Optional[str]
-    matched_at: datetime
-    notified: bool
-
-    class Config:
-        from_attributes = True
-
-
-# ============== Endpoints ==============
-
-@router.get("", response_model=List[SniperAlertResponse])
-async def get_sniper_alerts(
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """Get user's sniper alerts."""
-    result = await db.execute(
-        select(SniperAlert)
-        .where(SniperAlert.user_id == current_user.id)
-        .order_by(SniperAlert.created_at.desc())
-    )
-    alerts = list(result.scalars().all())
-
-    return [
-        SniperAlertResponse(
-            id=alert.id,
-            name=alert.name,
-            description=alert.description,
-            tlds=alert.tlds,
-            keywords=alert.keywords,
-            exclude_keywords=alert.exclude_keywords,
-            max_length=alert.max_length,
-            min_length=alert.min_length,
-            max_price=alert.max_price,
-            min_price=alert.min_price,
-            max_bids=alert.max_bids,
-            ending_within_hours=alert.ending_within_hours,
-            platforms=alert.platforms,
-            no_numbers=alert.no_numbers,
-            no_hyphens=alert.no_hyphens,
-            exclude_chars=alert.exclude_chars,
-            notify_email=alert.notify_email,
-            notify_sms=alert.notify_sms,
-            is_active=alert.is_active,
-            matches_count=alert.matches_count,
-            notifications_sent=alert.notifications_sent,
-            last_matched_at=alert.last_matched_at,
-            created_at=alert.created_at,
-        )
-        for alert in alerts
-    ]
-
-
-@router.post("", response_model=SniperAlertResponse)
-async def create_sniper_alert(
-    data: SniperAlertCreate,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """Create a new sniper alert."""
-    # Check alert limit based on subscription
-    user_alerts = await db.execute(
-        select(func.count(SniperAlert.id)).where(
-            SniperAlert.user_id == current_user.id
-        )
-    )
-    alert_count = user_alerts.scalar() or 0
-
-    from app.models.subscription import TIER_CONFIG, SubscriptionTier
-    tier = current_user.subscription.tier if current_user.subscription else SubscriptionTier.SCOUT
-    tier_config = TIER_CONFIG.get(tier, TIER_CONFIG[SubscriptionTier.SCOUT])
-    max_alerts = tier_config.get("sniper_limit", 2)
-
-    if alert_count >= max_alerts:
-        raise HTTPException(
-            status_code=403,
-            detail=f"Alert limit reached ({max_alerts}). Upgrade for more."
-        )
-
-    # SMS notifications are Tycoon only
-    if data.notify_sms and tier != "tycoon":
-        raise HTTPException(
-            status_code=403,
-            detail="SMS notifications are a Tycoon feature"
-        )
-
-    # Build filter criteria JSON
-    filter_criteria = {
-        "tlds": data.tlds.split(',') if data.tlds else None,
-        "keywords": data.keywords.split(',') if data.keywords else None,
-        "exclude_keywords": data.exclude_keywords.split(',') if data.exclude_keywords else None,
-        "max_length": data.max_length,
-        "min_length": data.min_length,
-        "max_price": data.max_price,
-        "min_price": data.min_price,
-        "max_bids": data.max_bids,
-        "ending_within_hours": data.ending_within_hours,
-        "platforms": data.platforms.split(',') if data.platforms else None,
-        "no_numbers": data.no_numbers,
-        "no_hyphens": data.no_hyphens,
-        "exclude_chars": data.exclude_chars.split(',') if data.exclude_chars else None,
-    }
-
-    alert = SniperAlert(
-        user_id=current_user.id,
-        name=data.name,
-        description=data.description,
-        filter_criteria=filter_criteria,
-        tlds=data.tlds,
-        keywords=data.keywords,
-        exclude_keywords=data.exclude_keywords,
-        max_length=data.max_length,
-        min_length=data.min_length,
-        max_price=data.max_price,
-        min_price=data.min_price,
-        max_bids=data.max_bids,
-        ending_within_hours=data.ending_within_hours,
-        platforms=data.platforms,
-        no_numbers=data.no_numbers,
-        no_hyphens=data.no_hyphens,
-        exclude_chars=data.exclude_chars,
-        notify_email=data.notify_email,
-        notify_sms=data.notify_sms,
-    )
-
-    db.add(alert)
-    await db.commit()
-    await db.refresh(alert)
-
-    return SniperAlertResponse(
-        id=alert.id,
-        name=alert.name,
-        description=alert.description,
-        tlds=alert.tlds,
-        keywords=alert.keywords,
-        exclude_keywords=alert.exclude_keywords,
-        max_length=alert.max_length,
-        min_length=alert.min_length,
-        max_price=alert.max_price,
-        min_price=alert.min_price,
-        max_bids=alert.max_bids,
-        ending_within_hours=alert.ending_within_hours,
-        platforms=alert.platforms,
-        no_numbers=alert.no_numbers,
-        no_hyphens=alert.no_hyphens,
-        exclude_chars=alert.exclude_chars,
-        notify_email=alert.notify_email,
-        notify_sms=alert.notify_sms,
-        is_active=alert.is_active,
-        matches_count=alert.matches_count,
-        notifications_sent=alert.notifications_sent,
-        last_matched_at=alert.last_matched_at,
-        created_at=alert.created_at,
-    )
-
-
-@router.put("/{id}", response_model=SniperAlertResponse)
-async def update_sniper_alert(
-    id: int,
-    data: SniperAlertUpdate,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """Update a sniper alert."""
-    result = await db.execute(
-        select(SniperAlert).where(
-            and_(
-                SniperAlert.id == id,
-                SniperAlert.user_id == current_user.id,
-            )
-        )
-    )
-    alert = result.scalar_one_or_none()
-
-    if not alert:
-        raise HTTPException(status_code=404, detail="Alert not found")
-
-    # Update fields
-    update_fields = data.model_dump(exclude_unset=True)
-    for field, value in update_fields.items():
-        if hasattr(alert, field):
-            setattr(alert, field, value)
-
-    await db.commit()
-    await db.refresh(alert)
-
-    return SniperAlertResponse(
-        id=alert.id,
-        name=alert.name,
-        description=alert.description,
-        tlds=alert.tlds,
-        keywords=alert.keywords,
-        exclude_keywords=alert.exclude_keywords,
-        max_length=alert.max_length,
-        min_length=alert.min_length,
-        max_price=alert.max_price,
-        min_price=alert.min_price,
-        max_bids=alert.max_bids,
-        ending_within_hours=alert.ending_within_hours,
-        platforms=alert.platforms,
-        no_numbers=alert.no_numbers,
-        no_hyphens=alert.no_hyphens,
-        exclude_chars=alert.exclude_chars,
-        notify_email=alert.notify_email,
-        notify_sms=alert.notify_sms,
-        is_active=alert.is_active,
-        matches_count=alert.matches_count,
-        notifications_sent=alert.notifications_sent,
-        last_matched_at=alert.last_matched_at,
-        created_at=alert.created_at,
-    )
-
-
-@router.delete("/{id}")
-async def delete_sniper_alert(
-    id: int,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """Delete a sniper alert."""
-    result = await db.execute(
-        select(SniperAlert).where(
-            and_(
-                SniperAlert.id == id,
-                SniperAlert.user_id == current_user.id,
-            )
-        )
-    )
-    alert = result.scalar_one_or_none()
-
-    if not alert:
-        raise HTTPException(status_code=404, detail="Alert not found")
-
-    await db.delete(alert)
-    await db.commit()
-
-    return {"success": True, "message": "Alert deleted"}
-
-
-@router.get("/{id}/matches", response_model=List[MatchResponse])
-async def get_alert_matches(
-    id: int,
-    limit: int = 50,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """Get matched auctions for an alert."""
-    # Verify ownership
-    result = await db.execute(
-        select(SniperAlert).where(
-            and_(
-                SniperAlert.id == id,
-                SniperAlert.user_id == current_user.id,
-            )
-        )
-    )
-    alert = result.scalar_one_or_none()
-
-    if not alert:
-        raise HTTPException(status_code=404, detail="Alert not found")
-
-    matches_result = await db.execute(
-        select(SniperAlertMatch)
-        .where(SniperAlertMatch.alert_id == id)
-        .order_by(SniperAlertMatch.matched_at.desc())
-        .limit(limit)
-    )
-    matches = list(matches_result.scalars().all())
-
-    return [
-        MatchResponse(
-            id=m.id,
-            domain=m.domain,
-            platform=m.platform,
-            current_bid=m.current_bid,
-            end_time=m.end_time,
-            auction_url=m.auction_url,
-            matched_at=m.matched_at,
-            notified=m.notified,
-        )
-        for m in matches
-    ]
-
-
-@router.post("/{id}/test")
-async def test_sniper_alert(
-    id: int,
-    current_user: User = Depends(get_current_user),
-    db: AsyncSession = Depends(get_db),
-):
-    """Test alert against current auctions."""
-    # Verify ownership
-    result = await db.execute(
-        select(SniperAlert).where(
-            and_(
-                SniperAlert.id == id,
-                SniperAlert.user_id == current_user.id,
-            )
-        )
-    )
-    alert = result.scalar_one_or_none()
-
-    if not alert:
-        raise HTTPException(status_code=404, detail="Alert not found")
-
-    # Get active auctions
-    auctions_result = await db.execute(
-        select(DomainAuction)
-        .where(DomainAuction.is_active == True)
-        .limit(500)
-    )
-    auctions = list(auctions_result.scalars().all())
-
-    matches = []
-    for auction in auctions:
-        if alert.matches_domain(
-            auction.domain,
-            auction.tld,
-            auction.current_bid,
-            auction.num_bids
-        ):
-            matches.append({
-                "domain": auction.domain,
-                "platform": auction.platform,
-                "current_bid": auction.current_bid,
-                "num_bids": auction.num_bids,
-                "end_time": auction.end_time.isoformat(),
-            })
-
-    return {
-        "alert_name": alert.name,
-        "auctions_checked": len(auctions),
-        "matches_found": len(matches),
-        "matches": matches[:20],  # Limit to 20 for preview
-        "message": f"Found {len(matches)} matching auctions" if matches else "No matches found. Try adjusting your criteria.",
-    }
```
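The `/test` endpoint above depends on `SniperAlert.matches_domain`, whose body never appears in this compare. A hedged sketch of what such a predicate might look like, inferred purely from the filter columns in the schemas; every field interpretation here (e.g. matching against the label left of the first dot) is an assumption:

```python
# Speculative reconstruction of SniperAlert.matches_domain; the real model
# is not shown in this diff, so treat all field semantics as assumptions.
def matches_domain(alert, domain: str, tld: str, current_bid: float, num_bids: int) -> bool:
    name = domain.split(".")[0]  # label left of the first dot (assumed)
    if alert.tlds and tld not in [t.strip() for t in alert.tlds.split(",")]:
        return False
    if alert.max_length is not None and len(name) > alert.max_length:
        return False
    if alert.min_length is not None and len(name) < alert.min_length:
        return False
    if alert.max_price is not None and current_bid > alert.max_price:
        return False
    if alert.min_price is not None and current_bid < alert.min_price:
        return False
    if alert.max_bids is not None and num_bids > alert.max_bids:
        return False
    if alert.no_numbers and any(c.isdigit() for c in name):
        return False
    if alert.no_hyphens and "-" in name:
        return False
    if alert.exclude_chars and any(c.strip() in name for c in alert.exclude_chars.split(",")):
        return False
    if alert.keywords and not any(k.strip() in name for k in alert.keywords.split(",")):
        return False
    if alert.exclude_keywords and any(k.strip() in name for k in alert.exclude_keywords.split(",")):
        return False
    return True
```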
```diff
@@ -84,7 +84,7 @@ async def get_subscription(
         tier=subscription.tier.value,
         tier_name=config["name"],
         status=subscription.status.value,
-        domain_limit=subscription.domain_limit,
+        domain_limit=subscription.max_domains,
         domains_used=domains_used,
         portfolio_limit=config.get("portfolio_limit", 0),
         check_frequency=config["check_frequency"],
@@ -225,7 +225,7 @@ async def create_checkout_session(
     # Get site URL from environment
     site_url = os.getenv("SITE_URL", "http://localhost:3000")
 
-    success_url = request.success_url or f"{site_url}/command/welcome?plan={request.plan}"
+    success_url = request.success_url or f"{site_url}/dashboard?upgraded=true"
     cancel_url = request.cancel_url or f"{site_url}/pricing?cancelled=true"
 
     try:
@@ -285,7 +285,7 @@ async def create_portal_session(
     )
 
     site_url = os.getenv("SITE_URL", "http://localhost:3000")
-    return_url = f"{site_url}/command/settings"
+    return_url = f"{site_url}/dashboard"
 
     try:
         portal_url = await StripeService.create_portal_session(
```
@ -1,365 +0,0 @@
|
|||||||
"""
|
|
||||||
Telemetry KPIs (4A.2).
|
|
||||||
|
|
||||||
Admin-only endpoint to compute funnel KPIs from telemetry_events.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import json
|
|
||||||
import statistics
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Any, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
|
||||||
from sqlalchemy import and_, case, func, select
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.api.deps import get_current_user, get_db
|
|
||||||
from app.models.telemetry import TelemetryEvent
|
|
||||||
from app.models.user import User
|
|
||||||
from app.schemas.referrals import ReferralKpiWindow, ReferralKpisResponse, ReferralReferrerRow
|
|
||||||
from app.schemas.telemetry import (
|
|
||||||
DealFunnelKpis,
|
|
||||||
TelemetryKpiWindow,
|
|
||||||
TelemetryKpisResponse,
|
|
||||||
YieldFunnelKpis,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/telemetry", tags=["telemetry"])
|
|
||||||
|
|
||||||
|
|
||||||
def _require_admin(user: User) -> None:
|
|
||||||
if not user.is_admin:
|
|
||||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Admin access required")
|
|
||||||
|
|
||||||
|
|
||||||
def _safe_json(metadata_json: Optional[str]) -> dict[str, Any]:
|
|
||||||
if not metadata_json:
|
|
||||||
return {}
|
|
||||||
try:
|
|
||||||
value = json.loads(metadata_json)
|
|
||||||
return value if isinstance(value, dict) else {}
|
|
||||||
except Exception:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
def _median(values: list[float]) -> Optional[float]:
|
|
||||||
if not values:
|
|
||||||
return None
|
|
||||||
return float(statistics.median(values))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/kpis", response_model=TelemetryKpisResponse)
|
|
||||||
async def get_kpis(
|
|
||||||
days: int = Query(30, ge=1, le=365),
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
current_user: User = Depends(get_current_user),
|
|
||||||
):
|
|
||||||
_require_admin(current_user)
|
|
||||||
|
|
||||||
end = datetime.utcnow()
|
|
||||||
start = end - timedelta(days=days)
|
|
||||||
|
|
||||||
event_names = [
|
|
||||||
# Deal funnel
|
|
||||||
"listing_view",
|
|
||||||
"inquiry_created",
|
|
||||||
"inquiry_status_changed",
|
|
||||||
"message_sent",
|
|
||||||
"listing_marked_sold",
|
|
||||||
# Yield funnel
|
|
||||||
"yield_connected",
|
|
||||||
"yield_click",
|
|
||||||
"yield_conversion",
|
|
||||||
"payout_paid",
|
|
||||||
]
|
|
||||||
|
|
||||||
rows = (
|
|
||||||
await db.execute(
|
|
||||||
select(
|
|
||||||
TelemetryEvent.event_name,
|
|
||||||
TelemetryEvent.created_at,
|
|
||||||
TelemetryEvent.listing_id,
|
|
||||||
-                TelemetryEvent.inquiry_id,
-                TelemetryEvent.yield_domain_id,
-                TelemetryEvent.click_id,
-                TelemetryEvent.metadata_json,
-            ).where(
-                and_(
-                    TelemetryEvent.created_at >= start,
-                    TelemetryEvent.created_at <= end,
-                    TelemetryEvent.event_name.in_(event_names),
-                )
-            )
-        )
-    ).all()
-
-    # -----------------------------
-    # Deal KPIs
-    # -----------------------------
-    listing_views = 0
-    inquiries_created = 0
-    inquiry_created_at: dict[int, datetime] = {}
-    first_seller_reply_at: dict[int, datetime] = {}
-    listings_with_inquiries: set[int] = set()
-    sold_listings: set[int] = set()
-    sold_at_by_listing: dict[int, datetime] = {}
-    first_inquiry_at_by_listing: dict[int, datetime] = {}
-
-    # -----------------------------
-    # Yield KPIs
-    # -----------------------------
-    connected_domains = 0
-    clicks = 0
-    conversions = 0
-    payouts_paid = 0
-    payouts_paid_amount_total = 0.0
-
-    for event_name, created_at, listing_id, inquiry_id, yield_domain_id, click_id, metadata_json in rows:
-        created_at = created_at  # already datetime
-
-        if event_name == "listing_view":
-            listing_views += 1
-            continue
-
-        if event_name == "inquiry_created":
-            inquiries_created += 1
-            if inquiry_id:
-                inquiry_created_at[inquiry_id] = created_at
-            if listing_id:
-                listings_with_inquiries.add(listing_id)
-                prev = first_inquiry_at_by_listing.get(listing_id)
-                if prev is None or created_at < prev:
-                    first_inquiry_at_by_listing[listing_id] = created_at
-            continue
-
-        if event_name == "message_sent":
-            if not inquiry_id:
-                continue
-            meta = _safe_json(metadata_json)
-            if meta.get("role") == "seller":
-                prev = first_seller_reply_at.get(inquiry_id)
-                if prev is None or created_at < prev:
-                    first_seller_reply_at[inquiry_id] = created_at
-            continue
-
-        if event_name == "listing_marked_sold":
-            if listing_id:
-                sold_listings.add(listing_id)
-                sold_at_by_listing[listing_id] = created_at
-            continue
-
-        if event_name == "yield_connected":
-            connected_domains += 1
-            continue
-
-        if event_name == "yield_click":
-            clicks += 1
-            continue
-
-        if event_name == "yield_conversion":
-            conversions += 1
-            continue
-
-        if event_name == "payout_paid":
-            payouts_paid += 1
-            meta = _safe_json(metadata_json)
-            amount = meta.get("amount")
-            if isinstance(amount, (int, float)):
-                payouts_paid_amount_total += float(amount)
-            continue
-
-    seller_replied_inquiries = len(first_seller_reply_at.keys())
-    inquiry_reply_rate = (seller_replied_inquiries / inquiries_created) if inquiries_created else 0.0
-
-    # Inquiry → Sold rate (on listing-level intersection)
-    sold_from_inquiry = sold_listings.intersection(listings_with_inquiries)
-    inquiry_to_sold_listing_rate = (len(sold_from_inquiry) / len(listings_with_inquiries)) if listings_with_inquiries else 0.0
-
-    # Median reply time (seconds): inquiry_created → first seller message
-    reply_deltas: list[float] = []
-    for inq_id, created in inquiry_created_at.items():
-        replied = first_seller_reply_at.get(inq_id)
-        if replied:
-            reply_deltas.append((replied - created).total_seconds())
-
-    # Median time-to-sold (seconds): first inquiry on listing → listing sold
-    sold_deltas: list[float] = []
-    for listing in sold_from_inquiry:
-        inq_at = first_inquiry_at_by_listing.get(listing)
-        sold_at = sold_at_by_listing.get(listing)
-        if inq_at and sold_at and sold_at >= inq_at:
-            sold_deltas.append((sold_at - inq_at).total_seconds())
-
-    deal = DealFunnelKpis(
-        listing_views=listing_views,
-        inquiries_created=inquiries_created,
-        seller_replied_inquiries=seller_replied_inquiries,
-        inquiry_reply_rate=float(inquiry_reply_rate),
-        listings_with_inquiries=len(listings_with_inquiries),
-        listings_sold=len(sold_listings),
-        inquiry_to_sold_listing_rate=float(inquiry_to_sold_listing_rate),
-        median_reply_seconds=_median(reply_deltas),
-        median_time_to_sold_seconds=_median(sold_deltas),
-    )
-
-    yield_kpis = YieldFunnelKpis(
-        connected_domains=connected_domains,
-        clicks=clicks,
-        conversions=conversions,
-        conversion_rate=float(conversions / clicks) if clicks else 0.0,
-        payouts_paid=payouts_paid,
-        payouts_paid_amount_total=float(payouts_paid_amount_total),
-    )
-
-    return TelemetryKpisResponse(
-        window=TelemetryKpiWindow(days=days, start=start, end=end),
-        deal=deal,
-        yield_=yield_kpis,
-    )
-
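The removed hunk above calls two helpers, `_safe_json` and `_median`, that are defined earlier in the file and do not appear in this diff. A minimal sketch of what they plausibly look like (hypothetical reconstruction, shown only for reading convenience):

```python
# Hypothetical helper sketches; the real definitions live earlier in the file
# and may differ in detail.
import json
import statistics
from typing import Optional


def _safe_json(raw: Optional[str]) -> dict:
    """Parse metadata_json defensively; telemetry rows may hold NULL or malformed JSON."""
    if not raw:
        return {}
    try:
        parsed = json.loads(raw)
        return parsed if isinstance(parsed, dict) else {}
    except (ValueError, TypeError):
        return {}


def _median(values: list[float]) -> Optional[float]:
    """Median of the collected deltas, or None when there is nothing to measure."""
    return statistics.median(values) if values else None
```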
-
-@router.get("/referrals", response_model=ReferralKpisResponse)
-async def get_referral_kpis(
-    days: int = Query(30, ge=1, le=365),
-    limit: int = Query(200, ge=1, le=1000),
-    offset: int = Query(0, ge=0),
-    db: AsyncSession = Depends(get_db),
-    current_user: User = Depends(get_current_user),
-):
-    """
-    Admin-only referral KPIs for the viral loop (3C.2).
-
-    This is intentionally user-based (users.referred_by_user_id) + telemetry-based (referral_link_viewed),
-    so it stays robust even if ref codes evolve.
-    """
-    _require_admin(current_user)
-
-    end = datetime.utcnow()
-    start = end - timedelta(days=days)
-
-    # Referred user counts per referrer (all-time + window)
-    referred_counts_subq = (
-        select(
-            User.referred_by_user_id.label("referrer_user_id"),
-            func.count(User.id).label("referred_users_total"),
-            func.coalesce(
-                func.sum(case((User.created_at >= start, 1), else_=0)),
-                0,
-            ).label("referred_users_window"),
-        )
-        .where(User.referred_by_user_id.isnot(None))
-        .group_by(User.referred_by_user_id)
-        .subquery()
-    )
-
-    # Referral link views in window (telemetry)
-    link_views_subq = (
-        select(
-            TelemetryEvent.user_id.label("referrer_user_id"),
-            func.count(TelemetryEvent.id).label("referral_link_views_window"),
-        )
-        .where(
-            and_(
-                TelemetryEvent.event_name == "referral_link_viewed",
-                TelemetryEvent.created_at >= start,
-                TelemetryEvent.created_at <= end,
-                TelemetryEvent.user_id.isnot(None),
-            )
-        )
-        .group_by(TelemetryEvent.user_id)
-        .subquery()
-    )
-
-    # Referrers: anyone with an invite_code (we still show even if counts are zero)
-    rows = (
-        await db.execute(
-            select(
-                User.id,
-                User.email,
-                User.invite_code,
-                User.created_at,
-                func.coalesce(referred_counts_subq.c.referred_users_total, 0),
-                func.coalesce(referred_counts_subq.c.referred_users_window, 0),
-                func.coalesce(link_views_subq.c.referral_link_views_window, 0),
-            )
-            .where(User.invite_code.isnot(None))
-            .outerjoin(referred_counts_subq, referred_counts_subq.c.referrer_user_id == User.id)
-            .outerjoin(link_views_subq, link_views_subq.c.referrer_user_id == User.id)
-            .order_by(
-                func.coalesce(referred_counts_subq.c.referred_users_window, 0).desc(),
-                func.coalesce(referred_counts_subq.c.referred_users_total, 0).desc(),
-                User.created_at.desc(),
-            )
-            .offset(offset)
-            .limit(limit)
-        )
-    ).all()
-
-    referrers = [
-        ReferralReferrerRow(
-            user_id=int(user_id),
-            email=str(email),
-            invite_code=str(invite_code) if invite_code else None,
-            created_at=created_at,
-            referred_users_total=int(referred_total or 0),
-            referred_users_window=int(referred_window or 0),
-            referral_link_views_window=int(link_views or 0),
-        )
-        for user_id, email, invite_code, created_at, referred_total, referred_window, link_views in rows
-    ]
-
-    totals = {}
-    totals["referrers_with_invite_code"] = int(
-        (
-            await db.execute(
-                select(func.count(User.id)).where(User.invite_code.isnot(None))
-            )
-        ).scalar()
-        or 0
-    )
-    totals["referred_users_total"] = int(
-        (
-            await db.execute(
-                select(func.count(User.id)).where(User.referred_by_user_id.isnot(None))
-            )
-        ).scalar()
-        or 0
-    )
-    totals["referred_users_window"] = int(
-        (
-            await db.execute(
-                select(func.count(User.id)).where(
-                    and_(
-                        User.referred_by_user_id.isnot(None),
-                        User.created_at >= start,
-                        User.created_at <= end,
-                    )
-                )
-            )
-        ).scalar()
-        or 0
-    )
-    totals["referral_link_views_window"] = int(
-        (
-            await db.execute(
-                select(func.count(TelemetryEvent.id)).where(
-                    and_(
-                        TelemetryEvent.event_name == "referral_link_viewed",
-                        TelemetryEvent.created_at >= start,
-                        TelemetryEvent.created_at <= end,
-                    )
-                )
-            )
-        ).scalar()
-        or 0
-    )
-
-    return ReferralKpisResponse(
-        window=ReferralKpiWindow(days=days, start=start, end=end),
-        totals=totals,
-        referrers=referrers,
-    )

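A quick way to sanity-check the referral endpoint from a script; the base URL, mount prefix, and bearer-token header are assumptions, not confirmed by this diff:

```python
# Hypothetical smoke test for the referral KPI endpoint.
import httpx

resp = httpx.get(
    "http://localhost:8000/api/v1/telemetry/referrals",  # assumed mount point
    params={"days": 30, "limit": 50},
    headers={"Authorization": "Bearer <admin-token>"},   # assumed auth scheme
)
resp.raise_for_status()
payload = resp.json()
print(payload["totals"])  # aggregate counters
for row in payload["referrers"][:5]:
    print(row["email"], row["referred_users_window"])
```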
@@ -64,38 +64,6 @@ async def get_db_price_count(db) -> int:
     return result.scalar() or 0
 
 
-@router.get("/tlds")
-async def list_tracked_tlds(
-    db: Database,
-    limit: int = Query(5000, ge=1, le=20000),
-    offset: int = Query(0, ge=0),
-):
-    """
-    List distinct TLDs tracked in the database (DB-driven).
-
-    This endpoint is intentionally database-only (no static fallback),
-    so callers (e.g. sitemap generation) can rely on real tracked inventory.
-    """
-    rows = (
-        await db.execute(
-            select(TLDPrice.tld)
-            .distinct()
-            .order_by(TLDPrice.tld)
-            .offset(offset)
-            .limit(limit)
-        )
-    ).scalars().all()
-    total = (await db.execute(select(func.count(func.distinct(TLDPrice.tld))))).scalar() or 0
-    latest = (await db.execute(select(func.max(TLDPrice.recorded_at)))).scalar()
-    return {
-        "tlds": [str(t).lstrip(".").lower() for t in rows if t],
-        "total": int(total),
-        "limit": int(limit),
-        "offset": int(offset),
-        "latest_recorded_at": latest.isoformat() if latest else None,
-    }
-
-
 # Real TLD price data based on current market research (December 2024)
 # Prices in USD, sourced from major registrars: Namecheap, Cloudflare, Porkbun, Google Domains
 TLD_DATA = {
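For reference, this is how a sitemap job might have paged through the DB-driven list; the mount point (`/api/v1/tld`) is an assumption:

```python
# Hypothetical consumer of the (removed) /tlds endpoint, paging until exhausted.
import httpx

tlds: list[str] = []
offset = 0
while True:
    resp = httpx.get(
        "http://localhost:8000/api/v1/tld/tlds",  # assumed router mount point
        params={"limit": 5000, "offset": offset},
    )
    resp.raise_for_status()
    page = resp.json()
    tlds.extend(page["tlds"])
    offset += page["limit"]
    if offset >= page["total"]:
        break
```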
@@ -358,89 +326,6 @@ def get_max_price(tld_data: dict) -> float:
     return max(r["register"] for r in tld_data["registrars"].values())
 
 
-def get_min_renewal_price(tld_data: dict) -> float:
-    """Get minimum renewal price."""
-    return min(r["renew"] for r in tld_data["registrars"].values())
-
-
-def get_avg_renewal_price(tld_data: dict) -> float:
-    """Calculate average renewal price across registrars."""
-    prices = [r["renew"] for r in tld_data["registrars"].values()]
-    return round(sum(prices) / len(prices), 2)
-
-
-def calculate_price_trends(tld: str, trend: str) -> dict:
-    """
-    Calculate price change trends based on TLD characteristics.
-
-    In a real implementation, this would query historical price data.
-    For now, we estimate based on known market trends.
-    """
-    # Known TLD price trend data (based on market research)
-    KNOWN_TRENDS = {
-        # Rising TLDs (AI boom, tech demand)
-        "ai": {"1y": 15.0, "3y": 45.0},
-        "io": {"1y": 5.0, "3y": 12.0},
-        "app": {"1y": 3.0, "3y": 8.0},
-        "dev": {"1y": 2.0, "3y": 5.0},
-
-        # Stable/Slight increase (registry price increases)
-        "com": {"1y": 7.0, "3y": 14.0},
-        "net": {"1y": 5.0, "3y": 10.0},
-        "org": {"1y": 4.0, "3y": 8.0},
-
-        # ccTLDs (mostly stable)
-        "ch": {"1y": 0.0, "3y": 2.0},
-        "de": {"1y": 0.0, "3y": 1.0},
-        "uk": {"1y": 1.0, "3y": 3.0},
-        "co": {"1y": 3.0, "3y": 7.0},
-        "eu": {"1y": 0.0, "3y": 2.0},
-
-        # Promo-driven (volatile)
-        "xyz": {"1y": -10.0, "3y": -5.0},
-        "online": {"1y": -5.0, "3y": 0.0},
-        "store": {"1y": -8.0, "3y": -3.0},
-        "tech": {"1y": 0.0, "3y": 5.0},
-        "site": {"1y": -5.0, "3y": 0.0},
-    }
-
-    if tld in KNOWN_TRENDS:
-        return KNOWN_TRENDS[tld]
-
-    # Default based on trend field
-    if trend == "up":
-        return {"1y": 8.0, "3y": 20.0}
-    elif trend == "down":
-        return {"1y": -5.0, "3y": -10.0}
-    else:
-        return {"1y": 2.0, "3y": 5.0}
-
-
-def calculate_risk_level(min_price: float, min_renewal: float, trend_1y: float) -> dict:
-    """
-    Calculate risk level for a TLD based on renewal ratio and volatility.
-
-    Returns:
-        dict with 'level' (low/medium/high) and 'reason'
-    """
-    renewal_ratio = min_renewal / min_price if min_price > 0 else 1
-
-    # High risk: Renewal trap (ratio > 3x) or very volatile
-    if renewal_ratio > 3:
-        return {"level": "high", "reason": "Renewal Trap"}
-
-    # Medium risk: Moderate renewal (2-3x) or rising fast
-    if renewal_ratio > 2:
-        return {"level": "medium", "reason": "High Renewal"}
-    if trend_1y > 20:
-        return {"level": "medium", "reason": "Rising Fast"}
-
-    # Low risk
-    if trend_1y > 0:
-        return {"level": "low", "reason": "Stable Rising"}
-    return {"level": "low", "reason": "Stable"}
-
-
 # Top TLDs by popularity (based on actual domain registration volumes)
 TOP_TLDS_BY_POPULARITY = [
     "com", "net", "org", "de", "uk", "cn", "ru", "nl", "br", "au",
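A worked example of the renewal-trap heuristic removed above: a TLD that registers cheap but renews expensive is flagged "high" regardless of trend, while a pricey-but-consistent TLD stays "low":

```python
# Worked example of calculate_risk_level (values are illustrative, not real quotes).
promo_tld = calculate_risk_level(min_price=0.99, min_renewal=9.50, trend_1y=-10.0)
assert promo_tld == {"level": "high", "reason": "Renewal Trap"}  # 9.50 / 0.99 ≈ 9.6x

ai_tld = calculate_risk_level(min_price=70.0, min_renewal=75.0, trend_1y=15.0)
assert ai_tld == {"level": "low", "reason": "Stable Rising"}     # ratio ≈ 1.07x, mild growth
```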
@@ -481,28 +366,15 @@ async def get_tld_overview(
     # This ensures consistency with /compare endpoint which also uses static data first
     if source in ["auto", "static"]:
         for tld, data in TLD_DATA.items():
-            min_price = get_min_price(data)
-            min_renewal = get_min_renewal_price(data)
-            trend = data.get("trend", "stable")
-            price_trends = calculate_price_trends(tld, trend)
-            risk = calculate_risk_level(min_price, min_renewal, price_trends["1y"])
-
             tld_list.append({
                 "tld": tld,
                 "type": data["type"],
                 "description": data["description"],
                 "avg_registration_price": get_avg_price(data),
-                "min_registration_price": min_price,
+                "min_registration_price": get_min_price(data),
                 "max_registration_price": get_max_price(data),
-                "min_renewal_price": min_renewal,
-                "avg_renewal_price": get_avg_renewal_price(data),
                 "registrar_count": len(data["registrars"]),
-                "trend": trend,
-                "price_change_7d": round(price_trends["1y"] / 52, 2),  # Weekly estimate
-                "price_change_1y": price_trends["1y"],
-                "price_change_3y": price_trends["3y"],
-                "risk_level": risk["level"],
-                "risk_reason": risk["reason"],
+                "trend": data["trend"],
                 "popularity_rank": TOP_TLDS_BY_POPULARITY.index(tld) if tld in TOP_TLDS_BY_POPULARITY else 999,
             })
             tld_seen.add(tld)
@@ -517,34 +389,15 @@ async def get_tld_overview(
         for tld, data in db_prices.items():
             if tld not in tld_seen:  # Only add if not already from static
                 prices = data["prices"]
-                min_price = min(prices)
-                avg_price = round(sum(prices) / len(prices), 2)
-
-                # Get renewal prices from registrar data
-                renewal_prices = [r["renew"] for r in data["registrars"].values() if r.get("renew")]
-                min_renewal = min(renewal_prices) if renewal_prices else avg_price
-                avg_renewal = round(sum(renewal_prices) / len(renewal_prices), 2) if renewal_prices else avg_price
-
-                # Calculate trends and risk
-                price_trends = calculate_price_trends(tld, "stable")
-                risk = calculate_risk_level(min_price, min_renewal, price_trends["1y"])
-
                 tld_list.append({
                     "tld": tld,
                     "type": guess_tld_type(tld),
                     "description": f".{tld} domain extension",
-                    "avg_registration_price": avg_price,
-                    "min_registration_price": min_price,
+                    "avg_registration_price": round(sum(prices) / len(prices), 2),
+                    "min_registration_price": min(prices),
                     "max_registration_price": max(prices),
-                    "min_renewal_price": min_renewal,
-                    "avg_renewal_price": avg_renewal,
                     "registrar_count": len(data["registrars"]),
                     "trend": "stable",
-                    "price_change_7d": round(price_trends["1y"] / 52, 2),
-                    "price_change_1y": price_trends["1y"],
-                    "price_change_3y": price_trends["3y"],
-                    "risk_level": risk["level"],
-                    "risk_reason": risk["reason"],
                     "popularity_rank": TOP_TLDS_BY_POPULARITY.index(tld) if tld in TOP_TLDS_BY_POPULARITY else 999,
                 })
                 tld_seen.add(tld)
@@ -628,57 +481,6 @@ async def get_trending_tlds(db: Database):
     return {"trending": trending[:6]}
 
 
-async def get_real_price_history(db, tld: str, days: int) -> list[dict]:
-    """
-    Fetch real historical price data from the database.
-
-    Returns daily average prices for the TLD, grouped by date.
-    Works with both SQLite (dev) and PostgreSQL (prod).
-    """
-    from sqlalchemy import literal_column
-
-    cutoff = datetime.utcnow() - timedelta(days=days)
-
-    # SQLite-compatible: use date() function or extract date from datetime
-    # We'll select the raw datetime and group by date string
-    result = await db.execute(
-        select(
-            TLDPrice.recorded_at,
-            TLDPrice.registration_price,
-        )
-        .where(TLDPrice.tld == tld)
-        .where(TLDPrice.recorded_at >= cutoff)
-        .order_by(TLDPrice.recorded_at)
-    )
-
-    rows = result.all()
-
-    if not rows:
-        return []
-
-    # Group by date in Python (SQLite-safe approach)
-    daily_prices: dict[str, list[float]] = {}
-    for row in rows:
-        # Handle both datetime objects and strings
-        if hasattr(row.recorded_at, 'strftime'):
-            date_str = row.recorded_at.strftime("%Y-%m-%d")
-        else:
-            date_str = str(row.recorded_at)[:10]  # Take first 10 chars (YYYY-MM-DD)
-
-        if date_str not in daily_prices:
-            daily_prices[date_str] = []
-        daily_prices[date_str].append(row.registration_price)
-
-    # Calculate daily averages
-    return [
-        {
-            "date": date_str,
-            "price": round(sum(prices) / len(prices), 2),
-        }
-        for date_str, prices in sorted(daily_prices.items())
-    ]
-
-
 @router.get("/{tld}/history")
 async def get_tld_price_history(
     tld: str,
@@ -687,8 +489,10 @@ async def get_tld_price_history(
 ):
     """Get price history for a specific TLD.
 
-    Returns REAL historical data from database (no simulation).
+    Returns real historical data from database if available,
+    otherwise generates simulated data based on current price.
     """
+    import math
 
     tld_clean = tld.lower().lstrip(".")
 
@@ -714,40 +518,40 @@ async def get_tld_price_history(
     trend = static_data.get("trend", "stable")
     trend_reason = static_data.get("trend_reason", "Price tracking available")
 
-    real_history = await get_real_price_history(db, tld_clean, days)
-
-    if not real_history:
-        raise HTTPException(status_code=404, detail=f"No historical data for '.{tld_clean}' yet")
-
-    history = real_history
-    data_source = "database"
-
-    # Use the most recent daily average as current_price when available
-    if history:
-        current_price = float(history[-1]["price"])
-
-    def _price_at_or_before(days_ago_target: int) -> float:
-        """Get the closest historical price at or before the target age."""
-        target_date = (datetime.utcnow() - timedelta(days=days_ago_target)).date()
-        best = float(history[0]["price"])
-        for h in reversed(history):
-            try:
-                h_date = datetime.strptime(h["date"], "%Y-%m-%d").date()
-            except Exception:
-                continue
-            if h_date <= target_date:
-                best = float(h["price"])
-                break
-        return best
-
-    price_7d_ago = _price_at_or_before(7)
-    price_30d_ago = _price_at_or_before(30)
-    price_90d_ago = _price_at_or_before(90)
-
-    # Calculate percentage changes safely
-    change_7d = round((current_price - price_7d_ago) / price_7d_ago * 100, 2) if price_7d_ago and price_7d_ago > 0 else 0
-    change_30d = round((current_price - price_30d_ago) / price_30d_ago * 100, 2) if price_30d_ago and price_30d_ago > 0 else 0
-    change_90d = round((current_price - price_90d_ago) / price_90d_ago * 100, 2) if price_90d_ago and price_90d_ago > 0 else 0
+    # Generate historical data (simulated for now, real when we have more scrapes)
+    history = []
+    current_date = datetime.utcnow()
+
+    # Calculate trend factor based on known trends
+    trend_factor = 1.0
+    if trend == "up":
+        trend_factor = 0.92  # Prices were ~8% lower
+    elif trend == "down":
+        trend_factor = 1.05  # Prices were ~5% higher
+
+    # Generate weekly data points
+    for i in range(days, -1, -7):
+        date = current_date - timedelta(days=i)
+        progress = 1 - (i / days)
+
+        if trend == "up":
+            price = current_price * (trend_factor + (1 - trend_factor) * progress)
+        elif trend == "down":
+            price = current_price * (trend_factor - (trend_factor - 1) * progress)
+        else:
+            # Add small fluctuation for stable prices
+            fluctuation = math.sin(i * 0.1) * 0.02
+            price = current_price * (1 + fluctuation)
+
+        history.append({
+            "date": date.strftime("%Y-%m-%d"),
+            "price": round(price, 2),
+        })
+
+    # Calculate price changes
+    price_7d_ago = history[-2]["price"] if len(history) >= 2 else current_price
+    price_30d_ago = history[-5]["price"] if len(history) >= 5 else current_price
+    price_90d_ago = history[0]["price"] if history else current_price
 
     return {
         "tld": tld_clean,
@@ -755,14 +559,13 @@ async def get_tld_price_history(
         "description": static_data.get("description", f".{tld_clean} domain extension"),
         "registry": static_data.get("registry", "Unknown"),
         "current_price": current_price,
-        "price_change_7d": change_7d,
-        "price_change_30d": change_30d,
-        "price_change_90d": change_90d,
+        "price_change_7d": round((current_price - price_7d_ago) / price_7d_ago * 100, 2) if price_7d_ago else 0,
+        "price_change_30d": round((current_price - price_30d_ago) / price_30d_ago * 100, 2) if price_30d_ago else 0,
+        "price_change_90d": round((current_price - price_90d_ago) / price_90d_ago * 100, 2) if price_90d_ago else 0,
         "trend": trend,
         "trend_reason": trend_reason,
         "history": history,
-        "source": data_source,
-        "data_points": len(history),
+        "source": "simulated" if not static_data else "static",
     }
 
 
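The removed variant of this hunk guards the percentage math twice (truthiness and `> 0`) because a missing or zero baseline price would otherwise divide by zero. The same guard in miniature (`pct_change` is a hypothetical name used only for illustration):

```python
# Illustration of the divide-by-zero guard used for price_change_* fields.
def pct_change(current: float, past: float | None) -> float:
    return round((current - past) / past * 100, 2) if past and past > 0 else 0

assert pct_change(12.0, 10.0) == 20.0
assert pct_change(12.0, 0.0) == 0    # zero-price guard
assert pct_change(12.0, None) == 0   # missing-history guard
```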
@@ -791,109 +594,74 @@ async def compare_tld_prices(
     tld: str,
     db: Database,
 ):
-    """Compare prices across different registrars for a TLD.
-
-    COMBINES static data AND database data for complete registrar coverage.
-    This ensures all scraped registrars (Porkbun, GoDaddy, Namecheap, etc.) appear.
-    """
-    tld_clean = tld.lower().lstrip(".")
-
-    # Collect registrars from ALL sources
-    registrars_map: dict[str, dict] = {}
-    metadata = {
-        "type": "generic",
-        "description": f".{tld_clean} domain extension",
-        "registry": "Unknown",
-        "introduced": None,
-    }
-
-    # 1. Add static data (curated, high-quality)
-    if tld_clean in TLD_DATA:
-        data = TLD_DATA[tld_clean]
-        metadata = {
-            "type": data["type"],
-            "description": data["description"],
-            "registry": data.get("registry", "Unknown"),
-            "introduced": data.get("introduced"),
-        }
-
-        for name, prices in data["registrars"].items():
-            registrars_map[name.lower()] = {
-                "name": name,
-                "registration_price": prices["register"],
-                "renewal_price": prices["renew"],
-                "transfer_price": prices["transfer"],
-                "source": "static",
-            }
-
-    # 2. Add/update with database data (scraped from multiple registrars)
-    db_prices = await get_db_prices(db, tld_clean)
-    if db_prices and tld_clean in db_prices:
-        for registrar_name, prices in db_prices[tld_clean]["registrars"].items():
-            key = registrar_name.lower()
-            # Add if not exists, or update with fresher DB data
-            if key not in registrars_map:
-                registrars_map[key] = {
-                    "name": registrar_name.title(),
-                    "registration_price": prices["register"],
-                    "renewal_price": prices["renew"],
-                    "transfer_price": prices.get("transfer"),
-                    "source": "database",
-                }
-
-    if not registrars_map:
-        raise HTTPException(status_code=404, detail=f"TLD '.{tld_clean}' not found")
-
-    # Convert to list and sort by price
-    registrars = list(registrars_map.values())
-    registrars.sort(key=lambda x: x["registration_price"])
-
-    # Calculate price range from all registrars
-    all_prices = [r["registration_price"] for r in registrars]
-
-    return {
-        "tld": tld_clean,
-        "type": metadata["type"],
-        "description": metadata["description"],
-        "registry": metadata["registry"],
-        "introduced": metadata["introduced"],
-        "registrars": registrars,
-        "cheapest_registrar": registrars[0]["name"],
-        "cheapest_price": registrars[0]["registration_price"],
-        "price_range": {
-            "min": min(all_prices),
-            "max": max(all_prices),
-            "avg": round(sum(all_prices) / len(all_prices), 2),
-        },
-        "registrar_count": len(registrars),
-    }
-
-
-def get_marketplace_links(tld: str) -> list:
-    """Get marketplace links for buying existing domains on this TLD."""
-    # Sedo partner ID for affiliate tracking
-    SEDO_PARTNER_ID = "335830"
-
-    return [
-        {
-            "name": "Sedo",
-            "description": "World's largest domain marketplace",
-            "url": f"https://sedo.com/search/?keyword=.{tld}&partnerid={SEDO_PARTNER_ID}",
-            "type": "marketplace",
-        },
-        {
-            "name": "Afternic",
-            "description": "GoDaddy's premium marketplace",
-            "url": f"https://www.afternic.com/search?k=.{tld}",
-            "type": "marketplace",
-        },
-        {
-            "name": "Dan.com",
-            "description": "Fast domain transfers",
-            "url": f"https://dan.com/search?query=.{tld}",
-            "type": "marketplace",
-        },
-    ]
+    """Compare prices across different registrars for a TLD."""
+    tld_clean = tld.lower().lstrip(".")
+
+    # Try static data first
+    if tld_clean in TLD_DATA:
+        data = TLD_DATA[tld_clean]
+        registrars = []
+        for name, prices in data["registrars"].items():
+            registrars.append({
+                "name": name,
+                "registration_price": prices["register"],
+                "renewal_price": prices["renew"],
+                "transfer_price": prices["transfer"],
+            })
+        registrars.sort(key=lambda x: x["registration_price"])
+
+        return {
+            "tld": tld_clean,
+            "type": data["type"],
+            "description": data["description"],
+            "registry": data.get("registry", "Unknown"),
+            "introduced": data.get("introduced"),
+            "registrars": registrars,
+            "cheapest_registrar": registrars[0]["name"],
+            "cheapest_price": registrars[0]["registration_price"],
+            "price_range": {
+                "min": get_min_price(data),
+                "max": get_max_price(data),
+                "avg": get_avg_price(data),
+            },
+        }
+
+    # Fall back to database
+    db_prices = await get_db_prices(db, tld_clean)
+    if not db_prices:
+        raise HTTPException(status_code=404, detail=f"TLD '.{tld_clean}' not found")
+
+    tld_data = db_prices[tld_clean]
+    registrars = [
+        {
+            "name": name,
+            "registration_price": prices["register"],
+            "renewal_price": prices["renew"],
+            "transfer_price": prices["transfer"],
+        }
+        for name, prices in tld_data["registrars"].items()
+    ]
+    registrars.sort(key=lambda x: x["registration_price"])
+
+    prices = tld_data["prices"]
+
+    return {
+        "tld": tld_clean,
+        "type": guess_tld_type(tld_clean),
+        "description": f".{tld_clean} domain extension",
+        "registry": "Unknown",
+        "introduced": None,
+        "registrars": registrars,
+        "cheapest_registrar": registrars[0]["name"] if registrars else "N/A",
+        "cheapest_price": min(prices) if prices else 0,
+        "price_range": {
+            "min": min(prices) if prices else 0,
+            "max": max(prices) if prices else 0,
+            "avg": round(sum(prices) / len(prices), 2) if prices else 0,
+        },
+    }
 
 
 @router.get("/{tld}")
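The removed "combined" variant of `/compare` dedupes registrars case-insensitively and lets curated static entries win over scraped ones. A toy illustration of that precedence rule (names and prices are illustrative):

```python
# Dict-keyed merge: static entries take priority, scraped-only registrars are added.
static = {"Namecheap": {"register": 9.98}}
scraped = {"namecheap": {"register": 10.28}, "porkbun": {"register": 9.73}}

merged: dict[str, dict] = {}
for name, prices in static.items():
    merged[name.lower()] = {"name": name, **prices, "source": "static"}
for name, prices in scraped.items():
    merged.setdefault(name.lower(), {"name": name.title(), **prices, "source": "database"})

assert merged["namecheap"]["source"] == "static"   # curated entry kept
assert merged["porkbun"]["source"] == "database"   # scraped-only registrar added
```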
@@ -904,9 +672,6 @@ async def get_tld_details(
     """Get complete details for a specific TLD."""
     tld_clean = tld.lower().lstrip(".")
 
-    # Marketplace links (same for all TLDs)
-    marketplace_links = get_marketplace_links(tld_clean)
-
     # Try static data first
     if tld_clean in TLD_DATA:
         data = TLD_DATA[tld_clean]
@@ -936,7 +701,6 @@ async def get_tld_details(
         },
         "registrars": registrars,
         "cheapest_registrar": registrars[0]["name"],
-        "marketplace_links": marketplace_links,
     }
 
     # Fall back to database
@@ -973,159 +737,4 @@ async def get_tld_details(
         },
         "registrars": registrars,
         "cheapest_registrar": registrars[0]["name"] if registrars else "N/A",
-        "marketplace_links": marketplace_links,
-    }
-
-
-# =============================================================================
-# DIAGNOSTIC ENDPOINTS - Data Quality & Historical Stats
-# =============================================================================
-
-@router.get("/stats/data-quality")
-async def get_data_quality_stats(db: Database):
-    """
-    Get statistics about historical data quality.
-
-    Useful for monitoring:
-    - How many TLDs have real historical data
-    - Date range of collected data
-    - Scraping frequency and gaps
-    """
-    from sqlalchemy import cast, Date as SQLDate
-
-    # Total TLDs tracked
-    tld_count = await db.execute(select(func.count(func.distinct(TLDPrice.tld))))
-    total_tlds = tld_count.scalar() or 0
-
-    # Total price records
-    record_count = await db.execute(select(func.count(TLDPrice.id)))
-    total_records = record_count.scalar() or 0
-
-    # Date range
-    date_range = await db.execute(
-        select(
-            func.min(TLDPrice.recorded_at).label("first_record"),
-            func.max(TLDPrice.recorded_at).label("last_record"),
-        )
-    )
-    dates = date_range.one()
-
-    # Unique scrape days (how many days we have data)
-    # SQLite-compatible: count distinct date strings
-    all_dates = await db.execute(select(TLDPrice.recorded_at))
-    date_rows = all_dates.all()
-    unique_date_strs = set()
-    for row in date_rows:
-        if hasattr(row.recorded_at, 'strftime'):
-            unique_date_strs.add(row.recorded_at.strftime("%Y-%m-%d"))
-        elif row.recorded_at:
-            unique_date_strs.add(str(row.recorded_at)[:10])
-    scrape_days = len(unique_date_strs)
-
-    # TLDs with 5+ historical data points (enough for real charts)
-    tlds_with_history = await db.execute(
-        select(func.count())
-        .select_from(
-            select(TLDPrice.tld)
-            .group_by(TLDPrice.tld)
-            .having(func.count(TLDPrice.id) >= 5)
-            .subquery()
-        )
-    )
-    chartable_tlds = tlds_with_history.scalar() or 0
-
-    # Registrars in database
-    registrar_count = await db.execute(
-        select(func.count(func.distinct(TLDPrice.registrar)))
-    )
-    total_registrars = registrar_count.scalar() or 0
-
-    # Calculate coverage
-    days_of_data = 0
-    if dates.first_record and dates.last_record:
-        days_of_data = (dates.last_record - dates.first_record).days + 1
-
-    coverage_percent = round((scrape_days / days_of_data * 100), 1) if days_of_data > 0 else 0
-
-    return {
-        "summary": {
-            "total_tlds_tracked": total_tlds,
-            "total_price_records": total_records,
-            "tlds_with_real_history": chartable_tlds,
-            "unique_registrars": total_registrars,
-        },
-        "time_range": {
-            "first_record": dates.first_record.isoformat() if dates.first_record else None,
-            "last_record": dates.last_record.isoformat() if dates.last_record else None,
-            "days_of_data": days_of_data,
-            "days_with_scrapes": scrape_days,
-            "coverage_percent": coverage_percent,
-        },
-        "chart_readiness": {
-            "tlds_ready_for_charts": chartable_tlds,
-            "tlds_with_insufficient_history": total_tlds - chartable_tlds,
-            "recommendation": "Run daily scrapes for 7+ days to enable richer charts" if chartable_tlds < 10 else "Good coverage!",
-        },
-        "data_sources": {
-            "static_tlds": len(TLD_DATA),
-            "database_tlds": total_tlds,
-            "combined_coverage": len(TLD_DATA) + max(0, total_tlds - len(TLD_DATA)),
-        }
-    }
-
-
-@router.get("/stats/scrape-history")
-async def get_scrape_history(
-    db: Database,
-    days: int = Query(30, ge=1, le=365),
-):
-    """
-    Get scraping history - shows when scrapes ran and how many records were collected.
-
-    Useful for:
-    - Identifying gaps in data collection
-    - Verifying scheduler is working
-    - Troubleshooting data issues
-    """
-    cutoff = datetime.utcnow() - timedelta(days=days)
-
-    # SQLite-compatible: fetch all and group in Python
-    result = await db.execute(
-        select(TLDPrice.recorded_at, TLDPrice.tld)
-        .where(TLDPrice.recorded_at >= cutoff)
-    )
-    rows = result.all()
-
-    # Group by date in Python
-    daily_data: dict[str, dict] = {}
-    for row in rows:
-        if hasattr(row.recorded_at, 'strftime'):
-            date_str = row.recorded_at.strftime("%Y-%m-%d")
-        elif row.recorded_at:
-            date_str = str(row.recorded_at)[:10]
-        else:
-            continue
-
-        if date_str not in daily_data:
-            daily_data[date_str] = {"records": 0, "tlds": set()}
-        daily_data[date_str]["records"] += 1
-        daily_data[date_str]["tlds"].add(row.tld)
-
-    # Convert to list and sort by date descending
-    scrape_history = [
-        {
-            "date": date_str,
-            "records_collected": data["records"],
-            "tlds_scraped": len(data["tlds"]),
-        }
-        for date_str, data in sorted(daily_data.items(), reverse=True)
-    ]
-
-    total_records = sum(h["records_collected"] for h in scrape_history)
-
-    return {
-        "period_days": days,
-        "total_scrape_days": len(scrape_history),
-        "history": scrape_history,
-        "avg_records_per_day": round(total_records / len(scrape_history), 0) if scrape_history else 0,
-    }
     }

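The coverage figure reported by the removed `/stats/data-quality` endpoint is simply scrape days over inclusive calendar days; worked through with sample dates:

```python
# Coverage math from get_data_quality_stats (sample values, not real data).
from datetime import datetime

first = datetime(2024, 12, 1)
last = datetime(2024, 12, 14)
days_of_data = (last - first).days + 1  # 14 calendar days, inclusive
scrape_days = 10                        # days that actually produced price rows
coverage_percent = round(scrape_days / days_of_data * 100, 1)
assert coverage_percent == 71.4
```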
@@ -5,8 +5,6 @@ Webhook endpoints for external service integrations.
 - Future: Other payment providers, notification services, etc.
 """
 import logging
-import os
-from datetime import datetime
 from fastapi import APIRouter, HTTPException, Request, Header, status
 
 from app.database import get_db
@@ -17,25 +15,6 @@ logger = logging.getLogger(__name__)
 router = APIRouter()
 
 
-@router.get("/stripe/test")
-async def test_stripe_webhook():
-    """
-    Test endpoint to verify webhook route is accessible.
-
-    Use this to verify the webhook URL is correct.
-    The actual Stripe webhook should POST to /api/v1/webhooks/stripe
-    """
-    return {
-        "status": "ok",
-        "message": "Stripe webhook endpoint is accessible",
-        "endpoint": "/api/v1/webhooks/stripe",
-        "method": "POST",
-        "stripe_configured": StripeService.is_configured(),
-        "webhook_secret_set": bool(os.getenv("STRIPE_WEBHOOK_SECRET")),
-        "timestamp": datetime.utcnow().isoformat(),
-    }
-
-
 @router.post("/stripe")
 async def stripe_webhook(
     request: Request,
@@ -50,22 +29,18 @@ async def stripe_webhook(
     - Invoice is created or paid
 
     The webhook must be configured in Stripe Dashboard to point to:
-    https://pounce.ch/api/v1/webhooks/stripe
+    https://your-domain.com/api/webhooks/stripe
 
     Required Header:
     - Stripe-Signature: Stripe's webhook signature for verification
     """
-    logger.info("🔔 Stripe webhook received")
-
     if not stripe_signature:
-        logger.error("❌ Missing Stripe-Signature header")
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail="Missing Stripe-Signature header",
         )
 
     if not StripeService.is_configured():
-        logger.error("❌ Stripe not configured")
         raise HTTPException(
             status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
             detail="Stripe not configured",
@@ -74,9 +49,6 @@ async def stripe_webhook(
     # Get raw body for signature verification
     payload = await request.body()
 
-    logger.info(f"   Payload size: {len(payload)} bytes")
-    logger.info(f"   Signature: {stripe_signature[:50]}...")
-
     try:
         async for db in get_db():
             result = await StripeService.handle_webhook(
@@ -84,17 +56,16 @@ async def stripe_webhook(
                 sig_header=stripe_signature,
                 db=db,
             )
-            logger.info(f"✅ Webhook processed successfully: {result}")
             return result
 
     except ValueError as e:
-        logger.error(f"❌ Webhook validation error: {e}")
+        logger.error(f"Webhook validation error: {e}")
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail=str(e),
         )
     except Exception as e:
-        logger.exception(f"❌ Webhook processing error: {e}")
+        logger.error(f"Webhook processing error: {e}")
        raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
             detail="Webhook processing failed",

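A request without a `Stripe-Signature` header should be rejected before any Stripe processing runs. A minimal sketch of that check, assuming the header parameter is declared optional (which the explicit `if not stripe_signature:` guard suggests) and that the app module is importable as shown (hypothetical path):

```python
# Hypothetical test: missing signature header yields the 400 from the guard above.
from fastapi.testclient import TestClient
from app.main import app  # assumed application module

client = TestClient(app)
resp = client.post("/api/v1/webhooks/stripe", content=b"{}")
assert resp.status_code == 400
assert "Stripe-Signature" in resp.json()["detail"]
```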
@@ -1,835 +0,0 @@
-"""
-Yield Domain API endpoints.
-
-Manages domain activation for yield/intent routing and revenue tracking.
-"""
-import json
-from datetime import datetime, timedelta
-from decimal import Decimal
-from typing import Optional
-
-from fastapi import APIRouter, Depends, HTTPException, status, Query
-from sqlalchemy import func, and_, or_, Integer, case, select
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.api.deps import get_db, get_current_user
-from app.models.user import User
-from app.models.yield_domain import YieldDomain, YieldTransaction, YieldPayout, AffiliatePartner
-from app.config import get_settings
-
-settings = get_settings()
-from app.schemas.yield_domain import (
-    YieldDomainCreate,
-    YieldDomainUpdate,
-    YieldDomainResponse,
-    YieldDomainListResponse,
-    YieldTransactionResponse,
-    YieldTransactionListResponse,
-    YieldPayoutResponse,
-    YieldPayoutListResponse,
-    YieldDashboardStats,
-    YieldDashboardResponse,
-    DomainYieldAnalysis,
-    IntentAnalysis,
-    YieldValueEstimate,
-    AffiliatePartnerResponse,
-    DNSVerificationResult,
-    DNSSetupInstructions,
-    ActivateYieldRequest,
-    ActivateYieldResponse,
-    YieldLandingPreview,
-)
-from app.services.intent_detector import (
-    detect_domain_intent,
-    estimate_domain_yield,
-    get_intent_detector,
-)
-from app.services.yield_dns import verify_yield_dns
-from app.services.telemetry import track_event
-
-router = APIRouter(prefix="/yield", tags=["yield"])
-
-
-# ============================================================================
-# Intent Analysis (Public)
-# ============================================================================
-
-@router.post("/analyze", response_model=DomainYieldAnalysis)
-async def analyze_domain_intent(
-    domain: str = Query(..., min_length=3, description="Domain to analyze"),
-):
-    """
-    Analyze a domain's intent and estimate yield potential.
-
-    This endpoint is public - no authentication required.
-    """
-    analysis = estimate_domain_yield(domain)
-
-    intent_result = detect_domain_intent(domain)
-
-    return DomainYieldAnalysis(
-        domain=domain,
-        intent=IntentAnalysis(
-            category=intent_result.category,
-            subcategory=intent_result.subcategory,
-            confidence=intent_result.confidence,
-            keywords_matched=intent_result.keywords_matched,
-            suggested_partners=intent_result.suggested_partners,
-            monetization_potential=intent_result.monetization_potential,
-        ),
-        value=YieldValueEstimate(
-            estimated_monthly_min=analysis["value"]["estimated_monthly_min"],
-            estimated_monthly_max=analysis["value"]["estimated_monthly_max"],
-            currency=analysis["value"]["currency"],
-            potential=analysis["value"]["potential"],
-            confidence=analysis["value"]["confidence"],
-            geo=analysis["value"]["geo"],
-        ),
-        partners=analysis["partners"],
-        monetization_potential=analysis["monetization_potential"],
-    )
-
-
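The analyzer wrapped by `/analyze` is pure (no auth, no DB), so it can also be exercised directly against the service helpers this file imports; a sketch (the sample domain and printed fields are illustrative):

```python
# Direct use of the intent/yield services the endpoint above delegated to.
from app.services.intent_detector import detect_domain_intent, estimate_domain_yield

intent = detect_domain_intent("zurich-dental.ch")
value = estimate_domain_yield("zurich-dental.ch")["value"]
print(intent.category, intent.confidence)
print(value["estimated_monthly_min"], "-", value["estimated_monthly_max"], value["currency"])
```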
-# ============================================================================
-# Dashboard
-# ============================================================================
-
-@router.get("/dashboard", response_model=YieldDashboardResponse)
-async def get_yield_dashboard(
-    db: AsyncSession = Depends(get_db),
-    current_user: User = Depends(get_current_user),
-):
-    """
-    Get yield dashboard with stats, domains, and recent transactions.
-    """
-    # Get user's yield domains
-    result = await db.execute(
-        select(YieldDomain)
-        .where(YieldDomain.user_id == current_user.id)
-        .order_by(YieldDomain.total_revenue.desc())
-    )
-    domains = list(result.scalars().all())
-
-    # Calculate stats
-    now = datetime.utcnow()
-    month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
-
-    # Monthly stats from transactions (simplified for async)
-    monthly_revenue = Decimal("0")
-    monthly_clicks = 0
-    monthly_conversions = 0
-
-    if domains:
-        domain_ids = [d.id for d in domains]
-        monthly_result = await db.execute(
-            select(
-                func.coalesce(
-                    func.sum(
-                        case(
-                            (YieldTransaction.status.in_(["confirmed", "paid"]), YieldTransaction.net_amount),
-                            else_=0,
-                        )
-                    ),
-                    0,
-                ).label("revenue"),
-                func.sum(
-                    case(
-                        (YieldTransaction.event_type == "click", 1),
-                        else_=0,
-                    )
-                ).label("clicks"),
-                func.sum(
-                    case(
-                        (
-                            and_(
-                                YieldTransaction.event_type.in_(["lead", "sale"]),
-                                YieldTransaction.status.in_(["confirmed", "paid"]),
-                            ),
-                            1,
-                        ),
-                        else_=0,
-                    )
-                ).label("conversions"),
-            ).where(
-                YieldTransaction.yield_domain_id.in_(domain_ids),
-                YieldTransaction.created_at >= month_start,
-            )
-        )
-        monthly_stats = monthly_result.first()
-        if monthly_stats:
-            monthly_revenue = monthly_stats.revenue or Decimal("0")
-            monthly_clicks = monthly_stats.clicks or 0
-            monthly_conversions = monthly_stats.conversions or 0
-
-    # Aggregate domain stats
-    total_active = sum(1 for d in domains if d.status == "active")
-    total_pending = sum(1 for d in domains if d.status in ["pending", "verifying"])
-    lifetime_revenue = sum(d.total_revenue for d in domains)
-    lifetime_clicks = sum(d.total_clicks for d in domains)
-    lifetime_conversions = sum(d.total_conversions for d in domains)
-
-    # Pending payout
-    pending_payout = Decimal("0")
-    if domains:
-        domain_ids = [d.id for d in domains]
-        pending_result = await db.execute(
-            select(func.coalesce(func.sum(YieldTransaction.net_amount), 0)).where(
-                YieldTransaction.yield_domain_id.in_(domain_ids),
-                YieldTransaction.status == "confirmed",
-                YieldTransaction.paid_at.is_(None),
-            )
-        )
-        pending_payout = pending_result.scalar() or Decimal("0")
-
-    # Get recent transactions
-    recent_txs = []
-    if domains:
-        domain_ids = [d.id for d in domains]
-        recent_result = await db.execute(
-            select(YieldTransaction)
-            .where(YieldTransaction.yield_domain_id.in_(domain_ids))
-            .order_by(YieldTransaction.created_at.desc())
-            .limit(10)
-        )
-        recent_txs = list(recent_result.scalars().all())
-
-    # Top performing domains
-    top_domains = sorted(domains, key=lambda d: d.total_revenue, reverse=True)[:5]
-
-    stats = YieldDashboardStats(
-        total_domains=len(domains),
-        active_domains=total_active,
-        pending_domains=total_pending,
-        monthly_revenue=monthly_revenue,
-        monthly_clicks=monthly_clicks,
-        monthly_conversions=monthly_conversions,
-        lifetime_revenue=lifetime_revenue,
-        lifetime_clicks=lifetime_clicks,
-        lifetime_conversions=lifetime_conversions,
-        pending_payout=pending_payout,
-        next_payout_date=(month_start + timedelta(days=32)).replace(day=1),
-        currency="CHF",
-    )
-
-    return YieldDashboardResponse(
-        stats=stats,
-        domains=[_domain_to_response(d) for d in domains],
-        recent_transactions=[_tx_to_response(tx) for tx in recent_txs],
-        top_domains=[_domain_to_response(d) for d in top_domains],
-    )
-
-
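The `next_payout_date` line above uses a common calendar trick: adding 32 days to the first of a month always lands somewhere in the following month, and `.replace(day=1)` snaps back to its first day, so no month-length table is needed:

```python
# First-of-next-month arithmetic used for next_payout_date.
from datetime import datetime, timedelta

month_start = datetime(2024, 12, 1)
next_payout = (month_start + timedelta(days=32)).replace(day=1)
assert next_payout == datetime(2025, 1, 1)
```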
-# ============================================================================
-# Domain Management
-# ============================================================================
-
-@router.get("/domains", response_model=YieldDomainListResponse)
-async def list_yield_domains(
-    status: Optional[str] = Query(None, description="Filter by status"),
-    limit: int = Query(50, le=100),
-    offset: int = Query(0, ge=0),
-    db: AsyncSession = Depends(get_db),
-    current_user: User = Depends(get_current_user),
-):
-    """
-    List user's yield domains.
-    """
-    query = select(YieldDomain).where(YieldDomain.user_id == current_user.id)
-
-    if status:
-        query = query.where(YieldDomain.status == status)
-
-    # Get total count
-    count_result = await db.execute(
-        select(func.count(YieldDomain.id)).where(YieldDomain.user_id == current_user.id)
-    )
-    total = count_result.scalar() or 0
-
-    # Get domains
-    result = await db.execute(
-        query.order_by(YieldDomain.created_at.desc()).offset(offset).limit(limit)
-    )
-    domains = list(result.scalars().all())
-
-    # Aggregates from all domains
-    all_result = await db.execute(
-        select(YieldDomain).where(YieldDomain.user_id == current_user.id)
-    )
-    all_domains = list(all_result.scalars().all())
-    total_active = sum(1 for d in all_domains if d.status == "active")
-    total_revenue = sum(d.total_revenue for d in all_domains)
-    total_clicks = sum(d.total_clicks for d in all_domains)
-
-    return YieldDomainListResponse(
-        domains=[_domain_to_response(d) for d in domains],
-        total=total,
-        total_active=total_active,
-        total_revenue=total_revenue,
-        total_clicks=total_clicks,
-    )
-
-
-@router.get("/domains/{domain_id}", response_model=YieldDomainResponse)
-async def get_yield_domain(
-    domain_id: int,
-    db: AsyncSession = Depends(get_db),
-    current_user: User = Depends(get_current_user),
-):
-    """
-    Get details of a specific yield domain.
-    """
-    result = await db.execute(
-        select(YieldDomain).where(
-            YieldDomain.id == domain_id,
-            YieldDomain.user_id == current_user.id,
-        )
-    )
-    domain = result.scalar_one_or_none()
-
-    if not domain:
-        raise HTTPException(status_code=404, detail="Yield domain not found")
-
-    return _domain_to_response(domain)
-
-
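Typical usage of the list endpoint above, with the status filter and pagination; the `/api/v1` mount prefix is an assumption (the router itself declares `prefix="/yield"`):

```python
# Hypothetical client call against the removed listing endpoint.
import httpx

resp = httpx.get(
    "http://localhost:8000/api/v1/yield/domains",
    params={"status": "active", "limit": 20, "offset": 0},
    headers={"Authorization": "Bearer <token>"},  # assumed auth scheme
)
resp.raise_for_status()
data = resp.json()
print(data["total"], "domains;", data["total_active"], "active")
```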
@router.post("/activate", response_model=ActivateYieldResponse)
|
|
||||||
async def activate_domain_for_yield(
|
|
||||||
request: ActivateYieldRequest,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
current_user: User = Depends(get_current_user),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Activate a domain for yield/intent routing.
|
|
||||||
|
|
||||||
SECURITY: Domain must be in user's portfolio AND DNS-verified.
|
|
||||||
This creates the yield domain record and returns DNS setup instructions.
|
|
||||||
"""
|
|
||||||
from app.models.portfolio import PortfolioDomain
|
|
||||||
from app.models.subscription import Subscription, SubscriptionTier
|
|
||||||
|
|
||||||
domain = request.domain.lower().strip()
|
|
||||||
|
|
||||||
# SECURITY CHECK 1: Domain must be in user's portfolio
|
|
||||||
portfolio_result = await db.execute(
|
|
||||||
select(PortfolioDomain).where(
|
|
||||||
PortfolioDomain.domain == domain,
|
|
||||||
PortfolioDomain.user_id == current_user.id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
portfolio_domain = portfolio_result.scalar_one_or_none()
|
|
||||||
|
|
||||||
if not portfolio_domain:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=403,
|
|
||||||
detail="Domain must be in your portfolio before activating Yield. Add it to your portfolio first.",
|
|
||||||
)
|
|
||||||
|
|
||||||
# SECURITY CHECK 2: Domain must be DNS-verified
|
|
||||||
if not portfolio_domain.is_dns_verified:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=403,
|
|
||||||
detail="Domain must be DNS-verified before activating Yield. Verify ownership in your portfolio first.",
|
|
||||||
)
|
|
||||||
|
|
||||||
# SECURITY CHECK 3: Domain must not be sold
|
|
||||||
if portfolio_domain.is_sold:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400,
|
|
||||||
detail="Cannot activate Yield for a sold domain.",
|
|
||||||
)
|
|
||||||
|
|
||||||
# SECURITY CHECK 4: Tier gating + limits
|
|
||||||
sub_result = await db.execute(select(Subscription).where(Subscription.user_id == current_user.id))
|
|
||||||
subscription = sub_result.scalar_one_or_none()
|
|
||||||
tier = subscription.tier if subscription else SubscriptionTier.SCOUT
|
|
||||||
tier_value = tier.value if hasattr(tier, "value") else str(tier)
|
|
||||||
|
|
||||||
# Check if tier has yield feature
|
|
||||||
from app.models.subscription import TIER_CONFIG
|
|
||||||
tier_config = TIER_CONFIG.get(tier, {})
|
|
||||||
has_yield = tier_config.get("features", {}).get("yield", False)
|
|
||||||
|
|
||||||
if not has_yield:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=403,
|
|
||||||
detail="Yield is available on Tycoon plan only. Upgrade to unlock.",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Yield limits: Trader = 10, Tycoon = unlimited
|
|
||||||
max_yield_domains = 10 if tier_value == "trader" else 10_000_000
|
|
||||||
user_domain_count = (
|
|
||||||
await db.execute(
|
|
||||||
select(func.count(YieldDomain.id)).where(YieldDomain.user_id == current_user.id)
|
|
||||||
)
|
|
||||||
).scalar() or 0
|
|
||||||
if user_domain_count >= max_yield_domains:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=403,
|
|
||||||
detail=f"Yield domain limit reached for your plan ({max_yield_domains}).",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check if domain already exists in yield system
|
|
||||||
existing_result = await db.execute(
|
|
||||||
select(YieldDomain).where(YieldDomain.domain == domain)
|
|
||||||
)
|
|
||||||
existing = existing_result.scalar_one_or_none()
|
|
||||||
if existing:
|
|
||||||
if existing.user_id == current_user.id:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400,
|
|
||||||
detail="Domain already activated for yield"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400,
|
|
||||||
detail="Domain is already registered by another user"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Analyze domain intent
|
|
||||||
intent_result = detect_domain_intent(domain)
|
|
||||||
value_estimate = get_intent_detector().estimate_value(domain)
|
|
||||||
|
|
||||||
# Generate landing page config (Tycoon-only yield requirement)
|
|
||||||
# No fallback: if the LLM gateway is unavailable, activation must fail.
|
|
||||||
from app.services.llm_vision import generate_yield_landing
|
|
||||||
landing_cfg, landing_model = await generate_yield_landing(domain)
|
|
||||||
|
|
||||||
# Create yield domain record
|
|
||||||
yield_domain = YieldDomain(
|
|
||||||
user_id=current_user.id,
|
|
||||||
domain=domain,
|
|
||||||
detected_intent=f"{intent_result.category}_{intent_result.subcategory}" if intent_result.subcategory else intent_result.category,
|
|
||||||
intent_confidence=intent_result.confidence,
|
|
||||||
intent_keywords=json.dumps(intent_result.keywords_matched),
|
|
||||||
status="pending",
|
|
||||||
landing_config_json=landing_cfg.model_dump_json(),
|
|
||||||
landing_template=landing_cfg.template,
|
|
||||||
landing_headline=landing_cfg.headline,
|
|
||||||
landing_intro=landing_cfg.seo_intro,
|
|
||||||
landing_cta_label=landing_cfg.cta_label,
|
|
||||||
landing_model=landing_model,
|
|
||||||
landing_generated_at=datetime.utcnow(),
|
|
||||||
)
|
|
||||||
|
|
||||||
# Find best matching partner
|
|
||||||
if intent_result.suggested_partners:
|
|
||||||
partner_result = await db.execute(
|
|
||||||
select(AffiliatePartner).where(
|
|
||||||
AffiliatePartner.slug == intent_result.suggested_partners[0],
|
|
||||||
AffiliatePartner.is_active == True,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
partner = partner_result.scalar_one_or_none()
|
|
||||||
if partner:
|
|
||||||
yield_domain.partner_id = partner.id
|
|
||||||
yield_domain.active_route = partner.slug
|
|
||||||
|
|
||||||
db.add(yield_domain)
|
|
||||||
await db.commit()
|
|
||||||
await db.refresh(yield_domain)
|
|
||||||
|
|
||||||
# Create DNS instructions
|
|
||||||
yield_nameservers = settings.yield_nameserver_list
|
|
||||||
if not yield_nameservers:
|
|
||||||
raise HTTPException(status_code=500, detail="Yield nameservers are not configured on server.")
|
|
||||||
dns_instructions = DNSSetupInstructions(
|
|
||||||
domain=domain,
|
|
||||||
nameservers=yield_nameservers,
|
|
||||||
cname_host="@",
|
|
||||||
cname_target=settings.yield_cname_target,
|
|
||||||
verification_url=f"{settings.site_url}/api/v1/yield/domains/{yield_domain.id}/verify",
|
|
||||||
)
|
|
||||||
|
|
||||||
return ActivateYieldResponse(
|
|
||||||
domain_id=yield_domain.id,
|
|
||||||
domain=domain,
|
|
||||||
status=yield_domain.status,
|
|
||||||
intent=IntentAnalysis(
|
|
||||||
category=intent_result.category,
|
|
||||||
subcategory=intent_result.subcategory,
|
|
||||||
confidence=intent_result.confidence,
|
|
||||||
keywords_matched=intent_result.keywords_matched,
|
|
||||||
suggested_partners=intent_result.suggested_partners,
|
|
||||||
monetization_potential=intent_result.monetization_potential,
|
|
||||||
),
|
|
||||||
value_estimate=YieldValueEstimate(
|
|
||||||
estimated_monthly_min=value_estimate["estimated_monthly_min"],
|
|
||||||
estimated_monthly_max=value_estimate["estimated_monthly_max"],
|
|
||||||
currency=value_estimate["currency"],
|
|
||||||
potential=value_estimate["potential"],
|
|
||||||
confidence=value_estimate["confidence"],
|
|
||||||
geo=value_estimate["geo"],
|
|
||||||
),
|
|
||||||
dns_instructions=dns_instructions,
|
|
||||||
landing=YieldLandingPreview(
|
|
||||||
template=yield_domain.landing_template or "generic",
|
|
||||||
headline=yield_domain.landing_headline or "",
|
|
||||||
seo_intro=yield_domain.landing_intro or "",
|
|
||||||
cta_label=yield_domain.landing_cta_label or "View offers",
|
|
||||||
model=getattr(yield_domain, "landing_model", None),
|
|
||||||
generated_at=getattr(yield_domain, "landing_generated_at", None),
|
|
||||||
),
|
|
||||||
message="Domain registered! Point your DNS to our nameservers to complete activation.",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
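# --- Illustrative client sketch (not part of the original file) -------------
# How a client might call the activation endpoint above and read back the DNS
# setup instructions. The base URL and bearer token are placeholders; the
# /api/v1/yield mount path is inferred from the verification_url built above.
import httpx

def activate_yield_domain(base_url: str, token: str, domain: str) -> dict:
    """POST to the activate endpoint and return the parsed JSON response."""
    resp = httpx.post(
        f"{base_url}/api/v1/yield/activate",
        json={"domain": domain},
        headers={"Authorization": f"Bearer {token}"},
        timeout=30.0,
    )
    resp.raise_for_status()
    payload = resp.json()
    # The nameservers the user must configure at their registrar:
    # payload["dns_instructions"]["nameservers"]
    return payload
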
@router.post("/domains/{domain_id}/verify", response_model=DNSVerificationResult)
|
|
||||||
async def verify_domain_dns(
|
|
||||||
domain_id: int,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
current_user: User = Depends(get_current_user),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Verify DNS configuration for a yield domain.
|
|
||||||
"""
|
|
||||||
result = await db.execute(
|
|
||||||
select(YieldDomain).where(
|
|
||||||
YieldDomain.id == domain_id,
|
|
||||||
YieldDomain.user_id == current_user.id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
domain = result.scalar_one_or_none()
|
|
||||||
|
|
||||||
if not domain:
|
|
||||||
raise HTTPException(status_code=404, detail="Yield domain not found")
|
|
||||||
|
|
||||||
# Production-grade DNS check
|
|
||||||
check = verify_yield_dns(
|
|
||||||
domain=domain.domain,
|
|
||||||
expected_nameservers=settings.yield_nameserver_list,
|
|
||||||
cname_target=settings.yield_cname_target,
|
|
||||||
)
|
|
||||||
verified = check.verified
|
|
||||||
actual_ns = check.actual_ns
|
|
||||||
error = check.error
|
|
||||||
|
|
||||||
# Update domain status
|
|
||||||
if verified and not domain.dns_verified:
|
|
||||||
domain.dns_verified = True
|
|
||||||
domain.dns_verified_at = datetime.utcnow()
|
|
||||||
domain.connected_at = domain.dns_verified_at
|
|
||||||
domain.status = "active"
|
|
||||||
domain.activated_at = datetime.utcnow()
|
|
||||||
|
|
||||||
await track_event(
|
|
||||||
db,
|
|
||||||
event_name="yield_connected",
|
|
||||||
request=None,
|
|
||||||
user_id=current_user.id,
|
|
||||||
is_authenticated=True,
|
|
||||||
source="terminal",
|
|
||||||
domain=domain.domain,
|
|
||||||
yield_domain_id=domain.id,
|
|
||||||
metadata={"method": check.method, "cname_ok": check.cname_ok, "actual_ns": check.actual_ns},
|
|
||||||
)
|
|
||||||
await db.commit()
|
|
||||||
|
|
||||||
return DNSVerificationResult(
|
|
||||||
domain=domain.domain,
|
|
||||||
verified=verified,
|
|
||||||
expected_ns=settings.yield_nameserver_list,
|
|
||||||
actual_ns=actual_ns,
|
|
||||||
cname_ok=check.cname_ok if verified else False,
|
|
||||||
error=error,
|
|
||||||
checked_at=datetime.utcnow(),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
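# --- Illustrative sketch (not part of the original file) --------------------
# verify_yield_dns lives in another module; this is a minimal sketch of the
# kind of NS lookup it presumably performs, using dnspython. Error handling
# is simplified and the function name is hypothetical.
import dns.exception
import dns.resolver

def check_nameservers(domain: str, expected: list[str]) -> tuple[bool, list[str]]:
    """Return (verified, actual_ns): verified iff all expected NS are present."""
    try:
        answers = dns.resolver.resolve(domain, "NS")
    except dns.exception.DNSException:
        return False, []
    actual = sorted(str(rr.target).rstrip(".").lower() for rr in answers)
    expected_set = {ns.rstrip(".").lower() for ns in expected}
    return expected_set.issubset(set(actual)), actual
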
@router.patch("/domains/{domain_id}", response_model=YieldDomainResponse)
|
|
||||||
async def update_yield_domain(
|
|
||||||
domain_id: int,
|
|
||||||
update: YieldDomainUpdate,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
current_user: User = Depends(get_current_user),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Update yield domain settings.
|
|
||||||
"""
|
|
||||||
result = await db.execute(
|
|
||||||
select(YieldDomain).where(
|
|
||||||
YieldDomain.id == domain_id,
|
|
||||||
YieldDomain.user_id == current_user.id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
domain = result.scalar_one_or_none()
|
|
||||||
|
|
||||||
if not domain:
|
|
||||||
raise HTTPException(status_code=404, detail="Yield domain not found")
|
|
||||||
|
|
||||||
# Apply updates
|
|
||||||
if update.active_route is not None:
|
|
||||||
# Validate partner exists
|
|
||||||
partner_result = await db.execute(
|
|
||||||
select(AffiliatePartner).where(
|
|
||||||
AffiliatePartner.slug == update.active_route,
|
|
||||||
AffiliatePartner.is_active == True,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
partner = partner_result.scalar_one_or_none()
|
|
||||||
if not partner:
|
|
||||||
raise HTTPException(status_code=400, detail="Invalid partner route")
|
|
||||||
domain.active_route = update.active_route
|
|
||||||
domain.partner_id = partner.id
|
|
||||||
|
|
||||||
if update.landing_page_url is not None:
|
|
||||||
domain.landing_page_url = update.landing_page_url
|
|
||||||
|
|
||||||
if update.status is not None:
|
|
||||||
if update.status == "paused":
|
|
||||||
domain.status = "paused"
|
|
||||||
domain.paused_at = datetime.utcnow()
|
|
||||||
elif update.status == "active" and domain.dns_verified:
|
|
||||||
domain.status = "active"
|
|
||||||
domain.paused_at = None
|
|
||||||
|
|
||||||
await db.commit()
|
|
||||||
await db.refresh(domain)
|
|
||||||
|
|
||||||
return _domain_to_response(domain)
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/domains/{domain_id}")
|
|
||||||
async def delete_yield_domain(
|
|
||||||
domain_id: int,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
current_user: User = Depends(get_current_user),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Remove a domain from yield program.
|
|
||||||
"""
|
|
||||||
result = await db.execute(
|
|
||||||
select(YieldDomain).where(
|
|
||||||
YieldDomain.id == domain_id,
|
|
||||||
YieldDomain.user_id == current_user.id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
domain = result.scalar_one_or_none()
|
|
||||||
|
|
||||||
if not domain:
|
|
||||||
raise HTTPException(status_code=404, detail="Yield domain not found")
|
|
||||||
|
|
||||||
await db.delete(domain)
|
|
||||||
await db.commit()
|
|
||||||
|
|
||||||
return {"message": "Yield domain removed"}
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
# Transactions
# ============================================================================

@router.get("/transactions", response_model=YieldTransactionListResponse)
async def list_transactions(
    domain_id: Optional[int] = Query(None),
    status: Optional[str] = Query(None),
    limit: int = Query(50, le=100),
    offset: int = Query(0, ge=0),
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    List yield transactions for user's domains.
    """
    # Get user's domain IDs
    domain_ids_result = await db.execute(
        select(YieldDomain.id).where(YieldDomain.user_id == current_user.id)
    )
    domain_ids = [row[0] for row in domain_ids_result.all()]

    if not domain_ids:
        return YieldTransactionListResponse(
            transactions=[],
            total=0,
            total_gross=Decimal("0"),
            total_net=Decimal("0"),
        )

    query = select(YieldTransaction).where(
        YieldTransaction.yield_domain_id.in_(domain_ids)
    )

    if domain_id:
        query = query.where(YieldTransaction.yield_domain_id == domain_id)

    if status:
        query = query.where(YieldTransaction.status == status)

    # Get count
    count_query = select(func.count(YieldTransaction.id)).where(
        YieldTransaction.yield_domain_id.in_(domain_ids)
    )
    if domain_id:
        count_query = count_query.where(YieldTransaction.yield_domain_id == domain_id)
    if status:
        count_query = count_query.where(YieldTransaction.status == status)

    count_result = await db.execute(count_query)
    total = count_result.scalar() or 0

    # Get transactions
    result = await db.execute(
        query.order_by(YieldTransaction.created_at.desc()).offset(offset).limit(limit)
    )
    transactions = list(result.scalars().all())

    # Aggregates over the returned page (start from Decimal("0") so an empty
    # page yields a Decimal, not int 0)
    total_gross = sum((tx.gross_amount for tx in transactions), Decimal("0"))
    total_net = sum((tx.net_amount for tx in transactions), Decimal("0"))

    return YieldTransactionListResponse(
        transactions=[_tx_to_response(tx) for tx in transactions],
        total=total,
        total_gross=total_gross,
        total_net=total_net,
    )

# ============================================================================
# Payouts
# ============================================================================

@router.get("/payouts", response_model=YieldPayoutListResponse)
async def list_payouts(
    status: Optional[str] = Query(None),
    limit: int = Query(20, le=50),
    offset: int = Query(0, ge=0),
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    List user's yield payouts.
    """
    query = select(YieldPayout).where(YieldPayout.user_id == current_user.id)

    if status:
        query = query.where(YieldPayout.status == status)

    # Get count
    count_result = await db.execute(
        select(func.count(YieldPayout.id)).where(YieldPayout.user_id == current_user.id)
    )
    total = count_result.scalar() or 0

    # Get payouts
    result = await db.execute(
        query.order_by(YieldPayout.created_at.desc()).offset(offset).limit(limit)
    )
    payouts = list(result.scalars().all())

    # Aggregates over the returned page (Decimal start value avoids int 0 on
    # empty pages)
    total_paid = sum((p.amount for p in payouts if p.status == "completed"), Decimal("0"))
    total_pending = sum((p.amount for p in payouts if p.status in ["pending", "processing"]), Decimal("0"))

    return YieldPayoutListResponse(
        payouts=[_payout_to_response(p) for p in payouts],
        total=total,
        total_paid=total_paid,
        total_pending=total_pending,
    )

# ============================================================================
# Partners (Public info)
# ============================================================================

@router.get("/partners", response_model=list[AffiliatePartnerResponse])
async def list_partners(
    category: Optional[str] = Query(None, description="Filter by intent category"),
    db: AsyncSession = Depends(get_db),
):
    """
    List available affiliate partners.
    """
    result = await db.execute(
        select(AffiliatePartner)
        .where(AffiliatePartner.is_active == True)
        .order_by(AffiliatePartner.priority.desc())
    )
    partners = list(result.scalars().all())

    # Filter by category if specified
    if category:
        partners = [p for p in partners if category in p.intent_list]

    return [
        AffiliatePartnerResponse(
            slug=p.slug,
            name=p.name,
            network=p.network,
            intent_categories=p.intent_list,
            geo_countries=p.country_list,
            payout_type=p.payout_type,
            description=p.description,
            logo_url=p.logo_url,
        )
        for p in partners
    ]

# ============================================================================
# Helpers
# ============================================================================

def _domain_to_response(domain: YieldDomain) -> YieldDomainResponse:
    """Convert YieldDomain model to response schema."""
    return YieldDomainResponse(
        id=domain.id,
        domain=domain.domain,
        status=domain.status,
        detected_intent=domain.detected_intent,
        intent_confidence=domain.intent_confidence,
        active_route=domain.active_route,
        partner_name=domain.partner.name if domain.partner else None,
        landing_template=getattr(domain, "landing_template", None),
        landing_headline=getattr(domain, "landing_headline", None),
        landing_intro=getattr(domain, "landing_intro", None),
        landing_cta_label=getattr(domain, "landing_cta_label", None),
        landing_model=getattr(domain, "landing_model", None),
        landing_generated_at=getattr(domain, "landing_generated_at", None),
        dns_verified=domain.dns_verified,
        dns_verified_at=domain.dns_verified_at,
        connected_at=getattr(domain, "connected_at", None),
        total_clicks=domain.total_clicks,
        total_conversions=domain.total_conversions,
        total_revenue=domain.total_revenue,
        currency=domain.currency,
        activated_at=domain.activated_at,
        created_at=domain.created_at,
    )


def _tx_to_response(tx: YieldTransaction) -> YieldTransactionResponse:
    """Convert YieldTransaction model to response schema."""
    return YieldTransactionResponse(
        id=tx.id,
        event_type=tx.event_type,
        partner_slug=tx.partner_slug,
        click_id=getattr(tx, "click_id", None),
        gross_amount=tx.gross_amount,
        net_amount=tx.net_amount,
        currency=tx.currency,
        status=tx.status,
        geo_country=tx.geo_country,
        created_at=tx.created_at,
        confirmed_at=tx.confirmed_at,
    )


def _payout_to_response(payout: YieldPayout) -> YieldPayoutResponse:
    """Convert YieldPayout model to response schema."""
    return YieldPayoutResponse(
        id=payout.id,
        amount=payout.amount,
        currency=payout.currency,
        period_start=payout.period_start,
        period_end=payout.period_end,
        transaction_count=payout.transaction_count,
        status=payout.status,
        payment_method=payout.payment_method,
        payment_reference=payout.payment_reference,
        created_at=payout.created_at,
        completed_at=payout.completed_at,
    )

@@ -1,188 +0,0 @@
"""
|
|
||||||
Admin endpoints for Yield payouts (ledger).
|
|
||||||
|
|
||||||
Premium constraints:
|
|
||||||
- No placeholder payouts
|
|
||||||
- No currency mixing
|
|
||||||
- Idempotent generation per (user, currency, period)
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
from sqlalchemy import and_, func, select
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.api.deps import get_current_user, get_db
|
|
||||||
from app.models.user import User
|
|
||||||
from app.models.yield_domain import YieldPayout, YieldTransaction
|
|
||||||
from app.services.telemetry import track_event
|
|
||||||
from app.services.yield_payouts import generate_payouts_for_period
|
|
||||||
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/yield", tags=["yield-admin"])
|
|
||||||
|
|
||||||
|
|
||||||
class PayoutGenerateRequest(BaseModel):
|
|
||||||
period_start: datetime
|
|
||||||
period_end: datetime
|
|
||||||
|
|
||||||
|
|
||||||
class GeneratedPayout(BaseModel):
|
|
||||||
id: int
|
|
||||||
user_id: int
|
|
||||||
amount: Decimal
|
|
||||||
currency: str
|
|
||||||
period_start: datetime
|
|
||||||
period_end: datetime
|
|
||||||
transaction_count: int
|
|
||||||
status: str
|
|
||||||
created_at: datetime
|
|
||||||
|
|
||||||
|
|
||||||
class PayoutGenerateResponse(BaseModel):
|
|
||||||
created: list[GeneratedPayout]
|
|
||||||
skipped_existing: int = 0
|
|
||||||
|
|
||||||
|
|
||||||
class PayoutCompleteRequest(BaseModel):
|
|
||||||
payment_method: str | None = Field(default=None, max_length=50)
|
|
||||||
payment_reference: str | None = Field(default=None, max_length=200)
|
|
||||||
|
|
||||||
|
|
||||||
class PayoutCompleteResponse(BaseModel):
|
|
||||||
payout_id: int
|
|
||||||
transactions_marked_paid: int
|
|
||||||
completed_at: datetime
|
|
||||||
|
|
||||||
|
|
||||||
def _require_admin(current_user: User) -> None:
|
|
||||||
if not current_user.is_admin:
|
|
||||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Admin access required")
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/payouts/generate", response_model=PayoutGenerateResponse)
|
|
||||||
async def generate_payouts(
|
|
||||||
payload: PayoutGenerateRequest,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
current_user: User = Depends(get_current_user),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Create YieldPayout rows for confirmed, unpaid transactions in the period.
|
|
||||||
|
|
||||||
This does NOT mark payouts as completed. It only assigns transactions to a payout via payout_id.
|
|
||||||
Completion is a separate step once payment is executed.
|
|
||||||
"""
|
|
||||||
_require_admin(current_user)
|
|
||||||
|
|
||||||
if payload.period_end <= payload.period_start:
|
|
||||||
raise HTTPException(status_code=400, detail="period_end must be after period_start")
|
|
||||||
|
|
||||||
created_count, skipped_existing = await generate_payouts_for_period(
|
|
||||||
db,
|
|
||||||
period_start=payload.period_start,
|
|
||||||
period_end=payload.period_end,
|
|
||||||
)
|
|
||||||
|
|
||||||
payouts = (
|
|
||||||
await db.execute(
|
|
||||||
select(YieldPayout)
|
|
||||||
.where(
|
|
||||||
and_(
|
|
||||||
YieldPayout.period_start == payload.period_start,
|
|
||||||
YieldPayout.period_end == payload.period_end,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.order_by(YieldPayout.created_at.desc())
|
|
||||||
)
|
|
||||||
).scalars().all()
|
|
||||||
|
|
||||||
created = [
|
|
||||||
GeneratedPayout(
|
|
||||||
id=p.id,
|
|
||||||
user_id=p.user_id,
|
|
||||||
amount=p.amount,
|
|
||||||
currency=p.currency,
|
|
||||||
period_start=p.period_start,
|
|
||||||
period_end=p.period_end,
|
|
||||||
transaction_count=p.transaction_count,
|
|
||||||
status=p.status,
|
|
||||||
created_at=p.created_at,
|
|
||||||
)
|
|
||||||
for p in payouts
|
|
||||||
]
|
|
||||||
|
|
||||||
# created_count is still returned implicitly via list length; we keep it for logs later
|
|
||||||
_ = created_count
|
|
||||||
return PayoutGenerateResponse(created=created, skipped_existing=skipped_existing)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/payouts/{payout_id}/complete", response_model=PayoutCompleteResponse)
|
|
||||||
async def complete_payout(
|
|
||||||
payout_id: int,
|
|
||||||
payload: PayoutCompleteRequest,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
current_user: User = Depends(get_current_user),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Mark a payout as completed and mark assigned transactions as paid.
|
|
||||||
"""
|
|
||||||
_require_admin(current_user)
|
|
||||||
|
|
||||||
payout = (
|
|
||||||
await db.execute(select(YieldPayout).where(YieldPayout.id == payout_id))
|
|
||||||
).scalar_one_or_none()
|
|
||||||
if not payout:
|
|
||||||
raise HTTPException(status_code=404, detail="Payout not found")
|
|
||||||
|
|
||||||
if payout.status == "completed":
|
|
||||||
raise HTTPException(status_code=400, detail="Payout already completed")
|
|
||||||
|
|
||||||
payout.status = "completed"
|
|
||||||
payout.completed_at = datetime.utcnow()
|
|
||||||
payout.payment_method = payload.payment_method
|
|
||||||
payout.payment_reference = payload.payment_reference
|
|
||||||
|
|
||||||
txs = (
|
|
||||||
await db.execute(
|
|
||||||
select(YieldTransaction).where(YieldTransaction.payout_id == payout.id)
|
|
||||||
)
|
|
||||||
).scalars().all()
|
|
||||||
|
|
||||||
marked = 0
|
|
||||||
for tx in txs:
|
|
||||||
if tx.status != "paid":
|
|
||||||
tx.status = "paid"
|
|
||||||
tx.paid_at = payout.completed_at
|
|
||||||
marked += 1
|
|
||||||
|
|
||||||
await track_event(
|
|
||||||
db,
|
|
||||||
event_name="payout_paid",
|
|
||||||
request=None,
|
|
||||||
user_id=payout.user_id,
|
|
||||||
is_authenticated=None,
|
|
||||||
source="admin",
|
|
||||||
domain=None,
|
|
||||||
yield_domain_id=None,
|
|
||||||
metadata={
|
|
||||||
"payout_id": payout.id,
|
|
||||||
"currency": payout.currency,
|
|
||||||
"amount": float(payout.amount),
|
|
||||||
"transaction_count": payout.transaction_count,
|
|
||||||
"payment_method": payout.payment_method,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
await db.commit()
|
|
||||||
|
|
||||||
return PayoutCompleteResponse(
|
|
||||||
payout_id=payout.id,
|
|
||||||
transactions_marked_paid=marked,
|
|
||||||
completed_at=payout.completed_at,
|
|
||||||
)
|
|
||||||
|
|
||||||
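# --- Illustrative sketch (not part of the original file) --------------------
# The real grouping lives in app.services.yield_payouts.generate_payouts_for_period;
# this is only a plausible, pure-Python illustration of the contract the
# docstring states: confirmed, unpaid transactions are grouped per
# (user, currency) within the period, and re-running the same period creates
# nothing new because assigned transactions already carry a payout_id.
from collections import defaultdict
from decimal import Decimal

def group_for_payout(transactions: list[dict]) -> dict[tuple[int, str], Decimal]:
    """Sum net amounts per (user_id, currency) over unassigned confirmed rows."""
    totals: dict[tuple[int, str], Decimal] = defaultdict(lambda: Decimal("0"))
    for tx in transactions:
        if tx["status"] == "confirmed" and tx.get("payout_id") is None:
            totals[(tx["user_id"], tx["currency"])] += tx["net_amount"]
    return dict(totals)
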
@@ -1,331 +0,0 @@
"""
|
|
||||||
Yield Domain Routing API.
|
|
||||||
|
|
||||||
This handles incoming HTTP requests to yield domains:
|
|
||||||
1. Detect the domain from the Host header
|
|
||||||
2. Look up the yield configuration
|
|
||||||
3. Track the click
|
|
||||||
4. Redirect to the appropriate affiliate landing page
|
|
||||||
|
|
||||||
In production, this runs on a separate subdomain or IP (yield.pounce.ch)
|
|
||||||
that yield domains CNAME to.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from decimal import Decimal
|
|
||||||
from typing import Optional
|
|
||||||
from uuid import uuid4
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, Query, Request
|
|
||||||
from fastapi.responses import HTMLResponse, RedirectResponse
|
|
||||||
from sqlalchemy import and_, func, or_, select
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.api.deps import get_db
|
|
||||||
from app.config import get_settings
|
|
||||||
from app.models.yield_domain import YieldDomain, YieldTransaction, AffiliatePartner
|
|
||||||
from app.services.intent_detector import detect_domain_intent
|
|
||||||
from app.services.telemetry import track_event
|
|
||||||
from app.services.yield_landing_page import render_yield_landing_html
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
settings = get_settings()
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/r", tags=["yield-routing"])
|
|
||||||
|
|
||||||
# Revenue split
|
|
||||||
USER_REVENUE_SHARE = Decimal("0.70")
|
|
||||||
|
|
||||||
|
|
||||||
def hash_ip(ip: str) -> str:
|
|
||||||
"""Hash IP for privacy-compliant storage."""
|
|
||||||
import hashlib
|
|
||||||
# Salt to prevent trivial rainbow table lookups.
|
|
||||||
return hashlib.sha256(f"{ip}|{settings.secret_key}".encode()).hexdigest()[:32]
|
|
||||||
|
|
||||||
|
|
||||||
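# --- Illustrative demo (not part of the original file) ----------------------
# The salted hash above is deterministic per (ip, secret): the same visitor
# maps to the same 32-hex-char token, so the rate limiter below can match
# repeat clicks without ever storing a raw address. The secret here is a
# made-up test value, not a real configuration.
if __name__ == "__main__":
    import hashlib
    secret = "example-secret"  # hypothetical; production uses settings.secret_key
    token = hashlib.sha256(f"203.0.113.7|{secret}".encode()).hexdigest()[:32]
    # Deterministic: hashing the same IP twice yields the same token.
    assert token == hashlib.sha256(f"203.0.113.7|{secret}".encode()).hexdigest()[:32]
    print(token)
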
def _get_client_ip(request: Request) -> Optional[str]:
    # Prefer proxy headers when behind nginx
    xff = request.headers.get("x-forwarded-for")
    if xff:
        # first IP in list
        ip = xff.split(",")[0].strip()
        if ip:
            return ip
    cf_ip = request.headers.get("cf-connecting-ip")
    if cf_ip:
        return cf_ip.strip()
    return request.client.host if request.client else None


def _safe_tracking_url(template: str, *, click_id: str, domain: str, domain_id: int, partner: str) -> str:
    try:
        return template.format(
            click_id=click_id,
            domain=domain,
            domain_id=domain_id,
            partner=partner,
        )
    except KeyError as e:
        raise HTTPException(
            status_code=500,
            detail=f"Partner tracking_url_template uses unsupported placeholder: {str(e)}",
        )


def generate_tracking_url(
    partner: AffiliatePartner,
    yield_domain: YieldDomain,
    click_id: str,
) -> str:
    """
    Generate the tracking URL for a partner.

    Most affiliate networks expect parameters like:
    - clickid / subid: Our click tracking ID
    - ref: Domain name or user reference
    """
    if not partner.tracking_url_template:
        raise HTTPException(
            status_code=503,
            detail=f"Partner routing not configured for {partner.slug}. Missing tracking_url_template.",
        )

    return _safe_tracking_url(
        partner.tracking_url_template,
        click_id=click_id,
        domain=yield_domain.domain,
        domain_id=yield_domain.id,
        partner=partner.slug,
    )

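# --- Illustrative template example (not part of the original file) ----------
# The template contract enforced above allows exactly four named placeholders:
# {click_id}, {domain}, {domain_id}, {partner}. The partner template below is
# hypothetical, not a real partner configuration.
#
#   template = "https://track.partner.example/?subid={click_id}&ref={domain}"
#   _safe_tracking_url(template, click_id="abc123",
#                      domain="zahnarzt-zuerich.ch", domain_id=42,
#                      partner="example")
#   -> "https://track.partner.example/?subid=abc123&ref=zahnarzt-zuerich.ch"
#
# Any other placeholder (e.g. {user_id}) raises KeyError, which surfaces as
# the HTTP 500 above rather than leaking a half-formatted URL to a visitor.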
@router.get("/{domain}")
|
|
||||||
async def route_yield_domain(
|
|
||||||
domain: str,
|
|
||||||
request: Request,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
direct: bool = Query(False, description="Direct redirect without landing page"),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Route traffic for a yield domain.
|
|
||||||
|
|
||||||
This is the main entry point for yield domain traffic.
|
|
||||||
|
|
||||||
Query params:
|
|
||||||
- direct: If true, redirect immediately without landing page
|
|
||||||
"""
|
|
||||||
domain = domain.lower().strip()
|
|
||||||
|
|
||||||
# Find yield domain (must be connected + active)
|
|
||||||
yield_domain = (
|
|
||||||
await db.execute(
|
|
||||||
select(YieldDomain).where(
|
|
||||||
and_(
|
|
||||||
YieldDomain.domain == domain,
|
|
||||||
YieldDomain.status == "active",
|
|
||||||
YieldDomain.dns_verified == True,
|
|
||||||
or_(YieldDomain.connected_at.is_not(None), YieldDomain.dns_verified_at.is_not(None)),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalar_one_or_none()
|
|
||||||
|
|
||||||
if not yield_domain:
|
|
||||||
logger.warning(f"Route request for unknown/inactive/unconnected domain: {domain}")
|
|
||||||
raise HTTPException(status_code=404, detail="Domain not active for yield routing.")
|
|
||||||
|
|
||||||
# Resolve partner
|
|
||||||
partner: Optional[AffiliatePartner] = None
|
|
||||||
if yield_domain.partner_id:
|
|
||||||
partner = (
|
|
||||||
await db.execute(
|
|
||||||
select(AffiliatePartner).where(
|
|
||||||
and_(
|
|
||||||
AffiliatePartner.id == yield_domain.partner_id,
|
|
||||||
AffiliatePartner.is_active == True,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalar_one_or_none()
|
|
||||||
|
|
||||||
if not partner and yield_domain.detected_intent:
|
|
||||||
# Match full detected intent first (e.g. medical_dental)
|
|
||||||
partner = (
|
|
||||||
await db.execute(
|
|
||||||
select(AffiliatePartner)
|
|
||||||
.where(
|
|
||||||
and_(
|
|
||||||
AffiliatePartner.is_active == True,
|
|
||||||
AffiliatePartner.intent_categories.ilike(f"%{yield_domain.detected_intent}%"),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.order_by(AffiliatePartner.priority.desc())
|
|
||||||
)
|
|
||||||
).scalar_one_or_none()
|
|
||||||
|
|
||||||
if not partner:
|
|
||||||
raise HTTPException(status_code=503, detail="No active partner available for this domain intent.")
|
|
||||||
|
|
||||||
# Landing page mode: do NOT record a click yet.
|
|
||||||
# The CTA will call this endpoint again with direct=true, which records the click + redirects.
|
|
||||||
if not direct:
|
|
||||||
cta_url = f"/api/v1/r/{yield_domain.domain}?direct=true"
|
|
||||||
try:
|
|
||||||
html = render_yield_landing_html(yield_domain=yield_domain, cta_url=cta_url)
|
|
||||||
except Exception as e:
|
|
||||||
raise HTTPException(status_code=503, detail=f"Landing page not available: {e}")
|
|
||||||
|
|
||||||
await track_event(
|
|
||||||
db,
|
|
||||||
event_name="yield_landing_view",
|
|
||||||
request=request,
|
|
||||||
user_id=yield_domain.user_id,
|
|
||||||
is_authenticated=None,
|
|
||||||
source="routing",
|
|
||||||
domain=yield_domain.domain,
|
|
||||||
yield_domain_id=yield_domain.id,
|
|
||||||
metadata={"partner": partner.slug},
|
|
||||||
)
|
|
||||||
await db.commit()
|
|
||||||
return HTMLResponse(content=html, status_code=200)
|
|
||||||
|
|
||||||
# Rate limit: max 120 clicks/10min per IP per domain
|
|
||||||
client_ip = _get_client_ip(request)
|
|
||||||
ip_hash = hash_ip(client_ip) if client_ip else None
|
|
||||||
if ip_hash:
|
|
||||||
cutoff = datetime.utcnow() - timedelta(minutes=10)
|
|
||||||
recent = (
|
|
||||||
await db.execute(
|
|
||||||
select(func.count(YieldTransaction.id)).where(
|
|
||||||
and_(
|
|
||||||
YieldTransaction.yield_domain_id == yield_domain.id,
|
|
||||||
YieldTransaction.event_type == "click",
|
|
||||||
YieldTransaction.ip_hash == ip_hash,
|
|
||||||
YieldTransaction.created_at >= cutoff,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalar() or 0
|
|
||||||
if recent >= 120:
|
|
||||||
raise HTTPException(status_code=429, detail="Too many requests. Please slow down.")
|
|
||||||
|
|
||||||
# Compute click economics (only CPC can be accounted immediately)
|
|
||||||
gross = Decimal("0")
|
|
||||||
net = Decimal("0")
|
|
||||||
currency = (partner.payout_currency or "CHF").upper()
|
|
||||||
if (partner.payout_type or "").lower() == "cpc":
|
|
||||||
gross = partner.payout_amount or Decimal("0")
|
|
||||||
net = (gross * USER_REVENUE_SHARE).quantize(Decimal("0.01"))
|
|
||||||
|
|
||||||
click_id = uuid4().hex
|
|
||||||
destination_url = generate_tracking_url(partner, yield_domain, click_id)
|
|
||||||
|
|
||||||
user_agent = request.headers.get("user-agent")
|
|
||||||
referrer = request.headers.get("referer")
|
|
||||||
geo_country = request.headers.get("cf-ipcountry") or request.headers.get("x-country")
|
|
||||||
geo_country = geo_country.strip().upper() if geo_country else None
|
|
||||||
|
|
||||||
transaction = YieldTransaction(
|
|
||||||
yield_domain_id=yield_domain.id,
|
|
||||||
event_type="click",
|
|
||||||
partner_slug=partner.slug,
|
|
||||||
click_id=click_id,
|
|
||||||
destination_url=destination_url[:2000],
|
|
||||||
gross_amount=gross,
|
|
||||||
net_amount=net,
|
|
||||||
currency=currency,
|
|
||||||
referrer=referrer[:500] if referrer else None,
|
|
||||||
user_agent=user_agent[:500] if user_agent else None,
|
|
||||||
geo_country=geo_country[:2] if geo_country else None,
|
|
||||||
ip_hash=ip_hash,
|
|
||||||
status="confirmed",
|
|
||||||
confirmed_at=datetime.utcnow(),
|
|
||||||
)
|
|
||||||
db.add(transaction)
|
|
||||||
|
|
||||||
yield_domain.total_clicks += 1
|
|
||||||
yield_domain.last_click_at = datetime.utcnow()
|
|
||||||
if net > 0:
|
|
||||||
yield_domain.total_revenue += net
|
|
||||||
|
|
||||||
await track_event(
|
|
||||||
db,
|
|
||||||
event_name="yield_click",
|
|
||||||
request=request,
|
|
||||||
user_id=yield_domain.user_id,
|
|
||||||
is_authenticated=None,
|
|
||||||
source="routing",
|
|
||||||
domain=yield_domain.domain,
|
|
||||||
yield_domain_id=yield_domain.id,
|
|
||||||
click_id=click_id,
|
|
||||||
metadata={"partner": partner.slug, "currency": currency, "net_amount": float(net)},
|
|
||||||
)
|
|
||||||
|
|
||||||
await db.commit()
|
|
||||||
|
|
||||||
return RedirectResponse(url=destination_url, status_code=302)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/")
|
|
||||||
async def yield_routing_info():
|
|
||||||
"""Info endpoint for yield routing service."""
|
|
||||||
return {
|
|
||||||
"service": "Pounce Yield Routing",
|
|
||||||
"version": "2.0.0",
|
|
||||||
"docs": f"{settings.site_url}/docs#/yield-routing",
|
|
||||||
"status": "active",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Host-based routing (for production deployment)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@router.api_route("/catch-all", methods=["GET", "HEAD"])
|
|
||||||
async def catch_all_route(
|
|
||||||
request: Request,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Catch-all route for host-based routing.
|
|
||||||
|
|
||||||
In production, this endpoint handles requests where the Host header
|
|
||||||
is the yield domain itself (e.g., zahnarzt-zuerich.ch).
|
|
||||||
|
|
||||||
This requires:
|
|
||||||
1. Yield domains to CNAME to yield.pounce.ch
|
|
||||||
2. Nginx/Caddy to route all hosts to this backend
|
|
||||||
3. This endpoint to parse the Host header
|
|
||||||
"""
|
|
||||||
host = request.headers.get("host", "").lower()
|
|
||||||
|
|
||||||
# Remove port if present
|
|
||||||
if ":" in host:
|
|
||||||
host = host.split(":")[0]
|
|
||||||
|
|
||||||
# Skip our own domains
|
|
||||||
our_domains = ["pounce.ch", "localhost", "127.0.0.1"]
|
|
||||||
if any(host.endswith(d) for d in our_domains):
|
|
||||||
return {"status": "not a yield domain", "host": host}
|
|
||||||
|
|
||||||
# If host matches a connected yield domain, route it
|
|
||||||
_ = (
|
|
||||||
await db.execute(
|
|
||||||
select(YieldDomain.id).where(
|
|
||||||
and_(
|
|
||||||
YieldDomain.domain == host,
|
|
||||||
YieldDomain.status == "active",
|
|
||||||
YieldDomain.dns_verified == True,
|
|
||||||
or_(YieldDomain.connected_at.is_not(None), YieldDomain.dns_verified_at.is_not(None)),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalar_one_or_none()
|
|
||||||
|
|
||||||
if not _:
|
|
||||||
raise HTTPException(status_code=404, detail="Host not configured for yield routing.")
|
|
||||||
|
|
||||||
return RedirectResponse(url=f"/api/v1/r/{host}?direct=false", status_code=302)
|
|
||||||
|
|
||||||
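# --- Illustrative flow sketch (not part of the original file) ---------------
# End-to-end view of the two-step click flow implemented above, using httpx.
# Step 1 (no click recorded): GET /api/v1/r/<domain> returns the landing HTML
# whose CTA points back at the same URL with ?direct=true. Step 2 (click
# recorded): that second request writes a YieldTransaction and 302-redirects
# to the partner's tracking URL. Base URL and domain are illustrative.
import httpx

def simulate_visit(base_url: str, domain: str) -> str:
    """Return the partner destination URL a visitor would be redirected to."""
    landing = httpx.get(f"{base_url}/api/v1/r/{domain}")  # landing view only
    landing.raise_for_status()
    click = httpx.get(
        f"{base_url}/api/v1/r/{domain}",
        params={"direct": "true"},
        follow_redirects=False,  # keep the 302 so we can inspect it
    )
    return click.headers.get("location", "")
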
@@ -1,563 +0,0 @@
"""
|
|
||||||
Webhook endpoints for Yield affiliate partner callbacks.
|
|
||||||
|
|
||||||
Partners call these endpoints to report:
|
|
||||||
- Clicks (redirect happened)
|
|
||||||
- Leads (form submitted, signup, etc.)
|
|
||||||
- Sales (purchase completed)
|
|
||||||
|
|
||||||
Each partner may have different authentication methods:
|
|
||||||
- HMAC signature verification
|
|
||||||
- API key in header
|
|
||||||
- IP whitelist
|
|
||||||
"""
|
|
||||||
|
|
||||||
import hashlib
|
|
||||||
import hmac
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
from datetime import datetime
|
|
||||||
from decimal import Decimal
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, BackgroundTasks, Depends, Header, HTTPException, Request
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
from sqlalchemy import and_, select
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.api.deps import get_db
|
|
||||||
from app.config import get_settings
|
|
||||||
from app.models.yield_domain import YieldDomain, YieldTransaction, AffiliatePartner
|
|
||||||
from app.services.telemetry import track_event
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
settings = get_settings()
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/yield-webhooks", tags=["yield-webhooks"])
|
|
||||||
|
|
||||||
# Revenue split: User gets 70%, Pounce keeps 30%
|
|
||||||
USER_REVENUE_SHARE = Decimal("0.70")
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Schemas
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
class PartnerEvent(BaseModel):
|
|
||||||
"""Generic partner event payload."""
|
|
||||||
event_type: str = Field(..., description="click, lead, or sale")
|
|
||||||
domain: str = Field(..., description="The yield domain that generated this event")
|
|
||||||
transaction_id: Optional[str] = Field(None, description="Partner's transaction ID")
|
|
||||||
click_id: Optional[str] = Field(None, description="Pounce click_id for attribution (UUID hex)")
|
|
||||||
amount: Optional[float] = Field(None, description="Gross commission amount")
|
|
||||||
currency: Optional[str] = Field("CHF", description="Currency code")
|
|
||||||
|
|
||||||
# Optional attribution data
|
|
||||||
geo_country: Optional[str] = None
|
|
||||||
referrer: Optional[str] = None
|
|
||||||
user_agent: Optional[str] = None
|
|
||||||
|
|
||||||
# Optional metadata
|
|
||||||
metadata: Optional[dict] = None
|
|
||||||
|
|
||||||
|
|
||||||
class WebhookResponse(BaseModel):
|
|
||||||
"""Response for webhook calls."""
|
|
||||||
success: bool
|
|
||||||
transaction_id: Optional[int] = None
|
|
||||||
message: str
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Signature Verification Helpers
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
def verify_hmac_signature(
|
|
||||||
payload: bytes,
|
|
||||||
signature: str,
|
|
||||||
secret: str,
|
|
||||||
algorithm: str = "sha256"
|
|
||||||
) -> bool:
|
|
||||||
"""Verify HMAC signature for webhook payload."""
|
|
||||||
expected = hmac.new(
|
|
||||||
secret.encode(),
|
|
||||||
payload,
|
|
||||||
hashlib.sha256 if algorithm == "sha256" else hashlib.sha1
|
|
||||||
).hexdigest()
|
|
||||||
|
|
||||||
return hmac.compare_digest(signature, expected)
|
|
||||||
|
|
||||||
|
|
||||||
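# --- Illustrative partner-side sketch (not part of the original file) -------
# Counterpart to verify_hmac_signature above: how a sending partner would sign
# the exact request body so verification succeeds. The URL is illustrative
# (the router prefix is /yield-webhooks; the full mount path depends on how
# the app includes this router), and the secret is a placeholder.
import hashlib
import hmac
import json

import httpx

def send_signed_event(url: str, secret: str, event: dict) -> httpx.Response:
    """POST an event with an X-Webhook-Signature over the exact body bytes."""
    body = json.dumps(event).encode()
    signature = hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    return httpx.post(
        url,  # e.g. https://pounce.ch/api/v1/yield-webhooks/<partner_slug>
        content=body,  # send the signed bytes verbatim, not re-serialized JSON
        headers={
            "X-Webhook-Signature": signature,
            "Content-Type": "application/json",
        },
    )
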
def hash_ip(ip: str) -> str:
    """Hash IP address for privacy-compliant storage."""
    return hashlib.sha256(f"{ip}|{settings.secret_key}".encode()).hexdigest()[:32]


def _get_webhook_secret(partner_slug: str) -> Optional[str]:
    """
    Webhook secrets are configured via environment:
    - YIELD_WEBHOOK_SECRET (global default)
    - YIELD_WEBHOOK_SECRET_<PARTNER_SLUG_UPPER> (partner-specific override)
    """
    import os

    specific = os.getenv(f"YIELD_WEBHOOK_SECRET_{partner_slug.upper()}")
    if specific:
        return specific
    return os.getenv("YIELD_WEBHOOK_SECRET") or None

# ============================================================================
# Generic Webhook Endpoint
# ============================================================================

@router.post("/{partner_slug}", response_model=WebhookResponse)
async def receive_partner_webhook(
    partner_slug: str,
    event: PartnerEvent,
    request: Request,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db),
    x_webhook_signature: Optional[str] = Header(None),
    x_api_key: Optional[str] = Header(None),
):
    """
    Receive webhook callback from affiliate partner.

    Partners POST events here when clicks, leads, or sales occur.
    """
    # 1. Find partner
    partner = (
        await db.execute(
            select(AffiliatePartner).where(
                and_(
                    AffiliatePartner.slug == partner_slug,
                    AffiliatePartner.is_active == True,
                )
            )
        )
    ).scalar_one_or_none()

    if not partner:
        logger.warning(f"Webhook from unknown partner: {partner_slug}")
        raise HTTPException(status_code=404, detail="Unknown partner")

    # 2. Verify authentication (strict)
    secret = _get_webhook_secret(partner_slug)
    if not secret:
        raise HTTPException(status_code=503, detail="Webhook secret not configured on server.")
    if not x_webhook_signature:
        raise HTTPException(status_code=401, detail="Missing webhook signature.")
    raw = await request.body()
    if not verify_hmac_signature(raw, x_webhook_signature, secret):
        raise HTTPException(status_code=401, detail="Invalid webhook signature.")

    # 3. Find yield domain (must be active)
    yield_domain = (
        await db.execute(
            select(YieldDomain).where(
                and_(
                    YieldDomain.domain == event.domain.lower(),
                    YieldDomain.status == "active",
                )
            )
        )
    ).scalar_one_or_none()

    if not yield_domain:
        logger.warning(f"Webhook for unknown/inactive domain: {event.domain}")
        raise HTTPException(status_code=404, detail="Domain not found or inactive")

    # 4. Calculate amounts
    gross_amount = Decimal(str(event.amount)) if event.amount else Decimal("0")
    net_amount = gross_amount * USER_REVENUE_SHARE

    # 5. Get client IP for hashing
    client_ip = request.client.host if request.client else None
    ip_hash = hash_ip(client_ip) if client_ip else None

    # 6. Create transaction
    transaction = YieldTransaction(
        yield_domain_id=yield_domain.id,
        event_type=event.event_type,
        partner_slug=partner_slug,
        partner_transaction_id=event.transaction_id,
        click_id=(event.click_id[:64] if event.click_id else None),
        gross_amount=gross_amount,
        net_amount=net_amount,
        currency=event.currency or "CHF",
        referrer=event.referrer,
        user_agent=event.user_agent,
        geo_country=event.geo_country,
        ip_hash=ip_hash,
        status="pending" if event.event_type in ["lead", "sale"] else "confirmed",
        confirmed_at=datetime.utcnow() if event.event_type == "click" else None,
    )

    db.add(transaction)

    # Optional: attribute to an existing click transaction (same yield_domain + click_id)
    if event.click_id:
        click_tx = (
            await db.execute(
                select(YieldTransaction).where(
                    and_(
                        YieldTransaction.yield_domain_id == yield_domain.id,
                        YieldTransaction.event_type == "click",
                        YieldTransaction.click_id == event.click_id[:64],
                    )
                )
            )
        ).scalar_one_or_none()
        if not click_tx:
            logger.warning(
                f"Webhook received click_id but no matching click found: partner={partner_slug} "
                f"domain={yield_domain.domain} click_id={event.click_id[:64]}"
            )

    # 7. Update domain aggregates
    if event.event_type == "click":
        yield_domain.total_clicks += 1
        yield_domain.last_click_at = datetime.utcnow()
    elif event.event_type in ["lead", "sale"]:
        yield_domain.total_conversions += 1
        yield_domain.last_conversion_at = datetime.utcnow()
        # Add revenue when confirmed
        if transaction.status == "confirmed":
            yield_domain.total_revenue += net_amount

    await track_event(
        db,
        event_name="yield_conversion",
        request=request,
        user_id=yield_domain.user_id,
        is_authenticated=None,
        source="webhook",
        domain=yield_domain.domain,
        yield_domain_id=yield_domain.id,
        click_id=event.click_id,
        metadata={
            "partner": partner_slug,
            "event_type": event.event_type,
            "status": transaction.status,
            "currency": transaction.currency,
            "net_amount": float(net_amount),
            "partner_transaction_id": event.transaction_id,
        },
    )

    await db.commit()
    await db.refresh(transaction)

    logger.info(
        f"Webhook processed: {partner_slug} -> {event.domain} "
        f"({event.event_type}, gross={gross_amount}, net={net_amount})"
    )

    return WebhookResponse(
        success=True,
        transaction_id=transaction.id,
        message=f"Event {event.event_type} recorded successfully"
    )

# ============================================================================
# Awin-Specific Webhook
# ============================================================================

class AwinEvent(BaseModel):
    """Awin network postback format."""
    clickRef: str  # Our yield domain ID or domain name
    transactionId: str
    commission: float
    commissionCurrency: str = "CHF"
    status: str  # "pending", "approved", "declined"
    transactionType: str  # "sale", "lead"


@router.post("/awin/postback", response_model=WebhookResponse)
async def receive_awin_postback(
    event: AwinEvent,
    request: Request,
    db: AsyncSession = Depends(get_db),
    x_awin_signature: Optional[str] = Header(None),
):
    """
    Receive postback from Awin affiliate network.

    Awin sends postbacks for tracked conversions.
    """
    # Verify authentication (strict)
    secret = _get_webhook_secret("awin")
    if not secret:
        raise HTTPException(status_code=503, detail="Webhook secret not configured on server.")
    if not x_awin_signature:
        raise HTTPException(status_code=401, detail="Missing webhook signature.")
    raw = await request.body()
    if not verify_hmac_signature(raw, x_awin_signature, secret):
        raise HTTPException(status_code=401, detail="Invalid webhook signature.")

    # Find domain by click reference
    yield_domain = (
        await db.execute(select(YieldDomain).where(YieldDomain.domain == event.clickRef.lower()))
    ).scalar_one_or_none()

    if not yield_domain:
        # Try to find by ID if clickRef is numeric
        try:
            domain_id = int(event.clickRef)
            yield_domain = (
                await db.execute(select(YieldDomain).where(YieldDomain.id == domain_id))
            ).scalar_one_or_none()
        except ValueError:
            pass

    if not yield_domain:
        logger.warning(f"Awin postback for unknown domain: {event.clickRef}")
        raise HTTPException(status_code=404, detail="Domain not found")

    # Calculate amounts
    gross_amount = Decimal(str(event.commission))
    net_amount = gross_amount * USER_REVENUE_SHARE

    # Map Awin status to our status
    status_map = {
        "pending": "pending",
        "approved": "confirmed",
        "declined": "rejected",
    }
    status = status_map.get(event.status.lower(), "pending")

    # Create or update transaction
    existing_tx = (
        await db.execute(
            select(YieldTransaction).where(
                and_(
                    YieldTransaction.partner_transaction_id == event.transactionId,
                    YieldTransaction.partner_slug.ilike("awin%"),
                )
            )
        )
    ).scalar_one_or_none()

    if existing_tx:
        # Update existing transaction; add revenue only on the first
        # transition to "confirmed" so re-sent postbacks don't double-count.
        was_confirmed = existing_tx.status == "confirmed"
        existing_tx.status = status
        if status == "confirmed" and not was_confirmed:
            existing_tx.confirmed_at = datetime.utcnow()
            yield_domain.total_revenue += net_amount
        transaction_id = existing_tx.id
    else:
        # Create new transaction
        transaction = YieldTransaction(
            yield_domain_id=yield_domain.id,
            event_type="lead" if event.transactionType.lower() == "lead" else "sale",
            partner_slug=f"awin_{yield_domain.active_route or 'unknown'}",
            partner_transaction_id=event.transactionId,
            gross_amount=gross_amount,
            net_amount=net_amount,
            currency=event.commissionCurrency,
            status=status,
            confirmed_at=datetime.utcnow() if status == "confirmed" else None,
        )
        db.add(transaction)

        # Update domain stats
        yield_domain.total_conversions += 1
        yield_domain.last_conversion_at = datetime.utcnow()
        if status == "confirmed":
            yield_domain.total_revenue += net_amount

        await db.flush()
        transaction_id = transaction.id

    await db.commit()

    logger.info(f"Awin postback processed: {event.transactionId} -> {status}")

    return WebhookResponse(
        success=True,
        transaction_id=transaction_id,
        message=f"Awin event processed ({status})"
    )

# ============================================================================
# Transaction Confirmation Endpoint (Admin/Internal)
# ============================================================================

@router.post("/confirm/{transaction_id}", response_model=WebhookResponse)
async def confirm_transaction(
    transaction_id: int,
    db: AsyncSession = Depends(get_db),
    x_internal_key: Optional[str] = Header(None),
):
    """
    Manually confirm a pending transaction.

    Internal endpoint for admin use or automated confirmation.
    """
    internal_key = (settings.internal_api_key or "").strip()
    if not internal_key:
        raise HTTPException(status_code=503, detail="internal_api_key is not configured on server.")
    if x_internal_key != internal_key:
        raise HTTPException(status_code=401, detail="Unauthorized")

    transaction = (
        await db.execute(
            select(YieldTransaction).where(
                and_(
                    YieldTransaction.id == transaction_id,
                    YieldTransaction.status == "pending",
                )
            )
        )
    ).scalar_one_or_none()

    if not transaction:
        raise HTTPException(status_code=404, detail="Transaction not found or not pending")

    # Confirm transaction
    transaction.status = "confirmed"
    transaction.confirmed_at = datetime.utcnow()

    # Update domain revenue
    yield_domain = (
        await db.execute(select(YieldDomain).where(YieldDomain.id == transaction.yield_domain_id))
    ).scalar_one_or_none()

    if yield_domain:
        yield_domain.total_revenue += transaction.net_amount

    await db.commit()

    logger.info(f"Transaction {transaction_id} confirmed manually")

    return WebhookResponse(
        success=True,
        transaction_id=transaction_id,
        message="Transaction confirmed"
    )

# ============================================================================
|
|
||||||
# Batch Transaction Import (for reconciliation)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
class BatchTransactionItem(BaseModel):
|
|
||||||
"""Single transaction in batch import."""
|
|
||||||
domain: str
|
|
||||||
event_type: str
|
|
||||||
partner_slug: str
|
|
||||||
transaction_id: str
|
|
||||||
click_id: Optional[str] = None
|
|
||||||
gross_amount: float
|
|
||||||
currency: str = "CHF"
|
|
||||||
status: str = "confirmed"
|
|
||||||
created_at: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
class BatchImportRequest(BaseModel):
|
|
||||||
"""Batch transaction import request."""
|
|
||||||
transactions: list[BatchTransactionItem]
|
|
||||||
|
|
||||||
|
|
||||||
class BatchImportResponse(BaseModel):
|
|
||||||
"""Batch import response."""
|
|
||||||
success: bool
|
|
||||||
imported: int
|
|
||||||
skipped: int
|
|
||||||
errors: list[str]
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/batch-import", response_model=BatchImportResponse)
|
|
||||||
async def batch_import_transactions(
|
|
||||||
request_data: BatchImportRequest,
|
|
||||||
db: AsyncSession = Depends(get_db),
|
|
||||||
x_internal_key: Optional[str] = Header(None),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Batch import transactions for reconciliation.
|
|
||||||
|
|
||||||
Internal endpoint for importing partner reports.
|
|
||||||
"""
|
|
||||||
internal_key = (settings.internal_api_key or "").strip()
|
|
||||||
if not internal_key:
|
|
||||||
raise HTTPException(status_code=503, detail="internal_api_key is not configured on server.")
|
|
||||||
if x_internal_key != internal_key:
|
|
||||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
|
||||||
|
|
||||||
imported = 0
|
|
||||||
skipped = 0
|
|
||||||
errors = []
|
|
||||||
|
|
||||||
for item in request_data.transactions:
|
|
||||||
try:
|
|
||||||
# Find domain
|
|
||||||
yield_domain = (
|
|
||||||
await db.execute(select(YieldDomain).where(YieldDomain.domain == item.domain.lower()))
|
|
||||||
).scalar_one_or_none()
|
|
||||||
|
|
||||||
if not yield_domain:
|
|
||||||
errors.append(f"Domain not found: {item.domain}")
|
|
||||||
skipped += 1
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Check for duplicate
|
|
||||||
existing = (
|
|
||||||
await db.execute(
|
|
||||||
select(YieldTransaction).where(
|
|
||||||
and_(
|
|
||||||
YieldTransaction.partner_transaction_id == item.transaction_id,
|
|
||||||
YieldTransaction.partner_slug == item.partner_slug,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalar_one_or_none()
|
|
||||||
|
|
||||||
if existing:
|
|
||||||
skipped += 1
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Create transaction
|
|
||||||
gross = Decimal(str(item.gross_amount))
|
|
||||||
net = gross * USER_REVENUE_SHARE
|
|
||||||
|
|
||||||
tx = YieldTransaction(
|
|
||||||
yield_domain_id=yield_domain.id,
|
|
||||||
event_type=item.event_type,
|
|
||||||
partner_slug=item.partner_slug,
|
|
||||||
partner_transaction_id=item.transaction_id,
|
|
||||||
click_id=(item.click_id[:64] if item.click_id else None),
|
|
||||||
gross_amount=gross,
|
|
||||||
net_amount=net,
|
|
||||||
currency=item.currency,
|
|
||||||
status=item.status,
|
|
||||||
confirmed_at=datetime.utcnow() if item.status == "confirmed" else None,
|
|
||||||
)
|
|
||||||
db.add(tx)
|
|
||||||
|
|
||||||
# Update domain stats
|
|
||||||
if item.event_type == "click":
|
|
||||||
yield_domain.total_clicks += 1
|
|
||||||
else:
|
|
||||||
yield_domain.total_conversions += 1
|
|
||||||
|
|
||||||
if item.status == "confirmed":
|
|
||||||
yield_domain.total_revenue += net
|
|
||||||
|
|
||||||
imported += 1
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
errors.append(f"Error importing {item.domain}/{item.transaction_id}: {str(e)}")
|
|
||||||
skipped += 1
|
|
||||||
|
|
||||||
await db.commit()
|
|
||||||
|
|
||||||
return BatchImportResponse(
|
|
||||||
success=len(errors) == 0,
|
|
||||||
imported=imported,
|
|
||||||
skipped=skipped,
|
|
||||||
errors=errors[:10] # Limit error messages
|
|
||||||
)
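For reconciliation runs, the request body mirrors `BatchTransactionItem`. A hedged sketch — the partner slug, amounts, and URL prefix are illustrative values, not taken from a real partner report:

```python
import httpx

payload = {
    "transactions": [
        {
            "domain": "example.com",
            "event_type": "sale",          # anything other than "click" counts as a conversion
            "partner_slug": "sedo",
            "transaction_id": "SEDO-2024-0001",
            "gross_amount": 50.0,
            "currency": "CHF",
            "status": "confirmed",
        }
    ]
}
resp = httpx.post(
    "http://127.0.0.1:8000/api/v1/yield/batch-import",  # prefix/port assumed
    headers={"X-Internal-Key": "<settings.internal_api_key>"},
    json=payload,
)
print(resp.json())  # e.g. {"success": true, "imported": 1, "skipped": 0, "errors": []}
```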


@ -17,11 +17,6 @@ class Settings(BaseSettings):
    # App Settings
    app_name: str = "DomainWatch"
    debug: bool = True
-    site_url: str = "https://pounce.ch"  # Base URL for links in emails/API responses
-
-    # Internal admin operations (server-to-server / cron)
-    # MUST be set in production; used for protected internal endpoints.
-    internal_api_key: str = ""
-
    # Email Settings (optional)
    smtp_host: str = ""
@ -37,99 +32,6 @@ class Settings(BaseSettings):
    check_hour: int = 6
    check_minute: int = 0
    scheduler_check_interval_hours: int = 24
-    enable_scheduler: bool = False  # Run APScheduler jobs in this process (recommend: separate scheduler process)
-
-    # Job Queue / Redis (Phase 2)
-    redis_url: str = ""  # e.g. redis://redis:6379/0
-    enable_job_queue: bool = False
-
-    # Observability (Phase 2)
-    enable_metrics: bool = True
-    metrics_path: str = "/metrics"
-    enable_db_query_metrics: bool = False
-    enable_business_metrics: bool = True
-    business_metrics_days: int = 30
-    business_metrics_cache_seconds: int = 60
-
-    # Ops / Backups (4B)
-    enable_db_backups: bool = False
-    backup_dir: str = "backups"
-    backup_retention_days: int = 14
-
-    # Ops / Alerting (4B) - no Docker required
-    ops_alerts_enabled: bool = False
-    ops_alert_recipients: str = ""  # comma-separated emails; if empty -> CONTACT_EMAIL env fallback
-    ops_alert_cooldown_minutes: int = 180
-    ops_alert_backup_stale_seconds: int = 93600  # ~26h
-
-    # Rate limiting storage (SlowAPI / limits). Use Redis in production.
-    rate_limit_storage_uri: str = "memory://"
-
-    # =================================
-    # Referral rewards / Anti-fraud (3C.2)
-    # =================================
-    referral_rewards_enabled: bool = True
-    referral_rewards_cooldown_days: int = 7
-    referral_rewards_ip_window_days: int = 30
-    referral_rewards_require_ip_hash: bool = True
-
-    # =================================
-    # Yield / Intent Routing
-    # =================================
-    # Comma-separated list of nameservers the user must delegate to for Yield.
-    # Example: "ns1.pounce.ch,ns2.pounce.ch"
-    yield_nameservers: str = "ns1.pounce.ch,ns2.pounce.ch"
-    # CNAME/ALIAS target for simpler DNS setup (provider-dependent).
-    # Example: "yield.pounce.ch"
-    yield_cname_target: str = "yield.pounce.ch"
-
-    @property
-    def yield_nameserver_list(self) -> list[str]:
-        return [
-            ns.strip().lower()
-            for ns in (self.yield_nameservers or "").split(",")
-            if ns.strip()
-        ]
-
-    # Database pooling (PostgreSQL)
-    db_pool_size: int = 5
-    db_max_overflow: int = 10
-    db_pool_timeout: int = 30
-
-    # =================================
-    # External API Credentials
-    # =================================
-
-    # DropCatch API (Official Partner API)
-    # Docs: https://www.dropcatch.com/hiw/dropcatch-api
-    dropcatch_client_id: str = ""
-    dropcatch_client_secret: str = ""
-    dropcatch_api_base: str = "https://api.dropcatch.com"
-
-    # Sedo API (Partner API - XML-RPC)
-    # Docs: https://api.sedo.com/apidocs/v1/
-    # Find your credentials: Sedo.com → Mein Sedo → API-Zugang ("My Sedo → API access")
-    sedo_partner_id: str = ""
-    sedo_sign_key: str = ""
-    sedo_api_base: str = "https://api.sedo.com/api/v1/"
-
-    # Moz API (SEO Data)
-    moz_access_id: str = ""
-    moz_secret_key: str = ""
-
-    # =================================
-    # LLM Gateway (Ollama / Mistral Nemo)
-    # =================================
-    llm_gateway_url: str = "http://127.0.0.1:8812"  # reverse-tunnel default on Pounce server
-    llm_gateway_api_key: str = ""
-    llm_default_model: str = "mistral-nemo:latest"
-
-    # ICANN CZDS (Centralized Zone Data Service)
-    # For downloading gTLD zone files (.com, .net, .org, etc.)
-    # Register at: https://czds.icann.org/
-    czds_username: str = ""
-    czds_password: str = ""
-    czds_data_dir: str = "/tmp/pounce_czds"
-
    class Config:
        env_file = ".env"
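Since `Settings` is a pydantic `BaseSettings` with `env_file = ".env"`, each field above can be overridden from the environment. A small sketch (placeholder values; `DATABASE_URL` is set defensively in case that field has no default):

```python
# Field names map to environment variables case-insensitively.
import os

os.environ["INTERNAL_API_KEY"] = "change-me"
os.environ["YIELD_NAMESERVERS"] = "ns1.pounce.ch, ns2.pounce.ch"
os.environ.setdefault("DATABASE_URL", "sqlite+aiosqlite:///./dev.db")

from app.config import Settings  # the class shown in this hunk

s = Settings()
print(s.internal_api_key)       # -> "change-me"
print(s.yield_nameserver_list)  # -> ["ns1.pounce.ch", "ns2.pounce.ch"]
```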
@ -7,22 +7,11 @@ from app.config import get_settings
settings = get_settings()

# Create async engine
-engine_kwargs = {
-    "echo": settings.debug,
-    "future": True,
-}
-# Production hardening: enable connection pooling for Postgres
-if settings.database_url.startswith("postgresql"):
-    engine_kwargs.update(
-        {
-            "pool_size": settings.db_pool_size,
-            "max_overflow": settings.db_max_overflow,
-            "pool_timeout": settings.db_pool_timeout,
-            "pool_pre_ping": True,
-        }
-    )
-
-engine = create_async_engine(settings.database_url, **engine_kwargs)
+engine = create_async_engine(
+    settings.database_url,
+    echo=settings.debug,
+    future=True,
+)

# Create async session factory
AsyncSessionLocal = async_sessionmaker(
@ -56,7 +45,4 @@ async def init_db():
    """Initialize database tables."""
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
-        # Apply additive migrations (indexes / optional columns) for existing DBs
-        from app.db_migrations import apply_migrations
-        await apply_migrations(conn)

@ -1,371 +0,0 @@
"""
Lightweight, idempotent DB migrations.

This project historically used `Base.metadata.create_all()` for bootstrapping new installs.
That does NOT handle schema evolution on existing databases. For performance-related changes
(indexes, new optional columns), we apply additive migrations on startup.

Important:
- Only additive changes (ADD COLUMN / CREATE INDEX) should live here.
- Operations must be idempotent (safe to run on every startup).
"""

from __future__ import annotations

import logging
from typing import Any

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection

logger = logging.getLogger(__name__)


async def _sqlite_table_exists(conn: AsyncConnection, table: str) -> bool:
    res = await conn.execute(
        text("SELECT 1 FROM sqlite_master WHERE type='table' AND name=:name LIMIT 1"),
        {"name": table},
    )
    return res.scalar() is not None


async def _sqlite_has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    res = await conn.execute(text(f"PRAGMA table_info({table})"))
    rows = res.fetchall()
    # PRAGMA table_info: (cid, name, type, notnull, dflt_value, pk)
    return any(r[1] == column for r in rows)


async def _postgres_table_exists(conn: AsyncConnection, table: str) -> bool:
    # to_regclass returns NULL if the relation does not exist
    res = await conn.execute(text("SELECT to_regclass(:name)"), {"name": table})
    return res.scalar() is not None


async def _postgres_has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    res = await conn.execute(
        text(
            """
            SELECT 1
            FROM information_schema.columns
            WHERE table_schema = current_schema()
              AND table_name = :table
              AND column_name = :column
            LIMIT 1
            """
        ),
        {"table": table, "column": column},
    )
    return res.scalar() is not None


async def _table_exists(conn: AsyncConnection, table: str) -> bool:
    dialect = conn.engine.dialect.name
    if dialect == "sqlite":
        return await _sqlite_table_exists(conn, table)
    return await _postgres_table_exists(conn, table)


async def _has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    dialect = conn.engine.dialect.name
    if dialect == "sqlite":
        return await _sqlite_has_column(conn, table, column)
    return await _postgres_has_column(conn, table, column)


async def apply_migrations(conn: AsyncConnection) -> None:
    """
    Apply idempotent migrations.

    Called on startup after `create_all()` to keep existing DBs up-to-date.
    """
    dialect = conn.engine.dialect.name
    logger.info("DB migrations: starting (dialect=%s)", dialect)

    # ------------------------------------------------------------------
    # 1) domain_auctions.pounce_score (enables DB-level sorting/pagination)
    # ------------------------------------------------------------------
    if await _table_exists(conn, "domain_auctions"):
        if not await _has_column(conn, "domain_auctions", "pounce_score"):
            logger.info("DB migrations: adding column domain_auctions.pounce_score")
            await conn.execute(text("ALTER TABLE domain_auctions ADD COLUMN pounce_score INTEGER"))
        # Index for feed ordering
        await conn.execute(
            text("CREATE INDEX IF NOT EXISTS ix_domain_auctions_pounce_score ON domain_auctions(pounce_score)")
        )

    # ---------------------------------------------------------
    # 2) domain_checks index for history queries (watchlist UI)
    # ---------------------------------------------------------
    if await _table_exists(conn, "domain_checks"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_domain_checks_domain_id_checked_at "
                "ON domain_checks(domain_id, checked_at)"
            )
        )

    # ---------------------------------------------------
    # 3) tld_prices composite index for trend computations
    # ---------------------------------------------------
    if await _table_exists(conn, "tld_prices"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_tld_prices_tld_registrar_recorded_at "
                "ON tld_prices(tld, registrar, recorded_at)"
            )
        )

    # ----------------------------------------------------
    # 4) domain_listings pounce_score index (market sorting)
    # ----------------------------------------------------
    if await _table_exists(conn, "domain_listings"):
        if not await _has_column(conn, "domain_listings", "sold_at"):
            logger.info("DB migrations: adding column domain_listings.sold_at")
            await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_at DATETIME"))
        if not await _has_column(conn, "domain_listings", "sold_reason"):
            logger.info("DB migrations: adding column domain_listings.sold_reason")
            await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_reason VARCHAR(200)"))
        if not await _has_column(conn, "domain_listings", "sold_price"):
            logger.info("DB migrations: adding column domain_listings.sold_price")
            await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_price FLOAT"))
        if not await _has_column(conn, "domain_listings", "sold_currency"):
            logger.info("DB migrations: adding column domain_listings.sold_currency")
            await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_currency VARCHAR(3)"))

        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_domain_listings_pounce_score "
                "ON domain_listings(pounce_score)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_domain_listings_status "
                "ON domain_listings(status)"
            )
        )

    # ----------------------------------------------------
    # 4b) listing_inquiries: deal workflow + audit trail
    # ----------------------------------------------------
    if await _table_exists(conn, "listing_inquiries"):
        if not await _has_column(conn, "listing_inquiries", "buyer_user_id"):
            logger.info("DB migrations: adding column listing_inquiries.buyer_user_id")
            await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN buyer_user_id INTEGER"))
        if not await _has_column(conn, "listing_inquiries", "closed_at"):
            logger.info("DB migrations: adding column listing_inquiries.closed_at")
            await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN closed_at DATETIME"))
        if not await _has_column(conn, "listing_inquiries", "closed_reason"):
            logger.info("DB migrations: adding column listing_inquiries.closed_reason")
            await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN closed_reason VARCHAR(200)"))

        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_listing_created "
                "ON listing_inquiries(listing_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_listing_status "
                "ON listing_inquiries(listing_id, status)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_buyer_user "
                "ON listing_inquiries(buyer_user_id)"
            )
        )

    # The table itself is created by `Base.metadata.create_all()` on startup.
    # Here we only add indexes (idempotent) for existing DBs.
    if await _table_exists(conn, "listing_inquiry_events"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_events_inquiry_created "
                "ON listing_inquiry_events(inquiry_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_events_listing_created "
                "ON listing_inquiry_events(listing_id, created_at)"
            )
        )

    if await _table_exists(conn, "listing_inquiry_messages"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_inquiry_created "
                "ON listing_inquiry_messages(inquiry_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_listing_created "
                "ON listing_inquiry_messages(listing_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_sender_created "
                "ON listing_inquiry_messages(sender_user_id, created_at)"
            )
        )

    # ----------------------------------------------------
    # 5) Yield tables indexes
    # ----------------------------------------------------
    if await _table_exists(conn, "yield_domains"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_domains_user_status "
                "ON yield_domains(user_id, status)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_domains_domain "
                "ON yield_domains(domain)"
            )
        )

        if not await _has_column(conn, "yield_domains", "connected_at"):
            logger.info("DB migrations: adding column yield_domains.connected_at")
            await conn.execute(text("ALTER TABLE yield_domains ADD COLUMN connected_at DATETIME"))

    if await _table_exists(conn, "yield_transactions"):
        if not await _has_column(conn, "yield_transactions", "click_id"):
            logger.info("DB migrations: adding column yield_transactions.click_id")
            await conn.execute(text("ALTER TABLE yield_transactions ADD COLUMN click_id VARCHAR(64)"))
            await conn.execute(text("CREATE INDEX IF NOT EXISTS ix_yield_transactions_click_id ON yield_transactions(click_id)"))
        if not await _has_column(conn, "yield_transactions", "destination_url"):
            logger.info("DB migrations: adding column yield_transactions.destination_url")
            await conn.execute(text("ALTER TABLE yield_transactions ADD COLUMN destination_url TEXT"))
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_tx_domain_created "
                "ON yield_transactions(yield_domain_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_tx_status_created "
                "ON yield_transactions(status, created_at)"
            )
        )

    if await _table_exists(conn, "yield_payouts"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_payouts_user_status "
                "ON yield_payouts(user_id, status)"
            )
        )

    # ----------------------------------------------------
    # 6) Referral rewards: subscriptions.referral_bonus_domains (3C.2)
    # ----------------------------------------------------
    if await _table_exists(conn, "subscriptions"):
        if not await _has_column(conn, "subscriptions", "referral_bonus_domains"):
            logger.info("DB migrations: adding column subscriptions.referral_bonus_domains")
            await conn.execute(
                text(
                    "ALTER TABLE subscriptions "
                    "ADD COLUMN referral_bonus_domains INTEGER NOT NULL DEFAULT 0"
                )
            )

    # ----------------------------------------------------
    # 6a) Telemetry events indexes
    # ----------------------------------------------------
    if await _table_exists(conn, "telemetry_events"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_telemetry_event_name_created "
                "ON telemetry_events(event_name, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_telemetry_user_created "
                "ON telemetry_events(user_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_telemetry_listing_created "
                "ON telemetry_events(listing_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_telemetry_yield_created "
                "ON telemetry_events(yield_domain_id, created_at)"
            )
        )

    # ----------------------------------------------------
    # 6b) Ops alert events (persisted cooldown + history)
    # ----------------------------------------------------
    # NOTE: Table is created by Base.metadata.create_all() for new installs.
    # Here we ensure indexes exist for older DBs.
    if await _table_exists(conn, "ops_alert_events"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_ops_alert_key_created "
                "ON ops_alert_events(alert_key, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_ops_alert_status_created "
                "ON ops_alert_events(status, created_at)"
            )
        )

    # ----------------------------------------------------
    # 7) User referral tracking columns
    # ----------------------------------------------------
    if await _table_exists(conn, "users"):
        if not await _has_column(conn, "users", "referred_by_user_id"):
            logger.info("DB migrations: adding column users.referred_by_user_id")
            await conn.execute(text("ALTER TABLE users ADD COLUMN referred_by_user_id INTEGER"))
        if not await _has_column(conn, "users", "referred_by_domain"):
            logger.info("DB migrations: adding column users.referred_by_domain")
            await conn.execute(text("ALTER TABLE users ADD COLUMN referred_by_domain VARCHAR(255)"))
        if not await _has_column(conn, "users", "referral_code"):
            logger.info("DB migrations: adding column users.referral_code")
            await conn.execute(text("ALTER TABLE users ADD COLUMN referral_code VARCHAR(100)"))
        if not await _has_column(conn, "users", "invite_code"):
            logger.info("DB migrations: adding column users.invite_code")
            await conn.execute(text("ALTER TABLE users ADD COLUMN invite_code VARCHAR(32)"))

        # Unique index for invite_code (SQLite + Postgres)
        await conn.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_users_invite_code ON users(invite_code)"))

    # ----------------------------------------------------
    # 8) Portfolio DNS verification columns
    # ----------------------------------------------------
    if await _table_exists(conn, "portfolio_domains"):
        if not await _has_column(conn, "portfolio_domains", "is_dns_verified"):
            logger.info("DB migrations: adding column portfolio_domains.is_dns_verified")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN is_dns_verified BOOLEAN DEFAULT 0"))
        if not await _has_column(conn, "portfolio_domains", "verification_status"):
            logger.info("DB migrations: adding column portfolio_domains.verification_status")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN verification_status VARCHAR(50) DEFAULT 'unverified'"))
        if not await _has_column(conn, "portfolio_domains", "verification_code"):
            logger.info("DB migrations: adding column portfolio_domains.verification_code")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN verification_code VARCHAR(100)"))
        if not await _has_column(conn, "portfolio_domains", "verification_started_at"):
            logger.info("DB migrations: adding column portfolio_domains.verification_started_at")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN verification_started_at DATETIME"))
        if not await _has_column(conn, "portfolio_domains", "verified_at"):
            logger.info("DB migrations: adding column portfolio_domains.verified_at")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN verified_at DATETIME"))

    logger.info("DB migrations: done")
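Because every operation in `apply_migrations()` is guarded by an existence check or uses `IF NOT EXISTS`, running it repeatedly must be a no-op. A minimal smoke test for that property, assuming `aiosqlite` is installed (the in-memory URL and script shape are illustrative):

```python
import asyncio

from sqlalchemy.ext.asyncio import create_async_engine

from app.database import Base
from app.db_migrations import apply_migrations


async def main() -> None:
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
        await apply_migrations(conn)  # first run: may add columns/indexes
        await apply_migrations(conn)  # second run: must not raise

asyncio.run(main())
```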
@ -1,3 +0,0 @@
"""Async job queue (ARQ / Redis)."""

@ -1,38 +0,0 @@
"""ARQ client helper to enqueue jobs."""

from __future__ import annotations

from typing import Any

from arq.connections import RedisSettings, create_pool

from app.config import get_settings

_pool = None


async def _get_pool():
    global _pool
    if _pool is not None:
        return _pool

    settings = get_settings()
    if not settings.redis_url:
        raise RuntimeError("redis_url is not configured (set REDIS_URL)")

    _pool = await create_pool(RedisSettings.from_dsn(settings.redis_url))
    return _pool


async def enqueue_job(name: str, *args: Any, **kwargs: Any) -> str:
    """
    Enqueue a job by name. Returns the job id.
    """
    pool = await _get_pool()
    job = await pool.enqueue_job(name, *args, **kwargs)
    # job may be None if enqueue failed
    if job is None:
        raise RuntimeError(f"Failed to enqueue job: {name}")
    return job.job_id
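Usage sketch: any async context (e.g. a request handler) can hand work to the worker by name; the names correspond to the functions registered in `WorkerSettings.functions` further below:

```python
from app.jobs.client import enqueue_job

# Inside any coroutine:
job_id = await enqueue_job("scrape_auctions")
```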
@ -1,72 +0,0 @@
"""Job functions executed by the ARQ worker."""

from __future__ import annotations

from datetime import datetime

from sqlalchemy import select

from app.database import AsyncSessionLocal, init_db
from app.models.auction import DomainAuction
from app.services.auction_scraper import auction_scraper
from app.services.pounce_score import calculate_pounce_score_v2
from app.services.tld_scraper.aggregator import tld_aggregator


async def scrape_auctions(ctx) -> dict:  # arq passes ctx
    """Scrape auctions from all platforms and store results."""
    await init_db()
    async with AsyncSessionLocal() as db:
        result = await auction_scraper.scrape_all_platforms(db)
        await db.commit()
    return {"status": "ok", "result": result, "timestamp": datetime.utcnow().isoformat()}


async def scrape_tld_prices(ctx) -> dict:
    """Scrape TLD prices from all sources and store results."""
    await init_db()
    async with AsyncSessionLocal() as db:
        result = await tld_aggregator.run_scrape(db)
        await db.commit()
    return {
        "status": "ok",
        "tlds_scraped": result.tlds_scraped,
        "prices_saved": result.prices_saved,
        "sources_succeeded": result.sources_succeeded,
        "sources_attempted": result.sources_attempted,
        "timestamp": datetime.utcnow().isoformat(),
    }


async def backfill_auction_scores(ctx, *, limit: int = 5000) -> dict:
    """
    Backfill DomainAuction.pounce_score for legacy rows.

    Safe to run multiple times; only fills NULL scores.
    """
    await init_db()
    updated = 0
    async with AsyncSessionLocal() as db:
        rows = (
            await db.execute(
                select(DomainAuction)
                .where(DomainAuction.pounce_score == None)  # noqa: E711
                .limit(limit)
            )
        ).scalars().all()

        for auction in rows:
            auction.pounce_score = calculate_pounce_score_v2(
                auction.domain,
                auction.tld,
                num_bids=auction.num_bids or 0,
                age_years=auction.age_years or 0,
                is_pounce=False,
            )
            updated += 1

        await db.commit()

    return {"status": "ok", "updated": updated, "timestamp": datetime.utcnow().isoformat()}
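Keyword arguments pass straight through `enqueue_job` to the task, so the backfill can be throttled per run:

```python
from app.jobs.client import enqueue_job

# Only fills NULL scores, so re-running with small batches is safe.
job_id = await enqueue_job("backfill_auction_scores", limit=1000)
```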
@ -1,26 +0,0 @@
"""ARQ worker configuration."""

from __future__ import annotations

from arq.connections import RedisSettings

from app.config import get_settings
from app.jobs import tasks


class WorkerSettings:
    """
    Run with:
        arq app.jobs.worker.WorkerSettings
    """

    settings = get_settings()

    redis_settings = RedisSettings.from_dsn(settings.redis_url or "redis://localhost:6379/0")
    functions = [
        tasks.scrape_auctions,
        tasks.scrape_tld_prices,
        tasks.backfill_auction_scores,
    ]
@ -18,7 +18,6 @@ from app.api import api_router
from app.config import get_settings
from app.database import init_db
from app.scheduler import start_scheduler, stop_scheduler
-from app.observability.metrics import instrument_app

# Configure logging
logging.basicConfig(
@ -33,7 +32,7 @@ settings = get_settings()
limiter = Limiter(
    key_func=get_remote_address,
    default_limits=["200/minute"],  # Global default
-    storage_uri=settings.rate_limit_storage_uri,  # Use Redis in production
+    storage_uri="memory://",  # In-memory storage (use Redis in production)
)

@ -47,17 +46,13 @@ async def lifespan(app: FastAPI):
    await init_db()
    logger.info("Database initialized")

-    # Start scheduler (optional - recommended: run in separate process/container)
-    if settings.enable_scheduler:
-        start_scheduler()
-        logger.info("Scheduler started")
-    else:
-        logger.info("Scheduler disabled (ENABLE_SCHEDULER=false)")
+    # Start scheduler
+    start_scheduler()
+    logger.info("Scheduler started")

    yield

    # Shutdown
-    if settings.enable_scheduler:
-        stop_scheduler()
+    stop_scheduler()
    logger.info("Application shutdown complete")

@ -79,8 +74,8 @@ Domain availability monitoring and portfolio management service.

## Authentication

-Most endpoints require authentication via HttpOnly session cookie (recommended).
-Login: POST /api/v1/auth/login
+Most endpoints require authentication via Bearer token.
+Get a token via POST /api/v1/auth/login

## Rate Limits

@ -90,7 +85,7 @@ Login: POST /api/v1/auth/login

## Support

-For API issues, contact hello@pounce.ch
+For API issues, contact support@pounce.ch
""",
    version="1.0.0",
    lifespan=lifespan,
@ -99,10 +94,6 @@ For API issues, contact hello@pounce.ch
    redoc_url="/redoc",
)

-# Observability (Prometheus metrics)
-if settings.enable_metrics:
-    instrument_app(app, metrics_path=settings.metrics_path, enable_db_metrics=settings.enable_db_query_metrics)
-
# Add rate limiter to app state
app.state.limiter = limiter

@ -118,15 +109,14 @@ async def rate_limit_handler(request: Request, exc: RateLimitExceeded):
    },
)

-# Get allowed origins (env overrides settings)
-origins_raw = (
-    os.getenv("ALLOWED_ORIGINS", "").strip()
-    or os.getenv("CORS_ORIGINS", "").strip()
-    or (settings.cors_origins or "").strip()
-)
-ALLOWED_ORIGINS = [o.strip() for o in origins_raw.split(",") if o.strip()]
-if not ALLOWED_ORIGINS:
-    ALLOWED_ORIGINS = ["http://localhost:3000", "http://127.0.0.1:3000"]
+# Get allowed origins from environment
+ALLOWED_ORIGINS = os.getenv("ALLOWED_ORIGINS", "").split(",")
+if not ALLOWED_ORIGINS or ALLOWED_ORIGINS == [""]:
+    ALLOWED_ORIGINS = [
+        "http://localhost:3000",
+        "http://127.0.0.1:3000",
+        "http://10.42.0.73:3000",
+    ]

# Add production origins
SITE_URL = os.getenv("SITE_URL", "")
@ -168,22 +158,6 @@ async def health_check():
    }


-@app.get("/api/health")
-async def health_check_api():
-    """
-    Health check behind Nginx `/api` proxy.
-
-    Nginx routes `/api/*` to the backend, so `https://pounce.ch/api/health` must exist.
-    """
-    return await health_check()
-
-
-@app.get("/api/v1/health")
-async def health_check_api_v1():
-    """Health check behind `/api/v1` prefix (convenience)."""
-    return await health_check()
-
-
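On the removed side, all health aliases should answer identically; a quick hedged probe (the local base URL and the bare `/health` root path are assumptions, only the `/api/health` and `/api/v1/health` paths come from the hunk above):

```python
import httpx

base = "http://127.0.0.1:8000"  # assumed dev address
for path in ("/health", "/api/health", "/api/v1/health"):
    r = httpx.get(base + path)
    print(path, r.status_code)
```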
# Rate-limited endpoints - apply specific limits to sensitive routes
from fastapi import Depends

@ -9,15 +9,6 @@ from app.models.newsletter import NewsletterSubscriber
from app.models.price_alert import PriceAlert
from app.models.admin_log import AdminActivityLog
from app.models.blog import BlogPost
-from app.models.listing import DomainListing, ListingInquiry, ListingView
-from app.models.sniper_alert import SniperAlert, SniperAlertMatch
-from app.models.seo_data import DomainSEOData
-from app.models.yield_domain import YieldDomain, YieldTransaction, YieldPayout, AffiliatePartner
-from app.models.telemetry import TelemetryEvent
-from app.models.ops_alert import OpsAlertEvent
-from app.models.domain_analysis_cache import DomainAnalysisCache
-from app.models.zone_file import ZoneSnapshot, DroppedDomain
-from app.models.llm_artifact import LLMArtifact

__all__ = [
    "User",
@ -34,28 +25,4 @@ __all__ = [
    "PriceAlert",
    "AdminActivityLog",
    "BlogPost",
-    # New: For Sale / Marketplace
-    "DomainListing",
-    "ListingInquiry",
-    "ListingView",
-    # New: Sniper Alerts
-    "SniperAlert",
-    "SniperAlertMatch",
-    # New: SEO Data (Tycoon feature)
-    "DomainSEOData",
-    # New: Yield / Intent Routing
-    "YieldDomain",
-    "YieldTransaction",
-    "YieldPayout",
-    "AffiliatePartner",
-    # New: Telemetry (events)
-    "TelemetryEvent",
-    "OpsAlertEvent",
-    # New: Analyze cache
-    "DomainAnalysisCache",
-    # New: Zone file drops
-    "ZoneSnapshot",
-    "DroppedDomain",
-    # New: LLM artifacts / cache
-    "LLMArtifact",
]
@ -53,7 +53,6 @@ class DomainAuction(Base):
    age_years: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    backlinks: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    domain_authority: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
-    pounce_score: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True)

    # Scraping metadata
    scraped_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
@ -63,8 +62,7 @@ class DomainAuction(Base):

    # Indexes for common queries
    __table_args__ = (
-        # Enforce de-duplication at the database level.
-        Index('ux_auctions_platform_domain', 'platform', 'domain', unique=True),
+        Index('ix_auctions_platform_domain', 'platform', 'domain'),
        Index('ix_auctions_end_time_active', 'end_time', 'is_active'),
        Index('ix_auctions_tld_bid', 'tld', 'current_bid'),
    )
@ -2,7 +2,7 @@
from datetime import datetime
from enum import Enum
from sqlalchemy import String, Boolean, DateTime, ForeignKey, Text, Enum as SQLEnum
-from sqlalchemy.orm import Mapped, mapped_column, relationship, backref
+from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base

@ -78,50 +78,3 @@ class DomainCheck(Base):
    def __repr__(self) -> str:
        return f"<DomainCheck {self.domain_id} at {self.checked_at}>"
-
-
-class HealthStatus(str, Enum):
-    """Domain health status levels."""
-    HEALTHY = "healthy"
-    WEAKENING = "weakening"
-    PARKED = "parked"
-    CRITICAL = "critical"
-    UNKNOWN = "unknown"
-
-
-class DomainHealthCache(Base):
-    """
-    Cached health check results for domains.
-
-    Updated daily by the scheduler to provide instant health status
-    without needing manual checks.
-    """
-
-    __tablename__ = "domain_health_cache"
-
-    id: Mapped[int] = mapped_column(primary_key=True, index=True)
-    domain_id: Mapped[int] = mapped_column(ForeignKey("domains.id"), unique=True, nullable=False)
-
-    # Health status
-    status: Mapped[str] = mapped_column(String(20), default="unknown")
-    score: Mapped[int] = mapped_column(default=0)
-
-    # Signals (JSON array as text)
-    signals: Mapped[str | None] = mapped_column(Text, nullable=True)
-
-    # Layer data (JSON as text for flexibility)
-    dns_data: Mapped[str | None] = mapped_column(Text, nullable=True)
-    http_data: Mapped[str | None] = mapped_column(Text, nullable=True)
-    ssl_data: Mapped[str | None] = mapped_column(Text, nullable=True)
-
-    # Timestamp
-    checked_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
-
-    # Relationship - cascade delete when domain is deleted
-    domain: Mapped["Domain"] = relationship(
-        "Domain",
-        backref=backref("health_cache", cascade="all, delete-orphan", uselist=False)
-    )
-
-    def __repr__(self) -> str:
-        return f"<DomainHealthCache {self.domain_id} status={self.status}>"
@ -1,25 +0,0 @@
"""
Domain analysis cache (Phase 2 Diligence).

We store computed JSON to avoid repeated RDAP/DNS/HTTP checks on each click.
"""

from __future__ import annotations

from datetime import datetime

from sqlalchemy import DateTime, Integer, String, Text
from sqlalchemy.orm import Mapped, mapped_column

from app.database import Base


class DomainAnalysisCache(Base):
    __tablename__ = "domain_analysis_cache"

    id: Mapped[int] = mapped_column(primary_key=True, index=True)
    domain: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)
    payload_json: Mapped[str] = mapped_column(Text, nullable=False)
    computed_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True)
    ttl_seconds: Mapped[int] = mapped_column(Integer, default=3600)
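A sketch of how a `ttl_seconds`-based entry would be validated before reuse (the helper name is illustrative, not from the codebase):

```python
from datetime import datetime, timedelta

def is_fresh(row: "DomainAnalysisCache") -> bool:
    """True while the cached payload is younger than its TTL."""
    return datetime.utcnow() - row.computed_at < timedelta(seconds=row.ttl_seconds)
```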
@ -1,267 +0,0 @@
|
|||||||
"""
|
|
||||||
Domain Listing models for "Pounce For Sale" feature.
|
|
||||||
|
|
||||||
This implements the "Micro-Marktplatz" strategy from analysis_3.md:
|
|
||||||
- Users can create professional landing pages for domains they want to sell
|
|
||||||
- Buyers can contact sellers through Pounce
|
|
||||||
- DNS verification ensures only real owners can list domains
|
|
||||||
|
|
||||||
DATABASE TABLES TO CREATE:
|
|
||||||
1. domain_listings - Main listing table
|
|
||||||
2. listing_inquiries - Contact requests from potential buyers
|
|
||||||
3. listing_views - Track views for analytics
|
|
||||||
|
|
||||||
Run migrations: alembic upgrade head
|
|
||||||
"""
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Optional, List
|
|
||||||
from sqlalchemy import String, DateTime, Float, Integer, Text, ForeignKey, Boolean, Enum as SQLEnum
|
|
||||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
|
||||||
import enum
|
|
||||||
|
|
||||||
from app.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class ListingStatus(str, enum.Enum):
|
|
||||||
"""Status of a domain listing."""
|
|
||||||
DRAFT = "draft" # Not yet published
|
|
||||||
PENDING_VERIFICATION = "pending_verification" # Awaiting DNS verification
|
|
||||||
ACTIVE = "active" # Live and visible
|
|
||||||
SOLD = "sold" # Marked as sold
|
|
||||||
EXPIRED = "expired" # Listing expired
|
|
||||||
SUSPENDED = "suspended" # Suspended by admin
|
|
||||||
|
|
||||||
|
|
||||||
class VerificationStatus(str, enum.Enum):
|
|
||||||
"""DNS verification status."""
|
|
||||||
NOT_STARTED = "not_started"
|
|
||||||
PENDING = "pending"
|
|
||||||
VERIFIED = "verified"
|
|
||||||
FAILED = "failed"
|
|
||||||
|
|
||||||
|
|
||||||
class DomainListing(Base):
|
|
||||||
"""
|
|
||||||
Domain listing for the Pounce marketplace.
|
|
||||||
|
|
||||||
Users can list their domains for sale with a professional landing page.
|
|
||||||
URL: pounce.ch/buy/{slug}
|
|
||||||
|
|
||||||
Features:
|
|
||||||
- DNS verification for ownership proof
|
|
||||||
- Professional landing page with valuation
|
|
||||||
- Contact form for buyers
|
|
||||||
- Analytics (views, inquiries)
|
|
||||||
|
|
||||||
From analysis_3.md:
|
|
||||||
"Ein User (Trader/Tycoon) kann für seine Domains mit einem Klick
|
|
||||||
eine schicke Verkaufsseite erstellen."
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "domain_listings"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True, nullable=False)
|
|
||||||
|
|
||||||
# Domain info
|
|
||||||
domain: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
|
|
||||||
slug: Mapped[str] = mapped_column(String(300), unique=True, nullable=False, index=True)
|
|
||||||
|
|
||||||
# Listing details
|
|
||||||
title: Mapped[Optional[str]] = mapped_column(String(200), nullable=True) # Custom headline
|
|
||||||
description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
|
|
||||||
|
|
||||||
# Pricing
|
|
||||||
asking_price: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
|
||||||
min_offer: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
|
||||||
currency: Mapped[str] = mapped_column(String(3), default="USD")
|
|
||||||
price_type: Mapped[str] = mapped_column(String(20), default="fixed") # fixed, negotiable, make_offer
|
|
||||||
|
|
||||||
# Pounce valuation (calculated)
|
|
||||||
pounce_score: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # 0-100
|
|
||||||
estimated_value: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
|
||||||
|
|
||||||
# Verification (from analysis_3.md - Säule 2: Asset Verification)
|
|
||||||
verification_status: Mapped[str] = mapped_column(
|
|
||||||
String(20),
|
|
||||||
default=VerificationStatus.NOT_STARTED.value
|
|
||||||
)
|
|
||||||
verification_code: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
|
|
||||||
verified_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Status
|
|
||||||
status: Mapped[str] = mapped_column(String(30), default=ListingStatus.DRAFT.value, index=True)
|
|
||||||
sold_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
sold_reason: Mapped[Optional[str]] = mapped_column(String(200), nullable=True)
|
|
||||||
sold_price: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
|
||||||
sold_currency: Mapped[Optional[str]] = mapped_column(String(3), nullable=True)
|
|
||||||
|
|
||||||
# Features
|
|
||||||
show_valuation: Mapped[bool] = mapped_column(Boolean, default=True)
|
|
||||||
allow_offers: Mapped[bool] = mapped_column(Boolean, default=True)
|
|
||||||
featured: Mapped[bool] = mapped_column(Boolean, default=False) # Premium placement
|
|
||||||
|
|
||||||
# Analytics
|
|
||||||
view_count: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
inquiry_count: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
|
|
||||||
# Expiry
|
|
||||||
expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Timestamps
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
|
||||||
published_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
user: Mapped["User"] = relationship("User", back_populates="listings")
|
|
||||||
inquiries: Mapped[List["ListingInquiry"]] = relationship(
|
|
||||||
"ListingInquiry", back_populates="listing", cascade="all, delete-orphan"
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<DomainListing {self.domain} ({self.status})>"
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_verified(self) -> bool:
|
|
||||||
return self.verification_status == VerificationStatus.VERIFIED.value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_active(self) -> bool:
|
|
||||||
return self.status == ListingStatus.ACTIVE.value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def public_url(self) -> str:
|
|
||||||
return f"/buy/{self.slug}"
|
|
||||||
|
|
||||||
|
|
||||||
class ListingInquiry(Base):
|
|
||||||
"""
|
|
||||||
Contact request from a potential buyer.
|
|
||||||
|
|
||||||
From analysis_3.md:
|
|
||||||
"Ein einfaches Kontaktformular, das die Anfrage direkt an den User leitet."
|
|
||||||
|
|
||||||
Security (from analysis_3.md - Säule 3):
|
|
||||||
- Keyword blocking for phishing prevention
|
|
||||||
- Rate limiting per IP/user
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "listing_inquiries"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
listing_id: Mapped[int] = mapped_column(ForeignKey("domain_listings.id"), index=True, nullable=False)
|
|
||||||
buyer_user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), index=True, nullable=True)
|
|
||||||
|
|
||||||
# Inquirer info
|
|
||||||
name: Mapped[str] = mapped_column(String(100), nullable=False)
|
|
||||||
email: Mapped[str] = mapped_column(String(255), nullable=False)
|
|
||||||
phone: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
|
|
||||||
company: Mapped[Optional[str]] = mapped_column(String(200), nullable=True)
|
|
||||||
|
|
||||||
# Message
|
|
||||||
message: Mapped[str] = mapped_column(Text, nullable=False)
|
|
||||||
offer_amount: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
|
||||||
|
|
||||||
# Status
|
|
||||||
status: Mapped[str] = mapped_column(String(20), default="new") # new, read, replied, closed, spam
|
|
||||||
closed_reason: Mapped[Optional[str]] = mapped_column(String(200), nullable=True)
|
|
||||||
|
|
||||||
# Tracking
|
|
||||||
ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
|
|
||||||
user_agent: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
|
|
||||||
# Timestamps
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
read_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
replied_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
closed_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
listing: Mapped["DomainListing"] = relationship("DomainListing", back_populates="inquiries")
|
|
||||||
messages: Mapped[List["ListingInquiryMessage"]] = relationship(
|
|
||||||
"ListingInquiryMessage", back_populates="inquiry", cascade="all, delete-orphan"
|
|
||||||
)
|
|
||||||
events: Mapped[List["ListingInquiryEvent"]] = relationship(
|
|
||||||
"ListingInquiryEvent", back_populates="inquiry", cascade="all, delete-orphan"
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<ListingInquiry from {self.email} for listing #{self.listing_id}>"
|
|
||||||
|
|
||||||
|
|
||||||
class ListingInquiryEvent(Base):
|
|
||||||
"""
|
|
||||||
Audit trail for inquiry status changes.
|
|
||||||
|
|
||||||
This is the minimal “deal system” log:
|
|
||||||
- who changed what status
|
|
||||||
- when it happened
|
|
||||||
- optional reason (close/spam)
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "listing_inquiry_events"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
inquiry_id: Mapped[int] = mapped_column(ForeignKey("listing_inquiries.id"), index=True, nullable=False)
|
|
||||||
listing_id: Mapped[int] = mapped_column(ForeignKey("domain_listings.id"), index=True, nullable=False)
|
|
||||||
actor_user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True, nullable=False)
|
|
||||||
|
|
||||||
old_status: Mapped[Optional[str]] = mapped_column(String(20), nullable=True)
|
|
||||||
new_status: Mapped[str] = mapped_column(String(20), nullable=False)
|
|
||||||
reason: Mapped[Optional[str]] = mapped_column(String(200), nullable=True)
|
|
||||||
|
|
||||||
ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
|
|
||||||
user_agent: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True)
|
|
||||||
|
|
||||||
inquiry: Mapped["ListingInquiry"] = relationship("ListingInquiry", back_populates="events")
|
|
||||||
|
|
||||||
|
|
||||||
class ListingInquiryMessage(Base):
|
|
||||||
"""
|
|
||||||
Thread messages for listing inquiries (in-product negotiation).
|
|
||||||
|
|
||||||
- Buyer sends messages from their account
|
|
||||||
- Seller replies from Terminal
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "listing_inquiry_messages"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
inquiry_id: Mapped[int] = mapped_column(ForeignKey("listing_inquiries.id"), index=True, nullable=False)
|
|
||||||
listing_id: Mapped[int] = mapped_column(ForeignKey("domain_listings.id"), index=True, nullable=False)
|
|
||||||
|
|
||||||
sender_user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True, nullable=False)
|
|
||||||
body: Mapped[str] = mapped_column(Text, nullable=False)
|
|
||||||
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True)
|
|
||||||
|
|
||||||
inquiry: Mapped["ListingInquiry"] = relationship("ListingInquiry", back_populates="messages")
|
|
||||||
|
|
||||||
|
|
||||||
class ListingView(Base):
|
|
||||||
"""
|
|
||||||
Track listing page views for analytics.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "listing_views"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
listing_id: Mapped[int] = mapped_column(ForeignKey("domain_listings.id"), index=True, nullable=False)
|
|
||||||
|
|
||||||
# Visitor info
|
|
||||||
ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True)
|
|
||||||
user_agent: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
referrer: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
|
|
||||||
# User (if logged in)
|
|
||||||
user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), nullable=True)
|
|
||||||
|
|
||||||
# Timestamp
|
|
||||||
viewed_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<ListingView #{self.listing_id} at {self.viewed_at}>"
|
|
||||||
|
|
||||||
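For orientation, a minimal sketch of how these tables fit together when a seller closes an inquiry: one status write on the inquiry row plus one append-only `ListingInquiryEvent` for the audit trail. The module path, session handling, and the status vocabulary (`open`/`closed`) are assumptions, not taken from this diff.

```python
from datetime import datetime

from app.models.listing import ListingInquiry, ListingInquiryEvent  # assumed module path


async def close_inquiry(db, inquiry: ListingInquiry, actor_user_id: int, reason: str | None = None):
    old_status = inquiry.status  # assumes the inquiry row carries a status column
    inquiry.status = "closed"
    # Append-only audit record; the inquiry row itself stays mutable.
    db.add(ListingInquiryEvent(
        inquiry_id=inquiry.id,
        listing_id=inquiry.listing_id,
        actor_user_id=actor_user_id,
        old_status=old_status,
        new_status="closed",
        reason=reason,
        created_at=datetime.utcnow(),
    ))
    await db.commit()
```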
@@ -1,52 +0,0 @@
"""
|
|
||||||
LLM artifacts / cache.
|
|
||||||
|
|
||||||
Stores strict-JSON outputs from our internal LLM gateway for:
|
|
||||||
- Vision (business concept + buyer matchmaker)
|
|
||||||
- Yield landing page configs
|
|
||||||
|
|
||||||
Important:
|
|
||||||
- Tier gating is enforced at the API layer; never expose artifacts to Scout users.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from sqlalchemy import DateTime, Index, Integer, String, Text, ForeignKey
|
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
|
||||||
|
|
||||||
from app.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class LLMArtifact(Base):
|
|
||||||
__tablename__ = "llm_artifacts"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
|
|
||||||
# Optional: who generated it (for auditing). Not used for access control.
|
|
||||||
user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), nullable=True, index=True)
|
|
||||||
|
|
||||||
# What this artifact represents.
|
|
||||||
# Examples: "vision_v1", "yield_landing_v1"
|
|
||||||
kind: Mapped[str] = mapped_column(String(50), nullable=False, index=True)
|
|
||||||
|
|
||||||
# Domain this artifact belongs to (lowercase).
|
|
||||||
domain: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
|
|
||||||
|
|
||||||
# Prompt/versioning for safe cache invalidation
|
|
||||||
prompt_version: Mapped[str] = mapped_column(String(50), nullable=False, index=True)
|
|
||||||
model: Mapped[str] = mapped_column(String(100), nullable=False)
|
|
||||||
|
|
||||||
# Strict JSON payload (string)
|
|
||||||
payload_json: Mapped[str] = mapped_column(Text, nullable=False)
|
|
||||||
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True)
|
|
||||||
updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
|
||||||
expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True, index=True)
|
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
Index("ix_llm_artifacts_kind_domain_prompt", "kind", "domain", "prompt_version"),
|
|
||||||
)
|
|
||||||
|
|
||||||
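The composite index suggests lookups keyed by `(kind, domain, prompt_version)`; a sketch of that read path, with the import path and session handling assumed:

```python
from datetime import datetime

from sqlalchemy import select

from app.models.llm_artifact import LLMArtifact  # assumed module path


async def get_cached_artifact(db, kind: str, domain: str, prompt_version: str) -> LLMArtifact | None:
    row = (
        await db.execute(
            select(LLMArtifact).where(
                LLMArtifact.kind == kind,
                LLMArtifact.domain == domain.lower(),  # column comment says domains are stored lowercase
                LLMArtifact.prompt_version == prompt_version,
            )
        )
    ).scalar_one_or_none()
    if row is not None and row.expires_at is not None and row.expires_at <= datetime.utcnow():
        return None  # treat expired entries as a cache miss; the caller regenerates
    return row
```

Bumping `prompt_version` then behaves as the safe cache invalidation the column comment describes.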
@@ -1,40 +0,0 @@
from __future__ import annotations

from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, Index, Integer, String, Text
from sqlalchemy.orm import Mapped, mapped_column

from app.database import Base


class OpsAlertEvent(Base):
    """
    Persisted ops alert events.

    Used for:
    - cooldown across process restarts
    - audit/history in admin UI
    """

    __tablename__ = "ops_alert_events"

    id: Mapped[int] = mapped_column(primary_key=True, index=True)
    alert_key: Mapped[str] = mapped_column(String(80), nullable=False, index=True)
    severity: Mapped[str] = mapped_column(String(10), nullable=False, index=True)  # "warn" | "page"
    title: Mapped[str] = mapped_column(String(200), nullable=False)
    detail: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # "sent" | "skipped" | "error"
    status: Mapped[str] = mapped_column(String(20), nullable=False, index=True)
    recipients: Mapped[Optional[str]] = mapped_column(Text, nullable=True)  # comma-separated
    send_reason: Mapped[Optional[str]] = mapped_column(String(60), nullable=True)

    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True)

    __table_args__ = (
        Index("ix_ops_alert_key_created", "alert_key", "created_at"),
        Index("ix_ops_alert_status_created", "status", "created_at"),
    )
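A sketch of the cooldown query this table enables ("was this alert_key sent within the last N minutes?"); the session handling and helper name are assumptions:

```python
from datetime import datetime, timedelta

from sqlalchemy import select

from app.models.ops_alert import OpsAlertEvent  # assumed module path


async def in_cooldown(db, alert_key: str, minutes: int = 30) -> bool:
    cutoff = datetime.utcnow() - timedelta(minutes=minutes)
    row = (
        await db.execute(
            select(OpsAlertEvent.id)
            .where(
                OpsAlertEvent.alert_key == alert_key,
                OpsAlertEvent.status == "sent",
                OpsAlertEvent.created_at >= cutoff,
            )
            .limit(1)
        )
    ).first()
    return row is not None
```

The `ix_ops_alert_key_created` index covers exactly this `(alert_key, created_at)` probe, which is why the cooldown survives process restarts.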
@@ -45,14 +45,6 @@ class PortfolioDomain(Base):
     # Status
     status: Mapped[str] = mapped_column(String(50), default="active")  # active, expired, sold, parked

-    # DNS Verification (required for Yield and For Sale)
-    # All fields nullable=True to avoid migration issues on existing databases
-    is_dns_verified: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, nullable=True)
-    verification_status: Mapped[Optional[str]] = mapped_column(String(50), default="unverified", nullable=True)
-    verification_code: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
-    verification_started_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
-    verified_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
-
     # Notes
     notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
     tags: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)  # Comma-separated
@@ -1,116 +0,0 @@
"""
|
|
||||||
SEO Data models for the "SEO Juice Detector" feature.
|
|
||||||
|
|
||||||
This implements "Strategie 3: SEO-Daten & Backlinks" from analysis_3.md:
|
|
||||||
"SEO-Agenturen suchen Domains nicht wegen dem Namen, sondern wegen der Power (Backlinks).
|
|
||||||
Wenn eine Domain droppt, prüfst du nicht nur den Namen, sondern ob Backlinks existieren."
|
|
||||||
|
|
||||||
This is a TYCOON-ONLY feature ($29/month).
|
|
||||||
|
|
||||||
DATABASE TABLE TO CREATE:
|
|
||||||
- domain_seo_data - Cached SEO metrics for domains
|
|
||||||
|
|
||||||
Run migrations: alembic upgrade head
|
|
||||||
"""
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Optional, List
|
|
||||||
from sqlalchemy import String, DateTime, Float, Integer, Text, ForeignKey, Boolean, JSON
|
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
|
||||||
|
|
||||||
from app.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class DomainSEOData(Base):
|
|
||||||
"""
|
|
||||||
Cached SEO data for domains.
|
|
||||||
|
|
||||||
Stores backlink data, domain authority, and other SEO metrics
|
|
||||||
from Moz API or alternative sources.
|
|
||||||
|
|
||||||
From analysis_3.md:
|
|
||||||
"Domain `alte-bäckerei-münchen.de` ist frei.
|
|
||||||
Hat Links von `sueddeutsche.de` und `wikipedia.org`."
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "domain_seo_data"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
domain: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
|
|
||||||
|
|
||||||
# Moz metrics
|
|
||||||
domain_authority: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # 0-100
|
|
||||||
page_authority: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # 0-100
|
|
||||||
spam_score: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # 0-100
|
|
||||||
|
|
||||||
# Backlink data
|
|
||||||
total_backlinks: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
|
|
||||||
referring_domains: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
|
|
||||||
|
|
||||||
# Top backlinks (JSON array of {domain, authority, type})
|
|
||||||
top_backlinks: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True)
|
|
||||||
|
|
||||||
# Notable backlinks (high-authority sites)
|
|
||||||
notable_backlinks: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # Comma-separated
|
|
||||||
has_wikipedia_link: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
has_gov_link: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
has_edu_link: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
has_news_link: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
|
|
||||||
# Estimated value based on SEO
|
|
||||||
seo_value_estimate: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
|
||||||
|
|
||||||
# Data source
|
|
||||||
data_source: Mapped[str] = mapped_column(String(50), default="moz") # moz, ahrefs, majestic, estimated
|
|
||||||
|
|
||||||
# Cache management
|
|
||||||
last_updated: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Request tracking
|
|
||||||
fetch_count: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<DomainSEOData {self.domain} DA:{self.domain_authority}>"
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_expired(self) -> bool:
|
|
||||||
if not self.expires_at:
|
|
||||||
return True
|
|
||||||
return datetime.utcnow() > self.expires_at
|
|
||||||
|
|
||||||
@property
|
|
||||||
def seo_score(self) -> int:
|
|
||||||
"""Calculate overall SEO score (0-100)."""
|
|
||||||
if not self.domain_authority:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
score = self.domain_authority
|
|
||||||
|
|
||||||
# Boost for notable links
|
|
||||||
if self.has_wikipedia_link:
|
|
||||||
score = min(100, score + 10)
|
|
||||||
if self.has_gov_link:
|
|
||||||
score = min(100, score + 5)
|
|
||||||
if self.has_edu_link:
|
|
||||||
score = min(100, score + 5)
|
|
||||||
if self.has_news_link:
|
|
||||||
score = min(100, score + 3)
|
|
||||||
|
|
||||||
# Penalty for spam
|
|
||||||
if self.spam_score and self.spam_score > 30:
|
|
||||||
score = max(0, score - (self.spam_score // 5))
|
|
||||||
|
|
||||||
return score
|
|
||||||
|
|
||||||
@property
|
|
||||||
def value_category(self) -> str:
|
|
||||||
"""Categorize SEO value for display."""
|
|
||||||
score = self.seo_score
|
|
||||||
if score >= 60:
|
|
||||||
return "High Value"
|
|
||||||
elif score >= 40:
|
|
||||||
return "Medium Value"
|
|
||||||
elif score >= 20:
|
|
||||||
return "Low Value"
|
|
||||||
return "Minimal"
|
|
||||||
|
|
||||||
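A worked example of the `seo_score` heuristic on an in-memory instance (unset boolean flags are simply falsy before a flush): DA 45 plus the Wikipedia boost gives 55, then the spam penalty subtracts 40 // 5 = 8.

```python
data = DomainSEOData(
    domain="alte-bäckerei-münchen.de",
    domain_authority=45,
    has_wikipedia_link=True,
    spam_score=40,
)
assert data.seo_score == 47                   # min(100, 45 + 10) - (40 // 5)
assert data.value_category == "Medium Value"  # 40 <= 47 < 60
```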
@@ -1,183 +0,0 @@
"""
|
|
||||||
Sniper Alert models for hyper-personalized auction alerts.
|
|
||||||
|
|
||||||
This implements "Strategie 4: Alerts nach Maß" from analysis_3.md:
|
|
||||||
"Der User kann extrem spezifische Filter speichern:
|
|
||||||
- Informiere mich NUR, wenn eine 4-Letter .com Domain droppt, die kein 'q' oder 'x' enthält.
|
|
||||||
- Informiere mich, wenn eine .ch Domain droppt, die das Wort 'Immo' enthält."
|
|
||||||
|
|
||||||
DATABASE TABLES TO CREATE:
|
|
||||||
1. sniper_alerts - Saved filter configurations
|
|
||||||
2. sniper_alert_matches - Matched auctions for each alert
|
|
||||||
3. sniper_alert_notifications - Sent notifications
|
|
||||||
|
|
||||||
Run migrations: alembic upgrade head
|
|
||||||
"""
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Optional, List
|
|
||||||
from sqlalchemy import String, DateTime, Float, Integer, Text, ForeignKey, Boolean, JSON
|
|
||||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
|
||||||
|
|
||||||
from app.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class SniperAlert(Base):
|
|
||||||
"""
|
|
||||||
Saved filter for hyper-personalized auction alerts.
|
|
||||||
|
|
||||||
Users can define very specific criteria and get notified
|
|
||||||
when matching domains appear in auctions.
|
|
||||||
|
|
||||||
Example filters:
|
|
||||||
- "4-letter .com without q or x"
|
|
||||||
- ".ch domains containing 'immo'"
|
|
||||||
- "Auctions under $100 ending in 1 hour"
|
|
||||||
|
|
||||||
From analysis_3.md:
|
|
||||||
"Wenn die SMS/Mail kommt, weiß der User: Das ist relevant."
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "sniper_alerts"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True, nullable=False)
|
|
||||||
|
|
||||||
# Alert name
|
|
||||||
name: Mapped[str] = mapped_column(String(100), nullable=False)
|
|
||||||
description: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
|
|
||||||
# Filter criteria (stored as JSON for flexibility)
|
|
||||||
# Example: {"tlds": ["com", "io"], "max_length": 4, "exclude_chars": ["q", "x"]}
|
|
||||||
filter_criteria: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
|
|
||||||
|
|
||||||
# Individual filter fields (for database queries)
|
|
||||||
tlds: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) # Comma-separated: "com,io,ai"
|
|
||||||
keywords: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) # Must contain
|
|
||||||
exclude_keywords: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) # Must not contain
|
|
||||||
max_length: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
|
|
||||||
min_length: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
|
|
||||||
max_price: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
|
||||||
min_price: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
|
||||||
max_bids: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # Low competition
|
|
||||||
ending_within_hours: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # Urgency
|
|
||||||
platforms: Mapped[Optional[str]] = mapped_column(String(200), nullable=True) # Comma-separated
|
|
||||||
|
|
||||||
# Advanced filters
|
|
||||||
no_numbers: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
no_hyphens: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
exclude_chars: Mapped[Optional[str]] = mapped_column(String(50), nullable=True) # "q,x,z"
|
|
||||||
|
|
||||||
# Notification settings
|
|
||||||
notify_email: Mapped[bool] = mapped_column(Boolean, default=True)
|
|
||||||
notify_sms: Mapped[bool] = mapped_column(Boolean, default=False) # Tycoon feature
|
|
||||||
notify_push: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
|
|
||||||
# Frequency limits
|
|
||||||
max_notifications_per_day: Mapped[int] = mapped_column(Integer, default=10)
|
|
||||||
cooldown_minutes: Mapped[int] = mapped_column(Integer, default=30) # Min time between alerts
|
|
||||||
|
|
||||||
# Status
|
|
||||||
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
|
|
||||||
|
|
||||||
# Stats
|
|
||||||
matches_count: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
notifications_sent: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
last_matched_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
last_notified_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Timestamps
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
user: Mapped["User"] = relationship("User", back_populates="sniper_alerts")
|
|
||||||
matches: Mapped[List["SniperAlertMatch"]] = relationship(
|
|
||||||
"SniperAlertMatch", back_populates="alert", cascade="all, delete-orphan"
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<SniperAlert '{self.name}' (user={self.user_id})>"
|
|
||||||
|
|
||||||
def matches_domain(self, domain: str, tld: str, price: float, num_bids: int) -> bool:
|
|
||||||
"""Check if a domain matches this alert's criteria."""
|
|
||||||
name = domain.split('.')[0] if '.' in domain else domain
|
|
||||||
|
|
||||||
# TLD filter
|
|
||||||
if self.tlds:
|
|
||||||
allowed_tlds = [t.strip().lower() for t in self.tlds.split(',')]
|
|
||||||
if tld.lower() not in allowed_tlds:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Length filters
|
|
||||||
if self.max_length and len(name) > self.max_length:
|
|
||||||
return False
|
|
||||||
if self.min_length and len(name) < self.min_length:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Price filters
|
|
||||||
if self.max_price and price > self.max_price:
|
|
||||||
return False
|
|
||||||
if self.min_price and price < self.min_price:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Competition filter
|
|
||||||
if self.max_bids and num_bids > self.max_bids:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Keyword filters
|
|
||||||
if self.keywords:
|
|
||||||
required = [k.strip().lower() for k in self.keywords.split(',')]
|
|
||||||
if not any(kw in name.lower() for kw in required):
|
|
||||||
return False
|
|
||||||
|
|
||||||
if self.exclude_keywords:
|
|
||||||
excluded = [k.strip().lower() for k in self.exclude_keywords.split(',')]
|
|
||||||
if any(kw in name.lower() for kw in excluded):
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Character filters
|
|
||||||
if self.no_numbers and any(c.isdigit() for c in name):
|
|
||||||
return False
|
|
||||||
|
|
||||||
if self.no_hyphens and '-' in name:
|
|
||||||
return False
|
|
||||||
|
|
||||||
if self.exclude_chars:
|
|
||||||
excluded_chars = [c.strip().lower() for c in self.exclude_chars.split(',')]
|
|
||||||
if any(c in name.lower() for c in excluded_chars):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class SniperAlertMatch(Base):
|
|
||||||
"""
|
|
||||||
Record of a domain that matched a sniper alert.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "sniper_alert_matches"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
alert_id: Mapped[int] = mapped_column(ForeignKey("sniper_alerts.id"), index=True, nullable=False)
|
|
||||||
|
|
||||||
# Matched auction info
|
|
||||||
domain: Mapped[str] = mapped_column(String(255), nullable=False)
|
|
||||||
platform: Mapped[str] = mapped_column(String(50), nullable=False)
|
|
||||||
current_bid: Mapped[float] = mapped_column(Float, nullable=False)
|
|
||||||
end_time: Mapped[datetime] = mapped_column(DateTime, nullable=False)
|
|
||||||
auction_url: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
|
|
||||||
# Status
|
|
||||||
notified: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
clicked: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
|
|
||||||
# Timestamps
|
|
||||||
matched_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
notified_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
alert: Mapped["SniperAlert"] = relationship("SniperAlert", back_populates="matches")
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<SniperAlertMatch {self.domain} for alert #{self.alert_id}>"
|
|
||||||
|
|
||||||
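`matches_domain` is pure Python, so a saved alert can be checked against a candidate auction without touching the database. An in-memory example of the "4-letter .com without q or x" filter from the docstring:

```python
alert = SniperAlert(
    name="LLLL .com",
    tlds="com",
    max_length=4,
    exclude_chars="q,x",
    max_price=100.0,
)
assert alert.matches_domain("kilo.com", "com", price=80.0, num_bids=3)
assert not alert.matches_domain("quad.com", "com", price=80.0, num_bids=3)   # contains 'q'
assert not alert.matches_domain("house.com", "com", price=80.0, num_bids=3)  # 5 letters > max_length
assert not alert.matches_domain("kilo.com", "com", price=250.0, num_bids=3)  # over max_price
```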
@@ -12,13 +12,13 @@ class SubscriptionTier(str, Enum):
"""
|
"""
|
||||||
Subscription tiers for pounce.ch
|
Subscription tiers for pounce.ch
|
||||||
|
|
||||||
Scout (Free): 10 watchlist, 3 portfolio, 1 listing, daily checks
|
Scout (Free): 5 domains, daily checks, email alerts
|
||||||
Trader ($9/mo): 100 watchlist, 50 portfolio, 10 listings, hourly checks
|
Trader (€19/mo): 50 domains, hourly checks, portfolio, valuation
|
||||||
Tycoon ($29/mo): Unlimited, 5-min checks, API, bulk tools, exclusive drops
|
Tycoon (€49/mo): 500+ domains, 10-min checks, API, bulk tools
|
||||||
"""
|
"""
|
||||||
SCOUT = "scout" # Free tier
|
SCOUT = "scout" # Free tier
|
||||||
TRADER = "trader" # $9/month
|
TRADER = "trader" # €19/month
|
||||||
TYCOON = "tycoon" # $29/month
|
TYCOON = "tycoon" # €49/month
|
||||||
|
|
||||||
|
|
||||||
class SubscriptionStatus(str, Enum):
|
class SubscriptionStatus(str, Enum):
|
||||||
@@ -31,42 +31,35 @@ class SubscriptionStatus(str, Enum):


 # Plan configuration - matches frontend pricing page
-# Updated 2024: Better conversion funnel with taste-before-pay model
 TIER_CONFIG = {
     SubscriptionTier.SCOUT: {
         "name": "Scout",
         "price": 0,
         "currency": "USD",
-        "domain_limit": 10,  # Watchlist: 10 (was 5)
-        "portfolio_limit": 3,  # Portfolio: 3 (was 0) - taste the feature
-        "listing_limit": 1,  # Listings: 1 (was 0) - try selling
-        "sniper_limit": 2,  # Sniper alerts
+        "domain_limit": 5,
+        "portfolio_limit": 0,
         "check_frequency": "daily",
-        "history_days": 7,  # 7 days history (was 0)
+        "history_days": 0,
         "features": {
             "email_alerts": True,
             "sms_alerts": False,
             "priority_alerts": False,
             "full_whois": False,
             "expiration_tracking": False,
-            "domain_valuation": True,  # Basic score enabled
+            "domain_valuation": False,
             "market_insights": False,
             "api_access": False,
             "webhooks": False,
             "bulk_tools": False,
             "seo_metrics": False,
-            "yield": False,
-            "daily_drop_digest": False,
         }
     },
     SubscriptionTier.TRADER: {
         "name": "Trader",
         "price": 9,
         "currency": "USD",
-        "domain_limit": 100,  # Watchlist: 100 (was 50)
-        "portfolio_limit": 50,  # Portfolio: 50 (was 25)
-        "listing_limit": 10,  # Listings: 10 (was 5)
-        "sniper_limit": 10,  # Sniper alerts
+        "domain_limit": 50,
+        "portfolio_limit": 25,
         "check_frequency": "hourly",
         "history_days": 90,
         "features": {
@@ -81,20 +74,15 @@ TIER_CONFIG = {
"webhooks": False,
|
"webhooks": False,
|
||||||
"bulk_tools": False,
|
"bulk_tools": False,
|
||||||
"seo_metrics": False,
|
"seo_metrics": False,
|
||||||
# Yield (DNS routing + landing) is Tycoon-only. Trader can preview ideas in VISION.
|
|
||||||
"yield": False,
|
|
||||||
"daily_drop_digest": False,
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
SubscriptionTier.TYCOON: {
|
SubscriptionTier.TYCOON: {
|
||||||
"name": "Tycoon",
|
"name": "Tycoon",
|
||||||
"price": 29,
|
"price": 29,
|
||||||
"currency": "USD",
|
"currency": "USD",
|
||||||
"domain_limit": -1, # Unlimited watchlist
|
"domain_limit": 500,
|
||||||
"portfolio_limit": -1, # Unlimited portfolio
|
"portfolio_limit": -1, # Unlimited
|
||||||
"listing_limit": -1, # Unlimited listings
|
"check_frequency": "realtime", # Every 10 minutes
|
||||||
"sniper_limit": 50, # Sniper alerts
|
|
||||||
"check_frequency": "5min", # Every 5 minutes (was 10min)
|
|
||||||
"history_days": -1, # Unlimited
|
"history_days": -1, # Unlimited
|
||||||
"features": {
|
"features": {
|
||||||
"email_alerts": True,
|
"email_alerts": True,
|
||||||
@@ -108,8 +96,6 @@ TIER_CONFIG = {
"webhooks": True,
|
"webhooks": True,
|
||||||
"bulk_tools": True,
|
"bulk_tools": True,
|
||||||
"seo_metrics": True,
|
"seo_metrics": True,
|
||||||
"yield": True,
|
|
||||||
"daily_drop_digest": True, # Tycoon exclusive: Curated top 10 drops daily
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -137,8 +123,6 @@ class Subscription(Base):

     # Limits (can be overridden)
     max_domains: Mapped[int] = mapped_column(Integer, default=5)
-    # Referral reward bonus (3C.2): additive, computed deterministically from qualified referrals
-    referral_bonus_domains: Mapped[int] = mapped_column(Integer, default=0)
     check_frequency: Mapped[str] = mapped_column(String(50), default="daily")

     # Stripe integration
@@ -183,9 +167,7 @@
     @property
     def domain_limit(self) -> int:
         """Get maximum allowed domains for this subscription."""
-        base = int(self.max_domains or self.config["domain_limit"] or 0)
-        bonus = int(self.referral_bonus_domains or 0)
-        return max(0, base + bonus)
+        return self.max_domains or self.config["domain_limit"]

     @property
     def portfolio_limit(self) -> int:
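The two sides of this hunk compute the limit differently; a small worked comparison with illustrative values:

```python
base, bonus = 100, 5  # e.g. a Trader override plus 5 qualified referrals

# Removed (-) side: additive referral bonus, floored at zero.
assert max(0, int(base) + int(bonus)) == 105

# Added (+) side: plain override-or-tier-default, no bonus arithmetic.
tier_default = 50  # stands in for self.config["domain_limit"]
assert (base or tier_default) == 100
```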
@@ -1,56 +0,0 @@
"""
|
|
||||||
Telemetry events (4A).
|
|
||||||
|
|
||||||
Store canonical product events for funnel KPIs:
|
|
||||||
- Deal funnel: listing_view → inquiry_created → message_sent → listing_marked_sold
|
|
||||||
- Yield funnel: yield_connected → yield_click → yield_conversion → payout_paid
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from sqlalchemy import Boolean, DateTime, ForeignKey, Index, Integer, String, Text
|
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
|
||||||
|
|
||||||
from app.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class TelemetryEvent(Base):
|
|
||||||
__tablename__ = "telemetry_events"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
|
|
||||||
# Who
|
|
||||||
user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), nullable=True, index=True)
|
|
||||||
|
|
||||||
# What
|
|
||||||
event_name: Mapped[str] = mapped_column(String(60), nullable=False, index=True)
|
|
||||||
|
|
||||||
# Entity links (optional)
|
|
||||||
listing_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True)
|
|
||||||
inquiry_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True)
|
|
||||||
yield_domain_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True)
|
|
||||||
click_id: Mapped[Optional[str]] = mapped_column(String(64), nullable=True, index=True)
|
|
||||||
domain: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)
|
|
||||||
|
|
||||||
# Context
|
|
||||||
source: Mapped[Optional[str]] = mapped_column(String(30), nullable=True) # "public" | "terminal" | "webhook" | "scheduler" | "admin"
|
|
||||||
ip_hash: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
|
|
||||||
user_agent: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
referrer: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
metadata_json: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # JSON string
|
|
||||||
|
|
||||||
# Flags
|
|
||||||
is_authenticated: Mapped[Optional[bool]] = mapped_column(Boolean, nullable=True)
|
|
||||||
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True)
|
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
Index("ix_telemetry_event_name_created", "event_name", "created_at"),
|
|
||||||
Index("ix_telemetry_user_created", "user_id", "created_at"),
|
|
||||||
Index("ix_telemetry_listing_created", "listing_id", "created_at"),
|
|
||||||
Index("ix_telemetry_yield_created", "yield_domain_id", "created_at"),
|
|
||||||
)
|
|
||||||
|
|
||||||
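A sketch of emitting one deal-funnel event with the fields above; the helper name, module path, and session handling are assumptions:

```python
import json

from app.models.telemetry import TelemetryEvent  # assumed module path


async def track_listing_view(db, listing_id: int, domain: str, user_id: int | None = None):
    db.add(TelemetryEvent(
        event_name="listing_view",  # first step of the deal funnel
        listing_id=listing_id,
        domain=domain,
        user_id=user_id,
        source="public",
        is_authenticated=user_id is not None,
        metadata_json=json.dumps({"surface": "listing_page"}),  # free-form JSON context
    ))
    await db.commit()
```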
@@ -1,7 +1,7 @@
"""User model."""
|
"""User model."""
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Optional, List
|
from typing import Optional, List
|
||||||
from sqlalchemy import String, Boolean, DateTime, Integer
|
from sqlalchemy import String, Boolean, DateTime
|
||||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
|
||||||
from app.database import Base
|
from app.database import Base
|
||||||
@@ -40,12 +40,6 @@ class User(Base):
     oauth_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
     oauth_avatar: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)

-    # Yield Referral Tracking (for viral growth)
-    referred_by_user_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)  # User who referred this user
-    referred_by_domain: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)  # Domain that referred
-    referral_code: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)  # Original referral code
-    invite_code: Mapped[Optional[str]] = mapped_column(String(32), nullable=True, unique=True, index=True)  # user's own code
-
     # Timestamps
     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
     updated_at: Mapped[datetime] = mapped_column(
@@ -66,21 +60,6 @@
     price_alerts: Mapped[List["PriceAlert"]] = relationship(
         "PriceAlert", cascade="all, delete-orphan", passive_deletes=True
     )
-    # For Sale Marketplace
-    listings: Mapped[List["DomainListing"]] = relationship(
-        "DomainListing", back_populates="user", cascade="all, delete-orphan"
-    )
-    # Sniper Alerts
-    sniper_alerts: Mapped[List["SniperAlert"]] = relationship(
-        "SniperAlert", back_populates="user", cascade="all, delete-orphan"
-    )
-    # Yield Domains
-    yield_domains: Mapped[List["YieldDomain"]] = relationship(
-        "YieldDomain", back_populates="user", cascade="all, delete-orphan"
-    )
-    yield_payouts: Mapped[List["YieldPayout"]] = relationship(
-        "YieldPayout", back_populates="user", cascade="all, delete-orphan"
-    )

     def __repr__(self) -> str:
         return f"<User {self.email}>"
@@ -1,257 +0,0 @@
"""
|
|
||||||
Yield Domain models for Intent Routing feature.
|
|
||||||
|
|
||||||
Domains activated for yield generate passive income by routing
|
|
||||||
visitor intent to affiliate partners.
|
|
||||||
"""
|
|
||||||
from datetime import datetime
|
|
||||||
from decimal import Decimal
|
|
||||||
from typing import Optional
|
|
||||||
from sqlalchemy import String, DateTime, Float, Integer, Text, ForeignKey, Boolean, Numeric, Index
|
|
||||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
|
||||||
|
|
||||||
from app.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class AffiliatePartner(Base):
|
|
||||||
"""
|
|
||||||
Affiliate network/partner configuration.
|
|
||||||
|
|
||||||
Partners are matched to domains based on detected intent category.
|
|
||||||
"""
|
|
||||||
__tablename__ = "affiliate_partners"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
|
|
||||||
# Identity
|
|
||||||
name: Mapped[str] = mapped_column(String(100), nullable=False) # "Comparis Dental"
|
|
||||||
slug: Mapped[str] = mapped_column(String(50), unique=True, nullable=False) # "comparis_dental"
|
|
||||||
network: Mapped[str] = mapped_column(String(50), nullable=False) # "awin", "partnerstack", "direct"
|
|
||||||
|
|
||||||
# Matching criteria (JSON arrays stored as comma-separated for simplicity)
|
|
||||||
intent_categories: Mapped[str] = mapped_column(Text, nullable=False) # "medical_dental,medical_general"
|
|
||||||
geo_countries: Mapped[str] = mapped_column(String(200), default="CH,DE,AT") # ISO codes
|
|
||||||
|
|
||||||
# Payout configuration
|
|
||||||
payout_type: Mapped[str] = mapped_column(String(20), default="cpl") # "cpc", "cpl", "cps"
|
|
||||||
payout_amount: Mapped[Decimal] = mapped_column(Numeric(10, 2), default=0)
|
|
||||||
payout_currency: Mapped[str] = mapped_column(String(3), default="CHF")
|
|
||||||
|
|
||||||
# Integration
|
|
||||||
tracking_url_template: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
|
|
||||||
api_endpoint: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
# Note: API keys should be stored encrypted or in env vars, not here
|
|
||||||
|
|
||||||
# Display
|
|
||||||
logo_url: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
|
|
||||||
|
|
||||||
# Status
|
|
||||||
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
|
|
||||||
priority: Mapped[int] = mapped_column(Integer, default=0) # Higher = preferred
|
|
||||||
|
|
||||||
# Timestamps
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
yield_domains: Mapped[list["YieldDomain"]] = relationship("YieldDomain", back_populates="partner")
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<AffiliatePartner {self.slug}>"
|
|
||||||
|
|
||||||
@property
|
|
||||||
def intent_list(self) -> list[str]:
|
|
||||||
"""Parse intent categories as list."""
|
|
||||||
return [c.strip() for c in self.intent_categories.split(",") if c.strip()]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def country_list(self) -> list[str]:
|
|
||||||
"""Parse geo countries as list."""
|
|
||||||
return [c.strip() for c in self.geo_countries.split(",") if c.strip()]
|
|
||||||
|
|
||||||
|
|
||||||
class YieldDomain(Base):
|
|
||||||
"""
|
|
||||||
Domain activated for yield/intent routing.
|
|
||||||
|
|
||||||
When a user activates a domain for yield:
|
|
||||||
1. They point DNS to our nameservers
|
|
||||||
2. We detect the intent (e.g., "zahnarzt.ch" → medical/dental)
|
|
||||||
3. We route traffic to affiliate partners
|
|
||||||
4. User earns commission split
|
|
||||||
"""
|
|
||||||
__tablename__ = "yield_domains"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True, nullable=False)
|
|
||||||
|
|
||||||
# Domain info
|
|
||||||
domain: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
|
|
||||||
|
|
||||||
# Intent detection
|
|
||||||
detected_intent: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # "medical_dental"
|
|
||||||
intent_confidence: Mapped[float] = mapped_column(Float, default=0.0) # 0.0 - 1.0
|
|
||||||
intent_keywords: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # JSON: ["zahnarzt", "zuerich"]
|
|
||||||
|
|
||||||
# Routing
|
|
||||||
partner_id: Mapped[Optional[int]] = mapped_column(ForeignKey("affiliate_partners.id"), nullable=True)
|
|
||||||
active_route: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # Partner slug
|
|
||||||
landing_page_url: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
|
|
||||||
# LLM-generated landing page config (used by routing when direct=false)
|
|
||||||
landing_config_json: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
|
|
||||||
landing_template: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
|
|
||||||
landing_headline: Mapped[Optional[str]] = mapped_column(String(300), nullable=True)
|
|
||||||
landing_intro: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
|
|
||||||
landing_cta_label: Mapped[Optional[str]] = mapped_column(String(120), nullable=True)
|
|
||||||
landing_model: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
|
|
||||||
landing_generated_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Status
|
|
||||||
status: Mapped[str] = mapped_column(String(30), default="pending", index=True)
|
|
||||||
# pending, verifying, active, paused, inactive, error
|
|
||||||
|
|
||||||
dns_verified: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
dns_verified_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
# "Connect" timestamp for Yield (nameserver/CNAME verified)
|
|
||||||
connected_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
activated_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
paused_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Revenue tracking (aggregates, updated periodically)
|
|
||||||
total_clicks: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
total_conversions: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
total_revenue: Mapped[Decimal] = mapped_column(Numeric(12, 2), default=0)
|
|
||||||
currency: Mapped[str] = mapped_column(String(3), default="CHF")
|
|
||||||
|
|
||||||
# Last activity
|
|
||||||
last_click_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
last_conversion_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Timestamps
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
user: Mapped["User"] = relationship("User", back_populates="yield_domains")
|
|
||||||
partner: Mapped[Optional["AffiliatePartner"]] = relationship("AffiliatePartner", back_populates="yield_domains")
|
|
||||||
transactions: Mapped[list["YieldTransaction"]] = relationship(
|
|
||||||
"YieldTransaction", back_populates="yield_domain", cascade="all, delete-orphan"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Indexes
|
|
||||||
__table_args__ = (
|
|
||||||
Index("ix_yield_domains_user_status", "user_id", "status"),
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<YieldDomain {self.domain} ({self.status})>"
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_earning(self) -> bool:
|
|
||||||
"""Check if domain is actively earning."""
|
|
||||||
return self.status == "active" and self.dns_verified
|
|
||||||
|
|
||||||
class YieldTransaction(Base):
|
|
||||||
"""
|
|
||||||
Revenue events from affiliate partners.
|
|
||||||
|
|
||||||
Tracks clicks, leads, and sales for each yield domain.
|
|
||||||
"""
|
|
||||||
__tablename__ = "yield_transactions"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
yield_domain_id: Mapped[int] = mapped_column(
|
|
||||||
ForeignKey("yield_domains.id", ondelete="CASCADE"),
|
|
||||||
index=True,
|
|
||||||
nullable=False
|
|
||||||
)
|
|
||||||
|
|
||||||
# Event type
|
|
||||||
event_type: Mapped[str] = mapped_column(String(20), nullable=False) # "click", "lead", "sale"
|
|
||||||
|
|
||||||
# Partner info
|
|
||||||
partner_slug: Mapped[str] = mapped_column(String(50), nullable=False)
|
|
||||||
partner_transaction_id: Mapped[Optional[str]] = mapped_column(String(200), nullable=True)
|
|
||||||
# Our click id for attribution across systems (UUID string)
|
|
||||||
click_id: Mapped[Optional[str]] = mapped_column(String(64), nullable=True, index=True)
|
|
||||||
destination_url: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
|
|
||||||
|
|
||||||
# Amount
|
|
||||||
gross_amount: Mapped[Decimal] = mapped_column(Numeric(10, 2), default=0) # Full commission
|
|
||||||
net_amount: Mapped[Decimal] = mapped_column(Numeric(10, 2), default=0) # After Pounce cut (70%)
|
|
||||||
currency: Mapped[str] = mapped_column(String(3), default="CHF")
|
|
||||||
|
|
||||||
# Attribution
|
|
||||||
referrer: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
user_agent: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
|
|
||||||
geo_country: Mapped[Optional[str]] = mapped_column(String(2), nullable=True)
|
|
||||||
ip_hash: Mapped[Optional[str]] = mapped_column(String(64), nullable=True) # Hashed for privacy
|
|
||||||
|
|
||||||
# Status
|
|
||||||
status: Mapped[str] = mapped_column(String(20), default="pending", index=True)
|
|
||||||
# pending, confirmed, paid, rejected
|
|
||||||
|
|
||||||
confirmed_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
paid_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
payout_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # FK to future payouts table
|
|
||||||
|
|
||||||
# Timestamps
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
yield_domain: Mapped["YieldDomain"] = relationship("YieldDomain", back_populates="transactions")
|
|
||||||
|
|
||||||
# Indexes
|
|
||||||
__table_args__ = (
|
|
||||||
Index("ix_yield_tx_domain_created", "yield_domain_id", "created_at"),
|
|
||||||
Index("ix_yield_tx_status_created", "status", "created_at"),
|
|
||||||
Index("ix_yield_tx_click_id", "click_id"),
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<YieldTransaction {self.event_type} {self.net_amount} {self.currency}>"
|
|
||||||
|
|
||||||
|
|
||||||
class YieldPayout(Base):
|
|
||||||
"""
|
|
||||||
Payout records for user earnings.
|
|
||||||
|
|
||||||
Aggregates confirmed transactions into periodic payouts.
|
|
||||||
"""
|
|
||||||
__tablename__ = "yield_payouts"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
|
||||||
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True, nullable=False)
|
|
||||||
|
|
||||||
# Amount
|
|
||||||
amount: Mapped[Decimal] = mapped_column(Numeric(12, 2), nullable=False)
|
|
||||||
currency: Mapped[str] = mapped_column(String(3), default="CHF")
|
|
||||||
|
|
||||||
# Period
|
|
||||||
period_start: Mapped[datetime] = mapped_column(DateTime, nullable=False)
|
|
||||||
period_end: Mapped[datetime] = mapped_column(DateTime, nullable=False)
|
|
||||||
|
|
||||||
# Transaction count
|
|
||||||
transaction_count: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
|
|
||||||
# Status
|
|
||||||
status: Mapped[str] = mapped_column(String(20), default="pending", index=True)
|
|
||||||
# pending, processing, completed, failed
|
|
||||||
|
|
||||||
# Payment details
|
|
||||||
payment_method: Mapped[Optional[str]] = mapped_column(String(50), nullable=True) # "stripe", "bank"
|
|
||||||
payment_reference: Mapped[Optional[str]] = mapped_column(String(200), nullable=True)
|
|
||||||
|
|
||||||
# Timestamps
|
|
||||||
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
|
||||||
processed_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
completed_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Relationship
|
|
||||||
user: Mapped["User"] = relationship("User", back_populates="yield_payouts")
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<YieldPayout {self.amount} {self.currency} ({self.status})>"
|
|
||||||
|
|
||||||
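The `net_amount` comment implies a revenue split but does not pin down the direction; a sketch that assumes the domain owner keeps 70% of the gross commission:

```python
from decimal import Decimal

OWNER_SHARE = Decimal("0.70")  # assumption; the "(70%)" column comment is not explicit


def make_lead_transaction(yield_domain_id: int, gross: Decimal) -> YieldTransaction:
    return YieldTransaction(
        yield_domain_id=yield_domain_id,
        event_type="lead",
        partner_slug="comparis_dental",  # illustrative slug from the AffiliatePartner comments
        gross_amount=gross,
        net_amount=(gross * OWNER_SHARE).quantize(Decimal("0.01")),
        currency="CHF",
        status="pending",  # later: confirmed -> paid, per the status comment
    )
```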
@@ -1,43 +0,0 @@
"""
|
|
||||||
Zone File Models for .ch and .li domain drops
|
|
||||||
"""
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from sqlalchemy import Column, Integer, String, DateTime, Boolean, Index
|
|
||||||
|
|
||||||
from app.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class ZoneSnapshot(Base):
|
|
||||||
"""Stores metadata about zone file snapshots (not the full data)"""
|
|
||||||
__tablename__ = "zone_snapshots"
|
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
tld = Column(String(10), nullable=False, index=True) # 'ch' or 'li'
|
|
||||||
snapshot_date = Column(DateTime, nullable=False, index=True)
|
|
||||||
domain_count = Column(Integer, nullable=False)
|
|
||||||
checksum = Column(String(64), nullable=False) # SHA256 of sorted domain list
|
|
||||||
created_at = Column(DateTime, default=datetime.utcnow)
|
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
Index('ix_zone_snapshots_tld_date', 'tld', 'snapshot_date'),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class DroppedDomain(Base):
|
|
||||||
"""Stores domains that were dropped (found in previous snapshot but not current)"""
|
|
||||||
__tablename__ = "dropped_domains"
|
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
domain = Column(String(255), nullable=False, index=True)
|
|
||||||
tld = Column(String(10), nullable=False, index=True)
|
|
||||||
dropped_date = Column(DateTime, nullable=False, index=True)
|
|
||||||
length = Column(Integer, nullable=False)
|
|
||||||
is_numeric = Column(Boolean, default=False)
|
|
||||||
has_hyphen = Column(Boolean, default=False)
|
|
||||||
created_at = Column(DateTime, default=datetime.utcnow)
|
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
Index('ix_dropped_domains_tld_date', 'tld', 'dropped_date'),
|
|
||||||
Index('ix_dropped_domains_length', 'length'),
|
|
||||||
)
|
|
||||||
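A sketch of the drop-detection pass these two tables support: diff yesterday's and today's domain sets, derive the per-domain flags, and checksum snapshots as "SHA256 of sorted domain list" per the column comment. Helper names are illustrative.

```python
import hashlib
from datetime import datetime


def zone_checksum(domains: set[str]) -> str:
    # Matches the ZoneSnapshot.checksum comment: SHA256 of the sorted domain list.
    return hashlib.sha256("\n".join(sorted(domains)).encode("utf-8")).hexdigest()


def find_drops(previous: set[str], current: set[str], tld: str) -> list[DroppedDomain]:
    now = datetime.utcnow()
    drops = []
    for fqdn in sorted(previous - current):  # in previous snapshot but not current
        label = fqdn.split(".")[0]
        drops.append(DroppedDomain(
            domain=fqdn,
            tld=tld,
            dropped_date=now,
            length=len(label),
            is_numeric=label.isdigit(),
            has_hyphen="-" in label,
        ))
    return drops
```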
@@ -1,3 +0,0 @@
"""Observability helpers (metrics, tracing)."""
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,304 +0,0 @@
"""
|
|
||||||
Business KPIs exported as Prometheus metrics (4B Ops).
|
|
||||||
|
|
||||||
These KPIs are derived from real telemetry events in the database.
|
|
||||||
We cache computations to avoid putting load on the DB on every scrape.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import json
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Any, Optional
|
|
||||||
|
|
||||||
from sqlalchemy import and_, func, select
|
|
||||||
|
|
||||||
from app.config import get_settings
|
|
||||||
from app.database import AsyncSessionLocal
|
|
||||||
from app.models.telemetry import TelemetryEvent
|
|
||||||
|
|
||||||
|
|
||||||
settings = get_settings()
|
|
||||||
|
|
||||||
try:
|
|
||||||
from prometheus_client import Gauge
|
|
||||||
except Exception: # pragma: no cover
|
|
||||||
Gauge = None # type: ignore
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
|
||||||
class TelemetryWindowKpis:
|
|
||||||
window_days: int
|
|
||||||
start: datetime
|
|
||||||
end: datetime
|
|
||||||
|
|
||||||
# Deal
|
|
||||||
listing_views: int
|
|
||||||
inquiries_created: int
|
|
||||||
seller_replied_inquiries: int
|
|
||||||
inquiry_reply_rate: float
|
|
||||||
listings_with_inquiries: int
|
|
||||||
listings_sold: int
|
|
||||||
inquiry_to_sold_listing_rate: float
|
|
||||||
|
|
||||||
# Yield
|
|
||||||
connected_domains: int
|
|
||||||
clicks: int
|
|
||||||
conversions: int
|
|
||||||
conversion_rate: float
|
|
||||||
payouts_paid: int
|
|
||||||
payouts_paid_amount_total: float
|
|
||||||
|
|
||||||
|
|
||||||
_cache_until_by_days: dict[int, datetime] = {}
|
|
||||||
_cache_value_by_days: dict[int, TelemetryWindowKpis] = {}
|
|
||||||
|
|
||||||
|
|
||||||
def _safe_json(metadata_json: Optional[str]) -> dict[str, Any]:
|
|
||||||
if not metadata_json:
|
|
||||||
return {}
|
|
||||||
try:
|
|
||||||
value = json.loads(metadata_json)
|
|
||||||
return value if isinstance(value, dict) else {}
|
|
||||||
except Exception:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
async def _compute_window_kpis(days: int) -> TelemetryWindowKpis:
|
|
||||||
end = datetime.utcnow()
|
|
||||||
start = end - timedelta(days=days)
|
|
||||||
|
|
||||||
async with AsyncSessionLocal() as db:
|
|
||||||
# Fast path: grouped counts for pure counter events
|
|
||||||
count_events = [
|
|
||||||
"listing_view",
|
|
||||||
"inquiry_created",
|
|
||||||
"yield_connected",
|
|
||||||
"yield_click",
|
|
||||||
"yield_conversion",
|
|
||||||
"payout_paid",
|
|
||||||
]
|
|
||||||
grouped = (
|
|
||||||
await db.execute(
|
|
||||||
select(TelemetryEvent.event_name, func.count(TelemetryEvent.id))
|
|
||||||
.where(
|
|
||||||
and_(
|
|
||||||
TelemetryEvent.created_at >= start,
|
|
||||||
TelemetryEvent.created_at <= end,
|
|
||||||
TelemetryEvent.event_name.in_(count_events),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.group_by(TelemetryEvent.event_name)
|
|
||||||
)
|
|
||||||
).all()
|
|
||||||
counts = {name: int(cnt) for name, cnt in grouped}
|
|
||||||
|
|
||||||
listing_views = counts.get("listing_view", 0)
|
|
||||||
inquiries_created = counts.get("inquiry_created", 0)
|
|
||||||
connected_domains = counts.get("yield_connected", 0)
|
|
||||||
clicks = counts.get("yield_click", 0)
|
|
||||||
conversions = counts.get("yield_conversion", 0)
|
|
||||||
payouts_paid = counts.get("payout_paid", 0)
|
|
||||||
|
|
||||||
# Distinct listing counts (deal)
|
|
||||||
listings_with_inquiries = (
|
|
||||||
await db.execute(
|
|
||||||
select(func.count(func.distinct(TelemetryEvent.listing_id))).where(
|
|
||||||
and_(
|
|
||||||
TelemetryEvent.created_at >= start,
|
|
||||||
TelemetryEvent.created_at <= end,
|
|
||||||
TelemetryEvent.event_name == "inquiry_created",
|
|
||||||
TelemetryEvent.listing_id.isnot(None),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalar() or 0
|
|
||||||
|
|
||||||
listings_sold = (
|
|
||||||
await db.execute(
|
|
||||||
select(func.count(func.distinct(TelemetryEvent.listing_id))).where(
|
|
||||||
and_(
|
|
||||||
TelemetryEvent.created_at >= start,
|
|
||||||
TelemetryEvent.created_at <= end,
|
|
||||||
TelemetryEvent.event_name == "listing_marked_sold",
|
|
||||||
TelemetryEvent.listing_id.isnot(None),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalar() or 0
|
|
||||||
|
|
||||||
# For rates we need intersections/uniques; keep it exact via minimal event fetch
|
|
||||||
inquiry_listing_ids = (
|
|
||||||
await db.execute(
|
|
||||||
select(func.distinct(TelemetryEvent.listing_id)).where(
|
|
||||||
and_(
|
|
||||||
TelemetryEvent.created_at >= start,
|
|
||||||
TelemetryEvent.created_at <= end,
|
|
||||||
TelemetryEvent.event_name == "inquiry_created",
|
|
||||||
TelemetryEvent.listing_id.isnot(None),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalars().all()
|
|
||||||
sold_listing_ids = (
|
|
||||||
await db.execute(
|
|
||||||
select(func.distinct(TelemetryEvent.listing_id)).where(
|
|
||||||
and_(
|
|
||||||
TelemetryEvent.created_at >= start,
|
|
||||||
TelemetryEvent.created_at <= end,
|
|
||||||
TelemetryEvent.event_name == "listing_marked_sold",
|
|
||||||
TelemetryEvent.listing_id.isnot(None),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).scalars().all()
|
|
||||||
|
|
||||||
inquiry_set = {int(x) for x in inquiry_listing_ids if x is not None}
|
|
||||||
sold_set = {int(x) for x in sold_listing_ids if x is not None}
|
|
||||||
sold_from_inquiry = inquiry_set.intersection(sold_set)
|
|
||||||
inquiry_to_sold_listing_rate = (len(sold_from_inquiry) / len(inquiry_set)) if inquiry_set else 0.0
|
|
||||||
|
|
||||||
# Seller reply rate: unique inquiries with at least one seller message
|
|
||||||
msg_rows = (
|
|
||||||
await db.execute(
|
|
||||||
select(TelemetryEvent.inquiry_id, TelemetryEvent.metadata_json).where(
|
|
||||||
and_(
|
|
||||||
TelemetryEvent.created_at >= start,
|
|
||||||
TelemetryEvent.created_at <= end,
|
|
||||||
TelemetryEvent.event_name == "message_sent",
|
|
||||||
TelemetryEvent.inquiry_id.isnot(None),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).all()
|
|
||||||
seller_replied_inquiries_set: set[int] = set()
|
|
||||||
for inquiry_id, metadata_json in msg_rows:
|
|
||||||
if inquiry_id is None:
|
|
||||||
continue
|
|
||||||
meta = _safe_json(metadata_json)
|
|
||||||
if meta.get("role") == "seller":
|
|
||||||
seller_replied_inquiries_set.add(int(inquiry_id))
|
|
||||||
|
|
||||||
seller_replied_inquiries = len(seller_replied_inquiries_set)
|
|
||||||
    inquiry_reply_rate = (seller_replied_inquiries / inquiries_created) if inquiries_created else 0.0

    # Payout amounts (sum of metadata amounts)
    payout_rows = (
        await db.execute(
            select(TelemetryEvent.metadata_json).where(
                and_(
                    TelemetryEvent.created_at >= start,
                    TelemetryEvent.created_at <= end,
                    TelemetryEvent.event_name == "payout_paid",
                    TelemetryEvent.metadata_json.isnot(None),
                )
            )
        )
    ).scalars().all()
    payouts_paid_amount_total = 0.0
    for metadata_json in payout_rows:
        meta = _safe_json(metadata_json)
        amount = meta.get("amount")
        if isinstance(amount, (int, float)):
            payouts_paid_amount_total += float(amount)

    conversion_rate = (conversions / clicks) if clicks else 0.0

    return TelemetryWindowKpis(
        window_days=days,
        start=start,
        end=end,
        listing_views=int(listing_views),
        inquiries_created=int(inquiries_created),
        seller_replied_inquiries=int(seller_replied_inquiries),
        inquiry_reply_rate=float(inquiry_reply_rate),
        listings_with_inquiries=int(listings_with_inquiries),
        listings_sold=int(listings_sold),
        inquiry_to_sold_listing_rate=float(inquiry_to_sold_listing_rate),
        connected_domains=int(connected_domains),
        clicks=int(clicks),
        conversions=int(conversions),
        conversion_rate=float(conversion_rate),
        payouts_paid=int(payouts_paid),
        payouts_paid_amount_total=float(payouts_paid_amount_total),
    )


async def get_cached_window_kpis(days: int) -> Optional[TelemetryWindowKpis]:
    """Return cached KPIs for a window (recompute if TTL expired)."""
    if not settings.enable_business_metrics:
        return None

    now = datetime.utcnow()
    until = _cache_until_by_days.get(days)
    cached = _cache_value_by_days.get(days)
    if until is not None and cached is not None and now < until:
        return cached

    value = await _compute_window_kpis(int(days))
    ttl_seconds = max(5, int(settings.business_metrics_cache_seconds))
    _cache_until_by_days[int(days)] = now + timedelta(seconds=ttl_seconds)
    _cache_value_by_days[int(days)] = value
    return value


# -----------------------------
# Prometheus Gauges
# -----------------------------

if Gauge is not None:
    _g = {
        "deal_listing_views": Gauge("pounce_deal_listing_views", "Deal: listing views in window", ["window_days"]),
        "deal_inquiries_created": Gauge("pounce_deal_inquiries_created", "Deal: inquiries created in window", ["window_days"]),
        "deal_seller_replied_inquiries": Gauge(
            "pounce_deal_seller_replied_inquiries", "Deal: inquiries with seller reply in window", ["window_days"]
        ),
        "deal_inquiry_reply_rate": Gauge("pounce_deal_inquiry_reply_rate", "Deal: inquiry reply rate in window", ["window_days"]),
        "deal_listings_with_inquiries": Gauge(
            "pounce_deal_listings_with_inquiries", "Deal: distinct listings with inquiries in window", ["window_days"]
        ),
        "deal_listings_sold": Gauge("pounce_deal_listings_sold", "Deal: distinct listings marked sold in window", ["window_days"]),
        "deal_inquiry_to_sold_listing_rate": Gauge(
            "pounce_deal_inquiry_to_sold_listing_rate", "Deal: (listings with inquiry) -> sold rate in window", ["window_days"]
        ),
        "yield_connected_domains": Gauge("pounce_yield_connected_domains", "Yield: connected domains in window", ["window_days"]),
        "yield_clicks": Gauge("pounce_yield_clicks", "Yield: clicks in window", ["window_days"]),
        "yield_conversions": Gauge("pounce_yield_conversions", "Yield: conversions in window", ["window_days"]),
        "yield_conversion_rate": Gauge("pounce_yield_conversion_rate", "Yield: conversion rate in window", ["window_days"]),
        "yield_payouts_paid": Gauge("pounce_yield_payouts_paid", "Yield: payouts paid in window", ["window_days"]),
        "yield_payouts_paid_amount_total": Gauge(
            "pounce_yield_payouts_paid_amount_total", "Yield: total amount paid out in window", ["window_days"]
        ),
    }
else:  # pragma: no cover
    _g = {}


async def update_prometheus_business_metrics() -> None:
    """Compute KPIs and set Prometheus gauges (no-op when disabled)."""
    if Gauge is None or not _g:
        return
    if not settings.enable_business_metrics:
        return

    windows = {1, int(settings.business_metrics_days)}
    for days in sorted(windows):
        kpis = await get_cached_window_kpis(days)
        if kpis is None:
            continue
        w = str(int(kpis.window_days))
        _g["deal_listing_views"].labels(window_days=w).set(kpis.listing_views)
        _g["deal_inquiries_created"].labels(window_days=w).set(kpis.inquiries_created)
        _g["deal_seller_replied_inquiries"].labels(window_days=w).set(kpis.seller_replied_inquiries)
        _g["deal_inquiry_reply_rate"].labels(window_days=w).set(kpis.inquiry_reply_rate)
        _g["deal_listings_with_inquiries"].labels(window_days=w).set(kpis.listings_with_inquiries)
        _g["deal_listings_sold"].labels(window_days=w).set(kpis.listings_sold)
        _g["deal_inquiry_to_sold_listing_rate"].labels(window_days=w).set(kpis.inquiry_to_sold_listing_rate)
        _g["yield_connected_domains"].labels(window_days=w).set(kpis.connected_domains)
        _g["yield_clicks"].labels(window_days=w).set(kpis.clicks)
        _g["yield_conversions"].labels(window_days=w).set(kpis.conversions)
        _g["yield_conversion_rate"].labels(window_days=w).set(kpis.conversion_rate)
        _g["yield_payouts_paid"].labels(window_days=w).set(kpis.payouts_paid)
        _g["yield_payouts_paid_amount_total"].labels(window_days=w).set(kpis.payouts_paid_amount_total)
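For reference, the per-window cache in `get_cached_window_kpis` is an ordinary TTL memo keyed by window size. A minimal standalone sketch of the same pattern, assuming nothing beyond the standard library (`_compute` stands in for the real telemetry aggregation; the 5-second floor mirrors the `max(5, ...)` above):

```python
import asyncio
from datetime import datetime, timedelta

_cache_until: dict[int, datetime] = {}
_cache_value: dict[int, float] = {}


async def _compute(days: int) -> float:
    await asyncio.sleep(0.01)  # stand-in for the DB aggregation query
    return float(days)


async def get_cached(days: int, ttl_seconds: int = 60) -> float:
    now = datetime.utcnow()
    until = _cache_until.get(days)
    cached = _cache_value.get(days)
    if until is not None and cached is not None and now < until:
        return cached  # still fresh: skip the recompute
    value = await _compute(days)
    _cache_until[days] = now + timedelta(seconds=max(5, ttl_seconds))
    _cache_value[days] = value
    return value


print(asyncio.run(get_cached(7)))  # first call computes; repeats within the TTL hit the cache
```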
@ -1,137 +0,0 @@
"""Prometheus metrics for FastAPI + optional DB query metrics."""

from __future__ import annotations

import time
from typing import Optional

from fastapi import FastAPI, Request, Response

try:
    from prometheus_client import Counter, Histogram, generate_latest, CONTENT_TYPE_LATEST
except Exception:  # pragma: no cover
    Counter = None  # type: ignore
    Histogram = None  # type: ignore
    generate_latest = None  # type: ignore
    CONTENT_TYPE_LATEST = "text/plain; version=0.0.4"  # type: ignore


_instrumented = False
_db_instrumented = False


def _get_route_template(request: Request) -> str:
    route = request.scope.get("route")
    if route is not None and hasattr(route, "path"):
        return str(route.path)
    return request.url.path


def instrument_app(app: FastAPI, *, metrics_path: str = "/metrics", enable_db_metrics: bool = False) -> None:
    """
    Add Prometheus request metrics and a `/metrics` endpoint.

    - Low-cardinality path labels by using FastAPI route templates.
    - Optional SQLAlchemy query timing metrics (off by default).
    """
    global _instrumented
    if _instrumented:
        return
    _instrumented = True

    if Counter is None or Histogram is None:
        # Dependency not installed; keep app working without metrics.
        return

    http_requests_total = Counter(
        "http_requests_total",
        "Total HTTP requests",
        ["method", "path", "status"],
    )
    http_request_duration_seconds = Histogram(
        "http_request_duration_seconds",
        "HTTP request duration (seconds)",
        ["method", "path"],
        buckets=(0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5, 5, 10),
    )

    @app.middleware("http")
    async def _metrics_middleware(request: Request, call_next):
        start = time.perf_counter()
        response: Optional[Response] = None
        try:
            response = await call_next(request)
            return response
        finally:
            duration = time.perf_counter() - start
            path = _get_route_template(request)
            method = request.method
            status = str(getattr(response, "status_code", 500))
            http_requests_total.labels(method=method, path=path, status=status).inc()
            http_request_duration_seconds.labels(method=method, path=path).observe(duration)

    @app.get(metrics_path, include_in_schema=False)
    async def _metrics_endpoint():
        # Optional: export business KPIs derived from telemetry (cached).
        try:
            from app.observability.business_metrics import update_prometheus_business_metrics

            await update_prometheus_business_metrics()
        except Exception:
            # Never break metrics scrape due to KPI computation issues.
            pass
        # Optional: export ops metrics (e.g. backup age).
        try:
            from app.observability.ops_metrics import update_prometheus_ops_metrics

            await update_prometheus_ops_metrics()
        except Exception:
            pass
        return Response(generate_latest(), media_type=CONTENT_TYPE_LATEST)

    if enable_db_metrics:
        _instrument_db_metrics()


def _instrument_db_metrics() -> None:
    """Attach SQLAlchemy event listeners to track query latencies."""
    global _db_instrumented
    if _db_instrumented:
        return
    _db_instrumented = True

    if Counter is None or Histogram is None:
        return

    from sqlalchemy import event
    from app.database import engine

    db_queries_total = Counter(
        "db_queries_total",
        "Total DB queries executed",
        ["dialect"],
    )
    db_query_duration_seconds = Histogram(
        "db_query_duration_seconds",
        "DB query duration (seconds)",
        ["dialect"],
        buckets=(0.001, 0.0025, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5),
    )

    dialect = engine.sync_engine.dialect.name

    @event.listens_for(engine.sync_engine, "before_cursor_execute")
    def _before_cursor_execute(conn, cursor, statement, parameters, context, executemany):  # type: ignore[no-untyped-def]
        conn.info.setdefault("_query_start_time", []).append(time.perf_counter())

    @event.listens_for(engine.sync_engine, "after_cursor_execute")
    def _after_cursor_execute(conn, cursor, statement, parameters, context, executemany):  # type: ignore[no-untyped-def]
        start_list = conn.info.get("_query_start_time") or []
        if not start_list:
            return
        start = start_list.pop()
        duration = time.perf_counter() - start
        db_queries_total.labels(dialect=dialect).inc()
        db_query_duration_seconds.labels(dialect=dialect).observe(duration)
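If it helps to see the call site: `instrument_app` is designed to be invoked once from the application factory. A minimal wiring sketch; the module path `app.observability.metrics` is an assumption (only the `business_metrics` and `ops_metrics` paths are confirmed by the imports above):

```python
from fastapi import FastAPI

from app.observability.metrics import instrument_app  # path assumed, see note above


def create_app() -> FastAPI:
    app = FastAPI()
    # Registers http_requests_total / http_request_duration_seconds and GET /metrics.
    # SQLAlchemy query timing stays off unless explicitly requested.
    instrument_app(app, metrics_path="/metrics", enable_db_metrics=False)
    return app
```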
@ -1,65 +0,0 @@
"""
Ops/health metrics exported as Prometheus metrics (4B Ops).

These are low-frequency filesystem-based metrics (safe on scrape).
"""

from __future__ import annotations

from datetime import datetime
from pathlib import Path

from app.config import get_settings


settings = get_settings()

try:
    from prometheus_client import Gauge
except Exception:  # pragma: no cover
    Gauge = None  # type: ignore


if Gauge is not None:
    db_backups_enabled = Gauge("pounce_db_backups_enabled", "DB backups enabled (1/0)")
    db_backup_latest_unixtime = Gauge("pounce_db_backup_latest_unixtime", "Unix time of latest backup file (0 if none)")
    db_backup_latest_age_seconds = Gauge("pounce_db_backup_latest_age_seconds", "Age of latest backup file (seconds)")
else:  # pragma: no cover
    db_backups_enabled = None  # type: ignore
    db_backup_latest_unixtime = None  # type: ignore
    db_backup_latest_age_seconds = None  # type: ignore


def _backup_root() -> Path:
    root = Path(settings.backup_dir)
    if not root.is_absolute():
        root = (Path.cwd() / root).resolve()
    return root


async def update_prometheus_ops_metrics() -> None:
    if Gauge is None:
        return

    db_backups_enabled.set(1 if settings.enable_db_backups else 0)

    root = _backup_root()
    if not root.exists() or not root.is_dir():
        db_backup_latest_unixtime.set(0)
        db_backup_latest_age_seconds.set(0)
        return

    files = [p for p in root.glob("*") if p.is_file()]
    if not files:
        db_backup_latest_unixtime.set(0)
        db_backup_latest_age_seconds.set(0)
        return

    latest = max(files, key=lambda p: p.stat().st_mtime)
    mtime = float(latest.stat().st_mtime)
    now = datetime.utcnow().timestamp()
    age = max(0.0, now - mtime)

    db_backup_latest_unixtime.set(mtime)
    db_backup_latest_age_seconds.set(age)
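A quick smoke check for the backup gauges might look like the following sketch; it assumes `prometheus_client` is installed and the `app` package is importable from the working directory:

```python
import asyncio

from prometheus_client import generate_latest

from app.observability.ops_metrics import update_prometheus_ops_metrics


async def main() -> None:
    await update_prometheus_ops_metrics()
    # The default registry should now carry the pounce_db_backup_* samples.
    for line in generate_latest().decode().splitlines():
        if line.startswith("pounce_db_backup"):
            print(line)


asyncio.run(main())
```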
@ -1,542 +0,0 @@
"""Portfolio API routes."""
from datetime import datetime
from typing import Optional, List
from fastapi import APIRouter, Depends, HTTPException, status, Query
from pydantic import BaseModel, Field
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession

from app.database import get_db
from app.routes.auth import get_current_user
from app.models.user import User
from app.models.portfolio import PortfolioDomain, DomainValuation
from app.services.valuation import valuation_service

router = APIRouter(prefix="/portfolio", tags=["portfolio"])


# ============== Schemas ==============

class PortfolioDomainCreate(BaseModel):
    """Schema for creating a portfolio domain."""
    domain: str = Field(..., min_length=3, max_length=255)
    purchase_date: Optional[datetime] = None
    purchase_price: Optional[float] = Field(None, ge=0)
    purchase_registrar: Optional[str] = None
    registrar: Optional[str] = None
    renewal_date: Optional[datetime] = None
    renewal_cost: Optional[float] = Field(None, ge=0)
    auto_renew: bool = True
    notes: Optional[str] = None
    tags: Optional[str] = None


class PortfolioDomainUpdate(BaseModel):
    """Schema for updating a portfolio domain."""
    purchase_date: Optional[datetime] = None
    purchase_price: Optional[float] = Field(None, ge=0)
    purchase_registrar: Optional[str] = None
    registrar: Optional[str] = None
    renewal_date: Optional[datetime] = None
    renewal_cost: Optional[float] = Field(None, ge=0)
    auto_renew: Optional[bool] = None
    status: Optional[str] = None
    notes: Optional[str] = None
    tags: Optional[str] = None


class PortfolioDomainSell(BaseModel):
    """Schema for marking a domain as sold."""
    sale_date: datetime
    sale_price: float = Field(..., ge=0)


class PortfolioDomainResponse(BaseModel):
    """Response schema for portfolio domain."""
    id: int
    domain: str
    purchase_date: Optional[datetime]
    purchase_price: Optional[float]
    purchase_registrar: Optional[str]
    registrar: Optional[str]
    renewal_date: Optional[datetime]
    renewal_cost: Optional[float]
    auto_renew: bool
    estimated_value: Optional[float]
    value_updated_at: Optional[datetime]
    is_sold: bool
    sale_date: Optional[datetime]
    sale_price: Optional[float]
    status: str
    notes: Optional[str]
    tags: Optional[str]
    roi: Optional[float]
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True


class PortfolioSummary(BaseModel):
    """Summary of user's portfolio."""
    total_domains: int
    active_domains: int
    sold_domains: int
    total_invested: float
    total_value: float
    total_sold_value: float
    unrealized_profit: float
    realized_profit: float
    overall_roi: float


class ValuationResponse(BaseModel):
    """Response schema for domain valuation."""
    domain: str
    estimated_value: float
    currency: str
    scores: dict
    factors: dict
    confidence: str
    source: str
    calculated_at: str


# ============== Portfolio Endpoints ==============

@router.get("", response_model=List[PortfolioDomainResponse])
async def get_portfolio(
    status: Optional[str] = Query(None, description="Filter by status"),
    sort_by: str = Query("created_at", description="Sort field"),
    sort_order: str = Query("desc", description="Sort order (asc/desc)"),
    limit: int = Query(100, le=500),
    offset: int = Query(0, ge=0),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Get user's portfolio domains."""
    query = select(PortfolioDomain).where(PortfolioDomain.user_id == current_user.id)

    # Filter by status
    if status:
        query = query.where(PortfolioDomain.status == status)

    # Sorting
    sort_column = getattr(PortfolioDomain, sort_by, PortfolioDomain.created_at)
    if sort_order == "asc":
        query = query.order_by(sort_column.asc())
    else:
        query = query.order_by(sort_column.desc())

    # Pagination
    query = query.offset(offset).limit(limit)

    result = await db.execute(query)
    domains = result.scalars().all()

    # Calculate ROI for each domain
    responses = []
    for d in domains:
        response = PortfolioDomainResponse(
            id=d.id,
            domain=d.domain,
            purchase_date=d.purchase_date,
            purchase_price=d.purchase_price,
            purchase_registrar=d.purchase_registrar,
            registrar=d.registrar,
            renewal_date=d.renewal_date,
            renewal_cost=d.renewal_cost,
            auto_renew=d.auto_renew,
            estimated_value=d.estimated_value,
            value_updated_at=d.value_updated_at,
            is_sold=d.is_sold,
            sale_date=d.sale_date,
            sale_price=d.sale_price,
            status=d.status,
            notes=d.notes,
            tags=d.tags,
            roi=d.roi,
            created_at=d.created_at,
            updated_at=d.updated_at,
        )
        responses.append(response)

    return responses


@router.get("/summary", response_model=PortfolioSummary)
async def get_portfolio_summary(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Get portfolio summary statistics."""
    result = await db.execute(
        select(PortfolioDomain).where(PortfolioDomain.user_id == current_user.id)
    )
    domains = result.scalars().all()

    total_domains = len(domains)
    active_domains = sum(1 for d in domains if d.status == "active" and not d.is_sold)
    sold_domains = sum(1 for d in domains if d.is_sold)

    total_invested = sum(d.purchase_price or 0 for d in domains)
    total_value = sum(d.estimated_value or 0 for d in domains if not d.is_sold)
    total_sold_value = sum(d.sale_price or 0 for d in domains if d.is_sold)

    # Calculate active investment for ROI
    active_investment = sum(d.purchase_price or 0 for d in domains if not d.is_sold)
    sold_investment = sum(d.purchase_price or 0 for d in domains if d.is_sold)

    unrealized_profit = total_value - active_investment
    realized_profit = total_sold_value - sold_investment

    overall_roi = 0.0
    if total_invested > 0:
        overall_roi = ((total_value + total_sold_value - total_invested) / total_invested) * 100

    return PortfolioSummary(
        total_domains=total_domains,
        active_domains=active_domains,
        sold_domains=sold_domains,
        total_invested=round(total_invested, 2),
        total_value=round(total_value, 2),
        total_sold_value=round(total_sold_value, 2),
        unrealized_profit=round(unrealized_profit, 2),
        realized_profit=round(realized_profit, 2),
        overall_roi=round(overall_roi, 2),
    )


@router.post("", response_model=PortfolioDomainResponse, status_code=status.HTTP_201_CREATED)
async def add_portfolio_domain(
    data: PortfolioDomainCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Add a domain to portfolio."""
    # Check if domain already exists in user's portfolio
    existing = await db.execute(
        select(PortfolioDomain).where(
            and_(
                PortfolioDomain.user_id == current_user.id,
                PortfolioDomain.domain == data.domain.lower(),
            )
        )
    )
    if existing.scalar_one_or_none():
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Domain already in portfolio",
        )

    # Get initial valuation
    valuation = await valuation_service.estimate_value(data.domain, db, save_result=True)
    estimated_value = valuation.get("estimated_value") if "error" not in valuation else None

    # Create portfolio entry
    domain = PortfolioDomain(
        user_id=current_user.id,
        domain=data.domain.lower(),
        purchase_date=data.purchase_date,
        purchase_price=data.purchase_price,
        purchase_registrar=data.purchase_registrar,
        registrar=data.registrar or data.purchase_registrar,
        renewal_date=data.renewal_date,
        renewal_cost=data.renewal_cost,
        auto_renew=data.auto_renew,
        estimated_value=estimated_value,
        value_updated_at=datetime.utcnow() if estimated_value else None,
        notes=data.notes,
        tags=data.tags,
    )

    db.add(domain)
    await db.commit()
    await db.refresh(domain)

    return PortfolioDomainResponse(
        id=domain.id,
        domain=domain.domain,
        purchase_date=domain.purchase_date,
        purchase_price=domain.purchase_price,
        purchase_registrar=domain.purchase_registrar,
        registrar=domain.registrar,
        renewal_date=domain.renewal_date,
        renewal_cost=domain.renewal_cost,
        auto_renew=domain.auto_renew,
        estimated_value=domain.estimated_value,
        value_updated_at=domain.value_updated_at,
        is_sold=domain.is_sold,
        sale_date=domain.sale_date,
        sale_price=domain.sale_price,
        status=domain.status,
        notes=domain.notes,
        tags=domain.tags,
        roi=domain.roi,
        created_at=domain.created_at,
        updated_at=domain.updated_at,
    )


@router.get("/{domain_id}", response_model=PortfolioDomainResponse)
async def get_portfolio_domain(
    domain_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Get a specific portfolio domain."""
    result = await db.execute(
        select(PortfolioDomain).where(
            and_(
                PortfolioDomain.id == domain_id,
                PortfolioDomain.user_id == current_user.id,
            )
        )
    )
    domain = result.scalar_one_or_none()

    if not domain:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Domain not found in portfolio",
        )

    return PortfolioDomainResponse(
        id=domain.id,
        domain=domain.domain,
        purchase_date=domain.purchase_date,
        purchase_price=domain.purchase_price,
        purchase_registrar=domain.purchase_registrar,
        registrar=domain.registrar,
        renewal_date=domain.renewal_date,
        renewal_cost=domain.renewal_cost,
        auto_renew=domain.auto_renew,
        estimated_value=domain.estimated_value,
        value_updated_at=domain.value_updated_at,
        is_sold=domain.is_sold,
        sale_date=domain.sale_date,
        sale_price=domain.sale_price,
        status=domain.status,
        notes=domain.notes,
        tags=domain.tags,
        roi=domain.roi,
        created_at=domain.created_at,
        updated_at=domain.updated_at,
    )


@router.put("/{domain_id}", response_model=PortfolioDomainResponse)
async def update_portfolio_domain(
    domain_id: int,
    data: PortfolioDomainUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Update a portfolio domain."""
    result = await db.execute(
        select(PortfolioDomain).where(
            and_(
                PortfolioDomain.id == domain_id,
                PortfolioDomain.user_id == current_user.id,
            )
        )
    )
    domain = result.scalar_one_or_none()

    if not domain:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Domain not found in portfolio",
        )

    # Update fields
    update_data = data.model_dump(exclude_unset=True)
    for field, value in update_data.items():
        setattr(domain, field, value)

    await db.commit()
    await db.refresh(domain)

    return PortfolioDomainResponse(
        id=domain.id,
        domain=domain.domain,
        purchase_date=domain.purchase_date,
        purchase_price=domain.purchase_price,
        purchase_registrar=domain.purchase_registrar,
        registrar=domain.registrar,
        renewal_date=domain.renewal_date,
        renewal_cost=domain.renewal_cost,
        auto_renew=domain.auto_renew,
        estimated_value=domain.estimated_value,
        value_updated_at=domain.value_updated_at,
        is_sold=domain.is_sold,
        sale_date=domain.sale_date,
        sale_price=domain.sale_price,
        status=domain.status,
        notes=domain.notes,
        tags=domain.tags,
        roi=domain.roi,
        created_at=domain.created_at,
        updated_at=domain.updated_at,
    )


@router.post("/{domain_id}/sell", response_model=PortfolioDomainResponse)
async def mark_domain_sold(
    domain_id: int,
    data: PortfolioDomainSell,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Mark a domain as sold."""
    result = await db.execute(
        select(PortfolioDomain).where(
            and_(
                PortfolioDomain.id == domain_id,
                PortfolioDomain.user_id == current_user.id,
            )
        )
    )
    domain = result.scalar_one_or_none()

    if not domain:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Domain not found in portfolio",
        )

    domain.is_sold = True
    domain.sale_date = data.sale_date
    domain.sale_price = data.sale_price
    domain.status = "sold"

    await db.commit()
    await db.refresh(domain)

    return PortfolioDomainResponse(
        id=domain.id,
        domain=domain.domain,
        purchase_date=domain.purchase_date,
        purchase_price=domain.purchase_price,
        purchase_registrar=domain.purchase_registrar,
        registrar=domain.registrar,
        renewal_date=domain.renewal_date,
        renewal_cost=domain.renewal_cost,
        auto_renew=domain.auto_renew,
        estimated_value=domain.estimated_value,
        value_updated_at=domain.value_updated_at,
        is_sold=domain.is_sold,
        sale_date=domain.sale_date,
        sale_price=domain.sale_price,
        status=domain.status,
        notes=domain.notes,
        tags=domain.tags,
        roi=domain.roi,
        created_at=domain.created_at,
        updated_at=domain.updated_at,
    )


@router.delete("/{domain_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_portfolio_domain(
    domain_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Delete a domain from portfolio."""
    result = await db.execute(
        select(PortfolioDomain).where(
            and_(
                PortfolioDomain.id == domain_id,
                PortfolioDomain.user_id == current_user.id,
            )
        )
    )
    domain = result.scalar_one_or_none()

    if not domain:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Domain not found in portfolio",
        )

    await db.delete(domain)
    await db.commit()


@router.post("/{domain_id}/refresh-value", response_model=PortfolioDomainResponse)
async def refresh_domain_value(
    domain_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Refresh the estimated value of a portfolio domain."""
    result = await db.execute(
        select(PortfolioDomain).where(
            and_(
                PortfolioDomain.id == domain_id,
                PortfolioDomain.user_id == current_user.id,
            )
        )
    )
    domain = result.scalar_one_or_none()

    if not domain:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Domain not found in portfolio",
        )

    # Get new valuation
    valuation = await valuation_service.estimate_value(domain.domain, db, save_result=True)

    if "error" not in valuation:
        domain.estimated_value = valuation["estimated_value"]
        domain.value_updated_at = datetime.utcnow()
        await db.commit()
        await db.refresh(domain)

    return PortfolioDomainResponse(
        id=domain.id,
        domain=domain.domain,
        purchase_date=domain.purchase_date,
        purchase_price=domain.purchase_price,
        purchase_registrar=domain.purchase_registrar,
        registrar=domain.registrar,
        renewal_date=domain.renewal_date,
        renewal_cost=domain.renewal_cost,
        auto_renew=domain.auto_renew,
        estimated_value=domain.estimated_value,
        value_updated_at=domain.value_updated_at,
        is_sold=domain.is_sold,
        sale_date=domain.sale_date,
        sale_price=domain.sale_price,
        status=domain.status,
        notes=domain.notes,
        tags=domain.tags,
        roi=domain.roi,
        created_at=domain.created_at,
        updated_at=domain.updated_at,
    )


# ============== Valuation Endpoints ==============

@router.get("/valuation/{domain}", response_model=ValuationResponse)
async def get_domain_valuation(
    domain: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Get estimated value for any domain."""
    valuation = await valuation_service.estimate_value(domain, db, save_result=True)

    if "error" in valuation:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=valuation["error"],
        )

    return ValuationResponse(**valuation)
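The router above mounts at `/portfolio`; a hedged client-side sketch of the add-then-summarize flow (the base URL and bearer token are placeholders, not values from the source):

```python
import httpx

BASE_URL = "http://localhost:8000"             # placeholder dev server
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical auth token

with httpx.Client(base_url=BASE_URL, headers=HEADERS) as client:
    # POST /portfolio creates the entry and triggers an initial valuation.
    created = client.post("/portfolio", json={"domain": "example.com", "purchase_price": 120.0})
    created.raise_for_status()

    # GET /portfolio/summary aggregates invested capital, current value, and ROI.
    summary = client.get("/portfolio/summary").json()
    print(summary["total_invested"], summary["overall_roi"])
```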
File diff suppressed because it is too large.
@ -1,35 +0,0 @@
"""
Analyze schemas (Alpha Terminal - Phase 2 Diligence).

Open-data-first: we return null + reason when data isn't available.
"""

from __future__ import annotations

from datetime import datetime
from typing import Any, Optional

from pydantic import BaseModel, Field


class AnalyzeItem(BaseModel):
    key: str
    label: str
    value: Optional[Any] = None
    status: str = Field(default="info", description="pass|warn|fail|info|na")
    source: str = Field(default="internal", description="internal|rdap|whois|dns|http|ssl|db|open_data")
    details: dict[str, Any] = Field(default_factory=dict)


class AnalyzeSection(BaseModel):
    key: str
    title: str
    items: list[AnalyzeItem] = Field(default_factory=list)


class AnalyzeResponse(BaseModel):
    domain: str
    computed_at: datetime
    cached: bool = False
    sections: list[AnalyzeSection]
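Since every item carries its own `status` and `source`, a full response is just nested models. A small construction sketch showing the "null value plus reason" convention from the docstring (it assumes the three classes above are in scope; all values are illustrative):

```python
from datetime import datetime

# Assumes AnalyzeItem / AnalyzeSection / AnalyzeResponse from above are importable.
resp = AnalyzeResponse(
    domain="example.com",
    computed_at=datetime.utcnow(),
    sections=[
        AnalyzeSection(
            key="dns",
            title="DNS",
            items=[
                # Open-data-first: value stays None and details carries the reason.
                AnalyzeItem(key="ns", label="Nameservers", value=None, status="na",
                            source="dns", details={"reason": "lookup not attempted"}),
            ],
        )
    ],
)
print(resp.model_dump_json(indent=2))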
@ -10,8 +10,6 @@ class UserCreate(BaseModel):
    email: EmailStr
    password: str = Field(..., min_length=8, max_length=100)
    name: Optional[str] = Field(None, max_length=100)
    # Yield referral tracking
    ref: Optional[str] = Field(None, max_length=100, description="Referral code from yield domain")


class UserLogin(BaseModel):
@ -41,36 +39,8 @@ class Token(BaseModel):
    expires_in: int


class LoginResponse(BaseModel):
    """Login response when using HttpOnly cookie authentication."""
    expires_in: int


class TokenData(BaseModel):
    """Schema for token payload data."""
    user_id: Optional[int] = None
    email: Optional[str] = None


class ReferralStats(BaseModel):
    """Referral reward snapshot for the current user (3C.2)."""

    window_days: int = 30
    referred_users_total: int = 0
    qualified_referrals_total: int = 0
    referral_link_views_window: int = 0
    bonus_domains: int = 0
    next_reward_at: int = 0
    badge: Optional[str] = None  # "verified_referrer" | "elite_referrer"
    cooldown_days: int = 7
    disqualified_cooldown_total: int = 0
    disqualified_missing_ip_total: int = 0
    disqualified_shared_ip_total: int = 0
    disqualified_duplicate_ip_total: int = 0


class ReferralLinkResponse(BaseModel):
    invite_code: str
    url: str
    stats: ReferralStats
@ -1,51 +0,0 @@
"""CFO (Management) schemas."""

from __future__ import annotations

from datetime import datetime
from typing import Optional

from pydantic import BaseModel, Field


class CfoMonthlyBucket(BaseModel):
    month: str  # YYYY-MM
    total_cost_usd: float = 0.0
    domains: int = 0


class CfoUpcomingCostRow(BaseModel):
    domain_id: int
    domain: str
    renewal_date: Optional[datetime] = None
    renewal_cost_usd: Optional[float] = None
    cost_source: str = Field(default="unknown", description="portfolio|tld_prices|unknown")
    is_sold: bool = False


class CfoKillListRow(BaseModel):
    domain_id: int
    domain: str
    renewal_date: Optional[datetime] = None
    renewal_cost_usd: Optional[float] = None
    cost_source: str = "unknown"
    auto_renew: bool = True
    is_dns_verified: bool = False
    yield_net_60d: float = 0.0
    yield_clicks_60d: int = 0
    reason: str


class CfoSummaryResponse(BaseModel):
    computed_at: datetime
    upcoming_30d_total_usd: float = 0.0
    upcoming_30d_rows: list[CfoUpcomingCostRow] = []
    monthly: list[CfoMonthlyBucket] = []
    kill_list: list[CfoKillListRow] = []


class SetToDropResponse(BaseModel):
    domain_id: int
    auto_renew: bool
    updated_at: datetime
@ -88,15 +88,3 @@ class DomainListResponse(BaseModel):
    per_page: int
    pages: int


class ExpiryUpdate(BaseModel):
    """Schema for manually setting domain expiration date."""
    expiration_date: Optional[datetime] = None

    class Config:
        json_schema_extra = {
            "example": {
                "expiration_date": "2025-12-31T00:00:00Z"
            }
        }
@ -1,93 +0,0 @@
"""HUNT (Discovery) schemas."""

from __future__ import annotations

from datetime import datetime
from typing import Optional

from pydantic import BaseModel, Field


class HuntSniperItem(BaseModel):
    domain: str
    platform: str
    auction_url: str
    current_bid: float
    currency: str
    end_time: datetime
    age_years: Optional[int] = None
    backlinks: Optional[int] = None
    pounce_score: Optional[int] = None


class HuntSniperResponse(BaseModel):
    items: list[HuntSniperItem]
    total: int
    filtered_out_missing_data: int = 0
    last_updated: Optional[datetime] = None


class TrendItem(BaseModel):
    title: str
    approx_traffic: Optional[str] = None
    published_at: Optional[datetime] = None
    link: Optional[str] = None


class TrendsResponse(BaseModel):
    geo: str = "US"
    items: list[TrendItem]
    fetched_at: datetime


class KeywordAvailabilityRequest(BaseModel):
    keywords: list[str] = Field(min_length=1, max_length=25)
    tlds: list[str] = Field(default_factory=lambda: ["com", "io", "ai", "net", "org"], max_length=20)


class KeywordAvailabilityRow(BaseModel):
    keyword: str
    domain: str
    tld: str
    is_available: Optional[bool] = None
    status: str  # available|taken|unknown


class KeywordAvailabilityResponse(BaseModel):
    items: list[KeywordAvailabilityRow]


class TypoCheckRequest(BaseModel):
    brand: str = Field(min_length=2, max_length=50)
    tlds: list[str] = Field(default_factory=lambda: ["com"], max_length=10)
    limit: int = Field(default=50, ge=1, le=200)


class TypoCandidate(BaseModel):
    domain: str
    is_available: Optional[bool] = None
    status: str  # available|taken|unknown


class TypoCheckResponse(BaseModel):
    brand: str
    items: list[TypoCandidate]


class BrandableRequest(BaseModel):
    pattern: str = Field(description="cvcvc|cvccv|human", examples=["cvcvc"])
    tlds: list[str] = Field(default_factory=lambda: ["com"], max_length=10)
    limit: int = Field(default=30, ge=1, le=100)
    max_checks: int = Field(default=400, ge=50, le=2000)


class BrandableCandidate(BaseModel):
    domain: str
    is_available: Optional[bool] = None
    status: str  # available|taken|unknown


class BrandableResponse(BaseModel):
    pattern: str
    items: list[BrandableCandidate]
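The request models encode their guardrails (keyword and TLD caps, check budgets) directly in `Field` constraints, so oversized or empty payloads fail before any lookups run. A short sketch of that behavior under Pydantic v2 semantics (classes from above assumed in scope):

```python
from pydantic import ValidationError

# Defaults fill in the TLD list; constraints reject bad requests up front.
req = KeywordAvailabilityRequest(keywords=["solar", "battery"])
print(req.tlds)  # ['com', 'io', 'ai', 'net', 'org']

try:
    KeywordAvailabilityRequest(keywords=[])  # violates min_length=1 on the list
except ValidationError as exc:
    print(exc.error_count(), "validation error")
```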
@ -1,33 +0,0 @@
"""
Referral schemas (3C.2).
"""

from __future__ import annotations

from datetime import datetime
from typing import Optional

from pydantic import BaseModel, Field


class ReferralKpiWindow(BaseModel):
    days: int = Field(ge=1, le=365)
    start: datetime
    end: datetime


class ReferralReferrerRow(BaseModel):
    user_id: int
    email: str
    invite_code: Optional[str] = None
    created_at: datetime
    referred_users_total: int = 0
    referred_users_window: int = 0
    referral_link_views_window: int = 0


class ReferralKpisResponse(BaseModel):
    window: ReferralKpiWindow
    totals: dict[str, int]
    referrers: list[ReferralReferrerRow]
@ -1,47 +0,0 @@
"""
Telemetry schemas (4A.2).
"""

from __future__ import annotations

from datetime import datetime
from typing import Optional

from pydantic import BaseModel, Field


class TelemetryKpiWindow(BaseModel):
    days: int = Field(ge=1, le=365)
    start: datetime
    end: datetime


class DealFunnelKpis(BaseModel):
    listing_views: int = 0
    inquiries_created: int = 0
    seller_replied_inquiries: int = 0
    inquiry_reply_rate: float = 0.0

    listings_with_inquiries: int = 0
    listings_sold: int = 0
    inquiry_to_sold_listing_rate: float = 0.0

    median_reply_seconds: Optional[float] = None
    median_time_to_sold_seconds: Optional[float] = None


class YieldFunnelKpis(BaseModel):
    connected_domains: int = 0
    clicks: int = 0
    conversions: int = 0
    conversion_rate: float = 0.0

    payouts_paid: int = 0
    payouts_paid_amount_total: float = 0.0


class TelemetryKpisResponse(BaseModel):
    window: TelemetryKpiWindow
    deal: DealFunnelKpis
    yield_: YieldFunnelKpis = Field(alias="yield")
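One detail worth calling out: `yield` is a Python keyword, so the schema stores the field as `yield_` and exposes it through a Pydantic alias. A sketch of how that plays out (classes above assumed in scope; Pydantic v2 semantics):

```python
from datetime import datetime, timedelta

now = datetime.utcnow()
resp = TelemetryKpisResponse(
    window=TelemetryKpiWindow(days=7, start=now - timedelta(days=7), end=now),
    deal=DealFunnelKpis(listing_views=42),
    # Validation expects the alias, so the keyword-named key goes in via **kwargs.
    **{"yield": YieldFunnelKpis(clicks=10, conversions=1, conversion_rate=0.1)},
)
# Dumping by alias round-trips the "yield" key for API consumers.
print(resp.model_dump(by_alias=True)["yield"]["conversion_rate"])  # 0.1
```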
Some files were not shown because too many files have changed in this diff.