diff --git a/UNICORN_PLAN.md b/UNICORN_PLAN.md
index 8010e96..fb134d0 100644
--- a/UNICORN_PLAN.md
+++ b/UNICORN_PLAN.md
@@ -4,6 +4,67 @@ Goal: take Pounce from a strong product (Trust + Inventory + Lead Capture) to a
 
 ---
 
+## Implementation Status (as of 2025-12-15)
+
+### Where we stand (short, honest)
+
+- **Deal system (Liquidity Loop)**: **done & hardened** (Inbox → Threading → Sold/GMV → Anti-Abuse).
+- **Yield (Moat)**: **Connect + Routing + Tracking + Webhooks + ledger basics** are in place. We can connect domains, route traffic, track clicks/conversions, and prepare/complete payouts.
+- **Flywheel/Distribution**: partial (the public deal surface + login gate exist); programmatic SEO & the viral loop are not yet built out systematically.
+- **Telemetry/Ops**: individual events existed implicitly (audit/transactions), but there was **no central event schema + KPI dashboard**; workstream 4 below closes that gap.
+
+### Progress by workstream
+
+#### 1) Deal system
+- [x] 1A Inbox workflow (status, close reason, audit)
+- [x] 1B Threading/negotiation (buyer/seller threads + email + rate limits + content safety)
+- [x] 1C Deal closure + GMV (mark as sold, close open inquiries)
+- [x] 1D Anti-abuse (limits + safety checks at the critical spots)
+
+#### 2) Yield (Moat)
+- [x] 2A Connect/nameserver flow (portfolio-only + DNS verified + connect wizard + `connected_at`)
+- [x] 2B Routing → tracking (async, click tracking, IP hashing, rate limiting, strict partner config)
+- [x] 2B Attribution (webhooks can send a `click_id` along)
+- [x] 2C Ledger/payout basics (generate payouts + complete payouts; server-safe keys)
+- [x] 2C.2 Dashboard correctness (monthly stats = confirmed/paid; pending payout = confirmed + unpaid)
+
+#### 3) Flywheel / Distribution
+- [~] 3B Public deal surface + login gate (Pounce Direct gated) — **in place**
+- [~] 3A Programmatic SEO at full depth (templates + CTA paths + indexation)
+- [~] 3C Viral loop "Powered by Pounce" (only where intent fits; a clean referral loop)
+
+**3C status (viral loop)**
+- **Invite codes**: every user now has their own unique `invite_code`, and `GET /api/v1/auth/referral` returns the invite link.
+- **Attribution**: `ref` is stored in a cookie on public pages (30 days) and sent along with `/register` → the backend sets `referred_by_user_id`.
+- **Surfaces (intent fit)**:
+  - Terminal settings: "Invite" panel with copy link
+  - Public buy listing: "Powered by Pounce" → register with `?ref=`
+- **Telemetry**: events `user_registered`, `referral_attributed`, `referral_link_viewed`
+- **Admin KPIs (3C.2)**: the Telemetry tab now shows referral KPIs (link views + signups per referrer) via `GET /api/v1/telemetry/referrals?days=...`
+- **Rewards/badges (3C.2)**: deterministic, abuse-resistant referral rewards → `subscriptions.referral_bonus_domains` (+5 slots per 3 "qualified referrals"; see the sketch below); the `verified_referrer` / `elite_referrer` badge is shown in the Invite panel of the terminal settings.
+  - **Anti-fraud/cooldown**: a referral only counts as qualified after a **cooldown** (user + subscription age) and is disqualified on **shared IP / duplicate IP / missing IP** (telemetry `ip_hash`).
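+
+A minimal sketch of the reward math, assuming `BONUS_DOMAINS_PER_BATCH` is an illustrative name (the actual logic lives in `app/services/referral_rewards.py`, which exports `QUALIFIED_REFERRAL_BATCH_SIZE`):
+
+```python
+QUALIFIED_REFERRAL_BATCH_SIZE = 3  # batch size used by the backend
+BONUS_DOMAINS_PER_BATCH = 5        # illustrative: the "+5 slots" per batch
+
+def bonus_domains(qualified: int) -> int:
+    # Floor division keeps the reward deterministic and idempotent:
+    # slots only change once a full batch of 3 qualified referrals completes.
+    return (qualified // QUALIFIED_REFERRAL_BATCH_SIZE) * BONUS_DOMAINS_PER_BATCH
+
+def next_reward_at(qualified: int) -> int:
+    # Next multiple of the batch size strictly above the current count,
+    # matching the computation the /auth/referral endpoint returns to the UI.
+    return (qualified // QUALIFIED_REFERRAL_BATCH_SIZE + 1) * QUALIFIED_REFERRAL_BATCH_SIZE
+```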
+
+**3A status (programmatic SEO)**
+- **Indexation**: `sitemap.xml` is now dynamic (discover TLDs from the DB + blog slugs + public listings), and `robots.txt` blocks legacy paths.
+- **Canonical cleanup**: legacy routes (`/tld/*`, `/tld-pricing/*`) redirect server-side to `/discover/*`.
+- **Templates**: `/discover/[tld]` now has server-side metadata + JSON-LD (built from real registrar comparison data). `/buy/[slug]` is server-rendered (metadata + JSON-LD).
+- **Blog article SEO**: `/blog/[slug]` now has server-side `generateMetadata` + Article JSON-LD, without view-count side effects (via a meta endpoint).
+
+#### 4) Scaling / Telemetry
+- [x] 4A Events (canonical event schema + persistent events in the deal + yield funnels)
+- [x] 4A.2 KPI views (admin KPIs from telemetry events: rates + median times)
+- [x] 4B Ops (backups + restore verification + monitoring/alerts + deliverability)
+
+**4B status (ops)**
+- **Backups**: admin endpoint + scheduled daily backup + restore verification (SQLite `integrity_check` / Postgres `pg_restore --list`)
+- **Monitoring**: `/metrics` now also exports business KPIs (deal + yield from `telemetry_events`, cached) + ops metrics (backups enabled + backup age)
+- **Deliverability**: newsletter emails carry `List-Unsubscribe` (one-click) + a new one-click unsubscribe route
+- **Alerting (groundwork)**: `ops/prometheus-alerts.yml` with alerts (5xx rate, stale backup, funnel at zero for 24h)
+- **Alerting (without Docker)**: scheduler job `ops_alerting` + admin endpoint `POST /api/v1/admin/system/ops-alerts/run`
+- **Alert history + cooldown (persisted)**: table `ops_alert_events` + admin endpoint `GET /api/v1/admin/system/ops-alerts/history` + a history panel in the admin UI
+
+---
+
 ## Intent & Holistic Concept
 
 ### Intent (why Pounce exists)
diff --git a/backend/alembic/versions/007_add_inquiry_audit_and_close.py b/backend/alembic/versions/007_add_inquiry_audit_and_close.py
new file mode 100644
index 0000000..3574086
--- /dev/null
+++ b/backend/alembic/versions/007_add_inquiry_audit_and_close.py
@@ -0,0 +1,74 @@
+"""Add inquiry close fields + audit trail
+
+Revision ID: 007
+Revises: 006
+Create Date: 2025-12-15
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
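+# `alembic upgrade head` walks the down_revision chain, so 007 applies after 006.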
+revision = '007' +down_revision = '006' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # listing_inquiries: deal workflow + op.add_column('listing_inquiries', sa.Column('closed_reason', sa.String(200), nullable=True)) + op.add_column('listing_inquiries', sa.Column('closed_at', sa.DateTime(), nullable=True)) + + op.create_index( + 'ix_listing_inquiries_listing_created', + 'listing_inquiries', + ['listing_id', 'created_at'], + unique=False, + ) + op.create_index( + 'ix_listing_inquiries_listing_status', + 'listing_inquiries', + ['listing_id', 'status'], + unique=False, + ) + + # listing_inquiry_events: audit trail + op.create_table( + 'listing_inquiry_events', + sa.Column('id', sa.Integer(), primary_key=True), + sa.Column('inquiry_id', sa.Integer(), sa.ForeignKey('listing_inquiries.id'), nullable=False, index=True), + sa.Column('listing_id', sa.Integer(), sa.ForeignKey('domain_listings.id'), nullable=False, index=True), + sa.Column('actor_user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=False, index=True), + sa.Column('old_status', sa.String(20), nullable=True), + sa.Column('new_status', sa.String(20), nullable=False), + sa.Column('reason', sa.String(200), nullable=True), + sa.Column('ip_address', sa.String(45), nullable=True), + sa.Column('user_agent', sa.String(500), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True, index=True), + ) + + op.create_index( + 'ix_listing_inquiry_events_inquiry_created', + 'listing_inquiry_events', + ['inquiry_id', 'created_at'], + unique=False, + ) + op.create_index( + 'ix_listing_inquiry_events_listing_created', + 'listing_inquiry_events', + ['listing_id', 'created_at'], + unique=False, + ) + + +def downgrade() -> None: + op.drop_index('ix_listing_inquiry_events_listing_created', table_name='listing_inquiry_events') + op.drop_index('ix_listing_inquiry_events_inquiry_created', table_name='listing_inquiry_events') + op.drop_table('listing_inquiry_events') + + op.drop_index('ix_listing_inquiries_listing_status', table_name='listing_inquiries') + op.drop_index('ix_listing_inquiries_listing_created', table_name='listing_inquiries') + op.drop_column('listing_inquiries', 'closed_at') + op.drop_column('listing_inquiries', 'closed_reason') diff --git a/backend/alembic/versions/008_add_inquiry_threading.py b/backend/alembic/versions/008_add_inquiry_threading.py new file mode 100644 index 0000000..9559269 --- /dev/null +++ b/backend/alembic/versions/008_add_inquiry_threading.py @@ -0,0 +1,61 @@ +"""Add inquiry threading (buyer link + messages) + +Revision ID: 008 +Revises: 007 +Create Date: 2025-12-15 +""" + +from alembic import op +import sqlalchemy as sa + + +revision = '008' +down_revision = '007' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Link inquiry to buyer account + op.add_column('listing_inquiries', sa.Column('buyer_user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True)) + op.create_index('ix_listing_inquiries_buyer_user', 'listing_inquiries', ['buyer_user_id'], unique=False) + + # Thread messages + op.create_table( + 'listing_inquiry_messages', + sa.Column('id', sa.Integer(), primary_key=True), + sa.Column('inquiry_id', sa.Integer(), sa.ForeignKey('listing_inquiries.id'), nullable=False, index=True), + sa.Column('listing_id', sa.Integer(), sa.ForeignKey('domain_listings.id'), nullable=False, index=True), + sa.Column('sender_user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=False, index=True), + sa.Column('body', sa.Text(), nullable=False), + 
sa.Column('created_at', sa.DateTime(), nullable=True, index=True), + ) + + op.create_index( + 'ix_listing_inquiry_messages_inquiry_created', + 'listing_inquiry_messages', + ['inquiry_id', 'created_at'], + unique=False, + ) + op.create_index( + 'ix_listing_inquiry_messages_listing_created', + 'listing_inquiry_messages', + ['listing_id', 'created_at'], + unique=False, + ) + op.create_index( + 'ix_listing_inquiry_messages_sender_created', + 'listing_inquiry_messages', + ['sender_user_id', 'created_at'], + unique=False, + ) + + +def downgrade() -> None: + op.drop_index('ix_listing_inquiry_messages_sender_created', table_name='listing_inquiry_messages') + op.drop_index('ix_listing_inquiry_messages_listing_created', table_name='listing_inquiry_messages') + op.drop_index('ix_listing_inquiry_messages_inquiry_created', table_name='listing_inquiry_messages') + op.drop_table('listing_inquiry_messages') + + op.drop_index('ix_listing_inquiries_buyer_user', table_name='listing_inquiries') + op.drop_column('listing_inquiries', 'buyer_user_id') diff --git a/backend/alembic/versions/009_add_listing_sold_fields.py b/backend/alembic/versions/009_add_listing_sold_fields.py new file mode 100644 index 0000000..a3ce2fe --- /dev/null +++ b/backend/alembic/versions/009_add_listing_sold_fields.py @@ -0,0 +1,31 @@ +"""Add listing sold fields (GMV tracking) + +Revision ID: 009 +Revises: 008 +Create Date: 2025-12-15 +""" + +from alembic import op +import sqlalchemy as sa + +revision = '009' +down_revision = '008' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column('domain_listings', sa.Column('sold_at', sa.DateTime(), nullable=True)) + op.add_column('domain_listings', sa.Column('sold_reason', sa.String(200), nullable=True)) + op.add_column('domain_listings', sa.Column('sold_price', sa.Float(), nullable=True)) + op.add_column('domain_listings', sa.Column('sold_currency', sa.String(3), nullable=True)) + + op.create_index('ix_domain_listings_status', 'domain_listings', ['status'], unique=False) + + +def downgrade() -> None: + op.drop_index('ix_domain_listings_status', table_name='domain_listings') + op.drop_column('domain_listings', 'sold_currency') + op.drop_column('domain_listings', 'sold_price') + op.drop_column('domain_listings', 'sold_reason') + op.drop_column('domain_listings', 'sold_at') diff --git a/backend/alembic/versions/010_add_yield_connected_at.py b/backend/alembic/versions/010_add_yield_connected_at.py new file mode 100644 index 0000000..ca4f714 --- /dev/null +++ b/backend/alembic/versions/010_add_yield_connected_at.py @@ -0,0 +1,25 @@ +"""Add yield connected_at timestamp. + +Revision ID: 010_add_yield_connected_at +Revises: 009_add_listing_sold_fields +Create Date: 2025-12-15 +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "010_add_yield_connected_at" +down_revision = "009_add_listing_sold_fields" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column("yield_domains", sa.Column("connected_at", sa.DateTime(), nullable=True)) + + +def downgrade() -> None: + op.drop_column("yield_domains", "connected_at") + diff --git a/backend/alembic/versions/011_add_yield_transaction_click_id.py b/backend/alembic/versions/011_add_yield_transaction_click_id.py new file mode 100644 index 0000000..e987b8f --- /dev/null +++ b/backend/alembic/versions/011_add_yield_transaction_click_id.py @@ -0,0 +1,28 @@ +"""Add click_id + destination_url to yield transactions. 
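+
+click_id lets a partner conversion webhook attribute the conversion back to the
+originating tracked click; destination_url records where that click was routed.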
+ +Revision ID: 011_add_yield_transaction_click_id +Revises: 010_add_yield_connected_at +Create Date: 2025-12-15 +""" + +from alembic import op +import sqlalchemy as sa + + +revision = "011_add_yield_transaction_click_id" +down_revision = "010_add_yield_connected_at" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column("yield_transactions", sa.Column("click_id", sa.String(length=64), nullable=True)) + op.add_column("yield_transactions", sa.Column("destination_url", sa.Text(), nullable=True)) + op.create_index("ix_yield_transactions_click_id", "yield_transactions", ["click_id"], unique=False) + + +def downgrade() -> None: + op.drop_index("ix_yield_transactions_click_id", table_name="yield_transactions") + op.drop_column("yield_transactions", "destination_url") + op.drop_column("yield_transactions", "click_id") + diff --git a/backend/alembic/versions/012_add_telemetry_events.py b/backend/alembic/versions/012_add_telemetry_events.py new file mode 100644 index 0000000..af23a29 --- /dev/null +++ b/backend/alembic/versions/012_add_telemetry_events.py @@ -0,0 +1,67 @@ +"""Add telemetry_events table. + +Revision ID: 012_add_telemetry_events +Revises: 011_add_yield_transaction_click_id +Create Date: 2025-12-15 +""" + +from alembic import op +import sqlalchemy as sa + + +revision = "012_add_telemetry_events" +down_revision = "011_add_yield_transaction_click_id" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "telemetry_events", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("event_name", sa.String(length=60), nullable=False), + sa.Column("listing_id", sa.Integer(), nullable=True), + sa.Column("inquiry_id", sa.Integer(), nullable=True), + sa.Column("yield_domain_id", sa.Integer(), nullable=True), + sa.Column("click_id", sa.String(length=64), nullable=True), + sa.Column("domain", sa.String(length=255), nullable=True), + sa.Column("source", sa.String(length=30), nullable=True), + sa.Column("ip_hash", sa.String(length=64), nullable=True), + sa.Column("user_agent", sa.String(length=500), nullable=True), + sa.Column("referrer", sa.String(length=500), nullable=True), + sa.Column("metadata_json", sa.Text(), nullable=True), + sa.Column("is_authenticated", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["user_id"], ["users.id"]), + ) + + op.create_index("ix_telemetry_events_event_name", "telemetry_events", ["event_name"]) + op.create_index("ix_telemetry_events_user_id", "telemetry_events", ["user_id"]) + op.create_index("ix_telemetry_events_listing_id", "telemetry_events", ["listing_id"]) + op.create_index("ix_telemetry_events_inquiry_id", "telemetry_events", ["inquiry_id"]) + op.create_index("ix_telemetry_events_yield_domain_id", "telemetry_events", ["yield_domain_id"]) + op.create_index("ix_telemetry_events_click_id", "telemetry_events", ["click_id"]) + op.create_index("ix_telemetry_events_domain", "telemetry_events", ["domain"]) + op.create_index("ix_telemetry_events_created_at", "telemetry_events", ["created_at"]) + op.create_index("ix_telemetry_event_name_created", "telemetry_events", ["event_name", "created_at"]) + op.create_index("ix_telemetry_user_created", "telemetry_events", ["user_id", "created_at"]) + op.create_index("ix_telemetry_listing_created", "telemetry_events", ["listing_id", "created_at"]) + op.create_index("ix_telemetry_yield_created", "telemetry_events", ["yield_domain_id", 
"created_at"]) + + +def downgrade() -> None: + op.drop_index("ix_telemetry_yield_created", table_name="telemetry_events") + op.drop_index("ix_telemetry_listing_created", table_name="telemetry_events") + op.drop_index("ix_telemetry_user_created", table_name="telemetry_events") + op.drop_index("ix_telemetry_event_name_created", table_name="telemetry_events") + op.drop_index("ix_telemetry_events_created_at", table_name="telemetry_events") + op.drop_index("ix_telemetry_events_domain", table_name="telemetry_events") + op.drop_index("ix_telemetry_events_click_id", table_name="telemetry_events") + op.drop_index("ix_telemetry_events_yield_domain_id", table_name="telemetry_events") + op.drop_index("ix_telemetry_events_inquiry_id", table_name="telemetry_events") + op.drop_index("ix_telemetry_events_listing_id", table_name="telemetry_events") + op.drop_index("ix_telemetry_events_user_id", table_name="telemetry_events") + op.drop_index("ix_telemetry_events_event_name", table_name="telemetry_events") + op.drop_table("telemetry_events") + diff --git a/backend/alembic/versions/013_add_ops_alert_events.py b/backend/alembic/versions/013_add_ops_alert_events.py new file mode 100644 index 0000000..b0ab604 --- /dev/null +++ b/backend/alembic/versions/013_add_ops_alert_events.py @@ -0,0 +1,41 @@ +"""add ops alert events + +Revision ID: 013_add_ops_alert_events +Revises: 012_add_telemetry_events +Create Date: 2025-12-15 +""" + +from __future__ import annotations + +from alembic import op +import sqlalchemy as sa + + +revision = "013_add_ops_alert_events" +down_revision = "012_add_telemetry_events" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "ops_alert_events", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column("alert_key", sa.String(length=80), nullable=False), + sa.Column("severity", sa.String(length=10), nullable=False), + sa.Column("title", sa.String(length=200), nullable=False), + sa.Column("detail", sa.Text(), nullable=True), + sa.Column("status", sa.String(length=20), nullable=False), + sa.Column("recipients", sa.Text(), nullable=True), + sa.Column("send_reason", sa.String(length=60), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.text("now()")), + ) + op.create_index("ix_ops_alert_key_created", "ops_alert_events", ["alert_key", "created_at"]) + op.create_index("ix_ops_alert_status_created", "ops_alert_events", ["status", "created_at"]) + + +def downgrade() -> None: + op.drop_index("ix_ops_alert_status_created", table_name="ops_alert_events") + op.drop_index("ix_ops_alert_key_created", table_name="ops_alert_events") + op.drop_table("ops_alert_events") + diff --git a/backend/alembic/versions/014_add_user_invite_code.py b/backend/alembic/versions/014_add_user_invite_code.py new file mode 100644 index 0000000..a7fbf40 --- /dev/null +++ b/backend/alembic/versions/014_add_user_invite_code.py @@ -0,0 +1,28 @@ +"""add users invite_code + +Revision ID: 014_add_user_invite_code +Revises: 013_add_ops_alert_events +Create Date: 2025-12-15 +""" + +from __future__ import annotations + +from alembic import op +import sqlalchemy as sa + + +revision = "014_add_user_invite_code" +down_revision = "013_add_ops_alert_events" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column("users", sa.Column("invite_code", sa.String(length=32), nullable=True)) + op.create_index("ix_users_invite_code", "users", ["invite_code"], unique=True) + + +def downgrade() -> None: + 
op.drop_index("ix_users_invite_code", table_name="users") + op.drop_column("users", "invite_code") + diff --git a/backend/alembic/versions/015_add_subscription_referral_bonus_domains.py b/backend/alembic/versions/015_add_subscription_referral_bonus_domains.py new file mode 100644 index 0000000..8febc42 --- /dev/null +++ b/backend/alembic/versions/015_add_subscription_referral_bonus_domains.py @@ -0,0 +1,29 @@ +"""add subscription referral bonus domains + +Revision ID: 015_add_subscription_referral_bonus_domains +Revises: 014_add_user_invite_code +Create Date: 2025-12-15 +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + + +revision = "015_add_subscription_referral_bonus_domains" +down_revision = "014_add_user_invite_code" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "subscriptions", + sa.Column("referral_bonus_domains", sa.Integer(), nullable=False, server_default="0"), + ) + + +def downgrade() -> None: + op.drop_column("subscriptions", "referral_bonus_domains") + diff --git a/backend/app/api/__init__.py b/backend/app/api/__init__.py index 53b5529..c243207 100644 --- a/backend/app/api/__init__.py +++ b/backend/app/api/__init__.py @@ -21,6 +21,8 @@ from app.api.dashboard import router as dashboard_router from app.api.yield_domains import router as yield_router from app.api.yield_webhooks import router as yield_webhooks_router from app.api.yield_routing import router as yield_routing_router +from app.api.yield_payout_admin import router as yield_payout_admin_router +from app.api.telemetry import router as telemetry_router api_router = APIRouter() @@ -49,6 +51,10 @@ api_router.include_router(seo_router, prefix="/seo", tags=["SEO Data - Tycoon"]) api_router.include_router(yield_router, tags=["Yield - Intent Routing"]) api_router.include_router(yield_webhooks_router, tags=["Yield - Webhooks"]) api_router.include_router(yield_routing_router, tags=["Yield - Routing"]) +api_router.include_router(yield_payout_admin_router, tags=["Yield - Admin"]) + +# Telemetry / KPIs (admin) +api_router.include_router(telemetry_router, tags=["Telemetry"]) # Support & Communication api_router.include_router(contact_router, prefix="/contact", tags=["Contact & Newsletter"]) diff --git a/backend/app/api/admin.py b/backend/app/api/admin.py index 21298b1..adc39b9 100644 --- a/backend/app/api/admin.py +++ b/backend/app/api/admin.py @@ -25,6 +25,9 @@ from app.models.newsletter import NewsletterSubscriber from app.models.tld_price import TLDPrice, TLDInfo from app.models.auction import DomainAuction from app.models.price_alert import PriceAlert +from app.services.db_backup import create_backup, list_backups +from app.services.ops_alerts import run_ops_alert_checks +from app.models.ops_alert import OpsAlertEvent router = APIRouter() settings = get_settings() @@ -525,12 +528,12 @@ async def upgrade_user( user_id=user.id, tier=new_tier, status=SubscriptionStatus.ACTIVE, - domain_limit=config.get("domain_limit", 5), + max_domains=config.get("domain_limit", 5), ) db.add(subscription) else: subscription.tier = new_tier - subscription.domain_limit = config.get("domain_limit", 5) + subscription.max_domains = config.get("domain_limit", 5) subscription.status = SubscriptionStatus.ACTIVE await db.commit() @@ -897,6 +900,83 @@ async def get_scheduler_status( } +# ============== Ops: Backups (4B) ============== + +@router.get("/system/backups") +async def get_backups( + admin: User = Depends(require_admin), + limit: int = 20, +): + """List recent DB 
backups on the server.""" + return {"backups": list_backups(limit=limit)} + + +@router.post("/system/backups") +async def create_db_backup( + admin: User = Depends(require_admin), + verify: bool = True, +): + """Create a DB backup on the server (and verify it).""" + if not settings.enable_db_backups: + raise HTTPException(status_code=403, detail="DB backups are disabled (ENABLE_DB_BACKUPS=false).") + try: + result = create_backup(verify=verify) + return { + "status": "ok", + "backup": { + "path": result.path, + "size_bytes": result.size_bytes, + "created_at": result.created_at, + "verified": result.verified, + "verification_detail": result.verification_detail, + }, + } + except Exception as e: + raise HTTPException(status_code=500, detail=f"Backup failed: {e}") + + +@router.post("/system/ops-alerts/run") +async def run_ops_alerts_now( + admin: User = Depends(require_admin), +): + """ + Run ops alert checks immediately (and send alerts if enabled). + Useful for server validation without Docker. + """ + return await run_ops_alert_checks() + + +@router.get("/system/ops-alerts/history") +async def get_ops_alert_history( + db: Database, + admin: User = Depends(require_admin), + limit: int = 100, +): + """Return recent persisted ops alert events.""" + limit = max(1, min(int(limit), 500)) + rows = ( + await db.execute( + select(OpsAlertEvent).order_by(OpsAlertEvent.created_at.desc()).limit(limit) + ) + ).scalars().all() + return { + "events": [ + { + "id": e.id, + "alert_key": e.alert_key, + "severity": e.severity, + "title": e.title, + "detail": e.detail, + "status": e.status, + "send_reason": e.send_reason, + "recipients": e.recipients, + "created_at": e.created_at.isoformat(), + } + for e in rows + ] + } + + # ============== Bulk Operations ============== class BulkUpgradeRequest(BaseModel): diff --git a/backend/app/api/auth.py b/backend/app/api/auth.py index 946b9c1..58d12c2 100644 --- a/backend/app/api/auth.py +++ b/backend/app/api/auth.py @@ -14,6 +14,7 @@ Endpoints: import os import secrets import logging +import re from datetime import datetime, timedelta from typing import Optional @@ -25,11 +26,24 @@ from slowapi.util import get_remote_address from app.api.deps import Database, CurrentUser from app.config import get_settings -from app.schemas.auth import UserCreate, UserLogin, UserResponse, LoginResponse +from app.schemas.auth import ( + LoginResponse, + ReferralLinkResponse, + ReferralStats, + UserCreate, + UserLogin, + UserResponse, +) from app.services.auth import AuthService from app.services.email_service import email_service from app.models.user import User from app.security import set_auth_cookie, clear_auth_cookie +from app.services.telemetry import track_event +from app.services.referral_rewards import ( + QUALIFIED_REFERRAL_BATCH_SIZE, + apply_referral_rewards_for_user, + compute_badge, +) logger = logging.getLogger(__name__) @@ -72,7 +86,9 @@ class UpdateUserRequest(BaseModel): # ============== Endpoints ============== @router.post("/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED) +@limiter.limit("5/minute") async def register( + request: Request, user_data: UserCreate, db: Database, background_tasks: BackgroundTasks, @@ -100,32 +116,61 @@ async def register( name=user_data.name, ) - # Process yield referral if present - # Format: yield_{user_id}_{domain_id} - if user_data.ref and user_data.ref.startswith("yield_"): - try: - parts = user_data.ref.split("_") - if len(parts) >= 3: - referrer_user_id = int(parts[1]) - # Store referral info - 
user.referred_by_user_id = referrer_user_id - user.referral_code = user_data.ref - # Try to get domain name from yield_domain_id + # Process referral if present. + # Supported formats: + # - yield_{user_id}_{domain_id} + # - invite code (12 hex chars) + referral_applied = False + referrer_user_id: Optional[int] = None + referral_type: Optional[str] = None + + if user_data.ref: + ref_raw = user_data.ref.strip() + + # Yield referral: yield_{user_id}_{domain_id} + if ref_raw.startswith("yield_"): + try: + parts = ref_raw.split("_") + if len(parts) >= 3: + referrer_user_id = int(parts[1]) + user.referred_by_user_id = referrer_user_id + user.referral_code = ref_raw + referral_type = "yield" + + # Try to map the yield_domain_id to a domain string + try: + from app.models.yield_domain import YieldDomain + + yield_domain_id = int(parts[2]) + yd_res = await db.execute(select(YieldDomain).where(YieldDomain.id == yield_domain_id)) + yd = yd_res.scalar_one_or_none() + if yd: + user.referred_by_domain = yd.domain + except Exception: + pass + + await db.commit() + referral_applied = True + logger.info("User %s referred via yield by user %s", user.email, referrer_user_id) + except Exception as e: + logger.warning("Failed to process yield referral code: %s, error: %s", ref_raw, e) + else: + # Invite code referral (viral loop) + code = ref_raw.lower() + if re.fullmatch(r"[0-9a-f]{12}", code): try: - from app.models.yield_domain import YieldDomain - yield_domain_id = int(parts[2]) - yield_domain = await db.execute( - select(YieldDomain).where(YieldDomain.id == yield_domain_id) - ) - yd = yield_domain.scalar_one_or_none() - if yd: - user.referred_by_domain = yd.domain - except Exception: - pass - await db.commit() - logger.info(f"User {user.email} referred by user {referrer_user_id}") - except Exception as e: - logger.warning(f"Failed to process referral code: {user_data.ref}, error: {e}") + ref_user_res = await db.execute(select(User).where(User.invite_code == code)) + ref_user = ref_user_res.scalar_one_or_none() + if ref_user and ref_user.id != user.id: + referrer_user_id = ref_user.id + user.referred_by_user_id = ref_user.id + user.referral_code = code + referral_type = "invite" + await db.commit() + referral_applied = True + logger.info("User %s referred via invite_code by user %s", user.email, ref_user.id) + except Exception as e: + logger.warning("Failed to process invite referral code: %s, error: %s", code, e) # Auto-admin for specific email ADMIN_EMAILS = ["guggeryves@hotmail.com"] @@ -158,10 +203,40 @@ async def register( user.email_verification_token = verification_token user.email_verification_expires = datetime.utcnow() + timedelta(hours=24) await db.commit() + + # Telemetry: registration + referral attribution + try: + await track_event( + db, + event_name="user_registered", + request=request, + user_id=user.id, + is_authenticated=False, + source="public", + metadata={"ref": bool(user_data.ref)}, + ) + if referral_applied: + await track_event( + db, + event_name="referral_attributed", + request=request, + user_id=user.id, + is_authenticated=False, + source="public", + metadata={ + "referral_type": referral_type, + "referrer_user_id": referrer_user_id, + "ref": user_data.ref, + }, + ) + await db.commit() + except Exception: + # never block registration + pass # Send verification email in background if email_service.is_configured(): - site_url = os.getenv("SITE_URL", "http://localhost:3000") + site_url = (settings.site_url or "http://localhost:3000").rstrip("/") verify_url = 
f"{site_url}/verify-email?token={verification_token}" background_tasks.add_task( @@ -174,8 +249,104 @@ async def register( return user +@router.get("/referral", response_model=ReferralLinkResponse) +async def get_referral_link( + request: Request, + current_user: CurrentUser, + db: Database, + days: int = 30, +): + """Return the authenticated user's invite link.""" + if not current_user.invite_code: + # Generate on demand for older users + for _ in range(12): + code = secrets.token_hex(6) + exists = await db.execute(select(User.id).where(User.invite_code == code)) + if exists.scalar_one_or_none() is None: + current_user.invite_code = code + await db.commit() + break + if not current_user.invite_code: + raise HTTPException(status_code=500, detail="Failed to generate invite code") + + # Apply rewards (idempotent) so UI reflects current state even without scheduler + snapshot = await apply_referral_rewards_for_user(db, current_user.id) + await db.commit() + + base = (settings.site_url or "http://localhost:3000").rstrip("/") + url = f"{base}/register?ref={current_user.invite_code}" + + try: + await track_event( + db, + event_name="referral_link_viewed", + request=request, + user_id=current_user.id, + is_authenticated=True, + source="terminal", + metadata={"invite_code": current_user.invite_code}, + ) + await db.commit() + except Exception: + pass + + # Count link views in the chosen window + try: + from datetime import timedelta + from sqlalchemy import and_, func + + from app.models.telemetry import TelemetryEvent + + window_days = max(1, min(int(days), 365)) + end = datetime.utcnow() + start = end - timedelta(days=window_days) + views = ( + await db.execute( + select(func.count(TelemetryEvent.id)).where( + and_( + TelemetryEvent.event_name == "referral_link_viewed", + TelemetryEvent.user_id == current_user.id, + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + ) + ) + ) + ).scalar() + referral_link_views_window = int(views or 0) + except Exception: + window_days = 30 + referral_link_views_window = 0 + + qualified = int(snapshot.qualified_referrals_total) + if qualified < QUALIFIED_REFERRAL_BATCH_SIZE: + next_reward_at = QUALIFIED_REFERRAL_BATCH_SIZE + else: + remainder = qualified % QUALIFIED_REFERRAL_BATCH_SIZE + next_reward_at = qualified + (QUALIFIED_REFERRAL_BATCH_SIZE - remainder) if remainder else qualified + QUALIFIED_REFERRAL_BATCH_SIZE + + return ReferralLinkResponse( + invite_code=current_user.invite_code, + url=url, + stats=ReferralStats( + window_days=int(window_days), + referred_users_total=int(snapshot.referred_users_total), + qualified_referrals_total=qualified, + referral_link_views_window=int(referral_link_views_window), + bonus_domains=int(snapshot.bonus_domains), + next_reward_at=int(next_reward_at), + badge=compute_badge(qualified), + cooldown_days=int(getattr(snapshot, "cooldown_days", 7) or 7), + disqualified_cooldown_total=int(getattr(snapshot, "disqualified_cooldown_total", 0) or 0), + disqualified_missing_ip_total=int(getattr(snapshot, "disqualified_missing_ip_total", 0) or 0), + disqualified_shared_ip_total=int(getattr(snapshot, "disqualified_shared_ip_total", 0) or 0), + disqualified_duplicate_ip_total=int(getattr(snapshot, "disqualified_duplicate_ip_total", 0) or 0), + ), + ) + + @router.post("/login", response_model=LoginResponse) -async def login(user_data: UserLogin, db: Database, response: Response): +@limiter.limit("10/minute") +async def login(request: Request, user_data: UserLogin, db: Database, response: Response): """ Authenticate 
user and return JWT token. @@ -280,8 +451,10 @@ async def update_current_user( @router.post("/forgot-password", response_model=MessageResponse) +@limiter.limit("3/minute") async def forgot_password( - request: ForgotPasswordRequest, + request: Request, + payload: ForgotPasswordRequest, db: Database, background_tasks: BackgroundTasks, ): @@ -296,9 +469,7 @@ async def forgot_password( success_message = "If an account with this email exists, a password reset link has been sent." # Look up user - result = await db.execute( - select(User).where(User.email == request.email.lower()) - ) + result = await db.execute(select(User).where(User.email == payload.email.lower())) user = result.scalar_one_or_none() if not user: @@ -313,7 +484,7 @@ async def forgot_password( # Send reset email in background if email_service.is_configured(): - site_url = os.getenv("SITE_URL", "http://localhost:3000") + site_url = (settings.site_url or "http://localhost:3000").rstrip("/") reset_url = f"{site_url}/reset-password?token={reset_token}" background_tasks.add_task( @@ -411,8 +582,10 @@ async def verify_email( @router.post("/resend-verification", response_model=MessageResponse) +@limiter.limit("3/minute") async def resend_verification( - request: ForgotPasswordRequest, # Reuse schema - just needs email + request: Request, + payload: ForgotPasswordRequest, # Reuse schema - just needs email db: Database, background_tasks: BackgroundTasks, ): @@ -426,7 +599,7 @@ async def resend_verification( # Look up user result = await db.execute( - select(User).where(User.email == request.email.lower()) + select(User).where(User.email == payload.email.lower()) ) user = result.scalar_one_or_none() @@ -441,7 +614,7 @@ async def resend_verification( # Send verification email if email_service.is_configured(): - site_url = os.getenv("SITE_URL", "http://localhost:3000") + site_url = (settings.site_url or "http://localhost:3000").rstrip("/") verify_url = f"{site_url}/verify-email?token={verification_token}" background_tasks.add_task( diff --git a/backend/app/api/blog.py b/backend/app/api/blog.py index f60aa12..331d357 100644 --- a/backend/app/api/blog.py +++ b/backend/app/api/blog.py @@ -200,6 +200,36 @@ async def get_blog_post( return data +@router.get("/posts/{slug}/meta") +async def get_blog_post_meta( + slug: str, + db: Database, +): + """ + Get blog post metadata by slug (public). + + IMPORTANT: This endpoint does NOT increment view_count. + It's intended for SEO metadata generation (generateMetadata, JSON-LD). 
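+    Safe to call from server-side rendering without inflating analytics.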
+    """
+    result = await db.execute(
+        select(BlogPost)
+        .options(selectinload(BlogPost.author))
+        .where(
+            BlogPost.slug == slug,
+            BlogPost.is_published == True,
+        )
+    )
+    post = result.scalar_one_or_none()
+
+    if not post:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Blog post not found",
+        )
+
+    return post.to_dict(include_content=False)
+
+
 # ============== Admin Endpoints ==============
 
 @router.get("/admin/posts")
diff --git a/backend/app/api/contact.py b/backend/app/api/contact.py
index 89817d2..ce97311 100644
--- a/backend/app/api/contact.py
+++ b/backend/app/api/contact.py
@@ -16,10 +16,12 @@ from datetime import datetime
 from typing import Optional
 
 from fastapi import APIRouter, HTTPException, status, BackgroundTasks, Request
+from fastapi.responses import HTMLResponse
 from pydantic import BaseModel, EmailStr, Field
 from sqlalchemy import select, delete
 from slowapi import Limiter
 from slowapi.util import get_remote_address
+from urllib.parse import urlencode
 
 from app.api.deps import Database
 from app.services.email_service import email_service
@@ -32,6 +34,11 @@ router = APIRouter()
 # Rate limiter for contact endpoints
 limiter = Limiter(key_func=get_remote_address)
 
+def _build_unsubscribe_url(email: str, token: str) -> str:
+    base = os.getenv("SITE_URL", "https://pounce.ch").rstrip("/")
+    query = urlencode({"email": email, "token": token})
+    return f"{base}/api/v1/contact/newsletter/unsubscribe?{query}"
+
 
 # ============== Schemas ==============
 
@@ -139,6 +146,7 @@ async def subscribe_newsletter(
         background_tasks.add_task(
             email_service.send_newsletter_welcome,
             to_email=email_lower,
+            unsubscribe_url=_build_unsubscribe_url(email_lower, existing.unsubscribe_token),
         )
 
         return MessageResponse(
@@ -160,6 +168,7 @@ async def subscribe_newsletter(
     background_tasks.add_task(
         email_service.send_newsletter_welcome,
         to_email=email_lower,
+        unsubscribe_url=_build_unsubscribe_url(email_lower, subscriber.unsubscribe_token),
     )
 
     logger.info(f"Newsletter subscription: {email_lower}")
@@ -216,6 +225,50 @@ async def unsubscribe_newsletter(
     )
 
 
+@router.get("/newsletter/unsubscribe")
+async def unsubscribe_newsletter_one_click(
+    email: EmailStr,
+    token: str,
+    db: Database,
+):
+    """
+    One-click unsubscribe endpoint (for List-Unsubscribe header).
+    Always returns 200 with a human-readable HTML response.
+    """
+    email_lower = email.lower()
+    result = await db.execute(
+        select(NewsletterSubscriber).where(
+            NewsletterSubscriber.email == email_lower,
+            NewsletterSubscriber.unsubscribe_token == token,
+        )
+    )
+    subscriber = result.scalar_one_or_none()
+    if subscriber and subscriber.is_active:
+        subscriber.is_active = False
+        subscriber.unsubscribed_at = datetime.utcnow()
+        await db.commit()
+
+    return HTMLResponse(
+        content="""
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <title>Unsubscribed</title>
+  </head>
+  <body>
+    <h1>You are unsubscribed.</h1>
+    <p>
+      If you were subscribed, you will no longer receive pounce insights emails.
+    </p>
+  </body>
+</html>
+ + +""".strip(), + status_code=200, + ) + + @router.get("/newsletter/status") async def check_newsletter_status( email: EmailStr, diff --git a/backend/app/api/domains.py b/backend/app/api/domains.py index 615cc66..1b48dcf 100644 --- a/backend/app/api/domains.py +++ b/backend/app/api/domains.py @@ -127,7 +127,7 @@ async def add_domain( await db.refresh(current_user, ["subscription", "domains"]) if current_user.subscription: - limit = current_user.subscription.max_domains + limit = current_user.subscription.domain_limit else: limit = TIER_CONFIG[SubscriptionTier.SCOUT]["domain_limit"] diff --git a/backend/app/api/listings.py b/backend/app/api/listings.py index eaabf95..ff44269 100644 --- a/backend/app/api/listings.py +++ b/backend/app/api/listings.py @@ -31,7 +31,15 @@ from sqlalchemy.ext.asyncio import AsyncSession from app.database import get_db from app.api.deps import get_current_user, get_current_user_optional from app.models.user import User -from app.models.listing import DomainListing, ListingInquiry, ListingView, ListingStatus, VerificationStatus +from app.models.listing import ( + DomainListing, + ListingInquiry, + ListingInquiryEvent, + ListingInquiryMessage, + ListingView, + ListingStatus, + VerificationStatus, +) from app.services.valuation import valuation_service @@ -104,6 +112,9 @@ class ListingUpdate(BaseModel): show_valuation: Optional[bool] = None allow_offers: Optional[bool] = None status: Optional[str] = None + sold_reason: Optional[str] = Field(None, max_length=200) + sold_price: Optional[float] = Field(None, ge=0) + sold_currency: Optional[str] = Field(None, max_length=3) class ListingResponse(BaseModel): @@ -129,6 +140,10 @@ class ListingResponse(BaseModel): public_url: str created_at: datetime published_at: Optional[datetime] + sold_at: Optional[datetime] = None + sold_reason: Optional[str] = None + sold_price: Optional[float] = None + sold_currency: Optional[str] = None # Seller info (minimal for privacy) seller_verified: bool = False @@ -156,6 +171,7 @@ class ListingPublicResponse(BaseModel): # Seller trust indicators seller_verified: bool seller_member_since: Optional[datetime] + seller_invite_code: Optional[str] = None class Config: from_attributes = True @@ -183,6 +199,10 @@ class InquiryResponse(BaseModel): status: str created_at: datetime read_at: Optional[datetime] + replied_at: Optional[datetime] = None + closed_at: Optional[datetime] = None + closed_reason: Optional[str] = None + buyer_user_id: Optional[int] = None class Config: from_attributes = True @@ -191,6 +211,23 @@ class InquiryResponse(BaseModel): class InquiryUpdate(BaseModel): """Update inquiry status for listing owner.""" status: str = Field(..., min_length=1, max_length=20) # new, read, replied, spam + reason: Optional[str] = Field(None, max_length=200) + + +class InquiryMessageCreate(BaseModel): + body: str = Field(..., min_length=1, max_length=4000) + + +class InquiryMessageResponse(BaseModel): + id: int + inquiry_id: int + listing_id: int + sender_user_id: int + body: str + created_at: datetime + + class Config: + from_attributes = True class VerificationResponse(BaseModel): @@ -309,6 +346,7 @@ async def browse_listings( public_url=listing.public_url, seller_verified=listing.is_verified, seller_member_since=listing.user.created_at if listing.user else None, + seller_invite_code=getattr(listing.user, "invite_code", None) if listing.user else None, )) await db.commit() # Save any updated pounce_scores @@ -353,6 +391,10 @@ async def get_my_listings( public_url=listing.public_url, 
created_at=listing.created_at, published_at=listing.published_at, + sold_at=getattr(listing, "sold_at", None), + sold_reason=getattr(listing, "sold_reason", None), + sold_price=getattr(listing, "sold_price", None), + sold_currency=getattr(listing, "sold_currency", None), seller_verified=current_user.is_verified, seller_member_since=current_user.created_at, ) @@ -395,6 +437,18 @@ async def get_listing_by_slug( # Increment view count listing.view_count += 1 + + await track_event( + db, + event_name="listing_view", + request=request, + user_id=current_user.id if current_user else None, + is_authenticated=bool(current_user), + source="public", + domain=listing.domain, + listing_id=listing.id, + metadata={"slug": listing.slug}, + ) # Calculate pounce_score dynamically if not stored (same as Market Feed) pounce_score = listing.pounce_score @@ -420,6 +474,7 @@ async def get_listing_by_slug( public_url=listing.public_url, seller_verified=listing.is_verified, seller_member_since=listing.user.created_at if listing.user else None, + seller_invite_code=getattr(listing.user, "invite_code", None) if listing.user else None, ) @@ -461,13 +516,13 @@ async def submit_inquiry( detail="Message contains blocked content. Please revise." ) - # Rate limiting check (simple: max 3 inquiries per email per listing per day) + # Rate limiting check (simple: max 3 inquiries per user per listing per day) today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0) existing_count = await db.execute( select(func.count(ListingInquiry.id)).where( and_( ListingInquiry.listing_id == listing.id, - ListingInquiry.email == inquiry.email.lower(), + ListingInquiry.buyer_user_id == current_user.id, ListingInquiry.created_at >= today_start, ) ) @@ -481,6 +536,7 @@ async def submit_inquiry( # Create inquiry new_inquiry = ListingInquiry( listing_id=listing.id, + buyer_user_id=current_user.id, name=inquiry.name, email=inquiry.email.lower(), phone=inquiry.phone, @@ -491,6 +547,34 @@ async def submit_inquiry( user_agent=request.headers.get("user-agent", "")[:500], ) db.add(new_inquiry) + await db.flush() + + await track_event( + db, + event_name="inquiry_created", + request=request, + user_id=current_user.id, + is_authenticated=True, + source="public", + domain=listing.domain, + listing_id=listing.id, + inquiry_id=new_inquiry.id, + metadata={ + "offer_amount": inquiry.offer_amount, + "has_phone": bool(inquiry.phone), + "has_company": bool(inquiry.company), + }, + ) + + # Seed thread with the initial message + db.add( + ListingInquiryMessage( + inquiry_id=new_inquiry.id, + listing_id=listing.id, + sender_user_id=current_user.id, + body=inquiry.message, + ) + ) # Increment inquiry count listing.inquiry_count += 1 @@ -716,6 +800,10 @@ async def get_listing_inquiries( status=inq.status, created_at=inq.created_at, read_at=inq.read_at, + replied_at=getattr(inq, "replied_at", None), + closed_at=getattr(inq, "closed_at", None), + closed_reason=getattr(inq, "closed_reason", None), + buyer_user_id=getattr(inq, "buyer_user_id", None), ) for inq in inquiries ] @@ -726,11 +814,12 @@ async def update_listing_inquiry( id: int, inquiry_id: int, data: InquiryUpdate, + request: Request, current_user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db), ): """Update an inquiry status (listing owner only).""" - allowed = {"new", "read", "replied", "spam"} + allowed = {"new", "read", "replied", "closed", "spam"} status_clean = (data.status or "").strip().lower() if status_clean not in allowed: raise 
HTTPException(status_code=400, detail="Invalid status") @@ -761,11 +850,43 @@ async def update_listing_inquiry( raise HTTPException(status_code=404, detail="Inquiry not found") now = datetime.utcnow() + old_status = getattr(inquiry, "status", None) inquiry.status = status_clean if status_clean == "read" and inquiry.read_at is None: inquiry.read_at = now if status_clean == "replied": inquiry.replied_at = now + if status_clean == "closed": + inquiry.closed_at = now + inquiry.closed_reason = (data.reason or "").strip() or None + if status_clean == "spam": + inquiry.closed_reason = (data.reason or "").strip() or inquiry.closed_reason + + # Audit trail + event = ListingInquiryEvent( + inquiry_id=inquiry.id, + listing_id=listing.id, + actor_user_id=current_user.id, + old_status=old_status, + new_status=status_clean, + reason=(data.reason or "").strip() or None, + ip_address=request.client.host if request.client else None, + user_agent=request.headers.get("user-agent", "")[:500], + ) + db.add(event) + + await track_event( + db, + event_name="inquiry_status_changed", + request=request, + user_id=current_user.id, + is_authenticated=True, + source="terminal", + domain=listing.domain, + listing_id=listing.id, + inquiry_id=inquiry.id, + metadata={"old_status": old_status, "new_status": status_clean, "reason": (data.reason or "").strip() or None}, + ) await db.commit() await db.refresh(inquiry) @@ -781,13 +902,285 @@ async def update_listing_inquiry( status=inquiry.status, created_at=inquiry.created_at, read_at=inquiry.read_at, + replied_at=getattr(inquiry, "replied_at", None), + closed_at=getattr(inquiry, "closed_at", None), + closed_reason=getattr(inquiry, "closed_reason", None), + buyer_user_id=getattr(inquiry, "buyer_user_id", None), ) +@router.get("/{id}/inquiries/{inquiry_id}/messages", response_model=List[InquiryMessageResponse]) +async def get_inquiry_messages_for_seller( + id: int, + inquiry_id: int, + current_user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Seller: fetch thread messages for an inquiry.""" + listing_result = await db.execute( + select(DomainListing).where(and_(DomainListing.id == id, DomainListing.user_id == current_user.id)) + ) + listing = listing_result.scalar_one_or_none() + if not listing: + raise HTTPException(status_code=404, detail="Listing not found") + + inquiry_result = await db.execute( + select(ListingInquiry).where(and_(ListingInquiry.id == inquiry_id, ListingInquiry.listing_id == id)) + ) + inquiry = inquiry_result.scalar_one_or_none() + if not inquiry: + raise HTTPException(status_code=404, detail="Inquiry not found") + + msgs = ( + await db.execute( + select(ListingInquiryMessage) + .where(and_(ListingInquiryMessage.inquiry_id == inquiry_id, ListingInquiryMessage.listing_id == id)) + .order_by(ListingInquiryMessage.created_at.asc()) + ) + ).scalars().all() + return [InquiryMessageResponse.model_validate(m) for m in msgs] + + +@router.post("/{id}/inquiries/{inquiry_id}/messages", response_model=InquiryMessageResponse) +async def post_inquiry_message_as_seller( + id: int, + inquiry_id: int, + payload: InquiryMessageCreate, + request: Request, + current_user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Seller: post a message into an inquiry thread.""" + listing_result = await db.execute( + select(DomainListing).where(and_(DomainListing.id == id, DomainListing.user_id == current_user.id)) + ) + listing = listing_result.scalar_one_or_none() + if not listing: + raise 
HTTPException(status_code=404, detail="Listing not found")
+
+    inquiry_result = await db.execute(
+        select(ListingInquiry).where(and_(ListingInquiry.id == inquiry_id, ListingInquiry.listing_id == id))
+    )
+    inquiry = inquiry_result.scalar_one_or_none()
+    if not inquiry:
+        raise HTTPException(status_code=404, detail="Inquiry not found")
+
+    if inquiry.status in ["closed", "spam"]:
+        raise HTTPException(status_code=400, detail="Inquiry is closed")
+
+    # Content safety (phishing keywords)
+    if not _check_content_safety(payload.body):
+        raise HTTPException(status_code=400, detail="Message contains blocked content. Please revise.")
+
+    # Simple rate limit: max 30 messages per sender per inquiry per hour
+    hour_start = datetime.utcnow() - timedelta(hours=1)
+    msg_count = (
+        await db.execute(
+            select(func.count(ListingInquiryMessage.id)).where(
+                and_(
+                    ListingInquiryMessage.inquiry_id == inquiry.id,
+                    ListingInquiryMessage.sender_user_id == current_user.id,
+                    ListingInquiryMessage.created_at >= hour_start,
+                )
+            )
+        )
+    ).scalar() or 0
+    if msg_count >= 30:
+        raise HTTPException(status_code=429, detail="Too many messages. Please slow down.")
+
+    msg = ListingInquiryMessage(
+        inquiry_id=inquiry.id,
+        listing_id=listing.id,
+        sender_user_id=current_user.id,
+        body=payload.body,
+    )
+    db.add(msg)
+    await db.flush()
+
+    await track_event(
+        db,
+        event_name="message_sent",
+        request=request,
+        user_id=current_user.id,
+        is_authenticated=True,
+        source="terminal",
+        domain=listing.domain,
+        listing_id=listing.id,
+        inquiry_id=inquiry.id,
+        metadata={"role": "seller"},
+    )
+
+    # Email buyer (if configured)
+    try:
+        from app.services.email_service import email_service
+        if inquiry.buyer_user_id:
+            buyer = (
+                await db.execute(select(User).where(User.id == inquiry.buyer_user_id))
+            ).scalar_one_or_none()
+        else:
+            buyer = None
+        if buyer and buyer.email and email_service.is_configured():
+            thread_url = f"https://pounce.ch/terminal/inbox?inquiry={inquiry.id}"
+            await email_service.send_listing_message(
+                to_email=buyer.email,
+                domain=listing.domain,
+                sender_name=current_user.name or current_user.email,
+                message=payload.body,
+                thread_url=thread_url,
+            )
+    except Exception as e:
+        logger.error(f"Failed to send listing message notification: {e}")
+
+    await db.commit()
+    await db.refresh(msg)
+    return InquiryMessageResponse.model_validate(msg)
+
+
+@router.get("/inquiries/my")
+async def get_my_inquiries_as_buyer(
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """Buyer: list inquiries created from this account."""
+    result = await db.execute(
+        select(ListingInquiry, DomainListing)
+        .join(DomainListing, DomainListing.id == ListingInquiry.listing_id)
+        .where(ListingInquiry.buyer_user_id == current_user.id)
+        .order_by(ListingInquiry.created_at.desc())
+    )
+    rows = result.all()
+    return [
+        {
+            "id": inq.id,
+            "listing_id": listing.id,
+            "domain": listing.domain,
+            "slug": listing.slug,
+            "status": inq.status,
+            "created_at": inq.created_at.isoformat(),
+            "closed_at": inq.closed_at.isoformat() if getattr(inq, "closed_at", None) else None,
+            "closed_reason": getattr(inq, "closed_reason", None),
+        }
+        for inq, listing in rows
+    ]
+
+
+@router.get("/inquiries/{inquiry_id}/messages",
response_model=List[InquiryMessageResponse]) +async def get_inquiry_messages_for_buyer( + inquiry_id: int, + current_user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Buyer: fetch thread messages for one inquiry.""" + inquiry = ( + await db.execute(select(ListingInquiry).where(ListingInquiry.id == inquiry_id)) + ).scalar_one_or_none() + if not inquiry or inquiry.buyer_user_id != current_user.id: + raise HTTPException(status_code=404, detail="Inquiry not found") + + msgs = ( + await db.execute( + select(ListingInquiryMessage) + .where(ListingInquiryMessage.inquiry_id == inquiry_id) + .order_by(ListingInquiryMessage.created_at.asc()) + ) + ).scalars().all() + return [InquiryMessageResponse.model_validate(m) for m in msgs] + + +@router.post("/inquiries/{inquiry_id}/messages", response_model=InquiryMessageResponse) +async def post_inquiry_message_as_buyer( + inquiry_id: int, + payload: InquiryMessageCreate, + request: Request, + current_user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Buyer: post a message into an inquiry thread.""" + inquiry = ( + await db.execute(select(ListingInquiry).where(ListingInquiry.id == inquiry_id)) + ).scalar_one_or_none() + if not inquiry or inquiry.buyer_user_id != current_user.id: + raise HTTPException(status_code=404, detail="Inquiry not found") + + if inquiry.status in ["closed", "spam"]: + raise HTTPException(status_code=400, detail="Inquiry is closed") + + # Content safety (phishing keywords) + if not _check_content_safety(payload.body): + raise HTTPException(status_code=400, detail="Message contains blocked content. Please revise.") + + # Simple rate limit: max 20 messages per hour per inquiry + hour_start = datetime.utcnow() - timedelta(hours=1) + msg_count = ( + await db.execute( + select(func.count(ListingInquiryMessage.id)).where( + and_( + ListingInquiryMessage.inquiry_id == inquiry.id, + ListingInquiryMessage.sender_user_id == current_user.id, + ListingInquiryMessage.created_at >= hour_start, + ) + ) + ) + ).scalar() or 0 + if msg_count >= 20: + raise HTTPException(status_code=429, detail="Too many messages. 
Please slow down.") + + listing = ( + await db.execute(select(DomainListing).where(DomainListing.id == inquiry.listing_id)) + ).scalar_one_or_none() + if not listing: + raise HTTPException(status_code=404, detail="Listing not found") + + msg = ListingInquiryMessage( + inquiry_id=inquiry.id, + listing_id=listing.id, + sender_user_id=current_user.id, + body=payload.body, + ) + db.add(msg) + await db.flush() + + # Email seller (if configured) + try: + from app.services.email_service import email_service + seller = ( + await db.execute(select(User).where(User.id == listing.user_id)) + ).scalar_one_or_none() + if seller and seller.email and email_service.is_configured(): + thread_url = f"https://pounce.ch/terminal/listing" + await email_service.send_listing_message( + to_email=seller.email, + domain=listing.domain, + sender_name=current_user.name or current_user.email, + message=payload.body, + thread_url=thread_url, + ) + except Exception as e: + logger.error(f"Failed to send listing message notification: {e}") + + await db.commit() + await db.refresh(msg) + return InquiryMessageResponse.model_validate(msg) + + @router.put("/{id}", response_model=ListingResponse) async def update_listing( id: int, data: ListingUpdate, + request: Request, current_user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db), ): @@ -832,8 +1225,58 @@ async def update_listing( ) listing.status = ListingStatus.ACTIVE.value listing.published_at = datetime.utcnow() - elif data.status in ["draft", "sold", "expired"]: + elif data.status in ["draft", "expired"]: listing.status = data.status + elif data.status == "sold": + if listing.status != ListingStatus.ACTIVE.value: + raise HTTPException(status_code=400, detail="Only active listings can be marked as sold.") + listing.status = ListingStatus.SOLD.value + listing.sold_at = datetime.utcnow() + listing.sold_reason = (data.sold_reason or "").strip() or listing.sold_reason + listing.sold_price = data.sold_price if data.sold_price is not None else listing.sold_price + listing.sold_currency = (data.sold_currency or listing.currency or "USD").upper() + + # Close all open inquiries on this listing (deal is done). 
+ inqs = ( + await db.execute( + select(ListingInquiry).where(ListingInquiry.listing_id == listing.id) + ) + ).scalars().all() + for inq in inqs: + if inq.status in ["closed", "spam"]: + continue + old = inq.status + inq.status = "closed" + inq.closed_at = datetime.utcnow() + inq.closed_reason = inq.closed_reason or "sold" + db.add( + ListingInquiryEvent( + inquiry_id=inq.id, + listing_id=listing.id, + actor_user_id=current_user.id, + old_status=old, + new_status="closed", + reason="sold", + ip_address=request.client.host if request.client else None, + user_agent=request.headers.get("user-agent", "")[:500], + ) + ) + + await track_event( + db, + event_name="listing_marked_sold", + request=request, + user_id=current_user.id, + is_authenticated=True, + source="terminal", + domain=listing.domain, + listing_id=listing.id, + metadata={ + "sold_reason": listing.sold_reason, + "sold_price": float(listing.sold_price) if listing.sold_price is not None else None, + "sold_currency": listing.sold_currency, + }, + ) await db.commit() await db.refresh(listing) @@ -860,6 +1303,10 @@ async def update_listing( public_url=listing.public_url, created_at=listing.created_at, published_at=listing.published_at, + sold_at=getattr(listing, "sold_at", None), + sold_reason=getattr(listing, "sold_reason", None), + sold_price=getattr(listing, "sold_price", None), + sold_currency=getattr(listing, "sold_currency", None), seller_verified=current_user.is_verified, seller_member_since=current_user.created_at, ) diff --git a/backend/app/api/subscription.py b/backend/app/api/subscription.py index 18cc7ec..ea5d6e9 100644 --- a/backend/app/api/subscription.py +++ b/backend/app/api/subscription.py @@ -84,7 +84,7 @@ async def get_subscription( tier=subscription.tier.value, tier_name=config["name"], status=subscription.status.value, - domain_limit=subscription.max_domains, + domain_limit=subscription.domain_limit, domains_used=domains_used, portfolio_limit=config.get("portfolio_limit", 0), check_frequency=config["check_frequency"], diff --git a/backend/app/api/telemetry.py b/backend/app/api/telemetry.py new file mode 100644 index 0000000..ef1d43a --- /dev/null +++ b/backend/app/api/telemetry.py @@ -0,0 +1,365 @@ +""" +Telemetry KPIs (4A.2). + +Admin-only endpoint to compute funnel KPIs from telemetry_events. 
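+
+Example (illustrative): median seller reply time pairs each inquiry_created
+event with the first seller-role message_sent event:
+
+    reply_deltas = [
+        (first_seller_reply_at[i] - inquiry_created_at[i]).total_seconds()
+        for i in inquiry_created_at
+        if i in first_seller_reply_at
+    ]
+    median_reply_seconds = statistics.median(reply_deltas) if reply_deltas else None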
+""" + +from __future__ import annotations + +import json +import statistics +from datetime import datetime, timedelta +from typing import Any, Optional + +from fastapi import APIRouter, Depends, HTTPException, Query, status +from sqlalchemy import and_, case, func, select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.deps import get_current_user, get_db +from app.models.telemetry import TelemetryEvent +from app.models.user import User +from app.schemas.referrals import ReferralKpiWindow, ReferralKpisResponse, ReferralReferrerRow +from app.schemas.telemetry import ( + DealFunnelKpis, + TelemetryKpiWindow, + TelemetryKpisResponse, + YieldFunnelKpis, +) + + +router = APIRouter(prefix="/telemetry", tags=["telemetry"]) + + +def _require_admin(user: User) -> None: + if not user.is_admin: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Admin access required") + + +def _safe_json(metadata_json: Optional[str]) -> dict[str, Any]: + if not metadata_json: + return {} + try: + value = json.loads(metadata_json) + return value if isinstance(value, dict) else {} + except Exception: + return {} + + +def _median(values: list[float]) -> Optional[float]: + if not values: + return None + return float(statistics.median(values)) + + +@router.get("/kpis", response_model=TelemetryKpisResponse) +async def get_kpis( + days: int = Query(30, ge=1, le=365), + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user), +): + _require_admin(current_user) + + end = datetime.utcnow() + start = end - timedelta(days=days) + + event_names = [ + # Deal funnel + "listing_view", + "inquiry_created", + "inquiry_status_changed", + "message_sent", + "listing_marked_sold", + # Yield funnel + "yield_connected", + "yield_click", + "yield_conversion", + "payout_paid", + ] + + rows = ( + await db.execute( + select( + TelemetryEvent.event_name, + TelemetryEvent.created_at, + TelemetryEvent.listing_id, + TelemetryEvent.inquiry_id, + TelemetryEvent.yield_domain_id, + TelemetryEvent.click_id, + TelemetryEvent.metadata_json, + ).where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name.in_(event_names), + ) + ) + ) + ).all() + + # ----------------------------- + # Deal KPIs + # ----------------------------- + listing_views = 0 + inquiries_created = 0 + inquiry_created_at: dict[int, datetime] = {} + first_seller_reply_at: dict[int, datetime] = {} + listings_with_inquiries: set[int] = set() + sold_listings: set[int] = set() + sold_at_by_listing: dict[int, datetime] = {} + first_inquiry_at_by_listing: dict[int, datetime] = {} + + # ----------------------------- + # Yield KPIs + # ----------------------------- + connected_domains = 0 + clicks = 0 + conversions = 0 + payouts_paid = 0 + payouts_paid_amount_total = 0.0 + + for event_name, created_at, listing_id, inquiry_id, yield_domain_id, click_id, metadata_json in rows: + created_at = created_at # already datetime + + if event_name == "listing_view": + listing_views += 1 + continue + + if event_name == "inquiry_created": + inquiries_created += 1 + if inquiry_id: + inquiry_created_at[inquiry_id] = created_at + if listing_id: + listings_with_inquiries.add(listing_id) + prev = first_inquiry_at_by_listing.get(listing_id) + if prev is None or created_at < prev: + first_inquiry_at_by_listing[listing_id] = created_at + continue + + if event_name == "message_sent": + if not inquiry_id: + continue + meta = _safe_json(metadata_json) + if meta.get("role") == "seller": + prev = 
first_seller_reply_at.get(inquiry_id) + if prev is None or created_at < prev: + first_seller_reply_at[inquiry_id] = created_at + continue + + if event_name == "listing_marked_sold": + if listing_id: + sold_listings.add(listing_id) + sold_at_by_listing[listing_id] = created_at + continue + + if event_name == "yield_connected": + connected_domains += 1 + continue + + if event_name == "yield_click": + clicks += 1 + continue + + if event_name == "yield_conversion": + conversions += 1 + continue + + if event_name == "payout_paid": + payouts_paid += 1 + meta = _safe_json(metadata_json) + amount = meta.get("amount") + if isinstance(amount, (int, float)): + payouts_paid_amount_total += float(amount) + continue + + seller_replied_inquiries = len(first_seller_reply_at.keys()) + inquiry_reply_rate = (seller_replied_inquiries / inquiries_created) if inquiries_created else 0.0 + + # Inquiry → Sold rate (on listing-level intersection) + sold_from_inquiry = sold_listings.intersection(listings_with_inquiries) + inquiry_to_sold_listing_rate = (len(sold_from_inquiry) / len(listings_with_inquiries)) if listings_with_inquiries else 0.0 + + # Median reply time (seconds): inquiry_created → first seller message + reply_deltas: list[float] = [] + for inq_id, created in inquiry_created_at.items(): + replied = first_seller_reply_at.get(inq_id) + if replied: + reply_deltas.append((replied - created).total_seconds()) + + # Median time-to-sold (seconds): first inquiry on listing → listing sold + sold_deltas: list[float] = [] + for listing in sold_from_inquiry: + inq_at = first_inquiry_at_by_listing.get(listing) + sold_at = sold_at_by_listing.get(listing) + if inq_at and sold_at and sold_at >= inq_at: + sold_deltas.append((sold_at - inq_at).total_seconds()) + + deal = DealFunnelKpis( + listing_views=listing_views, + inquiries_created=inquiries_created, + seller_replied_inquiries=seller_replied_inquiries, + inquiry_reply_rate=float(inquiry_reply_rate), + listings_with_inquiries=len(listings_with_inquiries), + listings_sold=len(sold_listings), + inquiry_to_sold_listing_rate=float(inquiry_to_sold_listing_rate), + median_reply_seconds=_median(reply_deltas), + median_time_to_sold_seconds=_median(sold_deltas), + ) + + yield_kpis = YieldFunnelKpis( + connected_domains=connected_domains, + clicks=clicks, + conversions=conversions, + conversion_rate=float(conversions / clicks) if clicks else 0.0, + payouts_paid=payouts_paid, + payouts_paid_amount_total=float(payouts_paid_amount_total), + ) + + return TelemetryKpisResponse( + window=TelemetryKpiWindow(days=days, start=start, end=end), + deal=deal, + yield_=yield_kpis, + ) + + +@router.get("/referrals", response_model=ReferralKpisResponse) +async def get_referral_kpis( + days: int = Query(30, ge=1, le=365), + limit: int = Query(200, ge=1, le=1000), + offset: int = Query(0, ge=0), + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Admin-only referral KPIs for the viral loop (3C.2). + + This is intentionally user-based (users.referred_by_user_id) + telemetry-based (referral_link_viewed), + so it stays robust even if ref codes evolve. 
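+
+    Example response shape (values illustrative):
+
+        {"window": {"days": 30, "start": "...", "end": "..."},
+         "totals": {"referrers_with_invite_code": 12,
+                    "referred_users_total": 30,
+                    "referred_users_window": 9,
+                    "referral_link_views_window": 210},
+         "referrers": [{"user_id": 7, "email": "a@b.ch",
+                        "referred_users_window": 3,
+                        "referral_link_views_window": 41}]}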
+ """ + _require_admin(current_user) + + end = datetime.utcnow() + start = end - timedelta(days=days) + + # Referred user counts per referrer (all-time + window) + referred_counts_subq = ( + select( + User.referred_by_user_id.label("referrer_user_id"), + func.count(User.id).label("referred_users_total"), + func.coalesce( + func.sum(case((User.created_at >= start, 1), else_=0)), + 0, + ).label("referred_users_window"), + ) + .where(User.referred_by_user_id.isnot(None)) + .group_by(User.referred_by_user_id) + .subquery() + ) + + # Referral link views in window (telemetry) + link_views_subq = ( + select( + TelemetryEvent.user_id.label("referrer_user_id"), + func.count(TelemetryEvent.id).label("referral_link_views_window"), + ) + .where( + and_( + TelemetryEvent.event_name == "referral_link_viewed", + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.user_id.isnot(None), + ) + ) + .group_by(TelemetryEvent.user_id) + .subquery() + ) + + # Referrers: anyone with an invite_code (we still show even if counts are zero) + rows = ( + await db.execute( + select( + User.id, + User.email, + User.invite_code, + User.created_at, + func.coalesce(referred_counts_subq.c.referred_users_total, 0), + func.coalesce(referred_counts_subq.c.referred_users_window, 0), + func.coalesce(link_views_subq.c.referral_link_views_window, 0), + ) + .where(User.invite_code.isnot(None)) + .outerjoin(referred_counts_subq, referred_counts_subq.c.referrer_user_id == User.id) + .outerjoin(link_views_subq, link_views_subq.c.referrer_user_id == User.id) + .order_by( + func.coalesce(referred_counts_subq.c.referred_users_window, 0).desc(), + func.coalesce(referred_counts_subq.c.referred_users_total, 0).desc(), + User.created_at.desc(), + ) + .offset(offset) + .limit(limit) + ) + ).all() + + referrers = [ + ReferralReferrerRow( + user_id=int(user_id), + email=str(email), + invite_code=str(invite_code) if invite_code else None, + created_at=created_at, + referred_users_total=int(referred_total or 0), + referred_users_window=int(referred_window or 0), + referral_link_views_window=int(link_views or 0), + ) + for user_id, email, invite_code, created_at, referred_total, referred_window, link_views in rows + ] + + totals = {} + totals["referrers_with_invite_code"] = int( + ( + await db.execute( + select(func.count(User.id)).where(User.invite_code.isnot(None)) + ) + ).scalar() + or 0 + ) + totals["referred_users_total"] = int( + ( + await db.execute( + select(func.count(User.id)).where(User.referred_by_user_id.isnot(None)) + ) + ).scalar() + or 0 + ) + totals["referred_users_window"] = int( + ( + await db.execute( + select(func.count(User.id)).where( + and_( + User.referred_by_user_id.isnot(None), + User.created_at >= start, + User.created_at <= end, + ) + ) + ) + ).scalar() + or 0 + ) + totals["referral_link_views_window"] = int( + ( + await db.execute( + select(func.count(TelemetryEvent.id)).where( + and_( + TelemetryEvent.event_name == "referral_link_viewed", + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + ) + ) + ) + ).scalar() + or 0 + ) + + return ReferralKpisResponse( + window=ReferralKpiWindow(days=days, start=start, end=end), + totals=totals, + referrers=referrers, + ) + diff --git a/backend/app/api/tld_prices.py b/backend/app/api/tld_prices.py index 26123b2..a98f4b3 100644 --- a/backend/app/api/tld_prices.py +++ b/backend/app/api/tld_prices.py @@ -64,6 +64,38 @@ async def get_db_price_count(db) -> int: return result.scalar() or 0 +@router.get("/tlds") +async def 
list_tracked_tlds( + db: Database, + limit: int = Query(5000, ge=1, le=20000), + offset: int = Query(0, ge=0), +): + """ + List distinct TLDs tracked in the database (DB-driven). + + This endpoint is intentionally database-only (no static fallback), + so callers (e.g. sitemap generation) can rely on real tracked inventory. + """ + rows = ( + await db.execute( + select(TLDPrice.tld) + .distinct() + .order_by(TLDPrice.tld) + .offset(offset) + .limit(limit) + ) + ).scalars().all() + total = (await db.execute(select(func.count(func.distinct(TLDPrice.tld))))).scalar() or 0 + latest = (await db.execute(select(func.max(TLDPrice.recorded_at)))).scalar() + return { + "tlds": [str(t).lstrip(".").lower() for t in rows if t], + "total": int(total), + "limit": int(limit), + "offset": int(offset), + "latest_recorded_at": latest.isoformat() if latest else None, + } + + # Real TLD price data based on current market research (December 2024) # Prices in USD, sourced from major registrars: Namecheap, Cloudflare, Porkbun, Google Domains TLD_DATA = { @@ -655,14 +687,8 @@ async def get_tld_price_history( ): """Get price history for a specific TLD. - Returns REAL historical data from database if available (5+ data points), - otherwise generates simulated data based on current price and known trends. - - Data Source Priority: - 1. Real DB data (from daily scrapes) - marked as source: "database" - 2. Simulated data based on trend - marked as source: "simulated" + Returns REAL historical data from database (no simulation). """ - import math tld_clean = tld.lower().lstrip(".") @@ -688,81 +714,35 @@ async def get_tld_price_history( trend = static_data.get("trend", "stable") trend_reason = static_data.get("trend_reason", "Price tracking available") - # ========================================================================== - # TRY REAL HISTORICAL DATA FROM DATABASE FIRST - # ========================================================================== real_history = await get_real_price_history(db, tld_clean, days) - # Use real data if we have enough points (at least 5 data points) - if len(real_history) >= 5: - history = real_history - data_source = "database" - - # Calculate price changes from real data - price_7d_ago = None - price_30d_ago = None - price_90d_ago = None - - now = datetime.utcnow().date() - for h in history: + if not real_history: + raise HTTPException(status_code=404, detail=f"No historical data for '.{tld_clean}' yet") + + history = real_history + data_source = "database" + + # Use the most recent daily average as current_price when available + if history: + current_price = float(history[-1]["price"]) + + def _price_at_or_before(days_ago_target: int) -> float: + """Get the closest historical price at or before the target age.""" + target_date = (datetime.utcnow() - timedelta(days=days_ago_target)).date() + best = float(history[0]["price"]) + for h in reversed(history): try: h_date = datetime.strptime(h["date"], "%Y-%m-%d").date() - days_ago = (now - h_date).days - - if days_ago <= 7 and price_7d_ago is None: - price_7d_ago = h["price"] - if days_ago <= 30 and price_30d_ago is None: - price_30d_ago = h["price"] - if days_ago <= 90 and price_90d_ago is None: - price_90d_ago = h["price"] - except (ValueError, TypeError): + except Exception: continue - - # Fallback to earliest available - if price_7d_ago is None and history: - price_7d_ago = history[-1]["price"] - if price_30d_ago is None and history: - price_30d_ago = history[0]["price"] - if price_90d_ago is None and history: - price_90d_ago = 
history[0]["price"] - else: - # ========================================================================== - # FALLBACK: SIMULATED DATA BASED ON TREND - # ========================================================================== - data_source = "simulated" - history = [] - current_date = datetime.utcnow() - - # Calculate trend factor based on known trends - trend_factor = 1.0 - if trend == "up": - trend_factor = 0.92 # Prices were ~8% lower - elif trend == "down": - trend_factor = 1.05 # Prices were ~5% higher - - # Generate weekly data points - for i in range(days, -1, -7): - date = current_date - timedelta(days=i) - progress = 1 - (i / days) - - if trend == "up": - price = current_price * (trend_factor + (1 - trend_factor) * progress) - elif trend == "down": - price = current_price * (trend_factor - (trend_factor - 1) * progress) - else: - # Add small fluctuation for stable prices - fluctuation = math.sin(i * 0.1) * 0.02 - price = current_price * (1 + fluctuation) - - history.append({ - "date": date.strftime("%Y-%m-%d"), - "price": round(price, 2), - }) - - # Calculate price changes from simulated data - price_7d_ago = history[-2]["price"] if len(history) >= 2 else current_price - price_30d_ago = history[-5]["price"] if len(history) >= 5 else current_price - price_90d_ago = history[0]["price"] if history else current_price + if h_date <= target_date: + best = float(h["price"]) + break + return best + + price_7d_ago = _price_at_or_before(7) + price_30d_ago = _price_at_or_before(30) + price_90d_ago = _price_at_or_before(90) # Calculate percentage changes safely change_7d = round((current_price - price_7d_ago) / price_7d_ago * 100, 2) if price_7d_ago and price_7d_ago > 0 else 0 @@ -1051,8 +1031,8 @@ async def get_data_quality_stats(db: Database): }, "chart_readiness": { "tlds_ready_for_charts": chartable_tlds, - "tlds_using_simulation": total_tlds - chartable_tlds, - "recommendation": "Run daily scrapes for 7+ days to enable real charts" if chartable_tlds < 10 else "Good coverage!", + "tlds_with_insufficient_history": total_tlds - chartable_tlds, + "recommendation": "Run daily scrapes for 7+ days to enable richer charts" if chartable_tlds < 10 else "Good coverage!", }, "data_sources": { "static_tlds": len(TLD_DATA), diff --git a/backend/app/api/yield_domains.py b/backend/app/api/yield_domains.py index 0a50be0..955f921 100644 --- a/backend/app/api/yield_domains.py +++ b/backend/app/api/yield_domains.py @@ -43,13 +43,11 @@ from app.services.intent_detector import ( estimate_domain_yield, get_intent_detector, ) +from app.services.yield_dns import verify_yield_dns +from app.services.telemetry import track_event router = APIRouter(prefix="/yield", tags=["yield"]) -# DNS Configuration (would be in config in production) -YIELD_NAMESERVERS = ["ns1.pounce.io", "ns2.pounce.io"] -YIELD_CNAME_TARGET = "yield.pounce.io" - # ============================================================================ # Intent Analysis (Public) @@ -124,13 +122,36 @@ async def get_yield_dashboard( domain_ids = [d.id for d in domains] monthly_result = await db.execute( select( - func.count(YieldTransaction.id).label("count"), - func.coalesce(func.sum(YieldTransaction.net_amount), 0).label("revenue"), - func.sum(case((YieldTransaction.event_type == "click", 1), else_=0)).label("clicks"), - func.sum(case((YieldTransaction.event_type.in_(["lead", "sale"]), 1), else_=0)).label("conversions"), + func.coalesce( + func.sum( + case( + (YieldTransaction.status.in_(["confirmed", "paid"]), YieldTransaction.net_amount), + else_=0, + ) 
+ ), + 0, + ).label("revenue"), + func.sum( + case( + (YieldTransaction.event_type == "click", 1), + else_=0, + ) + ).label("clicks"), + func.sum( + case( + ( + and_( + YieldTransaction.event_type.in_(["lead", "sale"]), + YieldTransaction.status.in_(["confirmed", "paid"]), + ), + 1, + ), + else_=0, + ) + ).label("conversions"), ).where( YieldTransaction.yield_domain_id.in_(domain_ids), - YieldTransaction.created_at >= month_start, + YieldTransaction.created_at >= month_start, ) ) monthly_stats = monthly_result.first() @@ -185,7 +206,7 @@ async def get_yield_dashboard( lifetime_clicks=lifetime_clicks, lifetime_conversions=lifetime_conversions, pending_payout=pending_payout, - next_payout_date=month_start + timedelta(days=32), # Approx next month + next_payout_date=(month_start + timedelta(days=32)).replace(day=1), currency="CHF", ) @@ -283,6 +304,7 @@ async def activate_domain_for_yield( This creates the yield domain record and returns DNS setup instructions. """ from app.models.portfolio import PortfolioDomain + from app.models.subscription import Subscription, SubscriptionTier domain = request.domain.lower().strip() @@ -314,6 +336,30 @@ async def activate_domain_for_yield( status_code=400, detail="Cannot activate Yield for a sold domain.", ) + + # SECURITY CHECK 4: Tier gating + limits + sub_result = await db.execute(select(Subscription).where(Subscription.user_id == current_user.id)) + subscription = sub_result.scalar_one_or_none() + tier = subscription.tier if subscription else SubscriptionTier.SCOUT + tier_value = tier.value if hasattr(tier, "value") else str(tier) + + if tier_value == "scout": + raise HTTPException( + status_code=403, + detail="Yield is not available on Scout plan. Upgrade to Trader or Tycoon.", + ) + + max_yield_domains = 5 if tier_value == "trader" else 10_000_000 + user_domain_count = ( + await db.execute( + select(func.count(YieldDomain.id)).where(YieldDomain.user_id == current_user.id) + ) + ).scalar() or 0 + if user_domain_count >= max_yield_domains: + raise HTTPException( + status_code=403, + detail=f"Yield domain limit reached for your plan ({max_yield_domains}).", + ) # Check if domain already exists in yield system existing_result = await db.execute( @@ -364,12 +410,15 @@ async def activate_domain_for_yield( await db.refresh(yield_domain) # Create DNS instructions + yield_nameservers = settings.yield_nameserver_list + if not yield_nameservers: + raise HTTPException(status_code=500, detail="Yield nameservers are not configured on server.") dns_instructions = DNSSetupInstructions( domain=domain, - nameservers=YIELD_NAMESERVERS, + nameservers=yield_nameservers, cname_host="@", - cname_target=YIELD_CNAME_TARGET, - verification_url=f"{settings.site_url}/api/v1/yield/verify/{yield_domain.id}", + cname_target=settings.yield_cname_target, + verification_url=f"{settings.site_url}/api/v1/yield/domains/{yield_domain.id}/verify", ) return ActivateYieldResponse( @@ -417,59 +466,43 @@ async def verify_domain_dns( if not domain: raise HTTPException(status_code=404, detail="Yield domain not found") - # Perform DNS check (simplified - in production use dnspython) - verified = False - actual_ns = [] - error = None - - try: - import dns.resolver - - # Check nameservers - try: - answers = dns.resolver.resolve(domain.domain, 'NS') - actual_ns = [str(rr.target).rstrip('.') for rr in answers] - - # Check if our nameservers are set - our_ns_set = set(ns.lower() for ns in YIELD_NAMESERVERS) - actual_ns_set = set(ns.lower() for ns in actual_ns) - - if 
our_ns_set.issubset(actual_ns_set): - verified = True - except dns.resolver.NXDOMAIN: - error = "Domain does not exist" - except dns.resolver.NoAnswer: - # Try CNAME instead - try: - cname_answers = dns.resolver.resolve(domain.domain, 'CNAME') - for rr in cname_answers: - if str(rr.target).rstrip('.').lower() == YIELD_CNAME_TARGET.lower(): - verified = True - break - except Exception: - error = "No NS or CNAME records found" - except Exception as e: - error = str(e) - - except ImportError: - # dnspython not installed - simulate for development - verified = True # Auto-verify in dev - actual_ns = YIELD_NAMESERVERS + # Production-grade DNS check + check = verify_yield_dns( + domain=domain.domain, + expected_nameservers=settings.yield_nameserver_list, + cname_target=settings.yield_cname_target, + ) + verified = check.verified + actual_ns = check.actual_ns + error = check.error # Update domain status if verified and not domain.dns_verified: domain.dns_verified = True domain.dns_verified_at = datetime.utcnow() + domain.connected_at = domain.dns_verified_at domain.status = "active" domain.activated_at = datetime.utcnow() + + await track_event( + db, + event_name="yield_connected", + request=None, + user_id=current_user.id, + is_authenticated=True, + source="terminal", + domain=domain.domain, + yield_domain_id=domain.id, + metadata={"method": check.method, "cname_ok": check.cname_ok, "actual_ns": check.actual_ns}, + ) await db.commit() return DNSVerificationResult( domain=domain.domain, verified=verified, - expected_ns=YIELD_NAMESERVERS, + expected_ns=settings.yield_nameserver_list, actual_ns=actual_ns, - cname_ok=verified and not actual_ns, + cname_ok=check.cname_ok if verified else False, error=error, checked_at=datetime.utcnow(), ) @@ -722,6 +755,7 @@ def _domain_to_response(domain: YieldDomain) -> YieldDomainResponse: partner_name=domain.partner.name if domain.partner else None, dns_verified=domain.dns_verified, dns_verified_at=domain.dns_verified_at, + connected_at=getattr(domain, "connected_at", None), total_clicks=domain.total_clicks, total_conversions=domain.total_conversions, total_revenue=domain.total_revenue, @@ -737,6 +771,7 @@ def _tx_to_response(tx: YieldTransaction) -> YieldTransactionResponse: id=tx.id, event_type=tx.event_type, partner_slug=tx.partner_slug, + click_id=getattr(tx, "click_id", None), gross_amount=tx.gross_amount, net_amount=tx.net_amount, currency=tx.currency, diff --git a/backend/app/api/yield_payout_admin.py b/backend/app/api/yield_payout_admin.py new file mode 100644 index 0000000..ccc0853 --- /dev/null +++ b/backend/app/api/yield_payout_admin.py @@ -0,0 +1,188 @@ +""" +Admin endpoints for Yield payouts (ledger). 
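+
+Flow (two steps): POST /payouts/generate assigns confirmed, unpaid transactions
+in the period to YieldPayout rows per (user, currency); POST /payouts/{id}/complete
+then marks the payout completed and flips its transactions to status="paid".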
+ +Premium constraints: +- No placeholder payouts +- No currency mixing +- Idempotent generation per (user, currency, period) +""" + +from __future__ import annotations + +from datetime import datetime +from decimal import Decimal + +from fastapi import APIRouter, Depends, HTTPException, status +from pydantic import BaseModel, Field +from sqlalchemy import and_, func, select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.deps import get_current_user, get_db +from app.models.user import User +from app.models.yield_domain import YieldPayout, YieldTransaction +from app.services.telemetry import track_event +from app.services.yield_payouts import generate_payouts_for_period + + +router = APIRouter(prefix="/yield", tags=["yield-admin"]) + + +class PayoutGenerateRequest(BaseModel): + period_start: datetime + period_end: datetime + + +class GeneratedPayout(BaseModel): + id: int + user_id: int + amount: Decimal + currency: str + period_start: datetime + period_end: datetime + transaction_count: int + status: str + created_at: datetime + + +class PayoutGenerateResponse(BaseModel): + created: list[GeneratedPayout] + skipped_existing: int = 0 + + +class PayoutCompleteRequest(BaseModel): + payment_method: str | None = Field(default=None, max_length=50) + payment_reference: str | None = Field(default=None, max_length=200) + + +class PayoutCompleteResponse(BaseModel): + payout_id: int + transactions_marked_paid: int + completed_at: datetime + + +def _require_admin(current_user: User) -> None: + if not current_user.is_admin: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Admin access required") + + +@router.post("/payouts/generate", response_model=PayoutGenerateResponse) +async def generate_payouts( + payload: PayoutGenerateRequest, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Create YieldPayout rows for confirmed, unpaid transactions in the period. + + This does NOT mark payouts as completed. It only assigns transactions to a payout via payout_id. + Completion is a separate step once payment is executed. + """ + _require_admin(current_user) + + if payload.period_end <= payload.period_start: + raise HTTPException(status_code=400, detail="period_end must be after period_start") + + created_count, skipped_existing = await generate_payouts_for_period( + db, + period_start=payload.period_start, + period_end=payload.period_end, + ) + + payouts = ( + await db.execute( + select(YieldPayout) + .where( + and_( + YieldPayout.period_start == payload.period_start, + YieldPayout.period_end == payload.period_end, + ) + ) + .order_by(YieldPayout.created_at.desc()) + ) + ).scalars().all() + + created = [ + GeneratedPayout( + id=p.id, + user_id=p.user_id, + amount=p.amount, + currency=p.currency, + period_start=p.period_start, + period_end=p.period_end, + transaction_count=p.transaction_count, + status=p.status, + created_at=p.created_at, + ) + for p in payouts + ] + + # created_count is still returned implicitly via list length; we keep it for logs later + _ = created_count + return PayoutGenerateResponse(created=created, skipped_existing=skipped_existing) + + +@router.post("/payouts/{payout_id}/complete", response_model=PayoutCompleteResponse) +async def complete_payout( + payout_id: int, + payload: PayoutCompleteRequest, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Mark a payout as completed and mark assigned transactions as paid. 
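+
+    Example (illustrative values):
+
+        POST /api/v1/yield/payouts/123/complete
+        {"payment_method": "bank_transfer", "payment_reference": "PO-2025-12-001"}
+
+    Afterwards every transaction assigned to this payout is status="paid" with
+    paid_at set to the payout's completed_at.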
+ """ + _require_admin(current_user) + + payout = ( + await db.execute(select(YieldPayout).where(YieldPayout.id == payout_id)) + ).scalar_one_or_none() + if not payout: + raise HTTPException(status_code=404, detail="Payout not found") + + if payout.status == "completed": + raise HTTPException(status_code=400, detail="Payout already completed") + + payout.status = "completed" + payout.completed_at = datetime.utcnow() + payout.payment_method = payload.payment_method + payout.payment_reference = payload.payment_reference + + txs = ( + await db.execute( + select(YieldTransaction).where(YieldTransaction.payout_id == payout.id) + ) + ).scalars().all() + + marked = 0 + for tx in txs: + if tx.status != "paid": + tx.status = "paid" + tx.paid_at = payout.completed_at + marked += 1 + + await track_event( + db, + event_name="payout_paid", + request=None, + user_id=payout.user_id, + is_authenticated=None, + source="admin", + domain=None, + yield_domain_id=None, + metadata={ + "payout_id": payout.id, + "currency": payout.currency, + "amount": float(payout.amount), + "transaction_count": payout.transaction_count, + "payment_method": payout.payment_method, + }, + ) + + await db.commit() + + return PayoutCompleteResponse( + payout_id=payout.id, + transactions_marked_paid=marked, + completed_at=payout.completed_at, + ) + diff --git a/backend/app/api/yield_routing.py b/backend/app/api/yield_routing.py index fd225b0..11ba332 100644 --- a/backend/app/api/yield_routing.py +++ b/backend/app/api/yield_routing.py @@ -12,17 +12,21 @@ that yield domains CNAME to. """ import logging -from datetime import datetime +from datetime import datetime, timedelta +from decimal import Decimal from typing import Optional +from uuid import uuid4 -from fastapi import APIRouter, Depends, Request, Response, HTTPException -from fastapi.responses import RedirectResponse, HTMLResponse -from sqlalchemy.orm import Session +from fastapi import APIRouter, Depends, HTTPException, Query, Request +from fastapi.responses import RedirectResponse +from sqlalchemy import and_, func, or_, select +from sqlalchemy.ext.asyncio import AsyncSession from app.api.deps import get_db from app.config import get_settings from app.models.yield_domain import YieldDomain, YieldTransaction, AffiliatePartner from app.services.intent_detector import detect_domain_intent +from app.services.telemetry import track_event logger = logging.getLogger(__name__) settings = get_settings() @@ -30,19 +34,49 @@ settings = get_settings() router = APIRouter(prefix="/r", tags=["yield-routing"]) # Revenue split -USER_REVENUE_SHARE = 0.70 +USER_REVENUE_SHARE = Decimal("0.70") def hash_ip(ip: str) -> str: """Hash IP for privacy-compliant storage.""" import hashlib - return hashlib.sha256(ip.encode()).hexdigest()[:32] + # Salt to prevent trivial rainbow table lookups. 
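+    # Using the server secret as salt means identical IPs hash differently across
+    # deployments, so stored hashes cannot be matched against precomputed tables.
+    # Truncating to 32 hex chars (128 bits) keeps the column compact while the
+    # collision risk stays negligible for rate limiting and dedup purposes.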
+ return hashlib.sha256(f"{ip}|{settings.secret_key}".encode()).hexdigest()[:32] + + +def _get_client_ip(request: Request) -> Optional[str]: + # Prefer proxy headers when behind nginx + xff = request.headers.get("x-forwarded-for") + if xff: + # first IP in list + ip = xff.split(",")[0].strip() + if ip: + return ip + cf_ip = request.headers.get("cf-connecting-ip") + if cf_ip: + return cf_ip.strip() + return request.client.host if request.client else None + + +def _safe_tracking_url(template: str, *, click_id: str, domain: str, domain_id: int, partner: str) -> str: + try: + return template.format( + click_id=click_id, + domain=domain, + domain_id=domain_id, + partner=partner, + ) + except KeyError as e: + raise HTTPException( + status_code=500, + detail=f"Partner tracking_url_template uses unsupported placeholder: {str(e)}", + ) def generate_tracking_url( partner: AffiliatePartner, yield_domain: YieldDomain, - click_id: int, + click_id: str, ) -> str: """ Generate the tracking URL for a partner. @@ -51,403 +85,27 @@ def generate_tracking_url( - clickid / subid: Our click tracking ID - ref: Domain name or user reference """ - # If partner has a tracking URL template, use it - if partner.tracking_url_template: - return partner.tracking_url_template.format( - click_id=click_id, - domain=yield_domain.domain, - domain_id=yield_domain.id, - partner=partner.slug, + if not partner.tracking_url_template: + raise HTTPException( + status_code=503, + detail=f"Partner routing not configured for {partner.slug}. Missing tracking_url_template.", ) - - # Default fallbacks by network - network_urls = { - "comparis_dental": f"https://www.comparis.ch/zahnarzt?subid={click_id}&ref={yield_domain.domain}", - "comparis_health": f"https://www.comparis.ch/krankenkassen?subid={click_id}&ref={yield_domain.domain}", - "comparis_insurance": f"https://www.comparis.ch/versicherungen?subid={click_id}&ref={yield_domain.domain}", - "comparis_hypo": f"https://www.comparis.ch/hypotheken?subid={click_id}&ref={yield_domain.domain}", - "comparis_auto": f"https://www.comparis.ch/autoversicherung?subid={click_id}&ref={yield_domain.domain}", - "comparis_immo": f"https://www.comparis.ch/immobilien?subid={click_id}&ref={yield_domain.domain}", - "homegate": f"https://www.homegate.ch/?ref=pounce&clickid={click_id}", - "immoscout": f"https://www.immoscout24.ch/?ref=pounce&clickid={click_id}", - "autoscout": f"https://www.autoscout24.ch/?ref=pounce&clickid={click_id}", - "jobs_ch": f"https://www.jobs.ch/?ref=pounce&clickid={click_id}", - "booking_com": f"https://www.booking.com/?aid=pounce&clickid={click_id}", - "hostpoint": f"https://www.hostpoint.ch/?ref=pounce&clickid={click_id}", - "infomaniak": f"https://www.infomaniak.com/?ref=pounce&clickid={click_id}", - "galaxus": f"https://www.galaxus.ch/?ref=pounce&clickid={click_id}", - "zalando": f"https://www.zalando.ch/?ref=pounce&clickid={click_id}", - } - - if partner.slug in network_urls: - return network_urls[partner.slug] - - # Pounce self-promotion fallback with referral tracking - # Domain owner gets lifetime commission on signups via their domain - referral_code = f"yield_{yield_domain.user_id}_{yield_domain.id}" - return f"{settings.site_url}/register?ref={referral_code}&from={yield_domain.domain}&clickid={click_id}" - -def is_pounce_affinity_domain(domain: str) -> bool: - """ - Check if a domain has high affinity for Pounce self-promotion. - - Tech, investment, and domain-related domains convert better for Pounce. 
- """ - intent = detect_domain_intent(domain) - - # Check if the matched category has pounce_affinity flag - if intent.category in ["investment", "tech"] or intent.subcategory in ["domains", "dev"]: - return True - - # Check for specific keywords - pounce_keywords = { - "invest", "domain", "trading", "crypto", "asset", "portfolio", - "startup", "tech", "dev", "saas", "digital", "passive", "income" - } - domain_lower = domain.lower() - return any(kw in domain_lower for kw in pounce_keywords) - - -def generate_pounce_promo_page( - yield_domain: YieldDomain, - click_id: int, -) -> str: - """ - Generate Pounce self-promotion landing page. - - Used as fallback when no high-value partner is available, - or when the domain has high Pounce affinity. - """ - referral_code = f"yield_{yield_domain.user_id}_{yield_domain.id}" - register_url = f"{settings.site_url}/register?ref={referral_code}&from={yield_domain.domain}&clickid={click_id}" - - return f""" - - - - - - {yield_domain.domain} - Powered by Pounce - - - -
-        [removed here: inline HTML/CSS for the self-promotion landing page.
-         Visible copy, in order: badge "This domain is monetized by Pounce";
-         the domain name {yield_domain.domain}; headline "Turn Your Domains
-         Into Passive Income"; subline "Stop paying renewal fees for idle
-         domains. Let them earn money for you — automatically."; CTA
-         "Start Earning Free"; trust badges "Free Forever", "70% Revenue
-         Share", "Swiss Quality"; and the disclosure "👋 The owner of this
-         domain earns a commission when you sign up!"]
- - - - -""" - - -def generate_landing_page( - yield_domain: YieldDomain, - partner: Optional[AffiliatePartner], - click_id: int, -) -> str: - """ - Generate an interstitial landing page. - - Shows for a moment before redirecting, to: - 1. Improve user experience - 2. Allow for A/B testing - 3. Comply with affiliate disclosure requirements - - If no partner, shows Pounce self-promotion instead. - """ - # If no partner or partner is pounce_promo, show Pounce promo page - if partner is None or partner.slug == "pounce_promo": - return generate_pounce_promo_page(yield_domain, click_id) - - intent = detect_domain_intent(yield_domain.domain) - - # Partner info - partner_name = partner.name - partner_desc = partner.description or "Find the best offers" - - # Generate redirect URL - redirect_url = generate_tracking_url(partner, yield_domain, click_id) - - return f""" - - - - - - - {yield_domain.domain} - Redirecting - - - -
-        [removed here: inline HTML/CSS for the interstitial redirect page.
-         Visible copy, in order: the domain name {yield_domain.domain}; its
-         category {intent.category.replace('_', ' ')}; "Redirecting to
-         {partner_name}..."; {partner_desc}; and the fallback link
-         "Click here if not redirected".]
- - - - - - -""" + return _safe_tracking_url( + partner.tracking_url_template, + click_id=click_id, + domain=yield_domain.domain, + domain_id=yield_domain.id, + partner=partner.slug, + ) @router.get("/{domain}") async def route_yield_domain( domain: str, request: Request, - db: Session = Depends(get_db), - direct: bool = False, # Skip landing page if true + db: AsyncSession = Depends(get_db), + direct: bool = Query(True, description="Direct redirect without landing page"), ): """ Route traffic for a yield domain. @@ -458,86 +116,133 @@ async def route_yield_domain( - direct: If true, redirect immediately without landing page """ domain = domain.lower().strip() - - # Find yield domain - yield_domain = db.query(YieldDomain).filter( - YieldDomain.domain == domain, - YieldDomain.status == "active", - ).first() - - if not yield_domain: - # Domain not found or not active - show error page - logger.warning(f"Route request for unknown/inactive domain: {domain}") - return HTMLResponse( - content=f""" - - Domain Not Active - -

-            [removed here: inline HTML for the 404 page: heading "Domain Not
-             Active", body "The domain {domain} is not currently active for
-             yield routing.", and a "Visit Pounce" link.]
- - - """, - status_code=404 + + # Find yield domain (must be connected + active) + yield_domain = ( + await db.execute( + select(YieldDomain).where( + and_( + YieldDomain.domain == domain, + YieldDomain.status == "active", + YieldDomain.dns_verified == True, + or_(YieldDomain.connected_at.is_not(None), YieldDomain.dns_verified_at.is_not(None)), + ) + ) ) - - # Get partner - partner = None + ).scalar_one_or_none() + + if not yield_domain: + logger.warning(f"Route request for unknown/inactive/unconnected domain: {domain}") + raise HTTPException(status_code=404, detail="Domain not active for yield routing.") + + # Resolve partner + partner: Optional[AffiliatePartner] = None if yield_domain.partner_id: - partner = db.query(AffiliatePartner).filter( - AffiliatePartner.id == yield_domain.partner_id, - AffiliatePartner.is_active == True, - ).first() - - # If no partner assigned, try to find one based on intent + partner = ( + await db.execute( + select(AffiliatePartner).where( + and_( + AffiliatePartner.id == yield_domain.partner_id, + AffiliatePartner.is_active == True, + ) + ) + ) + ).scalar_one_or_none() + if not partner and yield_domain.detected_intent: - partner = db.query(AffiliatePartner).filter( - AffiliatePartner.intent_categories.contains(yield_domain.detected_intent.split('_')[0]), - AffiliatePartner.is_active == True, - ).order_by(AffiliatePartner.priority.desc()).first() - - # Create click transaction - client_ip = request.client.host if request.client else None + # Match full detected intent first (e.g. medical_dental) + partner = ( + await db.execute( + select(AffiliatePartner) + .where( + and_( + AffiliatePartner.is_active == True, + AffiliatePartner.intent_categories.ilike(f"%{yield_domain.detected_intent}%"), + ) + ) + .order_by(AffiliatePartner.priority.desc()) + ) + ).scalar_one_or_none() + + if not partner: + raise HTTPException(status_code=503, detail="No active partner available for this domain intent.") + + # Rate limit: max 120 clicks/10min per IP per domain + client_ip = _get_client_ip(request) + ip_hash = hash_ip(client_ip) if client_ip else None + if ip_hash: + cutoff = datetime.utcnow() - timedelta(minutes=10) + recent = ( + await db.execute( + select(func.count(YieldTransaction.id)).where( + and_( + YieldTransaction.yield_domain_id == yield_domain.id, + YieldTransaction.event_type == "click", + YieldTransaction.ip_hash == ip_hash, + YieldTransaction.created_at >= cutoff, + ) + ) + ) + ).scalar() or 0 + if recent >= 120: + raise HTTPException(status_code=429, detail="Too many requests. 
Please slow down.") + + # Compute click economics (only CPC can be accounted immediately) + gross = Decimal("0") + net = Decimal("0") + currency = (partner.payout_currency or "CHF").upper() + if (partner.payout_type or "").lower() == "cpc": + gross = partner.payout_amount or Decimal("0") + net = (gross * USER_REVENUE_SHARE).quantize(Decimal("0.01")) + + click_id = uuid4().hex + destination_url = generate_tracking_url(partner, yield_domain, click_id) + user_agent = request.headers.get("user-agent") referrer = request.headers.get("referer") - + geo_country = request.headers.get("cf-ipcountry") or request.headers.get("x-country") + geo_country = geo_country.strip().upper() if geo_country else None + transaction = YieldTransaction( yield_domain_id=yield_domain.id, event_type="click", - partner_slug=partner.slug if partner else "unknown", - gross_amount=0, - net_amount=0, - currency="CHF", - referrer=referrer, + partner_slug=partner.slug, + click_id=click_id, + destination_url=destination_url[:2000], + gross_amount=gross, + net_amount=net, + currency=currency, + referrer=referrer[:500] if referrer else None, user_agent=user_agent[:500] if user_agent else None, - ip_hash=hash_ip(client_ip) if client_ip else None, + geo_country=geo_country[:2] if geo_country else None, + ip_hash=ip_hash, status="confirmed", confirmed_at=datetime.utcnow(), ) - db.add(transaction) - - # Update domain stats + yield_domain.total_clicks += 1 yield_domain.last_click_at = datetime.utcnow() - - db.commit() - db.refresh(transaction) - - # Generate redirect URL - redirect_url = ( - generate_tracking_url(partner, yield_domain, transaction.id) - if partner else f"{settings.site_url}/buy?ref={domain}" + if net > 0: + yield_domain.total_revenue += net + + await track_event( + db, + event_name="yield_click", + request=request, + user_id=yield_domain.user_id, + is_authenticated=None, + source="routing", + domain=yield_domain.domain, + yield_domain_id=yield_domain.id, + click_id=click_id, + metadata={"partner": partner.slug, "currency": currency, "net_amount": float(net)}, ) - - # Direct redirect or show landing page - if direct: - return RedirectResponse(url=redirect_url, status_code=302) - - # Show interstitial landing page - html = generate_landing_page(yield_domain, partner, transaction.id) - return HTMLResponse(content=html) + + await db.commit() + + # Only direct redirect for MVP + return RedirectResponse(url=destination_url, status_code=302) @router.get("/") @@ -545,7 +250,7 @@ async def yield_routing_info(): """Info endpoint for yield routing service.""" return { "service": "Pounce Yield Routing", - "version": "1.0.0", + "version": "2.0.0", "docs": f"{settings.site_url}/docs#/yield-routing", "status": "active", } @@ -558,7 +263,7 @@ async def yield_routing_info(): @router.api_route("/catch-all", methods=["GET", "HEAD"]) async def catch_all_route( request: Request, - db: Session = Depends(get_db), + db: AsyncSession = Depends(get_db), ): """ Catch-all route for host-based routing. @@ -582,21 +287,22 @@ async def catch_all_route( if any(host.endswith(d) for d in our_domains): return {"status": "not a yield domain", "host": host} - # Look up yield domain - yield_domain = db.query(YieldDomain).filter( - YieldDomain.domain == host, - YieldDomain.status == "active", - ).first() - - if not yield_domain: - return HTMLResponse( - content=f"

-            [removed here: minimal HTML 404 body: "Domain {host} not configured"]
", - status_code=404 + # If host matches a connected yield domain, route it + _ = ( + await db.execute( + select(YieldDomain.id).where( + and_( + YieldDomain.domain == host, + YieldDomain.status == "active", + YieldDomain.dns_verified == True, + or_(YieldDomain.connected_at.is_not(None), YieldDomain.dns_verified_at.is_not(None)), + ) + ) ) - - # Redirect to routing endpoint - return RedirectResponse( - url=f"/api/v1/r/{host}", - status_code=302 - ) + ).scalar_one_or_none() + + if not _: + raise HTTPException(status_code=404, detail="Host not configured for yield routing.") + + return RedirectResponse(url=f"/api/v1/r/{host}?direct=true", status_code=302) diff --git a/backend/app/api/yield_webhooks.py b/backend/app/api/yield_webhooks.py index 185c2e6..be589fe 100644 --- a/backend/app/api/yield_webhooks.py +++ b/backend/app/api/yield_webhooks.py @@ -20,14 +20,15 @@ from datetime import datetime from decimal import Decimal from typing import Optional -from fastapi import APIRouter, Depends, HTTPException, Request, Header, BackgroundTasks +from fastapi import APIRouter, BackgroundTasks, Depends, Header, HTTPException, Request from pydantic import BaseModel, Field -from sqlalchemy import select -from sqlalchemy.orm import Session +from sqlalchemy import and_, select +from sqlalchemy.ext.asyncio import AsyncSession from app.api.deps import get_db from app.config import get_settings from app.models.yield_domain import YieldDomain, YieldTransaction, AffiliatePartner +from app.services.telemetry import track_event logger = logging.getLogger(__name__) settings = get_settings() @@ -47,6 +48,7 @@ class PartnerEvent(BaseModel): event_type: str = Field(..., description="click, lead, or sale") domain: str = Field(..., description="The yield domain that generated this event") transaction_id: Optional[str] = Field(None, description="Partner's transaction ID") + click_id: Optional[str] = Field(None, description="Pounce click_id for attribution (UUID hex)") amount: Optional[float] = Field(None, description="Gross commission amount") currency: Optional[str] = Field("CHF", description="Currency code") @@ -88,7 +90,21 @@ def verify_hmac_signature( def hash_ip(ip: str) -> str: """Hash IP address for privacy-compliant storage.""" - return hashlib.sha256(ip.encode()).hexdigest()[:32] + return hashlib.sha256(f"{ip}|{settings.secret_key}".encode()).hexdigest()[:32] + + +def _get_webhook_secret(partner_slug: str) -> Optional[str]: + """ + Webhook secrets are configured via environment: + - YIELD_WEBHOOK_SECRET (global default) + - YIELD_WEBHOOK_SECRET_ (partner-specific override) + """ + import os + + specific = os.getenv(f"YIELD_WEBHOOK_SECRET_{partner_slug.upper()}") + if specific: + return specific + return os.getenv("YIELD_WEBHOOK_SECRET") or None # ============================================================================ @@ -101,7 +117,7 @@ async def receive_partner_webhook( event: PartnerEvent, request: Request, background_tasks: BackgroundTasks, - db: Session = Depends(get_db), + db: AsyncSession = Depends(get_db), x_webhook_signature: Optional[str] = Header(None), x_api_key: Optional[str] = Header(None), ): @@ -111,25 +127,42 @@ async def receive_partner_webhook( Partners POST events here when clicks, leads, or sales occur. """ # 1. 
Find partner - partner = db.query(AffiliatePartner).filter( - AffiliatePartner.slug == partner_slug, - AffiliatePartner.is_active == True, - ).first() + partner = ( + await db.execute( + select(AffiliatePartner).where( + and_( + AffiliatePartner.slug == partner_slug, + AffiliatePartner.is_active == True, + ) + ) + ) + ).scalar_one_or_none() if not partner: logger.warning(f"Webhook from unknown partner: {partner_slug}") raise HTTPException(status_code=404, detail="Unknown partner") - # 2. Verify authentication (if configured) - # Note: In production, store partner API keys in a secure location - # For now, we accept webhooks if the partner exists - # TODO: Add proper signature verification per partner + # 2. Verify authentication (strict) + secret = _get_webhook_secret(partner_slug) + if not secret: + raise HTTPException(status_code=503, detail="Webhook secret not configured on server.") + if not x_webhook_signature: + raise HTTPException(status_code=401, detail="Missing webhook signature.") + raw = await request.body() + if not verify_hmac_signature(raw, x_webhook_signature, secret): + raise HTTPException(status_code=401, detail="Invalid webhook signature.") - # 3. Find yield domain - yield_domain = db.query(YieldDomain).filter( - YieldDomain.domain == event.domain.lower(), - YieldDomain.status == "active", - ).first() + # 3. Find yield domain (must be active) + yield_domain = ( + await db.execute( + select(YieldDomain).where( + and_( + YieldDomain.domain == event.domain.lower(), + YieldDomain.status == "active", + ) + ) + ) + ).scalar_one_or_none() if not yield_domain: logger.warning(f"Webhook for unknown/inactive domain: {event.domain}") @@ -149,6 +182,7 @@ async def receive_partner_webhook( event_type=event.event_type, partner_slug=partner_slug, partner_transaction_id=event.transaction_id, + click_id=(event.click_id[:64] if event.click_id else None), gross_amount=gross_amount, net_amount=net_amount, currency=event.currency or "CHF", @@ -161,6 +195,25 @@ async def receive_partner_webhook( ) db.add(transaction) + + # Optional: attribute to an existing click transaction (same yield_domain + click_id) + if event.click_id: + click_tx = ( + await db.execute( + select(YieldTransaction).where( + and_( + YieldTransaction.yield_domain_id == yield_domain.id, + YieldTransaction.event_type == "click", + YieldTransaction.click_id == event.click_id[:64], + ) + ) + ) + ).scalar_one_or_none() + if not click_tx: + logger.warning( + f"Webhook received click_id but no matching click found: partner={partner_slug} " + f"domain={yield_domain.domain} click_id={event.click_id[:64]}" + ) # 7. 
Update domain aggregates if event.event_type == "click": @@ -172,9 +225,29 @@ async def receive_partner_webhook( # Add revenue when confirmed if transaction.status == "confirmed": yield_domain.total_revenue += net_amount + + await track_event( + db, + event_name="yield_conversion", + request=request, + user_id=yield_domain.user_id, + is_authenticated=None, + source="webhook", + domain=yield_domain.domain, + yield_domain_id=yield_domain.id, + click_id=event.click_id, + metadata={ + "partner": partner_slug, + "event_type": event.event_type, + "status": transaction.status, + "currency": transaction.currency, + "net_amount": float(net_amount), + "partner_transaction_id": event.transaction_id, + }, + ) - db.commit() - db.refresh(transaction) + await db.commit() + await db.refresh(transaction) logger.info( f"Webhook processed: {partner_slug} -> {event.domain} " @@ -206,7 +279,7 @@ class AwinEvent(BaseModel): async def receive_awin_postback( event: AwinEvent, request: Request, - db: Session = Depends(get_db), + db: AsyncSession = Depends(get_db), x_awin_signature: Optional[str] = Header(None), ): """ @@ -214,18 +287,28 @@ async def receive_awin_postback( Awin sends postbacks for tracked conversions. """ + # Verify authentication (strict) + secret = _get_webhook_secret("awin") + if not secret: + raise HTTPException(status_code=503, detail="Webhook secret not configured on server.") + if not x_awin_signature: + raise HTTPException(status_code=401, detail="Missing webhook signature.") + raw = await request.body() + if not verify_hmac_signature(raw, x_awin_signature, secret): + raise HTTPException(status_code=401, detail="Invalid webhook signature.") + # Find domain by click reference - yield_domain = db.query(YieldDomain).filter( - YieldDomain.domain == event.clickRef.lower(), - ).first() + yield_domain = ( + await db.execute(select(YieldDomain).where(YieldDomain.domain == event.clickRef.lower())) + ).scalar_one_or_none() if not yield_domain: # Try to find by ID if clickRef is numeric try: domain_id = int(event.clickRef) - yield_domain = db.query(YieldDomain).filter( - YieldDomain.id == domain_id, - ).first() + yield_domain = ( + await db.execute(select(YieldDomain).where(YieldDomain.id == domain_id)) + ).scalar_one_or_none() except ValueError: pass @@ -246,10 +329,16 @@ async def receive_awin_postback( status = status_map.get(event.status.lower(), "pending") # Create or update transaction - existing_tx = db.query(YieldTransaction).filter( - YieldTransaction.partner_transaction_id == event.transactionId, - YieldTransaction.partner_slug.like("awin%"), - ).first() + existing_tx = ( + await db.execute( + select(YieldTransaction).where( + and_( + YieldTransaction.partner_transaction_id == event.transactionId, + YieldTransaction.partner_slug.ilike("awin%"), + ) + ) + ) + ).scalar_one_or_none() if existing_tx: # Update existing transaction @@ -279,10 +368,10 @@ async def receive_awin_postback( if status == "confirmed": yield_domain.total_revenue += net_amount - db.flush() + await db.flush() transaction_id = transaction.id - db.commit() + await db.commit() logger.info(f"Awin postback processed: {event.transactionId} -> {status}") @@ -300,7 +389,7 @@ async def receive_awin_postback( @router.post("/confirm/{transaction_id}", response_model=WebhookResponse) async def confirm_transaction( transaction_id: int, - db: Session = Depends(get_db), + db: AsyncSession = Depends(get_db), x_internal_key: Optional[str] = Header(None), ): """ @@ -308,15 +397,22 @@ async def confirm_transaction( Internal endpoint for admin 
use or automated confirmation. """ - # Basic auth check - in production, use proper admin auth - internal_key = getattr(settings, 'internal_api_key', None) or settings.secret_key + internal_key = (settings.internal_api_key or "").strip() + if not internal_key: + raise HTTPException(status_code=503, detail="internal_api_key is not configured on server.") if x_internal_key != internal_key: raise HTTPException(status_code=401, detail="Unauthorized") - transaction = db.query(YieldTransaction).filter( - YieldTransaction.id == transaction_id, - YieldTransaction.status == "pending", - ).first() + transaction = ( + await db.execute( + select(YieldTransaction).where( + and_( + YieldTransaction.id == transaction_id, + YieldTransaction.status == "pending", + ) + ) + ) + ).scalar_one_or_none() if not transaction: raise HTTPException(status_code=404, detail="Transaction not found or not pending") @@ -326,14 +422,14 @@ async def confirm_transaction( transaction.confirmed_at = datetime.utcnow() # Update domain revenue - yield_domain = db.query(YieldDomain).filter( - YieldDomain.id == transaction.yield_domain_id - ).first() + yield_domain = ( + await db.execute(select(YieldDomain).where(YieldDomain.id == transaction.yield_domain_id)) + ).scalar_one_or_none() if yield_domain: yield_domain.total_revenue += transaction.net_amount - db.commit() + await db.commit() logger.info(f"Transaction {transaction_id} confirmed manually") @@ -354,6 +450,7 @@ class BatchTransactionItem(BaseModel): event_type: str partner_slug: str transaction_id: str + click_id: Optional[str] = None gross_amount: float currency: str = "CHF" status: str = "confirmed" @@ -376,7 +473,7 @@ class BatchImportResponse(BaseModel): @router.post("/batch-import", response_model=BatchImportResponse) async def batch_import_transactions( request_data: BatchImportRequest, - db: Session = Depends(get_db), + db: AsyncSession = Depends(get_db), x_internal_key: Optional[str] = Header(None), ): """ @@ -384,7 +481,9 @@ async def batch_import_transactions( Internal endpoint for importing partner reports. 
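+
+    Example item (illustrative values):
+
+        {"domain": "zahnarzt-zuerich.ch", "event_type": "sale",
+         "partner_slug": "comparis_dental", "transaction_id": "TX-1001",
+         "gross_amount": 45.0, "currency": "CHF", "status": "confirmed"}
+
+    Duplicates are detected on (partner_transaction_id, partner_slug) and skipped.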
""" - internal_key = getattr(settings, 'internal_api_key', None) or settings.secret_key + internal_key = (settings.internal_api_key or "").strip() + if not internal_key: + raise HTTPException(status_code=503, detail="internal_api_key is not configured on server.") if x_internal_key != internal_key: raise HTTPException(status_code=401, detail="Unauthorized") @@ -395,9 +494,9 @@ async def batch_import_transactions( for item in request_data.transactions: try: # Find domain - yield_domain = db.query(YieldDomain).filter( - YieldDomain.domain == item.domain.lower(), - ).first() + yield_domain = ( + await db.execute(select(YieldDomain).where(YieldDomain.domain == item.domain.lower())) + ).scalar_one_or_none() if not yield_domain: errors.append(f"Domain not found: {item.domain}") @@ -405,10 +504,16 @@ async def batch_import_transactions( continue # Check for duplicate - existing = db.query(YieldTransaction).filter( - YieldTransaction.partner_transaction_id == item.transaction_id, - YieldTransaction.partner_slug == item.partner_slug, - ).first() + existing = ( + await db.execute( + select(YieldTransaction).where( + and_( + YieldTransaction.partner_transaction_id == item.transaction_id, + YieldTransaction.partner_slug == item.partner_slug, + ) + ) + ) + ).scalar_one_or_none() if existing: skipped += 1 @@ -423,6 +528,7 @@ async def batch_import_transactions( event_type=item.event_type, partner_slug=item.partner_slug, partner_transaction_id=item.transaction_id, + click_id=(item.click_id[:64] if item.click_id else None), gross_amount=gross, net_amount=net, currency=item.currency, @@ -446,7 +552,7 @@ async def batch_import_transactions( errors.append(f"Error importing {item.domain}/{item.transaction_id}: {str(e)}") skipped += 1 - db.commit() + await db.commit() return BatchImportResponse( success=len(errors) == 0, diff --git a/backend/app/config.py b/backend/app/config.py index e0f23f1..fa2115f 100644 --- a/backend/app/config.py +++ b/backend/app/config.py @@ -18,6 +18,10 @@ class Settings(BaseSettings): app_name: str = "DomainWatch" debug: bool = True site_url: str = "https://pounce.ch" # Base URL for links in emails/API responses + + # Internal admin operations (server-to-server / cron) + # MUST be set in production; used for protected internal endpoints. + internal_api_key: str = "" # Email Settings (optional) smtp_host: str = "" @@ -43,10 +47,50 @@ class Settings(BaseSettings): enable_metrics: bool = True metrics_path: str = "/metrics" enable_db_query_metrics: bool = False + enable_business_metrics: bool = True + business_metrics_days: int = 30 + business_metrics_cache_seconds: int = 60 + + # Ops / Backups (4B) + enable_db_backups: bool = False + backup_dir: str = "backups" + backup_retention_days: int = 14 + + # Ops / Alerting (4B) - no Docker required + ops_alerts_enabled: bool = False + ops_alert_recipients: str = "" # comma-separated emails; if empty -> CONTACT_EMAIL env fallback + ops_alert_cooldown_minutes: int = 180 + ops_alert_backup_stale_seconds: int = 93600 # ~26h # Rate limiting storage (SlowAPI / limits). Use Redis in production. 
rate_limit_storage_uri: str = "memory://" + # ================================= + # Referral rewards / Anti-fraud (3C.2) + # ================================= + referral_rewards_enabled: bool = True + referral_rewards_cooldown_days: int = 7 + referral_rewards_ip_window_days: int = 30 + referral_rewards_require_ip_hash: bool = True + + # ================================= + # Yield / Intent Routing + # ================================= + # Comma-separated list of nameservers the user must delegate to for Yield. + # Example: "ns1.pounce.io,ns2.pounce.io" + yield_nameservers: str = "ns1.pounce.io,ns2.pounce.io" + # CNAME/ALIAS target for simpler DNS setup (provider-dependent). + # Example: "yield.pounce.io" + yield_cname_target: str = "yield.pounce.io" + + @property + def yield_nameserver_list(self) -> list[str]: + return [ + ns.strip().lower() + for ns in (self.yield_nameservers or "").split(",") + if ns.strip() + ] + # Database pooling (PostgreSQL) db_pool_size: int = 5 db_max_overflow: int = 10 diff --git a/backend/app/db_migrations.py b/backend/app/db_migrations.py index 29cd5f4..b33d9d6 100644 --- a/backend/app/db_migrations.py +++ b/backend/app/db_migrations.py @@ -120,12 +120,100 @@ async def apply_migrations(conn: AsyncConnection) -> None: # 4) domain_listings pounce_score index (market sorting) # ---------------------------------------------------- if await _table_exists(conn, "domain_listings"): + if not await _has_column(conn, "domain_listings", "sold_at"): + logger.info("DB migrations: adding column domain_listings.sold_at") + await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_at DATETIME")) + if not await _has_column(conn, "domain_listings", "sold_reason"): + logger.info("DB migrations: adding column domain_listings.sold_reason") + await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_reason VARCHAR(200)")) + if not await _has_column(conn, "domain_listings", "sold_price"): + logger.info("DB migrations: adding column domain_listings.sold_price") + await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_price FLOAT")) + if not await _has_column(conn, "domain_listings", "sold_currency"): + logger.info("DB migrations: adding column domain_listings.sold_currency") + await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_currency VARCHAR(3)")) + await conn.execute( text( "CREATE INDEX IF NOT EXISTS ix_domain_listings_pounce_score " "ON domain_listings(pounce_score)" ) ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_domain_listings_status " + "ON domain_listings(status)" + ) + ) + + # ---------------------------------------------------- + # 4b) listing_inquiries: deal workflow + audit trail + # ---------------------------------------------------- + if await _table_exists(conn, "listing_inquiries"): + if not await _has_column(conn, "listing_inquiries", "buyer_user_id"): + logger.info("DB migrations: adding column listing_inquiries.buyer_user_id") + await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN buyer_user_id INTEGER")) + if not await _has_column(conn, "listing_inquiries", "closed_at"): + logger.info("DB migrations: adding column listing_inquiries.closed_at") + await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN closed_at DATETIME")) + if not await _has_column(conn, "listing_inquiries", "closed_reason"): + logger.info("DB migrations: adding column listing_inquiries.closed_reason") + await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN closed_reason VARCHAR(200)")) + + 
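A boot-time sanity-check sketch for the new settings above (`internal_api_key`, backups, ops alerts, yield nameservers). This is a hypothetical guard, not part of this diff; the env-var names assume pydantic `BaseSettings`' default case-insensitive field mapping, and `get_settings` is the accessor used throughout the diff.

```python
# Hypothetical startup guard: fail fast when production-critical settings
# are missing or inconsistent, instead of failing later at request time.
from app.config import get_settings

settings = get_settings()

if not (settings.internal_api_key or "").strip():
    # Internal endpoints return 503 and the payout cron stays disabled without it.
    raise RuntimeError("INTERNAL_API_KEY must be set in production")
if settings.enable_db_backups and not settings.backup_dir.strip():
    raise RuntimeError("ENABLE_DB_BACKUPS is on but BACKUP_DIR is empty")
if settings.ops_alerts_enabled and not settings.ops_alert_recipients.strip():
    print("warning: ops alerts will fall back to CONTACT_EMAIL")

print("Yield nameservers:", settings.yield_nameserver_list)
```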
await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_listing_created " + "ON listing_inquiries(listing_id, created_at)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_listing_status " + "ON listing_inquiries(listing_id, status)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_buyer_user " + "ON listing_inquiries(buyer_user_id)" + ) + ) + + # The table itself is created by `Base.metadata.create_all()` on startup. + # Here we only add indexes (idempotent) for existing DBs. + if await _table_exists(conn, "listing_inquiry_events"): + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_events_inquiry_created " + "ON listing_inquiry_events(inquiry_id, created_at)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_events_listing_created " + "ON listing_inquiry_events(listing_id, created_at)" + ) + ) + + if await _table_exists(conn, "listing_inquiry_messages"): + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_inquiry_created " + "ON listing_inquiry_messages(inquiry_id, created_at)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_listing_created " + "ON listing_inquiry_messages(listing_id, created_at)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_sender_created " + "ON listing_inquiry_messages(sender_user_id, created_at)" + ) + ) # ---------------------------------------------------- # 5) Yield tables indexes @@ -144,7 +232,18 @@ async def apply_migrations(conn: AsyncConnection) -> None: ) ) + if not await _has_column(conn, "yield_domains", "connected_at"): + logger.info("DB migrations: adding column yield_domains.connected_at") + await conn.execute(text("ALTER TABLE yield_domains ADD COLUMN connected_at DATETIME")) + if await _table_exists(conn, "yield_transactions"): + if not await _has_column(conn, "yield_transactions", "click_id"): + logger.info("DB migrations: adding column yield_transactions.click_id") + await conn.execute(text("ALTER TABLE yield_transactions ADD COLUMN click_id VARCHAR(64)")) + await conn.execute(text("CREATE INDEX IF NOT EXISTS ix_yield_transactions_click_id ON yield_transactions(click_id)")) + if not await _has_column(conn, "yield_transactions", "destination_url"): + logger.info("DB migrations: adding column yield_transactions.destination_url") + await conn.execute(text("ALTER TABLE yield_transactions ADD COLUMN destination_url TEXT")) await conn.execute( text( "CREATE INDEX IF NOT EXISTS ix_yield_tx_domain_created " @@ -167,7 +266,68 @@ async def apply_migrations(conn: AsyncConnection) -> None: ) # ---------------------------------------------------- - # 6) User referral tracking columns + # 6) Referral rewards: subscriptions.referral_bonus_domains (3C.2) + # ---------------------------------------------------- + if await _table_exists(conn, "subscriptions"): + if not await _has_column(conn, "subscriptions", "referral_bonus_domains"): + logger.info("DB migrations: adding column subscriptions.referral_bonus_domains") + await conn.execute( + text( + "ALTER TABLE subscriptions " + "ADD COLUMN referral_bonus_domains INTEGER NOT NULL DEFAULT 0" + ) + ) + + # ---------------------------------------------------- + # 6a) Telemetry events indexes + # ---------------------------------------------------- + if await _table_exists(conn, "telemetry_events"): + await conn.execute( +
text( + "CREATE INDEX IF NOT EXISTS ix_telemetry_event_name_created " + "ON telemetry_events(event_name, created_at)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_telemetry_user_created " + "ON telemetry_events(user_id, created_at)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_telemetry_listing_created " + "ON telemetry_events(listing_id, created_at)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_telemetry_yield_created " + "ON telemetry_events(yield_domain_id, created_at)" + ) + ) + + # ---------------------------------------------------- + # 6b) Ops alert events (persisted cooldown + history) + # ---------------------------------------------------- + # NOTE: Table is created by Base.metadata.create_all() for new installs. + # Here we ensure indexes exist for older DBs. + if await _table_exists(conn, "ops_alert_events"): + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_ops_alert_key_created " + "ON ops_alert_events(alert_key, created_at)" + ) + ) + await conn.execute( + text( + "CREATE INDEX IF NOT EXISTS ix_ops_alert_status_created " + "ON ops_alert_events(status, created_at)" + ) + ) + + # ---------------------------------------------------- + # 7) User referral tracking columns # ---------------------------------------------------- if await _table_exists(conn, "users"): if not await _has_column(conn, "users", "referred_by_user_id"): @@ -179,6 +339,12 @@ async def apply_migrations(conn: AsyncConnection) -> None: if not await _has_column(conn, "users", "referral_code"): logger.info("DB migrations: adding column users.referral_code") await conn.execute(text("ALTER TABLE users ADD COLUMN referral_code VARCHAR(100)")) + if not await _has_column(conn, "users", "invite_code"): + logger.info("DB migrations: adding column users.invite_code") + await conn.execute(text("ALTER TABLE users ADD COLUMN invite_code VARCHAR(32)")) + + # Unique index for invite_code (SQLite + Postgres) + await conn.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_users_invite_code ON users(invite_code)")) # ---------------------------------------------------- # 7) Portfolio DNS verification columns diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 882953a..7bce2fb 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -13,6 +13,8 @@ from app.models.listing import DomainListing, ListingInquiry, ListingView from app.models.sniper_alert import SniperAlert, SniperAlertMatch from app.models.seo_data import DomainSEOData from app.models.yield_domain import YieldDomain, YieldTransaction, YieldPayout, AffiliatePartner +from app.models.telemetry import TelemetryEvent +from app.models.ops_alert import OpsAlertEvent __all__ = [ "User", @@ -43,4 +45,7 @@ __all__ = [ "YieldTransaction", "YieldPayout", "AffiliatePartner", + # New: Telemetry (events) + "TelemetryEvent", + "OpsAlertEvent", ] diff --git a/backend/app/models/listing.py b/backend/app/models/listing.py index cd41772..73aab03 100644 --- a/backend/app/models/listing.py +++ b/backend/app/models/listing.py @@ -91,6 +91,10 @@ class DomainListing(Base): # Status status: Mapped[str] = mapped_column(String(30), default=ListingStatus.DRAFT.value, index=True) + sold_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + sold_reason: Mapped[Optional[str]] = mapped_column(String(200), nullable=True) + sold_price: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + sold_currency: Mapped[Optional[str]] = 
mapped_column(String(3), nullable=True) # Features show_valuation: Mapped[bool] = mapped_column(Boolean, default=True) @@ -147,6 +151,7 @@ class ListingInquiry(Base): id: Mapped[int] = mapped_column(primary_key=True, index=True) listing_id: Mapped[int] = mapped_column(ForeignKey("domain_listings.id"), index=True, nullable=False) + buyer_user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), index=True, nullable=True) # Inquirer info name: Mapped[str] = mapped_column(String(100), nullable=False) @@ -159,7 +164,8 @@ class ListingInquiry(Base): offer_amount: Mapped[Optional[float]] = mapped_column(Float, nullable=True) # Status - status: Mapped[str] = mapped_column(String(20), default="new") # new, read, replied, spam + status: Mapped[str] = mapped_column(String(20), default="new") # new, read, replied, closed, spam + closed_reason: Mapped[Optional[str]] = mapped_column(String(200), nullable=True) # Tracking ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True) @@ -169,14 +175,72 @@ class ListingInquiry(Base): created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) read_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) replied_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + closed_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) # Relationships listing: Mapped["DomainListing"] = relationship("DomainListing", back_populates="inquiries") + messages: Mapped[List["ListingInquiryMessage"]] = relationship( + "ListingInquiryMessage", back_populates="inquiry", cascade="all, delete-orphan" + ) + events: Mapped[List["ListingInquiryEvent"]] = relationship( + "ListingInquiryEvent", back_populates="inquiry", cascade="all, delete-orphan" + ) def __repr__(self) -> str: return f"" +class ListingInquiryEvent(Base): + """ + Audit trail for inquiry status changes. + + This is the minimal “deal system” log: + - who changed what status + - when it happened + - optional reason (close/spam) + """ + + __tablename__ = "listing_inquiry_events" + + id: Mapped[int] = mapped_column(primary_key=True, index=True) + inquiry_id: Mapped[int] = mapped_column(ForeignKey("listing_inquiries.id"), index=True, nullable=False) + listing_id: Mapped[int] = mapped_column(ForeignKey("domain_listings.id"), index=True, nullable=False) + actor_user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True, nullable=False) + + old_status: Mapped[Optional[str]] = mapped_column(String(20), nullable=True) + new_status: Mapped[str] = mapped_column(String(20), nullable=False) + reason: Mapped[Optional[str]] = mapped_column(String(200), nullable=True) + + ip_address: Mapped[Optional[str]] = mapped_column(String(45), nullable=True) + user_agent: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) + + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True) + + inquiry: Mapped["ListingInquiry"] = relationship("ListingInquiry", back_populates="events") + + +class ListingInquiryMessage(Base): + """ + Thread messages for listing inquiries (in-product negotiation). 
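To make the audit trail concrete, a minimal close-inquiry sketch against these models. This is a hypothetical helper; the real route handler, session management, and telemetry emission in this diff are not shown here.

```python
# Hypothetical sketch: every status change writes an immutable
# ListingInquiryEvent row first, so the inquiry's history stays
# reconstructable (who, what, when, why).
from datetime import datetime
from typing import Optional

from app.models.listing import ListingInquiry, ListingInquiryEvent


def close_inquiry(
    db_session,
    inquiry: ListingInquiry,
    actor_user_id: int,
    reason: Optional[str] = None,
) -> None:
    db_session.add(
        ListingInquiryEvent(
            inquiry_id=inquiry.id,
            listing_id=inquiry.listing_id,
            actor_user_id=actor_user_id,
            old_status=inquiry.status,
            new_status="closed",
            reason=reason,
        )
    )
    inquiry.status = "closed"
    inquiry.closed_reason = reason
    inquiry.closed_at = datetime.utcnow()
    # The caller owns the session and commits.
```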
+ + - Buyer sends messages from their account + - Seller replies from Terminal + """ + + __tablename__ = "listing_inquiry_messages" + + id: Mapped[int] = mapped_column(primary_key=True, index=True) + inquiry_id: Mapped[int] = mapped_column(ForeignKey("listing_inquiries.id"), index=True, nullable=False) + listing_id: Mapped[int] = mapped_column(ForeignKey("domain_listings.id"), index=True, nullable=False) + + sender_user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True, nullable=False) + body: Mapped[str] = mapped_column(Text, nullable=False) + + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True) + + inquiry: Mapped["ListingInquiry"] = relationship("ListingInquiry", back_populates="messages") + + class ListingView(Base): """ Track listing page views for analytics. diff --git a/backend/app/models/ops_alert.py b/backend/app/models/ops_alert.py new file mode 100644 index 0000000..6f1dfff --- /dev/null +++ b/backend/app/models/ops_alert.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Optional + +from sqlalchemy import DateTime, Index, Integer, String, Text +from sqlalchemy.orm import Mapped, mapped_column + +from app.database import Base + + +class OpsAlertEvent(Base): + """ + Persisted ops alert events. + + Used for: + - cooldown across process restarts + - audit/history in admin UI + """ + + __tablename__ = "ops_alert_events" + + id: Mapped[int] = mapped_column(primary_key=True, index=True) + alert_key: Mapped[str] = mapped_column(String(80), nullable=False, index=True) + severity: Mapped[str] = mapped_column(String(10), nullable=False, index=True) # "warn" | "page" + title: Mapped[str] = mapped_column(String(200), nullable=False) + detail: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + # "sent" | "skipped" | "error" + status: Mapped[str] = mapped_column(String(20), nullable=False, index=True) + recipients: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # comma-separated + send_reason: Mapped[Optional[str]] = mapped_column(String(60), nullable=True) + + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True) + + __table_args__ = ( + Index("ix_ops_alert_key_created", "alert_key", "created_at"), + Index("ix_ops_alert_status_created", "status", "created_at"), + ) + diff --git a/backend/app/models/subscription.py b/backend/app/models/subscription.py index a84062a..28ab5f7 100644 --- a/backend/app/models/subscription.py +++ b/backend/app/models/subscription.py @@ -123,6 +123,8 @@ class Subscription(Base): # Limits (can be overridden) max_domains: Mapped[int] = mapped_column(Integer, default=5) + # Referral reward bonus (3C.2): additive, computed deterministically from qualified referrals + referral_bonus_domains: Mapped[int] = mapped_column(Integer, default=0) check_frequency: Mapped[str] = mapped_column(String(50), default="daily") # Stripe integration @@ -167,7 +169,9 @@ class Subscription(Base): @property def domain_limit(self) -> int: """Get maximum allowed domains for this subscription.""" - return self.max_domains or self.config["domain_limit"] + base = int(self.max_domains or self.config["domain_limit"] or 0) + bonus = int(self.referral_bonus_domains or 0) + return max(0, base + bonus) @property def portfolio_limit(self) -> int: diff --git a/backend/app/models/telemetry.py b/backend/app/models/telemetry.py new file mode 100644 index 0000000..6da913e --- /dev/null +++ b/backend/app/models/telemetry.py @@ -0,0 
+1,56 @@ +""" +Telemetry events (4A). + +Store canonical product events for funnel KPIs: +- Deal funnel: listing_view → inquiry_created → message_sent → listing_marked_sold +- Yield funnel: yield_connected → yield_click → yield_conversion → payout_paid +""" + +from __future__ import annotations + +from datetime import datetime +from typing import Optional + +from sqlalchemy import Boolean, DateTime, ForeignKey, Index, Integer, String, Text +from sqlalchemy.orm import Mapped, mapped_column + +from app.database import Base + + +class TelemetryEvent(Base): + __tablename__ = "telemetry_events" + + id: Mapped[int] = mapped_column(primary_key=True, index=True) + + # Who + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), nullable=True, index=True) + + # What + event_name: Mapped[str] = mapped_column(String(60), nullable=False, index=True) + + # Entity links (optional) + listing_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True) + inquiry_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True) + yield_domain_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True) + click_id: Mapped[Optional[str]] = mapped_column(String(64), nullable=True, index=True) + domain: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True) + + # Context + source: Mapped[Optional[str]] = mapped_column(String(30), nullable=True) # "public" | "terminal" | "webhook" | "scheduler" | "admin" + ip_hash: Mapped[Optional[str]] = mapped_column(String(64), nullable=True) + user_agent: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) + referrer: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) + metadata_json: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # JSON string + + # Flags + is_authenticated: Mapped[Optional[bool]] = mapped_column(Boolean, nullable=True) + + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True) + + __table_args__ = ( + Index("ix_telemetry_event_name_created", "event_name", "created_at"), + Index("ix_telemetry_user_created", "user_id", "created_at"), + Index("ix_telemetry_listing_created", "listing_id", "created_at"), + Index("ix_telemetry_yield_created", "yield_domain_id", "created_at"), + ) + diff --git a/backend/app/models/user.py b/backend/app/models/user.py index 6e6107f..ce0e6c4 100644 --- a/backend/app/models/user.py +++ b/backend/app/models/user.py @@ -44,6 +44,7 @@ class User(Base): referred_by_user_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # User who referred this user referred_by_domain: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) # Domain that referred referral_code: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # Original referral code + invite_code: Mapped[Optional[str]] = mapped_column(String(32), nullable=True, unique=True, index=True) # user's own code # Timestamps created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) diff --git a/backend/app/models/yield_domain.py b/backend/app/models/yield_domain.py index e86f9f2..59da7ae 100644 --- a/backend/app/models/yield_domain.py +++ b/backend/app/models/yield_domain.py @@ -105,6 +105,8 @@ class YieldDomain(Base): dns_verified: Mapped[bool] = mapped_column(Boolean, default=False) dns_verified_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + # "Connect" timestamp for Yield (nameserver/CNAME verified) + connected_at: Mapped[Optional[datetime]] 
= mapped_column(DateTime, nullable=True) activated_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) paused_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) @@ -142,13 +144,6 @@ class YieldDomain(Base): """Check if domain is actively earning.""" return self.status == "active" and self.dns_verified - @property - def monthly_revenue(self) -> Decimal: - """Estimate monthly revenue (placeholder - should compute from transactions).""" - # In production: calculate from last 30 days of transactions - return self.total_revenue - - class YieldTransaction(Base): """ Revenue events from affiliate partners. @@ -170,6 +165,9 @@ class YieldTransaction(Base): # Partner info partner_slug: Mapped[str] = mapped_column(String(50), nullable=False) partner_transaction_id: Mapped[Optional[str]] = mapped_column(String(200), nullable=True) + # Our click id for attribution across systems (UUID string) + click_id: Mapped[Optional[str]] = mapped_column(String(64), nullable=True, index=True) + destination_url: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # Amount gross_amount: Mapped[Decimal] = mapped_column(Numeric(10, 2), default=0) # Full commission @@ -200,6 +198,7 @@ class YieldTransaction(Base): __table_args__ = ( Index("ix_yield_tx_domain_created", "yield_domain_id", "created_at"), Index("ix_yield_tx_status_created", "status", "created_at"), + Index("ix_yield_tx_click_id", "click_id"), ) def __repr__(self) -> str: diff --git a/backend/app/observability/business_metrics.py b/backend/app/observability/business_metrics.py new file mode 100644 index 0000000..089c3fc --- /dev/null +++ b/backend/app/observability/business_metrics.py @@ -0,0 +1,304 @@ +""" +Business KPIs exported as Prometheus metrics (4B Ops). + +These KPIs are derived from real telemetry events in the database. +We cache computations to avoid putting load on the DB on every scrape. 
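The caching contract described above, as a small usage sketch. It assumes an async context, a configured database, and `enable_business_metrics=True`; otherwise `get_cached_window_kpis` returns None.

```python
# Within the TTL (business_metrics_cache_seconds) the exact same frozen
# KPI snapshot object is returned without re-querying the database.
import asyncio

from app.observability.business_metrics import get_cached_window_kpis


async def demo() -> None:
    first = await get_cached_window_kpis(30)   # computes and caches
    second = await get_cached_window_kpis(30)  # cache hit within TTL
    assert first is not None and first is second


asyncio.run(demo())
```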
+""" + +from __future__ import annotations + +import json +from dataclasses import dataclass +from datetime import datetime, timedelta +from typing import Any, Optional + +from sqlalchemy import and_, func, select + +from app.config import get_settings +from app.database import AsyncSessionLocal +from app.models.telemetry import TelemetryEvent + + +settings = get_settings() + +try: + from prometheus_client import Gauge +except Exception: # pragma: no cover + Gauge = None # type: ignore + + +@dataclass(frozen=True) +class TelemetryWindowKpis: + window_days: int + start: datetime + end: datetime + + # Deal + listing_views: int + inquiries_created: int + seller_replied_inquiries: int + inquiry_reply_rate: float + listings_with_inquiries: int + listings_sold: int + inquiry_to_sold_listing_rate: float + + # Yield + connected_domains: int + clicks: int + conversions: int + conversion_rate: float + payouts_paid: int + payouts_paid_amount_total: float + + +_cache_until_by_days: dict[int, datetime] = {} +_cache_value_by_days: dict[int, TelemetryWindowKpis] = {} + + +def _safe_json(metadata_json: Optional[str]) -> dict[str, Any]: + if not metadata_json: + return {} + try: + value = json.loads(metadata_json) + return value if isinstance(value, dict) else {} + except Exception: + return {} + + +async def _compute_window_kpis(days: int) -> TelemetryWindowKpis: + end = datetime.utcnow() + start = end - timedelta(days=days) + + async with AsyncSessionLocal() as db: + # Fast path: grouped counts for pure counter events + count_events = [ + "listing_view", + "inquiry_created", + "yield_connected", + "yield_click", + "yield_conversion", + "payout_paid", + ] + grouped = ( + await db.execute( + select(TelemetryEvent.event_name, func.count(TelemetryEvent.id)) + .where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name.in_(count_events), + ) + ) + .group_by(TelemetryEvent.event_name) + ) + ).all() + counts = {name: int(cnt) for name, cnt in grouped} + + listing_views = counts.get("listing_view", 0) + inquiries_created = counts.get("inquiry_created", 0) + connected_domains = counts.get("yield_connected", 0) + clicks = counts.get("yield_click", 0) + conversions = counts.get("yield_conversion", 0) + payouts_paid = counts.get("payout_paid", 0) + + # Distinct listing counts (deal) + listings_with_inquiries = ( + await db.execute( + select(func.count(func.distinct(TelemetryEvent.listing_id))).where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name == "inquiry_created", + TelemetryEvent.listing_id.isnot(None), + ) + ) + ) + ).scalar() or 0 + + listings_sold = ( + await db.execute( + select(func.count(func.distinct(TelemetryEvent.listing_id))).where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name == "listing_marked_sold", + TelemetryEvent.listing_id.isnot(None), + ) + ) + ) + ).scalar() or 0 + + # For rates we need intersections/uniques; keep it exact via minimal event fetch + inquiry_listing_ids = ( + await db.execute( + select(func.distinct(TelemetryEvent.listing_id)).where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name == "inquiry_created", + TelemetryEvent.listing_id.isnot(None), + ) + ) + ) + ).scalars().all() + sold_listing_ids = ( + await db.execute( + select(func.distinct(TelemetryEvent.listing_id)).where( + and_( + TelemetryEvent.created_at >= start, + 
TelemetryEvent.created_at <= end, + TelemetryEvent.event_name == "listing_marked_sold", + TelemetryEvent.listing_id.isnot(None), + ) + ) + ) + ).scalars().all() + + inquiry_set = {int(x) for x in inquiry_listing_ids if x is not None} + sold_set = {int(x) for x in sold_listing_ids if x is not None} + sold_from_inquiry = inquiry_set.intersection(sold_set) + inquiry_to_sold_listing_rate = (len(sold_from_inquiry) / len(inquiry_set)) if inquiry_set else 0.0 + + # Seller reply rate: unique inquiries with at least one seller message + msg_rows = ( + await db.execute( + select(TelemetryEvent.inquiry_id, TelemetryEvent.metadata_json).where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name == "message_sent", + TelemetryEvent.inquiry_id.isnot(None), + ) + ) + ) + ).all() + seller_replied_inquiries_set: set[int] = set() + for inquiry_id, metadata_json in msg_rows: + if inquiry_id is None: + continue + meta = _safe_json(metadata_json) + if meta.get("role") == "seller": + seller_replied_inquiries_set.add(int(inquiry_id)) + + seller_replied_inquiries = len(seller_replied_inquiries_set) + inquiry_reply_rate = (seller_replied_inquiries / inquiries_created) if inquiries_created else 0.0 + + # Payout amounts (sum of metadata amounts) + payout_rows = ( + await db.execute( + select(TelemetryEvent.metadata_json).where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name == "payout_paid", + TelemetryEvent.metadata_json.isnot(None), + ) + ) + ) + ).scalars().all() + payouts_paid_amount_total = 0.0 + for metadata_json in payout_rows: + meta = _safe_json(metadata_json) + amount = meta.get("amount") + if isinstance(amount, (int, float)): + payouts_paid_amount_total += float(amount) + + conversion_rate = (conversions / clicks) if clicks else 0.0 + + return TelemetryWindowKpis( + window_days=days, + start=start, + end=end, + listing_views=int(listing_views), + inquiries_created=int(inquiries_created), + seller_replied_inquiries=int(seller_replied_inquiries), + inquiry_reply_rate=float(inquiry_reply_rate), + listings_with_inquiries=int(listings_with_inquiries), + listings_sold=int(listings_sold), + inquiry_to_sold_listing_rate=float(inquiry_to_sold_listing_rate), + connected_domains=int(connected_domains), + clicks=int(clicks), + conversions=int(conversions), + conversion_rate=float(conversion_rate), + payouts_paid=int(payouts_paid), + payouts_paid_amount_total=float(payouts_paid_amount_total), + ) + + +async def get_cached_window_kpis(days: int) -> Optional[TelemetryWindowKpis]: + """Return cached KPIs for a window (recompute if TTL expired).""" + if not settings.enable_business_metrics: + return None + + now = datetime.utcnow() + until = _cache_until_by_days.get(days) + cached = _cache_value_by_days.get(days) + if until is not None and cached is not None and now < until: + return cached + + value = await _compute_window_kpis(int(days)) + ttl_seconds = max(5, int(settings.business_metrics_cache_seconds)) + _cache_until_by_days[int(days)] = now + timedelta(seconds=ttl_seconds) + _cache_value_by_days[int(days)] = value + return value + + +# ----------------------------- +# Prometheus Gauges +# ----------------------------- + +if Gauge is not None: + _g = { + "deal_listing_views": Gauge("pounce_deal_listing_views", "Deal: listing views in window", ["window_days"]), + "deal_inquiries_created": Gauge("pounce_deal_inquiries_created", "Deal: inquiries created in window", ["window_days"]), + 
"deal_seller_replied_inquiries": Gauge( + "pounce_deal_seller_replied_inquiries", "Deal: inquiries with seller reply in window", ["window_days"] + ), + "deal_inquiry_reply_rate": Gauge("pounce_deal_inquiry_reply_rate", "Deal: inquiry reply rate in window", ["window_days"]), + "deal_listings_with_inquiries": Gauge( + "pounce_deal_listings_with_inquiries", "Deal: distinct listings with inquiries in window", ["window_days"] + ), + "deal_listings_sold": Gauge("pounce_deal_listings_sold", "Deal: distinct listings marked sold in window", ["window_days"]), + "deal_inquiry_to_sold_listing_rate": Gauge( + "pounce_deal_inquiry_to_sold_listing_rate", "Deal: (listings with inquiry) -> sold rate in window", ["window_days"] + ), + "yield_connected_domains": Gauge("pounce_yield_connected_domains", "Yield: connected domains in window", ["window_days"]), + "yield_clicks": Gauge("pounce_yield_clicks", "Yield: clicks in window", ["window_days"]), + "yield_conversions": Gauge("pounce_yield_conversions", "Yield: conversions in window", ["window_days"]), + "yield_conversion_rate": Gauge("pounce_yield_conversion_rate", "Yield: conversion rate in window", ["window_days"]), + "yield_payouts_paid": Gauge("pounce_yield_payouts_paid", "Yield: payouts paid in window", ["window_days"]), + "yield_payouts_paid_amount_total": Gauge( + "pounce_yield_payouts_paid_amount_total", "Yield: total amount paid out in window", ["window_days"] + ), + } +else: # pragma: no cover + _g = {} + + +async def update_prometheus_business_metrics() -> None: + """Compute KPIs and set Prometheus gauges (no-op when disabled).""" + if Gauge is None or not _g: + return + if not settings.enable_business_metrics: + return + + windows = {1, int(settings.business_metrics_days)} + for days in sorted(windows): + kpis = await get_cached_window_kpis(days) + if kpis is None: + continue + w = str(int(kpis.window_days)) + _g["deal_listing_views"].labels(window_days=w).set(kpis.listing_views) + _g["deal_inquiries_created"].labels(window_days=w).set(kpis.inquiries_created) + _g["deal_seller_replied_inquiries"].labels(window_days=w).set(kpis.seller_replied_inquiries) + _g["deal_inquiry_reply_rate"].labels(window_days=w).set(kpis.inquiry_reply_rate) + _g["deal_listings_with_inquiries"].labels(window_days=w).set(kpis.listings_with_inquiries) + _g["deal_listings_sold"].labels(window_days=w).set(kpis.listings_sold) + _g["deal_inquiry_to_sold_listing_rate"].labels(window_days=w).set(kpis.inquiry_to_sold_listing_rate) + _g["yield_connected_domains"].labels(window_days=w).set(kpis.connected_domains) + _g["yield_clicks"].labels(window_days=w).set(kpis.clicks) + _g["yield_conversions"].labels(window_days=w).set(kpis.conversions) + _g["yield_conversion_rate"].labels(window_days=w).set(kpis.conversion_rate) + _g["yield_payouts_paid"].labels(window_days=w).set(kpis.payouts_paid) + _g["yield_payouts_paid_amount_total"].labels(window_days=w).set(kpis.payouts_paid_amount_total) + diff --git a/backend/app/observability/metrics.py b/backend/app/observability/metrics.py index b756af3..253cc50 100644 --- a/backend/app/observability/metrics.py +++ b/backend/app/observability/metrics.py @@ -72,6 +72,21 @@ def instrument_app(app: FastAPI, *, metrics_path: str = "/metrics", enable_db_me @app.get(metrics_path, include_in_schema=False) async def _metrics_endpoint(): + # Optional: export business KPIs derived from telemetry (cached). 
+ try: + from app.observability.business_metrics import update_prometheus_business_metrics + + await update_prometheus_business_metrics() + except Exception: + # Never break metrics scrape due to KPI computation issues. + pass + # Optional: export ops metrics (e.g. backup age). + try: + from app.observability.ops_metrics import update_prometheus_ops_metrics + + await update_prometheus_ops_metrics() + except Exception: + pass return Response(generate_latest(), media_type=CONTENT_TYPE_LATEST) if enable_db_metrics: diff --git a/backend/app/observability/ops_metrics.py b/backend/app/observability/ops_metrics.py new file mode 100644 index 0000000..e8d94cc --- /dev/null +++ b/backend/app/observability/ops_metrics.py @@ -0,0 +1,65 @@ +""" +Ops/health metrics exported as Prometheus metrics (4B Ops). + +These are low-frequency filesystem-based metrics (safe on scrape). +""" + +from __future__ import annotations + +from datetime import datetime +from pathlib import Path + +from app.config import get_settings + + +settings = get_settings() + +try: + from prometheus_client import Gauge +except Exception: # pragma: no cover + Gauge = None # type: ignore + + +if Gauge is not None: + db_backups_enabled = Gauge("pounce_db_backups_enabled", "DB backups enabled (1/0)") + db_backup_latest_unixtime = Gauge("pounce_db_backup_latest_unixtime", "Unix time of latest backup file (0 if none)") + db_backup_latest_age_seconds = Gauge("pounce_db_backup_latest_age_seconds", "Age of latest backup file (seconds)") +else: # pragma: no cover + db_backups_enabled = None # type: ignore + db_backup_latest_unixtime = None # type: ignore + db_backup_latest_age_seconds = None # type: ignore + + +def _backup_root() -> Path: + root = Path(settings.backup_dir) + if not root.is_absolute(): + root = (Path.cwd() / root).resolve() + return root + + +async def update_prometheus_ops_metrics() -> None: + if Gauge is None: + return + + db_backups_enabled.set(1 if settings.enable_db_backups else 0) + + root = _backup_root() + if not root.exists() or not root.is_dir(): + db_backup_latest_unixtime.set(0) + db_backup_latest_age_seconds.set(0) + return + + files = [p for p in root.glob("*") if p.is_file()] + if not files: + db_backup_latest_unixtime.set(0) + db_backup_latest_age_seconds.set(0) + return + + latest = max(files, key=lambda p: p.stat().st_mtime) + mtime = float(latest.stat().st_mtime) + now = datetime.utcnow().timestamp() + age = max(0.0, now - mtime) + + db_backup_latest_unixtime.set(mtime) + db_backup_latest_age_seconds.set(age) + diff --git a/backend/app/scheduler.py b/backend/app/scheduler.py index 376cb58..8c772e5 100644 --- a/backend/app/scheduler.py +++ b/backend/app/scheduler.py @@ -16,6 +16,10 @@ from app.models.subscription import Subscription, SubscriptionTier, TIER_CONFIG from app.services.domain_checker import domain_checker from app.services.email_service import email_service from app.services.price_tracker import price_tracker +from app.services.yield_payouts import generate_payouts_for_previous_month +from app.services.db_backup import create_backup +from app.services.ops_alerts import run_ops_alert_checks +from app.services.referral_rewards import apply_referral_rewards_all if TYPE_CHECKING: from app.models.sniper_alert import SniperAlert @@ -450,6 +454,53 @@ async def send_health_change_alerts(db, changes: list): logger.error(f"Failed to send health alert: {e}") +async def prepare_monthly_yield_payouts(): + """ + Prepare Yield payouts for previous month (admin automation). 
+ + Safety: + - Only runs when `internal_api_key` is configured. + - Idempotent: generation skips existing payouts for the same period. + """ + if not (settings.internal_api_key or "").strip(): + return + try: + async with AsyncSessionLocal() as db: + await generate_payouts_for_previous_month(db) + except Exception as e: + logger.exception(f"Yield payout preparation failed: {e}") + + +async def run_db_backup(): + """Create a verified DB backup (4B Ops).""" + if not settings.enable_db_backups: + return + try: + # backup is filesystem / subprocess based; no DB session needed here + create_backup(verify=True) + except Exception as e: + logger.exception(f"DB backup failed: {e}") + + +async def run_ops_alerting(): + """Evaluate and (optionally) send ops alerts (4B).""" + try: + await run_ops_alert_checks() + except Exception as e: + logger.exception(f"Ops alerting failed: {e}") + + +async def run_referral_rewards(): + """Recompute and apply referral reward bonuses (3C.2).""" + try: + async with AsyncSessionLocal() as db: + res = await apply_referral_rewards_all(db) + await db.commit() + logger.info("Referral rewards applied: processed=%s updated=%s", res.get("processed"), res.get("updated")) + except Exception as e: + logger.exception(f"Referral rewards job failed: {e}") + + def setup_scheduler(): """Configure and start the scheduler.""" # Daily domain check for Scout users at configured hour @@ -505,6 +556,42 @@ def setup_scheduler(): name="Weekly Digest Email", replace_existing=True, ) + + # Yield payout preparation: run on 2nd day of month at 02:10 UTC + scheduler.add_job( + prepare_monthly_yield_payouts, + CronTrigger(day=2, hour=2, minute=10), + id="yield_payout_prepare", + name="Yield Payout Preparation (Monthly)", + replace_existing=True, + ) + + # DB backup: daily at 01:30 UTC + scheduler.add_job( + run_db_backup, + CronTrigger(hour=1, minute=30), + id="db_backup", + name="DB Backup (Daily)", + replace_existing=True, + ) + + # Ops alerting: hourly at :12 (staggered) + scheduler.add_job( + run_ops_alerting, + CronTrigger(minute=12), + id="ops_alerting", + name="Ops Alerting (Hourly)", + replace_existing=True, + ) + + # Referral rewards: daily at 00:22 UTC (staggered) + scheduler.add_job( + run_referral_rewards, + CronTrigger(hour=0, minute=22), + id="referral_rewards", + name="Referral Rewards (Daily)", + replace_existing=True, + ) # TLD price scrape 2x daily for better historical data # Morning scrape at 03:00 UTC diff --git a/backend/app/schemas/auth.py b/backend/app/schemas/auth.py index 7e3b64c..385bfb1 100644 --- a/backend/app/schemas/auth.py +++ b/backend/app/schemas/auth.py @@ -51,3 +51,26 @@ class TokenData(BaseModel): user_id: Optional[int] = None email: Optional[str] = None + +class ReferralStats(BaseModel): + """Referral reward snapshot for the current user (3C.2).""" + + window_days: int = 30 + referred_users_total: int = 0 + qualified_referrals_total: int = 0 + referral_link_views_window: int = 0 + bonus_domains: int = 0 + next_reward_at: int = 0 + badge: Optional[str] = None # "verified_referrer" | "elite_referrer" + cooldown_days: int = 7 + disqualified_cooldown_total: int = 0 + disqualified_missing_ip_total: int = 0 + disqualified_shared_ip_total: int = 0 + disqualified_duplicate_ip_total: int = 0 + + +class ReferralLinkResponse(BaseModel): + invite_code: str + url: str + stats: ReferralStats + diff --git a/backend/app/schemas/referrals.py b/backend/app/schemas/referrals.py new file mode 100644 index 0000000..f7805af --- /dev/null +++ b/backend/app/schemas/referrals.py @@ 
-0,0 +1,33 @@ +""" +Referral schemas (3C.2). +""" + +from __future__ import annotations + +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + + +class ReferralKpiWindow(BaseModel): + days: int = Field(ge=1, le=365) + start: datetime + end: datetime + + +class ReferralReferrerRow(BaseModel): + user_id: int + email: str + invite_code: Optional[str] = None + created_at: datetime + referred_users_total: int = 0 + referred_users_window: int = 0 + referral_link_views_window: int = 0 + + +class ReferralKpisResponse(BaseModel): + window: ReferralKpiWindow + totals: dict[str, int] + referrers: list[ReferralReferrerRow] + diff --git a/backend/app/schemas/telemetry.py b/backend/app/schemas/telemetry.py new file mode 100644 index 0000000..cc389df --- /dev/null +++ b/backend/app/schemas/telemetry.py @@ -0,0 +1,47 @@ +""" +Telemetry schemas (4A.2). +""" + +from __future__ import annotations + +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + + +class TelemetryKpiWindow(BaseModel): + days: int = Field(ge=1, le=365) + start: datetime + end: datetime + + +class DealFunnelKpis(BaseModel): + listing_views: int = 0 + inquiries_created: int = 0 + seller_replied_inquiries: int = 0 + inquiry_reply_rate: float = 0.0 + + listings_with_inquiries: int = 0 + listings_sold: int = 0 + inquiry_to_sold_listing_rate: float = 0.0 + + median_reply_seconds: Optional[float] = None + median_time_to_sold_seconds: Optional[float] = None + + +class YieldFunnelKpis(BaseModel): + connected_domains: int = 0 + clicks: int = 0 + conversions: int = 0 + conversion_rate: float = 0.0 + + payouts_paid: int = 0 + payouts_paid_amount_total: float = 0.0 + + +class TelemetryKpisResponse(BaseModel): + window: TelemetryKpiWindow + deal: DealFunnelKpis + yield_: YieldFunnelKpis = Field(alias="yield") + diff --git a/backend/app/schemas/yield_domain.py b/backend/app/schemas/yield_domain.py index fb0a35b..5de90fb 100644 --- a/backend/app/schemas/yield_domain.py +++ b/backend/app/schemas/yield_domain.py @@ -73,6 +73,7 @@ class YieldDomainResponse(BaseModel): # DNS dns_verified: bool = False dns_verified_at: Optional[datetime] = None + connected_at: Optional[datetime] = None # Stats total_clicks: int = 0 @@ -108,6 +109,7 @@ class YieldTransactionResponse(BaseModel): id: int event_type: str partner_slug: str + click_id: Optional[str] = None gross_amount: Decimal net_amount: Decimal diff --git a/backend/app/services/auth.py b/backend/app/services/auth.py index 6360b77..437c04a 100644 --- a/backend/app/services/auth.py +++ b/backend/app/services/auth.py @@ -3,6 +3,7 @@ from datetime import datetime, timedelta from typing import Optional import bcrypt +import secrets from jose import JWTError, jwt from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession @@ -92,11 +93,21 @@ class AuthService: name: Optional[str] = None ) -> User: """Create a new user with default subscription.""" + async def _generate_unique_invite_code() -> str: + # 12 hex chars; easy to validate + share + embed in URLs. 
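Since the code is plain lowercase hex, validating an incoming `?ref=` / invite value before any DB lookup is cheap. A hypothetical helper sketch, not part of this diff:

```python
# Companion sketch: 12 lowercase hex chars, matching secrets.token_hex(6)
# as generated above. Rejects anything else before it reaches the database.
import re

_INVITE_CODE_RE = re.compile(r"^[0-9a-f]{12}$")


def is_valid_invite_code(code: str) -> bool:
    return bool(_INVITE_CODE_RE.fullmatch(code.strip().lower()))


assert is_valid_invite_code("a3f09b12cd45")
assert not is_valid_invite_code("not-a-code")
```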
+ for _ in range(12): + code = secrets.token_hex(6) + exists = await db.execute(select(User.id).where(User.invite_code == code)) + if exists.scalar_one_or_none() is None: + return code + raise RuntimeError("Failed to generate unique invite code") + # Create user (normalize email to lowercase) user = User( email=email.lower().strip(), hashed_password=AuthService.hash_password(password), name=name, + invite_code=await _generate_unique_invite_code(), ) db.add(user) await db.flush() diff --git a/backend/app/services/db_backup.py b/backend/app/services/db_backup.py new file mode 100644 index 0000000..5537fed --- /dev/null +++ b/backend/app/services/db_backup.py @@ -0,0 +1,201 @@ +""" +DB backup utilities (4B Ops). + +Supports: +- SQLite: file copy + integrity_check verification +- Postgres: pg_dump custom format + pg_restore --list verification + +This is real ops code: it will fail loudly if the platform tooling isn't available. +""" + +from __future__ import annotations + +import os +import shutil +import subprocess +from dataclasses import dataclass +from datetime import datetime, timedelta +from pathlib import Path +from typing import Optional + +from sqlalchemy.engine.url import make_url + +from app.config import get_settings + + +settings = get_settings() + + +@dataclass(frozen=True) +class BackupResult: + path: str + size_bytes: int + created_at: str + verified: bool + verification_detail: Optional[str] = None + + +def _backup_root() -> Path: + root = Path(settings.backup_dir) + if not root.is_absolute(): + # Keep backups next to backend working dir by default + root = (Path.cwd() / root).resolve() + root.mkdir(parents=True, exist_ok=True) + return root + + +def _timestamp() -> str: + return datetime.utcnow().strftime("%Y%m%dT%H%M%SZ") + + +def _cleanup_old_backups(root: Path, retention_days: int) -> int: + if retention_days <= 0: + return 0 + cutoff = datetime.utcnow() - timedelta(days=retention_days) + removed = 0 + for p in root.glob("*"): + if not p.is_file(): + continue + try: + mtime = datetime.utcfromtimestamp(p.stat().st_mtime) + if mtime < cutoff: + p.unlink() + removed += 1 + except Exception: + continue + return removed + + +def _sqlite_path_from_url(database_url: str) -> Path: + url = make_url(database_url) + db_path = url.database + if not db_path: + raise RuntimeError("SQLite database path missing in DATABASE_URL") + p = Path(db_path) + if not p.is_absolute(): + p = (Path.cwd() / p).resolve() + return p + + +def _verify_sqlite(path: Path) -> tuple[bool, str]: + import sqlite3 + + conn = sqlite3.connect(str(path)) + try: + row = conn.execute("PRAGMA integrity_check;").fetchone() + ok = bool(row and str(row[0]).lower() == "ok") + return ok, str(row[0]) if row else "no result" + finally: + conn.close() + + +def _pg_dump_backup(database_url: str, out_file: Path) -> None: + url = make_url(database_url) + if not url.database: + raise RuntimeError("Postgres database name missing in DATABASE_URL") + + env = os.environ.copy() + if url.password: + env["PGPASSWORD"] = str(url.password) + + cmd = [ + "pg_dump", + "--format=custom", + "--no-owner", + "--no-privileges", + "--file", + str(out_file), + ] + if url.host: + cmd += ["--host", str(url.host)] + if url.port: + cmd += ["--port", str(url.port)] + if url.username: + cmd += ["--username", str(url.username)] + cmd += [str(url.database)] + + proc = subprocess.run(cmd, env=env, capture_output=True, text=True) + if proc.returncode != 0: + raise RuntimeError(f"pg_dump failed: {proc.stderr.strip() or proc.stdout.strip()}") + + +def 
_verify_pg_dump(out_file: Path) -> tuple[bool, str]: + # Basic size check + if out_file.stat().st_size < 1024: + return False, "backup file too small" + + proc = subprocess.run( + ["pg_restore", "--list", str(out_file)], + capture_output=True, + text=True, + ) + if proc.returncode != 0: + return False, proc.stderr.strip() or proc.stdout.strip() or "pg_restore failed" + return True, "pg_restore --list OK" + + +def create_backup(*, verify: bool = True) -> BackupResult: + root = _backup_root() + _cleanup_old_backups(root, settings.backup_retention_days) + + db_url = settings.database_url + driver = make_url(db_url).drivername + created_at = datetime.utcnow().isoformat() + "Z" + + if driver.startswith("sqlite"): + src = _sqlite_path_from_url(db_url) + if not src.exists(): + raise RuntimeError(f"SQLite DB file not found: {src}") + out = root / f"sqlite-backup-{_timestamp()}{src.suffix or '.db'}" + shutil.copy2(src, out) + ok = True + detail = None + if verify: + ok, detail = _verify_sqlite(out) + if not ok: + raise RuntimeError(f"SQLite backup verification failed: {detail}") + return BackupResult( + path=str(out), + size_bytes=out.stat().st_size, + created_at=created_at, + verified=ok, + verification_detail=detail, + ) + + if driver.startswith("postgresql"): + out = root / f"pg-backup-{_timestamp()}.dump" + _pg_dump_backup(db_url, out) + ok = True + detail = None + if verify: + ok, detail = _verify_pg_dump(out) + if not ok: + raise RuntimeError(f"Postgres backup verification failed: {detail}") + return BackupResult( + path=str(out), + size_bytes=out.stat().st_size, + created_at=created_at, + verified=ok, + verification_detail=detail, + ) + + raise RuntimeError(f"Unsupported database driver for backups: {driver}") + + +def list_backups(limit: int = 20) -> list[dict]: + root = _backup_root() + files = [p for p in root.glob("*") if p.is_file()] + files.sort(key=lambda p: p.stat().st_mtime, reverse=True) + out: list[dict] = [] + for p in files[: max(1, limit)]: + st = p.stat() + out.append( + { + "name": p.name, + "path": str(p), + "size_bytes": st.st_size, + "modified_at": datetime.utcfromtimestamp(st.st_mtime).isoformat() + "Z", + } + ) + return out + diff --git a/backend/app/services/email_service.py b/backend/app/services/email_service.py index 89f1707..73dfbdd 100644 --- a/backend/app/services/email_service.py +++ b/backend/app/services/email_service.py @@ -22,10 +22,12 @@ Environment Variables Required: """ import logging import os +import uuid from typing import Optional, List from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart from datetime import datetime +from email.utils import formatdate import aiosmtplib from jinja2 import Template @@ -273,6 +275,11 @@ TEMPLATES = { Visit pounce.ch +{% if unsubscribe_url %} +
<p><a href="{{ unsubscribe_url }}">Unsubscribe</a></p>
+{% endif %}
 """,

     "listing_inquiry": """
@@ -303,6 +310,26 @@ TEMPLATES = {

     Manage your listings →

+""",
+
+    "listing_message": """
+    <h2>New message on {{ domain }}</h2>
+    <p>From: {{ sender_name }}</p>
+    <blockquote>{{ message }}</blockquote>
+    <p><a href="{{ thread_url }}">Open thread</a></p>
+    <p>Sent: {{ timestamp }}</p>
""", } @@ -341,6 +368,7 @@ class EmailService: subject: str, html_content: str, text_content: Optional[str] = None, + headers: Optional[dict[str, str]] = None, ) -> bool: """ Send an email via SMTP. @@ -364,6 +392,15 @@ class EmailService: msg["Subject"] = subject msg["From"] = f"{SMTP_CONFIG['from_name']} <{SMTP_CONFIG['from_email']}>" msg["To"] = to_email + msg["Date"] = formatdate(localtime=False) + msg["Message-ID"] = EmailService._make_message_id() + msg["Reply-To"] = SMTP_CONFIG["from_email"] + + # Optional extra headers (deliverability + RFC 8058 List-Unsubscribe) + if headers: + for k, v in headers.items(): + if v: + msg[k] = v # Add text part (fallback) if text_content: @@ -400,6 +437,16 @@ class EmailService: except Exception as e: logger.error(f"Failed to send email to {to_email}: {e}") return False + + @staticmethod + def _make_message_id() -> str: + """ + Generate a stable Message-ID with the sender domain. + Helps deliverability and threading in some clients. + """ + from_email = str(SMTP_CONFIG.get("from_email") or "hello@pounce.ch") + domain = from_email.split("@")[-1] if "@" in from_email else "pounce.ch" + return f"<{uuid.uuid4().hex}@{domain}>" # ============== Domain Alerts ============== @@ -601,15 +648,22 @@ class EmailService: @staticmethod async def send_newsletter_welcome( to_email: str, + unsubscribe_url: Optional[str] = None, ) -> bool: """Send newsletter subscription welcome email.""" - html = EmailService._render_email("newsletter_welcome") + html = EmailService._render_email("newsletter_welcome", unsubscribe_url=unsubscribe_url) + + extra_headers: dict[str, str] = {} + if unsubscribe_url: + extra_headers["List-Unsubscribe"] = f"<{unsubscribe_url}>" + extra_headers["List-Unsubscribe-Post"] = "List-Unsubscribe=One-Click" return await EmailService.send_email( to_email=to_email, subject="You're on the list. Welcome to POUNCE.", html_content=html, text_content="Welcome to POUNCE Insights. Expect market moves, strategies, and feature drops. No spam.", + headers=extra_headers or None, ) # ============== Listing Inquiries ============== @@ -646,6 +700,32 @@ class EmailService: text_content=f"New inquiry from {name} ({email}) for {domain}. Message: {message}", ) + @staticmethod + async def send_listing_message( + to_email: str, + domain: str, + sender_name: str, + message: str, + thread_url: str, + ) -> bool: + """Send notification when a new in-product message is posted.""" + html = EmailService._render_email( + "listing_message", + domain=domain, + sender_name=sender_name, + message=message, + thread_url=thread_url, + timestamp=datetime.utcnow().strftime("%Y-%m-%d %H:%M UTC"), + ) + + subject = f"New message on {domain}" + return await EmailService.send_email( + to_email=to_email, + subject=subject, + html_content=html, + text_content=f"New message on {domain} from {sender_name}: {message}", + ) + # Global instance email_service = EmailService() diff --git a/backend/app/services/ops_alerts.py b/backend/app/services/ops_alerts.py new file mode 100644 index 0000000..728aa17 --- /dev/null +++ b/backend/app/services/ops_alerts.py @@ -0,0 +1,256 @@ +""" +Ops alerting (4B) without external monitoring stack. 
+ +Runs in the scheduler process: +- checks backup freshness (if backups enabled) +- checks basic 24h business signals from telemetry (deal inquiries / yield clicks) +- sends an aggregated email alert with cooldown to avoid spam +""" + +from __future__ import annotations + +import logging +import os +from dataclasses import dataclass +from datetime import datetime, timedelta +from pathlib import Path + +from sqlalchemy import and_, func, select + +from app.config import get_settings +from app.database import AsyncSessionLocal +from app.models.ops_alert import OpsAlertEvent +from app.models.telemetry import TelemetryEvent +from app.services.email_service import CONTACT_EMAIL, email_service + + +logger = logging.getLogger(__name__) +settings = get_settings() + + +@dataclass(frozen=True) +class OpsFinding: + key: str + severity: str # "warn" | "page" + title: str + detail: str + + +def _parse_recipients(raw: str) -> list[str]: + emails = [e.strip() for e in (raw or "").split(",") if e.strip()] + if emails: + return emails + fallback = (CONTACT_EMAIL or os.getenv("CONTACT_EMAIL", "")).strip() + return [fallback] if fallback else [] + + +def _backup_root() -> Path: + root = Path(settings.backup_dir) + if not root.is_absolute(): + root = (Path.cwd() / root).resolve() + return root + + +def _latest_backup_age_seconds() -> float | None: + root = _backup_root() + if not root.exists() or not root.is_dir(): + return None + files = [p for p in root.glob("*") if p.is_file()] + if not files: + return None + latest = max(files, key=lambda p: p.stat().st_mtime) + now = datetime.utcnow().timestamp() + return max(0.0, now - float(latest.stat().st_mtime)) + + +async def evaluate_ops_findings() -> list[OpsFinding]: + findings: list[OpsFinding] = [] + + # Backup stale check + if settings.enable_db_backups: + age = _latest_backup_age_seconds() + if age is None: + findings.append( + OpsFinding( + key="backup_missing", + severity="page", + title="DB backups enabled but no backup file found", + detail=f"backup_dir={_backup_root()}", + ) + ) + elif age > float(settings.ops_alert_backup_stale_seconds): + findings.append( + OpsFinding( + key="backup_stale", + severity="page", + title="DB backup is stale", + detail=f"latest_backup_age_seconds={int(age)} threshold={int(settings.ops_alert_backup_stale_seconds)}", + ) + ) + + # 24h telemetry signal checks (business sanity) + end = datetime.utcnow() + start = end - timedelta(days=1) + + async with AsyncSessionLocal() as db: + inquiries_24h = ( + await db.execute( + select(func.count(TelemetryEvent.id)).where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name == "inquiry_created", + ) + ) + ) + ).scalar() or 0 + + yield_clicks_24h = ( + await db.execute( + select(func.count(TelemetryEvent.id)).where( + and_( + TelemetryEvent.created_at >= start, + TelemetryEvent.created_at <= end, + TelemetryEvent.event_name == "yield_click", + ) + ) + ) + ).scalar() or 0 + + if int(inquiries_24h) == 0: + findings.append( + OpsFinding( + key="deal_inquiries_zero_24h", + severity="warn", + title="No inquiries created in last 24h", + detail="Deal funnel might be broken or traffic is zero.", + ) + ) + + if int(yield_clicks_24h) == 0: + findings.append( + OpsFinding( + key="yield_clicks_zero_24h", + severity="warn", + title="No yield clicks in last 24h", + detail="Yield routing might be misconfigured or traffic is zero.", + ) + ) + + return findings + + +async def _cooldown_ok(db, key: str) -> bool: + cooldown = max(5, 
int(settings.ops_alert_cooldown_minutes))
+    cutoff = datetime.utcnow() - timedelta(minutes=cooldown)
+    last_sent = (
+        await db.execute(
+            select(OpsAlertEvent.created_at)
+            .where(
+                OpsAlertEvent.alert_key == key,
+                OpsAlertEvent.status == "sent",
+                OpsAlertEvent.created_at >= cutoff,
+            )
+            .order_by(OpsAlertEvent.created_at.desc())
+            .limit(1)
+        )
+    ).scalar_one_or_none()
+    return last_sent is None
+
+
+async def send_ops_alerts(findings: list[OpsFinding]) -> dict:
+    recipients = _parse_recipients(settings.ops_alert_recipients)
+    if not recipients:
+        logger.warning("Ops alerts enabled but no recipients configured (OPS_ALERT_RECIPIENTS/CONTACT_EMAIL).")
+        return {"sent": 0, "skipped": len(findings), "reason": "no_recipients"}
+    if not email_service.is_configured():
+        return {"sent": 0, "skipped": len(findings), "reason": "smtp_not_configured"}
+
+    async with AsyncSessionLocal() as db:
+        actionable: list[OpsFinding] = []
+        skipped = 0
+        for f in findings:
+            if await _cooldown_ok(db, f.key):
+                actionable.append(f)
+            else:
+                skipped += 1
+                db.add(
+                    OpsAlertEvent(
+                        alert_key=f.key,
+                        severity=f.severity,
+                        title=f.title,
+                        detail=f.detail,
+                        status="skipped",
+                        recipients=",".join(recipients) if recipients else None,
+                        send_reason="cooldown",
+                    )
+                )
+
+        # Commit here so skipped (cooldown) events are persisted even when
+        # actionable findings follow and we continue sending below.
+        await db.commit()
+
+        if not actionable:
+            return {"sent": 0, "skipped": len(findings), "reason": "cooldown"}
+
+        sev = "PAGE" if any(f.severity == "page" for f in actionable) else "WARN"
+        subject = f"[pounce][{sev}] Ops alerts ({len(actionable)})"
+
+        items_html = "".join(
+            f"""
+            <div style="margin:12px 0;padding:8px 12px;border-left:3px solid #cccccc;">
+                <strong>{f.title}</strong><br/>
+                <span>{f.key}: {f.detail}</span>
+            </div>
+            """.strip()
+            for f in actionable
+        )
+
+        html = f"""
+        <h2>Ops alerts</h2>
+        <p>Detected {len(actionable)} issue(s). (Cooldown: {int(settings.ops_alert_cooldown_minutes)} min)</p>
+        {items_html}
+        <p>Timestamp: {datetime.utcnow().isoformat()}Z</p>
+ """.strip() + + text = "\n".join([f"- [{f.severity.upper()}] {f.title} ({f.key}) :: {f.detail}" for f in actionable]) + sent = 0 + for to in recipients: + ok = await email_service.send_email(to_email=to, subject=subject, html_content=html, text_content=text) + sent += 1 if ok else 0 + # Persist sent events for cooldown + history + async with AsyncSessionLocal() as db: + for f in actionable: + db.add( + OpsAlertEvent( + alert_key=f.key, + severity=f.severity, + title=f.title, + detail=f.detail, + status="sent" if sent else "error", + recipients=",".join(recipients) if recipients else None, + send_reason=None if sent else "send_failed", + ) + ) + await db.commit() + + return {"sent": sent, "actionable": len(actionable), "recipients": recipients} + + +async def run_ops_alert_checks() -> dict: + """ + Entry point for scheduler/admin. + Returns findings + send status (if enabled). + """ + findings = await evaluate_ops_findings() + if not settings.ops_alerts_enabled: + return {"enabled": False, "findings": [f.__dict__ for f in findings]} + + send_status = await send_ops_alerts(findings) + return {"enabled": True, "findings": [f.__dict__ for f in findings], "send": send_status} + diff --git a/backend/app/services/referral_rewards.py b/backend/app/services/referral_rewards.py new file mode 100644 index 0000000..40260e7 --- /dev/null +++ b/backend/app/services/referral_rewards.py @@ -0,0 +1,245 @@ +""" +Referral rewards (3C.2). + +Goals: +- Deterministic, abuse-resistant rewards +- No manual state tracking per referral; we compute from authoritative DB state +- Idempotent updates (can be run via scheduler and on-demand) + +Current reward: +- For every N qualified referrals, grant +M bonus watchlist domain slots. + +Qualified referral definition: +- referred user has `users.referred_by_user_id = referrer.id` +- referred user is_active AND is_verified +- referred user has an active subscription that is NOT Scout (Trader/Tycoon), and is currently active +""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime, timedelta +from typing import Optional + +from sqlalchemy import and_, func, or_, select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import get_settings +from app.models.subscription import Subscription, SubscriptionStatus, SubscriptionTier +from app.models.telemetry import TelemetryEvent +from app.models.user import User + + +QUALIFIED_REFERRAL_BATCH_SIZE = 3 +BONUS_DOMAINS_PER_BATCH = 5 +settings = get_settings() + + +def compute_bonus_domains(qualified_referrals: int) -> int: + if qualified_referrals <= 0: + return 0 + batches = qualified_referrals // QUALIFIED_REFERRAL_BATCH_SIZE + return int(batches * BONUS_DOMAINS_PER_BATCH) + + +def compute_badge(qualified_referrals: int) -> Optional[str]: + if qualified_referrals >= 10: + return "elite_referrer" + if qualified_referrals >= 3: + return "verified_referrer" + return None + + +@dataclass(frozen=True) +class ReferralRewardSnapshot: + referrer_user_id: int + referred_users_total: int + qualified_referrals_total: int + cooldown_days: int + disqualified_cooldown_total: int + disqualified_missing_ip_total: int + disqualified_shared_ip_total: int + disqualified_duplicate_ip_total: int + bonus_domains: int + badge: Optional[str] + computed_at: datetime + + +async def get_referral_reward_snapshot(db: AsyncSession, referrer_user_id: int) -> ReferralRewardSnapshot: + # Total referred users (all-time) + referred_users_total = int( + ( + await db.execute( + 
select(func.count(User.id)).where(User.referred_by_user_id == referrer_user_id) + ) + ).scalar() + or 0 + ) + + now = datetime.utcnow() + cooldown_days = max(0, int(getattr(settings, "referral_rewards_cooldown_days", 7) or 0)) + cooldown_cutoff = now - timedelta(days=cooldown_days) if cooldown_days else None + + # Referrer IP hashes (window) for self-ref/shared-ip checks + ip_window_days = max(1, int(getattr(settings, "referral_rewards_ip_window_days", 30) or 30)) + ip_window_start = now - timedelta(days=ip_window_days) + referrer_ip_rows = ( + await db.execute( + select(TelemetryEvent.ip_hash) + .where( + and_( + TelemetryEvent.user_id == referrer_user_id, + TelemetryEvent.ip_hash.isnot(None), + TelemetryEvent.created_at >= ip_window_start, + TelemetryEvent.created_at <= now, + ) + ) + .distinct() + ) + ).all() + referrer_ip_hashes = {str(r[0]) for r in referrer_ip_rows if r and r[0]} + + # Referred user's registration IP hash (from telemetry) as subquery + reg_ip_subq = ( + select( + TelemetryEvent.user_id.label("user_id"), + func.max(TelemetryEvent.ip_hash).label("signup_ip_hash"), + ) + .where( + and_( + TelemetryEvent.event_name == "user_registered", + TelemetryEvent.user_id.isnot(None), + ) + ) + .group_by(TelemetryEvent.user_id) + .subquery() + ) + + # Candidate referred users (paid + verified + active) + rows = ( + await db.execute( + select( + User.id, + User.created_at, + Subscription.started_at, + reg_ip_subq.c.signup_ip_hash, + ) + .select_from(User) + .join(Subscription, Subscription.user_id == User.id) + .outerjoin(reg_ip_subq, reg_ip_subq.c.user_id == User.id) + .where( + and_( + User.referred_by_user_id == referrer_user_id, + User.is_active == True, + User.is_verified == True, + Subscription.tier.in_([SubscriptionTier.TRADER, SubscriptionTier.TYCOON]), + Subscription.status.in_([SubscriptionStatus.ACTIVE, SubscriptionStatus.PAST_DUE]), + or_(Subscription.expires_at.is_(None), Subscription.expires_at >= now), + ) + ) + ) + ).all() + + require_ip = bool(getattr(settings, "referral_rewards_require_ip_hash", True)) + + disqualified_cooldown_total = 0 + disqualified_missing_ip_total = 0 + disqualified_shared_ip_total = 0 + disqualified_duplicate_ip_total = 0 + + qualified_ip_hashes: set[str] = set() + qualified_referrals_total = 0 + + for _user_id, user_created_at, sub_started_at, signup_ip_hash in rows: + # Cooldown: user account age AND subscription age must pass cooldown + if cooldown_cutoff is not None: + if (user_created_at and user_created_at > cooldown_cutoff) or ( + sub_started_at and sub_started_at > cooldown_cutoff + ): + disqualified_cooldown_total += 1 + continue + + ip_hash = str(signup_ip_hash) if signup_ip_hash else None + if require_ip and not ip_hash: + disqualified_missing_ip_total += 1 + continue + + if ip_hash and referrer_ip_hashes and ip_hash in referrer_ip_hashes: + disqualified_shared_ip_total += 1 + continue + + if ip_hash and ip_hash in qualified_ip_hashes: + disqualified_duplicate_ip_total += 1 + continue + + if ip_hash: + qualified_ip_hashes.add(ip_hash) + qualified_referrals_total += 1 + + bonus_domains = compute_bonus_domains(qualified_referrals_total) + badge = compute_badge(qualified_referrals_total) + return ReferralRewardSnapshot( + referrer_user_id=referrer_user_id, + referred_users_total=referred_users_total, + qualified_referrals_total=qualified_referrals_total, + cooldown_days=cooldown_days, + disqualified_cooldown_total=disqualified_cooldown_total, + disqualified_missing_ip_total=disqualified_missing_ip_total, + 
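+        # "shared" = the referred user's signup IP matches one of the referrer's
+        # own recent IPs (likely self-referral); "duplicate" = two referred
+        # signups share the same signup IP within this snapshot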
disqualified_shared_ip_total=disqualified_shared_ip_total, + disqualified_duplicate_ip_total=disqualified_duplicate_ip_total, + bonus_domains=bonus_domains, + badge=badge, + computed_at=datetime.utcnow(), + ) + + +async def apply_referral_rewards_for_user(db: AsyncSession, referrer_user_id: int) -> ReferralRewardSnapshot: + """ + Apply rewards to the referrer's subscription row, based on current qualified referrals. + + This is idempotent: it sets the bonus to the computed value. + """ + snapshot = await get_referral_reward_snapshot(db, referrer_user_id) + + sub_res = await db.execute(select(Subscription).where(Subscription.user_id == referrer_user_id)) + sub = sub_res.scalar_one_or_none() + if not sub: + # Create default subscription so bonus can be stored + sub = Subscription(user_id=referrer_user_id, tier=SubscriptionTier.SCOUT, max_domains=5) + db.add(sub) + await db.flush() + + desired = int(snapshot.bonus_domains) + current = int(getattr(sub, "referral_bonus_domains", 0) or 0) + if current != desired: + sub.referral_bonus_domains = desired + await db.flush() + + return snapshot + + +async def apply_referral_rewards_all(db: AsyncSession) -> dict[str, int]: + """ + Apply rewards for all users that have an invite_code. + """ + res = await db.execute(select(User.id).where(User.invite_code.isnot(None))) + user_ids = [int(r[0]) for r in res.all()] + + updated = 0 + processed = 0 + for user_id in user_ids: + processed += 1 + snap = await get_referral_reward_snapshot(db, user_id) + sub_res = await db.execute(select(Subscription).where(Subscription.user_id == user_id)) + sub = sub_res.scalar_one_or_none() + if not sub: + sub = Subscription(user_id=user_id, tier=SubscriptionTier.SCOUT, max_domains=5) + db.add(sub) + await db.flush() + desired = int(snap.bonus_domains) + current = int(getattr(sub, "referral_bonus_domains", 0) or 0) + if current != desired: + sub.referral_bonus_domains = desired + updated += 1 + return {"processed": processed, "updated": updated} + diff --git a/backend/app/services/telemetry.py b/backend/app/services/telemetry.py new file mode 100644 index 0000000..970d7f9 --- /dev/null +++ b/backend/app/services/telemetry.py @@ -0,0 +1,79 @@ +""" +Telemetry service (4A). + +Single entry-point for writing canonical product events. 
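+
+A sketch of a typical call site (assumes a FastAPI handler with `db: AsyncSession`,
+`request: Request`, and a `listing` row in scope; `track_event` only adds the row,
+so the caller is responsible for committing):
+
+    await track_event(
+        db,
+        event_name="inquiry_created",
+        request=request,
+        user_id=current_user.id,
+        is_authenticated=True,
+        source="listing_page",
+        domain=listing.domain,
+        listing_id=listing.id,
+    )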
+""" + +from __future__ import annotations + +import hashlib +import json +from typing import Any, Optional + +from fastapi import Request +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import get_settings +from app.models.telemetry import TelemetryEvent + + +settings = get_settings() + + +def _hash_ip(ip: str) -> str: + return hashlib.sha256(f"{ip}|{settings.secret_key}".encode()).hexdigest()[:32] + + +def _get_client_ip(request: Request) -> Optional[str]: + xff = request.headers.get("x-forwarded-for") + if xff: + ip = xff.split(",")[0].strip() + if ip: + return ip + cf_ip = request.headers.get("cf-connecting-ip") + if cf_ip: + return cf_ip.strip() + return request.client.host if request.client else None + + +async def track_event( + db: AsyncSession, + *, + event_name: str, + request: Optional[Request] = None, + user_id: Optional[int] = None, + is_authenticated: Optional[bool] = None, + source: Optional[str] = None, + domain: Optional[str] = None, + listing_id: Optional[int] = None, + inquiry_id: Optional[int] = None, + yield_domain_id: Optional[int] = None, + click_id: Optional[str] = None, + referrer: Optional[str] = None, + user_agent: Optional[str] = None, + metadata: Optional[dict[str, Any]] = None, +) -> None: + ip_hash = None + if request is not None: + ip = _get_client_ip(request) + ip_hash = _hash_ip(ip) if ip else None + user_agent = user_agent or request.headers.get("user-agent") + referrer = referrer or request.headers.get("referer") + + row = TelemetryEvent( + user_id=user_id, + event_name=event_name, + listing_id=listing_id, + inquiry_id=inquiry_id, + yield_domain_id=yield_domain_id, + click_id=click_id[:64] if click_id else None, + domain=domain, + source=source, + ip_hash=ip_hash, + user_agent=user_agent[:500] if user_agent else None, + referrer=referrer[:500] if referrer else None, + metadata_json=json.dumps(metadata or {}, ensure_ascii=False) if metadata else None, + is_authenticated=is_authenticated, + ) + db.add(row) + diff --git a/backend/app/services/yield_dns.py b/backend/app/services/yield_dns.py new file mode 100644 index 0000000..15c4b57 --- /dev/null +++ b/backend/app/services/yield_dns.py @@ -0,0 +1,169 @@ +""" +Yield DNS verification helpers. 
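+
+Typical call (sketch; the expected nameservers and CNAME target come from server
+settings, the hostnames below are placeholders):
+
+    result = verify_yield_dns(
+        "customer-domain.com",
+        expected_nameservers=["ns1.example-dns.net", "ns2.example-dns.net"],
+        cname_target="connect.example-target.net",
+    )
+    if result.verified:
+        print(result.method)  # "nameserver" or "cname"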
+ +Production-grade DNS checks for the Yield Connect flow: +- Option A (recommended): Nameserver delegation to our nameservers +- Option B (simpler): CNAME/ALIAS to a shared target +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Optional + +import dns.resolver + + +@dataclass(frozen=True) +class YieldDNSCheckResult: + verified: bool + method: Optional[str] # "nameserver" | "cname" | None + actual_ns: list[str] + cname_ok: bool + error: Optional[str] + + +def _resolver() -> dns.resolver.Resolver: + r = dns.resolver.Resolver() + r.timeout = 3 + r.lifetime = 5 + return r + + +def _normalize_host(host: str) -> str: + return host.rstrip(".").lower().strip() + + +def _resolve_ns(domain: str) -> list[str]: + r = _resolver() + answers = r.resolve(domain, "NS") + # NS answers are RRset with .target + return sorted({_normalize_host(str(rr.target)) for rr in answers}) + + +def _resolve_cname(domain: str) -> list[str]: + r = _resolver() + answers = r.resolve(domain, "CNAME") + return sorted({_normalize_host(str(rr.target)) for rr in answers}) + + +def _resolve_a(host: str) -> list[str]: + r = _resolver() + answers = r.resolve(host, "A") + return sorted({str(rr) for rr in answers}) + + +def verify_yield_dns(domain: str, expected_nameservers: list[str], cname_target: str) -> YieldDNSCheckResult: + """ + Verify that a domain is connected for Yield. + + We accept: + - Nameserver delegation (NS contains all expected nameservers), OR + - CNAME/ALIAS to `cname_target` (either CNAME matches, or A records match target A records) + """ + domain = _normalize_host(domain) + expected_ns = sorted({_normalize_host(ns) for ns in expected_nameservers if ns}) + target = _normalize_host(cname_target) + + if not domain: + return YieldDNSCheckResult( + verified=False, + method=None, + actual_ns=[], + cname_ok=False, + error="Domain is empty", + ) + if not expected_ns and not target: + return YieldDNSCheckResult( + verified=False, + method=None, + actual_ns=[], + cname_ok=False, + error="Yield DNS is not configured on server", + ) + + # Option A: NS delegation + try: + actual_ns = _resolve_ns(domain) + if expected_ns and set(expected_ns).issubset(set(actual_ns)): + return YieldDNSCheckResult( + verified=True, + method="nameserver", + actual_ns=actual_ns, + cname_ok=False, + error=None, + ) + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + actual_ns = [] + except Exception as e: + return YieldDNSCheckResult( + verified=False, + method=None, + actual_ns=[], + cname_ok=False, + error=str(e), + ) + + # Option B: CNAME / ALIAS + if not target: + return YieldDNSCheckResult( + verified=False, + method=None, + actual_ns=actual_ns, + cname_ok=False, + error="Yield CNAME target is not configured on server", + ) + + # 1) Direct CNAME check (works for subdomain CNAME setups) + try: + cnames = _resolve_cname(domain) + if any(c == target for c in cnames): + return YieldDNSCheckResult( + verified=True, + method="cname", + actual_ns=actual_ns, + cname_ok=True, + error=None, + ) + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + pass + except Exception as e: + return YieldDNSCheckResult( + verified=False, + method=None, + actual_ns=actual_ns, + cname_ok=False, + error=str(e), + ) + + # 2) ALIAS/ANAME flattening: compare A records against target A records + try: + target_as = set(_resolve_a(target)) + domain_as = set(_resolve_a(domain)) + if target_as and domain_as and domain_as.issubset(target_as): + return YieldDNSCheckResult( + verified=True, + method="cname", + 
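+                # reported as "cname" even though the match came from ALIAS/ANAME
+                # flattening (A records compared, no literal CNAME record); callers
+                # only distinguish delegation ("nameserver") from target-pointing ("cname")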
actual_ns=actual_ns, + cname_ok=True, + error=None, + ) + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + pass + except Exception as e: + return YieldDNSCheckResult( + verified=False, + method=None, + actual_ns=actual_ns, + cname_ok=False, + error=str(e), + ) + + return YieldDNSCheckResult( + verified=False, + method=None, + actual_ns=actual_ns, + cname_ok=False, + error=None, + ) + diff --git a/backend/app/services/yield_payouts.py b/backend/app/services/yield_payouts.py new file mode 100644 index 0000000..c30838d --- /dev/null +++ b/backend/app/services/yield_payouts.py @@ -0,0 +1,132 @@ +""" +Yield payout generation helpers (ledger). + +Used by: +- Admin endpoints (manual ops) +- Scheduler (automatic monthly preparation) +""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from decimal import Decimal + +from sqlalchemy import and_, func, select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.yield_domain import YieldDomain, YieldPayout, YieldTransaction + + +async def generate_payouts_for_period( + db: AsyncSession, + *, + period_start: datetime, + period_end: datetime, +) -> tuple[int, int]: + """ + Create payouts for confirmed, unpaid transactions and assign payout_id. + + Returns: (created_count, skipped_existing_count) + """ + if period_end <= period_start: + raise ValueError("period_end must be after period_start") + + aggregates = ( + await db.execute( + select( + YieldDomain.user_id.label("user_id"), + YieldTransaction.currency.label("currency"), + func.count(YieldTransaction.id).label("tx_count"), + func.coalesce(func.sum(YieldTransaction.net_amount), 0).label("amount"), + ) + .join(YieldDomain, YieldDomain.id == YieldTransaction.yield_domain_id) + .where( + and_( + YieldTransaction.status == "confirmed", + YieldTransaction.payout_id.is_(None), + YieldTransaction.created_at >= period_start, + YieldTransaction.created_at < period_end, + ) + ) + .group_by(YieldDomain.user_id, YieldTransaction.currency) + ) + ).all() + + created = 0 + skipped = 0 + + for row in aggregates: + user_id = int(row.user_id) + currency = (row.currency or "CHF").upper() + tx_count = int(row.tx_count or 0) + amount = Decimal(str(row.amount or 0)) + + if tx_count <= 0 or amount <= 0: + continue + + existing = ( + await db.execute( + select(YieldPayout).where( + and_( + YieldPayout.user_id == user_id, + YieldPayout.currency == currency, + YieldPayout.period_start == period_start, + YieldPayout.period_end == period_end, + ) + ) + ) + ).scalar_one_or_none() + if existing: + skipped += 1 + continue + + payout = YieldPayout( + user_id=user_id, + amount=amount, + currency=currency, + period_start=period_start, + period_end=period_end, + transaction_count=tx_count, + status="pending", + payment_method=None, + payment_reference=None, + ) + db.add(payout) + await db.flush() + + tx_ids = ( + await db.execute( + select(YieldTransaction.id) + .join(YieldDomain, YieldDomain.id == YieldTransaction.yield_domain_id) + .where( + and_( + YieldDomain.user_id == user_id, + YieldTransaction.currency == currency, + YieldTransaction.status == "confirmed", + YieldTransaction.payout_id.is_(None), + YieldTransaction.created_at >= period_start, + YieldTransaction.created_at < period_end, + ) + ) + ) + ).scalars().all() + + for tx_id in tx_ids: + tx = ( + await db.execute(select(YieldTransaction).where(YieldTransaction.id == tx_id)) + ).scalar_one() + tx.payout_id = payout.id + + created += 1 + + await db.commit() + return created, skipped + + +async def 
generate_payouts_for_previous_month(db: AsyncSession) -> tuple[int, int]: + now = datetime.utcnow() + month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + prev_month_end = month_start + prev_month_start = (month_start - timedelta(days=1)).replace(day=1) + return await generate_payouts_for_period(db, period_start=prev_month_start, period_end=prev_month_end) + diff --git a/deploy.sh b/deploy.sh index ff3744d..258d5b4 100755 --- a/deploy.sh +++ b/deploy.sh @@ -23,6 +23,7 @@ SERVER_USER="user" SERVER_HOST="10.42.0.73" SERVER_PATH="/home/user/pounce" SERVER_PASS="user" +SSH_OPTS="-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" # Parse flags QUICK_MODE=false @@ -85,16 +86,16 @@ RSYNC_OPTS="-avz --delete" if ! $BACKEND_ONLY; then echo " Frontend:" - sshpass -p "$SERVER_PASS" rsync $RSYNC_OPTS \ + sshpass -p "$SERVER_PASS" rsync -e "ssh $SSH_OPTS" $RSYNC_OPTS \ --exclude 'node_modules' \ --exclude '.next' \ --exclude '.git' \ - frontend/ $SERVER_USER@$SERVER_HOST:$SERVER_PATH/frontend/ 2>&1 | sed 's/^/ /' + frontend/ $SERVER_USER@$SERVER_HOST:$SERVER_PATH/frontend/ fi if ! $FRONTEND_ONLY; then echo " Backend:" - sshpass -p "$SERVER_PASS" rsync $RSYNC_OPTS \ + sshpass -p "$SERVER_PASS" rsync -e "ssh $SSH_OPTS" $RSYNC_OPTS \ --exclude '__pycache__' \ --exclude '.pytest_cache' \ --exclude 'venv' \ @@ -102,25 +103,41 @@ if ! $FRONTEND_ONLY; then --exclude '*.pyc' \ --exclude '.env' \ --exclude '*.db' \ - backend/ $SERVER_USER@$SERVER_HOST:$SERVER_PATH/backend/ 2>&1 | sed 's/^/ /' + backend/ $SERVER_USER@$SERVER_HOST:$SERVER_PATH/backend/ fi # Step 3: Reload backend (graceful, no restart) if ! $FRONTEND_ONLY; then echo -e "\n${YELLOW}[3/4] Reloading backend (graceful)...${NC}" - sshpass -p "$SERVER_PASS" ssh $SERVER_USER@$SERVER_HOST << 'BACKEND_EOF' - # Signal uvicorn to reload (if running with --reload) - # Otherwise, just check it's running - BACKEND_PID=$(pgrep -f 'uvicorn app.main:app' | head -1) + sshpass -p "$SERVER_PASS" ssh $SSH_OPTS $SERVER_USER@$SERVER_HOST << 'BACKEND_EOF' + set -e + + cd ~/pounce/backend + if [ -f "venv/bin/activate" ]; then + source venv/bin/activate + elif [ -f "../venv/bin/activate" ]; then + source ../venv/bin/activate + else + echo " ✗ venv not found (expected backend/venv or ../venv)" + exit 1 + fi + + echo " Running DB migrations..." + python -c "from app.database import init_db; import asyncio; asyncio.run(init_db())" + echo " ✓ DB migrations applied" + + # Restart backend process (production typically runs without --reload) + BACKEND_PID=$(pgrep -f 'uvicorn app.main:app' | awk 'NR==1{print; exit}') if [ -n "$BACKEND_PID" ]; then - # Touch a file to trigger auto-reload if uvicorn has --reload - touch ~/pounce/backend/app/main.py - echo " ✓ Backend reload triggered (PID: $BACKEND_PID)" + echo " Restarting backend (PID: $BACKEND_PID)..." + kill "$BACKEND_PID" 2>/dev/null || true + sleep 1 + nohup uvicorn app.main:app --host 0.0.0.0 --port 8000 > backend.log 2>&1 & + sleep 2 + echo " ✓ Backend restarted" else echo " ⚠ Backend not running, starting..." - cd ~/pounce/backend - source ../venv/bin/activate nohup uvicorn app.main:app --host 0.0.0.0 --port 8000 > backend.log 2>&1 & sleep 2 echo " ✓ Backend started" @@ -133,18 +150,17 @@ fi # Step 4: Rebuild frontend (in background to minimize downtime) if ! 
$BACKEND_ONLY; then
     echo -e "\n${YELLOW}[4/4] Rebuilding frontend...${NC}"
-    sshpass -p "$SERVER_PASS" ssh $SERVER_USER@$SERVER_HOST << 'FRONTEND_EOF'
+    sshpass -p "$SERVER_PASS" ssh $SSH_OPTS $SERVER_USER@$SERVER_HOST << 'FRONTEND_EOF'
         cd ~/pounce/frontend

         # Build new version
         echo "  Building..."
-        npm run build 2>&1 | grep -E '(✓|○|λ|Error|error)' | head -10 | sed 's/^/ /'
-
+        npm run build
         BUILD_EXIT=$?

         if [ $BUILD_EXIT -eq 0 ]; then
             # Gracefully restart Next.js
-            NEXT_PID=$(pgrep -f 'next start' | head -1)
+            NEXT_PID=$(pgrep -f 'next start' | awk 'NR==1{print; exit}')

             if [ -n "$NEXT_PID" ]; then
                 echo "  Restarting Next.js (PID: $NEXT_PID)..."
@@ -157,7 +173,7 @@ if ! $BACKEND_ONLY; then
         sleep 2

         # Verify
-        NEW_PID=$(pgrep -f 'next start' | head -1)
+        NEW_PID=$(pgrep -f 'next start' | awk 'NR==1{print; exit}')

         if [ -n "$NEW_PID" ]; then
             echo "  ✓ Frontend running (PID: $NEW_PID)"
         else
diff --git a/frontend/src/app/(public)/layout.tsx b/frontend/src/app/(public)/layout.tsx
index 6b95c8a..dbdc7d0 100644
--- a/frontend/src/app/(public)/layout.tsx
+++ b/frontend/src/app/(public)/layout.tsx
@@ -1,9 +1,16 @@
 // Public pages layout - inherits from root layout
+import ReferralCapture from '@/components/ReferralCapture'
+
 export default function PublicLayout({
   children,
 }: {
   children: React.ReactNode
 }) {
-  return children
+  return (
+    <>
+      <ReferralCapture />
+      {children}
+    </>
+  )
 }
diff --git a/frontend/src/app/about/layout.tsx b/frontend/src/app/about/layout.tsx
new file mode 100644
index 0000000..6d2085b
--- /dev/null
+++ b/frontend/src/app/about/layout.tsx
@@ -0,0 +1,45 @@
+import type { Metadata } from 'next'
+import Script from 'next/script'
+import { SITE_URL } from '@/lib/seo'
+
+export const metadata: Metadata = {
+  title: 'About | Pounce',
+  description: 'What Pounce is building: domain intelligence, verified listings, and monetization workflows for serious operators.',
+  alternates: { canonical: `${SITE_URL}/about` },
+  openGraph: {
+    title: 'About | Pounce',
+    description: 'Domain intelligence, verified inventory, and operator-grade workflows.',
+    url: `${SITE_URL}/about`,
+    type: 'website',
+  },
+}
+
+export default function AboutLayout({ children }: { children: React.ReactNode }) {
+  const schema = {
+    '@context': 'https://schema.org',
+    '@graph': [
+      {
+        '@type': 'BreadcrumbList',
+        itemListElement: [
+          { '@type': 'ListItem', position: 1, name: 'Home', item: SITE_URL },
+          { '@type': 'ListItem', position: 2, name: 'About', item: `${SITE_URL}/about` },
+        ],
+      },
+      {
+        '@type': 'WebPage',
+        name: 'About Pounce',
+        description: 'Pounce builds domain intelligence and verified workflows for operators.',
+        url: `${SITE_URL}/about`,
+        isPartOf: { '@type': 'WebSite', name: 'Pounce', url: SITE_URL },
+      },
+    ],
+  }
+
+  return (
+    <>
+