diff --git a/backend/app/api/admin.py b/backend/app/api/admin.py index adc39b9..c9b4729 100644 --- a/backend/app/api/admin.py +++ b/backend/app/api/admin.py @@ -1260,3 +1260,188 @@ async def get_scrape_status( for log in logs ] } + + +# ============== Subscription Management ============== + +class SubscriptionUpdate(BaseModel): + """Manual subscription update request.""" + tier: str # "scout", "trader", "tycoon" + + +@router.post("/users/{user_id}/sync-subscription") +async def sync_user_subscription_from_stripe( + user_id: int, + db: Database, + admin: User = Depends(require_admin), +): + """ + Sync a user's subscription status from Stripe. + + Use this if the webhook failed to update the subscription. + """ + import stripe + import os + + stripe.api_key = os.getenv("STRIPE_SECRET_KEY") + + if not stripe.api_key: + raise HTTPException(status_code=503, detail="Stripe not configured") + + # Get user + result = await db.execute(select(User).where(User.id == user_id)) + user = result.scalar_one_or_none() + + if not user: + raise HTTPException(status_code=404, detail="User not found") + + if not user.stripe_customer_id: + raise HTTPException(status_code=400, detail="User has no Stripe customer ID") + + # Get subscriptions from Stripe + try: + subscriptions = stripe.Subscription.list( + customer=user.stripe_customer_id, + status="active", + limit=1 + ) + except stripe.error.StripeError as e: + raise HTTPException(status_code=500, detail=f"Stripe error: {e}") + + if not subscriptions.data: + return { + "status": "no_active_subscription", + "message": "No active subscription found in Stripe", + "user_email": user.email, + "stripe_customer_id": user.stripe_customer_id + } + + stripe_sub = subscriptions.data[0] + # Access items via dict notation (Stripe returns StripeObject) + items_data = stripe_sub.get("items", {}).get("data", []) + price_id = items_data[0].get("price", {}).get("id") if items_data else None + + # Map price_id to tier + trader_price = os.getenv("STRIPE_PRICE_TRADER") + tycoon_price = os.getenv("STRIPE_PRICE_TYCOON") + + if price_id == trader_price: + tier = SubscriptionTier.TRADER + tier_name = "trader" + elif price_id == tycoon_price: + tier = SubscriptionTier.TYCOON + tier_name = "tycoon" + else: + return { + "status": "unknown_price", + "message": f"Unknown price ID: {price_id}", + "stripe_subscription_id": stripe_sub.id + } + + # Update subscription in database + sub_result = await db.execute( + select(Subscription).where(Subscription.user_id == user.id) + ) + subscription = sub_result.scalar_one_or_none() + + tier_config = TIER_CONFIG[tier] + + if subscription: + old_tier = subscription.tier + subscription.tier = tier + subscription.status = SubscriptionStatus.ACTIVE + subscription.stripe_subscription_id = stripe_sub.id + subscription.max_domains = tier_config["domain_limit"] + subscription.check_frequency = tier_config["check_frequency"] + else: + subscription = Subscription( + user_id=user.id, + tier=tier, + status=SubscriptionStatus.ACTIVE, + stripe_subscription_id=stripe_sub.id, + max_domains=tier_config["domain_limit"], + check_frequency=tier_config["check_frequency"], + ) + db.add(subscription) + old_tier = None + + await db.commit() + + return { + "status": "synced", + "user_email": user.email, + "stripe_customer_id": user.stripe_customer_id, + "stripe_subscription_id": stripe_sub.id, + "old_tier": old_tier.value if old_tier else None, + "new_tier": tier.value, + "tier_config": { + "domain_limit": tier_config["domain_limit"], + "check_frequency": 
tier_config["check_frequency"], + } + } + + +@router.post("/users/{user_id}/set-subscription") +async def set_user_subscription( + user_id: int, + update: SubscriptionUpdate, + db: Database, + admin: User = Depends(require_admin), +): + """ + Manually set a user's subscription tier. + + Use this to manually upgrade/downgrade users (e.g., for refunds or promotions). + """ + tier_map = { + "scout": SubscriptionTier.SCOUT, + "trader": SubscriptionTier.TRADER, + "tycoon": SubscriptionTier.TYCOON, + } + + if update.tier.lower() not in tier_map: + raise HTTPException(status_code=400, detail=f"Invalid tier: {update.tier}") + + tier = tier_map[update.tier.lower()] + + # Get user + result = await db.execute(select(User).where(User.id == user_id)) + user = result.scalar_one_or_none() + + if not user: + raise HTTPException(status_code=404, detail="User not found") + + # Get/create subscription + sub_result = await db.execute( + select(Subscription).where(Subscription.user_id == user.id) + ) + subscription = sub_result.scalar_one_or_none() + + tier_config = TIER_CONFIG[tier] + + if subscription: + old_tier = subscription.tier + subscription.tier = tier + subscription.status = SubscriptionStatus.ACTIVE + subscription.max_domains = tier_config["domain_limit"] + subscription.check_frequency = tier_config["check_frequency"] + else: + subscription = Subscription( + user_id=user.id, + tier=tier, + status=SubscriptionStatus.ACTIVE, + max_domains=tier_config["domain_limit"], + check_frequency=tier_config["check_frequency"], + ) + db.add(subscription) + old_tier = None + + await db.commit() + + return { + "status": "updated", + "user_email": user.email, + "user_id": user.id, + "old_tier": old_tier.value if old_tier else None, + "new_tier": tier.value, + } diff --git a/backend/app/api/auctions.py b/backend/app/api/auctions.py index 86e050d..cdcbaf2 100644 --- a/backend/app/api/auctions.py +++ b/backend/app/api/auctions.py @@ -184,15 +184,23 @@ def _format_time_remaining(end_time: datetime, now: Optional[datetime] = None) - def _get_affiliate_url(platform: str, domain: str, auction_url: str) -> str: - """Get affiliate URL for a platform - links directly to the auction page.""" + """Get affiliate URL for a platform - links directly to the auction page with affiliate tracking.""" + # Import here to avoid circular imports + from app.services.hidden_api_scrapers import build_affiliate_url + + # Try to build affiliate URL first (includes partner IDs) + affiliate_url = build_affiliate_url(platform, domain, auction_url) + if affiliate_url: + return affiliate_url + # Use the scraped auction URL directly if available if auction_url and auction_url.startswith("http"): return auction_url - # Fallback to platform-specific search/listing pages + # Fallback to platform-specific search/listing pages (without affiliate tracking) platform_urls = { "GoDaddy": f"https://auctions.godaddy.com/trpItemListing.aspx?domain={domain}", - "Sedo": f"https://sedo.com/search/?keyword={domain}", + "Sedo": f"https://sedo.com/search/details/?domain={domain}&partnerid=335830", "NameJet": f"https://www.namejet.com/Pages/Auctions/BackorderSearch.aspx?q={domain}", "DropCatch": f"https://www.dropcatch.com/domain/{domain}", "ExpiredDomains": f"https://www.expireddomains.net/domain-name-search/?q={domain}", diff --git a/backend/app/api/webhooks.py b/backend/app/api/webhooks.py index ea2bb1a..125c8cb 100644 --- a/backend/app/api/webhooks.py +++ b/backend/app/api/webhooks.py @@ -5,6 +5,8 @@ Webhook endpoints for external service integrations. 
- Future: Other payment providers, notification services, etc. """ import logging +import os +from datetime import datetime from fastapi import APIRouter, HTTPException, Request, Header, status from app.database import get_db @@ -15,6 +17,25 @@ logger = logging.getLogger(__name__) router = APIRouter() +@router.get("/stripe/test") +async def test_stripe_webhook(): + """ + Test endpoint to verify webhook route is accessible. + + Use this to verify the webhook URL is correct. + The actual Stripe webhook should POST to /api/v1/webhooks/stripe + """ + return { + "status": "ok", + "message": "Stripe webhook endpoint is accessible", + "endpoint": "/api/v1/webhooks/stripe", + "method": "POST", + "stripe_configured": StripeService.is_configured(), + "webhook_secret_set": bool(os.getenv("STRIPE_WEBHOOK_SECRET")), + "timestamp": datetime.utcnow().isoformat(), + } + + @router.post("/stripe") async def stripe_webhook( request: Request, @@ -29,18 +50,22 @@ async def stripe_webhook( - Invoice is created or paid The webhook must be configured in Stripe Dashboard to point to: - https://your-domain.com/api/webhooks/stripe + https://pounce.ch/api/v1/webhooks/stripe Required Header: - Stripe-Signature: Stripe's webhook signature for verification """ + logger.info("๐Ÿ”” Stripe webhook received") + if not stripe_signature: + logger.error("โŒ Missing Stripe-Signature header") raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail="Missing Stripe-Signature header", ) if not StripeService.is_configured(): + logger.error("โŒ Stripe not configured") raise HTTPException( status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail="Stripe not configured", @@ -49,6 +74,9 @@ async def stripe_webhook( # Get raw body for signature verification payload = await request.body() + logger.info(f" Payload size: {len(payload)} bytes") + logger.info(f" Signature: {stripe_signature[:50]}...") + try: async for db in get_db(): result = await StripeService.handle_webhook( @@ -56,16 +84,17 @@ async def stripe_webhook( sig_header=stripe_signature, db=db, ) + logger.info(f"โœ… Webhook processed successfully: {result}") return result except ValueError as e: - logger.error(f"Webhook validation error: {e}") + logger.error(f"โŒ Webhook validation error: {e}") raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=str(e), ) except Exception as e: - logger.error(f"Webhook processing error: {e}") + logger.exception(f"โŒ Webhook processing error: {e}") raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Webhook processing failed", diff --git a/backend/app/models/domain.py b/backend/app/models/domain.py index a878823..bdaea92 100644 --- a/backend/app/models/domain.py +++ b/backend/app/models/domain.py @@ -2,7 +2,7 @@ from datetime import datetime from enum import Enum from sqlalchemy import String, Boolean, DateTime, ForeignKey, Text, Enum as SQLEnum -from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.orm import Mapped, mapped_column, relationship, backref from app.database import Base @@ -116,8 +116,11 @@ class DomainHealthCache(Base): # Timestamp checked_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) - # Relationship - domain: Mapped["Domain"] = relationship("Domain", backref="health_cache") + # Relationship - cascade delete when domain is deleted + domain: Mapped["Domain"] = relationship( + "Domain", + backref=backref("health_cache", cascade="all, delete-orphan", uselist=False) + ) def __repr__(self) -> str: return f"" diff --git 
a/backend/app/services/czds_client.py b/backend/app/services/czds_client.py index 462f943..5e3602d 100644 --- a/backend/app/services/czds_client.py +++ b/backend/app/services/czds_client.py @@ -38,7 +38,7 @@ settings = get_settings() CZDS_AUTH_URL = "https://account-api.icann.org/api/authenticate" CZDS_ZONES_URL = "https://czds-api.icann.org/czds/downloads/links" -CZDS_DOWNLOAD_BASE = "https://czds-download.icann.org" +CZDS_DOWNLOAD_BASE = "https://czds-download-api.icann.org" # TLDs we have approved access to APPROVED_TLDS = ["xyz", "org", "online", "info", "dev", "app"] @@ -99,8 +99,11 @@ class CZDSClient: logger.info("CZDS authentication successful") return self._token - async def get_available_zones(self) -> list[str]: - """Get list of zone files available for download.""" + async def get_available_zones(self) -> dict[str, str]: + """ + Get list of zone files available for download. + Returns dict mapping TLD to download URL. + """ token = await self._authenticate() async with httpx.AsyncClient(timeout=60) as client: @@ -111,30 +114,40 @@ class CZDSClient: if response.status_code != 200: logger.error(f"Failed to get zone list: {response.status_code}") - return [] + return {} # Response is a list of download URLs urls = response.json() - # Extract TLDs from URLs - tlds = [] + # Extract TLDs and their URLs + zones = {} for url in urls: - # URL format: https://czds-download.icann.org/czds/downloads/xyz.zone + # URL format: https://czds-download-api.icann.org/czds/downloads/xyz.zone match = re.search(r'/([a-z0-9-]+)\.zone$', url, re.IGNORECASE) if match: - tlds.append(match.group(1).lower()) + tld = match.group(1).lower() + zones[tld] = url - logger.info(f"Available zones: {tlds}") - return tlds + logger.info(f"Available zones: {list(zones.keys())}") + return zones - async def download_zone(self, tld: str) -> Optional[Path]: - """Download a zone file for a specific TLD.""" + async def download_zone(self, tld: str, download_url: Optional[str] = None) -> Optional[Path]: + """ + Download a zone file for a specific TLD. + + Args: + tld: The TLD to download + download_url: Optional explicit download URL (from get_available_zones) + """ token = await self._authenticate() - download_url = f"{CZDS_DOWNLOAD_BASE}/czds/downloads/{tld}.zone" + # Use provided URL or construct one + if not download_url: + download_url = f"{CZDS_DOWNLOAD_BASE}/czds/downloads/{tld}.zone" + output_path = self.data_dir / f"{tld}.zone.txt.gz" - logger.info(f"Downloading zone file for .{tld}...") + logger.info(f"Downloading zone file for .{tld} from {download_url}...") async with httpx.AsyncClient(timeout=600, follow_redirects=True) as client: try: @@ -294,7 +307,12 @@ class CZDSClient: return dropped_records - async def sync_zone(self, db: AsyncSession, tld: str) -> dict: + async def sync_zone( + self, + db: AsyncSession, + tld: str, + download_url: Optional[str] = None + ) -> dict: """ Sync a single zone file: 1. Download zone file @@ -302,6 +320,11 @@ class CZDSClient: 3. Compare with previous snapshot 4. Store dropped domains 5. 
Save new snapshot + + Args: + db: Database session + tld: TLD to sync + download_url: Optional explicit download URL """ logger.info(f"Starting sync for .{tld}") @@ -317,7 +340,7 @@ class CZDSClient: try: # Download zone file - gz_path = await self.download_zone(tld) + gz_path = await self.download_zone(tld, download_url) if not gz_path: result["status"] = "download_failed" result["error"] = "Failed to download zone file" @@ -388,13 +411,33 @@ class CZDSClient: Returns: List of sync results for each TLD. """ - tlds = tlds or APPROVED_TLDS + target_tlds = tlds or APPROVED_TLDS - logger.info(f"Starting CZDS sync for {len(tlds)} zones: {tlds}") + # Get available zones with their download URLs + available_zones = await self.get_available_zones() + + logger.info(f"Starting CZDS sync for {len(target_tlds)} zones: {target_tlds}") + logger.info(f"Available zones: {list(available_zones.keys())}") results = [] - for tld in tlds: - result = await self.sync_zone(db, tld) + for tld in target_tlds: + # Get the actual download URL for this TLD + download_url = available_zones.get(tld) + + if not download_url: + logger.warning(f"No download URL available for .{tld}") + results.append({ + "tld": tld, + "status": "not_available", + "current_count": 0, + "previous_count": 0, + "dropped_count": 0, + "new_count": 0, + "error": f"No access to .{tld} zone" + }) + continue + + result = await self.sync_zone(db, tld, download_url) results.append(result) # Small delay between zones to be nice to ICANN servers @@ -406,7 +449,7 @@ class CZDSClient: logger.info( f"CZDS sync complete: " - f"{success_count}/{len(tlds)} zones successful, " + f"{success_count}/{len(target_tlds)} zones successful, " f"{total_dropped:,} total dropped domains" ) diff --git a/backend/app/services/stripe_service.py b/backend/app/services/stripe_service.py index 53cfecb..5866967 100644 --- a/backend/app/services/stripe_service.py +++ b/backend/app/services/stripe_service.py @@ -216,7 +216,8 @@ class StripeService: Handle Stripe webhook events. 
Important events: - - checkout.session.completed: Payment successful + - checkout.session.completed: Payment successful (initial) + - invoice.payment_succeeded: Invoice paid (recurring & initial) - customer.subscription.updated: Subscription changed - customer.subscription.deleted: Subscription cancelled - invoice.payment_failed: Payment failed @@ -231,39 +232,68 @@ class StripeService: payload, sig_header, webhook_secret ) except ValueError: + logger.error("โŒ Invalid webhook payload") raise ValueError("Invalid payload") except stripe.error.SignatureVerificationError: + logger.error("โŒ Invalid webhook signature") raise ValueError("Invalid signature") event_type = event["type"] data = event["data"]["object"] - logger.info(f"Processing Stripe webhook: {event_type}") + logger.info(f"๐Ÿ”” Processing Stripe webhook: {event_type}") + logger.info(f" Event ID: {event.get('id')}") - if event_type == "checkout.session.completed": - await StripeService._handle_checkout_complete(data, db) - - elif event_type == "customer.subscription.updated": - await StripeService._handle_subscription_updated(data, db) - - elif event_type == "customer.subscription.deleted": - await StripeService._handle_subscription_cancelled(data, db) - - elif event_type == "invoice.payment_failed": - await StripeService._handle_payment_failed(data, db) - - return {"status": "success", "event_type": event_type} + try: + if event_type == "checkout.session.completed": + await StripeService._handle_checkout_complete(data, db) + + elif event_type == "invoice.payment_succeeded": + # This is the main event for successful payments! + await StripeService._handle_invoice_paid(data, db) + + elif event_type == "customer.subscription.updated": + await StripeService._handle_subscription_updated(data, db) + + elif event_type == "customer.subscription.deleted": + await StripeService._handle_subscription_cancelled(data, db) + + elif event_type == "invoice.payment_failed": + await StripeService._handle_payment_failed(data, db) + + else: + logger.info(f" Unhandled event type: {event_type} (acknowledged)") + + return {"status": "success", "event_type": event_type} + + except Exception as e: + logger.exception(f"โŒ Error processing webhook {event_type}: {e}") + # Still return success to prevent Stripe from retrying + # The error is logged for investigation + return {"status": "error_logged", "event_type": event_type, "error": str(e)} @staticmethod async def _handle_checkout_complete(data: Dict, db: AsyncSession): - """Handle successful checkout - activate subscription.""" + """ + Handle successful checkout - activate subscription. + + IMPORTANT: This must be idempotent! Stripe may send webhooks multiple times. 
+ """ + session_id = data.get("id") user_id = data.get("metadata", {}).get("user_id") plan = data.get("metadata", {}).get("plan") # "trader" or "tycoon" customer_id = data.get("customer") subscription_id = data.get("subscription") + logger.info(f"๐Ÿ”” Checkout complete webhook received:") + logger.info(f" Session: {session_id}") + logger.info(f" User ID: {user_id}") + logger.info(f" Plan: {plan}") + logger.info(f" Customer: {customer_id}") + logger.info(f" Subscription: {subscription_id}") + if not user_id or not plan: - logger.error("Missing user_id or plan in checkout metadata") + logger.error(f"โŒ Missing user_id or plan in checkout metadata: {data.get('metadata')}") return # Convert plan string to SubscriptionTier enum @@ -281,9 +311,23 @@ class StripeService: user = result.scalar_one_or_none() if not user: - logger.error(f"User {user_id} not found for checkout") + logger.error(f"โŒ User {user_id} not found for checkout") return + logger.info(f" User email: {user.email}") + + # IDEMPOTENCY CHECK: Check if this subscription_id was already processed + if subscription_id: + existing_sub = await db.execute( + select(Subscription).where( + Subscription.stripe_subscription_id == subscription_id + ) + ) + existing = existing_sub.scalar_one_or_none() + if existing: + logger.info(f"โš ๏ธ Subscription {subscription_id} already processed (idempotent)") + return + # Update user's Stripe customer ID user.stripe_customer_id = customer_id @@ -296,26 +340,158 @@ class StripeService: tier_info = TIER_FEATURES.get(plan.lower(), TIER_FEATURES["scout"]) if subscription: - subscription.tier = tier_enum # Use enum, not string + # Only upgrade if actually changing + old_tier = subscription.tier + subscription.tier = tier_enum subscription.status = SubscriptionStatus.ACTIVE subscription.stripe_subscription_id = subscription_id subscription.max_domains = tier_info["max_domains"] subscription.check_frequency = tier_info["check_frequency"] subscription.updated_at = datetime.utcnow() + logger.info(f"โœ… Updated subscription: {old_tier} โ†’ {tier_enum}") else: subscription = Subscription( user_id=user.id, - tier=tier_enum, # Use enum, not string + tier=tier_enum, status=SubscriptionStatus.ACTIVE, stripe_subscription_id=subscription_id, max_domains=tier_info["max_domains"], check_frequency=tier_info["check_frequency"], ) db.add(subscription) + logger.info(f"โœ… Created new subscription: {tier_enum}") - await db.commit() - logger.info(f"Activated {plan} subscription for user {user_id}") + try: + await db.commit() + logger.info(f"โœ… Activated {plan} subscription for user {user_id} ({user.email})") + except Exception as e: + logger.exception(f"โŒ Failed to commit subscription: {e}") + await db.rollback() + raise + @staticmethod + async def _handle_invoice_paid(data: Dict, db: AsyncSession): + """ + Handle successful invoice payment. + + This is the MAIN event for activating subscriptions! + Called for both initial payments and recurring payments. + + Invoice structure has metadata in: + - parent.subscription_details.metadata (for subscription invoices) + - lines.data[0].metadata (line item level) + """ + invoice_id = data.get("id") + customer_id = data.get("customer") + customer_email = data.get("customer_email") + billing_reason = data.get("billing_reason") # "subscription_create", "subscription_cycle", etc. 
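+        # Where the values read below come from (illustrative shape, trimmed to
+        # the keys this handler actually uses -- not a full Stripe invoice object):
+        #   {
+        #     "parent": {"subscription_details": {
+        #         "subscription": "sub_...",
+        #         "metadata": {"user_id": "42", "plan": "trader"}}},
+        #     "lines": {"data": [{"metadata": {"user_id": "42", "plan": "trader"}}]}
+        #   }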
+ + logger.info(f"๐Ÿงพ Invoice paid webhook received:") + logger.info(f" Invoice: {invoice_id}") + logger.info(f" Customer: {customer_id}") + logger.info(f" Email: {customer_email}") + logger.info(f" Billing reason: {billing_reason}") + + # Extract metadata from subscription details + parent = data.get("parent", {}) + subscription_details = parent.get("subscription_details", {}) + metadata = subscription_details.get("metadata", {}) + subscription_id = subscription_details.get("subscription") + + user_id = metadata.get("user_id") + plan = metadata.get("plan") + + # Fallback: try to get from line items + if not user_id or not plan: + lines = data.get("lines", {}).get("data", []) + if lines: + line_metadata = lines[0].get("metadata", {}) + user_id = user_id or line_metadata.get("user_id") + plan = plan or line_metadata.get("plan") + + logger.info(f" User ID: {user_id}") + logger.info(f" Plan: {plan}") + logger.info(f" Subscription ID: {subscription_id}") + + if not user_id or not plan: + logger.warning(f"โš ๏ธ No user_id or plan in invoice metadata, skipping") + logger.warning(f" Full parent: {parent}") + return + + # Convert plan string to SubscriptionTier enum + tier_map = { + "trader": SubscriptionTier.TRADER, + "tycoon": SubscriptionTier.TYCOON, + "scout": SubscriptionTier.SCOUT, + } + tier_enum = tier_map.get(plan.lower(), SubscriptionTier.SCOUT) + + # Get user + result = await db.execute( + select(User).where(User.id == int(user_id)) + ) + user = result.scalar_one_or_none() + + if not user: + logger.error(f"โŒ User {user_id} not found for invoice") + return + + logger.info(f" Found user: {user.email}") + + # Update user's Stripe customer ID if not set + if not user.stripe_customer_id: + user.stripe_customer_id = customer_id + + # IDEMPOTENCY CHECK: Check if this subscription_id was already processed with this tier + if subscription_id: + existing_sub = await db.execute( + select(Subscription).where( + Subscription.stripe_subscription_id == subscription_id, + Subscription.tier == tier_enum + ) + ) + existing = existing_sub.scalar_one_or_none() + if existing: + logger.info(f"โš ๏ธ Subscription {subscription_id} already active as {tier_enum} (idempotent)") + return + + # Create or update subscription + sub_result = await db.execute( + select(Subscription).where(Subscription.user_id == user.id) + ) + subscription = sub_result.scalar_one_or_none() + + tier_info = TIER_FEATURES.get(plan.lower(), TIER_FEATURES["scout"]) + + if subscription: + old_tier = subscription.tier + subscription.tier = tier_enum + subscription.status = SubscriptionStatus.ACTIVE + subscription.stripe_subscription_id = subscription_id + subscription.max_domains = tier_info["max_domains"] + subscription.check_frequency = tier_info["check_frequency"] + subscription.updated_at = datetime.utcnow() + logger.info(f"โœ… Updated subscription: {old_tier} โ†’ {tier_enum}") + else: + subscription = Subscription( + user_id=user.id, + tier=tier_enum, + status=SubscriptionStatus.ACTIVE, + stripe_subscription_id=subscription_id, + max_domains=tier_info["max_domains"], + check_frequency=tier_info["check_frequency"], + ) + db.add(subscription) + logger.info(f"โœ… Created new subscription: {tier_enum}") + + try: + await db.commit() + logger.info(f"โœ… Activated {plan} subscription for user {user_id} ({user.email}) via invoice") + except Exception as e: + logger.exception(f"โŒ Failed to commit subscription: {e}") + await db.rollback() + raise + @staticmethod async def _handle_subscription_updated(data: Dict, db: AsyncSession): """Handle 
subscription update (plan change, renewal, etc.).""" diff --git a/backend/env.example b/backend/env.example index 4a800cc..c5e5017 100644 --- a/backend/env.example +++ b/backend/env.example @@ -57,6 +57,13 @@ MOZ_SECRET_KEY= # Sentry Error Tracking SENTRY_DSN= +# ============== ZONE FILE SERVICES ============== +# ICANN CZDS (Centralized Zone Data Service) +# Register at: https://czds.icann.org/ +CZDS_USERNAME= +CZDS_PASSWORD= +CZDS_DATA_DIR=/tmp/pounce_czds + # ============== PRODUCTION SETTINGS ============== # Uncomment for production deployment: # DATABASE_URL=postgresql+asyncpg://user:pass@localhost/pounce diff --git a/backend/requirements.txt b/backend/requirements.txt index abd8a8e..f29821f 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -54,3 +54,6 @@ redis>=5.0.0 # Production Database (optional) # asyncpg>=0.30.0 # Already included above + +# ICANN CZDS Zone File Access +pyCZDS>=1.7.0 diff --git a/backend/scripts/sync_czds_safe.py b/backend/scripts/sync_czds_safe.py new file mode 100644 index 0000000..5090592 --- /dev/null +++ b/backend/scripts/sync_czds_safe.py @@ -0,0 +1,365 @@ +#!/usr/bin/env python3 +""" +Safe CZDS Zone File Sync Script +================================ +Uses the official pyCZDS library to safely download zone files. + +IMPORTANT Rate Limits: +- Max 1 download per TLD per 24 hours +- Max 3 direct downloads per TLD per 24h (or you get blocked!) +- Zone files are updated daily between 00:00-06:00 UTC + +Run this script ONCE daily, after 06:00 UTC. +""" + +import asyncio +import gzip +import hashlib +import logging +import os +import sys +import time +from datetime import datetime, timedelta +from pathlib import Path + +# Add parent directory to path for imports +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from pyczds.client import CZDSClient as PyCZDSClient + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(message)s' +) +logger = logging.getLogger(__name__) + +# Configuration +APPROVED_TLDS = ["xyz", "org", "online", "info", "dev", "app"] +DATA_DIR = Path(os.getenv("CZDS_DATA_DIR", "/home/user/pounce_czds")) +DELAY_BETWEEN_DOWNLOADS = 5 # seconds - be nice to ICANN servers + + +def get_credentials(): + """Get CZDS credentials from environment or .env file.""" + # Try environment first + username = os.getenv("CZDS_USERNAME") + password = os.getenv("CZDS_PASSWORD") + + if not username or not password: + # Try loading from .env + env_file = Path(__file__).parent.parent / ".env" + if env_file.exists(): + for line in env_file.read_text().splitlines(): + if line.startswith("CZDS_USERNAME="): + username = line.split("=", 1)[1].strip() + elif line.startswith("CZDS_PASSWORD="): + password = line.split("=", 1)[1].strip() + + return username, password + + +def should_download_today(tld: str) -> bool: + """Check if we already downloaded this TLD today.""" + marker_file = DATA_DIR / f".{tld}_last_download" + + if marker_file.exists(): + last_download = datetime.fromisoformat(marker_file.read_text().strip()) + # Only download once per 24h + if datetime.utcnow() - last_download < timedelta(hours=23): + logger.info(f"โญ๏ธ .{tld}: Already downloaded within 24h, skipping") + return False + + return True + + +def mark_downloaded(tld: str): + """Mark TLD as downloaded today.""" + marker_file = DATA_DIR / f".{tld}_last_download" + marker_file.write_text(datetime.utcnow().isoformat()) + + +def parse_zone_file(zone_path: Path, tld: str) -> set[str]: + """ + Parse zone file and extract unique domain names. 
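+
+    A typical zone file record looks like (illustrative):
+        example.xyz.    86400   in      ns      ns1.examplehost.com.
+    Only the owner name (first column) is used; the TTL, class, record type
+    and target are ignored.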
+ + Returns set of domain names (without TLD suffix). + """ + logger.info(f"๐Ÿ“– Parsing zone file for .{tld}...") + + domains = set() + line_count = 0 + + # Handle gzipped files + if str(zone_path).endswith('.gz'): + open_func = lambda p: gzip.open(p, 'rt', encoding='utf-8', errors='ignore') + else: + open_func = lambda p: open(p, 'r', encoding='utf-8', errors='ignore') + + with open_func(zone_path) as f: + for line in f: + line_count += 1 + + # Skip comments and empty lines + if line.startswith(';') or not line.strip(): + continue + + # Parse zone file line + parts = line.split() + if len(parts) >= 4: + name = parts[0].rstrip('.') + + # Must end with our TLD + if name.lower().endswith(f'.{tld}'): + # Extract just the domain name part + domain_name = name[:-(len(tld) + 1)] + + # Skip the TLD itself and subdomains + if domain_name and '.' not in domain_name: + domains.add(domain_name.lower()) + + logger.info(f" Found {len(domains):,} unique domains from {line_count:,} lines") + return domains + + +def compute_checksum(domains: set[str]) -> str: + """Compute SHA256 checksum of sorted domain list.""" + sorted_domains = "\n".join(sorted(domains)) + return hashlib.sha256(sorted_domains.encode()).hexdigest() + + +def load_previous_domains(tld: str) -> set[str] | None: + """Load previous day's domain set from cache file.""" + cache_file = DATA_DIR / f"{tld}_domains.txt" + + if cache_file.exists(): + try: + content = cache_file.read_text() + return set(line.strip() for line in content.splitlines() if line.strip()) + except Exception as e: + logger.warning(f"Failed to load cache for .{tld}: {e}") + + return None + + +def save_domains(tld: str, domains: set[str]): + """Save current domains to cache file.""" + cache_file = DATA_DIR / f"{tld}_domains.txt" + cache_file.write_text("\n".join(sorted(domains))) + logger.info(f"๐Ÿ’พ Saved {len(domains):,} domains for .{tld}") + + +def find_drops(previous: set[str], current: set[str]) -> set[str]: + """Find dropped domains (present yesterday, missing today).""" + return previous - current + + +async def save_drops_to_db(tld: str, dropped: set[str]): + """Save dropped domains to database.""" + if not dropped: + return 0 + + try: + from app.database import AsyncSessionLocal, init_db + from app.models.zone_file import DroppedDomain + from datetime import datetime + + await init_db() + + today = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0) + + async with AsyncSessionLocal() as db: + # Batch insert + records = [] + for name in dropped: + record = DroppedDomain( + domain=f"{name}.{tld}", + tld=tld, + dropped_date=today, + length=len(name), + is_numeric=name.isdigit(), + has_hyphen='-' in name + ) + records.append(record) + + db.add_all(records) + await db.commit() + + logger.info(f"๐Ÿ’พ Saved {len(dropped):,} dropped domains to database") + return len(dropped) + + except Exception as e: + logger.error(f"Failed to save drops to DB: {e}") + return 0 + + +def sync_single_tld(client: PyCZDSClient, tld: str, zone_urls: list[str]) -> dict: + """ + Sync a single TLD zone file. + + Returns dict with sync results. 
+ """ + result = { + "tld": tld, + "status": "pending", + "domain_count": 0, + "dropped_count": 0, + "new_count": 0, + "error": None + } + + # Check if we should download + if not should_download_today(tld): + result["status"] = "skipped" + return result + + # Find URL for this TLD + tld_url = next((url for url in zone_urls if f"/{tld}.zone" in url.lower()), None) + + if not tld_url: + logger.warning(f"โŒ No access to .{tld} zone file") + result["status"] = "no_access" + result["error"] = "No access to this TLD" + return result + + try: + logger.info(f"โฌ‡๏ธ Downloading .{tld} zone file...") + + # Download zone file using pyCZDS + # This returns the path to the downloaded file + downloaded_file = client.get_zonefile(tld_url, download_dir=str(DATA_DIR)) + + if not downloaded_file or not Path(downloaded_file).exists(): + result["status"] = "download_failed" + result["error"] = "Download returned no file" + return result + + downloaded_path = Path(downloaded_file) + file_size_mb = downloaded_path.stat().st_size / (1024 * 1024) + logger.info(f" Downloaded: {file_size_mb:.1f} MB") + + # Parse zone file + current_domains = parse_zone_file(downloaded_path, tld) + result["domain_count"] = len(current_domains) + + # Load previous domains + previous_domains = load_previous_domains(tld) + + if previous_domains: + # Find drops + dropped = find_drops(previous_domains, current_domains) + result["dropped_count"] = len(dropped) + result["new_count"] = len(current_domains - previous_domains) + + logger.info(f" ๐Ÿ“‰ Dropped: {len(dropped):,}") + logger.info(f" ๐Ÿ“ˆ New: {result['new_count']:,}") + + # Save drops to database (async) + if dropped: + asyncio.run(save_drops_to_db(tld, dropped)) + else: + logger.info(f" โ„น๏ธ First sync, no comparison available") + + # Save current domains for next comparison + save_domains(tld, current_domains) + + # Mark as downloaded + mark_downloaded(tld) + + # Clean up downloaded zone file (can be huge) + downloaded_path.unlink() + logger.info(f" ๐Ÿ—‘๏ธ Cleaned up zone file") + + result["status"] = "success" + logger.info(f"โœ… .{tld} sync complete!") + + except Exception as e: + logger.exception(f"โŒ Error syncing .{tld}: {e}") + result["status"] = "error" + result["error"] = str(e) + + return result + + +def main(): + """Main sync function.""" + print("=" * 60) + print("๐ŸŒ CZDS Zone File Sync (Safe Mode)") + print("=" * 60) + print(f"๐Ÿ“… Time: {datetime.utcnow().isoformat()} UTC") + print(f"๐Ÿ“‚ Data dir: {DATA_DIR}") + print(f"๐Ÿ“‹ TLDs: {', '.join(APPROVED_TLDS)}") + print("-" * 60) + + # Ensure data directory exists + DATA_DIR.mkdir(parents=True, exist_ok=True) + + # Get credentials + username, password = get_credentials() + + if not username or not password: + print("โŒ CZDS credentials not configured!") + print(" Set CZDS_USERNAME and CZDS_PASSWORD in .env") + sys.exit(1) + + print(f"๐Ÿ‘ค User: {username}") + + # Initialize pyCZDS client + try: + client = PyCZDSClient(username, password) + print("โœ… Authenticated with ICANN CZDS") + except Exception as e: + print(f"โŒ Authentication failed: {e}") + sys.exit(1) + + # Get available zone files + try: + zone_urls = client.get_zonefiles_list() + available_tlds = [url.split('/')[-1].replace('.zone', '') for url in zone_urls] + print(f"โœ… Available zones: {', '.join(available_tlds)}") + except Exception as e: + print(f"โŒ Failed to get zone list: {e}") + sys.exit(1) + + print("-" * 60) + + # Sync each TLD + results = [] + for i, tld in enumerate(APPROVED_TLDS): + print(f"\n[{i+1}/{len(APPROVED_TLDS)}] 
Processing .{tld}...") + + result = sync_single_tld(client, tld, zone_urls) + results.append(result) + + # Delay between downloads (be nice to ICANN) + if i < len(APPROVED_TLDS) - 1 and result["status"] == "success": + print(f" โณ Waiting {DELAY_BETWEEN_DOWNLOADS}s before next download...") + time.sleep(DELAY_BETWEEN_DOWNLOADS) + + # Summary + print("\n" + "=" * 60) + print("๐Ÿ“Š SUMMARY") + print("=" * 60) + + success_count = sum(1 for r in results if r["status"] == "success") + total_drops = sum(r["dropped_count"] for r in results) + + for r in results: + icon = "โœ…" if r["status"] == "success" else "โญ๏ธ" if r["status"] == "skipped" else "โŒ" + print(f"{icon} .{r['tld']}: {r['status']} - {r['domain_count']:,} domains, {r['dropped_count']:,} dropped") + if r["error"]: + print(f" โš ๏ธ Error: {r['error']}") + + print("-" * 60) + print(f"โœ… Successful: {success_count}/{len(APPROVED_TLDS)}") + print(f"๐Ÿ“‰ Total drops: {total_drops:,}") + print("=" * 60) + + +if __name__ == "__main__": + # Parse arguments + if len(sys.argv) > 1: + # Only sync specific TLDs + APPROVED_TLDS = [tld.lower() for tld in sys.argv[1:]] + print(f"๐ŸŽฏ Syncing specific TLDs: {APPROVED_TLDS}") + + main() diff --git a/deploy.sh b/deploy.sh index 43ab9c4..af0af81 100755 --- a/deploy.sh +++ b/deploy.sh @@ -129,6 +129,18 @@ if ! $FRONTEND_ONLY; then exit 1 fi + # Update CZDS credentials if not set + if ! grep -q "CZDS_USERNAME=" .env 2>/dev/null; then + echo "" >> .env + echo "# ICANN CZDS Zone File Service" >> .env + echo "CZDS_USERNAME=guggeryves@hotmail.com" >> .env + echo "CZDS_PASSWORD=Achiarorocco1278!" >> .env + echo "CZDS_DATA_DIR=/home/user/pounce_czds" >> .env + echo " โœ“ CZDS credentials added to .env" + else + echo " โœ“ CZDS credentials already configured" + fi + echo " Running DB migrations..." python -c "from app.database import init_db; import asyncio; asyncio.run(init_db())" echo " โœ“ DB migrations applied" @@ -185,7 +197,11 @@ if ! 
$BACKEND_ONLY; then # Next.js standalone output requires public + static inside standalone folder mkdir -p .next/standalone/.next ln -sfn ../../static .next/standalone/.next/static - ln -sfn ../../public .next/standalone/public + + # Copy public folder (symlinks don't work reliably) + rm -rf .next/standalone/public + cp -r public .next/standalone/public + echo " โœ“ Public files copied to standalone" # Gracefully restart Next.js NEXT_PID=$(pgrep -af 'node \\.next/standalone/server\\.js|next start|next-server|next-serv' | awk 'NR==1{print $1; exit}') diff --git a/frontend/src/app/terminal/layout.tsx b/frontend/src/app/terminal/layout.tsx index 3236437..34d867e 100644 --- a/frontend/src/app/terminal/layout.tsx +++ b/frontend/src/app/terminal/layout.tsx @@ -4,6 +4,7 @@ import { useEffect, useState } from 'react' import { useRouter, usePathname } from 'next/navigation' import { useStore } from '@/lib/store' import { AnalyzePanelProvider } from '@/components/analyze/AnalyzePanelProvider' +import { BetaBanner } from '@/components/BetaBanner' import { Loader2 } from 'lucide-react' export default function TerminalLayout({ @@ -59,6 +60,11 @@ export default function TerminalLayout({ ) } - return {children} + return ( + + + {children} + + ) } diff --git a/frontend/src/components/BetaBanner.tsx b/frontend/src/components/BetaBanner.tsx new file mode 100644 index 0000000..29edaa9 --- /dev/null +++ b/frontend/src/components/BetaBanner.tsx @@ -0,0 +1,49 @@ +'use client' + +import { useState, useEffect } from 'react' +import { X, Zap } from 'lucide-react' + +const BANNER_DISMISSED_KEY = 'pounce_beta_banner_dismissed' + +export function BetaBanner() { + const [isDismissed, setIsDismissed] = useState(true) // Start hidden to avoid flash + + useEffect(() => { + // Check localStorage after mount + const dismissed = localStorage.getItem(BANNER_DISMISSED_KEY) + setIsDismissed(dismissed === 'true') + }, []) + + const handleDismiss = () => { + setIsDismissed(true) + localStorage.setItem(BANNER_DISMISSED_KEY, 'true') + } + + if (isDismissed) return null + + return ( +
+    <div className="flex items-center gap-3 px-4 py-2 text-sm border-b">
+      <Zap className="h-4 w-4 shrink-0" />
+      <p className="flex-1">
+        <span className="font-semibold">Pounce is in Public Beta.</span>
+        {' '}I ship code daily based on your feedback. If you spot a glitch, hit me up on{' '}
+        {/* Placeholder href and class names -- set the real Discord invite URL and styling */}
+        <a href="#" target="_blank" rel="noopener noreferrer" className="underline">
+          Discord
+        </a>
+        .
+      </p>
+      <button onClick={handleDismiss} aria-label="Dismiss banner">
+        <X className="h-4 w-4" />
+      </button>
+    </div>
+ ) +} diff --git a/frontend/src/components/Footer.tsx b/frontend/src/components/Footer.tsx index 9385800..7f39543 100644 --- a/frontend/src/components/Footer.tsx +++ b/frontend/src/components/Footer.tsx @@ -3,6 +3,13 @@ import Link from 'next/link' import Image from 'next/image' import { Twitter, Mail, Linkedin, ArrowRight } from 'lucide-react' + +// Discord icon (not in lucide-react) +const DiscordIcon = ({ className }: { className?: string }) => ( + + + +) import { useStore } from '@/lib/store' import { api } from '@/lib/api' import { useCallback, useMemo, useState } from 'react' @@ -94,16 +101,25 @@ export function Footer() {
+ + +