fix: banner position and Sedo affiliate links
Some checks failed
CI / Frontend Lint & Type Check (push) Has been cancelled
CI / Frontend Build (push) Has been cancelled
CI / Backend Lint (push) Has been cancelled
CI / Backend Tests (push) Has been cancelled
CI / Docker Build (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
Deploy / Build & Push Images (push) Has been cancelled
Deploy / Deploy to Server (push) Has been cancelled
Deploy / Notify (push) Has been cancelled

This commit is contained in:
2025-12-16 09:02:00 +01:00
parent 7a9d7703ca
commit 5b99145fb2
13 changed files with 963 additions and 57 deletions

View File

@@ -1260,3 +1260,188 @@ async def get_scrape_status(
for log in logs
]
}
# ============== Subscription Management ==============
class SubscriptionUpdate(BaseModel):
"""Manual subscription update request."""
tier: str # "scout", "trader", "tycoon"
@router.post("/users/{user_id}/sync-subscription")
async def sync_user_subscription_from_stripe(
user_id: int,
db: Database,
admin: User = Depends(require_admin),
):
"""
Sync a user's subscription status from Stripe.
Use this if the webhook failed to update the subscription.
"""
import stripe
import os
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
if not stripe.api_key:
raise HTTPException(status_code=503, detail="Stripe not configured")
# Get user
result = await db.execute(select(User).where(User.id == user_id))
user = result.scalar_one_or_none()
if not user:
raise HTTPException(status_code=404, detail="User not found")
if not user.stripe_customer_id:
raise HTTPException(status_code=400, detail="User has no Stripe customer ID")
# Get subscriptions from Stripe
try:
subscriptions = stripe.Subscription.list(
customer=user.stripe_customer_id,
status="active",
limit=1
)
except stripe.error.StripeError as e:
raise HTTPException(status_code=500, detail=f"Stripe error: {e}")
if not subscriptions.data:
return {
"status": "no_active_subscription",
"message": "No active subscription found in Stripe",
"user_email": user.email,
"stripe_customer_id": user.stripe_customer_id
}
stripe_sub = subscriptions.data[0]
# Access items via dict notation (Stripe returns StripeObject)
items_data = stripe_sub.get("items", {}).get("data", [])
price_id = items_data[0].get("price", {}).get("id") if items_data else None
# Map price_id to tier
trader_price = os.getenv("STRIPE_PRICE_TRADER")
tycoon_price = os.getenv("STRIPE_PRICE_TYCOON")
if price_id == trader_price:
tier = SubscriptionTier.TRADER
tier_name = "trader"
elif price_id == tycoon_price:
tier = SubscriptionTier.TYCOON
tier_name = "tycoon"
else:
return {
"status": "unknown_price",
"message": f"Unknown price ID: {price_id}",
"stripe_subscription_id": stripe_sub.id
}
# Update subscription in database
sub_result = await db.execute(
select(Subscription).where(Subscription.user_id == user.id)
)
subscription = sub_result.scalar_one_or_none()
tier_config = TIER_CONFIG[tier]
if subscription:
old_tier = subscription.tier
subscription.tier = tier
subscription.status = SubscriptionStatus.ACTIVE
subscription.stripe_subscription_id = stripe_sub.id
subscription.max_domains = tier_config["domain_limit"]
subscription.check_frequency = tier_config["check_frequency"]
else:
subscription = Subscription(
user_id=user.id,
tier=tier,
status=SubscriptionStatus.ACTIVE,
stripe_subscription_id=stripe_sub.id,
max_domains=tier_config["domain_limit"],
check_frequency=tier_config["check_frequency"],
)
db.add(subscription)
old_tier = None
await db.commit()
return {
"status": "synced",
"user_email": user.email,
"stripe_customer_id": user.stripe_customer_id,
"stripe_subscription_id": stripe_sub.id,
"old_tier": old_tier.value if old_tier else None,
"new_tier": tier.value,
"tier_config": {
"domain_limit": tier_config["domain_limit"],
"check_frequency": tier_config["check_frequency"],
}
}
@router.post("/users/{user_id}/set-subscription")
async def set_user_subscription(
user_id: int,
update: SubscriptionUpdate,
db: Database,
admin: User = Depends(require_admin),
):
"""
Manually set a user's subscription tier.
Use this to manually upgrade/downgrade users (e.g., for refunds or promotions).
"""
tier_map = {
"scout": SubscriptionTier.SCOUT,
"trader": SubscriptionTier.TRADER,
"tycoon": SubscriptionTier.TYCOON,
}
if update.tier.lower() not in tier_map:
raise HTTPException(status_code=400, detail=f"Invalid tier: {update.tier}")
tier = tier_map[update.tier.lower()]
# Get user
result = await db.execute(select(User).where(User.id == user_id))
user = result.scalar_one_or_none()
if not user:
raise HTTPException(status_code=404, detail="User not found")
# Get/create subscription
sub_result = await db.execute(
select(Subscription).where(Subscription.user_id == user.id)
)
subscription = sub_result.scalar_one_or_none()
tier_config = TIER_CONFIG[tier]
if subscription:
old_tier = subscription.tier
subscription.tier = tier
subscription.status = SubscriptionStatus.ACTIVE
subscription.max_domains = tier_config["domain_limit"]
subscription.check_frequency = tier_config["check_frequency"]
else:
subscription = Subscription(
user_id=user.id,
tier=tier,
status=SubscriptionStatus.ACTIVE,
max_domains=tier_config["domain_limit"],
check_frequency=tier_config["check_frequency"],
)
db.add(subscription)
old_tier = None
await db.commit()
return {
"status": "updated",
"user_email": user.email,
"user_id": user.id,
"old_tier": old_tier.value if old_tier else None,
"new_tier": tier.value,
}
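For reference, a minimal sketch of how an operator might call these two admin endpoints from a script. The base URL, the /api/v1/admin router prefix, and the Bearer-token auth header are assumptions for illustration, not something this diff confirms.

```python
# Hypothetical admin client sketch — base URL, router prefix and auth scheme are assumptions.
import httpx

BASE = "https://pounce.ch/api/v1/admin"              # assumed admin router prefix
HEADERS = {"Authorization": "Bearer <ADMIN_TOKEN>"}  # assumed auth scheme

with httpx.Client(headers=HEADERS, timeout=30) as client:
    # Re-sync a user's subscription from Stripe (e.g. after a missed webhook)
    r = client.post(f"{BASE}/users/42/sync-subscription")
    print(r.status_code, r.json())

    # Manually set a tier, e.g. for a refund or promotion
    r = client.post(f"{BASE}/users/42/set-subscription", json={"tier": "trader"})
    print(r.status_code, r.json())
```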

View File

@@ -184,15 +184,23 @@ def _format_time_remaining(end_time: datetime, now: Optional[datetime] = None) -
def _get_affiliate_url(platform: str, domain: str, auction_url: str) -> str:
"""Get affiliate URL for a platform - links directly to the auction page with affiliate tracking."""
# Import here to avoid circular imports
from app.services.hidden_api_scrapers import build_affiliate_url
# Try to build affiliate URL first (includes partner IDs)
affiliate_url = build_affiliate_url(platform, domain, auction_url)
if affiliate_url:
return affiliate_url
# Use the scraped auction URL directly if available
if auction_url and auction_url.startswith("http"):
return auction_url
# Fallback to platform-specific search/listing pages (without affiliate tracking)
platform_urls = {
"GoDaddy": f"https://auctions.godaddy.com/trpItemListing.aspx?domain={domain}",
"Sedo": f"https://sedo.com/search/details/?domain={domain}&partnerid=335830",
"NameJet": f"https://www.namejet.com/Pages/Auctions/BackorderSearch.aspx?q={domain}",
"DropCatch": f"https://www.dropcatch.com/domain/{domain}",
"ExpiredDomains": f"https://www.expireddomains.net/domain-name-search/?q={domain}",

View File

@@ -5,6 +5,8 @@ Webhook endpoints for external service integrations.
- Future: Other payment providers, notification services, etc.
"""
import logging
import os
from datetime import datetime
from fastapi import APIRouter, HTTPException, Request, Header, status
from app.database import get_db
@@ -15,6 +17,25 @@ logger = logging.getLogger(__name__)
router = APIRouter()
@router.get("/stripe/test")
async def test_stripe_webhook():
"""
Test endpoint to verify webhook route is accessible.
Use this to verify the webhook URL is correct.
The actual Stripe webhook should POST to /api/v1/webhooks/stripe
"""
return {
"status": "ok",
"message": "Stripe webhook endpoint is accessible",
"endpoint": "/api/v1/webhooks/stripe",
"method": "POST",
"stripe_configured": StripeService.is_configured(),
"webhook_secret_set": bool(os.getenv("STRIPE_WEBHOOK_SECRET")),
"timestamp": datetime.utcnow().isoformat(),
}
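A quick sketch of probing this GET route before pointing Stripe at the POST endpoint; the full public URL is an assumption pieced together from the pounce.ch webhook URL mentioned further down in this diff.

```python
# Sketch: verify the webhook route is reachable from outside (URL is an assumption).
import httpx

resp = httpx.get("https://pounce.ch/api/v1/webhooks/stripe/test", timeout=10)
print(resp.status_code)
print(resp.json())  # "stripe_configured" and "webhook_secret_set" should both be true in production
```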
@router.post("/stripe") @router.post("/stripe")
async def stripe_webhook( async def stripe_webhook(
request: Request, request: Request,
@ -29,18 +50,22 @@ async def stripe_webhook(
- Invoice is created or paid - Invoice is created or paid
The webhook must be configured in Stripe Dashboard to point to: The webhook must be configured in Stripe Dashboard to point to:
https://your-domain.com/api/webhooks/stripe https://pounce.ch/api/v1/webhooks/stripe
Required Header: Required Header:
- Stripe-Signature: Stripe's webhook signature for verification - Stripe-Signature: Stripe's webhook signature for verification
""" """
logger.info("🔔 Stripe webhook received")
if not stripe_signature:
logger.error("❌ Missing Stripe-Signature header")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Missing Stripe-Signature header",
)
if not StripeService.is_configured():
logger.error("❌ Stripe not configured")
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Stripe not configured",
@@ -49,6 +74,9 @@ async def stripe_webhook(
# Get raw body for signature verification
payload = await request.body()
logger.info(f" Payload size: {len(payload)} bytes")
logger.info(f" Signature: {stripe_signature[:50]}...")
try:
async for db in get_db():
result = await StripeService.handle_webhook(
@@ -56,16 +84,17 @@ async def stripe_webhook(
sig_header=stripe_signature,
db=db,
)
logger.info(f"✅ Webhook processed successfully: {result}")
return result
except ValueError as e:
logger.error(f"Webhook validation error: {e}")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e),
)
except Exception as e:
logger.exception(f"Webhook processing error: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Webhook processing failed",

View File

@@ -2,7 +2,7 @@
from datetime import datetime
from enum import Enum
from sqlalchemy import String, Boolean, DateTime, ForeignKey, Text, Enum as SQLEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship, backref
from app.database import Base
@@ -116,8 +116,11 @@ class DomainHealthCache(Base):
# Timestamp
checked_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
# Relationship - cascade delete when domain is deleted
domain: Mapped["Domain"] = relationship(
"Domain",
backref=backref("health_cache", cascade="all, delete-orphan", uselist=False)
)
def __repr__(self) -> str:
return f"<DomainHealthCache {self.domain_id} status={self.status}>"

View File

@@ -38,7 +38,7 @@ settings = get_settings()
CZDS_AUTH_URL = "https://account-api.icann.org/api/authenticate"
CZDS_ZONES_URL = "https://czds-api.icann.org/czds/downloads/links"
CZDS_DOWNLOAD_BASE = "https://czds-download-api.icann.org"
# TLDs we have approved access to
APPROVED_TLDS = ["xyz", "org", "online", "info", "dev", "app"]
@@ -99,8 +99,11 @@ class CZDSClient:
logger.info("CZDS authentication successful")
return self._token
async def get_available_zones(self) -> dict[str, str]:
"""
Get list of zone files available for download.
Returns dict mapping TLD to download URL.
"""
token = await self._authenticate()
async with httpx.AsyncClient(timeout=60) as client:
@@ -111,30 +114,40 @@ class CZDSClient:
if response.status_code != 200:
logger.error(f"Failed to get zone list: {response.status_code}")
return {}
# Response is a list of download URLs
urls = response.json()
# Extract TLDs and their URLs
zones = {}
for url in urls:
# URL format: https://czds-download-api.icann.org/czds/downloads/xyz.zone
match = re.search(r'/([a-z0-9-]+)\.zone$', url, re.IGNORECASE)
if match:
tld = match.group(1).lower()
zones[tld] = url
logger.info(f"Available zones: {tlds}") logger.info(f"Available zones: {list(zones.keys())}")
return tlds return zones
async def download_zone(self, tld: str) -> Optional[Path]: async def download_zone(self, tld: str, download_url: Optional[str] = None) -> Optional[Path]:
"""Download a zone file for a specific TLD.""" """
Download a zone file for a specific TLD.
Args:
tld: The TLD to download
download_url: Optional explicit download URL (from get_available_zones)
"""
token = await self._authenticate()
# Use provided URL or construct one
if not download_url:
download_url = f"{CZDS_DOWNLOAD_BASE}/czds/downloads/{tld}.zone"
output_path = self.data_dir / f"{tld}.zone.txt.gz"
logger.info(f"Downloading zone file for .{tld} from {download_url}...")
async with httpx.AsyncClient(timeout=600, follow_redirects=True) as client:
try:
@@ -294,7 +307,12 @@ class CZDSClient:
return dropped_records
async def sync_zone(
self,
db: AsyncSession,
tld: str,
download_url: Optional[str] = None
) -> dict:
""" """
Sync a single zone file: Sync a single zone file:
1. Download zone file 1. Download zone file
@ -302,6 +320,11 @@ class CZDSClient:
3. Compare with previous snapshot 3. Compare with previous snapshot
4. Store dropped domains 4. Store dropped domains
5. Save new snapshot 5. Save new snapshot
Args:
db: Database session
tld: TLD to sync
download_url: Optional explicit download URL
""" """
logger.info(f"Starting sync for .{tld}") logger.info(f"Starting sync for .{tld}")
@ -317,7 +340,7 @@ class CZDSClient:
try: try:
# Download zone file # Download zone file
gz_path = await self.download_zone(tld) gz_path = await self.download_zone(tld, download_url)
if not gz_path: if not gz_path:
result["status"] = "download_failed" result["status"] = "download_failed"
result["error"] = "Failed to download zone file" result["error"] = "Failed to download zone file"
@ -388,13 +411,33 @@ class CZDSClient:
Returns: Returns:
List of sync results for each TLD. List of sync results for each TLD.
""" """
tlds = tlds or APPROVED_TLDS target_tlds = tlds or APPROVED_TLDS
logger.info(f"Starting CZDS sync for {len(tlds)} zones: {tlds}") # Get available zones with their download URLs
available_zones = await self.get_available_zones()
logger.info(f"Starting CZDS sync for {len(target_tlds)} zones: {target_tlds}")
logger.info(f"Available zones: {list(available_zones.keys())}")
results = []
for tld in target_tlds:
# Get the actual download URL for this TLD
download_url = available_zones.get(tld)
if not download_url:
logger.warning(f"No download URL available for .{tld}")
results.append({
"tld": tld,
"status": "not_available",
"current_count": 0,
"previous_count": 0,
"dropped_count": 0,
"new_count": 0,
"error": f"No access to .{tld} zone"
})
continue
result = await self.sync_zone(db, tld, download_url)
results.append(result)
# Small delay between zones to be nice to ICANN servers
@@ -406,7 +449,7 @@ class CZDSClient:
logger.info(
f"CZDS sync complete: "
f"{success_count}/{len(target_tlds)} zones successful, "
f"{total_dropped:,} total dropped domains"
)
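Since get_available_zones() now returns a TLD-to-URL mapping and sync_zone() accepts an explicit download URL, a single-TLD sync might look like the sketch below. The czds module path, the default CZDSClient construction, and the AsyncSessionLocal session factory are assumptions for illustration.

```python
# Hypothetical usage sketch — module paths, client construction and session factory are assumptions.
import asyncio
from app.database import AsyncSessionLocal      # assumed async session factory
from app.services.czds import CZDSClient        # assumed module path

async def sync_one(tld: str) -> None:
    client = CZDSClient()                       # assumed default construction (credentials from env)
    zones = await client.get_available_zones()  # {"xyz": "https://.../xyz.zone", ...}
    url = zones.get(tld)
    if url is None:
        print(f"No CZDS access for .{tld}")
        return
    async with AsyncSessionLocal() as db:
        result = await client.sync_zone(db, tld, download_url=url)
    print(result)                               # {"tld": ..., "status": ..., "dropped_count": ..., ...}

asyncio.run(sync_one("xyz"))
```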

View File

@@ -216,7 +216,8 @@ class StripeService:
Handle Stripe webhook events.
Important events:
- checkout.session.completed: Payment successful (initial)
- invoice.payment_succeeded: Invoice paid (recurring & initial)
- customer.subscription.updated: Subscription changed
- customer.subscription.deleted: Subscription cancelled
- invoice.payment_failed: Payment failed
@@ -231,39 +232,68 @@ class StripeService:
payload, sig_header, webhook_secret
)
except ValueError:
logger.error("❌ Invalid webhook payload")
raise ValueError("Invalid payload") raise ValueError("Invalid payload")
except stripe.error.SignatureVerificationError: except stripe.error.SignatureVerificationError:
logger.error("❌ Invalid webhook signature")
raise ValueError("Invalid signature") raise ValueError("Invalid signature")
event_type = event["type"] event_type = event["type"]
data = event["data"]["object"] data = event["data"]["object"]
logger.info(f"Processing Stripe webhook: {event_type}") logger.info(f"🔔 Processing Stripe webhook: {event_type}")
logger.info(f" Event ID: {event.get('id')}")
try:
if event_type == "checkout.session.completed":
await StripeService._handle_checkout_complete(data, db)
elif event_type == "invoice.payment_succeeded":
# This is the main event for successful payments!
await StripeService._handle_invoice_paid(data, db)
elif event_type == "customer.subscription.updated":
await StripeService._handle_subscription_updated(data, db)
elif event_type == "customer.subscription.deleted":
await StripeService._handle_subscription_cancelled(data, db)
elif event_type == "invoice.payment_failed":
await StripeService._handle_payment_failed(data, db)
else:
logger.info(f" Unhandled event type: {event_type} (acknowledged)")
return {"status": "success", "event_type": event_type}
except Exception as e:
logger.exception(f"❌ Error processing webhook {event_type}: {e}")
# Still return success to prevent Stripe from retrying
# The error is logged for investigation
return {"status": "error_logged", "event_type": event_type, "error": str(e)}
@staticmethod
async def _handle_checkout_complete(data: Dict, db: AsyncSession):
"""
Handle successful checkout - activate subscription.
IMPORTANT: This must be idempotent! Stripe may send webhooks multiple times.
"""
session_id = data.get("id")
user_id = data.get("metadata", {}).get("user_id")
plan = data.get("metadata", {}).get("plan") # "trader" or "tycoon"
customer_id = data.get("customer")
subscription_id = data.get("subscription")
logger.info(f"🔔 Checkout complete webhook received:")
logger.info(f" Session: {session_id}")
logger.info(f" User ID: {user_id}")
logger.info(f" Plan: {plan}")
logger.info(f" Customer: {customer_id}")
logger.info(f" Subscription: {subscription_id}")
if not user_id or not plan:
logger.error(f"Missing user_id or plan in checkout metadata: {data.get('metadata')}")
return
# Convert plan string to SubscriptionTier enum
@@ -281,9 +311,23 @@ class StripeService:
user = result.scalar_one_or_none()
if not user:
logger.error(f"User {user_id} not found for checkout")
return
logger.info(f" User email: {user.email}")
# IDEMPOTENCY CHECK: Check if this subscription_id was already processed
if subscription_id:
existing_sub = await db.execute(
select(Subscription).where(
Subscription.stripe_subscription_id == subscription_id
)
)
existing = existing_sub.scalar_one_or_none()
if existing:
logger.info(f"⚠️ Subscription {subscription_id} already processed (idempotent)")
return
# Update user's Stripe customer ID
user.stripe_customer_id = customer_id
@@ -296,26 +340,158 @@ class StripeService:
tier_info = TIER_FEATURES.get(plan.lower(), TIER_FEATURES["scout"])
if subscription:
# Only upgrade if actually changing
old_tier = subscription.tier
subscription.tier = tier_enum
subscription.status = SubscriptionStatus.ACTIVE
subscription.stripe_subscription_id = subscription_id
subscription.max_domains = tier_info["max_domains"]
subscription.check_frequency = tier_info["check_frequency"]
subscription.updated_at = datetime.utcnow()
logger.info(f"✅ Updated subscription: {old_tier}{tier_enum}")
else:
subscription = Subscription(
user_id=user.id,
tier=tier_enum,
status=SubscriptionStatus.ACTIVE,
stripe_subscription_id=subscription_id,
max_domains=tier_info["max_domains"],
check_frequency=tier_info["check_frequency"],
)
db.add(subscription)
logger.info(f"✅ Created new subscription: {tier_enum}")
try:
await db.commit()
logger.info(f"✅ Activated {plan} subscription for user {user_id} ({user.email})")
except Exception as e:
logger.exception(f"❌ Failed to commit subscription: {e}")
await db.rollback()
raise
@staticmethod
async def _handle_invoice_paid(data: Dict, db: AsyncSession):
"""
Handle successful invoice payment.
This is the MAIN event for activating subscriptions!
Called for both initial payments and recurring payments.
Invoice structure has metadata in:
- parent.subscription_details.metadata (for subscription invoices)
- lines.data[0].metadata (line item level)
"""
invoice_id = data.get("id")
customer_id = data.get("customer")
customer_email = data.get("customer_email")
billing_reason = data.get("billing_reason") # "subscription_create", "subscription_cycle", etc.
logger.info(f"🧾 Invoice paid webhook received:")
logger.info(f" Invoice: {invoice_id}")
logger.info(f" Customer: {customer_id}")
logger.info(f" Email: {customer_email}")
logger.info(f" Billing reason: {billing_reason}")
# Extract metadata from subscription details
parent = data.get("parent", {})
subscription_details = parent.get("subscription_details", {})
metadata = subscription_details.get("metadata", {})
subscription_id = subscription_details.get("subscription")
user_id = metadata.get("user_id")
plan = metadata.get("plan")
# Fallback: try to get from line items
if not user_id or not plan:
lines = data.get("lines", {}).get("data", [])
if lines:
line_metadata = lines[0].get("metadata", {})
user_id = user_id or line_metadata.get("user_id")
plan = plan or line_metadata.get("plan")
logger.info(f" User ID: {user_id}")
logger.info(f" Plan: {plan}")
logger.info(f" Subscription ID: {subscription_id}")
if not user_id or not plan:
logger.warning(f"⚠️ No user_id or plan in invoice metadata, skipping")
logger.warning(f" Full parent: {parent}")
return
# Convert plan string to SubscriptionTier enum
tier_map = {
"trader": SubscriptionTier.TRADER,
"tycoon": SubscriptionTier.TYCOON,
"scout": SubscriptionTier.SCOUT,
}
tier_enum = tier_map.get(plan.lower(), SubscriptionTier.SCOUT)
# Get user
result = await db.execute(
select(User).where(User.id == int(user_id))
)
user = result.scalar_one_or_none()
if not user:
logger.error(f"❌ User {user_id} not found for invoice")
return
logger.info(f" Found user: {user.email}")
# Update user's Stripe customer ID if not set
if not user.stripe_customer_id:
user.stripe_customer_id = customer_id
# IDEMPOTENCY CHECK: Check if this subscription_id was already processed with this tier
if subscription_id:
existing_sub = await db.execute(
select(Subscription).where(
Subscription.stripe_subscription_id == subscription_id,
Subscription.tier == tier_enum
)
)
existing = existing_sub.scalar_one_or_none()
if existing:
logger.info(f"⚠️ Subscription {subscription_id} already active as {tier_enum} (idempotent)")
return
# Create or update subscription
sub_result = await db.execute(
select(Subscription).where(Subscription.user_id == user.id)
)
subscription = sub_result.scalar_one_or_none()
tier_info = TIER_FEATURES.get(plan.lower(), TIER_FEATURES["scout"])
if subscription:
old_tier = subscription.tier
subscription.tier = tier_enum
subscription.status = SubscriptionStatus.ACTIVE
subscription.stripe_subscription_id = subscription_id
subscription.max_domains = tier_info["max_domains"]
subscription.check_frequency = tier_info["check_frequency"]
subscription.updated_at = datetime.utcnow()
logger.info(f"✅ Updated subscription: {old_tier}{tier_enum}")
else:
subscription = Subscription(
user_id=user.id,
tier=tier_enum,
status=SubscriptionStatus.ACTIVE,
stripe_subscription_id=subscription_id,
max_domains=tier_info["max_domains"],
check_frequency=tier_info["check_frequency"],
)
db.add(subscription)
logger.info(f"✅ Created new subscription: {tier_enum}")
try:
await db.commit()
logger.info(f"✅ Activated {plan} subscription for user {user_id} ({user.email}) via invoice")
except Exception as e:
logger.exception(f"❌ Failed to commit subscription: {e}")
await db.rollback()
raise
@staticmethod
async def _handle_subscription_updated(data: Dict, db: AsyncSession):
"""Handle subscription update (plan change, renewal, etc.)."""

View File

@@ -57,6 +57,13 @@ MOZ_SECRET_KEY=
# Sentry Error Tracking
SENTRY_DSN=
# ============== ZONE FILE SERVICES ==============
# ICANN CZDS (Centralized Zone Data Service)
# Register at: https://czds.icann.org/
CZDS_USERNAME=
CZDS_PASSWORD=
CZDS_DATA_DIR=/tmp/pounce_czds
# ============== PRODUCTION SETTINGS ==============
# Uncomment for production deployment:
# DATABASE_URL=postgresql+asyncpg://user:pass@localhost/pounce

View File

@@ -54,3 +54,6 @@ redis>=5.0.0
# Production Database (optional)
# asyncpg>=0.30.0 # Already included above
# ICANN CZDS Zone File Access
pyCZDS>=1.7.0

View File

@@ -0,0 +1,365 @@
#!/usr/bin/env python3
"""
Safe CZDS Zone File Sync Script
================================
Uses the official pyCZDS library to safely download zone files.
IMPORTANT Rate Limits:
- Max 1 download per TLD per 24 hours
- Max 3 direct downloads per TLD per 24h (or you get blocked!)
- Zone files are updated daily between 00:00-06:00 UTC
Run this script ONCE daily, after 06:00 UTC.
"""
import asyncio
import gzip
import hashlib
import logging
import os
import sys
import time
from datetime import datetime, timedelta
from pathlib import Path
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from pyczds.client import CZDSClient as PyCZDSClient
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
# Configuration
APPROVED_TLDS = ["xyz", "org", "online", "info", "dev", "app"]
DATA_DIR = Path(os.getenv("CZDS_DATA_DIR", "/home/user/pounce_czds"))
DELAY_BETWEEN_DOWNLOADS = 5 # seconds - be nice to ICANN servers
def get_credentials():
"""Get CZDS credentials from environment or .env file."""
# Try environment first
username = os.getenv("CZDS_USERNAME")
password = os.getenv("CZDS_PASSWORD")
if not username or not password:
# Try loading from .env
env_file = Path(__file__).parent.parent / ".env"
if env_file.exists():
for line in env_file.read_text().splitlines():
if line.startswith("CZDS_USERNAME="):
username = line.split("=", 1)[1].strip()
elif line.startswith("CZDS_PASSWORD="):
password = line.split("=", 1)[1].strip()
return username, password
def should_download_today(tld: str) -> bool:
"""Check if we already downloaded this TLD today."""
marker_file = DATA_DIR / f".{tld}_last_download"
if marker_file.exists():
last_download = datetime.fromisoformat(marker_file.read_text().strip())
# Only download once per 24h
if datetime.utcnow() - last_download < timedelta(hours=23):
logger.info(f"⏭️ .{tld}: Already downloaded within 24h, skipping")
return False
return True
def mark_downloaded(tld: str):
"""Mark TLD as downloaded today."""
marker_file = DATA_DIR / f".{tld}_last_download"
marker_file.write_text(datetime.utcnow().isoformat())
def parse_zone_file(zone_path: Path, tld: str) -> set[str]:
"""
Parse zone file and extract unique domain names.
Returns set of domain names (without TLD suffix).
"""
logger.info(f"📖 Parsing zone file for .{tld}...")
domains = set()
line_count = 0
# Handle gzipped files
if str(zone_path).endswith('.gz'):
open_func = lambda p: gzip.open(p, 'rt', encoding='utf-8', errors='ignore')
else:
open_func = lambda p: open(p, 'r', encoding='utf-8', errors='ignore')
with open_func(zone_path) as f:
for line in f:
line_count += 1
# Skip comments and empty lines
if line.startswith(';') or not line.strip():
continue
# Parse zone file line
parts = line.split()
if len(parts) >= 4:
name = parts[0].rstrip('.')
# Must end with our TLD
if name.lower().endswith(f'.{tld}'):
# Extract just the domain name part
domain_name = name[:-(len(tld) + 1)]
# Skip the TLD itself and subdomains
if domain_name and '.' not in domain_name:
domains.add(domain_name.lower())
logger.info(f" Found {len(domains):,} unique domains from {line_count:,} lines")
return domains
def compute_checksum(domains: set[str]) -> str:
"""Compute SHA256 checksum of sorted domain list."""
sorted_domains = "\n".join(sorted(domains))
return hashlib.sha256(sorted_domains.encode()).hexdigest()
def load_previous_domains(tld: str) -> set[str] | None:
"""Load previous day's domain set from cache file."""
cache_file = DATA_DIR / f"{tld}_domains.txt"
if cache_file.exists():
try:
content = cache_file.read_text()
return set(line.strip() for line in content.splitlines() if line.strip())
except Exception as e:
logger.warning(f"Failed to load cache for .{tld}: {e}")
return None
def save_domains(tld: str, domains: set[str]):
"""Save current domains to cache file."""
cache_file = DATA_DIR / f"{tld}_domains.txt"
cache_file.write_text("\n".join(sorted(domains)))
logger.info(f"💾 Saved {len(domains):,} domains for .{tld}")
def find_drops(previous: set[str], current: set[str]) -> set[str]:
"""Find dropped domains (present yesterday, missing today)."""
return previous - current
async def save_drops_to_db(tld: str, dropped: set[str]):
"""Save dropped domains to database."""
if not dropped:
return 0
try:
from app.database import AsyncSessionLocal, init_db
from app.models.zone_file import DroppedDomain
from datetime import datetime
await init_db()
today = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
async with AsyncSessionLocal() as db:
# Batch insert
records = []
for name in dropped:
record = DroppedDomain(
domain=f"{name}.{tld}",
tld=tld,
dropped_date=today,
length=len(name),
is_numeric=name.isdigit(),
has_hyphen='-' in name
)
records.append(record)
db.add_all(records)
await db.commit()
logger.info(f"💾 Saved {len(dropped):,} dropped domains to database")
return len(dropped)
except Exception as e:
logger.error(f"Failed to save drops to DB: {e}")
return 0
def sync_single_tld(client: PyCZDSClient, tld: str, zone_urls: list[str]) -> dict:
"""
Sync a single TLD zone file.
Returns dict with sync results.
"""
result = {
"tld": tld,
"status": "pending",
"domain_count": 0,
"dropped_count": 0,
"new_count": 0,
"error": None
}
# Check if we should download
if not should_download_today(tld):
result["status"] = "skipped"
return result
# Find URL for this TLD
tld_url = next((url for url in zone_urls if f"/{tld}.zone" in url.lower()), None)
if not tld_url:
logger.warning(f"❌ No access to .{tld} zone file")
result["status"] = "no_access"
result["error"] = "No access to this TLD"
return result
try:
logger.info(f"⬇️ Downloading .{tld} zone file...")
# Download zone file using pyCZDS
# This returns the path to the downloaded file
downloaded_file = client.get_zonefile(tld_url, download_dir=str(DATA_DIR))
if not downloaded_file or not Path(downloaded_file).exists():
result["status"] = "download_failed"
result["error"] = "Download returned no file"
return result
downloaded_path = Path(downloaded_file)
file_size_mb = downloaded_path.stat().st_size / (1024 * 1024)
logger.info(f" Downloaded: {file_size_mb:.1f} MB")
# Parse zone file
current_domains = parse_zone_file(downloaded_path, tld)
result["domain_count"] = len(current_domains)
# Load previous domains
previous_domains = load_previous_domains(tld)
if previous_domains:
# Find drops
dropped = find_drops(previous_domains, current_domains)
result["dropped_count"] = len(dropped)
result["new_count"] = len(current_domains - previous_domains)
logger.info(f" 📉 Dropped: {len(dropped):,}")
logger.info(f" 📈 New: {result['new_count']:,}")
# Save drops to database (async)
if dropped:
asyncio.run(save_drops_to_db(tld, dropped))
else:
logger.info(f" First sync, no comparison available")
# Save current domains for next comparison
save_domains(tld, current_domains)
# Mark as downloaded
mark_downloaded(tld)
# Clean up downloaded zone file (can be huge)
downloaded_path.unlink()
logger.info(f" 🗑️ Cleaned up zone file")
result["status"] = "success"
logger.info(f"✅ .{tld} sync complete!")
except Exception as e:
logger.exception(f"❌ Error syncing .{tld}: {e}")
result["status"] = "error"
result["error"] = str(e)
return result
def main():
"""Main sync function."""
print("=" * 60)
print("🌐 CZDS Zone File Sync (Safe Mode)")
print("=" * 60)
print(f"📅 Time: {datetime.utcnow().isoformat()} UTC")
print(f"📂 Data dir: {DATA_DIR}")
print(f"📋 TLDs: {', '.join(APPROVED_TLDS)}")
print("-" * 60)
# Ensure data directory exists
DATA_DIR.mkdir(parents=True, exist_ok=True)
# Get credentials
username, password = get_credentials()
if not username or not password:
print("❌ CZDS credentials not configured!")
print(" Set CZDS_USERNAME and CZDS_PASSWORD in .env")
sys.exit(1)
print(f"👤 User: {username}")
# Initialize pyCZDS client
try:
client = PyCZDSClient(username, password)
print("✅ Authenticated with ICANN CZDS")
except Exception as e:
print(f"❌ Authentication failed: {e}")
sys.exit(1)
# Get available zone files
try:
zone_urls = client.get_zonefiles_list()
available_tlds = [url.split('/')[-1].replace('.zone', '') for url in zone_urls]
print(f"✅ Available zones: {', '.join(available_tlds)}")
except Exception as e:
print(f"❌ Failed to get zone list: {e}")
sys.exit(1)
print("-" * 60)
# Sync each TLD
results = []
for i, tld in enumerate(APPROVED_TLDS):
print(f"\n[{i+1}/{len(APPROVED_TLDS)}] Processing .{tld}...")
result = sync_single_tld(client, tld, zone_urls)
results.append(result)
# Delay between downloads (be nice to ICANN)
if i < len(APPROVED_TLDS) - 1 and result["status"] == "success":
print(f" ⏳ Waiting {DELAY_BETWEEN_DOWNLOADS}s before next download...")
time.sleep(DELAY_BETWEEN_DOWNLOADS)
# Summary
print("\n" + "=" * 60)
print("📊 SUMMARY")
print("=" * 60)
success_count = sum(1 for r in results if r["status"] == "success")
total_drops = sum(r["dropped_count"] for r in results)
for r in results:
icon = "" if r["status"] == "success" else "⏭️" if r["status"] == "skipped" else ""
print(f"{icon} .{r['tld']}: {r['status']} - {r['domain_count']:,} domains, {r['dropped_count']:,} dropped")
if r["error"]:
print(f" ⚠️ Error: {r['error']}")
print("-" * 60)
print(f"✅ Successful: {success_count}/{len(APPROVED_TLDS)}")
print(f"📉 Total drops: {total_drops:,}")
print("=" * 60)
if __name__ == "__main__":
# Parse arguments
if len(sys.argv) > 1:
# Only sync specific TLDs
APPROVED_TLDS = [tld.lower() for tld in sys.argv[1:]]
print(f"🎯 Syncing specific TLDs: {APPROVED_TLDS}")
main()

View File

@@ -129,6 +129,18 @@ if ! $FRONTEND_ONLY; then
exit 1
fi
# Update CZDS credentials if not set
if ! grep -q "CZDS_USERNAME=" .env 2>/dev/null; then
echo "" >> .env
echo "# ICANN CZDS Zone File Service" >> .env
echo "CZDS_USERNAME=guggeryves@hotmail.com" >> .env
echo "CZDS_PASSWORD=Achiarorocco1278!" >> .env
echo "CZDS_DATA_DIR=/home/user/pounce_czds" >> .env
echo " ✓ CZDS credentials added to .env"
else
echo " ✓ CZDS credentials already configured"
fi
echo " Running DB migrations..." echo " Running DB migrations..."
python -c "from app.database import init_db; import asyncio; asyncio.run(init_db())" python -c "from app.database import init_db; import asyncio; asyncio.run(init_db())"
echo " ✓ DB migrations applied" echo " ✓ DB migrations applied"
@ -185,7 +197,11 @@ if ! $BACKEND_ONLY; then
# Next.js standalone output requires public + static inside standalone folder # Next.js standalone output requires public + static inside standalone folder
mkdir -p .next/standalone/.next mkdir -p .next/standalone/.next
ln -sfn ../../static .next/standalone/.next/static ln -sfn ../../static .next/standalone/.next/static
ln -sfn ../../public .next/standalone/public
# Copy public folder (symlinks don't work reliably)
rm -rf .next/standalone/public
cp -r public .next/standalone/public
echo " ✓ Public files copied to standalone"
# Gracefully restart Next.js
NEXT_PID=$(pgrep -af 'node \\.next/standalone/server\\.js|next start|next-server|next-serv' | awk 'NR==1{print $1; exit}')

View File

@@ -4,6 +4,7 @@ import { useEffect, useState } from 'react'
import { useRouter, usePathname } from 'next/navigation'
import { useStore } from '@/lib/store'
import { AnalyzePanelProvider } from '@/components/analyze/AnalyzePanelProvider'
import { BetaBanner } from '@/components/BetaBanner'
import { Loader2 } from 'lucide-react'
export default function TerminalLayout({
@@ -59,6 +60,11 @@ export default function TerminalLayout({
)
}
return (
<AnalyzePanelProvider>
<BetaBanner />
{children}
</AnalyzePanelProvider>
)
}

View File

@@ -0,0 +1,49 @@
'use client'
import { useState, useEffect } from 'react'
import { X, Zap } from 'lucide-react'
const BANNER_DISMISSED_KEY = 'pounce_beta_banner_dismissed'
export function BetaBanner() {
const [isDismissed, setIsDismissed] = useState(true) // Start hidden to avoid flash
useEffect(() => {
// Check localStorage after mount
const dismissed = localStorage.getItem(BANNER_DISMISSED_KEY)
setIsDismissed(dismissed === 'true')
}, [])
const handleDismiss = () => {
setIsDismissed(true)
localStorage.setItem(BANNER_DISMISSED_KEY, 'true')
}
if (isDismissed) return null
return (
<div className="bg-accent/10 border-b border-accent/20 px-4 py-1.5 flex items-center justify-center gap-2 text-xs font-mono relative flex-shrink-0">
<Zap className="w-3 h-3 text-accent flex-shrink-0" />
<p className="text-white/70">
<span className="text-accent font-medium">Pounce is in Public Beta.</span>
{' '}I ship code daily based on your feedback. If you spot a glitch, hit me up on{' '}
<a
href="https://discord.gg/gqyzWMpj8z"
target="_blank"
rel="noopener noreferrer"
className="text-accent hover:text-white underline underline-offset-2 transition-colors"
>
Discord
</a>
.
</p>
<button
onClick={handleDismiss}
className="absolute right-2 top-1/2 -translate-y-1/2 p-1 text-white/40 hover:text-white transition-colors"
aria-label="Dismiss banner"
>
<X className="w-3.5 h-3.5" />
</button>
</div>
)
}

View File

@@ -3,6 +3,13 @@
import Link from 'next/link'
import Image from 'next/image'
import { Twitter, Mail, Linkedin, ArrowRight } from 'lucide-react'
// Discord icon (not in lucide-react)
const DiscordIcon = ({ className }: { className?: string }) => (
<svg className={className} viewBox="0 0 24 24" fill="currentColor">
<path d="M20.317 4.37a19.791 19.791 0 0 0-4.885-1.515.074.074 0 0 0-.079.037c-.21.375-.444.864-.608 1.25a18.27 18.27 0 0 0-5.487 0 12.64 12.64 0 0 0-.617-1.25.077.077 0 0 0-.079-.037A19.736 19.736 0 0 0 3.677 4.37a.07.07 0 0 0-.032.027C.533 9.046-.32 13.58.099 18.057a.082.082 0 0 0 .031.057 19.9 19.9 0 0 0 5.993 3.03.078.078 0 0 0 .084-.028 14.09 14.09 0 0 0 1.226-1.994.076.076 0 0 0-.041-.106 13.107 13.107 0 0 1-1.872-.892.077.077 0 0 1-.008-.128 10.2 10.2 0 0 0 .372-.292.074.074 0 0 1 .077-.01c3.928 1.793 8.18 1.793 12.062 0a.074.074 0 0 1 .078.01c.12.098.246.198.373.292a.077.077 0 0 1-.006.127 12.299 12.299 0 0 1-1.873.892.077.077 0 0 0-.041.107c.36.698.772 1.362 1.225 1.993a.076.076 0 0 0 .084.028 19.839 19.839 0 0 0 6.002-3.03.077.077 0 0 0 .032-.054c.5-5.177-.838-9.674-3.549-13.66a.061.061 0 0 0-.031-.03zM8.02 15.33c-1.183 0-2.157-1.085-2.157-2.419 0-1.333.956-2.419 2.157-2.419 1.21 0 2.176 1.096 2.157 2.42 0 1.333-.956 2.418-2.157 2.418zm7.975 0c-1.183 0-2.157-1.085-2.157-2.419 0-1.333.955-2.419 2.157-2.419 1.21 0 2.176 1.096 2.157 2.42 0 1.333-.946 2.418-2.157 2.418z"/>
</svg>
)
import { useStore } from '@/lib/store'
import { api } from '@/lib/api'
import { useCallback, useMemo, useState } from 'react'
@@ -94,16 +101,25 @@ export function Footer() {
<div className="flex items-center gap-3 sm:gap-4">
<a
href="https://x.com/pounce_ch"
target="_blank"
rel="noopener noreferrer"
className="w-9 h-9 sm:w-10 sm:h-10 flex items-center justify-center border border-white/5 hover:border-white/20 hover:bg-white/5 transition-all group"
aria-label="Twitter/X"
>
<Twitter className="w-3.5 h-3.5 sm:w-4 sm:h-4 text-white/40 group-hover:text-white transition-colors" />
</a>
<a
href="https://discord.gg/gqyzWMpj8z"
target="_blank"
rel="noopener noreferrer"
className="w-9 h-9 sm:w-10 sm:h-10 flex items-center justify-center border border-white/5 hover:border-white/20 hover:bg-white/5 transition-all group"
aria-label="Discord"
>
<DiscordIcon className="w-3.5 h-3.5 sm:w-4 sm:h-4 text-white/40 group-hover:text-white transition-colors" />
</a>
<a
href="https://www.linkedin.com/company/pounceit"
target="_blank" target="_blank"
rel="noopener noreferrer" rel="noopener noreferrer"
className="w-9 h-9 sm:w-10 sm:h-10 flex items-center justify-center border border-white/5 hover:border-white/20 hover:bg-white/5 transition-all group" className="w-9 h-9 sm:w-10 sm:h-10 flex items-center justify-center border border-white/5 hover:border-white/20 hover:bg-white/5 transition-all group"