diff --git a/backend/app/api/auctions.py b/backend/app/api/auctions.py
index cdcbaf2..8eb17fc 100644
--- a/backend/app/api/auctions.py
+++ b/backend/app/api/auctions.py
@@ -185,6 +185,11 @@ def _format_time_remaining(end_time: datetime, now: Optional[datetime] = None) -
def _get_affiliate_url(platform: str, domain: str, auction_url: str) -> str:
"""Get affiliate URL for a platform - links directly to the auction page with affiliate tracking."""
+ # SEDO SPECIAL CASE: Always use direct Sedo link with partner ID
+ # This ensures we get affiliate revenue even from scraped data
+ if platform == "Sedo":
+ return f"https://sedo.com/search/details/?domain={domain}&partnerid=335830"
+
# Import here to avoid circular imports
from app.services.hidden_api_scrapers import build_affiliate_url
@@ -200,7 +205,6 @@ def _get_affiliate_url(platform: str, domain: str, auction_url: str) -> str:
# Fallback to platform-specific search/listing pages (without affiliate tracking)
platform_urls = {
"GoDaddy": f"https://auctions.godaddy.com/trpItemListing.aspx?domain={domain}",
- "Sedo": f"https://sedo.com/search/details/?domain={domain}&partnerid=335830",
"NameJet": f"https://www.namejet.com/Pages/Auctions/BackorderSearch.aspx?q={domain}",
"DropCatch": f"https://www.dropcatch.com/domain/{domain}",
"ExpiredDomains": f"https://www.expireddomains.net/domain-name-search/?q={domain}",
@@ -625,6 +629,50 @@ async def trigger_scrape(
raise HTTPException(status_code=500, detail=f"Scrape failed: {str(e)}")
@router.get("/sedo")
async def get_sedo_listings(
    keyword: Optional[str] = Query(None, description="Search keyword"),
    tld: Optional[str] = Query(None, description="Filter by TLD"),
    limit: int = Query(50, le=100),
    current_user: Optional[User] = Depends(get_current_user_optional),
):
    """
    Get live domain listings from Sedo marketplace.

    Returns real-time data from the Sedo API with affiliate tracking;
    all links include the Pounce partner ID for commission tracking.
    Always returns a dict with an "items" list — on any failure the list
    is empty and an "error" message is included instead of raising.
    """
    # Imported lazily to avoid circular imports at module load time.
    from app.services.sedo_api import sedo_client

    if not sedo_client.is_configured:
        return {
            "items": [],
            "error": "Sedo API not configured",
            "source": "sedo",
        }

    try:
        listings = await sedo_client.get_listings_for_display(
            keyword=keyword,
            tld=tld,
            page_size=limit,
        )
        return {
            "items": listings,
            "count": len(listings),
            "source": "sedo",
            "affiliate_note": "All links include Pounce partner ID for commission tracking",
        }
    except Exception:
        # FIX: log the full traceback server-side, but do NOT leak internal
        # error details (str(e) may contain upstream URLs/parameters) to
        # anonymous API clients.
        logger.exception("Sedo API error")
        return {
            "items": [],
            "error": "Sedo marketplace temporarily unavailable",
            "source": "sedo",
        }
+
+
@router.get("/opportunities")
async def get_smart_opportunities(
current_user: User = Depends(get_current_user),
@@ -1004,7 +1052,7 @@ async def get_market_feed(
)
built.append({"item": item, "newest_ts": listing.updated_at or listing.created_at or datetime.min})
- # External auctions
+ # External auctions (from DB)
if source in ["all", "external"]:
auction_query = select(DomainAuction).where(and_(*auction_filters))
@@ -1063,6 +1111,93 @@ async def get_market_feed(
pounce_score=pounce_score,
)
built.append({"item": item, "newest_ts": auction.updated_at or auction.scraped_at or datetime.min})
+
+ # =========================================================================
+ # LIVE SEDO DATA - Fetch and merge real-time listings from Sedo API
+ # =========================================================================
+ try:
+ from app.services.sedo_api import sedo_client
+
+ if sedo_client.is_configured:
+ # Use search keyword or fall back to popular terms for discovery
+ sedo_keyword = keyword
+ if not sedo_keyword:
+ # Fetch popular domains when no specific search
+ import random
+ popular_terms = ["ai", "tech", "crypto", "app", "cloud", "digital", "smart", "pro"]
+ sedo_keyword = random.choice(popular_terms)
+
+ # Fetch live Sedo listings (limit to avoid slow responses)
+ sedo_listings = await sedo_client.get_listings_for_display(
+ keyword=sedo_keyword,
+ tld=tld_clean,
+ page_size=min(30, limit) # Cap at 30 to avoid slow API calls
+ )
+
+ # Track domains already in results to avoid duplicates
+ existing_domains = {item["item"].domain.lower() for item in built}
+
+ for sedo_item in sedo_listings:
+ domain = sedo_item.get("domain", "").lower()
+
+ # Skip if already have this domain from scraped data
+ if domain in existing_domains:
+ continue
+
+ # Apply vanity filter for anonymous users
+ if current_user is None and not _is_premium_domain(domain):
+ continue
+
+ # Apply price filters
+ price = sedo_item.get("price", 0)
+ if min_price is not None and price < min_price and price > 0:
+ continue
+ if max_price is not None and price > max_price:
+ continue
+
+ domain_tld = sedo_item.get("tld", "")
+ pounce_score = _calculate_pounce_score_v2(
+ domain,
+ domain_tld,
+ num_bids=0,
+ age_years=0,
+ is_pounce=False,
+ )
+
+ if pounce_score < min_score:
+ continue
+
+ # Determine price type
+ price_type = "bid" if sedo_item.get("is_auction") else (
+ "negotiable" if price == 0 else "fixed"
+ )
+
+ item = MarketFeedItem(
+ id=f"sedo-live-{hash(domain) % 1000000}",
+ domain=domain,
+ tld=domain_tld,
+ price=price,
+ currency="USD",
+ price_type=price_type,
+ status="auction" if sedo_item.get("is_auction") else "instant",
+ source="Sedo",
+ is_pounce=False,
+ verified=False,
+ time_remaining=None,
+ end_time=None,
+ num_bids=None,
+ url=sedo_item.get("url", ""),
+ is_external=True,
+ pounce_score=pounce_score,
+ )
+ built.append({"item": item, "newest_ts": now})
+ existing_domains.add(domain)
+
+ # Update auction count
+ auction_total += 1
+
+ except Exception as e:
+ logger.warning(f"Failed to fetch live Sedo data: {e}")
# -----------------------------
# Merge sort (Python) + paginate
diff --git a/backend/app/api/tld_prices.py b/backend/app/api/tld_prices.py
index a98f4b3..4d6482d 100644
--- a/backend/app/api/tld_prices.py
+++ b/backend/app/api/tld_prices.py
@@ -869,6 +869,33 @@ async def compare_tld_prices(
}
def get_marketplace_links(tld: str) -> list:
    """Return aftermarket marketplace links for buying existing domains on this TLD."""
    # Sedo partner ID for affiliate tracking
    SEDO_PARTNER_ID = "335830"

    marketplaces = [
        ("Sedo", "World's largest domain marketplace",
         f"https://sedo.com/search/?keyword=.{tld}&partnerid={SEDO_PARTNER_ID}"),
        ("Afternic", "GoDaddy's premium marketplace",
         f"https://www.afternic.com/search?k=.{tld}"),
        ("Dan.com", "Fast domain transfers",
         f"https://dan.com/search?query=.{tld}"),
    ]
    return [
        {"name": name, "description": description, "url": url, "type": "marketplace"}
        for name, description, url in marketplaces
    ]
+
+
@router.get("/{tld}")
async def get_tld_details(
tld: str,
@@ -877,6 +904,9 @@ async def get_tld_details(
"""Get complete details for a specific TLD."""
tld_clean = tld.lower().lstrip(".")
+ # Marketplace links (same for all TLDs)
+ marketplace_links = get_marketplace_links(tld_clean)
+
# Try static data first
if tld_clean in TLD_DATA:
data = TLD_DATA[tld_clean]
@@ -906,6 +936,7 @@ async def get_tld_details(
},
"registrars": registrars,
"cheapest_registrar": registrars[0]["name"],
+ "marketplace_links": marketplace_links,
}
# Fall back to database
@@ -942,6 +973,7 @@ async def get_tld_details(
},
"registrars": registrars,
"cheapest_registrar": registrars[0]["name"] if registrars else "N/A",
+ "marketplace_links": marketplace_links,
}
diff --git a/backend/app/scheduler.py b/backend/app/scheduler.py
index 3f366c8..b49ddf3 100644
--- a/backend/app/scheduler.py
+++ b/backend/app/scheduler.py
@@ -933,11 +933,12 @@ async def sync_czds_zones():
async def match_sniper_alerts():
- """Match active sniper alerts against current auctions and notify users."""
+ """Match active sniper alerts against auctions AND drops and notify users."""
from app.models.sniper_alert import SniperAlert, SniperAlertMatch
from app.models.auction import DomainAuction
+ from app.models.zone_file import DroppedDomain
- logger.info("Matching sniper alerts against new auctions...")
+ logger.info("Matching sniper alerts against auctions and drops...")
try:
async with AsyncSessionLocal() as db:
@@ -952,39 +953,65 @@ async def match_sniper_alerts():
return
# Get recent auctions (added in last 2 hours)
- cutoff = datetime.utcnow() - timedelta(hours=2)
+ auction_cutoff = datetime.utcnow() - timedelta(hours=2)
auctions_result = await db.execute(
select(DomainAuction).where(
and_(
DomainAuction.is_active == True,
- DomainAuction.scraped_at >= cutoff,
+ DomainAuction.scraped_at >= auction_cutoff,
)
)
)
auctions = auctions_result.scalars().all()
- if not auctions:
- logger.info("No recent auctions to match against")
- return
+ # Get recent drops (last 24 hours)
+ drop_cutoff = datetime.utcnow() - timedelta(hours=24)
+ drops_result = await db.execute(
+ select(DroppedDomain).where(DroppedDomain.dropped_date >= drop_cutoff)
+ )
+ drops = drops_result.scalars().all()
+
+ logger.info(f"Checking {len(alerts)} alerts against {len(auctions)} auctions and {len(drops)} drops")
matches_created = 0
notifications_sent = 0
for alert in alerts:
- matching_auctions = []
+ matching_items = []
+ # Match against auctions
for auction in auctions:
if _auction_matches_alert(auction, alert):
- matching_auctions.append(auction)
+ matching_items.append({
+ 'domain': auction.domain,
+ 'source': 'auction',
+ 'platform': auction.platform,
+ 'price': auction.current_bid,
+ 'end_time': auction.end_time,
+ 'url': auction.auction_url,
+ })
- if matching_auctions:
- for auction in matching_auctions:
+ # Match against drops
+ for drop in drops:
+ if _drop_matches_alert(drop, alert):
+ full_domain = f"{drop.domain}.{drop.tld}"
+ matching_items.append({
+ 'domain': full_domain,
+ 'source': 'drop',
+ 'platform': f'.{drop.tld} zone',
+ 'price': 0,
+ 'end_time': None,
+ 'url': f"https://pounce.ch/terminal/hunt?tab=drops&search={drop.domain}",
+ })
+
+ if matching_items:
+ for item in matching_items:
# Check if this match already exists
existing = await db.execute(
select(SniperAlertMatch).where(
and_(
SniperAlertMatch.alert_id == alert.id,
- SniperAlertMatch.domain == auction.domain,
+ SniperAlertMatch.domain == item['domain'],
)
)
)
@@ -994,48 +1021,61 @@ async def match_sniper_alerts():
# Create new match
match = SniperAlertMatch(
alert_id=alert.id,
- domain=auction.domain,
- platform=auction.platform,
- current_bid=auction.current_bid,
- end_time=auction.end_time,
- auction_url=auction.auction_url,
+ domain=item['domain'],
+ platform=item['platform'],
+ current_bid=item['price'],
+ end_time=item['end_time'] or datetime.utcnow(),
+ auction_url=item['url'],
matched_at=datetime.utcnow(),
)
db.add(match)
matches_created += 1
+
+ # Update alert stats
+ alert.matches_count = (alert.matches_count or 0) + 1
+ alert.last_matched_at = datetime.utcnow()
- # Update alert last_triggered
- alert.last_triggered = datetime.utcnow()
-
- # Send notification if enabled
- if alert.notify_email:
+ # Send notification if enabled (batch notification)
+ if alert.notify_email and matching_items:
try:
user_result = await db.execute(
select(User).where(User.id == alert.user_id)
)
user = user_result.scalar_one_or_none()
- if user and email_service.is_enabled:
- # Send email with matching domains
- domains_list = ", ".join([a.domain for a in matching_auctions[:5]])
+ if user and email_service.is_configured():
+ auction_matches = [m for m in matching_items if m['source'] == 'auction']
+ drop_matches = [m for m in matching_items if m['source'] == 'drop']
+
+                        # Build HTML content
+                        html_parts = [f'<h2>Your Sniper Alert "{alert.name}" matched!</h2>']
+
+                        if auction_matches:
+                            html_parts.append(f'<h3>🎯 {len(auction_matches)} Auction Match{"es" if len(auction_matches) > 1 else ""}</h3><ul>')
+                            for m in auction_matches[:10]:
+                                html_parts.append(f'<li>{m["domain"]} - ${m["price"]:.0f} on {m["platform"]}</li>')
+                            html_parts.append('</ul>')
+
+                        if drop_matches:
+                            html_parts.append(f'<h3>🔥 {len(drop_matches)} Fresh Drop{"s" if len(drop_matches) > 1 else ""}</h3><ul>')
+                            for m in drop_matches[:10]:
+                                html_parts.append(f'<li>{m["domain"]} - Just dropped!</li>')
+                            html_parts.append('</ul>')
+
+                        html_parts.append('<p><a href="https://pounce.ch/terminal">View all matches in Pounce</a></p>')
+
await email_service.send_email(
to_email=user.email,
- subject=f"π― Sniper Alert: {len(matching_auctions)} matching domains found!",
- html_content=f"""
- Your Sniper Alert "{alert.name}" matched!
- We found {len(matching_auctions)} domains matching your criteria:
-
- {"".join(f"- {a.domain} - ${a.current_bid:.0f} on {a.platform}
" for a in matching_auctions[:10])}
-
- View all matches in your Command Center
- """
+                            subject=f"🎯 Sniper Alert: {len(matching_items)} matching domains found!",
+ html_content=''.join(html_parts),
)
notifications_sent += 1
+ alert.notifications_sent = (alert.notifications_sent or 0) + 1
except Exception as e:
logger.error(f"Failed to send sniper alert notification: {e}")
await db.commit()
- logger.info(f"Sniper alert matching complete: {matches_created} matches created, {notifications_sent} notifications sent")
+ logger.info(f"Sniper alert matching complete: {matches_created} matches, {notifications_sent} notifications")
except Exception as e:
logger.exception(f"Sniper alert matching failed: {e}")
@@ -1045,9 +1085,16 @@ def _auction_matches_alert(auction: "DomainAuction", alert: "SniperAlert") -> bo
"""Check if an auction matches the criteria of a sniper alert."""
domain_name = auction.domain.rsplit('.', 1)[0] if '.' in auction.domain else auction.domain
- # Check keyword filter
- if alert.keyword:
- if alert.keyword.lower() not in domain_name.lower():
+ # Check keyword filter (must contain any of the keywords)
+ if alert.keywords:
+ required = [k.strip().lower() for k in alert.keywords.split(',')]
+ if not any(kw in domain_name.lower() for kw in required):
+ return False
+
+ # Check exclude keywords
+ if alert.exclude_keywords:
+ excluded = [k.strip().lower() for k in alert.exclude_keywords.split(',')]
+ if any(kw in domain_name.lower() for kw in excluded):
return False
# Check TLD filter
@@ -1056,6 +1103,12 @@ def _auction_matches_alert(auction: "DomainAuction", alert: "SniperAlert") -> bo
if auction.tld.lower() not in allowed_tlds:
return False
+ # Check platform filter
+ if alert.platforms:
+ allowed_platforms = [p.strip().lower() for p in alert.platforms.split(',')]
+ if auction.platform.lower() not in allowed_platforms:
+ return False
+
# Check length filters
if alert.min_length and len(domain_name) < alert.min_length:
return False
@@ -1068,17 +1121,68 @@ def _auction_matches_alert(auction: "DomainAuction", alert: "SniperAlert") -> bo
if alert.max_price and auction.current_bid > alert.max_price:
return False
- # Check exclusion filters
- if alert.exclude_numbers:
+ # Check bids filter (low competition)
+ if alert.max_bids and auction.num_bids and auction.num_bids > alert.max_bids:
+ return False
+
+ # Check no_numbers filter
+ if alert.no_numbers:
if any(c.isdigit() for c in domain_name):
return False
- if alert.exclude_hyphens:
+ # Check no_hyphens filter
+ if alert.no_hyphens:
if '-' in domain_name:
return False
+ # Check exclude_chars
if alert.exclude_chars:
- excluded = set(alert.exclude_chars.lower())
+ excluded = set(c.strip().lower() for c in alert.exclude_chars.split(','))
+ if any(c in excluded for c in domain_name.lower()):
+ return False
+
+ return True
+
+
+def _drop_matches_alert(drop, alert: "SniperAlert") -> bool:
+ """Check if a dropped domain matches the criteria of a sniper alert."""
+ domain_name = drop.domain # Already just the name without TLD
+
+ # Check keyword filter
+ if alert.keywords:
+ required = [k.strip().lower() for k in alert.keywords.split(',')]
+ if not any(kw in domain_name.lower() for kw in required):
+ return False
+
+ # Check exclude keywords
+ if alert.exclude_keywords:
+ excluded = [k.strip().lower() for k in alert.exclude_keywords.split(',')]
+ if any(kw in domain_name.lower() for kw in excluded):
+ return False
+
+ # Check TLD filter
+ if alert.tlds:
+ allowed_tlds = [t.strip().lower() for t in alert.tlds.split(',')]
+ if drop.tld.lower() not in allowed_tlds:
+ return False
+
+ # Check length filters
+ if alert.min_length and len(domain_name) < alert.min_length:
+ return False
+ if alert.max_length and len(domain_name) > alert.max_length:
+ return False
+
+ # Check no_numbers filter (use drop.is_numeric)
+ if alert.no_numbers and drop.is_numeric:
+ return False
+
+ # Check no_hyphens filter (use drop.has_hyphen)
+ if alert.no_hyphens and drop.has_hyphen:
+ return False
+
+ # Check exclude_chars
+ if alert.exclude_chars:
+ excluded = set(c.strip().lower() for c in alert.exclude_chars.split(','))
if any(c in excluded for c in domain_name.lower()):
return False
diff --git a/backend/app/services/sedo_api.py b/backend/app/services/sedo_api.py
index cf58570..6292ad7 100644
--- a/backend/app/services/sedo_api.py
+++ b/backend/app/services/sedo_api.py
@@ -140,10 +140,41 @@ class SedoAPIClient:
"""Parse XML response from Sedo API."""
try:
root = ElementTree.fromstring(xml_text)
+
+ # Check for error response
+ if root.tag == "fault" or root.find(".//faultcode") is not None:
+ fault_code = root.findtext(".//faultcode") or root.findtext("faultcode")
+ fault_string = root.findtext(".//faultstring") or root.findtext("faultstring")
+ return {"error": True, "faultcode": fault_code, "faultstring": fault_string}
+
+ # Parse SEDOSEARCH response (domain listings)
+ if root.tag == "SEDOSEARCH":
+ items = []
+ for item in root.findall("item"):
+ domain_data = {}
+ for child in item:
+ # Get the text content, handle type attribute
+ value = child.text
+ type_attr = child.get("type", "")
+
+ # Convert types
+ if "double" in type_attr or "int" in type_attr:
+ try:
+ value = float(value) if value else 0
+ except:
+ pass
+
+ domain_data[child.tag] = value
+ items.append(domain_data)
+
+ return {"items": items, "count": len(items)}
+
+ # Generic XML to dict fallback
return self._xml_to_dict(root)
+
except Exception as e:
logger.warning(f"Failed to parse XML: {e}")
- return {"raw": xml_text}
+ return {"raw": xml_text, "error": str(e)}
def _xml_to_dict(self, element) -> Dict[str, Any]:
"""Convert XML element to dictionary."""
@@ -171,20 +202,18 @@ class SedoAPIClient:
"""
Search for domains listed on Sedo marketplace.
- Returns domains for sale (not auctions).
+ Returns domains for sale (XML parsed to dict).
"""
- params = {
- "output_method": "json", # Request JSON response
- }
+ params = {}
if keyword:
params["keyword"] = keyword
if tld:
params["tld"] = tld.lstrip(".")
if min_price is not None:
- params["minprice"] = min_price
+ params["minprice"] = int(min_price)
if max_price is not None:
- params["maxprice"] = max_price
+ params["maxprice"] = int(max_price)
if page:
params["page"] = page
if page_size:
@@ -202,11 +231,11 @@ class SedoAPIClient:
) -> Dict[str, Any]:
"""
Search for active domain auctions on Sedo.
+
+ Note: Sedo API doesn't have a dedicated auction filter.
+ We filter by type='A' (auction) in post-processing.
"""
- params = {
- "output_method": "json",
- "auction": "true", # Only auctions
- }
+ params = {}
if keyword:
params["keyword"] = keyword
@@ -217,7 +246,72 @@ class SedoAPIClient:
if page_size:
params["pagesize"] = min(page_size, 100)
- return await self._request("DomainSearch", params)
+ result = await self._request("DomainSearch", params)
+
+ # Filter to only show auctions (type='A')
+ if "items" in result:
+ result["items"] = [
+ item for item in result["items"]
+ if item.get("type") == "A"
+ ]
+ result["count"] = len(result["items"])
+
+ return result
+
+ async def get_listings_for_display(
+ self,
+ keyword: Optional[str] = None,
+ tld: Optional[str] = None,
+ page_size: int = 50,
+ ) -> List[Dict[str, Any]]:
+ """
+ Get Sedo listings formatted for display in Pounce.
+
+ Returns a list of domains with affiliate URLs.
+ """
+ result = await self.search_domains(
+ keyword=keyword,
+ tld=tld,
+ page_size=page_size
+ )
+
+ if "error" in result or "items" not in result:
+ logger.warning(f"Sedo API error: {result}")
+ return []
+
+ listings = []
+ for item in result.get("items", []):
+ domain = item.get("domain", "")
+ if not domain:
+ continue
+
+ # Get price (Sedo returns 0 for "Make Offer")
+ price = item.get("price", 0)
+ if isinstance(price, str):
+ try:
+ price = float(price)
+ except:
+ price = 0
+
+ # Use the URL from Sedo (includes partner ID and tracking)
+ url = item.get("url", f"https://sedo.com/search/details/?domain={domain}&partnerid={self.partner_id}")
+
+ # Determine listing type
+ listing_type = item.get("type", "D") # D=Direct, A=Auction
+ is_auction = listing_type == "A"
+
+ listings.append({
+ "domain": domain,
+ "tld": domain.rsplit(".", 1)[1] if "." in domain else "",
+ "price": price,
+ "price_type": "bid" if is_auction else ("make_offer" if price == 0 else "fixed"),
+ "is_auction": is_auction,
+ "platform": "Sedo",
+ "url": url,
+ "rank": item.get("rank", 0),
+ })
+
+ return listings
async def get_domain_details(self, domain: str) -> Dict[str, Any]:
"""Get detailed information about a specific domain."""
diff --git a/backend/scripts/setup_zone_cron.sh b/backend/scripts/setup_zone_cron.sh
new file mode 100644
index 0000000..4185689
--- /dev/null
+++ b/backend/scripts/setup_zone_cron.sh
@@ -0,0 +1,59 @@
#!/bin/bash
# Setup cron job for automated zone file synchronization.
# Run this script on the server to install the daily sync job.

set -e

echo "🔧 Setting up Pounce Zone Sync Cron Job"
echo "========================================"

# Create log directory
mkdir -p /home/user/logs
touch /home/user/logs/zone_sync.log

# Create the cron wrapper script
cat > /home/user/pounce/backend/scripts/run_zone_sync.sh << 'EOF'
#!/bin/bash
# Wrapper script for zone sync with proper environment

cd /home/user/pounce/backend
source venv/bin/activate

# Run sync with timeout (max 2 hours)
timeout 7200 python scripts/sync_all_zones.py >> /home/user/logs/zone_sync.log 2>&1

# Rotate log if too big (trim back to the last 5MB once it passes 10MB)
if [ -f /home/user/logs/zone_sync.log ]; then
    size=$(stat -f%z /home/user/logs/zone_sync.log 2>/dev/null || stat -c%s /home/user/logs/zone_sync.log 2>/dev/null)
    if [ "$size" -gt 10485760 ]; then
        tail -c 5242880 /home/user/logs/zone_sync.log > /home/user/logs/zone_sync.log.tmp
        mv /home/user/logs/zone_sync.log.tmp /home/user/logs/zone_sync.log
    fi
fi
EOF

chmod +x /home/user/pounce/backend/scripts/run_zone_sync.sh

# Add cron job (runs daily at 06:00 UTC - after most registry updates).
# Remove existing pounce zone sync jobs first.
# FIX: also filter the marker comment line — the old grep only removed the
# job line, so the comment duplicated on every rerun of this script.
crontab -l 2>/dev/null | grep -v -e "run_zone_sync.sh" -e "Pounce Zone File Sync" > /tmp/crontab.tmp || true

# Add new job
echo "# Pounce Zone File Sync - Daily at 06:00 UTC" >> /tmp/crontab.tmp
echo "0 6 * * * /home/user/pounce/backend/scripts/run_zone_sync.sh" >> /tmp/crontab.tmp

# Install crontab
crontab /tmp/crontab.tmp
rm /tmp/crontab.tmp

echo ""
echo "✅ Cron job installed!"
echo ""
echo "Schedule: Daily at 06:00 UTC"
echo "Log file: /home/user/logs/zone_sync.log"
echo ""
echo "Current crontab:"
crontab -l
echo ""
echo "To run manually: /home/user/pounce/backend/scripts/run_zone_sync.sh"
echo "To view logs: tail -f /home/user/logs/zone_sync.log"
diff --git a/backend/scripts/sync_all_zones.py b/backend/scripts/sync_all_zones.py
new file mode 100644
index 0000000..0d7c2f6
--- /dev/null
+++ b/backend/scripts/sync_all_zones.py
@@ -0,0 +1,594 @@
+#!/usr/bin/env python3
+"""
+Pounce Zone File Sync - Daily Automated Zone File Synchronization
+
+This script:
+1. Downloads zone files from ICANN CZDS (app, dev, info, online, org, xyz)
+2. Downloads zone files from Switch.ch via AXFR (.ch, .li)
+3. Compares with yesterday's data to detect drops
+4. Stores drops in the database for the Drops tab
+5. Cleans up all temporary files and compresses domain lists
+
+STORAGE STRATEGY (Ultra-Efficient):
+- Raw zone files: DELETED immediately after parsing
+- Domain lists: Stored COMPRESSED (.gz) - ~80% size reduction
+- Only keeps current snapshot (no history)
+- Drops stored in DB for 48h only
+
+Run daily via cron at 06:00 UTC (after most registries update)
+"""
+
+import asyncio
+import gzip
+import logging
+import subprocess
+import sys
+import shutil
+from datetime import datetime, timedelta
+from pathlib import Path
+from typing import Optional, Set
+
+# Add parent to path for imports
+sys.path.insert(0, str(Path(__file__).parent.parent))
+
+from sqlalchemy import text
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+from sqlalchemy.orm import sessionmaker
+
+# Configuration
+CZDS_DIR = Path("/home/user/pounce_czds")
+SWITCH_DIR = Path("/home/user/pounce_switch")
+LOG_FILE = Path("/home/user/logs/zone_sync.log")
+
+# Storage efficiency: compress domain lists
+COMPRESS_DOMAIN_LISTS = True
+
+# CZDS TLDs we have access to
+CZDS_TLDS = ["app", "dev", "info", "online", "org", "xyz"]
+
+# Switch.ch AXFR config
+SWITCH_CONFIG = {
+ "ch": {
+ "server": "zonedata.switch.ch",
+ "key_name": "tsig-zonedata-ch-public-21-01.",
+ "key_secret": "stZwEGApYumtXkh73qMLPqfbIDozWKZLkqRvcjKSpRnsor6A6MxixRL6C2HeSVBQNfMW4wer+qjS0ZSfiWiJ3Q=="
+ },
+ "li": {
+ "server": "zonedata.switch.ch",
+ "key_name": "tsig-zonedata-li-public-21-01.",
+ "key_secret": "t8GgeCn+fhPaj+cRy/lakQPb6M45xz/NZwmcp4iqbBxKFCCH0/k3xNGe6sf3ObmoaKDBedge/La4cpPfLqtFkw=="
+ }
+}
+
# Setup logging.
# FIX: the original handler expression
#   logging.FileHandler(LOG_FILE) if LOG_FILE.parent.exists() else logging.StreamHandler()
# produced TWO StreamHandlers (every message printed twice) whenever the log
# directory was missing. Build the handler list conditionally instead.
_handlers = [logging.StreamHandler()]
if LOG_FILE.parent.exists():
    _handlers.append(logging.FileHandler(LOG_FILE))
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(levelname)s] %(message)s',
    handlers=_handlers,
)
logger = logging.getLogger(__name__)
+
+
class ZoneSyncResult:
    """Result of a single-TLD zone sync operation."""

    def __init__(self, tld: str):
        self.tld = tld                          # TLD this result describes, e.g. "app"
        self.success = False                    # set True only after a full successful sync
        self.domain_count = 0                   # domains found in today's zone
        self.drops_count = 0                    # domains present yesterday but gone today
        self.error: Optional[str] = None        # failure reason, if any
        self.duration_seconds = 0               # wall-clock sync duration
        # FIX: sync_czds_tld() only assigns .drops on the success path;
        # initialize here so readers never hit AttributeError on failures.
        self.drops: list = []
+
+
async def get_db_session():
    """Create a new async database session.

    NOTE(review): this builds a fresh engine on every call and never disposes
    it — acceptable for a once-a-day cron script, but don't copy this pattern
    into long-running services.
    """
    from app.config import settings

    # Upgrade a plain sqlite URL to the aiosqlite async driver.
    # FIX: guard with startswith + count=1 — "aiosqlite://" itself contains
    # the substring "sqlite://", so a naive .replace() on an already-async
    # URL would mangle it into "sqlite+aiosqlite+aiosqlite://".
    url = settings.database_url
    if url.startswith("sqlite://"):
        url = url.replace("sqlite://", "sqlite+aiosqlite://", 1)

    engine = create_async_engine(url)
    async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
    return async_session()
+
+
def download_czds_zone(tld: str) -> Optional[Path]:
    """Download a single CZDS zone file using pyCZDS.

    Reads CZDS credentials from the backend .env file, asks CZDS for the
    zone-file URLs we have access to, and downloads the one matching *tld*
    into CZDS_DIR. Returns the path to the downloaded .gz file, or None on
    any failure (missing credentials, no access, network error).
    """
    try:
        from pyczds.client import CZDSClient

        # Read credentials from .env (repo-relative first, then deploy path).
        env_file = Path(__file__).parent.parent / ".env"
        if not env_file.exists():
            env_file = Path("/home/user/pounce/backend/.env")
        # FIX: guard the fallback too — read_text() on a missing file raised
        # FileNotFoundError, which was only caught by the broad handler below.
        if not env_file.exists():
            logger.error("CZDS .env file not found")
            return None

        username = password = None
        for line in env_file.read_text().splitlines():
            if line.startswith("CZDS_USERNAME="):
                username = line.split("=", 1)[1].strip()
            elif line.startswith("CZDS_PASSWORD="):
                password = line.split("=", 1)[1].strip()

        if not username or not password:
            logger.error("CZDS credentials not found in .env")
            return None

        client = CZDSClient(username, password)
        urls = client.get_zonefiles_list()

        # Find the download URL for this TLD.
        target_url = next(
            (url for url in urls if f"{tld}.zone" in url or f"/{tld}." in url),
            None,
        )
        if not target_url:
            logger.warning(f"No access to .{tld} zone file")
            return None

        logger.info(f"Downloading .{tld} from CZDS...")
        client.get_zonefile(target_url, download_dir=str(CZDS_DIR))

        # Locate the downloaded file (naming can vary between pyCZDS versions).
        gz_file = CZDS_DIR / f"{tld}.txt.gz"
        if gz_file.exists():
            return gz_file
        for candidate in CZDS_DIR.glob(f"*{tld}*.gz"):
            return candidate

        return None

    except Exception as e:
        logger.error(f"CZDS download failed for .{tld}: {e}")
        return None
+
+
def download_switch_zone(tld: str) -> Optional[Path]:
    """Download a zone file from Switch.ch via an authenticated AXFR transfer.

    Returns the path of the written zone file, or None when the TLD is not
    configured or the transfer fails/times out.
    """
    config = SWITCH_CONFIG.get(tld)
    if config is None:
        return None

    output_file = SWITCH_DIR / f"{tld}_zone.txt"
    transfer_cmd = [
        "dig",
        "@" + config["server"],
        f"{tld}.",
        "AXFR",
        "-y",
        f"hmac-sha512:{config['key_name']}:{config['key_secret']}",
    ]

    try:
        logger.info(f"Downloading .{tld} via AXFR from Switch.ch...")
        proc = subprocess.run(transfer_cmd, capture_output=True, text=True, timeout=600)

        if proc.returncode != 0:
            logger.error(f"AXFR failed for .{tld}: {proc.stderr}")
            return None

        output_file.write_text(proc.stdout)
        return output_file

    except subprocess.TimeoutExpired:
        logger.error(f"AXFR timeout for .{tld}")
        return None
    except Exception as e:
        logger.error(f"AXFR failed for .{tld}: {e}")
        return None
+
+
def parse_czds_zone(gz_file: Path, tld: str) -> set:
    """Parse a gzipped CZDS zone file and extract unique root domains.

    Only second-level names directly under *tld* are kept (no subdomains);
    names are lowercased. Returns an empty set if parsing fails.
    """
    suffix = f".{tld}"
    found = set()

    try:
        with gzip.open(gz_file, 'rt', encoding='utf-8', errors='ignore') as fh:
            for raw in fh:
                if raw.startswith(';') or not raw.strip():
                    continue

                fields = raw.split()
                if len(fields) < 4:
                    continue

                owner = fields[0].rstrip('.')
                if not owner.lower().endswith(suffix):
                    continue

                label = owner[:-len(suffix)]
                # Keep root domains only — skip empty labels and subdomains.
                if label and '.' not in label:
                    found.add(label.lower())

        return found

    except Exception as e:
        logger.error(f"Failed to parse .{tld} zone file: {e}")
        return set()
+
+
def parse_switch_zone(zone_file: Path, tld: str) -> set:
    """Parse Switch.ch AXFR output and extract unique root domains.

    Skips the zone apex record for the TLD itself; keeps only second-level
    names (no subdomains), lowercased. Returns an empty set on failure.
    """
    suffix = f".{tld}"
    found = set()

    try:
        for raw in zone_file.read_text().splitlines():
            if raw.startswith(';') or not raw.strip():
                continue

            fields = raw.split()
            if len(fields) < 4:
                continue

            owner = fields[0].rstrip('.')
            lowered = owner.lower()

            # Skip the zone apex record for the TLD itself.
            if lowered == tld:
                continue
            if not lowered.endswith(suffix):
                continue

            label = owner[:-len(suffix)]
            if label and '.' not in label:
                found.add(label.lower())

        return found

    except Exception as e:
        logger.error(f"Failed to parse .{tld} zone file: {e}")
        return set()
+
+
def save_domains(tld: str, domains: Set[str], directory: Path) -> Path:
    """Persist the domain list for tomorrow's drop comparison.

    When COMPRESS_DOMAIN_LISTS is set, writes gzip level-9 compressed output
    (and removes any stale uncompressed copy); otherwise writes plain text.
    The list is sorted for stable, diff-friendly output.
    """
    payload = '\n'.join(sorted(domains))

    if not COMPRESS_DOMAIN_LISTS:
        target = directory / f"{tld}_domains.txt"
        target.write_text(payload)
        return target

    target = directory / f"{tld}_domains.txt.gz"
    # Remove a leftover uncompressed file so only one snapshot exists.
    stale = directory / f"{tld}_domains.txt"
    if stale.exists():
        stale.unlink()
    with gzip.open(target, 'wt', encoding='utf-8', compresslevel=9) as fh:
        fh.write(payload)
    return target
+
+
def load_previous_domains(tld: str, directory: Path) -> Set[str]:
    """Load the previous run's domain list (compressed file preferred).

    Returns an empty set when no snapshot exists or it cannot be read.
    """
    compressed = directory / f"{tld}_domains.txt.gz"
    if compressed.exists():
        try:
            with gzip.open(compressed, 'rt', encoding='utf-8') as fh:
                return set(fh.read().splitlines())
        except Exception as e:
            logger.warning(f"Failed to read compressed domains for .{tld}: {e}")
            return set()

    # Fallback: legacy uncompressed snapshot.
    plain = directory / f"{tld}_domains.txt"
    if plain.exists():
        try:
            return set(plain.read_text().splitlines())
        except Exception:
            return set()

    return set()
+
+
def detect_drops(tld: str, today_domains: set, yesterday_domains: set) -> set:
    """Return domains present yesterday but missing today (i.e. dropped)."""
    if not yesterday_domains:
        # First run for this TLD — nothing to diff against yet.
        logger.info(f".{tld}: No previous data for comparison (first run)")
        return set()
    return yesterday_domains - today_domains
+
+
async def store_drops_in_db(drops: list[tuple[str, str]], session: AsyncSession):
    """Store dropped domains in the dropped_domains table (48h retention).

    Purges rows older than 48 hours, then upserts each (name, tld) pair with
    derived properties (length, numeric-only, hyphenated). Returns the number
    of rows successfully written; individual insert failures are logged at
    debug level and skipped.
    """
    if not drops:
        return 0

    now = datetime.utcnow()

    # Retention policy: the Drops tab only shows the last 48 hours.
    await session.execute(
        text("DELETE FROM dropped_domains WHERE dropped_date < :cutoff"),
        {"cutoff": now - timedelta(hours=48)},
    )

    insert_sql = text("""
                    INSERT OR REPLACE INTO dropped_domains
                    (domain, tld, dropped_date, length, is_numeric, has_hyphen, created_at)
                    VALUES (:domain, :tld, :dropped_date, :length, :is_numeric, :has_hyphen, :created_at)
                """)

    stored = 0
    for name, drop_tld in drops:
        params = {
            "domain": name,
            "tld": drop_tld,
            "dropped_date": now,
            "length": len(name),
            "is_numeric": name.isdigit(),
            "has_hyphen": '-' in name,
            "created_at": now,
        }
        try:
            await session.execute(insert_sql, params)
        except Exception as e:
            logger.debug(f"Failed to insert drop {name}.{drop_tld}: {e}")
        else:
            stored += 1

    await session.commit()
    return stored
+
+
+async def sync_czds_tld(tld: str) -> ZoneSyncResult:
+ """Sync a single CZDS TLD"""
+ result = ZoneSyncResult(tld)
+ start = datetime.now()
+
+ try:
+ # Load previous domains for comparison
+ yesterday_domains = load_previous_domains(tld, CZDS_DIR)
+
+ # Download new zone file
+ gz_file = download_czds_zone(tld)
+ if not gz_file:
+ result.error = "Download failed"
+ return result
+
+ # Parse zone file
+ logger.info(f"Parsing .{tld} zone file...")
+ today_domains = parse_czds_zone(gz_file, tld)
+
+ if not today_domains:
+ result.error = "Parsing failed - no domains extracted"
+ return result
+
+ result.domain_count = len(today_domains)
+
+ # Detect drops
+ drops = detect_drops(tld, today_domains, yesterday_domains)
+ result.drops_count = len(drops)
+
+ # Save current domains for tomorrow's comparison
+ save_domains(tld, today_domains, CZDS_DIR)
+
+ # Cleanup gz file
+ if gz_file.exists():
+ gz_file.unlink()
+
+ # Update last download marker
+ marker = CZDS_DIR / f".{tld}_last_download"
+ marker.write_text(datetime.utcnow().isoformat())
+
+ result.success = True
+ logger.info(f"β
.{tld}: {result.domain_count:,} domains, {result.drops_count:,} drops")
+
+ # Return drops for DB storage
+ result.drops = [(d, tld) for d in drops]
+
+ except Exception as e:
+ result.error = str(e)
+ logger.error(f"β .{tld} sync failed: {e}")
+
+ result.duration_seconds = (datetime.now() - start).total_seconds()
+ return result
+
+
+async def sync_switch_tld(tld: str) -> ZoneSyncResult:
+ """Sync a single Switch.ch TLD"""
+ result = ZoneSyncResult(tld)
+ start = datetime.now()
+
+ try:
+ # Load previous domains for comparison
+ yesterday_domains = load_previous_domains(tld, SWITCH_DIR)
+
+ # Download new zone file
+ zone_file = download_switch_zone(tld)
+ if not zone_file:
+ result.error = "AXFR failed"
+ return result
+
+ # Parse zone file
+ logger.info(f"Parsing .{tld} zone file...")
+ today_domains = parse_switch_zone(zone_file, tld)
+
+ if not today_domains:
+ result.error = "Parsing failed - no domains extracted"
+ return result
+
+ result.domain_count = len(today_domains)
+
+ # Detect drops
+ drops = detect_drops(tld, today_domains, yesterday_domains)
+ result.drops_count = len(drops)
+
+ # Save current domains for tomorrow's comparison
+ save_domains(tld, today_domains, SWITCH_DIR)
+
+ # Cleanup raw zone file (keep only domain list)
+ if zone_file.exists():
+ zone_file.unlink()
+
+ result.success = True
+ logger.info(f"β
.{tld}: {result.domain_count:,} domains, {result.drops_count:,} drops")
+
+ # Return drops for DB storage
+ result.drops = [(d, tld) for d in drops]
+
+ except Exception as e:
+ result.error = str(e)
+ logger.error(f"β .{tld} sync failed: {e}")
+
+ result.duration_seconds = (datetime.now() - start).total_seconds()
+ return result
+
+
+def cleanup_stray_files(directory: Path, keep_extensions: list = None):
+ """Remove any stray/temporary files to save space"""
+ if keep_extensions is None:
+ keep_extensions = ['.txt.gz', '.txt'] # Only keep domain lists
+
+ removed_count = 0
+ removed_size = 0
+
+ for f in directory.iterdir():
+ if f.is_file():
+ # Keep marker files
+ if f.name.startswith('.'):
+ continue
+ # Keep domain list files
+ if any(f.name.endswith(ext) for ext in keep_extensions):
+ continue
+ # Remove everything else (raw zone files, temp files)
+ try:
+ size = f.stat().st_size
+ f.unlink()
+ removed_count += 1
+ removed_size += size
+ logger.info(f"ποΈ Removed stray file: {f.name} ({size / (1024*1024):.1f} MB)")
+ except Exception as e:
+ logger.warning(f"Failed to remove {f.name}: {e}")
+
+ return removed_count, removed_size
+
+
+def get_directory_size(directory: Path) -> int:
+ """Get total size of directory in bytes"""
+ total = 0
+ for f in directory.rglob('*'):
+ if f.is_file():
+ total += f.stat().st_size
+ return total
+
+
+def log_storage_stats():
+ """Log current storage usage"""
+ czds_size = get_directory_size(CZDS_DIR) if CZDS_DIR.exists() else 0
+ switch_size = get_directory_size(SWITCH_DIR) if SWITCH_DIR.exists() else 0
+ total = czds_size + switch_size
+
+ logger.info(f"πΎ STORAGE: CZDS={czds_size/(1024*1024):.1f}MB, Switch={switch_size/(1024*1024):.1f}MB, Total={total/(1024*1024):.1f}MB")
+ return total
+
+
+async def main():
+ """Main sync process"""
+ logger.info("=" * 60)
+ logger.info("π POUNCE ZONE SYNC - Starting daily synchronization")
+ logger.info("=" * 60)
+
+ start_time = datetime.now()
+
+ # Ensure directories exist
+ CZDS_DIR.mkdir(parents=True, exist_ok=True)
+ SWITCH_DIR.mkdir(parents=True, exist_ok=True)
+ LOG_FILE.parent.mkdir(parents=True, exist_ok=True)
+
+ # Log initial storage
+ logger.info("\nπ Initial storage check...")
+ initial_storage = log_storage_stats()
+
+ all_drops = []
+ results = []
+
+ # Sync CZDS TLDs (sequentially to respect rate limits)
+ logger.info("\nπ¦ Syncing ICANN CZDS zone files...")
+ for tld in CZDS_TLDS:
+ result = await sync_czds_tld(tld)
+ results.append(result)
+ if hasattr(result, 'drops'):
+ all_drops.extend(result.drops)
+
+ # Rate limit: wait between downloads
+ if tld != CZDS_TLDS[-1]:
+ logger.info("β³ Waiting 5 seconds (rate limit)...")
+ await asyncio.sleep(5)
+
+ # Sync Switch.ch TLDs
+ logger.info("\nπ¨π Syncing Switch.ch zone files...")
+ for tld in ["ch", "li"]:
+ result = await sync_switch_tld(tld)
+ results.append(result)
+ if hasattr(result, 'drops'):
+ all_drops.extend(result.drops)
+
+ # Store drops in database
+ if all_drops:
+ logger.info(f"\nπΎ Storing {len(all_drops)} drops in database...")
+ try:
+ session = await get_db_session()
+ stored = await store_drops_in_db(all_drops, session)
+ await session.close()
+ logger.info(f"β
Stored {stored} drops in database")
+ except Exception as e:
+ logger.error(f"β Failed to store drops: {e}")
+
+ # Cleanup stray files
+ logger.info("\nπ§Ή Cleaning up temporary files...")
+ czds_removed, czds_freed = cleanup_stray_files(CZDS_DIR)
+ switch_removed, switch_freed = cleanup_stray_files(SWITCH_DIR)
+ total_freed = czds_freed + switch_freed
+ if total_freed > 0:
+ logger.info(f"β
Freed {total_freed / (1024*1024):.1f} MB ({czds_removed + switch_removed} files)")
+ else:
+ logger.info("β
No stray files found")
+
+ # Summary
+ duration = (datetime.now() - start_time).total_seconds()
+
+ logger.info("\n" + "=" * 60)
+ logger.info("π SYNC SUMMARY")
+ logger.info("=" * 60)
+
+ total_domains = 0
+ total_drops = 0
+ success_count = 0
+
+ for r in results:
+ status = "β
" if r.success else "β"
+ logger.info(f" {status} .{r.tld}: {r.domain_count:,} domains, {r.drops_count:,} drops ({r.duration_seconds:.1f}s)")
+ if r.success:
+ total_domains += r.domain_count
+ total_drops += r.drops_count
+ success_count += 1
+
+ logger.info("-" * 60)
+ logger.info(f" TOTAL: {total_domains:,} domains across {success_count}/{len(results)} TLDs")
+ logger.info(f" DROPS: {total_drops:,} new drops detected")
+ logger.info(f" TIME: {duration:.1f} seconds")
+
+ # Final storage stats
+ logger.info("-" * 60)
+ final_storage = log_storage_stats()
+ if initial_storage > 0:
+ change = final_storage - initial_storage
+ logger.info(f" CHANGE: {'+' if change > 0 else ''}{change/(1024*1024):.1f} MB")
+ logger.info("=" * 60)
+
+ return 0 if success_count == len(results) else 1
+
+
+if __name__ == "__main__":
+ exit_code = asyncio.run(main())
+ sys.exit(exit_code)
diff --git a/frontend/src/app/blog/[slug]/page.tsx b/frontend/src/app/blog/[slug]/page.tsx
index f39c701..898ea1f 100644
--- a/frontend/src/app/blog/[slug]/page.tsx
+++ b/frontend/src/app/blog/[slug]/page.tsx
@@ -6,13 +6,34 @@ import BlogPostClient from './BlogPostClient'
import type { BlogPost } from './types'
async function fetchPostMeta(slug: string): Promise {
- const baseUrl = (process.env.BACKEND_URL || process.env.NEXT_PUBLIC_SITE_URL || SITE_URL).replace(/\/$/, '')
- const res = await fetch(`${baseUrl}/api/v1/blog/posts/${encodeURIComponent(slug)}/meta`, {
- next: { revalidate: 3600 },
- })
- if (res.status === 404) return null
- if (!res.ok) throw new Error(`Failed to load blog post meta: ${res.status}`)
- return (await res.json()) as BlogPost
+ try {
+ // Build API URL correctly:
+ // - BACKEND_URL is just the host (e.g. http://127.0.0.1:8000)
+ // - NEXT_PUBLIC_API_URL already includes /api/v1 (e.g. https://pounce.ch/api/v1)
+ // - SITE_URL is just the frontend host (e.g. https://pounce.ch)
+ let apiUrl: string
+
+ if (process.env.BACKEND_URL) {
+ apiUrl = `${process.env.BACKEND_URL.replace(/\/$/, '')}/api/v1/blog/posts/${encodeURIComponent(slug)}/meta`
+ } else if (process.env.NEXT_PUBLIC_API_URL) {
+ apiUrl = `${process.env.NEXT_PUBLIC_API_URL.replace(/\/$/, '')}/blog/posts/${encodeURIComponent(slug)}/meta`
+ } else {
+ apiUrl = `${SITE_URL.replace(/\/$/, '')}/api/v1/blog/posts/${encodeURIComponent(slug)}/meta`
+ }
+
+ const res = await fetch(apiUrl, {
+ next: { revalidate: 3600 },
+ })
+ if (res.status === 404) return null
+ if (!res.ok) {
+ console.error(`[fetchPostMeta] Failed: ${res.status} from ${apiUrl}`)
+ return null
+ }
+ return (await res.json()) as BlogPost
+ } catch (error) {
+ console.error(`[fetchPostMeta] Error fetching ${slug}:`, error)
+ return null
+ }
}
export async function generateMetadata({
diff --git a/frontend/src/app/buy/[slug]/page.tsx b/frontend/src/app/buy/[slug]/page.tsx
index afebb64..423c434 100644
--- a/frontend/src/app/buy/[slug]/page.tsx
+++ b/frontend/src/app/buy/[slug]/page.tsx
@@ -6,13 +6,41 @@ import BuyDomainClient from './BuyDomainClient'
import type { Listing } from './types'
async function fetchListing(slug: string): Promise {
- const baseUrl = (process.env.BACKEND_URL || process.env.NEXT_PUBLIC_SITE_URL || SITE_URL).replace(/\/$/, '')
- const res = await fetch(`${baseUrl}/api/v1/listings/${encodeURIComponent(slug)}`, {
- next: { revalidate: 60 },
- })
- if (res.status === 404) return null
- if (!res.ok) throw new Error(`Failed to load listing: ${res.status}`)
- return (await res.json()) as Listing
+ try {
+ // Build API URL correctly:
+ // - BACKEND_URL is just the host (e.g. http://127.0.0.1:8000)
+ // - NEXT_PUBLIC_API_URL already includes /api/v1 (e.g. https://pounce.ch/api/v1)
+ // - SITE_URL is just the frontend host (e.g. https://pounce.ch)
+ let apiUrl: string
+
+ if (process.env.BACKEND_URL) {
+ // Internal backend URL (no /api/v1 suffix)
+ apiUrl = `${process.env.BACKEND_URL.replace(/\/$/, '')}/api/v1/listings/${encodeURIComponent(slug)}`
+ } else if (process.env.NEXT_PUBLIC_API_URL) {
+ // Already includes /api/v1
+ apiUrl = `${process.env.NEXT_PUBLIC_API_URL.replace(/\/$/, '')}/listings/${encodeURIComponent(slug)}`
+ } else {
+ // Fallback to site URL
+ apiUrl = `${SITE_URL.replace(/\/$/, '')}/api/v1/listings/${encodeURIComponent(slug)}`
+ }
+
+ const res = await fetch(apiUrl, {
+ next: { revalidate: 60 },
+ headers: {
+ 'Accept': 'application/json',
+ },
+ })
+
+ if (res.status === 404) return null
+ if (!res.ok) {
+ console.error(`[fetchListing] Failed to load listing ${slug}: ${res.status} from ${apiUrl}`)
+ return null
+ }
+ return (await res.json()) as Listing
+ } catch (error) {
+ console.error(`[fetchListing] Error fetching listing ${slug}:`, error)
+ return null
+ }
}
export async function generateMetadata({
diff --git a/frontend/src/app/discover/[tld]/layout.tsx b/frontend/src/app/discover/[tld]/layout.tsx
index 0337af7..2308021 100644
--- a/frontend/src/app/discover/[tld]/layout.tsx
+++ b/frontend/src/app/discover/[tld]/layout.tsx
@@ -22,13 +22,30 @@ type TldCompareResponse = {
}
async function fetchTldCompare(tld: string): Promise {
- const baseUrl = (process.env.BACKEND_URL || process.env.NEXT_PUBLIC_SITE_URL || SITE_URL).replace(/\/$/, '')
- const res = await fetch(`${baseUrl}/api/v1/tld-prices/${encodeURIComponent(tld)}/compare`, {
- next: { revalidate: 3600 },
- })
- if (res.status === 404) return null
- if (!res.ok) throw new Error(`Failed to fetch tld compare: ${res.status}`)
- return (await res.json()) as TldCompareResponse
+ try {
+ // Build API URL correctly
+ let apiUrl: string
+ if (process.env.BACKEND_URL) {
+ apiUrl = `${process.env.BACKEND_URL.replace(/\/$/, '')}/api/v1/tld-prices/${encodeURIComponent(tld)}/compare`
+ } else if (process.env.NEXT_PUBLIC_API_URL) {
+ apiUrl = `${process.env.NEXT_PUBLIC_API_URL.replace(/\/$/, '')}/tld-prices/${encodeURIComponent(tld)}/compare`
+ } else {
+ apiUrl = `${SITE_URL.replace(/\/$/, '')}/api/v1/tld-prices/${encodeURIComponent(tld)}/compare`
+ }
+
+ const res = await fetch(apiUrl, {
+ next: { revalidate: 3600 },
+ })
+ if (res.status === 404) return null
+ if (!res.ok) {
+ console.error(`[fetchTldCompare] Failed: ${res.status} from ${apiUrl}`)
+ return null
+ }
+ return (await res.json()) as TldCompareResponse
+ } catch (error) {
+ console.error(`[fetchTldCompare] Error fetching ${tld}:`, error)
+ return null
+ }
}
export async function generateMetadata({
diff --git a/frontend/src/app/terminal/hunt/page.tsx b/frontend/src/app/terminal/hunt/page.tsx
index 7efd98f..3ed150d 100644
--- a/frontend/src/app/terminal/hunt/page.tsx
+++ b/frontend/src/app/terminal/hunt/page.tsx
@@ -58,7 +58,7 @@ const TABS: Array<{ key: HuntTab; label: string; shortLabel: string; icon: any;
// ============================================================================
export default function HuntPage() {
- const { user, subscription, logout, checkAuth, domains } = useStore()
+ const { user, subscription, logout, checkAuth } = useStore()
const { toast, showToast, hideToast } = useToast()
const [tab, setTab] = useState('auctions')
@@ -70,10 +70,6 @@ export default function HuntPage() {
checkAuth()
}, [checkAuth])
- // Computed
- const availableDomains = domains?.filter((d) => d.is_available) || []
- const totalDomains = domains?.length || 0
-
// Nav Items for Mobile Bottom Bar
const mobileNavItems = [
{ href: '/terminal/hunt', label: 'Hunt', icon: Crosshair, active: true },
@@ -131,14 +127,9 @@ export default function HuntPage() {
>
{/* Top Row */}
-
-
-
- Domain Hunt
-
-
- {totalDomains} tracked Β· {availableDomains.length} available
-
+
{/* Tab Bar - Scrollable */}
@@ -179,10 +170,10 @@ export default function HuntPage() {
{/* DESKTOP HEADER + TAB BAR */}
{/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
-
-
-
-
+
+
+
Domain Hunt
@@ -190,17 +181,6 @@ export default function HuntPage() {
Search domains, browse auctions, discover drops, ride trends, or generate brandables.
-
-
-
-
{totalDomains}
-
Tracked
-
-
-
{availableDomains.length}
-
Available
-
-
{/* Desktop Tab Bar */}
diff --git a/frontend/src/app/terminal/intel/[tld]/page.tsx b/frontend/src/app/terminal/intel/[tld]/page.tsx
index 9b1f2bd..7a9dac9 100644
--- a/frontend/src/app/terminal/intel/[tld]/page.tsx
+++ b/frontend/src/app/terminal/intel/[tld]/page.tsx
@@ -57,6 +57,13 @@ function getTierLevel(tier: UserTier): number {
}
}
+interface MarketplaceLink {
+ name: string
+ description: string
+ url: string
+ type: string
+}
+
interface TldDetails {
tld: string
type: string
@@ -78,6 +85,7 @@ interface TldDetails {
price_change_3y: number
risk_level: 'low' | 'medium' | 'high'
risk_reason: string
+ marketplace_links?: MarketplaceLink[]
}
interface TldHistory {
@@ -775,6 +783,37 @@ export default function TldDetailPage() {
)}
+
+ {/* Marketplace Links */}
+ {details.marketplace_links && details.marketplace_links.length > 0 && (
+
+
+
Buy Existing Domains
+
+
+
+
+ )}
diff --git a/frontend/src/app/terminal/portfolio/page.tsx b/frontend/src/app/terminal/portfolio/page.tsx
index e514ed9..ed9c0df 100755
--- a/frontend/src/app/terminal/portfolio/page.tsx
+++ b/frontend/src/app/terminal/portfolio/page.tsx
@@ -677,6 +677,7 @@ export default function PortfolioPage() {
// Health data
const [healthByDomain, setHealthByDomain] = useState
>({})
const [checkingHealth, setCheckingHealth] = useState>(new Set())
+ const [healthLoadStarted, setHealthLoadStarted] = useState(false)
// External status (Yield, Listed)
const [yieldByDomain, setYieldByDomain] = useState>({})
@@ -750,6 +751,46 @@ export default function PortfolioPage() {
}
}, [activeTab, cfoData, cfoLoading, loadCfoData])
+ // Auto-load health data for all domains when domains are first loaded
+ useEffect(() => {
+ // Only run once when domains are first loaded
+ if (!domains.length || healthLoadStarted) return
+
+ setHealthLoadStarted(true)
+
+ const loadHealthForDomains = async () => {
+ // Load health for up to 20 domains to avoid too many requests
+ const domainsToCheck = domains.slice(0, 20)
+
+ for (const domain of domainsToCheck) {
+ const key = domain.domain.toLowerCase()
+ // Skip if already have health data
+ if (healthByDomain[key]) continue
+
+ // Add to checking set
+ setCheckingHealth(prev => new Set(prev).add(key))
+
+ try {
+ const report = await api.quickHealthCheck(domain.domain)
+ setHealthByDomain(prev => ({ ...prev, [key]: report }))
+ } catch {
+ // Silently fail for individual domains
+ } finally {
+ setCheckingHealth(prev => {
+ const next = new Set(prev)
+ next.delete(key)
+ return next
+ })
+ }
+
+ // Small delay to avoid rate limiting
+ await new Promise(resolve => setTimeout(resolve, 300))
+ }
+ }
+
+ loadHealthForDomains()
+ }, [domains, healthLoadStarted])
+
// Stats
const stats = useMemo(() => {
const active = domains.filter(d => !d.is_sold).length
@@ -806,7 +847,7 @@ export default function PortfolioPage() {
}
// Actions
- const handleHealthCheck = async (domainName: string) => {
+ const handleHealthCheck = async (domainName: string, showError = true) => {
const key = domainName.toLowerCase()
if (checkingHealth.has(key)) return
setCheckingHealth(prev => new Set(prev).add(key))
@@ -814,7 +855,9 @@ export default function PortfolioPage() {
const report = await api.quickHealthCheck(domainName)
setHealthByDomain(prev => ({ ...prev, [key]: report }))
} catch (err: any) {
- showToast(err?.message || 'Health check failed', 'error')
+ if (showError) {
+ showToast(err?.message || 'Health check failed', 'error')
+ }
} finally {
setCheckingHealth(prev => {
const next = new Set(prev)
@@ -1012,10 +1055,11 @@ export default function PortfolioPage() {
{/* MOBILE HEADER */}
+ {/* Top Row */}
-
-
Portfolio
+
+
Portfolio
-
-
-
{stats.active}
-
Active
-
-
-
{formatCurrency(summary?.total_value || 0).replace('$', '')}
-
Value
-
-
-
= 0 ? "text-accent" : "text-rose-400")}>
- {formatROI(summary?.overall_roi || 0)}
-
-
ROI
-
-
-
{stats.verified}
-
Verified
+
+ {/* Tab Bar - Scrollable */}
+
+
+
+
@@ -1092,34 +1151,34 @@ export default function PortfolioPage() {
- {/* TABS */}
+ {/* TABS - Matching Hunt page style */}
-
+
@@ -446,7 +437,6 @@ export default function SniperAlertsPage() {
alert={editingAlert}
onClose={() => { setShowCreateModal(false); setEditingAlert(null) }}
onSuccess={() => { loadAlerts(); setShowCreateModal(false); setEditingAlert(null) }}
- isTycoon={isTycoon}
/>
)}
@@ -457,11 +447,10 @@ export default function SniperAlertsPage() {
// CREATE/EDIT MODAL
// ============================================================================
-function CreateEditModal({ alert, onClose, onSuccess, isTycoon }: {
+function CreateEditModal({ alert, onClose, onSuccess }: {
alert: SniperAlert | null
onClose: () => void
onSuccess: () => void
- isTycoon: boolean
}) {
const isEditing = !!alert
const [loading, setLoading] = useState(false)
@@ -484,7 +473,6 @@ function CreateEditModal({ alert, onClose, onSuccess, isTycoon }: {
no_hyphens: alert?.no_hyphens || false,
exclude_chars: alert?.exclude_chars || '',
notify_email: alert?.notify_email ?? true,
- notify_sms: alert?.notify_sms || false,
})
const handleSubmit = async (e: React.FormEvent) => {
@@ -510,7 +498,6 @@ function CreateEditModal({ alert, onClose, onSuccess, isTycoon }: {
no_hyphens: form.no_hyphens,
exclude_chars: form.exclude_chars || null,
notify_email: form.notify_email,
- notify_sms: form.notify_sms && isTycoon,
}
if (isEditing && alert) {
@@ -584,18 +571,27 @@ function CreateEditModal({ alert, onClose, onSuccess, isTycoon }: {
+ {/* Sources info */}
+
+
Monitors
+
+
+
+ Auctions
+
+
+
+ Zone Drops
+
+
+
+
-
diff --git a/frontend/src/app/terminal/watchlist/page.tsx b/frontend/src/app/terminal/watchlist/page.tsx
index 595012a..13b85e6 100755
--- a/frontend/src/app/terminal/watchlist/page.tsx
+++ b/frontend/src/app/terminal/watchlist/page.tsx
@@ -35,12 +35,86 @@ import {
Search,
ChevronUp,
ChevronDown,
- Briefcase
+ Briefcase,
+ ShoppingCart,
+ Crosshair
} from 'lucide-react'
import clsx from 'clsx'
import Link from 'next/link'
import Image from 'next/image'
+// ============================================================================
+// ADD MODAL COMPONENT (like Portfolio)
+// ============================================================================
+
+function AddModal({
+ onClose,
+ onAdd
+}: {
+ onClose: () => void
+ onAdd: (domain: string) => Promise
+}) {
+ const [domain, setDomain] = useState('')
+ const [adding, setAdding] = useState(false)
+
+ const handleSubmit = async (e: React.FormEvent) => {
+ e.preventDefault()
+ if (!domain.trim()) return
+ setAdding(true)
+ try {
+ await onAdd(domain.trim().toLowerCase())
+ onClose()
+ } finally {
+ setAdding(false)
+ }
+ }
+
+ return (
+
+
e.stopPropagation()}
+ >
+
+
+
+ Add to Watchlist
+
+
+
+
+
+
+
+
+
+ )
+}
+
// ============================================================================
// HELPERS
// ============================================================================
@@ -75,9 +149,8 @@ export default function WatchlistPage() {
const { toast, showToast, hideToast } = useToast()
const openAnalyze = useAnalyzePanelStore((s) => s.open)
- const [newDomain, setNewDomain] = useState('')
- const [adding, setAdding] = useState(false)
- const [searchFocused, setSearchFocused] = useState(false)
+ // Modal state
+ const [showAddModal, setShowAddModal] = useState(false)
const [refreshingId, setRefreshingId] = useState(null)
const [deletingId, setDeletingId] = useState(null)
const [togglingNotifyId, setTogglingNotifyId] = useState(null)
@@ -155,21 +228,15 @@ export default function WatchlistPage() {
}, [sortField])
// Handlers
- const handleAdd = useCallback(async (e: React.FormEvent) => {
- e.preventDefault()
- if (!newDomain.trim()) return
- const domainName = newDomain.trim().toLowerCase()
- setAdding(true)
+ const handleAdd = useCallback(async (domainName: string) => {
try {
await addDomain(domainName)
showToast(`Added: ${domainName}`, 'success')
- setNewDomain('')
} catch (err: any) {
showToast(err.message || 'Failed', 'error')
- } finally {
- setAdding(false)
+ throw err
}
- }, [newDomain, addDomain, showToast])
+ }, [addDomain, showToast])
// Auto-trigger health check for newly added domains
useEffect(() => {
@@ -247,7 +314,7 @@ export default function WatchlistPage() {
// Mobile Nav
const mobileNavItems = [
- { href: '/terminal/hunt', label: 'Hunt', icon: Target, active: false },
+ { href: '/terminal/hunt', label: 'Hunt', icon: Crosshair, active: false },
{ href: '/terminal/watchlist', label: 'Watch', icon: Eye, active: true },
{ href: '/terminal/portfolio', label: 'Portfolio', icon: Briefcase, active: false },
{ href: '/terminal/intel', label: 'Intel', icon: TrendingUp, active: false },
@@ -260,14 +327,14 @@ export default function WatchlistPage() {
{
title: 'Discover',
items: [
- { href: '/terminal/hunt', label: 'Hunt', icon: Target },
+ { href: '/terminal/hunt', label: 'Hunt', icon: Crosshair },
{ href: '/terminal/intel', label: 'Intel', icon: TrendingUp },
]
},
{
title: 'Manage',
items: [
- { href: '/terminal/watchlist', label: 'Watchlist', icon: Eye },
+ { href: '/terminal/watchlist', label: 'Watchlist', icon: Eye, active: true },
{ href: '/terminal/portfolio', label: 'Portfolio', icon: Briefcase },
{ href: '/terminal/sniper', label: 'Sniper', icon: Target },
]
@@ -275,7 +342,7 @@ export default function WatchlistPage() {
{
title: 'Monetize',
items: [
- { href: '/terminal/yield', label: 'Yield', icon: Coins, isNew: true },
+ { href: '/terminal/yield', label: 'Yield', icon: Coins },
{ href: '/terminal/listing', label: 'For Sale', icon: Tag },
]
}
@@ -302,28 +369,31 @@ export default function WatchlistPage() {
{/* Top Row */}
-
- {stats.total} domains
- {stats.available} available
+
+ Watchlist
+
setShowAddModal(true)}
+ className="flex items-center gap-1.5 px-3 py-1.5 bg-accent text-black text-[10px] font-bold uppercase"
+ >
+
+ Add
+
{/* Stats Grid */}
-
+
{stats.total}
-
Tracked
+
Tracked
-
+
{stats.available}
-
Available
+
Available
-
+
{stats.expiring}
-
Expiring
+
Expiring
@@ -333,23 +403,26 @@ export default function WatchlistPage() {
{/* DESKTOP HEADER */}
{/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
-
+
-
- Watchlist
- {stats.total}
+
+ Watchlist
Track domains you want. Get alerts when they become available or expire.
-
+
+
+
{stats.total}
+
Tracked
+
{stats.available}
Available
@@ -358,43 +431,24 @@ export default function WatchlistPage() {
{stats.expiring}
Expiring
+
+
setShowAddModal(true)}
+ className="flex items-center gap-2 px-5 py-3 bg-accent text-black text-xs font-bold uppercase tracking-wider hover:bg-white transition-colors"
+ >
+
+ Add Domain
+
+
{/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
- {/* ADD DOMAIN + FILTERS */}
+ {/* FILTERS */}
{/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
-
- {/* Add Domain Form - Always visible with accent border */}
-
-
- {/* Filters */}
-
+
+
{[
{ value: 'all', label: 'All', count: stats.total },
{ value: 'available', label: 'Available', count: stats.available },
@@ -404,7 +458,7 @@ export default function WatchlistPage() {
key={item.value}
onClick={() => setFilter(item.value as typeof filter)}
className={clsx(
- "px-3 py-2 text-[10px] font-mono uppercase tracking-wider border transition-colors",
+ "shrink-0 px-3 py-2 text-[10px] font-mono uppercase tracking-wider border transition-colors",
filter === item.value
? "bg-white/10 text-white border-white/20"
: "text-white/40 border-transparent hover:text-white/60"
@@ -427,9 +481,9 @@ export default function WatchlistPage() {
Add a domain above to start monitoring
) : (
-
+
{/* Desktop Table Header */}
-
+
handleSortWatch('domain')} className="flex items-center gap-1 hover:text-white/60 text-left">
Domain
{sortField === 'domain' && (sortDirection === 'asc' ? : )}
@@ -462,13 +516,18 @@ export default function WatchlistPage() {
className="bg-[#020202] hover:bg-white/[0.02] transition-all"
>
{/* Mobile Row */}
-
-
-
+
+
+
{domain.is_available ? (
@@ -493,14 +552,16 @@ export default function WatchlistPage() {
- {domain.is_available ? 'AVAIL' : 'TAKEN'}
+ {domain.is_available ? 'β AVAIL' : 'TAKEN'}
{ setSelectedDomain(domain.id); handleHealthCheck(domain.id) }}
- className="flex items-center gap-1"
+ className="flex items-center gap-1 justify-end"
>
{loadingHealth[domain.id] ? (
@@ -514,6 +575,14 @@ export default function WatchlistPage() {
+ {/* Expiry Info */}
+ {days !== null && days <= 30 && days > 0 && !domain.is_available && (
+
+
+ Expires in {days} days
+
+ )}
+
{/* Actions */}
{domain.is_available ? (
@@ -521,44 +590,44 @@ export default function WatchlistPage() {
href={`https://www.namecheap.com/domains/registration/results/?domain=${domain.name}`}
target="_blank"
rel="noopener noreferrer"
- className="flex-1 py-2.5 bg-accent text-black text-[10px] font-bold uppercase tracking-wider flex items-center justify-center gap-1.5"
+ className="flex-1 py-3 bg-accent text-black text-[11px] font-bold uppercase tracking-wider flex items-center justify-center gap-2"
>
-
- Register
+
+ Buy Now
) : (
handleToggleNotify(domain.id, domain.notify_on_available)}
disabled={togglingNotifyId === domain.id}
className={clsx(
- "flex-1 py-2 text-[10px] font-bold uppercase tracking-wider border flex items-center justify-center gap-1.5 transition-all",
+ "flex-1 py-2.5 text-[10px] font-bold uppercase tracking-wider border flex items-center justify-center gap-1.5 transition-all",
domain.notify_on_available
- ? "border-accent bg-accent/10 text-accent"
- : "border-white/[0.08] text-white/40"
+ ? "border-accent/30 bg-accent/10 text-accent"
+ : "border-white/10 bg-white/[0.02] text-white/40"
)}
>
{togglingNotifyId === domain.id ? (
) : domain.notify_on_available ? (
-
+
) : (
-
+
)}
- Alert
+ {domain.notify_on_available ? 'Alert ON' : 'Set Alert'}
)}
handleRefresh(domain.id)}
disabled={refreshingId === domain.id}
- className="px-4 py-2 border border-white/[0.08] text-white/40"
+ className="px-3 py-2 border border-white/10 text-white/40 hover:bg-white/5"
>
openAnalyze(domain.name)}
- className="px-4 py-2 border border-white/[0.08] text-white/40 hover:text-white hover:bg-white/5"
+ className="px-3 py-2 border border-white/10 text-white/40 hover:text-accent hover:border-accent/20 hover:bg-accent/10"
title="Analyze"
>
@@ -567,7 +636,7 @@ export default function WatchlistPage() {
handleDelete(domain.id, domain.name)}
disabled={deletingId === domain.id}
- className="px-4 py-2 border border-white/[0.08] text-white/40 hover:text-rose-400 hover:border-rose-400/20 hover:bg-rose-400/5"
+ className="px-3 py-2 border border-white/10 text-white/40 hover:text-rose-400 hover:border-rose-400/20 hover:bg-rose-400/5"
>
{deletingId === domain.id ? (
@@ -579,21 +648,27 @@ export default function WatchlistPage() {
{/* Desktop Row */}
-
+
+ {/* Domain */}
{domain.is_available ? (
-
+
) : (
)}
-
+
openAnalyze(domain.name)}
className="text-sm font-bold text-white font-mono truncate group-hover:text-accent transition-colors text-left"
@@ -602,99 +677,115 @@ export default function WatchlistPage() {
{domain.name}
- {domain.registrar || 'Unknown'}
+ {domain.registrar || 'Unknown registrar'}
-
+
{/* Status */}
-
+
- {domain.is_available ? 'AVAIL' : 'TAKEN'}
+ {domain.is_available ? 'β AVAIL' : 'TAKEN'}
{/* Health */}
-
{ setSelectedDomain(domain.id); handleHealthCheck(domain.id) }}
- className="w-24 flex items-center gap-1.5 hover:opacity-80 transition-opacity shrink-0"
- >
- {loadingHealth[domain.id] ? (
-
- ) : (
- <>
-
- {config.label}
- >
- )}
-
+
+
{ setSelectedDomain(domain.id); handleHealthCheck(domain.id) }}
+ className={clsx(
+ "flex items-center gap-1.5 px-2 py-1 text-[10px] font-mono uppercase border transition-colors hover:opacity-80",
+ config.color,
+ config.bg.replace('bg-', 'bg-'),
+ "border-white/10"
+ )}
+ >
+ {loadingHealth[domain.id] ? (
+
+ ) : (
+ <>
+
+ {config.label}
+ >
+ )}
+
+
{/* Expires */}
-
+
{days !== null && days <= 30 && days > 0 ? (
- {days}d
+ {days}d left
) : (
- formatExpiryDate(domain.expiration_date)
+ {formatExpiryDate(domain.expiration_date)}
)}
{/* Alert */}
-
handleToggleNotify(domain.id, domain.notify_on_available)}
- disabled={togglingNotifyId === domain.id}
- className={clsx(
- "w-8 h-8 flex items-center justify-center border transition-colors shrink-0",
- domain.notify_on_available
- ? "text-accent border-accent/20 bg-accent/10"
- : "text-white/20 border-white/10 hover:text-white/40"
- )}
- >
- {togglingNotifyId === domain.id ? (
-
- ) : domain.notify_on_available ? (
-
- ) : (
-
- )}
-
+
+ handleToggleNotify(domain.id, domain.notify_on_available)}
+ disabled={togglingNotifyId === domain.id}
+ className={clsx(
+ "w-9 h-9 flex items-center justify-center border transition-colors",
+ domain.notify_on_available
+ ? "text-accent border-accent/30 bg-accent/10"
+ : "text-white/20 border-white/10 hover:text-white/40 hover:bg-white/5"
+ )}
+ >
+ {togglingNotifyId === domain.id ? (
+
+ ) : domain.notify_on_available ? (
+
+ ) : (
+
+ )}
+
+
{/* Actions */}
-
- {domain.is_available && (
+
+ {domain.is_available ? (
- Register
-
+
+ Buy Now
+ ) : (
+ <>
+
handleRefresh(domain.id)}
+ disabled={refreshingId === domain.id}
+ title="Refresh"
+ className="w-8 h-8 flex items-center justify-center text-white/30 hover:text-white border border-white/10 hover:bg-white/5 transition-all"
+ >
+
+
+
openAnalyze(domain.name)}
+ title="Analyze"
+ className="w-8 h-8 flex items-center justify-center text-white/30 hover:text-accent border border-white/10 hover:bg-accent/10 hover:border-accent/20 transition-all"
+ >
+
+
+ >
)}
-
handleRefresh(domain.id)}
- disabled={refreshingId === domain.id}
- className="w-7 h-7 flex items-center justify-center text-white/20 hover:text-white border border-white/10 hover:bg-white/5 transition-all"
- >
-
-
-
openAnalyze(domain.name)}
- className="w-7 h-7 flex items-center justify-center text-white/20 hover:text-accent border border-white/10 hover:bg-accent/10 hover:border-accent/20 transition-all"
- title="Analyze"
- >
-
-
handleDelete(domain.id, domain.name)}
disabled={deletingId === domain.id}
- className="w-7 h-7 flex items-center justify-center text-white/20 hover:text-rose-400 border border-white/10 hover:border-rose-400/20 hover:bg-rose-500/10 transition-all"
+ title="Remove"
+ className="w-8 h-8 flex items-center justify-center text-white/30 hover:text-rose-400 border border-white/10 hover:border-rose-400/20 hover:bg-rose-500/10 transition-all"
>
{deletingId === domain.id ? (
@@ -995,6 +1086,14 @@ export default function WatchlistPage() {
)}
+ {/* ADD MODAL */}
+ {showAddModal && (
+ setShowAddModal(false)}
+ onAdd={handleAdd}
+ />
+ )}
+
{toast && }
)
diff --git a/frontend/src/components/hunt/BrandableForgeTab.tsx b/frontend/src/components/hunt/BrandableForgeTab.tsx
index f224ed0..ef18cac 100644
--- a/frontend/src/components/hunt/BrandableForgeTab.tsx
+++ b/frontend/src/components/hunt/BrandableForgeTab.tsx
@@ -8,12 +8,15 @@ import {
Shield,
Sparkles,
Eye,
- RefreshCw,
Wand2,
Settings,
- ChevronRight,
Zap,
- Filter,
+ Copy,
+ Check,
+ ShoppingCart,
+ Star,
+ Lightbulb,
+ RefreshCw,
} from 'lucide-react'
import { api } from '@/lib/api'
import { useAnalyzePanelStore } from '@/lib/analyze-store'
@@ -24,24 +27,39 @@ import { useStore } from '@/lib/store'
// ============================================================================
const PATTERNS = [
- { key: 'cvcvc', label: 'CVCVC', desc: '5-letter brandables (Zalor, Mivex)' },
- { key: 'cvccv', label: 'CVCCV', desc: '5-letter variants (Bento, Salvo)' },
- { key: 'human', label: 'Human', desc: '2-syllable names (Siri, Alexa)' },
+ {
+ key: 'cvcvc',
+ label: 'CVCVC',
+ desc: 'Classic 5-letter brandables',
+ examples: ['Zalor', 'Mivex', 'Ronix'],
+ color: 'accent'
+ },
+ {
+ key: 'cvccv',
+ label: 'CVCCV',
+ desc: 'Punchy 5-letter names',
+ examples: ['Bento', 'Salvo', 'Vento'],
+ color: 'blue'
+ },
+ {
+ key: 'human',
+ label: 'Human',
+ desc: 'AI agent ready names',
+ examples: ['Siri', 'Alexa', 'Levi'],
+ color: 'purple'
+ },
]
-const TLDS = ['com', 'io', 'ai', 'co', 'net', 'org']
-
-// ============================================================================
-// HELPERS
-// ============================================================================
-
-function parseTlds(input: string): string[] {
- return input
- .split(',')
- .map((t) => t.trim().toLowerCase().replace(/^\./, ''))
- .filter(Boolean)
- .slice(0, 10)
-}
+const TLDS = [
+ { tld: 'com', premium: true, label: '.com' },
+ { tld: 'io', premium: true, label: '.io' },
+ { tld: 'ai', premium: true, label: '.ai' },
+ { tld: 'co', premium: false, label: '.co' },
+ { tld: 'net', premium: false, label: '.net' },
+ { tld: 'org', premium: false, label: '.org' },
+ { tld: 'app', premium: false, label: '.app' },
+ { tld: 'dev', premium: false, label: '.dev' },
+]
// ============================================================================
// COMPONENT
@@ -53,7 +71,7 @@ export function BrandableForgeTab({ showToast }: { showToast: (message: string,
// Config State
const [pattern, setPattern] = useState('cvcvc')
- const [selectedTlds, setSelectedTlds] = useState
(['com'])
+ const [selectedTlds, setSelectedTlds] = useState(['com', 'io'])
const [limit, setLimit] = useState(30)
const [showConfig, setShowConfig] = useState(false)
@@ -62,6 +80,7 @@ export function BrandableForgeTab({ showToast }: { showToast: (message: string,
const [items, setItems] = useState>([])
const [error, setError] = useState(null)
const [tracking, setTracking] = useState(null)
+ const [copied, setCopied] = useState(null)
const toggleTld = useCallback((tld: string) => {
setSelectedTlds((prev) =>
@@ -69,6 +88,18 @@ export function BrandableForgeTab({ showToast }: { showToast: (message: string,
)
}, [])
+ const copyDomain = useCallback((domain: string) => {
+ navigator.clipboard.writeText(domain)
+ setCopied(domain)
+ setTimeout(() => setCopied(null), 1500)
+ }, [])
+
+ const copyAll = useCallback(() => {
+ if (items.length === 0) return
+ navigator.clipboard.writeText(items.map(i => i.domain).join('\n'))
+ showToast(`Copied ${items.length} domains to clipboard`, 'success')
+ }, [items, showToast])
+
const run = useCallback(async () => {
if (selectedTlds.length === 0) {
showToast('Select at least one TLD', 'error')
@@ -76,11 +107,14 @@ export function BrandableForgeTab({ showToast }: { showToast: (message: string,
}
setLoading(true)
setError(null)
+ setItems([])
try {
const res = await api.huntBrandables({ pattern, tlds: selectedTlds, limit, max_checks: 400 })
setItems(res.items.map((i) => ({ domain: i.domain, status: i.status })))
if (res.items.length === 0) {
showToast('No available domains found. Try different settings.', 'info')
+ } else {
+ showToast(`Found ${res.items.length} available brandable domains!`, 'success')
}
} catch (e) {
const msg = e instanceof Error ? e.message : String(e)
@@ -98,7 +132,7 @@ export function BrandableForgeTab({ showToast }: { showToast: (message: string,
setTracking(domain)
try {
await addDomain(domain)
- showToast(`Tracked ${domain}`, 'success')
+ showToast(`Added to watchlist: ${domain}`, 'success')
} catch (e) {
showToast(e instanceof Error ? e.message : 'Failed to track domain', 'error')
} finally {
@@ -108,248 +142,343 @@ export function BrandableForgeTab({ showToast }: { showToast: (message: string,
[addDomain, showToast, tracking]
)
+ const currentPattern = PATTERNS.find(p => p.key === pattern)
+
return (
-
- {/* Header with Generate Button */}
+
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
+ {/* MAIN GENERATOR CARD */}
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
-
-
-
-
+ {/* Header */}
+
+
+
+
+
+
+
+
Brandable Forge
+
+ AI-powered brandable name generator
+
+
-
-
Brandable Forge
-
Generate available brandable names
+
+ setShowConfig(!showConfig)}
+ className={clsx(
+ "w-9 h-9 flex items-center justify-center border transition-all",
+ showConfig
+ ? "border-accent/30 bg-accent/10 text-accent"
+ : "border-white/10 text-white/40 hover:text-white hover:bg-white/5"
+ )}
+ title="Settings"
+ >
+
+
+
+ {loading ? (
+ <>
+
+ Generating...
+ >
+ ) : (
+ <>
+
+ Generate
+ >
+ )}
+
-
- setShowConfig(!showConfig)}
- className={clsx(
- "w-8 h-8 flex items-center justify-center border transition-colors",
- showConfig ? "border-accent/30 bg-accent/10 text-accent" : "border-white/10 text-white/30 hover:text-white hover:bg-white/5"
- )}
- >
-
-
-
- {loading ? : }
- Generate
-
-
{/* Pattern Selection */}
-
-
- {PATTERNS.map((p) => (
-
setPattern(p.key)}
- className={clsx(
- "flex-1 min-w-[120px] px-3 py-2 border transition-all text-left",
- pattern === p.key
- ? "border-accent bg-accent/10"
- : "border-white/[0.08] hover:border-white/20"
- )}
- >
-
- {p.label}
-
- {p.desc}
-
- ))}
+
+
+
+ Choose Pattern
+
+
+ {PATTERNS.map((p) => {
+ const isActive = pattern === p.key
+ const colorClass = p.color === 'accent' ? 'accent' : p.color === 'blue' ? 'blue-400' : 'purple-400'
+ return (
+
setPattern(p.key)}
+ className={clsx(
+ "p-4 border text-left transition-all group",
+ isActive
+ ? `border-${colorClass}/40 bg-${colorClass}/10`
+ : "border-white/[0.08] hover:border-white/20 bg-white/[0.02] hover:bg-white/[0.04]"
+ )}
+ >
+
+
+ {p.label}
+
+ {isActive && (
+
+ )}
+
+ {p.desc}
+
+ {p.examples.map((ex, i) => (
+
+ {ex}
+
+ ))}
+
+
+ )
+ })}
{/* TLD Selection */}
-
-
-
TLDs
+
+
+
+ Select TLDs
+ ({selectedTlds.length} selected)
+
+
setSelectedTlds(selectedTlds.length === TLDS.length ? ['com'] : TLDS.map(t => t.tld))}
+ className="text-[10px] font-mono text-accent hover:text-white transition-colors"
+ >
+ {selectedTlds.length === TLDS.length ? 'Select .com only' : 'Select all'}
+
-
- {TLDS.map((tld) => (
+
+ {TLDS.map((t) => (
toggleTld(tld)}
+ key={t.tld}
+ onClick={() => toggleTld(t.tld)}
className={clsx(
- "px-3 py-1.5 text-[10px] font-mono uppercase border transition-colors",
- selectedTlds.includes(tld)
+ "px-3 py-2 text-[11px] font-mono uppercase border transition-all flex items-center gap-1.5",
+ selectedTlds.includes(t.tld)
? "border-accent bg-accent/10 text-accent"
- : "border-white/[0.08] text-white/40 hover:text-white/60"
+ : "border-white/[0.08] text-white/40 hover:text-white/60 hover:border-white/20"
)}
>
- .{tld}
+ {t.premium && }
+ {t.label}
))}
- {/* Advanced Config (collapsed) */}
+ {/* Advanced Config */}
{showConfig && (
-
-
+
+
-
- Generate up to {limit} available brandable domains. We check via DNS/RDAP and only return verified available domains.
+
+
We'll check up to 400 random combinations and return the first {limit} verified available domains.
)}
{/* Stats Bar */}
-
-
{items.length} domains generated
-
-
- All verified available
+
+
+ {items.length > 0 ? (
+
+
+ {items.length} brandable domains ready
+
+ ) : (
+ 'Configure settings and click Generate'
+ )}
+ {items.length > 0 && (
+
+
+ Copy All
+
+ )}
{/* Error Message */}
{error && (
-
- {error}
+
+
+
+
+
+
{error}
+
+ Try again β
+
+
)}
- {/* Results Grid */}
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
+ {/* RESULTS */}
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
{items.length > 0 && (
-
- {/* Desktop Header */}
-
-
Domain
-
Status
-
Actions
+
+
+
+ Generated Domains
+
+
+
+ Regenerate
+
-
- {items.map((i) => (
-
- {/* Mobile Row */}
-
-
+
+
+ {items.map((i, idx) => (
+
+
-
-
-
-
-
openAnalyze(i.domain)}
- className="text-sm font-bold text-white font-mono truncate text-left"
- >
- {i.domain}
-
-
- AVAILABLE
-
+
+ {String(idx + 1).padStart(2, '0')}
+
openAnalyze(i.domain)}
+ className="text-sm font-bold text-white font-mono truncate group-hover:text-accent transition-colors text-left"
+ >
+ {i.domain}
+
+
+
+
+
+ β AVAIL
+
+
+
copyDomain(i.domain)}
+ className="w-8 h-8 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-all"
+ title="Copy"
+ >
+ {copied === i.domain ? : }
+
+
+
track(i.domain)}
+ disabled={tracking === i.domain}
+ className="w-8 h-8 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-all"
+ title="Add to Watchlist"
+ >
+ {tracking === i.domain ? : }
+
+
+
openAnalyze(i.domain)}
+ className="w-8 h-8 flex items-center justify-center border border-white/10 text-white/30 hover:text-accent hover:border-accent/20 hover:bg-accent/10 transition-all"
+ title="Analyze"
+ >
+
+
+
+
+
+ Buy
+
-
-
-
-
track(i.domain)}
- disabled={tracking === i.domain}
- className="flex-1 py-2 text-[10px] font-bold uppercase tracking-wider border border-white/[0.08] text-white/40 flex items-center justify-center gap-1.5 transition-all hover:text-white hover:bg-white/5"
- >
- {tracking === i.domain ? : }
- Track
-
-
openAnalyze(i.domain)}
- className="w-10 py-2 text-[10px] font-bold uppercase tracking-wider border border-white/[0.08] text-white/50 flex items-center justify-center transition-all hover:text-white hover:bg-white/5"
- >
-
-
-
- Register
-
-
+ ))}
+
+
+ )}
- {/* Desktop Row */}
-
-
-
-
-
-
openAnalyze(i.domain)}
- className="text-sm font-bold text-white font-mono truncate group-hover:text-accent transition-colors text-left"
- >
- {i.domain}
-
-
+ {/* Empty State */}
+ {items.length === 0 && !loading && (
+
+
+
+
+
Ready to forge
+
+ Select a pattern and TLDs, then click "Generate" to discover available brandable domain names
+
+
+ Verified available
+ β’
+ DNS checked
+
+
+ )}
-
-
- AVAILABLE
-
-
-
-
-
track(i.domain)}
- disabled={tracking === i.domain}
- className="w-7 h-7 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-colors"
- >
- {tracking === i.domain ? : }
-
-
openAnalyze(i.domain)}
- className="w-7 h-7 flex items-center justify-center border border-white/10 text-white/30 hover:text-accent hover:border-accent/20 hover:bg-accent/10 transition-colors"
- >
-
-
-
- Register
-
-
+ {/* Loading State */}
+ {loading && items.length === 0 && (
+
+ {[...Array(6)].map((_, i) => (
+
))}
)}
-
- {/* Empty State */}
- {items.length === 0 && !loading && (
-
-
-
No domains generated yet
-
Click "Generate" to create brandable names
-
- )}
-
)
}
diff --git a/frontend/src/components/hunt/TrendSurferTab.tsx b/frontend/src/components/hunt/TrendSurferTab.tsx
index 6b3054e..0ab9f26 100644
--- a/frontend/src/components/hunt/TrendSurferTab.tsx
+++ b/frontend/src/components/hunt/TrendSurferTab.tsx
@@ -11,16 +11,36 @@ import {
Eye,
TrendingUp,
RefreshCw,
- Filter,
- ChevronRight,
Globe,
Zap,
- X
+ X,
+ Check,
+ Copy,
+ ShoppingCart,
+ Flame,
+ ArrowRight,
+ AlertCircle
} from 'lucide-react'
import { api } from '@/lib/api'
import { useAnalyzePanelStore } from '@/lib/analyze-store'
import { useStore } from '@/lib/store'
+// ============================================================================
+// TYPES & CONSTANTS
+// ============================================================================
+
+const GEO_OPTIONS = [
+ { value: 'US', label: 'United States', flag: 'πΊπΈ' },
+ { value: 'CH', label: 'Switzerland', flag: 'π¨π' },
+ { value: 'DE', label: 'Germany', flag: 'π©πͺ' },
+ { value: 'GB', label: 'United Kingdom', flag: 'π¬π§' },
+ { value: 'FR', label: 'France', flag: 'π«π·' },
+ { value: 'CA', label: 'Canada', flag: 'π¨π¦' },
+ { value: 'AU', label: 'Australia', flag: 'π¦πΊ' },
+]
+
+const POPULAR_TLDS = ['com', 'io', 'ai', 'co', 'net', 'org', 'app', 'dev']
+
// ============================================================================
// HELPERS
// ============================================================================
@@ -48,6 +68,7 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
// Keyword Check State
const [keywordInput, setKeywordInput] = useState('')
const [keywordFocused, setKeywordFocused] = useState(false)
+ const [selectedTlds, setSelectedTlds] = useState
(['com', 'io', 'ai'])
const [availability, setAvailability] = useState>([])
const [checking, setChecking] = useState(false)
@@ -57,8 +78,15 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
const [typos, setTypos] = useState>([])
const [typoLoading, setTypoLoading] = useState(false)
- // Tracking State
+ // Tracking & Copy State
const [tracking, setTracking] = useState(null)
+ const [copied, setCopied] = useState(null)
+
+ const copyDomain = useCallback((domain: string) => {
+ navigator.clipboard.writeText(domain)
+ setCopied(domain)
+ setTimeout(() => setCopied(null), 1500)
+ }, [])
const track = useCallback(
async (domain: string) => {
@@ -66,7 +94,7 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
setTracking(domain)
try {
await addDomain(domain)
- showToast(`Tracked ${domain}`, 'success')
+ showToast(`Added to watchlist: ${domain}`, 'success')
} catch (e) {
showToast(e instanceof Error ? e.message : 'Failed to track domain', 'error')
} finally {
@@ -86,12 +114,11 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
} catch (e) {
const msg = e instanceof Error ? e.message : String(e)
setError(msg)
- showToast(msg, 'error')
setTrends([])
} finally {
if (isRefresh) setRefreshing(false)
}
- }, [geo, selected, showToast])
+ }, [geo, selected])
useEffect(() => {
let cancelled = false
@@ -111,12 +138,22 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
const keyword = useMemo(() => normalizeKeyword(keywordInput || selected || ''), [keywordInput, selected])
+ const toggleTld = useCallback((tld: string) => {
+ setSelectedTlds(prev =>
+ prev.includes(tld) ? prev.filter(t => t !== tld) : [...prev, tld]
+ )
+ }, [])
+
const runCheck = useCallback(async () => {
if (!keyword) return
+ if (selectedTlds.length === 0) {
+ showToast('Select at least one TLD', 'error')
+ return
+ }
setChecking(true)
try {
const kw = keyword.toLowerCase().replace(/\s+/g, '')
- const res = await api.huntKeywords({ keywords: [kw], tlds: ['com', 'io', 'ai', 'net', 'org'] })
+ const res = await api.huntKeywords({ keywords: [kw], tlds: selectedTlds })
setAvailability(res.items.map((r) => ({ domain: r.domain, status: r.status, is_available: r.is_available })))
} catch (e) {
const msg = e instanceof Error ? e.message : 'Failed to check availability'
@@ -125,7 +162,7 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
} finally {
setChecking(false)
}
- }, [keyword, showToast])
+ }, [keyword, selectedTlds, showToast])
const runTypos = useCallback(async () => {
const b = brand.trim()
@@ -134,6 +171,9 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
try {
const res = await api.huntTypos({ brand: b, tlds: ['com'], limit: 50 })
setTypos(res.items.map((i) => ({ domain: i.domain, status: i.status })))
+ if (res.items.length === 0) {
+ showToast('No available typo domains found', 'info')
+ }
} catch (e) {
const msg = e instanceof Error ? e.message : 'Failed to run typo check'
showToast(msg, 'error')
@@ -143,116 +183,169 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
}
}, [brand, showToast])
+ const availableCount = useMemo(() => availability.filter(a => a.status === 'available').length, [availability])
+ const currentGeo = GEO_OPTIONS.find(g => g.value === geo)
+
if (loading) {
return (
-
-
+
+ {/* Skeleton Loader */}
+
+
+
+ {[...Array(8)].map((_, i) => (
+
+ ))}
+
+
)
}
return (
-
- {/* Trends Header */}
+
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
+ {/* TRENDING TOPICS */}
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
-
-
-
-
+
+
+
+
+
+
+
+
Trending Now
+
+ Real-time Google Trends β’ {currentGeo?.flag} {currentGeo?.label}
+
+
-
-
Google Trends (24h)
-
Real-time trending topics
+
+
+ loadTrends(true)}
+ disabled={refreshing}
+ className="w-9 h-9 flex items-center justify-center border border-white/10 text-white/40 hover:text-white hover:bg-white/5 transition-all"
+ >
+
+
-
-
- loadTrends(true)}
- disabled={refreshing}
- className="w-8 h-8 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-colors"
- >
-
-
-
{error ? (
-
{error}
+
+
+
{error}
+
loadTrends(true)}
+ className="ml-auto text-[10px] font-mono text-rose-400 underline hover:no-underline"
+ >
+ Retry
+
+
) : (
-
- {trends.slice(0, 20).map((t) => {
- const active = selected === t.title
- return (
-
{
- setSelected(t.title)
- setKeywordInput('')
- setAvailability([])
- }}
- className={clsx(
- 'px-3 py-2 border text-xs font-mono transition-all',
- active
- ? 'border-accent bg-accent/10 text-accent'
- : 'border-white/[0.08] text-white/60 hover:border-white/20 hover:text-white'
- )}
- >
- {t.title}
- {t.approx_traffic && (
- {t.approx_traffic}
- )}
-
- )
- })}
+
+
+ {trends.slice(0, 16).map((t, idx) => {
+ const active = selected === t.title
+ const isHot = idx < 3
+ return (
+
{
+ setSelected(t.title)
+ setKeywordInput('')
+ setAvailability([])
+ }}
+ className={clsx(
+ 'group relative px-4 py-2.5 border text-left transition-all',
+ active
+ ? 'border-accent bg-accent/10'
+ : 'border-white/[0.08] hover:border-white/20 bg-white/[0.02] hover:bg-white/[0.04]'
+ )}
+ >
+
+ {isHot && (
+
+ π₯
+
+ )}
+
+ {t.title}
+
+
+ {t.approx_traffic && (
+ {t.approx_traffic}
+ )}
+
+ )
+ })}
+
+ {trends.length === 0 && (
+
+ No trends available for this region
+
+ )}
)}
- {/* Keyword Availability Check */}
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
+ {/* DOMAIN AVAILABILITY CHECKER */}
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
-
+
-
-
+
+
-
Domain Availability
-
Check {keyword || 'keyword'} across TLDs
+
Check Availability
+
+ {keyword ? `Find ${keyword.toLowerCase().replace(/\s+/g, '')} across multiple TLDs` : 'Select a trend or enter a keyword'}
+
-
-
+
+ {/* Keyword Input */}
+
-
+
setKeywordInput(e.target.value)}
onFocus={() => setKeywordFocused(true)}
onBlur={() => setKeywordFocused(false)}
- placeholder="Type a keyword..."
- className="flex-1 bg-transparent px-3 py-3 text-sm text-white placeholder:text-white/20 outline-none font-mono"
+ onKeyDown={(e) => e.key === 'Enter' && runCheck()}
+ placeholder="Enter keyword or select trend above..."
+ className="flex-1 bg-transparent px-3 py-3.5 text-sm text-white placeholder:text-white/25 outline-none font-mono"
/>
{(keywordInput || selected) && (
{ setKeywordInput(''); setSelected(''); setAvailability([]) }}
- className="p-3 text-white/30 hover:text-white"
+ className="p-3 text-white/30 hover:text-white transition-colors"
>
@@ -263,107 +356,179 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
onClick={runCheck}
disabled={!keyword || checking}
className={clsx(
- "px-4 py-3 text-xs font-bold uppercase tracking-wider transition-all",
+ "px-6 py-3 text-sm font-bold uppercase tracking-wider transition-all flex items-center gap-2",
!keyword || checking
- ? "bg-white/5 text-white/20"
+ ? "bg-white/5 text-white/20 cursor-not-allowed"
: "bg-accent text-black hover:bg-white"
)}
>
- {checking ? : "Check"}
+ {checking ? : }
+ Check
- {/* Results Grid */}
- {availability.length > 0 && (
-
- {availability.map((a) => (
-
-
-
-
openAnalyze(a.domain)}
- className="text-sm font-mono text-white/70 hover:text-accent truncate text-left"
- >
- {a.domain}
-
-
-
-
- {a.status.toUpperCase()}
-
-
track(a.domain)}
- disabled={tracking === a.domain}
- className="w-7 h-7 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-colors"
- >
- {tracking === a.domain ? : }
-
-
openAnalyze(a.domain)}
- className="w-7 h-7 flex items-center justify-center border border-white/10 text-white/30 hover:text-accent hover:border-accent/20 hover:bg-accent/10 transition-colors"
- >
-
-
- {a.status === 'available' && (
-
- Buy
-
- )}
-
-
+ {/* TLD Selection */}
+
+
+ Select TLDs
+ ({selectedTlds.length} selected)
+
+
+ {POPULAR_TLDS.map(tld => (
+ toggleTld(tld)}
+ className={clsx(
+ "px-3 py-1.5 text-[11px] font-mono uppercase border transition-all",
+ selectedTlds.includes(tld)
+ ? "border-accent bg-accent/10 text-accent"
+ : "border-white/[0.08] text-white/40 hover:text-white/60 hover:border-white/20"
+ )}
+ >
+ .{tld}
+
))}
+
+
+ {/* Results */}
+ {availability.length > 0 && (
+
+
+
+ Results β’ {availableCount} available
+
+
+
+ {availability.map((a) => {
+ const isAvailable = a.status === 'available'
+ return (
+
+
+
+
openAnalyze(a.domain)}
+ className={clsx(
+ "text-sm font-mono truncate text-left transition-colors",
+ isAvailable ? "text-white hover:text-accent" : "text-white/50"
+ )}
+ >
+ {a.domain}
+
+
+
+
+
+ {isAvailable ? 'β AVAIL' : 'TAKEN'}
+
+
+
copyDomain(a.domain)}
+ className="w-8 h-8 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-all"
+ title="Copy"
+ >
+ {copied === a.domain ? : }
+
+
+
track(a.domain)}
+ disabled={tracking === a.domain}
+ className="w-8 h-8 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-all"
+ title="Add to Watchlist"
+ >
+ {tracking === a.domain ? : }
+
+
+
openAnalyze(a.domain)}
+ className="w-8 h-8 flex items-center justify-center border border-white/10 text-white/30 hover:text-accent hover:border-accent/20 hover:bg-accent/10 transition-all"
+ title="Analyze"
+ >
+
+
+
+ {isAvailable && (
+
+
+ Buy
+
+ )}
+
+
+ )
+ })}
+
+
)}
+ {/* Empty State */}
{availability.length === 0 && keyword && !checking && (
-
-
-
Click "Check" to find available domains
+
+
+
Ready to check
+
+ Click "Check" to find available domains for {keyword.toLowerCase().replace(/\s+/g, '')}
+
)}
- {/* Typo Finder */}
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
+ {/* TYPO FINDER */}
+ {/* βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ */}
-
+
-
-
+
+
-
Typo Finder
-
Find available typos of big brands
+
Typo Finder
+
+ Find available misspellings of popular brands
+
-
-
+
+
-
+
setBrand(e.target.value)}
onFocus={() => setBrandFocused(true)}
onBlur={() => setBrandFocused(false)}
- placeholder="e.g. Shopify, Amazon, Google..."
- className="flex-1 bg-transparent px-3 py-3 text-sm text-white placeholder:text-white/20 outline-none font-mono"
+ onKeyDown={(e) => e.key === 'Enter' && runTypos()}
+ placeholder="Enter a brand name (e.g. Google, Amazon, Shopify)..."
+ className="flex-1 bg-transparent px-3 py-3.5 text-sm text-white placeholder:text-white/25 outline-none font-mono"
/>
{brand && (
{ setBrand(''); setTypos([]) }} className="p-3 text-white/30 hover:text-white">
@@ -376,35 +541,44 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
onClick={runTypos}
disabled={!brand.trim() || typoLoading}
className={clsx(
- "px-4 py-3 text-xs font-bold uppercase tracking-wider transition-all",
+ "px-6 py-3 text-sm font-bold uppercase tracking-wider transition-all flex items-center gap-2",
!brand.trim() || typoLoading
- ? "bg-white/5 text-white/20"
- : "bg-white/10 text-white hover:bg-white/20"
+ ? "bg-white/5 text-white/20 cursor-not-allowed"
+ : "bg-purple-500 text-white hover:bg-purple-400"
)}
>
- {typoLoading ? : "Find"}
+ {typoLoading ? : }
+ Find
- {/* Typo Results Grid */}
+ {/* Typo Results */}
{typos.length > 0 && (
-
+
{typos.map((t) => (
-
+
openAnalyze(t.domain)}
- className="text-xs font-mono text-white/70 group-hover:text-accent truncate text-left transition-colors"
+ className="text-xs font-mono text-white/70 group-hover:text-purple-400 truncate text-left transition-colors"
>
{t.domain}
-
-
- {t.status.toUpperCase()}
-
+
+ copyDomain(t.domain)}
+ className="w-6 h-6 flex items-center justify-center text-white/30 hover:text-white transition-colors"
+ title="Copy"
+ >
+ {copied === t.domain ? : }
+
track(t.domain)}
disabled={tracking === t.domain}
- className="w-6 h-6 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-colors"
+ className="w-6 h-6 flex items-center justify-center text-white/30 hover:text-white transition-colors"
+ title="Track"
>
{tracking === t.domain ? : }
@@ -412,7 +586,8 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
href={`https://www.namecheap.com/domains/registration/results/?domain=${t.domain}`}
target="_blank"
rel="noopener noreferrer"
- className="w-6 h-6 flex items-center justify-center border border-white/10 text-white/30 hover:text-white hover:bg-white/5 transition-colors"
+ className="w-6 h-6 flex items-center justify-center text-white/30 hover:text-accent transition-colors"
+ title="Buy"
>
@@ -422,9 +597,12 @@ export function TrendSurferTab({ showToast }: { showToast: (message: string, typ
)}
+ {/* Empty State */}
{typos.length === 0 && !typoLoading && (
-
- Enter a brand name to find available typo domains
+
+
+ Enter a brand name to discover available typo domains
+
)}