diff --git a/backend/app/services/auction_scraper.py b/backend/app/services/auction_scraper.py
index 5c1e0e1..a085873 100644
--- a/backend/app/services/auction_scraper.py
+++ b/backend/app/services/auction_scraper.py
@@ -68,11 +68,11 @@ class AuctionScraperService:
"""
Orchestrates scraping across multiple sources and stores results in DB.
"""
-
+
def __init__(self):
self.http_client: Optional[httpx.AsyncClient] = None
self._last_request: Dict[str, datetime] = {}
-
+
async def _get_client(self) -> httpx.AsyncClient:
"""Get or create HTTP client with appropriate headers (and optional proxy)."""
if self.http_client is None or self.http_client.is_closed:
@@ -92,7 +92,7 @@ class AuctionScraperService:
},
)
return self.http_client
-
+
async def _rate_limit(self, platform: str):
"""Enforce rate limiting per platform."""
min_interval = 60 / RATE_LIMITS.get(platform, 10)
@@ -102,7 +102,7 @@ class AuctionScraperService:
if elapsed < min_interval:
await asyncio.sleep(min_interval - elapsed)
self._last_request[platform] = datetime.utcnow()
-
+
# ----------------------------
# Parsing & validation helpers
# ----------------------------
@@ -305,7 +305,7 @@ class AuctionScraperService:
cleaned["currency"] = str(currency).strip().upper()
return cleaned
-
+
async def _store_auction(self, db: AsyncSession, auction_data: Dict[str, Any]) -> str:
"""Store or update an auction in the database. Returns 'new', 'updated' or 'skipped'."""
cleaned = self._sanitize_auction_payload(auction_data)
@@ -325,7 +325,7 @@ class AuctionScraperService:
)
)
existing = existing.scalar_one_or_none()
-
+
if existing:
# Prevent "end_time drift" on sources that only provide rounded time-left.
# `end_time` must never move later across scrapes; it may only stay the same or decrease.
@@ -384,15 +384,15 @@ class AuctionScraperService:
) -> Dict[str, Any]:
"""Scrape ExpiredDomains provider-specific auction pages (real Price/Bids/Endtime)."""
result = {"found": 0, "new": 0, "updated": 0}
-
+
log = AuctionScrapeLog(platform=platform)
db.add(log)
await db.commit()
-
+
try:
await self._rate_limit("ExpiredDomains")
client = await self._get_client()
-
+
resp = await client.get(url, timeout=20.0)
if resp.status_code != 200:
raise Exception(f"HTTP {resp.status_code}")
@@ -413,14 +413,14 @@ class AuctionScraperService:
now = datetime.utcnow()
for row in rows[:limit]:
- cols = row.find_all("td")
+ cols = row.find_all("td")
if len(cols) < len(headers):
- continue
-
+ continue
+
domain = cols[header_index["Domain"]].get_text(" ", strip=True).lower()
if not domain or "." not in domain:
- continue
-
+ continue
+
tld = domain.rsplit(".", 1)[-1].lower()
parsed_price = self._parse_price_currency(cols[header_index["Price"]].get_text(" ", strip=True))
@@ -428,8 +428,8 @@ class AuctionScraperService:
continue
current_bid, currency = parsed_price
if current_bid <= 0:
- continue
-
+ continue
+
bids_raw = cols[header_index["Bids"]].get_text(" ", strip=True)
try:
num_bids = int(re.sub(r"[^0-9]", "", bids_raw) or "0")
@@ -446,26 +446,26 @@ class AuctionScraperService:
href = domain_link.get("href") if domain_link else None
if href and href.startswith("/"):
href = f"https://www.expireddomains.net{href}"
-
- auction_data = {
- "domain": domain,
- "tld": tld,
- "platform": platform,
- "platform_auction_id": None,
+
+ auction_data = {
+ "domain": domain,
+ "tld": tld,
+ "platform": platform,
+ "platform_auction_id": None,
"auction_url": href or build_affiliate_url(platform, domain),
"current_bid": current_bid,
"currency": currency,
"num_bids": num_bids,
"end_time": end_time,
"scrape_source": f"expireddomains:{url}",
- }
-
- status = await self._store_auction(db, auction_data)
+ }
+
+ status = await self._store_auction(db, auction_data)
if status == "skipped":
continue
- result["found"] += 1
- result[status] += 1
-
+ result["found"] += 1
+ result[status] += 1
+
await db.commit()
log.completed_at = datetime.utcnow()
@@ -474,16 +474,16 @@ class AuctionScraperService:
log.auctions_new = result["new"]
log.auctions_updated = result["updated"]
await db.commit()
-
+
except Exception as e:
log.completed_at = datetime.utcnow()
log.status = "failed"
log.error_message = str(e)[:500]
await db.commit()
logger.error(f"ExpiredDomains({platform}) scrape failed: {e}")
-
+
return result
-
+
async def _scrape_expireddomains_godaddy(self, db: AsyncSession) -> Dict[str, Any]:
return await self._scrape_expireddomains_auction_page(
db=db,
@@ -509,15 +509,15 @@ class AuctionScraperService:
"""Scrape Park.io public auctions page (includes price + close date)."""
platform = "Park.io"
result = {"found": 0, "new": 0, "updated": 0}
-
+
log = AuctionScrapeLog(platform=platform)
db.add(log)
await db.commit()
-
+
try:
await self._rate_limit(platform)
client = await self._get_client()
-
+
resp = await client.get("https://park.io/auctions", timeout=20.0)
if resp.status_code != 200:
raise Exception(f"HTTP {resp.status_code}")
@@ -531,8 +531,8 @@ class AuctionScraperService:
for row in rows[:200]:
cols = row.find_all("td")
if len(cols) < 5:
- continue
-
+ continue
+
domain = cols[1].get_text(" ", strip=True).lower()
if not domain or "." not in domain:
continue
@@ -544,14 +544,14 @@ class AuctionScraperService:
continue
current_bid, currency = parsed_price
if current_bid <= 0:
- continue
-
+ continue
+
bids_raw = cols[3].get_text(" ", strip=True)
try:
num_bids = int(re.sub(r"[^0-9]", "", bids_raw) or "0")
except Exception:
- continue
-
+ continue
+
close_raw = cols[4].get_text(" ", strip=True)
try:
# Park.io displays a naive timestamp in their platform timezone.
@@ -567,25 +567,25 @@ class AuctionScraperService:
href = link_el["href"] if link_el else None
if href and href.startswith("/"):
href = f"https://park.io{href}"
-
- auction_data = {
- "domain": domain,
- "tld": tld,
- "platform": platform,
+
+ auction_data = {
+ "domain": domain,
+ "tld": tld,
+ "platform": platform,
"auction_url": href or "https://park.io/auctions",
"current_bid": current_bid,
"currency": currency,
- "num_bids": num_bids,
+ "num_bids": num_bids,
"end_time": end_time,
"scrape_source": "park.io:auctions",
- }
-
- status = await self._store_auction(db, auction_data)
+ }
+
+ status = await self._store_auction(db, auction_data)
if status == "skipped":
continue
- result["found"] += 1
- result[status] += 1
-
+ result["found"] += 1
+ result[status] += 1
+
await db.commit()
log.completed_at = datetime.utcnow()
@@ -594,29 +594,29 @@ class AuctionScraperService:
log.auctions_new = result["new"]
log.auctions_updated = result["updated"]
await db.commit()
-
+
except Exception as e:
log.completed_at = datetime.utcnow()
log.status = "failed"
log.error_message = str(e)[:500]
await db.commit()
logger.error(f"Park.io scrape failed: {e}")
-
+
return result
-
+
async def _scrape_sav_public(self, db: AsyncSession) -> Dict[str, Any]:
"""Scrape Sav auctions from their HTML table endpoint."""
platform = "Sav"
result = {"found": 0, "new": 0, "updated": 0}
-
+
log = AuctionScrapeLog(platform=platform)
db.add(log)
await db.commit()
-
+
try:
await self._rate_limit(platform)
client = await self._get_client()
-
+
now = datetime.utcnow()
for page in range(0, 3):
resp = await client.post(
@@ -636,7 +636,7 @@ class AuctionScraperService:
cells = row.find_all("td")
if len(cells) < 7:
continue
-
+
domain_link = cells[1].find("a")
domain = domain_link.get_text(" ", strip=True).lower() if domain_link else ""
if not domain or "." not in domain:
@@ -655,38 +655,38 @@ class AuctionScraperService:
try:
num_bids = int(re.sub(r"[^0-9]", "", bids_raw) or "0")
except Exception:
- continue
-
+ continue
+
time_left_raw = cells[6].get_text(" ", strip=True)
delta = self._parse_timeleft(time_left_raw)
if not delta:
- continue
+ continue
end_time = now + delta
href = domain_link.get("href") if domain_link else None
if href and href.startswith("/"):
href = f"https://www.sav.com{href}"
-
- auction_data = {
- "domain": domain,
+
+ auction_data = {
+ "domain": domain,
"tld": tld,
- "platform": platform,
+ "platform": platform,
"auction_url": href or "https://www.sav.com/domains/auctions",
"current_bid": current_bid,
"currency": currency,
"num_bids": num_bids,
"end_time": end_time,
"scrape_source": f"sav:load_domains_ajax:{page}",
- }
-
- status = await self._store_auction(db, auction_data)
+ }
+
+ status = await self._store_auction(db, auction_data)
if status == "skipped":
continue
- result["found"] += 1
- result[status] += 1
-
+ result["found"] += 1
+ result[status] += 1
+
await asyncio.sleep(1)
-
+
await db.commit()
log.completed_at = datetime.utcnow()
@@ -695,16 +695,16 @@ class AuctionScraperService:
log.auctions_new = result["new"]
log.auctions_updated = result["updated"]
await db.commit()
-
+
except Exception as e:
log.completed_at = datetime.utcnow()
log.status = "failed"
log.error_message = str(e)[:500]
await db.commit()
logger.error(f"Sav scrape failed: {e}")
-
+
return result
-
+
# ----------------------------
# Orchestration
# ----------------------------
@@ -729,7 +729,7 @@ class AuctionScraperService:
for item in hidden_api_result.get("items", []):
action = await self._store_auction(db, item)
if action == "skipped":
- continue
+ continue
platform = item.get("platform", "Unknown")
_touch_platform(platform)
results["platforms"][platform]["found"] += 1
@@ -777,7 +777,7 @@ class AuctionScraperService:
results["total_found"] += r.get("found", 0)
results["total_new"] += r.get("new", 0)
results["total_updated"] += r.get("updated", 0)
- except Exception as e:
+ except Exception as e:
results["errors"].append(f"{platform_name}: {str(e)}")
# TIER 3: Playwright (opt-in)
@@ -804,98 +804,98 @@ class AuctionScraperService:
results["errors"].append(f"Playwright: {error}")
except Exception as e:
results["errors"].append(f"Playwright: {str(e)}")
-
- await db.commit()
+
+ await db.commit()
await self._cleanup_ended_auctions(db)
return results
# ----------------------------
# Tier 1 helpers (official APIs)
# ----------------------------
-
+
async def _fetch_dropcatch_api(self, db: AsyncSession) -> Dict[str, Any]:
platform = "DropCatch"
result = {"found": 0, "new": 0, "updated": 0, "source": "api"}
-
+
if not dropcatch_client.is_configured:
return result
-
+
log = AuctionScrapeLog(platform=platform)
db.add(log)
await db.commit()
-
+
try:
api_result = await dropcatch_client.search_auctions(page_size=100)
auctions = api_result.get("auctions") or api_result.get("items") or []
result["found"] = len(auctions)
-
+
for dc_auction in auctions:
- auction_data = dropcatch_client.transform_to_pounce_format(dc_auction)
+ auction_data = dropcatch_client.transform_to_pounce_format(dc_auction)
status = await self._store_auction(db, auction_data)
if status == "skipped":
- continue
+ continue
result[status] += 1
-
+
await db.commit()
-
+
log.status = "success"
log.auctions_found = result["found"]
log.auctions_new = result["new"]
log.auctions_updated = result["updated"]
log.completed_at = datetime.utcnow()
await db.commit()
-
+
except Exception as e:
log.status = "failed"
log.error_message = str(e)[:500]
log.completed_at = datetime.utcnow()
await db.commit()
- return result
-
+ return result
+
async def _fetch_sedo_api(self, db: AsyncSession) -> Dict[str, Any]:
platform = "Sedo"
result = {"found": 0, "new": 0, "updated": 0, "source": "api"}
-
+
if not sedo_client.is_configured:
return result
-
+
log = AuctionScrapeLog(platform=platform)
db.add(log)
await db.commit()
-
+
try:
api_result = await sedo_client.search_auctions(page_size=100)
listings = api_result.get("domains") or api_result.get("items") or api_result.get("result") or []
if isinstance(listings, dict):
listings = list(listings.values()) if listings else []
-
+
result["found"] = len(listings)
-
+
for sedo_listing in listings:
- auction_data = sedo_client.transform_to_pounce_format(sedo_listing)
+ auction_data = sedo_client.transform_to_pounce_format(sedo_listing)
status = await self._store_auction(db, auction_data)
if status == "skipped":
- continue
+ continue
result[status] += 1
-
+
await db.commit()
-
+
log.status = "success"
log.auctions_found = result["found"]
log.auctions_new = result["new"]
log.auctions_updated = result["updated"]
log.completed_at = datetime.utcnow()
await db.commit()
-
+
except Exception as e:
log.status = "failed"
log.error_message = str(e)[:500]
log.completed_at = datetime.utcnow()
await db.commit()
-
+
return result
-
+
# ----------------------------
# DB cleanup / queries
# ----------------------------
@@ -903,7 +903,7 @@ class AuctionScraperService:
async def _cleanup_ended_auctions(self, db: AsyncSession):
"""Mark auctions that have ended as inactive and delete very old inactive auctions."""
now = datetime.utcnow()
-
+
from sqlalchemy import update
await db.execute(
@@ -911,14 +911,14 @@ class AuctionScraperService:
.where(and_(DomainAuction.end_time < now, DomainAuction.is_active == True))
.values(is_active=False)
)
-
+
cutoff = now - timedelta(days=30)
await db.execute(
delete(DomainAuction).where(and_(DomainAuction.is_active == False, DomainAuction.end_time < cutoff))
)
-
+
await db.commit()
-
+
async def get_active_auctions(
self,
db: AsyncSession,
@@ -934,7 +934,7 @@ class AuctionScraperService:
) -> List[DomainAuction]:
"""Get active auctions from database with filters."""
query = select(DomainAuction).where(DomainAuction.is_active == True)
-
+
if platform:
query = query.where(DomainAuction.platform == platform)
if tld:
@@ -948,7 +948,7 @@ class AuctionScraperService:
if ending_within_hours:
cutoff = datetime.utcnow() + timedelta(hours=ending_within_hours)
query = query.where(DomainAuction.end_time <= cutoff)
-
+
if sort_by == "end_time":
query = query.order_by(DomainAuction.end_time.asc())
elif sort_by == "bid_asc":
@@ -957,17 +957,17 @@ class AuctionScraperService:
query = query.order_by(DomainAuction.current_bid.desc())
elif sort_by == "bids":
query = query.order_by(DomainAuction.num_bids.desc())
-
+
result = await db.execute(query.offset(offset).limit(limit))
return list(result.scalars().all())
-
+
async def get_auction_count(self, db: AsyncSession) -> int:
"""Get total count of active auctions."""
from sqlalchemy import func
result = await db.execute(select(func.count(DomainAuction.id)).where(DomainAuction.is_active == True))
return result.scalar() or 0
-
+
async def close(self):
"""Close HTTP client."""
if self.http_client and not self.http_client.is_closed:
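
Note on the "end_time drift" guard referenced in _store_auction: the hunk above includes only the comment, not the guard itself. A minimal sketch of the invariant it describes (assuming naive UTC datetimes on both sides; the helper name is hypothetical, not part of this patch):

from datetime import datetime
from typing import Optional

def reconcile_end_time(stored: Optional[datetime], scraped: datetime) -> datetime:
    # Sources that only publish a rounded "time left" make the recomputed
    # end_time jitter upward between scrapes; taking the minimum of the stored
    # value and the fresh estimate keeps end_time stable or decreasing.
    if stored is None:
        return scraped
    return min(stored, scraped)

In the diff this logic would apply on the `if existing:` branch where the drift comment sits, before the row is written back.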
diff --git a/backend/app/services/domain_checker.py b/backend/app/services/domain_checker.py
index 851bf88..2628f08 100644
--- a/backend/app/services/domain_checker.py
+++ b/backend/app/services/domain_checker.py
@@ -265,8 +265,8 @@ class DomainChecker:
return None
except Exception as e:
logger.warning(f"Custom RDAP error for {domain}: {e}")
- return None
-
+ return None
+
async def _check_rdap(self, domain: str) -> Optional[DomainCheckResult]:
"""
Check domain using RDAP (Registration Data Access Protocol).
@@ -459,12 +459,12 @@ class DomainChecker:
'object does not exist',
]
if any(phrase in error_str for phrase in not_found_phrases):
- return DomainCheckResult(
- domain=domain,
- status=DomainStatus.AVAILABLE,
- is_available=True,
- check_method="whois",
- )
+ return DomainCheckResult(
+ domain=domain,
+ status=DomainStatus.AVAILABLE,
+ is_available=True,
+ check_method="whois",
+ )
# Otherwise it's a real error
return DomainCheckResult(
domain=domain,
@@ -589,16 +589,16 @@ class DomainChecker:
# Priority 3: Fall back to WHOIS (skip for TLDs that block it like .ch)
if tld not in self.CUSTOM_RDAP_ENDPOINTS:
- whois_result = await self._check_whois(domain)
-
- # Validate with DNS
- if whois_result.is_available:
- dns_available = await self._check_dns(domain)
- if not dns_available:
- whois_result.status = DomainStatus.TAKEN
- whois_result.is_available = False
-
- return whois_result
+ whois_result = await self._check_whois(domain)
+
+ # Validate with DNS
+ if whois_result.is_available:
+ dns_available = await self._check_dns(domain)
+ if not dns_available:
+ whois_result.status = DomainStatus.TAKEN
+ whois_result.is_available = False
+
+ return whois_result
# Final fallback: DNS-only check (for TLDs where everything else failed)
dns_available = await self._check_dns(domain)
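
The _check_rdap body falls outside these hunks; only its docstring is visible. As a rough standalone illustration of the RDAP idea (a 404 on the domain object generally means the name is not registered), this is a sketch rather than the service's actual implementation; the public rdap.org bootstrap redirector and the tri-state return are assumptions:

import httpx
from typing import Optional

async def rdap_is_available(domain: str) -> Optional[bool]:
    # rdap.org redirects to the authoritative registry RDAP server for the TLD.
    async with httpx.AsyncClient(follow_redirects=True, timeout=10.0) as client:
        resp = await client.get(f"https://rdap.org/domain/{domain}")
    if resp.status_code == 404:
        return True    # no registration object: likely available
    if resp.status_code == 200:
        return False   # registration data returned: taken
    return None        # inconclusive; caller falls back to WHOIS/DNS as in the hunks above

The WHOIS hunk above applies the same defensive pattern: an "available" verdict is kept only if a DNS lookup also finds nothing.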
diff --git a/backend/app/services/hidden_api_scrapers.py b/backend/app/services/hidden_api_scrapers.py
index 028ede3..56aad8f 100644
--- a/backend/app/services/hidden_api_scrapers.py
+++ b/backend/app/services/hidden_api_scrapers.py
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
def build_affiliate_url(platform: str, domain: str, original_url: Optional[str] = None) -> str:
"""
Build an affiliate URL for a given platform and domain.
-
+
If the affiliate program is not configured, returns the plain provider URL.
If `original_url` is provided, it is preferred (e.g. ExpiredDomains click-through links).
"""
@@ -73,14 +73,14 @@ def build_affiliate_url(platform: str, domain: str, original_url: Optional[str]
class DynadotApiScraper:
"""
Scraper for Dynadot Marketplace using their hidden JSON API.
-
+
Endpoint:
- https://www.dynadot.com/dynadot-vue-api/dynadot-service/marketplace-api
"""
-
+
BASE_URL = "https://www.dynadot.com"
MARKETPLACE_API = "/dynadot-vue-api/dynadot-service/marketplace-api"
-
+
def _parse_end_time(self, item: Dict[str, Any]) -> Optional[datetime]:
# Dynadot often provides an epoch timestamp in ms
end_time_stamp = item.get("end_time_stamp")
@@ -121,7 +121,7 @@ class DynadotApiScraper:
}
if keyword:
params["keyword"] = keyword
-
+
resp = await client.post(
f"{self.BASE_URL}{self.MARKETPLACE_API}",
params=params,
@@ -131,13 +131,13 @@ class DynadotApiScraper:
"Referer": "https://www.dynadot.com/market",
},
)
-
+
if resp.status_code != 200:
return {"items": [], "total": 0, "error": f"HTTP {resp.status_code}: {resp.text[:200]}"}
data = resp.json()
listings = data.get("data", {}).get("records", []) or data.get("data", {}).get("list", [])
-
+
transformed: List[Dict[str, Any]] = []
for item in listings:
domain = item.get("domain") or item.get("name") or item.get("utf8_name") or ""
@@ -170,21 +170,21 @@ class DynadotApiScraper:
transformed.append(
{
- "domain": domain,
+ "domain": domain,
"tld": tld,
- "platform": "Dynadot",
+ "platform": "Dynadot",
"current_bid": current_bid,
"currency": str(item.get("bid_price_currency") or "USD").upper(),
"num_bids": num_bids,
"end_time": end_time,
"auction_url": build_affiliate_url("Dynadot", domain),
- "buy_now_price": float(item.get("accepted_bid_price")) if item.get("accepted_bid_price") else None,
+ "buy_now_price": float(item.get("accepted_bid_price")) if item.get("accepted_bid_price") else None,
"age_years": int(item.get("age", 0) or 0) or None,
"backlinks": int(item.get("links", 0) or 0) or None,
"scrape_source": "dynadot:hidden_api",
}
)
-
+
return {
"items": transformed,
"total": data.get("data", {}).get("total_count", len(transformed)),
@@ -197,10 +197,10 @@ class DynadotApiScraper:
class HiddenApiScraperService:
"""Orchestrates enabled hidden API scrapers."""
-
+
def __init__(self):
self.dynadot = DynadotApiScraper()
-
+
async def scrape_all(self, limit_per_platform: int = 100) -> Dict[str, Any]:
results: Dict[str, Any] = {"total_found": 0, "platforms": {}, "errors": [], "items": []}
@@ -212,12 +212,12 @@ class HiddenApiScraperService:
}
results["items"].extend(dynadot_data.get("items", []))
results["total_found"] += len(dynadot_data.get("items", []))
-
+
if dynadot_data.get("error"):
results["errors"].append(f"Dynadot: {dynadot_data['error']}")
except Exception as e:
results["errors"].append(f"Dynadot: {str(e)}")
-
+
return results
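
_parse_end_time notes that Dynadot usually returns end_time_stamp as epoch milliseconds, but the rest of the parsing is outside this hunk. A minimal sketch of that handling (the millisecond-vs-second heuristic and the naive-UTC normalization are assumptions chosen to match the datetime.utcnow() comparisons used elsewhere in the diff):

from datetime import datetime, timezone
from typing import Any, Dict, Optional

def parse_epoch_end_time(item: Dict[str, Any]) -> Optional[datetime]:
    raw = item.get("end_time_stamp")
    if raw is None:
        return None
    try:
        ts = float(raw)
    except (TypeError, ValueError):
        return None
    if ts > 1e12:  # values this large are almost certainly milliseconds, not seconds
        ts /= 1000.0
    # Normalize to naive UTC so comparisons with datetime.utcnow() stay consistent.
    return datetime.fromtimestamp(ts, tz=timezone.utc).replace(tzinfo=None)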
diff --git a/frontend/src/app/terminal/intel/[tld]/page.tsx b/frontend/src/app/terminal/intel/[tld]/page.tsx
index d97f492..49fc144 100644
--- a/frontend/src/app/terminal/intel/[tld]/page.tsx
+++ b/frontend/src/app/terminal/intel/[tld]/page.tsx
@@ -499,7 +499,7 @@ export default function CommandTldDetailPage() {
)}>
The extension .{tld} is not currently tracked.
Historical registration price trends
-Instantly check if your desired .{details.tld} domain is available.
-Live comparison sorted by price
${registrar.registration_price.toFixed(2)}
-
+
{canSeeRenewal ? (
${registrar.renewal_price.toFixed(2)}
-
+
) : (
—
)}
@@ -856,7 +856,7 @@ export default function CommandTldDetailPage() {
})}
+Real-time monitoring across {marketStats.totalAuctions.toLocaleString()}+ auctions.
+Your targets. Your intel.
+
+{/* Stats Row */}
-Search domains, track availability, discover opportunities.
-Monitor {stats.total} domain{stats.total !== 1 ? 's' : ''} for availability changes
-