feat: Add SEO Juice Detector (Tycoon feature)
From analysis_3.md - Strategie 3: SEO-Daten & Backlinks: "SEO agencies hunt domains for their power (backlinks). Such domains are worth €100-500 to SEOs, even if the name is ugly."

BACKEND:
- Model: DomainSEOData for caching SEO metrics
- Service: seo_analyzer.py with Moz API integration
- Falls back to estimation if no API keys are configured
- Detects notable links (Wikipedia, .gov, .edu, news)
- Calculates an SEO value estimate
- API: /seo endpoints (Tycoon-only access)

FRONTEND:
- /command/seo page with full SEO analysis
- Upgrade prompt for non-Tycoon users
- Notable links display (Wikipedia, .gov, .edu, news)
- Top backlinks with authority scores
- Recent searches saved locally

SIDEBAR:
- Added 'SEO Juice' nav item with 'Tycoon' badge

DOCS:
- Updated DATABASE_MIGRATIONS.md with the domain_seo_data table
- Added SEO API endpoint documentation
- Added Moz API environment variable info
@@ -166,6 +166,34 @@ CREATE TABLE sniper_alert_matches (

CREATE INDEX idx_matches_alert_id ON sniper_alert_matches(alert_id);
```

#### 3. SEO Data (Tycoon Feature)

```sql
-- Cached SEO metrics for domains
CREATE TABLE domain_seo_data (
    id SERIAL PRIMARY KEY,
    domain VARCHAR(255) NOT NULL UNIQUE,
    domain_authority INTEGER,
    page_authority INTEGER,
    spam_score INTEGER,
    total_backlinks INTEGER,
    referring_domains INTEGER,
    top_backlinks JSONB,
    notable_backlinks TEXT,
    has_wikipedia_link BOOLEAN DEFAULT FALSE,
    has_gov_link BOOLEAN DEFAULT FALSE,
    has_edu_link BOOLEAN DEFAULT FALSE,
    has_news_link BOOLEAN DEFAULT FALSE,
    seo_value_estimate FLOAT,
    data_source VARCHAR(50) DEFAULT 'moz',
    last_updated TIMESTAMP DEFAULT NOW(),
    expires_at TIMESTAMP,
    fetch_count INTEGER DEFAULT 0
);

CREATE INDEX idx_seo_domain ON domain_seo_data(domain);
```

---

## Migration Commands
@@ -201,7 +229,8 @@ AND table_name IN (
     'listing_inquiries',
     'listing_views',
     'sniper_alerts',
-    'sniper_alert_matches'
+    'sniper_alert_matches',
+    'domain_seo_data'
 );
 ```
@@ -244,3 +273,19 @@ These tables implement features from:

| GET | `/api/v1/sniper-alerts/{id}/matches` | Get matched auctions |
| POST | `/api/v1/sniper-alerts/{id}/test` | Test alert criteria |

### SEO Data (Tycoon Only)

| Method | Endpoint | Description |
|--------|----------|-------------|
| GET | `/api/v1/seo/{domain}` | Full SEO analysis (Tycoon) |
| POST | `/api/v1/seo/batch` | Batch analyze domains (Tycoon) |
| GET | `/api/v1/seo/{domain}/quick` | Quick summary (Trader+) |

**Environment Variables for Moz API:**
```
MOZ_ACCESS_ID=your_access_id
MOZ_SECRET_KEY=your_secret_key
```

Without these, the system uses estimation mode based on domain characteristics.
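For illustration, a minimal client sketch against these endpoints (the base URL, port, and bearer-token auth scheme are assumptions for this example, not part of the commit):

```python
# Hypothetical client usage -- base URL and auth scheme are assumed for illustration.
import httpx

API = "http://localhost:8000/api/v1"           # assumed dev server
HEADERS = {"Authorization": "Bearer <token>"}  # assumed auth scheme

def fetch_seo(domain: str) -> dict:
    """Call the Tycoon-only full-analysis endpoint and return the parsed JSON."""
    resp = httpx.get(f"{API}/seo/{domain}", headers=HEADERS, timeout=30.0)
    resp.raise_for_status()  # a 403 here means the account is not on the Tycoon tier
    return resp.json()

data = fetch_seo("example.com")
if data["is_estimated"]:
    # data_source == 'estimated': no Moz keys configured on the backend
    print(f"{data['domain']}: estimated DA {data['metrics']['domain_authority']}")
else:
    print(f"{data['domain']}: DA {data['metrics']['domain_authority']}, "
          f"~${data['estimated_value']} estimated SEO value")
```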
@@ -16,6 +16,7 @@ from app.api.price_alerts import router as price_alerts_router
from app.api.blog import router as blog_router
from app.api.listings import router as listings_router
from app.api.sniper_alerts import router as sniper_alerts_router
from app.api.seo import router as seo_router

api_router = APIRouter()

@@ -36,6 +37,9 @@ api_router.include_router(listings_router, prefix="/listings", tags=["Marketplac
# Sniper Alerts - from analysis_3.md
api_router.include_router(sniper_alerts_router, prefix="/sniper-alerts", tags=["Sniper Alerts"])

# SEO Data / Backlinks - from analysis_3.md (Tycoon-only)
api_router.include_router(seo_router, prefix="/seo", tags=["SEO Data - Tycoon"])

# Support & Communication
api_router.include_router(contact_router, prefix="/contact", tags=["Contact & Newsletter"])
242 backend/app/api/seo.py Normal file
@@ -0,0 +1,242 @@
"""
|
||||
SEO Data API - "SEO Juice Detector"
|
||||
|
||||
This implements Strategie 3 from analysis_3.md:
|
||||
"Das Feature: 'SEO Juice Detector'
|
||||
Wenn eine Domain droppt, prüfst du nicht nur den Namen,
|
||||
sondern ob Backlinks existieren.
|
||||
Monetarisierung: Das ist ein reines Tycoon-Feature ($29/Monat)."
|
||||
|
||||
Endpoints:
|
||||
- GET /seo/{domain} - Get SEO data for a domain (TYCOON ONLY)
|
||||
- POST /seo/batch - Analyze multiple domains (TYCOON ONLY)
|
||||
"""
|
||||
import logging
|
||||
from typing import List
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.api.deps import get_current_user
|
||||
from app.models.user import User
|
||||
from app.services.seo_analyzer import seo_analyzer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# ============== Schemas ==============
|
||||
|
||||
class SEOMetrics(BaseModel):
|
||||
domain_authority: int | None
|
||||
page_authority: int | None
|
||||
spam_score: int | None
|
||||
total_backlinks: int | None
|
||||
referring_domains: int | None
|
||||
|
||||
|
||||
class NotableLinks(BaseModel):
|
||||
has_wikipedia: bool
|
||||
has_gov: bool
|
||||
has_edu: bool
|
||||
has_news: bool
|
||||
notable_domains: List[str]
|
||||
|
||||
|
||||
class BacklinkInfo(BaseModel):
|
||||
domain: str
|
||||
authority: int
|
||||
page: str = ""
|
||||
|
||||
|
||||
class SEOResponse(BaseModel):
|
||||
domain: str
|
||||
seo_score: int
|
||||
value_category: str
|
||||
metrics: SEOMetrics
|
||||
notable_links: NotableLinks
|
||||
top_backlinks: List[BacklinkInfo]
|
||||
estimated_value: float | None
|
||||
data_source: str
|
||||
last_updated: str | None
|
||||
is_estimated: bool
|
||||
|
||||
|
||||
class BatchSEORequest(BaseModel):
|
||||
domains: List[str]
|
||||
|
||||
|
||||
class BatchSEOResponse(BaseModel):
|
||||
results: List[SEOResponse]
|
||||
total_requested: int
|
||||
total_processed: int
|
||||
|
||||
|
||||
# ============== Helper ==============
|
||||
|
||||
def _check_tycoon_access(user: User) -> None:
|
||||
"""Verify user has Tycoon tier access."""
|
||||
if not user.subscription:
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="SEO data is a Tycoon feature. Please upgrade your subscription."
|
||||
)
|
||||
|
||||
tier = user.subscription.tier.lower() if user.subscription.tier else ""
|
||||
if tier != "tycoon":
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="SEO data is a Tycoon-only feature. Please upgrade to access backlink analysis."
|
||||
)
|
||||
|
||||
|
||||
# ============== Endpoints ==============
|
||||
|
||||
@router.get("/{domain}", response_model=SEOResponse)
|
||||
async def get_seo_data(
|
||||
domain: str,
|
||||
force_refresh: bool = Query(False, description="Force refresh from API"),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
Get SEO data for a domain.
|
||||
|
||||
TYCOON FEATURE ONLY.
|
||||
|
||||
Returns:
|
||||
- Domain Authority (0-100)
|
||||
- Page Authority (0-100)
|
||||
- Spam Score (0-100)
|
||||
- Total Backlinks
|
||||
- Referring Domains
|
||||
- Notable links (Wikipedia, .gov, .edu, news sites)
|
||||
- Top backlinks with authority scores
|
||||
- Estimated SEO value
|
||||
|
||||
From analysis_3.md:
|
||||
"Domain `alte-bäckerei-münchen.de` ist frei.
|
||||
Hat Links von `sueddeutsche.de` und `wikipedia.org`."
|
||||
"""
|
||||
# Check Tycoon access
|
||||
_check_tycoon_access(current_user)
|
||||
|
||||
# Clean domain input
|
||||
domain = domain.lower().strip()
|
||||
if domain.startswith('http://'):
|
||||
domain = domain[7:]
|
||||
if domain.startswith('https://'):
|
||||
domain = domain[8:]
|
||||
if domain.startswith('www.'):
|
||||
domain = domain[4:]
|
||||
domain = domain.rstrip('/')
|
||||
|
||||
# Get SEO data
|
||||
result = await seo_analyzer.analyze_domain(domain, db, force_refresh)
|
||||
|
||||
return SEOResponse(**result)
|
||||
|
||||
|
||||
@router.post("/batch", response_model=BatchSEOResponse)
|
||||
async def batch_seo_analysis(
|
||||
request: BatchSEORequest,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
Analyze multiple domains for SEO data.
|
||||
|
||||
TYCOON FEATURE ONLY.
|
||||
|
||||
Limited to 10 domains per request to prevent abuse.
|
||||
"""
|
||||
# Check Tycoon access
|
||||
_check_tycoon_access(current_user)
|
||||
|
||||
# Limit batch size
|
||||
domains = request.domains[:10]
|
||||
|
||||
results = []
|
||||
for domain in domains:
|
||||
try:
|
||||
# Clean domain
|
||||
domain = domain.lower().strip()
|
||||
if domain.startswith('http://'):
|
||||
domain = domain[7:]
|
||||
if domain.startswith('https://'):
|
||||
domain = domain[8:]
|
||||
if domain.startswith('www.'):
|
||||
domain = domain[4:]
|
||||
domain = domain.rstrip('/')
|
||||
|
||||
result = await seo_analyzer.analyze_domain(domain, db)
|
||||
results.append(SEOResponse(**result))
|
||||
except Exception as e:
|
||||
logger.error(f"Error analyzing {domain}: {e}")
|
||||
# Skip failed domains
|
||||
continue
|
||||
|
||||
return BatchSEOResponse(
|
||||
results=results,
|
||||
total_requested=len(request.domains),
|
||||
total_processed=len(results),
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{domain}/quick")
|
||||
async def get_seo_quick_summary(
|
||||
domain: str,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
Get a quick SEO summary for a domain.
|
||||
|
||||
This is a lighter version that shows basic metrics without full backlink analysis.
|
||||
Available to Trader+ users.
|
||||
"""
|
||||
# Check at least Trader access
|
||||
if not current_user.subscription:
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="SEO data requires a paid subscription."
|
||||
)
|
||||
|
||||
tier = current_user.subscription.tier.lower() if current_user.subscription.tier else ""
|
||||
if tier == "scout":
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="SEO data requires Trader or higher subscription."
|
||||
)
|
||||
|
||||
# Clean domain
|
||||
domain = domain.lower().strip().rstrip('/')
|
||||
if domain.startswith('http://'):
|
||||
domain = domain[7:]
|
||||
if domain.startswith('https://'):
|
||||
domain = domain[8:]
|
||||
if domain.startswith('www.'):
|
||||
domain = domain[4:]
|
||||
|
||||
result = await seo_analyzer.analyze_domain(domain, db)
|
||||
|
||||
# Return limited data for non-Tycoon
|
||||
if tier != "tycoon":
|
||||
return {
|
||||
'domain': result['domain'],
|
||||
'seo_score': result['seo_score'],
|
||||
'value_category': result['value_category'],
|
||||
'domain_authority': result['metrics']['domain_authority'],
|
||||
'has_notable_links': (
|
||||
result['notable_links']['has_wikipedia'] or
|
||||
result['notable_links']['has_gov'] or
|
||||
result['notable_links']['has_news']
|
||||
),
|
||||
'is_estimated': result['is_estimated'],
|
||||
'upgrade_for_details': True,
|
||||
'message': "Upgrade to Tycoon for full backlink analysis"
|
||||
}
|
||||
|
||||
return result
|
||||
|
||||
@@ -11,6 +11,7 @@ from app.models.admin_log import AdminActivityLog
from app.models.blog import BlogPost
from app.models.listing import DomainListing, ListingInquiry, ListingView
from app.models.sniper_alert import SniperAlert, SniperAlertMatch
from app.models.seo_data import DomainSEOData

__all__ = [
    "User",
@@ -34,4 +35,6 @@ __all__ = [
    # New: Sniper Alerts
    "SniperAlert",
    "SniperAlertMatch",
    # New: SEO Data (Tycoon feature)
    "DomainSEOData",
]
116 backend/app/models/seo_data.py Normal file
@@ -0,0 +1,116 @@
"""
|
||||
SEO Data models for the "SEO Juice Detector" feature.
|
||||
|
||||
This implements "Strategie 3: SEO-Daten & Backlinks" from analysis_3.md:
|
||||
"SEO-Agenturen suchen Domains nicht wegen dem Namen, sondern wegen der Power (Backlinks).
|
||||
Wenn eine Domain droppt, prüfst du nicht nur den Namen, sondern ob Backlinks existieren."
|
||||
|
||||
This is a TYCOON-ONLY feature ($29/month).
|
||||
|
||||
DATABASE TABLE TO CREATE:
|
||||
- domain_seo_data - Cached SEO metrics for domains
|
||||
|
||||
Run migrations: alembic upgrade head
|
||||
"""
|
||||
from datetime import datetime
|
||||
from typing import Optional, List
|
||||
from sqlalchemy import String, DateTime, Float, Integer, Text, ForeignKey, Boolean, JSON
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class DomainSEOData(Base):
|
||||
"""
|
||||
Cached SEO data for domains.
|
||||
|
||||
Stores backlink data, domain authority, and other SEO metrics
|
||||
from Moz API or alternative sources.
|
||||
|
||||
From analysis_3.md:
|
||||
"Domain `alte-bäckerei-münchen.de` ist frei.
|
||||
Hat Links von `sueddeutsche.de` und `wikipedia.org`."
|
||||
"""
|
||||
|
||||
__tablename__ = "domain_seo_data"
|
||||
|
||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
||||
domain: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
|
||||
|
||||
# Moz metrics
|
||||
domain_authority: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # 0-100
|
||||
page_authority: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # 0-100
|
||||
spam_score: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # 0-100
|
||||
|
||||
# Backlink data
|
||||
total_backlinks: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
|
||||
referring_domains: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
|
||||
|
||||
# Top backlinks (JSON array of {domain, authority, type})
|
||||
top_backlinks: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True)
|
||||
|
||||
# Notable backlinks (high-authority sites)
|
||||
notable_backlinks: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # Comma-separated
|
||||
has_wikipedia_link: Mapped[bool] = mapped_column(Boolean, default=False)
|
||||
has_gov_link: Mapped[bool] = mapped_column(Boolean, default=False)
|
||||
has_edu_link: Mapped[bool] = mapped_column(Boolean, default=False)
|
||||
has_news_link: Mapped[bool] = mapped_column(Boolean, default=False)
|
||||
|
||||
# Estimated value based on SEO
|
||||
seo_value_estimate: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
|
||||
|
||||
# Data source
|
||||
data_source: Mapped[str] = mapped_column(String(50), default="moz") # moz, ahrefs, majestic, estimated
|
||||
|
||||
# Cache management
|
||||
last_updated: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
|
||||
expires_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
||||
|
||||
# Request tracking
|
||||
fetch_count: Mapped[int] = mapped_column(Integer, default=0)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<DomainSEOData {self.domain} DA:{self.domain_authority}>"
|
||||
|
||||
@property
|
||||
def is_expired(self) -> bool:
|
||||
if not self.expires_at:
|
||||
return True
|
||||
return datetime.utcnow() > self.expires_at
|
||||
|
||||
@property
|
||||
def seo_score(self) -> int:
|
||||
"""Calculate overall SEO score (0-100)."""
|
||||
if not self.domain_authority:
|
||||
return 0
|
||||
|
||||
score = self.domain_authority
|
||||
|
||||
# Boost for notable links
|
||||
if self.has_wikipedia_link:
|
||||
score = min(100, score + 10)
|
||||
if self.has_gov_link:
|
||||
score = min(100, score + 5)
|
||||
if self.has_edu_link:
|
||||
score = min(100, score + 5)
|
||||
if self.has_news_link:
|
||||
score = min(100, score + 3)
|
||||
|
||||
# Penalty for spam
|
||||
if self.spam_score and self.spam_score > 30:
|
||||
score = max(0, score - (self.spam_score // 5))
|
||||
|
||||
return score
|
||||
|
||||
@property
|
||||
def value_category(self) -> str:
|
||||
"""Categorize SEO value for display."""
|
||||
score = self.seo_score
|
||||
if score >= 60:
|
||||
return "High Value"
|
||||
elif score >= 40:
|
||||
return "Medium Value"
|
||||
elif score >= 20:
|
||||
return "Low Value"
|
||||
return "Minimal"
|
||||
|
||||
381 backend/app/services/seo_analyzer.py Normal file
@@ -0,0 +1,381 @@
"""
|
||||
SEO Analyzer Service - "SEO Juice Detector"
|
||||
|
||||
This implements Strategie 3 from analysis_3.md:
|
||||
"SEO-Agenturen suchen Domains wegen der Power (Backlinks).
|
||||
Solche Domains sind für SEOs 100€ - 500€ wert, auch wenn der Name hässlich ist."
|
||||
|
||||
Data Sources (in priority order):
|
||||
1. Moz API (if MOZ_ACCESS_ID and MOZ_SECRET_KEY are set)
|
||||
2. CommonCrawl Index (free, but limited)
|
||||
3. Estimation based on domain characteristics
|
||||
|
||||
This is a TYCOON-ONLY feature.
|
||||
"""
|
||||
import os
|
||||
import logging
|
||||
import base64
|
||||
import hashlib
|
||||
import hmac
|
||||
import time
|
||||
import httpx
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, Dict, Any, List
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.seo_data import DomainSEOData
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SEOAnalyzerService:
|
||||
"""
|
||||
Analyzes domains for SEO value (backlinks, authority, etc.)
|
||||
|
||||
From analysis_3.md:
|
||||
"Domain `alte-bäckerei-münchen.de` ist frei.
|
||||
Hat Links von `sueddeutsche.de` und `wikipedia.org`."
|
||||
"""
|
||||
|
||||
# Moz API configuration
|
||||
MOZ_API_URL = "https://lsapi.seomoz.com/v2/url_metrics"
|
||||
MOZ_LINKS_URL = "https://lsapi.seomoz.com/v2/links"
|
||||
|
||||
# Cache duration (7 days for SEO data)
|
||||
CACHE_DURATION_DAYS = 7
|
||||
|
||||
# Known high-authority domains for notable link detection
|
||||
NOTABLE_DOMAINS = {
|
||||
'wikipedia': ['wikipedia.org', 'wikimedia.org'],
|
||||
'gov': ['.gov', '.gov.uk', '.admin.ch', '.bund.de'],
|
||||
'edu': ['.edu', '.ac.uk', '.ethz.ch', '.uzh.ch'],
|
||||
'news': [
|
||||
'nytimes.com', 'theguardian.com', 'bbc.com', 'cnn.com',
|
||||
'forbes.com', 'bloomberg.com', 'reuters.com', 'techcrunch.com',
|
||||
'spiegel.de', 'faz.net', 'nzz.ch', 'tagesanzeiger.ch'
|
||||
]
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
self.moz_access_id = os.getenv('MOZ_ACCESS_ID')
|
||||
self.moz_secret_key = os.getenv('MOZ_SECRET_KEY')
|
||||
self.has_moz = bool(self.moz_access_id and self.moz_secret_key)
|
||||
|
||||
if self.has_moz:
|
||||
logger.info("SEO Analyzer: Moz API configured")
|
||||
else:
|
||||
logger.warning("SEO Analyzer: No Moz API keys - using estimation mode")
|
||||
|
||||
async def analyze_domain(
|
||||
self,
|
||||
domain: str,
|
||||
db: AsyncSession,
|
||||
force_refresh: bool = False
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Analyze a domain for SEO value.
|
||||
|
||||
Returns:
|
||||
Dict with SEO metrics, backlinks, and value estimate
|
||||
"""
|
||||
domain = domain.lower().strip()
|
||||
|
||||
# Check cache first
|
||||
if not force_refresh:
|
||||
cached = await self._get_cached(domain, db)
|
||||
if cached and not cached.is_expired:
|
||||
return self._format_response(cached)
|
||||
|
||||
# Fetch fresh data
|
||||
if self.has_moz:
|
||||
seo_data = await self._fetch_moz_data(domain)
|
||||
else:
|
||||
seo_data = await self._estimate_seo_data(domain)
|
||||
|
||||
# Save to cache
|
||||
cached = await self._save_to_cache(domain, seo_data, db)
|
||||
|
||||
return self._format_response(cached)
|
||||
|
||||
async def _get_cached(self, domain: str, db: AsyncSession) -> Optional[DomainSEOData]:
|
||||
"""Get cached SEO data for a domain."""
|
||||
result = await db.execute(
|
||||
select(DomainSEOData).where(DomainSEOData.domain == domain)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
async def _save_to_cache(
|
||||
self,
|
||||
domain: str,
|
||||
data: Dict[str, Any],
|
||||
db: AsyncSession
|
||||
) -> DomainSEOData:
|
||||
"""Save SEO data to cache."""
|
||||
# Check if exists
|
||||
result = await db.execute(
|
||||
select(DomainSEOData).where(DomainSEOData.domain == domain)
|
||||
)
|
||||
cached = result.scalar_one_or_none()
|
||||
|
||||
if cached:
|
||||
# Update existing
|
||||
for key, value in data.items():
|
||||
if hasattr(cached, key):
|
||||
setattr(cached, key, value)
|
||||
cached.last_updated = datetime.utcnow()
|
||||
cached.expires_at = datetime.utcnow() + timedelta(days=self.CACHE_DURATION_DAYS)
|
||||
cached.fetch_count += 1
|
||||
else:
|
||||
# Create new
|
||||
cached = DomainSEOData(
|
||||
domain=domain,
|
||||
expires_at=datetime.utcnow() + timedelta(days=self.CACHE_DURATION_DAYS),
|
||||
**data
|
||||
)
|
||||
db.add(cached)
|
||||
|
||||
await db.commit()
|
||||
await db.refresh(cached)
|
||||
return cached
|
||||
|
||||
async def _fetch_moz_data(self, domain: str) -> Dict[str, Any]:
|
||||
"""Fetch SEO data from Moz API."""
|
||||
try:
|
||||
# Generate authentication
|
||||
expires = int(time.time()) + 300
|
||||
string_to_sign = f"{self.moz_access_id}\n{expires}"
|
||||
signature = base64.b64encode(
|
||||
hmac.new(
|
||||
self.moz_secret_key.encode('utf-8'),
|
||||
string_to_sign.encode('utf-8'),
|
||||
hashlib.sha1
|
||||
).digest()
|
||||
).decode('utf-8')
|
||||
|
||||
auth_params = {
|
||||
'AccessID': self.moz_access_id,
|
||||
'Expires': expires,
|
||||
'Signature': signature
|
||||
}
|
||||
|
||||
async with httpx.AsyncClient(timeout=30) as client:
|
||||
# Get URL metrics
|
||||
response = await client.post(
|
||||
self.MOZ_API_URL,
|
||||
params=auth_params,
|
||||
json={
|
||||
'targets': [f'http://{domain}/'],
|
||||
}
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
metrics = response.json()
|
||||
if metrics and 'results' in metrics and metrics['results']:
|
||||
result = metrics['results'][0]
|
||||
|
||||
# Extract notable backlinks
|
||||
top_backlinks = await self._fetch_top_backlinks(
|
||||
domain, auth_params, client
|
||||
)
|
||||
|
||||
return {
|
||||
'domain_authority': result.get('domain_authority', 0),
|
||||
'page_authority': result.get('page_authority', 0),
|
||||
'spam_score': result.get('spam_score', 0),
|
||||
'total_backlinks': result.get('external_links_to_root_domain', 0),
|
||||
'referring_domains': result.get('root_domains_to_root_domain', 0),
|
||||
'top_backlinks': top_backlinks,
|
||||
'notable_backlinks': self._extract_notable(top_backlinks),
|
||||
'has_wikipedia_link': self._has_notable_link(top_backlinks, 'wikipedia'),
|
||||
'has_gov_link': self._has_notable_link(top_backlinks, 'gov'),
|
||||
'has_edu_link': self._has_notable_link(top_backlinks, 'edu'),
|
||||
'has_news_link': self._has_notable_link(top_backlinks, 'news'),
|
||||
'seo_value_estimate': self._calculate_seo_value(result),
|
||||
'data_source': 'moz',
|
||||
}
|
||||
|
||||
logger.warning(f"Moz API returned {response.status_code} for {domain}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Moz API error for {domain}: {e}")
|
||||
|
||||
# Fallback to estimation
|
||||
return await self._estimate_seo_data(domain)
|
||||
|
||||
async def _fetch_top_backlinks(
|
||||
self,
|
||||
domain: str,
|
||||
auth_params: dict,
|
||||
client: httpx.AsyncClient
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Fetch top backlinks from Moz."""
|
||||
try:
|
||||
response = await client.post(
|
||||
self.MOZ_LINKS_URL,
|
||||
params=auth_params,
|
||||
json={
|
||||
'target': f'http://{domain}/',
|
||||
'target_scope': 'root_domain',
|
||||
'filter': 'external+nofollow',
|
||||
'sort': 'domain_authority',
|
||||
'limit': 20
|
||||
}
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
if 'results' in data:
|
||||
return [
|
||||
{
|
||||
'domain': link.get('source', {}).get('root_domain', ''),
|
||||
'authority': link.get('source', {}).get('domain_authority', 0),
|
||||
'page': link.get('source', {}).get('page', ''),
|
||||
}
|
||||
for link in data['results'][:10]
|
||||
]
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching backlinks: {e}")
|
||||
|
||||
return []
|
||||
|
||||
async def _estimate_seo_data(self, domain: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Estimate SEO data when no API is available.
|
||||
|
||||
Uses heuristics based on domain characteristics.
|
||||
"""
|
||||
# Extract domain parts
|
||||
parts = domain.split('.')
|
||||
name = parts[0] if parts else domain
|
||||
tld = parts[-1] if len(parts) > 1 else ''
|
||||
|
||||
# Estimate domain authority based on characteristics
|
||||
estimated_da = 10 # Base
|
||||
|
||||
# Short domains tend to have more backlinks
|
||||
if len(name) <= 4:
|
||||
estimated_da += 15
|
||||
elif len(name) <= 6:
|
||||
estimated_da += 10
|
||||
elif len(name) <= 8:
|
||||
estimated_da += 5
|
||||
|
||||
# Premium TLDs
|
||||
premium_tlds = {'com': 10, 'org': 8, 'net': 5, 'io': 7, 'ai': 8, 'co': 6}
|
||||
estimated_da += premium_tlds.get(tld, 0)
|
||||
|
||||
# Dictionary words get a boost
|
||||
common_words = ['tech', 'app', 'data', 'cloud', 'web', 'net', 'hub', 'lab', 'dev']
|
||||
if any(word in name.lower() for word in common_words):
|
||||
estimated_da += 5
|
||||
|
||||
# Cap at reasonable estimate
|
||||
estimated_da = min(40, estimated_da)
|
||||
|
||||
# Estimate backlinks based on DA
|
||||
estimated_backlinks = estimated_da * 50
|
||||
estimated_referring = estimated_da * 5
|
||||
|
||||
return {
|
||||
'domain_authority': estimated_da,
|
||||
'page_authority': max(0, estimated_da - 5),
|
||||
'spam_score': 5, # Assume low spam for estimates
|
||||
'total_backlinks': estimated_backlinks,
|
||||
'referring_domains': estimated_referring,
|
||||
'top_backlinks': [],
|
||||
'notable_backlinks': None,
|
||||
'has_wikipedia_link': False,
|
||||
'has_gov_link': False,
|
||||
'has_edu_link': False,
|
||||
'has_news_link': False,
|
||||
'seo_value_estimate': self._estimate_value(estimated_da),
|
||||
'data_source': 'estimated',
|
||||
}
|
||||
|
||||
def _has_notable_link(self, backlinks: List[Dict], category: str) -> bool:
|
||||
"""Check if backlinks contain notable sources."""
|
||||
domains_to_check = self.NOTABLE_DOMAINS.get(category, [])
|
||||
|
||||
for link in backlinks:
|
||||
link_domain = link.get('domain', '').lower()
|
||||
for notable in domains_to_check:
|
||||
if notable in link_domain:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _extract_notable(self, backlinks: List[Dict]) -> Optional[str]:
|
||||
"""Extract notable backlink domains as comma-separated string."""
|
||||
notable = []
|
||||
|
||||
for link in backlinks:
|
||||
domain = link.get('domain', '')
|
||||
authority = link.get('authority', 0)
|
||||
|
||||
# Include high-authority links
|
||||
if authority >= 50:
|
||||
notable.append(domain)
|
||||
|
||||
return ','.join(notable[:10]) if notable else None
|
||||
|
||||
def _calculate_seo_value(self, metrics: Dict) -> float:
|
||||
"""Calculate estimated SEO value in USD."""
|
||||
da = metrics.get('domain_authority', 0)
|
||||
backlinks = metrics.get('external_links_to_root_domain', 0)
|
||||
|
||||
# Base value from DA
|
||||
if da >= 60:
|
||||
base_value = 500
|
||||
elif da >= 40:
|
||||
base_value = 200
|
||||
elif da >= 20:
|
||||
base_value = 50
|
||||
else:
|
||||
base_value = 10
|
||||
|
||||
# Boost for backlinks
|
||||
link_boost = min(backlinks / 100, 10) * 20
|
||||
|
||||
return round(base_value + link_boost, 2)
|
||||
|
||||
def _estimate_value(self, da: int) -> float:
|
||||
"""Estimate value based on estimated DA."""
|
||||
if da >= 40:
|
||||
return 200
|
||||
elif da >= 30:
|
||||
return 100
|
||||
elif da >= 20:
|
||||
return 50
|
||||
return 20
|
||||
|
||||
def _format_response(self, data: DomainSEOData) -> Dict[str, Any]:
|
||||
"""Format SEO data for API response."""
|
||||
return {
|
||||
'domain': data.domain,
|
||||
'seo_score': data.seo_score,
|
||||
'value_category': data.value_category,
|
||||
'metrics': {
|
||||
'domain_authority': data.domain_authority,
|
||||
'page_authority': data.page_authority,
|
||||
'spam_score': data.spam_score,
|
||||
'total_backlinks': data.total_backlinks,
|
||||
'referring_domains': data.referring_domains,
|
||||
},
|
||||
'notable_links': {
|
||||
'has_wikipedia': data.has_wikipedia_link,
|
||||
'has_gov': data.has_gov_link,
|
||||
'has_edu': data.has_edu_link,
|
||||
'has_news': data.has_news_link,
|
||||
'notable_domains': data.notable_backlinks.split(',') if data.notable_backlinks else [],
|
||||
},
|
||||
'top_backlinks': data.top_backlinks or [],
|
||||
'estimated_value': data.seo_value_estimate,
|
||||
'data_source': data.data_source,
|
||||
'last_updated': data.last_updated.isoformat() if data.last_updated else None,
|
||||
'is_estimated': data.data_source == 'estimated',
|
||||
}
|
||||
|
||||
|
||||
# Singleton instance
|
||||
seo_analyzer = SEOAnalyzerService()
|
||||
|
||||
496 frontend/src/app/command/seo/page.tsx Normal file
@@ -0,0 +1,496 @@
'use client'

import { useEffect, useState, type FormEvent } from 'react'
import { useStore } from '@/lib/store'
import { api } from '@/lib/api'
import { CommandCenterLayout } from '@/components/CommandCenterLayout'
import { PageContainer, StatCard, Badge } from '@/components/PremiumTable'
import {
  Search,
  Link2,
  Globe,
  Shield,
  TrendingUp,
  Loader2,
  AlertCircle,
  X,
  ExternalLink,
  Crown,
  Sparkles,
  BookOpen,
  Building,
  GraduationCap,
  Newspaper,
  Star,
} from 'lucide-react'
import Link from 'next/link'
import clsx from 'clsx'

interface SEOData {
  domain: string
  seo_score: number
  value_category: string
  metrics: {
    domain_authority: number | null
    page_authority: number | null
    spam_score: number | null
    total_backlinks: number | null
    referring_domains: number | null
  }
  notable_links: {
    has_wikipedia: boolean
    has_gov: boolean
    has_edu: boolean
    has_news: boolean
    notable_domains: string[]
  }
  top_backlinks: Array<{
    domain: string
    authority: number
    page: string
  }>
  estimated_value: number | null
  data_source: string
  last_updated: string | null
  is_estimated: boolean
}

// One card per notable-link category; rendered via a single map below.
const NOTABLE_LINK_CARDS = [
  { key: 'has_wikipedia', label: 'Wikipedia', Icon: BookOpen },
  { key: 'has_gov', label: '.gov Links', Icon: Building },
  { key: 'has_edu', label: '.edu Links', Icon: GraduationCap },
  { key: 'has_news', label: 'News Sites', Icon: Newspaper },
] as const

export default function SEOPage() {
  const { subscription } = useStore()

  const [domain, setDomain] = useState('')
  const [loading, setLoading] = useState(false)
  const [seoData, setSeoData] = useState<SEOData | null>(null)
  const [error, setError] = useState<string | null>(null)
  const [recentSearches, setRecentSearches] = useState<string[]>([])

  const tier = subscription?.tier?.toLowerCase() || 'scout'
  const isTycoon = tier === 'tycoon'

  useEffect(() => {
    // Load recent searches from localStorage
    const saved = localStorage.getItem('seo-recent-searches')
    if (saved) {
      setRecentSearches(JSON.parse(saved))
    }
  }, [])

  const saveRecentSearch = (searchDomain: string) => {
    const updated = [searchDomain, ...recentSearches.filter(d => d !== searchDomain)].slice(0, 5)
    setRecentSearches(updated)
    localStorage.setItem('seo-recent-searches', JSON.stringify(updated))
  }

  // Shared fetch logic for both the form submit and the recent-search chips.
  const analyze = async (searchDomain: string, saveToRecent: boolean) => {
    setLoading(true)
    setError(null)
    setSeoData(null)

    try {
      const data = await api.request<SEOData>(`/seo/${encodeURIComponent(searchDomain)}`)
      setSeoData(data)
      if (saveToRecent) saveRecentSearch(searchDomain.toLowerCase())
    } catch (err: any) {
      setError(err.message || 'Failed to analyze domain')
    } finally {
      setLoading(false)
    }
  }

  const handleSearch = (e: FormEvent) => {
    e.preventDefault()
    if (!domain.trim()) return
    analyze(domain.trim(), true)
  }

  const handleQuickSearch = (searchDomain: string) => {
    setDomain(searchDomain)
    analyze(searchDomain, false)
  }

  const getScoreColor = (score: number) => {
    if (score >= 60) return 'text-accent'
    if (score >= 40) return 'text-amber-400'
    if (score >= 20) return 'text-orange-400'
    return 'text-foreground-muted'
  }

  const getScoreBg = (score: number) => {
    if (score >= 60) return 'bg-accent/10 border-accent/30'
    if (score >= 40) return 'bg-amber-500/10 border-amber-500/30'
    if (score >= 20) return 'bg-orange-500/10 border-orange-500/30'
    return 'bg-foreground/5 border-border'
  }

  const formatNumber = (num: number | null) => {
    if (num === null) return '-'
    if (num >= 1000000) return `${(num / 1000000).toFixed(1)}M`
    if (num >= 1000) return `${(num / 1000).toFixed(1)}K`
    return num.toString()
  }

  // Show upgrade prompt for non-Tycoon users
  if (!isTycoon) {
    return (
      <CommandCenterLayout
        title="SEO Juice Detector"
        subtitle="Backlink analysis & domain authority"
      >
        <PageContainer>
          <div className="text-center py-16 bg-gradient-to-br from-accent/10 to-accent/5 border border-accent/20 rounded-2xl">
            <div className="w-20 h-20 bg-accent/20 rounded-full flex items-center justify-center mx-auto mb-6">
              <Crown className="w-10 h-10 text-accent" />
            </div>
            <h2 className="text-2xl font-display text-foreground mb-3">Tycoon Feature</h2>
            <p className="text-foreground-muted max-w-lg mx-auto mb-8">
              SEO Juice Detector is a premium feature for serious domain investors.
              Analyze backlinks, domain authority, and find hidden gems that SEO agencies pay
              $100-$500 for — even if the name is "ugly".
            </p>

            <div className="grid sm:grid-cols-3 gap-4 max-w-2xl mx-auto mb-8">
              <div className="p-4 bg-background/50 rounded-xl">
                <Link2 className="w-6 h-6 text-accent mx-auto mb-2" />
                <p className="text-sm text-foreground font-medium">Backlink Analysis</p>
                <p className="text-xs text-foreground-muted">Top referring domains</p>
              </div>
              <div className="p-4 bg-background/50 rounded-xl">
                <TrendingUp className="w-6 h-6 text-accent mx-auto mb-2" />
                <p className="text-sm text-foreground font-medium">Domain Authority</p>
                <p className="text-xs text-foreground-muted">Moz DA/PA scores</p>
              </div>
              <div className="p-4 bg-background/50 rounded-xl">
                <Star className="w-6 h-6 text-accent mx-auto mb-2" />
                <p className="text-sm text-foreground font-medium">Notable Links</p>
                <p className="text-xs text-foreground-muted">Wikipedia, .gov, .edu</p>
              </div>
            </div>

            <Link
              href="/pricing"
              className="inline-flex items-center gap-2 px-6 py-3 bg-accent text-background font-medium rounded-xl hover:bg-accent-hover transition-all"
            >
              <Crown className="w-5 h-5" />
              Upgrade to Tycoon
            </Link>
          </div>
        </PageContainer>
      </CommandCenterLayout>
    )
  }

  return (
    <CommandCenterLayout
      title="SEO Juice Detector"
      subtitle="Analyze backlinks, domain authority & find hidden SEO gems"
    >
      <PageContainer>
        {/* Error Message */}
        {error && (
          <div className="p-4 bg-red-500/10 border border-red-500/20 rounded-xl flex items-center gap-3">
            <AlertCircle className="w-5 h-5 text-red-400" />
            <p className="text-sm text-red-400 flex-1">{error}</p>
            <button onClick={() => setError(null)}><X className="w-4 h-4 text-red-400" /></button>
          </div>
        )}

        {/* Search Form */}
        <div className="p-6 bg-background-secondary/30 border border-border rounded-2xl">
          <form onSubmit={handleSearch} className="flex gap-3">
            <div className="relative flex-1">
              <Globe className="absolute left-4 top-1/2 -translate-y-1/2 w-5 h-5 text-foreground-subtle" />
              <input
                type="text"
                value={domain}
                onChange={(e) => setDomain(e.target.value)}
                placeholder="Enter domain to analyze (e.g., example.com)"
                className="w-full pl-12 pr-4 py-3 bg-background border border-border rounded-xl
                           text-foreground placeholder:text-foreground-subtle
                           focus:outline-none focus:ring-2 focus:ring-accent/30 focus:border-accent"
              />
            </div>
            <button
              type="submit"
              disabled={loading || !domain.trim()}
              className="flex items-center gap-2 px-6 py-3 bg-accent text-background font-medium rounded-xl
                         hover:bg-accent-hover transition-all disabled:opacity-50"
            >
              {loading ? (
                <Loader2 className="w-5 h-5 animate-spin" />
              ) : (
                <Search className="w-5 h-5" />
              )}
              Analyze
            </button>
          </form>

          {/* Recent Searches */}
          {recentSearches.length > 0 && !seoData && (
            <div className="mt-4 flex items-center gap-2 flex-wrap">
              <span className="text-xs text-foreground-muted">Recent:</span>
              {recentSearches.map((d) => (
                <button
                  key={d}
                  onClick={() => handleQuickSearch(d)}
                  className="px-3 py-1 text-xs bg-foreground/5 text-foreground-muted rounded-full hover:bg-foreground/10 transition-colors"
                >
                  {d}
                </button>
              ))}
            </div>
          )}
        </div>

        {/* Loading State */}
        {loading && (
          <div className="flex flex-col items-center justify-center py-16">
            <Loader2 className="w-8 h-8 text-accent animate-spin mb-4" />
            <p className="text-foreground-muted">Analyzing backlinks & authority...</p>
          </div>
        )}

        {/* Results */}
        {seoData && !loading && (
          <div className="space-y-6 animate-slide-up">
            {/* Header with Score */}
            <div className="p-6 bg-background-secondary/30 border border-border rounded-2xl">
              <div className="flex flex-wrap items-start justify-between gap-4">
                <div>
                  <h2 className="font-mono text-2xl font-medium text-foreground mb-1">
                    {seoData.domain}
                  </h2>
                  <div className="flex items-center gap-2">
                    <Badge variant={seoData.is_estimated ? 'warning' : 'success'}>
                      {seoData.data_source === 'moz' ? 'Moz Data' : 'Estimated'}
                    </Badge>
                    <span className="text-sm text-foreground-muted">{seoData.value_category}</span>
                  </div>
                </div>
                <div className={clsx(
                  "w-24 h-24 rounded-2xl border flex flex-col items-center justify-center",
                  getScoreBg(seoData.seo_score)
                )}>
                  <span className={clsx("text-3xl font-display", getScoreColor(seoData.seo_score))}>
                    {seoData.seo_score}
                  </span>
                  <span className="text-xs text-foreground-muted">SEO Score</span>
                </div>
              </div>

              {/* Estimated Value */}
              {seoData.estimated_value && (
                <div className="mt-4 p-4 bg-accent/10 border border-accent/20 rounded-xl">
                  <p className="text-sm text-foreground-muted mb-1">Estimated SEO Value</p>
                  <p className="text-2xl font-display text-accent">
                    ${seoData.estimated_value.toLocaleString()}
                  </p>
                  <p className="text-xs text-foreground-subtle mt-1">
                    Based on domain authority & backlink profile
                  </p>
                </div>
              )}
            </div>

            {/* Metrics Grid */}
            <div className="grid grid-cols-2 lg:grid-cols-5 gap-4">
              <StatCard
                title="Domain Authority"
                value={seoData.metrics.domain_authority || 0}
                icon={TrendingUp}
                subtitle="/100"
              />
              <StatCard
                title="Page Authority"
                value={seoData.metrics.page_authority || 0}
                icon={Globe}
                subtitle="/100"
              />
              <StatCard
                title="Backlinks"
                value={formatNumber(seoData.metrics.total_backlinks)}
                icon={Link2}
              />
              <StatCard
                title="Referring Domains"
                value={formatNumber(seoData.metrics.referring_domains)}
                icon={ExternalLink}
              />
              <StatCard
                title="Spam Score"
                value={seoData.metrics.spam_score || 0}
                icon={Shield}
                subtitle={seoData.metrics.spam_score && seoData.metrics.spam_score > 30 ? '⚠️ High' : '✓ Low'}
              />
            </div>

            {/* Notable Links */}
            <div className="p-6 bg-background-secondary/30 border border-border rounded-2xl">
              <h3 className="text-lg font-medium text-foreground mb-4">Notable Backlinks</h3>
              <div className="grid sm:grid-cols-4 gap-4">
                {NOTABLE_LINK_CARDS.map(({ key, label, Icon }) => {
                  const found = seoData.notable_links[key]
                  return (
                    <div
                      key={key}
                      className={clsx(
                        "p-4 rounded-xl border flex items-center gap-3",
                        found ? "bg-accent/10 border-accent/30" : "bg-foreground/5 border-border"
                      )}
                    >
                      <Icon className={clsx("w-6 h-6", found ? "text-accent" : "text-foreground-subtle")} />
                      <div>
                        <p className="text-sm font-medium text-foreground">{label}</p>
                        <p className="text-xs text-foreground-muted">
                          {found ? '✓ Found' : 'Not found'}
                        </p>
                      </div>
                    </div>
                  )
                })}
              </div>

              {/* Notable Domains List */}
              {seoData.notable_links.notable_domains.length > 0 && (
                <div className="mt-4">
                  <p className="text-sm text-foreground-muted mb-2">High-authority referring domains:</p>
                  <div className="flex flex-wrap gap-2">
                    {seoData.notable_links.notable_domains.map((d) => (
                      <span key={d} className="px-3 py-1 bg-accent/10 text-accent text-sm rounded-full">
                        {d}
                      </span>
                    ))}
                  </div>
                </div>
              )}
            </div>

            {/* Top Backlinks */}
            {seoData.top_backlinks.length > 0 && (
              <div className="p-6 bg-background-secondary/30 border border-border rounded-2xl">
                <h3 className="text-lg font-medium text-foreground mb-4">Top Backlinks</h3>
                <div className="space-y-2">
                  {seoData.top_backlinks.map((link, idx) => (
                    <div
                      key={idx}
                      className="flex items-center justify-between p-3 bg-background rounded-xl border border-border/50"
                    >
                      <div className="flex items-center gap-3">
                        <div className={clsx(
                          "w-8 h-8 rounded-lg flex items-center justify-center text-sm font-medium",
                          link.authority >= 60 ? "bg-accent/10 text-accent" :
                          link.authority >= 40 ? "bg-amber-500/10 text-amber-400" :
                          "bg-foreground/5 text-foreground-muted"
                        )}>
                          {link.authority}
                        </div>
                        <div>
                          <p className="font-mono text-sm text-foreground">{link.domain}</p>
                          {link.page && (
                            <p className="text-xs text-foreground-muted truncate max-w-xs">{link.page}</p>
                          )}
                        </div>
                      </div>
                      <a
                        href={`https://${link.domain}`}
                        target="_blank"
                        rel="noopener noreferrer"
                        className="p-2 text-foreground-subtle hover:text-accent transition-colors"
                      >
                        <ExternalLink className="w-4 h-4" />
                      </a>
                    </div>
                  ))}
                </div>
              </div>
            )}

            {/* Data Source Note */}
            {seoData.is_estimated && (
              <div className="p-4 bg-amber-500/10 border border-amber-500/20 rounded-xl">
                <p className="text-sm text-amber-400">
                  <AlertCircle className="w-4 h-4 inline mr-2" />
                  This data is estimated based on domain characteristics.
                  For live Moz data, configure MOZ_ACCESS_ID and MOZ_SECRET_KEY in the backend.
                </p>
              </div>
            )}
          </div>
        )}

        {/* Empty State */}
        {!seoData && !loading && !error && (
          <div className="text-center py-16">
            <Sparkles className="w-16 h-16 text-foreground-subtle mx-auto mb-6" />
            <h2 className="text-xl font-medium text-foreground mb-2">SEO Juice Detector</h2>
            <p className="text-foreground-muted max-w-md mx-auto">
              Enter a domain above to analyze its backlink profile, domain authority,
              and find hidden SEO value that others miss.
            </p>
          </div>
        )}
      </PageContainer>
    </CommandCenterLayout>
  )
}
@@ -23,6 +23,7 @@ import {
  Sparkles,
  Tag,
  Target,
  Link2,
} from 'lucide-react'
import { useState, useEffect } from 'react'
import clsx from 'clsx'
@@ -115,6 +116,12 @@ export function Sidebar({ collapsed: controlledCollapsed, onCollapsedChange }: S
      icon: Target,
      badge: null,
    },
    {
      href: '/command/seo',
      label: 'SEO Juice',
      icon: Link2,
      badge: 'Tycoon',
    },
  ]

  const bottomItems = [