From analysis_3.md - Strategy 3: SEO data & backlinks: 'SEO agencies want these domains for their power (backlinks). Such domains are worth 100-500€ to SEOs even if the name is ugly.'

BACKEND:
- Model: DomainSEOData for caching SEO metrics
- Service: seo_analyzer.py with Moz API integration
  - Falls back to estimation if no API keys are configured (see the sketch below)
  - Detects notable links (Wikipedia, .gov, .edu, news)
  - Calculates an estimated SEO value
- API: /seo endpoints (Tycoon-only access)

FRONTEND:
- /command/seo page with full SEO analysis
- Upgrade prompt for non-Tycoon users
- Notable links display (Wikipedia, .gov, .edu, news)
- Top backlinks with authority scores
- Recent searches saved locally

SIDEBAR:
- Added 'SEO Juice' nav item with 'Tycoon' badge

DOCS:
- Updated DATABASE_MIGRATIONS.md with the domain_seo_data table
- Added SEO API endpoint documentation
- Added Moz API environment variable info
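The sketch below illustrates the two service behaviours listed above that are not shown in this file: notable-link detection and the estimation fallback. Nothing in it is taken from the real seo_analyzer.py; the function names, the MOZ_ACCESS_ID / MOZ_SECRET_KEY variable names, the news-site list and the length heuristic are assumptions for illustration only.

# Illustrative sketch -- not the actual app/services/seo_analyzer.py.
import os

NEWS_SITES = {"sueddeutsche.de", "spiegel.de", "nytimes.com", "bbc.co.uk"}  # assumed list


def classify_notable_links(referring_domains: list[str]) -> dict:
    """Flag Wikipedia, .gov, .edu and news referrers among a domain's backlink sources."""
    return {
        "has_wikipedia": any(d.endswith("wikipedia.org") for d in referring_domains),
        "has_gov": any(d.endswith(".gov") for d in referring_domains),
        "has_edu": any(d.endswith(".edu") for d in referring_domains),
        "has_news": any(d in NEWS_SITES for d in referring_domains),
        "notable_domains": [
            d for d in referring_domains
            if d.endswith(("wikipedia.org", ".gov", ".edu")) or d in NEWS_SITES
        ],
    }


def estimate_metrics(domain: str) -> dict:
    """Heuristic stand-in used when no Moz API keys are configured (illustrative numbers)."""
    name = domain.split(".")[0]
    guess = max(5, 35 - len(name))  # shorter second-level names get a slightly higher guess
    return {"domain_authority": guess, "page_authority": guess, "spam_score": None,
            "total_backlinks": None, "referring_domains": None}


def fetch_metrics(domain: str) -> dict:
    """Prefer live Moz data when credentials exist, otherwise fall back to estimation."""
    has_keys = bool(os.getenv("MOZ_ACCESS_ID") and os.getenv("MOZ_SECRET_KEY"))  # assumed names
    if not has_keys:
        return estimate_metrics(domain)
    # The real service would query the Moz Links API here; the request itself is
    # omitted because its details are not part of this file.
    raise NotImplementedError

The router file below only depends on seo_analyzer.analyze_domain(domain, db, force_refresh), so the service internals can change without touching the endpoints.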
"""
|
|
SEO Data API - "SEO Juice Detector"
|
|
|
|
This implements Strategie 3 from analysis_3.md:
|
|
"Das Feature: 'SEO Juice Detector'
|
|
Wenn eine Domain droppt, prüfst du nicht nur den Namen,
|
|
sondern ob Backlinks existieren.
|
|
Monetarisierung: Das ist ein reines Tycoon-Feature ($29/Monat)."
|
|
|
|
Endpoints:
|
|
- GET /seo/{domain} - Get SEO data for a domain (TYCOON ONLY)
|
|
- POST /seo/batch - Analyze multiple domains (TYCOON ONLY)
|
|
"""
|
|
import logging
|
|
from typing import List
|
|
from fastapi import APIRouter, Depends, HTTPException, Query
|
|
from pydantic import BaseModel
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
|
|
from app.database import get_db
|
|
from app.api.deps import get_current_user
|
|
from app.models.user import User
|
|
from app.services.seo_analyzer import seo_analyzer
|
|
|
|
logger = logging.getLogger(__name__)
|
|
router = APIRouter()
|
|
|
|
|
|
# ============== Schemas ==============

class SEOMetrics(BaseModel):
    domain_authority: int | None
    page_authority: int | None
    spam_score: int | None
    total_backlinks: int | None
    referring_domains: int | None


class NotableLinks(BaseModel):
    has_wikipedia: bool
    has_gov: bool
    has_edu: bool
    has_news: bool
    notable_domains: List[str]


class BacklinkInfo(BaseModel):
    domain: str
    authority: int
    page: str = ""


class SEOResponse(BaseModel):
    domain: str
    seo_score: int
    value_category: str
    metrics: SEOMetrics
    notable_links: NotableLinks
    top_backlinks: List[BacklinkInfo]
    estimated_value: float | None
    data_source: str
    last_updated: str | None
    is_estimated: bool


class BatchSEORequest(BaseModel):
    domains: List[str]


class BatchSEOResponse(BaseModel):
    results: List[SEOResponse]
    total_requested: int
    total_processed: int


# ============== Helper ==============

def _check_tycoon_access(user: User) -> None:
    """Verify user has Tycoon tier access."""
    if not user.subscription:
        raise HTTPException(
            status_code=403,
            detail="SEO data is a Tycoon feature. Please upgrade your subscription."
        )

    tier = user.subscription.tier.lower() if user.subscription.tier else ""
    if tier != "tycoon":
        raise HTTPException(
            status_code=403,
            detail="SEO data is a Tycoon-only feature. Please upgrade to access backlink analysis."
        )


# ============== Endpoints ==============

@router.get("/{domain}", response_model=SEOResponse)
async def get_seo_data(
    domain: str,
    force_refresh: bool = Query(False, description="Force refresh from API"),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Get SEO data for a domain.

    TYCOON FEATURE ONLY.

    Returns:
    - Domain Authority (0-100)
    - Page Authority (0-100)
    - Spam Score (0-100)
    - Total Backlinks
    - Referring Domains
    - Notable links (Wikipedia, .gov, .edu, news sites)
    - Top backlinks with authority scores
    - Estimated SEO value

    From analysis_3.md:
    "The domain `alte-bäckerei-münchen.de` is available.
    It has links from `sueddeutsche.de` and `wikipedia.org`."
    """
    # Check Tycoon access
    _check_tycoon_access(current_user)

    # Clean domain input: strip scheme, "www." prefix and trailing slash
    domain = domain.lower().strip()
    if domain.startswith('http://'):
        domain = domain[7:]
    if domain.startswith('https://'):
        domain = domain[8:]
    if domain.startswith('www.'):
        domain = domain[4:]
    domain = domain.rstrip('/')

    # Get SEO data
    result = await seo_analyzer.analyze_domain(domain, db, force_refresh)

    return SEOResponse(**result)


@router.post("/batch", response_model=BatchSEOResponse)
|
|
async def batch_seo_analysis(
|
|
request: BatchSEORequest,
|
|
current_user: User = Depends(get_current_user),
|
|
db: AsyncSession = Depends(get_db),
|
|
):
|
|
"""
|
|
Analyze multiple domains for SEO data.
|
|
|
|
TYCOON FEATURE ONLY.
|
|
|
|
Limited to 10 domains per request to prevent abuse.
|
|
"""
|
|
# Check Tycoon access
|
|
_check_tycoon_access(current_user)
|
|
|
|
# Limit batch size
|
|
domains = request.domains[:10]
|
|
|
|
results = []
|
|
for domain in domains:
|
|
try:
|
|
# Clean domain
|
|
domain = domain.lower().strip()
|
|
if domain.startswith('http://'):
|
|
domain = domain[7:]
|
|
if domain.startswith('https://'):
|
|
domain = domain[8:]
|
|
if domain.startswith('www.'):
|
|
domain = domain[4:]
|
|
domain = domain.rstrip('/')
|
|
|
|
result = await seo_analyzer.analyze_domain(domain, db)
|
|
results.append(SEOResponse(**result))
|
|
except Exception as e:
|
|
logger.error(f"Error analyzing {domain}: {e}")
|
|
# Skip failed domains
|
|
continue
|
|
|
|
return BatchSEOResponse(
|
|
results=results,
|
|
total_requested=len(request.domains),
|
|
total_processed=len(results),
|
|
)
|
|
|
|
|
|
@router.get("/{domain}/quick")
|
|
async def get_seo_quick_summary(
|
|
domain: str,
|
|
current_user: User = Depends(get_current_user),
|
|
db: AsyncSession = Depends(get_db),
|
|
):
|
|
"""
|
|
Get a quick SEO summary for a domain.
|
|
|
|
This is a lighter version that shows basic metrics without full backlink analysis.
|
|
Available to Trader+ users.
|
|
"""
|
|
# Check at least Trader access
|
|
if not current_user.subscription:
|
|
raise HTTPException(
|
|
status_code=403,
|
|
detail="SEO data requires a paid subscription."
|
|
)
|
|
|
|
tier = current_user.subscription.tier.lower() if current_user.subscription.tier else ""
|
|
if tier == "scout":
|
|
raise HTTPException(
|
|
status_code=403,
|
|
detail="SEO data requires Trader or higher subscription."
|
|
)
|
|
|
|
# Clean domain
|
|
domain = domain.lower().strip().rstrip('/')
|
|
if domain.startswith('http://'):
|
|
domain = domain[7:]
|
|
if domain.startswith('https://'):
|
|
domain = domain[8:]
|
|
if domain.startswith('www.'):
|
|
domain = domain[4:]
|
|
|
|
result = await seo_analyzer.analyze_domain(domain, db)
|
|
|
|
# Return limited data for non-Tycoon
|
|
if tier != "tycoon":
|
|
return {
|
|
'domain': result['domain'],
|
|
'seo_score': result['seo_score'],
|
|
'value_category': result['value_category'],
|
|
'domain_authority': result['metrics']['domain_authority'],
|
|
'has_notable_links': (
|
|
result['notable_links']['has_wikipedia'] or
|
|
result['notable_links']['has_gov'] or
|
|
result['notable_links']['has_news']
|
|
),
|
|
'is_estimated': result['is_estimated'],
|
|
'upgrade_for_details': True,
|
|
'message': "Upgrade to Tycoon for full backlink analysis"
|
|
}
|
|
|
|
return result
|
|
|
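

# ============== Example usage (illustrative) ==============
# Client-side sketch for the single-domain endpoint above. It is not part of
# the router; the "/seo" mount prefix, the bearer-token auth scheme and the
# use of `httpx` are assumptions, not facts taken from this file.
def _example_fetch_seo(base_url: str, token: str) -> dict:
    import httpx  # imported lazily so this sketch adds no module-level dependency

    response = httpx.get(
        f"{base_url}/seo/example.com",
        params={"force_refresh": "false"},
        headers={"Authorization": f"Bearer {token}"},
        timeout=30.0,
    )
    response.raise_for_status()  # raises on 403 if the account is not Tycoon tier
    return response.json()       # shaped like the SEOResponse schema defined above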