from enum import Enum
from typing import List, Optional

from pydantic import BaseModel, Field, root_validator, validator


class VerdictEnum(str, Enum):
    TRUE = "True"
    FALSE = "False"
    PARTIALLY_TRUE = "Partially True"
    UNVERIFIED = "Unverified"


class ConfidenceEnum(str, Enum):
    HIGH = "High"
    MEDIUM = "Medium"
    LOW = "Low"


class FactCheckRequest(BaseModel):
    query: Optional[str] = Field(
        None,
        min_length=3,
        max_length=500,
        description="The claim or statement to be fact-checked",
        example="Did NASA confirm finding alien structures on Mars in 2024?",
    )
    url: Optional[str] = Field(
        None,
        description="URL to be fact-checked",
        example="https://example.com/article",
    )

    @root_validator(pre=True)
    def validate_at_least_one(cls, values):
        """Validate that at least one of query or url is provided."""
        query = values.get("query")
        url = values.get("url")
        if not query and not url:
            raise ValueError("At least one of 'query' or 'url' must be provided")
        return values

    @validator("url")
    def validate_url(cls, v):
        """Validate URL format if provided."""
        if v is not None and len(v) < 3:
            raise ValueError("URL must be at least 3 characters")
        return v


class Source(BaseModel):
    url: str
    name: str = ""

    @validator("url")
    def validate_url(cls, v):
        """Reject empty or implausibly short source URLs."""
        if not v or len(v) < 3:
            raise ValueError("URL must not be empty and must be at least 3 characters")
        return v


class UnverifiedFactCheckResponse(BaseModel):
    claim: str = Field(
        ...,
        min_length=10,
        max_length=1000,
        description="The exact claim being verified",
    )
    verdict: VerdictEnum = Field(..., description="The verification verdict")
    confidence: ConfidenceEnum = Field(..., description="Confidence level in the verdict")
    sources: List[Source] = Field(
        default_factory=list,
        description="List of sources used in verification",
    )
    evidence: str = Field(
        ...,
        min_length=20,
        max_length=500,
        description="Concise summary of key evidence",
    )
    explanation: str = Field(
        ...,
        min_length=50,
        max_length=1000,
        description="Detailed explanation of verification findings",
    )
    additional_context: str = Field(
        ...,
        min_length=20,
        max_length=500,
        description="Important context about the verification",
    )


class FactCheckResponse(BaseModel):
    claim: str = Field(
        ...,
        min_length=10,
        max_length=1000,
        description="The exact claim being verified",
    )
    verdict: VerdictEnum = Field(..., description="The verification verdict")
    confidence: ConfidenceEnum = Field(..., description="Confidence level in the verdict")
    sources: List[Source] = Field(
        ...,
        min_items=1,
        description="List of sources used in verification",
    )
    evidence: str = Field(
        ...,
        min_length=20,
        max_length=500,
        description="Concise summary of key evidence",
    )
    explanation: str = Field(
        ...,
        min_length=50,
        max_length=1000,
        description="Detailed explanation of verification findings",
    )
    additional_context: str = Field(
        ...,
        min_length=20,
        max_length=500,
        description="Important context about the verification",
    )

    class Config:
        json_schema_extra = {
            "example": {
                "claim": "NASA confirmed finding alien structures on Mars in 2024",
                "verdict": "False",
                "confidence": "High",
                "sources": [
                    {
                        "url": "https://www.nasa.gov/mars-exploration",
                        "name": "NASA Mars Exploration",
                    }
                ],
                "evidence": "NASA has made no such announcement. Recent Mars rover images show natural rock formations.",
                "explanation": (
                    "Multiple fact-checking organizations investigated this claim. "
                    "NASA's official communications and Mars mission reports from 2024 "
                    "contain no mention of alien structures."
                ),
                "additional_context": "Similar false claims about alien structures on Mars have circulated periodically.",
            }
        }


class ErrorResponse(BaseModel):
    detail: str
    error_code: str = Field(..., example="VALIDATION_ERROR")
    path: str = Field(..., example="/check-facts")
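

# --- Illustrative usage (a minimal sketch, not part of the schema module itself) ---
# The values below are invented purely to demonstrate how the validators behave;
# run this module directly to see them in action. Assumes a Pydantic version that
# supports the validator/root_validator decorators used above.
if __name__ == "__main__":
    from pydantic import ValidationError

    # A request with only a query satisfies the root validator.
    request = FactCheckRequest(
        query="Did NASA confirm finding alien structures on Mars in 2024?"
    )
    print(request)

    # Omitting both fields triggers the root validator and raises a ValidationError.
    try:
        FactCheckRequest()
    except ValidationError as exc:
        print(exc)

    # A well-formed response built from the enums and the nested Source model.
    response = FactCheckResponse(
        claim="NASA confirmed finding alien structures on Mars in 2024",
        verdict=VerdictEnum.FALSE,
        confidence=ConfidenceEnum.HIGH,
        sources=[
            Source(url="https://www.nasa.gov/mars-exploration", name="NASA Mars Exploration")
        ],
        evidence="NASA has made no such announcement. Recent Mars rover images show natural rock formations.",
        explanation=(
            "Multiple fact-checking organizations investigated this claim. NASA's official "
            "communications and Mars mission reports from 2024 contain no mention of alien structures."
        ),
        additional_context="Similar false claims about alien structures on Mars have circulated periodically.",
    )
    print(response)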