yangxiaoyu-crypto 426cf4d2cd Add university scraper system with backend, frontend, and configs
- Add src/university_scraper module with scraper, analyzer, and CLI
- Add backend FastAPI service with API endpoints and database models
- Add frontend React app with university management pages
- Add configs for Harvard, Manchester, and UCL universities
- Add artifacts with various scraper implementations
- Add Docker compose configuration for deployment
- Update .gitignore to exclude generated files

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-22 15:25:08 +08:00


"""爬取任务相关的Pydantic模型"""
from datetime import datetime
from typing import Optional, List
from pydantic import BaseModel
class JobCreate(BaseModel):
"""创建任务请求"""
university_id: int
script_id: Optional[int] = None
class JobResponse(BaseModel):
"""任务响应"""
id: int
university_id: int
script_id: Optional[int] = None
status: str
progress: int
current_step: Optional[str] = None
started_at: Optional[datetime] = None
completed_at: Optional[datetime] = None
error_message: Optional[str] = None
created_at: datetime
class Config:
from_attributes = True
class JobStatusResponse(BaseModel):
"""任务状态响应"""
id: int
status: str
progress: int
current_step: Optional[str] = None
logs: List["LogResponse"] = []
class LogResponse(BaseModel):
"""日志响应"""
id: int
level: str
message: str
created_at: datetime
class Config:
from_attributes = True
# 解决循环引用
JobStatusResponse.model_rebuild()
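
The `from_attributes = True` config lets `JobResponse` and `LogResponse` be built directly from ORM rows, and the three schemas map naturally onto the backend's job endpoints. As a rough illustration only — the route paths, the `Job` ORM model with its `logs` relationship, and the `get_db()` dependency below are assumptions, not part of this commit — a FastAPI router using these schemas might look like:

# Hypothetical sketch: a FastAPI router wired to the schemas above.
# Route paths, the Job ORM model, and get_db() are assumed names,
# not taken from the actual backend in this commit.
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

from app.database import get_db                      # assumed session dependency
from app.models import Job                            # assumed SQLAlchemy model
from app.schemas.job import (
    JobCreate, JobResponse, JobStatusResponse, LogResponse,
)

router = APIRouter(prefix="/api/jobs", tags=["jobs"])


@router.post("", response_model=JobResponse)
def create_job(payload: JobCreate, db: Session = Depends(get_db)):
    # Persist a new job in the "pending" state; a worker would pick it up later.
    job = Job(
        university_id=payload.university_id,
        script_id=payload.script_id,
        status="pending",
        progress=0,
    )
    db.add(job)
    db.commit()
    db.refresh(job)  # created_at assumed to be filled by a column default
    # from_attributes=True lets the schema read fields straight off the ORM object.
    return JobResponse.model_validate(job)


@router.get("/{job_id}/status", response_model=JobStatusResponse)
def get_job_status(job_id: int, db: Session = Depends(get_db)):
    job = db.query(Job).filter(Job.id == job_id).first()
    if job is None:
        raise HTTPException(status_code=404, detail="Job not found")
    # JobStatusResponse has no from_attributes config, so it is built explicitly;
    # LogResponse does have it, so each ORM log row can be validated directly.
    return JobStatusResponse(
        id=job.id,
        status=job.status,
        progress=job.progress,
        current_step=job.current_step,
        logs=[LogResponse.model_validate(log) for log in job.logs],
    )

A worker process would then update `status`, `progress`, and `current_step` and append log rows as the scrape runs, while the React frontend can poll the status endpoint and render `JobStatusResponse.logs`.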