Add university scraper system with backend, frontend, and configs
- Add src/university_scraper module with scraper, analyzer, and CLI
- Add backend FastAPI service with API endpoints and database models
- Add frontend React app with university management pages
- Add configs for Harvard, Manchester, and UCL universities
- Add artifacts with various scraper implementations
- Add Docker compose configuration for deployment
- Update .gitignore to exclude generated files

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
backend/app/schemas/job.py | 52 additions (new file)
@@ -0,0 +1,52 @@
"""Pydantic models for scraping jobs."""

from datetime import datetime
from typing import Optional, List
from pydantic import BaseModel


class JobCreate(BaseModel):
    """Request body for creating a job."""
    university_id: int
    script_id: Optional[int] = None


class JobResponse(BaseModel):
    """Job response."""
    id: int
    university_id: int
    script_id: Optional[int] = None
    status: str
    progress: int
    current_step: Optional[str] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    error_message: Optional[str] = None
    created_at: datetime

    class Config:
        from_attributes = True


class JobStatusResponse(BaseModel):
    """Job status response."""
    id: int
    status: str
    progress: int
    current_step: Optional[str] = None
    logs: List["LogResponse"] = []


class LogResponse(BaseModel):
    """Log entry response."""
    id: int
    level: str
    message: str
    created_at: datetime

    class Config:
        from_attributes = True


# Resolve the forward reference to LogResponse
JobStatusResponse.model_rebuild()
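For reference, a minimal sketch of how these schemas would typically be wired into a FastAPI route. The Job ORM model, the get_db dependency, and the import paths below are assumed names for illustration; they are not part of this diff.

# Hypothetical usage sketch: Job, get_db, and the module paths below are
# assumed names, not part of this commit.
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

from app.core.database import get_db   # assumed location
from app.models.job import Job          # assumed SQLAlchemy model
from app.schemas.job import JobCreate, JobResponse, JobStatusResponse

router = APIRouter(prefix="/jobs", tags=["jobs"])


@router.post("", response_model=JobResponse)
def create_job(payload: JobCreate, db: Session = Depends(get_db)):
    # from_attributes=True on JobResponse lets FastAPI serialize the ORM
    # object returned here directly into the response schema.
    job = Job(
        university_id=payload.university_id,
        script_id=payload.script_id,
        status="pending",
        progress=0,
    )
    db.add(job)
    db.commit()
    db.refresh(job)
    return job


@router.get("/{job_id}/status", response_model=JobStatusResponse)
def get_job_status(job_id: int, db: Session = Depends(get_db)):
    # model_rebuild() in schemas/job.py resolved the "LogResponse" forward
    # reference, so the nested logs list validates against LogResponse here.
    job = db.get(Job, job_id)
    return JobStatusResponse.model_validate(job, from_attributes=True)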