Add university scraper system with backend, frontend, and configs
- Add src/university_scraper module with scraper, analyzer, and CLI - Add backend FastAPI service with API endpoints and database models - Add frontend React app with university management pages - Add configs for Harvard, Manchester, and UCL universities - Add artifacts with various scraper implementations - Add Docker compose configuration for deployment - Update .gitignore to exclude generated files 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
37
backend/app/config.py
Normal file
37
backend/app/config.py
Normal file
@ -0,0 +1,37 @@
|
||||
"""应用配置"""
|
||||
|
||||
from pydantic_settings import BaseSettings
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application settings.

    Values are read from environment variables (and a local ``.env`` file,
    see the inner ``Config``), falling back to the defaults declared below.
    """

    # Application metadata
    APP_NAME: str = "University Scraper API"
    APP_VERSION: str = "1.0.0"
    DEBUG: bool = True

    # Database configuration.
    # SQLite is used in development; in production use e.g.:
    #   postgresql://user:password@localhost/university_scraper
    DATABASE_URL: str = "sqlite:///./university_scraper.db"

    # Redis configuration (broker/result backend for the Celery task queue)
    REDIS_URL: str = "redis://localhost:6379/0"

    # CORS configuration — typed list[str] (was bare `list`) so pydantic
    # validates each origin as a string and can parse the value from the
    # environment unambiguously.
    CORS_ORIGINS: list[str] = ["http://localhost:3000", "http://127.0.0.1:3000"]

    # Agent configuration (used for auto-generating scraper scripts);
    # None means the agent features are disabled.
    OPENROUTER_API_KEY: Optional[str] = None

    # File-storage paths for generated scripts and scrape results
    SCRIPTS_DIR: str = "./scripts"
    RESULTS_DIR: str = "./results"

    class Config:
        # Load overrides from a local .env file; env var names are
        # matched case-sensitively against the field names above.
        env_file = ".env"
        case_sensitive = True
|
||||
|
||||
# Module-level singleton: importing modules share this one Settings
# instance (values are resolved once, at import time).
settings = Settings()
|
||||
Reference in New Issue
Block a user