Add university scraper system with backend, frontend, and configs
- Add src/university_scraper module with scraper, analyzer, and CLI
- Add backend FastAPI service with API endpoints and database models
- Add frontend React app with university management pages
- Add configs for Harvard, Manchester, and UCL universities
- Add artifacts with various scraper implementations
- Add Docker compose configuration for deployment
- Update .gitignore to exclude generated files

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
backend/app/database.py (new file, 35 lines)
@@ -0,0 +1,35 @@
"""Database connection and session management."""

from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from .config import settings

# Create the database engine; SQLite needs check_same_thread=False so the
# connection can be shared across request-handling threads.
engine = create_engine(
    settings.DATABASE_URL,
    connect_args={"check_same_thread": False} if "sqlite" in settings.DATABASE_URL else {},
    echo=settings.DEBUG
)

# Session factory
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Declarative base class for ORM models
Base = declarative_base()


def get_db():
    """Yield a database session (dependency injection)."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


def init_db():
    """Initialize the database (create all tables)."""
    # Import model modules so their tables register on Base.metadata
    from .models import university, script, job, result  # noqa
    Base.metadata.create_all(bind=engine)
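For context, `get_db` is the kind of generator dependency FastAPI consumes via `Depends`, and `init_db` would typically be called once at application startup. The sketch below shows that wiring under stated assumptions: the `app.database` import path, the `University` model class, and the route path are illustrative guesses, not taken from this commit.

```python
# Hypothetical wiring sketch -- module paths, the University model, and the
# route are assumptions for illustration, not part of this commit.
from fastapi import Depends, FastAPI
from sqlalchemy.orm import Session

from app.database import get_db, init_db
from app.models.university import University  # assumed model class

app = FastAPI()


@app.on_event("startup")
def on_startup():
    # Create tables on first run (no-op if they already exist)
    init_db()


@app.get("/universities/count")
def count_universities(db: Session = Depends(get_db)):
    # get_db yields a session per request and closes it afterwards
    return {"count": db.query(University).count()}
```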