- Add technical documentation (技术方案.md) with system architecture and design details
- Create FastAPI application structure with modular organization (api, core, models, services, utils)
- Implement job data crawler service with incremental collection from the third-party API
- Add Kafka service integration with Docker Compose configuration for the message queue
- Create data models for job listings, progress tracking, and API responses
- Implement REST API endpoints for data consumption (/consume, /status) and task management
- Add a progress persistence layer using SQLite for tracking collection offsets
- Implement date filtering logic to extract data published within the last 7 days (see the sketch after this list)
- Create an API client service for third-party data source integration
- Add configuration management with environment-based settings
- Include Docker support with Dockerfile and docker-compose.yml for containerized deployment
- Add logging configuration and utility functions for date parsing
- Include requirements.txt with all Python dependencies and README documentation
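The 7-day filtering mentioned above could look roughly like the sketch below. This is a minimal, hypothetical version: the module path, the `parse_publish_date` / `is_within_days` names, and the accepted date formats are assumptions, not taken from the actual repository.

```python
"""Hypothetical sketch of the date-parsing utilities (e.g. app/utils/date_utils.py)."""
from datetime import datetime, timedelta
from typing import Optional

# Formats the third-party API might use; adjust to the real payload.
_DATE_FORMATS = ("%Y-%m-%d %H:%M:%S", "%Y-%m-%d", "%Y/%m/%d")


def parse_publish_date(raw: str) -> Optional[datetime]:
    """Parse a publish-date string, returning None if no known format matches."""
    for fmt in _DATE_FORMATS:
        try:
            return datetime.strptime(raw.strip(), fmt)
        except ValueError:
            continue
    return None


def is_within_days(raw: str, days: int = 7) -> bool:
    """True if the record was published within the last `days` days."""
    published = parse_publish_date(raw)
    if published is None:
        return False
    now = datetime.now()
    return now - timedelta(days=days) <= published <= now
```

The crawler would call something like `is_within_days` on each record before publishing it to Kafka, dropping anything outside the window.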
`app/main.py` (36 lines, 862 B, Python):
"""FastAPI应用入口"""
|
||
import logging
|
||
from contextlib import asynccontextmanager
|
||
from fastapi import FastAPI
|
||
from app.core.config import settings
|
||
from app.core.logging import setup_logging
|
||
from app.api import router
|
||
from app.services import kafka_service
|
||
|
||
setup_logging()
|
||
logger = logging.getLogger(__name__)
|
||
|
||
|
||
@asynccontextmanager
|
||
async def lifespan(app: FastAPI):
|
||
"""应用生命周期管理"""
|
||
logger.info("服务启动中...")
|
||
yield
|
||
logger.info("服务关闭中...")
|
||
kafka_service.close()
|
||
|
||
|
||
app = FastAPI(
|
||
title="招聘数据采集服务",
|
||
description="从八爪鱼API采集招聘数据,通过Kafka提供消费接口",
|
||
version=settings.app.version,
|
||
lifespan=lifespan
|
||
)
|
||
|
||
app.include_router(router)
|
||
|
||
|
||
if __name__ == "__main__":
|
||
import uvicorn
|
||
uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)
|
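For context, here is a rough idea of the router that `app.include_router(router)` pulls in. Only the /consume and /status paths are named in the commit message; the query parameters, response shapes, and the `kafka_service.consume()` helper used here are assumptions for illustration, not the project's actual API.

```python
"""Hypothetical sketch of the app.api router (consumption endpoints only)."""
from fastapi import APIRouter, Query

from app.services import kafka_service  # assumed to expose a consume() helper

router = APIRouter()


@router.get("/consume")
async def consume(max_records: int = Query(100, ge=1, le=1000)):
    """Pull up to `max_records` job records from the Kafka topic."""
    records = kafka_service.consume(max_records=max_records)  # assumed method
    return {"count": len(records), "data": records}


@router.get("/status")
async def status():
    """Report collection progress, e.g. the SQLite-tracked offset."""
    return {"status": "ok"}
```

In either case the service can be started locally with `uvicorn app.main:app --reload` (as the `__main__` block does) or through the provided docker-compose.yml.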