- Add technical documentation (技术方案.md) with system architecture and design details - Create FastAPI application structure with modular organization (api, core, models, services, utils) - Implement job data crawler service with incremental collection from third-party API - Add Kafka service integration with Docker Compose configuration for message queue - Create data models for job listings, progress tracking, and API responses - Implement REST API endpoints for data consumption (/consume, /status) and task management - Add progress persistence layer using SQLite for tracking collection offsets - Implement date filtering logic to extract data published within 7 days - Create API client service for third-party data source integration - Add configuration management with environment-based settings - Include Docker support with Dockerfile and docker-compose.yml for containerized deployment - Add logging configuration and utility functions for date parsing - Include requirements.txt with all Python dependencies and README documentation
13 lines
432 B
Python
13 lines
432 B
Python
"""Services package.

Re-exports the service-layer singletons and their classes so callers can
write ``from services import crawler_manager`` instead of reaching into
the individual submodules.
"""

from .api_client import api_client, BazhuayuClient
from .kafka_service import kafka_service, KafkaService
from .progress_store import progress_store, ProgressStore
from .crawler import crawler_manager, CrawlerManager, TaskCrawler

# Explicit public API: one module-level singleton plus its class per service.
__all__ = [
    "api_client", "BazhuayuClient",
    "kafka_service", "KafkaService",
    "progress_store", "ProgressStore",
    "crawler_manager", "CrawlerManager", "TaskCrawler",
]