---
# Docker environment configuration file
# Copy this file to config.yml and fill in your account credentials.

# Application settings
app:
  name: job-crawler
  version: "1.0.0"  # quoted so it is always read as a string, never a number
  debug: false

# Bazhuayu (Octoparse) API settings
api:
  base_url: "https://openapi.bazhuayu.com"
  username: "your_username"
  password: "your_password"
  batch_size: 100
  # Multi-task configuration
  # NOTE(review): indentation was lost in the source; `tasks` is assumed to be
  # nested under `api` — confirm against the code that consumes this config.
  tasks:
    - id: "00f3b445-d8ec-44e8-88b2-4b971a228b1e"
      name: "青岛招聘数据"
      enabled: true
    - id: "task-id-2"
      name: "任务2"
      enabled: false

# Kafka settings (Docker-internal network)
kafka:
  bootstrap_servers: "kafka:29092"  # quoted: value contains a colon
  topic: job_data
  consumer_group: job_consumer_group

# Crawler settings
crawler:
  interval: 300  # crawl interval — presumably seconds; verify against consumer
  filter_days: 7
  max_workers: 5
  max_expired_batches: 3  # consecutive-expired-batch threshold (takes effect on the first crawl)
  auto_start: true  # start crawling automatically when the container starts

# Database settings
database:
  path: /app/data/crawl_progress.db