@@ -33,7 +33,7 @@ class SearchTaskInput(BaseModel):
 class CrawlTaskParams(BaseModel):
     overwrite: Optional[bool] = False
-    proxy_pool: Optional[List[str]] = Field(default_factory=list)
+    proxy_pool_url: Optional[str] = None
 
 
 class ConvertTaskParams(BaseModel):
     result_ids: List[str] = Field(..., min_length=1)
@@ -55,8 +55,9 @@ class WorkerModel(BaseModel):
         return v
 
 
 class CeleryWorker:
-    def __init__(self, python_exe: str=sys.executable):
+    def __init__(self, python_exe: str = sys.executable, config=config):
         self.python_exe = python_exe
+        self.config = config
         self.workers_model: Dict[str, WorkerModel] = {}
         self.redis_url = f"redis://{config.redis.host}:{config.redis.port}/{config.redis.db}"
         self.redis_client = redis.Redis(host=config.redis.host, port=config.redis.port, db=config.redis.db)
@@ -254,8 +255,12 @@ class CeleryWorker:
         # Validate the task data according to the worker type
         if worker_name == 'search':
             task_model = SearchTaskInput(**data)
+            if select_proxy == 'pool':
+                task_model.config.proxy_pool_url = f"http://{self.config.backend.host}:{self.config.backend.port}/api/proxy/proxies-pool"
         elif worker_name == 'crawl':
             task_model = CrawlTaskParams(**data)
+            if select_proxy == 'pool':
+                task_model.proxy_pool_url = f"http://{self.config.backend.host}:{self.config.backend.port}/api/proxy/proxies-pool"
         elif worker_name == 'convert':
             task_model = ConvertTaskParams(**data)
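
Taken together, the change routes the backend's proxy-pool endpoint URL into the task model instead of shipping an inline proxy list with each task. Below is a minimal sketch of that wiring in isolation; the `BACKEND_HOST`/`BACKEND_PORT` constants and the standalone `select_proxy` variable are placeholders standing in for `self.config.backend` and the dispatch argument:

```python
from typing import Optional

from pydantic import BaseModel


class CrawlTaskParams(BaseModel):
    overwrite: Optional[bool] = False
    # Renamed from `proxy_pool`: holds the URL of the proxy-pool endpoint
    # rather than an inline list of proxies.
    proxy_pool_url: Optional[str] = None


# Placeholders standing in for `self.config.backend.host` / `.port`.
BACKEND_HOST, BACKEND_PORT = "127.0.0.1", 8000

select_proxy = "pool"  # would come from the dispatch request
task_model = CrawlTaskParams(overwrite=True)
if select_proxy == "pool":
    # Mirror of the assignment in CeleryWorker above.
    task_model.proxy_pool_url = (
        f"http://{BACKEND_HOST}:{BACKEND_PORT}/api/proxy/proxies-pool"
    )

print(task_model.model_dump())
# {'overwrite': True, 'proxy_pool_url': 'http://127.0.0.1:8000/api/proxy/proxies-pool'}
```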
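On the consuming side, a crawl worker would then resolve the pool lazily at execution time. The helper below is hypothetical and assumes the `/api/proxy/proxies-pool` endpoint returns a JSON array of `host:port` strings, which the diff does not confirm:

```python
import random
from typing import List, Optional

import requests


def fetch_proxies(proxy_pool_url: Optional[str]) -> List[str]:
    """Hypothetical helper: pull the current proxy list from the pool endpoint.

    Assumes the endpoint serves a JSON array of "host:port" strings;
    adjust to whatever /api/proxy/proxies-pool actually returns.
    """
    if not proxy_pool_url:
        return []  # no pool configured for this task
    resp = requests.get(proxy_pool_url, timeout=5)
    resp.raise_for_status()
    return resp.json()


proxies = fetch_proxies("http://127.0.0.1:8000/api/proxy/proxies-pool")
proxy = random.choice(proxies) if proxies else None  # e.g. rotate per request
```

Passing only the URL means workers always see the current pool at run time, at the cost of one extra HTTP round trip per task.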