| 12345678910111213141516171819202122232425 |
- from mylib.logu import logger
- from worker.celery.app import app as celery_app
- from worker.celery.crawl_client import submit_page_crawl_tasks
- from worker.celery.search_client import get_uncompleted_keywords,submit_tasks
# Submit demo tasks to their designated queues.
def main():
    """Submit one example task to each queue (search, crawl, convert).

    Returns the AsyncResult of the search task so a caller can poll it.
    NOTE(review): the crawl/convert task names below are inferred from the
    search task's naming pattern — confirm against the worker registry.
    """
    # Submit to search_queue
    keywords = ['123']
    # browser_config = {"proxy_pool": ["http://127.0.0.1:7890"]}
    search_result = celery_app.send_task(
        'search_worker.add', kwargs={"keywords": keywords}
    )
    print(f"Search task submitted. Task ID: {search_result.id}")

    # Submit to crawl_queue.
    # BUG FIX: original referenced an undefined `crawl_task`; submit by
    # name via the app instead. TODO confirm registered task name.
    crawl_result = celery_app.send_task(
        'crawl_worker.add', args=["example crawl data"], queue='crawl_queue'
    )
    print(f"Crawl task submitted. Task ID: {crawl_result.id}")

    # Submit to convert_queue.
    # BUG FIX: original referenced an undefined `convert_task`.
    # TODO confirm registered task name.
    convert_result = celery_app.send_task(
        'convert_worker.add', args=["example convert data"], queue='convert_queue'
    )
    print(f"Convert task submitted. Task ID: {convert_result.id}")

    # BUG FIX: the original returned here *before* the prints and the
    # crawl/convert submissions, leaving them unreachable. Return last.
    return search_result
- if __name__ == "__main__":
- main()
|