# redis_celery_t.py

from mylib.logu import logger
from worker.celery.app import app as celery_app
from worker.celery.crawl_client import submit_page_crawl_tasks
from worker.celery.search_client import get_uncompleted_keywords, submit_tasks


# Submit tasks to their designated queues.
def main():
    # Submit to search_queue.
    keywords = ['123']
    # browser_config = {"proxy_pool": ["http://127.0.0.1:7890"]}
    search_result = celery_app.send_task('search_worker.add', kwargs={"keywords": keywords})
    print(f"Search task submitted. Task ID: {search_result.id}")

    # Submit to crawl_queue. 'crawl_worker.add' is an assumed task name;
    # the original referenced an undefined `crawl_task`.
    crawl_result = celery_app.send_task('crawl_worker.add', args=["example crawl data"], queue='crawl_queue')
    print(f"Crawl task submitted. Task ID: {crawl_result.id}")

    # Submit to convert_queue. 'convert_worker.add' is likewise assumed;
    # the original referenced an undefined `convert_task`.
    convert_result = celery_app.send_task('convert_worker.add', args=["example convert data"], queue='convert_queue')
    print(f"Convert task submitted. Task ID: {convert_result.id}")

    # Return after all submissions; the original returned early here, which
    # made the crawl/convert submissions unreachable.
    return search_result


if __name__ == "__main__":
    main()
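

# ---------------------------------------------------------------------------
# For reference, a minimal sketch of the consuming side: a worker app that
# registers the 'search_worker.add' task name used by send_task above and
# binds it to search_queue. The module name, broker/backend URLs, and task
# body are assumptions for illustration, not the actual project layout.
#
# from celery import Celery
#
# app = Celery('search_worker',
#              broker='redis://127.0.0.1:6379/0',
#              backend='redis://127.0.0.1:6379/1')
#
# # Route the task so a worker started with
# # `celery -A search_app worker -Q search_queue` picks it up.
# app.conf.task_routes = {'search_worker.add': {'queue': 'search_queue'}}
#
# @app.task(name='search_worker.add')
# def add(keywords):
#     # Placeholder body: echo back the keywords the producer submitted.
#     return {'keywords': keywords, 'status': 'done'}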