# t_cmd.py — scratch/debug script (~1.5 KB) for exercising the celery worker helpers.
  1. import asyncio
  2. import sys
  3. import time
  4. import os
  5. from pathlib import Path
  6. sys.path.append(str(Path(r'G:\code\upwork\zhang_crawl_bio\ui\backend')))
  7. from src.services.celery_worker import process_manager,celery_worker,WorkerModel
  8. import subprocess
  9. async def main():
  10. # 检查系统 PATH
  11. system_path = os.environ.get("PATH", "")
  12. pyexe = sys.executable
  13. file = r'G:\code\upwork\zhang_crawl_bio\worker\celery\client.py'
  14. command = f"{pyexe} {file}"
  15. # 自动生成queue_name
  16. worker1 = WorkerModel(name="worker1", cmd=["python", "app.py"])
  17. print(worker1.queue_name) # 输出: worker1_queue
  18. # 显式指定queue_name
  19. worker2 = WorkerModel(name="worker2", queue_name="custom_queue", cmd=["python", "app.py"])
  20. print(worker2.queue_name) # 输出: custom_queue
  21. return
  22. # startupinfo = subprocess.STARTUPINFO()
  23. # startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
  24. # process = subprocess.Popen(['cmd', '/k', command ], shell=True,startupinfo=startupinfo, creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
  25. # process = subprocess.Popen(['start','cmd', '/k', command ], shell=True)
  26. # res = process.pid
  27. res = await celery_worker.start_worker('search')
  28. res = await celery_worker.start_worker('crawl')
  29. res = await celery_worker.start_worker('convert')
  30. print(res)
  31. input('Press Enter to exit...')
  32. await celery_worker.start_worker('crawl')
  33. # `/k` 表示执行后保持窗口,`/c` 则表示执行后关闭
# Script entry point: run the async demo on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())