import asyncio
import os
import subprocess
import sys
import time
from pathlib import Path

# Make the backend package importable from this standalone test script.
sys.path.append(str(Path(r'G:\code\upwork\zhang_crawl_bio\ui\backend')))
from src.services.celery_worker import process_manager, celery_worker, WorkerModel


async def main():
    # Inspect the system PATH (kept for debugging; not used below).
    system_path = os.environ.get("PATH", "")
    pyexe = sys.executable
    file = r'G:\code\upwork\zhang_crawl_bio\worker\celery\client.py'
    command = f"{pyexe} {file}"

    # queue_name is generated automatically from the worker name.
    worker1 = WorkerModel(name="worker1", cmd=["python", "app.py"])
    print(worker1.queue_name)  # Output: worker1_queue

    # queue_name can also be specified explicitly.
    worker2 = WorkerModel(name="worker2", queue_name="custom_queue", cmd=["python", "app.py"])
    print(worker2.queue_name)  # Output: custom_queue
    return  # Early exit for this test: the worker-start code below is not reached.

    # Alternative ways to launch the client in a separate cmd window
    # (`/k` keeps the window open after the command finishes, `/c` closes it):
    # startupinfo = subprocess.STARTUPINFO()
    # startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    # process = subprocess.Popen(['cmd', '/k', command], shell=True, startupinfo=startupinfo, creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
    # process = subprocess.Popen(['start', 'cmd', '/k', command], shell=True)
    # res = process.pid

    # Start one Celery worker per queue; only the last result is printed.
    res = await celery_worker.start_worker('search')
    res = await celery_worker.start_worker('crawl')
    res = await celery_worker.start_worker('convert')
    print(res)
    input('Press Enter to exit...')
    await celery_worker.start_worker('crawl')


if __name__ == "__main__":
    asyncio.run(main())
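
# --- Hypothetical sketch (not part of the original script) -------------------
# The test above relies on WorkerModel deriving queue_name from name when it is
# not supplied ("worker1" -> "worker1_queue"). The dataclass below is a minimal
# stand-in illustrating that behaviour; the real
# src.services.celery_worker.WorkerModel may be implemented differently, and
# only the field names (name, cmd, queue_name) and the "<name>_queue" default
# are taken from the script's output comments above.
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class WorkerModelSketch:
    name: str
    cmd: List[str]
    queue_name: Optional[str] = None

    def __post_init__(self):
        # Fall back to "<name>_queue" when no explicit queue_name is given.
        if self.queue_name is None:
            self.queue_name = f"{self.name}_queue"


# Example usage:
# WorkerModelSketch(name="worker1", cmd=["python", "app.py"]).queue_name
#     -> "worker1_queue"
# WorkerModelSketch(name="worker2", queue_name="custom_queue", cmd=["python", "app.py"]).queue_name
#     -> "custom_queue"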