Explorar el Código

完善进程管理,支持异步

mrh hace 9 meses
padre
commit
96e9da080a

+ 1 - 0
ui/backend/.gitignore

@@ -0,0 +1 @@
+t.py

+ 8 - 2
ui/backend/config.yaml

@@ -3,6 +3,12 @@ mimo_exe: g:\code\upwork\zhang_crawl_bio\download\proxy_pool\mihomo-windows-amd6
 redis_exe: g:\code\upwork\zhang_crawl_bio\download\Redis-x64-5.0.14.1\redis-server.exe
 redis_port: null
 sub:
+  file: g:\code\upwork\zhang_crawl_bio\download\proxy_pool\6137e542.yaml
+  proxies:
+    9660:
+      file_path: g:\code\upwork\zhang_crawl_bio\download\proxy_pool\temp\9660.yaml
+      name: null
+      port: 9660
   start_port: 9660
-  url: null
-  file: null
+  temp_dir: g:\code\upwork\zhang_crawl_bio\download\proxy_pool\temp
+  url: https://www.yfjc.xyz/api/v1/client/subscribe?token=b74f2207492053926f7511a8e474048f

+ 8 - 2
ui/backend/main.py

@@ -4,13 +4,19 @@ import sys
 sys.path.append(str(Path(__file__).parent))
 from fastapi import FastAPI
 from routers.proxy import router
-
+from fastapi.middleware.cors import CORSMiddleware
 # 创建 FastAPI 应用实例
 app = FastAPI(
     description="",
     version="1.0.0",
 )
-
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],  # 允许所有域名访问
+    allow_credentials=True,
+    allow_methods=["*"],  # 允许所有方法(GET, POST, PUT, DELETE 等)
+    allow_headers=["*"],  # 允许所有头部
+)
 # 将 gpt_router 挂载到应用中
 app.include_router(router, prefix="/api/proxy", tags=["chat"])
 

+ 81 - 15
ui/backend/routers/proxy.py

@@ -4,9 +4,11 @@ from typing import List
 from fastapi import APIRouter
 from pydantic import BaseModel
 from utils.win import get_proxy_settings
-from utils.mihomo import get_sub,update_config,save_yaml_dump
-from utils.config import Config, config
+from utils.mihomo import get_sub,update_config,save_yaml_dump,find_free_port
+from utils.config import Config, config, PROXY_POLL_DIR, Sub, Proxy
 from utils.process_mgr import process_manager
+from utils.logu import get_logger,logger
+from src.services.subscription_manager import SubscriptionManager
 router = APIRouter()
 
 class SysProxyResponse(BaseModel):
@@ -21,6 +23,37 @@ def read_root():
     proxy_enable, proxy_server = get_proxy_settings()
     return SysProxyResponse(sys_open=proxy_enable, proxy_server=proxy_server)
 
class ProxyUpdate(BaseModel):
    """Request body for creating/updating a local proxy entry."""
    id: int
    name: str
    port: int

@router.get("/proxies")
def get_proxies():
    """Return the configured proxy entries (port -> proxy mapping), or [] when none."""
    global config
    return config.sub.proxies or []

async def stop_proxy(proxy: Proxy):
    """Stop the mihomo instance backing *proxy* and drop its config entry.

    NOTE(review): config.sub.proxies is a dict keyed by port (see config.yaml:
    `proxies: {9660: {...}}`); the original iterated it like a list, called
    `.remove()` on it (AttributeError), and shadowed the `proxy` parameter
    with the loop variable, stopping everything instead of the given proxy.
    """
    global config
    if proxy.port is None:
        return
    # YAML int keys may round-trip as str depending on model typing — try both.
    key = proxy.port if proxy.port in config.sub.proxies else str(proxy.port)
    entry = config.sub.proxies.pop(key, None)
    if entry is not None:
        stop_mimo()
        config.save()

@router.post("/proxies")
def create_proxy(request: ProxyUpdate = None):
    """Allocate a free port and register a new proxy entry.

    The original declared an un-annotated `request` parameter and used list
    operations (`append`/`remove`) on the `proxies` dict; both fixed here.
    The request body is optional and currently only logged, as before.
    """
    global config
    logger.info(f"request: {request}")
    new_proxy = Proxy(port=find_free_port(config.sub.start_port, config.sub.start_port + 10000))
    # dict keyed by port: plain assignment replaces any stale entry for the same port
    config.sub.proxies[new_proxy.port] = new_proxy
    config.save()
    return {"err": 0, "msg": new_proxy}
 
 def start_mimo(exe_path: str, file_path: str):
     global process_manager
@@ -31,19 +64,52 @@ def start_mimo(exe_path: str, file_path: str):
 def stop_mimo():
     global process_manager
     process_manager.stop_process("mimo")
-async def post_sub_url(config: Config, url: str):
-    save_path = Path(config.sub.temp_dir).parent / f"{hashlib.md5(url.encode()).hexdigest()[:8]}.yaml"
-    config.sub.file = await get_sub(url, save_path)
-    config.sub.url = url
-@router.post("/sub_url")
-def post_sub_url(request: SubUrlPost):
-    global config
-    config.sub.url = request.sub_url
+
+
@router.get("/subs")
async def get_subscriptions():
    """List known subscriptions.

    NOTE(review): SubscriptionManager defines no list_subscriptions();
    the original raised AttributeError (500) on every call. Until multi-sub
    support lands, expose the single configured subscription.
    """
    manager = SubscriptionManager()
    sub = manager.sub
    data = [sub.dict()] if sub and sub.url else []
    return {"err": 0, "data": data}

@router.post("/subs")
async def create_subscription(request: SubUrlPost):
    """Download a subscription from the posted URL and report its id."""
    try:
        manager = SubscriptionManager()
        await manager.download_subscription(request.sub_url)
        # download_subscription returns a Config, which has no `.id` (the
        # original's `subscription.id` always raised). The id is the md5
        # prefix the manager uses as the saved file's stem.
        sub_id = Path(manager.sub.file).stem if manager.sub.file else None
        return {"err": 0, "data": {"id": sub_id}}
    except Exception as e:
        return {"err": 1, "msg": str(e)}

@router.post("/subs/{sub_id}/start")
async def start_subscription(sub_id: str):
    """Create a per-port config for the subscription and start its proxy.

    NOTE(review): create_custom_config() takes a *port*, not a sub id, and
    the original referenced an undefined `proxy_manager` global. A numeric
    path segment is treated as the port; None lets the manager allocate one.
    """
    import inspect
    try:
        manager = SubscriptionManager()
        port = int(sub_id) if sub_id.isdigit() else None
        config_path = await manager.create_custom_config(port)
        res = manager.proxies_mgr.start_proxy(config.mimo_exe, str(config_path))
        if inspect.isawaitable(res):  # ProxyManager may be asyncio-backed
            await res
        return {"err": 0}
    except Exception as e:
        return {"err": 1, "msg": str(e)}

@router.post("/subs/{sub_id}/stop")
async def stop_subscription(sub_id: str):
    """Stop the proxy for the subscription's per-port config.

    Derives the config path directly instead of re-writing the custom
    config just to learn its path (as the original did).
    """
    import inspect
    try:
        manager = SubscriptionManager()
        port = int(sub_id) if sub_id.isdigit() else None
        config_path = Path(manager.sub.temp_dir) / f"{port}.yaml"
        res = manager.proxies_mgr.stop_proxy(str(config_path))
        if inspect.isawaitable(res):
            await res
        return {"err": 0}
    except Exception as e:
        return {"err": 1, "msg": str(e)}

@router.post("/subs/{sub_id}/custom")
async def save_custom_config(sub_id: str, config: dict):
    """Write a custom per-port config for the subscription.

    `config` here is the request body and shadows the module-level config
    object inside this function (kept for interface compatibility).
    NOTE(review): SubscriptionManager has no get_subscription(); the source
    subscription file's existence is validated instead.
    """
    try:
        manager = SubscriptionManager()
        if not manager.sub.file or not Path(manager.sub.file).exists():
            return {"err": 1, "msg": "Subscription not found"}

        config_path = await manager.create_custom_config(port=config.get("port"))
        return {"err": 0, "data": {"path": str(config_path)}}
    except Exception as e:
        return {"err": 1, "msg": str(e)}
 
@@ -59,4 +125,4 @@ def main():
 
 
 if __name__ == "__main__":
-    main()
+    main()

+ 23 - 0
ui/backend/src/services/models.py

@@ -0,0 +1,23 @@
from pydantic import BaseModel
from datetime import datetime
from typing import Optional, Dict, Any
from pathlib import Path


class Subscription(BaseModel):
    """A downloaded proxy subscription plus its lifecycle metadata."""
    id: str                  # short identifier for the subscription
    url: str                 # source URL it was fetched from
    file_path: Path          # where the YAML payload lives on disk
    config: Dict[str, Any]   # parsed subscription configuration
    status: str = "stopped"  # lifecycle state (defaults to stopped)
    created_at: datetime
    updated_at: datetime


class ProxyConfig(BaseModel):
    """Runtime settings for one proxy instance."""
    mixed_port: int
    external_controller: str
    proxies: list
    meta: Optional[Dict] = None


class CustomConfigCreate(BaseModel):
    """Request payload for deriving a custom config from a base subscription."""
    base_sub_id: str
    config_overrides: Dict[str, Any]

+ 57 - 0
ui/backend/src/services/proxy_manager.py

@@ -0,0 +1,57 @@
from pathlib import Path
from typing import Optional
from utils.process_mgr import process_manager
import yaml

class ProxyManager:
    """Manage proxy process lifecycle: start, stop, restart, management URL.

    NOTE(review): utils.process_mgr.ProcessManager is asyncio-based and its
    start_process signature is (name, command, ...). The original called it
    with the old (command, name) argument order and never awaited the
    coroutines, so nothing was actually started/stopped. These methods are
    therefore async now; callers must await them.
    """

    async def start_proxy(self, exe_path: str, config_path: str) -> Optional[int]:
        """Start a proxy process for *config_path*.

        Args:
            exe_path: path to the proxy executable.
            config_path: path to the YAML config file.
        Returns:
            The child's PID, or None when it could not be started.
        """
        command = [exe_path, "-f", config_path]
        # name first, command second — matches ProcessManager.start_process
        return await process_manager.start_process(f"mimo_{Path(config_path).stem}", command)

    async def stop_proxy(self, config_path: str) -> bool:
        """Stop the proxy process started for *config_path*."""
        return await process_manager.stop_process(f"mimo_{Path(config_path).stem}")

    async def restart_proxy(self, exe_path: str, config_path: str) -> Optional[int]:
        """Stop then start the proxy for *config_path*; return the new PID."""
        await self.stop_proxy(config_path)
        return await self.start_proxy(exe_path, config_path)

    def get_management_url(self, config_path: Path) -> Optional[str]:
        """Build the yacd dashboard URL from the config's external-controller.

        Returns None when the config has no external-controller entry.
        """
        with open(config_path, "r", encoding="utf-8") as f:
            cfg = yaml.safe_load(f)

        if not cfg or "external-controller" not in cfg:
            return None

        host, port = cfg["external-controller"].split(":")
        return f"https://yacd.metacubex.one/?hostname={host}&port={port}&secret=#/proxies"
+

+ 71 - 0
ui/backend/src/services/subscription_manager.py

@@ -0,0 +1,71 @@
import hashlib
from pathlib import Path
from typing import Dict, Optional
import httpx
import yaml
from datetime import datetime
from src.services.proxy_manager import ProxyManager
from utils.config import PROXY_POLL_DIR, config, Sub, Proxy, Config
from utils.mihomo import get_sub, update_config, save_yaml_dump, find_free_port


class SubscriptionManager:
    """Manage the subscription lifecycle: download, per-port configs, start/stop."""

    def __init__(self, config: Config = config):
        self.sub = config.sub
        self.config = config
        self.proxies_mgr = ProxyManager()

    def save_config(self):
        """Persist the configuration to its file."""
        return self.config.save()

    async def download_subscription(self, url: str = None) -> Config:
        """Download the subscription YAML from *url* (or the configured URL).

        Args:
            url: subscription URL; falls back to the configured one.
        Returns:
            The updated Config object.
        Raises:
            ValueError: when no URL is given and none is configured (the
                original crashed with AttributeError on ``None.encode()``).
        """
        if not url:
            url = self.sub.url
        if not url:
            raise ValueError("no subscription url configured")
        sub_id = hashlib.md5(url.encode()).hexdigest()[:8]
        save_path = await get_sub(url, save_path=PROXY_POLL_DIR / f"{sub_id}.yaml")
        self.sub.file = str(save_path)
        self.sub.url = url  # remember the source so later refreshes can reuse it
        self.save_config()
        return self.config

    def delete_subscription(self):
        """Delete the downloaded subscription file; a missing file is fine."""
        if self.sub.file:
            Path(self.sub.file).unlink(missing_ok=True)

    async def create_custom_config(self, port: int = None) -> Path:
        """Derive a per-port config file from the source subscription.

        Allocates a free port when *port* is None (the original's default
        made ``port + 1`` raise TypeError). Returns the path of the written
        config file — the original returned ``save_config()``'s result,
        breaking callers that expect a Path.
        """
        if port is None:
            port = find_free_port(self.sub.start_port, self.sub.start_port + 10000)
        overrides = {
            "mixed-port": port,
            "external-controller": f"127.0.0.1:{port + 1}",
            "bind-address": "127.0.0.1",
        }
        temp_path = Path(self.sub.temp_dir) / f"{port}.yaml"
        update_config(Path(self.sub.file), overrides, temp_path)
        self.sub.proxies.update({port: Proxy(file_path=str(temp_path), port=port)})
        self.save_config()
        return temp_path

    def remove_custom_config(self, port: int):
        """Forget the custom config registered for *port* and persist."""
        if port in self.sub.proxies:
            del self.sub.proxies[port]
        return self.save_config()

    def start_proxy(self, port: int):
        """Start the proxy registered for *port*; False when unknown.

        NOTE(review): ProxyManager.start_proxy may be a coroutine — callers
        should await the returned value when it is awaitable.
        """
        proxy: Proxy = self.sub.proxies.get(port)
        if not proxy:
            return False
        return self.proxies_mgr.start_proxy(self.config.mimo_exe, proxy.file_path)

+ 32 - 0
ui/backend/tests/mytests/t_process_mgr.py

@@ -0,0 +1,32 @@
import asyncio
from pathlib import Path
import sys
import time
sys.path.append(str(Path(r'G:\code\upwork\zhang_crawl_bio\ui\backend')))
from utils.process_mgr import ProcessManager
from utils.logu import get_logger
logger = get_logger('mytests', file=True)

async def main():
    """Smoke test: start several mihomo instances, idle, then clean them up."""
    process_manager = ProcessManager()
    bin_path = r'G:\code\upwork\zhang_crawl_bio\download\proxy_pool\mihomo-windows-amd64-go120.exe'
    list_files = [
        r'G:\code\upwork\zhang_crawl_bio\download\proxy_pool\yfjc_9361.yaml',
        r'G:\code\upwork\zhang_crawl_bio\download\proxy_pool\yfjc_9363.yaml',
        r'G:\code\upwork\zhang_crawl_bio\download\proxy_pool\yfjc_9365.yaml',
        r'G:\code\upwork\zhang_crawl_bio\download\proxy_pool\yfjc_9367.yaml',
        r'G:\code\upwork\zhang_crawl_bio\download\proxy_pool\yfjc_9369.yaml',
    ]
    for file in list_files:
        pid = await process_manager.start_process(f"mimo_{Path(file).stem}", [bin_path, '-f', file])
        logger.info(f"start {pid}, file {file}")

    logger.info(f"processes {process_manager.processes}")
    try:
        # Idle until interrupted. Under asyncio.run, Ctrl-C cancels this task
        # rather than raising KeyboardInterrupt inside the loop, so the
        # original `except KeyboardInterrupt` never fired and cleanup never
        # ran; `finally` guarantees it on both cancellation and normal exit.
        while True:
            await asyncio.sleep(1)
    finally:
        logger.info("cleaning up child processes")
        await process_manager.cleanup()

if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        logger.info("KeyboardInterrupt")

+ 8 - 2
ui/backend/utils/config.py

@@ -1,19 +1,25 @@
 import yaml
 from pathlib import Path
 from pydantic import BaseModel
-from typing import List, Dict, Union,Optional
+from typing import List, Dict, Union,Optional,Any
 APP_PATH = Path(__file__).parent.parent
 CONFIG_PATH = APP_PATH / "config.yaml"
 REPO_BASE_DIR = Path(APP_PATH.parent.parent)
 DOWNLOAD_DIR = REPO_BASE_DIR / "download"
 PROXY_POLL_DIR = DOWNLOAD_DIR / "proxy_pool"
 
class Proxy(BaseModel):
    """One managed mihomo instance: its port and generated config file."""
    name: Optional[str] = None
    port: Optional[int] = None
    file_path: Optional[str] = None


class Sub(BaseModel):
    """Subscription settings: source URL, storage paths, per-port proxies."""
    url: Optional[str] = None
    start_port: Optional[int] = 9660  # first port probed when allocating proxies
    file: Optional[str] = None
    temp_dir: Optional[str] = str(PROXY_POLL_DIR / "temp")
    # Keyed by port. Was Dict[str, Any]: pydantic coerced the integer keys
    # from config.yaml to str, so `proxies.get(port)` (int) lookups in
    # SubscriptionManager missed after a reload. Typed values also give
    # Proxy objects back instead of raw dicts.
    proxies: Optional[Dict[int, Proxy]] = {}
 
 class Config(BaseModel):
     sub: Optional[Sub] = Sub()

+ 2 - 3
ui/backend/utils/logu.py

@@ -5,10 +5,9 @@ from typing import Dict
 import loguru
 from loguru._logger import Logger
 from pathlib import Path
-sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
-
 LOG_LEVEL='info'
-LOG_DIR=Path(r'.').parent.parent / 'output' /'logs'
+LOG_DIR=Path(__file__).parent.parent / 'output' /'logs'
+print(LOG_DIR.absolute())
 # python_xx/site-packages/loguru/_handler.py  _serialize_record
 FORMAT = '<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | <cyan>{file}:{line}</cyan> :<cyan>{function}</cyan> - {message}'
 loguru.logger.remove()

+ 23 - 0
ui/backend/utils/mihomo.py

@@ -12,6 +12,29 @@ import yaml
 from pathlib import Path
 from typing import Optional, Dict, Any
 from fastapi import HTTPException
def get_sub_file_info(file_path: str):
    """Parse a subscription YAML file.

    Returns:
        tuple: (name of the first proxy group, all proxy groups, proxies with
        vendor banner entries filtered out).
    Raises:
        ValueError: when proxy-groups, the first group's name, or proxies
        are missing from the file.

    NOTE(review): the original computed the filtered list (twice, via the
    misspelled ``fileter_proxies``) and then returned the unfiltered
    ``proxies``; the filtered list is returned here.
    """
    with open(file_path, "r", encoding='utf-8') as f:
        sub_yaml = yaml.safe_load(f)
    groups = sub_yaml.get("proxy-groups", [])
    if not groups:
        raise ValueError("subscription file is not valid")
    name = groups[0].get("name", "")
    if not name:
        raise ValueError("subscription file is not valid")
    proxies = sub_yaml.get("proxies", [])
    if not proxies:
        raise ValueError("subscription file is not valid")
    # Drop pseudo-proxies that vendors inject as traffic/plan banners.
    keywords = ['流量', '套餐', '剩余', '测试']
    filtered_proxies = [
        proxy for proxy in proxies
        if not any(keyword in proxy.get("name", "") for keyword in keywords)
    ]
    return name, groups, filtered_proxies
+
+
+
 def save_yaml_dump(config: dict, save_as: Path) -> Path:
     """保存配置文件"""
     save_as.parent.mkdir(parents=True, exist_ok=True)

+ 214 - 52
ui/backend/utils/process_mgr.py

@@ -1,63 +1,225 @@
import asyncio
import atexit
import os
import platform
import signal
import subprocess
import sys
import time
from collections.abc import MutableMapping
from pathlib import Path
from typing import Dict, Optional

from utils.logu import get_logger, LOG_DIR

# Windows-specific imports (pywin32)
if platform.system() == 'Windows':
    import win32api
    import win32job
    import win32con

logger = get_logger('process_mgr')


class ProcessManager:
    """Async lifecycle manager for background child processes.

    On Windows, children are bound to a job object so they die with this
    process; on Unix they are started in a new session and signalled as a
    whole process group. Child output is appended to per-process log files.
    """

    def __init__(self):
        # name -> {"process": asyncio Process, "log_file": Path,
        #          "start_time": float, "log_fd": file}
        self.processes: MutableMapping[str, dict] = {}
        self.job_object = None
        self.lock = asyncio.Lock()

        if platform.system() == 'Windows':
            self._create_windows_job()

        self._register_signal_handlers()
        atexit.register(self._sync_cleanup)

    def _create_windows_job(self):
        """Create a Windows job object that kills children when it closes."""
        try:
            self.job_object = win32job.CreateJobObject(None, "")
            info = win32job.QueryInformationJobObject(
                self.job_object,
                win32job.JobObjectExtendedLimitInformation
            )
            info['BasicLimitInformation']['LimitFlags'] = (
                win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE |
                win32job.JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK
            )
            win32job.SetInformationJobObject(
                self.job_object,
                win32job.JobObjectExtendedLimitInformation,
                info
            )
        except Exception as e:
            logger.error(f"Windows job object creation failed: {e}")

    def _register_signal_handlers(self):
        """Register platform-appropriate termination handlers."""
        if platform.system() == 'Windows':
            self._setup_windows_ctrl_handler()
        else:
            self._setup_unix_signal_handlers()

    def _setup_windows_ctrl_handler(self):
        """Install a Windows console control (Ctrl-C/Break) handler."""
        try:
            import win32api
            win32api.SetConsoleCtrlHandler(self._windows_ctrl_handler, True)
        except ImportError:
            logger.warning("pywin32 not installed, Windows signal handling disabled")
        except Exception as e:
            logger.error(f"Windows control handler setup failed: {str(e)}")

    def _windows_ctrl_handler(self, dwCtrlType):
        """Windows console event callback.

        NOTE(review): this callback runs on a separate OS thread, where
        asyncio.create_task() raises (no running loop on that thread — the
        original did exactly that). Fall back to a blocking cleanup there.
        """
        if dwCtrlType in {win32con.CTRL_C_EVENT, win32con.CTRL_BREAK_EVENT}:
            try:
                asyncio.get_running_loop().create_task(self.cleanup())
            except RuntimeError:
                self._sync_cleanup()
            return True  # event handled
        return False  # let other handlers see the event

    def _setup_unix_signal_handlers(self):
        """Install SIGTERM/SIGINT handlers on Unix.

        The original called asyncio.get_running_loop() unconditionally and
        raised RuntimeError when the manager was constructed at import time
        (no loop running yet); fall back to plain signal handlers then.
        """
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            for sig in (signal.SIGTERM, signal.SIGINT):
                signal.signal(sig, lambda signum, frame: self._sync_cleanup())
            return
        for sig in (signal.SIGTERM, signal.SIGINT):
            try:
                loop.add_signal_handler(sig, self._unix_signal_handler, sig)
            except NotImplementedError:
                logger.warning(f"Signal {sig} not supported on this platform")

    def _unix_signal_handler(self, signum):
        """Loop-thread callback for Unix signals: schedule async cleanup."""
        logger.info(f"Received signal {signum.name}")
        asyncio.create_task(self.cleanup())

    async def start_process(
        self,
        name: str,
        command: list,
        log_dir: Path = LOG_DIR / "process_mgr"
    ) -> Optional[int]:
        """Start and track a background process.

        Args:
            name: unique key for the process.
            command: argv list (no shell).
            log_dir: directory for the per-process log file.
        Returns:
            The child's PID, or None when the name is taken or spawn failed.
        """
        async with self.lock:
            if name in self.processes:
                logger.warning(f"Process {name} already exists")
                return None

            log_dir.mkdir(parents=True, exist_ok=True)
            log_file = log_dir / f"{name}.log"

            log_fd = None
            try:
                # binary append mode: interleaved runs accumulate in one log
                log_fd = open(log_file, "ab")

                process = await asyncio.create_subprocess_exec(
                    *command,
                    stdout=log_fd,
                    stderr=subprocess.STDOUT,
                    stdin=subprocess.DEVNULL,
                    start_new_session=True  # own session/group → killpg works
                )

                # bind to the job object so the child dies with us
                if platform.system() == 'Windows' and self.job_object:
                    self._bind_to_windows_job(process.pid)

                self.processes[name] = {
                    "process": process,
                    "log_file": log_file,
                    "start_time": time.time(),
                    "log_fd": log_fd
                }

                logger.info(f"Started process {name} (PID: {process.pid})")
                return process.pid

            except Exception as e:
                logger.error(f"Failed to start {name}: {str(e)}")
                if log_fd is not None:
                    log_fd.close()
                return None

    def _bind_to_windows_job(self, pid: int):
        """Assign the child process to the manager's Windows job object."""
        try:
            h_process = win32api.OpenProcess(
                win32con.PROCESS_ALL_ACCESS, False, pid
            )
            win32job.AssignProcessToJobObject(self.job_object, h_process)
            win32api.CloseHandle(h_process)
        except Exception as e:
            logger.error(f"Windows job assignment failed: {str(e)}")

    async def stop_process(self, name: str) -> bool:
        """Stop the named process (and its children); True on success.

        The entry is removed and its log fd closed exactly once, in
        `finally` — the original closed/deleted in an early-return path AND
        in `finally`, so the second `del` raised KeyError.
        """
        async with self.lock:
            if name not in self.processes:
                return False

            proc_info = self.processes[name]
            process = proc_info["process"]
            log_fd = proc_info["log_fd"]

            try:
                # already exited on its own: just clean up (in finally)
                if process.returncode is not None:
                    return True

                if platform.system() == 'Windows':
                    # /T terminates the whole child tree
                    subprocess.run(
                        ["taskkill", "/F", "/T", "/PID", str(process.pid)],
                        check=True,
                        stdout=subprocess.DEVNULL,
                        stderr=subprocess.DEVNULL
                    )
                else:
                    # signal the whole process group (start_new_session=True)
                    os.killpg(os.getpgid(process.pid), signal.SIGTERM)

                await process.wait()
                logger.info(f"Stopped process {name} (PID: {process.pid})")
                return True

            except subprocess.CalledProcessError:
                logger.warning(f"Force terminating {name} (PID: {process.pid})")
                process.terminate()
                await process.wait()
                return True
            except Exception as e:
                logger.error(f"Error stopping {name}: {str(e)}")
                return False
            finally:
                log_fd.close()
                self.processes.pop(name, None)

    async def cleanup(self):
        """Stop every managed process and release OS resources.

        Deliberately does NOT take self.lock: stop_process() acquires it and
        asyncio.Lock is not reentrant — the original dead-locked here by
        holding the lock while awaiting stop_process().
        """
        logger.info("Performing async cleanup...")
        for name in list(self.processes.keys()):
            await self.stop_process(name)

        # release the Windows job object handle
        if platform.system() == 'Windows' and self.job_object:
            win32api.CloseHandle(self.job_object)
            self.job_object = None

    def _sync_cleanup(self):
        """Synchronous cleanup entry point for atexit / raw signal handlers.

        NOTE(review): asyncio.run creates a fresh loop; self.lock was
        created earlier and on some Python versions is bound to the original
        loop — confirm on the targeted interpreter.
        """
        if not self.processes:
            return
        try:
            loop = asyncio.get_event_loop()
            if loop.is_running():
                loop.create_task(self.cleanup())
            else:
                loop.run_until_complete(self.cleanup())
        except RuntimeError:
            try:
                asyncio.run(self.cleanup())
            except RuntimeError:
                pass


# Module-level singleton — proxy.py and proxy_manager.py do
# `from utils.process_mgr import process_manager`; the refactor dropped this
# line and broke those imports.
process_manager = ProcessManager()