mrh 11 сар өмнө
commit
2c848b1c75
9 өөрчлөгдсөн 594 нэмэгдсэн , 0 устгасан
  1. 16 0
      .env
  2. 6 0
      .gitignore
  3. 17 0
      Dockerfile
  4. 58 0
      docker-compose.yaml
  5. 0 0
      gpt.md
  6. 297 0
      readme.md
  7. 51 0
      src/code-server-plugins.py
  8. 13 0
      src/config.toml
  9. 136 0
      代码架构.md

+ 16 - 0
.env

@@ -0,0 +1,16 @@
+LLM_BASE_URL=https://aiapi.magong.site/v1
+LLM_API_KEY=sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf
+LLM_MODEL="openai/deepseek-chat"
+# CUSTOM_LLM_PROVIDER=openai
+OPENAI_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf"
+OPENAI_API_BASE="https://aiapi.magong.site/v1"
+DEBUG=true
+LOG_ALL_EVENTS=true
+# SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.16-nikolaik
+SANDBOX_RUNTIME_CONTAINER_IMAGE=all-hands-code-server:0.16-nikolaik
+SANDBOX_USER_ID=1000
+# WORKSPACE_MOUNT_PATH=/home/mrh/program/openhands/testm/workspace
+WORKSPACE_MOUNT_PATH=/home/mrh/program/openhands/OpenHands
+VSCODE_HOST=sv-v2
+VSCODE_PORT=9806
+FILE_STORE_PATH=/home/mrh/program/openhands/cache/file_store

+ 6 - 0
.gitignore

@@ -0,0 +1,6 @@
+.vscode
+OpenHands
+testm
+venv
+workspace
+cache

+ 17 - 0
Dockerfile

@@ -0,0 +1,17 @@
+# 使用现有的镜像作为基础镜像
+FROM docker.all-hands.dev/all-hands-ai/runtime:0.16-nikolaik
+
+# 设置代理服务器
+ENV http_proxy=http://192.168.2.11:1881
+ENV https_proxy=http://192.168.2.11:1881
+ENV no_proxy=localhost,127.0.0.1
+
+# 安装 code-server
+RUN curl -fsSL https://code-server.dev/install.sh | sh
+COPY ./src/code-server-plugins.py /openhands/code/openhands/runtime/plugins/vscode/__init__.py
+# 清除代理设置(可选)
+ENV http_proxy=
+ENV https_proxy=
+ENV no_proxy=
+# sudo docker build -t all-hands-code-server:0.16-nikolaik .
+# dk run --name csruntime --rm -p 9806:9806 -v /home/mrh/program/openhands/testm:/home/mrh/program/openhands/testm all-hands-code-server:0.16-nikolaik bash -c "code-server --bind-addr 0.0.0.0:9806 --auth none /home/mrh/program/openhands/testm"

+ 58 - 0
docker-compose.yaml

@@ -0,0 +1,58 @@
+version: '3.8'
+
+services:
+  openhands-app:
+    image: docker.all-hands.dev/all-hands-ai/openhands:0.16
+    container_name: openhands-app
+    # user: "${SANDBOX_USER_ID}:${SANDBOX_USER_ID}"
+    env_file:
+      - .env
+    # environment:
+    #   - LLM_BASE_URL=${LLM_BASE_URL}
+    #   - LLM_API_KEY=${LLM_API_KEY:-default_value}
+    #   - LLM_MODEL=${LLM_MODEL:-openai/deepseek-coder}
+    #   - LOG_ALL_EVENTS=${LOG_ALL_EVENTS:-true}
+    #   - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE}
+      # - SANDBOX_USER_ID=${SANDBOX_USER_ID}
+    #   - VSCODE_HOST=${VSCODE_HOST}
+    #   - VSCODE_PORT=${VSCODE_PORT}
+    #   - WORKSPACE_MOUNT_PATH=${WORKSPACE_MOUNT_PATH}
+    #   - DEBUG=${DEBUG:-false}
+    #   - /home/mrh/program/openhands/.env:/app/.env
+    volumes:
+      - ${WORKSPACE_MOUNT_PATH}:${WORKSPACE_MOUNT_PATH}
+      - /var/run/docker.sock:/var/run/docker.sock
+      - /home/mrh/program/openhands/OpenHands/config.toml:/app/config.toml
+      - ${FILE_STORE_PATH}:/mnt/file_store
+      # - /home/mrh/program/openhands/OpenHands/openhands:/app/openhands
+      # - /home/mrh/program/openhands/OpenHands/openhands/core/cli.py:/app/openhands/core/cli.py
+      # - /home/mrh/program/openhands/OpenHands/openhands/agenthub/codeact_agent/codeact_agent.py:/app/openhands/agenthub/codeact_agent/codeact_agent.py
+      -  /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox
+    extra_hosts:
+      # 记得关闭防火墙,或者 sudo ufw allow from  172.0.0.0/8
+      - "host.docker.internal:host-gateway"
+    # network_mode: "bridge"
+    ports:
+      - "3000:3000"
+    # command: python
+    stdin_open: true
+    tty: true
+
+
+  # openhands-codeserv:
+  #   image: openhands-codeserv
+  #   container_name: openhands-codeserv-mrh
+  #   environment:
+  #     - LLM_BASE_URL=${LLM_BASE_URL}
+  #     - LLM_API_KEY=${LLM_API_KEY:-default_value}
+  #     - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE}
+  #     - SANDBOX_USER_ID=${SANDBOX_USER_ID}
+  #     - WORKSPACE_MOUNT_PATH=${WORKSPACE_MOUNT_PATH}
+  #   volumes:
+  #     - ${WORKSPACE_MOUNT_PATH}:${WORKSPACE_MOUNT_PATH}
+  #     - /var/run/docker.sock:/var/run/docker.sock
+  #   extra_hosts:
+  #     - "host.docker.internal:host-gateway"
+  #   # network_mode: "host"
+  #   stdin_open: true
+  #   tty: true

+ 0 - 0
gpt.md


+ 297 - 0
readme.md

@@ -0,0 +1,297 @@
+## 快速启动
+
+### 命令行
+```shell
+cd /home/mrh/program/openhands && dc up 
+export LLM_BASE_URL="http://10.0.0.3:3000/v1";
+export LLM_BASE_URL="https://aiapi.magong.site/v1";
+export LLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
+export LLM_MODEL="openai/deepseek-coder";
+# 可以自定义修改挂载路径
+# export WORKSPACE_MOUNT_PATH="/"
+python -m openhands.core.cli
+
+# 需要关闭警告提示: if litellm.suppress_debug_info is False:
+# 不能这样设置:  export LLM_MODEL="deepseek-coder";
+
+export HOSTED_VLLM_API_BASE="http://10.0.0.3:3000/v1";
+export HOSTED_VLLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
+export LLM_MODEL="hosted_vllm/deepseek-coder"
+```
+
+### WEB UI
+
+```shell
+# .env 文件,在 docker-compose.yml 同目录
+LLM_BASE_URL=https://aiapi.magong.site/v1
+LLM_API_KEY=sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf
+LLM_MODEL="openai/deepseek-coder"
+# CUSTOM_LLM_PROVIDER=openai
+# OPENAI_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf"
+# OPENAI_API_BASE="https://aiapi.magong.site/v1"
+
+LOG_ALL_EVENTS=true
+SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.15-nikolaik
+SANDBOX_USER_ID=1000
+WORKSPACE_MOUNT_PATH=/home/mrh/program/openhands/OpenHands
+VSCODE_HOST=sv-v2
+docker run -it --rm --pull=always \
+    -e SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.16-nikolaik \
+    -e LOG_ALL_EVENTS=true \
+    -v /var/run/docker.sock:/var/run/docker.sock \
+    -v ~/.openhands:/home/openhands/.openhands \
+    -v /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox \
+    -p 3000:3000 \
+    --add-host host.docker.internal:host-gateway \
+    --name openhands-app \
+    docker.all-hands.dev/all-hands-ai/openhands:0.16
+```
+
+```yaml
+version: '3.8'
+
+services:
+  openhands-app:
+    image: docker.all-hands.dev/all-hands-ai/openhands:0.15
+    container_name: openhands-app
+    environment:
+      - LLM_BASE_URL=${LLM_BASE_URL}
+      - LLM_API_KEY=${LLM_API_KEY:-default_value}
+      - LLM_MODEL=${LLM_MODEL:-openai/deepseek-coder}
+      - LOG_ALL_EVENTS=${LOG_ALL_EVENTS:-true}
+      - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE}
+      - SANDBOX_USER_ID=${SANDBOX_USER_ID}
+      - WORKSPACE_MOUNT_PATH=${WORKSPACE_MOUNT_PATH}
+    volumes:
+      - ${WORKSPACE_MOUNT_PATH}:${WORKSPACE_MOUNT_PATH}
+      - /var/run/docker.sock:/var/run/docker.sock
+      -  /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox
+    extra_hosts:
+      # 记得关闭防火墙,或者 sudo ufw allow from  172.0.0.0/8
+      - "host.docker.internal:host-gateway"
+    ports:
+      - "3000:3000"
+    # command: python
+    stdin_open: true
+    tty: true
+```
+
+```shell
+export WORKSPACE_BASE=$(pwd)/workspace
+docker exec -it openhands-app bash
+
+    export LLM_BASE_URL="http://10.0.0.3:3000/v1";
+    export LLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
+    export LLM_MODEL="deepseek/deepseek-coder";
+    export WORKSPACE_BASE=/home/mrh/program/openhands/OpenHands/openhands;
+    docker run -it --rm \
+        -e LLM_BASE_URL="http://10.0.0.3:3000/v1" \
+        -e SANDBOX_RUNTIME_CONTAINER_IMAGE=ghcr.io/all-hands-ai/runtime:0.14-nikolaik \
+        -e SANDBOX_USER_ID=$(id -u) \
+        -e WORKSPACE_MOUNT_PATH=$WORKSPACE_BASE \
+        -v $WORKSPACE_BASE:/opt/workspace_base \
+        -v /var/run/docker.sock:/var/run/docker.sock \
+        --add-host host.docker.internal:host-gateway \
+        --network host \
+        --name openhands-app \
+        ghcr.io/all-hands-ai/openhands:0.14 python -m openhands.core.cli
+
+
+docker rm -f $(docker ps -a -q --filter "name=^openhands-")
+docker rm -f $(docker ps -a -q --filter "name=^openhands-runtime")
+# 自定义接口可用模型
+docker exec -it openhands-app bash 
+export OPENAI_BASE_URL="http://10.0.0.32:3001/v1";
+export OPENAI_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
+
+export LLM_BASE_URL="http://10.0.0.3:3000/v1";
+export LLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
+
+
+
+```
+
+### 固定 Runtime
+```shell
+# docker-compose 挂载  - /home/mrh/program/openhands/OpenHands/config.toml:/app/config.toml
+[core]
+workspace_base="./workspace"
+# 设置 jwt 秘钥后,就能自动生成 token 固定 runtime 容器
+jwt_secret = "123"
+```
+
+如果要修改挂载路径,需要删除容器,再重新启动。
+
+## 开发调试
+
+### 文件修改
+```shell
+# 从 docker 容器 openhands-app 内复制到宿主机
+docker cp openhands-app:/app/openhands/core/cli.py /home/mrh/program/openhands
+docker cp openhands-app:/app/openhands/core/cli.py /home/mrh/program/openhands/OpenHands/openhands/core/cli.py
+# 切换到指定版本
+git checkout 0.15.0
+# 挂载
+      - /home/mrh/program/openhands/OpenHands/openhands/core/cli.py:/app/openhands/core/cli.py
+
+python -m openhands.core.cli
+
+# /home/mrh/program/openhands/OpenHands/openhands/server/listen.py
+# /home/mrh/program/openhands/OpenHands/containers/app/Dockerfile
+uvicorn openhands.server.listen:app --host 0.0.0.0 --port 3000
+
+# eventstream_runtime.py:234
+
+```
+
+
+
+### 启动调试
+```shell
+/home/mrh/program/openhands/OpenHands/openhands/runtime/builder/docker.py:/app/openhands/runtime/builder/docker.py
+
+export LLM_BASE_URL="http://10.0.0.3:3000/v1";
+export LLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
+export LLM_MODEL="openai/deepseek-coder";
+export WORKSPACE_BASE=/home/mrh/program/openhands/OpenHands/openhands;
+LLM_BASE_URL=http://10.0.0.3:3000
+LLM_API_KEY=sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf
+LLM_MODEL="openai/deepseek-coder"
+
+os.environ["LLM_BASE_URL"] = "http://10.0.0.3:3000/v1"
+os.environ["LLM_API_KEY"] = "sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf"
+# 不行 os.environ["LLM_MODEL"] = "deepseek/deepseek-chat"
+
+export WORKSPACE_BASE=/home/mrh/program/openhands/testm
+
+docker run -it --rm --pull=always     -e SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.16-nikolaik     -e LOG_ALL_EVENTS=true  -e DEBUG=true   -v /var/run/docker.sock:/var/run/docker.sock     -v ~/.openhands:/home/openhands/.openhands     -v /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox     -p 3000:3000     --add-host host.docker.internal:host-gateway     --name openhands-app     docker.all-hands.dev/all-hands-ai/openhands:0.16
+
+docker run -it --rm --pull=always     -e SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.15-nikolaik     -e LOG_ALL_EVENTS=true  -e DEBUG=true   -v /var/run/docker.sock:/var/run/docker.sock     -v ~/.openhands:/home/openhands/.openhands     -v /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox     -p 3000:3000     --add-host host.docker.internal:host-gateway     --name openhands-app     docker.all-hands.dev/all-hands-ai/openhands:0.15
+
+
+docker run -it --rm \
+  --name runtime16 -p 9807:9807 \
+  -v $WORKSPACE_BASE:/workspace \
+  -v /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox \
+  -v /var/run/docker.sock:/var/run/docker.sock \
+  docker.all-hands.dev/all-hands-ai/runtime:0.16-nikolaik bash
+  
+docker exec -it runtime16 bash
+# /home/mrh/program/openhands/OpenHands/openhands/runtime/impl/eventstream/eventstream_runtime.py:294
+/openhands/micromamba/bin/micromamba run -n openhands poetry run python -u -m openhands.runtime.action_execution_server 9807 --working-dir /workspace --plugins jupyter agent_skills vscode
+# --plugins 
+
+```
+
+
+
+```shell
+curl --location 'https://ai.liaobots.work/v1/chat/completions' \
+--header 'Authorization: Bearer MZRa169Q6p1Gb' \
+--header 'Content-Type: application/json' \
+--data '{
+    "model": "gpt-4",
+    "messages": [
+        {
+            "role": "system",
+            "content": "你是一个大型语言模型,请仔细遵循用户的指示 。"
+        },
+        {
+            "role": "user",
+            "content": "鲁迅和周树人有什么区别"
+        }
+    ],
+    "temperature": 1,
+    "stream": true
+}'
+
+LLM_API_KEY
+LLM_BASE_URL
+LLM_EMBEDDING_MODEL
+LLM_EMBEDDING_DEPLOYMENT_NAME
+LLM_API_VERSION
+```
+
+
+```shell
+# 测试
+curl http://192.168.2.32:3000/v1/chat/completions
+curl http://sv-v2:3000/v1/chat/completions
+curl http://10.0.0.32:3000/v1/chat/completions
+curl http://host.docker.internal:3000/v1/chat/completions
+curl -i http://www.baidu.com
+curl -i http://host.docker.internal:3000
+curl http://192.168.2.32:3001/
+curl http://192.168.2.32:9005/
+curl -i http://host.docker.internal:3001
+```
+
+### busybox
+```shell
+docker cp /home/mrh/program/busybox-x86_64 openhands-runtime-cli:/usr/local/bin/busybox
+dk exec -it openhands-runtime-cli bash
+busybox ip addr
+```
+
+## openai API 配置
+https://docs.all-hands.dev/modules/usage/llms/localLLMs
+
+## 测试本地embedding
+https://docs.llamaindex.ai/en/stable/examples/embeddings/huggingface/
+安装较久,大约有 1G 左右
+```shell
+pip install llama-index-embeddings-huggingface
+pip install llama-index-embeddings-instructor
+pip install llama-index
+```
+
+## 官方容器启动
+由于官方镜像路径 ghcr.io/opendevin/opendevin 特别慢,使用国内镜像源拉取:`docker pull ghcr.nju.edu.cn/opendevin/opendevin:0.3.1`
+大约要下载 3.8G ,最快 2M/s
+
+
+```shell
+# Your OpenAI API key, or any other LLM API key
+export LLM_API_KEY="MZRa169Q6p1Gb"
+export LLM_BASE_URL="https://ai.liaobots.work"
+export OPENAI_API_BASE="https://ai.liaobots.work"
+export OPENAI_API_KEY="MZRa169Q6p1Gb"
+export SSH_HOSTNAME="172.17.0.1"
+export PORT=3000
+
+# The directory you want OpenDevin to modify. MUST be an absolute path!
+WORKSPACE_BASE=$(pwd)/workspace
+docker run -it \
+    --pull=always \
+    -e SANDBOX_USER_ID=$(id -u) \
+    -e WORKSPACE_MOUNT_PATH=$WORKSPACE_BASE \
+    -v $WORKSPACE_BASE:/opt/workspace_base \
+    -v /var/run/docker.sock:/var/run/docker.sock \
+    -p 3000:3000 \
+    --add-host host.docker.internal:host-gateway \
+    --name opendevin-app-$(date +%Y%m%d%H%M%S) \
+    ghcr.io/opendevin/opendevin
+
+
+
+WORKSPACE_BASE=$(pwd)/workspace
+docker run -it \
+    --pull=always \
+    -e LLM_API_KEY="MZRa169Q6p1Gb" \
+    -e LLM_BASE_URL="https://ai.liaobots.work" \
+    -e OPENAI_API_BASE="https://ai.liaobots.work/v1" \
+    -e OPENAI_API_KEY="MZRa169Q6p1Gb" \
+    -e SANDBOX_USER_ID=$(id -u) \
+    -e PERSIST_SANDBOX="true" \
+    -e SSH_PASSWORD="123" \
+    -e WORKSPACE_MOUNT_PATH=/home/mrh/program/opendevin/workspace \
+    -v /home/mrh/program/opendevin/venv/lib/python3.12/site-packages/litellm:/app/.venv/lib/python3.12/site-packages/litellm \
+    -v /home/mrh/program/opendevin/workspace:/opt/workspace_base \
+    -v /var/run/docker.sock:/var/run/docker.sock \
+    -p 3000:3000 \
+    -p 3022:22 \
+    --add-host host.docker.internal:host-gateway \
+    --name opendevin \
+    ghcr.io/opendevin/opendevin:0.6
+```
+

+ 51 - 0
src/code-server-plugins.py

@@ -0,0 +1,51 @@
+import os
+import subprocess
+import time
+import uuid
+from dataclasses import dataclass
+
+from openhands.core.logger import openhands_logger as logger
+from openhands.runtime.plugins.requirement import Plugin, PluginRequirement
+from openhands.runtime.utils.system import check_port_available
+from openhands.utils.shutdown_listener import should_continue
+
+
+@dataclass
+class VSCodeRequirement(PluginRequirement):
+    name: str = 'vscode'
+
+
+class VSCodePlugin(Plugin):
+    name: str = 'vscode'
+
+    async def initialize(self, username: str):
+        self.vscode_port = int(os.environ['VSCODE_PORT'])
+        self.vscode_connection_token = str(uuid.uuid4())
+        assert check_port_available(self.vscode_port)
+        cmd = (
+            f"su - {username} -s /bin/bash << 'EOF'\n"
+            # f'sudo chown -R {username}:{username} /openhands/.code-server\n'
+            'cd /workspace\n'
+            f'exec code-server --bind-addr 0.0.0.0:{self.vscode_port} --auth none \n'
+            'EOF'
+        )
+        logger.debug(cmd)
+        self.gateway_process = subprocess.Popen(
+            cmd,
+            stderr=subprocess.STDOUT,
+            shell=True,
+        )
+        # read stdout until the kernel gateway is ready
+        output = ''
+        while should_continue() and self.gateway_process.stdout is not None:
+            line = self.gateway_process.stdout.readline().decode('utf-8')
+            print(line)
+            output += line
+            if 'at' in line:
+                break
+            time.sleep(1)
+            logger.debug('Waiting for VSCode server to start...')
+
+        logger.debug(
+            f'VSCode server started at port {self.vscode_port}. Output: {output}'
+        )

+ 13 - 0
src/config.toml

@@ -0,0 +1,13 @@
+[core]
+workspace_base="./workspace"
+jwt_secret = "123"
+runtime = "eventstream"
+#runtime = "remote"
+
+# file_store="local"
+# file_store_path="/mnt/file_store"
+
+[sandbox]
+api_key="123"
+local_runtime_url="http://sv-v2"
+#remote_runtime_api_url="http://host.docker.internal:32332"

+ 136 - 0
代码架构.md

@@ -0,0 +1,136 @@
+# web server
+```shell
+
+# ./openhands/server/listen.py
+# ./containers/app/Dockerfile
+# 等同于 openhands-app:/app/openhands/frontend/build ./frontend/build
+
+docker cp openhands-app:/app/openhands /home/mrh/program/openhands/testm
+uvicorn openhands.server.listen:app --host 0.0.0.0 --port 3000
+```
+
+### 启动 runtime
+启动了 web app , websocket_endpoint 利用 websocket 来控制是否启动 runtime
+按浏览器会话来决定runtime任务,可能是为了让不同的客户端会话决定对同一个项目做不同的任务发起 `session = session_manager.add_or_restart_session(sid, websocket)` 
+`await session.loop_recv()` 进入 ws 通信
+>  ./openhands/server/listen.py
+
+根据 ws 协议中的 action 操作runtime `await self.dispatch(data)` 
+初始化 `_initialize_agent` , `await self.agent_session.start` 启动 agent_session
+> ./openhands/server/session/session.py
+
+`await self._create_runtime` ,`runtime_cls = get_runtime_cls(runtime_name)`
+> ./openhands/server/session/agent_session.py
+
+尝试连接到之前的容器,如果之前保留了会话,可根据容器名连接: `async def connect(self):` , `await call_sync_from_async(self._attach_to_container)`
+如果容器不存在,则初始化一个新容器: `await call_sync_from_async(self._init_container)`
+容器名来自前缀+sid `self.container_name = CONTAINER_NAME_PREFIX + sid` ,sid 来自 `listen.py` 会话或者 jwt_token `sid = get_sid_from_token(jwt_token, config.jwt_secret)` `sid = str(uuid.uuid4())`
+> ./openhands/runtime/impl/eventstream/eventstream_runtime.py
+> ./openhands/server/listen.py
+
+在 runtime 内部,`python -u -m openhands.runtime.action_execution_server` 用 api server 来操作文件
+添加所有插件 `plugins_to_load.append(ALL_PLUGINS[plugin]())` 
+加载插件 ` (self._init_plugin(plugin) for plugin in self.plugins_to_load),` 
+初始化插件 `await plugin.initialize(self.username)`
+> ./openhands/runtime/action_execution_server.py
+
+在 runtime 内部,初始化 vscode 插件。 `f'exec /openhands/.openvscode-server/bin/openvscode-server --host 0.0.0.0 --connection-token {self.vscode_connection_token} --port {self.vscode_port}\n'`
+port 地址是 port_mapping 在文件 `eventstream_runtime.py`中 `self.container = self.docker_client.containers.run` 时传参数
+> ./openhands/runtime/plugins/vscode/__init__.py
+
+
+### 文件修改
+```shell
+# 从 docker 容器 openhands-app 内复制到宿主机
+docker cp openhands-app:/app/openhands/core/cli.py /home/mrh/program/openhands
+docker cp openhands-app:/app/openhands/core/cli.py ./openhands/core/cli.py
+# 切换到指定版本
+git checkout 0.15.0
+# 挂载
+      - ./openhands/core/cli.py:/app/openhands/core/cli.py
+
+python -m openhands.core.cli
+
+# ./openhands/server/listen.py
+# ./containers/app/Dockerfile
+uvicorn openhands.server.listen:app --host 0.0.0.0 --port 3000
+
+# eventstream_runtime.py:234
+
+```
+
+## vscode
+route:
+http://sv-v2:3000/api/vscode-url
+
+### vscode 访问地址
+```python
+# ./openhands/runtime/impl/eventstream/eventstream_runtime.py
+# VSCODE_HOST=sv-v2
+                vscode_host = os.environ.get('VSCODE_HOST', "localhost")
+                self._vscode_url = f'http://{vscode_host}:{self._host_port + 1}/?tkn={response_json["token"]}&folder={self.config.workspace_mount_path_in_sandbox}'
+```
+
+vscode server
+./openhands/runtime/utils/runtime_build.py
+./openhands/runtime/utils/runtime_templates/Dockerfile.j2
+```shell
+cd ~/program/openhands/testm
+docker run -it --init -p 9806:3000 -v "$(pwd):/home/workspace:cached" gitpod/openvscode-server
+```
+
+## 构建自己的 Runtime 镜像
+
+🎈 手动构建
+
+> 命令行构建说明: ./containers/runtime/README.md
+> 模板: ./openhands/runtime/utils/runtime_templates/Dockerfile.j2
+> 文档说明: ./docs/i18n/zh-Hans/docusaurus-plugin-content-docs/current/usage/architecture/runtime.md
+> 有关文件: openhands/runtime/utils/runtime_build.py
+
+`build_runtime_image` 使用了构建器 `runtime_builder=DockerRuntimeBuilder(docker.from_env()),`
+ `_build_sandbox_image` 
+> openhands/runtime/utils/runtime_build.py
+创建构建命令: `buildx_cmd` 
+> ./openhands/runtime/builder/docker.py
+
+指定文件夹需要有 `Dockerfile` template 文件。在 `buildx_cmd.append(path)  # must be last!` 中看到 path 是一个文件夹路径,构建时会自动在这个文件夹内查找 `Dockerfile` template 文件。
+在 `_generate_dockerfile` 中看到会渲染 `Dockerfile` template 文件
+根据已有 `Dockerfile.j2` 来渲染这个文件,也就是将文件中 {{base_image}} 等双花括号的内容通过 Jinja 改成正常的 Dockerfile 文件。
+渲染的结果 `Dockerfile` 文件会保存到 `--build_folder` 同等目录下,
+> ./openhands/runtime/utils/runtime_templates/Dockerfile.j2
+
+🎈 启动时,如不存在则自动构建
+
+启动时,`if self.runtime_container_image is None` 会运行 EventStreamRuntime build_runtime_image 构建 runtime 镜像
+> ./openhands/runtime/impl/eventstream/eventstream_runtime.py
+> ./openhands/runtime/utils/runtime_build.py
+> ./openhands/runtime/utils/runtime_templates/Dockerfile.j2
+
+`self.runtime_container_image = self.config.sandbox.runtime_container_image` 取决于 `config.toml` 文件的配置
+> ./openhands/core/config/app_config.py
+
+`config.toml` 从这里来 `load_app_config`
+> ./openhands/server/listen.py
+
+### Dockerfile.j2 解析
+当你运行 `runtime_build.py` 后,调用 `build_runtime_image_in_folder` 函数。
+`base_image` 、 `build_from` 、`extra_deps` 需要作为函数传参
+`build_from` 是根据你的基准镜像来决定构建方式,如果不是默认镜像 nikolaik/python-nodejs 则 Dockerfile.j2 会条件渲染,安装有关的 Python 环境,如果 `build_from` 使用了默认的 Python 环境,则 Dockerfile.j2 会跳过安装,进而检查版本。
+`extra_deps` 似乎没有被用到,运行 py 脚本也没有这个传参。可能考虑未来如果需要额外安装别的软件,可以额外添加这个 shell 代码。
+> OpenHands/openhands/runtime/utils/runtime_build.py
+
+`{% macro setup_base_system() %}` 代码块是 Jinja 模板的宏定义,以 `{% endmacro %}` 结尾,宏定义内的代码可以由Python渲染。代码内实现了常用工具的安装:wget curl sudo apt-utils git
+
+ `{% macro setup_vscode_server() %}` 实现了 openvscode-server 的安装。
+`{% macro install_dependencies() %}` 宏是实现了 openhands 项目中 Python requirement 等软件的安装
+`{% if build_from_scratch %}` 代码内可以看到,它决定了是否调用 `{{ setup_base_system() }} {{ setup_vscode_server() }}` ,也就是 build_from_scratch 变量决定了根据本地镜像和版本,是否复用构建或强制重新构建。
+这些镜像名称的不同区分主要是为了实现以下几个核心用途:
+| 镜像名称变量          | 实际镜像示例                                                                 | 用途                                                                 |
+|-----------------------|--------------------------------------------------------------------------|----------------------------------------------------------------------|
+| `base_image`          | `nikolaik/python-nodejs:python3.12-nodejs22`                              | 基础镜像,包含操作系统和基础工具。                                     |
+| `hash_image_name`     | `docker.all-hands.dev/all-hands-ai/runtime:oh_v0.15_image_nikolaik_tag_python3.12-nodejs22` | 最终目标镜像,包含源代码和所有依赖项。                                 |
+| `lock_image_name`     | `docker.all-hands.dev/all-hands-ai/runtime:oh_v0.15_lock_nikolaik_python3.12-nodejs22` | 基于依赖锁定文件生成的镜像,包含依赖项。                               |
+| `versioned_image_name`| `docker.all-hands.dev/all-hands-ai/runtime:oh_v0.15_nikolaik_python3.12-nodejs22` | 基于基础镜像和 OpenHands 版本生成的镜像,包含基础环境和部分依赖项。     |
+
+默认情况下是 `build_from == BuildFromImageType.SCRATCH` ,即 build_from_scratch == true ,build_from_versioned == false