Tidak Ada Deskripsi

mrh 32bab14095 升级到官方0.17.0 版本 1 tahun lalu
OpenHands @ 642e962f89 32bab14095 升级到官方0.17.0 版本 1 tahun lalu
src 7c33cf7df3 完善代码架构 1 tahun lalu
.env 32bab14095 升级到官方0.17.0 版本 1 tahun lalu
.gitignore 7c33cf7df3 完善代码架构 1 tahun lalu
.gitmodules 7c33cf7df3 完善代码架构 1 tahun lalu
Dockerfile f24bad60a2 完成环境变量 VSCODE_HOST 自定义 1 tahun lalu
dev.md 7c33cf7df3 完善代码架构 1 tahun lalu
docker-compose.yaml 32bab14095 升级到官方0.17.0 版本 1 tahun lalu
gpt.md 2c848b1c75 first commit 1 tahun lalu
readme.md 2c848b1c75 first commit 1 tahun lalu
代码架构.md 7c33cf7df3 完善代码架构 1 tahun lalu

readme.md

快速启动

命令行

cd /home/mrh/program/openhands && dc up 
export LLM_BASE_URL="http://10.0.0.3:3000/v1";
export LLM_BASE_URL="https://aiapi.magong.site/v1";
export LLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
export LLM_MODEL="openai/deepseek-coder";
# 可以自定义修改挂载路径
# export WORKSPACE_MOUNT_PATH="/"
python -m openhands.core.cli

# 需要关闭警告提示: if litellm.suppress_debug_info is False:
# 不能这样设置:  export LLM_MODEL="deepseek-coder";

export HOSTED_VLLM_API_BASE="http://10.0.0.3:3000/v1";
export HOSTED_VLLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
export LLM_MODEL="hosted_vllm/deepseek-coder"

WEB UI

# .env 文件,在 docker-compose.yml 同目录
LLM_BASE_URL=https://aiapi.magong.site/v1
LLM_API_KEY=sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf
LLM_MODEL="openai/deepseek-coder"
# CUSTOM_LLM_PROVIDER=openai
# OPENAI_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf"
# OPENAI_API_BASE="https://aiapi.magong.site/v1"

LOG_ALL_EVENTS=true
SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.15-nikolaik
SANDBOX_USER_ID=1000
WORKSPACE_MOUNT_PATH=/home/mrh/program/openhands/OpenHands
VSCODE_HOST=sv-v2
docker run -it --rm --pull=always \
    -e SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.16-nikolaik \
    -e LOG_ALL_EVENTS=true \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v ~/.openhands:/home/openhands/.openhands \
    -v /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox \
    -p 3000:3000 \
    --add-host host.docker.internal:host-gateway \
    --name openhands-app \
    docker.all-hands.dev/all-hands-ai/openhands:0.16
version: '3.8'

services:
  openhands-app:
    image: docker.all-hands.dev/all-hands-ai/openhands:0.15
    container_name: openhands-app
    environment:
      - LLM_BASE_URL=${LLM_BASE_URL}
      - LLM_API_KEY=${LLM_API_KEY:-default_value}
      - LLM_MODEL=${LLM_MODEL:-openai/deepseek-coder}
      - LOG_ALL_EVENTS=${LOG_ALL_EVENTS:-true}
      - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE}
      - SANDBOX_USER_ID=${SANDBOX_USER_ID}
      - WORKSPACE_MOUNT_PATH=${WORKSPACE_MOUNT_PATH}
    volumes:
      - ${WORKSPACE_MOUNT_PATH}:${WORKSPACE_MOUNT_PATH}
      - /var/run/docker.sock:/var/run/docker.sock
      - /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox
    extra_hosts:
      # 记得关闭防火墙,或者 sudo ufw allow from  172.0.0.0/8
      - "host.docker.internal:host-gateway"
    ports:
      - "3000:3000"
    # command: python
    stdin_open: true
    tty: true
export WORKSPACE_BASE=$(pwd)/workspace
docker exec -it openhands-app bash

    export LLM_BASE_URL="http://10.0.0.3:3000/v1";
    export LLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
    export LLM_MODEL="deepseek/deepseek-coder";
    export WORKSPACE_BASE=/home/mrh/program/openhands/OpenHands/openhands;
    docker run -it --rm \
        -e LLM_BASE_URL="http://10.0.0.3:3000/v1" \
        -e SANDBOX_RUNTIME_CONTAINER_IMAGE=ghcr.io/all-hands-ai/runtime:0.14-nikolaik \
        -e SANDBOX_USER_ID=$(id -u) \
        -e WORKSPACE_MOUNT_PATH=$WORKSPACE_BASE \
        -v $WORKSPACE_BASE:/opt/workspace_base \
        -v /var/run/docker.sock:/var/run/docker.sock \
        --add-host host.docker.internal:host-gateway \
        --network host \
        --name openhands-app \
        ghcr.io/all-hands-ai/openhands:0.14 python -m openhands.core.cli


docker rm -f $(docker ps -a -q --filter "name=^openhands-")
docker rm -f $(docker ps -a -q --filter "name=^openhands-runtime")
# 自定义接口可用模型
docker exec -it openhands-app bash 
export OPENAI_BASE_URL="http://10.0.0.32:3001/v1";
export OPENAI_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";

export LLM_BASE_URL="http://10.0.0.3:3000/v1";
export LLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";



固定 Runtime

# docker-compose 挂载  - /home/mrh/program/openhands/OpenHands/config.toml:/app/config.toml
[core]
workspace_base="./workspace"
# 设置 jwt 秘钥后,就能自动生成 token 固定 runtime 容器
jwt_secret = "123"

如果要修改挂载路径,需要删除容器,再重新启动。

开发调试

文件修改

# 从 docker 容器 openhands-app 内复制到宿主机
docker cp openhands-app:/app/openhands/core/cli.py /home/mrh/program/openhands
docker cp openhands-app:/app/openhands/core/cli.py /home/mrh/program/openhands/OpenHands/openhands/core/cli.py
# 切换到指定版本
git checkout 0.15.0
# 挂载
      - /home/mrh/program/openhands/OpenHands/openhands/core/cli.py:/app/openhands/core/cli.py

python -m openhands.core.cli

# /home/mrh/program/openhands/OpenHands/openhands/server/listen.py
# /home/mrh/program/openhands/OpenHands/containers/app/Dockerfile
uvicorn openhands.server.listen:app --host 0.0.0.0 --port 3000

# eventstream_runtime.py:234

启动调试

/home/mrh/program/openhands/OpenHands/openhands/runtime/builder/docker.py:/app/openhands/runtime/builder/docker.py

export LLM_BASE_URL="http://10.0.0.3:3000/v1";
export LLM_API_KEY="sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf";
export LLM_MODEL="openai/deepseek-coder";
export WORKSPACE_BASE=/home/mrh/program/openhands/OpenHands/openhands;
LLM_BASE_URL=http://10.0.0.3:3000
LLM_API_KEY=sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf
LLM_MODEL="openai/deepseek-coder"

os.environ["LLM_BASE_URL"] = "http://10.0.0.3:3000/v1"
os.environ["LLM_API_KEY"] = "sk-NscqaCD1PfVm7soEF3C3E6297bE14d7fB595Be8f17F39aFf"
# 不行 os.environ["LLM_MODEL"] = "deepseek/deepseek-chat"

export WORKSPACE_BASE=/home/mrh/program/openhands/testm

docker run -it --rm --pull=always     -e SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.16-nikolaik     -e LOG_ALL_EVENTS=true  -e DEBUG=true   -v /var/run/docker.sock:/var/run/docker.sock     -v ~/.openhands:/home/openhands/.openhands     -v /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox     -p 3000:3000     --add-host host.docker.internal:host-gateway     --name openhands-app     docker.all-hands.dev/all-hands-ai/openhands:0.16

docker run -it --rm --pull=always     -e SANDBOX_RUNTIME_CONTAINER_IMAGE=docker.all-hands.dev/all-hands-ai/runtime:0.15-nikolaik     -e LOG_ALL_EVENTS=true  -e DEBUG=true   -v /var/run/docker.sock:/var/run/docker.sock     -v ~/.openhands:/home/openhands/.openhands     -v /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox     -p 3000:3000     --add-host host.docker.internal:host-gateway     --name openhands-app     docker.all-hands.dev/all-hands-ai/openhands:0.15


docker run -it --rm \
  --name runtime16 -p 9807:9807 \
  -v $WORKSPACE_BASE:/workspace \
  -v /home/mrh/program/busybox-x86_64:/usr/local/bin/busybox \
  -v /var/run/docker.sock:/var/run/docker.sock \
  docker.all-hands.dev/all-hands-ai/runtime:0.16-nikolaik bash
  
docker exec -it runtime16 bash
# /home/mrh/program/openhands/OpenHands/openhands/runtime/impl/eventstream/eventstream_runtime.py:294
/openhands/micromamba/bin/micromamba run -n openhands poetry run python -u -m openhands.runtime.action_execution_server 9807 --working-dir /workspace --plugins jupyter agent_skills vscode
# --plugins 

curl --location 'https://ai.liaobots.work/v1/chat/completions' \
--header 'Authorization: Bearer MZRa169Q6p1Gb' \
--header 'Content-Type: application/json' \
--data '{
    "model": "gpt-4",
    "messages": [
        {
            "role": "system",
            "content": "你是一个大型语言模型,请仔细遵循用户的指示 。"
        },
        {
            "role": "user",
            "content": "鲁迅和周树人有什么区别"
        }
    ],
    "temperature": 1,
    "stream": true
}'

LLM_API_KEY
LLM_BASE_URL
LLM_EMBEDDING_MODEL
LLM_EMBEDDING_DEPLOYMENT_NAME
LLM_API_VERSION
# 测试
curl http://192.168.2.32:3000/v1/chat/completions
curl http://sv-v2:3000/v1/chat/completions
curl http://10.0.0.32:3000/v1/chat/completions
curl http://host.docker.internal:3000/v1/chat/completions
curl -i http://www.baidu.com
curl -i http://host.docker.internal:3000
curl http://192.168.2.32:3001/
curl http://192.168.2.32:9005/
curl -i http://host.docker.internal:3001

busybox

docker cp /home/mrh/program/busybox-x86_64 openhands-runtime-cli:/usr/local/bin/busybox
dk exec -it openhands-runtime-cli bash
busybox ip addr

openai API 配置

https://docs.all-hands.dev/modules/usage/llms/localLLMs

测试本地embedding

https://docs.llamaindex.ai/en/stable/examples/embeddings/huggingface/ 安装较久,大约有 1G 左右

pip install llama-index-embeddings-huggingface
pip install llama-index-embeddings-instructor
pip install llama-index

官方容器启动

由于官方镜像路径 ghcr.io/opendevin/opendevin 特别慢,使用国内镜像源拉取:docker pull ghcr.nju.edu.cn/opendevin/opendevin:0.3.1 大约要下载 3.8G ,最快 2M/s

# Your OpenAI API key, or any other LLM API key
export LLM_API_KEY="MZRa169Q6p1Gb"
export LLM_BASE_URL="https://ai.liaobots.work"
export OPENAI_API_BASE="https://ai.liaobots.work"
export OPENAI_API_KEY="MZRa169Q6p1Gb"
export SSH_HOSTNAME="172.17.0.1"
export PORT=3000

# The directory you want OpenDevin to modify. MUST be an absolute path!
WORKSPACE_BASE=$(pwd)/workspace
docker run -it \
    --pull=always \
    -e SANDBOX_USER_ID=$(id -u) \
    -e WORKSPACE_MOUNT_PATH=$WORKSPACE_BASE \
    -v $WORKSPACE_BASE:/opt/workspace_base \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -p 3000:3000 \
    --add-host host.docker.internal:host-gateway \
    --name opendevin-app-$(date +%Y%m%d%H%M%S) \
    ghcr.io/opendevin/opendevin



WORKSPACE_BASE=$(pwd)/workspace
docker run -it \
    --pull=always \
    -e LLM_API_KEY="MZRa169Q6p1Gb" \
    -e LLM_BASE_URL="https://ai.liaobots.work" \
    -e OPENAI_API_BASE="https://ai.liaobots.work/v1" \
    -e OPENAI_API_KEY="MZRa169Q6p1Gb" \
    -e SANDBOX_USER_ID=$(id -u) \
    -e PERSIST_SANDBOX="true" \
    -e SSH_PASSWORD="123" \
    -e WORKSPACE_MOUNT_PATH=/home/mrh/program/opendevin/workspace \
    -v /home/mrh/program/opendevin/venv/lib/python3.12/site-packages/litellm:/app/.venv/lib/python3.12/site-packages/litellm \
    -v /home/mrh/program/opendevin/workspace:/opt/workspace_base \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -p 3000:3000 \
    -p 3022:22 \
    --add-host host.docker.internal:host-gateway \
    --name opendevin \
    ghcr.io/opendevin/opendevin:0.6