# runtime.py
  1. import asyncio
  2. import os
  3. import tempfile
  4. import uuid
  5. from typing import Any, Optional
  6. from zipfile import ZipFile
  7. import aiohttp
  8. import docker
  9. import tenacity
  10. from opendevin.core.config import AppConfig
  11. from opendevin.core.logger import opendevin_logger as logger
  12. from opendevin.events import EventStream
  13. from opendevin.events.action import (
  14. BrowseInteractiveAction,
  15. BrowseURLAction,
  16. CmdRunAction,
  17. FileReadAction,
  18. FileWriteAction,
  19. IPythonRunCellAction,
  20. )
  21. from opendevin.events.action.action import Action
  22. from opendevin.events.observation import (
  23. ErrorObservation,
  24. NullObservation,
  25. Observation,
  26. )
  27. from opendevin.events.serialization import event_to_dict, observation_from_dict
  28. from opendevin.events.serialization.action import ACTION_TYPE_TO_CLASS
  29. from opendevin.runtime.plugins import PluginRequirement
  30. from opendevin.runtime.runtime import Runtime
  31. from opendevin.runtime.tools import RuntimeTool
  32. from opendevin.runtime.utils import find_available_tcp_port
  33. from opendevin.runtime.utils.runtime_build import build_runtime_image
  34. class EventStreamRuntime(Runtime):
  35. """This runtime will subscribe the event stream.
  36. When receive an event, it will send the event to od-runtime-client which run inside the docker environment.
  37. """
  38. container_name_prefix = 'opendevin-sandbox-'
  39. def __init__(
  40. self,
  41. config: AppConfig,
  42. event_stream: EventStream,
  43. sid: str = 'default',
  44. plugins: list[PluginRequirement] | None = None,
  45. container_image: str | None = None,
  46. ):
  47. super().__init__(
  48. config, event_stream, sid, plugins
  49. ) # will initialize the event stream
  50. self._port = find_available_tcp_port()
  51. self.api_url = f'http://localhost:{self._port}'
  52. self.session: Optional[aiohttp.ClientSession] = None
  53. self.instance_id = (
  54. sid + str(uuid.uuid4()) if sid is not None else str(uuid.uuid4())
  55. )
  56. # TODO: We can switch to aiodocker when `get_od_sandbox_image` is updated to use aiodocker
  57. self.docker_client: docker.DockerClient = self._init_docker_client()
  58. self.container_image = (
  59. self.config.sandbox.container_image
  60. if container_image is None
  61. else container_image
  62. )
  63. self.container_name = self.container_name_prefix + self.instance_id
  64. self.container = None
  65. self.action_semaphore = asyncio.Semaphore(1) # Ensure one action at a time
  66. logger.debug(f'EventStreamRuntime `{sid}` config:\n{self.config}')
  67. async def ainit(self, env_vars: dict[str, str] | None = None):
  68. if self.config.sandbox.od_runtime_extra_deps:
  69. logger.info(
  70. f'Installing extra user-provided dependencies in the runtime image: {self.config.sandbox.od_runtime_extra_deps}'
  71. )
  72. self.container_image = build_runtime_image(
  73. self.container_image,
  74. self.docker_client,
  75. # NOTE: You can need set DEBUG=true to update the source code
  76. # inside the container. This is useful when you want to test/debug the
  77. # latest code in the runtime docker container.
  78. update_source_code=self.config.sandbox.update_source_code,
  79. extra_deps=self.config.sandbox.od_runtime_extra_deps,
  80. )
  81. self.container = await self._init_container(
  82. self.sandbox_workspace_dir,
  83. mount_dir=self.config.workspace_mount_path,
  84. plugins=self.plugins,
  85. )
  86. # MUST call super().ainit() to initialize both default env vars
  87. # AND the ones in env vars!
  88. await super().ainit(env_vars)
  89. logger.info(
  90. f'Container initialized with plugins: {[plugin.name for plugin in self.plugins]}'
  91. )
  92. logger.info(f'Container initialized with env vars: {env_vars}')
  93. @staticmethod
  94. def _init_docker_client() -> docker.DockerClient:
  95. try:
  96. return docker.from_env()
  97. except Exception as ex:
  98. logger.error(
  99. 'Launch docker client failed. Please make sure you have installed docker and started the docker daemon.'
  100. )
  101. raise ex
  102. @tenacity.retry(
  103. stop=tenacity.stop_after_attempt(5),
  104. wait=tenacity.wait_exponential(multiplier=1, min=4, max=60),
  105. )
  106. async def _init_container(
  107. self,
  108. sandbox_workspace_dir: str,
  109. mount_dir: str | None = None,
  110. plugins: list[PluginRequirement] | None = None,
  111. ):
  112. try:
  113. logger.info(
  114. f'Starting container with image: {self.container_image} and name: {self.container_name}'
  115. )
  116. plugin_arg = ''
  117. if plugins is not None and len(plugins) > 0:
  118. plugin_arg = (
  119. f'--plugins {" ".join([plugin.name for plugin in plugins])} '
  120. )
  121. network_mode: str | None = None
  122. port_mapping: dict[str, int] | None = None
  123. if self.config.sandbox.use_host_network:
  124. network_mode = 'host'
  125. logger.warn(
  126. 'Using host network mode. If you are using MacOS, please make sure you have the latest version of Docker Desktop and enabled host network feature: https://docs.docker.com/network/drivers/host/#docker-desktop'
  127. )
  128. else:
  129. port_mapping = {f'{self._port}/tcp': self._port}
  130. if mount_dir is not None:
  131. volumes = {mount_dir: {'bind': sandbox_workspace_dir, 'mode': 'rw'}}
  132. logger.info(f'Mount dir: {sandbox_workspace_dir}')
  133. else:
  134. logger.warn(
  135. 'Mount dir is not set, will not mount the workspace directory to the container.'
  136. )
  137. volumes = None
  138. if self.config.sandbox.browsergym_eval_env is not None:
  139. browsergym_arg = (
  140. f'--browsergym-eval-env {self.config.sandbox.browsergym_eval_env}'
  141. )
  142. else:
  143. browsergym_arg = ''
  144. container = self.docker_client.containers.run(
  145. self.container_image,
  146. command=(
  147. f'/opendevin/miniforge3/bin/mamba run --no-capture-output -n base '
  148. 'PYTHONUNBUFFERED=1 poetry run '
  149. f'python -u -m opendevin.runtime.client.client {self._port} '
  150. f'--working-dir {sandbox_workspace_dir} '
  151. f'{plugin_arg}'
  152. f'--username {"opendevin" if self.config.run_as_devin else "root"} '
  153. f'--user-id {self.config.sandbox.user_id} '
  154. f'{browsergym_arg}'
  155. ),
  156. network_mode=network_mode,
  157. ports=port_mapping,
  158. working_dir='/opendevin/code/',
  159. name=self.container_name,
  160. detach=True,
  161. environment={'DEBUG': 'true'} if self.config.debug else None,
  162. volumes=volumes,
  163. )
  164. logger.info(f'Container started. Server url: {self.api_url}')
  165. return container
  166. except Exception as e:
  167. logger.error('Failed to start container')
  168. logger.exception(e)
  169. await self.close(close_client=False)
  170. raise e
  171. async def _ensure_session(self):
  172. await asyncio.sleep(1)
  173. if self.session is None or self.session.closed:
  174. self.session = aiohttp.ClientSession()
  175. return self.session
  176. @tenacity.retry(
  177. stop=tenacity.stop_after_attempt(10),
  178. wait=tenacity.wait_exponential(multiplier=2, min=4, max=60),
  179. )
  180. async def _wait_until_alive(self):
  181. logger.info('Reconnecting session')
  182. async with aiohttp.ClientSession() as session:
  183. async with session.get(f'{self.api_url}/alive') as response:
  184. if response.status == 200:
  185. return
  186. else:
  187. msg = f'Action execution API is not alive. Response: {response}'
  188. logger.error(msg)
  189. raise RuntimeError(msg)
  190. @property
  191. def sandbox_workspace_dir(self):
  192. return self.config.workspace_mount_path_in_sandbox
  193. async def close(self, close_client: bool = True):
  194. if self.session is not None and not self.session.closed:
  195. await self.session.close()
  196. containers = self.docker_client.containers.list(all=True)
  197. for container in containers:
  198. try:
  199. if container.name.startswith(self.container_name_prefix):
  200. logs = container.logs(tail=1000).decode('utf-8')
  201. logger.debug(
  202. f'==== Container logs ====\n{logs}\n==== End of container logs ===='
  203. )
  204. container.remove(force=True)
  205. except docker.errors.NotFound:
  206. pass
  207. if close_client:
  208. self.docker_client.close()
  209. async def copy_to(
  210. self, host_src: str, sandbox_dest: str, recursive: bool = False
  211. ) -> dict[str, Any]:
  212. if not os.path.exists(host_src):
  213. raise FileNotFoundError(f'Source file {host_src} does not exist')
  214. session = await self._ensure_session()
  215. await self._wait_until_alive()
  216. try:
  217. if recursive:
  218. # For recursive copy, create a zip file
  219. with tempfile.NamedTemporaryFile(
  220. suffix='.zip', delete=False
  221. ) as temp_zip:
  222. temp_zip_path = temp_zip.name
  223. with ZipFile(temp_zip_path, 'w') as zipf:
  224. for root, _, files in os.walk(host_src):
  225. for file in files:
  226. file_path = os.path.join(root, file)
  227. arcname = os.path.relpath(
  228. file_path, os.path.dirname(host_src)
  229. )
  230. zipf.write(file_path, arcname)
  231. upload_data = {'file': open(temp_zip_path, 'rb')}
  232. else:
  233. # For single file copy
  234. upload_data = {'file': open(host_src, 'rb')}
  235. params = {'destination': sandbox_dest, 'recursive': str(recursive).lower()}
  236. async with session.post(
  237. f'{self.api_url}/upload_file', data=upload_data, params=params
  238. ) as response:
  239. if response.status == 200:
  240. return await response.json()
  241. else:
  242. error_message = await response.text()
  243. raise Exception(f'Copy operation failed: {error_message}')
  244. except asyncio.TimeoutError:
  245. raise TimeoutError('Copy operation timed out')
  246. except Exception as e:
  247. raise RuntimeError(f'Copy operation failed: {str(e)}')
  248. finally:
  249. if recursive:
  250. os.unlink(temp_zip_path)
  251. async def run_action(self, action: Action) -> Observation:
  252. # set timeout to default if not set
  253. if action.timeout is None:
  254. action.timeout = self.config.sandbox.timeout
  255. async with self.action_semaphore:
  256. if not action.runnable:
  257. return NullObservation('')
  258. action_type = action.action # type: ignore[attr-defined]
  259. if action_type not in ACTION_TYPE_TO_CLASS:
  260. return ErrorObservation(f'Action {action_type} does not exist.')
  261. if not hasattr(self, action_type):
  262. return ErrorObservation(
  263. f'Action {action_type} is not supported in the current runtime.'
  264. )
  265. logger.info('Awaiting session')
  266. session = await self._ensure_session()
  267. await self._wait_until_alive()
  268. assert action.timeout is not None
  269. try:
  270. logger.info('Executing command')
  271. async with session.post(
  272. f'{self.api_url}/execute_action',
  273. json={'action': event_to_dict(action)},
  274. timeout=action.timeout,
  275. ) as response:
  276. if response.status == 200:
  277. output = await response.json()
  278. obs = observation_from_dict(output)
  279. obs._cause = action.id # type: ignore[attr-defined]
  280. return obs
  281. else:
  282. error_message = await response.text()
  283. logger.error(f'Error from server: {error_message}')
  284. obs = ErrorObservation(
  285. f'Command execution failed: {error_message}'
  286. )
  287. except asyncio.TimeoutError:
  288. logger.error('No response received within the timeout period.')
  289. obs = ErrorObservation('Command execution timed out')
  290. except Exception as e:
  291. logger.error(f'Error during command execution: {e}')
  292. obs = ErrorObservation(f'Command execution failed: {str(e)}')
  293. return obs
  294. async def run(self, action: CmdRunAction) -> Observation:
  295. return await self.run_action(action)
  296. async def run_ipython(self, action: IPythonRunCellAction) -> Observation:
  297. return await self.run_action(action)
  298. async def read(self, action: FileReadAction) -> Observation:
  299. return await self.run_action(action)
  300. async def write(self, action: FileWriteAction) -> Observation:
  301. return await self.run_action(action)
  302. async def browse(self, action: BrowseURLAction) -> Observation:
  303. return await self.run_action(action)
  304. async def browse_interactive(self, action: BrowseInteractiveAction) -> Observation:
  305. return await self.run_action(action)
  306. ############################################################################
  307. # Keep the same with other runtimes
  308. ############################################################################
  309. def get_working_directory(self):
  310. raise NotImplementedError(
  311. 'This method is not implemented in the runtime client.'
  312. )
  313. def init_runtime_tools(
  314. self,
  315. runtime_tools: list[RuntimeTool],
  316. runtime_tools_config: Optional[dict[RuntimeTool, Any]] = None,
  317. ) -> None:
  318. # TODO: deprecate this method when we move to the new EventStreamRuntime
  319. logger.warning('init_runtime_tools is not implemented in the runtime client.')