
remove Exception in the agent (#4054)

Engel Nyst committed 1 year ago
Commit: 798aaeaef6
2 changed files with 4 additions and 26 deletions:
  1. agenthub/codeact_agent/codeact_agent.py (+1, −13)
  2. openhands/llm/llm.py (+3, −13)

agenthub/codeact_agent/codeact_agent.py (+1, −13)

@@ -5,8 +5,6 @@ from agenthub.codeact_agent.action_parser import CodeActResponseParser
 from openhands.controller.agent import Agent
 from openhands.controller.state.state import State
 from openhands.core.config import AgentConfig
-from openhands.core.exceptions import OperationCancelled
-from openhands.core.logger import openhands_logger as logger
 from openhands.core.message import ImageContent, Message, TextContent
 from openhands.events.action import (
     Action,
@@ -211,17 +209,7 @@ class CodeActAgent(Agent):
                 'anthropic-beta': 'prompt-caching-2024-07-31',
             }
 
-        # TODO: move exception handling to agent_controller
-        try:
-            response = self.llm.completion(**params)
-        except OperationCancelled as e:
-            raise e
-        except Exception as e:
-            logger.error(f'{e}')
-            error_message = '{}: {}'.format(type(e).__name__, str(e).split('\n')[0])
-            return AgentFinishAction(
-                thought=f'Agent encountered an error while processing the last action.\nError: {error_message}\nPlease try again.'
-            )
+        response = self.llm.completion(**params)
 
         return self.action_parser.parse(response)
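
The removed block carried a TODO to move exception handling into agent_controller, which this commit acts on by letting exceptions propagate out of the agent's step(). A minimal sketch of what the controller side could look like after this change; the names `AgentController._step`, `self.agent.step`, and `handle_action` are illustrative assumptions, not code from this commit:

```python
# Hypothetical sketch only: where the removed try/except could live instead.
# `AgentController`, `_step`, and `handle_action` are illustrative names,
# not the actual OpenHands controller API.
from openhands.core.exceptions import OperationCancelled
from openhands.core.logger import openhands_logger as logger


class AgentController:
    def _step(self) -> None:
        try:
            action = self.agent.step(self.state)
        except OperationCancelled:
            # Cancellation must still propagate so the run can stop cleanly.
            raise
        except Exception as e:
            # The controller now owns error reporting; the agent's step()
            # stays free of control flow by exception. split('\n')[0] keeps
            # only the first line of the message, as the old agent code did.
            logger.error(f'{type(e).__name__}: {str(e).split(chr(10))[0]}')
            return
        self.handle_action(action)
```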
 

openhands/llm/llm.py (+3, −13)

@@ -33,7 +33,6 @@ from tenacity import (
 )
 
 from openhands.core.exceptions import (
-    LLMResponseError,
     OperationCancelled,
     UserCancelledError,
 )
@@ -498,10 +497,7 @@ class LLM:
 
         Check the complete documentation at https://litellm.vercel.app/docs/completion
         """
-        try:
-            return self._completion
-        except Exception as e:
-            raise LLMResponseError(e)
+        return self._completion
 
     @property
     def async_completion(self):
@@ -509,10 +505,7 @@ class LLM:
 
         Check the complete documentation at https://litellm.vercel.app/docs/providers/ollama#example-usage---streaming--acompletion
         """
-        try:
-            return self._async_completion
-        except Exception as e:
-            raise LLMResponseError(e)
+        return self._async_completion
 
     @property
     def async_streaming_completion(self):
@@ -520,10 +513,7 @@ class LLM:
 
         Check the complete documentation at https://litellm.vercel.app/docs/providers/ollama#example-usage---streaming--acompletion
         """
-        try:
-            return self._async_streaming_completion
-        except Exception as e:
-            raise LLMResponseError(e)
+        return self._async_streaming_completion
 
     def vision_is_active(self):
         return not self.config.disable_vision and self._supports_vision()
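
On the LLM side, the deleted try/except blocks were effectively dead code: reading `self._completion` is plain attribute access and can only fail if the attribute was never assigned, so re-wrapping it in LLMResponseError obscured the real cause. Actual provider failures happen later, when the returned callable is invoked, which is where callers should handle them. A hedged usage sketch; the `LLMConfig` import path, its constructor arguments, and the message shape are assumptions for illustration:

```python
# Hypothetical usage sketch: after this change, accessing `llm.completion`
# cannot raise LLMResponseError; real failures (network, provider, rate
# limits) surface when the callable is invoked.
from openhands.core.config import LLMConfig  # assumed import path
from openhands.llm.llm import LLM

llm = LLM(config=LLMConfig(model='gpt-4o'))  # illustrative config

try:
    response = llm.completion(
        messages=[{'role': 'user', 'content': 'Hello'}],
    )
except Exception as e:
    # Handle (or re-raise) here, at call time, not at property access.
    print(f'{type(e).__name__}: {e}')
```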