
Fix: llm completion exception breaks CodeActAgent (#3678)

* Catch the exception and return a finish action with an error message in case of an exception in llm completion

* Remove exception logs

* Raise an LLM response error for any exception in llm completion

* Raise LLMResponseError from async completion and async streaming completion as well
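
The bullets above introduce an LLMResponseError type. Its definition lives in openhands/core/exceptions.py, which this commit imports from but does not modify; as a rough assumption, such a wrapper exception needs little more than:

# Hypothetical sketch only: the real LLMResponseError is defined in
# openhands/core/exceptions.py and is not shown in this diff.
class LLMResponseError(Exception):
    """Raised when llm completion fails, wrapping the underlying exception."""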
Shubham raj 1 year ago
commit 2bc3e8d584
2 changed files with 19 additions and 6 deletions
  1. agenthub/codeact_agent/codeact_agent.py (+6 −2)
  2. openhands/llm/llm.py (+13 −4)

+6 −2
agenthub/codeact_agent/codeact_agent.py

@@ -203,8 +203,12 @@ class CodeActAgent(Agent):
             params['extra_headers'] = {
                 'anthropic-beta': 'prompt-caching-2024-07-31',
             }
-
-        response = self.llm.completion(**params)
+        try:
+            response = self.llm.completion(**params)
+        except Exception:
+            return AgentFinishAction(
+                thought='Agent encountered an error while processing the last action. Please try again.'
+            )
 
         return self.action_parser.parse(response)
 

+13 −4
openhands/llm/llm.py

@@ -27,7 +27,7 @@ from tenacity import (
     wait_random_exponential,
 )
 
-from openhands.core.exceptions import UserCancelledError
+from openhands.core.exceptions import LLMResponseError, UserCancelledError
 from openhands.core.logger import llm_prompt_logger, llm_response_logger
 from openhands.core.logger import openhands_logger as logger
 from openhands.core.metrics import Metrics
@@ -410,7 +410,10 @@ class LLM:
 
         Check the complete documentation at https://litellm.vercel.app/docs/completion
         """
-        return self._completion
+        try:
+            return self._completion
+        except Exception as e:
+            raise LLMResponseError(e)
 
     @property
     def async_completion(self):
@@ -418,7 +421,10 @@
 
         Check the complete documentation at https://litellm.vercel.app/docs/providers/ollama#example-usage---streaming--acompletion
         """
-        return self._async_completion
+        try:
+            return self._async_completion
+        except Exception as e:
+            raise LLMResponseError(e)
 
     @property
     def async_streaming_completion(self):
@@ -426,7 +432,10 @@
 
         Check the complete documentation at https://litellm.vercel.app/docs/providers/ollama#example-usage---streaming--acompletion
         """
-        return self._async_streaming_completion
+        try:
+            return self._async_streaming_completion
+        except Exception as e:
+            raise LLMResponseError(e)
 
     def supports_vision(self):
         return litellm.supports_vision(self.config.model)
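
With all three entry points wrapped, callers can catch a single exception type rather than provider-specific errors. A stubbed usage sketch, since the real LLM class and its litellm wiring are not reproduced here:

class LLMResponseError(Exception):
    """Stub standing in for openhands.core.exceptions.LLMResponseError."""

def completion(**params):
    # Stand-in for LLM.completion: any underlying failure is re-raised
    # as LLMResponseError, as in the diff above.
    try:
        raise TimeoutError('upstream provider timed out')  # simulated failure
    except Exception as e:
        raise LLMResponseError(e)

try:
    completion(messages=[{'role': 'user', 'content': 'hello'}])
except LLMResponseError as e:
    print(f'llm completion failed: {e}')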