Explorar o código

llm: add NotFoundError to completion exception handling (#3668)

Co-authored-by: tofarr <tofarr@gmail.com>
tobitege hai 1 ano
pai
achega
c83fab8a00
Modificouse 1 ficheiro con 20 adicións e 19 borrados
  1. 20 19
      openhands/llm/llm.py

+ 20 - 19
openhands/llm/llm.py

@@ -14,6 +14,7 @@ from litellm.exceptions import (
     APIConnectionError,
     ContentPolicyViolationError,
     InternalServerError,
+    NotFoundError,
     OpenAIError,
     RateLimitError,
     ServiceUnavailableError,
@@ -141,11 +142,11 @@ class LLM:
             ),
             retry=retry_if_exception_type(
                 (
-                    RateLimitError,
                     APIConnectionError,
-                    ServiceUnavailableError,
-                    InternalServerError,
                     ContentPolicyViolationError,
+                    InternalServerError,
+                    OpenAIError,
+                    RateLimitError,
                 )
             ),
             after=attempt_on_error,
@@ -229,11 +230,11 @@ class LLM:
             ),
             retry=retry_if_exception_type(
                 (
-                    RateLimitError,
                     APIConnectionError,
-                    ServiceUnavailableError,
-                    InternalServerError,
                     ContentPolicyViolationError,
+                    InternalServerError,
+                    OpenAIError,
+                    RateLimitError,
                 )
             ),
             after=attempt_on_error,
@@ -303,14 +304,14 @@ class LLM:
             except UserCancelledError:
                 logger.info('LLM request cancelled by user.')
                 raise
-            except OpenAIError as e:
-                logger.error(f'OpenAIError occurred:\n{e}')
-                raise
             except (
-                RateLimitError,
                 APIConnectionError,
-                ServiceUnavailableError,
+                ContentPolicyViolationError,
                 InternalServerError,
+                NotFoundError,
+                OpenAIError,
+                RateLimitError,
+                ServiceUnavailableError,
             ) as e:
                 logger.error(f'Completion Error occurred:\n{e}')
                 raise
@@ -333,11 +334,11 @@ class LLM:
             ),
             retry=retry_if_exception_type(
                 (
-                    RateLimitError,
                     APIConnectionError,
-                    ServiceUnavailableError,
-                    InternalServerError,
                     ContentPolicyViolationError,
+                    InternalServerError,
+                    OpenAIError,
+                    RateLimitError,
                 )
             ),
             after=attempt_on_error,
@@ -381,14 +382,14 @@ class LLM:
             except UserCancelledError:
                 logger.info('LLM request cancelled by user.')
                 raise
-            except OpenAIError as e:
-                logger.error(f'OpenAIError occurred:\n{e}')
-                raise
             except (
-                RateLimitError,
                 APIConnectionError,
-                ServiceUnavailableError,
+                ContentPolicyViolationError,
                 InternalServerError,
+                NotFoundError,
+                OpenAIError,
+                RateLimitError,
+                ServiceUnavailableError,
             ) as e:
                 logger.error(f'Completion Error occurred:\n{e}')
                 raise