@@ -33,7 +33,6 @@ from tenacity import (
 )
 
 from openhands.core.exceptions import (
-    LLMResponseError,
     OperationCancelled,
     UserCancelledError,
 )
@@ -498,10 +497,7 @@ class LLM:
 
         Check the complete documentation at https://litellm.vercel.app/docs/completion
         """
-        try:
-            return self._completion
-        except Exception as e:
-            raise LLMResponseError(e)
+        return self._completion
 
     @property
     def async_completion(self):
@@ -509,10 +505,7 @@ class LLM:
 
         Check the complete documentation at https://litellm.vercel.app/docs/providers/ollama#example-usage---streaming--acompletion
         """
-        try:
-            return self._async_completion
-        except Exception as e:
-            raise LLMResponseError(e)
+        return self._async_completion
 
     @property
     def async_streaming_completion(self):
@@ -520,10 +513,7 @@ class LLM:
 
         Check the complete documentation at https://litellm.vercel.app/docs/providers/ollama#example-usage---streaming--acompletion
         """
-        try:
-            return self._async_streaming_completion
-        except Exception as e:
-            raise LLMResponseError(e)
+        return self._async_streaming_completion
 
     def vision_is_active(self):
         return not self.config.disable_vision and self._supports_vision()