@@ -342,9 +342,14 @@ class LLM(RetryMixin, DebugMixin):
         # but model_info will have the correct value for some reason.
         # we can go with it, but we will need to keep an eye if model_info is correct for Vertex or other providers
         # remove when litellm is updated to fix https://github.com/BerriAI/litellm/issues/5608
-        return litellm.supports_vision(self.config.model) or (
-            self.model_info is not None
-            and self.model_info.get('supports_vision', False)
+        # Check both the full model name and the name after proxy prefix for vision support
+        return (
+            litellm.supports_vision(self.config.model)
+            or litellm.supports_vision(self.config.model.split('/')[-1])
+            or (
+                self.model_info is not None
+                and self.model_info.get('supports_vision', False)
+            )
         )

     def is_caching_prompt_active(self) -> bool:
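For context, a minimal sketch of the behavior this change works around (the model name below is a hypothetical example, not taken from the diff): `litellm.supports_vision` resolves capabilities from litellm's static model map, so an identifier carrying a proxy or custom prefix can return `False` even when the underlying model is vision-capable, while the bare name after the last `/` resolves correctly.

```python
# Minimal sketch, assuming litellm is installed; the model name is illustrative.
import litellm

full_name = 'some-proxy/gpt-4o'          # hypothetical prefixed identifier
bare_name = full_name.split('/')[-1]     # 'gpt-4o', the name after the prefix

# The prefixed name may be missing from litellm's model map, while the bare
# name is known, so the added fallback check can still detect vision support.
print(litellm.supports_vision(full_name))   # may be False: unknown prefix
print(litellm.supports_vision(bare_name))   # True for a vision-capable model
```

The `model_info` check remains as a last resort for providers whose capability data reaches litellm only through the fetched model info, per the comments above.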