Przeglądaj źródła

Add Anthropic Models to Cache Prompt (#3775)

* Add Anthropic Models to Cache Prompt

* Update Cache Prompt Active Check for Partial String Matching
Cole Murray 1 rok temu
rodzic
commit
dadada18ce
1 zmieniony plik z 2 dodaniami i 3 usunięciami
  1. 2 3
      openhands/llm/llm.py

+ 2 - 3
openhands/llm/llm.py

@@ -478,9 +478,8 @@ class LLM:
         Returns:
             boolean: True if prompt caching is active for the given model.
         """
-        return (
-            self.config.caching_prompt is True
-            and self.config.model in cache_prompting_supported_models
+        return self.config.caching_prompt is True and any(
+            model in self.config.model for model in cache_prompting_supported_models
         )
 
     def _post_completion(self, response) -> None: