
chore: bump the litellm version (#4632)

Xingyao Wang 1 year ago
parent
commit 3ae4bc0f8e
2 files changed, 12 insertions and 16 deletions
  1. poetry.lock  +11 -15
  2. pyproject.toml  +1 -1

poetry.lock  +11 -15

@@ -3898,23 +3898,25 @@ types-tqdm = "*"
 
 [[package]]
 name = "litellm"
-version = "1.50.4"
+version = "1.51.1"
 description = "Library to easily interface with LLM API providers"
 optional = false
-python-versions = ">=3.8.1,<4.0, !=3.9.7"
-files = []
-develop = false
+python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
+files = [
+    {file = "litellm-1.51.1-py3-none-any.whl", hash = "sha256:1a389ca5b8ddd7a98d97ad229118d8323caeaaf9c1c5b79b1072edc2a18e773d"},
+    {file = "litellm-1.51.1.tar.gz", hash = "sha256:ef9019bdd8bbad927e49696a300d03ea00b86721ebe7b62621c923f728e50d18"},
+]
 
 [package.dependencies]
 aiohttp = "*"
 click = "*"
 importlib-metadata = ">=6.8.0"
-jinja2 = "^3.1.2"
-jsonschema = "^4.22.0"
+jinja2 = ">=3.1.2,<4.0.0"
+jsonschema = ">=4.22.0,<5.0.0"
 openai = ">=1.52.0"
-pydantic = "^2.0.0"
+pydantic = ">=2.0.0,<3.0.0"
 python-dotenv = ">=0.2.0"
-requests = "^2.31.0"
+requests = ">=2.31.0,<3.0.0"
 tiktoken = ">=0.7.0"
 tokenizers = "*"
 
@@ -3922,12 +3924,6 @@ tokenizers = "*"
 extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "resend (>=0.8.0,<0.9.0)"]
 proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.111.0,<0.112.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=22.0.0,<23.0.0)", "orjson (>=3.9.7,<4.0.0)", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"]
 
-[package.source]
-type = "git"
-url = "https://github.com/BerriAI/litellm.git"
-reference = "58fe6610601297c5a90367fd4583469e2df3fcf9"
-resolved_reference = "58fe6610601297c5a90367fd4583469e2df3fcf9"
-
 [[package]]
 name = "llama-index"
 version = "0.10.45.post1"
@@ -10113,4 +10109,4 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.12"
-content-hash = "709ae467d042d1c9fa3799711f50445324420de32dc9552a42284aba99903981"
+content-hash = "2b268ef696ace0d8170276407dbdeb414134477839ebe4b7ecf29b1a1fe2cef3"
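
The [package.source] table deleted above is what marked litellm as a git dependency. With it gone, Poetry resolves the package from PyPI and verifies the wheel/sdist hashes now listed under files, and the caret shorthands from the old entry (e.g. "^3.1.2") are recorded in their expanded ">=,<" form taken from the published metadata. For comparison, a git-sourced lock entry carries a source table of this shape (an illustrative sketch mirroring the removed lines, not part of the new lockfile):

# Present only for git dependencies; PyPI entries omit this table
# and record package file hashes instead.
[package.source]
type = "git"
url = "https://github.com/BerriAI/litellm.git"
reference = "58fe6610601297c5a90367fd4583469e2df3fcf9"
resolved_reference = "58fe6610601297c5a90367fd4583469e2df3fcf9"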

pyproject.toml  +1 -1

@@ -14,7 +14,7 @@ packages = [
 python = "^3.12"
 datasets = "*"
 pandas = "*"
-litellm = { git = "https://github.com/BerriAI/litellm.git", rev = "58fe6610601297c5a90367fd4583469e2df3fcf9" }
+litellm = "^1.51.1"
 google-generativeai = "*" # To use litellm with Gemini Pro API
 termcolor = "*"
 seaborn = "*"
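
In Poetry's constraint syntax the caret is compatible-release shorthand: "^1.51.1" accepts any 1.x release at or above 1.51.1 the next time the lock is regenerated, while poetry.lock keeps the install pinned to exactly 1.51.1. A minimal sketch of the equivalent explicit form (illustrative only, not part of this commit):

[tool.poetry.dependencies]
# Caret shorthand: allows >=1.51.1,<2.0.0 on the next re-lock.
litellm = "^1.51.1"
# Equivalent explicit range (commented out to avoid a duplicate key):
# litellm = ">=1.51.1,<2.0.0"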