# Pipfile.torchidx

[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[[source]]
url = "https://download.pytorch.org/whl/cpu"
verify_ssl = true
name = "pytorch"

[packages]
torch = {version = "*", index = "pytorch"}
datasets = "*"
pandas = "*"
litellm = "*"
termcolor = "*"
seaborn = "*"
docker = "*"
fastapi = "*"
uvicorn = {extras = ["standard"], version = "*"}
ruff = "*"
mypy = "*"
llama-index = "*"
llama-index-vector-stores-chroma = "*"
chromadb = "*"
llama-index-embeddings-huggingface = "*"
llama-index-embeddings-azure-openai = "*"
llama-index-embeddings-ollama = "*"
google-generativeai = "*"
toml = "*"
json_repair = "*"

[dev-packages]

[requires]
python_version = "3.11"