# pyproject.toml — Poetry configuration for the LLM chatbot workshop.
# Repository forked from jakobhoeg/nextjs-ollama-llm-ui.
# PEP 517/518 build backend. poetry-core alone is sufficient to build the
# project; a full Poetry installation is not required at build time.
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
# Project metadata. With package-mode = false (Poetry >= 1.8) this file is
# used purely for dependency management — the project itself is never built
# or installed as a distributable package.
[tool.poetry]
name = "llm-chatbot-workshop"
version = "0.1.0"
# Typo fixed: "virutal" -> "virtual".
description = "A Python virtual environment for tutorial notebooks"
authors = ["Adam Goh <adam.goh@zenika.com>"]
package-mode = false
# Main (runtime) dependency group, shared by all tutorials.
[tool.poetry.dependencies]
# 3.13 excluded — NOTE(review): presumably for torch/faiss wheel availability;
# confirm before widening.
python = ">=3.11,<3.13"
jupyter = "^1.0"
# NOTE(review): ">=0.27" spans the openai 0.x -> 1.x breaking API change;
# consider pinning to the major version the notebooks actually target.
openai = ">=0.27"
torch = "^2.5.0"
pymupdf = "^1.24.12"
tiktoken = {extras = ["openai-public"], version = "^0.8.0"}
langchain = "^0.3.4"
langchain-community = "^0.3.3"
scikit-learn = "^1.5.2"
py-make = "^0.1.2"
# Development-only tooling: tests, type checking, linting, formatting.
[tool.poetry.group.dev.dependencies]
pytest = "^8.3.3"
mypy = "^1.13.0"
pylint = "^3.3.1"
flake8 = "^7.1.1"
black = "^24.10.0"
# NOTE(review): the PyPI "pylance" package is the Lance columnar data format
# bindings, NOT the VS Code Pylance language server (which is not
# pip-installable). Confirm this is the intended dependency.
pylance = "^0.19.1"
# Tutorial 01: embeddings and vector databases.
[tool.poetry.group.tutorial-01.dependencies]
pymilvus = "^2.4.8"  # Milvus vector-database client
# NOTE(review): tiktoken is also declared in the main group (there with the
# "openai-public" extra); the two constraints must be kept in sync manually.
tiktoken = "^0.8.0"
# NOTE(review): wildcard constraint — any llama-index release can resolve
# here; consider pinning to a caret range for reproducible installs.
llama-index = "*"
transformers = "^4.46.1"
pymupdf4llm = "^0.0.17"
faiss-cpu = "^1.9.0"  # in-process vector search (CPU build)
llama-index-embeddings-huggingface = "^0.3.1"
llama-index-embeddings-ollama = "^0.3.1"
# Tutorial 03: observability and evaluation of LLM applications.
[tool.poetry.group.tutorial-03.dependencies]
langfuse = "^2.53.3"  # LLM tracing / observability
mlflow = ">=2.17.0"   # experiment tracking
langchain-openai = "^0.2.5"
llama-index-llms-ollama = ">=0.3.4"
arize-phoenix = {extras = ["embeddings", "llama-index"], version = ">=5.6.0"}
deepeval = "^1.4.9"   # LLM output evaluation
llama-index-utils-workflow = "^0.2.2"
litellm = "^1.52.0"
# Tutorial 02: vector stores. (Declared after tutorial-03 in this file;
# group order is cosmetic only and has no effect on resolution.)
[tool.poetry.group.tutorial-02.dependencies]
# NOTE(review): "milvus" is the embedded Milvus server at 2.3.x, while
# tutorial-01 pulls the pymilvus 2.4.x client — confirm client/server
# version compatibility.
milvus = "^2.3.8"
llama-index-embeddings-huggingface = "^0.3.1"
llama-index-vector-stores-chroma = "^0.3.0"