setup.py (forked from run-llama/llama_index)
"""Set up the package."""
import sys
from pathlib import Path
from setuptools import find_packages, setup

# Read the package version from gpt_index/VERSION.
with open(Path(__file__).absolute().parents[0] / "gpt_index" / "VERSION") as _f:
    __version__ = _f.read().strip()

# Use the README as the long description shown on PyPI.
with open("README.md", "r", encoding="utf-8") as f:
    long_description = f.read()

install_requires = [
    "langchain",
    "openai>=0.26.4",
    "dataclasses_json",
    "transformers",
    "nltk",
    "numpy",
    "tenacity<8.2.0",
    "pandas",
]

# NOTE: if python version >= 3.9, install tiktoken
if sys.version_info >= (3, 9):
    install_requires.extend(["tiktoken"])

setup(
    name="gpt_index",
    version=__version__,
    packages=find_packages(),
    description="Interface between LLMs and your data",
    install_requires=install_requires,
    long_description=long_description,
    license="MIT",
    url="https://github.com/jerryjliu/gpt_index",
    include_package_data=True,
    long_description_content_type="text/markdown",
)