forked from dsgrid/dsgrid
-
Notifications
You must be signed in to change notification settings - Fork 0
/
setup.py
106 lines (97 loc) · 2.86 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
"""
setup.py
"""
import logging
from pathlib import Path
from setuptools import setup, find_packages
# Module-level logger (unused here, kept for parity with the package's modules).
logger = logging.getLogger(__name__)

# Directory containing this setup.py; all paths below are resolved against it.
here = Path(__file__).parent.resolve()

# Populate `metadata` (e.g. __title__, __version__) by executing the package's
# version module in an isolated namespace — the usual single-source-of-truth
# pattern for setup.py version info.
metadata = {}
exec((here / "dsgrid" / "_version.py").read_text(encoding="utf-8"), metadata)

# The README is used verbatim as the PyPI long description.
readme = (here / "README.md").read_text(encoding="utf-8")
# Tooling for local development: formatter, hooks, debugging and notebooks.
dev_requires = [
    "black>=22.3.0",
    "pre-commit",
    "devtools",
    "jupyter",
    "flake8",
    "pyarrow",
]

# Test-only dependencies.
test_requires = [
    "httpx",  # starlette, used by fastapi, requires this as an optional dependency for testing.
    "pytest",
    "pytest-cov",
]

# Sphinx toolchain for building and publishing the documentation.
doc_requires = [
    "furo",
    "ghp-import",
    "numpydoc",
    "sphinx",
    "sphinx-click",
    "sphinx-copybutton",
    "sphinx_argparse",
    "sphinxcontrib.programoutput",
    "autodoc_pydantic[erdantic]",
]

# Packaging and upload tooling used when cutting a release.
release_requires = [
    "twine",
    "setuptools",
    "wheel",
]
# Package definition. Name, version, description, author, URL, and license all
# come from dsgrid/_version.py (loaded into `metadata` above).
setup(
    name=metadata["__title__"],
    version=metadata["__version__"],
    description=metadata["__description__"],
    long_description=readme,
    long_description_content_type="text/markdown",
    author=metadata["__author__"],
    maintainer_email=metadata["__maintainer_email__"],
    url=metadata["__url__"],
    packages=find_packages(),
    package_dir={"dsgrid": "dsgrid"},
    package_data={
        "dsgrid": [
            "notebooks/*.ipynb",
        ]
    },
    python_requires=">=3.10",
    entry_points={
        "console_scripts": [
            "dsgrid=dsgrid.cli.dsgrid:cli",
            # This exists because spark-submit does not recognize the above 'dsgrid' as a Python
            # application.
            "dsgrid-cli.py=dsgrid.cli.dsgrid:cli",
            "dsgrid-admin=dsgrid.cli.dsgrid_admin:cli",
        ],
    },
    include_package_data=True,
    license=metadata["__license__"],
    zip_safe=False,
    keywords="dsgrid",
    # NOTE(review): the previous values "Development Status :: Alpha" and
    # "Intended Audience :: Modelers" are not valid PyPI trove classifiers and
    # would be rejected on upload; replaced with the closest valid entries.
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: BSD License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3.10",
    ],
    test_suite="tests",
    install_requires=[
        "awscli",
        "boto3",
        "click>=8",
        "dash",
        "dash_bootstrap_components",
        "fastapi",
        "json5",
        "numpy~=1.23.0",  # pyspark uses numpy.bool, which was removed in numpy 1.24
        # Remove this restriction when pyspark is fixed.
        "pandas~=1.5",
        "prettytable",
        "pydantic~=1.10.11",
        "pyspark==3.3.1",  # Keep this synced with the spark version in Dockerfile.
        "python-arango",
        "requests",
        "s3path",
        "semver",
        "sqlalchemy",
        "uvicorn",
        "tzdata",  # time zone stuff
    ],
    # Optional dependency groups: `pip install dsgrid[test]`, `[dev]`, `[admin]`.
    extras_require={
        "test": test_requires,
        "dev": test_requires + dev_requires,
        "admin": test_requires + dev_requires + doc_requires + release_requires,
    },
)