# requirements.txt (forked from KoboldAI/KoboldAI-Client)
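# Extra wheel index for CUDA 11.8 (cu118) builds of torch and related packages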
--extra-index-url https://download.pytorch.org/whl/cu118
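# Hugging Face model loading and inference stack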
transformers[sentencepiece]==4.36.1
huggingface_hub==0.19.4
optimum[onnxruntime]==1.16.1
safetensors==0.4.1
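# Flask web server, SocketIO transport, and HTTP client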
Flask==2.3.3
Flask-SocketIO==5.3.2
Werkzeug==2.3.7
python-socketio==5.7.2
requests
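# PyTorch 2.1 (CUDA builds come from the cu118 extra index above)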
torch==2.1.*
flask-cloudflared==0.0.10
flask-ngrok
flask-cors
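# Async networking for SocketIO (eventlet) and DNS tooling (dnspython)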
eventlet==0.33.3
dnspython==2.2.1
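# Lua bindings (lupa), Markdown rendering, and HTML sanitization (bleach)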
lupa==1.10
markdown
bleach==4.1.0
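# Protocol buffers and Hugging Face Accelerate (device mapping / offloading)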
protobuf
accelerate==0.25.0
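# Server-side sessions and API schema/spec tooling (marshmallow, apispec)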
flask-session==0.5.0
marshmallow>=3.13
apispec-webframeworks
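# Logging and colored terminal output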
loguru
termcolor
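# Prompt/soft-prompt tuning toolkit (mkultra)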
git+https://github.com/VE-FORBRYDERNE/mkultra
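# Image handling (Pillow) and diffusion pipelines (diffusers)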
Pillow
diffusers==0.24
psutil
ansi2html
flask_compress
ijson
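# bitsandbytes quantization (PyPI build on Linux, prebuilt wheel on Windows)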
bitsandbytes==0.40.0.post4; sys_platform == 'linux'
https://github.com/jllllll/bitsandbytes-windows-webui/releases/download/wheels/bitsandbytes-0.40.0.post4-py3-none-win_amd64.whl; sys_platform == 'win32'
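# Unicode fix-ups (ftfy) and audio handling (pydub)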
ftfy
pydub
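# Test dependencies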
pytest==7.2.2
pytest-html==3.2.0
pytest-metadata==2.0.4
requests-mock==1.10.0
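# Patched transformers loader (hf_bleeding_edge), tensor rearrangement (einops), PEFT/LoRA, and SciPy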
git+https://github.com/0cc4m/hf_bleeding_edge/
einops
peft==0.7.1
scipy
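# AutoGPTQ 0.5.1 cu118 wheels (Linux/Windows, Python 3.8 and 3.10 only)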
https://huggingface.github.io/autogptq-index/whl/cu118/auto-gptq/auto_gptq-0.5.1%2Bcu118-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl; sys_platform == 'linux' and python_version == '3.10'
https://huggingface.github.io/autogptq-index/whl/cu118/auto-gptq/auto_gptq-0.5.1%2Bcu118-cp310-cp310-win_amd64.whl; sys_platform == 'win32' and python_version == '3.10'
https://huggingface.github.io/autogptq-index/whl/cu118/auto-gptq/auto_gptq-0.5.1%2Bcu118-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl; sys_platform == 'linux' and python_version == '3.8'
https://huggingface.github.io/autogptq-index/whl/cu118/auto-gptq/auto_gptq-0.5.1%2Bcu118-cp38-cp38-win_amd64.whl; sys_platform == 'win32' and python_version == '3.8'
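# curses support on Windows and NVIDIA GPU queries (pynvml)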
windows-curses; sys_platform == 'win32'
pynvml
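# FlashAttention 2.3.6 wheels (CUDA 11.8, torch 2.1, Linux, Python 3.8 and 3.10 only)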
https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.6/flash_attn-2.3.6+cu118torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.10'
https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.6/flash_attn-2.3.6+cu118torch2.1cxx11abiFALSE-cp38-cp38-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.8'
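# Memory-efficient attention kernels (xformers) and YAML config handling (omegaconf)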
xformers==0.0.23.post1
omegaconf
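# AutoAWQ 0.1.8 cu118 wheels (Linux/Windows, Python 3.8 and 3.10 only)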
https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp310-cp310-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.10'
https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp310-cp310-win_amd64.whl; sys_platform == 'win32' and python_version == '3.10'
https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp38-cp38-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.8'
https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp38-cp38-win_amd64.whl; sys_platform == 'win32' and python_version == '3.8'
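# Install sketch (assumes a CUDA 11.8 system with Python 3.8 or 3.10, since the
# pinned auto-gptq / flash-attn / autoawq wheels only ship cp38/cp310 builds):
#   pip install -r requirements.txt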