Update LLAMA tokenizer #2568

Workflow file for this run

name: "pip-install"
on:
pull_request:
paths:
- "cmake/**"
- "config/**"
- "deps/**"
- "python/**"
- "setup.py"
- "requirements.txt"
- ".github/workflows/helpers/install_dependencies.sh"
- ".github/workflows/pip-install.yml"
push:
branches:
- "master"
paths:
- "cmake/**"
- "config/**"
- "deps/**"
- "python/**"
- "setup.py"
- "requirements.txt"
- ".github/workflows/helpers/install_dependencies.sh"
- ".github/workflows/pip-install.yml"
workflow_dispatch:
concurrency:
group: pip-install-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
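  # github.head_ref is only set for pull_request events; on push it is empty, so the group
  # falls back to the unique run_id and only superseded PR runs get cancelled.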
jobs:
  pip-install-flexflow:
    name: Install FlexFlow with pip
    runs-on: ubuntu-20.04
    defaults:
      run:
        shell: bash -l {0} # required to use an activated conda environment
    steps:
      - name: Checkout Git Repository
        uses: actions/checkout@v3
        with:
          submodules: recursive
      - name: Free additional space on runner
        run: .github/workflows/helpers/free_space_on_runner.sh
      - name: Install CUDA
        uses: Jimver/cuda-toolkit@v0.2.16
        id: cuda-toolkit
        with:
          cuda: "12.1.1"
          # Disable caching of the CUDA binaries, since it does not give us any significant performance improvement
          use-github-cache: "false"
      - name: Install system dependencies
        run: .github/workflows/helpers/install_dependencies.sh
      - name: Install conda and FlexFlow dependencies
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: flexflow
          environment-file: conda/environment.yml
          auto-activate-base: false
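        # setup-miniconda builds the flexflow environment from conda/environment.yml and
        # activates it for later steps (via the login shell configured in `defaults`).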
      - name: Build and Install FlexFlow
        run: |
          export FF_HOME=$(pwd)
          export FF_CUDA_ARCH=70
          pip install . --verbose
          # Remove build folder to check that the installed version can run independently of the build files
          rm -rf build
      - name: Check availability of flexflow modules in Python
        run: |
          # The runner has no GPU driver, so point the loader at the CUDA driver stubs and
          # provide a libcuda.so.1 symlink so the CUDA-linked flexflow library can be imported.
          export LD_LIBRARY_PATH="$CUDA_PATH/lib64/stubs:$LD_LIBRARY_PATH"
          sudo ln -s "$CUDA_PATH/lib64/stubs/libcuda.so" "$CUDA_PATH/lib64/stubs/libcuda.so.1"
          python -c 'import flexflow.core; import flexflow.serve as ff; exit()'
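
The "python -c" one-liner in the last step can also be run as a standalone smoke test after a local "pip install .". The sketch below is only illustrative: the script name and printed messages are made up, and just the flexflow.core and flexflow.serve module names come from the workflow above.

# smoke_test.py -- hypothetical standalone version of the CI import check
import importlib

# Import the same modules the workflow step checks; an ImportError (or a loader error
# from the native CUDA-linked library) means the pip-installed package is not usable.
for module_name in ("flexflow.core", "flexflow.serve"):
    importlib.import_module(module_name)
    print(f"OK: imported {module_name}")

On a machine without a GPU driver, the CUDA stubs setup from the step above (LD_LIBRARY_PATH plus the libcuda.so.1 symlink) is still needed before running it.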