Add 2.1.2 release config
test

test

test

Release 2.1.2

Numpy install

test

set_channel
atalman committed Nov 30, 2023
1 parent 50e4012 commit 70422e6
Showing 6 changed files with 30 additions and 3 deletions.
1 change: 1 addition & 0 deletions .github/workflows/userbenchmark-t4-metal.yml
@@ -86,6 +86,7 @@ jobs:
- name: Upload result jsons to Scribe
run: |
. "${SETUP_SCRIPT}" && conda activate "${CONDA_ENV_NAME}"
conda install -y six
pushd benchmark
RESULTS=($(find ${PWD}/../benchmark-output -name "metrics-*.json" -maxdepth 2 | sort -r))
echo "Uploading result jsons: ${RESULTS}"
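For readers unfamiliar with the upload step, here is a minimal Python sketch of what the find/sort pipeline above collects, assuming the same benchmark-output layout; it is illustrative only and not code from the workflow.

import json
from pathlib import Path

# Gather metrics-*.json files at most two levels under benchmark-output,
# newest name first (mirrors `find ... -maxdepth 2 | sort -r`).
output_dir = Path("benchmark-output")
candidates = list(output_dir.glob("metrics-*.json")) + list(output_dir.glob("*/metrics-*.json"))
for path in sorted(candidates, reverse=True):
    with open(path) as f:
        metrics = json.load(f)
    print(f"would upload {path} ({len(metrics)} top-level keys)")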
2 changes: 1 addition & 1 deletion userbenchmark/release-test/configs/2.1.1.yaml
@@ -3,6 +3,6 @@ cuda:
magma_version: magma-cuda121
pytorch:
- version: 2.1.1
conda_channel: pytorch-test
conda_channel: pytorch
- version: 2.1.0
conda_channel: pytorch
8 changes: 8 additions & 0 deletions userbenchmark/release-test/configs/2.1.2.yaml
@@ -0,0 +1,8 @@
cuda:
- version: 12.1
magma_version: magma-cuda121
pytorch:
- version: 2.1.2
conda_channel: pytorch-test
- version: 2.1.1
conda_channel: pytorch
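The new config pairs one CUDA toolchain with the 2.1.2 release candidate (from pytorch-test) and the previous stable 2.1.1 build. Below is a minimal sketch of how such a file could be consumed, assuming PyYAML; the actual loader in userbenchmark/release-test is not part of this diff, so the code is purely illustrative.

import yaml  # requires PyYAML

# List every CUDA/PyTorch combination a release-test config asks for.
with open("userbenchmark/release-test/configs/2.1.2.yaml") as f:
    config = yaml.safe_load(f)

for cuda in config["cuda"]:
    for torch_spec in config["pytorch"]:
        print(
            f"CUDA {cuda['version']} ({cuda['magma_version']}): "
            f"torch {torch_spec['version']} from channel {torch_spec['conda_channel']}"
        )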
5 changes: 5 additions & 0 deletions userbenchmark/release-test/run_release_test.sh
@@ -15,6 +15,11 @@ if [[ ${PLATFORM_NAME} == "aws_t4_metal" ]]; then
fi

. switch-cuda.sh "${CUDA_VERSION}"

if [[ ${CUDA_VERSION} == "12.1" ]]; then
pip install nvidia-cuda-nvcc-cu12
fi

nvcc --version
sudo apt-get install bc
# run mnist
10 changes: 8 additions & 2 deletions userbenchmark/release-test/setup_env.sh
@@ -29,13 +29,19 @@ conda update --all -y

sudo ln -sf /usr/local/cuda-${CUDA_VERSION} /usr/local/cuda
conda uninstall -y pytorch torchvision pytorch-cuda
conda uninstall -y pytorch torchvision cudatoolkit
conda uninstall -y pytorch torchvision
# make sure we have a clean environment without pytorch
pip uninstall -y torch torchvision

# install magma
conda install -y -c pytorch ${MAGMA_VERSION}
conda install --force-reinstall -v -y pytorch=${PYTORCH_VERSION} torchvision pytorch-cuda=${CUDA_VERSION} -c ${PYTORCH_CHANNEL} -c nvidia

# install pip version of pytorch and torchvision
if [[ ${PYTORCH_CHANNEL} == "pytorch-test" ]]; then
pip3 install torch torchvision --index-url https://download.pytorch.org/whl/test/cu121
else
pip3 install torch torchvision
fi

python -c 'import torch; print(torch.__version__); print(torch.version.git_version)'

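The channel check added above encodes a simple convention: release candidates install from the test wheel index, final releases from the default index. Here is a hypothetical helper expressing the same rule; the script hard-codes the cu121 suffix, so deriving it from the CUDA version below is an assumption made for illustration.

from typing import Optional

def pip_index_url(pytorch_channel: str, cuda_version: str) -> Optional[str]:
    """Return the extra pip index for release candidates, or None for the default index."""
    if pytorch_channel == "pytorch-test":
        cu_tag = "cu" + cuda_version.replace(".", "")  # assumption: "12.1" -> "cu121"
        return f"https://download.pytorch.org/whl/test/{cu_tag}"
    return None

# pytorch-test -> https://download.pytorch.org/whl/test/cu121
print(pip_index_url("pytorch-test", "12.1"))
# stable channel -> None, i.e. plain `pip3 install torch torchvision`
print(pip_index_url("pytorch", "12.1"))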
7 changes: 7 additions & 0 deletions utils/cuda_utils.py
@@ -45,6 +45,12 @@ def prepare_cuda_env(cuda_version: str, dryrun=False):
env["LD_LIBRARY_PATH"] = f"{cuda_path_str}/lib64:{cuda_path_str}/extras/CUPTI/lib64:{env['LD_LIBRARY_PATH']}"
if dryrun:
print(f"CUDA_HOME is set to {env['CUDA_HOME']}")

# step 1.5: install the correct nvcc version
if cuda_version == "12.1":
install_nvcc = ["pip", "install", "nvidia-cuda-nvcc-cu12"]
subprocess.check_call(install_nvcc, env=env)

# step 2: test call to nvcc to confirm the version is correct
test_nvcc = ["nvcc", "--version"]
if dryrun:
@@ -53,6 +59,7 @@ def prepare_cuda_env(cuda_version: str, dryrun=False):
output = subprocess.check_output(test_nvcc, stderr=subprocess.STDOUT, env=env).decode()
print(f"NVCC version output: {output}")
assert _nvcc_output_match(output, cuda_version), f"Expected CUDA version {cuda_version}, getting nvcc test result {output}"

# step 3: install the correct magma version
install_magma_cmd = ["conda", "install", "-y", "-c", "pytorch", CUDA_VERSION_MAP[cuda_version]['magma_version']]
if dryrun:
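_nvcc_output_match is only referenced by this hunk; its body lives elsewhere in utils/cuda_utils.py and is not shown here. One plausible implementation, as a sketch of what the assert on the nvcc output checks:

import re

def nvcc_output_match(output: str, cuda_version: str) -> bool:
    # `nvcc --version` prints a line such as:
    #   Cuda compilation tools, release 12.1, V12.1.105
    match = re.search(r"release (\d+\.\d+)", output)
    return match is not None and match.group(1) == cuda_version

print(nvcc_output_match("Cuda compilation tools, release 12.1, V12.1.105", "12.1"))  # True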
