Merge pull request #103 from mkandes/tscc-0.17.3
Merge tscc-0.17.3 back into sdsc-0.17.3
mkandes authored Sep 12, 2023
2 parents 33591e7 + a6be822 commit 66fd96b
Showing 1,300 changed files with 2,807,719 additions and 71 deletions.
33 changes: 33 additions & 0 deletions etc/spack/licenses/arm-forge/Licence
@@ -0,0 +1,33 @@
#
# A license is required to use package 'arm-forge'.
#
# * If your system is already properly configured for such a license, save this
# file UNCHANGED. The system may be configured if:
#
# - A license file is installed in a default location.
# - One of the following environment variable(s) is set for you, possibly via
# a module file:
#
# ALLINEA_LICENSE_DIR
# ALLINEA_LICENCE_DIR
# ALLINEA_LICENSE_FILE
# ALLINEA_LICENCE_FILE
#
# * Otherwise, depending on the license you have, enter AT THE BEGINNING of
# this file:
#
# - the contents of your license file, or
# - the address(es) of your license server.
#
# After installation, the following symlink(s) will be added to point to
# this Spack-global file (relative to the installation prefix).
#
# licences/Licence
#
# * For further information on licensing, see:
#
# https://developer.arm.com/tools-and-software/server-and-hpc/help/help-and-tutorials/system-administration/licensing/arm-licence-server
#
# Recap:
# - You may not need to modify this file at all.
# - Otherwise, enter your license or server address AT THE BEGINNING.
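A minimal sketch of the environment-variable route described in this template, for a client shell or module file; the variable name comes from the list above, while FORGE_PREFIX is a hypothetical placeholder for whatever installation prefix Spack chooses:

    # FORGE_PREFIX is illustrative; after install, licences/Licence under the
    # prefix is symlinked to this Spack-global file.
    export ALLINEA_LICENSE_FILE="${FORGE_PREFIX}/licences/Licence"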
6 changes: 6 additions & 0 deletions etc/spack/licenses/arm-forge/License.ddt
@@ -0,0 +1,6 @@
type=2
serial_number=4164
hostname=elprado.sdsc.edu
serverport=4241
features=ddt,cuda
hash2=6420174f4fcf497c9c2a62b41ae9af46d560651c
6 changes: 6 additions & 0 deletions etc/spack/licenses/arm-forge/License.map
@@ -0,0 +1,6 @@
type=2
serial_number=9550
hostname=elprado.sdsc.edu
serverport=4241
features=map
hash2=44e7229f4c77224b8b8c9748601e08a3565aa011
27 changes: 27 additions & 0 deletions etc/spack/licenses/gurobi/gurobi.lic
@@ -0,0 +1,27 @@
# DO NOT EDIT THIS FILE except as noted
#
# License ID 928915
#
# Place this file in your home directory (which takes precedence)
# or in one of the following shared locations:
# * C:\gurobi\ on Windows
# * /opt/gurobi/ on Linux
# * /Library/gurobi/ on Mac OS X
# Or set the environment variable GRB_LICENSE_FILE to point to this file;
# it will override the default locations.
#
TYPE=TOKEN
VERSION=10
TOKENSERVER=elprado
HOSTNAME=elprado
HOSTID=56b56819
CORES=8
USERNAME=jpg,fkucuksayacigil,zhz121,b3yao
EXPIRATION=2024-02-05
USELIMIT=4096
DISTRIBUTED=100
KEY=MCSI9EV6
CKEY=6H6O94N2
# Uncomment and edit the following lines as desired:
# PORT=41954
# PASSWORD=YourPrivatePassword
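If the file lives outside the default search paths listed in its header, the GRB_LICENSE_FILE route might look like this sketch (the path shown is illustrative, not part of this commit):

    export GRB_LICENSE_FILE=/opt/gurobi/gurobi.lic
    gurobi_cl --license   # report which license the tools picked up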
30 changes: 30 additions & 0 deletions etc/spack/licenses/gurobi/network.lic
@@ -0,0 +1,30 @@
#
# A license is required to use package 'gurobi'.
#
# * If your system is already properly configured for such a license, save this
# file UNCHANGED. The system may be configured if:
#
# - A license file is installed in a default location.
# - One of the following environment variable(s) is set for you, possibly via
# a module file:
#
# LM_LICENSE_FILE
#
# * Otherwise, depending on the license you have, enter AT THE BEGINNING of
# this file:
#
# - the contents of your license file, or
# - the address(es) of your license server.
#
# After installation, the following symlink(s) will be added to point to
# this Spack-global file (relative to the installation prefix).
#
# licenses/network.lic
#
# * For further information on licensing, see:
#
# https://www.gurobi.com/academia/academic-program-and-licenses
#
# Recap:
# - You may not need to modify this file at all.
# - Otherwise, enter your license or server address AT THE BEGINNING.
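Per the instructions above, a client copy of network.lic pointing at the token server in this commit might begin with lines like the following; the values mirror gurobi.lic earlier in this diff, and the port is an assumption taken from that file's commented-out default:

    TYPE=TOKEN
    TOKENSERVER=elprado
    # PORT=41954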
2 changes: 2 additions & 0 deletions etc/spack/licenses/intel/license.lic
@@ -0,0 +1,2 @@
SERVER elprado.sdsc.edu 005056B56819 40100
USE_SERVER
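This is a standard FlexLM client stub: the SERVER line names the license host, its host ID, and the TCP port (40100), and USE_SERVER tells clients to defer everything else to that server. Equivalently, a client could export the usual port@host form instead of reading this file, e.g.:

    export INTEL_LICENSE_FILE=40100@elprado.sdsc.edu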
2 changes: 0 additions & 2 deletions etc/spack/repos.yaml

This file was deleted.

66 changes: 66 additions & 0 deletions etc/spack/sdsc/tscc/0.17.3/cpu/specs/anaconda3@2021.05.sh
@@ -0,0 +1,66 @@
#!/usr/bin/env bash

#SBATCH --job-name=anaconda3@2021.05
#SBATCH --account=sys200
#SBATCH --partition=hotel
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=8
#SBATCH --time=02:00:00
#SBATCH --output=%x.o%j.%N

declare -xr LOCAL_TIME="$(date +'%Y%m%dT%H%M%S%z')"
declare -xir UNIX_TIME="$(date +'%s')"

declare -xr SYSTEM_NAME='tscc'

declare -xr SPACK_VERSION='0.17.3'
declare -xr SPACK_INSTANCE_NAME='cpu'
declare -xr SPACK_INSTANCE_DIR="/cm/shared/apps/spack/${SPACK_VERSION}/${SPACK_INSTANCE_NAME}"

# Recover this script's on-disk path from scontrol and record its md5sum for provenance.
declare -xr SLURM_JOB_SCRIPT="$(scontrol show job ${SLURM_JOB_ID} | awk -F= '/Command=/{print $2}')"
declare -xr SLURM_JOB_MD5SUM="$(md5sum ${SLURM_JOB_SCRIPT})"

declare -xr SCHEDULER_MODULE='slurm'

echo "${UNIX_TIME} ${SLURM_JOB_ID} ${SLURM_JOB_MD5SUM} ${SLURM_JOB_DEPENDENCY}"
echo ""

cat "${SLURM_JOB_SCRIPT}"

module purge
module load "${SCHEDULER_MODULE}"
module list
. "${SPACK_INSTANCE_DIR}/share/spack/setup-env.sh"

# Compose the Spack spec to build: package@version %compiler [variants] [dependencies].
declare -xr SPACK_PACKAGE='anaconda3@2021.05'
declare -xr SPACK_COMPILER='gcc@11.2.0'
declare -xr SPACK_VARIANTS=''
declare -xr SPACK_DEPENDENCIES=''
declare -xr SPACK_SPEC="${SPACK_PACKAGE} % ${SPACK_COMPILER} ${SPACK_VARIANTS} ${SPACK_DEPENDENCIES}"

printenv

spack config get compilers
spack config get config
spack config get mirrors
spack config get modules
spack config get packages
spack config get repos
spack config get upstreams

# Concretize the spec first; abort before attempting the build if it fails.
spack spec --long --namespaces --types "${SPACK_SPEC}"
if [[ "${?}" -ne 0 ]]; then
  echo 'ERROR: spack concretization failed.'
  exit 1
fi

# Build and install the package, using all CPUs allocated to this job.
time -p spack install --jobs "${SLURM_CPUS_PER_TASK}" --fail-fast --yes-to-all "${SPACK_SPEC}"
if [[ "${?}" -ne 0 ]]; then
  echo 'ERROR: spack install failed.'
  exit 1
fi

# Regenerate the Lmod module tree so the new package's modulefile is available.
spack module lmod refresh --delete-tree -y

sbatch --dependency="afterok:${SLURM_JOB_ID}" 'spark@3.2.1.sh'
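Each spec script ends by submitting the next package's script with an afterok dependency, so a single submission can drive an entire chain of Spack builds. A minimal sketch of starting this particular chain from the specs directory (assuming access to the sys200 account and hotel partition):

    sbatch 'anaconda3@2021.05.sh'   # on success, submits spark@3.2.1.sh automatically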
68 changes: 68 additions & 0 deletions etc/spack/sdsc/tscc/0.17.3/cpu/specs/aria2@1.35.0.sh
@@ -0,0 +1,68 @@
#!/usr/bin/env bash

#SBATCH --job-name=aria2@1.35.0
#SBATCH --account=sys200
#SBATCH --partition=hotel
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=8
#SBATCH --time=00:30:00
#SBATCH --output=%x.o%j.%N

declare -xr LOCAL_TIME="$(date +'%Y%m%dT%H%M%S%z')"
declare -xir UNIX_TIME="$(date +'%s')"

declare -xr SYSTEM_NAME='tscc'

declare -xr SPACK_VERSION='0.17.3'
declare -xr SPACK_INSTANCE_NAME='cpu'
declare -xr SPACK_INSTANCE_DIR="/cm/shared/apps/spack/${SPACK_VERSION}/${SPACK_INSTANCE_NAME}"

declare -xr SLURM_JOB_SCRIPT="$(scontrol show job ${SLURM_JOB_ID} | awk -F= '/Command=/{print $2}')"
declare -xr SLURM_JOB_MD5SUM="$(md5sum ${SLURM_JOB_SCRIPT})"

declare -xr SCHEDULER_MODULE='slurm'

echo "${UNIX_TIME} ${SLURM_JOB_ID} ${SLURM_JOB_MD5SUM} ${SLURM_JOB_DEPENDENCY}"
echo ""

cat "${SLURM_JOB_SCRIPT}"

module purge
module load "${SCHEDULER_MODULE}"
module list
. "${SPACK_INSTANCE_DIR}/share/spack/setup-env.sh"

declare -xr SPACK_PACKAGE='aria2@1.35.0'
declare -xr SPACK_COMPILER='gcc@11.2.0'
declare -xr SPACK_VARIANTS=''
declare -xr SPACK_DEPENDENCIES=''
declare -xr SPACK_SPEC="${SPACK_PACKAGE} % ${SPACK_COMPILER} ${SPACK_VARIANTS} ${SPACK_DEPENDENCIES}"

printenv

spack config get compilers
spack config get config
spack config get mirrors
spack config get modules
spack config get packages
spack config get repos
spack config get upstreams

spack spec --long --namespaces --types "${SPACK_SPEC}"
if [[ "${?}" -ne 0 ]]; then
  echo 'ERROR: spack concretization failed.'
  exit 1
fi

time -p spack install --jobs "${SLURM_CPUS_PER_TASK}" --fail-fast --yes-to-all "${SPACK_SPEC}"
if [[ "${?}" -ne 0 ]]; then
  echo 'ERROR: spack install failed.'
  exit 1
fi

spack module lmod refresh --delete-tree -y

sbatch --dependency="afterok:${SLURM_JOB_ID}" 'rclone@1.56.2.sh'

# Brief pause, presumably to keep chained sbatch submissions from racing the scheduler.
sleep 20
69 changes: 69 additions & 0 deletions etc/spack/sdsc/tscc/0.17.3/cpu/specs/arm-forge@21.1.sh
@@ -0,0 +1,69 @@
#!/usr/bin/env bash

#SBATCH --job-name=arm-forge@21.1
#SBATCH --account=sys200
#SBATCH --partition=hotel
#SBATCH --nodes=1
#SBATCH --exclude=gpu1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=8
#SBATCH --time=00:30:00
#SBATCH --output=%x.o%j.%N

declare -xr LOCAL_TIME="$(date +'%Y%m%dT%H%M%S%z')"
declare -xir UNIX_TIME="$(date +'%s')"

declare -xr SYSTEM_NAME='tscc'

declare -xr SPACK_VERSION='0.17.3'
declare -xr SPACK_INSTANCE_NAME='cpu'
declare -xr SPACK_INSTANCE_DIR="/cm/shared/apps/spack/${SPACK_VERSION}/${SPACK_INSTANCE_NAME}"

declare -xr SLURM_JOB_SCRIPT="$(scontrol show job ${SLURM_JOB_ID} | awk -F= '/Command=/{print $2}')"
declare -xr SLURM_JOB_MD5SUM="$(md5sum ${SLURM_JOB_SCRIPT})"

declare -xr SCHEDULER_MODULE='slurm'

echo "${UNIX_TIME} ${SLURM_JOB_ID} ${SLURM_JOB_MD5SUM} ${SLURM_JOB_DEPENDENCY}"
echo ""

cat "${SLURM_JOB_SCRIPT}"

module purge
module load "${SCHEDULER_MODULE}"
module list
. "${SPACK_INSTANCE_DIR}/share/spack/setup-env.sh"

declare -xr SPACK_PACKAGE='arm-forge@21.1'
declare -xr SPACK_COMPILER='gcc@11.2.0'
declare -xr SPACK_VARIANTS=''
declare -xr SPACK_DEPENDENCIES=''
declare -xr SPACK_SPEC="${SPACK_PACKAGE} % ${SPACK_COMPILER} ${SPACK_VARIANTS} ${SPACK_DEPENDENCIES}"

printenv

spack config get compilers
spack config get config
spack config get mirrors
spack config get modules
spack config get packages
spack config get repos
spack config get upstreams

spack spec --long --namespaces --types "${SPACK_SPEC}"
if [[ "${?}" -ne 0 ]]; then
  echo 'ERROR: spack concretization failed.'
  exit 1
fi

time -p spack install --jobs "${SLURM_CPUS_PER_TASK}" --fail-fast --yes-to-all "${SPACK_SPEC}"
if [[ "${?}" -ne 0 ]]; then
  echo 'ERROR: spack install failed.'
  exit 1
fi

spack module lmod refresh --delete-tree -y

#sbatch --dependency="afterok:${SLURM_JOB_ID}" 'gh@2.0.0.sh'

#sleep 20
68 changes: 68 additions & 0 deletions etc/spack/sdsc/tscc/0.17.3/cpu/specs/aspera-cli@3.7.7.sh
@@ -0,0 +1,68 @@
#!/usr/bin/env bash

#SBATCH --job-name=aspera-cli@3.7.7
#SBATCH --account=sys200
#SBATCH --partition=hotel
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=8
#SBATCH --time=00:30:00
#SBATCH --output=%x.o%j.%N

declare -xr LOCAL_TIME="$(date +'%Y%m%dT%H%M%S%z')"
declare -xir UNIX_TIME="$(date +'%s')"

declare -xr SYSTEM_NAME='tscc'

declare -xr SPACK_VERSION='0.17.3'
declare -xr SPACK_INSTANCE_NAME='cpu'
declare -xr SPACK_INSTANCE_DIR="/cm/shared/apps/spack/${SPACK_VERSION}/${SPACK_INSTANCE_NAME}"

declare -xr SLURM_JOB_SCRIPT="$(scontrol show job ${SLURM_JOB_ID} | awk -F= '/Command=/{print $2}')"
declare -xr SLURM_JOB_MD5SUM="$(md5sum ${SLURM_JOB_SCRIPT})"

declare -xr SCHEDULER_MODULE='slurm'

echo "${UNIX_TIME} ${SLURM_JOB_ID} ${SLURM_JOB_MD5SUM} ${SLURM_JOB_DEPENDENCY}"
echo ""

cat "${SLURM_JOB_SCRIPT}"

module purge
module load "${SCHEDULER_MODULE}"
module list
. "${SPACK_INSTANCE_DIR}/share/spack/setup-env.sh"

declare -xr SPACK_PACKAGE='aspera-cli@3.7.7'
declare -xr SPACK_COMPILER='gcc@11.2.0'
declare -xr SPACK_VARIANTS=''
declare -xr SPACK_DEPENDENCIES=''
declare -xr SPACK_SPEC="${SPACK_PACKAGE} % ${SPACK_COMPILER} ${SPACK_VARIANTS} ${SPACK_DEPENDENCIES}"

printenv

spack config get compilers
spack config get config
spack config get mirrors
spack config get modules
spack config get packages
spack config get repos
spack config get upstreams

spack spec --long --namespaces --types "${SPACK_SPEC}"
if [[ "${?}" -ne 0 ]]; then
  echo 'ERROR: spack concretization failed.'
  exit 1
fi

time -p spack install --jobs "${SLURM_CPUS_PER_TASK}" --fail-fast --yes-to-all "${SPACK_SPEC}"
if [[ "${?}" -ne 0 ]]; then
  echo 'ERROR: spack install failed.'
  exit 1
fi

spack module lmod refresh --delete-tree -y

sbatch --dependency="afterok:${SLURM_JOB_ID}" 'rclone@1.56.2.sh'

sleep 20