Commit 1b859709 authored by Gaurav Kukreja's avatar Gaurav Kukreja

Batch Scripts

 * assignment 3 scripts are super awesome! - Gaurav
Signed-off-by: Gaurav Kukreja <mailme.gaurav@gmail.com>
parent 490b19b0
#!/bin/bash
# SLURM batch job: run the parallel heat test suite on the ICE cluster
# with 8 MPI tasks.
#
# FIX: SLURM does not expand shell variables inside #SBATCH directives,
# and $JOB_ID is an SGE variable in any case — use SLURM's %j filename
# pattern in directives and $SLURM_JOB_ID in the runtime command line.
#SBATCH -o /home/cluster/h039v/h039val/workspace/batch/heat.%j.out
#SBATCH -D /home/cluster/h039v/h039val
#SBATCH -J heat-parallel
#SBATCH --clusters=ice1
#SBATCH --get-user-env
#SBATCH --ntasks=8
#SBATCH --mail-type=end
#SBATCH --mail-user=gaurav.kukreja@tum.de
#SBATCH --export=NONE
#SBATCH --time=08:00:00

# Make the environment-modules infrastructure available to the job shell.
source /etc/profile.d/modules.sh
# srun_ps is the site-specific (LRZ) parallel launcher wrapper; capture
# the test-suite stdout in a per-job file keyed by the real SLURM job id.
srun_ps /home/cluster/h039v/h039val/workspace/parallel-tars/run-parallel-tests-ice.sh > "/home/cluster/h039v/h039val/workspace/batch/out.${SLURM_JOB_ID}.out"
#!/bin/bash
# SLURM batch job: run the parallel heat test suite on the MPP cluster
# with 16 MPI tasks (short 50-minute time limit).
#
# FIX: SLURM does not expand shell variables inside #SBATCH directives,
# and $JOB_ID is an SGE variable — use the %j filename pattern instead,
# so the log is named heat.<jobid>.out rather than the literal
# "heat.$JOB_ID.out".
#SBATCH -o /home/cluster/h039v/h039val/workspace/batch/heat.%j.out
#SBATCH -D /home/cluster/h039v/h039val
#SBATCH -J heat-parallel
#SBATCH --clusters=mpp1
#SBATCH --get-user-env
#SBATCH --ntasks=16
#SBATCH --mail-type=end
#SBATCH --mail-user=gaurav.kukreja@tum.de
#SBATCH --export=NONE
#SBATCH --time=00:50:00

# Make the environment-modules infrastructure available to the job shell.
source /etc/profile.d/modules.sh
# Test output goes to the #SBATCH -o file; no extra redirect here.
srun_ps /home/cluster/h039v/h039val/workspace/parallel-tars/run-parallel-tests-mpp.sh
#!/bin/bash
# SLURM batch job: run the parallel heat test suite on the UV cluster
# with 64 MPI tasks.
# The -o pattern uses SLURM's %j (job id) and %N (node name) filename
# placeholders — the correct SLURM form.
#SBATCH -o /home/cluster/h039v/h039val/workspace/batch/myjob.%j.%N.out
#SBATCH -D /home/cluster/h039v/h039val
#SBATCH -J heat-parallel
#SBATCH --clusters=uv3
#SBATCH --get-user-env
#SBATCH --ntasks=64
#SBATCH --mail-type=end
#SBATCH --mail-user=gaurav.kukreja@tum.de
#SBATCH --export=NONE
#SBATCH --time=08:00:00
# Make the environment-modules infrastructure available to the job shell.
source /etc/profile.d/modules.sh
# NOTE(review): stdout is redirected to OOPS.err and stderr to
# OOPS_err.err, both relative to the -D working directory; naming the
# stdout capture ".err" looks accidental — confirm this was intended.
srun_ps /home/cluster/h039v/h039val/workspace/parallel-tars/run-parallel-tests-uv.sh > OOPS.err 2> OOPS_err.err
#!/bin/bash
# SLURM batch job: run the parallel heat test suite on the UV cluster
# with 64 MPI tasks.
#
# FIX: #SBATCH directives are not processed by the shell, so neither
# $HOME nor $JOB_ID was ever expanded in the -o path (and $JOB_ID is an
# SGE variable, not SLURM's). Use the absolute home path — matching the
# -D directive below — and SLURM's %j job-id filename pattern.
#SBATCH -o /home/cluster/h039v/h039val/workspace/batch/heat.%j.out
#SBATCH -D /home/cluster/h039v/h039val
#SBATCH -J heat-parallel
#SBATCH --clusters=uv3
#SBATCH --get-user-env
#SBATCH --ntasks=64
#SBATCH --mail-type=end
#SBATCH --mail-user=gaurav.kukreja@tum.de
#SBATCH --export=NONE
#SBATCH --time=08:00:00

# Make the environment-modules infrastructure available to the job shell.
source /etc/profile.d/modules.sh
# Test output goes to the #SBATCH -o file; no extra redirect here.
srun_ps /home/cluster/h039v/h039val/workspace/parallel-tars/run-parallel-tests-uv.sh
#!/bin/bash
# SLURM batch job: run the assignment-3 MPI tests on the ICE cluster
# (16 tasks) via the shared run-mpi-test.sh driver.
#
# FIX: SLURM does not expand shell variables inside #SBATCH directives,
# and $JOB_ID is an SGE variable — use %j in the directive and
# $SLURM_JOB_ID in the runtime redirect.
#SBATCH -o /home/cluster/h039v/h039val/assign3/batch_result/heat.%j.out
#SBATCH -D /home/cluster/h039v/h039val
#SBATCH -J mpi-ice
#SBATCH --clusters=ice1
#SBATCH --get-user-env
#SBATCH --ntasks=16
#SBATCH --mail-type=end
#SBATCH --mail-user=gaurav.kukreja@tum.de
#SBATCH --export=NONE
#SBATCH --time=08:00:00

# Make the environment-modules infrastructure available to the job shell.
source /etc/profile.d/modules.sh
# Run the driver with the cluster name; capture its stdout in a
# per-job scratch file keyed by the real SLURM job id.
srun_ps /home/cluster/h039v/h039val/assign3/supercomputer/batch_scripts/run-mpi-test.sh ice > "/home/cluster/h039v/h039val/assign3/tmp/out_ice_script.${SLURM_JOB_ID}.out"
#!/bin/bash
# SLURM batch job: run the assignment-3 MPI tests on the MPP cluster
# (32 tasks) via the shared run-mpi-test.sh driver.
#
# FIX: SLURM does not expand shell variables inside #SBATCH directives,
# and $JOB_ID is an SGE variable — use %j in the directive and
# $SLURM_JOB_ID in the runtime redirect.
#SBATCH -o /home/cluster/h039v/h039val/assign3/batch_result/heat.%j.out
#SBATCH -D /home/cluster/h039v/h039val
#SBATCH -J mpi-mpp
#SBATCH --clusters=mpp1
#SBATCH --get-user-env
#SBATCH --ntasks=32
#SBATCH --mail-type=end
#SBATCH --mail-user=gaurav.kukreja@tum.de
#SBATCH --export=NONE
#SBATCH --time=08:00:00

# Make the environment-modules infrastructure available to the job shell.
source /etc/profile.d/modules.sh
# Run the driver with the cluster name; capture its stdout in a
# per-job scratch file keyed by the real SLURM job id.
srun_ps /home/cluster/h039v/h039val/assign3/supercomputer/batch_scripts/run-mpi-test.sh mpp > "/home/cluster/h039v/h039val/assign3/tmp/out_mpp_script.${SLURM_JOB_ID}.out"
#!/bin/bash
# SLURM batch job: run the assignment-3 MPI tests on the UV cluster
# (128 tasks) via the shared run-mpi-test.sh driver.
#
# FIX: SLURM does not expand shell variables inside #SBATCH directives,
# and $JOB_ID is an SGE variable — use %j in the directive and
# $SLURM_JOB_ID in the runtime redirect.
#SBATCH -o /home/cluster/h039v/h039val/assign3/batch_result/heat.%j.out
#SBATCH -D /home/cluster/h039v/h039val
#SBATCH -J mpi-uv
#SBATCH --clusters=uv3
#SBATCH --get-user-env
#SBATCH --ntasks=128
#SBATCH --mail-type=end
#SBATCH --mail-user=gaurav.kukreja@tum.de
#SBATCH --export=NONE
#SBATCH --time=08:00:00

# Make the environment-modules infrastructure available to the job shell.
source /etc/profile.d/modules.sh
# Run the driver with the cluster name; capture its stdout in a
# per-job scratch file keyed by the real SLURM job id.
srun_ps /home/cluster/h039v/h039val/assign3/supercomputer/batch_scripts/run-mpi-test.sh uv > "/home/cluster/h039v/h039val/assign3/tmp/out_uv_script.${SLURM_JOB_ID}.out"
#!/bin/bash
# Run the assignment-3 MPI test binaries (reduction, ping-pong,
# ping-pong-bandwidth) for a range of process counts on one of the
# clusters, collecting all output in a per-run directory.
#
# Usage: run-mpi-test.sh <ice|mpp|uv>
#
# FIXES vs. original:
#  * shebang changed from /bin/sh to /bin/bash — the script uses bash
#    arrays and [ x == y ], which are not POSIX sh.
#  * restored the argument check (the commented-out version was also
#    broken: '"$1"==""' is a single word, not a comparison).
#  * third loop read '{num_of_proc[@]}' without '$', so it iterated once
#    over that literal string instead of over the process counts.
#  * unknown $MACHINE now aborts instead of running with an empty
#    EXEC_CMD.
#  * make log is named after the actual cluster (was hard-coded "mpp").

if [ -z "$1" ]; then
  echo "Invalid usage: Specify ice or mpp or uv." >&2
  exit 1
fi
MACHINE=$1

# Path to assignment 3 source code.
PATH_SOURCE="/home/cluster/h039v/h039val/assign3/supercomputer/mpi-assign3"
# One result directory per run, tagged with cluster name and wall time.
OUTPUT_PATH="/home/cluster/h039v/h039val/assign3/batch_result/${MACHINE}-$(date +%T)"

# Each cluster uses its own MPI launcher and maximum task count.
if [ "$MACHINE" == "ice" ]; then
  EXEC_CMD="mpirun -np"
  num_of_proc=("4" "8" "16" "32" "64")
elif [ "$MACHINE" == "mpp" ]; then
  EXEC_CMD="mpiexec -n"
  num_of_proc=("4" "8" "16" "32" "64")
elif [ "$MACHINE" == "uv" ]; then
  EXEC_CMD="mpirun -np"
  num_of_proc=("4" "8" "16" "32" "64" "128")
else
  echo "Unknown machine '$MACHINE': expected ice, mpp or uv." >&2
  exit 1
fi
echo "$EXEC_CMD"

cd "$PATH_SOURCE" || exit 1
rm -rf "$OUTPUT_PATH"
mkdir -p "$OUTPUT_PATH"

# Rebuild the test binaries, logging the build for this cluster.
make clean
make > "$OUTPUT_PATH/make-result-${MACHINE}.out"

# NOTE: $EXEC_CMD is deliberately unquoted below — it must word-split
# into the launcher name and its flag (e.g. "mpirun" "-np").
for nproc in "${num_of_proc[@]}"; do
  $EXEC_CMD "$nproc" ./reduction > "$OUTPUT_PATH/reduction_${nproc}.out"
done

for nproc in "${num_of_proc[@]}"; do
  # NOTE(review): unlike the reduction loop, the output path is passed
  # as an argument (no '>'); preserved as-is — confirm that ping-pong
  # writes its own output file rather than printing to stdout.
  $EXEC_CMD "$nproc" ./ping-pong "$OUTPUT_PATH/ping_pong_${nproc}.out"
done

for nproc in "${num_of_proc[@]}"; do
  $EXEC_CMD "$nproc" ./ping-pong-bandwidth "$OUTPUT_PATH/bandwidth_${nproc}.out"
done
exit 0
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment