```
make clean
make -f Makefile TARGET=MPI1
```
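`TARGET=MPI1` builds the classic point-to-point and collective suite; the same Makefile can build the other IMB suites by changing `TARGET` (suite names as in the IMB 2018 sources; treat this as a sketch and check your Makefile for the exact list):

```
make -f Makefile TARGET=EXT   # one-sided communication benchmarks
make -f Makefile TARGET=IO    # MPI-IO benchmarks
make -f Makefile TARGET=NBC   # non-blocking collectives
```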
Note the compiler setting in the Makefile:

**CC=mpicc**
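To see exactly what the `mpicc` wrapper expands to, Open MPI's wrapper compilers accept a `--showme` flag (a quick check, assuming the Open MPI toolchain loaded in the job script below):

```
# Print the real compiler command and flags the wrapper would run,
# without compiling anything (Open MPI wrapper option):
mpicc --showme
```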
Check with `ldd` that the binary is dynamically linked against the expected MPI and runtime libraries:

```
ldd ./IMB-MPI1
        linux-vdso.so.1 (0x00007ffc29792000)
        ...
```
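To focus on just the MPI libraries in that output, you can filter it (a convenience one-liner, not part of the original build steps):

```
# Show only the MPI-related shared libraries the dynamic linker resolved:
ldd ./IMB-MPI1 | grep -i mpi
```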
An example Slurm batch script to run the benchmark on two nodes:

```
#!/bin/bash
## wget https://github.com/intel/mpi-benchmarks/archive/v2018.1.tar.gz
## https://hpc.oit.uci.edu/software
#SBATCH -J IMB
#SBATCH -N 2
# SBATCH --ntasks-per-node=16
# SBATCH --ntasks-per-node=6
# SBATCH -n 32
# SBATCH --exclusive
#SBATCH --time=00:30:00
#SBATCH --error=job.%J.err
#SBATCH --output=job.%J.out
#SBATCH --mail-type=BEGIN,END,FAIL
#SBATCH --mail-user=user@lumc.nl

# Clear the environment from any previously loaded modules
module purge > /dev/null 2>&1

# Load the module environment suitable for the job
# (load exactly one MPI module; 3.1.5 and 4.0.2 would conflict if loaded together)
# module load mpi/openmpi/3.1.5/gcc-8.2.1
module load mpi/openmpi/4.0.2/gcc-8.2.1
### module load pmi/openpmix/3.1.4/gcc-8.2.1

echo "Starting at `date`"

# SLURM_NODELIST and SLURM_NNODES are older synonyms of the two variables below
echo "Running on hosts: $SLURM_JOB_NODELIST"
echo "Running on $SLURM_JOB_NUM_NODES nodes."
echo "Running $SLURM_NTASKS tasks."
echo "Account: $SLURM_JOB_ACCOUNT"
echo "Job ID: $SLURM_JOB_ID"
echo "Job name: $SLURM_JOB_NAME"
echo "Node running script: $SLURMD_NODENAME"
echo "Submit host: $SLURM_SUBMIT_HOST"

echo "Current working directory is `pwd`"

# Run the full IMB-MPI1 suite; uncomment one of the lines below
# to run a single benchmark instead
##mpirun ./IMB-MPI1 SendRecv
##mpirun ./IMB-MPI1 PingPong
mpirun ./IMB-MPI1
echo "Program finished with exit code $? at: `date`"
```
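To run it, save the script under a name of your choice (`imb.sbatch` below is just a placeholder) and hand it to `sbatch`; stdout and stderr land in the `job.<jobid>.out` and `job.<jobid>.err` files named by the `--output`/`--error` directives above:

```
sbatch imb.sbatch      # prints: Submitted batch job <jobid>
squeue -u $USER        # watch the job go from PD (pending) to R (running)
less job.<jobid>.out   # benchmark results once the job finishes
```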
## More Slurm info