The setup is already described with the Linux client.
```
module av

--------------------------------------------------- /share/modulefiles ---------------------------------------------------
   MATLAB/R2018b                                NeuroImaging/fsl/5.0.9        Pharmaceutical/pirana/2.9.8 (D)    library/cuda/10.2                (D)    singularity/3.4.2
   MATLAB/R2019b                          (D)   NeuroImaging/fsl/5.0.10       library/cuda/7.5                   library/openblas/0.3.7/gcc-8.2.1        singularity/3.5.1 (D)
   NeuroImaging/freesurfer/stable-pub-v6.0.0    NeuroImaging/fsl/5.0.11       library/cuda/8.0                   mpi/mpich/3.3.2/gcc-8.2.1               system/hwloc/1.11.13/gcc-8.2.1
   NeuroImaging/fsl/fsleyes                     NeuroImaging/fsl/6.0.0        library/cuda/9.0                   mpi/openmpi/3.1.5/gcc-8.2.1             system/hwloc/2.1.0/gcc-8.2.1
   NeuroImaging/fsl/5.0.4                       NeuroImaging/fsl/6.0.1        library/cuda/9.1                   mpi/openmpi/4.0.2/gcc-8.2.1      (L)    system/knem/1.1.3/gcc-8.2.1
   NeuroImaging/fsl/5.0.6                       NeuroImaging/fsl/6.0.2        library/cuda/9.2                   pmi/openpmix/2.2.3/gcc-8.2.1
   NeuroImaging/fsl/5.0.7                       NeuroImaging/fsl/6.0.3 (D)    library/cuda/10.0                  pmi/openpmix/3.1.4/gcc-8.2.1
   NeuroImaging/fsl/5.0.8                       Pharmaceutical/pirana/2.9.7   library/cuda/10.1                  pmi/openpmix/4.0.0/gcc-8.2.1

---------------------------------------------- /share/software/easybuild/modules ----------------------------------------------
   all/EasyBuild/4.0.1    tools/EasyBuild/4.0.1

...

   lmod    settarg

  Where:
   L:  Module is loaded
   D:  Default Module

Use "module spider" to find all possible modules and extensions.

...

module purge
```
## Compiling programs
- vi hello.c
```
#include <stdio.h>
#include "mpi.h"

int main (int argc, char *argv[])
{
   int i, id, np, processor_name_len;
   char processor_name[MPI_MAX_PROCESSOR_NAME];

   MPI_Init (&argc, &argv);
   MPI_Comm_size (MPI_COMM_WORLD, &np);
   MPI_Comm_rank (MPI_COMM_WORLD, &id);
   MPI_Get_processor_name (processor_name, &processor_name_len);

   for (i=1; i<2; i++)
      printf ("Hello world from process %03d out of %03d, processor name %s \n", id, np, processor_name);
   MPI_Finalize ();
   return 0;
}
```
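The program is built with mpicc, the MPI compiler wrapper provided by the loaded MPI module, which passes the MPI include and library flags to the underlying C compiler: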
- mpicc hello.c -o hello
- ./hello
```
Hello world from process 000 out of 001, processor name res-hpc-lo01.researchlumc.nl
```
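Run directly, the binary starts as a single MPI process (hence "out of 001" above); mpirun launches one copy per available slot: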
- mpirun ./hello
```
Hello world from process 003 out of 016, processor name res-hpc-lo01.researchlumc.nl
...
Hello world from process 008 out of 016, processor name res-hpc-lo01.researchlumc.nl
Hello world from process 009 out of 016, processor name res-hpc-lo01.researchlumc.nl
Hello world from process 014 out of 016, processor name res-hpc-lo01.researchlumc.nl
```
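The ranks print in no particular order, because every process writes to stdout independently. The second example below serializes the output: each rank sends its greeting to rank 0, which prints all greetings in rank order.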
- vi hello2.c
```
#include "mpi.h"
#include "string.h"
#include <stdio.h>
int main (int argc, char *argv[])
{
int numprocs, myrank, namelen, i;
char processor_name[MPI_MAX_PROCESSOR_NAME];
char greeting[MPI_MAX_PROCESSOR_NAME + 80];
MPI_Status status;
MPI_Init (&argc, &argv);
MPI_Comm_size (MPI_COMM_WORLD, &numprocs);
MPI_Comm_rank (MPI_COMM_WORLD, &myrank);
MPI_Get_processor_name (processor_name, &namelen);
sprintf (greeting, "Hello world, from process %d of %d on %s", myrank, numprocs, processor_name);
if (myrank == 0)
{
printf ("%s\n", greeting);
for (i = 1; i < numprocs; i++ )
{
MPI_Recv (greeting, sizeof (greeting), MPI_CHAR, i, 1, MPI_COMM_WORLD, &status);
printf ("%s\n", greeting);
}
}
else
MPI_Send (greeting, strlen (greeting) + 1, MPI_CHAR, 0, 1, MPI_COMM_WORLD);
MPI_Finalize ();
return 0;
}
```
- mpicc hello2.c -o hello2
- mpirun ./hello2
```
Hello world, from process 0 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 1 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 2 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 3 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 4 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 5 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 6 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 7 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 8 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 9 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 10 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 11 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 12 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 13 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 14 of 16 on res-hpc-lo01.researchlumc.nl
Hello world, from process 15 of 16 on res-hpc-lo01.researchlumc.nl
```
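The same funnel-to-rank-0 pattern can be written with a single collective call instead of the explicit send/receive loop. The following is a minimal sketch, not part of the original page, that swaps the MPI_Send/MPI_Recv pair for MPI_Gather; rank 0 receives one fixed-size slot per rank, already ordered by rank:
```
#include "mpi.h"
#include <stdio.h>
#include <stdlib.h>

int main (int argc, char *argv[])
{
   int numprocs, myrank, namelen, i;
   char processor_name[MPI_MAX_PROCESSOR_NAME];
   char greeting[MPI_MAX_PROCESSOR_NAME + 80];
   char *all = NULL;

   MPI_Init (&argc, &argv);
   MPI_Comm_size (MPI_COMM_WORLD, &numprocs);
   MPI_Comm_rank (MPI_COMM_WORLD, &myrank);
   MPI_Get_processor_name (processor_name, &namelen);
   sprintf (greeting, "Hello world, from process %d of %d on %s", myrank, numprocs, processor_name);

   /* Only the root needs a receive buffer: one fixed-size slot per rank. */
   if (myrank == 0)
      all = malloc ((size_t) numprocs * sizeof greeting);

   /* Every rank contributes sizeof(greeting) bytes; MPI_Gather places
      them in the root buffer in rank order. */
   MPI_Gather (greeting, sizeof greeting, MPI_CHAR,
               all, sizeof greeting, MPI_CHAR, 0, MPI_COMM_WORLD);

   if (myrank == 0)
   {
      for (i = 0; i < numprocs; i++)
         printf ("%s\n", all + (size_t) i * sizeof greeting);
      free (all);
   }

   MPI_Finalize ();
   return 0;
}
```
MPI_Gather requires every rank to contribute the same number of bytes, which is why the whole greeting buffer is sent rather than strlen (greeting) + 1 as in hello2.c.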
## Submitting jobs
```
module purge
module add mpi/openmpi/4.0.2/gcc-8.2.1
git clone https://github.com/intel/mpi-benchmarks
cd mpi-benchmarks
make clean
...