diff --git a/intro-HPC/examples/Compiling-and-testing-your-software-on-the-HPC/mpihello.pbs b/intro-HPC/examples/Compiling-and-testing-your-software-on-the-HPC/mpihello.pbs
index 9d4783308e9..47aeea0c757 100644
--- a/intro-HPC/examples/Compiling-and-testing-your-software-on-the-HPC/mpihello.pbs
+++ b/intro-HPC/examples/Compiling-and-testing-your-software-on-the-HPC/mpihello.pbs
@@ -12,6 +12,6 @@ cd $PBS_O_WORKDIR
 
 # load the environment
 module purge
-module load intel
+module load foss
 
 mpirun ./mpihello
diff --git a/intro-HPC/examples/HPC-UGent-GPU-clusters/TensorFlow_GPU.sh b/intro-HPC/examples/HPC-UGent-GPU-clusters/TensorFlow_GPU.sh
index ff4e2aa558c..52c8be06ba0 100644
--- a/intro-HPC/examples/HPC-UGent-GPU-clusters/TensorFlow_GPU.sh
+++ b/intro-HPC/examples/HPC-UGent-GPU-clusters/TensorFlow_GPU.sh
@@ -2,7 +2,7 @@
 #PBS -l walltime=5:0:0
 #PBS -l nodes=1:ppn=quarter:gpus=1
 
-module load TensorFlow/2.6.0-foss-2021a-CUDA-11.3.1
+module load TensorFlow/2.11.0-foss-2022a-CUDA-11.7.0
 
 cd $PBS_O_WORKDIR
 python example.py
diff --git a/intro-HPC/examples/Job-script-examples/multi_core.sh b/intro-HPC/examples/Job-script-examples/multi_core.sh
index ce7426bff03..3a38689de0b 100644
--- a/intro-HPC/examples/Job-script-examples/multi_core.sh
+++ b/intro-HPC/examples/Job-script-examples/multi_core.sh
@@ -2,7 +2,7 @@
 #PBS -N mpi_hello ## job name
 #PBS -l nodes=2:ppn=all ## 2 nodes, all cores per node
 #PBS -l walltime=2:00:00 ## max. 2h of wall time
-module load intel/2017b
+module load foss/2023a
 module load vsc-mympirun ## We don't use a version here, this is on purpose
 # go to working directory, compile and run MPI hello world
 cd $PBS_O_WORKDIR
diff --git a/intro-HPC/examples/Job-script-examples/single_core.sh b/intro-HPC/examples/Job-script-examples/single_core.sh
index f6ed99259e9..a406b09931d 100644
--- a/intro-HPC/examples/Job-script-examples/single_core.sh
+++ b/intro-HPC/examples/Job-script-examples/single_core.sh
@@ -2,7 +2,7 @@
 #PBS -N count_example ## job name
 #PBS -l nodes=1:ppn=1 ## single-node job, single core
 #PBS -l walltime=2:00:00 ## max. 2h of wall time
-module load Python/3.6.4-intel-2018a
+module load Python/3.11.3-GCCcore-12.3.0
 # copy input data from location where job was submitted from
 cp $PBS_O_WORKDIR/input.txt $TMPDIR
 # go to temporary working directory (on local disk) & run
diff --git a/intro-HPC/examples/MATLAB/jobscript.sh b/intro-HPC/examples/MATLAB/jobscript.sh
index 3a785317b70..849289c329a 100644
--- a/intro-HPC/examples/MATLAB/jobscript.sh
+++ b/intro-HPC/examples/MATLAB/jobscript.sh
@@ -7,7 +7,7 @@
 #
 
 # make sure the MATLAB version matches with the one used to compile the MATLAB program!
-module load MATLAB/2018a
+module load MATLAB/2022b-r5
 
 # use temporary directory (not $HOME) for (mostly useless) MATLAB log files
 # subdir in $TMPDIR (if defined, or /tmp otherwise)
diff --git a/intro-HPC/examples/Multi-core-jobs-Parallel-Computing/mpi_hello.pbs b/intro-HPC/examples/Multi-core-jobs-Parallel-Computing/mpi_hello.pbs
index 7101571d64d..ffb42b14fa7 100644
--- a/intro-HPC/examples/Multi-core-jobs-Parallel-Computing/mpi_hello.pbs
+++ b/intro-HPC/examples/Multi-core-jobs-Parallel-Computing/mpi_hello.pbs
@@ -11,6 +11,6 @@ cd $PBS_O_WORKDIR
 
 # load the environment
-module load intel
+module load foss
 
 mpirun ./mpi_hello
 
diff --git a/intro-HPC/examples/OpenFOAM/OpenFOAM_damBreak.sh b/intro-HPC/examples/OpenFOAM/OpenFOAM_damBreak.sh
index dab57a4cd33..ca1991e1a1e 100644
--- a/intro-HPC/examples/OpenFOAM/OpenFOAM_damBreak.sh
+++ b/intro-HPC/examples/OpenFOAM/OpenFOAM_damBreak.sh
@@ -2,7 +2,7 @@
 #PBS -l walltime=1:0:0
 #PBS -l nodes=1:ppn=4
 # check for more recent OpenFOAM modules with 'module avail OpenFOAM'
-module load OpenFOAM/6-intel-2018a
+module load OpenFOAM/11-foss-2023a
 source $FOAM_BASH
 # purposely not specifying a particular version to use most recent mympirun
 module load vsc-mympirun
@@ -15,7 +15,7 @@ export MYMPIRUN_VARIABLESPREFIX=WM_PROJECT,FOAM,MPI
 export WORKDIR=$VSC_SCRATCH_NODE/$PBS_JOBID # for single-node jobs
 mkdir -p $WORKDIR
 # damBreak tutorial, see also https://cfd.direct/openfoam/user-guide/dambreak
-cp -r $FOAM_TUTORIALS/multiphase/interFoam/laminar/damBreak/damBreak $WORKDIR
+cp -r $FOAM_TUTORIALS/incompressibleVoF/damBreakLaminar/damBreak $WORKDIR
 cd $WORKDIR/damBreak
 echo "working directory: $PWD"
 # pre-processing: generate mesh
diff --git a/intro-HPC/examples/Program-examples/04_MPI_C/mpihello.pbs b/intro-HPC/examples/Program-examples/04_MPI_C/mpihello.pbs
index c31d6002c5b..86e8e49271c 100644
--- a/intro-HPC/examples/Program-examples/04_MPI_C/mpihello.pbs
+++ b/intro-HPC/examples/Program-examples/04_MPI_C/mpihello.pbs
@@ -13,6 +13,6 @@ cd $PBS_O_WORKDIR
 
 # load the environment
 module purge
-module load intel
+module load foss
 
 mpirun ./mpihello