Telemac on HPC Systems (openTELEMAC, TELEMAC-MASCARET)
What is Telemac?
Telemac is a powerful, open-source modelling and simulation software for free-surface flows in e.g. hydrodynamics and hydrology.
LRZ does not officially support Telemac, but users can build it on their own.
Getting Started
Installation
The installation can be considerably complicated by the requirements for additional software packages and features. Here we restrict ourselves to only a few components, such as MUMPS and SALOME-MED.
From Source
# --- User configuration -------------------------------------------------
# Target prefix for the whole installation; MUST be edited before running.
INSTALL_DIR=<Please specify!!>
# Version tags checked out from the respective upstream git repositories.
TELEMAC_VERSION=v9.1.0
PYTHON_VERSION=v3.14.0
METIS_VERSION=v5.2.1
SCOTCH_VERSION=v7.0.11
# =====================================================================
# Parallelism used for "make -j" / "cmake --build -j" during the builds.
MAKE_PARALLEL=70
install_python() {
  # Build a private CPython (plus libffi, required for ctypes) into the
  # third-party prefix, then pre-install the Python packages TELEMAC needs.
  # Skipped when a python3 binary already exists in the prefix.
  if [ ! -f "$THIRD_PARTY_INSTALL_DIR/bin/python3" ]; then
    git clone https://github.com/libffi/libffi.git
    pushd libffi/
    git checkout v3.5.2
    ./autogen.sh
    ./configure --prefix="$THIRD_PARTY_INSTALL_DIR"
    make -j "$MAKE_PARALLEL"
    make install
    popd
    git clone https://github.com/python/cpython.git
    pushd cpython/
    git checkout "$PYTHON_VERSION"
    ./configure --prefix="$THIRD_PARTY_INSTALL_DIR" --enable-optimizations
    make -j "$MAKE_PARALLEL"
    make install
    popd
    # Convenience symlinks so plain "python"/"pip" resolve to the new build.
    pushd "$THIRD_PARTY_INSTALL_DIR/bin"
    ln -s python3 python
    ln -s pip3 pip
    popd
    # Call the freshly built pip explicitly instead of relying on PATH order.
    "$THIRD_PARTY_INSTALL_DIR/bin/pip3" install numpy scipy matplotlib
  fi
}
install_gklib() {
  # Install KarypisLab GKlib (static library), a prerequisite of METIS and
  # ParMETIS. NOTE(review): the clone is not pinned to a tag, so this builds
  # whatever HEAD is at run time — consider pinning for reproducibility.
  if [ ! -f "$THIRD_PARTY_INSTALL_DIR/lib64/libGKlib.a" ]; then
    git clone https://github.com/KarypisLab/GKlib.git gklib
    pushd gklib
    make config cc=gcc prefix="$THIRD_PARTY_INSTALL_DIR"
    make -j "$MAKE_PARALLEL"
    make install
    popd
  fi
}
install_metis() {
  # Build METIS as a shared library on top of the previously installed GKlib.
  if [ ! -f "$THIRD_PARTY_INSTALL_DIR/lib/libmetis.so" ]; then
    git clone https://github.com/KarypisLab/METIS.git metis
    pushd metis
    git checkout "$METIS_VERSION"
    make config shared=1 cc=gcc prefix="$THIRD_PARTY_INSTALL_DIR" gklib_path="$THIRD_PARTY_INSTALL_DIR"
    # Explicit parallel build before install, consistent with the other steps
    # ("make install" alone would build serially).
    make -j "$MAKE_PARALLEL"
    make install
    popd
  fi
}
install_parmetis() {
  # Build ParMETIS (MPI-parallel METIS) as a shared library, compiled with
  # mpicc against the GKlib and METIS installations in the same prefix.
  # NOTE(review): the clone is not pinned to a tag — consider pinning.
  if [ ! -f "$THIRD_PARTY_INSTALL_DIR/lib/libparmetis.so" ]; then
    git clone https://github.com/KarypisLab/ParMETIS.git parmetis
    pushd parmetis
    make config shared=1 cc=mpicc prefix="$THIRD_PARTY_INSTALL_DIR" gklib_path="$THIRD_PARTY_INSTALL_DIR" metis_path="$THIRD_PARTY_INSTALL_DIR"
    # Explicit parallel build before install, consistent with the other steps.
    make -j "$MAKE_PARALLEL"
    make install
    popd
  fi
}
install_scotch() {
  # Build SCOTCH/PT-SCOTCH (shared libraries, 32-bit integer indices) with
  # CMake against the Intel MPI installation. The esmumps/ptesmumps libraries
  # produced here are linked later by the MUMPS build.
  if [ ! -f "$THIRD_PARTY_INSTALL_DIR/lib64/libscotch.so" ]; then
    git clone https://gitlab.inria.fr/scotch/scotch.git scotch
    pushd scotch
    git checkout "$SCOTCH_VERSION"
    cmake -DBUILD_SHARED_LIBS=ON \
      -DCMAKE_Fortran_COMPILER=gfortran \
      -DCMAKE_C_FLAGS="-O3 -march=x86-64-v4" \
      -DCMAKE_Fortran_FLAGS="-O3 -march=x86-64-v4" \
      -DCMAKE_C_COMPILER=gcc \
      -DMPI_HOME="$INTEL_ONEAPI_MPI_BASE" \
      -DCMAKE_INSTALL_PREFIX="$THIRD_PARTY_INSTALL_DIR" \
      -DINTSIZE=32 -DIDXSIZE=32 \
      -DTHREADS=OFF -DMPI_THREAD_MULTIPLE=OFF \
      -DINSTALL_METIS_HEADERS=OFF \
      -S . \
      -B build
    cmake --build build -j "$MAKE_PARALLEL"
    cmake --install build
    popd
  fi
}
install_mumps() {
# Build MUMPS 5.7.3 (shared libraries) with SCOTCH/PT-SCOTCH, ParMETIS and
# PORD orderings, linked against Intel MKL (BLAS/LAPACK/ScaLAPACK).
# The Makefile.inc below is written via an UNQUOTED heredoc on purpose:
#   - plain $VARS and $(pkg-config ...) are expanded by the shell NOW,
#   - \$(...) is escaped so it survives as a literal make variable reference.
if [ ! -f $THIRD_PARTY_INSTALL_DIR/lib/libmumps_common.so ] ; then
wget https://mumps-solver.org/MUMPS_5.7.3.tar.gz
tar xf MUMPS_5.7.3.tar.gz
pushd MUMPS_5.7.3/
cat > Makefile.inc << EOT
SCOTCHDIR = $THIRD_PARTY_INSTALL_DIR
ISCOTCH = -I\$(SCOTCHDIR)/include
LSCOTCH = -L\$(SCOTCHDIR)/lib64 -lptesmumps -lptscotch -lptscotcherr -lesmumps -lscotch -lscotcherr
LPORDDIR = \$(topdir)/PORD/lib/
IPORD = -I\$(topdir)/PORD/include/
LPORD = -L\$(LPORDDIR) -lpord\$(PLAT)
LMETISDIR = \$(SCOTCHDIR)/lib
IMETIS = \$(ISCOTCH)
LMETIS = -L\$(LMETISDIR) -lparmetis -lmetis
ORDERINGSF = -Dscotch -Dmetis -Dpord -Dptscotch -Dparmetis
ORDERINGSC = \$(ORDERINGSF)
LORDERINGS = \$(LMETIS) \$(LPORD) \$(LSCOTCH)
IORDERINGSF = \$(ISCOTCH)
IORDERINGSC = \$(IMETIS) \$(IPORD) \$(ISCOTCH)
PLAT =
LIBEXT = .a
LIBEXT_SHARED = .so
SONAME = -soname
SHARED_OPT = -shared
FPIC_OPT = -fPIC
OUTC = -o
OUTF = -o
RM = /bin/rm -f
CC = mpicc
FC = mpif90
FL = mpif90
AR = ar vr
RANLIB = echo
# The pkg-config call below is expanded by the installing shell, not by make.
LAPACK = -Wl,-rpath,\$(MKLROOT) $(pkg-config --libs mkl-dynamic-lp64-seq)
SCALAP = -Wl,-rpath,\$(MKLROOT) -Wl,--start-group -L\$(MKLROOT)/lib/ -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -Wl,--end-group -lpthread -lm -ldl
LIBPAR = \$(SCALAP) \$(LAPACK)
INCSEQ = -I\$(topdir)/libseq
LIBSEQ = \$(LAPACK) -L\$(topdir)/libseq -lmpiseq\$(PLAT)
LIBBLAS = -Wl,-rpath,\$(MKLROOT) $(pkg-config --libs mkl-dynamic-lp64-seq)
LIBOTHERS = -lpthread -lm -ldl
CDEFS = -DAdd_
OPTF = -O3 -march=x86-64-v4 -DGEMMT_AVAILABLE
OPTL = -O3 -march=x86-64-v4
OPTC = -O3 -march=x86-64-v4
INCS = \$(INCPAR)
LIBS = \$(LIBPAR)
LIBSEQNEEDED =
EOT
make -j $MAKE_PARALLEL allshared
# MUMPS ships no "make install" target; copy the artifacts manually.
cp lib/*.so $THIRD_PARTY_INSTALL_DIR/lib
cp include/*.h $THIRD_PARTY_INSTALL_DIR/include
popd
fi
}
install_hdf5() {
  # Build parallel HDF5 1.12.1 with the MPI compiler wrappers; the Fortran and
  # high-level interfaces are required by the MED library built afterwards.
  if [ ! -f "$THIRD_PARTY_INSTALL_DIR/lib/libhdf5.so" ]; then
    git clone https://github.com/HDFGroup/hdf5.git
    pushd hdf5
    git checkout hdf5-1_12_1
    ./configure \
      CFLAGS="-O3 -march=x86-64-v4" \
      CXXFLAGS="-O3 -march=x86-64-v4" \
      FCFLAGS="-O3 -march=x86-64-v4" \
      CC="$(command -v mpicc)" CXX="$(command -v mpicxx)" FC="$(command -v mpif90)" \
      --prefix="$THIRD_PARTY_INSTALL_DIR" \
      --enable-parallel --enable-hl --enable-fortran
    make -j "$MAKE_PARALLEL"
    make install
    popd
  fi
}
install_med() {
  # Build the SALOME MED-file library 5.0.0 on top of the parallel HDF5 built
  # before. A browser-like user agent is needed because the download server
  # rejects wget's default one.
  if [ ! -f "$THIRD_PARTY_INSTALL_DIR/lib/libmed.so" ]; then
    wget --user-agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/120.0.0.0 Safari/537.36" https://files.salome-platform.org/Salome/medfile/med-5.0.0.tar.bz2
    tar xf med-5.0.0.tar.bz2
    pushd med-5.0.0
    cmake -DCMAKE_INSTALL_PREFIX="$THIRD_PARTY_INSTALL_DIR" \
      -DHDF5_DIR="$THIRD_PARTY_INSTALL_DIR" \
      -DCMAKE_C_FLAGS="-O3 -march=x86-64-v4" \
      -DCMAKE_Fortran_FLAGS="-O3 -march=x86-64-v4" \
      -DMED_MEDINT_TYPE=int \
      -S . -B build
    cmake --build build -j "$MAKE_PARALLEL"
    cmake --install build
    popd
  fi
}
install_telemac() {
  # Clone and build TELEMAC-MASCARET itself against all the third-party
  # components installed above. git-lfs is fetched first — presumably the
  # repository contains LFS-tracked files (TODO confirm).
  if [ ! -f "$INSTALL_DIR/$TELEMAC_VERSION/bin/telemac3d" ] ; then
    wget https://github.com/git-lfs/git-lfs/releases/download/v3.7.1/git-lfs-linux-amd64-v3.7.1.tar.gz
    tar xf git-lfs-linux-amd64-v3.7.1.tar.gz
    export PATH=$PATH:$PWD/git-lfs-3.7.1
    # Register the LFS smudge/clean filters so the clone materializes LFS
    # content instead of pointer files (idempotent; safe if already set up).
    git lfs install
    git clone https://gitlab.pam-retd.fr/otm/telemac-mascaret.git telemac
    pushd telemac/
    git checkout "$TELEMAC_VERSION"
    cmake \
      -DCMAKE_INSTALL_PREFIX="$INSTALL_DIR/$TELEMAC_VERSION" \
      -DCMAKE_C_FLAGS="-O3 -march=x86-64-v4" \
      -DCMAKE_Fortran_FLAGS="-O3 -march=x86-64-v4" \
      -DUSE_MPI=ON -DUSE_MUMPS=ON \
      -DMETIS_INCLUDE_DIR="$THIRD_PARTY_INSTALL_DIR/include" \
      -DMETIS_LIBRARY="$THIRD_PARTY_INSTALL_DIR/lib/libmetis.so" \
      -DMUMPS_ROOT="$THIRD_PARTY_INSTALL_DIR" \
      -DMUMPS_s_FOUND=ON -DMUMPS_d_FOUND=ON \
      -DUSE_MED=ON \
      -DMED_ROOT="$THIRD_PARTY_INSTALL_DIR" \
      -S . -B build
    cmake --build build -j "$MAKE_PARALLEL"
    cmake --install build
    popd
  fi
}
# =====================================================================
# Build environment: load compiler/MPI/MKL/CMake modules, point all search
# paths at the third-party prefix, then run the installation steps in order.
COMPILER_MODULE=gcc/14.2.0
module load $COMPILER_MODULE
module load intel-mpi
module load intel-mkl
module load cmake
THIRD_PARTY_INSTALL_DIR=$INSTALL_DIR/3rd_party
mkdir -p "$THIRD_PARTY_INSTALL_DIR"
export LD_LIBRARY_PATH=$THIRD_PARTY_INSTALL_DIR/lib64:$THIRD_PARTY_INSTALL_DIR/lib:$LD_LIBRARY_PATH
export LIBRARY_PATH=$THIRD_PARTY_INSTALL_DIR/lib64:$LIBRARY_PATH
# Fixed typo: CMake package configuration files live under lib64/cmake
# (the original said "lib64/cake", which CMake would never search).
export CMAKE_PREFIX_PATH=$THIRD_PARTY_INSTALL_DIR/lib64/cmake:$CMAKE_PREFIX_PATH
export PKG_CONFIG_PATH=$THIRD_PARTY_INSTALL_DIR/lib/pkgconfig:$PKG_CONFIG_PATH
export PATH=$THIRD_PARTY_INSTALL_DIR/bin:$PATH
# Order matters: GKlib before METIS before ParMETIS; SCOTCH and ParMETIS
# before MUMPS; HDF5 before MED; everything before TELEMAC itself.
install_python
install_gklib
install_metis
install_parmetis
install_scotch
install_mumps
install_hdf5
install_med
install_telemac
# Generate a gfortran wrapper that always adds -fPIC and the TELEMAC module
# include path; it shadows the real compiler via PATH when the module loads.
if [ ! -f "$INSTALL_DIR/$TELEMAC_VERSION/bin/gfortran" ]; then
cat > "$INSTALL_DIR/$TELEMAC_VERSION/bin/gfortran" << EOT
#!/bin/bash
exec \$GCC_BASE/bin/gfortran -fPIC -I$INSTALL_DIR/$TELEMAC_VERSION/modules "\$@"
EOT
chmod a+x "$INSTALL_DIR/$TELEMAC_VERSION/bin/gfortran"
fi
# Generate an Environment Modules (Tcl) modulefile for the new installation.
# In this unquoted heredoc, plain $VARS are expanded by the shell NOW, while
# \$-escaped references are written literally and resolved by Tcl at load
# time. The "#%Module1.0" magic cookie must remain the first line.
mkdir -p $INSTALL_DIR/modules/telemac
cat > $INSTALL_DIR/modules/telemac/$TELEMAC_VERSION << EOT
#%Module1.0
module-whatis {TELEMAC: Hydrology Simulation Toolkit:CFD}
proc ModulesHelp { } {
puts stderr {Name : telemac}
puts stderr {Version: $TELEMAC_VERSION}
puts stderr {Target : x86_64_v4}
puts stderr {}
puts stderr {TELEMAC hydrology simulation toolkit}
}
set VERSION "$TELEMAC_VERSION"
set BASE_DIR "$INSTALL_DIR"
conflict telemac
prereq $COMPILER_MODULE
prereq intel-mpi
prereq intel-mkl
set TELEMAC_BASE \${BASE_DIR}/\${VERSION}
set TELEMAC_3RD_PARTY \${BASE_DIR}/3rd_party
setenv TELEMAC_BASE \${TELEMAC_BASE}
prepend-path LD_LIBRARY_PATH \${TELEMAC_3RD_PARTY}/lib
prepend-path LD_LIBRARY_PATH \${TELEMAC_3RD_PARTY}/lib64
prepend-path PATH \${TELEMAC_3RD_PARTY}/bin
prepend-path LD_LIBRARY_PATH \${TELEMAC_BASE}/lib
prepend-path PATH \${TELEMAC_BASE}/bin
prepend-path PATH \${TELEMAC_BASE}/scripts/python3
prepend-path PYTHONPATH \${TELEMAC_BASE}/scripts/python3
EOT
The script should be executed via sh install_telemac.sh after it has been correctly modified (installation target path, modules, versions).
This script was used to build Telemac 9.1.0 (see below "Concession").
Also a module is created, which can be found and loaded after adding the module path via module use <path-to-modules>.
Conda/Micromamba
Conda-forge contains some Telemac packages.
> module load micromamba > micromamba search -c conda-forge opentelemac > micromamba create -n my_telemac -c conda-forge opentelemac==v9.0.0 # for instance > micromamba activate my_telemac
As far as we could see, these packages are built using OpenMPI, and only with generic x86-64 optimization (no AVX vectorization). They also lack MUMPS support. Great performance is therefore not to be expected.
Spack Environment
You can use the spack (or other) package management tools to install the required dependencies. And then build telemac on top of these.
Spack environments (and views) are quite nice for this enterprise.
Usage
After loading the module, or activating a conda/micromamba environment, one can investigate the functionality of telemac on some of the examples. Please clone or download some from the corresponding gitlab page.
> cd telemac/examples/telemac3d/cone/
> salloc -n 4 -p cm4_inter -t 1:00:00
salloc: Granted job allocation 356265
> module use /lrz/sys/share/modules/extfiles
> module load telemac
> telemac3d.py t3d_cone.cas --mpi --ncsize $SLURM_NTASKS
Loading configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
___ __ _
/ _ \ /_ | | |
| (_) | | | ______ __| | ___ __ __
\__, | | ||______| / _` | / _ \\ \ / /
/ / _ | | | (_| || __/ \ V /
/_/ (_) |_| \__,_| \___| \_/
Running your CAS file(s) for:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+> root: /dss/lrzsys/sys/applications/telemac/v9.1.0
+> build: /dss/lrzsys/sys/applications/telemac/v9.1.0
+> modules: api / artemis / bief / damocles
gaia / gretel / hermes / khione
mascaret / nestor / parallel / partel
postel3d / special / stbtel / telemac2d
telemac3d / tomawac / waqtel
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
... processing the steering file
~> Checking keyword/rubrique coherence
... checking parallelisation
... handling temporary directories
copying: t3d_cone.cas -> /dss/lxclscratch/00/XXXXXXX/tmp_telemac/telemac/examples/telemac3d/cone/t3d_cone.cas_2026-03-12-09h27min36s/T3DCAS
copying: telemac3d.dico -> /dss/lxclscratch/00/XXXXXXX/tmp_telemac/telemac/examples/telemac3d/cone/t3d_cone.cas_2026-03-12-09h27min36s/T3DDICO
copying: geo_cone.cli -> /dss/lxclscratch/00/XXXXXXX/tmp_telemac/telemac/examples/telemac3d/cone/t3d_cone.cas_2026-03-12-09h27min36s/T3DCLI
copying: geo_cone.slf -> /dss/lxclscratch/00/XXXXXXX/tmp_telemac/telemac/examples/telemac3d/cone/t3d_cone.cas_2026-03-12-09h27min36s/T3DGEO
... partitioning base files (geo, conlim, sections, zones and weirs)
+> /dss/lrzsys/sys/applications/telemac/v9.1.0/bin/partel < partel_T3DGEO.par >> partel_T3DGEO.log
STOP 0
... splitting / copying other input files
... checking the executable
~> Compiling Fortran files
compiling: user_condi3d_trac.f ... completed
compiling: user_condi3d_uvw.f ... completed
compiling: user_preres_telemac3d.f ... completed
compiling: wave_equation.f ... completed
created: libuser_fortran.so
Running your simulation(s) :
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In /dss/lxclscratch/00/XXXXXXX/tmp_telemac/telemac/examples/telemac3d/cone/t3d_cone.cas_2026-03-12-09h27min36s:
mpiexec -n 4 /dss/lxclscratch/00/XXXXXXX/tmp_telemac/telemac/examples/telemac3d/cone/t3d_cone.cas_2026-03-12-09h27min36s/telemac3d
MASTER PROCESSOR NUMBER 0 OF THE GROUP OF 4
EXECUTABLE FILE: /dss/lxclscratch/00/XXXXXXX/tmp_telemac/telemac/examples/telemac3d/cone/t3d_cone.cas_2026-03-12-09h27min36s/A.EXE
LISTING OF TELEMAC3D------------------------------------------------------------------------------
TTTTT EEEEE L EEEEE M M AAAAA CCCCC
T E L E MM MM A A C
T EEE L EEE M M M AAAAA C
T E L E M M A A C
T EEEEE LLLLL EEEEE M M A A CCCCC
3D VERSION 9.1 FORTRAN 2003
~^~^~^~^~^~^~^~^~^~^~^^~^~^~^~^~^~
~ ~
\ ' o '
/\ o \ o
>=)'> ' /\ '
\/ \ >=)'> ~
/ /\ \/
~ >=)'> / .
\/ )
/ (
~ ) )
} ~ ( ( (
{ ) ) )
} } . ( ( (
{ { /^^^^^^^^^^^^
^^^^^^^^^\ /
^^^^^^^^^
********************************************
* LECDON: *
* BEFORE CALLING DAMOCLES *
********************************************
DIFFERENT NUMBER OF PARALLEL PROCESSORS:
DECLARED BEFORE (CASE OF COUPLING ?): 4
TELEMAC-3D : 0
VALUE 4 IS KEPT
********************************************
* LECDON: *
* AFTER CALLING DAMOCLES *
* CHECKING OF DATA READ *
* IN THE STEERING FILE *
********************************************
TREATMENT OF NEGATIVE DEPTHS = 2
MASS-LUMPING FOR DEPTH AUTOMATICALLY SET TO 1.D0
EXITING LECDON. NAME OF THE STUDY:
TELEMAC 3D: ROTATING CONE TEST
OPENING FILES FOR TELEMAC3D
OPENING: T3DGEO-geo_cone.slf
OPENING: T3DCLI-geo_cone.cli
OPENING: T3DRES-r3d_cone.slf
OPENING: T3DHYD-r2d_cone.slf
POINT_TELEMAC3D: MEMORY ALLOCATION
READ_MESH_INFO: TITLE= TELEMAC 2D : GOUTTE D'EAU DANS UN BASSIN$
NUMBER OF ELEMENTS: 2244
NUMBER OF POINTS: 1190
TYPE OF ELEMENT: TRIANGLE
TYPE OF BND ELEMENT: POINT
SINGLE PRECISION FORMAT (R4)
MXPTEL (BIEF) : MAXIMUM NUMBER OF ELEMENTS AROUND A POINT: 7
MAXIMUM NUMBER OF POINTS AROUND A POINT: 8
(GLOBAL MESH)
SEGBOR (BIEF) : NUMBER OF BOUNDARY SEGMENTS = 134
INCLUDING THOSE DUE TO DOMAIN DECOMPOSITION
USER_CORRXY (BIEF):NO MODIFICATION OF COORDINATES
MESH: MESH2D ALLOCATED
READ_MESH_INFO: TITLE= TELEMAC 2D : GOUTTE D'EAU DANS UN BASSIN$
NUMBER OF ELEMENTS: 2244
NUMBER OF POINTS: 1190
TYPE OF ELEMENT: TRIANGLE
TYPE OF BND ELEMENT: POINT
SINGLE PRECISION FORMAT (R4)
MXPTEL (BIEF) : MAXIMUM NUMBER OF ELEMENTS AROUND A POINT: 7
MAXIMUM NUMBER OF POINTS AROUND A POINT: 8
(GLOBAL MESH)
SEGBOR (BIEF) : NUMBER OF BOUNDARY SEGMENTS = 134
INCLUDING THOSE DUE TO DOMAIN DECOMPOSITION
USER_CORRXY (BIEF):NO MODIFICATION OF COORDINATES
MESH: MESH3D ALLOCATED
[...]
--- TRACER12: NSC_TF , UNIT : TRACER 12 * M3)
INITIAL QUANTITY OF TRACER : 6.283183
FINAL QUANTITY OF TRACER : 6.148524
QUANTITY EXITING (BOUNDARY/SOURCE) : 0.1346589
TOTAL QUANTITY OF TRACER LOST : 0.1279372E-07
END OF TIME LOOP
EXITING MPI
*************************************
* END OF MEMORY ORGANIZATION: *
*************************************
CORRECT END OF RUN
ELAPSE TIME :
38 SECONDS
Note: The following floating-point exceptions are signalling: IEEE_UNDERFLOW_FLAG IEEE_DENORMAL
STOP 0
Note: The following floating-point exceptions are signalling: IEEE_UNDERFLOW_FLAG IEEE_DENORMAL
STOP 0
Note: The following floating-point exceptions are signalling: IEEE_UNDERFLOW_FLAG IEEE_DENORMAL
STOP 0
Note: The following floating-point exceptions are signalling: IEEE_UNDERFLOW_FLAG IEEE_DENORMAL
STOP 0
... merging separated result files
collecting: T3DRES
+> /dss/lrzsys/sys/applications/telemac/v9.1.0/bin/gretel < gretel_T3DRES.par >> gretel_T3DRES.log
Note: The following floating-point exceptions are signalling: IEEE_DENORMAL
STOP 0
collecting: T3DHYD
+> /dss/lrzsys/sys/applications/telemac/v9.1.0/bin/gretel < gretel_T3DHYD.par >> gretel_T3DHYD.log
Note: The following floating-point exceptions are signalling: IEEE_DENORMAL
STOP 0
... handling result files
moving: r3d_cone.slf
moving: r2d_cone.slf
... deleting working dir
My work is done
These steps can also be done within a non-interactive Slurm job, which we usually prefer. Note that Telemac usually doesn't call the executables and mpiexec directly, but through the python wrappers!
The usage from within python is also possible.
# Example: drive a TELEMAC-3D run from Python by emulating the command line
# of the telemac3d.py wrapper and calling its main() entry point directly.
import sys
from telemac3d import main

# Fake argv exactly as the telemac3d.py CLI wrapper would receive it.
sys.argv = [
    'telemac3d.py',
    'telemac3d',
    '--ncsize=4',
    '--mpi',
    't3d_cone.cas'
]

if __name__ == "__main__":
    # NOTE(review): message is German ("Starting TELEMAC-3D simulation ...
    # with 4 ranks"); left unchanged since it is runtime output.
    print(f"Starte TELEMAC-3D Simulation: t3d_cone.cas mit 4 Ranks...")
    main()
This script can be started via python telemac3d_cone.py. (We cannot tell how this is supposed to be better than using telemac3d.py directly in a bash shell environment. If a user could provide a more reasonable example, we would appreciate it.)
Concession
We've installed some Telemac package with MUMPS and Intel MPI as outlined in the installation script above. If you want to use them, please check the following out.
> module use /lrz/sys/share/modules/extfiles > module avail telemac > module load telemac
We cannot give any guarantee for full functionality, as we didn't test all features. When you notice some issues, please give us feedback through our ServiceDesk.