mirror of
https://code.it4i.cz/sccs/easyconfigs-it4i.git
synced 2025-04-07 15:32:11 +01:00

modified:  h/HDF5/HDF5-1.10.6-intel-2020b-parallel.eb
new file:  l/libcint/libcint-5.5.0-gfbf-2023b.eb
new file:  l/libxc/libxc-4.3.4-foss-2023b.eb
modified:  l/libxc/libxc-4.3.4-intel-2020b.eb
modified:  n/netCDF-Fortran/netCDF-Fortran-4.5.3-intel-2020b.eb
modified:  n/netCDF/netCDF-4.7.4-intel-2020b.eb
modified:  o/Octopus/Octopus-11.3-intel-2020b-mpi.eb
new file:  o/OpenMPI/OpenMPI-4.1.6-GCC-12.2.0-CUDA-12.4.0.eb
99 lines · 3.5 KiB · Plaintext
# IT4Innovations
|
|
# LK 2024
|
|
|
|
name = 'OpenMPI'
|
|
version = '4.1.6'
|
|
versionsuffix = '-CUDA-12.4.0'
|
|
|
|
homepage = 'https://www.open-mpi.org/'
|
|
description = """The Open MPI Project is an open source MPI-3 implementation."""
|
|
|
|
toolchain = {'name': 'GCC', 'version': '12.2.0'}
|
|
|
|
source_urls = ['https://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads']
|
|
sources = [SOURCELOWER_TAR_BZ2]
|
|
patches = [
|
|
'OpenMPI-4.1.1_build-with-internal-cuda-header.patch',
|
|
'OpenMPI-4.1.1_opal-datatype-cuda-performance.patch',
|
|
]
|
|
checksums = [
|
|
{'openmpi-4.1.6.tar.bz2': 'f740994485516deb63b5311af122c265179f5328a0d857a567b85db00b11e415'},
|
|
{'OpenMPI-4.1.1_build-with-internal-cuda-header.patch':
|
|
'63eac52736bdf7644c480362440a7f1f0ae7c7cae47b7565f5635c41793f8c83'},
|
|
{'OpenMPI-4.1.1_opal-datatype-cuda-performance.patch':
|
|
'b767c7166cf0b32906132d58de5439c735193c9fd09ec3c5c11db8d5fa68750e'},
|
|
]
|
|
|
|
builddependencies = [
|
|
('pkgconf', '1.9.3'),
|
|
('Perl', '5.36.0'),
|
|
('Autotools', '20220317'),
|
|
]
|
|
|
|
dependencies = [
|
|
('zlib', '1.2.12'),
|
|
('hwloc', '2.8.0'),
|
|
('libevent', '2.1.12'),
|
|
('UCX', '1.16.0',),
|
|
('UCX-CUDA', '1.16.0', '-CUDA-%(cudaver)s'),
|
|
('libfabric', '1.16.1'),
|
|
('PMIx', '4.2.2'),
|
|
('UCC', '1.3.0'),
|
|
('UCC-CUDA', '1.3.0', '-CUDA-%(cudaver)s'),
|
|
('CUDA', '12.4.0', '', True),
|
|
]
|
|
|
|
preconfigopts = './autogen.pl --force && '
|
|
|
|
# IT4I-specific settings
|
|
|
|
# CUDA related patches and custom configure option can be removed if CUDA support isn't wanted.
|
|
configopts = '--with-cuda=internal '
|
|
configopts += '--enable-shared --enable-mpi-thread-multiple --with-verbs '
|
|
configopts += '--enable-mpirun-prefix-by-default '
|
|
configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support
|
|
configopts += '--with-slurm ' # Enable slurm
|
|
configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
|
|
configopts += '--with-ucx=$EBROOTUCX '
|
|
configopts += '--with-cuda=$EBROOTCUDA '
|
|
#configopts += '--with-pmix=/opt/it4i-libs/PMIx/4.2.6 '
|
|
|
|
osdependencies = [('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel')]
|
|
|
|
postinstallcmds = [
|
|
'echo "# By default, for Open MPI 4.0 and later, infiniband ports on a device are not used by default." >> %(installdir)s/etc/openmpi-mca-params.conf',
|
|
'echo "btl_openib_allow_ib = true" >> %(installdir)s/etc/openmpi-mca-params.conf',
|
|
]
|
|
|
|
local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"]
|
|
sanity_check_paths = {
|
|
'files': [
|
|
"bin/%s" %
|
|
binfile for binfile in [
|
|
"ompi_info", "opal_wrapper", "orterun"]] + [
|
|
"lib/lib%s.%s" %
|
|
(libfile, SHLIB_EXT) for libfile in local_libs] + [
|
|
"include/%s.h" %
|
|
x for x in [
|
|
"mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], 'dirs': [], }
|
|
|
|
import os
|
|
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
|
|
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
|
|
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
|
|
'OMPI_MCA_orte_base_help_aggregate': '0',
|
|
'SLURM_MPI_TYPE': 'pmix_v4',
|
|
}
|
|
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
|
|
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
|
|
'OMPI_MCA_orte_base_help_aggregate': '0',
|
|
'SLURM_MPI_TYPE': 'pmix_v4',
|
|
}
|
|
else:
|
|
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
|
|
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
|
|
'SLURM_MPI_TYPE': 'pmix_v4',
|
|
}
|
|
|
|
moduleclass = 'mpi'
|