Mirror of https://code.it4i.cz/sccs/easyconfigs-it4i.git (synced 2025-04-07 15:32:11 +01:00)
new file: o/OpenMPI/OpenMPI-4.1.4-NVHPC-22.7-CUDA-11.7.0.eb
modified: q/QuantumESPRESSO/QuantumESPRESSO-7.1-NVHPC-21.9.eb
This commit is contained in:
parent df1e1b2cbc
commit 3685abaed5
o/OpenMPI/OpenMPI-4.1.4-NVHPC-22.7-CUDA-11.7.0.eb (new file, 110 lines)
@@ -0,0 +1,110 @@
# IT4Innovations 2023
# JK

name = 'OpenMPI'
version = '4.1.4'

homepage = 'https://www.open-mpi.org/'
description = """The Open MPI Project is an open source MPI-3 implementation."""

toolchain = {'name': 'NVHPC', 'version': '22.7-CUDA-11.7.0'}

source_urls = ['https://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads']
sources = [SOURCELOWER_TAR_BZ2]
patches = [
    'OpenMPI-4.1.1_build-with-internal-cuda-header.patch',
    'OpenMPI-4.1.1_opal-datatype-cuda-performance.patch',
]
checksums = [
    '92912e175fd1234368c8730c03f4996fe5942e7479bb1d10059405e7f2b3930d',  # openmpi-4.1.4.tar.bz2
    # OpenMPI-4.1.1_build-with-internal-cuda-header.patch
    '63eac52736bdf7644c480362440a7f1f0ae7c7cae47b7565f5635c41793f8c83',
    # OpenMPI-4.1.1_opal-datatype-cuda-performance.patch
    'b767c7166cf0b32906132d58de5439c735193c9fd09ec3c5c11db8d5fa68750e',
]
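
# Illustrative sketch, not part of the original easyconfig: the SHA-256 entries above can be
# cross-checked against a downloaded tarball with plain Python (the local file path is just
# the tarball name noted in the checksum comment).
import hashlib
with open('openmpi-4.1.4.tar.bz2', 'rb') as _src:
    assert hashlib.sha256(_src.read()).hexdigest() == checksums[0]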

builddependencies = [
    ('pkgconf', '1.8.0'),
    ('Perl', '5.34.1'),
    ('Autotools', '20220317'),
]

dependencies = [
    ('zlib', '1.2.12'),
    ('hwloc', '2.7.1'),
    ('libevent', '2.1.12'),
    ('UCX', '1.12.1'),
    ('UCX-CUDA', '1.12.1', '-CUDA-%(cudaver)s'),
    ('libfabric', '1.15.1'),
    ('PMIx', '4.1.2'),
    ('UCC', '1.0.0'),
    ('UCC-CUDA', '1.0.0', '-CUDA-%(cudaver)s'),
]
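
# Illustrative sketch, not part of the original easyconfig: the '-CUDA-%(cudaver)s' versionsuffix
# on UCX-CUDA and UCC-CUDA is an EasyBuild %-style template; with CUDA 11.7.0 taken from this
# toolchain it resolves as below.
local_example_suffix = '-CUDA-%(cudaver)s' % {'cudaver': '11.7.0'}  # == '-CUDA-11.7.0'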

# Update configure to include changes from the "internal-cuda" patch
# by running a subset of autogen.pl sufficient to achieve this
# without doing the full, long-running regeneration.
preconfigopts = ' && '.join([
    'cd config',
    'autom4te --language=m4sh opal_get_version.m4sh -o opal_get_version.sh',
    'cd ..',
    'autoconf',
    'autoheader',
    'aclocal',
    'automake',
    ''
])
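
# Illustrative sketch, not part of the original easyconfig: the join above builds a single shell
# prefix, and the trailing empty string makes it end in ' && ' so the actual configure command
# can follow it directly. For example:
local_example_prefix = ' && '.join(['cd config', 'autoconf', ''])
# local_example_prefix == 'cd config && autoconf && '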

# CUDA-related patches and the custom configure option can be removed if CUDA support isn't wanted.
configopts = '--with-cuda=internal '
configopts += 'CC=pgcc CXX=pgc++ FC=pgfortran '

# IT4I-specific settings

configopts += '--enable-shared '
configopts += '--enable-mpi-thread-multiple '
configopts += '--with-verbs '
configopts += '--enable-mpirun-prefix-by-default '
configopts += '--with-hwloc=$EBROOTHWLOC '  # hwloc support
configopts += '--with-tm=/opt/pbs '  # Enable PBS
configopts += '--enable-mpi-cxx '  # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
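
# Illustrative sketch, not part of the original easyconfig: a quick check that the CUDA option
# set first is still at the front of the assembled string (i.e. no later statement reassigns
# configopts with '=') and that the PBS option was appended.
assert configopts.startswith('--with-cuda=internal ')
assert '--with-tm=/opt/pbs' in configopts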

osdependencies = [('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel')]

postinstallcmds = [
    'echo "# By default, for Open MPI 4.0 and later, infiniband ports on a device are not used by default." >> %(installdir)s/etc/openmpi-mca-params.conf',
    'echo "btl_openib_allow_ib = true" >> %(installdir)s/etc/openmpi-mca-params.conf',
]
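
# Illustrative note, not part of the original easyconfig: after installation the two echo
# commands above append the following lines to %(installdir)s/etc/openmpi-mca-params.conf:
#   # By default, for Open MPI 4.0 and later, infiniband ports on a device are not used by default.
#   btl_openib_allow_ib = true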

local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"]
sanity_check_paths = {
    'files': ["bin/%s" % binfile for binfile in ["ompi_info", "opal_wrapper", "orterun"]] +
             ["lib/lib%s.%s" % (libfile, SHLIB_EXT) for libfile in local_libs] +
             ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]],
    'dirs': [],
}

import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
        'OMPI_MCA_orte_base_help_aggregate': '0',
    }
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_orte_base_help_aggregate': '0',
    }
else:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
        'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
    }
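
# Illustrative note, not part of the original easyconfig: EasyBuild turns each modextravars
# entry into an environment-variable setting in the generated module file, so on BARBORA, for
# example, loading the module would set OMPI_MCA_btl_openib_if_include=mlx5_0 (the exact
# setenv syntax depends on whether Lua/Lmod or Tcl modules are generated).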

moduleclass = 'mpi'

q/QuantumESPRESSO/QuantumESPRESSO-7.1-NVHPC-21.9.eb
@@ -69,7 +69,7 @@ elif os.environ.get("CLUSTERNAME") in ["BARBORA"]:
# gipaw is not yet ported to GPU
buildopts = 'all gwl xspectra couple epw w90'
#buildopts = 'all gwl xspectra couple epw w90'  # no rule to make target 'gipaw'
preinstallopts = ' FC=pgfortran F77=pgfortran F90=pgfortran CC=pgcc CXX=pgc++ ctest -j4 --output-on-failure -L unit && '
#preinstallopts = ' FC=pgfortran F77=pgfortran F90=pgfortran CC=pgcc CXX=pgc++ ctest -j4 --output-on-failure -L unit && '

# parallel build tends to fail
parallel = 1
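
# Illustrative note, not part of the original easyconfig, assuming standard EasyBuild behaviour:
# preinstallopts is prepended to the install command, so the unit-test run (ctest -L unit) must
# succeed before 'make install' starts, and parallel = 1 forces a serial 'make' because the
# parallel build tends to fail.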