	new file:   archive/OpenMPI/OpenMPI-1.10.1-GCC-4.9.3-2.25.eb

	new file:   archive/OpenMPI/OpenMPI-1.10.1-GNU-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.2-GCC-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.2-GCC-5.3.0-2.26.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.2-GCC-6.1.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.2-PGI-16.3-GCC-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.2-PGI-16.4-GCC-5.3.0-2.26.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.3-GCC-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.3-GCC-6.1.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.4-PGI-16.7-GCC-5.4.0-2.26.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.7-GCC-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.7-GCC-6.3.0-2.27-noPBS.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.7-GCC-6.3.0-2.27-uv.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.7-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.7-GCC-7.1.0-2.28.eb
	new file:   archive/OpenMPI/OpenMPI-1.10.7-PGI-18.5-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-1.6.5-GCC-4.8.3.eb
	new file:   archive/OpenMPI/OpenMPI-1.8.6-GCC-4.4.7-system.eb
	new file:   archive/OpenMPI/OpenMPI-1.8.6-GNU-4.4.7-system.eb
	new file:   archive/OpenMPI/OpenMPI-1.8.6-GNU-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.8.6-GNU-5.1.0-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.8.6-iccifort-2015.3.187-GNU-5.1.0-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.8.8-GNU-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.8.8-GNU-5.1.0-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-1.8.8-iccifort-2015.3.187-GNU-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-2.0.0-GCC-5.2.0.eb
	new file:   archive/OpenMPI/OpenMPI-2.0.1-GCC-6.2.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-2.0.1-gcccuda-2016.10.eb
	new file:   archive/OpenMPI/OpenMPI-2.0.1-iccifort-2017.1.132-GCC-5.4.0-2.26.eb
	new file:   archive/OpenMPI/OpenMPI-2.0.1-iccifort-2017.1.132-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-2.0.1.eb
	new file:   archive/OpenMPI/OpenMPI-2.0.2-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.0-GCC-4.9.3-2.25.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.0-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.1-GCC-6.3.0-2.27-uv.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.1-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.1-GCC-7.1.0-2.28.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.1-GCC-7.3.0-2.30.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.1-GCC-8.1.0-2.30.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.1-c7.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.5-GCC-6.3.0-2.27-noPBS.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.5-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.5-GCC-8.3.0-2.32.eb
	new file:   archive/OpenMPI/OpenMPI-2.1.5-GCC-9.1.0-2.32.eb
	new file:   archive/OpenMPI/OpenMPI-3.0.0-GCC-6.3.0-2.27-uv.eb
	new file:   archive/OpenMPI/OpenMPI-3.0.0-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-3.0.0-GCC-7.2.0-2.29.eb
	new file:   archive/OpenMPI/OpenMPI-3.0.1-GCC-6.3.0-2.27-nvidia-RHEL6.eb
	new file:   archive/OpenMPI/OpenMPI-3.0.1-GCC-6.3.0-2.27-nvidia.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.3-GCC-8.2.0-2.31.1.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.3-GCCcore-8.3.0.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.3-PGI-19.4-GCC-8.2.0-2.31.1.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.4-GCC-6.3.0-2.27-noPBS.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.4-GCC-6.3.0-2.27-nvidia.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.4-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.4-GCC-8.3.0-2.32.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.4-PGI-19.7-GCC-8.2.0-2.31.1.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.5-GCCcore-8.3.0.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.5.eb
	new file:   archive/OpenMPI/OpenMPI-3.1.6-GCCcore-8.3.0-noPBS.eb
	new file:   archive/OpenMPI/OpenMPI-4.0.0-GCC-6.3.0-2.27-noPBS.eb
	new file:   archive/OpenMPI/OpenMPI-4.0.0-GCC-6.3.0-2.27.eb
	new file:   archive/OpenMPI/OpenMPI-4.0.2-GCC-8.3.0-2.32.eb
	new file:   archive/OpenMPI/OpenMPI-4.0.3-GCC-9.3.0.eb
	new file:   archive/OpenMPI/OpenMPI-4.0.4-GCC-8.3.0-2.32-CUDA.eb
	new file:   archive/OpenMPI/OpenMPI-4.0.4-GCC-9.3.0-without-verbs.eb
	new file:   archive/OpenMPI/OpenMPI-4.0.4-GCC-9.3.0.eb
	new file:   archive/OpenMPI/OpenMPI-4.0.4-GCCcore-8.3.0.eb
	new file:   b/BLIS/BLIS-3.1-GCCcore-12.2.0-amd.eb
	new file:   f/FFTW.MPI/FFTW.MPI-3.3.10-NVHPC-23.5-CUDA-12.2.0.eb
	modified:   f/FFTW/FFTW-3.3.10-NVHPC-23.5-CUDA-12.2.0.eb
	new file:   h/HDF5/HDF5-1.14.0-NVHPC-23.5.eb
	new file:   l/libFLAME/libFLAME-5.2.0-GCCcore-12.2.0.eb
	new file:   n/NCCL/NCCL-2.12.12--CUDA-12.2.0.eb
	new file:   n/NCCL/NCCL-2.16.2-GCCcore-12.2.0-CUDA-12.0.0.eb
	modified:   n/NCCL/NCCL-2.16.2-GCCcore-12.2.0-CUDA-12.2.0.eb
	new file:   n/NCCL/NCCL-2.18.3-CUDA-12.2.0.eb
	new file:   n/NCCL/NCCL-2.18.3-GCCcore-12.2.0-CUDA-12.2.0.eb
	modified:   o/OpenMPI/OpenMPI-3.1.4-GCC-10.2.0-CUDA-12.2.0.eb
	modified:   o/OpenMPI/OpenMPI-3.1.4-GCC-10.2.0.eb
	modified:   o/OpenMPI/OpenMPI-3.1.6-GCCcore-8.3.0.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-GCC-10.2.0-Java-1.8.0_221.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-GCC-10.2.0-Java-13.0.1.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-GCC-10.2.0-test.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-GCCcore-9.3.0-CUDA-11.2.2-devel.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-GCCcore-9.3.0-CUDA-11.2.2.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-NVHPC-21.2-CUDA-11.2.2-UCX-11.1.0-rc2.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-NVHPC-21.2-CUDA-11.2.2-test.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-NVHPC-21.2-CUDA-11.2.2.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-NVHPC-21.2-CUDA-11.3.0.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-NVHPC-21.2-test.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-NVHPC-21.2.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-gcccuda-2020b.eb
	modified:   o/OpenMPI/OpenMPI-4.0.5-iccifort-2020.4.304.eb
	modified:   o/OpenMPI/OpenMPI-4.0.6-NVHPC-21.11-CUDA-11.4.1-v2.eb
	modified:   o/OpenMPI/OpenMPI-4.0.6-NVHPC-21.11-CUDA-11.4.1.eb
	modified:   o/OpenMPI/OpenMPI-4.0.6-NVHPC-21.2-CUDA-11.2.2.eb
	modified:   o/OpenMPI/OpenMPI-4.0.6-NVHPC-21.9-CUDA-11.4.1-v2.eb
	modified:   o/OpenMPI/OpenMPI-4.0.6-NVHPC-21.9-CUDA-11.4.1.eb
	modified:   o/OpenMPI/OpenMPI-4.0.6-NVHPC-22.2-CUDA-11.6.0.eb
	modified:   o/OpenMPI/OpenMPI-4.0.7-GCC-10.2.0-UCX-1.11.2-CUDA-11.4.1.eb
	modified:   o/OpenMPI/OpenMPI-4.0.7-GCC-9.3.0-UCX-1.9.0-CUDA-11.4.1.eb
	modified:   o/OpenMPI/OpenMPI-4.0.7-NVHPC-21.9-CUDA-11.4.1.eb
	modified:   o/OpenMPI/OpenMPI-4.1.1-AOCC-3.1.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.1-AOCL-3.0.1-AOCC-3.1.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.1-GCC-10.2.0-Java-1.8.0_221.eb
	modified:   o/OpenMPI/OpenMPI-4.1.1-GCC-10.2.0-test.eb
	modified:   o/OpenMPI/OpenMPI-4.1.1-GCC-10.2.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.1-GCC-10.3.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.1-GCC-11.2.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.2-GCC-11.2.0-Java-1.8.0_221.eb
	modified:   o/OpenMPI/OpenMPI-4.1.2-GCC-11.2.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.2-NVHPC-22.2-CUDA-11.6.0-v2.eb
	modified:   o/OpenMPI/OpenMPI-4.1.2-NVHPC-22.2-CUDA-11.6.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.4-GCC-11.3.0-CUDA-11.7.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.4-GCC-11.3.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.4-GCC-12.2.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.4-NVHPC-22.7-CUDA-11.7.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.5-GCC-12.3.0.eb
	modified:   o/OpenMPI/OpenMPI-4.1.5-NVHPC-23.5-CUDA-12.2.0-dgx.eb
	modified:   o/OpenMPI/OpenMPI-4.1.5-NVHPC-23.5-CUDA-12.2.0-test.eb
	modified:   o/OpenMPI/OpenMPI-4.1.5-NVHPC-23.5-CUDA-12.2.0.eb
	new file:   q/QD/QD-2.3.17-NVHPC-23.5.eb
	new file:   s/ScaLAPACK/ScaLAPACK-3.0-NVHPC-23.5-CUDA-12.2.0.eb
	modified:   u/UCC-CUDA/UCC-CUDA-1.1.0-GCCcore-12.2.0-CUDA-12.2.0.eb
	new file:   v/VASP/VASP-6.4.2-NVHPC-23.5-CUDA-12.2.0-adjust-makefile.patch
	new file:   v/VASP/VASP-6.4.2-NVHPC-23.5-CUDA-12.2.0.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.1-GCC-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.1-GNU-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.2-GCC-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.2-GCC-5.3.0-2.26.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.2-GCC-6.1.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.2-PGI-16.3-GCC-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.2-PGI-16.4-GCC-5.3.0-2.26.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.3-GCC-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.3-GCC-6.1.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.4-PGI-16.7-GCC-5.4.0-2.26.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.7-GCC-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.7-GCC-6.3.0-2.27-noPBS.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.7-GCC-6.3.0-2.27-uv.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.7-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.7-GCC-7.1.0-2.28.eb
	deleted:    o/OpenMPI/OpenMPI-1.10.7-PGI-18.5-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-1.6.5-GCC-4.8.3.eb
	deleted:    o/OpenMPI/OpenMPI-1.8.6-GCC-4.4.7-system.eb
	deleted:    o/OpenMPI/OpenMPI-1.8.6-GNU-4.4.7-system.eb
	deleted:    o/OpenMPI/OpenMPI-1.8.6-GNU-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.8.6-GNU-5.1.0-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.8.6-iccifort-2015.3.187-GNU-5.1.0-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.8.8-GNU-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.8.8-GNU-5.1.0-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-1.8.8-iccifort-2015.3.187-GNU-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-2.0.0-GCC-5.2.0.eb
	deleted:    o/OpenMPI/OpenMPI-2.0.1-GCC-6.2.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-2.0.1-gcccuda-2016.10.eb
	deleted:    o/OpenMPI/OpenMPI-2.0.1-iccifort-2017.1.132-GCC-5.4.0-2.26.eb
	deleted:    o/OpenMPI/OpenMPI-2.0.1-iccifort-2017.1.132-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-2.0.1.eb
	deleted:    o/OpenMPI/OpenMPI-2.0.2-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.0-GCC-4.9.3-2.25.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.0-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.1-GCC-6.3.0-2.27-uv.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.1-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.1-GCC-7.1.0-2.28.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.1-GCC-7.3.0-2.30.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.1-GCC-8.1.0-2.30.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.1-c7.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.5-GCC-6.3.0-2.27-noPBS.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.5-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.5-GCC-8.3.0-2.32.eb
	deleted:    o/OpenMPI/OpenMPI-2.1.5-GCC-9.1.0-2.32.eb
	deleted:    o/OpenMPI/OpenMPI-3.0.0-GCC-6.3.0-2.27-uv.eb
	deleted:    o/OpenMPI/OpenMPI-3.0.0-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-3.0.0-GCC-7.2.0-2.29.eb
	deleted:    o/OpenMPI/OpenMPI-3.0.1-GCC-6.3.0-2.27-nvidia-RHEL6.eb
	deleted:    o/OpenMPI/OpenMPI-3.0.1-GCC-6.3.0-2.27-nvidia.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.3-GCC-8.2.0-2.31.1.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.3-GCCcore-8.3.0.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.3-PGI-19.4-GCC-8.2.0-2.31.1.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.4-GCC-6.3.0-2.27-noPBS.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.4-GCC-6.3.0-2.27-nvidia.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.4-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.4-GCC-8.3.0-2.32.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.4-PGI-19.7-GCC-8.2.0-2.31.1.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.5-GCCcore-8.3.0.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.5.eb
	deleted:    o/OpenMPI/OpenMPI-3.1.6-GCCcore-8.3.0-noPBS.eb
	deleted:    o/OpenMPI/OpenMPI-4.0.0-GCC-6.3.0-2.27-noPBS.eb
	deleted:    o/OpenMPI/OpenMPI-4.0.0-GCC-6.3.0-2.27.eb
	deleted:    o/OpenMPI/OpenMPI-4.0.2-GCC-8.3.0-2.32.eb
	deleted:    o/OpenMPI/OpenMPI-4.0.3-GCC-9.3.0.eb
	deleted:    o/OpenMPI/OpenMPI-4.0.4-GCC-8.3.0-2.32-CUDA.eb
	deleted:    o/OpenMPI/OpenMPI-4.0.4-GCC-9.3.0-without-verbs.eb
	deleted:    o/OpenMPI/OpenMPI-4.0.4-GCC-9.3.0.eb
	deleted:    o/OpenMPI/OpenMPI-4.0.4-GCCcore-8.3.0.eb
This commit is contained in:
Lukas Krupcik 2023-08-17 14:22:00 +02:00
parent 721af089f6
commit 8ce22eb486
127 changed files with 956 additions and 90 deletions

View File

@ -0,0 +1,50 @@
# IT4Innovations
# LK 2023
# EasyBuild easyconfig for AMD's optimized fork of BLIS (BLAS-like library).
easyblock = 'ConfigureMake'
name = 'BLIS'
version = '3.1'
versionsuffix = '-amd'
homepage = 'https://developer.amd.com/amd-cpu-libraries/blas-library/'
description = """AMD's fork of BLIS. BLIS is a portable software framework for instantiating high-performance
BLAS-like dense linear algebra libraries."""
toolchain = {'name': 'GCCcore', 'version': '12.2.0'}
source_urls = ['https://github.com/amd/blis/archive/']
sources = ['%(version)s.tar.gz']
# NOTE(review): patch files carry upstream "BLIS-0.8.1" names although this is
# AMD BLIS 3.1 -- presumably they still apply to the fork; confirm on build.
patches = [
'%(name)s-0.8.1_enable_ppc_autodetect.patch',
'%(name)s-0.8.1_fix_dgemm-fpe-signalling-on-broadwell.patch',
]
checksums = [
'2891948925b9db99eec02a1917d9887a7bee9ad2afc5421c9ba58602a620f2bf', # 3.1.tar.gz
# BLIS-0.8.1_enable_ppc_autodetect.patch
'b8a3d564a8d4f205e70241765ddfd28331c3c12355ef9c44172c9a0cab9f0111',
# BLIS-0.8.1_fix_dgemm-fpe-signalling-on-broadwell.patch
'345fa39933e9d1442d2eb1e4ed9129df3fe4aefecf4d104e5d4f25b3bca24d0d',
]
builddependencies = [
('binutils', '2.39'),
('Python', '3.10.8'),
('Perl', '5.36.0'),
]
# Build Serial and multithreaded library
# (two configure/build passes: plain CBLAS, then OpenMP-threaded; 'auto'
# lets BLIS autodetect the target microarchitecture)
configopts = ['--enable-cblas --enable-shared CC="$CC" auto',
'--enable-cblas --enable-threading=openmp --enable-shared CC="$CC" auto']
# Run the bundled test suite ("make check") after building.
runtest = 'check'
sanity_check_paths = {
'files': ['include/blis/cblas.h', 'include/blis/blis.h',
'lib/libblis.a', 'lib/libblis.%s' % SHLIB_EXT,
'lib/libblis-mt.a', 'lib/libblis-mt.%s' % SHLIB_EXT],
'dirs': [],
}
# Headers install under include/blis, so extend CPATH accordingly.
modextrapaths = {'CPATH': 'include/blis'}
moduleclass = 'numlib'

View File

@ -0,0 +1,26 @@
# IT4Innovations
# LK 2023
# EasyBuild easyconfig: MPI-enabled FFTW on top of the serial FFTW build
# from the same NVHPC/CUDA toolchain combination.
name = 'FFTW.MPI'
version = '3.3.10'
versionsuffix = '-CUDA-12.2.0'
homepage = 'https://www.fftw.org'
description = """FFTW is a C subroutine library for computing the discrete Fourier transform (DFT)
in one or more dimensions, of arbitrary input size, and of both real and complex data."""
toolchain = {'name': 'NVHPC', 'version': '23.5'}
toolchainopts = {'pic': True}
source_urls = [homepage]
sources = ['fftw-%(version)s.tar.gz']
checksums = ['56c932549852cddcfafdab3820b0200c7742675be92179e59e6215b340e26467']
dependencies = [
('FFTW', '3.3.10', '-CUDA-12.2.0'),
('OpenMPI', '4.1.5', '-CUDA-12.2.0'),
]
# Run FFTW's "make check" test suite after building.
runtest = 'check'
moduleclass = 'numlib'

View File

@ -3,6 +3,7 @@
name = 'FFTW'
version = '3.3.10'
versionsuffix = '-CUDA-12.2.0'
homepage = 'https://www.fftw.org'
description = """FFTW is a C subroutine library for computing the discrete Fourier transform (DFT)

View File

@ -0,0 +1,27 @@
# IT4Innovations
# LK 2023
# EasyBuild easyconfig: parallel HDF5 1.14.0 built with the NVHPC toolchain.
name = 'HDF5'
version = '1.14.0'
homepage = 'https://portal.hdfgroup.org/display/support'
description = """HDF5 is a data model, library, and file format for storing and managing data.
It supports an unlimited variety of datatypes, and is designed for flexible
and efficient I/O and for high volume and complex data."""
toolchain = {'name': 'NVHPC', 'version': '23.5'}
toolchainopts = {'pic': True}
source_urls = ['https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major_minor)s/hdf5-%(version)s/src']
sources = [SOURCELOWER_TAR_GZ]
checksums = ['a571cc83efda62e1a51a0a912dd916d01895801c5025af91669484a1575a6ef4']
# Fortran 2003 interfaces are included automatically with --enable-fortran in
# HDF5 >= 1.10 (the old --enable-fortran2003 switch no longer exists);
# --enable-unsupported is required to combine --enable-cxx with --enable-parallel.
configopts = '--enable-fortran --enable-cxx --enable-parallel --enable-unsupported --enable-shared'
dependencies = [
('OpenMPI', '4.1.5', '-CUDA-12.2.0'),
('zlib', '1.2.12'),
('Szip', '2.1.1'),
]
moduleclass = 'data'

View File

@ -0,0 +1,56 @@
# IT4Innovations
# LK 2023
# EasyBuild easyconfig for libFLAME (dense linear algebra, LAPACK-compatible),
# linked against the AMD BLIS build from this same commit.
easyblock = 'ConfigureMake'
name = 'libFLAME'
version = '5.2.0'
homepage = 'https://developer.amd.com/amd-cpu-libraries/blas-library/#libflame'
description = """libFLAME is a portable library for dense matrix computations,
providing much of the functionality present in LAPACK."""
toolchain = {'name': 'GCCcore', 'version': '12.2.0'}
toolchainopts = {'pic': True}
source_urls = ['https://github.com/flame/libflame/archive/']
sources = ['%(version)s.tar.gz']
checksums = ['997c860f351a5c7aaed8deec00f502167599288fd0559c92d5bfd77d0b4d475c']
# '--enable-max-arg-list-hack --enable-dynamic-build' requires 'file' function from GNU Make 4.x
builddependencies = [
('binutils', '2.39'),
('Python', '3.10.8'),
('make', '4.4.1'), # needed on CentOS 7 where make 3 is installed
]
dependencies = [('BLIS', '3.1', '-amd')]
# Use unset FLIBS to let configure pick up LDFLAGS
preconfigopts = 'unset FLIBS && '
preconfigopts += 'LIBS="-lblis $LIBS" '
preconfigopts += 'LDFLAGS="$LDFLAGS -L$EBROOTBLIS/lib -fopenmp -lm -lpthread" '
preconfigopts += 'CFLAGS="$CFLAGS -I$EBROOTBLIS/include/blis" '
configopts = '--enable-max-arg-list-hack '
configopts += '--enable-lapack2flame '
configopts += '--enable-external-lapack-interfaces '
configopts += '--enable-cblas-interfaces '
configopts += '--enable-dynamic-build '
configopts += '--enable-multithreading=openmp '
# libFLAME C++ Template API tests (left disabled)
# runtest = 'checkcpp LIBBLAS=$EBROOTBLIS/lib/libblis.a'
# sanity_check_commands = [
# 'cd %(builddir)s/%(namelower)s-%(version)s/test '
# '&& make LIBBLAS=$EBROOTBLIS/lib/libblis-mt.so LDFLAGS="-fopenmp -lm -lpthread" '
# '&& ./test_libflame.x'
# ]
sanity_check_paths = {
'files': ['include/FLAME.h', 'lib/libflame.a', 'lib/libflame.%s' % SHLIB_EXT],
'dirs': ['lib'],
}
moduleclass = 'numlib'

View File

@ -0,0 +1,26 @@
# IT4Innovations
# LK 2023
# EasyBuild easyconfig for NCCL 2.12.12 built against system CUDA 12.2.0.
name = 'NCCL'
version = '2.12.12'
versionsuffix = '-CUDA-%(cudaver)s'
homepage = 'https://developer.nvidia.com/nccl'
description = """The NVIDIA Collective Communications Library (NCCL) implements multi-GPU and multi-node collective
communication primitives that are performance optimized for NVIDIA GPUs."""
# SYSTEM toolchain; a GCCcore 12.2.0 variant was left commented out.
toolchain = SYSTEM #{'name': 'GCCcore', 'version': '12.2.0'}
github_account = 'NVIDIA'
source_urls = [GITHUB_SOURCE]
sources = ['v%(version)s-1.tar.gz']
checksums = ['49b4fbfeebf1f62f6ceb69e72504045d8d1b4e7609e3c2477906f3004c7e2d82']
#builddependencies = [('binutils', '2.39')]
dependencies = [
('CUDA', '12.2.0', '', SYSTEM),
# ('UCX-CUDA', '1.14.1', versionsuffix),
]
moduleclass = 'lib'

View File

@ -0,0 +1,26 @@
# EasyBuild easyconfig for NCCL 2.16.2 (GCCcore 12.2.0, CUDA 12.0.0).
name = 'NCCL'
version = '2.16.2'
versionsuffix = '-CUDA-%(cudaver)s'
homepage = 'https://developer.nvidia.com/nccl'
description = """The NVIDIA Collective Communications Library (NCCL) implements multi-GPU and multi-node collective
communication primitives that are performance optimized for NVIDIA GPUs."""
toolchain = {'name': 'GCCcore', 'version': '12.2.0'}
github_account = 'NVIDIA'
source_urls = [GITHUB_SOURCE]
sources = ['v%(version)s-1.tar.gz']
checksums = ['7f7c738511a8876403fc574d13d48e7c250d934d755598d82e14bab12236fc64']
builddependencies = [('binutils', '2.39')]
dependencies = [
('CUDA', '12.0.0', '', SYSTEM),
# UCX-CUDA built for the same CUDA version (via versionsuffix template).
('UCX-CUDA', '1.13.1', versionsuffix),
]
# default CUDA compute capabilities to use (override via --cuda-compute-capabilities)
cuda_compute_capabilities = ['5.0', '6.0', '7.0', '7.5', '8.0', '8.6', '9.0']
moduleclass = 'lib'

View File

@ -24,6 +24,6 @@ dependencies = [
]
# default CUDA compute capabilities to use (override via --cuda-compute-capabilities)
cuda_compute_capabilities = ['5.0', '6.0', '7.0', '7.5', '8.0', '8.6', '9.0']
#cuda_compute_capabilities = ['5.0', '6.0', '7.0', '7.5', '8.0', '8.6', '9.0']
moduleclass = 'lib'

View File

@ -0,0 +1,27 @@
# IT4Innovations
# LK 2023
# EasyBuild easyconfig for NCCL 2.18.3 built against system CUDA 12.2.0
# (SYSTEM toolchain; no UCX-CUDA dependency -- left commented out).
name = 'NCCL'
version = '2.18.3'
versionsuffix = '-CUDA-%(cudaver)s'
homepage = 'https://developer.nvidia.com/nccl'
description = """The NVIDIA Collective Communications Library (NCCL) implements multi-GPU and multi-node collective
communication primitives that are performance optimized for NVIDIA GPUs."""
toolchain = SYSTEM
github_account = 'NVIDIA'
source_urls = [GITHUB_SOURCE]
sources = ['v%(version)s-1.tar.gz']
checksums = ['b4f5d7d9eea2c12e32e7a06fe138b2cfc75969c6d5c473aa6f819a792db2fc96']
dependencies = [
('CUDA', '12.2.0', '', SYSTEM),
# ('UCX-CUDA', '1.14.1', versionsuffix, ('GCCcore', '12.2.0')),
]
# default CUDA compute capabilities to use (override via --cuda-compute-capabilities)
# -- restricted here to compute capability 8.0 only
cuda_compute_capabilities = ['8.0']
moduleclass = 'lib'

View File

@ -0,0 +1,29 @@
# IT4Innovations
# LK 2023
# EasyBuild easyconfig for NCCL 2.18.3 (GCCcore 12.2.0, CUDA 12.2.0),
# with UCX-CUDA support enabled.
name = 'NCCL'
version = '2.18.3'
versionsuffix = '-CUDA-%(cudaver)s'
homepage = 'https://developer.nvidia.com/nccl'
description = """The NVIDIA Collective Communications Library (NCCL) implements multi-GPU and multi-node collective
communication primitives that are performance optimized for NVIDIA GPUs."""
toolchain = {'name': 'GCCcore', 'version': '12.2.0'}
github_account = 'NVIDIA'
source_urls = [GITHUB_SOURCE]
sources = ['v%(version)s-1.tar.gz']
checksums = ['b4f5d7d9eea2c12e32e7a06fe138b2cfc75969c6d5c473aa6f819a792db2fc96']
builddependencies = [('binutils', '2.39')]
dependencies = [
('CUDA', '12.2.0', '', SYSTEM),
# UCX-CUDA built for the same CUDA version (via versionsuffix template).
('UCX-CUDA', '1.14.1', versionsuffix),
]
# default CUDA compute capabilities to use (override via --cuda-compute-capabilities)
# -- restricted here to compute capability 8.0 only
cuda_compute_capabilities = ['8.0']
moduleclass = 'lib'

View File

@ -50,9 +50,22 @@ sanity_check_paths = {
x for x in [
"mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], 'dirs': [], }
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
}
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -45,9 +45,22 @@ sanity_check_paths = {
x for x in [
"mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], 'dirs': [], }
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'OMPI_MCA_btl_openib_warn_no_device_params_found': '0',
}
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -43,10 +43,17 @@ if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -55,10 +55,17 @@ if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -59,10 +59,17 @@ if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -20,9 +20,6 @@ dependencies = [
('hwloc', '2.2.0'),
('zlib', '1.2.11'),
('UCX', '1.9.0',),
# ('libfabric', '1.11.0'),
# ('PMIx', '3.1.5'),
# ('libevent', '2.1.12'),
]
preconfigopts = './autogen.pl && '
@ -58,10 +55,17 @@ if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -49,4 +49,22 @@ sanity_check_paths = {
x for x in [
"mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], 'dirs': [], }
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -48,4 +48,22 @@ sanity_check_paths = {
x for x in [
"mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], 'dirs': [], }
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -29,4 +29,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -29,4 +29,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -29,4 +29,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -30,4 +30,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -28,5 +28,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
#onfigopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTNVHPC/Linux_x86_64/21.2/cuda/11.2 '
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -27,5 +27,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTNVHPC/Linux_x86_64/21.2/cuda/11.2 '
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@ -40,4 +40,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
# to enable SLURM integration (site-specific)
# configopts += '--with-slurm --with-pmi=/usr/include/slurm --with-pmi-libdir=/usr'
import os
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
'OMPI_MCA_orte_base_help_aggregate': '0',
'SLURM_MPI_TYPE': 'pmix_v4',
}
else:
modextravars = {'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
'SLURM_MPI_TYPE': 'pmix_v4',
}
moduleclass = 'mpi'

View File

@@ -83,4 +83,22 @@ else:
# to enable SLURM integration (site-specific)
# configopts += '--with-slurm --with-pmi=/usr/include/slurm --with-pmi-libdir=/usr'
import os

# Pick site-specific Open MPI runtime defaults for the generated module,
# keyed on the cluster name (CLUSTERNAME is presumably exported by the
# site's login environment -- TODO confirm).
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
else:
    # Fallback for any other (or unset) cluster name: older mlx4 HCA name
    # and a cluster-wide out-of-band TCP range.
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
        'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }

moduleclass = 'mpi'

View File

@@ -29,16 +29,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
moduleclass = 'mpi'
import os

# Pick site-specific Open MPI runtime defaults for the generated module,
# keyed on the cluster name (CLUSTERNAME is presumably exported by the
# site's login environment -- TODO confirm).
# (Earlier commented-out setenv() lines duplicated these settings -- one of
# them with "export " fused into the variable name -- and were removed.)
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_btl_openib_allow_ib': '1',
        'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        # '^ucx' excludes the UCX PML -- NOTE(review): the build passes
        # --with-ucx; confirm this runtime override is intentional.
        'OMPI_MCA_pml': '^ucx',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
else:
    # Fallback for any other (or unset) cluster name: older mlx4 HCA name
    # and a cluster-wide out-of-band TCP range.
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
        'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }

moduleclass = 'mpi'

View File

@@ -29,16 +29,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
moduleclass = 'mpi'
import os

# Pick site-specific Open MPI runtime defaults for the generated module,
# keyed on the cluster name (CLUSTERNAME is presumably exported by the
# site's login environment -- TODO confirm).
# (Earlier commented-out setenv() lines duplicated these settings -- one of
# them with "export " fused into the variable name -- and were removed.)
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_btl_openib_allow_ib': '1',
        'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        # '^ucx' excludes the UCX PML -- NOTE(review): the build passes
        # --with-ucx; confirm this runtime override is intentional.
        'OMPI_MCA_pml': '^ucx',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
else:
    # Fallback for any other (or unset) cluster name: older mlx4 HCA name
    # and a cluster-wide out-of-band TCP range.
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
        'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }

moduleclass = 'mpi'

View File

@@ -29,4 +29,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
import os

# Pick site-specific Open MPI runtime defaults for the generated module,
# keyed on the cluster name (CLUSTERNAME is presumably exported by the
# site's login environment -- TODO confirm).
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
else:
    # Fallback for any other (or unset) cluster name: older mlx4 HCA name
    # and a cluster-wide out-of-band TCP range.
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
        'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }

moduleclass = 'mpi'

View File

@@ -30,12 +30,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
moduleclass = 'mpi'
import os

# Pick site-specific Open MPI runtime defaults for the generated module,
# keyed on the cluster name (CLUSTERNAME is presumably exported by the
# site's login environment -- TODO confirm).
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_btl_openib_allow_ib': '1',
        'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        # '^ucx' excludes the UCX PML -- NOTE(review): the build passes
        # --with-ucx; confirm this runtime override is intentional.
        'OMPI_MCA_pml': '^ucx',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
else:
    # Fallback for any other (or unset) cluster name: older mlx4 HCA name
    # and a cluster-wide out-of-band TCP range.
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
        'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }

moduleclass = 'mpi'

View File

@@ -29,16 +29,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
moduleclass = 'mpi'
import os

# Pick site-specific Open MPI runtime defaults for the generated module,
# keyed on the cluster name (CLUSTERNAME is presumably exported by the
# site's login environment -- TODO confirm).
# (Earlier commented-out setenv() lines duplicated these settings -- one of
# them with "export " fused into the variable name -- and were removed.)
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_btl_openib_allow_ib': '1',
        'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        # '^ucx' excludes the UCX PML -- NOTE(review): the build passes
        # --with-ucx; confirm this runtime override is intentional.
        'OMPI_MCA_pml': '^ucx',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
else:
    # Fallback for any other (or unset) cluster name: older mlx4 HCA name
    # and a cluster-wide out-of-band TCP range.
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
        'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }

moduleclass = 'mpi'

View File

@@ -29,16 +29,22 @@ configopts += '--enable-mpi-cxx ' # Enable building the C++ MPI bindings
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-cuda=$EBROOTCUDACORE '
moduleclass = 'mpi'
import os

# Pick site-specific Open MPI runtime defaults for the generated module,
# keyed on the cluster name (CLUSTERNAME is presumably exported by the
# site's login environment -- TODO confirm).
# (Earlier commented-out setenv() lines duplicated these settings -- one of
# them with "export " fused into the variable name -- and were removed.)
if os.environ.get("CLUSTERNAME") in ["BARBORA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_btl_openib_allow_ib': '1',
        'OMPI_MCA_btl_tcp_if_include': '10.33.4.0/24',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        # '^ucx' excludes the UCX PML -- NOTE(review): the build passes
        # --with-ucx; confirm this runtime override is intentional.
        'OMPI_MCA_pml': '^ucx',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
elif os.environ.get("CLUSTERNAME") in ["KAROLINA"]:
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx5_0',
        'OMPI_MCA_orte_base_help_aggregate': '0',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }
else:
    # Fallback for any other (or unset) cluster name: older mlx4 HCA name
    # and a cluster-wide out-of-band TCP range.
    modextravars = {
        'OMPI_MCA_btl_openib_if_include': 'mlx4_0',
        'OMPI_MCA_oob_tcp_if_include': '10.0.0.0/8',
        'SLURM_MPI_TYPE': 'pmix_v4',
    }

moduleclass = 'mpi'

Some files were not shown because too many files have changed in this diff Show More