modified:   b/Blender/Blender-3.5.1-GCC-10.3.0.eb
new file:   n/NVHPC/NVHPC-23.3.eb
modified:   x/XALT/XALT-2.10.45-GPU-eb.eb
Lukas Krupcik 2023-06-08 10:44:17 +02:00
parent 1b1e0bf05b
commit cd989bc200
3 changed files with 69 additions and 2 deletions

b/Blender/Blender-3.5.1-GCC-10.3.0.eb

@@ -1,4 +1,5 @@
-# IT4Innovations 2022
+# IT4Innovations
+# ML 2023
 easyblock = 'Tarball'

n/NVHPC/NVHPC-23.3.eb Normal file

@@ -0,0 +1,66 @@
# IT4Innovations
# LK 2023
name = 'NVHPC'
version = '23.3'
homepage = 'https://developer.nvidia.com/hpc-sdk/'
description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)"""
toolchain = SYSTEM
# By downloading, you accept the HPC SDK Software License Agreement (https://docs.nvidia.com/hpc-sdk/eula/index.html)
accept_eula = True
source_urls = ['https://developer.download.nvidia.com/hpc-sdk/%(version)s/']
local_tarball_tmpl = 'nvhpc_2023_%%(version_major)s%%(version_minor)s_Linux_%s_cuda_multi.tar.gz'
sources = [local_tarball_tmpl % '%(arch)s']
checksums = ['b94d2b4ae5a23c1a0af7d5b07c785f5057850fe3a6ee5ba0aacdde1019af5d12']
local_gccver = '12.2.0'
dependencies = [
('GCCcore', local_gccver),
('binutils', '2.39', '', ('GCCcore', local_gccver)),
# This is necessary to avoid cases where just libnuma.so.1 is present in the system and -lnuma fails
('numactl', '2.0.16', '', ('GCCcore', local_gccver))
]
# specify the default CUDA version that should be used by NVHPC
# should match one of the CUDA versions that are included with this NVHPC version
# (see install_components/Linux_x86_64/23.3/cuda/)
# this version can be tweaked from the EasyBuild command line with
# --try-amend=default_cuda_version="11.8" (for example)
default_cuda_version = '12.0'
# The NVHPC easyblock supports several features, which can be set via the CLI or this easyconfig.
# The following list gives examples for the easyconfig.
#
# NVHPC needs CUDA to work. Two options are available: 1) Use NVHPC-bundled CUDA, 2) use system CUDA
# 1) Bundled CUDA
# If no EasyBuild dependency on CUDA is present, the bundled CUDA is used. A version needs to be specified with
# default_cuda_version = "11.0"
# in this easyconfig file; alternatively, it can be specified through the command line during installation with
# --try-amend=default_cuda_version="10.2"
# 2) CUDA provided via EasyBuild
# Use CUDAcore as a dependency, for example
# dependencies = [('CUDAcore', '11.0.2')]
# The parameter default_cuda_version can still be set as above.
# If not set, it will be deduced from the CUDA module (via $EBVERSIONCUDA)
#
# Define an NVHPC-default Compute Capability
cuda_compute_capabilities = "8.0"  # A100 GPU
# cuda_compute_capabilities = '7.0'  # V100 GPU
# Can also be specified on the EasyBuild command line via --cuda-compute-capabilities=8.0
# Only single values are supported, not lists of values!
#
# Options to add/remove things to/from environment module (defaults shown)
# module_byo_compilers = False  # Remove compilers from PATH (Bring-your-own compilers)
# module_nvhpc_own_mpi = False  # Add NVHPC's own pre-compiled OpenMPI
# module_add_math_libs = False  # Add NVHPC's math libraries (which should be there from CUDA anyway)
# module_add_profilers = False  # Add NVHPC's NVIDIA Profilers
# module_add_nccl = False       # Add NVHPC's NCCL library
# module_add_nvshmem = False    # Add NVHPC's NVSHMEM library
# module_add_cuda = False       # Add NVHPC's bundled CUDA
# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS)
moduleclass = 'compiler'
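
The two CUDA options described in the comments above can be made concrete. A minimal sketch of option 2 (EasyBuild-provided CUDA), reusing the CUDAcore example version from the comments; the version is illustrative and must match an existing CUDAcore easyconfig:

    dependencies = [
        ('GCCcore', local_gccver),
        ('binutils', '2.39', '', ('GCCcore', local_gccver)),
        ('numactl', '2.0.16', '', ('GCCcore', local_gccver)),
        # adding a CUDAcore entry switches NVHPC from its bundled CUDA
        # to the EasyBuild-provided one (illustrative version)
        ('CUDAcore', '11.0.2'),
    ]

Alternatively, the bundled CUDA version and the compute capability can be tweaked at install time, following the command-line patterns quoted in the comments (values illustrative):

    eb NVHPC-23.3.eb --try-amend=default_cuda_version="11.8" --cuda-compute-capabilities=8.0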

x/XALT/XALT-2.10.45-GPU-eb.eb

@@ -34,7 +34,7 @@ syshost = "env_var:CLUSTERNAME"
 # file, or use 'eb --try-amend=transmission=<string>'. See
 # https://xalt.readthedocs.io/en/latest/020_site_configuration.html
 # for more information.
-transmission = "file"
+transmission = "syslog"
 # Additional options
 executable_tracking = True
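
With this change, XALT records are sent through the system logger instead of the file-based method. Should a host lack syslog collection, the previous behaviour can be restored at install time via the override mechanism quoted in the comments above (a sketch, assuming a standard EasyBuild setup):

    eb XALT-2.10.45-GPU-eb.eb --try-amend=transmission="file"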