From 5a6f06b0412785c5d1bc5c81a1bc1c0f4f2aa2eb Mon Sep 17 00:00:00 2001
From: Simeon Ehrig
Date: Wed, 8 Jan 2025 13:49:34 +0100
Subject: [PATCH] fix lambda extension for nvcc

---
 script/job_generator/generate_job_yaml.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/script/job_generator/generate_job_yaml.py b/script/job_generator/generate_job_yaml.py
index 189a2cb6a63..d3a9eb1ccca 100644
--- a/script/job_generator/generate_job_yaml.py
+++ b/script/job_generator/generate_job_yaml.py
@@ -345,15 +345,19 @@ def job_variables(job: Dict[str, Tuple[str, str]]) -> Dict[str, str]:
         variables["ALPAKA_CI_STDLIB"] = "libstdc++"
         variables["CMAKE_CUDA_ARCHITECTURES"] = job[SM_LEVEL][VERSION]
         variables["ALPAKA_CI_CUDA_VERSION"] = job[ALPAKA_ACC_GPU_CUDA_ENABLE][VERSION]
-        variables["alpaka_RELOCATABLE_DEVICE_CODE"] = OFF
-        variables["alpaka_CUDA_SHOW_REGISTER"] = OFF
-        variables["alpaka_CUDA_KEEP_FILES"] = OFF
-        variables["alpaka_CUDA_EXPT_EXTENDED_LAMBDA"] = OFF
+        variables["alpaka_RELOCATABLE_DEVICE_CODE"] = "OFF"
+        variables["alpaka_CUDA_SHOW_REGISTER"] = "OFF"
+        variables["alpaka_CUDA_KEEP_FILES"] = "OFF"
+        variables["alpaka_CUDA_EXPT_EXTENDED_LAMBDA"] = "OFF"
 
         if job[DEVICE_COMPILER][NAME] == NVCC:
             # general configuration, if nvcc is the CUDA compiler
             variables["ALPAKA_CI_CUDA_COMPILER"] = "nvcc"
 
+            # MdSpan requires alpaka_CUDA_EXPT_EXTENDED_LAMBDA
+            if job[MDSPAN][VERSION] == ON_VER:
+                variables["alpaka_CUDA_EXPT_EXTENDED_LAMBDA"] = "ON"
+
             # configuration, if GCC is the CUDA host compiler
             if job[HOST_COMPILER][NAME] == GCC:
                 variables["ALPAKA_CI_CXX"] = "g++"
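
Note (not part of the patch): alpaka_CUDA_EXPT_EXTENDED_LAMBDA presumably maps to
nvcc's --extended-lambda flag (older spelling: --expt-extended-lambda), which the
mdspan jobs appear to need because device lambdas are defined in host code. The
minimal CUDA sketch below, with a hypothetical run() kernel wrapper, only
illustrates what that flag enables; it is not taken from the alpaka sources.

    // compile with: nvcc --extended-lambda example.cu
    #include <cstdio>

    // Generic kernel that invokes a callable on the device.
    template <typename Kernel>
    __global__ void run(Kernel kernel)
    {
        kernel(threadIdx.x);
    }

    int main()
    {
        // A __device__ lambda defined in host code; nvcc rejects this
        // unless --extended-lambda is passed.
        auto kernel = [] __device__ (int i) { printf("thread %d\n", i); };
        run<<<1, 4>>>(kernel);
        cudaDeviceSynchronize();
        return 0;
    }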