From 9f8f365c4d07424a0230cac22e25a459dfaa2d04 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 12:42:54 -0400 Subject: [PATCH 01/29] update CI Docker container Update to CUDA 12.6, Ubuntu 24.04, and Clang 19. --- scripts/docker/Dockerfile.nvcc | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index ee1a6baa062d..60027910083f 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,18 +1,17 @@ -FROM nvidia/cuda:11.6.1-devel-ubuntu20.04 +FROM nvidia/cuda:12.6.0-devel-ubuntu24.04 RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-11-6 cmake ninja-build + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build RUN pip3 install unyt RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key| apt-key add - && \ - echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-15 main" > /etc/apt/sources.list.d/llvm.list + echo "deb http://apt.llvm.org/noble/ llvm-toolchain-noble-19 main" > /etc/apt/sources.list.d/llvm.list RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends clang-15 llvm-15 libomp-15-dev && \ + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends clang-19 llvm-19 libomp-19-dev && \ rm -rf /var/lib/apt/lists/* - RUN cd /tmp && \ wget https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-4.1.4.tar.bz2 && \ tar 
xjf openmpi-4.1.4.tar.bz2 && \ From 658130692e72447e241fd39f24bab7fc0fcb5b7d Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 13:12:23 -0400 Subject: [PATCH 02/29] update OpenMPI --- scripts/docker/Dockerfile.nvcc | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 60027910083f..5f5cb0f56ed2 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -3,7 +3,7 @@ FROM nvidia/cuda:12.6.0-devel-ubuntu24.04 RUN apt-get clean && apt-get update -y && \ DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build -RUN pip3 install unyt +RUN pip3 install unyt --break-system-packages RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key| apt-key add - && \ echo "deb http://apt.llvm.org/noble/ llvm-toolchain-noble-19 main" > /etc/apt/sources.list.d/llvm.list @@ -13,10 +13,10 @@ RUN apt-get clean && apt-get update -y && \ rm -rf /var/lib/apt/lists/* RUN cd /tmp && \ - wget https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-4.1.4.tar.bz2 && \ - tar xjf openmpi-4.1.4.tar.bz2 && \ - cd openmpi-4.1.4 && \ - ./configure --prefix=/opt/openmpi --enable-mpi-cxx --with-cuda && \ + wget https://download.open-mpi.org/release/open-mpi/v5.0/openmpi-5.0.5.tar.bz2 && \ + tar xjf openmpi-5.0.5.tar.bz2 && \ + cd openmpi-5.0.5 && \ + ./configure --prefix=/opt/openmpi --with-cuda && \ make -j16 && \ make install && \ cd / && \ @@ -44,10 +44,8 @@ ENV PATH=/opt/cmake-3.23.2-linux-x86_64/bin:$PATH COPY build_ascent_cuda.sh /tmp/build-ascent/build_ascent_cuda.sh +## this fails when building on aarch64 macs :( RUN cd /tmp/build-ascent && \ bash build_ascent_cuda.sh && \ cd / && \ rm -rf /tmp/build-ascent - -# manually 
downgrade numpy as deprecated `typeDict` is still used by h5py -RUN pip install numpy==1.21 From 5add7b76388b3751448ba063c4148e0030426235 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 14:25:36 -0400 Subject: [PATCH 03/29] revert to OpenMPI 4.1.4 --- scripts/docker/Dockerfile.nvcc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 5f5cb0f56ed2..6515b28956a3 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -13,10 +13,10 @@ RUN apt-get clean && apt-get update -y && \ rm -rf /var/lib/apt/lists/* RUN cd /tmp && \ - wget https://download.open-mpi.org/release/open-mpi/v5.0/openmpi-5.0.5.tar.bz2 && \ - tar xjf openmpi-5.0.5.tar.bz2 && \ - cd openmpi-5.0.5 && \ - ./configure --prefix=/opt/openmpi --with-cuda && \ + wget https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-4.1.4.tar.bz2 && \ + tar xjf openmpi-4.1.4.tar.bz2 && \ + cd openmpi-4.1.4 && \ + ./configure --prefix=/opt/openmpi --disable-mpi-fortran --disable-oshmem --with-cuda && \ make -j16 && \ make install && \ cd / && \ From d8b2fdc1c4f163343a14b7d3713383a3cd2b5b4d Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 14:47:55 -0400 Subject: [PATCH 04/29] add ADIOS2+openPMD --- scripts/docker/Dockerfile.nvcc | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 6515b28956a3..d0a046041891 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -42,9 +42,18 @@ RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.2 ENV PATH=/opt/cmake-3.23.2-linux-x86_64/bin:$PATH +RUN cd /tmp/build-adios2 && \ + git clone https://github.com/ornladios/ADIOS2.git ADIOS2 && \ + mkdir adios2-build && cd adios2-build && \ + cmake ../ADIOS2 -DADIOS2_USE_Blosc2=ON -DADIOS2_USE_Fortran=OFF && \ + make -j8 && make install && \ + 
cd / && + rm -rf /tmp/build-adios2 + +RUN openPMD_USE_MPI=ON pip3 install openpmd-api --no-binary openpmd-api --break-system-packages + COPY build_ascent_cuda.sh /tmp/build-ascent/build_ascent_cuda.sh -## this fails when building on aarch64 macs :( RUN cd /tmp/build-ascent && \ bash build_ascent_cuda.sh && \ cd / && \ From 8acc717c020c269690e735e9860f103ebf41af74 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 14:49:58 -0400 Subject: [PATCH 05/29] add c-blosc ubuntu package --- scripts/docker/Dockerfile.nvcc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index d0a046041891..d06fce224347 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,7 +1,7 @@ FROM nvidia/cuda:12.6.0-devel-ubuntu24.04 RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build c-blosc RUN pip3 install unyt --break-system-packages From 7a390596fe5a11e4c95f9c9d91ae3961f930c49e Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 14:53:28 -0400 Subject: [PATCH 06/29] fix Dockerfile.nvcc --- scripts/docker/Dockerfile.nvcc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index d06fce224347..8bd5520b0775 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,10 +1,12 @@ FROM 
nvidia/cuda:12.6.0-devel-ubuntu24.04 RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build c-blosc + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build RUN pip3 install unyt --break-system-packages +RUN pip3 install blosc2 --break-system-packages + RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key| apt-key add - && \ echo "deb http://apt.llvm.org/noble/ llvm-toolchain-noble-19 main" > /etc/apt/sources.list.d/llvm.list @@ -47,7 +49,7 @@ RUN cd /tmp/build-adios2 && \ mkdir adios2-build && cd adios2-build && \ cmake ../ADIOS2 -DADIOS2_USE_Blosc2=ON -DADIOS2_USE_Fortran=OFF && \ make -j8 && make install && \ - cd / && + cd / && \ rm -rf /tmp/build-adios2 RUN openPMD_USE_MPI=ON pip3 install openpmd-api --no-binary openpmd-api --break-system-packages From e6bcad2b87d9bc8b961109bc739dbf54237a97f8 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 15:03:11 -0400 Subject: [PATCH 07/29] fix dockerfile --- scripts/docker/Dockerfile.nvcc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 8bd5520b0775..f4aef9119224 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -44,7 +44,7 @@ RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.2 ENV PATH=/opt/cmake-3.23.2-linux-x86_64/bin:$PATH -RUN cd /tmp/build-adios2 && \ +RUN mkdir /tmp/build-adios2 && cd /tmp/build-adios2 && \ git clone 
https://github.com/ornladios/ADIOS2.git ADIOS2 && \ mkdir adios2-build && cd adios2-build && \ cmake ../ADIOS2 -DADIOS2_USE_Blosc2=ON -DADIOS2_USE_Fortran=OFF && \ From cec0c64394172a036a235c006f315b022baf5c2c Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 15:54:40 -0400 Subject: [PATCH 08/29] install python headers --- scripts/docker/Dockerfile.nvcc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index f4aef9119224..34d4eb9433c0 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,7 +1,7 @@ FROM nvidia/cuda:12.6.0-devel-ubuntu24.04 RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev RUN pip3 install unyt --break-system-packages @@ -37,8 +37,6 @@ RUN cd /tmp && \ cd / && \ rm -rf /tmp/hdf5-1.12.2* -RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 10 - RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2-linux-x86_64.tar.gz -o cmake-3.23.2-linux-x86_64.tar.gz && \ tar -xzf cmake-3.23.2-linux-x86_64.tar.gz -C /opt From e36f2457c05314089fcb6bbd4f282c4bf89edd39 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 16:58:28 -0400 Subject: [PATCH 09/29] install cmake from apt (required for aarch64) --- scripts/docker/Dockerfile.nvcc | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) 
diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 34d4eb9433c0..30d3ee85f98a 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,7 +1,7 @@ FROM nvidia/cuda:12.6.0-devel-ubuntu24.04 RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev cmake RUN pip3 install unyt --break-system-packages @@ -37,11 +37,6 @@ RUN cd /tmp && \ cd / && \ rm -rf /tmp/hdf5-1.12.2* -RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2-linux-x86_64.tar.gz -o cmake-3.23.2-linux-x86_64.tar.gz && \ - tar -xzf cmake-3.23.2-linux-x86_64.tar.gz -C /opt - -ENV PATH=/opt/cmake-3.23.2-linux-x86_64/bin:$PATH - RUN mkdir /tmp/build-adios2 && cd /tmp/build-adios2 && \ git clone https://github.com/ornladios/ADIOS2.git ADIOS2 && \ mkdir adios2-build && cd adios2-build && \ From fb8e9575972996818c1f8278f10f862e8fc021e8 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 17:02:37 -0400 Subject: [PATCH 10/29] remove duplicate cmake dep --- scripts/docker/Dockerfile.nvcc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 30d3ee85f98a..fbc6a866598e 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,7 +1,7 @@ FROM nvidia/cuda:12.6.0-devel-ubuntu24.04 RUN apt-get clean && apt-get update -y && \ 
- DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev cmake + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev RUN pip3 install unyt --break-system-packages From ebe3f9bc020073aa262045233b1c17ce04a2d759 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 17:12:31 -0400 Subject: [PATCH 11/29] disable ascent build --- scripts/docker/Dockerfile.nvcc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index fbc6a866598e..96f06b1a57ff 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -49,7 +49,7 @@ RUN openPMD_USE_MPI=ON pip3 install openpmd-api --no-binary openpmd-api --break- COPY build_ascent_cuda.sh /tmp/build-ascent/build_ascent_cuda.sh -RUN cd /tmp/build-ascent && \ - bash build_ascent_cuda.sh && \ - cd / && \ - rm -rf /tmp/build-ascent +#RUN cd /tmp/build-ascent && \ +# bash build_ascent_cuda.sh && \ +# cd / && \ +# rm -rf /tmp/build-ascent From 18640dc66403717fc30c621c3b6266393df6f594 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Tue, 27 Aug 2024 22:06:04 -0400 Subject: [PATCH 12/29] add newer ascent version --- scripts/docker/Dockerfile.nvcc | 10 +- .../docker/ascent_build/2023_01_30_raja.patch | 36 + ...2023_12_06_vtkm-mr3160-rocthrust-fix.patch | 74 ++ .../2024_05_03_vtkm-mr3215-ext-geom-fix.patch | 145 +++ ...4_07_02_vtkm-mr3246-raysubset_bugfix.patch | 43 + .../2024_07_25_silo_4_11_cmake_fix.patch | 23 + 
.../2024_07_29_silo-pr389-win32-bugfix.patch | 10 + .../2024_08_01_caliper-win-smaller-opts.patch | 1102 +++++++++++++++++ ...pr1311-detect-if-caliper-needs-adiak.patch | 147 +++ scripts/docker/ascent_build/build_ascent.sh | 982 +++++++++++++++ scripts/docker/build_ascent_cuda.sh | 438 ------- 11 files changed, 2567 insertions(+), 443 deletions(-) create mode 100644 scripts/docker/ascent_build/2023_01_30_raja.patch create mode 100644 scripts/docker/ascent_build/2023_12_06_vtkm-mr3160-rocthrust-fix.patch create mode 100644 scripts/docker/ascent_build/2024_05_03_vtkm-mr3215-ext-geom-fix.patch create mode 100644 scripts/docker/ascent_build/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch create mode 100644 scripts/docker/ascent_build/2024_07_25_silo_4_11_cmake_fix.patch create mode 100644 scripts/docker/ascent_build/2024_07_29_silo-pr389-win32-bugfix.patch create mode 100644 scripts/docker/ascent_build/2024_08_01_caliper-win-smaller-opts.patch create mode 100644 scripts/docker/ascent_build/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch create mode 100644 scripts/docker/ascent_build/build_ascent.sh delete mode 100644 scripts/docker/build_ascent_cuda.sh diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 96f06b1a57ff..a134a54266b9 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -47,9 +47,9 @@ RUN mkdir /tmp/build-adios2 && cd /tmp/build-adios2 && \ RUN openPMD_USE_MPI=ON pip3 install openpmd-api --no-binary openpmd-api --break-system-packages -COPY build_ascent_cuda.sh /tmp/build-ascent/build_ascent_cuda.sh +COPY ascent_build /tmp/ascent_build -#RUN cd /tmp/build-ascent && \ -# bash build_ascent_cuda.sh && \ -# cd / && \ -# rm -rf /tmp/build-ascent +RUN cd /tmp/ascent_build && \ + bash build_ascent.sh && \ + cd / && \ + rm -rf /tmp/ascent_build diff --git a/scripts/docker/ascent_build/2023_01_30_raja.patch b/scripts/docker/ascent_build/2023_01_30_raja.patch new file mode 100644 index 
000000000000..7b1abcd6e0c2 --- /dev/null +++ b/scripts/docker/ascent_build/2023_01_30_raja.patch @@ -0,0 +1,36 @@ +From 9a50702cf835996f96cb33e2cb4c0aa1a7a86df5 Mon Sep 17 00:00:00 2001 +From: Cyrus Harrison +Date: Fri, 27 Jan 2023 15:49:35 -0800 +Subject: [PATCH] try new logic for windows shared exports + +--- + azure-pipelines.yml | 2 +- + include/RAJA/config.hpp.in | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/azure-pipelines.yml b/azure-pipelines.yml +index c84a71eb18..268ba4a660 100644 +--- a/azure-pipelines.yml ++++ b/azure-pipelines.yml +@@ -3,7 +3,7 @@ jobs: + strategy: + matrix: + shared: +- SHARED_ARGS: '-DBUILD_SHARED_LIBS=On -DCMAKE_CXX_FLAGS="/DRAJASHAREDDLL_EXPORTS" ' ++ SHARED_ARGS: '-DBUILD_SHARED_LIBS=On' + static: + SHARED_ARGS: '-DBUILD_SHARED_LIBS=Off' + pool: +diff --git a/include/RAJA/config.hpp.in b/include/RAJA/config.hpp.in +index 26b0b0dbde..0347650fc4 100644 +--- a/include/RAJA/config.hpp.in ++++ b/include/RAJA/config.hpp.in +@@ -364,7 +364,7 @@ const int DATA_ALIGN = @RAJA_DATA_ALIGN@; + // + + #if (defined(_WIN32) || defined(_WIN64)) && !defined(RAJA_WIN_STATIC_BUILD) +-#ifdef RAJASHAREDDLL_EXPORTS ++#ifdef RAJA_EXPORTS + #define RAJASHAREDDLL_API __declspec(dllexport) + #else + #define RAJASHAREDDLL_API __declspec(dllimport) diff --git a/scripts/docker/ascent_build/2023_12_06_vtkm-mr3160-rocthrust-fix.patch b/scripts/docker/ascent_build/2023_12_06_vtkm-mr3160-rocthrust-fix.patch new file mode 100644 index 000000000000..174347033b3f --- /dev/null +++ b/scripts/docker/ascent_build/2023_12_06_vtkm-mr3160-rocthrust-fix.patch @@ -0,0 +1,74 @@ +From c9ec6ae6a62b9bd257e727e999987ef31384e3ac Mon Sep 17 00:00:00 2001 +From: Vicente Adolfo Bolea Sanchez +Date: Thu, 30 Nov 2023 15:55:32 -0500 +Subject: [PATCH] kokkos: let link vtkm_cont to roc::rocthrust + +Also reorder the declarion of the option VTKm_ENABLE_KOKKOS_THRUST +to be set before calling VTKmDeviceAdapters. 
+--- + CMake/VTKmDeviceAdapters.cmake | 5 +---- + CMakeLists.txt | 10 +++++----- + vtkm/cont/kokkos/internal/CMakeLists.txt | 3 +++ + 3 files changed, 9 insertions(+), 9 deletions(-) + +diff --git a/CMake/VTKmDeviceAdapters.cmake b/CMake/VTKmDeviceAdapters.cmake +index fb13d0bf85..7b8bf2df9b 100644 +--- a/CMake/VTKmDeviceAdapters.cmake ++++ b/CMake/VTKmDeviceAdapters.cmake +@@ -360,10 +360,7 @@ if(VTKm_ENABLE_KOKKOS AND NOT TARGET vtkm_kokkos) + + # Make sure rocthrust is available if requested + if(VTKm_ENABLE_KOKKOS_THRUST) +- find_package(rocthrust) +- if(NOT rocthrust_FOUND) +- message(FATAL_ERROR "rocthrust not found. Please set VTKm_ENABLE_KOKKOS_THRUST to OFF.") +- endif() ++ find_package(rocthrust REQUIRED CONFIG) + endif() + endif() + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 39a9b3bc09..d8204114c7 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -191,6 +191,11 @@ vtkm_option(VTKm_OVERRIDE_CTEST_TIMEOUT "Disable default ctest timeout" OFF) + # VTKm_ENABLE_MPI=ON. + cmake_dependent_option(VTKm_ENABLE_GPU_MPI "Enable GPU AWARE MPI support" OFF "VTKm_ENABLE_MPI" OFF) + ++# By default: Set VTKm_ENABLE_KOKKOS_THRUST to ON if VTKm_ENABLE_KOKKOS is ON, otherwise ++# disable it (or if the user explicitly turns this option OFF) ++cmake_dependent_option(VTKm_ENABLE_KOKKOS_THRUST "Enable Kokkos thrust support (only valid with CUDA and HIP)" ++ ON "VTKm_ENABLE_KOKKOS;Kokkos_ENABLE_CUDA OR Kokkos_ENABLE_HIP" OFF) ++ + mark_as_advanced( + VTKm_ENABLE_LOGGING + VTKm_NO_ASSERT +@@ -232,11 +237,6 @@ include(VTKmBuildType) + # Include the vtk-m wrappers + include(VTKmWrappers) + +-# By default: Set VTKm_ENABLE_KOKKOS_THRUST to ON if VTKm_ENABLE_KOKKOS is ON, otherwise +-# disable it (or if the user explicitly turns this option OFF) +-cmake_dependent_option(VTKm_ENABLE_KOKKOS_THRUST "Enable Kokkos thrust support (only valid with CUDA and HIP)" +- ON "VTKm_ENABLE_KOKKOS;Kokkos_ENABLE_CUDA OR Kokkos_ENABLE_HIP" OFF) +- + # Create vtkm_compiler_flags library. 
This is an interface library that + # holds all the C++ compiler flags that are needed for consumers and + # when building VTK-m. +diff --git a/vtkm/cont/kokkos/internal/CMakeLists.txt b/vtkm/cont/kokkos/internal/CMakeLists.txt +index 9f924b0f4b..9b731c9fdd 100644 +--- a/vtkm/cont/kokkos/internal/CMakeLists.txt ++++ b/vtkm/cont/kokkos/internal/CMakeLists.txt +@@ -34,6 +34,9 @@ if (TARGET vtkm_kokkos) + elseif(TARGET vtkm_kokkos_hip) + set_source_files_properties(${sources} TARGET_DIRECTORY vtkm_cont PROPERTIES LANGUAGE HIP) + kokkos_compilation(SOURCE ${sources}) ++ if (VTKm_ENABLE_KOKKOS_THRUST) ++ target_link_libraries(vtkm_cont INTERFACE roc::rocthrust) ++ endif() + endif() + + else() +-- +2.35.3 + diff --git a/scripts/docker/ascent_build/2024_05_03_vtkm-mr3215-ext-geom-fix.patch b/scripts/docker/ascent_build/2024_05_03_vtkm-mr3215-ext-geom-fix.patch new file mode 100644 index 000000000000..ed82e5630e01 --- /dev/null +++ b/scripts/docker/ascent_build/2024_05_03_vtkm-mr3215-ext-geom-fix.patch @@ -0,0 +1,145 @@ +From 49518e5054c607942f644c82a5289e12b0f50476 Mon Sep 17 00:00:00 2001 +From: Kenneth Moreland +Date: Fri, 3 May 2024 09:22:56 -0400 +Subject: [PATCH] Fix bug with ExtractGeometry filter + +The `ExtractGeometry` filter was outputing datasets containing +`CellSetPermutation` as the representation for the cells. Although this is +technically correct and a very fast implementation, it is essentially +useless. The problem is that any downstream processing will have to know +that the data has a `CellSetPermutation`. None do (because the permutation +can be on any other cell set type, which creates an explosion of possible +cell types). + +Like was done with `Threshold` a while ago, this problem is fixed by deep +copying the result into a `CellSetExplicit`. This behavior is consistent +with VTK. 
+--- + .../changelog/extract-geometry-permutation.md | 13 +++++++ + .../testing/UnitTestExtractGeometryFilter.cxx | 13 ++++++- + .../worklet/ExtractGeometry.h | 34 +++++++------------ + 3 files changed, 37 insertions(+), 23 deletions(-) + create mode 100644 docs/changelog/extract-geometry-permutation.md + +diff --git a/docs/changelog/extract-geometry-permutation.md b/docs/changelog/extract-geometry-permutation.md +new file mode 100644 +index 0000000000..8a90495f76 +--- /dev/null ++++ b/docs/changelog/extract-geometry-permutation.md +@@ -0,0 +1,13 @@ ++# Fix bug with ExtractGeometry filter ++ ++The `ExtractGeometry` filter was outputing datasets containing ++`CellSetPermutation` as the representation for the cells. Although this is ++technically correct and a very fast implementation, it is essentially ++useless. The problem is that any downstream processing will have to know ++that the data has a `CellSetPermutation`. None do (because the permutation ++can be on any other cell set type, which creates an explosion of possible ++cell types). ++ ++Like was done with `Threshold` a while ago, this problem is fixed by deep ++copying the result into a `CellSetExplicit`. This behavior is consistent ++with VTK. 
+diff --git a/vtkm/filter/entity_extraction/testing/UnitTestExtractGeometryFilter.cxx b/vtkm/filter/entity_extraction/testing/UnitTestExtractGeometryFilter.cxx +index 675df8f77c..14de333666 100644 +--- a/vtkm/filter/entity_extraction/testing/UnitTestExtractGeometryFilter.cxx ++++ b/vtkm/filter/entity_extraction/testing/UnitTestExtractGeometryFilter.cxx +@@ -11,6 +11,7 @@ + #include + #include + ++#include + #include + + using vtkm::cont::testing::MakeTestDataSet; +@@ -41,11 +42,21 @@ public: + vtkm::cont::DataSet output = extractGeometry.Execute(dataset); + VTKM_TEST_ASSERT(test_equal(output.GetNumberOfCells(), 8), "Wrong result for ExtractGeometry"); + ++ vtkm::filter::clean_grid::CleanGrid cleanGrid; ++ cleanGrid.SetCompactPointFields(true); ++ cleanGrid.SetMergePoints(false); ++ vtkm::cont::DataSet cleanOutput = cleanGrid.Execute(output); ++ + vtkm::cont::ArrayHandle outCellData; +- output.GetField("cellvar").GetData().AsArrayHandle(outCellData); ++ cleanOutput.GetField("cellvar").GetData().AsArrayHandle(outCellData); + + VTKM_TEST_ASSERT(outCellData.ReadPortal().Get(0) == 21.f, "Wrong cell field data"); + VTKM_TEST_ASSERT(outCellData.ReadPortal().Get(7) == 42.f, "Wrong cell field data"); ++ ++ vtkm::cont::ArrayHandle outPointData; ++ cleanOutput.GetField("pointvar").GetData().AsArrayHandle(outPointData); ++ VTKM_TEST_ASSERT(outPointData.ReadPortal().Get(0) == 99); ++ VTKM_TEST_ASSERT(outPointData.ReadPortal().Get(7) == 90); + } + + static void TestUniformByBox1() +diff --git a/vtkm/filter/entity_extraction/worklet/ExtractGeometry.h b/vtkm/filter/entity_extraction/worklet/ExtractGeometry.h +index 97521335f2..449d7eae60 100644 +--- a/vtkm/filter/entity_extraction/worklet/ExtractGeometry.h ++++ b/vtkm/filter/entity_extraction/worklet/ExtractGeometry.h +@@ -10,11 +10,13 @@ + #ifndef vtkm_m_worklet_ExtractGeometry_h + #define vtkm_m_worklet_ExtractGeometry_h + ++#include + #include + + #include + #include + #include ++#include + #include + #include + #include +@@ 
-114,28 +116,13 @@ public: + } + }; + +- //////////////////////////////////////////////////////////////////////////////////// +- // Extract cells by ids permutes input data +- template +- vtkm::cont::CellSetPermutation Run(const CellSetType& cellSet, +- const vtkm::cont::ArrayHandle& cellIds) +- { +- using OutputType = vtkm::cont::CellSetPermutation; +- +- vtkm::cont::ArrayCopy(cellIds, this->ValidCellIds); +- +- return OutputType(this->ValidCellIds, cellSet); +- } +- +- //////////////////////////////////////////////////////////////////////////////////// +- // Extract cells by implicit function permutes input data + template +- vtkm::cont::CellSetPermutation Run(const CellSetType& cellSet, +- const vtkm::cont::CoordinateSystem& coordinates, +- const ImplicitFunction& implicitFunction, +- bool extractInside, +- bool extractBoundaryCells, +- bool extractOnlyBoundaryCells) ++ vtkm::cont::CellSetExplicit<> Run(const CellSetType& cellSet, ++ const vtkm::cont::CoordinateSystem& coordinates, ++ const ImplicitFunction& implicitFunction, ++ bool extractInside, ++ bool extractBoundaryCells, ++ bool extractOnlyBoundaryCells) + { + // Worklet output will be a boolean passFlag array + vtkm::cont::ArrayHandle passFlags; +@@ -149,7 +136,10 @@ public: + vtkm::cont::Algorithm::CopyIf(indices, passFlags, this->ValidCellIds); + + // generate the cellset +- return vtkm::cont::CellSetPermutation(this->ValidCellIds, cellSet); ++ vtkm::cont::CellSetPermutation permutedCellSet(this->ValidCellIds, cellSet); ++ ++ vtkm::cont::CellSetExplicit<> outputCells; ++ return vtkm::worklet::CellDeepCopy::Run(permutedCellSet); + } + + vtkm::cont::ArrayHandle GetValidCellIds() const { return this->ValidCellIds; } +-- +GitLab + diff --git a/scripts/docker/ascent_build/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch b/scripts/docker/ascent_build/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch new file mode 100644 index 000000000000..db4f075cd148 --- /dev/null +++ 
b/scripts/docker/ascent_build/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch @@ -0,0 +1,43 @@ +From 763f13306b719bf6a213d00ead13fc93433e942e Mon Sep 17 00:00:00 2001 +From: Cyrus Harrison +Date: Tue, 2 Jul 2024 10:28:43 -0700 +Subject: [PATCH] fix bug with ray subsetting using wrong near and far planes + +--- + vtkm/rendering/raytracing/Camera.cxx | 12 ++++-------- + 1 file changed, 4 insertions(+), 8 deletions(-) + +diff --git a/vtkm/rendering/raytracing/Camera.cxx b/vtkm/rendering/raytracing/Camera.cxx +index f2a39bef9..10febf39f 100644 +--- a/vtkm/rendering/raytracing/Camera.cxx ++++ b/vtkm/rendering/raytracing/Camera.cxx +@@ -830,6 +830,7 @@ void Camera::FindSubset(const vtkm::Bounds& bounds) + transformed[2] = (transformed[2] * 0.5f + 0.5f); + zmin = vtkm::Min(zmin, transformed[2]); + zmax = vtkm::Max(zmax, transformed[2]); ++ // skip if outside near and far clipping + if (transformed[2] < 0 || transformed[2] > 1) + { + continue; +@@ -894,15 +895,10 @@ VTKM_CONT void Camera::UpdateDimensions(Ray& rays, + this->CameraView.SetLookAt(this->GetLookAt()); + this->CameraView.SetPosition(this->GetPosition()); + this->CameraView.SetViewUp(this->GetUp()); +- // +- // Just create come clipping range, we ignore the zmax value in subsetting +- // +- vtkm::Float64 maxDim = vtkm::Max( +- boundingBox.X.Max - boundingBox.X.Min, +- vtkm::Max(boundingBox.Y.Max - boundingBox.Y.Min, boundingBox.Z.Max - boundingBox.Z.Min)); + +- maxDim *= 100; +- this->CameraView.SetClippingRange(.0001, maxDim); ++ // Note: ++ // Use clipping range provided, the subsetting does take into consideration ++ // the near and far clipping planes. 
+ + //Update our ViewProjection matrix + this->ViewProjectionMat = +-- +2.39.3 (Apple Git-145) + diff --git a/scripts/docker/ascent_build/2024_07_25_silo_4_11_cmake_fix.patch b/scripts/docker/ascent_build/2024_07_25_silo_4_11_cmake_fix.patch new file mode 100644 index 000000000000..6833fffa5080 --- /dev/null +++ b/scripts/docker/ascent_build/2024_07_25_silo_4_11_cmake_fix.patch @@ -0,0 +1,23 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 0dc4a5b..fd6baaf 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -57,7 +57,9 @@ cmake_minimum_required(VERSION 3.12 FATAL_ERROR) + ### + # grab the version string + ### +-file(STRINGS ${CMAKE_CURRENT_SOURCE_DIR}/VERSION SILO_VERSION) ++file(STRINGS ${CMAKE_CURRENT_SOURCE_DIR}/SILO_VERSION SILO_VERSION) ++# Strip suffix ++string(REGEX REPLACE "-.*" "" SILO_VERSION "${SILO_VERSION}") + + ###----------------------------------------------------------------------------- + # project command will automatically create cmake vars for major, minor, +@@ -139,7 +141,9 @@ CMAKE_DEPENDENT_OPTION(SILO_ENABLE_HZIP "Enable Lindstrom hex/quad mesh compress + ## + # Set up a default INSTALL prefix that is peer to the build directory + ## +-set(CMAKE_INSTALL_PREFIX ${Silo_BINARY_DIR}/../SiloInstall CACHE PATH "install prefix" FORCE) ++if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) ++ set(CMAKE_INSTALL_PREFIX ${Silo_BINARY_DIR}/../SiloInstall CACHE PATH "install prefix" FORCE) ++endif() diff --git a/scripts/docker/ascent_build/2024_07_29_silo-pr389-win32-bugfix.patch b/scripts/docker/ascent_build/2024_07_29_silo-pr389-win32-bugfix.patch new file mode 100644 index 000000000000..3d62a99781cc --- /dev/null +++ b/scripts/docker/ascent_build/2024_07_29_silo-pr389-win32-bugfix.patch @@ -0,0 +1,10 @@ +diff --git a/src/silo/silo_win32_compatibility.h b/src/silo/silo_win32_compatibility.h +index bc4d38f..00b970c 100644 +--- a/src/silo/silo_win32_compatibility.h ++++ b/src/silo/silo_win32_compatibility.h +@@ -1,4 +1,4 @@ +-#ifdef WIN32 ++#ifdef 
_WIN32 + #ifndef SILO_WIN32_COMPATIBILITY + #define SILO_WIN32_COMPATIBILITY + #include /* Include Windows IO * diff --git a/scripts/docker/ascent_build/2024_08_01_caliper-win-smaller-opts.patch b/scripts/docker/ascent_build/2024_08_01_caliper-win-smaller-opts.patch new file mode 100644 index 000000000000..2e47817d94ea --- /dev/null +++ b/scripts/docker/ascent_build/2024_08_01_caliper-win-smaller-opts.patch @@ -0,0 +1,1102 @@ +diff --git a/src/caliper/controllers/controllers.cpp b/src/caliper/controllers/controllers.cpp +index 787896c..4e2a44d 100644 +--- a/src/caliper/controllers/controllers.cpp ++++ b/src/caliper/controllers/controllers.cpp +@@ -223,947 +223,159 @@ const ConfigManager::ConfigInfo* builtin_controllers_table[] = { + nullptr + }; + ++/// NOTE: SMALLER SET OF BUILD OPTIONS FOR WINDOWS! ++/// Windows does not like long string literals, and ++/// caliper supports less services on windows. ++ + const char* builtin_option_specs = R"json( ++[ ++{ ++ "name" : "level", ++ "type" : "string", ++ "description" : "Minimum region level that triggers snapshots", ++ "category" : "event", ++ "config" : { "CALI_EVENT_REGION_LEVEL": "{}" } ++}, ++{ ++ "name" : "include_branches", ++ "type" : "string", ++ "description" : "Only take snapshots for branches with the given region names.", ++ "category" : "event", ++ "config" : { "CALI_EVENT_INCLUDE_BRANCHES": "{}" } ++}, ++{ ++ "name" : "include_regions", ++ "type" : "string", ++ "description" : "Only take snapshots for the given region names/patterns.", ++ "category" : "event", ++ "config" : { "CALI_EVENT_INCLUDE_REGIONS": "{}" } ++}, ++{ ++ "name" : "exclude_regions", ++ "type" : "string", ++ "description" : "Do not take snapshots for the given region names/patterns.", ++ "category" : "event", ++ "config" : { "CALI_EVENT_EXCLUDE_REGIONS": "{}" } ++}, ++{ ++ "name" : "region.count", ++ "description" : "Report number of begin/end region instances", ++ "type" : "bool", ++ "category" : "metric", ++ "query" : ++ [ ++ { 
"level" : "local", ++ "let" : [ "rc.count=first(sum#region.count,region.count)" ], ++ "select" : [ "sum(rc.count) as Calls unit count" ] ++ }, ++ { "level" : "cross", "select": + [ +- { +- "name" : "profile.mpi", +- "type" : "bool", +- "description" : "Profile MPI functions", +- "category" : "region", +- "services" : [ "mpi" ], +- "config": { "CALI_MPI_BLACKLIST": "MPI_Comm_rank,MPI_Comm_size,MPI_Wtick,MPI_Wtime" } +- }, +- { +- "name" : "profile.cuda", +- "type" : "bool", +- "description" : "Profile CUDA API functions", +- "category" : "region", +- "services" : [ "cupti" ] +- }, +- { +- "name" : "profile.hip", +- "type" : "bool", +- "description" : "Profile HIP API functions", +- "category" : "region", +- "services" : [ "roctracer" ], +- "config" : { "CALI_ROCTRACER_TRACE_ACTIVITIES": "false" } +- }, +- { +- "name" : "profile.kokkos", +- "type" : "bool", +- "description" : "Profile Kokkos functions", +- "category" : "region", +- "services" : [ "kokkostime" ] +- }, +- { +- "name" : "main_thread_only", +- "type" : "bool", +- "description" : "Only include measurements from the main thread in results.", +- "category" : "region", +- "services" : [ "pthread" ], +- "query" : +- [ +- { "level" : "local", +- "where" : "pthread.is_master=true" +- } +- ] +- }, +- { +- "name" : "level", +- "type" : "string", +- "description" : "Minimum region level that triggers snapshots", +- "category" : "event", +- "config" : { "CALI_EVENT_REGION_LEVEL": "{}" } +- }, +- { +- "name" : "include_branches", +- "type" : "string", +- "description" : "Only take snapshots for branches with the given region names.", +- "category" : "event", +- "config" : { "CALI_EVENT_INCLUDE_BRANCHES": "{}" } +- }, +- { +- "name" : "include_regions", +- "type" : "string", +- "description" : "Only take snapshots for the given region names/patterns.", +- "category" : "event", +- "config" : { "CALI_EVENT_INCLUDE_REGIONS": "{}" } +- }, +- { +- "name" : "exclude_regions", +- "type" : "string", +- "description" : "Do 
not take snapshots for the given region names/patterns.", +- "category" : "event", +- "config" : { "CALI_EVENT_EXCLUDE_REGIONS": "{}" } +- }, +- { +- "name" : "mpi.include", +- "type" : "string", +- "description" : "Only instrument these MPI functions.", +- "category" : "region", +- "config" : { "CALI_MPI_WHITELIST": "{}" } +- }, +- { +- "name" : "mpi.exclude", +- "type" : "string", +- "description" : "Do not instrument these MPI functions.", +- "category" : "region", +- "config" : { "CALI_MPI_BLACKLIST": "{}" } +- }, +- { +- "name" : "region.count", +- "description" : "Report number of begin/end region instances", +- "type" : "bool", +- "category" : "metric", +- "query" : +- [ +- { "level" : "local", +- "let" : [ "rc.count=first(sum#region.count,region.count)" ], +- "select" : [ "sum(rc.count) as Calls unit count" ] +- }, +- { "level" : "cross", "select": +- [ +- "min(sum#rc.count) as \"Calls/rank (min)\" unit count", +- "avg(sum#rc.count) as \"Calls/rank (avg)\" unit count", +- "max(sum#rc.count) as \"Calls/rank (max)\" unit count", +- "sum(sum#rc.count) as \"Calls/rank (total)\" unit count" +- ] +- } +- ] +- }, +- { +- "name" : "region.stats", +- "description" : "Detailed region timing statistics (min/max/avg time per visit)", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "timer", "event" ], +- "config" : +- { +- "CALI_TIMER_INCLUSIVE_DURATION" : "true", +- "CALI_EVENT_ENABLE_SNAPSHOT_INFO" : "true" +- }, +- "query" : +- [ +- { "level" : "local", +- "let" : +- [ +- "rs.count=first(sum#region.count,region.count)", +- "rs.min=scale(min#time.inclusive.duration.ns,1e-9)", +- "rs.max=scale(max#time.inclusive.duration.ns,1e-9)", +- "rs.sum=scale(sum#time.inclusive.duration.ns,1e-9)" +- ], +- "aggregate": +- [ +- "sum(rs.sum)" +- ], +- "select" : +- [ +- "sum(rs.count) as Visits unit count", +- "min(rs.min) as \"Min time/visit\" unit sec", +- "ratio(rs.sum,rs.count) as \"Avg time/visit\" unit sec", +- "max(rs.max) as \"Max time/visit\" unit sec" +- ] 
+- }, +- { "level" : "cross", "select": +- [ +- "sum(sum#rs.count) as Visits unit count", +- "min(min#rs.min) as \"Min time/visit\" unit sec", +- "ratio(sum#rs.sum,sum#rs.count) as \"Avg time/visit\" unit sec", +- "max(max#rs.max) as \"Max time/visit\" unit sec" +- ] +- } +- ] +- }, +- { +- "name" : "node.order", +- "description" : "Report order in which regions first appeared", +- "type" : "bool", +- "category" : "metric", +- "query" : +- [ +- { "level" : "local", +- "select" : [ "min(aggregate.slot) as \"Node order\"" ] +- }, +- { "level" : "cross", +- "select" : [ "min(min#aggregate.slot) as \"Node order\"" ] +- } +- ] +- }, +- { +- "name" : "source.module", +- "type" : "bool", +- "category" : "sampling", +- "description" : "Report source module (.so/.exe)", +- "services" : [ "symbollookup" ], +- "config" : { "CALI_SYMBOLLOOKUP_LOOKUP_MODULE": "true" }, +- "query": +- [ +- { "level": "local", "group by": "module#cali.sampler.pc", +- "select": [ "module#cali.sampler.pc as \"Module\"" ] +- }, +- { "level": "cross", "group by": "module#cali.sampler.pc", +- "select": [ "module#cali.sampler.pc as \"Module\"" ] +- } +- ] +- }, +- { +- "name" : "source.function", +- "type" : "bool", +- "category" : "sampling", +- "description" : "Report source function symbol names", +- "services" : [ "symbollookup" ], +- "config" : { "CALI_SYMBOLLOOKUP_LOOKUP_FUNCTION": "true" }, +- "query": +- [ +- { "level": "local", "group by": "source.function#cali.sampler.pc", +- "select": [ "source.function#cali.sampler.pc as \"Function\"" ] +- }, +- { "level": "cross", "group by": "source.function#cali.sampler.pc", +- "select": [ "source.function#cali.sampler.pc as \"Function\"" ] +- } +- ] +- }, +- { +- "name" : "source.location", +- "type" : "bool", +- "category" : "sampling", +- "description" : "Report source location (file+line)", +- "services" : [ "symbollookup" ], +- "config" : { "CALI_SYMBOLLOOKUP_LOOKUP_SOURCELOC": "true" }, +- "query": +- [ +- { "level": "local", "group by": 
"sourceloc#cali.sampler.pc", +- "select": [ "sourceloc#cali.sampler.pc as \"Source\"" ] +- }, +- { "level": "cross", "group by": "sourceloc#cali.sampler.pc", +- "select": [ "sourceloc#cali.sampler.pc as \"Source\"" ] +- } +- ] +- }, +- { +- "name" : "cuda.memcpy", +- "description" : "Report MB copied between host and device with cudaMemcpy", +- "type" : "bool", +- "category" : "cuptitrace.metric", +- "query" : +- [ +- { "level" : "local", +- "let" : +- [ +- "cuda.memcpy.dtoh=scale(cupti.memcpy.bytes,1e-6) if cupti.memcpy.kind=DtoH", +- "cuda.memcpy.htod=scale(cupti.memcpy.bytes,1e-6) if cupti.memcpy.kind=HtoD" +- ], +- "select" : +- [ +- "sum(cuda.memcpy.htod) as \"Copy CPU->GPU\" unit MB", +- "sum(cuda.memcpy.dtoh) as \"Copy GPU->CPU\" unit MB" +- ] +- }, +- { "level" : "cross", "select": +- [ +- "avg(sum#cuda.memcpy.htod) as \"Copy CPU->GPU (avg)\" unit MB", +- "max(sum#cuda.memcpy.htod) as \"Copy CPU->GPU (max)\" unit MB", +- "avg(sum#cuda.memcpy.dtoh) as \"Copy GPU->CPU (avg)\" unit MB", +- "max(sum#cuda.memcpy.dtoh) as \"Copy GPU->CPU (max)\" unit MB" +- ] +- } +- ] +- }, +- { +- "name" : "cuda.gputime", +- "description" : "Report GPU time in CUDA activities", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "cuptitrace" ], +- "query" : +- [ +- { "level" : "local", +- "select" : +- [ +- "inclusive_scale(cupti.activity.duration,1e-9) as \"GPU time (I)\" unit sec", +- ] +- }, +- { "level" : "cross", "select": +- [ +- "avg(iscale#cupti.activity.duration) as \"Avg GPU time/rank\" unit sec", +- "min(iscale#cupti.activity.duration) as \"Min GPU time/rank\" unit sec", +- "max(iscale#cupti.activity.duration) as \"Max GPU time/rank\" unit sec", +- "sum(iscale#cupti.activity.duration) as \"Total GPU time\" unit sec" +- ] +- } +- ] +- }, +- { +- "name" : "rocm.gputime", +- "description" : "Report GPU time in AMD ROCm activities", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "roctracer" ], +- "config" : { 
"CALI_ROCTRACER_TRACE_ACTIVITIES": "true", "CALI_ROCTRACER_RECORD_KERNEL_NAMES": "false" }, +- "query" : +- [ +- { "level" : "local", +- "select" : +- [ +- "inclusive_scale(sum#rocm.activity.duration,1e-9) as \"GPU time (I)\" unit sec" +- ] +- }, +- { "level" : "cross", +- "select" : +- [ +- "avg(iscale#sum#rocm.activity.duration) as \"Avg GPU time/rank\" unit sec", +- "min(iscale#sum#rocm.activity.duration) as \"Min GPU time/rank\" unit sec", +- "max(iscale#sum#rocm.activity.duration) as \"Max GPU time/rank\" unit sec", +- "sum(iscale#sum#rocm.activity.duration) as \"Total GPU time\" unit sec" +- ] +- } +- ] +- }, +- { +- "name" : "mpi.message.size", +- "description": "MPI message size", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "mpi" ], +- "config" : { "CALI_MPI_MSG_TRACING": "true", "CALI_MPI_BLACKLIST": "MPI_Wtime,MPI_Comm_rank,MPI_Comm_size" }, +- "query" : +- [ +- { "level" : "local", +- "let" : [ +- "mpimsg.min=first(min#mpi.msg.size,mpi.msg.size)", +- "mpimsg.avg=first(avg#mpi.msg.size,mpi.msg.size)", +- "mpimsg.max=first(max#mpi.msg.size,mpi.msg.size)" +- ], +- "select" : [ +- "min(mpimsg.min) as \"Msg size (min)\" unit Byte", +- "avg(mpimsg.avg) as \"Msg size (avg)\" unit Byte", +- "max(mpimsg.max) as \"Msg size (max)\" unit Byte" +- ] +- }, +- { "level" : "cross", +- "select" : [ +- "min(min#mpimsg.min) as \"Msg size (min)\" unit Byte", +- "avg(avg#mpimsg.avg) as \"Msg size (avg)\" unit Byte", +- "max(max#mpimsg.max) as \"Msg size (max)\" unit Byte" +- ] +- } +- ] +- }, +- { +- "name" : "mpi.message.count", +- "description": "Number of MPI send/recv/collective operations", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "mpi" ], +- "config" : { "CALI_MPI_MSG_TRACING": "true", "CALI_MPI_BLACKLIST": "MPI_Wtime,MPI_Comm_rank,MPI_Comm_size" }, +- "query" : +- [ +- { "level" : "local", +- "let" : [ +- "mpicount.recv=first(sum#mpi.recv.count,mpi.recv.count)", +- "mpicount.send=first(sum#mpi.send.count,mpi.send.count)", +- 
"mpicount.coll=first(sum#mpi.coll.count,mpi.coll.count)" +- ], +- "select" : [ +- "sum(mpicount.send) as \"Msgs sent\" unit count", +- "sum(mpicount.recv) as \"Msgs recvd\" unit count", +- "sum(mpicount.coll) as \"Collectives\" unit count" +- ] +- }, +- { "level" : "cross", +- "select" : [ +- "avg(sum#mpicount.send) as \"Msgs sent (avg)\" unit count", +- "max(sum#mpicount.send) as \"Msgs sent (max)\" unit count", +- "avg(sum#mpicount.recv) as \"Msgs recvd (avg)\" unit count", +- "max(sum#mpicount.recv) as \"Msgs recvd (max)\" unit count", +- "max(sum#mpicount.coll) as \"Collectives (max)\" unit count" +- ] +- } +- ] +- }, +- { +- "name" : "openmp.times", +- "description" : "Report time spent in OpenMP work and barrier regions", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "ompt", "timestamp" ], +- "query" : +- [ +- { "level" : "local", +- "let" : +- [ +- "t.omp.ns=first(sum#time.duration.ns,time.duration.ns)", +- "t.omp.work=scale(t.omp.ns,1e-9) if omp.work", +- "t.omp.sync=scale(t.omp.ns,1e-9) if omp.sync", +- "t.omp.total=first(t.omp.work,t.omp.sync)" +- ], +- "select" : +- [ "sum(t.omp.work) as \"Time (work)\" unit sec", +- "sum(t.omp.sync) as \"Time (barrier)\" unit sec" +- ] +- }, +- { "level" : "cross", "select": +- [ "avg(sum#t.omp.work) as \"Time (work) (avg)\" unit sec", +- "avg(sum#t.omp.sync) as \"Time (barrier) (avg)\" unit sec", +- "sum(sum#t.omp.work) as \"Time (work) (total)\" unit sec", +- "sum(sum#t.omp.sync) as \"Time (barrier) (total)\" unit sec" +- ] +- } +- ] +- }, +- { +- "name" : "openmp.efficiency", +- "description" : "Compute OpenMP efficiency metrics", +- "type" : "bool", +- "category" : "metric", +- "inherit" : [ "openmp.times" ], +- "query" : +- [ +- { "level" : "local", +- "select" : +- [ "inclusive_ratio(t.omp.work,t.omp.total,100.0) as \"Work %\" unit percent", +- "inclusive_ratio(t.omp.sync,t.omp.total,100.0) as \"Barrier %\" unit percent" +- ] +- }, +- { "level" : "cross", "select": +- [ 
"min(iratio#t.omp.work/t.omp.total) as \"Work % (min)\" unit percent", +- "avg(iratio#t.omp.work/t.omp.total) as \"Work % (avg)\" unit percent", +- "avg(iratio#t.omp.sync/t.omp.total) as \"Barrier % (avg)\" unit percent", +- "max(iratio#t.omp.sync/t.omp.total) as \"Barrier % (max)\" unit percent" +- ] +- } +- ] +- }, +- { +- "name" : "openmp.threads", +- "description" : "Show OpenMP threads", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "ompt" ], +- "query" : +- [ +- { "level" : "local", +- "let" : [ "n.omp.threads=first(omp.num.threads)" ], +- "group by": "omp.thread.id,omp.thread.type", +- "select" : +- [ "max(n.omp.threads) as \"#Threads\"", +- "omp.thread.id as \"Thread\"" +- ] +- }, +- { "level" : "cross", +- "group by": "omp.thread.id,omp.thread.type", +- "select" : +- [ "max(max#n.omp.threads) as \"#Threads\"", +- "omp.thread.id as Thread" +- ] +- } +- ] +- }, +- { +- "name" : "io.bytes.written", +- "description" : "Report I/O bytes written", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "io" ], +- "query" : +- [ +- { "level" : "local", +- "let" : [ "ibw.bytes.written=first(sum#io.bytes.written,io.bytes.written)" ], +- "select" : [ "sum(ibw.bytes.written) as \"Bytes written\" unit Byte" ] +- }, +- { "level" : "cross", "select": +- [ "avg(sum#ibw.bytes.written) as \"Avg written\" unit Byte", +- "sum(sum#ibw.bytes.written) as \"Total written\" unit Byte" +- ] +- } +- ] +- }, +- { +- "name" : "io.bytes.read", +- "description" : "Report I/O bytes read", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "io" ], +- "query" : +- [ +- { "level" : "local", +- "let" : [ "ibr.bytes.read=first(sum#io.bytes.read,io.bytes.read)" ], +- "select" : [ "sum(ibr.bytes.read) as \"Bytes read\" unit Byte" ] +- }, +- { "level" : "cross", "select": +- [ "avg(sum#ibr.bytes.read) as \"Avg read\" unit Byte", +- "sum(sum#ibr.bytes.read) as \"Total read\" unit Byte" +- ] +- } +- ] +- }, +- { +- "name" : "io.bytes", +- "description" : 
"Report I/O bytes written and read", +- "type" : "bool", +- "category" : "metric", +- "inherit" : [ "io.bytes.read", "io.bytes.written" ] +- }, +- { +- "name" : "io.read.bandwidth", +- "description" : "Report I/O read bandwidth", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "io" ], +- "query" : +- [ +- { "level" : "local", +- "group by" : "io.region", +- "let" : +- [ +- "irb.bytes.read=first(sum#io.bytes.read,io.bytes.read)", +- "irb.time.ns=first(sum#time.duration.ns,time.duration.ns)" +- ], +- "select" : +- [ +- "io.region as I/O", +- "ratio(irb.bytes.read,irb.time.ns,8e3) as \"Read Mbit/s\" unit Mb/s" +- ] +- }, +- { "level": "cross", "select": +- [ +- "avg(ratio#irb.bytes_read/irb.time.ns) as \"Avg read Mbit/s\" unit Mb/s", +- "max(ratio#irb.bytes_read/irb.time.ns) as \"Max read Mbit/s\" unit Mb/s" +- ] +- } +- ] +- }, +- { +- "name" : "io.write.bandwidth", +- "description" : "Report I/O write bandwidth", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "io" ], +- "query" : +- [ +- { "level" : "local", +- "group by" : "io.region", +- "let" : +- [ +- "iwb.bytes.written=first(sum#io.bytes.written,io.bytes.written)", +- "iwb.time.ns=first(sum#time.duration.ns,time.duration.ns)" +- ], +- "select" : +- [ +- "io.region as I/O", +- "ratio(iwb.bytes.written,iwb.time.ns,8e3) as \"Write Mbit/s\" unit Mb/s" +- ] +- }, +- { "level": "cross", "select": +- [ +- "avg(ratio#iwb.bytes.written/iwb.time) as \"Avg write Mbit/s\" unit Mb/s", +- "max(ratio#iwb.bytes.written/iwb.time) as \"Max write Mbit/s\" unit Mb/s" +- ] +- } +- ] +- }, +- { +- "name" : "umpire.totals", +- "description" : "Report umpire allocation statistics (all allocators combined)", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "umpire" ], +- "config" : { "CALI_UMPIRE_PER_ALLOCATOR_STATISTICS": "false" }, +- "query" : +- [ +- { "level" : "local", +- "let" : +- [ "umpt.size.bytes=first(max#umpire.total.size,umpire.total.size)", +- 
"umpt.count=first(max#umpire.total.count,umpire.total.count)", +- "umpt.hwm.bytes=first(max#umpire.total.hwm,umpire.total.hwm)", +- "umpt.size=scale(umpt.size.bytes,1e-6)", +- "umpt.hwm=scale(umpt.hwm.bytes,1e-6)" +- ], +- "select" : +- [ "inclusive_max(umpt.size) as \"Ump MB (Total)\" unit MB", +- "inclusive_max(umpt.count) as \"Ump allocs (Total)\"", +- "inclusive_max(umpt.hwm) as \"Ump HWM (Total)\"" +- ] +- }, +- { "level" : "cross", +- "select" : +- [ "avg(imax#umpt.size) as \"Ump MB (avg)\" unit MB", +- "max(imax#umpt.size) as \"Ump MB (max)\" unit MB", +- "avg(imax#umpt.count) as \"Ump allocs (avg)\"", +- "max(imax#umpt.count) as \"Ump allocs (max)\"", +- "max(imax#umpt.hwm) as \"Ump HWM (max)\"" +- ] +- } +- ] +- }, +- { +- "name" : "umpire.allocators", +- "description" : "Report umpire allocation statistics per allocator", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "umpire" ], +- "config" : { "CALI_UMPIRE_PER_ALLOCATOR_STATISTICS": "true" }, +- "query" : +- [ +- { "level" : "local", +- "let" : +- [ "ump.size.bytes=first(max#umpire.alloc.current.size,umpire.alloc.current.size)", +- "ump.hwm.bytes=first(max#umpire.alloc.highwatermark,umpire.alloc.highwatermark)", +- "ump.count=first(max#umpire.alloc.count,umpire.alloc.count)", +- "ump.size=scale(ump.size.bytes,1e-6)", +- "ump.hwm=scale(ump.hwm.bytes,1e-6)" +- ], +- "select" : +- [ "umpire.alloc.name as Allocator", +- "inclusive_max(ump.size) as \"Alloc MB\" unit MB", +- "inclusive_max(ump.hwm) as \"Alloc HWM\" unit MB", +- "inclusive_max(ump.count) as \"Num allocs\"" +- ], +- "group by": "umpire.alloc.name" +- }, +- { "level" : "cross", +- "select" : +- [ "umpire.alloc.name as Allocator", +- "avg(imax#ump.size) as \"Alloc MB (avg)\" unit MB", +- "max(imax#ump.size) as \"Alloc MB (max)\" unit MB", +- "avg(imax#ump.hwm) as \"Alloc HWM (avg)\" unit MB", +- "max(imax#ump.hwm) as \"Alloc HWM (max)\" unit MB", +- "avg(imax#ump.count) as \"Num allocs (avg)\"", +- "max(imax#ump.count) as \"Num 
allocs (max)\"" +- ], +- "group by": "umpire.alloc.name" +- } +- ] +- }, +- { +- "name" : "umpire.filter", +- "description" : "Names of Umpire allocators to track", +- "type" : "string", +- "category" : "metric", +- "config" : { "CALI_UMPIRE_ALLOCATOR_FILTER": "{}" } +- }, +- { +- "name" : "mem.pages", +- "description" : "Memory pages used via /proc/self/statm", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "memstat" ], +- "query" : +- [ +- { "level" : "local", +- "let" : +- [ "mem.vmsize = first(max#memstat.vmsize,memstat.vmsize)", +- "mem.vmrss = first(max#memstat.vmrss,memstat.vmrss)", +- "mem.data = first(max#memstat.data,memstat.data)" +- ], +- "select" : +- [ +- "max(mem.vmsize) as VmSize unit pages", +- "max(mem.vmrss) as VmRSS unit pages", +- "max(mem.data) as Data unit pages" +- ] +- }, +- { "level" : "cross", +- "select" : +- [ +- "max(max#mem.vmsize) as \"VmSize (max)\" unit pages", +- "max(max#mem.vmrss) as \"VmRSS (max)\" unit pages", +- "max(max#mem.data) as \"Data (max)\" unit pages" +- ] +- } +- ] +- }, +- { +- "name" : "mem.highwatermark", +- "description" : "Report memory high-water mark", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "alloc", "sysalloc" ], +- "config" : { "CALI_ALLOC_TRACK_ALLOCATIONS": "false", "CALI_ALLOC_RECORD_HIGHWATERMARK": "true" }, +- "query" : +- [ +- { "level" : "local", +- "let" : +- [ "mem.highwatermark.bytes = first(max#alloc.region.highwatermark,alloc.region.highwatermark)", +- "mem.highwatermark = scale(mem.highwatermark.bytes,1e-6)" +- ], +- "select" : [ "max(mem.highwatermark) as \"Allocated MB\" unit MB" ] +- }, +- { "level" : "cross", +- "select" : [ "max(max#mem.highwatermark) as \"Allocated MB\" unit MB" ] +- } +- ] +- }, +- { +- "name" : "mem.read.bandwidth", +- "description" : "Record memory read bandwidth using the Performance Co-pilot API", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "pcp.memory" ], +- "query" : +- [ +- { "level" : "local", +- "let" 
: [ "mrb.time=first(pcp.time.duration,sum#pcp.time.duration)" ], +- "select" : [ "ratio(mem.bytes.read,mrb.time,1e-6) as \"MB/s (r)\" unit MB/s" ] +- }, +- { "level" : "cross", "select": +- [ +- "avg(ratio#mem.bytes.read/mrb.time) as \"Avg MemBW (r) (MB/s)\" unit MB/s", +- "max(ratio#mem.bytes.read/mrb.time) as \"Max MemBW (r) (MB/s)\" unit MB/s", +- "sum(ratio#mem.bytes.read/mrb.time) as \"Total MemBW (r) (MB/s)\" unit MB/s" +- ] +- } +- ] +- }, +- { +- "name" : "mem.write.bandwidth", +- "description" : "Record memory write bandwidth using the Performance Co-pilot API", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "pcp.memory" ], +- "query" : +- [ +- { "level" : "local", +- "let" : [ "mwb.time=first(pcp.time.duration,sum#pcp.time.duration)" ], +- "select" : [ "ratio(mem.bytes.written,mwb.time,1e-6) as \"MB/s (w)\" unit MB/s" ] +- }, +- { "level" : "cross", "select": +- [ +- "avg(ratio#mem.bytes.written/mwb.time) as \"Avg MemBW (w) (MB/s)\" unit MB/s", +- "max(ratio#mem.bytes.written/mwb.time) as \"Max MemBW (w) (MB/s)\" unit MB/s", +- "sum(ratio#mem.bytes.written/mwb.time) as \"Total MemBW (w) (MB/s)\" unit MB/s", +- ] +- } +- ] +- }, +- { +- "name" : "mem.bandwidth", +- "description" : "Record memory bandwidth using the Performance Co-pilot API", +- "type" : "bool", +- "category" : "metric", +- "inherit" : [ "mem.read.bandwidth", "mem.write.bandwidth" ], +- }, +- { +- "name" : "topdown.toplevel", +- "description" : "Top-down analysis for Intel CPUs (top level)", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "topdown" ], +- "config" : { "CALI_TOPDOWN_LEVEL": "top" }, +- "query" : +- [ +- { "level": "local", "select": +- [ +- "any(topdown.retiring) as \"Retiring\"", +- "any(topdown.backend_bound) as \"Backend bound\"", +- "any(topdown.frontend_bound) as \"Frontend bound\"", +- "any(topdown.bad_speculation) as \"Bad speculation\"" +- ] +- }, +- { "level": "cross", "select": +- [ +- "any(any#topdown.retiring) as \"Retiring\"", +- 
"any(any#topdown.backend_bound) as \"Backend bound\"", +- "any(any#topdown.frontend_bound) as \"Frontend bound\"", +- "any(any#topdown.bad_speculation) as \"Bad speculation\"" +- ] +- } +- ] +- }, +- { +- "name" : "topdown.all", +- "description" : "Top-down analysis for Intel CPUs (all levels)", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "topdown" ], +- "config" : { "CALI_TOPDOWN_LEVEL": "all" }, +- "query" : +- [ +- { "level": "local", "select": +- [ +- "any(topdown.retiring) as \"Retiring\"", +- "any(topdown.backend_bound) as \"Backend bound\"", +- "any(topdown.frontend_bound) as \"Frontend bound\"", +- "any(topdown.bad_speculation) as \"Bad speculation\"", +- "any(topdown.branch_mispredict) as \"Branch mispredict\"", +- "any(topdown.machine_clears) as \"Machine clears\"", +- "any(topdown.frontend_latency) as \"Frontend latency\"", +- "any(topdown.frontend_bandwidth) as \"Frontend bandwidth\"", +- "any(topdown.memory_bound) as \"Memory bound\"", +- "any(topdown.core_bound) as \"Core bound\"", +- "any(topdown.ext_mem_bound) as \"External Memory\"", +- "any(topdown.l1_bound) as \"L1 bound\"", +- "any(topdown.l2_bound) as \"L2 bound\"", +- "any(topdown.l3_bound) as \"L3 bound\"" +- ] +- }, +- { "level": "cross", "select": +- [ +- "any(any#topdown.retiring) as \"Retiring\"", +- "any(any#topdown.backend_bound) as \"Backend bound\"", +- "any(any#topdown.frontend_bound) as \"Frontend bound\"", +- "any(any#topdown.bad_speculation) as \"Bad speculation\"", +- "any(any#topdown.branch_mispredict) as \"Branch mispredict\"", +- "any(any#topdown.machine_clears) as \"Machine clears\"", +- "any(any#topdown.frontend_latency) as \"Frontend latency\"", +- "any(any#topdown.frontend_bandwidth) as \"Frontend bandwidth\"", +- "any(any#topdown.memory_bound) as \"Memory bound\"", +- "any(any#topdown.core_bound) as \"Core bound\"", +- "any(any#topdown.ext_mem_bound) as \"External Memory\"", +- "any(any#topdown.l1_bound) as \"L1 bound\"", +- "any(any#topdown.l2_bound) 
as \"L2 bound\"", +- "any(any#topdown.l3_bound) as \"L3 bound\"" +- ] +- } +- ] +- }, +- { +- "name" : "topdown-counters.toplevel", +- "description" : "Raw counter values for Intel top-down analysis (top level)", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "papi" ], +- "config" : +- { +- "CALI_PAPI_COUNTERS": +- "CPU_CLK_THREAD_UNHALTED:THREAD_P,UOPS_RETIRED:RETIRE_SLOTS,UOPS_ISSUED:ANY,INT_MISC:RECOVERY_CYCLES,IDQ_UOPS_NOT_DELIVERED:CORE" +- }, +- "query" : +- [ +- { "level": "local", "select": +- [ +- "inclusive_sum(sum#papi.CPU_CLK_THREAD_UNHALTED:THREAD_P) as cpu_clk_thread_unhalted:thread_p", +- "inclusive_sum(sum#papi.UOPS_RETIRED:RETIRE_SLOTS) as uops_retired:retire_slots", +- "inclusive_sum(sum#papi.UOPS_ISSUED:ANY) as uops_issued:any", +- "inclusive_sum(sum#papi.INT_MISC:RECOVERY_CYCLES) as int_misc:recovery_cycles", +- "inclusive_sum(sum#papi.IDQ_UOPS_NOT_DELIVERED:CORE) as idq_uops_note_delivered:core" +- ] +- }, +- { "level": "cross", "select": +- [ +- "sum(inclusive#sum#papi.CPU_CLK_THREAD_UNHALTED:THREAD_P) as cpu_clk_thread_unhalted:thread_p", +- "sum(inclusive#sum#papi.UOPS_RETIRED:RETIRE_SLOTS) as uops_retired:retire_slots", +- "sum(inclusive#sum#papi.UOPS_ISSUED:ANY) as uops_issued:any", +- "sum(inclusive#sum#papi.INT_MISC:RECOVERY_CYCLES) as int_misc:recovery_cycles", +- "sum(inclusive#sum#papi.IDQ_UOPS_NOT_DELIVERED:CORE) as idq_uops_note_delivered:core" +- ] +- } +- ] +- }, +- { +- "name" : "topdown-counters.all", +- "description" : "Raw counter values for Intel top-down analysis (all levels)", +- "type" : "bool", +- "category" : "metric", +- "services" : [ "papi" ], +- "config" : +- { +- "CALI_PAPI_COUNTERS": +- "BR_MISP_RETIRED:ALL_BRANCHES +- ,CPU_CLK_THREAD_UNHALTED:THREAD_P +- ,CYCLE_ACTIVITY:CYCLES_NO_EXECUTE +- ,CYCLE_ACTIVITY:STALLS_L1D_PENDING +- ,CYCLE_ACTIVITY:STALLS_L2_PENDING +- ,CYCLE_ACTIVITY:STALLS_LDM_PENDING +- ,IDQ_UOPS_NOT_DELIVERED:CORE +- ,IDQ_UOPS_NOT_DELIVERED:CYCLES_0_UOPS_DELIV_CORE +- 
,INT_MISC:RECOVERY_CYCLES +- ,MACHINE_CLEARS:COUNT +- ,MEM_LOAD_UOPS_RETIRED:L3_HIT +- ,MEM_LOAD_UOPS_RETIRED:L3_MISS +- ,UOPS_EXECUTED:CORE_CYCLES_GE_1 +- ,UOPS_EXECUTED:CORE_CYCLES_GE_2 +- ,UOPS_ISSUED:ANY +- ,UOPS_RETIRED:RETIRE_SLOTS" +- }, +- "query" : +- [ +- { "level": "local", "select": +- [ +- "inclusive_sum(sum#papi.BR_MISP_RETIRED:ALL_BRANCHES) as br_misp_retired:all_branches", +- "inclusive_sum(sum#papi.CPU_CLK_THREAD_UNHALTED:THREAD_P) as cpu_clk_thread_unhalted:thread_p", +- "inclusive_sum(sum#papi.CYCLE_ACTIVITY:CYCLES_NO_EXECUTE) as cycle_activity:cycles_no_execute", +- "inclusive_sum(sum#papi.CYCLE_ACTIVITY:STALLS_L1D_PENDING) as cycle_activity:stalls_l1d_pending", +- "inclusive_sum(sum#papi.CYCLE_ACTIVITY:STALLS_L2_PENDING) as cycle_activity:stalls_l2_pending", +- "inclusive_sum(sum#papi.CYCLE_ACTIVITY:STALLS_LDM_PENDING) as cycle_activity:stalls_ldm_pending", +- "inclusive_sum(sum#papi.IDQ_UOPS_NOT_DELIVERED:CORE) as idq_uops_note_delivered:core", +- "inclusive_sum(sum#papi.IDQ_UOPS_NOT_DELIVERED:CYCLES_0_UOPS_DELIV_CORE) as idq_uops_note_delivered:cycles_0_uops_deliv_core", +- "inclusive_sum(sum#papi.INT_MISC:RECOVERY_CYCLES) as int_misc:recovery_cycles", +- "inclusive_sum(sum#papi.MACHINE_CLEARS:COUNT) as machine_clears:count", +- "inclusive_sum(sum#papi.MEM_LOAD_UOPS_RETIRED:L3_HIT) as mem_load_uops_retired:l3_hit", +- "inclusive_sum(sum#papi.MEM_LOAD_UOPS_RETIRED:L3_MISS) as mem_load_uops_retired:l3_miss", +- "inclusive_sum(sum#papi.UOPS_EXECUTED:CORE_CYCLES_GE_1) as uops_executed:core_cycles_ge_1", +- "inclusive_sum(sum#papi.UOPS_EXECUTED:CORE_CYCLES_GE_2) as uops_executed:core_cycles_ge_2", +- "inclusive_sum(sum#papi.UOPS_ISSUED:ANY) as uops_issued:any", +- "inclusive_sum(sum#papi.UOPS_RETIRED:RETIRE_SLOTS) as uops_retired:retire_slots" +- ] +- }, +- { "level": "cross", "select": +- [ +- "sum(inclusive#sum#papi.BR_MISP_RETIRED:ALL_BRANCHES) as br_misp_retired:all_branches", +- "sum(inclusive#sum#papi.CPU_CLK_THREAD_UNHALTED:THREAD_P) as 
cpu_clk_thread_unhalted:thread_p", +- "sum(inclusive#sum#papi.CYCLE_ACTIVITY:CYCLES_NO_EXECUTE) as cycle_activity:cycles_no_execute", +- "sum(inclusive#sum#papi.CYCLE_ACTIVITY:STALLS_L1D_PENDING) as cycle_activity:stalls_l1d_pending", +- "sum(inclusive#sum#papi.CYCLE_ACTIVITY:STALLS_L2_PENDING) as cycle_activity:stalls_l2_pending", +- "sum(inclusive#sum#papi.CYCLE_ACTIVITY:STALLS_LDM_PENDING) as cycle_activity:stalls_ldm_pending", +- "sum(inclusive#sum#papi.IDQ_UOPS_NOT_DELIVERED:CORE) as idq_uops_note_delivered:core", +- "sum(inclusive#sum#papi.IDQ_UOPS_NOT_DELIVERED:CYCLES_0_UOPS_DELIV_CORE) as idq_uops_note_delivered:cycles_0_uops_deliv_core", +- "sum(inclusive#sum#papi.INT_MISC:RECOVERY_CYCLES) as int_misc:recovery_cycles", +- "sum(inclusive#sum#papi.MACHINE_CLEARS:COUNT) as machine_clears:count", +- "sum(inclusive#sum#papi.MEM_LOAD_UOPS_RETIRED:L3_HIT) as mem_load_uops_retired:l3_hit", +- "sum(inclusive#sum#papi.MEM_LOAD_UOPS_RETIRED:L3_MISS) as mem_load_uops_retired:l3_miss", +- "sum(inclusive#sum#papi.UOPS_EXECUTED:CORE_CYCLES_GE_1) as uops_executed:core_cycles_ge_1", +- "sum(inclusive#sum#papi.UOPS_EXECUTED:CORE_CYCLES_GE_2) as uops_executed:core_cycles_ge_2", +- "sum(inclusive#sum#papi.UOPS_ISSUED:ANY) as uops_issued:any", +- "sum(inclusive#sum#papi.UOPS_RETIRED:RETIRE_SLOTS) as uops_retired:retire_slots" +- ] +- } +- ] +- }, +- { +- "name" : "output", +- "description" : "Output location ('stdout', 'stderr', or filename)", +- "type" : "string", +- "category" : "output" +- }, +- { +- "name" : "adiak.import_categories", +- "services" : [ "adiak_import" ], +- "description" : "Adiak import categories. 
Comma-separated list of integers.", +- "type" : "string", +- "category" : "adiak" +- }, +- { +- "name" : "max_column_width", +- "type" : "int", +- "description" : "Maximum column width in the tree display", +- "category" : "treeformatter" +- }, +- { +- "name" : "print.metadata", +- "type" : "bool", +- "description" : "Print program metadata (Caliper globals and Adiak data)", +- "category" : "treeformatter" +- }, +- { +- "name" : "order_as_visited", +- "type" : "bool", +- "description" : "Print tree nodes in the original visit order", +- "category" : "treeformatter", +- "query" : +- [ +- { "level": "local", +- "let": [ "o_a_v.slot=first(aggregate.slot)" ], +- "aggregate": [ "min(o_a_v.slot)" ], +- "order by": [ "min#o_a_v.slot" ] +- }, +- { "level": "cross", +- "aggregate": [ "min(min#o_a_v.slot)" ], +- "order by": [ "min#min#o_a_v.slot" ] +- } +- ] +- } ++ "min(sum#rc.count) as \"Calls/rank (min)\" unit count", ++ "avg(sum#rc.count) as \"Calls/rank (avg)\" unit count", ++ "max(sum#rc.count) as \"Calls/rank (max)\" unit count", ++ "sum(sum#rc.count) as \"Calls (total)\" unit count" + ] ++ } ++ ] ++}, ++{ ++ "name" : "region.stats", ++ "description" : "Detailed region timing statistics (min/max/avg time per visit)", ++ "type" : "bool", ++ "category" : "metric", ++ "services" : [ "timer", "event" ], ++ "config" : ++ { ++ "CALI_TIMER_INCLUSIVE_DURATION" : "true", ++ "CALI_EVENT_ENABLE_SNAPSHOT_INFO" : "true" ++ }, ++ "query": ++ [ ++ { ++ "level": "local", ++ "let": ++ [ ++ "rs.count=first(sum#region.count,region.count)", ++ "rs.min=scale(min#time.inclusive.duration.ns,1e-9)", ++ "rs.max=scale(max#time.inclusive.duration.ns,1e-9)", ++ "rs.sum=scale(sum#time.inclusive.duration.ns,1e-9)" ++ ], ++ "aggregate": ++ [ ++ "sum(rs.sum)" ++ ], ++ "select": ++ [ ++ "sum(rs.count) as Visits unit count", ++ "min(rs.min) as \"Min time/visit\" unit sec", ++ "ratio(rs.sum,rs.count) as \"Avg time/visit\" unit sec", ++ "max(rs.max) as \"Max time/visit\" unit sec" ++ ] ++ }, ++ { ++ 
"level": "cross", ++ "select": ++ [ ++ "sum(sum#rs.count) as Visits unit count", ++ "min(min#rs.min) as \"Min time/visit\" unit sec", ++ "ratio(sum#rs.sum,sum#rs.count) as \"Avg time/visit\" unit sec", ++ "max(max#rs.max) as \"Max time/visit\" unit sec" ++ ] ++ } ++ ] ++}, ++{ ++ "name" : "node.order", ++ "description" : "Report order in which regions first appeared", ++ "type" : "bool", ++ "category" : "metric", ++ "query" : ++ [ ++ { "level" : "local", ++ "select" : [ "min(aggregate.slot) as \"Node order\"" ] ++ }, ++ { "level" : "cross", ++ "select" : [ "min(min#aggregate.slot) as \"Node order\"" ] ++ } ++ ] ++}, ++{ ++ "name" : "output", ++ "description" : "Output location ('stdout', 'stderr', or filename)", ++ "type" : "string", ++ "category" : "output" ++}, ++{ ++ "name" : "max_column_width", ++ "type" : "int", ++ "description" : "Maximum column width in the tree display", ++ "category" : "treeformatter" ++}, ++{ ++ "name" : "print.metadata", ++ "type" : "bool", ++ "description" : "Print program metadata (Caliper globals and Adiak data)", ++ "category" : "treeformatter" ++}, ++{ ++ "name" : "order_as_visited", ++ "type" : "bool", ++ "description" : "Print tree nodes in the original visit order", ++ "category" : "treeformatter", ++ "query" : ++ [ ++ { "level": "local", ++ "let": [ "o_a_v.slot=first(aggregate.slot)" ], ++ "aggregate": [ "min(o_a_v.slot)" ], ++ "order by": [ "min#o_a_v.slot" ] ++ }, ++ { "level": "cross", ++ "aggregate": [ "min(min#o_a_v.slot)" ], ++ "order by": [ "min#min#o_a_v.slot" ] ++ } ++ ] ++} ++] + )json"; + + } diff --git a/scripts/docker/ascent_build/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch b/scripts/docker/ascent_build/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch new file mode 100644 index 000000000000..76fdf525d2f0 --- /dev/null +++ b/scripts/docker/ascent_build/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch @@ -0,0 +1,147 @@ +From 7b25b95c30e083e7d8c541f131df10e401f658d6 Mon Sep 
17 00:00:00 2001 +From: Cyrus Harrison +Date: Thu, 1 Aug 2024 13:26:40 -0700 +Subject: [PATCH] detect if caliper needs adiak + +--- + src/cmake/thirdparty/SetupCaliper.cmake | 47 +++++++++++++++++----- + src/config/conduit_setup_deps.cmake | 52 +++++++++++++++++++------ + 2 files changed, 77 insertions(+), 22 deletions(-) + +diff --git a/src/cmake/thirdparty/SetupCaliper.cmake b/src/cmake/thirdparty/SetupCaliper.cmake +index 44ad2cc2e..7469097f3 100644 +--- a/src/cmake/thirdparty/SetupCaliper.cmake ++++ b/src/cmake/thirdparty/SetupCaliper.cmake +@@ -12,24 +12,51 @@ if(NOT CALIPER_DIR) + MESSAGE(FATAL_ERROR "Caliper support needs explicit CALIPER_DIR") + endif() + +-# most common case: caliper is built with adiak support +-# and caliper needs us to find adiak, or else find_pacakge caliper +-# will fail + +-# Check for ADIAK_DIR ++# first: look for caliper config header + see what additional deps we need ++#. to resolve. ++ ++message(STATUS "Attempting to find cali-config.h with CALIPER_DIR=${CALIPER_DIR} ...") ++find_file(CALI_CONFIG_HEADER ++ NAMES caliper-config.h ++ PATHS ${CALIPER_DIR} ++ PATH_SUFFIXES include/caliper ++ NO_DEFAULT_PATH ++ NO_CMAKE_ENVIRONMENT_PATH ++ NO_CMAKE_PATH ++ NO_SYSTEM_ENVIRONMENT_PATH ++ NO_CMAKE_SYSTEM_PATH) ++ ++if(EXISTS ${CALI_CONFIG_HEADER}) ++ message(STATUS "Found Caliper Config Header: ${CALI_CONFIG_HEADER}") ++else() ++ message(FATAL_ERROR "Could not find caliper-config.h in caliper ${CALIPER_DIR}/include/caliper") ++endif() + +-if(NOT ADIAK_DIR) +- MESSAGE(FATAL_ERROR "Caliper support needs explicit ADIAK_DIR") ++file(READ ${CALI_CONFIG_HEADER} _CALI_CONFIG_HEADER_CONTENTS) ++ ++# check if we need ADIAK ++string(FIND ${_CALI_CONFIG_HEADER_CONTENTS} "#define CALIPER_HAVE_ADIAK" _caliper_have_adiak) ++ ++if(${_caliper_have_adiak} GREATER_EQUAL 0 ) ++ # caliper is built with adiak support and caliper needs us to find adiak, ++ # else find_pacakge caliper will fail ++ # Check for ADIAK_DIR ++ if(NOT ADIAK_DIR) ++ 
MESSAGE(FATAL_ERROR "Caliper support needs explicit ADIAK_DIR") ++ endif() ++ # find adiak ++ find_package(adiak REQUIRED ++ NO_DEFAULT_PATH ++ PATHS ${ADIAK_DIR}/lib/cmake/adiak) ++ set(ADIAK_FOUND TRUE) + endif() + +-find_package(adiak REQUIRED +- NO_DEFAULT_PATH +- PATHS ${ADIAK_DIR}/lib/cmake/adiak) + + find_package(caliper REQUIRED + NO_DEFAULT_PATH + PATHS ${CALIPER_DIR}/share/cmake/caliper) + +-set(ADIAK_FOUND TRUE) ++ + set(CALIPER_FOUND TRUE) + set(CONDUIT_USE_CALIPER TRUE) +diff --git a/src/config/conduit_setup_deps.cmake b/src/config/conduit_setup_deps.cmake +index 0334a2b54..f9022da66 100644 +--- a/src/config/conduit_setup_deps.cmake ++++ b/src/config/conduit_setup_deps.cmake +@@ -58,26 +58,54 @@ if(CALIPER_DIR) + message(STATUS "Conduit was built with Caliper Support") + endif() + +- if(NOT ADIAK_DIR) +- set(ADIAK_DIR ${CONDUIT_ADIAK_DIR}) ++ # use caliper config header to detect necessary deps ++ find_file(CALI_CONFIG_HEADER ++ NAMES caliper-config.h ++ PATHS ${CALIPER_DIR} ++ PATH_SUFFIXES include/caliper ++ NO_DEFAULT_PATH ++ NO_CMAKE_ENVIRONMENT_PATH ++ NO_CMAKE_PATH ++ NO_SYSTEM_ENVIRONMENT_PATH ++ NO_CMAKE_SYSTEM_PATH) ++ ++ if(EXISTS ${CALI_CONFIG_HEADER}) ++ if(NOT Conduit_FIND_QUIETLY) ++ message(STATUS "Found Caliper Config Header: ${CALI_CONFIG_HEADER}") ++ endif() ++ else() ++ message(FATAL_ERROR "Could not find caliper-config.h in caliper ${CALIPER_DIR}/include/caliper") + endif() + +- if(ADIAK_DIR) +- if(NOT Conduit_FIND_QUIETLY) +- message(STATUS "Looking for Adiak at: ${ADIAK_DIR}/lib/cmake/adiak") ++ file(READ ${CALI_CONFIG_HEADER} _CALI_CONFIG_HEADER_CONTENTS) ++ ++ # check if we need ADIAK ++ string(FIND ${_CALI_CONFIG_HEADER_CONTENTS} "#define CALIPER_HAVE_ADIAK" _caliper_have_adiak) ++ ++ if(${_caliper_have_adiak} GREATER_EQUAL 0 ) ++ # caliper is built with adiak support and caliper needs us to find adiak. 
++ if(NOT ADIAK_DIR) ++ set(ADIAK_DIR ${CONDUIT_ADIAK_DIR}) ++ endif() ++ ++ if(ADIAK_DIR) ++ if(NOT Conduit_FIND_QUIETLY) ++ message(STATUS "Looking for Adiak at: ${ADIAK_DIR}/lib/cmake/adiak") ++ endif() ++ # find adiak first ++ find_dependency(adiak REQUIRED ++ NO_DEFAULT_PATH ++ PATHS ${ADIAK_DIR}/lib/cmake/adiak) + endif() +- # find adiak first +- find_package(adiak REQUIRED +- NO_DEFAULT_PATH +- PATHS ${ADIAK_DIR}/lib/cmake/adiak) + endif() ++ + if(NOT Conduit_FIND_QUIETLY) + message(STATUS "Looking for Caliper at: ${CALIPER_DIR}/share/cmake/caliper") + endif() + # find caliper +- find_package(caliper REQUIRED +- NO_DEFAULT_PATH +- PATHS ${CALIPER_DIR}/share/cmake/caliper) ++ find_dependency(caliper REQUIRED ++ NO_DEFAULT_PATH ++ PATHS ${CALIPER_DIR}/share/cmake/caliper) + endif() + + ############################################################################### diff --git a/scripts/docker/ascent_build/build_ascent.sh b/scripts/docker/ascent_build/build_ascent.sh new file mode 100644 index 000000000000..26a37bd23180 --- /dev/null +++ b/scripts/docker/ascent_build/build_ascent.sh @@ -0,0 +1,982 @@ +#!/bin/bash + +############################################################################## +# Adapted from: https://github.com/Alpine-DAV/ascent/blob/2228b2827dd99790c2fe4bc5b05f5b9c7ea42e71/scripts/build_ascent/build_ascent.sh +# Original Copyright (c) 2015-2024, Lawrence Livermore National Security, LLC. 
+# Released under BSD 3-Clause license +############################################################################## + +############################################################################## +# Demonstrates how to manually build Ascent and its dependencies, including: +# +# hdf5, conduit, vtk-m, mfem, raja, and umpire +# +# usage example: +# env enable_mpi=ON enable_openmp=ON ./build_ascent.sh +# +# +# Assumes: +# - cmake is in your path +# - selected compilers are in your path or set via env vars +# - [when enabled] MPI and Python (+numpy and mpi4py), are in your path +# +############################################################################## +set -eu -o pipefail + +############################################################################## +# Build Options +############################################################################## + +# shared options +enable_cuda="${enable_cuda:=ON}" +enable_hip="${enable_hip:=OFF}" +enable_fortran="${enable_fortran:=OFF}" +enable_python="${enable_python:=OFF}" +enable_openmp="${enable_openmp:=OFF}" +enable_mpi="${enable_mpi:=ON}" +enable_find_mpi="${enable_find_mpi:=ON}" +enable_tests="${enable_tests:=OFF}" +enable_verbose="${enable_verbose:=ON}" +build_jobs="${build_jobs:=8}" +build_config="${build_config:=Release}" +build_shared_libs="${build_shared_libs:=ON}" + +# tpl controls +build_zlib="${build_zlib:=true}" +build_hdf5="${build_hdf5:=false}" +build_pyvenv="${build_pyvenv:=false}" +build_caliper="${build_caliper:=false}" +build_silo="${build_silo:=false}" +build_conduit="${build_conduit:=true}" +build_vtkm="${build_vtkm:=true}" +build_camp="${build_camp:=true}" +build_raja="${build_raja:=true}" +build_umpire="${build_umpire:=true}" +build_mfem="${build_mfem:=false}" +build_catalyst="${build_catalyst:=false}" + +# ascent options +build_ascent="${build_ascent:=true}" + +# see if we are building on windows +build_windows="${build_windows:=OFF}" + +# see if we are building on macOS 
+build_macos="${build_macos:=OFF}" + +if [[ "$enable_cuda" == "ON" ]]; then + echo "*** configuring with CUDA support" + + CC="${CC:=gcc}" + CXX="${CXX:=g++}" + FTN="${FTN:=gfortran}" + + CUDA_ARCH="${CUDA_ARCH:=80}" + CUDA_ARCH_VTKM="${CUDA_ARCH_VTKM:=ampere}" +fi + +if [[ "$enable_hip" == "ON" ]]; then + echo "*** configuring with HIP support" + + CC="${CC:=/opt/rocm/llvm/bin/amdclang}" + CXX="${CXX:=/opt/rocm/llvm/bin/amdclang++}" + # FTN? + + ROCM_ARCH="${ROCM_ARCH:=gfx90a}" + ROCM_PATH="${ROCM_PATH:=/opt/rocm/}" + + # NOTE: this script only builds kokkos when enable_hip=ON + build_kokkos="${build_kokkos:=true}" +else + build_kokkos="${build_kokkos:=false}" +fi + +case "$OSTYPE" in + win*) build_windows="ON";; + msys*) build_windows="ON";; + darwin*) build_macos="ON";; + *) ;; +esac + +if [[ "$build_windows" == "ON" ]]; then + echo "*** configuring for windows" +fi + +if [[ "$build_macos" == "ON" ]]; then + echo "*** configuring for macos" +fi + +################ +# path helpers +################ +function ospath() +{ + if [[ "$build_windows" == "ON" ]]; then + echo `cygpath -m $1` + else + echo $1 + fi +} + +function abs_path() +{ + if [[ "$build_macos" == "ON" ]]; then + echo "$(cd $(dirname "$1");pwd)/$(basename "$1")" + else + echo `realpath $1` + fi +} + +root_dir=/usr/local +root_dir="${prefix:=${root_dir}}" +root_dir=$(ospath ${root_dir}) +root_dir=$(abs_path ${root_dir}) +script_dir=$(abs_path "$(dirname "${BASH_SOURCE[0]}")") +build_dir=$(ospath ${root_dir}/build) +source_dir=$(ospath ${root_dir}/source) + + +# root_dir is where we will build and install +# override with `prefix` env var +if [ ! 
-d ${root_dir} ]; then + mkdir -p ${root_dir} +fi + +cd ${root_dir} + +# install_dir is where we will install +# override with `prefix` env var +install_dir="${install_dir:=$root_dir/install}" + +echo "*** prefix: ${root_dir}" +echo "*** build root: ${build_dir}" +echo "*** sources root: ${source_dir}" +echo "*** install root: ${install_dir}" +echo "*** script dir: ${script_dir}" + +################ +# tar options +################ +tar_extra_args="" +if [[ "$build_windows" == "ON" ]]; then + tar_extra_args="--force-local" +fi + +# make sure sources dir exists +if [ ! -d ${source_dir} ]; then + mkdir -p ${source_dir} +fi +################ +# CMake Compiler Settings +################ +cmake_compiler_settings="" + +# capture compilers if they are provided via env vars +if [ ! -z ${CC+x} ]; then + cmake_compiler_settings="-DCMAKE_C_COMPILER:PATH=${CC}" +fi + +if [ ! -z ${CXX+x} ]; then + cmake_compiler_settings="${cmake_compiler_settings} -DCMAKE_CXX_COMPILER:PATH=${CXX}" +fi + +if [ ! -z ${FTN+x} ]; then + cmake_compiler_settings="${cmake_compiler_settings} -DCMAKE_Fortran_COMPILER:PATH=${FTN}" +fi + +################ +# print all build_ZZZ and enable_ZZZ options +################ +echo "*** cmake_compiler_settings: ${cmake_compiler_settings}" +echo '*** build_ascent `enable` settings:' +set | grep enable_ +echo '*** build_ascent `build` settings:' +set | grep build_ + +################ +# Zlib +################ +zlib_version=1.3.1 +zlib_src_dir=$(ospath ${source_dir}/zlib-${zlib_version}) +zlib_build_dir=$(ospath ${build_dir}/zlib-${zlib_version}/) +zlib_install_dir=$(ospath ${install_dir}/zlib-${zlib_version}/) +zlib_tarball=$(ospath ${source_dir}/zlib-${zlib_version}.tar.gz) + +# build only if install doesn't exist +if [ ! -d ${zlib_install_dir} ]; then +if ${build_zlib}; then +if [ ! 
-d ${zlib_src_dir} ]; then + echo "**** Downloading ${zlib_tarball}" + curl -L https://github.com/madler/zlib/releases/download/v${zlib_version}/zlib-${zlib_version}.tar.gz -o ${zlib_tarball} + tar ${tar_extra_args} -xzf ${zlib_tarball} -C ${source_dir} +fi + +echo "**** Configuring Zlib ${zlib_version}" +cmake -S ${zlib_src_dir} -B ${zlib_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DCMAKE_INSTALL_PREFIX=${zlib_install_dir} + +echo "**** Building Zlib ${zlib_version}" +cmake --build ${zlib_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing Zlib ${zlib_version}" +cmake --install ${zlib_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping Zlib build, install found at: ${zlib_install_dir}" +fi # build_zlib + + +################ +# HDF5 +################ +# release 1-2 GAH! +hdf5_version=1.14.1-2 +hdf5_middle_version=1.14.1 +hdf5_short_version=1.14 +hdf5_src_dir=$(ospath ${source_dir}/hdf5-${hdf5_version}) +hdf5_build_dir=$(ospath ${build_dir}/hdf5-${hdf5_version}/) +hdf5_install_dir=/usr/local/hdf5/serial +hdf5_tarball=$(ospath ${source_dir}/hdf5-${hdf5_version}.tar.gz) + +# build only if install doesn't exist +if [ ! -d ${hdf5_install_dir} ]; then +if ${build_hdf5}; then +if [ ! 
-d ${hdf5_src_dir} ]; then + echo "**** Downloading ${hdf5_tarball}" + curl -L https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${hdf5_short_version}/hdf5-${hdf5_middle_version}/src/hdf5-${hdf5_version}.tar.gz -o ${hdf5_tarball} + tar ${tar_extra_args} -xzf ${hdf5_tarball} -C ${source_dir} +fi + +################# +# +# hdf5 1.14.x CMake recipe for using zlib +# +# -DHDF5_ENABLE_Z_LIB_SUPPORT=ON +# Add zlib install dir to CMAKE_PREFIX_PATH +# +################# + +echo "**** Configuring HDF5 ${hdf5_version}" +cmake -S ${hdf5_src_dir} -B ${hdf5_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DHDF5_ENABLE_Z_LIB_SUPPORT=ON \ + -DCMAKE_PREFIX_PATH=${zlib_install_dir} \ + -DCMAKE_INSTALL_PREFIX=${hdf5_install_dir} + +echo "**** Building HDF5 ${hdf5_version}" +cmake --build ${hdf5_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing HDF5 ${hdf5_version}" +cmake --install ${hdf5_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping HDF5 build, install found at: ${hdf5_install_dir}" +fi # build_hdf5 + +################ +# Silo +################ +silo_version=4.11.1 +silo_src_dir=$(ospath ${source_dir}/Silo-${silo_version}) +silo_build_dir=$(ospath ${build_dir}/silo-${silo_version}/) +silo_install_dir=$(ospath ${install_dir}/silo-${silo_version}/) +silo_tarball=$(ospath ${source_dir}/silo-${silo_version}.tar.gz) + +# build only if install doesn't exist +if [ ! -d ${silo_install_dir} ]; then +if ${build_silo}; then +if [ ! 
-d ${silo_src_dir} ]; then + echo "**** Downloading ${silo_tarball}" + curl -L https://github.com/LLNL/Silo/archive/refs/tags/${silo_version}.tar.gz -o ${silo_tarball} + # untar and avoid symlinks (which windows despises) + tar ${tar_extra_args} -xzf ${silo_tarball} -C ${source_dir} \ + --exclude="Silo-${silo_version}/config-site/*" \ + --exclude="Silo-${silo_version}/README.md" + # apply silo patches + cd ${silo_src_dir} + patch -p1 < ${script_dir}/2024_07_25_silo_4_11_cmake_fix.patch + + # windows specifc patch + if [[ "$build_windows" == "ON" ]]; then + patch -p1 < ${script_dir}/2024_07_29_silo-pr389-win32-bugfix.patch + fi + + cd ${root_dir} +fi + + +echo "**** Configuring Silo ${silo_version}" +cmake -S ${silo_src_dir} -B ${silo_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DCMAKE_INSTALL_PREFIX=${silo_install_dir} \ + -DSILO_ENABLE_SHARED=${build_shared_libs} \ + -DCMAKE_C_FLAGS=-Doff64_t=off_t \ + -DSILO_ENABLE_HDF5=ON \ + -DSILO_ENABLE_TESTS=OFF \ + -DSILO_BUILD_FOR_BSD_LICENSE=ON \ + -DSILO_ENABLE_FORTRAN=OFF \ + -DSILO_HDF5_DIR=${hdf5_install_dir}/cmake/ \ + -DCMAKE_PREFIX_PATH=${zlib_install_dir} + + +echo "**** Building Silo ${silo_version}" +cmake --build ${silo_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing Silo ${silo_version}" +cmake --install ${silo_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping Silo build, install found at: ${silo_install_dir}" +fi # build_silo + +############################ +# Python Virtual Env +############################ +python_exe="${python_exe:=python3}" +venv_install_dir=$(ospath ${install_dir}/python-venv/) +venv_python_exe=$(ospath ${venv_install_dir}/bin/python3) +venv_sphinx_exe=$(ospath ${venv_install_dir}/bin/sphinx-build) + +# build only if install doesn't exist +if [ ! 
-d ${venv_install_dir} ]; then +if ${build_pyvenv}; then + echo "**** Creating Python Virtual Env" + cd ${install_dir} && ${python_exe} -m venv python-venv + ${venv_python_exe} -m pip install --upgrade pip + ${venv_python_exe} -m pip install numpy sphinx sphinx_rtd_theme + if [[ "$enable_mpi" == "ON" ]]; then + ${venv_python_exe} -m pip install mpi4py + fi +fi +else + echo "**** Skipping Python venv build, install found at: ${venv_install_dir}" +fi # build_pyvenv + +if ${build_pyvenv}; then + venv_python_ver=`${venv_python_exe} -c "import sys;print('{0}.{1}'.format(sys.version_info.major, sys.version_info.minor))"` + venv_python_site_pkgs_dir=${venv_install_dir}/lib/python${venv_python_ver}/site-packages +fi + +################ +# Caliper +################ +caliper_version=2.11.0 +caliper_src_dir=$(ospath ${source_dir}/Caliper-${caliper_version}) +caliper_build_dir=$(ospath ${build_dir}/caliper-${caliper_version}/) +caliper_install_dir=$(ospath ${install_dir}/caliper-${caliper_version}/) +caliper_tarball=$(ospath ${source_dir}/caliper-${caliper_version}-src-with-blt.tar.gz) + +# build only if install doesn't exist +if [ ! -d ${caliper_install_dir} ]; then +if ${build_caliper}; then +if [ ! 
-d ${caliper_src_dir} ]; then + echo "**** Downloading ${caliper_tarball}" + curl -L https://github.com/LLNL/Caliper/archive/refs/tags/v${caliper_version}.tar.gz -o ${caliper_tarball} + tar ${tar_extra_args} -xzf ${caliper_tarball} -C ${source_dir} + # windows specifc patch + cd ${caliper_src_dir} + if [[ "$build_windows" == "ON" ]]; then + patch -p1 < ${script_dir}/2024_08_01_caliper-win-smaller-opts.patch + fi + cd ${root_dir} +fi + +# +# Note: Caliper has optional Umpire support, +# if we want to support in the future, we will need to build umpire first +# + +# -DWITH_CUPTI=ON -DWITH_NVTX=ON -DCUDA_TOOLKIT_ROOT_DIR={path} -DCUPTI_PREFIX={path} +# -DWITH_ROCTRACER=ON -DWITH_ROCTX=ON -DROCM_PREFIX={path} + +caliper_windows_cmake_flags="-DCMAKE_CXX_STANDARD=17 -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=ON -DWITH_TOOLS=OFF" + +caliper_extra_cmake_args="" +if [[ "$build_windows" == "ON" ]]; then + caliper_extra_cmake_args="${caliper_windows_cmake_flags}" +fi + +if [[ "$enable_hip" == "ON" ]]; then + caliper_extra_cmake_args="${caliper_extra_cmake_args} -DWITH_ROCTRACER=ON -DWITH_ROCTX=ON -DROCM_PREFIX=${ROCM_PATH}" +fi + +echo "**** Configuring Caliper ${caliper_version}" +cmake -S ${caliper_src_dir} -B ${caliper_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DBUILD_SHARED_LIBS=${build_shared_libs} \ + -DCMAKE_INSTALL_PREFIX=${caliper_install_dir} \ + -DWITH_MPI=${enable_mpi} ${caliper_extra_cmake_args} + +echo "**** Building Caliper ${caliper_version}" +cmake --build ${caliper_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing Caliper ${caliper_version}" +cmake --install ${caliper_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping Caliper build, install found at: ${caliper_install_dir}" +fi # build_caliper + + +################ +# Conduit +################ +conduit_version=v0.9.2 +conduit_src_dir=$(ospath ${source_dir}/conduit-${conduit_version}) 
+conduit_build_dir=$(ospath ${build_dir}/conduit-${conduit_version}/) +conduit_install_dir=$(ospath ${install_dir}/conduit-${conduit_version}/) +conduit_tarball=$(ospath ${source_dir}/conduit-${conduit_version}-src-with-blt.tar.gz) + +# build only if install doesn't exist +if [ ! -d ${conduit_install_dir} ]; then +if ${build_conduit}; then +if [ ! -d ${conduit_src_dir} ]; then + echo "**** Downloading ${conduit_tarball}" + curl -L https://github.com/LLNL/conduit/releases/download/${conduit_version}/conduit-${conduit_version}-src-with-blt.tar.gz -o ${conduit_tarball} + # untar and avoid symlinks (which windows despises) + tar ${tar_extra_args} -xzf ${conduit_tarball} -C ${source_dir} \ + --exclude="conduit-${conduit_version}/src/tests/relay/data/silo/*" + # caliper vs adiak patch + if ${build_caliper}; then + cd ${conduit_src_dir} + echo ${conduit_src_dir} + patch -p 1 < ${script_dir}/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch + cd ${root_dir} + fi +fi + +# +# extra cmake args +# +conduit_extra_cmake_opts=-DENABLE_PYTHON=${enable_python} +if ${build_pyvenv}; then + conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DPYTHON_EXECUTABLE=${venv_python_exe}" + conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DSPHINX_EXECUTABLE=${venv_sphinx_exe}" + conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DPYTHON_MODULE_INSTALL_PREFIX=${venv_python_site_pkgs_dir}" +fi + +if ${build_caliper}; then + conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DCALIPER_DIR=${caliper_install_dir}" +fi + +if ${build_silo}; then + conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DSILO_DIR=${silo_install_dir}" +fi + +echo "**** Configuring Conduit ${conduit_version}" +cmake -S ${conduit_src_dir}/src -B ${conduit_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DBUILD_SHARED_LIBS=${build_shared_libs} \ + -DCMAKE_INSTALL_PREFIX=${conduit_install_dir} \ + 
-DENABLE_FORTRAN=${enable_fortran} \ + -DENABLE_MPI=${enable_mpi} \ + -DENABLE_FIND_MPI=${enable_find_mpi} \ + ${conduit_extra_cmake_opts} \ + -DENABLE_TESTS=OFF \ + -DHDF5_DIR=${hdf5_install_dir} \ + -DZLIB_DIR=${zlib_install_dir} + + +echo "**** Building Conduit ${conduit_version}" +cmake --build ${conduit_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing Conduit ${conduit_version}" +cmake --install ${conduit_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping Conduit build, install found at: ${conduit_install_dir}" +fi # build_conduit + +######################### +# Kokkos (only for hip) +######################### +kokkos_version=3.7.02 +kokkos_src_dir=$(ospath ${source_dir}/kokkos-${kokkos_version}) +kokkos_build_dir=$(ospath ${build_dir}/kokkos-${kokkos_version}) +kokkos_install_dir=$(ospath ${install_dir}/kokkos-${kokkos_version}/) +kokkos_tarball=$(ospath ${source_dir}/kokkos-${kokkos_version}.tar.gz) + +if [[ "$enable_hip" == "ON" ]]; then +# build only if install doesn't exist +if [ ! -d ${kokkos_install_dir} ]; then +if ${build_kokkos}; then +if [ ! -d ${kokkos_src_dir} ]; then + echo "**** Downloading ${kokkos_tarball}" + curl -L https://github.com/kokkos/kokkos/archive/refs/tags/${kokkos_version}.tar.gz -o ${kokkos_tarball} + tar ${tar_extra_args} -xzf ${kokkos_tarball} -C ${source_dir} +fi + +# TODO: DKokkos_ARCH_VEGA90A needs to be controlled / mapped? 
+ +echo "**** Configuring Kokkos ${kokkos_version}" +cmake -S ${kokkos_src_dir} -B ${kokkos_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DBUILD_SHARED_LIBS=${build_shared_libs} \ + -DKokkos_ARCH_VEGA90A=ON \ + -DCMAKE_CXX_COMPILER=${ROCM_PATH}/bin/hipcc \ + -DKokkos_ENABLE_HIP=ON \ + -DKokkos_ENABLE_SERIAL=ON \ + -DKokkos_ENABLE_HIP_RELOCATABLE_DEVICE_CODE=OFF \ + -DCMAKE_INSTALL_PREFIX=${kokkos_install_dir} \ + -DCMAKE_CXX_FLAGS="--amdgpu-target=${ROCM_ARCH}" \ + -DBUILD_TESTING=OFF \ + -DCMAKE_INSTALL_PREFIX=${kokkos_install_dir} + +echo "**** Building Kokkos ${kokkos_version}" +cmake --build ${kokkos_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing Kokkos ${kokkos_version}" +cmake --install ${kokkos_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping Kokkos build, install found at: ${kokkos_install_dir}" +fi # build_kokkos + +fi # if enable_hip + +################ +# VTK-m +################ +vtkm_version=v2.1.0 +vtkm_src_dir=$(ospath ${source_dir}/vtk-m-${vtkm_version}) +vtkm_build_dir=$(ospath ${build_dir}/vtk-m-${vtkm_version}) +vtkm_install_dir=$(ospath ${install_dir}/vtk-m-${vtkm_version}/) +vtkm_tarball=$(ospath ${source_dir}/vtk-m-${vtkm_version}.tar.gz) + +# build only if install doesn't exist +if [ ! -d ${vtkm_install_dir} ]; then +if ${build_vtkm}; then +if [ ! 
-d ${vtkm_src_dir} ]; then + echo "**** Downloading ${vtkm_tarball}" + curl -L https://gitlab.kitware.com/vtk/vtk-m/-/archive/${vtkm_version}/vtk-m-${vtkm_version}.tar.gz -o ${vtkm_tarball} + tar ${tar_extra_args} -xzf ${vtkm_tarball} -C ${source_dir} + + # apply vtk-m patch + cd ${vtkm_src_dir} + patch -p1 < ${script_dir}/2023_12_06_vtkm-mr3160-rocthrust-fix.patch + patch -p1 < ${script_dir}/2024_05_03_vtkm-mr3215-ext-geom-fix.patch + patch -p1 < ${script_dir}/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch + cd ${root_dir} +fi + + +vtkm_extra_cmake_args="" +if [[ "$enable_cuda" == "ON" ]]; then + vtkm_extra_cmake_args="-DVTKm_ENABLE_CUDA=ON" + vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DCMAKE_CUDA_HOST_COMPILER=${CXX}" + vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH}" +fi + +if [[ "$enable_hip" == "ON" ]]; then + vtkm_extra_cmake_args="-DVTKm_ENABLE_KOKKOS=ON" + vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DCMAKE_PREFIX_PATH=${kokkos_install_dir}" + vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DCMAKE_HIP_ARCHITECTURES=${ROCM_ARCH}" + vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DVTKm_ENABLE_KOKKOS_THRUST=OFF" +fi + +echo "**** Configuring VTK-m ${vtkm_version}" +cmake -S ${vtkm_src_dir} -B ${vtkm_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DBUILD_SHARED_LIBS=${build_shared_libs} \ + -DVTKm_NO_DEPRECATED_VIRTUAL=ON \ + -DVTKm_USE_64BIT_IDS=OFF \ + -DVTKm_USE_DOUBLE_PRECISION=ON \ + -DVTKm_USE_DEFAULT_TYPES_FOR_ASCENT=ON \ + -DVTKm_ENABLE_MPI=${enable_mpi} \ + -DVTKm_ENABLE_OPENMP=${enable_openmp}\ + -DVTKm_ENABLE_RENDERING=ON \ + -DVTKm_ENABLE_TESTING=OFF\ + -DBUILD_TESTING=OFF \ + -DVTKm_ENABLE_BENCHMARKS=OFF ${vtkm_extra_cmake_args} \ + -DCMAKE_INSTALL_PREFIX=${vtkm_install_dir} + +echo "**** Building VTK-m ${vtkm_version}" +cmake --build ${vtkm_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing 
VTK-m ${vtkm_version}" +cmake --install ${vtkm_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping VTK-m build, install found at: ${vtkm_install_dir}" +fi # build_vtkm + + +################ +# Camp +################ +camp_version=v2024.02.1 +camp_src_dir=$(ospath ${source_dir}/camp-${camp_version}) +camp_build_dir=$(ospath ${build_dir}/camp-${camp_version}) +camp_install_dir=$(ospath ${install_dir}/camp-${camp_version}/) +camp_tarball=$(ospath ${source_dir}/camp-${camp_version}.tar.gz) + + +# build only if install doesn't exist +if [ ! -d ${camp_install_dir} ]; then +if ${build_camp}; then +if [ ! -d ${camp_src_dir} ]; then + echo "**** Downloading ${camp_tarball}" + curl -L https://github.com/LLNL/camp/releases/download/${camp_version}/camp-${camp_version}.tar.gz -o ${camp_tarball} + tar ${tar_extra_args} -xzf ${camp_tarball} -C ${source_dir} +fi + +camp_extra_cmake_args="" +if [[ "$enable_cuda" == "ON" ]]; then + camp_extra_cmake_args="-DENABLE_CUDA=ON" + camp_extra_cmake_args="${camp_extra_cmake_args} -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH}" +fi + +if [[ "$enable_hip" == "ON" ]]; then + camp_extra_cmake_args="-DENABLE_HIP=ON" + camp_extra_cmake_args="${camp_extra_cmake_args} -DCMAKE_HIP_ARCHITECTURES=${ROCM_ARCH}" + camp_extra_cmake_args="${camp_extra_cmake_args} -DROCM_PATH=${ROCM_PATH}" +fi + +echo "**** Configuring Camp ${camp_version}" +cmake -S ${camp_src_dir} -B ${camp_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DBUILD_SHARED_LIBS=${build_shared_libs} \ + -DENABLE_TESTS=OFF \ + -DENABLE_EXAMPLES=OFF ${camp_extra_cmake_args} \ + -DCMAKE_INSTALL_PREFIX=${camp_install_dir} + +echo "**** Building Camp ${camp_version}" +cmake --build ${camp_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing Camp ${camp_version}" +cmake --install ${camp_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping Camp build, install found 
at: ${camp_install_dir}" +fi # build_camp + + +################ +# RAJA +################ +raja_version=v2024.02.1 +raja_src_dir=$(ospath ${source_dir}/RAJA-${raja_version}) +raja_build_dir=$(ospath ${build_dir}/raja-${raja_version}) +raja_install_dir=$(ospath ${install_dir}/raja-${raja_version}/) +raja_tarball=$(ospath ${source_dir}/RAJA-${raja_version}.tar.gz) +raja_enable_vectorization="${raja_enable_vectorization:=ON}" + +# build only if install doesn't exist +if [ ! -d ${raja_install_dir} ]; then +if ${build_raja}; then +if [ ! -d ${raja_src_dir} ]; then + echo "**** Downloading ${raja_tarball}" + curl -L https://github.com/LLNL/RAJA/releases/download/${raja_version}/RAJA-${raja_version}.tar.gz -o ${raja_tarball} + tar ${tar_extra_args} -xzf ${raja_tarball} -C ${source_dir} +fi + +raja_extra_cmake_args="" +if [[ "$enable_cuda" == "ON" ]]; then + raja_extra_cmake_args="-DENABLE_CUDA=ON" + raja_extra_cmake_args="${raja_extra_cmake_args} -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH}" +fi + +if [[ "$enable_hip" == "ON" ]]; then + raja_extra_cmake_args="-DENABLE_HIP=ON" + raja_extra_cmake_args="${raja_extra_cmake_args} -DCMAKE_HIP_ARCHITECTURES=${ROCM_ARCH}" + raja_extra_cmake_args="${raja_extra_cmake_args} -DROCM_PATH=${ROCM_PATH}" +fi + +echo "**** Configuring RAJA ${raja_version}" +cmake -S ${raja_src_dir} -B ${raja_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DBUILD_SHARED_LIBS=${build_shared_libs} \ + -Dcamp_DIR=${camp_install_dir} \ + -DENABLE_OPENMP=${enable_openmp} \ + -DENABLE_TESTS=OFF \ + -DRAJA_ENABLE_TESTS=OFF \ + -DENABLE_EXAMPLES=OFF \ + -DENABLE_EXERCISES=OFF ${raja_extra_cmake_args} \ + -DCMAKE_INSTALL_PREFIX=${raja_install_dir} \ + -DRAJA_ENABLE_VECTORIZATION=${raja_enable_vectorization} + +echo "**** Building RAJA ${raja_version}" +cmake --build ${raja_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing RAJA ${raja_version}" +cmake --install 
${raja_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping RAJA build, install found at: ${raja_install_dir}" +fi # build_raja + +################ +# Umpire +################ +umpire_version=2024.02.1 +umpire_src_dir=$(ospath ${source_dir}/umpire-${umpire_version}) +umpire_build_dir=$(ospath ${build_dir}/umpire-${umpire_version}) +umpire_install_dir=$(ospath ${install_dir}/umpire-${umpire_version}/) +umpire_tarball=$(ospath ${source_dir}/umpire-${umpire_version}.tar.gz) +umpire_windows_cmake_flags="-DBLT_CXX_STD=c++17 -DCMAKE_CXX_STANDARD=17 -DUMPIRE_ENABLE_FILESYSTEM=On -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=On" + +umpire_extra_cmake_args="" +if [[ "$build_windows" == "ON" ]]; then + umpire_extra_cmake_args="${umpire_windows_cmake_flags}" +fi + +if [[ "$enable_cuda" == "ON" ]]; then + umpire_extra_cmake_args="${umpire_extra_cmake_args} -DENABLE_CUDA=ON" + umpire_extra_cmake_args="${umpire_extra_cmake_args} -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH}" +fi + +if [[ "$enable_hip" == "ON" ]]; then + umpire_extra_cmake_args="${umpire_extra_cmake_args} -DENABLE_HIP=ON" + umpire_extra_cmake_args="${umpire_extra_cmake_args} -DCMAKE_HIP_ARCHITECTURES=${ROCM_ARCH}" + umpire_extra_cmake_args="${umpire_extra_cmake_args} -DROCM_PATH=${ROCM_PATH}" +fi + +# build only if install doesn't exist +if [ ! -d ${umpire_install_dir} ]; then +if ${build_umpire}; then +if [ ! 
-d ${umpire_src_dir} ]; then + echo "**** Downloading ${umpire_tarball}" + curl -L https://github.com/LLNL/Umpire/releases/download/v${umpire_version}/umpire-${umpire_version}.tar.gz -o ${umpire_tarball} + tar ${tar_extra_args} -xzf ${umpire_tarball} -C ${source_dir} +fi + +echo "**** Configuring Umpire ${umpire_version}" +cmake -S ${umpire_src_dir} -B ${umpire_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DBUILD_SHARED_LIBS=${build_shared_libs} \ + -Dcamp_DIR=${camp_install_dir} \ + -DENABLE_OPENMP=${enable_openmp} \ + -DENABLE_TESTS=OFF \ + -DUMPIRE_ENABLE_TOOLS=Off \ + -DUMPIRE_ENABLE_BENCHMARKS=OFF ${umpire_extra_cmake_args} \ + -DCMAKE_INSTALL_PREFIX=${umpire_install_dir} + +echo "**** Building Umpire ${umpire_version}" +cmake --build ${umpire_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing Umpire ${umpire_version}" +cmake --install ${umpire_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping Umpire build, install found at: ${umpire_install_dir}" +fi # build_umpire + +################ +# MFEM +################ +mfem_version=4.6 +mfem_src_dir=$(ospath ${source_dir}/mfem-${mfem_version}) +mfem_build_dir=$(ospath ${build_dir}/mfem-${mfem_version}) +mfem_install_dir=$(ospath ${install_dir}/mfem-${mfem_version}/) +mfem_tarball=$(ospath ${source_dir}/mfem-${mfem_version}.tar.gz) +mfem_windows_cmake_flags="-DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=ON" + +mfem_extra_cmake_args="" +if [[ "$build_windows" == "ON" ]]; then + mfem_extra_cmake_args="${mfem_windows_cmake_flags}" +fi + + +# build only if install doesn't exist +if [ ! -d ${mfem_install_dir} ]; then +if ${build_mfem}; then +if [ ! 
-d ${mfem_src_dir} ]; then + echo "**** Downloading ${mfem_tarball}" + curl -L https://github.com/mfem/mfem/archive/refs/tags/v${mfem_version}.tar.gz -o ${mfem_tarball} + tar ${tar_extra_args} -xzf ${mfem_tarball} -C ${source_dir} +fi + +# +# Note: MFEM MPI requires Hypre and Metis +# -DMFEM_USE_MPI=${enable_mpi} \ + +echo "**** Configuring MFEM ${mfem_version}" +cmake -S ${mfem_src_dir} -B ${mfem_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DBUILD_SHARED_LIBS=${build_shared_libs} \ + -DMFEM_USE_CONDUIT=ON ${mfem_extra_cmake_args} \ + -DCMAKE_PREFIX_PATH="${conduit_install_dir}" \ + -DMFEM_ENABLE_TESTING=OFF \ + -DMFEM_ENABLE_EXAMPLES=OFF \ + -DCMAKE_INSTALL_PREFIX=${mfem_install_dir} + +echo "**** Building MFEM ${mfem_version}" +cmake --build ${mfem_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing MFEM ${mfem_version}" +cmake --install ${mfem_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping MFEM build, install found at: ${mfem_install_dir}" +fi # build_mfem + +################ +# Catalyst +################ +catalyst_version=2.0.0-rc4 +catalyst_src_dir=$(ospath ${source_dir}/catalyst-v${catalyst_version}) +catalyst_build_dir=$(ospath ${build_dir}/catalyst-v${catalyst_version}) +catalyst_install_dir=$(ospath ${install_dir}/catalyst-v${catalyst_version}/) +catalyst_cmake_dir=${catalyst_install_dir}lib64/cmake/catalyst-2.0/ +catalyst_tarball=$(ospath ${source_dir}/catalyst-v${catalyst_version}.tar.gz) + +# build only if install doesn't exist +if [ ! -d ${catalyst_install_dir} ]; then +if ${build_catalyst}; then +if [ ! 
-d ${catalyst_src_dir} ]; then + echo "**** Downloading ${catalyst_tarball}" + curl -L https://gitlab.kitware.com/paraview/catalyst/-/archive/v${catalyst_version}/catalyst-v${catalyst_version}.tar.gz -o ${catalyst_tarball} + tar ${tar_extra_args} -xzf ${catalyst_tarball} -C ${source_dir} +fi + +echo "**** Configuring Catalyst ${catalyst_version}" +cmake -S ${catalyst_src_dir} -B ${catalyst_build_dir} ${cmake_compiler_settings} \ + -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ + -DCMAKE_BUILD_TYPE=${build_config} \ + -DCATALYST_BUILD_TESTING=OFF \ + -DCATALYST_USE_MPI=${enable_mpi} \ + -DCMAKE_INSTALL_PREFIX=${catalyst_install_dir} \ + +echo "**** Building Catalyst ${catalyst_version}" +cmake --build ${catalyst_build_dir} --config ${build_config} -j${build_jobs} +echo "**** Installing Catalyst ${catalyst_version}" +cmake --install ${catalyst_build_dir} --config ${build_config} + +fi +else + echo "**** Skipping Catalyst build, install found at: ${catalyst_install_dir}" +fi # build_catalyst + +################ +# Ascent +################ +# if we are in an ascent checkout, use existing source +ascent_checkout_dir=$(ospath ${script_dir}/../../src) +ascent_checkout_dir=$(abs_path ${ascent_checkout_dir}) +echo ${ascent_checkout_dir} +if [ -d ${ascent_checkout_dir} ]; then + ascent_version=checkout + ascent_src_dir=$(abs_path ${ascent_checkout_dir}) + echo "**** Using existing Ascent source repo checkout: ${ascent_src_dir}" +else + ascent_version=develop + ascent_src_dir=$(ospath ${source_dir}/ascent/src) +fi + +# otherwise use ascent develop +ascent_build_dir=$(ospath ${build_dir}/ascent-${ascent_version}/) +ascent_install_dir=$(ospath ${install_dir}//ascent-${ascent_version}/) + +echo "**** Creating Ascent host-config (ascent-config.cmake)" +# +echo '# host-config file generated by build_ascent.sh' > ${root_dir}/ascent-config.cmake + +# capture compilers if they are provided via env vars +if [ ! 
-z ${CC+x} ]; then + echo 'set(CMAKE_C_COMPILER ' ${CC} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +if [ ! -z ${CXX+x} ]; then + echo 'set(CMAKE_CXX_COMPILER ' ${CXX} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +if [ ! -z ${FTN+x} ]; then + echo 'set(CMAKE_Fortran_COMPILER ' ${FTN} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +# capture compiler flags if they are provided via env vars +if [ ! -z ${CFLAGS+x} ]; then + echo 'set(CMAKE_C_FLAGS "' ${CFLAGS} '" CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +if [ ! -z ${CXXFLAGS+x} ]; then + echo 'set(CMAKE_CXX_FLAGS "' ${CXXFLAGS} '" CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +if [ ! -z ${FFLAGS+x} ]; then + echo 'set(CMAKE_F_FLAGS "' ${FFLAGS} '" CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +echo 'set(CMAKE_VERBOSE_MAKEFILE ' ${enable_verbose} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +echo 'set(CMAKE_BUILD_TYPE ' ${build_config} ' CACHE STRING "")' >> ${root_dir}/ascent-config.cmake +echo 'set(BUILD_SHARED_LIBS ' ${build_shared_libs} ' CACHE STRING "")' >> ${root_dir}/ascent-config.cmake +echo 'set(CMAKE_INSTALL_PREFIX ' ${ascent_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 'set(ENABLE_TESTS ' ${enable_tests} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +echo 'set(ENABLE_MPI ' ${enable_mpi} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +echo 'set(ENABLE_FIND_MPI ' ${enable_find_mpi} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +echo 'set(ENABLE_FORTRAN ' ${enable_fortran} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +echo 'set(ENABLE_PYTHON ' ${enable_python} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +if ${build_pyvenv}; then +echo 'set(PYTHON_EXECUTABLE ' ${venv_python_exe} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 'set(PYTHON_MODULE_INSTALL_PREFIX ' ${venv_python_site_pkgs_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 
'set(ENABLE_DOCS ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +echo 'set(SPHINX_EXECUTABLE ' ${venv_sphinx_exe} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi +if ${build_caliper}; then + echo 'set(CALIPER_DIR ' ${caliper_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi +echo 'set(BLT_CXX_STD c++14 CACHE STRING "")' >> ${root_dir}/ascent-config.cmake +echo 'set(CONDUIT_DIR ' ${conduit_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 'set(VTKM_DIR ' ${vtkm_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 'set(CAMP_DIR ' ${camp_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 'set(RAJA_DIR ' ${raja_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 'set(UMPIRE_DIR ' ${umpire_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 'set(MFEM_DIR ' ${mfem_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +echo 'set(ENABLE_VTKH ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +echo 'set(ENABLE_APCOMP ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake +echo 'set(ENABLE_DRAY ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake + + +if ${build_catalyst}; then + echo 'set(CATALYST_DIR ' ${catalyst_cmake_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +if [[ "$enable_cuda" == "ON" ]]; then + echo 'set(ENABLE_CUDA ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake + echo 'set(CMAKE_CUDA_ARCHITECTURES ' ${CUDA_ARCH} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +if [[ "$enable_hip" == "ON" ]]; then + echo 'set(ENABLE_HIP ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake + echo 'set(CMAKE_HIP_ARCHITECTURES ' ${ROCM_ARCH} ' CACHE STRING "")' >> ${root_dir}/ascent-config.cmake + echo 'set(ROCM_PATH ' ${ROCM_PATH} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake + echo 'set(KOKKOS_DIR ' ${kokkos_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake +fi + +# build 
only if install doesn't exist +if [ ! -d ${ascent_install_dir} ]; then +if ${build_ascent}; then +if [ ! -d ${ascent_src_dir} ]; then + echo "**** Cloning Ascent" + git clone --recursive https://github.com/Alpine-DAV/ascent.git +fi + +echo "**** Configuring Ascent" +cmake -S ${ascent_src_dir} -B ${ascent_build_dir} -C ${root_dir}/ascent-config.cmake + +echo "**** Building Ascent" +cmake --build ${ascent_build_dir} --config ${build_config} -j${build_jobs} + +echo "**** Installing Ascent" +cmake --install ${ascent_build_dir} --config ${build_config} + +if ${build_catalyst}; then + mv ${ascent_install_dir}/lib/libcatalyst-ascent.so ${catalyst_install_dir}lib64/catalyst/libcatalyst-ascent.so +fi + +fi +else + echo "**** Skipping Ascent build, install found at: ${ascent_install_dir}" +fi # build_ascent diff --git a/scripts/docker/build_ascent_cuda.sh b/scripts/docker/build_ascent_cuda.sh deleted file mode 100644 index a8eb38a5563f..000000000000 --- a/scripts/docker/build_ascent_cuda.sh +++ /dev/null @@ -1,438 +0,0 @@ -#!/bin/bash - - -############################################################################## -# Slightly adapted from https://github.com/Alpine-DAV/ascent/blob/0dedd70319145b3a31dd4d889fb82aaad995797b/scripts/build_ascent/build_ascent_cuda.sh -# Original Copyright (c) 2015-2023, Lawrence Livermore National Security, LLC. 
-# Released under BSD 3-Clause license -# -############################################################################## -# Demonstrates how to manually build Ascent and its dependencies, including: -# -# hdf5, conduit, vtk-m, mfem, raja, and umpire -# -# usage example: -# env enable_mpi=ON enable_openmp=ON ./build_ascent.sh -# -# -# Assumes: -# - cmake is in your path -# - selected compilers (including nvcc) are in your path or set via env vars -# - [when enabled] MPI and Python (+numpy and mpi4py), are in your path -# -############################################################################## -set -eu -o pipefail - -CC="${CC:=gcc}" -CXX="${CXX:=g++}" -FTN="${FTN:=gfortran}" - -CUDA_ARCH="${CUDA_ARCH:=80}" -CUDA_ARCH_VTKM="${CUDA_ARCH_VTKM:=ampere}" - -############################################################################## -# Build Options -############################################################################## - -# shared options -enable_fortran="${enable_fortran:=OFF}" -enable_python="${enable_python:=OFF}" -enable_openmp="${enable_openmp:=OFF}" -enable_mpi="${enable_mpi:=ON}" -enable_find_mpi="${enable_find_mpi:=ON}" -enable_tests="${enable_tests:=ON}" -enable_verbose="${enable_verbose:=ON}" -build_jobs="${build_jobs:=8}" -build_config="${build_config:=Release}" -build_shared_libs="${build_shared_libs:=ON}" - -# tpl controls -build_hdf5="${build_hdf5:=false}" -build_conduit="${build_conduit:=true}" -build_vtkm="${build_vtkm:=true}" -build_camp="${build_camp:=true}" -build_raja="${build_raja:=true}" -build_umpire="${build_umpire:=true}" -build_mfem="${build_mfem:=true}" - -# ascent options -build_ascent="${build_ascent:=true}" - -root_dir=$(pwd) - -################ -# HDF5 -################ -hdf5_version=1.12.2 -hdf5_src_dir=${root_dir}/hdf5-${hdf5_version} -hdf5_build_dir=${root_dir}/build/hdf5-${hdf5_version}/ -#hdf5_install_dir=${root_dir}/install/hdf5-${hdf5_version}/ -hdf5_install_dir=/usr/local/hdf5/parallel 
-hdf5_tarball=hdf5-${hdf5_version}.tar.gz - -# build only if install doesn't exist -if [ ! -d ${hdf5_install_dir} ]; then -if ${build_hdf5}; then -if [ ! -d ${hdf5_src_dir} ]; then - echo "**** Downloading ${hdf5_tarball}" - curl -L https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.12/hdf5-1.12.2/src/hdf5-1.12.2.tar.gz -o ${hdf5_tarball} - tar -xzf ${hdf5_tarball} -fi - -echo "**** Configuring HDF5 ${hdf5_version}" -cmake -S ${hdf5_src_dir} -B ${hdf5_build_dir} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DCMAKE_INSTALL_PREFIX=${hdf5_install_dir} - -echo "**** Building HDF5 ${hdf5_version}" -cmake --build ${hdf5_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing HDF5 ${hdf5_version}" -cmake --install ${hdf5_build_dir} - -fi -else - echo "**** Skipping HDF5 build, install found at: ${hdf5_install_dir}" -fi # build_hdf5 - - -################ -# Conduit -################ -conduit_version=v0.8.6 -conduit_src_dir=${root_dir}/conduit-${conduit_version}/src -conduit_build_dir=${root_dir}/build/conduit-${conduit_version}/ -conduit_install_dir=/usr/local/conduit-${conduit_version}/ -conduit_tarball=conduit-${conduit_version}-src-with-blt.tar.gz - -# build only if install doesn't exist -if [ ! -d ${conduit_install_dir} ]; then -if ${build_conduit}; then -if [ ! 
-d ${conduit_src_dir} ]; then - echo "**** Downloading ${conduit_tarball}" - curl -L https://github.com/LLNL/conduit/releases/download/${conduit_version}/${conduit_tarball} -o ${conduit_tarball} - tar -xzf ${conduit_tarball} -fi - -echo "**** Configuring Conduit ${conduit_version}" -cmake -S ${conduit_src_dir} -B ${conduit_build_dir} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DCMAKE_INSTALL_PREFIX=${conduit_install_dir} \ - -DENABLE_FORTRAN=${enable_fortran} \ - -DENABLE_MPI=${enable_mpi} \ - -DENABLE_FIND_MPI=${enable_find_mpi} \ - -DENABLE_PYTHON=${enable_python} \ - -DENABLE_TESTS=${enable_tests} \ - -DHDF5_DIR=${hdf5_install_dir} - -echo "**** Building Conduit ${conduit_version}" -cmake --build ${conduit_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Conduit ${conduit_version}" -cmake --install ${conduit_build_dir} - -fi -else - echo "**** Skipping Conduit build, install found at: ${conduit_install_dir}" -fi # build_conduit - - -################ -# VTK-m -################ -vtkm_version=v1.9.0 -vtkm_src_dir=${root_dir}/vtk-m-${vtkm_version} -vtkm_build_dir=${root_dir}/build/vtk-m-${vtkm_version} -vtkm_install_dir=/usr/local/vtk-m-${vtkm_version}/ -vtkm_tarball=vtk-m-${vtkm_version}.tar.gz - -# build only if install doesn't exist -if [ ! -d ${vtkm_install_dir} ]; then -if ${build_vtkm}; then -if [ ! 
-d ${vtkm_src_dir} ]; then - echo "**** Downloading ${vtkm_tarball}" - curl -L https://gitlab.kitware.com/vtk/vtk-m/-/archive/${vtkm_version}/${vtkm_tarball} -o ${vtkm_tarball} - tar -xzf ${vtkm_tarball} -fi - -echo "**** Configuring VTK-m ${vtkm_version}" -cmake -S ${vtkm_src_dir} -B ${vtkm_build_dir} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DVTKm_NO_DEPRECATED_VIRTUAL=ON \ - -DVTKm_USE_64BIT_IDS=OFF \ - -DVTKm_USE_DOUBLE_PRECISION=ON \ - -DVTKm_USE_DEFAULT_TYPES_FOR_ASCENT=ON \ - -DVTKm_ENABLE_BENCHMARKS=OFF\ - -DVTKm_ENABLE_RENDERING=ON \ - -DVTKm_ENABLE_TESTING=OFF \ - -DBUILD_TESTING=OFF \ - -DVTKm_ENABLE_BENCHMARKS=OFF\ - -DVTKm_ENABLE_MPI=OFF \ - -DVTKm_ENABLE_CUDA=ON \ - -DVTKm_CUDA_Architecture=ampere \ - -DCMAKE_CUDA_HOST_COMPILER=${CXX}\ - -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH} \ - -DCMAKE_INSTALL_PREFIX=${vtkm_install_dir} - -echo "**** Building VTK-m ${vtkm_version}" -cmake --build ${vtkm_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing VTK-m ${vtkm_version}" -cmake --install ${vtkm_build_dir} - -fi -else - echo "**** Skipping VTK-m build, install found at: ${vtkm_install_dir}" -fi # build_vtkm - - -################ -# Camp -################ -camp_version=2022.10.1 -camp_src_dir=${root_dir}/camp-${camp_version} -camp_build_dir=${root_dir}/build/camp-${camp_version} -camp_install_dir=/usr/local/camp-${camp_version}/ -camp_tarball=camp-${camp_version}.tar.gz - -# build only if install doesn't exist -if [ ! -d ${camp_install_dir} ]; then -if ${build_camp}; then -if [ ! 
-d ${camp_src_dir} ]; then - echo "**** Cloning Camp ${camp_version}" - # clone since camp releases don't contain submodules - git clone --recursive --depth 1 --branch v${camp_version} https://github.com/LLNL/camp.git camp-${camp_version} - # curl -L https://github.com/LLNL/camp/archive/refs/tags/v${camp_version}.tar.gz -o ${camp_tarball} - # tar -xzf ${camp_tarball} -fi - -echo "**** Configuring Camp ${camp_version}" -cmake -S ${camp_src_dir} -B ${camp_build_dir} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DCMAKE_C_COMPILER=${CC} \ - -DCMAKE_CXX_COMPILER=${CXX} \ - -DENABLE_CUDA=ON \ - -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH} \ - -DENABLE_TESTS=OFF \ - -DCMAKE_INSTALL_PREFIX=${camp_install_dir} - -echo "**** Building Camp ${camp_version}" -cmake --build ${camp_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Camp ${camp_version}" -cmake --install ${camp_build_dir} - -fi -else - echo "**** Skipping Camp build, install found at: ${camp_install_dir}" -fi # build_camp - - -################ -# RAJA -################ -raja_version=v2022.10.4 -raja_src_dir=${root_dir}/RAJA-${raja_version} -raja_build_dir=${root_dir}/build/raja-${raja_version} -raja_install_dir=/usr/local/raja-${raja_version}/ -raja_tarball=RAJA-${raja_version}.tar.gz - -# build only if install doesn't exist -if [ ! -d ${raja_install_dir} ]; then -if ${build_raja}; then -if [ ! 
-d ${raja_src_dir} ]; then - echo "**** Downloading ${raja_tarball}" - curl -L https://github.com/LLNL/RAJA/releases/download/${raja_version}/${raja_tarball} -o ${raja_tarball} - tar -xzf ${raja_tarball} -fi - -echo "**** Configuring RAJA ${raja_version}" -cmake -S ${raja_src_dir} -B ${raja_build_dir} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DCMAKE_C_COMPILER=${CC} \ - -DCMAKE_CXX_COMPILER=${CXX} \ - -DENABLE_OPENMP=OFF \ - -DENABLE_CUDA=ON \ - -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH} \ - -Dcamp_DIR=${camp_install_dir} \ - -DENABLE_TESTS=${enable_tests} \ - -DRAJA_ENABLE_TESTS=${enable_tests} \ - -DENABLE_EXAMPLES=${enable_tests} \ - -DENABLE_EXERCISES=${enable_tests} \ - -DRAJA_ENABLE_VECTORIZATION=OFF \ - -DCMAKE_INSTALL_PREFIX=${raja_install_dir} - - -echo "**** Building RAJA ${raja_version}" -cmake --build ${raja_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing RAJA ${raja_version}" -cmake --install ${raja_build_dir} - -fi -else - echo "**** Skipping RAJA build, install found at: ${raja_install_dir}" -fi # build_raja - - -################ -# Umpire -################ -umpire_version=2022.10.0 -umpire_src_dir=${root_dir}/umpire-${umpire_version} -umpire_build_dir=${root_dir}/build/umpire-${umpire_version} -umpire_install_dir=/usr/local/umpire-${umpire_version}/ -umpire_tarball=umpire-${umpire_version}.tar.gz - -# build only if install doesn't exist -if [ ! -d ${umpire_install_dir} ]; then -if ${build_umpire}; then -if [ ! 
-d ${umpire_src_dir} ]; then - echo "**** Downloading ${umpire_tarball}" - curl -L https://github.com/LLNL/Umpire/releases/download/v${umpire_version}/${umpire_tarball} -o ${umpire_tarball} - tar -xzf ${umpire_tarball} -fi - -echo "**** Configuring Umpire ${umpire_version}" -cmake -S ${umpire_src_dir} -B ${umpire_build_dir} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DCMAKE_C_COMPILER=${CC} \ - -DCMAKE_CXX_COMPILER=${CXX} \ - -DENABLE_CUDA=ON \ - -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH} \ - -Dcamp_DIR=${camp_install_dir} \ - -DENABLE_OPENMP=${enable_openmp} \ - -DENABLE_TESTS=${enable_tests} \ - -DCMAKE_INSTALL_PREFIX=${umpire_install_dir} - -echo "**** Building Umpire ${umpire_version}" -cmake --build ${umpire_build_dir} -j${build_jobs} -echo "**** Installing Umpire ${umpire_version}" -cmake --install ${umpire_build_dir} - -fi -else - echo "**** Skipping Umpire build, install found at: ${umpire_install_dir}" -fi # build_umpire - -################ -# MFEM -################ -mfem_version=4.4 -mfem_src_dir=${root_dir}/mfem-${mfem_version} -mfem_build_dir=${root_dir}/build/mfem-${mfem_version} -mfem_install_dir=/usr/local/mfem-${mfem_version}/ -mfem_tarball=mfem-${mfem_version}.tar.gz - -# build only if install doesn't exist -if [ ! -d ${mfem_install_dir} ]; then -if ${build_mfem}; then -if [ ! 
-d ${mfem_src_dir} ]; then - echo "**** Downloading ${mfem_tarball}" - curl -L https://github.com/mfem/mfem/archive/refs/tags/v4.4.tar.gz -o ${mfem_tarball} - tar -xzf ${mfem_tarball} -fi - -echo "**** Configuring MFEM ${mfem_version}" -cmake -S ${mfem_src_dir} -B ${mfem_build_dir} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DMFEM_USE_CONDUIT=ON \ - -DCMAKE_PREFIX_PATH="${conduit_install_dir}" \ - -DCMAKE_INSTALL_PREFIX=${mfem_install_dir} - -echo "**** Building MFEM ${vtkm_version}" -cmake --build ${mfem_build_dir} -j${build_jobs} -echo "**** Installing MFEM ${mfem_version}" -cmake --install ${mfem_build_dir} - -fi -else - echo "**** Skipping MFEM build, install found at: ${mfem_install_dir}" -fi # build_mfem - - -################ -# Ascent -################ -ascent_version=develop -ascent_src_dir=${root_dir}/ascent/src -ascent_build_dir=${root_dir}/build/ascent-${ascent_version}/ -ascent_install_dir=/usr/local/ascent-${ascent_version}/ - -echo "**** Creating Ascent host-config (ascent-config.cmake)" -# -echo '# host-config file generated by build_ascent.sh' > ascent-config.cmake -echo 'set(CMAKE_VERBOSE_MAKEFILE ' ${enable_verbose} 'CACHE BOOL "")' >> ascent-config.cmake -echo 'set(CMAKE_C_COMPILER ' ${CC} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(CMAKE_CXX_COMPILER ' ${CXX} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(CMAKE_BUILD_TYPE ' ${build_config} ' CACHE STRING "")' >> ascent-config.cmake -echo 'set(BUILD_SHARED_LIBS ' ${build_shared_libs} ' CACHE STRING "")' >> ascent-config.cmake -echo 'set(CMAKE_INSTALL_PREFIX ' ${ascent_install_dir} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(ENABLE_TESTS ' ${enable_tests} ' CACHE BOOL "")' >> ascent-config.cmake -echo 'set(ENABLE_MPI ' ${enable_mpi} ' CACHE BOOL "")' >> ascent-config.cmake -echo 'set(ENABLE_FIND_MPI ' ${enable_find_mpi} ' CACHE BOOL "")' >> ascent-config.cmake -echo 
'set(ENABLE_FORTRAN ' ${enable_fortran} ' CACHE BOOL "")' >> ascent-config.cmake -echo 'set(ENABLE_PYTHON ' ${enable_python} ' CACHE BOOL "")' >> ascent-config.cmake -echo 'set(BLT_CXX_STD c++14 CACHE STRING "")' >> ascent-config.cmake -echo 'set(ENABLE_CUDA ON CACHE BOOL "")' >> ascent-config.cmake -echo 'set(CMAKE_CUDA_ARCHITECTURES ' ${CUDA_ARCH} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(CONDUIT_DIR ' ${conduit_install_dir} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(VTKM_DIR ' ${vtkm_install_dir} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(CAMP_DIR ' ${camp_install_dir} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(RAJA_DIR ' ${raja_install_dir} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(UMPIRE_DIR ' ${umpire_install_dir} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(MFEM_DIR ' ${mfem_install_dir} ' CACHE PATH "")' >> ascent-config.cmake -echo 'set(ENABLE_VTKH ON CACHE BOOL "")' >> ascent-config.cmake -echo 'set(ENABLE_APCOMP ON CACHE BOOL "")' >> ascent-config.cmake -echo 'set(ENABLE_DRAY ON CACHE BOOL "")' >> ascent-config.cmake - -# build only if install doesn't exist -if [ ! -d ${ascent_install_dir} ]; then -if ${build_ascent}; then -if [ ! 
-d ${ascent_src_dir} ]; then - echo "**** Cloning Ascent" - git clone --recursive https://github.com/Alpine-DAV/ascent.git -fi - -echo "**** Configuring Ascent" -cmake -S ${ascent_src_dir} -B ${ascent_build_dir} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DCMAKE_INSTALL_PREFIX=${ascent_install_dir} \ - -DENABLE_MPI=${enable_mpi} \ - -DENABLE_FIND_MPI=${enable_find_mpi} \ - -DENABLE_FORTRAN=${enable_fortran} \ - -DENABLE_TESTS=$enable_tests \ - -DENABLE_PYTHON=${enable_python} \ - -DBLT_CXX_STD=c++14 \ - -DENABLE_CUDA=ON \ - -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH} \ - -DCONDUIT_DIR=${conduit_install_dir} \ - -DVTKM_DIR=${vtkm_install_dir} \ - -DRAJA_DIR=${raja_install_dir} \ - -DUMPIRE_DIR=${umpire_install_dir} \ - -DCAMP_DIR=${camp_install_dir} \ - -DMFEM_DIR=${mfem_install_dir} \ - -DENABLE_VTKH=ON \ - -DENABLE_APCOMP=ON \ - -DENABLE_DRAY=ON - -echo "**** Building Ascent" -cmake --build ${ascent_build_dir} -j${build_jobs} -echo "**** Installing Ascent" -cmake --install ${ascent_build_dir} - -fi -else - echo "**** Skipping Ascent build, install found at: ${ascent_install_dir}" -fi # build_ascent - From 494048b7ed40d17e9720939105e0b7bc6d9f5698 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Wed, 28 Aug 2024 09:34:40 -0400 Subject: [PATCH 13/29] disable ascent build; fix openpmd build --- scripts/docker/Dockerfile.nvcc | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index a134a54266b9..d567e94b79ef 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,7 +1,11 @@ FROM nvidia/cuda:12.6.0-devel-ubuntu24.04 RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy 
python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev gcc-11 g++-11 && \ + update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 10 && \ + update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-11 10 + +RUN g++ --version RUN pip3 install unyt --break-system-packages @@ -45,11 +49,19 @@ RUN mkdir /tmp/build-adios2 && cd /tmp/build-adios2 && \ cd / && \ rm -rf /tmp/build-adios2 -RUN openPMD_USE_MPI=ON pip3 install openpmd-api --no-binary openpmd-api --break-system-packages +RUN mkdir /tmp/build-openpmd && cd /tmp/build-openpmd && \ + git clone https://github.com/openPMD/openPMD-api.git && \ + mkdir openPMD-api-build && cd openPMD-api-build && \ + cmake ../openPMD-api -DopenPMD_USE_PYTHON=ON -DPython_EXECUTABLE=$(which python3) -DopenPMD_USE_ADIOS2=ON && \ + cmake --build . && \ + cmake --build . 
--target install && \ + cd / && \ + rm -rf /tmp/build-openpmd -COPY ascent_build /tmp/ascent_build +## Note: Ascent does not work with CUDA 12, so it's disabled +#COPY ascent_build /tmp/ascent_build -RUN cd /tmp/ascent_build && \ - bash build_ascent.sh && \ - cd / && \ - rm -rf /tmp/ascent_build +#RUN cd /tmp/ascent_build && \ +# bash build_ascent.sh && \ +# cd / && \ +# rm -rf /tmp/ascent_build From 9b7343468c909728248d4957264c223188323bbb Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Wed, 28 Aug 2024 16:56:52 -0400 Subject: [PATCH 14/29] downgrade to CUDA 12.0 --- scripts/docker/Dockerfile.nvcc | 19 +++++++++---------- scripts/docker/ascent_build/build_ascent.sh | 6 +++--- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index d567e94b79ef..8393406144fb 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,4 +1,4 @@ -FROM nvidia/cuda:12.6.0-devel-ubuntu24.04 +FROM nvidia/cuda:12.0.0-devel-ubuntu22.04 RUN apt-get clean && apt-get update -y && \ DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev gcc-11 g++-11 && \ @@ -7,12 +7,12 @@ RUN apt-get clean && apt-get update -y && \ RUN g++ --version -RUN pip3 install unyt --break-system-packages +RUN pip3 install unyt -RUN pip3 install blosc2 --break-system-packages +RUN pip3 install blosc2 RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key| apt-key add - && \ - echo "deb http://apt.llvm.org/noble/ llvm-toolchain-noble-19 main" > /etc/apt/sources.list.d/llvm.list + echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" > /etc/apt/sources.list.d/llvm.list RUN apt-get clean && apt-get update -y && \ DEBIAN_FRONTEND="noninteractive" 
TZ=America/New_York apt-get install -y --no-install-recommends clang-19 llvm-19 libomp-19-dev && \ @@ -58,10 +58,9 @@ RUN mkdir /tmp/build-openpmd && cd /tmp/build-openpmd && \ cd / && \ rm -rf /tmp/build-openpmd -## Note: Ascent does not work with CUDA 12, so it's disabled -#COPY ascent_build /tmp/ascent_build +COPY ascent_build /tmp/ascent_build -#RUN cd /tmp/ascent_build && \ -# bash build_ascent.sh && \ -# cd / && \ -# rm -rf /tmp/ascent_build +RUN cd /tmp/ascent_build && \ + bash build_ascent.sh && \ + cd / && \ + rm -rf /tmp/ascent_build diff --git a/scripts/docker/ascent_build/build_ascent.sh b/scripts/docker/ascent_build/build_ascent.sh index 26a37bd23180..26b02a1a0497 100644 --- a/scripts/docker/ascent_build/build_ascent.sh +++ b/scripts/docker/ascent_build/build_ascent.sh @@ -37,7 +37,7 @@ enable_mpi="${enable_mpi:=ON}" enable_find_mpi="${enable_find_mpi:=ON}" enable_tests="${enable_tests:=OFF}" enable_verbose="${enable_verbose:=ON}" -build_jobs="${build_jobs:=8}" +build_jobs="${build_jobs:=1}" build_config="${build_config:=Release}" build_shared_libs="${build_shared_libs:=ON}" @@ -132,8 +132,8 @@ root_dir="${prefix:=${root_dir}}" root_dir=$(ospath ${root_dir}) root_dir=$(abs_path ${root_dir}) script_dir=$(abs_path "$(dirname "${BASH_SOURCE[0]}")") -build_dir=$(ospath ${root_dir}/build) -source_dir=$(ospath ${root_dir}/source) +build_dir=$(ospath build) +source_dir=$(ospath source) # root_dir is where we will build and install From 71416f525e57b2b1cef37fe5b1401b4ab79aa19e Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Wed, 28 Aug 2024 17:03:44 -0400 Subject: [PATCH 15/29] fix ascent build path --- scripts/docker/ascent_build/build_ascent.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/docker/ascent_build/build_ascent.sh b/scripts/docker/ascent_build/build_ascent.sh index 26b02a1a0497..6b3e434d2b58 100644 --- a/scripts/docker/ascent_build/build_ascent.sh +++ b/scripts/docker/ascent_build/build_ascent.sh @@ -127,7 +127,7 
@@ function abs_path() fi } -root_dir=/usr/local +root_dir=$(pwd) root_dir="${prefix:=${root_dir}}" root_dir=$(ospath ${root_dir}) root_dir=$(abs_path ${root_dir}) @@ -146,7 +146,7 @@ cd ${root_dir} # install_dir is where we will install # override with `prefix` env var -install_dir="${install_dir:=$root_dir/install}" +install_dir=/usr/local echo "*** prefix: ${root_dir}" echo "*** build root: ${build_dir}" From 93c2045ebcfaa2a3e50f8b71f0bf869385aa939d Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Thu, 29 Aug 2024 10:26:34 -0400 Subject: [PATCH 16/29] fix bug in build_ascent.sh --- scripts/docker/ascent_build/build_ascent.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/docker/ascent_build/build_ascent.sh b/scripts/docker/ascent_build/build_ascent.sh index 6b3e434d2b58..3f1e0313d3a1 100644 --- a/scripts/docker/ascent_build/build_ascent.sh +++ b/scripts/docker/ascent_build/build_ascent.sh @@ -960,7 +960,7 @@ if [ ! -d ${ascent_install_dir} ]; then if ${build_ascent}; then if [ ! 
-d ${ascent_src_dir} ]; then echo "**** Cloning Ascent" - git clone --recursive https://github.com/Alpine-DAV/ascent.git + git clone --recursive https://github.com/Alpine-DAV/ascent.git $(ospath ${source_dir}/ascent) fi echo "**** Configuring Ascent" From 804faf4cc3424f2a412dc36244dd77cf39f9c21f Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Thu, 29 Aug 2024 10:36:39 -0400 Subject: [PATCH 17/29] remove unneeded patches --- .../2024_07_25_silo_4_11_cmake_fix.patch | 23 - .../2024_07_29_silo-pr389-win32-bugfix.patch | 10 - .../2024_08_01_caliper-win-smaller-opts.patch | 1102 ----------------- ...pr1311-detect-if-caliper-needs-adiak.patch | 147 --- 4 files changed, 1282 deletions(-) delete mode 100644 scripts/docker/ascent_build/2024_07_25_silo_4_11_cmake_fix.patch delete mode 100644 scripts/docker/ascent_build/2024_07_29_silo-pr389-win32-bugfix.patch delete mode 100644 scripts/docker/ascent_build/2024_08_01_caliper-win-smaller-opts.patch delete mode 100644 scripts/docker/ascent_build/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch diff --git a/scripts/docker/ascent_build/2024_07_25_silo_4_11_cmake_fix.patch b/scripts/docker/ascent_build/2024_07_25_silo_4_11_cmake_fix.patch deleted file mode 100644 index 6833fffa5080..000000000000 --- a/scripts/docker/ascent_build/2024_07_25_silo_4_11_cmake_fix.patch +++ /dev/null @@ -1,23 +0,0 @@ -diff --git a/CMakeLists.txt b/CMakeLists.txt -index 0dc4a5b..fd6baaf 100644 ---- a/CMakeLists.txt -+++ b/CMakeLists.txt -@@ -57,7 +57,9 @@ cmake_minimum_required(VERSION 3.12 FATAL_ERROR) - ### - # grab the version string - ### --file(STRINGS ${CMAKE_CURRENT_SOURCE_DIR}/VERSION SILO_VERSION) -+file(STRINGS ${CMAKE_CURRENT_SOURCE_DIR}/SILO_VERSION SILO_VERSION) -+# Strip suffix -+string(REGEX REPLACE "-.*" "" SILO_VERSION "${SILO_VERSION}") - - ###----------------------------------------------------------------------------- - # project command will automatically create cmake vars for major, minor, -@@ -139,7 +141,9 @@ 
CMAKE_DEPENDENT_OPTION(SILO_ENABLE_HZIP "Enable Lindstrom hex/quad mesh compress - ## - # Set up a default INSTALL prefix that is peer to the build directory - ## --set(CMAKE_INSTALL_PREFIX ${Silo_BINARY_DIR}/../SiloInstall CACHE PATH "install prefix" FORCE) -+if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) -+ set(CMAKE_INSTALL_PREFIX ${Silo_BINARY_DIR}/../SiloInstall CACHE PATH "install prefix" FORCE) -+endif() diff --git a/scripts/docker/ascent_build/2024_07_29_silo-pr389-win32-bugfix.patch b/scripts/docker/ascent_build/2024_07_29_silo-pr389-win32-bugfix.patch deleted file mode 100644 index 3d62a99781cc..000000000000 --- a/scripts/docker/ascent_build/2024_07_29_silo-pr389-win32-bugfix.patch +++ /dev/null @@ -1,10 +0,0 @@ -diff --git a/src/silo/silo_win32_compatibility.h b/src/silo/silo_win32_compatibility.h -index bc4d38f..00b970c 100644 ---- a/src/silo/silo_win32_compatibility.h -+++ b/src/silo/silo_win32_compatibility.h -@@ -1,4 +1,4 @@ --#ifdef WIN32 -+#ifdef _WIN32 - #ifndef SILO_WIN32_COMPATIBILITY - #define SILO_WIN32_COMPATIBILITY - #include /* Include Windows IO * diff --git a/scripts/docker/ascent_build/2024_08_01_caliper-win-smaller-opts.patch b/scripts/docker/ascent_build/2024_08_01_caliper-win-smaller-opts.patch deleted file mode 100644 index 2e47817d94ea..000000000000 --- a/scripts/docker/ascent_build/2024_08_01_caliper-win-smaller-opts.patch +++ /dev/null @@ -1,1102 +0,0 @@ -diff --git a/src/caliper/controllers/controllers.cpp b/src/caliper/controllers/controllers.cpp -index 787896c..4e2a44d 100644 ---- a/src/caliper/controllers/controllers.cpp -+++ b/src/caliper/controllers/controllers.cpp -@@ -223,947 +223,159 @@ const ConfigManager::ConfigInfo* builtin_controllers_table[] = { - nullptr - }; - -+/// NOTE: SMALLER SET OF BUILD OPTIONS FOR WINDOWS! -+/// Windows does not like long string literals, and -+/// caliper supports less services on windows. 
-+ - const char* builtin_option_specs = R"json( -+[ -+{ -+ "name" : "level", -+ "type" : "string", -+ "description" : "Minimum region level that triggers snapshots", -+ "category" : "event", -+ "config" : { "CALI_EVENT_REGION_LEVEL": "{}" } -+}, -+{ -+ "name" : "include_branches", -+ "type" : "string", -+ "description" : "Only take snapshots for branches with the given region names.", -+ "category" : "event", -+ "config" : { "CALI_EVENT_INCLUDE_BRANCHES": "{}" } -+}, -+{ -+ "name" : "include_regions", -+ "type" : "string", -+ "description" : "Only take snapshots for the given region names/patterns.", -+ "category" : "event", -+ "config" : { "CALI_EVENT_INCLUDE_REGIONS": "{}" } -+}, -+{ -+ "name" : "exclude_regions", -+ "type" : "string", -+ "description" : "Do not take snapshots for the given region names/patterns.", -+ "category" : "event", -+ "config" : { "CALI_EVENT_EXCLUDE_REGIONS": "{}" } -+}, -+{ -+ "name" : "region.count", -+ "description" : "Report number of begin/end region instances", -+ "type" : "bool", -+ "category" : "metric", -+ "query" : -+ [ -+ { "level" : "local", -+ "let" : [ "rc.count=first(sum#region.count,region.count)" ], -+ "select" : [ "sum(rc.count) as Calls unit count" ] -+ }, -+ { "level" : "cross", "select": - [ -- { -- "name" : "profile.mpi", -- "type" : "bool", -- "description" : "Profile MPI functions", -- "category" : "region", -- "services" : [ "mpi" ], -- "config": { "CALI_MPI_BLACKLIST": "MPI_Comm_rank,MPI_Comm_size,MPI_Wtick,MPI_Wtime" } -- }, -- { -- "name" : "profile.cuda", -- "type" : "bool", -- "description" : "Profile CUDA API functions", -- "category" : "region", -- "services" : [ "cupti" ] -- }, -- { -- "name" : "profile.hip", -- "type" : "bool", -- "description" : "Profile HIP API functions", -- "category" : "region", -- "services" : [ "roctracer" ], -- "config" : { "CALI_ROCTRACER_TRACE_ACTIVITIES": "false" } -- }, -- { -- "name" : "profile.kokkos", -- "type" : "bool", -- "description" : "Profile Kokkos functions", -- 
"category" : "region", -- "services" : [ "kokkostime" ] -- }, -- { -- "name" : "main_thread_only", -- "type" : "bool", -- "description" : "Only include measurements from the main thread in results.", -- "category" : "region", -- "services" : [ "pthread" ], -- "query" : -- [ -- { "level" : "local", -- "where" : "pthread.is_master=true" -- } -- ] -- }, -- { -- "name" : "level", -- "type" : "string", -- "description" : "Minimum region level that triggers snapshots", -- "category" : "event", -- "config" : { "CALI_EVENT_REGION_LEVEL": "{}" } -- }, -- { -- "name" : "include_branches", -- "type" : "string", -- "description" : "Only take snapshots for branches with the given region names.", -- "category" : "event", -- "config" : { "CALI_EVENT_INCLUDE_BRANCHES": "{}" } -- }, -- { -- "name" : "include_regions", -- "type" : "string", -- "description" : "Only take snapshots for the given region names/patterns.", -- "category" : "event", -- "config" : { "CALI_EVENT_INCLUDE_REGIONS": "{}" } -- }, -- { -- "name" : "exclude_regions", -- "type" : "string", -- "description" : "Do not take snapshots for the given region names/patterns.", -- "category" : "event", -- "config" : { "CALI_EVENT_EXCLUDE_REGIONS": "{}" } -- }, -- { -- "name" : "mpi.include", -- "type" : "string", -- "description" : "Only instrument these MPI functions.", -- "category" : "region", -- "config" : { "CALI_MPI_WHITELIST": "{}" } -- }, -- { -- "name" : "mpi.exclude", -- "type" : "string", -- "description" : "Do not instrument these MPI functions.", -- "category" : "region", -- "config" : { "CALI_MPI_BLACKLIST": "{}" } -- }, -- { -- "name" : "region.count", -- "description" : "Report number of begin/end region instances", -- "type" : "bool", -- "category" : "metric", -- "query" : -- [ -- { "level" : "local", -- "let" : [ "rc.count=first(sum#region.count,region.count)" ], -- "select" : [ "sum(rc.count) as Calls unit count" ] -- }, -- { "level" : "cross", "select": -- [ -- "min(sum#rc.count) as \"Calls/rank (min)\" 
unit count", -- "avg(sum#rc.count) as \"Calls/rank (avg)\" unit count", -- "max(sum#rc.count) as \"Calls/rank (max)\" unit count", -- "sum(sum#rc.count) as \"Calls/rank (total)\" unit count" -- ] -- } -- ] -- }, -- { -- "name" : "region.stats", -- "description" : "Detailed region timing statistics (min/max/avg time per visit)", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "timer", "event" ], -- "config" : -- { -- "CALI_TIMER_INCLUSIVE_DURATION" : "true", -- "CALI_EVENT_ENABLE_SNAPSHOT_INFO" : "true" -- }, -- "query" : -- [ -- { "level" : "local", -- "let" : -- [ -- "rs.count=first(sum#region.count,region.count)", -- "rs.min=scale(min#time.inclusive.duration.ns,1e-9)", -- "rs.max=scale(max#time.inclusive.duration.ns,1e-9)", -- "rs.sum=scale(sum#time.inclusive.duration.ns,1e-9)" -- ], -- "aggregate": -- [ -- "sum(rs.sum)" -- ], -- "select" : -- [ -- "sum(rs.count) as Visits unit count", -- "min(rs.min) as \"Min time/visit\" unit sec", -- "ratio(rs.sum,rs.count) as \"Avg time/visit\" unit sec", -- "max(rs.max) as \"Max time/visit\" unit sec" -- ] -- }, -- { "level" : "cross", "select": -- [ -- "sum(sum#rs.count) as Visits unit count", -- "min(min#rs.min) as \"Min time/visit\" unit sec", -- "ratio(sum#rs.sum,sum#rs.count) as \"Avg time/visit\" unit sec", -- "max(max#rs.max) as \"Max time/visit\" unit sec" -- ] -- } -- ] -- }, -- { -- "name" : "node.order", -- "description" : "Report order in which regions first appeared", -- "type" : "bool", -- "category" : "metric", -- "query" : -- [ -- { "level" : "local", -- "select" : [ "min(aggregate.slot) as \"Node order\"" ] -- }, -- { "level" : "cross", -- "select" : [ "min(min#aggregate.slot) as \"Node order\"" ] -- } -- ] -- }, -- { -- "name" : "source.module", -- "type" : "bool", -- "category" : "sampling", -- "description" : "Report source module (.so/.exe)", -- "services" : [ "symbollookup" ], -- "config" : { "CALI_SYMBOLLOOKUP_LOOKUP_MODULE": "true" }, -- "query": -- [ -- { "level": "local", "group by": 
"module#cali.sampler.pc", -- "select": [ "module#cali.sampler.pc as \"Module\"" ] -- }, -- { "level": "cross", "group by": "module#cali.sampler.pc", -- "select": [ "module#cali.sampler.pc as \"Module\"" ] -- } -- ] -- }, -- { -- "name" : "source.function", -- "type" : "bool", -- "category" : "sampling", -- "description" : "Report source function symbol names", -- "services" : [ "symbollookup" ], -- "config" : { "CALI_SYMBOLLOOKUP_LOOKUP_FUNCTION": "true" }, -- "query": -- [ -- { "level": "local", "group by": "source.function#cali.sampler.pc", -- "select": [ "source.function#cali.sampler.pc as \"Function\"" ] -- }, -- { "level": "cross", "group by": "source.function#cali.sampler.pc", -- "select": [ "source.function#cali.sampler.pc as \"Function\"" ] -- } -- ] -- }, -- { -- "name" : "source.location", -- "type" : "bool", -- "category" : "sampling", -- "description" : "Report source location (file+line)", -- "services" : [ "symbollookup" ], -- "config" : { "CALI_SYMBOLLOOKUP_LOOKUP_SOURCELOC": "true" }, -- "query": -- [ -- { "level": "local", "group by": "sourceloc#cali.sampler.pc", -- "select": [ "sourceloc#cali.sampler.pc as \"Source\"" ] -- }, -- { "level": "cross", "group by": "sourceloc#cali.sampler.pc", -- "select": [ "sourceloc#cali.sampler.pc as \"Source\"" ] -- } -- ] -- }, -- { -- "name" : "cuda.memcpy", -- "description" : "Report MB copied between host and device with cudaMemcpy", -- "type" : "bool", -- "category" : "cuptitrace.metric", -- "query" : -- [ -- { "level" : "local", -- "let" : -- [ -- "cuda.memcpy.dtoh=scale(cupti.memcpy.bytes,1e-6) if cupti.memcpy.kind=DtoH", -- "cuda.memcpy.htod=scale(cupti.memcpy.bytes,1e-6) if cupti.memcpy.kind=HtoD" -- ], -- "select" : -- [ -- "sum(cuda.memcpy.htod) as \"Copy CPU->GPU\" unit MB", -- "sum(cuda.memcpy.dtoh) as \"Copy GPU->CPU\" unit MB" -- ] -- }, -- { "level" : "cross", "select": -- [ -- "avg(sum#cuda.memcpy.htod) as \"Copy CPU->GPU (avg)\" unit MB", -- "max(sum#cuda.memcpy.htod) as \"Copy CPU->GPU (max)\" 
unit MB", -- "avg(sum#cuda.memcpy.dtoh) as \"Copy GPU->CPU (avg)\" unit MB", -- "max(sum#cuda.memcpy.dtoh) as \"Copy GPU->CPU (max)\" unit MB" -- ] -- } -- ] -- }, -- { -- "name" : "cuda.gputime", -- "description" : "Report GPU time in CUDA activities", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "cuptitrace" ], -- "query" : -- [ -- { "level" : "local", -- "select" : -- [ -- "inclusive_scale(cupti.activity.duration,1e-9) as \"GPU time (I)\" unit sec", -- ] -- }, -- { "level" : "cross", "select": -- [ -- "avg(iscale#cupti.activity.duration) as \"Avg GPU time/rank\" unit sec", -- "min(iscale#cupti.activity.duration) as \"Min GPU time/rank\" unit sec", -- "max(iscale#cupti.activity.duration) as \"Max GPU time/rank\" unit sec", -- "sum(iscale#cupti.activity.duration) as \"Total GPU time\" unit sec" -- ] -- } -- ] -- }, -- { -- "name" : "rocm.gputime", -- "description" : "Report GPU time in AMD ROCm activities", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "roctracer" ], -- "config" : { "CALI_ROCTRACER_TRACE_ACTIVITIES": "true", "CALI_ROCTRACER_RECORD_KERNEL_NAMES": "false" }, -- "query" : -- [ -- { "level" : "local", -- "select" : -- [ -- "inclusive_scale(sum#rocm.activity.duration,1e-9) as \"GPU time (I)\" unit sec" -- ] -- }, -- { "level" : "cross", -- "select" : -- [ -- "avg(iscale#sum#rocm.activity.duration) as \"Avg GPU time/rank\" unit sec", -- "min(iscale#sum#rocm.activity.duration) as \"Min GPU time/rank\" unit sec", -- "max(iscale#sum#rocm.activity.duration) as \"Max GPU time/rank\" unit sec", -- "sum(iscale#sum#rocm.activity.duration) as \"Total GPU time\" unit sec" -- ] -- } -- ] -- }, -- { -- "name" : "mpi.message.size", -- "description": "MPI message size", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "mpi" ], -- "config" : { "CALI_MPI_MSG_TRACING": "true", "CALI_MPI_BLACKLIST": "MPI_Wtime,MPI_Comm_rank,MPI_Comm_size" }, -- "query" : -- [ -- { "level" : "local", -- "let" : [ -- 
"mpimsg.min=first(min#mpi.msg.size,mpi.msg.size)", -- "mpimsg.avg=first(avg#mpi.msg.size,mpi.msg.size)", -- "mpimsg.max=first(max#mpi.msg.size,mpi.msg.size)" -- ], -- "select" : [ -- "min(mpimsg.min) as \"Msg size (min)\" unit Byte", -- "avg(mpimsg.avg) as \"Msg size (avg)\" unit Byte", -- "max(mpimsg.max) as \"Msg size (max)\" unit Byte" -- ] -- }, -- { "level" : "cross", -- "select" : [ -- "min(min#mpimsg.min) as \"Msg size (min)\" unit Byte", -- "avg(avg#mpimsg.avg) as \"Msg size (avg)\" unit Byte", -- "max(max#mpimsg.max) as \"Msg size (max)\" unit Byte" -- ] -- } -- ] -- }, -- { -- "name" : "mpi.message.count", -- "description": "Number of MPI send/recv/collective operations", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "mpi" ], -- "config" : { "CALI_MPI_MSG_TRACING": "true", "CALI_MPI_BLACKLIST": "MPI_Wtime,MPI_Comm_rank,MPI_Comm_size" }, -- "query" : -- [ -- { "level" : "local", -- "let" : [ -- "mpicount.recv=first(sum#mpi.recv.count,mpi.recv.count)", -- "mpicount.send=first(sum#mpi.send.count,mpi.send.count)", -- "mpicount.coll=first(sum#mpi.coll.count,mpi.coll.count)" -- ], -- "select" : [ -- "sum(mpicount.send) as \"Msgs sent\" unit count", -- "sum(mpicount.recv) as \"Msgs recvd\" unit count", -- "sum(mpicount.coll) as \"Collectives\" unit count" -- ] -- }, -- { "level" : "cross", -- "select" : [ -- "avg(sum#mpicount.send) as \"Msgs sent (avg)\" unit count", -- "max(sum#mpicount.send) as \"Msgs sent (max)\" unit count", -- "avg(sum#mpicount.recv) as \"Msgs recvd (avg)\" unit count", -- "max(sum#mpicount.recv) as \"Msgs recvd (max)\" unit count", -- "max(sum#mpicount.coll) as \"Collectives (max)\" unit count" -- ] -- } -- ] -- }, -- { -- "name" : "openmp.times", -- "description" : "Report time spent in OpenMP work and barrier regions", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "ompt", "timestamp" ], -- "query" : -- [ -- { "level" : "local", -- "let" : -- [ -- "t.omp.ns=first(sum#time.duration.ns,time.duration.ns)", 
-- "t.omp.work=scale(t.omp.ns,1e-9) if omp.work", -- "t.omp.sync=scale(t.omp.ns,1e-9) if omp.sync", -- "t.omp.total=first(t.omp.work,t.omp.sync)" -- ], -- "select" : -- [ "sum(t.omp.work) as \"Time (work)\" unit sec", -- "sum(t.omp.sync) as \"Time (barrier)\" unit sec" -- ] -- }, -- { "level" : "cross", "select": -- [ "avg(sum#t.omp.work) as \"Time (work) (avg)\" unit sec", -- "avg(sum#t.omp.sync) as \"Time (barrier) (avg)\" unit sec", -- "sum(sum#t.omp.work) as \"Time (work) (total)\" unit sec", -- "sum(sum#t.omp.sync) as \"Time (barrier) (total)\" unit sec" -- ] -- } -- ] -- }, -- { -- "name" : "openmp.efficiency", -- "description" : "Compute OpenMP efficiency metrics", -- "type" : "bool", -- "category" : "metric", -- "inherit" : [ "openmp.times" ], -- "query" : -- [ -- { "level" : "local", -- "select" : -- [ "inclusive_ratio(t.omp.work,t.omp.total,100.0) as \"Work %\" unit percent", -- "inclusive_ratio(t.omp.sync,t.omp.total,100.0) as \"Barrier %\" unit percent" -- ] -- }, -- { "level" : "cross", "select": -- [ "min(iratio#t.omp.work/t.omp.total) as \"Work % (min)\" unit percent", -- "avg(iratio#t.omp.work/t.omp.total) as \"Work % (avg)\" unit percent", -- "avg(iratio#t.omp.sync/t.omp.total) as \"Barrier % (avg)\" unit percent", -- "max(iratio#t.omp.sync/t.omp.total) as \"Barrier % (max)\" unit percent" -- ] -- } -- ] -- }, -- { -- "name" : "openmp.threads", -- "description" : "Show OpenMP threads", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "ompt" ], -- "query" : -- [ -- { "level" : "local", -- "let" : [ "n.omp.threads=first(omp.num.threads)" ], -- "group by": "omp.thread.id,omp.thread.type", -- "select" : -- [ "max(n.omp.threads) as \"#Threads\"", -- "omp.thread.id as \"Thread\"" -- ] -- }, -- { "level" : "cross", -- "group by": "omp.thread.id,omp.thread.type", -- "select" : -- [ "max(max#n.omp.threads) as \"#Threads\"", -- "omp.thread.id as Thread" -- ] -- } -- ] -- }, -- { -- "name" : "io.bytes.written", -- "description" : "Report I/O 
bytes written", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "io" ], -- "query" : -- [ -- { "level" : "local", -- "let" : [ "ibw.bytes.written=first(sum#io.bytes.written,io.bytes.written)" ], -- "select" : [ "sum(ibw.bytes.written) as \"Bytes written\" unit Byte" ] -- }, -- { "level" : "cross", "select": -- [ "avg(sum#ibw.bytes.written) as \"Avg written\" unit Byte", -- "sum(sum#ibw.bytes.written) as \"Total written\" unit Byte" -- ] -- } -- ] -- }, -- { -- "name" : "io.bytes.read", -- "description" : "Report I/O bytes read", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "io" ], -- "query" : -- [ -- { "level" : "local", -- "let" : [ "ibr.bytes.read=first(sum#io.bytes.read,io.bytes.read)" ], -- "select" : [ "sum(ibr.bytes.read) as \"Bytes read\" unit Byte" ] -- }, -- { "level" : "cross", "select": -- [ "avg(sum#ibr.bytes.read) as \"Avg read\" unit Byte", -- "sum(sum#ibr.bytes.read) as \"Total read\" unit Byte" -- ] -- } -- ] -- }, -- { -- "name" : "io.bytes", -- "description" : "Report I/O bytes written and read", -- "type" : "bool", -- "category" : "metric", -- "inherit" : [ "io.bytes.read", "io.bytes.written" ] -- }, -- { -- "name" : "io.read.bandwidth", -- "description" : "Report I/O read bandwidth", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "io" ], -- "query" : -- [ -- { "level" : "local", -- "group by" : "io.region", -- "let" : -- [ -- "irb.bytes.read=first(sum#io.bytes.read,io.bytes.read)", -- "irb.time.ns=first(sum#time.duration.ns,time.duration.ns)" -- ], -- "select" : -- [ -- "io.region as I/O", -- "ratio(irb.bytes.read,irb.time.ns,8e3) as \"Read Mbit/s\" unit Mb/s" -- ] -- }, -- { "level": "cross", "select": -- [ -- "avg(ratio#irb.bytes_read/irb.time.ns) as \"Avg read Mbit/s\" unit Mb/s", -- "max(ratio#irb.bytes_read/irb.time.ns) as \"Max read Mbit/s\" unit Mb/s" -- ] -- } -- ] -- }, -- { -- "name" : "io.write.bandwidth", -- "description" : "Report I/O write bandwidth", -- "type" : "bool", -- 
"category" : "metric", -- "services" : [ "io" ], -- "query" : -- [ -- { "level" : "local", -- "group by" : "io.region", -- "let" : -- [ -- "iwb.bytes.written=first(sum#io.bytes.written,io.bytes.written)", -- "iwb.time.ns=first(sum#time.duration.ns,time.duration.ns)" -- ], -- "select" : -- [ -- "io.region as I/O", -- "ratio(iwb.bytes.written,iwb.time.ns,8e3) as \"Write Mbit/s\" unit Mb/s" -- ] -- }, -- { "level": "cross", "select": -- [ -- "avg(ratio#iwb.bytes.written/iwb.time) as \"Avg write Mbit/s\" unit Mb/s", -- "max(ratio#iwb.bytes.written/iwb.time) as \"Max write Mbit/s\" unit Mb/s" -- ] -- } -- ] -- }, -- { -- "name" : "umpire.totals", -- "description" : "Report umpire allocation statistics (all allocators combined)", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "umpire" ], -- "config" : { "CALI_UMPIRE_PER_ALLOCATOR_STATISTICS": "false" }, -- "query" : -- [ -- { "level" : "local", -- "let" : -- [ "umpt.size.bytes=first(max#umpire.total.size,umpire.total.size)", -- "umpt.count=first(max#umpire.total.count,umpire.total.count)", -- "umpt.hwm.bytes=first(max#umpire.total.hwm,umpire.total.hwm)", -- "umpt.size=scale(umpt.size.bytes,1e-6)", -- "umpt.hwm=scale(umpt.hwm.bytes,1e-6)" -- ], -- "select" : -- [ "inclusive_max(umpt.size) as \"Ump MB (Total)\" unit MB", -- "inclusive_max(umpt.count) as \"Ump allocs (Total)\"", -- "inclusive_max(umpt.hwm) as \"Ump HWM (Total)\"" -- ] -- }, -- { "level" : "cross", -- "select" : -- [ "avg(imax#umpt.size) as \"Ump MB (avg)\" unit MB", -- "max(imax#umpt.size) as \"Ump MB (max)\" unit MB", -- "avg(imax#umpt.count) as \"Ump allocs (avg)\"", -- "max(imax#umpt.count) as \"Ump allocs (max)\"", -- "max(imax#umpt.hwm) as \"Ump HWM (max)\"" -- ] -- } -- ] -- }, -- { -- "name" : "umpire.allocators", -- "description" : "Report umpire allocation statistics per allocator", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "umpire" ], -- "config" : { "CALI_UMPIRE_PER_ALLOCATOR_STATISTICS": "true" }, -- 
"query" : -- [ -- { "level" : "local", -- "let" : -- [ "ump.size.bytes=first(max#umpire.alloc.current.size,umpire.alloc.current.size)", -- "ump.hwm.bytes=first(max#umpire.alloc.highwatermark,umpire.alloc.highwatermark)", -- "ump.count=first(max#umpire.alloc.count,umpire.alloc.count)", -- "ump.size=scale(ump.size.bytes,1e-6)", -- "ump.hwm=scale(ump.hwm.bytes,1e-6)" -- ], -- "select" : -- [ "umpire.alloc.name as Allocator", -- "inclusive_max(ump.size) as \"Alloc MB\" unit MB", -- "inclusive_max(ump.hwm) as \"Alloc HWM\" unit MB", -- "inclusive_max(ump.count) as \"Num allocs\"" -- ], -- "group by": "umpire.alloc.name" -- }, -- { "level" : "cross", -- "select" : -- [ "umpire.alloc.name as Allocator", -- "avg(imax#ump.size) as \"Alloc MB (avg)\" unit MB", -- "max(imax#ump.size) as \"Alloc MB (max)\" unit MB", -- "avg(imax#ump.hwm) as \"Alloc HWM (avg)\" unit MB", -- "max(imax#ump.hwm) as \"Alloc HWM (max)\" unit MB", -- "avg(imax#ump.count) as \"Num allocs (avg)\"", -- "max(imax#ump.count) as \"Num allocs (max)\"" -- ], -- "group by": "umpire.alloc.name" -- } -- ] -- }, -- { -- "name" : "umpire.filter", -- "description" : "Names of Umpire allocators to track", -- "type" : "string", -- "category" : "metric", -- "config" : { "CALI_UMPIRE_ALLOCATOR_FILTER": "{}" } -- }, -- { -- "name" : "mem.pages", -- "description" : "Memory pages used via /proc/self/statm", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "memstat" ], -- "query" : -- [ -- { "level" : "local", -- "let" : -- [ "mem.vmsize = first(max#memstat.vmsize,memstat.vmsize)", -- "mem.vmrss = first(max#memstat.vmrss,memstat.vmrss)", -- "mem.data = first(max#memstat.data,memstat.data)" -- ], -- "select" : -- [ -- "max(mem.vmsize) as VmSize unit pages", -- "max(mem.vmrss) as VmRSS unit pages", -- "max(mem.data) as Data unit pages" -- ] -- }, -- { "level" : "cross", -- "select" : -- [ -- "max(max#mem.vmsize) as \"VmSize (max)\" unit pages", -- "max(max#mem.vmrss) as \"VmRSS (max)\" unit pages", -- 
"max(max#mem.data) as \"Data (max)\" unit pages" -- ] -- } -- ] -- }, -- { -- "name" : "mem.highwatermark", -- "description" : "Report memory high-water mark", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "alloc", "sysalloc" ], -- "config" : { "CALI_ALLOC_TRACK_ALLOCATIONS": "false", "CALI_ALLOC_RECORD_HIGHWATERMARK": "true" }, -- "query" : -- [ -- { "level" : "local", -- "let" : -- [ "mem.highwatermark.bytes = first(max#alloc.region.highwatermark,alloc.region.highwatermark)", -- "mem.highwatermark = scale(mem.highwatermark.bytes,1e-6)" -- ], -- "select" : [ "max(mem.highwatermark) as \"Allocated MB\" unit MB" ] -- }, -- { "level" : "cross", -- "select" : [ "max(max#mem.highwatermark) as \"Allocated MB\" unit MB" ] -- } -- ] -- }, -- { -- "name" : "mem.read.bandwidth", -- "description" : "Record memory read bandwidth using the Performance Co-pilot API", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "pcp.memory" ], -- "query" : -- [ -- { "level" : "local", -- "let" : [ "mrb.time=first(pcp.time.duration,sum#pcp.time.duration)" ], -- "select" : [ "ratio(mem.bytes.read,mrb.time,1e-6) as \"MB/s (r)\" unit MB/s" ] -- }, -- { "level" : "cross", "select": -- [ -- "avg(ratio#mem.bytes.read/mrb.time) as \"Avg MemBW (r) (MB/s)\" unit MB/s", -- "max(ratio#mem.bytes.read/mrb.time) as \"Max MemBW (r) (MB/s)\" unit MB/s", -- "sum(ratio#mem.bytes.read/mrb.time) as \"Total MemBW (r) (MB/s)\" unit MB/s" -- ] -- } -- ] -- }, -- { -- "name" : "mem.write.bandwidth", -- "description" : "Record memory write bandwidth using the Performance Co-pilot API", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "pcp.memory" ], -- "query" : -- [ -- { "level" : "local", -- "let" : [ "mwb.time=first(pcp.time.duration,sum#pcp.time.duration)" ], -- "select" : [ "ratio(mem.bytes.written,mwb.time,1e-6) as \"MB/s (w)\" unit MB/s" ] -- }, -- { "level" : "cross", "select": -- [ -- "avg(ratio#mem.bytes.written/mwb.time) as \"Avg MemBW (w) (MB/s)\" unit 
MB/s", -- "max(ratio#mem.bytes.written/mwb.time) as \"Max MemBW (w) (MB/s)\" unit MB/s", -- "sum(ratio#mem.bytes.written/mwb.time) as \"Total MemBW (w) (MB/s)\" unit MB/s", -- ] -- } -- ] -- }, -- { -- "name" : "mem.bandwidth", -- "description" : "Record memory bandwidth using the Performance Co-pilot API", -- "type" : "bool", -- "category" : "metric", -- "inherit" : [ "mem.read.bandwidth", "mem.write.bandwidth" ], -- }, -- { -- "name" : "topdown.toplevel", -- "description" : "Top-down analysis for Intel CPUs (top level)", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "topdown" ], -- "config" : { "CALI_TOPDOWN_LEVEL": "top" }, -- "query" : -- [ -- { "level": "local", "select": -- [ -- "any(topdown.retiring) as \"Retiring\"", -- "any(topdown.backend_bound) as \"Backend bound\"", -- "any(topdown.frontend_bound) as \"Frontend bound\"", -- "any(topdown.bad_speculation) as \"Bad speculation\"" -- ] -- }, -- { "level": "cross", "select": -- [ -- "any(any#topdown.retiring) as \"Retiring\"", -- "any(any#topdown.backend_bound) as \"Backend bound\"", -- "any(any#topdown.frontend_bound) as \"Frontend bound\"", -- "any(any#topdown.bad_speculation) as \"Bad speculation\"" -- ] -- } -- ] -- }, -- { -- "name" : "topdown.all", -- "description" : "Top-down analysis for Intel CPUs (all levels)", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "topdown" ], -- "config" : { "CALI_TOPDOWN_LEVEL": "all" }, -- "query" : -- [ -- { "level": "local", "select": -- [ -- "any(topdown.retiring) as \"Retiring\"", -- "any(topdown.backend_bound) as \"Backend bound\"", -- "any(topdown.frontend_bound) as \"Frontend bound\"", -- "any(topdown.bad_speculation) as \"Bad speculation\"", -- "any(topdown.branch_mispredict) as \"Branch mispredict\"", -- "any(topdown.machine_clears) as \"Machine clears\"", -- "any(topdown.frontend_latency) as \"Frontend latency\"", -- "any(topdown.frontend_bandwidth) as \"Frontend bandwidth\"", -- "any(topdown.memory_bound) as \"Memory 
bound\"", -- "any(topdown.core_bound) as \"Core bound\"", -- "any(topdown.ext_mem_bound) as \"External Memory\"", -- "any(topdown.l1_bound) as \"L1 bound\"", -- "any(topdown.l2_bound) as \"L2 bound\"", -- "any(topdown.l3_bound) as \"L3 bound\"" -- ] -- }, -- { "level": "cross", "select": -- [ -- "any(any#topdown.retiring) as \"Retiring\"", -- "any(any#topdown.backend_bound) as \"Backend bound\"", -- "any(any#topdown.frontend_bound) as \"Frontend bound\"", -- "any(any#topdown.bad_speculation) as \"Bad speculation\"", -- "any(any#topdown.branch_mispredict) as \"Branch mispredict\"", -- "any(any#topdown.machine_clears) as \"Machine clears\"", -- "any(any#topdown.frontend_latency) as \"Frontend latency\"", -- "any(any#topdown.frontend_bandwidth) as \"Frontend bandwidth\"", -- "any(any#topdown.memory_bound) as \"Memory bound\"", -- "any(any#topdown.core_bound) as \"Core bound\"", -- "any(any#topdown.ext_mem_bound) as \"External Memory\"", -- "any(any#topdown.l1_bound) as \"L1 bound\"", -- "any(any#topdown.l2_bound) as \"L2 bound\"", -- "any(any#topdown.l3_bound) as \"L3 bound\"" -- ] -- } -- ] -- }, -- { -- "name" : "topdown-counters.toplevel", -- "description" : "Raw counter values for Intel top-down analysis (top level)", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "papi" ], -- "config" : -- { -- "CALI_PAPI_COUNTERS": -- "CPU_CLK_THREAD_UNHALTED:THREAD_P,UOPS_RETIRED:RETIRE_SLOTS,UOPS_ISSUED:ANY,INT_MISC:RECOVERY_CYCLES,IDQ_UOPS_NOT_DELIVERED:CORE" -- }, -- "query" : -- [ -- { "level": "local", "select": -- [ -- "inclusive_sum(sum#papi.CPU_CLK_THREAD_UNHALTED:THREAD_P) as cpu_clk_thread_unhalted:thread_p", -- "inclusive_sum(sum#papi.UOPS_RETIRED:RETIRE_SLOTS) as uops_retired:retire_slots", -- "inclusive_sum(sum#papi.UOPS_ISSUED:ANY) as uops_issued:any", -- "inclusive_sum(sum#papi.INT_MISC:RECOVERY_CYCLES) as int_misc:recovery_cycles", -- "inclusive_sum(sum#papi.IDQ_UOPS_NOT_DELIVERED:CORE) as idq_uops_note_delivered:core" -- ] -- }, -- { "level": 
"cross", "select": -- [ -- "sum(inclusive#sum#papi.CPU_CLK_THREAD_UNHALTED:THREAD_P) as cpu_clk_thread_unhalted:thread_p", -- "sum(inclusive#sum#papi.UOPS_RETIRED:RETIRE_SLOTS) as uops_retired:retire_slots", -- "sum(inclusive#sum#papi.UOPS_ISSUED:ANY) as uops_issued:any", -- "sum(inclusive#sum#papi.INT_MISC:RECOVERY_CYCLES) as int_misc:recovery_cycles", -- "sum(inclusive#sum#papi.IDQ_UOPS_NOT_DELIVERED:CORE) as idq_uops_note_delivered:core" -- ] -- } -- ] -- }, -- { -- "name" : "topdown-counters.all", -- "description" : "Raw counter values for Intel top-down analysis (all levels)", -- "type" : "bool", -- "category" : "metric", -- "services" : [ "papi" ], -- "config" : -- { -- "CALI_PAPI_COUNTERS": -- "BR_MISP_RETIRED:ALL_BRANCHES -- ,CPU_CLK_THREAD_UNHALTED:THREAD_P -- ,CYCLE_ACTIVITY:CYCLES_NO_EXECUTE -- ,CYCLE_ACTIVITY:STALLS_L1D_PENDING -- ,CYCLE_ACTIVITY:STALLS_L2_PENDING -- ,CYCLE_ACTIVITY:STALLS_LDM_PENDING -- ,IDQ_UOPS_NOT_DELIVERED:CORE -- ,IDQ_UOPS_NOT_DELIVERED:CYCLES_0_UOPS_DELIV_CORE -- ,INT_MISC:RECOVERY_CYCLES -- ,MACHINE_CLEARS:COUNT -- ,MEM_LOAD_UOPS_RETIRED:L3_HIT -- ,MEM_LOAD_UOPS_RETIRED:L3_MISS -- ,UOPS_EXECUTED:CORE_CYCLES_GE_1 -- ,UOPS_EXECUTED:CORE_CYCLES_GE_2 -- ,UOPS_ISSUED:ANY -- ,UOPS_RETIRED:RETIRE_SLOTS" -- }, -- "query" : -- [ -- { "level": "local", "select": -- [ -- "inclusive_sum(sum#papi.BR_MISP_RETIRED:ALL_BRANCHES) as br_misp_retired:all_branches", -- "inclusive_sum(sum#papi.CPU_CLK_THREAD_UNHALTED:THREAD_P) as cpu_clk_thread_unhalted:thread_p", -- "inclusive_sum(sum#papi.CYCLE_ACTIVITY:CYCLES_NO_EXECUTE) as cycle_activity:cycles_no_execute", -- "inclusive_sum(sum#papi.CYCLE_ACTIVITY:STALLS_L1D_PENDING) as cycle_activity:stalls_l1d_pending", -- "inclusive_sum(sum#papi.CYCLE_ACTIVITY:STALLS_L2_PENDING) as cycle_activity:stalls_l2_pending", -- "inclusive_sum(sum#papi.CYCLE_ACTIVITY:STALLS_LDM_PENDING) as cycle_activity:stalls_ldm_pending", -- "inclusive_sum(sum#papi.IDQ_UOPS_NOT_DELIVERED:CORE) as idq_uops_note_delivered:core", -- 
"inclusive_sum(sum#papi.IDQ_UOPS_NOT_DELIVERED:CYCLES_0_UOPS_DELIV_CORE) as idq_uops_note_delivered:cycles_0_uops_deliv_core", -- "inclusive_sum(sum#papi.INT_MISC:RECOVERY_CYCLES) as int_misc:recovery_cycles", -- "inclusive_sum(sum#papi.MACHINE_CLEARS:COUNT) as machine_clears:count", -- "inclusive_sum(sum#papi.MEM_LOAD_UOPS_RETIRED:L3_HIT) as mem_load_uops_retired:l3_hit", -- "inclusive_sum(sum#papi.MEM_LOAD_UOPS_RETIRED:L3_MISS) as mem_load_uops_retired:l3_miss", -- "inclusive_sum(sum#papi.UOPS_EXECUTED:CORE_CYCLES_GE_1) as uops_executed:core_cycles_ge_1", -- "inclusive_sum(sum#papi.UOPS_EXECUTED:CORE_CYCLES_GE_2) as uops_executed:core_cycles_ge_2", -- "inclusive_sum(sum#papi.UOPS_ISSUED:ANY) as uops_issued:any", -- "inclusive_sum(sum#papi.UOPS_RETIRED:RETIRE_SLOTS) as uops_retired:retire_slots" -- ] -- }, -- { "level": "cross", "select": -- [ -- "sum(inclusive#sum#papi.BR_MISP_RETIRED:ALL_BRANCHES) as br_misp_retired:all_branches", -- "sum(inclusive#sum#papi.CPU_CLK_THREAD_UNHALTED:THREAD_P) as cpu_clk_thread_unhalted:thread_p", -- "sum(inclusive#sum#papi.CYCLE_ACTIVITY:CYCLES_NO_EXECUTE) as cycle_activity:cycles_no_execute", -- "sum(inclusive#sum#papi.CYCLE_ACTIVITY:STALLS_L1D_PENDING) as cycle_activity:stalls_l1d_pending", -- "sum(inclusive#sum#papi.CYCLE_ACTIVITY:STALLS_L2_PENDING) as cycle_activity:stalls_l2_pending", -- "sum(inclusive#sum#papi.CYCLE_ACTIVITY:STALLS_LDM_PENDING) as cycle_activity:stalls_ldm_pending", -- "sum(inclusive#sum#papi.IDQ_UOPS_NOT_DELIVERED:CORE) as idq_uops_note_delivered:core", -- "sum(inclusive#sum#papi.IDQ_UOPS_NOT_DELIVERED:CYCLES_0_UOPS_DELIV_CORE) as idq_uops_note_delivered:cycles_0_uops_deliv_core", -- "sum(inclusive#sum#papi.INT_MISC:RECOVERY_CYCLES) as int_misc:recovery_cycles", -- "sum(inclusive#sum#papi.MACHINE_CLEARS:COUNT) as machine_clears:count", -- "sum(inclusive#sum#papi.MEM_LOAD_UOPS_RETIRED:L3_HIT) as mem_load_uops_retired:l3_hit", -- "sum(inclusive#sum#papi.MEM_LOAD_UOPS_RETIRED:L3_MISS) as 
mem_load_uops_retired:l3_miss", -- "sum(inclusive#sum#papi.UOPS_EXECUTED:CORE_CYCLES_GE_1) as uops_executed:core_cycles_ge_1", -- "sum(inclusive#sum#papi.UOPS_EXECUTED:CORE_CYCLES_GE_2) as uops_executed:core_cycles_ge_2", -- "sum(inclusive#sum#papi.UOPS_ISSUED:ANY) as uops_issued:any", -- "sum(inclusive#sum#papi.UOPS_RETIRED:RETIRE_SLOTS) as uops_retired:retire_slots" -- ] -- } -- ] -- }, -- { -- "name" : "output", -- "description" : "Output location ('stdout', 'stderr', or filename)", -- "type" : "string", -- "category" : "output" -- }, -- { -- "name" : "adiak.import_categories", -- "services" : [ "adiak_import" ], -- "description" : "Adiak import categories. Comma-separated list of integers.", -- "type" : "string", -- "category" : "adiak" -- }, -- { -- "name" : "max_column_width", -- "type" : "int", -- "description" : "Maximum column width in the tree display", -- "category" : "treeformatter" -- }, -- { -- "name" : "print.metadata", -- "type" : "bool", -- "description" : "Print program metadata (Caliper globals and Adiak data)", -- "category" : "treeformatter" -- }, -- { -- "name" : "order_as_visited", -- "type" : "bool", -- "description" : "Print tree nodes in the original visit order", -- "category" : "treeformatter", -- "query" : -- [ -- { "level": "local", -- "let": [ "o_a_v.slot=first(aggregate.slot)" ], -- "aggregate": [ "min(o_a_v.slot)" ], -- "order by": [ "min#o_a_v.slot" ] -- }, -- { "level": "cross", -- "aggregate": [ "min(min#o_a_v.slot)" ], -- "order by": [ "min#min#o_a_v.slot" ] -- } -- ] -- } -+ "min(sum#rc.count) as \"Calls/rank (min)\" unit count", -+ "avg(sum#rc.count) as \"Calls/rank (avg)\" unit count", -+ "max(sum#rc.count) as \"Calls/rank (max)\" unit count", -+ "sum(sum#rc.count) as \"Calls (total)\" unit count" - ] -+ } -+ ] -+}, -+{ -+ "name" : "region.stats", -+ "description" : "Detailed region timing statistics (min/max/avg time per visit)", -+ "type" : "bool", -+ "category" : "metric", -+ "services" : [ "timer", "event" ], -+ "config" 
: -+ { -+ "CALI_TIMER_INCLUSIVE_DURATION" : "true", -+ "CALI_EVENT_ENABLE_SNAPSHOT_INFO" : "true" -+ }, -+ "query": -+ [ -+ { -+ "level": "local", -+ "let": -+ [ -+ "rs.count=first(sum#region.count,region.count)", -+ "rs.min=scale(min#time.inclusive.duration.ns,1e-9)", -+ "rs.max=scale(max#time.inclusive.duration.ns,1e-9)", -+ "rs.sum=scale(sum#time.inclusive.duration.ns,1e-9)" -+ ], -+ "aggregate": -+ [ -+ "sum(rs.sum)" -+ ], -+ "select": -+ [ -+ "sum(rs.count) as Visits unit count", -+ "min(rs.min) as \"Min time/visit\" unit sec", -+ "ratio(rs.sum,rs.count) as \"Avg time/visit\" unit sec", -+ "max(rs.max) as \"Max time/visit\" unit sec" -+ ] -+ }, -+ { -+ "level": "cross", -+ "select": -+ [ -+ "sum(sum#rs.count) as Visits unit count", -+ "min(min#rs.min) as \"Min time/visit\" unit sec", -+ "ratio(sum#rs.sum,sum#rs.count) as \"Avg time/visit\" unit sec", -+ "max(max#rs.max) as \"Max time/visit\" unit sec" -+ ] -+ } -+ ] -+}, -+{ -+ "name" : "node.order", -+ "description" : "Report order in which regions first appeared", -+ "type" : "bool", -+ "category" : "metric", -+ "query" : -+ [ -+ { "level" : "local", -+ "select" : [ "min(aggregate.slot) as \"Node order\"" ] -+ }, -+ { "level" : "cross", -+ "select" : [ "min(min#aggregate.slot) as \"Node order\"" ] -+ } -+ ] -+}, -+{ -+ "name" : "output", -+ "description" : "Output location ('stdout', 'stderr', or filename)", -+ "type" : "string", -+ "category" : "output" -+}, -+{ -+ "name" : "max_column_width", -+ "type" : "int", -+ "description" : "Maximum column width in the tree display", -+ "category" : "treeformatter" -+}, -+{ -+ "name" : "print.metadata", -+ "type" : "bool", -+ "description" : "Print program metadata (Caliper globals and Adiak data)", -+ "category" : "treeformatter" -+}, -+{ -+ "name" : "order_as_visited", -+ "type" : "bool", -+ "description" : "Print tree nodes in the original visit order", -+ "category" : "treeformatter", -+ "query" : -+ [ -+ { "level": "local", -+ "let": [ 
"o_a_v.slot=first(aggregate.slot)" ], -+ "aggregate": [ "min(o_a_v.slot)" ], -+ "order by": [ "min#o_a_v.slot" ] -+ }, -+ { "level": "cross", -+ "aggregate": [ "min(min#o_a_v.slot)" ], -+ "order by": [ "min#min#o_a_v.slot" ] -+ } -+ ] -+} -+] - )json"; - - } diff --git a/scripts/docker/ascent_build/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch b/scripts/docker/ascent_build/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch deleted file mode 100644 index 76fdf525d2f0..000000000000 --- a/scripts/docker/ascent_build/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch +++ /dev/null @@ -1,147 +0,0 @@ -From 7b25b95c30e083e7d8c541f131df10e401f658d6 Mon Sep 17 00:00:00 2001 -From: Cyrus Harrison -Date: Thu, 1 Aug 2024 13:26:40 -0700 -Subject: [PATCH] detect if caliper needs adiak - ---- - src/cmake/thirdparty/SetupCaliper.cmake | 47 +++++++++++++++++----- - src/config/conduit_setup_deps.cmake | 52 +++++++++++++++++++------ - 2 files changed, 77 insertions(+), 22 deletions(-) - -diff --git a/src/cmake/thirdparty/SetupCaliper.cmake b/src/cmake/thirdparty/SetupCaliper.cmake -index 44ad2cc2e..7469097f3 100644 ---- a/src/cmake/thirdparty/SetupCaliper.cmake -+++ b/src/cmake/thirdparty/SetupCaliper.cmake -@@ -12,24 +12,51 @@ if(NOT CALIPER_DIR) - MESSAGE(FATAL_ERROR "Caliper support needs explicit CALIPER_DIR") - endif() - --# most common case: caliper is built with adiak support --# and caliper needs us to find adiak, or else find_pacakge caliper --# will fail - --# Check for ADIAK_DIR -+# first: look for caliper config header + see what additional deps we need -+#. to resolve. 
-+ -+message(STATUS "Attempting to find cali-config.h with CALIPER_DIR=${CALIPER_DIR} ...") -+find_file(CALI_CONFIG_HEADER -+ NAMES caliper-config.h -+ PATHS ${CALIPER_DIR} -+ PATH_SUFFIXES include/caliper -+ NO_DEFAULT_PATH -+ NO_CMAKE_ENVIRONMENT_PATH -+ NO_CMAKE_PATH -+ NO_SYSTEM_ENVIRONMENT_PATH -+ NO_CMAKE_SYSTEM_PATH) -+ -+if(EXISTS ${CALI_CONFIG_HEADER}) -+ message(STATUS "Found Caliper Config Header: ${CALI_CONFIG_HEADER}") -+else() -+ message(FATAL_ERROR "Could not find caliper-config.h in caliper ${CALIPER_DIR}/include/caliper") -+endif() - --if(NOT ADIAK_DIR) -- MESSAGE(FATAL_ERROR "Caliper support needs explicit ADIAK_DIR") -+file(READ ${CALI_CONFIG_HEADER} _CALI_CONFIG_HEADER_CONTENTS) -+ -+# check if we need ADIAK -+string(FIND ${_CALI_CONFIG_HEADER_CONTENTS} "#define CALIPER_HAVE_ADIAK" _caliper_have_adiak) -+ -+if(${_caliper_have_adiak} GREATER_EQUAL 0 ) -+ # caliper is built with adiak support and caliper needs us to find adiak, -+ # else find_pacakge caliper will fail -+ # Check for ADIAK_DIR -+ if(NOT ADIAK_DIR) -+ MESSAGE(FATAL_ERROR "Caliper support needs explicit ADIAK_DIR") -+ endif() -+ # find adiak -+ find_package(adiak REQUIRED -+ NO_DEFAULT_PATH -+ PATHS ${ADIAK_DIR}/lib/cmake/adiak) -+ set(ADIAK_FOUND TRUE) - endif() - --find_package(adiak REQUIRED -- NO_DEFAULT_PATH -- PATHS ${ADIAK_DIR}/lib/cmake/adiak) - - find_package(caliper REQUIRED - NO_DEFAULT_PATH - PATHS ${CALIPER_DIR}/share/cmake/caliper) - --set(ADIAK_FOUND TRUE) -+ - set(CALIPER_FOUND TRUE) - set(CONDUIT_USE_CALIPER TRUE) -diff --git a/src/config/conduit_setup_deps.cmake b/src/config/conduit_setup_deps.cmake -index 0334a2b54..f9022da66 100644 ---- a/src/config/conduit_setup_deps.cmake -+++ b/src/config/conduit_setup_deps.cmake -@@ -58,26 +58,54 @@ if(CALIPER_DIR) - message(STATUS "Conduit was built with Caliper Support") - endif() - -- if(NOT ADIAK_DIR) -- set(ADIAK_DIR ${CONDUIT_ADIAK_DIR}) -+ # use caliper config header to detect necessary deps -+ 
find_file(CALI_CONFIG_HEADER -+ NAMES caliper-config.h -+ PATHS ${CALIPER_DIR} -+ PATH_SUFFIXES include/caliper -+ NO_DEFAULT_PATH -+ NO_CMAKE_ENVIRONMENT_PATH -+ NO_CMAKE_PATH -+ NO_SYSTEM_ENVIRONMENT_PATH -+ NO_CMAKE_SYSTEM_PATH) -+ -+ if(EXISTS ${CALI_CONFIG_HEADER}) -+ if(NOT Conduit_FIND_QUIETLY) -+ message(STATUS "Found Caliper Config Header: ${CALI_CONFIG_HEADER}") -+ endif() -+ else() -+ message(FATAL_ERROR "Could not find caliper-config.h in caliper ${CALIPER_DIR}/include/caliper") - endif() - -- if(ADIAK_DIR) -- if(NOT Conduit_FIND_QUIETLY) -- message(STATUS "Looking for Adiak at: ${ADIAK_DIR}/lib/cmake/adiak") -+ file(READ ${CALI_CONFIG_HEADER} _CALI_CONFIG_HEADER_CONTENTS) -+ -+ # check if we need ADIAK -+ string(FIND ${_CALI_CONFIG_HEADER_CONTENTS} "#define CALIPER_HAVE_ADIAK" _caliper_have_adiak) -+ -+ if(${_caliper_have_adiak} GREATER_EQUAL 0 ) -+ # caliper is built with adiak support and caliper needs us to find adiak. -+ if(NOT ADIAK_DIR) -+ set(ADIAK_DIR ${CONDUIT_ADIAK_DIR}) -+ endif() -+ -+ if(ADIAK_DIR) -+ if(NOT Conduit_FIND_QUIETLY) -+ message(STATUS "Looking for Adiak at: ${ADIAK_DIR}/lib/cmake/adiak") -+ endif() -+ # find adiak first -+ find_dependency(adiak REQUIRED -+ NO_DEFAULT_PATH -+ PATHS ${ADIAK_DIR}/lib/cmake/adiak) - endif() -- # find adiak first -- find_package(adiak REQUIRED -- NO_DEFAULT_PATH -- PATHS ${ADIAK_DIR}/lib/cmake/adiak) - endif() -+ - if(NOT Conduit_FIND_QUIETLY) - message(STATUS "Looking for Caliper at: ${CALIPER_DIR}/share/cmake/caliper") - endif() - # find caliper -- find_package(caliper REQUIRED -- NO_DEFAULT_PATH -- PATHS ${CALIPER_DIR}/share/cmake/caliper) -+ find_dependency(caliper REQUIRED -+ NO_DEFAULT_PATH -+ PATHS ${CALIPER_DIR}/share/cmake/caliper) - endif() - - ############################################################################### From ce137487088ee9b645820a67723882e8af8a8e5e Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Thu, 29 Aug 2024 12:13:25 -0400 Subject: [PATCH 18/29] ascent complains 
if MFEM is not built --- scripts/docker/ascent_build/build_ascent.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/docker/ascent_build/build_ascent.sh b/scripts/docker/ascent_build/build_ascent.sh index 3f1e0313d3a1..fc82b5817936 100644 --- a/scripts/docker/ascent_build/build_ascent.sh +++ b/scripts/docker/ascent_build/build_ascent.sh @@ -52,7 +52,7 @@ build_vtkm="${build_vtkm:=true}" build_camp="${build_camp:=true}" build_raja="${build_raja:=true}" build_umpire="${build_umpire:=true}" -build_mfem="${build_mfem:=false}" +build_mfem="${build_mfem:=true}" build_catalyst="${build_catalyst:=false}" # ascent options From cb08a39c6e70a75de475aab9e304b08b8a38add2 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Thu, 29 Aug 2024 12:54:12 -0400 Subject: [PATCH 19/29] control cuda support for ascent with env var --- scripts/docker/Dockerfile.nvcc | 4 +++- scripts/docker/ascent_build/build_ascent.sh | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 8393406144fb..85bcdecabe38 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -60,7 +60,9 @@ RUN mkdir /tmp/build-openpmd && cd /tmp/build-openpmd && \ COPY ascent_build /tmp/ascent_build +## NOTE: with enable_cuda=ON, arm64 Dockerfile builds fail for some reason + RUN cd /tmp/ascent_build && \ - bash build_ascent.sh && \ + env enable_cuda=ON bash build_ascent.sh && \ cd / && \ rm -rf /tmp/ascent_build diff --git a/scripts/docker/ascent_build/build_ascent.sh b/scripts/docker/ascent_build/build_ascent.sh index fc82b5817936..b88635d506a4 100644 --- a/scripts/docker/ascent_build/build_ascent.sh +++ b/scripts/docker/ascent_build/build_ascent.sh @@ -28,7 +28,7 @@ set -eu -o pipefail ############################################################################## # shared options -enable_cuda="${enable_cuda:=ON}" +enable_cuda="${enable_cuda:=OFF}" enable_hip="${enable_hip:=OFF}" 
enable_fortran="${enable_fortran:=OFF}" enable_python="${enable_python:=OFF}" @@ -37,7 +37,7 @@ enable_mpi="${enable_mpi:=ON}" enable_find_mpi="${enable_find_mpi:=ON}" enable_tests="${enable_tests:=OFF}" enable_verbose="${enable_verbose:=ON}" -build_jobs="${build_jobs:=1}" +build_jobs="${build_jobs:=8}" build_config="${build_config:=Release}" build_shared_libs="${build_shared_libs:=ON}" From 16e47512d4287448d29759d15467511e627d4640 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Thu, 29 Aug 2024 13:54:47 -0400 Subject: [PATCH 20/29] add MAKEOPTS=--output-sync=target --- scripts/docker/ascent_build/build_ascent.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/docker/ascent_build/build_ascent.sh b/scripts/docker/ascent_build/build_ascent.sh index b88635d506a4..de62a3648aa8 100644 --- a/scripts/docker/ascent_build/build_ascent.sh +++ b/scripts/docker/ascent_build/build_ascent.sh @@ -27,6 +27,8 @@ set -eu -o pipefail # Build Options ############################################################################## +export MAKEFLAGS="--output-sync=target" + # shared options enable_cuda="${enable_cuda:=OFF}" enable_hip="${enable_hip:=OFF}" From 3eae3ef468a6bd3434832d71df24964403fe229b Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Thu, 29 Aug 2024 17:07:18 -0400 Subject: [PATCH 21/29] add comment to Dockerfile --- scripts/docker/Dockerfile.nvcc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 85bcdecabe38..b4fa618cb5eb 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -60,7 +60,7 @@ RUN mkdir /tmp/build-openpmd && cd /tmp/build-openpmd && \ COPY ascent_build /tmp/ascent_build -## NOTE: with enable_cuda=ON, arm64 Dockerfile builds fail for some reason +## NOTE: with enable_cuda=ON, you need a Docker VM with a LARGE amount of RAM (at least 15 GB RAM, 4 GB swap) RUN cd /tmp/ascent_build && \ env enable_cuda=ON bash build_ascent.sh && \ From 
f4dbf1a59c38a9d87f277dacc423327cce673512 Mon Sep 17 00:00:00 2001 From: Philipp Grete Date: Fri, 30 Aug 2024 09:22:29 +0000 Subject: [PATCH 22/29] Downgrade numpy --- scripts/docker/Dockerfile.nvcc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index b4fa618cb5eb..13b568bd6bea 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -11,6 +11,11 @@ RUN pip3 install unyt RUN pip3 install blosc2 +# h5py from the repo is incompatible with the default numpy 2.1.0 +# Downgrading is not the cleanest solution, but it works... +# see https://stackoverflow.com/questions/78634235/numpy-dtype-size-changed-may-indicate-binary-incompatibility-expected-96-from +RUN pip3 install numpy==1.26.4 + RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key| apt-key add - && \ echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" > /etc/apt/sources.list.d/llvm.list From e9fe3cc59d254d3e822a8a45b7e0533ba39ede5b Mon Sep 17 00:00:00 2001 From: Philipp Grete Date: Fri, 30 Aug 2024 09:44:53 +0000 Subject: [PATCH 23/29] Fix ADIOS2 and OpenPMD versions --- scripts/docker/Dockerfile.nvcc | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 13b568bd6bea..58acbcd9e5a5 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -47,17 +47,20 @@ RUN cd /tmp && \ rm -rf /tmp/hdf5-1.12.2* RUN mkdir /tmp/build-adios2 && cd /tmp/build-adios2 && \ - git clone https://github.com/ornladios/ADIOS2.git ADIOS2 && \ + wget https://github.com/ornladios/ADIOS2/archive/refs/tags/v2.10.1.tar.gz && \ + tar xzf v2.10.1.tar.gz && \ mkdir adios2-build && cd adios2-build && \ - cmake ../ADIOS2 -DADIOS2_USE_Blosc2=ON -DADIOS2_USE_Fortran=OFF && \ + cmake ../ADIOS2-2.10.1 -DADIOS2_USE_Blosc2=ON -DADIOS2_USE_Fortran=OFF && \ make -j8 && make install && \ cd / && \ rm -rf /tmp/build-adios2 +# 
commit version is dev branch on 2024-08-30 RUN mkdir /tmp/build-openpmd && cd /tmp/build-openpmd && \ - git clone https://github.com/openPMD/openPMD-api.git && \ + wget https://github.com/openPMD/openPMD-api/archive/1c7d7ff.tar.gz && \ + tar xzf 1c7d7ff.tar.gz && \ mkdir openPMD-api-build && cd openPMD-api-build && \ - cmake ../openPMD-api -DopenPMD_USE_PYTHON=ON -DPython_EXECUTABLE=$(which python3) -DopenPMD_USE_ADIOS2=ON && \ + cmake ../openPMD-api-1c7d7ffc5ef501e1d2dcbd5169b3e5eff677b399 -DopenPMD_USE_PYTHON=ON -DPython_EXECUTABLE=$(which python3) -DopenPMD_USE_ADIOS2=ON && \ cmake --build . && \ cmake --build . --target install && \ cd / && \ From 4d2bf950f7b586cfa89a1d80328606293cf1e9f0 Mon Sep 17 00:00:00 2001 From: Philipp Grete Date: Fri, 30 Aug 2024 14:41:33 +0000 Subject: [PATCH 24/29] Directly use Ascent script with small patch --- scripts/docker/Dockerfile.nvcc | 19 +- scripts/docker/ascent_build.patch | 49 + .../docker/ascent_build/2023_01_30_raja.patch | 36 - ...2023_12_06_vtkm-mr3160-rocthrust-fix.patch | 74 -- .../2024_05_03_vtkm-mr3215-ext-geom-fix.patch | 145 --- ...4_07_02_vtkm-mr3246-raysubset_bugfix.patch | 43 - scripts/docker/ascent_build/build_ascent.sh | 984 ------------------ 7 files changed, 63 insertions(+), 1287 deletions(-) create mode 100644 scripts/docker/ascent_build.patch delete mode 100644 scripts/docker/ascent_build/2023_01_30_raja.patch delete mode 100644 scripts/docker/ascent_build/2023_12_06_vtkm-mr3160-rocthrust-fix.patch delete mode 100644 scripts/docker/ascent_build/2024_05_03_vtkm-mr3215-ext-geom-fix.patch delete mode 100644 scripts/docker/ascent_build/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch delete mode 100644 scripts/docker/ascent_build/build_ascent.sh diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 58acbcd9e5a5..4679c9f5dba4 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -51,7 +51,7 @@ RUN mkdir /tmp/build-adios2 && cd /tmp/build-adios2 && \ tar 
xzf v2.10.1.tar.gz && \ mkdir adios2-build && cd adios2-build && \ cmake ../ADIOS2-2.10.1 -DADIOS2_USE_Blosc2=ON -DADIOS2_USE_Fortran=OFF && \ - make -j8 && make install && \ + make -j 16 && make install && \ cd / && \ rm -rf /tmp/build-adios2 @@ -61,16 +61,25 @@ RUN mkdir /tmp/build-openpmd && cd /tmp/build-openpmd && \ tar xzf 1c7d7ff.tar.gz && \ mkdir openPMD-api-build && cd openPMD-api-build && \ cmake ../openPMD-api-1c7d7ffc5ef501e1d2dcbd5169b3e5eff677b399 -DopenPMD_USE_PYTHON=ON -DPython_EXECUTABLE=$(which python3) -DopenPMD_USE_ADIOS2=ON && \ - cmake --build . && \ + cmake --build . -j 16 && \ cmake --build . --target install && \ cd / && \ rm -rf /tmp/build-openpmd -COPY ascent_build /tmp/ascent_build +RUN mkdir /tmp/build-ascent + +COPY ascent_build.patch /tmp/build-ascent ## NOTE: with enable_cuda=ON, you need a Docker VM with a LARGE amount of RAM (at least 15 GB RAM, 4 GB swap) -RUN cd /tmp/ascent_build && \ - env enable_cuda=ON bash build_ascent.sh && \ +# commit version is dev branch on 2024-08-30 +RUN cd /tmp/build-ascent && \ + wget https://github.com/Alpine-DAV/ascent/archive/dc2ec9c.tar.gz && \ + tar xzf dc2ec9c.tar.gz -C . 
--strip-components=1 && \ + wget https://github.com/LLNL/blt/archive/9ff7734.tar.gz && \ + tar xzf 9ff7734.tar.gz -C ./src/blt --strip-components=1 && \ + cd ./scripts/build_ascent && \ + patch -p1 build_ascent.sh /tmp/build-ascent/ascent_build.patch && \ + env enable_cuda=ON enable_mpi=ON build_hdf5=false build_silo=false bash build_ascent.sh && \ cd / && \ rm -rf /tmp/ascent_build diff --git a/scripts/docker/ascent_build.patch b/scripts/docker/ascent_build.patch new file mode 100644 index 000000000000..aa165a2eb4d0 --- /dev/null +++ b/scripts/docker/ascent_build.patch @@ -0,0 +1,49 @@ +--- build_ascent.sh 2024-08-29 21:00:24.000000000 +0000 ++++ build_ascent_parthenon.sh 2024-08-30 09:55:58.976365723 +0000 +@@ -21,6 +21,8 @@ + # Build Options + ############################################################################## + ++export MAKEFLAGS="--output-sync=target" ++ + # shared options + enable_cuda="${enable_cuda:=OFF}" + enable_hip="${enable_hip:=OFF}" +@@ -31,7 +33,7 @@ + enable_find_mpi="${enable_find_mpi:=ON}" + enable_tests="${enable_tests:=OFF}" + enable_verbose="${enable_verbose:=ON}" +-build_jobs="${build_jobs:=6}" ++build_jobs="${build_jobs:=16}" + build_config="${build_config:=Release}" + build_shared_libs="${build_shared_libs:=ON}" + +@@ -126,8 +128,8 @@ + root_dir=$(ospath ${root_dir}) + root_dir=$(abs_path ${root_dir}) + script_dir=$(abs_path "$(dirname "${BASH_SOURCE[0]}")") +-build_dir=$(ospath ${root_dir}/build) +-source_dir=$(ospath ${root_dir}/source) ++build_dir=$(ospath build) ++source_dir=$(ospath source) + + + # root_dir is where we will build and install +@@ -140,7 +142,7 @@ + + # install_dir is where we will install + # override with `prefix` env var +-install_dir="${install_dir:=$root_dir/install}" ++install_dir=/usr/local + + echo "*** prefix: ${root_dir}" + echo "*** build root: ${build_dir}" +@@ -231,7 +233,7 @@ + hdf5_short_version=1.14 + hdf5_src_dir=$(ospath ${source_dir}/hdf5-${hdf5_version}) + hdf5_build_dir=$(ospath 
${build_dir}/hdf5-${hdf5_version}/) +-hdf5_install_dir=$(ospath ${install_dir}/hdf5-${hdf5_version}/) ++hdf5_install_dir=/usr/local/hdf5/serial + hdf5_tarball=$(ospath ${source_dir}/hdf5-${hdf5_version}.tar.gz) + + # build only if install doesn't exist diff --git a/scripts/docker/ascent_build/2023_01_30_raja.patch b/scripts/docker/ascent_build/2023_01_30_raja.patch deleted file mode 100644 index 7b1abcd6e0c2..000000000000 --- a/scripts/docker/ascent_build/2023_01_30_raja.patch +++ /dev/null @@ -1,36 +0,0 @@ -From 9a50702cf835996f96cb33e2cb4c0aa1a7a86df5 Mon Sep 17 00:00:00 2001 -From: Cyrus Harrison -Date: Fri, 27 Jan 2023 15:49:35 -0800 -Subject: [PATCH] try new logic for windows shared exports - ---- - azure-pipelines.yml | 2 +- - include/RAJA/config.hpp.in | 2 +- - 2 files changed, 2 insertions(+), 2 deletions(-) - -diff --git a/azure-pipelines.yml b/azure-pipelines.yml -index c84a71eb18..268ba4a660 100644 ---- a/azure-pipelines.yml -+++ b/azure-pipelines.yml -@@ -3,7 +3,7 @@ jobs: - strategy: - matrix: - shared: -- SHARED_ARGS: '-DBUILD_SHARED_LIBS=On -DCMAKE_CXX_FLAGS="/DRAJASHAREDDLL_EXPORTS" ' -+ SHARED_ARGS: '-DBUILD_SHARED_LIBS=On' - static: - SHARED_ARGS: '-DBUILD_SHARED_LIBS=Off' - pool: -diff --git a/include/RAJA/config.hpp.in b/include/RAJA/config.hpp.in -index 26b0b0dbde..0347650fc4 100644 ---- a/include/RAJA/config.hpp.in -+++ b/include/RAJA/config.hpp.in -@@ -364,7 +364,7 @@ const int DATA_ALIGN = @RAJA_DATA_ALIGN@; - // - - #if (defined(_WIN32) || defined(_WIN64)) && !defined(RAJA_WIN_STATIC_BUILD) --#ifdef RAJASHAREDDLL_EXPORTS -+#ifdef RAJA_EXPORTS - #define RAJASHAREDDLL_API __declspec(dllexport) - #else - #define RAJASHAREDDLL_API __declspec(dllimport) diff --git a/scripts/docker/ascent_build/2023_12_06_vtkm-mr3160-rocthrust-fix.patch b/scripts/docker/ascent_build/2023_12_06_vtkm-mr3160-rocthrust-fix.patch deleted file mode 100644 index 174347033b3f..000000000000 --- a/scripts/docker/ascent_build/2023_12_06_vtkm-mr3160-rocthrust-fix.patch +++ 
/dev/null @@ -1,74 +0,0 @@ -From c9ec6ae6a62b9bd257e727e999987ef31384e3ac Mon Sep 17 00:00:00 2001 -From: Vicente Adolfo Bolea Sanchez -Date: Thu, 30 Nov 2023 15:55:32 -0500 -Subject: [PATCH] kokkos: let link vtkm_cont to roc::rocthrust - -Also reorder the declarion of the option VTKm_ENABLE_KOKKOS_THRUST -to be set before calling VTKmDeviceAdapters. ---- - CMake/VTKmDeviceAdapters.cmake | 5 +---- - CMakeLists.txt | 10 +++++----- - vtkm/cont/kokkos/internal/CMakeLists.txt | 3 +++ - 3 files changed, 9 insertions(+), 9 deletions(-) - -diff --git a/CMake/VTKmDeviceAdapters.cmake b/CMake/VTKmDeviceAdapters.cmake -index fb13d0bf85..7b8bf2df9b 100644 ---- a/CMake/VTKmDeviceAdapters.cmake -+++ b/CMake/VTKmDeviceAdapters.cmake -@@ -360,10 +360,7 @@ if(VTKm_ENABLE_KOKKOS AND NOT TARGET vtkm_kokkos) - - # Make sure rocthrust is available if requested - if(VTKm_ENABLE_KOKKOS_THRUST) -- find_package(rocthrust) -- if(NOT rocthrust_FOUND) -- message(FATAL_ERROR "rocthrust not found. Please set VTKm_ENABLE_KOKKOS_THRUST to OFF.") -- endif() -+ find_package(rocthrust REQUIRED CONFIG) - endif() - endif() - -diff --git a/CMakeLists.txt b/CMakeLists.txt -index 39a9b3bc09..d8204114c7 100644 ---- a/CMakeLists.txt -+++ b/CMakeLists.txt -@@ -191,6 +191,11 @@ vtkm_option(VTKm_OVERRIDE_CTEST_TIMEOUT "Disable default ctest timeout" OFF) - # VTKm_ENABLE_MPI=ON. 
- cmake_dependent_option(VTKm_ENABLE_GPU_MPI "Enable GPU AWARE MPI support" OFF "VTKm_ENABLE_MPI" OFF) - -+# By default: Set VTKm_ENABLE_KOKKOS_THRUST to ON if VTKm_ENABLE_KOKKOS is ON, otherwise -+# disable it (or if the user explicitly turns this option OFF) -+cmake_dependent_option(VTKm_ENABLE_KOKKOS_THRUST "Enable Kokkos thrust support (only valid with CUDA and HIP)" -+ ON "VTKm_ENABLE_KOKKOS;Kokkos_ENABLE_CUDA OR Kokkos_ENABLE_HIP" OFF) -+ - mark_as_advanced( - VTKm_ENABLE_LOGGING - VTKm_NO_ASSERT -@@ -232,11 +237,6 @@ include(VTKmBuildType) - # Include the vtk-m wrappers - include(VTKmWrappers) - --# By default: Set VTKm_ENABLE_KOKKOS_THRUST to ON if VTKm_ENABLE_KOKKOS is ON, otherwise --# disable it (or if the user explicitly turns this option OFF) --cmake_dependent_option(VTKm_ENABLE_KOKKOS_THRUST "Enable Kokkos thrust support (only valid with CUDA and HIP)" -- ON "VTKm_ENABLE_KOKKOS;Kokkos_ENABLE_CUDA OR Kokkos_ENABLE_HIP" OFF) -- - # Create vtkm_compiler_flags library. This is an interface library that - # holds all the C++ compiler flags that are needed for consumers and - # when building VTK-m. 
-diff --git a/vtkm/cont/kokkos/internal/CMakeLists.txt b/vtkm/cont/kokkos/internal/CMakeLists.txt -index 9f924b0f4b..9b731c9fdd 100644 ---- a/vtkm/cont/kokkos/internal/CMakeLists.txt -+++ b/vtkm/cont/kokkos/internal/CMakeLists.txt -@@ -34,6 +34,9 @@ if (TARGET vtkm_kokkos) - elseif(TARGET vtkm_kokkos_hip) - set_source_files_properties(${sources} TARGET_DIRECTORY vtkm_cont PROPERTIES LANGUAGE HIP) - kokkos_compilation(SOURCE ${sources}) -+ if (VTKm_ENABLE_KOKKOS_THRUST) -+ target_link_libraries(vtkm_cont INTERFACE roc::rocthrust) -+ endif() - endif() - - else() --- -2.35.3 - diff --git a/scripts/docker/ascent_build/2024_05_03_vtkm-mr3215-ext-geom-fix.patch b/scripts/docker/ascent_build/2024_05_03_vtkm-mr3215-ext-geom-fix.patch deleted file mode 100644 index ed82e5630e01..000000000000 --- a/scripts/docker/ascent_build/2024_05_03_vtkm-mr3215-ext-geom-fix.patch +++ /dev/null @@ -1,145 +0,0 @@ -From 49518e5054c607942f644c82a5289e12b0f50476 Mon Sep 17 00:00:00 2001 -From: Kenneth Moreland -Date: Fri, 3 May 2024 09:22:56 -0400 -Subject: [PATCH] Fix bug with ExtractGeometry filter - -The `ExtractGeometry` filter was outputing datasets containing -`CellSetPermutation` as the representation for the cells. Although this is -technically correct and a very fast implementation, it is essentially -useless. The problem is that any downstream processing will have to know -that the data has a `CellSetPermutation`. None do (because the permutation -can be on any other cell set type, which creates an explosion of possible -cell types). - -Like was done with `Threshold` a while ago, this problem is fixed by deep -copying the result into a `CellSetExplicit`. This behavior is consistent -with VTK. 
---- - .../changelog/extract-geometry-permutation.md | 13 +++++++ - .../testing/UnitTestExtractGeometryFilter.cxx | 13 ++++++- - .../worklet/ExtractGeometry.h | 34 +++++++------------ - 3 files changed, 37 insertions(+), 23 deletions(-) - create mode 100644 docs/changelog/extract-geometry-permutation.md - -diff --git a/docs/changelog/extract-geometry-permutation.md b/docs/changelog/extract-geometry-permutation.md -new file mode 100644 -index 0000000000..8a90495f76 ---- /dev/null -+++ b/docs/changelog/extract-geometry-permutation.md -@@ -0,0 +1,13 @@ -+# Fix bug with ExtractGeometry filter -+ -+The `ExtractGeometry` filter was outputing datasets containing -+`CellSetPermutation` as the representation for the cells. Although this is -+technically correct and a very fast implementation, it is essentially -+useless. The problem is that any downstream processing will have to know -+that the data has a `CellSetPermutation`. None do (because the permutation -+can be on any other cell set type, which creates an explosion of possible -+cell types). -+ -+Like was done with `Threshold` a while ago, this problem is fixed by deep -+copying the result into a `CellSetExplicit`. This behavior is consistent -+with VTK. 
-diff --git a/vtkm/filter/entity_extraction/testing/UnitTestExtractGeometryFilter.cxx b/vtkm/filter/entity_extraction/testing/UnitTestExtractGeometryFilter.cxx -index 675df8f77c..14de333666 100644 ---- a/vtkm/filter/entity_extraction/testing/UnitTestExtractGeometryFilter.cxx -+++ b/vtkm/filter/entity_extraction/testing/UnitTestExtractGeometryFilter.cxx -@@ -11,6 +11,7 @@ - #include - #include - -+#include - #include - - using vtkm::cont::testing::MakeTestDataSet; -@@ -41,11 +42,21 @@ public: - vtkm::cont::DataSet output = extractGeometry.Execute(dataset); - VTKM_TEST_ASSERT(test_equal(output.GetNumberOfCells(), 8), "Wrong result for ExtractGeometry"); - -+ vtkm::filter::clean_grid::CleanGrid cleanGrid; -+ cleanGrid.SetCompactPointFields(true); -+ cleanGrid.SetMergePoints(false); -+ vtkm::cont::DataSet cleanOutput = cleanGrid.Execute(output); -+ - vtkm::cont::ArrayHandle outCellData; -- output.GetField("cellvar").GetData().AsArrayHandle(outCellData); -+ cleanOutput.GetField("cellvar").GetData().AsArrayHandle(outCellData); - - VTKM_TEST_ASSERT(outCellData.ReadPortal().Get(0) == 21.f, "Wrong cell field data"); - VTKM_TEST_ASSERT(outCellData.ReadPortal().Get(7) == 42.f, "Wrong cell field data"); -+ -+ vtkm::cont::ArrayHandle outPointData; -+ cleanOutput.GetField("pointvar").GetData().AsArrayHandle(outPointData); -+ VTKM_TEST_ASSERT(outPointData.ReadPortal().Get(0) == 99); -+ VTKM_TEST_ASSERT(outPointData.ReadPortal().Get(7) == 90); - } - - static void TestUniformByBox1() -diff --git a/vtkm/filter/entity_extraction/worklet/ExtractGeometry.h b/vtkm/filter/entity_extraction/worklet/ExtractGeometry.h -index 97521335f2..449d7eae60 100644 ---- a/vtkm/filter/entity_extraction/worklet/ExtractGeometry.h -+++ b/vtkm/filter/entity_extraction/worklet/ExtractGeometry.h -@@ -10,11 +10,13 @@ - #ifndef vtkm_m_worklet_ExtractGeometry_h - #define vtkm_m_worklet_ExtractGeometry_h - -+#include - #include - - #include - #include - #include -+#include - #include - #include - #include -@@ 
-114,28 +116,13 @@ public: - } - }; - -- //////////////////////////////////////////////////////////////////////////////////// -- // Extract cells by ids permutes input data -- template -- vtkm::cont::CellSetPermutation Run(const CellSetType& cellSet, -- const vtkm::cont::ArrayHandle& cellIds) -- { -- using OutputType = vtkm::cont::CellSetPermutation; -- -- vtkm::cont::ArrayCopy(cellIds, this->ValidCellIds); -- -- return OutputType(this->ValidCellIds, cellSet); -- } -- -- //////////////////////////////////////////////////////////////////////////////////// -- // Extract cells by implicit function permutes input data - template -- vtkm::cont::CellSetPermutation Run(const CellSetType& cellSet, -- const vtkm::cont::CoordinateSystem& coordinates, -- const ImplicitFunction& implicitFunction, -- bool extractInside, -- bool extractBoundaryCells, -- bool extractOnlyBoundaryCells) -+ vtkm::cont::CellSetExplicit<> Run(const CellSetType& cellSet, -+ const vtkm::cont::CoordinateSystem& coordinates, -+ const ImplicitFunction& implicitFunction, -+ bool extractInside, -+ bool extractBoundaryCells, -+ bool extractOnlyBoundaryCells) - { - // Worklet output will be a boolean passFlag array - vtkm::cont::ArrayHandle passFlags; -@@ -149,7 +136,10 @@ public: - vtkm::cont::Algorithm::CopyIf(indices, passFlags, this->ValidCellIds); - - // generate the cellset -- return vtkm::cont::CellSetPermutation(this->ValidCellIds, cellSet); -+ vtkm::cont::CellSetPermutation permutedCellSet(this->ValidCellIds, cellSet); -+ -+ vtkm::cont::CellSetExplicit<> outputCells; -+ return vtkm::worklet::CellDeepCopy::Run(permutedCellSet); - } - - vtkm::cont::ArrayHandle GetValidCellIds() const { return this->ValidCellIds; } --- -GitLab - diff --git a/scripts/docker/ascent_build/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch b/scripts/docker/ascent_build/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch deleted file mode 100644 index db4f075cd148..000000000000 --- 
a/scripts/docker/ascent_build/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch +++ /dev/null @@ -1,43 +0,0 @@ -From 763f13306b719bf6a213d00ead13fc93433e942e Mon Sep 17 00:00:00 2001 -From: Cyrus Harrison -Date: Tue, 2 Jul 2024 10:28:43 -0700 -Subject: [PATCH] fix bug with ray subsetting using wrong near and far planes - ---- - vtkm/rendering/raytracing/Camera.cxx | 12 ++++-------- - 1 file changed, 4 insertions(+), 8 deletions(-) - -diff --git a/vtkm/rendering/raytracing/Camera.cxx b/vtkm/rendering/raytracing/Camera.cxx -index f2a39bef9..10febf39f 100644 ---- a/vtkm/rendering/raytracing/Camera.cxx -+++ b/vtkm/rendering/raytracing/Camera.cxx -@@ -830,6 +830,7 @@ void Camera::FindSubset(const vtkm::Bounds& bounds) - transformed[2] = (transformed[2] * 0.5f + 0.5f); - zmin = vtkm::Min(zmin, transformed[2]); - zmax = vtkm::Max(zmax, transformed[2]); -+ // skip if outside near and far clipping - if (transformed[2] < 0 || transformed[2] > 1) - { - continue; -@@ -894,15 +895,10 @@ VTKM_CONT void Camera::UpdateDimensions(Ray& rays, - this->CameraView.SetLookAt(this->GetLookAt()); - this->CameraView.SetPosition(this->GetPosition()); - this->CameraView.SetViewUp(this->GetUp()); -- // -- // Just create come clipping range, we ignore the zmax value in subsetting -- // -- vtkm::Float64 maxDim = vtkm::Max( -- boundingBox.X.Max - boundingBox.X.Min, -- vtkm::Max(boundingBox.Y.Max - boundingBox.Y.Min, boundingBox.Z.Max - boundingBox.Z.Min)); - -- maxDim *= 100; -- this->CameraView.SetClippingRange(.0001, maxDim); -+ // Note: -+ // Use clipping range provided, the subsetting does take into consideration -+ // the near and far clipping planes. 
- - //Update our ViewProjection matrix - this->ViewProjectionMat = --- -2.39.3 (Apple Git-145) - diff --git a/scripts/docker/ascent_build/build_ascent.sh b/scripts/docker/ascent_build/build_ascent.sh deleted file mode 100644 index de62a3648aa8..000000000000 --- a/scripts/docker/ascent_build/build_ascent.sh +++ /dev/null @@ -1,984 +0,0 @@ -#!/bin/bash - -############################################################################## -# Adapted from: https://github.com/Alpine-DAV/ascent/blob/2228b2827dd99790c2fe4bc5b05f5b9c7ea42e71/scripts/build_ascent/build_ascent.sh -# Original Copyright (c) 2015-2024, Lawrence Livermore National Security, LLC. -# Released under BSD 3-Clause license -############################################################################## - -############################################################################## -# Demonstrates how to manually build Ascent and its dependencies, including: -# -# hdf5, conduit, vtk-m, mfem, raja, and umpire -# -# usage example: -# env enable_mpi=ON enable_openmp=ON ./build_ascent.sh -# -# -# Assumes: -# - cmake is in your path -# - selected compilers are in your path or set via env vars -# - [when enabled] MPI and Python (+numpy and mpi4py), are in your path -# -############################################################################## -set -eu -o pipefail - -############################################################################## -# Build Options -############################################################################## - -export MAKEFLAGS="--output-sync=target" - -# shared options -enable_cuda="${enable_cuda:=OFF}" -enable_hip="${enable_hip:=OFF}" -enable_fortran="${enable_fortran:=OFF}" -enable_python="${enable_python:=OFF}" -enable_openmp="${enable_openmp:=OFF}" -enable_mpi="${enable_mpi:=ON}" -enable_find_mpi="${enable_find_mpi:=ON}" -enable_tests="${enable_tests:=OFF}" -enable_verbose="${enable_verbose:=ON}" -build_jobs="${build_jobs:=8}" -build_config="${build_config:=Release}" 
-build_shared_libs="${build_shared_libs:=ON}" - -# tpl controls -build_zlib="${build_zlib:=true}" -build_hdf5="${build_hdf5:=false}" -build_pyvenv="${build_pyvenv:=false}" -build_caliper="${build_caliper:=false}" -build_silo="${build_silo:=false}" -build_conduit="${build_conduit:=true}" -build_vtkm="${build_vtkm:=true}" -build_camp="${build_camp:=true}" -build_raja="${build_raja:=true}" -build_umpire="${build_umpire:=true}" -build_mfem="${build_mfem:=true}" -build_catalyst="${build_catalyst:=false}" - -# ascent options -build_ascent="${build_ascent:=true}" - -# see if we are building on windows -build_windows="${build_windows:=OFF}" - -# see if we are building on macOS -build_macos="${build_macos:=OFF}" - -if [[ "$enable_cuda" == "ON" ]]; then - echo "*** configuring with CUDA support" - - CC="${CC:=gcc}" - CXX="${CXX:=g++}" - FTN="${FTN:=gfortran}" - - CUDA_ARCH="${CUDA_ARCH:=80}" - CUDA_ARCH_VTKM="${CUDA_ARCH_VTKM:=ampere}" -fi - -if [[ "$enable_hip" == "ON" ]]; then - echo "*** configuring with HIP support" - - CC="${CC:=/opt/rocm/llvm/bin/amdclang}" - CXX="${CXX:=/opt/rocm/llvm/bin/amdclang++}" - # FTN? 
- - ROCM_ARCH="${ROCM_ARCH:=gfx90a}" - ROCM_PATH="${ROCM_PATH:=/opt/rocm/}" - - # NOTE: this script only builds kokkos when enable_hip=ON - build_kokkos="${build_kokkos:=true}" -else - build_kokkos="${build_kokkos:=false}" -fi - -case "$OSTYPE" in - win*) build_windows="ON";; - msys*) build_windows="ON";; - darwin*) build_macos="ON";; - *) ;; -esac - -if [[ "$build_windows" == "ON" ]]; then - echo "*** configuring for windows" -fi - -if [[ "$build_macos" == "ON" ]]; then - echo "*** configuring for macos" -fi - -################ -# path helpers -################ -function ospath() -{ - if [[ "$build_windows" == "ON" ]]; then - echo `cygpath -m $1` - else - echo $1 - fi -} - -function abs_path() -{ - if [[ "$build_macos" == "ON" ]]; then - echo "$(cd $(dirname "$1");pwd)/$(basename "$1")" - else - echo `realpath $1` - fi -} - -root_dir=$(pwd) -root_dir="${prefix:=${root_dir}}" -root_dir=$(ospath ${root_dir}) -root_dir=$(abs_path ${root_dir}) -script_dir=$(abs_path "$(dirname "${BASH_SOURCE[0]}")") -build_dir=$(ospath build) -source_dir=$(ospath source) - - -# root_dir is where we will build and install -# override with `prefix` env var -if [ ! -d ${root_dir} ]; then - mkdir -p ${root_dir} -fi - -cd ${root_dir} - -# install_dir is where we will install -# override with `prefix` env var -install_dir=/usr/local - -echo "*** prefix: ${root_dir}" -echo "*** build root: ${build_dir}" -echo "*** sources root: ${source_dir}" -echo "*** install root: ${install_dir}" -echo "*** script dir: ${script_dir}" - -################ -# tar options -################ -tar_extra_args="" -if [[ "$build_windows" == "ON" ]]; then - tar_extra_args="--force-local" -fi - -# make sure sources dir exists -if [ ! -d ${source_dir} ]; then - mkdir -p ${source_dir} -fi -################ -# CMake Compiler Settings -################ -cmake_compiler_settings="" - -# capture compilers if they are provided via env vars -if [ ! 
-z ${CC+x} ]; then - cmake_compiler_settings="-DCMAKE_C_COMPILER:PATH=${CC}" -fi - -if [ ! -z ${CXX+x} ]; then - cmake_compiler_settings="${cmake_compiler_settings} -DCMAKE_CXX_COMPILER:PATH=${CXX}" -fi - -if [ ! -z ${FTN+x} ]; then - cmake_compiler_settings="${cmake_compiler_settings} -DCMAKE_Fortran_COMPILER:PATH=${FTN}" -fi - -################ -# print all build_ZZZ and enable_ZZZ options -################ -echo "*** cmake_compiler_settings: ${cmake_compiler_settings}" -echo "*** build_ascent `enable` settings:" -set | grep enable_ -echo "*** build_ascent `build` settings:" -set | grep build_ - -################ -# Zlib -################ -zlib_version=1.3.1 -zlib_src_dir=$(ospath ${source_dir}/zlib-${zlib_version}) -zlib_build_dir=$(ospath ${build_dir}/zlib-${zlib_version}/) -zlib_install_dir=$(ospath ${install_dir}/zlib-${zlib_version}/) -zlib_tarball=$(ospath ${source_dir}/zlib-${zlib_version}.tar.gz) - -# build only if install doesn't exist -if [ ! -d ${zlib_install_dir} ]; then -if ${build_zlib}; then -if [ ! -d ${zlib_src_dir} ]; then - echo "**** Downloading ${zlib_tarball}" - curl -L https://github.com/madler/zlib/releases/download/v${zlib_version}/zlib-${zlib_version}.tar.gz -o ${zlib_tarball} - tar ${tar_extra_args} -xzf ${zlib_tarball} -C ${source_dir} -fi - -echo "**** Configuring Zlib ${zlib_version}" -cmake -S ${zlib_src_dir} -B ${zlib_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DCMAKE_INSTALL_PREFIX=${zlib_install_dir} - -echo "**** Building Zlib ${zlib_version}" -cmake --build ${zlib_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Zlib ${zlib_version}" -cmake --install ${zlib_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping Zlib build, install found at: ${zlib_install_dir}" -fi # build_zlib - - -################ -# HDF5 -################ -# release 1-2 GAH! 
-hdf5_version=1.14.1-2 -hdf5_middle_version=1.14.1 -hdf5_short_version=1.14 -hdf5_src_dir=$(ospath ${source_dir}/hdf5-${hdf5_version}) -hdf5_build_dir=$(ospath ${build_dir}/hdf5-${hdf5_version}/) -hdf5_install_dir=/usr/local/hdf5/serial -hdf5_tarball=$(ospath ${source_dir}/hdf5-${hdf5_version}.tar.gz) - -# build only if install doesn't exist -if [ ! -d ${hdf5_install_dir} ]; then -if ${build_hdf5}; then -if [ ! -d ${hdf5_src_dir} ]; then - echo "**** Downloading ${hdf5_tarball}" - curl -L https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${hdf5_short_version}/hdf5-${hdf5_middle_version}/src/hdf5-${hdf5_version}.tar.gz -o ${hdf5_tarball} - tar ${tar_extra_args} -xzf ${hdf5_tarball} -C ${source_dir} -fi - -################# -# -# hdf5 1.14.x CMake recipe for using zlib -# -# -DHDF5_ENABLE_Z_LIB_SUPPORT=ON -# Add zlib install dir to CMAKE_PREFIX_PATH -# -################# - -echo "**** Configuring HDF5 ${hdf5_version}" -cmake -S ${hdf5_src_dir} -B ${hdf5_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DHDF5_ENABLE_Z_LIB_SUPPORT=ON \ - -DCMAKE_PREFIX_PATH=${zlib_install_dir} \ - -DCMAKE_INSTALL_PREFIX=${hdf5_install_dir} - -echo "**** Building HDF5 ${hdf5_version}" -cmake --build ${hdf5_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing HDF5 ${hdf5_version}" -cmake --install ${hdf5_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping HDF5 build, install found at: ${hdf5_install_dir}" -fi # build_hdf5 - -################ -# Silo -################ -silo_version=4.11.1 -silo_src_dir=$(ospath ${source_dir}/Silo-${silo_version}) -silo_build_dir=$(ospath ${build_dir}/silo-${silo_version}/) -silo_install_dir=$(ospath ${install_dir}/silo-${silo_version}/) -silo_tarball=$(ospath ${source_dir}/silo-${silo_version}.tar.gz) - -# build only if install doesn't exist -if [ ! -d ${silo_install_dir} ]; then -if ${build_silo}; then -if [ ! 
-d ${silo_src_dir} ]; then - echo "**** Downloading ${silo_tarball}" - curl -L https://github.com/LLNL/Silo/archive/refs/tags/${silo_version}.tar.gz -o ${silo_tarball} - # untar and avoid symlinks (which windows despises) - tar ${tar_extra_args} -xzf ${silo_tarball} -C ${source_dir} \ - --exclude="Silo-${silo_version}/config-site/*" \ - --exclude="Silo-${silo_version}/README.md" - # apply silo patches - cd ${silo_src_dir} - patch -p1 < ${script_dir}/2024_07_25_silo_4_11_cmake_fix.patch - - # windows specifc patch - if [[ "$build_windows" == "ON" ]]; then - patch -p1 < ${script_dir}/2024_07_29_silo-pr389-win32-bugfix.patch - fi - - cd ${root_dir} -fi - - -echo "**** Configuring Silo ${silo_version}" -cmake -S ${silo_src_dir} -B ${silo_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DCMAKE_INSTALL_PREFIX=${silo_install_dir} \ - -DSILO_ENABLE_SHARED=${build_shared_libs} \ - -DCMAKE_C_FLAGS=-Doff64_t=off_t \ - -DSILO_ENABLE_HDF5=ON \ - -DSILO_ENABLE_TESTS=OFF \ - -DSILO_BUILD_FOR_BSD_LICENSE=ON \ - -DSILO_ENABLE_FORTRAN=OFF \ - -DSILO_HDF5_DIR=${hdf5_install_dir}/cmake/ \ - -DCMAKE_PREFIX_PATH=${zlib_install_dir} - - -echo "**** Building Silo ${silo_version}" -cmake --build ${silo_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Silo ${silo_version}" -cmake --install ${silo_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping Silo build, install found at: ${silo_install_dir}" -fi # build_silo - -############################ -# Python Virtual Env -############################ -python_exe="${python_exe:=python3}" -venv_install_dir=$(ospath ${install_dir}/python-venv/) -venv_python_exe=$(ospath ${venv_install_dir}/bin/python3) -venv_sphinx_exe=$(ospath ${venv_install_dir}/bin/sphinx-build) - -# build only if install doesn't exist -if [ ! 
-d ${venv_install_dir} ]; then -if ${build_pyvenv}; then - echo "**** Creating Python Virtual Env" - cd ${install_dir} && ${python_exe} -m venv python-venv - ${venv_python_exe} -m pip install --upgrade pip - ${venv_python_exe} -m pip install numpy sphinx sphinx_rtd_theme - if [[ "$enable_mpi" == "ON" ]]; then - ${venv_python_exe} -m pip install mpi4py - fi -fi -else - echo "**** Skipping Python venv build, install found at: ${venv_install_dir}" -fi # build_pyvenv - -if ${build_pyvenv}; then - venv_python_ver=`${venv_python_exe} -c "import sys;print('{0}.{1}'.format(sys.version_info.major, sys.version_info.minor))"` - venv_python_site_pkgs_dir=${venv_install_dir}/lib/python${venv_python_ver}/site-packages -fi - -################ -# Caliper -################ -caliper_version=2.11.0 -caliper_src_dir=$(ospath ${source_dir}/Caliper-${caliper_version}) -caliper_build_dir=$(ospath ${build_dir}/caliper-${caliper_version}/) -caliper_install_dir=$(ospath ${install_dir}/caliper-${caliper_version}/) -caliper_tarball=$(ospath ${source_dir}/caliper-${caliper_version}-src-with-blt.tar.gz) - -# build only if install doesn't exist -if [ ! -d ${caliper_install_dir} ]; then -if ${build_caliper}; then -if [ ! 
-d ${caliper_src_dir} ]; then - echo "**** Downloading ${caliper_tarball}" - curl -L https://github.com/LLNL/Caliper/archive/refs/tags/v${caliper_version}.tar.gz -o ${caliper_tarball} - tar ${tar_extra_args} -xzf ${caliper_tarball} -C ${source_dir} - # windows specifc patch - cd ${caliper_src_dir} - if [[ "$build_windows" == "ON" ]]; then - patch -p1 < ${script_dir}/2024_08_01_caliper-win-smaller-opts.patch - fi - cd ${root_dir} -fi - -# -# Note: Caliper has optional Umpire support, -# if we want to support in the future, we will need to build umpire first -# - -# -DWITH_CUPTI=ON -DWITH_NVTX=ON -DCUDA_TOOLKIT_ROOT_DIR={path} -DCUPTI_PREFIX={path} -# -DWITH_ROCTRACER=ON -DWITH_ROCTX=ON -DROCM_PREFIX={path} - -caliper_windows_cmake_flags="-DCMAKE_CXX_STANDARD=17 -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=ON -DWITH_TOOLS=OFF" - -caliper_extra_cmake_args="" -if [[ "$build_windows" == "ON" ]]; then - caliper_extra_cmake_args="${caliper_windows_cmake_flags}" -fi - -if [[ "$enable_hip" == "ON" ]]; then - caliper_extra_cmake_args="${caliper_extra_cmake_args} -DWITH_ROCTRACER=ON -DWITH_ROCTX=ON -DROCM_PREFIX=${ROCM_PATH}" -fi - -echo "**** Configuring Caliper ${caliper_version}" -cmake -S ${caliper_src_dir} -B ${caliper_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DCMAKE_INSTALL_PREFIX=${caliper_install_dir} \ - -DWITH_MPI=${enable_mpi} ${caliper_extra_cmake_args} - -echo "**** Building Caliper ${caliper_version}" -cmake --build ${caliper_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Caliper ${caliper_version}" -cmake --install ${caliper_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping Caliper build, install found at: ${caliper_install_dir}" -fi # build_caliper - - -################ -# Conduit -################ -conduit_version=v0.9.2 -conduit_src_dir=$(ospath ${source_dir}/conduit-${conduit_version}) 
-conduit_build_dir=$(ospath ${build_dir}/conduit-${conduit_version}/) -conduit_install_dir=$(ospath ${install_dir}/conduit-${conduit_version}/) -conduit_tarball=$(ospath ${source_dir}/conduit-${conduit_version}-src-with-blt.tar.gz) - -# build only if install doesn't exist -if [ ! -d ${conduit_install_dir} ]; then -if ${build_conduit}; then -if [ ! -d ${conduit_src_dir} ]; then - echo "**** Downloading ${conduit_tarball}" - curl -L https://github.com/LLNL/conduit/releases/download/${conduit_version}/conduit-${conduit_version}-src-with-blt.tar.gz -o ${conduit_tarball} - # untar and avoid symlinks (which windows despises) - tar ${tar_extra_args} -xzf ${conduit_tarball} -C ${source_dir} \ - --exclude="conduit-${conduit_version}/src/tests/relay/data/silo/*" - # caliper vs adiak patch - if ${build_caliper}; then - cd ${conduit_src_dir} - echo ${conduit_src_dir} - patch -p 1 < ${script_dir}/2024_08_01_conduit-pr1311-detect-if-caliper-needs-adiak.patch - cd ${root_dir} - fi -fi - -# -# extrat cmake args -# -conduit_extra_cmake_opts=-DENABLE_PYTHON=${enable_python} -if ${build_pyvenv}; then - conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DPYTHON_EXECUTABLE=${venv_python_exe}" - conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DSPHINX_EXECUTABLE=${venv_sphinx_exe}" - conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DPYTHON_MODULE_INSTALL_PREFIX=${venv_python_site_pkgs_dir}" -fi - -if ${build_caliper}; then - conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DCALIPER_DIR=${caliper_install_dir}" -fi - -if ${build_silo}; then - conduit_extra_cmake_opts="${conduit_extra_cmake_opts} -DSILO_DIR=${silo_install_dir}" -fi - -echo "**** Configuring Conduit ${conduit_version}" -cmake -S ${conduit_src_dir}/src -B ${conduit_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DCMAKE_INSTALL_PREFIX=${conduit_install_dir} \ - 
-DENABLE_FORTRAN=${enable_fortran} \ - -DENABLE_MPI=${enable_mpi} \ - -DENABLE_FIND_MPI=${enable_find_mpi} \ - ${conduit_extra_cmake_opts} \ - -DENABLE_TESTS=OFF \ - -DHDF5_DIR=${hdf5_install_dir} \ - -DZLIB_DIR=${zlib_install_dir} - - -echo "**** Building Conduit ${conduit_version}" -cmake --build ${conduit_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Conduit ${conduit_version}" -cmake --install ${conduit_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping Conduit build, install found at: ${conduit_install_dir}" -fi # build_conduit - -######################### -# Kokkos (only for hip) -######################### -kokkos_version=3.7.02 -kokkos_src_dir=$(ospath ${source_dir}/kokkos-${kokkos_version}) -kokkos_build_dir=$(ospath ${build_dir}/kokkos-${kokkos_version}) -kokkos_install_dir=$(ospath ${install_dir}/kokkos-${kokkos_version}/) -kokkos_tarball=$(ospath ${source_dir}/kokkos-${kokkos_version}.tar.gz) - -if [[ "$enable_hip" == "ON" ]]; then -# build only if install doesn't exist -if [ ! -d ${kokkos_install_dir} ]; then -if ${build_kokkos}; then -if [ ! -d ${kokkos_src_dir} ]; then - echo "**** Downloading ${kokkos_tarball}" - curl -L https://github.com/kokkos/kokkos/archive/refs/tags/${kokkos_version}.tar.gz -o ${kokkos_tarball} - tar ${tar_extra_args} -xzf ${kokkos_tarball} -C ${source_dir} -fi - -# TODO: DKokkos_ARCH_VEGA90A needs to be controlled / mapped? 
- -echo "**** Configuring Kokkos ${kokkos_version}" -cmake -S ${kokkos_src_dir} -B ${kokkos_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DKokkos_ARCH_VEGA90A=ON \ - -DCMAKE_CXX_COMPILER=${ROCM_PATH}/bin/hipcc \ - -DKokkos_ENABLE_HIP=ON \ - -DKokkos_ENABLE_SERIAL=ON \ - -DKokkos_ENABLE_HIP_RELOCATABLE_DEVICE_CODE=OFF \ - -DCMAKE_INSTALL_PREFIX=${kokkos_install_dir} \ - -DCMAKE_CXX_FLAGS="--amdgpu-target=${ROCM_ARCH}" \ - -DBUILD_TESTING=OFF \ - -DCMAKE_INSTALL_PREFIX=${kokkos_install_dir} - -echo "**** Building Kokkos ${kokkos_version}" -cmake --build ${kokkos_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing VTK-m ${kokkos_version}" -cmake --install ${kokkos_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping Kokkos build, install found at: ${kokkos_install_dir}" -fi # build_kokkos - -fi # if enable_hip - -################ -# VTK-m -################ -vtkm_version=v2.1.0 -vtkm_src_dir=$(ospath ${source_dir}/vtk-m-${vtkm_version}) -vtkm_build_dir=$(ospath ${build_dir}/vtk-m-${vtkm_version}) -vtkm_install_dir=$(ospath ${install_dir}/vtk-m-${vtkm_version}/) -vtkm_tarball=$(ospath ${source_dir}/vtk-m-${vtkm_version}.tar.gz) - -# build only if install doesn't exist -if [ ! -d ${vtkm_install_dir} ]; then -if ${build_vtkm}; then -if [ ! 
-d ${vtkm_src_dir} ]; then - echo "**** Downloading ${vtkm_tarball}" - curl -L https://gitlab.kitware.com/vtk/vtk-m/-/archive/${vtkm_version}/vtk-m-${vtkm_version}.tar.gz -o ${vtkm_tarball} - tar ${tar_extra_args} -xzf ${vtkm_tarball} -C ${source_dir} - - # apply vtk-m patch - cd ${vtkm_src_dir} - patch -p1 < ${script_dir}/2023_12_06_vtkm-mr3160-rocthrust-fix.patch - patch -p1 < ${script_dir}/2024_05_03_vtkm-mr3215-ext-geom-fix.patch - patch -p1 < ${script_dir}/2024_07_02_vtkm-mr3246-raysubset_bugfix.patch - cd ${root_dir} -fi - - -vtkm_extra_cmake_args="" -if [[ "$enable_cuda" == "ON" ]]; then - vtkm_extra_cmake_args="-DVTKm_ENABLE_CUDA=ON" - vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DCMAKE_CUDA_HOST_COMPILER=${CXX}" - vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH}" -fi - -if [[ "$enable_hip" == "ON" ]]; then - vtkm_extra_cmake_args="-DVTKm_ENABLE_KOKKOS=ON" - vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DCMAKE_PREFIX_PATH=${kokkos_install_dir}" - vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DCMAKE_HIP_ARCHITECTURES=${ROCM_ARCH}" - vtkm_extra_cmake_args="${vtkm_extra_cmake_args} -DVTKm_ENABLE_KOKKOS_THRUST=OFF" -fi - -echo "**** Configuring VTK-m ${vtkm_version}" -cmake -S ${vtkm_src_dir} -B ${vtkm_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DVTKm_NO_DEPRECATED_VIRTUAL=ON \ - -DVTKm_USE_64BIT_IDS=OFF \ - -DVTKm_USE_DOUBLE_PRECISION=ON \ - -DVTKm_USE_DEFAULT_TYPES_FOR_ASCENT=ON \ - -DVTKm_ENABLE_MPI=${enable_mpi} \ - -DVTKm_ENABLE_OPENMP=${enable_openmp}\ - -DVTKm_ENABLE_RENDERING=ON \ - -DVTKm_ENABLE_TESTING=OFF\ - -DBUILD_TESTING=OFF \ - -DVTKm_ENABLE_BENCHMARKS=OFF ${vtkm_extra_cmake_args} \ - -DCMAKE_INSTALL_PREFIX=${vtkm_install_dir} - -echo "**** Building VTK-m ${vtkm_version}" -cmake --build ${vtkm_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing 
VTK-m ${vtkm_version}" -cmake --install ${vtkm_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping VTK-m build, install found at: ${vtkm_install_dir}" -fi # build_vtkm - - -################ -# Camp -################ -camp_version=v2024.02.1 -camp_src_dir=$(ospath ${source_dir}/camp-${camp_version}) -camp_build_dir=$(ospath ${build_dir}/camp-${camp_version}) -camp_install_dir=$(ospath ${install_dir}/camp-${camp_version}/) -camp_tarball=$(ospath ${source_dir}/camp-${camp_version}.tar.gz) - - -# build only if install doesn't exist -if [ ! -d ${camp_install_dir} ]; then -if ${build_camp}; then -if [ ! -d ${camp_src_dir} ]; then - echo "**** Downloading ${camp_tarball}" - curl -L https://github.com/LLNL/camp/releases/download/${camp_version}/camp-${camp_version}.tar.gz -o ${camp_tarball} - tar ${tar_extra_args} -xzf ${camp_tarball} -C ${source_dir} -fi - -camp_extra_cmake_args="" -if [[ "$enable_cuda" == "ON" ]]; then - camp_extra_cmake_args="-DENABLE_CUDA=ON" - camp_extra_cmake_args="${camp_extra_cmake_args} -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH}" -fi - -if [[ "$enable_hip" == "ON" ]]; then - camp_extra_cmake_args="-DENABLE_HIP=ON" - camp_extra_cmake_args="${camp_extra_cmake_args} -DCMAKE_HIP_ARCHITECTURES=${ROCM_ARCH}" - camp_extra_cmake_args="${camp_extra_cmake_args} -DROCM_PATH=${ROCM_PATH}" -fi - -echo "**** Configuring Camp ${camp_version}" -cmake -S ${camp_src_dir} -B ${camp_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DENABLE_TESTS=OFF \ - -DENABLE_EXAMPLES=OFF ${camp_extra_cmake_args} \ - -DCMAKE_INSTALL_PREFIX=${camp_install_dir} - -echo "**** Building Camp ${camp_version}" -cmake --build ${camp_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Camp ${camp_version}" -cmake --install ${camp_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping Camp build, install found 
at: ${camp_install_dir}" -fi # build_camp - - -################ -# RAJA -################ -raja_version=v2024.02.1 -raja_src_dir=$(ospath ${source_dir}/RAJA-${raja_version}) -raja_build_dir=$(ospath ${build_dir}/raja-${raja_version}) -raja_install_dir=$(ospath ${install_dir}/raja-${raja_version}/) -raja_tarball=$(ospath ${source_dir}/RAJA-${raja_version}.tar.gz) -raja_enable_vectorization="${raja_enable_vectorization:=ON}" - -# build only if install doesn't exist -if [ ! -d ${raja_install_dir} ]; then -if ${build_raja}; then -if [ ! -d ${raja_src_dir} ]; then - echo "**** Downloading ${raja_tarball}" - curl -L https://github.com/LLNL/RAJA/releases/download/${raja_version}/RAJA-${raja_version}.tar.gz -o ${raja_tarball} - tar ${tar_extra_args} -xzf ${raja_tarball} -C ${source_dir} -fi - -raja_extra_cmake_args="" -if [[ "$enable_cuda" == "ON" ]]; then - raja_extra_cmake_args="-DENABLE_CUDA=ON" - raja_extra_cmake_args="${raja_extra_cmake_args} -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH}" -fi - -if [[ "$enable_hip" == "ON" ]]; then - raja_extra_cmake_args="-DENABLE_HIP=ON" - raja_extra_cmake_args="${raja_extra_cmake_args} -DCMAKE_HIP_ARCHITECTURES=${ROCM_ARCH}" - raja_extra_cmake_args="${raja_extra_cmake_args} -DROCM_PATH=${ROCM_PATH}" -fi - -echo "**** Configuring RAJA ${raja_version}" -cmake -S ${raja_src_dir} -B ${raja_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -Dcamp_DIR=${camp_install_dir} \ - -DENABLE_OPENMP=${enable_openmp} \ - -DENABLE_TESTS=OFF \ - -DRAJA_ENABLE_TESTS=OFF \ - -DENABLE_EXAMPLES=OFF \ - -DENABLE_EXERCISES=OFF ${raja_extra_cmake_args} \ - -DCMAKE_INSTALL_PREFIX=${raja_install_dir} \ - -DRAJA_ENABLE_VECTORIZATION=${raja_enable_vectorization} - -echo "**** Building RAJA ${raja_version}" -cmake --build ${raja_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing RAJA ${raja_version}" -cmake --install 
${raja_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping RAJA build, install found at: ${raja_install_dir}" -fi # build_raja - -################ -# Umpire -################ -umpire_version=2024.02.1 -umpire_src_dir=$(ospath ${source_dir}/umpire-${umpire_version}) -umpire_build_dir=$(ospath ${build_dir}/umpire-${umpire_version}) -umpire_install_dir=$(ospath ${install_dir}/umpire-${umpire_version}/) -umpire_tarball=$(ospath ${source_dir}/umpire-${umpire_version}.tar.gz) -umpire_windows_cmake_flags="-DBLT_CXX_STD=c++17 -DCMAKE_CXX_STANDARD=17 -DUMPIRE_ENABLE_FILESYSTEM=On -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=On" - -umpire_extra_cmake_args="" -if [[ "$build_windows" == "ON" ]]; then - umpire_extra_cmake_args="${umpire_windows_cmake_flags}" -fi - -if [[ "$enable_cuda" == "ON" ]]; then - umpire_extra_cmake_args="${umpire_extra_cmake_args} -DENABLE_CUDA=ON" - umpire_extra_cmake_args="${umpire_extra_cmake_args} -DCMAKE_CUDA_ARCHITECTURES=${CUDA_ARCH}" -fi - -if [[ "$enable_hip" == "ON" ]]; then - umpire_extra_cmake_args="${umpire_extra_cmake_args} -DENABLE_HIP=ON" - umpire_extra_cmake_args="${umpire_extra_cmake_args} -DCMAKE_HIP_ARCHITECTURES=${ROCM_ARCH}" - umpire_extra_cmake_args="${umpire_extra_cmake_args} -DROCM_PATH=${ROCM_PATH}" -fi - -# build only if install doesn't exist -if [ ! -d ${umpire_install_dir} ]; then -if ${build_umpire}; then -if [ ! 
-d ${umpire_src_dir} ]; then - echo "**** Downloading ${umpire_tarball}" - curl -L https://github.com/LLNL/Umpire/releases/download/v${umpire_version}/umpire-${umpire_version}.tar.gz -o ${umpire_tarball} - tar ${tar_extra_args} -xzf ${umpire_tarball} -C ${source_dir} -fi - -echo "**** Configuring Umpire ${umpire_version}" -cmake -S ${umpire_src_dir} -B ${umpire_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose} \ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -Dcamp_DIR=${camp_install_dir} \ - -DENABLE_OPENMP=${enable_openmp} \ - -DENABLE_TESTS=OFF \ - -DUMPIRE_ENABLE_TOOLS=Off \ - -DUMPIRE_ENABLE_BENCHMARKS=OFF ${umpire_extra_cmake_args} \ - -DCMAKE_INSTALL_PREFIX=${umpire_install_dir} - -echo "**** Building Umpire ${umpire_version}" -cmake --build ${umpire_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Umpire ${umpire_version}" -cmake --install ${umpire_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping Umpire build, install found at: ${umpire_install_dir}" -fi # build_umpire - -################ -# MFEM -################ -mfem_version=4.6 -mfem_src_dir=$(ospath ${source_dir}/mfem-${mfem_version}) -mfem_build_dir=$(ospath ${build_dir}/mfem-${mfem_version}) -mfem_install_dir=$(ospath ${install_dir}/mfem-${mfem_version}/) -mfem_tarball=$(ospath ${source_dir}/mfem-${mfem_version}.tar.gz) -mfem_windows_cmake_flags="-DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=ON" - -mfem_extra_cmake_args="" -if [[ "$build_windows" == "ON" ]]; then - mfem_extra_cmake_args="${mfem_windows_cmake_flags}" -fi - - -# build only if install doesn't exist -if [ ! -d ${mfem_install_dir} ]; then -if ${build_mfem}; then -if [ ! 
-d ${mfem_src_dir} ]; then - echo "**** Downloading ${mfem_tarball}" - curl -L https://github.com/mfem/mfem/archive/refs/tags/v${mfem_version}.tar.gz -o ${mfem_tarball} - tar ${tar_extra_args} -xzf ${mfem_tarball} -C ${source_dir} -fi - -# -# Note: MFEM MPI requires Hypre and Metis -# -DMFEM_USE_MPI=${enable_mpi} \ - -echo "**** Configuring MFEM ${mfem_version}" -cmake -S ${mfem_src_dir} -B ${mfem_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DBUILD_SHARED_LIBS=${build_shared_libs} \ - -DMFEM_USE_CONDUIT=ON ${mfem_extra_cmake_args} \ - -DCMAKE_PREFIX_PATH="${conduit_install_dir}" \ - -DMFEM_ENABLE_TESTING=OFF \ - -DMFEM_ENABLE_EXAMPLES=OFF \ - -DCMAKE_INSTALL_PREFIX=${mfem_install_dir} - -echo "**** Building MFEM ${mfem_version}" -cmake --build ${mfem_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing MFEM ${mfem_version}" -cmake --install ${mfem_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping MFEM build, install found at: ${mfem_install_dir}" -fi # build_mfem - -################ -# Catalyst -################ -catalyst_version=2.0.0-rc4 -catalyst_src_dir=$(ospath ${source_dir}/catalyst-v${catalyst_version}) -catalyst_build_dir=$(ospath ${build_dir}/catalyst-v${catalyst_version}) -catalyst_install_dir=$(ospath ${install_dir}/catalyst-v${catalyst_version}/) -catalyst_cmake_dir=${catalyst_install_dir}lib64/cmake/catalyst-2.0/ -catalyst_tarball=$(ospath ${source_dir}/catalyst-v${catalyst_version}.tar.gz) - -# build only if install doesn't exist -if [ ! -d ${catalyst_install_dir} ]; then -if ${build_catalyst}; then -if [ ! 
-d ${catalyst_src_dir} ]; then - echo "**** Downloading ${catalyst_tarball}" - curl -L https://gitlab.kitware.com/paraview/catalyst/-/archive/v${catalyst_version}/catalyst-v${catalyst_version}.tar.gz -o ${catalyst_tarball} - tar ${tar_extra_args} -xzf ${catalyst_tarball} -C ${source_dir} -fi - -echo "**** Configuring Catalyst ${catalyst_version}" -cmake -S ${catalyst_src_dir} -B ${catalyst_build_dir} ${cmake_compiler_settings} \ - -DCMAKE_VERBOSE_MAKEFILE:BOOL=${enable_verbose}\ - -DCMAKE_BUILD_TYPE=${build_config} \ - -DCATALYST_BUILD_TESTING=OFF \ - -DCATALYST_USE_MPI=${enable_mpi} \ - -DCMAKE_INSTALL_PREFIX=${catalyst_install_dir} \ - -echo "**** Building Catalyst ${catalyst_version}" -cmake --build ${catalyst_build_dir} --config ${build_config} -j${build_jobs} -echo "**** Installing Catalyst ${catalyst_version}" -cmake --install ${catalyst_build_dir} --config ${build_config} - -fi -else - echo "**** Skipping Catalyst build, install found at: ${catalyst_install_dir}" -fi # build_catalyst - -################ -# Ascent -################ -# if we are in an ascent checkout, use existing source -ascent_checkout_dir=$(ospath ${script_dir}/../../src) -ascent_checkout_dir=$(abs_path ${ascent_checkout_dir}) -echo ${ascent_checkout_dir} -if [ -d ${ascent_checkout_dir} ]; then - ascent_version=checkout - ascent_src_dir=$(abs_path ${ascent_checkout_dir}) - echo "**** Using existing Ascent source repo checkout: ${ascent_src_dir}" -else - ascent_version=develop - ascent_src_dir=$(ospath ${source_dir}/ascent/src) -fi - -# otherwise use ascent develop -ascent_build_dir=$(ospath ${build_dir}/ascent-${ascent_version}/) -ascent_install_dir=$(ospath ${install_dir}//ascent-${ascent_version}/) - -echo "**** Creating Ascent host-config (ascent-config.cmake)" -# -echo '# host-config file generated by build_ascent.sh' > ${root_dir}/ascent-config.cmake - -# capture compilers if they are provided via env vars -if [ ! 
-z ${CC+x} ]; then - echo 'set(CMAKE_C_COMPILER ' ${CC} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -if [ ! -z ${CXX+x} ]; then - echo 'set(CMAKE_CXX_COMPILER ' ${CXX} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -if [ ! -z ${FTN+x} ]; then - echo 'set(CMAKE_Fortran_COMPILER ' ${FTN} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -# capture compiler flags if they are provided via env vars -if [ ! -z ${CFLAGS+x} ]; then - echo 'set(CMAKE_C_FLAGS "' ${CFLAGS} '" CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -if [ ! -z ${CXXFLAGS+x} ]; then - echo 'set(CMAKE_CXX_FLAGS "' ${CXXFLAGS} '" CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -if [ ! -z ${FFLAGS+x} ]; then - echo 'set(CMAKE_F_FLAGS "' ${FFLAGS} '" CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -echo 'set(CMAKE_VERBOSE_MAKEFILE ' ${enable_verbose} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -echo 'set(CMAKE_BUILD_TYPE ' ${build_config} ' CACHE STRING "")' >> ${root_dir}/ascent-config.cmake -echo 'set(BUILD_SHARED_LIBS ' ${build_shared_libs} ' CACHE STRING "")' >> ${root_dir}/ascent-config.cmake -echo 'set(CMAKE_INSTALL_PREFIX ' ${ascent_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 'set(ENABLE_TESTS ' ${enable_tests} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -echo 'set(ENABLE_MPI ' ${enable_mpi} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -echo 'set(ENABLE_FIND_MPI ' ${enable_find_mpi} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -echo 'set(ENABLE_FORTRAN ' ${enable_fortran} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -echo 'set(ENABLE_PYTHON ' ${enable_python} ' CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -if ${build_pyvenv}; then -echo 'set(PYTHON_EXECUTABLE ' ${venv_python_exe} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 'set(PYTHON_MODULE_INSTALL_PREFIX ' ${venv_python_site_pkgs_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 
'set(ENABLE_DOCS ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -echo 'set(SPHINX_EXECUTABLE ' ${venv_sphinx_exe} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi -if ${build_caliper}; then - echo 'set(CALIPER_DIR ' ${caliper_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi -echo 'set(BLT_CXX_STD c++14 CACHE STRING "")' >> ${root_dir}/ascent-config.cmake -echo 'set(CONDUIT_DIR ' ${conduit_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 'set(VTKM_DIR ' ${vtkm_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 'set(CAMP_DIR ' ${camp_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 'set(RAJA_DIR ' ${raja_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 'set(UMPIRE_DIR ' ${umpire_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 'set(MFEM_DIR ' ${mfem_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -echo 'set(ENABLE_VTKH ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -echo 'set(ENABLE_APCOMP ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake -echo 'set(ENABLE_DRAY ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake - - -if ${build_catalyst}; then - echo 'set(CATALYST_DIR ' ${catalyst_cmake_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -if [[ "$enable_cuda" == "ON" ]]; then - echo 'set(ENABLE_CUDA ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake - echo 'set(CMAKE_CUDA_ARCHITECTURES ' ${CUDA_ARCH} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -if [[ "$enable_hip" == "ON" ]]; then - echo 'set(ENABLE_HIP ON CACHE BOOL "")' >> ${root_dir}/ascent-config.cmake - echo 'set(CMAKE_HIP_ARCHITECTURES ' ${ROCM_ARCH} ' CACHE STRING "")' >> ${root_dir}/ascent-config.cmake - echo 'set(ROCM_PATH ' ${ROCM_PATH} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake - echo 'set(KOKKOS_DIR ' ${kokkos_install_dir} ' CACHE PATH "")' >> ${root_dir}/ascent-config.cmake -fi - -# build 
only if install doesn't exist -if [ ! -d ${ascent_install_dir} ]; then -if ${build_ascent}; then -if [ ! -d ${ascent_src_dir} ]; then - echo "**** Cloning Ascent" - git clone --recursive https://github.com/Alpine-DAV/ascent.git $(ospath ${source_dir}/ascent) -fi - -echo "**** Configuring Ascent" -cmake -S ${ascent_src_dir} -B ${ascent_build_dir} -C ${root_dir}/ascent-config.cmake - -echo "**** Building Ascent" -cmake --build ${ascent_build_dir} --config ${build_config} -j${build_jobs} - -echo "**** Installing Ascent" -cmake --install ${ascent_build_dir} --config ${build_config} - -if ${build_catalyst}; then - mv ${ascent_install_dir}/lib/libcatalyst-ascent.so ${catalyst_install_dir}lib64/catalyst/libcatalyst-ascent.so -fi - -fi -else - echo "**** Skipping Ascent build, install found at: ${ascent_install_dir}" -fi # build_ascent From 3ef7b5cf275056e262145a4fda644a926e78326f Mon Sep 17 00:00:00 2001 From: Philipp Grete Date: Fri, 30 Aug 2024 19:50:45 +0000 Subject: [PATCH 25/29] Use Cuda12.1 container and drop to local user --- scripts/docker/Dockerfile.nvcc | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 4679c9f5dba4..cca952f4aeaa 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,4 +1,4 @@ -FROM nvidia/cuda:12.0.0-devel-ubuntu22.04 +FROM nvidia/cuda:12.1.0-devel-ubuntu22.04 RUN apt-get clean && apt-get update -y && \ DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev gcc-11 g++-11 && \ @@ -83,3 +83,11 @@ RUN cd /tmp/build-ascent && \ env enable_cuda=ON enable_mpi=ON build_hdf5=false build_silo=false bash build_ascent.sh && \ cd / && \ rm -rf /tmp/ascent_build + +# uid 1000 maps to the 
one running the container on the CI host +RUN groupadd -g 109 render +RUN useradd --create-home --shell /bin/bash -u 1000 -G render,sudo ci + +USER ci + +WORKDIR /home/ci From 79595f2ff5725926d8b3c66bdfcf766b778ff7da Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Fri, 30 Aug 2024 21:27:00 -0400 Subject: [PATCH 26/29] add emacs and vi --- scripts/docker/Dockerfile.nvcc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index cca952f4aeaa..343822afc8b8 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,7 +1,7 @@ FROM nvidia/cuda:12.1.0-devel-ubuntu22.04 RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev gcc-11 g++-11 && \ + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev gcc-11 g++-11 emacs nvi && \ update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 10 && \ update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-11 10 From d05d48c590d101638e8c37bc34abc6f39391fa4f Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Sat, 31 Aug 2024 14:29:49 -0400 Subject: [PATCH 27/29] set build_jobs=`nproc` to avoid OOM kill --- scripts/docker/ascent_build.patch | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/docker/ascent_build.patch b/scripts/docker/ascent_build.patch index aa165a2eb4d0..8954544d3d74 100644 --- a/scripts/docker/ascent_build.patch +++ b/scripts/docker/ascent_build.patch 
@@ -14,7 +14,7 @@ enable_tests="${enable_tests:=OFF}" enable_verbose="${enable_verbose:=ON}" -build_jobs="${build_jobs:=6}" -+build_jobs="${build_jobs:=16}" ++build_jobs=`nproc` build_config="${build_config:=Release}" build_shared_libs="${build_shared_libs:=ON}" From ba7827dce0f763ce04c886f4968740fca21c7422 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Sat, 31 Aug 2024 17:40:50 -0400 Subject: [PATCH 28/29] add developer tools for Codespaces/VSCode --- scripts/docker/Dockerfile.nvcc | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/docker/Dockerfile.nvcc b/scripts/docker/Dockerfile.nvcc index 343822afc8b8..1a8a73dd6680 100644 --- a/scripts/docker/Dockerfile.nvcc +++ b/scripts/docker/Dockerfile.nvcc @@ -1,7 +1,7 @@ FROM nvidia/cuda:12.1.0-devel-ubuntu22.04 RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev gcc-11 g++-11 emacs nvi && \ + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends git python3-minimal libpython3-stdlib bc hwloc wget openssh-client python3-numpy python3-h5py python3-matplotlib python3-scipy python3-pip lcov curl cuda-nsight-systems-12-6 cmake ninja-build libpython3-dev gcc-11 g++-11 emacs nvi sphinx-doc python3-sphinx-rtd-theme python3-sphinxcontrib.bibtex python3-sphinx-copybutton && \ update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 10 && \ update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-11 10 @@ -11,6 +11,9 @@ RUN pip3 install unyt RUN pip3 install blosc2 +# for Codespaces/VSCode Sphinx support +RUN pip3 install esbonio + # h5py from the repo is incompatible with the default numpy 2.1.0 # Downgrading is not the cleanest solution, but it works... 
# see https://stackoverflow.com/questions/78634235/numpy-dtype-size-changed-may-indicate-binary-incompatibility-expected-96-from @@ -20,7 +23,7 @@ RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key| apt-key add - && \ echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" > /etc/apt/sources.list.d/llvm.list RUN apt-get clean && apt-get update -y && \ - DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends clang-19 llvm-19 libomp-19-dev && \ + DEBIAN_FRONTEND="noninteractive" TZ=America/New_York apt-get install -y --no-install-recommends clang-19 llvm-19 libomp-19-dev clangd-19 && \ rm -rf /var/lib/apt/lists/* RUN cd /tmp && \ From 652daf30fe3796b7e021ea86c627f2d20fb23349 Mon Sep 17 00:00:00 2001 From: Ben Wibking Date: Sat, 31 Aug 2024 17:43:34 -0400 Subject: [PATCH 29/29] add devcontainer.json --- .devcontainer/devcontainer.json | 35 +++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 .devcontainer/devcontainer.json diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000000..e5318f046090 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,35 @@ +// devcontainer.json + { + "name": "parthenon-dev", + "image": "ghcr.io/parthenon-hpc-lab/cuda11.6-mpi-hdf5-ascent", + "hostRequirements": { + "cpus": 4 + }, + "customizations": { + "vscode": { + "settings": {}, + "extensions": [ + "-ms-vscode.cpptools", + "llvm-vs-code-extensions.vscode-clangd", + "github.vscode-pull-request-github", + "ms-python.python", + "ms-toolsai.jupyter", + "ms-vscode.live-server", + "ms-azuretools.vscode-docker", + "swyddfa.esbonio", + "tomoki1207.pdf", + "ms-vscode.cmake-tools", + "ms-vsliveshare.vsliveshare" + ] + } + }, + "remoteEnv": { + "PATH": "${containerEnv:PATH}:/usr/local/hdf5/parallel/bin", + "OMPI_MCA_opal_warn_on_missing_libcuda": "0" + }, + //"remoteUser": "ubuntu", + // we need to manually checkout the submodules, + // 
but VSCode may try to configure CMake before they are fully checked-out. + // workaround TBD + "postCreateCommand": "git submodule update --init" + }