diff --git a/.azure-pipelines/azure-pipelines-linux.yml b/.azure-pipelines/azure-pipelines-linux.yml index 0c63a8e..3696ae5 100755 --- a/.azure-pipelines/azure-pipelines-linux.yml +++ b/.azure-pipelines/azure-pipelines-linux.yml @@ -12,47 +12,74 @@ jobs: CONFIG: linux_64_mpimpichnumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpimpichnumpy1.22python3.10.____cpython linux_64_mpimpichnumpy1.22python3.9.____cpython: CONFIG: linux_64_mpimpichnumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpimpichnumpy1.22python3.9.____cpython linux_64_mpimpichnumpy1.23python3.11.____cpython: CONFIG: linux_64_mpimpichnumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpimpichnumpy1.23python3.11.____cpython linux_64_mpinompinumpy1.22python3.10.____cpython: CONFIG: linux_64_mpinompinumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpinompinumpy1.22python3.10.____cpython linux_64_mpinompinumpy1.22python3.9.____cpython: CONFIG: linux_64_mpinompinumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpinompinumpy1.22python3.9.____cpython linux_64_mpinompinumpy1.23python3.11.____cpython: CONFIG: linux_64_mpinompinumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpinompinumpy1.23python3.11.____cpython linux_64_mpiopenmpinumpy1.22python3.10.____cpython: CONFIG: linux_64_mpiopenmpinumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpiopenmpinumpy1.22python3_h3d7a55f7a1 linux_64_mpiopenmpinumpy1.22python3.9.____cpython: CONFIG: linux_64_mpiopenmpinumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpiopenmpinumpy1.22python3_h44cfe47919 linux_64_mpiopenmpinumpy1.23python3.11.____cpython: CONFIG: linux_64_mpiopenmpinumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 - SHORT_CONFIG: linux_64_mpiopenmpinumpy1.23python3_h693df30380 + linux_aarch64_mpimpichnumpy1.22python3.10.____cpython: + CONFIG: linux_aarch64_mpimpichnumpy1.22python3.10.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 + linux_aarch64_mpimpichnumpy1.22python3.9.____cpython: + CONFIG: linux_aarch64_mpimpichnumpy1.22python3.9.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 + linux_aarch64_mpimpichnumpy1.23python3.11.____cpython: + CONFIG: linux_aarch64_mpimpichnumpy1.23python3.11.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 + linux_aarch64_mpinompinumpy1.22python3.10.____cpython: + CONFIG: linux_aarch64_mpinompinumpy1.22python3.10.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 + linux_aarch64_mpinompinumpy1.22python3.9.____cpython: + CONFIG: linux_aarch64_mpinompinumpy1.22python3.9.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 + 
linux_aarch64_mpinompinumpy1.23python3.11.____cpython: + CONFIG: linux_aarch64_mpinompinumpy1.23python3.11.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 + linux_aarch64_mpiopenmpinumpy1.22python3.10.____cpython: + CONFIG: linux_aarch64_mpiopenmpinumpy1.22python3.10.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 + linux_aarch64_mpiopenmpinumpy1.22python3.9.____cpython: + CONFIG: linux_aarch64_mpiopenmpinumpy1.22python3.9.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 + linux_aarch64_mpiopenmpinumpy1.23python3.11.____cpython: + CONFIG: linux_aarch64_mpiopenmpinumpy1.23python3.11.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64 timeoutInMinutes: 360 steps: @@ -81,33 +108,4 @@ jobs: env: BINSTAR_TOKEN: $(BINSTAR_TOKEN) FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) - STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) - - script: | - export CI=azure - export CI_RUN_ID=$(build.BuildNumber).$(system.JobAttempt) - export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) - export CONDA_BLD_DIR=build_artifacts - export ARTIFACT_STAGING_DIR="$(Build.ArtifactStagingDirectory)" - # Archive everything in CONDA_BLD_DIR except environments - export BLD_ARTIFACT_PREFIX=conda_artifacts - if [[ "$AGENT_JOBSTATUS" == "Failed" ]]; then - # Archive the CONDA_BLD_DIR environments only when the job fails - export ENV_ARTIFACT_PREFIX=conda_envs - fi - ./.scripts/create_conda_build_artifacts.sh - displayName: Prepare conda build artifacts - condition: succeededOrFailed() - - - task: PublishPipelineArtifact@1 - displayName: Store conda build artifacts - condition: not(eq(variables.BLD_ARTIFACT_PATH, '')) - inputs: - targetPath: $(BLD_ARTIFACT_PATH) - artifactName: $(BLD_ARTIFACT_NAME) - - - task: PublishPipelineArtifact@1 - displayName: Store conda build environment artifacts - condition: not(eq(variables.ENV_ARTIFACT_PATH, '')) - inputs: - targetPath: $(ENV_ARTIFACT_PATH) - artifactName: $(ENV_ARTIFACT_NAME) \ No newline at end of file + STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file diff --git a/.azure-pipelines/azure-pipelines-osx.yml b/.azure-pipelines/azure-pipelines-osx.yml index b903a24..efc644a 100755 --- a/.azure-pipelines/azure-pipelines-osx.yml +++ b/.azure-pipelines/azure-pipelines-osx.yml @@ -11,75 +11,57 @@ jobs: osx_64_mpimpichnumpy1.22python3.10.____cpython: CONFIG: osx_64_mpimpichnumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_64_mpimpichnumpy1.22python3.10.____cpython osx_64_mpimpichnumpy1.22python3.9.____cpython: CONFIG: osx_64_mpimpichnumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_64_mpimpichnumpy1.22python3.9.____cpython osx_64_mpimpichnumpy1.23python3.11.____cpython: CONFIG: osx_64_mpimpichnumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_64_mpimpichnumpy1.23python3.11.____cpython osx_64_mpinompinumpy1.22python3.10.____cpython: CONFIG: osx_64_mpinompinumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_64_mpinompinumpy1.22python3.10.____cpython osx_64_mpinompinumpy1.22python3.9.____cpython: CONFIG: osx_64_mpinompinumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_64_mpinompinumpy1.22python3.9.____cpython osx_64_mpinompinumpy1.23python3.11.____cpython: CONFIG: osx_64_mpinompinumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: 
osx_64_mpinompinumpy1.23python3.11.____cpython osx_64_mpiopenmpinumpy1.22python3.10.____cpython: CONFIG: osx_64_mpiopenmpinumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_64_mpiopenmpinumpy1.22python3.10.____cpython osx_64_mpiopenmpinumpy1.22python3.9.____cpython: CONFIG: osx_64_mpiopenmpinumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_64_mpiopenmpinumpy1.22python3.9.____cpython osx_64_mpiopenmpinumpy1.23python3.11.____cpython: CONFIG: osx_64_mpiopenmpinumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_64_mpiopenmpinumpy1.23python3.11.____cpython osx_arm64_mpimpichnumpy1.22python3.10.____cpython: CONFIG: osx_arm64_mpimpichnumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpimpichnumpy1.22python3._ha977e349a6 osx_arm64_mpimpichnumpy1.22python3.9.____cpython: CONFIG: osx_arm64_mpimpichnumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpimpichnumpy1.22python3.9.____cpython osx_arm64_mpimpichnumpy1.23python3.11.____cpython: CONFIG: osx_arm64_mpimpichnumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpimpichnumpy1.23python3._h16632bb9fa osx_arm64_mpinompinumpy1.22python3.10.____cpython: CONFIG: osx_arm64_mpinompinumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpinompinumpy1.22python3._hf338eb8225 osx_arm64_mpinompinumpy1.22python3.9.____cpython: CONFIG: osx_arm64_mpinompinumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpinompinumpy1.22python3.9.____cpython osx_arm64_mpinompinumpy1.23python3.11.____cpython: CONFIG: osx_arm64_mpinompinumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpinompinumpy1.23python3._ha64012be9c osx_arm64_mpiopenmpinumpy1.22python3.10.____cpython: CONFIG: osx_arm64_mpiopenmpinumpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpiopenmpinumpy1.22python_h9c171c2f17 osx_arm64_mpiopenmpinumpy1.22python3.9.____cpython: CONFIG: osx_arm64_mpiopenmpinumpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpiopenmpinumpy1.22python_hf37ea713c2 osx_arm64_mpiopenmpinumpy1.23python3.11.____cpython: CONFIG: osx_arm64_mpiopenmpinumpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: osx_arm64_mpiopenmpinumpy1.23python_h4b7febe372 timeoutInMinutes: 360 steps: @@ -102,33 +84,4 @@ jobs: env: BINSTAR_TOKEN: $(BINSTAR_TOKEN) FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) - STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) - - script: | - export CI=azure - export CI_RUN_ID=$(build.BuildNumber).$(system.JobAttempt) - export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) - export CONDA_BLD_DIR=/Users/runner/miniforge3/conda-bld - export ARTIFACT_STAGING_DIR="$(Build.ArtifactStagingDirectory)" - # Archive everything in CONDA_BLD_DIR except environments - export BLD_ARTIFACT_PREFIX=conda_artifacts - if [[ "$AGENT_JOBSTATUS" == "Failed" ]]; then - # Archive the CONDA_BLD_DIR environments only when the job fails - export ENV_ARTIFACT_PREFIX=conda_envs - fi - ./.scripts/create_conda_build_artifacts.sh - displayName: Prepare conda build artifacts - condition: succeededOrFailed() - - - task: PublishPipelineArtifact@1 - displayName: Store conda build artifacts - condition: not(eq(variables.BLD_ARTIFACT_PATH, '')) - inputs: - targetPath: $(BLD_ARTIFACT_PATH) - artifactName: $(BLD_ARTIFACT_NAME) - - - task: PublishPipelineArtifact@1 - displayName: Store conda build 
environment artifacts - condition: not(eq(variables.ENV_ARTIFACT_PATH, '')) - inputs: - targetPath: $(ENV_ARTIFACT_PATH) - artifactName: $(ENV_ARTIFACT_NAME) \ No newline at end of file + STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file diff --git a/.azure-pipelines/azure-pipelines-win.yml b/.azure-pipelines/azure-pipelines-win.yml index 711eb80..e2307e3 100755 --- a/.azure-pipelines/azure-pipelines-win.yml +++ b/.azure-pipelines/azure-pipelines-win.yml @@ -11,19 +11,15 @@ jobs: win_64_numpy1.22python3.10.____cpython: CONFIG: win_64_numpy1.22python3.10.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: win_64_numpy1.22python3.10.____cpython win_64_numpy1.22python3.9.____cpython: CONFIG: win_64_numpy1.22python3.9.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: win_64_numpy1.22python3.9.____cpython win_64_numpy1.23python3.11.____cpython: CONFIG: win_64_numpy1.23python3.11.____cpython UPLOAD_PACKAGES: 'True' - SHORT_CONFIG: win_64_numpy1.23python3.11.____cpython timeoutInMinutes: 360 variables: CONDA_BLD_PATH: D:\\bld\\ - SET_PAGEFILE: 'True' UPLOAD_TEMP: D:\\tmp steps: @@ -59,31 +55,4 @@ jobs: UPLOAD_TEMP: $(UPLOAD_TEMP) BINSTAR_TOKEN: $(BINSTAR_TOKEN) FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) - STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) - - script: | - set CI=azure - set CI_RUN_ID=$(build.BuildNumber).$(system.JobAttempt) - set FEEDSTOCK_NAME=$(build.Repository.Name) - set ARTIFACT_STAGING_DIR=$(Build.ArtifactStagingDirectory) - set CONDA_BLD_DIR=$(CONDA_BLD_PATH) - set BLD_ARTIFACT_PREFIX=conda_artifacts - if "%AGENT_JOBSTATUS%" == "Failed" ( - set ENV_ARTIFACT_PREFIX=conda_envs - ) - call ".scripts\create_conda_build_artifacts.bat" - displayName: Prepare conda build artifacts - condition: succeededOrFailed() - - - task: PublishPipelineArtifact@1 - displayName: Store conda build artifacts - condition: not(eq(variables.BLD_ARTIFACT_PATH, '')) - inputs: - targetPath: $(BLD_ARTIFACT_PATH) - artifactName: $(BLD_ARTIFACT_NAME) - - - task: PublishPipelineArtifact@1 - displayName: Store conda build environment artifacts - condition: not(eq(variables.ENV_ARTIFACT_PATH, '')) - inputs: - targetPath: $(ENV_ARTIFACT_PATH) - artifactName: $(ENV_ARTIFACT_NAME) \ No newline at end of file + STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file diff --git a/.ci_support/linux_aarch64_mpimpichnumpy1.22python3.10.____cpython.yaml b/.ci_support/linux_aarch64_mpimpichnumpy1.22python3.10.____cpython.yaml new file mode 100644 index 0000000..e1a6255 --- /dev/null +++ b/.ci_support/linux_aarch64_mpimpichnumpy1.22python3.10.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- mpich +mpich: +- '4' +numpy: +- '1.22' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.10.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.ci_support/linux_aarch64_mpimpichnumpy1.22python3.9.____cpython.yaml b/.ci_support/linux_aarch64_mpimpichnumpy1.22python3.9.____cpython.yaml new file mode 100644 index 0000000..98e45e6 --- /dev/null +++ b/.ci_support/linux_aarch64_mpimpichnumpy1.22python3.9.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- 
aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- mpich +mpich: +- '4' +numpy: +- '1.22' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.9.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.ci_support/linux_aarch64_mpimpichnumpy1.23python3.11.____cpython.yaml b/.ci_support/linux_aarch64_mpimpichnumpy1.23python3.11.____cpython.yaml new file mode 100644 index 0000000..d0f2f25 --- /dev/null +++ b/.ci_support/linux_aarch64_mpimpichnumpy1.23python3.11.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- mpich +mpich: +- '4' +numpy: +- '1.23' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.11.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.ci_support/linux_aarch64_mpinompinumpy1.22python3.10.____cpython.yaml b/.ci_support/linux_aarch64_mpinompinumpy1.22python3.10.____cpython.yaml new file mode 100644 index 0000000..10f681b --- /dev/null +++ b/.ci_support/linux_aarch64_mpinompinumpy1.22python3.10.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- nompi +mpich: +- '4' +numpy: +- '1.22' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.10.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.ci_support/linux_aarch64_mpinompinumpy1.22python3.9.____cpython.yaml b/.ci_support/linux_aarch64_mpinompinumpy1.22python3.9.____cpython.yaml new file mode 100644 index 0000000..ce1120b --- /dev/null +++ b/.ci_support/linux_aarch64_mpinompinumpy1.22python3.9.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- nompi +mpich: +- '4' +numpy: +- '1.22' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.9.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.ci_support/linux_aarch64_mpinompinumpy1.23python3.11.____cpython.yaml b/.ci_support/linux_aarch64_mpinompinumpy1.23python3.11.____cpython.yaml new file mode 100644 index 0000000..33ab9ee --- /dev/null +++ 
b/.ci_support/linux_aarch64_mpinompinumpy1.23python3.11.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- nompi +mpich: +- '4' +numpy: +- '1.23' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.11.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.ci_support/linux_aarch64_mpiopenmpinumpy1.22python3.10.____cpython.yaml b/.ci_support/linux_aarch64_mpiopenmpinumpy1.22python3.10.____cpython.yaml new file mode 100644 index 0000000..8ecfe24 --- /dev/null +++ b/.ci_support/linux_aarch64_mpiopenmpinumpy1.22python3.10.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- openmpi +mpich: +- '4' +numpy: +- '1.22' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.10.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.ci_support/linux_aarch64_mpiopenmpinumpy1.22python3.9.____cpython.yaml b/.ci_support/linux_aarch64_mpiopenmpinumpy1.22python3.9.____cpython.yaml new file mode 100644 index 0000000..2d6a5c6 --- /dev/null +++ b/.ci_support/linux_aarch64_mpiopenmpinumpy1.22python3.9.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- openmpi +mpich: +- '4' +numpy: +- '1.22' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.9.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.ci_support/linux_aarch64_mpiopenmpinumpy1.23python3.11.____cpython.yaml b/.ci_support/linux_aarch64_mpiopenmpinumpy1.23python3.11.____cpython.yaml new file mode 100644 index 0000000..c3f3dfc --- /dev/null +++ b/.ci_support/linux_aarch64_mpiopenmpinumpy1.23python3.11.____cpython.yaml @@ -0,0 +1,41 @@ +BUILD: +- aarch64-conda_cos7-linux-gnu +c_compiler: +- gcc +c_compiler_version: +- '12' +cdt_arch: +- aarch64 +cdt_name: +- cos7 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +docker_image: +- quay.io/condaforge/linux-anvil-cos7-x86_64 +fortran_compiler: +- gfortran +fortran_compiler_version: +- '12' +mpi: +- openmpi +mpich: +- '4' +numpy: +- '1.23' +openmpi: +- '4' +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.11.* *_cpython +target_platform: +- linux-aarch64 +zip_keys: +- - c_compiler_version + - fortran_compiler_version +- - python + - numpy diff --git a/.gitignore b/.gitignore index c89ecb7..179afe5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 
+1,24 @@ -*.pyc +# User content belongs under recipe/. +# Feedstock configuration goes in `conda-forge.yml` +# Everything else is managed by the conda-smithy rerender process. +# Please do not modify + +# Ignore all files and folders in root +* +!/conda-forge.yml + +# Don't ignore any files/folders if the parent folder is 'un-ignored' +# This also avoids warnings when adding an already-checked file with an ignored parent. +!/**/ +# Don't ignore any files/folders recursively in the following folders +!/recipe/** +!/.ci_support/** -build_artifacts +# Since we ignore files/folders recursively, any folders inside +# build_artifacts gets ignored which trips some build systems. +# To avoid that we 'un-ignore' all files/folders recursively +# and only ignore the root build_artifacts folder. +!/build_artifacts/** +/build_artifacts + +*.pyc diff --git a/.scripts/build_steps.sh b/.scripts/build_steps.sh index eba1dfd..beda247 100755 --- a/.scripts/build_steps.sh +++ b/.scripts/build_steps.sh @@ -57,12 +57,6 @@ if [[ -f "${FEEDSTOCK_ROOT}/LICENSE.txt" ]]; then cp "${FEEDSTOCK_ROOT}/LICENSE.txt" "${RECIPE_ROOT}/recipe-scripts-license.txt" fi -if [[ "${sha:-}" == "" ]]; then - pushd ${FEEDSTOCK_ROOT} - sha=$(git rev-parse HEAD) - popd -fi - if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}" diff --git a/.scripts/create_conda_build_artifacts.bat b/.scripts/create_conda_build_artifacts.bat deleted file mode 100755 index 2853cfd..0000000 --- a/.scripts/create_conda_build_artifacts.bat +++ /dev/null @@ -1,80 +0,0 @@ -setlocal enableextensions enabledelayedexpansion - -rem INPUTS (environment variables that need to be set before calling this script): -rem -rem CI (azure/github_actions/UNSET) -rem CI_RUN_ID (unique identifier for the CI job run) -rem FEEDSTOCK_NAME -rem CONFIG (build matrix configuration string) -rem SHORT_CONFIG (uniquely-shortened configuration string) -rem CONDA_BLD_DIR (path to the conda-bld directory) -rem ARTIFACT_STAGING_DIR (use working directory if unset) -rem BLD_ARTIFACT_PREFIX (prefix for the conda build artifact name, skip if unset) -rem ENV_ARTIFACT_PREFIX (prefix for the conda build environments artifact name, skip if unset) - -rem OUTPUTS -rem -rem BLD_ARTIFACT_NAME -rem BLD_ARTIFACT_PATH -rem ENV_ARTIFACT_NAME -rem ENV_ARTIFACT_PATH - -rem Check that the conda-build directory exists -if not exist %CONDA_BLD_DIR% ( - echo conda-build directory does not exist - exit 1 -) - -if not defined ARTIFACT_STAGING_DIR ( - rem Set staging dir to the working dir - set ARTIFACT_STAGING_DIR=%cd% -) - -rem Set a unique ID for the artifact(s), specialized for this particular job run -set ARTIFACT_UNIQUE_ID=%CI_RUN_ID%_%CONFIG% -if not "%ARTIFACT_UNIQUE_ID%" == "%ARTIFACT_UNIQUE_ID:~0,80%" ( - set ARTIFACT_UNIQUE_ID=%CI_RUN_ID%_%SHORT_CONFIG% -) - -rem Set a descriptive ID for the archive(s), specialized for this particular job run -set ARCHIVE_UNIQUE_ID=%CI_RUN_ID%_%CONFIG% - -rem Make the build artifact zip -if defined BLD_ARTIFACT_PREFIX ( - set BLD_ARTIFACT_NAME=%BLD_ARTIFACT_PREFIX%_%ARTIFACT_UNIQUE_ID% - echo BLD_ARTIFACT_NAME: !BLD_ARTIFACT_NAME! - - set "BLD_ARTIFACT_PATH=%ARTIFACT_STAGING_DIR%\%FEEDSTOCK_NAME%_%BLD_ARTIFACT_PREFIX%_%ARCHIVE_UNIQUE_ID%.zip" - 7z a "!BLD_ARTIFACT_PATH!" "%CONDA_BLD_DIR%" -xr^^!.git/ -xr^^!_*_env*/ -xr^^!*_cache/ -bb - if errorlevel 1 exit 1 - echo BLD_ARTIFACT_PATH: !BLD_ARTIFACT_PATH! 
- - if "%CI%" == "azure" ( - echo ##vso[task.setVariable variable=BLD_ARTIFACT_NAME]!BLD_ARTIFACT_NAME! - echo ##vso[task.setVariable variable=BLD_ARTIFACT_PATH]!BLD_ARTIFACT_PATH! - ) - if "%CI%" == "github_actions" ( - echo BLD_ARTIFACT_NAME=!BLD_ARTIFACT_NAME!>> !GITHUB_OUTPUT! - echo BLD_ARTIFACT_PATH=!BLD_ARTIFACT_PATH!>> !GITHUB_OUTPUT! - ) -) - -rem Make the environments artifact zip -if defined ENV_ARTIFACT_PREFIX ( - set ENV_ARTIFACT_NAME=!ENV_ARTIFACT_PREFIX!_%ARTIFACT_UNIQUE_ID% - echo ENV_ARTIFACT_NAME: !ENV_ARTIFACT_NAME! - - set "ENV_ARTIFACT_PATH=%ARTIFACT_STAGING_DIR%\%FEEDSTOCK_NAME%_%ENV_ARTIFACT_PREFIX%_%ARCHIVE_UNIQUE_ID%.zip" - 7z a "!ENV_ARTIFACT_PATH!" -r "%CONDA_BLD_DIR%"/_*_env*/ -bb - if errorlevel 1 exit 1 - echo ENV_ARTIFACT_PATH: !ENV_ARTIFACT_PATH! - - if "%CI%" == "azure" ( - echo ##vso[task.setVariable variable=ENV_ARTIFACT_NAME]!ENV_ARTIFACT_NAME! - echo ##vso[task.setVariable variable=ENV_ARTIFACT_PATH]!ENV_ARTIFACT_PATH! - ) - if "%CI%" == "github_actions" ( - echo ENV_ARTIFACT_NAME=!ENV_ARTIFACT_NAME!>> !GITHUB_OUTPUT! - echo ENV_ARTIFACT_PATH=!ENV_ARTIFACT_PATH!>> !GITHUB_OUTPUT! - ) -) \ No newline at end of file diff --git a/.scripts/create_conda_build_artifacts.sh b/.scripts/create_conda_build_artifacts.sh deleted file mode 100755 index 17ec086..0000000 --- a/.scripts/create_conda_build_artifacts.sh +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env bash - -# INPUTS (environment variables that need to be set before calling this script): -# -# CI (azure/github_actions/UNSET) -# CI_RUN_ID (unique identifier for the CI job run) -# FEEDSTOCK_NAME -# CONFIG (build matrix configuration string) -# SHORT_CONFIG (uniquely-shortened configuration string) -# CONDA_BLD_DIR (path to the conda-bld directory) -# ARTIFACT_STAGING_DIR (use working directory if unset) -# BLD_ARTIFACT_PREFIX (prefix for the conda build artifact name, skip if unset) -# ENV_ARTIFACT_PREFIX (prefix for the conda build environments artifact name, skip if unset) - -# OUTPUTS -# -# BLD_ARTIFACT_NAME -# BLD_ARTIFACT_PATH -# ENV_ARTIFACT_NAME -# ENV_ARTIFACT_PATH - -source .scripts/logging_utils.sh - -# DON'T do set -x, because it results in double echo-ing pipeline commands -# and that might end up inserting extraneous quotation marks in output variables -set -e - -# Check that the conda-build directory exists -if [ ! -d "$CONDA_BLD_DIR" ]; then - echo "conda-build directory does not exist" - exit 1 -fi - -# Set staging dir to the working dir, in Windows style if applicable -if [[ -z "${ARTIFACT_STAGING_DIR}" ]]; then - if pwd -W; then - ARTIFACT_STAGING_DIR=$(pwd -W) - else - ARTIFACT_STAGING_DIR=$PWD - fi -fi -echo "ARTIFACT_STAGING_DIR: $ARTIFACT_STAGING_DIR" - -FEEDSTOCK_ROOT=$(cd "$(dirname "$0")/.."; pwd;) -if [ -z ${FEEDSTOCK_NAME} ]; then - export FEEDSTOCK_NAME=$(basename ${FEEDSTOCK_ROOT}) -fi - -# Set a unique ID for the artifact(s), specialized for this particular job run -ARTIFACT_UNIQUE_ID="${CI_RUN_ID}_${CONFIG}" -if [[ ${#ARTIFACT_UNIQUE_ID} -gt 80 ]]; then - ARTIFACT_UNIQUE_ID="${CI_RUN_ID}_${SHORT_CONFIG}" -fi -echo "ARTIFACT_UNIQUE_ID: $ARTIFACT_UNIQUE_ID" - -# Set a descriptive ID for the archive(s), specialized for this particular job run -ARCHIVE_UNIQUE_ID="${CI_RUN_ID}_${CONFIG}" - -# Make the build artifact zip -if [[ ! 
-z "$BLD_ARTIFACT_PREFIX" ]]; then - export BLD_ARTIFACT_NAME="${BLD_ARTIFACT_PREFIX}_${ARTIFACT_UNIQUE_ID}" - export BLD_ARTIFACT_PATH="${ARTIFACT_STAGING_DIR}/${FEEDSTOCK_NAME}_${BLD_ARTIFACT_PREFIX}_${ARCHIVE_UNIQUE_ID}.zip" - - ( startgroup "Archive conda build directory" ) 2> /dev/null - - # Try 7z and fall back to zip if it fails (for cross-platform use) - if ! 7z a "$BLD_ARTIFACT_PATH" "$CONDA_BLD_DIR" '-xr!.git/' '-xr!_*_env*/' '-xr!*_cache/' -bb; then - pushd "$CONDA_BLD_DIR" - zip -r -y -T "$BLD_ARTIFACT_PATH" . -x '*.git/*' '*_*_env*/*' '*_cache/*' - popd - fi - - ( endgroup "Archive conda build directory" ) 2> /dev/null - - echo "BLD_ARTIFACT_NAME: $BLD_ARTIFACT_NAME" - echo "BLD_ARTIFACT_PATH: $BLD_ARTIFACT_PATH" - - if [[ "$CI" == "azure" ]]; then - echo "##vso[task.setVariable variable=BLD_ARTIFACT_NAME]$BLD_ARTIFACT_NAME" - echo "##vso[task.setVariable variable=BLD_ARTIFACT_PATH]$BLD_ARTIFACT_PATH" - elif [[ "$CI" == "github_actions" ]]; then - echo "BLD_ARTIFACT_NAME=$BLD_ARTIFACT_NAME" >> $GITHUB_OUTPUT - echo "BLD_ARTIFACT_PATH=$BLD_ARTIFACT_PATH" >> $GITHUB_OUTPUT - fi -fi - -# Make the environments artifact zip -if [[ ! -z "$ENV_ARTIFACT_PREFIX" ]]; then - export ENV_ARTIFACT_NAME="${ENV_ARTIFACT_PREFIX}_${ARTIFACT_UNIQUE_ID}" - export ENV_ARTIFACT_PATH="${ARTIFACT_STAGING_DIR}/${FEEDSTOCK_NAME}_${ENV_ARTIFACT_PREFIX}_${ARCHIVE_UNIQUE_ID}.zip" - - ( startgroup "Archive conda build environments" ) 2> /dev/null - - # Try 7z and fall back to zip if it fails (for cross-platform use) - if ! 7z a "$ENV_ARTIFACT_PATH" -r "$CONDA_BLD_DIR"/'_*_env*/' -bb; then - pushd "$CONDA_BLD_DIR" - zip -r -y -T "$ENV_ARTIFACT_PATH" . -i '*_*_env*/*' - popd - fi - - ( endgroup "Archive conda build environments" ) 2> /dev/null - - echo "ENV_ARTIFACT_NAME: $ENV_ARTIFACT_NAME" - echo "ENV_ARTIFACT_PATH: $ENV_ARTIFACT_PATH" - - if [[ "$CI" == "azure" ]]; then - echo "##vso[task.setVariable variable=ENV_ARTIFACT_NAME]$ENV_ARTIFACT_NAME" - echo "##vso[task.setVariable variable=ENV_ARTIFACT_PATH]$ENV_ARTIFACT_PATH" - elif [[ "$CI" == "github_actions" ]]; then - echo "ENV_ARTIFACT_NAME=$ENV_ARTIFACT_NAME" >> $GITHUB_OUTPUT - echo "ENV_ARTIFACT_PATH=$ENV_ARTIFACT_PATH" >> $GITHUB_OUTPUT - fi -fi \ No newline at end of file diff --git a/.scripts/run_docker_build.sh b/.scripts/run_docker_build.sh index b70ef01..00f377a 100755 --- a/.scripts/run_docker_build.sh +++ b/.scripts/run_docker_build.sh @@ -21,6 +21,12 @@ if [ -z ${FEEDSTOCK_NAME} ]; then export FEEDSTOCK_NAME=$(basename ${FEEDSTOCK_ROOT}) fi +if [[ "${sha:-}" == "" ]]; then + pushd "${FEEDSTOCK_ROOT}" + sha=$(git rev-parse HEAD) + popd +fi + docker info # In order for the conda-build process in the container to write to the mounted diff --git a/README.md b/README.md index 4fb1f6b..a1272aa 100644 --- a/README.md +++ b/README.md @@ -100,6 +100,69 @@ Current build status variant + + linux_aarch64_mpimpichnumpy1.22python3.10.____cpython + + + variant + + + + linux_aarch64_mpimpichnumpy1.22python3.9.____cpython + + + variant + + + + linux_aarch64_mpimpichnumpy1.23python3.11.____cpython + + + variant + + + + linux_aarch64_mpinompinumpy1.22python3.10.____cpython + + + variant + + + + linux_aarch64_mpinompinumpy1.22python3.9.____cpython + + + variant + + + + linux_aarch64_mpinompinumpy1.23python3.11.____cpython + + + variant + + + + linux_aarch64_mpiopenmpinumpy1.22python3.10.____cpython + + + variant + + + + linux_aarch64_mpiopenmpinumpy1.22python3.9.____cpython + + + variant + + + + linux_aarch64_mpiopenmpinumpy1.23python3.11.____cpython + + + 
variant + + osx_64_mpimpichnumpy1.22python3.10.____cpython diff --git a/conda-forge.yml b/conda-forge.yml index 07235c9..de20696 100644 --- a/conda-forge.yml +++ b/conda-forge.yml @@ -1,16 +1,12 @@ -azure: - settings_win: - variables: - SET_PAGEFILE: 'True' - store_build_artifacts: true build_platform: osx_arm64: osx_64 + linux_aarch64: linux_64 conda_build: pkg_format: '2' conda_forge_output_validation: true github: branch_name: main tooling_branch_name: main -provider: - win: azure -test_on_native_only: true +test: native_and_emulated +#test_on_native_only: true + diff --git a/recipe/bld.bat b/recipe/bld.bat new file mode 100644 index 0000000..193efe1 --- /dev/null +++ b/recipe/bld.bat @@ -0,0 +1,24 @@ +@echo on + +cd %SRC_DIR% +set NP_INC=%SP_DIR%\numpy\core\include +REM set CC=cl +REM set FC=flang +REM set CC_LD=link + +REM See the unix build.sh for more details on the build process below. +set MESON_ARGS=-Dincdir_numpy=%NP_INC% -Dpython_target=%PYTHON% %EXTRA_FLAGS% + +%PYTHON% -m build -n -x -w . +REM %PYTHON% -m pip install --prefix "%PREFIX%" --no-deps dist/wisdem-3.9-py3-none-any.whl +REM %PYTHON% setup.py install --single-version-externally-managed --record=record.txt +REM pip install --prefix "%PREFIX%" --no-deps --no-index --find-links dist pyoptsparse +REM `pip install dist\numpy*.whl` does not work on windows, +REM so use a loop; there's only one wheel in dist/ anyway +for /f %%f in ('dir /b /S .\dist') do ( + REM need to use force to reinstall the tests the second time + REM (otherwise pip thinks the package is installed already) + %PYTHON% -m pip install --prefix "%PREFIX%" --no-deps %%f + if %ERRORLEVEL% neq 0 exit 1 +) +REM rmdir /s /q %SP_DIR%\meson_build diff --git a/recipe/build.sh b/recipe/build.sh new file mode 100644 index 0000000..cab7f86 --- /dev/null +++ b/recipe/build.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +set -eox pipefail + +#export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:$PREFIX/lib/pkgconfig:$BUILD_PREFIX/lib/pkgconfig + +#EXTRA_FLAGS="" +#NP_INC="${SP_DIR}/numpy/core/include/" +echo "PYTHON TARGET=${PYTHON}" + +if [ `uname` == Darwin ]; then + export LDFLAGS="$LDFLAGS -Wl,-flat_namespace,-undefined,suppress" +fi + +#if [[ $CONDA_BUILD_CROSS_COMPILATION == "1" ]]; then + # Add pkg-config to cross-file binaries since meson will disable it + # See https://github.com/mesonbuild/meson/issues/7276 + #echo "pkg-config = '$(which pkg-config)'" >> "$BUILD_PREFIX"/meson_cross_file.txt + # Use Meson cross-file flag to enable cross compilation + #EXTRA_FLAGS="--cross-file $BUILD_PREFIX/meson_cross_file.txt" + #NP_INC="" +#fi + +# This is done on two lines so that the command will return failure info if it fails +#PKG_CONFIG=$(which pkg-config) +#export PKG_CONFIG + + +#cd "${SRC_DIR}" + +# MESON_ARGS is used within setup.py to pass extra arguments to meson +# We need these so that dependencies on the build machine are not incorrectly used by meson when building for a different target +#export MESON_ARGS="-Dincdir_numpy=${NP_INC} -Dpython_target=${PYTHON} ${MESON_ARGS}" +export MESON_ARGS="-Dpython_target=${PYTHON} ${MESON_ARGS}" + +# We use this instead of pip install . because the way meson builds from within a conda-build process puts the build +# artifacts where pip install . can't find them. Here we explicitly build the wheel into the working director, wherever that is +# and then tell pip to install the wheel in the working directory. 
Also, python -m build is now the recommended way to build +# see https://packaging.python.org/en/latest/tutorials/packaging-projects/ +$PYTHON -m build -n -x -w . +$PYTHON -m pip install --prefix "${PREFIX}" --no-deps dist/*.whl +#pip install --prefix "${PREFIX}" --no-deps --no-index --find-links dist pyoptsparse +#bin/rm -rf ${SP_DIR}/meson_build diff --git a/recipe/meson_files.patch b/recipe/meson_files.patch new file mode 100644 index 0000000..2140a57 --- /dev/null +++ b/recipe/meson_files.patch @@ -0,0 +1,18 @@ +diff --git a/meson.build b/meson.build +index a5a31a80..3afde1cc 100644 +--- a/meson.build ++++ b/meson.build +@@ -28,12 +28,7 @@ endif + # https://mesonbuild.com/Python-module.html + # Here we differentiate from the python used by meson, py3_command, and that python target, py3_target. This is useful + # when cross compiling like on conda-forge +-py_mod = import('python') +-if get_option('python_target') != '' +- py3 = py_mod.find_installation(get_option('python_target')) +-else +- py3 = py_mod.find_installation('python') +-endif ++py3 = import('python').find_installation(pure: false) + py3_dep = py3.dependency() + + message(py3.path()) diff --git a/recipe/meta.yaml b/recipe/meta.yaml index dc33a10..ae3bd8a 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -1,6 +1,6 @@ {% set version = "3.12.0" %} {% set name = "wisdem" %} -{% set build = 0 %} +{% set build = 2 %} # MPI now supported! https://conda-forge.org/docs/maintainer/knowledge_base.html#message-passing-interface-mpi # ensure mpi is defined (needed for conda-smithy recipe-lint) @@ -18,7 +18,9 @@ source: url: https://github.com/WISDEM/WISDEM/archive/v{{ version }}.tar.gz # Download the tar-ball and do: openssl sha256 file.tar.gz to update sha256: 8930a03f5d5bfa0499d742b03f86bbd0cc883c75dd9bc11e342468cc7a2b9f8f - + #patches: + # - meson_files.patch + build: number: {{ build }} @@ -41,11 +43,10 @@ build: {% endif %} skip: true # [py<39] - script: - - {{ PYTHON }} -m pip install . --no-deps -vv - - rm -rf {{ SP_DIR }}/meson_build # [not win] - - rmdir /s /q {{ SP_DIR }}\meson_build # [win] - #script: {{ PYTHON }} setup.py install --single-version-externally-managed --record=record.txt + #script: + # - {{ PYTHON }} -m pip install . --no-deps -vv + # - rm -rf {{ SP_DIR }}/meson_build # [not win] + # - rmdir /s /q {{ SP_DIR }}\meson_build # [win] requirements: build: @@ -53,15 +54,14 @@ requirements: - {{ compiler('fortran') }} - {{ compiler('m2w64_c') }} # [win] - cross-python_{{ target_platform }} # [build_platform != target_platform] + - python # [build_platform != target_platform] - make - meson - ninja - numpy - - pip - pkg-config - - python - - python-build - wheel + - python-build host: - cython # - libblas
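For context on the new recipe scripts above: both recipe/build.sh and recipe/bld.bat replace the old "pip install ." recipe script with an explicit wheel build followed by a pip install of that wheel, because the comments note that pip cannot find the artifacts meson emits inside a conda-build run. A minimal sketch of that flow, assuming PYTHON, PREFIX, and any pre-set MESON_ARGS are supplied by the conda-build environment exactly as in recipe/build.sh (this is an illustration of the pattern, not a drop-in replacement for the recipe scripts):

# Forward target-specific options to meson through setup.py (same variable the recipe exports).
export MESON_ARGS="-Dpython_target=${PYTHON} ${MESON_ARGS}"
# Build a wheel into ./dist without build isolation or dependency checks,
# then install that single wheel into the host prefix without resolving dependencies.
$PYTHON -m build --no-isolation --skip-dependency-check --wheel .
$PYTHON -m pip install --prefix "${PREFIX}" --no-deps dist/*.whl

The Windows script follows the same pattern; it iterates over dist\ with a for loop only because, as its comment notes, a *.whl glob does not expand under cmd.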