From 49f7a8c8cf3314bae26f7650da8b6845e5d79f3a Mon Sep 17 00:00:00 2001
From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com>
Date: Tue, 23 Jul 2024 10:41:19 -0500
Subject: [PATCH] Add publish files from branch workflow (#732)

* Remove Fortran for examples until better supported in the library

* Add msi binary
---
 .github/workflows/cmake-ctest.yml         | 36 +++++++++++++--
 .github/workflows/publish-branch.yml      | 44 +++++++++++++++++++
 .github/workflows/publish-release.yml     |  4 +-
 HDF4Examples/CMakePresets.json            |  4 --
 .../config/cmake/HDFExampleMacros.cmake   | 22 +++++-----
 5 files changed, 89 insertions(+), 21 deletions(-)
 create mode 100644 .github/workflows/publish-branch.yml

diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml
index a8c7531cf..4c388c834 100644
--- a/.github/workflows/cmake-ctest.yml
+++ b/.github/workflows/cmake-ctest.yml
@@ -86,11 +86,15 @@ jobs:
           cmake --workflow --preset=${{ inputs.preset_name }}-MSVC --fresh
         shell: bash
 
-      - name: Publish binary (Windows)
-        id: publish-ctest-binary
+      - name: Create build folders (Windows)
         run: |
           mkdir "${{ runner.workspace }}/build"
           mkdir "${{ runner.workspace }}/build/hdf4"
+        shell: bash
+
+      - name: Publish binary (Windows)
+        id: publish-ctest-binary
+        run: |
           Copy-Item -Path ${{ runner.workspace }}/hdf4/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build/hdf4/
           Copy-Item -Path ${{ runner.workspace }}/hdf4/build/${{ inputs.preset_name }}-MSVC/README.txt -Destination ${{ runner.workspace }}/build/hdf4/
           Copy-Item -Path ${{ runner.workspace }}/hdf4/build/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/build/hdf4/ -Include *.zip
@@ -98,6 +102,16 @@ jobs:
           7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip hdf4
         shell: pwsh
 
+      - name: Publish msi binary (Windows)
+        id: publish-ctest-msi-binary
+        run: |
+          Copy-Item -Path ${{ runner.workspace }}/hdf4/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build/hdf4/
+          Copy-Item -Path ${{ runner.workspace }}/hdf4/build/${{ inputs.preset_name }}-MSVC/README.txt -Destination ${{ runner.workspace }}/build/hdf4/
+          Copy-Item -Path ${{ runner.workspace }}/hdf4/build/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/build/hdf4/ -Include *.msi
+          cd "${{ runner.workspace }}/build"
+          7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_msi.zip hdf4
+        shell: pwsh
+
       - name: List files in the space (Windows)
         run: |
           Get-ChildItem -Path ${{ github.workspace }}
@@ -380,11 +394,15 @@ jobs:
           cmake --workflow --preset=${{ inputs.preset_name }}-win-Intel --fresh
         shell: pwsh
 
-      - name: Publish binary (Windows_intel)
-        id: publish-ctest-binary
+      - name: Create build folders (Windows_intel)
         run: |
           mkdir "${{ runner.workspace }}/build"
           mkdir "${{ runner.workspace }}/build/hdf4"
+        shell: bash
+
+      - name: Publish binary (Windows_intel)
+        id: publish-ctest-binary
+        run: |
           Copy-Item -Path ${{ runner.workspace }}/hdf4/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build/hdf4/
           Copy-Item -Path ${{ runner.workspace }}/hdf4/build/${{ inputs.preset_name }}-Intel/README.txt -Destination ${{ runner.workspace }}/build/hdf4/
           Copy-Item -Path ${{ runner.workspace }}/hdf4/build/${{ inputs.preset_name }}-Intel/* -Destination ${{ runner.workspace }}/build/hdf4/ -Include *.zip
@@ -392,6 +410,16 @@ jobs:
           7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip hdf4
         shell: pwsh
 
+      - name: Publish msi binary (Windows_intel)
+        id: publish-ctest-msi-binary
+        run: |
+          Copy-Item -Path ${{ runner.workspace }}/hdf4/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build/hdf4/
+          Copy-Item -Path ${{ runner.workspace }}/hdf4/build/${{ inputs.preset_name }}-Intel/README.txt -Destination ${{ runner.workspace }}/build/hdf4/
+          Copy-Item -Path ${{ runner.workspace }}/hdf4/build/${{ inputs.preset_name }}-Intel/* -Destination ${{ runner.workspace }}/build/hdf4/ -Include *.msi
+          cd "${{ runner.workspace }}/build"
+          7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel_msi.zip hdf4
+        shell: pwsh
+
       - name: List files in the space (Windows_intel)
         run: |
           Get-ChildItem -Path ${{ github.workspace }}
diff --git a/.github/workflows/publish-branch.yml b/.github/workflows/publish-branch.yml
new file mode 100644
index 000000000..0986f0edb
--- /dev/null
+++ b/.github/workflows/publish-branch.yml
@@ -0,0 +1,44 @@
+name: hdf4 publish files in HDF4 folder from branch to S3
+
+# Triggers the workflow on demand
+on:
+  workflow_dispatch:
+    inputs:
+      local_dir:
+        description: 'HDF4 local directory'
+        type: string
+        required: true
+      target_dir:
+        description: 'hdf4 target bucket directory'
+        type: string
+        required: true
+permissions:
+  contents: read
+
+jobs:
+  publish-tag:
+    runs-on: ubuntu-latest
+    steps:
+      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+      - name: Get Sources
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+        with:
+          fetch-depth: 0
+          ref: '${{ github.head_ref || github.ref_name }}'
+
+      - name: List files for the space
+        run: |
+          ls -l ${{ github.workspace }}
+          ls ${{ github.workspace }}/HDF4
+
+      - name: Setup AWS CLI
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: ${{ secrets.AWS_REGION }}
+
+      - name: Sync dir to S3 bucket
+        run: |
+          aws s3 sync ./HDF4/${{ inputs.local_dir }} s3://${{ secrets.AWS_S3_BUCKET }}/${{ vars.TARGET_PATH }}/${{ inputs.target_dir }}
+
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
index 7e7bc4217..799448f9d 100644
--- a/.github/workflows/publish-release.yml
+++ b/.github/workflows/publish-release.yml
@@ -47,9 +47,9 @@ jobs:
           aws s3 sync ./HDF4 s3://${{ secrets.AWS_S3_BUCKET }}/${{ vars.TARGET_PATH }}/${{ inputs.target_dir }}/downloads --delete
 
       - name: Uncompress source (Linux)
-        run: tar -zxvf ${{ github.workspace }}/${{ inputs.use_hdf }}.doxygen.tar.gz
+        run: tar -zxvf ${{ github.workspace }}/HDF4/${{ inputs.use_hdf }}.doxygen.tar.gz
 
       - name: Sync userguide to S3 bucket
         run: |
-          aws s3 sync ./doxygen s3://${{ secrets.AWS_S3_BUCKET }}/${{ vars.TARGET_PATH }}/${{ inputs.target_dir }}/documentation --delete
+          aws s3 sync ./HDF4/doxygen s3://${{ secrets.AWS_S3_BUCKET }}/${{ vars.TARGET_PATH }}/${{ inputs.target_dir }}/documentation/doxygen --delete
 
diff --git a/HDF4Examples/CMakePresets.json b/HDF4Examples/CMakePresets.json
index 8b80ad5f5..774fc700a 100644
--- a/HDF4Examples/CMakePresets.json
+++ b/HDF4Examples/CMakePresets.json
@@ -43,7 +43,6 @@
       "inherits": [
         "ci-x64-Release-MSVC",
         "ci-StdJava",
-        "ci-StdFortran",
         "ci-StdShar"
       ]
     },
@@ -53,7 +52,6 @@
       "inherits": [
         "ci-x64-Release-Clang",
         "ci-StdJava",
-        "ci-StdFortran",
         "ci-StdShar"
       ]
     },
@@ -63,7 +61,6 @@
       "inherits": [
         "ci-x64-Release-GNUC",
         "ci-StdJava",
-        "ci-StdFortran",
         "ci-StdShar"
       ]
     },
@@ -73,7 +70,6 @@
       "inherits": [
"ci-x64-Release-Intel", "ci-StdJava", - "ci-StdFortran", "ci-StdShar" ] } diff --git a/HDF4Examples/config/cmake/HDFExampleMacros.cmake b/HDF4Examples/config/cmake/HDFExampleMacros.cmake index 952bed651..3f21c6198 100644 --- a/HDF4Examples/config/cmake/HDFExampleMacros.cmake +++ b/HDF4Examples/config/cmake/HDFExampleMacros.cmake @@ -33,7 +33,7 @@ macro (BASIC_SETTINGS varname) #----------------------------------------------------------------------------- # Compiler specific flags : Shouldn't there be compiler tests for these #----------------------------------------------------------------------------- - if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if (CMAKE_C_COMPILER_ID STREQUAL "GNU") set (CMAKE_C_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_C_FLAGS} -std=c99 -fomit-frame-pointer -finline-functions -fno-common") endif () if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") @@ -150,9 +150,17 @@ macro (HDF4_SUPPORT) endif () set (H4EX_HDF4_DUMP_EXECUTABLE $) else () + if (HDF4_BUILD_MODE) + string (TOUPPER "_${HDF4_BUILD_MODE}" UPPER_BUILD_TYPE) + elseif (HDF_CFG_NAME) + string (TOUPPER "_${HDF_CFG_NAME}" UPPER_BUILD_TYPE) + else () + set (UPPER_BUILD_TYPE "") + endif () + get_filename_component (_LIBRARY_PATH ${HDF4_INCLUDE_DIR} DIRECTORY) + set (HDF4_LIBRARY_PATH "${_LIBRARY_PATH}/lib") if (USE_SHARED_LIBS AND HDF4_shared_C_FOUND) set (H4EX_HDF4_LINK_LIBS ${H4EX_HDF4_LINK_LIBS} ${HDF4_C_SHARED_LIBRARY}) - set (HDF4_LIBRARY_PATH ${PACKAGE_PREFIX_DIR}/lib) else () set (H4EX_HDF4_LINK_LIBS ${H4EX_HDF4_LINK_LIBS} ${HDF4_C_STATIC_LIBRARY}) endif () @@ -185,15 +193,7 @@ macro (HDF4_SUPPORT) if (HDF_BUILD_JAVA AND HDF4_Java_FOUND) if (${HDF4_BUILD_JAVA}) set (CMAKE_JAVA_INCLUDE_PATH "${CMAKE_JAVA_INCLUDE_PATH};${HDF4_JAVA_INCLUDE_DIRS}") - if (HDF4_BUILD_MODE) - string(TOUPPER "${HDF4_BUILD_MODE}" UPPER_BUILD_TYPE) - get_target_property(libsoname ${HDF4_JAVA_LIBRARY} IMPORTED_SONAME_${UPPER_BUILD_TYPE}) - elseif (HDF_CFG_NAME) - string(TOUPPER "${HDF_CFG_NAME}" UPPER_BUILD_TYPE) - get_target_property(libsoname ${HDF4_JAVA_LIBRARY} IMPORTED_SONAME_${UPPER_BUILD_TYPE}) - else () - get_target_property(libsoname ${HDF4_JAVA_LIBRARY} IMPORTED_SONAME) - endif () + get_target_property (libsoname ${HDF4_JAVA_LIBRARY} IMPORTED_SONAME${UPPER_BUILD_TYPE}) get_filename_component (libname ${libsoname} NAME_WE) string (REGEX REPLACE "^lib" "" libname ${libname}) message (STATUS "HDF4 lib:${HDF4_JAVA_LIBRARY} OR ${libsoname} OR ${libname}")