From c42c43fd8f6d4c13d9fd4f4f34490fb2886eb1be Mon Sep 17 00:00:00 2001
From: Andrew Benson
Date: Thu, 21 Nov 2024 15:27:46 -0800
Subject: [PATCH 01/11] fix: Temporarily switch to using the HDF5 1.14.5 build environment

---
 .github/workflows/cicd.yml      | 18 +++++++++---------
 .github/workflows/prChecks.yml  | 10 +++++-----
 .github/workflows/profile.yml   |  2 +-
 .github/workflows/testCode.yml  |  2 +-
 .github/workflows/testModel.yml |  2 +-
 5 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml
index d106f17b23..9b8363a200 100644
--- a/.github/workflows/cicd.yml
+++ b/.github/workflows/cicd.yml
@@ -54,7 +54,7 @@ jobs:
   ### Static executable and test codes
   Build-Executable-Linux:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."
@@ -114,7 +114,7 @@ jobs:
   ### Non-static executable
   Build-Executable-Linux-Non-Static:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."
@@ -151,7 +151,7 @@ jobs:
   ### Executable instrumented
   Build-Executables-Instrumented-Linux:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."
@@ -180,7 +180,7 @@ jobs:
   ### Executable MPI
   Build-Executables-MPI-Linux:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."
@@ -211,7 +211,7 @@ jobs:
   ### Executable debugging
   Build-Executables-Debugging-Linux:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."
@@ -238,7 +238,7 @@ jobs:
   ### Library
   Build-Library-Linux:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."
@@ -308,7 +308,7 @@ jobs:
   ### Documentation
   Build-Documentation-Linux:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."
@@ -337,7 +337,7 @@ jobs:
   ### Tools
   Build-Tools:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     needs: Build-Executable-Linux
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
@@ -1307,7 +1307,7 @@ jobs:
   ## Python Interface
   Python-Interface:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     needs: Build-Library-Linux
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
diff --git a/.github/workflows/prChecks.yml b/.github/workflows/prChecks.yml
index 1a7ac49762..00d4eb94c4 100644
--- a/.github/workflows/prChecks.yml
+++ b/.github/workflows/prChecks.yml
@@ -82,7 +82,7 @@ jobs:
   # Validate Perl scripts
   Validate-Perl-Scripts:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - uses: actions/checkout@v4
    - name: Check out repository analysis-perl
@@ -128,7 +128,7 @@ jobs:
   # Validate Perl modules
   Validate-Perl-Modules:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - uses: actions/checkout@v4
    - name: "Set environmental variables"
@@ -162,7 +162,7 @@ jobs:
   # Fortran static analysis
   Fortran-Static-Analysis:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - uses: actions/checkout@v4
    - name: "Set environmental variables"
@@ -192,7 +192,7 @@ jobs:
   # Embedded XML and LaTeX checks
   Embedded-XML-LaTeX:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - name: Check out repository code
       uses: actions/checkout@v4
@@ -248,7 +248,7 @@ jobs:
   # Spell check LaTeX files
   Spell-Check-LaTeX:
     runs-on: ubuntu-latest
-    container: ghcr.io/galacticusorg/buildenv:latest
+    container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
     steps:
     - name: Check out repository code
       uses: actions/checkout@v4
diff --git a/.github/workflows/profile.yml b/.github/workflows/profile.yml
index 441695e6b0..80311b155a 100644
--- a/.github/workflows/profile.yml
+++ b/.github/workflows/profile.yml
@@ -23,7 +23,7 @@ jobs:
   Profile-Model:
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/galacticusorg/buildenv:latest
+      image: ghcr.io/galacticusorg/buildenv:feathdf1.14.5
      options: --privileged
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
diff --git a/.github/workflows/testCode.yml b/.github/workflows/testCode.yml
index b1018db141..c7145cb808 100644
--- a/.github/workflows/testCode.yml
+++ b/.github/workflows/testCode.yml
@@ -35,7 +35,7 @@ jobs:
   Test-Code:
     runs-on: ${{ inputs.runner }}
     container:
-      image: ${{ startsWith( format('{0}',inputs.runner), 'ubuntu') && 'ghcr.io/galacticusorg/buildenv:latest' || '' }}
+      image: ${{ startsWith( format('{0}',inputs.runner), 'ubuntu') && 'ghcr.io/galacticusorg/buildenv:feathdf1.14.5' || '' }}
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."
diff --git a/.github/workflows/testModel.yml b/.github/workflows/testModel.yml
index 7cd6e897d1..7756d1b8c2 100644
--- a/.github/workflows/testModel.yml
+++ b/.github/workflows/testModel.yml
@@ -56,7 +56,7 @@ jobs:
   Test-Model:
     runs-on: ${{ inputs.runner }}
     container:
-      image: ${{ startsWith( format('{0}',inputs.runner), 'ubuntu') && 'ghcr.io/galacticusorg/buildenv:latest' || '' }}
+      image: ${{ startsWith( format('{0}',inputs.runner), 'ubuntu') && 'ghcr.io/galacticusorg/buildenv:feathdf1.14.5' || '' }}
     steps:
     - run: echo "The job was automatically triggered by a ${{ github.event_name }} event."
     - run: echo "This job is now running on a ${{ runner.os }} server."

From 49594d6c3c25e06d9bf62daa868581b9adea344f Mon Sep 17 00:00:00 2001
From: Andrew Benson
Date: Thu, 21 Nov 2024 15:28:25 -0800
Subject: [PATCH 02/11] feat: Update for compatibility with HDF5 v1.14.5

---
 source/utility.IO.HDF5.F90 | 174 +++++++++++++++++++------------------
 1 file changed, 91 insertions(+), 83 deletions(-)

diff --git a/source/utility.IO.HDF5.F90 b/source/utility.IO.HDF5.F90
index f3ae7f34fc..55142dd696 100644
--- a/source/utility.IO.HDF5.F90
+++ b/source/utility.IO.HDF5.F90
@@ -76,6 +76,7 @@ module IO_HDF5
   integer(kind=HID_T ), dimension(8), public :: H5T_NATIVE_INTEGER_8AS
   integer(kind=HID_T ), dimension(1), public :: H5T_VLEN_DOUBLE       , H5T_VLEN_VLEN_DOUBLE
   integer(kind=HID_T ), dimension(1), public :: H5T_VLEN_INTEGER8
+  integer(kind=HID_T )              , public :: H5T_INTEGER8

   type hdf5Object
      !!{
@@ -450,12 +451,14 @@ subroutine IO_HDF5_Initialize
     Initialize the HDF5 subsystem.
     !!}
    use :: Error, only : Error_Report
-    use :: HDF5 , only : H5T_IEEE_F32BE   , H5T_IEEE_F32LE    , H5T_IEEE_F64BE      , H5T_IEEE_F64LE, &
-         &               H5T_NATIVE_DOUBLE, H5T_NATIVE_INTEGER, H5T_NATIVE_INTEGER_8, H5T_STD_I32BE , &
-         &               H5T_STD_I32LE    , H5T_STD_I64BE     , H5T_STD_I64LE       , H5T_STD_U32BE , &
-         &               H5T_STD_U32LE    , h5open_f          , h5tvlen_create_f
+    use :: HDF5 , only : H5T_IEEE_F32BE   , H5T_IEEE_F32LE    , H5T_IEEE_F64BE, H5T_IEEE_F64LE  , &
+         &               H5T_NATIVE_DOUBLE, H5T_NATIVE_INTEGER, H5T_STD_I32BE , H5T_STD_U32LE   , &
+         &               H5T_STD_I32LE    , H5T_STD_I64BE     , H5T_STD_I64LE , H5T_STD_U32BE   , &
+         &               h5tcopy_f        , h5tset_size_f     , h5open_f      , h5tvlen_create_f, &
+         &               h5tequal_f
    implicit none
    integer :: errorCode
+   logical :: isLittleEndian, isBigEndian

 #ifdef DEBUGHDF5
    call IO_HDF5_Assert_In_Critical()
 #endif
    call h5open_f(errorCode)
    if (errorCode < 0) call Error_Report('failed to initialize HDF5 subsystem'//{introspection:location})

+    ! Create required datatypes.
+    call h5tequal_f(H5T_NATIVE_INTEGER,H5T_STD_I32LE,isLittleEndian,errorCode)
+    if (errorCode < 0) call Error_Report('failed to test endianness'//{introspection:location})
+    call h5tequal_f(H5T_NATIVE_INTEGER,H5T_STD_I32BE,isBigEndian   ,errorCode)
+    if (errorCode < 0) call Error_Report('failed to test endianness'//{introspection:location})
+    if (isLittleEndian) then
+       call h5tcopy_f(H5T_STD_I64LE,H5T_INTEGER8,errorCode)
+    else if (isBigEndian) then
+       call h5tcopy_f(H5T_STD_I64BE,H5T_INTEGER8,errorCode)
+    else
+       call Error_Report('unable to determine native endianness'//{introspection:location})
+    end if
+    if (errorCode < 0) call Error_Report('failed to copy integer datatype'//{introspection:location})
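For readers following this hunk from outside Fortran: since the patch removes every use of H5T_NATIVE_INTEGER_8, it instead builds an equivalent 8-byte integer type by probing the native byte order with H5Tequal and copying the matching fixed-order type with H5Tcopy. The same probe can be written against h5py's low-level interface. This is a minimal sketch, not part of the patch; it assumes only the predefined type objects that h5py exposes in h5py.h5t, whose == operator performs a logical H5Tequal comparison:

    import sys
    import h5py

    # Determine which fixed-byte-order 32-bit integer type matches the
    # machine's native integer type (mirrors the two h5tequal_f calls above).
    isLittleEndian = h5py.h5t.NATIVE_INT32 == h5py.h5t.STD_I32LE
    isBigEndian    = h5py.h5t.NATIVE_INT32 == h5py.h5t.STD_I32BE
    # Copy the matching 64-bit type, as h5tcopy_f does for H5T_INTEGER8.
    if isLittleEndian:
        integer8 = h5py.h5t.STD_I64LE.copy()
    elif isBigEndian:
        integer8 = h5py.h5t.STD_I64BE.copy()
    else:
        sys.exit("unable to determine native endianness")
    # (In pure Python one could instead simply consult sys.byteorder.)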
    ! Ensure native datatype arrays are initialized.
    H5T_NATIVE_DOUBLES          =[H5T_NATIVE_DOUBLE   ,H5T_IEEE_F32BE,H5T_IEEE_F32LE,H5T_IEEE_F64BE,H5T_IEEE_F64LE]
    H5T_NATIVE_INTEGERS         =[H5T_NATIVE_INTEGER  ,H5T_STD_I32BE ,H5T_STD_I32LE ,H5T_STD_I64BE ,H5T_STD_I64LE ]
    H5T_NATIVE_UNSIGNED_INTEGERS=[H5T_STD_U32BE       ,H5T_STD_U32LE ]
-    H5T_NATIVE_INTEGER_8S       =[H5T_NATIVE_INTEGER_8,H5T_STD_I64BE ,H5T_STD_I64LE ]
+    H5T_NATIVE_INTEGER_8S       =[H5T_INTEGER8,H5T_STD_I64BE ,H5T_STD_I64LE ]
    H5T_NATIVE_INTEGER_8AS(1:3) =H5T_NATIVE_INTEGERS(1:3)
    H5T_NATIVE_INTEGER_8AS(4:5) =H5T_NATIVE_UNSIGNED_INTEGERS
    H5T_NATIVE_INTEGER_8AS(6:8) =H5T_NATIVE_INTEGER_8S

    ! Create vlen datatypes.
-    call h5tvlen_create_f(H5T_NATIVE_DOUBLE    ,H5T_VLEN_DOUBLE     (1),errorCode)
    if (errorCode < 0) call Error_Report('failed to create vlen double HDF5 datatype'     //{introspection:location})
-    call h5tvlen_create_f(H5T_VLEN_DOUBLE   (1),H5T_VLEN_VLEN_DOUBLE(1),errorCode)
    if (errorCode < 0) call Error_Report('failed to create vlen-vlen double HDF5 datatype'//{introspection:location})
-    call h5tvlen_create_f(H5T_NATIVE_INTEGER_8 ,H5T_VLEN_INTEGER8   (1),errorCode)
+    call h5tvlen_create_f(H5T_INTEGER8         ,H5T_VLEN_INTEGER8   (1),errorCode)
    if (errorCode < 0) call Error_Report('failed to create vlen integer8 HDF5 datatype'   //{introspection:location})

    ! Initialize our OpenMP lock.
@@ -1332,9 +1349,9 @@ function IO_HDF5_Open_Attribute(inObject,attributeName,attributeDataType,attribu
    Open an attribute in {\normalfont \ttfamily inObject}.
    !!}
    use :: Error             , only : Error_Report
-    use :: HDF5              , only : H5T_NATIVE_CHARACTER, H5T_NATIVE_DOUBLE, H5T_NATIVE_INTEGER, H5T_NATIVE_INTEGER_8, &
-         &                            HID_T               , HSIZE_T          , h5acreate_f       , h5aopen_f           , &
-         &                            h5sclose_f          , h5screate_simple_f
+    use :: HDF5              , only : H5T_NATIVE_CHARACTER, H5T_NATIVE_DOUBLE, H5T_NATIVE_INTEGER, h5screate_simple_f, &
+         &                            HID_T               , HSIZE_T          , h5acreate_f       , h5aopen_f         , &
+         &                            h5sclose_f
    use :: ISO_Varying_String, only : assignment(=)       , operator(//)
    implicit none
    class (hdf5Object ) , intent(in   ), target :: inObject
@@ -1408,7 +1425,7 @@ function IO_HDF5_Open_Attribute(inObject,attributeName,attributeDataType,attribu
       case (hdf5DataTypeInteger  )
          dataTypeID=H5T_NATIVE_INTEGER
       case (hdf5DataTypeInteger8 )
-          dataTypeID=H5T_NATIVE_INTEGER_8
+          dataTypeID=H5T_INTEGER8
       case (hdf5DataTypeDouble   )
          dataTypeID=H5T_NATIVE_DOUBLE
       case (hdf5DataTypeCharacter)
@@ -1644,7 +1661,6 @@ subroutine IO_HDF5_Write_Attribute_Integer8_Scalar(self,attributeValue,attribute
    Open and write a long integer scalar attribute in {\normalfont \ttfamily self}.
    !!}
    use            :: Error             , only : Error_Report
-    use            :: HDF5              , only : H5T_NATIVE_INTEGER_8
    use, intrinsic :: ISO_C_Binding     , only : c_loc
    use            :: ISO_Varying_String, only : assignment(=), operator(//), trim
    implicit none
@@ -1711,7 +1727,7 @@ subroutine IO_HDF5_Write_Attribute_Integer8_Scalar(self,attributeValue,attribute

    ! Write the attribute.
    dataBuffer=c_loc(attributeValue)
-    errorCode=H5Awrite(attributeObject%objectID,H5T_NATIVE_INTEGER_8,dataBuffer)
+    errorCode=H5Awrite(attributeObject%objectID,H5T_INTEGER8,dataBuffer)
    if (errorCode /= 0) then
       message="unable to write attribute '"//attributeNameActual//"' in object '"//self%objectName//"'"
       call Error_Report(message//self%locationReport()//{introspection:location})
@@ -1728,7 +1744,7 @@ subroutine IO_HDF5_Write_Attribute_Integer8_1D(self,attributeValue,attributeName
    Open and write an integer 1-D array attribute in {\normalfont \ttfamily self}.
!!} use :: Error , only : Error_Report - use :: HDF5 , only : H5T_NATIVE_INTEGER_8, HSIZE_T + use :: HDF5 , only : HSIZE_T use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//), trim implicit none @@ -1802,7 +1818,7 @@ subroutine IO_HDF5_Write_Attribute_Integer8_1D(self,attributeValue,attributeName allocate(attributeValueContiguous,mold=attributeValue) attributeValueContiguous=attributeValue dataBuffer=c_loc(attributeValueContiguous) - errorCode=H5Awrite(attributeObject%objectID,H5T_NATIVE_INTEGER_8,dataBuffer) + errorCode=H5Awrite(attributeObject%objectID,H5T_INTEGER8,dataBuffer) if (errorCode /= 0) then message="unable to write attribute '"//attributeNameActual//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -2619,10 +2635,10 @@ subroutine IO_HDF5_Read_Attribute_Integer8_Scalar(self,attributeName,attributeVa Open and read a long integer scalar attribute in {\normalfont \ttfamily self}. !!} use :: Error , only : Error_Report - use :: HDF5 , only : H5T_NATIVE_INTEGER_8, HID_T , HSIZE_T, h5aget_space_f, & - & h5sclose_f , h5sget_simple_extent_dims_f + use :: HDF5 , only : h5sget_simple_extent_dims_f, HID_T , HSIZE_T, h5aget_space_f, & + & h5sclose_f use, intrinsic :: ISO_C_Binding , only : c_loc - use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim + use :: ISO_Varying_String, only : assignment(=) , operator(//), trim implicit none integer (kind=kind_int8) , intent( out) , target :: attributeValue class (hdf5Object ) , intent(inout) :: self @@ -2692,7 +2708,7 @@ subroutine IO_HDF5_Read_Attribute_Integer8_Scalar(self,attributeName,attributeVa if (matches) then ! Read the attribute. dataBuffer=c_loc(attributeValue) - errorCode=H5Aread(attributeObject%objectID,H5T_NATIVE_INTEGER_8,dataBuffer) + errorCode=H5Aread(attributeObject%objectID,H5T_INTEGER8,dataBuffer) if (errorCode /= 0) then message="unable to read attribute '"//trim(attributeNameActual)//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -2739,10 +2755,10 @@ subroutine IO_HDF5_Read_Attribute_Integer8_1D_Array_Allocatable(self,attributeNa Open and read an integer scalar attribute in {\normalfont \ttfamily self}. !!} use :: Error , only : Error_Report - use :: HDF5 , only : H5T_NATIVE_INTEGER_8, HID_T , HSIZE_T, h5aget_space_f, & - & h5sclose_f , h5sget_simple_extent_dims_f + use :: HDF5 , only : h5sget_simple_extent_dims_f, HID_T , HSIZE_T, h5aget_space_f, & + & h5sclose_f use, intrinsic :: ISO_C_Binding , only : c_loc - use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim + use :: ISO_Varying_String, only : assignment(=) , operator(//), trim implicit none integer (kind=kind_int8), allocatable, dimension(:), intent( out), target :: attributeValue class (hdf5Object ) , intent(inout) :: self @@ -2825,7 +2841,7 @@ subroutine IO_HDF5_Read_Attribute_Integer8_1D_Array_Allocatable(self,attributeNa ! Read the attribute. 
dataBuffer=c_loc(attributeValue) - errorCode=H5Aread(attributeObject%objectID,H5T_NATIVE_INTEGER_8,dataBuffer) + errorCode=H5Aread(attributeObject%objectID,H5T_INTEGER8,dataBuffer) if (errorCode /= 0) then message="unable to read attribute '"//trim(attributeNameActual)//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -2842,10 +2858,10 @@ subroutine IO_HDF5_Read_Attribute_Integer8_1D_Array_Static(self,attributeName,at Open and read an integer scalar attribute in {\normalfont \ttfamily self}. !!} use :: Error , only : Error_Report - use :: HDF5 , only : H5T_NATIVE_INTEGER_8, HID_T , HSIZE_T, h5aget_space_f, & - & h5sclose_f , h5sget_simple_extent_dims_f + use :: HDF5 , only : h5sget_simple_extent_dims_f, HID_T , HSIZE_T, h5aget_space_f, & + & h5sclose_f use, intrinsic :: ISO_C_Binding , only : c_loc - use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim + use :: ISO_Varying_String, only : assignment(=) , operator(//), trim implicit none integer (kind=kind_int8) , dimension(:), intent( out) :: attributeValue class (hdf5Object ) , intent(inout) :: self @@ -2932,7 +2948,7 @@ subroutine IO_HDF5_Read_Attribute_Integer8_1D_Array_Static(self,attributeName,at ! since it is of assumed shape. allocate(attributeValueContiguous,mold=attributeValue) dataBuffer=c_loc(attributeValueContiguous) - errorCode=H5Aread(attributeObject%objectID,H5T_NATIVE_INTEGER_8,dataBuffer) + errorCode=H5Aread(attributeObject%objectID,H5T_INTEGER8,dataBuffer) if (errorCode /= 0) then message="unable to read attribute '"//trim(attributeNameActual)//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -4263,10 +4279,10 @@ function IO_HDF5_Open_Dataset(inObject,datasetName,comment,datasetDataType,datas !!} use :: Error , only : Error_Report use :: HDF5 , only : H5P_DATASET_CREATE_F, H5S_UNLIMITED_F , H5T_NATIVE_CHARACTER, H5T_NATIVE_DOUBLE , & - & H5T_NATIVE_INTEGER , H5T_NATIVE_INTEGER_8 , HID_T , HSIZE_T , & + & H5T_NATIVE_INTEGER , h5screate_simple_f , HID_T , HSIZE_T , & & h5dcreate_f , h5dget_create_plist_f, h5dopen_f , h5eset_auto_f , & & hsize_t , h5pclose_f , h5pcreate_f , h5pget_chunk_f , & - & h5pset_chunk_f , h5pset_deflate_f , h5sclose_f , h5screate_simple_f + & h5pset_chunk_f , h5pset_deflate_f , h5sclose_f use :: ISO_Varying_String, only : assignment(=) , operator(//) implicit none type (hdf5Object ) :: datasetObject @@ -4469,7 +4485,7 @@ function IO_HDF5_Open_Dataset(inObject,datasetName,comment,datasetDataType,datas case (hdf5DataTypeInteger ) dataTypeID=H5T_NATIVE_INTEGER case (hdf5DataTypeInteger8 ) - dataTypeID=H5T_NATIVE_INTEGER_8 + dataTypeID=H5T_INTEGER8 case (hdf5DataTypeDouble ) dataTypeID=H5T_NATIVE_DOUBLE case (hdf5DataTypeCharacter ) @@ -6430,9 +6446,9 @@ subroutine IO_HDF5_Write_Dataset_Integer8_1D(self,datasetValue,datasetName,comme Open and write a long integer 1-D array dataset in {\normalfont \ttfamily self}. 
!!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8 , HID_T , & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_SELECT_SET_F , h5sselect_hyperslab_f, HID_T , & & HSIZE_T , h5dget_space_f , h5dset_extent_f , h5sclose_f, & - & h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f, hsize_t + & h5screate_simple_f, h5sget_simple_extent_dims_f, hsize_t use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -6573,7 +6589,7 @@ subroutine IO_HDF5_Write_Dataset_Integer8_1D(self,datasetValue,datasetName,comme allocate(datasetValueContiguous,mold=datasetValue) datasetValueContiguous=datasetValue dataBuffer=c_loc(datasetValueContiguous) - errorCode=h5dwrite(datasetObject%objectID,H5T_NATIVE_INTEGER_8,newDataspaceID,dataspaceID,H5P_DEFAULT_F,dataBuffer) + errorCode=h5dwrite(datasetObject%objectID,H5T_INTEGER8,newDataspaceID,dataspaceID,H5P_DEFAULT_F,dataBuffer) if (errorCode /= 0) then message="unable to write dataset '"//datasetNameActual//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -6608,9 +6624,9 @@ subroutine IO_HDF5_Write_Dataset_Integer8_2D(self,datasetValue,datasetName,comme Open and write a long integer 2-D array dataset in {\normalfont \ttfamily self}. !!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8 , HID_T , & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_SELECT_SET_F , h5sselect_hyperslab_f, HID_T , & & HSIZE_T , h5dget_space_f , h5dset_extent_f , h5sclose_f, & - & h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f, hsize_t + & h5screate_simple_f, h5sget_simple_extent_dims_f, hsize_t use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -6764,7 +6780,7 @@ subroutine IO_HDF5_Write_Dataset_Integer8_2D(self,datasetValue,datasetName,comme allocate(datasetValueContiguous,mold=datasetValue) datasetValueContiguous=datasetValue dataBuffer=c_loc(datasetValueContiguous) - errorCode=h5dwrite(datasetObject%objectID,H5T_NATIVE_INTEGER_8,newDataspaceID,dataspaceID,H5P_DEFAULT_F,dataBuffer) + errorCode=h5dwrite(datasetObject%objectID,H5T_INTEGER8,newDataspaceID,dataspaceID,H5P_DEFAULT_F,dataBuffer) if (errorCode /= 0) then message="unable to write dataset '"//datasetNameActual//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -6799,9 +6815,9 @@ subroutine IO_HDF5_Write_Dataset_Integer8_3D(self,datasetValue,datasetName,comme Open and write a long integer 3-D array dataset in {\normalfont \ttfamily self}. 
!!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8 , HID_T , & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_SELECT_SET_F , h5sselect_hyperslab_f, HID_T , & & HSIZE_T , h5dget_space_f , h5dset_extent_f , h5sclose_f, & - & h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f, hsize_t + & h5screate_simple_f, h5sget_simple_extent_dims_f, hsize_t use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -6942,7 +6958,7 @@ subroutine IO_HDF5_Write_Dataset_Integer8_3D(self,datasetValue,datasetName,comme allocate(datasetValueContiguous,mold=datasetValue) datasetValueContiguous=datasetValue dataBuffer=c_loc(datasetValueContiguous) - errorCode=h5dwrite(datasetObject%objectID,H5T_NATIVE_INTEGER_8,newDataspaceID,dataspaceID,H5P_DEFAULT_F,dataBuffer) + errorCode=h5dwrite(datasetObject%objectID,H5T_INTEGER8,newDataspaceID,dataspaceID,H5P_DEFAULT_F,dataBuffer) if (errorCode /= 0) then message="unable to write dataset '"//datasetNameActual//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -6977,11 +6993,11 @@ subroutine IO_HDF5_Read_Dataset_Integer8_1D_Array_Static(self,datasetName,datase Open and read a long integer scalar dataset in {\normalfont \ttfamily self}. !!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8, & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , h5sselect_elements_f, & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5rdereference_f , h5rget_region_f , h5sclose_f , & - & h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, h5sselect_elements_f, & - & h5sselect_hyperslab_f, hdset_reg_ref_t_f , size_t + & h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, size_t , & + & h5sselect_hyperslab_f, hdset_reg_ref_t_f use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -7303,7 +7319,7 @@ subroutine IO_HDF5_Read_Dataset_Integer8_1D_Array_Static(self,datasetName,datase ! Read the dataset. allocate(datasetValueContiguous,mold=datasetValue) dataBuffer=c_loc(datasetValueContiguous) - errorCode=h5dread(datasetObject%objectID,H5T_NATIVE_INTEGER_8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) + errorCode=h5dread(datasetObject%objectID,H5T_INTEGER8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) if (errorCode /= 0) then message="unable to read dataset '"//trim(datasetNameActual)//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -7352,11 +7368,11 @@ subroutine IO_HDF5_Read_Dataset_Integer8_1D_Array_Allocatable(self,datasetName,d Open and read a long integer scalar dataset in {\normalfont \ttfamily self}. 
!!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8, & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , hdset_reg_ref_t_f , & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5rdereference_f , h5rget_region_f , h5sclose_f , & & h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, h5sselect_elements_f, & - & h5sselect_hyperslab_f, hdset_reg_ref_t_f , size_t + & h5sselect_hyperslab_f, size_t use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -7676,7 +7692,7 @@ subroutine IO_HDF5_Read_Dataset_Integer8_1D_Array_Allocatable(self,datasetName,d ! Read the dataset. dataBuffer=c_loc(datasetValue) - errorCode=h5dread(datasetObject%objectID,H5T_NATIVE_INTEGER_8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) + errorCode=h5dread(datasetObject%objectID,H5T_INTEGER8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) if (errorCode /= 0) then message="unable to read dataset '"//trim(datasetNameActual)//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -7723,11 +7739,11 @@ subroutine IO_HDF5_Read_Dataset_Integer8_2D_Array_Static(self,datasetName,datase Open and read a double scalar dataset in {\normalfont \ttfamily self}. !!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8, & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , hdset_reg_ref_t_f , & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5rdereference_f , h5rget_region_f , h5sclose_f , & & h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, h5sselect_elements_f, & - & h5sselect_hyperslab_f, hdset_reg_ref_t_f , size_t + & h5sselect_hyperslab_f, size_t use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -8054,7 +8070,7 @@ subroutine IO_HDF5_Read_Dataset_Integer8_2D_Array_Static(self,datasetName,datase ! Read the dataset. allocate(datasetValueContiguous,mold=datasetValue) dataBuffer=c_loc(datasetValueContiguous) - errorCode=h5dread(datasetObject%objectID,H5T_NATIVE_INTEGER_8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) + errorCode=h5dread(datasetObject%objectID,H5T_INTEGER8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) if (errorCode /= 0) then message="unable to read dataset '"//trim(datasetNameActual)//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -8103,11 +8119,11 @@ subroutine IO_HDF5_Read_Dataset_Integer8_2D_Array_Allocatable(self,datasetName,d Open and read a double 2-D array dataset in {\normalfont \ttfamily self}. 
!!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8, & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , hdset_reg_ref_t_f , & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5rdereference_f , h5rget_region_f , h5sclose_f , & & h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, h5sselect_elements_f, & - & h5sselect_hyperslab_f, hdset_reg_ref_t_f , size_t + & h5sselect_hyperslab_f, size_t use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -8432,7 +8448,7 @@ subroutine IO_HDF5_Read_Dataset_Integer8_2D_Array_Allocatable(self,datasetName,d ! Read the dataset. dataBuffer=c_loc(datasetValue) - errorCode=h5dread(datasetObject%objectID,H5T_NATIVE_INTEGER_8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) + errorCode=h5dread(datasetObject%objectID,H5T_INTEGER8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) if (errorCode /= 0) then message="unable to read dataset '"//trim(datasetNameActual)//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -8479,11 +8495,11 @@ subroutine IO_HDF5_Read_Dataset_Integer8_3D_Array_Allocatable(self,datasetName,d Open and read a double 3-D array dataset in {\normalfont \ttfamily self}. !!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8, & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , hdset_reg_ref_t_f , & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5rdereference_f , h5rget_region_f , h5sclose_f , & & h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, h5sselect_elements_f, & - & h5sselect_hyperslab_f, hdset_reg_ref_t_f , size_t + & h5sselect_hyperslab_f, size_t use, intrinsic :: ISO_C_Binding , only : c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -8813,7 +8829,7 @@ subroutine IO_HDF5_Read_Dataset_Integer8_3D_Array_Allocatable(self,datasetName,d ! Read the dataset. dataBuffer=c_loc(datasetValue) - errorCode=h5dread(datasetObject%objectID,H5T_NATIVE_INTEGER_8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) + errorCode=h5dread(datasetObject%objectID,H5T_INTEGER8,memorySpaceID,datasetDataspaceID,H5P_DEFAULT_F,dataBuffer) if (errorCode /= 0) then message="unable to read dataset '"//trim(datasetNameActual)//"' in object '"//self%objectName//"'" call Error_Report(message//self%locationReport()//{introspection:location}) @@ -14882,12 +14898,11 @@ subroutine IO_HDF5_Read_Dataset_VarDouble_1D_Array_Allocatable(self,datasetName, Open and read a varying-length 1D double dataset in {\normalfont \ttfamily self}. 
!!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_DOUBLE , & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , size_t , & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5dread_f , h5rdereference_f , h5rget_region_f , & & h5sclose_f , h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, & - & h5sselect_elements_f, h5sselect_hyperslab_f, hdset_reg_ref_t_f , hsize_t , & - & size_t + & h5sselect_elements_f, h5sselect_hyperslab_f, hdset_reg_ref_t_f , hsize_t use, intrinsic :: ISO_C_Binding , only : c_f_pointer , c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -15264,12 +15279,11 @@ subroutine IO_HDF5_Read_Dataset_VarVarDouble_1D_Array_Allocatable(self,datasetNa Open and read a varying-length $\times$ varying-length 1D double dataset in {\normalfont \ttfamily self}. !!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_DOUBLE , & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , size_t , & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5dread_f , h5rdereference_f , h5rget_region_f , & & h5sclose_f , h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, & - & h5sselect_elements_f, h5sselect_hyperslab_f, hdset_reg_ref_t_f , hsize_t , & - & size_t + & h5sselect_elements_f, h5sselect_hyperslab_f, hdset_reg_ref_t_f , hsize_t use, intrinsic :: ISO_C_Binding , only : c_f_pointer , c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -15658,12 +15672,11 @@ subroutine IO_HDF5_Read_Dataset_VarDouble_2D_Array_Allocatable(self,datasetName, Open and read a varying-length 2D double dataset in {\normalfont \ttfamily self}. 
!!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_DOUBLE , & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , size_t , & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5dread_f , h5rdereference_f , h5rget_region_f , & & h5sclose_f , h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, & - & h5sselect_elements_f, h5sselect_hyperslab_f, hdset_reg_ref_t_f , hsize_t , & - & size_t + & h5sselect_elements_f, h5sselect_hyperslab_f, hdset_reg_ref_t_f , hsize_t use, intrinsic :: ISO_C_Binding , only : c_f_pointer , c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -16043,10 +16056,9 @@ subroutine IO_HDF5_Write_Dataset_VarDouble_1D(self,datasetValue,datasetName,comm !!} use, intrinsic :: ISO_C_Binding , only : c_loc use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F, H5S_SELECT_SET_F , H5T_NATIVE_DOUBLE , HID_T , & + use :: HDF5 , only : H5P_DEFAULT_F, H5S_SELECT_SET_F , hsize_t , HID_T , & & HSIZE_T , h5dget_space_f , h5dset_extent_f , h5dwrite_vl_f , & - & h5sclose_f , h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f, & - & hsize_t + & h5sclose_f , h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f use :: ISO_Varying_String, only : assignment(=), operator(//) , trim implicit none class (hdf5Object ), intent(inout) :: self @@ -16226,10 +16238,9 @@ subroutine IO_HDF5_Write_Dataset_VarVarDouble_1D(self,datasetValue,datasetName,c !!} use, intrinsic :: ISO_C_Binding , only : c_loc use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F, H5S_SELECT_SET_F , H5T_NATIVE_DOUBLE , HID_T , & + use :: HDF5 , only : H5P_DEFAULT_F, H5S_SELECT_SET_F , hsize_t , HID_T , & & HSIZE_T , h5dget_space_f , h5dset_extent_f , h5dwrite_vl_f , & - & h5sclose_f , h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f, & - & hsize_t + & h5sclose_f , h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f use :: ISO_Varying_String, only : assignment(=), operator(//) , trim implicit none class (hdf5Object ), intent(inout) :: self @@ -16417,10 +16428,9 @@ subroutine IO_HDF5_Write_Dataset_VarDouble_2D(self,datasetValue,datasetName,comm !!} use, intrinsic :: ISO_C_Binding , only : c_loc use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F, H5S_SELECT_SET_F , H5T_NATIVE_DOUBLE , HID_T , & + use :: HDF5 , only : H5P_DEFAULT_F, H5S_SELECT_SET_F , hsize_t , HID_T , & & HSIZE_T , h5dget_space_f , h5dset_extent_f , h5dwrite_vl_f , & - & h5sclose_f , h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f, & - & hsize_t + & h5sclose_f , h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f use :: ISO_Varying_String, only : assignment(=), operator(//) , trim implicit none class (hdf5Object ), intent(inout) :: self @@ -16601,12 +16611,11 @@ subroutine IO_HDF5_Read_Dataset_VarInteger8_2D_Array_Allocatable(self,datasetNam Open and read a variable-length integer-8 2D array dataset in {\normalfont \ttfamily self}. 
!!} use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8 , & + use :: HDF5 , only : H5P_DEFAULT_F , H5S_ALL_F , H5S_SELECT_SET_F , hdset_reg_ref_t_f , & & H5T_STD_REF_DSETREG , HID_T , HSIZE_T , h5dclose_f , & & h5dget_space_f , h5dread_f , h5rdereference_f , h5rget_region_f , & & h5sclose_f , h5screate_simple_f , h5sget_select_bounds_f, h5sget_simple_extent_dims_f, & - & h5sselect_elements_f, h5sselect_hyperslab_f, hdset_reg_ref_t_f , hsize_t , & - & size_t + & h5sselect_elements_f, h5sselect_hyperslab_f, hsize_t , size_t use, intrinsic :: ISO_C_Binding , only : c_f_pointer , c_loc use :: ISO_Varying_String, only : assignment(=) , operator(//) , trim implicit none @@ -16984,10 +16993,9 @@ subroutine IO_HDF5_Write_Dataset_VarInteger8_2D(self,datasetValue,datasetName,co !!} use, intrinsic :: ISO_C_Binding , only : c_loc use :: Error , only : Error_Report - use :: HDF5 , only : H5P_DEFAULT_F, H5S_SELECT_SET_F , H5T_NATIVE_INTEGER_8 , HID_T , & - & HSIZE_T , h5dget_space_f , h5dset_extent_f , h5dwrite_vl_f , & - & h5sclose_f , h5screate_simple_f, h5sget_simple_extent_dims_f, h5sselect_hyperslab_f, & - & hsize_t + use :: HDF5 , only : H5P_DEFAULT_F, H5S_SELECT_SET_F , h5sget_simple_extent_dims_f, HID_T , & + & HSIZE_T , h5dget_space_f , h5dset_extent_f , h5dwrite_vl_f, & + & h5sclose_f , h5screate_simple_f, h5sselect_hyperslab_f , hsize_t use :: ISO_Varying_String, only : assignment(=), operator(//) , trim implicit none class (hdf5Object ), intent(inout) :: self @@ -17337,7 +17345,7 @@ subroutine IO_HDF5_Read_Table_Integer8_1D_Array_Allocatable(self,tableName,colum !!} use :: Error , only : Error_Report use :: H5TB , only : h5tbget_table_info_f - use :: HDF5 , only : H5T_NATIVE_INTEGER_8, HSIZE_T , h5tget_size_f + use :: HDF5 , only : HSIZE_T , h5tget_size_f use, intrinsic :: ISO_C_Binding , only : c_loc , c_null_char use :: ISO_Varying_String, only : assignment(=) , operator(//) implicit none @@ -17403,7 +17411,7 @@ subroutine IO_HDF5_Read_Table_Integer8_1D_Array_Allocatable(self,tableName,colum if (allocated(datasetValue)) deallocate(datasetValue) allocate(datasetValue(readCountActual)) ! Read the column. 
-    call h5tget_size_f(H5T_NATIVE_INTEGER_8,recordTypeSize,errorCode)
+    call h5tget_size_f(H5T_INTEGER8,recordTypeSize,errorCode)
    if (errorCode /= 0) then
       message="unable to get long integer datatype size"
       call Error_Report(message//self%locationReport()//{introspection:location})

From 5ad2f9b19304a2dc40fabda0bf1a0d68f9406362 Mon Sep 17 00:00:00 2001
From: Andrew Benson
Date: Thu, 21 Nov 2024 15:50:09 -0800
Subject: [PATCH 03/11] fix: Update comment section in HDF5 test file

---
 testSuite/data/IntegerRangeU32.hdf5 | Bin 2104 -> 2160 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/testSuite/data/IntegerRangeU32.hdf5 b/testSuite/data/IntegerRangeU32.hdf5
index d748cc4bc89b159a88a931801db090f45277169e..db535ab24041bbb77f1890809d95fa150099bd94 100644
GIT binary patch
delta 104
zcmdlX@IhdL24lfQO-p$e21W)31_>Yr0RbQc0S*o@#gMVFaXY&O4}$@i%gDe1hRON4
hxv6<248lwd4BS8&kO?3Vl%HRskY1EuTA&B!0|4Yr0RbQc0S*o@#bB_paXUKzuiOSW

From da153d0cbdec8de6fcf168ac627612dd504b4971 Mon Sep 17 00:00:00 2001
From: Andrew Benson
Date: Thu, 21 Nov 2024 15:55:21 -0800
Subject: [PATCH 04/11] fix: Switch to HDF5 v1.14.5 for MacOS builds

---
 .github/actions/buildMacOS/action.yml | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/actions/buildMacOS/action.yml b/.github/actions/buildMacOS/action.yml
index e672b32b5e..5d309d363d 100644
--- a/.github/actions/buildMacOS/action.yml
+++ b/.github/actions/buildMacOS/action.yml
@@ -106,19 +106,19 @@ runs:
     - name: Install HDF5
       shell: bash
       run: |
-        curl -L https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8/hdf5-1.8.20/src/hdf5-1.8.20.tar.gz --output hdf5-1.8.20.tar.gz
-        tar -vxzf hdf5-1.8.20.tar.gz
-        cd hdf5-1.8.20
+        curl -L https://support.hdfgroup.org/releases/hdf5/v1_14/v1_14_5/downloads/hdf5-1.14.5.tar.gz --output hdf5-1.14.5.tar.gz
+        tar -vxzf hdf5-1.14.5.tar.gz
+        cd hdf5-1.14.5
         if [[ "${OS_VER}" -eq 13 ]]; then
             # For MacOS 13 force use of the classic linker as the new linker does not support the '-commons' option - see https://trac.macports.org/ticket/68194#comment:15
-            CC=gcc-11 CXX=g++-11 FC=gfortran-11 LDFLAGS=-Wl,-ld_classic ./configure --prefix=/usr/local --enable-fortran --enable-production
+            CC=gcc-11 CXX=g++-11 FC=gfortran-11 LDFLAGS=-Wl,-ld_classic ./configure --prefix=/usr/local --enable-fortran --enable-build-mode=production
         else
-            CC=gcc-11 CXX=g++-11 FC=gfortran-11 ./configure --prefix=/usr/local --enable-fortran --enable-production
+            CC=gcc-11 CXX=g++-11 FC=gfortran-11 ./configure --prefix=/usr/local --enable-fortran --enable-build-mode=production
         fi
         make -j3
         sudo make install
         cd ..
-        rm -rf hdf5-1.8.20 hdf5-1.8.20.tar.gz
+        rm -rf hdf5-1.14.5 hdf5-1.14.5.tar.gz
     - name: Install FoX
       shell: bash
       run: |

From dc0f8bead9f1dcbea94e9092611e0f4038cf0bb8 Mon Sep 17 00:00:00 2001
From: Andrew Benson
Date: Mon, 25 Nov 2024 14:05:36 -0800
Subject: [PATCH 05/11] fix: Port test codes to Python so that they work with HDF5 file reads

`h5py` is more reliable.
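For orientation before the large diff that follows: the h5py access pattern that replaces PDL::IO::HDF5 throughout the ported scripts reduces to a few idioms — open the file read-only, address groups by path, slice datasets into numpy arrays, and read attributes through the .attrs mapping. A minimal sketch (the file, group, dataset, and attribute names here are those used by validate-PonosV.py below):

    import h5py

    # Open read-only; a File object behaves like a dict of groups and datasets.
    model    = h5py.File("outputs/validate_PonosV.hdf5", "r")
    nodeData = model["Outputs/Output2/nodeData"]            # path-style group access
    isolated = nodeData["nodeIsIsolated"][:]                # [:] reads the whole dataset as a numpy array
    # HDF5 attributes are read through .attrs.
    treeCount = model["Parameters/mergerTreeBuildMasses"].attrs["treeCount"][0]
    model.close()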
--- .github/workflows/cicd.yml | 12 +- testSuite/validate-PonosV.pl | 198 ------------ testSuite/validate-PonosV.py | 189 +++++++++++ testSuite/validate-baryonicSuppression.pl | 306 ------------------ testSuite/validate-baryonicSuppression.py | 315 +++++++++++++++++++ testSuite/validate-darkMatterOnlySubhalos.pl | 28 -- testSuite/validate-darkMatterOnlySubhalos.py | 25 ++ testSuite/validate-milkyWay.pl | 28 -- testSuite/validate-milkyWay.py | 25 ++ 9 files changed, 562 insertions(+), 564 deletions(-) delete mode 100755 testSuite/validate-PonosV.pl create mode 100755 testSuite/validate-PonosV.py delete mode 100755 testSuite/validate-baryonicSuppression.pl create mode 100755 testSuite/validate-baryonicSuppression.py delete mode 100755 testSuite/validate-darkMatterOnlySubhalos.pl create mode 100755 testSuite/validate-darkMatterOnlySubhalos.py delete mode 100755 testSuite/validate-milkyWay.pl create mode 100755 testSuite/validate-milkyWay.py diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 9b8363a200..8ad2db3538 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -736,7 +736,8 @@ jobs: needs: Build-Executable-Linux uses: ./.github/workflows/testModel.yml with: - file: validate-darkMatterOnlySubhalos.pl + install: python3 python3-h5py python3-numpy python3-git + file: validate-darkMatterOnlySubhalos.py artifact: galacticus-exec runPath: ./testSuite cacheData: 0 @@ -747,7 +748,8 @@ jobs: needs: Build-Executable-Linux uses: ./.github/workflows/testModel.yml with: - file: validate-milkyWay.pl + install: python3 python3-h5py python3-numpy python3-git + file: validate-milkyWay.py artifact: galacticus-exec runPath: ./testSuite cacheData: 1 @@ -758,7 +760,8 @@ jobs: needs: Build-Executable-Linux uses: ./.github/workflows/testModel.yml with: - file: validate-PonosV.pl + install: python3 python3-h5py python3-numpy python3-git + file: validate-PonosV.py artifact: galacticus-exec runPath: ./testSuite cacheData: 0 @@ -769,7 +772,8 @@ jobs: needs: Build-Executable-Linux uses: ./.github/workflows/testModel.yml with: - file: validate-baryonicSuppression.pl + install: python3 python3-h5py python3-numpy python3-git + file: validate-baryonicSuppression.py artifact: galacticus-exec runPath: ./testSuite cacheData: 0 diff --git a/testSuite/validate-PonosV.pl b/testSuite/validate-PonosV.pl deleted file mode 100755 index dfd4175e5c..0000000000 --- a/testSuite/validate-PonosV.pl +++ /dev/null @@ -1,198 +0,0 @@ -#!/usr/bin/env perl -use strict; -use warnings; -use lib $ENV{'GALACTICUS_EXEC_PATH'}."/perl"; -use PDL; -use PDL::NiceSlice; -use PDL::IO::HDF5; -use PDL::Constants qw(PI); -use JSON::PP; -use Git; - -# Run models to validate subhalo projected density against the PonosV simulation of Fiacconi et al. (2016; https://ui.adsabs.harvard.edu/abs/2016ApJ...824..144F). -# Andrew Benson (13-February-2024) - -# Make output directory. -system("mkdir -p outputs/"); - -# Run the validate model. -system("cd ..; export OMP_NUM_THREADS=2; ./Galacticus.exe testSuite/parameters/validate_PonosV.xml"); -unless ( $? == 0 ) { - print "FAIL: PonosV validation model failed to run\n"; - exit; -} - -# Read data. 
-my $model = new PDL::IO::HDF5("outputs/validate_PonosV.hdf5"); -my $outputs = $model ->group('Outputs' ); -my $redshift0p0 = $outputs->group('Output2/nodeData'); -my $redshift0p7 = $outputs->group('Output1/nodeData'); -my $data; -$data->{'0.0'}->{$_} = $redshift0p0->dataset($_)->get() - foreach ( 'nodeIsIsolated', 'darkMatterOnlyRadiusVirial' ); -$data->{'0.7'}->{$_} = $redshift0p7->dataset($_)->get() - foreach ( 'nodeIsIsolated', 'hostDarkMatterOnlyRadiusVirial', 'positionOrbitalX', 'positionOrbitalY', 'positionOrbitalZ', 'satelliteBoundMass', 'mergerTreeIndex' ); - -# Validate z=0.0 host halo virial radii. -my $hostsFinal = which($data->{'0.0'}->{'nodeIsIsolated'} == 1); -my $radiusVirialTarget = pdl 0.6005; # Virial radius of PonosV from Table 1 of Fiacconi et al. (2016; https://ui.adsabs.harvard.edu/abs/2016ApJ...824..144F). -my $offsetFractional = ($data->{'0.0'}->{'darkMatterOnlyRadiusVirial'}->($hostsFinal)-$radiusVirialTarget)/$radiusVirialTarget; -if ( any($offsetFractional > 0.01) ) { - print "FAIL: PonosV z=0.0 host virial radii\n"; - print " Expected: ".$radiusVirialTarget."\n"; - print " Found: ".$data->{'0.0'}->{'darkMatterOnlyRadiusVirial'}->($hostsFinal)."\n"; -} else { - print "SUCCESS: PonosV z=0.0 host virial radii\n"; -} - -# Select z=0.7 subhalos. -my $radiusFractionalMinimum = pdl 0.00e0; -my $radiusFractionalMaximum = pdl 0.04e0; -my $massBoundMinimum8 = pdl 0.50e8; -my $massBoundMaximum8 = pdl 2.00e8; -my $massBoundMinimum9 = pdl 0.50e9; -my $massBoundMaximum9 = pdl 2.00e9; -my $kilo = pdl 1.00e3; -$data->{'0.7'}->{'radiusOrbital2D'} = sqrt(+$data->{'0.7'}->{'positionOrbitalX'}**2+$data->{'0.7'}->{'positionOrbitalY'}**2 ); -$data->{'0.7'}->{'radiusOrbital3D'} = sqrt(+$data->{'0.7'}->{'positionOrbitalX'}**2+$data->{'0.7'}->{'positionOrbitalY'}**2+$data->{'0.7'}->{'positionOrbitalZ'}**2); -my $selection - = which - ( - ($data->{'0.7'}->{'nodeIsIsolated' } == 0 ) # } Select subhalos. - & - ($data->{'0.7'}->{'radiusOrbital3D' } <= $data->{'0.7'}->{'hostDarkMatterOnlyRadiusVirial'}) # } Select subhalos within the host virial radius. - & - ($data->{'0.7'}->{'radiusOrbital2D' } >= $radiusFractionalMinimum*$data->{'0.7'}->{'hostDarkMatterOnlyRadiusVirial'}) # ⎫ - & # ⎬ Select subhalos close to projected radius of 0.02 of host virial radius. - ($data->{'0.7'}->{'radiusOrbital2D' } <= $radiusFractionalMaximum*$data->{'0.7'}->{'hostDarkMatterOnlyRadiusVirial'}) # ⎭ - ); -my $selection8 - = which - ( - ($data->{'0.7'}->{'satelliteBoundMass'}->($selection) >= $massBoundMinimum8 ) # ⎫ - & # ⎬ Select subhalos close to a bound mass of 10⁸M☉. - ($data->{'0.7'}->{'satelliteBoundMass'}->($selection) <= $massBoundMaximum8 ) # ⎭ - ); -my $selection9 - = which - ( - ($data->{'0.7'}->{'satelliteBoundMass'}->($selection) >= $massBoundMinimum9 ) # ⎫ - & # ⎬ Select subhalos close to a bound mass of 10⁹M☉. - ($data->{'0.7'}->{'satelliteBoundMass'}->($selection) <= $massBoundMaximum9 ) # ⎭ - ); - -# Compute the number density of selected subhalos in each tree. 
-(my $treeCount) = $model->group('Parameters')->group('mergerTreeBuildMasses')->attrGet('treeCount'); -my $subhaloSurfaceDensity8 = pdl double zeroes($treeCount->((0))->sclr()); -my $subhaloSurfaceDensity9 = pdl double zeroes($treeCount->((0))->sclr()); -for(my $i=0;$i<$treeCount;++$i) { - my $selectTree8 = which($data->{'0.7'}->{'mergerTreeIndex'}->($selection)->($selection8) == $i+1); - my $selectTree9 = which($data->{'0.7'}->{'mergerTreeIndex'}->($selection)->($selection9) == $i+1); - if ( nelem($selectTree8) > 0 ) { - my $countSubhalos8 = pdl double($selectTree8->dim(0)); - $subhaloSurfaceDensity8->(($i)) .= - +$countSubhalos8 - /2.0 - /PI - /$data->{'0.7'}->{'hostDarkMatterOnlyRadiusVirial'}->($selection)->($selectTree8)->((0))**2 - /$kilo **2 - /( - +$radiusFractionalMaximum**2 - -$radiusFractionalMinimum**2 - ) - /log10( - +$massBoundMaximum8 - /$massBoundMinimum8 - ); - } else { - $subhaloSurfaceDensity8->(($i)) .= 0.0; - } - if ( nelem($selectTree9) > 0 ) { - my $countSubhalos9 = pdl double($selectTree9->dim(0)); - $subhaloSurfaceDensity9->(($i)) .= - +$countSubhalos9 - /2.0 - /PI - /$data->{'0.7'}->{'hostDarkMatterOnlyRadiusVirial'}->($selection)->($selectTree9)->((0))**2 - /$kilo **2 - /( - +$radiusFractionalMaximum**2 - -$radiusFractionalMinimum**2 - ) - /log10( - +$massBoundMaximum9 - /$massBoundMinimum9 - ); - } else { - $subhaloSurfaceDensity9->(($i)) .= 0.0; - } -} - -# Set target values from the PonosV simulation of Fiacconi et al. (2016; https://ui.adsabs.harvard.edu/abs/2016ApJ...824..144F). -my $alphaPonosV = pdl 0.850; -my $subhaloSurfaceDensity8PonosV = pdl 0.006; - -# Compute percentage of realizations above/below the PonosV subhalo surface density, and report. -(my $above, my $below) = which_both($subhaloSurfaceDensity8 > $subhaloSurfaceDensity8PonosV); -my $percentageAbove = 100.0*double(nelem($above))/$treeCount->((0)); -my $percentageBelow = 100.0*double(nelem($below))/$treeCount->((0)); -my $statusSurfaceDensity = ($percentageAbove > 5.0 || $percentageBelow > 5.0) ? "SUCCESS" : "FAIL"; -print $statusSurfaceDensity.": Percentage of realizations above/below the PonosV subhalo surface density: ".sprintf("%5.1f",$percentageAbove)."/".sprintf("%5.1f",$percentageBelow)."\n"; - -# Compute the mean slope of the subhalo mass function, and report. -# We exclude models for which there are no subhalos present in one of the mass cuts. This probably introduces some bias. -my $nonZero = which(($subhaloSurfaceDensity8 > 0.0) & ($subhaloSurfaceDensity9 > 0.0)); -my $alphas = -log($subhaloSurfaceDensity8->($nonZero) /$subhaloSurfaceDensity9->($nonZero) ) - /log(sqrt($massBoundMinimum8*$massBoundMaximum8)/sqrt($massBoundMinimum9*$massBoundMaximum9)); -(my $aboveSlope, my $belowSlope) = which_both($alphas > $alphaPonosV); -my $percentageAboveSlope = 100.0*double(nelem($aboveSlope))/nelem($alphas); -my $percentageBelowSlope = 100.0*double(nelem($belowSlope))/nelem($alphas); -my $statusSlope = ($percentageAboveSlope > 5.0 || $percentageBelowSlope > 5.0) ? "SUCCESS" : "FAIL"; -print $statusSlope.": Percentage of realizations above/below the PonosV subhalo mass function slope: ".sprintf("%5.1f",$percentageAboveSlope)."/".sprintf("%5.1f",$percentageBelowSlope)."\n"; - -# Interface with git. 
-my $repo = Git->repository(Directory => $ENV{'GALACTICUS_EXEC_PATH'}); -my $lastRevision = $repo->command_oneline( [ 'rev-list', '--all' ], STDERR => 0 ); -(my $authorName = $repo->command_oneline( [ 'show', '-s', '--format="%an"', $lastRevision ], STDERR => 0 )) =~ s/"//g; -(my $authorEmail = $repo->command_oneline( [ 'show', '-s', '--format="%ae"', $lastRevision ], STDERR => 0 )) =~ s/"//g; -(my $authorDate = $repo->command_oneline( [ 'show', '-s', '--format="%aD"', $lastRevision ], STDERR => 0 )) =~ s/"//g; -(my $message = $repo->command_oneline( [ 'show', '-s', '--format="%s"' , $lastRevision ], STDERR => 0 )) =~ s/"//g; - -# Generate content for the validation metrics page. -my $output; -$output = -{ - repoUrl => "https://github.com/galacticusorg/galacticus", - parameterFile => "testSuite/parameters/validate_PonosV.xml", - commit => - { - author => - { - name => $authorName, - email => $authorEmail - }, - id => $lastRevision, - message => $message, - timestamp => $authorDate, - url => "https://github.com/galacticusorg/galacticus/commit/".$lastRevision - }, - surfaceDensity => - { - percentageBelow => $percentageBelow ->sclr(), - percentageAbove => $percentageAbove ->sclr(), - target => $subhaloSurfaceDensity8PonosV->sclr() - }, - slope => - { - percentageBelow => $percentageBelowSlope->sclr(), - percentageAbove => $percentageAboveSlope->sclr(), - target => $alphaPonosV ->sclr() - } -}; -my $json = JSON::PP->new()->pretty()->encode($output); -open(my $reportFile,">","outputs/results_PonosV.json"); -print $reportFile "window.PONOSV_DATA = "; -print $reportFile $json; -close($reportFile); - -exit; diff --git a/testSuite/validate-PonosV.py b/testSuite/validate-PonosV.py new file mode 100755 index 0000000000..410b8ca49b --- /dev/null +++ b/testSuite/validate-PonosV.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python3 +import sys +import os +import subprocess +import numpy as np +import h5py +import json +import codecs +from git import Repo + +# Run models to validate subhalo projected density against the PonosV simulation of Fiacconi et al. (2016; https://ui.adsabs.harvard.edu/abs/2016ApJ...824..144F). +# Andrew Benson (13-February-2024) + +# Create output path. +try: + os.mkdir("outputs") +except FileExistsError: + pass + +# Run the validate model. +status = subprocess.run("cd ..; export OMP_NUM_THREADS=2; ./Galacticus.exe testSuite/parameters/validate_PonosV.xml",shell=True) +if status.returncode != 0: + print("FAILED: PonosV validation model failed to run") + sys.exit() + +# Read data. +model = h5py.File("outputs/validate_PonosV.hdf5","r") +outputs = model ['Outputs' ] +redshift0p0 = outputs['Output2/nodeData'] +redshift0p7 = outputs['Output1/nodeData'] +data = { + "0.0": {}, + "0.7": {} +} +for propertyName in 'nodeIsIsolated', 'darkMatterOnlyRadiusVirial': + data['0.0'][propertyName] = redshift0p0[propertyName][:] +for propertyName in 'nodeIsIsolated', 'hostDarkMatterOnlyRadiusVirial', 'positionOrbitalX', 'positionOrbitalY', 'positionOrbitalZ', 'satelliteBoundMass', 'mergerTreeIndex': + data['0.7'][propertyName] = redshift0p7[propertyName][:] + +# Validate z=0.0 host halo virial radii. +hostsFinal = np.nonzero(data['0.0']['nodeIsIsolated'] == 1) +radiusVirialTarget = 0.6005 # Virial radius of PonosV from Table 1 of Fiacconi et al. (2016; https://ui.adsabs.harvard.edu/abs/2016ApJ...824..144F). 
+offsetFractional = (data['0.0']['darkMatterOnlyRadiusVirial'][hostsFinal]-radiusVirialTarget)/radiusVirialTarget +if np.any(offsetFractional > 0.01): + print("FAIL: PonosV z=0.0 host virial radii") + print(" Expected: "+str(radiusVirialTarget)) + print(" Found: "+str(data['0.0']['darkMatterOnlyRadiusVirial'][hostsFinal])) +else: + print("SUCCESS: PonosV z=0.0 host virial radii") + +# Select z=0.7 subhalos. +radiusFractionalMinimum = 0.00e0 +radiusFractionalMaximum = 0.04e0 +massBoundMinimum8 = 0.50e8 +massBoundMaximum8 = 2.00e8 +massBoundMinimum9 = 0.50e9 +massBoundMaximum9 = 2.00e9 +kilo = 1.00e3 +data['0.7']['radiusOrbital2D'] = np.sqrt(+data['0.7']['positionOrbitalX']**2+data['0.7']['positionOrbitalY']**2 ) +data['0.7']['radiusOrbital3D'] = np.sqrt(+data['0.7']['positionOrbitalX']**2+data['0.7']['positionOrbitalY']**2+data['0.7']['positionOrbitalZ']**2) +selection = np.nonzero( + (data['0.7']['nodeIsIsolated' ] == 0 ) # ] Select subhalos. + & + (data['0.7']['radiusOrbital3D' ] <= data['0.7']['hostDarkMatterOnlyRadiusVirial']) # ] Select subhalos within the host virial radius. + & + (data['0.7']['radiusOrbital2D' ] >= radiusFractionalMinimum*data['0.7']['hostDarkMatterOnlyRadiusVirial']) # ⎫ + & # ⎬ Select subhalos close to projected radius of 0.02 of host virial radius. + (data['0.7']['radiusOrbital2D' ] <= radiusFractionalMaximum*data['0.7']['hostDarkMatterOnlyRadiusVirial']) # ⎭ +) +selection8 = np.nonzero( + (data['0.7']['satelliteBoundMass'][selection] >= massBoundMinimum8 ) # ⎫ + & # ⎬ Select subhalos close to a bound mass of 10⁸M☉. + (data['0.7']['satelliteBoundMass'][selection] <= massBoundMaximum8 ) # ⎭ +) +selection9 = np.nonzero( + (data['0.7']['satelliteBoundMass'][selection] >= massBoundMinimum9 ) # ⎫ + & # ⎬ Select subhalos close to a bound mass of 10⁹M☉. + (data['0.7']['satelliteBoundMass'][selection] <= massBoundMaximum9 ) # ⎭ +) + +# Compute the number density of selected subhalos in each tree. +treeCount = model['Parameters/mergerTreeBuildMasses'].attrs['treeCount'][0] +subhaloSurfaceDensity8 = np.zeros(treeCount) +subhaloSurfaceDensity9 = np.zeros(treeCount) +for i in range(treeCount): + selectTree8 = np.nonzero(data['0.7']['mergerTreeIndex'][selection][selection8] == i+1) + selectTree9 = np.nonzero(data['0.7']['mergerTreeIndex'][selection][selection9] == i+1) + if len(selectTree8[0]) > 0: + countSubhalos8 = len(selectTree8[0]) + subhaloSurfaceDensity8[i] = +( + +countSubhalos8 + /2.0 + /np.pi + /data['0.7']['hostDarkMatterOnlyRadiusVirial'][selection][selectTree8][0]**2 + /kilo **2 + ) \ + /( + +radiusFractionalMaximum**2 + -radiusFractionalMinimum**2 + ) \ + /np.log10( + +massBoundMaximum8 + /massBoundMinimum8 + ) + else: + subhaloSurfaceDensity8[i] = 0.0 + if len(selectTree9[0]) > 0: + countSubhalos9 = len(selectTree9[0]) + subhaloSurfaceDensity9[i] = +( + +countSubhalos9 + /2.0 + /np.pi + /data['0.7']['hostDarkMatterOnlyRadiusVirial'][selection][selectTree9][0]**2 + /kilo **2 + ) \ + /( + +radiusFractionalMaximum**2 + -radiusFractionalMinimum**2 + ) \ + /np.log10( + +massBoundMaximum9 + /massBoundMinimum9 + ) + else: + subhaloSurfaceDensity9[i] = 0.0 + +# Set target values from the PonosV simulation of Fiacconi et al. (2016; https://ui.adsabs.harvard.edu/abs/2016ApJ...824..144F). +alphaPonosV = 0.850 +subhaloSurfaceDensity8PonosV = 0.006 + +# Compute percentage of realizations above/below the PonosV subhalo surface density, and report. 
+percentageAbove      = 100.0*np.count_nonzero(subhaloSurfaceDensity8 > subhaloSurfaceDensity8PonosV)/treeCount
+percentageBelow      = 100.0-percentageAbove
+statusSurfaceDensity = "SUCCESS" if percentageAbove > 5.0 and percentageBelow > 5.0 else "FAIL"
+print(f"{statusSurfaceDensity}: Percentage of realizations above/below the PonosV subhalo surface density: {percentageAbove:5.1f}/{percentageBelow:5.1f}")
+
+# Compute the mean slope of the subhalo mass function, and report.
+# We exclude models for which there are no subhalos present in one of the mass cuts. This probably introduces some bias.
+nonZero              = np.nonzero((subhaloSurfaceDensity8 > 0.0) & (subhaloSurfaceDensity9 > 0.0))
+alphas               = -np.log(        subhaloSurfaceDensity8[nonZero]     /        subhaloSurfaceDensity9[nonZero]    ) \
+                       /np.log(np.sqrt(massBoundMinimum8*massBoundMaximum8)/np.sqrt(massBoundMinimum9*massBoundMaximum9))
+percentageAboveSlope = 100.0*np.count_nonzero(alphas > alphaPonosV)/len(alphas)
+percentageBelowSlope = 100.0-percentageAboveSlope
+statusSlope          = "SUCCESS" if percentageAboveSlope > 5.0 and percentageBelowSlope > 5.0 else "FAIL"
+print(f"{statusSlope}: Percentage of realizations above/below the PonosV subhalo mass function slope: {percentageAboveSlope:5.1f}/{percentageBelowSlope:5.1f}")
+
+# Interface with git.
+repo         = Repo(os.environ['GALACTICUS_EXEC_PATH'])
+actor        = repo.head.commit.author
+lastRevision = repo.head.object.hexsha
+authorName   = actor.name
+authorEmail  = actor.email
+authorDate   = str(repo.head.commit.committed_datetime)
+message      = repo.head.commit.message
+
+# Generate content for the validation metrics page.
+output = {
+    "repoUrl"      : "https://github.com/galacticusorg/galacticus",
+    "parameterFile": "testSuite/parameters/validate_PonosV.xml",
+    "commit"       :
+    {
+        "author":
+        {
+            "name" : authorName,
+            "email": authorEmail
+        },
+        "id"       : lastRevision,
+        "message"  : message,
+        "timestamp": authorDate,
+        "url"      : "https://github.com/galacticusorg/galacticus/commit/"+lastRevision
+    },
+    "surfaceDensity":
+    {
+        "percentageBelow": percentageBelow,
+        "percentageAbove": percentageAbove,
+        "target"         : subhaloSurfaceDensity8PonosV
+    },
+    "slope":
+    {
+        "percentageBelow": percentageBelowSlope,
+        "percentageAbove": percentageAboveSlope,
+        "target"         : alphaPonosV
+    }
+}
+f = codecs.open("outputs/results_PonosV.json", "w", "utf-8")
+f.write("window.PONOSV_DATA = ")
+f.write(json.dumps(output,indent=4,ensure_ascii=False))
+f.close()
diff --git a/testSuite/validate-baryonicSuppression.pl b/testSuite/validate-baryonicSuppression.pl
deleted file mode 100755
index cc044be59a..0000000000
--- a/testSuite/validate-baryonicSuppression.pl
+++ /dev/null
@@ -1,306 +0,0 @@
-#!/usr/bin/env perl
-use strict;
-use warnings;
-use lib $ENV{'GALACTICUS_EXEC_PATH' }."/perl";
-use lib $ENV{'GALACTICUS_ANALYSIS_PERL_PATH'}."/perl";
-use PDL;
-use PDL::NiceSlice;
-use PDL::IO::HDF5;
-use PDL::Constants qw(PI);
-use JSON::PP;
-use Git;
-use Stats::Histograms;
-
-# Run models to validate suppression of the halo mass function by baryons.
-# Andrew Benson (01-April-2024)
-
-# Define the target data for the mass function suppression. This was read from Figure 2 of Zheng et al. (2024;
-# https://ui.adsabs.harvard.edu/abs/2024arXiv240317044Z).
-#
-# * 'withBaryons' corresponds to the "RI" model of Zheng et al.
-# * 'withBaryons_noReionization' corresponds to the "NR" model of Zheng et al.
-# -# Array indices correspond to redshift: -# -# * 4 ==> z = 9.27 -# * 3 ==> z = 5.72 -# * 2 ==> z = 3.06 -# * 1 ==> z = 0.00 -my @redshifts = ( "", "9.27", "5.72", "3.06", "0.00" ); -my $target; -$target->{'withBaryons' }->[4] = - pdl [ - 0.7115384615384613, - 0.6961538461538460, - 0.7576923076923074, - 0.6961538461538460, - 0.7730769230769230, - 0.7346153846153844, - 0.6423076923076921, - 0.9500000000000000 - ]; -$target->{'withBaryons_noReionization'}->[4] = - pdl [ - 0.7346153846153844, - 0.7423076923076921, - 0.8269230769230768, - 0.9038461538461537, - 0.9423076923076922, - 1.0346153846153845, - 0.9038461538461537, - 0.9807692307692306 - ]; -$target->{'withBaryons' }->[3] = - pdl [ - 0.6750000000000000, - 0.7321428571428572, - 0.6821428571428573, - 0.7392857142857143, - 0.5607142857142857, - 0.7464285714285716, - 0.9964285714285716, - 1.0000000000000000 - ]; -$target->{'withBaryons_noReionization'}->[3] = - pdl [ - 0.7321428571428572, - 0.7678571428571429, - 0.8250000000000001, - 0.9464285714285715, - 0.8964285714285715, - 0.9892857142857143, - 0.9964285714285716, - 1.0000000000000000 - ]; -$target->{'withBaryons' }->[2] = - pdl [ - 0.6685714285714284, - 0.7028571428571427, - 0.6876190476190476, - 0.7371428571428571, - 0.6609523809523808, - 0.9885714285714284, - 0.9885714285714284, - 1.0000000000000000 - ]; -$target->{'withBaryons_noReionization'}->[2] = - pdl [ - 0.6723809523809523, - 0.7104761904761904, - 0.8247619047619046, - 0.9314285714285714, - 0.9466666666666665, - 0.9999999999999999, - 0.9885714285714284, - 1.0000000000000000 - ]; -$target->{'withBaryons' }->[1] = - pdl [ - 0.6756756756756757, - 0.6540540540540540, - 0.7261261261261260, - 1.0000000000000000, - 1.0072072072072071, - 1.0000000000000000, - 1.0072072072072071, - 1.0000000000000000 - ]; -$target->{'withBaryons_noReionization'}->[1] = - pdl [ - 0.6756756756756757, - 0.6612612612612612, - 0.7333333333333334, - 1.0000000000000000, - 0.9927927927927928, - 1.0000000000000000, - 1.0000000000000000, - 1.0072072072072071 - ]; - -# Define χ² targets for each dataset. -my $chiSquaredTarget; -$chiSquaredTarget->{'withBaryons' }->[1] = 6.0; -$chiSquaredTarget->{'withBaryons_noReionization'}->[1] = 7.0; -$chiSquaredTarget->{'withBaryons' }->[2] = 4.0; -$chiSquaredTarget->{'withBaryons_noReionization'}->[2] = 3.0; -$chiSquaredTarget->{'withBaryons' }->[3] = 5.0; -$chiSquaredTarget->{'withBaryons_noReionization'}->[3] = 5.0; -$chiSquaredTarget->{'withBaryons' }->[4] = 3.0; -$chiSquaredTarget->{'withBaryons_noReionization'}->[4] = 4.0; - -# Make output directory. -system("mkdir -p outputs/"); - -# Run the validate model. -system("cd ..; ./Galacticus.exe testSuite/parameters/validate_baryonicSuppression_IGM_evolution.xml"); -unless ( $? == 0 ) { - print "FAIL: baryonic suppression (IGM evolution) validation model failed to run\n"; - exit; -} - -# Read data and repackage into a file suitable for re-reading by other models. 
-{
-    my $model         = new PDL::IO::HDF5( "outputs/validate_baryonicSuppression_IGM_evolution.hdf5");
-    my $igmFile       = new PDL::IO::HDF5(">outputs/validate_baryonicSuppression_IGM.hdf5"           );
-    my $data;
-    my $igmProperties = $model->group('igmProperties');
-    $data->{$_} = $igmProperties->dataset($_)->get()
-	foreach ( 'redshift', 'temperature', 'densityHydrogen1', 'densityHydrogen2', 'densityHelium1', 'densityHelium2', 'densityHelium3' );
-    $data->{'hIonizedFraction' } =  $data->{'densityHydrogen2'}                                                        /($data->{'densityHydrogen1'}+$data->{'densityHydrogen2'}                         );
-    $data->{'heIonizedFraction'} = ($data->{'densityHelium2'  }+$data->{'densityHelium3'}                             )/($data->{'densityHelium1'  }+$data->{'densityHelium2'  }+$data->{'densityHelium3'});
-    $data->{'electronFraction' } = ($data->{'densityHydrogen2'}+$data->{'densityHelium2'}+2.0*$data->{'densityHelium3'})/($data->{'densityHydrogen1'}+$data->{'densityHydrogen2'}                         );
-    $igmFile->dataset('redshift'         )->set($data->{'redshift'         });
-    $igmFile->dataset('matterTemperature')->set($data->{'temperature'      });
-    $igmFile->dataset('hIonizedFraction' )->set($data->{'hIonizedFraction' });
-    $igmFile->dataset('heIonizedFraction')->set($data->{'heIonizedFraction'});
-    $igmFile->dataset('electronFraction' )->set($data->{'electronFraction' });
-    $igmFile->attrSet(extrapolationAllowed => pdl long 1);
-    $igmFile->attrSet(fileFormat           => pdl long 1);
-}
-
-# Establish bins in halo mass.
-my $massHaloLogarithmicBins = pdl sequence(8)/7.0*3.5+4.0;
-my $haloMassFunction;
-
-# Run models with and without baryonic suppression.
-foreach my $suffix ( "withoutBaryons", "withBaryons", "withBaryons_noReionization" ) {
-    print "Running model: '".$suffix."'\n";
-    system("cd ..; ./Galacticus.exe testSuite/parameters/validate_baryonicSuppression_evolve_".$suffix.".xml");
-    unless ( $? == 0 ) {
-	print "FAIL: baryonic suppression (evolve: '".$suffix."') validation model failed to run\n";
-	exit;
-    }
-    my $model     = new PDL::IO::HDF5("outputs/validate_baryonicSuppression_evolve_".$suffix.".hdf5");
-    my $cosmology = $model->group('Parameters/cosmologyParameters');
-    (my $OmegaMatter, my $OmegaBaryon) = $cosmology->attrGet('OmegaMatter','OmegaBaryon');
-    my $fractionDarkMatter = ($OmegaMatter-$OmegaBaryon)/$OmegaMatter;
-    # Iterate over outputs.
-    for(my $outputIndex=1;$outputIndex<=4;++$outputIndex) {
-	print "\tProcessing output: ".$outputIndex."\n";
-	# Read all required data.
-	my $output = $model ->group('Outputs/Output'.$outputIndex);
-	my $nodes  = $output->group('nodeData'                   );
-	my $data;
-	$data->{$_} = $nodes->dataset($_)->get()
-	    foreach ( 'nodeIsIsolated', 'mergerTreeIndex', 'nodeIndex', 'parentIndex', 'hotHaloMass', 'diskMassGas', 'massHaloEnclosedCurrent', 'basicMass', 'nodeIsIsolated', 'mergerTreeWeight' );
-	# Identify isolated and subhalos.
-	(my $isolated, my $subhalo) = which_both($data->{'nodeIsIsolated'} == 1);
-	# Build an index mapping each halo to its host halo. We take advantage here of the depth-first ordering of the outputs.
-	my $index = pdl zeros(nelem($data->{'nodeIsIsolated'}));
-	for(my $i=0;$i<nelem($isolated);++$i) {
-	    my $indexStart = $i == 0 ? 0 : $isolated->(($i-1))+1;
-	    my $indexEnd   =               $isolated->(($i  ))  ;
-	    $index->($indexStart:$indexEnd) .= $isolated->(($i  ));
-	}
-	# Accumulate masses of isolated halos.
-	my $massHalo;
-	if ( $suffix eq "withoutBaryons" ) {
-	    # In models without baryons, the halo mass is just the dark matter mass.
- $massHalo = +$data->{'massHaloEnclosedCurrent'}; - } else { - # In models with baryons we assume that the hot gas of the host is distributed as the dark matter, so compute a - # correction factor to account for the differing virial density contrast definitions in Galacticus and Zheng et - # al. Gas in the disk is assumed to always be included within the virial radius. - my $correctionFactor = +$data->{'massHaloEnclosedCurrent'} - /$data->{'basicMass' }; - $massHalo = +$data->{'massHaloEnclosedCurrent'}*$fractionDarkMatter - +$data->{'hotHaloMass' }*$correctionFactor - +$data->{'diskMassGas' }; - # Accumulate masses of subhalos halos. We assume that these are also distributed as the dark matter of the host and so - # apply the same correction factor. - $massHalo->($index->($subhalo)) += $data->{'hotHaloMass'}->($subhalo)*$correctionFactor->($index->($subhalo)); - $massHalo->($index->($subhalo)) += $data->{'diskMassGas'}->($subhalo)*$correctionFactor->($index->($subhalo)); - } - # Construct final quantities needed for the mass function. - my $weight = $data ->{'mergerTreeWeight'}->($isolated) ; - my $massHaloLogarithmic = $massHalo ->($isolated)->log10(); - # Construct the mass function. - ($haloMassFunction->{$suffix}->[$outputIndex]->{'massFunction'}, $haloMassFunction->{$suffix}->[$outputIndex]->{'massFunctionError'}) - = &Stats::Histograms::Histogram($massHaloLogarithmicBins,$massHaloLogarithmic,$weight,differential => 1); - } -} - -# Compute ratios of mass functions with the dark matter only model mass function. -my $output; -my $chiSquared; -my $failed = 0; -for(my $outputIndex=1;$outputIndex<=4;++$outputIndex) { - foreach my $suffix ( "withBaryons", "withBaryons_noReionization" ) { - $haloMassFunction->{$suffix}->[$outputIndex]->{'ratio' } = $haloMassFunction->{$suffix}->[$outputIndex]->{'massFunction'}/$haloMassFunction->{'withoutBaryons'}->[$outputIndex]->{'massFunction'}; - $haloMassFunction->{$suffix}->[$outputIndex]->{'ratioError'} = - +sqrt( - +( - +$haloMassFunction->{$suffix }->[$outputIndex]->{'massFunctionError'} - /$haloMassFunction->{$suffix }->[$outputIndex]->{'massFunction' } - )**2 - +( - +$haloMassFunction->{'withoutBaryons'}->[$outputIndex]->{'massFunctionError'} - /$haloMassFunction->{'withoutBaryons'}->[$outputIndex]->{'massFunction' } - )**2 - ) - * $haloMassFunction->{$suffix }->[$outputIndex]->{'ratio' }; - $chiSquared->{$suffix}->[$outputIndex] = - +sum ( - +( - +$haloMassFunction->{$suffix}->[$outputIndex]->{'ratio' } - -$target ->{$suffix}->[$outputIndex] - ) **2 - / $haloMassFunction->{$suffix}->[$outputIndex]->{'ratioError'}**2 - ) - /nelem( $target ->{$suffix}->[$outputIndex]); - delete($haloMassFunction->{$suffix }->[$outputIndex]->{'massFunction' }); - delete($haloMassFunction->{$suffix }->[$outputIndex]->{'massFunctionError'}); - @{$output->{'model'}->{$suffix }->[$outputIndex]->{'ratio' }} = $haloMassFunction->{$suffix }->[$outputIndex]->{'ratio' }->list(); - @{$output->{'model'}->{$suffix }->[$outputIndex]->{'ratioError'}} = $haloMassFunction->{$suffix }->[$outputIndex]->{'ratioError'}->list(); - # Report. - my $status = $chiSquared->{$suffix}->[$outputIndex] < $chiSquaredTarget->{$suffix}->[$outputIndex] ? "SUCCESS" : "FAILED"; - $failed = 1 - if ( $status eq "FAILED" ); - my $inequality = $chiSquared->{$suffix}->[$outputIndex] < $chiSquaredTarget->{$suffix}->[$outputIndex] ? "<" : "≥" ; - print $status.": model '".$suffix.(" " x (length("withBaryons_noReionization")-length($suffix)))."' at z=".$redshifts[$outputIndex]." 
validation (χ² = ".sprintf("%5.3f",$chiSquared->{$suffix}->[$outputIndex])." ".$inequality." ".sprintf("%5.3f",$chiSquaredTarget->{$suffix}->[$outputIndex]).")\n"; - } - delete($haloMassFunction->{'withoutBaryons'}->[$outputIndex]->{'massFunction' }); - delete($haloMassFunction->{'withoutBaryons'}->[$outputIndex]->{'massFunctionError'}); -} - -# Interface with git. -my $repo = Git->repository(Directory => $ENV{'GALACTICUS_EXEC_PATH'}); -my $lastRevision = $repo->command_oneline( [ 'rev-list', '--all' ], STDERR => 0 ); -(my $authorName = $repo->command_oneline( [ 'show', '-s', '--format="%an"', $lastRevision ], STDERR => 0 )) =~ s/"//g; -(my $authorEmail = $repo->command_oneline( [ 'show', '-s', '--format="%ae"', $lastRevision ], STDERR => 0 )) =~ s/"//g; -(my $authorDate = $repo->command_oneline( [ 'show', '-s', '--format="%aD"', $lastRevision ], STDERR => 0 )) =~ s/"//g; -(my $message = $repo->command_oneline( [ 'show', '-s', '--format="%s"' , $lastRevision ], STDERR => 0 )) =~ s/"//g; - -# Generate content for the validation metrics page. -$output->{'repoUrl' } = "https://github.com/galacticusorg/galacticus"; -$output->{'parameterFile'} = "testSuite/parameters/validate_baryonicSuppression_evolve_withBaryons.xml"; -$output->{'commit' } = -{ - author => - { - name => $authorName, - email => $authorEmail - }, - id => $lastRevision, - message => $message, - timestamp => $authorDate, - url => "https://github.com/galacticusorg/galacticus/commit/".$lastRevision -}; -foreach my $suffix ( "withBaryons", "withBaryons_noReionization" ) { - for(my $outputIndex=1;$outputIndex<=4;++$outputIndex) { - @{$output->{'target'}->{$suffix}->[$outputIndex]} = $target->{$suffix}->[$outputIndex]->list(); - } -} -@{$output->{'redshift'}} = @redshifts; -@{$output->{'massHalo'}} = $massHaloLogarithmicBins->list(); -my $json = JSON::PP->new()->pretty()->encode($output); -open(my $reportFile,">","outputs/results_baryonicSuppression.json"); -print $reportFile "window.BARYONICSUPPRESSION_DATA = "; -print $reportFile $json; -close($reportFile); -if ( $failed ) { - print "model failed - results were:\n\n"; - system("cat outputs/results_baryonicSuppression.json"); -} - -exit; diff --git a/testSuite/validate-baryonicSuppression.py b/testSuite/validate-baryonicSuppression.py new file mode 100755 index 0000000000..91ac092a93 --- /dev/null +++ b/testSuite/validate-baryonicSuppression.py @@ -0,0 +1,315 @@ +#!/usr/bin/env python3 +import sys +import os +import subprocess +import numpy as np +import h5py +import json +import codecs +from git import Repo + +# Run models to validate suppression of the halo mass function by baryons. +# Andrew Benson (01-April-2024) + +# Define the target data for the mass function suppression. This was read from Figure 2 of Zheng et al. (2024; +# https://ui.adsabs.harvard.edu/abs/2024arXiv240317044Z). +# +# * 'withBaryons' corresponds to the "RI" model of Zheng et al. +# * 'withBaryons_noReionization' corresponds to the "NR" model of Zheng et al. 
+# +# Array indices correspond to redshift: +# +# * 4 ==> z = 9.27 +# * 3 ==> z = 5.72 +# * 2 ==> z = 3.06 +# * 1 ==> z = 0.00 +redshifts = ( "", "9.27", "5.72", "3.06", "0.00" ) +target = {"withBaryons": [np.array([])], "withBaryons_noReionization": [np.array([])]} +target['withBaryons' ].append( + np.array( + [ + 0.6756756756756757, + 0.6540540540540540, + 0.7261261261261260, + 1.0000000000000000, + 1.0072072072072071, + 1.0000000000000000, + 1.0072072072072071, + 1.0000000000000000 + ] + ) + ) +target['withBaryons_noReionization'].append( + np.array( + [ + 0.6756756756756757, + 0.6612612612612612, + 0.7333333333333334, + 1.0000000000000000, + 0.9927927927927928, + 1.0000000000000000, + 1.0000000000000000, + 1.0072072072072071 + ] + ) + ) +target['withBaryons' ].append( + np.array( + [ + 0.6685714285714284, + 0.7028571428571427, + 0.6876190476190476, + 0.7371428571428571, + 0.6609523809523808, + 0.9885714285714284, + 0.9885714285714284, + 1.0000000000000000 + ] + ) + ) +target['withBaryons_noReionization'].append( + np.array( + [ + 0.6723809523809523, + 0.7104761904761904, + 0.8247619047619046, + 0.9314285714285714, + 0.9466666666666665, + 0.9999999999999999, + 0.9885714285714284, + 1.0000000000000000 + ] + ) + ) +target['withBaryons' ].append( + np.array( + [ + 0.6750000000000000, + 0.7321428571428572, + 0.6821428571428573, + 0.7392857142857143, + 0.5607142857142857, + 0.7464285714285716, + 0.9964285714285716, + 1.0000000000000000 + ] + ) + ) +target['withBaryons_noReionization'].append( + np.array( + [ + 0.7321428571428572, + 0.7678571428571429, + 0.8250000000000001, + 0.9464285714285715, + 0.8964285714285715, + 0.9892857142857143, + 0.9964285714285716, + 1.0000000000000000 + ] + ) + ) +target['withBaryons' ].append( + np.array( + [ + 0.7115384615384613, + 0.6961538461538460, + 0.7576923076923074, + 0.6961538461538460, + 0.7730769230769230, + 0.7346153846153844, + 0.6423076923076921, + 0.9500000000000000 + ] + ) + ) +target['withBaryons_noReionization'].append( + np.array( + [ + 0.7346153846153844, + 0.7423076923076921, + 0.8269230769230768, + 0.9038461538461537, + 0.9423076923076922, + 1.0346153846153845, + 0.9038461538461537, + 0.9807692307692306 + ] + ) + ) + +# Define χ² targets for each dataset. +chiSquaredTarget = {"withBaryons": np.array([0.0,6.0,4.0,5.0,3.0]), "withBaryons_noReionization": np.array([0.0,7.0,3.0,5.0,4.0])} + +# Create output path. +try: + os.mkdir("outputs") +except FileExistsError: + pass + +# Run the validate model. +status = subprocess.run("cd ..; ./Galacticus.exe testSuite/parameters/validate_baryonicSuppression_IGM_evolution.xml",shell=True) +if status.returncode != 0: + print("FAILED: baryonic suppression (IGM evolution) validation model failed to run") + sys.exit() + +# Read data and repackage into a file suitable for re-reading by other models. 
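+# (In the derived quantities below, hIonizedFraction is n_HII/n_H, heIonizedFraction is
+# (n_HeII+n_HeIII)/n_He, and electronFraction counts free electrons per hydrogen nucleus,
+# (n_HII+n_HeII+2n_HeIII)/n_H.)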
+with h5py.File("outputs/validate_baryonicSuppression_IGM_evolution.hdf5","r") as model: + igmFile = h5py.File("outputs/validate_baryonicSuppression_IGM.hdf5","w") + igmProperties = model['igmProperties'] + data = {} + for propertyName in ( 'redshift', 'temperature', 'densityHydrogen1', 'densityHydrogen2', 'densityHelium1', 'densityHelium2', 'densityHelium3' ): + data[propertyName] = igmProperties[propertyName][:] + data['hIonizedFraction' ] = data['densityHydrogen2'] /(data['densityHydrogen1']+data['densityHydrogen2'] ) + data['heIonizedFraction'] = (data['densityHelium2' ]+data['densityHelium3'] )/(data['densityHelium1' ]+data['densityHelium2' ]+data['densityHelium3']) + data['electronFraction' ] = (data['densityHydrogen2']+data['densityHelium2']+2.0*data['densityHelium3'])/(data['densityHydrogen1']+data['densityHydrogen2'] ) + igmFile['redshift' ] = data['redshift' ] + igmFile['matterTemperature'] = data['temperature' ] + igmFile['hIonizedFraction' ] = data['hIonizedFraction' ] + igmFile['heIonizedFraction'] = data['heIonizedFraction'] + igmFile['electronFraction' ] = data['electronFraction' ] + igmFile.attrs['extrapolationAllowed'] = 1 + igmFile.attrs['fileFormat' ] = 1 + +# Establish bins in halo mass. +massHaloLogarithmicBins = np.linspace(4.0,7.5,8) +haloMassFunction = {} + +# Run models with and without baryonic suppression. +for suffix in "withoutBaryons", "withBaryons", "withBaryons_noReionization": + print("Running model: '"+suffix+"'") + status = subprocess.run("cd ..; ./Galacticus.exe testSuite/parameters/validate_baryonicSuppression_evolve_"+suffix+".xml",shell=True) + if status.returncode != 0: + print("FAILED: baryonic suppression (evolve: '"+suffix+"') validation model failed to run") + sys.exit() + model = h5py.File("outputs/validate_baryonicSuppression_evolve_"+suffix+".hdf5","r") + cosmology = model['Parameters/cosmologyParameters'] + OmegaMatter = cosmology.attrs['OmegaMatter'] + OmegaBaryon = cosmology.attrs['OmegaBaryon'] + fractionDarkMatter = (OmegaMatter-OmegaBaryon)/OmegaMatter + haloMassFunction[suffix] = [None] * 5 + # Iterate over outputs. + for outputIndex in range(1,5): + print("\tProcessing output: "+str(outputIndex)) + # Read all required data. + output = model['Outputs/Output'+str(outputIndex)] + nodes = output['nodeData'] + data = {} + for propertyName in 'nodeIsIsolated', 'mergerTreeIndex', 'nodeIndex', 'parentIndex', 'hotHaloMass', 'diskMassGas', 'massHaloEnclosedCurrent', 'basicMass', 'nodeIsIsolated', 'mergerTreeWeight': + data[propertyName] = nodes[propertyName][:] + # Identify isolated and subhalos. + isolated = np.nonzero(data['nodeIsIsolated'] == 1)[0] + subhalo = np.nonzero(data['nodeIsIsolated'] == 0)[0] + # Build an index mapping each halo to its host halo. We take advantage here of the depth-first ordering of the outputs. + index = np.zeros(len(data['nodeIsIsolated']),dtype='int32') + for i in range(len(isolated)): + indexStart = 0 if i == 0 else isolated[i-1]+1 + indexEnd = isolated[i ]+1 + index[indexStart:indexEnd] = isolated[i ] + # Accumulate masses of isolated halos. + if suffix == "withoutBaryons": + # In models without baryons, the halo mass is just the dark matter mass. + massHalo = +data['massHaloEnclosedCurrent'] + else: + # In models with baryons we assume that the hot gas of the host is distributed as the dark matter, so compute a + # correction factor to account for the differing virial density contrast definitions in Galacticus and Zheng et + # al. Gas in the disk is assumed to always be included within the virial radius. 
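+            # Explicitly: massHalo = massEnclosed*fractionDarkMatter + massHotGas*(massEnclosed/massBasic) + massGasDisk.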
+            correctionFactor = +data['massHaloEnclosedCurrent'] \
+                               /data['basicMass'              ]
+            massHalo         = +data['massHaloEnclosedCurrent']*fractionDarkMatter \
+                               +data['hotHaloMass'            ]*correctionFactor   \
+                               +data['diskMassGas'            ]
+            # Accumulate masses of subhalos. We assume that these are also distributed as the dark matter of the host and so
+            # apply the same correction factor. np.add.at() is used here since a chained fancy-indexed assignment (e.g.
+            # massHalo[index][subhalo] += ...) would update only a temporary copy, and repeated host indices must accumulate.
+            np.add.at(massHalo,index[subhalo],data['hotHaloMass'][subhalo]*correctionFactor[index[subhalo]])
+            np.add.at(massHalo,index[subhalo],data['diskMassGas'][subhalo]*correctionFactor[index[subhalo]])
+        # Construct final quantities needed for the mass function.
+        weight              = data['mergerTreeWeight'][isolated]
+        massHaloLogarithmic = np.log10(massHalo[isolated])
+        # Construct the mass function.
+        massHaloLogarithmicBinWidth  = massHaloLogarithmicBins[1]-massHaloLogarithmicBins[0]
+        massHaloLogarithmicBinsEdges = np.append(massHaloLogarithmicBins-0.5*massHaloLogarithmicBinWidth,massHaloLogarithmicBins[-1]+0.5*massHaloLogarithmicBinWidth)
+        haloMassFunction[suffix][outputIndex] = {}
+        haloMassFunction[suffix][outputIndex]['massFunction'     ] =         np.histogram(massHaloLogarithmic,massHaloLogarithmicBinsEdges,weights=weight   )[0]
+        haloMassFunction[suffix][outputIndex]['massFunctionError'] = np.sqrt(np.histogram(massHaloLogarithmic,massHaloLogarithmicBinsEdges,weights=weight**2)[0])
+
+# Compute ratios of mass functions with the dark matter only model mass function.
+output     = {"model": {}, "target": {}}
+chiSquared = {}
+failed     = False
+for suffix in "withBaryons", "withBaryons_noReionization":
+    chiSquared      [suffix] = [None] * 5
+    output["model" ][suffix] = [None] * 5
+    output["target"][suffix] = [None] * 5
+    for outputIndex in range(1,5):
+        output["model"][suffix][outputIndex] = {}
+        haloMassFunction[suffix][outputIndex]['ratio'     ] = haloMassFunction[suffix][outputIndex]['massFunction']/haloMassFunction['withoutBaryons'][outputIndex]['massFunction']
+        haloMassFunction[suffix][outputIndex]['ratioError'] = +np.sqrt(
+                                                                       +(
+                                                                         +haloMassFunction[suffix          ][outputIndex]['massFunctionError']
+                                                                         /haloMassFunction[suffix          ][outputIndex]['massFunction'     ]
+                                                                        )**2
+                                                                       +(
+                                                                         +haloMassFunction['withoutBaryons'][outputIndex]['massFunctionError']
+                                                                         /haloMassFunction['withoutBaryons'][outputIndex]['massFunction'     ]
+                                                                        )**2
+                                                                      ) \
+                                                              *          haloMassFunction[suffix           ][outputIndex]['ratio'            ]
+        chiSquared[suffix][outputIndex] = +np.sum(
+                                                  +(
+                                                    +haloMassFunction[suffix][outputIndex]['ratio'     ]
+                                                    -target          [suffix][outputIndex]
+                                                   )**2
+                                                  / haloMassFunction[suffix][outputIndex]['ratioError']**2
+                                                 ) \
+                                          /len(     target          [suffix][outputIndex])
+        output['model'][suffix][outputIndex]['ratio'     ] = list(haloMassFunction[suffix][outputIndex]['ratio'     ])
+        output['model'][suffix][outputIndex]['ratioError'] = list(haloMassFunction[suffix][outputIndex]['ratioError'])
+        # Report.
+        status = "SUCCESS" if chiSquared[suffix][outputIndex] < chiSquaredTarget[suffix][outputIndex] else "FAILED"
+        if status == "FAILED":
+            failed = True
+        inequality = "<" if chiSquared[suffix][outputIndex] < chiSquaredTarget[suffix][outputIndex] else "≥"
+        padding    = " " * (len("withBaryons_noReionization")-len(suffix))
+        print(f"{status}: model '{suffix}'{padding} at z={redshifts[outputIndex]} validation (χ² = {chiSquared[suffix][outputIndex]:5.3f} {inequality} {chiSquaredTarget[suffix][outputIndex]:5.3f})")
+
+# Interface with git.
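+# (Uses GitPython's Repo API; repo.head.commit supplies the hexsha, author, date, and message
+# that the Perl version obtained via `git show -s --format=...`.)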
+repo         = Repo(os.environ['GALACTICUS_EXEC_PATH'])
+actor        = repo.head.commit.author
+lastRevision = repo.head.object.hexsha
+authorName   = actor.name
+authorEmail  = actor.email
+authorDate   = str(repo.head.commit.committed_datetime)
+message      = repo.head.commit.message
+
+# Generate content for the validation metrics page.
+output['repoUrl'      ] = "https://github.com/galacticusorg/galacticus"
+output['parameterFile'] = "testSuite/parameters/validate_baryonicSuppression_evolve_withBaryons.xml"
+output['commit'       ] = {
+    "author":
+    {
+        "name" : authorName,
+        "email": authorEmail
+    },
+    "id"       : lastRevision,
+    "message"  : message,
+    "timestamp": authorDate,
+    "url"      : "https://github.com/galacticusorg/galacticus/commit/"+lastRevision
+}
+for suffix in "withBaryons", "withBaryons_noReionization":
+    for outputIndex in range(1,5):
+        output['target'][suffix][outputIndex] = list(target[suffix][outputIndex])
+output['redshift'] = redshifts
+output['massHalo'] = list(massHaloLogarithmicBins)
+f = codecs.open("outputs/results_baryonicSuppression.json", "w", "utf-8")
+f.write("window.BARYONICSUPPRESSION_DATA = ")
+f.write(json.dumps(output,indent=4,ensure_ascii=False))
+f.close()
+if failed:
+    print("model failed - results were:\n")
+    with open("outputs/results_baryonicSuppression.json", "r") as file:
+        for line in file:
+            print(line.replace("\n",""))
+
diff --git a/testSuite/validate-darkMatterOnlySubhalos.pl b/testSuite/validate-darkMatterOnlySubhalos.pl
deleted file mode 100755
index 46b8dfe9a8..0000000000
--- a/testSuite/validate-darkMatterOnlySubhalos.pl
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env perl
-use strict;
-use warnings;
-use lib $ENV{'GALACTICUS_EXEC_PATH'}."/perl";
-use PDL;
-use PDL::NiceSlice;
-use PDL::IO::HDF5;
-use Galacticus::Validation;
-
-# Run models to validate a dark matter only subhalo evolution model.
-# Andrew Benson (05-August-2022)
-
-# Make output directory.
-system("mkdir -p outputs/");
-
-# Run the validate model.
-system("cd ..; ./Galacticus.exe testSuite/parameters/validate_darkMatterOnlySubHalos.xml");
-unless ( $? == 0 ) {
-    print "FAIL: dark matter-only subhalos validation model failed to run\n";
-    exit;
-}
-
-# Extract and validate the likelihoods.
-&Galacticus::Validation::extract("outputs/validate_darkMatterOnlySubHalos.hdf5","Dark Matter Only Subhalos","darkMatterOnlySubhalos","testSuite/parameters/validate_darkMatterOnlySubHalos.xml");
-
-print "SUCCESS: dark matter-only subhalos validation model\n";
-
-exit;
diff --git a/testSuite/validate-darkMatterOnlySubhalos.py b/testSuite/validate-darkMatterOnlySubhalos.py
new file mode 100755
index 0000000000..5ef9f2280c
--- /dev/null
+++ b/testSuite/validate-darkMatterOnlySubhalos.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+import sys
+import os
+import subprocess
+import validate
+
+# Run models to validate a dark matter only subhalo evolution model.
+# Andrew Benson (05-August-2022)
+
+# Create output path. (os.makedirs is used so that the parent "outputs" directory is also created if needed.)
+try:
+    os.makedirs("outputs/idealizedSubhaloSimulations")
+except FileExistsError:
+    pass
+
+# Run the validation model.
+status = subprocess.run("cd ..; ./Galacticus.exe testSuite/parameters/validate_darkMatterOnlySubHalos.xml",shell=True)
+if status.returncode != 0:
+    print("FAILED: dark matter-only subhalos validation model failed to run")
+    sys.exit()
+
+# Extract and validate the likelihoods.
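+# (The `validate` module is the local Python replacement for the Perl Galacticus::Validation helper.)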
+validate.extract("outputs/validate_darkMatterOnlySubHalos.hdf5","Dark Matter Only Subhalos","darkMatterOnlySubhalos","testSuite/parameters/validate_darkMatterOnlySubHalos.xml")
+
+print("SUCCESS: dark matter-only subhalos validation model")
diff --git a/testSuite/validate-milkyWay.pl b/testSuite/validate-milkyWay.pl
deleted file mode 100755
index 575ba538da..0000000000
--- a/testSuite/validate-milkyWay.pl
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env perl
-use strict;
-use warnings;
-use lib $ENV{'GALACTICUS_EXEC_PATH'}."/perl";
-use PDL;
-use PDL::NiceSlice;
-use PDL::IO::HDF5;
-use Galacticus::Validation;
-
-# Run models to validate a Milky Way model.
-# Andrew Benson (10-August-2022)
-
-# Make output directory.
-system("mkdir -p outputs/");
-
-# Run the validate model.
-system("cd ..; ./Galacticus.exe testSuite/parameters/validate_milkyWay.xml");
-unless ( $? == 0 ) {
-    print "FAIL: Milky Way validation model failed to run\n";
-    exit;
-}
-
-# Extract and validate the likelihoods.
-&Galacticus::Validation::extract("outputs/validate_milkyWay.hdf5","Milky Way model","milkyWayModel","testSuite/parameters/validate_milkyWay.xml");
-
-print "SUCCESS: Milky Way validation model\n";
-
-exit;
diff --git a/testSuite/validate-milkyWay.py b/testSuite/validate-milkyWay.py
new file mode 100755
index 0000000000..3286f7b53f
--- /dev/null
+++ b/testSuite/validate-milkyWay.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+import sys
+import os
+import subprocess
+import validate
+
+# Run models to validate a Milky Way model.
+# Andrew Benson (10-August-2022)
+
+# Create output path. (os.makedirs is used so that the parent "outputs" directory is also created if needed.)
+try:
+    os.makedirs("outputs/idealizedSubhaloSimulations")
+except FileExistsError:
+    pass
+
+# Run the validation model.
+status = subprocess.run("cd ..; ./Galacticus.exe testSuite/parameters/validate_milkyWay.xml",shell=True)
+if status.returncode != 0:
+    print("FAILED: Milky Way validation model failed to run")
+    sys.exit()
+
+# Extract and validate the likelihoods.
+validate.extract("outputs/validate_milkyWay.hdf5","Milky Way model","milkyWayModel","testSuite/parameters/validate_milkyWay.xml")
+
+print("SUCCESS: Milky Way validation model")
From c6c93ad613d2b3895bf2442d3fc8ccf7e62d7761 Mon Sep 17 00:00:00 2001
From: Andrew Benson
Date: Mon, 25 Nov 2024 14:08:35 -0800
Subject: [PATCH 06/11] fix: Open files in read-only mode to prevent the HDF5
 library from attempting to lock the files

We do our own locking as necessary.
---
 source/intergalactic_medium.state.RecFast.F90 |  6 ++--
 ..._mass_variance.filtered_power_spectrum.F90 | 32 +++++++++----------
 2 files changed, 19 insertions(+), 19 deletions(-)

diff --git a/source/intergalactic_medium.state.RecFast.F90 b/source/intergalactic_medium.state.RecFast.F90
index cac91c1ced..56abdc9d88 100644
--- a/source/intergalactic_medium.state.RecFast.F90
+++ b/source/intergalactic_medium.state.RecFast.F90
@@ -141,9 +141,9 @@ function recFastConstructorInternal(cosmologyFunctions_,cosmologyParameters_) re
     if (File_Exists(char(self%fileName))) then
        ! Check file version number.
        !$ call hdf5Access%set()
-       call outputFile%openFile     (char(self%fileName),overwrite=.false.                 )
-       call outputFile%readAttribute('fileFormat'       ,          fileFormatVersion       )
-       call outputFile%close        (                                                      )
+       call outputFile%openFile     (char(self%fileName),overwrite=.false. ,readOnly=.true.)
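+       ! Opening read-only means the HDF5 library need not lock the file; access is serialized via hdf5Access instead.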
+ call outputFile%readAttribute('fileFormat' , fileFormatVersion ) + call outputFile%close ( ) !$ call hdf5Access%unset() buildFile=fileFormatVersion /= fileFormatVersionCurrent else diff --git a/source/structure_formation.cosmological_mass_variance.filtered_power_spectrum.F90 b/source/structure_formation.cosmological_mass_variance.filtered_power_spectrum.F90 index 4d35f960d6..27f5663680 100644 --- a/source/structure_formation.cosmological_mass_variance.filtered_power_spectrum.F90 +++ b/source/structure_formation.cosmological_mass_variance.filtered_power_spectrum.F90 @@ -1308,22 +1308,22 @@ subroutine filteredPowerFileRead(self) if (.not.File_Exists(char(self%fileName))) return call displayMessage('reading σ(M) data from: '//self%fileName,verbosityLevelWorking) !$ call hdf5Access%set() - call dataFile%openFile (char(self%fileName) ,overWrite =.false.) - call dataFile%readDataset ('times' , timesTmp ) - call dataFile%readDataset ('mass' , massTmp ) - call dataFile%readDataset ('rootVariance' , rootVarianceTmp ) - call dataFile%readDataset ('rootVarianceUnique' , rootVarianceUniqueTmp ) - call dataFile%readDataset ('indexUnique' , indexTmp ) - call dataFile%readDataset ('uniqueSize' , uniqueSizeTmp ) - call dataFile%readAttribute('sigma8' ,self%sigma8Value ) - call dataFile%readAttribute('sigmaNormalization' ,self%sigmaNormalization ) - call dataFile%readAttribute('massMinimum' ,self%massMinimum ) - call dataFile%readAttribute('massMaximum' ,self%massMaximum ) - call dataFile%readAttribute('timeMinimum' ,self%timeMinimum ) - call dataFile%readAttribute('timeMaximum' ,self%timeMaximum ) - call dataFile%readAttribute('timeMinimumLogarithmic' ,self%timeMinimumLogarithmic ) - call dataFile%readAttribute('timeLogarithmicDeltaInverse',self%timeLogarithmicDeltaInverse ) - call dataFile%close ( ) + call dataFile%openFile (char(self%fileName) ,overWrite =.false.,readOnly=.true.) 
+ call dataFile%readDataset ('times' , timesTmp ) + call dataFile%readDataset ('mass' , massTmp ) + call dataFile%readDataset ('rootVariance' , rootVarianceTmp ) + call dataFile%readDataset ('rootVarianceUnique' , rootVarianceUniqueTmp ) + call dataFile%readDataset ('indexUnique' , indexTmp ) + call dataFile%readDataset ('uniqueSize' , uniqueSizeTmp ) + call dataFile%readAttribute('sigma8' ,self%sigma8Value ) + call dataFile%readAttribute('sigmaNormalization' ,self%sigmaNormalization ) + call dataFile%readAttribute('massMinimum' ,self%massMinimum ) + call dataFile%readAttribute('massMaximum' ,self%massMaximum ) + call dataFile%readAttribute('timeMinimum' ,self%timeMinimum ) + call dataFile%readAttribute('timeMaximum' ,self%timeMaximum ) + call dataFile%readAttribute('timeMinimumLogarithmic' ,self%timeMinimumLogarithmic ) + call dataFile%readAttribute('timeLogarithmicDeltaInverse',self%timeLogarithmicDeltaInverse ) + call dataFile%close ( ) !$ call hdf5Access%unset() if (allocated(self%times )) deallocate(self%times ) if (allocated(self%rootVarianceTable )) deallocate(self%rootVarianceTable ) From 69ac047cd72b98fc0e18632e0a96943d9b54cd3f Mon Sep 17 00:00:00 2001 From: Andrew Benson Date: Mon, 25 Nov 2024 14:10:48 -0800 Subject: [PATCH 07/11] fix: Use a pointer to `parameters` instead of a copy to avoid problems writing to the output file `Parameters` group --- source/tasks.halo_mass_function.F90 | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/source/tasks.halo_mass_function.F90 b/source/tasks.halo_mass_function.F90 index 840cc46bfa..3053ef5cad 100644 --- a/source/tasks.halo_mass_function.F90 +++ b/source/tasks.halo_mass_function.F90 @@ -79,7 +79,7 @@ Implementation of a task which computes and outputs the halo mass function and r double precision , allocatable, dimension(:) :: fractionModeMasses type (virialDensityContrastList ), allocatable, dimension(:) :: virialDensityContrasts ! Pointer to the parameters for this task. - type (inputParameters ) :: parameters + type (inputParameters ), pointer :: parameters => null() contains !![ @@ -401,8 +401,7 @@ function haloMassFunctionConstructorInternal( !!] - self%parameters=inputParameters(parameters) - call self%parameters%parametersGroupCopy(parameters) + self%parameters => parameters allocate(self%virialDensityContrasts(size(virialDensityContrasts))) do i=1,size(virialDensityContrasts) self%virialDensityContrasts(i)%label=virialDensityContrasts(i)%label From 7929e4e988df5219928a9b67a7338b5cd6ce3185 Mon Sep 17 00:00:00 2001 From: Andrew Benson Date: Mon, 25 Nov 2024 19:46:08 -0800 Subject: [PATCH 08/11] fix: Ensure HDF5 file is closed --- testSuite/validate-baryonicSuppression.py | 1 + 1 file changed, 1 insertion(+) diff --git a/testSuite/validate-baryonicSuppression.py b/testSuite/validate-baryonicSuppression.py index 91ac092a93..108bb3ed8f 100755 --- a/testSuite/validate-baryonicSuppression.py +++ b/testSuite/validate-baryonicSuppression.py @@ -170,6 +170,7 @@ igmFile['electronFraction' ] = data['electronFraction' ] igmFile.attrs['extrapolationAllowed'] = 1 igmFile.attrs['fileFormat' ] = 1 + igmFile.close() # Establish bins in halo mass. 
 massHaloLogarithmicBins = np.linspace(4.0,7.5,8)
 
From 5a9ca4f656cf337f40ce5721445d5095bb126de5 Mon Sep 17 00:00:00 2001
From: Andrew Benson
Date: Tue, 26 Nov 2024 11:25:17 -0800
Subject: [PATCH 09/11] fix: Ensure HDF5 files are opened in read-only mode to
 avoid the HDF5 library attempting to lock them

---
 source/output.analyses.Local_Group.occupation_fraction.F90      | 2 +-
 ...put.analyses.Local_Group.stellar_mass_halo_mass_relation.F90 | 2 +-
 ...lyses.stellar_vs_halo_mass_relation.COSMOS_Leauthaud2012.F90 | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/source/output.analyses.Local_Group.occupation_fraction.F90 b/source/output.analyses.Local_Group.occupation_fraction.F90
index a3c89deec5..e698c5d99f 100644
--- a/source/output.analyses.Local_Group.occupation_fraction.F90
+++ b/source/output.analyses.Local_Group.occupation_fraction.F90
@@ -218,7 +218,7 @@ function localGroupOccupationFractionConstructorInternal(outputTimes_,positionTy
     ! Construct the target distribution.
     !$ call hdf5Access%set       ()
-    call fileData%openFile(char(inputPath(pathTypeDataStatic))//"observations/stellarHaloMassRelation/fractionOccupation_Local_Group_Nadler2020.hdf5")
+    call fileData%openFile(char(inputPath(pathTypeDataStatic))//"observations/stellarHaloMassRelation/fractionOccupation_Local_Group_Nadler2020.hdf5",readOnly=.true.)
     call fileData%readDataset('massHalo'          ,massHaloData          )
     call fileData%readDataset('fractionOccupation',fractionOccupationData)
     call fileData%close      (                                           )
diff --git a/source/output.analyses.Local_Group.stellar_mass_halo_mass_relation.F90 b/source/output.analyses.Local_Group.stellar_mass_halo_mass_relation.F90
index 7d6d3db05b..dd80bf8668 100644
--- a/source/output.analyses.Local_Group.stellar_mass_halo_mass_relation.F90
+++ b/source/output.analyses.Local_Group.stellar_mass_halo_mass_relation.F90
@@ -218,7 +218,7 @@ function localGroupStellarMassHaloMassRelationConstructorInternal(outputTimes_,p
     ! Construct the target distribution.
     !$ call hdf5Access%set       ()
-    call fileData%openFile(char(inputPath(pathTypeDataStatic))//"observations/stellarHaloMassRelation/stellarHaloMassRelation_Local_Group_Nadler2020.hdf5")
+    call fileData%openFile(char(inputPath(pathTypeDataStatic))//"observations/stellarHaloMassRelation/stellarHaloMassRelation_Local_Group_Nadler2020.hdf5",readOnly=.true.)
     call fileData%readDataset('massHalo'          ,massHaloData          )
     call fileData%readDataset('massStellar'       ,massStellarData       )
     call fileData%readDataset('massStellarScatter',massStellarScatterData)
diff --git a/source/output.analyses.stellar_vs_halo_mass_relation.COSMOS_Leauthaud2012.F90 b/source/output.analyses.stellar_vs_halo_mass_relation.COSMOS_Leauthaud2012.F90
index c0b6d5422a..a6debb3415 100644
--- a/source/output.analyses.stellar_vs_halo_mass_relation.COSMOS_Leauthaud2012.F90
+++ b/source/output.analyses.stellar_vs_halo_mass_relation.COSMOS_Leauthaud2012.F90
@@ -292,7 +292,7 @@ function stellarVsHaloMassRelationLeauthaud2012ConstructorInternal(redshiftInter
     !!]
     ! Read observational data and convert masses to logarithmic.
     !$ call hdf5Access%set()
-    call fileData%openFile(char(inputPath(pathTypeDataStatic))//"observations/stellarHaloMassRelation/stellarHaloMassRelation_COSMOS_Leauthaud2012.hdf5")
+    call fileData%openFile(char(inputPath(pathTypeDataStatic))//"observations/stellarHaloMassRelation/stellarHaloMassRelation_COSMOS_Leauthaud2012.hdf5",readOnly=.true.)
groupRedshiftName=var_str('redshiftInterval')//redshiftInterval groupRedshift=fileData%openGroup(char(groupRedshiftName)) call groupRedshift%readDataset('massStellar' ,massStellarData ) From a2232d472d2e03e0f7b22f009bbf5b025dc9281a Mon Sep 17 00:00:00 2001 From: Andrew Benson Date: Tue, 26 Nov 2024 21:52:24 -0800 Subject: [PATCH 10/11] fix: Adjust test tolerance --- testSuite/validate-baryonicSuppression.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testSuite/validate-baryonicSuppression.py b/testSuite/validate-baryonicSuppression.py index 108bb3ed8f..1f55ab451e 100755 --- a/testSuite/validate-baryonicSuppression.py +++ b/testSuite/validate-baryonicSuppression.py @@ -139,7 +139,7 @@ ) # Define χ² targets for each dataset. -chiSquaredTarget = {"withBaryons": np.array([0.0,6.0,4.0,5.0,3.0]), "withBaryons_noReionization": np.array([0.0,7.0,3.0,5.0,4.0])} +chiSquaredTarget = {"withBaryons": np.array([0.0,6.0,4.5,5.0,3.0]), "withBaryons_noReionization": np.array([0.0,7.0,3.0,5.0,4.0])} # Create output path. try: From 59a239b5f4a564e507d4b5a2bfc194575b0e3955 Mon Sep 17 00:00:00 2001 From: Andrew Benson Date: Sun, 1 Dec 2024 13:57:06 -0800 Subject: [PATCH 11/11] fix: Switch back to `latest` build environment --- .github/workflows/cicd.yml | 18 +++++++++--------- .github/workflows/prChecks.yml | 10 +++++----- .github/workflows/profile.yml | 2 +- .github/workflows/testCode.yml | 2 +- .github/workflows/testModel.yml | 2 +- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 8ad2db3538..d64158c53e 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -54,7 +54,7 @@ jobs: ### Static executable and test codes Build-Executable-Linux: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." - run: echo "This job is now running on a ${{ runner.os }} server." @@ -114,7 +114,7 @@ jobs: ### Non-static executable Build-Executable-Linux-Non-Static: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." - run: echo "This job is now running on a ${{ runner.os }} server." @@ -151,7 +151,7 @@ jobs: ### Executable instrumented Build-Executables-Instrumented-Linux: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." - run: echo "This job is now running on a ${{ runner.os }} server." @@ -180,7 +180,7 @@ jobs: ### Executable MPI Build-Executables-MPI-Linux: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." - run: echo "This job is now running on a ${{ runner.os }} server." @@ -211,7 +211,7 @@ jobs: ### Executable debugging Build-Executables-Debugging-Linux: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." 
- run: echo "This job is now running on a ${{ runner.os }} server." @@ -238,7 +238,7 @@ jobs: ### Library Build-Library-Linux: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." - run: echo "This job is now running on a ${{ runner.os }} server." @@ -308,7 +308,7 @@ jobs: ### Documentation Build-Documentation-Linux: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." - run: echo "This job is now running on a ${{ runner.os }} server." @@ -337,7 +337,7 @@ jobs: ### Tools Build-Tools: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest needs: Build-Executable-Linux steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." @@ -1311,7 +1311,7 @@ jobs: ## Python Interface Python-Interface: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest needs: Build-Library-Linux steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." diff --git a/.github/workflows/prChecks.yml b/.github/workflows/prChecks.yml index 00d4eb94c4..1a7ac49762 100644 --- a/.github/workflows/prChecks.yml +++ b/.github/workflows/prChecks.yml @@ -82,7 +82,7 @@ jobs: # Validate Perl scripts Validate-Perl-Scripts: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - uses: actions/checkout@v4 - name: Check out repository analysis-perl @@ -128,7 +128,7 @@ jobs: # Validate Perl modules Validate-Perl-Modules: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - uses: actions/checkout@v4 - name: "Set environmental variables" @@ -162,7 +162,7 @@ jobs: # Fortran static analysis Fortran-Static-Analysis: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - uses: actions/checkout@v4 - name: "Set environmental variables" @@ -192,7 +192,7 @@ jobs: # Embedded XML and LaTeX checks Embedded-XML-LaTeX: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - name: Check out repository code uses: actions/checkout@v4 @@ -248,7 +248,7 @@ jobs: # Spell check LaTeX files Spell-Check-LaTeX: runs-on: ubuntu-latest - container: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + container: ghcr.io/galacticusorg/buildenv:latest steps: - name: Check out repository code uses: actions/checkout@v4 diff --git a/.github/workflows/profile.yml b/.github/workflows/profile.yml index 80311b155a..441695e6b0 100644 --- a/.github/workflows/profile.yml +++ b/.github/workflows/profile.yml @@ -23,7 +23,7 @@ jobs: Profile-Model: runs-on: ubuntu-latest container: - image: ghcr.io/galacticusorg/buildenv:feathdf1.14.5 + image: ghcr.io/galacticusorg/buildenv:latest options: --privileged steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." 
diff --git a/.github/workflows/testCode.yml b/.github/workflows/testCode.yml index c7145cb808..b1018db141 100644 --- a/.github/workflows/testCode.yml +++ b/.github/workflows/testCode.yml @@ -35,7 +35,7 @@ jobs: Test-Code: runs-on: ${{ inputs.runner }} container: - image: ${{ startsWith( format('{0}',inputs.runner), 'ubuntu') && 'ghcr.io/galacticusorg/buildenv:feathdf1.14.5' || '' }} + image: ${{ startsWith( format('{0}',inputs.runner), 'ubuntu') && 'ghcr.io/galacticusorg/buildenv:latest' || '' }} steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." - run: echo "This job is now running on a ${{ runner.os }} server." diff --git a/.github/workflows/testModel.yml b/.github/workflows/testModel.yml index 7756d1b8c2..7cd6e897d1 100644 --- a/.github/workflows/testModel.yml +++ b/.github/workflows/testModel.yml @@ -56,7 +56,7 @@ jobs: Test-Model: runs-on: ${{ inputs.runner }} container: - image: ${{ startsWith( format('{0}',inputs.runner), 'ubuntu') && 'ghcr.io/galacticusorg/buildenv:feathdf1.14.5' || '' }} + image: ${{ startsWith( format('{0}',inputs.runner), 'ubuntu') && 'ghcr.io/galacticusorg/buildenv:latest' || '' }} steps: - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." - run: echo "This job is now running on a ${{ runner.os }} server."
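
Note on the read-only changes (patches 06 and 09): since version 1.10 the HDF5 library locks
files on open, which can fail on filesystems without lock support or conflict with external
locking schemes. Opening files read-only, as these patches do, means no write lock is needed;
per the commit messages, any required locking is handled by the code itself. As a minimal
illustrative sketch only (not part of the patches; it assumes h5py is installed and that a file
named "example.hdf5" exists), the same pattern from Python looks like:

    import os
    # HDF5's own locking can also be disabled via the environment; this must be set before
    # the HDF5 library is first loaded (i.e. before importing h5py).
    os.environ.setdefault("HDF5_USE_FILE_LOCKING", "FALSE")
    import h5py
    # Open read-only ("r"), mirroring readOnly=.true. in the Fortran HDF5 interface; any
    # serialization of access is left to the caller.
    with h5py.File("example.hdf5", "r") as f:
        print(list(f.keys()))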