From 7f8cbf608ac604051651e94c395dd9c955e0a05f Mon Sep 17 00:00:00 2001 From: Allen Byrne Date: Wed, 13 Nov 2024 16:39:35 -0600 Subject: [PATCH 1/2] Add option to use HighFive C++ --- CMakeInstallation.cmake | 26 +- CMakeLists.txt | 6 +- CMakePresets.json | 40 +- HDF5Examples/CMakeLists.txt | 6 +- HDF5Examples/HFCXX/CMakeLists.txt | 10 + HDF5Examples/HFCXX/H5D/CMakeLists.txt | 83 +++++ HDF5Examples/HFCXX/H5D/C_sourcefiles.cmake | 12 + HDF5Examples/HFCXX/H5D/Makefile.am | 81 +++++ HDF5Examples/HFCXX/H5D/chunks.cpp | 217 +++++++++++ HDF5Examples/HFCXX/H5D/compound.cpp | 149 ++++++++ HDF5Examples/HFCXX/H5D/create.cpp | 71 ++++ HDF5Examples/HFCXX/H5D/expected.out | 81 +++++ HDF5Examples/HFCXX/H5D/extend_ds.cpp | 221 +++++++++++ HDF5Examples/HFCXX/H5D/h5group.cpp | 224 ++++++++++++ HDF5Examples/HFCXX/H5D/readdata.cpp | 162 +++++++++ HDF5Examples/HFCXX/H5D/testh5c++.sh.in | 277 ++++++++++++++ HDF5Examples/HFCXX/H5D/writedata.cpp | 343 ++++++++++++++++++ HDF5Examples/HFCXX/HL/CMakeLists.txt | 79 ++++ HDF5Examples/HFCXX/HL/C_sourcefiles.cmake | 6 + HDF5Examples/HFCXX/HL/Makefile.am | 49 +++ HDF5Examples/HFCXX/HL/packet_table_FL.cpp | 97 +++++ .../HFCXX/HL/tfiles/packet_table_FL.tst | 6 + HDF5Examples/HFCXX/Makefile.am | 28 ++ HDF5Examples/HFCXX/TUTR/CMakeLists.txt | 64 ++++ HDF5Examples/HFCXX/TUTR/C_sourcefiles.cmake | 14 + HDF5Examples/HFCXX/TUTR/Makefile.am | 81 +++++ HDF5Examples/HFCXX/TUTR/h5tutr_cmprss.cpp | 150 ++++++++ HDF5Examples/HFCXX/TUTR/h5tutr_crtatt.cpp | 81 +++++ HDF5Examples/HFCXX/TUTR/h5tutr_crtdat.cpp | 72 ++++ HDF5Examples/HFCXX/TUTR/h5tutr_crtgrp.cpp | 59 +++ HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpar.cpp | 80 ++++ HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpd.cpp | 127 +++++++ HDF5Examples/HFCXX/TUTR/h5tutr_extend.cpp | 160 ++++++++ HDF5Examples/HFCXX/TUTR/h5tutr_rdwt.cpp | 70 ++++ HDF5Examples/HFCXX/TUTR/h5tutr_subset.cpp | 168 +++++++++ HDF5Examples/HFCXX/TUTR/testh5c++.sh.in | 277 ++++++++++++++ .../config/cmake-presets/hidden-presets.json | 3 +- .../config/cmake/HDFExampleMacros.cmake | 40 +- HDF5Examples/config/cmake/cacheinit.cmake | 1 + c++/CMakeLists.txt | 56 ++- config/cmake/HDF5ExampleCache.cmake | 35 +- config/cmake/HDFLibMacros.cmake | 29 ++ config/cmake/cacheinit.cmake | 11 +- .../examples/HDF5_Examples_options.cmake | 2 + release_docs/INSTALL_CMake.txt | 7 +- release_docs/USING_CMake_Examples.txt | 1 + src/H5build_settings.cmake.c.in | 1 + 47 files changed, 3805 insertions(+), 58 deletions(-) create mode 100644 HDF5Examples/HFCXX/CMakeLists.txt create mode 100644 HDF5Examples/HFCXX/H5D/CMakeLists.txt create mode 100644 HDF5Examples/HFCXX/H5D/C_sourcefiles.cmake create mode 100644 HDF5Examples/HFCXX/H5D/Makefile.am create mode 100644 HDF5Examples/HFCXX/H5D/chunks.cpp create mode 100644 HDF5Examples/HFCXX/H5D/compound.cpp create mode 100644 HDF5Examples/HFCXX/H5D/create.cpp create mode 100644 HDF5Examples/HFCXX/H5D/expected.out create mode 100644 HDF5Examples/HFCXX/H5D/extend_ds.cpp create mode 100644 HDF5Examples/HFCXX/H5D/h5group.cpp create mode 100644 HDF5Examples/HFCXX/H5D/readdata.cpp create mode 100644 HDF5Examples/HFCXX/H5D/testh5c++.sh.in create mode 100644 HDF5Examples/HFCXX/H5D/writedata.cpp create mode 100644 HDF5Examples/HFCXX/HL/CMakeLists.txt create mode 100644 HDF5Examples/HFCXX/HL/C_sourcefiles.cmake create mode 100644 HDF5Examples/HFCXX/HL/Makefile.am create mode 100644 HDF5Examples/HFCXX/HL/packet_table_FL.cpp create mode 100644 HDF5Examples/HFCXX/HL/tfiles/packet_table_FL.tst create mode 100644 HDF5Examples/HFCXX/Makefile.am create mode 100644 
HDF5Examples/HFCXX/TUTR/CMakeLists.txt create mode 100644 HDF5Examples/HFCXX/TUTR/C_sourcefiles.cmake create mode 100644 HDF5Examples/HFCXX/TUTR/Makefile.am create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_cmprss.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_crtatt.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_crtdat.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_crtgrp.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpar.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpd.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_extend.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_rdwt.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/h5tutr_subset.cpp create mode 100644 HDF5Examples/HFCXX/TUTR/testh5c++.sh.in diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 6211c959e99..5c118af5bb0 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -609,18 +609,20 @@ The HDF5 data model, file format, API, library, and tools are open and distribut INSTALL_TYPES Full Developer User ) if (HDF5_BUILD_CPP_LIB) - cpack_add_component (hlcpplibraries - DISPLAY_NAME "HDF5 HL C++ Libraries" - DEPENDS hllibraries - GROUP Runtime - INSTALL_TYPES Full Developer User - ) - cpack_add_component (hlcppheaders - DISPLAY_NAME "HDF5 HL C++ Headers" - DEPENDS hlcpplibraries - GROUP Development - INSTALL_TYPES Full Developer - ) + if (NOT HDF5_USE_HIGH_FIVE) + cpack_add_component (hlcpplibraries + DISPLAY_NAME "HDF5 HL C++ Libraries" + DEPENDS hllibraries + GROUP Runtime + INSTALL_TYPES Full Developer User + ) + cpack_add_component (hlcppheaders + DISPLAY_NAME "HDF5 HL C++ Headers" + DEPENDS hlcpplibraries + GROUP Development + INSTALL_TYPES Full Developer + ) + endif () endif () if (HDF5_BUILD_FORTRAN) cpack_add_component (hlfortlibraries diff --git a/CMakeLists.txt b/CMakeLists.txt index 4c8366d8393..d6dad16c3e3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1178,7 +1178,7 @@ if (EXISTS "${HDF5_SOURCE_DIR}/c++" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/c++") option (HDF5_BUILD_CPP_LIB "Build HDF5 C++ Library" OFF) if (HDF5_BUILD_CPP_LIB) # check for unsupported options - if (HDF5_ENABLE_PARALLEL) + if (HDF5_ENABLE_PARALLEL AND NOT HDF5_USE_HIGH_FIVE) if (NOT HDF5_ALLOW_UNSUPPORTED) message (FATAL_ERROR " **** Parallel and C++ options are mutually exclusive, override with HDF5_ALLOW_UNSUPPORTED option **** ") else () @@ -1192,7 +1192,9 @@ if (EXISTS "${HDF5_SOURCE_DIR}/c++" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/c++") if (HDF5_BUILD_HL_LIB) if (EXISTS "${HDF5_SOURCE_DIR}/hl/c++" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/hl/c++") #-- Build the High Level Fortran source codes - add_subdirectory (hl/c++) + if (NOT HDF5_USE_HIGH_FIVE) + add_subdirectory (hl/c++) + endif () endif () endif () endif () diff --git a/CMakePresets.json b/CMakePresets.json index c0a3f6fed28..285893e8738 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -93,6 +93,24 @@ "PLUGIN_USE_LOCALCONTENT": "OFF" } }, + { + "name": "ci-HighFiveVars", + "hidden": true, + "cacheVariables": { + "HIGHFIVE_TGZ_NAME": {"type": "STRING", "value": "v2.10.0.tar.gz"}, + "HIGHFIVE_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/BlueBrain/HighFive/archive/refs/tags"}, + "HIGHFIVE_GIT_URL": {"type": "STRING", "value": "https://github.com/BlueBrain/HighFive.git"}, + "HIGHFIVE_GIT_BRANCH": {"type": "STRING", "value": "master"} + } + }, + { + "name": "ci-HighFive", + "hidden": true, + "inherits": ["ci-HighFiveVars", "ci-base-tgz"], + "cacheVariables": { + 
"HIGHFIVE_USE_LOCALCONTENT": "OFF" + } + }, { "name": "ci-StdExamples", "hidden": true, @@ -123,12 +141,20 @@ "HDF_TEST_EXPRESS": "2" } }, + { + "name": "ci-HighFiveCPP", + "hidden": true, + "inherits": ["ci-CPP", "ci-HighFive"], + "cacheVariables": { + "HDF5_USE_HIGH_FIVE": "ON" + } + }, { "name": "ci-StdShar-MSVC", "description": "MSVC Standard Config for x64 (Release)", "inherits": [ "ci-x64-Release-MSVC", - "ci-CPP", + "ci-HighFiveCPP", "ci-Java", "ci-StdShar" ] @@ -138,7 +164,7 @@ "description": "MSVC Standard Config for x64 (Release)", "inherits": [ "ci-x64-Release-MSVC", - "ci-CPP", + "ci-HighFiveCPP", "ci-Fortran", "ci-Java", "ci-StdShar" @@ -149,7 +175,7 @@ "description": "Clang Standard Config for x64 (Release)", "inherits": [ "ci-x64-Release-Clang", - "ci-CPP", + "ci-HighFiveCPP", "ci-Fortran", "ci-Java", "ci-StdShar" @@ -160,7 +186,7 @@ "description": "Clang Standard Config for macos (Release)", "inherits": [ "ci-macos-Release-Clang", - "ci-CPP", + "ci-HighFiveCPP", "ci-Java", "ci-StdShar" ] @@ -170,7 +196,7 @@ "description": "GNUC Standard Config for macos (Release)", "inherits": [ "ci-macos-Release-GNUC", - "ci-CPP", + "ci-HighFiveCPP", "ci-Java", "ci-StdShar" ] @@ -180,7 +206,7 @@ "description": "GNUC Standard Config for x64 (Release)", "inherits": [ "ci-x64-Release-GNUC", - "ci-CPP", + "ci-HighFiveCPP", "ci-Fortran", "ci-Java", "ci-StdShar" @@ -199,7 +225,7 @@ "description": "Intel Standard Config for x64 (Release)", "inherits": [ "ci-x64-Release-Intel", - "ci-CPP", + "ci-HighFiveCPP", "ci-Fortran", "ci-Java", "ci-StdShar" diff --git a/HDF5Examples/CMakeLists.txt b/HDF5Examples/CMakeLists.txt index 3fd1a7eb0da..34627e604ec 100644 --- a/HDF5Examples/CMakeLists.txt +++ b/HDF5Examples/CMakeLists.txt @@ -216,7 +216,11 @@ if (H5EX_BUILD_JAVA AND HDF5_BUILD_JAVA) add_subdirectory (JAVA) endif () if (H5EX_BUILD_CXX AND HDF5_BUILD_CPP_LIB) - add_subdirectory (CXX) + if (NOT H5EX_USE_HIGH_FIVE AND NOT HDF5_USE_HIGH_FIVE) + add_subdirectory (CXX) + else () + add_subdirectory (HFCXX) + endif () endif () if (H5EX_BUILD_PYTHON) add_subdirectory (PYTHON) diff --git a/HDF5Examples/HFCXX/CMakeLists.txt b/HDF5Examples/HFCXX/CMakeLists.txt new file mode 100644 index 00000000000..ad9654af673 --- /dev/null +++ b/HDF5Examples/HFCXX/CMakeLists.txt @@ -0,0 +1,10 @@ +cmake_minimum_required (VERSION 3.18) +project (HDFCXX_EXAMPLES CXX) + +add_subdirectory (H5D) +#add_subdirectory (TUTR) + +#-- Add High Level Examples +#if (H5EX_BUILD_HL AND HDF5_BUILD_HL_LIB) +# add_subdirectory (HL) +#endif () diff --git a/HDF5Examples/HFCXX/H5D/CMakeLists.txt b/HDF5Examples/HFCXX/H5D/CMakeLists.txt new file mode 100644 index 00000000000..e2c23b3387a --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/CMakeLists.txt @@ -0,0 +1,83 @@ +cmake_minimum_required (VERSION 3.18) +project (HDF5Examples_CXX_H5D CXX) + +#----------------------------------------------------------------------------- +# Define Sources +#----------------------------------------------------------------------------- +include (C_sourcefiles.cmake) + +if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.8") + foreach (example_name ${examples}) + add_executable (${EXAMPLE_VARNAME}_cpp_ex_${example_name} ${PROJECT_SOURCE_DIR}/${example_name}.cpp) + target_compile_options(${EXAMPLE_VARNAME}_cpp_ex_${example_name} + PRIVATE + "$<$:-DH5_USE_16_API>" + "$<$:-DH5_USE_18_API>" + "$<$:-DH5_USE_110_API>" + "$<$:-DH5_USE_112_API>" + "$<$:-DH5_USE_114_API>" + "$<$:-DH5_USE_200_API>" + ) + if (H5_HAVE_PARALLEL) + target_include_directories 
(${EXAMPLE_VARNAME}_cpp_ex_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS}) + endif () + target_link_libraries (${EXAMPLE_VARNAME}_cpp_ex_${example_name} ${H5EX_HDF5_LINK_LIBS}) + endforeach () +endif () + +if (H5EX_BUILD_TESTING) + set (${EXAMPLE_VARNAME}_cpp_ex_CLEANFILES + Group.h5 + SDS.h5 + SDScompound.h5 + SDSextendible.h5 + Select.h5 + ) + add_test ( + NAME ${EXAMPLE_VARNAME}_cpp_ex-clear-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${${EXAMPLE_VARNAME}_cpp_ex_CLEANFILES} + ) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex-clear-objects PROPERTIES + FIXTURES_SETUP clear_${EXAMPLE_VARNAME}_cpp_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + ) + add_test ( + NAME ${EXAMPLE_VARNAME}_cpp_ex-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${${EXAMPLE_VARNAME}_cpp_ex_CLEANFILES} + ) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_${EXAMPLE_VARNAME}_cpp_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + ) + + macro (ADD_H5_TEST testname) + if (HDF5_ENABLE_USING_MEMCHECKER) + add_test (NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + else () + add_test ( + NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_EXPECT=0" + -D "TEST_SKIP_COMPARE=TRUE" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + endif () + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex_${testname} PROPERTIES FIXTURES_REQUIRED clear_${EXAMPLE_VARNAME}_cpp_ex) + if (last_test) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex_${testname} PROPERTIES DEPENDS ${last_test}) + endif () + set (last_test "${EXAMPLE_VARNAME}_cpp_ex_${testname}") + endmacro () + + foreach (example_name ${examples}) + ADD_H5_TEST (${example_name}) + endforeach () +endif () diff --git a/HDF5Examples/HFCXX/H5D/C_sourcefiles.cmake b/HDF5Examples/HFCXX/H5D/C_sourcefiles.cmake new file mode 100644 index 00000000000..2924cab017f --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/C_sourcefiles.cmake @@ -0,0 +1,12 @@ +#----------------------------------------------------------------------------- +# Define Sources, one file per application +#----------------------------------------------------------------------------- +set (examples + create +# readdata +# writedata +# compound +# extend_ds +# chunks +# h5group +) diff --git a/HDF5Examples/HFCXX/H5D/Makefile.am b/HDF5Examples/HFCXX/H5D/Makefile.am new file mode 100644 index 00000000000..6103485cfaa --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/Makefile.am @@ -0,0 +1,81 @@ +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the LICENSE file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +## +# +# HDF5 Library Examples Makefile(.in) +# + +include $(top_srcdir)/config/commence.am + +INSTALL_SCRIPT_FILES = run-c++-ex.sh + +# These are the programs that 'make all' or 'make prog' will build and +# which 'make check' will run. List them in the order they should be run. 
+EXAMPLE_PROG=create readdata writedata compound extend_ds chunks h5group \ + h5tutr_cmprss h5tutr_crtatt h5tutr_crtdat h5tutr_crtgrpar \ + h5tutr_crtgrp h5tutr_crtgrpd h5tutr_extend h5tutr_rdwt \ + h5tutr_subset +TEST_SCRIPT=testh5c++.sh +TEST_EXAMPLES_SCRIPT=$(INSTALL_SCRIPT_FILES) + +# These are the example files to be installed +INSTALL_FILES=create.cpp readdata.cpp writedata.cpp compound.cpp \ + extend_ds.cpp chunks.cpp h5group.cpp \ + h5tutr_cmprss.cpp h5tutr_crtatt.cpp h5tutr_crtdat.cpp \ + h5tutr_crtgrpar.cpp h5tutr_crtgrp.cpp h5tutr_crtgrpd.cpp \ + h5tutr_extend.cpp h5tutr_rdwt.cpp h5tutr_subset.cpp + +# Some of the examples depend on files created by running other examples +readdata.chkexe_: create.chkexe_ +chunks.chkexe_: extend_ds.chkexe_ +h5tutr_rdwt.chkexe_: h5tutr_crtdat.chkexe +h5tutrcrtatt.chkexe_: h5tutr_crtdat.chkexe +h5tutr_crtgrpd.chkexe_: h5tutr_crtgrpar.chkexe + +# Tell conclude.am that these are C++ tests. +CXX_API=yes + +# Where to install examples +# Note: no '/' after DESTDIR. Explanation in commence.am +EXAMPLEDIR=${DESTDIR}$(examplesdir)/c++ +EXAMPLETOPDIR=${DESTDIR}$(examplesdir) + +# How to build programs using h5c++ +$(EXTRA_PROG): $(H5CPP) + $(H5CPP) $(H5CCFLAGS) $(CPPFLAGS) -o $@ $(srcdir)/$@.cpp + +# List dependencies for each program. Normally, automake would take +# care of this for us, but if we tell automake about the programs it +# will try to build them with the normal C++ compiler, not h5c++. This is +# an inelegant way of solving the problem, unfortunately. +create: $(srcdir)/create.cpp +readdata: $(srcdir)/readdata.cpp +writedata: $(srcdir)/writedata.cpp +compound: $(srcdir)/compound.cpp +extend_ds: $(srcdir)/extend_ds.cpp +chunks: $(srcdir)/chunks.cpp +h5group: $(srcdir)/h5group.cpp + +h5tutr_cmprss: $(srcdir)/h5tutr_cmprss.cpp +h5tutr_crtatt: $(srcdir)/h5tutr_crtatt.cpp +h5tutr_crtdat: $(srcdir)/h5tutr_crtdat.cpp +h5tutr_crtgrpar: $(srcdir)/h5tutr_crtgrpar.cpp +h5tutr_crtgrp: $(srcdir)/h5tutr_crtgrp.cpp +h5tutr_crtgrpd: $(srcdir)/h5tutr_crtgrpd.cpp +h5tutr_extend: $(srcdir)/h5tutr_extend.cpp +h5tutr_rdwt: $(srcdir)/h5tutr_rdwt.cpp +h5tutr_subset: $(srcdir)/h5tutr_subset.cpp + +include $(top_srcdir)/config/examples.am +include $(top_srcdir)/config/conclude.am diff --git a/HDF5Examples/HFCXX/H5D/chunks.cpp b/HDF5Examples/HFCXX/H5D/chunks.cpp new file mode 100644 index 00000000000..2b4b09ff050 --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/chunks.cpp @@ -0,0 +1,217 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example shows how to read data from a chunked dataset. 
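+ * The dataset is read three ways: as a whole, as a single column
+ * selected with a hyperslab, and as one chunk using the chunk
+ * dimensions reported by the dataset creation property list.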
+ * We will read from the file created by extend.cpp + */ + +#include +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("SDSextendible.h5"); +const H5std_string DATASET_NAME("ExtendibleArray"); +const int NX = 10; +const int NY = 5; +const int RANK = 2; +const int RANKC = 1; + +int +main(void) +{ + hsize_t i, j; + + // Try block to detect exceptions raised by any of the calls inside it + try { + /* + * Turn off the auto-printing when failure occurs so that we can + * handle the errors appropriately + */ + Exception::dontPrint(); + + /* + * Open the file and the dataset. + */ + H5File file(FILE_NAME, H5F_ACC_RDONLY); + DataSet dataset = file.openDataSet(DATASET_NAME); + + /* + * Get filespace for rank and dimension + */ + DataSpace filespace = dataset.getSpace(); + + /* + * Get number of dimensions in the file dataspace + */ + int rank = filespace.getSimpleExtentNdims(); + + /* + * Get and print the dimension sizes of the file dataspace + */ + hsize_t dims[2]; // dataset dimensions + rank = filespace.getSimpleExtentDims(dims); + cout << "dataset rank = " << rank << ", dimensions " << (unsigned long)(dims[0]) << " x " + << (unsigned long)(dims[1]) << endl; + + /* + * Define the memory space to read dataset. + */ + DataSpace mspace1(RANK, dims); + + /* + * Read dataset back and display. + */ + int data_out[NX][NY]; // buffer for dataset to be read + dataset.read(data_out, PredType::NATIVE_INT, mspace1, filespace); + + cout << "\n"; + cout << "Dataset: \n"; + for (j = 0; j < dims[0]; j++) { + for (i = 0; i < dims[1]; i++) + cout << data_out[j][i] << " "; + cout << endl; + } + + /* + * dataset rank 2, dimensions 10 x 5 + * chunk rank 2, dimensions 2 x 5 + + * Dataset: + * 1 1 1 3 3 + * 1 1 1 3 3 + * 1 1 1 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + */ + + /* + * Read the third column from the dataset. + * First define memory dataspace, then define hyperslab + * and read it into column array. + */ + hsize_t col_dims[1]; + col_dims[0] = 10; + DataSpace mspace2(RANKC, col_dims); + + /* + * Define the column (hyperslab) to read. + */ + hsize_t offset[2] = {0, 2}; + hsize_t count[2] = {10, 1}; + int column[10]; // buffer for column to be read + + /* + * Define hyperslab and read. + */ + filespace.selectHyperslab(H5S_SELECT_SET, count, offset); + dataset.read(column, PredType::NATIVE_INT, mspace2, filespace); + + cout << endl; + cout << "Third column: " << endl; + for (i = 0; i < 10; i++) + cout << column[i] << endl; + + /* + * Third column: + * 1 + * 1 + * 1 + * 0 + * 0 + * 0 + * 0 + * 0 + * 0 + * 0 + */ + + /* + * Get creation properties list. + */ + DSetCreatPropList cparms = dataset.getCreatePlist(); + + /* + * Check if dataset is chunked. + */ + hsize_t chunk_dims[2]; + int rank_chunk; + if (H5D_CHUNKED == cparms.getLayout()) { + /* + * Get chunking information: rank and dimensions + */ + rank_chunk = cparms.getChunk(2, chunk_dims); + cout << "chunk rank " << rank_chunk << "dimensions " << (unsigned long)(chunk_dims[0]) << " x " + << (unsigned long)(chunk_dims[1]) << endl; + + /* + * Define the memory space to read a chunk. + */ + DataSpace mspace3(rank_chunk, chunk_dims); + + /* + * Define chunk in the file (hyperslab) to read. + */ + offset[0] = 2; + offset[1] = 0; + count[0] = chunk_dims[0]; + count[1] = chunk_dims[1]; + filespace.selectHyperslab(H5S_SELECT_SET, count, offset); + + /* + * Read chunk back and display. 
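+         * With a chunk offset of (2, 0) and 2 x 5 chunk dimensions, this
+         * selection covers rows 2 and 3 of the dataset.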
+ */ + int chunk_out[2][5]; // buffer for chunk to be read + dataset.read(chunk_out, PredType::NATIVE_INT, mspace3, filespace); + cout << endl; + cout << "Chunk:" << endl; + for (j = 0; j < chunk_dims[0]; j++) { + for (i = 0; i < chunk_dims[1]; i++) + cout << chunk_out[j][i] << " "; + cout << endl; + } + /* + * Chunk: + * 1 1 1 0 0 + * 2 0 0 0 0 + */ + } + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + return 0; +} diff --git a/HDF5Examples/HFCXX/H5D/compound.cpp b/HDF5Examples/HFCXX/H5D/compound.cpp new file mode 100644 index 00000000000..de5b91d80fd --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/compound.cpp @@ -0,0 +1,149 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example shows how to create a compound datatype, + * write an array which has the compound datatype to the file, + * and read back fields' subsets. + */ + +#include +#include +#include +using namespace HighFive; + +const std::string FILE_NAME("SDScompound.h5"); +const std::string DATASET_NAME("ArrayOfStructures"); +const std::string MEMBER1("a_name"); +const std::string MEMBER2("b_name"); +const std::string MEMBER3("c_name"); +const int LENGTH = 10; +const int RANK = 1; + +int +main(void) +{ + /* First structure and dataset*/ + typedef struct s1_t { + int a; + float b; + double c; + } s1_t; + + // Tell HighFive how to create the HDF5 datatype for this base type by + // using the HIGHFIVE_REGISTER_TYPE macro + CompoundType create_compound_s1_t() { + return {{"a", create_datatype()}, + {"b", create_datatype()}, + {"c", create_datatype()}}; + } + HIGHFIVE_REGISTER_TYPE(s1_t, create_compound_s1_t) + + /* Second structure (subset of s1_t) and dataset*/ + typedef struct s2_t { + double c; + int a; + } s2_t; + + // Tell HighFive how to create the HDF5 datatype for this base type by + // using the HIGHFIVE_REGISTER_TYPE macro + CompoundType create_compound_s2_t() { + return {{"c", create_datatype()}, + {"a", create_datatype()}}; + } + HIGHFIVE_REGISTER_TYPE(s2_t, create_compound_s2_t) + + try { + // Create a new file using the default property lists. Note that + // `File::Truncate` will, if present, truncate the file before opening + // it for reading and writing. + File file(FILE_NAME, File::Truncate); + + auto mtype1 = create_compound_s1_t(); + mtype1.commit(file, "s1_t"); + + // Define the size of our dataset: LENGTH + std::vector dims{LENGTH}; + + // Initialize the data + std::vector data; + int i; + for (i = 0; i < LENGTH; i++) { + data.push_back({ + i, + i * i, + 1. 
/ (i + 1) + }); + } + + // Create the dataset + DataSet dataset = file.createDataSet(DATASET_NAME, DataSpace(dims)); + + // write it + dataset.write(data); + + // flush everything + file.flush(); + + // we get the dataset + DataSet dataset = file.getDataSet(dataset_name); + + // we convert the hdf5 dataset to a single dimension vector + dataset.read(read_data); + + /* + * Read two fields c and a from s1 dataset. Fields in the file + * are found by their names "c_name" and "a_name". + */ + s2_t s2[LENGTH]; + dataset->read(s2, mtype2); + + /* + * Display the fields + */ + std::cout << endl << "Field c : " << endl; + for (i = 0; i < LENGTH; i++) + std::cout << s2[i].c << " "; + std::cout << endl; + + std::cout << endl << "Field a : " << endl; + for (i = 0; i < LENGTH; i++) + std::cout << s2[i].a << " "; + std::cout << endl; + + /* + * Create a datatype for s3. + */ + CompType mtype3(sizeof(float)); + + mtype3.insertMember(MEMBER2, 0, PredType::NATIVE_FLOAT); + + /* + * Read field b from s1 dataset. Field in the file is found by its name. + */ + float s3[LENGTH]; // Third "structure" - used to read float field of s1 + dataset->read(s3, mtype3); + + /* + * Display the field + */ + std::cout << endl << "Field b : " << endl; + for (i = 0; i < LENGTH; i++) + std::cout << s3[i] << " "; + std::cout << endl; + } catch (const Exception& err) { + // catch and print any HDF5 error + std::cerr << err.what() << std::endl; + } + + return 0; +} diff --git a/HDF5Examples/HFCXX/H5D/create.cpp b/HDF5Examples/HFCXX/H5D/create.cpp new file mode 100644 index 00000000000..8366719c877 --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/create.cpp @@ -0,0 +1,71 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +// +// This example writes a dataset to a new HDF5 file. +// + +#include + +#include +#include +using namespace HighFive; + +const std::string FILE_NAME("SDS.h5"); +const std::string DATASET_NAME("IntArray"); +const int NX = 5; // dataset dimensions +const int NY = 6; +const int RANK = 2; + +int +main(void) +{ + /* + * Data initialization. + */ + int i, j; + int data[NX][NY]; // buffer for data to write + for (j = 0; j < NX; j++) { + for (i = 0; i < NY; i++) + data[j][i] = i + j; + } + /* + * 0 1 2 3 4 5 + * 1 2 3 4 5 6 + * 2 3 4 5 6 7 + * 3 4 5 6 7 8 + * 4 5 6 7 8 9 + */ + + try { + // Create a new file using the default property lists. Note that + // `File::Truncate` will, if present, truncate the file before opening + // it for reading and writing. 
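+        // (File::Truncate is only one of the HighFive open flags; an existing
+        // file can instead be opened with File::ReadOnly or File::ReadWrite,
+        // as readdata.cpp does.)
+        //
+        // A minimal read-back sketch, not part of the original example:
+        //   File in(FILE_NAME, File::ReadOnly);
+        //   std::vector<std::vector<int>> back;
+        //   in.getDataSet(DATASET_NAME).read(back);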
+ File file(FILE_NAME, File::Truncate); + + // Define the size of our dataset: NXxNY + std::vector dims{NX, NY}; + + // Create the dataset + DataSet dataset = file.createDataSet(DATASET_NAME, DataSpace(dims)); + + // write it + dataset.write(data); + } + catch (const Exception& err) { + // catch and print any HDF5 error + std::cerr << err.what() << std::endl; + return -1; + } + + return 0; // successfully terminated +} diff --git a/HDF5Examples/HFCXX/H5D/expected.out b/HDF5Examples/HFCXX/H5D/expected.out new file mode 100644 index 00000000000..66c1d8f0395 --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/expected.out @@ -0,0 +1,81 @@ +Data set has INTEGER type +Little endian byte ordering (0) +Data size is 4 +rank 2, dimensions 5 x 6 +0 0 0 0 0 0 0 +0 0 0 0 0 0 0 +0 0 0 0 0 0 0 +3 4 5 6 0 0 0 +4 5 6 7 0 0 0 +5 6 7 8 0 0 0 +0 0 0 0 0 0 0 +10 0 11 12 0 0 0 0 0 +18 0 19 20 0 21 22 0 0 +0 59 0 61 0 0 0 0 0 +0 0 27 28 0 29 30 0 0 +0 0 35 36 67 37 38 0 0 +0 0 43 44 0 45 46 0 0 +0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 + +Field c : +1 0.5 0.333333 0.25 0.2 0.166667 0.142857 0.125 0.111111 0.1 + +Field a : +0 1 2 3 4 5 6 7 8 9 + +Field b : +0 1 4 9 16 25 36 49 64 81 +1 1 1 3 3 +1 1 1 3 3 +1 1 1 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +dataset rank = 2, dimensions 10 x 5 + +Dataset: +1 1 1 3 3 +1 1 1 3 3 +1 1 1 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 +2 0 0 0 0 + +Third column: +1 +1 +1 +0 +0 +0 +0 +0 +0 +0 +chunk rank 2dimensions 2 x 5 + +Chunk: +1 1 1 0 0 +2 0 0 0 0 +dataset "/Data/Compressed_Data" is open +dataset "/Data_new/Compressed_Data" is open + +Iterating over elements in the file +Name : Data +Name : Data_new + +Unlinking... +"Data" is unlinked + +Iterating over elements in the file again +Name : Data_new + diff --git a/HDF5Examples/HFCXX/H5D/extend_ds.cpp b/HDF5Examples/HFCXX/H5D/extend_ds.cpp new file mode 100644 index 00000000000..093bf259532 --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/extend_ds.cpp @@ -0,0 +1,221 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example shows how to work with extendible dataset. + * In the current version of the library dataset MUST be + * chunked. + * + */ + +#include +#include + +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("SDSextendible.h5"); +const H5std_string DATASET_NAME("ExtendibleArray"); +const int NX = 10; +const int NY = 5; +const int RANK = 2; + +int +main(void) +{ + /* + * Try block to detect exceptions raised by any of the calls inside it + */ + try { + /* + * Turn off the auto-printing when failure occurs so that we can + * handle the errors appropriately + */ + Exception::dontPrint(); + + /* + * Create the data space with unlimited dimensions. 
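+         * An extendible (unlimited) dataset must use chunked storage, so a
+         * chunked dataset creation property list is set up below.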
+ */ + hsize_t dims[2] = {3, 3}; // dataset dimensions at creation + hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; + DataSpace mspace1(RANK, dims, maxdims); + + /* + * Create a new file. If file exists its contents will be overwritten. + */ + H5File file(FILE_NAME, H5F_ACC_TRUNC); + + /* + * Modify dataset creation properties, i.e. enable chunking. + */ + DSetCreatPropList cparms; + + hsize_t chunk_dims[2] = {2, 5}; + cparms.setChunk(RANK, chunk_dims); + + /* + * Set fill value for the dataset + */ + int fill_val = 0; + cparms.setFillValue(PredType::NATIVE_INT, &fill_val); + + /* + * Create a new dataset within the file using cparms + * creation properties. + */ + DataSet dataset = file.createDataSet(DATASET_NAME, PredType::NATIVE_INT, mspace1, cparms); + + /* + * Extend the dataset. This call assures that dataset is at least 3 x 3. + */ + hsize_t size[2]; + size[0] = 3; + size[1] = 3; + dataset.extend(size); + + /* + * Select a hyperslab. + */ + DataSpace fspace1 = dataset.getSpace(); + hsize_t offset[2]; + offset[0] = 0; + offset[1] = 0; + hsize_t dims1[2] = {3, 3}; /* data1 dimensions */ + fspace1.selectHyperslab(H5S_SELECT_SET, dims1, offset); + + /* + * Write the data to the hyperslab. + */ + int data1[3][3] = {{1, 1, 1}, /* data to write */ + {1, 1, 1}, + {1, 1, 1}}; + dataset.write(data1, PredType::NATIVE_INT, mspace1, fspace1); + + /* + * Extend the dataset. Dataset becomes 10 x 3. + */ + hsize_t dims2[2] = {7, 1}; /* data2 dimensions */ + dims[0] = dims1[0] + dims2[0]; + size[0] = dims[0]; + size[1] = dims[1]; + dataset.extend(size); + + /* + * Select a hyperslab. + */ + DataSpace fspace2 = dataset.getSpace(); + offset[0] = 3; + offset[1] = 0; + fspace2.selectHyperslab(H5S_SELECT_SET, dims2, offset); + + /* + * Define memory space + */ + DataSpace mspace2(RANK, dims2); + + /* + * Write the data to the hyperslab. + */ + int data2[7] = {2, 2, 2, 2, 2, 2, 2}; + dataset.write(data2, PredType::NATIVE_INT, mspace2, fspace2); + + /* + * Extend the dataset. Dataset becomes 10 x 5. + */ + hsize_t dims3[2] = {2, 2}; /* data3 dimensions */ + dims[1] = dims1[1] + dims3[1]; + size[0] = dims[0]; + size[1] = dims[1]; + dataset.extend(size); + + /* + * Select a hyperslab + */ + DataSpace fspace3 = dataset.getSpace(); + offset[0] = 0; + offset[1] = 3; + fspace3.selectHyperslab(H5S_SELECT_SET, dims3, offset); + + /* + * Define memory space. + */ + DataSpace mspace3(RANK, dims3); + + /* + * Write the data to the hyperslab. + */ + int data3[2][2] = {{3, 3}, {3, 3}}; + dataset.write(data3, PredType::NATIVE_INT, mspace3, fspace3); + + /* + * Read the data from this dataset and display it. + */ + int i, j; + int data_out[NX][NY]; + for (i = 0; i < NX; i++) { + for (j = 0; j < NY; j++) + data_out[i][j] = 0; + } + dataset.read(data_out, PredType::NATIVE_INT); + /* + * Resulting dataset + * + * 1 1 1 3 3 + * 1 1 1 3 3 + * 1 1 1 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + * 2 0 0 0 0 + */ + /* + * Display the result. 
+ */ + for (i = 0; i < NX; i++) { + for (j = 0; j < NY; j++) + cout << data_out[i][j] << " "; + cout << endl; + } + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataTypeIException error) { + error.printErrorStack(); + return -1; + } + return 0; +} diff --git a/HDF5Examples/HFCXX/H5D/h5group.cpp b/HDF5Examples/HFCXX/H5D/h5group.cpp new file mode 100644 index 00000000000..2afa5edd6c3 --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/h5group.cpp @@ -0,0 +1,224 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example creates a group in the file and dataset in the group. + * Hard link to the group object is created and the dataset is accessed + * under different names. + * Iterator function is used to find the object names in the root group. + * Note that the C++ API iterator function is not completed yet, thus + * the C version is used in this example. + */ + +#include +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("Group.h5"); +const int RANK = 2; + +// Operator function +extern "C" herr_t file_info(hid_t loc_id, const char *name, const H5L_info2_t *linfo, void *opdata); + +int +main(void) +{ + + hsize_t dims[2]; + hsize_t cdims[2]; + + // Try block to detect exceptions raised by any of the calls inside it + try { + /* + * Turn off the auto-printing when failure occurs so that we can + * handle the errors appropriately + */ + Exception::dontPrint(); + + /* + * Create the named file, truncating the existing one if any, + * using default create and access property lists. + */ + H5File *file = new H5File(FILE_NAME, H5F_ACC_TRUNC); + + /* + * Create a group in the file + */ + Group *group = new Group(file->createGroup("/Data")); + + /* + * Create dataset "Compressed Data" in the group using absolute + * name. Dataset creation property list is modified to use + * GZIP compression with the compression effort set to 6. + * Note that compression can be used only when dataset is chunked. + */ + dims[0] = 1000; + dims[1] = 20; + cdims[0] = 20; + cdims[1] = 20; + DataSpace *dataspace = new DataSpace(RANK, dims); // create new dspace + DSetCreatPropList ds_creatplist; // create dataset creation prop list + ds_creatplist.setChunk(2, cdims); // then modify it for compression + ds_creatplist.setDeflate(6); + + /* + * Create the first dataset. + */ + DataSet *dataset = new DataSet( + file->createDataSet("/Data/Compressed_Data", PredType::NATIVE_INT, *dataspace, ds_creatplist)); + + /* + * Close the first dataset. 
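+         * Deleting the DataSet and DataSpace objects releases the underlying
+         * HDF5 identifiers through their destructors.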
+ */ + delete dataset; + delete dataspace; + + /* + * Create the second dataset. + */ + dims[0] = 500; + dims[1] = 20; + dataspace = new DataSpace(RANK, dims); // create second dspace + dataset = new DataSet(file->createDataSet("/Data/Float_Data", PredType::NATIVE_FLOAT, *dataspace)); + + delete dataset; + delete dataspace; + delete group; + delete file; + + /* + * Now reopen the file and group in the file. + */ + file = new H5File(FILE_NAME, H5F_ACC_RDWR); + group = new Group(file->openGroup("Data")); + + /* + * Access "Compressed_Data" dataset in the group. + */ + try { // to determine if the dataset exists in the group + dataset = new DataSet(group->openDataSet("Compressed_Data")); + } + catch (GroupIException not_found_error) { + cout << " Dataset is not found." << endl; + } + cout << "dataset \"/Data/Compressed_Data\" is open" << endl; + + /* + * Close the dataset. + */ + delete dataset; + + /* + * Create hard link to the Data group. + */ + file->link(H5L_TYPE_HARD, "Data", "Data_new"); + + /* + * We can access "Compressed_Data" dataset using created + * hard link "Data_new". + */ + try { // to determine if the dataset exists in the file + dataset = new DataSet(file->openDataSet("/Data_new/Compressed_Data")); + } + catch (FileIException not_found_error) { + cout << " Dataset is not found." << endl; + } + cout << "dataset \"/Data_new/Compressed_Data\" is open" << endl; + + /* + * Close the dataset. + */ + delete dataset; + + /* + * Use iterator to see the names of the objects in the file + * root directory. + */ + cout << endl << "Iterating over elements in the file" << endl; + herr_t idx = H5Literate2(file->getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL); + cout << endl; + + /* + * Unlink name "Data" and use iterator to see the names + * of the objects in the file root direvtory. + */ + cout << "Unlinking..." << endl; + try { // attempt to unlink the dataset + file->unlink("Data"); + } + catch (FileIException unlink_error) { + cout << " unlink failed." << endl; + } + cout << "\"Data\" is unlinked" << endl; + + cout << endl << "Iterating over elements in the file again" << endl; + idx = H5Literate2(file->getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL); + cout << endl; + + /* + * Close the group and file. + */ + delete group; + delete file; + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the Attribute operations + catch (AttributeIException error) { + error.printErrorStack(); + return -1; + } + return 0; +} + +/* + * Operator function. + */ +herr_t +file_info(hid_t loc_id, const char *name, const H5L_info2_t *linfo, void *opdata) +{ + hid_t group; + + /* + * Open the group using its name. + */ + group = H5Gopen2(loc_id, name, H5P_DEFAULT); + + /* + * Display group name. 
+ */ + cout << "Name : " << name << endl; + + H5Gclose(group); + return 0; +} diff --git a/HDF5Examples/HFCXX/H5D/readdata.cpp b/HDF5Examples/HFCXX/H5D/readdata.cpp new file mode 100644 index 00000000000..5709a49c429 --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/readdata.cpp @@ -0,0 +1,162 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +// +// This example reads hyperslab from the SDS.h5 file into +// two-dimensional plane of a three-dimensional array. Various +// information about the dataset in the SDS.h5 file is obtained. +// + +#include + +#include +#include +using namespace HighFive; + +const std::string FILE_NAME("SDS.h5"); +const std::string DATASET_NAME("IntArray"); +const int NX_SUB = 3; // hyperslab dimensions +const int NY_SUB = 4; +const int NX = 7; // output buffer dimensions +const int NY = 7; +const int NZ = 3; +const int RANK_OUT = 3; + +int +main(void) +{ + /* + * Output buffer initialization. + */ + int i, j, k; + int data_out[NX][NY][NZ]; /* output buffer */ + for (j = 0; j < NX; j++) { + for (i = 0; i < NY; i++) { + for (k = 0; k < NZ; k++) + data_out[j][i][k] = 0; + } + } + + try { + // we open the existing hdf5 file we created before + File file(FILE_NAME, File::ReadOnly); + + std::vector read_data; + + // we get the dataset + DataSet dataset = file.getDataSet(DATASET_NAME); + + // Get the class of the datatype that is used by the dataset. + DataTypeClass type_class = dataset.getDataType().getClass(); + + // Get class of datatype and print message if it's an integer. + if (type_class == DataTypeClass::Integer) { + std::cout << "Data set has INTEGER type" << std::endl; + + // Get the integer datatype + DataType intype = dataset.getDataType(); + + /* + * Get order of datatype and print message if it's a little endian. + */ + //H5std_string order_string; + //(void)intype.getOrder(order_string); + //std::cout << order_string << std::endl; + + // Get size of the data element stored in file and print it. + size_t size = intype.getSize(); + std::cout << "Data size is " << size << std::endl; + } + + // Get dataspace of the dataset. + DataSpace dataspace = dataset.getSpace(); + + // Get the number of dimensions in the dataspace. + size_t rank = dataspace.getNumberDimensions(); + + // Get the dimension size of each dimension in the dataspace and + // display them. + auto dimss_out = dspace.getDimensions() + std::cout << "rank " << rank << ", dimensions " << (unsigned long)(dims_out[0]) << " x " + << (unsigned long)(dims_out[1]) << std::endl; + + /* + * Define hyperslab in the dataset; implicitly giving strike and + * block NULL. + */ + hsize_t offset[2]; // hyperslab offset in the file + hsize_t count[2]; // size of the hyperslab in the file + offset[0] = 1; + offset[1] = 2; + count[0] = NX_SUB; + count[1] = NY_SUB; + dataspace.selectHyperslab(H5S_SELECT_SET, count, offset); + + /* + * Define the memory dataspace. 
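+         * The memory buffer is three-dimensional (NX x NY x NZ); the 2-D
+         * hyperslab read from the file is placed into a single plane of it.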
+ */ + hsize_t dimsm[3]; /* memory space dimensions */ + dimsm[0] = NX; + dimsm[1] = NY; + dimsm[2] = NZ; + DataSpace memspace(RANK_OUT, dimsm); + + /* + * Define memory hyperslab. + */ + hsize_t offset_out[3]; // hyperslab offset in memory + hsize_t count_out[3]; // size of the hyperslab in memory + offset_out[0] = 3; + offset_out[1] = 0; + offset_out[2] = 0; + count_out[0] = NX_SUB; + count_out[1] = NY_SUB; + count_out[2] = 1; + memspace.selectHyperslab(H5S_SELECT_SET, count_out, offset_out); + + // we convert the hdf5 dataset to a single dimension vector + dataset.read(read_data); + + /* + * Read data from hyperslab in the file into the hyperslab in + * memory and display the data. + */ + dataset.read(data_out, PredType::NATIVE_INT, memspace, dataspace); + + for (j = 0; j < NX; j++) { + for (i = 0; i < NY; i++) + std::cout << data_out[j][i][0] << " "; + std::cout << std::endl; + } + + for (size_t i = 0; i < read_data.size(); ++i) { + std::cout << read_data[i] << " "; + } + std::cout << "\n"; + /* + * 0 0 0 0 0 0 0 + * 0 0 0 0 0 0 0 + * 0 0 0 0 0 0 0 + * 3 4 5 6 0 0 0 + * 4 5 6 7 0 0 0 + * 5 6 7 8 0 0 0 + * 0 0 0 0 0 0 0 + */ + } + catch (const Exception& err) { + // catch and print any HDF5 error + std::cerr << err.what() << std::endl; + return -1; + } + + return 0; // successfully terminated +} diff --git a/HDF5Examples/HFCXX/H5D/testh5c++.sh.in b/HDF5Examples/HFCXX/H5D/testh5c++.sh.in new file mode 100644 index 00000000000..ef8a1407f45 --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/testh5c++.sh.in @@ -0,0 +1,277 @@ +#! /bin/sh +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the LICENSE file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +# +# Tests for the h5c++ compiler tool + +srcdir=@srcdir@ + +# Initializations +TESTNAME=h5c++ +EXIT_SUCCESS=0 +EXIT_FAILURE=1 + +# Where the tool is installed. +prefix="${prefix:-@prefix@}" +AR="@AR@" +RANLIB="@RANLIB@" +H5TOOL="h5c++" # The tool name +H5TOOL_BIN="${prefix}/bin/${H5TOOL}" # The path of the tool binary + +CMP='cmp -s' +DIFF='diff -c' + +nerrors=$EXIT_SUCCESS +verbose=yes + +# setup my machine information. +myos=`uname -s` +myhostnama=`uname -n` + +# Generate some source files and library for tests. +suffix=cpp # source file suffix +hdf5main=${H5TOOL}_hdf5main.$suffix +hdf5main_o=${H5TOOL}_hdf5main.o +appmain=${H5TOOL}_appmain.$suffix +appmain_o=${H5TOOL}_appmain.o +prog1=${H5TOOL}_prog1.$suffix +prog1_o=${H5TOOL}_prog1.o +prog2=${H5TOOL}_prog2.$suffix +prog2_o=${H5TOOL}_prog2.o +applib=libapp${H5TOOL}.a +args=${H5TOOL}_args.$suffix +args_o=${H5TOOL}_args.o + +# short hands +# Caution: if some *.h5 files must be cleaned here, list them by names. +# Don't use the wildcard form of *.h5 as it will wipe out even *.h5 generated +# by other test programs. This will cause a racing condition error when +# parallel make (e.g., gmake -j 4) is used. +temp_SRC="$hdf5main $appmain $prog1 $prog2" +temp_OBJ=`echo $temp_SRC | sed -e "s/\.${suffix}/.o/g"` +temp_FILES="a.out $applib" + +# Generate appmain: +# An application Main that calls hdf5 and application's own functions. 
+cat > $appmain < + +#include + +#include "H5Cpp.h" + +#ifndef H5_NO_NAMESPACE +using namespace H5; +#endif + +const H5std_string FILE_NAME( "tmpapp.h5" ); +int sub1(void); +int sub2(void); + +int main (void) +{ + sub1(); + sub2(); + H5File file( FILE_NAME, H5F_ACC_TRUNC ); + return 0; +} + +EOF + +# generate prog1 +cat > $prog1 < +#include + +using std::cout; +using std::endl; +int sub1(void) +{ + cout << "in sub1" << endl; + return 0; +} +EOF + +# generate prog2 +cat > $prog2 < +#include + +using std::cout; +using std::endl; +int sub2(void) +{ + cout << "in sub2" << endl; + return 0; +} +EOF + +# Generate HDF5 Main Program: +# An HDF5 sample program that calls hdf5 functions. +cat > $hdf5main < + +#include + +#include "H5Cpp.h" + +#ifndef H5_NO_NAMESPACE +using namespace H5; +#endif + +const H5std_string FILE_NAME( "tmphdf5.h5" ); + +int main (void) +{ + H5File file( FILE_NAME, H5F_ACC_TRUNC ); + return 0; +} +EOF + +# Generate args: +# An application main that test misc command line arguments being passed. +cat > $args < +#include +#include "H5Cpp.h" +#ifndef H5_NO_NAMESPACE +using namespace H5; +#endif + +const H5std_string FILE_NAME( "args.h5" ); + +int main (void) +{ + char c = SGL_QUOTE; // 'H' + char *s = DBL_QUOTE; // "HDF" + int val = MISC; // 42 + + H5File file( FILE_NAME, H5F_ACC_TRUNC ); + return 0; +} +EOF + +# Parse option +# None + +# Print a line-line message left justified in a field of 74 characters +# beginning with the word "Testing". +# +TESTING() { + SPACES=" " + echo "Testing $* $SPACES" | cut -c1-74 | tr -d '\012' +} + + +# Debug printing +# Change : to echo to print the debug statement +DPRINT() { + : $* +} + +# Run a test and print PASS or *FAIL*. If a test fails then increment +# the `nerrors' global variable and (if $verbose is set) display the +# failed output. The actual output is not removed if $HDF5_NOCLEANUP is +# defined. +# +TOOLTEST() { + out=test_$H5TOOL_$$.out + err=test_$H5TOOL_$$.err + + # Run test. + TESTING $H5TOOL $@ + $H5TOOL_BIN $@ > $out 2>&1 + result=$? + if [ $result = 0 ]; then + echo " PASSED" + else + echo "*FAILED*" + nerrors="`expr $nerrors + 1`" + test yes = "$verbose" && \ + ( echo "========== results ==========="; cat $out; + echo "===============================================") |sed 's/^/ /' + fi + + # Clean up output file + if test -z "$HDF5_NOCLEANUP"; then + rm -f $out + fi +} + +# Print a "SKIP" message +SKIP() { + TESTING $H5TOOL $@ + echo " -SKIP-" +} + + +############################################################################## +### T H E T E S T S ### +############################################################################## +# +# HDF5 program that calls HDF5 APIs. +echo "***"Simple Compile and Link in one step. +TOOLTEST $hdf5main +# Application program that calls HDF5 and its own functions. +TOOLTEST $appmain $prog1 $prog2 + +# Compile, then link. +echo "***"Compile and Link in two steps. +TOOLTEST -c $hdf5main +TOOLTEST $hdf5main_o +TOOLTEST -c $appmain $prog1 $prog2 +TOOLTEST $appmain_o $prog1_o $prog2_o + +# Build external library, then link with it. +echo "***"Build external library and link with it. +TOOLTEST -c $prog1 $prog2 +rm -f $applib +$AR cru $applib $prog1_o $prog2_o +$RANLIB $applib +TOOLTEST $appmain $applib +TOOLTEST $appmain_o $applib +# This is peculiar but should work. (See bug ID 729) +TOOLTEST -c $hdf5main +rm -f $applib +$AR cru $applib $hdf5main_o +$RANLIB $applib +# SunOS does not support this. Skip it. 
+if [ $myos = SunOS ]; then + SKIP -o a.out $applib +else + TOOLTEST -o a.out $applib +fi + +# Just preprocess, no compile, no link. +echo "***"Just preprocess, no compile, no link. +TOOLTEST -E $hdf5main +TOOLTEST -E $appmain $prog1 $prog2 + +# HDF5 program that depends on input args. +echo "***"Simple Compile and Link in one step with user-supplied arguments. +TOOLTEST -DSGL_QUOTE=\'H\' -DDBL_QUOTE=\"HDF\" -DMISC=42 $args + +############################################################################## +# END +############################################################################## + +# Clean up file +if test -z "$HDF5_NOCLEANUP"; then + rm -f $temp_SRC $temp_OBJ $temp_FILES +fi + +if test $nerrors -eq 0 ; then + echo "All $TESTNAME tests passed." + exit $EXIT_SUCCESS +else + echo "$TESTNAME tests failed with $nerrors errors." + exit $EXIT_FAILURE +fi diff --git a/HDF5Examples/HFCXX/H5D/writedata.cpp b/HDF5Examples/HFCXX/H5D/writedata.cpp new file mode 100644 index 00000000000..f446b2f390e --- /dev/null +++ b/HDF5Examples/HFCXX/H5D/writedata.cpp @@ -0,0 +1,343 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This program shows how the select_hyperslab and select_elements + * functions are used to write selected data from memory to the file. + * Program takes 48 elements from the linear buffer and writes them into + * the matrix using 3x2 blocks, (4,3) stride and (2,4) count. + * Then four elements of the matrix are overwritten with the new values and + * file is closed. Program reopens the file and reads and displays the result. + */ + +#include +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("Select.h5"); +const H5std_string DATASET_NAME("Matrix in file"); +const int MSPACE1_RANK = 1; // Rank of the first dataset in memory +const int MSPACE1_DIM = 50; // Dataset size in memory +const int MSPACE2_RANK = 1; // Rank of the second dataset in memory +const int MSPACE2_DIM = 4; // Dataset size in memory +const int FSPACE_RANK = 2; // Dataset rank as it is stored in the file +const int FSPACE_DIM1 = 8; // Dimension sizes of the dataset as it is +const int FSPACE_DIM2 = 12; // stored in the file +const int MSPACE_RANK = 2; // Rank of the first dataset in memory +const int MSPACE_DIM1 = 8; // We will read dataset back from the file +const int MSPACE_DIM2 = 9; // to the dataset in memory with these + // dataspace parameters +const int NPOINTS = 4; // Number of points that will be selected + // and overwritten + +int +main(void) +{ + int i, j; // loop indices */ + + /* + * Try block to detect exceptions raised by any of the calls inside it + */ + try { + /* + * Turn off the auto-printing when failure occurs so that we can + * handle the errors appropriately + */ + Exception::dontPrint(); + + /* + * Create a file. + */ + H5File *file = new H5File(FILE_NAME, H5F_ACC_TRUNC); + + /* + * Create property list for a dataset and set up fill values. 
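+         * Elements that are never written keep this fill value, which is why
+         * the unselected positions read back as 0 below.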
+ */ + int fillvalue = 0; /* Fill value for the dataset */ + DSetCreatPropList plist; + plist.setFillValue(PredType::NATIVE_INT, &fillvalue); + + /* + * Create dataspace for the dataset in the file. + */ + hsize_t fdim[] = {FSPACE_DIM1, FSPACE_DIM2}; // dim sizes of ds (on disk) + DataSpace fspace(FSPACE_RANK, fdim); + + /* + * Create dataset and write it into the file. + */ + DataSet *dataset = + new DataSet(file->createDataSet(DATASET_NAME, PredType::NATIVE_INT, fspace, plist)); + + /* + * Select hyperslab for the dataset in the file, using 3x2 blocks, + * (4,3) stride and (2,4) count starting at the position (0,1). + */ + hsize_t start[2]; // Start of hyperslab + hsize_t stride[2]; // Stride of hyperslab + hsize_t count[2]; // Block count + hsize_t block[2]; // Block sizes + start[0] = 0; + start[1] = 1; + stride[0] = 4; + stride[1] = 3; + count[0] = 2; + count[1] = 4; + block[0] = 3; + block[1] = 2; + fspace.selectHyperslab(H5S_SELECT_SET, count, start, stride, block); + + /* + * Create dataspace for the first dataset. + */ + hsize_t dim1[] = {MSPACE1_DIM}; /* Dimension size of the first dataset + (in memory) */ + DataSpace mspace1(MSPACE1_RANK, dim1); + + /* + * Select hyperslab. + * We will use 48 elements of the vector buffer starting at the + * second element. Selected elements are 1 2 3 . . . 48 + */ + start[0] = 1; + stride[0] = 1; + count[0] = 48; + block[0] = 1; + mspace1.selectHyperslab(H5S_SELECT_SET, count, start, stride, block); + + /* + * Write selection from the vector buffer to the dataset in the file. + * + * File dataset should look like this: + * 0 1 2 0 3 4 0 5 6 0 7 8 + * 0 9 10 0 11 12 0 13 14 0 15 16 + * 0 17 18 0 19 20 0 21 22 0 23 24 + * 0 0 0 0 0 0 0 0 0 0 0 0 + * 0 25 26 0 27 28 0 29 30 0 31 32 + * 0 33 34 0 35 36 0 37 38 0 39 40 + * 0 41 42 0 43 44 0 45 46 0 47 48 + * 0 0 0 0 0 0 0 0 0 0 0 0 + */ + int vector[MSPACE1_DIM]; // vector buffer for dset + + /* + * Buffer initialization. + */ + vector[0] = vector[MSPACE1_DIM - 1] = -1; + for (i = 1; i < MSPACE1_DIM - 1; i++) + vector[i] = i; + + dataset->write(vector, PredType::NATIVE_INT, mspace1, fspace); + + /* + * Reset the selection for the file dataspace fid. + */ + fspace.selectNone(); + + /* + * Create dataspace for the second dataset. + */ + hsize_t dim2[] = {MSPACE2_DIM}; /* Dimension size of the second dataset + (in memory */ + DataSpace mspace2(MSPACE2_RANK, dim2); + + /* + * Select sequence of NPOINTS points in the file dataspace. + */ + hsize_t coord[NPOINTS][FSPACE_RANK]; /* Array to store selected points + from the file dataspace */ + coord[0][0] = 0; + coord[0][1] = 0; + coord[1][0] = 3; + coord[1][1] = 3; + coord[2][0] = 3; + coord[2][1] = 5; + coord[3][0] = 5; + coord[3][1] = 6; + + fspace.selectElements(H5S_SELECT_SET, NPOINTS, (const hsize_t *)coord); + + /* + * Write new selection of points to the dataset. + */ + int values[] = {53, 59, 61, 67}; /* New values to be written */ + dataset->write(values, PredType::NATIVE_INT, mspace2, fspace); + + /* + * File dataset should look like this: + * 53 1 2 0 3 4 0 5 6 0 7 8 + * 0 9 10 0 11 12 0 13 14 0 15 16 + * 0 17 18 0 19 20 0 21 22 0 23 24 + * 0 0 0 59 0 61 0 0 0 0 0 0 + * 0 25 26 0 27 28 0 29 30 0 31 32 + * 0 33 34 0 35 36 67 37 38 0 39 40 + * 0 41 42 0 43 44 0 45 46 0 47 48 + * 0 0 0 0 0 0 0 0 0 0 0 0 + * + */ + + /* + * Close the dataset and the file. + */ + delete dataset; + delete file; + + /* + * Open the file. + */ + file = new H5File(FILE_NAME, H5F_ACC_RDONLY); + + /* + * Open the dataset. 
+ */ + dataset = new DataSet(file->openDataSet(DATASET_NAME)); + + /* + * Get dataspace of the dataset. + */ + fspace = dataset->getSpace(); + + /* + * Select first hyperslab for the dataset in the file. The following + * elements are selected: + * 10 0 11 12 + * 18 0 19 20 + * 0 59 0 61 + * + */ + start[0] = 1; + start[1] = 2; + block[0] = 1; + block[1] = 1; + stride[0] = 1; + stride[1] = 1; + count[0] = 3; + count[1] = 4; + fspace.selectHyperslab(H5S_SELECT_SET, count, start, stride, block); + + /* + * Add second selected hyperslab to the selection. + * The following elements are selected: + * 19 20 0 21 22 + * 0 61 0 0 0 + * 27 28 0 29 30 + * 35 36 67 37 38 + * 43 44 0 45 46 + * 0 0 0 0 0 + * Note that two hyperslabs overlap. Common elements are: + * 19 20 + * 0 61 + */ + start[0] = 2; + start[1] = 4; + block[0] = 1; + block[1] = 1; + stride[0] = 1; + stride[1] = 1; + count[0] = 6; + count[1] = 5; + fspace.selectHyperslab(H5S_SELECT_OR, count, start, stride, block); + + /* + * Create memory dataspace. + */ + hsize_t mdim[] = {MSPACE_DIM1, MSPACE_DIM2}; /* Dimension sizes of the + dataset in memory when we + read selection from the + dataset on the disk */ + DataSpace mspace(MSPACE_RANK, mdim); + + /* + * Select two hyperslabs in memory. Hyperslabs has the same + * size and shape as the selected hyperslabs for the file dataspace. + */ + start[0] = 0; + start[1] = 0; + block[0] = 1; + block[1] = 1; + stride[0] = 1; + stride[1] = 1; + count[0] = 3; + count[1] = 4; + mspace.selectHyperslab(H5S_SELECT_SET, count, start, stride, block); + start[0] = 1; + start[1] = 2; + block[0] = 1; + block[1] = 1; + stride[0] = 1; + stride[1] = 1; + count[0] = 6; + count[1] = 5; + mspace.selectHyperslab(H5S_SELECT_OR, count, start, stride, block); + + /* + * Initialize data buffer. + */ + int matrix_out[MSPACE_DIM1][MSPACE_DIM2]; + for (i = 0; i < MSPACE_DIM1; i++) + for (j = 0; j < MSPACE_DIM2; j++) + matrix_out[i][j] = 0; + + /* + * Read data back to the buffer matrix. + */ + dataset->read(matrix_out, PredType::NATIVE_INT, mspace, fspace); + + /* + * Display the result. Memory dataset is: + * + * 10 0 11 12 0 0 0 0 0 + * 18 0 19 20 0 21 22 0 0 + * 0 59 0 61 0 0 0 0 0 + * 0 0 27 28 0 29 30 0 0 + * 0 0 35 36 67 37 38 0 0 + * 0 0 43 44 0 45 46 0 0 + * 0 0 0 0 0 0 0 0 0 + * 0 0 0 0 0 0 0 0 0 + */ + for (i = 0; i < MSPACE_DIM1; i++) { + for (j = 0; j < MSPACE_DIM2; j++) + cout << matrix_out[i][j] << " "; + cout << endl; + } + + /* + * Close the dataset and the file. 
+ */ + delete dataset; + delete file; + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + return 0; +} diff --git a/HDF5Examples/HFCXX/HL/CMakeLists.txt b/HDF5Examples/HFCXX/HL/CMakeLists.txt new file mode 100644 index 00000000000..3cc2f48a980 --- /dev/null +++ b/HDF5Examples/HFCXX/HL/CMakeLists.txt @@ -0,0 +1,79 @@ +cmake_minimum_required (VERSION 3.18) +project (HDF5Examples_CXX_HL CXX) + +#----------------------------------------------------------------------------- +# Define Sources +#----------------------------------------------------------------------------- +include (C_sourcefiles.cmake) + +if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.8") + foreach (example_name ${common_examples}) + add_executable (${EXAMPLE_VARNAME}_cpp_ex_${example_name} ${PROJECT_SOURCE_DIR}/${example_name}.cpp) + target_compile_options(${EXAMPLE_VARNAME}_cpp_ex_${example_name} + PRIVATE + "$<$:-DH5_USE_16_API>" + "$<$:-DH5_USE_18_API>" + "$<$:-DH5_USE_110_API>" + "$<$:-DH5_USE_112_API>" + "$<$:-DH5_USE_114_API>" + "$<$:-DH5_USE_200_API>" + ) + if (H5_HAVE_PARALLEL) + target_include_directories (${EXAMPLE_VARNAME}_cpp_ex_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS}) + endif () + target_link_libraries (${EXAMPLE_VARNAME}_cpp_ex_${example_name} ${H5EX_HDF5_LINK_LIBS}) + endforeach () +endif () + +if (H5EX_BUILD_TESTING) + set (${EXAMPLE_VARNAME}_cpp_ex_CLEANFILES + packet_table.h5 + ) + add_test ( + NAME ${EXAMPLE_VARNAME}_cpp_ex-clear-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${${EXAMPLE_VARNAME}_cpp_ex_CLEANFILES} + ) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex-clear-objects PROPERTIES + FIXTURES_SETUP clear_${EXAMPLE_VARNAME}_cpp_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + ) + add_test ( + NAME ${EXAMPLE_VARNAME}_cpp_ex-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${${EXAMPLE_VARNAME}_cpp_ex_CLEANFILES} + ) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_${EXAMPLE_VARNAME}_cpp_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + ) + + macro (ADD_H5_TEST testname) + if (HDF5_ENABLE_USING_MEMCHECKER) + add_test (NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + else () + add_test ( + NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_EXPECT=0" + -D "TEST_SKIP_COMPARE=TRUE" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + endif () + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex_${testname} PROPERTIES FIXTURES_REQUIRED clear_${EXAMPLE_VARNAME}_cpp_ex) + if (last_test) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex_${testname} PROPERTIES DEPENDS ${last_test}) + endif () + set (last_test "${EXAMPLE_VARNAME}_cpp_ex_${testname}") + endmacro () + + foreach (example_name ${common_examples}) + ADD_H5_TEST (${example_name}) + endforeach () +endif () diff --git a/HDF5Examples/HFCXX/HL/C_sourcefiles.cmake b/HDF5Examples/HFCXX/HL/C_sourcefiles.cmake new file mode 100644 index 
00000000000..54b7f55b6cf --- /dev/null +++ b/HDF5Examples/HFCXX/HL/C_sourcefiles.cmake @@ -0,0 +1,6 @@ +#----------------------------------------------------------------------------- +# Define Sources, one file per application +#----------------------------------------------------------------------------- +set (common_examples + packet_table_FL +) diff --git a/HDF5Examples/HFCXX/HL/Makefile.am b/HDF5Examples/HFCXX/HL/Makefile.am new file mode 100644 index 00000000000..ed4d55ba6d7 --- /dev/null +++ b/HDF5Examples/HFCXX/HL/Makefile.am @@ -0,0 +1,49 @@ +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the LICENSE file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +## +# +# HDF5 Library Examples Makefile(.in) +# + +include $(top_srcdir)/config/commence.am + +# These are the programs that 'make all' or 'make prog' will build and +# which 'make check' will run. List them in the order they should be run. +EXAMPLE_PROG=ptExampleFL +TEST_EXAMPLES_SCRIPT=$(INSTALL_SCRIPT_FILES) + +# These are the example files to be installed +INSTALL_FILES=ptExampleFL.cpp +INSTALL_SCRIPT_FILES = run-hlc++-ex.sh + +# Tell conclude.am that these are C++ tests. +CXX_API=yes + +# Where to install examples +# Note: no '/' after DESTDIR. Explanation in commence.am +EXAMPLEDIR=${DESTDIR}$(examplesdir)/hl/c++ +EXAMPLETOPDIR=${DESTDIR}$(examplesdir)/hl + +# How to build programs using h5c++ +$(EXTRA_PROG): $(H5CPP) + $(H5CPP) $(H5CCFLAGS) $(CPPFLAGS) -o $@ $(srcdir)/$@.cpp + +# List dependencies for each program. Normally, automake would take +# care of this for us, but if we tell automake about the programs it +# will try to build them with the normal C++ compiler, not h5c++. This is +# an inelegant way of solving the problem, unfortunately. +ptExampleFL: ptExampleFL.cpp + +include $(top_srcdir)/config/examples.am +include $(top_srcdir)/config/conclude.am diff --git a/HDF5Examples/HFCXX/HL/packet_table_FL.cpp b/HDF5Examples/HFCXX/HL/packet_table_FL.cpp new file mode 100644 index 00000000000..5f7195dd068 --- /dev/null +++ b/HDF5Examples/HFCXX/HL/packet_table_FL.cpp @@ -0,0 +1,97 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +#include "H5PacketTable.h" + +/*------------------------------------------------------------------------- + * Packet Table Fixed-Length Example + * + * Example program that creates a packet table and performs + * writes and reads. 
+ * + *------------------------------------------------------------------------- + */ + +const char *FILE_NAME("PTcppexampleFL.h5"); +const char *PT_NAME("/examplePacketTable"); + +int +main(void) +{ + herr_t err; /* Return value from function calls */ + hid_t fileID; /* HDF5 identifier for file */ + hid_t plistID; /* HDF5 identifier for property list to use compression */ + hsize_t count; /* Number of records in table */ + int x; /* Temporary counter variable */ + + /* Buffers to hold data */ + int readBuffer[5]; + int writeBuffer[5]; + + /* Initialize buffers */ + for (x = 0; x < 5; x++) { + writeBuffer[x] = x; + readBuffer[x] = -1; + } + + /* Create a new HDF5 file */ + fileID = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); + if (fileID < 0) + fprintf(stderr, "Couldn't create file.\n"); + + /* Prepare property list to set compression, randomly use deflate + with compression level 5. */ + plistID = H5Pcreate(H5P_DATASET_CREATE); + err = H5Pset_deflate(plistID, 5); + if (err < 0) + fprintf(stderr, "Error setting compression level."); + + /* Create a fixed-length packet table. */ + FL_PacketTable ptable(fileID, plistID, PT_NAME, H5T_NATIVE_INT, 100); + if (!ptable.IsValid()) + fprintf(stderr, "Unable to create packet table."); + + /* Append five packets to the packet table, one at a time */ + for (x = 0; x < 5; x++) { + err = ptable.AppendPacket(&(writeBuffer[x])); + if (err < 0) + fprintf(stderr, "Error adding record."); + } + + /* Get the number of packets in the packet table. This should be five. */ + count = ptable.GetPacketCount(err); + if (err < 0) + fprintf(stderr, "Error getting packet count."); + + printf("Number of packets in packet table after five appends: %" PRIuHSIZE "\n", count); + + /* Initialize packet table's "current record" */ + ptable.ResetIndex(); + + /* Iterate through packets, read each one back */ + for (x = 0; x < 5; x++) { + err = ptable.GetNextPacket(&(readBuffer[x])); + if (err < 0) + fprintf(stderr, "Error reading record."); + + printf("Packet %d's value is %d.\n", x, readBuffer[x]); + } + + /* The packet table will close automatically when its object goes */ + /* out of scope. */ + + err = H5Fclose(fileID); + if (err < 0) + fprintf(stderr, "Failed to close file.\n"); + + return 0; +} diff --git a/HDF5Examples/HFCXX/HL/tfiles/packet_table_FL.tst b/HDF5Examples/HFCXX/HL/tfiles/packet_table_FL.tst new file mode 100644 index 00000000000..ddcf3bb7aa9 --- /dev/null +++ b/HDF5Examples/HFCXX/HL/tfiles/packet_table_FL.tst @@ -0,0 +1,6 @@ +Number of packets in packet table after five appends: 5 +Packet 0's value is 0. +Packet 1's value is 1. +Packet 2's value is 2. +Packet 3's value is 3. +Packet 4's value is 4. diff --git a/HDF5Examples/HFCXX/Makefile.am b/HDF5Examples/HFCXX/Makefile.am new file mode 100644 index 00000000000..a0dd701b156 --- /dev/null +++ b/HDF5Examples/HFCXX/Makefile.am @@ -0,0 +1,28 @@ +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the LICENSE file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +# +# +# This makefile mostly just reinvokes make in the various subdirectories +# but does so in the correct order. You can alternatively invoke make from +# each subdirectory manually. 
+## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +## +# +# HDF5 CXX Library Examples Makefile(.in) + +include $(top_srcdir)/config/commence.am + +## Only recurse into subdirectories if the CXX interface is enabled. + SUBDIRS=H5D TUTR + +include $(top_srcdir)/config/conclude.am diff --git a/HDF5Examples/HFCXX/TUTR/CMakeLists.txt b/HDF5Examples/HFCXX/TUTR/CMakeLists.txt new file mode 100644 index 00000000000..a8400295ea4 --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/CMakeLists.txt @@ -0,0 +1,64 @@ +cmake_minimum_required (VERSION 3.18) +project (HDF5Examples_CXX_TUTR CXX) + +#----------------------------------------------------------------------------- +# Define Sources +#----------------------------------------------------------------------------- +include (C_sourcefiles.cmake) + +if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.8") + foreach (example_name ${examples}) + add_executable (${EXAMPLE_VARNAME}_cpp_ex_${example_name} ${PROJECT_SOURCE_DIR}/${example_name}.cpp) + target_compile_options(${EXAMPLE_VARNAME}_cpp_ex_${example_name} + PRIVATE + "$<$:-DH5_USE_16_API>" + "$<$:-DH5_USE_18_API>" + "$<$:-DH5_USE_110_API>" + "$<$:-DH5_USE_112_API>" + "$<$:-DH5_USE_114_API>" + "$<$:-DH5_USE_200_API>" + ) + if (H5_HAVE_PARALLEL) + target_include_directories (${EXAMPLE_VARNAME}_cpp_ex_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS}) + endif () + target_link_libraries (${EXAMPLE_VARNAME}_cpp_ex_${example_name} ${H5EX_HDF5_LINK_LIBS}) + endforeach () +endif () + +if (H5EX_BUILD_TESTING) + macro (ADD_H5_TEST testname) + add_test ( + NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname}-clearall + COMMAND ${CMAKE_COMMAND} + -E remove + ${testname}.h5 + ) + if (HDF5_ENABLE_USING_MEMCHECKER) + add_test (NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_cpp_ex_${testname}-clearall) + else () + add_test ( + NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_EXPECT=0" + -D "TEST_SKIP_COMPARE=TRUE" + -D "TEST_OUTPUT=${testname}.out" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}" + -P "${H5EX_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_cpp_ex_${testname}-clearall) + endif () + if (last_test) + set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex_${testname} PROPERTIES DEPENDS ${last_test}) + endif () + set (last_test "${EXAMPLE_VARNAME}_cpp_ex_${testname}") + endmacro () + + foreach (example_name ${examples}) + ADD_H5_TEST (${example_name}) + endforeach () +endif () diff --git a/HDF5Examples/HFCXX/TUTR/C_sourcefiles.cmake b/HDF5Examples/HFCXX/TUTR/C_sourcefiles.cmake new file mode 100644 index 00000000000..0d8b7b7ffda --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/C_sourcefiles.cmake @@ -0,0 +1,14 @@ +#----------------------------------------------------------------------------- +# Define Sources, one file per application +#----------------------------------------------------------------------------- +set (examples + h5tutr_cmprss + h5tutr_crtdat + h5tutr_crtatt + h5tutr_crtgrpar + h5tutr_crtgrp + h5tutr_crtgrpd + h5tutr_extend + h5tutr_rdwt + h5tutr_subset +) diff --git a/HDF5Examples/HFCXX/TUTR/Makefile.am b/HDF5Examples/HFCXX/TUTR/Makefile.am new file mode 100644 index 00000000000..6103485cfaa 
--- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/Makefile.am @@ -0,0 +1,81 @@ +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the LICENSE file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +## +# +# HDF5 Library Examples Makefile(.in) +# + +include $(top_srcdir)/config/commence.am + +INSTALL_SCRIPT_FILES = run-c++-ex.sh + +# These are the programs that 'make all' or 'make prog' will build and +# which 'make check' will run. List them in the order they should be run. +EXAMPLE_PROG=create readdata writedata compound extend_ds chunks h5group \ + h5tutr_cmprss h5tutr_crtatt h5tutr_crtdat h5tutr_crtgrpar \ + h5tutr_crtgrp h5tutr_crtgrpd h5tutr_extend h5tutr_rdwt \ + h5tutr_subset +TEST_SCRIPT=testh5c++.sh +TEST_EXAMPLES_SCRIPT=$(INSTALL_SCRIPT_FILES) + +# These are the example files to be installed +INSTALL_FILES=create.cpp readdata.cpp writedata.cpp compound.cpp \ + extend_ds.cpp chunks.cpp h5group.cpp \ + h5tutr_cmprss.cpp h5tutr_crtatt.cpp h5tutr_crtdat.cpp \ + h5tutr_crtgrpar.cpp h5tutr_crtgrp.cpp h5tutr_crtgrpd.cpp \ + h5tutr_extend.cpp h5tutr_rdwt.cpp h5tutr_subset.cpp + +# Some of the examples depend on files created by running other examples +readdata.chkexe_: create.chkexe_ +chunks.chkexe_: extend_ds.chkexe_ +h5tutr_rdwt.chkexe_: h5tutr_crtdat.chkexe +h5tutrcrtatt.chkexe_: h5tutr_crtdat.chkexe +h5tutr_crtgrpd.chkexe_: h5tutr_crtgrpar.chkexe + +# Tell conclude.am that these are C++ tests. +CXX_API=yes + +# Where to install examples +# Note: no '/' after DESTDIR. Explanation in commence.am +EXAMPLEDIR=${DESTDIR}$(examplesdir)/c++ +EXAMPLETOPDIR=${DESTDIR}$(examplesdir) + +# How to build programs using h5c++ +$(EXTRA_PROG): $(H5CPP) + $(H5CPP) $(H5CCFLAGS) $(CPPFLAGS) -o $@ $(srcdir)/$@.cpp + +# List dependencies for each program. Normally, automake would take +# care of this for us, but if we tell automake about the programs it +# will try to build them with the normal C++ compiler, not h5c++. This is +# an inelegant way of solving the problem, unfortunately. +create: $(srcdir)/create.cpp +readdata: $(srcdir)/readdata.cpp +writedata: $(srcdir)/writedata.cpp +compound: $(srcdir)/compound.cpp +extend_ds: $(srcdir)/extend_ds.cpp +chunks: $(srcdir)/chunks.cpp +h5group: $(srcdir)/h5group.cpp + +h5tutr_cmprss: $(srcdir)/h5tutr_cmprss.cpp +h5tutr_crtatt: $(srcdir)/h5tutr_crtatt.cpp +h5tutr_crtdat: $(srcdir)/h5tutr_crtdat.cpp +h5tutr_crtgrpar: $(srcdir)/h5tutr_crtgrpar.cpp +h5tutr_crtgrp: $(srcdir)/h5tutr_crtgrp.cpp +h5tutr_crtgrpd: $(srcdir)/h5tutr_crtgrpd.cpp +h5tutr_extend: $(srcdir)/h5tutr_extend.cpp +h5tutr_rdwt: $(srcdir)/h5tutr_rdwt.cpp +h5tutr_subset: $(srcdir)/h5tutr_subset.cpp + +include $(top_srcdir)/config/examples.am +include $(top_srcdir)/config/conclude.am diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_cmprss.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_cmprss.cpp new file mode 100644 index 00000000000..aafd0aa91da --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/h5tutr_cmprss.cpp @@ -0,0 +1,150 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. 
The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example illustrates how to create a compressed dataset. + * It is used in the HDF5 Tutorial. + */ + +#include +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("h5tutr_cmprss.h5"); +const H5std_string DATASET_NAME("Compressed_Data"); +const int DIM0 = 100; +const int DIM1 = 20; + +int +main(void) +{ + hsize_t dims[2] = {DIM0, DIM1}; // dataset dimensions + hsize_t chunk_dims[2] = {20, 20}; // chunk dimensions + int i, j, buf[DIM0][DIM1]; + + // Try block to detect exceptions raised by any of the calls inside it + try { + // Turn off the auto-printing when failure occurs so that we can + // handle the errors appropriately + Exception::dontPrint(); + + // Create a new file using the default property lists. + H5File file(FILE_NAME, H5F_ACC_TRUNC); + + // Create the data space for the dataset. + DataSpace *dataspace = new DataSpace(2, dims); + + // Modify dataset creation property to enable chunking + DSetCreatPropList *plist = new DSetCreatPropList; + plist->setChunk(2, chunk_dims); + + // Set ZLIB (DEFLATE) Compression using level 6. + // To use SZIP compression comment out this line. + plist->setDeflate(6); + + // Uncomment these lines to set SZIP Compression + // unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK; + // unsigned szip_pixels_per_block = 16; + // plist->setSzip(szip_options_mask, szip_pixels_per_block); + + // Create the dataset. + DataSet *dataset = + new DataSet(file.createDataSet(DATASET_NAME, PredType::STD_I32BE, *dataspace, *plist)); + + for (i = 0; i < DIM0; i++) + for (j = 0; j < DIM1; j++) + buf[i][j] = i + j; + + // Write data to dataset. + dataset->write(buf, PredType::NATIVE_INT); + + // Close objects and file. Either approach will close the HDF5 item. + delete dataspace; + delete dataset; + delete plist; + file.close(); + + // ----------------------------------------------- + // Re-open the file and dataset, retrieve filter + // information for dataset and read the data back. + // ----------------------------------------------- + + int rbuf[DIM0][DIM1]; + int numfilt; + size_t nelmts = {1}, namelen = {1}; + unsigned flags, filter_info, cd_values[1], idx; + char name[1]; + H5Z_filter_t filter_type; + + // Open the file and the dataset in the file. + file.openFile(FILE_NAME, H5F_ACC_RDONLY); + dataset = new DataSet(file.openDataSet(DATASET_NAME)); + + // Get the create property list of the dataset. + plist = new DSetCreatPropList(dataset->getCreatePlist()); + + // Get the number of filters associated with the dataset. + numfilt = plist->getNfilters(); + cout << "Number of filters associated with dataset: " << numfilt << endl; + + for (idx = 0; idx < numfilt; idx++) { + nelmts = 0; + + filter_type = plist->getFilter(idx, flags, nelmts, cd_values, namelen, name, filter_info); + + cout << "Filter Type: "; + + switch (filter_type) { + case H5Z_FILTER_DEFLATE: + cout << "H5Z_FILTER_DEFLATE" << endl; + break; + case H5Z_FILTER_SZIP: + cout << "H5Z_FILTER_SZIP" << endl; + break; + default: + cout << "Other filter type included." 
<< endl; + } + } + + // Read data. + dataset->read(rbuf, PredType::NATIVE_INT); + + delete plist; + delete dataset; + file.close(); // can be skipped + + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + return 0; // successfully terminated +} diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_crtatt.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_crtatt.cpp new file mode 100644 index 00000000000..23ce84060d2 --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/h5tutr_crtatt.cpp @@ -0,0 +1,81 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example illustrates how to create an attribute attached to a + * dataset. It is used in the HDF5 Tutorial. + */ + +#include +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("h5tutr_dset.h5"); +const H5std_string DATASET_NAME("dset"); +const H5std_string ATTR_NAME("Units"); + +const int DIM1 = 2; + +int +main(void) +{ + int attr_data[2] = {100, 200}; + hsize_t dims[1] = {DIM1}; + + // Try block to detect exceptions raised by any of the calls inside it + try { + // Turn off the auto-printing when failure occurs so that we can + // handle the errors appropriately + Exception::dontPrint(); + + // Open an existing file and dataset. + H5File file(FILE_NAME, H5F_ACC_RDWR); + DataSet dataset = file.openDataSet(DATASET_NAME); + + // Create the data space for the attribute. + DataSpace attr_dataspace = DataSpace(1, dims); + + // Create a dataset attribute. + Attribute attribute = dataset.createAttribute(ATTR_NAME, PredType::STD_I32BE, attr_dataspace); + + // Write the attribute data. + attribute.write(PredType::NATIVE_INT, attr_data); + + } // end of try block + + // catch failure caused by the H5File operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the H5File operations + catch (AttributeIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + return 0; // successfully terminated +} diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_crtdat.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_crtdat.cpp new file mode 100644 index 00000000000..5db56233c89 --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/h5tutr_crtdat.cpp @@ -0,0 +1,72 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. 
* + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example illustrates how to create a dataset that is a 4 x 6 + * array. It is used in the HDF5 Tutorial. + */ + +#include +#include + +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("h5tutr_dset.h5"); +const H5std_string DATASET_NAME("dset"); +const int NX = 4; // dataset dimensions +const int NY = 6; +const int RANK = 2; + +int +main(void) +{ + // Try block to detect exceptions raised by any of the calls inside it + try { + // Turn off the auto-printing when failure occurs so that we can + // handle the errors appropriately + Exception::dontPrint(); + + // Create a new file using the default property lists. + H5File file(FILE_NAME, H5F_ACC_TRUNC); + + // Create the data space for the dataset. + hsize_t dims[2]; // dataset dimensions + dims[0] = NX; + dims[1] = NY; + DataSpace dataspace(RANK, dims); + + // Create the dataset. + DataSet dataset = file.createDataSet(DATASET_NAME, PredType::STD_I32BE, dataspace); + + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + return 0; // successfully terminated +} diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrp.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrp.cpp new file mode 100644 index 00000000000..1d157f876ca --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrp.cpp @@ -0,0 +1,59 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example illustrates how to create and close a group. + * It is used in the HDF5 Tutorial. + */ + +#include +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("h5tutr_group.h5"); + +int +main(void) +{ + // Try block to detect exceptions raised by any of the calls inside it + try { + // Turn off the auto-printing when failure occurs so that we can + // handle the errors appropriately + Exception::dontPrint(); + + // Create a new file using default property lists. + H5File file(FILE_NAME, H5F_ACC_TRUNC); + + // Create a group named "/MygGroup" in the file + Group group(file.createGroup("/MyGroup")); + + // File and group will be closed as their instances go out of scope. 
+
+    } // end of try block
+
+    // catch failure caused by the H5File operations
+    catch (FileIException error) {
+        error.printErrorStack();
+        return -1;
+    }
+    // catch failure caused by the Group operations
+    catch (GroupIException error) {
+        error.printErrorStack();
+        return -1;
+    }
+
+    return 0;
+}
diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpar.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpar.cpp
new file mode 100644
index 00000000000..81050d44380
--- /dev/null
+++ b/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpar.cpp
@@ -0,0 +1,80 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5. The full HDF5 copyright notice, including      *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the LICENSE file, which can be found at the root of the source code       *
+ * distribution tree, or in https://www.hdfgroup.org/licenses.               *
+ * If you do not have access to either file, you may request a copy from     *
+ * help@hdfgroup.org.                                                         *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * This example illustrates the creation of groups using absolute and
+ * relative names. It is used in the HDF5 Tutorial.
+ */
+
+#include <iostream>
+using std::cout;
+using std::endl;
+
+#include <string>
+#include "H5Cpp.h"
+using namespace H5;
+
+const H5std_string FILE_NAME("h5tutr_groups.h5");
+
+int
+main(void)
+{
+
+    // Try block to detect exceptions raised by any of the calls inside it
+    try {
+
+        // Turn off the auto-printing when failure occurs so that we can
+        // handle the errors appropriately.
+
+        Exception::dontPrint();
+
+        // Create a new file using default properties.
+
+        H5File file(FILE_NAME, H5F_ACC_TRUNC);
+
+        // Create group "MyGroup" in the root group using an absolute name.
+
+        Group group1(file.createGroup("/MyGroup"));
+
+        // Create group "Group_A" in group "MyGroup" using an
+        // absolute name.
+
+        Group group2(file.createGroup("/MyGroup/Group_A"));
+
+        // Create group "Group_B" in group "MyGroup" using a
+        // relative name.
+
+        Group group3(group1.createGroup("Group_B"));
+
+        // Close the groups and file.
+
+        group1.close();
+        group2.close();
+        group3.close();
+        file.close();
+
+    } // end of try block
+
+    // catch failure caused by the File operations
+    catch (FileIException error) {
+        error.printErrorStack();
+        return -1;
+    }
+
+    // catch failure caused by the Group operations
+    catch (GroupIException error) {
+        error.printErrorStack();
+        return -1;
+    }
+
+    return 0;
+}
diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpd.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpd.cpp
new file mode 100644
index 00000000000..81d970b9d89
--- /dev/null
+++ b/HDF5Examples/HFCXX/TUTR/h5tutr_crtgrpd.cpp
@@ -0,0 +1,127 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5. The full HDF5 copyright notice, including      *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the LICENSE file, which can be found at the root of the source code       *
+ * distribution tree, or in https://www.hdfgroup.org/licenses.               *
+ * If you do not have access to either file, you may request a copy from     *
+ * help@hdfgroup.org.                                                         *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * This example illustrates how to create a dataset in a group.
+ * It is used in the HDF5 Tutorial.
+ */ + +#include +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("h5tutr_groups.h5"); +const H5std_string DATASET_NAME1("/MyGroup/dset1"); +const H5std_string DATASET_NAME2("dset2"); +const int RANK = 2; +const int D1DIM1 = 3; +const int D1DIM2 = 3; +const int D2DIM1 = 2; +const int D2DIM2 = 10; + +int +main(void) +{ + int dset1_data[D1DIM1][D1DIM2], dset2_data[D2DIM1][D2DIM2]; // data buffers + int i, j; + + // Try block to catch exceptions raised by any of the calls inside it + try { + // Turn off the auto-printing when failure occurs so that we can + // handle the errors appropriately + Exception::dontPrint(); + + // Initialize the first dataset. + for (i = 0; i < D1DIM1; i++) + for (j = 0; j < D1DIM2; j++) + dset1_data[i][j] = j + 1; + + // Initialize the second dataset. + for (i = 0; i < D2DIM1; i++) + for (j = 0; j < D2DIM2; j++) + dset2_data[i][j] = j + 1; + + // Open an existing file and dataset. + H5File file(FILE_NAME, H5F_ACC_RDWR); + + // Create the data space for the first dataset. Note the use of + // pointer for the instance 'dataspace'. It can be deleted and + // used again later for another data space. An HDF5 identifier is + // closed by the destructor or the method 'close()'. + hsize_t dims[RANK]; // dataset dimensions + dims[0] = D1DIM1; + dims[1] = D1DIM2; + DataSpace *dataspace = new DataSpace(RANK, dims); + + // Create the dataset in group "MyGroup". Same note as for the + // dataspace above. + DataSet *dataset = new DataSet(file.createDataSet(DATASET_NAME1, PredType::STD_I32BE, *dataspace)); + + // Write the data to the dataset using default memory space, file + // space, and transfer properties. + dataset->write(dset1_data, PredType::NATIVE_INT); + + // Close the current dataset and data space. + delete dataset; + delete dataspace; + + // Create the data space for the second dataset. + dims[0] = D2DIM1; + dims[1] = D2DIM2; + dataspace = new DataSpace(RANK, dims); + + // Create group "Group_A" in group "MyGroup". + Group group(file.openGroup("/MyGroup/Group_A")); + + // Create the second dataset in group "Group_A". + dataset = new DataSet(group.createDataSet(DATASET_NAME2, PredType::STD_I32BE, *dataspace)); + + // Write the data to the dataset using default memory space, file + // space, and transfer properties. + dataset->write(dset2_data, PredType::NATIVE_INT); + + // Close all objects. + delete dataspace; + delete dataset; + group.close(); + + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the Group operations + catch (GroupIException error) { + error.printErrorStack(); + return -1; + } + + return 0; +} diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_extend.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_extend.cpp new file mode 100644 index 00000000000..36392b8192b --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/h5tutr_extend.cpp @@ -0,0 +1,160 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. 
The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example illustrates how to create a dataset that is a 4 x 6 + * array. It is used in the HDF5 Tutorial. + */ + +#include +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("h5tutr_extend.h5"); +const H5std_string DATASETNAME("ExtendibleArray"); + +int +main(void) +{ + hsize_t dims[2] = {3, 3}; // dataset dimensions at creation + hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; + hsize_t chunk_dims[2] = {2, 5}; + int data[3][3] = {{1, 1, 1}, // data to write + {1, 1, 1}, + {1, 1, 1}}; + + // Variables used in extending and writing to the extended portion of dataset + + hsize_t size[2]; + hsize_t offset[2]; + hsize_t dimsext[2] = {7, 3}; // extend dimensions + int dataext[7][3] = {{2, 3, 4}, {2, 3, 4}, {2, 3, 4}, {2, 3, 4}, {2, 3, 4}, {2, 3, 4}, {2, 3, 4}}; + + // Try block to detect exceptions raised by any of the calls inside it + try { + // Turn off the auto-printing when failure occurs so that we can + // handle the errors appropriately + Exception::dontPrint(); + + // Create a new file using the default property lists. + H5File file(FILE_NAME, H5F_ACC_TRUNC); + + // Create the data space for the dataset. Note the use of pointer + // for the instance 'dataspace'. It can be deleted and used again + // later for another dataspace. An HDF5 identifier can be closed + // by the destructor or the method 'close()'. + DataSpace *dataspace = new DataSpace(2, dims, maxdims); + + // Modify dataset creation property to enable chunking + DSetCreatPropList prop; + prop.setChunk(2, chunk_dims); + + // Create the chunked dataset. Note the use of pointer. + DataSet *dataset = + new DataSet(file.createDataSet(DATASETNAME, PredType::STD_I32BE, *dataspace, prop)); + + // Write data to dataset. + dataset->write(data, PredType::NATIVE_INT); + + // Extend the dataset. Dataset becomes 10 x 3. + size[0] = dims[0] + dimsext[0]; + size[1] = dims[1]; + dataset->extend(size); + + // Select a hyperslab in extended portion of the dataset. + DataSpace *filespace = new DataSpace(dataset->getSpace()); + offset[0] = 3; + offset[1] = 0; + filespace->selectHyperslab(H5S_SELECT_SET, dimsext, offset); + + // Define memory space. + DataSpace *memspace = new DataSpace(2, dimsext, NULL); + + // Write data to the extended portion of the dataset. + dataset->write(dataext, PredType::NATIVE_INT, *memspace, *filespace); + + // Close all objects and file. + prop.close(); + delete filespace; + delete memspace; + delete dataspace; + delete dataset; + file.close(); + + // --------------------------------------- + // Re-open the file and read the data back + // --------------------------------------- + + int rdata[10][3]; + int i, j, rank; + hsize_t chunk_dimsr[2], dimsr[2]; + + // Open the file and dataset. + file.openFile(FILE_NAME, H5F_ACC_RDONLY); + dataset = new DataSet(file.openDataSet(DATASETNAME)); + + // Get the dataset's dataspace and creation property list. + filespace = new DataSpace(dataset->getSpace()); + prop = dataset->getCreatePlist(); + + // Get information to obtain memory dataspace. 
+ rank = filespace->getSimpleExtentNdims(); + (void)filespace->getSimpleExtentDims(dimsr); + + if (H5D_CHUNKED == prop.getLayout()) { + int rank_chunk = prop.getChunk(rank, chunk_dimsr); + cout << "rank chunk = " << rank_chunk << endl; + } + + memspace = new DataSpace(rank, dimsr, NULL); + dataset->read(rdata, PredType::NATIVE_INT, *memspace, *filespace); + + cout << endl; + for (j = 0; j < dimsr[0]; j++) { + for (i = 0; i < dimsr[1]; i++) + cout << " " << rdata[j][i]; + cout << endl; + } + + // Close all objects and file. + prop.close(); + delete filespace; + delete memspace; + delete dataset; + file.close(); + + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + return 0; // successfully terminated +} diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_rdwt.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_rdwt.cpp new file mode 100644 index 00000000000..b13a782dd05 --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/h5tutr_rdwt.cpp @@ -0,0 +1,70 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example illustrates how to write to and read from an existing + * dataset. It is used in the HDF5 Tutorial. + */ + +#include +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("h5tutr_dset.h5"); +const H5std_string DATASET_NAME("dset"); +const int DIM0 = 4; // dataset dimensions +const int DIM1 = 6; + +int +main(void) +{ + + // Data initialization. + + int i, j; + int data[DIM0][DIM1]; // buffer for data to write + + for (j = 0; j < DIM0; j++) + for (i = 0; i < DIM1; i++) + data[j][i] = i * 6 + j + 1; + + // Try block to detect exceptions raised by any of the calls inside it + try { + // Turn off the auto-printing when failure occurs so that we can + // handle the errors appropriately + Exception::dontPrint(); + + // Open an existing file and dataset. + H5File file(FILE_NAME, H5F_ACC_RDWR); + DataSet dataset = file.openDataSet(DATASET_NAME); + + // Write the data to the dataset using default memory space, file + // space, and transfer properties. 
+ dataset.write(data, PredType::NATIVE_INT); + + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + return 0; // successfully terminated +} diff --git a/HDF5Examples/HFCXX/TUTR/h5tutr_subset.cpp b/HDF5Examples/HFCXX/TUTR/h5tutr_subset.cpp new file mode 100644 index 00000000000..15ea6fffd7d --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/h5tutr_subset.cpp @@ -0,0 +1,168 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the LICENSE file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * This example illustrates how to read/write a subset of data (a slab) + * from/to a dataset in an HDF5 file. It is used in the HDF5 Tutorial. + */ + +#include +using std::cout; +using std::endl; + +#include +#include "H5Cpp.h" +using namespace H5; + +const H5std_string FILE_NAME("h5tutr_subset.h5"); +const H5std_string DATASET_NAME("IntArray"); + +const int RANK = 2; +const int DIM0_SUB = 3; // subset dimensions +const int DIM1_SUB = 4; +const int DIM0 = 8; // size of dataset +const int DIM1 = 10; + +int +main(void) +{ + int i, j; + int data[DIM0][DIM1], sdata[DIM0_SUB][DIM1_SUB], rdata[DIM0][DIM1]; + + // Try block to detect exceptions raised by any of the calls inside it + try { + // Turn off the auto-printing when failure occurs so that we can + // handle the errors appropriately + Exception::dontPrint(); + + // --------------------------------------------------- + // Create a new file using the default property lists. + // Then create a dataset and write data to it. + // Close the file and dataset. + // --------------------------------------------------- + + H5File file(FILE_NAME, H5F_ACC_TRUNC); + + hsize_t dims[2]; + dims[0] = DIM0; + dims[1] = DIM1; + DataSpace dataspace = DataSpace(RANK, dims); + + DataSet dataset(file.createDataSet(DATASET_NAME, PredType::STD_I32BE, dataspace)); + + for (j = 0; j < DIM0; j++) { + for (i = 0; i < DIM1; i++) + if (i < (DIM1 / 2)) + data[j][i] = 1; + else + data[j][i] = 2; + } + + dataset.write(data, PredType::NATIVE_INT); + + cout << endl << "Data Written to File:" << endl; + for (j = 0; j < DIM0; j++) { + for (i = 0; i < DIM1; i++) + cout << " " << data[j][i]; + cout << endl; + } + + dataspace.close(); + dataset.close(); + file.close(); + + // --------------------------------------------------- + // Reopen the file and dataset and write a subset of + // values to the dataset. + // --------------------------------------------------- + + hsize_t offset[2], count[2], stride[2], block[2]; + hsize_t dimsm[2]; + + file.openFile(FILE_NAME, H5F_ACC_RDWR); + dataset = file.openDataSet(DATASET_NAME); + + // Specify size and shape of subset to write. + + offset[0] = 1; + offset[1] = 2; + + count[0] = DIM0_SUB; + count[1] = DIM1_SUB; + + stride[0] = 1; + stride[1] = 1; + + block[0] = 1; + block[1] = 1; + + // Define Memory Dataspace. 
Get file dataspace and select + // a subset from the file dataspace. + + dimsm[0] = DIM0_SUB; + dimsm[1] = DIM1_SUB; + + DataSpace memspace(RANK, dimsm, NULL); + + dataspace = dataset.getSpace(); + dataspace.selectHyperslab(H5S_SELECT_SET, count, offset, stride, block); + + // Write a subset of data to the dataset, then read the + // entire dataset back from the file. + + cout << endl << "Write subset to file specifying: " << endl; + cout << " offset=1x2 stride=1x1 count=3x4 block=1x1" << endl; + for (j = 0; j < DIM0_SUB; j++) { + for (i = 0; i < DIM1_SUB; i++) + sdata[j][i] = 5; + } + + dataset.write(sdata, PredType::NATIVE_INT, memspace, dataspace); + dataset.read(rdata, PredType::NATIVE_INT); + + cout << endl << "Data in File after Subset is Written:" << endl; + for (i = 0; i < DIM0; i++) { + for (j = 0; j < DIM1; j++) + cout << " " << rdata[i][j]; + cout << endl; + } + cout << endl; + + // It is not necessary to close these objects because close() will + // be called when the object instances are going out of scope. + dataspace.close(); + memspace.close(); + dataset.close(); + file.close(); + + } // end of try block + + // catch failure caused by the H5File operations + catch (FileIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSet operations + catch (DataSetIException error) { + error.printErrorStack(); + return -1; + } + + // catch failure caused by the DataSpace operations + catch (DataSpaceIException error) { + error.printErrorStack(); + return -1; + } + + return 0; // successfully terminated +} diff --git a/HDF5Examples/HFCXX/TUTR/testh5c++.sh.in b/HDF5Examples/HFCXX/TUTR/testh5c++.sh.in new file mode 100644 index 00000000000..ef8a1407f45 --- /dev/null +++ b/HDF5Examples/HFCXX/TUTR/testh5c++.sh.in @@ -0,0 +1,277 @@ +#! /bin/sh +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the LICENSE file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +# +# Tests for the h5c++ compiler tool + +srcdir=@srcdir@ + +# Initializations +TESTNAME=h5c++ +EXIT_SUCCESS=0 +EXIT_FAILURE=1 + +# Where the tool is installed. +prefix="${prefix:-@prefix@}" +AR="@AR@" +RANLIB="@RANLIB@" +H5TOOL="h5c++" # The tool name +H5TOOL_BIN="${prefix}/bin/${H5TOOL}" # The path of the tool binary + +CMP='cmp -s' +DIFF='diff -c' + +nerrors=$EXIT_SUCCESS +verbose=yes + +# setup my machine information. +myos=`uname -s` +myhostnama=`uname -n` + +# Generate some source files and library for tests. +suffix=cpp # source file suffix +hdf5main=${H5TOOL}_hdf5main.$suffix +hdf5main_o=${H5TOOL}_hdf5main.o +appmain=${H5TOOL}_appmain.$suffix +appmain_o=${H5TOOL}_appmain.o +prog1=${H5TOOL}_prog1.$suffix +prog1_o=${H5TOOL}_prog1.o +prog2=${H5TOOL}_prog2.$suffix +prog2_o=${H5TOOL}_prog2.o +applib=libapp${H5TOOL}.a +args=${H5TOOL}_args.$suffix +args_o=${H5TOOL}_args.o + +# short hands +# Caution: if some *.h5 files must be cleaned here, list them by names. +# Don't use the wildcard form of *.h5 as it will wipe out even *.h5 generated +# by other test programs. This will cause a racing condition error when +# parallel make (e.g., gmake -j 4) is used. 
+temp_SRC="$hdf5main $appmain $prog1 $prog2" +temp_OBJ=`echo $temp_SRC | sed -e "s/\.${suffix}/.o/g"` +temp_FILES="a.out $applib" + +# Generate appmain: +# An application Main that calls hdf5 and application's own functions. +cat > $appmain < + +#include + +#include "H5Cpp.h" + +#ifndef H5_NO_NAMESPACE +using namespace H5; +#endif + +const H5std_string FILE_NAME( "tmpapp.h5" ); +int sub1(void); +int sub2(void); + +int main (void) +{ + sub1(); + sub2(); + H5File file( FILE_NAME, H5F_ACC_TRUNC ); + return 0; +} + +EOF + +# generate prog1 +cat > $prog1 < +#include + +using std::cout; +using std::endl; +int sub1(void) +{ + cout << "in sub1" << endl; + return 0; +} +EOF + +# generate prog2 +cat > $prog2 < +#include + +using std::cout; +using std::endl; +int sub2(void) +{ + cout << "in sub2" << endl; + return 0; +} +EOF + +# Generate HDF5 Main Program: +# An HDF5 sample program that calls hdf5 functions. +cat > $hdf5main < + +#include + +#include "H5Cpp.h" + +#ifndef H5_NO_NAMESPACE +using namespace H5; +#endif + +const H5std_string FILE_NAME( "tmphdf5.h5" ); + +int main (void) +{ + H5File file( FILE_NAME, H5F_ACC_TRUNC ); + return 0; +} +EOF + +# Generate args: +# An application main that test misc command line arguments being passed. +cat > $args < +#include +#include "H5Cpp.h" +#ifndef H5_NO_NAMESPACE +using namespace H5; +#endif + +const H5std_string FILE_NAME( "args.h5" ); + +int main (void) +{ + char c = SGL_QUOTE; // 'H' + char *s = DBL_QUOTE; // "HDF" + int val = MISC; // 42 + + H5File file( FILE_NAME, H5F_ACC_TRUNC ); + return 0; +} +EOF + +# Parse option +# None + +# Print a line-line message left justified in a field of 74 characters +# beginning with the word "Testing". +# +TESTING() { + SPACES=" " + echo "Testing $* $SPACES" | cut -c1-74 | tr -d '\012' +} + + +# Debug printing +# Change : to echo to print the debug statement +DPRINT() { + : $* +} + +# Run a test and print PASS or *FAIL*. If a test fails then increment +# the `nerrors' global variable and (if $verbose is set) display the +# failed output. The actual output is not removed if $HDF5_NOCLEANUP is +# defined. +# +TOOLTEST() { + out=test_$H5TOOL_$$.out + err=test_$H5TOOL_$$.err + + # Run test. + TESTING $H5TOOL $@ + $H5TOOL_BIN $@ > $out 2>&1 + result=$? + if [ $result = 0 ]; then + echo " PASSED" + else + echo "*FAILED*" + nerrors="`expr $nerrors + 1`" + test yes = "$verbose" && \ + ( echo "========== results ==========="; cat $out; + echo "===============================================") |sed 's/^/ /' + fi + + # Clean up output file + if test -z "$HDF5_NOCLEANUP"; then + rm -f $out + fi +} + +# Print a "SKIP" message +SKIP() { + TESTING $H5TOOL $@ + echo " -SKIP-" +} + + +############################################################################## +### T H E T E S T S ### +############################################################################## +# +# HDF5 program that calls HDF5 APIs. +echo "***"Simple Compile and Link in one step. +TOOLTEST $hdf5main +# Application program that calls HDF5 and its own functions. +TOOLTEST $appmain $prog1 $prog2 + +# Compile, then link. +echo "***"Compile and Link in two steps. +TOOLTEST -c $hdf5main +TOOLTEST $hdf5main_o +TOOLTEST -c $appmain $prog1 $prog2 +TOOLTEST $appmain_o $prog1_o $prog2_o + +# Build external library, then link with it. +echo "***"Build external library and link with it. +TOOLTEST -c $prog1 $prog2 +rm -f $applib +$AR cru $applib $prog1_o $prog2_o +$RANLIB $applib +TOOLTEST $appmain $applib +TOOLTEST $appmain_o $applib +# This is peculiar but should work. 
(See bug ID 729) +TOOLTEST -c $hdf5main +rm -f $applib +$AR cru $applib $hdf5main_o +$RANLIB $applib +# SunOS does not support this. Skip it. +if [ $myos = SunOS ]; then + SKIP -o a.out $applib +else + TOOLTEST -o a.out $applib +fi + +# Just preprocess, no compile, no link. +echo "***"Just preprocess, no compile, no link. +TOOLTEST -E $hdf5main +TOOLTEST -E $appmain $prog1 $prog2 + +# HDF5 program that depends on input args. +echo "***"Simple Compile and Link in one step with user-supplied arguments. +TOOLTEST -DSGL_QUOTE=\'H\' -DDBL_QUOTE=\"HDF\" -DMISC=42 $args + +############################################################################## +# END +############################################################################## + +# Clean up file +if test -z "$HDF5_NOCLEANUP"; then + rm -f $temp_SRC $temp_OBJ $temp_FILES +fi + +if test $nerrors -eq 0 ; then + echo "All $TESTNAME tests passed." + exit $EXIT_SUCCESS +else + echo "$TESTNAME tests failed with $nerrors errors." + exit $EXIT_FAILURE +fi diff --git a/HDF5Examples/config/cmake-presets/hidden-presets.json b/HDF5Examples/config/cmake-presets/hidden-presets.json index b8523977923..3f619c788d9 100644 --- a/HDF5Examples/config/cmake-presets/hidden-presets.json +++ b/HDF5Examples/config/cmake-presets/hidden-presets.json @@ -139,7 +139,8 @@ "name": "ci-CPP", "hidden": true, "cacheVariables": { - "H5EX_BUILD_CXX": "ON" + "H5EX_BUILD_CXX": "ON", + "H5EX_USE_HIGH_FIVE": "ON" } }, { diff --git a/HDF5Examples/config/cmake/HDFExampleMacros.cmake b/HDF5Examples/config/cmake/HDFExampleMacros.cmake index fb25f54731c..fe589456dd8 100644 --- a/HDF5Examples/config/cmake/HDFExampleMacros.cmake +++ b/HDF5Examples/config/cmake/HDFExampleMacros.cmake @@ -149,7 +149,7 @@ macro (HDF5_SUPPORT) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Fortran_HL) endif () endif () - if (H5EX_BUILD_CXX) + if (H5EX_BUILD_CXX AND NOT H5EX_USE_HIGH_FIVE) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} CXX) if (H5EX_BUILD_HL) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} CXX_HL) @@ -182,7 +182,7 @@ macro (HDF5_SUPPORT) if (H5EX_BUILD_JAVA) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Java) endif () - if (H5EX_BUILD_CXX) + if (H5EX_BUILD_CXX AND NOT H5EX_USE_HIGH_FIVE) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} CXX) if (H5EX_BUILD_HL) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} CXX_HL) @@ -273,24 +273,28 @@ macro (HDF5_SUPPORT) endif () endif () - if (NOT HDF5_static_CXX_FOUND AND NOT HDF5_shared_CXX_FOUND) - set (H5EX_BUILD_CXX OFF CACHE BOOL "Build CXX support" FORCE) - message (STATUS "HDF5 CXX libs not found - disable build of CXX examples") + if (HDF5_USE_HIGH_FIVE) + set (H5EX_USE_HIGH_FIVE ON CACHE BOOL "Use HighFive C++ Headers" FORCE) else () - if (H5EX_BUILD_CXX AND ${HDF5_BUILD_CXX}) - if (BUILD_SHARED_LIBS AND HDF5_shared_CXX_FOUND) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CXX_SHARED_LIBRARY}) - if (H5EX_BUILD_HL AND ${HDF5_BUILD_HL_LIB}) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CXX_HL_SHARED_LIBRARY}) - endif () - elseif (HDF5_static_CXX_FOUND) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CXX_STATIC_LIBRARY}) - if (H5EX_BUILD_HL AND ${HDF5_BUILD_HL_LIB}) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CXX_HL_STATIC_LIBRARY}) + if (NOT HDF5_static_CXX_FOUND AND NOT HDF5_shared_CXX_FOUND) + set (H5EX_BUILD_CXX OFF CACHE BOOL "Build CXX support" FORCE) + message (STATUS "HDF5 CXX libs not found - disable build of CXX examples") + else () + if (H5EX_BUILD_CXX AND ${HDF5_BUILD_CXX}) + if 
(BUILD_SHARED_LIBS AND HDF5_shared_CXX_FOUND)
+          set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CXX_SHARED_LIBRARY})
+          if (H5EX_BUILD_HL AND ${HDF5_BUILD_HL_LIB})
+            set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CXX_HL_SHARED_LIBRARY})
+          endif ()
+        elseif (HDF5_static_CXX_FOUND)
+          set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CXX_STATIC_LIBRARY})
+          if (H5EX_BUILD_HL AND ${HDF5_BUILD_HL_LIB})
+            set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CXX_HL_STATIC_LIBRARY})
+          endif ()
+        else ()
+          set (H5EX_BUILD_CXX OFF CACHE BOOL "Build CXX support" FORCE)
+          message (STATUS "HDF5 CXX libs not found - disable build of CXX examples")
+        endif ()
-      else ()
-        set (H5EX_BUILD_CXX OFF CACHE BOOL "Build CXX support" FORCE)
-        message (STATUS "HDF5 CXX libs not found - disable build of CXX examples")
       endif ()
     endif ()
   endif ()
diff --git a/HDF5Examples/config/cmake/cacheinit.cmake b/HDF5Examples/config/cmake/cacheinit.cmake
index 872462d54b0..0d1e2c580c5 100644
--- a/HDF5Examples/config/cmake/cacheinit.cmake
+++ b/HDF5Examples/config/cmake/cacheinit.cmake
@@ -15,6 +15,7 @@ set (H5EX_BUILD_TESTING ON CACHE BOOL "Build HDF5 Unit Testing" FORCE)
 #set (H5EX_BUILD_FORTRAN ON CACHE BOOL "Build FORTRAN support" FORCE)
 #set (H5EX_BUILD_CXX ON CACHE BOOL "Build C++ support" FORCE)
+#set (H5EX_USE_HIGH_FIVE ON CACHE BOOL "Use HighFive C++ Headers instead of HDF5 C++ Library" FORCE)
 #set (H5EX_BUILD_FILTERS ON CACHE BOOL "Build filter support" FORCE)
diff --git a/c++/CMakeLists.txt b/c++/CMakeLists.txt
index 5ce26aa9cf7..009f924eecf 100644
--- a/c++/CMakeLists.txt
+++ b/c++/CMakeLists.txt
@@ -1,11 +1,63 @@
 cmake_minimum_required (VERSION 3.18)
 project (HDF5_CPP CXX)
-add_subdirectory (src)
+if (NOT HDF5_USE_HIGH_FIVE)
+  add_subdirectory (src)
+else ()
+  option (HIGHFIVE_USE_EXTERNAL "Use External Library Building for HighFive" OFF)
+  if (NOT HIGHFIVE_USE_LOCALCONTENT)
+    set (HIGHFIVE_URL ${HIGHFIVE_TGZ_ORIGPATH}/${HIGHFIVE_TGZ_NAME})
+  else ()
+    set (HIGHFIVE_URL ${TGZPATH}/${HIGHFIVE_TGZ_NAME})
+  endif ()
+  message (VERBOSE "HighFive file is ${HIGHFIVE_URL}")
+
+  if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
+    set (HIGHFIVE_USE_EXTERNAL ON CACHE BOOL "Use External Library Building for HighFive else search" FORCE)
+    if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT")
+      set (HIGHFIVE_URL ${HIGHFIVE_GIT_URL} CACHE STRING "Path to HighFive git repository")
+      set (HIGHFIVE_BRANCH ${HIGHFIVE_GIT_BRANCH})
+    elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
+      if (NOT TGZPATH)
+        set (TGZPATH ${HDF5_SOURCE_DIR})
+      endif ()
+      if (HIGHFIVE_USE_LOCALCONTENT)
+        if (NOT EXISTS "${HIGHFIVE_URL}")
+          set (HDF5_BUILD_CPP_LIB OFF CACHE BOOL "" FORCE)
+          message (VERBOSE "HighFive file ${HIGHFIVE_URL} not found")
+        endif ()
+      endif ()
+    else ()
+      set (HDF5_BUILD_CPP_LIB OFF CACHE BOOL "" FORCE)
+      set (HIGHFIVE_USE_EXTERNAL OFF CACHE BOOL "Use External Library Building for HighFive else search")
+    endif ()
+  endif ()
+
+  set (H5_HIGHFIVE_FOUND FALSE)
+  if (NOT HIGHFIVE_USE_EXTERNAL)
+    set (HIGHFIVE_FOUND FALSE)
+    find_package (HIGHFIVE NAMES HighFive REQUIRED)
+    set (H5_HIGHFIVE_FOUND ${HIGHFIVE_FOUND})
+    if (H5_HIGHFIVE_FOUND)
+      set (H5_HIGHFIVE_INCLUDE_DIRS ${HighFive_INCLUDE_DIRS} PARENT_SCOPE)
+      set (H5_HIGHFIVE_LIBS HighFive::Include PARENT_SCOPE)
+    endif ()
+  else ()
+    if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
+      EXTERNAL_HIGHFIVE_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT})
+      message (VERBOSE "HIGHFIVE 
is built") + set (H5_HIGHFIVE_INCLUDE_DIRS ${H5_HIGHFIVE_INCLUDE_DIR} PARENT_SCOPE) + set (H5_HIGHFIVE_LIBS ${H5_HIGHFIVE_LIBRARY} PARENT_SCOPE) + endif () + endif () + message (VERBOSE "HIGHFIVE LIB: ${H5_HIGHFIVE_LIBS} INCLUDE: ${H5_HIGHFIVE_INCLUDE_DIRS}") +endif () #----------------------------------------------------------------------------- # Build the CPP unit tests #----------------------------------------------------------------------------- if (NOT HDF5_EXTERNALLY_CONFIGURED AND BUILD_TESTING) - add_subdirectory (test) + if (NOT HDF5_USE_HIGH_FIVE) + add_subdirectory (test) + endif () endif () diff --git a/config/cmake/HDF5ExampleCache.cmake b/config/cmake/HDF5ExampleCache.cmake index 1ec8d408546..facfe8d43b9 100644 --- a/config/cmake/HDF5ExampleCache.cmake +++ b/config/cmake/HDF5ExampleCache.cmake @@ -11,6 +11,7 @@ set (H5EX_BUILD_FORTRAN ${HDF5_BUILD_FORTRAN} CACHE BOOL "Build examples FORTRAN set (H5EX_BUILD_JAVA ${HDF5_BUILD_JAVA} CACHE BOOL "Build examples JAVA support" FORCE) set (H5EX_BUILD_FILTERS ${HDF5_ENABLE_PLUGIN_SUPPORT} CACHE BOOL "Build examples PLUGIN filter support" FORCE) set (H5EX_BUILD_CXX ${HDF5_BUILD_CPP_LIB} CACHE BOOL "Build HDF5 C++ Library" FORCE) +set (H5EX_USE_HIGH_FIVE ${HDF5_USE_HIGH_FIVE} CACHE BOOL "Use HighFive C++ Headers instead of HDF5 C++ Library" FORCE) set (H5EX_BUILD_HL ${HDF5_BUILD_HL_LIB} CACHE BOOL "Build High Level examples" FORCE) set (H5EX_ENABLE_THREADSAFE ${HDF5_ENABLE_THREADSAFE} CACHE BOOL "Enable examples thread-safety" FORCE) set (H5EX_ENABLE_PARALLEL ${HDF5_ENABLE_PARALLEL} CACHE BOOL "Enable examples parallel build (requires MPI)" FORCE) @@ -56,10 +57,15 @@ if (NOT BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) endif () endif () if (HDF5_BUILD_CPP_LIB) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CPP_LIB_TARGET}) - if (HDF5_BUILD_HL_LIB) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_HL_CPP_LIB_TARGET}) - set (H5EX_HDF5_INCLUDE_DIRS "${H5EX_HDF5_INCLUDE_DIRS};${HDF5_HL_CPP_SRC_DIR};${HDF5_HL_CPP_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) + if (NOT HDF5_USE_HIGH_FIVE) + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CPP_LIB_TARGET}) + if (HDF5_BUILD_HL_LIB) + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_HL_CPP_LIB_TARGET}) + set (H5EX_HDF5_INCLUDE_DIRS "${H5EX_HDF5_INCLUDE_DIRS};${HDF5_HL_CPP_SRC_DIR};${HDF5_HL_CPP_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) + else () + #set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${H5_HIGHFIVE_LIBS}) + set (H5EX_HDF5_INCLUDE_DIRS "${H5EX_HDF5_INCLUDE_DIRS};${H5_HIGHFIVE_INCLUDE_DIRS}" CACHE PATH "HDF5 include dirs" FORCE) + endif () endif () endif () else () @@ -70,7 +76,19 @@ else () set (H5EX_HDF5_INCLUDE_DIRS "${H5EX_HDF5_INCLUDE_DIRS};${HDF5_HL_SRC_DIR};${HDF5_HL_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) endif () if (HDF5_BUILD_CPP_LIB) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CPP_LIBSH_TARGET}) + if (NOT HDF5_USE_HIGH_FIVE) + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CPP_LIBSH_TARGET}) + if (HDF5_BUILD_HL_LIB) + set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_HL_CPP_LIBSH_TARGET}) + set (H5EX_HDF5_INCLUDE_DIRS "${H5EX_HDF5_INCLUDE_DIRS};${HDF5_HL_CPP_SRC_DIR};${HDF5_HL_CPP_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) + endif () + else () + #message (STATUS "HIGHFIVE LIB: ${H5_HIGHFIVE_LIBS} INCLUDE: ${H5_HIGHFIVE_INCLUDE_DIRS}") + #set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${H5_HIGHFIVE_LIBS}) + set (H5EX_HDF5_INCLUDE_DIRS 
"${H5EX_HDF5_INCLUDE_DIRS};${H5_HIGHFIVE_INCLUDE_DIRS}" CACHE PATH "HDF5 include dirs" FORCE) + message (STATUS "HDF5 Example HF libs: ${H5EX_HDF5_LINK_LIBS} Includes: ${H5EX_HDF5_INCLUDE_DIRS}") + endif () + #message (STATUS "HDF5 Example CPP libs: ${H5EX_HDF5_LINK_LIBS} Includes: ${H5EX_HDF5_INCLUDE_DIRS}") endif () if (HDF5_BUILD_FORTRAN) set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_F90_LIBSH_TARGET}) @@ -79,13 +97,6 @@ else () set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_HL_F90_LIBSH_TARGET}) endif () endif () - if (HDF5_BUILD_CPP_LIB) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CPP_LIBSH_TARGET}) - if (HDF5_BUILD_HL_LIB) - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_HL_CPP_LIBSH_TARGET}) - set (H5EX_HDF5_INCLUDE_DIRS "${H5EX_HDF5_INCLUDE_DIRS};${HDF5_HL_CPP_SRC_DIR};${HDF5_HL_CPP_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) - endif () - endif () if (HDF5_BUILD_JAVA) set (HDF5_JAVA_INCLUDE_DIRS ${HDF5_JAVA_JARS} ${HDF5_JAVA_LOGGING_JAR}) set (H5EX_JAVA_LIBRARY ${HDF5_JAVA_JNI_LIB_TARGET}) diff --git a/config/cmake/HDFLibMacros.cmake b/config/cmake/HDFLibMacros.cmake index f85fb42c51b..6c0f2389a81 100644 --- a/config/cmake/HDFLibMacros.cmake +++ b/config/cmake/HDFLibMacros.cmake @@ -85,3 +85,32 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) set (H5_SZIP_FOUND 1) set (H5_SZIP_INCLUDE_DIRS ${H5_SZIP_INCLUDE_DIR_GEN} ${H5_SZIP_INCLUDE_DIR}) endmacro () + +#------------------------------------------------------------------------------- +macro (EXTERNAL_HIGHFIVE_LIBRARY compress_type) + if (${compress_type} MATCHES "GIT") + FetchContent_Declare (HighFive + GIT_REPOSITORY ${HIGHFIVE_URL} + GIT_TAG ${HIGHFIVE_BRANCH} + ) + elseif (${compress_type} MATCHES "TGZ") + FetchContent_Declare (HighFive + URL ${HIGHFIVE_URL} + URL_HASH "" + ) + endif () + # Prevent HighFive CMake code from searching for HDF5: + set (HIGHFIVE_FIND_HDF5 OFF) + set (HIGHFIVE_USE_BOOST OFF) + set (HIGHFIVE_EXAMPLES OFF) + set (HIGHFIVE_BUILD_DOCS OFF) + set (HIGHFIVE_HAS_CONCEPTS OFF) + set (HDF5_C_LIBRARIES ${HDF5_CPP_LIBSH_TARGET}) # To disable looking for hdf5 the define needs to be set to anything + FetchContent_MakeAvailable (HighFive) + # Finally, use the target `HighFive::Include` which + # doesn't add a dependency on HDF5. 
+ set (H5_HIGHFIVE_LIBRARY HighFive::Include) + set (H5_HIGHFIVE_INCLUDE_DIR "${HighFive_SOURCE_DIR}/include") + set (H5_HIGHFIVE_FOUND 1) + set (H5_HIGHFIVE_INCLUDE_DIRS ${H5_HIGHFIVE_INCLUDE_DIR}) +endmacro () diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake index 44fa3db6610..1a08d8e01ef 100644 --- a/config/cmake/cacheinit.cmake +++ b/config/cmake/cacheinit.cmake @@ -68,6 +68,15 @@ set (LIBAEC_GIT_URL "https://github.com/MathisRosenhauer/libaec.git" CACHE STRIN set (LIBAEC_GIT_BRANCH "v1.1.3" CACHE STRING "" FORCE) set (HDF5_USE_LIBAEC_STATIC ON CACHE BOOL "Use static AEC library" FORCE) +######################## +# HighFive C++ options +######################## +set (HIGHFIVE_TGZ_NAME "v2.10.0.tar.gz" CACHE STRING "Use HighFive from original compressed file" FORCE) +set (HIGHFIVE_TGZ_ORIGPATH "https://github.com/BlueBrain/HighFive/archive/refs/tags" CACHE STRING "Use HighFive from original location" FORCE) +set (HIGHFIVE_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for HighFive FetchContent" FORCE) +set (HIGHFIVE_GIT_URL "https://github.com/BlueBrain/HighFive.git" CACHE STRING "Use HighFive from GitHub repository" FORCE) +set (HIGHFIVE_GIT_BRANCH "master" CACHE STRING "" FORCE) + ######################## # API test options ######################## @@ -79,7 +88,7 @@ set (KWSYS_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for KWSYS FetchConten # filter plugin options ######################## -set (PLUGIN_TGZ_ORIGPATH "https://github.com/HDFGroup/hdf5_plugins/releases/download/snapshots" CACHE STRING "Use PLUGINS from original location" FORCE) +set (PLUGIN_TGZ_ORIGPATH "https://github.com/HDFGroup/hdf5_plugins/releases/download/snapshot" CACHE STRING "Use PLUGINS from original location" FORCE) set (PLUGIN_TGZ_NAME "hdf5_plugins-master.tar.gz" CACHE STRING "Use PLUGINS from compressed file" FORCE) set (PLUGIN_USE_LOCALCONTENT ON CACHE BOOL "Use local file for PLUGIN FetchContent" FORCE) set (PLUGIN_PACKAGE_NAME "pl" CACHE STRING "Name of PLUGIN package" FORCE) diff --git a/config/cmake/examples/HDF5_Examples_options.cmake b/config/cmake/examples/HDF5_Examples_options.cmake index 1f18ae489af..426ad0280bd 100644 --- a/config/cmake/examples/HDF5_Examples_options.cmake +++ b/config/cmake/examples/HDF5_Examples_options.cmake @@ -17,6 +17,7 @@ #### H5EX_BUILD_C:BOOL=ON ### #### H5EX_BUILD_HL:BOOL=ON ### #### H5EX_BUILD_CXX:BOOL=OFF ### +#### H5EX_USE_HIGH_FIVE:BOOL=OFF ### #### H5EX_BUILD_FORTRAN:BOOL=OFF ### #### H5EX_BUILD_JAVA:BOOL=OFF ### #### H5EX_BUILD_FILTERS:BOOL=OFF ### @@ -51,6 +52,7 @@ ### enable C++ builds #set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_BUILD_CXX:BOOL=ON") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_USE_HIGH_FIVE:BOOL=ON") ### enable Fortran builds #set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_BUILD_FORTRAN:BOOL=ON") diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 2b8e69db0c2..e370feeea7b 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -556,7 +556,7 @@ These five steps are described in detail below. 
######################## # filter plugin options ######################## - set (PLUGIN_TGZ_ORIGPATH "https://github.com/HDFGroup/hdf5_plugins/releases/download/snapshots" CACHE STRING "Use PLUGINS from original location" FORCE) + set (PLUGIN_TGZ_ORIGPATH "https://github.com/HDFGroup/hdf5_plugins/releases/download/snapshot" CACHE STRING "Use PLUGINS from original location" FORCE) set (PLUGIN_TGZ_NAME "hdf5_plugins-master.tar.gz" CACHE STRING "Use PLUGINS from compressed file" FORCE) set (PLUGIN_USE_LOCALCONTENT ON CACHE BOOL "Use local file for PLUGIN FetchContent" FORCE) set (PLUGIN_PACKAGE_NAME "pl" CACHE STRING "Name of PLUGIN package" FORCE) @@ -865,6 +865,7 @@ HDF5_EXTERNAL_LIB_SUFFIX "Use suffix for custom library naming." HDF5_DISABLE_COMPILER_WARNINGS "Disable compiler warnings" OFF HDF5_ENABLE_ALL_WARNINGS "Enable all warnings" OFF HDF5_SHOW_ALL_WARNINGS "Show all warnings (i.e. not suppress "noisy" ones internally)" OFF +HDF5_ENABLE_WARNINGS_AS_ERRORS "Interpret some warnings as errors" OFF HDF5_ENABLE_COVERAGE "Enable code coverage for Libraries and Programs" OFF HDF5_ENABLE_DEBUG_APIS "Turn on extra debug output in all packages" OFF HDF5_ENABLE_DEPRECATED_SYMBOLS "Enable deprecated public API symbols" ON @@ -901,6 +902,8 @@ if (HDF5_BUILD_FORTRAN) if (BUILD_SHARED_LIBS AND NOT BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED if (NOT BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is STATIC if (NOT BUILD_SHARED_LIBS AND NOT BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED +if (HDF5_BUILD_CPP_LIB) + HDF5_USE_HIGH_FIVE "Use HighFive C++ Headers instead of HDF5 C++ Library" OFF HDF5_ENABLE_ANALYZER_TOOLS "enable the use of Clang tools" OFF HDF5_ENABLE_SANITIZERS "execute the Clang sanitizer" OFF @@ -939,7 +942,7 @@ if (BUILD_TESTING) HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building (NO GIT TGZ)" "NO" HDF5_ENABLE_PLUGIN_SUPPORT "Enable PLUGIN Filters" OFF HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF -HDF5_ENABLE_ZLIB_SUPPORT "Enable Zlib Filters" ON +HDF5_ENABLE_ZLIB_SUPPORT "Enable Zlib Filters" ON if (HDF5_USE_ZLIB_NG) ZLIBNG_USE_EXTERNAL "Use External Library Building for ZLIBNG" OFF diff --git a/release_docs/USING_CMake_Examples.txt b/release_docs/USING_CMake_Examples.txt index 514e300b393..3defd566d09 100644 --- a/release_docs/USING_CMake_Examples.txt +++ b/release_docs/USING_CMake_Examples.txt @@ -86,6 +86,7 @@ III. 
Defaults in the HDF5_Examples_options.cmake file
 ####      H5EX_BUILD_C:BOOL=ON                                           ###
 ####      H5EX_BUILD_HL:BOOL=OFF                                         ###
 ####      H5EX_BUILD_CXX:BOOL=OFF                                        ###
+####      H5EX_USE_HIGH_FIVE:BOOL=OFF                                    ###
 ####      H5EX_BUILD_FORTRAN:BOOL=OFF                                    ###
 ####      H5EX_BUILD_JAVA:BOOL=OFF                                       ###
 ####      H5EX_BUILD_FILTERS:BOOL=OFF                                    ###
diff --git a/src/H5build_settings.cmake.c.in b/src/H5build_settings.cmake.c.in
index 9530fbc822e..51d5ed6fe8e 100644
--- a/src/H5build_settings.cmake.c.in
+++ b/src/H5build_settings.cmake.c.in
@@ -73,6 +73,7 @@ const char H5build_settings[]=
     "        Module Directory: @CMAKE_Fortran_MODULE_DIRECTORY@\n"
     "\n"
     "                     C++: @HDF5_BUILD_CPP_LIB@\n"
+    "            HighFive C++: @HDF5_USE_HIGH_FIVE@\n"
     "            C++ Compiler: @CMAKE_CXX_COMPILER@ @CMAKE_CXX_COMPILER_VERSION@\n"
     "               C++ Flags: @CMAKE_CXX_FLAGS@ @HDF5_BUILD_MODE_CXX_FLAGS@\n"
     "           H5 C++ Flags: @HDF5_CMAKE_CXX_FLAGS@\n"

From e33816ab43538932afcb7ce9ff0fe08f563988a4 Mon Sep 17 00:00:00 2001
From: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 13 Nov 2024 22:41:39 +0000
Subject: [PATCH 2/2] Committing clang-format changes

---
 HDF5Examples/HFCXX/H5D/compound.cpp | 25 +++++++++++--------------
 HDF5Examples/HFCXX/H5D/create.cpp   |  4 ++--
 HDF5Examples/HFCXX/H5D/readdata.cpp | 12 ++++++------
 3 files changed, 19 insertions(+), 22 deletions(-)

diff --git a/HDF5Examples/HFCXX/H5D/compound.cpp b/HDF5Examples/HFCXX/H5D/compound.cpp
index de5b91d80fd..a272456a576 100644
--- a/HDF5Examples/HFCXX/H5D/compound.cpp
+++ b/HDF5Examples/HFCXX/H5D/compound.cpp
@@ -41,10 +41,10 @@ main(void)
     // Tell HighFive how to create the HDF5 datatype for this base type by
     // using the HIGHFIVE_REGISTER_TYPE macro
-    CompoundType create_compound_s1_t() {
-        return {{"a", create_datatype<int>()},
-                {"b", create_datatype<float>()},
-                {"c", create_datatype<double>()}};
+    CompoundType create_compound_s1_t()
+    {
+        return {
+            {"a", create_datatype<int>()}, {"b", create_datatype<float>()}, {"c", create_datatype<double>()}};
     }
     HIGHFIVE_REGISTER_TYPE(s1_t, create_compound_s1_t)
@@ -56,9 +56,9 @@ main(void)
     // Tell HighFive how to create the HDF5 datatype for this base type by
     // using the HIGHFIVE_REGISTER_TYPE macro
-    CompoundType create_compound_s2_t() {
-        return {{"c", create_datatype<double>()},
-                {"a", create_datatype<int>()}};
+    CompoundType create_compound_s2_t()
+    {
+        return {{"c", create_datatype<double>()}, {"a", create_datatype<int>()}};
     }
     HIGHFIVE_REGISTER_TYPE(s2_t, create_compound_s2_t)
@@ -76,13 +76,9 @@ main(void)
     // Initialize the data
     std::vector<s1_t> data;
-    int  i;
+    int i;
     for (i = 0; i < LENGTH; i++) {
-        data.push_back({
-            i,
-            i * i,
-            1. / (i + 1)
-        });
+        data.push_back({i, i * i, 1. 
/ (i + 1)}); } // Create the dataset @@ -140,7 +136,8 @@ main(void) for (i = 0; i < LENGTH; i++) std::cout << s3[i] << " "; std::cout << endl; - } catch (const Exception& err) { + } + catch (const Exception &err) { // catch and print any HDF5 error std::cerr << err.what() << std::endl; } diff --git a/HDF5Examples/HFCXX/H5D/create.cpp b/HDF5Examples/HFCXX/H5D/create.cpp index 8366719c877..7bf9d67be8a 100644 --- a/HDF5Examples/HFCXX/H5D/create.cpp +++ b/HDF5Examples/HFCXX/H5D/create.cpp @@ -60,8 +60,8 @@ main(void) // write it dataset.write(data); - } - catch (const Exception& err) { + } + catch (const Exception &err) { // catch and print any HDF5 error std::cerr << err.what() << std::endl; return -1; diff --git a/HDF5Examples/HFCXX/H5D/readdata.cpp b/HDF5Examples/HFCXX/H5D/readdata.cpp index 5709a49c429..0e034bc98d7 100644 --- a/HDF5Examples/HFCXX/H5D/readdata.cpp +++ b/HDF5Examples/HFCXX/H5D/readdata.cpp @@ -68,9 +68,9 @@ main(void) /* * Get order of datatype and print message if it's a little endian. */ - //H5std_string order_string; + // H5std_string order_string; //(void)intype.getOrder(order_string); - //std::cout << order_string << std::endl; + // std::cout << order_string << std::endl; // Get size of the data element stored in file and print it. size_t size = intype.getSize(); @@ -85,9 +85,9 @@ main(void) // Get the dimension size of each dimension in the dataspace and // display them. - auto dimss_out = dspace.getDimensions() - std::cout << "rank " << rank << ", dimensions " << (unsigned long)(dims_out[0]) << " x " - << (unsigned long)(dims_out[1]) << std::endl; + auto dimss_out = dspace.getDimensions() std::cout << "rank " << rank << ", dimensions " + << (unsigned long)(dims_out[0]) << " x " + << (unsigned long)(dims_out[1]) << std::endl; /* * Define hyperslab in the dataset; implicitly giving strike and @@ -152,7 +152,7 @@ main(void) * 0 0 0 0 0 0 0 */ } - catch (const Exception& err) { + catch (const Exception &err) { // catch and print any HDF5 error std::cerr << err.what() << std::endl; return -1;
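For orientation, the kind of HighFive calls the new HFCXX examples above are built around can be sketched in a few lines. This is a minimal illustration, assuming HighFive 2.x headers are on the include path; the file name sketch.h5, the dataset path /dset, and the sample values are invented for the example and do not appear in this patch.

#include <vector>

#include <highfive/H5File.hpp>

int
main(void)
{
    // Create (or truncate) an HDF5 file through HighFive's header-only C++ API.
    HighFive::File file("sketch.h5",
                        HighFive::File::ReadWrite | HighFive::File::Create | HighFive::File::Truncate);

    // Create a 1-D double dataset sized from the vector, then write it.
    std::vector<double> data = {1.0, 2.0, 3.0};
    HighFive::DataSet dset = file.createDataSet<double>("/dset", HighFive::DataSpace::From(data));
    dset.write(data);

    // Read the data back into a fresh vector.
    std::vector<double> readback;
    dset.read(readback);

    return (readback.size() == data.size()) ? 0 : 1;
}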