diff --git a/.github/workflows/nvhpc-cmake.yml b/.github/workflows/nvhpc-cmake.yml
index 1b0dbebc19e..faa7f71e447 100644
--- a/.github/workflows/nvhpc-cmake.yml
+++ b/.github/workflows/nvhpc-cmake.yml
@@ -72,5 +72,5 @@ jobs:
       - name: CMake Run Tests
         shell: bash
         run: |
-          ctest . --parallel 2 -C ${{ inputs.build_mode }} -V
+          ctest . --parallel 2 -C ${{ inputs.build_mode }} -V -E "ph5_f90_hyperslab_by_chunk|ph5_f90_hyperslab_by_pattern"
         working-directory: ${{ runner.workspace }}/build
diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake
index 2b03779fc89..f28bb949965 100644
--- a/CMakeInstallation.cmake
+++ b/CMakeInstallation.cmake
@@ -156,7 +156,7 @@ if (HDF5_PACK_EXAMPLES)
   )
 
   install (
-    DIRECTORY ${HDF5_BINARY_DIR}/HDF5Examples
+    DIRECTORY ${HDF5_SOURCE_DIR}/HDF5Examples
     DESTINATION ${HDF5_INSTALL_DATA_DIR}
     USE_SOURCE_PERMISSIONS
     COMPONENT hdfdocuments
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 1397c6b9a7b..e73b7f93bd2 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -220,7 +220,7 @@ set (HDF5_CPP_TST_DIR          ${HDF5_SOURCE_DIR}/c++/test)
 set (HDF5_HL_SRC_DIR           ${HDF5_SOURCE_DIR}/hl)
 set (HDF5_HL_CPP_SRC_DIR       ${HDF5_SOURCE_DIR}/hl/c++)
 set (HDF5_HL_TOOLS_DIR         ${HDF5_SOURCE_DIR}/hl/tools)
-set (HDF5_TOOLS_DIR            ${HDF5_SOURCE_DIR}/tools)
+set (HDF5_TOOLS_ROOT_DIR       ${HDF5_SOURCE_DIR}/tools)
 set (HDF5_TOOLS_SRC_DIR        ${HDF5_SOURCE_DIR}/tools/src)
 set (HDF5_TOOLS_TST_DIR        ${HDF5_SOURCE_DIR}/tools/test)
 set (HDF5_PERFORM_SRC_DIR      ${HDF5_SOURCE_DIR}/tools/src/perform)
@@ -1038,18 +1038,6 @@ if (HDF5_PACKAGE_EXTLIBS AND NOT HDF5_NO_PACKAGES)
   endif ()
 endif ()
 
-#-----------------------------------------------------------------------------
-# Option to build examples
-#-----------------------------------------------------------------------------
-if (EXISTS "${HDF5_SOURCE_DIR}/HDF5Examples" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/HDF5Examples")
-  option (HDF5_BUILD_EXAMPLES  "Build HDF5 Library Examples" ON)
-  if (HDF5_BUILD_EXAMPLES)
-    include (${HDF_RESOURCES_DIR}/HDF5ExampleCache.cmake)
-    set (HDF5_VERSION ${HDF5_PACKAGE_VERSION})
-    add_subdirectory (HDF5Examples)
-  endif ()
-endif ()
-
 #-----------------------------------------------------------------------------
 # Option to build High Level API's
 #-----------------------------------------------------------------------------
@@ -1160,6 +1148,18 @@ if (EXISTS "${HDF5_SOURCE_DIR}/java" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/java")
   endif ()
 endif ()
 
+#-----------------------------------------------------------------------------
+# Option to build examples
+#-----------------------------------------------------------------------------
+if (EXISTS "${HDF5_SOURCE_DIR}/HDF5Examples" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/HDF5Examples")
+  option (HDF5_BUILD_EXAMPLES  "Build HDF5 Library Examples" ON)
+  if (HDF5_BUILD_EXAMPLES)
+    include (${HDF_RESOURCES_DIR}/HDF5ExampleCache.cmake)
+    set (HDF5_VERSION ${HDF5_PACKAGE_VERSION})
+    add_subdirectory (HDF5Examples)
+  endif ()
+endif ()
+
 #-----------------------------------------------------------------------------
 # Generate the H5pubconf.h file containing user settings needed by compilation
 #-----------------------------------------------------------------------------
diff --git a/HDF5Examples/C/H5D/CMakeLists.txt b/HDF5Examples/C/H5D/CMakeLists.txt
index 5df56c1932b..68cbd23af40 100644
--- a/HDF5Examples/C/H5D/CMakeLists.txt
+++ b/HDF5Examples/C/H5D/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required (VERSION 3.12)
-project (HDF5Examples_C_H5D)
+project (HDF5Examples_C_H5D C)
 
 #-----------------------------------------------------------------------------
 # Define Sources
diff --git a/HDF5Examples/C/H5FLT/CMakeLists.txt b/HDF5Examples/C/H5FLT/CMakeLists.txt
index eb386b469fc..482d65b55ad 100644
--- a/HDF5Examples/C/H5FLT/CMakeLists.txt
+++ b/HDF5Examples/C/H5FLT/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required (VERSION 3.12)
-project (HDF5Examples_C_H5FLT)
+project (HDF5Examples_C_H5FLT C)
 
 set (dyn_examples)
 
@@ -190,6 +190,8 @@ if (H5EX_BUILD_TESTING)
               -D "TEST_MASK_ERROR=true"
               -D "TEST_OUTPUT=${testname}.out"
               -D "TEST_REFERENCE=${testname}.tst"
+              -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH"
+              -D "TEST_ENV_VALUE=${H5EX_HDF5_PLUGIN_PATH}"
               -D "TEST_SKIP_COMPARE=1"
               -D "TEST_ERRREF=1"
               -D "GREP_ERRREF=Filter present but encoding disabled"
@@ -214,6 +216,8 @@ if (H5EX_BUILD_TESTING)
               -D "TEST_EXPECT=0"
               -D "TEST_OUTPUT=${testname}.out"
               -D "TEST_REFERENCE=${testname}.tst"
+              -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH"
+              -D "TEST_ENV_VALUE=${H5EX_HDF5_PLUGIN_PATH}"
               -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
               -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/runTest.cmake"
       )
@@ -233,6 +237,8 @@ if (H5EX_BUILD_TESTING)
                 -D "TEST_FILTER_REPLACE=PARAMS { \\1 XXXX \\2 }\n"
                 -D "TEST_EXPECT=0"
                 -D "TEST_REFERENCE=${testname}.ddl"
+                -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH"
+                -D "TEST_ENV_VALUE=${H5EX_HDF5_PLUGIN_PATH}"
                 -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
                 -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/runTest.cmake"
         )
@@ -248,6 +254,8 @@ if (H5EX_BUILD_TESTING)
                 -D "TEST_FILTER_REPLACE:STRING=PARAMS { XXXX }"
                 -D "TEST_EXPECT=0"
                 -D "TEST_REFERENCE=${testname}.ddl"
+                -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH"
+                -D "TEST_ENV_VALUE=${H5EX_HDF5_PLUGIN_PATH}"
                 -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
                 -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/runTest.cmake"
         )
diff --git a/HDF5Examples/C/H5FLT/tfiles/h5ex_d_zfp.ddl b/HDF5Examples/C/H5FLT/tfiles/h5ex_d_zfp.ddl
index 7d73913f312..a8e66c08054 100644
--- a/HDF5Examples/C/H5FLT/tfiles/h5ex_d_zfp.ddl
+++ b/HDF5Examples/C/H5FLT/tfiles/h5ex_d_zfp.ddl
@@ -10,7 +10,7 @@ GROUP "/" {
       FILTERS {
          USER_DEFINED_FILTER {
             FILTER_ID 32013
-            COMMENT H5Z-ZFP-1.0.1 (ZFP-0.5.5) github.com/LLNL/H5Z-ZFP
+            COMMENT H5Z-ZFP-1.1.1 (ZFP-1.0.0) github.com/LLNL/H5Z-ZFP
             PARAMS { XXXX }
          }
       }
diff --git a/HDF5Examples/C/H5FLT/tfiles/h5ex_d_zfp.tst b/HDF5Examples/C/H5FLT/tfiles/h5ex_d_zfp.tst
index 0a1ba90aae1..dd7197cda78 100644
--- a/HDF5Examples/C/H5FLT/tfiles/h5ex_d_zfp.tst
+++ b/HDF5Examples/C/H5FLT/tfiles/h5ex_d_zfp.tst
@@ -4,8 +4,8 @@ zfp filter is available for encoding and decoding.
 ....Close the file and reopen for reading ........
 Filter info is available from the dataset creation property
    Filter identifier is 32013
-   Number of parameters is 6 with the value 5570817
-   To find more about the filter check H5Z-ZFP-1.0.1 (ZFP-0.5.5) github.com/LLNL/H5Z-ZFP
+   Number of parameters is 6 with the value 268456209
+   To find more about the filter check H5Z-ZFP-1.1.1 (ZFP-1.0.0) github.com/LLNL/H5Z-ZFP
 ....Reading zfp compressed data ................
 Maximum value in DS1 is 1890.0000
 zfp filter is available now since H5Dread triggered loading of the filter.
diff --git a/HDF5Examples/C/H5G/CMakeLists.txt b/HDF5Examples/C/H5G/CMakeLists.txt
index afef5567552..ffbc6142cbc 100644
--- a/HDF5Examples/C/H5G/CMakeLists.txt
+++ b/HDF5Examples/C/H5G/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required (VERSION 3.12)
-project (HDF5Examples_C_H5G)
+project (HDF5Examples_C_H5G C)
 
 #-----------------------------------------------------------------------------
 # Define Sources
diff --git a/HDF5Examples/C/H5T/CMakeLists.txt b/HDF5Examples/C/H5T/CMakeLists.txt
index 4dcf6cb1a67..0f8884b0bd2 100644
--- a/HDF5Examples/C/H5T/CMakeLists.txt
+++ b/HDF5Examples/C/H5T/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required (VERSION 3.12)
-project (HDF5Examples_C_H5T)
+project (HDF5Examples_C_H5T C)
 
 #-----------------------------------------------------------------------------
 # Define Sources
diff --git a/HDF5Examples/CMakeLists.txt b/HDF5Examples/CMakeLists.txt
index b5fb3e28feb..e944b9efee4 100644
--- a/HDF5Examples/CMakeLists.txt
+++ b/HDF5Examples/CMakeLists.txt
@@ -1,24 +1,25 @@
 cmake_minimum_required (VERSION 3.12)
-project (H5EXAMPLES C CXX)
+project (H5EXAMPLES C)
+
+#-----------------------------------------------------------------------------
+# Define some CMake variables for use later in the project
+#-----------------------------------------------------------------------------
+set (HDF5EX_C_SRC_DIR          ${H5EXAMPLES_SOURCE_DIR}/C)
+set (HDF5EX_F90_SRC_DIR        ${H5EXAMPLES_SOURCE_DIR}/FORTRAN)
+set (HDF5EX_JAVA_DIR           ${H5EXAMPLES_SOURCE_DIR}/JAVA)
+set (HDF5EX_RESOURCES_DIR      ${H5EXAMPLES_SOURCE_DIR}/config/cmake)
 
 #-----------------------------------------------------------------------------
 # Basic HDF5Examples stuff here
 #-----------------------------------------------------------------------------
-include (${H5EXAMPLES_SOURCE_DIR}/config/cmake/HDFMacros.cmake)
-include (${H5EXAMPLES_SOURCE_DIR}/config/cmake/HDFExampleMacros.cmake)
+include (${HDF5EX_RESOURCES_DIR}/HDFMacros.cmake)
+include (${HDF5EX_RESOURCES_DIR}/HDFExampleMacros.cmake)
 set (CMAKE_JAVA_INCLUDE_PATH "")
 
 SET_HDF_BUILD_TYPE()
 
 BASIC_SETTINGS (EXAMPLES)
 
-#-----------------------------------------------------------------------------
-# Define some CMake variables for use later in the project
-#-----------------------------------------------------------------------------
-set (HDF5EX_C_SRC_DIR          ${H5EXAMPLES_SOURCE_DIR}/C)
-set (HDF5EX_F90_SRC_DIR        ${H5EXAMPLES_SOURCE_DIR}/FORTRAN)
-set (HDF5EX_JAVA_DIR           ${H5EXAMPLES_SOURCE_DIR}/JAVA)
-
 #-----------------------------------------------------------------------------
 # HDF5 support
 #-----------------------------------------------------------------------------
@@ -37,7 +38,7 @@ if(NOT DEFINED _h5public_h_contents)
   string (REGEX REPLACE ".*#define[ \t]+H5_VERS_MAJOR[ \t]+([0-9]*).*$"
       "\\1" H5_VERS_MAJOR ${_h5public_h_contents})
   string (REGEX REPLACE ".*#define[ \t]+H5_VERS_MINOR[ \t]+([0-9]*).*$"
-     "\\1" H5_VERS_MINOR ${_h5public_h_contents})
+      "\\1" H5_VERS_MINOR ${_h5public_h_contents})
   string (REGEX REPLACE ".*#define[ \t]+H5_VERS_RELEASE[ \t]+([0-9]*).*$"
       "\\1" H5_VERS_RELEASE ${_h5public_h_contents})
   string (REGEX REPLACE ".*#define[ \t]+H5_VERS_SUBRELEASE[ \t]+\"([0-9A-Za-z._-]*)\".*$"
@@ -119,11 +120,6 @@ if (H5EX_BUILD_TESTING)
   configure_file (${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/CTestCustom.cmake ${PROJECT_BINARY_DIR}/CTestCustom.ctest @ONLY)
 endif ()
 
-#-----------------------------------------------------------------------------
-# Build examples
-#-----------------------------------------------------------------------------
-add_subdirectory (C)
-
 if (${H5_LIBVER_DIR} GREATER 16)
   #-----------------------------------------------------------------------------
   # Option to build Fortran examples
@@ -136,19 +132,22 @@ if (${H5_LIBVER_DIR} GREATER 16)
   if (EXISTS "${H5EXAMPLES_SOURCE_DIR}/FORTRAN" AND IS_DIRECTORY "${H5EXAMPLES_SOURCE_DIR}/FORTRAN")
     option (HDF_BUILD_FORTRAN "Build FORTRAN support" OFF)
     if (HDF_BUILD_FORTRAN AND HDF5_BUILD_FORTRAN)
-      set (LINK_Fortran_LIBS ${H5EX_HDF5_LINK_LIBS})
+      set (H5EX_LINK_Fortran_LIBS ${H5EX_HDF5_LINK_LIBS})
 
       # Parallel IO usage requires MPI to be Linked and Included
       if (H5_HAVE_PARALLEL)
-        set (LINK_Fortran_LIBS ${LINK_Fortran_LIBS} ${MPI_Fortran_LIBRARIES})
+        set (H5EX_LINK_Fortran_LIBS ${H5EX_LINK_Fortran_LIBS} ${MPI_Fortran_LIBRARIES})
         if (MPI_Fortran_LINK_FLAGS)
           set (CMAKE_Fortran_EXE_LINKER_FLAGS "${MPI_Fortran_LINK_FLAGS} ${CMAKE_EXE_LINKER_FLAGS}")
         endif ()
       endif ()
 
-      add_subdirectory (FORTRAN)
       configure_file (${HDF5EX_F90_SRC_DIR}/H5D/h5_version.h.in ${PROJECT_BINARY_DIR}/FORTRAN/H5D/h5_version.h @ONLY)
+    else ()
+      set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build examples FORTRAN support" FORCE)
     endif ()
+  else ()
+    set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build examples FORTRAN support" FORCE)
   endif ()
 
   if (${H5_LIBVER_DIR} GREATER 18)
@@ -157,16 +156,49 @@ if (${H5_LIBVER_DIR} GREATER 16)
     #-----------------------------------------------------------------------------
     if (EXISTS "${H5EXAMPLES_SOURCE_DIR}/JAVA" AND IS_DIRECTORY "${H5EXAMPLES_SOURCE_DIR}/JAVA")
       option (HDF_BUILD_JAVA "Build JAVA support" OFF)
-      if (HDF_BUILD_JAVA AND HDF5_BUILD_JAVA)
-        add_subdirectory (JAVA)
-      endif ()
+    else ()
+      set (HDF_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE)
     endif ()
+  else ()
+    set (HDF_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE)
   endif ()
 
   #-----------------------------------------------------------------------------
   # Option to build filter examples
   #-----------------------------------------------------------------------------
-  if (EXISTS "${H5EXAMPLES_SOURCE_DIR}/C/H5F" AND IS_DIRECTORY "${H5EXAMPLES_SOURCE_DIR}/C/H5F")
+  if (EXISTS "${H5EXAMPLES_SOURCE_DIR}/C/H5FLT" AND IS_DIRECTORY "${H5EXAMPLES_SOURCE_DIR}/C/H5FLT")
     option (HDF_BUILD_FILTERS "Test filter support" OFF)
+    if (HDF_BUILD_FILTERS AND HDF5_ENABLE_PLUGIN_SUPPORT)
+      if(DEFINED ENV{HDF5_PLUGIN_PATH})
+        message (STATUS "ENV PATH=$ENV{HDF5_PLUGIN_PATH}")
+        set (H5EX_HDF5_PLUGIN_PATH $ENV{HDF5_PLUGIN_PATH})
+      else ()
+        if(NOT DEFINED H5EX_HDF5_PLUGIN_PATH)
+          message (STATUS "LIBRARY PATH=${HDF5_LIBRARY_PATH}/plugin")
+          set (H5EX_HDF5_PLUGIN_PATH ${HDF5_LIBRARY_PATH}/plugin)
+        endif ()
+      endif ()
+      message (STATUS "H5EX_HDF5_PLUGIN_PATH=${H5EX_HDF5_PLUGIN_PATH}")
+    else ()
+      set (HDF_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter support" FORCE)
+    endif ()
+  else ()
+    set (HDF_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter support" FORCE)
   endif ()
+else ()
+  set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build examples FORTRAN support" FORCE)
+  set (HDF_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE)
+  set (HDF_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter support" FORCE)
 endif ()
+
+#-----------------------------------------------------------------------------
+# Build examples
+#-----------------------------------------------------------------------------
+add_subdirectory (C)
+if (HDF_BUILD_FORTRAN AND HDF5_BUILD_FORTRAN)
+  add_subdirectory (FORTRAN)
+endif ()
+if (HDF_BUILD_JAVA AND HDF5_BUILD_JAVA)
+  add_subdirectory (JAVA)
+endif ()
+
diff --git a/HDF5Examples/FORTRAN/CMakeLists.txt b/HDF5Examples/FORTRAN/CMakeLists.txt
index 72582fe1b47..0f05a0eb08a 100644
--- a/HDF5Examples/FORTRAN/CMakeLists.txt
+++ b/HDF5Examples/FORTRAN/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required (VERSION 3.12)
-PROJECT (HDF5Examples_F90 C CXX Fortran)
+PROJECT (HDF5Examples_F90 Fortran)
 
 #-----------------------------------------------------------------------------
 # Build the Fortran Examples
diff --git a/HDF5Examples/FORTRAN/H5D/CMakeLists.txt b/HDF5Examples/FORTRAN/H5D/CMakeLists.txt
index dbc126c9216..5369a4994e0 100644
--- a/HDF5Examples/FORTRAN/H5D/CMakeLists.txt
+++ b/HDF5Examples/FORTRAN/H5D/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required (VERSION 3.12)
-project (HDF5Examples_FORTRAN_H5D C CXX Fortran)
+project (HDF5Examples_FORTRAN_H5D Fortran)
 
 # --------------------------------------------------------------------
 # Notes: When creating examples they should be prefixed
@@ -10,7 +10,7 @@ project (HDF5Examples_FORTRAN_H5D C CXX Fortran)
 # Setup include Directories
 #-----------------------------------------------------------------------------
 INCLUDE_DIRECTORIES (
-    ${CMAKE_Fortran_MODULE_DIRECTORY}
+    ${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT}
     ${PROJECT_BINARY_DIR}
     ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
 )
@@ -35,7 +35,7 @@ foreach (example_name ${common_examples})
   if (H5_HAVE_PARALLEL)
     target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
   endif ()
-  target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${LINK_Fortran_LIBS})
+  target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_LINK_Fortran_LIBS})
   set_target_properties (${EXAMPLE_VARNAME}_f90_${example_name} PROPERTIES LINKER_LANGUAGE Fortran)
   if (H5EX_BUILD_TESTING)
     if (${example_name} STREQUAL "h5ex_d_alloc")
@@ -80,7 +80,7 @@ if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.10")
     if (H5_HAVE_PARALLEL)
       target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
     endif ()
-    target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_HDF5_LINK_LIBS})
+    target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_LINK_Fortran_LIBS})
     if (H5EX_BUILD_TESTING)
       add_custom_command (
           TARGET     ${EXAMPLE_VARNAME}_f90_${example_name}
diff --git a/HDF5Examples/FORTRAN/H5G/CMakeLists.txt b/HDF5Examples/FORTRAN/H5G/CMakeLists.txt
index d2587e04d76..508283c502d 100644
--- a/HDF5Examples/FORTRAN/H5G/CMakeLists.txt
+++ b/HDF5Examples/FORTRAN/H5G/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required (VERSION 3.12)
-project (HDF5Examples_FORTRAN_H5G C CXX Fortran)
+project (HDF5Examples_FORTRAN_H5G Fortran)
 
 # --------------------------------------------------------------------
 # Notes: When creating examples they should be prefixed
@@ -10,7 +10,7 @@ project (HDF5Examples_FORTRAN_H5G C CXX Fortran)
 # Setup include Directories
 #-----------------------------------------------------------------------------
 INCLUDE_DIRECTORIES (
-    ${CMAKE_Fortran_MODULE_DIRECTORY}
+    ${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT}
     ${PROJECT_BINARY_DIR}
     ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
 )
@@ -35,7 +35,7 @@ foreach (example_name ${common_examples})
   if (H5_HAVE_PARALLEL)
     target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
   endif ()
-  target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${LINK_Fortran_LIBS})
+  target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_LINK_Fortran_LIBS})
   set_target_properties (${EXAMPLE_VARNAME}_f90_${example_name} PROPERTIES LINKER_LANGUAGE Fortran)
   if (H5EX_BUILD_TESTING)
     if (NOT ${example_name} STREQUAL "h5ex_g_create" AND NOT ${example_name} STREQUAL "h5ex_g_compact")
@@ -64,7 +64,7 @@ endforeach ()
 #    if (H5_HAVE_PARALLEL)
 #      target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
 #    endif ()
-#    target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_HDF5_LINK_LIBS})
+#    target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_LINK_Fortran_LIBS})
 #    if (H5EX_BUILD_TESTING)
 #      add_custom_command (
 #          TARGET     ${EXAMPLE_VARNAME}_f90_${example_name}
diff --git a/HDF5Examples/FORTRAN/H5PAR/CMakeLists.txt b/HDF5Examples/FORTRAN/H5PAR/CMakeLists.txt
index 84c964139fa..792caaf0bf6 100644
--- a/HDF5Examples/FORTRAN/H5PAR/CMakeLists.txt
+++ b/HDF5Examples/FORTRAN/H5PAR/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required (VERSION 3.12)
-project (HDF5Examples_FORTRAN_H5PAR C CXX Fortran)
+project (HDF5Examples_FORTRAN_H5PAR Fortran)
 
 # --------------------------------------------------------------------
 # Notes: When creating examples they should be prefixed
@@ -10,7 +10,7 @@ project (HDF5Examples_FORTRAN_H5PAR C CXX Fortran)
 # Setup include Directories
 #-----------------------------------------------------------------------------
 INCLUDE_DIRECTORIES (
-    ${CMAKE_Fortran_MODULE_DIRECTORY}
+    ${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT}
     ${PROJECT_BINARY_DIR}
     ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
 )
@@ -33,7 +33,7 @@ foreach (example_name ${examples})
           "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
   )
   target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
-  target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${LINK_Fortran_LIBS})
+  target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_LINK_Fortran_LIBS})
   set_target_properties (${EXAMPLE_VARNAME}_f90_${example_name} PROPERTIES LINKER_LANGUAGE Fortran)
 endforeach ()
 
@@ -59,8 +59,8 @@ if (H5EX_BUILD_TESTING)
         -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
         -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/grepTest.cmake"
     )
-    set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall)
-    set (last_test "${EXAMPLE_VARNAME}_${testname}")
+    set_tests_properties (${EXAMPLE_VARNAME}_f90_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_f90_${testname}-clearall)
+    set (last_test "${EXAMPLE_VARNAME}_f90_${testname}")
   endmacro ()
 
   # Ensure that 24 is a multiple of the number of processes.
diff --git a/HDF5Examples/FORTRAN/H5T/CMakeLists.txt b/HDF5Examples/FORTRAN/H5T/CMakeLists.txt
index 67a3ca3dd25..262adf85f04 100644
--- a/HDF5Examples/FORTRAN/H5T/CMakeLists.txt
+++ b/HDF5Examples/FORTRAN/H5T/CMakeLists.txt
@@ -1,5 +1,14 @@
 cmake_minimum_required (VERSION 3.12)
-project (HDF5Examples_FORTRAN_H5T C CXX Fortran)
+project (HDF5Examples_FORTRAN_H5T Fortran)
+
+#-----------------------------------------------------------------------------
+# Setup include Directories
+#-----------------------------------------------------------------------------
+INCLUDE_DIRECTORIES (
+    ${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT}
+    ${PROJECT_BINARY_DIR}
+    ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
+)
 
 #-----------------------------------------------------------------------------
 # Define Sources
@@ -21,7 +30,7 @@ include (Fortran_sourcefiles.cmake)
     if (H5_HAVE_PARALLEL)
       target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
     endif ()
-    target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${LINK_Fortran_LIBS})
+    target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_LINK_Fortran_LIBS})
     set_target_properties (${EXAMPLE_VARNAME}_f90_${example_name} PROPERTIES LINKER_LANGUAGE Fortran)
     if (H5EX_BUILD_TESTING)
       add_custom_command (
@@ -48,7 +57,7 @@ foreach (example_name ${common_examples})
   if (H5_HAVE_PARALLEL)
     target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
   endif ()
-  target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${LINK_Fortran_LIBS})
+  target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_LINK_Fortran_LIBS})
   set_target_properties (${EXAMPLE_VARNAME}_f90_${example_name} PROPERTIES LINKER_LANGUAGE Fortran)
   if (H5EX_BUILD_TESTING)
     add_custom_command (
diff --git a/HDF5Examples/JAVA/H5D/CMakeLists.txt b/HDF5Examples/JAVA/H5D/CMakeLists.txt
index 60c35dc8f72..bc06506f88b 100644
--- a/HDF5Examples/JAVA/H5D/CMakeLists.txt
+++ b/HDF5Examples/JAVA/H5D/CMakeLists.txt
@@ -3,10 +3,9 @@ project (HDF5Examples_JAVA_H5D Java)
 
 set (CMAKE_VERBOSE_MAKEFILE 1)
 
-INCLUDE_DIRECTORIES (
-    ${HDFJAVA_LIB_DIR}
-    ${JAVA_INCLUDE_PATH}
-    ${JAVA_INCLUDE_PATH2}
+set_directory_properties(PROPERTIES
+    INCLUDE_DIRECTORIES
+        "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}"
 )
 
 #-----------------------------------------------------------------------------
@@ -20,6 +19,9 @@ else ()
   set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
 endif ()
 
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_INCLUDE_DIRS}")
+set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${H5EX_JAVA_LIBRARY}$<$<OR:$<CONFIG:Debug>,$<CONFIG:Developer>>:${CMAKE_DEBUG_POSTFIX}>;")
+
 set (CMAKE_JAVA_CLASSPATH ".")
 foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
   set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
@@ -32,13 +34,18 @@ endforeach ()
 
 foreach (example ${HDF_JAVA_EXAMPLES})
   get_filename_component (example_name ${example} NAME_WE)
-  file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+  file (WRITE ${PROJECT_BINARY_DIR}/${example_name}_Manifest.txt
   "Main-Class: ${example_name}
 Class-Path: ${HDFJAVA_CLASSJARS}
 "
   )
-  add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+  add_jar (${EXAMPLE_VARNAME}_${example_name}
+      SOURCES ${example}
+      MANIFEST ${PROJECT_BINARY_DIR}/${example_name}_Manifest.txt
+  )
   get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+  get_target_property (${EXAMPLE_VARNAME}_${example_name}_CLASSPATH ${EXAMPLE_VARNAME}_${example_name} CLASSDIR)
+  add_dependencies (${EXAMPLE_VARNAME}_${example_name} ${H5EX_JAVA_LIBRARIES})
 endforeach ()
 
 if (H5EX_BUILD_TESTING)
@@ -46,9 +53,9 @@ if (H5EX_BUILD_TESTING)
     add_test (
         NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
         COMMAND "${CMAKE_COMMAND}"
-            -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+            -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
             -D "TEST_PROGRAM=${resultfile}"
-            -D "TEST_ARGS:STRING=${ARGN}"
+            -D "TEST_ARGS:STRING=${ARGN};${CMD_ARGS}"
             -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
             -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
             -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
diff --git a/HDF5Examples/JAVA/H5G/CMakeLists.txt b/HDF5Examples/JAVA/H5G/CMakeLists.txt
index 5f47d592fd4..957934b603c 100644
--- a/HDF5Examples/JAVA/H5G/CMakeLists.txt
+++ b/HDF5Examples/JAVA/H5G/CMakeLists.txt
@@ -3,10 +3,9 @@ project (HDF5Examples_JAVA_GROUPS Java)
 
 set (CMAKE_VERBOSE_MAKEFILE 1)
 
-INCLUDE_DIRECTORIES (
-    ${HDFJAVA_LIB_DIR}
-    ${JAVA_INCLUDE_PATH}
-    ${JAVA_INCLUDE_PATH2}
+set_directory_properties(PROPERTIES
+    INCLUDE_DIRECTORIES
+        "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}"
 )
 
 #-----------------------------------------------------------------------------
@@ -20,6 +19,9 @@ else ()
   set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
 endif ()
 
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_INCLUDE_DIRS}")
+set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${H5EX_JAVA_LIBRARY}$<$<OR:$<CONFIG:Debug>,$<CONFIG:Developer>>:${CMAKE_DEBUG_POSTFIX}>;")
+
 set (CMAKE_JAVA_CLASSPATH ".")
 foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
   set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
@@ -32,13 +34,18 @@ endforeach ()
 
 foreach (example ${HDF_JAVA_EXAMPLES})
   get_filename_component (example_name ${example} NAME_WE)
-  file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+  file (WRITE ${PROJECT_BINARY_DIR}/${example_name}_Manifest.txt
   "Main-Class: ${example_name}
 Class-Path: ${HDFJAVA_CLASSJARS}
 "
   )
-  add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+  add_jar (${EXAMPLE_VARNAME}_${example_name}
+      SOURCES ${example}
+      MANIFEST ${PROJECT_BINARY_DIR}/${example_name}_Manifest.txt
+  )
   get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+  get_target_property (${EXAMPLE_VARNAME}_${example_name}_CLASSPATH ${EXAMPLE_VARNAME}_${example_name} CLASSDIR)
+  add_dependencies (${EXAMPLE_VARNAME}_${example_name} ${H5EX_JAVA_LIBRARIES})
 endforeach ()
 
 if (H5EX_BUILD_TESTING)
@@ -46,9 +53,9 @@ if (H5EX_BUILD_TESTING)
     add_test (
         NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
         COMMAND "${CMAKE_COMMAND}"
-            -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+            -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
             -D "TEST_PROGRAM=${resultfile}"
-            -D "TEST_ARGS:STRING=${ARGN}"
+            -D "TEST_ARGS:STRING=${ARGN};${CMD_ARGS}"
             -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
             -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
             -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
diff --git a/HDF5Examples/JAVA/H5J/CMakeLists.txt b/HDF5Examples/JAVA/H5J/CMakeLists.txt
index 8f1c195fb56..eaefa7d0cbf 100644
--- a/HDF5Examples/JAVA/H5J/CMakeLists.txt
+++ b/HDF5Examples/JAVA/H5J/CMakeLists.txt
@@ -3,10 +3,9 @@ project (HDF5Examples_JAVA_INTRO Java)
 
 set (CMAKE_VERBOSE_MAKEFILE 1)
 
-INCLUDE_DIRECTORIES (
-    ${HDFJAVA_LIB_DIR}
-    ${JAVA_INCLUDE_PATH}
-    ${JAVA_INCLUDE_PATH2}
+set_directory_properties(PROPERTIES
+    INCLUDE_DIRECTORIES
+        "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}"
 )
 
 #-----------------------------------------------------------------------------
@@ -20,6 +19,9 @@ else ()
   set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
 endif ()
 
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_INCLUDE_DIRS}")
+set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${H5EX_JAVA_LIBRARY}$<$<OR:$<CONFIG:Debug>,$<CONFIG:Developer>>:${CMAKE_DEBUG_POSTFIX}>;")
+
 set (CMAKE_JAVA_CLASSPATH ".")
 foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
   set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
@@ -32,13 +34,18 @@ endforeach ()
 
 foreach (example ${HDF_JAVA_EXAMPLES})
   get_filename_component (example_name ${example} NAME_WE)
-  file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+  file (WRITE ${PROJECT_BINARY_DIR}/${example_name}_Manifest.txt
   "Main-Class: ${example_name}
 Class-Path: ${HDFJAVA_CLASSJARS}
 "
   )
-  add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+  add_jar (${EXAMPLE_VARNAME}_${example_name}
+      SOURCES ${example}
+      MANIFEST ${PROJECT_BINARY_DIR}/${example_name}_Manifest.txt
+  )
   get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+  get_target_property (${EXAMPLE_VARNAME}_${example_name}_CLASSPATH ${EXAMPLE_VARNAME}_${example_name} CLASSDIR)
+  add_dependencies (${EXAMPLE_VARNAME}_${example_name} ${H5EX_JAVA_LIBRARIES})
 endforeach ()
 
 if (H5EX_BUILD_TESTING)
@@ -46,9 +53,9 @@ if (H5EX_BUILD_TESTING)
     add_test (
         NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
         COMMAND "${CMAKE_COMMAND}"
-            -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+            -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
             -D "TEST_PROGRAM=${resultfile}"
-            -D "TEST_ARGS:STRING=${ARGN}"
+            -D "TEST_ARGS:STRING=${ARGN};${CMD_ARGS}"
             -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
             -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
             -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
diff --git a/HDF5Examples/JAVA/H5T/CMakeLists.txt b/HDF5Examples/JAVA/H5T/CMakeLists.txt
index a779a539d8c..0d2cb41aa70 100644
--- a/HDF5Examples/JAVA/H5T/CMakeLists.txt
+++ b/HDF5Examples/JAVA/H5T/CMakeLists.txt
@@ -3,10 +3,9 @@ project (HDF5Examples_JAVA_H5T Java)
 
 set (CMAKE_VERBOSE_MAKEFILE 1)
 
-INCLUDE_DIRECTORIES (
-    ${HDFJAVA_LIB_DIR}
-    ${JAVA_INCLUDE_PATH}
-    ${JAVA_INCLUDE_PATH2}
+set_directory_properties(PROPERTIES
+    INCLUDE_DIRECTORIES
+        "${HDFJAVA_LIB_DIR};${JAVA_INCLUDE_PATH};${JAVA_INCLUDE_PATH2}"
 )
 
 #-----------------------------------------------------------------------------
@@ -20,6 +19,9 @@ else ()
   set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
 endif ()
 
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_INCLUDE_DIRS}")
+set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${H5EX_JAVA_LIBRARY}$<$<OR:$<CONFIG:Debug>,$<CONFIG:Developer>>:${CMAKE_DEBUG_POSTFIX}>;")
+
 set (CMAKE_JAVA_CLASSPATH ".")
 foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
   set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
@@ -32,13 +34,18 @@ endforeach ()
 
 foreach (example ${HDF_JAVA_EXAMPLES})
   get_filename_component (example_name ${example} NAME_WE)
-  file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+  file (WRITE ${PROJECT_BINARY_DIR}/${example_name}_Manifest.txt
   "Main-Class: ${example_name}
 Class-Path: ${HDFJAVA_CLASSJARS}
 "
   )
-  add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+  add_jar (${EXAMPLE_VARNAME}_${example_name}
+      SOURCES ${example}
+      MANIFEST ${PROJECT_BINARY_DIR}/${example_name}_Manifest.txt
+  )
   get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+  get_target_property (${EXAMPLE_VARNAME}_${example_name}_CLASSPATH ${EXAMPLE_VARNAME}_${example_name} CLASSDIR)
+  add_dependencies (${EXAMPLE_VARNAME}_${example_name} ${H5EX_JAVA_LIBRARIES})
 endforeach ()
 
 if (H5EX_BUILD_TESTING)
@@ -46,9 +53,9 @@ if (H5EX_BUILD_TESTING)
     add_test (
         NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
         COMMAND "${CMAKE_COMMAND}"
-            -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+            -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
             -D "TEST_PROGRAM=${resultfile}"
-            -D "TEST_ARGS:STRING=${ARGN}"
+            -D "TEST_ARGS:STRING=${ARGN};${CMD_ARGS}"
             -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
             -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
             -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
diff --git a/HDF5Examples/config/cmake/HDFExampleMacros.cmake b/HDF5Examples/config/cmake/HDFExampleMacros.cmake
index d9bf67fa480..82fd8ac95eb 100644
--- a/HDF5Examples/config/cmake/HDFExampleMacros.cmake
+++ b/HDF5Examples/config/cmake/HDFExampleMacros.cmake
@@ -1,3 +1,15 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5.  The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
+
 #-------------------------------------------------------------------------------
 macro (BASIC_SETTINGS varname)
   string (TOUPPER ${varname} EXAMPLE_PACKAGE_VARNAME)
@@ -17,35 +29,7 @@ macro (BASIC_SETTINGS varname)
   #-----------------------------------------------------------------------------
   # Setup output Directories
   #-----------------------------------------------------------------------------
-  if (NOT ${EXAMPLE_PACKAGE_NAME}_EXTERNALLY_CONFIGURED)
-    set (CMAKE_RUNTIME_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Executables."
-    )
-    set (CMAKE_LIBRARY_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Libraries"
-    )
-    set (CMAKE_ARCHIVE_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all static libraries."
-    )
-    set (CMAKE_Fortran_MODULE_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all fortran modules."
-    )
-    get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
-    if(_isMultiConfig)
-      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${CMAKE_BUILD_TYPE})
-      set (CMAKE_PDB_OUTPUT_DIRECTORY
-          ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all pdb files."
-      )
-    else ()
-      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY})
-    endif ()
-  else ()
-    # if we are externally configured, but the project uses old cmake scripts
-    # this may not be set
-    if (NOT CMAKE_RUNTIME_OUTPUT_DIRECTORY)
-      set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH})
-    endif ()
-  endif ()
+  SET_HDF_OUTPUT_DIRS(${EXAMPLE_PACKAGE_NAME})
 
   #-----------------------------------------------------------------------------
   # Option to use Shared/Static libs, default is static
@@ -63,9 +47,13 @@ macro (BASIC_SETTINGS varname)
   set (CMAKE_C_STANDARD 99)
   set (CMAKE_C_STANDARD_REQUIRED TRUE)
 
-  set (CMAKE_CXX_STANDARD 98)
-  set (CMAKE_CXX_STANDARD_REQUIRED TRUE)
-  set (CMAKE_CXX_EXTENSIONS OFF)
+  if (HDF_BUILD_CPP_LIB)
+    ENABLE_LANGUAGE (CXX)
+
+    set (CMAKE_CXX_STANDARD 98)
+    set (CMAKE_CXX_STANDARD_REQUIRED TRUE)
+    set (CMAKE_CXX_EXTENSIONS OFF)
+  endif ()
 
   #-----------------------------------------------------------------------------
   # Compiler specific flags : Shouldn't there be compiler tests for these
@@ -73,7 +61,7 @@ macro (BASIC_SETTINGS varname)
   if (CMAKE_COMPILER_IS_GNUCC)
     set (CMAKE_C_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_C_FLAGS}")
   endif ()
-  if (CMAKE_COMPILER_IS_GNUCXX)
+  if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_COMPILER_IS_GNUCXX)
     set (CMAKE_CXX_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_CXX_FLAGS}")
   endif ()
 
@@ -84,7 +72,7 @@ macro (BASIC_SETTINGS varname)
   if (CMAKE_COMPILER_IS_GNUCC)
     set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fmessage-length=0")
   endif ()
-  if (CMAKE_COMPILER_IS_GNUCXX)
+  if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_COMPILER_IS_GNUCXX)
     set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fmessage-length=0")
   endif ()
 
@@ -99,8 +87,10 @@ macro (BASIC_SETTINGS varname)
       set (HDF_WARNINGS_BLOCKED 1)
       string (REGEX REPLACE "(^| )([/-])W[0-9]( |$)" " " CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
       set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /w")
-      string (REGEX REPLACE "(^| )([/-])W[0-9]( |$)" " " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
-      set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /w")
+      if (CMAKE_CXX_COMPILER_LOADED)
+        string (REGEX REPLACE "(^| )([/-])W[0-9]( |$)" " " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /w")
+      endif ()
     endif ()
     if (WIN32)
       add_definitions (-D_CRT_SECURE_NO_WARNINGS)
@@ -114,7 +104,9 @@ macro (BASIC_SETTINGS varname)
     # Most compilers use -w to suppress warnings.
     if (NOT HDF_WARNINGS_BLOCKED)
       set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w")
-      set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w")
+      if (CMAKE_CXX_COMPILER_LOADED)
+        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w")
+      endif ()
     endif ()
   endif ()
 
@@ -233,7 +225,9 @@ macro (HDF5_SUPPORT)
         if (HDF_BUILD_JAVA)
           if (${HDF5_BUILD_JAVA} AND HDF5_Java_FOUND)
             set (CMAKE_JAVA_INCLUDE_PATH "${CMAKE_JAVA_INCLUDE_PATH};${HDF5_JAVA_INCLUDE_DIRS}")
-            message (STATUS "HDF5 jars:${HDF5_JAVA_INCLUDE_DIRS}}")
+            set (H5EX_JAVA_LIBRARY ${HDF5_JAVA_LIBRARY})
+            set (H5EX_JAVA_LIBRARIES ${HDF5_JAVA_LIBRARY})
+            message (STATUS "HDF5 lib:${H5EX_JAVA_LIBRARY} jars:${HDF5_JAVA_INCLUDE_DIRS}")
           else ()
             set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE)
             message (STATUS "HDF5 Java libs not found - disable build of Java examples")
@@ -285,11 +279,6 @@ macro (HDF5_SUPPORT)
   else ()
     set (H5_LIB_TYPE STATIC)
   endif ()
-
-  #-----------------------------------------------------------------------------
-  # Option to build filter examples
-  #-----------------------------------------------------------------------------
-  option (HDF_BUILD_FILTERS "Test filter support" OFF)
 endmacro ()
 
 #-------------------------------------------------------------------------------
diff --git a/HDF5Examples/config/cmake/HDFMacros.cmake b/HDF5Examples/config/cmake/HDFMacros.cmake
index 9ce592df96e..66a25aab83f 100644
--- a/HDF5Examples/config/cmake/HDFMacros.cmake
+++ b/HDF5Examples/config/cmake/HDFMacros.cmake
@@ -1,3 +1,50 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5.  The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
+
+#-------------------------------------------------------------------------------
+# Setup output Directories
+#-----------------------------------------------------------------------------
+macro (SET_HDF_OUTPUT_DIRS package_prefix)
+  if (NOT ${package_prefix}_EXTERNALLY_CONFIGURED)
+    set (CMAKE_RUNTIME_OUTPUT_DIRECTORY
+        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Executables."
+    )
+    set (CMAKE_LIBRARY_OUTPUT_DIRECTORY
+        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Libraries"
+    )
+    set (CMAKE_ARCHIVE_OUTPUT_DIRECTORY
+        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all static libraries."
+    )
+    set (CMAKE_Fortran_MODULE_DIRECTORY
+        ${PROJECT_BINARY_DIR}/mod CACHE PATH "Single Directory for all fortran modules."
+    )
+    get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
+    if(_isMultiConfig)
+      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${HDF_CFG_NAME})
+      set (CMAKE_PDB_OUTPUT_DIRECTORY
+          ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all pdb files."
+      )
+    else ()
+      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY})
+    endif ()
+  else ()
+    # if we are externally configured, but the project uses old cmake scripts
+    # this may not be set and utilities like H5detect will fail
+    if (NOT CMAKE_RUNTIME_OUTPUT_DIRECTORY)
+      set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH})
+    endif ()
+  endif ()
+endmacro ()
+
 #-------------------------------------------------------------------------------
 macro (SET_HDF_BUILD_TYPE)
   get_property (_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
@@ -157,38 +204,7 @@ macro (HDF_DIR_PATHS package_prefix)
 
   SET_HDF_BUILD_TYPE()
 
-#-----------------------------------------------------------------------------
-# Setup output Directories
-#-----------------------------------------------------------------------------
-  if (NOT ${package_prefix}_EXTERNALLY_CONFIGURED)
-    set (CMAKE_RUNTIME_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Executables."
-    )
-    set (CMAKE_LIBRARY_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Libraries"
-    )
-    set (CMAKE_ARCHIVE_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all static libraries."
-    )
-    set (CMAKE_Fortran_MODULE_DIRECTORY
-        ${PROJECT_BINARY_DIR}/mod CACHE PATH "Single Directory for all fortran modules."
-    )
-    get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
-    if(_isMultiConfig)
-      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${HDF_CFG_NAME})
-      set (CMAKE_PDB_OUTPUT_DIRECTORY
-          ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all pdb files."
-      )
-    else ()
-      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY})
-    endif ()
-  else ()
-    # if we are externally configured, but the project uses old cmake scripts
-    # this may not be set and utilities like H5detect will fail
-    if (NOT CMAKE_RUNTIME_OUTPUT_DIRECTORY)
-      set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH})
-    endif ()
-  endif ()
+  SET_HDF_OUTPUT_DIRS(${package_prefix})
 
   include (FetchContent)
 endmacro ()
diff --git a/HDF5Examples/config/cmake/UseJava.cmake b/HDF5Examples/config/cmake/UseJava.cmake
index 2783cb638e4..b4af653740a 100644
--- a/HDF5Examples/config/cmake/UseJava.cmake
+++ b/HDF5Examples/config/cmake/UseJava.cmake
@@ -6,7 +6,7 @@ UseJava
 
 This file provides support for ``Java``.  It is assumed that
 :module:`FindJava` has already been loaded.  See :module:`FindJava` for
-information on how to load Java into your ``CMake`` project.
+information on how to load Java into your CMake project.
 
 Synopsis
 ^^^^^^^^
@@ -42,6 +42,7 @@ Creating And Installing JARs
 
     add_jar(<target_name>
             [SOURCES] <source1> [<source2>...] [<resource1>...]
+            [RESOURCES NAMESPACE <ns1> <resource1>... [NAMESPACE <nsX> <resourceX>...]... ]
             [INCLUDE_JARS <jar1> [<jar2>...]]
             [ENTRY_POINT <entry>]
             [VERSION <version>]
@@ -64,6 +65,34 @@ Creating And Installing JARs
     .. versionadded:: 3.4
       Support for response files, prefixed by ``@``.
 
+  ``RESOURCES``
+    .. versionadded:: 3.21
+
+    Adds the named ``<resource>`` files to the jar by stripping the source file
+    path and placing the file beneath ``<ns>`` within the jar.
+
+    For example::
+
+      RESOURCES NAMESPACE "/com/my/namespace" "a/path/to/resource.txt"
+
+    results in a resource accessible via ``/com/my/namespace/resource.txt``
+    within the jar.
+
+    Resources may be added without adjusting the namespace by adding them to
+    the list of ``SOURCES`` (original behavior), in this case, resource
+    paths must be relative to ``CMAKE_CURRENT_SOURCE_DIR``.  Adding resources
+    without using the ``RESOURCES`` parameter in out of source builds will
+    almost certainly result in confusion.
+
+    .. note::
+
+      Adding resources via the ``SOURCES`` parameter relies upon a hard-coded
+      list of file extensions which are tested to determine whether they
+      compile (e.g. File.java). ``SOURCES`` files which match the extensions
+      are compiled. Files which do not match are treated as resources. To
+      include uncompiled resources matching those file extensions use
+      the ``RESOURCES`` parameter.
+
   ``INCLUDE_JARS``
     The list of jars are added to the classpath when compiling the java sources
     and also to the dependencies of the target. ``INCLUDE_JARS`` also accepts
@@ -264,7 +293,7 @@ Header Generation
 
   .. deprecated:: 3.11
     This command will no longer be supported starting with version 10 of the JDK
-    due to the `suppression of javah tool <http://openjdk.java.net/jeps/313>`_.
+    due to the `suppression of javah tool <https://openjdk.org/jeps/313>`_.
     The :ref:`add_jar(GENERATE_NATIVE_HEADERS) <add_jar>` command should be
     used instead.
 
@@ -526,6 +555,7 @@ function (__java_copy_file src dest comment)
                 ${dest}
         DEPENDS ${src}
         COMMENT ${comment}
+        VERBATIM
         )
 endfunction ()
 
@@ -552,6 +582,58 @@ function(__java_export_jar VAR TARGET PATH)
     set(${VAR} "${${VAR}}" PARENT_SCOPE)
 endfunction()
 
+function(__java_copy_resource_namespaces VAR DEST JAVA_RESOURCE_FILES JAVA_RESOURCE_FILES_RELATIVE)
+
+    set(_ns_ID "")
+    set(_ns_VAL "")
+
+    foreach(_item IN LISTS VAR)
+        if(NOT _ns_ID)
+            if(NOT _item STREQUAL "NAMESPACE")
+                message(FATAL_ERROR "UseJava: Expecting \"NAMESPACE\", got\t\"${_item}\"")
+                return()
+            endif()
+        endif()
+
+        if(_item STREQUAL "NAMESPACE")
+            set(_ns_VAL "")               # Prepare for next namespace
+            set(_ns_ID "${_item}")
+            continue()
+        endif()
+
+        if( NOT _ns_VAL)
+            # we're expecting the next token to be a namespace value
+            # whatever it is, we're treating it like a namespace
+            set(_ns_VAL "${_item}")
+            continue()
+        endif()
+
+        if(_ns_ID AND _ns_VAL)
+            # We're expecting a file name, check to see if we got one
+            cmake_path(ABSOLUTE_PATH _item OUTPUT_VARIABLE _test_file_name)
+            if (NOT EXISTS "${_test_file_name}")
+                message(FATAL_ERROR "UseJava: File does not exist:\t${_item}")
+                return()
+            endif()
+        endif()
+
+        cmake_path(ABSOLUTE_PATH _item OUTPUT_VARIABLE _abs_file_name)
+        cmake_path(GET _item FILENAME _resource_file_name)
+        set(_dest_resource_file_name "${_ns_VAL}/${_resource_file_name}" )
+
+        __java_copy_file( ${_abs_file_name}
+                          ${DEST}/${_dest_resource_file_name}
+                          "Copying ${_item} to the build directory")
+
+        list(APPEND RESOURCE_FILES_LIST           ${DEST}/${_dest_resource_file_name})
+        list(APPEND RELATIVE_RESOURCE_FILES_LIST  ${_dest_resource_file_name})
+
+    endforeach()
+
+    set(${JAVA_RESOURCE_FILES} "${RESOURCE_FILES_LIST}" PARENT_SCOPE)
+    set(${JAVA_RESOURCE_FILES_RELATIVE} "${RELATIVE_RESOURCE_FILES_LIST}" PARENT_SCOPE)
+endfunction()
+
 # define helper scripts
 set(_JAVA_EXPORT_TARGETS_SCRIPT ${CMAKE_CURRENT_LIST_DIR}/javaTargets.cmake.in)
 set(_JAVA_CLASS_FILELIST_SCRIPT ${CMAKE_CURRENT_LIST_DIR}/UseJavaClassFilelist.cmake)
@@ -746,6 +828,13 @@ function(add_jar _TARGET_NAME)
         endif ()
     endforeach()
 
+    if(_add_jar_RESOURCES)         # Process RESOURCES if it exists
+        __java_copy_resource_namespaces("${_add_jar_RESOURCES}"
+                                        ${CMAKE_JAVA_CLASS_OUTPUT_PATH}
+                                        _JAVA_RESOURCE_FILES
+                                        _JAVA_RESOURCE_FILES_RELATIVE)
+    endif()
+
     foreach(_JAVA_INCLUDE_JAR IN LISTS _add_jar_INCLUDE_JARS)
         if (TARGET ${_JAVA_INCLUDE_JAR})
             get_target_property(_JAVA_JAR_PATH ${_JAVA_INCLUDE_JAR} JAR_FILE)
@@ -795,6 +884,7 @@ function(add_jar _TARGET_NAME)
             DEPENDS ${_JAVA_COMPILE_FILES} ${_JAVA_COMPILE_FILELISTS} ${_JAVA_COMPILE_DEPENDS}
             WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
             COMMENT "Building Java objects for ${_TARGET_NAME}.jar"
+            VERBATIM
         )
         add_custom_command(
             OUTPUT ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist
@@ -804,6 +894,7 @@ function(add_jar _TARGET_NAME)
                 -P ${_JAVA_CLASS_FILELIST_SCRIPT}
             DEPENDS ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_compiled_${_TARGET_NAME}
             WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+            VERBATIM
         )
     else ()
         # create an empty java_class_filelist
@@ -834,6 +925,7 @@ function(add_jar _TARGET_NAME)
             DEPENDS ${_JAVA_RESOURCE_FILES} ${_JAVA_DEPENDS} ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist
             WORKING_DIRECTORY ${CMAKE_JAVA_CLASS_OUTPUT_PATH}
             COMMENT "Creating Java archive ${_JAVA_TARGET_OUTPUT_NAME}"
+            VERBATIM
         )
     else ()
         add_custom_command(
@@ -849,6 +941,7 @@ function(add_jar _TARGET_NAME)
             WORKING_DIRECTORY ${CMAKE_JAVA_CLASS_OUTPUT_PATH}
             DEPENDS ${_JAVA_RESOURCE_FILES} ${_JAVA_DEPENDS} ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist
             COMMENT "Creating Java archive ${_JAVA_TARGET_OUTPUT_NAME}"
+            VERBATIM
         )
     endif ()
 
diff --git a/HDF5Examples/config/cmake/jrunTest.cmake b/HDF5Examples/config/cmake/jrunTest.cmake
index 2d91ee2b1c1..d7d83d4f709 100644
--- a/HDF5Examples/config/cmake/jrunTest.cmake
+++ b/HDF5Examples/config/cmake/jrunTest.cmake
@@ -1,3 +1,14 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5.  The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
 # jrunTest.cmake executes a command and captures the output in a file. File is then compared
 # against a reference file. Exit status of command can also be compared.
 cmake_policy(SET CMP0007 NEW)
@@ -36,7 +47,12 @@ else ()
   set (LOG_LEVEL "${TEST_LOG_LEVEL}")
 endif ()
 
-message (STATUS "COMMAND: ${TEST_TESTER} -Xmx1024M -Djava.library.path=\"${TEST_LIBRARY_DIRECTORY}\" -cp \"${TEST_CLASSPATH}\" ${TEST_ARGS} ${TEST_PROGRAM} ${ARGN}")
+if (NOT TEST_VOL)
+  message (STATUS "COMMAND: ${TEST_TESTER} -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=${LOG_LEVEL} -Djava.library.path=\"${TEST_LIBRARY_DIRECTORY}\" -cp \"${TEST_CLASSPATH}\" ${TEST_ARGS} ${TEST_PROGRAM} ${ARGN}")
+else ()
+  message (STATUS "USING ${TEST_VOL} ON COMMAND: ${TEST_TESTER} -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=${LOG_LEVEL} -Djava.library.path=\"${TEST_LIBRARY_DIRECTORY}\" -cp \"${TEST_CLASSPATH}\" ${TEST_ARGS} ${TEST_PROGRAM} ${ARGN}")
+  set (ENV{HDF5_VOL_CONNECTOR} "${TEST_VOL}")
+endif ()
 
 if (WIN32)
   set (ENV{PATH} "$ENV{PATH}\\;${TEST_LIBRARY_DIRECTORY}")
diff --git a/config/cmake/HDF5ExampleCache.cmake b/config/cmake/HDF5ExampleCache.cmake
index 6882d255770..043be1a992f 100644
--- a/config/cmake/HDF5ExampleCache.cmake
+++ b/config/cmake/HDF5ExampleCache.cmake
@@ -1,26 +1,55 @@
-# CMake cache file for external HDF5 filter plugins
+# CMake cache file for examples
 
 #########################
 # EXTERNAL cache entries
 #########################
 
-# examples are the tests for plugins
-set (H5EX_BUILD_TESTING ON CACHE BOOL "Enable H5PL testing" FORCE)
-set (H5EX_BUILD_EXAMPLES ${HDF5_BUILD_EXAMPLES} CACHE BOOL "Build H5PL Examples" FORCE)
+# set example options to match build options
+set (H5EX_BUILD_TESTING ${BUILD_TESTING} CACHE BOOL "Enable examples testing" FORCE)
+set (H5EX_BUILD_EXAMPLES ${HDF5_BUILD_EXAMPLES} CACHE BOOL "Build Examples" FORCE)
+set (HDF_BUILD_FORTRAN ${HDF5_BUILD_FORTRAN} CACHE BOOL "Build examples FORTRAN support" FORCE)
+set (HDF_BUILD_JAVA ${HDF5_BUILD_JAVA} CACHE BOOL "Build examples JAVA support" FORCE)
+set (HDF_BUILD_FILTERS ${HDF5_ENABLE_PLUGIN_SUPPORT} CACHE BOOL "Build examples PLUGIN filter support" FORCE)
+set (HDF_BUILD_CPP_LIB ${HDF5_BUILD_CPP_LIB} CACHE BOOL "Build HDF5 C++ Library" FORCE)
+set (HDF_BUILD_HL_LIB ${HDF5_BUILD_HL_LIB} CACHE BOOL "Build HIGH Level examples" FORCE)
+set (HDF_ENABLE_THREADSAFE ${HDF5_ENABLE_THREADSAFE} CACHE BOOL "Enable examples thread-safety" FORCE)
+set (HDF_ENABLE_PARALLEL ${HDF5_ENABLE_PARALLEL} CACHE BOOL "Enable examples parallel build (requires MPI)" FORCE)
+set (H5EX_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS} CACHE BOOL "TRUE to use GNU Coding Standard install directory variables, FALSE to use historical settings" FORCE)
 
 #preset HDF5 cache vars to this projects libraries instead of searching
 set (H5EX_HDF5_HEADER "H5pubconf.h" CACHE STRING "Name of HDF5 header" FORCE)
 #set (H5EX_HDF5_INCLUDE_DIRS $<TARGET_PROPERTY:${HDF5_LIBSH_TARGET},INCLUDE_DIRECTORIES> CACHE PATH "HDF5 include dirs" FORCE)
 set (H5EX_HDF5_INCLUDE_DIRS "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE)
 set (H5EX_HDF5_DIR ${CMAKE_CURRENT_BINARY_DIR} CACHE STRING "HDF5 build folder" FORCE)
+set (EXAMPLES_EXTERNALLY_CONFIGURED ON CACHE BOOL "Examples build is used in another project" FORCE)
 
 if (NOT BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS)
   set (USE_SHARED_LIBS OFF CACHE BOOL "Use Shared Libraries for Examples" FORCE)
   set (H5EX_HDF5_LINK_LIBS ${HDF5_LIB_TARGET} CACHE STRING "HDF5 target" FORCE)
+  if (HDF5_BUILD_FORTRAN)
+    set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_F90_LIB_TARGET})
+    set (HDF_MOD_EXT "/static" CACHE STRING "Use Static Modules for Examples" FORCE)
+  endif ()
 else ()
   set (USE_SHARED_LIBS ON CACHE BOOL "Use Shared Libraries for Examples" FORCE)
   set (H5EX_HDF5_LINK_LIBS ${HDF5_LIBSH_TARGET} CACHE STRING "HDF5 target" FORCE)
+  if (HDF5_BUILD_FORTRAN)
+    set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_F90_LIBSH_TARGET})
+    set (HDF_MOD_EXT "/shared" CACHE STRING "Use Shared Modules for Examples" FORCE)
+  endif ()
+  if (HDF5_BUILD_JAVA)
+    set (HDF5_JAVA_INCLUDE_DIRS ${HDF5_JAVA_JARS} ${HDF5_JAVA_LOGGING_JAR})
+    set (H5EX_JAVA_LIBRARY ${HDF5_JAVA_JNI_LIB_TARGET})
+    set (H5EX_JAVA_LIBRARIES ${HDF5_JAVA_HDF5_LIB_TARGET} ${HDF5_JAVA_JNI_LIB_TARGET})
+    set (HDF5_LIBRARY_PATH ${CMAKE_TEST_OUTPUT_DIRECTORY})
+    message (STATUS "HDF5 Example java lib: ${H5EX_JAVA_LIBRARY} jars: ${HDF5_JAVA_INCLUDE_DIRS}")
+  endif ()
+  if (HDF5_ENABLE_PLUGIN_SUPPORT)
+    set (H5EX_HDF5_PLUGIN_PATH "${CMAKE_BINARY_DIR}/plugins")
+  endif ()
 endif ()
+message (STATUS "HDF5 Example link libs: ${H5EX_HDF5_LINK_LIBS} Includes: ${H5EX_HDF5_INCLUDE_DIRS}")
 
+set (HDF5_TOOLS_DIR ${CMAKE_TEST_OUTPUT_DIRECTORY} CACHE STRING "HDF5 Directory for all Executables" FORCE)
 set (H5EX_HDF5_DUMP_EXECUTABLE $<TARGET_FILE:h5dump${tgt_file_ext}> CACHE STRING "HDF5 h5dump target" FORCE)
 set (H5EX_HDF5_REPACK_EXECUTABLE $<TARGET_FILE:h5repack${tgt_file_ext}> CACHE STRING "HDF5 h5repack target" FORCE)
diff --git a/config/cmake/HDFMacros.cmake b/config/cmake/HDFMacros.cmake
index f0b6f03789a..369df96e944 100644
--- a/config/cmake/HDFMacros.cmake
+++ b/config/cmake/HDFMacros.cmake
@@ -10,6 +10,55 @@
 # help@hdfgroup.org.
 #
 
+#-------------------------------------------------------------------------------
+# Setup output Directories
+#-----------------------------------------------------------------------------
+macro (SET_HDF_OUTPUT_DIRS package_prefix)
+  if (NOT ${package_prefix}_EXTERNALLY_CONFIGURED)
+    set (CMAKE_RUNTIME_OUTPUT_DIRECTORY
+        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Executables."
+    )
+    set (CMAKE_LIBRARY_OUTPUT_DIRECTORY
+        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Libraries"
+    )
+    set (CMAKE_ARCHIVE_OUTPUT_DIRECTORY
+        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all static libraries."
+    )
+    set (CMAKE_Fortran_MODULE_DIRECTORY
+        ${PROJECT_BINARY_DIR}/mod CACHE PATH "Single Directory for all fortran modules."
+    )
+    get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
+    if(_isMultiConfig)
+      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${HDF_CFG_NAME})
+      set (CMAKE_PDB_OUTPUT_DIRECTORY
+          ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all pdb files."
+      )
+    else ()
+      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY})
+    endif ()
+  else ()
+    # if we are externally configured, but the project uses old cmake scripts
+    # this may not be set and utilities like H5detect will fail
+    if (NOT CMAKE_RUNTIME_OUTPUT_DIRECTORY)
+      set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH})
+    endif ()
+  endif ()
+
+  if (NOT ${package_prefix}_EXTERNALLY_CONFIGURED AND CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+    if (CMAKE_HOST_UNIX)
+      set (CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}/HDF_Group/${HDF5_PACKAGE_NAME}/${HDF5_PACKAGE_VERSION}"
+        CACHE PATH "Install path prefix, prepended onto install directories." FORCE)
+    else ()
+      GetDefaultWindowsPrefixBase(CMAKE_GENERIC_PROGRAM_FILES)
+      set (CMAKE_INSTALL_PREFIX
+        "${CMAKE_GENERIC_PROGRAM_FILES}/HDF_Group/${HDF5_PACKAGE_NAME}/${HDF5_PACKAGE_VERSION}"
+        CACHE PATH "Install path prefix, prepended onto install directories." FORCE)
+      set (CMAKE_GENERIC_PROGRAM_FILES)
+    endif ()
+    set (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT 0 CACHE PATH "" FORCE)
+  endif ()
+endmacro ()
+
 #-------------------------------------------------------------------------------
 macro (SET_HDF_BUILD_TYPE)
   get_property (_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
@@ -433,53 +482,10 @@ macro (HDF_DIR_PATHS package_prefix)
   endif ()
 
   SET_HDF_BUILD_TYPE()
-
 #-----------------------------------------------------------------------------
 # Setup output Directories
 #-----------------------------------------------------------------------------
-  if (NOT ${package_prefix}_EXTERNALLY_CONFIGURED)
-    set (CMAKE_RUNTIME_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Executables."
-    )
-    set (CMAKE_LIBRARY_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all Libraries"
-    )
-    set (CMAKE_ARCHIVE_OUTPUT_DIRECTORY
-        ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all static libraries."
-    )
-    set (CMAKE_Fortran_MODULE_DIRECTORY
-        ${PROJECT_BINARY_DIR}/mod CACHE PATH "Single Directory for all fortran modules."
-    )
-    get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
-    if(_isMultiConfig)
-      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${HDF_CFG_NAME})
-      set (CMAKE_PDB_OUTPUT_DIRECTORY
-          ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all pdb files."
-      )
-    else ()
-      set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY})
-    endif ()
-  else ()
-    # if we are externally configured, but the project uses old cmake scripts
-    # this may not be set and utilities like H5detect will fail
-    if (NOT CMAKE_RUNTIME_OUTPUT_DIRECTORY)
-      set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH})
-    endif ()
-  endif ()
-
-  if (NOT ${package_prefix}_EXTERNALLY_CONFIGURED AND CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
-    if (CMAKE_HOST_UNIX)
-      set (CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}/HDF_Group/${HDF5_PACKAGE_NAME}/${HDF5_PACKAGE_VERSION}"
-        CACHE PATH "Install path prefix, prepended onto install directories." FORCE)
-    else ()
-      GetDefaultWindowsPrefixBase(CMAKE_GENERIC_PROGRAM_FILES)
-      set (CMAKE_INSTALL_PREFIX
-        "${CMAKE_GENERIC_PROGRAM_FILES}/HDF_Group/${HDF5_PACKAGE_NAME}/${HDF5_PACKAGE_VERSION}"
-        CACHE PATH "Install path prefix, prepended onto install directories." FORCE)
-      set (CMAKE_GENERIC_PROGRAM_FILES)
-    endif ()
-    set (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT 0 CACHE PATH "" FORCE)
-  endif ()
+  SET_HDF_OUTPUT_DIRS(${package_prefix})
 
   include (FetchContent)
 endmacro ()
diff --git a/config/cmake/hdf5-config.cmake.in b/config/cmake/hdf5-config.cmake.in
index c20e18f54a4..186ae670c77 100644
--- a/config/cmake/hdf5-config.cmake.in
+++ b/config/cmake/hdf5-config.cmake.in
@@ -92,8 +92,6 @@ if (${HDF5_PACKAGE_NAME}_BUILD_JAVA)
       @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-api-2.0.6.jar
       @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-nop-2.0.6.jar
   )
-  set (${HDF5_PACKAGE_NAME}_JAVA_LIBRARY "@PACKAGE_CURRENT_BUILD_DIR@/lib")
-  set (${HDF5_PACKAGE_NAME}_JAVA_LIBRARIES "${${HDF5_PACKAGE_NAME}_JAVA_LIBRARY}")
 endif ()
 
 #-----------------------------------------------------------------------------
diff --git a/configure.ac b/configure.ac
index eace890ffae..689ab3d2a42 100644
--- a/configure.ac
+++ b/configure.ac
@@ -4205,15 +4205,6 @@ AC_CONFIG_FILES([Makefile
                  java/src/jni/Makefile
                  java/test/Makefile
                  java/test/junit.sh
-                 java/examples/Makefile
-                 java/examples/intro/Makefile
-                 java/examples/intro/JavaIntroExample.sh
-                 java/examples/datasets/Makefile
-                 java/examples/datasets/JavaDatasetExample.sh
-                 java/examples/datatypes/Makefile
-                 java/examples/datatypes/JavaDatatypeExample.sh
-                 java/examples/groups/Makefile
-                 java/examples/groups/JavaGroupExample.sh
                  hl/Makefile
                  hl/src/Makefile
                  hl/test/Makefile
@@ -4243,13 +4234,6 @@ if test -n "$TESTPARALLEL"; then
   fi
 fi
 
-AC_CONFIG_COMMANDS([.classes], [], [$MKDIR_P java/src/.classes;
-                $MKDIR_P java/test/.classes;
-                $MKDIR_P java/examples/intro/.classes;
-                $MKDIR_P java/examples/datasets/.classes;
-                $MKDIR_P java/examples/datatypes/.classes;
-                $MKDIR_P java/examples/groups/.classes])
-
 AC_OUTPUT
 
 chmod 755 bin/h5cc
diff --git a/doxygen/dox/LearnBasics3.dox b/doxygen/dox/LearnBasics3.dox
index a91368b00fc..eb3dc7c1270 100644
--- a/doxygen/dox/LearnBasics3.dox
+++ b/doxygen/dox/LearnBasics3.dox
@@ -715,7 +715,7 @@ When the library is built, the following compile scripts are included:
 \li h5fc:  compile script for HDF5 F90 programs
 \li h5c++: compile script for HDF5 C++ programs
 
-These scripts are easilye used to compile single file applications, such as those included in the tutorial.
+These scripts are easily used to compile single file applications, such as those included in the tutorial.
 <table>
 <tr>
 <th><strong>Warning</strong>
diff --git a/hl/tools/h5watch/CMakeLists.txt b/hl/tools/h5watch/CMakeLists.txt
index 1eb361898b3..890ac37f5fb 100644
--- a/hl/tools/h5watch/CMakeLists.txt
+++ b/hl/tools/h5watch/CMakeLists.txt
@@ -12,7 +12,7 @@ set (H5WATCH_SOURCES
 if (BUILD_STATIC_LIBS)
   add_executable (h5watch ${H5WATCH_SOURCES})
   target_compile_options(h5watch PRIVATE "${HDF5_CMAKE_C_FLAGS}")
-  target_include_directories (h5watch PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5watch PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5watch STATIC)
   target_link_libraries (h5watch PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
   set_target_properties (h5watch PROPERTIES FOLDER tools/hl)
@@ -21,7 +21,7 @@ endif ()
 if (BUILD_SHARED_LIBS)
   add_executable (h5watch-shared ${H5WATCH_SOURCES})
   target_compile_options(h5watch-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
-  target_include_directories (h5watch-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5watch-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5watch-shared SHARED)
   target_link_libraries (h5watch-shared PRIVATE ${HDF5_HL_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} ${HDF5_TOOLS_LIBSH_TARGET})
   set_target_properties (h5watch-shared PROPERTIES FOLDER tools/hl)
diff --git a/java/Makefile.am b/java/Makefile.am
index bb8b4262930..b21f17835be 100644
--- a/java/Makefile.am
+++ b/java/Makefile.am
@@ -24,10 +24,8 @@ include $(top_srcdir)/config/commence.am
 
 if BUILD_TESTS_CONDITIONAL
    TESTSERIAL_DIR =test
-   TESTEXAMPLES_DIR =examples
 else
    TESTSERIAL_DIR=
-   TESTEXAMPLES_DIR=
 endif
 
 ## Only recurse into subdirectories if the Java (JNI) interface is enabled.
@@ -36,7 +34,7 @@ if BUILD_JAVA_CONDITIONAL
 # Mark this directory as part of the JNI API
 JAVA_API=yes
 
-SUBDIRS=src $(TESTSERIAL_DIR) $(TESTEXAMPLES_DIR)
+SUBDIRS=src $(TESTSERIAL_DIR)
 
 endif
 
diff --git a/java/examples/CMakeLists.txt b/java/examples/CMakeLists.txt
deleted file mode 100644
index f9d0310c073..00000000000
--- a/java/examples/CMakeLists.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-cmake_minimum_required (VERSION 3.18)
-project (HDFJAVA_EXAMPLES Java)
-
-add_subdirectory (datasets)
-add_subdirectory (datatypes)
-add_subdirectory (groups)
-add_subdirectory (intro)
diff --git a/java/examples/Makefile.am b/java/examples/Makefile.am
deleted file mode 100644
index eb7b7f5862d..00000000000
--- a/java/examples/Makefile.am
+++ /dev/null
@@ -1,28 +0,0 @@
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-#
-#
-# This makefile mostly just reinvokes make in the various subdirectories
-# but does so in the correct order.  You can alternatively invoke make from
-# each subdirectory manually.
-##
-## Makefile.am
-## Run automake to generate a Makefile.in from this file.
-##
-#
-# HDF5 Java Library Examples Makefile(.in)
-
-include $(top_srcdir)/config/commence.am
-
-## Only recurse into subdirectories if the Java (JNI) interface is enabled.
-   SUBDIRS=intro groups datasets datatypes
-
-include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt
deleted file mode 100644
index a9a5643d652..00000000000
--- a/java/examples/datasets/CMakeLists.txt
+++ /dev/null
@@ -1,150 +0,0 @@
-cmake_minimum_required (VERSION 3.18)
-project (HDFJAVA_EXAMPLES_DATASETS Java)
-
-set (CMAKE_VERBOSE_MAKEFILE 1)
-
-set (HDF_JAVA_EXAMPLES
-    H5Ex_D_Alloc
-    H5Ex_D_Checksum
-    H5Ex_D_Chunk
-    H5Ex_D_Compact
-    H5Ex_D_External
-    H5Ex_D_FillValue
-    H5Ex_D_Hyperslab
-    H5Ex_D_ReadWrite
-    H5Ex_D_UnlimitedAdd
-    H5Ex_D_UnlimitedMod
-    H5Ex_D_Nbit
-    H5Ex_D_Transform
-    H5Ex_D_Sofloat
-    H5Ex_D_Soint
-)
-
-set (HDF_JAVA_ZLIB_EXAMPLES
-    H5Ex_D_Gzip
-    H5Ex_D_Shuffle
-    H5Ex_D_UnlimitedGzip
-)
-
-set (HDF_JAVA_SZIP_EXAMPLES
-    H5Ex_D_Szip
-)
-
-# detect whether the encoder is present.
-  if (H5_HAVE_FILTER_DEFLATE)
-    set (${HDF_JAVA_EXAMPLES} ${HDF_JAVA_EXAMPLES} ${HDF_JAVA_ZLIB_EXAMPLES})
-  endif ()
-
-  if (H5_HAVE_FILTER_SZIP)
-    set (${HDF_JAVA_EXAMPLES} ${HDF_JAVA_EXAMPLES} ${HDF_JAVA_SZIP_EXAMPLES})
-  endif (H5_HAVE_FILTER_SZIP)
-
-if (WIN32)
-  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
-else ()
-  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
-endif ()
-
-set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
-
-set (CMAKE_JAVA_CLASSPATH ".")
-foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
-  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
-endforeach ()
-
-foreach (example ${HDF_JAVA_EXAMPLES})
-  file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
-  "Main-Class: examples.datasets.${example}
-"
-  )
-  add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
-  get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
-#  install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
-  get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
-  add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
-
-  #-----------------------------------------------------------------------------
-  # Add Target to clang-format
-  #-----------------------------------------------------------------------------
-  if (HDF5_ENABLE_FORMATTERS)
-    clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
-  endif ()
-endforeach ()
-
-set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
-
-set (CMAKE_JAVA_CLASSPATH ".")
-foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
-  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
-endforeach ()
-
-if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL)
-  get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME)
-  set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$<OR:$<CONFIG:Debug>,$<CONFIG:Developer>>:${CMAKE_DEBUG_POSTFIX}>;")
-
-  set (last_test "")
-  foreach (example ${HDF_JAVA_EXAMPLES})
-    if (example STREQUAL "H5Ex_D_External")
-      add_test (
-          NAME JAVA_datasets-${example}-clear-objects
-          COMMAND ${CMAKE_COMMAND} -E remove
-              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.data
-      )
-    else ()
-      add_test (
-          NAME JAVA_datasets-${example}-clear-objects
-          COMMAND ${CMAKE_COMMAND} -E remove
-              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-      )
-    endif ()
-    if (last_test)
-      set_tests_properties (JAVA_datasets-${example}-clear-objects PROPERTIES DEPENDS ${last_test})
-    endif ()
-
-    add_test (
-        NAME JAVA_datasets-${example}-copy-objects
-        COMMAND ${CMAKE_COMMAND} -E copy_if_different
-            ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.datasets.${example}.txt
-            ${HDFJAVA_EXAMPLES_DATASETS_BINARY_DIR}/${example}.txt
-    )
-    set_tests_properties (JAVA_datasets-${example}-copy-objects PROPERTIES DEPENDS JAVA_datasets-${example}-clear-objects)
-
-    add_test (
-        NAME JAVA_datasets-${example}
-        COMMAND "${CMAKE_COMMAND}"
-            -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
-            -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${example}_JAR_FILE}"
-            -D "TEST_ARGS:STRING=${CMD_ARGS}"
-            -D "TEST_PROGRAM=examples.datasets.${example}"
-            -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
-            -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
-            -D "TEST_OUTPUT=datasets/${example}.out"
-            -D "TEST_EXPECT=0"
-            -D "TEST_REFERENCE=datasets/${example}.txt"
-            -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
-    )
-    set_tests_properties (JAVA_datasets-${example} PROPERTIES
-        DEPENDS JAVA_datasets-${example}-copy-objects
-    )
-
-    if (example STREQUAL "H5Ex_D_External")
-      add_test (
-          NAME JAVA_datasets-${example}-clean-objects
-          COMMAND ${CMAKE_COMMAND} -E remove
-              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.data
-      )
-    else ()
-      add_test (
-          NAME JAVA_datasets-${example}-clean-objects
-          COMMAND ${CMAKE_COMMAND} -E remove
-              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-      )
-    endif ()
-    set_tests_properties (JAVA_datasets-${example}-clean-objects PROPERTIES
-        DEPENDS JAVA_datasets-${example}
-    )
-    set (last_test "JAVA_datasets-${example}-clean-objects")
-  endforeach ()
-endif ()
diff --git a/java/examples/datasets/H5Ex_D_Alloc.java b/java/examples/datasets/H5Ex_D_Alloc.java
deleted file mode 100644
index 4853cc0811a..00000000000
--- a/java/examples/datasets/H5Ex_D_Alloc.java
+++ /dev/null
@@ -1,294 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to set the space allocation time
-  for a dataset.  The program first creates two datasets,
-  one with the default allocation time (late) and one with
-  early allocation time, and displays whether each has been
-  allocated and their allocation size.  Next, it writes data
-  to the datasets, and again displays whether each has been
-  allocated and their allocation size.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Alloc {
-    private static String FILENAME     = "H5Ex_D_Alloc.h5";
-    private static String DATASETNAME1 = "DS1";
-    private static String DATASETNAME2 = "DS2";
-    private static final int DIM_X     = 4;
-    private static final int DIM_Y     = 7;
-    private static final int FILLVAL   = 99;
-    private static final int RANK      = 2;
-
-    // Values for the status of space allocation
-    enum H5D_space_status {
-        H5D_SPACE_STATUS_ERROR(-1),
-        H5D_SPACE_STATUS_NOT_ALLOCATED(0),
-        H5D_SPACE_STATUS_PART_ALLOCATED(1),
-        H5D_SPACE_STATUS_ALLOCATED(2);
-        private static final Map<Integer, H5D_space_status> lookup = new HashMap<Integer, H5D_space_status>();
-
-        static
-        {
-            for (H5D_space_status s : EnumSet.allOf(H5D_space_status.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5D_space_status(int space_status) { this.code = space_status; }
-
-        public int getCode() { return this.code; }
-
-        public static H5D_space_status get(int code) { return lookup.get(code); }
-    }
-
-    private static void allocation()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id1  = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id2  = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-        int space_status  = 0;
-        long storage_size = 0;
-
-        // Initialize the dataset.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = FILLVAL;
-
-        // Create a file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list, and set the chunk size.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set the allocation time to "early". This way we can be sure
-        // that reading from the dataset immediately after creation will
-        // return the fill value.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        System.out.println("Creating datasets...");
-        System.out.println(DATASETNAME1 + " has allocation time H5D_ALLOC_TIME_LATE");
-        System.out.println(DATASETNAME2 + " has allocation time H5D_ALLOC_TIME_EARLY");
-        System.out.println();
-
-        // Create the dataset using the dataset default creation property list.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0))
-                dataset_id1 = H5.H5Dcreate(file_id, DATASETNAME1, HDF5Constants.H5T_NATIVE_INT, filespace_id,
-                                           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                           HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset using the dataset creation property list.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id2 = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_NATIVE_INT, filespace_id,
-                                           HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print space status and storage size for dset1.
-        try {
-            if (dataset_id1 >= 0)
-                space_status = H5.H5Dget_space_status(dataset_id1);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            if (dataset_id1 >= 0)
-                storage_size = H5.H5Dget_storage_size(dataset_id1);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        String the_space = " ";
-        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
-            the_space += "not ";
-        System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
-        System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
-
-        // Retrieve and print space status and storage size for dset2.
-        try {
-            if (dataset_id2 >= 0)
-                space_status = H5.H5Dget_space_status(dataset_id2);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            if (dataset_id2 >= 0)
-                storage_size = H5.H5Dget_storage_size(dataset_id2);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        the_space = " ";
-        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
-            the_space += "not ";
-        System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
-        System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
-        System.out.println();
-
-        System.out.println("Writing data...");
-        System.out.println();
-
-        // Write the data to the datasets.
-        try {
-            if (dataset_id1 >= 0)
-                H5.H5Dwrite(dataset_id1, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data[0]);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            if (dataset_id2 >= 0)
-                H5.H5Dwrite(dataset_id2, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data[0]);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print space status and storage size for dset1.
-        try {
-            if (dataset_id1 >= 0)
-                space_status = H5.H5Dget_space_status(dataset_id1);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            if (dataset_id1 >= 0)
-                storage_size = H5.H5Dget_storage_size(dataset_id1);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        the_space = " ";
-        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
-            the_space += "not ";
-        System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
-        System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
-
-        // Retrieve and print space status and storage size for dset2.
-        try {
-            if (dataset_id2 >= 0)
-                space_status = H5.H5Dget_space_status(dataset_id2);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            if (dataset_id2 >= 0)
-                storage_size = H5.H5Dget_storage_size(dataset_id2);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        the_space = " ";
-        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
-            the_space += "not ";
-        System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
-        System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id1 >= 0)
-                H5.H5Dclose(dataset_id1);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id2 >= 0)
-                H5.H5Dclose(dataset_id2);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5Ex_D_Alloc.allocation(); }
-}
diff --git a/java/examples/datasets/H5Ex_D_Checksum.java b/java/examples/datasets/H5Ex_D_Checksum.java
deleted file mode 100644
index 7b01176710c..00000000000
--- a/java/examples/datasets/H5Ex_D_Checksum.java
+++ /dev/null
@@ -1,350 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write data to a dataset
-  using the Fletcher32 checksum filter.  The program first
-  checks if the Fletcher32 filter is available, then if it
-  is it writes integers to a dataset using Fletcher32, then
-  closes the file.  Next, it reopens the file, reads back
-  the data, checks if the filter detected an error and
-  outputs the type of filter and the maximum value in the
-  dataset to the screen.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Checksum {
-    private static String FILENAME    = "H5Ex_D_Checksum.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 32;
-    private static final int DIM_Y    = 64;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 8;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5Z_filter {
-        H5Z_FILTER_ERROR(-1),
-        H5Z_FILTER_NONE(0),
-        H5Z_FILTER_DEFLATE(1),
-        H5Z_FILTER_SHUFFLE(2),
-        H5Z_FILTER_FLETCHER32(3),
-        H5Z_FILTER_SZIP(4),
-        H5Z_FILTER_NBIT(5),
-        H5Z_FILTER_SCALEOFFSET(6),
-        H5Z_FILTER_RESERVED(256),
-        H5Z_FILTER_MAX(65535);
-        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
-
-        static
-        {
-            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5Z_filter(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5Z_filter get(int code) { return lookup.get(code); }
-    }
-
-    private static boolean checkFletcher32Filter()
-    {
-        try {
-            int available = H5.H5Zfilter_avail(H5Z_filter.H5Z_FILTER_FLETCHER32.getCode());
-            if (available == 0) {
-                System.out.println("N-Bit filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_FLETCHER32);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("N-Bit filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static void writeChecksum()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list, add the N-Bit filter.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-            if (dcpl_id >= 0) {
-                H5.H5Pset_fletcher32(dcpl_id);
-                // Set the chunk size.
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readChecksum()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print the filter type. Here we only retrieve the
-        // first filter because we know that we only added one filter.
-        try {
-            if (dcpl_id >= 0) {
-                // Java lib requires a valid filter_name object and cd_values
-                int[] flags          = {0};
-                long[] cd_nelmts     = {1};
-                int[] cd_values      = {0};
-                String[] filter_name = {""};
-                int[] filter_config  = {0};
-                int filter_type      = -1;
-                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
-                                               filter_config);
-                System.out.print("Filter type is: ");
-                switch (H5Z_filter.get(filter_type)) {
-                case H5Z_FILTER_DEFLATE:
-                    System.out.println("H5Z_FILTER_DEFLATE");
-                    break;
-                case H5Z_FILTER_SHUFFLE:
-                    System.out.println("H5Z_FILTER_SHUFFLE");
-                    break;
-                case H5Z_FILTER_FLETCHER32:
-                    System.out.println("H5Z_FILTER_FLETCHER32");
-                    break;
-                case H5Z_FILTER_SZIP:
-                    System.out.println("H5Z_FILTER_SZIP");
-                    break;
-                default:
-                    System.out.println("H5Z_FILTER_ERROR");
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0) {
-                int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-                // Check if the read was successful. Normally we do not perform
-                // error checking in these examples for the sake of clarity, but in
-                // this case we will make an exception because this is how the
-                // fletcher32 checksum filter reports data errors.
-                if (status < 0) {
-                    System.out.print("Dataset read failed!");
-                    try {
-                        if (dcpl_id >= 0)
-                            H5.H5Pclose(dcpl_id);
-                        if (dataset_id >= 0)
-                            H5.H5Dclose(dataset_id);
-                        if (file_id >= 0)
-                            H5.H5Fclose(file_id);
-                    }
-                    catch (Exception e) {
-                        e.printStackTrace();
-                    }
-                    return;
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Find the maximum value in the dataset, to verify that it was read
-        // correctly.
-        int max = dset_data[0][0];
-        for (int indx = 0; indx < DIM_X; indx++) {
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                if (max < dset_data[indx][jndx])
-                    max = dset_data[indx][jndx];
-        }
-        // Print the maximum value.
-        System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        // Check if the Fletcher32 filter is available and can be used for
-        // both encoding and decoding. Normally we do not perform error
-        // checking in these examples for the sake of clarity, but in this
-        // case we will make an exception because this filter is an
-        // optional part of the hdf5 library.
-        // size to be the current size.
-        if (H5Ex_D_Checksum.checkFletcher32Filter()) {
-            H5Ex_D_Checksum.writeChecksum();
-            H5Ex_D_Checksum.readChecksum();
-        }
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Chunk.java b/java/examples/datasets/H5Ex_D_Chunk.java
deleted file mode 100644
index fbfc148f251..00000000000
--- a/java/examples/datasets/H5Ex_D_Chunk.java
+++ /dev/null
@@ -1,371 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to create a chunked dataset.  The
-  program first writes integers in a hyperslab selection to
-  a chunked dataset with dataspace dimensions of DIM_XxDIM_Y
-  and chunk size of CHUNK_XxCHUNK_Y, then closes the file.
-  Next, it reopens the file, reads back the data, and
-  outputs it to the screen.  Finally it reads the data again
-  using a different hyperslab selection, and outputs
-  the result to the screen.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Chunk {
-    private static String FILENAME    = "H5Ex_D_Chunk.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 6;
-    private static final int DIM_Y    = 8;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 4;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5D_layout {
-        H5D_LAYOUT_ERROR(-1),
-        H5D_COMPACT(0),
-        H5D_CONTIGUOUS(1),
-        H5D_CHUNKED(2),
-        H5D_VIRTUAL(3),
-        H5D_NLAYOUTS(4);
-        private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
-
-        static
-        {
-            for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5D_layout(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5D_layout get(int code) { return lookup.get(code); }
-    }
-
-    private static void writeChunk()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data to "1", to make it easier to see the selections.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = 1;
-
-        // Print the data to the screen.
-        System.out.println("Original Data:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set the chunk size.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the chunked dataset.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Define and select the first part of the hyperslab selection.
-        long[] start  = {0, 0};
-        long[] stride = {3, 3};
-        long[] count  = {2, 3};
-        long[] block  = {2, 2};
-        try {
-            if ((filespace_id >= 0))
-                H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
-                                       block);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Define and select the second part of the hyperslab selection,
-        // which is subtracted from the first selection by the use of
-        // H5S_SELECT_NOTB
-        block[0] = 1;
-        block[1] = 1;
-        try {
-            if ((filespace_id >= 0)) {
-                H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count,
-                                       block);
-
-                // Write the data to the dataset.
-                if (dataset_id >= 0)
-                    H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
-                                HDF5Constants.H5P_DEFAULT, dset_data);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readChunk()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Print the storage layout.
-        try {
-            if (dcpl_id >= 0) {
-                int layout_type = H5.H5Pget_layout(dcpl_id);
-                System.out.print("Storage layout for " + DATASETNAME + " is: ");
-                switch (H5D_layout.get(layout_type)) {
-                case H5D_COMPACT:
-                    System.out.println("H5D_COMPACT");
-                    break;
-                case H5D_CONTIGUOUS:
-                    System.out.println("H5D_CONTIGUOUS");
-                    break;
-                case H5D_CHUNKED:
-                    System.out.println("H5D_CHUNKED");
-                    break;
-                case H5D_VIRTUAL:
-                    System.out.println("H5D_VIRTUAL");
-                    break;
-                case H5D_LAYOUT_ERROR:
-                    break;
-                case H5D_NLAYOUTS:
-                    break;
-                default:
-                    break;
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Data as written to disk by hyberslabs:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // Initialize the read array.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = 0;
-
-        // Define and select the hyperslab to use for reading.
-        try {
-            if (dataset_id >= 0) {
-                filespace_id = H5.H5Dget_space(dataset_id);
-
-                long[] start  = {0, 1};
-                long[] stride = {4, 4};
-                long[] count  = {2, 2};
-                long[] block  = {2, 3};
-
-                if (filespace_id >= 0) {
-                    H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
-                                           block);
-
-                    // Read the data using the previously defined hyperslab.
-                    if ((dataset_id >= 0) && (filespace_id >= 0))
-                        H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                                   filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Data as read from disk by hyberslab:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_D_Chunk.writeChunk();
-        H5Ex_D_Chunk.readChunk();
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Compact.java b/java/examples/datasets/H5Ex_D_Compact.java
deleted file mode 100644
index 3a60283ce38..00000000000
--- a/java/examples/datasets/H5Ex_D_Compact.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write data to a compact
-  dataset.  The program first writes integers to a compact
-  dataset with dataspace dimensions of DIM_XxDIM_Y, then
-  closes the file.  Next, it reopens the file, reads back
-  the data, and outputs it to the screen.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Compact {
-    private static String FILENAME    = "H5Ex_D_Compact.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 4;
-    private static final int DIM_Y    = 7;
-    private static final int RANK     = 2;
-
-    // Values for the status of space allocation
-    enum H5D_layout {
-        H5D_LAYOUT_ERROR(-1),
-        H5D_COMPACT(0),
-        H5D_CONTIGUOUS(1),
-        H5D_CHUNKED(2),
-        H5D_VIRTUAL(3),
-        H5D_NLAYOUTS(4);
-        private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
-
-        static
-        {
-            for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5D_layout(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5D_layout get(int code) { return lookup.get(code); }
-    }
-
-    private static void writeCompact()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set the layout to compact.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset. We will use all default properties for this example.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readCompact()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open file and dataset using the default properties.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Print the storage layout.
-        try {
-            if (dcpl_id >= 0) {
-                int layout_type = H5.H5Pget_layout(dcpl_id);
-                System.out.print("Storage layout for " + DATASETNAME + " is: ");
-                switch (H5D_layout.get(layout_type)) {
-                case H5D_COMPACT:
-                    System.out.println("H5D_COMPACT");
-                    break;
-                case H5D_CONTIGUOUS:
-                    System.out.println("H5D_CONTIGUOUS");
-                    break;
-                case H5D_CHUNKED:
-                    System.out.println("H5D_CHUNKED");
-                    break;
-                case H5D_VIRTUAL:
-                    System.out.println("H5D_VIRTUAL");
-                    break;
-                case H5D_LAYOUT_ERROR:
-                    break;
-                case H5D_NLAYOUTS:
-                    break;
-                default:
-                    break;
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Data for " + DATASETNAME + " is: ");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_D_Compact.writeCompact();
-        H5Ex_D_Compact.readCompact();
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_External.java b/java/examples/datasets/H5Ex_D_External.java
deleted file mode 100644
index d706fb758ed..00000000000
--- a/java/examples/datasets/H5Ex_D_External.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write data to an
-  external dataset.  The program first writes integers to an
-  external dataset with dataspace dimensions of DIM_XxDIM_Y,
-  then closes the file.  Next, it reopens the file, reads
-  back the data, and outputs the name of the external data
-  file and the data to the screen.
- ************************************************************/
-package examples.datasets;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_External {
-    private static String FILENAME         = "H5Ex_D_External.h5";
-    private static String EXTERNALNAME     = "H5Ex_D_External.data";
-    private static String DATASETNAME      = "DS1";
-    private static final int DIM_X         = 4;
-    private static final int DIM_Y         = 7;
-    private static final int RANK          = 2;
-    private static final int NAME_BUF_SIZE = 32;
-
-    private static void writeExternal()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize the dataset.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // set the external file.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pset_external(dcpl_id, EXTERNALNAME, 0, HDF5Constants.H5F_UNLIMITED);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the HDF5Constants.dataset.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the dataset.
-        try {
-            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readExternal()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-        String[] Xname    = new String[1];
-
-        // Open file using the default properties.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open dataset using the default properties.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print the name of the external file.
-        long[] Xsize = new long[NAME_BUF_SIZE];
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pget_external(dcpl_id, 0, Xsize.length, Xname, Xsize);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        System.out.println(DATASETNAME + " is stored in file: " + Xname[0]);
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println(DATASETNAME + ":");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // Close the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_D_External.writeExternal();
-        H5Ex_D_External.readExternal();
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_FillValue.java b/java/examples/datasets/H5Ex_D_FillValue.java
deleted file mode 100644
index db4dff77709..00000000000
--- a/java/examples/datasets/H5Ex_D_FillValue.java
+++ /dev/null
@@ -1,240 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to set the fill value for a
-  dataset.  The program first sets the fill value to
-  FILLVAL, creates a dataset with dimensions of DIM_XxDIM_Y,
-  reads from the uninitialized dataset, and outputs the
-  contents to the screen.  Next, it writes integers to the
-  dataset, reads the data back, and outputs it to the
-  screen.  Finally it extends the dataset, reads from it,
-  and outputs the result to the screen.
- ************************************************************/
-package examples.datasets;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_FillValue {
-    private static String FILENAME    = "H5Ex_D_FillValue.h5";
-    private static String DATASETNAME = "ExtendibleArray";
-    private static final int DIM_X    = 4;
-    private static final int DIM_Y    = 7;
-    private static final int EDIM_X   = 6;
-    private static final int EDIM_Y   = 10;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 4;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-    private static final int FILLVAL  = 99;
-
-    private static void fillValue()
-    {
-        long file_id             = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id             = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id        = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id          = HDF5Constants.H5I_INVALID_HID;
-        long[] dims              = {DIM_X, DIM_Y};
-        long[] extdims           = {EDIM_X, EDIM_Y};
-        long[] chunk_dims        = {CHUNK_X, CHUNK_Y};
-        long[] maxdims           = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
-        int[][] write_dset_data  = new int[DIM_X][DIM_Y];
-        int[][] read_dset_data   = new int[DIM_X][DIM_Y];
-        int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
-
-        // Initialize the dataset.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                write_dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace with unlimited dimensions.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set the chunk size.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set the fill value for the dataset
-        try {
-            int[] fill_value = {FILLVAL};
-            if (dcpl_id >= 0)
-                H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set the allocation time to "early". This way we can be sure
-        // that reading from the dataset immediately after creation will
-        // return the fill value.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset using the dataset creation property list.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read values from the dataset, which has not been written to yet.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, read_dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset before being written to:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(read_dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, write_dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data back.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, read_dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset after being written to:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(read_dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // Extend the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dset_extent(dataset_id, extdims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read from the extended dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, extend_dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset after extension:");
-        for (int indx = 0; indx < EDIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < EDIM_Y; jndx++)
-                System.out.print(extend_dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5Ex_D_FillValue.fillValue(); }
-}
diff --git a/java/examples/datasets/H5Ex_D_Gzip.java b/java/examples/datasets/H5Ex_D_Gzip.java
deleted file mode 100644
index 0a942548778..00000000000
--- a/java/examples/datasets/H5Ex_D_Gzip.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write data to a dataset
-  using gzip compression (also called zlib or deflate).  The
-  program first checks if gzip compression is available,
-  then if it is it writes integers to a dataset using gzip,
-  then closes the file.  Next, it reopens the file, reads
-  back the data, and outputs the type of compression and the
-  maximum value in the dataset to the screen.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Gzip {
-    private static String FILENAME    = "H5Ex_D_Gzip.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 32;
-    private static final int DIM_Y    = 64;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 8;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5Z_filter {
-        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
-        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
-        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
-        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
-        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
-        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
-        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
-        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
-        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
-        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
-        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
-
-        static
-        {
-            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5Z_filter(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5Z_filter get(int code) { return lookup.get(code); }
-    }
-
-    private static boolean checkGzipFilter()
-    {
-        try {
-            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
-            if (available == 0) {
-                System.out.println("gzip filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("gzip filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static void writeGzip()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list, add the gzip compression
-        // filter.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-            if (dcpl_id >= 0) {
-                H5.H5Pset_deflate(dcpl_id, 9);
-                // Set the chunk size.
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readGzip()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print the filter type. Here we only retrieve the
-        // first filter because we know that we only added one filter.
-        try {
-            if (dcpl_id >= 0) {
-                // Java lib requires a valid filter_name object and cd_values
-                int[] flags          = {0};
-                long[] cd_nelmts     = {1};
-                int[] cd_values      = {0};
-                String[] filter_name = {""};
-                int[] filter_config  = {0};
-                int filter_type      = -1;
-                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
-                                               filter_config);
-                System.out.print("Filter type is: ");
-                switch (H5Z_filter.get(filter_type)) {
-                case H5Z_FILTER_DEFLATE:
-                    System.out.println("H5Z_FILTER_DEFLATE");
-                    break;
-                case H5Z_FILTER_SHUFFLE:
-                    System.out.println("H5Z_FILTER_SHUFFLE");
-                    break;
-                case H5Z_FILTER_FLETCHER32:
-                    System.out.println("H5Z_FILTER_FLETCHER32");
-                    break;
-                case H5Z_FILTER_SZIP:
-                    System.out.println("H5Z_FILTER_SZIP");
-                    break;
-                case H5Z_FILTER_NBIT:
-                    System.out.println("H5Z_FILTER_NBIT");
-                    break;
-                case H5Z_FILTER_SCALEOFFSET:
-                    System.out.println("H5Z_FILTER_SCALEOFFSET");
-                    break;
-                default:
-                    System.out.println("H5Z_FILTER_ERROR");
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0) {
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Find the maximum value in the dataset, to verify that it was read
-        // correctly.
-        int max = dset_data[0][0];
-        for (int indx = 0; indx < DIM_X; indx++) {
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                if (max < dset_data[indx][jndx])
-                    max = dset_data[indx][jndx];
-        }
-        // Print the maximum value.
-        System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        // Check if gzip compression is available and can be used for both
-        // compression and decompression. Normally we do not perform error
-        // checking in these examples for the sake of clarity, but in this
-        // case we will make an exception because this filter is an
-        // optional part of the hdf5 library.
-        if (H5Ex_D_Gzip.checkGzipFilter()) {
-            H5Ex_D_Gzip.writeGzip();
-            H5Ex_D_Gzip.readGzip();
-        }
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Hyperslab.java b/java/examples/datasets/H5Ex_D_Hyperslab.java
deleted file mode 100644
index 0575d50f6ad..00000000000
--- a/java/examples/datasets/H5Ex_D_Hyperslab.java
+++ /dev/null
@@ -1,272 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write data to a
-  dataset by hyberslabs.  The program first writes integers
-  in a hyperslab selection to a dataset with dataspace
-  dimensions of DIM_XxDIM_Y, then closes the file.  Next, it
-  reopens the file, reads back the data, and outputs it to
-  the screen.  Finally it reads the data again using a
-  different hyperslab selection, and outputs the result to
-  the screen.
- ************************************************************/
-package examples.datasets;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Hyperslab {
-    private static String FILENAME    = "H5Ex_D_Hyperslab.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 6;
-    private static final int DIM_Y    = 8;
-    private static final int RANK     = 2;
-
-    private static void writeHyperslab()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data to "1", to make it easier to see the selections.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = 1;
-
-        // Print the data to the screen.
-        System.out.println("Original Data:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset. We will use all default properties for this example.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Define and select the first part of the hyperslab selection.
-        long[] start  = {0, 0};
-        long[] stride = {3, 3};
-        long[] count  = {2, 3};
-        long[] block  = {2, 2};
-        try {
-            if ((filespace_id >= 0))
-                H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
-                                       block);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Define and select the second part of the hyperslab selection,
-        // which is subtracted from the first selection by the use of
-        // H5S_SELECT_NOTB
-        block[0] = 1;
-        block[1] = 1;
-        try {
-            if ((filespace_id >= 0)) {
-                H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count,
-                                       block);
-
-                // Write the data to the dataset.
-                if (dataset_id >= 0)
-                    H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
-                                HDF5Constants.H5P_DEFAULT, dset_data);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readHyperslab()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Data as written to disk by hyberslabs:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // Initialize the read array.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = 0;
-
-        // Define and select the hyperslab to use for reading.
-        try {
-            if (dataset_id >= 0) {
-                filespace_id = H5.H5Dget_space(dataset_id);
-
-                long[] start  = {0, 1};
-                long[] stride = {4, 4};
-                long[] count  = {2, 2};
-                long[] block  = {2, 3};
-
-                if (filespace_id >= 0) {
-                    H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
-                                           block);
-
-                    // Read the data using the previously defined hyperslab.
-                    if ((dataset_id >= 0) && (filespace_id >= 0))
-                        H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                                   filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Data as read from disk by hyberslab:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_D_Hyperslab.writeHyperslab();
-        H5Ex_D_Hyperslab.readHyperslab();
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Nbit.java b/java/examples/datasets/H5Ex_D_Nbit.java
deleted file mode 100644
index d54ce210c20..00000000000
--- a/java/examples/datasets/H5Ex_D_Nbit.java
+++ /dev/null
@@ -1,305 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
- This example shows how to read and write data to a dataset
- using the N-Bit filter.  The program first checks if the
- N-Bit filter is available, then if it is it writes integers
- to a dataset using N-Bit, then closes the file. Next, it
- reopens the file, reads back the data, and outputs the type
- of filter and the maximum value in the dataset to the screen.
- ************************************************************/
-
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Nbit {
-    private static String FILENAME    = "H5Ex_D_Nbit.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 32;
-    private static final int DIM_Y    = 64;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 8;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5Z_filter {
-        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
-        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
-        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
-        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
-        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
-        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
-        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
-        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
-        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
-        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
-        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
-
-        static
-        {
-            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5Z_filter(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5Z_filter get(int code) { return lookup.get(code); }
-    }
-
-    private static boolean checkNbitFilter()
-    {
-        try {
-            // Check if N-Bit compression is available and can be used for both compression and decompression.
-            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_NBIT);
-            if (available == 0) {
-                System.out.println("N-Bit filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_NBIT);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("N-Bit filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static void writeData() throws Exception
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dtype_id     = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        try {
-            // Create a new file using the default properties.
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-
-            // Create dataspace. Setting maximum size to NULL sets the maximum
-            // size to be the current size.
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-
-            // Create the datatype to use with the N-Bit filter. It has an uncompressed size of 32 bits,
-            // but will have a size of 16 bits after being packed by the N-Bit filter.
-            dtype_id = H5.H5Tcopy(HDF5Constants.H5T_STD_I32LE);
-            H5.H5Tset_precision(dtype_id, 16);
-            H5.H5Tset_offset(dtype_id, 5);
-
-            // Create the dataset creation property list, add the N-Bit filter and set the chunk size.
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-            H5.H5Pset_nbit(dcpl_id);
-            H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-
-            // Create the dataset.
-            dataset_id = H5.H5Dcreate(file_id, DATASETNAME, dtype_id, filespace_id, HDF5Constants.H5P_DEFAULT,
-                                      dcpl_id, HDF5Constants.H5P_DEFAULT);
-
-            // Write the data to the dataset.
-            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        finally {
-            // Close and release resources.
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-            if (dtype_id >= 0)
-                H5.H5Tclose(dtype_id);
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-    }
-
-    private static void readData() throws Exception
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print the filter type. Here we only retrieve the
-        // first filter because we know that we only added one filter.
-        try {
-            if (dcpl_id >= 0) {
-                // Java lib requires a valid filter_name object and cd_values
-                int[] flags          = {0};
-                long[] cd_nelmts     = {1};
-                int[] cd_values      = {0};
-                String[] filter_name = {""};
-                int[] filter_config  = {0};
-                int filter_type      = -1;
-                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
-                                               filter_config);
-                System.out.print("Filter type is: ");
-                switch (H5Z_filter.get(filter_type)) {
-                case H5Z_FILTER_DEFLATE:
-                    System.out.println("H5Z_FILTER_DEFLATE");
-                    break;
-                case H5Z_FILTER_SHUFFLE:
-                    System.out.println("H5Z_FILTER_SHUFFLE");
-                    break;
-                case H5Z_FILTER_FLETCHER32:
-                    System.out.println("H5Z_FILTER_FLETCHER32");
-                    break;
-                case H5Z_FILTER_SZIP:
-                    System.out.println("H5Z_FILTER_SZIP");
-                    break;
-                case H5Z_FILTER_NBIT:
-                    System.out.println("H5Z_FILTER_NBIT");
-                    break;
-                case H5Z_FILTER_SCALEOFFSET:
-                    System.out.println("H5Z_FILTER_SCALEOFFSET");
-                    break;
-                default:
-                    System.out.println("H5Z_FILTER_ERROR");
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0) {
-                int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-                // Check if the read was successful.
-                if (status < 0)
-                    System.out.print("Dataset read failed!");
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Find the maximum value in the dataset, to verify that it was read
-        // correctly.
-        int max = dset_data[0][0];
-        for (int indx = 0; indx < DIM_X; indx++) {
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                if (max < dset_data[indx][jndx])
-                    max = dset_data[indx][jndx];
-        }
-        // Print the maximum value.
-        System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        /*
-         * Check if N-Bit compression is available and can be used for both compression and decompression.
-         * Normally we do not perform error checking in these examples for the sake of clarity, but in this
-         * case we will make an exception because this filter is an optional part of the hdf5 library.
-         */
-        try {
-            if (H5Ex_D_Nbit.checkNbitFilter()) {
-                H5Ex_D_Nbit.writeData();
-                H5Ex_D_Nbit.readData();
-            }
-        }
-        catch (Exception ex) {
-            ex.printStackTrace();
-        }
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_ReadWrite.java b/java/examples/datasets/H5Ex_D_ReadWrite.java
deleted file mode 100644
index 4b26a2c7248..00000000000
--- a/java/examples/datasets/H5Ex_D_ReadWrite.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-
-  This example shows how to read and write data to a
-  dataset.  The program first writes integers to a dataset
-  with dataspace dimensions of DIM_XxDIM_Y, then closes the
-  file.  Next, it reopens the file, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-package examples.datasets;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_ReadWrite {
-    private static String FILENAME    = "H5Ex_D_ReadWrite.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 4;
-    private static final int DIM_Y    = 7;
-    private static final int RANK     = 2;
-
-    private static void WriteDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset. We will use all default properties for this example.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open file using the default properties.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open dataset using the default properties.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println(DATASETNAME + ":");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // Close the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_D_ReadWrite.WriteDataset();
-        H5Ex_D_ReadWrite.ReadDataset();
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Shuffle.java b/java/examples/datasets/H5Ex_D_Shuffle.java
deleted file mode 100644
index 3d80f7d4609..00000000000
--- a/java/examples/datasets/H5Ex_D_Shuffle.java
+++ /dev/null
@@ -1,374 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write data to a dataset
-  using the shuffle filter with gzip compression.  The
-  program first checks if the shuffle and gzip filters are
-  available, then if they are it writes integers to a
-  dataset using shuffle+gzip, then closes the file.  Next,
-  it reopens the file, reads back the data, and outputs the
-  types of filters and the maximum value in the dataset to
-  the screen.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Shuffle {
-    private static String FILENAME    = "H5Ex_D_Shuffle.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 32;
-    private static final int DIM_Y    = 64;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 8;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5Z_filter {
-        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
-        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
-        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
-        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
-        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
-        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
-        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
-        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
-        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
-        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
-        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
-
-        static
-        {
-            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5Z_filter(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5Z_filter get(int code) { return lookup.get(code); }
-    }
-
-    private static boolean checkGzipFilter()
-    {
-        try {
-            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
-            if (available == 0) {
-                System.out.println("gzip filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("gzip filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static boolean checkShuffleFilter()
-    {
-        try {
-            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
-            if (available == 0) {
-                System.out.println("Shuffle filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SHUFFLE);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("Shuffle filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static void writeShuffle()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list, add the shuffle
-        // filter and the gzip compression filter.
-        // The order in which the filters are added here is significant -
-        // we will see much greater results when the shuffle is applied
-        // first. The order in which the filters are added to the property
-        // list is the order in which they will be invoked when writing
-        // data.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-            if (dcpl_id >= 0) {
-                H5.H5Pset_shuffle(dcpl_id);
-                H5.H5Pset_deflate(dcpl_id, 9);
-                // Set the chunk size.
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readShuffle()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the number of filters, and retrieve and print the
-        // type of each.
-        try {
-            if (dcpl_id >= 0) {
-                int nfilters = H5.H5Pget_nfilters(dcpl_id);
-                for (int indx = 0; indx < nfilters; indx++) {
-                    // Java lib requires a valid filter_name object and cd_values
-                    int[] flags          = {0};
-                    long[] cd_nelmts     = {1};
-                    int[] cd_values      = {0};
-                    String[] filter_name = {""};
-                    int[] filter_config  = {0};
-                    int filter_type      = -1;
-                    filter_type          = H5.H5Pget_filter(dcpl_id, indx, flags, cd_nelmts, cd_values, 120,
-                                                            filter_name, filter_config);
-                    System.out.print("Filter " + indx + ": Type is: ");
-                    switch (H5Z_filter.get(filter_type)) {
-                    case H5Z_FILTER_DEFLATE:
-                        System.out.println("H5Z_FILTER_DEFLATE");
-                        break;
-                    case H5Z_FILTER_SHUFFLE:
-                        System.out.println("H5Z_FILTER_SHUFFLE");
-                        break;
-                    case H5Z_FILTER_FLETCHER32:
-                        System.out.println("H5Z_FILTER_FLETCHER32");
-                        break;
-                    case H5Z_FILTER_SZIP:
-                        System.out.println("H5Z_FILTER_SZIP");
-                        break;
-                    case H5Z_FILTER_NBIT:
-                        System.out.println("H5Z_FILTER_NBIT");
-                        break;
-                    case H5Z_FILTER_SCALEOFFSET:
-                        System.out.println("H5Z_FILTER_SCALEOFFSET");
-                        break;
-                    default:
-                        System.out.println("H5Z_FILTER_ERROR");
-                    }
-                    System.out.println();
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0) {
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Find the maximum value in the dataset, to verify that it was read
-        // correctly.
-        int max = dset_data[0][0];
-        for (int indx = 0; indx < DIM_X; indx++) {
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                if (max < dset_data[indx][jndx])
-                    max = dset_data[indx][jndx];
-        }
-        // Print the maximum value.
-        System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        // Check if gzip compression is available and can be used for both
-        // compression and decompression. Normally we do not perform error
-        // checking in these examples for the sake of clarity, but in this
-        // case we will make an exception because this filter is an
-        // optional part of the hdf5 library.
-        // Similarly, check for availability of the shuffle filter.
-        if (H5Ex_D_Shuffle.checkGzipFilter() && H5Ex_D_Shuffle.checkShuffleFilter()) {
-            H5Ex_D_Shuffle.writeShuffle();
-            H5Ex_D_Shuffle.readShuffle();
-        }
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Sofloat.java b/java/examples/datasets/H5Ex_D_Sofloat.java
deleted file mode 100644
index a5f5cd8076e..00000000000
--- a/java/examples/datasets/H5Ex_D_Sofloat.java
+++ /dev/null
@@ -1,362 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write data to a dataset
-  using the Scale-Offset filter.  The program first checks
-  if the Scale-Offset filter is available, then if it is it
-  writes floating point numbers to a dataset using
-  Scale-Offset, then closes the file Next, it reopens the
-  file, reads back the data, and outputs the type of filter
-  and the maximum value in the dataset to the screen.
- ************************************************************/
-package examples.datasets;
-
-import java.text.DecimalFormat;
-import java.text.DecimalFormatSymbols;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Sofloat {
-
-    private static String FILENAME    = "H5Ex_D_Sofloat.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 32;
-    private static final int DIM_Y    = 64;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 8;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5Z_filter {
-        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
-        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
-        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
-        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
-        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
-        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
-        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
-        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
-        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
-        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
-        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
-
-        static
-        {
-            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5Z_filter(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5Z_filter get(int code) { return lookup.get(code); }
-    }
-
-    private static boolean checkScaleoffsetFilter()
-    {
-        try {
-            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
-            if (available == 0) {
-                System.out.println("Scale-Offset filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("Scale-Offset filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static void writeData()
-    {
-        long file_id         = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id    = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id      = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id         = HDF5Constants.H5I_INVALID_HID;
-        long[] dims          = {DIM_X, DIM_Y};
-        long[] chunk_dims    = {CHUNK_X, CHUNK_Y};
-        double[][] dset_data = new double[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++) {
-                double x              = indx;
-                double y              = jndx;
-                dset_data[indx][jndx] = (x + 1) / (y + 0.3) + y;
-            }
-
-        // Find the maximum value in the dataset, to verify that it was read correctly.
-        double max = dset_data[0][0];
-        double min = dset_data[0][0];
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++) {
-                if (max < dset_data[indx][jndx])
-                    max = dset_data[indx][jndx];
-                if (min > dset_data[indx][jndx])
-                    min = dset_data[indx][jndx];
-            }
-
-        // Print the maximum value.
-        DecimalFormat df = new DecimalFormat("#,##0.000000", new DecimalFormatSymbols(Locale.US));
-        System.out.println("Maximum value in write buffer is: " + df.format(max));
-        System.out.println("Minimum value in write buffer is: " + df.format(min));
-
-        // Create a new file using the default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list, add the Scale-Offset
-        // filter and set the chunk size.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-            if (dcpl_id >= 0) {
-                H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_FLOAT_DSCALE, 2);
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close and release resources.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close file
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readData()
-    {
-        long file_id         = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id      = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id         = HDF5Constants.H5I_INVALID_HID;
-        double[][] dset_data = new double[DIM_X][DIM_Y];
-
-        // Open file using the default properties.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Open dataset using the default properties.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print the filter type. Here we only retrieve the
-        // first filter because we know that we only added one filter.
-        try {
-            if (dcpl_id >= 0) {
-                // Java lib requires a valid filter_name object and cd_values
-                int[] flags          = {0};
-                long[] cd_nelmts     = {1};
-                int[] cd_values      = {0};
-                String[] filter_name = {""};
-                int[] filter_config  = {0};
-                int filter_type      = -1;
-
-                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
-                                               filter_config);
-                System.out.print("Filter type is: ");
-                switch (H5Z_filter.get(filter_type)) {
-                case H5Z_FILTER_DEFLATE:
-                    System.out.println("H5Z_FILTER_DEFLATE");
-                    break;
-                case H5Z_FILTER_SHUFFLE:
-                    System.out.println("H5Z_FILTER_SHUFFLE");
-                    break;
-                case H5Z_FILTER_FLETCHER32:
-                    System.out.println("H5Z_FILTER_FLETCHER32");
-                    break;
-                case H5Z_FILTER_SZIP:
-                    System.out.println("H5Z_FILTER_SZIP");
-                    break;
-                case H5Z_FILTER_NBIT:
-                    System.out.println("H5Z_FILTER_NBIT");
-                    break;
-                case H5Z_FILTER_SCALEOFFSET:
-                    System.out.println("H5Z_FILTER_SCALEOFFSET");
-                    break;
-                default:
-                    System.out.println("H5Z_FILTER_ERROR");
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Find the maximum value in the dataset, to verify that it was read correctly.
-        double max = dset_data[0][0];
-        double min = dset_data[0][0];
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++) {
-                if (max < dset_data[indx][jndx])
-                    max = dset_data[indx][jndx];
-                if (min > dset_data[indx][jndx])
-                    min = dset_data[indx][jndx];
-            }
-
-        // Print the maximum value.
-        DecimalFormat df = new DecimalFormat("#,##0.000000", new DecimalFormatSymbols(Locale.US));
-        System.out.println("Maximum value in " + DATASETNAME + " is: " + df.format(max));
-        System.out.println("Minimum value in " + DATASETNAME + " is: " + df.format(min));
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-
-        // Check if Scale-Offset compression is available and can be used
-        // for both compression and decompression. Normally we do not
-        // perform error checking in these examples for the sake of
-        // clarity, but in this case we will make an exception because this
-        // filter is an optional part of the hdf5 library.
-        if (H5Ex_D_Sofloat.checkScaleoffsetFilter()) {
-            H5Ex_D_Sofloat.writeData();
-            H5Ex_D_Sofloat.readData();
-        }
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Soint.java b/java/examples/datasets/H5Ex_D_Soint.java
deleted file mode 100644
index 3eb8e373b2d..00000000000
--- a/java/examples/datasets/H5Ex_D_Soint.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-      This example shows how to read and write data to a dataset
-      using the Scale-Offset filter.  The program first checks
-      if the Scale-Offset filter is available, then if it is it
-      writes integers to a dataset using Scale-Offset, then
-      closes the file Next, it reopens the file, reads back the
-      data, and outputs the type of filter and the maximum value
-      in the dataset to the screen.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Soint {
-
-    private static String FILENAME    = "H5Ex_D_Soint.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 32;
-    private static final int DIM_Y    = 64;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 8;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5Z_filter {
-        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
-        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
-        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
-        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
-        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
-        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
-        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
-        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
-        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
-        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
-        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
-
-        static
-        {
-            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5Z_filter(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5Z_filter get(int code) { return lookup.get(code); }
-    }
-
-    private static boolean checkScaleoffsetFilter()
-    {
-        try {
-            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
-            if (available == 0) {
-                System.out.println("Scale-Offset filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("Scale-Offset filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static void writeData()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using the default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list, add the Scale-Offset
-        // filter and set the chunk size.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-            if (dcpl_id >= 0) {
-                H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT,
-                                      HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT);
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close and release resources.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close file
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readData()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open file using the default properties.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Open dataset using the default properties.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print the filter type. Here we only retrieve the
-        // first filter because we know that we only added one filter.
-        try {
-            if (dcpl_id >= 0) {
-                // Java lib requires a valid filter_name object and cd_values
-                int[] flags          = {0};
-                long[] cd_nelmts     = {1};
-                int[] cd_values      = {0};
-                String[] filter_name = {""};
-                int[] filter_config  = {0};
-                int filter_type      = -1;
-
-                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
-                                               filter_config);
-                System.out.print("Filter type is: ");
-                switch (H5Z_filter.get(filter_type)) {
-                case H5Z_FILTER_DEFLATE:
-                    System.out.println("H5Z_FILTER_DEFLATE");
-                    break;
-                case H5Z_FILTER_SHUFFLE:
-                    System.out.println("H5Z_FILTER_SHUFFLE");
-                    break;
-                case H5Z_FILTER_FLETCHER32:
-                    System.out.println("H5Z_FILTER_FLETCHER32");
-                    break;
-                case H5Z_FILTER_SZIP:
-                    System.out.println("H5Z_FILTER_SZIP");
-                    break;
-                case H5Z_FILTER_NBIT:
-                    System.out.println("H5Z_FILTER_NBIT");
-                    break;
-                case H5Z_FILTER_SCALEOFFSET:
-                    System.out.println("H5Z_FILTER_SCALEOFFSET");
-                    break;
-                default:
-                    System.out.println("H5Z_FILTER_ERROR");
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Find the maximum value in the dataset, to verify that it was read correctly.
-        int max = dset_data[0][0];
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++) {
-                if (max < dset_data[indx][jndx])
-                    max = dset_data[indx][jndx];
-            }
-
-        // Print the maximum value.
-        System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-
-        // Check if Scale-Offset compression is available and can be used
-        // for both compression and decompression. Normally we do not
-        // perform error checking in these examples for the sake of
-        // clarity, but in this case we will make an exception because this
-        // filter is an optional part of the hdf5 library.
-        if (H5Ex_D_Soint.checkScaleoffsetFilter()) {
-            H5Ex_D_Soint.writeData();
-            H5Ex_D_Soint.readData();
-        }
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Szip.java b/java/examples/datasets/H5Ex_D_Szip.java
deleted file mode 100644
index 0426a879106..00000000000
--- a/java/examples/datasets/H5Ex_D_Szip.java
+++ /dev/null
@@ -1,337 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write data to a dataset
-  using szip compression.    The program first checks if
-  szip compression is available, then if it is it writes
-  integers to a dataset using szip, then closes the file.
-  Next, it reopens the file, reads back the data, and
-  outputs the type of compression and the maximum value in
-  the dataset to the screen.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Szip {
-    private static String FILENAME    = "H5Ex_D_Szip.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 32;
-    private static final int DIM_Y    = 64;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 8;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5Z_filter {
-        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
-        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
-        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
-        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
-        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
-        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
-        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
-        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
-        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
-        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
-        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
-
-        static
-        {
-            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5Z_filter(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5Z_filter get(int code) { return lookup.get(code); }
-    }
-
-    private static boolean checkSzipFilter()
-    {
-        try {
-            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SZIP);
-            if (available == 0) {
-                System.out.println("szip filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SZIP);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("szip filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static void writeSzip()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list, add the szip compression
-        // filter.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-            if (dcpl_id >= 0) {
-                H5.H5Pset_szip(dcpl_id, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 8);
-                // Set the chunk size.
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readSzip()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print the filter type. Here we only retrieve the
-        // first filter because we know that we only added one filter.
-        try {
-            if (dcpl_id >= 0) {
-                // Java lib requires a valid filter_name object and cd_values
-                int[] flags          = {0};
-                long[] cd_nelmts     = {1};
-                int[] cd_values      = {0};
-                String[] filter_name = {""};
-                int[] filter_config  = {0};
-                int filter_type      = -1;
-
-                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
-                                               filter_config);
-                System.out.print("Filter type is: ");
-                switch (H5Z_filter.get(filter_type)) {
-                case H5Z_FILTER_DEFLATE:
-                    System.out.println("H5Z_FILTER_DEFLATE");
-                    break;
-                case H5Z_FILTER_SHUFFLE:
-                    System.out.println("H5Z_FILTER_SHUFFLE");
-                    break;
-                case H5Z_FILTER_FLETCHER32:
-                    System.out.println("H5Z_FILTER_FLETCHER32");
-                    break;
-                case H5Z_FILTER_SZIP:
-                    System.out.println("H5Z_FILTER_SZIP");
-                    break;
-                case H5Z_FILTER_NBIT:
-                    System.out.println("H5Z_FILTER_NBIT");
-                    break;
-                case H5Z_FILTER_SCALEOFFSET:
-                    System.out.println("H5Z_FILTER_SCALEOFFSET");
-                    break;
-                default:
-                    System.out.println("H5Z_FILTER_ERROR");
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0) {
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Find the maximum value in the dataset, to verify that it was read
-        // correctly.
-        int max = dset_data[0][0];
-        for (int indx = 0; indx < DIM_X; indx++) {
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                if (max < dset_data[indx][jndx])
-                    max = dset_data[indx][jndx];
-        }
-        // Print the maximum value.
-        System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        // Check if gzip compression is available and can be used for both
-        // compression and decompression. Normally we do not perform error
-        // checking in these examples for the sake of clarity, but in this
-        // case we will make an exception because this filter is an
-        // optional part of the hdf5 library.
-        if (H5Ex_D_Szip.checkSzipFilter()) {
-            H5Ex_D_Szip.writeSzip();
-            H5Ex_D_Szip.readSzip();
-        }
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_Transform.java b/java/examples/datasets/H5Ex_D_Transform.java
deleted file mode 100644
index 16ab423b8cb..00000000000
--- a/java/examples/datasets/H5Ex_D_Transform.java
+++ /dev/null
@@ -1,250 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-      This example shows how to read and write data to a dataset
-      using a data transform expression.  The program first
-      writes integers to a dataset using the transform
-      expression TRANSFORM, then closes the file.  Next, it
-      reopens the file, reads back the data without a transform,
-      and outputs the data to the screen.  Finally it reads the
-      data using the transform expression RTRANSFORM and outputs
-      the results to the screen.
- ************************************************************/
-package examples.datasets;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_Transform {
-
-    private static String FILE       = "H5Ex_D_Transform.h5";
-    private static String DATASET    = "DS1";
-    private static final int DIM_X   = 4;
-    private static final int DIM_Y   = 7;
-    private static String TRANSFORM  = "x+1";
-    private static String RTRANSFORM = "x-1";
-
-    private static void writeData()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dxpl_id      = HDF5Constants.H5I_INVALID_HID;
-
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize data.
-        for (int i = 0; i < DIM_X; i++)
-            for (int j = 0; j < DIM_Y; j++)
-                dset_data[i][j] = i * j - j;
-
-        // Output the data to the screen.
-        System.out.println("Original Data:");
-        for (int i = 0; i < DIM_X; i++) {
-            System.out.print(" [");
-            for (int j = 0; j < DIM_Y; j++)
-                System.out.print(" " + dset_data[i][j] + " ");
-            System.out.println("]");
-        }
-
-        // Create a new file using the default properties.
-        try {
-            file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            filespace_id = H5.H5Screate_simple(2, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset transfer property list and define the transform expression.
-        try {
-            dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
-            if (dxpl_id >= 0)
-                H5.H5Pset_data_transform(dxpl_id, TRANSFORM);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset using the default properties. Unfortunately we must save as
-        // a native type or the transform operation will fail.
-        try {
-            if ((file_id >= 0) && (filespace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASET, HDF5Constants.H5T_NATIVE_INT, filespace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset using the dataset transfer property list.
-        try {
-            if ((dataset_id >= 0) && (dxpl_id >= 0))
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, dxpl_id, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dxpl_id >= 0)
-                H5.H5Pclose(dxpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readData()
-    {
-
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dxpl_id      = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Open an existing file using the default properties.
-        try {
-            file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset using the default properties.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASET, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Data as written with transform '" + TRANSFORM + "'");
-        for (int i = 0; i < DIM_X; i++) {
-            System.out.print(" [");
-            for (int j = 0; j < DIM_Y; j++)
-                System.out.print(" " + dset_data[i][j] + " ");
-            System.out.println("]");
-        }
-
-        // Create the dataset transfer property list and define the transform expression.
-        try {
-            dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
-            if (dxpl_id >= 0)
-                H5.H5Pset_data_transform(dxpl_id, RTRANSFORM);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read the data using the dataset transfer property list.
-        try {
-            if ((dataset_id >= 0) && (dxpl_id >= 0))
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, dxpl_id, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-
-        System.out.println("Data as written with transform  '" + TRANSFORM + "' and read with transform  '" +
-                           RTRANSFORM + "'");
-        for (int i = 0; i < DIM_X; i++) {
-            System.out.print(" [");
-            for (int j = 0; j < DIM_Y; j++)
-                System.out.print(" " + dset_data[i][j] + " ");
-            System.out.println("]");
-        }
-
-        // Close and release resources.
-        try {
-            if (dxpl_id >= 0)
-                H5.H5Pclose(dxpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_D_Transform.writeData();
-        H5Ex_D_Transform.readData();
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedAdd.java b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
deleted file mode 100644
index 4154cf3b336..00000000000
--- a/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
+++ /dev/null
@@ -1,393 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to create and extend an unlimited
-  dataset.  The program first writes integers to a dataset
-  with dataspace dimensions of DIM_XxDIM_Y, then closes the
-  file.  Next, it reopens the file, reads back the data,
-  outputs it to the screen, extends the dataset, and writes
-  new data to the extended portions of the dataset.  Finally
-  it reopens the file again, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-package examples.datasets;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_UnlimitedAdd {
-    private static String FILENAME    = "H5Ex_D_UnlimitedAdd.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 4;
-    private static final int DIM_Y    = 7;
-    private static final int EDIM_X   = 6;
-    private static final int EDIM_Y   = 10;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 4;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    private static void writeUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        long[] maxdims    = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize the dataset.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace with unlimited dimensions.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set the chunk size.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the unlimited dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void extendUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] extdims    = {EDIM_X, EDIM_Y};
-        long[] start      = {0, 0};
-        long[] count      = new long[2];
-        int[][] dset_data;
-        int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer. This is a
-        // two dimensional dataset so the dynamic allocation must be done
-        // in steps.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to rows.
-        dset_data = new int[(int)dims[0]][(int)dims[1]];
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset before extension:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Extend the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dset_extent(dataset_id, extdims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataspace for the newly extended dataset.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Initialize data for writing to the extended dataset.
-        for (int indx = 0; indx < EDIM_X; indx++)
-            for (int jndx = 0; jndx < EDIM_Y; jndx++)
-                extend_dset_data[indx][jndx] = jndx;
-
-        // Select the entire dataspace.
-        try {
-            if (dataspace_id >= 0) {
-                H5.H5Sselect_all(dataspace_id);
-
-                // Subtract a hyperslab reflecting the original dimensions from the
-                // selection. The selection now contains only the newly extended
-                // portions of the dataset.
-                count[0] = dims[0];
-                count[1] = dims[1];
-                H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
-
-                // Write the data to the selected portion of the dataset.
-                if (dataset_id >= 0)
-                    H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
-                                HDF5Constants.H5P_DEFAULT, extend_dset_data);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for the read buffer as before.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Allocate array of pointers to rows.
-        dset_data = new int[(int)dims[0]][(int)dims[1]];
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset after extension:");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < dims[1]; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_D_UnlimitedAdd.writeUnlimited();
-        H5Ex_D_UnlimitedAdd.extendUnlimited();
-        H5Ex_D_UnlimitedAdd.readUnlimited();
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedGzip.java b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
deleted file mode 100644
index e084641ae5e..00000000000
--- a/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
+++ /dev/null
@@ -1,505 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to create and extend an unlimited
-  dataset with gzip compression.  The program first writes
-  integers to a gzip compressed dataset with dataspace
-  dimensions of DIM_XxDIM_Y, then closes the file.  Next, it
-  reopens the file, reads back the data, outputs it to the
-  screen, extends the dataset, and writes new data to the
-  extended portions of the dataset.  Finally it reopens the
-  file again, reads back the data, and outputs it to the
-  screen.
- ************************************************************/
-package examples.datasets;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_UnlimitedGzip {
-    private static String FILENAME    = "H5Ex_D_UnlimitedGzip.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 4;
-    private static final int DIM_Y    = 7;
-    private static final int EDIM_X   = 6;
-    private static final int EDIM_Y   = 10;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 4;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    // Values for the status of space allocation
-    enum H5Z_filter {
-        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
-        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
-        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
-        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
-        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
-        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
-        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
-        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
-        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
-        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
-        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
-
-        static
-        {
-            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5Z_filter(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5Z_filter get(int code) { return lookup.get(code); }
-    }
-
-    private static boolean checkGzipFilter()
-    {
-        try {
-            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
-            if (available == 0) {
-                System.out.println("gzip filter not available.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
-            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
-                ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
-                System.out.println("gzip filter not available for encoding and decoding.");
-                return false;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        return true;
-    }
-
-    private static void writeUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        long[] maxdims    = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize the dataset.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace with unlimited dimensions.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list, add the gzip compression
-        // filter.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-            if (dcpl_id >= 0) {
-                H5.H5Pset_deflate(dcpl_id, 9);
-                // Set the chunk size.
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the unlimited dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void extendUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] extdims    = {EDIM_X, EDIM_Y};
-        long[] start      = {0, 0};
-        long[] count      = new long[2];
-        int[][] dset_data;
-        int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer. This is a
-        // two dimensional dataset so the dynamic allocation must be done
-        // in steps.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to rows.
-        dset_data = new int[(int)dims[0]][(int)dims[1]];
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset before extension:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Extend the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dset_extent(dataset_id, extdims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataspace for the newly extended dataset.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Initialize data for writing to the extended dataset.
-        for (int indx = 0; indx < EDIM_X; indx++)
-            for (int jndx = 0; jndx < EDIM_Y; jndx++)
-                extend_dset_data[indx][jndx] = jndx;
-
-        // Select the entire dataspace.
-        try {
-            if (dataspace_id >= 0) {
-                H5.H5Sselect_all(dataspace_id);
-
-                // Subtract a hyperslab reflecting the original dimensions from the
-                // selection. The selection now contains only the newly extended
-                // portions of the dataset.
-                count[0] = dims[0];
-                count[1] = dims[1];
-                H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
-
-                // Write the data to the selected portion of the dataset.
-                if (dataset_id >= 0)
-                    H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
-                                HDF5Constants.H5P_DEFAULT, extend_dset_data);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataset creation property list.
-        try {
-            if (dataset_id >= 0)
-                dcpl_id = H5.H5Dget_create_plist(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve and print the filter type. Here we only retrieve the
-        // first filter because we know that we only added one filter.
-        try {
-            if (dcpl_id >= 0) {
-                // Java lib requires a valid filter_name object and cd_values
-                int[] flags          = {0};
-                long[] cd_nelmts     = {1};
-                int[] cd_values      = {0};
-                String[] filter_name = {""};
-                int[] filter_config  = {0};
-                int filter_type      = -1;
-                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
-                                               filter_config);
-                System.out.print("Filter type is: ");
-                switch (H5Z_filter.get(filter_type)) {
-                case H5Z_FILTER_DEFLATE:
-                    System.out.println("H5Z_FILTER_DEFLATE");
-                    break;
-                case H5Z_FILTER_SHUFFLE:
-                    System.out.println("H5Z_FILTER_SHUFFLE");
-                    break;
-                case H5Z_FILTER_FLETCHER32:
-                    System.out.println("H5Z_FILTER_FLETCHER32");
-                    break;
-                case H5Z_FILTER_SZIP:
-                    System.out.println("H5Z_FILTER_SZIP");
-                    break;
-                default:
-                    System.out.println("H5Z_FILTER_ERROR");
-                }
-                System.out.println();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for the read buffer as before.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Allocate array of pointers to rows.
-        dset_data = new int[(int)dims[0]][(int)dims[1]];
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset after extension:");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < dims[1]; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        // Check if gzip compression is available and can be used for both
-        // compression and decompression. Normally we do not perform error
-        // checking in these examples for the sake of clarity, but in this
-        // case we will make an exception because this filter is an
-        // optional part of the hdf5 library.
-        if (H5Ex_D_UnlimitedGzip.checkGzipFilter()) {
-            H5Ex_D_UnlimitedGzip.writeUnlimited();
-            H5Ex_D_UnlimitedGzip.extendUnlimited();
-            H5Ex_D_UnlimitedGzip.readUnlimited();
-        }
-    }
-}
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedMod.java b/java/examples/datasets/H5Ex_D_UnlimitedMod.java
deleted file mode 100644
index ccabcdd135c..00000000000
--- a/java/examples/datasets/H5Ex_D_UnlimitedMod.java
+++ /dev/null
@@ -1,379 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to create and extend an unlimited
-  dataset.  The program first writes integers to a dataset
-  with dataspace dimensions of DIM_XxDIM_Y, then closes the
-  file.  Next, it reopens the file, reads back the data,
-  outputs it to the screen, extends the dataset, and writes
-  new data to the entire extended dataset.  Finally it
-  reopens the file again, reads back the data, and outputs it
-  to the screen.
- ************************************************************/
-package examples.datasets;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_D_UnlimitedMod {
-    private static String FILENAME    = "H5Ex_D_UnlimitedMod.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM_X    = 4;
-    private static final int DIM_Y    = 7;
-    private static final int EDIM_X   = 6;
-    private static final int EDIM_Y   = 10;
-    private static final int CHUNK_X  = 4;
-    private static final int CHUNK_Y  = 4;
-    private static final int RANK     = 2;
-    private static final int NDIMS    = 2;
-
-    private static void writeUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dcpl_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] chunk_dims = {CHUNK_X, CHUNK_Y};
-        long[] maxdims    = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize the dataset.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * jndx - jndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace with unlimited dimensions.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset creation property list.
-        try {
-            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set the chunk size.
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the unlimited dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dcpl_id >= 0)
-                H5.H5Pclose(dcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void extendUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        long[] extdims    = {EDIM_X, EDIM_Y};
-        int[][] dset_data;
-        int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer. This is a
-        // two dimensional dataset so the dynamic allocation must be done
-        // in steps.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to rows.
-        dset_data = new int[(int)dims[0]][(int)dims[1]];
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset before extension:");
-        for (int indx = 0; indx < DIM_X; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Extend the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dset_extent(dataset_id, extdims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Retrieve the dataspace for the newly extended dataset.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Initialize data for writing to the extended dataset.
-        for (int indx = 0; indx < EDIM_X; indx++)
-            for (int jndx = 0; jndx < EDIM_Y; jndx++)
-                extend_dset_data[indx][jndx] = jndx;
-
-        // Write the data tto the extended dataset.
-        try {
-            if ((dataspace_id >= 0) && (dataset_id >= 0))
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
-                            HDF5Constants.H5P_DEFAULT, extend_dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readUnlimited()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for the read buffer as before.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Allocate array of pointers to rows.
-        dset_data = new int[(int)dims[0]][(int)dims[1]];
-
-        // Read the data using the default properties.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Dataset after extension:");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [ ");
-            for (int jndx = 0; jndx < dims[1]; jndx++)
-                System.out.print(dset_data[indx][jndx] + " ");
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_D_UnlimitedMod.writeUnlimited();
-        H5Ex_D_UnlimitedMod.extendUnlimited();
-        H5Ex_D_UnlimitedMod.readUnlimited();
-    }
-}
diff --git a/java/examples/datasets/JavaDatasetExample.sh.in b/java/examples/datasets/JavaDatasetExample.sh.in
deleted file mode 100644
index 96830763215..00000000000
--- a/java/examples/datasets/JavaDatasetExample.sh.in
+++ /dev/null
@@ -1,497 +0,0 @@
-#! /bin/sh
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-#
-
-top_builddir=@top_builddir@
-top_srcdir=@top_srcdir@
-srcdir=@srcdir@
-IS_DARWIN="@H5_IS_DARWIN@"
-
-USE_FILTER_SZIP="@USE_FILTER_SZIP@"
-USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
-
-TESTNAME=EX_Datasets
-EXIT_SUCCESS=0
-EXIT_FAILURE=1
-
-# Set up default variable values if not supplied by the user.
-RM='rm -rf'
-CMP='cmp'
-DIFF='diff -c'
-CP='cp'
-DIRNAME='dirname'
-BASENAME='basename'
-LS='ls'
-AWK='awk'
-
-nerrors=0
-
-# where the libs exist
-HDFLIB_HOME="$top_srcdir/java/lib"
-BLDDIR="."
-BLDLIBDIR="$BLDDIR/testlibs"
-HDFTEST_HOME="$top_srcdir/java/examples/datasets"
-JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
-TESTJARFILE=jar@PACKAGE_TARNAME@datasets.jar
-test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
-
-######################################################################
-# library files
-# --------------------------------------------------------------------
-# All the library files copy from source directory to test directory
-# NOTE: Keep this framework to add/remove test files.
-#       This list are also used for checking exist.
-#       Comment '#' without space can be used.
-# --------------------------------------------------------------------
-LIST_LIBRARY_FILES="
-$top_builddir/src/.libs/libhdf5.*
-$top_builddir/java/src/jni/.libs/libhdf5_java.*
-$top_builddir/java/src/$JARFILE
-"
-LIST_JAR_TESTFILES="
-$HDFLIB_HOME/slf4j-api-2.0.6.jar
-$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar
-"
-LIST_DATA_FILES="
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Alloc.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Checksum.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Chunk.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Compact.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_External.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_FillValue.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Gzip.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Hyperslab.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_ReadWrite.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Shuffle.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Szip.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Nbit.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Transform.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Sofloat.txt
-$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Soint.txt
-"
-
-#
-# copy files from source dirs to test dir
-#
-COPY_LIBFILES="$LIST_LIBRARY_FILES"
-COPY_JARTESTFILES="$LIST_JAR_TESTFILES"
-
-COPY_LIBFILES_TO_BLDLIBDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_LIBFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -fR $tstfile $BLDLIBDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-                BNAME=`$BASENAME $tstfile`
-                if [ "$BNAME" = "libhdf5_java.dylib" ]; then
-                    COPIED_LIBHDF5_JAVA=1
-                fi
-            fi
-        fi
-    done
-    if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then
-       (cd $BLDLIBDIR; \
-         install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \
-         exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \
-         echo $exist_path; \
-         install_name_tool -change $exist_path @rpath/libhdf5.dylib libhdf5_java.dylib)
-    fi
-    # copy jar files. Used -f to make sure get a new copy
-    for tstfile in $COPY_JARTESTFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -fR $tstfile $BLDLIBDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_LIBFILES_AND_BLDLIBDIR()
-{
-    # skip rm if srcdir is same as destdir
-    # this occurs when build/test performed in source dir and
-    # make cp fail
-    SDIR=$HDFLIB_HOME
-    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-    INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-        $RM -rf $BLDLIBDIR
-    fi
-}
-
-COPY_DATAFILES="$LIST_DATA_FILES"
-
-COPY_DATAFILES_TO_BLDDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_DATAFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -f $tstfile $BLDDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_DATAFILES_AND_BLDDIR()
-{
-        $RM $BLDDIR/examples.datasets.H5Ex_D_*.txt
-        $RM $BLDDIR/H5Ex_D_*.out
-        $RM $BLDDIR/H5Ex_D_*.h5
-        $RM $BLDDIR/H5Ex_D_External.data
-}
-
-# Print a line-line message left justified in a field of 70 characters
-# beginning with the word "Testing".
-#
-TESTING() {
-   SPACES="                                                               "
-   echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
-}
-
-# where Java is installed (requires jdk1.7.x)
-JAVAEXE=@JAVA@
-JAVAEXEFLAGS=@H5_JAVAFLAGS@
-
-###############################################################################
-#            DO NOT MODIFY BELOW THIS LINE
-###############################################################################
-
-# prepare for test
-COPY_LIBFILES_TO_BLDLIBDIR
-COPY_DATAFILES_TO_BLDDIR
-
-CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE""
-
-TEST=/usr/bin/test
-if [ ! -x /usr/bin/test ]
-then
-TEST=`which test`
-fi
-
-if $TEST -z "$CLASSPATH"; then
-        CLASSPATH=""
-fi
-CLASSPATH=$CPATH":"$CLASSPATH
-export CLASSPATH
-
-if $TEST -n "$JAVAPATH" ; then
-        PATH=$JAVAPATH":"$PATH
-        export PATH
-fi
-
-if $TEST -e /bin/uname; then
-   os_name=`/bin/uname -s`
-elif $TEST -e /usr/bin/uname; then
-   os_name=`/usr/bin/uname -s`
-else
-   os_name=unknown
-fi
-
-if $TEST -z "$LD_LIBRARY_PATH" ; then
-        LD_LIBRARY_PATH=""
-fi
-
-case  $os_name in
-    *)
-    LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
-    ;;
-esac
-
-export LD_LIBRARY_PATH
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc"
-TESTING examples.datasets.H5Ex_D_Alloc
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc > H5Ex_D_Alloc.out
-)
-if diff H5Ex_D_Alloc.out examples.datasets.H5Ex_D_Alloc.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Alloc"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Alloc"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum"
-TESTING examples.datasets.H5Ex_D_Checksum
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum > H5Ex_D_Checksum.out
-)
-if diff H5Ex_D_Checksum.out examples.datasets.H5Ex_D_Checksum.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Checksum"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Checksum"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk"
-TESTING examples.datasets.H5Ex_D_Chunk
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk > H5Ex_D_Chunk.out
-)
-if diff H5Ex_D_Chunk.out examples.datasets.H5Ex_D_Chunk.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Chunk"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Chunk"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact"
-TESTING examples.datasets.H5Ex_D_Compact
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact > H5Ex_D_Compact.out
-)
-if diff H5Ex_D_Compact.out examples.datasets.H5Ex_D_Compact.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Compact"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Compact"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External"
-TESTING examples.datasets.H5Ex_D_External
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External > H5Ex_D_External.out
-)
-if diff H5Ex_D_External.out examples.datasets.H5Ex_D_External.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_External"
-else
-    echo "**FAILED**    datasets.H5Ex_D_External"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue"
-TESTING examples.datasets.H5Ex_D_FillValue
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue > H5Ex_D_FillValue.out
-)
-if diff H5Ex_D_FillValue.out examples.datasets.H5Ex_D_FillValue.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_FillValue"
-else
-    echo "**FAILED**    datasets.H5Ex_D_FillValue"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-if test $USE_FILTER_DEFLATE = "yes"; then
-    echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip"
-    TESTING examples.datasets.H5Ex_D_Gzip
-    (
-    $RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip > H5Ex_D_Gzip.out
-    )
-    if diff H5Ex_D_Gzip.out examples.datasets.H5Ex_D_Gzip.txt > /dev/null; then
-        echo "  PASSED      datasets.H5Ex_D_Gzip"
-    else
-        echo "**FAILED**    datasets.H5Ex_D_Gzip"
-        nerrors="`expr $nerrors + 1`"
-    fi
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab"
-TESTING examples.datasets.H5Ex_D_Hyperslab
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab > H5Ex_D_Hyperslab.out
-)
-if diff H5Ex_D_Hyperslab.out examples.datasets.H5Ex_D_Hyperslab.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Hyperslab"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Hyperslab"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite"
-TESTING examples.datasets.H5Ex_D_ReadWrite
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite > H5Ex_D_ReadWrite.out
-)
-if diff H5Ex_D_ReadWrite.out examples.datasets.H5Ex_D_ReadWrite.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_ReadWrite"
-else
-    echo "**FAILED**    datasets.H5Ex_D_ReadWrite"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-if test $USE_FILTER_DEFLATE = "yes"; then
-    echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle"
-    TESTING examples.datasets.H5Ex_D_Shuffle
-    (
-    $RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle > H5Ex_D_Shuffle.out
-    )
-    if diff H5Ex_D_Shuffle.out examples.datasets.H5Ex_D_Shuffle.txt > /dev/null; then
-        echo "  PASSED      datasets.H5Ex_D_Shuffle"
-    else
-        echo "**FAILED**    datasets.H5Ex_D_Shuffle"
-        nerrors="`expr $nerrors + 1`"
-    fi
-fi
-
-if test $USE_FILTER_SZIP = "yes"; then
-    echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip"
-    TESTING examples.datasets.H5Ex_D_Szip
-    (
-    $RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip > H5Ex_D_Szip.out
-    )
-    if diff H5Ex_D_Szip.out examples.datasets.H5Ex_D_Szip.txt > /dev/null; then
-        echo "  PASSED      datasets.H5Ex_D_Szip"
-    else
-        echo "**FAILED**    datasets.H5Ex_D_Szip"
-        nerrors="`expr $nerrors + 1`"
-    fi
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd"
-TESTING examples.datasets.H5Ex_D_UnlimitedAdd
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd > H5Ex_D_UnlimitedAdd.out
-)
-if diff H5Ex_D_UnlimitedAdd.out examples.datasets.H5Ex_D_UnlimitedAdd.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_UnlimitedAdd"
-else
-    echo "**FAILED**    datasets.H5Ex_D_UnlimitedAdd"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip"
-TESTING examples.datasets.H5Ex_D_UnlimitedGzip
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip > H5Ex_D_UnlimitedGzip.out
-)
-if diff H5Ex_D_External.out examples.datasets.H5Ex_D_External.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_UnlimitedGzip"
-else
-    echo "**FAILED**    datasets.H5Ex_D_UnlimitedGzip"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod"
-TESTING examples.datasets.H5Ex_D_UnlimitedMod
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod > H5Ex_D_UnlimitedMod.out
-)
-if diff H5Ex_D_UnlimitedMod.out examples.datasets.H5Ex_D_UnlimitedMod.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_UnlimitedMod"
-else
-    echo "**FAILED**    datasets.H5Ex_D_UnlimitedMod"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit"
-TESTING examples.datasets.H5Ex_D_Nbit
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit > H5Ex_D_Nbit.out
-)
-if diff H5Ex_D_Nbit.out examples.datasets.H5Ex_D_Nbit.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Nbit"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Nbit"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform"
-TESTING examples.datasets.H5Ex_D_Transform
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform > H5Ex_D_Transform.out
-)
-if diff H5Ex_D_Transform.out examples.datasets.H5Ex_D_Transform.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Transform"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Transform"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat"
-TESTING examples.datasets.H5Ex_D_Sofloat
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat > H5Ex_D_Sofloat.out
-)
-if diff H5Ex_D_Sofloat.out examples.datasets.H5Ex_D_Sofloat.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Sofloat"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Sofloat"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint"
-TESTING examples.datasets.H5Ex_D_Soint
-(
-$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint > H5Ex_D_Soint.out
-)
-if diff H5Ex_D_Soint.out examples.datasets.H5Ex_D_Soint.txt > /dev/null; then
-    echo "  PASSED      datasets.H5Ex_D_Soint"
-else
-    echo "**FAILED**    datasets.H5Ex_D_Soint"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-# Clean up temporary files/directories
-CLEAN_LIBFILES_AND_BLDLIBDIR
-CLEAN_DATAFILES_AND_BLDDIR
-
-# Report test results and exit
-if test $nerrors -eq 0 ; then
-    echo "All $TESTNAME tests passed."
-    exit $EXIT_SUCCESS
-else
-    echo "$TESTNAME tests failed with $nerrors errors."
-    exit $EXIT_FAILURE
-fi
diff --git a/java/examples/datasets/Makefile.am b/java/examples/datasets/Makefile.am
deleted file mode 100644
index d4bb6662bb6..00000000000
--- a/java/examples/datasets/Makefile.am
+++ /dev/null
@@ -1,75 +0,0 @@
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-##
-## Makefile.am
-## Run automake to generate a Makefile.in from this file.
-##
-#
-# HDF5 Java Library Examples Makefile(.in)
-
-include $(top_srcdir)/config/commence.am
-
-# Mark this directory as part of the JNI API
-JAVA_API=yes
-
-JAVAROOT = .classes
-
-classes:
-	test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
-
-pkgpath = examples/datasets
-hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
-CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH
-
-jarfile = jar$(PACKAGE_TARNAME)datasets.jar
-
-AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
-
-TESTPACKAGE =
-
-noinst_JAVA = \
-	H5Ex_D_Alloc.java \
-    H5Ex_D_Checksum.java \
-    H5Ex_D_Chunk.java \
-    H5Ex_D_Compact.java \
-    H5Ex_D_External.java \
-    H5Ex_D_FillValue.java \
-    H5Ex_D_Gzip.java \
-    H5Ex_D_Hyperslab.java \
-    H5Ex_D_ReadWrite.java \
-    H5Ex_D_Shuffle.java \
-    H5Ex_D_Szip.java \
-    H5Ex_D_UnlimitedAdd.java \
-    H5Ex_D_UnlimitedGzip.java \
-    H5Ex_D_UnlimitedMod.java \
-    H5Ex_D_Nbit.java \
-    H5Ex_D_Transform.java \
-    H5Ex_D_Sofloat.java \
-    H5Ex_D_Soint.java
-
-$(jarfile): classnoinst.stamp classes
-	$(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
-
-noinst_DATA = $(jarfile)
-
-.PHONY: classes
-
-check_SCRIPTS = JavaDatasetExample.sh
-TEST_SCRIPT = $(check_SCRIPTS)
-
-CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class JavaDatasetExample.sh
-
-clean:
-	rm -rf $(JAVAROOT)/*
-	rm -f $(jarfile)
-	rm -f classnoinst.stamp
-
-include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/datatypes/CMakeLists.txt b/java/examples/datatypes/CMakeLists.txt
deleted file mode 100644
index 5860429ef6e..00000000000
--- a/java/examples/datatypes/CMakeLists.txt
+++ /dev/null
@@ -1,116 +0,0 @@
-cmake_minimum_required (VERSION 3.18)
-project (HDFJAVA_EXAMPLES_DATATYPES Java)
-
-set (CMAKE_VERBOSE_MAKEFILE 1)
-
-set (HDF_JAVA_EXAMPLES
-    H5Ex_T_Array
-    H5Ex_T_ArrayAttribute
-    H5Ex_T_Bit
-    H5Ex_T_BitAttribute
-    H5Ex_T_Commit
-    H5Ex_T_Compound
-    H5Ex_T_CompoundAttribute
-    H5Ex_T_Float
-    H5Ex_T_FloatAttribute
-    H5Ex_T_Integer
-    H5Ex_T_IntegerAttribute
-    H5Ex_T_ObjectReference
-    H5Ex_T_ObjectReferenceAttribute
-    H5Ex_T_Opaque
-    H5Ex_T_OpaqueAttribute
-    H5Ex_T_String
-    H5Ex_T_StringAttribute
-    H5Ex_T_VLString
-)
-
-if (WIN32)
-  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
-else ()
-  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
-endif ()
-
-set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
-
-set (CMAKE_JAVA_CLASSPATH ".")
-foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
-  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
-endforeach ()
-
-foreach (example ${HDF_JAVA_EXAMPLES})
-  file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
-  "Main-Class: examples.datatypes.${example}
-"
-  )
-  add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
-  get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
-#  install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
-  get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
-  add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
-
-  #-----------------------------------------------------------------------------
-  # Add Target to clang-format
-  #-----------------------------------------------------------------------------
-  if (HDF5_ENABLE_FORMATTERS)
-    clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
-  endif ()
-endforeach ()
-
-set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
-
-set (CMAKE_JAVA_CLASSPATH ".")
-foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
-  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
-endforeach ()
-
-if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL)
-  get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME)
-  set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$<OR:$<CONFIG:Debug>,$<CONFIG:Developer>>:${CMAKE_DEBUG_POSTFIX}>;")
-
-  set (last_test "")
-  foreach (example ${HDF_JAVA_EXAMPLES})
-    add_test (
-        NAME JAVA_datatypes-${example}-clear-objects
-        COMMAND ${CMAKE_COMMAND} -E remove
-            ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-    )
-    if (last_test)
-      set_tests_properties (JAVA_datatypes-${example}-clear-objects PROPERTIES DEPENDS ${last_test})
-    endif ()
-
-    add_test (
-        NAME JAVA_datatypes-${example}-copy-objects
-        COMMAND ${CMAKE_COMMAND} -E copy_if_different
-            ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.datatypes.${example}.txt
-            ${HDFJAVA_EXAMPLES_DATATYPES_BINARY_DIR}/${example}.txt
-    )
-    set_tests_properties (JAVA_datatypes-${example}-copy-objects PROPERTIES DEPENDS JAVA_datatypes-${example}-clear-objects)
-    add_test (
-        NAME JAVA_datatypes-${example}
-        COMMAND "${CMAKE_COMMAND}"
-            -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
-            -D "TEST_PROGRAM=examples.datatypes.${example}"
-            -D "TEST_ARGS:STRING=${CMD_ARGS}"
-            -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${example}_JAR_FILE}"
-            -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
-            -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
-            -D "TEST_OUTPUT=datatypes/${example}.out"
-            -D "TEST_EXPECT=0"
-            -D "TEST_REFERENCE=datatypes/${example}.txt"
-            -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
-    )
-    set_tests_properties (JAVA_datatypes-${example} PROPERTIES
-        DEPENDS JAVA_datatypes-${example}-copy-objects
-    )
-
-    add_test (
-        NAME JAVA_datatypes-${example}-clean-objects
-        COMMAND ${CMAKE_COMMAND} -E remove
-            ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-    )
-    set_tests_properties (JAVA_datatypes-${example}-clean-objects PROPERTIES
-        DEPENDS JAVA_datatypes-${example}
-    )
-    set (last_test "JAVA_datatypes-${example}-clean-objects")
-  endforeach ()
-endif ()
diff --git a/java/examples/datatypes/H5Ex_T_Array.java b/java/examples/datatypes/H5Ex_T_Array.java
deleted file mode 100644
index 4e9de2a429b..00000000000
--- a/java/examples/datatypes/H5Ex_T_Array.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write array datatypes
-  to a dataset.  The program first writes integers arrays of
-  dimension ADIM0xADIM1 to a dataset with a dataspace of
-  DIM0, then closes the  file.  Next, it reopens the file,
-  reads back the data, and outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_Array {
-    private static String FILENAME    = "H5Ex_T_Array.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM0     = 4;
-    private static final int ADIM0    = 3;
-    private static final int ADIM1    = 5;
-    private static final int RANK     = 1;
-    private static final int NDIMS    = 2;
-
-    private static void CreateDataset()
-    {
-        long file_id        = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id    = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id     = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id   = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id     = HDF5Constants.H5I_INVALID_HID;
-        long[] dims         = {DIM0};
-        long[] adims        = {ADIM0, ADIM1};
-        int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
-
-        // Initialize data. indx is the element in the dataspace, jndx and kndx the
-        // elements within the array datatype.
-        for (int indx = 0; indx < DIM0; indx++)
-            for (int jndx = 0; jndx < ADIM0; jndx++)
-                for (int kndx = 0; kndx < ADIM1; kndx++)
-                    dset_data[indx][jndx][kndx] = indx * jndx - jndx * kndx + indx * kndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create array datatypes for file.
-        try {
-            filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create array datatypes for memory.
-        try {
-            memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
-                dataset_id =
-                    H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the dataset.
-        try {
-            if ((dataset_id >= 0) && (memtype_id >= 0))
-                H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id     = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id  = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id  = HDF5Constants.H5I_INVALID_HID;
-        long[] dims      = {DIM0};
-        long[] adims     = {ADIM0, ADIM1};
-        int[][][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get the datatype.
-        try {
-            if (dataset_id >= 0)
-                filetype_id = H5.H5Dget_type(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get the datatype's dimensions.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tget_array_dims(filetype_id, adims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to two-dimensional arrays (the
-        // elements of the dataset.
-        dset_data = new int[(int)dims[0]][(int)(adims[0])][(int)(adims[1])];
-
-        // Create array datatypes for memory.
-        try {
-            memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, adims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read data.
-        try {
-            if ((dataset_id >= 0) && (memtype_id >= 0))
-                H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.println(DATASETNAME + " [" + indx + "]:");
-            for (int jndx = 0; jndx < adims[0]; jndx++) {
-                System.out.print(" [");
-                for (int kndx = 0; kndx < adims[1]; kndx++)
-                    System.out.print(dset_data[indx][jndx][kndx] + " ");
-                System.out.println("]");
-            }
-            System.out.println();
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_Array.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_Array.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_ArrayAttribute.java b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
deleted file mode 100644
index 45b44c13e96..00000000000
--- a/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write array datatypes
-  to an attribute.  The program first writes integers arrays
-  of dimension ADIM0xADIM1 to an attribute with a dataspace
-  of DIM0, then closes the  file.  Next, it reopens the
-  file, reads back the data, and outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_ArrayAttribute {
-    private static String FILENAME      = "H5Ex_T_ArrayAttribute.h5";
-    private static String DATASETNAME   = "DS1";
-    private static String ATTRIBUTENAME = "A1";
-    private static final int DIM0       = 4;
-    private static final int ADIM0      = 3;
-    private static final int ADIM1      = 5;
-    private static final int RANK       = 1;
-    private static final int NDIMS      = 2;
-
-    private static void CreateDataset()
-    {
-        long file_id        = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id    = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id     = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id   = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id     = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims         = {DIM0};
-        long[] adims        = {ADIM0, ADIM1};
-        int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
-
-        // Initialize data. indx is the element in the dataspace, jndx and kndx the
-        // elements within the array datatype.
-        for (int indx = 0; indx < DIM0; indx++)
-            for (int jndx = 0; jndx < ADIM0; jndx++)
-                for (int kndx = 0; kndx < ADIM1; kndx++)
-                    dset_data[indx][jndx][kndx] = indx * jndx - jndx * kndx + indx * kndx;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create array datatypes for file.
-        try {
-            filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create array datatypes for memory.
-        try {
-            memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if (dataspace_id >= 0) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the attribute and write the array data to it.
-        try {
-            if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
-                attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
-                                            HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the dataset.
-        try {
-            if ((attribute_id >= 0) && (memtype_id >= 0))
-                H5.H5Awrite(attribute_id, memtype_id, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id  = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id   = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0};
-        long[] adims      = {ADIM0, ADIM1};
-        int[][][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
-                                                  HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get the datatype.
-        try {
-            if (attribute_id >= 0)
-                filetype_id = H5.H5Aget_type(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get the datatype's dimensions.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tget_array_dims(filetype_id, adims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to two-dimensional arrays (the
-        // elements of the dataset.
-        dset_data = new int[(int)dims[0]][(int)(adims[0])][(int)(adims[1])];
-
-        // Create array datatypes for memory.
-        try {
-            memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, adims);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read data.
-        try {
-            if ((attribute_id >= 0) && (memtype_id >= 0))
-                H5.H5Aread(attribute_id, memtype_id, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.println(ATTRIBUTENAME + " [" + indx + "]:");
-            for (int jndx = 0; jndx < adims[0]; jndx++) {
-                System.out.print(" [");
-                for (int kndx = 0; kndx < adims[1]; kndx++)
-                    System.out.print(dset_data[indx][jndx][kndx] + " ");
-                System.out.println("]");
-            }
-            System.out.println();
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_ArrayAttribute.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_ArrayAttribute.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_Bit.java b/java/examples/datatypes/H5Ex_T_Bit.java
deleted file mode 100644
index 6d1a25325b8..00000000000
--- a/java/examples/datatypes/H5Ex_T_Bit.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write bitfield
-  datatypes to a dataset.  The program first writes bit
-  fields to a dataset with a dataspace of DIM0xDIM1, then
-  closes the file.  Next, it reopens the file, reads back
-  the data, and outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_Bit {
-    private static String FILENAME    = "H5Ex_T_Bit.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM0     = 4;
-    private static final int DIM1     = 7;
-    private static final int RANK     = 2;
-
-    private static void CreateDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        int[][] dset_data = new int[DIM0][DIM1];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM0; indx++)
-            for (int jndx = 0; jndx < DIM1; jndx++) {
-                dset_data[indx][jndx] = 0;
-                dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
-                dset_data[indx][jndx] |= (indx & 0x03) << 2;          /* Field "B" */
-                dset_data[indx][jndx] |= (jndx & 0x03) << 4;          /* Field "C" */
-                dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
-            }
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the bitfield data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        int[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to two-dimensional arrays (the
-        // elements of the dataset.
-        dset_data = new int[(int)dims[0]][(int)(dims[1])];
-
-        // Read data.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println(DATASETNAME + ":");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [");
-            for (int jndx = 0; jndx < dims[1]; jndx++) {
-                System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", ");
-                System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", ");
-                System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", ");
-                System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}");
-            }
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_Bit.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_Bit.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_BitAttribute.java b/java/examples/datatypes/H5Ex_T_BitAttribute.java
deleted file mode 100644
index 3ad643a4a04..00000000000
--- a/java/examples/datatypes/H5Ex_T_BitAttribute.java
+++ /dev/null
@@ -1,266 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write bitfield
-  datatypes to an attribute.  The program first writes bit
-  fields to an attribute with a dataspace of DIM0xDIM1, then
-  closes the file.  Next, it reopens the file, reads back
-  the data, and outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_BitAttribute {
-    private static String FILENAME      = "H5Ex_T_BitAttribute.h5";
-    private static String DATASETNAME   = "DS1";
-    private static String ATTRIBUTENAME = "A1";
-    private static final int DIM0       = 4;
-    private static final int DIM1       = 7;
-    private static final int RANK       = 2;
-
-    private static void CreateDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        int[][] dset_data = new int[DIM0][DIM1];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM0; indx++)
-            for (int jndx = 0; jndx < DIM1; jndx++) {
-                dset_data[indx][jndx] = 0;
-                dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
-                dset_data[indx][jndx] |= (indx & 0x03) << 2;          /* Field "B" */
-                dset_data[indx][jndx] |= (jndx & 0x03) << 4;          /* Field "C" */
-                dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
-            }
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if (dataspace_id >= 0) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the attribute and write the array data to it.
-        try {
-            if ((dataset_id >= 0) && (dataspace_id >= 0))
-                attribute_id =
-                    H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the dataset.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        int[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
-                                                  HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (attribute_id >= 0)
-                dataspace_id = H5.H5Aget_space(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to two-dimensional arrays (the
-        // elements of the dataset.
-        dset_data = new int[(int)dims[0]][(int)(dims[1])];
-
-        // Read data.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println(ATTRIBUTENAME + ":");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [");
-            for (int jndx = 0; jndx < dims[1]; jndx++) {
-                System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", ");
-                System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", ");
-                System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", ");
-                System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}");
-            }
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_BitAttribute.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_BitAttribute.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_Commit.java b/java/examples/datatypes/H5Ex_T_Commit.java
deleted file mode 100644
index 44586efd574..00000000000
--- a/java/examples/datatypes/H5Ex_T_Commit.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to commit a named datatype to a
-  file, and read back that datatype.  The program first
-  defines a compound datatype, commits it to a file, then
-  closes the file.  Next, it reopens the file, opens the
-  datatype, and outputs the names of its fields to the
-  screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_Commit {
-    private static String FILENAME           = "H5Ex_T_Commit.h5";
-    private static String DATATYPENAME       = "Sensor_Type";
-    protected static final int INTEGERSIZE   = 4;
-    protected static final int DOUBLESIZE    = 8;
-    protected final static int MAXSTRINGSIZE = 80;
-
-    // Values for the various classes of datatypes
-    enum H5T_class {
-        H5T_NO_CLASS(HDF5Constants.H5T_NO_CLASS),   // error
-        H5T_INTEGER(HDF5Constants.H5T_INTEGER),     // integer types
-        H5T_FLOAT(HDF5Constants.H5T_FLOAT),         // floating-point types
-        H5T_TIME(HDF5Constants.H5T_TIME),           // date and time types
-        H5T_STRING(HDF5Constants.H5T_STRING),       // character string types
-        H5T_BITFIELD(HDF5Constants.H5T_BITFIELD),   // bit field types
-        H5T_OPAQUE(HDF5Constants.H5T_OPAQUE),       // opaque types
-        H5T_COMPOUND(HDF5Constants.H5T_COMPOUND),   // compound types
-        H5T_REFERENCE(HDF5Constants.H5T_REFERENCE), // reference types
-        H5T_ENUM(HDF5Constants.H5T_ENUM),           // enumeration types
-        H5T_VLEN(HDF5Constants.H5T_VLEN),           // Variable-Length types
-        H5T_ARRAY(HDF5Constants.H5T_ARRAY),         // Array types
-        H5T_NCLASSES(11);                           // this must be last
-
-        private static final Map<Long, H5T_class> lookup = new HashMap<Long, H5T_class>();
-
-        static
-        {
-            for (H5T_class s : EnumSet.allOf(H5T_class.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private long code;
-
-        H5T_class(long layout_type) { this.code = layout_type; }
-
-        public long getCode() { return this.code; }
-
-        public static H5T_class get(long typeclass_id) { return lookup.get(typeclass_id); }
-    }
-
-    // The supporting Sensor_Datatype class.
-    private static class Sensor_Datatype {
-        static int numberMembers = 4;
-        static int[] memberDims  = {1, 1, 1, 1};
-
-        String[] memberNames       = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
-        long[] memberFileTypes     = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
-                                  HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
-        static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
-
-        // Data size is the storage size for the members not the object.
-        static long getDataSize()
-        {
-            long data_size = 0;
-            for (int indx = 0; indx < numberMembers; indx++)
-                data_size += memberStorage[indx] * memberDims[indx];
-            return data_size;
-        }
-
-        static int getOffset(int memberItem)
-        {
-            int data_offset = 0;
-            for (int indx = 0; indx < memberItem; indx++)
-                data_offset += memberStorage[indx];
-            return data_offset;
-        }
-    }
-
-    private static void CreateDataType()
-    {
-        long file_id              = HDF5Constants.H5I_INVALID_HID;
-        long strtype_id           = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id          = HDF5Constants.H5I_INVALID_HID;
-        Sensor_Datatype datatypes = new Sensor_Datatype();
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create string datatype.
-        try {
-            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (strtype_id >= 0)
-                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the compound datatype for the file. Because the standard
-        // types we are using for the file may have different sizes than
-        // the corresponding native types, we must manually calculate the
-        // offset of each member.
-        try {
-            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
-            if (filetype_id >= 0) {
-                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
-                    long type_id = datatypes.memberFileTypes[indx];
-                    if (type_id == HDF5Constants.H5T_C_S1)
-                        type_id = strtype_id;
-                    H5.H5Tinsert(filetype_id, datatypes.memberNames[indx], Sensor_Datatype.getOffset(indx),
-                                 type_id);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Commit the compound datatype to the file, creating a named datatype.
-        try {
-            if ((file_id >= 0) && (filetype_id >= 0))
-                H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT,
-                             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the str type.
-        try {
-            if (strtype_id >= 0)
-                H5.H5Tclose(strtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataType()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long typeclass_id = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id  = HDF5Constants.H5I_INVALID_HID;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open named datatype.
-        try {
-            if (file_id >= 0)
-                filetype_id = H5.H5Topen(file_id, DATATYPENAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Named datatype:  " + DATATYPENAME + ":");
-
-        // Get datatype class. If it isn't compound, we won't print anything.
-        try {
-            if (filetype_id >= 0)
-                typeclass_id = H5.H5Tget_class(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Read data.
-        try {
-            if (H5T_class.get(typeclass_id) == H5T_class.H5T_COMPOUND) {
-                System.out.println("   Class: H5T_COMPOUND");
-                int nmembs = H5.H5Tget_nmembers(filetype_id);
-                // Iterate over compound datatype members.
-                for (int indx = 0; indx < nmembs; indx++) {
-                    String member_name = H5.H5Tget_member_name(filetype_id, indx);
-                    System.out.println("    " + member_name);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_Commit.CreateDataType();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_Commit.ReadDataType();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_Compound.java b/java/examples/datatypes/H5Ex_T_Compound.java
deleted file mode 100644
index 7a46bcf9361..00000000000
--- a/java/examples/datatypes/H5Ex_T_Compound.java
+++ /dev/null
@@ -1,462 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write compound
-  datatypes to a dataset.  The program first writes
-  compound structures to a dataset with a dataspace of DIM0,
-  then closes the file.  Next, it reopens the file, reads
-  back the data, and outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_Compound {
-    private static String FILENAME           = "H5Ex_T_Compound.h5";
-    private static String DATASETNAME        = "DS1";
-    private static final int DIM0            = 4;
-    private static final int RANK            = 1;
-    protected static final int INTEGERSIZE   = 4;
-    protected static final int DOUBLESIZE    = 8;
-    protected final static int MAXSTRINGSIZE = 80;
-
-    static class Sensor_Datatype {
-        static int numberMembers = 4;
-        static int[] memberDims  = {1, 1, 1, 1};
-
-        static String[] memberNames   = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
-        static long[] memberMemTypes  = {HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
-                                        HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE};
-        static long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
-                                         HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
-        static int[] memberStorage    = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
-
-        // Data size is the storage size for the members.
-        static long getTotalDataSize()
-        {
-            long data_size = 0;
-            for (int indx = 0; indx < numberMembers; indx++)
-                data_size += memberStorage[indx] * memberDims[indx];
-            return DIM0 * data_size;
-        }
-
-        static long getDataSize()
-        {
-            long data_size = 0;
-            for (int indx = 0; indx < numberMembers; indx++)
-                data_size += memberStorage[indx] * memberDims[indx];
-            return data_size;
-        }
-
-        static int getOffset(int memberItem)
-        {
-            int data_offset = 0;
-            for (int indx = 0; indx < memberItem; indx++)
-                data_offset += memberStorage[indx];
-            return data_offset;
-        }
-    }
-
-    static class Sensor {
-        public int serial_no;
-        public String location;
-        public double temperature;
-        public double pressure;
-
-        Sensor(int serial_no, String location, double temperature, double pressure)
-        {
-            this.serial_no   = serial_no;
-            this.location    = location;
-            this.temperature = temperature;
-            this.pressure    = pressure;
-        }
-
-        Sensor(List data)
-        {
-            this.serial_no   = (int)data.get(0);
-            this.location    = (String)data.get(1);
-            this.temperature = (double)data.get(2);
-            this.pressure    = (double)data.get(3);
-        }
-
-        Sensor(ByteBuffer databuf, int dbposition) { readBuffer(databuf, dbposition); }
-
-        void writeBuffer(ByteBuffer databuf, int dbposition)
-        {
-            databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
-            byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
-            int arraylen    = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
-            for (int ndx = 0; ndx < arraylen; ndx++)
-                databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
-            for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
-                databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte)0);
-            databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
-            databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
-        }
-
-        void readBuffer(ByteBuffer databuf, int dbposition)
-        {
-            this.serial_no       = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
-            ByteBuffer stringbuf = databuf.duplicate();
-            stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
-            stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
-            byte[] bytearr = new byte[stringbuf.remaining()];
-            stringbuf.get(bytearr);
-            this.location    = new String(bytearr, Charset.forName("UTF-8")).trim();
-            this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
-            this.pressure    = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
-        }
-
-        List get()
-        {
-            List data = new ArrayList<>();
-            data.add(this.serial_no);
-            data.add(this.location);
-            data.add(this.temperature);
-            data.add(this.pressure);
-            return data;
-        }
-
-        void put(List data)
-        {
-            this.serial_no   = (int)data.get(0);
-            this.location    = (String)data.get(1);
-            this.temperature = (double)data.get(2);
-            this.pressure    = (double)data.get(3);
-        }
-
-        @Override
-        public String toString()
-        {
-            return String.format("Serial number   : " + serial_no + "%n"
-                                 + "Location        : " + location + "%n"
-                                 + "Temperature (F) : " + temperature + "%n"
-                                 + "Pressure (inHg) : " + pressure + "%n");
-        }
-    }
-
-    private static void CreateDataset()
-    {
-        long file_id            = HDF5Constants.H5I_INVALID_HID;
-        long strtype_id         = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id         = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id        = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id         = HDF5Constants.H5I_INVALID_HID;
-        long[] dims             = {DIM0};
-        ArrayList[] object_data = new ArrayList[DIM0];
-        byte[] dset_data        = null;
-
-        // Initialize data.
-        object_data[0] = (ArrayList) new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57).get();
-        object_data[1] = (ArrayList) new Sensor(1184, new String("Intake"), 55.12, 22.95).get();
-        object_data[2] = (ArrayList) new Sensor(1027, new String("Intake manifold"), 103.55, 31.23).get();
-        object_data[3] = (ArrayList) new Sensor(1313, new String("Exhaust manifold"), 1252.89, 84.11).get();
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create string datatype.
-        try {
-            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (strtype_id >= 0)
-                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the compound datatype for memory.
-        try {
-            memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
-            if (memtype_id >= 0) {
-                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
-                    long type_id = Sensor_Datatype.memberMemTypes[indx];
-                    if (type_id == HDF5Constants.H5T_C_S1)
-                        type_id = strtype_id;
-                    H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
-                                 Sensor_Datatype.getOffset(indx), type_id);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the compound datatype for the file. Because the standard
-        // types we are using for the file may have different sizes than
-        // the corresponding native types, we must manually calculate the
-        // offset of each member.
-        try {
-            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
-            if (filetype_id >= 0) {
-                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
-                    long type_id = Sensor_Datatype.memberFileTypes[indx];
-                    if (type_id == HDF5Constants.H5T_C_S1)
-                        type_id = strtype_id;
-                    H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
-                                 Sensor_Datatype.getOffset(indx), type_id);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
-                dataset_id =
-                    H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the compound data to the dataset.
-        try {
-            if ((dataset_id >= 0) && (memtype_id >= 0))
-                H5.H5DwriteVL(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                              HDF5Constants.H5P_DEFAULT, (Object[])object_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (strtype_id >= 0)
-                H5.H5Tclose(strtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id          = HDF5Constants.H5I_INVALID_HID;
-        long strtype_id       = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id     = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id       = HDF5Constants.H5I_INVALID_HID;
-        long[] dims           = {DIM0};
-        Sensor[] object_data2 = new Sensor[(int)dims[0]];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create string datatype.
-        try {
-            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (strtype_id >= 0)
-                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the compound datatype for memory.
-        try {
-            memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
-            if (memtype_id >= 0) {
-                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
-                    long type_id = Sensor_Datatype.memberMemTypes[indx];
-                    if (type_id == HDF5Constants.H5T_C_S1)
-                        type_id = strtype_id;
-                    H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
-                                 Sensor_Datatype.getOffset(indx), type_id);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        ArrayList[] object_data = new ArrayList[(int)dims[0]];
-
-        // Read data.
-        try {
-            if ((dataset_id >= 0) && (memtype_id >= 0))
-                H5.H5DreadVL(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                             HDF5Constants.H5P_DEFAULT, (Object[])object_data);
-
-            for (int indx = 0; indx < (int)dims[0]; indx++) {
-                object_data2[indx] = new Sensor(object_data[indx]);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.println(DATASETNAME + " [" + indx + "]:");
-            System.out.println(object_data2[indx].toString());
-        }
-        System.out.println();
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (strtype_id >= 0)
-                H5.H5Tclose(strtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_Compound.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_Compound.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_CompoundAttribute.java b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
deleted file mode 100644
index 93e15fd192c..00000000000
--- a/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
+++ /dev/null
@@ -1,504 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write compound
-  datatypes to an attribute.  The program first writes
-  compound structures to an attribute with a dataspace of
-  DIM0, then closes the file.  Next, it reopens the file,
-  reads back the data, and outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_CompoundAttribute {
-    private static String FILENAME           = "H5Ex_T_CompoundAttribute.h5";
-    private static String DATASETNAME        = "DS1";
-    private static String ATTRIBUTENAME      = "A1";
-    private static final int DIM0            = 4;
-    private static final int RANK            = 1;
-    protected static final int INTEGERSIZE   = 4;
-    protected static final int DOUBLESIZE    = 8;
-    protected final static int MAXSTRINGSIZE = 80;
-
-    static class Sensor_Datatype {
-        static int numberMembers = 4;
-        static int[] memberDims  = {1, 1, 1, 1};
-
-        static String[] memberNames   = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
-        static long[] memberMemTypes  = {HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
-                                        HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE};
-        static long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
-                                         HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
-        static int[] memberStorage    = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
-
-        // Data size is the storage size for the members not the object.
-        static long getTotalDataSize()
-        {
-            long data_size = 0;
-            for (int indx = 0; indx < numberMembers; indx++)
-                data_size += memberStorage[indx] * memberDims[indx];
-            return DIM0 * data_size;
-        }
-
-        static long getDataSize()
-        {
-            long data_size = 0;
-            for (int indx = 0; indx < numberMembers; indx++)
-                data_size += memberStorage[indx] * memberDims[indx];
-            return data_size;
-        }
-
-        static int getOffset(int memberItem)
-        {
-            int data_offset = 0;
-            for (int indx = 0; indx < memberItem; indx++)
-                data_offset += memberStorage[indx];
-            return data_offset;
-        }
-    }
-
-    static class Sensor {
-        public int serial_no;
-        public String location;
-        public double temperature;
-        public double pressure;
-
-        Sensor(int serial_no, String location, double temperature, double pressure)
-        {
-            this.serial_no   = serial_no;
-            this.location    = location;
-            this.temperature = temperature;
-            this.pressure    = pressure;
-        }
-
-        Sensor(List data)
-        {
-            this.serial_no   = (int)data.get(0);
-            this.location    = (String)data.get(1);
-            this.temperature = (double)data.get(2);
-            this.pressure    = (double)data.get(3);
-        }
-
-        Sensor(ByteBuffer databuf, int dbposition) { readBuffer(databuf, dbposition); }
-
-        void writeBuffer(ByteBuffer databuf, int dbposition)
-        {
-            databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
-            byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
-            int arraylen    = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
-            for (int ndx = 0; ndx < arraylen; ndx++)
-                databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
-            for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
-                databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte)0);
-            databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
-            databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
-        }
-
-        void readBuffer(ByteBuffer databuf, int dbposition)
-        {
-            this.serial_no       = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
-            ByteBuffer stringbuf = databuf.duplicate();
-            stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
-            stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
-            byte[] bytearr = new byte[stringbuf.remaining()];
-            stringbuf.get(bytearr);
-            this.location    = new String(bytearr, Charset.forName("UTF-8")).trim();
-            this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
-            this.pressure    = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
-        }
-
-        List get()
-        {
-            List data = new ArrayList<>();
-            data.add(this.serial_no);
-            data.add(this.location);
-            data.add(this.temperature);
-            data.add(this.pressure);
-            return data;
-        }
-
-        void put(List data)
-        {
-            this.serial_no   = (int)data.get(0);
-            this.location    = (String)data.get(1);
-            this.temperature = (double)data.get(2);
-            this.pressure    = (double)data.get(3);
-        }
-
-        @Override
-        public String toString()
-        {
-            return String.format("Serial number   : " + serial_no + "%n"
-                                 + "Location        : " + location + "%n"
-                                 + "Temperature (F) : " + temperature + "%n"
-                                 + "Pressure (inHg) : " + pressure + "%n");
-        }
-    }
-
-    private static void CreateDataset()
-    {
-        long file_id            = HDF5Constants.H5I_INVALID_HID;
-        long strtype_id         = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id         = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id        = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id         = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id       = HDF5Constants.H5I_INVALID_HID;
-        long[] dims             = {DIM0};
-        ArrayList[] object_data = new ArrayList[DIM0];
-        byte[] dset_data        = null;
-
-        // Initialize data.
-        object_data[0] = (ArrayList) new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57).get();
-        object_data[1] = (ArrayList) new Sensor(1184, new String("Intake"), 55.12, 22.95).get();
-        object_data[2] = (ArrayList) new Sensor(1027, new String("Intake manifold"), 103.55, 31.23).get();
-        object_data[3] = (ArrayList) new Sensor(1313, new String("Exhaust manifold"), 1252.89, 84.11).get();
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create string datatype.
-        try {
-            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (strtype_id >= 0)
-                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the compound datatype for memory.
-        try {
-            memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
-            if (memtype_id >= 0) {
-                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
-                    long type_id = Sensor_Datatype.memberMemTypes[indx];
-                    if (type_id == HDF5Constants.H5T_C_S1)
-                        type_id = strtype_id;
-                    H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
-                                 Sensor_Datatype.getOffset(indx), type_id);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the compound datatype for the file. Because the standard
-        // types we are using for the file may have different sizes than
-        // the corresponding native types, we must manually calculate the
-        // offset of each member.
-        try {
-            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
-            if (filetype_id >= 0) {
-                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
-                    long type_id = Sensor_Datatype.memberFileTypes[indx];
-                    if (type_id == HDF5Constants.H5T_C_S1)
-                        type_id = strtype_id;
-                    H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
-                                 Sensor_Datatype.getOffset(indx), type_id);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if (dataspace_id >= 0) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the attribute.
-        try {
-            if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
-                attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
-                                            HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the compound data.
-        try {
-            if ((attribute_id >= 0) && (memtype_id >= 0))
-                H5.H5AwriteVL(attribute_id, memtype_id, (Object[])object_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (strtype_id >= 0)
-                H5.H5Tclose(strtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id          = HDF5Constants.H5I_INVALID_HID;
-        long strtype_id       = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id     = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id       = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id     = HDF5Constants.H5I_INVALID_HID;
-        long[] dims           = {DIM0};
-        Sensor[] object_data2 = new Sensor[(int)dims[0]];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
-                                                  HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer. This is a
-        // three dimensional dataset when the array datatype is included so
-        // the dynamic allocation must be done in steps.
-        try {
-            if (attribute_id >= 0)
-                dataspace_id = H5.H5Aget_space(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create string datatype.
-        try {
-            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (strtype_id >= 0)
-                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the compound datatype for memory.
-        try {
-            memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
-            if (memtype_id >= 0) {
-                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
-                    long type_id = Sensor_Datatype.memberMemTypes[indx];
-                    if (type_id == HDF5Constants.H5T_C_S1)
-                        type_id = strtype_id;
-                    H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
-                                 Sensor_Datatype.getOffset(indx), type_id);
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        ArrayList[] object_data = new ArrayList[(int)dims[0]];
-
-        // Read data.
-        try {
-            if ((attribute_id >= 0) && (memtype_id >= 0))
-                H5.H5AreadVL(attribute_id, memtype_id, (Object[])object_data);
-
-            for (int indx = 0; indx < (int)dims[0]; indx++) {
-                object_data2[indx] = new Sensor(object_data[indx]);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.println(ATTRIBUTENAME + " [" + indx + "]:");
-            System.out.println(object_data2[indx].toString());
-        }
-        System.out.println();
-
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (strtype_id >= 0)
-                H5.H5Tclose(strtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_CompoundAttribute.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_CompoundAttribute.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_Float.java b/java/examples/datatypes/H5Ex_T_Float.java
deleted file mode 100644
index 9ca099e7f18..00000000000
--- a/java/examples/datatypes/H5Ex_T_Float.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write integer datatypes
-  to a dataset.  The program first writes integers to a
-  dataset with a dataspace of DIM0xDIM1, then closes the
-  file.  Next, it reopens the file, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import java.text.DecimalFormat;
-import java.text.DecimalFormatSymbols;
-import java.util.Locale;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_Float {
-    private static String FILENAME    = "H5Ex_T_Float.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM0     = 4;
-    private static final int DIM1     = 7;
-    private static final int RANK     = 2;
-
-    private static void CreateDataset()
-    {
-        long file_id         = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id    = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id      = HDF5Constants.H5I_INVALID_HID;
-        long[] dims          = {DIM0, DIM1};
-        double[][] dset_data = new double[DIM0][DIM1];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM0; indx++)
-            for (int jndx = 0; jndx < DIM1; jndx++) {
-                dset_data[indx][jndx] = indx / (jndx + 0.5) + jndx;
-            }
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset and write the floating point data to it. In
-        // this example we will save the data as 64 bit little endian IEEE
-        // floating point numbers, regardless of the native type. The HDF5
-        // library automatically converts between different floating point
-        // types.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        double[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to two-dimensional arrays (the
-        // elements of the dataset.
-        dset_data = new double[(int)dims[0]][(int)(dims[1])];
-
-        // Read data.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        DecimalFormat df = new DecimalFormat("#,##0.0000", new DecimalFormatSymbols(Locale.US));
-        System.out.println(DATASETNAME + ":");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [");
-            for (int jndx = 0; jndx < dims[1]; jndx++) {
-                System.out.print(" " + df.format(dset_data[indx][jndx]));
-            }
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_Float.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_Float.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_FloatAttribute.java b/java/examples/datatypes/H5Ex_T_FloatAttribute.java
deleted file mode 100644
index 426c4ddd69d..00000000000
--- a/java/examples/datatypes/H5Ex_T_FloatAttribute.java
+++ /dev/null
@@ -1,264 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write floating point
-  datatypes to an attribute.  The program first writes
-  floating point numbers to an attribute with a dataspace of
-  DIM0xDIM1, then closes the file.  Next, it reopens the
-  file, reads back the data, and outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import java.text.DecimalFormat;
-import java.text.DecimalFormatSymbols;
-import java.util.Locale;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_FloatAttribute {
-    private static String FILENAME      = "H5Ex_T_FloatAttribute.h5";
-    private static String DATASETNAME   = "DS1";
-    private static String ATTRIBUTENAME = "A1";
-    private static final int DIM0       = 4;
-    private static final int DIM1       = 7;
-    private static final int RANK       = 2;
-
-    private static void CreateDataset()
-    {
-        long file_id         = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id    = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id      = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id    = HDF5Constants.H5I_INVALID_HID;
-        long[] dims          = {DIM0, DIM1};
-        double[][] dset_data = new double[DIM0][DIM1];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM0; indx++)
-            for (int jndx = 0; jndx < DIM1; jndx++) {
-                dset_data[indx][jndx] = indx / (jndx + 0.5) + jndx;
-            }
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if (dataspace_id >= 0) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the attribute and write the array data to it.
-        try {
-            if ((dataset_id >= 0) && (dataspace_id >= 0))
-                attribute_id =
-                    H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the dataset.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        double[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
-                                                  HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (attribute_id >= 0)
-                dataspace_id = H5.H5Aget_space(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to two-dimensional arrays (the
-        // elements of the dataset.
-        dset_data = new double[(int)dims[0]][(int)(dims[1])];
-
-        // Read data.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        DecimalFormat df = new DecimalFormat("#,##0.0000", new DecimalFormatSymbols(Locale.US));
-        System.out.println(ATTRIBUTENAME + ":");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [");
-            for (int jndx = 0; jndx < dims[1]; jndx++) {
-                System.out.print(" " + df.format(dset_data[indx][jndx]));
-            }
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_FloatAttribute.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_FloatAttribute.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_Integer.java b/java/examples/datatypes/H5Ex_T_Integer.java
deleted file mode 100644
index 919ea7e7aad..00000000000
--- a/java/examples/datatypes/H5Ex_T_Integer.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write integer datatypes
-  to a dataset.  The program first writes integers to a
-  dataset with a dataspace of DIM0xDIM1, then closes the
-  file.  Next, it reopens the file, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import java.text.DecimalFormat;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_Integer {
-    private static String FILENAME    = "H5Ex_T_Integer.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM0     = 4;
-    private static final int DIM1     = 7;
-    private static final int RANK     = 2;
-
-    private static void CreateDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        int[][] dset_data = new int[DIM0][DIM1];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM0; indx++)
-            for (int jndx = 0; jndx < DIM1; jndx++) {
-                dset_data[indx][jndx] = indx * jndx - jndx;
-            }
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset and write the integer data to it. In this
-        // example we will save the data as 64 bit big endian integers,
-        // regardless of the native integer type. The HDF5 library
-        // automatically converts between different integer types.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        int[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to two-dimensional arrays (the
-        // elements of the dataset.
-        dset_data = new int[(int)dims[0]][(int)(dims[1])];
-
-        // Read data.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        DecimalFormat df = new DecimalFormat("#,##0");
-        System.out.println(DATASETNAME + ":");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [");
-            for (int jndx = 0; jndx < dims[1]; jndx++) {
-                System.out.print(" " + df.format(dset_data[indx][jndx]));
-            }
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_Integer.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_Integer.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_IntegerAttribute.java b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
deleted file mode 100644
index 4ec98c4829f..00000000000
--- a/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write integer datatypes
-  to an attribute.  The program first writes integers to an
-  attribute with a dataspace of DIM0xDIM1, then closes the
-  file.  Next, it reopens the file, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import java.text.DecimalFormat;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_IntegerAttribute {
-    private static String FILENAME      = "H5Ex_T_IntegerAttribute.h5";
-    private static String DATASETNAME   = "DS1";
-    private static String ATTRIBUTENAME = "A1";
-    private static final int DIM0       = 4;
-    private static final int DIM1       = 7;
-    private static final int RANK       = 2;
-
-    private static void CreateDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        int[][] dset_data = new int[DIM0][DIM1];
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM0; indx++)
-            for (int jndx = 0; jndx < DIM1; jndx++) {
-                dset_data[indx][jndx] = indx * jndx - jndx;
-            }
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if (dataspace_id >= 0) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the attribute and write the array data to it.
-        try {
-            if ((dataset_id >= 0) && (dataspace_id >= 0))
-                attribute_id =
-                    H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the dataset.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0, DIM1};
-        int[][] dset_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
-                                                  HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (attribute_id >= 0)
-                dataspace_id = H5.H5Aget_space(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate array of pointers to two-dimensional arrays (the
-        // elements of the dataset.
-        dset_data = new int[(int)dims[0]][(int)(dims[1])];
-
-        // Read data.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_INT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        DecimalFormat df = new DecimalFormat("#,##0");
-        System.out.println(ATTRIBUTENAME + ":");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(" [");
-            for (int jndx = 0; jndx < dims[1]; jndx++) {
-                System.out.print(" " + df.format(dset_data[indx][jndx]));
-            }
-            System.out.println("]");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_IntegerAttribute.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_IntegerAttribute.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReference.java b/java/examples/datatypes/H5Ex_T_ObjectReference.java
deleted file mode 100644
index 9220d8f3a2d..00000000000
--- a/java/examples/datatypes/H5Ex_T_ObjectReference.java
+++ /dev/null
@@ -1,329 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write object references
-  to a dataset.  The program first creates objects in the
-  file and writes references to those objects to a dataset
-  with a dataspace of DIM0, then closes the file.  Next, it
-  reopens the file, dereferences the references, and outputs
-  the names of their targets to the screen.
- ************************************************************/
-package examples.datatypes;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_ObjectReference {
-    private static String FILENAME     = "H5Ex_T_ObjectReference.h5";
-    private static String DATASETNAME  = "DS1";
-    private static String DATASETNAME2 = "DS2";
-    private static String GROUPNAME    = "G1";
-    private static final int DIM0      = 2;
-    private static final int RANK      = 1;
-
-    // Values for the status of space allocation
-    enum H5G_obj {
-        H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN),     /* Unknown object type */
-        H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP),         /* Object is a group */
-        H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET),     /* Object is a dataset */
-        H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
-        private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
-
-        static
-        {
-            for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5G_obj(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5G_obj get(int code) { return lookup.get(code); }
-    }
-
-    private static void writeObjRef()
-    {
-        long file_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id  = HDF5Constants.H5I_INVALID_HID;
-        long filespace_id  = HDF5Constants.H5I_INVALID_HID;
-        long group_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id    = HDF5Constants.H5I_INVALID_HID;
-        long[] dims        = {DIM0};
-        byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if ((file_id >= 0) && (dataspace_id >= 0)) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                if (dataset_id >= 0)
-                    H5.H5Dclose(dataset_id);
-                dataset_id = HDF5Constants.H5I_INVALID_HID;
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create a group in the file.
-        try {
-            if (file_id >= 0)
-                group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
-                                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-            group_id = HDF5Constants.H5I_INVALID_HID;
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (file_id >= 0) {
-                try {
-                    dset_data[0] = H5.H5Rcreate_object(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT);
-                }
-                catch (Throwable err) {
-                    err.printStackTrace();
-                }
-
-                try {
-                    dset_data[1] = H5.H5Rcreate_object(file_id, DATASETNAME2, HDF5Constants.H5P_DEFAULT);
-                }
-                catch (Throwable err) {
-                    err.printStackTrace();
-                }
-            }
-
-            // Create dataspace. Setting maximum size to NULL sets the maximum
-            // size to be the current size.
-            try {
-                filespace_id = H5.H5Screate_simple(RANK, dims, null);
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-
-            // Create the dataset.
-            try {
-                if ((file_id >= 0) && (filespace_id >= 0))
-                    dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_REF, filespace_id,
-                                              HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                              HDF5Constants.H5P_DEFAULT);
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-
-            // Write the object references to it.
-            try {
-                if (dataset_id >= 0)
-                    H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
-                                HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        catch (Exception ex) {
-            ex.printStackTrace();
-        }
-        finally {
-            try {
-                H5.H5Rdestroy(dset_data[1]);
-            }
-            catch (Exception ex) {
-            }
-            try {
-                H5.H5Rdestroy(dset_data[0]);
-            }
-            catch (Exception ex) {
-            }
-        }
-
-        // End access to the dataset and release resources used by it.
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (filespace_id >= 0)
-                H5.H5Sclose(filespace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readObjRef()
-    {
-        long file_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id    = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id  = HDF5Constants.H5I_INVALID_HID;
-        int object_type    = -1;
-        long object_id     = HDF5Constants.H5I_INVALID_HID;
-        long[] dims        = {DIM0};
-        byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-
-            // Open an existing dataset.
-            try {
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-
-                try {
-                    // Get dataspace and allocate memory for read buffer.
-                    dataspace_id = H5.H5Dget_space(dataset_id);
-                    H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-
-                    // Read data.
-                    H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
-                               HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-
-                    // Output the data to the screen.
-                    for (int indx = 0; indx < dims[0]; indx++) {
-                        System.out.println(DATASETNAME + "[" + indx + "]:");
-                        System.out.print("  ->");
-                        // Open the referenced object, get its name and type.
-                        try {
-                            object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
-                                                          HDF5Constants.H5P_DEFAULT);
-                            try {
-                                object_type = H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
-                                String obj_name = null;
-                                if (object_type >= 0) {
-                                    // Get the name.
-                                    obj_name = H5.H5Iget_name(object_id);
-                                }
-                                if ((object_id >= 0) && (object_type >= -1)) {
-                                    switch (H5G_obj.get(object_type)) {
-                                    case H5G_GROUP:
-                                        System.out.print("H5G_GROUP");
-                                        break;
-                                    case H5G_DATASET:
-                                        System.out.print("H5G_DATASET");
-                                        break;
-                                    case H5G_TYPE:
-                                        System.out.print("H5G_TYPE");
-                                        break;
-                                    default:
-                                        System.out.print("UNHANDLED");
-                                    }
-                                }
-                                // Print the name.
-                                System.out.println(": " + obj_name);
-                            }
-                            catch (Exception e) {
-                                e.printStackTrace();
-                            }
-                            finally {
-                                try {
-                                    H5.H5Oclose(object_id);
-                                }
-                                catch (Exception e) {
-                                }
-                            }
-                        }
-                        catch (Exception e4) {
-                            e4.printStackTrace();
-                        }
-                        finally {
-                            try {
-                                H5.H5Rdestroy(dset_data[indx]);
-                            }
-                            catch (Exception e4) {
-                            }
-                        }
-                    } // end for
-                }
-                catch (Exception e3) {
-                    e3.printStackTrace();
-                }
-                finally {
-                    try {
-                        H5.H5Sclose(dataspace_id);
-                    }
-                    catch (Exception e3) {
-                    }
-                }
-            }
-            catch (Exception e2) {
-                e2.printStackTrace();
-            }
-            finally {
-                try {
-                    H5.H5Dclose(dataset_id);
-                }
-                catch (Exception e2) {
-                }
-            }
-        }
-        catch (Exception e1) {
-            e1.printStackTrace();
-        }
-        finally {
-            try {
-                H5.H5Fclose(file_id);
-            }
-            catch (Exception e1) {
-            }
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        // Check if gzip compression is available and can be used for both
-        // compression and decompression. Normally we do not perform error
-        // checking in these examples for the sake of clarity, but in this
-        // case we will make an exception because this filter is an
-        // optional part of the hdf5 library.
-        H5Ex_T_ObjectReference.writeObjRef();
-        H5Ex_T_ObjectReference.readObjRef();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
deleted file mode 100644
index be84e51ecab..00000000000
--- a/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
+++ /dev/null
@@ -1,367 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write object references
-  to an attribute.  The program first creates objects in the
-  file and writes references to those objects to an
-  attribute with a dataspace of DIM0, then closes the file.
-  Next, it reopens the file, dereferences the references,
-  and outputs the names of their targets to the screen.
- ************************************************************/
-package examples.datatypes;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_ObjectReferenceAttribute {
-    private static String FILENAME      = "H5Ex_T_ObjectReferenceAttribute.h5";
-    private static String DATASETNAME   = "DS1";
-    private static String ATTRIBUTENAME = "A1";
-    private static String DATASETNAME2  = "DS2";
-    private static String GROUPNAME     = "G1";
-    private static final int DIM0       = 2;
-    private static final int RANK       = 1;
-
-    // Values for the status of space allocation
-    enum H5G_obj {
-        H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN),     /* Unknown object type */
-        H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP),         /* Object is a group */
-        H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET),     /* Object is a dataset */
-        H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
-        private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
-
-        static
-        {
-            for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5G_obj(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5G_obj get(int code) { return lookup.get(code); }
-    }
-
-    private static void CreateDataset()
-    {
-        long file_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id  = HDF5Constants.H5I_INVALID_HID;
-        long group_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id    = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id  = HDF5Constants.H5I_INVALID_HID;
-        long[] dims        = {DIM0};
-        byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if ((file_id >= 0) && (dataspace_id >= 0)) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                if (dataset_id >= 0)
-                    H5.H5Dclose(dataset_id);
-                dataset_id = HDF5Constants.H5I_INVALID_HID;
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create a group in the file.
-        try {
-            if (file_id >= 0)
-                group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
-                                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-            group_id = HDF5Constants.H5I_INVALID_HID;
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (file_id >= 0) {
-                try {
-                    dset_data[0] = H5.H5Rcreate_object(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT);
-                }
-                catch (Throwable err) {
-                    err.printStackTrace();
-                }
-
-                try {
-                    dset_data[1] = H5.H5Rcreate_object(file_id, DATASETNAME2, HDF5Constants.H5P_DEFAULT);
-                }
-                catch (Throwable err) {
-                    err.printStackTrace();
-                }
-            }
-
-            // Create dataset with a scalar dataspace to serve as the parent
-            // for the attribute.
-            try {
-                dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-                if (dataspace_id >= 0) {
-                    dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                              HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                              HDF5Constants.H5P_DEFAULT);
-                    H5.H5Sclose(dataspace_id);
-                    dataspace_id = HDF5Constants.H5I_INVALID_HID;
-                }
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-
-            // Create dataspace. Setting maximum size to NULL sets the maximum
-            // size to be the current size.
-            try {
-                dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-
-            // Create the attribute and write the array data to it.
-            try {
-                if ((dataset_id >= 0) && (dataspace_id >= 0))
-                    attribute_id =
-                        H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF, dataspace_id,
-                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-
-            // Write the dataset.
-            try {
-                if (attribute_id >= 0)
-                    H5.H5Awrite(attribute_id, HDF5Constants.H5T_STD_REF, dset_data);
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        catch (Exception ex) {
-            ex.printStackTrace();
-        }
-        finally {
-            try {
-                H5.H5Rdestroy(dset_data[1]);
-            }
-            catch (Exception ex) {
-            }
-            try {
-                H5.H5Rdestroy(dset_data[0]);
-            }
-            catch (Exception ex) {
-            }
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id  = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id    = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id  = HDF5Constants.H5I_INVALID_HID;
-        int object_type    = -1;
-        long object_id     = HDF5Constants.H5I_INVALID_HID;
-        long[] dims        = {DIM0};
-        byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-
-            // Open an existing dataset.
-            try {
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-
-                try {
-                    attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME,
-                                                      HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-
-                    // Get dataspace and allocate memory for read buffer.
-                    try {
-                        dataspace_id = H5.H5Aget_space(attribute_id);
-                        H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-
-                        // Read data.
-                        H5.H5Aread(attribute_id, HDF5Constants.H5T_STD_REF, dset_data);
-
-                        // Output the data to the screen.
-                        for (int indx = 0; indx < dims[0]; indx++) {
-                            System.out.println(ATTRIBUTENAME + "[" + indx + "]:");
-                            System.out.print("  ->");
-                            // Open the referenced object, get its name and type.
-                            try {
-                                object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
-                                                              HDF5Constants.H5P_DEFAULT);
-                                try {
-                                    object_type =
-                                        H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
-                                    String obj_name = null;
-                                    if (object_type >= 0) {
-                                        // Get the name.
-                                        obj_name = H5.H5Iget_name(object_id);
-                                    }
-                                    if ((object_id >= 0) && (object_type >= -1)) {
-                                        switch (H5G_obj.get(object_type)) {
-                                        case H5G_GROUP:
-                                            System.out.print("H5G_GROUP");
-                                            break;
-                                        case H5G_DATASET:
-                                            System.out.print("H5G_DATASET");
-                                            break;
-                                        case H5G_TYPE:
-                                            System.out.print("H5G_TYPE");
-                                            break;
-                                        default:
-                                            System.out.print("UNHANDLED");
-                                        }
-                                    }
-                                    // Print the name.
-                                    System.out.println(": " + obj_name);
-                                }
-                                catch (Exception e) {
-                                    e.printStackTrace();
-                                }
-                                finally {
-                                    try {
-                                        H5.H5Oclose(object_id);
-                                    }
-                                    catch (Exception e) {
-                                    }
-                                }
-                            }
-                            catch (Exception e5) {
-                                e5.printStackTrace();
-                            }
-                            finally {
-                                try {
-                                    H5.H5Rdestroy(dset_data[indx]);
-                                }
-                                catch (Exception e5) {
-                                }
-                            }
-                        } // end for
-                    }
-                    catch (Exception e4) {
-                        e4.printStackTrace();
-                    }
-                    finally {
-                        try {
-                            H5.H5Sclose(dataspace_id);
-                        }
-                        catch (Exception e3) {
-                        }
-                    }
-                }
-                catch (Exception e3) {
-                    e3.printStackTrace();
-                }
-                finally {
-                    try {
-                        H5.H5Aclose(attribute_id);
-                    }
-                    catch (Exception e4) {
-                    }
-                }
-            }
-            catch (Exception e2) {
-                e2.printStackTrace();
-            }
-            finally {
-                try {
-                    H5.H5Dclose(dataset_id);
-                }
-                catch (Exception e2) {
-                }
-            }
-        }
-        catch (Exception e1) {
-            e1.printStackTrace();
-        }
-        finally {
-            try {
-                H5.H5Fclose(file_id);
-            }
-            catch (Exception e1) {
-            }
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_ObjectReferenceAttribute.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_ObjectReferenceAttribute.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_Opaque.java b/java/examples/datatypes/H5Ex_T_Opaque.java
deleted file mode 100644
index 419a5c81bfd..00000000000
--- a/java/examples/datatypes/H5Ex_T_Opaque.java
+++ /dev/null
@@ -1,268 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write opaque datatypes
-  to a dataset.  The program first writes opaque data to a
-  dataset with a dataspace of DIM0, then closes the file.
-  Next, it reopens the file, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_Opaque {
-    private static String FILENAME    = "H5Ex_T_Opaque.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM0     = 4;
-    private static final int LEN      = 7;
-    private static final int RANK     = 1;
-
-    private static void CreateDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long datatype_id  = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0};
-        byte[] dset_data  = new byte[DIM0 * LEN];
-        byte[] str_data   = {'O', 'P', 'A', 'Q', 'U', 'E'};
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM0; indx++) {
-            for (int jndx = 0; jndx < LEN - 1; jndx++)
-                dset_data[jndx + indx * LEN] = str_data[jndx];
-            dset_data[LEN - 1 + indx * LEN] = (byte)(indx + '0');
-        }
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create opaque datatype and set the tag to something appropriate.
-        // For this example we will write and view the data as a character
-        // array.
-        try {
-            datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN);
-            if (datatype_id >= 0)
-                H5.H5Tset_tag(datatype_id, "Character array");
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset and write the integer data to it. In this
-        // example we will save the data as 64 bit big endian integers,
-        // regardless of the native integer type. The HDF5 library
-        // automatically converts between different integer types.
-        try {
-            if ((file_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
-                dataset_id =
-                    H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the opaque data to the dataset.
-        try {
-            if ((dataset_id >= 0) && (datatype_id >= 0))
-                H5.H5Dwrite(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (datatype_id >= 0)
-                H5.H5Tclose(datatype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long datatype_id  = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long type_len     = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0};
-        byte[] dset_data;
-        String tag_name = null;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get datatype and properties for the datatype.
-        try {
-            if (dataset_id >= 0)
-                datatype_id = H5.H5Dget_type(dataset_id);
-            if (datatype_id >= 0) {
-                type_len = H5.H5Tget_size(datatype_id);
-                tag_name = H5.H5Tget_tag(datatype_id);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate buffer.
-        dset_data = new byte[(int)(dims[0] * type_len)];
-
-        // Read data.
-        try {
-            if ((dataset_id >= 0) && (datatype_id >= 0))
-                H5.H5Dread(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Datatype tag for " + DATASETNAME + " is: \"" + tag_name + "\"");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(DATASETNAME + "[" + indx + "]: ");
-            for (int jndx = 0; jndx < type_len; jndx++) {
-                char temp = (char)dset_data[jndx + indx * (int)type_len];
-                System.out.print(temp);
-            }
-            System.out.println("");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (datatype_id >= 0)
-                H5.H5Tclose(datatype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_Opaque.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_Opaque.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
deleted file mode 100644
index b8a15a64a71..00000000000
--- a/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
+++ /dev/null
@@ -1,305 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write opaque datatypes
-  to an attribute.  The program first writes opaque data to
-  an attribute with a dataspace of DIM0, then closes the
-  file. Next, it reopens the file, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_OpaqueAttribute {
-    private static String FILENAME      = "H5Ex_T_OpaqueAttribute.h5";
-    private static String DATASETNAME   = "DS1";
-    private static String ATTRIBUTENAME = "A1";
-    private static final int DIM0       = 4;
-    private static final int LEN        = 7;
-    private static final int RANK       = 1;
-
-    private static void CreateDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long datatype_id  = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM0};
-        byte[] dset_data  = new byte[DIM0 * LEN];
-        byte[] str_data   = {'O', 'P', 'A', 'Q', 'U', 'E'};
-
-        // Initialize data.
-        for (int indx = 0; indx < DIM0; indx++) {
-            for (int jndx = 0; jndx < LEN - 1; jndx++)
-                dset_data[jndx + indx * LEN] = str_data[jndx];
-            dset_data[LEN - 1 + indx * LEN] = (byte)(indx + '0');
-        }
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if (dataspace_id >= 0) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create opaque datatype and set the tag to something appropriate.
-        // For this example we will write and view the data as a character
-        // array.
-        try {
-            datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN);
-            if (datatype_id >= 0)
-                H5.H5Tset_tag(datatype_id, "Character array");
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the attribute and write the array data to it.
-        try {
-            if ((dataset_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
-                attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, datatype_id, dataspace_id,
-                                            HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the dataset.
-        try {
-            if ((attribute_id >= 0) && (datatype_id >= 0))
-                H5.H5Awrite(attribute_id, datatype_id, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (datatype_id >= 0)
-                H5.H5Tclose(datatype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long datatype_id  = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long type_len     = -1;
-        long[] dims       = {DIM0};
-        byte[] dset_data;
-        String tag_name = null;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
-                                                  HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get datatype and properties for the datatype.
-        try {
-            if (attribute_id >= 0)
-                datatype_id = H5.H5Aget_type(attribute_id);
-            if (datatype_id >= 0) {
-                type_len = H5.H5Tget_size(datatype_id);
-                tag_name = H5.H5Tget_tag(datatype_id);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (attribute_id >= 0)
-                dataspace_id = H5.H5Aget_space(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate buffer.
-        dset_data = new byte[(int)(dims[0] * type_len)];
-
-        // Read data.
-        try {
-            if ((attribute_id >= 0) && (datatype_id >= 0))
-                H5.H5Aread(attribute_id, datatype_id, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        System.out.println("Datatype tag for " + ATTRIBUTENAME + " is: \"" + tag_name + "\"");
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.print(ATTRIBUTENAME + "[" + indx + "]: ");
-            for (int jndx = 0; jndx < type_len; jndx++) {
-                char temp = (char)dset_data[jndx + indx * (int)type_len];
-                System.out.print(temp);
-            }
-            System.out.println("");
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (datatype_id >= 0)
-                H5.H5Tclose(datatype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_OpaqueAttribute.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_OpaqueAttribute.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_String.java b/java/examples/datatypes/H5Ex_T_String.java
deleted file mode 100644
index a69a70b7ddf..00000000000
--- a/java/examples/datatypes/H5Ex_T_String.java
+++ /dev/null
@@ -1,309 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write string datatypes
-  to a dataset.  The program first writes strings to a
-  dataset with a dataspace of DIM0, then closes the file.
-  Next, it reopens the file, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_String {
-    private static String FILENAME    = "H5Ex_T_String.h5";
-    private static String DATASETNAME = "DS1";
-    private static final int DIM0     = 4;
-    private static final int SDIM     = 8;
-    private static final int RANK     = 1;
-
-    private static void CreateDataset()
-    {
-        long file_id            = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id         = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id        = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id         = HDF5Constants.H5I_INVALID_HID;
-        long[] dims             = {DIM0};
-        byte[][] dset_data      = new byte[DIM0][SDIM];
-        StringBuffer[] str_data = {new StringBuffer("Parting"), new StringBuffer("is such"),
-                                   new StringBuffer("sweet"), new StringBuffer("sorrow.")};
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create file and memory datatypes. For this example we will save
-        // the strings as FORTRAN strings, therefore they do not need space
-        // for the null terminator in the file.
-        try {
-            filetype_id = H5.H5Tcopy(HDF5Constants.H5T_FORTRAN_S1);
-            if (filetype_id >= 0)
-                H5.H5Tset_size(filetype_id, SDIM - 1);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (memtype_id >= 0)
-                H5.H5Tset_size(memtype_id, SDIM);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset and write the string data to it.
-        try {
-            if ((file_id >= 0) && (filetype_id >= 0) && (dataspace_id >= 0))
-                dataset_id =
-                    H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            for (int indx = 0; indx < DIM0; indx++) {
-                for (int jndx = 0; jndx < SDIM; jndx++) {
-                    if (jndx < str_data[indx].length())
-                        dset_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
-                    else
-                        dset_data[indx][jndx] = 0;
-                }
-            }
-            if ((dataset_id >= 0) && (memtype_id >= 0))
-                H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id  = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id   = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long sdim         = 0;
-        long[] dims       = {DIM0};
-        byte[][] dset_data;
-        StringBuffer[] str_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get the datatype and its size.
-        try {
-            if (dataset_id >= 0)
-                filetype_id = H5.H5Dget_type(dataset_id);
-            if (filetype_id >= 0) {
-                sdim = H5.H5Tget_size(filetype_id);
-                sdim++; // Make room for null terminator
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (dataset_id >= 0)
-                dataspace_id = H5.H5Dget_space(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate space for data.
-        dset_data = new byte[(int)dims[0]][(int)sdim];
-        str_data  = new StringBuffer[(int)dims[0]];
-
-        // Create the memory datatype.
-        try {
-            memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (memtype_id >= 0)
-                H5.H5Tset_size(memtype_id, sdim);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read data.
-        try {
-            if ((dataset_id >= 0) && (memtype_id >= 0))
-                H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5P_DEFAULT, dset_data);
-            byte[] tempbuf = new byte[(int)sdim];
-            for (int indx = 0; indx < (int)dims[0]; indx++) {
-                for (int jndx = 0; jndx < sdim; jndx++) {
-                    tempbuf[jndx] = dset_data[indx][jndx];
-                }
-                str_data[indx] = new StringBuffer(new String(tempbuf).trim());
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_String.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_String.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_StringAttribute.java b/java/examples/datatypes/H5Ex_T_StringAttribute.java
deleted file mode 100644
index 46c1038f39d..00000000000
--- a/java/examples/datatypes/H5Ex_T_StringAttribute.java
+++ /dev/null
@@ -1,349 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to read and write string datatypes
-  to an attribute.  The program first writes strings to an
-  attribute with a dataspace of DIM0, then closes the file.
-  Next, it reopens the file, reads back the data, and
-  outputs it to the screen.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_StringAttribute {
-    private static String FILENAME      = "H5Ex_T_StringAttribute.h5";
-    private static String DATASETNAME   = "DS1";
-    private static String ATTRIBUTENAME = "A1";
-    private static final int DIM0       = 4;
-    private static final int SDIM       = 8;
-    private static final int RANK       = 1;
-
-    private static void CreateDataset()
-    {
-        long file_id            = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id         = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id        = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id         = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id       = HDF5Constants.H5I_INVALID_HID;
-        long[] dims             = {DIM0};
-        byte[][] dset_data      = new byte[DIM0][SDIM];
-        StringBuffer[] str_data = {new StringBuffer("Parting"), new StringBuffer("is such"),
-                                   new StringBuffer("sweet"), new StringBuffer("sorrow.")};
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create file and memory datatypes. For this example we will save
-        // the strings as FORTRAN strings, therefore they do not need space
-        // for the null terminator in the file.
-        try {
-            filetype_id = H5.H5Tcopy(HDF5Constants.H5T_FORTRAN_S1);
-            if (filetype_id >= 0)
-                H5.H5Tset_size(filetype_id, SDIM - 1);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        try {
-            memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (memtype_id >= 0)
-                H5.H5Tset_size(memtype_id, SDIM);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataset with a scalar dataspace.
-        try {
-            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
-            if (dataspace_id >= 0) {
-                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-                H5.H5Sclose(dataspace_id);
-                dataspace_id = HDF5Constants.H5I_INVALID_HID;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the attribute.
-        try {
-            if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
-                attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
-                                            HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            for (int indx = 0; indx < DIM0; indx++) {
-                for (int jndx = 0; jndx < SDIM; jndx++) {
-                    if (jndx < str_data[indx].length())
-                        dset_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
-                    else
-                        dset_data[indx][jndx] = 0;
-                }
-            }
-            if ((attribute_id >= 0) && (memtype_id >= 0))
-                H5.H5Awrite(attribute_id, memtype_id, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void ReadDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long filetype_id  = HDF5Constants.H5I_INVALID_HID;
-        long memtype_id   = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long sdim         = 0;
-        long[] dims       = {DIM0};
-        byte[][] dset_data;
-        StringBuffer[] str_data;
-
-        // Open an existing file.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing dataset.
-        try {
-            if (file_id >= 0)
-                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
-                                                  HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get the datatype and its size.
-        try {
-            if (attribute_id >= 0)
-                filetype_id = H5.H5Aget_type(attribute_id);
-            if (filetype_id >= 0) {
-                sdim = H5.H5Tget_size(filetype_id);
-                sdim++; // Make room for null terminator
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Get dataspace and allocate memory for read buffer.
-        try {
-            if (attribute_id >= 0)
-                dataspace_id = H5.H5Aget_space(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Allocate space for data.
-        dset_data = new byte[(int)dims[0]][(int)sdim];
-        str_data  = new StringBuffer[(int)dims[0]];
-
-        // Create the memory datatype.
-        try {
-            memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            if (memtype_id >= 0)
-                H5.H5Tset_size(memtype_id, sdim);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Read data.
-        try {
-            if ((attribute_id >= 0) && (memtype_id >= 0))
-                H5.H5Aread(attribute_id, memtype_id, dset_data);
-            byte[] tempbuf = new byte[(int)sdim];
-            for (int indx = 0; indx < (int)dims[0]; indx++) {
-                for (int jndx = 0; jndx < sdim; jndx++) {
-                    tempbuf[jndx] = dset_data[indx][jndx];
-                }
-                str_data[indx] = new StringBuffer(new String(tempbuf).trim());
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Output the data to the screen.
-        for (int indx = 0; indx < dims[0]; indx++) {
-            System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
-        }
-        System.out.println();
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the file type.
-        try {
-            if (filetype_id >= 0)
-                H5.H5Tclose(filetype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the mem type.
-        try {
-            if (memtype_id >= 0)
-                H5.H5Tclose(memtype_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_StringAttribute.CreateDataset();
-        // Now we begin the read section of this example. Here we assume
-        // the dataset and array have the same name and rank, but can have
-        // any size. Therefore we must allocate a new array to read in
-        // data using malloc().
-        H5Ex_T_StringAttribute.ReadDataset();
-    }
-}
diff --git a/java/examples/datatypes/H5Ex_T_VLString.java b/java/examples/datatypes/H5Ex_T_VLString.java
deleted file mode 100644
index a9288a451bc..00000000000
--- a/java/examples/datatypes/H5Ex_T_VLString.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-    Creating and writing a VL string to a file.
- ************************************************************/
-
-package examples.datatypes;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_T_VLString {
-    private static String FILENAME    = "H5Ex_T_VLString.h5";
-    private static String DATASETNAME = "DS1";
-
-    private static void createDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long type_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        int rank          = 1;
-        String[] str_data = {"Parting", "is such", "sweet", "sorrow."};
-        long[] dims       = {str_data.length};
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            type_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
-            H5.H5Tset_size(type_id, HDF5Constants.H5T_VARIABLE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create dataspace. Setting maximum size to NULL sets the maximum
-        // size to be the current size.
-        try {
-            dataspace_id = H5.H5Screate_simple(rank, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset and write the string data to it.
-        try {
-            if ((file_id >= 0) && (type_id >= 0) && (dataspace_id >= 0)) {
-                dataset_id =
-                    H5.H5Dcreate(file_id, DATASETNAME, type_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the data to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5DwriteVL(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                              HDF5Constants.H5P_DEFAULT, str_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            H5.H5Sclose(dataspace_id);
-            H5.H5Tclose(type_id);
-            H5.H5Dclose(dataset_id);
-            H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private static void readDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long type_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        String[] str_data = {"", "", "", ""};
-
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-            type_id    = H5.H5Dget_type(dataset_id);
-            H5.H5DreadVL(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
-                         HDF5Constants.H5P_DEFAULT, str_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        for (int indx = 0; indx < str_data.length; indx++)
-            System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
-
-        try {
-            H5.H5Tclose(type_id);
-            H5.H5Dclose(dataset_id);
-            H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5Ex_T_VLString.createDataset();
-        H5Ex_T_VLString.readDataset();
-    }
-}
diff --git a/java/examples/datatypes/JavaDatatypeExample.sh.in b/java/examples/datatypes/JavaDatatypeExample.sh.in
deleted file mode 100644
index fc4a62706be..00000000000
--- a/java/examples/datatypes/JavaDatatypeExample.sh.in
+++ /dev/null
@@ -1,451 +0,0 @@
-#! /bin/sh
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-#
-
-top_builddir=@top_builddir@
-top_srcdir=@top_srcdir@
-srcdir=@srcdir@
-IS_DARWIN="@H5_IS_DARWIN@"
-
-TESTNAME=EX_Datatypes
-EXIT_SUCCESS=0
-EXIT_FAILURE=1
-
-# Set up default variable values if not supplied by the user.
-RM='rm -rf'
-CMP='cmp'
-DIFF='diff -c'
-CP='cp'
-DIRNAME='dirname'
-BASENAME='basename'
-LS='ls'
-AWK='awk'
-
-nerrors=0
-
-# where the libs exist
-HDFLIB_HOME="$top_srcdir/java/lib"
-BLDDIR="."
-BLDLIBDIR="$BLDDIR/testlibs"
-HDFTEST_HOME="$top_srcdir/java/examples/datatypes"
-JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
-TESTJARFILE=jar@PACKAGE_TARNAME@datatypes.jar
-test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
-
-######################################################################
-# library files
-# --------------------------------------------------------------------
-# All the library files copy from source directory to test directory
-# NOTE: Keep this framework to add/remove test files.
-#       This list are also used for checking exist.
-#       Comment '#' without space can be used.
-# --------------------------------------------------------------------
-LIST_LIBRARY_FILES="
-$top_builddir/src/.libs/libhdf5.*
-$top_builddir/java/src/jni/.libs/libhdf5_java.*
-$top_builddir/java/src/$JARFILE
-"
-LIST_JAR_TESTFILES="
-$HDFLIB_HOME/slf4j-api-2.0.6.jar
-$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar
-"
-LIST_DATA_FILES="
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Array.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Bit.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Commit.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Compound.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Float.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Integer.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Opaque.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_String.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_VLString.txt
-"
-
-#
-# copy files from source dirs to test dir
-#
-COPY_LIBFILES="$LIST_LIBRARY_FILES"
-COPY_JARTESTFILES="$LIST_JAR_TESTFILES"
-
-COPY_LIBFILES_TO_BLDLIBDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_LIBFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -fR $tstfile $BLDLIBDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-                BNAME=`$BASENAME $tstfile`
-                if [ "$BNAME" = "libhdf5_java.dylib" ]; then
-                    COPIED_LIBHDF5_JAVA=1
-                fi
-            fi
-        fi
-    done
-    if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then
-       (cd $BLDLIBDIR; \
-         install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \
-         exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \
-         echo $exist_path; \
-         install_name_tool -change $exist_path @rpath/libhdf5.dylib libhdf5_java.dylib)
-    fi
-    # copy jar files. Used -f to make sure get a new copy
-    for tstfile in $COPY_JARTESTFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -fR $tstfile $BLDLIBDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_LIBFILES_AND_BLDLIBDIR()
-{
-    # skip rm if srcdir is same as destdir
-    # this occurs when build/test performed in source dir and
-    # make cp fail
-    SDIR=$HDFLIB_HOME
-    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-    INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-        $RM -rf $BLDLIBDIR
-    fi
-}
-
-COPY_DATAFILES="$LIST_DATA_FILES"
-
-COPY_DATAFILES_TO_BLDDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_DATAFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -f $tstfile $BLDDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_DATAFILES_AND_BLDDIR()
-{
-        $RM $BLDDIR/examples.datatypes.H5Ex_T_*.txt
-        $RM $BLDDIR/H5Ex_T_*.out
-        $RM $BLDDIR/H5Ex_T_*.h5
-}
-
-# Print a line-line message left justified in a field of 70 characters
-# beginning with the word "Testing".
-#
-TESTING() {
-   SPACES="                                                               "
-   echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
-}
-
-# where Java is installed (requires jdk1.7.x)
-JAVAEXE=@JAVA@
-JAVAEXEFLAGS=@H5_JAVAFLAGS@
-
-###############################################################################
-#            DO NOT MODIFY BELOW THIS LINE
-###############################################################################
-
-# prepare for test
-COPY_LIBFILES_TO_BLDLIBDIR
-COPY_DATAFILES_TO_BLDDIR
-
-CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE""
-
-TEST=/usr/bin/test
-if [ ! -x /usr/bin/test ]
-then
-TEST=`which test`
-fi
-
-if $TEST -z "$CLASSPATH"; then
-        CLASSPATH=""
-fi
-CLASSPATH=$CPATH":"$CLASSPATH
-export CLASSPATH
-
-if $TEST -n "$JAVAPATH" ; then
-        PATH=$JAVAPATH":"$PATH
-        export PATH
-fi
-
-if $TEST -e /bin/uname; then
-   os_name=`/bin/uname -s`
-elif $TEST -e /usr/bin/uname; then
-   os_name=`/usr/bin/uname -s`
-else
-   os_name=unknown
-fi
-
-if $TEST -z "$LD_LIBRARY_PATH" ; then
-        LD_LIBRARY_PATH=""
-fi
-
-case  $os_name in
-    *)
-    LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
-    ;;
-esac
-
-export LD_LIBRARY_PATH
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Array"
-TESTING examples.datatypes.H5Ex_T_Array
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Array > H5Ex_T_Array.out)
-if diff H5Ex_T_Array.out examples.datatypes.H5Ex_T_Array.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_Array"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_Array"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ArrayAttribute"
-TESTING examples.datatypes.H5Ex_T_ArrayAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ArrayAttribute > H5Ex_T_ArrayAttribute.out)
-if diff H5Ex_T_ArrayAttribute.out examples.datatypes.H5Ex_T_ArrayAttribute.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_ArrayAttribute"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_ArrayAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Bit"
-TESTING examples.datatypes.H5Ex_T_Bit
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Bit > H5Ex_T_Bit.out)
-if diff H5Ex_T_Bit.out examples.datatypes.H5Ex_T_Bit.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_Bit"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_Bit"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_BitAttribute"
-TESTING examples.datatypes.H5Ex_T_BitAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_BitAttribute > H5Ex_T_BitAttribute.out)
-if diff H5Ex_T_BitAttribute.out examples.datatypes.H5Ex_T_BitAttribute.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_BitAttribute"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_BitAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Commit"
-TESTING examples.datasets.H5Ex_T_Commit
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Commit > H5Ex_T_Commit.out)
-if diff H5Ex_T_Commit.out examples.datatypes.H5Ex_T_Commit.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_Commit"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_Commit"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Compound"
-TESTING examples.datatypes.H5Ex_T_Compound
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Compound > H5Ex_T_Compound.out)
-if diff H5Ex_T_Compound.out examples.datatypes.H5Ex_T_Compound.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_Compound"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_Compound"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_CompoundAttribute"
-TESTING examples.datatypes.H5Ex_T_CompoundAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_CompoundAttribute > H5Ex_T_CompoundAttribute.out)
-if diff H5Ex_T_CompoundAttribute.out examples.datatypes.H5Ex_T_CompoundAttribute.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_CompoundAttribute"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_CompoundAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Float"
-TESTING examples.datatypes.H5Ex_T_Float
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Float > H5Ex_T_Float.out)
-if diff H5Ex_T_Float.out examples.datatypes.H5Ex_T_Float.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_Float"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_Float"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_FloatAttribute"
-TESTING examples.datatypes.H5Ex_T_FloatAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_FloatAttribute > H5Ex_T_FloatAttribute.out)
-if diff H5Ex_T_FloatAttribute.out examples.datatypes.H5Ex_T_FloatAttribute.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_FloatAttribute"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_FloatAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Integer"
-TESTING examples.datatypes.H5Ex_T_Integer
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Integer > H5Ex_T_Integer.out)
-if diff H5Ex_T_Integer.out examples.datatypes.H5Ex_T_Integer.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_Integer"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_Integer"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_IntegerAttribute"
-TESTING examples.datatypes.H5Ex_T_IntegerAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_IntegerAttribute > H5Ex_T_IntegerAttribute.out)
-if diff H5Ex_T_IntegerAttribute.out examples.datatypes.H5Ex_T_IntegerAttribute.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_IntegerAttribute"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_IntegerAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReference"
-TESTING examples.datatypes.H5Ex_T_ObjectReference
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReference > H5Ex_T_ObjectReference.out)
-if diff H5Ex_T_ObjectReference.out examples.datatypes.H5Ex_T_ObjectReference.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_ObjectReference"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_ObjectReference"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReferenceAttribute"
-TESTING examples.datatypes.H5Ex_T_ObjectReferenceAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReferenceAttribute > H5Ex_T_ObjectReferenceAttribute.out)
-if diff H5Ex_T_ObjectReferenceAttribute.out examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_ObjectReferenceAttribute"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_ObjectReferenceAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Opaque"
-TESTING examples.datatypes.H5Ex_T_Opaque
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Opaque > H5Ex_T_Opaque.out)
-if diff H5Ex_T_Opaque.out examples.datatypes.H5Ex_T_Opaque.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_Opaque"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_Opaque"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_OpaqueAttribute"
-TESTING examples.datatypes.H5Ex_T_OpaqueAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_OpaqueAttribute > H5Ex_T_OpaqueAttribute.out)
-if diff H5Ex_T_OpaqueAttribute.out examples.datatypes.H5Ex_T_OpaqueAttribute.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_OpaqueAttribute"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_OpaqueAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_String"
-TESTING examples.datatypes.H5Ex_T_String
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_String > H5Ex_T_String.out)
-if diff H5Ex_T_String.out examples.datatypes.H5Ex_T_String.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_String"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_String"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_StringAttribute"
-TESTING examples.datatypes.H5Ex_T_StringAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_StringAttribute > H5Ex_T_StringAttribute.out)
-if diff H5Ex_T_StringAttribute.out examples.datatypes.H5Ex_T_StringAttribute.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_StringAttribute"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_StringAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_VLString"
-TESTING examples.datatypes.H5Ex_T_VLString
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_VLString > H5Ex_T_VLString.out)
-if diff H5Ex_T_VLString.out examples.datatypes.H5Ex_T_VLString.txt > /dev/null; then
-    echo "  PASSED      datatypes.H5Ex_T_VLString"
-else
-    echo "**FAILED**    datatypes.H5Ex_T_VLString"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-# Clean up temporary files/directories
-CLEAN_LIBFILES_AND_BLDLIBDIR
-CLEAN_DATAFILES_AND_BLDDIR
-
-# Report test results and exit
-if test $nerrors -eq 0 ; then
-    echo "All $TESTNAME tests passed."
-    exit $EXIT_SUCCESS
-else
-    echo "$TESTNAME tests failed with $nerrors errors."
-    exit $EXIT_FAILURE
-fi
diff --git a/java/examples/datatypes/Makefile.am b/java/examples/datatypes/Makefile.am
deleted file mode 100644
index 12e5cb8fadd..00000000000
--- a/java/examples/datatypes/Makefile.am
+++ /dev/null
@@ -1,75 +0,0 @@
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-##
-## Makefile.am
-## Run automake to generate a Makefile.in from this file.
-##
-#
-# HDF5 Java Library Examples Makefile(.in)
-
-include $(top_srcdir)/config/commence.am
-
-# Mark this directory as part of the JNI API
-JAVA_API=yes
-
-JAVAROOT = .classes
-
-classes:
-	test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
-
-pkgpath = examples/datatypes
-hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
-CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH
-
-jarfile = jar$(PACKAGE_TARNAME)datatypes.jar
-
-AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
-
-TESTPACKAGE =
-
-noinst_JAVA = \
-    H5Ex_T_Array.java \
-    H5Ex_T_ArrayAttribute.java \
-    H5Ex_T_Bit.java \
-    H5Ex_T_BitAttribute.java \
-    H5Ex_T_Commit.java \
-    H5Ex_T_Compound.java \
-    H5Ex_T_CompoundAttribute.java \
-    H5Ex_T_Float.java \
-    H5Ex_T_FloatAttribute.java \
-    H5Ex_T_Integer.java \
-    H5Ex_T_IntegerAttribute.java \
-    H5Ex_T_ObjectReference.java \
-    H5Ex_T_ObjectReferenceAttribute.java \
-    H5Ex_T_Opaque.java \
-    H5Ex_T_OpaqueAttribute.java \
-    H5Ex_T_String.java \
-    H5Ex_T_StringAttribute.java \
-    H5Ex_T_VLString.java
-
-$(jarfile): classnoinst.stamp classes
-	$(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
-
-noinst_DATA = $(jarfile)
-
-.PHONY: classes
-
-check_SCRIPTS = JavaDatatypeExample.sh
-TEST_SCRIPT = $(check_SCRIPTS)
-
-CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class JavaDatatypeExample.sh
-
-clean:
-	rm -rf $(JAVAROOT)/*
-	rm -f $(jarfile)
-	rm -f classnoinst.stamp
-
-include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/groups/CMakeLists.txt b/java/examples/groups/CMakeLists.txt
deleted file mode 100644
index fc9eb948a99..00000000000
--- a/java/examples/groups/CMakeLists.txt
+++ /dev/null
@@ -1,144 +0,0 @@
-cmake_minimum_required (VERSION 3.18)
-project (HDFJAVA_EXAMPLES_GROUPS Java)
-
-set (CMAKE_VERBOSE_MAKEFILE 1)
-
-set (HDF_JAVA_EXAMPLES
-    H5Ex_G_Create
-    H5Ex_G_Iterate
-    H5Ex_G_Compact
-    H5Ex_G_Corder
-    H5Ex_G_Intermediate
-    H5Ex_G_Phase
-    H5Ex_G_Visit
-)
-
-if (WIN32)
-  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
-else ()
-  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
-endif ()
-
-set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
-
-set (CMAKE_JAVA_CLASSPATH ".")
-foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
-  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
-endforeach ()
-
-foreach (example ${HDF_JAVA_EXAMPLES})
-  file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
-  "Main-Class: examples.groups.${example}
-"
-  )
-  add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
-  get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
-#  install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
-  get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
-  add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
-
-  #-----------------------------------------------------------------------------
-  # Add Target to clang-format
-  #-----------------------------------------------------------------------------
-  if (HDF5_ENABLE_FORMATTERS)
-    clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
-  endif ()
-endforeach ()
-
-set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
-
-set (CMAKE_JAVA_CLASSPATH ".")
-foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
-  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
-endforeach ()
-
-set (HDF_JAVA_TEST_FILES
-    h5ex_g_iterate.h5
-    h5ex_g_visit.h5
-)
-
-foreach (h5_file ${HDF_JAVA_TEST_FILES})
-  HDFTEST_COPY_FILE("${PROJECT_SOURCE_DIR}/${h5_file}" "${PROJECT_BINARY_DIR}/${h5_file}" "H5Ex_G_Visit_files")
-endforeach ()
-add_custom_target(H5Ex_G_Visit_files ALL COMMENT "Copying files needed by H5Ex_G_Visit tests" DEPENDS ${H5Ex_G_Visit_files_list})
-
-if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL)
-  get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME)
-  set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$<OR:$<CONFIG:Debug>,$<CONFIG:Developer>>:${CMAKE_DEBUG_POSTFIX}>;")
-
-  set (last_test "")
-  foreach (example ${HDF_JAVA_EXAMPLES})
-    if (NOT example STREQUAL "H5Ex_G_Iterate" AND NOT example STREQUAL "H5Ex_G_Visit")
-      if (example STREQUAL "H5Ex_G_Compact")
-        add_test (
-            NAME JAVA_groups-${example}-clear-objects
-            COMMAND ${CMAKE_COMMAND} -E remove
-                ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}1.h5
-                ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}2.h5
-        )
-      else ()
-        add_test (
-            NAME JAVA_groups-${example}-clear-objects
-            COMMAND ${CMAKE_COMMAND} -E remove
-                ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-        )
-      endif ()
-    else ()
-      add_test (
-          NAME JAVA_groups-${example}-clear-objects
-          COMMAND ${CMAKE_COMMAND} -E echo "${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 exists"
-      )
-    endif ()
-    if (last_test)
-      set_tests_properties (JAVA_groups-${example}-clear-objects PROPERTIES DEPENDS ${last_test})
-    endif ()
-
-    add_test (
-        NAME JAVA_groups-${example}-copy-objects
-        COMMAND ${CMAKE_COMMAND} -E copy_if_different
-            ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.groups.${example}.txt
-            ${HDFJAVA_EXAMPLES_GROUPS_BINARY_DIR}/${example}.txt
-    )
-    set_tests_properties (JAVA_groups-${example}-copy-objects PROPERTIES DEPENDS JAVA_groups-${example}-clear-objects)
-    add_test (
-        NAME JAVA_groups-${example}
-        COMMAND "${CMAKE_COMMAND}"
-            -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
-            -D "TEST_PROGRAM=examples.groups.${example}"
-            -D "TEST_ARGS:STRING=${CMD_ARGS}"
-            -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${example}_JAR_FILE}"
-            -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
-            -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
-            -D "TEST_OUTPUT=groups/${example}.out"
-            -D "TEST_EXPECT=0"
-            -D "TEST_REFERENCE=groups/${example}.txt"
-            -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
-    )
-    set_tests_properties (JAVA_groups-${example} PROPERTIES
-        DEPENDS JAVA_groups-${example}-copy-objects
-    )
-
-    if (NOT example STREQUAL "H5Ex_G_Iterate" AND NOT example STREQUAL "H5Ex_G_Visit")
-      if (example STREQUAL "H5Ex_G_Compact")
-        add_test (
-            NAME JAVA_groups-${example}-clean-objects
-            COMMAND ${CMAKE_COMMAND} -E remove
-                ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}1.h5
-                ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}2.h5
-        )
-      else ()
-        add_test (
-            NAME JAVA_groups-${example}-clean-objects
-            COMMAND ${CMAKE_COMMAND} -E remove
-                ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-        )
-      endif ()
-      set_tests_properties (JAVA_groups-${example}-clean-objects PROPERTIES
-          DEPENDS JAVA_groups-${example}
-      )
-      set (last_test "JAVA_groups-${example}-clean-objects")
-    else ()
-      set (last_test "JAVA_groups-${example}")
-    endif ()
-  endforeach ()
-endif ()
diff --git a/java/examples/groups/H5Ex_G_Compact.java b/java/examples/groups/H5Ex_G_Compact.java
deleted file mode 100644
index 2c6535ac04f..00000000000
--- a/java/examples/groups/H5Ex_G_Compact.java
+++ /dev/null
@@ -1,261 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-    Creating a file and print the storage layout.
- ************************************************************/
-
-package examples.groups;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.structs.H5G_info_t;
-
-public class H5Ex_G_Compact {
-
-    private static final String FILE1 = "H5Ex_G_Compact1.h5";
-    private static final String FILE2 = "H5Ex_G_Compact2.h5";
-    private static final String GROUP = "G1";
-
-    enum H5G_storage {
-        H5G_STORAGE_TYPE_UNKNOWN(-1),
-        H5G_STORAGE_TYPE_SYMBOL_TABLE(0),
-        H5G_STORAGE_TYPE_COMPACT(1),
-        H5G_STORAGE_TYPE_DENSE(2);
-
-        private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
-
-        static
-        {
-            for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5G_storage(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5G_storage get(int code) { return lookup.get(code); }
-    }
-
-    public static void CreateGroup()
-    {
-        long file_id  = HDF5Constants.H5I_INVALID_HID;
-        long group_id = HDF5Constants.H5I_INVALID_HID;
-        long fapl_id  = HDF5Constants.H5I_INVALID_HID;
-        H5G_info_t ginfo;
-        long size;
-
-        // Create file 1. This file will use original format groups.
-        try {
-            file_id = H5.H5Fcreate(FILE1, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Create a group in the file1.
-        try {
-            if (file_id >= 0)
-                group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                        HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Obtain the group info and print the group storage type.
-        try {
-            if (group_id >= 0) {
-                ginfo = H5.H5Gget_info(group_id);
-                System.out.print("Group storage type for " + FILE1 + " is: ");
-                switch (H5G_storage.get(ginfo.storage_type)) {
-                case H5G_STORAGE_TYPE_COMPACT:
-                    System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
-                    break;
-                case H5G_STORAGE_TYPE_DENSE:
-                    System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
-                    break;
-                case H5G_STORAGE_TYPE_SYMBOL_TABLE:
-                    System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
-                    break;
-                case H5G_STORAGE_TYPE_UNKNOWN:
-                    System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
-                    break;
-                default:
-                    System.out.println("Storage Type Invalid");
-                    break;
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group.
-        try {
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // close the file 1.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Re-open file 1. Need to get the correct file size.
-        try {
-            file_id = H5.H5Fopen(FILE1, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Obtain and print the file size.
-        try {
-            if (file_id >= 0) {
-                size = H5.H5Fget_filesize(file_id);
-                System.out.println("File size for " + FILE1 + " is: " + size + " bytes");
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close FILE1.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Set file access property list to allow the latest file format.
-        // This will allow the library to create new compact format groups.
-        try {
-            fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
-            if (fapl_id >= 0)
-                H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST,
-                                        HDF5Constants.H5F_LIBVER_LATEST);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        System.out.println();
-        // Create file 2 using the new file access property list.
-        try {
-            file_id = H5.H5Fcreate(FILE2, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        // Create group in file2.
-        try {
-            if (file_id >= 0)
-                group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                        HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Obtain the group info and print the group storage type.
-        try {
-            if (group_id >= 0) {
-                ginfo = H5.H5Gget_info(group_id);
-                System.out.print("Group storage type for " + FILE2 + " is: ");
-                switch (H5G_storage.get(ginfo.storage_type)) {
-                case H5G_STORAGE_TYPE_COMPACT:
-                    System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
-                    break;
-                case H5G_STORAGE_TYPE_DENSE:
-                    System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
-                    break;
-                case H5G_STORAGE_TYPE_SYMBOL_TABLE:
-                    System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
-                    break;
-                case H5G_STORAGE_TYPE_UNKNOWN:
-                    System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
-                    break;
-                default:
-                    System.out.println("Storage Type Invalid");
-                    break;
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group.
-        try {
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // close the file 2.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Re-open file 2. Needed to get the correct file size.
-        try {
-            file_id = H5.H5Fopen(FILE2, HDF5Constants.H5F_ACC_RDONLY, fapl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Obtain and print the file size.
-        try {
-            if (file_id >= 0) {
-                size = H5.H5Fget_filesize(file_id);
-                System.out.println("File size for " + FILE2 + " is: " + size + " bytes");
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close FILE2.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5Ex_G_Compact.CreateGroup(); }
-}
diff --git a/java/examples/groups/H5Ex_G_Corder.java b/java/examples/groups/H5Ex_G_Corder.java
deleted file mode 100644
index 79bbad6aba5..00000000000
--- a/java/examples/groups/H5Ex_G_Corder.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/************************************************************
-    Creating a file with creation properties and traverse the
-    groups in alphabetical and creation order.
- ************************************************************/
-
-package examples.groups;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.structs.H5G_info_t;
-
-public class H5Ex_G_Corder {
-    private static String FILE = "H5Ex_G_Corder.h5";
-
-    private static void CreateGroup() throws Exception
-    {
-        long file_id     = HDF5Constants.H5I_INVALID_HID;
-        long group_id    = HDF5Constants.H5I_INVALID_HID;
-        long subgroup_id = HDF5Constants.H5I_INVALID_HID;
-        long gcpl_id     = HDF5Constants.H5I_INVALID_HID;
-        int status;
-        H5G_info_t ginfo;
-        int i;
-        String name;
-
-        try {
-            // Create a new file using default properties.
-            file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-
-            // Create group creation property list and enable link creation order tracking.
-            gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
-            status  = H5.H5Pset_link_creation_order(gcpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED +
-                                                                 HDF5Constants.H5P_CRT_ORDER_INDEXED);
-
-            // Create primary group using the property list.
-            if (status >= 0)
-                group_id = H5.H5Gcreate(file_id, "index_group", HDF5Constants.H5P_DEFAULT, gcpl_id,
-                                        HDF5Constants.H5P_DEFAULT);
-
-            try {
-                /*
-                 * Create subgroups in the primary group. These will be tracked by creation order. Note that
-                 * these groups do not have to have the creation order tracking property set.
-                 */
-                subgroup_id = H5.H5Gcreate(group_id, "H", HDF5Constants.H5P_DEFAULT,
-                                           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-                status      = H5.H5Gclose(subgroup_id);
-                subgroup_id = H5.H5Gcreate(group_id, "D", HDF5Constants.H5P_DEFAULT,
-                                           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-                status      = H5.H5Gclose(subgroup_id);
-                subgroup_id = H5.H5Gcreate(group_id, "F", HDF5Constants.H5P_DEFAULT,
-                                           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-                status      = H5.H5Gclose(subgroup_id);
-                subgroup_id = H5.H5Gcreate(group_id, "5", HDF5Constants.H5P_DEFAULT,
-                                           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-                status      = H5.H5Gclose(subgroup_id);
-
-                // Get group info.
-                ginfo = H5.H5Gget_info(group_id);
-
-                // Traverse links in the primary group using alphabetical indices (H5_INDEX_NAME).
-                System.out.println("Traversing group using alphabetical indices:");
-                for (i = 0; i < ginfo.nlinks; i++) {
-                    // Retrieve the name of the ith link in a group
-                    name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_NAME,
-                                                 HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT);
-                    System.out.println("Index " + i + ": " + name);
-                }
-
-                // Traverse links in the primary group by creation order (H5_INDEX_CRT_ORDER).
-                System.out.println("Traversing group using creation order indices:");
-                for (i = 0; i < ginfo.nlinks; i++) {
-                    // Retrieve the name of the ith link in a group
-                    name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER,
-                                                 HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT);
-                    System.out.println("Index " + i + ": " + name);
-                }
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        finally {
-            // Close and release resources.
-            if (gcpl_id >= 0)
-                H5.H5Pclose(gcpl_id);
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        try {
-            H5Ex_G_Corder.CreateGroup();
-        }
-        catch (Exception ex) {
-            ex.printStackTrace();
-        }
-    }
-}
diff --git a/java/examples/groups/H5Ex_G_Create.java b/java/examples/groups/H5Ex_G_Create.java
deleted file mode 100644
index 51804b9aad6..00000000000
--- a/java/examples/groups/H5Ex_G_Create.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to create, open, and close a group.
- ************************************************************/
-
-package examples.groups;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5Ex_G_Create {
-    private static String FILENAME  = "H5Ex_G_Create.h5";
-    private static String GROUPNAME = "G1";
-
-    private static void CreateGroup()
-    {
-        long file_id  = HDF5Constants.H5I_INVALID_HID;
-        long group_id = HDF5Constants.H5I_INVALID_HID;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create a group in the file.
-        try {
-            if (file_id >= 0)
-                group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
-                                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group. The handle "group" can no longer be used.
-        try {
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Re-open the group, obtaining a new handle.
-        try {
-            if (file_id >= 0)
-                group_id = H5.H5Gopen(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group.
-        try {
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5Ex_G_Create.CreateGroup(); }
-}
diff --git a/java/examples/groups/H5Ex_G_Intermediate.java b/java/examples/groups/H5Ex_G_Intermediate.java
deleted file mode 100644
index e638fd0c3de..00000000000
--- a/java/examples/groups/H5Ex_G_Intermediate.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
- This example shows how to create intermediate groups with
- a single call to H5Gcreate.
- ************************************************************/
-package examples.groups;
-
-import java.util.ArrayList;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.callbacks.H5O_iterate_opdata_t;
-import hdf.hdf5lib.callbacks.H5O_iterate_t;
-import hdf.hdf5lib.structs.H5O_info_t;
-
-public class H5Ex_G_Intermediate {
-
-    private static String FILE = "H5Ex_G_Intermediate.h5";
-
-    private void CreateGroup() throws Exception
-    {
-
-        long file_id  = HDF5Constants.H5I_INVALID_HID;
-        long group_id = HDF5Constants.H5I_INVALID_HID;
-        long gcpl_id  = HDF5Constants.H5I_INVALID_HID;
-
-        try {
-            // Create a new file_id using the default properties.
-            file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-
-            // Create group_id creation property list and set it to allow creation of intermediate group_ids.
-            gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
-            H5.H5Pset_create_intermediate_group(gcpl_id, true);
-
-            /*
-             * Create the group_id /G1/G2/G3. Note that /G1 and /G1/G2 do not exist yet. This call would cause
-             * an error if we did not use the previously created property list.
-             */
-            group_id = H5.H5Gcreate(file_id, "/G1/G2/G3", gcpl_id, HDF5Constants.H5P_DEFAULT,
-                                    HDF5Constants.H5P_DEFAULT);
-            // Print all the objects in the file_ids to show that intermediate group_ids have been created.
-            System.out.println("Objects in the file_id:");
-
-            // H5O_iterate_opdata_t iter_data = null;
-            H5O_iterate_opdata_t iter_data = new H5O_iter_data();
-            H5O_iterate_t iter_cb          = new H5O_iter_callback();
-
-            H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb,
-                        iter_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        finally {
-            // Close and release resources.
-            if (gcpl_id >= 0)
-                H5.H5Pclose(gcpl_id);
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        try {
-            (new H5Ex_G_Intermediate()).CreateGroup();
-        }
-        catch (Exception ex) {
-            ex.printStackTrace();
-        }
-    }
-
-    private class idata {
-        public String link_name = null;
-        public int link_type    = -1;
-
-        idata(String name, int type)
-        {
-            this.link_name = name;
-            this.link_type = type;
-        }
-    }
-
-    private class H5O_iter_data implements H5O_iterate_opdata_t {
-        public ArrayList<idata> iterdata = new ArrayList<idata>();
-    }
-
-    private class H5O_iter_callback implements H5O_iterate_t {
-        public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata_t op_data)
-        {
-            idata id = new idata(name, info.type);
-            ((H5O_iter_data)op_data).iterdata.add(id);
-
-            System.out.print("/"); /* Print root group in object path */
-
-            // Check if the current object is the root group, and if not print the full path name and type.
-
-            if (name.charAt(0) == '.') /* Root group, do not print '.' */
-                System.out.println("  (Group)");
-            else if (info.type == HDF5Constants.H5O_TYPE_GROUP)
-                System.out.println(name + "  (Group)");
-            else if (info.type == HDF5Constants.H5O_TYPE_DATASET)
-                System.out.println(name + "  (Dataset)");
-            else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE)
-                System.out.println(name + "  (Datatype)");
-            else
-                System.out.println(name + "  (Unknown)");
-
-            return 0;
-        }
-    }
-}
diff --git a/java/examples/groups/H5Ex_G_Iterate.java b/java/examples/groups/H5Ex_G_Iterate.java
deleted file mode 100644
index 24cbb0a0d01..00000000000
--- a/java/examples/groups/H5Ex_G_Iterate.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to iterate over group members using
-  H5Gget_obj_info_all.
- ************************************************************/
-package examples.groups;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.structs.H5O_token_t;
-
-public class H5Ex_G_Iterate {
-    private static String FILENAME    = "groups/h5ex_g_iterate.h5";
-    private static String DATASETNAME = "/";
-
-    enum H5O_type {
-        H5O_TYPE_UNKNOWN(-1),       // Unknown object type
-        H5O_TYPE_GROUP(0),          // Object is a group
-        H5O_TYPE_DATASET(1),        // Object is a dataset
-        H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
-        H5O_TYPE_NTYPES(3);         // Number of different object types
-        private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
-
-        static
-        {
-            for (H5O_type s : EnumSet.allOf(H5O_type.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5O_type(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5O_type get(int code) { return lookup.get(code); }
-    }
-
-    private static void do_iterate()
-    {
-        long file_id = HDF5Constants.H5I_INVALID_HID;
-
-        // Open a file using default properties.
-        try {
-            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Begin iteration.
-        System.out.println("Objects in root group:");
-        try {
-            if (file_id >= 0) {
-                int count             = (int)H5.H5Gn_members(file_id, DATASETNAME);
-                String[] oname        = new String[count];
-                int[] otype           = new int[count];
-                int[] ltype           = new int[count];
-                H5O_token_t[] otokens = new H5O_token_t[count];
-                H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, otokens,
-                                       HDF5Constants.H5_INDEX_NAME);
-
-                // Get type of the object and display its name and type.
-                for (int indx = 0; indx < otype.length; indx++) {
-                    switch (H5O_type.get(otype[indx])) {
-                    case H5O_TYPE_GROUP:
-                        System.out.println("  Group: " + oname[indx]);
-                        break;
-                    case H5O_TYPE_DATASET:
-                        System.out.println("  Dataset: " + oname[indx]);
-                        break;
-                    case H5O_TYPE_NAMED_DATATYPE:
-                        System.out.println("  Datatype: " + oname[indx]);
-                        break;
-                    default:
-                        System.out.println("  Unknown: " + oname[indx]);
-                    }
-                }
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5Ex_G_Iterate.do_iterate(); }
-}
diff --git a/java/examples/groups/H5Ex_G_Phase.java b/java/examples/groups/H5Ex_G_Phase.java
deleted file mode 100644
index 7a3fba3b603..00000000000
--- a/java/examples/groups/H5Ex_G_Phase.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-  This example shows how to set the conditions for
-  conversion between compact and dense (indexed) groups.
- ************************************************************/
-package examples.groups;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.structs.H5G_info_t;
-
-public class H5Ex_G_Phase {
-    private static String FILE     = "H5Ex_G_Phase.h5";
-    private static int MAX_GROUPS  = 7;
-    private static int MAX_COMPACT = 5;
-    private static int MIN_DENSE   = 3;
-
-    enum H5G_storage {
-        H5G_STORAGE_TYPE_UNKNOWN(-1),
-        H5G_STORAGE_TYPE_SYMBOL_TABLE(0),
-        H5G_STORAGE_TYPE_COMPACT(1),
-        H5G_STORAGE_TYPE_DENSE(2);
-
-        private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
-
-        static
-        {
-            for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
-                lookup.put(s.getCode(), s);
-        }
-
-        private int code;
-
-        H5G_storage(int layout_type) { this.code = layout_type; }
-
-        public int getCode() { return this.code; }
-
-        public static H5G_storage get(int code) { return lookup.get(code); }
-    }
-
-    private static void CreateGroup()
-    {
-        long file_id     = HDF5Constants.H5I_INVALID_HID;
-        long group_id    = HDF5Constants.H5I_INVALID_HID;
-        long subgroup_id = HDF5Constants.H5I_INVALID_HID;
-        long fapl_id     = HDF5Constants.H5I_INVALID_HID;
-        long gcpl_id     = HDF5Constants.H5I_INVALID_HID;
-        H5G_info_t ginfo;
-        String name = "G0"; // Name of subgroup_id
-        int i;
-
-        // Set file access property list to allow the latest file format.This will allow the library to create
-        // new format groups.
-        try {
-            fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
-            if (fapl_id >= 0)
-                H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST,
-                                        HDF5Constants.H5F_LIBVER_LATEST);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create group access property list and set the phase change conditions.
-        try {
-            gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
-            if (gcpl_id >= 0)
-                H5.H5Pset_link_phase_change(gcpl_id, MAX_COMPACT, MIN_DENSE);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create a new file using the default properties.
-        try {
-            if (fapl_id >= 0)
-                file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create primary group.
-        try {
-            if ((file_id >= 0) && (gcpl_id >= 0))
-                group_id = H5.H5Gcreate(file_id, name, HDF5Constants.H5P_DEFAULT, gcpl_id,
-                                        HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Add subgroups to "group" one at a time, print the storage type for "group" after each subgroup is
-        // created.
-        for (i = 1; i <= MAX_GROUPS; i++) {
-            // Define the subgroup name and create the subgroup.
-            char append = (char)(((char)i) + '0');
-            name        = name + append; /* G1, G2, G3 etc. */
-            try {
-                if (group_id >= 0) {
-                    subgroup_id = H5.H5Gcreate(group_id, name, HDF5Constants.H5P_DEFAULT,
-                                               HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-                    H5.H5Gclose(subgroup_id);
-                }
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-
-            // Obtain the group info and print the group storage type
-            try {
-                if (group_id >= 0) {
-                    ginfo = H5.H5Gget_info(group_id);
-                    System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") +
-                                     ": Storage type is ");
-                    switch (H5G_storage.get(ginfo.storage_type)) {
-                    case H5G_STORAGE_TYPE_COMPACT:
-                        System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
-                        break;
-                    case H5G_STORAGE_TYPE_DENSE:
-                        System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
-                        break;
-                    case H5G_STORAGE_TYPE_SYMBOL_TABLE:
-                        System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
-                        break;
-                    case H5G_STORAGE_TYPE_UNKNOWN:
-                        System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
-                        break;
-                    default:
-                        System.out.println("Storage Type Invalid");
-                        break;
-                    }
-                }
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
-        System.out.println();
-
-        // Delete subgroups one at a time, print the storage type for "group" after each subgroup is deleted.
-        for (i = MAX_GROUPS; i >= 1; i--) {
-            // Define the subgroup name and delete the subgroup.
-            try {
-                H5.H5Ldelete(group_id, name, HDF5Constants.H5P_DEFAULT);
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-            name = name.substring(0, i + 1);
-
-            // Obtain the group info and print the group storage type
-            try {
-                if (group_id >= 0) {
-                    ginfo = H5.H5Gget_info(group_id);
-                    System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") +
-                                     ": Storage type is ");
-                    switch (H5G_storage.get(ginfo.storage_type)) {
-                    case H5G_STORAGE_TYPE_COMPACT:
-                        System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
-                        break;
-                    case H5G_STORAGE_TYPE_DENSE:
-                        System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
-                        break;
-                    case H5G_STORAGE_TYPE_SYMBOL_TABLE:
-                        System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
-                        break;
-                    case H5G_STORAGE_TYPE_UNKNOWN:
-                        System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
-                        break;
-                    default:
-                        System.out.println("Storage Type Invalid");
-                        break;
-                    }
-                }
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
-        // Close and release resources
-        try {
-            if (fapl_id >= 0)
-                H5.H5Pclose(fapl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (gcpl_id >= 0)
-                H5.H5Pclose(gcpl_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group
-        try {
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5Ex_G_Phase.CreateGroup(); }
-}
diff --git a/java/examples/groups/H5Ex_G_Traverse.java b/java/examples/groups/H5Ex_G_Traverse.java
deleted file mode 100644
index 61a269c1848..00000000000
--- a/java/examples/groups/H5Ex_G_Traverse.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-This example shows a way to recursively traverse the file
-using H5Literate.  The method shown here guarantees that
-the recursion will not enter an infinite loop, but does
-not prevent objects from being visited more than once.
-The program prints the directory structure of the file
-specified in FILE.  The default file used by this example
-implements the structure described in the User's Guide,
-chapter 4, figure 26.
- ************************************************************/
-package examples.groups;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.callbacks.H5L_iterate_opdata_t;
-import hdf.hdf5lib.callbacks.H5L_iterate_t;
-import hdf.hdf5lib.structs.H5L_info_t;
-import hdf.hdf5lib.structs.H5O_info_t;
-
-import examples.groups.H5Ex_G_Iterate.H5O_type;
-
-class opdata implements H5L_iterate_opdata_t {
-    int recurs;
-    opdata prev;
-    H5O_token_t obj_token;
-}
-
-public class H5Ex_G_Traverse {
-
-    private static String FILE          = "h5ex_g_traverse.h5";
-    public static H5L_iterate_t iter_cb = new H5L_iter_callbackT();
-
-    private static void OpenGroup()
-    {
-        long file_id = HDF5Constants.H5I_INVALID_HID;
-        H5O_info_t infobuf;
-        opdata od = new opdata();
-
-        // Open file and initialize the operator data structure.
-        try {
-            file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-            if (file_id >= 0) {
-                infobuf      = H5.H5Oget_info(file_id);
-                od.recurs    = 0;
-                od.prev      = null;
-                od.obj_token = infobuf.token;
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Print the root group and formatting, begin iteration.
-        try {
-            System.out.println("/ {");
-            // H5L_iterate_t iter_cb = new H5L_iter_callbackT();
-            H5.H5Literate(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb,
-                          od);
-            System.out.println("}");
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close and release resources.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5Ex_G_Traverse.OpenGroup(); }
-}
-
-class H5L_iter_callbackT implements H5L_iterate_t {
-    public int callback(long group, String name, H5L_info_t info, H5L_iterate_opdata_t op_data)
-    {
-
-        H5O_info_t infobuf;
-        int return_val = 0;
-        opdata od      = (opdata)op_data;     // Type conversion
-        int spaces     = 2 * (od.recurs + 1); // Number of white spaces to prepend to output.
-
-        // Get type of the object and display its name and type.
-        // The name of the object is passed to this function by the Library.
-        try {
-            infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
-
-            for (int i = 0; i < spaces; i++)
-                System.out.print(" "); // Format output.
-            switch (H5O_type.get(infobuf.type)) {
-            case H5O_TYPE_GROUP:
-                System.out.println("Group: " + name + " { ");
-                // Check group object token against linked list of operator
-                // data structures. We will always run the check, as the
-                // reference count cannot be relied upon if there are
-                // symbolic links, and H5Oget_info_by_name always follows
-                // symbolic links. Alternatively we could use H5Lget_info
-                // and never recurse on groups discovered by symbolic
-                // links, however it could still fail if an object's
-                // reference count was manually manipulated with
-                // H5Odecr_refcount.
-                if (group_check(od, infobuf.token)) {
-                    for (int i = 0; i < spaces; i++)
-                        System.out.print(" ");
-                    System.out.println("  Warning: Loop detected!");
-                }
-                else {
-                    // Initialize new object of type opdata and begin
-                    // recursive iteration on the discovered
-                    // group. The new opdata is given a pointer to the
-                    // current one.
-                    opdata nextod          = new opdata();
-                    nextod.recurs          = od.recurs + 1;
-                    nextod.prev            = od;
-                    nextod.obj_token       = infobuf.token;
-                    H5L_iterate_t iter_cb2 = new H5L_iter_callbackT();
-                    return_val             = H5.H5Literate_by_name(group, name, HDF5Constants.H5_INDEX_NAME,
-                                                                   HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb2, nextod,
-                                                                   HDF5Constants.H5P_DEFAULT);
-                }
-                for (int i = 0; i < spaces; i++)
-                    System.out.print(" ");
-                System.out.println("}");
-                break;
-            case H5O_TYPE_DATASET:
-                System.out.println("Dataset: " + name);
-                break;
-            case H5O_TYPE_NAMED_DATATYPE:
-                System.out.println("Datatype: " + name);
-                break;
-            default:
-                System.out.println("Unknown: " + name);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        return return_val;
-    }
-
-    public boolean group_check(opdata od, H5O_token_t target_token)
-    {
-        if (od.obj_token.equals(target_token))
-            return true; // Object tokens match
-        else if (od.recurs == 0)
-            return false; // Root group reached with no matches
-        else
-            return group_check(od.prev, target_token); // Recursively examine the next node
-    }
-}
diff --git a/java/examples/groups/H5Ex_G_Visit.java b/java/examples/groups/H5Ex_G_Visit.java
deleted file mode 100644
index 1f2f9a161d8..00000000000
--- a/java/examples/groups/H5Ex_G_Visit.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
- This example shows how to recursively traverse a file
- using H5Ovisit and H5Lvisit.  The program prints all of
- the objects in the file specified in FILE, then prints all
- of the links in that file.  The default file used by this
- example implements the structure described in the User
- Guide, chapter 4, figure 26.
- ************************************************************/
-package examples.groups;
-
-import java.util.ArrayList;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.callbacks.H5L_iterate_opdata_t;
-import hdf.hdf5lib.callbacks.H5L_iterate_t;
-import hdf.hdf5lib.callbacks.H5O_iterate_opdata_t;
-import hdf.hdf5lib.callbacks.H5O_iterate_t;
-import hdf.hdf5lib.structs.H5L_info_t;
-import hdf.hdf5lib.structs.H5O_info_t;
-
-public class H5Ex_G_Visit {
-
-    private static String FILE = "groups/h5ex_g_visit.h5";
-
-    public static void main(String[] args)
-    {
-        try {
-            (new H5Ex_G_Visit()).VisitGroup();
-        }
-        catch (Exception ex) {
-            ex.printStackTrace();
-        }
-    }
-
-    private void VisitGroup() throws Exception
-    {
-
-        long file_id = HDF5Constants.H5I_INVALID_HID;
-
-        try {
-            // Open file
-            file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-
-            // Begin iteration using H5Ovisit
-            System.out.println("Objects in the file:");
-            H5O_iterate_opdata_t iter_data = new H5O_iter_data();
-            H5O_iterate_t iter_cb          = new H5O_iter_callback();
-            H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb,
-                        iter_data);
-            System.out.println();
-            // Repeat the same process using H5Lvisit
-            H5L_iterate_opdata_t iter_data2 = new H5L_iter_data();
-            H5L_iterate_t iter_cb2          = new H5L_iter_callback();
-            System.out.println("Links in the file:");
-            H5.H5Lvisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb2,
-                        iter_data2);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-        finally {
-            // Close and release resources.
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-    }
-
-    /************************************************************
-     * Operator function for H5Lvisit. This function simply retrieves the info for the object the current link
-     *points to, and calls the operator function for H5Ovisit.
-     ************************************************************/
-
-    private class idata {
-        public String link_name = null;
-        public int link_type    = -1;
-
-        idata(String name, int type)
-        {
-            this.link_name = name;
-            this.link_type = type;
-        }
-    }
-
-    private class H5L_iter_data implements H5L_iterate_opdata_t {
-        public ArrayList<idata> iterdata = new ArrayList<idata>();
-    }
-
-    private class H5L_iter_callback implements H5L_iterate_t {
-        public int callback(long group, String name, H5L_info_t info, H5L_iterate_opdata_t op_data)
-        {
-
-            idata id = new idata(name, info.type);
-            ((H5L_iter_data)op_data).iterdata.add(id);
-
-            H5O_info_t infobuf;
-            int ret = 0;
-            try {
-                // Get type of the object and display its name and type. The name of the object is passed to
-                // this function by the Library.
-                infobuf                = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
-                H5O_iterate_t iter_cbO = new H5O_iter_callback();
-                H5O_iterate_opdata_t iter_dataO = new H5O_iter_data();
-                ret                             = iter_cbO.callback(group, name, infobuf, iter_dataO);
-            }
-            catch (Exception e) {
-                e.printStackTrace();
-            }
-
-            return ret;
-        }
-    }
-
-    private class H5O_iter_data implements H5O_iterate_opdata_t {
-        public ArrayList<idata> iterdata = new ArrayList<idata>();
-    }
-
-    private class H5O_iter_callback implements H5O_iterate_t {
-        public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata_t op_data)
-        {
-            idata id = new idata(name, info.type);
-            ((H5O_iter_data)op_data).iterdata.add(id);
-
-            System.out.print("/"); /* Print root group in object path */
-
-            // Check if the current object is the root group, and if not print the full path name and type.
-
-            if (name.charAt(0) == '.') /* Root group, do not print '.' */
-                System.out.println("  (Group)");
-            else if (info.type == HDF5Constants.H5O_TYPE_GROUP)
-                System.out.println(name + "  (Group)");
-            else if (info.type == HDF5Constants.H5O_TYPE_DATASET)
-                System.out.println(name + "  (Dataset)");
-            else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE)
-                System.out.println(name + "  (Datatype)");
-            else
-                System.out.println(name + "  (Unknown)");
-
-            return 0;
-        }
-    }
-}
diff --git a/java/examples/groups/JavaGroupExample.sh.in b/java/examples/groups/JavaGroupExample.sh.in
deleted file mode 100644
index 477357955b8..00000000000
--- a/java/examples/groups/JavaGroupExample.sh.in
+++ /dev/null
@@ -1,381 +0,0 @@
-#! /bin/sh
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-#
-
-top_builddir=@top_builddir@
-top_srcdir=@top_srcdir@
-srcdir=@srcdir@
-IS_DARWIN="@H5_IS_DARWIN@"
-
-TESTNAME=EX_Groups
-EXIT_SUCCESS=0
-EXIT_FAILURE=1
-
-# Set up default variable values if not supplied by the user.
-RM='rm -rf'
-CMP='cmp'
-DIFF='diff -c'
-CP='cp'
-DIRNAME='dirname'
-BASENAME='basename'
-LS='ls'
-AWK='awk'
-
-nerrors=0
-
-# where the libs exist
-HDFLIB_HOME="$top_srcdir/java/lib"
-BLDDIR="."
-BLDLIBDIR="$BLDDIR/testlibs"
-BLDITERDIR="./groups"
-HDFTEST_HOME="$top_srcdir/java/examples/groups"
-JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
-TESTJARFILE=jar@PACKAGE_TARNAME@groups.jar
-test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
-test -d $BLDITERDIR || mkdir -p $BLDITERDIR
-
-######################################################################
-# library files
-# --------------------------------------------------------------------
-# All the library files copy from source directory to test directory
-# NOTE: Keep this framework to add/remove test files.
-#       This list are also used for checking exist.
-#       Comment '#' without space can be used.
-# --------------------------------------------------------------------
-LIST_LIBRARY_FILES="
-$top_builddir/src/.libs/libhdf5.*
-$top_builddir/java/src/jni/.libs/libhdf5_java.*
-$top_builddir/java/src/$JARFILE
-"
-LIST_JAR_TESTFILES="
-$HDFLIB_HOME/slf4j-api-2.0.6.jar
-$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar
-"
-LIST_ITER_FILES="
-$HDFTEST_HOME/h5ex_g_iterate.h5
-$HDFTEST_HOME/h5ex_g_visit.h5
-"
-LIST_DATA_FILES="
-$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Create.txt
-$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Iterate.txt
-$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Compact.txt
-$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Corder.txt
-$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Intermediate.txt
-$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Phase.txt
-$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Visit.txt
-"
-
-#
-# copy files from source dirs to test dir
-#
-COPY_LIBFILES="$LIST_LIBRARY_FILES"
-COPY_JARTESTFILES="$LIST_JAR_TESTFILES"
-
-COPY_LIBFILES_TO_BLDLIBDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_LIBFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -fR $tstfile $BLDLIBDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-                BNAME=`$BASENAME $tstfile`
-                if [ "$BNAME" = "libhdf5_java.dylib" ]; then
-                    COPIED_LIBHDF5_JAVA=1
-                fi
-            fi
-        fi
-    done
-    if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then
-       (cd $BLDLIBDIR; \
-         install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \
-         exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \
-         echo $exist_path; \
-         install_name_tool -change $exist_path @rpath/libhdf5.dylib libhdf5_java.dylib)
-    fi
-    # copy jar files. Used -f to make sure get a new copy
-    for tstfile in $COPY_JARTESTFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -fR $tstfile $BLDLIBDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_LIBFILES_AND_BLDLIBDIR()
-{
-    # skip rm if srcdir is same as destdir
-    # this occurs when build/test performed in source dir and
-    # make cp fail
-    SDIR=$HDFLIB_HOME
-    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-    INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-        $RM -rf $BLDLIBDIR
-    fi
-}
-
-COPY_DATAFILES="$LIST_DATA_FILES"
-
-COPY_DATAFILES_TO_BLDDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_DATAFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -f $tstfile $BLDDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_DATAFILES_AND_BLDDIR()
-{
-    $RM $BLDDIR/examples.groups.H5Ex_G_*.txt
-    $RM $BLDDIR/H5Ex_G_*.out
-    $RM $BLDDIR/H5Ex_G_*.h5
-}
-
-COPY_ITERFILES="$LIST_ITER_FILES"
-
-COPY_ITERFILES_TO_BLDITERDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_ITERFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDITERDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -f $tstfile $BLDITERDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_ITERFILES_AND_BLDITERDIR()
-{
-    # skip rm if srcdir is same as destdir
-    # this occurs when build/test performed in source dir and
-    # make cp fail
-    SDIR=`$DIRNAME $HDFTEST_HOME/h5ex_g_iterate.h5`
-    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-    INODE_DDIR=`$LS -i -d $BLDITERDIR | $AWK -F' ' '{print $1}'`
-    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-        $RM $BLDITERDIR
-    fi
-}
-
-# Print a line-line message left justified in a field of 70 characters
-# beginning with the word "Testing".
-#
-TESTING() {
-   SPACES="                                                               "
-   echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
-}
-
-# where Java is installed (requires jdk1.7.x)
-JAVAEXE=@JAVA@
-JAVAEXEFLAGS=@H5_JAVAFLAGS@
-
-###############################################################################
-#            DO NOT MODIFY BELOW THIS LINE
-###############################################################################
-
-# prepare for test
-COPY_LIBFILES_TO_BLDLIBDIR
-COPY_DATAFILES_TO_BLDDIR
-COPY_ITERFILES_TO_BLDITERDIR
-
-CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE""
-
-TEST=/usr/bin/test
-if [ ! -x /usr/bin/test ]
-then
-TEST=`which test`
-fi
-
-if $TEST -z "$CLASSPATH"; then
-        CLASSPATH=""
-fi
-CLASSPATH=$CPATH":"$CLASSPATH
-export CLASSPATH
-
-if $TEST -n "$JAVAPATH" ; then
-        PATH=$JAVAPATH":"$PATH
-        export PATH
-fi
-
-if $TEST -e /bin/uname; then
-   os_name=`/bin/uname -s`
-elif $TEST -e /usr/bin/uname; then
-   os_name=`/usr/bin/uname -s`
-else
-   os_name=unknown
-fi
-
-if $TEST -z "$LD_LIBRARY_PATH" ; then
-        LD_LIBRARY_PATH=""
-fi
-
-case  $os_name in
-    *)
-    LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
-    ;;
-esac
-
-export LD_LIBRARY_PATH
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Create"
-TESTING examples.groups.H5Ex_G_Create
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Create > H5Ex_G_Create.out)
-if diff H5Ex_G_Create.out examples.groups.H5Ex_G_Create.txt > /dev/null; then
-    echo "  PASSED      groups.H5Ex_G_Create"
-else
-    echo "**FAILED**    groups.H5Ex_G_Create"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Iterate"
-TESTING examples.groups.H5Ex_G_Iterate
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Iterate > H5Ex_G_Iterate.out)
-if diff H5Ex_G_Iterate.out examples.groups.H5Ex_G_Iterate.txt > /dev/null; then
-    echo "  PASSED      groups.H5Ex_G_Iterate"
-else
-    echo "**FAILED**    groups.H5Ex_G_Iterate"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Compact"
-TESTING examples.groups.H5Ex_G_Compact
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Compact > H5Ex_G_Compact.out)
-if diff H5Ex_G_Compact.out examples.groups.H5Ex_G_Compact.txt > /dev/null; then
-    echo "  PASSED      groups.H5Ex_G_Compact"
-else
-    echo "**FAILED**    groups.H5Ex_G_Compact"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Corder"
-TESTING examples.groups.H5Ex_G_Corder
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Corder > H5Ex_G_Corder.out)
-if diff H5Ex_G_Corder.out examples.groups.H5Ex_G_Corder.txt > /dev/null; then
-    echo "  PASSED      groups.H5Ex_G_Corder"
-else
-    echo "**FAILED**    groups.H5Ex_G_Corder"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Intermediate"
-TESTING examples.groups.H5Ex_G_Intermediate
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Intermediate > H5Ex_G_Intermediate.out)
-if diff H5Ex_G_Intermediate.out examples.groups.H5Ex_G_Intermediate.txt > /dev/null; then
-    echo "  PASSED      groups.H5Ex_G_Intermediate"
-else
-    echo "**FAILED**    groups.H5Ex_G_Intermediate"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Phase"
-TESTING examples.groups.H5Ex_G_Phase
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Phase > H5Ex_G_Phase.out)
-if diff H5Ex_G_Phase.out examples.groups.H5Ex_G_Phase.txt > /dev/null; then
-    echo "  PASSED      groups.H5Ex_G_Phase"
-else
-    echo "**FAILED**    groups.H5Ex_G_Phase"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Visit"
-TESTING examples.groups.H5Ex_G_Visit
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Visit > H5Ex_G_Visit.out)
-if diff H5Ex_G_Visit.out examples.groups.H5Ex_G_Visit.txt > /dev/null; then
-    echo "  PASSED      groups.H5Ex_G_Visit"
-else
-    echo "**FAILED**    groups.H5Ex_G_Visit"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-# Clean up temporary files/directories
-CLEAN_ITERFILES_AND_BLDITERDIR
-CLEAN_LIBFILES_AND_BLDLIBDIR
-CLEAN_DATAFILES_AND_BLDDIR
-
-# Report test results and exit
-if test $nerrors -eq 0 ; then
-    echo "All $TESTNAME tests passed."
-    exit $EXIT_SUCCESS
-else
-    echo "$TESTNAME tests failed with $nerrors errors."
-    exit $EXIT_FAILURE
-fi
diff --git a/java/examples/groups/Makefile.am b/java/examples/groups/Makefile.am
deleted file mode 100644
index a3fb774c5b7..00000000000
--- a/java/examples/groups/Makefile.am
+++ /dev/null
@@ -1,65 +0,0 @@
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-##
-## Makefile.am
-## Run automake to generate a Makefile.in from this file.
-##
-#
-# HDF5 Java Library Examples Makefile(.in)
-
-include $(top_srcdir)/config/commence.am
-
-# Mark this directory as part of the JNI API
-JAVA_API=yes
-
-JAVAROOT = .classes
-
-classes:
-	test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
-
-pkgpath = examples/groups
-hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
-CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH
-
-jarfile = jar$(PACKAGE_TARNAME)groups.jar
-
-AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
-
-TESTPACKAGE =
-
-noinst_JAVA = \
-    H5Ex_G_Create.java \
-    H5Ex_G_Iterate.java \
-    H5Ex_G_Compact.java \
-    H5Ex_G_Corder.java \
-    H5Ex_G_Intermediate.java \
-    H5Ex_G_Phase.java \
-    H5Ex_G_Visit.java
-
-
-$(jarfile): classnoinst.stamp classes
-	$(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
-
-noinst_DATA = $(jarfile)
-
-.PHONY: classes
-
-check_SCRIPTS = JavaGroupExample.sh
-TEST_SCRIPT = $(check_SCRIPTS)
-
-CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class JavaGroupExample.sh
-
-clean:
-	rm -rf $(JAVAROOT)/*
-	rm -f $(jarfile)
-	rm -f classnoinst.stamp
-
-include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/groups/h5ex_g_iterate.h5 b/java/examples/groups/h5ex_g_iterate.h5
deleted file mode 100644
index e4627035fb4..00000000000
Binary files a/java/examples/groups/h5ex_g_iterate.h5 and /dev/null differ
diff --git a/java/examples/groups/h5ex_g_visit.h5 b/java/examples/groups/h5ex_g_visit.h5
deleted file mode 100644
index d8267b14e87..00000000000
Binary files a/java/examples/groups/h5ex_g_visit.h5 and /dev/null differ
diff --git a/java/examples/intro/CMakeLists.txt b/java/examples/intro/CMakeLists.txt
deleted file mode 100644
index 685ef901347..00000000000
--- a/java/examples/intro/CMakeLists.txt
+++ /dev/null
@@ -1,107 +0,0 @@
-cmake_minimum_required (VERSION 3.18)
-project (HDFJAVA_EXAMPLES_INTRO Java)
-
-set (CMAKE_VERBOSE_MAKEFILE 1)
-
-set (HDF_JAVA_EXAMPLES
-    H5_CreateAttribute
-    H5_CreateDataset
-    H5_CreateFile
-    H5_CreateGroup
-    H5_CreateGroupAbsoluteRelative
-    H5_CreateGroupDataset
-    H5_ReadWrite
-)
-
-if (WIN32)
-  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
-else ()
-  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
-endif ()
-
-set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
-
-set (CMAKE_JAVA_CLASSPATH ".")
-foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
-  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
-endforeach ()
-
-foreach (example ${HDF_JAVA_EXAMPLES})
-  file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
-  "Main-Class: examples.intro.${example}
-"
-  )
-  add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
-  get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
-#  install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
-  get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
-  add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
-
-  #-----------------------------------------------------------------------------
-  # Add Target to clang-format
-  #-----------------------------------------------------------------------------
-  if (HDF5_ENABLE_FORMATTERS)
-    clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
-  endif ()
-endforeach ()
-
-set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
-
-set (CMAKE_JAVA_CLASSPATH ".")
-foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
-  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
-endforeach ()
-
-if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL)
-  get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME)
-  set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$<OR:$<CONFIG:Debug>,$<CONFIG:Developer>>:${CMAKE_DEBUG_POSTFIX}>;")
-
-  set (last_test "")
-  foreach (example ${HDF_JAVA_EXAMPLES})
-    add_test (
-        NAME JAVA_intro-${example}-clear-objects
-        COMMAND ${CMAKE_COMMAND} -E remove
-            ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-    )
-    if (last_test)
-      set_tests_properties (JAVA_intro-${example}-clear-objects PROPERTIES DEPENDS ${last_test})
-    endif ()
-
-    add_test (
-        NAME JAVA_intro-${example}-copy-objects
-        COMMAND ${CMAKE_COMMAND} -E copy_if_different
-            ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.intro.${example}.txt
-            ${HDFJAVA_EXAMPLES_INTRO_BINARY_DIR}/${example}.txt
-    )
-    set_tests_properties (JAVA_intro-${example}-copy-objects PROPERTIES DEPENDS JAVA_intro-${example}-clear-objects)
-
-    add_test (
-        NAME JAVA_intro-${example}
-        COMMAND "${CMAKE_COMMAND}"
-            -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
-            -D "TEST_PROGRAM=examples.intro.${example}"
-            -D "TEST_ARGS:STRING=${CMD_ARGS}"
-            -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${example}_JAR_FILE}"
-            -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
-            -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
-            -D "TEST_OUTPUT=intro/${example}.out"
-            -D "TEST_EXPECT=0"
-            -D "TEST_REFERENCE=intro/${example}.txt"
-            -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
-    )
-    set_tests_properties (JAVA_intro-${example} PROPERTIES
-        DEPENDS JAVA_intro-${example}-copy-objects
-    )
-
-    add_test (
-        NAME JAVA_intro-${example}-clean-objects
-        COMMAND ${CMAKE_COMMAND} -E remove
-            ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
-    )
-    set_tests_properties (JAVA_intro-${example}-clean-objects PROPERTIES
-        DEPENDS JAVA_intro-${example}
-    )
-    set (last_test "JAVA_intro-${example}-clean-objects")
-  endforeach ()
-
-endif ()
diff --git a/java/examples/intro/H5_CreateAttribute.java b/java/examples/intro/H5_CreateAttribute.java
deleted file mode 100644
index 22ecdaebd96..00000000000
--- a/java/examples/intro/H5_CreateAttribute.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-    Creating a dataset attribute.
- ************************************************************/
-
-package examples.intro;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5_CreateAttribute {
-    private static String FILENAME         = "H5_CreateAttribute.h5";
-    private static String DATASETNAME      = "dset";
-    private static final int DIM_X         = 4;
-    private static final int DIM_Y         = 6;
-    private static String DATASETATTRIBUTE = "Units";
-
-    private static void CreateDatasetAttribute()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long attribute_id = HDF5Constants.H5I_INVALID_HID;
-        long[] dims1      = {DIM_X, DIM_Y};
-        long[] dims       = {2};
-        int[] attr_data   = {100, 200};
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the data space for the dataset.
-        try {
-            dataspace_id = H5.H5Screate_simple(2, dims1, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE,
-                                          dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the data space for the attribute.
-        try {
-            dataspace_id = H5.H5Screate_simple(1, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create a dataset attribute.
-        try {
-            if ((dataset_id >= 0) && (dataspace_id >= 0))
-                attribute_id =
-                    H5.H5Acreate(dataset_id, DATASETATTRIBUTE, HDF5Constants.H5T_STD_I32BE, dataspace_id,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the attribute data.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, attr_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the attribute.
-        try {
-            if (attribute_id >= 0)
-                H5.H5Aclose(attribute_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the dataspace.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close to the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5_CreateAttribute.CreateDatasetAttribute(); }
-}
diff --git a/java/examples/intro/H5_CreateDataset.java b/java/examples/intro/H5_CreateDataset.java
deleted file mode 100644
index 20683e8f20b..00000000000
--- a/java/examples/intro/H5_CreateDataset.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-    Creating and closing a dataset.
- ************************************************************/
-
-package examples.intro;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5_CreateDataset {
-    private static String FILENAME    = "H5_CreateDataset.h5";
-    private static String DATASETNAME = "dset";
-    private static final int DIM_X    = 4;
-    private static final int DIM_Y    = 6;
-
-    private static void CreateDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the data space for the dataset.
-        try {
-            dataspace_id = H5.H5Screate_simple(2, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE,
-                                          dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // End access to the dataset and release resources used by it.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Terminate access to the data space.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5_CreateDataset.CreateDataset(); }
-}
diff --git a/java/examples/intro/H5_CreateFile.java b/java/examples/intro/H5_CreateFile.java
deleted file mode 100644
index 4323581f70f..00000000000
--- a/java/examples/intro/H5_CreateFile.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-    Creating and closing a file.
- ************************************************************/
-
-package examples.intro;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5_CreateFile {
-    static final String FILENAME = "H5_CreateFile.h5";
-
-    private static void CreateFile()
-    {
-        long file_id = HDF5Constants.H5I_INVALID_HID;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5_CreateFile.CreateFile(); }
-}
diff --git a/java/examples/intro/H5_CreateGroup.java b/java/examples/intro/H5_CreateGroup.java
deleted file mode 100644
index a276cbdf913..00000000000
--- a/java/examples/intro/H5_CreateGroup.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-    Creating and closing a group.
- ************************************************************/
-
-package examples.intro;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5_CreateGroup {
-    private static String FILENAME  = "H5_CreateGroup.h5";
-    private static String GROUPNAME = "MyGroup";
-
-    private static void CreateGroup()
-    {
-        long file_id  = HDF5Constants.H5I_INVALID_HID;
-        long group_id = HDF5Constants.H5I_INVALID_HID;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create a group in the file.
-        try {
-            if (file_id >= 0)
-                group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
-                                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group.
-        try {
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5_CreateGroup.CreateGroup(); }
-}
diff --git a/java/examples/intro/H5_CreateGroupAbsoluteRelative.java b/java/examples/intro/H5_CreateGroupAbsoluteRelative.java
deleted file mode 100644
index 2fb9c3c9f99..00000000000
--- a/java/examples/intro/H5_CreateGroupAbsoluteRelative.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-   Creating groups using absolute and relative names.
- ************************************************************/
-
-package examples.intro;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5_CreateGroupAbsoluteRelative {
-    private static String FILENAME    = "H5_CreateGroupAbsoluteRelative.h5";
-    private static String GROUPNAME   = "MyGroup";
-    private static String GROUPNAME_A = "GroupA";
-    private static String GROUPNAME_B = "GroupB";
-
-    private static void CreateGroupAbsoluteAndRelative()
-    {
-        long file_id   = HDF5Constants.H5I_INVALID_HID;
-        long group1_id = HDF5Constants.H5I_INVALID_HID;
-        long group2_id = HDF5Constants.H5I_INVALID_HID;
-        long group3_id = HDF5Constants.H5I_INVALID_HID;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create a group named "/MyGroup" in the file.
-        try {
-            if (file_id >= 0)
-                group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
-                                         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create group "Group_A" in group "MyGroup" using absolute name.
-        try {
-            if (file_id >= 0)
-                group2_id =
-                    H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
-                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create group "Group_B" in group "MyGroup" using relative name.
-        try {
-            if (group1_id >= 0)
-                group3_id = H5.H5Gcreate(group1_id, GROUPNAME_B, HDF5Constants.H5P_DEFAULT,
-                                         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group3.
-        try {
-            if (group3_id >= 0)
-                H5.H5Gclose(group3_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group2.
-        try {
-            if (group2_id >= 0)
-                H5.H5Gclose(group2_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group1.
-        try {
-            if (group1_id >= 0)
-                H5.H5Gclose(group1_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args)
-    {
-        H5_CreateGroupAbsoluteRelative.CreateGroupAbsoluteAndRelative();
-    }
-}
diff --git a/java/examples/intro/H5_CreateGroupDataset.java b/java/examples/intro/H5_CreateGroupDataset.java
deleted file mode 100644
index 1d7cf99f566..00000000000
--- a/java/examples/intro/H5_CreateGroupDataset.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-    Create two datasets within groups.
- ************************************************************/
-
-package examples.intro;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5_CreateGroupDataset {
-    private static String FILENAME     = "H5_CreateGroupDataset.h5";
-    private static String GROUPNAME    = "MyGroup";
-    private static String GROUPNAME_A  = "GroupA";
-    private static String DATASETNAME1 = "dset1";
-    private static String DATASETNAME2 = "dset2";
-    private static final int DIM1_X    = 3;
-    private static final int DIM1_Y    = 3;
-    private static final int DIM2_X    = 2;
-    private static final int DIM2_Y    = 10;
-
-    private static void h5_crtgrpd()
-    {
-        long file_id       = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id  = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id    = HDF5Constants.H5I_INVALID_HID;
-        long group_id      = HDF5Constants.H5I_INVALID_HID;
-        long group1_id     = HDF5Constants.H5I_INVALID_HID;
-        long group2_id     = HDF5Constants.H5I_INVALID_HID;
-        int[][] dset1_data = new int[DIM1_X][DIM1_Y];
-        int[][] dset2_data = new int[DIM2_X][DIM2_Y];
-        long[] dims1       = {DIM1_X, DIM1_Y};
-        long[] dims2       = {DIM2_X, DIM2_Y};
-
-        // Initialize the first dataset.
-        for (int indx = 0; indx < DIM1_X; indx++)
-            for (int jndx = 0; jndx < DIM1_Y; jndx++)
-                dset1_data[indx][jndx] = jndx + 1;
-
-        // Initialize the second dataset.
-        for (int indx = 0; indx < DIM2_X; indx++)
-            for (int jndx = 0; jndx < DIM2_Y; jndx++)
-                dset2_data[indx][jndx] = jndx + 1;
-
-        // Create a file.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-            // Create a group named "/MyGroup" in the file.
-            if (file_id >= 0) {
-                group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
-                                         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-                // Create group "Group_A" in group "MyGroup" using absolute name.
-                if (group1_id >= 0) {
-                    group2_id =
-                        H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
-                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-                    if (group2_id >= 0)
-                        H5.H5Gclose(group2_id);
-                }
-                if (group1_id >= 0)
-                    H5.H5Gclose(group1_id);
-            }
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the data space for the first dataset.
-        try {
-            dataspace_id = H5.H5Screate_simple(2, dims1, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset in group "MyGroup".
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0))
-                dataset_id = H5.H5Dcreate(
-                    file_id, "/" + GROUPNAME + "/" + DATASETNAME1, HDF5Constants.H5T_STD_I32BE, dataspace_id,
-                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the first dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset1_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the data space for the first dataset.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-            dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the first dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-            dataset_id = HDF5Constants.H5I_INVALID_HID;
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Open an existing group of the specified file.
-        try {
-            if (file_id >= 0)
-                group_id =
-                    H5.H5Gopen(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the data space for the second dataset.
-        try {
-            dataspace_id = H5.H5Screate_simple(2, dims2, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the second dataset in group "Group_A".
-        try {
-            if ((group_id >= 0) && (dataspace_id >= 0))
-                dataset_id = H5.H5Dcreate(group_id, DATASETNAME2, HDF5Constants.H5T_STD_I32BE, dataspace_id,
-                                          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the second dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset2_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the data space for the second dataset.
-        try {
-            if (dataspace_id >= 0)
-                H5.H5Sclose(dataspace_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the second dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the group.
-        try {
-            if (group_id >= 0)
-                H5.H5Gclose(group_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5_CreateGroupDataset.h5_crtgrpd(); }
-}
diff --git a/java/examples/intro/H5_ReadWrite.java b/java/examples/intro/H5_ReadWrite.java
deleted file mode 100644
index 379fe20b8d7..00000000000
--- a/java/examples/intro/H5_ReadWrite.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the COPYING file, which can be found at the root of the source code       *
- * distribution tree, or in https://www.hdfgroup.org/licenses.               *
- * If you do not have access to either file, you may request a copy from     *
- * help@hdfgroup.org.                                                        *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-   Writing and reading an existing dataset.
- ************************************************************/
-
-package examples.intro;
-
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
-public class H5_ReadWrite {
-    private static String FILENAME    = "H5_ReadWrite.h5";
-    private static String DATASETNAME = "dset";
-    private static final int DIM_X    = 4;
-    private static final int DIM_Y    = 6;
-
-    private static void ReadWriteDataset()
-    {
-        long file_id      = HDF5Constants.H5I_INVALID_HID;
-        long dataspace_id = HDF5Constants.H5I_INVALID_HID;
-        long dataset_id   = HDF5Constants.H5I_INVALID_HID;
-        long[] dims       = {DIM_X, DIM_Y};
-        int[][] dset_data = new int[DIM_X][DIM_Y];
-
-        // Initialize the dataset.
-        for (int indx = 0; indx < DIM_X; indx++)
-            for (int jndx = 0; jndx < DIM_Y; jndx++)
-                dset_data[indx][jndx] = indx * 6 + jndx + 1;
-
-        // Create a new file using default properties.
-        try {
-            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
-                                   HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the data space for the dataset.
-        try {
-            dataspace_id = H5.H5Screate_simple(2, dims, null);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Create the dataset.
-        try {
-            if ((file_id >= 0) && (dataspace_id >= 0))
-                dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE,
-                                          dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
-                                          HDF5Constants.H5P_DEFAULT);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Write the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
-                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the dataset.
-        try {
-            if (dataset_id >= 0)
-                H5.H5Dclose(dataset_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        // Close the file.
-        try {
-            if (file_id >= 0)
-                H5.H5Fclose(file_id);
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) { H5_ReadWrite.ReadWriteDataset(); }
-}
diff --git a/java/examples/intro/JavaIntroExample.sh.in b/java/examples/intro/JavaIntroExample.sh.in
deleted file mode 100644
index 56b6e572cd2..00000000000
--- a/java/examples/intro/JavaIntroExample.sh.in
+++ /dev/null
@@ -1,330 +0,0 @@
-#! /bin/sh
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-#
-
-top_builddir=@top_builddir@
-top_srcdir=@top_srcdir@
-srcdir=@srcdir@
-IS_DARWIN="@H5_IS_DARWIN@"
-
-TESTNAME=EX_Intro
-EXIT_SUCCESS=0
-EXIT_FAILURE=1
-
-# Set up default variable values if not supplied by the user.
-RM='rm -rf'
-CMP='cmp'
-DIFF='diff -c'
-CP='cp'
-DIRNAME='dirname'
-BASENAME='basename'
-LS='ls'
-AWK='awk'
-
-nerrors=0
-
-# where the libs exist
-HDFLIB_HOME="$top_srcdir/java/lib"
-BLDDIR="."
-BLDLIBDIR="$BLDDIR/testlibs"
-HDFTEST_HOME="$top_srcdir/java/examples/intro"
-JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
-TESTJARFILE=jar@PACKAGE_TARNAME@intro.jar
-test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
-
-######################################################################
-# library files
-# --------------------------------------------------------------------
-# All the library files copy from source directory to test directory
-# NOTE: Keep this framework to add/remove test files.
-#       This list are also used for checking exist.
-#       Comment '#' without space can be used.
-# --------------------------------------------------------------------
-LIST_LIBRARY_FILES="
-$top_builddir/src/.libs/libhdf5.*
-$top_builddir/java/src/jni/.libs/libhdf5_java.*
-$top_builddir/java/src/$JARFILE
-"
-LIST_JAR_TESTFILES="
-$HDFLIB_HOME/slf4j-api-2.0.6.jar
-$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar
-"
-LIST_DATA_FILES="
-$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateDataset.txt
-$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateAttribute.txt
-$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateFile.txt
-$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroup.txt
-$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt
-$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroupDataset.txt
-$HDFTEST_HOME/../testfiles/examples.intro.H5_ReadWrite.txt
-"
-
-#
-# copy files from source dirs to test dir
-#
-COPY_LIBFILES="$LIST_LIBRARY_FILES"
-COPY_JARTESTFILES="$LIST_JAR_TESTFILES"
-
-COPY_LIBFILES_TO_BLDLIBDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_LIBFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -fR $tstfile $BLDLIBDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-                BNAME=`$BASENAME $tstfile`
-                if [ "$BNAME" = "libhdf5_java.dylib" ]; then
-                    COPIED_LIBHDF5_JAVA=1
-                fi
-            fi
-        fi
-    done
-    if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then
-       (cd $BLDLIBDIR; \
-         install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \
-         exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \
-         echo $exist_path; \
-         install_name_tool -change $exist_path @rpath/libhdf5.dylib libhdf5_java.dylib)
-    fi
-    # copy jar files. Used -f to make sure get a new copy
-    for tstfile in $COPY_JARTESTFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -fR $tstfile $BLDLIBDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_LIBFILES_AND_BLDLIBDIR()
-{
-    # skip rm if srcdir is same as destdir
-    # this occurs when build/test performed in source dir and
-    # make cp fail
-    SDIR=$HDFLIB_HOME
-    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-    INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
-    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-        $RM -rf $BLDLIBDIR
-    fi
-}
-
-COPY_DATAFILES="$LIST_DATA_FILES"
-
-COPY_DATAFILES_TO_BLDDIR()
-{
-    # copy test files. Used -f to make sure get a new copy
-    for tstfile in $COPY_DATAFILES
-    do
-        # ignore '#' comment
-        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
-        RET=$?
-        if [ $RET -eq 1 ]; then
-            # skip cp if srcdir is same as destdir
-            # this occurs when build/test performed in source dir and
-            # make cp fail
-            SDIR=`$DIRNAME $tstfile`
-            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
-            INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
-            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
-                $CP -f $tstfile $BLDDIR
-                if [ $? -ne 0 ]; then
-                    echo "Error: FAILED to copy $tstfile ."
-
-                    # Comment out this to CREATE expected file
-                    exit $EXIT_FAILURE
-                fi
-            fi
-        fi
-    done
-}
-
-CLEAN_DATAFILES_AND_BLDDIR()
-{
-        $RM $BLDDIR/examples.intro.H5_*.txt
-        $RM $BLDDIR/H5_*.out
-        $RM $BLDDIR/H5_*.h5
-}
-
-# Print a line-line message left justified in a field of 70 characters
-# beginning with the word "Testing".
-#
-TESTING() {
-   SPACES="                                                               "
-   echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
-}
-
-# where Java is installed (requires jdk1.7.x)
-JAVAEXE=@JAVA@
-JAVAEXEFLAGS=@H5_JAVAFLAGS@
-
-###############################################################################
-#            DO NOT MODIFY BELOW THIS LINE
-###############################################################################
-
-# prepare for test
-COPY_LIBFILES_TO_BLDLIBDIR
-COPY_DATAFILES_TO_BLDDIR
-
-CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE""
-
-TEST=/usr/bin/test
-if [ ! -x /usr/bin/test ]
-then
-TEST=`which test`
-fi
-
-if $TEST -z "$CLASSPATH"; then
-        CLASSPATH=""
-fi
-CLASSPATH=$CPATH":"$CLASSPATH
-export CLASSPATH
-
-if $TEST -n "$JAVAPATH" ; then
-        PATH=$JAVAPATH":"$PATH
-        export PATH
-fi
-
-if $TEST -e /bin/uname; then
-   os_name=`/bin/uname -s`
-elif $TEST -e /usr/bin/uname; then
-   os_name=`/usr/bin/uname -s`
-else
-   os_name=unknown
-fi
-
-if $TEST -z "$LD_LIBRARY_PATH" ; then
-        LD_LIBRARY_PATH=""
-fi
-
-case  $os_name in
-    *)
-    LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
-    ;;
-esac
-
-export LD_LIBRARY_PATH
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateDataset"
-TESTING examples.intro.H5_CreateDataset
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateDataset > H5_CreateDataset.out)
-if diff H5_CreateDataset.out examples.intro.H5_CreateDataset.txt > /dev/null; then
-    echo "  PASSED      intro.H5_CreateDataset"
-else
-    echo "**FAILED**    intro.H5_CreateDataset"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateAttribute"
-TESTING examples.intro.H5_CreateAttribute
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateAttribute > H5_CreateAttribute.out)
-if diff H5_CreateAttribute.out examples.intro.H5_CreateAttribute.txt > /dev/null; then
-    echo "  PASSED      intro.H5_CreateAttribute"
-else
-    echo "**FAILED**    intro.H5_CreateAttribute"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateFile"
-TESTING examples.intro.H5_CreateFile
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateFile > H5_CreateFile.out)
-if diff H5_CreateFile.out examples.intro.H5_CreateFile.txt > /dev/null; then
-    echo "  PASSED      intro.H5_CreateFile"
-else
-    echo "**FAILED**    intro.H5_CreateFile"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroup"
-TESTING examples.intro.H5_CreateGroup
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroup > H5_CreateGroup.out)
-if diff H5_CreateGroup.out examples.intro.H5_CreateGroup.txt > /dev/null; then
-    echo "  PASSED      intro.H5_CreateGroup"
-else
-    echo "**FAILED**    intro.H5_CreateGroup"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupAbsoluteRelative"
-TESTING examples.intro.H5_CreateGroupAbsoluteRelative
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupAbsoluteRelative > H5_CreateGroupAbsoluteRelative.out)
-if diff H5_CreateGroupAbsoluteRelative.out examples.intro.H5_CreateGroupAbsoluteRelative.txt > /dev/null; then
-    echo "  PASSED      intro.H5_CreateGroupAbsoluteRelative"
-else
-    echo "**FAILED**    intro.H5_CreateGroupAbsoluteRelative"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupDataset"
-TESTING examples.intro.H5_CreateGroupDataset
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupDataset > H5_CreateGroupDataset.out)
-if diff H5_CreateGroupDataset.out examples.intro.H5_CreateGroupDataset.txt > /dev/null; then
-    echo "  PASSED      intro.H5_CreateGroupDataset"
-else
-    echo "**FAILED**    intro.H5_CreateGroupDataset"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_ReadWrite"
-TESTING examples.intro.H5_ReadWrite
-($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_ReadWrite > H5_ReadWrite.out)
-if diff H5_ReadWrite.out examples.intro.H5_ReadWrite.txt > /dev/null; then
-    echo "  PASSED      intro.H5_ReadWrite"
-else
-    echo "**FAILED**    intro.H5_ReadWrite"
-    nerrors="`expr $nerrors + 1`"
-fi
-
-# Clean up temporary files/directories
-CLEAN_LIBFILES_AND_BLDLIBDIR
-CLEAN_DATAFILES_AND_BLDDIR
-
-# Report test results and exit
-if test $nerrors -eq 0 ; then
-    echo "All $TESTNAME tests passed."
-    exit $EXIT_SUCCESS
-else
-    echo "$TESTNAME tests failed with $nerrors errors."
-    exit $EXIT_FAILURE
-fi
diff --git a/java/examples/intro/Makefile.am b/java/examples/intro/Makefile.am
deleted file mode 100644
index b6d6a1f3082..00000000000
--- a/java/examples/intro/Makefile.am
+++ /dev/null
@@ -1,64 +0,0 @@
-#
-# Copyright by The HDF Group.
-# All rights reserved.
-#
-# This file is part of HDF5.  The full HDF5 copyright notice, including
-# terms governing use, modification, and redistribution, is contained in
-# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://www.hdfgroup.org/licenses.
-# If you do not have access to either file, you may request a copy from
-# help@hdfgroup.org.
-##
-## Makefile.am
-## Run automake to generate a Makefile.in from this file.
-##
-#
-# HDF5 Java Library Examples Makefile(.in)
-
-include $(top_srcdir)/config/commence.am
-
-# Mark this directory as part of the JNI API
-JAVA_API=yes
-
-JAVAROOT = .classes
-
-classes:
-	test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
-
-pkgpath = examples/intro
-hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
-CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH
-
-jarfile = jar$(PACKAGE_TARNAME)intro.jar
-
-AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
-
-TESTPACKAGE =
-
-noinst_JAVA = \
-    H5_CreateAttribute.java \
-    H5_CreateDataset.java \
-    H5_CreateFile.java \
-    H5_CreateGroup.java \
-    H5_CreateGroupAbsoluteRelative.java \
-    H5_CreateGroupDataset.java \
-    H5_ReadWrite.java
-
-$(jarfile): classnoinst.stamp classes
-	$(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
-
-noinst_DATA = $(jarfile)
-
-.PHONY: classes
-
-check_SCRIPTS = JavaIntroExample.sh
-TEST_SCRIPT = $(check_SCRIPTS)
-
-CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class JavaIntroExample.sh
-
-clean:
-	rm -rf $(JAVAROOT)/*
-	rm -f $(jarfile)
-	rm -f classnoinst.stamp
-
-include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt
deleted file mode 100644
index 6fd810be378..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-Creating datasets...
-DS1 has allocation time H5D_ALLOC_TIME_LATE
-DS2 has allocation time H5D_ALLOC_TIME_EARLY
-
-Space for DS1 has not been allocated.
-Storage size for DS1 is: 0 bytes.
-Space for DS2 has been allocated.
-Storage size for DS2 is: 112 bytes.
-
-Writing data...
-
-Space for DS1 has been allocated.
-Storage size for DS1 is: 112 bytes.
-Space for DS2 has been allocated.
-Storage size for DS2 is: 112 bytes.
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt
deleted file mode 100644
index 676aebbe7d0..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Filter type is: H5Z_FILTER_FLETCHER32
-
-Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt
deleted file mode 100644
index 5f4c2deb7be..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-Original Data:
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
-
-Storage layout for DS1 is: H5D_CHUNKED
-
-Data as written to disk by hyberslabs:
- [ 0 1 0 0 1 0 0 1 ]
- [ 1 1 0 1 1 0 1 1 ]
- [ 0 0 0 0 0 0 0 0 ]
- [ 0 1 0 0 1 0 0 1 ]
- [ 1 1 0 1 1 0 1 1 ]
- [ 0 0 0 0 0 0 0 0 ]
-
-Data as read from disk by hyberslab:
- [ 0 1 0 0 0 0 0 1 ]
- [ 0 1 0 1 0 0 1 1 ]
- [ 0 0 0 0 0 0 0 0 ]
- [ 0 0 0 0 0 0 0 0 ]
- [ 0 1 0 1 0 0 1 1 ]
- [ 0 0 0 0 0 0 0 0 ]
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt
deleted file mode 100644
index e34f3c17fee..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-Storage layout for DS1 is: H5D_COMPACT
-
-Data for DS1 is: 
- [ 0 -1 -2 -3 -4 -5 -6 ]
- [ 0 0 0 0 0 0 0 ]
- [ 0 1 2 3 4 5 6 ]
- [ 0 2 4 6 8 10 12 ]
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt
deleted file mode 100644
index 5878149ef40..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-DS1 is stored in file: H5Ex_D_External.data
-DS1:
- [ 0 -1 -2 -3 -4 -5 -6 ]
- [ 0 0 0 0 0 0 0 ]
- [ 0 1 2 3 4 5 6 ]
- [ 0 2 4 6 8 10 12 ]
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt
deleted file mode 100644
index 68d826bb3f7..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-Dataset before being written to:
- [ 99 99 99 99 99 99 99 ]
- [ 99 99 99 99 99 99 99 ]
- [ 99 99 99 99 99 99 99 ]
- [ 99 99 99 99 99 99 99 ]
-
-Dataset after being written to:
- [ 0 -1 -2 -3 -4 -5 -6 ]
- [ 0 0 0 0 0 0 0 ]
- [ 0 1 2 3 4 5 6 ]
- [ 0 2 4 6 8 10 12 ]
-
-Dataset after extension:
- [ 0 -1 -2 -3 -4 -5 -6 99 99 99 ]
- [ 0 0 0 0 0 0 0 99 99 99 ]
- [ 0 1 2 3 4 5 6 99 99 99 ]
- [ 0 2 4 6 8 10 12 99 99 99 ]
- [ 99 99 99 99 99 99 99 99 99 99 ]
- [ 99 99 99 99 99 99 99 99 99 99 ]
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt
deleted file mode 100644
index 255a5615db8..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Filter type is: H5Z_FILTER_DEFLATE
-
-Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt
deleted file mode 100644
index 823dfcc5a8d..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt
+++ /dev/null
@@ -1,24 +0,0 @@
-Original Data:
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
- [ 1 1 1 1 1 1 1 1 ]
-
-Data as written to disk by hyberslabs:
- [ 0 1 0 0 1 0 0 1 ]
- [ 1 1 0 1 1 0 1 1 ]
- [ 0 0 0 0 0 0 0 0 ]
- [ 0 1 0 0 1 0 0 1 ]
- [ 1 1 0 1 1 0 1 1 ]
- [ 0 0 0 0 0 0 0 0 ]
-
-Data as read from disk by hyberslab:
- [ 0 1 0 0 0 0 0 1 ]
- [ 0 1 0 1 0 0 1 1 ]
- [ 0 0 0 0 0 0 0 0 ]
- [ 0 0 0 0 0 0 0 0 ]
- [ 0 1 0 1 0 0 1 1 ]
- [ 0 0 0 0 0 0 0 0 ]
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt
deleted file mode 100644
index a768ba07860..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Filter type is: H5Z_FILTER_NBIT
-
-Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt
deleted file mode 100644
index e021029e981..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-DS1:
- [ 0 -1 -2 -3 -4 -5 -6 ]
- [ 0 0 0 0 0 0 0 ]
- [ 0 1 2 3 4 5 6 ]
- [ 0 2 4 6 8 10 12 ]
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt
deleted file mode 100644
index ea95f110191..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Filter 0: Type is: H5Z_FILTER_SHUFFLE
-
-Filter 1: Type is: H5Z_FILTER_DEFLATE
-
-Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt
deleted file mode 100644
index 9025ce292d7..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Maximum value in write buffer is: 106.666667
-Minimum value in write buffer is: 1.769231
-Filter type is: H5Z_FILTER_SCALEOFFSET
-
-Maximum value in DS1 is: 106.661698
-Minimum value in DS1 is: 1.769231
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt
deleted file mode 100644
index 48d0d8c52c9..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Filter type is: H5Z_FILTER_SCALEOFFSET
-
-Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt
deleted file mode 100644
index a1c0d19cc24..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Filter type is: H5Z_FILTER_SZIP
-
-Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt
deleted file mode 100644
index 05257bc902a..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-Original Data:
- [ 0  -1  -2  -3  -4  -5  -6 ]
- [ 0  0  0  0  0  0  0 ]
- [ 0  1  2  3  4  5  6 ]
- [ 0  2  4  6  8  10  12 ]
-Data as written with transform 'x+1'
- [ 1  0  -1  -2  -3  -4  -5 ]
- [ 1  1  1  1  1  1  1 ]
- [ 1  2  3  4  5  6  7 ]
- [ 1  3  5  7  9  11  13 ]
-Data as written with transform  'x+1' and read with transform  'x-1'
- [ 0  -1  -2  -3  -4  -5  -6 ]
- [ 0  0  0  0  0  0  0 ]
- [ 0  1  2  3  4  5  6 ]
- [ 0  2  4  6  8  10  12 ]
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
deleted file mode 100644
index d3a7281f005..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-Dataset before extension:
- [ 0 -1 -2 -3 -4 -5 -6 ]
- [ 0 0 0 0 0 0 0 ]
- [ 0 1 2 3 4 5 6 ]
- [ 0 2 4 6 8 10 12 ]
-
-Dataset after extension:
- [ 0 -1 -2 -3 -4 -5 -6 7 8 9 ]
- [ 0 0 0 0 0 0 0 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 2 4 6 8 10 12 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
deleted file mode 100644
index 9e362813d97..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-Dataset before extension:
- [ 0 -1 -2 -3 -4 -5 -6 ]
- [ 0 0 0 0 0 0 0 ]
- [ 0 1 2 3 4 5 6 ]
- [ 0 2 4 6 8 10 12 ]
-
-Filter type is: H5Z_FILTER_DEFLATE
-
-Dataset after extension:
- [ 0 -1 -2 -3 -4 -5 -6 7 8 9 ]
- [ 0 0 0 0 0 0 0 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 2 4 6 8 10 12 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
-
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
deleted file mode 100644
index 15eee16d7da..00000000000
--- a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-Dataset before extension:
- [ 0 -1 -2 -3 -4 -5 -6 ]
- [ 0 0 0 0 0 0 0 ]
- [ 0 1 2 3 4 5 6 ]
- [ 0 2 4 6 8 10 12 ]
-
-Dataset after extension:
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
- [ 0 1 2 3 4 5 6 7 8 9 ]
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt
deleted file mode 100644
index 7bcd8fa8fce..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-DS1 [0]:
- [0 0 0 0 0 ]
- [0 -1 -2 -3 -4 ]
- [0 -2 -4 -6 -8 ]
-
-DS1 [1]:
- [0 1 2 3 4 ]
- [1 1 1 1 1 ]
- [2 1 0 -1 -2 ]
-
-DS1 [2]:
- [0 2 4 6 8 ]
- [2 3 4 5 6 ]
- [4 4 4 4 4 ]
-
-DS1 [3]:
- [0 3 6 9 12 ]
- [3 5 7 9 11 ]
- [6 7 8 9 10 ]
-
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt
deleted file mode 100644
index 7d27c0bbf3a..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-A1 [0]:
- [0 0 0 0 0 ]
- [0 -1 -2 -3 -4 ]
- [0 -2 -4 -6 -8 ]
-
-A1 [1]:
- [0 1 2 3 4 ]
- [1 1 1 1 1 ]
- [2 1 0 -1 -2 ]
-
-A1 [2]:
- [0 2 4 6 8 ]
- [2 3 4 5 6 ]
- [4 4 4 4 4 ]
-
-A1 [3]:
- [0 3 6 9 12 ]
- [3 5 7 9 11 ]
- [6 7 8 9 10 ]
-
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt
deleted file mode 100644
index 57769b21c49..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-DS1:
- [{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}{1, 0, 3, 3}{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}]
- [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
- [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
- [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt
deleted file mode 100644
index 683bc7f8c36..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-A1:
- [{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}{1, 0, 3, 3}{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}]
- [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
- [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
- [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt
deleted file mode 100644
index e6d0befe4b0..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Named datatype:  Sensor_Type:
-   Class: H5T_COMPOUND
-    Serial number
-    Location
-    Temperature (F)
-    Pressure (inHg)
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt
deleted file mode 100644
index 0505c7840cb..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-DS1 [0]:
-Serial number   : 1153
-Location        : Exterior (static)
-Temperature (F) : 53.23
-Pressure (inHg) : 24.57
-
-DS1 [1]:
-Serial number   : 1184
-Location        : Intake
-Temperature (F) : 55.12
-Pressure (inHg) : 22.95
-
-DS1 [2]:
-Serial number   : 1027
-Location        : Intake manifold
-Temperature (F) : 103.55
-Pressure (inHg) : 31.23
-
-DS1 [3]:
-Serial number   : 1313
-Location        : Exhaust manifold
-Temperature (F) : 1252.89
-Pressure (inHg) : 84.11
-
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt
deleted file mode 100644
index dd77f8dc93d..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-A1 [0]:
-Serial number   : 1153
-Location        : Exterior (static)
-Temperature (F) : 53.23
-Pressure (inHg) : 24.57
-
-A1 [1]:
-Serial number   : 1184
-Location        : Intake
-Temperature (F) : 55.12
-Pressure (inHg) : 22.95
-
-A1 [2]:
-Serial number   : 1027
-Location        : Intake manifold
-Temperature (F) : 103.55
-Pressure (inHg) : 31.23
-
-A1 [3]:
-Serial number   : 1313
-Location        : Exhaust manifold
-Temperature (F) : 1252.89
-Pressure (inHg) : 84.11
-
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt
deleted file mode 100644
index 85d8ced3760..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-DS1:
- [ 0.0000 1.0000 2.0000 3.0000 4.0000 5.0000 6.0000]
- [ 2.0000 1.6667 2.4000 3.2857 4.2222 5.1818 6.1538]
- [ 4.0000 2.3333 2.8000 3.5714 4.4444 5.3636 6.3077]
- [ 6.0000 3.0000 3.2000 3.8571 4.6667 5.5455 6.4615]
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt
deleted file mode 100644
index cfa1f9261cb..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-A1:
- [ 0.0000 1.0000 2.0000 3.0000 4.0000 5.0000 6.0000]
- [ 2.0000 1.6667 2.4000 3.2857 4.2222 5.1818 6.1538]
- [ 4.0000 2.3333 2.8000 3.5714 4.4444 5.3636 6.3077]
- [ 6.0000 3.0000 3.2000 3.8571 4.6667 5.5455 6.4615]
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt
deleted file mode 100644
index f686bd1e121..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-DS1:
- [ 0 -1 -2 -3 -4 -5 -6]
- [ 0 0 0 0 0 0 0]
- [ 0 1 2 3 4 5 6]
- [ 0 2 4 6 8 10 12]
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt
deleted file mode 100644
index dccd4a6c58e..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-A1:
- [ 0 -1 -2 -3 -4 -5 -6]
- [ 0 0 0 0 0 0 0]
- [ 0 1 2 3 4 5 6]
- [ 0 2 4 6 8 10 12]
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt
deleted file mode 100644
index d8afa56b751..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-DS1[0]:
-  ->H5G_GROUP: /G1
-DS1[1]:
-  ->H5G_DATASET: /DS2
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt
deleted file mode 100644
index 3fabd66bb62..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A1[0]:
-  ->H5G_GROUP: /G1
-A1[1]:
-  ->H5G_DATASET: /DS2
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt
deleted file mode 100644
index fb742367e45..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Datatype tag for DS1 is: "Character array"
-DS1[0]: OPAQUE0
-DS1[1]: OPAQUE1
-DS1[2]: OPAQUE2
-DS1[3]: OPAQUE3
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt
deleted file mode 100644
index bc9a73050ae..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Datatype tag for A1 is: "Character array"
-A1[0]: OPAQUE0
-A1[1]: OPAQUE1
-A1[2]: OPAQUE2
-A1[3]: OPAQUE3
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt
deleted file mode 100644
index 4df6a41d6ba..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-DS1 [0]: Parting
-DS1 [1]: is such
-DS1 [2]: sweet
-DS1 [3]: sorrow.
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt
deleted file mode 100644
index 4df6a41d6ba..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-DS1 [0]: Parting
-DS1 [1]: is such
-DS1 [2]: sweet
-DS1 [3]: sorrow.
-
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt
deleted file mode 100644
index 0322953e602..00000000000
--- a/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-DS1 [0]: Parting
-DS1 [1]: is such
-DS1 [2]: sweet
-DS1 [3]: sorrow.
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt
deleted file mode 100644
index 0a88d3fdce1..00000000000
--- a/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Group storage type for H5Ex_G_Compact1.h5 is: H5G_STORAGE_TYPE_SYMBOL_TABLE
-File size for H5Ex_G_Compact1.h5 is: 1832 bytes
-
-Group storage type for H5Ex_G_Compact2.h5 is: H5G_STORAGE_TYPE_COMPACT
-File size for H5Ex_G_Compact2.h5 is: 342 bytes
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt
deleted file mode 100644
index 2d959fc4bc6..00000000000
--- a/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Traversing group using alphabetical indices:
-Index 0: 5
-Index 1: D
-Index 2: F
-Index 3: H
-Traversing group using creation order indices:
-Index 0: H
-Index 1: D
-Index 2: F
-Index 3: 5
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Create.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Create.txt
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt
deleted file mode 100644
index 65a0fc2051a..00000000000
--- a/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Objects in the file_id:
-/  (Group)
-/G1  (Group)
-/G1/G2  (Group)
-/G1/G2/G3  (Group)
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt
deleted file mode 100644
index 66a4ae927ff..00000000000
--- a/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Objects in root group:
-  Dataset: DS1
-  Datatype: DT1
-  Group: G1
-  Dataset: L1
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt
deleted file mode 100644
index 9e666d4cfea..00000000000
--- a/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-1 Group : Storage type is H5G_STORAGE_TYPE_COMPACT
-2 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
-3 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
-4 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
-5 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
-6 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
-7 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
-
-6 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
-5 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
-4 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
-3 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
-2 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
-1 Group : Storage type is H5G_STORAGE_TYPE_COMPACT
-0 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt
deleted file mode 100644
index 126a5888404..00000000000
--- a/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-Objects in the file:
-/  (Group)
-/group1  (Group)
-/group1/dset1  (Dataset)
-/group1/group3  (Group)
-/group1/group3/group4  (Group)
-/group1/group3/group4/group1  (Group)
-/group1/group3/group4/group2  (Group)
-
-Links in the file:
-/group1  (Group)
-/group1/dset1  (Dataset)
-/group1/group3  (Group)
-/group1/group3/dset2  (Dataset)
-/group1/group3/group4  (Group)
-/group1/group3/group4/group1  (Group)
-/group1/group3/group4/group1/group5  (Group)
-/group1/group3/group4/group2  (Group)
-/group2  (Group)
diff --git a/java/examples/testfiles/examples.intro.H5_CreateAttribute.txt b/java/examples/testfiles/examples.intro.H5_CreateAttribute.txt
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/java/examples/testfiles/examples.intro.H5_CreateDataset.txt b/java/examples/testfiles/examples.intro.H5_CreateDataset.txt
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/java/examples/testfiles/examples.intro.H5_CreateFile.txt b/java/examples/testfiles/examples.intro.H5_CreateFile.txt
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroup.txt b/java/examples/testfiles/examples.intro.H5_CreateGroup.txt
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt b/java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt b/java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/java/examples/testfiles/examples.intro.H5_ReadWrite.txt b/java/examples/testfiles/examples.intro.H5_ReadWrite.txt
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 824905ba93f..fb2769d7023 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -47,6 +47,20 @@ New Features
 
     Configuration:
     -------------
+    - Incorporated HDF5 examples repository into HDF5 library.
+
+      The HDF5Examples folder is equivalent to the repository hdf5-examples.
+      As such it can build and test the examples during library build or after
+      the library is installed. Previously, the hdf5-examples repository archives were
+      downloaded for packaging with the library. Now the examples can be built
+      and tested without a packaged install of the library.
+
+      However, to maintain the ability to use the HDF5Examples with an installed
+      library, it is necessary to translate or sync the option names from those
+      used by the library to those used by the examples. The typical pattern is:
+            <example option> = <library option>
+            HDF_BUILD_FORTRAN = ${HDF5_BUILD_FORTRAN}
+
     - Added new option for CMake to mark tests as SKIPPED.
 
       HDF5_DISABLE_TESTS_REGEX is a REGEX string that will be checked with
diff --git a/release_docs/USING_HDF5_VS.txt b/release_docs/USING_HDF5_VS.txt
index f0edcbbf4f4..39c66ea940d 100644
--- a/release_docs/USING_HDF5_VS.txt
+++ b/release_docs/USING_HDF5_VS.txt
@@ -11,6 +11,9 @@ be found in the USING_HDF5_CMake.txt file found in this folder.
 
 NOTE: Building applications with the dynamic/shared hdf5 libraries requires
       that the "H5_BUILT_AS_DYNAMIC_LIB" compile definition be used.
+      Go to "Project" and select "Properties", find "Configuration Properties",
+      and then "C/C++" and then "Preprocessor".
+      Add H5_BUILT_AS_DYNAMIC_LIB to Preprocessor definitions.
 
 The following two sections are helpful if you do not use CMake to build
 your applications.
diff --git a/tools/libtest/CMakeLists.txt b/tools/libtest/CMakeLists.txt
index f6d8912dcbf..357856001fc 100644
--- a/tools/libtest/CMakeLists.txt
+++ b/tools/libtest/CMakeLists.txt
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_LIBTEST C)
 #-----------------------------------------------------------------------------
 add_executable (h5tools_test_utils ${HDF5_TOOLS_LIBTEST_SOURCE_DIR}/h5tools_test_utils.c)
 target_compile_options(h5tools_test_utils PRIVATE "${HDF5_CMAKE_C_FLAGS}")
-target_include_directories(h5tools_test_utils PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+target_include_directories(h5tools_test_utils PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
 if (BUILD_STATIC_LIBS)
   TARGET_C_PROPERTIES (h5tools_test_utils STATIC)
   target_link_libraries (h5tools_test_utils PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
diff --git a/tools/src/h5copy/CMakeLists.txt b/tools/src/h5copy/CMakeLists.txt
index efd38cdd73b..5b79a85b9fd 100644
--- a/tools/src/h5copy/CMakeLists.txt
+++ b/tools/src/h5copy/CMakeLists.txt
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5COPY C)
 # --------------------------------------------------------------------
 if (BUILD_STATIC_LIBS)
   add_executable (h5copy ${HDF5_TOOLS_SRC_H5COPY_SOURCE_DIR}/h5copy.c)
-  target_include_directories (h5copy PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5copy PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5copy PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5copy STATIC)
   target_link_libraries (h5copy PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -18,7 +18,7 @@ endif ()
 
 if (BUILD_SHARED_LIBS)
   add_executable (h5copy-shared ${HDF5_TOOLS_SRC_H5COPY_SOURCE_DIR}/h5copy.c)
-  target_include_directories (h5copy-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5copy-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5copy-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5copy-shared SHARED)
   target_link_libraries (h5copy-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
diff --git a/tools/src/h5diff/CMakeLists.txt b/tools/src/h5diff/CMakeLists.txt
index f01d1aa5c7d..8de7c61f6a7 100644
--- a/tools/src/h5diff/CMakeLists.txt
+++ b/tools/src/h5diff/CMakeLists.txt
@@ -10,7 +10,7 @@ if (BUILD_STATIC_LIBS)
       ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_main.c
       ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_common.h
   )
-  target_include_directories (h5diff PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5diff PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5diff PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   #target_compile_definitions (h5diff PRIVATE H5_TOOLS_DEBUG)
   TARGET_C_PROPERTIES (h5diff STATIC)
@@ -26,7 +26,7 @@ if (BUILD_SHARED_LIBS)
       ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_main.c
       ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_common.h
   )
-  target_include_directories (h5diff-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5diff-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5diff-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   #target_compile_definitions (h5diff-shared PRIVATE H5_TOOLS_DEBUG)
   TARGET_C_PROPERTIES (h5diff-shared SHARED)
@@ -54,7 +54,7 @@ if (H5_HAVE_PARALLEL)
         ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_common.c
         ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/ph5diff_main.c
     )
-    target_include_directories (ph5diff PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+    target_include_directories (ph5diff PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
     target_compile_options(ph5diff PRIVATE "${HDF5_CMAKE_C_FLAGS}")
     TARGET_C_PROPERTIES (ph5diff STATIC)
     target_link_libraries (ph5diff PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>")
@@ -67,7 +67,7 @@ if (H5_HAVE_PARALLEL)
         ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_common.c
         ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/ph5diff_main.c
     )
-    target_include_directories (ph5diff-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+    target_include_directories (ph5diff-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
     target_compile_options(ph5diff-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
     TARGET_C_PROPERTIES (ph5diff-shared SHARED)
     target_link_libraries (ph5diff-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>")
diff --git a/tools/src/h5dump/CMakeLists.txt b/tools/src/h5dump/CMakeLists.txt
index f382020e736..32849496852 100644
--- a/tools/src/h5dump/CMakeLists.txt
+++ b/tools/src/h5dump/CMakeLists.txt
@@ -15,7 +15,7 @@ if (BUILD_STATIC_LIBS)
       ${HDF5_TOOLS_SRC_H5DUMP_SOURCE_DIR}/h5dump_ddl.h
       ${HDF5_TOOLS_SRC_H5DUMP_SOURCE_DIR}/h5dump_xml.h
   )
-  target_include_directories (h5dump PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5dump PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5dump PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5dump STATIC)
   target_link_libraries (h5dump PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -36,7 +36,7 @@ if (BUILD_SHARED_LIBS)
       ${HDF5_TOOLS_SRC_H5DUMP_SOURCE_DIR}/h5dump_ddl.h
       ${HDF5_TOOLS_SRC_H5DUMP_SOURCE_DIR}/h5dump_xml.h
   )
-  target_include_directories (h5dump-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5dump-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5dump-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5dump-shared SHARED)
   target_link_libraries (h5dump-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
diff --git a/tools/src/h5format_convert/CMakeLists.txt b/tools/src/h5format_convert/CMakeLists.txt
index d1e21582d0f..2a7e3cb32bd 100644
--- a/tools/src/h5format_convert/CMakeLists.txt
+++ b/tools/src/h5format_convert/CMakeLists.txt
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5FC C)
 # --------------------------------------------------------------------
 if (BUILD_STATIC_LIBS)
   add_executable (h5format_convert ${HDF5_TOOLS_SRC_H5FC_SOURCE_DIR}/h5format_convert.c)
-  target_include_directories (h5format_convert PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5format_convert PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5format_convert PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5format_convert STATIC)
   target_link_libraries (h5format_convert PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -17,7 +17,7 @@ if (BUILD_STATIC_LIBS)
 endif ()
 if (BUILD_SHARED_LIBS)
   add_executable (h5format_convert-shared ${HDF5_TOOLS_SRC_H5FC_SOURCE_DIR}/h5format_convert.c)
-  target_include_directories (h5format_convert-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5format_convert-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5format_convert-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5format_convert-shared SHARED)
   target_link_libraries (h5format_convert-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
diff --git a/tools/src/h5import/CMakeLists.txt b/tools/src/h5import/CMakeLists.txt
index b2337cd51d0..f8268a1b020 100644
--- a/tools/src/h5import/CMakeLists.txt
+++ b/tools/src/h5import/CMakeLists.txt
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5IMPORT C)
 # --------------------------------------------------------------------
 if (BUILD_STATIC_LIBS)
   add_executable (h5import ${HDF5_TOOLS_SRC_H5IMPORT_SOURCE_DIR}/h5import.c ${HDF5_TOOLS_SRC_H5IMPORT_SOURCE_DIR}/h5import.h)
-  target_include_directories (h5import PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5import PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5import STATIC)
   target_link_libraries (h5import PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
   #set_target_properties (h5import PROPERTIES COMPILE_DEFINITIONS H5DEBUGIMPORT)
@@ -18,7 +18,7 @@ endif ()
 
 if (BUILD_SHARED_LIBS)
   add_executable (h5import-shared ${HDF5_TOOLS_SRC_H5IMPORT_SOURCE_DIR}/h5import.c ${HDF5_TOOLS_SRC_H5IMPORT_SOURCE_DIR}/h5import.h)
-  target_include_directories (h5import-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5import-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5import-shared SHARED)
   target_link_libraries (h5import-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
   #set_target_properties (h5import-shared PROPERTIES COMPILE_DEFINITIONS H5DEBUGIMPORT)
diff --git a/tools/src/h5jam/CMakeLists.txt b/tools/src/h5jam/CMakeLists.txt
index 8642d6f7999..7efd0d8462c 100644
--- a/tools/src/h5jam/CMakeLists.txt
+++ b/tools/src/h5jam/CMakeLists.txt
@@ -6,14 +6,14 @@ project (HDF5_TOOLS_SRC_H5JAM C)
 # --------------------------------------------------------------------
 if (BUILD_STATIC_LIBS)
   add_executable (h5jam ${HDF5_TOOLS_SRC_H5JAM_SOURCE_DIR}/h5jam.c)
-  target_include_directories (h5jam PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5jam PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5jam STATIC)
   target_link_libraries (h5jam PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
   set_target_properties (h5jam PROPERTIES FOLDER tools)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5jam")
 
   add_executable (h5unjam ${HDF5_TOOLS_SRC_H5JAM_SOURCE_DIR}/h5unjam.c)
-  target_include_directories (h5unjam PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5unjam PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5unjam STATIC)
   target_link_libraries (h5unjam PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
   set_target_properties (h5unjam PROPERTIES FOLDER tools)
@@ -27,14 +27,14 @@ endif ()
 
 if (BUILD_SHARED_LIBS)
   add_executable (h5jam-shared ${HDF5_TOOLS_SRC_H5JAM_SOURCE_DIR}/h5jam.c)
-  target_include_directories (h5jam-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5jam-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5jam-shared SHARED)
   target_link_libraries (h5jam-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
   set_target_properties (h5jam-shared PROPERTIES FOLDER tools)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5jam-shared")
 
   add_executable (h5unjam-shared ${HDF5_TOOLS_SRC_H5JAM_SOURCE_DIR}/h5unjam.c)
-  target_include_directories (h5unjam-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5unjam-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5unjam-shared SHARED)
   target_link_libraries (h5unjam-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
   set_target_properties (h5unjam-shared PROPERTIES FOLDER tools)
diff --git a/tools/src/h5ls/CMakeLists.txt b/tools/src/h5ls/CMakeLists.txt
index 94992538a46..67122a10d5a 100644
--- a/tools/src/h5ls/CMakeLists.txt
+++ b/tools/src/h5ls/CMakeLists.txt
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5LS C)
 #-----------------------------------------------------------------------------
 if (BUILD_STATIC_LIBS)
   add_executable (h5ls ${HDF5_TOOLS_SRC_H5LS_SOURCE_DIR}/h5ls.c)
-  target_include_directories (h5ls PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5ls PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5ls PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   #target_compile_definitions(h5ls PRIVATE H5_TOOLS_DEBUG)
   TARGET_C_PROPERTIES (h5ls STATIC)
@@ -19,7 +19,7 @@ endif ()
 
 if (BUILD_SHARED_LIBS)
   add_executable (h5ls-shared ${HDF5_TOOLS_SRC_H5LS_SOURCE_DIR}/h5ls.c)
-  target_include_directories (h5ls-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5ls-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5ls-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   #target_compile_definitions(h5ls-shared PRIVATE H5_TOOLS_DEBUG)
   TARGET_C_PROPERTIES (h5ls-shared SHARED)
diff --git a/tools/src/h5repack/CMakeLists.txt b/tools/src/h5repack/CMakeLists.txt
index 360fb0f1bd0..ea1ee8092d2 100644
--- a/tools/src/h5repack/CMakeLists.txt
+++ b/tools/src/h5repack/CMakeLists.txt
@@ -17,7 +17,7 @@ set (REPACK_COMMON_SOURCES
 
 if (BUILD_STATIC_LIBS)
   add_executable (h5repack ${REPACK_COMMON_SOURCES} ${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR}/h5repack_main.c)
-  target_include_directories (h5repack PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5repack PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5repack PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5repack STATIC)
   target_link_libraries (h5repack PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -29,7 +29,7 @@ endif ()
 
 if (BUILD_SHARED_LIBS)
   add_executable (h5repack-shared ${REPACK_COMMON_SOURCES} ${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR}/h5repack_main.c)
-  target_include_directories (h5repack-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5repack-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5repack-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5repack-shared SHARED)
   target_link_libraries (h5repack-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
diff --git a/tools/src/h5stat/CMakeLists.txt b/tools/src/h5stat/CMakeLists.txt
index c0c0b32bfab..c3aef5fd990 100644
--- a/tools/src/h5stat/CMakeLists.txt
+++ b/tools/src/h5stat/CMakeLists.txt
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5STAT C)
 # --------------------------------------------------------------------
 if (BUILD_STATIC_LIBS)
   add_executable (h5stat ${HDF5_TOOLS_SRC_H5STAT_SOURCE_DIR}/h5stat.c)
-  target_include_directories (h5stat PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5stat PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5stat PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5stat STATIC)
   target_link_libraries (h5stat PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -18,7 +18,7 @@ endif ()
 
 if (BUILD_SHARED_LIBS)
   add_executable (h5stat-shared ${HDF5_TOOLS_SRC_H5STAT_SOURCE_DIR}/h5stat.c)
-  target_include_directories (h5stat-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5stat-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5stat-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5stat-shared SHARED)
   target_link_libraries (h5stat-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
diff --git a/tools/src/misc/CMakeLists.txt b/tools/src/misc/CMakeLists.txt
index d2b1f51ef2c..62bd443b64f 100644
--- a/tools/src/misc/CMakeLists.txt
+++ b/tools/src/misc/CMakeLists.txt
@@ -7,7 +7,7 @@ project (HDF5_TOOLS_SRC_MISC C)
 #-- Misc Executables
 if (BUILD_STATIC_LIBS)
   add_executable (h5debug ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5debug.c)
-  target_include_directories (h5debug PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5debug PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5debug PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5debug STATIC)
   target_link_libraries (h5debug PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -15,7 +15,7 @@ if (BUILD_STATIC_LIBS)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5debug")
 
   add_executable (h5repart ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5repart.c)
-  target_include_directories (h5repart PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5repart PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5repart PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5repart STATIC)
   target_link_libraries (h5repart PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -23,7 +23,7 @@ if (BUILD_STATIC_LIBS)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5repart")
 
   add_executable (h5mkgrp ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5mkgrp.c)
-  target_include_directories (h5mkgrp PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5mkgrp PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5mkgrp PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5mkgrp STATIC)
   target_link_libraries (h5mkgrp PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -31,7 +31,7 @@ if (BUILD_STATIC_LIBS)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5mkgrp")
 
   add_executable (h5clear ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5clear.c)
-  target_include_directories (h5clear PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5clear PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5clear PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5clear STATIC)
   target_link_libraries (h5clear PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -39,7 +39,7 @@ if (BUILD_STATIC_LIBS)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5clear")
 
   add_executable (h5delete ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5delete.c)
-  target_include_directories (h5delete PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5delete PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5delete PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5delete STATIC)
   target_link_libraries (h5delete PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -56,7 +56,7 @@ if (BUILD_STATIC_LIBS)
 endif ()
 if (BUILD_SHARED_LIBS)
   add_executable (h5debug-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5debug.c)
-  target_include_directories (h5debug-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5debug-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5debug-shared SHARED)
   target_compile_options(h5debug-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   target_link_libraries (h5debug-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
@@ -64,7 +64,7 @@ if (BUILD_SHARED_LIBS)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5debug-shared")
 
   add_executable (h5repart-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5repart.c)
-  target_include_directories (h5repart-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5repart-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5repart-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5repart-shared SHARED)
   target_link_libraries (h5repart-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
@@ -72,7 +72,7 @@ if (BUILD_SHARED_LIBS)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5repart-shared")
 
   add_executable (h5mkgrp-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5mkgrp.c)
-  target_include_directories (h5mkgrp-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5mkgrp-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5mkgrp-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5mkgrp-shared SHARED)
   target_link_libraries (h5mkgrp-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
@@ -80,7 +80,7 @@ if (BUILD_SHARED_LIBS)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5mkgrp-shared")
 
   add_executable (h5clear-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5clear.c)
-  target_include_directories (h5clear-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5clear-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5clear-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5clear-shared SHARED)
   target_link_libraries (h5clear-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
@@ -88,7 +88,7 @@ if (BUILD_SHARED_LIBS)
   set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5clear-shared")
 
   add_executable (h5delete-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5delete.c)
-  target_include_directories (h5delete-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5delete-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5delete-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5delete-shared SHARED)
   target_link_libraries (h5delete-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
diff --git a/tools/test/h5copy/CMakeTests.cmake b/tools/test/h5copy/CMakeTests.cmake
index fb02d81883d..b4daa87c8ed 100644
--- a/tools/test/h5copy/CMakeTests.cmake
+++ b/tools/test/h5copy/CMakeTests.cmake
@@ -163,8 +163,8 @@
         COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:h5copy${tgt_file_ext}> -i ./testfiles/${infile} -o ./testfiles/${testname}.out.h5 -v -s ${psparam} -d ${pdparam}
     )
     set_tests_properties (H5COPY-${testname}-prefill PROPERTIES DEPENDS H5COPY-${testname}-clear-objects)
-    if ("" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
-      set_tests_properties ( PROPERTIES DISABLED true)
+    if ("H5COPY-${testname}-prefill" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
+      set_tests_properties (H5COPY-${testname}-prefill PROPERTIES DISABLED true)
     endif ()
 
     add_test (
diff --git a/tools/test/h5jam/CMakeLists.txt b/tools/test/h5jam/CMakeLists.txt
index 160ecdf2914..e6017fdf745 100644
--- a/tools/test/h5jam/CMakeLists.txt
+++ b/tools/test/h5jam/CMakeLists.txt
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_TEST_H5JAM C)
 # --------------------------------------------------------------------
 if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS)
   add_executable (h5jamgentest ${HDF5_TOOLS_TEST_H5JAM_SOURCE_DIR}/h5jamgentest.c)
-  target_include_directories (h5jamgentest PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5jamgentest PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   TARGET_C_PROPERTIES (h5jamgentest STATIC)
   target_link_libraries (h5jamgentest PRIVATE ${HDF5_LIB_TARGET})
   set_target_properties (h5jamgentest PROPERTIES FOLDER generator/tools)
@@ -22,7 +22,7 @@ if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS)
 endif ()
 
 add_executable (getub ${HDF5_TOOLS_TEST_H5JAM_SOURCE_DIR}/getub.c)
-target_include_directories (getub PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+target_include_directories (getub PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
 if (BUILD_STATIC_LIBS)
   TARGET_C_PROPERTIES (getub STATIC)
   target_link_libraries (getub PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
@@ -33,7 +33,7 @@ endif ()
 set_target_properties (getub PROPERTIES FOLDER tools)
 
 add_executable (tellub ${HDF5_TOOLS_TEST_H5JAM_SOURCE_DIR}/tellub.c)
-target_include_directories (tellub PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+target_include_directories (tellub PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
 if (BUILD_STATIC_LIBS)
   TARGET_C_PROPERTIES (tellub STATIC)
   target_link_libraries (tellub PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
diff --git a/tools/test/h5repack/CMakeLists.txt b/tools/test/h5repack/CMakeLists.txt
index 3e7c6c20945..5c0075c0948 100644
--- a/tools/test/h5repack/CMakeLists.txt
+++ b/tools/test/h5repack/CMakeLists.txt
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_TEST_H5REPACK C)
 # --------------------------------------------------------------------
 add_executable (testh5repack_detect_szip ${HDF5_TOOLS_TEST_H5REPACK_SOURCE_DIR}/testh5repack_detect_szip.c)
 target_include_directories (testh5repack_detect_szip
-    PRIVATE "${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR};${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
+    PRIVATE "${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR};${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
 )
 if (BUILD_STATIC_LIBS)
   TARGET_C_PROPERTIES (testh5repack_detect_szip STATIC)
@@ -29,7 +29,7 @@ set (REPACK_COMMON_SOURCES
 )
 add_executable (h5repacktest ${REPACK_COMMON_SOURCES} ${HDF5_TOOLS_TEST_H5REPACK_SOURCE_DIR}/h5repacktst.c)
 target_include_directories (h5repacktest
-    PRIVATE "${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR};${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
+    PRIVATE "${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR};${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
 )
 if (BUILD_STATIC_LIBS)
   TARGET_C_PROPERTIES (h5repacktest STATIC)
diff --git a/utils/tools/h5dwalk/CMakeLists.txt b/utils/tools/h5dwalk/CMakeLists.txt
index 9e4eb6d8b55..f0611e0b0c4 100644
--- a/utils/tools/h5dwalk/CMakeLists.txt
+++ b/utils/tools/h5dwalk/CMakeLists.txt
@@ -7,9 +7,9 @@ project (HDF5_UTILS_TOOLS_H5DWALK C)
 if (BUILD_STATIC_LIBS)
   add_executable (h5dwalk ${HDF5_UTILS_TOOLS_H5DWALK_SOURCE_DIR}/h5dwalk.c)
 #  add_custom_target(generate_demo ALL
-#    DEPENDS "${HDF5_TOOLS_DIR}/test/demo_destfiles.test"
+#    DEPENDS "${HDF5_TOOLS_ROOT_DIR}/test/demo_destfiles.test"
 #  )
-  target_include_directories (h5dwalk PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${CIRCLE_INCLUDE_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5dwalk PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${CIRCLE_INCLUDE_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5dwalk PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5dwalk STATIC)
   target_link_libraries (h5dwalk PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} ${MFU_LIBRARY} "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>")
@@ -21,7 +21,7 @@ endif ()
 
 if (BUILD_SHARED_LIBS)
   add_executable (h5dwalk-shared ${HDF5_UTILS_TOOLS_H5DWALK_SOURCE_DIR}/h5dwalk.c)
-  target_include_directories (h5dwalk-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${CIRCLE_INCLUDE_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+  target_include_directories (h5dwalk-shared PRIVATE "${HDF5_TOOLS_ROOT_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${CIRCLE_INCLUDE_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
   target_compile_options(h5dwalk-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
   TARGET_C_PROPERTIES (h5dwalk-shared SHARED)
   target_link_libraries (h5dwalk-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} ${MFU_LIBRARY} "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>")
diff --git a/utils/tools/test/h5dwalk/CMakeLists.txt b/utils/tools/test/h5dwalk/CMakeLists.txt
index 530bed87aed..a27190c8bd7 100644
--- a/utils/tools/test/h5dwalk/CMakeLists.txt
+++ b/utils/tools/test/h5dwalk/CMakeLists.txt
@@ -3,9 +3,9 @@ project (HDF5_TOOLS_TEST_H5DWALK)
 
 if (HDF5_BUILD_PARALLEL_TOOLS)
   add_custom_command(
-    OUTPUT ${HDF5_TOOLS_DIR}/test/demo_destfiles.test
+    OUTPUT ${HDF5_TOOLS_ROOT_DIR}/test/demo_destfiles.test
     COMMAND bash -c ${HDF5_TOOLS_SRC_H5DWALK_SOURCE_DIR}/copy_demo_files.sh
-    ARGS ${HDF5_TOOLS_DIR}/test ${CMAKE_BINARY_DIR}/bin
+    ARGS ${HDF5_TOOLS_ROOT_DIR}/test ${CMAKE_BINARY_DIR}/bin
     DEPENDS ${HDF5_TOOLS_SRC_H5DWALK_SOURCE_DIR}/copy_demo_files.sh
   )
 endif ()