DAOS-623 test: dynamically find hdf5 plugin path
Dynamically find the HDF5 plugin path instead of hardcoding it for every
test, since the plugin can live in a different directory depending on the
system and install location.

Test-tag: BasicCheckout BasicCheckoutDm DowngradeTest IoSysAdmin IorSmall MacsioTest SoakSmoke ior LargeFileCount SmallFileCount
Skip-unit-tests: true
Skip-fault-injection-test: true

Required-githooks: true

Signed-off-by: Dalton Bohning <[email protected]>
daltonbohning committed Sep 17, 2024
1 parent 98ddb2f commit 7ee035c
Showing 19 changed files with 70 additions and 25 deletions.
2 changes: 1 addition & 1 deletion src/tests/ftest/deployment/basic_checkout.yaml
@@ -161,4 +161,4 @@ dfuse:
 mount_dir: "/tmp/daos_dfuse/"
 disable_caching: true
 hdf5_vol:
-plugin_path: /usr/lib64/mpich/lib
+plugin_name: libhdf5_vol_daos.so
2 changes: 1 addition & 1 deletion src/tests/ftest/deployment/io_sys_admin.yaml
@@ -103,4 +103,4 @@ dcp:
 client_processes:
 np: 16
 hdf5_vol:
-plugin_path: /usr/lib64/mpich/lib
+plugin_name: libhdf5_vol_daos.so
2 changes: 1 addition & 1 deletion src/tests/ftest/interoperability/diff_versions.yaml
@@ -37,7 +37,7 @@ ior:
 write_flg: "-w -W -k -G 1 -i 1"
 read_flg: "-C -k -e -r -R -g -G 1 -Q 1 -vv"
 hdf5_vol:
-plugin_path: "/usr/lib64/mpich/lib"
+plugin_name: libhdf5_vol_daos.so
 interop:
 # Example of upgrade/downgrade RPMs from local tar file
 # upgrade_rpms: ["/home/dinghwah/RPM/2.1.104/daos-2.1.104-1.el8.x86_64.rpm"]
2 changes: 1 addition & 1 deletion src/tests/ftest/interoperability/down_grade.yaml
@@ -38,7 +38,7 @@ ior:
 write_flg: "-w -W -k -G 1 -i 1"
 read_flg: "-C -k -e -r -R -g -G 1 -Q 1 -vv"
 hdf5_vol:
-plugin_path: "/usr/lib64/mpich/lib"
+plugin_name: libhdf5_vol_daos.so
 interop:
 # Example of upgrade/downgrade RPMs from local tar file
 # upgrade_rpms: ["/home/dinghwah/RPM/2.1.104/daos-2.1.104-1.el8.x86_64.rpm"]
2 changes: 1 addition & 1 deletion src/tests/ftest/interoperability/updown_grade.yaml
@@ -38,7 +38,7 @@ ior:
 write_flg: "-w -W -k -G 1 -i 1"
 read_flg: "-C -k -e -r -R -g -G 1 -Q 1 -vv"
 hdf5_vol:
-plugin_path: "/usr/lib64/mpich/lib"
+plugin_name: libhdf5_vol_daos.so
 interop:
 # Example of upgrade/downgrade RPMs from local tar file
 # upgrade_rpms: ["/home/dinghwah/RPM/2.1.104/daos-2.1.104-1.el8.x86_64.rpm"]
2 changes: 1 addition & 1 deletion src/tests/ftest/interoperability/updown_grade_8svr.yaml
@@ -38,7 +38,7 @@ ior:
 write_flg: "-w -W -k -G 1 -i 1"
 read_flg: "-C -k -e -r -R -g -G 1 -Q 1 -vv"
 hdf5_vol:
-plugin_path: "/usr/lib64/mpich/lib"
+plugin_name: libhdf5_vol_daos.so
 interop:
 # Example of upgrade/downgrade RPMs from local tar file
 # upgrade_rpms: ["/home/dinghwah/RPM/2.1.104/daos-2.1.104-1.el8.x86_64.rpm"]
6 changes: 3 additions & 3 deletions src/tests/ftest/interoperability/upgrade_downgrade_base.py
@@ -1,5 +1,5 @@
 '''
-(C) Copyright 2023 Intel Corporation.
+(C) Copyright 2023-2024 Intel Corporation.

 SPDX-License-Identifier: BSD-2-Clause-Patent
 '''
@@ -11,7 +11,7 @@

 from agent_utils import include_local_host
 from command_utils_base import CommandFailure
-from general_utils import get_random_bytes, pcmd, run_pcmd
+from general_utils import find_library, get_random_bytes, pcmd, run_pcmd
 from ior_test_base import IorTestBase
 from pydaos.raw import DaosApiError

@@ -574,7 +574,7 @@ def upgrade_and_downgrade(self, fault_on_pool_upgrade=False):
 # (3.b)ior hdf5
 elif ior_api == "HDF5":
     self.log.info("(3.b)==Run IOR HDF5 write and read.")
-    hdf5_plugin_path = self.params.get("plugin_path", '/run/hdf5_vol/')
+    hdf5_plugin_path = find_library(self.params.get("plugin_name", '/run/hdf5_vol/'))
     self.ior_cmd.flags.update(iorflags_write)
     self.run_ior_with_pool(
         plugin_path=hdf5_plugin_path, mount_dir=mount_dir,
7 changes: 5 additions & 2 deletions src/tests/ftest/io/macsio_test.py
@@ -7,7 +7,7 @@
 from apricot import TestWithServers
 from command_utils_base import CommandFailure
 from dfuse_utils import get_dfuse, start_dfuse
-from general_utils import get_log_file, list_to_str
+from general_utils import find_library, get_log_file, list_to_str
 from job_manager_utils import get_job_manager
 from macsio_util import MacsioCommand

@@ -136,7 +136,10 @@ def test_macsio_daos_vol(self):
 :avocado: tags=MacsioTest,test_macsio_daos_vol
 :avocado: tags=DAOS_5610
 """
-plugin_path = self.params.get("plugin_path", "/run/job_manager/*")
+plugin_name = self.params.get("plugin_name", "/run/job_manager/*")
+plugin_path = find_library(plugin_name)
+if not plugin_path:
+    self.fail(f"Failed to find {plugin_name}")
 processes = self.params.get("processes", "/run/macsio/*", len(self.hostlist_clients))

 # Create a pool
4 changes: 2 additions & 2 deletions src/tests/ftest/io/macsio_test.yaml
@@ -41,15 +41,15 @@ job_manager: !mux
 class_name: Mpirun
 mpi_type: mpich
 macsio_path: /usr/lib64/mpich/bin
-plugin_path: /usr/lib64/mpich/lib
+plugin_name: libhdf5_vol_daos.so
 timeout:
 test_macsio: 10
 test_macsio_daos_vol: 20
 openmpi:
 class_name: Orterun
 mpi_type: openmpi
 macsio_path: /usr/lib64/openmpi3/bin
-plugin_path: /usr/lib64/openmpi3/lib
+plugin_name: libhdf5_vol_daos.so
 timeout:
 test_macsio: 10
 test_macsio_daos_vol: 20
2 changes: 1 addition & 1 deletion src/tests/ftest/ior/small.yaml
@@ -66,4 +66,4 @@ dfuse:
 mount_dir: "/tmp/daos_dfuse/"
 disable_caching: true
 hdf5_vol:
-plugin_path: /usr/lib64/mpich/lib
+plugin_name: libhdf5_vol_daos.so
2 changes: 1 addition & 1 deletion src/tests/ftest/soak/faults.yaml
@@ -123,7 +123,7 @@ ior_faults:
 mount_dir: "/tmp/soak_dfuse_ior/"
 disable_caching: true
 hdf5_vol:
-plugin_path: "/usr/lib64/mpich/lib"
+plugin_name: libhdf5_vol_daos.so
 events:
 - "mce: [Hardware Error]: Machine check events logged"
 - "Package temperature above threshold"
2 changes: 1 addition & 1 deletion src/tests/ftest/soak/harassers.yaml
@@ -210,7 +210,7 @@ mdtest_harasser:
 mount_dir: "/tmp/soak_dfuse_mdtest/"
 disable_caching: true
 hdf5_vol:
-plugin_path: "/usr/lib64/mpich/lib"
+plugin_name: libhdf5_vol_daos.so
 events:
 - "mce: [Hardware Error]: Machine check events logged"
 - "Package temperature above threshold"
2 changes: 1 addition & 1 deletion src/tests/ftest/soak/smoke.yaml
@@ -289,7 +289,7 @@ datamover_smoke:
 test_file: "daos:/testFile"
 dfs_destroy: false
 hdf5_vol:
-plugin_path: "/usr/lib64/mpich/lib"
+plugin_name: libhdf5_vol_daos.so
 events:
 - "mce: [Hardware Error]: Machine check events logged"
 - "Package temperature above threshold"
2 changes: 1 addition & 1 deletion src/tests/ftest/soak/soak-extra-suse.yaml
@@ -1,3 +1,3 @@
 hdf5_vol:
-plugin_path: /usr/lib64/mpi/gcc/mpich/lib
+plugin_name: libhdf5_vol_daos.so
 mpi_module: "gnu-mpich"
2 changes: 1 addition & 1 deletion src/tests/ftest/soak/stress.yaml
@@ -329,7 +329,7 @@ datamover_stress:
 test_file: "daos:/testFile"
 dfs_destroy: false
 hdf5_vol:
-plugin_path: "/usr/lib64/mpich/lib"
+plugin_name: libhdf5_vol_daos.so
 events:
 - "mce: [Hardware Error]: Machine check events logged"
 - "Package temperature above threshold"
6 changes: 5 additions & 1 deletion src/tests/ftest/util/file_count_test_base.py
@@ -6,6 +6,7 @@
 import os

 from avocado.core.exceptions import TestFail
+from general_utils import find_library
 from ior_test_base import IorTestBase
 from mdtest_test_base import MdtestBase
 from oclass_utils import extract_redundancy_factor

@@ -68,7 +69,10 @@ def run_file_count(self):
 results = []
 dir_oclass = None
 apis = self.params.get("api", "/run/largefilecount/*")
-hdf5_plugin_path = self.params.get("plugin_path", '/run/hdf5_vol/*')
+hdf5_plugin_name = self.params.get("plugin_name", '/run/hdf5_vol/*')
+hdf5_plugin_path = find_library(hdf5_plugin_name)
+if not hdf5_plugin_path:
+    self.fail(f"Failed to find {hdf5_plugin_name}")
 ior_np = self.params.get("np", '/run/ior/client_processes/*', 1)
 ior_ppn = self.params.get("ppn", '/run/ior/client_processes/*', None)
 mdtest_np = self.params.get("np", '/run/mdtest/client_processes/*', 1)
28 changes: 28 additions & 0 deletions src/tests/ftest/util/general_utils.py
@@ -1207,3 +1207,31 @@ def check_ssh(log, hosts, cmd_timeout=60, verbose=True):
     """
     result = run_remote(log, hosts, "uname", timeout=cmd_timeout, verbose=verbose)
     return result.passed
+
+
+def find_library(name):
+    """Find a library by a given name.
+
+    In order of preference, searches in
+        LD_LIBRARY_PATH
+        MPI_LIB
+        /usr/lib
+        /usr/lib64
+
+    Args:
+        name (str): library name to find
+
+    Returns:
+        str: directory path containing the library. None if not found
+    """
+    paths = []
+    for env_name in ("LD_LIBRARY_PATH", "MPI_LIB"):
+        env_val = os.environ.get(env_name, None)
+        if env_val is not None:
+            paths.extend(env_val.split(":"))
+    paths.append(os.path.join(os.sep, "usr", "lib"))
+    paths.append(os.path.join(os.sep, "usr", "lib64"))
+    for path in paths:
+        if os.path.exists(os.path.join(path, name)):
+            return path
+    return None
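
For reference, a minimal usage sketch of the new helper; the resolve_hdf5_vol_dir wrapper and its error handling are illustrative and not part of this commit, only find_library and the default plugin name come from the changes above:

from general_utils import find_library  # helper added in this commit


def resolve_hdf5_vol_dir(plugin_name="libhdf5_vol_daos.so"):
    """Return the directory containing the DAOS HDF5 VOL plugin.

    find_library() searches LD_LIBRARY_PATH, MPI_LIB, /usr/lib, and
    /usr/lib64 in that order and returns the first directory that
    contains the library, or None if it is not found.
    """
    plugin_dir = find_library(plugin_name)
    if plugin_dir is None:
        raise FileNotFoundError(f"{plugin_name} not found in any library search path")
    return plugin_dir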
8 changes: 6 additions & 2 deletions src/tests/ftest/util/ior_test_base.py
@@ -8,7 +8,7 @@
 from apricot import TestWithServers
 from dfuse_utils import get_dfuse, start_dfuse
 from exception_utils import CommandFailure
-from general_utils import get_random_string
+from general_utils import find_library, get_random_string
 from host_utils import get_local_host
 from ior_utils import IorCommand
 from job_manager_utils import get_job_manager

@@ -318,7 +318,7 @@ def run_ior_multiple_variants(self, obj_class, apis, transfer_block_size,
 flags_w = flags[0]
 if api == "HDF5-VOL":
     api = "HDF5"
-    hdf5_plugin_path = self.params.get("plugin_path", '/run/hdf5_vol/*')
+    hdf5_plugin_name = self.params.get("plugin_name", '/run/hdf5_vol/*')
+    hdf5_plugin_path = find_library(hdf5_plugin_name)
+    if not hdf5_plugin_path:
+        results.append(["FAIL", f"Failed to find {hdf5_plugin_name}"])
+        continue
     flags_w += " -k"
 elif api == "POSIX+IL":
     api = "POSIX"
12 changes: 9 additions & 3 deletions src/tests/ftest/util/soak_utils.py
@@ -23,9 +23,9 @@
 from duns_utils import format_path
 from exception_utils import CommandFailure
 from fio_utils import FioCommand
-from general_utils import (DaosTestError, check_ping, check_ssh, get_host_data, get_log_file,
-                           get_random_bytes, get_random_string, list_to_str, pcmd, run_command,
-                           run_pcmd, wait_for_result)
+from general_utils import (DaosTestError, check_ping, check_ssh, find_library, get_host_data,
+                           get_log_file, get_random_bytes, get_random_string, list_to_str, pcmd,
+                           run_command, run_pcmd, wait_for_result)
 from ior_utils import IorCommand
 from job_manager_utils import Mpirun
 from macsio_util import MacsioCommand

@@ -949,6 +949,9 @@ def create_ior_cmdline(self, job_spec, pool, ppn, nodesperjob, oclass_list=None,
 if not oclass_list:
     oclass_list = self.params.get("dfs_oclass", ior_params)
 plugin_path = self.params.get("plugin_path", "/run/hdf5_vol/")
+plugin_name = self.params.get("plugin_name", "/run/hdf5_vol/")
+if plugin_name and not plugin_path:
+    plugin_path = find_library(plugin_name)
 # update IOR cmdline for each additional IOR obj
 for api in api_list:
     if not self.enable_il and api in ["POSIX-LIBIOIL", "POSIX-LIBPIL4DFS"]:

@@ -1045,6 +1048,9 @@ def create_macsio_cmdline(self, job_spec, pool, ppn, nodesperjob):
 oclass_list = self.params.get("oclass", macsio_params)
 api_list = self.params.get("api", macsio_params)
 plugin_path = self.params.get("plugin_path", "/run/hdf5_vol/")
+plugin_name = self.params.get("plugin_name", "/run/hdf5_vol/")
+if plugin_name and not plugin_path:
+    plugin_path = find_library(plugin_name)
 # update macsio cmdline for each additional MACsio obj
 for api in api_list:
     for file_oclass, dir_oclass in oclass_list:
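
Note on the soak_utils.py change: the existing plugin_path parameter keeps precedence there, and find_library(plugin_name) is only consulted when no explicit plugin_path is configured, so soak configurations that still pin a path behave as before.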
