
Commit

Merge branch 'master' of https://github.com/ansys/pydpf-core into jp_documentation_edit
JennaPaikowsky committed Jan 8, 2025
2 parents 8487312 + 3883355 commit 0f4a510
Showing 6 changed files with 136 additions and 5 deletions.
3 changes: 3 additions & 0 deletions .ci/build_wheel.py
@@ -15,6 +15,9 @@
"win": "win_amd64",
"manylinux1": "manylinux1_x86_64",
"manylinux_2_17": "manylinux_2_17_x86_64",
"linux": "manylinux_2_17_x86_64", # Accommodate tox.ini platform substitutions
"win32": "win_amd64",
"darwin": "any",
}

argParser = argparse.ArgumentParser()
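For context, a hedged sketch of how such an alias-to-wheel-tag table can be consumed through the script's -p flag (illustrative only, not the full .ci/build_wheel.py; the dictionary name and the hard-coded arguments are assumptions for the example):

# Illustrative sketch (not the full .ci/build_wheel.py): resolve a platform
# alias, such as tox's {on_platform} values, to a wheel platform tag.
import argparse

ALIAS_TO_WHEEL_TAG = {
    "win": "win_amd64",
    "manylinux1": "manylinux1_x86_64",
    "manylinux_2_17": "manylinux_2_17_x86_64",
    "linux": "manylinux_2_17_x86_64",  # tox reports Linux as "linux"
    "win32": "win_amd64",              # tox reports Windows as "win32"
    "darwin": "any",                   # pure-Python wheel tag on macOS
}

parser = argparse.ArgumentParser()
parser.add_argument("-p", "--platform", choices=sorted(ALIAS_TO_WHEEL_TAG))
args = parser.parse_args(["-p", "linux"])  # e.g. from `build_wheel.py -p {on_platform}`
print(ALIAS_TO_WHEEL_TAG[args.platform])   # -> manylinux_2_17_x86_64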
2 changes: 1 addition & 1 deletion requirements/requirements_docs.txt
@@ -1,4 +1,4 @@
-ansys-sphinx-theme[autoapi]==1.2.3
+ansys-sphinx-theme[autoapi]==1.2.4
enum-tools[sphinx]==0.12.0
graphviz==0.20.1
imageio==2.36.0
2 changes: 1 addition & 1 deletion requirements/requirements_install.txt
@@ -1,5 +1,5 @@
importlib-metadata==8.5.0
numpy==2.1.3
packaging==24.2
-psutil==6.1.0
+psutil==6.1.1
tqdm==4.67.1
10 changes: 7 additions & 3 deletions src/ansys/dpf/core/dpf_operator.py
@@ -829,8 +829,10 @@ def _find_outputs_corresponding_pins(self, type_names, inpt, pin, corresponding_
if python_name == "B":
python_name = "bool"

# Type match
if type(inpt).__name__ == python_name:
corresponding_pins.append(pin)
# if the inpt has multiple potential outputs, find which ones can match
elif isinstance(inpt, (_Outputs, Operator, Result)):
if isinstance(inpt, Operator):
output_pin_available = inpt.outputs._get_given_output([python_name])
@@ -840,12 +842,14 @@
                    output_pin_available = inpt._get_given_output([python_name])
                for outputpin in output_pin_available:
                    corresponding_pins.append((pin, outputpin))
+           # If any output type matches python_name
            elif isinstance(inpt, Output):
                for inpttype in inpt._python_expected_types:
                    if inpttype == python_name:
                        corresponding_pins.append(pin)
+               if python_name == "Any":
+                   corresponding_pins.append(pin)
-           else:
-               for inpttype in inpt._python_expected_types:
-                   if inpttype == python_name:
-                       corresponding_pins.append(pin)
+           elif python_name == "Any":
+               corresponding_pins.append(pin)

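In plain terms, the change makes "Any" behave as a wildcard during pin matching: a pin that accepts "Any" now matches, as does an input whose declared output types include "Any". A minimal standalone sketch of that rule (a simplified illustration, not the library's exact code; `matches` is a hypothetical helper):

# Illustrative sketch of the wildcard matching rule added above
# (simplified; not the exact pydpf-core implementation).
def matches(pin_type_names, input_type_name, input_expected_types=()):
    """Return True if an input can feed a pin accepting pin_type_names."""
    for python_name in pin_type_names:
        if input_type_name == python_name:
            return True  # exact type-name match
        if python_name in input_expected_types:
            return True  # one of the input's declared output types matches
        if python_name == "Any":
            return True  # an "Any" pin accepts any input
    return False

# An output declared as "Any" can now feed a pin that accepts "Any":
assert matches(["Any"], "Output", input_expected_types=["Any"])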
6 changes: 6 additions & 0 deletions tests/test_operator.py
@@ -1496,3 +1496,9 @@ def test_operator_id(server_type):
     assert op.id not in ids

     ids.add(op.id)
+
+
+def test_operator_find_outputs_corresponding_pins_any(server_type):
+    f1 = ops.utility.forward()
+    f2 = ops.utility.forward()
+    f2.inputs.any.connect(f1.outputs.any)
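(The connect call is itself the assertion here: the test passes if wiring the Any output of one forward operator into the Any input of another succeeds without raising.)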
118 changes: 118 additions & 0 deletions tox.ini
@@ -0,0 +1,118 @@
# This is a work in progress: the local/CI testing workflow is gradually being transferred to tox.

# Usage instructions:
# `tox` runs all tests sequentially; `tox --parallel` runs them in parallel (much faster).
# Run a specific selection of tests with `tox -e pretest,<list-of-tests>,posttest`, e.g. `tox -e pretest,test-api,test-launcher,posttest`.
# The `--parallel` flag can also be passed when running a specific selection, as shown below.
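# For example, running only the API and launcher suites in parallel (illustrative):
#   tox -e pretest,test-api,test-launcher,posttest --parallel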

[tox]
description = Default tox environment list and core configurations

# List all tests to run in parallel or sequential mode here,
# so that plain `tox` runs them all sequentially and `tox --parallel` runs them all in parallel.
envlist = pretest,test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators},posttest

isolated_build_env = build

[testenv]
description = Default configuration for test environments, unless overridden

pass_env =
    PACKAGE_NAME
    MODULE
    ANSYS_DPF_ACCEPT_LA
    ANSYSLMD_LICENSE_FILE
    AWP_ROOT242

package = external # To allow custom wheel builds

[testenv:build_external]
description = Environment for custom builds of package wheels; satisfies PyDPF's custom wheel-building requirement

package_glob = {toxinidir}{/}dist{/}ansys_dpf_core*

# The {on_platform} substitution automatically detects the OS type.
commands =
    python .ci/build_wheel.py -p {on_platform} -w

[testenv:pretest]
description = Environment to kill servers and organize test files prior to testing

deps =
    psutil

skip_install = True

commands =
    # Clear any running servers that may be locking resources
    python -c "import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
        print(f'Killed \{nb_procs} \{proc_name} processes.')"

    # Organize test files
    python -c "\
        import os, shutil; \
        test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service','test_custom_type_field']; \
        [(os.makedirs(d, exist_ok=True), shutil.copy('tests/conftest.py', d), shutil.copy(f'tests/\{d}.py', d) if os.path.exists(f'tests/\{d}.py') else None) for d in test_data]; \
        [os.remove(f'tests/\{d}.py') for d in test_data if os.path.exists(f'tests/\{d}.py')]"

[testenv:posttest]
description = Environment to kill servers and revert test files to original state after testing

depends = pretest, test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators}

deps =
    psutil

skip_install = True

commands =
    # Revert project layout to previous state
    python -c "\
        import os, shutil; \
        test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service','test_custom_type_field']; \
        [shutil.move(f'\{d}/\{d}.py', f'tests/\{d}.py') for d in test_data if os.path.exists(f'\{d}/\{d}.py')]; \
        [shutil.rmtree(d) for d in test_data if os.path.exists(d)]"

    # Clear any running servers that may be locking resources
    python -c "import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
        print(f'Killed \{nb_procs} \{proc_name} processes.')"

[testenv:test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators}]
description = Environment where project testing configuration is defined

depends = pretest

setenv =
    # Pytest extra arguments
    COVERAGE = --cov=ansys.dpf.core --cov-report=xml --cov-report=html --log-level=ERROR --cov-append
    RERUNS = --reruns=2 --reruns-delay=1
    DEBUG = -v -s --durations=10 --durations-min=1.0

    api: JUNITXML = --junitxml=tests/junit/test-results.xml
    launcher: JUNITXML = --junitxml=tests/junit/test-results2.xml
    server: JUNITXML = --junitxml=tests/junit/test-results3.xml
    local_server: JUNITXML = --junitxml=tests/junit/test-results4.xml
    multi_server: JUNITXML = --junitxml=tests/junit/test-results5.xml
    remote_workflow: JUNITXML = --junitxml=tests/junit/test-results6.xml
    remote_operator: JUNITXML = --junitxml=tests/junit/test-results7.xml
    workflow: JUNITXML = --junitxml=tests/junit/test-results8.xml
    service: JUNITXML = --junitxml=tests/junit/test-results9.xml
    operators: JUNITXML = --junitxml=../tests/junit/test-results12.xml

    # Test sets
    api: PYTEST_PYTHON_FILES = tests
    launcher: PYTEST_PYTHON_FILES = test_launcher
    server: PYTEST_PYTHON_FILES = test_server
    local_server: PYTEST_PYTHON_FILES = test_local_server
    multi_server: PYTEST_PYTHON_FILES = test_multi_server
    remote_workflow: PYTEST_PYTHON_FILES = test_remote_workflow
    remote_operator: PYTEST_PYTHON_FILES = test_remote_operator
    workflow: PYTEST_PYTHON_FILES = test_workflow
    service: PYTEST_PYTHON_FILES = test_service
    operators: PYTEST_PYTHON_FILES = tests/operators

deps =
    -r requirements/requirements_test.txt

commands =
    pytest {env:PYTEST_PYTHON_FILES} {env:DEBUG} {env:COVERAGE} {env:RERUNS} {env:JUNITXML}
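The `python -c` one-liners in the pretest environment are dense; the following expanded sketch shows the same logic in readable form (an illustrative restatement, with psutil as the only third-party dependency):

# Expanded, readable equivalent of the pretest one-liners (illustrative).
import os
import shutil

import psutil

# 1) Kill lingering DPF gRPC servers that may be locking resources.
proc_name = "Ans.Dpf.Grpc"
nb_procs = 0
for proc in psutil.process_iter():
    if proc_name in proc.name():
        proc.kill()
        nb_procs += 1
print(f"Killed {nb_procs} {proc_name} processes.")

# 2) Give each test module its own directory with a copy of conftest.py,
#    so each tox environment can run it in isolation.
test_data = [
    "test_launcher", "test_server", "test_local_server", "test_multi_server",
    "test_workflow", "test_remote_workflow", "test_remote_operator",
    "test_service", "test_custom_type_field",
]
for d in test_data:
    os.makedirs(d, exist_ok=True)
    shutil.copy("tests/conftest.py", d)
    if os.path.exists(f"tests/{d}.py"):
        shutil.copy(f"tests/{d}.py", d)
        os.remove(f"tests/{d}.py")

The posttest environment reverses these steps, moving each test file back into tests/ and removing the per-test directories, then kills any remaining servers again.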
