Skip to content

Commit

Permalink
Merged changes from main.
Browse files Browse the repository at this point in the history
  • Loading branch information
prmukherj committed Nov 27, 2024
2 parents 9ddf638 + 8396888 commit 7e0f8c9
Show file tree
Hide file tree
Showing 20 changed files with 670 additions and 397 deletions.
82 changes: 54 additions & 28 deletions .ci/fluent_test_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,16 @@ class FluentRuntimeError(RuntimeError):
pass


def run_fluent_test(journal_file: Path, launcher_args: str = "") -> None:
def run_fluent_test(
src_test_dir: Path, journal_file: Path, launcher_args: str = ""
) -> None:
"""Run Fluent test.
Parameters
----------
src_test_dir : Path
Path to the Fluent test directory in the host.
journal_file : Path
Absolute path to the journal file.
Expand All @@ -41,12 +46,27 @@ def run_fluent_test(journal_file: Path, launcher_args: str = "") -> None:
src_pyfluent_dir = str(Path(pyfluent.__file__).parent)
verion_for_file_name = FluentVersion.current_dev().number
dst_pyfluent_dir = f"/ansys_inc/v{verion_for_file_name}/commonfiles/CPython/3_10/linx64/Release/python/lib/python3.10/site-packages/ansys/fluent/core"
src_test_dir = str(journal_file.parent)
src_gen_dir = (
Path(pyfluent.__file__).parent / "ansys" / "fluent" / "core" / "generated"
)
dst_gen_dir = f"/ansys_inc/v{verion_for_file_name}/fluent/fluent{FluentVersion.current_dev()!r}/cortex/pylib/flapi/generated"
dst_test_dir = "/testing"
working_dir = Path(dst_test_dir)
parent = journal_file.parent
parents = []
while parent != src_test_dir:
parents.append(parent.name)
parent = parent.parent
parents.reverse()
for parent in parents:
working_dir /= parent
working_dir = str(working_dir)
src_test_dir = str(src_test_dir)
logging.debug(f"src_pyfluent_dir: {src_pyfluent_dir}")
logging.debug(f"dst_pyfluent_dir: {dst_pyfluent_dir}")
logging.debug(f"src_test_dir: {src_test_dir}")
logging.debug(f"dst_test_dir: {dst_test_dir}")
logging.debug(f"working_dir: {working_dir}")

docker_client = docker.from_env()
version_for_image_tag = FluentVersion.current_dev().docker_image_tag
Expand All @@ -55,34 +75,39 @@ def run_fluent_test(journal_file: Path, launcher_args: str = "") -> None:
image=image_name,
volumes=[
f"{src_pyfluent_dir}:{dst_pyfluent_dir}",
f"{src_gen_dir}:{dst_gen_dir}", # Try removing this after pyfluent is updated in commonfiles
f"{src_test_dir}:{dst_test_dir}",
],
working_dir=dst_test_dir,
working_dir=working_dir,
environment={"ANSYSLMD_LICENSE_FILE": os.environ["ANSYSLMD_LICENSE_FILE"]},
command=f"3ddp {launcher_args} -gu -py -i {journal_file.name}",
command=f"{launcher_args} -gu -py -i {journal_file.name}",
detach=True,
stdout=True,
stderr=True,
auto_remove=True,
)
while True:
container.reload()
if container.status == "exited":
break
stderr = container.logs(stdout=False, stderr=True)
if stderr:
stderr = stderr.decode()
for line in stderr.split("\n"):
if line.strip().startswith("Error:"):
if "Expected exception" in line: # for check_assert.py
container.stop()
else:
raise FluentRuntimeError(line)
sleep(1)
logging.debug(container.logs(stderr=True).decode())
container.remove()


MAX_TEST_PATH_LENGTH = 40
try:
while True:
container.reload()
if container.status == "exited":
break
stderr = container.logs(stdout=False, stderr=True)
if stderr:
stderr = stderr.decode()
for line in stderr.split("\n"):
if line.strip().startswith("Error:"):
if "Expected exception" in line: # for check_assert.py
container.stop()
else:
raise FluentRuntimeError(line)
sleep(1)
logging.debug(container.logs(stderr=True).decode())
container.remove()
except docker.errors.NotFound:
pass


MAX_TEST_PATH_LENGTH = 100


if __name__ == "__main__":
Expand All @@ -93,19 +118,20 @@ def run_fluent_test(journal_file: Path, launcher_args: str = "") -> None:
)
args = parser.parse_args()
test_dir = Path.cwd() / args.test_dir
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
copytree(test_dir, tmpdir, dirs_exist_ok=True)
with TemporaryDirectory(ignore_cleanup_errors=True) as src_test_dir:
copytree(test_dir, src_test_dir, dirs_exist_ok=True)
exception_occurred = False
for test_file in Path(tmpdir).rglob("*.py"):
src_test_dir = Path(src_test_dir)
for test_file in (src_test_dir / "fluent").rglob("*.py"):
config_file = test_file.with_suffix(".yaml")
launcher_args = ""
if config_file.exists():
configs = yaml.safe_load(config_file.read_text())
launcher_args = configs.get("launcher_args", "")
test_file_relpath = str(test_file.relative_to(tmpdir))
test_file_relpath = str(test_file.relative_to(src_test_dir))
print(f"Running {test_file_relpath}", end="", flush=True)
try:
run_fluent_test(test_file, launcher_args)
run_fluent_test(src_test_dir, test_file, launcher_args)
print(
f"{(MAX_TEST_PATH_LENGTH + 10 - len(test_file_relpath)) * '·'}PASSED"
)
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/test-fluent-journals.yml
Original file line number Diff line number Diff line change
Expand Up @@ -82,4 +82,4 @@ jobs:
- name: Run Fluent tests
run: |
python .ci/fluent_test_runner.py tests/fluent
make write-and-run-fluent-tests
10 changes: 8 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ docker-pull:
# Smoke test: verify the installed package is importable without running any tests.
test-import:
	@python -c "import ansys.fluent.core as pyfluent"

PYTESTEXTRA = --cache-clear --cov=ansys.fluent --cov-report=xml:cov_xml.xml --cov-report=html
PYTESTRERUN = --last-failed --last-failed-no-failures none
PYTESTEXTRA = --cache-clear --cov=ansys.fluent --cov-report=xml:cov_xml.xml --cov-report=html -n 4
PYTESTRERUN = --last-failed --last-failed-no-failures none -n 4

unittest: unittest-dev-242

Expand Down Expand Up @@ -179,3 +179,9 @@ cleanup-previous-docker-containers:
docker stop $(docker ps -a -q); \
fi
@if [ -n "$(docker ps -a -q)" ]; then docker rm -vf $(docker ps -a -q); fi

# Generate Fluent journal files from the pytest suite, then execute them
# inside a Fluent Docker container via the CI runner script.
write-and-run-fluent-tests:
	@pip install -r requirements/requirements_build.txt
	@poetry install --with test -E reader
	@poetry run python -m pytest --write-fluent-journals
	@python .ci/fluent_test_runner.py tests
Loading

0 comments on commit 7e0f8c9

Please sign in to comment.