Skip to content

Commit

Permalink
Work on system-test
Browse files Browse the repository at this point in the history
  • Loading branch information
marcelldls committed Jan 16, 2025
1 parent 2e19e3e commit 2f17962
Show file tree
Hide file tree
Showing 5 changed files with 162 additions and 71 deletions.
1 change: 1 addition & 0 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
"env": {
// Enable break on exception when debugging tests (see: tests/conftest.py)
"PYTEST_RAISE": "1",
"FASTCS_SYSTEM_TEST": "true"
},
}
]
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ reportMissingImports = false # Ignore missing stubs in imported modules
[tool.pytest.ini_options]
# Run pytest with all our checkers, and don't spam us with massive tracebacks on error
addopts = """
--tb=native -vv --doctest-modules --doctest-glob="*.rst" --benchmark-autosave --benchmark-columns="min, max, mean, outliers, ops, rounds"
--tb=native -vv --doctest-modules --doctest-glob="*.rst" --benchmark-sort=mean --benchmark-autosave --benchmark-columns="mean, min, max, outliers, ops, rounds"
"""
# https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings
filterwarnings = "error"
Expand Down
33 changes: 33 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import copy
import multiprocessing
import os
import random
import signal
import string
import subprocess
import sys
import time
Expand All @@ -12,9 +14,12 @@
from aioca import purge_channel_caches
from pytest_mock import MockerFixture

from fastcs import FastCS
from fastcs.attributes import AttrR, AttrRW, AttrW, Handler, Sender, Updater
from fastcs.controller import Controller, SubController
from fastcs.datatypes import Bool, Float, Int, String
from fastcs.transport.epics.options import EpicsIOCOptions, EpicsOptions
from fastcs.transport.rest.options import RestOptions, RestServerOptions
from fastcs.wrappers import command, scan

DATA_PATH = Path(__file__).parent / "data"
Expand Down Expand Up @@ -192,3 +197,31 @@ def ioc():
except ValueError:
# Someone else already called communicate
pass


@pytest.fixture(scope="session")
def system_test():
    """Launch the system-test controller in a subprocess for the session.

    Yields the running ``subprocess.Popen`` handle once the controller's
    REST transport reports it is up; on teardown sends SIGINT and waits
    up to 5 seconds for a clean exit.

    Raises:
        RuntimeError: if the controller process dies before starting up.
        TimeoutError: if startup takes longer than 10 seconds.
    """
    # Use the running interpreter rather than whatever "python" happens to
    # be on PATH, so the child runs in the same virtualenv as the tests.
    # NOTE(review): this commit adds tests/controller.py but launches
    # "test.py" -- confirm the intended script name.
    process = subprocess.Popen(
        [sys.executable, HERE / "test.py"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
    )

    # Block until uvicorn (the REST server) logs that it is serving.
    start_time = time.monotonic()
    while "Uvicorn running" not in (
        process.stdout.readline().strip()  # type: ignore
    ):
        # readline() returns "" at EOF, so detect a dead child explicitly
        # instead of spinning until the timeout fires.
        if process.poll() is not None:
            raise RuntimeError("Controller process exited during startup")
        if time.monotonic() - start_time > 10:
            raise TimeoutError("Controller did not start in time")

    # Stop buffer from getting full and blocking the subprocess.
    # NOTE(review): closing stdout can make the child's later writes fail
    # with BrokenPipeError -- consider a drain thread or DEVNULL instead.
    for f in [process.stdin, process.stdout, process.stderr]:
        if f:
            f.close()

    yield process

    process.send_signal(signal.SIGINT)
    process.wait(5)
22 changes: 22 additions & 0 deletions tests/controller.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from fastcs import FastCS
from fastcs.attributes import AttrR, AttrW
from fastcs.controller import Controller
from fastcs.datatypes import Bool, Int
from fastcs.transport.epics.options import EpicsIOCOptions, EpicsOptions
from fastcs.transport.rest.options import RestOptions, RestServerOptions


class TestController(Controller):
    """Minimal FastCS controller exposed for the system/benchmark tests.

    Declares one readable and one writable attribute; the transports that
    expose them (REST on port 8090, EPICS with the ``SYSTEM-DEVICE``
    prefix) are configured below in this module.
    """

    # NOTE(review): pytest collects classes named Test* -- consider setting
    # `__test__ = False` if collection warnings appear for this class.
    # Read-only integer attribute, initialised to 0.
    read_int: AttrR = AttrR(Int(), initial_value=0)
    # Write-only boolean attribute.
    write_bool: AttrW = AttrW(Bool())


# Transport configuration: REST on port 8090 and an EPICS IOC with the
# "SYSTEM-DEVICE" PV prefix -- these must match the endpoints and PV names
# used by tests/test_system.py.
transport_options = [
    RestOptions(rest=RestServerOptions(port=8090)),
    EpicsOptions(ioc=EpicsIOCOptions(pv_prefix="SYSTEM-DEVICE")),
]

if __name__ == "__main__":
    # Guard the launch: pyproject runs pytest with --doctest-modules, which
    # imports test modules during collection -- without this guard merely
    # importing the module would start the servers.
    instance = FastCS(
        TestController(),
        transport_options,
    )
    instance.run()
175 changes: 105 additions & 70 deletions tests/test_system.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,125 +7,160 @@
import tango
from p4p.client.thread import Context

# Mark the process so the session fixtures in conftest.py know the system
# test harness is active, then read the flag back for the skipif guards.
# NOTE(review): because the variable is set right here, FASTCS_SYSTEM_TEST
# is always True when this module runs -- confirm that is intended.
os.environ["FASTCS_SYSTEM_TEST"] = "true"
# Number of background worker processes spawned per loaded benchmark.
REST_CLIENTS = 9
FASTCS_SYSTEM_TEST = os.getenv("FASTCS_SYSTEM_TEST") == "true"


def to_do(event, url):
def bg_rest(event, url):
    """Hammer *url* with GET requests until *event* is set.

    Runs as the body of a background worker process generating REST load.
    """
    while True:
        if event.is_set():
            return
        requests.get(url)


def bg_pass(event, url):
    """Baseline workload: busy-wait until *event* is set, issuing no requests.

    *url* is accepted but unused so the signature matches ``bg_rest`` for
    ``start_background_traffic``; the deliberate busy loop keeps the
    process-scheduling overhead comparable to the loaded case.
    """
    while not event.is_set():
        pass


@contextlib.contextmanager
def start_background_traffic(rest_target):
    """Run REST_CLIENTS background worker processes for the managed scope.

    If *rest_target* is a URL the workers issue GET requests against it
    (``bg_rest``); if it is None they busy-wait instead (``bg_pass``),
    providing a no-traffic baseline with the same process overhead.
    All workers are signalled to stop and joined on exit.
    """
    # Reconstructed post-commit version: the scraped diff had the stale
    # single-process lines interleaved with this multi-process code.
    stop_event = multiprocessing.Event()
    processes = [
        multiprocessing.Process(
            target=bg_rest if rest_target else bg_pass,
            args=(stop_event, rest_target),
        )
        for _ in range(REST_CLIENTS)
    ]
    for process in processes:
        process.start()

    try:
        yield processes
    finally:
        # Signal the processes to stop
        stop_event.set()
        # Wait for the processes to finish
        for process in processes:
            process.join()


@pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
@pytest.mark.benchmark(group="test-rest")
def test_rest_get(benchmark, system_test):
    """Benchmark an unloaded REST GET of the read-int attribute."""

    def to_do():
        requests.get("http://localhost:8090/read-int")

    benchmark(to_do)


@pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
@pytest.mark.benchmark(group="test-rest")
def test_rest_get_loaded_request(benchmark, system_test):
    """Benchmark a REST GET while background workers load the same endpoint."""

    def to_do():
        requests.get("http://localhost:8090/read-int")

    with start_background_traffic(
        "http://localhost:8090/read-int",
    ):
        benchmark(to_do)


@pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
@pytest.mark.benchmark(group="test-rest")
def test_rest_get_loaded_baseline(benchmark, system_test):
    """Benchmark a REST GET with idle (busy-wait) background workers.

    Baseline for test_rest_get_loaded_request: same worker-process
    overhead, no actual REST traffic.
    """

    def to_do():
        requests.get("http://localhost:8090/read-int")

    with start_background_traffic(
        None,
    ):
        benchmark(to_do)


@pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
@pytest.mark.benchmark(group="test-rest")
def test_rest_put(benchmark, system_test):
    """Benchmark an unloaded REST PUT of the write-bool attribute."""

    def to_do():
        requests.put("http://localhost:8090/write-bool", json={"value": "false"})

    benchmark(to_do)


@pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
@pytest.mark.benchmark(group="test-ca")
def test_ca_get(benchmark, system_test):
    """Benchmark an unloaded EPICS get of SYSTEM-DEVICE:ReadInt.

    NOTE(review): the group is named "test-ca" but Context("pva") is the
    PVAccess protocol -- confirm which EPICS transport is meant.
    """
    ctx = Context("pva")

    def to_do():
        ctx.get("SYSTEM-DEVICE:ReadInt")

    benchmark(to_do)


@pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
@pytest.mark.benchmark(group="test-ca")
def test_ca_get_loaded_request(benchmark, system_test):
    """Benchmark an EPICS get while background REST traffic loads the IOC."""
    ctx = Context("pva")

    def to_do():
        ctx.get("SYSTEM-DEVICE:ReadInt")

    with start_background_traffic(
        "http://localhost:8090/read-int",
    ):
        benchmark(to_do)


@pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
@pytest.mark.benchmark(group="test-ca")
def test_ca_get_loaded_baseline(benchmark, system_test):
    """Benchmark an EPICS get with idle (busy-wait) background workers.

    Baseline for test_ca_get_loaded_request: same worker-process overhead,
    no actual REST traffic.
    """
    ctx = Context("pva")

    def to_do():
        ctx.get("SYSTEM-DEVICE:ReadInt")

    with start_background_traffic(None):
        benchmark(to_do)


@pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
@pytest.mark.benchmark(group="test-ca")
def test_ca_put(benchmark, system_test):
    """Benchmark an unloaded EPICS put to SYSTEM-DEVICE:WriteBool."""
    ctx = Context("pva")

    def to_do():
        ctx.put("SYSTEM-DEVICE:WriteBool", 0)

    benchmark(to_do)


# @pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
# @pytest.mark.skipif(True, reason="Need to setup Tango env")
# @pytest.mark.benchmark(
# group="test-tango",
# )
# def test_tango_get(benchmark):
# device = tango.DeviceProxy("MY/DEVICE/NAME")

# def to_do():
# device.read_attribute("ReadbackPosition")

# benchmark(to_do)


# @pytest.mark.skipif(not FASTCS_SYSTEM_TEST, reason="FastCS System Testing")
# @pytest.mark.skipif(True, reason="Need to setup Tango env")
# @pytest.mark.benchmark(
# group="test-tango",
# )
# def test_tango_put(benchmark):
# device = tango.DeviceProxy("MY/DEVICE/NAME")

# def to_do():
# device.write_attribute("DesiredPosition", 0)

# benchmark(to_do)

0 comments on commit 2f17962

Please sign in to comment.