Add benchmark tests #973

Open
wants to merge 3 commits into base: rolling
Changes from all commits
16 changes: 16 additions & 0 deletions rclpy/CMakeLists.txt
@@ -156,6 +156,11 @@ if(BUILD_TESTING)
if(NOT _typesupport_impls STREQUAL "")
# Run each test in its own pytest invocation to isolate any global state in rclpy
set(_rclpy_pytest_tests
test/benchmark/test_benchmark_actions.py
test/benchmark/test_benchmark_client_service.py
test/benchmark/test_benchmark_guard_condition.py
test/benchmark/test_benchmark_pub_sub.py
test/benchmark/test_benchmark_timer.py
test/test_action_client.py
test/test_action_graph.py
test/test_action_server.py
@@ -198,8 +203,19 @@ if(BUILD_TESTING)
)

foreach(_test_path ${_rclpy_pytest_tests})
if(NOT AMENT_RUN_PERFORMANCE_TESTS)
get_filename_component(_test_dir ${_test_path} DIRECTORY)
get_filename_component(_test_dir ${_test_dir} NAME)
if(_test_dir STREQUAL "benchmark")
# Skip benchmark tests by default
set(_SKIP_TEST "SKIP_TEST")
else()
set(_SKIP_TEST "")
endif()
endif()
get_filename_component(_test_name ${_test_path} NAME_WE)
ament_add_pytest_test(${_test_name} ${_test_path}
${_SKIP_TEST}
APPEND_ENV AMENT_PREFIX_PATH=${ament_index_build_path}
PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}
TIMEOUT 120
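The gating above hinges on AMENT_RUN_PERFORMANCE_TESTS: when it is off, any test living under a benchmark/ directory is registered with SKIP_TEST, so it is reported as skipped rather than run. For illustration only (not part of this PR), the same opt-in could be expressed at the pytest level with a hypothetical environment variable:

# Illustrative pytest-level equivalent of the SKIP_TEST gating above; not part
# of this PR. RUN_PERFORMANCE_TESTS is a hypothetical opt-in variable.
import os

import pytest

RUN_PERFORMANCE_TESTS = os.environ.get('RUN_PERFORMANCE_TESTS') == '1'


@pytest.mark.skipif(not RUN_PERFORMANCE_TESTS, reason='benchmarks are opt-in')
def test_skipped_unless_opted_in(benchmark):
    benchmark(lambda: sum(range(1000)))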
1 change: 1 addition & 0 deletions rclpy/package.xml
@@ -44,6 +44,7 @@
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<test_depend>python3-pytest</test_depend>
<test_depend>python3-pytest-benchmark</test_depend>
<test_depend>rosidl_generator_py</test_depend>
<test_depend>test_msgs</test_depend>

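python3-pytest-benchmark provides the benchmark fixture that all of the new tests below rely on. A minimal, self-contained sketch of how the fixture is used (illustrative, not taken from this PR):

# Minimal pytest-benchmark sketch (illustrative, not part of this PR).
# The fixture calls the target repeatedly and records timing statistics;
# the target's return value is passed back to the test.
def fibonacci(n):
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a


def test_fibonacci(benchmark):
    result = benchmark(fibonacci, 20)
    assert result == 6765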
76 changes: 76 additions & 0 deletions rclpy/test/benchmark/test_benchmark_actions.py
@@ -0,0 +1,76 @@
# Copyright 2022 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from action_msgs.msg import GoalStatus
import rclpy
from rclpy.action import ActionClient, ActionServer
from rclpy.executors import SingleThreadedExecutor
from rclpy.qos import qos_profile_services_default, ReliabilityPolicy
from test_msgs.action import Fibonacci


ONE_HUNDRED = 100


def test_one_hundred_goals(benchmark):
context = rclpy.context.Context()
rclpy.init(context=context)
try:
node = rclpy.create_node('benchmark_actions', context=context)
qos = qos_profile_services_default
qos.reliability = ReliabilityPolicy.RELIABLE

action_client = ActionClient(
node,
Fibonacci,
'benchmark',
goal_service_qos_profile=qos,
result_service_qos_profile=qos)

def execute_cb(goal_handle):
goal_handle.succeed()
return Fibonacci.Result()

action_server = ActionServer(
node,
Fibonacci,
'benchmark',
execute_cb,
goal_service_qos_profile=qos,
result_service_qos_profile=qos)

executor = SingleThreadedExecutor(context=context)
executor.add_node(node)

# Wait for client/server to discover each other
assert action_client.wait_for_server(timeout_sec=5.0)

def bm():
for _ in range(ONE_HUNDRED):
goal_fut = action_client.send_goal_async(Fibonacci.Goal())
executor.spin_until_future_complete(goal_fut)
client_goal_handle = goal_fut.result()
assert client_goal_handle.accepted
result_fut = client_goal_handle.get_result_async()
executor.spin_until_future_complete(result_fut)
assert GoalStatus.STATUS_SUCCEEDED == result_fut.result().status

benchmark(bm)
Member:
In the other tests we are asserting the result of benchmark, should we do that here?

Contributor Author:
Looking at the assertions now I don't think they were adding value. I removed them in 76fd188


executor.shutdown()
action_client.destroy()
action_server.destroy()
node.destroy_node()
finally:
rclpy.shutdown(context=context)
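On the review thread above: pytest-benchmark passes the benchmarked callable's return value through, so a cheap sanity check can still be asserted from the test body without changing what gets timed. A sketch of that pattern (illustrative, not this PR's code):

# Illustrative only: benchmark() returns whatever the wrapped callable returns,
# so the test can assert on it after the timing runs complete.
def test_benchmark_return_value(benchmark):
    def bm():
        return sum(range(100)) == 4950

    assert benchmark(bm)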
57 changes: 57 additions & 0 deletions rclpy/test/benchmark/test_benchmark_client_service.py
@@ -0,0 +1,57 @@
# Copyright 2022 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import rclpy
from rclpy.executors import SingleThreadedExecutor
from rclpy.qos import qos_profile_services_default, ReliabilityPolicy
from test_msgs.srv import Empty as EmptySrv


ONE_THOUSAND = 1000


def test_one_thousand_service_calls(benchmark):
context = rclpy.context.Context()
rclpy.init(context=context)
try:
node = rclpy.create_node('benchmark_client_service', context=context)
qos = qos_profile_services_default
qos.reliability = ReliabilityPolicy.RELIABLE
client = node.create_client(EmptySrv, 'empty', qos_profile=qos)

def cb(_, response):
return response

service = node.create_service(EmptySrv, 'empty', cb, qos_profile=qos)

executor = SingleThreadedExecutor(context=context)
executor.add_node(node)

# Wait for client/service to discover each other
assert client.wait_for_service(timeout_sec=5.0)

def bm():
for _ in range(ONE_THOUSAND):
fut = client.call_async(EmptySrv.Request())
executor.spin_until_future_complete(fut)
assert fut.result() # Raises if there was an error

benchmark(bm)
Member:
Same here, should we assert?

Contributor Author:
Removed assertions in 76fd188


executor.shutdown()
node.destroy_client(client)
node.destroy_service(service)
node.destroy_node()
finally:
rclpy.shutdown(context=context)
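If the fixed ONE_THOUSAND loop inside bm() ever needs to give way to explicit control over rounds and iterations, pytest-benchmark's pedantic mode is the usual tool. A hedged sketch, assuming the standard benchmark.pedantic API and not reflecting anything in this PR:

# Sketch of benchmark.pedantic (not used in this PR): the test chooses the
# number of rounds and iterations instead of looping inside the callable.
def test_single_call(benchmark):
    def one_call():
        return sum(range(10))

    result = benchmark.pedantic(one_call, rounds=50, iterations=20)
    assert result == 45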
86 changes: 86 additions & 0 deletions rclpy/test/benchmark/test_benchmark_guard_condition.py
@@ -0,0 +1,86 @@
# Copyright 2022 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import rclpy
from rclpy.executors import SingleThreadedExecutor


ONE_THOUSAND = 1000


def test_one_thousand_guard_callbacks(benchmark):
context = rclpy.context.Context()
rclpy.init(context=context)
try:
node = rclpy.create_node('benchmark_many_gc_calls', context=context)
num_calls = 0

def cb():
nonlocal num_calls
num_calls += 1

gc = node.create_guard_condition(cb)

executor = SingleThreadedExecutor(context=context)
executor.add_node(node)

def bm():
nonlocal num_calls
# Reset for each benchmark run
num_calls = 0
while num_calls < ONE_THOUSAND:
gc.trigger()
executor.spin_once(timeout_sec=0)

benchmark(bm)

executor.shutdown()
node.destroy_guard_condition(gc)
node.destroy_node()
finally:
rclpy.shutdown(context=context)


def test_one_thousand_guard_coroutine_callbacks(benchmark):
context = rclpy.context.Context()
rclpy.init(context=context)
try:
node = rclpy.create_node('benchmark_many_gc_calls', context=context)
num_calls = 0

async def coro():
nonlocal num_calls
num_calls += 1

gc = node.create_guard_condition(coro)

executor = SingleThreadedExecutor(context=context)
executor.add_node(node)

def bm():
nonlocal num_calls
# Reset for each benchmark run
num_calls = 0
while num_calls < ONE_THOUSAND:
gc.trigger()
executor.spin_once(timeout_sec=0)
return num_calls == ONE_THOUSAND

assert benchmark(bm)

executor.shutdown()
node.destroy_guard_condition(gc)
node.destroy_node()
finally:
rclpy.shutdown(context=context)
64 changes: 64 additions & 0 deletions rclpy/test/benchmark/test_benchmark_pub_sub.py
@@ -0,0 +1,64 @@
# Copyright 2022 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import rclpy
from rclpy.executors import SingleThreadedExecutor
from rclpy.qos import QoSProfile, ReliabilityPolicy
from test_msgs.msg import Empty as EmptyMsg


ONE_THOUSAND = 1000


def test_one_thousand_messages(benchmark):
context = rclpy.context.Context()
rclpy.init(context=context)
try:
node = rclpy.create_node('benchmark_pub_sub', context=context)
qos = QoSProfile(depth=1, reliability=ReliabilityPolicy.RELIABLE)
pub = node.create_publisher(EmptyMsg, 'empty', qos)
num_calls = 0

def cb(_):
nonlocal num_calls
num_calls += 1
# Send next message
pub.publish(EmptyMsg())

sub = node.create_subscription(EmptyMsg, 'empty', cb, qos)

executor = SingleThreadedExecutor(context=context)
executor.add_node(node)

# Wait for pub/sub to discover each other
while not pub.get_subscription_count():
executor.spin_once(timeout_sec=0.01)

def bm():
nonlocal num_calls
# Reset for each benchmark run
num_calls = 0
# Prime the loop with a message
pub.publish(EmptyMsg())
while num_calls < ONE_THOUSAND:
executor.spin_once(timeout_sec=0)

benchmark(bm)

executor.shutdown()
node.destroy_publisher(pub)
node.destroy_subscription(sub)
node.destroy_node()
finally:
rclpy.shutdown(context=context)
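One small note on the discovery wait in the test above: the while loop on pub.get_subscription_count() has no upper bound, so a discovery failure would hang the test until the pytest timeout. A bounded variant is sketched below (hypothetical hardening, not part of this PR; it assumes the same rclpy Publisher.get_subscription_count() and Executor.spin_once() calls used above):

# Hypothetical bounded discovery wait, not part of this PR. Returns False
# instead of spinning forever if no subscription shows up in time.
import time


def wait_for_subscription(pub, executor, timeout_sec=5.0):
    deadline = time.monotonic() + timeout_sec
    while not pub.get_subscription_count():
        if time.monotonic() >= deadline:
            return False
        executor.spin_once(timeout_sec=0.01)
    return True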