Add examples of API performance test written with Locust #3880

Merged: 4 commits, Jan 17, 2025
4 changes: 2 additions & 2 deletions Dockerfile.buildroot
@@ -12,8 +12,8 @@ RUN microdnf -y module enable nodejs:22 && \

COPY ./requirements/mariadb.pc /usr/lib64/pkgconfig/mariadb.pc

-ENV PATH /venv/bin:${PATH} \
-    VIRTUAL_ENV /venv
+ENV PATH=/venv/bin:${PATH} \
+    VIRTUAL_ENV=/venv

# Create a virtualenv for the application dependencies
RUN python3.11 -m venv /venv
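
Note: the only change in this file is switching to the `ENV key=value` form; recent Docker/BuildKit releases emit a LegacyKeyValueFormat warning for the older whitespace-separated `ENV key value` syntax, while the resulting image is identical.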
1 change: 1 addition & 0 deletions requirements/devel.txt
@@ -4,6 +4,7 @@ doc8
isort==5.13.2
colorama
black==24.10.0
+locust
parameterized
robotframework
robotframework-seleniumlibrary
127 changes: 127 additions & 0 deletions tests/performance/api_write_test.py
@@ -0,0 +1,127 @@
# Copyright (c) 2025 Alexander Todorov <[email protected]>
#
# Licensed under GNU Affero General Public License v3 or later (AGPLv3+)
# https://www.gnu.org/licenses/agpl-3.0.html

from datetime import datetime

from base import LoggedInTestCase
from locust import task
from locust.exception import StopUser


class RecordTestExecutionsTestCase(LoggedInTestCase):
RANGE_SIZE = 100

@task
def record_test_executions(self):
"""
Range size: R
Number of test results: R^2
Number of API requests: 13 + 3R + 2R^2 (incl. on_start)
"""
user = self.json_rpc("User.filter", {})[0]
self.json_rpc(
"Classification.create",
{"name": f"from locust @ {datetime.now().isoformat()}"},
)
classification = self.json_rpc("Classification.filter", {})[0]

product = self.json_rpc(
"Product.create",
{
"name": f"Product created at {datetime.now().isoformat()}",
"classification": classification["id"],
},
)

version = self.json_rpc(
"Version.create",
{
"product": product["id"],
"value": f"ver-{datetime.now().isoformat()}",
},
)

test_plan = self.json_rpc(
"TestPlan.create",
{
"name": f"TP: created at {datetime.now().isoformat()}",
"text": "A script is creating this TP and adds TCs and TRs to it to establish a performance baseline",
"type": 7, # Performance
"product": product["id"],
"product_version": version["id"],
"is_active": True,
},
)

priority = self.json_rpc("Priority.filter", {})[0]
category = self.json_rpc(
"Category.filter",
{
"product": product["id"],
},
)[0]
confirmed_status = self.json_rpc(
"TestCaseStatus.filter", {"is_confirmed": True}
)[0]

pass_status = self.json_rpc("TestExecutionStatus.filter", {"weight__gt": 0})[0]

# create new build for all of these TRs
build = self.json_rpc(
"Build.create",
{
"name": f"b.{datetime.now().isoformat()}",
"description": f"the product build at {datetime.now().isoformat()}",
"version": version["id"],
},
)

# create TestCase(s)
test_cases = []
for _ in range(self.RANGE_SIZE):
test_case = self.json_rpc(
"TestCase.create",
{
"summary": f"Case created at {datetime.now().isoformat()}",
"product": product["id"],
"category": category["id"],
"priority": priority["id"],
"case_status": confirmed_status["id"],
},
)

test_cases.append(test_case)
self.json_rpc("TestPlan.add_case", [test_plan["id"], test_case["id"]])

# create TestRun(s)
for i in range(self.RANGE_SIZE):
test_run = self.json_rpc(
"TestRun.create",
{
"summary": f"TR {i} {datetime.now().isoformat()}",
"manager": user["id"],
"plan": test_plan["id"],
"build": build["id"],
},
)
print(f'TR-{test_run["id"]} created')

# add cases to TR
for case in test_cases:
result = self.json_rpc("TestRun.add_case", [test_run["id"], case["id"]])

# record the results
for execution in result:
self.json_rpc(
"TestExecution.update",
[
execution["id"],
{
"status": pass_status["id"],
},
],
)

raise StopUser()
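
Since RANGE_SIZE is hard-coded above, a separate locustfile can tune the workload without editing this one. A minimal sketch, assuming an environment variable named LOCUST_RANGE_SIZE (hypothetical, not defined by this PR):

# api_write_small.py -- hypothetical companion locustfile
import os

# note: locust schedules every non-abstract User subclass found in the
# locustfile's namespace, so this import also pulls in the parent class;
# pass the desired class name on the command line to run only one of them
from api_write_test import RecordTestExecutionsTestCase


class SmallWriteTest(RecordTestExecutionsTestCase):
    # e.g. LOCUST_RANGE_SIZE=10 locust --headless --locustfile api_write_small.py SmallWriteTest
    RANGE_SIZE = int(os.environ.get("LOCUST_RANGE_SIZE", "10"))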
56 changes: 56 additions & 0 deletions tests/performance/base.py
@@ -0,0 +1,56 @@
# Copyright (c) 2025 Alexander Todorov <[email protected]>
#
# Licensed under GNU Affero General Public License v3 or later (AGPLv3+)
# https://www.gnu.org/licenses/agpl-3.0.html

from locust import FastHttpUser, between, task
from requests.utils import dict_from_cookiejar


class LoggedInTestCase(FastHttpUser):
abstract = True

username = "testadmin"
password = "password"
login_url = "/accounts/login/"

def on_start(self):
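        # Django's login form requires a CSRF token: fetch the login page so
        # the server sets the csrftoken cookie, then echo its value back in
        # the POST body below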
with self.client.get(self.login_url, catch_response=True):
cookies = dict_from_cookiejar(self.client.cookiejar)
csrf_middleware_token = cookies["csrftoken"]

self.client.post(
self.login_url,
data={
"username": self.username,
"password": self.password,
"csrfmiddlewaretoken": csrf_middleware_token,
},
headers={"Referer": self.host},
)

def json_rpc(self, rpc_method, rpc_args):
# .filter() args are passed as dictionary but other args,
# e.g. for .add_tag() are passed as a list of positional values
if not isinstance(rpc_args, list):
rpc_args = [rpc_args]

payload = {
"jsonrpc": "2.0",
"method": rpc_method,
"params": rpc_args,
"id": "jsonrpc",
}
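        # assumes the call succeeds: a JSON-RPC error response carries an
        # "error" member instead of "result" and would raise KeyError here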
return self.client.post("/json-rpc/", json=payload).json()["result"]


class ExampleTestCase(LoggedInTestCase):
wait_time = between(1, 5)

@task
def rpc_user_filter(self):
self.json_rpc("User.filter", {})

@task
def visit_dashboard_page(self):
self.client.get("/")
6 changes: 6 additions & 0 deletions tests/test_http.sh
@@ -187,6 +187,12 @@ _EOF_
rlAssertGreaterOrEqual ">= 50 r/s" "$COMPLETED_REQUESTS" 500
rlPhaseEnd

rlPhaseStartTest "Sanity execute Locust files"
# this is designed to check that these files don't crash, not to measure performance
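# --headless skips the web UI, --run-time 5s keeps the smoke test short
# and -H points the locustfiles at the already running instance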
rlRun -t -c "locust --headless --users 1 --spawn-rate 1 --run-time 5s -H https://localhost/ --locustfile tests/performance/base.py"
rlRun -t -c "locust --headless --users 1 --spawn-rate 1 --run-time 5s -H https://localhost/ --locustfile tests/performance/api_write_test.py"
rlPhaseEnd

rlPhaseStartCleanup
rlRun -t -c "docker compose logs --no-color > test_http_docker.log"
rlRun -t -c "docker compose down"
Expand Down