From dc7ccd6d9e3bc34179c9d6e3e81995be9b952c65 Mon Sep 17 00:00:00 2001
From: Jordi Pique
Date: Sat, 11 May 2024 19:51:22 +0100
Subject: [PATCH] Refactor the jsonpatch tests

The jsonpatch tests now follow a similar format to the conditions tests:
they are defined in a declarative way in a yaml file. Moreover, we define
tests for specific schema versions. This allows us to keep running the old
tests for old schemas (and for newer schemas that are backward compatible)
while introducing new tests that are executed only for the new schemas.
---
 tests/conditions_test.py        |  12 +--
 tests/jsonpatch_helpers_test.py | 167 --------------------------------
 tests/jsonpatch_test.py         |  45 +++++++++
 tests/jsonpatch_test.yaml       | 128 ++++++++++++++++++++++++
 tests/test_utils.py             |   9 ++
 5 files changed, 184 insertions(+), 177 deletions(-)
 delete mode 100644 tests/jsonpatch_helpers_test.py
 create mode 100644 tests/jsonpatch_test.py
 create mode 100644 tests/jsonpatch_test.yaml

diff --git a/tests/conditions_test.py b/tests/conditions_test.py
index 498ff04..5eda158 100644
--- a/tests/conditions_test.py
+++ b/tests/conditions_test.py
@@ -2,6 +2,7 @@

 import pytest
 import yaml
+from test_utils import expand_schemas

 from generic_k8s_webhook.config_parser.entrypoint import GenericWebhookConfigManifest

@@ -9,15 +10,6 @@
 CONDITIONS_YAML = os.path.join(SCRIPT_DIR, "conditions_test.yaml")


-def _expand_schemas(schemas_subsets: dict[str, list[str]], list_schemas: list[str]) -> list[str]:
-    final_schemas = set()
-    for schema in list_schemas:
-        final_schemas.add(schema)
-        for schemas_superset in schemas_subsets.get(schema, []):
-            final_schemas.add(schemas_superset)
-    return sorted(list(final_schemas))
-
-
 def _parse_tests() -> list[tuple]:
     with open(CONDITIONS_YAML, "r") as f:
         raw_tests = yaml.safe_load(f)
@@ -25,7 +17,7 @@
     parsed_tests = []
     for test_suite in raw_tests["test_suites"]:
         for test in test_suite["tests"]:
-            for schema in _expand_schemas(raw_tests["schemas_subsets"], test["schemas"]):
+            for schema in expand_schemas(raw_tests["schemas_subsets"], test["schemas"]):
                 for i, case in enumerate(test["cases"]):
                     parsed_tests.append(
                         (
diff --git a/tests/jsonpatch_helpers_test.py b/tests/jsonpatch_helpers_test.py
deleted file mode 100644
index 37ce796..0000000
--- a/tests/jsonpatch_helpers_test.py
+++ /dev/null
@@ -1,167 +0,0 @@
-import pytest
-
-from generic_k8s_webhook.config_parser.jsonpatch_parser import JsonPatchParserV1
-
-
-@pytest.mark.parametrize(
-    ("name", "json_to_patch", "patch", "expected_result"),
-    [
-        (
-            "Change the value of a simple key",
-            {"spec": {}, "metadata": {}},
-            [{"op": "add", "path": ".spec", "value": "foo"}],
-            {"spec": "foo", "metadata": {}},
-        ),
-        (
-            "Add a subkey that doesn't exist",
-            {"spec": {}, "metadata": {}},
-            [{"op": "add", "path": ".spec.subkey", "value": "foo"}],
-            {"spec": {"subkey": "foo"}, "metadata": {}},
-        ),
-        (
-            "Add a 2 subkeys that don't exist",
-            {"spec": {}, "metadata": {}},
-            [{"op": "add", "path": ".spec.subkey1.subkey2", "value": "foo"}],
-            {"spec": {"subkey1": {"subkey2": "foo"}}, "metadata": {}},
-        ),
-        (
-            "Add an element to an existing empty list",
-            {"spec": {"containers": []}, "metadata": {}},
-            [{"op": "add", "path": ".spec.containers.-", "value": {"name": "main"}}],
-            {"spec": {"containers": [{"name": "main"}]}, "metadata": {}},
-        ),
-        (
-            "Add an element to an existing non-empty list",
-            {"spec": {"containers": [{"name": "sidecar"}]}, "metadata": {}},
-            [{"op": "add", "path": ".spec.containers.-", "value": {"name": "main"}}],
{"containers": [{"name": "sidecar"}, {"name": "main"}]}, "metadata": {}}, - ), - ( - "Add an element to a non-existing list", - {"spec": {}, "metadata": {}}, - [{"op": "add", "path": ".spec.containers.-", "value": {"name": "main"}}], - {"spec": {"containers": [{"name": "main"}]}, "metadata": {}}, - ), - ( - "Add a new entry on the second element of the list", - {"spec": {"containers": [{"name": "main"}]}, "metadata": {}}, - [{"op": "add", "path": ".spec.containers.0.metadata", "value": {}}], - {"spec": {"containers": [{"name": "main", "metadata": {}}]}, "metadata": {}}, - ), - ], -) -def test_add(name, json_to_patch, patch, expected_result): - assert all(elem["op"] == "add" for elem in patch) - patcher = JsonPatchParserV1().parse(patch)[0] - processed_patch = patcher.generate_patch(json_to_patch) - patched_json = processed_patch.apply(json_to_patch) - assert patched_json == expected_result - - -@pytest.mark.parametrize( - ("name", "json_to_patch", "patch", "expected_result"), - [ - ( - "Remove the value of a simple key", - {"spec": {}, "metadata": {}}, - [ - { - "op": "remove", - "path": ".spec", - } - ], - {"metadata": {}}, - ), - # ( - # "Remove a key that doesn't exist", - # {"spec": {}, "metadata": {}}, - # [{ - # "op": "remove", - # "path": ".status", - # }], - # {"spec": {}, "metadata": {}} - # ), - ], -) -def test_remove(name, json_to_patch, patch, expected_result): - assert all(elem["op"] == "remove" for elem in patch) - patcher = JsonPatchParserV1().parse(patch)[0] - processed_patch = patcher.generate_patch(json_to_patch) - patched_json = processed_patch.apply(json_to_patch) - assert patched_json == expected_result - - -@pytest.mark.parametrize( - ("name", "json_to_patch", "patch", "expected_result"), - [ - ( - "Replace the value of a simple key", - {"spec": {}, "metadata": {"name": "foo"}}, - [{"op": "replace", "path": ".metadata.name", "value": "bar"}], - {"spec": {}, "metadata": {"name": "bar"}}, - ), - ], -) -def test_replace(name, json_to_patch, patch, expected_result): - assert all(elem["op"] == "replace" for elem in patch) - patcher = JsonPatchParserV1().parse(patch)[0] - processed_patch = patcher.generate_patch(json_to_patch) - patched_json = processed_patch.apply(json_to_patch) - assert patched_json == expected_result - - -@pytest.mark.parametrize( - ("name", "json_to_patch", "patch", "expected_result"), - [ - ( - "Copy the value from a simple key to another", - {"spec": {"containers": [{"name": "bar"}]}, "metadata": {"name": "foo"}}, - [{"op": "copy", "path": ".metadata.name", "from": ".spec.containers.0.name"}], - {"spec": {"containers": [{"name": "bar"}]}, "metadata": {"name": "bar"}}, - ), - ], -) -def test_copy(name, json_to_patch, patch, expected_result): - assert all(elem["op"] == "copy" for elem in patch) - patcher = JsonPatchParserV1().parse(patch)[0] - processed_patch = patcher.generate_patch(json_to_patch) - patched_json = processed_patch.apply(json_to_patch) - assert patched_json == expected_result - - -@pytest.mark.parametrize( - ("name", "json_to_patch", "patch", "expected_result"), - [ - ( - "Move the value from a simple key to another", - {"spec": {"containers": [{"name": "bar"}]}, "metadata": {"name": "foo"}}, - [{"op": "move", "path": ".metadata.name", "from": ".spec.containers.0.name"}], - {"spec": {"containers": [{}]}, "metadata": {"name": "bar"}}, - ), - ], -) -def test_move(name, json_to_patch, patch, expected_result): - assert all(elem["op"] == "move" for elem in patch) - patcher = JsonPatchParserV1().parse(patch)[0] - processed_patch = 
-    processed_patch = patcher.generate_patch(json_to_patch)
-    patched_json = processed_patch.apply(json_to_patch)
-    assert patched_json == expected_result
-
-
-@pytest.mark.parametrize(
-    ("name", "json_to_patch", "patch", "expected_result"),
-    [
-        (
-            "Test the value of a simple key",
-            {"spec": {}, "metadata": {"name": "foo"}},
-            [{"op": "test", "path": ".metadata.name", "value": "foo"}],
-            {"spec": {}, "metadata": {"name": "foo"}},
-        ),
-    ],
-)
-def test_test(name, json_to_patch, patch, expected_result):
-    assert all(elem["op"] == "test" for elem in patch)
-    patcher = JsonPatchParserV1().parse(patch)[0]
-    processed_patch = patcher.generate_patch(json_to_patch)
-    patched_json = processed_patch.apply(json_to_patch)
-    assert patched_json == expected_result
diff --git a/tests/jsonpatch_test.py b/tests/jsonpatch_test.py
new file mode 100644
index 0000000..d91d269
--- /dev/null
+++ b/tests/jsonpatch_test.py
@@ -0,0 +1,45 @@
+import os
+
+import pytest
+import yaml
+from test_utils import expand_schemas
+
+from generic_k8s_webhook.config_parser.entrypoint import GenericWebhookConfigManifest
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+JSONPATCH_YAML = os.path.join(SCRIPT_DIR, "jsonpatch_test.yaml")
+
+
+def _parse_tests() -> list[tuple]:
+    with open(JSONPATCH_YAML, "r") as f:
+        raw_tests = yaml.safe_load(f)
+
+    parsed_tests = []
+    for test_suite in raw_tests["test_suites"]:
+        for test in test_suite["tests"]:
+            for schema in expand_schemas(raw_tests["schemas_subsets"], test["schemas"]):
+                for i, case in enumerate(test["cases"]):
+                    parsed_tests.append(
+                        (
+                            f"{test_suite['name']}_{i}",  # name
+                            schema,  # schema
+                            case["patch"],  # patch
+                            case["payload"],  # payload
+                            case["expected_result"],  # expected_result
+                        )
+                    )
+    return parsed_tests
+
+
+@pytest.mark.parametrize(("name", "schema", "patch", "payload", "expected_result"), _parse_tests())
+def test_all(name, schema, patch, payload, expected_result):
+    raw_config = {
+        "apiVersion": f"generic-webhook/{schema}",
+        "kind": "GenericWebhookConfig",
+        "webhooks": [{"name": "test-webhook", "path": "test-path", "actions": [{"patch": [patch]}]}],
+    }
+    gwcm = GenericWebhookConfigManifest(raw_config)
+    action = gwcm.list_webhook_config[0].list_actions[0]
+    json_patch = action.get_patches(payload)
+    result = json_patch.apply(payload)
+    assert result == expected_result
diff --git a/tests/jsonpatch_test.yaml b/tests/jsonpatch_test.yaml
new file mode 100644
index 0000000..d736b8b
--- /dev/null
+++ b/tests/jsonpatch_test.yaml
@@ -0,0 +1,128 @@
+schemas_subsets:
+  # v1alpha1 is a subset of the features from v1beta1
+  # For this reason, any test that uses v1alpha1 will also be executed for v1beta1
+  v1alpha1:
+    - v1beta1
+
+# Each suite stresses a specific jsonpatch operator.
+# Each test specifies a patch operation to be applied on a json payload
+# and the expected result.
+test_suites:
+  - name: ADD
+    tests:
+      - schemas: [v1alpha1]
+        cases:
+          # Change the value of a simple key
+          - patch:
+              op: add
+              path: .spec
+              value: foo
+            payload: { spec: {}, metadata: {} }
+            expected_result: { spec: foo, metadata: {} }
+          # Add a subkey that doesn't exist
+          - patch:
+              op: add
+              path: .spec.subkey
+              value: foo
+            payload: { spec: {}, metadata: {} }
+            expected_result: { spec: { subkey: foo }, metadata: {} }
+          # Add 2 subkeys that don't exist
+          - patch:
+              op: add
+              path: .spec.subkey1.subkey2
+              value: foo
+            payload: { spec: {}, metadata: {} }
+            expected_result:
+              { spec: { subkey1: { subkey2: foo } }, metadata: {} }
+          # Add an element to an existing empty list
+          - patch:
+              op: add
+              path: .spec.containers.-
+              value: { name: main }
+            payload: { spec: { containers: [] }, metadata: {} }
+            expected_result:
+              { spec: { containers: [{ name: main }] }, metadata: {} }
+          # Add an element to an existing non-empty list
+          - patch:
+              op: add
+              path: .spec.containers.-
+              value: { name: main }
+            payload: { spec: { containers: [{ name: sidecar }] }, metadata: {} }
+            expected_result:
+              spec: { containers: [{ name: sidecar }, { name: main }] }
+              metadata: {}
+          # Add an element to a non-existing list
+          - patch:
+              op: add
+              path: .spec.containers.-
+              value: { name: main }
+            payload: { spec: {}, metadata: {} }
+            expected_result:
+              { spec: { containers: [{ name: main }] }, metadata: {} }
+          # Add a new entry to the first element of the list
+          - patch:
+              op: add
+              path: .spec.containers.0.metadata
+              value: {}
+            payload: { spec: { containers: [{ name: main }] }, metadata: {} }
+            expected_result:
+              spec: { containers: [{ name: main, metadata: {} }] }
+              metadata: {}
+  - name: REMOVE
+    tests:
+      - schemas: [v1alpha1]
+        cases:
+          # Remove the value of a simple key
+          - patch:
+              op: remove
+              path: .spec
+            payload: { spec: {}, metadata: {} }
+            expected_result: { metadata: {} }
+  - name: REPLACE
+    tests:
+      - schemas: [v1alpha1]
+        cases:
+          # Replace the value of a simple key
+          - patch:
+              op: replace
+              path: .metadata.name
+              value: bar
+            payload: { spec: {}, metadata: { name: foo } }
+            expected_result: { spec: {}, metadata: { name: bar } }
+  - name: COPY
+    tests:
+      - schemas: [v1alpha1]
+        cases:
+          # Copy the value from a simple key to another
+          - patch:
+              op: copy
+              path: .metadata.name
+              from: .spec.containers.0.name
+            payload:
+              { spec: { containers: [{ name: bar }] }, metadata: { name: foo } }
+            expected_result:
+              { spec: { containers: [{ name: bar }] }, metadata: { name: bar } }
+  - name: MOVE
+    tests:
+      - schemas: [v1alpha1]
+        cases:
+          # Move the value from a simple key to another
+          - patch:
+              op: move
+              path: .metadata.name
+              from: .spec.containers.0.name
+            payload:
+              { spec: { containers: [{ name: bar }] }, metadata: { name: foo } }
+            expected_result:
+              { spec: { containers: [{}] }, metadata: { name: bar } }
+  - name: TEST
+    tests:
+      - schemas: [v1alpha1]
+        cases:
+          # Test the value of a simple key
+          - patch:
+              op: test
+              path: .metadata.name
+              value: foo
+            payload: { spec: {}, metadata: { name: foo } }
+            expected_result: { spec: {}, metadata: { name: foo } }
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 3d7c934..a2d972d 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -58,3 +58,12 @@ def wait_for_server_ready(port: int, tls: bool = False) -> None:
         response = requests.get(url, verify=False, timeout=1)
     if response.status_code != 200:
Error {response.status_code}") + + +def expand_schemas(schemas_subsets: dict[str, list[str]], list_schemas: list[str]) -> list[str]: + final_schemas = set() + for schema in list_schemas: + final_schemas.add(schema) + for schemas_superset in schemas_subsets.get(schema, []): + final_schemas.add(schemas_superset) + return sorted(list(final_schemas))