From f3df0e6e918f103a7da31cb082d9b740f9270d4a Mon Sep 17 00:00:00 2001
From: constanca
Date: Wed, 29 May 2024 15:09:58 +0200
Subject: [PATCH] run black

Signed-off-by: constanca
---
 handlers/aws/handler.py            |  5 ++++-
 handlers/aws/replay_trigger.py     |  4 +++-
 handlers/aws/utils.py              | 11 ++++++-----
 tests/handlers/aws/test_handler.py |  4 +++-
 tests/handlers/aws/test_utils.py   |  7 ++++---
 tests/share/test_config.py         | 10 ++++++----
 6 files changed, 26 insertions(+), 15 deletions(-)

diff --git a/handlers/aws/handler.py b/handlers/aws/handler.py
index 2477fa37..9456be29 100644
--- a/handlers/aws/handler.py
+++ b/handlers/aws/handler.py
@@ -101,7 +101,10 @@ def lambda_handler(lambda_event: dict[str, Any], lambda_context: context_.Contex
             if shipper is None:
                 shared_logger.warning(
                     "no shipper for output in replay queue",
-                    extra={"output_destination": event["output_destination"], "event_input_id": event["event_input_id"]},
+                    extra={
+                        "output_destination": event["output_destination"],
+                        "event_input_id": event["event_input_id"],
+                    },
                 )
 
                 continue
diff --git a/handlers/aws/replay_trigger.py b/handlers/aws/replay_trigger.py
index 7fde1d97..4bf85619 100644
--- a/handlers/aws/replay_trigger.py
+++ b/handlers/aws/replay_trigger.py
@@ -20,7 +20,9 @@ def __init__(self, replay_queue_arn: str):
     def add_event_with_receipt_handle(self, event_uniq_id: str, receipt_handle: str) -> None:
         self._events_with_receipt_handle[event_uniq_id] = receipt_handle
 
-    def replay_handler(self, output_destination: str, output_args: dict[str, Any], event_payload: dict[str, Any]) -> None:
+    def replay_handler(
+        self, output_destination: str, output_args: dict[str, Any], event_payload: dict[str, Any]
+    ) -> None:
         event_uniq_id: str = event_payload["_id"] + output_destination
         self._failed_event_ids.append(event_uniq_id)
 
diff --git a/handlers/aws/utils.py b/handlers/aws/utils.py
index c15a23dc..3610e089 100644
--- a/handlers/aws/utils.py
+++ b/handlers/aws/utils.py
@@ -153,9 +153,7 @@ def get_shipper_from_input(event_input: Input) -> CompositeShipper:
         if output.type == "logstash":
             shared_logger.debug("setting Logstash shipper")
 
-            logstash_shipper: ProtocolShipper = ShipperFactory.create_from_output(
-                output_type="logstash", output=output
-            )
+            logstash_shipper: ProtocolShipper = ShipperFactory.create_from_output(output_type="logstash", output=output)
 
             composite_shipper.add_shipper(shipper=logstash_shipper)
 
@@ -348,7 +346,9 @@ class ReplayEventHandler:
     def __init__(self, event_input: Input):
         self._event_input_id: str = event_input.id
 
-    def replay_handler(self, output_destination: str, output_args: dict[str, Any], event_payload: dict[str, Any]) -> None:
+    def replay_handler(
+        self, output_destination: str, output_args: dict[str, Any], event_payload: dict[str, Any]
+    ) -> None:
         sqs_replay_queue = os.environ["SQS_REPLAY_URL"]
 
         sqs_client = get_sqs_client()
@@ -363,7 +363,8 @@ def replay_handler(self, output_destination: str, output_args: dict[str, Any], e
         sqs_client.send_message(QueueUrl=sqs_replay_queue, MessageBody=json_dumper(message_payload))
 
         shared_logger.debug(
-            "sent to replay queue", extra={"output_destination": output_destination, "event_input_id": self._event_input_id}
+            "sent to replay queue",
+            extra={"output_destination": output_destination, "event_input_id": self._event_input_id},
         )
 
 
diff --git a/tests/handlers/aws/test_handler.py b/tests/handlers/aws/test_handler.py
index 68c901e9..3356bf0e 100644
--- a/tests/handlers/aws/test_handler.py
+++ b/tests/handlers/aws/test_handler.py
@@ -546,7 +546,9 @@ def test_lambda_handler_failure(self) -> None:
                     }
                 ]
             }
-            with self.assertRaisesRegex(OutputConfigException, "Cannot load output with destination output_destination"):
+            with self.assertRaisesRegex(
+                OutputConfigException, "Cannot load output with destination output_destination"
+            ):
                 ctx = ContextMock()
                 handler(event, ctx)  # type:ignore
 
diff --git a/tests/handlers/aws/test_utils.py b/tests/handlers/aws/test_utils.py
index 7208fa5f..1c4d704d 100644
--- a/tests/handlers/aws/test_utils.py
+++ b/tests/handlers/aws/test_utils.py
@@ -93,7 +93,7 @@ def test_get_trigger_type_and_config_source(self) -> None:
             "Records": [
                 {
                     "body": '{"output_destination": "output_destination", '
-                            '"output_args": "output_args", "event_payload": "event_payload"}'
+                    '"output_args": "output_args", "event_payload": "event_payload"}'
                 }
             ]
         }
@@ -261,8 +261,9 @@ def test_get_shipper_from_input(self) -> None:
         assert len(shipper._shippers) == 1
         assert isinstance(shipper._shippers[0], LogstashShipper)
 
-        event_input_kinesis = config.get_input_by_id("arn:aws:kinesis:eu-central-1:123456789:stream/test-esf"
-                                                     "-kinesis-stream")
+        event_input_kinesis = config.get_input_by_id(
+            "arn:aws:kinesis:eu-central-1:123456789:stream/test-esf" "-kinesis-stream"
+        )
         assert event_input_kinesis is not None
         shipper = get_shipper_from_input(event_input=event_input_kinesis)
         assert len(shipper._shippers) == 1
diff --git a/tests/share/test_config.py b/tests/share/test_config.py
index b9d34319..68e2bec5 100644
--- a/tests/share/test_config.py
+++ b/tests/share/test_config.py
@@ -591,8 +591,9 @@ def test_get_output_by_type(self) -> None:
                 batch_max_bytes=1,
             )
 
-            assert isinstance(input_sqs.get_output_by_destination(output_destination="elasticsearch_url"),
-                              ElasticsearchOutput)
+            assert isinstance(
+                input_sqs.get_output_by_destination(output_destination="elasticsearch_url"), ElasticsearchOutput
+            )
 
         with self.subTest("logstash output"):
             input_sqs = Input(input_type="s3-sqs", input_id="id")
@@ -616,8 +617,9 @@ def test_add_output(self) -> None:
                 batch_max_bytes=1,
            )
 
-            assert isinstance(input_sqs.get_output_by_destination(output_destination="elasticsearch_url"),
-                              ElasticsearchOutput)
+            assert isinstance(
+                input_sqs.get_output_by_destination(output_destination="elasticsearch_url"), ElasticsearchOutput
+            )
 
         with self.subTest("logstash output"):
             input_sqs = Input(input_type="s3-sqs", input_id="id")
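
For reviewers who want to confirm that hunks like these are formatting-only, Black can be
driven from Python directly. A minimal sketch, assuming Black is installed (pip install
black) and a 120-character line length (an assumption inferred from which lines got
wrapped in this patch, not something the patch states; the project's own Black
configuration is authoritative):

    # Sketch: run Black over the pre-patch replay_handler signature and compare
    # with the wrapped form in the replay_trigger.py/utils.py hunks above.
    # line_length=120 is an inferred setting, not confirmed by the patch.
    import black

    SRC = (
        "class ReplayEventHandler:\n"
        "    def replay_handler(self, output_destination: str, output_args: dict[str, Any],"
        " event_payload: dict[str, Any]) -> None:\n"
        "        pass\n"
    )

    print(black.format_str(SRC, mode=black.Mode(line_length=120)))
    # If the line-length assumption holds, this prints the parenthesized signature
    # seen in the hunks:
    #     def replay_handler(
    #         self, output_destination: str, output_args: dict[str, Any], event_payload: dict[str, Any]
    #     ) -> None: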