Merge pull request #1355 from GSA/notify-api-1351
increase code coverage to 93 percent
terrazoon authored Oct 17, 2024
2 parents 2cfd1c6 + 621770d commit f4f6312
Showing 10 changed files with 1,099 additions and 476 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/checks.yml
@@ -63,7 +63,7 @@ jobs:
NOTIFY_E2E_TEST_PASSWORD: ${{ secrets.NOTIFY_E2E_TEST_PASSWORD }}
- name: Check coverage threshold
# TODO get this back up to 95
-run: poetry run coverage report -m --fail-under=91
+run: poetry run coverage report -m --fail-under=93

validate-new-relic-config:
runs-on: ubuntu-latest
2 changes: 1 addition & 1 deletion Makefile
@@ -84,7 +84,7 @@ test: ## Run tests and create coverage report
poetry run coverage run --omit=*/migrations/*,*/tests/* -m pytest --maxfail=10

## TODO set this back to 95 asap
-poetry run coverage report -m --fail-under=91
+poetry run coverage report -m --fail-under=93
poetry run coverage html -d .coverage_cache

.PHONY: py-lock
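
Both gates move together: the CI workflow and the local Makefile target now fail the build below 93% coverage, up from 91%, while the TODOs still aim for 95%. As a rough illustration of what the --fail-under flag enforces, here is a minimal sketch using the coverage Python API; the threshold constant mirrors the new gate, and running it assumes a .coverage data file already produced by coverage run.

# Sketch: approximately what `coverage report --fail-under=93` enforces.
# Assumes a `.coverage` data file written by an earlier `coverage run`.
import sys

import coverage

THRESHOLD = 93  # mirrors the new gate in checks.yml and the Makefile

cov = coverage.Coverage()
cov.load()  # read the .coverage data file
total = cov.report()  # prints the table and returns the total percentage
if total < THRESHOLD:
    print(f"Coverage {total:.2f}% is below the {THRESHOLD}% gate", file=sys.stderr)
    sys.exit(2)  # the exit status `coverage report` uses for a failed gate
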
18 changes: 1 addition & 17 deletions app/aws/s3.py
@@ -476,23 +476,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):

set_job_cache(job_cache, f"{job_id}_personalisation", extract_personalisation(job))

-    # If we can find the quick dictionary, use it
-    if job_cache.get(f"{job_id}_personalisation") is not None:
-        personalisation_to_return = job_cache.get(f"{job_id}_personalisation")[0].get(
-            job_row_number
-        )
-        if personalisation_to_return:
-            return personalisation_to_return
-        else:
-            current_app.logger.warning(
-                f"Was unable to retrieve personalisation from lookup dictionary for job {job_id}"
-            )
-            return {}
-    else:
-        current_app.logger.error(
-            f"Was unable to construct lookup dictionary for job {job_id}"
-        )
-        return {}
+    return job_cache.get(f"{job_id}_personalisation")[0].get(job_row_number)


def get_job_metadata_from_s3(service_id, job_id):
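
The rewrite drops the defensive branches around the cache lookup: after set_job_cache runs, the entry is assumed to exist, and a missing row now simply returns None instead of a logged {}. A runnable sketch of the new behavior follows; the (dict, ...) tuple layout per cache entry is an assumption inferred from the [0] subscript, not confirmed by this diff.

# Sketch of the simplified lookup; the (dict, ...) tuple layout per cache
# entry is an assumption inferred from the `[0]` indexing in the diff.
job_cache = {}

def get_personalisation(job_id, job_row_number):
    # set_job_cache(...) is assumed to have populated this key just above;
    # if it has not, the subscript now raises TypeError where the old code
    # logged an error and returned {}.
    return job_cache.get(f"{job_id}_personalisation")[0].get(job_row_number)

job_cache["42_personalisation"] = ({1: {"name": "Ada"}}, "cached-at")
print(get_personalisation("42", 1))   # {'name': 'Ada'}
print(get_personalisation("42", 99))  # None — the old code returned {} here
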
79 changes: 2 additions & 77 deletions app/commands.py
@@ -24,12 +24,6 @@
dao_create_or_update_annual_billing_for_year,
set_default_free_allowance_for_service,
)
-from app.dao.fact_billing_dao import (
-    delete_billing_data_for_service_for_day,
-    fetch_billing_data_for_day,
-    get_service_ids_that_need_billing_populated,
-    update_fact_billing,
-)
from app.dao.jobs_dao import dao_get_job_by_id
from app.dao.organization_dao import (
dao_add_service_to_organization,
@@ -63,7 +57,7 @@
TemplateHistory,
User,
)
-from app.utils import get_midnight_in_utc, utc_now
+from app.utils import utc_now
from notifications_utils.recipients import RecipientCSV
from notifications_utils.template import SMSMessageTemplate
from tests.app.db import (
@@ -167,6 +161,7 @@ def purge_functional_test_data(user_email_prefix):
delete_model_user(usr)


+# TODO maintainability what is the purpose of this command? Who would use it and why?
@notify_command(name="insert-inbound-numbers")
@click.option(
"-f",
@@ -175,7 +170,6 @@ def purge_functional_test_data(user_email_prefix):
help="""Full path of the file to upload, file is a contains inbound numbers, one number per line.""",
)
def insert_inbound_numbers_from_file(file_name):
-    # TODO maintainability what is the purpose of this command? Who would use it and why?

current_app.logger.info(f"Inserting inbound numbers from {file_name}")
with open(file_name) as file:
@@ -195,50 +189,6 @@ def setup_commands(application):
application.cli.add_command(command_group)


@notify_command(name="rebuild-ft-billing-for-day")
@click.option("-s", "--service_id", required=False, type=click.UUID)
@click.option(
"-d",
"--day",
help="The date to recalculate, as YYYY-MM-DD",
required=True,
type=click_dt(format="%Y-%m-%d"),
)
def rebuild_ft_billing_for_day(service_id, day):
# TODO maintainability what is the purpose of this command? Who would use it and why?

"""
Rebuild the data in ft_billing for the given service_id and date
"""

def rebuild_ft_data(process_day, service):
deleted_rows = delete_billing_data_for_service_for_day(process_day, service)
current_app.logger.info(
f"deleted {deleted_rows} existing billing rows for {service} on {process_day}"
)
transit_data = fetch_billing_data_for_day(
process_day=process_day, service_id=service
)
# transit_data = every row that should exist
for data in transit_data:
# upsert existing rows
update_fact_billing(data, process_day)
current_app.logger.info(
f"added/updated {len(transit_data)} billing rows for {service} on {process_day}"
)

if service_id:
# confirm the service exists
dao_fetch_service_by_id(service_id)
rebuild_ft_data(day, service_id)
else:
services = get_service_ids_that_need_billing_populated(
get_midnight_in_utc(day), get_midnight_in_utc(day + timedelta(days=1))
)
for row in services:
rebuild_ft_data(day, row.service_id)


@notify_command(name="bulk-invite-user-to-service")
@click.option(
"-f",
@@ -472,31 +422,6 @@ def associate_services_to_organizations():
current_app.logger.info("finished associating services to organizations")


@notify_command(name="populate-service-volume-intentions")
@click.option(
"-f",
"--file_name",
required=True,
help="Pipe delimited file containing service_id, SMS, email",
)
def populate_service_volume_intentions(file_name):
# [0] service_id
# [1] SMS:: volume intentions for service
# [2] Email:: volume intentions for service

# TODO maintainability what is the purpose of this command? Who would use it and why?

with open(file_name, "r") as f:
for line in itertools.islice(f, 1, None):
columns = line.split(",")
current_app.logger.info(columns)
service = dao_fetch_service_by_id(columns[0])
service.volume_sms = columns[1]
service.volume_email = columns[2]
dao_update_service(service)
current_app.logger.info("populate-service-volume-intentions complete")


@notify_command(name="populate-go-live")
@click.option(
"-f", "--file_name", required=True, help="CSV file containing live service data"
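
Both deleted commands were one-off operational scripts, which is presumably why the unanswered maintainability TODOs preceded their removal. The rebuild command followed a common delete-then-upsert rebuild shape; below is a self-contained sketch of that pattern, with in-memory stand-ins for the deleted fact_billing_dao helpers (all names here are hypothetical).

# A generic sketch of the delete-then-upsert rebuild shape used by the removed
# rebuild-ft-billing-for-day command. The helpers are hypothetical stand-ins
# for the deleted dao functions, backed by an in-memory table so this runs.
from datetime import date

FACT_TABLE = {("svc-1", date(2024, 10, 1)): ["stale-row"]}
SOURCE_ROWS = {("svc-1", date(2024, 10, 1)): [{"sms": 3}, {"sms": 7}]}


def delete_rows_for_day(day, service_id):
    return len(FACT_TABLE.pop((service_id, day), []))


def fetch_source_rows(day, service_id):
    return SOURCE_ROWS.get((service_id, day), [])


def rebuild_day(day, service_id):
    deleted = delete_rows_for_day(day, service_id)  # clear stale fact rows first
    rows = fetch_source_rows(day, service_id)  # every row that should exist
    FACT_TABLE[(service_id, day)] = rows  # upsert the recomputed rows
    print(f"deleted {deleted}, rebuilt {len(rows)} rows for {service_id} on {day}")


rebuild_day(date(2024, 10, 1), "svc-1")
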
24 changes: 13 additions & 11 deletions app/delivery/send_to_providers.py
@@ -98,17 +98,7 @@ def send_sms_to_provider(notification):

# TODO This is temporary to test the capability of validating phone numbers
# The future home of the validation is TBD
if "+" not in recipient:
recipient_lookup = f"+{recipient}"
else:
recipient_lookup = recipient
if recipient_lookup in current_app.config[
"SIMULATED_SMS_NUMBERS"
] and os.getenv("NOTIFY_ENVIRONMENT") in ["development", "test"]:
current_app.logger.info(hilite("#validate-phone-number fired"))
aws_pinpoint_client.validate_phone_number("01", recipient)
else:
current_app.logger.info(hilite("#validate-phone-number not fired"))
_experimentally_validate_phone_numbers(recipient)

sender_numbers = get_sender_numbers(notification)
if notification.reply_to_text not in sender_numbers:
@@ -145,6 +135,18 @@
return message_id


+def _experimentally_validate_phone_numbers(recipient):
+    if "+" not in recipient:
+        recipient_lookup = f"+{recipient}"
+    else:
+        recipient_lookup = recipient
+    if recipient_lookup in current_app.config["SIMULATED_SMS_NUMBERS"] and os.getenv(
+        "NOTIFY_ENVIRONMENT"
+    ) in ["development", "test"]:
+        current_app.logger.info(hilite("#validate-phone-number fired"))
+        aws_pinpoint_client.validate_phone_number("01", recipient)

def _get_verify_code(notification):
key = f"2facode-{notification.id}".replace(" ", "")
recipient = redis_store.get(key)
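
Extracting the experiment into _experimentally_validate_phone_numbers leaves send_sms_to_provider shorter and makes the branch testable in isolation, which fits the commit's coverage goal. A hypothetical pytest sketch of such a test; the notify_api app fixture and the simulated number are assumptions, not part of this diff.

# Hypothetical pytest sketch: the branch can now be exercised without
# driving all of send_sms_to_provider. The `notify_api` Flask-app fixture
# and the simulated number below are assumptions.
from unittest import mock


def test_simulated_number_triggers_validation(notify_api, monkeypatch):
    monkeypatch.setenv("NOTIFY_ENVIRONMENT", "test")
    with notify_api.test_request_context():
        notify_api.config["SIMULATED_SMS_NUMBERS"] = ["+14254147755"]
        with mock.patch(
            "app.delivery.send_to_providers.aws_pinpoint_client"
        ) as pinpoint:
            from app.delivery.send_to_providers import (
                _experimentally_validate_phone_numbers,
            )

            _experimentally_validate_phone_numbers("14254147755")
            pinpoint.validate_phone_number.assert_called_once_with(
                "01", "14254147755"
            )
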
57 changes: 0 additions & 57 deletions app/service/rest.py
@@ -453,16 +453,6 @@ def get_all_notifications_for_service(service_id):
data = notifications_filter_schema.load(MultiDict(request.get_json()))
current_app.logger.debug(f"use POST, request {request.get_json()} data {data}")

if data.get("to"):
notification_type = (
data.get("template_type")[0] if data.get("template_type") else None
)
return search_for_notification_by_to_field(
service_id=service_id,
search_term=data["to"],
statuses=data.get("status"),
notification_type=notification_type,
)
page = data["page"] if "page" in data else 1
page_size = (
data["page_size"]
@@ -583,53 +573,6 @@ def get_notification_for_service(service_id, notification_id):
)


-def search_for_notification_by_to_field(
-    service_id, search_term, statuses, notification_type
-):
-    results = notifications_dao.dao_get_notifications_by_recipient_or_reference(
-        service_id=service_id,
-        search_term=search_term,
-        statuses=statuses,
-        notification_type=notification_type,
-        page=1,
-        page_size=current_app.config["PAGE_SIZE"],
-    )
-
-    # We try and get the next page of results to work out if we need provide a pagination link to the next page
-    # in our response. Note, this was previously be done by having
-    # notifications_dao.dao_get_notifications_by_recipient_or_reference use count=False when calling
-    # Flask-Sqlalchemys `paginate'. But instead we now use this way because it is much more performant for
-    # services with many results (unlike using Flask SqlAlchemy `paginate` with `count=True`, this approach
-    # doesn't do an additional query to count all the results of which there could be millions but instead only
-    # asks for a single extra page of results).
-    next_page_of_pagination = notifications_dao.dao_get_notifications_by_recipient_or_reference(
-        service_id=service_id,
-        search_term=search_term,
-        statuses=statuses,
-        notification_type=notification_type,
-        page=2,
-        page_size=current_app.config["PAGE_SIZE"],
-        error_out=False,  # False so that if there are no results, it doesn't end in aborting with a 404
-    )
-
-    return (
-        jsonify(
-            notifications=notification_with_template_schema.dump(
-                results.items, many=True
-            ),
-            links=get_prev_next_pagination_links(
-                1,
-                len(next_page_of_pagination.items),
-                ".get_all_notifications_for_service",
-                statuses=statuses,
-                notification_type=notification_type,
-                service_id=service_id,
-            ),
-        ),
-        200,
-    )


@service_blueprint.route("/<uuid:service_id>/notifications/monthly", methods=["GET"])
def get_monthly_notification_stats(service_id):
# check service_id validity
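
The deleted helper's long comment records a pagination trick worth keeping in mind: instead of counting every matching row to decide whether a "next" link is needed, fetch one extra page and check whether it is empty. A standalone sketch of that idea over a plain list:

# Standalone sketch of the "fetch the next page instead of COUNT(*)" idea
# documented in the removed helper: asking for one extra page is cheap,
# while counting millions of matching rows is not.
def paginate(items, page, page_size):
    start = (page - 1) * page_size
    return items[start:start + page_size]


def page_with_next_link(items, page_size=50):
    current = paginate(items, page=1, page_size=page_size)
    peek = paginate(items, page=2, page_size=page_size)  # one extra query at most
    return {"items": current, "has_next": len(peek) > 0}


print(page_with_next_link(list(range(120)))["has_next"])  # True
print(page_with_next_link(list(range(10)))["has_next"])   # False
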
2 changes: 1 addition & 1 deletion app/service_invite/rest.py
@@ -86,7 +86,7 @@ def _create_service_invite(invited_user, invite_link_host):
redis_store.set(
f"email-personalisation-{saved_notification.id}",
json.dumps(personalisation),
-    ex=2*24*60*60,
+    ex=2 * 24 * 60 * 60,
)
send_notification_to_queue(saved_notification, queue=QueueNames.NOTIFY)

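
The only change here is spacing: 2 * 24 * 60 * 60 is still 172,800 seconds, a two-day expiry on the cached personalisation; the rewrite merely adds the spacing an auto-formatter such as black expects.
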
60 changes: 25 additions & 35 deletions notifications_utils/sanitise_text.py
@@ -122,19 +122,15 @@ def is_arabic(cls, value):
def is_punjabi(cls, value):
# Gurmukhi script or Shahmukhi script

if regex.search(r"[\u0A00-\u0A7F]+", value):
return True
elif regex.search(r"[\u0600-\u06FF]+", value):
return True
elif regex.search(r"[\u0750-\u077F]+", value):
return True
elif regex.search(r"[\u08A0-\u08FF]+", value):
return True
elif regex.search(r"[\uFB50-\uFDFF]+", value):
return True
elif regex.search(r"[\uFE70-\uFEFF]+", value):
return True
elif regex.search(r"[\u0900-\u097F]+", value):
if (
regex.search(r"[\u0A00-\u0A7F]+", value)
or regex.search(r"[\u0600-\u06FF]+", value)
or regex.search(r"[\u0750-\u077F]+", value)
or regex.search(r"[\u08A0-\u08FF]+", value)
or regex.search(r"[\uFB50-\uFDFF]+", value)
or regex.search(r"[\uFE70-\uFEFF]+", value)
or regex.search(r"[\u0900-\u097F]+", value)
):
return True
return False

@@ -156,33 +152,27 @@ def _is_extended_language_group_one(cls, value):

@classmethod
def _is_extended_language_group_two(cls, value):
if regex.search(r"\p{IsBuhid}", value):
return True
if regex.search(r"\p{IsCanadian_Aboriginal}", value):
return True
if regex.search(r"\p{IsCherokee}", value):
return True
if regex.search(r"\p{IsDevanagari}", value):
return True
if regex.search(r"\p{IsEthiopic}", value):
return True
if regex.search(r"\p{IsGeorgian}", value):
if (
regex.search(r"\p{IsBuhid}", value)
or regex.search(r"\p{IsCanadian_Aboriginal}", value)
or regex.search(r"\p{IsCherokee}", value)
or regex.search(r"\p{IsDevanagari}", value)
or regex.search(r"\p{IsEthiopic}", value)
or regex.search(r"\p{IsGeorgian}", value)
):
return True
return False

@classmethod
def _is_extended_language_group_three(cls, value):
if regex.search(r"\p{IsGreek}", value):
return True
if regex.search(r"\p{IsGujarati}", value):
return True
if regex.search(r"\p{IsHanunoo}", value):
return True
if regex.search(r"\p{IsHebrew}", value):
return True
if regex.search(r"\p{IsLimbu}", value):
return True
if regex.search(r"\p{IsKannada}", value):
if (
regex.search(r"\p{IsGreek}", value)
or regex.search(r"\p{IsGujarati}", value)
or regex.search(r"\p{IsHanunoo}", value)
or regex.search(r"\p{IsHebrew}", value)
or regex.search(r"\p{IsLimbu}", value)
or regex.search(r"\p{IsKannada}", value)
):
return True
return False

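
Collapsing each if/elif ladder into one parenthesized or-chain preserves the short-circuit behavior while trimming the statement count, which helps the coverage percentage. An alternative sketch (not what this commit does) expresses the same check with any() over a tuple of patterns; the function name is a hypothetical stand-in, and the \p{Is...} script properties are the ones the file already uses with the regex module.

# Alternative sketch (not what the commit does): the same short-circuit
# check via any() over a tuple of patterns, using the same `regex` module
# the file already imports for \p{Is...} script properties.
import regex

GROUP_THREE_SCRIPTS = (
    r"\p{IsGreek}",
    r"\p{IsGujarati}",
    r"\p{IsHanunoo}",
    r"\p{IsHebrew}",
    r"\p{IsLimbu}",
    r"\p{IsKannada}",
)


def is_extended_language_group_three(value):
    # any() stops at the first matching script, just like the or-chain
    return any(regex.search(pattern, value) for pattern in GROUP_THREE_SCRIPTS)


print(is_extended_language_group_three("Γειά σου"))  # True (Greek)
print(is_extended_language_group_three("hello"))     # False
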
(Diffs for the remaining 2 changed files are not shown.)
