Commit 621770d: merge from main and code review feedback

Kenneth Kehl committed Oct 8, 2024
2 parents ff5d405 + 37dc593
Showing 8 changed files with 28 additions and 132 deletions.
9 changes: 9 additions & 0 deletions app/aws/s3.py
@@ -264,6 +264,9 @@ def file_exists(file_location):


def get_job_location(service_id, job_id):
+   current_app.logger.info(
+       f"#s3-partitioning NEW JOB_LOCATION: {NEW_FILE_LOCATION_STRUCTURE.format(service_id, job_id)}"
+   )
    return (
        current_app.config["CSV_UPLOAD_BUCKET"]["bucket"],
        NEW_FILE_LOCATION_STRUCTURE.format(service_id, job_id),
@@ -279,6 +282,9 @@ def get_old_job_location(service_id, job_id):
    but it will take a few days where we have to support both formats.
    Remove this when everything works with the NEW_FILE_LOCATION_STRUCTURE.
    """
+   current_app.logger.info(
+       f"#s3-partitioning OLD JOB LOCATION: {FILE_LOCATION_STRUCTURE.format(service_id, job_id)}"
+   )
    return (
        current_app.config["CSV_UPLOAD_BUCKET"]["bucket"],
        FILE_LOCATION_STRUCTURE.format(service_id, job_id),
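The docstring above notes that both layouts must be supported for a few days. A minimal sketch of what a dual-layout read could look like during that window (the helper name is hypothetical and not part of this commit; `ClientError` is the standard boto3 exception raised when a key is missing):

```python
from botocore.exceptions import ClientError

# Hypothetical helper, not in this commit: read a job CSV object, trying the
# new partitioned layout first and falling back to the old flat layout.
def get_job_object_with_fallback(service_id, job_id):
    try:
        return get_s3_object(*get_job_location(service_id, job_id)).get()
    except ClientError:
        # Object not found under the new key structure; try the old one.
        return get_s3_object(*get_old_job_location(service_id, job_id)).get()
```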
@@ -470,6 +476,9 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):


def get_job_metadata_from_s3(service_id, job_id):
+   current_app.logger.info(
+       f"#s3-partitioning CALLING GET_JOB_METADATA with {service_id}, {job_id}"
+   )
    obj = get_s3_object(*get_job_location(service_id, job_id))
    return obj.get()["Metadata"]
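For context, the metadata returned here is whatever was attached to the job's CSV object when it was uploaded. A sketch of the producing side, under the assumption that the uploader attaches job attributes as S3 object metadata (the function and parameter names are illustrative, not from this commit):

```python
# Hypothetical producing side: upload a job CSV with its metadata attached,
# using the same partitioned location helper as the readers above.
def upload_job_csv(service_id, job_id, csv_bytes, metadata):
    obj = get_s3_object(*get_job_location(service_id, job_id))
    obj.put(
        Body=csv_bytes,
        ServerSideEncryption="AES256",
        Metadata=metadata,  # string-to-string mapping, e.g. {"template_id": "..."}
    )
```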

71 changes: 0 additions & 71 deletions app/commands.py
@@ -189,51 +189,6 @@ def setup_commands(application):
    application.cli.add_command(command_group)


# TODO maintainability what is the purpose of this command? Who would use it and why?
# COMMENTING OUT UNTIL WE DETERMINE IF WE NEED IT OR NOT
# @notify_command(name="rebuild-ft-billing-for-day")
# @click.option("-s", "--service_id", required=False, type=click.UUID)
# @click.option(
#     "-d",
#     "--day",
#     help="The date to recalculate, as YYYY-MM-DD",
#     required=True,
#     type=click_dt(format="%Y-%m-%d"),
# )
# def rebuild_ft_billing_for_day(service_id, day):

#     """
#     Rebuild the data in ft_billing for the given service_id and date
#     """

#     def rebuild_ft_data(process_day, service):
#         deleted_rows = delete_billing_data_for_service_for_day(process_day, service)
#         current_app.logger.info(
#             f"deleted {deleted_rows} existing billing rows for {service} on {process_day}"
#         )
#         transit_data = fetch_billing_data_for_day(
#             process_day=process_day, service_id=service
#         )
#         # transit_data = every row that should exist
#         for data in transit_data:
#             # upsert existing rows
#             update_fact_billing(data, process_day)
#         current_app.logger.info(
#             f"added/updated {len(transit_data)} billing rows for {service} on {process_day}"
#         )

#     if service_id:
#         # confirm the service exists
#         dao_fetch_service_by_id(service_id)
#         rebuild_ft_data(day, service_id)
#     else:
#         services = get_service_ids_that_need_billing_populated(
#             get_midnight_in_utc(day), get_midnight_in_utc(day + timedelta(days=1))
#         )
#         for row in services:
#             rebuild_ft_data(day, row.service_id)


@notify_command(name="bulk-invite-user-to-service")
@click.option(
"-f",
@@ -467,32 +422,6 @@ def associate_services_to_organizations():
    current_app.logger.info("finished associating services to organizations")


# TODO maintainability what is the purpose of this command? Who would use it and why?
# COMMENTING OUT UNTIL WE DETERMINE IF WE NEED IT OR NOT
# @notify_command(name="populate-service-volume-intentions")
# @click.option(
#     "-f",
#     "--file_name",
#     required=True,
#     help="Pipe delimited file containing service_id, SMS, email",
# )
# def populate_service_volume_intentions(file_name):
#     # [0] service_id
#     # [1] SMS:: volume intentions for service
#     # [2] Email:: volume intentions for service

#     with open(file_name, "r") as f:
#         for line in itertools.islice(f, 1, None):
#             columns = line.split(",")
#             current_app.logger.info(columns)
#             service = dao_fetch_service_by_id(columns[0])
#             service.volume_sms = columns[1]
#             service.volume_email = columns[2]
#             dao_update_service(service)
#     current_app.logger.info("populate-service-volume-intentions complete")


@notify_command(name="populate-go-live")
@click.option(
"-f", "--file_name", required=True, help="CSV file containing live service data"
1 change: 1 addition & 0 deletions app/job/rest.py
@@ -175,6 +175,7 @@ def create_job(service_id):
    original_file_name = data.get("original_file_name")
    data.update({"service": service_id})
    try:
+       current_app.logger.info(f"#s3-partitioning DATA IN CREATE_JOB: {data}")
        data.update(**get_job_metadata_from_s3(service_id, data["id"]))
    except KeyError:
        raise InvalidRequest(
57 changes: 0 additions & 57 deletions app/service/rest.py
@@ -453,16 +453,6 @@ def get_all_notifications_for_service(service_id):
    data = notifications_filter_schema.load(MultiDict(request.get_json()))
    current_app.logger.debug(f"use POST, request {request.get_json()} data {data}")

    # if data.get("to"):
    #     notification_type = (
    #         data.get("template_type")[0] if data.get("template_type") else None
    #     )
    #     return search_for_notification_by_to_field(
    #         service_id=service_id,
    #         search_term=data["to"],
    #         statuses=data.get("status"),
    #         notification_type=notification_type,
    #     )
    page = data["page"] if "page" in data else 1
    page_size = (
        data["page_size"]
@@ -583,53 +573,6 @@ def get_notification_for_service(service_id, notification_id):
    )


# def search_for_notification_by_to_field(
#     service_id, search_term, statuses, notification_type
# ):
#     results = notifications_dao.dao_get_notifications_by_recipient_or_reference(
#         service_id=service_id,
#         search_term=search_term,
#         statuses=statuses,
#         notification_type=notification_type,
#         page=1,
#         page_size=current_app.config["PAGE_SIZE"],
#     )

#     # We try to get the next page of results to work out if we need to provide a
#     # pagination link to the next page in our response. Note: this was previously
#     # done by having notifications_dao.dao_get_notifications_by_recipient_or_reference
#     # use count=False when calling Flask-SQLAlchemy's `paginate`. We now do it this
#     # way because it is much more performant for services with many results: unlike
#     # `paginate` with `count=True`, this approach doesn't run an additional query to
#     # count all the results (of which there could be millions) but instead asks only
#     # for a single extra page of results.
#     next_page_of_pagination = notifications_dao.dao_get_notifications_by_recipient_or_reference(
#         service_id=service_id,
#         search_term=search_term,
#         statuses=statuses,
#         notification_type=notification_type,
#         page=2,
#         page_size=current_app.config["PAGE_SIZE"],
#         error_out=False,  # False so that if there are no results, it doesn't abort with a 404
#     )

#     return (
#         jsonify(
#             notifications=notification_with_template_schema.dump(
#                 results.items, many=True
#             ),
#             links=get_prev_next_pagination_links(
#                 1,
#                 len(next_page_of_pagination.items),
#                 ".get_all_notifications_for_service",
#                 statuses=statuses,
#                 notification_type=notification_type,
#                 service_id=service_id,
#             ),
#         ),
#         200,
#     )
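The long comment in the removed block documents a pagination technique worth keeping in mind: instead of issuing an expensive COUNT(*) over potentially millions of rows, fetch just beyond the current page and use the presence of extra results to decide whether to show a next-page link. A minimal sketch of the same idea, assuming `query` is a plain SQLAlchemy query object (the helper name is illustrative, not from this codebase):

```python
# Peek one row past the requested page instead of counting all results.
def page_items_and_has_next(query, page, page_size):
    rows = query.limit(page_size + 1).offset((page - 1) * page_size).all()
    return rows[:page_size], len(rows) > page_size
```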


@service_blueprint.route("/<uuid:service_id>/notifications/monthly", methods=["GET"])
def get_monthly_notification_stats(service_id):
    # check service_id validity
2 changes: 1 addition & 1 deletion app/service_invite/rest.py
@@ -86,7 +86,7 @@ def _create_service_invite(invited_user, invite_link_host):
    redis_store.set(
        f"email-personalisation-{saved_notification.id}",
        json.dumps(personalisation),
-       ex=1800,
+       ex=2 * 24 * 60 * 60,
    )
    send_notification_to_queue(saved_notification, queue=QueueNames.NOTIFY)
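For reference, the expiry moves from 1800 seconds (30 minutes) to 2 * 24 * 60 * 60 = 172,800 seconds (2 days), presumably so the cached personalisation remains available well after the invite is queued. A named constant would make the intent self-documenting; a small sketch (a readability suggestion only, not part of this commit):

```python
TWO_DAYS_IN_SECONDS = 2 * 24 * 60 * 60  # 172800

redis_store.set(
    f"email-personalisation-{saved_notification.id}",
    json.dumps(personalisation),
    ex=TWO_DAYS_IN_SECONDS,
)
```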

1 change: 0 additions & 1 deletion app/user/rest.py
@@ -599,7 +599,6 @@ def fetch_user_by_email():
        fetched_user = get_user_by_email(email["email"])
        debug_not_production(hilite(f"fetched user is {fetched_user}"))
        result = fetched_user.serialize()
-       debug_not_production(hilite(f"result is serialized to {result}"))
        return jsonify(data=result)
    except Exception as e:
        debug_not_production(hilite(f"Failed with {e}!!"))
11 changes: 11 additions & 0 deletions docs/all.md
@@ -1242,6 +1242,17 @@ Notify.gov DNS records are maintained within [the 18f/dns repository](https://gi
- Rename to `api_static_scan_DATE.zip` and add it to 🔒 https://drive.google.com/drive/folders/1dSe9H7Ag_hLfi5hmQDB2ktWaDwWSf4_R
- Repeat for https://github.com/GSA/notifications-admin/actions/workflows/daily_checks.yml
## Rotating the DANGEROUS_SALT

1. Start the API locally with `make run-procfile`.
2. In a separate terminal tab, navigate to the API project and run `poetry run flask command generate-salt` (a sketch of what this command might look like follows this list).
3. A random secret will be printed in that tab.
4. In the admin project, go to GitHub -> Settings -> Secrets and variables -> Actions and find the DANGEROUS_SALT secret for staging. Open it, paste in the result of step 3, and save. Repeat for the API project, also for staging.
5. Repeat steps 3 and 4 for demo.
6. Repeat steps 3 and 4 for production.

The important thing is to use the same secret for Admin and API on each tier; that is, you only generate three secrets in total.
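A minimal sketch of what the `generate-salt` command might look like; only the invocation `poetry run flask command generate-salt` is documented above, so the actual implementation in the API project may differ:

```python
import secrets

import click

# Hypothetical implementation: in the real app this would be registered under
# the `command` CLI group (hence `flask command generate-salt`).
@click.command("generate-salt")
def generate_salt():
    # 32 bytes of randomness, URL-safe so it pastes cleanly into a
    # GitHub Actions secret field.
    click.echo(secrets.token_urlsafe(32))
```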
## <a name="gotcha"></a> Known Gotchas
8 changes: 6 additions & 2 deletions tests/app/service/test_rest.py
@@ -1974,7 +1974,9 @@ def test_get_monthly_notification_stats_by_user(
    auth_header = create_admin_authorization_header()

    response = client.get(
-       path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/monthly?year=2024"),
+       path=(
+           f"/service/{sample_service.id}/notifications/{sample_user.id}/monthly?year=2024"
+       ),
        headers=[auth_header],
    )

@@ -1999,7 +2001,9 @@ def test_get_single_month_notification_stats_by_user(
    auth_header = create_admin_authorization_header()

    response = client.get(
-       path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/month?year=2024&month=07"),
+       path=(
+           f"/service/{sample_service.id}/notifications/{sample_user.id}/month?year=2024&month=07"
+       ),
        headers=[auth_header],
    )

