Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

get_paginated_jobs, get_paginated_uploads: merge, cache job outcome stat processing #4316

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# This file was automatically copied from notifications-utils@92.0.2
# This file was automatically copied from notifications-utils@92.1.0

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
Expand Down
32 changes: 31 additions & 1 deletion app/dao/jobs_dao.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,14 @@
from datetime import datetime, timedelta

from flask import current_app
from notifications_utils.clients.redis import RequestCache
from notifications_utils.letter_timings import (
CANCELLABLE_JOB_LETTER_STATUSES,
letter_can_be_cancelled,
)
from sqlalchemy import and_, asc, desc, func

from app import db
from app import db, redis_store
from app.constants import (
JOB_STATUS_CANCELLED,
JOB_STATUS_FINISHED,
Expand All @@ -17,8 +18,10 @@
LETTER_TYPE,
NOTIFICATION_CANCELLED,
NOTIFICATION_CREATED,
NOTIFICATION_STATUS_TYPES_COMPLETED,
)
from app.dao.dao_utils import autocommit
from app.dao.fact_notification_status_dao import fetch_notification_statuses_for_job
from app.dao.templates_dao import dao_get_template_by_id
from app.models import (
FactNotificationStatus,
Expand Down Expand Up @@ -258,3 +261,30 @@ def find_missing_row_for_job(job_id, job_size):
.filter(Notification.job_row_number == None) # noqa
)
return query.all()


redis_cache = RequestCache(redis_store)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

minor: can we pop this up the top of the file? think it's a lil easier to see and reuse up there than in between other functions

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not sure where my reply to this went - do we actually want to encourage reuse for this though? I feel like using redis in this way from here isn't a pattern we have totally settled on..

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

fair, lets leave it here for now then



@redis_cache.set("job-{job_id}-notification-outcomes", ttl_in_seconds=timedelta(days=1).total_seconds())
def get_possibly_cached_notification_outcomes_for_job(
    job_id: uuid.UUID | str, notification_count: int | None, processing_started: datetime | None
):
    """Return per-status notification counts for a job, caching completed jobs.

    Picks the data source by job age: jobs older than three days are read
    from the aggregated ft_notification_status table, younger ones from the
    live notifications table, and a job that never started processing has no
    outcomes at all. The result is cached in Redis (1-day TTL) only once it
    is stable — i.e. every notification is accounted for and none is in a
    state that can still change.
    """
    if processing_started is None:
        # Job never started: nothing to count yet.
        outcome_rows = []
    else:
        started_naive = processing_started.replace(tzinfo=None)
        if started_naive < midnight_n_days_ago(3):
            # Old job: read from the ft_notification_status fact table.
            outcome_rows = fetch_notification_statuses_for_job(job_id)
        else:
            # Recent job: read from the live notifications table.
            outcome_rows = dao_get_notification_outcomes_for_job(job_id)

    accounted_for = sum(row.count for row in outcome_rows) == notification_count
    all_settled = all(row.status in NOTIFICATION_STATUS_TYPES_COMPLETED for row in outcome_rows)

    return RequestCache.CacheResultWrapper(
        value=[{"status": row.status, "count": row.count} for row in outcome_rows],
        # cache if all rows of the job are accounted for and no
        # notifications are in a state still likely to change
        cache_decision=bool(accounted_for and all_settled),
    )
18 changes: 5 additions & 13 deletions app/job/rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,6 @@
JOB_STATUS_SCHEDULED,
LETTER_TYPE,
)
from app.dao.fact_notification_status_dao import (
fetch_notification_statuses_for_job,
)
from app.dao.jobs_dao import (
can_letter_job_be_cancelled,
dao_cancel_letter_job,
Expand All @@ -24,6 +21,7 @@
dao_get_scheduled_job_by_id_and_service_id,
dao_get_scheduled_job_stats,
dao_update_job,
get_possibly_cached_notification_outcomes_for_job,
)
from app.dao.notifications_dao import (
dao_get_notification_count_for_job_id,
Expand All @@ -39,7 +37,7 @@
notifications_filter_schema,
unarchived_template_schema,
)
from app.utils import midnight_n_days_ago, pagination_links
from app.utils import pagination_links

job_blueprint = Blueprint("job", __name__, url_prefix="/service/<uuid:service_id>/job")

Expand Down Expand Up @@ -220,15 +218,9 @@ def get_paginated_jobs(
start = job_data["processing_started"]
start = dateutil.parser.parse(start).replace(tzinfo=None) if start else None

if start is None:
statistics = []
elif start.replace(tzinfo=None) < midnight_n_days_ago(3):
# ft_notification_status table
statistics = fetch_notification_statuses_for_job(job_data["id"])
else:
# notifications table
statistics = dao_get_notification_outcomes_for_job(job_data["id"])
job_data["statistics"] = [{"status": statistic.status, "count": statistic.count} for statistic in statistics]
job_data["statistics"] = get_possibly_cached_notification_outcomes_for_job(
job_data["id"], job_data["notification_count"], start
)

return {
"data": data,
Expand Down
23 changes: 5 additions & 18 deletions app/upload/rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,14 @@

from flask import Blueprint, abort, current_app, jsonify, request

from app.dao.fact_notification_status_dao import (
fetch_notification_statuses_for_job,
)
from app.dao.jobs_dao import dao_get_notification_outcomes_for_job
from app.dao.jobs_dao import get_possibly_cached_notification_outcomes_for_job
from app.dao.uploads_dao import (
dao_get_uploaded_letters_by_print_date,
dao_get_uploads_by_service_id,
)
from app.errors import register_errors
from app.schemas import notification_with_template_schema
from app.utils import midnight_n_days_ago, pagination_links
from app.utils import pagination_links

upload_blueprint = Blueprint("upload", __name__, url_prefix="/service/<uuid:service_id>/upload")

Expand Down Expand Up @@ -49,19 +46,9 @@ def get_paginated_uploads(service_id, limit_days, page):
"recipient": upload.recipient,
}
if upload.upload_type == "job":
start = upload.processing_started

if start is None:
statistics = []
elif start.replace(tzinfo=None) < midnight_n_days_ago(3):
# ft_notification_status table
statistics = fetch_notification_statuses_for_job(upload.id)
else:
# notifications table
statistics = dao_get_notification_outcomes_for_job(upload.id)
upload_dict["statistics"] = [
{"status": statistic.status, "count": statistic.count} for statistic in statistics
]
upload_dict["statistics"] = get_possibly_cached_notification_outcomes_for_job(
upload.id, upload.notification_count, upload.processing_started
)
else:
upload_dict["statistics"] = []
data.append(upload_dict)
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# This file was automatically copied from notifications-utils@92.0.2
# This file was automatically copied from notifications-utils@92.1.0

[tool.black]
line-length = 120
Expand Down
2 changes: 1 addition & 1 deletion requirements.in
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ lxml==4.9.3
notifications-python-client==8.0.1

# Run `make bump-utils` to update to the latest version
notifications-utils @ git+https://github.com/alphagov/notifications-utils.git@92.0.2
notifications-utils @ git+https://github.com/alphagov/notifications-utils.git@92.1.0

# gds-metrics requires prometheseus 0.2.0, override that requirement as 0.7.1 brings significant performance gains
prometheus-client==0.14.1
Expand Down
6 changes: 4 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,9 @@ gds-metrics @ git+https://github.com/alphagov/gds_metrics_python.git@6f1840a57b6
govuk-bank-holidays==0.15
# via notifications-utils
greenlet==3.0.3
# via eventlet
# via
# eventlet
# sqlalchemy
gunicorn @ git+https://github.com/benoitc/gunicorn.git@1299ea9e967a61ae2edebe191082fd169b864c64
# via
# -r requirements.in
Expand Down Expand Up @@ -147,7 +149,7 @@ mistune==0.8.4
# via notifications-utils
notifications-python-client==8.0.1
# via -r requirements.in
notifications-utils @ git+https://github.com/alphagov/notifications-utils.git@7793546055d819904c76da73628b64d3eda255c4
notifications-utils @ git+https://github.com/alphagov/notifications-utils.git@bbdc5b43e959bca9891ad7994d3408678065d3e1
# via -r requirements.in
ordered-set==4.1.0
# via notifications-utils
Expand Down
3 changes: 2 additions & 1 deletion requirements_for_test.txt
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,7 @@ greenlet==3.0.3
# via
# -r requirements.txt
# eventlet
# sqlalchemy
gunicorn @ git+https://github.com/benoitc/gunicorn.git@1299ea9e967a61ae2edebe191082fd169b864c64
# via
# -r requirements.txt
Expand Down Expand Up @@ -223,7 +224,7 @@ mypy-extensions==1.0.0
# via black
notifications-python-client==8.0.1
# via -r requirements.txt
notifications-utils @ git+https://github.com/alphagov/notifications-utils.git@7793546055d819904c76da73628b64d3eda255c4
notifications-utils @ git+https://github.com/alphagov/notifications-utils.git@bbdc5b43e959bca9891ad7994d3408678065d3e1
# via -r requirements.txt
ordered-set==4.1.0
# via
Expand Down
2 changes: 1 addition & 1 deletion requirements_for_test_common.in
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# This file was automatically copied from notifications-utils@92.0.2
# This file was automatically copied from notifications-utils@92.1.0

beautifulsoup4==4.12.3
pytest==8.3.4
Expand Down
Loading