Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
0904a5b
[PRMP-975] add util to extract relevant querystring params
steph-torres-nhs Jan 6, 2026
d6a88c5
[PRMP-975] remove functionality of searching multiple tables
steph-torres-nhs Jan 6, 2026
e7d8076
Merge branch 'main' into PRMP-975
steph-torres-nhs Jan 9, 2026
0aae733
[PRMP-975] extract limit from querystring
steph-torres-nhs Jan 12, 2026
65e3deb
[PRMP-975] add pagination to doc ref search service
steph-torres-nhs Jan 12, 2026
50a322b
[PRMP-975] add query with pagination to doc ref search
steph-torres-nhs Jan 12, 2026
44d0f47
[PRMP-975] amend how filter is handled
steph-torres-nhs Jan 13, 2026
ed30447
[PRMP-975] update lg dynamo mock response
steph-torres-nhs Jan 13, 2026
7ff97f9
[PRMP-975] update lg dynamo mock response
steph-torres-nhs Jan 13, 2026
45ce75d
[PRMP-975] add filter build helper
steph-torres-nhs Jan 13, 2026
21135cd
[PRMP-975] camelize response
steph-torres-nhs Jan 13, 2026
47520bb
[PRMP-975] add test
steph-torres-nhs Jan 13, 2026
a360b9e
Merge branch 'main' into PRMP-975
steph-torres-nhs Jan 13, 2026
0968a90
[PRMP-975] address PR comments
steph-torres-nhs Jan 14, 2026
402ac05
[PRMP-975] format
steph-torres-nhs Jan 14, 2026
f86d414
[PRMP-975] adjust filter expression
steph-torres-nhs Jan 14, 2026
887bebb
[PRMP-975] change getDocSearchResults response object
steph-torres-nhs Jan 15, 2026
e3f19ed
[PRMP-975] check 204 response, upload complete filter
steph-torres-nhs Jan 16, 2026
1bbf5ad
Merge branch 'main' into PRMP-975
steph-torres-nhs Jan 16, 2026
c2f10ff
Merge branch 'main' into PRMP-975
steph-torres-nhs Jan 19, 2026
176fb90
[PRMP-975] amend stubbed response e2e tests
steph-torres-nhs Jan 19, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,10 @@ describe('GP Workflow: View Lloyd George record', () => {

cy.intercept('GET', '/SearchDocumentReferences*', {
statusCode: 200,
body: testFiles,
body: {
references: testFiles,
nextPageToken: 'abc',
},
}).as('searchDocumentReferences');

cy.get('#verify-submit').click();
Expand All @@ -142,7 +145,10 @@ describe('GP Workflow: View Lloyd George record', () => {

cy.intercept('GET', '/SearchDocumentReferences*', {
statusCode: 200,
body: testFiles,
body: {
references: testFiles,
nextPageToken: 'abc',
},
}).as('searchDocumentReferences');

setUpDownloadManifestIntercepts();
Expand Down Expand Up @@ -259,7 +265,10 @@ describe('GP Workflow: View Lloyd George record', () => {

cy.intercept('GET', '/SearchDocumentReferences*', {
statusCode: 200,
body: singleTestFile,
body: {
references: singleTestFile,
nextPageToken: 'abc'
},
}).as('searchDocumentReferences');

setUpDownloadManifestIntercepts();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,10 @@ describe('PCSE Workflow: Access and download found files', () => {

cy.intercept('GET', '/SearchDocumentReferences*', {
statusCode: 200,
body: searchDocumentReferencesResponse,
body: {
references: searchDocumentReferencesResponse,
nextPageToken: 'abc',
},
}).as('documentSearch');

cy.get('#verify-submit').click();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,12 @@ const mockUseConfig = useConfig as Mock;

const testFileName1 = 'John_1';
const testFileName2 = 'John_2';
const searchResults = [
buildSearchResult({ fileName: testFileName1 }),
buildSearchResult({ fileName: testFileName2 }),
];
const searchResults = {
references: [
buildSearchResult({ fileName: testFileName1 }),
buildSearchResult({ fileName: testFileName2 }),
],
};

let history = createMemoryHistory({
initialEntries: ['/'],
Expand Down Expand Up @@ -124,8 +126,8 @@ describe('RemoveRecordStage', () => {
).toBeInTheDocument();
});

expect(screen.getByText(searchResults[0].fileName)).toBeInTheDocument();
expect(screen.getByText(searchResults[1].fileName)).toBeInTheDocument();
expect(screen.getByText(searchResults.references[0].fileName)).toBeInTheDocument();
expect(screen.getByText(searchResults.references[1].fileName)).toBeInTheDocument();
});
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ describe('[GET] getDocumentSearchResults', () => {

test('Document search results handles a 2XX response', async () => {
const searchResult = buildSearchResult();
const mockResults = [searchResult];
const mockResults = { references: [searchResult] };
mockedAxios.get.mockImplementation(() =>
Promise.resolve({ status: 200, data: mockResults }),
);
Expand Down
7 changes: 4 additions & 3 deletions app/src/helpers/requests/getDocumentSearchResults.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ export type DocumentSearchResultsArgs = {
};

export type GetDocumentSearchResultsResponse = {
data: Array<SearchResult>;
references: Array<SearchResult>;
};

const getDocumentSearchResults = async ({
Expand All @@ -26,16 +26,17 @@ const getDocumentSearchResults = async ({
const gatewayUrl = baseUrl + endpoints.DOCUMENT_SEARCH;

try {
const response: GetDocumentSearchResultsResponse = await axios.get(gatewayUrl, {
const { data } = await axios.get<GetDocumentSearchResultsResponse>(gatewayUrl, {
headers: {
...baseHeaders,
},
params: {
patientId: nhsNumber?.replaceAll(/\s/g, ''), // replace whitespace
docType: docType,
limit: 9999,
},
});
return response?.data;
return data.references;
} catch (e) {
if (isLocal) {
return [
Expand Down
2 changes: 2 additions & 0 deletions lambdas/enums/dynamo_filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,3 +15,5 @@ class AttributeOperator(Enum):
class ConditionOperator(Enum):
OR = "|"
AND = "&"
EQUAL = "="
NOT_EQUAL = "<>"
31 changes: 23 additions & 8 deletions lambdas/handlers/document_reference_search_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,26 +9,24 @@
from utils.decorators.handle_lambda_exceptions import handle_lambda_exceptions
from utils.decorators.override_error_check import override_error_check
from utils.decorators.set_audit_arg import set_request_context_for_logging
from utils.decorators.validate_patient_id import (
extract_nhs_number_from_event,
validate_patient_id,
)
from utils.decorators.validate_patient_id import validate_patient_id
from utils.lambda_response import ApiGatewayResponse
from utils.request_context import request_context
from utils.utilities import camelize_dict

logger = LoggingService(__name__)


@set_request_context_for_logging
@validate_patient_id
@ensure_environment_variables(names=["DYNAMODB_TABLE_LIST"])
@ensure_environment_variables(names=["LLOYD_GEORGE_DYNAMODB_NAME"])
@override_error_check
@handle_lambda_exceptions
def lambda_handler(event, context):
request_context.app_interaction = LoggingAppInteraction.VIEW_PATIENT.value
logger.info("Starting document reference search process")

nhs_number = extract_nhs_number_from_event(event)
nhs_number, next_page_token, limit = extract_querystring_params(event)
request_context.patient_nhs_no = nhs_number

document_reference_search_service = DocumentReferenceSearchService()
Expand All @@ -41,9 +39,18 @@ def lambda_handler(event, context):
doc_status_filter = (
{"doc_status": "final"} if doc_upload_iteration2_enabled else None
)
response = document_reference_search_service.get_document_references(
nhs_number, check_upload_completed=True, additional_filters=doc_status_filter

logger.info("Searching for patient references with pagination.")

response_dict = (
document_reference_search_service.get_paginated_references_by_nhs_number(
nhs_number=nhs_number,
limit=limit,
next_page_token=next_page_token,
filter=doc_status_filter,
)
)
response = camelize_dict(response_dict)
logger.info("User is able to view docs", {"Result": "Successful viewing docs"})

if response:
Expand All @@ -54,3 +61,11 @@ def lambda_handler(event, context):
return ApiGatewayResponse(
204, json.dumps([]), "GET"
).create_api_gateway_response()


def extract_querystring_params(event):
    """Pull the search parameters out of an API Gateway proxy event.

    Args:
        event: API Gateway lambda proxy event. ``patientId`` is required
            (the ``@validate_patient_id`` decorator has already validated it);
            ``nextPageToken`` and ``limit`` are optional.

    Returns:
        Tuple of ``(nhs_number, next_page_token, limit)`` where
        ``next_page_token`` is ``None`` when absent and ``limit`` is an
        ``int`` or ``None`` (the paginated search service expects
        ``limit: int | None``, not the raw querystring string).

    Raises:
        KeyError: if ``patientId`` is missing from the querystring.
        ValueError: if ``limit`` is present but not a valid integer.
    """
    # API Gateway sends queryStringParameters as None (not {}) when the
    # request has no querystring at all — guard before subscripting.
    params = event.get("queryStringParameters") or {}

    nhs_number = params["patientId"]
    next_page_token = params.get("nextPageToken")

    raw_limit = params.get("limit")
    # Querystring values are always strings; convert so downstream code
    # (and boto3's Limit parameter) receives an integer.
    limit = int(raw_limit) if raw_limit is not None else None

    return nhs_number, next_page_token, limit
158 changes: 121 additions & 37 deletions lambdas/services/document_reference_search_service.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
import json
import os
from json import JSONDecodeError

from botocore.exceptions import ClientError
from enums.dynamo_filter import AttributeOperator
from enums.dynamo_filter import AttributeOperator, ConditionOperator
from enums.infrastructure import MAP_MTLS_TO_DYNAMO
from enums.lambda_error import LambdaError
from enums.metadata_field_names import DocumentReferenceMetadataFields
Expand All @@ -17,6 +16,7 @@
from utils.audit_logging_setup import LoggingService
from utils.common_query_filters import NotDeleted, UploadCompleted
from utils.dynamo_query_filter_builder import DynamoQueryFilterBuilder
from utils.dynamo_utils import build_mixed_condition_expression
from utils.exceptions import DynamoServiceException
from utils.lambda_exceptions import DocumentRefSearchException
from utils.lambda_header_utils import validate_common_name_in_mtls
Expand Down Expand Up @@ -46,10 +46,10 @@ def get_document_references(
api_request_context=api_request_context
)
try:
list_of_table_names = self._get_table_names(common_name)
table_name = self._get_table_name(common_name)
results = self._search_tables_for_documents(
nhs_number,
list_of_table_names,
table_name,
return_fhir,
additional_filters,
check_upload_completed,
Expand All @@ -67,54 +67,46 @@ def get_document_references(
)
raise DocumentRefSearchException(500, LambdaError.DocRefClient)

def _get_table_names(self, common_name: MtlsCommonNames | None) -> list[str]:
table_list = []
try:
table_list = json.loads(os.environ["DYNAMODB_TABLE_LIST"])
except JSONDecodeError as e:
logger.error(f"Failed to decode table list: {str(e)}")
raise

def _get_table_name(self, common_name: MtlsCommonNames | None) -> str:
if not common_name or common_name not in MtlsCommonNames:
return table_list
return os.environ["LLOYD_GEORGE_DYNAMODB_NAME"]

return [str(MAP_MTLS_TO_DYNAMO[common_name])]
return str(MAP_MTLS_TO_DYNAMO[common_name])

def _search_tables_for_documents(
self,
nhs_number: str,
table_names: list[str],
table_name: str,
return_fhir: bool,
filters=None,
check_upload_completed=False,
):
document_resources = []

for table_name in table_names:
logger.info(f"Searching for results in {table_name}")
filter_expression = self._get_filter_expression(
filters, upload_completed=check_upload_completed
)
logger.info(f"Searching for results in {table_name}")
filter_expression = self._get_filter_expression(
filters, upload_completed=check_upload_completed
)

if "coredocumentmetadata" not in table_name.lower():
documents = self.fetch_documents_from_table_with_nhs_number(
nhs_number, table_name, query_filter=filter_expression
)
else:
documents = self.fetch_documents_from_table(
search_condition=nhs_number,
search_key="NhsNumber",
table_name=table_name,
query_filter=filter_expression,
)
if "coredocumentmetadata" not in table_name.lower():
documents = self.fetch_documents_from_table_with_nhs_number(
nhs_number, table_name, query_filter=filter_expression
)
else:
documents = self.fetch_documents_from_table(
search_condition=nhs_number,
search_key="NhsNumber",
table_name=table_name,
query_filter=filter_expression,
)

if check_upload_completed:
self._validate_upload_status(documents)
if check_upload_completed:
self._validate_upload_status(documents)

processed_documents = self._process_documents(
documents, return_fhir=return_fhir
)
document_resources.extend(processed_documents)
processed_documents = self._process_documents(
documents, return_fhir=return_fhir
)
document_resources.extend(processed_documents)

logger.info(f"Found {len(document_resources)} document references")

Expand Down Expand Up @@ -241,3 +233,95 @@ def create_document_reference_fhir_response(
.model_dump(exclude_none=True)
)
return fhir_document_reference

def get_paginated_references_by_nhs_number(
self,
nhs_number: str,
limit: int | None = None,
next_page_token: str | None = None,
filter: dict | None = None,
api_request_context: dict = {},
):

filter_expression, condition_attribute_names, condition_attribute_values = (
self._build_pagination_filter(filter)
)

common_name = validate_common_name_in_mtls(
api_request_context=api_request_context
)

references, next_page_token = self.query_table_with_paginator(
table_name=self._get_table_name(common_name),
index_name="NhsNumberIndex",
search_key="NhsNumber",
search_condition=nhs_number,
limit=limit,
start_key=next_page_token,
filter_expression=filter_expression,
expression_attribute_names=condition_attribute_names,
expression_attribute_values=condition_attribute_values,
)

return {
"references": [self._build_document_model(ref) for ref in references],
"next_page_token": next_page_token,
}

def _build_pagination_filter(
self, filter_values: dict[str, str] | None
) -> tuple[str, dict, dict]:
conditions = [
{
"field": DocumentReferenceMetadataFields.DELETED.value,
"operator": ConditionOperator.NOT_EQUAL.value,
"value": "",
},
{
"field": DocumentReferenceMetadataFields.DELETED.value,
"operator": "attribute_not_exists",
},
]

filter, condition_attribute_names, condition_attribute_values = (
build_mixed_condition_expression(conditions=conditions, join_operator="OR")
)

if filter_values:
additional_conditions = []
for filter_key, filter_value in filter_values.items():
if filter_key == "custodian":
additional_conditions.append(
{
"field": DocumentReferenceMetadataFields.CUSTODIAN.value,
"operator": ConditionOperator.EQUAL.value,
"value": filter_value,
}
)
elif filter_key == "file_type":
# placeholder for future filtering
pass
elif filter_key == "doc_status":
additional_conditions.append(
{
"field": DocumentReferenceMetadataFields.DOC_STATUS.value,
"operator": ConditionOperator.EQUAL.value,
"value": filter_value,
}
)

(
additional_filter,
additional_condition_attribute_names,
additional_condition_attribute_values,
) = build_mixed_condition_expression(conditions=additional_conditions)
condition_attribute_names.update(additional_condition_attribute_names)
condition_attribute_values.update(additional_condition_attribute_values)

return (
f"({filter}) AND " + additional_filter,
condition_attribute_names,
condition_attribute_values,
)

return filter, condition_attribute_names, condition_attribute_values
Loading
Loading