From 242730fe533ab28fcae4b8f214d567217ec0abe0 Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Mon, 18 Dec 2023 15:54:59 +0000 Subject: [PATCH 01/10] Turn fetchall to fetchone --- application/service_sync/data_processing/get_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/application/service_sync/data_processing/get_data.py b/application/service_sync/data_processing/get_data.py index 4f0102964..407eb2361 100644 --- a/application/service_sync/data_processing/get_data.py +++ b/application/service_sync/data_processing/get_data.py @@ -36,7 +36,7 @@ def get_dos_service_and_history(service_id: int) -> tuple[DoSService, ServiceHis with connect_to_db_writer() as connection: # Query the DoS database for the service cursor = query_dos_db(connection=connection, query=sql_query, query_vars=query_vars) - rows: list[DictRow] = cursor.fetchall() + rows: list[DictRow] = cursor.fetchone() if len(rows) == 1: # Select first row (service) and create DoSService object service = DoSService(rows[0]) From 489c5ef8f2560b6aa9723bf2b497a05436271b89 Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Tue, 19 Dec 2023 18:46:07 +0000 Subject: [PATCH 02/10] Database call fetchone fixes --- .../service_sync/data_processing/get_data.py | 11 ++++------- .../data_processing/tests/test_get_data.py | 17 +---------------- 2 files changed, 5 insertions(+), 23 deletions(-) diff --git a/application/service_sync/data_processing/get_data.py b/application/service_sync/data_processing/get_data.py index 407eb2361..b15630cca 100644 --- a/application/service_sync/data_processing/get_data.py +++ b/application/service_sync/data_processing/get_data.py @@ -36,19 +36,16 @@ def get_dos_service_and_history(service_id: int) -> tuple[DoSService, ServiceHis with connect_to_db_writer() as connection: # Query the DoS database for the service cursor = query_dos_db(connection=connection, query=sql_query, query_vars=query_vars) - rows: list[DictRow] = cursor.fetchone() - if len(rows) == 1: + row: 
DictRow = cursor.fetchone() + if row is not None: # Select first row (service) and create DoSService object - service = DoSService(rows[0]) + service = DoSService(row) logger.append_keys(service_name=service.name) logger.append_keys(service_uid=service.uid) logger.append_keys(type_id=service.typeid) - elif not rows: + elif row is None: msg = f"Service ID {service_id} not found" raise ValueError(msg) - else: - msg = f"Multiple services found for Service Id: {service_id}" - raise ValueError(msg) # Set up remaining service data service.standard_opening_times = get_standard_opening_times_from_db( connection=connection, diff --git a/application/service_sync/data_processing/tests/test_get_data.py b/application/service_sync/data_processing/tests/test_get_data.py index 5a4f55d85..75a683538 100644 --- a/application/service_sync/data_processing/tests/test_get_data.py +++ b/application/service_sync/data_processing/tests/test_get_data.py @@ -51,23 +51,8 @@ def test_get_dos_service_and_history_no_match( ) -> None: # Arrange service_id = 12345 - mock_query_dos_db.return_value.fetchall.return_value = [] + mock_query_dos_db.return_value.fetchone.return_value = None # Act with pytest.raises(ValueError, match=f"Service ID {service_id} not found"): get_dos_service_and_history(service_id) mock_connect_to_db_writer.assert_called_once() - - -@patch(f"{FILE_PATH}.query_dos_db") -@patch(f"{FILE_PATH}.connect_to_db_writer") -def test_get_dos_service_and_history_mutiple_matches( - mock_connect_to_db_writer: MagicMock, - mock_query_dos_db: MagicMock, -) -> None: - # Arrange - service_id = 12345 - mock_query_dos_db.return_value.fetchall.return_value = [["Test"], ["Test"]] - # Act - with pytest.raises(ValueError, match=f"Multiple services found for Service Id: {service_id}"): - get_dos_service_and_history(service_id) - mock_connect_to_db_writer.assert_called_once() From a179c72f563ee380f4236de31434c6aa790af5cb Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Wed, 27 Dec 2023 14:44:57 +0000 
Subject: [PATCH 03/10] Attempt single query for service and opening times --- application/common/dos.py | 41 +++++++++++++ .../service_sync/data_processing/get_data.py | 59 +++++++++++++++++++ 2 files changed, 100 insertions(+) diff --git a/application/common/dos.py b/application/common/dos.py index 6e72ec109..3d83916df 100644 --- a/application/common/dos.py +++ b/application/common/dos.py @@ -264,6 +264,29 @@ def db_rows_to_spec_open_times(db_rows: Iterable[dict]) -> list[SpecifiedOpening return specified_opening_times +def db_big_rows_to_spec_open_times(db_rows: Iterable[dict]) -> list[SpecifiedOpeningTime]: + """Turns a set of dos database rows into a list of SpecifiedOpenTime objects. + + note: The rows must to be for the same service. + """ + specified_opening_times = [] + date_sorted_rows = sorted(db_rows, key=lambda row: (row["date"], row["starttime"])) + for date, db_rows in groupby(date_sorted_rows, lambda row: row["date"]): + is_open = True + open_periods = [] + specified_op_times_ids = [] + for row in list(db_rows): + if row["ssot_id"] is not None and row["ssot_id"] not in specified_op_times_ids: + specified_op_times_ids.append = row["ssot_id"] + if row["isclosed"] is True: + is_open = False + else: + open_periods.append(OpenPeriod(row["starttime"], row["endtime"])) + specified_opening_times.append(SpecifiedOpeningTime(open_periods, date, is_open)) + + return specified_opening_times + + def db_rows_to_std_open_times(db_rows: Iterable[dict]) -> StandardOpeningTimes: """Turns a set of dos database rows into a StandardOpeningTime object. @@ -279,6 +302,24 @@ def db_rows_to_std_open_times(db_rows: Iterable[dict]) -> StandardOpeningTimes: return standard_opening_times +def db_big_rows_to_std_open_times(db_rows: Iterable[dict]) -> StandardOpeningTimes: + """Turns a set of dos database rows into a StandardOpeningTime object. + + note: The rows must be for the same service. 
+ """ + standard_opening_times = StandardOpeningTimes() + std_op_times_ids = [] + for row in db_rows: + if row["sdot_id"] is not None and row["sdot_id"] not in std_op_times_ids: + std_op_times_ids.append = row["sdot_id"] + weekday = row["name"].lower() + start = row["day_starttime"] + end = row["day_endtime"] + open_period = OpenPeriod(start, end) + standard_opening_times.add_open_period(open_period, weekday) + return standard_opening_times + + def has_palliative_care(service: DoSService, connection: Connection) -> bool: """Checks if a service has palliative care. diff --git a/application/service_sync/data_processing/get_data.py b/application/service_sync/data_processing/get_data.py index b15630cca..7e459fb97 100644 --- a/application/service_sync/data_processing/get_data.py +++ b/application/service_sync/data_processing/get_data.py @@ -4,6 +4,8 @@ from .service_histories import ServiceHistories from common.dos import ( DoSService, + db_big_rows_to_spec_open_times, + db_big_rows_to_std_open_times, get_specified_opening_times_from_db, get_standard_opening_times_from_db, has_blood_pressure, @@ -67,3 +69,60 @@ def get_dos_service_and_history(service_id: int) -> tuple[DoSService, ServiceHis service_histories.create_service_histories_entry() # Connection closed by context manager return service, service_histories + + +def get_dos_service_and_history_one_query(service_id: int) -> tuple[DoSService, ServiceHistories]: + """Retrieves DoS Services from DoS database. 
+ + Args: + service_id (str): Id of service to retrieve + + Returns: + Tuple[DoSService, ServiceHistories]: Tuple of DoS service and service history + + """ + sql_query = ( + "SELECT s.id, uid, s.name, odscode, address, town, postcode, web, typeid, statusid, ss.name status_name, " + "publicphone, publicname, st.name service_type_name, easting, northing, latitude, longitude, " + 'sdo.id as "sdo_id", sdo.dayid, otd.name, sdot.id as "sdot_id", sdot.starttime as "day_starttime", ' + 'sdot.endtime as "day_endtime", ssod.id as "ssod_id", ssod.date, ssot.id as "ssot_id", ' + "ssot.starttime, ssot.endtime, ssot.isclosed " + "FROM services s " + "INNER JOIN servicetypes st ON s.typeid = st.id INNER JOIN servicestatuses ss on s.statusid = ss.id " + "LEFT JOIN servicedayopenings sdo ON s.id = sdo.serviceid " + "LEFT JOIN openingtimedays otd ON sdo.dayid = otd.id " + "LEFT JOIN servicedayopeningtimes sdot ON sdo.id = sdot.servicedayopeningid " + "LEFT JOIN servicespecifiedopeningdates ssod ON s.id = ssod.serviceid " + "LEFT JOIN servicespecifiedopeningtimes ssot ON ssod.id = ssot.servicespecifiedopeningdateid " + "WHERE s.id = %(SERVICE_ID)s" + ) + query_vars = {"SERVICE_ID": service_id} + # Connect to the DoS database + with connect_to_db_writer() as connection: + # Query the DoS database for the service + cursor = query_dos_db(connection=connection, query=sql_query, query_vars=query_vars) + rows: DictRow = cursor.fetchall() + if len(rows) == 1: + # Select first row (service) and create DoSService object + service = DoSService(rows[0]) + logger.append_keys(service_name=service.name) + logger.append_keys(service_uid=service.uid) + logger.append_keys(type_id=service.typeid) + elif not rows: + msg = f"Service ID {service_id} not found" + raise ValueError(msg) + # Set up remaining service data + service.standard_opening_times = db_big_rows_to_std_open_times(rows) + service.specified_opening_times = db_big_rows_to_spec_open_times(rows) + # Set up palliative care flag + 
service.palliative_care = has_palliative_care(service=service, connection=connection) + # Set up blood pressure flag + service.blood_pressure = has_blood_pressure(service=service) + # Set up contraception flag + service.contraception = has_contraception(service=service) + # Set up service history + service_histories = ServiceHistories(service_id=service_id) + service_histories.get_service_history_from_db(connection) + service_histories.create_service_histories_entry() + # Connection closed by context manager + return service, service_histories From a66db1813b3d12b1cca9411b80de2a05bd3c1c74 Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Wed, 27 Dec 2023 16:32:11 +0000 Subject: [PATCH 04/10] Fixes and unit tests --- application/common/dos.py | 25 +-- application/common/tests/test_dos.py | 171 ++++++++++++++++++ .../service_sync/data_processing/get_data.py | 2 +- 3 files changed, 186 insertions(+), 12 deletions(-) diff --git a/application/common/dos.py b/application/common/dos.py index 3d83916df..1a9231694 100644 --- a/application/common/dos.py +++ b/application/common/dos.py @@ -270,18 +270,21 @@ def db_big_rows_to_spec_open_times(db_rows: Iterable[dict]) -> list[SpecifiedOpe note: The rows must to be for the same service. 
""" specified_opening_times = [] - date_sorted_rows = sorted(db_rows, key=lambda row: (row["date"], row["starttime"])) - for date, db_rows in groupby(date_sorted_rows, lambda row: row["date"]): + specified_op_times_ids = [] + db_rows_refined = [] + for row in list(db_rows): + if row["ssot_id"] is not None and row["ssot_id"] not in specified_op_times_ids: + specified_op_times_ids.append(row["ssot_id"]) + db_rows_refined.append(row) + date_sorted_rows = sorted(db_rows_refined, key=lambda row: (row["date"], row["date_starttime"])) + for date, db_rows_refined in groupby(date_sorted_rows, lambda row: row["date"]): is_open = True open_periods = [] - specified_op_times_ids = [] - for row in list(db_rows): - if row["ssot_id"] is not None and row["ssot_id"] not in specified_op_times_ids: - specified_op_times_ids.append = row["ssot_id"] - if row["isclosed"] is True: - is_open = False - else: - open_periods.append(OpenPeriod(row["starttime"], row["endtime"])) + for row in list(db_rows_refined): + if row["isclosed"] is True: + is_open = False + else: + open_periods.append(OpenPeriod(row["date_starttime"], row["date_endtime"])) specified_opening_times.append(SpecifiedOpeningTime(open_periods, date, is_open)) return specified_opening_times @@ -311,7 +314,7 @@ def db_big_rows_to_std_open_times(db_rows: Iterable[dict]) -> StandardOpeningTim std_op_times_ids = [] for row in db_rows: if row["sdot_id"] is not None and row["sdot_id"] not in std_op_times_ids: - std_op_times_ids.append = row["sdot_id"] + std_op_times_ids.append(row["sdot_id"]) weekday = row["name"].lower() start = row["day_starttime"] end = row["day_endtime"] diff --git a/application/common/tests/test_dos.py b/application/common/tests/test_dos.py index 2a3aa1ba5..926f23af8 100644 --- a/application/common/tests/test_dos.py +++ b/application/common/tests/test_dos.py @@ -4,6 +4,8 @@ from application.common.dos import ( DoSService, + db_big_rows_to_spec_open_times, + db_big_rows_to_std_open_times, 
db_rows_to_spec_open_times, db_rows_to_std_open_times, get_dos_locations, @@ -584,6 +586,91 @@ def test_db_rows_to_spec_open_times() -> None: assert spec_open_times == expected_spec_open_times +def test_db_big_rows_to_spec_open_times() -> None: + db_rows = [ + { + "serviceid": 1, + "date": date(2019, 5, 6), + "date_starttime": time(8, 0, 0), + "date_endtime": time(20, 0, 0), + "isclosed": False, + "ssot_id": 123, + }, + { + "serviceid": 1, + "date": date(2019, 5, 6), + "date_starttime": time(21, 0, 0), + "date_endtime": time(22, 0, 0), + "isclosed": False, + "ssot_id": 324, + }, + { + "serviceid": 1, + "date": date(2019, 5, 27), + "date_starttime": time(8, 0, 0), + "date_endtime": time(20, 0, 0), + "isclosed": False, + "ssot_id": 768, + }, + { + "serviceid": 1, + "date": date(2019, 8, 26), + "date_starttime": time(8, 0, 0), + "date_endtime": time(20, 0, 0), + "isclosed": False, + "ssot_id": 987, + }, + { + "serviceid": 1, + "date": date(2019, 9, 20), + "date_starttime": None, + "date_endtime": None, + "isclosed": True, + "ssot_id": 567, + }, + { + "serviceid": 1, + "date": date(2020, 5, 6), + "date_starttime": time(6, 0, 0), + "date_endtime": time(7, 0, 0), + "isclosed": False, + "ssot_id": 876, + }, + { + "serviceid": 1, + "date": date(2020, 5, 6), + "date_starttime": time(6, 0, 0), + "date_endtime": time(7, 0, 0), + "isclosed": False, + "ssot_id": 876, + }, + { + "serviceid": 1, + "date": None, + "date_starttime": None, + "date_endtime": None, + "isclosed": None, + "ssot_id": None, + }, + ] + + spec_open_times = db_big_rows_to_spec_open_times(db_rows) + + expected_spec_open_times = [ + SpecifiedOpeningTime( + [OpenPeriod.from_string_times("08:00", "20:00"), OpenPeriod.from_string_times("21:00", "22:00")], + date(2019, 5, 6), + True, + ), + SpecifiedOpeningTime([OpenPeriod.from_string_times("08:00", "20:00")], date(2019, 5, 27), True), + SpecifiedOpeningTime([OpenPeriod.from_string_times("08:00", "20:00")], date(2019, 8, 26), True), + SpecifiedOpeningTime([], 
date(2019, 9, 20), False), + SpecifiedOpeningTime([OpenPeriod.from_string_times("06:00", "07:00")], date(2020, 5, 6), True), + ] + + assert spec_open_times == expected_spec_open_times + + def test_db_rows_to_std_open_time() -> None: db_rows = [ {"serviceid": 1, "dayid": 0, "name": "Monday", "starttime": time(8, 0, 0), "endtime": time(17, 0, 0)}, @@ -611,6 +698,90 @@ def test_db_rows_to_std_open_time() -> None: assert actual_std_open_times == expected_std_open_times +def test_db_big_rows_to_std_open_time() -> None: + db_rows = [ + { + "serviceid": 1, + "dayid": 0, + "name": "Monday", + "day_starttime": time(8, 0, 0), + "day_endtime": time(17, 0, 0), + "sdot_id": 230, + }, + { + "serviceid": 1, + "dayid": 6, + "name": "Sunday", + "day_starttime": time(13, 0, 0), + "day_endtime": time(15, 30, 0), + "sdot_id": 231, + }, + { + "serviceid": 1, + "dayid": 1, + "name": "Tuesday", + "day_starttime": time(13, 0, 0), + "day_endtime": time(18, 0, 0), + "sdot_id": 233, + }, + { + "serviceid": 1, + "dayid": 4, + "name": "Friday", + "day_starttime": time(13, 0, 0), + "day_endtime": time(15, 30, 0), + "sdot_id": 232, + }, + { + "serviceid": 1, + "dayid": 6, + "name": "Wednesday", + "day_starttime": time(7, 0, 0), + "day_endtime": time(15, 30, 0), + "sdot_id": 234, + }, + { + "serviceid": 1, + "dayid": 1, + "name": "Tuesday", + "day_starttime": time(8, 0, 0), + "day_endtime": time(12, 0, 0), + "sdot_id": 287, + }, + { + "serviceid": 1, + "dayid": 4, + "name": "Thursday", + "day_starttime": time(11, 0, 0), + "day_endtime": time(13, 30, 0), + "sdot_id": 238, + }, + { + "serviceid": 1, + "dayid": 4, + "name": "Thursday", + "day_starttime": time(11, 0, 0), + "day_endtime": time(13, 30, 0), + "sdot_id": 238, + }, + ] + + expected_std_open_times = StandardOpeningTimes() + expected_std_open_times.monday = [OpenPeriod.from_string_times("08:00", "17:00")] + expected_std_open_times.tuesday = [ + OpenPeriod.from_string_times("08:00", "12:00"), + OpenPeriod.from_string_times("13:00", "18:00"), 
+ ] + expected_std_open_times.wednesday = [OpenPeriod.from_string_times("07:00", "15:30")] + expected_std_open_times.thursday = [OpenPeriod.from_string_times("11:00", "13:30")] + expected_std_open_times.friday = [OpenPeriod.from_string_times("13:00", "15:30")] + expected_std_open_times.sunday = [OpenPeriod.from_string_times("13:00", "15:30")] + + actual_std_open_times = db_big_rows_to_std_open_times(db_rows) + + assert actual_std_open_times == expected_std_open_times + + def get_db_item(odscode: str = "FA9321", name: str = "fake name", id: int = 9999, typeid: int = 13) -> dict: # noqa: A002 return { "id": id, diff --git a/application/service_sync/data_processing/get_data.py b/application/service_sync/data_processing/get_data.py index 7e459fb97..4282284d5 100644 --- a/application/service_sync/data_processing/get_data.py +++ b/application/service_sync/data_processing/get_data.py @@ -86,7 +86,7 @@ def get_dos_service_and_history_one_query(service_id: int) -> tuple[DoSService, "publicphone, publicname, st.name service_type_name, easting, northing, latitude, longitude, " 'sdo.id as "sdo_id", sdo.dayid, otd.name, sdot.id as "sdot_id", sdot.starttime as "day_starttime", ' 'sdot.endtime as "day_endtime", ssod.id as "ssod_id", ssod.date, ssot.id as "ssot_id", ' - "ssot.starttime, ssot.endtime, ssot.isclosed " + 'ssot.starttime as "date_starttime", ssot.endtime as "date_endtime", ssot.isclosed ' "FROM services s " "INNER JOIN servicetypes st ON s.typeid = st.id INNER JOIN servicestatuses ss on s.statusid = ss.id " "LEFT JOIN servicedayopenings sdo ON s.id = sdo.serviceid " From abcc66da0a2e2115445732916191fa5cb3555b32 Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Thu, 28 Dec 2023 11:59:00 +0000 Subject: [PATCH 05/10] Force another env rebuild --- temp_file | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 temp_file diff --git a/temp_file b/temp_file new file mode 100644 index 000000000..e69de29bb From 7187123a1defce73bbe023ac0963f19ff35165f5 Mon 
Sep 17 00:00:00 2001 From: Matthew Begley Date: Thu, 28 Dec 2023 15:41:27 +0000 Subject: [PATCH 06/10] Use new one query function --- application/service_sync/service_sync.py | 4 ++-- application/service_sync/tests/test_service_sync.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/application/service_sync/service_sync.py b/application/service_sync/service_sync.py index 880603ad0..18c5e50ed 100644 --- a/application/service_sync/service_sync.py +++ b/application/service_sync/service_sync.py @@ -10,7 +10,7 @@ from boto3 import client from .data_processing.check_for_change import compare_nhs_uk_and_dos_data -from .data_processing.get_data import get_dos_service_and_history +from .data_processing.get_data import get_dos_service_and_history_one_query from .data_processing.update_dos import update_dos_data from .reject_pending_changes.pending_changes import check_and_remove_pending_dos_changes from common.middlewares import unhandled_exception_logging @@ -47,7 +47,7 @@ def lambda_handler(event: SQSEvent, context: LambdaContext) -> None: # noqa: AR change_event: dict[str, Any] = update_request["change_event"] nhs_entity = NHSEntity(change_event) # Get current DoS state - dos_service, service_histories = get_dos_service_and_history(service_id=int(service_id)) + dos_service, service_histories = get_dos_service_and_history_one_query(service_id=int(service_id)) # Compare NHS UK and DoS data changes_to_dos = compare_nhs_uk_and_dos_data( dos_service=dos_service, diff --git a/application/service_sync/tests/test_service_sync.py b/application/service_sync/tests/test_service_sync.py index adfeb786c..436b324e4 100644 --- a/application/service_sync/tests/test_service_sync.py +++ b/application/service_sync/tests/test_service_sync.py @@ -45,7 +45,7 @@ @patch(f"{FILE_PATH}.remove_sqs_message_from_queue") @patch(f"{FILE_PATH}.update_dos_data") @patch(f"{FILE_PATH}.compare_nhs_uk_and_dos_data") -@patch(f"{FILE_PATH}.get_dos_service_and_history") 
+@patch(f"{FILE_PATH}.get_dos_service_and_history_one_query") @patch(f"{FILE_PATH}.NHSEntity") def test_lambda_handler( mock_nhs_entity: MagicMock, @@ -89,7 +89,7 @@ def test_lambda_handler( @patch(f"{FILE_PATH}.remove_sqs_message_from_queue") @patch(f"{FILE_PATH}.update_dos_data") @patch(f"{FILE_PATH}.compare_nhs_uk_and_dos_data") -@patch(f"{FILE_PATH}.get_dos_service_and_history") +@patch(f"{FILE_PATH}.get_dos_service_and_history_one_query") @patch(f"{FILE_PATH}.NHSEntity") def test_lambda_handler_exception( mock_nhs_entity: MagicMock, From 893611e6704b4a678f206a9db8e01fc22d062f16 Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Thu, 28 Dec 2023 16:51:51 +0000 Subject: [PATCH 07/10] Fix many condition --- application/service_sync/data_processing/get_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/application/service_sync/data_processing/get_data.py b/application/service_sync/data_processing/get_data.py index 4282284d5..d9ba1e4bd 100644 --- a/application/service_sync/data_processing/get_data.py +++ b/application/service_sync/data_processing/get_data.py @@ -102,7 +102,7 @@ def get_dos_service_and_history_one_query(service_id: int) -> tuple[DoSService, # Query the DoS database for the service cursor = query_dos_db(connection=connection, query=sql_query, query_vars=query_vars) rows: DictRow = cursor.fetchall() - if len(rows) == 1: + if len(rows) > 1: # Select first row (service) and create DoSService object service = DoSService(rows[0]) logger.append_keys(service_name=service.name) From c36d2d2761b20c727b22848be4605a205ad18629 Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Thu, 28 Dec 2023 17:57:10 +0000 Subject: [PATCH 08/10] Fix many condition attempt 2 --- application/service_sync/data_processing/get_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/application/service_sync/data_processing/get_data.py b/application/service_sync/data_processing/get_data.py index d9ba1e4bd..0842c627a 100644 --- 
a/application/service_sync/data_processing/get_data.py +++ b/application/service_sync/data_processing/get_data.py @@ -102,7 +102,7 @@ def get_dos_service_and_history_one_query(service_id: int) -> tuple[DoSService, # Query the DoS database for the service cursor = query_dos_db(connection=connection, query=sql_query, query_vars=query_vars) rows: DictRow = cursor.fetchall() - if len(rows) > 1: + if len(rows) >= 1: # Select first row (service) and create DoSService object service = DoSService(rows[0]) logger.append_keys(service_name=service.name) From cee24c60b204108956f2cddaceb77a3cf4e2cc15 Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Tue, 9 Jan 2024 14:40:21 +0000 Subject: [PATCH 09/10] Psycopg pipeline mode implementation --- application/common/dos.py | 15 ++++++ .../service_sync/data_processing/get_data.py | 52 +++++++++++++------ .../data_processing/update_dos.py | 4 +- application/service_sync/service_sync.py | 4 +- .../service_sync/tests/test_service_sync.py | 4 +- 5 files changed, 56 insertions(+), 23 deletions(-) diff --git a/application/common/dos.py b/application/common/dos.py index 1a9231694..37cc13d90 100644 --- a/application/common/dos.py +++ b/application/common/dos.py @@ -5,6 +5,7 @@ from aws_lambda_powertools.logging import Logger from psycopg import Connection +from psycopg.rows import DictRow from .constants import ( DOS_ACTIVE_STATUS_ID, @@ -221,6 +222,11 @@ def get_specified_opening_times_from_db(connection: Connection, service_id: int) return specified_opening_times +def convert_db_to_specified_opening_times(db_specified_opening_times: list[DictRow]) -> list[SpecifiedOpeningTime]: + """Converts already-fetched database rows into specified opening times.""" + return db_rows_to_spec_open_times(db_specified_opening_times) + + def get_standard_opening_times_from_db(connection: Connection, service_id: int) -> StandardOpeningTimes: """Retrieves standard opening times from DoS database.
@@ -244,6 +250,15 @@ def get_standard_opening_times_from_db(connection: Connection, service_id: int) return standard_opening_times +def convent_db_to_standard_opening_times(db_std_opening_times: list[DictRow]) -> StandardOpeningTimes: + """Retrieves standard opening times from DoS database. + + If the service id does not even match any service this function will still return a blank StandardOpeningTime + with no opening periods. + """ + return db_rows_to_std_open_times(db_std_opening_times) + + def db_rows_to_spec_open_times(db_rows: Iterable[dict]) -> list[SpecifiedOpeningTime]: """Turns a set of dos database rows into a list of SpecifiedOpenTime objects. diff --git a/application/service_sync/data_processing/get_data.py b/application/service_sync/data_processing/get_data.py index 0842c627a..cf6308c34 100644 --- a/application/service_sync/data_processing/get_data.py +++ b/application/service_sync/data_processing/get_data.py @@ -4,10 +4,10 @@ from .service_histories import ServiceHistories from common.dos import ( DoSService, + convent_db_to_standard_opening_times, + convert_db_to_specified_opening_times, db_big_rows_to_spec_open_times, db_big_rows_to_std_open_times, - get_specified_opening_times_from_db, - get_standard_opening_times_from_db, has_blood_pressure, has_contraception, has_palliative_care, @@ -33,30 +33,48 @@ def get_dos_service_and_history(service_id: int) -> tuple[DoSService, ServiceHis "LEFT JOIN servicetypes st ON s.typeid = st.id LEFT JOIN servicestatuses ss on s.statusid = ss.id " "WHERE s.id = %(SERVICE_ID)s" ) + std_opening_times_query = ( + "SELECT sdo.serviceid, sdo.dayid, otd.name, sdot.starttime, sdot.endtime " + "FROM servicedayopenings sdo " + "INNER JOIN servicedayopeningtimes sdot " + "ON sdo.id = sdot.servicedayopeningid " + "LEFT JOIN openingtimedays otd " + "ON sdo.dayid = otd.id " + "WHERE sdo.serviceid = %(SERVICE_ID)s" + ) + specified_op_times_query = ( + "SELECT ssod.serviceid, ssod.date, ssot.starttime, ssot.endtime, ssot.isclosed 
" + "FROM servicespecifiedopeningdates ssod " + "INNER JOIN servicespecifiedopeningtimes ssot " + "ON ssod.id = ssot.servicespecifiedopeningdateid " + "WHERE ssod.serviceid = %(SERVICE_ID)s" + ) + query_vars = {"SERVICE_ID": service_id} # Connect to the DoS database - with connect_to_db_writer() as connection: + with connect_to_db_writer() as connection, connection.pipeline() as p: # Query the DoS database for the service - cursor = query_dos_db(connection=connection, query=sql_query, query_vars=query_vars) - row: DictRow = cursor.fetchone() - if row is not None: + cursor_main = query_dos_db(connection=connection, query=sql_query, query_vars=query_vars) + cursor_std_ot = query_dos_db(connection=connection, query=std_opening_times_query, query_vars=query_vars) + cursor_sp_otd = query_dos_db(connection=connection, query=specified_op_times_query, query_vars=query_vars) + p.sync() + main_fetch = cursor_main.fetchone() + std_ot_fetch = cursor_std_ot.fetchall() + sp_otd_fetch = cursor_sp_otd.fetchall() + + if main_fetch is not None: # Select first row (service) and create DoSService object - service = DoSService(row) + service = DoSService(main_fetch) logger.append_keys(service_name=service.name) logger.append_keys(service_uid=service.uid) logger.append_keys(type_id=service.typeid) - elif row is None: + elif main_fetch is None: msg = f"Service ID {service_id} not found" raise ValueError(msg) # Set up remaining service data - service.standard_opening_times = get_standard_opening_times_from_db( - connection=connection, - service_id=service_id, - ) - service.specified_opening_times = get_specified_opening_times_from_db( - connection=connection, - service_id=service_id, - ) + service.standard_opening_times = convent_db_to_standard_opening_times(db_std_opening_times=std_ot_fetch) + service.specified_opening_times = convert_db_to_specified_opening_times(db_specified_opening_times=sp_otd_fetch) + # Set up palliative care flag service.palliative_care = 
has_palliative_care(service=service, connection=connection) # Set up blood pressure flag @@ -67,7 +85,7 @@ def get_dos_service_and_history(service_id: int) -> tuple[DoSService, ServiceHis service_histories = ServiceHistories(service_id=service_id) service_histories.get_service_history_from_db(connection) service_histories.create_service_histories_entry() - # Connection closed by context manager + # Connection closed by context manager return service, service_histories diff --git a/application/service_sync/data_processing/update_dos.py b/application/service_sync/data_processing/update_dos.py index f7f2a6b0d..2d38bb07a 100644 --- a/application/service_sync/data_processing/update_dos.py +++ b/application/service_sync/data_processing/update_dos.py @@ -227,7 +227,7 @@ def save_specified_opening_times_into_db( ) cursor.close() for specified_opening_times_day in specified_opening_times_changes: - logger.info(f"Saving specfied opening times for: {specified_opening_times_day}") + logger.info(f"Saving specified opening times for: {specified_opening_times_day}") cursor = query_dos_db( connection=connection, query=( @@ -244,7 +244,7 @@ def save_specified_opening_times_into_db( open_period: OpenPeriod # Type hint for the for loop for open_period in specified_opening_times_day.open_periods: logger.debug( - "Saving standard opening times period for dayid: " + "Saving specified opening times period for dayid: " f"{specified_opening_times_day.date}, period: {open_period}", ) cursor = query_dos_db( diff --git a/application/service_sync/service_sync.py b/application/service_sync/service_sync.py index 18c5e50ed..880603ad0 100644 --- a/application/service_sync/service_sync.py +++ b/application/service_sync/service_sync.py @@ -10,7 +10,7 @@ from boto3 import client from .data_processing.check_for_change import compare_nhs_uk_and_dos_data -from .data_processing.get_data import get_dos_service_and_history_one_query +from .data_processing.get_data import get_dos_service_and_history from 
.data_processing.update_dos import update_dos_data from .reject_pending_changes.pending_changes import check_and_remove_pending_dos_changes from common.middlewares import unhandled_exception_logging @@ -47,7 +47,7 @@ def lambda_handler(event: SQSEvent, context: LambdaContext) -> None: # noqa: AR change_event: dict[str, Any] = update_request["change_event"] nhs_entity = NHSEntity(change_event) # Get current DoS state - dos_service, service_histories = get_dos_service_and_history_one_query(service_id=int(service_id)) + dos_service, service_histories = get_dos_service_and_history(service_id=int(service_id)) # Compare NHS UK and DoS data changes_to_dos = compare_nhs_uk_and_dos_data( dos_service=dos_service, diff --git a/application/service_sync/tests/test_service_sync.py b/application/service_sync/tests/test_service_sync.py index 436b324e4..adfeb786c 100644 --- a/application/service_sync/tests/test_service_sync.py +++ b/application/service_sync/tests/test_service_sync.py @@ -45,7 +45,7 @@ @patch(f"{FILE_PATH}.remove_sqs_message_from_queue") @patch(f"{FILE_PATH}.update_dos_data") @patch(f"{FILE_PATH}.compare_nhs_uk_and_dos_data") -@patch(f"{FILE_PATH}.get_dos_service_and_history_one_query") +@patch(f"{FILE_PATH}.get_dos_service_and_history") @patch(f"{FILE_PATH}.NHSEntity") def test_lambda_handler( mock_nhs_entity: MagicMock, @@ -89,7 +89,7 @@ def test_lambda_handler( @patch(f"{FILE_PATH}.remove_sqs_message_from_queue") @patch(f"{FILE_PATH}.update_dos_data") @patch(f"{FILE_PATH}.compare_nhs_uk_and_dos_data") -@patch(f"{FILE_PATH}.get_dos_service_and_history_one_query") +@patch(f"{FILE_PATH}.get_dos_service_and_history") @patch(f"{FILE_PATH}.NHSEntity") def test_lambda_handler_exception( mock_nhs_entity: MagicMock, From 6e251408e29ef5db40d3fd815b2c25715c5bb708 Mon Sep 17 00:00:00 2001 From: Matthew Begley Date: Tue, 9 Jan 2024 14:42:32 +0000 Subject: [PATCH 10/10] Readme change to highlight broke targets --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/README.md b/README.md index 7d4050d0a..3d79b68da 100644 --- a/README.md +++ b/README.md @@ -168,11 +168,11 @@ Prerequisites (first setup only) To add an IP address to the IP allow lists, Ensure you're authenticated for access to AWS and run the following command. - make update-all-ip-allowlists + make update-all-ip-allowlists [WARNING: Not working; the stack for this one needs fixing (likely SSO issue)] To add an IP address to the IP allow lists and deploy the allow list to environment run the following command.The `PROFILE` delineates which environment to update with the latest IP allow list. Set `ENVIRONMENT` if you are changing an environment not linked to your branch - make update-ip-allowlists-and-deploy-allowlist PROFILE=dev + make update-ip-allowlists-and-deploy-allowlist PROFILE=dev [WARNING: Not working; the stack for this one needs fixing (likely SSO issue)] ### DoS Database Connection