diff --git a/bin/migrate-oats-data/applications/app_prep.py b/bin/migrate-oats-data/applications/app_prep.py
index 01d346eef9..39e216dbb0 100644
--- a/bin/migrate-oats-data/applications/app_prep.py
+++ b/bin/migrate-oats-data/applications/app_prep.py
@@ -10,7 +10,7 @@
     AlcsApplicantType,
 )
 from db import inject_conn_pool
-from constants import BATCH_UPLOAD_SIZE
+from common import BATCH_UPLOAD_SIZE
 from psycopg2.extras import execute_batch, RealDictCursor
 import traceback
 from enum import Enum
@@ -288,7 +288,6 @@ def get_update_query_for_nar():


 def get_update_query_for_exc():
-    # TODO Will be finalized in ALCS-834.
     # exclsn_app_type_code is out of scope. It is a part of submission
     unique_fields = """,
                     incl_excl_applicant_type = %(legislation_code)s"""
@@ -296,7 +295,6 @@ def get_update_query_for_exc():


 def get_update_query_for_inc():
-    # TODO Will be finalized in ALCS-834.
     unique_fields = """,
                     incl_excl_applicant_type = %(legislation_code)s"""
     return get_update_query(unique_fields)
diff --git a/bin/migrate-oats-data/submissions/__init__.py b/bin/migrate-oats-data/applications/submissions/__init__.py
similarity index 100%
rename from bin/migrate-oats-data/submissions/__init__.py
rename to bin/migrate-oats-data/applications/submissions/__init__.py
diff --git a/bin/migrate-oats-data/submissions/app_submissions.py b/bin/migrate-oats-data/applications/submissions/app_submissions.py
similarity index 88%
rename from bin/migrate-oats-data/submissions/app_submissions.py
rename to bin/migrate-oats-data/applications/submissions/app_submissions.py
index 585206a424..e92f532281 100644
--- a/bin/migrate-oats-data/submissions/app_submissions.py
+++ b/bin/migrate-oats-data/applications/submissions/app_submissions.py
@@ -18,7 +18,7 @@
     add_soil_field,
 )
 from db import inject_conn_pool
-from constants import BATCH_UPLOAD_SIZE
+from common import BATCH_UPLOAD_SIZE
 from psycopg2.extras import execute_batch, RealDictCursor
 import traceback
 from enum import Enum
@@ -26,6 +26,7 @@

 etl_name = "alcs_app_sub"

+
 @inject_conn_pool
 def process_alcs_app_submissions(conn=None, batch_size=BATCH_UPLOAD_SIZE):
     """
@@ -39,7 +40,7 @@ def process_alcs_app_submissions(conn=None, batch_size=BATCH_UPLOAD_SIZE):
     log_start(etl_name)
     with conn.cursor(cursor_factory=RealDictCursor) as cursor:
         with open(
-            "submissions/sql/app_submission_count.sql",
+            "applications/submissions/sql/app_submission_count.sql",
             "r",
             encoding="utf-8",
         ) as sql_file:
@@ -53,7 +54,7 @@ def process_alcs_app_submissions(conn=None, batch_size=BATCH_UPLOAD_SIZE):
         last_submission_id = 0

         with open(
-            "submissions/sql/app_submission.sql",
+            "applications/submissions/sql/app_submission.sql",
             "r",
             encoding="utf-8",
         ) as sql_file:
@@ -68,14 +69,21 @@ def process_alcs_app_submissions(conn=None, batch_size=BATCH_UPLOAD_SIZE):
                 if not rows:
                     break
                 try:
-
                     direction_data = get_direction_data(rows, cursor)
                     subdiv_data = get_subdiv_data(rows, cursor)
                     soil_data = get_soil_data(rows, cursor)
                     submissions_to_be_inserted_count = len(rows)

-                    insert_app_sub_records(conn, batch_size, cursor, rows, direction_data, subdiv_data, soil_data)
+                    insert_app_sub_records(
+                        conn,
+                        batch_size,
+                        cursor,
+                        rows,
+                        direction_data,
+                        subdiv_data,
+                        soil_data,
+                    )

                     successful_inserts_count = (
                         successful_inserts_count + submissions_to_be_inserted_count
@@ -99,7 +107,10 @@ def process_alcs_app_submissions(conn=None, batch_size=BATCH_UPLOAD_SIZE):
     print("Total failed inserts:", failed_inserts)
     log_end(etl_name)

-def insert_app_sub_records(conn, batch_size, cursor, rows, direction_data, subdiv_data, soil_data):
+
+def insert_app_sub_records(
+    conn, batch_size, cursor, rows, direction_data, subdiv_data, soil_data
+):
     """
     Function to insert submission records in batches.
@@ -110,7 +121,7 @@ def insert_app_sub_records(conn, batch_size, cursor, rows, direction_data, subdi
     rows (list): Rows of data to insert in the database.
     direction_data (dict): Dictionary of adjacent parcel data
     subdiv_data: dictionary of subdivision data lists
-    soil_data: dictonary of soil element data.
+    soil_data: dictionary of soil element data.

     Returns:
     None: Commits the changes to the database.
@@ -140,13 +151,14 @@ def insert_app_sub_records(conn, batch_size, cursor, rows, direction_data, subdi
     if len(other_data_list) > 0:
         execute_batch(
             cursor,
-            get_insert_query("",""),
+            get_insert_query("", ""),
             other_data_list,
             page_size=batch_size,
         )

     conn.commit()

+
 def prepare_app_sub_data(app_sub_raw_data_list, direction_data, subdiv_data, soil_data):
     """
     This function prepares different lists of data based on the 'alr_change_code' field of each data dict in 'app_sub_raw_data_list'.
@@ -154,7 +166,7 @@ def prepare_app_sub_data(app_sub_raw_data_list, direction_data, subdiv_data, soi
     :param app_sub_raw_data_list: A list of raw data dictionaries.
     :param direction_data: A dictionary of adjacent parcel data.
     :param subdiv_data: dictionary of subdivision data lists.
-    :param soil_data: dictonary of soil element data.
+    :param soil_data: dictionary of soil element data.
     :return: Five lists, each containing dictionaries from 'app_sub_raw_data_list' and 'direction_data' grouped based on the 'alr_change_code' field

     Detailed Workflow:
@@ -173,9 +185,9 @@ def prepare_app_sub_data(app_sub_raw_data_list, direction_data, subdiv_data, soi
     for row in app_sub_raw_data_list:
         data = dict(row)
         data = add_direction_field(data)
-        data = add_subdiv(data,json)
+        data = add_subdiv(data, json)
         data = add_soil_field(data)
-        if data['alr_appl_component_id'] in subdiv_data:
+        if data["alr_appl_component_id"] in subdiv_data:
             data = map_subdiv_lots(data, subdiv_data, json)
         if data["alr_application_id"] in direction_data:
             data = map_direction_values(data, direction_data)
@@ -183,7 +195,10 @@ def prepare_app_sub_data(app_sub_raw_data_list, direction_data, subdiv_data, soi
             data = map_soil_data(data, soil_data)
         if data["alr_change_code"] == ALRChangeCode.NFU.value:
             nfu_data_list.append(data)
-        elif data["alr_change_code"] == ALRChangeCode.EXC.value or data["alr_change_code"] == ALRChangeCode.INC.value:
+        elif (
+            data["alr_change_code"] == ALRChangeCode.EXC.value
+            or data["alr_change_code"] == ALRChangeCode.INC.value
+        ):
             inc_exc_data_list.append(data)
         else:
             other_data_list.append(data)
@@ -191,7 +206,7 @@ def prepare_app_sub_data(app_sub_raw_data_list, direction_data, subdiv_data, soi
     return nfu_data_list, other_data_list, inc_exc_data_list


-def get_insert_query(unique_fields,unique_values):
+def get_insert_query(unique_fields, unique_values):
     # unique_fields takes input from called function and appends to query
     query = """
         INSERT INTO alcs.application_submission (
@@ -233,6 +248,7 @@ def get_insert_query(unique_fields,unique_values):
     """
     return query.format(unique_fields=unique_fields, unique_values=unique_values)

+
 def get_insert_query_for_nfu():
     unique_fields = """,
                     nfu_hectares,
                     nfu_will_import_fill,
@@ -254,12 +270,14 @@ def get_insert_query_for_nfu():
                     %(fill_duration_unit)s,
                     %(fill_area)s
                     """
-    return get_insert_query(unique_fields,unique_values)
+    return get_insert_query(unique_fields, unique_values)
+

 def get_insert_query_for_inc_exc():
unique_fields = ", incl_excl_hectares" unique_values = ", %(alr_area)s" - return get_insert_query(unique_fields,unique_values) + return get_insert_query(unique_fields, unique_values) + def get_direction_data(rows, cursor): # runs query to get direction data and creates a dict based on alr_application_id @@ -267,17 +285,20 @@ def get_direction_data(rows, cursor): direction_data = create_direction_dict(adj_rows) return direction_data + def get_subdiv_data(rows, cursor): - # runs query to get subdivision data and creates a dictionaly based on alr_appl_component_id with a list of plots + # runs query to get subdivision data and creates a dictionary based on alr_appl_component_id with a list of plots subdiv_rows = get_subdiv_rows(rows, cursor) subdiv_data = create_subdiv_dict(subdiv_rows) return subdiv_data + def get_soil_data(rows, cursor): soil_rows = get_soil_rows(rows, cursor) soil_data = create_soil_dict(soil_rows) return soil_data + @inject_conn_pool def clean_application_submission(conn=None): print("Start application_submission cleaning") @@ -285,4 +306,4 @@ def clean_application_submission(conn=None): cursor.execute( "DELETE FROM alcs.application_submission a WHERE a.audit_created_by = 'oats_etl'" ) - print(f"Deleted items count = {cursor.rowcount}") \ No newline at end of file + print(f"Deleted items count = {cursor.rowcount}") diff --git a/bin/migrate-oats-data/submissions/sql/app_submission.sql b/bin/migrate-oats-data/applications/submissions/sql/app_submission.sql similarity index 100% rename from bin/migrate-oats-data/submissions/sql/app_submission.sql rename to bin/migrate-oats-data/applications/submissions/sql/app_submission.sql diff --git a/bin/migrate-oats-data/submissions/sql/app_submission_count.sql b/bin/migrate-oats-data/applications/submissions/sql/app_submission_count.sql similarity index 100% rename from bin/migrate-oats-data/submissions/sql/app_submission_count.sql rename to bin/migrate-oats-data/applications/submissions/sql/app_submission_count.sql diff --git a/bin/migrate-oats-data/submissions/submap/__init__.py b/bin/migrate-oats-data/applications/submissions/submap/__init__.py similarity index 100% rename from bin/migrate-oats-data/submissions/submap/__init__.py rename to bin/migrate-oats-data/applications/submissions/submap/__init__.py diff --git a/bin/migrate-oats-data/submissions/submap/direction_mapping.py b/bin/migrate-oats-data/applications/submissions/submap/direction_mapping.py similarity index 100% rename from bin/migrate-oats-data/submissions/submap/direction_mapping.py rename to bin/migrate-oats-data/applications/submissions/submap/direction_mapping.py diff --git a/bin/migrate-oats-data/submissions/submap/soil_elements.py b/bin/migrate-oats-data/applications/submissions/submap/soil_elements.py similarity index 100% rename from bin/migrate-oats-data/submissions/submap/soil_elements.py rename to bin/migrate-oats-data/applications/submissions/submap/soil_elements.py diff --git a/bin/migrate-oats-data/submissions/submap/subdiv_plot.py b/bin/migrate-oats-data/applications/submissions/submap/subdiv_plot.py similarity index 100% rename from bin/migrate-oats-data/submissions/submap/subdiv_plot.py rename to bin/migrate-oats-data/applications/submissions/submap/subdiv_plot.py diff --git a/bin/migrate-oats-data/common/constants.py b/bin/migrate-oats-data/common/constants.py index 46497ef9c6..f93f7b8a79 100644 --- a/bin/migrate-oats-data/common/constants.py +++ b/bin/migrate-oats-data/common/constants.py @@ -1 +1,2 @@ OATS_ETL_USER = "oats_etl" +BATCH_UPLOAD_SIZE = 
1000 diff --git a/bin/migrate-oats-data/common/etl_logger.py b/bin/migrate-oats-data/common/etl_logger.py index cfcd8e596b..17a2d1f323 100644 --- a/bin/migrate-oats-data/common/etl_logger.py +++ b/bin/migrate-oats-data/common/etl_logger.py @@ -4,9 +4,7 @@ etl_log_file_name = "etl_log.txt" -def log_start( - etl_name="Not specified", -): +def log_start(etl_name="Not specified", etl_log_file_name=etl_log_file_name): data = { "etl_name": etl_name, "start_time": datetime.now().isoformat(), @@ -21,6 +19,7 @@ def log_end( etl_name="Not specified", error_msg=None, error_log=None, + etl_log_file_name=etl_log_file_name, ): data = { "etl_name": etl_name, diff --git a/bin/migrate-oats-data/constants.py b/bin/migrate-oats-data/constants.py deleted file mode 100644 index a1642a815d..0000000000 --- a/bin/migrate-oats-data/constants.py +++ /dev/null @@ -1 +0,0 @@ -BATCH_UPLOAD_SIZE = 1000 diff --git a/bin/migrate-oats-data/migrate.py b/bin/migrate-oats-data/migrate.py index 12407d9042..0a7b88ae44 100644 --- a/bin/migrate-oats-data/migrate.py +++ b/bin/migrate-oats-data/migrate.py @@ -16,15 +16,15 @@ clean_applications, process_alcs_application_prep_fields, ) -from noi import ( +from noi.notice_of_intent_init import ( process_nois, clean_nois, ) -from submissions import ( +from applications.submissions import ( process_alcs_app_submissions, clean_application_submission, ) -from constants import BATCH_UPLOAD_SIZE +from common import BATCH_UPLOAD_SIZE from applications.application_submission_status_email import ( process_application_submission_status_emails, @@ -34,6 +34,9 @@ process_notice_of_intent_submission_status_emails, clean_application_submission_status_emails, ) +from noi.oats_to_alcs_notice_of_intent_table_etl import ( + process_alcs_notice_of_intent_fee_fields, +) import_batch_size = BATCH_UPLOAD_SIZE @@ -228,11 +231,12 @@ def setup_menu_args_parser(import_batch_size): console.log("Processing NOIs:") process_nois(batch_size=import_batch_size) - console.log("Processing documents:") - process_documents(batch_size=import_batch_size) + # TODO Liam question which process_documents_noi or process_noi_documents is the correct one to keep? 
+ # console.log("Processing NOI specific documents:") + # process_documents_noi(batch_size=import_batch_size) - console.log("Processing NOI specific documents:") - process_documents_noi(batch_size=import_batch_size) + # console.log("Processing documents:") + # process_documents(batch_size=import_batch_size) console.log("Processing application documents:") process_application_documents(batch_size=import_batch_size) @@ -246,6 +250,11 @@ def setup_menu_args_parser(import_batch_size): console.log("Processing application submission:") process_alcs_app_submissions(batch_size=import_batch_size) + console.log("Processing notice of intent fields") + process_alcs_notice_of_intent_fee_fields( + batch_size=import_batch_size + ) + # NOTE: both process_application_submission_status_emails(), process_notice_of_intent_submission_status_emails() # must be the last ones in the migrate etl console.log("Processing submission status emails") @@ -264,7 +273,7 @@ def setup_menu_args_parser(import_batch_size): clean_applications() clean_nois() clean_notice_of_intent_submission_status_emails(), - clean_notice_of_intent_submission_status_emails(), + clean_application_submission_status_emails(), console.log("Done") case "document-import": @@ -318,6 +327,9 @@ def setup_menu_args_parser(import_batch_size): ) process_nois(batch_size=import_batch_size) + process_alcs_notice_of_intent_fee_fields( + batch_size=import_batch_size + ) case "application-import": console.log("Beginning OATS -> ALCS application import process") with console.status( diff --git a/bin/migrate-oats-data/noi/__init__.py b/bin/migrate-oats-data/noi/__init__.py index 57d7c89a17..4b8338fdd6 100644 --- a/bin/migrate-oats-data/noi/__init__.py +++ b/bin/migrate-oats-data/noi/__init__.py @@ -1 +1 @@ -from .noi import * \ No newline at end of file +from .notice_of_intent_init import * \ No newline at end of file diff --git a/bin/migrate-oats-data/noi/noi.py b/bin/migrate-oats-data/noi/notice_of_intent_init.py similarity index 100% rename from bin/migrate-oats-data/noi/noi.py rename to bin/migrate-oats-data/noi/notice_of_intent_init.py diff --git a/bin/migrate-oats-data/noi/oats_to_alcs_notice_of_intent_table_etl/__init__.py b/bin/migrate-oats-data/noi/oats_to_alcs_notice_of_intent_table_etl/__init__.py new file mode 100644 index 0000000000..4b40e6e129 --- /dev/null +++ b/bin/migrate-oats-data/noi/oats_to_alcs_notice_of_intent_table_etl/__init__.py @@ -0,0 +1 @@ +from .oats_to_alcs_notice_of_intent_table_etl import process_alcs_notice_of_intent_fee_fields diff --git a/bin/migrate-oats-data/noi/oats_to_alcs_notice_of_intent_table_etl/oats_to_alcs_notice_of_intent_table_etl.py b/bin/migrate-oats-data/noi/oats_to_alcs_notice_of_intent_table_etl/oats_to_alcs_notice_of_intent_table_etl.py new file mode 100644 index 0000000000..3898d44659 --- /dev/null +++ b/bin/migrate-oats-data/noi/oats_to_alcs_notice_of_intent_table_etl/oats_to_alcs_notice_of_intent_table_etl.py @@ -0,0 +1,144 @@ +from db import inject_conn_pool +from common import BATCH_UPLOAD_SIZE, log_end, log_start +from psycopg2.extras import execute_batch, RealDictCursor +import traceback +from common import ( + AlcsAgCapSource, + log_end, + log_start, + AlcsAgCap, +) +from enum import Enum + + +class OatsToAlcsAgCapSource(Enum): + BCLI = AlcsAgCapSource.BCLI.value + CLI = AlcsAgCapSource.CLI.value + ONSI = AlcsAgCapSource.On_site.value + + +class OatsToAlcsAgCap(Enum): + P = AlcsAgCap.Prime.value + PD = AlcsAgCap.Prime_Dominant.value + MIX = AlcsAgCap.Mixed_Prime_Secondary.value + S = 
AlcsAgCap.Secondary.value + U = AlcsAgCap.Unclassified.value + + +@inject_conn_pool +def process_alcs_notice_of_intent_fee_fields(conn=None, batch_size=BATCH_UPLOAD_SIZE): + """ + decision_date is imported separately + """ + + etl_name = "process_alcs_notice_of_intent_base_fields" + log_start(etl_name, etl_name) + with conn.cursor(cursor_factory=RealDictCursor) as cursor: + with open( + "noi/sql/notice_of_intent_base/notice_of_intent_base.count.sql", + "r", + encoding="utf-8", + ) as sql_file: + count_query = sql_file.read() + cursor.execute(count_query) + count_total = dict(cursor.fetchone())["count"] + print("- Total Notice of Intents data to update: ", count_total) + + failed_inserts = 0 + successful_updates_count = 0 + last_application_id = 0 + + with open( + "noi/sql/notice_of_intent_base/notice_of_intent_base.sql", + "r", + encoding="utf-8", + ) as sql_file: + application_sql = sql_file.read() + while True: + cursor.execute( + f"{application_sql} WHERE oaa.alr_application_id > {last_application_id} ORDER BY oaa.alr_application_id;" + ) + + rows = cursor.fetchmany(batch_size) + + if not rows: + break + try: + records_to_be_updated_count = len(rows) + + _update_fee_fields_records(conn, batch_size, cursor, rows) + + successful_updates_count = ( + successful_updates_count + records_to_be_updated_count + ) + last_application_id = dict(rows[-1])["alr_application_id"] + + print( + f"retrieved/updated items count: {records_to_be_updated_count}; total successfully updated applications so far {successful_updates_count}; last updated application_id: {last_application_id}" + ) + except Exception as error: + # this is NOT going to be caused by actual data update failure. This code is only executed when the code error appears or connection to DB is lost + conn.rollback() + error_str = "".join( + traceback.format_exception(None, error, error.__traceback__) + ) + print(error_str) + log_end(etl_name, str(error), error_str) + failed_inserts = count_total - successful_updates_count + last_application_id = last_application_id + 1 + + print("Total amount of successful updates:", successful_updates_count) + print("Total failed updates:", failed_inserts) + log_end(etl_name, etl_name) + + +def _update_fee_fields_records(conn, batch_size, cursor, rows): + query = _get_update_query_from_oats_alr_applications_fields() + parsed_fee_data_list = _prepare_oats_alr_applications_data(rows) + + if len(parsed_fee_data_list) > 0: + execute_batch(cursor, query, parsed_fee_data_list, page_size=batch_size) + + conn.commit() + + +def _get_update_query_from_oats_alr_applications_fields(): + query = """ + UPDATE alcs.notice_of_intent + SET fee_paid_date = %(fee_received_date)s, + fee_waived = %(fee_waived_ind)s, + fee_amount = %(applied_fee_amt)s, + fee_split_with_lg = %(split_fee_with_local_gov_ind)s, + date_submitted_to_alc = %(submitted_to_alc_date)s, + staff_observations = %(staff_comment_observations)s, + alr_area = %(component_area)s, + ag_cap_source = %(capability_source_code)s, + ag_cap_map = %(agri_cap_map)s, + ag_cap_consultant = %(agri_cap_consultant)s, + ag_cap = %(agri_capability_code)s, + source = 'APPLICANT' + WHERE + alcs.notice_of_intent.file_number = %(alr_application_id)s::TEXT; + """ + return query + + +def _prepare_oats_alr_applications_data(row_data_list): + data_list = [] + for row in row_data_list: + data = dict(row) + data = map_basic_field(data) + data_list.append(data) + return data_list + + +def map_basic_field(data): + if data["capability_source_code"]: + data["capability_source_code"] = str( + 
OatsToAlcsAgCapSource[data["capability_source_code"]].value + ) + if data["agri_capability_code"]: + data["agri_capability_code"] = str( + OatsToAlcsAgCap[data["agri_capability_code"]].value + ) + return data diff --git a/bin/migrate-oats-data/noi/sql/insert_noi.sql b/bin/migrate-oats-data/noi/sql/insert_noi.sql index 48bba19aae..17cace2206 100644 --- a/bin/migrate-oats-data/noi/sql/insert_noi.sql +++ b/bin/migrate-oats-data/noi/sql/insert_noi.sql @@ -13,8 +13,8 @@ WITH HAVING count(oaac.alr_application_id) < 2 ), - -- Step 2: get applicant - applicant_lookup AS ( + -- Step 2: get noi + noi_lookup AS ( SELECT DISTINCT oaap.alr_application_id AS application_id, string_agg (DISTINCT oo.organization_name, ', ') FILTER ( @@ -145,8 +145,8 @@ WITH SELECT ng.noi_application_id :: text AS file_number, CASE - WHEN applicant_lookup.orgs IS NOT NULL THEN applicant_lookup.orgs - WHEN applicant_lookup.persons IS NOT NULL THEN applicant_lookup.persons + WHEN noi_lookup.orgs IS NOT NULL THEN noi_lookup.orgs + WHEN noi_lookup.persons IS NOT NULL THEN noi_lookup.persons ELSE 'Unknown' END AS applicant, ar.code AS region_code, @@ -163,7 +163,7 @@ SELECT END AS type_code FROM noi_grouped AS ng - LEFT JOIN applicant_lookup ON ng.noi_application_id = applicant_lookup.application_id + LEFT JOIN noi_lookup ON ng.noi_application_id = noi_lookup.application_id LEFT JOIN panel_lookup ON ng.noi_application_id = panel_lookup.application_id LEFT JOIN alcs.application_region ar ON panel_lookup.panel_region = ar."label" LEFT JOIN alcs_gov ON ng.noi_application_id = alcs_gov.application_id diff --git a/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base.count.sql b/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base.count.sql new file mode 100644 index 0000000000..f294bef008 --- /dev/null +++ b/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base.count.sql @@ -0,0 +1,12 @@ +WITH nois_with_one_or_zero_component_only AS ( + SELECT oaac.alr_application_id + FROM oats.oats_alr_appl_components oaac + GROUP BY oaac.alr_application_id + HAVING count(oaac.alr_application_id) < 2 +) +SELECT count(*) +FROM alcs.notice_of_intent noi + JOIN nois_with_one_or_zero_component_only oats_noi ON oats_noi.alr_application_id::TEXT = noi.file_number + JOIN oats.oats_alr_applications oaa ON oaa.alr_application_id = oats_noi.alr_application_id + AND oaa.application_class_code = 'NOI' + JOIN oats.oats_alr_appl_components oaac ON oaac.alr_application_id = oats_noi.alr_application_id; \ No newline at end of file diff --git a/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base.sql b/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base.sql new file mode 100644 index 0000000000..b1d10ad2f7 --- /dev/null +++ b/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base.sql @@ -0,0 +1,25 @@ +WITH nois_with_one_or_zero_component_only AS ( + SELECT oaac.alr_application_id + FROM oats.oats_alr_appl_components oaac + GROUP BY oaac.alr_application_id + HAVING count(oaac.alr_application_id) < 2 +) +SELECT oaa.fee_waived_ind, + oaa.split_fee_with_local_gov_ind, + oaa.applied_fee_amt, + oaa.fee_received_date, + oaa.alr_application_id, + oaa.submitted_to_alc_date, + oaa.staff_comment_observations, + oaac.alr_change_code, + oaac.alr_appl_component_id, + oaac.component_area, + oaac.capability_source_code, + oaac.agri_cap_map, + oaac.agri_cap_consultant, + oaac.agri_capability_code +FROM alcs.notice_of_intent noi + JOIN 
nois_with_one_or_zero_component_only oats_noi ON oats_noi.alr_application_id::TEXT = noi.file_number + JOIN oats.oats_alr_applications oaa ON oaa.alr_application_id = oats_noi.alr_application_id + AND oaa.application_class_code = 'NOI' + JOIN oats.oats_alr_appl_components oaac ON oaac.alr_application_id = oats_noi.alr_application_id \ No newline at end of file diff --git a/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base_validation.sql b/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base_validation.sql new file mode 100644 index 0000000000..fe2e8b4887 --- /dev/null +++ b/bin/migrate-oats-data/noi/sql/notice_of_intent_base/notice_of_intent_base_validation.sql @@ -0,0 +1,54 @@ +-- this script selects difference between fields that do not require mapping +WITH nois_with_one_or_zero_component_only AS ( + SELECT oaac.alr_application_id + FROM oats.oats_alr_appl_components oaac + GROUP BY oaac.alr_application_id + HAVING count(oaac.alr_application_id) < 2 +), +oats_noi_data AS ( + SELECT oaa.alr_application_id, + oaac.agri_capability_code, + oaac.agri_cap_map, + oaac.agri_cap_consultant, + oaac.component_area, + oaac.capability_source_code, + oaa.staff_comment_observations, + oaac.alr_change_code, + split_fee_with_local_gov_ind AS fee_lg, + fee_received_date AS fee_date, + fee_waived_ind AS fee_waived, + applied_fee_amt AS fee_amount + FROM alcs.notice_of_intent noi + JOIN nois_with_one_or_zero_component_only oats_noi ON oats_noi.alr_application_id::TEXT = noi.file_number + JOIN oats.oats_alr_applications oaa ON oaa.alr_application_id = oats_noi.alr_application_id + AND oaa.application_class_code = 'NOI' + JOIN oats.oats_alr_appl_components oaac ON oaac.alr_application_id = oats_noi.alr_application_id +) +SELECT oats_noi.alr_application_id, + noi.alr_area, + noi.ag_cap, + noi.ag_cap_source, + noi.ag_cap_map, + noi.ag_cap_consultant, + staff_observations, + oats_noi.component_area, + oats_noi.agri_capability_code, + oats_noi.capability_source_code, + oats_noi.agri_cap_map, + oats_noi.agri_cap_consultant, + oats_noi.staff_comment_observations, + oats_noi.fee_date, + oats_noi.fee_amount, + noi.fee_split_with_lg, + oats_noi.fee_lg, + oats_noi.fee_waived, + noi.fee_waived +FROM alcs.notice_of_intent noi + LEFT JOIN oats_noi_data AS oats_noi ON noi.file_number = oats_noi.alr_application_id::TEXT +WHERE noi.alr_area != oats_noi.component_area + OR noi.ag_cap_map != oats_noi.agri_cap_map + OR noi.ag_cap_consultant != oats_noi.agri_cap_consultant + OR noi.staff_observations != oats_noi.staff_comment_observations + OR noi.fee_amount != oats_noi.fee_amount + OR noi.fee_split_with_lg::bool != oats_noi.fee_lg::bool + OR noi.fee_waived::bool != oats_noi.fee_waived::bool \ No newline at end of file diff --git a/portal-frontend/src/app/features/applications/edit-submission/parcel-details/parcel-entry/parcel-entry.component.html b/portal-frontend/src/app/features/applications/edit-submission/parcel-details/parcel-entry/parcel-entry.component.html index 4048cca05d..fa44c72e36 100644 --- a/portal-frontend/src/app/features/applications/edit-submission/parcel-details/parcel-entry/parcel-entry.component.html +++ b/portal-frontend/src/app/features/applications/edit-submission/parcel-details/parcel-entry/parcel-entry.component.html @@ -185,7 +185,7 @@
             Parcel Lookup
-            Visit BC Land Title & Survey to obtain
+            Visit BC Land Title & Survey to obtain
             a recent copy (not older than 1 year) of the Certificate of Title
             Parcel Lookup
             Visit
-            BC Land Title & Survey
+            BC Land Title & Survey
             to obtain a recent copy (not older than 1 year) of the Certificate of Title