def test_loading_new_data_into_existing_schema(app_with_db):
    fi = FileInfo.from_path('tests/fixtures/hmrc/exporters/exporters_2016_mock.zip')
    pipeline = HMRCExportersPipeline(app_with_db.dbi)
    pipeline.process(fi)
    fi.data.seek(0)
    fi2 = FileInfo(fi.name + '2', fi.data)
    pipeline.process(fi2)

    # check L1: new data should be appended, not loaded into a re-created schema
    expected_rows = [
        ('2016-01-01 00:00:00', 'aaa limited', '01 street name street line london', 'BB11BB', ['12345670']),
        ('2016-01-01 00:00:00', 'aaa limited', '01 street name street line london', 'BB11BB', ['12345670']),
        (
            '2016-01-01 00:00:00',
            'abc company ltd',
            '1 - 3 street avenue house name city name',
            'AA11AA',
            ['12345678', '23456789'],
        ),
        (
            '2016-01-01 00:00:00',
            'abc company ltd',
            '1 - 3 street avenue house name city name',
            'AA11AA',
            ['12345678', '23456789'],
        ),
        (
            '2016-02-01 00:00:00',
            'bbb limited',
            'aaa house 2 street name city county',
            'AA111AA',
            ['98765432', '12345678'],
        ),
        (
            '2016-02-01 00:00:00',
            'bbb limited',
            'aaa house 2 street name city county',
            'AA111AA',
            ['98765432', '12345678'],
        ),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline)
def test_raw_to_events(app_with_db):
    fi = FileInfo.from_path('tests/fixtures/hmrc/exporters/exporters_2016_mock.zip')
    pipeline = HMRCExportersPipeline(app_with_db.dbi)
    pipeline.process(fi)

    expected_rows = [
        ('2016-01-01 00:00:00', 'aaa limited', '01 street name street line london', 'BB11BB', ['12345670']),
        (
            '2016-01-01 00:00:00',
            'abc company ltd',
            '1 - 3 street avenue house name city name',
            'AA11AA',
            ['12345678', '23456789'],
        ),
        (
            '2016-02-01 00:00:00',
            'bbb limited',
            'aaa house 2 street name city county',
            'AA111AA',
            ['98765432', '12345678'],
        ),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline)
def test_pipeline(self, app_with_db):
    pipeline = DITBACIPipeline(app_with_db.dbi)
    fi = FileInfo.from_path('tests/fixtures/dit/baci/baci.csv')
    pipeline.process(fi)

    # check L0
    expected_rows = [
        (1995, 10519, 4, 251, 1.548, 0.051),
        (1995, 30110, 4, 381, 1.249, 0.01),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l0_table, pipeline)

    # check L1 contains the same rows
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline)
def test_pipeline(self, add_world_bank_raw_tariff):
    pipeline = WorldBankTariffPipeline(self.dbi, force=True)
    fi = FileInfo.from_path(file_1)
    pipeline.process(fi)

    # check L0
    expected_rows = [
        (48, 1999, 201, 0, 'AHS', 5, 20),
        (262, 2005, 201, 380, 'BND', 40, 40),
        (266, 1998, 201, 0, 'AHS', 20, 20),
    ]
    assert rows_equal_table(self.dbi, expected_rows, pipeline._l0_table, pipeline)

    # check L1
    pipeline = WorldBankTariffTransformPipeline(self.dbi, force=True)
    pipeline.process()
    expected_rows = [
        (201, 12, 24, 2000, None, None, None, None, None, None, None, None),
        (201, 12, 24, 2001, None, None, None, None, None, None, None, None),
    ]
    assert rows_equal_table(self.dbi, expected_rows, pipeline._l1_table, pipeline, top_rows=2)

    # check second run with different raw tariff updates L1
    add_world_bank_raw_tariff(
        [
            {
                'reporter': 12,
                'year': 2000,
                'product': 201,
                'partner': 24,
                'duty_type': 'AHS',
                'simple_average': 10,
                'number_of_total_lines': 8,
            },
        ]
    )
    pipeline.process()
    expected_rows = [
        (201, 12, 24, 2000, 10, 10, None, None, None, None, None, None),
        (201, 12, 24, 2001, 10, None, None, None, None, None, None, None),
    ]
    assert rows_equal_table(self.dbi, expected_rows, pipeline._l1_table, pipeline, top_rows=2)
def _move_file_to_s3(file_url, organisation, dataset, delimiter, quote):
    bucket = app.config['s3']['bucket_url']
    file_name = file_url.split('/')[-1]
    full_url = os.path.join(bucket, file_url)
    utf_8_byte_stream = CSVParser.get_csv_as_utf_8_byte_stream(
        full_url=full_url,
        delimiter=delimiter,
        quotechar=quote,
    )
    file_info = FileInfo(file_url, utf_8_byte_stream)
    storage = StorageFactory.create(bucket)
    datasets_folder = app.config['s3']['datasets_folder']
    target_file_url = f'{datasets_folder}/{organisation}/{dataset}/{file_name}'
    storage.write_file(target_file_url, file_info.data)
    file_info.data.seek(0)
    return file_info
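# Hedged usage sketch (not from the repo): how _move_file_to_s3 might be invoked by an ingest
# endpoint. Every value below is a made-up example; only the helper's signature and return value
# are taken from the function above.
#
#   file_info = _move_file_to_s3(
#       file_url='uploads/example_org/example_dataset/data.csv',  # hypothetical source key
#       organisation='example_org',
#       dataset='example_dataset',
#       delimiter=',',
#       quote='"',
#   )
#   # file_info.data has been rewound, so it can be passed straight to a pipeline's process()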
def test_pipeline_happy_path(self, app_with_db):
    pipeline = WorldBankBoundRatesPipeline(app_with_db.dbi)
    fi = FileInfo.from_path('tests/fixtures/world_bank/bound_rates.csv')
    pipeline.process(fi)

    # check L0
    expected_rows = [
        ('H0', 4, 10111, 10, 2),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l0_table, pipeline)

    # check L1
    expected_rows = [
        (4, 10111, 10),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline)
def process(self, file_info, drop_source=True, **kwargs):
    file_regex = r'(.*?)/ONSPD_(?P<date>\w{3}_\d{4})_UK.csv$'
    zf = zipfile.ZipFile(file_info.data, mode='r')
    members, file_found = zf.namelist(), False
    for file_name in members:
        file_match = re.match(file_regex, file_name)
        if file_match:
            file_found = True
            file = zf.open(file_name)
            datestr = file_match.group('date')
            break
    if not file_found:
        raise ValueError('No valid CSV found in zip file.')
    date = datetime.datetime.strptime(datestr, '%b_%Y').date()
    self.publication_date = date.strftime('%Y-%m-%d')
    csv_file_info = FileInfo(file_info.name, file)
    super().process(csv_file_info, drop_source, **kwargs)
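# Hedged illustration (not from the repo): how the ONSPD zip member name is expected to encode
# the publication date. The member name below is a made-up example; the regex and the '%b_%Y'
# format string mirror the process() method above.
def _example_onspd_publication_date():
    import datetime
    import re

    example_member = 'Data/ONSPD_MAY_2019_UK.csv'  # hypothetical zip member name
    match = re.match(r'(.*?)/ONSPD_(?P<date>\w{3}_\d{4})_UK.csv$', example_member)
    date = datetime.datetime.strptime(match.group('date'), '%b_%Y').date()
    return date.strftime('%Y-%m-%d')  # -> '2019-05-01'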
def test_pipeline_multiple_nomen_codes(self, app_with_db):
    pipeline = WorldBankBoundRatesPipeline(app_with_db.dbi)
    fi = FileInfo.from_path('tests/fixtures/world_bank/bound_rates_multiple_nomen_codes.csv')
    pipeline.process(fi)

    # check L0
    expected_rows = [
        ('H0', 4, 10111, 10, 2),
        ('H1', 4, 10111, 5, 2),
        ('H2', 4, 10111, 7, 2),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l0_table, pipeline)

    # check L1 has the bound rate with the highest nomen code
    expected_rows = [
        (4, 10111, 7),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline)
def test_pipeline(self, app_with_db):
    pipeline = ComtradeCountryCodeAndISOPipeline(app_with_db.dbi)
    fi = FileInfo.from_path('tests/fixtures/comtrade/country_code_and_iso/country_list.csv')
    pipeline.process(fi)

    # check L0
    expected_rows = [
        (0, 'World', 'World', 'World', 'World', 'WL', 'WLD', '1962', '2061'),
        (4, 'Afghanistan', 'Afghanistan', 'Afghanistan', None, 'AF', 'AFG', '1962', '2061'),
        (
            899, 'Areas, nes', 'Areas, not elsewhere specified', 'Areas, nes',
            None, None, None, '1962', '2061',
        ),
        (
            918, 'European Union', 'European Union', 'European Union',
            None, 'EU', 'EUR', None, None,
        ),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l0_table, pipeline)

    # check L1
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline)
def partial_transform_data(self):
    pipeline = WorldBankTariffPipeline(self.dbi, force=True)
    fi = FileInfo.from_path(country_to_country_three_products)
    pipeline.process(fi)
    with mock.patch(
        'app.etl.organisation.world_bank.WorldBankTariffTransformPipeline._get_products'
    ) as mock_get_products:
        mock_get_products.return_value = [['201']]
        with mock.patch(
            'app.etl.organisation.world_bank.WorldBankTariffTransformPipeline'
            '.finish_processing'
        ) as mock_finish_processing:
            mock_finish_processing.return_value = None
            pipeline = WorldBankTariffTransformPipeline(
                self.dbi, force=False, continue_transform=True
            )
            pipeline.process()
    assert rows_equal_table(self.dbi, PRODUCT_201_ROWS, pipeline._l1_temp_table, pipeline)
    assert rows_equal_table(self.dbi, [], pipeline._l1_table, pipeline)
def test_one_datafile(self, app_with_db):
    pipeline = DITReferencePostcodesPipeline(app_with_db.dbi, force=False)
    fi = FileInfo.from_path(snapshot1)
    pipeline.process(fi)

    expected_rows = [
        (
            'AB10 1AA', 'S12000033', 'Aberdeen City', 'S99999999', None, 'S99999999', None,
            'S99999999', 'Scotland', '394251', '0806376', '2011-09-01', '2016-10-01',
        ),
        (
            'HU4 7SW', 'E06000011', 'East Riding of Yorkshire', 'E37000018', 'Humber',
            'E37000039', 'York, North Yorkshire and East Riding', 'E12000003',
            'Yorkshire and The Humber', '504860', '0429160', '1980-01-01', None,
        ),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l0_table, pipeline)
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline)
def test_process_ch_accounts_datafile(app_with_db):
    # run pipeline
    fi = FileInfo.from_path('tests/fixtures/companies_house/accounts/test_datafile_1.zip')
    pipeline = CompaniesHouseAccountsPipeline(app_with_db.dbi, trigger_dataflow_dag=True)
    pipeline.process(fi)

    # check output of L0
    expected_rows_l0 = [
        (
            1, 'test_datafile_1.zip', 'Prod223_2798', '12345', datetime.date(2020, 3, 31), 'html', '',
            datetime.date(2020, 3, 31), datetime.date(2020, 3, 31), datetime.date(2020, 3, 31),
            '12345', 'BadCorp2 Limited', False, Decimal('1.0'), None, None, None, Decimal('96264.0'),
            None, None, Decimal('96264.0'), Decimal('96264.0'), None, None, None, Decimal('96264.0'),
            None, None, None, None, None, None, None, None, None, None, None, None, None,
        ),
        (
            2, 'test_datafile_1.zip', 'Prod223_2798', '12345', datetime.date(2020, 3, 31), 'html', '',
            datetime.date(2019, 3, 31), datetime.date(2019, 3, 31), datetime.date(2020, 3, 31),
            '12345', 'BadCorp2 Limited', False, Decimal('1.0'), None, None, None, Decimal('96264.0'),
            None, None, Decimal('96264.0'), Decimal('96264.0'), None, None, None, Decimal('96264.0'),
            None, None, None, None, None, None, None, None, None, None, None, None, None,
        ),
        (
            3, 'test_datafile_1.zip', 'Prod223_1859', '1234', datetime.date(2017, 2, 27), 'xml',
            'http://www.xbrl.org/uk/fr/gaap/pt/2004-12-01', datetime.date(2016, 7, 31),
            datetime.date(2016, 7, 31), datetime.date(2016, 7, 31), '1234', 'BadCorp Limited', False,
            None, Decimal('251044.0'), Decimal('1840.0'), Decimal('29277.0'), Decimal('31117.0'),
            None, None, Decimal('16747.0'), Decimal('267791.0'), Decimal('267734.0'),
            Decimal('5700.0'), Decimal('18511.0'), Decimal('267734.0'),
            None, None, None, None, None, None, None, None, None, None, None, None, None,
        ),
        (
            4, 'test_datafile_1.zip', 'Prod223_1859', '1234', datetime.date(2017, 2, 27), 'xml',
            'http://www.xbrl.org/uk/fr/gaap/pt/2004-12-01', datetime.date(2015, 7, 31),
            datetime.date(2015, 7, 31), datetime.date(2016, 7, 31), '1234', 'BadCorp Limited', False,
            None, Decimal('251160.0'), Decimal('2162.0'), Decimal('32909.0'), Decimal('35071.0'),
            None, None, Decimal('18604.0'), Decimal('269764.0'), Decimal('269707.0'),
            Decimal('5700.0'), Decimal('20484.0'), Decimal('269707.0'),
            None, None, None, None, None, None, None, None, None, None, None, None, None,
        ),
    ]
    rows_l0 = app_with_db.dbi.execute_query(f'SELECT * FROM {pipeline._l0_table} order by id')
    assert rows_l0 == expected_rows_l0
def test_pipeline(self, app_with_db):
    pipeline = DITEUCountryMembershipPipeline(app_with_db.dbi)
    fi = FileInfo.from_path('tests/fixtures/dit/eu_country_membership/eu_spine.csv')
    pipeline.process(fi)

    # check L0 (country name, ISO3 code, then one membership column per year)
    expected_rows = [
        ('Austria', 'AUT', *([None] * 37), *(['EUN'] * 24)),
        ('Belgium', 'BEL', *(['EUN'] * 61)),
        ('Bulgaria', 'BGR', *([None] * 49), *(['EUN'] * 12)),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l0_table, pipeline)

    # check L1
    expected_rows = [
        ('Austria', 'AUT', 1958, None),
        ('Austria', 'AUT', 1959, None),
        ('Austria', 'AUT', 1960, None),
        ('Austria', 'AUT', 1961, None),
        ('Austria', 'AUT', 1962, None),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline, top_rows=5)
def test_spire_pipelines(self, app_with_db):
    pipeline_datafile_mapping = [
        (SPIREBatchPipeline, 'tests/fixtures/dit/spire/batches.csv'),
        (SPIRECountryGroupPipeline, 'tests/fixtures/dit/spire/country_groups.csv'),
        (SPIREEndUserPipeline, 'tests/fixtures/dit/spire/end_users.csv'),
        (SPIREFootnotePipeline, 'tests/fixtures/dit/spire/footnotes.csv'),
        (SPIREMediaFootnotePipeline, 'tests/fixtures/dit/spire/media_footnotes.csv'),
        (SPIREMediaFootnoteCountryPipeline, 'tests/fixtures/dit/spire/media_footnote_countries.csv'),
        (SPIREOglTypePipeline, 'tests/fixtures/dit/spire/ogl_types.csv'),
        (SPIRERefArsSubjectPipeline, 'tests/fixtures/dit/spire/ref_ars_subjects.csv'),
        (SPIRERefCountryMappingPipeline, 'tests/fixtures/dit/spire/ref_country_mappings.csv'),
        (SPIRERefDoNotReportValuePipeline, 'tests/fixtures/dit/spire/ref_do_not_report_values.csv'),
        (SPIRERefReportRatingPipeline, 'tests/fixtures/dit/spire/ref_report_ratings.csv'),
        (SPIREApplicationPipeline, 'tests/fixtures/dit/spire/applications.csv'),
        (SPIRECountryGroupEntryPipeline, 'tests/fixtures/dit/spire/country_group_entries.csv'),
        (SPIREMediaFootnoteDetailPipeline, 'tests/fixtures/dit/spire/media_footnote_details.csv'),
        (SPIREReturnPipeline, 'tests/fixtures/dit/spire/returns.csv'),
        (SPIREApplicationAmendmentPipeline, 'tests/fixtures/dit/spire/application_amendments.csv'),
        (SPIREApplicationCountryPipeline, 'tests/fixtures/dit/spire/application_countries.csv'),
        (SPIREFootnoteEntryPipeline, 'tests/fixtures/dit/spire/footnote_entries.csv'),
        (SPIREGoodsIncidentPipeline, 'tests/fixtures/dit/spire/goods_incidents.csv'),
        (SPIREIncidentPipeline, 'tests/fixtures/dit/spire/incidents.csv'),
        (SPIREThirdPartyPipeline, 'tests/fixtures/dit/spire/third_parties.csv'),
        (SPIREUltimateEndUserPipeline, 'tests/fixtures/dit/spire/ultimate_end_users.csv'),
        (SPIREArsPipeline, 'tests/fixtures/dit/spire/ars.csv'),
        (SPIREControlEntryPipeline, 'tests/fixtures/dit/spire/control_entries.csv'),
        (SPIREReasonForRefusalPipeline, 'tests/fixtures/dit/spire/reasons_for_refusal.csv'),
    ]
    for pipeline, datafile in pipeline_datafile_mapping:
        fi = FileInfo.from_path(datafile)
        pipeline(app_with_db.dbi).process(fi)

        # check if table contains rows
        session = app_with_db.db.session
        rows = session.query(pipeline.sql_alchemy_model).all()
        assert len(rows) > 0
def test_new_data(self, app_with_db):
    pipeline = ONSPostcodeDirectoryPipeline(app_with_db.dbi, force=False, delete_previous=True)
    fi = FileInfo.from_path(file1)
    pipeline.process(fi)
    fi2 = FileInfo.from_path(file2)
    pipeline.process(fi2)

    # check L0
    expected_rows = [
        (
            'AB1 0AA', 'AB1 0AA', 'AB1 0AA', '198001', None, 'S99999999', 'S99999999', 'S12000033',
            'S13002843', 'S99999999', '0', '385386', '0801193', '1', 'S08000020', 'S99999999',
            'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001', 'S22000047',
            'S03000012', 'S31000935', '99ZZ00', 'S00001364', '01C30', 'S99999999', 'S01000011',
            'S02000007', '6', '3C2', 'S00090303', 'S01006514', 'S02001237', 'S34002990',
            'S03000012', 'S99999999', 'S99999999', '3', '1C3', '57.101474', '-2.242851',
            'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
        ),
        (
            'AB1 0AA', 'AB1 0AA', 'AB1 0AA', '200001', '199606', 'S99999999', 'S99999999', 'S12000033',
            'S13002843', 'S99999999', '0', '385386', '0801193', '1', 'S08000020', 'S99999999',
            'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001', 'S22000047',
            'S03000012', 'S31000935', '99ZZ00', 'S00001364', '01C30', 'S99999999', 'S01000011',
            'S02000007', '6', '3C2', 'S00090303', 'S01006514', 'S02001237', 'S34002990',
            'S03000012', 'S99999999', 'S99999999', '3', '1C3', '57.101474', '-2.242851',
            'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
        ),
        (
            'AB1 0AB', 'AB1 0AB', 'AB1 0AB', '198001', '199606', 'S99999999', 'S99999999', 'S12000033',
            'S13002843', 'S99999999', '0', '385177', '0801314', '1', 'S08000020', 'S99999999',
            'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001', 'S22000047',
            'S03000012', 'S31000935', '99ZZ00', 'S00001270', '01C31', 'S99999999', 'S01000011',
            'S02000007', '6', '4B3', 'S00090303', 'S01006514', 'S02001237', 'S34002990',
            'S03000012', 'S99999999', 'S99999999', '3', '1C3', '57.102554', '-2.246308',
            'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
        ),
        (
            'AB1 0AB', 'AB1 0AB', 'AB1 0AB', '198001', '199606', 'S99999999', 'S99999999', 'S12000033',
            'S13002843', 'S99999999', '0', '385177', '0801314', '1', 'S08000020', 'S99999999',
            'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001', 'S22000047',
            'S03000012', 'S31000935', '99ZZ00', 'S00001270', '01C31', 'S99999999', 'S01000011',
            'S02000007', '6', '4B3', 'S00090303', 'S01006514', 'S02001237', 'S34002990',
            'S03000012', 'S99999999', 'S99999999', '3', '1C3', '57.102554', '-2.246308',
            'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
        ),
        (
            'AB1 0AD', 'AB1 0AD', 'AB1 0AD', '198001', '199606', 'S99999999', 'S99999999', 'S12000033',
            'S13002843', 'S99999999', '0', '385053', '0801092', '1', 'S08000020', 'S99999999',
            'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001', 'S22000047',
            'S03000012', 'S31000935', '99ZZ00', 'S00001364', '01C30', 'S99999999', 'S01000011',
            'S02000007', '6', '3C2', 'S00090399', 'S01006514', 'S02001237', 'S34003015',
            'S03000012', 'S99999999', 'S99999999', '3', '6A1', '57.100556', '-2.248342',
            'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
        ),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l0_table, pipeline)

    # check L1
    expected_rows = [
        (
            'AB1 0AA', 'AB1 0AA', 'AB1 0AA', '1980-01-01', None, 'S99999999', 'S99999999', 'S12000033',
            'S13002843', 'S99999999', '0', '385386', '0801193', '1', 'S08000020', 'S99999999',
            'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001', 'S22000047',
            'S03000012', 'S31000935', '99ZZ00', 'S00001364', '01C30', 'S99999999', 'S01000011',
            'S02000007', '6', '3C2', 'S00090303', 'S01006514', 'S02001237', 'S34002990',
            'S03000012', 'S99999999', 'S99999999', '3', '1C3', '57.101474', '-2.242851',
            'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999', '2019-05-01',
        ),
        (
            'AB1 0AA', 'AB1 0AA', 'AB1 0AA', '2000-01-01', '1996-06-01', 'S99999999', 'S99999999',
            'S12000033', 'S13002843', 'S99999999', '0', '385386', '0801193', '1', 'S08000020',
            'S99999999', 'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001',
            'S22000047', 'S03000012', 'S31000935', '99ZZ00', 'S00001364', '01C30', 'S99999999',
            'S01000011', 'S02000007', '6', '3C2', 'S00090303', 'S01006514', 'S02001237',
            'S34002990', 'S03000012', 'S99999999', 'S99999999', '3', '1C3', '57.101474',
            '-2.242851', 'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
            '2019-07-01',
        ),
        (
            'AB1 0AB', 'AB1 0AB', 'AB1 0AB', '1980-01-01', '1996-06-01', 'S99999999', 'S99999999',
            'S12000033', 'S13002843', 'S99999999', '0', '385177', '0801314', '1', 'S08000020',
            'S99999999', 'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001',
            'S22000047', 'S03000012', 'S31000935', '99ZZ00', 'S00001270', '01C31', 'S99999999',
            'S01000011', 'S02000007', '6', '4B3', 'S00090303', 'S01006514', 'S02001237',
            'S34002990', 'S03000012', 'S99999999', 'S99999999', '3', '1C3', '57.102554',
            '-2.246308', 'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
            '2019-05-01',
        ),
        (
            'AB1 0AB', 'AB1 0AB', 'AB1 0AB', '1980-01-01', '1996-06-01', 'S99999999', 'S99999999',
            'S12000033', 'S13002843', 'S99999999', '0', '385177', '0801314', '1', 'S08000020',
            'S99999999', 'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001',
            'S22000047', 'S03000012', 'S31000935', '99ZZ00', 'S00001270', '01C31', 'S99999999',
            'S01000011', 'S02000007', '6', '4B3', 'S00090303', 'S01006514', 'S02001237',
            'S34002990', 'S03000012', 'S99999999', 'S99999999', '3', '1C3', '57.102554',
            '-2.246308', 'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
            '2019-07-01',
        ),
        (
            'AB1 0AD', 'AB1 0AD', 'AB1 0AD', '1980-01-01', '1996-06-01', 'S99999999', 'S99999999',
            'S12000033', 'S13002843', 'S99999999', '0', '385053', '0801092', '1', 'S08000020',
            'S99999999', 'S92000003', 'S99999999', '0', 'S14000002', 'S15000001', 'S09000001',
            'S22000047', 'S03000012', 'S31000935', '99ZZ00', 'S00001364', '01C30', 'S99999999',
            'S01000011', 'S02000007', '6', '3C2', 'S00090399', 'S01006514', 'S02001237',
            'S34003015', 'S03000012', 'S99999999', 'S99999999', '3', '6A1', '57.100556',
            '-2.248342', 'S99999999', 'S99999999', 'S23000009', '6808', 'S99999999', 'S99999999',
            '2019-07-01',
        ),
    ]
    assert rows_equal_table(app_with_db.dbi, expected_rows, pipeline._l1_table, pipeline)