def delete_stats():
    """Remove every row from the extract_stats table."""
    with StatsDBConnection() as connector:
        stats = connector.get_table(Constants.EXTRACT_STATS)
        connector.execute(stats.delete())
def validate_edware_stats_table_before_mig(self):
    """Verify udl_stats holds exactly two batches in 'udl.ingested' state before migration."""
    with StatsDBConnection() as conn:
        table = conn.get_table('udl_stats')
        query = select([table.c.load_status])
        result = conn.execute(query).fetchall()
        expected_result = [('udl.ingested',), ('udl.ingested',)]
        # assertEquals is a deprecated alias (removed in Python 3.12)
        self.assertEqual(result, expected_result)
def get_batches_to_migrate(tenant=None):
    """
    Return the daily delta batches that are ready to be migrated.

    :param tenant: optional tenant name used to filter the batches
    :type tenant: str
    :returns: list of udl_stats rows (rec_id, batch_guid, tenant, load_type,
              load_status, batch_operation, snapshot_criteria) ordered by
              file arrival time; empty list if nothing is ready
    """
    # Bug fix: the conditional expression must be parenthesized. The original
    # `'...' + ('with tenant: ' + tenant) if tenant else ''` bound the whole
    # `+` expression to the `if`, so an empty string was logged when tenant
    # was None, and 'migratewith' was fused when it was set.
    logger.info('Master: Getting daily delta batches to migrate'
                + (' with tenant: ' + tenant if tenant else ''))
    with StatsDBConnection() as connector:
        udl_status_table = connector.get_table(UdlStatsConstants.UDL_STATS)
        query = \
            select([udl_status_table.c.rec_id,
                    udl_status_table.c.batch_guid,
                    udl_status_table.c.tenant,
                    udl_status_table.c.load_type,
                    udl_status_table.c.load_status,
                    udl_status_table.c.batch_operation,
                    udl_status_table.c.snapshot_criteria],
                   from_obj=[udl_status_table]).\
            where(udl_status_table.c.load_status == UdlStatsConstants.UDL_STATUS_INGESTED).\
            order_by(udl_status_table.c.file_arrived)
        if tenant:
            # and_() around a single clause was redundant; a plain comparison
            # produces the same SQL.
            query = query.where(udl_status_table.c.tenant == tenant)
        batches = connector.get_result(query)
    return batches
def empty_table(self):
    """Delete all rows from batch_table, err_list and udl_stats, verifying each is empty."""
    with get_udl_connection() as connector:
        # Delete all data from batch_table
        batch_table = connector.get_table(Constants.UDL2_BATCH_TABLE)
        connector.execute(batch_table.delete())
        remaining = connector.execute(select([batch_table])).fetchall()
        self.assertEqual(len(remaining), 0)
        # Delete all data from err_list
        err_list_table = connector.get_table('err_list')
        connector.execute(err_list_table.delete())
        remaining = connector.execute(select([err_list_table])).fetchall()
        self.assertEqual(len(remaining), 0)
    # Delete all data from udl_stats table (stats database)
    with StatsDBConnection() as conn:
        table = conn.get_table('udl_stats')
        conn.execute(table.delete())
        remaining = conn.execute(select([table])).fetchall()
        # Bug fix: the row count was computed but never asserted, so a
        # failed cleanup of udl_stats went unnoticed.
        self.assertEqual(len(remaining), 0)
def validate_udl_stats(self):
    """Check post-migration load statuses and confirm the per-batch schemas were dropped."""
    with StatsDBConnection() as conn:
        table = conn.get_table('udl_stats')
        query = select([table.c.load_status])
        result = conn.execute(query).fetchall()
        expected_result = [('migrate.ingested', ), ('migrate.failed', )]
        # Validate that one migration batch is failed and one is successful.
        # (assertEquals is a deprecated alias removed in Python 3.12)
        self.assertEqual(result, expected_result)
        # Validate that schema clean up is successful for both scenarios:
        # migrate.ingested and migrate.failed. The temporary schema is named
        # after the batch guid.
        schema_name = select([table.c.batch_guid])
        schema_name_result = conn.execute(schema_name).fetchall()
        schema_name_1 = schema_name_result[0][0]
        schema_name_2 = schema_name_result[1][0]
        with get_target_connection(self.tenant) as connector:
            query = select(['schema_name'],
                           from_obj=['information_schema.schemata'
                                     ]).where('schema_name in :a')
            params = [bindparam('a', (schema_name_1, schema_name_2))]
            # Re-render the query as text so the IN clause can be bound as a
            # tuple parameter.
            new_query = text(str(query), bindparams=params)
            result = connector.execute(new_query).fetchall()
            self.assertEqual(
                len(result), 0,
                "Schema clean up after migration is unsuccessful")
def validate_stats_table_after_mig(self):
    """Validate that udl_stats has the expected number of rows with status 'migrate.ingested' after migration."""
    with StatsDBConnection() as conn:
        table = conn.get_table('udl_stats')
        query = select([table]).where(table.c.load_status == 'migrate.ingested')
        result = conn.execute(query).fetchall()
        # assertEquals is a deprecated alias (removed in Python 3.12)
        self.assertEqual(len(result), self.expected_unique_batch_guids)
def validate_udl_stats_after_mig(self):
    """After migration every udl_stats row must carry status 'migrate.ingested'."""
    with StatsDBConnection() as conn:
        table = conn.get_table('udl_stats')
        query = select([table.c.load_status])
        result = conn.execute(query).fetchall()
        expected_result = [('migrate.ingested',)]
        # assertEquals is a deprecated alias (removed in Python 3.12)
        self.assertEqual(result, expected_result)
        print("Migration is complete")
def empty_stats_table(self):
    """Purge the udl_stats table in the edware_stats DB and verify it is empty."""
    with StatsDBConnection() as conn:
        stats = conn.get_table('udl_stats')
        conn.execute(stats.delete())
        remaining = conn.execute(select([stats])).fetchall()
        self.assertEqual(len(remaining), 0)
def validate_callback(self, status):
    """Verify exactly one udl_stats row carries a notification, and its callback status matches `status`."""
    with StatsDBConnection() as conn:
        udl_stats = conn.get_table('udl_stats')
        query = select([udl_stats.c.notification_status])
        # Bug fix: `column is not None` is evaluated by Python as an identity
        # test (always True here), so the filter was a no-op. Use SQLAlchemy's
        # isnot() to emit "notification IS NOT NULL" in SQL.
        query = query.where(udl_stats.c.notification.isnot(None))
        results = conn.get_result(query)
        self.assertEqual(len(results), 1)
        actual_status = json.loads(results[0]['notification_status'])
        self.assertEqual(actual_status['call_back']['status'], status)
def get_stats_table_has_migrated_ingested_status(tenant):
    """Return the load_status values recorded in udl_stats for the given tenant."""
    with StatsDBConnection() as conn:
        stats = conn.get_table(UdlStatsConstants.UDL_STATS)
        query = (select([stats.c.load_status], from_obj=([stats]))
                 .where(and_(stats.c.tenant == tenant)))
        return conn.get_result(query)
def testUpdate_ed_stats_for_precached(self):
    """After updating record '9000', its last_pre_cached timestamp must be set."""
    update_ed_stats_for_precached('9000')
    with StatsDBConnection() as connector:
        stats = connector.get_table('udl_stats')
        query = (select([stats.c.last_pre_cached.label('last_pre_cached'), ],
                        from_obj=[stats])
                 .where(and_(stats.c.rec_id == '9000')))
        rows = connector.get_result(query)
        self.assertIsNotNone(rows[0]['last_pre_cached'])
def test_update_record_table(self):
    """update_records_in_table must change status 'C' to 'D' for the matching request."""
    with StatsDBConnection() as conn:
        stats = conn.get_table('extract_stats')
        conn.execute(stats.insert({'request_guid': 'b', 'status': 'C'}))
        update_records_in_table(StatsDBConnection, 'extract_stats',
                                {'status': 'D'}, {'request_guid': 'b'})
        rows = conn.get_result(select([stats.c.status], from_obj=[stats]))
        self.assertEqual(rows[0]['status'], 'D')
def test_insert_to_table(self):
    """insert_to_table must persist a new extract_stats row."""
    insert_to_table(StatsDBConnection, 'extract_stats',
                    {'request_guid': 'a', 'status': 'C'})
    with StatsDBConnection() as conn:
        stats = conn.get_table('extract_stats')
        rows = conn.get_result(
            select([stats.c.request_guid], from_obj=[stats]))
        self.assertEqual(rows[0]['request_guid'], 'a')
def validate_udl_stats(self, guid_batch_id):
    """The failed batch must be recorded in udl_stats with status 'udl.failed'."""
    with StatsDBConnection() as conn:
        stats_table = conn.get_table('udl_stats')
        stats_record = select([
            stats_table.c.load_status
        ]).where(stats_table.c.batch_guid == guid_batch_id)
        stats_result = conn.execute(stats_record).fetchall()
        expected_result = [('udl.failed', )]
        # assertEquals is a deprecated alias (removed in Python 3.12)
        self.assertEqual(
            stats_result, expected_result,
            "Error has not been logged into udl_stats table")
def update_ed_stats_for_prepdf(rec_id):
    '''
    Sets current timestamp to the `last_pdf_task_requested` field for the
    udl_stats row with the specified `rec_id`.

    (Docstring fix: the code updates last_pdf_task_requested, not
    last_pdf_generated as previously documented.)

    :param rec_id: record id of the udl_stats row to update
    '''
    with StatsDBConnection() as connector:
        udl_stats = connector.get_table(UdlStatsConstants.UDL_STATS)
        connector.execute(
            udl_stats.update(values={
                udl_stats.c.last_pdf_task_requested: func.now()
            }).where(udl_stats.c.rec_id == rec_id))
def update_ed_stats_for_precached(rec_id):
    '''
    Set the current timestamp on the `last_pre_cached` field of the udl_stats
    row with the given record id.

    (Docstring fix: the previous docstring documented tenant/state_code
    parameters that this function does not take.)

    :param rec_id: record id of the udl_stats row to update
    '''
    with StatsDBConnection() as connector:
        udl_stats = connector.get_table(UdlStatsConstants.UDL_STATS)
        stmt = udl_stats.update(values={
            udl_stats.c.last_pre_cached: func.now()
        }).where(udl_stats.c.rec_id == rec_id)
        connector.execute(stmt)
def test_delete(self):
    """delete_stats() must remove every row from extract_stats."""
    with StatsDBConnection() as connector:
        extract_stats = connector.get_table(Constants.EXTRACT_STATS)
        # Insert test data
        stmt = extract_stats.insert({
            Constants.REQUEST_GUID: 'abc',
            Constants.STATUS: 'status'
        })
        connector.execute(stmt)
        query = select([extract_stats.c.status.label(Constants.STATUS)],
                       from_obj=[extract_stats])
        results = connector.get_result(query)
        # Bug fix: only one row is inserted, so assertGreater(len, 1)
        # (i.e. >= 2 rows) could never hold on a clean table; assert the
        # table is simply non-empty before deleting.
        self.assertGreater(len(results), 0)
        delete_stats()
        results = connector.get_result(query)
        self.assertEqual(len(results), 0)
def insert_extract_stats(*dict_values):
    '''
    Insert a row into the extract status table, stamping it with the current
    UTC time.

    :params dict_values: one or more dictionaries of values to insert
        ex. dict_values = {Constants.TENANT: "tenantName", Constants.STATE_CODE: "EX"}, {Constants.OTHER: "test"}
        (docstring fix: the previous example dict used a comma/colon mix that
        was not valid Python)
    '''
    # Start from a default UTC timestamp; the supplied dicts are folded in
    # via merge_dict (precedence between d and values depends on merge_dict's
    # semantics -- defined elsewhere in the project).
    values = {Constants.TIMESTAMP: datetime.utcnow()}
    for d in dict_values:
        values = merge_dict(d, values)
    with StatsDBConnection() as connector:
        extract_stats = connector.get_table(Constants.EXTRACT_STATS)
        stmt = extract_stats.insert(values)
        connector.execute(stmt)
def prepare_ed_stats():
    '''
    Return udl_stats rows for assessment batches that were migrated
    successfully but have not yet been pre-cached (last_pre_cached is NULL).
    '''
    with StatsDBConnection() as connector:
        stats = connector.get_table(UdlStatsConstants.UDL_STATS)
        columns = [stats.c.rec_id.label(UdlStatsConstants.REC_ID),
                   stats.c.tenant.label(UdlStatsConstants.TENANT),
                   stats.c.load_start.label(UdlStatsConstants.LOAD_START),
                   stats.c.load_end.label(UdlStatsConstants.LOAD_END),
                   stats.c.record_loaded_count.label(UdlStatsConstants.RECORD_LOADED_COUNT),
                   stats.c.batch_guid.label(UdlStatsConstants.BATCH_GUID),
                   ]
        query = select(columns, from_obj=[stats]).where(
            and_(stats.c.load_status == UdlStatsConstants.MIGRATE_INGESTED,
                 stats.c.last_pre_cached == null(),
                 stats.c.load_type == LoadType.ASSESSMENT))
        return connector.get_result(query)
def validate_stats_update(self, status):
    """Validate the batch's udl_stats rows: snapshot operation, criteria, and final load status."""
    with StatsDBConnection() as conn:
        stats_table = conn.get_table('udl_stats')
        query = select([stats_table.c.batch_operation,
                        stats_table.c.load_status,
                        stats_table.c.snapshot_criteria]).where(
            stats_table.c.batch_guid == self.batch_id)
        result = conn.execute(query).fetchall()
        self.assertNotEqual(result, [])
        for row in result:
            operation = row['batch_operation']
            self.assertEqual(operation, 's')
            snapshot_criteria = json.loads(row['snapshot_criteria'])
            self.assertEqual(2, len(snapshot_criteria))
            self.assertEqual("800b3654-4406-4a90-9591-be84b67054df",
                             snapshot_criteria['reg_system_id'])
            self.assertEqual(2015, snapshot_criteria['academic_year'])
            # Bug fix: compare by equality, not identity. `status is
            # NotificationConstants.SUCCESS` only worked by accident of
            # string interning and is fragile.
            expected_status = (UdlStatsConstants.UDL_STATUS_INGESTED
                               if status == NotificationConstants.SUCCESS
                               else UdlStatsConstants.UDL_STATUS_FAILED)
            self.assertEqual(row['load_status'], expected_status)
def test_insert_udl_stats(self):
    """insert_udl_stats must write a row that can be read back by batch guid."""
    record = {
        UdlStatsConstants.BATCH_GUID: 'abc',
        UdlStatsConstants.LOAD_TYPE: 'test',
        UdlStatsConstants.FILE_ARRIVED: datetime.now(),
        UdlStatsConstants.TENANT: 'tenant',
        UdlStatsConstants.LOAD_STATUS: UdlStatsConstants.UDL_STATUS_RECEIVED
    }
    insert_udl_stats(record)
    with StatsDBConnection() as conn:
        stats = conn.get_table('udl_stats')
        query = select([stats.c[UdlStatsConstants.LOAD_TYPE]],
                       from_obj=[stats])
        rows = conn.get_result(
            query.where(stats.c[UdlStatsConstants.BATCH_GUID] == 'abc'))
        self.assertEqual(rows[0][UdlStatsConstants.LOAD_TYPE], 'test')
def report_udl_stats_batch_status(rec_id, migrate_load_status):
    """
    Record the migration outcome for a batch in udl_stats.

    :param rec_id: record id of the udl_stats row to update
        (docstring fix: the previous docstring documented a `batch_guid`
        parameter this function does not take)
    :param migrate_load_status: new load_status value to store
    :returns: number of rows updated
    :raises EdMigrateUdl_statException: when no row matched rec_id
    """
    logger.info('Master: Reporting batch status to udl_stats')
    with StatsDBConnection() as connector:
        udl_stats_table = connector.get_table(UdlStatsConstants.UDL_STATS)
        update_query = udl_stats_table.update().values(load_status=migrate_load_status).\
            where(udl_stats_table.c.rec_id == rec_id)
        rtn = connector.execute(update_query)
        rowcount = rtn.rowcount
        if rowcount == 0:
            # Format rather than concatenate: `str + rec_id` raised
            # TypeError when rec_id was not a string, masking the real error.
            raise EdMigrateUdl_statException(
                'Failed to update record for rec_id={0}'.format(rec_id))
        return rowcount
def test_multi_dict_insert(self):
    """insert_extract_stats must merge several dicts into a single row."""
    task = {Constants.TASK_ID: 'abc'}
    request = {Constants.REQUEST_GUID: 'u1'}
    state = {Constants.STATUS: ExtractStatus.ARCHIVED}
    insert_extract_stats(task, request, state)
    with StatsDBConnection() as connector:
        stats = connector.get_table(Constants.EXTRACT_STATS)
        query = select([stats.c.status.label(Constants.STATUS),
                        stats.c.task_id.label(Constants.TASK_ID)],
                       from_obj=[stats]).where(stats.c.request_guid == 'u1')
        rows = connector.get_result(query)
        self.assertIsNotNone(rows)
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0][Constants.STATUS], state[Constants.STATUS])
        self.assertEqual(rows[0][Constants.TASK_ID], task[Constants.TASK_ID])
def test_update_udl_stats(self):
    """update_udl_stats must rewrite the load_type of an existing row."""
    record = {
        UdlStatsConstants.BATCH_GUID: 'cde',
        UdlStatsConstants.LOAD_TYPE: 'test',
        UdlStatsConstants.FILE_ARRIVED: datetime.now(),
        UdlStatsConstants.TENANT: 'tenant',
        UdlStatsConstants.LOAD_STATUS: UdlStatsConstants.UDL_STATUS_RECEIVED
    }
    with StatsDBConnection() as conn:
        stats = conn.get_table('udl_stats')
        inserted = conn.execute(stats.insert(record))
        rec_id = inserted.inserted_primary_key[0]
        update_udl_stats(rec_id, {UdlStatsConstants.LOAD_TYPE: 'CD'})
        query = (select([stats.c[UdlStatsConstants.LOAD_TYPE]],
                        from_obj=[stats])
                 .where(stats.c[UdlStatsConstants.BATCH_GUID] == 'cde'))
        rows = conn.get_result(query)
        self.assertEqual(rows[0][UdlStatsConstants.LOAD_TYPE], 'CD')
def get_udl_stats_by_date(start_date, end_date=None):
    '''
    Return udl_stats rows whose file_arrived falls within the given window.

    :param start_date: inclusive lower bound on file_arrived
    :param end_date: exclusive upper bound; None means no upper bound
    '''
    results = []
    with StatsDBConnection() as connector:
        stats = connector.get_table(UdlStatsConstants.UDL_STATS)
        query = select([stats.c.tenant,
                        stats.c.file_arrived,
                        stats.c.load_start,
                        stats.c.load_end,
                        stats.c.load_status,
                        stats.c.batch_guid])
        if end_date is None:
            query = query.where(stats.c.file_arrived >= start_date)
        else:
            query = query.where(and_(stats.c.file_arrived >= start_date,
                                     stats.c.file_arrived < end_date))
        results = connector.get_result(query)
    return results
def prepare_ed_stats():
    '''
    Return udl_stats rows for migrated batches that have not yet had a PDF
    generation task requested (last_pdf_task_requested is NULL).
    '''
    with StatsDBConnection() as connector:
        stats = connector.get_table(UdlStatsConstants.UDL_STATS)
        labeled_columns = [
            stats.c.rec_id.label(UdlStatsConstants.REC_ID),
            stats.c.tenant.label(UdlStatsConstants.TENANT),
            stats.c.load_start.label(UdlStatsConstants.LOAD_START),
            stats.c.load_end.label(UdlStatsConstants.LOAD_END),
            stats.c.record_loaded_count.label(
                UdlStatsConstants.RECORD_LOADED_COUNT),
            stats.c.batch_guid.label(UdlStatsConstants.BATCH_GUID)
        ]
        query = (select(labeled_columns, from_obj=[stats])
                 .where(stats.c.load_status == UdlStatsConstants.MIGRATE_INGESTED)
                 .where(and_(stats.c.last_pdf_task_requested == null())))
        return connector.get_result(query)
def test_insert_status(self):
    """A single-dict insert must be readable back with the same status."""
    record = {
        Constants.TASK_ID: 'abc',
        Constants.REQUEST_GUID: 'requestId',
        Constants.INFO: '{}',
        Constants.TIMESTAMP: datetime.now(),
        Constants.STATUS: ExtractStatus.QUEUED
    }
    insert_extract_stats(record)
    with StatsDBConnection() as connector:
        stats = connector.get_table(Constants.EXTRACT_STATS)
        query = (select([stats.c.status.label(Constants.STATUS), ],
                        from_obj=[stats])
                 .where(stats.c.task_id == 'abc'))
        rows = connector.get_result(query)
        self.assertIsNotNone(rows)
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0][Constants.STATUS], record[Constants.STATUS])
def get_batch_for_notification():
    '''
    Return udl_stats rows needing a notification: batches that reached a
    terminal load status (udl failed, migrate failed or migrate ingested)
    whose notification_status is still NULL or empty, ordered by arrival.
    '''
    with StatsDBConnection() as connector:
        stats = connector.get_table(UdlStatsConstants.UDL_STATS)
        terminal_statuses = [UdlStatsConstants.UDL_STATUS_FAILED,
                             UdlStatsConstants.MIGRATE_FAILED,
                             UdlStatsConstants.MIGRATE_INGESTED]
        query = select([stats.c.rec_id,
                        stats.c.batch_guid,
                        stats.c.load_type,
                        stats.c.load_status,
                        stats.c.notification,
                        stats.c.notification_status],
                       from_obj=[stats]).where(
            stats.c.load_status.in_(terminal_statuses))
        query = query.where(
            and_(or_(stats.c.notification_status.is_(None),
                     stats.c.notification_status == ''))).order_by(
            stats.c.file_arrived)
        return connector.get_result(query)
def merge_to_udl2stat_notification(batch_id, notification_data):
    '''
    Merge new notification data into the stored notification JSON for a batch
    in the udl_stats table.

    :param batch_id: batch guid of the row to update
    :param notification_data: dict of notification fields to merge in
    '''
    with StatsDBConnection() as connector:
        stats = connector.get_table(UdlStatsConstants.UDL_STATS)
        query = select([stats.c.notification],
                       from_obj=[stats]).where(stats.c.batch_guid == batch_id)
        rows = connector.get_result(query)
        # There should be exactly one matching record; the loop also handles
        # zero rows gracefully.
        for row in rows:
            stored = row.get(UdlStatsConstants.NOTIFICATION, '{}')
            merged = json.loads(stored if stored is not None else '{}')
            merged.update(notification_data)
            update_udl_stats_by_batch_guid(
                batch_id,
                {UdlStatsConstants.NOTIFICATION: json.dumps(merged)})
def test_create_new_entry(self):
    """create_new_entry must queue a task and persist its extract_stats row."""
    entry_params = {'tenant': 'dummy', 'uid': '1234', 'params': '{}'}
    request_id = 'request_id_1'
    task_id = create_new_entry(DummyUser(), request_id, entry_params)
    self.assertIsNotNone(task_id)
    with StatsDBConnection() as connector:
        stats = connector.get_table(Constants.EXTRACT_STATS)
        query = (select([stats.c.task_id.label(Constants.TASK_ID),
                         stats.c.status.label(Constants.STATUS),
                         stats.c.info.label(Constants.INFO),
                         stats.c.request_guid.label(Constants.REQUEST_GUID)],
                        from_obj=[stats])
                 .where(stats.c.request_guid == request_id))
        rows = connector.get_result(query)
        self.assertEqual(len(rows), 1)
        row = rows[0]
        self.assertEqual(row[Constants.TASK_ID], task_id)
        self.assertEqual(row[Constants.STATUS], ExtractStatus.QUEUED)
        self.assertIn(DummyUser().get_tenants(), row[Constants.INFO])
        self.assertIn(DummyUser().get_guid(), row[Constants.INFO])
        self.assertEqual(row[Constants.REQUEST_GUID], request_id)
def test_generate_metadata(self):
    """generate_metadata must return a non-None metadata object."""
    self.assertIsNotNone(StatsDBConnection.generate_metadata())
def test_connection(self):
    """A stats connection must resolve the udl_stats table by name."""
    table = StatsDBConnection().get_table('udl_stats')
    self.assertEqual(table.name, 'udl_stats')
def test_get_datasource_name(self):
    """The datasource name is the config namespace, regardless of tenant."""
    self.assertEqual(
        StatsDBConnection.get_datasource_name(tenant='dummy'),
        config_namespace)
def test_get_datasource_name_without_tenant(self):
    """Without a tenant, the datasource name is still the config namespace."""
    name = StatsDBConnection.get_datasource_name()
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual for consistency with the sibling tests.
    self.assertEqual(name, config_namespace)
def setUpClass(cls):
    """Bind the test class to the stats datasource and its metadata."""
    super().setUpClass(
        datasource_name=StatsDBConnection.get_datasource_name(),
        metadata=generate_stats_metadata())
def test_get_db_config_prefix(self):
    """The DB config prefix is the config namespace plus a trailing dot."""
    prefix = StatsDBConnection.get_db_config_prefix(tenant='dummy')
    self.assertEqual(prefix, config_namespace + '.')