def test_set_metadata_by_generate(self):
    """set_metadata_by_generate should install exactly the metadata the factory produces."""
    db_util = DbUtil(engine=DummyEngine(), metadata=DummyMetadata())
    component.provideUtility(db_util, IDbUtil, name='unittest')
    connection = DBConnection('unittest')
    connection.set_metadata_by_generate('schema_name', dummyFunc)
    self.assertEqual(connection.get_metadata(), dummyFunc())
def test_get_streaming_result(self):
    """get_streaming_result should yield rows lazily (a generator), honoring fetch_size.

    Covers two cases: a filtered single-row query, and a full-table scan larger
    than the fetch size (1228 rows expected in the fixture data).
    """
    self.assertTrue('dim_inst_hier' in self.get_Metadata().tables, "missing dim_inst_hier")
    with DBConnection() as connector:
        dim_inst_hier = connector.get_table("dim_inst_hier")
        # check number of fields in the table
        # fix: message previously said "dim_district" — wrong table name
        self.assertEqual(10, len(dim_inst_hier.c), "Number of fields in dim_inst_hier")
        query = dim_inst_hier.select(dim_inst_hier.c.district_id == '228')
        results = connector.get_streaming_result(query, fetch_size=1)
        self.assertEqual(type(results), types.GeneratorType)
        for result in results:
            self.assertEqual('228', result['district_id'])
            break
        # test for larger result set than fetch_size
        fact_asmt_outcome_vw = connector.get_table('fact_asmt_outcome_vw')
        query = fact_asmt_outcome_vw.select()
        results = connector.get_streaming_result(query, fetch_size=1)
        self.assertEqual(type(results), types.GeneratorType)
        counter = 0
        for result in results:
            counter = counter + 1
        self.assertEqual(counter, 1228)
def import_csv_dir(resources_dir, datasource_name=''):
    '''
    Import data from csv files.

    Returns True when at least one csv file was loaded, False when no data
    was loaded or loading failed.

    @param resources_dir: the resource directory
    @type resources_dir: string
    @param datasource_name: the data source name
    @type datasource_name: string
    '''
    found_import = False
    with DBConnection(name=datasource_name) as connection:
        metadata = connection.get_metadata()
        # Walk the metadata and load every table that has a matching
        # <table>.csv in the resource directory, inside one transaction:
        # commit if all loads succeed, otherwise roll back.
        with connection.get_transaction() as _:
            for table in metadata.sorted_tables:
                csv_path = os.path.join(resources_dir, table.name + '.csv')
                if os.path.exists(csv_path):
                    # Found a csv file for this table — load it.
                    __import_csv_file(csv_file=csv_path, connection=connection, table=table)
                    found_import = True
    return found_import
def test_check_fields_in_order_incorrect(self):
    """check_fields_in_order should return False for a csv whose columns are out of order.

    Fix: removed the dead `result = False` / `result = True` assignments that
    were immediately overwritten by the real call.
    """
    with DBConnection() as connection:
        csv_file = get_resource_file('wrong_field_order', 'table_a.csv')
        result = check_fields_in_order(connection.get_table('table_a'), csv_file)
    self.assertFalse(result)
def import_data():
    """Open a connection and feed generated rows into the database.

    Fix: the nested `insert` callback must be defined before it is passed to
    `generate_data` — as previously ordered, `generate_data(insert)` referenced
    `insert` before its `def` executed, raising NameError.
    """
    with DBConnection() as connection:
        def insert(table_name, rows):
            # Callback bound to the open connection: resolve the table and
            # bulk-insert the given rows.
            table = connection.get_table(table_name)
            print(rows)  # NOTE(review): looks like leftover debug output — confirm before removing
            connection.execute(table.insert(), rows)
        generate_data(insert)
def test_fact_block_outcome(self):
    """The fact_block_asmt_outcome table must exist and expose 59 columns."""
    tables = self.get_Metadata().tables
    self.assertTrue('fact_block_asmt_outcome' in tables, "missing fact_block_asmt_outcome")
    with DBConnection() as connector:
        fact_table = connector.get_table("fact_block_asmt_outcome")
        # Check number of fields in the table
        self.assertEqual(59, len(fact_table.c), "Number of fields in fact_block_asmt_outcome")
def test_student_registration(self):
    """The student_reg table must exist and expose 40 columns.

    Fix: assertion message said "student_registration", which is not the table
    under test; sibling tests consistently use the actual table name.
    """
    self.assertTrue('student_reg' in self.get_Metadata().tables, "missing student_reg")
    with DBConnection() as connector:
        fact_student_reg = connector.get_table("student_reg")
        # Check number of fields in the table
        self.assertEqual(40, len(fact_student_reg.c), "Number of fields in student_reg")
def test_dim_inst_hier_type(self):
    """dim_inst_hier must exist, have 10 columns, and return district 228 on lookup.

    Fix: assertion message said "dim_district" — a copy-paste from another
    test; the table under test is dim_inst_hier.
    """
    self.assertTrue('dim_inst_hier' in self.get_Metadata().tables, "missing dim_inst_hier")
    with DBConnection() as connector:
        dim_inst_hier = connector.get_table("dim_inst_hier")
        # check number of fields in the table
        self.assertEqual(10, len(dim_inst_hier.c), "Number of fields in dim_inst_hier")
        query = dim_inst_hier.select(dim_inst_hier.c.district_id == '228')
        result = connector.get_result(query)
        self.assertEqual('228', result[0]['district_id'])
def check_datasource(request):
    '''
    GET request that executes a Select 1 and returns status of 200 if database returns results

    :param request: Pyramid request object
    '''
    error_msg = ''
    results = None
    try:
        # Probe every configured datasource; `results` ends up holding the
        # last datasource's answer, and any failure aborts the whole check.
        for datasource_name in get_data_source_names():
            with DBConnection(name=datasource_name) as connector:
                results = connector.get_result(select([1]))
    except Exception as e:
        error_msg = str(e)
        results = None
    if results and len(results) > 0:
        return HTTPOk()
    logger.error("Heartbeat failed at database connection. %s", error_msg)
    return HTTPServerError()
def load_fact_asmt_outcome(datasource_name=''):
    '''
    load data from fact_asmt_outcome_vw to fact_asmt_outcome
    return True: load data successfully
           False: no data loaded or failed to load data

    @param datasource_name: the data source name
    @type datasource_name: string
    '''
    # Fix: __success was initialized to False and never set, so the function
    # always reported failure even after the INSERT committed. It is now set
    # to True once the transaction block exits cleanly.
    __success = False
    with DBConnection(name=datasource_name) as connection:
        metadata = connection.get_metadata()
        tables = [t.name for t in metadata.sorted_tables]
        if "fact_asmt_outcome" not in tables:
            # Nothing to load into; treated as success by design.
            return True
        # Schema-qualify both table names when a schema is configured.
        fao = metadata.schema + ".fact_asmt_outcome" if metadata.schema is not None else "fact_asmt_outcome"
        fao_vw = metadata.schema + ".fact_asmt_outcome_vw" if metadata.schema is not None else "fact_asmt_outcome_vw"
        # Copy every row from the _vw staging table inside one transaction:
        # commit on success, roll back on any error.
        with connection.get_transaction() as _:
            connection.execute(
                "INSERT INTO " + " " + fao + " " +
                ("(SELECT " if metadata.schema is not None else "SELECT ") +
                " asmt_outcome_vw_rec_id, asmt_rec_id, student_rec_id, inst_hier_rec_id, " +
                " asmt_guid, student_id, state_code, district_id, school_id, " +
                " where_taken_id, where_taken_name, asmt_grade, enrl_grade, " +
                " date_taken, date_taken_day, " +
                " date_taken_month, date_taken_year, asmt_score, asmt_score_range_min, " +
                " asmt_score_range_max, asmt_perf_lvl, asmt_claim_1_score, " +
                " asmt_claim_1_score_range_min, asmt_claim_1_score_range_max, " +
                " asmt_claim_1_perf_lvl, asmt_claim_2_score, asmt_claim_2_score_range_min, " +
                " asmt_claim_2_score_range_max, asmt_claim_2_perf_lvl, asmt_claim_3_score, " +
                " asmt_claim_3_score_range_min, asmt_claim_3_score_range_max, " +
                " asmt_claim_3_perf_lvl, asmt_claim_4_score, asmt_claim_4_score_range_min, " +
                " asmt_claim_4_score_range_max, asmt_claim_4_perf_lvl, acc_asl_video_embed, " +
                " acc_print_on_demand_items_nonembed, acc_braile_embed, acc_closed_captioning_embed, " +
                " acc_text_to_speech_embed, acc_abacus_nonembed, " +
                " acc_alternate_response_options_nonembed, acc_calculator_nonembed, " +
                " acc_multiplication_table_nonembed, acc_print_on_demand_nonembed, " +
                " acc_read_aloud_nonembed, acc_scribe_nonembed, acc_speech_to_text_nonembed, " +
                " acc_streamline_mode, acc_noise_buffer_nonembed, complete, from_date, to_date, rec_status, batch_guid, administration_condition" +
                " FROM " + " " + fao_vw + " " +
                (")" if metadata.schema is not None else ""))
        __success = True
    return __success
def test_retries(self):
    """execute() against the dummy engine should ultimately raise DatabaseError."""
    component.provideUtility(
        DbUtil(engine=DummyEngine(), metadata=DummyMetadata()),
        IDbUtil, name='unittest')
    connection = DBConnection('unittest')
    with self.assertRaises(DatabaseError):
        connection.execute('query')
def test_get_metadata(self):
    """get_metadata() should hand back the DummyMetadata registered with the utility."""
    component.provideUtility(
        DbUtil(engine=DummyEngine(), metadata=DummyMetadata()),
        IDbUtil, name='unittest')
    connection = DBConnection('unittest')
    self.assertIsInstance(connection.get_metadata(), DummyMetadata)
def test_extract_stats(self):
    """The extract_stats table must exist in the stats datasource with 6 columns."""
    self.assertTrue('extract_stats' in self.get_Metadata().tables)
    with DBConnection(name='stats') as connector:
        stats_table = connector.get_table("extract_stats")
        self.assertEqual(6, len(stats_table.c))
def test_dim_inst_hier_type(self):
    """The udl_stats table must exist in the stats datasource with 15 columns."""
    # NOTE(review): method name says dim_inst_hier but the body checks
    # udl_stats — looks like a copy-paste name; confirm before renaming
    # (the name is part of the discovered test suite).
    self.assertTrue('udl_stats' in self.get_Metadata().tables)
    with DBConnection(name='stats') as connector:
        stats_table = connector.get_table("udl_stats")
        self.assertEqual(15, len(stats_table.c))