def test_run_export_with_target_bucket(self):
    """run_export with target_bucket writes every report and datasources.json there.

    Seeds achilles fixtures in the HPO bucket, exports into the NYC bucket
    under a dated folder prefix, and verifies the bucket listing plus the
    content of the datasources.json manifest.
    """
    prefix = 'dummy-prefix-2018-03-24/'
    nyc_bucket = gcs_utils.get_hpo_bucket('nyc')
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(self.hpo_bucket, hpo_id=None)

    main.run_export(folder_prefix=prefix, target_bucket=nyc_bucket)

    listing = gcs_utils.list_bucket(nyc_bucket)
    names_in_bucket = [entry['name'] for entry in listing]

    # every known report must land under <prefix><export-prefix>default/
    for report_file in common.ALL_REPORT_FILES:
        expected_name = '%s%sdefault/%s' % (
            prefix, common.ACHILLES_EXPORT_PREFIX_STRING, report_file)
        self.assertIn(expected_name, names_in_bucket)

    # the datasources manifest must exist and describe one 'default' datasource
    datasources_path = prefix + common.ACHILLES_EXPORT_DATASOURCES_JSON
    self.assertIn(datasources_path, names_in_bucket)
    manifest = json.loads(gcs_utils.get_object(nyc_bucket, datasources_path))
    self.assertDictEqual(
        {'datasources': [{'name': 'default',
                          'folder': 'default',
                          'cdmVersion': 5}]},
        manifest)
def setUpClass(cls):
    """One-time fixture: activate App Engine testbed stubs and seed test data.

    Activates the service stubs the code under test depends on, wipes all
    tables in the target BigQuery dataset, then loads the synpuf achilles
    fixtures into the fake HPO bucket.
    """
    cls.testbed = testbed.Testbed()
    cls.testbed.activate()
    # App Engine service stubs, initialized in the same order as before
    for init_stub in (cls.testbed.init_app_identity_stub,
                      cls.testbed.init_memcache_stub,
                      cls.testbed.init_urlfetch_stub,
                      cls.testbed.init_blobstore_stub,
                      cls.testbed.init_datastore_v3_stub):
        init_stub()
    hpo_bucket = gcs_utils.get_hpo_bucket(test_util.FAKE_HPO_ID)
    test_util.delete_all_tables(bq_utils.get_dataset_id())
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(hpo_bucket)
def test_heel_analyses(self):
    """Long-running test: run Achilles Heel and verify result-table row counts.

    Loads the dataset, populates the achilles tables (without heel output),
    runs the heel analyses for FAKE_HPO_ID, then checks the expected number
    of rows in achilles_heel_results and achilles_results_derived.
    """
    self._load_dataset()

    # populate achilles first
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(self.hpo_bucket, include_heel=False)

    achilles_heel.create_tables(FAKE_HPO_ID, True)
    achilles_heel.run_heel(hpo_id=FAKE_HPO_ID)

    cmd = validation.sql_wrangle.qualify_tables(
        'SELECT COUNT(1) FROM %sachilles_heel_results' %
        validation.sql_wrangle.PREFIX_PLACEHOLDER, FAKE_HPO_ID)
    result = bq_utils.query(cmd)
    # assertEqual(expected, actual) — matches the sibling test's convention so
    # failure messages read correctly
    self.assertEqual(ACHILLES_HEEL_RESULTS_COUNT,
                     int(result['rows'][0]['f'][0]['v']))

    cmd = validation.sql_wrangle.qualify_tables(
        'SELECT COUNT(1) FROM %sachilles_results_derived' %
        validation.sql_wrangle.PREFIX_PLACEHOLDER, FAKE_HPO_ID)
    result = bq_utils.query(cmd)
    self.assertEqual(ACHILLES_RESULTS_DERIVED_COUNT,
                     int(result['rows'][0]['f'][0]['v']))
def setUpClass(cls):
    """One-time fixture: clear the BigQuery dataset and load synpuf achilles data."""
    hpo_bucket = gcs_utils.get_hpo_bucket(test_util.FAKE_HPO_ID)
    test_util.delete_all_tables(bq_utils.get_dataset_id())
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(hpo_bucket)
def test_heel_analyses(self):
    """Long-running test: run Achilles Heel, verify counts and re-categorization.

    After loading the dataset and populating achilles (without heel output),
    runs the heel analyses for FAKE_HPO_ID and checks:
      * row counts of achilles_heel_results and achilles_results_derived
      * that every ERROR / WARNING / NOTIFICATION analysis_id reported falls
        within the expected whitelist for the new heel re-categorization.
    """
    self._load_dataset()

    # populate achilles first
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(self.hpo_bucket, include_heel=False)

    achilles_heel.create_tables(FAKE_HPO_ID, True)
    achilles_heel.run_heel(hpo_id=FAKE_HPO_ID)

    cmd = validation.sql_wrangle.qualify_tables(
        'SELECT COUNT(1) FROM %sachilles_heel_results' %
        validation.sql_wrangle.PREFIX_PLACEHOLDER, FAKE_HPO_ID)
    result = bq_utils.query(cmd)
    self.assertEqual(ACHILLES_HEEL_RESULTS_COUNT,
                     int(result['rows'][0]['f'][0]['v']))

    cmd = validation.sql_wrangle.qualify_tables(
        'SELECT COUNT(1) FROM %sachilles_results_derived' %
        validation.sql_wrangle.PREFIX_PLACEHOLDER, FAKE_HPO_ID)
    result = bq_utils.query(cmd)
    self.assertEqual(ACHILLES_RESULTS_DERIVED_COUNT,
                     int(result['rows'][0]['f'][0]['v']))

    # test new heel re-categorization
    errors = [2, 4, 5, 101, 200, 206, 207, 209, 400, 405, 406, 409, 411,
              413, 500, 505, 506, 509, 600, 605, 606, 609, 613, 700, 705,
              706, 709, 711, 713, 715, 716, 717, 800, 805, 806, 809, 813,
              814, 906, 1006, 1609, 1805]
    cmd = validation.sql_wrangle.qualify_tables(
        """SELECT analysis_id FROM {prefix}achilles_heel_results
        WHERE achilles_heel_warning like 'ERROR:%'
        GROUP BY analysis_id""".format(
            prefix=validation.sql_wrangle.PREFIX_PLACEHOLDER), FAKE_HPO_ID)
    result = bq_utils.query(cmd)
    actual_result = [int(row['f'][0]['v']) for row in result['rows']]
    for analysis_id in actual_result:
        self.assertIn(analysis_id, errors)

    warnings = [4, 5, 7, 8, 9, 200, 210, 302, 400, 402, 412, 420, 500,
                511, 512, 513, 514, 515, 602, 612, 620, 702, 712, 720,
                802, 812, 820]
    cmd = validation.sql_wrangle.qualify_tables(
        """SELECT analysis_id FROM {prefix}achilles_heel_results
        WHERE achilles_heel_warning like 'WARNING:%'
        GROUP BY analysis_id""".format(
            prefix=validation.sql_wrangle.PREFIX_PLACEHOLDER), FAKE_HPO_ID)
    result = bq_utils.query(cmd)
    actual_result = [int(row['f'][0]['v']) for row in result['rows']]
    for analysis_id in actual_result:
        self.assertIn(analysis_id, warnings)

    notifications = [101, 103, 105, 114, 115, 118, 208, 301, 410, 610,
                     710, 810, 900, 907, 1000, 1800, 1807]
    cmd = validation.sql_wrangle.qualify_tables(
        """SELECT analysis_id FROM {prefix}achilles_heel_results
        WHERE achilles_heel_warning like 'NOTIFICATION:%' and analysis_id is not null
        GROUP BY analysis_id""".format(
            prefix=validation.sql_wrangle.PREFIX_PLACEHOLDER), FAKE_HPO_ID)
    result = bq_utils.query(cmd)
    # NOTE(review): this looks vacuous — the query response dict is accessed
    # via result['rows'] everywhere else, so a top-level 'analysis_id' key is
    # presumably never present and this always passes; confirm intent.
    self.assertIsNone(result.get('analysis_id', None))
    actual_result = [int(row['f'][0]['v']) for row in result['rows']]
    for analysis_id in actual_result:
        self.assertIn(analysis_id, notifications)
def _test_report_export(self, report):
    """Seed achilles fixtures, then export the given report for FAKE_HPO_ID.

    Returns whatever export.export_from_path produces for the report path.
    """
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(self.hpo_bucket)
    report_path = os.path.join(export.EXPORT_PATH, report)
    return export.export_from_path(report_path, FAKE_HPO_ID)