def setUp(self):
    """Activate GAE service stubs and reset union-test state.

    Resolves project/dataset identifiers, empties every HPO bucket, and
    drops all tables in the input and output datasets so each test starts
    from a clean slate. Also precomputes the CDM primary-key field names
    used for id mapping.
    """
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_app_identity_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_blobstore_stub()
    self.testbed.init_datastore_v3_stub()
    self.project_id = bq_utils.app_identity.get_application_id()
    self.hpo_ids = [NYC_HPO_ID, PITT_HPO_ID]
    self.input_dataset_id = bq_utils.get_dataset_id()
    self.output_dataset_id = bq_utils.get_unioned_dataset_id()
    self._empty_hpo_buckets()
    test_util.delete_all_tables(self.input_dataset_id)
    test_util.delete_all_tables(self.output_dataset_id)
    # TODO Generalize to work for all foreign key references
    # Primary key field of each CDM table that gets id-mapped
    # (comprehension replaces the manual append loop)
    self.mapped_fields = [table + '_id' for table in cdm.tables_to_map()]
    self.implemented_foreign_keys = [
        eu_constants.VISIT_OCCURRENCE_ID, eu_constants.CARE_SITE_ID,
        eu_constants.LOCATION_ID
    ]
def tearDownClass(cls):
    """Drop all EHR and RDR tables, shut down the testbed, and detach log handlers."""
    test_util.delete_all_tables(bq_utils.get_dataset_id())
    test_util.delete_all_tables(bq_utils.get_rdr_dataset_id())
    cls.testbed.deactivate()
    logger.handlers = []
def setUp(self):
    """Resolve project/bucket/dataset identifiers and empty the combined dataset."""
    super(CombineEhrRdrTest, self).setUp()
    self.APP_ID = bq_utils.app_identity.get_application_id()
    self.drc_bucket = gcs_utils.get_drc_bucket()
    # Source datasets feeding the combine step
    self.ehr_dataset_id = bq_utils.get_dataset_id()
    self.rdr_dataset_id = bq_utils.get_rdr_dataset_id()
    # Destination dataset; start it out empty
    self.combined_dataset_id = bq_utils.get_ehr_rdr_dataset_id()
    test_util.delete_all_tables(self.combined_dataset_id)
def setUp(self):
    """Activate GAE service stubs and reset the fake HPO bucket and dataset."""
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Initialize each required service stub (same order as before)
    for service in ('app_identity', 'memcache', 'urlfetch', 'blobstore',
                    'datastore_v3'):
        getattr(self.testbed, 'init_%s_stub' % service)()
    self.hpo_bucket = gcs_utils.get_hpo_bucket(test_util.FAKE_HPO_ID)
    test_util.empty_bucket(self.hpo_bucket)
    test_util.delete_all_tables(bq_utils.get_dataset_id())
def setUp(self):
    """Activate GAE service stubs and clear the EHR dataset and HPO bucket."""
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Initialize each required service stub (same order as before)
    for service in ('app_identity', 'memcache', 'urlfetch', 'blobstore',
                    'datastore_v3'):
        getattr(self.testbed, 'init_%s_stub' % service)()
    self.hpo_bucket = gcs_utils.get_hpo_bucket(FAKE_HPO_ID)
    self.person_table_id = bq_utils.get_table_id(FAKE_HPO_ID, PERSON)
    test_util.delete_all_tables(self.EHR_DATASET_ID)
    self._empty_bucket()
def setUpClass(cls):
    """Activate GAE service stubs, clear the dataset, and stage Achilles results."""
    cls.testbed = testbed.Testbed()
    cls.testbed.activate()
    # Initialize each required service stub (same order as before)
    for service in ('app_identity', 'memcache', 'urlfetch', 'blobstore',
                    'datastore_v3'):
        getattr(cls.testbed, 'init_%s_stub' % service)()
    test_util.delete_all_tables(bq_utils.get_dataset_id())
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(gcs_utils.get_hpo_bucket(test_util.FAKE_HPO_ID))
def setUp(self):
    """Activate GAE service stubs, read env config, and stage NYC test data."""
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Initialize each required service stub (same order as before)
    for service in ('app_identity', 'memcache', 'urlfetch', 'blobstore',
                    'datastore_v3'):
        getattr(self.testbed, 'init_%s_stub' % service)()
    # Configuration comes from the environment
    self.app_id = os.environ.get('APPLICATION_ID')
    self.dataset_id = os.environ.get('BIGQUERY_DATASET_ID')
    self.bucket = os.environ.get('DRC_BUCKET_NAME')
    test_util.empty_bucket(self.bucket)
    test_util.delete_all_tables(self.dataset_id)
    self.load_test_data(hpo_id=HPO_NYC)
def setUp(self):
    """Activate GAE service stubs and reset the union input/output datasets."""
    super(EhrUnionTest, self).setUp()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Initialize each required service stub (same order as before)
    for service in ('app_identity', 'memcache', 'urlfetch', 'blobstore',
                    'datastore_v3'):
        getattr(self.testbed, 'init_%s_stub' % service)()
    self.project_id = bq_utils.app_identity.get_application_id()
    self.hpo_ids = [CHS_HPO_ID, PITT_HPO_ID]
    self.input_dataset_id = bq_utils.get_dataset_id()
    self.output_dataset_id = bq_utils.get_unioned_dataset_id()
    self._empty_hpo_buckets()
    test_util.delete_all_tables(self.input_dataset_id)
    test_util.delete_all_tables(self.output_dataset_id)
def setUpClass(cls):
    """Send log output to stdout, activate GAE stubs, and load EHR/RDR fixtures."""
    # TODO base class this
    logger.level = logging.INFO
    logger.addHandler(logging.StreamHandler(sys.stdout))
    cls.testbed = testbed.Testbed()
    cls.testbed.activate()
    # Initialize each required service stub (same order as before)
    for service in ('app_identity', 'memcache', 'urlfetch', 'blobstore',
                    'datastore_v3'):
        getattr(cls.testbed, 'init_%s_stub' % service)()
    ehr_dataset = bq_utils.get_dataset_id()
    rdr_dataset = bq_utils.get_rdr_dataset_id()
    test_util.delete_all_tables(ehr_dataset)
    test_util.delete_all_tables(rdr_dataset)
    cls.load_dataset_from_files(ehr_dataset, test_util.NYC_FIVE_PERSONS_PATH)
    cls.load_dataset_from_files(rdr_dataset, test_util.RDR_PATH)
def setUpClass(cls):
    """Empty the BigQuery dataset and stage Achilles results in the fake HPO bucket."""
    test_util.delete_all_tables(bq_utils.get_dataset_id())
    test_util.get_synpuf_results_files()
    bucket = gcs_utils.get_hpo_bucket(test_util.FAKE_HPO_ID)
    test_util.populate_achilles(bucket)
def tearDownClass(cls):
    """Drop every table in the test dataset."""
    test_util.delete_all_tables(bq_utils.get_dataset_id())
def tearDown(self):
    """Clear the bucket and EHR dataset, then shut the testbed down."""
    self._empty_bucket()
    test_util.delete_all_tables(self.EHR_DATASET_ID)
    self.testbed.deactivate()
def tearDown(self):
    """Remove all test tables and bucket objects, then shut the testbed down."""
    test_util.delete_all_tables(self.dataset_id)
    test_util.empty_bucket(self.bucket)
    self.testbed.deactivate()
def tearDown(self):
    """Drop test tables, empty the HPO bucket, and shut the testbed down."""
    dataset_id = bq_utils.get_dataset_id()
    test_util.delete_all_tables(dataset_id)
    test_util.empty_bucket(self.hpo_bucket)
    self.testbed.deactivate()
def tearDownClass(cls):
    """Drop every table in the test dataset and shut the testbed down."""
    test_util.delete_all_tables(bq_utils.get_dataset_id())
    cls.testbed.deactivate()
def tearDown(self):
    """Empty HPO buckets, drop input/output tables, and shut the testbed down."""
    self._empty_hpo_buckets()
    for dataset_id in (self.input_dataset_id, self.output_dataset_id):
        test_util.delete_all_tables(dataset_id)
    self.testbed.deactivate()
def tearDown(self):
    """Drop every table created in the combined dataset during the test."""
    combined = self.combined_dataset_id
    test_util.delete_all_tables(combined)