def test_data_metrics(self):
    self.step("Get datasets and check datasetCount, privateDatasets, publicDatasets metrics are correct")
    public_datasets = []
    private_datasets = []
    datasets = DataSet.api_get_list([TestData.core_org])
    for table in datasets:
        if table.is_public:
            public_datasets.append(table)
        else:
            private_datasets.append(table)
    dashboard_datasets_count = TestData.core_org.metrics['datasetCount']
    dashboard_private_datasets = TestData.core_org.metrics['privateDatasets']
    dashboard_public_datasets = TestData.core_org.metrics['publicDatasets']
    metrics_are_equal = (
        len(datasets) == dashboard_datasets_count and
        len(private_datasets) == dashboard_private_datasets and
        len(public_datasets) == dashboard_public_datasets)
    self.assertTrue(
        metrics_are_equal,
        "\nDatasets count: {} - expected: {}"
        "\nPrivate datasets: {} - expected: {}"
        "\nPublic datasets: {} - expected: {}".format(
            dashboard_datasets_count, len(datasets),
            dashboard_private_datasets, len(private_datasets),
            dashboard_public_datasets, len(public_datasets)))
def create_test_data_sets(cls, request, test_org, class_context, test_data_urls):
    step("Create new transfer for each category")
    cls.transfers = []
    for category in DataSet.CATEGORIES:
        cls.transfers.append(Transfer.api_create(
            class_context, category, is_public=False,
            org_guid=test_org.guid,
            source=test_data_urls.test_transfer.url))
    for category in DataSet.CATEGORIES:
        cls.transfers.append(Transfer.api_create(
            class_context, category, is_public=True,
            org_guid=test_org.guid,
            source=test_data_urls.test_transfer.url))
    step("Ensure that transfers are finished")
    for transfer in cls.transfers:
        transfer.ensure_finished()
    step("Get all data sets in the test org")
    cls.transfer_titles = [t.title for t in cls.transfers]
    dataset_list = DataSet.api_get_list(org_guid_list=[test_org.guid])
    cls.datasets = [d for d in dataset_list if d.title in cls.transfer_titles]
def check_transfer_and_dataset_are_visible_in_test_org(transfer, dataset, test_org):
    step("Check transfer is visible on list of transfers")
    transfers = Transfer.api_get_list(org_guid_list=[test_org.guid])
    assert transfer in transfers
    step("Check dataset is visible on list of datasets")
    datasets = DataSet.api_get_list(org_guid_list=[test_org.guid])
    assert dataset in datasets
def _filter_datasets(self, org, filters=(), only_private=False, only_public=False, query=""):
    ds_list = DataSet.api_get_list(org_guid_list=[org.guid], query=query, filters=filters,
                                   only_private=only_private, only_public=only_public)
    return [d for d in ds_list if d in self.datasets]
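# Hypothetical usage sketch (not from the original suite): a filter test built on the
# _filter_datasets helper above might look like this. DataSet.CATEGORIES comes from the
# fixtures earlier in this section; the exact shape of the `filters` payload and the
# `category` attribute on DataSet are assumptions, not a documented API.
def test_get_datasets_filtered_by_category(self, test_org):
    category = DataSet.CATEGORIES[0]
    step("Get datasets filtered by category '{}'".format(category))
    filtered_datasets = self._filter_datasets(test_org, filters=({"category": [category]},))
    expected_datasets = [d for d in self.datasets if d.category == category]
    assert sorted(d.title for d in filtered_datasets) == sorted(d.title for d in expected_datasets)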
def test_transfer_and_dataset_are_not_visible_in_other_org(self, context, core_org, test_org, test_data_urls):
    step("Create transfer and get dataset")
    transfer = self._create_transfer(context, category=self.DEFAULT_CATEGORY, org_guid=test_org.guid,
                                     test_data_urls=test_data_urls)
    dataset = DataSet.api_get_matching_to_transfer(org_guid=test_org.guid, transfer_title=transfer.title)
    step("Check transfer is not visible in the other organization")
    transfers = Transfer.api_get_list(org_guid_list=[core_org.guid])
    assert transfer not in transfers
    step("Check dataset is not visible in the other organization")
    datasets = DataSet.api_get_list(org_guid_list=[core_org.guid])
    assert dataset not in datasets
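# The _create_transfer helper called above is not defined in this section. A minimal
# sketch of what it might do, assuming the Transfer.api_create / ensure_finished calls
# used by the fixtures above; DEFAULT_SOURCE is a hypothetical fallback attribute
# standing in for whatever default source URL the real suite uses.
def _create_transfer(self, context, category, org_guid, test_data_urls=None):
    step("Create new transfer and wait until it finishes")
    source = (test_data_urls.test_transfer.url if test_data_urls is not None
              else self.DEFAULT_SOURCE)
    transfer = Transfer.api_create(context, category, is_public=False,
                                   org_guid=org_guid, source=source)
    transfer.ensure_finished()
    return transfer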
def cleanup_test_data():
    core_org_guid = core_org().guid
    test_object_models = [
        {'name': 'data set', 'objects_list': DataSet.api_get_list(), 'name_attribute': 'title'},
        {'name': 'transfer', 'objects_list': Transfer.api_get_list(), 'name_attribute': 'title'},
        {'name': 'user', 'objects_list': User.get_list_in_organization(org_guid=core_org_guid),
         'name_attribute': 'username'},
        {'name': 'invitation', 'objects_list': Invitation.api_get_list(), 'name_attribute': 'username'},
        {'name': 'application', 'objects_list': Application.get_list(), 'name_attribute': 'name'},
        {'name': 'service', 'objects_list': ServiceInstance.get_list(), 'name_attribute': 'name'},
        {'name': 'offering', 'objects_list': ServiceOffering.get_list(), 'name_attribute': 'label'},
        {'name': 'scoring engine model', 'objects_list': ScoringEngineModel.get_list(org_guid=core_org_guid),
         'name_attribute': 'name'},
    ]
    for model in test_object_models:
        _cleanup_test_data(**model)
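# The _cleanup_test_data helper driven by the loop above is not shown here. A minimal
# sketch under the assumption that each object exposes a cleanup() method (as the
# transfer/dataset tests in this section use) and that is_test_object_name() identifies
# test-generated names; the logging wording is illustrative.
def _cleanup_test_data(name, objects_list, name_attribute):
    test_objects = [o for o in objects_list if is_test_object_name(getattr(o, name_attribute))]
    logger.info("Deleting {} test {}(s)".format(len(test_objects), name))
    for test_object in test_objects:
        try:
            test_object.cleanup()
        except Exception as e:
            logger.warning("Failed to delete {} {}: {}".format(name, getattr(test_object, name_attribute), e))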
def from_reference(cls, org_guid):
    from modules.tap_object_model import Application, User, Organization, ServiceOffering, ServiceInstance, DataSet
    metrics = []
    app_down_states = [TapEntityState.FAILURE, TapEntityState.STOPPED]
    apps = Application.get_list()
    apps_count = len(apps)
    apps_running_count = len([app for app in apps if app.state == TapEntityState.RUNNING])
    apps_down_count = len([app for app in apps if app.state in app_down_states])
    user_count = len(User.get_all_users(org_guid))
    orgs_count = len(Organization.get_list())
    services_count = len(ServiceOffering.get_list())
    services_inst = len([instance for instance in ServiceInstance.get_list()
                         if instance.state == TapEntityState.RUNNING])
    nodes = KubernetesNode.get_list()
    for node in nodes:
        metrics.append(node.get_metrics())
    cpu_usage_org = cls.parse_cpu(metrics) / (cls.CPU_RATE_FOR_REF * cls.NODE)
    cpu_usage_platform = cls.parse_cpu(metrics) / (cls.CPU_RATE_FOR_REF * cls.NODE)
    memory_usage_org = cls.parse_memory(metrics)
    memory_usage_platform = cls.parse_memory(metrics)
    datasets = DataSet.api_get_list(org_guid_list=[org_guid])
    return cls(apps=apps_count, apps_running=apps_running_count, apps_down=apps_down_count,
               users_org=user_count, users_platform=user_count, orgs=orgs_count,
               services=services_count, service_instances=services_inst, service_usage=services_inst,
               cpu_usage_org=cpu_usage_org, memory_usage_org=memory_usage_org,
               cpu_usage_platform=cpu_usage_platform, memory_usage_platform=memory_usage_platform,
               datasets=datasets)
def test_get_data_sets_from_another_org(self, context):
    step("Create another test organization")
    org = Organization.create(context)
    step("Retrieve datasets from the new org")
    public_datasets = [ds for ds in self.datasets if ds.is_public]
    private_datasets = [ds for ds in self.datasets if not ds.is_public]
    datasets = DataSet.api_get_list(org_guid_list=[org.guid])
    step("Check that no private data sets are visible in another org")
    found_private_ds = [ds for ds in private_datasets if ds in datasets]
    assert found_private_ds == [], "Private datasets from another org returned"
    step("Check that all public data sets are visible in another org")
    missing_public_ds = [ds for ds in public_datasets if ds not in datasets]
    assert missing_public_ds == [], "Not all public data sets from another org returned"
def test_0_get_atk_model(self, atk_virtualenv, core_atk_app, core_org, space_shuttle_sources,
                         space_shuttle_model_input):
    step("Check if there already is an atk model generated")
    data_sets = DataSet.api_get_list(org_list=[core_org])
    atk_model_dataset = next((ds for ds in data_sets if ds.title == self.ATK_MODEL_NAME), None)
    if atk_model_dataset is not None:
        self.__class__.atk_model_uri = atk_model_dataset.target_uri
    else:
        step("Install atk client package")
        atk_url = core_atk_app.urls[0]
        atk_virtualenv.pip_install(ATKtools.get_atk_client_url(atk_url))
        step("Generate new atk model")
        atk_model_generator_path = os.path.join(space_shuttle_sources, RepoPath.space_shuttle_model_generator)
        atk_generator_output = atk_virtualenv.run_atk_script(atk_model_generator_path, atk_url,
                                                             positional_arguments=[space_shuttle_model_input],
                                                             use_uaa=False)
        pattern = r"(hdfs://[a-zA-Z0-9/\-_]*\.tar)"
        self.__class__.atk_model_uri = re.search(pattern, atk_generator_output).group()
    assert self.atk_model_uri is not None, "Model hdfs path not found"
def test_submit_and_delete_transfer(self, context, test_org):
    """
    <b>Description:</b>
    Check transfer creation from a URL and deletion. Also check dataset deletion.

    <b>Input data:</b>
    1. organization guid
    2. transfer category
    3. URL with a source file

    <b>Expected results:</b>
    Transfer is successfully created and deleted. Dataset is successfully deleted.

    <b>Steps:</b>
    1. Create transfer.
    2. Retrieve corresponding dataset.
    3. Delete transfer.
    4. Check that transfer disappeared from the transfer list.
    5. Delete dataset.
    6. Check that dataset disappeared from the dataset list.
    """
    transfer = self._create_transfer(context, category=self.DEFAULT_CATEGORY, org_guid=test_org.guid)
    step("Get the transfer and check it is the same as the submitted one")
    retrieved_transfer = Transfer.api_get(transfer.id)
    assert transfer == retrieved_transfer, "The transfer is not the same"
    dataset = DataSet.api_get_matching_to_transfer(org_guid=test_org.guid, transfer_title=transfer.title)
    TestSubmitTransfer.check_transfer_and_dataset_are_visible_in_test_org(
        transfer=transfer, dataset=dataset, test_org=test_org)
    step("Delete transfer")
    transfer.cleanup()
    step("Check transfer is not visible on list of transfers")
    transfers = Transfer.api_get_list(org_guid_list=[test_org.guid])
    assert transfer not in transfers
    step("Delete dataset")
    dataset.cleanup()
    step("Check dataset is not visible on list of datasets")
    datasets = DataSet.api_get_list(org_guid_list=[test_org.guid])
    assert dataset not in datasets
def create_test_data_sets(cls, request, test_org, class_context, test_data_urls):
    step("Create new transfer for each category")
    cls.transfers = []
    for category in DataSet.CATEGORIES:
        for dataset_privacy in DataSet.IS_PUBLIC:
            cls.transfer = Transfer.api_create(class_context, category, is_public=dataset_privacy,
                                               org_guid=test_org.guid,
                                               source=test_data_urls.test_transfer.url)
            cls.transfers.append(cls.transfer)
            cls.transfer.ensure_finished()
    step("Get all data sets in the test org")
    cls.transfer_titles = [t.title for t in cls.transfers]
    dataset_list = DataSet.api_get_list(org_guid_list=[test_org.guid])
    cls.datasets = [d for d in dataset_list if d.title in cls.transfer_titles]
def transfer_flow(transfer, core_org):
    step("Check that the transfer is finished")
    transfer.ensure_finished()
    step("Check that the transfer is on the list")
    transfers = Transfer.api_get_list(org_guid_list=[core_org.guid])
    assert transfer in transfers
    step("Get the data set matching the transfer")
    data_set = DataSet.api_get_matching_to_transfer(org_guid=core_org.guid, transfer_title=transfer.title)
    step("Delete the data set")
    data_set.api_delete()
    step("Check that the data set was deleted")
    data_sets = DataSet.api_get_list(org_guid_list=[core_org.guid])
    assert data_set not in data_sets
    step("Delete the transfer")
    transfer.api_delete()
    step("Check that the transfer was deleted")
    transfers = Transfer.api_get_list(org_guid_list=[core_org.guid])
    assert transfer not in transfers
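# Hypothetical caller sketch: transfer_flow() above bundles the shared assertions, so a
# test only needs to create the transfer first. The fixture names (context, core_org,
# test_data_urls) mirror the ones used elsewhere in this section; the test name itself
# is illustrative.
def test_transfer_flow_from_url(context, core_org, test_data_urls):
    step("Create new transfer from url")
    transfer = Transfer.api_create(context, DataSet.CATEGORIES[0], is_public=False,
                                   org_guid=core_org.guid,
                                   source=test_data_urls.test_transfer.url)
    transfer_flow(transfer, core_org)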
def test_delete_dataset(self, test_org, dataset):
    """
    <b>Description:</b>
    Check that dataset can be deleted.

    <b>Input data:</b>
    1. dataset
    2. organization id

    <b>Expected results:</b>
    Test passes when dataset can be deleted and is not on the dataset list.

    <b>Steps:</b>
    1. Delete dataset.
    2. Check that deleted dataset is not on the dataset list.
    """
    step("Delete the data set")
    dataset.api_delete()
    step("Get data set list and check the deleted one is not on it")
    datasets = DataSet.api_get_list(org_guid_list=[test_org.guid])
    assert dataset not in datasets
def test_delete_dataset(self, test_org, dataset):
    step("Delete the data set")
    dataset.api_delete()
    step("Get data set list and check the deleted one is not on it")
    datasets = DataSet.api_get_list(org_list=[test_org])
    assert dataset not in datasets
dbs = hive.exec_query("show databases;")
dbs = filter(lambda name: is_test_object_name(name), dbs)
dbs = list(dbs)
if dbs:
    logger.info("Removing databases:\n{}".format("\n".join(dbs)))
    dbs = map(lambda name: "DROP DATABASE {} CASCADE;".format(name), dbs)
    dbs = "".join(dbs)
    hive.exec_query(dbs)
else:
    logger.info("No database to remove.")


if __name__ == "__main__":
    all_data_sets = DataSet.api_get_list()
    test_data_sets = [x for x in all_data_sets if is_test_object_name(x.title)]
    log_deleted_objects(test_data_sets, "data set")
    fixtures.tear_down_test_objects(test_data_sets)

    all_transfers = Transfer.api_get_list()
    test_transfers = [x for x in all_transfers if is_test_object_name(x.title)]
    log_deleted_objects(test_transfers, "transfer")
    fixtures.tear_down_test_objects(test_transfers)

    all_users = User.cf_api_get_all_users()
    test_users = [x for x in all_users if is_test_object_name(x.username)]
    log_deleted_objects(test_users, "user")
    fixtures.tear_down_test_objects(test_users)

    all_pending_invitations = Invitation.api_get_list()