def _verify_dataset_in_list():
    """Confirm the imported dataset is visible in the dataset list.

    Resolves the service endpoint for the dataset's metatype, fetches
    every dataset registered under that type, and raises JobExeError
    when the expected UUID is not among them (which may indicate XML
    schema errors).
    """
    ft = FileTypes.ALL()[dataset_meta_type.metatype]
    endpoint = _get_endpoint_or_raise(ft)
    # UUIDs of every dataset currently registered for this type
    known_uuids = {d['uuid'] for d in self._get_datasets_by_type(endpoint)}
    if dataset_meta_type.uuid in known_uuids:
        return
    msg = ("Dataset {u} was imported but does not "
           "appear in the dataset list; this may "
           "indicate XML schema errors.").format(u=dataset_meta_type.uuid)
    raise JobExeError(msg)
def _get_or_create_uuid_from_file(path, file_type):
    """
    Extract the uuid from the DataSet or Report, or assign a new UUID

    :param path: Path to file
    :param file_type: FileType whose file_type_id selects how the UUID
        is extracted
    :rtype: str
    :return: uuid string
    """
    if file_type.file_type_id == FileTypes.REPORT.file_type_id:
        return _get_report_uuid(path)
    elif file_type.file_type_id in FileTypes.ALL_DATASET_TYPES():
        return getDataSetUuid(path)
    else:
        # BUG FIX: uuid.uuid4() returns a uuid.UUID object, which broke
        # the documented ":rtype: str" contract and was inconsistent with
        # the string values returned by the other two branches.
        return str(uuid.uuid4())
def get_dataset_metadata(path):
    """
    Parse the root element of a DataSet XML file and return its metadata.

    Only the root element is inspected (the parse loop breaks after the
    first "start" event), so large files are not fully read.

    :param path: Path to DataSet XML file
    :raises ValueError: if the root MetaType is not a supported dataset type
    :return: DataSetMetaData
    """
    # Renamed from `uuid` to avoid shadowing the stdlib uuid module.
    ds_uuid = mt = None
    for _event, element in ET.iterparse(path, events=("start", )):
        ds_uuid = element.get("UniqueId")
        mt = element.get("MetaType")
        break
    if mt in FileTypes.ALL_DATASET_TYPES().keys():
        return DataSetMetaData(ds_uuid, mt)
    else:
        raise ValueError("Unsupported dataset type '{t}'".format(t=mt))
def get_dataset_metadata(path):
    """
    Returns DataSetMeta data or raises ValueError if dataset XML
    is missing the required UniqueId and MetaType values.

    :param path: Path to DataSet XML
    :raises: ValueError
    :return: DataSetMetaData
    """
    uuid = mt = None
    # Only the root element carries the attributes we need, so stop
    # after the first "start" event.
    for _, root in ET.iterparse(path, events=("start", )):
        uuid = root.get("UniqueId")
        mt = root.get("MetaType")
        break
    if mt not in FileTypes.ALL_DATASET_TYPES().keys():
        raise ValueError("Unsupported dataset type '{t}'".format(t=mt))
    return DataSetMetaData(uuid, mt)
def test_datastore_dataset_file_uuid(self):
    """Test that the DataStore file and the Underlying Report have the same UUID"""
    dataset_type_ids = FileTypes.ALL_DATASET_TYPES().keys()
    ds = DataStore.load_from_json(_to_ds_json(self.job_dir))

    n_tested = 0
    for ds_file in ds.files.values():
        if ds_file.file_type_id in dataset_type_ids:
            path = ds_file.path
            dsf_uuid = ds_file.uuid
            # Renamed from `uuid` to avoid shadowing the stdlib uuid module.
            xml_uuid = getDataSetUuid(path)
            self.assertEqual(
                xml_uuid, dsf_uuid,
                "{p}: {u1} != {u2}".format(p=path, u1=xml_uuid, u2=dsf_uuid))
            n_tested += 1

    # No dataset XML files at all means the check was vacuous; skip so
    # the suite reports it rather than silently passing.
    if n_tested == 0:
        raise unittest.SkipTest(
            "Warning. No DataSet XML files in datastore.")
def test_is_valid(self):
    """A registered FileType's id must be recognized as valid."""
    self.assertTrue(
        FileTypes.is_valid_id(FileTypes.DS_ALIGN.file_type_id))
def test_is_valid(self):
    """A registered FileType's id must be recognized as valid."""
    file_type = FileTypes.DS_ALIGN
    is_valid = FileTypes.is_valid_id(file_type.file_type_id)
    assert is_valid