def _get_or_create_uuid_from_file(path, file_type):
    """Extract the UUID from the DataSet or Report, or assign a new UUID.

    :param path: Path to file
    :param file_type: FileType-like object; dispatched on its ``file_type_id``
    :rtype: str
    :return: uuid string
    """
    if file_type.file_type_id == FileTypes.REPORT.file_type_id:
        return _get_report_uuid(path)
    elif file_type.file_type_id in FileTypes.ALL_DATASET_TYPES():
        return getDataSetUuid(path)
    else:
        # str() so the fallback matches the other branches (and the documented
        # :rtype:); uuid.uuid4() by itself returns a UUID object, not a string.
        return str(uuid.uuid4())
def get_dataset_metadata(path):
    """Return DataSetMetaData read from the root element of a DataSet XML.

    Only the first (root) element is inspected for its UniqueId and MetaType
    attributes; iterparse lets us stop without reading the whole document.

    NOTE(review): this function is defined twice in this file with identical
    logic — the later definition shadows this one; consolidate to a single copy.

    :param path: Path to DataSet XML
    :raises ValueError: if the root MetaType is not a known dataset type
    :return: DataSetMetaData
    """
    # Named ds_uuid (not "uuid") to avoid shadowing the uuid module used
    # elsewhere in this file.
    ds_uuid = mt = None
    for _event, element in ET.iterparse(path, events=("start", )):
        ds_uuid = element.get("UniqueId")
        mt = element.get("MetaType")
        break  # root element only
    # Dict membership works directly on the mapping; no .keys() needed.
    if mt in FileTypes.ALL_DATASET_TYPES():
        return DataSetMetaData(ds_uuid, mt)
    else:
        raise ValueError("Unsupported dataset type '{t}'".format(t=mt))
def get_dataset_metadata(path):
    """Return DataSetMetaData or raise ValueError if the dataset XML root
    lacks a supported MetaType.

    Only the first (root) element is inspected for its UniqueId and MetaType
    attributes; iterparse lets us stop without reading the whole document.

    NOTE(review): an identical definition of this function appears earlier in
    this file; this one shadows it — consolidate to a single copy.

    :param path: Path to DataSet XML
    :raises: ValueError
    :return: DataSetMetaData
    """
    # Named ds_uuid (not "uuid") to avoid shadowing the uuid module used
    # elsewhere in this file.
    ds_uuid = mt = None
    for _event, element in ET.iterparse(path, events=("start", )):
        ds_uuid = element.get("UniqueId")
        mt = element.get("MetaType")
        break  # root element only
    # Dict membership works directly on the mapping; no .keys() needed.
    if mt in FileTypes.ALL_DATASET_TYPES():
        return DataSetMetaData(ds_uuid, mt)
    else:
        raise ValueError("Unsupported dataset type '{t}'".format(t=mt))
def test_datastore_dataset_file_uuid(self):
    """Each DataSet XML referenced by the datastore must carry the same
    UUID as its datastore file entry; skip if the datastore has none."""
    dataset_type_ids = FileTypes.ALL_DATASET_TYPES().keys()
    datastore = DataStore.load_from_json(_to_ds_json(self.job_dir))
    checked = 0
    for entry in datastore.files.values():
        # Only DataSet-typed entries are comparable; skip everything else.
        if entry.file_type_id not in dataset_type_ids:
            continue
        xml_uuid = getDataSetUuid(entry.path)
        self.assertEqual(
            xml_uuid, entry.uuid,
            "{p}: {u1} != {u2}".format(
                p=entry.path, u1=xml_uuid, u2=entry.uuid))
        checked += 1
    if checked == 0:
        raise unittest.SkipTest(
            "Warning. No DataSet XML files in datastore.")