def test_get_dataset_uuid(self):
    """getDataSetUuid must read a dataset's UUID from its XML and return None for non-XML content."""
    ds = SubreadSet(upstreamdata.getUnalignedBam(), strict=True)
    ds_file = tempfile.NamedTemporaryFile(suffix=".subreadset.xml").name
    ds.write(ds_file)
    # Freshly written dataset: extracted UUID matches the in-memory one.
    assert getDataSetUuid(ds_file) == ds.uuid
    # Clobber the file with garbage; extraction should fail gracefully.
    with open(ds_file, "w") as out:
        out.write("hello world!")
    assert getDataSetUuid(ds_file) is None
def test_get_dataset_uuid(self):
    """getDataSetUuid must read a dataset's UUID from its XML and return None for non-XML content."""
    ds = SubreadSet(upstreamdata.getUnalignedBam(), strict=True)
    ds_file = tempfile.NamedTemporaryFile(suffix=".subreadset.xml").name
    ds.write(ds_file)
    uuid = getDataSetUuid(ds_file)
    self.assertEqual(uuid, ds.uuid)
    # Overwrite with non-XML content; extraction should return None.
    with open(ds_file, "w") as out:
        out.write("hello world!")
    uuid = getDataSetUuid(ds_file)
    # assertIsNone is the idiomatic check and gives a clearer failure
    # message than assertEqual(uuid, None).
    self.assertIsNone(uuid)
def _get_or_create_uuid_from_file(path, file_type):
    """
    Extract the uuid from the DataSet or Report, or assign a new UUID

    :param path: Path to file
    :param file_type: FileType instance whose file_type_id selects the
        extraction strategy (report, dataset, or other)
    :rtype: str
    :return: uuid string
    """
    if file_type.file_type_id == FileTypes.REPORT.file_type_id:
        return _get_report_uuid(path)
    elif file_type.file_type_id in FileTypes.ALL_DATASET_TYPES():
        return getDataSetUuid(path)
    else:
        # Bug fix: return a string, consistent with the other branches and
        # the documented :rtype:; previously a uuid.UUID object leaked out.
        return str(uuid.uuid4())
def test_datastore_dataset_file_uuid(self):
    """Each DataSet entry in the datastore must carry the UUID stored in its XML file."""
    type_names = ("DS_SUBREADS_H5", "DS_SUBREADS", "DS_CCS", "DS_REF",
                  "DS_ALIGN", "DS_CONTIG", "DS_BARCODE", "DS_ALIGN_CCS")
    dataset_type_ids = {getattr(FileTypes, name).file_type_id
                        for name in type_names}
    datastore_path = os.path.join(self.job_dir, "workflow", "datastore.json")
    with open(datastore_path, 'r') as r:
        datastore = json.loads(r.read())
    n_checked = 0
    for file_info in datastore['files']:
        if file_info['fileTypeId'] not in dataset_type_ids:
            continue
        ds_uuid = getDataSetUuid(file_info['path'])
        self.assertEqual(
            ds_uuid, file_info['uniqueId'],
            "{p}: {u1} != {u2}".format(
                p=file_info['path'], u1=ds_uuid, u2=file_info['uniqueId']))
        n_checked += 1
    if n_checked == 0:
        raise unittest.SkipTest("No DataSet XML files in datastore.")
def test_datastore_dataset_file_uuid(self):
    """Test that the DataStore file and the Underlying Report have the same UUID"""
    dataset_type_ids = FileTypes.ALL_DATASET_TYPES().keys()
    datastore = DataStore.load_from_json(_to_ds_json(self.job_dir))
    n_checked = 0
    for ds_file in datastore.files.values():
        if ds_file.file_type_id not in dataset_type_ids:
            continue
        # UUID parsed out of the XML must match the datastore record.
        extracted = getDataSetUuid(ds_file.path)
        self.assertEqual(
            extracted, ds_file.uuid,
            "{p}: {u1} != {u2}".format(
                p=ds_file.path, u1=extracted, u2=ds_file.uuid))
        n_checked += 1
    if n_checked == 0:
        raise unittest.SkipTest(
            "Warning. No DataSet XML files in datastore.")
def test_datastore_dataset_file_uuid(self):
    """Verify each DataSet entry's recorded uniqueId matches its XML file's UUID."""
    wanted = frozenset(
        getattr(FileTypes, t).file_type_id
        for t in ("DS_SUBREADS_H5", "DS_SUBREADS", "DS_CCS", "DS_REF",
                  "DS_ALIGN", "DS_CONTIG", "DS_BARCODE", "DS_ALIGN_CCS"))
    datastore_json = os.path.join(self.job_dir, "workflow", "datastore.json")
    with open(datastore_json, 'r') as r:
        datastore = json.loads(r.read())
    # Only DataSet-typed entries are checked; skip if there are none.
    matches = [f for f in datastore['files'] if f['fileTypeId'] in wanted]
    if not matches:
        raise unittest.SkipTest("No DataSet XML files in datastore.")
    for file_info in matches:
        actual = getDataSetUuid(file_info['path'])
        self.assertEqual(
            actual, file_info['uniqueId'],
            "{p}: {u1} != {u2}".format(p=file_info['path'], u1=actual,
                                       u2=file_info['uniqueId']))