def setUp(self):
    """Load the GE screenshot dataset (dicom, no json sidecar) before each test."""
    # TODO: remake ge_dcm_screenshot with json
    archive = os.path.join(DATADIR, 'ge_dcm_sc_screenshot.tgz')
    self.ds = scidata.parse(archive, load_data=True, filetype='dicom', ignore_json=True)
def test_parse(self):
    """Parse one dicom as header.

    NIMSDicom parses a single dicom, using nimsdicom.MetaExtractor,
    upon initialization.
    """
    # test 1 - keeps private tags
    # Indirect check: nimsdicom uses the MetaExtractor to populate
    # dataset._hdr, so asserting on _hdr lets us re-use an existing test file.
    dataset = scidata.parse(os.path.join(DATADIR, 'ge_dcm_mr_localizer.tgz'))
    # try getting the first PrivateCreator, tag (0x0009,0x0000)
    ok_(dataset._hdr.get('PrivateCreator_0X9_0X0'))
def test_parse(self): """ Parse one dicom as header. NIMSDicom parses a single dicom, using nimsdicom.MetaExtractor, upon initialization. """ # test 1 - keeps private tags # this test is rather indirect, nimsdicom uses the MetaExtractor # to set the dataset._hdr, testing it this way, allows re-using one of the test files ds = scidata.parse(os.path.join(DATADIR, 'ge_dcm_mr_localizer.tgz')) ok_(ds._hdr.get('PrivateCreator_0X9_0X0') ) # try getting the first PrivateCreator, tag (0x0009,0x0000)
def setUp(self):
    """Parse the GE localizer archive (with pixel data loaded) before each test."""
    archive_path = os.path.join(DATADIR, 'ge_dcm_mr_localizer.tgz')
    self.ds = scidata.parse(archive_path, load_data=True)
def process(self):
    """Reconstruct each input dicom archive to nifti and merge them into one file.

    Each input in self.inputs is parsed and written to an intermediate
    nifti inside a temporary directory; the intermediates are then wrapped,
    split into 3D volumes where needed, and merged into a single multicoil
    nifti written to self.outbase.

    Returns:
        A single-element list containing the path of the merged nifti.

    Raises:
        ProcessorError: if the output file already exists, or was not
            written for any reason.
    """
    log.info('reconstructing and concatenating')
    outfiles = []
    first_tr = None
    with tempfile.TemporaryDirectory(dir=None) as temp_dirpath:
        # Pass 1: reconstruct every input into an intermediate nifti.
        for f in self.inputs:
            fpath = os.path.abspath(f)
            dcm_ds = scidata.parse(fpath, filetype='dicom', load_data=True, ignore_json=True)
            # remember the TR of the first dataset; written into pixdim[4] below
            # NOTE(review): `if not first_tr` would also re-assign when tr == 0
            # — presumably TR is always positive; confirm
            if not first_tr:
                first_tr = dcm_ds.tr
            # save info to name this nifti
            label = '%s_%s' % (dcm_ds.exam_no, dcm_ds.series_no)
            # GLU: label = '%s' % (dcm_ds.protocol_name)
            intermediate = os.path.join(temp_dirpath, '_%s' % label)
            # save info to name the final output
            if not self.outbase:
                self.outbase = os.path.join(label + '_multicoil.nii.gz')
            result = scidata.write(dcm_ds, dcm_ds.data, intermediate, filetype='nifti', voxel_order=self.voxel_order)
            log.debug('reconstructed nifti: %s' % result)
            # maintain a list of intermediate files
            outfiles += result

        # Pass 2: build a sequence of nifti wrappers from the intermediates;
        # resulting sequence items should have consistent dimensions.
        # NOTE(review): dropped the original's `first_qto_xyz` affine capture —
        # it was assigned but never read (dead code).
        first_nii_header = None
        seq = []
        log.debug('combining niftis: %s' % str(outfiles))  # typo fix: was 'combinging'
        for f in outfiles:
            nii = dcmstack.dcmmeta.NiftiWrapper(nibabel.load(f), make_empty=True)
            # store the header from the first outfile; its 'descrip' is copied
            # onto the merged output below
            if first_nii_header is None:
                log.debug('storing first input nifti header')
                first_nii_header = nii.nii_img.get_header()
            # split 4D volumes into 3D wrappers so every sequence item matches
            if len(nii.nii_img.get_shape()) == 4:
                seq += [nii_wrp for nii_wrp in nii.split()]
            else:
                seq += [nii]

        # combine the sequence of nifti wrappers, raises error if shapes not consistent
        nii_merge = dcmstack.dcmmeta.NiftiWrapper.from_sequence(seq)
        nii_merge.nii_img.update_header()  # update the underlying nifti header
        nii_header = nii_merge.nii_img.get_header()  # reference to underlying nifti header
        # adjust the new header: carry over the description from the first input
        nii_header['descrip'] = first_nii_header['descrip']
        data = nii_merge.nii_img.get_data()
        # display-range hints (cal_min/cal_max); use magnitude for complex data
        if np.iscomplexobj(data):
            clip_vals = np.percentile(np.abs(data), (10.0, 99.5))
        else:
            clip_vals = np.percentile(data, (10.0, 99.5))
        nii_header.structarr['cal_min'] = clip_vals[0]
        nii_header.structarr['cal_max'] = clip_vals[1]
        nii_header['pixdim'][4] = first_tr
        if os.path.exists(self.outbase):
            # BUGFIX: the original message had an unfilled '%s' placeholder;
            # interpolate self.outbase like the sibling raise below does.
            raise ProcessorError('output file %s already exists. not overwriting. bailing.' % self.outbase, log_level=logging.ERROR)
        else:
            nii_merge.to_filename(self.outbase)
        if os.path.exists(self.outbase):
            log.info('generated %s' % self.outbase)
        else:
            raise ProcessorError('output file %s does not exist?' % self.outbase, log_level=logging.ERROR)
        return [self.outbase]