def sort_ipp(filenames):
    """STOP PRESS. This is currently incomplete. I'm waiting to see what's
    going to happen with the IPPSorter in GDCM.

    Given a list of filenames, make use of the gdcm scanner to sort them
    all according to IPP.

    @param filenames: list of full pathnames that you want to have sorted.
    @returns: tuple with (average z space, sorted filenames). The sort
        itself is still TODO, so the placeholder ``(0, [])`` is returned
        for now (same value as the failed-scan path), rather than the
        implicit ``None`` the old fall-through produced.
    """
    s = gdcm.Scanner()
    # we need the IOP and the IPP tags
    iop_tag = gdcm.Tag(0x0020, 0x0037)  # IOP
    s.AddTag(iop_tag)
    ipp_tag = gdcm.Tag(0x0020, 0x0032)  # IPP
    s.AddTag(ipp_tag)

    ret = s.Scan(filenames)
    if not ret:
        return (0, [])

    for f in filenames:
        mapping = s.GetMapping(f)
        pttv = gdcm.PythonTagToValue(mapping)
        pttv.Start()
        while not pttv.IsAtEnd():
            tag = pttv.GetCurrentTag()
            val = pttv.GetCurrentValue()
            # FIX: advance the iterator -- without this call the loop
            # never terminated for any file with at least one scanned tag.
            pttv.Next()

    # TODO: implement the actual IPP-based sorting; until then, return
    # the same placeholder as the failed-scan path so callers always get
    # the documented tuple shape.
    return (0, [])
def get_largest_subfnames(fnames):
    """Group the given files by their (0020,0037) tag value and return
    the largest group.

    @param fnames: list of full pathnames to scan.
    @returns: the list of filenames belonging to the most common tag
        value, or [] if the scan produced no values at all.
    """
    s = gdcm.Scanner()
    tag = gdcm.Tag(0x0020, 0x0037)
    s.AddTag(tag)
    # NOTE: scan failures are deliberately not treated as fatal here
    # (matches the original best-effort behaviour); an empty result is
    # handled below.
    s.Scan(fnames)

    fname_grps = [s.GetAllFilenamesFromTagToValue(tag, val)
                  for val in s.GetValues()]

    # FIX: max() on an empty sequence raises ValueError; return an empty
    # group instead when nothing was scanned.
    if not fname_grps:
        return []
    return max(fname_grps, key=len)
def get_series_dict(dirpath):
    """Map every SeriesInstanceUID (0020,000e) found under dirpath to the
    list of filenames belonging to that series.

    @param dirpath: directory to load and scan.
    @returns: dict of {series_uid: [filenames]}; empty dict if the
        directory could not be loaded or the scan failed.
    """
    directory = gdcm.Directory()
    if not directory.Load(dirpath):
        return {}

    scanner = gdcm.Scanner()
    seruid_tag = gdcm.Tag(0x0020, 0x000e)
    scanner.AddTag(seruid_tag)

    if not scanner.Scan(directory.GetFilenames()):
        return {}

    # one entry per distinct series UID found by the scanner
    return dict(
        (uid, scanner.GetAllFilenamesFromTagToValue(seruid_tag, uid))
        for uid in scanner.GetValues())
def TestScan(dirname, recursive=False): # Check the dirname is indeed a directory system = gdcm.System() if not system.FileIsDirectory(dirname): print "Need a directory" sys.exit(1) # Retrieve all the files within that dir (recursively?) d = gdcm.Directory() nfiles = d.Load(dirname, recursive) print "done retrieving all the", nfiles, "files" s = gdcm.Scanner() t1 = gdcm.Tag(0x0020, 0x000d) # VR::UI t2 = gdcm.Tag(0x0020, 0x000e) # VR::UI t3 = gdcm.Tag(0x0028, 0x0011) # VR::US # Some fun tags, with dual VR: t4 = gdcm.Tag(0x0028, 0x0106) # VR::US_SS t5 = gdcm.Tag(0x0028, 0x0107) # VR::US_SS s.AddTag(t1) s.AddTag(t2) s.AddTag(t3) s.AddTag(t4) s.AddTag(t5) b = s.Scan(d.GetFilenames()) if not b: print "Scanner failed" sys.exit(1) # Raw Values found: values = s.GetValues() print "Values found for all tags are:" print values # get the main super-map : mappings = s.GetMappings() #file1 = d.GetFilenames()[0]; #print file1 #m1 = s.GetMapping( file1 ) #print m1 #print dir(m1) #for k,v in m1.iteritems(): # print "item", k,v # For each file get the value for tag t1: for f in d.GetFilenames(): print "Working on:", f mapping = s.GetMapping(f) pttv = gdcm.PythonTagToValue(mapping) # reset iterator to start position pttv.Start() # iterate until the end: while (not pttv.IsAtEnd()): # get current value for tag and associated value: # if tag was not found, then it was simply not added to the internal std::map # Warning value can be None tag = pttv.GetCurrentTag() value = pttv.GetCurrentValue() print tag, "->", value # increment iterator pttv.Next()
# NOTE(review): top-level script fragment. `directory` is defined elsewhere
# in the file (outside this view). The trailing while-loop appears truncated
# here: its body contains only comments and never calls pttv.Next(), so as
# shown it is incomplete -- confirm against the full file before changing.
t1 = gdcm.Tag(0x8, 0x8)      # presumably (0008,0008) -- verify intent
t2 = gdcm.Tag(0x10, 0x10)    # presumably (0010,0010) -- verify intent
# Iterate over directory
d = gdcm.Directory()
nfiles = d.Load(directory)
if (nfiles == 0):
    # nothing to do -- bail out of the script
    sys.exit(1)
# System.Console.WriteLine( "Files:\n" + d.toString() );
filenames = d.GetFilenames()
# Get rid of any Warning while parsing the DICOM files
gdcm.Trace.WarningOff()
# instantiate Scanner:
s = gdcm.Scanner()
s.AddTag(t1)
s.AddTag(t2)
b = s.Scan(filenames)
if (not b):
    # scan failed -- abort the script
    sys.exit(1)
print "success"
#print s
# dump the tag -> value mapping of the *second* file in the directory
pttv = gdcm.PythonTagToValue(s.GetMapping(filenames[1]))
pttv.Start()
# iterate until the end:
while (not pttv.IsAtEnd()):
    # get current value for tag and associated value:
    # if tag was not found, then it was simply not added to the internal std::map
    # Warning value can be None
def _scan(self, paths):
    """Given a list combining filenames and directories, search
    recursively to find all valid DICOM files.  Build dictionaries.

    Scans the files in blocks while showing a wx progress dialog; the
    user can cancel, in which case an empty dict is returned.

    @param paths: list of filenames and/or directory names to search.
    @returns: dict mapping study_uid -> Study instance (filled in by
        self._helper_scan_block); empty dict when no files were found or
        the user cancelled.
    """
    # UIDs are unique for their domains.  Patient ID for example
    # is not unique.
    # Instance UID (0008,0018)
    # Patient ID (0010,0020)
    # Study UID (0020,000D) - data with common procedural context
    # Study description (0008,1030)
    # Series UID (0020,000E)

    # see http://public.kitware.com/pipermail/igstk-developers/
    # 2006-March/000901.html for explanation w.r.t. number of
    # frames; for now we are going to assume that this refers to
    # the number of included slices (as is the case for the
    # Toshiba 320 slice for example)

    # maps raw DICOM tag tuple -> the symbolic attribute name used by
    # the helper methods when building the study/series structures
    tag_to_symbol = {
        (0x0008, 0x0018): 'instance_uid',
        (0x0010, 0x0010): 'patient_name',
        (0x0010, 0x0020): 'patient_id',
        (0x0020, 0x000d): 'study_uid',
        (0x0008, 0x1030): 'study_description',
        (0x0008, 0x0020): 'study_date',
        (0x0020, 0x000e): 'series_uid',
        (0x0008, 0x103e): 'series_description',
        (0x0008, 0x0060): 'modality',  # fixed per series
        (0x0028, 0x0008): 'number_of_frames',
        (0x0028, 0x0010): 'rows',
        (0x0028, 0x0011): 'columns'
    }

    # find list of unique and sorted filenames
    filenames = self._helper_recursive_glob(paths)

    s = gdcm.Scanner()
    # add the tags we want to the scanner
    for tag_tuple in tag_to_symbol:
        tag = gdcm.Tag(*tag_tuple)
        s.AddTag(tag)

    # maps from study_uid to instance of Study
    study_dict = {}

    # we're going to break the filenames up into 10 blocks and
    # scan each block separately in order to be able to give
    # proper feedback to the user, and also to give the user the
    # opportunity to interrupt the scan
    num_files = len(filenames)
    # no filenames, we return an empty dict.
    if num_files == 0:
        return study_dict

    # partition num_files into num_blocks blocks of blocklen files each,
    # plus one trailing block of blockmod files for the remainder; the
    # block lengths always sum to num_files.
    # NOTE(review): this is Python 2 integer division -- keep / semantics
    # in mind if the file is ever ported to Python 3.
    num_files_per_block = 100
    num_blocks = num_files / num_files_per_block
    if num_blocks == 0:
        num_blocks = 1
    blocklen = num_files / num_blocks
    blockmod = num_files % num_blocks
    block_lens = [blocklen] * num_blocks
    if blockmod > 0:
        block_lens += [blockmod]

    file_idx = 0
    progress = 0.0

    # setup progress dialog
    dlg = wx.ProgressDialog(
        "DICOMBrowser",
        "Scanning DICOM data",
        maximum=100, parent=self._view_frame,
        style=wx.PD_CAN_ABORT | wx.PD_APP_MODAL |
        wx.PD_ELAPSED_TIME | wx.PD_AUTO_HIDE
        #| wx.PD_ESTIMATED_TIME
        | wx.PD_REMAINING_TIME)

    keep_going = True
    # NOTE(review): error_occurred is assigned but never read in this
    # method -- confirm whether a caller relies on it before removing.
    error_occurred = False

    # and now the processing loop can start
    for block_len in block_lens:
        # scan the current block of files
        try:
            self._helper_scan_block(
                s, filenames[file_idx:file_idx + block_len],
                tag_to_symbol, study_dict)
        except Exception:
            # error during scan, we have to kill the dialog and
            # then re-raise the error
            dlg.Destroy()
            raise

        # update file_idx for the next block
        file_idx += block_len

        # update progress counter
        progress = int(100 * file_idx / float(num_files))
        # and tell the progress dialog about our progress
        # by definition, progress will be 100 at the end: if you
        # add all blocklens together, you have to get 1
        (keep_going, skip) = dlg.Update(progress)

        if not keep_going:
            # user has clicked cancel so we zero the dictionary
            study_dict = {}
            # and stop the for loop
            break

    # dialog needs to be taken care of
    dlg.Destroy()

    # return all the scanned data
    return study_dict