def initialize(self, data_collection):

    super(SkullStrip_Model, self).initialize(data_collection)

    for label, data_group in data_collection.data_groups.items():

        reference_filename = data_group.data[data_collection.current_case][self.reference_channel[0]]
        self.mask_filename = self.generate_output_filename(reference_filename, self.mask_string)

        input_data = np.take(data_group.preprocessed_case, self.reference_channel, axis=-1)[np.newaxis, ...]

        # Hacky -- wires the model back into its own output object so that
        # process_case can infer the input patch shape from the first layer.
        self.model.outputs[-1].model = self.model
        self.model.outputs[-1].input_patch_shape = self.model.outputs[-1].model.model.layers[0].input_shape
        self.model.outputs[-1].process_case([input_data])
        self.model.outputs[-1].postprocess()

        save_numpy_2_nifti(np.squeeze(self.model.outputs[-1].return_objects[-1]), data_group.preprocessed_affine, self.mask_filename)

        # Hacky
        self.mask_numpy = read_image_files(self.mask_filename, return_affine=False)
def execute(self, data_collection):

    if self.mask_numpy is None:

        for label, data_group in list(data_collection.data_groups.items()):

            input_data = np.take(data_group.preprocessed_case, self.reference_channel, axis=-1)[np.newaxis, ...]

            # Hacky -- TODO: Revise. Same model re-wiring as in initialize.
            self.model.outputs[-1].model = self.model
            self.model.outputs[-1].input_patch_shape = self.model.outputs[-1].model.model.layers[0].input_shape
            self.model.outputs[-1].process_case(input_data)
            self.model.outputs[-1].postprocess(input_data)

            reference_filename = data_group.data[data_collection.current_case][self.reference_channel[0]]
            self.mask_filename = self.generate_output_filename(reference_filename, self.mask_string)

            # Argument order matches the other save_numpy_2_nifti call sites: (data, affine, filename).
            save_numpy_2_nifti(np.squeeze(self.model.outputs[-1].return_objects[-1]), data_group.preprocessed_affine, self.mask_filename)

            # Hacky
            self.mask_numpy = read_image_files(self.mask_filename, return_affine=False)

    super(SkullStrip_Model, self).execute(data_collection)
def save_to_file(self, data_group):

    """ It is unclear how this will behave if the number of output files is changed
        by a preprocessing step. Missing affines are also a problem.
    """

    if type(self.output_data) is not list:
        for file_idx, output_filename in enumerate(self.output_filenames):
            if self.overwrite or not os.path.exists(output_filename):
                save_numpy_2_nifti(np.squeeze(self.output_data[..., file_idx]), data_group.preprocessed_affine, output_filename)

    return
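# The functions above assume save_numpy_2_nifti takes (data, affine, output_filepath),
# in that positional order, which is the ordering used at every call site in this
# section. A minimal sketch of that assumed behavior using nibabel (the actual
# utility in the repo may differ):

import nibabel as nib


def save_numpy_2_nifti_sketch(data, affine, output_filepath):
    """Hypothetical stand-in: write a numpy array to a NIfTI file with the given affine."""
    nib.save(nib.Nifti1Image(data, affine), output_filepath)
    return output_filepath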
def initialize(self, data_collection):

    super(SkullStrip, self).initialize(data_collection)

    for label, data_group in data_collection.data_groups.items():

        reference_filename = data_group.data[data_collection.current_case][self.reference_channel]
        self.mask_filename = self.generate_output_filename(reference_filename, self.mask_string)

        if type(data_group.preprocessed_case) is list:
            input_file = data_group.preprocessed_case[self.reference_channel]
        else:
            # What to do about affines here... Also, reroute this file to a temporary directory.
            input_file = save_numpy_2_nifti(data_group.preprocessed_case[..., self.reference_channel], data_group.preprocessed_affine, self.generate_output_filename(reference_filename))

        specific_command = self.command + [quotes(input_file), quotes(self.mask_filename), '-f', str(self.bet2_f), '-g', str(self.bet2_g), '-m']
        subprocess.call(' '.join(specific_command), shell=True)

        os.rename(self.mask_filename + '_mask.nii.gz', self.mask_filename)

        self.mask_numpy = read_image_files(self.mask_filename, return_affine=False)
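# The quotes() helper used above is assumed to shell-quote a filesystem path so that
# filenames containing spaces survive the ' '.join(specific_command) call with
# shell=True. A hypothetical minimal version of that assumption; the repo's own
# implementation may differ:

def quotes(path):
    """Wrap a path in double quotes for use in a shell command string."""
    return '"' + str(path) + '"'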
def save_output(self, postprocessor=None):

    # Currently assumes Nifti output. TODO: Make automatically detect output or determine with a class variable.
    # Ideally, split this out into a saving.py function in utils.
    # Case naming is a little wild here, TODO: make more simple.

    for input_data in self.return_objects:

        casename = self.data_collection.data_groups[self.inputs[0]].base_casename
        input_affine = self.data_collection.data_groups[self.inputs[0]].base_affine
        augmentation_string = self.data_collection.data_groups[self.inputs[0]].augmentation_strings[-1]

        if self.output_directory is None:
            output_directory = casename
        else:
            output_directory = self.output_directory

        if postprocessor is None:
            output_filepath = os.path.join(output_directory, replace_suffix(self.output_filename, '', augmentation_string + self.postprocessor_string))
        else:
            output_filepath = os.path.join(output_directory, replace_suffix(self.output_filename, '', augmentation_string + postprocessor.postprocessor_string))

        # If the prediction already exists, skip it. Useful if the process is interrupted.
        if os.path.exists(output_filepath) and not self.replace_existing:
            return

        # Squeezing is a little cagey. Maybe explicitly remove the batch dimension instead.
        output_shape = input_data.shape
        input_data = np.squeeze(input_data)

        return_filenames = []

        # If there is only one channel, only save one file.
        if output_shape[-1] == 1 or self.stack_outputs:
            self.return_filenames += [save_numpy_2_nifti(input_data, input_affine, output_filepath=output_filepath)]
        else:
            for channel in range(output_shape[-1]):
                return_filenames += [save_numpy_2_nifti(input_data[..., channel], input_affine, output_filepath=replace_suffix(output_filepath, input_suffix='', output_suffix='_channel_' + str(channel)))]
            self.return_filenames += [return_filenames]

    return
def initialize(self, data_collection):

    super(SkullStrip, self).initialize(data_collection)

    for label, data_group in data_collection.data_groups.items():

        if type(data_group.preprocessed_case) is list:
            input_file = data_group.preprocessed_case[self.reference_channel]
        else:
            # What to do about affines here...
            input_file = save_numpy_2_nifti(data_group.preprocessed_case[..., self.reference_channel], data_group.preprocessed_affine, 'DEEPNEURO_TEMP_FILE.nii.gz')

        base_filename = data_group.data[data_collection.current_case][self.reference_channel]
        self.mask_filename = self.generate_output_filename(base_filename, self.mask_string)

        specific_command = self.command + [input_file, self.mask_filename, '-f', str(self.bet2_f), '-g', str(self.bet2_g), '-m']
        subprocess.call(' '.join(specific_command), shell=True, stdout=FNULL, stderr=subprocess.STDOUT)

        os.rename(self.mask_filename + '_mask.nii.gz', self.mask_filename)

        self.mask_numpy = read_image_files(self.mask_filename, return_affine=False)
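# FNULL is assumed to be a module-level handle on os.devnull used to silence BET2's
# console output; a sketch of that assumption:

import os
import subprocess

FNULL = open(os.devnull, 'w')

# A shell-free alternative for the subprocess call above would pass the argument list
# directly, so paths never need manual quoting (sketch only; self.command is whatever
# BET2 executable the class configures):
#
#     subprocess.call(self.command + [input_file, self.mask_filename,
#                                     '-f', str(self.bet2_f), '-g', str(self.bet2_g), '-m'],
#                     stdout=FNULL, stderr=subprocess.STDOUT)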