def _setup_graph(pm, graph, index, x_region, y_region, region, params, source, gpu=None):
    backproject = get_task('lamino-backproject', processing_node=gpu)
    slicer = get_task('slice', processing_node=gpu)
    writer = get_writer(params)

    if not params.dry_run:
        writer.props.filename = '{}-{:>03}-%04i.tif'.format(params.output, index)

    # Parameters
    backproject.props.num_projections = params.number
    backproject.props.overall_angle = np.deg2rad(params.overall_angle)
    backproject.props.lamino_angle = np.deg2rad(params.lamino_angle)
    backproject.props.roll_angle = np.deg2rad(params.roll_angle)
    backproject.props.x_region = x_region
    backproject.props.y_region = y_region
    backproject.props.z = params.z
    backproject.props.addressing_mode = params.lamino_padding_mode

    if params.z_parameter in ['lamino-angle', 'roll-angle']:
        region = [np.deg2rad(reg) for reg in region]
    backproject.props.region = region
    backproject.props.parameter = params.z_parameter
    backproject.props.center = params.axis

    graph.connect_nodes(backproject, slicer)
    graph.connect_nodes(slicer, writer)

    if params.only_bp:
        first = backproject
        graph.connect_nodes(source, backproject)
    else:
        first = create_preprocessing_pipeline(params, graph, source=source, processing_node=gpu)
        graph.connect_nodes(first, backproject)

    return first
def create_preprocessing_pipeline(args, graph, source=None, processing_node=None):
    pm = Ufo.PluginManager()

    if not (args.width and args.height):
        width, height = determine_shape(args, args.projections)
        if not width:
            raise RuntimeError("Could not determine width from the input")
        if not args.width:
            args.width = width
        if not args.height:
            args.height = height - args.y

    LOG.debug('Image width x height: %d x %d', args.width, args.height)

    if source:
        current = source
    elif args.darks and args.flats:
        current = create_flat_correct_pipeline(args, graph, processing_node=processing_node)
    else:
        current = get_task('read')
        set_node_props(current, args)
        if not args.projections:
            raise RuntimeError('--projections not set')
        setup_read_task(current, args.projections, args)

    if args.absorptivity:
        absorptivity = get_task('calculate', processing_node=processing_node)
        absorptivity.props.expression = '-log(v)'
        graph.connect_nodes(current, absorptivity)
        current = absorptivity

    if args.transpose_input:
        transpose = get_task('transpose')
        graph.connect_nodes(current, transpose)
        current = transpose
        tmp = args.width
        args.width = args.height
        args.height = tmp

    if args.projection_filter != 'none':
        pf_first, pf_last = create_projection_filtering_pipeline(args, graph,
                                                                 processing_node=processing_node)
        graph.connect_nodes(current, pf_first)
        current = pf_last

    if args.energy is not None and args.propagation_distance is not None:
        pr_first, pr_last = create_phase_retrieval_pipeline(args, graph,
                                                            processing_node=processing_node)
        graph.connect_nodes(current, pr_first)
        current = pr_last

    return current
def create_phase_retrieval_pipeline(args, graph, processing_node=None):
    LOG.debug('Creating phase retrieval pipeline')
    pm = Ufo.PluginManager()

    # Retrieve phase
    phase_retrieve = get_task('retrieve-phase', processing_node=processing_node)
    pad_phase_retrieve = get_task('pad', processing_node=processing_node)
    crop_phase_retrieve = get_task('crop', processing_node=processing_node)
    fft_phase_retrieve = get_task('fft', processing_node=processing_node)
    ifft_phase_retrieve = get_task('ifft', processing_node=processing_node)

    width = args.width
    height = args.height
    default_padded_width = next_power_of_two(width)
    default_padded_height = next_power_of_two(height)

    if not args.retrieval_padded_width:
        args.retrieval_padded_width = default_padded_width
    if not args.retrieval_padded_height:
        args.retrieval_padded_height = default_padded_height

    fmt = 'Phase retrieval padding: {}x{} -> {}x{}'
    LOG.debug(fmt.format(width, height, args.retrieval_padded_width, args.retrieval_padded_height))
    x = (args.retrieval_padded_width - width) / 2
    y = (args.retrieval_padded_height - height) / 2
    pad_phase_retrieve.props.x = x
    pad_phase_retrieve.props.y = y
    pad_phase_retrieve.props.width = args.retrieval_padded_width
    pad_phase_retrieve.props.height = args.retrieval_padded_height
    pad_phase_retrieve.props.addressing_mode = args.retrieval_padding_mode

    crop_phase_retrieve.props.x = x
    crop_phase_retrieve.props.y = y
    crop_phase_retrieve.props.width = width
    crop_phase_retrieve.props.height = height

    phase_retrieve.props.method = args.retrieval_method
    phase_retrieve.props.energy = args.energy
    phase_retrieve.props.distance = args.propagation_distance
    phase_retrieve.props.pixel_size = args.pixel_size
    phase_retrieve.props.regularization_rate = args.regularization_rate
    phase_retrieve.props.thresholding_rate = args.thresholding_rate

    fft_phase_retrieve.props.dimensions = 2
    ifft_phase_retrieve.props.dimensions = 2

    graph.connect_nodes(pad_phase_retrieve, fft_phase_retrieve)
    graph.connect_nodes(fft_phase_retrieve, phase_retrieve)
    graph.connect_nodes(phase_retrieve, ifft_phase_retrieve)
    graph.connect_nodes(ifft_phase_retrieve, crop_phase_retrieve)

    return (pad_phase_retrieve, crop_phase_retrieve)
def __init__(self, dark, flat, absorptivity=True, fix_nan_and_inf=True, copy_inputs=False):
    self.dark = dark
    self.flat = flat
    self.ffc = get_task('flat-field-correct')
    self.ffc.props.fix_nan_and_inf = fix_nan_and_inf
    self.ffc.props.absorption_correct = absorptivity
    super().__init__(self.ffc, get_output=True, output_dims=2, copy_inputs=copy_inputs)
def get_dummy_reader(params):
    if params.width is None and params.height is None:
        raise RuntimeError("You have to specify --width and --height when generating data.")

    width, height = params.width, params.height
    reader = get_task('dummy-data', width=width, height=height, number=params.number or 1)

    return reader, width, height
def create_projection_filtering_pipeline(args, graph, processing_node=None):
    pm = Ufo.PluginManager()
    pad = get_task('pad', processing_node=processing_node)
    crop = get_task('crop', processing_node=processing_node)
    fft = get_task('fft', processing_node=processing_node)
    ifft = get_task('ifft', processing_node=processing_node)
    fltr = get_task('filter', processing_node=processing_node)

    setup_padding(pad, crop, args.width, args.height, args.projection_padding_mode)

    fft.props.dimensions = 1
    ifft.props.dimensions = 1
    fltr.props.filter = args.projection_filter
    fltr.props.scale = args.projection_filter_scale

    graph.connect_nodes(pad, fft)
    graph.connect_nodes(fft, fltr)
    graph.connect_nodes(fltr, ifft)
    graph.connect_nodes(ifft, crop)

    return (pad, crop)
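# A minimal usage sketch (not part of the original module, function name is hypothetical): it
# assumes `args` is the parsed command-line namespace used throughout this file and shows how the
# (pad, crop) endpoints returned by create_projection_filtering_pipeline can be spliced between a
# reader and a writer before running the graph.
def _run_projection_filter_example(args):
    graph = Ufo.TaskGraph()
    scheduler = Ufo.Scheduler()

    reader = get_task('read')
    set_node_props(reader, args)
    setup_read_task(reader, args.projections, args)

    writer = get_task('write')
    writer.props.filename = args.output

    # The pipeline returns its first and last nodes so callers can embed it in a larger graph.
    pf_first, pf_last = create_projection_filtering_pipeline(args, graph)
    graph.connect_nodes(reader, pf_first)
    graph.connect_nodes(pf_last, writer)

    scheduler.run(graph)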
def generate_partial(append=False):
    pm = Ufo.PluginManager()
    graph = Ufo.TaskGraph()
    sched = Ufo.Scheduler()

    writer = get_task('write')
    writer.props.filename = args.output
    writer.props.append = append

    sinos = create_sinogram_pipeline(args, graph)
    graph.connect_nodes(sinos, writer)
    sched.run(graph)
def create_sinogram_pipeline(args, graph):
    """Create sinogram generating pipeline based on arguments from *args*."""
    pm = Ufo.PluginManager()
    sinos = pm.get_task('transpose-projections')

    if args.number:
        region = (args.start, args.start + args.number, args.step)
        num_projections = len(range(*region))
    else:
        num_projections = len(get_filenames(args.projections))

    sinos.props.number = num_projections

    if args.darks and args.flats:
        start = create_flat_correct_pipeline(args, graph)
    else:
        start = get_task('read')
        start.props.path = args.projections
        set_node_props(start, args)

    graph.connect_nodes(start, sinos)

    return sinos
def create_preprocessing_pipeline(args, graph, source=None, processing_node=None,
                                  cone_beam_weight=True, make_reader=True):
    """If *make_reader* is True, create a read task if *source* is None and no dark and flat
    fields are given.
    """
    import numpy as np

    if not (args.width and args.height):
        width, height = determine_shape(args, args.projections)
        if not width:
            raise RuntimeError("Could not determine width from the input")
        if not args.width:
            args.width = width
        if not args.height:
            args.height = height - args.y

    LOG.debug('Image width x height: %d x %d', args.width, args.height)

    current = None
    if source:
        current = source
    elif args.darks and args.flats:
        current = create_flat_correct_pipeline(args, graph, processing_node=processing_node)
    else:
        if make_reader:
            current = get_task('read')
            set_node_props(current, args)
            if not args.projections:
                raise RuntimeError('--projections not set')
            setup_read_task(current, args.projections, args)

    if args.absorptivity:
        absorptivity = get_task('calculate', processing_node=processing_node)
        absorptivity.props.expression = 'v <= 0 ? 0.0f : -log(v)'
        if current:
            graph.connect_nodes(current, absorptivity)
        current = absorptivity

    if args.transpose_input:
        transpose = get_task('transpose')
        if current:
            graph.connect_nodes(current, transpose)
        current = transpose
        tmp = args.width
        args.width = args.height
        args.height = tmp

    if cone_beam_weight and not np.all(np.isinf(args.source_position_y)):
        # Cone beam projection weight
        LOG.debug('Enabling cone beam weighting')
        weight = get_task('cone-beam-projection-weight', processing_node=processing_node)
        weight.props.source_distance = (-np.array(args.source_position_y)).tolist()
        weight.props.detector_distance = args.detector_position_y
        weight.props.center_position_x = (args.center_position_x or
                                          [args.width / 2. + (args.width % 2) * 0.5])
        weight.props.center_position_z = (args.center_position_z or
                                          [args.height / 2. + (args.height % 2) * 0.5])
        weight.props.axis_angle_x = args.axis_angle_x
        if current:
            graph.connect_nodes(current, weight)
        current = weight

    if args.energy is not None and args.propagation_distance is not None:
        pr_first, pr_last = create_phase_retrieval_pipeline(args, graph,
                                                            processing_node=processing_node)
        if current:
            graph.connect_nodes(current, pr_first)
        current = pr_last

    if args.projection_filter != 'none':
        pf_first, pf_last = create_projection_filtering_pipeline(args, graph,
                                                                 processing_node=processing_node)
        if current:
            graph.connect_nodes(current, pf_first)
        current = pf_last

    return current
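# Worked example for the cone-beam centre default above (illustration only, not part of the
# module): when --center-position-x/z are omitted, `width / 2. + (width % 2) * 0.5` evaluates to
# width / 2 for even sizes and (width + 1) / 2 for odd sizes:
#
#     >>> 2048 / 2. + (2048 % 2) * 0.5
#     1024.0
#     >>> 2049 / 2. + (2049 % 2) * 0.5
#     1025.0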
def tomo(params):
    # Create reader and writer
    if params.projections and params.sinograms:
        raise RuntimeError("Cannot specify both --projections and --sinograms.")

    if params.projections is None and params.sinograms is None:
        reader, width, height = get_dummy_reader(params)
    else:
        if params.projections:
            reader, width, height = get_projection_reader(params)
        else:
            reader, width, height = get_sinogram_reader(params)

    axis = params.axis or width / 2.0

    if params.projections and params.resize:
        width /= params.resize
        height /= params.resize
        axis /= params.resize

    LOG.debug("Input dimensions: {}x{} pixels".format(width, height))

    writer = get_writer(params)

    # Setup graph depending on the chosen method and input data
    g = Ufo.TaskGraph()

    if params.projections is not None:
        if params.number:
            count = len(range(params.start, params.start + params.number, params.step))
        else:
            count = len(get_filenames(params.projections))

        LOG.debug("Number of projections: {}".format(count))
        sino_output = get_task('transpose-projections', number=count)

        if params.darks and params.flats:
            g.connect_nodes(create_flat_correct_pipeline(params, g), sino_output)
        else:
            g.connect_nodes(reader, sino_output)

        if height:
            # Sinogram height is the one needed for further padding
            height = count
    else:
        sino_output = reader

    if params.method == 'fbp':
        fft = get_task('fft', dimensions=1)
        ifft = get_task('ifft', dimensions=1)
        fltr = get_task('filter', filter=params.projection_filter)
        bp = get_task('backproject', axis_pos=axis)

        if params.angle:
            bp.props.angle_step = params.angle

        if params.offset:
            bp.props.angle_offset = params.offset

        if width and height:
            # Pad the image with its extent to prevent the reconstruction ring
            pad = get_task('pad')
            crop = get_task('crop')
            setup_padding(pad, crop, width, height, params.projection_padding_mode)

            LOG.debug("Padding input to: {}x{} pixels".format(pad.props.width, pad.props.height))

            g.connect_nodes(sino_output, pad)
            g.connect_nodes(pad, fft)
            g.connect_nodes(fft, fltr)
            g.connect_nodes(fltr, ifft)
            g.connect_nodes(ifft, crop)
            g.connect_nodes(crop, bp)
        else:
            if params.crop_width:
                ifft.props.crop_width = int(params.crop_width)
                LOG.debug("Cropping to {} pixels".format(ifft.props.crop_width))

            g.connect_nodes(sino_output, fft)
            g.connect_nodes(fft, fltr)
            g.connect_nodes(fltr, ifft)
            g.connect_nodes(ifft, bp)

        g.connect_nodes(bp, writer)

    if params.method in ('sart', 'sirt', 'sbtv', 'asdpocs'):
        projector = pm.get_task_from_package('ir', 'parallel-projector')
        projector.set_properties(model='joseph', is_forward=False)
        projector.set_properties(axis_position=axis)
        projector.set_properties(step=params.angle if params.angle else np.pi / 180.0)

        method = pm.get_task_from_package('ir', params.method)
        method.set_properties(projector=projector, num_iterations=params.num_iterations)

        if params.method in ('sart', 'sirt'):
            method.set_properties(relaxation_factor=params.relaxation_factor)

        if params.method == 'asdpocs':
            minimizer = pm.get_task_from_package('ir', 'sirt')
            method.set_properties(df_minimizer=minimizer)

        if params.method == 'sbtv':
            # FIXME: the `lambda` keyword prevents the following assignment:
            # method.props.lambda = params.lambda
            method.set_properties(mu=params.mu)

        g.connect_nodes(sino_output, method)
        g.connect_nodes(method, writer)

    if params.method == 'dfi':
        oversampling = params.oversampling or 1

        pad = get_task('zeropad', center_of_rotation=axis, oversampling=oversampling)
        fft = get_task('fft', dimensions=1, auto_zeropadding=0)
        dfi = get_task('dfi-sinc')
        ifft = get_task('ifft', dimensions=2)
        swap_forward = get_task('swap-quadrants')
        swap_backward = get_task('swap-quadrants')

        if params.angle:
            dfi.props.angle_step = params.angle

        g.connect_nodes(sino_output, pad)
        g.connect_nodes(pad, fft)
        g.connect_nodes(fft, dfi)
        g.connect_nodes(dfi, swap_forward)
        g.connect_nodes(swap_forward, ifft)
        g.connect_nodes(ifft, swap_backward)

        if width:
            crop = get_task('crop')
            crop.set_properties(from_center=True, width=width, height=width)
            g.connect_nodes(swap_backward, crop)
            g.connect_nodes(crop, writer)
        else:
            g.connect_nodes(swap_backward, writer)

    scheduler = Ufo.Scheduler()

    if hasattr(scheduler.props, 'enable_tracing'):
        LOG.debug("Use tracing: {}".format(params.enable_tracing))
        scheduler.props.enable_tracing = params.enable_tracing

    scheduler.run(g)
    duration = scheduler.props.time
    LOG.info("Execution time: {} s".format(duration))

    return duration
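# Conceptual reference for the fft -> filter -> ifft chain used in the 'fbp' branch above: a plain
# NumPy sketch of 1-D ramp filtering applied row-wise to a sinogram. This illustrates the idea
# only; it is not the UFO 'filter' task, which also supports other window functions and a scale
# factor.
import numpy as np

def ramp_filter_sinogram(sinogram):
    """Filter each detector row of *sinogram* (angles x detector pixels) with a ramp filter."""
    n = sinogram.shape[1]
    freqs = np.fft.fftfreq(n)          # spatial frequencies in cycles per pixel
    ramp = np.abs(freqs)               # |f| ramp filter
    spectrum = np.fft.fft(sinogram, axis=1)
    return np.real(np.fft.ifft(spectrum * ramp, axis=1))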
def create_flat_correct_pipeline(args, graph, processing_node=None):
    """
    Create flat field correction pipeline. All the settings are provided in *args*. *graph* is
    used for making the connections. Returns the flat field correction task which can be used
    for further pipelining.
    """
    pm = Ufo.PluginManager()

    if args.projections is None or args.flats is None or args.darks is None:
        raise RuntimeError("You must specify --projections, --flats and --darks.")

    reader = get_task('read')
    dark_reader = get_task('read')
    flat_before_reader = get_task('read')
    ffc = get_task('flat-field-correct', processing_node=processing_node,
                   dark_scale=args.dark_scale,
                   absorption_correct=args.absorptivity,
                   fix_nan_and_inf=args.fix_nan_and_inf)
    mode = args.reduction_mode.lower()
    roi_args = make_subargs(args, ['y', 'height', 'y_step'])
    set_node_props(reader, args)
    set_node_props(dark_reader, roi_args)
    set_node_props(flat_before_reader, roi_args)

    for r, path in ((reader, args.projections), (dark_reader, args.darks),
                    (flat_before_reader, args.flats)):
        setup_read_task(r, path, args)

    LOG.debug("Doing flat field correction using reduction mode `{}'".format(mode))

    if args.flats2:
        flat_after_reader = get_task('read')
        setup_read_task(flat_after_reader, args.flats2, args)
        set_node_props(flat_after_reader, roi_args)
        num_files = len(get_filenames(args.projections))
        can_read = len(range(args.start, num_files, args.step))
        number = args.number if args.number else num_files
        num_read = min(can_read, number)
        flat_interpolate = get_task('interpolate', processing_node=processing_node,
                                    number=num_read)

    if args.resize:
        LOG.debug("Resize input data by factor of {}".format(args.resize))
        proj_bin = get_task('bin', processing_node=processing_node, size=args.resize)
        dark_bin = get_task('bin', processing_node=processing_node, size=args.resize)
        flat_bin = get_task('bin', processing_node=processing_node, size=args.resize)
        graph.connect_nodes(reader, proj_bin)
        graph.connect_nodes(dark_reader, dark_bin)
        graph.connect_nodes(flat_before_reader, flat_bin)
        reader, dark_reader, flat_before_reader = proj_bin, dark_bin, flat_bin

        if args.flats2:
            flat_bin = get_task('bin', processing_node=processing_node, size=args.resize)
            graph.connect_nodes(flat_after_reader, flat_bin)
            flat_after_reader = flat_bin

    if mode == 'median':
        dark_stack = get_task('stack', processing_node=processing_node,
                              number=len(get_filenames(args.darks)))
        dark_reduced = get_task('flatten', processing_node=processing_node, mode='median')
        flat_before_stack = get_task('stack', processing_node=processing_node,
                                     number=len(get_filenames(args.flats)))
        flat_before_reduced = get_task('flatten', processing_node=processing_node, mode='median')

        graph.connect_nodes(dark_reader, dark_stack)
        graph.connect_nodes(dark_stack, dark_reduced)
        graph.connect_nodes(flat_before_reader, flat_before_stack)
        graph.connect_nodes(flat_before_stack, flat_before_reduced)

        if args.flats2:
            flat_after_stack = get_task('stack', processing_node=processing_node,
                                        number=len(get_filenames(args.flats2)))
            flat_after_reduced = get_task('flatten', processing_node=processing_node,
                                          mode='median')
            graph.connect_nodes(flat_after_reader, flat_after_stack)
            graph.connect_nodes(flat_after_stack, flat_after_reduced)
    elif mode == 'average':
        dark_reduced = get_task('average', processing_node=processing_node)
        flat_before_reduced = get_task('average', processing_node=processing_node)
        graph.connect_nodes(dark_reader, dark_reduced)
        graph.connect_nodes(flat_before_reader, flat_before_reduced)

        if args.flats2:
            flat_after_reduced = get_task('average', processing_node=processing_node)
            graph.connect_nodes(flat_after_reader, flat_after_reduced)
    else:
        raise ValueError('Invalid reduction mode')

    graph.connect_nodes_full(reader, ffc, 0)
    graph.connect_nodes_full(dark_reduced, ffc, 1)

    if args.flats2:
        graph.connect_nodes_full(flat_before_reduced, flat_interpolate, 0)
        graph.connect_nodes_full(flat_after_reduced, flat_interpolate, 1)
        graph.connect_nodes_full(flat_interpolate, ffc, 2)
    else:
        graph.connect_nodes_full(flat_before_reduced, ffc, 2)

    return ffc
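# For reference, a NumPy sketch of the arithmetic the 'flat-field-correct' task performs on each
# projection. This is a minimal sketch under the usual flat-field convention; the exact handling
# of dark_scale and of the NaN/Inf fixing in the UFO task is an assumption here.
import numpy as np

def flat_field_correct(projection, dark, flat, dark_scale=1.0, absorptivity=False):
    # (projection - dark) / (flat - dark), with the dark frame optionally rescaled
    corrected = (projection - dark_scale * dark) / (flat - dark_scale * dark)
    if absorptivity:
        # Beer-Lambert: convert transmission to attenuation
        corrected = -np.log(corrected)
    # Replace NaN/Inf values, analogous to fix_nan_and_inf
    return np.nan_to_num(corrected, nan=0.0, posinf=0.0, neginf=0.0)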
def create_phase_retrieval_pipeline(args, graph, processing_node=None):
    LOG.debug('Creating phase retrieval pipeline')
    pm = Ufo.PluginManager()

    # Retrieve phase
    phase_retrieve = get_task('retrieve-phase', processing_node=processing_node)
    pad_phase_retrieve = get_task('pad', processing_node=processing_node)
    crop_phase_retrieve = get_task('crop', processing_node=processing_node)
    fft_phase_retrieve = get_task('fft', processing_node=processing_node)
    ifft_phase_retrieve = get_task('ifft', processing_node=processing_node)
    last = crop_phase_retrieve

    width = args.width
    height = args.height
    default_padded_width = next_power_of_two(width)
    default_padded_height = next_power_of_two(height)

    if not args.retrieval_padded_width:
        args.retrieval_padded_width = default_padded_width
    if not args.retrieval_padded_height:
        args.retrieval_padded_height = default_padded_height

    fmt = 'Phase retrieval padding: {}x{} -> {}x{}'
    LOG.debug(fmt.format(width, height, args.retrieval_padded_width, args.retrieval_padded_height))
    x = (args.retrieval_padded_width - width) / 2
    y = (args.retrieval_padded_height - height) / 2
    pad_phase_retrieve.props.x = x
    pad_phase_retrieve.props.y = y
    pad_phase_retrieve.props.width = args.retrieval_padded_width
    pad_phase_retrieve.props.height = args.retrieval_padded_height
    pad_phase_retrieve.props.addressing_mode = args.retrieval_padding_mode

    crop_phase_retrieve.props.x = x
    crop_phase_retrieve.props.y = y
    crop_phase_retrieve.props.width = width
    crop_phase_retrieve.props.height = height

    phase_retrieve.props.method = args.retrieval_method
    phase_retrieve.props.energy = args.energy
    phase_retrieve.props.distance = args.propagation_distance
    phase_retrieve.props.pixel_size = args.pixel_size
    phase_retrieve.props.regularization_rate = args.regularization_rate
    phase_retrieve.props.thresholding_rate = args.thresholding_rate
    phase_retrieve.props.frequency_cutoff = args.frequency_cutoff

    fft_phase_retrieve.props.dimensions = 2
    ifft_phase_retrieve.props.dimensions = 2

    graph.connect_nodes(pad_phase_retrieve, fft_phase_retrieve)
    graph.connect_nodes(fft_phase_retrieve, phase_retrieve)
    graph.connect_nodes(phase_retrieve, ifft_phase_retrieve)
    graph.connect_nodes(ifft_phase_retrieve, crop_phase_retrieve)

    calculate = get_task('calculate', processing_node=processing_node)
    if args.retrieval_method == 'tie':
        expression = '(isinf (v) || isnan (v) || (v <= 0)) ? 0.0f :'
        if args.delta is not None:
            import numpy as np
            lam = 6.62606896e-34 * 299792458 / (args.energy * 1.60217733e-16)
            # Compute mju from the fact that beta = 10^-regularization_rate * delta
            # and mju = 4 * Pi * beta / lambda
            mju = 4 * np.pi * 10 ** -args.regularization_rate * args.delta / lam
            # Take the logarithm to obtain the projected thickness
            expression += '-log ({} * v) * {}'.format(2 / 10 ** args.regularization_rate, 1 / mju)
        else:
            expression += '-log (v)'
    else:
        expression = '(isinf (v) || isnan (v)) ? 0.0f : -v'
    calculate.props.expression = expression
    graph.connect_nodes(crop_phase_retrieve, calculate)
    last = calculate

    return (pad_phase_retrieve, last)
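# Unit check for the TIE branch above (illustration only): the constant 1.60217733e-16 J converts
# keV to joules, so args.energy is expected in keV and the wavelength `lam` comes out in metres,
# e.g. for a 20 keV beam:
#
#     >>> 6.62606896e-34 * 299792458 / (20 * 1.60217733e-16)
#     6.199e-11  # metres, ~0.62 Angstrom (value rounded)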
def __init__(self, args, resources=None, gpu_index=0, do_normalization=False,
             region=None, copy_inputs=False):
    if args.width is None or args.height is None:
        raise GeneralBackprojectError('width and height must be set in GeneralBackprojectArgs')
    scheduler = Ufo.FixedScheduler()
    if resources:
        scheduler.set_resources(resources)
    gpu = scheduler.get_resources().get_gpu_nodes()[gpu_index]

    self.args = copy.deepcopy(args)
    x_region, y_region, z_region = get_reconstruction_regions(self.args, store=True, dtype=float)
    set_projection_filter_scale(self.args)
    if region is not None:
        self.args.region = region
    LOG.debug('Creating reconstructor for gpu %d, region: %s', gpu_index, self.args.region)
    geometry = CTGeometry(self.args)
    if not self.args.disable_projection_crop:
        geometry.optimize_args()
    self.args = geometry.args

    regions = make_runs([gpu], [gpu_index], x_region, y_region, self.args.region,
                        DTYPE_CL_SIZE[self.args.store_type],
                        slices_per_device=self.args.slices_per_device,
                        slice_memory_coeff=self.args.slice_memory_coeff,
                        data_splitting_policy=self.args.data_splitting_policy)
    if len(regions) > 1:
        raise GeneralBackprojectError('Region does not fit to the GPU memory')

    graph = Ufo.TaskGraph()

    # Normalization
    self.ffc = None
    self.do_normalization = do_normalization
    if do_normalization:
        self.ffc = get_task('flat-field-correct', processing_node=gpu)
        self.ffc.props.fix_nan_and_inf = self.args.fix_nan_and_inf
        self.ffc.props.absorption_correct = self.args.absorptivity
        self._darks_averaged = False
        self._flats_averaged = False
        self.dark_avg = get_task('average', processing_node=gpu)
        self.flat_avg = get_task('average', processing_node=gpu)
        graph.connect_nodes_full(self.dark_avg, self.ffc, 1)
        graph.connect_nodes_full(self.flat_avg, self.ffc, 2)

    (first, last) = setup_graph(self.args, graph, x_region, y_region, self.args.region,
                                source=self.ffc, gpu=gpu, index=gpu_index, do_output=False,
                                make_reader=False)
    output_dims = 2
    if args.slice_metric:
        output_dims = 1
        metric = self.args.slice_metric
        if args.slice_metric == 'sag':
            metric = 'sum'
            gradient_task = get_task('gradient', processing_node=gpu, direction='both_abs')
            graph.connect_nodes(last, gradient_task)
            last = gradient_task
        measure_task = get_task('measure', processing_node=gpu, axis=-1, metric=metric)
        graph.connect_nodes(last, measure_task)
    elif first == last:
        # There are no other processing steps than back projection
        LOG.debug('Only back projection, no other processing')
        graph = first

    super().__init__(graph, get_output=True, output_dims=output_dims, scheduler=scheduler,
                     copy_inputs=copy_inputs)

    if self.do_normalization:
        # Setup input tasks for normalization images averaging. Our parent picks up only the two
        # averagers and not the ffc's zero port for projections.
        self.input_tasks[self.ffc] = [Ufo.InputTask()]
        self.ufo_buffers[self.ffc] = [None]
        self.graph.connect_nodes_full(self.input_tasks[self.ffc][0], self.ffc, 0)