Example #1
 def load_darkfield(self):
     """
     Reads the darkfield file and saves the frame as a class member.
     Parameters
     ----------
     none
     Returns
     -------
     nothing
     """
     try:
         self.darkfield = ut.read_tif(self.darkfield_filename)
     except Exception:
         print("Darkfield filename not set for TIM1, will not correct")
Example #2
 def load_whitefield(self):
     """
     Reads the whitefield file and saves the frame as a class member.
     Parameters
     ----------
     none
     Returns
     -------
     nothing
     """
     try:
         self.whitefield = ut.read_tif(self.whitefield_filename)
         self.whitefield = np.where(self.whitefield < 100, 1e20,
                                    self.whitefield)  #Some large value
     except Exception:
         print("Whitefield filename not set for TIM2")
         raise
 def load_darkfield(self):
     """
     Reads the darkfield file and saves the frame as a class member.
     Parameters
     ----------
     none
     Returns
     -------
     nothing
     """
     try:
         self.darkfield = ut.read_tif(self.darkfield_filename)
     except Exception:
         print("Darkfield filename not set for TIM2")
         raise
     if isinstance(self.whitefield, np.ndarray):
         self.whitefield = np.where(self.darkfield > 1, 0, self.whitefield)  # kill known bad pixels
 def load_whitefield(self):
     """
     Reads the whitefield file and saves the frame as a class member.
     Parameters
     ----------
     none
     Returns
     -------
     nothing
     """
     try:
         self.whitefield = ut.read_tif(self.whitefield_filename)
     except Exception:
         print("Whitefield filename not set for TIM2")
         raise
     try:
         # self.whitefield = np.where(self.whitefield < 100, 1e20, self.whitefield)  # some large value
         self.whitefield[255:257, 0:255] = 0  # weird pixels on edge of seam (TL/TR); zeroing them in the whitefield also zeroes them in the returned frame
         self.wfavg = np.average(self.whitefield)
         self.wfstd = np.std(self.whitefield)
         self.whitefield = np.where(self.whitefield < self.wfavg - 3 * self.wfstd, 0, self.whitefield)
     except Exception:
         print("Corrections to the TIM2 whitefield image failed in detector module.")
Example #5
def manage_reconstruction(proc, experiment_dir, rec_id=None):
    """
    This function starts the interruption-discovery process and continues with the reconstruction processing.
    
    It reads configuration file defined as <experiment_dir>/conf/config_rec.
    If multiple generations are configured, or separate scans are discovered, it will start concurrent reconstructions.
    It creates an image.npy file for each successful reconstruction.

    Parameters
    ----------
    proc : str
        processing library, choices are: cpu, cuda, opencl
    experiment_dir : str
        directory where the experiment files are located
    rec_id : str
        optional; if given, an alternate configuration file will be used for reconstruction (i.e. <rec_id>_config_rec)

    Returns
    -------
    nothing
    """
    if os.path.exists('stopfile'):
        os.remove('stopfile')
    print('starting reconstruction')

    # the rec_id is a postfix added to the config_rec configuration file name; if defined, use that configuration
    conf_dir = os.path.join(experiment_dir, 'conf')
    if rec_id is None:
        conf_file = os.path.join(conf_dir, 'config_rec')
    else:
        conf_file = os.path.join(conf_dir, rec_id + '_config_rec')

    # check if file exists
    if not os.path.isfile(conf_file):
        print('no configuration file ' + conf_file + ' found')
        return

    # verify the configuration file
    if not ver.ver_config_rec(conf_file):
        # if not verified, ver will print a message
        return

    try:
        config_map = ut.read_config(conf_file)
        if config_map is None:
            print("can't read configuration file " + conf_file)
            return
    except Exception as e:
        print('Cannot parse configuration file ' + conf_file +
              ', check for matching parentheses and quotation marks')
        print(str(e))
        return

    # the exp_dirs_data list holds (data, directory) pairs, where directory is the root of the data/data.tif
    # file and data is the data.tif file in that directory.
    exp_dirs_data = []
    # experiment may be multi-scan in which case reconstruction will run for each scan
    for dir in os.listdir(experiment_dir):
        if dir.startswith('scan'):
            datafile = os.path.join(experiment_dir, dir, 'data', 'data.tif')
            if os.path.isfile(datafile):
                exp_dirs_data.append(
                    (datafile, os.path.join(experiment_dir, dir)))
    # if there are no scan directories, assume it is a combined-scans experiment
    if len(exp_dirs_data) == 0:
        # in the typical scenario data_dir is not configured, and it defaults to <experiment_dir>/data
        # the data_dir is ignored in multi-scan scenario
        try:
            data_dir = config_map.data_dir
        except AttributeError:
            data_dir = os.path.join(experiment_dir, 'data')
        datafile = os.path.join(data_dir, 'data.tif')
        if os.path.isfile(datafile):
            exp_dirs_data.append((datafile, experiment_dir))
    no_runs = len(exp_dirs_data)
    if no_runs == 0:
        print('did not find data.tif file(s). ')
        return
    try:
        generations = config_map.generations
    except AttributeError:
        generations = 0
    try:
        reconstructions = config_map.reconstructions
    except AttributeError:
        reconstructions = 1
    device_use = []
    if proc == 'cpu':
        cpu_use = [-1] * reconstructions
        if no_runs > 1:
            for _ in range(no_runs):
                device_use.append(cpu_use)
        else:
            device_use = cpu_use
    else:
        try:
            devices = config_map.device
        except AttributeError:
            devices = [-1]

        if no_runs * reconstructions > 1:
            data_shape = ut.read_tif(exp_dirs_data[0][0]).shape
            device_use = get_gpu_use(devices, no_runs, reconstructions,
                                     data_shape)
        else:
            device_use = devices

    # start the interrupt process
    interrupt_process = Process(target=interrupt_thread, args=())
    interrupt_process.start()

    if no_runs == 1:
        if len(device_use) == 0:
            device_use = [-1]
        dir_data = exp_dirs_data[0]
        datafile = dir_data[0]
        dir = dir_data[1]
        if generations > 1:
            gen_rec.reconstruction(proc, conf_file, datafile, dir, device_use)
        elif reconstructions > 1:
            mult_rec.reconstruction(proc, conf_file, datafile, dir, device_use)
        else:
            rec.reconstruction(proc, conf_file, datafile, dir, device_use)
    else:
        if len(device_use) == 0:
            device_use = [[-1]]
        else:
            # check whether it is worth using the last chunk
            if proc != 'cpu' and len(device_use[0]) > len(device_use[-1]) * 2:
                device_use = device_use[0:-1]
        if generations > 1:
            r = 'g'
        elif reconstructions > 1:
            r = 'm'
        else:
            r = 's'
        q = Queue()
        for gpus in device_use:
            q.put((None, gpus))
        # index keeps track of the multiple directories
        index = 0
        processes = {}
        while index < no_runs:
            pid, gpus = q.get()
            if pid is not None:
                os.kill(pid, signal.SIGKILL)
                del processes[pid]
            datafile = exp_dirs_data[index][0]
            dir = exp_dirs_data[index][1]
            p = Process(target=rec_process,
                        args=(proc, conf_file, datafile, dir, gpus, r, q))
            p.start()
            processes[p.pid] = index
            index += 1

        # wait for the remaining processes to finish, then close the queue
        while len(processes.items()) > 0:
            pid, gpus = q.get()
            os.kill(pid, signal.SIGKILL)
            del processes[pid]
        q.close()

    interrupt_process.terminate()
    print('finished reconstruction')
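A hedged sketch of how manage_reconstruction might be driven from the command line. Only the function signature and the proc choices come from the code above; the module name in the import and the argparse wrapper are assumptions for illustration:

import argparse

# hypothetical import path - adjust to wherever manage_reconstruction actually lives
from reconstruction import manage_reconstruction

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('proc', choices=['cpu', 'cuda', 'opencl'],
                        help='processing library')
    parser.add_argument('experiment_dir',
                        help='directory where the experiment files are located')
    parser.add_argument('--rec_id', default=None,
                        help='optional id; selects <rec_id>_config_rec instead of config_rec')
    args = parser.parse_args()
    manage_reconstruction(args.proc, args.experiment_dir, args.rec_id)

if __name__ == '__main__':
    main()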
Example #6
 def get_raw_frame(self, filename):
     """
     Reads the raw data file and saves the frame as a class member.
     """
     try:
         self.raw_frame = ut.read_tif(filename)
     except Exception:
         print("problem reading raw file ", filename)
         raise
Example #7
def prep(fname, conf_info):
    """
    This function formats data for reconstruction using the configured parameters. The preparation consists of the following steps:
    1. removing the "aliens" - aliens are areas that are an effect of interference. The area is set manually in a configuration file after inspecting the data. It can also be a mask file with the same dimensions as the data.
    2. clearing the noise - the values below an amplitude threshold are set to zero
    3. taking the square root of the amplitudes
    4. cropping and padding - if the adjust_dimensions value is negative in any dimension, the array is cropped in this dimension.
    The cropping is followed by padding in the dimensions that have a positive adjustment. After cropping/padding, the dimensions
    are adjusted further to the smallest sizes supported by the opencl library (multiples of 2, 3, and 5). A worked example of how the adjustment list is split into per-dimension pairs follows this function.
    5. centering - finding the greatest amplitude and locating it at the center of the new array. If center_shift is defined, the center will be shifted accordingly.
    6. binning - adding amplitudes of several consecutive points. Binning can be done in any dimension.
    The modified data is then saved in the data directory as data.tif.
    Parameters
    ----------
    fname : str
        tif file containing raw data
    conf_info : str
        experiment directory or configuration file. If it is a directory, "conf/config_data" will be
        appended to determine the configuration file
    Returns
    -------
    nothing
    """
    
    # The data was transposed when saved in tif format so that ImageJ shows the right orientation
    data = ut.read_tif(fname)

    if os.path.isdir(conf_info):
        experiment_dir = conf_info
        conf = os.path.join(experiment_dir, 'conf', 'config_data')
        # if the experiment contains separate scan directories
        if not os.path.isfile(conf):
            base_dir = os.path.abspath(os.path.join(experiment_dir, os.pardir))
            conf = os.path.join(base_dir, 'conf', 'config_data')
    else:
        #assuming it's a file
        conf = conf_info
        experiment_dir = None

    # verify the configuration file
    if not ver.ver_config_data(conf):
        return

    try:
        config_map = ut.read_config(conf)
        if config_map is None:
            print ("can't read configuration file")
            return
    except Exception:
        print ('Please check the configuration file ' + conf + '. Cannot parse')
        return

    try:
        data_dir = config_map.data_dir
    except AttributeError:
        data_dir = 'data'
        if experiment_dir is not None:
            data_dir = os.path.join(experiment_dir, data_dir)
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)

    try:
        data = at.remove_aliens(data, config_map, data_dir)
    except AttributeError:
        pass
    except Exception as e:
        print ('exiting, error in aliens removal ', str(e))
        return

    try:
        amp_threshold = config_map.amp_threshold
    except AttributeError:
        print ('define amplitude threshold. Exiting')
        return

    # zero out the noise
    prep_data = np.where(data <= amp_threshold, 0, data)

    # square root data
    prep_data = np.sqrt(prep_data)

    try:
        crops_pads = config_map.adjust_dimensions
        # the adjust_dimensions parameter list holds an adjustment for each direction. Append 0s if it is shorter.
        if len(crops_pads) < 6:
            for _ in range (6 - len(crops_pads)):
                crops_pads.append(0)
    except AttributeError:
        # the size still has to be adjusted to the opencl supported dimension
        crops_pads = (0, 0, 0, 0, 0, 0)
    # adjust the size, either pad with 0s or crop array
    pairs = []
    for i in range(len(crops_pads) // 2):
        pair = crops_pads[2*i:2*i+2]
        pairs.append(pair)

    prep_data = ut.adjust_dimensions(prep_data, pairs)
    if prep_data is None:
        print('check "adjust_dimensions" configuration')
        return

    try:
        center_shift = config_map.center_shift
        print ('shift center')
        prep_data = ut.get_centered(prep_data, center_shift)
    except AttributeError:
        prep_data = ut.get_centered(prep_data, [0,0,0])

    try:
        binsizes = config_map.binning
        try:
            bins = []
            for binsize in binsizes:
                bins.append(binsize)
            filler = len(prep_data.shape) - len(bins)
            for _ in range(filler):
                bins.append(1)
            prep_data = ut.binning(prep_data, bins)
        except Exception:
            print ('check "binning" configuration')
    except AttributeError:
        pass

    # save data
    data_file = os.path.join(data_dir, 'data.tif')
    ut.save_tif(prep_data, data_file)
    print ('data ready for reconstruction, data dims:', prep_data.shape)
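A short worked example of step 4 from the docstring above, showing how a six-value adjust_dimensions list is split into per-dimension (before, after) pairs before being passed to ut.adjust_dimensions; negative values crop and positive values pad. The numbers are made up for illustration:

# hypothetical adjust_dimensions value: crop 10 on each side of the first dimension,
# leave the second dimension unchanged, pad 13 on each side of the third
crops_pads = [-10, -10, 0, 0, 13, 13]
pairs = [crops_pads[2 * i:2 * i + 2] for i in range(len(crops_pads) // 2)]
print(pairs)   # [[-10, -10], [0, 0], [13, 13]]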