Example #1
0
    def from_hdf_file(cls, cine_base_path, h5_fname):
        '''Sets up object to process data based on MD in an hdf file.

        Parameters
        ----------
        cine_base_path : str
            Base path used to locate the raw cine file named in the
            hdf file's attributes.
        h5_fname : FilePath
            Path of the hdf file; joined with '/' to form the full name.

        Returns
        -------
        tuple
            ``(self, seed_curve)`` where ``seed_curve`` is the
            `infra.SplineCurve` read back from the file.
        '''
        self = cls()
        lc_req_args = ['tck0', 'tck1', 'tck2']
        h5_req_args = ['cine_path', 'cine_fname']
        # Context manager guarantees the hdf file is closed even when
        # validation or curve reconstruction raises (the old code leaked
        # the handle on any exception before the explicit close()).
        with h5py.File('/'.join(h5_fname), 'r') as tmp_file:
            keys_lst = list(tmp_file.attrs.keys())
            cls._verify_params(keys_lst,
                               extra_req=(lc_req_args + h5_req_args))

            self.params = dict(tmp_file.attrs)

            # the spline knots are consumed by SplineCurve.from_hdf below,
            # so they do not belong in the free-parameter dict
            for k in lc_req_args:
                del self.params[k]

            self.cine_fname = FilePath(cine_base_path,
                                       self.params.pop('cine_path'),
                                       self.params.pop('cine_fname'))
            self.cine_ = cine.Cine('/'.join(self.cine_fname))

            # use the stored background image if present, otherwise
            # regenerate it from the cine
            if 'bck_img' in tmp_file:
                self.bck_img = tmp_file['bck_img'][:]
            else:
                self.bck_img = infra.gen_bck_img('/'.join(self.cine_fname))

            seed_curve = infra.SplineCurve.from_hdf(tmp_file)

        return self, seed_curve
Example #2
0
def get_h5_lst(base_path, search_path):
    '''Recursively returns all h5 files below base_path/search_path.

    Parameters
    ----------
    base_path : str
        Root directory; stripped from the front of each result's
        relative-path component.
    search_path : str
        Sub-directory (relative to ``base_path``) to walk.

    Returns
    -------
    list of FilePath
        One entry per ``.h5`` file, sorted by file name.
    '''
    h5names = []
    for dirpath, _dirnames, fnames in os.walk(base_path + '/' + search_path):
        h5names.extend(
            FilePath(base_path, dirpath[len(base_path) + 1:], f)
            # match the extension exactly; the old substring test
            # ('h5' in f) also caught names that merely contain "h5"
            for f in fnames if f.endswith('.h5'))
    h5names.sort(key=lambda x: x[-1])

    return h5names
Example #3
0
def get_cine_hashes(base_path, search_path):
    '''Returns all paths and cine hash values under the search path.

    Parameters
    ----------
    base_path : str
        Root directory; stripped from the front of each result's
        relative-path component.
    search_path : str
        Sub-directory (relative to ``base_path``) to walk.

    Returns
    -------
    list of tuple
        ``(FilePath, hash)`` pairs, sorted by file name.
    '''
    cine_fnames = []
    for dirpath, _dirnames, fnames in os.walk(base_path + '/' + search_path):
        cine_fnames.extend(
            FilePath(base_path, dirpath[len(base_path) + 1:], f)
            # match the extension exactly; the old substring test
            # ('cine' in f) also caught names that merely contain "cine"
            for f in fnames if f.endswith('.cine'))
    cine_fnames.sort(key=lambda x: x[-1])
    # opening each cine computes its content hash
    cine_hash = [cine.Cine(cn.format).hash for cn in cine_fnames]

    return list(zip(cine_fnames, cine_hash))
Example #4
0
def get_split_rm(base_path):
    """Collect 'RM_*' result files below *base_path*, grouped by key.

    Parameters
    ----------
    base_path : str
        Root directory to walk recursively.

    Returns
    -------
    defaultdict(list)
        Maps the ``{key}`` field of each matching file name to a list of
        ``(start_f, end_f, FilePath)`` tuples sorted by start frame.
    """
    fmt = 'RM_{key}_{start_f:d}-{end_f:d}_{name}.h5'
    res_dict = defaultdict(list)
    for dirpath, _dirnames, fnames in os.walk(base_path):
        for ff in fnames:
            if not ff.startswith('RM'):
                continue
            rr = parse.parse(fmt, ff)
            # parse.parse returns None when the name starts with 'RM'
            # but does not match the template; the old code crashed
            # with a TypeError on such files
            if rr is None:
                continue

            res_dict[rr['key']].append(
                (rr['start_f'], rr['end_f'],
                 FilePath(base_path, dirpath[len(base_path) + 1:], ff)))

    # sort each group in place by (start_f, end_f, ...)
    for v in res_dict.values():
        v.sort()

    return res_dict
Example #5
0
File: db.py  Project: tacaswell/leidenfrost
    def get_h5_list(self, cine_hash):
        """Return the useful processing outputs for one cine.

        Parameters
        ----------
        cine_hash : str
           cine hash of interest

        Returns
        -------
        list of tuple
            ``(FilePath, in_frame, out_frame)`` entries, most recent
            first; empty when no useful processing run exists.
        """
        query = {'cine': cine_hash, 'useful': True}
        cursor = self.coll_dict['proc'].find(query)
        if cursor.count() == 0:
            return []

        # newest runs first
        cursor.sort('start_time_stamp', pymongo.DESCENDING)

        results = []
        for doc in cursor:
            out_path = FilePath.from_db_dict(doc['out_file'],
                                             self.i_disk_dict)
            results.append((out_path, doc['in_frame'], doc['out_frame']))
        return results
Example #6
0
    # Pair every (cine FilePath, cine hash) found under each base path
    # with the output FilePath template its results should be written to.
    cine_fnames = []
    for cine_base_path in args.cine_base_path:
        # hdf output defaults to living beside the cine data
        if args.hdf_base_path:
            hdf_base_path = args.hdf_base_path
        else:
            hdf_base_path = cine_base_path

        # default search location within the base path
        if args.cine_search_path:
            search_path = args.cine_search_path
        else:
            search_path = 'leidenfrost'

        cines = lffh.get_cine_hashes(cine_base_path, search_path)
        # NOTE(review): the template's fname component is empty --
        # presumably the worker fills in the real name; confirm in worker.
        cine_fnames.extend(
            zip(cines,
                itertools.repeat(FilePath(hdf_base_path, args.hdf_path, ''))))

    # don't start more processes than we could ever use
    N = min(N, len(cine_fnames))

    # set up queues
    WORK_QUEUE = JoinableQueue()
    PROCS = [worker(WORK_QUEUE) for j in range(N)]
    # start workers
    for p in PROCS:
        p.start()

    # put the work in the queue
    for (cf, ch), hdf_fname_template in cine_fnames:
        print('adding {}/{} to work queue'.format(cf.path, cf.fname))
        WORK_QUEUE.put((cf, ch, hdf_fname_template))
Example #7
0
 def path_template(self):
     """Return a FilePath template rooted at this object's base path.

     The path component is made relative to ``base_path`` and the
     file-name component is left empty for the caller to fill in.
     """
     base = self.base_path
     rel_path = self.path[len(base) + 1:]
     return FilePath(base, rel_path, '')
Example #8
0
def change_base_path(fpath, new_base_path):
    """Return a new FilePath identical to *fpath* but rooted at
    *new_base_path*; the relative path and file name are unchanged.
    """
    return FilePath(new_base_path,
                    fpath.path,
                    fpath.fname)
Example #9
0
    def __init__(self,
                 fname,
                 cine_base_path=None,
                 mode='r',
                 i_disk_dict=None,
                 *args,
                 **kwargs):
        """Open an existing hdf backend file and wire up its resources.

        Parameters
        ----------
        fname : `Leidenfrost.FilePath`
            Fully qualified path to the hdf file to open
        cine_base_path : str or `None`
            If not `None`, base path used to locate and open the raw
            cine file named in the hdf file's attributes
        mode : {'r', 'rw'}
            Open read-only ('r', default) or read-write ('rw')
        i_disk_dict : dict or `None`
            Optional disk-lookup mapping forwarded to the database layer
        """
        self._iter_cur_item = -1
        self.file = None

        if mode == 'rw':
            self.file = h5py.File(fname.format, 'r+')
            self.writeable = True
        else:
            self.file = h5py.File(fname.format, 'r')
            self.writeable = False

        # one 'frame_*' group per processed frame
        self.num_frames = len([k for k in self.file.keys() if 'frame' in k])
        self._prams = HdfBEPram(False, True)
        self.proc_prams = dict(self.file.attrs)

        if cine_base_path is not None:
            self.cine_fname = FilePath(cine_base_path,
                                       self.file.attrs['cine_path'],
                                       self.file.attrs['cine_fname'])
            self.cine = cine.Cine('/'.join(self.cine_fname))
        else:
            self.cine_fname = None
            self.cine = None
        try:
            # hard code the mongodb
            self.db = db.LFmongodb(i_disk_dict=i_disk_dict)
        except Exception:
            # DB access is best-effort: fall back to running without it.
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            print('gave up on connecting to the DB')
            self.db = None

        self._procid = None
        if self.db is not None:
            self._procid = self.db.get_proc_id(fname)

        # background image: try the DB cache first
        self.bck_img = None
        if self.db is not None and self.cine is not None:
            self.bck_img = self.db.get_background_img(self.cine.hash)
        # if that fails too, run it
        if self.bck_img is None and self.cine is not None:
            self.gen_back_img()
            # if we have a data base, shove in the data
            if self.db is not None and self.bck_img is not None:
                self.db.store_background_img(self.cine.hash, self.bck_img)

        # frame-group name width changed at file-format version 0.1.5
        if 'ver' not in self.file.attrs or self.file.attrs['ver'] < b'0.1.5':
            self._frame_str = 'frame_{:05d}'
        else:
            self._frame_str = 'frame_{:07d}'

        self._cal_val = None
        self._cal_val_unit = None
        self._first_frame = None
        self._last_frame = None