Example #1
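# Assumed context (not shown in this excerpt): new_SafeConfigParser,
# parse_param, all_keys, and Bunch are helpers defined at module level in
# the surrounding library.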
def cfg_to_bunch(cfg_file, section='', params_table=None):
    """Return session config info in Bunch (dictionary) form with interpolations
    from the master config settings. Perform full evaluation on parameters known
    here and leave subsequent evaluation downstream.
    """
    cp = new_SafeConfigParser()
    cp.read(cfg_file)
    sections = [section] if section else cp.sections()
    b = Bunch()
    if params_table is None:
        params_table = {}
    # Copy before merging so the caller's table is not mutated in place.
    params_table = dict(params_table)
    params_table.update(all_keys)
    for sec in sections:
        bsub = Bunch()
        opts = cp.options(sec)
        # Evaluate each option through the merged parameter table.
        param_pairs = [(o, parse_param(o, cp.get(sec, o), params_table))
                       for o in opts]
        bsub.update(param_pairs)
        b[sec] = bsub
    b.sections = sections
    return b
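A minimal usage sketch, assuming a hypothetical session.cfg with a single
[session] section; the returned Bunch behaves like a dictionary:

# session.cfg (hypothetical contents):
#   [session]
#   nchan = 64
#   rate = 1000.0
b = cfg_to_bunch('session.cfg', section='session')
print(b.sections)        # ['session']
print(b['session'])      # Bunch of parsed option/value pairs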
Example #2
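# Assumed context (not shown in this excerpt): Bunch, HDF5Bunch, and the
# shared-memory helper `shm` come from the surrounding library; the other
# imports below are standard.
import os
from contextlib import closing
from pickle import PickleError

import numpy as np
import tables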
def traverse_table(f,
                   path='/',
                   load=True,
                   scan=False,
                   shared_paths=(),
                   skip_stale_pickles=True,
                   attempt_reload=False):
    """Walk the nodes under `path` and stuff arrays into a bunch; recurse
    into this method whenever a sub-group is encountered.
    """
    from ecogdata.devices.load.file2data import FileLoader
    if not isinstance(f, tables.file.File):
        if load or scan:
            # If scan is True, load should be forced False here
            load = not scan
            with closing(tables.open_file(f, mode='r')) as f:
                return traverse_table(f,
                                      path=path,
                                      load=load,
                                      scan=scan,
                                      shared_paths=shared_paths,
                                      skip_stale_pickles=skip_stale_pickles,
                                      attempt_reload=attempt_reload)
        else:
            f = tables.open_file(f, mode='r')
            try:
                return traverse_table(f,
                                      path=path,
                                      load=load,
                                      scan=scan,
                                      shared_paths=shared_paths,
                                      skip_stale_pickles=skip_stale_pickles,
                                      attempt_reload=attempt_reload)
            except BaseException:
                # Close the file on any error before re-raising.
                f.close()
                raise
    # Load/scan modes build a plain in-memory Bunch; lazy mode wraps the
    # open file so the returned bunch can hold live node references.
    if load or scan:
        gbunch = Bunch()
    else:
        gbunch = HDF5Bunch(f)
    # Name of the current group (note that '/' splits to ('/', '')).
    p, g = os.path.split(path)
    if g == '':
        g = p
    nlist = f.list_nodes(path)
    for n in nlist:
        if isinstance(n, tables.Array):
            if load:
                if n.dtype.char == 'O':
                    # Object arrays are left unread; record a placeholder.
                    arr = 'Not loaded: ' + n.name
                elif '/'.join([path, n.name]) in shared_paths:
                    # Copy arrays flagged for sharing into shared memory.
                    arr = shm.shared_ndarray(n.shape)
                    arr[:] = n.read()
                else:
                    arr = n.read()
                if isinstance(arr, np.ndarray) and n.shape:
                    if arr.shape == (1, 1):
                        # Collapse 1x1 arrays to scalars; a scalar 0 is
                        # treated as a null value.
                        arr = arr[0, 0]
                        if arr == 0:
                            arr = None
                    else:
                        arr = arr.squeeze()
            else:
                arr = n
            gbunch[n.name] = arr
        elif isinstance(n, tables.VLArray):
            if load:
                try:
                    obj = n.read()[0]
                except (ModuleNotFoundError, PickleError):
                    # PicklingError and UnpicklingError are subclasses of
                    # PickleError, so both are covered here.
                    if not skip_stale_pickles:
                        raise
                    gbunch[n.name] = 'unloadable pickle'
                    continue
                # If it's a generic Bunch pickle, merge it into this bunch.
                if n.name == 'b_pickle':
                    gbunch.update(obj)
                else:
                    gbunch[n.name] = obj
            else:
                # Flag a non-trivial pickle without reading it; effectively
                # empty pickles (<= 32 bytes) are ignored.
                if n.name == 'b_pickle' and n.size_in_memory > 32:
                    gbunch[n.name] = 'unloaded pickle'
        elif isinstance(n, tables.Group):
            gname = n._v_name
            # Guard against descending into the current group: a holdover
            # from walk_nodes(), which yields the group itself.
            if gname == g:
                continue
            if gname == '#refs#':
                # Skip the internal reference group of MATLAB v7.3 files.
                continue
            subbunch = traverse_table(f,
                                      path='/'.join([path, gname]),
                                      load=load,
                                      scan=scan,
                                      shared_paths=shared_paths,
                                      skip_stale_pickles=skip_stale_pickles,
                                      attempt_reload=attempt_reload)
            gbunch[gname] = subbunch

        else:
            gbunch[n.name] = 'Not Loaded!'

    # Copy the group's HDF5 attributes into the bunch.
    this_node = f.get_node(path)
    for attr in this_node._v_attrs._f_list():
        gbunch[attr] = this_node._v_attrs[attr]

    # Optionally re-run any FileLoader objects found among the values and
    # merge in whatever new keys the fresh dataset provides.
    loaders = [v for v in gbunch.values() if isinstance(v, FileLoader)]
    if attempt_reload and loaders:
        for loader in loaders:
            print('Attempting load from {}'.format(loader.primary_data_file))
            dataset = loader.create_dataset()
            new_keys = set(dataset.keys()) - set(gbunch.keys())
            for k in new_keys:
                gbunch[k] = dataset.pop(k)
    return gbunch
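A minimal usage sketch, assuming a hypothetical recording.h5 file. Passing a
path rather than an open tables.File exercises the open/close branch at the
top of the function:

# Default: read every array into an in-memory Bunch; the file is closed
# before returning.
data = traverse_table('recording.h5')

# Scan only: capture structure and attributes without reading array data.
layout = traverse_table('recording.h5', load=False, scan=True)

# Lazy mode: keep the file open; arrays stay as live nodes in an HDF5Bunch.
live = traverse_table('recording.h5', load=False)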