Exemplo n.º 1
0
def test06_creation_iter():
    """Entries must be iterable in creation order, not alphabetical order."""
    fp = arf.open_file("test06", mode="a", driver="core", backing_store=False)
    # deliberately non-alphabetical so creation order differs from sorted order
    names = ('z', 'y', 'a', 'q', 'zzyfij')
    for entry_name in names:
        entry = arf.create_entry(fp, entry_name, 0)
        arf.create_dataset(entry, "dset", (), sampling_rate=1)
    assert_sequence_equal(arf.keys_by_creation(fp), names)
Exemplo n.º 2
0
def test02_create_datasets():
    """Yield one create_dataset check per (entry, dataset spec) pair.

    After all specs are yielded for an entry, verify the entry contains
    exactly the expected datasets by count and by name.
    """
    for entry_name in arf.keys_by_creation(fp):
        entry = fp[entry_name]
        for spec in datasets:
            yield create_dataset, entry, spec
        assert_equal(len(entry), len(datasets))
        assert_items_equal(entry.keys(), (spec['name'] for spec in datasets))
Exemplo n.º 3
0
def check_entry_consistency(arfp, entries=None, channels=None, predicate=any_type):
    """Check whether all entries in arfp have the required channels

    Raises a warning if units and sampling rates do not match across channels.

    entries - if not None, restrict to entries with supplied names
    channels - if not None, only check datasets with supplied names
    predicate - a predicate on dataset (e.g. arf.is_time_series)

    If consistent, returns
      [ [included entry names in order of creation],
        {
         channel_name: {'sampling_rate', 'units', 'channels'},
         ...
        }
      ]

    If not consistent across entries, logs an error and returns None. If
    sampling rate and units are not consistent within an entry, logs a warning.

    """
    log.info("checking entry consistency")
    # FIXME catch error when file does not track creation order
    entry_names = []
    channel_props = None
    for entry_name in arf.keys_by_creation(arfp):
        # BUG FIX: original used `entry_name in entries`, which skipped
        # exactly the entries the caller asked to include, inverting the
        # documented filter.
        if entries is not None and entry_name not in entries:
            continue
        entry = arfp[entry_name]
        props = channel_properties(entry, channels, predicate)
        # all channels within an entry must have the same sample count
        sample_counts = set(v.pop("samples") for v in props.values())
        if len(sample_counts) > 1:
            log.error("sample count differs across channels in entry %s", entry_name)
            return
        if channel_props is None:
            # the first included entry defines the reference channel layout
            channel_props = props
        elif props != channel_props:
            log.error("channels in entry %s do not match", entry_name)
            return
        entry_names.append(entry_name)
    return entry_names, channel_props
Exemplo n.º 4
0
}

archive = "/home/data/starlings"

if __name__ == "__main__":

    import argparse

    # FIX: removed a stray trailing `""` that was concatenated onto the
    # description literal (a leftover typo; the string value is unchanged).
    p = argparse.ArgumentParser(description="extract raw extracellular recordings from arf files")
    p.add_argument("unit", help="the name of the unit")
    p.add_argument("channel", help="the name of the channel to extract")

    args = p.parse_args()

    # locate the unit's data file via neurobank (local archive only)
    location = nbank.get(args.unit, local_only=True)
    # counts presentations per stimulus, used to number the output files
    stim_counter = collections.defaultdict(int)
    with h5.File(location, "r") as fp:
        # iterate entries in creation (recording) order
        for entry_name in arf.keys_by_creation(fp):
            entry = fp[entry_name]
            # name field of the first row of the entry's 'stimuli' table
            stim = entry['stimuli']['name', 0].decode('utf-8')
            if stim not in stimuli:
                print("%s -> skipping (%s)" % (entry_name, stim))
            else:
                stim_counter[stim] += 1
                outfile = "%s_%s_%d.wav" % (args.unit, stimuli[stim], stim_counter[stim])
                print("%s -> %s" % (entry_name, outfile))
                dset = entry[args.channel]
                sampling_rate = dset.attrs['sampling_rate']
                # write the raw samples out as WAV at the original rate/dtype
                with ewave.open(outfile, mode='w', sampling_rate=sampling_rate, dtype=dset.dtype) as ofp:
                    ofp.write(dset[:])