Example #1
    def _write_segment(self, segment):
        # Note that an NWB Epoch corresponds to a Neo Segment, not to a Neo Epoch.
        nwb_epoch = nwb_utils.create_epoch(self._file, segment.name,
                                           time_in_seconds(segment.t_start),
                                           time_in_seconds(segment.t_stop))
        for i, signal in enumerate(chain(segment.analogsignals, segment.irregularlysampledsignals)):
            self._write_signal(signal, nwb_epoch, i)
        self._write_spiketrains(segment.spiketrains, segment)
        for i, event in enumerate(segment.events):
            self._write_event(event, nwb_epoch, i)
        for i, neo_epoch in enumerate(segment.epochs):
            self._write_neo_epoch(neo_epoch, nwb_epoch, i)
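The writer above loops over the analog signals, spike trains, events and epochs attached to a Neo Segment and hands each one to a dedicated helper. For context, a minimal sketch of building such a Segment with the neo and quantities packages; the names and values below are illustrative, not taken from the original source:

import numpy as np
import quantities as pq
from neo.core import Segment, AnalogSignal, SpikeTrain, Event

seg = Segment(name="segment_000")
# one analog signal: 1000 samples, single channel, sampled at 1 kHz
seg.analogsignals.append(
    AnalogSignal(np.random.randn(1000, 1), units="mV",
                 sampling_rate=1.0 * pq.kHz, t_start=0.0 * pq.s))
# a spike train and an event marker within the same time window
seg.spiketrains.append(
    SpikeTrain([0.1, 0.35, 0.7] * pq.s, t_start=0.0 * pq.s, t_stop=1.0 * pq.s))
seg.events.append(Event(times=[0.5] * pq.s, labels=np.array(["cue"])))
# a writer like the one above would then be called once per segment:
# self._write_segment(seg)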
def create_trials(orig_h5, nuo):
    trial_id = orig_h5["trialIds/trialIds"].value
    trial_t = orig_h5["trialStartTimes/trialStartTimes"].value
    good_trials = orig_h5["trialPropertiesHash/value/4/4"].value
    ignore_ivals_start = [time for (time, good_trial) in zip(trial_t, good_trials) if good_trial == 0]
    # trial stop isn't stored. assume that it's twice the duration of other
    #   trials -- padding on the high side shouldn't matter
    ival = (trial_t[-1] - trial_t[0]) / (len(trial_t) - 1)
    trial_t = np.append(trial_t, trial_t[-1] + 2 * ival)
    ignore_ivals_stop = [time for (time, good_trial) in zip(trial_t[1:], good_trials) if good_trial == 0]
    ignore_intervals = [ignore_ivals_start, ignore_ivals_stop]
    # for i in range(10):  # limited version, to reduce run time
    for i in range(len(trial_id)):  # use a small range, e.g. range(5), to reduce run time
        tid = trial_id[i]
        trial = "Trial_%d%d%d" % (int(tid / 100), int(tid / 10) % 10, tid % 10)
        # print trial # DEBUG
        start = trial_t[i]
        stop = trial_t[i + 1]
        epoch = ut.create_epoch(nuo, trial, start, stop)
        tags = []
        if good_trials[i] == 1:
            tags.append("Good trial")
        else:
            tags.append("Non-performing")
        for j in range(len(epoch_tags[trial])):
            tags.append(epoch_tags[trial][j])
        epoch.set_dataset("tags", tags)
        # keep with tradition and create a units field, even if it's empty
        if trial not in epoch_units:
            units = []
        else:
            units = epoch_units[trial]
        epoch.set_custom_dataset("units", units)
        # raw data path
        raw_path = "descrHash/value/%d" % (trial_id[i])
        # try:
        raw_file = parse_h5_obj(orig_h5[raw_path])[0]
        if len(raw_file) == 1:
            raw_file = "na"
        else:
            raw_file = str(raw_file)
        # except KeyError:
        #         raw_path = "descrHash/value/%d/" %(trial_id[i])
        #         try:
        #             raw_file_1 = parse_h5_obj(orig_h5[raw_path + "/1"])[0]
        #         except IndexError:
        #             raw_file_1 = ''
        #         try:
        #             raw_file_2 = parse_h5_obj(orig_h5[raw_path + "/2"])[0]
        #         except IndexError:
        #             raw_file_2 = ''
        #         raw_file = str(raw_file_1) + " and " + str(raw_file_2)
        #     except IndexError:
        #         raw_file = ''
        #        epoch.set_dataset("description", "Raw Voltage trace data files used to acuqire spike times data: " + raw_file + "\n\
        # ignore intervals: mark start and stop times of bad trials when mice are not performing")
        epoch.set_dataset("description", "Raw Voltage trace data files used to acuqire spike times data: " + raw_file)
        # epoch.set_ignore_intervals(ignore_intervals)
        # collect behavioral data
        ts = "/stimulus/presentation/auditory_cue"
        ut.add_epoch_ts(epoch, start, stop, "auditory_cue", ts)
        ts = "/stimulus/presentation/pole_in"
        ut.add_epoch_ts(epoch, start, stop, "pole_in", ts)
        ts = "/stimulus/presentation/pole_out"
        ut.add_epoch_ts(epoch, start, stop, "pole_out", ts)
        ts = "/acquisition/timeseries/lick_trace"
        ut.add_epoch_ts(epoch, start, stop, "lick_trace", ts)
        ts = "/stimulus/presentation/aom_input_trace"
        ut.add_epoch_ts(epoch, start, stop, "aom_input_trace", ts)
        ts = "/stimulus/presentation/simple_optogentic_stimuli"
        # ts = "/stimulus/presentation/laser_power"
        # DEBUG -- don't add this right now -- too many samples make building file take too long
        # epoch.add_timeseries("laser_power", ts)
        ut.add_epoch_ts(epoch, start, stop, "simple_optogentic_stimuli", ts)
Example #4
posa_ts.set_dataset("timestamps", pos_atime)
posa_ts.set_dataset("data", pos_a, attrs={"resolution": 0.001})
# iface.add_timeseries(posa_ts)

posb_ts = iface.make_group("<SpatialSeries>", "LED 2")
posb_ts.set_attr(
    "description",
    "LED 2, as reported in original data. Physical position of LED (eg, left, front, etc) not known"
)
posb_ts.set_dataset("reference_frame", "Top or room, as seen from camera")
posb_ts.set_dataset("timestamps", pos_btime)
posb_ts.set_dataset("data", pos_b, attrs={"resolution": 0.001})
# iface.add_timeseries(posb_ts)

pos_ts = iface.make_group("<SpatialSeries>", "position")
pos_ts.set_attr("description", "Position intermediate to LED1 and LED2")
pos_ts.set_dataset("reference_frame", "Top or room, as seen from camera")
pos_ts.set_dataset("timestamps", postime)
pos_ts.set_dataset("data", pos, attrs={"resolution": 0.001})
# iface.add_timeseries(pos_ts)
# mod.finalize();

epoch = ut.create_epoch(buz, "Linear track", 0, postime[-1] + 60)
epoch.set_dataset(
    "description",
    "This folder would normally be one of several (eg, sleep_1, enclosure_1, sleep_2, track_1, etc) and each would provide windows into the different acquisition and stimulus streams. Since only one epoch (linear track) was imported into the sample NWB file, the functionality of epochs is not visible"
)
# epoch.finalize()

buz.close()
posa_ts = iface.make_group("<SpatialSeries>", "LED 1")
posa_ts.set_attr("description", "LED 1, as reported in original data. Physical position of LED (eg, left, front, etc) not known")
posa_ts.set_dataset("reference_frame", "Top or room, as seen from camera")
posa_ts.set_dataset("timestamps", pos_atime)
posa_ts.set_dataset("data", pos_a, attrs={"resolution": 0.001})
# iface.add_timeseries(posa_ts)

posb_ts = iface.make_group("<SpatialSeries>", "LED 2")
posb_ts.set_attr("description","LED 2, as reported in original data. Physical position of LED (eg, left, front, etc) not known")
posb_ts.set_dataset("reference_frame", "Top or room, as seen from camera")
posb_ts.set_dataset("timestamps", pos_btime)
posb_ts.set_dataset("data", pos_b, attrs={"resolution":0.001})
# iface.add_timeseries(posb_ts)

pos_ts = iface.make_group("<SpatialSeries>", "position")
pos_ts.set_attr("description","Position intermediate to LED1 and LED2")
pos_ts.set_dataset("reference_frame", "Top or room, as seen from camera")
pos_ts.set_dataset("timestamps", postime)
pos_ts.set_dataset("data", pos, attrs={"resolution": 0.001})
# iface.add_timeseries(pos_ts)
# mod.finalize();

epoch = ut.create_epoch(buz, "Linear track", 0, postime[-1] + 60)
epoch.set_dataset("description", "This folder would normally be one of several (eg, sleep_1, enclosure_1, sleep_2, track_1, etc) and each would provide windows into the different acquisition and stimulus streams. Since only one epoch (linear track) was imported into the sample NWB file, the functionality of epochs is not visible")
# epoch.finalize()

buz.close()
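The arrays pos_a, pos_b, pos and their timestamps are computed earlier in the original script, outside this excerpt, so the derivation of the intermediate "position" track is not shown. A minimal sketch, assuming it is simply the sample-wise midpoint of the two LED tracks on a shared camera timebase (all names and values below are illustrative):

import numpy as np

pos_atime = np.arange(0.0, 10.0, 1.0 / 30.0)   # camera timestamps in seconds; rate chosen arbitrarily
pos_a = np.random.rand(len(pos_atime), 2)       # LED 1 (x, y) positions
pos_b = pos_a + 0.02                            # LED 2, offset from LED 1
pos_btime = pos_atime                           # assume both LEDs share one set of timestamps

pos = (pos_a + pos_b) / 2.0                     # point midway between the two LEDs
postime = pos_atime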


Example #6
data = np.linspace(1.0, 100.0, 6 * 4 * 1000).reshape(24, 1000)
times = np.linspace(0.0, 60.0 * 2.0, 1000)

# create an instance of the TrajectorySeries extension type; the group will be named "hand_position"
# and stored in /acquisition/timeseries

nts = f.make_group("<TrajectorySeries>", "hand_position", path="/acquisition/timeseries",
    attrs={"source": "source of data for my new timeseries"} )
nts.set_dataset("data", data, attrs={"conversion": 1.0, "resolution": 0.001, 
    "unit": "meter and radian; see definition of dimension trajectories in format specification"})
nts.set_dataset("timestamps", times)

# specify meaning of variables
reference_frame = ("Meaning of measurement values in array data, (e.g. sensor s1, s2, s3, s4; "
    "x, y, z, pitch, roll, yaw) should be described here")
nts.set_dataset("reference_frame", reference_frame)

# Add in sample epochs to specify the trials
trial_times = [[0.5, 1.5], [2.5, 3.0], [3.5, 4.0]]

for trial_num in range(len(trial_times)):
    trial_name = "Trial_%03i" % (trial_num+1)
    start_time, stop_time = trial_times[trial_num]
    ep = utils.create_epoch(f, trial_name, start_time, stop_time)
    utils.add_epoch_ts(ep, start_time, stop_time, "hand_position", nts)


# All done.  Close the file
f.close()
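To sanity-check what this example wrote, the file can be reopened with plain h5py. The sketch below assumes the NWB 1.x layout, in which each epoch created by utils.create_epoch becomes a group under /epochs with scalar start_time and stop_time datasets; the file name is a placeholder, since the creation of f happens outside this excerpt:

import h5py

with h5py.File("trajectory_series_example.nwb", "r") as nwb:  # placeholder file name
    for name, grp in nwb["epochs"].items():
        if not isinstance(grp, h5py.Group):
            continue  # skip any non-group entries under /epochs
        print(name, float(grp["start_time"][()]), float(grp["stop_time"][()]))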

Example #7
def create_trials(orig_h5, nuo):
    trial_id = orig_h5["trialIds/trialIds"].value
    trial_t = orig_h5["trialStartTimes/trialStartTimes"].value
    good_trials = orig_h5['trialPropertiesHash/value/4/4'].value
    ignore_ivals_start = [time for (time, good_trial) in zip(trial_t,good_trials) if good_trial == 0]
    # trial stop isn't stored. assume that it's twice the duration of other
    #   trials -- padding on the high side shouldn't matter
    ival = (trial_t[-1] - trial_t[0]) / (len(trial_t) - 1)
    trial_t = np.append(trial_t, trial_t[-1] + 2*ival)
    ignore_ivals_stop = [time for (time, good_trial) in zip(trial_t[1:],good_trials) if good_trial == 0]
    ignore_intervals = [ignore_ivals_start, ignore_ivals_stop]
    # for i in range(10):  # limited version, to reduce run time
    for i in range(len(trial_id)):  # use a small range, e.g. range(5), to reduce run time
        tid = trial_id[i]
        trial = "Trial_%d%d%d" % (int(tid/100), int(tid/10)%10, tid%10)
        # print trial # DEBUG
        start = trial_t[i]
        stop = trial_t[i+1]
        epoch = ut.create_epoch(nuo, trial, start, stop)
        tags = []
        if good_trials[i] == 1:
            tags.append("Good trial")
        else:
            tags.append("Non-performing")
        for j in range(len(epoch_tags[trial])):
            tags.append(epoch_tags[trial][j])
        epoch.set_dataset("tags", tags)
        # keep with tradition and create a units field, even if it's empty
        if trial not in epoch_units:
            units = []
        else:
            units = epoch_units[trial]
        epoch.set_custom_dataset("units", units)
        # raw data path
        raw_path = "descrHash/value/%d" % (trial_id[i])
        # try:
        raw_file = parse_h5_obj(orig_h5[raw_path])[0]
        if len(raw_file) == 1:
            raw_file = 'na'
        else:
            raw_file = str(raw_file)
        # except KeyError:
        #         raw_path = "descrHash/value/%d/" %(trial_id[i])
        #         try:
        #             raw_file_1 = parse_h5_obj(orig_h5[raw_path + "/1"])[0]
        #         except IndexError:
        #             raw_file_1 = ''
        #         try:
        #             raw_file_2 = parse_h5_obj(orig_h5[raw_path + "/2"])[0]
        #         except IndexError:
        #             raw_file_2 = ''
        #         raw_file = str(raw_file_1) + " and " + str(raw_file_2)
        #     except IndexError:
        #         raw_file = ''
#        epoch.set_dataset("description", "Raw Voltage trace data files used to acuqire spike times data: " + raw_file + "\n\
# ignore intervals: mark start and stop times of bad trials when mice are not performing")
        epoch.set_dataset("description", "Raw Voltage trace data files used to acuqire spike times data: " + raw_file)
        #epoch.set_ignore_intervals(ignore_intervals)
        # collect behavioral data
        ts = "/stimulus/presentation/auditory_cue"
        ut.add_epoch_ts(epoch, start, stop, "auditory_cue", ts)
        ts = "/stimulus/presentation/pole_in"
        ut.add_epoch_ts(epoch, start, stop, "pole_in", ts)
        ts = "/stimulus/presentation/pole_out"
        ut.add_epoch_ts(epoch, start, stop, "pole_out", ts)
        ts = "/acquisition/timeseries/lick_trace"
        ut.add_epoch_ts(epoch, start, stop,"lick_trace", ts)
        ts = "/stimulus/presentation/aom_input_trace"
        ut.add_epoch_ts(epoch, start, stop,"aom_input_trace", ts)
        ts = "/stimulus/presentation/simple_optogentic_stimuli"
        #ts = "/stimulus/presentation/laser_power"
# DEBUG -- don't add this right now -- too many smaples make building file take too long
        #epoch.add_timeseries("laser_power", ts)
        ut.add_epoch_ts(epoch, start, stop, "simple_optogentic_stimuli", ts)
        img.set_custom_dataset("meister_y", y)
        img.set_dataset("timestamps", timestamps)
        img.set_dataset("num_samples", len(timestamps))
        file_name, dataset_name = fetch_stimulus_link(seed, x, y, dx, dy)
        file_name_base = file_name[len(OUTPUT_DIR):]  # strip OUTPUT_DIR from front of name
        #- img.set_data_as_remote_link(file_name, dataset_name)
        link_str = "extlink:%s,%s" % (file_name_base, dataset_name)
        img.set_dataset("data", link_str) # special string, causes creation of external link
        img.set_dataset("bits_per_pixel", 8)
        img.set_dataset("format", "raw")
        img.set_dataset("dimension", [x/dx, y/dy])
        img.set_attr("description", "type = " + str(type_s) + "; seed = " + str(seed))
        img.set_attr("comments", "Based on ran1.bin. Pixel values are 255 for light, 0 for dark")
        # create epoch
        stim_end = timestamps[-1] + 1
        epoch = ut.create_epoch(bfile, "stim_%d"%(i+1), stim_offset[i], stim_end)
        stim_start = stim_offset[i]
        ts_path = "/stimulus/presentation/"+img.name
        ut.add_epoch_ts(epoch, stim_start, stim_end, "stimulus", ts_path)

    # create module 'Cells' for the spikes    
    mod_name = "Cells"
    mod = bfile.make_group("<Module>", mod_name)
    mod.set_attr("description", "Spike times for the individual cells and stimuli")
    mod.set_attr("source", "Data as reported in the original file")
    # create interfaces
    spk_times_iface = mod.make_group("UnitTimes")
    spk_times_iface.set_attr("source", "Data as reported in the original crcns file")
    # determine number of cells
    spikes_mat = mfile["spikes"]
    num_cells = spikes_mat.shape[0]
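The "extlink:%s,%s" string used above is, per the inline comment, a special value that makes the API create an HDF5 external link to the stimulus movie instead of copying it into the file. In plain h5py the same effect is obtained with h5py.ExternalLink; a minimal sketch with illustrative file and dataset names:

import h5py

with h5py.File("retina_stimulus_links.h5", "w") as f:
    # "data" points at a dataset stored in another HDF5 file rather than a local copy
    f["data"] = h5py.ExternalLink("stim_movie_1.h5", "/stimulus_frames")
    # dereferencing the link (e.g. f["data"][...]) requires the external file to exist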
Example #9
        #- img.set_data_as_remote_link(file_name, dataset_name)
        link_str = "extlink:%s,%s" % (file_name_base, dataset_name)
        img.set_dataset(
            "data",
            link_str)  # special string, causes creation of external link
        img.set_dataset("bits_per_pixel", 8)
        img.set_dataset("format", "raw")
        img.set_dataset("dimension", [x / dx, y / dy])
        img.set_attr("description",
                     "type = " + str(type_s) + "; seed = " + str(seed))
        img.set_attr(
            "comments",
            "Based on ran1.bin. Pixel values are 255 for light, 0 for dark")
        # create epoch
        stim_end = timestamps[-1] + 1
        epoch = ut.create_epoch(bfile, "stim_%d" % (i + 1), stim_offset[i],
                                stim_end)
        stim_start = stim_offset[i]
        ts_path = "/stimulus/presentation/" + img.name
        ut.add_epoch_ts(epoch, stim_start, stim_end, "stimulus", ts_path)

    # create module 'Cells' for the spikes
    mod_name = "Cells"
    mod = bfile.make_group("<Module>", mod_name)
    mod.set_attr("description",
                 "Spike times for the individual cells and stimuli")
    mod.set_attr("source", "Data as reported in the original file")
    # create interfaces
    spk_times_iface = mod.make_group("UnitTimes")
    spk_times_iface.set_attr("source",
                             "Data as reported in the original crcns file")
    # determine number of cells