def calculate_cell_means(project):
    """Collect the peak membrane current for every cell and stimulus step
    amplitude across a project, then reduce each group of peaks to its mean."""
    cells = {}
    epochs = []
    for expt in iterable(project.getExperiments()):
        for epoch in iterable(expt.getEpochs()):
            epochs.append(epoch)
            cell = epoch.getInputSources().get('cell')
            if len(list(iterable(epoch.getMeasurements()))) > 0:
                m = epoch.getMeasurement('Membrane current')
                data = as_data_frame(m)
                peak = max(data['current']).item()

                # Key cells by label, identifier, and URI so distinct cells
                # with the same label cannot collide
                k = "{0} ({1})  {2}".format(cell.getLabel(), cell.getIdentifier(), cell.getURI().toString())
                peaks = cells.get(k, {})

                # Group peaks by the protocol's step amplitude
                pk = "{0} mV".format(epoch.getProtocolParameters().get('step_amplitude_mV'))
                peaks.setdefault(pk, []).append(peak)
                cells[k] = peaks

    # Replace each list of peaks with its mean (in place)
    for (k, v) in cells.iteritems():
        for ck, cv in v.iteritems():
            v[ck] = sp.mean(cv)

    return (cells, epochs)
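
# A minimal usage sketch for calculate_cell_means. The DataContext `ctx` and
# the `getProjects()` lookup are assumptions for illustration, not part of the
# snippet above.
def print_cell_means(ctx):
    # Hypothetical: analyze the first project visible to the authenticated user
    project = list(iterable(ctx.getProjects()))[0]
    cell_means, analyzed_epochs = calculate_cell_means(project)
    for cell_key, means_by_amplitude in cell_means.iteritems():
        print("{0}: {1}".format(cell_key, means_by_amplitude))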
    def should_set_device_for_analog_signals(self):
        for segment, epoch in zip(self.block.segments, iterable(self.epoch_group.getEpochs())):
            measurements = dict(((m.getName(), m) for m in iterable(epoch.getMeasurements())))

            for signal in segment.analogsignals:
                m = measurements[signal.name]
                assert_equals({"amplifier.channels.{}".format(signal.annotations['channel_index'])},
                              set(iterable(m.getDevices())))
def check_measurements(segment, epoch):
    assert_equals(len(segment.analogsignals), len(list(iterable(epoch.getMeasurements()))))

    measurements = dict(((m.getName(), m) for m in iterable(epoch.getMeasurements())))

    for signal in segment.analogsignals:
        m = measurements[signal.name]
        check_numeric_measurement(signal, m)
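
# check_numeric_measurement is defined elsewhere; the sketch below illustrates
# the kind of round-trip comparison it might perform. The use of as_data_frame
# and the exact assertions are assumptions, not the real helper.
def check_numeric_measurement_sketch(signal, measurement):
    # Hypothetical: the imported data, read back as a data frame, should have
    # a series named after the signal with the same number of samples
    data = as_data_frame(measurement)[signal.name]
    assert_equals(len(signal), len(data))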
    def check_epoch_per_site(self, container):
        # One Epoch per site per day
        num_sites = len(self.group_sites())
        n_epochs = 0
        for group in iterable(container.getEpochGroups()):
            # Skip Epochs specifically for producing Sources
            n_epochs += len([e for e in iterable(group.getEpochs()) if e.getOutputSources().size() == 0])
        assert_equals(num_sites, n_epochs)
    def should_tag_site_with_species(self):
        species = set(self.df.Species)

        for group in iterable(self.expt.getEpochGroups()):
            for epoch in iterable(group.getEpochs()):
                src_map = to_dict(epoch.getInputSources())
                for src in src_map.values():
                    if len(list(iterable(src.getParentSources()))) == 0:
                        tags = set(iterable(src.getAllTags()))
                        assert(len(tags) > 0)
                        for tag in tags:
                            assert(tag in species)
    def should_import_epochs(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()

        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        neoepoch = neo.core.epoch.Epoch(10 * pq.ms, 100 * pq.ms, "epoch1")
        segment.epochs.append(neoepoch)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(1, len(annotations))
            assert_equals(epoch_start.plusMillis(10).getMillis(), annotations[0].getStart().getMillis())
            assert_equals(epoch_start.plusMillis(10).plusMillis(100).getMillis(), annotations[0].getEnd().get().getMillis())
        finally:
            segment.epochs.remove(neoepoch)
    def should_import_events(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()
        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map(dict()),
                                  to_map(dict()))

        segment = self.block.segments[0]
        event_ms = 10
        event1 = Event(event_ms * pq.ms, "event1", name="event1")

        segment.events.append(event1)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(1, len(annotations))
            assert(epoch_start.plusMillis(event_ms).equals(annotations[0].getStart()))
        finally:
            segment.events.remove(event1)
    def should_call_via_main(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")

        args = ['executable-name',
                '--source={}'.format(self.src.getURI().toString()),
                '--timezone=America/New_York',
                '--container={}'.format(expt2.getURI().toString()),
                '--protocol={}'.format(protocol2.getURI().toString()),
                'fixtures/example1.abf',
                ]

        main(argv=args, dsc=self.get_dsc())

        epoch_group = list(iterable(expt2.getEpochGroups()))[0]
        assert_equals(len(self.block.segments), len(set(iterable(epoch_group.getEpochs()))), "should import one epoch per segment")
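
# The argv above corresponds to a command line roughly like the following
# (sketch; the installed executable name is an assumption):
#
#   <executable-name> --source=<source-uri> --timezone=America/New_York \
#       --container=<experiment-uri> --protocol=<protocol-uri> fixtures/example1.abf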
def import_spiketrains(epoch, protocol, segment):
    for (i, spike_train) in enumerate(segment.spiketrains):
        params = {'t_start_ms': spike_train.t_start.rescale(pq.ms).item(),
                  't_stop_ms': spike_train.t_stop.rescale(pq.ms).item(),
                  'sampling_rate_hz': spike_train.sampling_rate.rescale(pq.Hz).item(),
                  'description': spike_train.description,
                  'file_origin': spike_train.file_origin}

        if spike_train.name:
            name = spike_train.name
        else:
            name = "spike train {}".format(i + 1)

        inputs = Maps.newHashMap()
        for m in iterable(epoch.getMeasurements()):
            inputs.put(m.getName(), m)

        ar = epoch.addAnalysisRecord(name,
                                     inputs,
                                     protocol,
                                     to_map(params))

        # Attach per-dimension labels and sampling rates; the numeric artifact
        # writer below expects these annotations on the arrays it stores
        spike_train.labels = ['spike time' for _ in spike_train.shape]
        spike_train.sampling_rates = [spike_train.sampling_rate for _ in spike_train.shape]

        spike_train.waveforms.labels = ['channel index', 'time', 'spike']
        spike_train.waveforms.sampling_rates = [0, spike_train.sampling_rate, 0] * pq.Hz

        insert_numeric_analysis_artifact(ar,
                                         name,
                                         {'spike times': spike_train,
                                          'spike waveforms': spike_train.waveforms})
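
# A usage sketch for import_spiketrains: walk a neo Block and import each
# segment's spike trains. The epoch/protocol arguments mirror the fixtures in
# the tests above; where the Block comes from is an assumption.
def import_block_spiketrains(epoch, protocol, block):
    for segment in block.segments:
        if len(segment.spiketrains) > 0:
            import_spiketrains(epoch, protocol, segment)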
    def collect_epochs_by_site(self, epoch_groups, ts):
        epochs = {}
        for e in iterable(epoch_groups[ts].getEpochs()):
            sources = to_dict(e.getInputSources())
            for s in sources.values():
                if s.getLabel() in self.df.Site.base:
                    epochs[s.getLabel()] = e
        return epochs
    def epoch_groups_by_timestamp(self):
        epoch_groups = {}
        for grp in iterable(self.expt.getEpochGroups()):
            d = grp.getStart()
            ts = pd.Timestamp(
                datetime.datetime(d.getYear(), d.getMonthOfYear(), d.getDayOfMonth(),
                                  d.getHourOfDay(), d.getMinuteOfHour(),
                                  d.getSecondOfMinute()))
            epoch_groups[ts] = grp
        return epoch_groups
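
# The Joda DateTime -> pandas Timestamp conversion above is repeated in
# _import_file below; a shared helper like this sketch (assuming Joda-style
# accessors on `d`) would remove the duplication:
def joda_to_timestamp(d):
    return pd.Timestamp(datetime.datetime(d.getYear(), d.getMonthOfYear(),
                                          d.getDayOfMonth(), d.getHourOfDay(),
                                          d.getMinuteOfHour(), d.getSecondOfMinute()))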
    def should_add_sources(self):
        expected_source_names = np.unique(self.df.Site)

        sources = self.ctx.getTopLevelSources()
        src_map = {}
        for s in iterable(sources):
            src_map[s.getLabel()] = s

        for name in expected_source_names:
            assert(name in src_map)
    def should_annotate_measurements_with_observer(self):
        epoch_groups = self.epoch_groups_by_timestamp()

        for ((ts, site), group) in self.group_sites():
            epochs = self.collect_epochs_by_site(epoch_groups, ts)

            e = epochs[site]

            for i in xrange(len(group)):
                if len(list(iterable(e.getMeasurements()))) > 0:
                    m = e.getMeasurement(group['Species'][i])
                    assert_equals(group['Observer'][i], str(m.getUserProperty(self.ctx.getAuthenticatedUser(), 'Observer')))
    def should_import_spike_trains(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()

        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        times = [.1, .2, .3, .4]
        waveforms = np.random.rand(2,3,4) * pq.mV

        train_name = 'spike train 1'
        spike_train = SpikeTrain(times, name=train_name, t_stop=2.0 * pq.s, units="s", waveforms=waveforms)

        segment.spiketrains.append(spike_train)
        try:
            import_spiketrains(epoch, protocol2, segment)

            records = list(iterable(epoch.getAnalysisRecords()))

            assert_equals(1, len(records))

            ar = records[0]

            assert_equals(train_name, ar.getName())

            expected_params = {'t_start_ms': spike_train.t_start.rescale(pq.ms).item(),
                               't_stop_ms': spike_train.t_stop.rescale(pq.ms).item(),
                               'sampling_rate_hz': spike_train.sampling_rate.rescale(pq.Hz).item(),
                               'description': spike_train.description,
                               'file_origin': spike_train.file_origin}

            for (k, v) in expected_params.iteritems():
                actual = ar.getProtocolParameters().get(k)
                if actual:
                    assert_equals(v, actual)

            assert_equals(len(expected_params), ar.getProtocolParameters().size())

            data_map = ar.getDataElements()
            df = as_data_frame(data_map.get(spike_train.name))

            check_signal(spike_train, df['spike times'])
            check_signal(spike_train.waveforms, df['spike waveforms'])
        finally:
            segment.spiketrains.remove(spike_train)
    def should_add_individual_measurement_sources(self):
        epoch_groups = self.epoch_groups_by_timestamp()

        for ((ts, site), group) in self.group_sites():
            epochs = self.collect_epochs_by_site(epoch_groups, ts)

            e = epochs[site]
            if 'individual' in list(iterable(e.getAllTags())):
                for i in xrange(len(group)):
                    if group['Type'][i] == MEASUREMENT_TYPE_INDIVIDUAL:
                        print(e.getInputSources(), group['Species'][i], i)
                        assert_true(e.getInputSources().containsKey(u"{} {}".format(group['Species'][i],i+1)))
    def should_use_existing_sources(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")

        import_csv(self.ctx,
                   container_uri=expt2.getURI().toString(),
                   protocol_uri=protocol2.getURI().toString(),
                   files=[EXAMPLE_FIELD_DATA_CSV])

        expected_source_names = np.unique(self.df.Site)

        sources = self.ctx.getTopLevelSources()

        assert_equals(len(expected_source_names), len(list(iterable(sources))))
    def should_call_via_main(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")

        number_of_days = np.unique(np.asarray(self.df.index)).size

        args = ['--timezone=America/New_York',
                '--container={}'.format(expt2.getURI().toString()),
                '--protocol={}'.format(protocol2.getURI().toString()),
                EXAMPLE_FIELD_DATA_CSV,
                ]

        main(args, dsc=self.dsc)

        assert_equals(number_of_days, len(list(iterable(expt2.getEpochGroups()))))
    def should_add_individual_measurements(self):
        epoch_groups = self.epoch_groups_by_timestamp()

        for ((ts, site), group) in self.group_sites():
            epochs = self.collect_epochs_by_site(epoch_groups, ts)
            e = epochs[site]

            for i in xrange(len(group)):
                if group['Type'][i] == MEASUREMENT_TYPE_INDIVIDUAL:
                    for m in iterable(e.getMeasurements()):
                        if m.getName().startswith(u"{}_{}".format(group['Species'][i],i+1)):
                            csv_path = m.getLocalDataPath().get()
                            data = pd.read_csv(csv_path)
                            expected_measurements = group.iloc[i, FIRST_MEASUREMENT_COLUMN_NUMBER:].dropna()
                            assert(np.all(data[group['Counting'][i]] == expected_measurements))
def import_analog_signal(epoch, analog_signal, equipment_setup_root):
    # Annotate the signal's single (time) dimension for the numeric writer
    analog_signal.labels = [u'time']
    analog_signal.sampling_rates = [analog_signal.sampling_rate]

    if 'channel_index' in analog_signal.annotations:
        channel_index = analog_signal.annotations['channel_index']
    else:
        channel_index = 'unknown'
        log_warning("Analog signal does not have a channel index. Using '{}.channels.{}' as measurement device.".format(equipment_setup_root, channel_index))

    if analog_signal.name is not None:
        name = analog_signal.name
    else:
        name = 'analog signal'
        log_warning("Analog signal does not have a name. Using '{}' as measurement and data name.".format(name))

    device = '{}.channels.{}'.format(equipment_setup_root, channel_index)
    insert_numeric_measurement(epoch,
                               set(iterable(epoch.getInputSources().keySet())),
                               {device},
                               name,
                               {name : analog_signal})
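
# A usage sketch for import_analog_signal; iterating a neo Segment and the
# 'amplifier' equipment root are assumptions (the root matches the
# 'amplifier.channels.<n>' device names checked in the tests above).
def import_segment_signals(epoch, segment):
    for signal in segment.analogsignals:
        import_analog_signal(epoch, signal, 'amplifier')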
    def should_import_event_arrays(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()

        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        event_ms = [10, 12]
        event_array = EventArray(times=np.array(event_ms) * pq.ms, labels=['event1', 'event2'])
        segment.eventarrays.append(event_array)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(2, len(annotations))

            event_starts = [a.getStart() for a in annotations]
            for ms in event_ms:
                expected_start = epoch_start.plusMillis(ms)
                assert_true(any(expected_start.equals(s) for s in event_starts),
                            "event start time doesn't match")
        finally:
            segment.eventarrays.remove(event_array)
    def should_import_one_epoch_per_block(self):
        assert_equals(len(self.block.segments), len(set(iterable(self.epoch_group.getEpochs()))), "should import one epoch per segment")
    def should_add_one_epoch_group_for_each_day(self):
        number_of_days = np.unique(np.asarray(self.df.index)).size

        assert_equals(number_of_days, len(list(iterable(self.expt.getEpochGroups()))))
    def should_store_segment_index(self):
        for segment, epoch in zip(self.block.segments, iterable(self.epoch_group.getEpochs())):
            assert_equals(segment.index,
                          epoch.getUserProperty(epoch.getDataContext().getAuthenticatedUser(), 'index'))

    def should_import_segment_annotations(self):
        for segment, epoch in zip(self.block.segments, iterable(self.epoch_group.getEpochs())):
            # Check protocol parameters
            for k, v in segment.annotations.iteritems():
                assert_equals(v, epoch.getProtocolParameter(k))

    def should_import_analog_segments_as_measurements(self):
        for segment, epoch in zip(self.block.segments, iterable(self.epoch_group.getEpochs())):
            check_measurements(segment, epoch)
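
# The should_* checks above assume a fixture that reads an ABF file into a neo
# Block and imports it into an EpochGroup; a sketch of that setup (AxonIO and
# the hypothetical import_block call are assumptions about the test class):
#
#   self.block = neo.io.AxonIO(filename='fixtures/example1.abf').read_block()
#   self.epoch_group = import_block(self.expt, self.block, ...)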
def _import_file(context, container, protocol, file_name, header_row, timezone, first_measurement_column_number, date_column):

    df = read_csv(file_name, header_row=header_row, date_column=date_column)

    # Organize sources; this should be replaced with getSourceWithName() or a query
    sites = {}
    for src in iterable(context.getTopLevelSources()):
        sites[src.getLabel()] = src

    for plot in df.Site:
        if plot not in sites:
            logging.info("Adding site " + plot)
            sites[plot] = context.insertSource(plot, plot) #TODO better name?


    # Group EpochData by (index, Site), i.e. (Date, Site)
    epoch_data = df.groupby([df.index, 'Site'])
    groups = {}
    for grp in iterable(container.getEpochGroups()):
        d = grp.getStart()
        ts = pd.Timestamp(datetime.datetime(d.getYear(), d.getMonthOfYear(), d.getDayOfMonth(), d.getHourOfDay(), d.getMinuteOfHour(), d.getSecondOfMinute()))
        groups[ts] = grp

    for (group_index, group) in epoch_data:
        logging.info("Adding data for CSV group" + str(group_index))

        # Get the Source object corresponding to this site
        plot_name = group_index[1]
        plot = sites[plot_name]
        ts = group_index[0]
        start, end = _make_day_ends(ts, timezone)

        # One EpochGroup per day
        if ts not in groups:
            group_name = "{}-{}-{}".format(start.getYear(), start.getMonthOfYear(), start.getDayOfMonth())
            print("Adding EpochGroup {}".format(group_name))
            groups[ts] = container.insertEpochGroup(group_name, start, protocol, None, None)  # No protocol parameters or device parameters

        epoch_group = groups[ts]

        # Epoch by site
        epochs = {}
        for epoch in iterable(epoch_group.getEpochs()):
            src_map = to_dict(epoch.getInputSources())
            for src in src_map.values():
                epochs[src.getLabel()] = epoch

        if plot_name not in epochs:
            print("Inserting Epoch for measurements at: {}".format(plot_name))
            epochs[plot_name] = epoch_group.insertEpoch(start, end, protocol, None, None)

        epoch = epochs[plot_name]

        for i in xrange(len(group)):
            species = group['Species'][i]
            observer = group['Observer'][i]

            print("    {}".format(species))

            # Tag the Source with the species found there
            try:
                plot.addTag(species)
            except JavaException:
                logging.error("Exception adding tag. Retrying...")
                plot.addTag(species)
                logging.info("Successfully added tag on second try")

            measurements = group.iloc[i, first_measurement_column_number:].dropna()

            if group['Type'][i] == MEASUREMENT_TYPE_SITE:

                epoch.addInputSource(plot_name, plot)

                srcNames = Sets.newHashSet()
                srcNames.add(plot_name)

                insert_measurements(epoch, group, i, measurements, plot_name, species, srcNames, start, observer)

            elif group['Type'][i] == MEASUREMENT_TYPE_INDIVIDUAL:
                individual = plot.insertSource(epoch_group,
                                               start,
                                               end,
                                               protocol,
                                               Maps.newHashMap(),
                                               Optional.absent(),
                                               u"{} {}".format(species, i+1),
                                               u"{}-{}-{}-{}".format(species, plot_name, start.toString(), i+1),)

                epoch.addInputSource(individual.getLabel(), individual)
                srcNames = Sets.newHashSet()
                srcNames.add(individual.getLabel())
                insert_measurements(epoch, group, i, measurements, plot_name, species, srcNames, start, observer)
                epoch.addTag('individual')


    return 0
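
# _make_day_ends is defined elsewhere; this sketch shows the behavior the code
# above assumes (midnight-to-midnight Joda DateTimes in the given zone). The
# DateTime constructor call is an assumption for illustration.
def _make_day_ends_sketch(ts, timezone):
    # Hypothetical: ts is a pandas Timestamp, timezone a Joda DateTimeZone
    start = DateTime(ts.year, ts.month, ts.day, 0, 0, 0, timezone)
    return start, start.plusDays(1)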