    def should_import_epochs(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()

        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        # A single neo epoch starting at 10 ms and lasting 100 ms
        neoepoch = neo.core.epoch.Epoch(10 * pq.ms, 100 * pq.ms, "epoch1")
        segment.epochs.append(neoepoch)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(1, len(annotations))
            assert_equals(epoch_start.plusMillis(10).getMillis(), annotations[0].getStart().getMillis())
            assert_equals(epoch_start.plusMillis(10).plusMillis(100).getMillis(), annotations[0].getEnd().get().getMillis())
        finally:
            segment.epochs.remove(neoepoch)
    def should_import_events(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()
        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map(dict()),
                                  to_map(dict()))

        segment = self.block.segments[0]
        event_ms = 10
        event1 = Event(event_ms * pq.ms, "event1", name="event1")

        segment.events.append(event1)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(1, len(annotations))
            assert epoch_start.plusMillis(event_ms).equals(annotations[0].getStart())
        finally:
            segment.events.remove(event1)
    def should_import_spike_trains(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()

        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        times = [.1, .2, .3, .4]
        waveforms = np.random.rand(2,3,4) * pq.mV

        train_name = 'spike train 1'
        spike_train = SpikeTrain(times, name=train_name, t_stop=2.0 * pq.s, units="s", waveforms=waveforms)

        segment.spiketrains.append(spike_train)
        try:
            import_spiketrains(epoch, protocol2, segment)

            records = list(iterable(epoch.getAnalysisRecords()))

            assert_equals(1, len(records))

            ar = records[0]

            assert_equals(train_name, ar.getName())

            expected_params = {'t_start_ms': spike_train.t_start.rescale(pq.ms).item(),
                               't_stop_ms': spike_train.t_stop.rescale(pq.ms).item(),
                               'sampling_rate_hz': spike_train.sampling_rate.rescale(pq.Hz).item(),
                               'description': spike_train.description,
                               'file_origin': spike_train.file_origin}

            for (k, v) in expected_params.iteritems():
                actual = ar.getProtocolParameters().get(k)
                if actual is not None:
                    assert_equals(v, actual)

            assert_equals(len(expected_params), ar.getProtocolParameters().size())

            data_map = ar.getDataElements()
            df = as_data_frame(data_map.get(spike_train.name))

            check_signal(spike_train, df['spike times'])
            check_signal(spike_train.waveforms, df['spike waveforms'])
        finally:
            segment.spiketrains.remove(spike_train)
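
These tests lean on an iterable() helper to walk Java collections from Python. A minimal sketch of such a helper, assuming the standard java.lang.Iterable interface exposed through the pyjnius bridge (the real helper is not shown in this listing):

def iterable(java_iterable):
    # Wrap a java.lang.Iterable in a Python generator so it works with
    # list(), for-loops, etc.
    it = java_iterable.iterator()
    while it.hasNext():
        yield it.next()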
Example #4
def import_segment(epoch_group,
                   segment,
                   sources,
                   protocol=None,
                   equipment_setup_root=None):


    ctx = epoch_group.getDataContext()
    if protocol is None:
        protocol = ctx.getProtocol(NEO_PROTOCOL)
        if protocol is None:
            protocol = ctx.insertProtocol(NEO_PROTOCOL, NEO_PROTOCOL_TEXT)

    segment_duration = max(arr.t_stop for arr in segment.analogsignals)
    segment_duration.units = 'ms' #milliseconds
    start_time = DateTime(epoch_group.getStart())

    inputSources = Maps.newHashMap()
    outputSources = Maps.newHashMap()

    for s in sources:
        if s:
            # Cast to the Source class so getLabel() is available; skip
            # missing (None) sources rather than crashing on getLabel().
            s = asclass("Source", s)
            inputSources.put(s.getLabel(), s)

    device_parameters = dict(("{}.{}".format(equipment_setup_root, k), v) for (k,v) in segment.annotations.items())
    epoch = epoch_group.insertEpoch(inputSources,
                                    outputSources,
                                    start_time,
                                    start_time.plusMillis(int(segment_duration)),
                                    protocol,
                                    to_map(segment.annotations),
                                    to_map(device_parameters)
    )
    if segment.index is not None:
        epoch.addProperty('index', box_number(segment.index))

    if len(segment.analogsignalarrays) > 0:
        logging.warning("Segment contains AnalogSignalArrays. Import of AnalogSignalArrays is currently not supported")


    for analog_signal in segment.analogsignals:
        import_analog_signal(epoch, analog_signal, equipment_setup_root)

    import_timeline_annotations(epoch, segment, start_time)

    if len(segment.spikes) > 0:
        logging.warning("Segment contains Spikes. Import of individual Spike data is not yet implemented (but SpikeTrains are).")

    import_spiketrains(epoch, protocol, segment)
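
A usage sketch for import_segment, assuming a neo Block obtained from any neo IO reader and an existing Ovation EpochGroup (my_io, epoch_group, and subject are illustrative names, not part of this listing):

block = my_io.read_block()  # any neo IO reader; name is an assumption
for segment in block.segments:
    import_segment(epoch_group,
                   segment,
                   [subject],  # Ovation Source(s) recorded in this segment
                   equipment_setup_root='amplifier')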
Example #5
def import_spiketrains(epoch, protocol, segment):
    for (i, spike_train) in enumerate(segment.spiketrains):
        params = {'t_start_ms': spike_train.t_start.rescale(pq.ms).item(),
                  't_stop_ms': spike_train.t_stop.rescale(pq.ms).item(),
                  'sampling_rate_hz': spike_train.sampling_rate.rescale(pq.Hz).item(),
                  'description': spike_train.description,
                  'file_origin': spike_train.file_origin}

        if spike_train.name:
            name = spike_train.name
        else:
            name = "spike train {}".format(i + 1)

        inputs = Maps.newHashMap()
        for m in iterable(epoch.getMeasurements()):
            inputs.put(m.getName(), m)

        ar = epoch.addAnalysisRecord(name,
                                     inputs,
                                     protocol,
                                     to_map(params))

        # Label each dimension of the spike-times array and give it a
        # per-dimension sampling rate for the numeric artifact.
        spike_train.labels = ['spike time' for i in spike_train.shape]
        spike_train.sampling_rates = [spike_train.sampling_rate for i in spike_train.shape]

        # Waveforms are (channel index, time, spike); only the time axis has
        # a meaningful sampling rate.
        spike_train.waveforms.labels = ['channel index', 'time', 'spike']
        spike_train.waveforms.sampling_rates = [0.0, spike_train.sampling_rate.rescale(pq.Hz).item(), 0.0] * pq.Hz

        insert_numeric_analysis_artifact(ar,
                                         name,
                                         {'spike times': spike_train,
                                          'spike waveforms': spike_train.waveforms})
Example #6
def to_dict_should_convert_flat_map():
    m = Maps.newHashMap()
    m.put('key1', 'value1')
    m.put('key2', autoclass("java.lang.Integer")(2))
    m.put('key3', autoclass("java.lang.Double")(2.5))

    d = to_dict(m)

    check_dict(d, m)
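
to_dict itself is not shown in this listing; a minimal sketch of an inverse of to_map (see the next example), assuming pyjnius Map proxies and the iterable() helper sketched earlier:

def to_dict(m):
    # Convert a java.util.Map to a plain Python dict, recursing into
    # nested Maps (a sketch; the real helper is not shown above).
    result = {}
    for key in iterable(m.keySet()):
        value = m.get(key)
        if hasattr(value, 'keySet'):  # treat Map-like values as nested maps
            value = to_dict(value)
        result[key] = value
    return result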
Example #7
def to_map(d):
    result = Maps.newHashMap()
    for (k, v) in d.iteritems():
        if not isinstance(k, basestring):
            k = unicode(k)
        if isinstance(v, collections.Mapping):
            nested_value = to_map(v)
            result.put(k, nested_value)
        else:
            result.put(k, box_number(v))

    return result
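
A usage sketch for to_map, assuming a pyjnius-backed JVM and the box_number helper (not shown in this listing) that boxes Python numbers as java.lang objects:

params = to_map({'rate_hz': 10.0,
                 'trial': 1,
                 'device': {'gain': 2}})  # nested dicts become nested Maps
print(params.get('trial'))                # a boxed java.lang.Integer
print(params.get('device').get('gain'))   # nested Map lookup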
Example #8
    def should_import_event_arrays(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()
        
        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        event_ms = [10, 12]
        event_array = EventArray(times=np.array(event_ms) * pq.ms, labels=['event1', 'event2'])
        segment.eventarrays.append(event_array)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(2, len(annotations))

            event_starts = [a.getStart() for a in annotations]
            for ms in event_ms:
                expected = epoch_start.plusMillis(ms)
                found = any(expected.equals(s) for s in event_starts)
                assert_true(found, "event start time doesn't match")
        finally:
            segment.eventarrays.remove(event_array)
Example #9
def _import_file(context, container, protocol, file_name, header_row, timezone,
                 first_measurement_column_number, date_column):

    df = read_csv(file_name, header_row=header_row, date_column=date_column)

    # Organize sources; this should be replaced with getSourceWithName() or a query
    sites = {}
    for src in iterable(context.getTopLevelSources()):
        sites[src.getLabel()] = src

    for plot in df.Site:
        if plot not in sites:
            logging.info("Adding site " + plot)
            sites[plot] = context.insertSource(plot, plot) #TODO better name?


    # Group EpochData by (index, Site), i.e. (Date, Site)
    epoch_data = df.groupby([df.index, 'Site'])
    groups = {}
    for grp in iterable(container.getEpochGroups()):
        d = grp.getStart()
        ts = pd.Timestamp(
            datetime.datetime(d.getYear(), d.getMonthOfYear(),
                              d.getDayOfMonth(), d.getHourOfDay(),
                              d.getMinuteOfHour(), d.getSecondOfMinute()))
        groups[ts] = grp

    for (group_index, group) in epoch_data:
        logging.info("Adding data for CSV group " + str(group_index))

        # Get the Source object corresponding to this site
        plot_name = group_index[1]
        plot = sites[plot_name]
        ts = group_index[0]
        start,end = _make_day_ends(ts, timezone)

        # One EpochGroup per day
        if ts not in groups:
            group_name = "{}-{}-{}".format(start.getYear(), start.getMonthOfYear(), start.getDayOfMonth())
            print("Adding EpochGroup {}".format(group_name))
            groups[ts] = container.insertEpochGroup(group_name, start, protocol, None, None) # No protocol parameters or device parameters

        epoch_group = groups[ts]

        # Epoch by site
        epochs = {}
        for epoch in iterable(epoch_group.getEpochs()):
            src_map = to_dict(epoch.getInputSources())
            for src in src_map.values():
                epochs[src.getLabel()] = epoch

        if plot_name not in epochs:
            print("Inserting Epoch for measurements at: {}".format(plot_name))
            epochs[plot_name] = epoch_group.insertEpoch(start, end, protocol, None, None)

        epoch = epochs[plot_name]

        for i in xrange(len(group)):
            species = group['Species'][i]
            observer = group['Observer'][i]

            print("    {}".format(species))

            # Tag the Source with the species found there
            try:
                plot.addTag(species)
            except JavaException:
                logging.error("Exception adding tag. Retrying...")
                plot.addTag(species)
                logging.info("Successfully added tag on second try")

            measurements = group.iloc[i, first_measurement_column_number:].dropna()

            if group['Type'][i] == MEASUREMENT_TYPE_SITE:

                epoch.addInputSource(plot_name, plot)

                srcNames = Sets.newHashSet()
                srcNames.add(plot_name)

                insert_measurements(epoch, group, i, measurements, plot_name, species, srcNames, start, observer)

            elif group['Type'][i] == MEASUREMENT_TYPE_INDIVIDUAL:
                individual = plot.insertSource(epoch_group,
                                               start,
                                               end,
                                               protocol,
                                               Maps.newHashMap(),
                                               Optional.absent(),
                                               u"{} {}".format(species, i+1),
                                               u"{}-{}-{}-{}".format(species, plot_name, start.toString(), i+1),)

                epoch.addInputSource(individual.getLabel(), individual)
                srcNames = Sets.newHashSet()
                srcNames.add(individual.getLabel())
                insert_measurements(epoch, group, i, measurements, plot_name, species, srcNames, start, observer)
                epoch.addTag('individual')


    return 0
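
_make_day_ends is referenced above but not shown; a minimal sketch under the assumption that it builds Joda-Time day boundaries from a pandas Timestamp and a DateTimeZone:

def _make_day_ends(ts, timezone):
    # Start of the calendar day in the target zone, and the last millisecond
    # of that day, so each EpochGroup spans exactly one day.
    start = DateTime(ts.year, ts.month, ts.day, 0, 0, 0, timezone)
    end = start.plusDays(1).minusMillis(1)
    return start, end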
Example #10
    def import_ncs(self,
                   container,
                   sources,
                   label,
                   ncs_files=list(),
                   event_file=None,
                   start_id=None,
                   end_id=None,
                   include_interepoch=True):

        self.sources = sources
        self.source_map = Maps.newHashMap()
        for source in sources:
            self.source_map.put(source.getLabel(), source)

        # TODO figure out what should go into device params
        # device parameters
        self.device_parameters = {}

        epochs = {}
        epoch_boundaries = None
        group = None
        for f in ncs_files:
            logging.info("Reading %s", f)
            with open(f, 'rb') as ncs_file:
                reader = BinaryReader(ncs_file, NEURALYNX_ENDIAN)
                header = parse_header(reader)
                device_name = header["AcqEntName"]
                csc_data = CscData(header, ncs_blocks(reader, header))

                open_time = header["Time Opened"]

                # We assume all times are datetime in given local zone
                start = self.timeFormatter.parseDateTime(str(open_time)).toDateTime(self.timezone)
                logging.info("Parsed start time %s", start)

                if group is None:
                    logging.info("Inserting top-level EpochGroup: %s", label)
                    group = asclass("us.physion.ovation.domain.mixin.EpochGroupContainer", container).insertEpochGroup(label,
                        start,
                        self.protocol,
                        to_map(self.protocol_parameters),
                        to_map(self.device_parameters)
                    )

                if event_file is None or start_id is None:
                    logging.info("No event file present")
                    if None not in epochs:
                        epochs[None] = self.insert_epoch(group, open_time, None, False)

                    self.append_response(epochs[None], device_name, csc_data, open_time, None)

                else:
                    logging.info("Event file present")
                    if epoch_boundaries is None:
                        logging.info("Determining Epoch boundaries")
                        with open(event_file, 'rb') as ef:
                            reader = BinaryReader(ef, NEURALYNX_ENDIAN)
                            header = parse_header(reader)
                            epoch_boundaries = list(EpochBoundaries(header,
                                nev_events(reader, header),
                                start_id,
                                end_id,
                                include_interepoch).boundaries)

                    current_epoch = None
                    for epoch_boundary in epoch_boundaries:
                        if epoch_boundary not in epochs:
                            epochs[epoch_boundary] = self.insert_epoch(group,
                                epoch_boundary.start,
                                epoch_boundary.end,
                                epoch_boundary.interepoch)

                        epoch = epochs[epoch_boundary]

                        self.append_response(epoch, device_name, csc_data, epoch_boundary.start, epoch_boundary.end)

                        current_epoch = epoch
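
A usage sketch, assuming an importer instance already configured with a protocol, protocol_parameters, timeFormatter, and timezone (the file names and event IDs below are illustrative, not from this listing):

importer.import_ncs(experiment,
                    sources,
                    "session-2014-01-15",
                    ncs_files=["CSC1.ncs", "CSC2.ncs"],
                    event_file="Events.nev",
                    start_id=1,   # event ID marking Epoch start
                    end_id=2,     # event ID marking Epoch end
                    include_interepoch=True)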