Example #1
    def should_round_trip_multi_element_data_frame(self):
        arr1 = np.random.randn(10, 10) * pq.s
        arr1.labels = [u'volts', u'other']
        arr1.name = u'name'
        arr1.sampling_rates = [1.0 * pq.Hz, 10.0 * pq.Hz]

        arr2 = np.random.randn(10, 10) * pq.V
        arr2.labels = [u'volts', u'other']
        arr2.name = u'name'
        arr2.sampling_rates = [1.0 * pq.Hz, 10.0 * pq.Hz]

        epoch = self.expt.insertEpoch(DateTime(), DateTime(), self.protocol, None, None)

        ar = epoch.addAnalysisRecord("record", to_map({}), self.protocol, to_map({}))

        result_name1 = 'result'
        result_name2 = 'other-result'
        expected = {result_name1: arr1,
                    result_name2: arr2}
        record_name = "record-name"
        artifact = insert_numeric_analysis_artifact(ar, record_name, expected)

        assert artifact is not None

        sleep(0.5)  # give the inserted artifact a moment to become readable

        actual = as_data_frame(ar.getOutputs().get(record_name))

        assert_data_frame_equals(expected, actual)

    def should_import_epochs(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()

        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        neoepoch = neo.core.epoch.Epoch(10 * pq.ms, 100 * pq.ms, "epoch1")
        segment.epochs.append(neoepoch)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(1, len(annotations))
            assert_equals(epoch_start.plusMillis(10).getMillis(), annotations[0].getStart().getMillis())
            assert_equals(epoch_start.plusMillis(10).plusMillis(100).getMillis(), annotations[0].getEnd().get().getMillis())
        finally:
            segment.epochs.remove(neoepoch)

    def should_import_events(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()
        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]
        event_ms = 10
        event1 = Event(event_ms * pq.ms, "event1", name="event1")

        segment.events.append(event1)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(1, len(annotations))
            assert(epoch_start.plusMillis(event_ms).equals(annotations[0].getStart()))
        finally:
            segment.events.remove(event1)

    def should_import_spike_trains(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()

        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        times = [.1, .2, .3, .4]
        waveforms = np.random.rand(2, 3, 4) * pq.mV

        train_name = 'spike train 1'
        spike_train = SpikeTrain(times, name=train_name, t_stop=2.0 * pq.s, units="s", waveforms=waveforms)

        segment.spiketrains.append(spike_train)
        try:
            import_spiketrains(epoch, protocol2, segment)

            records = list(iterable(epoch.getAnalysisRecords()))

            assert_equals(1, len(records))

            ar = records[0]

            assert_equals(train_name, ar.getName())

            expected_params = {'t_start_ms': spike_train.t_start.rescale(pq.ms).item(),
                               't_stop_ms': spike_train.t_stop.rescale(pq.ms).item(),
                               'sampling_rate_hz': spike_train.sampling_rate.rescale(pq.Hz).item(),
                               'description': spike_train.description,
                               'file_origin': spike_train.file_origin}

            for (k, v) in expected_params.iteritems():
                actual = ar.getProtocolParameters().get(k)
                if actual:  # skip comparison when the stored value is None/empty
                    assert_equals(v, actual)

            assert_equals(len(expected_params), ar.getProtocolParameters().size())

            data_map = ar.getDataElements()
            df = as_data_frame(data_map.get(spike_train.name))

            check_signal(spike_train, df['spike times'])
            check_signal(spike_train.waveforms, df['spike waveforms'])
        finally:
            segment.spiketrains.remove(spike_train)

    def insert_epoch(self, group, start, end, interepoch):
        logging.info("Importing Epoch %s : %s", start, end)

        epoch = group.insertEpoch(
                self.source_map,
                None,
                self.timeFormatter.parseDateTime(str(start)).toDateTime(self.timezone),
                self.timeFormatter.parseDateTime(str(end)).toDateTime(self.timezone) if end else end,
                self.protocol, # self.protocol_id if not interepoch else "%s.inter-epoch" % self.protocol_id,
                to_map(self.protocol_parameters),
                to_map(self.device_parameters)
            )
        return epoch
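
The tests above rely on an assert_data_frame_equals helper that is not shown here. A minimal sketch of what such a comparison could look like, assuming each value is a quantities array with labels attached (the helper body is an assumption, not the library's implementation):

import numpy as np

def assert_data_frame_equals(expected, actual):
    # Hypothetical sketch; the real helper is not shown in these examples
    assert set(expected.keys()) == set(actual.keys())
    for key, exp in expected.items():
        act = actual[key]
        np.testing.assert_array_almost_equal(np.asarray(exp), np.asarray(act))
        assert exp.dimensionality == act.dimensionality  # units round-trip
        assert list(exp.labels) == list(act.labels)      # labels round-trip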
Example #6
def import_segment(epoch_group,
                   segment,
                   sources,
                   protocol=None,
                   equipment_setup_root=None):

    ctx = epoch_group.getDataContext()
    if protocol is None:
        protocol = ctx.getProtocol(NEO_PROTOCOL)
        if protocol is None:
            protocol = ctx.insertProtocol(NEO_PROTOCOL, NEO_PROTOCOL_TEXT)

    segment_duration = max(arr.t_stop for arr in segment.analogsignals)
    segment_duration.units = 'ms'  # milliseconds
    start_time = DateTime(epoch_group.getStart())

    inputSources = Maps.newHashMap()
    outputSources = Maps.newHashMap()

    for s in sources:
        if s:
            s = asclass("Source", s)
            inputSources.put(s.getLabel(), s)

    device_parameters = dict(("{}.{}".format(equipment_setup_root, k), v) for (k, v) in segment.annotations.items())
    epoch = epoch_group.insertEpoch(inputSources,
                                    outputSources,
                                    start_time,
                                    start_time.plusMillis(int(segment_duration)),
                                    protocol,
                                    to_map(segment.annotations),
                                    to_map(device_parameters)
    )
    if segment.index is not None:
        epoch.addProperty('index', box_number(segment.index))

    if len(segment.analogsignalarrays) > 0:
        log_warning("Segment contains AnalogSignalArrays. Import of AnalogSignalArrays is currently not supported")


    for analog_signal in segment.analogsignals:
        import_analog_signal(epoch, analog_signal, equipment_setup_root)

    import_timeline_annotations(epoch, segment, start_time)

    if len(segment.spikes) > 0:
        logging.warning("Segment contains Spikes. Import of individual Spike data is not yet implemented (but SpikeTrains are).")

    import_spiketrains(epoch, protocol, segment)
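
A hedged usage sketch for import_segment; the experiment (expt), subject source, and file name below are assumptions, not part of the examples:

import neo

# Hypothetical setup: `expt` is an existing Experiment, `subject` an existing Source
block = neo.io.AxonIO(filename='recording.abf').read_block()

group = expt.insertEpochGroup('imported-segments',
                              DateTime(),
                              None,        # protocol; import_segment falls back to NEO_PROTOCOL
                              to_map({}),
                              to_map({}))

for seg in block.segments:
    import_segment(group, seg, [subject], equipment_setup_root='acquisition')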
Example #7
def import_spiketrains(epoch, protocol, segment):
    for (i, spike_train) in enumerate(segment.spiketrains):
        params = {'t_start_ms': spike_train.t_start.rescale(pq.ms).item(),
                  't_stop_ms': spike_train.t_stop.rescale(pq.ms).item(),
                  'sampling_rate_hz': spike_train.sampling_rate.rescale(pq.Hz).item(),
                  'description': spike_train.description,
                  'file_origin': spike_train.file_origin}

        if spike_train.name:
            name = spike_train.name
        else:
            name = "spike train {}".format(i + 1)

        inputs = Maps.newHashMap()
        for m in iterable(epoch.getMeasurements()):
            inputs.put(m.getName(), m)

        ar = epoch.addAnalysisRecord(name,
                                     inputs,
                                     protocol,
                                     to_map(params))

        # Attach dimension labels and per-dimension sampling rates so the
        # spike times round-trip as a data frame
        spike_train.labels = ['spike time' for _ in spike_train.shape]
        spike_train.sampling_rates = [spike_train.sampling_rate for _ in spike_train.shape]

        spike_train.waveforms.labels = ['channel index', 'time', 'spike']
        # Only the time axis has a meaningful sampling rate
        spike_train.waveforms.sampling_rates = [0, spike_train.sampling_rate, 0] * pq.Hz

        insert_numeric_analysis_artifact(ar,
                                         name,
                                         {'spike times': spike_train,
                                          'spike waveforms': spike_train.waveforms})
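
The parameter map above turns Neo quantities into plain floats with rescale(...).item(); a small standalone illustration of the same pattern:

import quantities as pq
from neo.core import SpikeTrain

train = SpikeTrain([0.1, 0.2, 0.3] * pq.s, t_stop=2.0 * pq.s,
                   sampling_rate=10.0 * pq.kHz)

# rescale() converts units; item() unwraps the quantity to a plain Python float
print(train.t_stop.rescale(pq.ms).item())         # 2000.0
print(train.sampling_rate.rescale(pq.Hz).item())  # 10000.0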
Example #8
def to_map_should_convert_nested_dict():
    d = {'key1': 'value1',
         'nested': {'key2': 2,
                    3: 'value3',
                    4: 5}}

    m = to_map(d)

    check_dict(d, m)
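
check_dict is also not shown; a plausible sketch, assuming to_map returns a java.util.Map and converts nested dicts recursively (the helper body is an assumption):

def check_dict(d, m):
    # Hypothetical sketch of the verification helper used above
    assert len(d) == m.size()
    for k, v in d.items():
        if isinstance(v, dict):
            check_dict(v, m.get(k))  # nested dicts should become nested Maps
        else:
            assert v == m.get(k)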
Example #10
def to_map_should_convert_flat_dict():
    d = {'key1': 'value1',
         'key2': 2,
         'key3': 2.5,
         'key4': 'value4',
         3: 'value3',
         4: 5}

    m = to_map(d)

    check_dict(d, m)

    def should_round_trip_pandas_data_frame_artifact(self):
        expected = pd.DataFrame({
            'ColA': np.random.randn(10),
            'ColB': np.random.randn(10)
        })

        project = list(self.expt.getProjects())[0]
        record = project.addAnalysisRecord('name', to_map({}), None,
                                           to_map({}))

        m = add_tabular_analysis_artifact(record, 'tabular', expected)

        # Spin until background uploads of the artifact finish
        while m.getDataContext().getFileService().hasPendingUploads():
            pass

        m = asclass('Measurement', m.refresh())

        actual = as_data_frame(m, index_col=0)

        assert_frame_equal(expected, actual)
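
The busy-wait in should_round_trip_pandas_data_frame_artifact spins the CPU; Example #15 below blocks on waitForPendingUploads instead, and the same pattern could be used here (the java.util.concurrent import path assumes a Jython runtime):

from java.util.concurrent import TimeUnit  # Jython-style Java import; assumption

fs = m.getDataContext().getFileService()
while fs.hasPendingUploads():
    fs.waitForPendingUploads(10, TimeUnit.SECONDS)  # block up to 10 s per round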
Example #12
    def should_import_event_arrays(self):
        expt2 = self.ctx.insertProject("project2", "project2", DateTime()).insertExperiment("purpose", DateTime())
        protocol2 = self.ctx.insertProtocol("protocol", "description")
        epoch_start = DateTime()

        epoch = expt2.insertEpoch(Maps.newHashMap(),
                                  Maps.newHashMap(),
                                  epoch_start,
                                  DateTime(),
                                  protocol2,
                                  to_map({}),
                                  to_map({}))

        segment = self.block.segments[0]

        event_ms = [10, 12]
        event_array = EventArray(times=np.array(event_ms) * pq.ms, labels=['event1', 'event2'])
        segment.eventarrays.append(event_array)

        try:
            import_timeline_annotations(epoch, segment, epoch_start)

            annotations = list(iterable(epoch.getUserTimelineAnnotations(self.ctx.getAuthenticatedUser())))

            assert_equals(2, len(annotations))

            event_starts = [a.getStart() for a in annotations]
            for ms in event_ms:
                found = any(epoch_start.plusMillis(ms).equals(s) for s in event_starts)
                assert_true(found, "event start time doesn't match")
        finally:
            segment.eventarrays.remove(event_array)

    def import_ncs(self,
                   container,
                   sources,
                   label,
                   ncs_files=list(),
                   event_file=None,
                   start_id=None,
                   end_id=None,
                   include_interepoch=True):

        self.sources = sources
        self.source_map = Maps.newHashMap()
        for source in sources:
            self.source_map.put(source.getLabel(), source)

        # TODO figure out what should go into device params
        # device parameters
        self.device_parameters = {}

        epochs = {}
        epoch_boundaries = None
        group = None
        for f in ncs_files:
            logging.info("Reading %s", f)
            with open(f, 'rb') as ncs_file:
                reader = BinaryReader(ncs_file, NEURALYNX_ENDIAN)
                header = parse_header(reader)
                device_name = header["AcqEntName"]
                csc_data = CscData(header, ncs_blocks(reader, header))

                open_time = header["Time Opened"]

                # We assume all times are datetime in given local zone
                start = self.timeFormatter.parseDateTime(str(open_time)).toDateTime(self.timezone)
                logging.info("Start done")

                if group is None:
                    logging.info("Inserting top-level EpochGroup: %s", label)
                    group = asclass("us.physion.ovation.domain.mixin.EpochGroupContainer", container).insertEpochGroup(label,
                        start,
                        self.protocol,
                        to_map(self.protocol_parameters),
                        to_map(self.device_parameters)
                    )

                if event_file is None or start_id is None:
                    logging.info("No event file present")
                    if None not in epochs:
                        epochs[None] = self.insert_epoch(group, open_time, None, False)

                    self.append_response(epochs[None], device_name, csc_data, open_time, None)

                else:
                    logging.info("Event file present")
                    if epoch_boundaries is None:
                        logging.info("Determining Epoch boundaries")
                        with open(event_file, 'rb') as ef:
                            reader = BinaryReader(ef, NEURALYNX_ENDIAN)
                            header = parse_header(reader)
                            epoch_boundaries = list(EpochBoundaries(header,
                                nev_events(reader, header),
                                start_id,
                                end_id,
                                include_interepoch).boundaries)

                    current_epoch = None
                    for epoch_boundary in epoch_boundaries:
                        if epoch_boundary not in epochs:
                            epochs[epoch_boundary] = self.insert_epoch(group,
                                epoch_boundary.start,
                                epoch_boundary.end,
                                epoch_boundary.interepoch)

                        epoch = epochs[epoch_boundary]

                        self.append_response(epoch, device_name, csc_data, epoch_boundary.start, epoch_boundary.end)

                        current_epoch = epoch
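
A hedged usage sketch for import_ncs; the importer instance (with its timeFormatter, timezone, protocol, and protocol_parameters already configured) and all file names and event IDs below are assumptions:

# Hypothetical call; `importer` and `expt` are assumed to exist already
importer.import_ncs(container=expt,
                    sources=[subject],
                    label='session-01',
                    ncs_files=['CSC1.ncs', 'CSC2.ncs'],
                    event_file='Events.nev',
                    start_id=11,   # event ID marking Epoch starts
                    end_id=12,     # event ID marking Epoch ends
                    include_interepoch=True)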
Example #15
def import_block(epoch_group_container,
                 block,
                 equipment_setup_root,
                 sources,
                 protocol=None,
                 protocol_parameters={},
                 device_parameters={},
                 group_label=None,
                 file_mtime=None):
    """Import a `Neo <http://neuralensemble.org/neo/>`_ `Block` as a single Ovation `EpochGroup`


    Parameters
    ----------
    epoch_group_container : ovation.EpochGroup or ovation.Experiment
        Container for the inserted `ovation.EpochGroup`
    block : neo.Block
        `neo.Block` to import
    equipment_setup_root : str
        Root path of the equipment setup describing the devices that recorded the data
    sources : iterable of us.physion.ovation.domain.Source
        Experimental `Subjects` for the data contained in `block`
    protocol : ovation.Protocol, optional
        Ovation `Protocol` for the `EpochGroup` (if present)
    protocol_parameters : Mapping, optional
    device_parameters : Mapping, optional
    group_label : string, optional
        `EpochGroup` label. If `None` and `block.name` is not `None`, `block.name` is used
        as the `EpochGroup` label.
    file_mtime : float, optional
        File modification time (seconds since the epoch), used as the `EpochGroup` start
        time when `block.rec_datetime` is `None`

    Returns
    -------
    The inserted `ovation.EpochGroup`

    """

    if group_label is None:
        if block.name is not None:
            group_label = block.name
        else:
            group_label = "Neo importer"

    merged_protocol_parameters = protocol_parameters.copy()
    merged_protocol_parameters.update(block.annotations)

    # Convert a datetime.datetime to a Joda DateTime. timetuple()[:6] is
    # (year, month, day, hour, minute, second); slicing [:7] would pass
    # tm_wday as the millisecond argument.
    if block.rec_datetime is not None:
        start_time = DateTime(*(block.rec_datetime.timetuple()[:6] + (0,)))
    else:
        log_warning("Block does not contain a recording date/time. Using file modification time instead.")
        start_time = DateTime(*(datetime.fromtimestamp(file_mtime).timetuple()[:6] + (0,)))

    epochGroup = asclass("us.physion.ovation.domain.mixin.EpochGroupContainer",
                         epoch_group_container).insertEpochGroup(group_label,
                                                                 start_time,
                                                                 protocol,
                                                                 to_map(merged_protocol_parameters),
                                                                 to_map(device_parameters))

    if len(block.recordingchannelgroups) > 0:
        log_warning("Block contains RecordingChannelGroups. Import of RecordingChannelGroups is currently not supported.")

    log_info("Importing segments from {}".format(block.file_origin))
    for seg in block.segments:
        log_info("Importing segment {} from {}".format(str(seg.index), block.file_origin))
        import_segment(epochGroup,
                       seg,
                       sources,
                       protocol=protocol,
                       equipment_setup_root=equipment_setup_root)

    log_info("Waiting for uploads to complete...")
    fs = epoch_group_container.getDataContext().getFileService()
    while fs.hasPendingUploads():
        fs.waitForPendingUploads(10, TimeUnit.SECONDS)

    return epochGroup
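
Finally, a hedged usage sketch for import_block; the experiment, sources, and file path are assumptions:

import os
import neo

# Hypothetical usage; `expt` and `sources` are assumed to exist already
path = 'recording.abf'
block = neo.io.AxonIO(filename=path).read_block()

group = import_block(expt,
                     block,
                     'acquisition',  # equipment_setup_root
                     sources,
                     group_label='session-01',
                     file_mtime=os.path.getmtime(path))  # fallback start time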