コード例 #1
0
def import_csv(context,
               container_uri=None,
               protocol_uri=None,
               files=None,
               timezone=None,
               csv_header_row=CSV_HEADER_ROW,
               first_measurement_column=FIRST_MEASUREMENT_COLUMN_NUMBER,
               date_column=DATE_COLUMN_NUMBER):
    """Import CSV measurement files into an Ovation EpochGroup container.

    Parameters
    ----------
    context : data context used to resolve URIs and insert entities
    container_uri : str
        URI of an EpochGroupContainer entity (required)
    protocol_uri : str
        URI of the Protocol describing the measurements (required)
    files : iterable of str, optional
        Paths of CSV files to import (default: none)
    timezone : str, optional
        Joda-Time zone ID; defaults to the JVM's default zone
    csv_header_row, first_measurement_column, date_column : int
        CSV layout parameters; defaults come from module-level constants
    """

    assert protocol_uri is not None
    assert container_uri is not None

    # `files=None` sentinel avoids the shared mutable default-argument pitfall.
    if files is None:
        files = []

    container = asclass("us.physion.ovation.domain.mixin.EpochGroupContainer", context.getObjectWithURI(container_uri))
    protocol = asclass("Protocol", context.getObjectWithURI(protocol_uri))

    if timezone is None:
        timezone = DateTimeZone.getDefault().getID()

    for f in files:
        _import_file(context,
                     container,
                     protocol,
                     f,
                     csv_header_row,
                     timezone,
                     first_measurement_column,
                     date_column)
コード例 #2
0
    def import_wrapper(data_context,
                       container=None,
                       protocol=None,
                       files=None,
                       sources=None,
                       equipment_setup_root=None,
                       **args):
        """Resolve container/protocol/source URIs and import each file.

        Returns 0 on completion (CLI-style exit status).
        """

        container = data_context.getObjectWithURI(container)

        # A missing protocol entity is allowed; import proceeds without one.
        protocol_entity = data_context.getObjectWithURI(protocol)
        protocol = asclass("Protocol", protocol_entity) if protocol_entity else None

        # Guard the declared None defaults before iterating; the original
        # crashed with TypeError when sources/files were left unset.
        if sources is None:
            sources = []
        sources = [data_context.getObjectWithURI(source_uri) for source_uri in sources]

        if files is None:
            files = []
        for path in files:  # renamed from `file` to avoid shadowing the builtin
            import_file(path,
                        container,
                        equipment_setup_root,
                        sources,
                        protocol=protocol)

        return 0
コード例 #3
0
 def should_cast_experiment_from_procedure_element_via_explicit_cast(self):
     """Casting an Epoch's parent by fully-qualified class name yields the owning Experiment."""
     parent = self.expt.insertEpoch(DateTime(), DateTime(), self.protocol,
                                    None, None).getParent()
     experiment = asclass("us.physion.ovation.domain.Experiment", parent)
     assert_equals(self.expt.getUuid().toString(),
                   experiment.getUuid().toString())
コード例 #4
0
def import_csv(context,
               container_uri=None,
               protocol_uri=None,
               files=None,
               timezone=None,
               csv_header_row=CSV_HEADER_ROW,
               first_measurement_column=FIRST_MEASUREMENT_COLUMN_NUMBER,
               date_column=DATE_COLUMN_NUMBER):
    """Import CSV measurement files into an Ovation EpochGroup container.

    Parameters
    ----------
    context : data context used to resolve URIs and insert entities
    container_uri : str
        URI of an EpochGroupContainer entity (required)
    protocol_uri : str
        URI of the Protocol describing the measurements (required)
    files : iterable of str, optional
        Paths of CSV files to import (default: none)
    timezone : str, optional
        Joda-Time zone ID; defaults to the JVM's default zone
    csv_header_row, first_measurement_column, date_column : int
        CSV layout parameters; defaults come from module-level constants
    """

    assert protocol_uri is not None
    assert container_uri is not None

    # Replace the original mutable default (`files=[]`) with a None sentinel.
    if files is None:
        files = []

    container = asclass("us.physion.ovation.domain.mixin.EpochGroupContainer",
                        context.getObjectWithURI(container_uri))
    protocol = asclass("Protocol", context.getObjectWithURI(protocol_uri))

    if timezone is None:
        timezone = DateTimeZone.getDefault().getID()

    for f in files:
        _import_file(context, container, protocol, f, csv_header_row, timezone,
                     first_measurement_column, date_column)
コード例 #5
0
def import_segment(epoch_group,
                   segment,
                   sources,
                   protocol=None,
                   equipment_setup_root=None):
    """Import a Neo `Segment` as a new Ovation `Epoch` within *epoch_group*.

    Analog signals, timeline annotations and spike trains are imported.
    AnalogSignalArrays and individual Spikes are skipped with a warning.
    """

    ctx = epoch_group.getDataContext()
    # Default to the shared Neo-import protocol, inserting it on first use.
    if protocol is None:
        protocol = ctx.getProtocol(NEO_PROTOCOL)
        if protocol is None:
            protocol = ctx.insertProtocol(NEO_PROTOCOL, NEO_PROTOCOL_TEXT)

    # Epoch duration = latest stop time among the segment's analog signals.
    segment_duration = max(arr.t_stop for arr in segment.analogsignals)
    segment_duration.units = 'ms' #milliseconds
    start_time = DateTime(epoch_group.getStart())

    inputSources = Maps.newHashMap()
    outputSources = Maps.newHashMap()

    for s in sources:
        if s:
            s = asclass("Source", s)
        # NOTE(review): this put executes even when s is falsy, in which case
        # s.getLabel() would raise — confirm callers never pass None sources.
        inputSources.put(s.getLabel(), s)

    # Prefix every segment annotation key with the equipment-setup root path.
    device_parameters = dict(("{}.{}".format(equipment_setup_root, k), v) for (k,v) in segment.annotations.items())
    epoch = epoch_group.insertEpoch(inputSources,
                                    outputSources,
                                    start_time,
                                    start_time.plusMillis(int(segment_duration)),
                                    protocol,
                                    to_map(segment.annotations),
                                    to_map(device_parameters)
    )
    if segment.index is not None:
        epoch.addProperty('index', box_number(segment.index))

    if len(segment.analogsignalarrays) > 0:
        log_warning("Segment contains AnalogSignalArrays. Import of AnalogSignalArrays is currently not supported")


    for analog_signal in segment.analogsignals:
        import_analog_signal(epoch, analog_signal, equipment_setup_root)

    import_timeline_annotations(epoch, segment, start_time)

    if len(segment.spikes) > 0:
        logging.warning("Segment contains Spikes. Import of individual Spike data is not yet implemented (but SpikeTrains are).")

    import_spiketrains(epoch, protocol, segment)
コード例 #6
0
    def should_give_data_path(self):
        """datapath() agrees with Measurement.getLocalDataPath() once uploads finish."""
        frame = pd.DataFrame({'ColA' : np.random.randn(10),
                              'ColB' : np.random.randn(10)})

        epoch = self.expt.insertEpoch(DateTime(), DateTime(), self.protocol, None, None)

        measurement = insert_tabular_measurement(epoch, set(), set(), 'tabular', frame)

        # Busy-wait until the file service has flushed the pending upload.
        while measurement.getDataContext().getFileService().hasPendingUploads():
            pass

        measurement = asclass('Measurement', measurement.refresh())

        assert_equals(measurement.getLocalDataPath().get(), datapath(measurement))
コード例 #7
0
    def should_round_trip_pandas_data_frame_artifact(self):
        """A DataFrame attached as an analysis artifact reads back unchanged."""
        frame = pd.DataFrame({'ColA': np.random.randn(10),
                              'ColB': np.random.randn(10)})

        project = list(self.expt.getProjects())[0]
        record = project.addAnalysisRecord('name', to_map({}), None, to_map({}))

        artifact = add_tabular_analysis_artifact(record, 'tabular', frame)

        # Wait for the upload to complete before reading the data back.
        while artifact.getDataContext().getFileService().hasPendingUploads():
            pass

        artifact = asclass('Measurement', artifact.refresh())

        assert_frame_equal(frame, as_data_frame(artifact, index_col=0))
コード例 #8
0
ファイル: test_csv.py プロジェクト: c-wilson/ovation-python
    def should_round_trip_data_frame(self):
        """The CSV written for a tabular measurement round-trips the DataFrame."""
        epoch = self.expt.insertEpoch(DateTime(), DateTime(), self.protocol, None, None)

        frame = pd.DataFrame({'col1': np.random.rand(10), 'col2': np.random.rand(10)})

        source_name = 'source'
        subject = epoch.getDataContext().insertSource('source-name', 'source-id')
        epoch.addInputSource(source_name, subject)

        name = 'data_name'
        measurement = insert_tabular_measurement(epoch, {source_name}, {'amp'}, name, frame)

        # Poll until the local file becomes readable; reads fail while the
        # upload is still in flight, so errors are deliberately ignored.
        result = None
        while result is None:
            measurement = asclass('Measurement', measurement.refresh())
            try:
                result = pd.read_csv(measurement.getLocalDataPath().get(), index_col=0)
            except Exception:
                pass

        assert_true(((result - frame) < .001).all().all(), msg="Approximately equal")
コード例 #9
0
    def should_round_trip_pandas_data_frame(self):
        """A DataFrame inserted as a tabular measurement reads back unchanged."""
        frame = pd.DataFrame({
            'ColA': np.random.randn(10),
            'ColB': np.random.randn(10)
        })

        epoch = self.expt.insertEpoch(DateTime(), DateTime(), self.protocol,
                                      None, None)

        measurement = insert_tabular_measurement(epoch, set(), set(), 'tabular', frame)

        # Block until the backing file upload has finished.
        while measurement.getDataContext().getFileService().hasPendingUploads():
            pass

        measurement = asclass('Measurement', measurement.refresh())

        assert_frame_equal(frame, as_data_frame(measurement, index_col=0))
コード例 #10
0
 def should_cast_experiment_from_procedure_element(self):
     """Casting an Epoch's parent by short class name yields the owning Experiment."""
     parent = self.expt.insertEpoch(DateTime(), DateTime(), self.protocol,
                                    None, None).getParent()
     assert_equals(self.expt.getUuid().toString(),
                   asclass("Experiment", parent).getUuid().toString())
コード例 #11
0
# Example of how to get project named "Surprise laser probe" by searching for its name
projects = con.getProjects()
project_uri = None
print "fetching projects..."
for project in projects:
    if project.getName() == "Surprise laser probe":
        print "Found project!"
        project_uri = project.getURI().toString()
        break
print "done fetching!"

# Now grab the project by URI. Note that URI needs to be known for this to work.
# Ideally, you would only iterate once and save the URIs of the projects locally
if project_uri is not None:
    project = asclass('Project', con.getObjectWithURI(project_uri))
    print project.getName()
else:
    raise ValueError, "could not find project"

# Now lets insert an experiment names "session_001"
# First make sure experiment does not exist so we don't insert double sessions
experiments = project.getExperiments()
for experiment in experiments:
    if experiment.getPurpose() == "session_008":
        date = experiment.getStart().toString()
        raise ValueError, "Experiment already exists and has date of " + date

# Grab the local current time with the argument being the EST time zone
#current_time = datetime.datetime.now(pytz.timezone("US/Eastern"))
experiment_time = datetime.datetime(2014, 8, 18, tzinfo = pytz.timezone("US/Eastern"))
コード例 #12
0
    def import_ncs(self,
               container,
               sources,
               label,
               ncs_files=list(),
               event_file=None,
               start_id=None,
               end_id=None,
               include_interepoch=True):
        """Import Neuralynx .ncs files as Epochs within a new EpochGroup.

        Parameters
        ----------
        container : entity castable to EpochGroupContainer
            Parent for the inserted top-level EpochGroup.
        sources : iterable of Source
            Input sources, registered by label for each inserted Epoch.
        label : str
            Label for the top-level EpochGroup.
        ncs_files : iterable of str, optional
            Paths of the .ncs continuous-signal files to read.
        event_file : str, optional
            .nev event file used with start_id/end_id to derive Epoch
            boundaries. When absent, all data goes into a single Epoch.
        start_id, end_id : optional
            Event ids delimiting Epoch starts and ends in the event file.
        include_interepoch : bool, optional
            Whether Epochs are also created for inter-epoch intervals.
        """

        self.sources = sources
        # Map of source label -> Source, consumed by insert_epoch.
        self.source_map = Maps.newHashMap()
        [self.source_map.put(source.getLabel(), source) for source in sources]

        # TODO figure out what should go into device params
        # device parameters
        self.device_parameters = {}

        epochs = {}                # EpochBoundary (or None) -> inserted Epoch
        epoch_boundaries = None    # lazily parsed from the event file
        group = None               # top-level EpochGroup, created on first file
        for f in ncs_files:
            logging.info("Reading %s", f)
            with open(f, 'rb') as ncs_file:
                reader = BinaryReader(ncs_file, NEURALYNX_ENDIAN)
                header = parse_header(reader)
                device_name = header["AcqEntName"]
                csc_data = CscData(header, ncs_blocks(reader, header))

                open_time = header["Time Opened"]

                # We assume all times are datetime in given local zone
                start = self.timeFormatter.parseDateTime(str(open_time)).toDateTime(self.timezone)
                logging.info("Start done")

                # Create the EpochGroup once, using the first file's start time.
                if group is None:
                    logging.info("Inserting top-level EpochGroup: %s", label)
                    group = asclass("us.physion.ovation.domain.mixin.EpochGroupContainer", container).insertEpochGroup(label,
                        start,
                        self.protocol,
                        to_map(self.protocol_parameters),
                        to_map(self.device_parameters)
                    )

                if event_file is None or start_id is None:
                    logging.info("No event file present")
                    # No boundaries available: one catch-all Epoch keyed by None.
                    if not None in epochs:
                        epochs[None] = self.insert_epoch(group, open_time, None, False)

                    self.append_response(epochs[None], device_name, csc_data, open_time, None)

                else:
                    logging.info("Event file present")
                    # Parse Epoch boundaries from the event file only once,
                    # on the first file that needs them.
                    if epoch_boundaries is None:
                        logging.info("Determining Epoch boundaries")
                        with open(event_file, 'rb') as ef:
                            reader = BinaryReader(ef, NEURALYNX_ENDIAN)
                            header = parse_header(reader)
                            epoch_boundaries = list(EpochBoundaries(header,
                                nev_events(reader, header),
                                start_id,
                                end_id,
                                include_interepoch).boundaries)

                    current_epoch = None
                    for epoch_boundary in epoch_boundaries:
                        # Insert each Epoch only on first encounter; later
                        # files append responses to the same Epoch.
                        if not epoch_boundary in epochs:
                            epochs[epoch_boundary] = self.insert_epoch(group,
                                epoch_boundary.start,
                                epoch_boundary.end,
                                epoch_boundary.interepoch)

                        epoch = epochs[epoch_boundary]

                        self.append_response(epoch, device_name, csc_data, epoch_boundary.start, epoch_boundary.end)

                        # NOTE(review): current_epoch is never read within this
                        # method — confirm whether code outside this view uses it.
                        current_epoch = epoch
コード例 #13
0
 def should_cast_experiment_from_procedure_element_via_explicit_cast(self):
     """Explicit fully-qualified cast of an Epoch's parent yields the Experiment."""
     epoch = self.expt.insertEpoch(DateTime(), DateTime(), self.protocol, None, None)
     experiment = asclass("us.physion.ovation.domain.Experiment", epoch.getParent())
     assert_equals(self.expt.getUuid().toString(), experiment.getUuid().toString())
コード例 #14
0
 def should_cast_none(self):
     """Casting None should yield None rather than raising."""
     result = asclass("Experiment", None)
     assert_equals(None, result)
コード例 #15
0
 def should_cast_experiment_from_procedure_element(self):
     """Short-name cast of an Epoch's parent recovers the owning Experiment."""
     epoch = self.expt.insertEpoch(DateTime(), DateTime(), self.protocol, None, None)
     parent_experiment = asclass("Experiment", epoch.getParent())
     assert_equals(self.expt.getUuid().toString(),
                   parent_experiment.getUuid().toString())
コード例 #16
0
 def should_cast_none(self):
     """A None entity casts to None."""
     assert_equals(None, asclass("Experiment", None))
コード例 #17
0
def import_block(epoch_group_container,
                 block,
                 equipment_setup_root,
                 sources,
                 protocol=None,
                 protocol_parameters=None,
                 device_parameters=None,
                 group_label=None,
                 file_mtime=None):
    """Import a `Neo <http://neuralensemble.org/neo/>`_ `Block` as a single Ovation `EpochGroup`


    Parameters
    ----------
    block : neo.Block
        `neo.Block` to import
    epoch_group_container : ovation.EpochGroup or ovation.Experiment
        Container for the inserted `ovation.EpochGroup`
    equipment_setup_root : str
        Root path for equipment setup describing equipment that recorded the data to be imported
    sources : iterable of us.physion.ovation.domain.Source
        Experimental `Subjects` for data contained in `block`
    protocol : ovation.Protocol, optional
        Ovation `Protocol` for the EpochGroup (if present)
    protocol_parameters : Mapping, optional
    device_parameters : Mapping, optional
    group_label : string, optional
        EpochGroup label. If `None`, and `block.name` is not `None`, `block.name` will be used
        for the EpochGroup label.
    file_mtime : float, optional
        File modification time (epoch seconds) used as the EpochGroup start
        when `block.rec_datetime` is absent.


    Returns
    -------
    The inserted `ovation.EpochGroup`

    """

    # None sentinels replace the original shared mutable default arguments.
    if protocol_parameters is None:
        protocol_parameters = {}
    if device_parameters is None:
        device_parameters = {}

    if group_label is None:
        group_label = block.name if block.name is not None else "Neo importer"

    merged_protocol_parameters = protocol_parameters.copy()
    merged_protocol_parameters.update(block.annotations)

    # Convert a datetime.datetime to a Joda DateTime. Only the first six
    # timetuple fields (Y, M, D, h, m, s) are date components; the seventh is
    # tm_wday, which must not be passed as DateTime's millisOfSecond argument.
    if block.rec_datetime is not None:
        start_time = DateTime(*(block.rec_datetime.timetuple()[:6]))
    else:
        log_warning("Block does not contain a recording date/time. Using file modification time instead.")
        start_time = DateTime(*(datetime.fromtimestamp(file_mtime).timetuple()[:6]))

    epochGroup = asclass("us.physion.ovation.domain.mixin.EpochGroupContainer",
                         epoch_group_container).insertEpochGroup(group_label,
                                                                 start_time,
                                                                 protocol,
                                                                 to_map(merged_protocol_parameters),
                                                                 to_map(device_parameters))

    if len(block.recordingchannelgroups) > 0:
        log_warning("Block contains RecordingChannelGroups. Import of RecordingChannelGroups is currently not supported.")

    log_info("Importing segments from {}".format(block.file_origin))
    for seg in block.segments:
        log_info("Importing segment {} from {}".format(str(seg.index), block.file_origin))
        import_segment(epochGroup,
                       seg,
                       sources,
                       protocol=protocol,
                       equipment_setup_root=equipment_setup_root)

    # Block until all measurement files have been uploaded.
    log_info("Waiting for uploads to complete...")
    fs = epoch_group_container.getDataContext().getFileService()
    while fs.hasPendingUploads():
        fs.waitForPendingUploads(10, TimeUnit.SECONDS)

    return epochGroup
コード例 #18
0
 def should_set_device_parameters(self):
     """The EquipmentSetup's device details expose the same keys as device_info."""
     experiment = asclass("Experiment", self.epoch_group.getParent())
     details = to_dict(experiment.getEquipmentSetup().getDeviceDetails())
     assert_equals(self.device_info.keys(), details.keys())
コード例 #19
0
    def import_wrapper(data_context,
                       container=None,
                       protocol=None,
                       files=None,
                       sources=None,
                       epoch_group_label=None,
                       event_file=None,
                       protocol_parameters=None,
                       epoch_start_event_id=None,
                       epoch_end_event_id=None,
                       include_interepoch=False,
                       timezone=None,
                       **args):
        """Resolve entity URIs, then run a NeuralynxImporter inside a transaction.

        Commits on success and waits for file uploads; aborts on any error.
        Returns 0 (CLI-style exit status).
        """

        container = data_context.getObjectWithURI(container)

        protocol_entity = data_context.getObjectWithURI(protocol)
        protocol = asclass("Protocol", protocol_entity) if protocol_entity else None

        protocol_parameters = {} if protocol_parameters is None else dict(protocol_parameters)

        if sources is not None:
            sources = [asclass('Source', data_context.getObjectWithURI(source_uri))
                       for source_uri in sources]

        importer = NeuralynxImporter(protocol=protocol,
                                     protocol_parameters=protocol_parameters,
                                     timezone=timezone)

        data_context.beginTransaction()
        logging.info("Starting import...")
        try:
            importer.import_ncs(container,
                                sources,
                                epoch_group_label,
                                ncs_files=files,
                                event_file=event_file,
                                start_id=epoch_start_event_id,
                                end_id=epoch_end_event_id,
                                include_interepoch=include_interepoch)
        except Exception as e:
            # Roll back so a partial import is never committed.
            logging.error("Import failed, aborting.")
            logging.error("Error: %s" % e)
            traceback.print_exc(file=sys.stdout)
            data_context.abortTransaction()
        else:
            logging.info("Import complete.")
            data_context.commitTransaction()

            time.sleep(2) # wait for file service to recognize pending uploads

            # Poll until the file service reports all uploads finished.
            logging.info("Waiting for file uploads...")
            file_service = data_context.getFileService()
            while file_service.hasPendingUploads():
                logging.info('  .')
                time.sleep(2)

            logging.info("Done.")

        return 0