Example #1
0
def test_create_delete_project_and_childs():
    """Deleting a project with ``remove_all_childs=True`` must also delete its
    actions and modules, so a freshly recreated project starts empty."""
    module_contents = {'species': {'value': 'rat'}}

    project = expipe.core.require_project(pytest.PROJECT_ID)
    action = project.require_action(pytest.ACTION_ID)
    action_module = action.create_module(pytest.MODULE_ID,
                                         contents=module_contents,
                                         overwrite=False)
    project_module = project.create_module(pytest.MODULE_ID,
                                           contents=module_contents,
                                           overwrite=False)

    expipe.delete_project(pytest.PROJECT_ID, remove_all_childs=True)
    with pytest.raises(KeyError):
        expipe.get_project(pytest.PROJECT_ID)

    # remake project, then the "old" action and project_module should be deleted
    project = expipe.require_project(pytest.PROJECT_ID)
    # BUG FIX: the original put both lookups inside a single pytest.raises
    # block, so the second lookup was never executed (the first raise exits
    # the block). Each expected failure needs its own context manager.
    with pytest.raises(KeyError):
        project.actions[pytest.ACTION_ID]
    with pytest.raises(KeyError):
        project.modules[pytest.MODULE_ID]

    # remake action, then the "old" action_module should be deleted
    action = project.require_action(pytest.ACTION_ID)
    with pytest.raises(KeyError):
        action.modules[pytest.MODULE_ID]
Example #2
0
def display(project_path=None):
    """Build and show the expipe browser GUI.

    The GUI is a top-level Tab with a 'Register' page and a 'Process' page;
    each page holds one child view per tool. Falls back to
    ``PAR.PROJECT_ROOT`` when no project path is given.
    """
    project_path = project_path if project_path else PAR.PROJECT_ROOT
    assert project_path is not None
    project = expipe.get_project(project_path)

    # Register page: one (title, view) pair per registration tool.
    register_views = [
        openephys_view(project),
        axona_view(project),
        adjustment_view(project),
        entity_view(project),
        surgery_view(project),
        perfuse_view(project),
    ]
    register_titles = [
        'OpenEphys', 'Axona', 'Adjustment', 'Entity', 'Surgery', 'Perfusion'
    ]
    register_tab = ipywidgets.Tab()
    register_tab.children = register_views
    for index, name in enumerate(register_titles):
        register_tab.set_title(index, name)

    # Process page: currently only OpenEphys processing.
    process_tab = ipywidgets.Tab()
    process_tab.children = [process_view(project)]
    for index, name in enumerate(['OpenEphys']):
        process_tab.set_title(index, name)

    # Top-level container combining the two pages.
    main_tab = ipywidgets.Tab()
    main_tab.children = [register_tab, process_tab]
    for index, name in enumerate(['Register', 'Process']):
        main_tab.set_title(index, name)
    ipd.display(main_tab)
Example #3
0
File: main.py  Project: CINPLA/expipe-cli
def list_plugins():
    """Return the configured expipe plugins, or None if none are set.

    Prefers the config of the project at the current working directory and
    falls back to the global expipe settings when no project is found there.
    """
    cwd = pathlib.Path.cwd()
    try:
        # BUG FIX: the original computed `cwd` but then called
        # pathlib.Path.cwd() a second time; reuse the variable.
        project = expipe_module.get_project(path=cwd)
        config = project.config
    except KeyError:
        # No project at cwd - fall back to the global settings.
        config = expipe_module.settings
    return config.get('plugins')
Example #4
0
File: main.py  Project: CINPLA/expipe-cli
 def list_stuff(object_type):
     """Print the project's objects of the given kind.

     Parameters
     ----------
     object_type : str
         Name of a project attribute to iterate (e.g. 'actions', 'modules').
     """
     try:
         project = expipe_module.get_project(path=pathlib.Path.cwd())
     except KeyError as e:
         # No project at cwd: report and bail out instead of raising.
         print(str(e))
         return
     # FIX: loop variable renamed from `object`, which shadowed the builtin.
     for obj in getattr(project, object_type):
         print(obj)
Example #5
0
File: main.py  Project: CINPLA/expipe-cli
 def status():
     """Print every key/value pair of the current project's config."""
     try:
         current_project = expipe_module.get_project(path=pathlib.Path.cwd())
     except KeyError as error:
         # No project at cwd: report the error and stop.
         print(str(error))
         return
     for key, value in current_project.config.items():
         print('{}: {}'.format(key, value))
Example #6
0
def display(project_path):
    """Assemble and show the expipe GUI: a 'Register' and a 'Process' page,
    each an ipywidgets Tab holding one view per tool."""
    project = expipe.get_project(project_path)

    # Register page: (title, view) pairs, built in display order.
    register_pairs = [
        ('OpenEphys', register_openephys_view(project)),
        ('Intan', register_intan_view(project)),
        ('Axona', axona_view(project)),
        ('Adjustment', adjustment_view(project)),
        ('Entity', entity_view(project)),
        ('Surgery', surgery_view(project)),
        ('Perfusion', perfuse_view(project)),
        ('Annotate', annotate_view(project)),
    ]
    register_tab = ipywidgets.Tab()
    register_tab.children = [view for _, view in register_pairs]
    for index, (name, _) in enumerate(register_pairs):
        register_tab.set_title(index, name)

    # Process page: (title, view) pairs for the processing tools.
    process_pairs = [
        ('OpenEphys', process_openephys_view(project)),
        ('Intan', process_intan_view(project)),
        ('Tracking', process_tracking_view(project)),
        ('Psychopy', process_psychopy_view(project)),
        ('Curation', process_curation_view(project)),
    ]
    process_tab = ipywidgets.Tab()
    process_tab.children = [view for _, view in process_pairs]
    for index, (name, _) in enumerate(process_pairs):
        process_tab.set_title(index, name)

    # Top-level container combining the two pages.
    main_tab = ipywidgets.Tab()
    main_tab.children = [register_tab, process_tab]
    for index, name in enumerate(['Register', 'Process']):
        main_tab.set_title(index, name)
    ipd.display(main_tab)
Example #7
0
 def __init__(self, stim_mask=False, baseline_duration=None, **kwargs):
     """Set up the loader for the expipe project and its lazy caches.

     Parameters
     ----------
     stim_mask : bool
         Stored on ``self.stim_mask``.
     baseline_duration : float or None
         Stored on ``self.baseline_duration``.
     **kwargs
         Extra parameters, kept as-is on ``self.params``.
     """
     self.stim_mask = stim_mask
     self.baseline_duration = baseline_duration
     self.params = kwargs
     self.project_path = project_path()
     self.project = expipe.get_project(self.project_path)
     self.actions = self.project.actions
     # Per-action caches, filled lazily elsewhere.
     for cache_name in ('_spike_trains', '_templates', '_stim_times',
                        '_unit_names', '_tracking', '_head_direction',
                        '_lfp', '_occupancy', '_rate_maps', '_prob_dist'):
         setattr(self, cache_name, {})
     self._spatial_bins = None
Example #8
0
    def __init__(
        self, project_path, action_ids, group_ids, position_sampling_rate,
        position_low_pass_frequency, box_size):
        """Load tracking, head direction, epochs, LFP and spike trains for
        every requested action/group combination into ``self._data``.

        ``self._data[action_id]`` holds the behavioural signals plus a
        ``'groups'`` dict keyed by group id with spike trains and LFP.
        """
        project = expipe.get_project(project_path)
        actions = project.actions
        self._data = {}
        self.group_ids = group_ids
        self.action_ids = action_ids
        for action_id in action_ids:
            action = actions[action_id]
            data_path = dp.get_data_path(action)
            epochs = dp.load_epochs(data_path)
            x, y, t, speed = dp.load_tracking(
                data_path, position_sampling_rate, position_low_pass_frequency, box_size=box_size)
            a, at = dp.load_head_direction(
                data_path, position_sampling_rate, position_low_pass_frequency, box_size=box_size)

            # NOTE: the original also pre-assigned an empty dict here which
            # was immediately overwritten; dropped as redundant.
            self._data[action_id] = {
                'x': x,
                'y': y,
                't': t,
                'speed': speed,
                'a': a,
                'at': at,
                'epochs': epochs,
                'groups': {}
            }

            for group_id in group_ids:
                try:
                    anas = dp.load_lfp(data_path, group_id) # TODO remove artifacts
                except Exception:
                    # BUG FIX: narrowed from a bare `except:`, which also
                    # intercepted KeyboardInterrupt/SystemExit. We still log
                    # the failing group id and re-raise.
                    print(group_id)
                    raise
                spike_trains = dp.load_spiketrains(data_path, group_id, load_waveforms=True)

                self._data[action_id]['groups'][group_id] = {
                    'spike_trains': {_name(s): s for s in dp.sort_by_cluster_id(spike_trains)},
                    'anas': anas
                }
Example #9
0
@lazy_import
def expipe():
    """Lazily import and return the ``expipe`` package."""
    import expipe as _expipe
    return _expipe


@lazy_import
def pathlib():
    """Lazily import and return the stdlib ``pathlib`` module."""
    import pathlib as _pathlib
    return _pathlib


# Resolve the expipe project for the current working directory, if any.
local_root, _ = expipe.config._load_local_config(pathlib.Path.cwd())
if local_root is not None:
    project = expipe.get_project(path=local_root)
else:

    # Stand-in exposing only an empty ``config`` mapping, so code that reads
    # ``project.config`` keeps working outside a project directory.
    # NOTE(review): the class itself (not an instance) is assigned below;
    # ``P.config`` resolves either way, but confirm no caller instantiates it.
    class P:
        config = {}

    project = P


@lazy_import
def pd():
    """Lazily import and return ``pandas``."""
    import pandas as _pandas
    return _pandas


@lazy_import
Example #10
0
def experiment_plot(project_path,
                    action_id,
                    n_channel=8,
                    rem_channel="all",
                    skip_channels=None,
                    raster_start=-0.5,
                    raster_stop=1):
    """
    Plot raster, isi-mean-median, tuning (polar and linear) from visual data
    acquired through open-ephys and psychopy.

    Parameters
    ----------
    project_path: os.path, or equivalent
        Path to project directory
    action_id: string
        The experiment/action id
    n_channel: default 8; int
        Number of channel groups in recordings
    rem_channel: default "all"; "all" or int
        Signify what channels are of interest(=rem)
    skip_channels: default None; int or (list, array, tuple)
        ID of channel groups to skip (only used when rem_channel == "all")
    raster_start: default -0.5; int or float
        When the rasters start on the x-axis relative to trial start, which is 0
    raster_stop: default 1; int or float
        When the rasters stop on the x-axis relative to trial start, which is 0
    Returns
    -------
    Saves figures into the exdir directory: main.exdir/figures
    """
    if not (rem_channel == "all" or
            (isinstance(rem_channel, int) and rem_channel < n_channel)):
        # NOTE: AttributeError kept for backward compatibility with existing
        # callers, although ValueError would be the conventional choice.
        msg = "rem_channel must be either 'all' or integer between 0 and n_channel ({}); not {}".format(
            n_channel, rem_channel)
        raise AttributeError(msg)

    # Define project tree
    project = expipe.get_project(project_path)
    action = project.actions[action_id]
    data_path = er.get_data_path(action)
    epochs = er.load_epochs(data_path)

    # Get data of interest (orients vs rates vs channel)
    oe_epoch = epochs[0]  # openephys
    assert (oe_epoch.annotations['provenance'] == 'open-ephys')
    ps_epoch = epochs[1]  # psychopy
    assert (ps_epoch.annotations['provenance'] == 'psychopy')

    # Create directory for figures
    exdir_file = exdir.File(data_path, plugins=exdir.plugins.quantities)
    figures_group = exdir_file.require_group('figures')

    raster_start = raster_start * pq.s
    raster_stop = raster_stop * pq.s
    orients = ps_epoch.labels  # the labels are orientations (135, 90, ...)

    def plot(channel_num, channel_path, spiketrains):
        # Create figures from spiketrains of one channel group.
        for spiketrain in spiketrains:
            try:
                # Skip clusters that were marked as noise during sorting.
                if spiketrain.annotations["cluster_group"] == "noise":
                    continue
            except KeyError:
                msg = "Cluster/channel group {} seems to not have been sorted in phys".format(
                    channel_num)
                raise KeyError(msg)

            figure_id = "{}_{}_".format(channel_num,
                                        spiketrain.annotations['cluster_id'])

            sns.set()
            sns.set_style("white")
            # Raster plot processing
            trials = er.make_spiketrain_trials(spiketrain,
                                               oe_epoch,
                                               t_start=raster_start,
                                               t_stop=raster_stop)
            er.add_orientation_to_trials(trials, orients)
            orf_path = os.path.join(channel_path,
                                    figure_id + "orrient_raster.png")
            orient_raster_fig = orient_raster_plots(trials)
            orient_raster_fig.savefig(orf_path)

            # Orientation vs spike-frequency plot (tuning curves) processing
            trials = er.make_spiketrain_trials(spiketrain, oe_epoch)
            er.add_orientation_to_trials(trials, orients)
            tf_path = os.path.join(channel_path, figure_id + "tuning.png")
            tuning_fig = plot_tuning_overview(trials, spiketrain)
            tuning_fig.savefig(tf_path)

            # Reset before next loop to save memory
            plt.close(fig="all")

    if rem_channel == "all":
        channels = list(range(n_channel))
        if skip_channels is not None:
            # BUG FIX: the original deleted entries by *index* while looping
            # over skip_channels, so each deletion shifted later indices and
            # the wrong channels were removed. Remove by value instead.
            if isinstance(skip_channels, int):
                skip = {skip_channels}
            else:
                skip = {int(c) for c in skip_channels}
            channels = [c for c in channels if c not in skip]
        for channel in channels:
            channel_name = "channel_{}".format(channel)
            channel_group = figures_group.require_group(channel_name)
            # BUG FIX: hard-coded Windows separator ("figures\\") produced a
            # file named "figures\channel_N" on POSIX; join portably instead.
            channel_path = os.path.join(str(data_path), "figures", channel_name)

            spiketrains = er.load_spiketrains(str(data_path), channel)

            plot(channel, channel_path, spiketrains)

    elif isinstance(rem_channel, int):
        channel_name = "channel_{}".format(rem_channel)
        channel_group = figures_group.require_group(channel_name)
        channel_path = os.path.join(str(data_path), "figures", channel_name)

        spiketrains = er.load_spiketrains(str(data_path), rem_channel)

        plot(rem_channel, channel_path, spiketrains)
Example #11
0
def load_data_frames(queries=None,
                     labels=None,
                     colors=None,
                     stim_location='stim_location=="ms"'):
    """Assemble the per-unit analysis DataFrame for the project.

    Merges session/unit identification with statistics, stimulus-response,
    shuffling thresholds and waveform results, then derives cell-type flags
    (ns_inhibited, ns_not_inhibited, gridcell, bs_not_gridcell) and attaches
    label/color/query columns for each query.

    Parameters
    ----------
    queries, labels, colors : lists or None
        Pandas ``eval`` query strings with their display labels/colors.
        When ``queries`` is None all three are taken from
        ``get_queries(stim_location)``.
    stim_location : str
        Query fragment forwarded to ``get_queries``.

    Returns
    -------
    (data, labels, colors, queries) where ``data`` is the merged DataFrame.
    """
    cell_types = [
        'ns_inhibited', 'ns_not_inhibited', 'gridcell', 'bs_not_gridcell'
    ]
    import expipe
    import septum_mec.analysis.data_processing as dp
    if queries is None:
        queries, labels, colors, gridcell_query = get_queries(stim_location)
    else:
        # Caller supplied queries; we still need the gridcell threshold query.
        _, _, _, gridcell_query = get_queries()

    label_nums = list(range(len(labels)))

    project_path = dp.project_path()
    project = expipe.get_project(project_path)
    actions = project.actions

    identification_action = actions['identify-neurons']
    sessions = pd.read_csv(identification_action.data_path('sessions'))
    units = pd.read_csv(identification_action.data_path('units'))
    session_units = pd.merge(sessions, units, on='action')

    statistics_action = actions['calculate-statistics']
    statistics_results = pd.read_csv(statistics_action.data_path('results'))
    statistics = pd.merge(session_units, statistics_results, how='left')
    # unit_day uniquely identifies a unit on a given recording day.
    statistics['unit_day'] = statistics.apply(
        lambda x: str(x.unit_idnum) + '_' + x.action.split('-')[1], axis=1)

    #     statistics_action_extra = actions['calculate-statistics-extra']
    #     statistics_action_extra = pd.read_csv(statistics_action_extra.data_path('results'))
    #     statistics = pd.merge(statistics, statistics_action_extra, how='left')

    stim_response_action = actions['stimulus-response']
    stim_response_results = pd.read_csv(
        stim_response_action.data_path('results'))
    statistics = pd.merge(statistics, stim_response_results, how='left')

    # 95th-percentile shuffling thresholds; "_threshold" suffix disambiguates
    # columns that also exist in `statistics`.
    shuffling = actions['shuffling']
    quantiles_95 = pd.read_csv(shuffling.data_path('quantiles_95'))
    quantiles_95.head()
    action_columns = ['action', 'channel_group', 'unit_name']
    data = pd.merge(statistics,
                    quantiles_95,
                    on=action_columns,
                    suffixes=("", "_threshold"))

    data['specificity'] = np.log10(data['in_field_mean_rate'] /
                                   data['out_field_mean_rate'])

    # waveform
    waveform_action = actions['waveform-analysis']
    waveform_results = pd.read_csv(waveform_action.data_path('results')).drop(
        'template', axis=1)

    data = data.merge(waveform_results, how='left')

    data.bs = data.bs.astype(bool)

    # t_i_peak == t_i_peak is a NaN test: True only for non-NaN values.
    data.loc[data.eval('t_i_peak == t_i_peak and not bs'),
             'ns_inhibited'] = True
    data.ns_inhibited.fillna(False, inplace=True)

    data.loc[data.eval('t_i_peak != t_i_peak and not bs'),
             'ns_not_inhibited'] = True
    data.ns_not_inhibited.fillna(False, inplace=True)

    # if a neuron is significantly inhibited once, we count it as a ns_inhibited

    data.loc[data.unit_id.isin(data.query('ns_inhibited').unit_id.values),
             'ns_inhibited'] = True
    # we also change the label from not-inhibited to inhibited
    data.loc[data.eval('ns_inhibited'), 'ns_not_inhibited'] = False
    data.loc[data.eval('ns_inhibited'), 'bs'] = False
    #     data.loc[data.unit_id.isin(data.query('ns_not_inhibited').unit_id.values), 'ns_not_inhibited'] = True

    # gridcells
    sessions_above_threshold = data.query(gridcell_query)
    print("Number of sessions above threshold", len(sessions_above_threshold))
    print("Number of animals",
          len(sessions_above_threshold.groupby(['entity'])))

    gridcell_sessions = data[data.unit_day.isin(
        sessions_above_threshold.unit_day.values)]
    print("Number of individual gridcells",
          gridcell_sessions.unit_idnum.nunique())
    print("Number of gridcell recordings", len(gridcell_sessions))
    data.loc[:, 'gridcell'] = np.nan
    # NOTE(review): `data.isin(gridcell_sessions)` returns a full boolean
    # DataFrame, not a Series — assigning it to a single column looks wrong
    # (likely intended `data.unit_day.isin(gridcell_sessions.unit_day)` or
    # `data.index.isin(gridcell_sessions.index)`). Confirm before changing.
    data['gridcell'] = data.isin(gridcell_sessions)

    data.loc[data.eval('not gridcell and bs'), 'bs_not_gridcell'] = True
    data.bs_not_gridcell.fillna(False, inplace=True)

    # Attach display metadata for every query that matches each row.
    for i, query in enumerate(queries):
        data.loc[data.eval(query), 'label'] = labels[i]
        data.loc[data.eval(query), 'label_num'] = label_nums[i]
        data.loc[data.eval(query), 'query'] = query
        data.loc[data.eval(query), 'color'] = colors[i]

    # Last matching cell type wins when flags overlap.
    data['cell_type'] = np.nan
    for cell_type in cell_types:
        data.loc[data.eval(cell_type), 'cell_type'] = cell_type

    return data, labels, colors, queries
Example #12
0
        command_line = ' '.join(command_line)
    else:
        raise TypeError('str or list')
    logger.info(command_line)
    command_line_process = subprocess.Popen(
        command_line_args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT
    )
    try:
        stdout, stderr =  command_line_process.communicate()
        for line in stdout.decode().split('\n'):
            logger.info(line)
        if 'Error' in stdout.decode():
            print('Error occured.')
    except Exception as e:
        logger.exception('Exception: ')


# Run the 'spatial' analysis (via the expipe CLI) for every Recording action
# in the project, refreshing the auth token after each run.
project = expipe.get_project(PAR.PROJECT_ID)
# your code
for action in project.actions:
    # NOTE(review): this assumes iterating ``project.actions`` yields action
    # objects with ``.type``/``.id``/``.tags``; elsewhere in this file actions
    # are fetched by key (``project.actions[id]``) — confirm the expipe
    # version in use does not yield plain keys here.
    if action.type != 'Recording':
        continue
    # if 'no' in action.tags:
    #     continue
    print('Evaluating ', action.id)
    run_shell_command(['expipe', 'analyse', action.id, '-a',
                       'spatial', '--skip'])
    expipe.core.refresh_token()