Exemple #1
0
# coding: utf8
"""
For generating a DB table describing short-term dynamics.

"""
from __future__ import print_function, division

import os, logging
from collections import OrderedDict
import numpy as np
from ...dynamics import generate_pair_dynamics
from .pipeline_module import MultipatchPipelineModule
from .experiment import ExperimentPipelineModule
from neuroanalysis.util.optional_import import optional_import
list_cached_results, load_cache_file, StochasticReleaseModel = optional_import(
    'aisynphys.stochastic_release_model',
    ['list_cached_results', 'load_cache_file', 'StochasticReleaseModel'])
from ...util import datetime_to_timestamp, timestamp_to_datetime
from .dynamics import generate_pair_dynamics


class SynapseModelPipelineModule(MultipatchPipelineModule):
    """Summarizes stochastic model outputs for each synapse
    """
    name = 'synapse_model'
    dependencies = [ExperimentPipelineModule]
    table_group = ['synapse_model']

    @classmethod
    def create_db_entries(cls, job, session):
        logger = logging.getLogger(__name__)
# coding: utf8
"""
For generating a table that describes cell-intrinsic properties

"""
from __future__ import print_function, division

import traceback, sys
import numpy as np

from neuroanalysis.util.optional_import import optional_import
extractors_for_sweeps = optional_import('ipfx.data_set_features',
                                        'extractors_for_sweeps')
LongSquareAnalysis = optional_import('ipfx.stimulus_protocol_analysis',
                                     'LongSquareAnalysis')
Sweep, SweepSet = optional_import('ipfx.sweep', ['Sweep', 'SweepSet'])
extract_chirp_features = optional_import('ipfx.chirp_features',
                                         'extract_chirp_features')
get_complete_long_square_features = optional_import(
    'ipfx.bin.features_from_output_json', 'get_complete_long_square_features')

from .pipeline_module import MultipatchPipelineModule
from .experiment import ExperimentPipelineModule
from .dataset import DatasetPipelineModule
from ...nwb_recordings import get_intrinsic_recording_dict, get_pulse_times

SPIKE_FEATURES = [
    'upstroke_downstroke_ratio',
    'upstroke',
    'downstroke',
    'width',
Exemple #3
0
"""
Analysis code used to load tables of synapse features and perform dimensionality reduction on them,
with the intent to better understand what the major classes of synapses are, and how they relate
to cell and synapse properties.
"""
import os, pickle
import numpy as np
import pandas
import sklearn.preprocessing, sklearn.pipeline
import umap
from .database import default_db
from neuroanalysis.util.optional_import import optional_import
plt = optional_import('matplotlib.pyplot')
sns = optional_import('seaborn')

# Complete list of fields to load along with synapses by default
default_data_fields = [
    'experiment_ext_id',
    'pre_ext_id',
    'post_ext_id',
    'synapse_type',
    'pre_cre_type',
    'post_cre_type',
    'pre_cell_class',
    'post_cell_class',
    'species',
    'pre_layer',
    'post_layer',
    'distance',
    'n_ex_test_spikes',
    'n_in_test_spikes',
Exemple #4
0
# coding: utf8
"""
For generating a table that describes cell-intrinsic properties

"""
from __future__ import print_function, division

from neuroanalysis.util.optional_import import optional_import
get_long_square_features, get_chirp_features = optional_import(
    'aisynphys.intrinsic_ephys',
    ['get_long_square_features', 'get_chirp_features'])

from .pipeline_module import MultipatchPipelineModule
from .experiment import ExperimentPipelineModule
from .dataset import DatasetPipelineModule
from ...nwb_recordings import get_intrinsic_recording_dict


class IntrinsicPipelineModule(MultipatchPipelineModule):

    name = 'intrinsic'
    dependencies = [ExperimentPipelineModule, DatasetPipelineModule]
    table_group = ['intrinsic']

    @classmethod
    def create_db_entries(cls, job, session):
        db = job['database']
        job_id = job['job_id']

        # Load experiment from DB
        expt = db.experiment_from_ext_id(job_id, session=session)
Exemple #5
0
"""
For generating a DB table describing a cell's location within cortex,
and adding layer-aligned distance info to the Pair table.
"""
from ..pipeline_module import DatabasePipelineModule
from .experiment import ExperimentPipelineModule
from aisynphys import lims
import numpy as np
from neuroanalysis.util.optional_import import optional_import

get_depths_slice = optional_import('aisynphys.layer_depths',
                                   'get_depths_slice')
import logging

logger = logging.getLogger(__name__)


class CortexLocationPipelineModule(DatabasePipelineModule):
    """Imports cell location data for each experiment
    """
    name = 'cortical_location'
    dependencies = [ExperimentPipelineModule]
    table_group = ['cortical_cell_location']

    @classmethod
    def create_db_entries(cls, job, session):
        lims_layers = get_lims_layers()
        db = job['database']
        expt_id = job['job_id']
        errors = []
Exemple #6
0
import os, glob, pickle, time
from collections import OrderedDict
from .pipeline_module import MultipatchPipelineModule
from ... import config
from ...util import timestamp_to_datetime
from ...data.slice import Slice
from neuroanalysis.util.optional_import import optional_import
getDirHandle = optional_import('acq4.util.DataManager', 'getDirHandle')


class SlicePipelineModule(MultipatchPipelineModule):
    """Imports per-slice metadata into DB.
    """
    name = 'slice'
    dependencies = []
    table_group = ['slice']
    
    @classmethod
    def create_db_entries(cls, job, session):
        job_id = job['job_id']
        db = job['database']

        slices = all_slices()
        path = slices[job_id]
        
        ignore_file = os.path.join(path, 'ignore.txt')
        if os.path.exists(ignore_file):
            err = open(ignore_file).read()
            raise Exception("Ignoring slice %s: %s" % (job_id, err))
        
        sl = Slice.get(path)
Exemple #7
0
from .pipeline import Pipeline
from . import pipeline_module
from . import multipatch

from neuroanalysis.util.optional_import import optional_import
opto = optional_import('.opto', package=__name__)


def all_pipelines():
    """Collect every registered pipeline.

    Returns:
        dict: Mapping of {pipeline_name: pipeline_class} for all
        pipeline classes known to the ``Pipeline`` registry.
    """
    registry = Pipeline.all_pipelines()
    return registry