# Exemplo n.º 1
# 0
def exportToElphy(data_store_location,elphy_export_location,sheets=None,threshold=None):
    """Export recorded segments from a mozaik datastore into Elphy .dat files.

    One .dat file is written per (stimulus name, sheet, non-trial parameter
    combination); trials of the same stimulus are collapsed into a single file.

    Parameters
    ----------
    data_store_location : str
        Root directory of the pickled mozaik datastore to read.
    elphy_export_location : str
        Directory the .dat files are written to (created if it does not exist).
    sheets : list of str or None
        Sheet names to export; None means every sheet in the datastore.
    threshold : float or None
        If given, spikes are inserted into the Vm traces (addSpikes) before export.

    Raises
    ------
    ValueError
        If elphy_export_location exists but is not a directory.
    """
    import os.path
    if not os.path.isdir(elphy_export_location):
        if os.path.exists(elphy_export_location):
            raise ValueError("The elphy export path is not a directory")
        else:
            os.makedirs(elphy_export_location)

    setup_logging()
    data_store = PickledDataStore(load=True, parameters=ParameterSet({'root_directory': data_store_location, 'store_stimuli': False}))
    ps = MP.parameter_value_list([MP.MozaikParametrized.idd(s) for s in data_store.get_stimuli()], 'name')
    # Resolve the default once, instead of re-checking on every stimulus iteration.
    if sheets is None:
        sheets = data_store.sheets()
    for i, sn in enumerate(ps):
        for shn in sheets:
            dsv = param_filter_query(data_store, st_name=sn, sheet_name=shn)
            if not dsv.get_stimuli():
                continue
            varying_parameters = MP.varying_parameters([MP.MozaikParametrized.idd(s) for s in dsv.get_stimuli()])

            segments, stimuli = MP.colapse(dsv.get_segments(), [MP.MozaikParametrized.idd(s) for s in dsv.get_stimuli()], parameter_list=['trial'], allow_non_identical_objects=True)
            for j, (segs, st) in enumerate(zip(segments, stimuli)):
                # Make sure all segments are fully loaded; in the future lazy
                # loading should make this explicit step unnecessary.
                for s in segs:
                    s.load_full()

                # Build a file name encoding stimulus name, sheet and every
                # varying parameter except 'trial' (trials share one file).
                filename = "name=" + sn + "#" + "sheet_name=" + shn
                for pn in varying_parameters:
                    if pn != "trial":
                        filename += "#" + str(pn) + "=" + str(getattr(MP.MozaikParametrized.idd(st), pn))
                path = os.path.join(elphy_export_location, filename + ".dat")

                # If the threshold is defined add spikes into the Vms.
                if threshold is not None:
                    for seg in segs:
                        addSpikes(seg, threshold)

                createFileFromSegmentList(segs, path)
                print("Finished saving file %d/%d for sheet %s and %d-th stimulus" % (j + 1, len(segments), shn, i))
                # Release segments from memory to keep the footprint bounded.
                for s in segs:
                    s.release()
        print("Finished saving %d/%d stimulus" % (i + 1, len(ps)))
# Exemplo n.º 2
# 0
# Script: load a mozaik datastore given on the command line and normalize the
# x-coordinate of all 'FlashedBar' stimulus ids to 0.
import sys  # sys was used below (sys.argv) but never imported in this snippet
print(sys.argv)
import matplotlib
matplotlib.use('Agg')  # headless backend: must be selected before any pyplot import
import os
from mozaik.controller import setup_logging
import mozaik
from mozaik.storage.datastore import Hdf5DataStore,PickledDataStore
from mozaik.tools.mozaik_parametrized import colapse, colapse_to_dictionary, MozaikParametrized
from mozaik.analysis.technical import NeuronAnnotationsToPerNeuronValues
from parameters import ParameterSet
import numpy
from mozaik.storage import queries
from mozaik.controller import Global
Global.root_directory = sys.argv[1]+'/'

setup_logging()

data_store = PickledDataStore(load=True,parameters=ParameterSet({'root_directory':sys.argv[1],'store_stimuli' : False}),replace=True)

NeuronAnnotationsToPerNeuronValues(data_store,ParameterSet({})).analyse()
analog_ids = queries.param_filter_query(data_store,sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()


dsv = queries.param_filter_query(data_store,st_name='FlashedBar')
# Zero the x position in the stimulus id of every analysis data structure.
for ads in dsv.get_analysis_result():
    sid = MozaikParametrized.idd(ads.stimulus_id)
    sid.x=0
    ads.stimulus_id = str(sid)
# Same normalization for the recorded segments.
for seg in dsv.get_segments():
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x=0
    # NOTE(review): unlike the loop above, the modified id is never written
    # back (no `seg.annotations['stimulus'] = str(sid)`) — confirm whether
    # this was intended or the write-back is missing / in a truncated part.
# Exemplo n.º 3
# 0
# Script: run (or re-load) the VogelsAbbott2005 mozaik simulation workflow.
# NOTE(review): this snippet appears truncated — `mozaik`, `run_workflow`,
# `parse_workflow_args`, `setup_logging` and `result_directory_name` are used
# below without a visible import; presumably imported in the missing portion.
from experiments import create_experiments
from model import VogelsAbbott
from mozaik.storage.datastore import Hdf5DataStore,PickledDataStore
from analysis_and_visualization import perform_analysis_and_visualization
from parameters import ParameterSet

#mpi_comm = MPI.COMM_WORLD
logger = mozaik.getMozaikLogger()
simulation_name = "VogelsAbbott2005"
simulation_run_name, _, _, _, modified_parameters = parse_workflow_args()

# Manual toggle: True runs a fresh simulation, the else-branch re-loads a
# previously pickled datastore for the same run instead.
if True:
    data_store,model = run_workflow(simulation_name,VogelsAbbott,create_experiments)
    # Persist the Exc->Exc connectivity into the datastore for later analysis.
    model.connectors['ExcExcConnection'].store_connections(data_store)    
else: 
    setup_logging()
    data_store = PickledDataStore(
        load=True,
        parameters=ParameterSet(
            {
                "root_directory": result_directory_name(
                    simulation_run_name, simulation_name, modified_parameters
                ),
                "store_stimuli": False,
            }
        ),
        replace=True,
    )
    logger.info('Loaded data store')

#if mpi_comm.rank == 0:
# Exemplo n.º 4
# 0
def exportToElphy(data_store_location,
                  elphy_export_location,
                  sheets=None,
                  threshold=None):
    """Export recorded segments from a mozaik datastore into Elphy .dat files.

    One .dat file is written per (stimulus name, sheet, non-trial parameter
    combination); trials of the same stimulus are collapsed into a single file.

    Parameters
    ----------
    data_store_location : str
        Root directory of the pickled mozaik datastore to read.
    elphy_export_location : str
        Directory the .dat files are written to (created if it does not exist).
    sheets : list of str or None
        Sheet names to export; None means every sheet in the datastore.
    threshold : float or None
        If given, spikes are inserted into the Vm traces (addSpikes) before export.

    Raises
    ------
    ValueError
        If elphy_export_location exists but is not a directory.
    """
    import os.path
    if not os.path.isdir(elphy_export_location):
        if os.path.exists(elphy_export_location):
            raise ValueError("The elphy export path is not a directory")
        else:
            os.makedirs(elphy_export_location)

    setup_logging()
    data_store = PickledDataStore(load=True,
                                  parameters=ParameterSet({
                                      'root_directory':
                                      data_store_location,
                                      'store_stimuli':
                                      False
                                  }))
    ps = MP.parameter_value_list(
        [MP.MozaikParametrized.idd(s) for s in data_store.get_stimuli()],
        'name')
    # Resolve the default once, instead of re-checking on every stimulus iteration.
    if sheets is None:
        sheets = data_store.sheets()
    for i, sn in enumerate(ps):
        for shn in sheets:
            dsv = param_filter_query(data_store, st_name=sn, sheet_name=shn)
            if not dsv.get_stimuli():
                continue
            varying_parameters = MP.varying_parameters(
                [MP.MozaikParametrized.idd(s) for s in dsv.get_stimuli()])

            segments, stimuli = MP.colapse(
                dsv.get_segments(),
                [MP.MozaikParametrized.idd(s) for s in dsv.get_stimuli()],
                parameter_list=['trial'],
                allow_non_identical_objects=True)
            for j, (segs, st) in enumerate(zip(segments, stimuli)):
                # Make sure all segments are fully loaded; in the future lazy
                # loading should make this explicit step unnecessary.
                for s in segs:
                    s.load_full()

                # Build a file name encoding stimulus name, sheet and every
                # varying parameter except 'trial' (trials share one file).
                filename = "name=" + sn + "#" + "sheet_name=" + shn
                for pn in varying_parameters:
                    if pn != "trial":
                        filename += "#" + str(pn) + "=" + str(
                            getattr(MP.MozaikParametrized.idd(st), pn))
                path = os.path.join(elphy_export_location, filename + ".dat")

                # If the threshold is defined add spikes into the Vms.
                if threshold is not None:
                    for seg in segs:
                        addSpikes(seg, threshold)

                createFileFromSegmentList(segs, path)
                print("Finished saving file %d/%d for sheet %s and %d-th stimulus" % (
                    j + 1, len(segments), shn, i))
                # Release segments from memory to keep the footprint bounded.
                for s in segs:
                    s.release()
        print("Finished saving %d/%d stimulus" % (i + 1, len(ps)))