def glif_api():
    endpoint = None
    if 'TEST_API_ENDPOINT' in os.environ:
        endpoint = os.environ['TEST_API_ENDPOINT']
        return GlifApi(endpoint)
    else:
        return GlifApi()
def model_ids_for_cell_type(cells_df, cell_type_tag):
    model_ids = []
    type_cells = cells_df[cells_df.transgenic_line.str.contains(cell_type_tag)]
    glif_api = GlifApi()
    for neuron_id in type_cells.index.values:
        # query the GLIF models for this specimen once and reuse the result
        models_metadata = glif_api.get_neuronal_models([neuron_id])
        if models_metadata:
            for model in models_metadata[0]['neuronal_models']:
                model_ids.append(model['id'])
    return np.array(model_ids)
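# Hedged usage sketch for model_ids_for_cell_type(). How cells_df is built is an
# assumption here: a pandas DataFrame of CellTypesCache.get_cells() records
# indexed by specimen id, with a 'transgenic_line' column; the 'Pvalb' tag is a
# placeholder.
import pandas as pd
from allensdk.core.cell_types_cache import CellTypesCache

ctc = CellTypesCache()
cells_df = pd.DataFrame(ctc.get_cells()).set_index('id')
pvalb_model_ids = model_ids_for_cell_type(cells_df, 'Pvalb')
print(len(pvalb_model_ids), 'GLIF model ids found')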
def generate_train_from_model_id(model_id, stimulus_amplitude=1e-8,
                                 duration=1e6, noise_exponent=0):
    # fetch the model configuration and build the GLIF neuron
    glif_api = GlifApi()
    neuron_config = glif_api.get_neuron_configs([model_id])
    neuron = GlifNeuron.from_dict(neuron_config[model_id])

    # drive the neuron with colored-noise current (amplitude in Amps)
    stimulus = stimulus_amplitude * colorednoise(exponent=noise_exponent,
                                                 size=int(duration))
    neuron.dt = 5e-6
    output = neuron.run(stimulus)

    spike_times = output['interpolated_spike_times']
    return spike_times
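# Hedged usage sketch for generate_train_from_model_id(). The model id is the
# example id used elsewhere in this repo; the noise exponent of 1 (pink noise)
# is a placeholder choice.
spike_times = generate_train_from_model_id(566302806,
                                           stimulus_amplitude=1e-8,
                                           duration=1e6,
                                           noise_exponent=1)
print('simulated %d spikes' % len(spike_times))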
def main():
    args = parse_arguments()
    logging.getLogger().setLevel(args.log_level)

    glif_api = None
    if (args.neuron_config_file is None or
            args.sweeps_file is None or
            args.ephys_file is None):
        assert args.neuronal_model_id is not None, \
            ("A neuronal model id is required if no neuron config file, "
             "sweeps file, or ephys data file is provided.")
        glif_api = GlifApi()
        glif_api.get_neuronal_model(args.neuronal_model_id)

    if args.neuron_config_file:
        neuron_config = json_utilities.read(args.neuron_config_file)
    else:
        neuron_config = glif_api.get_neuron_config()

    if args.sweeps_file:
        sweeps = json_utilities.read(args.sweeps_file)
    else:
        sweeps = glif_api.get_ephys_sweeps()

    if args.ephys_file:
        ephys_file = args.ephys_file
    else:
        ephys_file = 'stimulus_%d.nwb' % args.neuronal_model_id
        if not os.path.exists(ephys_file):
            logging.info("Downloading stimulus to %s." % ephys_file)
            glif_api.cache_stimulus_file(ephys_file)
        else:
            logging.warning("Reusing %s because it already exists." % ephys_file)

    if args.output_ephys_file:
        output_ephys_file = args.output_ephys_file
    else:
        logging.warning("Overwriting input file data with simulated data in place.")
        output_ephys_file = ephys_file

    neuron = GlifNeuron.from_dict(neuron_config)

    # filter out test sweeps
    sweep_numbers = [s['sweep_number'] for s in sweeps
                     if s['stimulus_name'] != 'Test']

    simulate_neuron(neuron, sweep_numbers, ephys_file, output_ephys_file,
                    args.spike_cut_value)
def __init__(self, allen_id=None):
    if allen_id is None:
        allen_id = 566302806  # default GLIF neuronal model id
    self.allen_id = allen_id
    glif_api = GlifApi()
    self.nc = glif_api.get_neuron_configs([allen_id])[allen_id]
    self.glif = GlifNeuron.from_dict(self.nc)
    self.metad = glif_api.get_neuronal_models_by_id([allen_id])[0]
def init_backend(self, attrs=None, cell_name='alice',
                 current_src_name='hannah', DTC=None):
    backend = 'GLIF'
    super(GLIFBackend, self).init_backend()
    self.model._backend.use_memory_cache = False
    self.current_src_name = current_src_name
    self.cell_name = cell_name
    self.vM = None
    self.allen_id = None
    self.attrs = attrs
    self.nc = None
    self.temp_attrs = None

    if self.allen_id is None:
        try:
            # reuse a previously pickled neuron config if one exists
            self.nc = pickle.load(open('allen_id.p', 'rb'))
        except Exception:
            self.allen_id = 566302806
            glif_api = GlifApi()
            self.nc = glif_api.get_neuron_configs([self.allen_id])[self.allen_id]
            pickle.dump(copy.copy(self.nc), open('allen_id.p', 'wb'))
    else:
        try:
            self.nc = pickle.load(open('allen_id.p', 'rb'))
        except Exception:
            glif_api = GlifApi()
            self.glif = glif_api.get_neuronal_models_by_id([self.allen_id])[0]
            self.nc = glif_api.get_neuron_configs([self.allen_id])[self.allen_id]
            pickle.dump(self.nc, open('allen_id.p', 'wb'))

    self.glif = GlifNeuron.from_dict(self.nc)

    if attrs is not None:
        self.set_attrs(**attrs)
        self.sim_attrs = attrs

    if DTC is not None:
        if DTC.attrs is not None:
            self.set_attrs(**DTC.attrs)
        if hasattr(DTC, 'current_src_name'):
            self._current_src_name = DTC.current_src_name
        if hasattr(DTC, 'cell_name'):
            self.cell_name = DTC.cell_name
def test_download():
    if os.path.exists(OUTPUT_DIR):
        shutil.rmtree(OUTPUT_DIR)
    os.makedirs(OUTPUT_DIR)

    glif_api = GlifApi()
    glif_api.get_neuronal_model(NEURONAL_MODEL_ID)
    glif_api.cache_stimulus_file(os.path.join(OUTPUT_DIR,
                                              '%d.nwb' % NEURONAL_MODEL_ID))

    neuron_config = glif_api.get_neuron_config()
    json_utilities.write(os.path.join(OUTPUT_DIR,
                                      '%d_neuron_config.json' % NEURONAL_MODEL_ID),
                         neuron_config)

    ephys_sweeps = glif_api.get_ephys_sweeps()
    json_utilities.write(os.path.join(OUTPUT_DIR, 'ephys_sweeps.json'),
                         ephys_sweeps)
def __init__(self):
    from allensdk.model.glif.glif_neuron import GlifNeuron
    try:
        from allensdk.api.queries.glif_api import GlifApi
        from allensdk.core.cell_types_cache import CellTypesCache
        import allensdk.core.json_utilities as json_utilities
        import sciunit
    except ImportError:
        # install the missing dependencies on the fly, then retry the imports
        import os
        os.system('pip install allensdk')
        from allensdk.api.queries.glif_api import GlifApi
        from allensdk.core.cell_types_cache import CellTypesCache
        import allensdk.core.json_utilities as json_utilities
        os.system('pip install git+https://github.com/scidash/sciunit@dev')

    neuronal_model_id = 566302806
    glif_api = GlifApi()
    nc = glif_api.get_neuron_configs([neuronal_model_id])[neuronal_model_id]
    self.nm = GlifNeuron.from_dict(nc)
def download_glif_models(cell_ids, base_dir, incl_ephys=True, force_overwrite=False):
    """Goes through the list of cell_ids and downloads cell config and ephys data
    into base_dir/cell_<ID>. Then looks up all possible models and downloads model
    files into base_dir/cell_<ID>/<MODEL_TYPE>_<MODEL_ID>/.
    """
    # Determine the best url for connecting to the cell-types db
    try:
        # see if we can connect to the internal cell-types db
        request = requests.get('http://icelltypes/')
        if request.status_code == 200:
            base_uri = 'http://icelltypes/'
        else:
            base_uri = None
    except Exception:
        base_uri = None  # use the default url

    base_dir = base_dir if base_dir.endswith('/') else base_dir + '/'

    valid_cells = []
    ct_api = CellTypesApi(base_uri)
    for cell in ct_api.list_cells():
        if cell['id'] in cell_ids:
            # create directory for cell
            cell_home = '{}cell_{}/'.format(base_dir, cell['id'])
            if not os.path.exists(cell_home):
                os.makedirs(cell_home)

            # save metadata
            cell_metadata_file = cell_home + 'cell_metadata.json'
            if force_overwrite or not os.path.exists(cell_metadata_file):
                print('Saving metadata for cell {} in {}'.format(cell['id'], cell_metadata_file))
                json_utilities.write(cell_metadata_file, cell)
            else:
                print('File {} already exists. Skipping.'.format(cell_metadata_file))

            # save ephys data
            if incl_ephys:
                cell_ephys_file = cell_home + 'ephys_data.nwb'
                if force_overwrite or not os.path.exists(cell_ephys_file):
                    print('Saving ephys data for cell {} in {}'.format(cell['id'], cell_ephys_file))
                    ct_api.save_ephys_data(cell['id'], cell_ephys_file)
                else:
                    print('File {} already exists. Skipping.'.format(cell_ephys_file))

            # save sweeps file
            sweeps_file = cell_home + 'ephys_sweeps.json'
            if force_overwrite or not os.path.exists(sweeps_file):
                print('- Saving sweeps file to {}'.format(sweeps_file))
                ephys_sweeps = ct_api.get_ephys_sweeps(cell['id'])
                json_utilities.write(sweeps_file, ephys_sweeps)
            else:
                print('- File {} already exists. Skipping.'.format(sweeps_file))

            # keep track of valid ids
            valid_cells.append(cell['id'])
            cell_ids.remove(cell['id'])

    for cid in cell_ids:
        print('Warning: cell #{} was not found in the cell-types database'.format(cid))

    # Iterate through all available models and find the ones corresponding to the cell list
    glif_models = {}  # map model-id to its directory
    glif_api = GlifApi(base_uri=base_uri)
    for model in glif_api.list_neuronal_models():
        if model['specimen_id'] in valid_cells:
            # save model files into <BASE_DIR>/cell_<CELL_ID>/<MODEL_TYPE>_<MODEL_ID>/
            cell_id = model['specimen_id']
            model_id = model['id']
            # type of model: GLIF-LIF, GLIF-ASC, etc.
            model_type = model['neuronal_model_template_id']
            type_name = model_id2name.get(model_type, None)
            if type_name is None:
                print('Warning: Unknown model type {} ({}) for cell/model {}/{}'.format(
                    model_type, model['neuronal_model_template']['name'], cell_id, model_id))
                type_name = model_type
            model_home_dir = '{}cell_{}/{}_{}/'.format(base_dir, cell_id, type_name, model_id)
            glif_models[model_id] = model_home_dir

    # go through all the found models and download the necessary files
    n_models = len(glif_models)
    for i, (gid, home_dir) in enumerate(glif_models.items()):
        print('Processing model {} ({} of {})'.format(gid, (i + 1), n_models))
        model_metadata = glif_api.get_neuronal_model(gid)
        if not os.path.exists(home_dir):
            os.makedirs(home_dir)

        # save model metadata
        metadata_file = home_dir + 'metadata.json'
        if force_overwrite or not os.path.exists(metadata_file):
            print('- Saving metadata file to {}'.format(metadata_file))
            with open(metadata_file, 'w') as fp:
                json.dump(model_metadata, fp, indent=2)
        else:
            print('- File {} already exists. Skipping.'.format(metadata_file))

        # get neuron configuration file
        config_file = home_dir + 'config.json'
        if force_overwrite or not os.path.exists(config_file):
            print('- Saving configuration file to {}'.format(config_file))
            neuron_config = glif_api.get_neuron_config()
            json_utilities.write(config_file, neuron_config)
        else:
            print('- File {} already exists. Skipping.'.format(config_file))
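# Hedged usage sketch for download_glif_models(). The cell id is the example
# specimen id that appears elsewhere in this repo, and the output directory is
# a placeholder.
download_glif_models(cell_ids=[325464516],
                     base_dir='glif_cells',
                     incl_ephys=True,
                     force_overwrite=False)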
from allensdk.api.queries.glif_api import GlifApi
import allensdk.core.json_utilities as json_utilities

neuronal_model_id = 472423251

glif_api = GlifApi()
glif_api.get_neuronal_model(neuronal_model_id)
glif_api.cache_stimulus_file('stimulus.nwb')

neuron_config = glif_api.get_neuron_config()
json_utilities.write('neuron_config.json', neuron_config)

ephys_sweeps = glif_api.get_ephys_sweeps()
json_utilities.write('ephys_sweeps.json', ephys_sweeps)
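# Hedged follow-up sketch: once the files above have been cached, the neuron
# configuration can be read back and turned into a runnable GLIF model, mirroring
# the GlifNeuron usage shown elsewhere in this repo.
from allensdk.model.glif.glif_neuron import GlifNeuron

neuron_config = json_utilities.read('neuron_config.json')
neuron = GlifNeuron.from_dict(neuron_config)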
class glifBackend(Backend):
    backend = 'glif'

    try:
        from allensdk.api.queries.glif_api import GlifApi
        from allensdk.core.cell_types_cache import CellTypesCache
        import allensdk.core.json_utilities as json_utilities
    except ImportError:
        import os
        os.system('pip install allensdk')
        from allensdk.api.queries.glif_api import GlifApi
        from allensdk.core.cell_types_cache import CellTypesCache
        import allensdk.core.json_utilities as json_utilities

    neuronal_model_id = 566302806

    # download model metadata
    glif_api = GlifApi()
    nm = glif_api.get_neuronal_models_by_id([neuronal_model_id])[0]

    # download the model configuration file
    nc = glif_api.get_neuron_configs([neuronal_model_id])[neuronal_model_id]
    neuron_config = glif_api.get_neuron_configs([neuronal_model_id])
    json_utilities.write('neuron_config.json', neuron_config)

    # download information about the cell
    ctc = CellTypesCache()
    ctc.get_ephys_data(nm['specimen_id'], file_name='stimulus.nwb')
    ctc.get_ephys_sweeps(nm['specimen_id'], file_name='ephys_sweeps.json')

    from allensdk.model.glif.glif_neuron import GlifNeuron

    # initialize the neuron
    neuron_config = json_utilities.read('neuron_config.json')
    neuron_config = neuron_config['566302806']
    neuron = GlifNeuron.from_dict(neuron_config)

    def init_backend(self, attrs=None, simulator='neuron', DTC=None):
        from pyNN import neuron
        self.neuron = neuron
        from pyNN.neuron import simulator as sim
        from pyNN.neuron import setup as setup
        from pyNN.neuron import Izhikevich
        from pyNN.neuron import Population
        from pyNN.neuron import DCSource
        self.Izhikevich = Izhikevich
        self.Population = Population
        self.DCSource = DCSource
        self.setup = setup
        self.model_path = None
        self.related_data = {}
        self.lookup = {}
        self.attrs = {}
        super(glifBackend, self).init_backend()
        if DTC is not None:
            self.set_attrs(**DTC.attrs)

    def get_membrane_potential(self):
        """Must return a neo.core.AnalogSignal.
        And must destroy the hoc vectors that comprise it.
        """
        dt = float(copy.copy(self.neuron.dt))
        data = self.population.get_data().segments[0]
        return data.filter(name="v")[0]

    def _local_run(self):
        '''
        pyNN lazy array demands a minimum population size of 3. Why is that?
        '''
        import numpy as np
        results = {}
        # For some reason you need to record from all three neurons in a population
        # in order to get the membrane potential from only the stimulated neuron.
        self.population[0:2].record(('v', 'spikes', 'u'))
        DURATION = 1000.0
        self.neuron.run(DURATION)
        data = self.population.get_data().segments[0]
        vm = data.filter(name="v")[0]
        results['vm'] = vm
        sample_freq = DURATION / len(vm)
        results['t'] = np.arange(0, len(vm), DURATION / len(vm))
        results['run_number'] = results.get('run_number', 0) + 1
        return results

    def load_model(self):
        self.Iz = None
        self.population = None
        self.setup(timestep=0.01, min_delay=1.0)
        import pyNN
        pop = self.neuron.Population(
            3, pyNN.neuron.Izhikevich(a=0.02, b=0.2, c=-65, d=6,
                                      i_offset=[0.014, -65.0, 0.0]))
        self.population = pop

    def set_attrs(self, **attrs):
        self.init_backend()
        self.model.attrs.update(attrs)
        assert self.model.attrs is not None
        attrs['i_offset'] = None
        attrs_ = {x: attrs[x] for x in ['a', 'b', 'c', 'd', 'i_offset']}
        attrs_['i_offset'] = 0.014
        self.population[0].set_parameters(**attrs_)
        print(self.population[0].get_parameters())
        self.neuron.h.psection()
        return self

    def inject_square_current(self, current):
        import copy
        attrs = copy.copy(self.model.attrs)
        self.init_backend()
        self.set_attrs(**attrs)
        c = copy.copy(current)
        if 'injected_square_current' in c.keys():
            c = current['injected_square_current']
        # strip the units off the current description
        c['delay'] = re.sub(r' ms$', '', str(c['delay']))
        c['duration'] = re.sub(r' ms$', '', str(c['duration']))
        c['amplitude'] = re.sub(r' pA$', '', str(c['amplitude']))
        stop = float(c['delay']) + float(c['duration'])
        start = float(c['delay'])
        amplitude = float(c['amplitude']) / 1000.0
        electrode = self.neuron.DCSource(start=start, stop=stop,
                                         amplitude=amplitude)
        print(self.population[0])
        print(type(self.population[0]))
        print(self.population[0].get_parameters())
        electrode.inject_into(self.population[0:1])
exit(1)

# model name and model template id mapping
LIF = 'LIF'
LIF_R = 'LIF-R'
LIF_ASC = 'LIF-ASC'
LIF_R_ASC = 'LIF-R-ASC'
LIF_R_ASC_A = 'LIF-R-ASC-A'

model_id2name = {
    395310469: LIF,
    395310479: LIF_R,
    395310475: LIF_ASC,
    471355161: LIF_R_ASC,
    395310498: LIF_R_ASC_A
}

glif_api = GlifApi()
for cell_result in glif_api.get_neuronal_models(cell_ids):  # e.g. cell_ids = [325464516]
    cell_id = cell_result['id']
    for curr_model in cell_result['neuronal_models']:
        if model_id2name[curr_model['neuronal_model_template_id']] != options.model:
            continue
        model_id = curr_model['id']
        neuron_config = glif_api.get_neuron_configs([model_id])[model_id]
        for stim in options.stimulus.split(','):
            simulate = stimulus[stim]
            output = simulate(cell_id, options.model, neuron_config)
            plt.figure('Cell ' + str(cell_id) + ' ' + options.model + ' ' + stim)
            plotter.plt_comparison_neurons(np.array(output['I']) * 1.0e12,
def glif_api():
    glif_api = GlifApi()
    return glif_api
# set matplotlib headless - this turns off the production of visible plots!
import matplotlib
matplotlib.use("Agg")

#===============================================================================
# example 1
#===============================================================================

from allensdk.api.queries.glif_api import GlifApi
from allensdk.core.cell_types_cache import CellTypesCache
import allensdk.core.json_utilities as json_utilities

neuronal_model_id = 566302806

# download model metadata
glif_api = GlifApi()
nm = glif_api.get_neuronal_models_by_id([neuronal_model_id])[0]

# download the model configuration file
nc = glif_api.get_neuron_configs([neuronal_model_id])[neuronal_model_id]
neuron_config = glif_api.get_neuron_configs([neuronal_model_id])
json_utilities.write('neuron_config.json', neuron_config)

# download information about the cell
ctc = CellTypesCache()
ctc.get_ephys_data(nm['specimen_id'], file_name='stimulus.nwb')
ctc.get_ephys_sweeps(nm['specimen_id'], file_name='ephys_sweeps.json')

#===============================================================================
# example 2
#===============================================================================
import time
import numpy as np

from allensdk.api.queries.glif_api import GlifApi
from allensdk.core.cell_types_cache import CellTypesCache
import allensdk.core.json_utilities as json_utilities

neuronal_model_id = 566302806

# download model metadata
glif_api = GlifApi()
nm = glif_api.get_neuronal_models_by_id([neuronal_model_id])[0]

# download the model configuration file
nc = glif_api.get_neuron_configs([neuronal_model_id])[neuronal_model_id]
neuron_config = glif_api.get_neuron_configs([neuronal_model_id])
json_utilities.write('neuron_config.json', neuron_config)

# download information about the cell
ctc = CellTypesCache()
ctc.get_ephys_data(nm['specimen_id'], file_name='stimulus.nwb')
ctc.get_ephys_sweeps(nm['specimen_id'], file_name='ephys_sweeps.json')

from allensdk.model.glif.glif_neuron import GlifNeuron

# initialize the neuron
neuron_config = json_utilities.read('neuron_config.json')['566302806']
neuron = GlifNeuron.from_dict(neuron_config)

# make a short square pulse. stimulus units should be in Amps.
stimulus = [0.0] * 100 + [10e-9] * 100 + [0.0] * 100
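# Hedged continuation sketch: run the short square pulse through the model.
# neuron.dt, neuron.run(), and the 'interpolated_spike_times' output key follow
# the usage in generate_train_from_model_id() above; the 5e-6 s time step is
# taken from that snippet as well.
neuron.dt = 5e-6
output = neuron.run(stimulus)
spike_times = output['interpolated_spike_times']
print('model fired %d spikes' % len(spike_times))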
from allensdk.api.queries.glif_api import GlifApi
import random
import pprint
import os

pp = pprint.PrettyPrinter(indent=4)

api = GlifApi()
models = api.list_neuronal_models()
max = 5  # limit on the number of records to dump (note: shadows the builtin max)


def write_to_file(directory, file_name, jstring):
    file_path = '%s/%s' % (directory, file_name)
    print("Writing to: %s" % file_path)
    if not os.path.isdir(directory):
        os.mkdir(directory)
    f = open(file_path, 'w')
    # crude dict-to-JSON conversion: pretty-print the record, then patch the
    # Python literals into their JSON equivalents
    pretty = pp.pformat(jstring)
    pretty = pretty.replace('\'', '"')
    pretty = pretty.replace('u"', '"')
    pretty = pretty.replace('None', 'null')
    pretty = pretty.replace('False', 'false')
    pretty = pretty.replace('True', 'true')
    f.write(pretty)
    f.close()
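# Hedged usage sketch for write_to_file(): dump the first few neuronal model
# records fetched above. The 'glif_models' directory and the 'model_<id>.json'
# naming are placeholders, not taken from this code.
for model in models[:max]:
    write_to_file('glif_models', 'model_%d.json' % model['id'], model)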
'''Written by Corinne Teeter. Grab the explained variance for the published
biophys models.'''
import numpy as np
from allensdk.core.cell_types_cache import CellTypesCache
import allensdk.internal.core.lims_utilities as lu
import pandas as pd
from allensdk.api.queries.glif_api import GlifApi
import os
import sys

relative_path = os.path.dirname(os.getcwd())
sys.path.append(os.path.join(relative_path, 'libraries'))

# Find all mouse cells with models
glif_api = GlifApi()
ctc = CellTypesCache(
    manifest_file=os.path.join(relative_path, 'cell_types_manifest.json'))

specimen_id_list = []
temp = ctc.get_cells()
for c in temp:
    if c['species'] == 'Mus musculus':
        specimen_id_list.append(c['id'])
print(len(specimen_id_list), 'mouse specimens in public database')


def get_expVar(specimen_id_list, keyword):
    '''Grab the explained variance values for the specimen id list in the public database.

    Inputs:
        specimen_id_list: list of integers
            desired specimen ids of data in the AIBS public database
def test_find_optimization_sweeps():
    ga = GlifApi()
    nm = ga.get_neuronal_model(473836744)
    sweeps = ga.get_ephys_sweeps()
    opt_sweeps, stim_index, errs = find_optimization_sweeps(sweeps)
def get_files_from_LIMS_public(output_path, glif_sp_ids=None, type='mouse'):
    '''Grab cre positive data config files from LIMS, sort them, and put them in
    the specified output folder.

    inputs:
        output_path: string
            specifies the path for files to be placed in
        glif_sp_ids: list of strings or integers
            specimen ids of cells to grab specifically. If None, all cells
            available on the Allen Institute Cell Types Database are used.
        type: string
            can be 'mouse' or 'human'. Note that if 'mouse' is specified, only
            cre positive mouse cells are grabbed (the code can be altered to
            get cre negative cells).

    output:
        Does not return values but creates the specified 'output_path' folder.
        Inside the folder a series of folders are created with the name format
        specimenid_cre. Inside those inner folders are the neuron configs of
        the available GLIF models along with the preprocessor files.
    '''
    glif_api = GlifApi()
    ctc = CellTypesCache(manifest_file=os.path.join(relative_path, 'cell_types_manifest.json'))

    # select the specimen ids to grab from the database
    # (cre positive or human cells which have at least 1 GLIF model)
    if glif_sp_ids is None:
        # if no specimen ids are specified, grab all data in the cell types manifest
        specimen_id_list = []
        if type == 'mouse':
            for c in ctc.get_cells():
                if c['reporter_status'] == 'cre reporter positive':
                    specimen_id_list.append(c['id'])
        elif type == 'human':
            print('getting human')
            for c in ctc.get_cells(species=['Homo Sapiens']):
                specimen_id_list.append(c['id'])
        print(specimen_id_list)

        # reduce the list to cells that have a GLIF model
        glif_sp_ids = []
        for sp in specimen_id_list:
            models = glif_api.get_neuronal_models(sp)[0]
            for m in models['neuronal_models']:
                if 'LIF' in m['name']:
                    glif_sp_ids.append(m['specimen_id'])

        glif_sp_ids = list(set(glif_sp_ids))
        print(len(glif_sp_ids), 'cre positive specimens with at least 1 LIF model')

    # create the overall output directory if it doesn't exist
    try:
        os.makedirs(output_path)
    except OSError:
        pass

    # go get the files corresponding to the specimen ids from the Allen Cell Types
    # Database and put them into the specified output directory
    for id in glif_sp_ids:
        model_query = glif_api.get_neuronal_models(id)[0]['neuronal_models']
        df = pd.DataFrame(model_query)
        for mt_id, short_name in zip(model_template_ids, model_names):
            dff = df[df['neuronal_model_template_id'] == mt_id]
            if len(dff) >= 2:
                print(dff)
                raise Exception("This is public data, there should not be more than 1 model")
            elif len(dff) == 1:
                use_me = dff
                # go get the file
                path = use_me['well_known_files'].iloc[0][0]['path']
                if type == 'mouse':
                    cre = (str(use_me['name'].values).split(')_'))[1].split(';')[0]
                elif type == 'human':
                    cre = 'human'
                else:
                    raise Exception('specified species not known')

                # convert old incomplete cre names
                if 'Ntsr1-Cre' in cre:
                    cre = 'Ntsr1-Cre_GN220'
                if 'Chat-IRES-Cre' in cre:
                    cre = 'Chat-IRES-Cre-neo'

                dir_name = os.path.join(output_path, str(id) + '_' + cre)
                try:
                    os.makedirs(dir_name)
                except OSError:
                    pass

                if not path.endswith('_neuron_config.json'):
                    print(path)
                    raise Exception("the file doesn't end with _neuron_config.json")

                try:
                    copyfile(path, os.path.join(dir_name, str(id) + '_' + cre + '_' + short_name + '_neuron_config.json'))
                except Exception:
                    print("couldn't make", os.path.join(dir_name, str(id) + '_' + cre + '_' + short_name + '_neuron_config.json'))

                if mt_id == model_template_ids[0]:
                    model_path = os.path.dirname(path)
                    pp_path = os.path.join(
                        model_path,
                        os.listdir(model_path)[np.where([fname.endswith('_preprocessor_values.json')
                                                         for fname in os.listdir(model_path)])[0][0]])
                    try:
                        copyfile(pp_path, os.path.join(dir_name, str(id) + '_' + cre + '_preprocessor_values.json'))
                    except Exception:
                        print("couldn't make", os.path.join(dir_name, str(id) + '_' + cre + '_preprocessor_values.json'))
                        raise Exception('there should be a preprocessed file')
            elif len(dff) < 1:
                use_me = pd.DataFrame()
                path = None
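# Hedged usage sketch for get_files_from_LIMS_public(). Assumes the module-level
# relative_path, model_template_ids, and model_names referenced above are
# defined; the output folder name is a placeholder.
get_files_from_LIMS_public('public_glif_configs', glif_sp_ids=None, type='mouse')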