Example #1
    def save_morph_data(self, morph_stats_filename):

        if not os.path.exists(morph_stats_filename):

            #  load a neuron from an SWC file
            nrn = nm.load_neuron(self.morph_file)

            morph_stats = {}

            morph_stats['cell_id'] = self.cell_id
            morph_stats['soma_surface'] = nm.get('soma_surface_areas', nrn)[0]
            morph_stats['soma_radius'] = np.mean(nm.get('soma_radii', nrn))

            # Morph stats
            for nrn_type_ in NEURITES:
                nrn_type_name = str(nrn_type_).split('.')[1]

                morph_stats['length.' + nrn_type_name] = np.sum(
                    nm.get('segment_lengths', nrn, neurite_type=nrn_type_))
                morph_stats['area.' + nrn_type_name] = sum(
                    mm.segment_area(s) for s in nm.iter_segments(
                        nrn, neurite_filter=tree_type_checker(nrn_type_)))
                morph_stats['volume.' + nrn_type_name] = sum(
                    mm.segment_volume(s) for s in nm.iter_segments(
                        nrn, neurite_filter=tree_type_checker(nrn_type_)))
                morph_stats['taper_rate.' + nrn_type_name] = np.mean(
                    [mm.segment_taper_rate(s) for s in nm.iter_segments(
                        nrn, neurite_filter=tree_type_checker(nrn_type_))])

            utility.save_json(morph_stats_filename, morph_stats)
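A minimal usage sketch, mirroring how this method is driven from create_optim_job in Example #14; the MorphHandler import is assumed and the SWC path and cell id below are placeholders.

# Hypothetical driver (paths and ids are placeholders):
morph_handler = MorphHandler('cell_12345.swc', cell_id='12345')
morph_handler.save_morph_data('morph_stats_12345.json')
# -> writes cell_id, soma_surface, soma_radius plus per-neurite
#    length/area/volume/taper_rate stats to morph_stats_12345.json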
Example #2
    def write_opt_config_file(self,
                              param_write_path,
                              mech_write_path,
                              mech_release_write_path,
                              train_features_write_path,
                              test_features_write_path,
                              protocols_write_path,
                              release_params,
                              release_param_write_path,
                              opt_config_filename='config_file.json',
                              **kwargs):
        if not os.path.exists(opt_config_filename):
            path_dict = dict()
        else:
            path_dict = utility.load_json(opt_config_filename)

        path_dict['parameters'] = param_write_path
        path_dict['mechanism'] = mech_write_path
        path_dict['released_aa_mechanism'] = mech_release_write_path
        path_dict['train_features'] = train_features_write_path
        path_dict['test_features'] = test_features_write_path
        path_dict['train_protocols'] = protocols_write_path
        path_dict['released_aa_model_dict'] = release_params
        path_dict['released_aa_model'] = release_param_write_path
        path_dict['released_peri_model'] = kwargs.get('released_peri_model')
        path_dict['released_peri_mechanism'] = kwargs.get(
            'released_peri_mechanism')

        utility.save_json(opt_config_filename, path_dict)
    def script_generator(self, chain_job='chain_job.sh', **kwargs):
        # Force change of certain config properties for a dry run
        # (update and dryrun_config come from the enclosing module)
        job_config = utility.load_json(self.job_config_path)
        stage_jobconfig = job_config['stage_jobconfig']
        highlevel_job_props = job_config['highlevel_jobconfig']
        stage_jobconfig = update(stage_jobconfig, dryrun_config)
        stage_jobconfig['optim_config']['ipyparallel'] = False
        stage_jobconfig['analysis_config']['ipyparallel'] = False
        stage_jobconfig['seed'] = [1]
        job_config['stage_jobconfig'] = stage_jobconfig  # persist the overrides (as in Example #12)
        utility.save_json(self.job_config_path, job_config)
        analysis_config = stage_jobconfig['analysis_config']
        optim_config = stage_jobconfig['optim_config']

        testjob_string = '#!/bin/bash\n'
        testjob_string += 'set -ex\n'
        testjob_string += 'source activate %s\n' % highlevel_job_props[
            'conda_env']
        testjob_string += 'python %s --input_json %s\n' %\
            (optim_config['main_script'], self.job_config_path)
        testjob_string += 'python %s --input_json %s\n'\
            % (analysis_config['main_script'], self.job_config_path)

        if kwargs.get('next_stage_job_config'):
            testjob_string += 'bash %s\n' % chain_job
        with open(self.script_name, "w") as shell_script:
            shell_script.write(testjob_string)
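For reference, a sketch of the test-job script this method emits; the conda environment and script names come from the loaded job config, so the ones below are placeholders.

# Sketch of the generated shell script (dry run, ipyparallel disabled):
#
#   #!/bin/bash
#   set -ex
#   source activate ateam_opt
#   python optim_main.py --input_json stage_job_config.json
#   python analysis_main.py --input_json stage_job_config.json
#   bash chain_job.sh    # appended only when next_stage_job_config is truthy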
Example #4
    def write_params_opt(self,
                         model_params,
                         model_params_release,
                         base_dir='config/',
                         **kwargs):

        param_write_path = kwargs.get('param_write_path') or \
            base_dir + self.cell_id + '/parameters.json'
        release_param_write_path = kwargs.get('release_param_write_path') or \
            base_dir + self.cell_id + '/release_parameters.json'

        utility.create_filepath(param_write_path)
        utility.save_json(param_write_path, model_params)

        release_params = dict(
        )  # for comparison with optimized values (strictly for values)
        if model_params_release:
            for param_dict_release in model_params_release:
                param_name = param_dict_release['param_name']
                if param_name not in ['ena', 'ek', 'v_init', 'celsius']:
                    release_params[param_name + '.' + param_dict_release['sectionlist']] = \
                        param_dict_release['value']

            # Released parameter file in bpopt format for running simulation
            utility.save_json(release_param_write_path, model_params_release)
        else:
            release_param_write_path = None

        return param_write_path, release_param_write_path, release_params
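The returned release_params dict keys each released value by param_name.sectionlist; a worked sketch with a hypothetical entry:

# Hypothetical released-parameter entry and the key it produces:
param_dict_release = {'param_name': 'gbar_NaTs2_t', 'sectionlist': 'apical',
                      'value': 0.026}
key = param_dict_release['param_name'] + '.' + param_dict_release['sectionlist']
# key == 'gbar_NaTs2_t.apical'; release_params[key] == 0.026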
    def create_sa_bound_peri(self, bpopt_param_bounds_path,
                             sens_param_bounds_path, max_bound=.5):
        # For parameter sensitivity create a new set of bounds because
        # the permutations may fall outside the original bounds
        max_bound = max(max_bound, self.param_range + .1)
        param_bounds = utility.load_json(bpopt_param_bounds_path)

        optim_param_bpopt_format = {}

        param_sens_list = list()
        for param_dict in param_bounds:
            if 'sectionlist' in param_dict.keys():
                name_loc = param_dict['param_name'] + '.' + \
                    param_dict['sectionlist']

                if param_dict['param_name'] not in ['ena', 'ek']:
                    optim_param_bpopt_format[name_loc] = param_dict['value']
                    lb = param_dict['value'] - \
                        max_bound * abs(param_dict['value'])
                    ub = param_dict['value'] + \
                        max_bound * abs(param_dict['value'])
                    param_dict['bounds'] = [lb, ub]
                    del param_dict['value']
            param_sens_list.append(param_dict)

        utility.save_json(sens_param_bounds_path, param_sens_list)
        return optim_param_bpopt_format
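A worked example of the bound widening above, with hypothetical numbers:

# lb/ub = value -/+ max_bound * |value|, here with max_bound = 0.5:
value, max_bound = 0.04, 0.5
lb = value - max_bound * abs(value)  # 0.02
ub = value + max_bound * abs(value)  # 0.06
# param_dict['bounds'] becomes [0.02, 0.06] and 'value' is dropped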
    def create_sa_bound(self, bpopt_param_bounds_path,
                        sens_param_bounds_path, max_bound=.5):
        # For parameter sensitivity create a new set of bounds because
        # the permutations may fall outside the original bounds
        max_bound = max(max_bound, self.param_range + .1)

        bpopt_section_map = utility.bpopt_section_map
        param_bounds = utility.load_json(bpopt_param_bounds_path)
        optim_param = self.optim_param

        optim_param_bpopt_format = {}

        if 'genome' in optim_param.keys():
            print('The parameter file is in AIBS format')
            for aibs_param_dict in optim_param['genome']:
                param_name, param_sect = aibs_param_dict['name'], \
                    aibs_param_dict['section']
                if param_name in ['e_pas', 'g_pas', 'Ra']:
                    param_sect = 'all'

                param_sect = bpopt_section_map[param_sect]
                optim_param_bpopt_format[param_name + '.' + param_sect] = \
                    float(aibs_param_dict['value'])
        else:
            print('The parameter file is in bluepyopt format')
            for key, val in optim_param.items():
                key_param, key_sect = key.split('.')
                try:
                    key_sect = bpopt_section_map[key_sect]
                except KeyError:
                    print('Already in bluepyopt format')
                optim_param_bpopt_format[key_param + '.' + key_sect] = val

        param_sens_list = list()
        for param_dict in param_bounds:
            bound = param_dict.get('bounds')
            if bound:
                name_loc = param_dict['param_name'] + '.' + param_dict['sectionlist']
                lb = min(bound[0], optim_param_bpopt_format[name_loc] -
                         max_bound * abs(optim_param_bpopt_format[name_loc]))
                ub = max(bound[1], optim_param_bpopt_format[name_loc] +
                         max_bound * abs(optim_param_bpopt_format[name_loc]))
                param_dict['bounds'] = [lb, ub]
            param_sens_list.append(param_dict)

        utility.save_json(sens_param_bounds_path, param_sens_list)
        return optim_param_bpopt_format
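A sketch of the AIBS-to-BluePyOpt name conversion in the first branch, assuming bpopt_section_map maps e.g. 'soma' to 'somatic'; the parameter name and value are placeholders.

# Stand-in for utility.bpopt_section_map, restricted to one entry:
section_map = {'soma': 'somatic'}
aibs_param_dict = {'name': 'gbar_NaV', 'section': 'soma', 'value': '0.04'}
sect = section_map[aibs_param_dict['section']]
optim_param_bpopt_format = {aibs_param_dict['name'] + '.' + sect:
                            float(aibs_param_dict['value'])}
# -> {'gbar_NaV.somatic': 0.04}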
Example #7
def save_cell_metadata(**cell_metadata):
    cell_id = cell_metadata["cell_id"]
    metadata_filename = 'cell_metadata_%s.json' % cell_id

    # TODO: maybe move this to launch_optimjob?
    machine_name = socket.gethostname()
    machine_name = 'aws' if machine_name == 'master' else machine_name
    cell_metadata['machine'] = machine_name

    data_source = cell_metadata["data_source"]
    if data_source == "web":
        ctc = CellTypesCache(
            manifest_file=os.path.join(data_dir, 'manifest.json'))
        cell_metadata.update(get_data_web(cell_id, ctc))
        cell_metadata.update(cell_props(cell_id, ctc))
        cell_metadata.update(model_props(cell_id))
    elif data_source == "lims":
        cell_metadata.update(get_data_lims(cell_id))

    utility.save_json(metadata_filename, cell_metadata)

    return cell_metadata, metadata_filename
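A hypothetical invocation; cell_id and data_source are the keys the function reads explicitly, and 'web' routes through the Allen SDK's CellTypesCache while 'lims' pulls from LIMS.

# Placeholder arguments; any additional metadata keys pass through **cell_metadata:
metadata, metadata_file = save_cell_metadata(cell_id='12345', data_source='web')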
Example #8
    def write_mechanisms_opt(self,
                             model_mechs,
                             model_mechs_release,
                             base_dir='config/',
                             **kwargs):

        mechanism_write_path = kwargs.get('mechanism_write_path') or \
            base_dir + self.cell_id + '/mechanism.json'
        mechanism_release_write_path = kwargs.get('mechanism_release_write_path') \
            or base_dir + self.cell_id + '/mechanism_release.json'

        utility.create_filepath(mechanism_write_path)
        utility.save_json(mechanism_write_path, model_mechs)

        if model_mechs_release:
            utility.save_json(mechanism_release_write_path,
                              model_mechs_release)
        else:
            mechanism_release_write_path = None

        return mechanism_write_path, mechanism_release_write_path
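An illustrative model_mechs payload of the shape this method serializes (sectionlist to mechanism names; the mechanisms are placeholders):

# Hypothetical mechanism dict:
model_mechs = {'all': ['pas'], 'somatic': ['NaV', 'Kv3_1'], 'apical': ['Ih']}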
Example #9
def get_efeatures(cell_id):
    from ateamopt.nwb_extractor import NwbExtractor
    from ateamopt.optim_config_rules import correct_voltage_feat_std
    from ateam.data import lims
    import os
    from ateamopt.utils import utility
    import shutil

    acceptable_stimtypes = ['Long Square']
    feature_names_path = 'feature_set_all.json'
    ephys_dir_origin = 'ephys_ttype'
    efel_feature_path = 'eFEL_features_ttype'
    lr = lims.LimsReader()

    cell_efeatures_dir = os.path.join(efel_feature_path, cell_id)
    cell_protocols_filename = os.path.join(cell_efeatures_dir,
                                           'protocols.json')
    cell_features_filename = os.path.join(cell_efeatures_dir, 'features.json')
    efeature_filenames = [cell_protocols_filename, cell_features_filename]

    if not all(os.path.exists(filename_) for filename_ in efeature_filenames):
        utility.create_filepath(cell_protocols_filename)
        nwb_path = lr.get_nwb_path_from_lims(int(cell_id),
                                             get_sdk_version=True)
        cell_ephys_dir = os.path.join(ephys_dir_origin, cell_id)
        nwb_handler = NwbExtractor(cell_id, nwb_path)
        ephys_data_path, stimmap_filename = nwb_handler.save_cell_data_web(
            acceptable_stimtypes, ephys_dir=cell_ephys_dir)

        protocol_dict, feature_dict = nwb_handler.get_efeatures_all(
            feature_names_path, ephys_data_path, stimmap_filename)
        feature_dict = correct_voltage_feat_std(feature_dict)

        utility.save_json(cell_protocols_filename, protocol_dict)
        utility.save_json(cell_features_filename, feature_dict)
        shutil.move(stimmap_filename, cell_efeatures_dir)
        shutil.rmtree(cell_ephys_dir, ignore_errors=True)
    def write_ephys_features(self, train_features, test_features,
                             train_protocols, base_dir='config/', **kwargs):
        cell_name = self.cell_id
        train_features_write_path = kwargs.get('train_features_write_path') or \
            os.path.join(base_dir, cell_name, 'train_features.json')
        test_features_write_path = kwargs.get('test_features_write_path') \
            or os.path.join(base_dir, cell_name, 'test_features.json')
        train_protocols_write_path = kwargs.get('protocols_write_path') \
            or os.path.join(base_dir, cell_name, 'train_protocols.json')
        utility.create_filepath(train_protocols_write_path)
        utility.save_json(train_features_write_path, train_features)
        utility.save_json(test_features_write_path, test_features)
        utility.save_json(train_protocols_write_path, train_protocols)

        return train_features_write_path, test_features_write_path,\
            train_protocols_write_path
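A sketch of the feature payloads written here, following the stim -> location -> feature -> [mean, std] nesting built in Example #14; the stimulus and feature names are placeholders.

# Hypothetical train_features structure:
train_features = {
    'LongDC_55': {
        'soma': {'Spikecount': [4.0, 0.5],
                 'AP_amplitude': [65.2, 3.1]}
    }
}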
Example #11
def main():

    # Read sensitivity analysis config file
    sens_config_file = sys.argv[-1]
    sens_config_dict = utility.load_json(sens_config_file)
    cell_id = sens_config_dict['Cell_id']
    cpu_count = sens_config_dict.get('cpu_count', mp.cpu_count())
    perisomatic_sa = sens_config_dict.get('run_peri_analysis', False)

    # Parameters to vary (All-active)
    select_aa_param_path = sens_config_dict['select_aa_param_path']  # knobs

    # Parameters to vary (Perisomatic)
    if perisomatic_sa:
        select_peri_param_path = sens_config_dict['select_peri_param_path']  # knobs

    select_feature_path = sens_config_dict['select_feature_path']  # knobs
    param_mod_range = sens_config_dict.get('param_mod_range', .1)  # knobs
    mechanism_path = sens_config_dict['mechanism']
    
    # Config file with all the paths for the BluePyOpt sim
    lr = lims.LimsReader()
    morph_path = lr.get_swc_path_from_lims(int(cell_id))

    model_base_path = '/allen/aibs/mat/ateam_shared/' \
        'Mouse_Model_Fit_Metrics/{}'.format(cell_id)

    opt_config_file = os.path.join(model_base_path, 'config_file.json')
    if not os.path.exists(opt_config_file):
        opt_config = {
            "morphology": "",
            "parameters": "config/{}/parameters.json".format(cell_id),
            "mechanism": "config/{}/mechanism.json".format(cell_id),
            "protocols": "config/{}/protocols.json".format(cell_id),
            "all_protocols": "config/{}/all_protocols.json".format(cell_id),
            "features": "config/{}/features.json".format(cell_id),
            "peri_parameters": "config/{}/peri_parameters.json".format(cell_id),
            "peri_mechanism": "config/{}/peri_mechanism.json".format(cell_id)
        }
        opt_config_file = os.path.join(os.getcwd(), 'config_file.json')
        utility.save_json(opt_config_file, opt_config)
    
    # Optimized parameters around which select parameters are varied
    optim_param_path_aa = ('/allen/aibs/mat/ateam_shared/Mouse_Model_Fit_Metrics/'
                           '{cell_id}/fitted_params/optim_param_unformatted_{cell_id}.json'
                           .format(cell_id=cell_id))
    if not os.path.exists(optim_param_path_aa):
        optim_param_path_aa = ('/allen/aibs/mat/ateam_shared/Mouse_Model_Fit_Metrics/'
                               '{cell_id}/fitted_params/optim_param_{cell_id}_bpopt.json'
                               .format(cell_id=cell_id))
    
    SA_obj_aa = SA_helper(optim_param_path_aa, select_aa_param_path,
                          param_mod_range, opt_config_file)

    _, protocol_path, mech_path, feature_path, \
        param_bound_path = SA_obj_aa.load_config(model_base_path)

    # Make sure the parameter bounds are big enough for the BluePyOpt sim
    sens_param_bound_write_path_aa = "param_sensitivity_aa.json"
    optim_param_aa = SA_obj_aa.create_sa_bound(param_bound_path,
                                               sens_param_bound_write_path_aa)
    param_dict_uc_aa = SA_obj_aa.create_sens_param_dict()
    parameters_aa = {key: optim_param_aa[val]
                     for key, val in param_dict_uc_aa.items()}
    eval_handler_aa = Bpopt_Evaluator(protocol_path, feature_path,
                                      morph_path, sens_param_bound_write_path_aa,
                                      mech_path,
                                      ephys_dir=None,
                                      timed_evaluation=False)
    evaluator_aa = eval_handler_aa.create_evaluator()
    opt_aa = bpopt.optimisations.DEAPOptimisation(evaluator=evaluator_aa)

    stim_protocols = utility.load_json(protocol_path)
    stim_protocols = {key: val for key, val in stim_protocols.items()
                      if 'LongDC' in key}
    stim_dict = {key: val['stimuli'][0]['amp']
                 for key, val in stim_protocols.items()}
    sorted_stim_tuple = sorted(stim_dict.items(), key=operator.itemgetter(1))

    stim_name = sorted_stim_tuple[-1][0]  # knobs (the max amp)

    # Check for compiled modfiles
    if not os.path.isdir('x86_64'):
        raise Exception('Compiled modfiles do not exist')
    
    efel_features = utility.load_json(select_feature_path)
    un_features = un.EfelFeatures(features_to_run=efel_features)

    un_parameters_aa = un.Parameters(parameters_aa)
    un_parameters_aa.set_all_distributions(un.uniform(param_mod_range))
    un_model_aa = un.Model(run=nrnsim_bpopt, interpolate=True,
                           labels=["Time (ms)", "Membrane potential (mV)"],
                           opt=opt_aa, stim_protocols=stim_protocols,
                           param_dict_uc=param_dict_uc_aa,
                           stim_name=stim_name,
                           optim_param=optim_param_aa)

    # Perform the uncertainty quantification
    UQ_aa = un.UncertaintyQuantification(un_model_aa,
                                         parameters=un_parameters_aa,
                                         features=un_features)
    data_folder = 'sensitivity_data'
    sa_filename_aa = 'sa_allactive_%s.h5' % cell_id
    sa_filename_aa_csv = 'sa_allactive_%s.csv' % cell_id
    sa_data_path_aa = os.path.join(data_folder, sa_filename_aa)
    sa_aa_csv_path = os.path.join(data_folder, sa_filename_aa_csv)

    UQ_aa.quantify(seed=0, CPUs=cpu_count, data_folder=data_folder,
                   filename=sa_filename_aa)
    _ = SA_obj_aa.save_analysis_data(sa_data_path_aa,
                                     filepath=sa_aa_csv_path)

    cell_data_aa = un.Data(sa_data_path_aa)
    SA_obj_aa.plot_sobol_analysis(cell_data_aa,
                                  analysis_path='figures/sa_analysis_aa_%s.pdf' % cell_id,
                                  palette='Set1')
    
    # Perisomatic model
    
    if perisomatic_sa:
        try:
            optim_param_path_peri = None
            SA_obj_peri = SA_helper(optim_param_path_peri, select_peri_param_path,
                                    param_mod_range, opt_config_file)
            _, _, mech_path_peri, _, \
                param_bound_path_peri = SA_obj_peri.load_config(model_base_path,
                                                                perisomatic=True)

            sens_param_bound_write_path_peri = "param_sensitivity_peri.json"
            optim_param_peri = SA_obj_peri.create_sa_bound_peri(
                param_bound_path_peri, sens_param_bound_write_path_peri)

            param_dict_uc_peri = SA_obj_peri.create_sens_param_dict()
            parameters_peri = {key: optim_param_peri[val]
                               for key, val in param_dict_uc_peri.items()}
            eval_handler_peri = Bpopt_Evaluator(protocol_path, feature_path,
                                                morph_path,
                                                sens_param_bound_write_path_peri,
                                                mech_path_peri,
                                                ephys_dir=None,
                                                timed_evaluation=False)
            evaluator_peri = eval_handler_peri.create_evaluator()
            opt_peri = bpopt.optimisations.DEAPOptimisation(evaluator=evaluator_peri)
            un_parameters_peri = un.Parameters(parameters_peri)
            un_parameters_peri.set_all_distributions(un.uniform(param_mod_range))
            un_model_peri = un.Model(run=nrnsim_bpopt, interpolate=True,
                                     labels=["Time (ms)", "Membrane potential (mV)"],
                                     opt=opt_peri, stim_protocols=stim_protocols,
                                     param_dict_uc=param_dict_uc_peri,
                                     stim_name=stim_name,
                                     optim_param=optim_param_peri)
            UQ_peri = un.UncertaintyQuantification(un_model_peri,
                                                   parameters=un_parameters_peri,
                                                   features=un_features)
            sa_filename_peri = 'sa_perisomatic_%s.h5' % cell_id
            sa_filename_peri_csv = 'sa_perisomatic_%s.csv' % cell_id
            sa_data_path_peri = os.path.join(data_folder, sa_filename_peri)
            sa_peri_csv_path = os.path.join(data_folder, sa_filename_peri_csv)

            UQ_peri.quantify(seed=0, CPUs=cpu_count, data_folder=data_folder,
                             filename=sa_filename_peri)
            _ = SA_obj_peri.save_analysis_data(sa_data_path_peri,
                                               filepath=sa_peri_csv_path)
            cell_data_peri = un.Data(sa_data_path_peri)
            SA_obj_peri.plot_sobol_analysis(cell_data_peri,
                                            analysis_path='figures/sa_analysis_peri_%s.pdf' % cell_id,
                                            palette='Set2')
        except Exception as e:
            print(e)
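A sketch of the sensitivity-analysis config this script expects; the keys are the ones read above, the values are placeholders.

sens_config = {
    "Cell_id": "12345",
    "cpu_count": 8,                          # optional; defaults to mp.cpu_count()
    "run_peri_analysis": False,              # optional; defaults to False
    "select_aa_param_path": "select_params_aa.json",
    "select_peri_param_path": "select_params_peri.json",  # read only if run_peri_analysis
    "select_feature_path": "select_features.json",
    "param_mod_range": 0.1,                  # optional; defaults to 0.1
    "mechanism": "mechanism.json"
}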
Example #12
    def aibs_peri_to_bpopt(self, peri_param_path, base_dir='config/'):
        peri_params = utility.load_json(peri_param_path)
        peri_params_release = list()
        peri_mechs_release = defaultdict(list)
        peri_mechs_release['all'].append('pas')

        rev_potential = utility.rev_potential
        section_map = utility.bpopt_section_map

        for key, values in peri_params.items():
            if key == 'genome':
                for gene in values:
                    iter_dict_release = {
                        'param_name': gene['name'],
                        'sectionlist': section_map[gene['section']],
                        'type': 'section',
                        'value': float(gene['value']),
                        'dist_type': 'uniform'
                    }
                    if gene['mechanism'] != '':
                        iter_dict_release['mech'] = gene['mechanism']
                        iter_dict_release['type'] = 'range'
                    peri_params_release.append(iter_dict_release)

            elif key == 'passive':
                for key_pas, val_pas in values[0].items():
                    if key_pas == 'cm':
                        for pas_param in val_pas:
                            iter_dict_release = {
                                'param_name': 'cm',
                                'sectionlist':
                                section_map[pas_param['section']],
                                'value': pas_param['cm'],
                                'dist_type': 'uniform',
                                'type': 'section'
                            }
                            peri_params_release.append(iter_dict_release)

                    else:
                        iter_dict_release = {
                            'param_name': 'Ra' if key_pas == 'ra' else key_pas,
                            'sectionlist': 'all',
                            'value': val_pas,
                            'dist_type': 'uniform',
                            'type': 'section'
                        }
                        peri_params_release.append(iter_dict_release)

        for rev in rev_potential:
            iter_dict_release = {
                'param_name': rev,
                'sectionlist': 'somatic',
                'dist_type': 'uniform',
                'type': 'section'
            }
            if rev in ('ena', 'ek'):
                iter_dict_release['value'] = rev_potential[rev]
            peri_params_release.append(iter_dict_release)

        peri_params_release.append({
            "param_name": "celsius",
            "type": "global",
            "value": 34
        })
        peri_params_release.append({
            "param_name": "v_init",
            "type": "global",
            "value": peri_params['conditions'][0]["v_init"]
        })

        for param_dict in peri_params_release:
            if 'mech' in param_dict.keys():
                if param_dict['mech'] not in peri_mechs_release[
                        param_dict['sectionlist']]:
                    peri_mechs_release[param_dict['sectionlist']].append(
                        param_dict['mech'])

        peri_params_write_path = os.path.join(base_dir, self.cell_id,
                                              'peri_parameters.json')
        peri_mech_write_path = os.path.join(base_dir, self.cell_id,
                                            'peri_mechanism.json')
        utility.create_filepath(peri_params_write_path)
        utility.save_json(peri_params_write_path, peri_params_release)
        utility.save_json(peri_mech_write_path, peri_mechs_release)
        return peri_params_write_path, peri_mech_write_path
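A before/after sketch of the conversion, using one hypothetical genome entry:

# AIBS format (input):
gene = {'name': 'gbar_Kv3_1', 'section': 'soma', 'value': '0.1',
        'mechanism': 'Kv3_1'}
# BluePyOpt format (output entry appended to peri_params_release):
converted = {'param_name': 'gbar_Kv3_1', 'sectionlist': 'somatic',
             'type': 'range', 'value': 0.1, 'dist_type': 'uniform',
             'mech': 'Kv3_1'}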
    def script_generator(self, chain_job='chain_job.sh', **kwargs):
        job_config = utility.load_json(self.job_config_path)
        stage_jobconfig = job_config['stage_jobconfig']

        highlevel_job_props = job_config['highlevel_jobconfig']
        analysis_flag = kwargs.get(
            'analysis')  # this means prepare a batch script for analysis

        if highlevel_job_props['dryrun']:
            stage_jobconfig = update(stage_jobconfig, dryrun_config)
            job_config['stage_jobconfig'] = stage_jobconfig
            utility.save_json(self.job_config_path, job_config)

        analysis_config = stage_jobconfig['analysis_config']
        with open(self.script_template, 'r') as job_template:
            batchjob_string = job_template.read()

        jobname = '%s.%s' % (os.path.basename(
            highlevel_job_props['job_dir']), stage_jobconfig['stage_name'])
        if analysis_flag:
            jobname += '.analysis'

        seed_string = ''.join(
            ['%s ' % seed_ for seed_ in stage_jobconfig['seed']])

        # High level job config
        batchjob_string = re.sub('conda_env', highlevel_job_props['conda_env'],
                                 batchjob_string)
        batchjob_string = re.sub('jobname', jobname, batchjob_string)
        batchjob_string = re.sub('jobscript_name', self.script_name,
                                 batchjob_string)
        if highlevel_job_props.get('email'):
            batchjob_string = re.sub('email', highlevel_job_props['email'],
                                     batchjob_string)

        # Only related to optimization
        batchjob_string = re.sub('seed_list', seed_string, batchjob_string)
        batchjob_string = re.sub('analysis_script',
                                 analysis_config['main_script'],
                                 batchjob_string)

        batchjob_string = re.sub('job_config_path', self.job_config_path,
                                 batchjob_string)

        # Job config analysis vs optimization
        if analysis_flag:
            hpc_job_config = analysis_config
            batchjob_string = re.sub(r'# Run[\S\s]*pids', '', batchjob_string)

        else:
            hpc_job_config = stage_jobconfig['optim_config']

            # Within the batch job script change the analysis launch to batch analysis
            if analysis_config.get(
                    'ipyparallel') and stage_jobconfig['run_hof_analysis']:
                analysis_jobname = kwargs.get('analysis_jobname')
                batchjob_string = re.sub(r'# Analyze[\S\s]*.json',
                                         'qsub %s' % analysis_jobname,
                                         batchjob_string)

        # If there is a next stage, chain the job
        if kwargs.get('next_stage_job_config'):
            batchjob_string += 'bash %s\n' % chain_job

        hpc_job_parameters = [
            'jobmem', 'ipyparallel_db', 'qos', 'main_script', 'jobtime',
            'error_stream', 'output_stream', 'nnodes', 'nprocs', 'nengines'
        ]

        for hpc_param in hpc_job_parameters:
            batchjob_string = re.sub(hpc_param, str(hpc_job_config[hpc_param]),
                                     batchjob_string)

        with open(self.script_name, "w") as batchjob_script:
            batchjob_script.write(batchjob_string)
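The substitution above works by replacing literal placeholder tokens in the jobscript template; a minimal self-contained illustration (the template text is made up):

import re

# Toy template using the same placeholder convention as job_templates/:
template = '#PBS -N jobname\n#PBS -l walltime=jobtime\nsource activate conda_env\n'
script = re.sub('jobname', 'cell12345.Stage2', template)
script = re.sub('jobtime', '24:00:00', script)
script = re.sub('conda_env', 'ateam_opt', script)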
Example #14
def create_optim_job(args):
    level = logging.getLevelName(args['log_level'])
    logger.setLevel(level)

    cty_props = args['cty_config']
    cell_id = cty_props['cell_id']
    highlevel_job_props = args['job_config']['highlevel_jobconfig']
    stage_job_props = args['job_config']['stage_jobconfig']

    # Change any paths to absolute path
    for ii, stage_job_prop in enumerate(stage_job_props):
        stage_job_props[ii] = convert_paths(stage_job_prop)
    highlevel_job_props = convert_paths(highlevel_job_props)

    try:
        job_dir = os.path.join(os.getcwd(), highlevel_job_props['job_dir'])
    except KeyError:
        job_dir = os.path.join(os.getcwd(), str(cell_id))
    highlevel_job_props['job_dir'] = job_dir

    utility.create_dirpath(job_dir)
    os.chdir(job_dir)  # Change Working directory

    cty_config_path = os.path.join('user_config', 'cell_config.json')
    job_config_path = os.path.join('user_config', 'job_config.json')
    highlevel_jobconfig_path = 'high_level_job_config.json'
    stage_tracker_path = 'stage_tracker_config.json'

    utility.create_filepath(cty_config_path)
    utility.create_filepath(job_config_path)

    # Save a copy of the config files
    utility.save_json(cty_config_path, cty_props)
    utility.save_json(job_config_path, args['job_config'])

    try:
        ateamopt_dir = os.path.join(
            os.path.dirname(ateamopt.__file__), os.pardir)
        ateamopt_commitID = subprocess.check_output(
            ["git", "describe", "--tags"], cwd=ateamopt_dir).strip()
        ateamopt_commitID = ateamopt_commitID.decode() if isinstance(
            ateamopt_commitID, bytes) else ateamopt_commitID
        cty_props['ateamopt_tag'] = ateamopt_commitID
    except Exception as e:
        logger.debug(e)
    try:
        bluepyopt_dir = os.path.join(
            os.path.dirname(bluepyopt.__file__), os.pardir)
        bpopt_commitID = subprocess.check_output(
            ["git", "describe", "--tags"], cwd=bluepyopt_dir).strip()
        bpopt_commitID = bpopt_commitID.decode() if isinstance(
            bpopt_commitID, bytes) else bpopt_commitID
        cty_props['bluepyopt_tag'] = bpopt_commitID
    except Exception as e:
        logger.debug(e)

    # pickling consistency depends on pandas version
    pd_version = pd.__version__
    cty_props['pandas_version'] = pd_version

    cell_metadata_path = glob.glob('cell_metadata*.json')

    if len(cell_metadata_path) == 0:
        cty_props.update(highlevel_job_props)
        cell_metadata, cell_metadata_path = cell_data.save_cell_metadata(
            **cty_props)
        morph_stats_filename = 'morph_stats_%s.json' % cell_id
        morph_handler = MorphHandler(
            cell_metadata['swc_path'], cell_id=cell_id)
        morph_handler.save_morph_data(morph_stats_filename)

    elif len(cell_metadata_path) == 1:
        cell_metadata_path = cell_metadata_path[0]
        cell_metadata = utility.load_json(cell_metadata_path)

    else:
        raise Exception('More than one metadata file found')

    # Extract ephys data
    ephys_dir = highlevel_job_props['ephys_dir']
    non_standard_nwb = highlevel_job_props['non_standard_nwb']
    feature_stimtypes = highlevel_job_props['feature_stimtypes']

    highlevel_job_props['nwb_path'] = cell_metadata['nwb_path']
    highlevel_job_props['swc_path'] = cell_metadata['swc_path']
    nwb_handler = NwbExtractor(cell_id, nwb_path=highlevel_job_props['nwb_path'])
    data_source = highlevel_job_props["data_source"]
    if data_source == "lims":
        ephys_data_path, stimmap_filename = nwb_handler.save_cell_data(feature_stimtypes,
                                                                       non_standard_nwb=non_standard_nwb, ephys_dir=ephys_dir)
    else:
        ephys_data_path, stimmap_filename = nwb_handler.save_cell_data_web(feature_stimtypes,
                                                                           non_standard_nwb=non_standard_nwb, ephys_dir=ephys_dir)
    feature_names_path = highlevel_job_props['feature_names_path']
    protocol_dict, feature_dict = nwb_handler.get_efeatures_all(feature_names_path,
                                                                ephys_data_path, stimmap_filename)

    feature_dict = correct_voltage_feat_std(feature_dict)
    all_protocols_filename = os.path.join(ephys_data_path, 'all_protocols.json')
    all_features_filename = os.path.join(ephys_data_path, 'all_features.json')
    utility.save_json(all_protocols_filename, protocol_dict)
    utility.save_json(all_features_filename, feature_dict)
    highlevel_job_props['stimmap_file'] = os.path.abspath(stimmap_filename)
    highlevel_job_props['machine'] = cell_metadata['machine']
    highlevel_job_props['log_level'] = args['log_level']
    highlevel_job_props['all_features_path'] = all_features_filename
    highlevel_job_props['all_protocols_path'] = all_protocols_filename

    highlevel_job_props = convert_paths(highlevel_job_props)
    utility.save_json(highlevel_jobconfig_path, highlevel_job_props)

    stage_level_jobconfig = {}
    stage_level_jobconfig['stage_jobconfig'] = stage_job_props.pop(0)
    stage_level_jobconfig['highlevel_jobconfig'] = highlevel_job_props

    utility.save_json(stage_tracker_path, stage_job_props)

    stage_jobdir = os.path.join(highlevel_job_props['job_dir'],
                                stage_level_jobconfig['stage_jobconfig']['stage_name'])
    stage_level_jobconfig_path = os.path.join(
        stage_jobdir, 'stage_job_config.json')
    utility.create_filepath(stage_level_jobconfig_path)

    utility.save_json(stage_level_jobconfig_path, stage_level_jobconfig)
    prepare_jobscript_default = utility.locate_script_file(
        'prepare_stagejob.py')
    analyze_jobscript_default = utility.locate_script_file(
        'analyze_stagejob.py')
    shutil.copy(prepare_jobscript_default, stage_jobdir)
    shutil.copy(analyze_jobscript_default, stage_jobdir)

    jobtemplate_path = 'job_templates/chainjob_template.sh'

    chain_job = ChainSubJob(jobtemplate_path, stage_level_jobconfig_path)
    chain_job.script_generator()

    chain_job.run_job()
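For orientation, the on-disk layout this function assembles; file names are taken from the code above, while <job_dir>, <cell_id> and <stage_name> come from the configs.

# <job_dir>/
#     user_config/cell_config.json
#     user_config/job_config.json
#     high_level_job_config.json
#     stage_tracker_config.json
#     cell_metadata_<cell_id>.json
#     morph_stats_<cell_id>.json
#     <stage_name>/stage_job_config.json
#     <stage_name>/prepare_stagejob.py
#     <stage_name>/analyze_stagejob.py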
def main(args):
    # Job config
    job_config_path = sys.argv[-1]
    stage_jobconfig = args['stage_jobconfig']
    highlevel_job_props = args['highlevel_jobconfig']

    logging.basicConfig(level=highlevel_job_props['log_level'])

    job_dir = highlevel_job_props['job_dir']
    path_to_cell_metadata = glob.glob(
        os.path.join(job_dir, 'cell_metadata*.json'))[0]
    stage_tracker_path = os.path.join(job_dir, 'stage_tracker_config.json')

    cell_metadata = utility.load_json(path_to_cell_metadata)
    cell_id = cell_metadata['cell_id']
    peri_model_id = cell_metadata.get('peri_model_id')
    released_aa_model_path = cell_metadata.get('model_path_all_active')
    released_aa_model_id = cell_metadata.get('released_aa_model_id')

    nwb_path = highlevel_job_props['nwb_path']
    swc_path = highlevel_job_props['swc_path']
    all_features_path = highlevel_job_props['all_features_path']
    all_protocols_path = highlevel_job_props['all_protocols_path']

    stage_stimtypes = stage_jobconfig['stage_stimtypes']
    stage_feature_names_path = stage_jobconfig['stage_features']
    param_bounds_path = stage_jobconfig['stage_parameters']
    ap_init_flag = stage_jobconfig['AP_initiation_zone']
    ap_init_feature = 'check_AISInitiation'
    script_repo_dir = stage_jobconfig.get('script_repo_dir')
    depol_block_check = stage_jobconfig.get('depol_block_check')
    add_fi_kink = stage_jobconfig.get('add_fi_kink')
    analysis_parallel = (stage_jobconfig['analysis_config'].get('ipyparallel')
                         and stage_jobconfig['run_hof_analysis']
                         )  # analysis batch job only for hof analysis
    param_bound_tolerance = stage_jobconfig.get('adjust_param_bounds_prev')
    prev_stage_path = stage_jobconfig.get('prev_stage_path')

    filter_rule_func = getattr(filter_rules, stage_jobconfig['filter_rule'])

    all_features = utility.load_json(all_features_path)
    all_protocols = utility.load_json(all_protocols_path)

    stage_feature_names = utility.load_json(
        stage_feature_names_path)['features']
    # AP init flag is prioritized over feature set file
    if ap_init_flag == 'soma':
        if ap_init_feature in stage_feature_names:
            stage_feature_names.remove(ap_init_feature)
    elif ap_init_flag == 'axon':
        if ap_init_feature not in stage_feature_names:
            stage_feature_names.append(ap_init_feature)

    select_stim_names = []
    for stim_name in all_features.keys():
        stim_type = stim_name.rsplit('_', 1)[0]
        stim_type_aibs = utility.aibs_stimname_map_inv[stim_type]
        if stim_type_aibs in stage_stimtypes:
            select_stim_names.append(stim_name)

    features_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
    for stim_name, stim_dict in all_features.items():
        if stim_name in select_stim_names:
            for loc, loc_features in stim_dict.items():
                for feat, val in loc_features.items():
                    if feat in stage_feature_names:
                        features_dict[stim_name][loc][feat] = [val[0], val[1]]

    protocols_dict = {
        proto_key: proto_val
        for proto_key, proto_val in all_protocols.items()
        if proto_key in select_stim_names
    }
    nwb_handler = NwbExtractor(cell_id, nwb_path=nwb_path)

    kwargs = {
        'depol_block_check': depol_block_check,
        'add_fi_kink': add_fi_kink
    }
    if depol_block_check:
        train_features, test_features, train_protocols, DB_proto_dict = filter_rule_func(
            features_dict, protocols_dict, **kwargs)
        # also append DB check info to all_protocols json and save
        all_protocols['DB_check_DC'] = {'stimuli': DB_proto_dict}
        utility.save_json(all_protocols_path, all_protocols)
    else:
        train_features, test_features, train_protocols = filter_rule_func(
            features_dict, protocols_dict, **kwargs)

    train_features_path, test_features_path, train_protocols_path = \
        nwb_handler.write_ephys_features(train_features, test_features,
                                         train_protocols)

    # Create the parameter bounds for the optimization
    if prev_stage_path:
        prev_stage_model_path = os.path.join(
            prev_stage_path, 'fitted_params',
            'optim_param_%s_compact.json' % cell_id)
    else:
        prev_stage_model_path = None
    model_params_handler = AllActive_Model_Parameters(
        cell_id,
        swc_path=swc_path,
        prev_stage_model_path=prev_stage_model_path,
        released_aa_model_path=released_aa_model_path)

    model_params, model_params_release = model_params_handler.get_opt_params(
        param_bounds_path, prev_stage_tolerance=param_bound_tolerance)
    param_write_path, released_aa_param_write_path, released_aa_params =\
        model_params_handler.write_params_opt(model_params, model_params_release)

    model_mechs, model_mechs_release = model_params_handler.get_opt_mechanism(
        model_params, model_params_release, param_bounds_path)
    mech_write_path, mech_release_write_path = model_params_handler.write_mechanisms_opt(
        model_mechs, model_mechs_release)

    props = {}
    if peri_model_id:
        peri_model_path = cell_metadata['model_path_perisomatic']
        peri_params_write_path, peri_mech_write_path = \
            model_params_handler.aibs_peri_to_bpopt(peri_model_path)
        props['released_peri_model'] = peri_params_write_path
        props['released_peri_mechanism'] = peri_mech_write_path

    # Config file with all the necessary paths to feed into the optimization
    # TODO: clarify how this fits into schema
    model_params_handler.write_opt_config_file(
        param_write_path,
        mech_write_path,
        mech_release_write_path,
        train_features_path,
        test_features_path,
        train_protocols_path,
        released_aa_params,
        released_aa_param_write_path,
        opt_config_filename=job_config_path,
        **props)

    # Copy the optimizer scripts in the current directory
    optimizer_script = stage_jobconfig['optim_config']['main_script']
    analysis_script = stage_jobconfig['analysis_config']['main_script']
    if script_repo_dir:
        optimizer_script_repo = os.path.abspath(
            os.path.join(script_repo_dir, optimizer_script))
        optimizer_script_repo = optimizer_script_repo if os.path.exists(optimizer_script_repo)\
            else None
    else:
        optimizer_script_repo = None
    optimizer_script_default = utility.locate_script_file(optimizer_script)
    optimizer_script_path = optimizer_script_repo or optimizer_script_default
    stage_cwd = os.getcwd()
    shutil.copy(optimizer_script_path, stage_cwd)

    next_stage_job_props = utility.load_json(stage_tracker_path)

    machine = highlevel_job_props['machine']
    machine_match_patterns = ['hpc-login', 'aws', 'cori', 'bbp5']

    next_stage_jobconfig = {}
    try:
        next_stage_jobconfig['stage_jobconfig'] = next_stage_job_props.pop(0)
        next_stage_jobconfig['highlevel_jobconfig'] = highlevel_job_props
        next_stage_jobconfig['stage_jobconfig']['prev_stage_path'] = os.getcwd()

        chainjobtemplate_path = 'job_templates/chainjob_template.sh'
    except IndexError:
        pass

    utility.save_json(stage_tracker_path, next_stage_job_props)

    # Create batch jobscript
    if not any(substr in machine for substr in machine_match_patterns):
        testJob = test_JobModule('batch_job.sh',
                                 job_config_path=job_config_path)

        testJob.script_generator(next_stage_job_config=next_stage_jobconfig)

    elif any(pattern in machine for pattern in ['hpc-login', 'aws']):
        jobtemplate_path = 'job_templates/pbs_jobtemplate.sh'
        batch_job = PBS_JobModule(jobtemplate_path, job_config_path)
        if analysis_parallel:
            batch_job.script_generator(analysis_jobname='analyze_job.sh')
            # A separate batch job needs to be created in this case
            analysis_job = PBS_JobModule(jobtemplate_path,
                                         job_config_path,
                                         script_name='analyze_job.sh')
            analysis_job.script_generator(
                analysis=True, next_stage_job_config=next_stage_jobconfig)
        else:
            batch_job.script_generator(
                next_stage_job_config=next_stage_jobconfig)

    elif any(pattern in machine for pattern in ['cori', 'bbp5']):

        if 'cori' in machine:
            jobtemplate_path = 'job_templates/nersc_slurm_jobtemplate.sh'
        else:
            jobtemplate_path = 'job_templates/bbp_slurm_jobtemplate.sh'

        batch_job = Slurm_JobModule(jobtemplate_path, job_config_path)
        if analysis_parallel:
            batch_job.script_generator(analysis_jobname='analyze_job.sh')
            # A separate batch job needs to be created in this case
            analysis_job = Slurm_JobModule(jobtemplate_path,
                                           job_config_path,
                                           script_name='analyze_job.sh')
            analysis_job.script_generator(
                analysis=True, next_stage_job_config=next_stage_jobconfig)
        else:
            batch_job.script_generator(
                next_stage_job_config=next_stage_jobconfig)

    if next_stage_jobconfig:

        stage_jobdir = os.path.join(
            highlevel_job_props['job_dir'],
            next_stage_jobconfig['stage_jobconfig']['stage_name'])

        next_stage_jobconfig_path = os.path.join(stage_jobdir,
                                                 'stage_job_config.json')
        utility.create_filepath(next_stage_jobconfig_path)
        utility.save_json(next_stage_jobconfig_path, next_stage_jobconfig)
        prepare_jobscript_default = utility.locate_script_file(
            'prepare_stagejob.py')
        analyze_jobscript_default = utility.locate_script_file(analysis_script)
        shutil.copy(prepare_jobscript_default, stage_jobdir)
        shutil.copy(analyze_jobscript_default, stage_jobdir)

        chain_job = ChainSubJob(chainjobtemplate_path,
                                next_stage_jobconfig_path)
        chain_job.script_generator()
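The batch-script dispatch near the end reduces to hostname pattern matching; a compact sketch (the hostname is a placeholder):

machine = 'hpc-login2'  # placeholder hostname
if any(p in machine for p in ['hpc-login', 'aws']):
    job_module = 'PBS_JobModule'      # PBS template
elif any(p in machine for p in ['cori', 'bbp5']):
    job_module = 'Slurm_JobModule'    # SLURM template
else:
    job_module = 'test_JobModule'     # plain batch_job.sh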