def myloglike(cube, ndim, nparams):
    obs, params = get_obs(cube, ndim)

    if obs:
        chi2 = get_chi2(obs)
        # RESULT ORIENTED: for sampling, fall back to the default X^2 if any predictor reports an error
        for name in ["FeynHiggs", "Micromegas", "BPhysics", "SUSY-POPE"]:
            if obs[(name, "error")]:
                chi2 = default_chi()
        obs[("tot_X2", "all")] = chi2
    else:
        chi2 = default_chi()
        obs = params
    # write everything to root files
    if args.root_out:
        VARS = rootstore.get_VARS(obs, args.model)
        root.root_write(VARS)
    if args.pickle_out:
        with open("{}/{}.pkl".format(args.multinest_dir, unique_str()), "wb") as pickle_file:
            pickle.dump(obs, pickle_file)
    if "X" in args.verbose:
        print("X^2={}".format(chi2))
    return -chi2
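
# The `default_chi` helper called above is not shown on this page. A minimal
# sketch, assuming it simply returns a large penalty X^2 (the value is
# illustrative, not taken from the source):
def default_chi():
    # a large X^2 makes failed points maximally unfavourable to the sampler
    return 1.0e30
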
def myloglike(cube, ndim, nparams):
    obs,params=get_obs(cube,ndim)
            
    if obs is not None: 
        chi2=get_chi2(obs)
        #RESULT ORIENTED: for sampling, fall back to the default X^2 if any predictor reports an error
        #FIXME: consider not penalising Micromegas errors, since Micromegas crashes whenever the neutralino is not the LSP
        for name in ['FeynHiggs','Micromegas','BPhysics','SUSY-POPE']:
            if obs[(name,'error')]:
                chi2=default_chi()
        if numpy.isnan(chi2):
            chi2=default_chi()
        obs[('tot_X2', 'all')]=chi2
    else:
        obs={}
        chi2=default_chi()
#        obs=params
        obs[('tot_X2', 'all')]=chi2
    # write everything to root files
    if args.root_out:
        if args.storage_dict is not None:
            vars=len(storage_dict)*[0.]
            for oids, val in obs.items():
                try:
                    vars[storage_dict[oids]]=val
                except KeyError:
                    print('WARNING: not saving {}'.format(oids))
                    continue
        else:
            vars=rootstore.get_VARS(obs, args.model)
        root.root_write(vars)
    if args.pickle_out:
        with open('{}/{}.pkl'.format(args.multinest_dir, unique_str()),'wb') as pickle_file:
            pickle.dump(obs,pickle_file)
    if 'X' in args.verbose: 
        print("X^2={}".format(chi2))
    return -chi2
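
# The `storage_dict` branch above indexes a flat list by observable id; the
# dict is assumed to be loaded once at startup from the JSON file written by
# the create_storage_dict option in Example #4 below, e.g. (ids illustrative):
#
#     storage_dict = {('tot_X2', 'all'): 0, ('m', 'in_o'): 1}
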
    parser.add_argument('--n-points','-n', dest='n_points', action='store', type=int, default=10000,
              help='Number of points to retrieve from the database')
    parser.add_argument('--root_save'  , '-r', dest='root_save', action='store_true', help='save to root file')
    return parser.parse_args()


if __name__=="__main__" :
    con = None
    args= parse_args()
    try:
        #connection and cursor
        con=sqlite3.connect(args.input_file)
        cur=con.cursor()
        #retrieve a point
        n_points=args.n_points
        root.root_open('temp/test.root')
        cur.execute('select * from points limit {}'.format(n_points))
        for row in cur:
            root.root_write(row)
        root.root_close()
#        rows=cur.fetchmany(100000)
#        points=sql.retrieve_points_from_rows(con,cur,rows)
    
    # Finalise ...
    except sqlite3.Error as e:
        if con:
            con.rollback()
        print('ERROR: {}'.format(e.args[0]))
        sys.exit()
    finally:
        if con:
            con.close()
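
# Sketch: instead of interpolating the LIMIT with str.format as above, sqlite3
# can also bind it as a parameter, which avoids any quoting issues:
#
#     cur.execute('select * from points limit ?', (n_points,))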
Example #4
def main(args):
    #Start with a clean set of parameters
    all_params={} 

    #get predictor modules
    predictors=User.predictors.get(args.predictors)
    predictor_modules=import_predictor_modules(predictors)
    all_params.update(predictor_modules)

    #FIXME: need to come up with generalised parsing of options to the predictors
    #get versions
    versions=User.versions.get(args.versions)
    for predictor , versions in versions.items():
        if predictor==predictors['spectrum_generator']:
            try:
                all_params[predictor]['version']=versions
            except KeyError:
                all_params[predictor]={'version':versions}
        else:
            try:
                all_params[predictor]['versions']=versions
            except KeyError:
                all_params[predictor]={'versions':versions}

    #this is afterburner style 
    if args.mc_cmssm :
        all_params.update(inputs.get_mc_cmssm_inputs(*(args.mc_cmssm)))
    elif args.mc_cmssm_default:
        all_params.update(inputs.get_mc_cmssm_inputs(271.378279475, 920.368119935, 14.4499538001, 
            -1193.57068242, 173.474173, 91.1877452551, 0.0274821578423))
    elif args.mc_neg_mu_cmssm :
        all_params.update(inputs.get_mc_neg_mu_cmssm_inputs(*(args.mc_neg_mu_cmssm)))
    elif args.mc_nuhm1 :
        all_params.update(inputs.get_mc_nuhm1_inputs(*(args.mc_nuhm1)))
    elif args.mc_neg_mu_nuhm1 :
        all_params.update(inputs.get_mc_neg_mu_nuhm1_inputs(*(args.mc_neg_mu_nuhm1)))
    elif args.mc_nuhm1_default :
        all_params.update(inputs.get_mc_nuhm1_inputs(237.467776964, 968.808711245, 15.649644, -1858.78698798, -6499529.79661,
                173.385870186, 91.1875000682, 0.0274949856504))
    elif args.mc_pmssm8 :
        all_params.update(inputs.get_mc_pmssm8_inputs(*(args.mc_pmssm8)))
    elif args.mc_pmssm10 :
        all_params.update(inputs.get_mc_pmssm10_inputs(*(args.mc_pmssm10)))
    elif args.mc_pmssm10_default :
        all_params.update(inputs.get_mc_pmssm10_inputs(1663.99,1671.75,414.131,294.935,311.199,1712.73,
                1841.21,718.489,43.4923,775.09,173.233,91.1874,0.0275018))
    elif args.run_softsusy_input_slha:
        all_params.update({'SoftSUSY':{'file':args.run_softsusy_input_slha}})
    elif args.run_spectrum:
        all_params.update({'spectrumfile':args.run_spectrum})

    #check for command line input parameters
    if args.input_pars is not None:
        # parse the dict literal safely (requires: import ast); safer than eval
        command_line_dict=ast.literal_eval(args.input_pars)
        for key, value in command_line_dict.items():
#            if input_pars.get(key) is None:
#                input_pars.update
            if isinstance(value,dict) and (all_params.get(key) is not None):
                all_params[key].update(value)
            else:
                all_params[key]=value
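
    # For illustration: with ast.literal_eval the option takes a Python dict
    # literal on the command line, e.g. (flag spelling and values assumed):
    #
    #     --input_pars "{'SoftSUSY': {'version': '4.1.8'}}"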

    #check for tmp_dir
    if args.tmp_dir:
        all_params.update({'tmp_dir':args.tmp_dir})

    #print the assembled inputs if requested
    if 'inputs' in args.verbose: 
        print(all_params)
        
    #check verbosity
    if args.verbose:
        all_params['verbose']=args.verbose

    try:
        slha_obj, point, stdouts = POINT.run_point(**all_params)
    except TypeError:
        print("ERROR: Point failed to run")
        exit()
    if __name__=="__main__" and slha_obj is None:
        print('ERROR: Point fails\nExiting')
        exit()

    if not args.suppress_chi2_calc:
        all_constraints=Constraints_list.constraints
        #mc8 data set
        try:
            data_set=data_sets[args.data_set]
        except KeyError:
            print('WARNING: invalid data set "{}". No X^2 is calculated'.format(args.data_set))
            data_set=[]
        constraints={name: all_constraints[name] for name in data_set}

        #FIXME: this should become a separate file
        #pass this constraints list to the chi2 function
        total, breakdown = Analyse.chi2(point,constraints)


    # optional printing
    if args.obs:
        pp.pprint(point)
    if args.breakdown:
        Analyse.print_chi2_breakdown(point, constraints,data_set)

    # save to root
    if args.root_save:
        # NOTE: for old_mc_rootstorage, need X^2 
        point[('tot_X2','all')]=total
        root.root_open(args.root_save)
        VARS=old_mc_rootstorage.get_VARS(point,point[('m','in_o')])
        root.root_write(VARS)
        root.root_close()
    if args.json_breakdown:
        l=[]
        for d in data_set:
            l.append([d,breakdown[d]])
        with open(args.json_breakdown,'w') as f:
            json.dump(l,f)
        

    # print only observable keys
    if args.observable_keys:
        pp.pprint(list(point.keys()))

    # store observables to pickled file
    if args.store_pickle:
        with open(args.store_pickle,'wb') as pickle_file:
            pickle.dump(point,pickle_file)

    #create json file with [('oid1','oid2',array_id), ... ] for storage array ids
    if args.create_storage_dict:
        point=OrderedDict([(('tot_X2', 'all'),0)]+list(point.items()))
        l=[]
        #make list of [oid1, oid2, array_id] triples
        for i,(key,val) in enumerate(point.items()):
            oid1,oid2=key
            l.append([oid1,oid2,i])
        #store as json file
        with open(args.create_storage_dict,'w') as f:
            json.dump(l,f,indent=3)
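        # the file holds a list of [oid1, oid2, array_id] triples, e.g.
        # (second entry illustrative):
        #     [["tot_X2", "all", 0], ["m", "in_o", 1], ...]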

    if args.numpy_out :
        if args.storage_dict:
            with open(args.storage_dict, 'r') as f:
                l=json.load(f)
            d={(oid1,oid2):array_id for oid1,oid2, array_id in l}
            #start with list of None's
            vars=[None]*len(d)
            #Fill with values
            for oids, val in point.items():
                vars[d[oids]]=val
            dt=numpy.dtype(len(vars)*[('','f')])
            vars=numpy.array(tuple(vars),dtype=dt)
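            # numpy auto-names empty-string fields f0, f1, ..., so each point
            # becomes one flat float record:
            #     numpy.dtype(2*[('','f')]).names == ('f0', 'f1')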
            try:
                a=numpy.load(args.numpy_out)
                print(args.numpy_out,'exists. Appending')
                a=numpy.append(a,vars)
            except FileNotFoundError:
                print('creating: ', args.numpy_out)
                a=vars
            numpy.save(args.numpy_out,a)
    #FIXME: we may want a better way of doing this
    return slha_obj, point, stdouts
if __name__=="__main__" :
    con = None
    args= parse_args()
    #initialise constraints once
    constraints={name: Constraints_list.constraints[name] for name in data_sets[args.data_set]}
    try:
        #connection 
        con=sqlite3.connect(args.input_file)
        #use row factory (see sqlite3 python documentation sqlite3.Row)
        con.row_factory=sqlite3.Row
        #cursor
        cur=con.cursor()
        #number of points
        n_points=args.n_points
        #open root file
        root.root_open(args.output_root)
        #get observables lookup
        lookup = sql.get_observable_ids(con,cur)    # lookup={col1: ( .. , .. ) , .... }
        #FIXME: this statement should make a sensible selection, for now just a number of points
        cur.execute('select * from points limit {}'.format(n_points))
        for row in cur:
            point={ oid: row[col]  for col, oid in lookup.items()} # this here is slow
            total, breakdown = Analyse.chi2(point,constraints)
            point[('tot_X2', 'all')]=total
            old_mc_rootstorage.write_point_to_root(point)
        root.root_close()
    
    # Finalise ...
    except sqlite3.Error as e:
        if con:
            con.rollback()
        print('ERROR: {}'.format(e.args[0]))
    finally:
        if con:
            con.close()
    # print which verbosity options were selected
    print("SELECTED VERBOSITY OPTIONS:")
    print(args.verbose)

    # dump sampling info in the sampling file
    if not args.suppress_info:
        info = my_pprint.pformat(vars(args))
        fname = "{}/mc_mn_info.txt".format(args.multinest_dir)
        with open(fname, "w") as info_file:
            info_file.write(info)

    # open root file before calling the sampling algorithm
    if args.root_out:
        if not os.path.exists(args.root_out):
            os.makedirs(args.root_out)
        root.root_open(get_root_file_name(args.root_out))

    # run multinest
    multinest.run(
        LogLikelihood=myloglike,
        Prior=myprior,
        n_dims=len(param_ranges),
        n_params=None,
        n_clustering_params=None,
        wrapped_params=None,
        multimodal=args.multimodal,
        const_efficiency_mode=args.const_efficiency_mode,
        n_live_points=args.n_live_points,
        evidence_tolerance=args.evidence_tolerance,
        sampling_efficiency=args.sampling_efficiency,
        n_iter_before_update=args.n_iter_before_update,
        outputfiles_basename="{}/".format(args.multinest_dir),
    )

    all_constraints=Constraints_list.constraints
    #mc8 data set
    data_set= [ 'Al(SLD)', 'Ab', 'Ac', 'Oh^2_mc8', 'Higgs125', 'BR(Bd->ll)',  
            'Gamma_Z', 'GZ_in', 'R(B->Xsll)', 'Al(P_tau)', 'MZ', 'R(D_ms)', 'MW', 'Afb_l', 
            'xenon100', 'DAlpha_had', 'R(Delta_mk)',  'sigma_had^0', 'Afb(c)', 
            'atlas5_m0_m12', 'Afb(b)',  'R(b->sg)', 'R(Dms)/R(Dmd)', 'R(B->taunu)', 
            'Rc', 'Rb',  'Rl', 'mc8_bsmm', 'sintheta_eff', 'Mt', 'R(K->lnu)', 'R(Kp->pinn)', 'gminus2mu', 'MATANB' ]
    constraints={name: all_constraints[name] for name in data_set}

    #pass this constraints list to the chi2 function
    total, breakdown = Analyse.chi2(combined_obs,constraints)

    bpp = pprint.PrettyPrinter(indent=4, depth=3)

    # optional printing
    if args.obs:
        bpp.pprint(combined_obs)
    if args.breakdown:
        bpp.pprint(breakdown)
        print('Total chi2:',total)

    # save to root
    if args.root_save:
        # NOTE: for old_mc_rootstorage, need X^2 
        combined_obs[('tot_X2','all')]=total
        root.root_open('temp/test.root')
        old_mc_rootstorage.write_point_to_root(combined_obs)
        root.root_close()
        info = """
number of live points   = {nlive}
evidence tolerance      = {tol}
sampling efficiency     = {eff}
resume                  = {res}
random seed             = {seed}
data set                = {dataset}
parameter boundaries    =
{boundaries}
        """.format(
                nlive=nlive, tol=tolerance, eff=samplingefficiency, res=args.resume,
                dataset=args.data_set,
                seed=my_seed,
                boundaries=bpp.pformat(get_param_ranges()),
                )
        fname='{}/mc_mn_info.txt'.format(args.multinest_dir)
        with open(fname,'w') as info_file:
            info_file.write(info)

    #open root file before calling the sampling algorithm
    if args.root_out:
        root.root_open(args.root_out)

    # run multinest 
    multinest.run(myloglike, 
            myprior, 
            n_dims                  = len(param_ranges),
            resume                  = args.resume, 
            verbose                 = ('multinest' in args.verbose), 
            sampling_efficiency     = samplingefficiency, 
            n_live_points           = nlive , 
            max_iter                = args.max_iter,
            seed                    = my_seed,

            outputfiles_basename    = '{}/'.format(args.multinest_dir),
            evidence_tolerance      = tolerance)
    #close root file after sampling
    if args.root_out:
        root.root_close()
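
# The sampler above is handed a `myprior` that this page never shows. A
# minimal sketch following the pymultinest convention of mapping the unit
# cube in place; param_ranges is assumed to be a list of (low, high) pairs,
# as len(param_ranges) above suggests:
def myprior(cube, ndim, nparams):
    for i in range(ndim):
        low, high = param_ranges[i]
        cube[i] = low + (high - low)*cube[i]
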
def signal_handler(signal, frame):
    print('EXITING ON SIGNAL: {}'.format(signal))
    if args.root_out:
        root.root_close()
    sys.exit(0)
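
# Hooking the handler up uses the standard library signal module; trapping
# SIGINT is an assumption about which signal these scripts catch:
import signal
signal.signal(signal.SIGINT, signal_handler)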