def save(self, filename, dotc=False, dotroot=False, json=False, verbose=False):
    ''' Save the current canvas contents to [filename] '''
    # NOTE(review): reconstructed from whitespace-collapsed source; the
    # statement order is preserved but the original indentation was lost.
    # Flush any pending draw operations before writing image files.
    self.pad.Draw()
    self.canvas.Update()
    # Make sure the output directory exists.
    if not os.path.exists(self.outputdir):
        os.makedirs(self.outputdir)
    if verbose:
        print 'saving ' + os.path.join(self.outputdir, filename) + '.png'
    # PNG and PDF renderings are always written.
    self.canvas.SaveAs(os.path.join(self.outputdir, filename) + '.png')
    self.canvas.SaveAs(os.path.join(self.outputdir, filename) + '.pdf')
    if dotc:
        # Optionally dump the canvas as a ROOT C macro.
        self.canvas.SaveAs(os.path.join(self.outputdir, filename) + '.C')
    if json:
        # Dump bin-1 content/error of every kept histogram (and of each
        # component of any THStack) to a JSON sidecar file.
        jdict = {}
        for obj in self.keep:
            if isinstance(obj, ROOT.TH1):
                jdict[obj.GetTitle()] = [
                    obj.GetBinContent(1), obj.GetBinError(1)
                ]
            if isinstance(obj, ROOT.THStack):
                jdict['hist_stack'] = {}
                for i in obj.GetHists():
                    jdict['hist_stack'][i.GetTitle()] = [
                        i.GetBinContent(1), i.GetBinError(1)
                    ]
        with open(os.path.join(self.outputdir, filename) + '.json', 'w') as jout:
            jout.write(prettyjson.dumps(jdict))
    if dotroot:
        # Optionally write the canvas and all kept objects into a .root file.
        outfile = ROOT.TFile.Open(
            os.path.join(self.outputdir, filename) + '.root', 'recreate')
        outfile.cd()
        self.canvas.Write()
        for obj in self.keep:
            obj.Write()
        #self.keep = []
        self.reset()
        outfile.Close()
        #self.canvas = plotting.Canvas(name='adsf', title='asdf')
        #self.canvas.cd()
        #self.pad = plotting.Pad(0., 0., 1., 1.) #ful-size pad
        #self.pad.cd()

    # Clean up state so the next plot starts fresh.
    if self.keep and self.lower_pad:
        #pass
        self.reset()
    else:
        # Reset keeps
        self.keep = []
    # Reset logx/y
    self.canvas.SetLogx(False)
    self.canvas.SetLogy(False)
def save(self, filename, dotc=False, dotroot=False, json=False, verbose=False):
    ''' Save the current canvas contents to [filename] '''
    # NOTE(review): duplicate copy of the save() above; reconstructed from
    # whitespace-collapsed source with the statement order preserved.
    # Flush any pending draw operations before writing image files.
    self.pad.Draw()
    self.canvas.Update()
    # Make sure the output directory exists.
    if not os.path.exists(self.outputdir):
        os.makedirs(self.outputdir)
    if verbose:
        print 'saving ' + os.path.join(self.outputdir, filename) + '.png'
    # PNG and PDF renderings are always written.
    self.canvas.SaveAs(os.path.join(self.outputdir, filename) + '.png')
    self.canvas.SaveAs(os.path.join(self.outputdir, filename) + '.pdf')
    if dotc:
        # Optionally dump the canvas as a ROOT C macro.
        self.canvas.SaveAs(os.path.join(self.outputdir, filename) + '.C')
    if json:
        # Dump bin-1 content/error of every kept histogram (and of each
        # component of any THStack) to a JSON sidecar file.
        jdict = {}
        for obj in self.keep:
            if isinstance(obj, ROOT.TH1):
                jdict[obj.GetTitle()] = [
                    obj.GetBinContent(1), obj.GetBinError(1)
                ]
            if isinstance(obj, ROOT.THStack):
                jdict['hist_stack'] = {}
                for i in obj.GetHists():
                    jdict['hist_stack'][i.GetTitle()] = [
                        i.GetBinContent(1), i.GetBinError(1)
                    ]
        with open(os.path.join(self.outputdir, filename) + '.json', 'w') as jout:
            jout.write(prettyjson.dumps(jdict))
    if dotroot:
        # Optionally write the canvas and all kept objects into a .root file.
        outfile = ROOT.TFile.Open(
            os.path.join(self.outputdir, filename) + '.root', 'recreate')
        outfile.cd()
        self.canvas.Write()
        for obj in self.keep:
            obj.Write()
        #self.keep = []
        self.reset()
        outfile.Close()
        #self.canvas = plotting.Canvas(name='adsf', title='asdf')
        #self.canvas.cd()
        #self.pad = plotting.Pad(0., 0., 1., 1.) #ful-size pad
        #self.pad.cd()

    # Clean up state so the next plot starts fresh.
    if self.keep and self.lower_pad:
        #pass
        self.reset()
    else:
        # Reset keeps
        self.keep = []
    # Reset logx/y
    self.canvas.SetLogx(False)
    self.canvas.SetLogy(False)
# NOTE(review): this chunk begins mid-statement -- the closing ']' below
# terminates a farmout submit-command list whose opening lies outside this
# view; 'command', 'dag_directory', 'jobId', 'sample', 'cfg', 'options',
# 'site_spec', 'fsa_version', 'sample_info', 'output_dir', 'md5', 'time'
# and 'production_info' are all bound earlier in the file.
    '--output-dag-file=%s/dag.dag' % dag_directory,
]
# Finish assembling the submit command and echo it for the operator.
command.extend(farmout_options)
command.append('-'.join([jobId, sample]))
command.append(cfg)
command.extend(options)
print ' '.join(command)
# Provenance metadata recorded for this (jobId, sample) submission.
info = {
    'creator': '%s' % site_spec._log_name,
    'jobid': jobId,
    'production date': time.strftime("%c"),
    'FSA Version': fsa_version(),
    'DBS Name': sample_info['datasetpath'],
    'PAT Location': output_dir,
}
# Key each sample's info block by the md5 of its repr.
hasher = md5()
hasher.update(info.__repr__())
production_info[hasher.hexdigest()] = info

import FinalStateAnalysis.Utilities.prettyjson as prettyjson
# The summary file is named after the md5 of the full production map.
# NOTE(review): the local name 'json' shadows the stdlib module here.
hasher = md5()
hasher.update(production_info.__repr__())
with open(hasher.hexdigest() + '.json', 'w') as json:
    json.write(prettyjson.dumps(production_info))
#! /bin/env python

'''
Add (or overwrite) one top-level tag in a set of prettyjson files.

Usage: <script> --tag VALUE [--label KEY] file1.json [file2.json ...]

For every json file listed on the command line, sets data[KEY] = VALUE
(KEY defaults to 'tag') and rewrites the file in place.
'''

import FinalStateAnalysis.Utilities.prettyjson as prettyjson
from optparse import OptionParser
import logging

parser = OptionParser()
parser.add_option('--tag', '-t', type=str, default=None,
                  help='value of the tag to be added', dest='tag')
parser.add_option('--label', '-l', type=str, default='tag',
                  help='label of the tag to be added', dest='label')
(options, jsons) = parser.parse_args()

tagVal = options.tag
for jfile in jsons:
    # Use 'with' for both read and write so file handles are closed
    # promptly (the original leaked the read handle).  Also avoid naming
    # the parsed document 'json', which shadows the stdlib module.
    with open(jfile) as fin:
        data = prettyjson.loads(fin.read())
    data[options.label] = tagVal
    with open(jfile, 'w') as out:
        out.write(prettyjson.dumps(data))
# NOTE(review): fragment of a plotting driver script reconstructed from
# whitespace-collapsed source; 'plotter', 'folder', 'var', 'kwargs',
# 'toPlot' and 'jobid' are bound earlier, outside this view.  The first
# statements appear to sit inside a loop over toPlot -- confirm against
# the full file.
plotter.plot_with_estimate(folder, var, **kwargs)
plotter.save('final_%s_%s' % ('signal_region', var) )
# The dimuon mass spectrum additionally gets a log-scale rendering with a
# fixed y range; kwargs is restored afterwards so later plots are unaffected.
if var == "m1_m2_Mass":
    kwargs['logscale'] = True
    kwargs['yrange'] = [1, 3*10**8]
    plotter.plot_with_estimate(folder, var, **kwargs)
    plotter.save('final_%s_%s_logscale' % ('signal_region', var) )
    del kwargs['logscale']
    del kwargs['yrange']

# Write the summary and dump the selection yields to JSON.
plotter.set_subdir('')
plotter.write_summary('', 'm1_m2_Mass')
yield_dump = plotter.dump_selection_info(['h2Tau'], 'm1_m2_Mass')
with open('results/%s/plots/mm/yield_dump.json' % jobid, 'w') as jfile:
    jfile.write(prettyjson.dumps(yield_dump) )

#Make QCD region plots
# Same-sign region: swap 'os' -> 'ss' in the folder path.
folder = folder.replace('os', 'ss')
plotter.set_subdir('qcd')
for var, kwargs in toPlot.iteritems():
    plotter.plot_mc_vs_data(folder, var, **kwargs)
    plotter.save('mc_vs_data_%s_%s' % ('qcd_region', var) )
#FIXME: _understand systamtic uncertainties:
#       _ask Evan for Zrecoil correction in MVA MET
#       _make uncertainties on Zrecoil correction --> propagate to WJets Ztautau QCD ecc...
#       _make #evts passing cuts #of MC events passing cuts (+ stat+sys)
# NOTE(review): fragment -- the body of a loop over grouped limit files;
# 'paths', 'kind', 'method', 'store', 'input_dir', 'io' and
# 'tree_to_quantile_map' are bound outside this view.
# Channel name is the third-from-last path component: .../<channel>/<mass>/<file>.
channel = paths[0].split('/')[-3]
store['kind'] = kind
store['method'] = method
store['channel'] = channel
store['limits'] = {}
for path in paths:
    print path
    # Mass point is encoded as the parent directory name.
    mass = path.split('/')[-2]
    store['limits'][mass] = {}
    tfile = io.open(path)
    limit_tree = tfile.Get('limit')
    # Map of quantile string -> limit value extracted from the 'limit' TTree.
    limit_map = tree_to_quantile_map(limit_tree)
    if kind == 'expected':
        # Expected limits carry the full +/-1,2 sigma band.
        store['limits'][mass]['+2sigma'] = limit_map['0.975']
        store['limits'][mass]['+1sigma'] = limit_map['0.840']
        store['limits'][mass]['median'] = limit_map['0.500']
        store['limits'][mass]['-1sigma'] = limit_map['0.160']
        store['limits'][mass]['-2sigma'] = limit_map['0.025']
    else:
        # Observed limits have only the single value (quantile -1.000).
        store['limits'][mass]['median'] = limit_map['-1.000']
    tfile.Close()
# One JSON file per (channel, method, kind) group.
outfilename = '%s_%s_%s_limit.json' % (channel, method, kind)
with open(os.path.join(input_dir, outfilename), 'w') as outfile:
    outfile.write(prettyjson.dumps(store))
# NOTE(review): fragment -- the first line below is the tail of a grouping
# loop whose header lies outside this view; 'file_groups', 'kind', 'method',
# 'tfile_path', 'input_dir', 'io' and 'tree_to_quantile_map' are bound earlier.
file_groups[(kind, method)].append(tfile_path)

# Convert each (kind, method) group of limit ROOT files into one JSON summary.
for info, paths in file_groups.iteritems():
    store = {}
    kind, method = info
    # Channel name is the third-from-last path component:
    # .../<channel>/<mass>/<file>.
    channel = paths[0].split('/')[-3]
    store['kind'] = kind
    store['method'] = method
    store['channel'] = channel
    store['limits'] = {}
    for path in paths:
        print path
        # Mass point is encoded as the parent directory name.
        mass = path.split('/')[-2]
        store['limits'][mass] = {}
        tfile = io.open(path)
        limit_tree = tfile.Get('limit')
        # Map of quantile string -> limit value extracted from the 'limit' TTree.
        limit_map = tree_to_quantile_map(limit_tree)
        if kind == 'expected':
            # Expected limits carry the full +/-1,2 sigma band.
            store['limits'][mass]['+2sigma'] = limit_map['0.975']
            store['limits'][mass]['+1sigma'] = limit_map['0.840']
            store['limits'][mass]['median'] = limit_map['0.500']
            store['limits'][mass]['-1sigma'] = limit_map['0.160']
            store['limits'][mass]['-2sigma'] = limit_map['0.025']
        else:
            # Observed limits have only the single value (quantile -1.000).
            store['limits'][mass]['median'] = limit_map['-1.000']
        tfile.Close()
    # One JSON file per (channel, method, kind) group.
    outfilename = '%s_%s_%s_limit.json' % (channel, method, kind)
    with open(os.path.join(input_dir, outfilename), 'w') as outfile:
        outfile.write(prettyjson.dumps(store))
# NOTE(review): fragment -- 'command', 'jobId', 'sample', 'cfg', 'options',
# 'site_spec', 'fsa_version', 'sample_info', 'output_dir', 'md5', 'time'
# and 'production_info' are all bound earlier, outside this view.
# Finish assembling the submit command and echo it for the operator.
command.append('-'.join([jobId, sample]))
command.append(cfg)
command.extend(options)
print ' '.join(command)
# Provenance metadata recorded for this (jobId, sample) submission.
info = {
    'creator' : '%s' % site_spec._log_name,
    'jobid' : jobId,
    'production date' : time.strftime("%c"),
    'FSA Version' : fsa_version(),
    'DBS Name' : sample_info['datasetpath'],
    'PAT Location' : output_dir,
}
# Key each sample's info block by the md5 of its repr.
hasher = md5()
hasher.update(info.__repr__())
production_info[hasher.hexdigest()] = info

import FinalStateAnalysis.Utilities.prettyjson as prettyjson
# The summary file is named after the md5 of the full production map.
# NOTE(review): the local name 'json' shadows the stdlib module here.
hasher = md5()
hasher.update(production_info.__repr__())
with open(hasher.hexdigest() + '.json', 'w') as json:
    json.write( prettyjson.dumps( production_info ) )
#! /bin/env python

'''
Add (or overwrite) one top-level tag in a set of prettyjson files.

Usage: <script> --tag VALUE [--label KEY] file1.json [file2.json ...]

For every json file listed on the command line, sets data[KEY] = VALUE
(KEY defaults to 'tag') and rewrites the file in place.
'''

import FinalStateAnalysis.Utilities.prettyjson as prettyjson
from optparse import OptionParser
import logging

parser = OptionParser()
parser.add_option('--tag', '-t', type=str, default=None,
                  help='value of the tag to be added', dest='tag')
parser.add_option('--label', '-l', type=str, default='tag',
                  help='label of the tag to be added', dest='label')
(options, jsons) = parser.parse_args()

tagVal = options.tag
for jfile in jsons:
    # Use 'with' for both read and write so file handles are closed
    # promptly (the original leaked the read handle).  Also avoid naming
    # the parsed document 'json', which shadows the stdlib module.
    with open(jfile) as fin:
        data = prettyjson.loads(fin.read())
    data[options.label] = tagVal
    with open(jfile, 'w') as out:
        out.write(prettyjson.dumps(data))