def dump(self):
    outpath = self.get_outpath()
    logging.debug('Making directory {0}'.format(outpath))
    if not core.is_testmode() and not os.path.isdir(outpath):
        os.makedirs(outpath)
    outname = os.path.join(outpath, self.get_outname())
    logging.info('Dumping contents to {0}'.format(outname))
    contents = [
        'file={0}'.format(self.d.path),
        'kappab={0}'.format(self.d.kappab),
        'kappac={0}'.format(self.d.kappac),
        'muR={0}'.format(self.d.muR),
        'muF={0}'.format(self.d.muF),
        'Q={0}'.format(self.d.Q),
        'binBoundaries={0}'.format(','.join(map(str, self.bin_boundaries))),
        'binCenters={0}'.format(','.join(map(str, self.bin_centers))),
        'crosssection={0}'.format(','.join(map(str, self.xs_per_GeV))),
        'crosssection_integrated={0}'.format(','.join(map(str, self.xs))),
        'ratios={0}'.format(','.join(map(str, self.ratio))),
        ]
    contents = '\n'.join(contents)
    logging.debug('Contents:\n{0}'.format(contents))
    if not core.is_testmode():
        with open(outname, 'w') as out_fp:
            out_fp.write(contents)

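# Sketch of the plain-text file written by dump() above. The keys are the ones
# built in `contents`; the sample values below are made up for illustration and
# depend entirely on the spectrum being dumped:
#
#   file=/some/path/to/spectrum.root
#   kappab=1.0
#   kappac=1.0
#   muR=1.0
#   muF=1.0
#   Q=1.0
#   binBoundaries=0.0,15.0,30.0,45.0
#   binCenters=7.5,22.5,37.5
#   crosssection=0.5,0.3,0.1
#   crosssection_integrated=7.5,4.5,1.5
#   ratios=1.0,0.9,0.8
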
def dump_txt_to_file(text, out):
    actual_out_dir = os.path.dirname(out)
    if not os.path.isdir(actual_out_dir):
        if core.is_testmode():
            logging.info('Would now create directory {0}'.format(actual_out_dir))
        else:
            logging.info('Creating directory {0}'.format(actual_out_dir))
            os.makedirs(actual_out_dir)
    out = fix_extension_for_txt(out)
    if core.is_testmode():
        logging.info('Would now dump text to {0}'.format(out))
    else:
        logging.info('Dumping text to {0}'.format(out))
        with open(out, 'w') as out_fp:
            out_fp.write(text)

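# Usage sketch (hypothetical path, assuming core.is_testmode() returns False):
#
#   dump_txt_to_file('kappab=1.0\nkappac=1.0', 'out/scan_settings.dat')
#
# This would create 'out/' if it does not yet exist, pass the filename through
# fix_extension_for_txt (which presumably normalizes the extension to '.txt'),
# and write the text. In testmode only the intended actions are logged.
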
def register_jobids_in_jobmanager(self, submission_output):
    if core.is_testmode():
        print '[TESTMODE] Not writing any jobmanager files'
        return
    if not self.onBatch:
        print '{0} was not on batch; not registering jobs.'.format(self)
        return
    # Example of a submission line that is matched:
    # Your job 8086766 ("job__SCAN_ASIMOV_hgg_Top_reweighted_nominal_148_0.sh") has been submitted
    jobids = re.findall(r'Your job (\d+)', submission_output)
    if len(jobids) == 0:
        print '\nNo jobids were found in the passed submission output; nothing to register for the jobmanager'
        return
    header = [
        basename(self.subDirectory),
        'datacard: {0}'.format(self.datacard),
        'subDirectory: {0}'.format(self.subDirectory),
        'registration time: {0}'.format(strftime('%y-%m-%d %H:%M:%S')),
        'example cmd:\n\n{0}\n'.format('\n '.join(self.parse_command()))
        ]
    contents = '\n'.join(header) + '\n' + '\n'.join(jobids) + '\n'
    _, jobman_file = tempfile.mkstemp(
        prefix='tklijnsm_queuegroup_',
        suffix='.jobman',
        dir='/tmp'
        )
    print 'Dumping following jobmanager contents to {0}:\n'.format(jobman_file)
    print contents + '\n'
    with open(jobman_file, 'w') as jobman_fp:
        jobman_fp.write(contents)

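# Illustration of the jobid extraction above; submission_output is a made-up
# example of batch (SGE-style) submission output:
#
#   submission_output = (
#       'Your job 8086766 ("job__SCAN_0.sh") has been submitted\n'
#       'Your job 8086767 ("job__SCAN_1.sh") has been submitted\n'
#   )
#   re.findall(r'Your job (\d+)', submission_output)  # -> ['8086766', '8086767']
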
def drop_pdfindices(card_file, category_pats=None):
    if differentials.core.is_testmode():
        return
    with open(card_file, 'r') as card_fp:
        card = card_fp.read()
    if category_pats is None:
        category_pats = ['recoPt_600p0_10000p0']
    lines = []
    for line in card.split('\n'):
        for category_pat in category_pats:
            if re.match(r'pdfindex_.*{0}'.format(category_pat), line):
                logging.debug(
                    'Dropping following line from {0} (matched to {2}):\n{1}'
                    .format(card_file, line, category_pat))
                break
        else:
            lines.append(line)
    new_card = '\n'.join(lines)
    logging.trace('Datacard after removing lines:\n{0}'.format(new_card))
    logging.info('Writing new card after deleting lines to {0}'.format(card_file))
    if not core.is_testmode():
        with open(card_file, 'w') as card_fp:
            card_fp.write(new_card)

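# Illustration (hypothetical datacard line): with the default pattern
# 'recoPt_600p0_10000p0', a discrete-profiling index line such as
#
#   pdfindex_recoPt_600p0_10000p0_13TeV  discrete
#
# matches r'pdfindex_.*recoPt_600p0_10000p0' (re.match anchors at the start of
# the line) and is dropped; all other lines are kept unchanged.
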
def get_output(self):
    with core.enterdirectory(self.subDirectory, verbose=False):
        if core.is_testmode():
            output = 'higgsCombine_[TESTMODE].root'
        else:
            output = 'higgsCombine{0}.{1}.mH{2}.root'.format(
                self.get_task_name(), self.input.METHOD, int(self.input.DEFAULT_MASS)
                )
        output = abspath(output)
    return output

def run(self):
    logging.info('Creating {0} if not yet existing'.format(self.get_outdir()))
    if not core.is_testmode():
        if not isdir(self.get_outdir()):
            os.makedirs(self.get_outdir())
    cmd = self.get_cmd()
    core.execute(cmd)

def execute_command(self, cmd):
    if self.onBatch:
        output = core.execute(cmd, py_capture_output=True)
        logging.info('Output of cmd {0}'.format(cmd))
        logging.info(output)
    else:
        core.execute(cmd)
        output = ''
    if core.is_testmode():
        output = '\nOUTPUT: some output but this is testmode'
    return output

def get_outdir(self):
    if self.outdir is None:
        outdir = self.default_outdir
    else:
        outdir = self.outdir
    if outdir.endswith('/'):
        outdir = outdir[:-1]
    if len(self.tags) > 0:
        outdir += '_' + '_'.join(self.tags)
    if not core.is_testmode() and not os.path.isdir(outdir):
        os.makedirs(outdir)
    return outdir

def list_accepted_points(self, fastscanFile):
    self.print_info('Selecting points from output of fastscan; deltaNLLCutOff = {0}'.format(self.deltaNLLCutOff))
    if not isfile(fastscanFile):
        if core.is_testmode():
            self.print_info('[TESTMODE] No file \'{0}\'; Returning some bogus accepted points'.format(fastscanFile))
            # return [ Container(iPoint=i) for i in xrange(2*self.nPointsPerJob) ]
            return range(2*self.nPointsPerJob)
        else:
            raise ValueError('File \'{0}\' does not exist'.format(fastscanFile))

    with core.openroot(fastscanFile) as fastscanFp:
        if not fastscanFp.GetListOfKeys().Contains('limit'):
            raise ValueError('There is no tree \'limit\' in {0}'.format(fastscanFile))

        acceptedPoints = []
        rejectedPoints = []
        tree = fastscanFp.Get('limit')
        for iEvent, event in enumerate(tree):
            container = core.AttrDict()
            container.iPoint = iEvent
            container.deltaNLL = event.deltaNLL
            container.POIvals = [getattr(event, POI) for POI in self.input.POIs]
            if iEvent == 0:
                self.bestfit = container
                continue
            if container.deltaNLL <= self.deltaNLLCutOff:
                acceptedPoints.append(container)
            else:
                rejectedPoints.append(container)

    if self.verbose:
        self.print_info('Rejected points:')
        for container in rejectedPoints:
            line = [
                '{0:7}'.format(container.iPoint),
                'deltaNLL = {0:+10.2f}'.format(container.deltaNLL)
                ]
            for POI, POIval in zip(self.input.POIs, container.POIvals):
                line.append('{0:10} = {1:+7.2f}'.format(POI, POIval))
            self.print_info(' | '.join(line))
        self.print_info('Accepted points:')
        for container in acceptedPoints:
            line = [
                '{0:7}'.format(container.iPoint),
                'deltaNLL = {0:+10.2f}'.format(container.deltaNLL)
                ]
            for POI, POIval in zip(self.input.POIs, container.POIvals):
                line.append('{0:10} = {1:+7.2f}'.format(POI, POIval))
            self.print_info(' | '.join(line))
    return [c.iPoint for c in acceptedPoints]

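# Numerical sketch of the selection above (made-up values): with
# deltaNLLCutOff = 30, a scan point with deltaNLL = 12.4 ends up in
# acceptedPoints and one with deltaNLL = 85.1 in rejectedPoints; entry 0 of the
# 'limit' tree is always treated as the best fit and excluded from both lists.
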
def copy_physics_model_dir():
    """Copies the physics models to the compiled directory (scram b takes unnecessarily long)"""
    physics_models_dir = 'physicsModels'
    dst = join(os.environ['CMSSW_BASE'], 'bin', os.environ['SCRAM_ARCH'], basename(physics_models_dir))
    logging.info('Copying {0} to {1}'.format(physics_models_dir, dst))
    if not core.is_testmode():
        if isdir(dst):
            shutil.rmtree(dst)
        shutil.copytree(physics_models_dir, dst)

def parse_command(self):
    cmd = super(CombineCorrMat, self).parse_command()
    if self.input.asimov and len(self.input.PhysicsModelParameters) == 0:
        raise ValueError(
            'PhysicsModelParameters *have* to be set when running asimov, '
            'or the best fit may make no sense'
            )
    if core.is_testmode():
        pdf_vars_to_freeze = ['some', 'pdfs']
    else:
        # pdf_vars_to_freeze = ListOfPDFIndicesToFreeze(postfitFilename, verbose=False)
        pdf_vars_to_freeze = differentials.pdffreezer.PDFFreezer(self.datacard).get_vars_to_freeze()
    cmd.extend([
        '--algo none',
        '--snapshotName MultiDimFit',
        '--saveWorkspace',
        # '--skipInitialFit',
        '--computeCovarianceMatrix=1',
        '--freezeNuisances {0}'.format(','.join(pdf_vars_to_freeze)),
        ])
    return cmd

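# For orientation only: the options appended above would appear in the final
# combine invocation roughly as below. The workspace name and the frozen pdf
# index names are placeholders, not taken from the code above:
#
#   combine workspace.root -M MultiDimFit --algo none --snapshotName MultiDimFit \
#       --saveWorkspace --computeCovarianceMatrix=1 \
#       --freezeNuisances pdfindex_bin1,pdfindex_bin2
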
def write_to_file(out_file, contents):
    logging.debug('Contents of datacard {0}:\n{1}'.format(out_file, contents))
    logging.info('Opening {0} and dumping contents'.format(out_file))
    if not core.is_testmode():
        with open(out_file, 'w') as out_fp:
            out_fp.write(contents)