#!/usr/bin/env python

from TauAnalysis.Configuration.recoSampleDefinitionsZtoMuTau_7TeV_grid_cfi import recoSampleDefinitionsZtoMuTau_7TeV
from TauAnalysis.Configuration.harvestAnalysisResults_grid import harvestAnalysisResults
import TauAnalysis.Configuration.userRegistry as reg 


channel = 'ZtoMuTau_bgEstTemplate'
reg.overrideJobId(channel,'Run32')
analysisFilePath = reg.getAnalysisFilePath(channel)
harvestingFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)
tmpFilePath = reg.getTmpFilePath(channel)

harvestAnalysisResults(channel = channel,
                       samples = recoSampleDefinitionsZtoMuTau_7TeV,
                       inputFilePath = analysisFilePath, outputFilePath = harvestingFilePath, jobId = jobId,
                       tmpFilePath = tmpFilePath, use_job_report = True,
                       useCastor = False)
Example #2
#!/usr/bin/env python

from TauAnalysis.Configuration.recoSampleDefinitionsAHtoMuTau_7TeV_grid_cfi \
        import recoSampleDefinitionsAHtoMuTau_7TeV as samples

from TauAnalysis.Configuration.submitAnalysisToGrid import submitAnalysisToGrid
import TauAnalysis.Configuration.userRegistry as reg

channel = 'AHtoMuTau'
reg.overrideJobId(channel, 'RunSVTestApr01')

configFile = 'runAHtoMuTau_cfg.py'
analysisFilePath = reg.getAnalysisFilePath(channel)
jobId = reg.getJobId(channel)

fake_rate_samples = [
    sample for sample in samples['SAMPLES_TO_ANALYZE']
    if samples['RECO_SAMPLES'][sample]['enableFakeRates']
]

samplesToAnalyze = [
    'ZtautauPU156bx_pythiaZ2',
    'A90',
    'bbA90',
    'A100',
    'bbA100',
    'A120',
    'bbA120',
    'A130',
    'bbA130',
    'A140',
import FWCore.ParameterSet.Config as cms

from TauAnalysis.Configuration.recoSampleDefinitionsZtoMuTau_7TeV_grid_cfi import recoSampleDefinitionsZtoMuTau_7TeV
from TauAnalysis.Configuration.plotZtoMuTau_drawJobs_cfi import *
from TauAnalysis.Configuration.makePlots2_grid import makePlots
from TauAnalysis.Configuration.userRegistry import getHarvestingFilePath, overrideJobId, getJobId

# import utility function to enable factorization
from TauAnalysis.Configuration.tools.factorizationTools import enableFactorization_makeZtoMuTauPlots_grid2

process = cms.Process('makeZtoMuTauPlots')

process.load("TauAnalysis.Configuration.dumpZtoMuTau_grid_cff")

channel = 'ZtoMuTau'
overrideJobId(channel, '2011Apr09_HPSloose')
inputFilePath = getHarvestingFilePath(channel)
jobId = getJobId(channel)

makePlots(process, channel = channel, samples = recoSampleDefinitionsZtoMuTau_7TeV,
          inputFilePath = inputFilePath, jobId = jobId,
          analyzer_drawJobConfigurator_indOutputFileName_sets = [
              [ "zMuTauAnalyzerOS", drawJobConfigurator_ZtoMuTauOS, "plotZtoMuTauOS_#PLOT#.pdf" ],
	      [ "zMuTauAnalyzerSS", drawJobConfigurator_ZtoMuTauSS, "plotZtoMuTauSS_#PLOT#.pdf" ]
          ],
          drawJobTemplate = plots_ZtoMuTau,
          enableFactorizationFunction = enableFactorization_makeZtoMuTauPlots_grid2,
          dqmDirectoryFilterStatistics = {
              'factorizationDisabled' : 'zMuTauAnalyzerOS/FilterStatistics',
              'factorizationEnabled' : 'zMuTauAnalyzerOS_factorizedWithMuonIsolation/FilterStatistics'
          },
Example #4
from TauAnalysis.Configuration.tools.harvestingLXBatch import \
        make_harvest_scripts

from TauAnalysis.Configuration.tools.harvesting import \
        castor_source, crabdir_sources, \
        clean_by_crab_id

import TauAnalysis.Configuration.userRegistry as reg
import os
import glob

channel = 'AHtoMuTau'

for id in ['RunSVTestApr04']:
    reg.overrideJobId(channel, id)
    analysisFilePath = reg.getAnalysisFilePath(channel)
    harvestingFilePath = reg.getHarvestingFilePath(channel)
    jobId = reg.getJobId(channel)
    tmpFilePath = reg.getBatchHarvestLocation(channel)

    # Regexes that define how to parse the filename of a file on castor
    plot_regex = r"plots_%s_(?P<sample>\w+?)_%s_(?P<gridJob>\d*)(_(?P<gridTry>\d*))*_(?P<hash>[a-zA-Z0-9]*).root" % (
        channel, jobId)
    skim_regex = r"final_events_%s_(?P<sample>\w+?)_%s_(?P<gridJob>\d*)(_(?P<gridTry>\d*))*_(?P<hash>[a-zA-Z0-9]*).root" % (
        channel, jobId)
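    # Illustration only (hypothetical filename, not taken from CASTOR): with
    # channel 'AHtoMuTau' and the 'RunSVTestApr04' jobId, a file named
    #   plots_AHtoMuTau_Ztautau_RunSVTestApr04_1_2_a1b2c3.root
    # is parsed by plot_regex into sample='Ztautau', gridJob='1', gridTry='2',
    # hash='a1b2c3'; skim_regex handles the 'final_events_*' files the same way.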

    def local_copy_mapper(sample):
        " Define where we want to copy the final output locally "
        return os.path.join(
            #'/tmp/friis/Run33SYS',
#!/usr/bin/env python

import TauAnalysis.Configuration.userRegistry as reg

import os
import re

from TauAnalysis.Configuration.tools.harvestingLXBatch import make_harvest_scripts
from TauAnalysis.Configuration.tools.harvesting import castor_source, clean_by_crab_id

channel = 'ZtoMuTau_tauIdEff'

reg.overrideJobId(channel, '2011Aug18')

analysisFilePath = reg.getAnalysisFilePath(channel)
harvestingFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)
tmpFilePath = reg.getBatchHarvestLocation(channel)

SAMPLES_TO_ANALYZE = [
    # modify in case you want to submit jobs for some of the samples only...
]
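# For example (hypothetical entry; any name listed here is assumed to match a
# sample label appearing in the input file names):
#   SAMPLES_TO_ANALYZE = ['Ztautau_powheg_skim']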

print analysisFilePath
print tmpFilePath

plot_regex = r"dont match anything"
skim_regex = r"tauIdEffSample_(?P<sample>\w+?)_%s_RECO_(?P<gridJob>\d*)(_(?P<gridTry>\d*))*_(?P<hash>[a-zA-Z0-9]*).root" % (jobId)

def matches_either(files):
    # Check if the file matches either of the regexes we are interested in.
    inputFilePath = inputFilePath.replace('//', '/')
    inputFilePath = inputFilePath.replace('/castor/cern.ch/castor/cern.ch/',
                                          '/castor/cern.ch/')
print(" inputFilePath = %s" % inputFilePath)

mode = None
if inputFilePath.find('/castor/') == 0:
    mode = 'castor'
elif inputFilePath.find('/store/') == 0:
    mode = 'eos'
else:
    mode = 'local'

if jobId is None:
    reg.overrideJobId(
        channel, '2011Oct30'
    )  # CV: need to overwrite this in order to match Mauro's filenames
    jobId = reg.getJobId(channel)
print(" jobId = %s" % jobId)

if mode == 'castor':
    files = [file_info for file_info in castor.nslsl(inputFilePath)]
elif mode == 'eos':
    files = [file_info for file_info in eos.lsl(inputFilePath)]
else:
    commandLine = '%s %s' % (options['executable_ls'][mode], inputFilePath)
    args = shlex.split(commandLine)
    retval = subprocess.Popen(args, stdout=subprocess.PIPE)
    #retval.wait()

    files = retval.stdout.read().split('\n')
Example #7
#!/usr/bin/env python

from TauAnalysis.Configuration.recoSampleDefinitionsAHtoElecTau_grid_cfi import recoSampleDefinitionsAHtoElecTau
from TauAnalysis.Configuration.harvestAnalysisResults_grid import harvestAnalysisResults
import TauAnalysis.Configuration.userRegistry as reg

channel = 'AHtoElecTau'
reg.overrideJobId(channel,"Run41")
analysisFilePath = reg.getAnalysisFilePath(channel)
harvestingFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)
tmpFilePath = reg.getTmpFilePath(channel)

harvestAnalysisResults(channel = channel, samples = recoSampleDefinitionsAHtoElecTau,
                       inputFilePath = analysisFilePath, outputFilePath = harvestingFilePath, jobId = jobId,
                       tmpFilePath = tmpFilePath, ana_defs = None, plot_defs = None, plotters = None,
                       use_job_report = True,
                       useCastor = False)
        useSSdataForQCD

from TauAnalysis.Configuration.makePlots2_grid import makePlots
import TauAnalysis.Configuration.userRegistry as reg

# import utility function to enable factorization
from TauAnalysis.Configuration.tools.factorizationTools import \
        enableFactorization_makeAHtoElecTauPlots_grid2

process = cms.Process('makeAHtoElecTauPlots')

channel = 'AHtoElecTau'

categories = ['wBtag', 'woBtag', 'ZeroOneJets', 'VBF', 'Boosted']

reg.overrideJobId(channel, 'Run41')

inputFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)

plotsDirectory = './plots'
outputFileNameMaker = lambda channel: 'plots%s_all.root' % channel
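# e.g. outputFileNameMaker('AHtoElecTau') yields 'plotsAHtoElecTau_all.root'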

analyzer_draw_jobs = []

for cat in categories:
    for sign in ['OS', 'SS']:
        analyzer_draw_jobs.append([
            'ahElecTauAnalyzer' + sign + '_' + cat,
            drawJobConfiguratorDict[sign + '_' + cat],
            'plotAHtoElecTauOS_' + cat + '_#PLOT#.png'
#!/usr/bin/env python

from TauAnalysis.Configuration.recoSampleDefinitionsAHtoMuTau_7TeV_grid_cfi \
        import recoSampleDefinitionsAHtoMuTau_7TeV as samples

from TauAnalysis.Configuration.submitAnalysisToGrid import submitAnalysisToGrid
import TauAnalysis.Configuration.userRegistry as reg

channel = 'AHtoMuTau'
reg.overrideJobId(channel, 'RunSVTestApr01')

configFile = 'runAHtoMuTau_cfg.py'
analysisFilePath = reg.getAnalysisFilePath(channel)
jobId = reg.getJobId(channel)

fake_rate_samples = [sample for sample in samples['SAMPLES_TO_ANALYZE']
                     if samples['RECO_SAMPLES'][sample]['enableFakeRates']]

samplesToAnalyze = [
    'ZtautauPU156bx_pythiaZ2',
    'A90',  'bbA90',
    'A100', 'bbA100',
    'A120', 'bbA120',
    'A130', 'bbA130',
    'A140', 'bbA140',
    'A160', 'bbA160',
    'A180', 'bbA180',
    'A200', 'bbA200',
    'A250', 'bbA250',
    'A300', 'bbA300',
    'A350', 'bbA350',
        useSSdataForQCD

from TauAnalysis.Configuration.makePlots2_grid import makePlots
import TauAnalysis.Configuration.userRegistry as reg

# import utility function to enable factorization
from TauAnalysis.Configuration.tools.factorizationTools import \
        enableFactorization_makeAHtoElecTauPlots_grid2

process = cms.Process('makeAHtoElecTauPlots')

channel = 'AHtoElecTau'

categories = ['wBtag','woBtag','ZeroOneJets','VBF','Boosted']

reg.overrideJobId(channel, 'Run41')

inputFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)

plotsDirectory = './plots'
outputFileNameMaker = lambda channel: 'plots%s_all.root' % channel

analyzer_draw_jobs = []

for cat in categories:
    for sign in ['OS','SS']:
        analyzer_draw_jobs.append(
            [ 'ahElecTauAnalyzer' + sign + '_' + cat,
            drawJobConfiguratorDict[sign+'_'+cat],
            'plotAHtoElecTauOS_' + cat + '_#PLOT#.png'])
#
# Apply "template" method for data-driven background estimation
# to Z --> mu + tau-jet channel
#
# Author: Christian Veelken, UC Davis
#
#--------------------------------------------------------------------------------
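
#
# A minimal illustrative sketch of the idea (not part of this configuration;
# the actual fit is performed by the TauAnalysis/BgEstimationTools machinery
# configured in this file): the distribution observed in data is modelled as a
# weighted sum of fixed signal/background "template" shapes, and the
# per-process yields are the fit parameters. Assuming plain numpy arrays as
# histograms, a toy version of such a fit could look like:
#
#   import numpy as np
#   from scipy.optimize import nnls
#
#   def fit_template_yields(data_hist, template_hists):
#       # normalise each template to unit area so the coefficients are yields
#       shapes = np.array([t / float(t.sum()) for t in template_hists]).T
#       yields, _residual = nnls(shapes, data_hist)  # non-negative least squares
#       return yields
#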

import FWCore.ParameterSet.Config as cms

from TauAnalysis.DQMTools.plotterStyleDefinitions_cfi import *
from TauAnalysis.BgEstimationTools.templateHistDefinitions_cfi import \
  drawJobTemplateHist, drawJobAnalysisHistData, drawJobAnalysisHistMC, \
  plotBgEstData, plotBgEstMC_pure, plotBgEstMC_smSum, plotAnalysisMC_pure
from TauAnalysis.BgEstimationTools.tools.drawTemplateHistConfigurator import drawTemplateHistConfigurator
import TauAnalysis.Configuration.userRegistry as reg 

reg.overrideJobId('ZtoMuTau_bgEstTemplate','Run32')

process = cms.Process('fitBgEstTemplateZtoMuTau')

process.DQMStore = cms.Service("DQMStore")

process.maxEvents = cms.untracked.PSet(            
    input = cms.untracked.int32(0)         
)

process.source = cms.Source("EmptySource")

dqmDirectoriesProcess = {
    'Ztautau'    : 'ZtautauSum',
    'Zmumu'      : 'ZmumuSum',
    'WplusJets'  : 'WplusJets_madgraph_skim',
Example #12
#!/usr/bin/env python

from TauAnalysis.Configuration.recoSampleDefinitionsZtoMuTau_7TeV_grid_cfi import recoSampleDefinitionsZtoMuTau_7TeV
from TauAnalysis.Configuration.harvestAnalysisResults_grid import harvestAnalysisResults
import TauAnalysis.Configuration.userRegistry as reg

channel = 'ZtoMuTau_bgEstTemplate'
reg.overrideJobId(channel, 'Run32')
analysisFilePath = reg.getAnalysisFilePath(channel)
harvestingFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)
tmpFilePath = reg.getTmpFilePath(channel)

harvestAnalysisResults(channel=channel,
                       samples=recoSampleDefinitionsZtoMuTau_7TeV,
                       inputFilePath=analysisFilePath,
                       outputFilePath=harvestingFilePath,
                       jobId=jobId,
                       tmpFilePath=tmpFilePath,
                       use_job_report=True,
                       useCastor=False)
    #'Zmumu'
]

signal_sources = [
    'ZtautauPU156bx'
]

samples['SAMPLES_TO_PRINT'][:] = []
samples['SAMPLES_TO_PLOT'][:] = fake_sources + signal_sources
samples['FLATTENED_SAMPLES_TO_PLOT'] = make_flattened_samples()

channel = 'AHtoMuTau'
_REGULAR_JOBID = 'Run33'
_FR_JOBID = 'Run33FR'

reg.overrideJobId(channel, 'Run33FR')

process = cms.Process('makeBgEstFakeRatePlots')

process.DQMStore = cms.Service("DQMStore")

process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(0)
)
process.source = cms.Source("EmptySource")

# Load the regular analysis workflow file
reg.overrideJobId(channel, _REGULAR_JOBID)
makePlots.makePlots(
    process, channel, samples,
    inputFilePath = reg.getHarvestingFilePath(channel),
#!/usr/bin/env python

import TauAnalysis.Configuration.userRegistry as reg

import os
import re

from TauAnalysis.Configuration.tools.harvestingLXBatch import make_harvest_scripts
from TauAnalysis.Configuration.tools.harvesting import castor_source, clean_by_crab_id

channel = 'ZtoMuTau_tauIdEff'

reg.overrideJobId(channel, '2011Aug18')

analysisFilePath = reg.getAnalysisFilePath(channel)
harvestingFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)
tmpFilePath = reg.getBatchHarvestLocation(channel)

SAMPLES_TO_ANALYZE = [
    # modify in case you want to submit jobs for some of the samples only...
]

print analysisFilePath
print tmpFilePath

plot_regex = r"dont match anything"
skim_regex = r"tauIdEffSample_(?P<sample>\w+?)_%s_RECO_(?P<gridJob>\d*)(_(?P<gridTry>\d*))*_(?P<hash>[a-zA-Z0-9]*).root" % (
    jobId)

    drawJobConfigurator_AHtoMuTau_wBtagSS,
)
from TauAnalysis.Configuration.makePlots2_grid import makePlots
import TauAnalysis.Configuration.userRegistry as reg

# import utility function to enable factorization
from TauAnalysis.Configuration.tools.factorizationTools import enableFactorization_makeAHtoMuTauPlots_grid2

process = cms.Process("makeAHtoMuTauPlots")

channel = "AHtoMuTau"

# reg.overrideJobId(channel, '2010Dec23_lxbatch')
# reg.overrideJobId(channel, 'Run33')
# reg.overrideJobId(channel, 'Run37sysTanc')
reg.overrideJobId(channel, "Run37sys")
# reg.overrideJobId(channel, 'Run42OldTaNCfinal')

inputFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)

plotsDirectory = "./plots"
outputFileNameMaker = lambda channel: "plots%s_all.root" % channel

# Check if we want to override what's in reco sample definitions
if len(sys.argv) > 2:
    mode = sys.argv[2]
    print "Using plot mode: ", mode
    plotsDirectory += os.path.join(jobId, "_" + mode)
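    # Net effect of the string concatenation above (no separator between
    # './plots' and jobId): e.g. jobId 'Run37sys' with mode 'sys' (hypothetical)
    # gives plotsDirectory = './plotsRun37sys/_sys'.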
    if not os.path.exists(plotsDirectory):
        os.makedirs(plotsDirectory)
from TauAnalysis.Configuration.tools.harvestingLXBatch import \
        make_harvest_scripts

from TauAnalysis.Configuration.tools.harvesting import \
        castor_source, crabdir_sources, \
        clean_by_crab_id

import TauAnalysis.Configuration.userRegistry as reg
import os
import glob

channel = 'AHtoMuTau'

for id in ['RunSVTestApr04']:
    reg.overrideJobId(channel, id)
    analysisFilePath = reg.getAnalysisFilePath(channel)
    harvestingFilePath = reg.getHarvestingFilePath(channel)
    jobId = reg.getJobId(channel)
    tmpFilePath = reg.getBatchHarvestLocation(channel)

    # Regexes that define how to parse the filename of a file on castor
    plot_regex = r"plots_%s_(?P<sample>\w+?)_%s_(?P<gridJob>\d*)(_(?P<gridTry>\d*))*_(?P<hash>[a-zA-Z0-9]*).root" % (channel, jobId)
    skim_regex = r"final_events_%s_(?P<sample>\w+?)_%s_(?P<gridJob>\d*)(_(?P<gridTry>\d*))*_(?P<hash>[a-zA-Z0-9]*).root" % (channel, jobId)

    def local_copy_mapper(sample):
        " Define where we want to copy the final output locally "
        return os.path.join(
            #'/tmp/friis/Run33SYS',
            #'/tmp/friis/Run32',
            harvestingFilePath,
import FWCore.ParameterSet.Config as cms

from TauAnalysis.Configuration.recoSampleDefinitionsZtoMuTau_7TeV_grid_cfi import recoSampleDefinitionsZtoMuTau_7TeV
from TauAnalysis.Configuration.plotZtoMuTau_drawJobs_cfi import *
from TauAnalysis.Configuration.makePlots2_grid import makePlots
from TauAnalysis.Configuration.userRegistry import getHarvestingFilePath, overrideJobId, getJobId

# import utility function to enable factorization
from TauAnalysis.Configuration.tools.factorizationTools import enableFactorization_makeZtoMuTauPlots_grid2

process = cms.Process('makeZtoMuTauPlots')

process.load("TauAnalysis.Configuration.dumpZtoMuTau_grid_cff")

channel = 'ZtoMuTau'
overrideJobId(channel, '2011Apr09_HPSloose')
inputFilePath = getHarvestingFilePath(channel)
jobId = getJobId(channel)

makePlots(
    process,
    channel=channel,
    samples=recoSampleDefinitionsZtoMuTau_7TeV,
    inputFilePath=inputFilePath,
    jobId=jobId,
    analyzer_drawJobConfigurator_indOutputFileName_sets=[
        [
            "zMuTauAnalyzerOS", drawJobConfigurator_ZtoMuTauOS,
            "plotZtoMuTauOS_#PLOT#.pdf"
        ],
        [
Example #18
#
# Apply "template" method for data-driven background estimation
# to Z --> mu + tau-jet channel
#
# Author: Christian Veelken, UC Davis
#
#--------------------------------------------------------------------------------

import FWCore.ParameterSet.Config as cms

from TauAnalysis.DQMTools.plotterStyleDefinitions_cfi import *
from TauAnalysis.BgEstimationTools.templateHistDefinitions_cfi import \
  drawJobTemplateHist, drawJobAnalysisHistData, drawJobAnalysisHistMC, \
  plotBgEstData, plotBgEstMC_pure, plotBgEstMC_smSum, plotAnalysisMC_pure
from TauAnalysis.BgEstimationTools.tools.drawTemplateHistConfigurator import drawTemplateHistConfigurator
import TauAnalysis.Configuration.userRegistry as reg

reg.overrideJobId('ZtoMuTau_bgEstTemplate', 'Run32')

process = cms.Process('fitBgEstTemplateZtoMuTau')

process.DQMStore = cms.Service("DQMStore")

process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(0))

process.source = cms.Source("EmptySource")

dqmDirectoriesProcess = {
    'Ztautau': 'ZtautauSum',
    'Zmumu': 'ZmumuSum',
    'WplusJets': 'WplusJets_madgraph_skim',
    'QCD': 'qcdSum',
    'TTplusJets': 'TTplusJets_madgraph_skim',
#!/usr/bin/env python

from TauAnalysis.Configuration.recoSampleDefinitionsAHtoElecTau_grid_cfi import recoSampleDefinitionsAHtoElecTau
from TauAnalysis.Configuration.submitAnalysisToGrid import submitAnalysisToGrid
import TauAnalysis.Configuration.userRegistry as reg

channel = 'AHtoElecTau'
reg.overrideJobId(channel,"Run41")
jobId = reg.getJobId(channel)
configFile = 'runAHtoElecTau_cfg.py'
analysisFilePath = reg.getAnalysisFilePath(channel)

samplesToAnalyze = [
    #'Ztautau_embedded_Run2011B_PR_v1_skim',
    #'Ztautau_embedded_Run2011A_03OctReReco_skim',
    #'Ztautau_embedded_Run2011A_05AugReReco_skim',
    #'Ztautau_embedded_Run2011A_May10ReReco_skim',
    #'Ztautau_embedded_Run2011A_PR_v4_skim'
    #'data_TauPlusX_Run2011A_May10ReReco_skim',
    #'data_TauPlusX_Run2011A_PR_v4_skim',
    #'data_TauPlusX_Run2011A_05AugReReco_skim',
    #'data_TauPlusX_Run2011A_03OctReReco_skim',
    #'data_TauPlusX_Run2011B_PR_v1_skim',
    #'Ztautau_powheg_skim',
    #'Zee_powheg_skim',
    #'WplusJets_madgraph_skim',
    #'TTplusJets_madgraph_skim',
    #'WW_skim','WZ_skim',
    #'WWtoLL_skim',
    #'WZto3LNu_skim',
    #'DYtautauM10to20_powheg_skim',
#!/usr/bin/env python
from TauAnalysis.Configuration.recoSampleDefinitionsAHtoMuTau_7TeV_grid_cfi \
        import recoSampleDefinitionsAHtoMuTau_7TeV as samples

import TauAnalysis.Configuration.submitAnalysisToLXBatch as submit
import TauAnalysis.Configuration.userRegistry as reg
import TauAnalysis.Configuration.tools.castor as castor
import os

channel = 'AHtoMuTau'
configFile = 'runAHtoMuTau_cfg.py'

for jobId in ['RunSVTestApr04']:
    reg.overrideJobId(channel, jobId)

    powheg_samples = [sample for sample in samples['SAMPLES_TO_ANALYZE']
                      if sample.find('POWHEG') != -1 ]

    fake_rate_samples = [sample for sample in samples['SAMPLES_TO_ANALYZE']
                         if samples['RECO_SAMPLES'][sample]['enableFakeRates']]

    factorized_samples = [sample for sample in samples['SAMPLES_TO_ANALYZE']
                         if samples['RECO_SAMPLES'][sample]['factorize']]

    samples_for_mike = ['WplusJets_madgraph',
                        'PPmuXptGt20Mu10',
                        'PPmuXptGt20Mu15',
                        'data_Mu_Run2010A_Nov4ReReco',
                        'data_Mu_Run2010B_Nov4ReReco' ]

    # If this is a list, only the items in the list will be analyzed.
    # get file path from TauAnalysis/Configuration/python/userRegistry.py
    inputFilePath = '/castor/cern.ch/' + reg.getAnalysisFilePath(channel)
    inputFilePath = inputFilePath.replace('//', '/')
    inputFilePath = inputFilePath.replace('/castor/cern.ch/castor/cern.ch/', '/castor/cern.ch/')
print(" inputFilePath = %s" % inputFilePath)

mode = None
if inputFilePath.find('/castor/') == 0:
    mode = 'castor'
elif inputFilePath.find('/store/') == 0:
    mode = 'eos'
else:
    mode = 'local'

if jobId is None:
    reg.overrideJobId(channel, '2011Oct30') # CV: need to overwrite this in order to match Mauro's filenames
    jobId = reg.getJobId(channel)
print(" jobId = %s" % jobId)

if mode == 'castor':
    files = [ file_info for file_info in castor.nslsl(inputFilePath) ]
elif mode == 'eos':
    files = [ file_info for file_info in eos.lsl(inputFilePath) ]
else:
    commandLine = '%s %s' % (options['executable_ls'][mode], inputFilePath)
    args = shlex.split(commandLine)
    retval = subprocess.Popen(args, stdout = subprocess.PIPE)
    #retval.wait()

    files = retval.stdout.read().split('\n')
    #print(" files = %s" % files)
Example #22
#!/usr/bin/env python
from TauAnalysis.Configuration.recoSampleDefinitionsAHtoMuTau_7TeV_grid_cfi \
        import recoSampleDefinitionsAHtoMuTau_7TeV as samples
from TauAnalysis.Configuration.submitAnalysisToGrid import submitAnalysisToGrid
from TauAnalysis.Configuration.userRegistry import getAnalysisFilePath, \
        getJobId, getPickEventsPath, getHarvestingFilePath, overrideJobId
import os
import copy
import glob

overrideJobId('AHtoMuTau', 'RunOnOursJan16')
#overrideJobId('AHtoMuTau', 'RunOnWeirdFile')

channel = 'AHtoMuTau'
configFile = 'runAHtoMuTau_cfg.py'
analysisFilePath = getAnalysisFilePath(channel)
jobId = getJobId(channel)

samplesToAnalyze = [
    #'data_Mu_Run2010A_Nov4ReReco',
    'data_Mu_Run2010B_Nov4ReReco'
]

add_prefix = lambda x: 'file:' + x

pickEventsMap = {
    'RunOnMikesJan13' : {
        'data_Mu_Run2010A_Nov4ReReco' : map(add_prefix,
                                            glob.glob('pickevents/2010A/*root')),
        'data_Mu_Run2010B_Nov4ReReco' : map(add_prefix,
                                            glob.glob('pickevents/2010B/*root')),
Example #23
from TauAnalysis.Configuration.makePlots2_grid import makePlots
import TauAnalysis.Configuration.userRegistry as reg

# import utility function to enable factorization
from TauAnalysis.Configuration.tools.factorizationTools import \
        enableFactorization_makeAHtoMuTauPlots_grid2

process = cms.Process('makeAHtoMuTauPlots')

channel = 'AHtoMuTau'

#reg.overrideJobId(channel, '2010Dec23_lxbatch')
#reg.overrideJobId(channel, 'Run33')
#reg.overrideJobId(channel, 'Run37sysTanc')
reg.overrideJobId(channel, 'Run37sys')
#reg.overrideJobId(channel, 'Run42OldTaNCfinal')

inputFilePath = reg.getHarvestingFilePath(channel)
jobId = reg.getJobId(channel)

plotsDirectory = './plots'
outputFileNameMaker = lambda channel: 'plots%s_all.root' % channel

# Check if we want to override what's in reco sample definitions
if len(sys.argv) > 2:
    mode = sys.argv[2]
    print "Using plot mode: ", mode
    plotsDirectory += os.path.join(jobId, "_" + mode)
    if not os.path.exists(plotsDirectory):
        os.makedirs(plotsDirectory)