Example 1
def remove_decay(slha_file, pid):
    """
    Rewrite the SLHA file, removing the DECAY block for the given pid,
    and remove the corresponding decay entry from the MSSM.model file.
    """

    # rewrite slha file
    import shutil
    shutil.move(slha_file, slha_file + '.org')

    update = open(slha_file, 'w')

    skip = False
    for l in open(slha_file + '.org'):
        if l.startswith('DECAY   %s ' % pid):
            skip = True
            continue

        if l.startswith('DECAY '):
            skip = False

        if not skip:
            update.write(l)

    update.close()

    # remove also from MSSM.model file

    # rewrite MSSM.model file
    from PyJobTransformsCore.trfutil import get_files
    get_files('MSSM.model', keepDir=False, errorIfNotFound=True)

    import shutil
    shutil.move('MSSM.model', 'MSSM.model.org')

    update = open('MSSM.model', 'w')

    for l in open('MSSM.model.org'):
        if l.startswith(
                'insert /Herwig/Shower/ShowerHandler:DecayInShower 0  %s ' %
                pid):
            continue
        update.write(l)

    update.close()
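
A minimal usage sketch (not part of the original file): the SLHA file name and PID below are hypothetical, and the call assumes MSSM.model can be located by get_files() in the usual job-options environment.

# Hypothetical example: strip the gravitino (PID 1000039) DECAY block from a
# local SLHA file and drop its DecayInShower line from MSSM.model.
remove_decay('simplified_model_point.slha', '1000039')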
Example 2
def remove_decay(slha_file, pid):
  """
  Rewrite the SLHA file, removing the DECAY block for the given pid,
  and remove the corresponding decay entry from the MSSM.model file.
  """

  # rewrite slha file
  import shutil
  shutil.move(slha_file, slha_file+'.org')
  
  update = open(slha_file, 'w')

  skip = False
  for l in open(slha_file+'.org'):
    if l.startswith('DECAY   %s '%pid):
      skip = True
      continue

    if l.startswith('DECAY '):
      skip = False
    
    if not skip:
      update.write(l)
    
  update.close()

  # remove also from MSSM.model file

  # rewrite MSSM.model file
  from PyJobTransformsCore.trfutil import get_files
  get_files( 'MSSM.model', keepDir=False, errorIfNotFound=True )

  import shutil
  shutil.move('MSSM.model', 'MSSM.model.org')
  
  update = open('MSSM.model', 'w')

  for l in open('MSSM.model.org'):
    if l.startswith('insert /Herwig/Shower/ShowerHandler:DecayInShower 0  %s '%pid):
      continue
    update.write(l)
    
  update.close()
Example 3
# do not run MadGraph if config only is requested
if not opts.config_only:

	# writing proc card for MG
	fcard = open('proc_card_mg5.dat','w')
	fcard.write("""
import model iDM
define p = g u c d s u~ c~ d~ s~
generate p p > j chi1 chi2 /hs, chi2 > chi1 l+ l- /hs
output -f
""" )
	fcard.close()

	# getting run and param cards
	from PyJobTransformsCore.trfutil import get_files
	get_files( rcard, keepDir=False, errorIfNotFound=True )
	get_files( pcard, keepDir=False, errorIfNotFound=True )

	# generating events in MG
	process_dir = new_process()
	generate(run_card_loc=rcard,param_card_loc=pcard,mode=0,njobs=1,run_name=rname,proc_dir=process_dir)

	# hacking LHE file
	unzip1 = subprocess.Popen(['gunzip',process_dir+'/Events/'+rname+'/unweighted_events.lhe.gz'])
	unzip1.wait()
	oldlhe = open(process_dir+'/Events/'+rname+'/unweighted_events.lhe','r')
	newlhe = open(process_dir+'/Events/'+rname+'/unweighted_events2.lhe','w')
	init = True
	for line in oldlhe:
		if init:
			# chi1, chi2: removing one zero in pdgID
Example 4
        raise RuntimeError("inputGeneratorFile arg specified for %s, but generators %s do not require an input file" %
                           (runArgs.jobConfig, str(gennames)))
    if evgenConfig.inputfilecheck:
        raise RuntimeError("evgenConfig.inputfilecheck specified in %s, but generators %s do not require an input file" %
                           (runArgs.jobConfig, str(gennames)))

## Check conf files, as above but for a different command line arg, and with omission allowed
if hasattr(runArgs, "inputGenConfFile") and runArgs.inputGenConfFile != "NONE":
    if evgenConfig.inputconfcheck and not re.search(evgenConfig.inputconfcheck, runArgs.inputGenConfFile):
        raise RuntimeError("inputGenConfFile=%s is incompatible with inputconfcheck (%s) in %s" %
                           (runArgs.inputGenConfFile, evgenConfig.inputconfcheck, runArgs.jobConfig))

## Do the aux-file copying
if evgenConfig.auxfiles:
    from PyJobTransformsCore.trfutil import get_files
    get_files(evgenConfig.auxfiles, keepDir=False, errorIfNotFound=True)


##==============================================================
## Write out metadata for reporting to AMI
##==============================================================

def _checkattr(attr, required=False):
    if not hasattr(evgenConfig, attr) or not getattr(evgenConfig, attr):
        msg = "evgenConfig attribute '%s' not found." % attr
        if required:
            raise RuntimeError("Required " + msg)
        return False
    return True

if _checkattr("description", required=True):
Example 5
def buildHerwigppCommands(sparticle_list, slha_file, mode='SingleParticleInclusive', earlyCopy=False):
    header = """
## Generate the process in MSSM equivalent to 2-parton -> 2-sparticle processes in Fortran Herwig
## Read the MSSM model details
read MSSM.model
cd /Herwig/NewPhysics
"""

    process_mode = """
## Establish how processes are selected
## SingleParticleInclusive means that at least one of the
## outgoing particles must be in the HPConstructor:Outgoing.
## TwoParticleInclusive means that both of the outgoing particles
## must be in the HPConstructor:Outgoing.
## Exclusive means that only two outgoing particles can be supplied
## in HPConstructor:Outgoing, and only that pair will be produced.
##
## E.g. if the outgoing particles were the ~g and ~u_L, then:
## SingleParticleInclusive produces any process involving ~g or ~u_L,
## including ~g/~g, ~g/~u_L, ~u_L/~d_L, ~g/~chi_10 etc.
## TwoParticleInclusive produces processes involving ~g and ~u_L only,
## i.e. ~g/~g, ~g/~u_L, ~u_L/~u_L.
## Exclusive produces only one process,
## i.e. ~g/~u_L.
##
set HPConstructor:Processes %(mode)s
""" % {'mode':mode}

    incoming_partons = """
#incoming parton
insert HPConstructor:Incoming 0 /Herwig/Particles/g
insert HPConstructor:Incoming 1 /Herwig/Particles/u
insert HPConstructor:Incoming 2 /Herwig/Particles/ubar
insert HPConstructor:Incoming 3 /Herwig/Particles/d
insert HPConstructor:Incoming 4 /Herwig/Particles/dbar
insert HPConstructor:Incoming 5 /Herwig/Particles/s
insert HPConstructor:Incoming 6 /Herwig/Particles/sbar
insert HPConstructor:Incoming 7 /Herwig/Particles/c
insert HPConstructor:Incoming 8 /Herwig/Particles/cbar
insert HPConstructor:Incoming 9 /Herwig/Particles/b
insert HPConstructor:Incoming 10 /Herwig/Particles/bbar
"""
    sparticle_flatlist = []
    for sparty in sparticle_list:
        try:
            sparticle_flatlist += sparticles[sparty]
        except KeyError:
            sparticle_flatlist += [sparty]
    ## A slightly clunky way of removing duplicates
    helperdict = {}
    for sparticle in sparticle_flatlist:
        helperdict[sparticle] = 1
    sparticle_flatlist = helperdict.keys()

    outgoing_sparticles = """
#outgoing sparticles
"""
    for index,sparticle in enumerate(sparticle_flatlist):
        outgoing_sparticles += addOutgoingSparticle(index,sparticle)
        outgoing_sparticles += '\n'

    footer = """
## Read the SUSY spectrum file (SLHA format)
setup MSSM/Model %(slha_file)s
""" % {'slha_file':slha_file}

    ## SLHA files are copied later on to the working directory via the auxfiles mechanism
    evgenConfig.auxfiles += [ slha_file, 'MSSM.model' ]

    ## use earlyCopy in case this should be done now to tweak/read the SLHA file
    if earlyCopy:
        from PyJobTransformsCore.trfutil import get_files
        get_files( slha_file, keepDir=False, errorIfNotFound=True )

    cmd = header + process_mode + incoming_partons + outgoing_sparticles + footer
    return cmd
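
A minimal usage sketch (not part of the original job options): the sparticle key and SLHA file name below are hypothetical, and the function relies on the module-level sparticles dictionary, addOutgoingSparticle() and evgenConfig that are defined elsewhere in the same job options.

# Hypothetical example: build the Herwig++ command block for inclusive
# gluino production from a local SLHA spectrum file.
cmd = buildHerwigppCommands(['gluino'], 'simplified_model_point.slha',
                            mode='SingleParticleInclusive')
# The returned string would then be appended, line by line, to the Herwig++
# generator's command list elsewhere in the job options.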
Example 6
import model mssm-full
"""+helpful_definitions()+"""
# Specify process(es) to run

"""+process+"""
# Output processes to MadEvent directory
output -f
"""
thedir = new_process(card_loc=full_proc)
if 1==thedir:
    mglog.error('Error in process generation!')
mglog.info('Using process directory '+thedir)

# Grab the param card and move the new masses into place
from PyJobTransformsCore.trfutil import get_files
get_files(slha_file, keepDir=False, errorIfNotFound=True)

include ( 'MC15JobOptions/SUSYMetadata.py' )
(m_neutralino, m_gravitino, m_chargino1, m_neutralino2) = mass_extract(slha_file, ['1000022', '1000039', '1000024', '1000023'])
mglog.info('chargino1 mass = '+m_chargino1+' neutralino2 mass = '+m_neutralino2)

build_param_card(param_card_old=slha_file,param_card_new='param_card.dat')

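# Matching scale: 0.125 * (|m_chargino1| + |m_neutralino2|), i.e. a quarter of the average of the two masses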
qcut=(abs(float(m_chargino1))+abs(float(m_neutralino2)))*0.125

xqcut=-1
if njets>0:
    xqcut=qcut
# Grab the run card and move it into place
extras = { 'ktdurham':xqcut , 'lhe_version':'2.0' , 'cut_decays':'F' , 'pdlabel':pdlabel , 'lhaid':lhaid , 'drjj':0.0 }
build_run_card(run_card_old=get_default_runcard(),run_card_new='run_card.dat',xqcut=qcut,
Example 7
def buildHerwigppCommands(sparticle_list,
                          slha_file,
                          mode='SingleParticleInclusive',
                          earlyCopy=False):
    header = """
## Generate the process in MSSM equivalent to 2-parton -> 2-sparticle processes in Fortran Herwig
## Read the MSSM model details
read MSSM.model
cd /Herwig/NewPhysics
"""

    process_mode = """
## Establish how processes are selected
## SingleParticleInclusive means that at least one of the
## outgoing particles must be in the HPConstructor:Outgoing.
## TwoParticleInclusive means that both of the outgoing particles
## must be in the HPConstructor:Outgoing.
## Exclusive means that only two outgoing particles can be supplied
## in HPConstructor:Outgoing, and only that pair will be produced.
##
## E.g. if the outgoing particles were the ~g and ~u_L, then:
## SingleParticleInclusive produces any process involving ~g or ~u_L,
## including ~g/~g, ~g/~u_L, ~u_L/~d_L, ~g/~chi_10 etc.
## TwoParticleInclusive produces processes involving ~g and ~u_L only,
## i.e. ~g/~g, ~g/~u_L, ~u_L/~u_L.
## Exclusive produces only one process,
## i.e. ~g/~u_L.
##
set HPConstructor:Processes %(mode)s
""" % {
        'mode': mode
    }

    incoming_partons = """
#incoming parton
insert HPConstructor:Incoming 0 /Herwig/Particles/g
insert HPConstructor:Incoming 1 /Herwig/Particles/u
insert HPConstructor:Incoming 2 /Herwig/Particles/ubar
insert HPConstructor:Incoming 3 /Herwig/Particles/d
insert HPConstructor:Incoming 4 /Herwig/Particles/dbar
insert HPConstructor:Incoming 5 /Herwig/Particles/s
insert HPConstructor:Incoming 6 /Herwig/Particles/sbar
insert HPConstructor:Incoming 7 /Herwig/Particles/c
insert HPConstructor:Incoming 8 /Herwig/Particles/cbar
insert HPConstructor:Incoming 9 /Herwig/Particles/b
insert HPConstructor:Incoming 10 /Herwig/Particles/bbar
"""
    sparticle_flatlist = []
    for sparty in sparticle_list:
        try:
            sparticle_flatlist += sparticles[sparty]
        except KeyError:
            sparticle_flatlist += [sparty]
    ## A slightly clunky way of removing duplicates
    helperdict = {}
    for sparticle in sparticle_flatlist:
        helperdict[sparticle] = 1
    sparticle_flatlist = helperdict.keys()

    outgoing_sparticles = """
#outgoing sparticles
"""
    for index, sparticle in enumerate(sparticle_flatlist):
        outgoing_sparticles += addOutgoingSparticle(index, sparticle)
        outgoing_sparticles += '\n'

    footer = """
## Read the SUSY spectrum file (SLHA format)
setup MSSM/Model %(slha_file)s
""" % {
        'slha_file': slha_file
    }

    ## SLHA files are copied later on to the working directory via the auxfiles mechanism
    evgenConfig.auxfiles += [slha_file, 'MSSM.model']

    ## use earlyCopy in case this should be done now to tweak/read the SLHA file
    if earlyCopy:
        from PyJobTransformsCore.trfutil import get_files
        get_files(slha_file, keepDir=False, errorIfNotFound=True)

    cmd = header + process_mode + incoming_partons + outgoing_sparticles + footer
    return cmd
Example 8
            'drjj':0.0 }

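# Derive the run name from the job-option file name: the slice runs from "NUHM2" up to "m12" plus four trailing characters (e.g. the m12 value)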
run_name=runArgs.jobConfig[0][runArgs.jobConfig[0].find("NUHM2"):runArgs.jobConfig[0].find("m12")+7]

param_card_name = "susy.{0}.".format(runArgs.runNumber)+run_name+".slha"

run_card_name = build_run_card(run_card_old=get_default_runcard(),xqcut=runArgs.qcut,
                                       nevts=nevts*evt_multiplier,
                                       rand_seed=runArgs.randomSeed,beamEnergy=beamEnergy,extras=extras)



print_cards()

from PyJobTransformsCore.trfutil import get_files
get_files(param_card_name, keepDir=True, errorIfNotFound=True)

# Generate the new process
process_dir = new_process(card_loc='proc_card_mg5.dat')
generate(run_card_loc=run_card_name,param_card_loc=param_card_name,mode=0,njobs=1,
         run_name=run_name,proc_dir=process_dir)


# Arrange the output
runArgs.inputGeneratorFile="madgraph.{0}.Madgraph_{1}._00001.events.tar.gz".format(runArgs.runNumber,runArgs.jobConfig[0][runArgs.jobConfig[0].find("A14N23LO"):runArgs.jobConfig[0].find(".py")])

skip_events = 0
if hasattr(runArgs,'skipEvents'): skip_events = runArgs.skipEvents
arrange_output(run_name=run_name,proc_dir=process_dir,
                       outputDS=runArgs.inputGeneratorFile)