def _set_parameter_slha(self, idx, parameter):

    """ Set parameter in SUSYHIT input file. """

    LGR.debug('Set index %s to %s in SLHA.', idx, parameter)

    # Set regular expressions in SLHA file
    slha_in = '^ {} '.format(idx)
    slha_out = ' {} '.format(idx)

    # Read file
    with open('{}/{}.in'.format(self._dir_susyhit,
                                self._get_susyhit_filename()), 'r') as f_in:
        lines = f_in.readlines()

    # Make changes and overwrite file
    with open('{}/{}.in'.format(self._dir_susyhit,
                                self._get_susyhit_filename()), 'w') as f_in:
        has_replacement = False
        for line in lines:
            t_new = subn('{}.*'.format(slha_in),
                         '{}{}'.format(slha_out, parameter), line)
            if t_new[1] > 0:
                LGR.debug('Replaced "%s" with "%s" in input file.',
                          line.rstrip(), t_new[0].rstrip())
                has_replacement = True
            f_in.write(t_new[0])

    if not has_replacement:
        raise RuntimeError('No replacement in SUSYHIT input file has '
                           'been done.')

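# Minimal standalone sketch of the substitution performed above, using the
# same regex conventions as _set_parameter_slha but on an in-memory string.
# The index 23 and the value 350.0 are hypothetical example values, not
# taken from an actual scan.
def _example_slha_substitution():
    """ Illustrate how a single SLHA input line is rewritten. """
    from re import subn
    line = ' 23     1.00000000e+02   # mu parameter\n'
    idx, parameter = 23, 350.0
    slha_in = '^ {} '.format(idx)
    slha_out = ' {} '.format(idx)
    # Everything after the index is replaced, including the trailing comment
    new_line, n_subs = subn('{}.*'.format(slha_in),
                            '{}{}'.format(slha_out, parameter), line)
    # new_line is ' 23 350.0\n' (the newline survives) and n_subs is 1
    return new_line, n_subs
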
def set_axis(self, axis_x, axis_y):

    """ Set axis labels. """

    # Set the main label
    axis_x = self._get_axis_label(axis_x)
    axis_y = self._get_axis_label(axis_y)

    ## Add all additional labels from the dictionaries
    #for key, value in axis_x_add.iteritems():
    #    if value == 0:
    #        axis_x += ' = {}'.format(self._get_axis_label(key))
    #    elif value > 0:
    #        axis_x += ' = {} - {}'.format(self._get_axis_label(key), value)
    #    else:
    #        axis_x += ' = {} + {}'.format(self._get_axis_label(key),
    #                                      abs(value))
    #for key, value in axis_y_add.iteritems():
    #    if value == 0:
    #        axis_y += ' = {}'.format(self._get_axis_label(key))
    #    elif value > 0:
    #        axis_y += ' = {} - {}'.format(self._get_axis_label(key), value)
    #    else:
    #        axis_y += ' = {} + {}'.format(self._get_axis_label(key),
    #                                      abs(value))

    ## Add unit
    #axis_x += ' [GeV]'
    #axis_y += ' [GeV]'

    self.set_axis_x(axis_x)
    self.set_axis_y(axis_y)

    LGR.debug('Set x axis to %s', axis_x)
    LGR.debug('Set y axis to %s', axis_y)

def _get_m(self, id_particle):

    """ Return mass of particle with ID id_particle. """

    with open('{}/susyhit_slha.out'.format(self._dir_susyhit),
              'r') as f_susyhit:
        f_susyhit_mass = dropwhile(lambda l: not search('^BLOCK MASS', l),
                                   f_susyhit)
        for line in f_susyhit_mass:
            if search('^ *{}'.format(id_particle), line):
                mass = float((line.split())[1])
                LGR.debug('Found mass %s for particle %s from line %s.',
                          mass, id_particle, line.rstrip())
                return abs(mass)

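# Standalone sketch of the MASS-block lookup above, run on an in-memory SLHA
# snippet instead of susyhit_slha.out; the PDG ID 1000022 and the mass values
# are illustrative only.
def _example_mass_lookup():
    """ Illustrate how a mass is read from a BLOCK MASS section. """
    from itertools import dropwhile
    from re import search
    snippet = [
        'BLOCK SMINPUTS\n',
        '         5     4.25000000e+00   # mb(mb)^MSbar\n',
        'BLOCK MASS   # Mass spectrum\n',
        '   1000021     1.20000000e+03   # ~g\n',
        '   1000022    -9.70000000e+01   # ~chi_10\n',
    ]
    id_particle = 1000022
    lines = dropwhile(lambda l: not search('^BLOCK MASS', l), snippet)
    for line in lines:
        if search('^ *{}'.format(id_particle), line):
            # abs() because SUSY mass eigenvalues can come out negative
            return abs(float(line.split()[1]))
    return None
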
def _run_external(self, name, cmd,  # pylint: disable=no-self-use
                  check_for_error=True):

    """ Run external software, such as SUSYHIT or SModelS. """

    # If logging level is not set to debug, suppress output
    LGR.debug('Output from %s:', name)
    if LGR.getEffectiveLevel() > 10:
        cmd += ' &> /dev/null'

    # Note the inverted logic: system() returns the shell exit status, so a
    # non-zero (truthy) value means the command failed
    if system(cmd) and check_for_error:
        raise RuntimeError('Could not run {}.'.format(name))

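# Quick illustration of the inverted logic noted above, assuming a Unix-like
# shell; the command 'false' is just a stand-in for a failing external tool.
def _example_exit_status():
    """ Show that a non-zero exit status from system() is truthy. """
    from os import system
    status = system('false')  # non-zero wait status, since 'false' fails
    return bool(status)       # True, i.e. "the command failed"
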
def get_xs(self, com, path):

    """ Get cross section from SLHA. """

    # Regex to be searched in SLHA file
    if com == 13:
        regex_xs = r'XSECTION *1\.30E\+04 *2212 2212 2'
    elif com == 8:
        regex_xs = r'XSECTION *8\.00E\+03 *2212 2212 2'
    else:
        raise ValueError('Only cross-sections of 8 or 13 TeV are allowed.')

    with open('{}/susyhit_slha.out'.format(path), 'r') as f_susyhit:
        found_xsec = False
        for line in f_susyhit:
            if found_xsec:
                # Multiply by 1000. to get cross section in fb
                self._xs.append(1000. * float(line.split()[6]))
                found_xsec = False
            # If the xs matches, set bool, next line will have the xs
            if search(regex_xs, line):
                LGR.debug(line.rstrip())
                self._p1.append(int(line.split()[5]))
                self._p2.append(int(line.split()[6]))
                found_xsec = True

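# Standalone sketch of the two-line XSECTION parsing above, using a
# hypothetical SLHA cross-section entry of the kind produced by the SModelS
# xseccomputer; the PDG IDs, version string and the value 2.056E-03 pb are
# made up for the example.
def _example_xs_parsing():
    """ Illustrate reading one 13 TeV cross section in fb. """
    from re import search
    snippet = [
        'XSECTION  1.30E+04  2212 2212 2 1000021 1000021 # 13 TeV\n',
        '  0  2  0  0  0  0  2.056E-03 SModelS 1.1.1\n',
    ]
    regex_xs = r'XSECTION *1\.30E\+04 *2212 2212 2'
    xs_fb = []
    found_xsec = False
    for line in snippet:
        if found_xsec:
            # Convert pb to fb
            xs_fb.append(1000. * float(line.split()[6]))
            found_xsec = False
        if search(regex_xs, line):
            found_xsec = True
    return xs_fb  # roughly [2.056]
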
def _get_br_1leg(self, id_parent):

    """ Get branching ratio into particles for one particle. """

    br_leptons_1leg = []
    br_jets_1leg = []
    br_photons_1leg = []

    LGR.debug('Branching ratios for particle %s:', id_parent)

    # Loop over all decay modes
    for list_decay in self._prob_tree(id_parent):
        LGR.debug('list_decay: %s', list_decay)
        br_single = list_decay[0]

        # Check if we have unknown particles in the final state
        has_unknowns = False

        # Calculate number of particles for specific decay mode
        no_leptons = 0
        no_jets = 0
        no_photons = 0
        for id_particle in list_decay[1]:
            if self._is_unknown(id_particle):
                has_unknowns = True
            no_leptons += self._is_lepton(id_particle)
            no_jets += self._is_jet(id_particle)
            no_photons += self._is_photon(id_particle)

        # Skip decay modes with unknown particles in the final state; as a
        # consequence the total branching ratio will not exactly add up to 1,
        # since rare decay modes are missing and floating-point rounding
        # errors accumulate
        if has_unknowns:
            continue

        # Make sure lists are long enough
        self._expand_list(br_leptons_1leg, no_leptons)
        self._expand_list(br_jets_1leg, no_jets)
        self._expand_list(br_photons_1leg, no_photons)

        # Fill branching ratio
        br_leptons_1leg[no_leptons] += br_single
        br_jets_1leg[no_jets] += br_single
        br_photons_1leg[no_photons] += br_single

    # If all lists are empty, we don't get any particles (LSP production)
    if not (br_leptons_1leg and br_jets_1leg and br_photons_1leg):
        br_leptons_1leg = [1]
        br_jets_1leg = [1]
        br_photons_1leg = [1]

    LGR.debug('Branching ratios into leptons: %s', br_leptons_1leg)
    LGR.debug('Branching ratios into jets: %s', br_jets_1leg)
    LGR.debug('Branching ratios into photons: %s', br_photons_1leg)
    LGR.debug('Total branching ratio: %s', sum(br_leptons_1leg))

    return br_leptons_1leg, br_jets_1leg, br_photons_1leg

def _get_brs(self, id_parent_1, id_parent_2=-1., weight=1.):

    """ Get probabilities for branching into one, two, ... leptons, jets and
    photons. This includes the combinatorics from 2 parent particles. The
    probabilities are weighted by weight. """

    # If id_parent_2 is not set, set it to same value as id_parent_1
    if id_parent_2 < 0:
        id_parent_2 = id_parent_1

    br_leptons_1leg_1, br_jets_1leg_1, br_photons_1leg_1 = \
        self._get_br_1leg(id_parent_1)

    # Don't call self._get_br_1leg() again if both parent particles are the
    # same
    if id_parent_2 == id_parent_1:
        br_leptons_1leg_2 = br_leptons_1leg_1
        br_jets_1leg_2 = br_jets_1leg_1
        br_photons_1leg_2 = br_photons_1leg_1
    else:
        br_leptons_1leg_2, br_jets_1leg_2, br_photons_1leg_2 = \
            self._get_br_1leg(id_parent_2)

    # Create lists with the right length
    br_leptons_2leg = [0] * (len(br_leptons_1leg_1) +
                             len(br_leptons_1leg_2) - 1)
    br_jets_2leg = [0] * (len(br_jets_1leg_1) + len(br_jets_1leg_2) - 1)
    br_photons_2leg = [0] * (len(br_photons_1leg_1) +
                             len(br_photons_1leg_2) - 1)

    # Combinatorics going from one leg to two legs
    for idx_l1, br_l1 in enumerate(br_leptons_1leg_1):
        for idx_l2, br_l2 in enumerate(br_leptons_1leg_2):
            br_leptons_2leg[idx_l1 + idx_l2] += br_l1 * br_l2
    for idx_j1, br_j1 in enumerate(br_jets_1leg_1):
        for idx_j2, br_j2 in enumerate(br_jets_1leg_2):
            br_jets_2leg[idx_j1 + idx_j2] += br_j1 * br_j2
    for idx_p1, br_p1 in enumerate(br_photons_1leg_1):
        for idx_p2, br_p2 in enumerate(br_photons_1leg_2):
            br_photons_2leg[idx_p1 + idx_p2] += br_p1 * br_p2

    # If total branching ratio (for both legs) is under a certain
    # threshold, throw a warning; this can have many reasons, like unknown
    # (ignored) particle decays, or thresholds to limit computing time
    #if sum(br_leptons_2leg) < .9:
    #    LGR.warning('The defined threshold led to a total branching '
    #                'ratio of %s. You might want to consider lowering the '
    #                'threshold.', sum(br_leptons_2leg))

    LGR.debug('Parent particle (1st leg): %s', id_parent_1)
    LGR.debug('Parent particle (2nd leg): %s', id_parent_2)
    LGR.debug('Branching ratios into leptons (1st leg): %s',
              br_leptons_1leg_1)
    LGR.debug('Branching ratios into leptons (2nd leg): %s',
              br_leptons_1leg_2)
    LGR.debug('Branching ratios into leptons (both legs): %s',
              br_leptons_2leg)

    # Fill class variables
    for idx, br in enumerate(br_leptons_2leg):
        try:
            self._br_leptons[idx] += weight * br
        except IndexError:
            self._br_leptons.append(weight * br)
    for idx, br in enumerate(br_jets_2leg):
        try:
            self._br_jets[idx] += weight * br
        except IndexError:
            self._br_jets.append(weight * br)
    for idx, br in enumerate(br_photons_2leg):
        try:
            self._br_photons[idx] += weight * br
        except IndexError:
            self._br_photons.append(weight * br)

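# Worked sketch of the one-leg -> two-leg combinatorics above.  Each list is
# indexed by particle multiplicity (index i holds the probability of getting
# exactly i leptons); the input numbers are invented.  With both legs decaying
# identically, the two-leg distribution is the discrete convolution of the
# one-leg distribution with itself.
def _example_two_leg_convolution():
    """ Illustrate the branching-ratio convolution for two legs. """
    br_1leg = [0.7, 0.2, 0.1]              # 0, 1 or 2 leptons per leg
    br_2leg = [0] * (2 * len(br_1leg) - 1)
    for idx_1, br_1 in enumerate(br_1leg):
        for idx_2, br_2 in enumerate(br_1leg):
            br_2leg[idx_1 + idx_2] += br_1 * br_2
    # br_2leg comes out as roughly [0.49, 0.28, 0.18, 0.04, 0.01],
    # which still sums to 1
    return br_2leg
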
def _fill_dict_susy(self, id_particle):

    """ Translate SUSYHIT output into python dictionaries. The format of the
    dictionary is:
    dict[id_particle] = [
        [prob., [child1, child2, ...]],
        [prob., [child1, child2, ...]],
        ...
    ]
    """

    # Open SUSYHIT output file
    with open('{}/susyhit_slha.out'.format(
            self._dir_susyhit)) as f_susyhit_out:

        # Select range to be read from file
        f_susyhit_out_start = dropwhile(
            lambda l: not search('^DECAY *{}'.format(abs(id_particle)), l),
            f_susyhit_out)
        f_susyhit_out_range = takewhile(
            lambda l: not search('^# *PDG', l) and not search(
                '^XSECTION', l), f_susyhit_out_start)

        # LGR.debug('Range selected from SUSYHIT output file for particle '
        #           '%s:', id_particle)

        # List of all decays
        list_decays = []

        # Loop over selected range and filter out comments
        for line in ifilterfalse(lambda l: search('^ *#|^DECAY', l),
                                 f_susyhit_out_range):

            # LGR.debug(line.rstrip())

            # Split line into list, format is
            # [prob., # of children, child1, child2, ..., comments]
            list_line_str_comments = line.rstrip().split()
            # LGR.debug('list: %s', list_line_str_comments)

            # Skip empty lines
            if len(list_line_str_comments) == 0:
                continue

            # Strip all comments from list_line_comments
            list_line_str = list(
                takewhile(lambda m: not str(m).startswith('#'),
                          list_line_str_comments))
            # LGR.debug('list stripped from comments: %s', list_line_str)
            del list_line_str_comments

            # list_line_str needs to have a length of at least 4, otherwise
            # something's wrong with the input
            if len(list_line_str) < 4:
                raise IndexError('list_line_str {} needs to have at least '
                                 '4 elements. Something seems to be wrong '
                                 'with the input.'.format(list_line_str))

            # Convert first element (branching ratio) to float and other
            # elements (number of daughter particles and particle ID's) to
            # integers
            list_line = []
            list_line.append(float(list_line_str[0]))
            list_line.append([int(x) for x in list_line_str[1:]])
            # LGR.debug('list as numbers: %s', list_line)
            del list_line_str

            # According to SLHA format, second number per line should be
            # number of daughter particles
            if list_line[1][0] + 1 != len(list_line[1]):
                raise IndexError('According to SLHA format, second number '
                                 'per line should be number of daughter '
                                 'particles. Something seems to be wrong '
                                 'with the input.')

            # Once we checked for consistency, we can remove list_line[1][0],
            # since it is redundant
            del list_line[1][0]

            if isnan(list_line[0]):
                LGR.warning('Some decays have a branching ratio of "NaN" '
                            'in the SUSYHIT output file. These decays are '
                            'skipped.')
                continue

            # Fill list of decays
            list_decays.append(list_line)
            del list_line

        # Check if list of decays is empty
        if not list_decays:
            # If it is a SM particle, it probably needs to be filled by
            # hand, if it is a SUSY particle, it will be ignored
            #if abs(id_particle) < 1000000:
            #    raise IndexError('SM particle {} could not be found in '
            #                     'dictionary. Maybe it needs to be filled'
            #                     ' by hand?'.format(id_particle))

            # If the particle has no known decay modes, according to
            # SUSYHIT, then we define its decay to 100 % into the unknown
            # (and ignored) particle 999 (which is a final state)
            list_decays.append([1., [999]])
            LGR.warning('Added particle %s to list of ignored particles. '
                        'It does not seem to have any decay modes in the '
                        'SUSYHIT output file.', id_particle)

        # Loop over list_decays to print debug information and sum up
        # branching ratios
        sum_br = 0
        for list_decay in list_decays:
            sum_br += list_decay[0]
            LGR.debug('list_decay: %s', list_decay)
        LGR.debug('Sum of branching ratios: %s', sum_br)

        # Fill dictionary
        self._d_susy[abs(id_particle)] = list_decays
        LGR.info('Filled decay modes from particle with ID %s into '
                 'dictionary.', id_particle)

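# Sketch of the dictionary layout produced by _fill_dict_susy, with invented
# numbers: a gluino (PDG ID 1000021) decaying to ~chi_10 plus two light
# quarks.  Only the structure matters here, not the values.
def _example_susy_dict():
    """ Illustrate the decay-dictionary format described above. """
    d_susy = {}
    d_susy[1000021] = [
        [0.6, [1000022, 1, -1]],   # 60 % -> ~chi_10 d dbar
        [0.4, [1000022, 2, -2]],   # 40 % -> ~chi_10 u ubar
    ]
    return d_susy
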
def do_scan(self):  # pylint: disable=too-many-branches,too-many-statements

    """ Loops over the different mass combinations and calls appropriate
    functions to set masses in the SUSYHIT input file and to fill the python
    dictionary. """

    # Make backup SUSYHIT input file
    system('mv {}/{}.in{{,.orig}}'.format(self._dir_susyhit,
                                          self._get_susyhit_filename()))
    # Copy template input file
    system('cp {}.template {}/{}.in'.format(self._get_susyhit_filename(),
                                            self._dir_susyhit,
                                            self._get_susyhit_filename()))

    # Fill SM dictionary
    self._fill_dict_sm()

    ## Calculate total number of different mass combinations
    #total = len(M_GLUINOS) * len(M_CHARGINOS1) * len(M_NEUTRALINOS1)
    total = len(self.l_prmtr_x) * len(self.l_prmtr_y)

    # Define counter to count from 1 to total
    counter = 0

    # Create MassScanPlots object for plotting
    plots = MassScanPlots()

    # Set the plot axis labels
    plots.set_axis(self._prmtr_id_x, self._prmtr_id_y)

    # Set the text describing the different parameter values
    plots.set_text(self._prmtr_id_x, self._d_prmtr_x_add,
                   self._d_prmtr_x_scale)
    plots.set_text(self._prmtr_id_y, self._d_prmtr_y_add,
                   self._d_prmtr_y_scale)

    for prmtr_x in self.l_prmtr_x:
        for prmtr_y in self.l_prmtr_y:

            # Clear SUSY dictionary (SM can stay)
            self._d_susy.clear()

            counter += 1

            # Reset error
            self._error = False

            LGR.info('Processing mass combination %3d of %3d: (%4d/%4d).',
                     counter, total, prmtr_x, prmtr_y)
            LGR.debug('prmtr_x = %4d - prmtr_y = %4d', prmtr_x, prmtr_y)

            self._set_parameter_all(prmtr_x, prmtr_y)

            # Run SUSYHIT
            self._run_external('SUSYHIT',
                               'cd {} && ./run'.format(self._dir_susyhit))

            if not self._check_susyhit_output():
                self._skip_point(prmtr_x, prmtr_y)

            # Check for LSP
            if not self._error and not self._check_lsp():
                self._skip_point(prmtr_x, prmtr_y)

            # Get particle masses
            if not self._error and self._calc_masses:
                self._get_masses()

            # Get particle lifetimes
            if not self._error and self._calc_br:
                self._get_ctau()

            # Calculate cross-section with SModelS
            if not self._error and (self._calc_xs or self._calc_mu):
                # 8 TeV cross-sections to check if the model is already
                # excluded and 13 TeV cross-sections for the cross-section
                # results themselves
                for com in [8, 13]:
                    self._run_external(
                        'SModelS', 'runTools xseccomputer '
                        '-p -s {} -f {}/susyhit_slha.out'.format(
                            com, self._dir_susyhit))

                # Apply k-factors
                self._apply_k_factor()

                if self._calc_xs:
                    self._get_xs()

            # Move SUSYHIT output
            system('cp {}/susyhit_slha.out susyhit_slha_{}_{}.out'.format(
                self._dir_susyhit, prmtr_x, prmtr_y))
            system('cp {}/suspect2.out suspect2_{}_{}.out'.format(
                self._dir_susyhit, prmtr_x, prmtr_y))

            # Check if models are already excluded
            if not self._error and self._calc_mu:
                self._run_external(
                    'SModelS', 'timeout 1800 runSModelS '
                    '-o smodels_summary.txt '
                    '-f {}/susyhit_slha.out'.format(self._dir_susyhit),
                    False)
                self._mu = self._get_mu()

                # Move SModelS output file
                system('mv smodels_summary.txt smodels_summary_{}_{}.txt '
                       '2>/dev/null'.format(prmtr_x, prmtr_y))

                LGR.debug('Excluded signal strength: %s', self._mu)

            # Calculate branching ratios into final states
            if not self._error and self._calc_br:
                self._get_br_all()
                if not self._br_leptons or \
                        not self._br_jets or \
                        not self._br_photons:
                    LGR.warning('Some branching ratios are empty.')
                    self._skip_point(prmtr_x, prmtr_y)

            # Get decay channels
            if not self._error and self._calc_br:
                self._dc_gluino = self._get_dcs(self._id_gluino)
                self._dc_chargino1 = self._get_dcs(self._id_chargino1)
                self._dc_chargino2 = self._get_dcs(self._id_chargino2)
                self._dc_neutralino2 = self._get_dcs(self._id_neutralino2)
                self._dc_neutralino3 = self._get_dcs(self._id_neutralino3)
                self._dc_neutralino4 = self._get_dcs(self._id_neutralino4)
                self._dc_sdown_l = self._get_dcs(self._id_sdown_l)
                self._dc_sdown_r = self._get_dcs(self._id_sdown_r)
                self._dc_sup_l = self._get_dcs(self._id_sup_l)
                self._dc_sup_r = self._get_dcs(self._id_sup_r)
                self._dc_sstrange_l = self._get_dcs(self._id_sstrange_l)
                self._dc_sstrange_r = self._get_dcs(self._id_sstrange_r)
                self._dc_scharm_l = self._get_dcs(self._id_scharm_l)
                self._dc_scharm_r = self._get_dcs(self._id_scharm_r)

            # If there was an error, empty all values
            if self._error:
                self._reset()

            plots = self._fill_plots(plots, prmtr_x, prmtr_y)

    # Restore backup SUSYHIT input file
    system('mv {}/{}.in{{.orig,}}'.format(self._dir_susyhit,
                                          self._get_susyhit_filename()))

    # Throw error when no list is filled
    if len(plots.coordinate_x) == 0:
        raise RuntimeError('Nothing to plot.')

    return plots