def summarise_systematics( list_of_central_measurements, dictionary_of_systematics, pdf_calculation = False, hadronisation_systematic = False, mass_systematic = False, kValueSystematic = False, experimentalUncertainty = False, actualCentralMeasurements = None ):
    """
    Summarise systematic uncertainties bin by bin.

    Parameters:
        list_of_central_measurements: per-bin (value, error) pairs; entry 0
            is the value, entry 1 the error.
        dictionary_of_systematics: maps systematic name -> per-bin
            (value, error) measurements.
        pdf_calculation: use the dedicated PDF up/down prescription.
        hadronisation_systematic: symmetric error from the relative
            difference between the 'TTJets_hadronisation' and
            'TTJets_NLOgenerator' samples.
        mass_systematic: envelope of the mass variations, then rescaled
            via scaleTopMassSystematicErrors (errors calculated with very
            different top masses).
        kValueSystematic: envelope of the variations, always symmetrised.
        experimentalUncertainty: envelope, then rescaled from the fitted
            central value to the corresponding actualCentralMeasurements
            value.
        actualCentralMeasurements: per-bin (value, error) pairs; only read
            when experimentalUncertainty is True.

    Returns:
        (down_errors, up_errors): two lists of per-bin absolute errors.

    Reads the module-level flag `symmetrise_errors`.
    """
    global symmetrise_errors
    # Bug fix: the original used a mutable default argument ([]), which is
    # shared across calls. None behaves identically for every caller.
    if actualCentralMeasurements is None:
        actualCentralMeasurements = []
    number_of_bins = len( list_of_central_measurements )
    down_errors = [0] * number_of_bins
    up_errors = [0] * number_of_bins
    for bin_i in range( number_of_bins ):
        central_value = list_of_central_measurements[bin_i][0]  # 0 = value, 1 = error
        error_down, error_up = 0, 0
        if pdf_calculation:
            pdf_uncertainty_values = {systematic: measurement[bin_i][0] for systematic, measurement in dictionary_of_systematics.iteritems()}
            error_down, error_up = calculate_lower_and_upper_PDFuncertainty( central_value, pdf_uncertainty_values )
            if symmetrise_errors:
                # Evaluate max() once; equivalent to the original pair of
                # max() calls, both of which yielded max(down, up).
                error_down = error_up = max( error_down, error_up )
        elif hadronisation_systematic:
            # Always symmetric: absolute value of the difference between
            # powheg_herwig and powheg_pythia, scaled to the central value.
            powheg_herwig = dictionary_of_systematics['TTJets_hadronisation'][bin_i][0]
            powheg_pythia = dictionary_of_systematics['TTJets_NLOgenerator'][bin_i][0]
            difference = abs( powheg_herwig - powheg_pythia )
            mean = ( powheg_herwig + powheg_pythia ) / 2.0
            # NOTE(review): raises ZeroDivisionError if the two samples
            # average to zero — presumably impossible for these inputs.
            relative_error = difference / mean
            error_down = relative_error * central_value
            error_up = error_down
        elif mass_systematic:
            list_of_systematics = [systematic[bin_i][0] for systematic in dictionary_of_systematics.values()]
            error_down, error_up = calculate_lower_and_upper_systematics( central_value, list_of_systematics, False )
            # Scale errors calculated using very different top masses.
            error_down, error_up = scaleTopMassSystematicErrors( [error_down], [error_up] )
            error_down = error_down[0]
            error_up = error_up[0]
        elif kValueSystematic:
            list_of_systematics = [systematic[bin_i][0] for systematic in dictionary_of_systematics.values()]
            error_down, error_up = calculate_lower_and_upper_systematics( central_value, list_of_systematics, True )
        elif experimentalUncertainty:
            list_of_systematics = [systematic[bin_i][0] for systematic in dictionary_of_systematics.values()]
            error_down, error_up = calculate_lower_and_upper_systematics( central_value, list_of_systematics, symmetrise_errors )
            # Rescale relative to the actual central measurement.
            actualCentralValue = actualCentralMeasurements[bin_i][0]
            error_down = error_down / central_value * actualCentralValue
            error_up = error_up / central_value * actualCentralValue
        else:
            list_of_systematics = [systematic[bin_i][0] for systematic in dictionary_of_systematics.values()]
            error_down, error_up = calculate_lower_and_upper_systematics( central_value, list_of_systematics, symmetrise_errors )
        down_errors[bin_i] = error_down
        up_errors[bin_i] = error_up
    return down_errors, up_errors
def summarise_systematics( list_of_central_measurements, dictionary_of_systematics, pdf_calculation = False, hadronisation_systematic = False ):
    """
    Summarise systematic uncertainties bin by bin.

    Each entry of list_of_central_measurements is a (value, error) pair;
    dictionary_of_systematics maps a systematic name to per-bin
    (value, error) measurements.  Depending on the flags the per-bin error
    is taken from the PDF prescription, the hadronisation difference, or
    the generic envelope.  Reads the module-level flag `symmetrise_errors`.

    Returns a (down_errors, up_errors) pair of per-bin lists.
    """
    global symmetrise_errors
    n_bins = len( list_of_central_measurements )
    down_errors, up_errors = [0] * n_bins, [0] * n_bins
    for bin_i in range( n_bins ):
        # entry 0 = value, entry 1 = error
        central_value = list_of_central_measurements[bin_i][0]
        err_down, err_up = 0, 0
        if pdf_calculation:
            pdf_values = dict( ( name, meas[bin_i][0] ) for name, meas in dictionary_of_systematics.iteritems() )
            err_down, err_up = calculate_lower_and_upper_PDFuncertainty( central_value, pdf_values )
            if symmetrise_errors:
                worst = max( err_down, err_up )
                err_down, err_up = worst, worst
        elif hadronisation_systematic:
            # Always symmetric: absolute value of the difference between
            # powheg_herwig and powheg_pythia.
            herwig_value = dictionary_of_systematics['TTJets_powheg_herwig'][bin_i][0]
            pythia_value = dictionary_of_systematics['TTJets_powheg_pythia'][bin_i][0]
            err_down = abs( herwig_value - pythia_value )
            err_up = err_down
        else:
            variations = [meas[bin_i][0] for meas in dictionary_of_systematics.values()]
            err_down, err_up = calculate_lower_and_upper_systematics( central_value, variations, symmetrise_errors )
        down_errors[bin_i] = err_down
        up_errors[bin_i] = err_up
    return down_errors, up_errors
def summarise_systematics(list_of_central_measurements, dictionary_of_systematics, pdf_calculation=False):
    """
    Summarise systematic uncertainties bin by bin.

    Each entry of list_of_central_measurements is a (value, error) pair;
    dictionary_of_systematics maps a systematic name to per-bin
    (value, error) measurements.  With pdf_calculation the PDF up/down
    prescription is used, otherwise the generic envelope.  Reads the
    module-level flag `symmetrise_errors`.

    Returns a (down_errors, up_errors) pair of per-bin lists.
    """
    global symmetrise_errors
    n_bins = len(list_of_central_measurements)
    down_errors, up_errors = [0] * n_bins, [0] * n_bins
    for bin_i in range(n_bins):
        # entry 0 = value, entry 1 = error
        central_value = list_of_central_measurements[bin_i][0]
        err_down, err_up = 0, 0
        if pdf_calculation:
            pdf_values = dict((name, meas[bin_i][0]) for name, meas in dictionary_of_systematics.iteritems())
            err_down, err_up = calculate_lower_and_upper_PDFuncertainty(central_value, pdf_values)
            if symmetrise_errors:
                worst = max(err_down, err_up)
                err_down, err_up = worst, worst
        else:
            variations = [meas[bin_i][0] for meas in dictionary_of_systematics.values()]
            err_down, err_up = calculate_lower_and_upper_systematics(central_value, variations, symmetrise_errors)
        down_errors[bin_i] = err_down
        up_errors[bin_i] = err_up
    return down_errors, up_errors
def summarise_systematics(list_of_central_measurements,
                          dictionary_of_systematics,
                          pdf_calculation=False,
                          hadronisation_systematic=False,
                          mass_systematic=False,
                          kValueSystematic=False,
                          experimentalUncertainty=False,
                          actualCentralMeasurements=None):
    """
    Summarise systematic uncertainties bin by bin.

    Parameters:
        list_of_central_measurements: per-bin (value, error) pairs; entry 0
            is the value, entry 1 the error.
        dictionary_of_systematics: maps systematic name -> per-bin
            (value, error) measurements.
        pdf_calculation: use the dedicated PDF up/down prescription.
        hadronisation_systematic: symmetric error from the relative
            difference of the 'TTJets_hadronisation' and
            'TTJets_NLOgenerator' samples.
        mass_systematic: envelope, rescaled via scaleTopMassSystematicErrors
            (errors calculated with very different top masses).
        kValueSystematic: envelope, always symmetrised.
        experimentalUncertainty: envelope, rescaled from the fitted central
            value to the corresponding actualCentralMeasurements value.
        actualCentralMeasurements: per-bin (value, error) pairs; only read
            when experimentalUncertainty is True.

    Returns:
        (down_errors, up_errors): two lists of per-bin absolute errors.

    Reads the module-level flag `symmetrise_errors`.
    """
    global symmetrise_errors
    # Bug fix: the original used a mutable default argument ([]), which is
    # shared across calls. None behaves identically for every caller.
    if actualCentralMeasurements is None:
        actualCentralMeasurements = []
    number_of_bins = len(list_of_central_measurements)
    down_errors = [0] * number_of_bins
    up_errors = [0] * number_of_bins
    for bin_i in range(number_of_bins):
        central_value = list_of_central_measurements[bin_i][0]  # 0 = value, 1 = error
        error_down, error_up = 0, 0
        if pdf_calculation:
            pdf_uncertainty_values = {
                systematic: measurement[bin_i][0]
                for systematic, measurement in
                dictionary_of_systematics.iteritems()
            }
            error_down, error_up = calculate_lower_and_upper_PDFuncertainty(
                central_value, pdf_uncertainty_values)
            if symmetrise_errors:
                # Evaluate max() once; equivalent to the original pair of
                # max() calls, both of which yielded max(down, up).
                error_down = error_up = max(error_down, error_up)
        elif hadronisation_systematic:
            # Always symmetric: absolute value of the difference between
            # powheg_herwig and powheg_pythia, scaled to the central value.
            powheg_herwig = dictionary_of_systematics['TTJets_hadronisation'][bin_i][0]
            powheg_pythia = dictionary_of_systematics['TTJets_NLOgenerator'][bin_i][0]
            difference = abs(powheg_herwig - powheg_pythia)
            mean = (powheg_herwig + powheg_pythia) / 2.0
            # NOTE(review): raises ZeroDivisionError if the two samples
            # average to zero — presumably impossible for these inputs.
            relative_error = difference / mean
            error_down = relative_error * central_value
            error_up = error_down
        elif mass_systematic:
            list_of_systematics = [
                systematic[bin_i][0]
                for systematic in dictionary_of_systematics.values()
            ]
            error_down, error_up = calculate_lower_and_upper_systematics(
                central_value, list_of_systematics, False)
            # Scale errors calculated using very different top masses.
            error_down, error_up = scaleTopMassSystematicErrors([error_down],
                                                                [error_up])
            error_down = error_down[0]
            error_up = error_up[0]
        elif kValueSystematic:
            list_of_systematics = [
                systematic[bin_i][0]
                for systematic in dictionary_of_systematics.values()
            ]
            error_down, error_up = calculate_lower_and_upper_systematics(
                central_value, list_of_systematics, True)
        elif experimentalUncertainty:
            list_of_systematics = [
                systematic[bin_i][0]
                for systematic in dictionary_of_systematics.values()
            ]
            error_down, error_up = calculate_lower_and_upper_systematics(
                central_value, list_of_systematics, symmetrise_errors)
            # Rescale relative to the actual central measurement.
            actualCentralValue = actualCentralMeasurements[bin_i][0]
            error_down = error_down / central_value * actualCentralValue
            error_up = error_up / central_value * actualCentralValue
        else:
            list_of_systematics = [
                systematic[bin_i][0]
                for systematic in dictionary_of_systematics.values()
            ]
            error_down, error_up = calculate_lower_and_upper_systematics(
                central_value, list_of_systematics, symmetrise_errors)
        down_errors[bin_i] = error_down
        up_errors[bin_i] = error_up
    return down_errors, up_errors