def __init__(
    self,
    events_file,
    files_per_flavor,
    output_names,
    reco,
    keys,
    cuts,
    track_E_cut=None,
    **std_kwargs,
):
    # instantiation args that should not change
    self.events_file = find_resource(events_file)

    # init base class
    super().__init__(
        expected_params=(),
        **std_kwargs,
    )

    self.output_names = output_names
    self.reco = reco
    self.files_per_flavor = split(files_per_flavor)
    self.track_E_cut = track_E_cut
    self.keys = split(keys)
    self.cuts = eval(cuts)  # pylint: disable=eval-used
def __init__(
    self,
    events_file,
    mc_cuts,
    data_dict,
    neutrinos=True,
    required_metadata=None,
    fraction_events_to_keep=None,
    events_subsample_index=0,
    seed=123456,
    output_names=None,
    **std_kwargs,
):
    # instantiation args that should not change
    self.events_file = events_file
    self.mc_cuts = mc_cuts
    self.data_dict = data_dict
    self.neutrinos = neutrinos
    self.required_metadata = required_metadata
    self.fraction_events_to_keep = fraction_events_to_keep
    self.events_subsample_index = int(events_subsample_index)
    self.seed = int(seed)
    self.output_names = output_names

    # Handle list inputs
    self.events_file = split(self.events_file)
    if self.required_metadata is not None:
        self.required_metadata = split(self.required_metadata)

    # instead of adding params here, consider making them instantiation
    # args so nothing external will inadvertently try to change
    # their values
    expected_params = ()

    # init base class
    super().__init__(
        expected_params=expected_params,
        **std_kwargs,
    )

    # check output names
    if len(self.output_names) != len(set(self.output_names)):
        raise ValueError(
            'Found duplicates in `output_names`, but each name must be'
            ' unique.'
        )

    self.load_events()
    self.apply_cuts_to_events()
def __init__(self, params, input_binning, input_names, combine_groups,
             disk_cache=None, memcache_deepcopy=True, error_method=None,
             outputs_cache_depth=20, debug_mode=None):
    expected_params = (
        'nu_nc_norm',
        #'nutau_norm',
        #'nutau_cc_norm'
    )

    #input_names = split(input_names, sep=',')
    self.combine_groups = eval(combine_groups)  # pylint: disable=eval-used
    for key, val in self.combine_groups.items():
        self.combine_groups[key] = split(val, sep=',')
    # materialize as a list (a dict view would reflect later mutations)
    output_names = list(self.combine_groups.keys())

    super().__init__(
        use_transforms=True,
        params=params,
        expected_params=expected_params,
        input_names=input_names,
        output_names=output_names,
        error_method=error_method,
        disk_cache=disk_cache,
        memcache_deepcopy=memcache_deepcopy,
        outputs_cache_depth=outputs_cache_depth,
        output_binning=input_binning,
        input_binning=input_binning,
        debug_mode=debug_mode,
    )
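# Standalone sketch (not from the original source) of the `combine_groups`
# parsing done above: the config string is eval'd into a dict, then each
# value is split on commas. A plain-Python stand-in for PISA's `split`
# helper is used here purely for illustration.

def _split_str(s, sep=','):
    """Minimal stand-in for the `split` helper used above."""
    return [x.strip() for x in s.split(sep)]

combine_groups = eval("{'nue_cc': 'nue_cc,nuebar_cc', 'nu_nc': 'nue_nc,numu_nc,nutau_nc'}")
combine_groups = {k: _split_str(v) for k, v in combine_groups.items()}
# -> {'nue_cc': ['nue_cc', 'nuebar_cc'], 'nu_nc': ['nue_nc', 'numu_nc', 'nutau_nc']}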
def __init__(self, params, input_binning, output_binning, input_names,
             disk_cache=None, error_method=None, transforms_cache_depth=20,
             outputs_cache_depth=20):
    # All of the following params (and no more) must be passed via the
    # `params` argument.
    expected_params = (
        'dom_eff', 'dom_eff_file',
        'hole_ice_fwd', 'hole_ice_fwd_file',
        'hole_ice', 'hole_ice_file',
        #'reco_cz_res', 'reco_cz_res_file',
    )

    input_names = split(input_names, sep=',')
    output_names = input_names

    # Invoke the init method from the parent class, which does a lot of
    # work for you.
    super().__init__(
        use_transforms=True,
        params=params,
        expected_params=expected_params,
        input_names=input_names,
        output_names=output_names,
        disk_cache=disk_cache,
        outputs_cache_depth=outputs_cache_depth,
        error_method=error_method,
        transforms_cache_depth=transforms_cache_depth,
        input_binning=input_binning,
        output_binning=output_binning,
    )

    self.fit_results = None
    self.pnames = None
def parse_pipeline_config(config):
    """Parse pipeline config.

    Parameters
    ----------
    config : string or ConfigParser

    Returns
    -------
    stage_dicts : OrderedDict
        Keys are (stage_name, service_name) tuples and values are
        OrderedDicts with keys the argnames and values the arguments'
        values. Some known arg values are parsed out fully into Python
        objects, while the rest remain as strings that must be used or
        parsed elsewhere.

    """
    # Note: imports placed here to avoid circular imports
    from pisa.core.binning import MultiDimBinning, OneDimBinning
    from pisa.core.param import ParamSelector

    if isinstance(config, str):
        config = from_file(config)
    elif isinstance(config, PISAConfigParser):
        pass
    else:
        raise TypeError(
            '`config` must either be a string or PISAConfigParser. Got %s'
            ' instead.' % type(config)
        )

    if not config.has_section('binning'):
        raise NoSectionError(
            "Could not find 'binning'. Only found sections: %s"
            % config.sections()
        )

    # Create binning objects
    binning_dict = {}
    for name, value in config['binning'].items():
        if name.endswith('.order'):
            order = split(config.get('binning', name))
            binning, _ = split(name, sep='.')
            bins = []
            for bin_name in order:
                try:
                    def_raw = config.get('binning', binning + '.' + bin_name)
                except Exception:
                    dims_defined = [
                        split(dim, sep='.')[1]
                        for dim in config['binning'].keys()
                        if dim.startswith(binning + '.')
                        and not dim.endswith('.order')
                    ]
                    logging.error(
                        "Failed to find definition of '%s' dimension of '%s'"
                        " binning entry. Only found definition(s) of: %s",
                        bin_name, binning, dims_defined
                    )
                    del dims_defined
                    raise
                try:
                    kwargs = eval(def_raw)  # pylint: disable=eval-used
                except Exception:
                    logging.error(
                        "Failed to evaluate definition of '%s' dimension of"
                        " '%s' binning entry:\n'%s'",
                        bin_name, binning, def_raw
                    )
                    raise
                try:
                    bins.append(OneDimBinning(bin_name, **kwargs))
                except Exception:
                    logging.error(
                        "Failed to instantiate new `OneDimBinning` from '%s'"
                        " dimension of '%s' binning entry with definition:\n"
                        "'%s'\n",
                        bin_name, binning, kwargs
                    )
                    raise
            binning_dict[binning] = MultiDimBinning(bins)

    # Pipeline section
    section = 'pipeline'

    # Get and parse the order of the stages (and which services implement them)
    order = [split(x, STAGE_SEP) for x in split(config.get(section, 'order'))]

    param_selections = []
    if config.has_option(section, 'param_selections'):
        param_selections = split(config.get(section, 'param_selections'))

    detector_name = None
    if config.has_option(section, 'detector_name'):
        detector_name = config.get(section, 'detector_name')

    # Parse [stage.<stage_name>] sections and store to stage_dicts
    stage_dicts = OrderedDict()
    for stage, service in order:
        old_section_header = 'stage%s%s' % (STAGE_SEP, stage)
        new_section_header = '%s%s%s' % (stage, STAGE_SEP, service)
        if config.has_section(old_section_header):
            logging.warning(
                '"%s" is an old-style section header; in the future use "%s"'
                % (old_section_header, new_section_header)
            )
            section = old_section_header
        elif config.has_section(new_section_header):
            section = new_section_header
        else:
            raise IOError(
                'missing section in cfg for stage "%s" service "%s"'
                % (stage, service)
            )

        # Instantiate dict to store args to pass to this stage
        service_kwargs = OrderedDict()

        param_selector = ParamSelector(selections=param_selections)
        service_kwargs['params'] = param_selector

        n_params = 0
        for fullname in config.options(section):
            try:
                value = config.get(section, fullname)
            except Exception:
                logging.error(
                    'Unable to obtain value of option "%s" in section "%s".'
                    % (fullname, section)
                )
                raise

            # See if this matches a param specification
            param_match = PARAM_RE.match(fullname)
            if param_match is not None:
                n_params += 1

                param_match_dict = param_match.groupdict()
                param_subfields = param_match_dict['subfields'].split('.')

                # Figure out what the dotted fields represent...
                infodict = interpret_param_subfields(subfields=param_subfields)

                # If field is an attr, skip since these are located manually
                if infodict['attr'] is not None:
                    continue

                # Check if this param already exists in a previous stage; if
                # so, make sure there are no specs for this param, but just a
                # link to the param object that is already instantiated.
                for kw in stage_dicts.values():
                    # Stage did not get a `params` argument from config
                    if 'params' not in kw:
                        continue

                    # Retrieve the param from the ParamSelector
                    try:
                        param = kw['params'].get(
                            name=infodict['pname'],
                            selector=infodict['selector']
                        )
                    except KeyError:
                        continue

                    # Make sure there are no other specs (in this section)
                    # for the param defined in a previous section
                    for a in PARAM_ATTRS:
                        if config.has_option(section, '%s.%s' % (fullname, a)):
                            raise ValueError(
                                "Parameter spec. '%s' of '%s' found in"
                                " section '%s', but parameter exists in"
                                " previous stage!" % (a, fullname, section)
                            )

                    break

                # Param *not* found in a previous stage (i.e., no explicit
                # `break` encountered in `for` loop above); therefore must
                # instantiate it.
                else:
                    param = parse_param(
                        config=config,
                        section=section,
                        selector=infodict['selector'],
                        fullname=fullname,
                        pname=infodict['pname'],
                        value=value
                    )

                param_selector.update(param, selector=infodict['selector'])

            # If it's not a param spec but contains 'binning', assume it's a
            # binning spec for CAKE stages
            elif 'binning' in fullname:
                service_kwargs[fullname] = binning_dict[value]

            # it's gonna be a PI stage
            elif '_specs' in fullname:
                value = parse_string_literal(value)
                # is it None?
                if value is None:
                    service_kwargs[fullname] = value
                # is it events?
                elif value in ['evnts', 'events']:
                    service_kwargs[fullname] = 'events'
                # so it's gotta be a binning
                else:
                    service_kwargs[fullname] = binning_dict[value]

            # it's a list of in-/output names
            elif fullname.endswith('_names'):
                value = split(value)
                service_kwargs[fullname] = value

            # Otherwise it's some other stage instantiation argument; identify
            # this by its full name and try to interpret and instantiate a
            # Python object using the string
            else:
                try:
                    value = parse_quantity(value)
                    value = value.nominal_value * value.units
                except ValueError:
                    value = parse_string_literal(value)
                service_kwargs[fullname] = value

        # If no params actually specified in config, remove 'params' from the
        # service's keyword args
        if n_params == 0:
            service_kwargs.pop('params')

        # Store the service's kwargs to the stage_dicts
        stage_dicts[(stage, service)] = service_kwargs

    stage_dicts['detector_name'] = detector_name

    return stage_dicts
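# Usage sketch (an assumption, not from the original source): how
# `parse_pipeline_config` might be driven. The import path follows the usual
# PISA layout (`pisa.utils.config_parser`) and the config filename below is
# hypothetical.
from pisa.utils.config_parser import parse_pipeline_config

stage_dicts = parse_pipeline_config('settings/pipeline/example.cfg')  # hypothetical path
detector_name = stage_dicts.pop('detector_name', None)
for (stage, service), kwargs in stage_dicts.items():
    # each remaining entry maps a (stage, service) tuple to its kwargs
    print(stage, service, sorted(kwargs))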
def _compute_outputs(self, inputs=None):
    """Compute histograms for output channels."""
    logging.debug('Entering mceq._compute_outputs')

    primary_model = split(self.params['primary_model'].value, ',')
    if len(primary_model) != 2:
        raise ValueError(
            'primary_model is not of length 2, instead is of length'
            ' {0}'.format(len(primary_model))
        )
    # Look up the primary-model class by name on the `pm` module
    primary_model[0] = getattr(pm, primary_model[0])

    density_model = (
        self.params['density_model'].value,
        (self.params['location'].value, self.params['season'].value)
    )

    mceq_run = MCEqRun(
        interaction_model=str(self.params['interaction_model'].value),
        primary_model=primary_model,
        theta_deg=0.0,
        density_model=density_model,
        **mceq_config.mceq_config_without(['density_model'])
    )

    # Power of energy to scale the flux (the results will be returned as
    # E**mag * flux)
    mag = 0

    # Obtain energy grid (fixed) of the solution for the x-axis of the plots
    e_grid = mceq_run.e_grid

    # Dictionary for results
    flux = OrderedDict()
    for nu in self.output_names:
        flux[nu] = []

    binning = self.output_binning
    cz_binning = binning.dims[binning.index('coszen', use_basenames=True)]
    en_binning = binning.dims[binning.index('energy', use_basenames=True)]

    cz_centers = cz_binning.weighted_centers.m
    angles = (np.arccos(cz_centers) * ureg.radian).m_as('degrees')

    for theta in angles:
        mceq_run.set_theta_deg(theta)
        mceq_run.solve()

        flux['nue'].append(mceq_run.get_solution('total_nue', mag))
        flux['nuebar'].append(mceq_run.get_solution('total_antinue', mag))
        flux['numu'].append(mceq_run.get_solution('total_numu', mag))
        flux['numubar'].append(mceq_run.get_solution('total_antinumu', mag))

    for nu in flux.keys():
        flux[nu] = np.array(flux[nu])

    smoothing = self.params['smoothing'].value.m
    en_centers = en_binning.weighted_centers.m_as('GeV')
    spline_flux = self.bivariate_spline(
        flux, cz_centers, e_grid, smooth=smoothing
    )
    ev_flux = self.bivariate_evaluate(spline_flux, cz_centers, en_centers)

    for nu in ev_flux:
        ev_flux[nu] = ev_flux[nu] * ureg('cm**-2 s**-1 sr**-1 GeV**-1')

    mapset = []
    for nu in ev_flux.keys():
        mapset.append(Map(name=nu, hist=ev_flux[nu], binning=binning))

    return MapSet(mapset)
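# Standalone illustration (not from the original source) of the zenith-angle
# conversion used above: cos(zenith) bin centers are converted to theta in
# degrees before being handed to MCEq's set_theta_deg().
import numpy as np

cz_centers = np.array([-0.9, 0.0, 0.9])
angles = np.degrees(np.arccos(cz_centers))
# -> approximately [154.16, 90.0, 25.84] degrees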
def __init__(self,
             events_file,
             mc_cuts,
             data_dict,
             neutrinos=True,
             required_metadata=None,
             data=None,
             params=None,
             input_names=None,
             output_names=None,
             debug_mode=None,
             input_specs=None,
             calc_specs=None,
             output_specs=None,
             fraction_events_to_keep=None,
             ):
    # instantiation args that should not change
    self.events_file = events_file
    self.mc_cuts = mc_cuts
    self.data_dict = data_dict
    self.neutrinos = neutrinos
    self.required_metadata = required_metadata
    self.fraction_events_to_keep = fraction_events_to_keep

    # Handle list inputs
    self.events_file = split(self.events_file)
    if self.required_metadata is not None:
        self.required_metadata = split(self.required_metadata)

    # instead of adding params here, consider making them instantiation
    # args so nothing external will inadvertently try to change
    # their values
    expected_params = ()

    # created as ones if not already present
    input_apply_keys = (
        'initial_weights',
    )

    # copy of initial weights, to be modified by later stages
    output_apply_keys = (
        'weights',
    )

    # init base class
    super().__init__(
        data=data,
        params=params,
        expected_params=expected_params,
        input_names=input_names,
        output_names=output_names,
        debug_mode=debug_mode,
        input_specs=input_specs,
        calc_specs=calc_specs,
        output_specs=output_specs,
        input_apply_keys=input_apply_keys,
        output_apply_keys=output_apply_keys,
    )

    # doesn't calculate anything
    if self.calc_mode is not None:
        raise ValueError(
            'There is nothing to calculate for this event loading service.'
            ' Hence, `calc_mode` must not be set.'
        )

    # check output names
    if len(self.output_names) != len(set(self.output_names)):
        raise ValueError(
            'Found duplicates in `output_names`, but each name must be'
            ' unique.'
        )

    self.load_events()
    self.apply_cuts_to_events()