def get_spn_resolution(contents):
    norm_contents = contents.lower()
    if '0 to 255 per byte' in norm_contents or 'states/' in norm_contents:
        return 1.0
    elif 'bit-mapped' in norm_contents or \
            'binary' in norm_contents or \
            'ascii' in norm_contents or \
            'not defined' in norm_contents or \
            'variant determined' in norm_contents or \
            '7 bit iso latin 1 characters' in norm_contents or \
            contents.strip() == '':
        return int(0)
    elif 'per bit' in norm_contents or '/bit' in norm_contents:
        first = contents.split(' ')[0]
        first = first.replace('/bit', '')
        first = J1939daConverter.just_numerals(first)
        return asteval.Interpreter()(first)
    elif 'bit' in norm_contents and '/' in norm_contents:
        left, right = contents.split('/')
        left = J1939daConverter.just_numerals(left.split(' ')[0])
        right = J1939daConverter.just_numerals(right.split(' ')[0])
        return asteval.Interpreter()('%s/%s' % (left, right))
    elif 'microsiemens/mm' in norm_contents or 'usiemens/mm' in norm_contents:
        # special handling for this weirdness
        return float(contents.split(' ')[0])
    raise ValueError('unknown spn resolution "%s"' % contents)
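# --- Hedged illustration (not part of the original source): the arithmetic
# branches above rely on asteval to evaluate numeric strings safely instead of
# using eval(); the inputs below are made up.
import asteval

aeval = asteval.Interpreter()
print(aeval('0.125'))                  # 0.125, as returned by the 'per bit' branch
print(aeval('%s/%s' % ('1', '256')))   # 0.00390625, as in the 'bit ... /' branch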
def _asteval_interpreter(symtable, *args, **kwds):
    try:
        interp = asteval.Interpreter(usersyms=symtable, **kwds)
    except TypeError:
        # Arg `usersyms` added in asteval-0.9.10 (Oct 2017)
        interp = asteval.Interpreter(symtable, **kwds)
    interp.symtable.update(symtable)
    return interp
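# --- Hypothetical usage of the compatibility wrapper above (assumes `asteval`
# is already imported in the module):
interp = _asteval_interpreter({'x': 3, 'y': 4})
print(interp('x * y + 1'))   # 13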
def evaluate(self, context):
    """ Repeats the embedded block as many times as required.

    Args:
        context: The context object at the point of evaluation

    Yields:
        A line of text at a time
    """
    # Locate the 'in' keyword
    parts = shlex.split(self.loop)
    if "in" not in parts:
        raise PrologueError(f"Incorrectly formed loop condition '{self.loop}'")
    pre_loop = (" ".join(parts[:parts.index("in")])).strip()
    post_loop = (" ".join(parts[parts.index("in") + 1:])).strip()
    # Recognise a range
    m_range = self.RGX_RANGE.match(post_loop)
    m_array = self.RGX_ARRAY.match(post_loop)
    iterable = None
    if m_range:
        rng_eval = context.evaluate(m_range.groups(0)[0])
        if isinstance(rng_eval, tuple):
            iterable = range(*rng_eval)
        else:
            iterable = range(rng_eval)
    elif m_array:
        iterable = [x.strip() for x in m_array.groups(0)[0].split(",")]
    else:
        iterable = asteval.Interpreter()(context.flatten(post_loop))
    # TODO: Need to support complex unpacking of loop conditions
    for entry in iterable:
        # Expose the current value of the loop variable
        context.set_define(pre_loop, entry, check=False)
        # Perform substitutions for the loop variable (and any others)
        for line in super().evaluate(context):
            yield context.substitute(line)
def __init__(self, label: str = None, root_group: ParameterGroup = None):
    """Initialize a :class:`ParameterGroup` instance with ``label``.

    Parameters
    ----------
    label : str
        The label of the group.
    root_group : ParameterGroup
        The root group.

    Raises
    ------
    ValueError
        Raised if an invalid label is given.
    """
    if label is not None and not Parameter.valid_label(label):
        raise ValueError(f"'{label}' is not a valid group label.")
    self._label = label
    self._parameters: dict[str, Parameter] = {}
    self._root_group = root_group
    self._evaluator = (
        asteval.Interpreter(symtable=asteval.make_symbol_table(group=self))
        if root_group is None
        else None
    )
    self.source_path = "parameters.csv"
    super().__init__()
def main():
    args = get_args()
    conf = args.conf
    pbpl.common.setup_plot()

    with h5py.File(conf['Files']['Input'], 'r') as fin:
        d2W = fin['d2W'][:] * joule / (mrad**2 * MeV)
        photon_energy = fin['energy'][:] * MeV
        thetax = fin['thetax'][:] * mrad
        thetay = fin['thetay'][:] * mrad

    # create safe interpreter for evaluation of configuration expressions
    aeval = asteval.Interpreter(use_numpy=True)
    for x in pbpl.common.units.__all__:
        aeval.symtable[x] = pbpl.common.units.__dict__[x]

    output = PdfPages(conf['Files']['Output'])
    for fig_conf in conf['Figure']:
        fig_type = fig_conf['Type']
        if fig_type == '1D':
            func = plot_1D
        elif fig_type == '2D':
            func = plot_2D
        else:
            raise Exception("unknown figure type '{}'".format(fig_type))
        func(output, aeval, fig_conf, d2W, photon_energy, thetax, thetay)
    output.close()
def __init__(self, *args, **kwargs):
    kwargs['asteval'] = asteval.Interpreter(symtable=SmartSymTable(self))
    super(ParameterSet, self).__init__(*args, **kwargs)
    self._prepared = False
    self._registered_functions = {}
    self.tag = None
def __init__(self, prefix, scaler=None, nchan=8, clockrate=50.0):
    self._nchan = nchan
    self.scaler = None
    self.clockrate = clockrate  # clock rate in MHz
    self._mode = SCALER_MODE

    if scaler is not None:
        self.scaler = Scaler(scaler, nchan=nchan)

    self.mcas = []
    for i in range(nchan):
        self.mcas.append(MCA(prefix, mca=i + 1, nrois=2))

    Device.__init__(self, prefix, delim='', attrs=self.attrs, mutable=False)
    time.sleep(0.05)
    for pvname, pv in self._pvs.items():
        pv.get()

    self.ast_interp = asteval.Interpreter()
    self.read_scaler_config()
def __init__(self, name, conf):
    g4.G4VSensitiveDetector.__init__(self, name)
    self.filename = conf['File']
    if 'TreeFilter' in conf:
        c = conf['TreeFilter']
        self.tree_filter = TreeFilter(*c[0:3], int(c[3]))
    else:
        self.tree_filter = None
    if 'Group' in conf:
        self.groupname = conf['Group']
    else:
        self.groupname = None
    self.M = compton.build_transformation(conf['Transformation'], mm, deg)
    aeval = asteval.Interpreter(use_numpy=True)
    for q in g4.hepunit.__dict__:
        aeval.symtable[q] = g4.hepunit.__dict__[q]
    self.bin_edges = [aeval(q) for q in conf['BinEdges']]
    self.hist = np.zeros([len(q) - 1 for q in self.bin_edges])
    self.update_interval = self.hist.size
    self.position = []
    self.edep = []
    try:
        os.unlink(self.filename)
    except OSError:
        pass
def __init__(self, get_str=None, get_reply_float=False, set_str=None,
             set_value_lowercase=True, set_value_map=None,
             extract_raw_regex=None, **kwargs):
    '''
    Args:
        get_str (str): sent verbatim in the event of on_get; if None, getting of endpoint is disabled
        get_reply_float (bool): apply special formatting to get return
        set_str (str): sent as set_str.format(value) in the event of on_set; if None, setting of endpoint is disabled
        set_value_lowercase (bool): default option to map all string set values to .lower()
            **WARNING**: never set to False if using a set_value_map dict
        set_value_map (str|dict): inverse of calibration to map raw set value to value sent; either a
            dictionary or an asteval-interpretable string
        extract_raw_regex (str): regular expression search pattern applied to the get return. Must be
            constructed with an extraction group keyed with the name "value_raw" (i.e. r'(?P<value_raw>)')
    '''
    Entity.__init__(self, **kwargs)
    self._get_reply_float = get_reply_float
    self._get_str = get_str
    self._set_str = set_str
    self._set_value_map = set_value_map
    self._extract_raw_regex = extract_raw_regex
    self.evaluator = asteval.Interpreter()
    if set_value_map is not None and not isinstance(set_value_map, (dict, str)):
        raise ValueError(
            f"Invalid set_value_map config for {self.name}; type is {type(set_value_map)} not dict"
        )
    self._set_value_lowercase = set_value_lowercase
    if isinstance(set_value_map, dict) and not set_value_lowercase:
        raise ValueError(
            f"Invalid config option for {self.name} with set_value_map and set_value_lowercase=False"
        )
def produce_image(self, dpi=None):
    f = self.figures.get(dpi, None)
    if f:
        return f
    path = self.labgen_instance.figures_dir + os.sep + self.figure_name + (dpi or "")
    pp.clf()
    interpreter = asteval.Interpreter({table.name: table.body
                                       for table in self.labgen_instance.tables.values()})
    curves = self.metadata.get(Plot._PROP_CURVE.name, [])
    xlabel, ylabel = self.metadata[self._PROP_AXES.name]
    pp.xlabel(xlabel)
    pp.ylabel(ylabel)
    # plot data
    for curve in curves:
        x_expr, y_expr, scope = curve.get_expressions()
        interpreter.eval(scope)  # prepare scope
        for curve_data_x, curve_data_y in zip(flatten_2d_np_array(interpreter.eval(x_expr)),
                                              flatten_2d_np_array(interpreter.eval(y_expr))):
            pp.plot(curve_data_x, curve_data_y, marker="o",
                    linestyle=curve.get_style(), color=curve.get_color())
    xrange = self.metadata[self._PROP_XRANGE.name]
    do_auto_x = xrange == Plot.AUTOSCALE
    if not do_auto_x:
        pp.xlim(xrange.start, xrange.stop)
    yrange = self.metadata[self._PROP_YRANGE.name]
    do_auto_y = yrange == Plot.AUTOSCALE
    if not do_auto_y:
        pp.ylim(yrange.start, yrange.stop)
    # only autoscale when neither axis has an explicit range,
    # so explicit xlim/ylim settings are not overridden
    if do_auto_x and do_auto_y:
        pp.autoscale()
    pp.savefig(path)
    self.figures[dpi] = fig = Figure(path)
    return fig
def _eval_context(self, vars):
    import asteval

    e = asteval.Interpreter(use_numpy=False, writer=None)
    e.symtable.update(vars)
    e.symtable["__last_iteration"] = vars.get("__last_iteration", False)
    return e
def literal_eval(string):
    """Use the asteval module to interpret arbitrary strings slightly safely.

    Args:
        string (str): String expression to be evaluated.

    Returns:
        (object): Evaluation result.

    On the first call this will create a new asteval.Interpreter() instance and
    preload some key modules into the symbol table.
    """
    global _asteval_interp  # Ugly!
    if _asteval_interp is None:
        _asteval_interp = asteval.Interpreter(
            usersyms={
                "np": np,
                "re": re,
                "NaN": NaN,
                "nan": NaN,
                "None": None,
                "datetime": datetime,
            })
    try:
        return _asteval_interp(string, show_errors=False)
    except (SyntaxError, ValueError, NameError, IndexError, TypeError):
        raise ValueError("Cannot interpret {} as valid Python".format(string))
def register_function(self, name, function):
    """Register a function in the ``symtable`` of the ``asteval`` attribute."""
    if not hasattr(self, '_asteval'):
        self._asteval = asteval.Interpreter()
    self._asteval.symtable[name] = function
    self._registered_functions[name] = function
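# --- Standalone sketch of the same symbol-table injection pattern as
# register_function above, independent of that class: a callable placed in the
# symtable becomes usable inside evaluated expressions.
import asteval

aeval = asteval.Interpreter()
aeval.symtable['clamp'] = lambda x, lo, hi: max(lo, min(hi, x))
print(aeval('clamp(12, 0, 10)'))   # 10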
def get_spn_offset(contents):
    norm_contents = contents.lower()
    if 'manufacturer defined' in norm_contents or 'not defined' in norm_contents or contents.strip() == '':
        return int(0)
    else:
        first = contents.split(' ')[0]
        first = J1939daConverter.just_numerals(first)
        return asteval.Interpreter()(first)
def __init__(self, receiver_model):
    script_engine = ScriptEngine(receiver_model)

    def add_handler(action, callback):
        nonlocal script_engine
        script_engine.add_handler(action, callback)

    def set_data(key, value):
        nonlocal receiver_model
        receiver_model.set_data(key, value)

    def get_data(key):
        nonlocal receiver_model
        return receiver_model.get_data(key)

    def says(message):
        nonlocal receiver_model
        receiver_model.say(message)

    def tell_sender(sender_obj, action, args):
        nonlocal receiver_model
        sender_obj = receiver_model.get_by_id(sender_obj.id)
        receiver_model.tell_sender(sender_obj, action, args)

    def move_sender(sender_obj, target_room_name):
        nonlocal receiver_model
        sender_obj = receiver_model.get_by_id(sender_obj.id)
        receiver_model.move_sender(sender_obj, target_room_name)

    def teleport_sender(sender_obj, target_room_name):
        """Given a ProxyGameObject, find the actual gameobject and move it."""
        nonlocal receiver_model
        sender_obj = receiver_model.get_by_id(sender_obj.id)
        receiver_model.teleport_sender(sender_obj, target_room_name)

    def ensure_obj_data(data):
        nonlocal receiver_model
        receiver_model._ensure_data(data)

    def witch_open(*args, **kwargs):
        raise NotImplementedError("No file access in WITCH")

    self.script_engine = script_engine
    self.interpreter = asteval.Interpreter(
        use_numpy=False,
        max_time=100000.0,  # there's a bug with this and setting it arbitrarily high avoids it
        usersyms=dict(
            open=witch_open,
            split_args=split_args,
            add_handler=add_handler,
            set_data=set_data,
            get_data=get_data,
            says=says,
            witch_tell_sender=tell_sender,
            witch_move_sender=move_sender,
            witch_teleport_sender=teleport_sender,
            ensure_obj_data=ensure_obj_data))
def test_parameters_init_with_asteval():
    """Test for initialization of the Parameters class with asteval."""
    ast_int = asteval.Interpreter()
    msg = ("The use of the 'asteval' argument for the Parameters class was "
           "deprecated in lmfit v0.9.12 and will be removed in a later "
           "release. Please use the 'usersyms' argument instead!")
    with pytest.warns(FutureWarning, match=msg):
        pars = lmfit.Parameters(asteval=ast_int)
    assert pars._asteval == ast_int
def __init__(self, name, conf):
    g4.G4VSensitiveDetector.__init__(self, name)
    self.M = compton.build_transformation(conf['Transformation'], mm, deg)
    aeval = asteval.Interpreter(use_numpy=True)
    for q in g4.hepunit.__dict__:
        aeval.symtable[q] = g4.hepunit.__dict__[q]
    self.vol = np.array(conf['Volume']) * mm
    self.threshold = conf['Threshold'] * MeV
    self.limit_count = conf['LimitCount']
    self.num_flagged = 0
    self.curr_event = -1
def asteval_convert(fcn, handle=None):
    if inspect.isfunction(fcn):
        aeval = asteval.Interpreter()
        if handle is not None:
            fcn_name = handle
        else:
            fcn_name = fcn.__name__
        fcn_vars = inspect.getfullargspec(fcn)[0]
        aeval.symtable[fcn_name] = fcn
        return fcn_name, fcn_vars
    else:
        raise Exception('ERROR: input function is type %s'
                        % str(type(fcn)).split(' ')[1].rsplit('>')[0])
def main(cmdline_args):
    """Avoid polluting the global namespace."""
    evolution = run_evolution(cmdline_args)
    print(repr(evolution))
    evaluator = asteval.Interpreter()
    evaluator.symtable.update(vars(evolution))
    for plot in cmdline_args.plot + cmdline_args.plot_with_tangents:
        plot_data = [evaluator(expression) for expression in plot]
        pyplot.plot(plot_data[0], plot_data[1])
        if len(plot) == 4:
            plot_tangents(*plot_data)
        pyplot.show()
        pyplot.cla()
def apply_fn2vals(df, fns):
    if not fns:
        return df
    evalr = asteval.Interpreter()
    evalr.symtable['pdutil'] = pdutil
    for k, v in fns.items():
        logger.info('transforming col w/ apply_fn', col=k, fn=v.replace('\n', '\\n'))
        if k not in df.columns:
            logger.info('skipping absent column', k=k)
            continue
        map_fn = evalr(v)
        df[k] = df[k].map(map_fn)
    return df
def reset_fiteval(_larch=None, **kws):
    """initialize fiteval for fitting with lmfit"""
    fiteval = _larch.symtable._sys.fiteval = asteval.Interpreter()
    # remove 'print' from asteval symtable, as it is not picklable
    try:
        fiteval.symtable.pop('print')
    except KeyError:
        pass
    fiteval_init = getattr(_larch.symtable._sys, 'fiteval_init', None)
    if fiteval_init is not None:
        for init_item in fiteval_init:
            if isinstance(init_item, (tuple, list)) and len(init_item) == 2:
                key, val = init_item
                fiteval.symtable[key] = val
            else:
                fiteval(init_item)
def _add_density(self, openmc_material):
    """Calculates the density of the Material"""
    if not isinstance(self.density, float):
        if self.density is None and self.density_equation is not None:
            aeval = asteval.Interpreter(usersyms=asteval_user_symbols)

            # Potentially used in the eval part
            aeval.symtable["temperature_in_K"] = self.temperature_in_K
            aeval.symtable["temperature_in_C"] = self.temperature_in_C
            aeval.symtable["pressure_in_Pa"] = self.pressure_in_Pa

            density = aeval.eval(self.density_equation)

            if len(aeval.error) > 0:
                raise aeval.error[0].exc(aeval.error[0].msg)

            if density is None:
                raise ValueError("Density value of " + str(self.material_name)
                                 + " can not be found")
            else:
                self.density = density
        elif (self.atoms_per_unit_cell is not None
              and self.volume_of_unit_cell_cm3 is not None):
            molar_mass = (self._get_atoms_in_crystal()
                          * openmc_material.average_molar_mass)
            mass = self.atoms_per_unit_cell * molar_mass * atomic_mass_unit_in_g
            self.density = mass / self.volume_of_unit_cell_cm3
        else:
            raise ValueError(
                "density can't be set for " + str(self.material_name)
                + "; provide either a density_value, a density_equation as a "
                "string, or atoms_per_unit_cell and volume_of_unit_cell_cm3")
    openmc_material.set_density(self.density_unit,
                                self.density * self.packing_fraction)
    return openmc_material
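# --- Minimal self-contained sketch (not from the original project) of the
# evaluate-then-check-aeval.error pattern used above; the equation and the
# symbol value are made up.
import asteval

aeval = asteval.Interpreter()
aeval.symtable['temperature_in_K'] = 600.0
density = aeval.eval('1000.0 - 0.5 * (temperature_in_K - 273.15)')
if len(aeval.error) > 0:
    raise aeval.error[0].exc(aeval.error[0].msg)
print(density)   # ~836.6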
def thread():
    stdout = io.StringIO()
    stderr = io.StringIO()
    aeval = asteval.Interpreter(writer=stdout, err_writer=stderr, use_numpy=False)
    del aeval.symtable['open']
    aeval.symtable['random'] = random.random
    aeval.symtable['choice'] = random.choice
    aeval.symtable['randrange'] = random.randrange
    r = aeval(query)
    for stream in (stdout, stderr):
        stream.seek(0)
        err = stream.read()
        if err:
            r = err
    queue.put(r)
def calculate_reflectivity(self):
    '''
    Calculate reflectivity for each point
    '''
    self.ZZ_sc1 = initialize_nans([1, len(self.angles), 360, 240])
    self.degr = numpy.arange(0, 360, 1)
    self.r = numpy.arange(0, 240, 1) * 1000  # convert from km to m
    dx = (numpy.meshgrid(self.r, numpy.sin(numpy.deg2rad(self.degr)))[0]
          * numpy.meshgrid(self.r, numpy.sin(numpy.deg2rad(self.degr)))[1])
    self.lon = (self.LON_bilt + (180 / numpy.pi) * (dx / self.r_earth)
                / numpy.cos(self.LAT_bilt * numpy.pi / 180))
    dy = (numpy.meshgrid(self.r, numpy.cos(numpy.deg2rad(self.degr)))[0]
          * numpy.meshgrid(self.r, numpy.cos(numpy.deg2rad(self.degr)))[1])
    self.lat = self.LAT_bilt + (180 / numpy.pi) * (dy / self.r_earth)
    meshgr = numpy.meshgrid(numpy.ones(len(self.degr)),
                            numpy.sin(numpy.deg2rad(self.angles)),
                            self.r)
    self.z = self.height_bilt + (meshgr[0] * meshgr[1] * meshgr[2])
    try:
        h5file = h5py.File(self.filename, 'r')
    except Exception:
        # TODO: add meaningful exception handling
        import pdb
        pdb.set_trace()
    for x in range(len(self.scans)):
        scan1 = h5file.get(self.scans[x])
        PV = numpy.array(scan1.get('scan_Z_data'))
        cal_sc1 = scan1.get('calibration')
        formula = numpy.array_str(
            cal_sc1.attrs.get('calibration_Z_formulas')).strip("['']").split('=')
        # evaluate the calibration expression in a "safe" manner
        Z_sc1 = asteval.Interpreter(symtable={"PV": PV}).eval(formula[1])
        # create random mask with approx self.pdry% of data points
        if self.pdry:
            if self.pdry <= 100:
                # note: numpy.int/numpy.bool aliases were removed in recent
                # numpy releases, so the builtin int/bool are used here
                max_int = int(numpy.round(1 / (self.pdry / 100.)))
            else:
                max_int = 1
            mask = numpy.random.randint(0, max_int, size=Z_sc1.shape).astype(bool)
            # set only points in mask that are < 0 to NaN (corresponds to dry cases)
            Z_sc1[(mask) & (Z_sc1 < 0)] = -999
        for i in range(0, len(self.r)):
            for j in self.degr:
                self.ZZ_sc1[0, x, j, i] = Z_sc1[j, i]
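# --- Standalone sketch (made-up formula) of the calibration step above: the
# right-hand side of a 'Z = a * PV + b' style formula string is evaluated with
# PV as the only symbol visible to the interpreter.
import numpy
import asteval

PV = numpy.array([[0.0, 128.0], [255.0, 64.0]])
formula = 'GEO = 0.5 * PV - 32.0'.split('=')
Z = asteval.Interpreter(symtable={"PV": PV}).eval(formula[1])
print(Z)   # [[-32.   32. ] [ 95.5   0. ]]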
def __init__(self, label: str = None, root_group: ParameterGroup = None):
    """Represents a group of parameters.

    Can contain other groups, creating a tree-like hierarchy.

    Parameters
    ----------
    label :
        The label of the group.
    """
    if label is not None and not Parameter.valid_label(label):
        raise ValueError(f"'{label}' is not a valid group label.")
    self._label = label
    self._parameters = {}
    self._root_group = root_group
    self._evaluator = (
        asteval.Interpreter(symtable=asteval.make_symbol_table(group=self))
        if root_group is None
        else None
    )
    super().__init__()
def process_sas_survey(svy_cfg, facets, client=None, lgr=logger):
    g = svy_cfg
    prefix = g.s3_url_prefix
    lgr.bind(p=prefix)
    evalr = asteval.Interpreter()
    evalr.symtable['pd.util'] = pd.util
    fn = g.rename_cols
    map_fn = evalr(fn)
    df_munger = curry(sdf.munge_df)(
        facets=facets, qids=g.qids, na_syns=g.na_synonyms, col_fn=map_fn,
        fmts=g.patch_format, fpc=g.fpc, lgr=lgr)
    lbl_loader = curry(load_variable_labels)(repl=g.replace_labels)
    xpt_loader = curry(load_sas_xport_df)(lgr=lgr)
    dfs = map(
        lambda r: pipe(prefix + r.xpt,
                       delayed(xpt_loader),
                       delayed(df_munger(r=r,
                                         lbls=lbl_loader(prefix + r.format,
                                                         prefix + r.formas)))),
        [r for idx, r in g.meta.iterrows()])
    lgr.info('merging SAS dfs')
    dfs = delayed(pd.concat)(dfs, ignore_index=True)
    scols = delayed(
        lambda xf: list(xf.columns
                        .intersection(set(g.qids)
                                      .union(facets))))(dfs)
    lgr.info('re-filtering question and facet columns to cast to category dtype',
             cols=scols)
    dfz = (dfs
           .apply(lambda x: x.astype('category'))
           .reset_index(drop=True)
           .assign(year=dfs['year'].astype(int),
                   sitecode=dfs['sitecode'].astype('category'),
                   weight=dfs['weight'].astype(float),
                   strata=dfs['strata'].astype(int, errors='ignore'),
                   psu=dfs['psu'].astype(int, errors='ignore'))
           .reset_index(drop=True))
    if g.fpc:
        dfz = (dfz.assign(fpc=dfs['fpc'].astype(int, errors='ignore'),
                          sample_ct=dfs['sample_ct'].astype(int, errors='ignore'))
               .reset_index(drop=True))
    dfz.visualize()
    lgr.info('merged SAS dfs')
    lgr.unbind('p')
    return dfz
def recalculate_column(self, col_name, expression=None):
    """Recalculate column values.

    Calculate column values based on its expression. Each column can use
    values from columns to the left of itself. Those values can be accessed
    by using the column name as a variable in the expression.

    Args:
        col_name: a string containing the column name.
        expression: an optional string that contains the mathematical
            expression. If None (the default) the expression is taken from the
            column information.

    Returns:
        True if the calculation was successful, False otherwise.
    """
    if expression is None:
        expression = self._calculated_column_expression[col_name]
    objects = self._get_accessible_columns(col_name)
    aeval = asteval.Interpreter(usersyms=objects)
    output = aeval(expression)
    if aeval.error:
        self._is_calculated_column_valid[col_name] = False
        self.emit_column_changed(col_name)
        for err in aeval.error:
            exc, msg = err.get_error()
            self.main_app.ui.statusbar.showMessage(
                f"ERROR: {exc}: {msg}.", timeout=MSG_TIMEOUT)
    elif output is not None:
        self._is_calculated_column_valid[col_name] = True
        if isinstance(output, pd.Series):
            output = output.astype("float64")
        else:
            output = float(output)
        self._data[col_name] = output
        self.emit_column_changed(col_name)
        self.main_app.ui.statusbar.showMessage(
            "Recalculated column values.", timeout=MSG_TIMEOUT)
        return True
    else:
        print(f"No evaluation error but no output for expression {expression}.")
    return False
def wrapper(self, *args, **kwargs):
    very_raw = fun(self)
    if isinstance(very_raw, list):
        very_raw = very_raw[0]
    val_dict = {'value_raw': very_raw}
    logger.debug('attempting to calibrate')
    if val_dict['value_raw'] is None:
        return None
    if self._calibration is None:
        pass
    elif isinstance(self._calibration, str):
        evaluator = asteval.Interpreter(usersyms=cal_functions)
        if isinstance(val_dict['value_raw'], float):
            eval_str = self._calibration.format(val_dict['value_raw'])
        elif isinstance(val_dict['value_raw'], six.string_types):
            eval_str = self._calibration.format(val_dict['value_raw'].strip())
        else:
            eval_str = self._calibration.format(val_dict['value_raw'])
        logger.debug("formatted cal is:\n{}".format(eval_str))
        try:
            cal = evaluator(eval_str)
        except OverflowError:
            logger.debug('GOT AN OVERFLOW ERROR')
            cal = None
        except Exception as e:
            raise exceptions.DriplineValueError(repr(e), result=val_dict)
        if cal is not None:
            val_dict['value_cal'] = cal
    elif isinstance(self._calibration, dict):
        logger.debug('calibration is dictionary, looking up value')
        if val_dict['value_raw'] in self._calibration:
            val_dict['value_cal'] = self._calibration[val_dict['value_raw']]
        else:
            raise exceptions.DriplineValueError(
                'raw value <{}> not in cal dict'.format(repr(val_dict['value_raw'])),
                result=val_dict)
    else:
        logger.warning('the _calibration property is of unknown type')
    return val_dict
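# --- Toy sketch of the string-calibration branch above (expression and raw
# value are made up): the raw reading is substituted into a format string and
# the result is evaluated with asteval rather than eval().
import asteval

evaluator = asteval.Interpreter()
calibration = '({} - 32) * 5 / 9'           # e.g. a Fahrenheit-to-Celsius calibration
print(evaluator(calibration.format(98.6)))  # ~37.0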
async def filter_asteval(self, params, filter_expr):
    """Filter parameters using an asteval expression."""
    # Load asteval lazily for fast startup
    import asteval

    aeval = asteval.Interpreter(no_print=True)
    new_params = []
    for param in params:
        # Note that by exposing param without copying,
        # the user-supplied filter_expr may modify the content of param.
        # However, we allow it because the modification is only visible via get_param(),
        # which is used by dump commands and no other commands such as add or enqueue.
        aeval.symtable['param'] = aeval.symtable['p'] = param
        result = aeval.eval(filter_expr, show_errors=False)
        if result:
            new_params.append(param)
    return new_params
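# --- Synchronous, self-contained sketch of the same per-item filtering idea
# (the parameter dicts and the filter expression are made up):
import asteval

aeval = asteval.Interpreter(no_print=True)
params = [{'lr': 0.1}, {'lr': 0.001}]
kept = []
for p in params:
    aeval.symtable['p'] = p
    if aeval.eval("p['lr'] < 0.01", show_errors=False):
        kept.append(p)
print(kept)   # [{'lr': 0.001}]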
def main():
    args = get_args()
    conf = args.conf
    data = scan_hdf(conf['input'], args.input_filename)
    num_steps = len(data['t'])
    setup_plot()

    # create safe interpreter for evaluation of scale expressions
    aeval = asteval.Interpreter()
    import pbpl.units
    for x in pbpl.units.__all__:
        aeval.symtable[x] = pbpl.units.__dict__[x]

    output = PdfPages(args.output)
    for i in range(num_steps):
        plot_frame(output, data, i, conf['xaxis'], conf['yaxis'], aeval)
    output.close()