def process_variables(config, manager):
    """Render all string elements of the config against defined variables."""
    # Nothing to do when variable substitution is absent or explicitly disabled.
    if 'variables' not in config or config.get('variables') is False:
        return

    # Jinja blocks are disabled entirely; only {? ... ?} variable markers render.
    jinja_env = NativeEnvironment(
        block_start_string='^^disabled^^',
        block_end_string='^^disabled^^',
        variable_start_string='{?',
        variable_end_string='?}',
    )

    source = config['variables']
    if isinstance(source, bool):
        log.debug('trying to load variables from DB')
        defined_vars = variables_from_db()
    elif isinstance(source, dict):
        log.debug('loading variables from config')
        defined_vars = source
    else:
        # Anything else is treated as a file reference; the file contents are
        # also persisted back to the DB.
        log.debug('trying to load variables from file')
        defined_vars = variables_from_file(manager.config_base, source)
        log.debug('updating DB with variable file contents')
        variables_to_db(defined_vars)

    jinja_env.globals = defined_vars
    _process(config, jinja_env)
    return config
def cart_render_portal():
    """Render the cart page for the product/username passed as query args."""
    import ast

    product = request.args.get('product')
    # SECURITY FIX: the original rendered the raw 'product' query parameter
    # through a jinja NativeEnvironment, which is server-side template
    # injection (SSTI) — any visitor could execute template expressions.
    # ast.literal_eval reproduces the intended "string -> native python type"
    # conversion (e.g. a dict repr) without evaluating templates.
    if product is not None:
        try:
            product = ast.literal_eval(product)
        except (ValueError, SyntaxError):
            # Not a python literal — keep the raw string, which matches what
            # NativeEnvironment.render() returned for non-literal input.
            pass
    username = request.args.get('username')
    return render_template('cart.html', product=product, username=username, error=False)
def _render_query(query, parameters):
    """
    Render both jinja or %()s templates in query
    while keeping type of parameters

    Recurses into dicts and lists; strings without parameters are returned
    unchanged.  Inputs are deep-copied so the caller's structures are never
    mutated.
    """
    if isinstance(query, dict):
        # Render every value; keys are left as-is.
        return {key: _render_query(value, parameters) for key, value in deepcopy(query).items()}
    elif isinstance(query, list):
        return [_render_query(elt, parameters) for elt in deepcopy(query)]
    elif isinstance(query, str):
        if not _has_parameters(query):
            return query
        # Replace param templating with jinja templating:
        query = re.sub(RE_PARAM, r'{{ \g<1> }}', query)
        # Add quotes to string parameters to keep type if not complex
        # (only when the whole string is a lone jinja expression — assumes
        # that is what RE_JINJA_ALONE matches; regex defined elsewhere).
        clean_p = deepcopy(parameters)
        if re.match(RE_JINJA_ALONE, query):
            clean_p = _prepare_parameters(clean_p)
        env = NativeEnvironment()
        res = env.from_string(query).render(clean_p)
        # NativeEnvironment's render() isn't recursive, so we need to
        # apply recursively the literal_eval by hand for lists and dicts:
        if isinstance(res, (list, dict)):
            return _prepare_result(res)
        return res
    else:
        # Non-template scalar (int, None, ...): pass through untouched.
        return query
def purchase():
    """Render the purchase page for the product/username passed as query args."""
    import ast

    product = request.args.get('product')
    # SECURITY FIX: the original jinja-rendered the raw query parameter
    # (server-side template injection).  ast.literal_eval performs the
    # intended conversion of the string to a native python type (dictionary)
    # without executing template expressions.
    if product is not None:
        try:
            product = ast.literal_eval(product)
        except (ValueError, SyntaxError):
            pass  # not a python literal; keep the raw string
    username = request.args.get('username')
    return render_template('purchase.html', product=product, error=False, username=username)
def __init__(self, base_uri):
    # Rendering context exposed to every template: the process environment
    # plus a loader that resolves spec URIs relative to base_uri.
    self.ctx = {
        'env': os.environ,
        'load_spec': lambda uri, *args, **kwargs: load_spec(uri, base_uri, *args, **kwargs)
    }
    self.env = NativeEnvironment()
    # SECURITY NOTE(review): exposing builtin eval as a jinja filter lets any
    # template execute arbitrary python — only acceptable if all templates
    # are fully trusted; confirm.
    self.env.filters['eval'] = eval
def jinja(template, _env=None, **data):
    """Recursively renders a python dict, list or str, evaluating strings along the way"""
    # Start from a copy of the globally configured variables so renders can't
    # mutate the shared config; per-call data overrides them.
    kwargs = deepcopy(config.variables)
    kwargs.update(data)
    if _env is None:
        # NOTE(review): BaseLoader is passed as a class, not an instance; this
        # only works because from_string() never consults the loader — confirm
        # intent (jinja normally expects loader=BaseLoader()).
        # DebugUndefined leaves unknown {{ vars }} in place instead of erroring.
        _env = NativeEnvironment(loader=BaseLoader, undefined=DebugUndefined)
    # `json` module is exposed to templates as a render-time name.
    return _env.from_string(str(template)).render(**kwargs, json=json)
def create_config_file(self):
    # Render the config template for this tentacle subtype and write it to disk.
    try:
        cfg_template = NativeEnvironment().from_string(
            self.tentacle_creator.get_config_templates()[self.subtype])
        if not os.path.isfile(self.get_config_path()):
            with open(self.get_config_path(), "w") as config_file:
                # [1:] drops the first rendered character — presumably a
                # leading newline in the template; confirm.
                config_file.write(cfg_template.render()[1:])
        else:
            raise Exception("A config with this name already exists")
    except Exception:
        # NOTE(review): this swallows every failure, including the
        # "already exists" error raised just above, so callers get no
        # feedback at all — presumably best-effort, but confirm it is
        # intentional (the sibling create_file() re-raises instead).
        pass
def purchase_tentative():
    """Compute the total cost for the requested quantity and show confirmation.

    Re-renders the purchase page with an error flag when quantity is '0';
    an empty quantity defaults to 1.
    """
    import ast

    username = request.args.get('username')
    product = request.args.get('product')
    qty = request.form.get('quantity')
    if qty == "":
        qty = 1
    if qty == '0':
        return render_template('purchase.html', username=username, product=product, error=True)
    # SECURITY FIX: the original jinja-rendered the raw 'product' parameter
    # (server-side template injection).  ast.literal_eval converts the string
    # back to the product dict without executing template expressions.
    product = ast.literal_eval(product)
    product["total_cost"] = int(product["cost"]) * int(qty)
    product["quantity"] = qty
    return render_template('purchase_confirmation.html', product=product, username=username)
def add_to_cart():
    # getting the quantity of items required and calculating the total cost
    import ast

    username = request.args.get('username')
    product = request.args.get('product')
    qty = request.form.get('quantity')
    if qty == "":
        qty = 1
    if qty == '0':
        return render_template('cart.html', product=product, username=username, error=True)
    # SECURITY FIX: the original jinja-rendered the raw 'product' parameter
    # (server-side template injection).  ast.literal_eval converts the string
    # back to the product dict without executing template expressions.
    product = ast.literal_eval(product)
    product["customer_name"] = username
    product["total_cost"] = int(product["cost"]) * int(qty)
    product["quantity"] = qty
    add_this_product_to_cart(product)
    return redirect(url_for('recent_products_customer', is_message=True, message="cart", username=username))
def __init__(self, base_uri):
    # Template rendering context: process environment plus a spec loader
    # that resolves URIs relative to base_uri.
    self.ctx = {
        'env': os.environ,
        'load_spec': lambda uri, *args, **kwargs: load_spec(uri, base_uri, *args, **kwargs)
    }
    self.env = NativeEnvironment()
    # SECURITY NOTE(review): builtin eval exposed as a jinja filter means any
    # template can execute arbitrary python — templates must be trusted.
    self.env.filters['eval'] = eval
class TemplateExpander(object):
    """Object to expand templates found as strings in structured data.

    Walks lists/dicts recursively; strings containing template
    metacharacters are rendered against a fixed context (process env +
    a base_uri-relative spec loader), everything else passes through.
    """

    def __init__(self, base_uri):
        self.ctx = {
            'env': os.environ,
            'load_spec': lambda uri, *args, **kwargs: load_spec(uri, base_uri, *args, **kwargs)
        }
        self.env = NativeEnvironment()
        # SECURITY NOTE(review): builtin eval as a jinja filter = arbitrary
        # code execution for whoever controls the templates; confirm trusted.
        self.env.filters['eval'] = eval

    def __call__(self, node, seen=None):
        if seen is None:
            seen = {}
        if id(node) in seen:
            # NOTE(review): `seen` is threaded through but never populated,
            # so this guard cannot currently fire — looks like intended
            # cycle protection; confirm.
            return node
        elif isinstance(node, list):
            return [self(n, seen) for n in node]
        elif isinstance(node, dict):
            # Expand both keys and values.
            return {
                self(k, seen): self(v, seen)
                for k, v in node.items()
            }
        elif isinstance(node, str):
            if re_template_metacharacters.search(node):
                t = self.env.from_string(node)
                return t.render(self.ctx)
            else:
                return node
        # Any other scalar type is returned unchanged.
        return node
def _load_collections(self, attr, ds):
    """Validate the 'collections' field value, warning on templated names.

    Returns the validated list, or None when the list is empty/unset.
    """
    # We are always a mixin with Base, so we can validate this untemplated
    # field early on to guarantee we are dealing with a list.
    ds = self.get_validated_value('collections', self._collections, ds, None)

    # this will only be called if someone specified a value; call the shared value
    _ensure_default_collection(collection_list=ds)

    if not ds:
        # don't return an empty collection list, just return None
        return None

    # This duplicates static attr checking logic from post_validate()
    # because if the user attempts to template a collection name, it may
    # error before it ever gets to the post_validate() warning (e.g. trying
    # to import a role from the collection).
    env = NativeEnvironment()
    for collection_name in ds:
        if is_template(collection_name, env):
            display.warning(
                '"collections" is not templatable, but we found: %s, '
                'it will not be templated and will be used "as is".' % (collection_name))

    return ds
class TemplateExpander(object):
    """Object to expand templates found as strings in structured data.

    Recursively expands strings inside lists/dicts that contain template
    metacharacters, rendering them against the process environment and a
    base_uri-relative spec loader.
    """

    def __init__(self, base_uri):
        self.ctx = {
            'env': os.environ,
            'load_spec': lambda uri, *args, **kwargs: load_spec(uri, base_uri, *args, **kwargs)
        }
        self.env = NativeEnvironment()
        # SECURITY NOTE(review): builtin eval as a jinja filter allows
        # arbitrary code execution from templates; templates must be trusted.
        self.env.filters['eval'] = eval

    def __call__(self, node, seen=None):
        if seen is None:
            seen = {}
        if id(node) in seen:
            # NOTE(review): `seen` is never written to, so this branch is
            # currently unreachable — presumably cycle protection; confirm.
            return node
        elif isinstance(node, list):
            return [self(n, seen) for n in node]
        elif isinstance(node, dict):
            # Both keys and values are expanded.
            return {self(k, seen): self(v, seen) for k, v in node.items()}
        elif isinstance(node, str):
            if re_template_metacharacters.search(node):
                t = self.env.from_string(node)
                return t.render(self.ctx)
            else:
                return node
        # Non-container, non-string values pass through unchanged.
        return node
def create_file(self):
    """Render the tentacle template for this subtype and write it to disk.

    Raises:
        Exception: if a tentacle file with this name already exists, or if
            looking up / rendering / writing the template fails.
    """
    # FIX: the original wrapped the whole body in
    # `except Exception as e: raise e`, which adds nothing and only muddies
    # the traceback; letting exceptions propagate is behaviourally identical
    # for callers and easier to debug.
    template = NativeEnvironment().from_string(
        self.tentacle_creator.get_templates()[self.subtype])
    if not os.path.isfile(self.get_path()):
        with open(self.get_path(), "w") as tentacle_file:
            # Description header first, then the rendered template body.
            tentacle_file.write(self.tentacle_creator.get_templates()["Description"])
            tentacle_file.write(template.render(name=self.name,
                                                big_name=self.name.title(),
                                                t_type=self.t_type,
                                                subtype=self.subtype,
                                                version=self.version,
                                                requirements=self.requirements,
                                                tests=self.tests))
        # TODO add __init__.py management
    else:
        raise Exception("A tentacle with this name already exists")
def purchase_confirmation():
    """Finalize a purchase: enrich the product record, generate the bill,
    then redirect back to the customer's product listing."""
    import ast

    username = request.args.get('username')
    product = request.args.get('product')
    product_cnt = request.args.get('product_cnt')
    cart_cost = request.args.get('cart_cost')
    # SECURITY FIX: the original jinja-rendered the raw 'product' parameter
    # (server-side template injection).  ast.literal_eval converts the string
    # back to the product dict without executing template expressions.
    product = ast.literal_eval(product)
    product["customer_name"] = username
    product["no_of_items_in_cart"] = int(product_cnt)
    if product["no_of_items_in_cart"] > 1:
        # this condition is used since items checked out from cart are stored
        # as single nested dictionary
        product["cart_cost"] = cart_cost
    product["mode"] = request.args.get('mode')
    if product["mode"] == 'cart':
        product["cart_cost"] = request.args.get('cart_cost')
    generatebill(product, username)  # generate bill
    return redirect(url_for('recent_products_customer', is_message=True, message="buy", username=username))
def __init__(self):
    # Build a catalog of templates found under Config.TEMPLATE_PATH,
    # extracting each template's undeclared variables plus metainfo
    # (readable names, datatypes, descriptions) when available.
    self.templates = {}
    T_PATH = Config.TEMPLATE_PATH
    env = NativeEnvironment(loader=FileSystemLoader(T_PATH))
    templates = env.list_templates('.jinja')
    for template_name in templates:
        try:
            template_source = env.loader.get_source(env, template_name)[0]
            parsed_content = env.parse(template_source)
            # Variables referenced by the template but never assigned in it.
            vars = meta.find_undeclared_variables(parsed_content)
            logger.info('Template: {}'.format(template_name))
            metainfo = MetaInformation(pathlib.Path(T_PATH / template_name))
            variables = []
            for var in vars:
                logger.info('.. Variable: {}'.format(var))
                if metainfo:
                    readablename, datatype, description, source = metainfo.getVarInfo(
                        var)
                    variables.append(
                        dict(variable=var,
                             readablename=readablename,
                             datatype=datatype,
                             description=description,
                             source=source))
            tid, tname, description = metainfo.getInfo()
            # Fall back to the filename / a hint when metainfo is incomplete.
            if tname is None:
                tname = template_name
            if description is None:
                description = 'No metainfo-file for this template, create one!'
            self.templates[tid] = dict(id=tid,
                                       templatename=tname,
                                       description=description,
                                       vars=variables)
        except TemplateNotValidError as error:
            # Invalid templates are skipped; the exception detail itself is
            # not logged — only the template name.
            logger.error(
                'Can not load template from filesystem: {}'.format(
                    template_name))
def temp(swagger):
    """Build a Flask app with one generated handler per swagger method.

    BUG FIX: the original closed the route-path quote AFTER the methods list,
    producing `@app.route("path, methods=['GET']")` — a single string argument
    — so no HTTP method was ever registered and the path was wrong.  The
    quote now closes right after the path.

    SECURITY NOTE(review): exec() on generated source executes code derived
    from the swagger document; only use with fully trusted specs.
    """
    app = Flask(__name__)
    func_template = """
{{ endpoint }}
def func_{{ func_postfix }}():
    {{ func_body }}
"""
    env = NativeEnvironment()
    i: int = 0
    for method in swagger.methods:
        i = i + 1
        func_body = generate_function_body(method)
        # e.g. @app.route("/pets", methods=['GET'])
        endpoint = f'@app.route("{method.path}", methods=[\'{method.method_name.upper()}\'])'
        result = env.from_string(func_template).render(endpoint=endpoint,
                                                       func_postfix=i,
                                                       func_body=func_body)
        exec(result)
    return app
def __init__(self, rules: List[ExtendedRule], tokenizer_fn=tokenize):
    """Build matcher state from the given grammar rules."""
    self.rules = rules
    self.resolvers = [
        StemmerResolver(Stemmer('russian'))
    ]
    self.j2_env = NativeEnvironment()
    self.tokenizer_fn = tokenizer_fn
    # Collect the unique intent productions (set order, as before).
    intent_productions = {
        rule.production for rule in rules
        if rule.production.startswith('intent')
    }
    self.intents = tuple(intent_productions)
    self._parsing_tables = {}
def main():
    # Ad-hoc driver: load one player's opendota matches, build the phrase
    # generator from grammar files + a spreadsheet of phrase rules, then
    # print generated phrases / fuzzified variables for the FIRST match only.
    logging.basicConfig(level=logging.DEBUG)
    player_name = 'Shide'
    hero = '{hero}'
    with open('opendota_matches_shide.json', encoding='utf-8') as f:
        MATCHES = json.load(f)
    # Merge phrase rules and hero names into one grammar dict.
    grammar = {
        **yaml_dict_from_file('../resources/dota/phrase_rules.yaml'),
        **yaml_dict_from_file('../resources/dota/heroes.yaml')
    }
    j2env = NativeEnvironment()
    rules = rules_from_grammar_dict(grammar, j2env)
    nlu = TokemaNlu(rules)
    client = GoogleSpreadsheetsClient.from_file('../.gsc_service_account.json')
    phrases_table = client.get_table_by_title('ggbot_dota', worksheet='phrases')
    phrase_rules = parse_rules(phrases_table, nlu)
    pgen = PhraseGenerator(phrase_rules)
    variables = get_dota_variables()
    for match in MATCHES:
        print('\n\n\n')
        pprint.pprint(match)
        print('\n')
        p = pgen.generate_phrase(match, player_name, hero)
        for v in variables:
            if v.name in match:
                value = match[v.name]
                #linguistic_value = v.fuzzify_max(value)
                res = v.fuzzify_all(value)
                print(f'{v.name}: {value}')
                print(res)
                print()
        # Only the first match is processed.
        break
def load_config_files(*files):
    """Load and merge YAML config files.

    Files that come earlier in the list take precedence over files
    that come later in the list.

    Args:
        *files (list) : Variable number of file paths.

    Example::

        load_config_files(file1, file2, file3, ...):
    """
    # Drop missing/empty entries, then make each file's directory importable
    # (side effect on sys.path kept for any python referenced by the configs).
    files = [file for file in files if file is not None and file != '']
    for file in files:
        sys.path.insert(0, os.path.dirname(file))
    LOG.debug('Loading config files: {}'.format(files))
    # hiyapyco merges in order least important to most important
    files.reverse()
    expanded_files = process_templates(files)
    # Module-global: hiyapyco interpolates with ( ... ) delimiters, and
    # undefined variables are logged (LogUndefined) rather than failing.
    hiyapyco.jinja2env = NativeEnvironment(variable_start_string='(',
                                           variable_end_string=')',
                                           undefined=LogUndefined)
    cfg_dict = hiyapyco.load(expanded_files,
                             method=hiyapyco.METHOD_MERGE,
                             interpolate=True,
                             failonmissingfiles=True)
    if LOG.getEffectiveLevel() == logLevelFromName("DEBUG"):
        LOG.debug("Merged YAML config:\n\n%s\n",
                  hiyapyco.dump(cfg_dict, default_flow_style=False))
    return cfg_dict
def _test():
    # Smoke test: load all grammar files, build the NLU, and benchmark
    # intent matching on a sample phrase (results are only timed/printed).
    from tokema.utils import benchmark

    logging.basicConfig(level=logging.INFO)
    grammar_files = [
        '../common/common.yaml',
        '../common/datetime.yaml',
        '../common/intents.yaml',
        '../common/money.yaml',
        '../common/numbers.yaml',
        '../dota/grammar.yaml',
        '../dota/heroes.yaml',
        '../search/genres.yaml',
        '../search/grammar.yaml',
    ]
    jinja_env = NativeEnvironment()
    rules = []
    for filename in grammar_files:
        rules.extend(load_rules_from_yaml(filename, jinja_env))
    nlu = TokemaNlu(rules)
    test_phrases = [
        #'посоветуй плз кого лучше взять против инво в мид',
        #'тактические настольные инди картонки типа доты',
        #'игры типа доты',
        "найди игры на 5 к человек в жанре тактические настольные инди картонки типа доты"
    ]
    for phrase in test_phrases:
        #match = nlu.match_intent_one_of(phrase, ['intent-search-coop'])
        with benchmark(phrase):
            match = nlu.match_any_intent(phrase)
def env():
    """Provide a fresh jinja NativeEnvironment instance."""
    native_env = NativeEnvironment()
    return native_env
def native_eval(cls, msg, resource):
    """Evaluate *msg* as a jinja expression against the resource context,
    returning a native python value (StrictUndefined: unknown names raise)."""
    wrapped = "{{ %s }}" % msg
    environment = NativeEnvironment(loader=BaseLoader, undefined=StrictUndefined)
    template = environment.from_string(wrapped)
    context = cls._get_context(resource)
    return template.render(context)
def async_native_env():
    """Create a NativeEnvironment with async rendering support enabled."""
    environment = NativeEnvironment(enable_async=True)
    return environment
from ggbot.text.distance import damerau_levenshtein_distance from ggbot.text.base import NluBase, IntentMatchResultBase from ggbot.text.tokenization import * from ggbot.utils import load_yamls from ggbot.text.slot_matching import * __all__ = [ 'Matcher', 'TGrammar', 'GrammarBasedNlu', 'GrammarBasedNluMatchResult', 'grammar_from_dict', ] STEMMER = Stemmer('russian') JINJA_ENV = NativeEnvironment() def to_string(tokens: Iterable[Token]) -> str: return ' '.join(t.raw for t in tokens) @dataclass class Rule: tokens: tuple[Token] meta: dict = field(default_factory=dict) _targets: set[str] = None def __post_init__(self): for k, v in self.meta.items():
FRECKLES_RUN_LOG_FILE_LOCK = os.path.join(tempfile.gettempdir(), "_freckles_run_log_lock") FRECKLES_LAST_RUN_FILE_PATH = os.path.join(FRECKLES_SHARE_DIR, FRECKLES_LAST_RUN_FILE_NAME) # templates if not hasattr(sys, "frozen"): freckles_src_template_dir = os.path.join(os.path.dirname(__file__), "templates", "src") else: freckles_src_template_dir = os.path.join(sys._MEIPASS, "freckles", "templates", "src") # --------------------------------------------------------------- # jinja-related defaults DEFAULT_FRECKLES_JINJA_ENV = NativeEnvironment( **JINJA_DELIMITER_PROFILES["freckles"]) for filter_name, filter_details in jinja2_filters.ALL_FRUTIL_FILTERS.items(): DEFAULT_FRECKLES_JINJA_ENV.filters[filter_name] = filter_details["func"] DEFAULT_RUN_CONFIG_JINJA_ENV = NativeEnvironment( **JINJA_DELIMITER_PROFILES["default"]) DEFAULT_FRECKLES_SSH_SESSION_SOCK = os.path.join(FRECKLES_SHARE_DIR, "ssh", "freckles_ssh_sock") DATACLASS_CERBERUS_TYPE_MAP = { "string": "str", "float": "float", "integer": "int", "boolean": "bool", "dict": "Dict",
def render_template(template_str, data):
    """Render *template_str* against *data*, preserving native python types."""
    return NativeEnvironment().from_string(template_str).render(data)
def get_config_value_and_origin(self, config, cfile=None, plugin_type=None, plugin_name=None, keys=None, variables=None, direct=None):
    ''' Given a config key figure out the actual value and report on the origin of the settings

    Sources are consulted in order: direct plugin args, variables, playbook
    keywords (explicit then automapped), CLI args, environment variables,
    config file (ini/yaml), and finally the definition's default.
    '''
    if cfile is None:
        # use default config
        cfile = self._config_file

    if config == 'CONFIG_FILE':
        return cfile, ''

    # Note: sources that are lists listed in low to high precedence (last one wins)
    value = None
    origin = None

    defs = self.get_configuration_definitions(plugin_type, plugin_name)
    if config in defs:

        aliases = defs[config].get('aliases', [])

        # direct setting via plugin arguments, can set to None so we bypass rest of processing/defaults
        if direct:
            if config in direct:
                value = direct[config]
                origin = 'Direct'
            else:
                direct_aliases = [direct[alias] for alias in aliases if alias in direct]
                if direct_aliases:
                    value = direct_aliases[0]
                    origin = 'Direct'

        if value is None and variables and defs[config].get('vars'):
            # Use 'variable overrides' if present, highest precedence, but only present when querying running play
            value, origin = self._loop_entries(variables, defs[config]['vars'])
            origin = 'var: %s' % origin

        # use playbook keywords if you have em
        if value is None and defs[config].get('keyword') and keys:
            value, origin = self._loop_entries(keys, defs[config]['keyword'])
            origin = 'keyword: %s' % origin

        # automap to keywords
        # TODO: deprecate these in favor of explicit keyword above
        if value is None and keys:
            if config in keys:
                value = keys[config]
                keyword = config
            elif aliases:
                for alias in aliases:
                    if alias in keys:
                        value = keys[alias]
                        keyword = alias
                        break
            if value is not None:
                origin = 'keyword: %s' % keyword

        if value is None and 'cli' in defs[config]:
            # avoid circular import .. until valid
            from ansible import context
            value, origin = self._loop_entries(context.CLIARGS, defs[config]['cli'])
            origin = 'cli: %s' % origin

        # env vars are next precedence
        if value is None and defs[config].get('env'):
            value, origin = self._loop_entries(py3compat.environ, defs[config]['env'])
            origin = 'env: %s' % origin

        # try config file entries next, if we have one
        if self._parsers.get(cfile, None) is None:
            self._parse_config_file(cfile)

        if value is None and cfile is not None:
            ftype = get_config_type(cfile)
            if ftype and defs[config].get(ftype):
                if ftype == 'ini':
                    # load from ini config
                    try:
                        # FIXME: generalize _loop_entries to allow for files also, most of this code is dupe
                        for ini_entry in defs[config]['ini']:
                            temp_value = get_ini_config_value(
                                self._parsers[cfile], ini_entry)
                            if temp_value is not None:
                                value = temp_value
                                origin = cfile
                                if 'deprecated' in ini_entry:
                                    self.DEPRECATED.append(
                                        ('[%s]%s' % (ini_entry['section'], ini_entry['key']),
                                         ini_entry['deprecated']))
                    except Exception as e:
                        sys.stderr.write(
                            "Error while loading ini config %s: %s" % (cfile, to_native(e)))
                elif ftype == 'yaml':
                    # FIXME: implement, also, break down key from defs (. notation???)
                    origin = cfile

        # set default if we got here w/o a value
        if value is None:
            if defs[config].get('required', False):
                if not plugin_type or config not in INTERNAL_DEFS.get(plugin_type, {}):
                    raise AnsibleError(
                        "No setting was provided for required configuration %s" % to_native(
                            _get_entry(plugin_type, plugin_name, config)))
            else:
                origin = 'default'
                value = defs[config].get('default')
                if isinstance(value, string_types) and (
                        value.startswith('{{') and value.endswith('}}')) and variables is not None:
                    # template default values if possible
                    # NOTE: cannot use is_template due to circular dep
                    try:
                        t = NativeEnvironment().from_string(value)
                        value = t.render(variables)
                    except Exception:
                        pass  # not templatable

        # ensure correct type, can raise exceptions on mismatched types
        try:
            value = ensure_type(value, defs[config].get('type'), origin=origin)
        except ValueError as e:
            if origin.startswith('env:') and value == '':
                # this is empty env var for non string so we can set to default
                origin = 'default'
                value = ensure_type(defs[config].get('default'), defs[config].get('type'), origin=origin)
            else:
                raise AnsibleOptionsError(
                    'Invalid type for configuration option %s (from %s): %s' %
                    (to_native(
                        _get_entry(plugin_type, plugin_name, config)).strip(),
                     origin, to_native(e)))

        # deal with restricted values
        if value is not None and 'choices' in defs[config] and defs[config]['choices'] is not None:
            invalid_choices = True  # assume the worst!
            if defs[config].get('type') == 'list':
                # for a list type, compare all values in type are allowed
                invalid_choices = not all(choice in defs[config]['choices']
                                          for choice in value)
            else:
                # these should be only the simple data types (string, int, bool, float, etc) .. ignore dicts for now
                invalid_choices = value not in defs[config]['choices']

            if invalid_choices:
                if isinstance(defs[config]['choices'], Mapping):
                    valid = ', '.join([
                        to_text(k) for k in defs[config]['choices'].keys()
                    ])
                elif isinstance(defs[config]['choices'], string_types):
                    valid = defs[config]['choices']
                elif isinstance(defs[config]['choices'], Sequence):
                    valid = ', '.join(
                        [to_text(c) for c in defs[config]['choices']])
                else:
                    valid = defs[config]['choices']

                raise AnsibleOptionsError(
                    'Invalid value "%s" for configuration option "%s", valid values are: %s' %
                    (value,
                     to_native(_get_entry(plugin_type, plugin_name, config)), valid))

        # deal with deprecation of the setting
        if 'deprecated' in defs[config] and origin != 'default':
            self.DEPRECATED.append(
                (config, defs[config].get('deprecated')))
    else:
        raise AnsibleError(
            'Requested entry (%s) was not defined in configuration.' %
            to_native(_get_entry(plugin_type, plugin_name, config)))

    return value, origin
async def main(config_filename: str = 'app.toml'):
    # Entry point: load TOML config, build the NLU + bot components, then
    # run the discord client until it stops (closing it on the way out).

    # Loading config
    _logger.info(f'Loading config from {config_filename}')
    with open(config_filename, 'r', encoding='utf-8') as f:
        config = toml.load(f)

    # Setting log level
    log_level_name = require_item_from_dict_or_env(config, 'logging.level')
    log_level = getattr(logging, log_level_name, logging.INFO)
    logging.basicConfig(level=log_level)

    # Loading grammar files
    grammar_data = {}
    for filename in config['resources']['grammar_files']:
        # NOTE(review): f-string has no placeholder — presumably this was
        # meant to log {filename}; confirm.
        _logger.info(f'Loading grammar from (unknown)')
        grammar_part = yaml_dict_from_file(filename).get_data()
        grammar_data.update(grammar_part)
    rules_j2_env = NativeEnvironment()
    rules = rules_from_grammar_dict(grammar_data, rules_j2_env)
    nlu = TokemaNlu(rules)

    """ Bot initialization and startup """
    import time
    import datetime
    # Templates get live time helpers as globals.
    template_env = NativeEnvironment()
    template_env.globals['time'] = time.time
    template_env.globals['now'] = datetime.datetime.now
    context = BotContext(template_env=template_env)

    """ IGDB """
    from ggbot.igdb import IgdbClient
    igdb = await IgdbClient.create(
        secret=require_item_from_dict_or_env(config, 'igdb.secret'),
        client_id=require_item_from_dict_or_env(config, 'igdb.client_id'))

    """ Dota """
    from ggbot.dota import Dota
    from ggbot.dota.phrases import PhraseGenerator, parse_rules
    from ggbot.spreadsheet import GoogleSpreadsheetsClient
    gsc = GoogleSpreadsheetsClient.from_file(
        filename=require_item_from_dict_or_env(
            config, 'dota.gsc_service_account_file'))
    phrases_table = gsc.get_table_by_title('ggbot_dota', worksheet='phrases')
    phrase_parsing_grammar = {}
    for filename in config['dota']['phrase_parsing_grammar_files']:
        data = yaml_dict_from_file(filename)
        phrase_parsing_grammar.update(data)
    rules = rules_from_grammar_dict(phrase_parsing_grammar, template_env)
    phrases_nlu = TokemaNlu(rules)
    phrase_rules = parse_rules(phrases_table, phrases_nlu)
    dota = Dota(opendota_api_key=require_item_from_dict_or_env(
        config, 'opendota.api_key'),
        phrase_generator=PhraseGenerator(phrase_rules))

    """ Memory """
    from ggbot.memory import Memory, PickleDbStorage
    db_filename = require_item_from_dict_or_env(config, 'memory.db_file')
    memory = Memory(storage=PickleDbStorage(filename=db_filename))

    # NOTE(review): igdb is created above but not initialized here (it is
    # commented out of the component list).
    components = [
        dota,
        memory
        #igdb,
    ]
    for component in components:
        await component.init(context)

    # Scenarios / handlers
    from ggbot.scenarios import HANDLERS as COMMON_HANDLERS
    from ggbot.dota.scenarios import create_dota_scenario_handlers
    from ggbot.opendota import OpenDotaApi
    api = OpenDotaApi(dota.api_key)
    dota_handlers = create_dota_scenario_handlers(memory, dota, api)
    handlers = {**COMMON_HANDLERS, **dota_handlers}
    conversation_manager = ConversationManager(nlu=nlu,
                                               intent_handlers=handlers,
                                               context=context)
    client = Client(conversation_manager)
    context.client = client

    discord_token = require_item_from_dict_or_env(config, 'discord.token')
    try:
        await client.start(discord_token)
    finally:
        # Always close the client connection on exit, even after errors.
        if not client.is_closed():
            await client.close()