def __init__(self, paths, indexes, subffixes=None, inherit=False):
    """Initialise the template loader.

    :param paths: search paths forwarded to TmplUrl2File.
    :param indexes: index file names forwarded to TmplUrl2File.
    :param subffixes: template file suffixes; defaults to ['.tmpl'].
    :param inherit: inheritance flag forwarded to TmplUrl2File.
    """
    # Avoid a shared mutable default argument: materialise the default here.
    if subffixes is None:
        subffixes = ['.tmpl']
    BaseLoader.__init__(self)
    TmplUrl2File.__init__(self, paths, indexes=indexes,
                          subffixes=subffixes, inherit=inherit)
def gen_dashboard(entry_link, entry_description, cve_s, entry_title):
    """Build a T.sc dashboard XML from the working template, upload it, and
    return the id assigned by the dashboard/import endpoint.

    :param entry_link: URL of the advisory entry (used in the short description).
    :param entry_description: advisory text; quotes and backslashes are sanitised.
    :param cve_s: CVE list passed straight through to the templates.
    :param entry_title: advisory title; also seeds the output file name.
    :return: dashboard id from the T.sc import response.
    """
    Entry_Title = entry_title.replace("'", "")
    Entry_ShortDesc = "For more information, please see the full page at " + entry_link
    Entry_Summary = entry_description.replace("'", "").replace("\\", "/")
    cve_list = cve_s
    # Read the template via a context manager; the handle used to be leaked.
    with open('/templates/sc_working_dashboard_template.txt', "r") as template_file:
        dashboard_template_contents = template_file.read()
    # One component per <definition> placeholder: render the matching entry of
    # the (module-level) dashboard_components_list, evaluate it, serialise and
    # base64-encode it, then fill one '{{ dashboard_output }}' slot in order.
    definition_count = len(
        re.findall("<definition>(.+)</definition>", str(dashboard_template_contents)))
    for x in range(definition_count):
        r_dashboard_component = Environment(loader=BaseLoader()).from_string(
            dashboard_components_list[x])
        component_render = r_dashboard_component.render(
            Entry_Title=Entry_Title,
            Entry_ShortDesc=Entry_ShortDesc,
            Entry_Summary=Entry_Summary,
            cve_list=cve_list)
        component_raw = ast.literal_eval(component_render)
        component_output = base64.b64encode(serialize(component_raw))
        dashboard_template_contents = str(dashboard_template_contents).replace(
            '{{ dashboard_output }}', component_output.decode("utf8"), 1)
    r_dashboard_full = Environment(
        loader=BaseLoader()).from_string(dashboard_template_contents)
    dashboard_full = r_dashboard_full.render(Entry_Title=Entry_Title,
                                             Entry_ShortDesc=Entry_ShortDesc,
                                             Entry_Summary=Entry_Summary,
                                             cve_list=cve_list,
                                             Feed=feed)
    # Write the output to a file that we'll then upload to tsc.
    dashboard_name = Entry_Title.replace(" ", "").replace(
        ":", "-")[:15] + "_dashboard.xml"
    with open(dashboard_name, "w") as generated:
        generated.write(dashboard_full)
    # Upload the dashboard to T.sc
    with open(dashboard_name, "r") as generated:
        tsc_file = sc.files.upload(generated)
        dashboard_data = {"name": "", "order": "1", "filename": str(tsc_file)}
        dashboard_post = json.loads(
            sc.post('dashboard/import', json=dashboard_data).text)
        dashboard_id = dashboard_post['response']['id']
    return dashboard_id
def simulate_techniques(self, simulation_techniques, clean_up, var_str='no'):
    """Execute (or clean up) the AWS-targeted Atomic Red Team tests for the
    requested techniques.

    :param simulation_techniques: techniques to locate in the atomic YAML tree.
    :param clean_up: 'no' to execute the attack commands, 'yes' to run their
        clean-up counterparts (clean-up output is not echoed, matching the
        original behaviour).
    :param var_str: unused here; kept for interface compatibility.
    """
    path = self.config['atomic_red_team_path']
    found = self.find_attack_yaml(path, simulation_techniques)
    if not simulation_techniques or clean_up not in ('no', 'yes'):
        return
    # The execute and clean-up passes were duplicated blocks differing only
    # in the banner text and whether command output is printed.
    label = "Execute" if clean_up == 'no' else "Clean up"
    for technique in found:  # renamed from 'object' (shadowed the builtin)
        data = dict()
        for atomic_test in technique['atomic_tests']:
            if 'iaas:aws' not in atomic_test['supported_platforms']:
                continue
            new_command = self.replace_simulation_vars(atomic_test, clean_up)
            print("{0} - AWS technique {1}:\n {2}".format(
                label, technique['attack_technique'], new_command))
            rtemplate = Environment(
                loader=BaseLoader()).from_string(new_command)
            function_call = rtemplate.render(**data)
            # NOTE(review): os.popen goes through the shell; commands come
            # from local YAML definitions and must stay trusted input.
            stream = os.popen(function_call)
            output = stream.read()
            if clean_up == 'no':
                print(output)
def template_engine():
    """Return a jinja2 environment preloaded with ansible-style filters."""
    environment = Environment(loader=BaseLoader())
    # Plain jinja2 lacks the 'ipaddr' filter ansible ships; register it so
    # templates written for ansible keep working here.
    environment.filters["ipaddr"] = ipaddr
    return environment
def test_inject_advertisements(self):
    """Test injection in a real piece of valid AMP html"""
    environment = Environment(loader=BaseLoader())
    ad_markup = "<div class='advertisment'>{{ item.text }}</div>"
    settings = AdsSettings(template=environment.from_string(ad_markup))
    soup = BeautifulSoup(AMP_HTML_TEST, "lxml")
    theme = self.themeservice.find_one(req=None, name="amp")
    inject_advertisements(soup, settings, TEST_ADS, theme)
    # for some reason after injection, queries on BeautifulSoup object
    # does not work properly, so we need to run BS4 over the result again
    reparsed = BeautifulSoup(str(soup), "lxml")
    articles = reparsed.find_all(lambda tag: tag.name == settings.article_tag)
    # base on default frequency (2) we're going to assert "advertisment"
    # on article 1, 3 and 5
    for index in (0, 2, 4):
        self.assertTrue("advertisment" in str(articles[index]))
def forgot_code():
    """Flask view: e-mail a student their submission codes.

    GET renders the form; POST looks up codes by RA/turma and sends them via
    SendGrid to the address registered in notas.csv.
    """
    if request.method == 'GET':
        return render_template('forgot.html')
    try:
        ra = str(int(request.form['ra']))
    except (KeyError, ValueError):
        # Narrowed from a bare 'except:': only a missing form field or a
        # non-numeric RA should yield this message.
        return 'O RA deve conter apenas dígitos.', 400
    turma = str(request.form['turma']).lower().strip()[:3]
    objs = []
    for obj in venues:
        codes = get_all_codes(ra, turma, obj)
        if codes:
            objs.append((obj, [code[0] for code in codes]))
    row = search_csv(ra, 4, 'notas.csv', visit_header=True)
    if not row:
        return 'Nenhum e-mail foi encontrado para o RA ' + str(ra), 404
    email = row[0]
    msg_template = '''
Os códigos (se houver) estão listados abaixo.
<ul>
{% for obj, codes in objs %}
<li><b>{{ obj }}</b>: {% for code in codes %}({{ code }}){% endfor %}</li>
{% endfor %}
</ul>
Cada código está delimitado por parênteses, mas os parênteses não fazem parte do código.
'''
    jinja2_template = Environment(
        loader=BaseLoader()).from_string(msg_template)
    msg = jinja2_template.render(objs=objs)
    # 'objs' only receives entries with a non-empty code list, so empty
    # 'objs' means no codes exist.  (The previous comprehension had its
    # 'for' clauses reversed and read a stale -- possibly undefined --
    # 'codes' from the loop above.)
    if not objs:
        msg = 'Nenhum código de submissão foi encontrado.'
    # using SendGrid's Python Library
    # https://github.com/sendgrid/sendgrid-python
    message = Mail(from_email='*****@*****.**',
                   to_emails=email,
                   subject='Código(s) de submissão',
                   html_content=msg)
    try:
        sg = SendGridAPIClient(app.config['SENDGRID_API_KEY'])
        response = sg.send(message)
        print(response.status_code)
        print(response.body)
        print(response.headers)
    except Exception as e:
        # Python 3 exceptions have no '.message'; print the exception itself.
        print(e)
    return 'Se existe(m) código(s) cadastrado(s), ele(s) pode(m) ser visto(s) na mensagem enviada para ' + redact(
        email
    ) + ' [Se você não recebeu o e-mail, por favor, verifique a pasta de SPAM.]', 200
def send(self, subject_id, body_file_path, attachments=None, subject_params=None, body_params=None):
    """Compose and send a mail whose subject/body come from configured templates.

    :param subject_id: id used to look up the subject line in the mail config.
    :param body_file_path: template file (relative to TEMPLATES_ROOT_PATH).
    :param attachments: optional attachments forwarded to the sender.
    :param subject_params: when given, the subject is rendered as a jinja2
        template with these parameters.
    :param body_params: parameters for rendering the body template.
    """
    _config = config.Config.get_instance()
    _template_file_reader = template_file_reader.TemplateFileReader(
        _config.TEMPLATES_ROOT_PATH)
    # Fetch the subject once; the original called get_subject_by_id() a
    # second time just to re-render the identical string.
    subject = self._mail_config_reader.get_subject_by_id(subject_id)
    if subject_params:
        env = Environment(loader=BaseLoader())
        subject = env.from_string(subject).render(subject_params)
    body = _template_file_reader.read(body_file_path, body_params)
    from_address, to_addresses, profile, cc_addresses, bcc_addresses = \
        self._mail_config_reader.get_addresses_by_id(self._address_id)
    _mail_sender = mail_sender.MailSender(**_config.MAIL_CONFIG)
    _mail_sender.send(from_address, to_addresses, subject, body, attachments,
                      profile, cc_addresses, bcc_addresses)
def render_template(template, context):
    """Render *template* (with the jinja2 'do' extension enabled) against *context*."""
    environment = Environment(
        autoescape=False,
        loader=BaseLoader(),
        trim_blocks=False,
        extensions=['jinja2.ext.do'],
    )
    compiled = environment.from_string(template)
    return compiled.render(context)
def generate_readme(self):
    """Re-generate the add-on readme based on a template."""
    click.echo("Re-generating add-on README.md file...", nl=False)
    template_path = os.path.join(self.addon_target, ".README.j2")
    readme_path = os.path.join(self.git_repository.working_dir,
                               self.repository_target, "README.md")
    try:
        remote_template = self.addon_repository.get_contents(
            template_path, self.current_commit.sha)
    except UnknownObjectException:
        # The add-on ships no README template; nothing to regenerate.
        click.echo(crayons.blue("Skipping"))
        return
    data = self.get_template_data()
    env = Environment(
        loader=BaseLoader(),
        trim_blocks=True,
        extensions=["jinja2.ext.loopcontrols"],
    )
    if not self.repository.dryrun:
        with open(readme_path, "w") as outfile:
            outfile.write(
                env.from_string(
                    remote_template.decoded_content.decode("utf8")).render(
                        **data))
    click.echo(crayons.green("Done"))
def extract_variables(filename):
    """Extract text from template file and get jinja variables"""
    raw_text = textract.process(filename)
    parsed = Environment(loader=BaseLoader()).parse(raw_text)
    return meta.find_undeclared_variables(parsed)
def render(f, vtubers, batches=None, trailing=False):
    """Read <f>.j2, render it with the given data and rewrite <f>.

    :param f: base path; the template lives at f + '.j2'.
    :param vtubers: data exposed to the template as 'vtubers'.
    :param batches: optional mapping exposed as 'batches' (default: empty).
    :param trailing: keep the trailing newline of the template output.
    """
    # Avoid the shared mutable default argument the original had.
    if batches is None:
        batches = {}
    env = Environment(loader=BaseLoader(), keep_trailing_newline=trailing)
    # Close the template file deterministically instead of leaking the handle.
    with open(f + '.j2') as source:
        template = env.from_string(source.read())
    template.stream(vtubers=vtubers, batches=batches).dump(f)
def exec_code(self, **kwargs):
    """Render (optionally) and execute a snippet of Python code.

    Keyword Args:
        Data: mapping used as a jinja2 rendering context; when falsy the
            code runs verbatim.
        Code: Python source (possibly a jinja2 template) to execute.
        Entity: read into a local but otherwise unused in this method.

    Returns:
        tuple: (value of a local named 'exec_val' if the snippet defined
        one, captured stdout) on success, or (None, exception) on failure.
    """
    self.logger.debug('exec code')
    data = kwargs.get('Data')
    code = kwargs.get('Code')
    entity = kwargs.get('Entity')
    # Expose common helpers under short local names so the executed snippet
    # can reference them directly (exec() shares this frame's locals).
    o = self.objects
    g = self.g
    logger = self.logger
    this_entity = self.entity
    if data:
        # Treat the code as a jinja2 template and substitute 'data' first.
        code_t = Environment(loader=BaseLoader()).from_string(code)
        code_r = code_t.render(data)
    else:
        code_r = code
    # TODO: Add a code sanitizer
    # SECURITY: exec() on templated input -- never feed untrusted data here.
    self.logger.debug('Executing code: {}'.format(code_r))
    with stdoutIO() as s:
        try:
            exec(code_r)
            val = None
            # The snippet may communicate a result by assigning 'exec_val';
            # pick it out of the locals exec() populated in this frame.
            if 'exec_val' in locals():
                val = locals()['exec_val']
            output = s.getvalue().strip()
            self.logger.debug('Ouput:\n' + output)
            return val, s.getvalue().strip()
        except Exception as e:
            _traceback = traceback.format_exc()
            self.logger.error(_traceback)
            return None, e
def validate_snippet_present(service, context):
    """
    Check every xpath of the service against panorama to see whether it is
    already present.
    Status codes documented here:
    https://www.paloaltonetworks.com/documentation/71/pan-os/xml-api/pan-os-xml-api-error-codes
    :param service: dict of service params generated by snippet_utils.load_snippet_with_name()
    :param context: dict containing all jinja variables as key / value pairs
    :return: boolean True if found, false if any xpath is not found
    """
    xapi = panorama_login()
    if xapi is None:
        print('Could not login to Panorama')
        return False
    env = Environment(loader=BaseLoader())
    try:
        for snippet in service['snippets']:
            # Each xpath is itself a jinja template; render before querying.
            rendered_xpath = env.from_string(snippet['xpath']).render(context)
            xapi.get(xpath=rendered_xpath)
            if xapi.status_code in ('19', '20'):
                print('xpath is already present')
            elif xapi.status_code == '7':
                print('xpath was NOT found')
                return False
        # every xpath produced a "present" status
        return True
    except pan.xapi.PanXapiError as pxe:
        print('Could not validate snippet was present!')
        print(pxe)
        return False
def env(self):
    """Lazily create and cache the jinja2 environment on first access."""
    if not hasattr(self, '_env'):
        self._env = Environment(loader=BaseLoader())
        # Filters are registered exactly once, right after creation.
        self.set_filters()
    return self._env
def datapkg2rst(meta_json, meta_rst, ignore=None):
    """Convert json metadata to a single rst file.

    :param meta_json: path of the datapackage json file to read.
    :param meta_rst: path of the rst file to create or overwrite.
    :param ignore: optional iterable of resource names to drop.  Defaults to
        ignoring nothing -- previously the None default crashed the
        membership test below with a TypeError.
    """
    if ignore is None:
        ignore = []
    logger.info("Accessing json metadata as dictionary")
    with open(meta_json) as f:
        metadata_dict = json.load(f)
    metadata_dict["resources"] = [
        x for x in metadata_dict["resources"] if x["name"] not in ignore
    ]
    metadata_dict["resources"] = sorted(
        metadata_dict["resources"], key=lambda x: x["name"]
    )
    for resource in metadata_dict["resources"]:
        resource["schema"]["fields"] = sorted(
            resource["schema"]["fields"], key=lambda x: x["name"]
        )
    logger.info("Converting json metadata into an rst file")
    template = (
        Environment(loader=BaseLoader(), autoescape=True)
        .from_string(RST_TEMPLATE)
    )
    rendered = template.render(metadata_dict)
    # Create or overwrite an rst file containing the field descriptions of
    # the input table.  Mode 'w' already truncates, so the previous
    # seek(0)/truncate() dance was redundant.
    with open(meta_rst, 'w') as f:
        f.write(rendered)
def deploy_service(self, service, context):
    """Render the service's first snippet with *context* and POST it to the
    provisioner; returns the provisioner's response text."""
    if not self.__get_salt_auth_token():
        print('Could not connect to provisioner')
        return 'No good'
    snippets_dir = Path(os.path.join(settings.BASE_DIR, 'mssp', 'snippets'))
    try:
        for snippet in service['snippets']:
            snippet_path = os.path.join(snippets_dir, service['name'],
                                        snippet['file'])
            with open(snippet_path, 'r') as template:
                template_source = template.read()
            payload = Environment(
                loader=BaseLoader()).from_string(template_source).render(context)
            print(payload)
            res = requests.post(self.base_url + '/',
                                json=json.loads(payload),
                                headers={"X-Auth-Token": self._auth_token})
            print(res.status_code)
            # NOTE: returns after the first snippet, matching the original.
            return res.text
    except Exception as e:
        print(e)
        print('Caught an error deploying service')
def get_and_load_configs(config_url, load_external=True, load_key=DEFAULT_LOAD_KEY):
    """
    Retrieves and loads config from url, parses it and downloads 'load'
    configs if applicable.

    :param config_url: where the config is fetched from.
    :param load_external: whether configs listed under ``load_key`` are
        fetched recursively.
    :param load_key: key under which additional config locations live.
    :return: list of parsed config dicts, this config first.
    :raises FrecklesConfigError: when the ``load`` value has an unexpected type.
    """
    log.debug("Loading config: {}".format(config_url))
    config_template = get_config(config_url)
    rtemplate = Environment(loader=BaseLoader()).from_string(config_template)
    config_string = rtemplate.render({})
    # SECURITY: the config comes from a URL -- use safe_load so YAML tags
    # cannot construct arbitrary Python objects (plain yaml.load could).
    config_dict = yaml.safe_load(config_string)
    if config_dict is None:
        # An empty document parses to None; treat it as an empty config.
        config_dict = {}
    result = [config_dict]
    load = config_dict.get(load_key, [])
    if load and load_external:
        # NOTE: 'basestring' implies this module still targets Python 2.
        if isinstance(load, basestring):
            result.extend(get_and_load_configs(load))
        elif isinstance(load, (tuple, list)):
            for config in load:
                result.extend(get_and_load_configs(config))
        else:
            raise FrecklesConfigError(
                "Can't load external config, type not recognized: {}".format(
                    load), GLOBAL_LOAD_KEY, load)
    return result
def create_config_file(code, host='localhost', port=5672):
    """Render config/config.json and package it as <code>.tar.gz.

    :param code: identifier written into the config and used as archive name.
    :param host: broker host written into the config.
    :param port: broker port written into the config.
    """
    # Templates
    config_template = '''
{
    "code": "{{ CODE }}",
    "host": "{{ HOST }}",
    "port": {{ PORT }}
}
'''
    config = Environment(loader=BaseLoader()).from_string(config_template)
    config_string = config.render(CODE=code, HOST=host, PORT=port)
    with tempfile.TemporaryDirectory() as directory:
        # Create config.json file
        config_json_file = os.path.join(directory, 'config.json')
        with open(config_json_file, 'w') as c:
            c.write(config_string)
        # Create tar.gz file -- mode 'w:gz' actually gzip-compresses the
        # archive; the previous plain 'w' produced an uncompressed tar
        # despite the .tar.gz name.
        config_tarfile = '{}.tar.gz'.format(code)
        with tarfile.open(config_tarfile, mode='w:gz') as tf:
            tf.add(config_json_file, os.path.join('config', 'config.json'))
def generate_readme(self):
    """Re-generate the add-on readme based on a template."""
    click.echo("Re-generating add-on README.md file...", nl=False)
    addon_file = os.path.join(self.git_repo.working_dir, self.addon_target,
                              ".README.j2")
    if not os.path.exists(addon_file):
        # No template shipped with this add-on; nothing to regenerate.
        click.echo(crayons.blue("Skipping"))
        return
    local_file = os.path.join(self.repository.working_dir,
                              self.repository_target, "README.md")
    data = self.get_template_data()
    jinja = Environment(
        loader=BaseLoader(),
        trim_blocks=True,
        extensions=["jinja2.ext.loopcontrols"],
    )
    # Read the template through a context manager; the bare open().read()
    # used to leak the file handle.
    with open(addon_file, encoding="utf8") as template_file:
        template_source = template_file.read()
    with open(local_file, "w", encoding="utf8") as outfile:
        outfile.write(jinja.from_string(template_source).render(**data))
    click.echo(crayons.green("Done"))
def get_body(self, obj):
    """Render obj.body as a jinja template, exposing each attached image
    under the variable name img__<slug> as a ready-made <img> tag."""
    context = {
        'img__' + str(image.slug): '<img src="' + image.image.url + '" />'
        for image in obj.images.all()
    }
    template = Environment(loader=BaseLoader()).from_string(obj.body)
    return template.render(context)
def __init__(self, application=None, nested_configuration_key=None, defaults=None, validator=None, **handler_kwargs):
    """Build the configuration manager.

    :param application: optional app passed to init_app() once loading completes.
    :param nested_configuration_key: key under which nested configuration
        sections live; defaults to "config42".
    :param defaults: mapping of default configuration values.
    :param validator: pre-built validator.  When omitted and handler_kwargs
        contains 'schema', a DefaultValidator is built from that schema.
    :param handler_kwargs: forwarded to load_handler() to select a backend.
    """
    self.logger = logging.getLogger()
    # Cap on recursive jinja2 re-rendering of templated config values.
    self.jinja2_recurse_limit = 10
    self.jinja2_env = Environment(loader=BaseLoader())
    self.validator = validator
    schema = handler_kwargs.get('schema')
    if self.validator is None and schema:
        self.validator = DefaultValidator(schema)
    self.defaults = defaults if defaults else {}
    self.nested_configuration_key = nested_configuration_key if nested_configuration_key else "config42"
    # Implicit load of handlers
    self.handler = self.load_handler(**handler_kwargs)
    # Ordering matters here: schema defaults first, then nested sections,
    # then validation of the fully assembled configuration.
    if schema:
        self.load_defaults(schema)
    self.load_nested()
    self.validate()
    if application:
        self.init_app(application)
def jinja(template, data):
    """Render *template* with **data and write the result to stdout."""
    rendered = Environment(loader=BaseLoader()).from_string(template).render(**data)
    sys.stdout.write(rendered)
def build_point_in_time_query(
    feature_view_query_contexts: List[FeatureViewQueryContext],
    left_table_query_string: str,
    entity_df_event_timestamp_col: str,
    entity_df_columns: KeysView[str],
    query_template: str,
    full_feature_names: bool = False,
) -> str:
    """Build point-in-time query between each feature view table and the entity dataframe for Bigquery and Redshift"""
    template = Environment(loader=BaseLoader()).from_string(source=query_template)

    # Entity columns first, then every feature -- prefixed with its view's
    # name when full_feature_names is requested.
    final_output_feature_names = list(entity_df_columns)
    for fv in feature_view_query_contexts:
        final_output_feature_names.extend(
            f"{fv.name}__{feature}" if full_feature_names else feature
            for feature in fv.features
        )

    unique_entity_keys = {
        entity for fv in feature_view_query_contexts for entity in fv.entities
    }

    return template.render(
        {
            "left_table_query_string": left_table_query_string,
            "entity_df_event_timestamp_col": entity_df_event_timestamp_col,
            "unique_entity_keys": unique_entity_keys,
            "featureviews": [asdict(ctx) for ctx in feature_view_query_contexts],
            "full_feature_names": full_feature_names,
            "final_output_feature_names": final_output_feature_names,
        }
    )
def build_point_in_time_query(
    feature_view_query_contexts: List[FeatureViewQueryContext],
    min_timestamp: datetime,
    max_timestamp: datetime,
    left_table_query_string: str,
    entity_df_event_timestamp_col: str,
):
    """Build point-in-time query between each feature view table and the entity dataframe"""
    context = {
        "min_timestamp": min_timestamp,
        "max_timestamp": max_timestamp,
        "left_table_query_string": left_table_query_string,
        "entity_df_event_timestamp_col": entity_df_event_timestamp_col,
        "featureviews": [asdict(ctx) for ctx in feature_view_query_contexts],
    }
    template = Environment(loader=BaseLoader()).from_string(
        source=SINGLE_FEATURE_VIEW_POINT_IN_TIME_JOIN)
    return template.render(context)
def test_multiple_json_templates(self):
    """Generate one restconf module per JSON data file using the
    vpn_instance template and write it under the restconf directory."""
    template_file = os.path.join(
        constants.ROOT_DIR, r'template\restconf\vpn_instance.template')
    lst_of_json_templates = [
        r'data\vpn_instance.json',
        r'data\ietf_interface.json',
        r'data\vpn_interface.json'
    ]
    restconf_interface = os.path.join(constants.ROOT_DIR, 'restconf')
    for data_file in lst_of_json_templates:
        json_file = os.path.join(os.getcwd(), data_file)
        class_attrs = json_loader.dict_to_class_attrs(
            json_loader.load(json_file), class_attrs={})
        pprint(class_attrs)
        env = Environment(loader=BaseLoader(), trim_blocks=True)
        env.filters[
            'format_attribute_name'] = template_utils.format_attribute_name
        # 'template' no longer shadows the loop variable, and the file is
        # closed deterministically.
        with open(template_file) as tf:
            template = env.from_string(tf.read())
        class_code = template.render(class_attrs=class_attrs)
        # str.strip('.json') strips *characters*, not the suffix, and would
        # mangle names such as 'json_config.json'; splitext is correct.
        module_name = os.path.splitext(os.path.split(json_file)[1])[0] + ".py"
        with open(restconf_interface + os.path.sep + module_name, "w") as fh:
            fh.write(class_code)
def render_template(template: str, funcs: Mapping = None, **data):
    """Render *template*, first registering any *funcs* (plus 'now', which
    always maps to datetime.utcnow) as template globals."""
    compiled = Environment(loader=BaseLoader()).from_string(template)
    if funcs:
        compiled.globals.update(funcs)
    # Set after the update so 'now' cannot be overridden by funcs.
    compiled.globals['now'] = datetime.datetime.utcnow
    return compiled.render(**data)
def push_service(service, context):
    """Render each snippet of *service* with *context*, push it into Panorama
    at its rendered xpath, then perform a synchronous commit.

    :param service: dict with 'name' and 'snippets' (each snippet carries
        'file' and 'xpath').
    :param context: jinja variables used to render both the xpath and the
        xml payload.
    :return: True when every snippet was pushed and the commit ran,
        False on login failure, a missing xpath, an unreadable snippet
        file, or a PAN API error.
    """
    xapi = panorama_login()
    snippets_dir = Path(os.path.join(settings.BASE_DIR, 'mssp', 'snippets'))
    if xapi is None:
        print('Could not push service to Panorama')
        return False
    try:
        for snippet in service['snippets']:
            xpath = snippet['xpath']
            xml_file_name = snippet['file']
            xml_full_path = os.path.join(snippets_dir, service['name'],
                                         xml_file_name)
            with open(xml_full_path, 'r') as xml_file:
                xml_string = xml_file.read()
            # Both the payload and its destination xpath are jinja templates.
            xml_template = Environment(
                loader=BaseLoader()).from_string(xml_string)
            xpath_template = Environment(
                loader=BaseLoader()).from_string(xpath)
            # Newlines are stripped from the element before pushing.
            xml_snippet = xml_template.render(context).replace('\n', '')
            xpath_string = xpath_template.render(context)
            print('Pushing xpath: %s' % xpath_string)
            #print('Pushing element: %s' % xml_snippet)
            xapi.set(xpath=xpath_string, element=xml_snippet)
            # FIXME - We need to fix this
            # NOTE(review): these status codes look like get() semantics;
            # confirm they are meaningful after a set() against the PAN-OS
            # XML API error-code documentation.
            if xapi.status_code == '19' or xapi.status_code == '20':
                print('xpath is already present')
            elif xapi.status_code == '7':
                print('xpath was NOT found')
                return False
        # Synchronous commit so the outcome is known before returning.
        xapi.commit('<commit/>', sync=True)
        print(xapi.xml_result())
        return True
    except IOError as ioe:
        print('Could not open xml snippet file for reading!!!')
        # FIXME - raise a decent error here
        return False
    except pan.xapi.PanXapiError as pxe:
        print('Could not push service snippet!')
        print(pxe)
        return False
def render(output_dict):
    """Render *output_dict* through a template picked by the configured
    extension; the special '.debug' extension just pretty-prints the dict."""
    ext = '.' + args.extension
    if ext == '.debug':
        debug_template = Environment(
            loader=BaseLoader()).from_string('{{output_dict|pprint}}')
        return debug_template.render(output_dict=output_dict)
    env = Environment(loader=FileSystemLoader('templates', followlinks=True))
    # Most specific template wins: per-group, then custom, then the default.
    template = env.get_or_select_template([
        output_dict['group_id'] + ext,
        'custom_template' + ext,
        'template' + ext,
    ])
    return template.render(**output_dict)
def simulate(self, simulation_technique, simulation_file, force, simulation_vars):
    """Execute attack definitions from the Leonidas library.

    :param simulation_technique: MITRE technique id to search the definition
        tree for (takes precedence over simulation_file).
    :param simulation_file: explicit definition file to run instead.
    :param force: run without the interactive confirmation prompt.
    :param simulation_vars: optional "k=v, k=v" string overriding the
        definition's default input arguments.
    """
    # read definition files from Leonidas
    # search for technique or name
    # run command with substitution of variables
    definitions = []
    if simulation_technique:
        path = "leonidas/definitions"
        for root, dirs, files in os.walk(path):
            # 'filename' instead of 'file'/'object': don't shadow builtins.
            for filename in files:
                if os.path.splitext(filename)[1] != ".yml":
                    continue
                filepath = os.path.join(root, filename)
                definition = self.load_file(filepath)
                # A definition can map to several MITRE ids; keep it if any match.
                if simulation_technique in definition['mitre_ids']:
                    definitions.append(definition)
        if not definitions:
            self.log.error(
                'ERROR: No attack file found for given technique')
            sys.exit(1)
    elif simulation_file:
        definitions.append(self.load_file(simulation_file))

    def _run(command):
        # Shared execution path for forced and confirmed runs (was duplicated).
        stream = os.popen(command)
        print(stream.read())

    for definition in definitions:
        if simulation_vars:
            data = dict(
                item.split("=") for item in simulation_vars.split(", "))
        else:
            data = {var: definition['input_arguments'][var]['value']
                    for var in definition['input_arguments']}
        rtemplate = Environment(loader=BaseLoader()).from_string(
            definition['executors']['sh']['code'])
        function_call = rtemplate.render(**data)
        print(function_call)
        # When not forced, ask first; the old trailing 'or force' inside the
        # prompt branch was unreachable and has been folded in here.
        if force or self.query_yes_no('Run attack command? [default=Y]'):
            _run(function_call)
        else:
            self.log.info('Attack is not executed.')
def mirror_upstream(upstream_template: TextIO, version: str, apt_repo: str,
                    release_label: str, distribution: str,
                    keys: Iterable[pathlib.Path] = [], force_mirror: bool = False,
                    publish: bool = False):
    """Create and publish an upstream mirror.

    :param upstream_template: Template containing the upstream repository definition.
    :param version: Snapshot version tag.
    :param apt_repo: Repository where packages are published.
    :param release_label: Release label.
    :param distribution: Distribution of interest.
    :param keys: (Optional) GPG keys used while publishing.
    :param force_mirror: (Optional) Recreate the mirror even when one exists.
    :param publish: (Optional) Publish the merged mirror to the endpoint.
    """
    # Skip the expensive mirror creation when packages are already present.
    common_args = deb_s3_common_args(apt_repo, 'ubuntu',
                                     distribution + "-mirror", release_label)
    if deb_s3_list_packages(common_args) and not force_mirror:
        print(f"Found mirror in {apt_repo}, skipping mirror creation.",
              file=sys.stderr)
        return

    endpoint = aptly_configure(apt_repo, release_label)
    gpg_import_keys(keys)

    # Render the upstream definition for the target distribution, then parse it.
    upstream_yaml = Environment(loader=BaseLoader()).from_string(
        upstream_template.read()).render(distribution=distribution)
    upstream = yaml.safe_load(upstream_yaml)
    print(upstream_yaml, file=sys.stderr)

    # Trust every key the upstream mirrors advertise (mirrors without a
    # 'keys' entry simply contribute nothing).
    upstream_keys: Set[str] = set()
    for mirror_data in upstream['mirrors'].values():
        upstream_keys.update(mirror_data.get('keys', []))
    if upstream_keys:
        gpg_receive_keys(upstream_keys, upstream['keyservers'])

    # Create, update and snapshot the mirrors; publish when requested.
    mirrors = create_mirror(upstream['mirrors'], upstream['architectures'])
    snapshots = pull_mirror(mirrors, version)
    if publish:
        publish_mirror(snapshots, version, upstream['architectures'],
                       distribution, apt_repo, endpoint)
def __init__(self, server):
    """Remember the owning server and initialise the jinja2 base loader."""
    self.server = server
    BaseLoader.__init__(self)
def __init__(self, manager):
    """Initialise the loader around the single theme manager."""
    # One theme manager to rule them all.
    self.manager = manager
    BaseLoader.__init__(self)
def __init__(self, name, template):
    """Store the template and its name; delegate loader setup to BaseLoader."""
    self.name = name
    self.template = template
    BaseLoader.__init__(self)