def test_scopes_and_blocks(self):
    """Included templates see both parent `set` variables and render context.

    Covers three include sites: a plain top-level include, an include
    inside a `{% block %}`, and an include inside a block that re-binds
    the variable with `{% set foo = foo %}`.  In all three, x.html must
    see `foo` (set in the parent) and `test` (passed to render()).
    """
    env = Environment(loader=DictLoader({
        'a.html': '''
            {%- set foo = 'bar' -%}
            {% include 'x.html' -%}
        ''',
        'b.html': '''
            {%- set foo = 'bar' -%}
            {% block test %}{% include 'x.html' %}{% endblock -%}
        ''',
        'c.html': '''
            {%- set foo = 'bar' -%}
            {% block test %}{% set foo = foo %}{% include 'x.html' %}{% endblock -%}
        ''',
        'x.html': '''{{ foo }}|{{ test }}'''
    }))
    a = env.get_template('a.html')
    b = env.get_template('b.html')
    c = env.get_template('c.html')
    # strip() discards the surrounding template whitespace
    assert a.render(test='x').strip() == 'bar|x'
    assert b.render(test='x').strip() == 'bar|x'
    assert c.render(test='x').strip() == 'bar|x'
def generate_html(self):
    """Render per-suite HTML, Summary.html and Summary.xml into output_path.

    Iterates self.result.suites rendering suite.html per suite, then the
    summary pages, and finally triggers the e-mail report (under Jenkins)
    and the performance report when data is present.
    """
    self.set_baselineinfo()
    self.analyze_result()
    self.copy_template()
    env = Environment(loader=PackageLoader('ChorusCore', 'templates'))
    # .items() replaces the Python-2-only dict.iteritems()
    for suite_name, suiteresult in self.result.suites.items():
        suitetemplate = env.get_template("suite.html")
        content = suitetemplate.render({"result": suiteresult.__dict__,
                                        "bsinfo": self.updateinfo})
        filename = os.path.join(self.output_path, '%s.html' % suite_name)
        Utils.write_to_file(filename, content, "w+")
        self.logger.info("Suite %s.html generated" % suite_name)
    summarytemplate = env.get_template('summary.html')
    xmltemplate = env.get_template('summary.xml')
    content = summarytemplate.render({"result": self.result, "ea": Performance_Result})
    filename = os.path.join(self.output_path, 'Summary.html')
    Utils.write_to_file(filename, content, "w+")
    self.logger.info("Summary.html generated")
    # TODO Modify xml
    content = xmltemplate.render({"result": self.result})
    filename = os.path.join(self.output_path, 'Summary.xml')
    Utils.write_to_file(filename, content, "w+")
    self.logger.info("Summary.xml generated")
    # "in" replaces the removed dict.has_key(); BUILD_URL is set by Jenkins,
    # so the e-mail report only fires under CI
    if "BUILD_URL" in os.environ:
        self.generate_result_email()
    if Performance_Result.data:
        self.generate_performance_result()
def test_choice_loader(self):
    """The choice loader falls through its wrapped loaders in order."""
    env = Environment(loader=choice_loader)
    for name, expected in (('justdict.html', 'FOO'), ('test.html', 'BAR')):
        assert env.get_template(name).render().strip() == expected
    # an unknown name must raise rather than fall back
    self.assert_raises(TemplateNotFound, env.get_template, 'missing.html')
def generate_archive(posts, tag_set):
    """Write the main blog archive plus one archive page per tag.

    :param posts: list of post dicts; each must carry a 'tags' collection
    :param tag_set: iterable of all tag names to generate pages for
    """
    print('Generating blog archive...')
    env = Environment()
    env.loader = FileSystemLoader('templates')
    # fetch the template once; it is reused for every tag page below
    # (it was previously re-fetched inside the loop)
    tpl = env.get_template('blog.html')
    html = tpl.render(dict(
        sitename=cfg['sitename'],
        license=cfg['license'],
        title='blog',
        posts=posts
    ))
    with open('blog.html', 'w') as file:
        file.write(html)
    for tag in tag_set:
        print('Generating {0} archive page...'.format(tag))
        # NOTE(review): unlike the main page, tag pages are rendered without
        # the 'license' variable -- confirm the template tolerates that.
        post_list = [post for post in posts if tag in post['tags']]
        html = tpl.render(dict(
            sitename=cfg['sitename'],
            title='blog: #{0}'.format(tag),
            posts=post_list
        ))
        tagpath = path.join('tag', tag)
        try:
            mkdir(tagpath)
        except OSError:
            # directory already exists -- fine
            pass
        with open(path.join(tagpath, 'index.html'), 'w') as file:
            file.write(html)
def test_prefix_loader(self):
    """Templates resolve through their mount prefix; unknown names fail."""
    env = Environment(loader=prefix_loader)
    for name, expected in (('a/test.html', 'BAR'), ('b/justdict.html', 'FOO')):
        assert env.get_template(name).render().strip() == expected
    # a name with no matching prefix must raise
    self.assert_raises(TemplateNotFound, env.get_template, 'missing')
class RenderTestCase(TestCase):
    """Renders Element trees through the json2html template and checks markup."""

    # NOTE(review): resolved relative to the PARENT of os.getcwd(), so the
    # tests depend on being launched from the expected working directory.
    templates_path = os.path.join(os.path.dirname(os.getcwd()), u'templates')

    def setUp(self):
        # fresh Jinja2 environment per test, loading from templates_path
        self.env = Environment(loader=FileSystemLoader(self.templates_path))

    def test_simple_list(self):
        """A ul with two li children renders as nested tags carrying classes."""
        dut = Element(u'ul', u'text', {u'class': u'some class'})
        dut.child.append(Element(u'li', u'text', {u'class': u'some class'}))
        dut.child.append(Element(u'li', u'text', {u'class': u'some class'}))
        result = self.env.get_template(u'json2html.html').render(root=dut.render())
        expect = u'<ul class="some class">text<li class="some class">text</li><li class="some class">text</li></ul>'
        self.assertEqual(result, expect)

    def test_property(self):
        """Attributes valued None or '' render as bare (valueless) properties."""
        dut = Element(u'ul', u'text', {u'class': u'some class'})
        dut.child.append(Element(u'li', u'text', {u'disable': None}))
        dut.child.append(Element(u'li', u'text', {u'disable': u''}))
        result = self.env.get_template(u'json2html.html').render(root=dut.render())
        expect = u'<ul class="some class">text<li disable>text</li><li disable>text</li></ul>'
        self.assertEqual(result, expect)
def test_filesystem_loader(self):
    """Templates load from disk, including subdirectory paths."""
    env = Environment(loader=filesystem_loader)
    for name, expected in (('test.html', 'BAR'), ('foo/test.html', 'FOO')):
        assert env.get_template(name).render().strip() == expected
    # a missing file must raise
    self.assert_raises(TemplateNotFound, env.get_template, 'missing.html')
class Builder:
    """Renders e-mail HTML from templates and writes the CSS-inlined result
    to a file."""

    def __init__(self, out_file):
        # out_file: destination path for write()
        self.env = Environment(loader=PackageLoader('builder', 'templates'))
        self.out_file = out_file
        # provides a template helper to turn a list into a list of pairs
        self.env.filters['pairs'] = lambda l: [l[i:i + 2] for i in range(0, len(l), 2)]

    # inlines css using premailer with my specific configuration, namely, must
    # keep classes so responsiveness can work.
    def premailer(self, html):
        return Premailer(html, base_url=None, remove_classes=False).transform()

    # creates a string of rendered html
    def render(self, elements):
        # elements: iterable of dicts with 'template' (name) and 'options'
        # (render context) keys
        body = ''
        # loop through given elements and render respective template with options
        for element in elements:
            template = self.env.get_template(element['template'])  # gets the template
            body += template.render(element['options'])  # renders the template
        layout = self.env.get_template('body.html')  # get layout template
        output = layout.render(body=body)  # render into layout
        return self.premailer(output)

    # writes the rendered string to the specified file
    def write(self, elements):
        output = self.render(elements)  # builds the html string
        with open(self.out_file, "w") as html:  # writes the html to a file
            html.write(output)
def main():
    """Get hardware info, format it, and write to slurm.conf and gres.conf.

    Collects node inventory for the requested partition, derives memory
    limits, then renders slurm.conf.j2 and gres.conf.j2 into args.output.
    """
    args = parse_args()

    # Get info from cnode and xthwinv
    repurposed = get_repurposed_computes(args.partition)
    nodes = get_inventory(args.partition, repurposed)
    nodelist = rli_compress([int(nid) for nid in nodes])
    compact_nodes(nodes)
    defmem, maxmem = get_mem_per_cpu(nodes)

    # Write files from templates.  print() calls (valid on both Python 2
    # and 3) replace the old Python-2-only print statements.
    jinjaenv = Environment(loader=FileSystemLoader(args.templatedir))
    conffile = os.path.join(args.output, 'slurm.conf')
    print('Writing Slurm configuration to {0}...'.format(conffile))
    with open(conffile, 'w') as outfile:
        outfile.write(jinjaenv.get_template('slurm.conf.j2').render(
            script=sys.argv[0],
            date=time.asctime(),
            controlmachine=args.controlmachine,
            grestypes=get_gres_types(nodes),
            defmem=defmem,
            maxmem=maxmem,
            nodes=nodes,
            nodelist=nodelist))
    gresfilename = os.path.join(args.output, 'gres.conf')
    print('Writing gres configuration to {0}...'.format(gresfilename))
    with open(gresfilename, 'w') as gresfile:
        gresfile.write(jinjaenv.get_template('gres.conf.j2').render(
            script=sys.argv[0],
            date=time.asctime(),
            nodes=nodes))
    print('Done.')
def generate_templates(
        project, html_template, text_template, current_date,
        current_time_utc):
    '''Generates the templates using Jinja2 templates

    :param project: project object exposed to both templates
    :param html_template: The filename of the HTML template in the templates folder
    :param text_template: The filename of the text template in the templates folder
    :param current_date: The current date.
    :param current_time_utc: current UTC timestamp exposed to the templates
    :return: tuple (rendered_html, rendered_plaintext)
    '''
    # HTML is rendered with autoescaping ON; plain text below turns it off
    env = Environment(
        loader=FileSystemLoader('templates'),
        trim_blocks=True,
        lstrip_blocks=True,
        autoescape=True)
    env.filters['last_comment'] = last_comment
    env.filters['most_recent_comments'] = most_recent_comments
    env.filters['comments_within_lookback'] = comments_within_lookback
    env.filters['as_date'] = as_date
    log.info('Rendering HTML Template')
    html = env.get_template(html_template)
    # premailer inlines the stylesheet into the rendered HTML
    rendered_html = premailer.transform(html.render(
        project=project,
        current_date=current_date,
        current_time_utc=current_time_utc))
    log.info('Rendering Text Template')
    # order matters: autoescape must only be disabled AFTER the HTML render
    env.autoescape = False
    plaintext = env.get_template(text_template)
    rendered_plaintext = plaintext.render(
        project=project,
        current_date=current_date,
        current_time_utc=current_time_utc)
    return (rendered_html, rendered_plaintext)
def generate_templates(project, html_template, text_template, current_date,
                       current_time_utc, skip_inline_css=False):
    """Generates the templates using Jinja2 templates

    :param project: project object exposed to both templates
    :param html_template: The filename of the HTML template in the templates folder
    :param text_template: The filename of the text template in the templates folder
    :param current_date: The current date.
    :param current_time_utc: current UTC timestamp exposed to the templates
    :param skip_inline_css: when True, skip the premailer CSS-inlining pass
    :return: tuple (rendered_html, rendered_plaintext)
    """
    # HTML is rendered with autoescaping ON; plain text below turns it off
    env = Environment(loader=FileSystemLoader("templates"), trim_blocks=True,
                      lstrip_blocks=True, autoescape=True)
    env.filters["last_comment"] = last_comment
    env.filters["most_recent_comments"] = most_recent_comments
    env.filters["comments_within_lookback"] = comments_within_lookback
    env.filters["as_date"] = as_date
    log.info("Rendering HTML Template")
    html = env.get_template(html_template)
    if skip_inline_css:
        rendered_html = html.render(project=project, current_date=current_date,
                                    current_time_utc=current_time_utc)
    else:
        # premailer inlines the stylesheet into the rendered HTML
        rendered_html = premailer.transform(
            html.render(project=project, current_date=current_date,
                        current_time_utc=current_time_utc)
        )
    log.info("Rendering Text Template")
    # order matters: autoescape must only be disabled AFTER the HTML render
    env.autoescape = False
    plaintext = env.get_template(text_template)
    rendered_plaintext = plaintext.render(project=project,
                                          current_date=current_date,
                                          current_time_utc=current_time_utc)
    return (rendered_html, rendered_plaintext)
def _refresh(self):
    """ show the html page in the custom widget"""
    # Lazily create the gtkhtml widget plus two Document objects used as a
    # double buffer: one is displayed while the other is rewritten.
    if self._gtkHtml is None:
        self._gtkHtml = self._ricerca.getHtmlWidget()
        # A bit of double buffering here
        self._gtkHtmlDocuments = (gtkhtml2.Document(), gtkhtml2.Document())
        for doc in self._gtkHtmlDocuments:
            doc.connect('request_url', self.on_html_request_url)
            doc.connect('link_clicked', self.on_html_link_clicked)
        self._currGtkHtmlDocument = 0
    templates_dir = self._htmlTemplate
    # NOTE(review): ``Environment`` here is the application's own object (it
    # has promogestDir) -- jinja2.Environment is imported as ``Env``.
    jinja_env = Env(loader=FileSystemLoader(templates_dir),
                    bytecode_cache=FileSystemBytecodeCache(
                        os.path.join(Environment.promogestDir, 'temp'),
                        '%s.cache'))
    jinja_env.globals['environment'] = Environment
    jinja_env.globals['utils'] = utils
    # select the buffer NOT currently displayed
    # NOTE(review): _currGtkHtmlDocument is never updated afterwards, so the
    # same buffer index is chosen on every call -- confirm this is intended.
    currDocument = (self._currGtkHtmlDocument + 1) % 2
    document = self._gtkHtmlDocuments[currDocument]
    document.open_stream('text/html')
    # no DAO selected -> render the generic index page
    if self.dao is None:
        html = jinja_env.get_template("index.html").render()
    else:
        html = jinja_env.get_template(self.defaultFileName + ".html").render(dao=self.dao)
    document.write_stream(html)
    document.close_stream()
    self._gtkHtml.set_document(document)
def render(template, *args, **kwargs):
    """Render a template from TEMPLATES_DIR with the site's filters/globals.

    :param template: template filename; falls back to 'blank.html' when it
        cannot be loaded
    :return: the rendered template string
    """
    env = Environment(loader=FileSystemLoader(TEMPLATES_DIR))
    env.globals['base_url'] = base_url()
    env.filters['url'] = url  # Used everywhere
    env.filters['time'] = timeformat
    env.filters['date_to_js'] = date_to_js
    env.filters['date'] = dateformat
    env.filters['to_int'] = to_int
    env.filters['time_in_words'] = time_in_words
    env.filters['exceptions_dict'] = exceptions_dict
    env.filters['test_additional_data'] = check_additional_data
    env.filters['clean_slashes'] = clean_slashes
    env.filters['beautify_json'] = beautify_json
    # Dashboard filters
    env.filters['progress_width'] = progress_width
    env.filters['progress_width_percent'] = progress_width_percent
    env.filters['format_float'] = format_float
    try:
        tmpl = env.get_template(template)
    except Exception:
        # narrowed from a bare except (which also swallowed KeyboardInterrupt
        # and SystemExit); any load failure falls back to the blank page
        tmpl = env.get_template('blank.html')
    # Global variables
    env.globals['acl'] = settings.ACL
    return tmpl.render(*args, **kwargs)
def _process_stack_cfg(cfg, stack, minion_id, pillar):
    """Render a stack config file and merge each listed YAML template into stack.

    :param cfg: path to the stack config file; each line of its rendered
        output names a template path relative to the config's directory
    :param stack: pillar data accumulated so far (templates see it as `stack`)
    :param minion_id: id of the minion being compiled
    :param pillar: the ordinary pillar, exposed to the templates
    :return: the merged stack dict
    """
    log.debug('Config: {0}'.format(cfg))
    basedir, filename = os.path.split(cfg)
    jenv = Environment(loader=FileSystemLoader(basedir))
    # expose the salt dunders and pillar context to the Jinja templates
    jenv.globals.update({
        "__opts__": __opts__,
        "__salt__": __salt__,
        "__grains__": __grains__,
        "minion_id": minion_id,
        "pillar": pillar,
    })
    for path in _parse_stack_cfg(jenv.get_template(filename).render(stack=stack)):
        try:
            log.debug('YAML: basedir={0}, path={1}'.format(basedir, path))
            # each template is rendered against the stack built so far,
            # then parsed as YAML and merged in (later templates win)
            obj = yaml.safe_load(jenv.get_template(path).render(stack=stack))
            if not isinstance(obj, dict):
                log.info('Ignoring pillar stack template "{0}": Can\'t parse '
                         'as a valid yaml dictionnary'.format(path))
                continue
            stack = _merge_dict(stack, obj)
        except TemplateNotFound:
            # missing templates are skipped, not fatal
            log.info('Ignoring pillar stack template "{0}": can\'t find from '
                     'root dir "{1}"'.format(path, basedir))
            continue
    return stack
def _process_stack_cfg(cfg, stack, minion_id, pillar):
    """Render a stack config file and merge every matching YAML template.

    Variant with glob support: each non-empty rendered line is treated as a
    glob pattern relative to the config's directory; matches are merged in
    sorted order.

    :param cfg: path to the stack config file
    :param stack: pillar data accumulated so far (templates see it as `stack`)
    :param minion_id: id of the minion being compiled
    :param pillar: the ordinary pillar, exposed to the templates
    :return: the merged stack dict
    """
    log.debug('Config: {0}'.format(cfg))
    basedir, filename = os.path.split(cfg)
    jenv = Environment(loader=FileSystemLoader(basedir))
    # expose the salt dunders, a traversal helper, and pillar context
    jenv.globals.update({
        "__opts__": __opts__,
        "__salt__": __salt__,
        "__grains__": __grains__,
        "__stack__": {
            'traverse': salt.utils.traverse_dict_and_list
        },
        "minion_id": minion_id,
        "pillar": pillar,
    })
    for item in _parse_stack_cfg(
            jenv.get_template(filename).render(stack=stack)):
        if not item.strip():
            continue  # silently ignore whitespace or empty lines
        paths = glob(os.path.join(basedir, item))
        if not paths:
            log.warn('Ignoring pillar stack template "{0}": can\'t find from '
                     'root dir "{1}"'.format(item, basedir))
            continue
        # deterministic merge order: sorted glob matches, later ones win
        for path in sorted(paths):
            log.debug('YAML: basedir={0}, path={1}'.format(basedir, path))
            # the loader needs a path relative to basedir, not absolute
            obj = yaml.safe_load(jenv.get_template(
                os.path.relpath(path, basedir)).render(stack=stack))
            if not isinstance(obj, dict):
                log.info('Ignoring pillar stack template "{0}": Can\'t parse '
                         'as a valid yaml dictionnary'.format(path))
                continue
            stack = _merge_dict(stack, obj)
    return stack
class render_jinja:
    """Rendering interface to Jinja2 Templates

    Example:
        render= render_jinja('templates')
        render.hello(name='jinja2')
    """
    def __init__(self, *a, **kwargs):
        # pull our own options out before forwarding the rest to the loader
        ext_list = kwargs.pop('extensions', [])
        extra_globals = kwargs.pop('globals', {})
        from jinja2 import Environment, FileSystemLoader
        env = Environment(loader=FileSystemLoader(*a, **kwargs),
                          extensions=ext_list)
        env.globals.update(extra_globals)
        self._lookup = env

    def __getattr__(self, name):
        # Assuming all templates end with .html: attribute access maps
        # straight to "<name>.html" and yields its bound render callable.
        return self._lookup.get_template(name + '.html').render

    def _render(self, name, **kwargs):
        # explicit variant taking the full template filename
        return self._lookup.get_template(name).render(**kwargs)
def build_web_pages(root_category, all_categories):
    """Render the home page and every category page into build/latest.

    Any previous build/latest is renamed aside with a millisecond
    timestamp suffix; static resources are copied in fresh.

    :return: the build path
    """
    build_path = base_path + '/build/latest'
    # int() replaces the Python-2-only long(); Python ints are arbitrary
    # precision, so the millisecond value cannot overflow
    ticks = int(time.time()) * 1000
    if os.path.isdir(build_path):
        os.rename(build_path, base_path + '/build/website-%s' % (ticks))
    os.makedirs(build_path)
    shutil.copytree(templates_path + '/resources', build_path + '/resources')
    context = {
        'root_category': root_category,
        'all_categories': all_categories,
        'page_title': '1,200 Pages: Curated Reading Lists Covering Every Topic',
        'cache_buster': ticks,
        'category': Category(),
    }
    environment = Environment(loader=FileSystemLoader(templates_path))
    home_template = environment.get_template('home.html')
    html = home_template.render(context)
    # with-blocks replace the manual open/close pairs
    with open(build_path + '/index.html', 'w') as f:
        f.write(html)
    page_template = environment.get_template('category-page.html')
    for category in all_categories:
        context['category'] = category
        context['page_title'] = category.title + ' | Best Books Reading List'
        out_path = build_path + category.url
        # print() replaces the Python-2-only print statement (the double
        # space reproduces the old comma-separated output)
        print("OUT PATH  %s" % out_path)
        folder = os.path.split(out_path)[0]
        if not os.path.isdir(folder):
            os.makedirs(folder)
        html = page_template.render(context)
        with open(out_path, 'w') as f:
            f.write(html)
    return build_path
def get_hdl(self):
    """Assemble the HDL for this checker circuit from Jinja2 templates.

    Builds enable/disable logic from the recorded program counters, one
    update block per tracked variable write, and the final constraint
    check, then splices the whole thing into the circuit template.

    :return: the rendered circuit HDL as a string
    """
    env = Environment(loader=PackageLoader('src', 'templates'))
    circuit_hdl_template = env.get_template('circuit_hdl_template')
    enable_template = env.get_template('enable_template')
    update_variable_template = env.get_template('update_variable_template')
    check_template = env.get_template('check_template')
    circuit_logic = ''
    # enable the circuit when the instruction word matches any enable PC
    if len(self.enables) > 0:
        condition = ' || '.join([
            ('instr_data = x"%s"' % program_counter)
            for program_counter in self.enables
        ])
        circuit_logic += enable_template.render(
            condition=condition,
            value='1'
        )
    # ...and disable it on any disable PC (same template, value '0')
    if len(self.disables) > 0:
        condition = ' || '.join([
            ('instr_data = x"%s"' % program_counter)
            for program_counter in self.disables
        ])
        circuit_logic += enable_template.render(
            condition=condition,
            value='0'
        )
    # one update block per program counter that writes a tracked variable
    for key in self.updates:
        circuit_logic += update_variable_template.render(
            pc=key,
            var_id=self.var_index[self.updates[key]]
        )
    # translate the constraint expression token-by-token into HDL syntax
    condition = ' '.join(map(self.symbol_to_hdl, tokenizer.get_tokens(self.constraint)))
    circuit_logic += check_template.render(condition=condition)
    return circuit_hdl_template.render(name=self.name, circuit_logic=circuit_logic)
def setup_apache_config():
    """Render the Apache mod_wsgi vhost config and upload it to the server.

    All deployment settings come from fabric's ``env``; the template is
    chosen by OS family (Debian or Redhat).

    :raises ValueError: if env.os is neither 'Debian' nor 'Redhat'
    """
    # upload Apache vhost config
    template_dir = os.path.join(os.path.curdir, 'templates')
    jinja_env = Environment(loader=FileSystemLoader(template_dir))
    template_vars = {
        "server_name": env.server_name,
        "app_path": env.app_path,
        "venv_path": env.venv_path,
        "apache_user": env.apache_user,
        "apache_group": env.apache_group,
        "apache_process_count": env.apache_process_count,
        "apache_thread_count": env.apache_thread_count,
    }
    if env.os == 'Debian':
        template = jinja_env.get_template('wsgi_vhost_debian.jinja.conf')
    elif env.os == 'Redhat':
        template = jinja_env.get_template('wsgi_vhost_redhat.jinja.conf')
    else:
        # previously fell through with `template` unbound and crashed later
        # with a confusing NameError; fail fast instead
        raise ValueError("Unsupported OS for Apache config: %r" % env.os)
    output_from_parsed_template = template.render(template_vars)
    local_path = '/tmp/mod_wsgi.conf'
    # (an unused local `remote_path` was removed here)
    with open(local_path, "wb") as fh:
        fh.write(output_from_parsed_template)
    if env.os == 'Debian':
        put(local_path=local_path, remote_path='/etc/apache2/sites-enabled')
    elif env.os == 'Redhat':
        put(local_path=local_path, remote_path='/tmp')
def create_project(project_name):
    """Scaffold a new OCT project: config, user script and report templates.

    :param project_name: path of the project directory to create
    :raises OSError: if the directory already exists or cannot be created
    """
    env = Environment(loader=PackageLoader('oct.utilities', 'templates'))
    config_content = env.get_template('configuration/config.json').render(script_name='v_user.py')
    script_content = env.get_template('scripts/v_user.py').render()
    if os.path.exists(project_name):
        sys.stderr.write('\nERROR: project already exists: %s\n\n' % project_name)
        raise OSError("Project %s already exists" % project_name)
    try:
        # build the directory skeleton, root first
        os.makedirs(project_name)
        for parts in (('test_scripts',),
                      ('templates',),
                      ('templates', 'css'),
                      ('templates', 'scripts'),
                      ('templates', 'img')):
            os.makedirs(os.path.join(project_name, *parts))
        # copy the static report assets into the new project
        shutil.copy(os.path.join(BASE_DIR, 'templates', 'css', 'style.css'),
                    os.path.join(project_name, 'templates', 'css'))
        shutil.copy(os.path.join(BASE_DIR, 'templates', 'html', 'report.html'),
                    os.path.join(project_name, 'templates'))
    except OSError:
        print('ERROR: can not create directory for %r' % project_name, file=sys.stderr)
        raise
    # write the rendered starter files
    with open(os.path.join(project_name, 'config.json'), 'w') as f:
        f.write(config_content)
    with open(os.path.join(project_name, 'test_scripts', 'v_user.py'), 'w') as f:
        f.write(script_content)
def generateSimReport():
    """Render the similarity report and its summary page into outputPath.

    Reads the module-level ``tool``, ``testcase`` and ``outputPath``.
    NOTE: the original declared ``global tool`` / ``global testcase`` but
    only ever read them, so the declarations were unnecessary and dropped.
    """
    template_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib/templates'))
    # print() (valid on both Python 2 and 3) replaces the print statement
    print("Getting templates from: %s" % template_dir)
    loader = FileSystemLoader(searchpath=template_dir)
    env = Environment(loader=loader)
    template = env.get_template('template_similarity_report_flot.html')
    report = template.render(
        title='Similarity report',
        # similarity=getAvgMetricPerRevision(),
        similarity=getTotalBytesWrittenPerRevision(),
        # matrix=matrix1
        tool=tool,
        testcase=testcase
    ).encode("utf-8")
    with open(outputPath + '/sim_report.html', 'wb') as fh:
        fh.write(report)
    # generate summary report for display on the job main page
    template = env.get_template('template_similarity_report_summary.html')
    report = template.render(
        title='Similarity report summary',
        similarity=getTotalBytesWrittenPerRevision(),
        tool=tool,
        testcase=testcase
    ).encode("utf-8")
    with open(outputPath + '/sim_report_summary.html', 'wb') as fh:
        fh.write(report)
def generate_html(self, json_content, output_file=None, past_results=None):
    """Render benchmark results as HTML in two designs; return the primary one.

    :param json_content: JSON string of the test results
    :param output_file: optional path; when given, both designs are written
        (the alternative goes to <name>_2.html)
    :param past_results: optional prior results, used for the history section
    :return: the rendered HTML for the primary (base_2) design
    """
    pkg_path = os.path.dirname(os.path.abspath(__file__))
    templates = os.path.join(pkg_path, 'templates')
    env = Environment(loader=FileSystemLoader(searchpath=templates))
    env.filters['humanize_date'] = humanize_date
    template = env.get_template('base_2.html')
    # todo: alternative design
    template2 = env.get_template('base.html')
    results = json.loads(json_content)
    # NOTE(review): this splitext call assumes output_file is not None --
    # confirm callers never omit output_file when an SVG is expected.
    svg_filename = "{}.svg".format(os.path.splitext(output_file)[0])
    svg_path = self.generate_svg(svg_filename)
    # URL-quoted SVG basename for the template, or None when no SVG was made
    svg = urllib.quote(os.path.basename(svg_path)) if svg_path else None
    history = self.get_by_provider(past_results) if past_results else None
    chart_data = self.generate_chart_data(results)
    html_content = template.render(
        title=self.bundle,
        charm_name=self.bundle,
        results=results,
        past_results=past_results,
        svg_path=svg,
        history=history,
        chart_data=json.dumps(chart_data))
    # todo alternative design
    html_content_2 = template2.render(
        title=self.bundle,
        charm_name=self.bundle,
        results=results,
        past_results=past_results,
        svg_path=svg,
        history=history,
        chart_data=json.dumps(chart_data)
    )
    if output_file:
        with codecs.open(output_file, 'w', encoding='utf-8') as stream:
            stream.write(html_content)
        with codecs.open(
                output_file.replace(
                    '.html', '_2.html'), 'w', encoding='utf-8') as stream:
            stream.write(html_content_2)
    return html_content
def request_odes_extract(extract, request, url_for, api_key):
    ''' Submit an extract request to the ODES service and return its record.

    Renders e-mail subject/body templates for the completion notification,
    posts the bounding box and e-mail fields to the ODES extracts endpoint,
    and wraps the JSON response in a data.ODES instance.

    :raises util.KnownUnknown: when the service reports an error payload
    :raises Exception: on any other non-200 response
    '''
    env = Environment(loader=PackageLoader(__name__, 'templates'))
    args = dict(
        name=extract.name or extract.wof.name or 'an unnamed place',
        link=urljoin(util.get_base_url(request), url_for('ODES.get_extract', extract_id=extract.id)),
        extracts_link=urljoin(util.get_base_url(request), url_for('ODES.get_extracts')),
        created=extract.created
    )
    # subject, plain-text and HTML bodies all render from the same context
    email = dict(
        email_subject=env.get_template('email-subject.txt').render(**args),
        email_body_text=env.get_template('email-body.txt').render(**args),
        email_body_html=env.get_template('email-body.html').render(**args)
    )
    # bounding box corners keyed bbox_w/s/e/n in envelope order
    params = {key: extract.envelope.bbox[i] for (i, key) in enumerate(('bbox_w', 'bbox_s', 'bbox_e', 'bbox_n'))}
    params.update(email)
    post_url = uritemplate.expand(odes_extracts_url, dict(api_key=api_key))
    resp = requests.post(post_url, data=params)
    oj = resp.json()
    # the service may report an error inside the payload, so check that first
    if 'error' in oj:
        raise util.KnownUnknown("Error: {}".format(oj['error']))
    elif resp.status_code != 200:
        raise Exception("Bad ODES status code: {}".format(resp.status_code))
    # timestamps are optional in the response; parse only when present
    return data.ODES(str(oj['id']), status=oj['status'], bbox=oj['bbox'],
                     links=oj.get('download_links', {}),
                     processed_at=(parse_datetime(oj['processed_at']) if oj['processed_at'] else None),
                     created_at=(parse_datetime(oj['created_at']) if oj['created_at'] else None))
class RenderJinja2:
    """Template renderer with two call styles:

        RenderJinja2('templates').render('page', x=1)   # tries known suffixes
        RenderJinja2('templates').page(x=1)             # attribute -> page.html
    """

    # BUG FIX: was ('.html') -- a plain string, not a tuple, so iterating it
    # in render() yielded the characters '.', 'h', 't', ...
    postfix = ('.html',)

    def __init__(self, *a, **kwargs):
        extensions = kwargs.pop('extensions', [])
        globals = kwargs.pop('globals', t_globals)
        context = kwargs.pop('context', {})
        self.jinja_env = Environment(loader=FileSystemLoader(*a, **kwargs),
                                     extensions=extensions)
        self.jinja_env.globals.update(globals)
        self.jinja_env.globals.update(context)

    def render(self, path, **kwargs):
        """Render the first template found as path + a known suffix.

        :raises LookupError: if no candidate template can be loaded
        """
        for fix in self.postfix:
            realpath = path + fix
            try:
                t = self.jinja_env.get_template(realpath)
                return t.render(**kwargs)
            except Exception:
                # narrowed from a bare except; try the next suffix
                continue
        # BUG FIX: ``raise 'Template Not Found'`` (a string) is invalid on
        # modern Python; raise a real exception instead
        raise LookupError('Template Not Found')

    def add_filter(self, name, fn_filter):
        # register a custom Jinja2 filter
        self.jinja_env.filters[name] = fn_filter

    def __getattr__(self, name):
        # Assuming all templates end with .html
        path = name + '.html'
        t = self.jinja_env.get_template(path)
        return t.render
def run(self):
    """Generate the .rst API documentation tree for the project's packages.

    Discovers packages via find_packages(), imports each module and its
    submodules (skipping excluded patterns), then renders index.rst plus
    one module.rst per discovered module into self.api_dir.

    :return: 1 if jinja2 is unavailable, otherwise None
    """
    # jinja2 is optional at import time; bail out cleanly when missing
    if Environment is None:
        logging.critical(_('package jinja2 is required.'))
        return 1
    env = Environment(loader=PackageLoader('starterpyth.commands', 'templates'))

    def write_template(template_, path, context):
        """ Write a template file.
        :param template_: Jinja2 template
        :type template_: :class:`Template`
        :param path: destination path
        :type path: basestring
        :param context: context
        :type context: :class:`dict`
        """
        dirname = os.path.dirname(path)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        # only overwrite existing files when explicitly asked to
        if not os.path.isfile(path) or self.overwrite:
            tpl_fd = codecs.open(path, 'w', encoding='utf-8')
            tpl_fd.write(template_.render(context))
            tpl_fd.close()
            logging.info('writing %s' % path)

    src_module_names = find_packages()
    # optionally wipe the previous API doc tree first
    if self.pre_rm and os.path.isdir(self.api_dir):
        logging.info('removing %s' % self.api_dir)
        shutil.rmtree(self.api_dir)
    module_names = []
    # comma-separated fnmatch patterns of modules to leave out
    excluded_module_names = set([x.strip() for x in self.modules_to_exclude.split(',') if x.strip()])
    for module_name in src_module_names:
        module = load_module(module_name)
        logging.warning('Processing %s.' % module_name)
        if not any(fnmatch.fnmatch(module_name, x) for x in excluded_module_names):
            module_names.append(module_name)
        module_root = os.path.dirname(module.__file__)
        for filename in os.listdir(module_root):
            basename, sep, ext = filename.rpartition('.')
            # only real source/extension modules; __init__ is covered by
            # the package entry itself
            if ext not in ('pyx', 'py', 'so') or filename == '__init__.py':
                continue
            submodule_name = '%s.%s' % (module_name, basename)
            try:
                load_module(submodule_name)
                if not any(fnmatch.fnmatch(submodule_name, x) for x in excluded_module_names):
                    module_names.append(submodule_name)
            except ImportError as e:
                # unimportable submodules are reported but not fatal
                msg = 'Unable to import %s [%s].' % (submodule_name, e)
                logging.warning(msg)
    # index.rst lists every module as a path (dots -> slashes)
    template = env.get_template('index.rst_tpl')
    all_module_names = [mod_name.replace('.', '/') for mod_name in module_names]
    all_module_names.sort()
    write_template(template, os.path.join(self.api_dir, 'index.rst'), {'module_paths': all_module_names})
    # one module.rst per module, mirroring the package layout on disk
    template = env.get_template('module.rst_tpl')
    for mod_name in module_names:
        path_components = mod_name.split('.')
        path_components[-1] += '.rst'
        write_template(template, os.path.join(self.api_dir, *path_components), {'module_name': mod_name})
def generateCode(self, test):
    """ Generate code has to be smart enough to determine if the json array
    should generate a test case, or a fixture or if it needs to generate both.
    * If setup or teardown is present, then its a test case
    * If parent is present, then it needs to extend a fixture
    """
    #TODO: Maybe we should have a yaml validation class?
    #Totaly agree - that makes perfect sense.
    from jinja2 import Environment, FileSystemLoader
    from time import strftime, gmtime
    # TODO: Find a more efficient way to pull in this template other than ../
    # Is is possible to parameterize the template directory? It should be a static location... - Alex
    # Maybe we can use the PackageLoader
    out_path = "%s/../templates/" % os.path.dirname(os.path.abspath(__file__))
    j2_env = Environment(loader=FileSystemLoader(out_path), trim_blocks=True,
                         lstrip_blocks=True)
    # Header lines created here and added to the templates as required
    header = "#!/usr/bin/python\n" \
             "#\n" \
             "# This file was created by etlUnit.\n#" \
             " Create date: %s\n" \
             "#\n" % \
             strftime("%a, %d %b %Y %X +0000", gmtime())
    for yml in self.yaml_data.keys():
        self.log.info("Generating code from %s..." % yml)
        self.yml_data = self.yaml_data[yml]
        # TODO: Determine how we handle dependencies on single files.
        # TODO: Added fixture definition to the mix. Currently it generates a fixture but it has no variables.
        try:
            # a 'fixture' key means this YAML describes (or extends) a fixture
            if self.yml_data['fixture'] is not None:
                from etlunit.yaml_reader import YAMLReader
                self.fixture = self.yml_data['fixture']
                # fixture definitions live alongside the tests under ../res/
                fixture_res = "../res/%s.yml" % self.fixture
                reader = YAMLReader(fixture_res, None)
                fixture_data = reader.readTests()[fixture_res]
                self.template_output = j2_env.get_template("testfixture.jj2")\
                    .render(header=header, fixture=self.fixture,
                            setup=fixture_data['setup'],
                            teardown=fixture_data['teardown'])
                self.persist_output(self.yml_data['fixture'], self.template_output, test)
        except KeyError:
            # no 'fixture' key: the suite extends unittest.TestCase directly
            self.fixture = "unittest.TestCase"  # Default value for fixture
            self.log.info("Fixture not present, generating TestSuite...")
        finally:
            # the test suite itself is generated in every case
            self.template_output = j2_env.get_template("testsuite.jj2") \
                .render(header=header, fixture=self.fixture,
                        tests=self.yml_data['tests'],
                        suitename=self.yml_data['name'].replace(' ', ''))
            self.persist_output(self.yml_data['name'], self.template_output, test)
    self.log.info("Code generation complete.")
def create_ebuild(package_name, version, targets, stdout):
    '''Creates an ebuild based on the response from rubygems.org and a
    template ebuild using jinja2

    :param package_name: Name of the package
    :param version: Version of the package
    :param targets: Ruby targets to be included in USE_RUBY
    :param stdout: when exactly True, print the ebuild instead of writing it
    '''
    # the template ships next to this module
    here = os.path.dirname(os.path.abspath(__file__))
    jj2_env = Environment(loader=FileSystemLoader(here), trim_blocks=True)
    pkg_json, pkg_search_json = get_json(package_name, version)
    ebuildgen = craft_json(pkg_json, pkg_search_json)
    ebuildgen['ruby_targets'] = targets
    ebuildgen['year'] = date.today().year
    rendered = jj2_env.get_template('ruby.ebuild.tpl').render(ebuildgen=ebuildgen)
    if stdout is True:
        print(rendered)
    else:
        filename = "%s-%s.ebuild" % (package_name, ebuildgen['version'])
        with open(filename, 'w+') as f:
            f.write(rendered)
class RenderJinja2:
    """Template renderer with two call styles:

        RenderJinja2('templates').render('page', x=1)   # tries known suffixes
        RenderJinja2('templates').page(x=1)             # attribute -> page.html
    """

    # BUG FIX: the second entry was 'htm' (no dot), so render() looked up
    # names like 'pagehtm'; it now matches '.htm' files as intended.
    postfix = ('.html', '.htm')

    def __init__(self, *a, **kwargs):
        extensions = kwargs.pop('extensions', [])
        globals = kwargs.pop('globals', {})
        registers = kwargs.pop('registers', {})
        self._lookup = Environment(loader=FileSystemLoader(*a, **kwargs),
                                   extensions=extensions)
        self._lookup.globals.update(globals)
        self._lookup.globals.update(registers)

    def render(self, path, **kwargs):
        """Render the first template found as path + a known suffix.

        :raises TemplateNotFound: if no candidate template can be loaded
        """
        for fix in self.postfix:
            realpath = path + fix
            try:
                t = self._lookup.get_template(realpath)
                return t.render(**kwargs)
            except Exception:
                # narrowed from a bare except; try the next suffix
                continue
        # BUG FIX: ``raise TemplateNotFound`` instantiated the exception with
        # no arguments, which itself fails (the template name is required)
        raise TemplateNotFound(path)

    def __getattr__(self, name):
        # attribute access maps straight to "<name>.html"
        path = name + '.html'
        t = self._lookup.get_template(path)
        return t.render
def get_domain_template(distro, libvirt_ver, **kwargs):
    """
    Get a rendered Jinja2 domain template

    Args:
        distro(str): domain distro
        libvirt_ver(int): libvirt version
        kwargs(dict): args for template render

    Returns:
        str: rendered template
    """
    env = Environment(
        loader=PackageLoader('lago', 'providers/libvirt/templates'),
        trim_blocks=True,
        lstrip_blocks=True,
    )
    wanted = 'dom_template-{0}.xml.j2'.format(distro)
    try:
        tmpl = env.get_template(wanted)
    except TemplateNotFound:
        # no distro-specific template: fall back to the base one
        LOGGER.debug('could not find template %s using default', wanted)
        tmpl = env.get_template('dom_template-base.xml.j2')
    return tmpl.render(libvirt_ver=libvirt_ver, **kwargs)
def generate_html_response(operation, obj, params):
    """Render the HTML fragment for a UI operation result.

    :param operation: one of 'unit_test'/'test', 'deploy', 'index_metadata'
    :param obj: the operation's raw result payload
    :param params: request args forwarded to the templates as ``args``
    :return: the rendered HTML string
    """
    template_path = config.base_path + "/lib/ui/templates"
    env = Environment(loader=FileSystemLoader(template_path), trim_blocks=True,
                      extensions=['jinja2.ext.loopcontrols', jinja2htmlcompress.HTMLCompress])
    # helpers made available inside every template
    env.globals['get_file_lines'] = get_file_lines
    env.globals['htmlize'] = htmlize
    env.globals['does_file_exist'] = does_file_exist
    if operation == 'unit_test' or operation == 'test':
        template = env.get_template('/unit_test/result.html')
        config.logger.debug(json.dumps(obj, sort_keys=True, indent=4))
        result = process_unit_test_result(obj)
        config.logger.debug('\n\n\n\n\n')
        config.logger.debug(json.dumps(result, sort_keys=True, indent=4))
        html = template.render(result=result, results_normal={}, args=params)
    elif operation == 'deploy':
        template = env.get_template('/deploy/result.html')
        deploy_results = []
        # post-process coverage data when present; pass others through as-is
        for result in obj:
            if 'runTestResult' in result and 'codeCoverage' in result['runTestResult']:
                result['parsedTestResults'] = process_unit_test_result(result['runTestResult'])
                deploy_results.append(result)
            else:
                deploy_results.append(result)
        config.logger.debug(deploy_results)
        html = template.render(deploy_results=deploy_results, args=params)
    elif operation == 'index_metadata':
        template = env.get_template('/project/tree.html')
        org_metadata = config.connection.project.get_org_metadata()
        html = template.render(metadata=org_metadata)
    # NOTE(review): an unrecognized operation leaves ``html`` unbound and
    # raises NameError here -- confirm callers only pass the known ops.
    return html
#!/usr/bin/env python3 import http.cookies import cgi import os import funct, sql import glob from jinja2 import Environment, FileSystemLoader env = Environment(loader=FileSystemLoader('templates/')) template = env.get_template('delver.html') print('Content-type: text/html\n') funct.check_login() funct.page_for_admin(level = 2) form = cgi.FieldStorage() serv = form.getvalue('serv') Select = form.getvalue('del') configver = form.getvalue('configver') stderr = "" aftersave = "" file = set() hap_configs_dir = funct.get_config_var('configs', 'haproxy_save_configs_dir') if form.getvalue('configver'): template = env.get_template('configver.html') try: cookie = http.cookies.SimpleCookie(os.environ.get("HTTP_COOKIE")) user_id = cookie.get('uuid') user = sql.get_user_name_by_uuid(user_id.value) servers = sql.get_dick_permit(disable=0)
class Main:
    """Parse IRC log files and render per-channel statistics to HTML/JSON.

    NOTE(review): written for Python 2 (uses ``xrange``, ``cgi.escape``,
    byte-oriented reads); ``Count``, ``cd`` and ``getUID`` are defined
    elsewhere in the project — confirm their contracts there.
    """

    def __init__(self, path):
        # Jinja2 environment for the output page.
        self.env = Environment(loader=FileSystemLoader('/var/www/templates'))
        self.template = self.env.get_template('template.html')
        self.file = None
        self.most_active = {}
        # per user statistics
        self.user_question = {}
        self.user_exclamation = {}
        self.user_actions = {}
        self.user_givemodes = {}
        # total statistics
        self.total_question = Count()
        self.total_exclamation = Count()
        self.total_actions = Count()
        self.total_givemodes = Count()
        # message count per hour of day (24 buckets)
        self.activity_graph = [0] * 24
        # urls
        self.urls = {}
        self.path = path
        self.name = path.split("/")[-1]
        self.file = []
        # all plain files found in the log directory
        self.onlyfiles = [f for f in listdir(path) if isfile(join(path, f))]

    def bulk_lines(self):
        """Feed every line of every log file through one_line()."""
        def test_if_gz(filename):
            # gzip-compressed rotated logs vs. plain-text logs
            if (filename.split(".")[::-1][0] == "gz"):
                # print("gz")
                return gzip.open("%s/%s" % (sys.argv[1], filename))
            else:
                # print("log")
                return open("%s/%s" % (sys.argv[1], filename))
        for filename in self.onlyfiles:
            lines = (line.rstrip('\n') for line in test_if_gz(filename))
            for index, line in enumerate(lines):
                self.one_line(line, index, filename)

    def one_line(self, line, n, filename):
        """Classify one log line and update the corresponding counters.

        Lines are dispatched on the character at column 6 of the
        "HH:MM " prefix: '-' mode changes, '<' messages, ' ' actions.
        """
        index = n
        line = line
        filename = filename
        if len(line) < 7:
            return None
        if line[6] == "-":
            # "-!- mode/#chan [+o nick]" style mode changes
            m = re.match("\d{2}:\d{2}\s-!-\smode/#\w+\s\[(.{2})\s(.+)\].*", line)
            self.total_givemodes.number += 1
            try:
                if m.group(2) in self.user_givemodes:
                    self.user_givemodes[m.group(2)].number += 1
                    self.user_givemodes[m.group(2)].indexes += [n]
                else:
                    self.user_givemodes.update({m.group(2): Count()})
                    self.user_givemodes[m.group(2)].indexes += [n]
            except AttributeError:
                # regex did not match (m is None)
                return None
        if line[6] == "<":
            # "<nick> message" lines
            m = re.match("\d{2}:\d{2}\s<(\W|\s)(.*)>\s(.*)", line)
            url_r = re.findall(
                "(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)",
                line)
            time = re.match("(\d{2}:\d{2})", line)
            if not (m.group(2) == "TheKubaX"):  # presumably the bot's own nick — skip it
                try:
                    # bump the per-hour activity bucket
                    time = time.group(1).split(":")[0]
                    self.activity_graph[int(time)] += 1
                except AttributeError:
                    pass
                try:
                    for i in url_r:
                        if i in self.urls:
                            self.urls[i].number += 1
                            self.urls[i].last_use = m.group(2)
                        else:
                            self.urls.update({i: Count()})
                            self.urls[i].last_use = m.group(2)
                except AttributeError:
                    pass
                try:
                    try:
                        # lines starting with "!" are skipped entirely
                        if "!" == m.group(3)[0]:
                            return None
                    except IndexError:
                        pass
                    if "!" in m.group(3):
                        self.total_exclamation.number += 1
                        if m.group(2) in self.user_exclamation:
                            self.user_exclamation[m.group(2)].number += 1
                            self.user_exclamation[m.group(2)].indexes += [n]
                        else:
                            self.user_exclamation.update({m.group(2): Count()})
                            self.user_exclamation[m.group(2)].indexes += [n]
                    if "?" in m.group(3):
                        self.total_question.number += 1
                        if m.group(2) in self.user_question:
                            self.user_question[m.group(2)].number += 1
                            self.user_question[m.group(2)].indexes += [n]
                        else:
                            self.user_question.update({m.group(2): Count()})
                            self.user_question[m.group(2)].indexes += [n]
                    if m.group(2) in self.most_active:
                        self.most_active[m.group(2)].number += 1
                        self.most_active[m.group(2)].indexes += [n]
                    else:
                        self.most_active.update({m.group(2): Count()})
                        self.most_active[m.group(2)].indexes += [n]
                except AttributeError:
                    return None
            else:
                pass
        if line[6] == " ":
            # " * nick action" lines (/me actions)
            m = re.match("\d{2}:\d{2}\s{2}\*\s([^\s]+)\s(.*)", line)
            self.total_actions.number += 1
            try:
                if m.group(1) in self.user_actions:
                    self.user_actions[m.group(1)].number += 1
                    self.user_actions[m.group(1)].indexes += [n]
                else:
                    self.user_actions.update({m.group(1): Count()})
                    self.user_actions[m.group(1)].indexes += [n]
            except AttributeError:
                return None

    def get_most_active(self):
        """Return the 25 nicks with the highest message counts."""
        keys = self.most_active.keys()
        my_x = {}
        for i in keys:
            my_x.update({i: self.most_active[i].number})
        return sorted(my_x, key=my_x.get)[::-1][:25]

    def get_runner_ups_active(self):
        """Return nicks ranked 26-35 by message count."""
        keys = self.most_active.keys()
        my_x = {}
        for i in keys:
            my_x.update({i: self.most_active[i].number})
        try:
            return sorted(my_x, key=my_x.get)[::-1][25:35]
        except IndexError:
            return sorted(my_x, key=my_x.get)[::-1][25:]

    def get_random_line(self, nick):
        """Pick a random quoted line for *nick* via zgrep/shuf in a shell."""
        mine = u""
        with cd(sys.argv[1]):
            base = "<\W%s>" % re.escape(nick)
            # NOTE(review): nick is re.escape()d, but this still builds a
            # shell command from data scraped out of the logs.
            user_input = "zgrep \"%s\" * | shuf -n 1" % base
            mine = subprocess.Popen(
                "%s" % user_input,
                shell=True, stdout=subprocess.PIPE).stdout.read()[20:].decode("utf-8")
        return mine[3:]

    def save_page(self):
        """Render the statistics to HTML and JSON under /var/www/alltime."""
        my_keys = self.get_most_active()
        most_active = []
        for i in my_keys:
            item = self.most_active[i]
            uid = getUID.GetUID(i).uid
            most_active += [(i, item.number,
                             cgi.escape(self.get_random_line(i)), uid)]
        runner_ups = self.get_runner_ups_active()

        def get_contents(what):
            # top-2 (nick, count) pairs of a per-user Count mapping
            keys = what.keys()
            my_x = {}
            for i in keys:
                my_x.update({i: what[i].number})
            # print(sorted(my_x, key=my_x.get)[::-1][:2])
            get_who = lambda n: sorted(my_x, key=my_x.get)[::-1][:2][n]
            return [(get_who(0), what[get_who(0)].number),
                    (get_who(1), what[get_who(1)].number)]

        # graph percentage calculation
        sum = 0
        for i in self.activity_graph:
            sum += i
        sum = sum / 240.0
        # NOTE(review): percentage scaling is disabled; buckets kept as-is.
        for i in xrange(0, 24):
            self.activity_graph[i] = int(
                self.activity_graph[i])  # int(self.activity_graph[i]/sum)
        self.screaming = get_contents(self.user_exclamation)
        self.asking = get_contents(self.user_question)
        self.telling = get_contents(self.user_actions)
        self.modding = get_contents(self.user_givemodes)
        being = {
            "screaming": self.screaming,
            "asking": self.asking,
            "telling": self.telling,
            "modding": self.modding
        }
        keys = self.urls.keys()
        my_x = {}
        for i in keys:
            my_x.update({i: self.urls[i].number})
        my_keys = sorted(my_x, key=my_x.get)[::-1][:10]
        most_urls = []
        for i in my_keys:
            item = self.urls[i]
            most_urls += [(i, item.number, item.last_use)]
        total_num = [
            self.total_question.number, self.total_exclamation.number,
            self.total_actions.number, self.total_givemodes.number
        ]
        output_from_parsed_template = self.template.render(
            name=self.name,
            most_active=most_active,
            runner_ups=runner_ups,
            being=being,
            urls_used=most_urls,
            total=total_num,
            activity_graph=self.activity_graph
        )  # All necessary variables goes here
        with open("/var/www/alltime/%s.html" % self.name, "wb") as fh:
            fh.write(output_from_parsed_template.encode("utf-8"))
        with open("/var/www/alltime/json/%s.json" % self.name, "w") as fh:
            json.dump(
                {
                    "name": self.name,
                    "total_numbers": total_num,
                    "most_active": most_active,
                    "runner_ups": runner_ups,
                    "being": being,
                    "most_used_urls": most_urls,
                    "activity_graph": self.activity_graph
                }, fh)
# Print a human-readable summary of the planned outreach campaign.
print(" ... distribute {0} booklets ({1} test, {2} control)".format(
    NB_TOTAL, NB_TEST, NB_CNTRL))
print(" ... visit {0} schools".format(num_schools))
print(" ... offer a ${0} incentive for a {1} survey response rate".format(
    price_incentive, R_RATE))
print("\n### COST SHARING SUMMARY")
print(" ...Vegan Outreach: ${0}".format(vo_total))
print(" ...ACE: ${0} <-- ACE Grant Request".format(ace_total))
# Aggregate figures handed to the LaTeX budget-table template below.
summary = {
    'num_books': NB_TOTAL,
    'num_schools': num_schools,
    # expected number of returned reply cards given the response rate
    'num_cards': NB_TOTAL * R_RATE,
    'sticker_cost': sticker_cost,
    # NOTE(review): 'card_cost' is populated from incentive_cost — confirm naming.
    'card_cost': incentive_cost,
    'ace_total': ace_total,
    'vo_total': vo_total,
}
# =====================================================================
# =====================================================================
# Summaries
if TEX_TABLE:
    # NOTE(review): Python 2 print statement below — this script is Python 2.
    tex_table = env.get_template('budget_textable.tmpl')
    print tex_table.render(summary=summary,
                           labor_budget=labor_budget,
                           material_budget=material_budget,
                           incentive_budget=incentive_budget)
from jinja2 import Template, Environment, FileSystemLoader

# Template files are looked up relative to the current directory.
FSL = FileSystemLoader('.')
env = Environment(loader=FSL)
# 'template2' is the template file this script fills in.
template = env.get_template('template2')

# One entry per interface to generate.
scl = range(1, 11)

# Keys used in each per-interface dict.
keys = ('id', 'intf_ip', 'loop_ip')
iden_k = 'id'
intf_k = 'intf_ip'
loop_k = 'loop_ip'
host = 'RT-A'
snmp_ip = '10.0.0.220'

# Build one dict per interface id:
#   {'id': n, 'intf_ip': '172.16.n.1', 'loop_ip': '192.168.0.n'}
# The resulting list is usable as the template's variable context.
li = []
for iden in scl:
    iden_v = str(iden)
    intf_v = '172.16.' + iden_v + '.1'
    loop_v = '192.168.0.' + iden_v
    iden_dict = {iden_k: iden_v, intf_k: intf_v, loop_k: loop_v}
    li.append(iden_dict)

print(li)
class PocketStats(object):
    """Cache Pocket (getpocket.com) items in a local redis instance and
    render simple reading statistics.

    NOTE(review): Python 2 code (``print`` statements, ``iteritems``,
    ``filter`` used as a list); ``Pocket``, ``get_credentials``,
    ``PocketItem`` and ``SortedCollection`` come from elsewhere.
    """

    def __init__(self):
        self.redis = redis.StrictRedis(host='localhost', port=6379, db=0)
        self.key, self.token = get_credentials()
        self.pocket = Pocket(self.key, self.token)
        # Jinja2 templates live next to this file, under ./reports
        loader = FileSystemLoader(
            os.path.join(os.path.dirname(os.path.realpath(__file__)), "reports"))
        # debug: show where templates are loaded from
        print os.path.join(os.path.dirname(os.path.realpath(__file__)), "reports")
        self.env = Environment(loader=loader)

    def get_last_sync(self):
        """Return the stored timestamp of the last sync, or None."""
        return self.redis.get('pocketstats.last_sync')

    def sync_data(self):
        "Fetch latest data from Pocket to our local redis cache"
        since = self.get_last_sync()
        if since is None:
            since = 'all'
        print("sync since: " + since)
        data, response = self.pocket.get(since=since)
        # pocket returns an empty list instead of an empty dict if
        # there are no changes, so we have to manually check before
        # iterating
        if data['list']:
            for id, item in data['list'].iteritems():
                print("syncing " + id)
                self.redis.set('pocketstats.item:' + id, json.dumps(item))
        else:
            print("Up to date")
        self.redis.set('pocketstats.last_sync', time.time())
        self.redis.bgsave()

    def get_items(self, sync=False):
        "Return all known items as a list of PocketItems. Sync against pocket if `sync' is true"
        if sync:
            self.sync_data()
        itemskeys = self.redis.keys('pocketstats.item:*')
        items = []
        for doc in self.redis.mget(itemskeys):
            parsed_json = json.loads(doc)
            # items without a resolved_url are skipped
            if parsed_json.get('resolved_url') is not None:
                items.append(PocketItem(parsed_json))
        return items

    def render(self, template, **kwargs):
        """Render the named template from self.env with kwargs as context."""
        template = self.env.get_template(template)
        return template.render(**kwargs)

    def _get_items_since(self, sorted_collection, since):
        """Return the tail of *sorted_collection* starting at *since*."""
        first = sorted_collection.find_ge(since)
        index = sorted_collection.index(first)
        return sorted_collection[index:]

    def get_stats(self):
        """Print the textual report: totals plus 7/30-day activity."""
        items = self.get_items(False)
        read = filter(operator.attrgetter("is_read"), items)
        unread = filter(operator.attrgetter("is_unread"), items)
        read_sorted = SortedCollection(read, operator.attrgetter('time_read'))
        unread_sorted = SortedCollection(unread,
                                         operator.attrgetter('time_added'))
        # find items read less than a week ago
        now = datetime.datetime.now()
        _7_days_ago = now + relativedelta(days=-7)
        _30_days_ago = now + relativedelta(days=-30)
        print self.render(
            "report.txt",
            total=len(items),
            total_read=len(read),
            total_unread=len(unread),
            now=now,
            newly_added_7d=self._get_items_since(unread_sorted, _7_days_ago),
            newly_read_7d=self._get_items_since(read_sorted, _7_days_ago),
            newly_added_30d=self._get_items_since(unread_sorted, _30_days_ago),
            newly_read_30d=self._get_items_since(read_sorted, _30_days_ago))
class Generator(object):
    """Baseclass generator"""

    def __init__(self, context, settings, path, theme, output_path,
                 readers_cache_name='', **kwargs):
        """Set up generator state and build its Jinja2 environment.

        :param context: shared context dict the generators write into
        :param settings: the settings dict
        :param path: base content path for this generator
        :param theme: path of the theme in use
        :param output_path: destination directory for generated output
        :param readers_cache_name: cache name forwarded to Readers
        :param kwargs: any extra attributes, set verbatim on the instance
        """
        self.context = context
        self.settings = settings
        self.path = path
        self.theme = theme
        self.output_path = output_path

        # any remaining keyword argument becomes an instance attribute
        for arg, value in kwargs.items():
            setattr(self, arg, value)

        self.readers = Readers(self.settings, readers_cache_name)

        # templates cache
        self._templates = {}
        self._templates_path = list(self.settings['THEME_TEMPLATES_OVERRIDES'])

        theme_templates_path = os.path.expanduser(
            os.path.join(self.theme, 'templates'))
        self._templates_path.append(theme_templates_path)
        theme_loader = FileSystemLoader(theme_templates_path)

        simple_theme_path = os.path.dirname(os.path.abspath(__file__))
        simple_loader = FileSystemLoader(
            os.path.join(simple_theme_path, "themes", "simple", "templates"))

        # lookup order: overrides + theme, then the bundled "simple" theme,
        # then explicit '!simple' / '!theme' prefixes
        self.env = Environment(
            loader=ChoiceLoader([
                FileSystemLoader(self._templates_path),
                simple_loader,  # implicit inheritance
                PrefixLoader({
                    '!simple': simple_loader,
                    '!theme': theme_loader
                })  # explicit ones
            ]),
            **self.settings['JINJA_ENVIRONMENT']
        )

        logger.debug('Template list: %s', self.env.list_templates())

        # provide utils.strftime as a jinja filter
        self.env.filters.update({'strftime': DateFormatter()})

        # get custom Jinja filters from user settings
        custom_filters = self.settings['JINJA_FILTERS']
        self.env.filters.update(custom_filters)

        # get custom Jinja globals from user settings
        custom_globals = self.settings['JINJA_GLOBALS']
        self.env.globals.update(custom_globals)

        # get custom Jinja tests from user settings
        custom_tests = self.settings['JINJA_TESTS']
        self.env.tests.update(custom_tests)

        signals.generator_init.send(self)

    def get_template(self, name):
        """Return the template by name.

        Use self.theme to get the templates to use, and return a list of
        templates ready to use with Jinja2.
        """
        if name not in self._templates:
            # try each configured extension until one resolves
            for ext in self.settings['TEMPLATE_EXTENSIONS']:
                try:
                    self._templates[name] = self.env.get_template(name + ext)
                    break
                except TemplateNotFound:
                    continue

        if name not in self._templates:
            raise PelicanTemplateNotFound(
                '[templates] unable to load {}[{}] from {}'.format(
                    name, ', '.join(self.settings['TEMPLATE_EXTENSIONS']),
                    self._templates_path))

        return self._templates[name]

    def _include_path(self, path, extensions=None):
        """Inclusion logic for .get_files(), returns True/False

        :param path: the path which might be including
        :param extensions: the list of allowed extensions, or False if all
            extensions are allowed
        """
        if extensions is None:
            extensions = tuple(self.readers.extensions)
        basename = os.path.basename(path)

        # check IGNORE_FILES
        ignores = self.settings['IGNORE_FILES']
        if any(fnmatch.fnmatch(basename, ignore) for ignore in ignores):
            return False

        ext = os.path.splitext(basename)[1][1:]
        if extensions is False or ext in extensions:
            return True

        return False

    def get_files(self, paths, exclude=[], extensions=None):
        """Return a list of files to use, based on rules

        :param paths: the list of paths to search (relative to self.path)
        :param exclude: the list of path to exclude
        :param extensions: the list of allowed extensions (if False, all
            extensions are allowed)

        NOTE(review): ``exclude=[]`` is a mutable default argument; it is
        only read here, never mutated, so it is harmless as written.
        """
        # backward compatibility for older generators
        if isinstance(paths, str):
            paths = [paths]

        # group the exclude dir names by parent path, for use with os.walk()
        exclusions_by_dirpath = {}
        for e in exclude:
            parent_path, subdir = os.path.split(os.path.join(self.path, e))
            exclusions_by_dirpath.setdefault(parent_path, set()).add(subdir)

        files = set()
        ignores = self.settings['IGNORE_FILES']
        for path in paths:
            # careful: os.path.join() will add a slash when path == ''.
            root = os.path.join(self.path, path) if path else self.path

            if os.path.isdir(root):
                for dirpath, dirs, temp_files in os.walk(
                        root, topdown=True, followlinks=True):
                    excl = exclusions_by_dirpath.get(dirpath, ())
                    # We copy the `dirs` list as we will modify it in the loop:
                    for d in list(dirs):
                        if (d in excl or any(
                                fnmatch.fnmatch(d, ignore)
                                for ignore in ignores)):
                            if d in dirs:
                                dirs.remove(d)

                    reldir = os.path.relpath(dirpath, self.path)
                    for f in temp_files:
                        fp = os.path.join(reldir, f)
                        if self._include_path(fp, extensions):
                            files.add(fp)
            elif os.path.exists(root) and self._include_path(path, extensions):
                files.add(path)  # can't walk non-directories

        return files

    def add_source_path(self, content, static=False):
        """Record a source file path that a Generator found and processed.

        Store a reference to its Content object, for url lookups later.
        """
        location = content.get_relative_source_path()
        key = 'static_content' if static else 'generated_content'
        self.context[key][location] = content

    def _add_failed_source_path(self, path, static=False):
        """Record a source file path that a Generator failed to process.

        (For example, one that was missing mandatory metadata.)
        The path argument is expected to be relative to self.path.
        """
        key = 'static_content' if static else 'generated_content'
        self.context[key][posixize_path(os.path.normpath(path))] = None

    def _is_potential_source_path(self, path, static=False):
        """Return True if path was supposed to be used as a source file.

        (This includes all source files that have been found by generators
        before this method is called, even if they failed to process.)
        The path argument is expected to be relative to self.path.
        """
        key = 'static_content' if static else 'generated_content'
        return (posixize_path(os.path.normpath(path)) in self.context[key])

    def add_static_links(self, content):
        """Add file links in content to context to be processed as
        Static content.
        """
        self.context['static_links'] |= content.get_static_links()

    def _update_context(self, items):
        """Update the context with the given items from the current
        processor.
        """
        for item in items:
            value = getattr(self, item)
            if hasattr(value, 'items'):
                value = list(value.items())  # py3k safeguard for iterators
            self.context[item] = value

    def __str__(self):
        # return the name of the class for logging purposes
        return self.__class__.__name__
def __init__(
    self,
    start_type,
    enclave_file,
    enclave_type,
    remote_class,
    workspace,
    common_dir,
    label="",
    binary_dir=".",
    local_node_id=None,
    host=None,
    ledger_dir=None,
    read_only_ledger_dirs=None,
    snapshots_dir=None,
    common_read_only_ledger_dir=None,
    constitution=None,
    curve_id=None,
    version=None,
    host_log_level="Info",
    major_version=None,
    include_addresses=True,
    config_file=None,
    join_timer_s=None,
    sig_ms_interval=None,
    jwt_key_refresh_interval_s=None,
    election_timeout_ms=None,
    node_data_json_file=None,
    **kwargs,
):
    """
    Run a ccf binary on a remote host.

    Builds the node's command line (for major_version <= 1) or renders its
    JSON configuration file (otherwise), and collects the executable/data
    files to ship before constructing ``remote_class``.

    NOTE(review): relies on attributes declared outside this method's view
    (``BIN``, ``DEPS``, ``TEMPLATE_CONFIGURATION_FILE`` — presumably class
    attributes; ``StartType``, ``LOG``, ``CCF_TO_OE_LOG_LEVEL`` and the
    ``infra`` package are module-level). Confirm against the full file.
    """
    self.name = f"{label}_{local_node_id}"
    self.start_type = start_type
    self.local_node_id = local_node_id
    # Default per-node file names; may be overridden by a config file below.
    self.pem = f"{local_node_id}.pem"
    self.node_address_file = f"{local_node_id}.node_address"
    self.rpc_addresses_file = f"{local_node_id}.rpc_addresses"

    # 1.x releases have a separate cchost.virtual binary for virtual enclaves
    if enclave_type == "virtual" and (
        (major_version is not None and major_version <= 1)
        # This is still present in 2.0.0-rc0
        or (version == "ccf-2.0.0-rc0")):
        self.BIN = "cchost.virtual"
    self.BIN = infra.path.build_bin_path(self.BIN, binary_dir=binary_dir)
    self.common_dir = common_dir
    self.pub_host = host.get_primary_interface().public_host
    # The enclave file is referenced relative to the remote workspace.
    self.enclave_file = os.path.join(".", os.path.basename(enclave_file))
    data_files = []
    exe_files = []

    # Main ledger directory
    self.ledger_dir = os.path.normpath(ledger_dir) if ledger_dir else None
    self.ledger_dir_name = (os.path.basename(self.ledger_dir)
                            if self.ledger_dir
                            else f"{local_node_id}.ledger")

    # Read-only ledger directories
    self.read_only_ledger_dirs = read_only_ledger_dirs or []
    self.read_only_ledger_dirs_names = []
    for d in self.read_only_ledger_dirs:
        self.read_only_ledger_dirs_names.append(os.path.basename(d))
    if common_read_only_ledger_dir is not None:
        self.read_only_ledger_dirs_names.append(common_read_only_ledger_dir)

    # Snapshots
    self.snapshots_dir = os.path.normpath(
        snapshots_dir) if snapshots_dir else None
    self.snapshot_dir_name = (os.path.basename(self.snapshots_dir)
                              if self.snapshots_dir
                              else f"{local_node_id}.snapshots")

    # Constitution: fragments are expected to be in the common dir.
    constitution = [
        os.path.join(self.common_dir, os.path.basename(f))
        for f in constitution
    ]

    # Configuration file
    if config_file:
        # Caller-provided configuration file: read the file names out of it.
        LOG.info(
            f"Node {self.local_node_id}: Using configuration file {config_file}"
        )
        with open(config_file, encoding="utf-8") as f:
            config = json.load(f)
        self.pem = config.get("node_certificate_file", "nodecert.pem")
        self.node_address_file = config.get("node_address_file")
        self.rpc_addresses_file = config.get("rpc_addresses_file")
    elif major_version is None or major_version > 1:
        # Post-1.x nodes are configured via a rendered JSON config file.
        loader = FileSystemLoader(binary_dir)
        env = Environment(loader=loader, autoescape=select_autoescape())
        t = env.get_template(self.TEMPLATE_CONFIGURATION_FILE)
        output = t.render(
            start_type=start_type.name.title(),
            enclave_file=self.enclave_file,
            enclave_type=enclave_type.title(),
            rpc_interfaces=infra.interfaces.HostSpec.to_json(host),
            node_certificate_file=self.pem,
            node_address_file=self.node_address_file,
            rpc_addresses_file=self.rpc_addresses_file,
            ledger_dir=self.ledger_dir_name,
            read_only_ledger_dirs=self.read_only_ledger_dirs_names,
            snapshots_dir=self.snapshot_dir_name,
            constitution=constitution,
            curve_id=curve_id.name.title(),
            host_log_level=host_log_level.title(),
            join_timer=f"{join_timer_s}s" if join_timer_s else None,
            signature_interval_duration=f"{sig_ms_interval}ms",
            jwt_key_refresh_interval=f"{jwt_key_refresh_interval_s}s",
            election_timeout=f"{election_timeout_ms}ms",
            node_data_json_file=node_data_json_file,
            **kwargs,
        )
        config_file_name = f"{self.local_node_id}.config.json"
        config_file = os.path.join(common_dir, config_file_name)
        exe_files += [config_file]
        with open(config_file, "w", encoding="utf-8") as f:
            f.write(output)

    exe_files += [self.BIN, enclave_file] + self.DEPS
    data_files += [self.ledger_dir] if self.ledger_dir else []
    data_files += [self.snapshots_dir] if self.snapshots_dir else []
    if self.read_only_ledger_dirs_names:
        data_files.extend([
            os.path.join(self.common_dir, f)
            for f in self.read_only_ledger_dirs
        ])

    # exe_files may be relative or absolute. The remote implementation should
    # copy (or symlink) to the target workspace, and then node will be able
    # to reference the destination file locally in the target workspace.
    bin_path = os.path.join(".", os.path.basename(self.BIN))

    if major_version is None or major_version > 1:
        # Post-1.x: everything lives in the configuration file.
        cmd = [bin_path, "--config", config_file]
        if start_type == StartType.join:
            data_files += [os.path.join(self.common_dir, "service_cert.pem")]
    else:
        # 1.x: build the legacy command line from kwargs.
        consensus = kwargs.get("consensus")
        node_address = kwargs.get("node_address")
        worker_threads = kwargs.get("worker_threads")
        ledger_chunk_bytes = kwargs.get("ledger_chunk_bytes")
        subject_alt_names = kwargs.get("subject_alt_names")
        snapshot_tx_interval = kwargs.get("snapshot_tx_interval")
        max_open_sessions = kwargs.get("max_open_sessions")
        max_open_sessions_hard = kwargs.get("max_open_sessions_hard")
        initial_node_cert_validity_days = kwargs.get(
            "initial_node_cert_validity_days")
        node_client_host = kwargs.get("node_client_host")
        members_info = kwargs.get("members_info")
        target_rpc_address = kwargs.get("target_rpc_address")
        maximum_node_certificate_validity_days = kwargs.get(
            "maximum_node_certificate_validity_days")
        reconfiguration_type = kwargs.get("reconfiguration_type")
        log_format_json = kwargs.get("log_format_json")
        sig_tx_interval = kwargs.get("sig_tx_interval")
        primary_rpc_interface = host.get_primary_interface()
        cmd = [
            bin_path,
            f"--enclave-file={self.enclave_file}",
            f"--enclave-type={enclave_type}",
            f"--node-address-file={self.node_address_file}",
            f"--rpc-address={infra.interfaces.make_address(primary_rpc_interface.host, primary_rpc_interface.port)}",
            f"--rpc-address-file={self.rpc_addresses_file}",
            f"--ledger-dir={self.ledger_dir_name}",
            f"--snapshot-dir={self.snapshot_dir_name}",
            f"--node-cert-file={self.pem}",
            f"--host-log-level={host_log_level}",
            f"--raft-election-timeout-ms={election_timeout_ms}",
            f"--consensus={consensus}",
            f"--worker-threads={worker_threads}",
        ]
        if include_addresses:
            cmd += [
                f"--node-address={node_address}",
                f"--public-rpc-address={infra.interfaces.make_address(primary_rpc_interface.public_host, primary_rpc_interface.public_port)}",
            ]
        if log_format_json:
            cmd += ["--log-format-json"]
        if sig_tx_interval:
            cmd += [f"--sig-tx-interval={sig_tx_interval}"]
        if sig_ms_interval:
            cmd += [f"--sig-ms-interval={sig_ms_interval}"]
        if ledger_chunk_bytes:
            cmd += [f"--ledger-chunk-bytes={ledger_chunk_bytes}"]
        if subject_alt_names:
            cmd += [f"--san={s}" for s in subject_alt_names]
        if snapshot_tx_interval:
            cmd += [f"--snapshot-tx-interval={snapshot_tx_interval}"]
        if max_open_sessions:
            cmd += [f"--max-open-sessions={max_open_sessions}"]
        if jwt_key_refresh_interval_s:
            cmd += [
                f"--jwt-key-refresh-interval-s={jwt_key_refresh_interval_s}"
            ]
        for f in self.read_only_ledger_dirs_names:
            cmd += [f"--read-only-ledger-dir={f}"]
        for f in self.read_only_ledger_dirs:
            data_files += [os.path.join(self.common_dir, f)]
        if curve_id is not None:
            cmd += [f"--curve-id={curve_id.name}"]
        # Added in 1.x
        if not major_version or major_version > 1:
            if initial_node_cert_validity_days:
                cmd += [
                    f"--initial-node-cert-validity-days={initial_node_cert_validity_days}"
                ]
            if node_client_host:
                cmd += [f"--node-client-interface={node_client_host}"]
            if reconfiguration_type and reconfiguration_type != "OneTransaction":
                cmd += [f"--reconfiguration-type={reconfiguration_type}"]
            if max_open_sessions_hard:
                cmd += [
                    f"--max-open-sessions-hard={max_open_sessions_hard}"
                ]
        if start_type == StartType.start:
            cmd += ["start", "--network-cert-file=service_cert.pem"]
            for fragment in constitution:
                cmd.append(f"--constitution={os.path.basename(fragment)}")
                data_files += [
                    os.path.join(self.common_dir, os.path.basename(fragment))
                ]
            if members_info is None:
                raise ValueError(
                    "Starting node should be given at least one member info"
                )
            for mi in members_info:
                # member-info value is "cert[,enc_pubk][,data_json]"; the
                # comma slot for the encryption key is kept even when absent.
                member_info_cmd = f'--member-info={mi["certificate_file"]}'
                data_files.append(mi["certificate_file"])
                if mi["encryption_public_key_file"] is not None:
                    member_info_cmd += f',{mi["encryption_public_key_file"]}'
                    data_files.append(mi["encryption_public_key_file"])
                elif mi["data_json_file"] is not None:
                    member_info_cmd += ","
                if mi["data_json_file"] is not None:
                    member_info_cmd += f',{mi["data_json_file"]}'
                    data_files.append(mi["data_json_file"])
                cmd += [member_info_cmd]
            # Added in 1.x
            if not major_version or major_version > 1:
                if maximum_node_certificate_validity_days:
                    cmd += [
                        f"--max-allowed-node-cert-validity-days={maximum_node_certificate_validity_days}"
                    ]
        elif start_type == StartType.join:
            cmd += [
                "join",
                "--network-cert-file=service_cert.pem",
                f"--target-rpc-address={target_rpc_address}",
                f"--join-timer={join_timer_s * 1000}",
            ]
            data_files += [
                os.path.join(self.common_dir, "service_cert.pem")
            ]
        elif start_type == StartType.recover:
            cmd += ["recover", "--network-cert-file=service_cert.pem"]
        else:
            raise ValueError(
                f"Unexpected CCFRemote start type {start_type}. Should be start, join or recover"
            )

    # Environment variables for the remote process.
    env = {}
    if enclave_type == "virtual":
        env["UBSAN_OPTIONS"] = "print_stacktrace=1"
        ubsan_opts = kwargs.get("ubsan_options")
        if ubsan_opts:
            env["UBSAN_OPTIONS"] += ":" + ubsan_opts
    oe_log_level = CCF_TO_OE_LOG_LEVEL.get(kwargs.get("host_log_level"))
    if oe_log_level:
        env["OE_LOG_LEVEL"] = oe_log_level

    self.remote = remote_class(
        self.name,
        self.pub_host,
        exe_files,
        data_files,
        cmd,
        workspace,
        common_dir,
        env,
    )
def render(template_name, directory, **kwargs):
    """Render the named Jinja2 template found in *directory*.

    :param template_name: file name of the template to load
    :param directory: directory the template is looked up in
    :param kwargs: variables passed to the template as its context
    :return: the rendered template as a string
    """
    env = Environment(loader=FileSystemLoader(directory))
    return env.get_template(template_name).render(**kwargs)
def init(self, force_deploy=False):
    """Reserve and deploys the vagrant boxes.

    Args:
        force_deploy (bool): True iff new machines should be started

    Returns:
        tuple: (roles, networks) — role name -> list of Host objects, and
        a list of network description dicts.
    """
    machines = self.provider_conf.machines
    networks = self.provider_conf.networks
    # Pre-compute an address pool for every declared network.
    _networks = []
    for network in networks:
        ipnet = IPNetwork(network.cidr)
        _networks.append({
            # keep headroom at both ends of the address range
            "netpool": list(ipnet)[10:-10],
            "cidr": network.cidr,
            "roles": network.roles,
            "gateway": ipnet.ip
        })

    vagrant_machines = []
    vagrant_roles = {}
    j = 0
    for machine in machines:
        for _ in range(machine.number):
            vagrant_machine = {
                "name": "enos-%s" % j,
                "cpu": machine.flavour["cpu"],
                "mem": machine.flavour["mem"],
                # one address per network, taken from the pools above
                "ips": [n["netpool"].pop() for n in _networks],
            }
            vagrant_machines.append(vagrant_machine)
            # Assign the machines to the right roles
            for role in machine.roles:
                vagrant_roles.setdefault(role, []).append(vagrant_machine)
            j = j + 1

    logger.debug(vagrant_roles)

    # Render the Vagrantfile into the current working directory.
    loader = FileSystemLoader(searchpath=TEMPLATE_DIR)
    env = Environment(loader=loader, autoescape=True)
    template = env.get_template('Vagrantfile.j2')
    vagrantfile = template.render(machines=vagrant_machines,
                                  provider_conf=self.provider_conf)
    vagrantfile_path = os.path.join(os.getcwd(), "Vagrantfile")
    with open(vagrantfile_path, 'w') as f:
        f.write(vagrantfile)

    # Build env for Vagrant with a copy of env variables (needed by
    # subprocess opened by vagrant
    v_env = dict(os.environ)
    v_env['VAGRANT_DEFAULT_PROVIDER'] = self.provider_conf.backend

    v = vagrant.Vagrant(root=os.getcwd(),
                        quiet_stdout=False,
                        quiet_stderr=False,
                        env=v_env)
    if force_deploy:
        # start from scratch: tear down any existing boxes first
        v.destroy()

    v.up()
    v.provision()

    # Map roles to Host objects using the ssh info vagrant reports.
    roles = {}
    for role, machines in vagrant_roles.items():
        for machine in machines:
            keyfile = v.keyfile(vm_name=machine['name'])
            port = v.port(vm_name=machine['name'])
            address = v.hostname(vm_name=machine['name'])
            roles.setdefault(role, []).append(
                Host(address,
                     alias=machine['name'],
                     user=self.provider_conf.user,
                     port=port,
                     keyfile=keyfile))

    networks = [{
        'cidr': str(n["cidr"]),
        'start': str(n["netpool"][0]),
        'end': str(n["netpool"][-1]),
        'dns': '8.8.8.8',
        'gateway': n["gateway"],
        'roles': n["roles"]
    } for n in _networks]

    logger.debug(roles)
    logger.debug(networks)
    return (roles, networks)
get_ip_address(config['opnfv']['spaces_dict']['admin']['bridge']) } # Prepare interface-enable, more easy to do it here ifnamelist = set() for node in config['lab']['racks'][0]['nodes']: for nic in node['nics']: if 'admin' not in nic['spaces']: ifnamelist.add(nic['ifname']) config['lab']['racks'][0]['ifnamelist'] = ','.join(ifnamelist) # # Transform template to deployconfig.yaml according to config # # Create the jinja2 environment. env = Environment(loader=FileSystemLoader(TPL_DIR), trim_blocks=True) template = env.get_template('deployconfig.yaml') # Render the template output = template.render(**config) # Check output syntax try: yaml.load(output) except yaml.YAMLError as exc: print(exc) # print output print(output)
def make_multi_report(run_names, sub_html_files, crispresso_multi_report_file,
                      crispresso_folder, _ROOT, report_name,
                      window_nuc_pct_quilts=None, nuc_pct_quilts=None,
                      window_nuc_conv_plots=None, nuc_conv_plots=None,
                      summary_plot_names=None, summary_plot_titles=None,
                      summary_plot_labels=None, summary_plot_datas=None):
    """
    Makes an HTML report for a run containing multiple crispresso runs

    Parameters:
    run_names (arr of strings): names of runs
    sub_html_files (dict): dict of run_name->file_loc
    crispresso_multi_report_file (string): path of file to write to
    report_name (string): description of report type to be shown at top of report
    crispresso_folder (string): absolute path to the crispresso output
    _ROOT (string): absolute path to the crispresso executable
    summary_plot_names (list): list of plot names - keys for following dicts
    summary_plot_titles (dict): dict of plot_name->plot_title
    summary_plot_labels (dict): dict of plot_name->plot_label
    summary_plot_datas (dict): dict of plot_name->(datafile_description, data_filename)
    """
    # Fix: the optional collections used to be mutable default arguments
    # ([] / {}), which are shared between calls; use None sentinels and
    # normalise here instead (behavior for existing callers is unchanged).
    window_nuc_pct_quilts = [] if window_nuc_pct_quilts is None else window_nuc_pct_quilts
    nuc_pct_quilts = [] if nuc_pct_quilts is None else nuc_pct_quilts
    window_nuc_conv_plots = [] if window_nuc_conv_plots is None else window_nuc_conv_plots
    nuc_conv_plots = [] if nuc_conv_plots is None else nuc_conv_plots
    summary_plot_names = [] if summary_plot_names is None else summary_plot_names
    summary_plot_titles = {} if summary_plot_titles is None else summary_plot_titles
    summary_plot_labels = {} if summary_plot_labels is None else summary_plot_labels
    summary_plot_datas = {} if summary_plot_datas is None else summary_plot_datas

    def dirname(path):
        # Jinja filter: name of a path's parent directory.
        return os.path.basename(os.path.dirname(path))

    j2_env = Environment(
        loader=FileSystemLoader(os.path.join(_ROOT, 'templates')))
    j2_env.filters['dirname'] = dirname
    template = j2_env.get_template('multiReport.html')

    # Make sub-report links relative to the directory the report lives in.
    crispresso_data_path = os.path.relpath(
        crispresso_folder, os.path.dirname(crispresso_multi_report_file))
    if crispresso_data_path == ".":
        crispresso_data_path = ""
    else:
        crispresso_data_path += "/"

    # Fix: use a context manager so the report file is closed even if
    # template rendering raises (the original used open()/close()).
    with open(crispresso_multi_report_file, 'w') as outfile:
        outfile.write(
            template.render(window_nuc_pct_quilts=window_nuc_pct_quilts,
                            nuc_pct_quilts=nuc_pct_quilts,
                            window_nuc_conv_plots=window_nuc_conv_plots,
                            nuc_conv_plots=nuc_conv_plots,
                            crispresso_data_path=crispresso_data_path,
                            summary_plot_names=summary_plot_names,
                            summary_plot_titles=summary_plot_titles,
                            summary_plot_labels=summary_plot_labels,
                            summary_plot_datas=summary_plot_datas,
                            run_names=run_names,
                            sub_html_files=sub_html_files,
                            report_name=report_name))
def create_prob_group(prob_results, group_name, support_pages_dir, count,
                      options):
    """
    Creates a support page containing figures and other details about the fit
    for a problem.
    A link to the support page is stored in the results object.

    :param prob_results: problem results objects containing results for
                         each minimizer and a certain fitting function
    :type prob_results: list[fitbenchmarking.utils.fitbm_result.FittingResult]
    :param group_name: name of the problem group
    :type group_name: str
    :param support_pages_dir: directory to store the support pages in
    :type support_pages_dir: str
    :param count: number of times a problem with the same name was passed
                  through this function
    :type count: int
    :param options: The options used in the fitting problem and plotting
    :type options: fitbenchmarking.utils.options.Options
    """
    for result in prob_results:
        problem_name = result.problem.sanitised_name
        page_name = '{}_{}_{}_{}.html'.format(
            group_name, problem_name, count, result.minimizer).lower()
        page_path = os.path.join(support_pages_dir, page_name)

        # When plotting is disabled, both slots carry an explanatory
        # message instead of a figure path.
        fit_success = options.make_plots
        init_success = options.make_plots
        if not options.make_plots:
            fig_fit = fig_start = ('Re-run with make_plots set to yes in the '
                                   'ini file to generate plots.')
        else:
            fig_fit, fig_start = get_figure_paths(result, count)
            if fig_fit == '':
                fit_success = False
                fig_fit = result.figure_error
            if fig_start == '':
                init_success = False
                fig_start = result.figure_error

        package_root = os.path.dirname(inspect.getfile(fitbenchmarking))
        template_dir = os.path.join(package_root, "templates")
        environment = Environment(loader=FileSystemLoader(template_dir))
        template = environment.get_template("support_page_template.html")

        main_css = os.path.join(template_dir, 'main_style.css')
        table_css = os.path.join(template_dir, 'table_style.css')
        custom_css = os.path.join(template_dir, 'custom_style.css')

        with open(page_path, 'w') as handle:
            handle.write(
                template.render(css_style_sheet=main_css,
                                table_style=table_css,
                                custom_style=custom_css,
                                title=result.problem.name,
                                equation=result.problem.equation,
                                initial_guess=result.ini_function_params,
                                minimiser=result.minimizer,
                                is_best_fit=result.is_best_fit,
                                initial_plot_available=init_success,
                                initial_plot=fig_start,
                                min_params=result.fin_function_params,
                                fitted_plot_available=fit_success,
                                fitted_plot=fig_fit))

        result.support_page_link = page_path
import time
from jinja2 import Template, Environment, FileSystemLoader

# Render index.html from the current directory and dump it to stdout.
env = Environment(loader=FileSystemLoader('.'))
# Fix: the original used the Python 2 print statement, a SyntaxError on
# Python 3; the parenthesised call behaves identically on both.
print(env.get_template('index.html').render())

# --- historical benchmarking scratch code, kept for reference ---
# src = open('index.html').read()
# print(env._generate(env._parse(src, 'poop', 'hello.html'),
#                     'poop',
#                     'hello.html'))
# print([x for x in env._tokenize(src, 'poop', 'hello.html')])
# env = Environment(loader=FileSystemLoader('.'))
# times = []
# arr = [5]*1000
# for i in range(100):
#     env = Environment(loader=FileSystemLoader('.'))
#     t1 = time.time()
#     tmpl = env.get_template('index.html')
#     tmpl.render({'username': '******',
#                  'arr': arr})
#     t2 = time.time()
#     times.append(t2-t1)
# print( reduce(lambda x, y: x+y, times) / len(times))
def render_template(filename, vars_dict):
    """Render a Jinja2 template from ./templates and parse it as YAML.

    :param filename: template file name, relative to ./templates
    :param vars_dict: mapping of variables passed to the template
    :return: the parsed manifest (a Python dict/list — despite the local
        variable name, this is not JSON text)
    """
    env = Environment(loader=FileSystemLoader('./templates'))
    template = env.get_template(filename)
    yaml_manifest = template.render(vars_dict)
    # Fix: plain yaml.load without a Loader is deprecated (PyYAML >= 5.1)
    # and can construct arbitrary Python objects; safe_load restricts the
    # output to plain tags, which is all a manifest needs.
    json_manifest = yaml.safe_load(yaml_manifest)
    return json_manifest
def execute_init(self):
    """
    Create the skeleton framework of a role that complies with the
    galaxy metadata format, by copying/rendering a role skeleton into
    the target role directory.
    """
    init_path = context.CLIARGS['init_path']
    force = context.CLIARGS['force']
    role_skeleton = context.CLIARGS['role_skeleton']

    role_name = context.CLIARGS['role_name']

    role_path = os.path.join(init_path, role_name)
    if os.path.exists(role_path):
        if os.path.isfile(role_path):
            raise AnsibleError(
                "- the path %s already exists, but is a file - aborting" %
                role_path)
        elif not force:
            # NOTE(review): the first two message fragments concatenate
            # without a separating newline ("...already exists.you can...").
            raise AnsibleError(
                "- the directory %s already exists."
                "you can use --force to re-initialize this directory,\n"
                "however it will reset any main.yml files that may have\n"
                "been modified there already." % role_path)

    # Values substituted into the skeleton's .j2 templates.
    inject_data = dict(
        role_name=role_name,
        author='your name',
        description='your description',
        company='your company (optional)',
        license='license (GPL-2.0-or-later, MIT, etc)',
        issue_tracker_url='http://example.com/issue/tracker',
        min_ansible_version='2.4',
        role_type=context.CLIARGS['role_type'])

    # create role directory
    if not os.path.exists(role_path):
        os.makedirs(role_path)

    # Fall back to the bundled default skeleton when none was supplied;
    # the default only ignores git placeholder files.
    if role_skeleton is not None:
        skeleton_ignore_expressions = C.GALAXY_ROLE_SKELETON_IGNORE
    else:
        role_skeleton = self.galaxy.default_role_skeleton_path
        skeleton_ignore_expressions = ['^.*/.git_keep$']

    role_skeleton = os.path.expanduser(role_skeleton)
    skeleton_ignore_re = [
        re.compile(x) for x in skeleton_ignore_expressions
    ]

    template_env = Environment(loader=FileSystemLoader(role_skeleton))

    for root, dirs, files in os.walk(role_skeleton, topdown=True):
        rel_root = os.path.relpath(root, role_skeleton)
        # Files under templates/ belong to the generated role verbatim,
        # so their .j2 extension must NOT trigger rendering below.
        in_templates_dir = rel_root.split(os.sep, 1)[0] == 'templates'
        # Prune ignored directories in place so os.walk skips them
        # (topdown=True is required for this to take effect).
        dirs[:] = [
            d for d in dirs
            if not any(r.match(d) for r in skeleton_ignore_re)
        ]

        for f in files:
            filename, ext = os.path.splitext(f)
            if any(
                    r.match(os.path.join(rel_root, f))
                    for r in skeleton_ignore_re):
                continue
            elif ext == ".j2" and not in_templates_dir:
                # Render the .j2 file (dropping the extension) into the
                # role, streaming the result straight to disk.
                src_template = os.path.join(rel_root, f)
                dest_file = os.path.join(role_path,
                                         rel_root, filename)
                template_env.get_template(src_template).stream(
                    inject_data).dump(dest_file)
            else:
                # Anything else is copied through unchanged.
                f_rel_path = os.path.relpath(os.path.join(root, f),
                                             role_skeleton)
                shutil.copyfile(os.path.join(root, f),
                                os.path.join(role_path, f_rel_path))

        for d in dirs:
            dir_path = os.path.join(role_path, rel_root, d)
            if not os.path.exists(dir_path):
                os.makedirs(dir_path)

    display.display("- %s was created successfully" % role_name)
def make_report(run_data, crispresso_report_file, crispresso_folder, _ROOT):
    """Write a single-run CRISPResso HTML report to crispresso_report_file.

    Figures are gathered per amplicon from run_data and only included when
    the corresponding .png actually exists on disk under crispresso_folder.
    """
    # dicts for each amplicon: fig_names[amp_name] = [list of fig names]
    # fig_locs[amp_name][fig_name] = figure location
    fig_names = {}  # all except for the figure 1 (which is common to all amplicons)
    fig_locs = {}
    fig_titles = {}
    fig_captions = {}
    fig_datas = {}
    sgRNA_based_fig_names = {}
    # print('crispresso_report file: ' + crispresso_report_file + ' crispresso_folder : ' + crispresso_folder + ' root: ' + _ROOT)

    def add_fig_if_exists(fig_name, fig_root, fig_title, fig_caption,
                          fig_data, amplicon_fig_names, amplicon_fig_locs,
                          amplicon_fig_titles, amplicon_fig_captions,
                          amplicon_fig_datas):
        """
        Helper function to add figure if the file exists
        if fig at filename exists, amplicon_figs[figname] is set to that file
        """
        #fullpath=os.path.join(crispresso_folder,fig_root+'.png')
        fullpath = os.path.join(crispresso_folder, fig_root + '.png')
        # print('adding file ' + fig_root + ' at ' + fullpath)
        if os.path.exists(fullpath):
            amplicon_fig_names.append(fig_name)
            #amplicon_fig_locs[fig_name]=os.path.basename(fig_root+'.png')
            amplicon_fig_locs[fig_name] = os.path.basename(fig_root)
            amplicon_fig_titles[fig_name] = fig_title
            amplicon_fig_captions[fig_name] = fig_caption
            amplicon_fig_datas[fig_name] = []
            # Only keep data files that actually exist alongside the figure.
            for (data_caption, data_file) in fig_data:
                if os.path.exists(os.path.join(crispresso_folder, data_file)):
                    amplicon_fig_datas[fig_name].append(
                        (data_caption, data_file))

    # Figures common to the whole run (not tied to one amplicon).
    global_fig_names = []
    for fig in ['1a', '1b', '1c', '1d', '5a', '6a', '8a', '11c']:
        fig_name = 'plot_' + fig
        if fig_name + '_root' in run_data['results']['general_plots']:
            add_fig_if_exists(
                fig_name,
                run_data['results']['general_plots'][fig_name + '_root'],
                'Figure ' + fig,
                run_data['results']['general_plots'][fig_name + '_caption'],
                run_data['results']['general_plots'][fig_name + '_data'],
                global_fig_names, fig_locs, fig_titles, fig_captions,
                fig_datas)

    amplicons = []
    for amplicon_name in run_data['results']['ref_names']:
        amplicons.append(amplicon_name)
        amplicon_fig_names = []
        amplicon_fig_locs = {}
        amplicon_fig_titles = {}
        amplicon_fig_captions = {}
        amplicon_fig_datas = {}
        # Per-amplicon single figures.
        for fig in [
                '2a', '3a', '3b', '4a', '4b', '4c', '4d', '4e', '4f', '4g',
                '5', '6', '7', '8', '10a', '10b', '10c', '11a'
        ]:
            fig_name = 'plot_' + fig
            if fig_name + '_root' in run_data['results']['refs'][
                    amplicon_name]:
                add_fig_if_exists(
                    fig_name,
                    run_data['results']['refs'][amplicon_name][fig_name +
                                                               '_root'],
                    'Figure ' + fig_name,
                    run_data['results']['refs'][amplicon_name][fig_name +
                                                               '_caption'],
                    run_data['results']['refs'][amplicon_name][fig_name +
                                                               '_data'],
                    amplicon_fig_names, amplicon_fig_locs,
                    amplicon_fig_titles, amplicon_fig_captions,
                    amplicon_fig_datas)

        # Per-amplicon, per-sgRNA figure families (one figure per guide,
        # e.g. fig 2b's) — stored under plot_<fig>_roots as parallel lists.
        this_sgRNA_based_fig_names = {}
        for fig in ['2b', '9', '10d', '10e', '10f', '10g', '11b']:
            #fig 2b's
            this_fig_names = []
            if 'plot_' + fig + '_roots' in run_data['results']['refs'][
                    amplicon_name]:
                for idx, plot_root in enumerate(
                        run_data['results']['refs'][amplicon_name]['plot_' +
                                                                   fig +
                                                                   '_roots']):
                    fig_name = "plot_" + fig + "_" + str(idx)
                    add_fig_if_exists(
                        fig_name, plot_root,
                        'Figure ' + fig_name + ' sgRNA ' + str(idx + 1),
                        run_data['results']['refs'][amplicon_name][
                            'plot_' + fig + '_captions'][idx],
                        run_data['results']['refs'][amplicon_name][
                            'plot_' + fig + '_datas'][idx],
                        this_fig_names, amplicon_fig_locs,
                        amplicon_fig_titles, amplicon_fig_captions,
                        amplicon_fig_datas)
            this_sgRNA_based_fig_names[fig] = this_fig_names

        fig_names[amplicon_name] = amplicon_fig_names
        sgRNA_based_fig_names[amplicon_name] = this_sgRNA_based_fig_names
        fig_locs[amplicon_name] = amplicon_fig_locs
        fig_titles[amplicon_name] = amplicon_fig_titles
        fig_captions[amplicon_name] = amplicon_fig_captions
        fig_datas[amplicon_name] = amplicon_fig_datas

    report_display_name = ""
    if run_data['running_info']['args'].name != "":
        report_display_name = run_data['running_info']['args'].name

    # find path between the report and the data (if the report is in another
    # directory vs in the same directory as the data)
    crispresso_data_path = os.path.relpath(
        crispresso_folder, os.path.dirname(crispresso_report_file))
    if crispresso_data_path == ".":
        crispresso_data_path = ""
    else:
        crispresso_data_path += "/"

    report_data = {
        'amplicons': amplicons,
        'fig_names': fig_names,
        'sgRNA_based_fig_names': sgRNA_based_fig_names,
        'fig_locs': fig_locs,
        'fig_titles': fig_titles,
        'fig_captions': fig_captions,
        'fig_datas': fig_datas,
        'run_data': run_data,
        'report_display_name': report_display_name,
        'crispresso_data_path': crispresso_data_path,
    }

    j2_env = Environment(
        loader=FileSystemLoader(os.path.join(_ROOT, 'templates')))
    template = j2_env.get_template('report.html')
    # dest_dir = os.path.dirname(crispresso_report_file)
    # shutil.copy2(os.path.join(_ROOT,'templates','CRISPResso_justcup.png'),dest_dir)
    # shutil.copy2(os.path.join(_ROOT,'templates','favicon.ico'),dest_dir)
    outfile = open(crispresso_report_file, 'w')
    outfile.write(template.render(report_data=report_data))
    outfile.close()
import time
import math
import argparse
# from sys import argv
from os import getenv as _
from dotenv import load_dotenv
from concurrent.futures import ThreadPoolExecutor, as_completed
from utils import api, execstr, tsfiles, safename, sameparams
import importlib
import base64
import hashlib
from jinja2 import FileSystemLoader, Environment

# Module-level setup: load .env configuration and the play.html template.
load_dotenv()
env = Environment(loader=FileSystemLoader('./web/templates'))
template = env.get_template('play.html')
upload_drive = None  # presumably bound later to an uploader backend — TODO confirm


def md5(s):
    """Return the middle 16 hex chars of the MD5 of *s* (a short key)."""
    # NOTE(review): the local name shadows the function itself; harmless
    # here because the function never calls itself.
    md5 = hashlib.md5(s.encode('utf-8')).hexdigest()
    return md5[8:24]


def writefile(code, title=None):
    # Derive the output key from the code's hash; the code itself is
    # embedded base64-encoded in the metadata.
    key = md5(code)
    meta = {
        'title': title or 'untitled',
        'code': base64.b64encode(code.encode('utf-8')).decode('ascii')
    }
    with open(f"play/{key}.html", "w", encoding='utf-8') as f:
        # NOTE(review): chunk truncated here — the body of this 'with'
        # block lies outside this view.
class PluginView:
    """Base class for plugin views.

    Collects URL rules and event handlers for later registration and
    resolves the plugin package's template/static directories.
    """

    def __init__(self):
        self.env = Environment()      # Jinja environment; loader set lazily by set_template_root
        self.rules = []               # collected URL rules (Rule objects)
        self.event_rules = []         # collected event handlers (EventRule objects)
        self.static_folder = None     # absolute path to static files, set in set_static_root
        self.template_folder = None   # absolute path to templates, set in set_template_root

    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
        # Queue a URL rule; presumably registered by the host app later — TODO confirm.
        self.rules.append(Rule(rule, endpoint, view_func, **options))

    def on_event(self, event, handler, namespace):
        # Queue an event handler under the given namespace.
        self.event_rules.append(EventRule(event, handler, namespace))

    def on_create(self):
        """Initialization lifecycle hook (override in subclasses).

        Inside this hook you may call 'set_template_root' to choose the
        front-end template directory, 'set_static_root' to choose the
        static-file directory, and 'add_url_rule' to register request
        routes; the plugin's main page should use the '/' path.
        """
        pass

    def after_on_create(self):
        # Fill in defaults for anything on_create() did not configure.
        if not self.static_folder:
            self.set_static_root()
        if not self.template_folder:
            self.set_template_root()

    def _get_current_package_name(self):
        """Infer the plugin's package name from this class's module.

        :return: the top-level package name of the concrete subclass
        """
        module_name = self.__class__.__module__
        if '.' in module_name:
            return module_name.split('.')[0]
        return module_name

    def get_package_file_path(self, file_path, package_name=None):
        # Resolve *file_path* relative to the plugin package's directory.
        if not package_name:
            package_name = self._get_current_package_name()
        target_module = sys.modules.get(package_name)
        package_path = os.path.dirname(inspect.getsourcefile(target_module))
        return os.path.abspath(os.path.join(package_path, file_path))

    def set_template_root(self, package_name=None, templates_path='templates'):
        """Set the HTML template root directory.

        Defaults to [plugin package]/templates.

        :param package_name: package name; defaults to the current plugin package
        :param templates_path: template path, 'templates' by default
        """
        if not package_name:
            package_name = self._get_current_package_name()
        self.env.loader = PackageLoader(package_name,
                                        package_path=templates_path)
        target_module = sys.modules.get(package_name)
        package_path = os.path.dirname(target_module.__file__)
        self.template_folder = os.path.abspath(
            os.path.join(package_path, templates_path))

    def set_static_root(self, package_name=None, static_path='static'):
        """Set the static-file (css, js, ...) directory.

        Defaults to [plugin package]/static.

        :param package_name: package name; defaults to the current plugin package
        :param static_path: static directory, 'static' by default
        """
        if not package_name:
            package_name = self._get_current_package_name()
        target_module = sys.modules.get(package_name)
        package_path = os.path.dirname(target_module.__file__)
        self.static_folder = os.path.abspath(
            os.path.join(package_path, static_path))
        # Route static requests to send_static_file below.
        self.add_url_rule('/static/<path:filename>',
                          view_func=self.send_static_file)

    def render_template(self, template, *args, **kwargs):
        """Render a template (jinja2 syntax supported).

        :param template: template file name
        :param args: positional render arguments
        :param kwargs: keyword render arguments
        :return: the rendered HTML text
        """
        return self.env.get_template(template).render(*args, **kwargs)

    def send_static_file(self, filename):
        """Serve a static file.

        Looks up *filename* under the static directory and returns it.

        :param filename:
        :return:
        """
        # todo cache_timeout
        return send_from_directory(self.static_folder, filename)

    def send_template_file(self, filename):
        # Return the raw (unrendered) template file contents as text.
        template_file_path = os.path.abspath(
            os.path.join(self.template_folder, filename))
        return codecs.open(template_file_path, 'r', 'utf-8').read()

    def default_conf(self):
        """Provide the default conf.json.

        :return: the conf.json contents
        """
        # todo: plugin conf should be stored/read as an object, read-only
        # externally with dedicated methods for writes
        return {}
# NOTE(review): fragment — depends on names defined earlier in the original
# file (subscriber_ids, peer_ids, PATH, REPORT_NAME, CLIENT_TIMEOUT, ...).
# Merge locally-defined ID aliases over the downloaded dictionaries.
if local_subscriber_ids:
    logging.info('ID ALIAS MAPPER: local_subscriber_ids added to subscriber_ids dictionary')
    subscriber_ids.update(local_subscriber_ids)
local_peer_ids = mk_full_id_dict(PATH, LOCAL_PEER_FILE, 'peer')
if local_peer_ids:
    logging.info('ID ALIAS MAPPER: local_peer_ids added peer_ids dictionary')
    peer_ids.update(local_peer_ids)

# Jinja2 Stuff
env = Environment(
    loader=PackageLoader('monitor', 'templates'),
    autoescape=select_autoescape(['html', 'xml'])
)
dtemplate = env.get_template('hblink_table.html')
btemplate = env.get_template('bridge_table.html')

# Create Static Website index file by plain placeholder substitution
# (the index page is not a Jinja template).
index_html = get_template(PATH + 'index_template.html')
index_html = index_html.replace('<<<system_name>>>', REPORT_NAME)
if CLIENT_TIMEOUT > 0:
    index_html = index_html.replace('<<<timeout_warning>>>', 'Continuous connections not allowed. Connections time out in {} seconds'.format(CLIENT_TIMEOUT))
else:
    index_html = index_html.replace('<<<timeout_warning>>>', '')

# Start update loop (Twisted LoopingCall firing every FREQUENCY seconds).
update_stats = task.LoopingCall(build_stats)
update_stats.start(FREQUENCY)

# Start a timeout loop
import os
from datetime import datetime
from experts_dw import db
from experts_dw.models import PureSyncUserData
from jinja2 import Environment, PackageLoader, Template, select_autoescape

env = Environment(
    loader=PackageLoader('experts_etl', 'templates'),
    autoescape=select_autoescape(['html', 'xml'])
)

# defaults:
template = env.get_template('user.xml.j2')
db_name = 'hotel'
# This dirname could use improvement before deploying to a remote machine:
dirname = os.path.dirname(os.path.realpath(__file__))
if 'EXPERTS_ETL_SYNC_DIR' in os.environ:
    dirname = os.environ['EXPERTS_ETL_SYNC_DIR']
# NOTE(review): the timestamp is baked in at import time, so every call to
# run() that uses the default writes to the same file path.
output_filename = dirname + '/user_' + datetime.now().strftime('%Y-%m-%dT%H:%M:%S') + '.xml'


def run(
    db_name=db_name,
    template=template,
    output_filename=output_filename,
    experts_etl_logger=None
):
    # 'loggers' is presumably imported elsewhere in the original file — TODO confirm.
    if experts_etl_logger is None:
        experts_etl_logger = loggers.experts_etl_logger()

    experts_etl_logger.info('starting: edw -> pure', extra={'pure_sync_job': 'user'})
    # NOTE(review): chunk truncated here — the rest of run() is outside this view.
# NOTE(review): fragment — this bare 'else:' pairs with an 'if' outside this
# view; args/depth/initBrowser/newSearch/... are defined earlier in the file.
else:
    browser = initBrowser()
query = args['<keyword>']
start_paa = newSearch(browser, query)
# Index the initial "People Also Ask" questions by position.
initialSet = {}
cnt = 0
for q in start_paa:
    initialSet.update({cnt: q})
    cnt += 1
paa_list = []

crawlQuestions(start_paa, paa_list, initialSet, depth)

# Serialise the crawled question tree as a JS variable for the HTML view.
treeData = 'var treeData = ' + json.dumps(paa_list) + ';'

# NOTE(review): paa_list[0] raises IndexError if the crawl found nothing —
# presumably crawlQuestions always appends a root; verify against caller.
if paa_list[0]['children']:
    root = os.path.dirname(os.path.abspath(__file__))
    templates_dir = os.path.join(root, 'templates')
    env = Environment(loader=FileSystemLoader(templates_dir))
    template = env.get_template('index.html')

    filename = os.path.join(root, 'html', prettyOutputName())

    with open(filename, 'w') as fh:
        fh.write(template.render(treeData=treeData, ))

if args['--csv']:
    if paa_list[0]['children']:
        _path = 'csv/' + prettyOutputName('csv')
        flatten_csv(paa_list, depth, _path)

browser.close()
class PipWeb(object):
    """CherryPy handler serving a pip-compatible package index backed by S3."""

    def __init__(self, base):
        self.base = base  # app context providing basepath, s3 client, bucket
        # Prefer a local ./templates dir (dev), else the installed location.
        template_dir = "templates" if os.path.exists(
            "templates") else os.path.join(APPROOT, "templates")
        self.tpl = Environment(loader=FileSystemLoader(template_dir),
                               autoescape=select_autoescape(['html', 'xml']))
        self.tpl.filters.update(normalize=normalize)

    @cherrypy.expose
    def index(self, reponame=None, distname=None, filename=None):
        # Single entry point: a filename means a package download,
        # anything else is index navigation.
        if filename:
            return self.handle_download(reponame, distname, filename)
        else:
            return self.handle_navigation(reponame, distname, filename)

    def handle_navigation(self, reponame=None, distname=None, filename=None):
        """Render the index pages: root repo list, one repo, or one dist."""
        if reponame:
            repo = get_repo(db(), reponame, create_ok=False)
            if distname:
                return self.tpl.get_template("pypi/dist.html") \
                    .render(repo=repo,
                            pkgs=db().query(PipPackage).filter(PipPackage.repo == repo,
                                                               PipPackage.dist_norm == distname).
                            order_by(PipPackage.version).all(),
                            distname=normalize(distname))
            return self.tpl.get_template("pypi/repo.html") \
                .render(repo=repo, dists=self._get_dists(repo))
        return self.tpl.get_template("pypi/root.html") \
            .render(repos=db().query(PipRepo).order_by(PipRepo.name).all())

    def _get_dists(self, repo):
        """Yield one package row per distinct dist name (rows come sorted
        by dist, so consecutive duplicates are skipped)."""
        lastdist = None
        for dist in db().query(PipPackage).filter(
                PipPackage.repo == repo).order_by(PipPackage.dist).all():
            if lastdist and dist.dist == lastdist:
                continue
            yield dist
            lastdist = dist.dist

    def handle_download(self, reponame, distname, filename):
        """Serve (GET) or remove (DELETE) a stored package file via S3."""
        repo = get_repo(db(), reponame, create_ok=False)
        pkg = db().query(PipPackage).filter(
            PipPackage.repo == repo, PipPackage.fname == filename).first()
        if not pkg:
            raise cherrypy.HTTPError(404)
        # Object key layout: repos/<repo>/wheels/<first letter>/<fname>
        dpath = os.path.join(self.base.basepath, "repos", repo.name, "wheels",
                             pkg.fname[0].lower(), pkg.fname)

        if str(cherrypy.request.method) == "DELETE":
            # Delete DB row first, then the S3 object if it exists.
            db().delete(pkg)
            files = self.base.s3.list_objects(Bucket=self.base.bucket,
                                              Prefix=dpath).get("Contents")
            if files:
                self.base.s3.delete_object(Bucket=self.base.bucket, Key=dpath)
            db().commit()
            return "OK"

        elif str(cherrypy.request.method) == "GET":
            response = self.base.s3.get_object(Bucket=self.base.bucket,
                                               Key=dpath)
            cherrypy.response.headers["Content-Type"] = "binary/octet-stream"
            cherrypy.response.headers["Content-Length"] = response[
                "ContentLength"]

            # Stream the S3 body in 64 KiB chunks; response.stream is
            # enabled via index._cp_config below.
            def stream():
                while True:
                    data = response["Body"].read(65535)
                    if not data:
                        return
                    yield data

            return stream()
        else:
            raise cherrypy.HTTPError(405)

    # Allow generator responses (chunked streaming) on the index handler.
    index._cp_config = {'response.stream': True}
# Resolve which version entry matches site_url; keep the LAST match,
# mirroring the original scan order.
matching_names = [
    entry['name'] for entry in info['versions']
    if entry['url'] == mkdocs['site_url']
]
if not matching_names:
    raise Exception('"{0}" not found in versions'.format(mkdocs['site_url']))
selected = matching_names[-1]

# A warning is attached unless the selected version is the first (= latest).
warning = info['warning'] if selected != info['versions'][0]['name'] else None

# Render the version partial from the data directory.
env = Environment(loader=FileSystemLoader(searchpath='data'))
output = env.get_template('version.html').render(
    selected=selected, versions=info['versions'], warning=warning)

# Persist the rendered partial.
with open(TARGET, 'w') as partial:
    partial.write(output)

# Print message and exit
print('Updated "{0}"'.format(TARGET))
# Route errors to stdout so they show up in the browser during CGI debugging.
sys.stderr = sys.stdout
cgitb.enable()

# store the timestamp in the status.txt file
# NOTE(review): shell=True with string concatenation — fine here since the
# payload is a locally generated timestamp, but fragile as a pattern.
subprocess.call('echo "' + str(datetime.datetime.now().timestamp()) + '" > /var/www/html/wmsinh.org/public_html/cover-page/status.txt', shell=True)

# print a header so the browser doesn't freak out
print("Content-Type:text/html;charset=utf-8\n\n")
print()

# do some jinja2 magic to render our web page
j2_env = Environment(loader=FileSystemLoader(TEMP_DIR), trim_blocks=True)
headscript = """<script>
var startTime = new Date().getTime();
</script>"""
print(j2_env.get_template("waiting.html").render(title='Internet of Things', bgimg='../img/Network.png', headscript=headscript))

# get the thingworx password from file
os.putenv('HOME', '/home/webpics')
os.environ['HOME'] = '/home/webpics'
with open('/home/webpics/pass/iot.txt', 'r') as myfile:
    username, password = myfile.read().replace('\n', '').split(',')

# put together an HTTP request and send it to thingworx
# NOTE(review): an AppKey is hardcoded in this header — consider moving it
# to the credentials file read above.
headers = {'Accept': 'application/json AppKey:ba063966-5d0d-46ff-be4f-2a9ace0f40a0 Content-Type:application/json'}
params = (
    ('Accept', 'application/json-compressed'),
    ('_twsr', '1'),
    ('Content-Type', 'application/json'),
    # NOTE(review): chunk truncated here — the rest of this tuple is
    # outside this view.
# NOTE(review): fragment — these first lines are the tail of a network-info
# function whose def (and loop) begin outside this view; indentation here is
# a reconstruction and may not match the original nesting.
        interface = m.group(1)
        # Track every non-loopback interface exactly once.
        if interface != 'lo' and interface not in config['interfaces']:
            config['interfaces'].append(interface)
    return len(config['interfaces']) > 0


if __name__ == "__main__":
    DIR = os.environ['HOME'] + '/.conky'
    env = Environment(loader=FileSystemLoader(DIR), trim_blocks=True,
                      lstrip_blocks=True)
    template = env.get_template('conkyrc-template')

    # Defaults for the conkyrc template; the *_info() helpers below
    # populate/adjust this dict in place.
    config = {
        'debug': False,
        'font_size': 8,
        'swap': False,
        'torrents_host': None,
    }

    if not cpu_info():
        config['cpu_number'] = 1
        config['cpu_model'] = ' '
    ram_info()
    if not storage_info():
        # NOTE(review): chunk truncated here — this branch's body is
        # outside this view.
from _init import ROOT_DIR, OVERRIDE_CONFIG_DIRECTORY
from app import app
from buildman.container_cloud_config import CloudConfigContext
from buildman.server import SECURE_GRPC_SERVER_PORT

logger = logging.getLogger(__name__)

ONE_HOUR = 60 * 60

_TAG_RETRY_COUNT = 3  # Number of times to retry adding tags.
_TAG_RETRY_SLEEP = 2  # Number of seconds to wait between tag retries.

# Jinja environment for build-manager templates; the cloud-config context
# registers its own filters/globals on it before the template is loaded.
ENV = Environment(
    loader=FileSystemLoader(os.path.join(ROOT_DIR, "buildman/templates")))
CloudConfigContext().populate_jinja_environment(ENV)
TEMPLATE = ENV.get_template("cloudconfig.json")

build_start_duration = Histogram(
    "quay_build_start_duration_seconds",
    "seconds taken for a executor to start executing a queued build",
    labelnames=["executor"],
)


def observe(metric, *labels):
    """Decorator factory: time the wrapped call and record the elapsed
    seconds on *metric* with the given label values."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            trigger_time = time.time()
            rv = func(*args, **kwargs)
            metric.labels(*labels).observe(time.time() - trigger_time)
            # NOTE(review): chunk truncated here — presumably 'return rv'
            # and the decorator/wrapper returns follow outside this view.
class Variable():
    """Generates the C-source fragments for NEURON global/range variables.

    Each get_* method returns a string of C text that gen() collects into
    the token dict consumed by the 'global_variable.c' Jinja template.
    The emitted text must match NEURON's mod-file C conventions exactly.
    """

    def __init__(self):
        self.jinja_env = Environment(
            loader=FileSystemLoader(join(ROOT, 'template')))
        self.jinja_template = self.jinja_env.get_template("global_variable.c")
        self.version = "6.2.0"  # version string embedded in the mechanism table

    def gen(self, root, macro_table):
        # Build the full token dict for the template; every value is a
        # pre-rendered C text fragment. Relies on self.filename having
        # been set (see compile()).
        return {
            "define_params": self.get_define_params(root),
            "define_ions": self.get_define_ions(root),
            "hoc_parm_limits": self.get_hoc_parm_limits(root),
            "hoc_parm_units": self.get_hoc_parm_units(root),
            "hoc_global_param": self.get_hoc_global_param(root),
            "num_global_param": self.get_num_global_param(root),
            "define_global_param": self.get_define_global_param(root),
            "static_global": self.get_static_global(root),
            "num_states": self.get_num_states(root),
            "num_cvode": self.get_num_cvode(root),
            "mechanism": self.get_mechanism(root),
            "restruct_table": self.get_restruct_table(root),
            "optimize_table": self.get_optimize_table(root, macro_table),
            "ion_symbol": self.get_ion_symbol(root)
        }

    def get_define_params(self, root):
        """#define each parameter as a slot in the _p array; the slot
        order here fixes the memory layout the other fragments rely on."""
        code = ""
        cnt = 0
        ions = [[x.r[0].reads[0].name, x.w[0].writes[0].name]
                for x in children_of_type('UseIon', root)]
        params = [x.name for x in children_of_type('Range', root)[0].ranges] +\
            children_of_type('Nonspecific', root)[0].nonspecifics +\
            [x.name for x in children_of_type('State', root)[0].state_vars] +\
            ["D{0}".format(x.name) for x in children_of_type('State', root)[0].state_vars] +\
            reduce(lambda x, y: x+y, ions) +\
            ['v', '_g']
        for param in params:
            code += "#define {0} _p[{1}]\n".format(param, cnt)
            cnt += 1
        return code

    def get_ion_symbol(self, root):
        # One static Symbol pointer per used ion.
        code = ""
        for ion in children_of_type('UseIon', root):
            code += "static Symbol* _{0}_sym;\n".format(ion.ion)
        return code

    def get_define_ions(self, root):
        # Each ion consumes three consecutive _ppvar slots:
        # read value, written value, and d<written>/dv.
        code = ""
        cnt = 0
        for ion in children_of_type('UseIon', root):
            read_name = ion.r[0].reads[0].name
            write_name = ion.w[0].writes[0].name
            code += "#define _ion_{0} *_ppvar[{1}]._pval\n"\
                "#define _ion_{2} *_ppvar[{3}]._pval\n"\
                "#define _ion_d{4}dv *_ppvar[{5}]._pval\n"\
                .format(read_name, cnt * 3, write_name, cnt * 3 +
                        1, write_name, cnt * 3 + 2)
            cnt += 1
        return code

    def get_hoc_parm_limits(self, root):
        """Emit the hoc parameter-limit table entries (name, lower, upper),
        normalising exponential literals to the '<base>e+NN' form."""
        code = ""
        for parm in children_of_type('ParDef', root):
            if parm.llim and parm.ulim:
                llim = parm.llim
                ulim = parm.ulim
                mllim = exponential_exp.match(parm.llim)
                mulim = exponential_exp.match(parm.ulim)
                if mllim:
                    llim = "{0}e+{1:02d}"\
                        .format(mllim.group("b"), int(mllim.group("e")))
                if mulim:
                    ulim = "{0}e+{1:02d}"\
                        .format(mulim.group("b"), int(mulim.group("e")))
                code += "\t\"{0}_{1}\", {2}, {3},\n"\
                    .format(parm.name, self.filename, llim, ulim)
        code += "\t\"usetable_{0}\", 0, 1,// TODO: figure out what this is\n"\
            "\t0, 0, 0"\
            .format(self.filename)
        return code

    def get_hoc_parm_units(self, root):
        # Unit strings for assigned and parameter variables, with the
        # surrounding parentheses stripped.
        code = ""
        for assigned in children_of_type('Assigned', root)[0].assigneds:
            if assigned.unit:
                unit = assigned.unit
                unit = unit.replace('(', '')
                unit = unit.replace(')', '')
                code += "\t\"{0}_{1}\", \"{2}\",\n"\
                    .format(assigned.name, self.filename, unit)
        for param in children_of_type('Parameter', root)[0].parameters:
            if param.unit:
                unit = param.unit
                unit = unit.replace('(', '')
                unit = unit.replace(')', '')
                code += "\t\"{0}_{1}\", \"{2}\",\n"\
                    .format(param.name, self.filename, unit)
        code += "\t0,0"
        return code

    def get_num_states(self, root):
        return "{0}".format(len(children_of_type('State', root)[0].state_vars))

    def get_hoc_global_param(self, root):
        # Name/address pairs exposing the globals (plus usetable) to hoc.
        code = ""
        for param in children_of_type('Global', root)[0].globals:
            code += "\t\"{0}_{1}\", &{0}_{1},\n"\
                .format(param.name, self.filename)
        code += "\t\"usetable_{0}\", &usetable_{0},\n"\
            "\t0, 0"\
            .format(self.filename)
        return code

    def get_num_global_param(self, root):
        params = children_of_type('Global', root)[0].globals
        return "{0}".format(len(params))

    def get_define_global_param(self, root):
        # Globals live in _thread1data and are accessed through the
        # per-thread pointer table (_gth is the thread slot index).
        cnt = 0
        code = "#define _gth 0\n"
        for param in children_of_type('Global', root)[0].globals:
            code += "#define {0}_{1} _thread1data[{2}]\n"\
                "#define {0} _thread[_gth]._pval[{2}]\n"\
                .format(param.name, self.filename, cnt)
            cnt += 1
        code += "#define usetable usetable_{0}\n".format(self.filename)
        return code

    def get_static_global(self, root):
        code = ""
        for param in children_of_type('Global', root)[0].globals:
            code += "static double *_t_{0};\n".format(param.name)
        return code

    def get_mechanism(self, root):
        # Mechanism descriptor table: version, name, then the suffixed
        # range/nonspecific/state variable names, NULL-terminated.
        code = "\t\"{0}\",\n"\
            "\t\"{1}\",\n"\
            .format(self.version, self.filename)
        for parm in children_of_type('Range', root)[0].ranges:
            code += "\t\"{0}_{1}\",\n"\
                .format(parm.name, self.filename)
        for parm in children_of_type('Nonspecific', root)[0].nonspecifics:
            code += "\t\"{0}_{1}\",\n"\
                .format(parm, self.filename)
        for parm in children_of_type('State', root)[0].state_vars:
            code += "\t\"{0}_{1}\",\n"\
                .format(parm.name, self.filename)
        code += "\t0"
        return code

    def get_restruct_table(self, root):
        # 2-D lookup table with one column per global; TABLE_<NAME>(x)
        # macros index a column. (The #else branch for the non-restructured
        # layout is kept below, commented out.)
        cnt = 0
        params = children_of_type('Global', root)[0].globals
        # code = "#ifdef RESTRUCT_TABLE\n"\
        code = "#define TABLE_SIZE 201\n"\
            "FLOAT {0}_table[TABLE_SIZE][{1}];\n"\
            .format(self.filename, len(params))
        for param in params:
            code += "#define TABLE_{0}(x) {1}_table[(x)][{2}]\n"\
                .format(param.name.upper(), self.filename, cnt)
            cnt += 1
        # code += "#else\n"
        # for param in params:
        #     code += "#define TABLE_{0}(x) _t_{1}[(x)]\n"\
        #         .format(param.name.upper(), param.name)
        # code += "#endif\n"
        return code

    def get_optimize_table(self, root, macro_table):
        """Emit the optimised lookup tables. Variables listed in
        macro_table get grouped multi-column tables (in that order);
        any remaining params fall back to individual flat tables."""
        code = ""
        ions = [[x.r[0].reads[0].name, x.w[0].writes[0].name]
                for x in children_of_type('UseIon', root)]
        params = [x.name for x in children_of_type('Range', root)[0].ranges] +\
            children_of_type('Nonspecific', root)[0].nonspecifics +\
            [x.name for x in children_of_type('State', root)[0].state_vars] +\
            ['v', 'g'] +\
            reduce(lambda x, y: x+y, ions)
        # if we have ordered table, then we use it first
        # then add the remainings at the end
        if macro_table:
            code = ""
            for i in range(len(macro_table)):
                code += "static double opt_table{0}"\
                    "[BUFFER_SIZE * MAX_NTHREADS][{1}];\n"\
                    .format(i, len(macro_table[i]))
                for j in range(len(macro_table[i])):
                    code += "#define TABLE_{0}(x) "\
                        "opt_table{1}[(x)][{2}]\n"\
                        .format(macro_table[i][j].upper(), i, j)
                    if macro_table[i][j] in params:
                        params.remove(macro_table[i][j])
        for param in params:
            code += "static double _{0}_table[BUFFER_SIZE * MAX_NTHREADS];\n"\
                .format(param)
        return code

    def get_num_cvode(self, root):
        # Three cvode entries per used ion (matches get_define_ions' layout).
        return "{0}".format(len(children_of_type('UseIon', root)) * 3)

    def compile(self, filename, root, table_order):
        """Render the full C source for *filename*'s mechanism.

        self.filename must be set before gen(): the get_* helpers read it.
        NOTE(review): 'table_order' here is the 'macro_table' parameter of
        gen()/get_optimize_table; also, .items() in the update call is
        redundant (a plain dict would do) but harmless.
        """
        self.filename = filename
        tokens = self.gen(root, table_order)
        tokens.update({"filename": filename}.items())
        return self.jinja_template.render(**tokens)
class TemplateWorker(ExecutionWorker):
    """Execution worker that renders Jinja2 templates for its steps.

    This worker can work with steps like::

        {
            'info': '....',
            'template': 'template name',
            'filename': 'path to rendered template',
            'sudo': True,
        }

    If the key ``template`` exists, this worker will execute the step.
    Only if the key ``filename`` exists will the worker save the rendered
    template as a file; otherwise it interprets each rendered line as a
    command and executes it.
    """

    # (package, directory) arguments for jinja2.PackageLoader;
    # subclasses are expected to set this.
    templates = None

    def __init__(self, **kwargs):
        super(TemplateWorker, self).__init__(**kwargs)
        templateLoader = PackageLoader(*self.templates)
        self.templateEnv = Environment(loader=templateLoader)

    def render_template(self, filename):
        """Render template *filename* with this worker's render data."""
        template = self.templateEnv.get_template(filename)
        return template.render(self.render_data())

    def run(self, name):
        """Execute step *name*.

        Renders the step's template, then either copies it to the target
        file (via a temp file, optionally with sudo) or executes each
        rendered line as a command. Returns the step dict, or None when
        the base class skips the step.
        """
        step = super(TemplateWorker, self).run(name)
        if not step:
            return None
        if 'template' in step:
            t = self.render_template(step['template'])
            if 'filename' in step:
                filename = self.render_str(step['filename'])
                # Write to a temporary file first so the final copy can be
                # performed with sudo when the destination needs root.
                with NamedTemporaryFile() as f:
                    f.write(t.encode())
                    f.flush()
                    if step.get('sudo', False):
                        call(['sudo', 'cp', f.name, filename])
                    else:
                        call(['cp', f.name, filename])
            else:
                self.execute(t.split('\n'))
        self.logger.info('finished step "%s"', name)
        return step

    def export(self, name, file):
        """Write a shell-script representation of step *name* to *file*.

        File-producing steps are exported as a ``nano`` invocation followed
        by the rendered file content between marker comments; command steps
        are exported as the rendered lines themselves.
        """
        step = super(TemplateWorker, self).export(name, file)
        if not step:
            return None
        if 'template' in step:
            t = self.render_template(step['template'])
            if 'filename' in step:
                file.write('# Create a file:\n')
                filename = self.render_str(step['filename'])
                if step.get('sudo', False):
                    file.write('sudo nano "{}"\n'.format(filename))
                else:
                    file.write('nano "{}"\n'.format(filename))
                file.write('# ---- begin file ----\n')
                for c in t.split('\n'):
                    # Fix: dropped a stray, unused second format() argument
                    # (filename) that was a copy-paste leftover here.
                    file.write('{}\n'.format(c))
                file.write('# ----- end file -----\n')
            else:
                for c in t.split('\n'):
                    file.write('{}\n'.format(c))
        return step
except Exception:
    # Best-effort: report which plugin module failed and keep going.
    print(module_name)
    traceback.print_exc()

# 2. Data rendering: call generate(datas) on every plugin module under
# 'actions' (except 'utils'); failures are logged and skipped.
for _, module_name, _ in pkgutil.iter_modules(['actions']):
    if module_name not in ['utils']:
        try:
            module = importlib.import_module('.' + module_name, 'actions')
            func_generate = getattr(module, 'generate')
            func_generate(datas)
        except Exception:
            print(module_name)
            traceback.print_exc()

# 3. Page configuration: render the docsify sidebar and cover pages
# from Jinja2 templates packaged with 'actions'.
env = Environment(loader=PackageLoader('actions'))
template = env.get_template('sidebar.j2')
content = template.render(competitions=competitions)
with open('docs/_sidebar.md', 'w') as f:
    f.write(content)

# Timestamp format pins a fixed +0800 offset; the naive UTC time is
# shifted by 8 hours to match (i.e. China Standard Time).
STANDARD_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S+0800'
update = datetime.utcnow() + timedelta(hours=8)
update = update.strftime(STANDARD_TIME_FORMAT)
template = env.get_template('cover.j2')
content = template.render(update=update)
with open('docs/_coverpage.md', 'w') as f:
    f.write(content)
# Expose the builtin zip() as a template global in each environment so the
# templates can iterate multiple sequences in lockstep.
env2.globals.update(zip=zip)
env3.globals.update(zip=zip)
env4.globals.update(zip=zip)
env5.globals.update(zip=zip)
env6.globals.update(zip=zip)
env7.globals.update(zip=zip)
env8.globals.update(zip=zip)
env9.globals.update(zip=zip)
env10.globals.update(zip=zip)
env11.globals.update(zip=zip)
env12.globals.update(zip=zip)
env13.globals.update(zip=zip)
env14.globals.update(zip=zip)
env15.globals.update(zip=zip)
# Load one template per environment for the code-generation pipeline.
template0 = env0.get_template('process_edges_full.template')
template1 = env1.get_template('process_edges_incr.template')
template2 = env2.get_template('graph_engine.template')
template3 = env3.get_template('apply_phase.template')
template4 = env4.get_template('inference_box.template')
template5 = env5.get_template('load_vtmpprops.template')
template6 = env6.get_template('process_edge.template')
template7 = env7.get_template('read_vtmpproperty.template')
template8 = env8.get_template('reduce.template')
template9 = env9.get_template('process_edges_full_stinger.template')
template10 = env10.get_template('process_edges_incr_stinger.template')
template11 = env11.get_template('write_vtmpproperty.template')
# NOTE(review): template12-template15 all load the same
# 'write_vtmpproperty.template' as template11 — this looks like a
# copy-paste leftover; confirm the intended template names.
template12 = env12.get_template('write_vtmpproperty.template')
template13 = env13.get_template('write_vtmpproperty.template')
template14 = env14.get_template('write_vtmpproperty.template')
template15 = env15.get_template('write_vtmpproperty.template')