def check_cpp(argv, prog):
    """Run cpplint over every C/C++ source/header under the given directories.

    With -o, cpplint output is collected into <out>/cpplint.txt and then
    converted to <out>/cpplint.xml; without -o the results go to stdout.
    NOTE: Python 2 code (str.decode on the shell templates).
    """
    # Shell templates; {{dirs}} / {{out}} are Jinja2 placeholders, {} is xargs'.
    CMD_TPL1='for d in {{dirs}};do find $d -name "*.cc" -o -name "*.cpp" -o -name "*.h" -o -name "*.hpp" -o -name "*.c" | xargs -I {} cpplint {}; done'
    CMD_TPL2='for d in {{dirs}};do find $d -name "*.cc" -o -name "*.cpp" -o -name "*.h" -o -name "*.hpp" -o -name "*.c" | xargs -I {} cpplint {} >> {{out}}/cpplint.txt 2>&1;done'
    argparser = argparse.ArgumentParser(description="octopus ", prog=prog)
    argparser.add_argument('-o', dest='output_dir', help='文件输出目录', default='')
    argparser.add_argument(
        'dirs', metavar='N', nargs='*', help='要检查的目录')
    args = argparser.parse_args(argv)
    if args.output_dir:
        # Ensure the output dir exists and remove any stale report first.
        local('[ -d {out} ] || mkdir -p {out}'.format(out=args.output_dir))
        local('if [ -f {out}/cpplint.txt ];then rm -f {out}/cpplint.txt; fi'.format(
            out=args.output_dir))
        cmd = Template(CMD_TPL2.decode('utf-8'))
        cmd = cmd.render(out=args.output_dir, dirs=' '.join(args.dirs))
        cmd = cmd.encode('utf-8')
        # warn_only: cpplint exits non-zero when it finds style violations.
        pyshell.shell(cmd, warn_only=True)
        # Convert the plain-text cpplint output to XML (e.g. for CI tooling).
        converter = CpplintConverter()
        converter.parse(args.output_dir+'/cpplint.txt')
        converter.write(args.output_dir+'/cpplint.xml')
    else:
        cmd = Template(CMD_TPL1.decode('utf-8'))
        cmd = cmd.render(out=args.output_dir, dirs=' '.join(args.dirs))
        cmd = cmd.encode('utf-8')
        pyshell.shell(cmd, warn_only=True)
def render_ass(template_name, sentences, filename):
    """Render the named ASS subtitle template with *sentences* and write it
    into the gif cache; returns the path of the file written."""
    target_path = "static/cache/gif/%s.ass" % filename
    source = ass_text(template_name)
    rendered = Template(source).render(sentences=sentences)
    with open(target_path, "w") as out:
        out.write(rendered.encode("utf-8"))
    return target_path
def index():
    """Render views/index.html and return it as a one-element list of
    UTF-8 encoded bytes (WSGI-style response body)."""
    template_path = os.path.join("views", "index.html")
    with open(template_path, "r") as fh:
        raw = fh.read()
    rendered = Template(raw).render(name="Morra", user_list=["a", "b", "c"])
    return [rendered.encode('utf-8')]
def render_results(results, approved):
    """Render *results* (a mapping of result objects) into a temporary HTML
    file, sorted case-insensitively by name, and return the file's path.

    The caller is responsible for deleting the temporary file.
    """
    from jinja2 import Template
    # sorted() instead of list.sort(): also works when values() is a view.
    items = sorted(results.values(), key=lambda x: x.name.lower())
    rv = Template(RESULT_TEMPATE, autoescape=True).render(results=items,
                                                          approved=approved)
    fd, filename = tempfile.mkstemp(suffix='.html')
    # Close the handle deterministically; the original left it to the GC,
    # which can delay flushing the rendered HTML to disk.
    with os.fdopen(fd, 'w') as f:
        f.write(rv.encode('utf-8') + '\n')
    return filename
def main():
    """Syntax-highlight input.txt with FabLexer, inject the markup into the
    out.jinja template, and write the result to out.html."""
    with open("input.txt") as src, open("out.jinja") as tpl, \
            open('out.html', 'w') as dst:
        markup = highlight(src.read(), FabLexer(),
                           HtmlFormatter(style="friendly"))
        dst.write(Template(tpl.read()).render(source=markup).encode('utf-8'))
def transform(self, sheet_name, target_range, tmpl_path):
    """Render the cells of *target_range* from worksheet *sheet_name*
    through the Jinja2 template at *tmpl_path* and print the result.

    NOTE: Python 2 code -- sheet_name.decode('utf-8') expects a byte string.
    """
    ws = self.workbook.get_sheet_by_name(sheet_name.decode('utf-8'))
    with open(tmpl_path) as f:
        tmpl = f.read()
    # dedent/strip so the template file can be indented nicely on disk.
    rendered = Template(textwrap.dedent(tmpl).strip()).render({
        'rows': ws[target_range],  # rows of cells handed to the template
    })
    print(rendered.encode('utf-8'))
def _replace_template(file_path, variables):
    """Render the file at *file_path* in place as a Jinja2 template.

    The file is read as UTF-8, rendered with **variables**, and written
    back.  Missing files are silently ignored.
    """
    if not os.path.exists(file_path):
        return
    with open(file_path, 'r') as f:
        from jinja2 import Template
        rendered = Template(f.read().decode('utf-8')).render(**variables)
    # Context manager so the rewritten file is closed (and flushed) even if
    # write() raises; the original left the handle open on error.
    with open(file_path, 'w') as f:
        f.write(rendered.encode('utf-8'))
def _render_cloud_init(path: str, **kwargs) -> str:
    """Render a cloud-init Jinja2 template at the given path using the
    given keyword arguments.

    Args:
        path: Path to the Jinja2 template
        kwargs: Keyword arguments that will be passed to Jinja2 for rendering

    Returns:
        The rendered template encoded with base64
    """
    with open(path, 'r') as template_file:
        template_source = template_file.read()
    rendered = Template(template_source).render(**kwargs)
    encoded = base64.b64encode(rendered.encode('utf-8'))
    return encoded.decode('utf-8')
def create_plp(self, intern_plp_number, object_list):
    """Generate a new PLP (Pré Lista de Postagem / pre-posting list).

    :param intern_plp_number: sequential internal control number used to
        generate the PLP
    :param object_list: postal objects to include; each item must provide
        a 'tracking_code'
    :return: dict with the new 'plp_id' and the 'tracking_code_list' sent
    """
    data = {
        'card': self.card,
        'contract': self.contract,
        # NOTE: key spelling kept as-is ('reginal_code'); the template
        # presumably expects this exact key -- verify before renaming.
        'reginal_code': self.regional_code,
        'admin_code': self.admin_code,
        'sender_info': self.sender_info,
        'object_list': object_list,
    }
    # Render the PLP XML (project Template helper, not jinja2.Template).
    xml = Template('sigep/xml/plp.xml', data)
    # The SOAP endpoint expects ASCII with XML character references and no
    # insignificant whitespace between tags.
    xml = xml.encode('ascii', 'xmlcharrefreplace')
    # NOTE(review): this strips every space character; presumably the
    # template contains no meaningful spaces inside text nodes -- verify.
    xml = xml.replace(" ", "")
    xml = xml.replace('\n', '')
    xml = xml.replace('\t', '')
    xml = xml.replace("> <", "><")
    self._validate_xml(xml)
    # The web service wants tracking codes without their check digit.
    tracking_code_list = []
    for item in object_list:
        tracking_code = self._remove_dv_tracking_code(item.get('tracking_code'))
        tracking_code_list.append(tracking_code)
    logger.info(u'create_plp - xml: {} tracking_code_list: {}'.format(
        xml,
        ', '.join(tracking_code_list),
    ))
    plp_id = self.client.service.fechaPlpVariosServicos(
        xml=xml,
        idPlpCliente=intern_plp_number,
        cartaoPostagem=self.card,
        listaEtiquetas=tracking_code_list,
        usuario=self.user,
        senha=self.password,
    )
    logger.info(u'create_plp - tracking_code_list: {} plp: {}'.format(', '.join(tracking_code_list), plp_id))
    return {
        'plp_id': plp_id,
        'tracking_code_list': tracking_code_list,
    }
def run_script_with_context(script_path, cwd, context):
    """Execute a script after rendering it with Jinja.

    :param script_path: Absolute path to the script to run.
    :param cwd: The directory to run the script from.
    :param context: Cookiecutter project template context.
    """
    extension = os.path.splitext(script_path)[1]
    contents = io.open(script_path, "r", encoding="utf-8").read()
    with tempfile.NamedTemporaryFile(delete=False, mode="wb",
                                     suffix=extension) as temp:
        rendered = Template(contents).render(**context)
        temp.write(rendered.encode("utf-8"))
    run_script(temp.name, cwd)
def render_template(self, src, dst, **kwargs):
    """Render the template at *src* and save the result to *dst*.

    :param src: path to the template file
    :param dst: destination path
    :param kwargs: extra variables passed to the template engine
    """
    assert ConfigGlobals.ProjectName is not None or \
        error("ProjectName not specified in config file. Go to your config file to resolve this error")
    with open(src, 'r', encoding='utf-8') as template_file:
        rendered = Template(template_file.read()).render(
            PROJECT_NAME=ConfigGlobals.ProjectName,
            PROJECT_API=ConfigGlobals.ProjectName.upper(),
            **kwargs)
    destination = os.path.abspath(dst)
    # "x" mode: refuse to overwrite an existing file.
    with open(destination, "xb") as out_file:
        out_file.write(rendered.encode())
def run_script_with_context(script_path, cwd, context):
    """Execute a script after rendering it with Jinja.

    :param script_path: Absolute path to the script to run.
    :param cwd: The directory to run the script from.
    :param context: Cookiecutter project template context.
    """
    suffix = os.path.splitext(script_path)[1]
    source = io.open(script_path, 'r', encoding='utf-8').read()
    with tempfile.NamedTemporaryFile(delete=False, mode='wb',
                                     suffix=suffix) as rendered_script:
        body = Template(source).render(**context)
        rendered_script.write(body.encode('utf-8'))
    run_script(rendered_script.name, cwd)
def render(**context):
    """Render the thumbnail HTML template with *context*, screenshot it at
    1200x630 with pageres, and return the PNG bytes."""
    template_path = Path(__file__).parent / 'templates' / 'thumbnail.html'
    html = Template(template_path.read_text()).render(**context)
    # delete=False: the file must outlive the 'with' so pageres can read it;
    # it is removed in the finally block below.
    with tempfile.NamedTemporaryFile(suffix='.html', delete=False) as f:
        f.write(html.encode('utf-8'))
    try:
        with tempfile.TemporaryDirectory() as dir_path:
            # pageres doesn't support changing the output directory, so we need
            # to set cwd. The problem is, with cwd set to temp dir, npx stops
            # to work, therefore we need to use an explicit path here
            pageres = ['node', f'{os.getcwd()}/node_modules/.bin/pageres']
            run(pageres + [f'file://{f.name}', '1200x630', '--format=png',
                           '--overwrite', '--filename=thumbnail'],
                cwd=dir_path, check=True, stdout=DEVNULL)
            thumbnail_path = Path(dir_path) / 'thumbnail.png'
            return thumbnail_path.read_bytes()
    finally:
        # Always clean up the rendered HTML file, even on failure.
        os.unlink(f.name)
def kubeconfig_present(ca_certificate, client_certificate, client_key,
                       username, cluster_name, cluster_dns, output_path):
    """Ensure a kubeconfig rendered from kubectl_template exists at
    *output_path* (salt-style state function).

    Certificate/key files are read and embedded base64-encoded.  The file
    is only rewritten when its content differs from the freshly rendered
    template.  Returns a salt state result dict.
    """
    ret = {
        'name': 'kubeconfig_present',
        'changes': {},
        'result': False,
        'comment': '',
        'pchanges': {}
    }
    # Read and close each input explicitly; the original leaked every handle.
    with open(ca_certificate, "r") as f:
        signing_cert = base64.b64encode(f.read())
    with open(client_certificate, "r") as f:
        client_cert = base64.b64encode(f.read())
    with open(client_key, "r") as f:
        client_key_b64 = base64.b64encode(f.read())
    template = Template(kubectl_template).render(
        ca_certificate=signing_cert,
        client_certificate=client_cert,
        client_key=client_key_b64,
        user=username,
        cluster_name=cluster_name,
        cluster_dns=cluster_dns)
    new_file = True
    if os.path.isfile(output_path):
        with open(output_path, "r") as f:
            if f.read() == template:
                new_file = False
    if new_file:
        # Close (flush) before reporting success; the original never closed
        # the handle, so the config could still be partially buffered.
        with open(output_path, "w") as f:
            f.write(template.encode("utf-8"))
        ret["result"] = True
        ret["comment"] = "kubeconfig created."
        ret['changes'] = {
            'old': 'No kubeconfig was found.',
            'new': 'New kubeconfig generated at [{kube}]'.format(kube=output_path)
        }
    else:
        ret["result"] = True
        ret["comment"] = "kubeconfig already in correct state."
    return ret
def gen_conf(argv, prog): argparser = argparse.ArgumentParser(description="octopus ", prog=prog) argparser.add_argument('-t', '--tpl', dest='tpl', help='输入的模版文件', required=True) argparser.add_argument('-o', '--output', dest='output', help='输出文件路径', required=True) argparser.add_argument( 'params', metavar='N', nargs='*', help='模版参数') args = argparser.parse_args(argv) if not os.path.exists(args.tpl): print args.tpl + ' not exist' sys.exit(1) s = open(args.tpl, 'r').read() params = {} for p in args.params: (k,v) = p.split('=') params[k] = v out = Template(s.decode('utf-8')).render(params) open(args.output, 'w').write(out.encode('utf-8'))
def ShowRefundRecs(self):
    """Render the refund records view into the embedded WebView.

    Loads blank.htm first; if there are orders, renders the rt.htm template
    with the first order and displays the result as rfn.htm.
    NOTE: Python 2 code (str.decode / unicode.encode round-trip).
    """
    filename = 'blank.htm'
    URL = '%s%s%s' % (os.getcwd(), os.sep, filename)
    self.rWV.LoadURL(URL)
    if not self.Orders:
        return
    filename = 'rt.htm'
    with open(filename, 'r') as fp:
        filecontent = fp.read()
    # 'ignore' drops undecodable bytes instead of raising.
    filecontent = filecontent.decode('utf-8', 'ignore')
    rendered = Template(filecontent).render(ss=self.Orders[0])
    newfile = 'rfn.htm'
    with open(newfile, 'w') as fp:
        fp.write(rendered.encode('utf-8', 'ignore'))
    URL = '%s%s%s' % (os.getcwd(), os.sep, newfile)
    self.rWV.LoadURL(URL)
def produce():
    """Render every spine entry of the manifest into an output HTML file.

    For each file: demote headings one level (h1->h2 ... h4->h5), turn the
    <body> into a <div>, and render it through TEMPLATE together with the
    page title and the table of contents.
    NOTE: Python 2 code (print statement).
    """
    with open(PREFIX + MANIFEST, 'rb') as manifest:
        info = json.load(manifest)
    for file_name in info["spine"]:
        with open(PREFIX + file_name + '.html', 'rb') as source:
            data = source.read()
        doc = html.document_fromstring(data.decode('utf-8'))
        body = doc.find('.//body')
        body.tag = 'div'
        title = doc.find('.//h1')
        # Guard both a missing <h1> and an <h1> with no text; the original
        # crashed with AttributeError on empty headings (None.strip()).
        title = title.text.strip() if title is not None and title.text else ''
        print [title]
        # Demote in descending order so already-renamed tags aren't hit twice.
        rename_tag(doc, 'h4', 'h5')
        rename_tag(doc, 'h3', 'h4')
        rename_tag(doc, 'h2', 'h3')
        rename_tag(doc, 'h1', 'h2')
        tpl = {
            "title": title,
            "body": html.tostring(body),
            "toc": info.get("TOC")
        }
        content = Template(TEMPLATE).render(tpl)
        with open(file_name + '.html', 'wb') as fh:
            fh.write(content.encode('utf-8'))
def produce():
    """Render every spine entry of the manifest into an output HTML file.

    For each file: demote headings one level (h1->h2 ... h4->h5), turn the
    <body> into a <div>, and render it through TEMPLATE together with the
    page title and the table of contents.
    NOTE: Python 2 code (print statement).
    """
    with open(PREFIX + MANIFEST, 'rb') as manifest:
        info = json.load(manifest)
    for file_name in info["spine"]:
        with open(PREFIX + file_name + '.html', 'rb') as source:
            data = source.read()
        doc = html.document_fromstring(data.decode('utf-8'))
        body = doc.find('.//body')
        body.tag = 'div'
        title = doc.find('.//h1')
        # Guard both a missing <h1> and an <h1> with no text; the original
        # crashed with AttributeError on empty headings (None.strip()).
        title = title.text.strip() if title is not None and title.text else ''
        print [title]
        # Demote in descending order so already-renamed tags aren't hit twice.
        rename_tag(doc, 'h4', 'h5')
        rename_tag(doc, 'h3', 'h4')
        rename_tag(doc, 'h2', 'h3')
        rename_tag(doc, 'h1', 'h2')
        tpl = {
            "title": title,
            "body": html.tostring(body),
            "toc": info.get("TOC")
        }
        content = Template(TEMPLATE).render(tpl)
        with open(file_name + '.html', 'wb') as fh:
            fh.write(content.encode('utf-8'))
def make_package(self, instance, program_info, program_path, deploy_env,
                 default_runpath, remote_program_path):
    """Package the instance's files into a tarball for later copying.

    Collects the package's dirs/files/tpls (plus those of any extended
    packages) into a temporary __instance directory, renders template
    sources with the instance arguments, adds METAINFO and start scripts,
    and tars everything up.  Returns the absolute path of the tarball.
    NOTE: Python 2 code (print statements, str.decode).
    """
    # TODO: exclude .svn files when packaging (translated from the original).
    package = program_info.package
    print 'make_package instance=%s' % (instance)
    if instance['package']:
        # The instance may override the program's default package.
        package = program_info.packages[instance['package']]
    print 'make_package package=%s' % (package)
    # Fold the contents of extended packages into this one.
    for ext in package.extends:
        ext_package = program_info.packages[ext]
        package.dirs.extend(ext_package.dirs)
        package.files.extend(ext_package.files)
        package.tpls.extend(ext_package.tpls)
    current_dir = os.getcwd()
    pkg_file = '%s.tar.gz' % (instance['name'])
    pkg_file_full_path = current_dir + '/' + pkg_file
    os.chdir(program_path)
    # Build the __instance staging directory from scratch.
    local('[ ! -d __instance ] || rm -rf __instance')
    local('mkdir __instance')
    local(' if [ -d METAINFO ] ;then cp -r METAINFO __instance/;fi')
    # Arguments available to every rendered path and template.
    common_args = instance['args']
    common_args['env'] = deploy_env
    common_args['name'] = instance['name']
    if not 'run_path' in common_args:
        common_args['run_path'] = default_runpath
    for d in package.dirs:
        # Source paths may themselves be Jinja2 templates.
        src = Template(d.src.decode('utf-8')).render(common_args).encode('utf-8')
        local('cp -r %s __instance/%s' % (src, d.to))
    for d in package.files:
        src = Template(d.src.decode('utf-8')).render(common_args).encode('utf-8')
        to_dir = '__instance/' + os.path.dirname(d.to)
        if not os.path.isdir(to_dir):
            print 'mkdir ' , to_dir
            os.makedirs(to_dir)
        local('cp %s __instance/%s' % (src, d.to))
    print 'start prepare template'
    for d in package.tpls:
        src = Template(d.src.decode('utf-8')).render(common_args).encode('utf-8')
        s = open(src, 'r').read()
        #params = {}
        #for k in d.params:
        #    params[k] = common_args[k]
        out = Template(s.decode('utf-8')).render(common_args)
        target_file = '__instance/'+d.to
        to_dir = os.path.dirname(target_file)
        if not os.path.isdir(to_dir):
            os.makedirs(to_dir)
        # NOTE(review): the file is copied first and then overwritten with
        # the rendered text -- presumably to preserve file mode; verify.
        local('cp %s %s' % (src, target_file))
        open(target_file, 'w').write(out.encode('utf-8'))
    role_str = ','.join(instance['roles'])
    local('[ -d __instance/METAINFO ] || mkdir __instance/METAINFO')
    local('echo roles:%s >> __instance/METAINFO/deploy.inf' % (role_str))
    os.chdir('__instance')
    self.create_start_scripts(instance['name'], instance['roles'],
                              program_info.get_scripts(), remote_program_path,
                              default_runpath, common_args)
    os.chdir(program_path)
    local('echo `pwd` && cd __instance && tar -zcf %s *' % (
        pkg_file_full_path))
    local('rm -rf __instance')
    os.chdir(current_dir)
    return pkg_file_full_path
rankings = json.load(json_file) # Read competitions with open('Data/completed.json') as json_file: completed_competitions = json.load(json_file) with open('Data/upcoming.json') as json_file: upcoming_competitions = json.load(json_file) # Clean competition names for c in completed_competitions: c['name'] = re.sub(r" par.*$", "", c['name']) c['name'] = re.sub(r"monde", "Monde", c['name']) if not any(ext in c['name'] for ext in ['Monde', 'Championnats']): c['name'] = 'Coupe du Monde' for c in upcoming_competitions: c['name'] = re.sub(r" par.*$", "", c['name']) c['name'] = re.sub(r"monde", "Monde", c['name']) if not any(ext in c['name'] for ext in ['Monde', 'Championnats']): c['name'] = 'Coupe du Monde' # Read and jinjify template with open('template.html') as template_file: template = Template(template_file.read()) template = template.render(rankings=rankings, completed_competitions=completed_competitions, upcoming_competitions=upcoming_competitions) # Write output html with open('index.html', 'wb') as output_file: output_file.write(template.encode('utf8', 'replace'))
#!/usr/bin/env python2.7
import json, os
from jinja2 import Template

# Render a localized index.html for each supported language; English is
# written at the repository root, every other language into its own
# subdirectory (created on demand).
for lang in ['en', 'de', 'es', 'el', 'fr']:
    directory = '.' if (lang == 'en') else lang
    if not os.path.exists(directory):
        os.makedirs(directory)
    with open('%s/index.html' % directory, 'w') as out:
        with open('template.html') as tpl_file:
            with open('%s.json' % lang) as strings_file:
                strings = json.load(strings_file, encoding='utf-8')
                rendered = Template(tpl_file.read().decode('utf8')).render(**strings)
                out.write(rendered.encode('utf-8'))
import json
from jinja2 import Template
from ansi2html import Ansi2HTMLConverter
from ansi2html.style import get_styles

conv = Ansi2HTMLConverter()


def shell_to_html(shell):
    """Convert ANSI-escaped shell output to an HTML fragment (no full page)."""
    return conv.convert(shell, False)


if __name__ == '__main__':
    # Close every file deterministically; the original leaked all three handles.
    with open("template.html", "r") as template_file:
        template = Template(template_file.read())
    with open("./logs.json") as logs_file:
        logs = json.load(logs_file)
    result = template.render(
        data=logs,
        convert=shell_to_html,
        shell_css="\n".join(map(str, get_styles(conv.dark_bg, conv.scheme))))
    with open("index.html", "w") as out_file:
        out_file.write(result.encode("Utf-8"))
import sys
import json
from jinja2 import Template
sys.path.append('./libs')
from pynliner_encoded import Pynliner

# Usage: python <script> <template_folder>
# Renders <folder>/template.html with <folder>/data.json, inlines
# <folder>/style.css into the markup, and writes <folder>/index.html.
# NOTE: Python 2 code (print statements, str.decode).
folder = sys.argv[1]
if folder != '':
    template = open(folder + '/template.html', 'r').read()
    css = open(folder + '/style.css', 'r').read()
    with open(folder + '/data.json') as data_file:
        data = json.load(data_file)
    template = Template(template.decode('utf-8'))  # bytes -> unicode (Py2)
    template = template.render(data)
    # Inline the external stylesheet into style="" attributes (email-safe HTML).
    p = Pynliner()
    p.from_string(template).with_cssString(css)
    template = p.run()
    open(folder + '/index.html', 'w').write(template.encode('utf-8'))
    print "Generating of %s template is complete" % folder
    print "Please look through %s/index.html file" % folder
else:
    print "You need to specify a template name as a parameter"
def inject_credential(self, credential, env, safe_env, args, safe_args,
                      private_data_dir):
    """
    Inject credential data into the environment variables and
    arguments passed to `ansible-playbook`

    :param credential:       a :class:`awx.main.models.Credential` instance
    :param env:              a dictionary of environment variables used in
                             the `ansible-playbook` call.  This method adds
                             additional environment variables based on
                             custom `env` injectors defined on this
                             CredentialType.
    :param safe_env:         a dictionary of environment variables stored
                             in the database for the job run
                             (`UnifiedJob.job_env`); secret values should
                             be stripped
    :param args:             a list of arguments passed to
                             `ansible-playbook` in the style of
                             `subprocess.call(args)`.  This method appends
                             additional arguments based on custom
                             `extra_vars` injectors defined on this
                             CredentialType.
    :param safe_args:        a list of arguments stored in the database
                             for the job run (`UnifiedJob.job_args`);
                             secret values should be stripped
    :param private_data_dir: a temporary directory to store files
                             generated by `file` injectors (like config
                             files or key files)
    """
    if not self.injectors:
        # Built-in (Tower-managed) credential kinds have hard-coded injectors.
        if self.managed_by_tower and credential.kind in dir(
                builtin_injectors):
            injected_env = {}
            getattr(builtin_injectors, credential.kind)(credential, injected_env, private_data_dir)
            env.update(injected_env)
            safe_env.update(build_safe_env(injected_env))
        return

    class TowerNamespace:
        pass

    tower_namespace = TowerNamespace()

    # maintain a normal namespace for building the ansible-playbook arguments (env and args)
    namespace = {'tower': tower_namespace}

    # maintain a sanitized namespace for building the DB-stored arguments (safe_env and safe_args)
    safe_namespace = {'tower': tower_namespace}

    # build a normal namespace with secret values decrypted (for
    # ansible-playbook) and a safe namespace with secret values hidden (for
    # DB storage)
    for field_name, value in credential.inputs.items():
        if type(value) is bool:
            # boolean values can't be secret/encrypted
            safe_namespace[field_name] = namespace[field_name] = value
            continue
        if field_name in self.secret_fields:
            value = decrypt_field(credential, field_name)
            safe_namespace[field_name] = '**********'
        elif len(value):
            safe_namespace[field_name] = value
        if len(value):
            namespace[field_name] = value

    # default missing boolean fields to False
    for field in self.inputs.get('fields', []):
        if field['type'] == 'boolean' and field[
                'id'] not in credential.inputs.keys():
            namespace[field['id']] = safe_namespace[field['id']] = False

    file_tmpls = self.injectors.get('file', {})
    # If any file templates are provided, render the files and update the
    # special `tower` template namespace so the filename can be
    # referenced in other injectors
    for file_label, file_tmpl in file_tmpls.items():
        data = Template(file_tmpl).render(**namespace)
        _, path = tempfile.mkstemp(dir=private_data_dir)
        with open(path, 'w') as f:
            f.write(data.encode('utf-8'))
        # Rendered files may contain secrets: owner read/write only.
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        # determine if filename indicates single file or many
        if file_label.find('.') == -1:
            tower_namespace.filename = path
        else:
            if not hasattr(tower_namespace, 'filename'):
                tower_namespace.filename = TowerNamespace()
            file_label = file_label.split('.')[1]
            setattr(tower_namespace.filename, file_label, path)

    injector_field = self._meta.get_field('injectors')
    for env_var, tmpl in self.injectors.get('env', {}).items():
        try:
            injector_field.validate_env_var_allowed(env_var)
        except ValidationError as e:
            logger.error(
                six.text_type(
                    'Ignoring prohibited env var {}, reason: {}').format(
                        env_var, e))
            continue
        # Render twice: real values for execution, sanitized for DB storage.
        env[env_var] = Template(tmpl).render(**namespace)
        safe_env[env_var] = Template(tmpl).render(**safe_namespace)

    if 'INVENTORY_UPDATE_ID' not in env:
        # awx-manage inventory_update does not support extra_vars via -e
        extra_vars = {}
        for var_name, tmpl in self.injectors.get('extra_vars', {}).items():
            extra_vars[var_name] = Template(tmpl).render(**namespace)

        def build_extra_vars_file(vars, private_dir):
            # Dump extra_vars as YAML into the private dir, owner read-only.
            handle, path = tempfile.mkstemp(dir=private_dir)
            f = os.fdopen(handle, 'w')
            f.write(safe_dump(vars))
            f.close()
            os.chmod(path, stat.S_IRUSR)
            return path

        path = build_extra_vars_file(extra_vars, private_data_dir)
        if extra_vars:
            args.extend(['-e', '@%s' % path])
            safe_args.extend(['-e', '@%s' % path])
def create_itmsp_package(output_path, provider, team_id, vendor_id, version_string, whats_new): """Creates itmsp package for uploading localized store assets at <output_path>""" # Construct package itmsp_path = os.path.abspath( os.path.join(output_path, vendor_id + '.itmsp')) if os.path.exists(itmsp_path): # Delete any pre-existing package at <output_path> shutil.rmtree(itmsp_path) os.mkdir(itmsp_path, 0744) with open("StoreAssets/master.yaml") as f: # Use english store description as a fallback en_yaml = yaml.load(f)['en'] store_assets_path = os.path.abspath('./StoreAssets') # Load the json file which provides metadata used by create_locale_template. # This file specifies: # - which languages are supported by Apple and language code mappings from the codes used in # StoreAssets/*.yaml to those used by Apple. # - mappings of <device_name> to display targets that need to be specified in itmsp metadata # - screenshot patterns used for finding screenshot files # # create_locale_template will look for screenshot files for each screenshot pattern display target pairing. # With "<device_name>" replaced with the display target key. For example with the sample json below the # screenshots for the en locale would be: # - ./StoreAssets/screenshots/en-US/iPhone 5s-screenshot_name.png # - ./StoreAssets/screenshots/en-US/iPad Pro (12.9 inch)-screenshot_name.png # And they would be respectively renamed in the itmsp package as: # - <output_path>/<vendor_id>.itmsp/en-US_iOS-5.5-in_screenshot_name.png # - <output_path>/<vendor_id>.itmsp/en-US_iOS-iPad-Pro_screenshot_name.png # # The expected format of metadata.json is: # '{ # "_comments": "..." # "app_store_supported_languages": [ # "en", # "fr", # ... # ], # "language_code_mappings": { # "en": "en-US", # ... # }, # "display_targets": { # "iPhone 5s": "iOS-5.5-in", # "iPad Pro (12.9 inch)": "iOS-iPad-Pro" # ... # }, # "screenshots": [ # "filename_pattern": "<device_name>-screenshot_name.png", # ... 
# ] # }' # metadata_file_path = os.path.join(store_assets_path, 'metadata.json') if os.path.exists(metadata_file_path): with open(metadata_file_path) as metadata_file: metadata = json.load(metadata_file) # Generate itmsp locale xml for each *.yaml file in the ./StoreAssets directory yaml_files = filter( lambda filename: filename.endswith('.yaml'), os.listdir("StoreAssets")) templates = [ create_locale_template( defaults=en_yaml, localized_store_strings_yaml=x, itmsp_path=itmsp_path, store_assets_path=store_assets_path, website=psiphon_website_info.PsiphonWebsiteInfo(), metadata=metadata, whats_new=whats_new) for x in yaml_files ] # Combine locales to create completed metadata.xml file for upload context = { 'locales': filter(lambda locale: locale is not None, templates), 'provider': provider, 'team_id': team_id, 'vendor_id': vendor_id, 'version_string': version_string } metadata_template = Template(get_itmsp_xml()).render(context) with open(itmsp_path + '/metadata.xml', "wb") as itmsp_metadata_file: itmsp_metadata_file.write(metadata_template.encode('utf8')) else: sys.exit("Metadata file not found. Exiting...")
{%set response = response + ['url=url'] -%} {% endif -%} {%- if item['parameters'].get('query') %} params = {{ item['parameters'].get('query') -}} {%set response = response + ['params=params'] -%} {% endif -%} {%- if item['parameters'].get('fromData') %} data = {{ item['parameters'].get('data') -}} {%set response = response + ['data=data'] -%} {% endif -%} {%- if item['parameters'].get('body') %} _json = {{ item['parameters'].get('body') -}} {%set response = response + ['json=_json'] -%} {% endif %} return response({{ ', '.join(response) }}) {% endfor %} """ if __name__ == '__main__': swagger_url = 'http://mdp.test.zghbh.com/v2/api-docs' class_name = 'hunbohuiMDP' object_file_name = '../apis/huibohuiMDP2.py' ss = Swagger(swagger_url) ss.parse() t = Template(SCRIPT_TEMPLATE).render(class_name=class_name, tags=ss.tags, items=ss.result) with open(object_file_name, 'wb') as f: f.write(t.encode('utf-8'))
def cookiepatch():
    """Apply (or just show) the diff between two revisions of a cookiecutter
    template to the current project, rendered with the project's context.

    Configuration (template URL, last applied revision, variables) is read
    from CONF_PATH when present and rewritten at the end so subsequent runs
    can diff from the recorded revision.
    """
    parser = argparse.ArgumentParser(
        description='Tool to apply / create patch from '
                    'cookiecutter templates')
    parser.add_argument('--template', type=str,
                        help='an integer for the accumulator')
    parser.add_argument('--diff', type=str, nargs='+',
                        help='versions passed for git diff')
    parser.add_argument('--show', action='store_true', help='Just print diff')
    args = parser.parse_args()
    conf_file = None
    if os.path.exists(CONF_PATH):
        with open(CONF_PATH) as f:
            conf_file = json.load(f)
    # Resolve the template URL: CLI flag > config file > interactive prompt.
    if args.template:
        template = args.template
    elif conf_file and 'template' in conf_file:
        template = conf_file['template']
    else:
        template = input('Input template repository url: ')
    # Resolve the revision range to diff the template over.
    if args.diff:
        diff = args.diff
    elif conf_file and 'revision' in conf_file:
        diff = [conf_file['revision']]
    else:
        cur = input('Input template version applied currently: ')
        to = input('Input version to follow [master]: ') or 'master'
        diff = [cur, to]
    no_input = False
    config_dict = get_user_config(config_file=USER_CONFIG_PATH)
    parsed_template = expand_abbreviations(template, config_dict)
    repo_dir = clone(repo_url=parsed_template,
                     clone_to_dir=config_dict['cookiecutters_dir'],
                     checkout=None,
                     no_input=no_input)
    # Diff only the templated project directory inside the template repo.
    patch_bytes = subprocess.check_output(['git', 'diff'] + diff +
                                          ['--', '{{cookiecutter.repo_name}}'],
                                          cwd=repo_dir)
    patch_str = patch_bytes.decode()
    context_file = os.path.join(repo_dir, 'cookiecutter.json')
    context = generate_context(
        context_file=context_file,
        default_context=config_dict['default_context'],
        extra_context={},
    )
    if conf_file:
        # Reuse the variables recorded on the previous run.
        context['cookiecutter'] = conf_file['variables']
    else:
        # prompt the user to manually configure at the command line.
        # except when 'no-input' flag is set
        context['cookiecutter'] = prompt_for_config(context, no_input)
    # The patch itself may contain cookiecutter placeholders -- render it.
    rendered = Template(patch_str).render(**context)
    if args.show:
        print(rendered)
        return
    # Apply from the parent directory, stripping one path component (-p1).
    p = subprocess.Popen(['patch', '-Np1', '--no-backup-if-mismatch'],
                         stdin=subprocess.PIPE, cwd='..')
    p.communicate(rendered.encode())
    # Generate cookiepatcher JSON
    if len(diff) == 1:
        rev = 'HEAD'
    else:
        rev = diff[-1]
    revision_bytes = subprocess.check_output(['git', 'rev-parse'] + [rev],
                                             cwd=repo_dir)
    revision_str = revision_bytes.decode().rstrip('\n')
    json_content = {
        'revision': revision_str,
        'variables': context['cookiecutter'],
        'template': template
    }
    with open(CONF_PATH, 'w') as f:
        json.dump(json_content, f, ensure_ascii=False, indent=2,
                  sort_keys=True)
center=False) report_body += get_paralel_images([logo_path, logo_path], captions=['fig1', 'fig2'], center=False) report_body += get_base64_image(base64_img, caption='base64 exemple') captions = ['onuo', 'onuo insuo'] # report_body += get_paralel_images(['regions_WB.jpg', logo_path, logo_path, logo_path], captions=None, center=True) report_dict = dict(report_header=report_header, report_cover=report_cover, report_body=report_body, has_cover=include_title_page, secondary_language=report_language, font_name=font) # report_dict.update(insets) latex_src = Template(report_string).render(**report_dict) print('* * *') print(latex_src) print('* * *') with open(output_latex_file, 'w') as f: f.write(latex_src.encode('utf8')) # lualatex is required if we want to use a custom font, such as, Roboto. os.system( 'lualatex --shell-escape -synctex=1 -interaction=nonstopmode -output-directory="%s" "%s"' % (out_dir, output_latex_file)) # os.system('lualatex --shell-escape -synctex=1 -interaction=nonstopmode -output-directory="%s" "%s"' % (out_dir, output_latex_file)) # os.system('pdflatex --shell-escape -synctex=1 -interaction=nonstopmode -output-directory="%s" "%s"' % (out_dir, output_latex_file)) # lualatex - synctex = 1 - interaction = nonstopmode %.tex
def render_template(path, context={}):
    """Render templates/<path> with *context* and return the result as a
    one-element list of UTF-8 bytes (WSGI-style response body).

    :param path: template filename relative to the templates/ directory
    :param context: mapping of variables passed to the template (read-only
        here, so the mutable default is harmless; kept for compatibility)
    """
    # Read via a context manager; the original leaked the file handle.
    with open("templates/{}".format(path), "r") as fh:
        html = fh.read()
    rendered = Template(html).render(**context)
    return [bytes(rendered.encode("utf-8"))]
def cookiepatch():
    """Apply (or just show) the diff between two revisions of a cookiecutter
    template to the current project, rendered with the project's context.

    Configuration (template URL, last applied revision, variables) is read
    from CONF_PATH when present and rewritten at the end so subsequent runs
    can diff from the recorded revision.
    """
    parser = argparse.ArgumentParser(description='Tool to apply / create patch from '
                                                 'cookiecutter templates')
    parser.add_argument('--template', type=str,
                        help='an integer for the accumulator')
    parser.add_argument('--diff', type=str, nargs='+',
                        help='versions passed for git diff')
    parser.add_argument('--show', action='store_true', help='Just print diff')
    args = parser.parse_args()
    conf_file = None
    if os.path.exists(CONF_PATH):
        with open(CONF_PATH) as f:
            conf_file = json.load(f)
    # Resolve the template URL: CLI flag > config file > interactive prompt.
    if args.template:
        template = args.template
    elif conf_file and 'template' in conf_file:
        template = conf_file['template']
    else:
        template = input('Input template repository url: ')
    # Resolve the revision range to diff the template over.
    if args.diff:
        diff = args.diff
    elif conf_file and 'revision' in conf_file:
        diff = [conf_file['revision']]
    else:
        cur = input('Input template version applied currently: ')
        to = input('Input version to follow [master]: ') or 'master'
        diff = [cur, to]
    no_input = False
    config_dict = get_user_config(config_file=USER_CONFIG_PATH)
    parsed_template = expand_abbreviations(template, config_dict)
    repo_dir = clone(repo_url=parsed_template,
                     clone_to_dir=config_dict['cookiecutters_dir'],
                     checkout=None,
                     no_input=no_input)
    # Diff only the templated project directory inside the template repo.
    patch_bytes = subprocess.check_output(['git', 'diff'] + diff +
                                          ['--', '{{cookiecutter.repo_name}}'],
                                          cwd=repo_dir)
    patch_str = patch_bytes.decode()
    context_file = os.path.join(repo_dir, 'cookiecutter.json')
    context = generate_context(
        context_file=context_file,
        default_context=config_dict['default_context'],
        extra_context={},
    )
    if conf_file:
        # Reuse the variables recorded on the previous run.
        context['cookiecutter'] = conf_file['variables']
    else:
        # prompt the user to manually configure at the command line.
        # except when 'no-input' flag is set
        context['cookiecutter'] = prompt_for_config(context, no_input)
    # The patch itself may contain cookiecutter placeholders -- render it.
    rendered = Template(patch_str).render(**context)
    if args.show:
        print(rendered)
        return
    # Apply from the parent directory, stripping one path component (-p1).
    p = subprocess.Popen(['patch', '-Np1', '--no-backup-if-mismatch'],
                         stdin=subprocess.PIPE, cwd='..')
    p.communicate(rendered.encode())
    # Generate cookiepatcher JSON
    if len(diff) == 1:
        rev = 'HEAD'
    else:
        rev = diff[-1]
    revision_bytes = subprocess.check_output(['git', 'rev-parse'] + [rev],
                                             cwd=repo_dir)
    revision_str = revision_bytes.decode().rstrip('\n')
    json_content = {
        'revision': revision_str,
        'variables': context['cookiecutter'],
        'template': template
    }
    with open(CONF_PATH, 'w') as f:
        json.dump(json_content, f, ensure_ascii=False, indent=2,
                  sort_keys=True)