Example #1
 def __init__(self):
     self._config = Config(os.path.join(PROJECT_ROOT, 'config.json'))
     self._db = DrugDatabase(self._config)
     self._env = Environment(loader=PackageLoader('combogen', 'templates'),
                             lstrip_blocks=True,
                             trim_blocks=True)
Example #2
from jinja2 import Environment, PackageLoader, FileSystemLoader

# Package loader
env = Environment(loader=PackageLoader('web_jinja', 'template'))

# Filesystem loader
# env = Environment(loader=FileSystemLoader('web_jinja/template'))

# Look up the template
template = env.get_template('index.html')

# Render the template and print the result
print(template.render(title='Jinja2', url='www.Jinja2', username='******'))

# # Autoescaping
# def guess_autoescape(template_name):
#     if template_name is None or '.' not in template_name:
#         return False
#     # rsplit on '.' once from the right and take index 1 of the result, i.e. the file extension
#     ext = template_name.rsplit('.', 1)[1]
#     return ext in ('html', 'htm', 'xml')
#
# env = Environment(autoescape=guess_autoescape,
#                   loader=PackageLoader('web_jinja'),
#                   extensions=['jinja2.ext.autoescape'])
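As a footnote to the commented-out guess_autoescape helper above: since Jinja2 2.9 the same behaviour is available via the built-in select_autoescape, and the jinja2.ext.autoescape extension no longer exists in 3.x. A minimal sketch, reusing the 'web_jinja'/'template' layout from this example:

from jinja2 import Environment, PackageLoader, select_autoescape

# Escape only templates whose filename ends in .html/.htm/.xml, just like
# the hand-rolled guess_autoescape above.
env = Environment(loader=PackageLoader('web_jinja', 'template'),
                  autoescape=select_autoescape(['html', 'htm', 'xml']))
print(env.get_template('index.html').render(title='<b>Jinja2</b>'))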
Example #3
# -*- coding: utf-8 -*-
"""Global pyprof2html's environment.
"""

from jinja2 import Environment, PackageLoader

__all__ = ['ENVIRON']

CODEC = 'utf-8'
ENVIRON = Environment(loader=PackageLoader('pyprof2html',
                      './templates', encoding=CODEC))
Example #4
.. bokeh-jinja:: bokeh.core.templates.DOC_NB_JS
.. bokeh-jinja:: bokeh.core.templates.FILE
.. bokeh-jinja:: bokeh.core.templates.JS_RESOURCES
.. bokeh-jinja:: bokeh.core.templates.NOTEBOOK_LOAD
.. bokeh-jinja:: bokeh.core.templates.PLOT_DIV
.. bokeh-jinja:: bokeh.core.templates.ROOT_DIV
.. bokeh-jinja:: bokeh.core.templates.SCRIPT_TAG

'''
from __future__ import absolute_import

import json

from jinja2 import Environment, PackageLoader, Markup

_env = Environment(loader=PackageLoader('bokeh.core', '_templates'))
_env.filters['json'] = lambda obj: Markup(json.dumps(obj))

JS_RESOURCES = _env.get_template("js_resources.html")

CSS_RESOURCES = _env.get_template("css_resources.html")

SCRIPT_TAG = _env.get_template("script_tag.html")

PLOT_DIV = _env.get_template("plot_div.html")

ROOT_DIV = _env.get_template("root_div.html")

DOC_JS = _env.get_template("doc_js.js")

DOC_NB_JS = _env.get_template("doc_nb_js.js")
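For context, a minimal self-contained sketch of how a filter registered this way behaves at render time. Markup is imported from markupsafe here (newer Jinja2 versions no longer re-export it), and the template string is invented for illustration:

import json

from jinja2 import Environment
from markupsafe import Markup

# Wrapping the dumped JSON in Markup marks it as safe, so autoescaping
# does not mangle the quotes a second time.
env = Environment(autoescape=True)
env.filters['json'] = lambda obj: Markup(json.dumps(obj))
print(env.from_string("var config = {{ cfg | json }};").render(cfg={"n": 1}))
# -> var config = {"n": 1};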
Example #5
import sys
import webbrowser
import random
from tempfile import NamedTemporaryFile
from typing import Any, Dict
from bokeh.io import output_notebook
from bokeh.embed import components
from bokeh.resources import INLINE
from jinja2 import Environment, PackageLoader
from ..utils import is_notebook

output_notebook(INLINE, hide_banner=True)  # for offline usage

ENV_LOADER = Environment(
    loader=PackageLoader("edax", "eda/distribution/templates"),
)


class Container:
    """
    This class creates a customized Container object for the plot(df) function.
    """

    def __init__(
        self,
        to_render: Dict[str, Any],
        visual_type: str,
    ) -> None:
        if visual_type == "distribution_grid":
            self.context = {
Example #6
    if talkgroup_ids:
        logging.info('ID ALIAS MAPPER: talkgroup_ids dictionary is available')

    local_subscriber_ids = mk_full_id_dict(PATH, LOCAL_SUB_FILE, 'subscriber')
    if local_subscriber_ids:
        logging.info('ID ALIAS MAPPER: local_subscriber_ids added to subscriber_ids dictionary')
        subscriber_ids.update(local_subscriber_ids)

    local_peer_ids = mk_full_id_dict(PATH, LOCAL_PEER_FILE, 'peer')
    if local_peer_ids:
        logging.info('ID ALIAS MAPPER: local_peer_ids added peer_ids dictionary')
        peer_ids.update(local_peer_ids)

    # Jinja2 Stuff
    env = Environment(
        loader=PackageLoader('monitor', 'templates'),
        autoescape=select_autoescape(['html', 'xml'])
    )

    dtemplate = env.get_template('hblink_table.html')
    btemplate = env.get_template('bridge_table.html')

    # Create Static Website index file
    index_html = get_template(PATH + 'index_template.html')
    index_html = index_html.replace('<<<system_name>>>', REPORT_NAME)
    if CLIENT_TIMEOUT > 0:
        index_html = index_html.replace('<<<timeout_warning>>>', 'Continuous connections not allowed. Connections time out in {} seconds'.format(CLIENT_TIMEOUT))
    else:
        index_html = index_html.replace('<<<timeout_warning>>>', '')

    # Start update loop
Example #7
@operate_bp.listener('before_server_start')
def setup_db(operate_bp, loop):
    global motor_base
    motor_base = MotorBase()


@operate_bp.listener('after_server_stop')
def close_connection(operate_bp, loop):
    global motor_base
    motor_base = None


enable_async = sys.version_info >= (3, 6)

# jinja2 config
env = Environment(loader=PackageLoader('views.operate',
                                       '../templates/operate'),
                  autoescape=select_autoescape(['html', 'xml', 'tpl']),
                  enable_async=enable_async)


async def template(tpl, **kwargs):
    template = env.get_template(tpl)
    rendered_template = await template.render_async(**kwargs)
    return html(rendered_template)


@operate_bp.route("/login", methods=['GET'])
async def owllook_get_login(request):
    return await template('login.html', static='/html/statics')

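A minimal sketch of the enable_async/render_async pattern above outside of Sanic; DictLoader stands in for PackageLoader so the snippet is self-contained:

import asyncio

from jinja2 import DictLoader, Environment

# enable_async compiles templates for use with render_async(), which must
# be awaited (here via asyncio.run instead of Sanic's event loop).
env = Environment(loader=DictLoader({'login.html': 'Hello {{ user }}!'}),
                  enable_async=True)

async def render(tpl, **kwargs):
    template = env.get_template(tpl)
    return await template.render_async(**kwargs)

print(asyncio.run(render('login.html', user='owllook')))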
Example #8
def cli(install_directory, metadata_directory, distribution, user, confirm_yes,
        menu_file, wsl_executable, target_name, preferred_theme,
        alternative_theme, jinja_template_batch, jinja_template_shell, rc_file,
        launch_directory, batch_encoding, use_batch_newline_crlf):

    # Debug information
    logger.info("distribution = %s", distribution)
    logger.info("user = %s", user)
    logger.info("confirm_yes = %s", confirm_yes)
    logger.info("menu_file = %s", menu_file.name if menu_file else None)
    logger.info("wsl_executable = %s", wsl_executable)
    logger.info("target_name = %s", target_name)
    logger.info("preferred_theme = %s", preferred_theme)
    logger.info("alternative_theme = %s", alternative_theme)
    logger.info("jinja_template_batch = %s",
                jinja_template_batch.name if jinja_template_batch else None)
    logger.info("jinja_template_shell = %s",
                jinja_template_shell.name if jinja_template_shell else None)
    logger.info("rc_file = %s", rc_file.name)
    logger.info("has_imagemagick = %s", has_imagemagick)
    logger.info("has_cairosvg = %s", has_cairosvg)
    logger.info("launch_directory = %s", launch_directory)
    logger.info("batch_encoding = %s", batch_encoding)
    logger.info("use_batch_newline_crlf = %s", use_batch_newline_crlf)

    # Add distro to directory names, since we want to support multiple concurrent distributions
    install_directory = os.path.join(install_directory, target_name)
    metadata_directory = os.path.join(metadata_directory, target_name)

    # OK, print debug information for these now
    logger.info("install_directory = %s", install_directory)
    logger.info("metadata_directory = %s", metadata_directory)

    if not menu_file:
        logger.error(
            "Could not find an appropriate .menu file in %s - perhaps yum/apt install gnome-menus or another desktop?",
            DEFAULT_MENU_LOCATION)
        sys.exit(os.EX_OSFILE)

    if not confirm_yes:
        logger.info(
            "For full list of options available, call script again with --help"
        )
        logger.info(
            "This script will write to the above locations if it can, but giving final chance to chicken out."
        )
        input("Press <enter> to continue or ctrl+c to abort.")

    # OK we're ready to go - ensure we can create / have write access to the installation directory
    try:
        # Create directory and fear not if it already exists
        os.makedirs(install_directory, exist_ok=True)
        os.makedirs(metadata_directory, exist_ok=True)
    except PermissionError:
        logger.error(
            "No permissions to create directories %s or %s - aborting",
            install_directory, metadata_directory)
        sys.exit(os.EX_NOPERM)

    # Make metadata a hidden system file to hide it from indexer (cleaner search results for powertoys etc)
    set_hidden_from_indexer(metadata_directory)

    # Check we have absolute ownership of this directory - if not, chicken out
    if not is_directory_writable(install_directory):
        logger.error(
            "Could not confirm write access to all contents of %s - aborting",
            install_directory)
        sys.exit(os.EX_NOPERM)

    # Find all desktop menu items, indexed by menu path
    menu = xdg.Menu.parse(menu_file.name)
    entries = get_desktop_entries(menu)

    # Create shortcut launcher script (avoids terminal being displayed while launching)
    silent_launcher_script_file = os.path.join(metadata_directory,
                                               "silent-launcher.vbs")
    if not os.path.exists(silent_launcher_script_file):
        try:
            with open(silent_launcher_script_file, "w") as lsf:
                # If this gets more complicated, we could make this a resource, but at one line, this is fine
                lsf.write(
                    'CreateObject("Wscript.Shell").Run """" & WScript.Arguments(0) & """", 0, False'
                )
        except Exception:
            logger.error("Could not create %s", silent_launcher_script_file)
            sys.exit(os.EX_IOERR)

    # Build windows path for the launcher script
    silent_launcher_script_file_win = get_windows_path_from_wsl_path(
        silent_launcher_script_file)

    # Load in the template which is used to generate the launcher script
    if not jinja_template_batch:
        # Default load from package
        env = Environment(
            loader=PackageLoader('wsl_windows_toolbar', package_path=''))
        batch_template = env.get_template(
            "wsl-windows-toolbar-template.bat.j2")
        shell_template = env.get_template("wsl-windows-toolbar-template.sh.j2")
    else:
        # Optionally load from custom filesystem location
        env = Environment(loader=FileSystemLoader(
            os.path.dirname(os.path.abspath(jinja_template_batch.name))))
        batch_template = env.get_template(
            os.path.basename(jinja_template_batch.name))
        shell_template = env.get_template(
            os.path.basename(jinja_template_shell.name))

    # Create shortcut files
    shortcuts_installed = 0
    for path, entry in entries.items():
        logger.info("Creating menu item for: %s", path)
        exec_cmd = entry.getExec()
        # https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html#key-path
        exec_dir = entry.getPath()
        # https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html#key-terminal
        run_in_terminal = entry.getTerminal()

        if not exec_dir:
            exec_dir = launch_directory

        # These parts aren't relevant for menu launcher so prune out from the command
        for substr in FREEDESKTOP_FIELD_CODES:
            exec_cmd = exec_cmd.replace(substr, "")

        # Carve the way for the shortcut
        shortcut_path = os.path.join(install_directory, "%s.lnk" % path)
        os.makedirs(os.path.dirname(shortcut_path), exist_ok=True)
        logger.debug("Will create shortcut file: %s", shortcut_path)

        # Normalize the icon to a windows path so shortcut can find it
        icon = entry.getIcon()
        metadata_prefix = os.path.join(metadata_directory, "%s" % path)
        ico_file_winpath = create_windows_icon(
            icon if icon else entry.getName().lower(),
            metadata_prefix,
            preferred_theme=preferred_theme,
            alternative_theme=alternative_theme)

        shell_launcher_path = os.path.join(metadata_directory, "%s.sh" % path)
        template_dict = {
            "distribution": distribution,
            "user": user,
            "command": exec_cmd,
            "wsl": wsl_executable,
            "rcfile": rc_file.name,
            "launch_script": shell_launcher_path,
            "exec_dir": exec_dir,
            "run_in_terminal": run_in_terminal
        }

        # Create a little shell launcher for the executable
        with open(shell_launcher_path, mode="w") as script_handle:
            script_handle.write(shell_template.render(template_dict))
        # Make executable (mode 509 == 0o775)
        os.chmod(shell_launcher_path, 509)
        set_hidden_from_indexer(shell_launcher_path)

        # Create a little batch file launcher for the executable
        batch_launcher_path = os.path.join(metadata_directory, "%s.bat" % path)
        batch_newline = "\r\n" if use_batch_newline_crlf else None
        with open(batch_launcher_path,
                  mode="w",
                  encoding=batch_encoding,
                  newline=batch_newline) as script_handle:
            script_handle.write(batch_template.render(template_dict))
        batch_launcher_path_win = get_windows_path_from_wsl_path(
            batch_launcher_path)

        if run_in_terminal:
            windows_lnk = create_shortcut(shortcut_path,
                                          batch_launcher_path_win,
                                          comment=entry.getComment(),
                                          icon_file=ico_file_winpath)
        else:
            windows_lnk = create_shortcut(
                shortcut_path,
                "wscript",
                '"%s" "%s"' %
                (silent_launcher_script_file_win, batch_launcher_path_win),
                comment=entry.getComment(),
                icon_file=ico_file_winpath)
        set_hidden_from_indexer(batch_launcher_path)
        logger.debug("Created %s", windows_lnk)
        shortcuts_installed += 1

    logger.info("Finished creating %d shortcuts!", shortcuts_installed)
    logger.info(
        "Before raising an issue, make sure you have Xming / X410 etc set up in your .bashrc."
    )
    logger.info(
        "Right click on the toolbar, then select Toolbars -> New toolbar... and select the directory '%s'.",
        install_directory)
Example #9
    return updates


# Parameters to code generation
deps = [(-1, 0, -1), (-1, -1, 0), (-1, -1, -1), (-1, -1, -2), (-1, -2, -1)]

unroll = (1, int(argv[1]))

ndims = 3
winsize = window_size(ndims, deps, unroll)
updates = compute_updates(ndims, deps, unroll)
depths = dependence_depths(ndims, deps)
uhalo = [ceil(depths[i] / unroll[i - 1]) for i in range(1, ndims)]

params = dict(winsize=winsize,
              updates=updates,
              unroll=unroll,
              depths=depths,
              uhalo=uhalo)

# Create template environment
env = Environment(loader=PackageLoader(__name__, 'templates'))
env.filters['ceil'] = ceil

# Load templates
inner_tile = env.get_template("inner_tile.cpp")
hls_types = env.get_template("hls_types.h")

print(inner_tile.render(**params))
#print(hls_types.render(**params))
Example #10
sys.path.insert(0, 'FILEPATH')
from call_mort_function import call_mort_function

# Get username
import getpass
USER = getpass.getuser()

import argparse
parser = argparse.ArgumentParser(description='Process version')
parser.add_argument('processing_version_id', type = str, default = "99999")
parser.add_argument('processing_version_description', type = str, nargs = "+", default = "Test version")
args = parser.parse_args()

from jinja2 import Environment, PackageLoader, select_autoescape
env = Environment(
    loader=PackageLoader('sandbox_graphs', 'templates'),
    autoescape=select_autoescape(['html', 'xml'])
)

# Set the output file path
processing_version_id = args.processing_version_id
processing_version_description = ' '.join(args.processing_version_description)
link = "LINK PATH"
input_model_dir = "FILEPATH + processing_version_id"
output_dir = "FILEPATH + processing_version_id"
os.makedirs(output_dir, exist_ok=True)

# Get the list of locations
locations = get_location_metadata(location_set_id = 93, gbd_round_id = 6)
locations = locations.loc[locations['is_estimate'] == 1]
Example #11
""" Implements the operations needed to solve Maxwell's equations in 3D. """

import numpy as np
from jinja2 import Environment, PackageLoader, Template
from gce.space import initialize_space, get_space_info
from gce.grid import Grid
from gce.const import Const
from gce.out import Out
from gce.kernel import Kernel
from mpi4py.MPI import COMM_WORLD as comm

# Execute when module is loaded.
# Load the jinja environment.
jinja_env = Environment(loader=PackageLoader(__name__, 'kernels'))


def ops(params):
    """ Define the operations that specify the symmetrized, lumped problem. """

    # Initialize the space.
    initialize_space(params['shape'])

    dtype = np.complex128

    if comm.rank == 0:
        pre_cond, post_cond = conditioners(params, dtype)
        params['j'] = pre_cond(params['j'])  # Precondition b.
    else:
        post_cond = None

    b = [Grid(dtype(f), x_overlap=1) for f in params['j']]
Example #12
 def __init__(self, offers, number_of_offers):
     self.offers = offers
     self.number_of_offers = number_of_offers
     self.env = Environment(loader=PackageLoader('scraper', 'templates'), )
     self.template = self.env.get_template('template.html')
     self.__render_output()
Example #13
def get_inherited_template(config, ast):
    env = Environment(
        loader=PackageLoader(config.PACKAGE_NAME, config.TEMPLATE_DIR))
    return env.parse(env.loader.get_source(env, ast.template.value)[0])
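A minimal sketch of what a parsed AST like the one returned above can be used for, via jinja2.meta (the template string is made up for illustration):

from jinja2 import Environment, meta

# env.parse() returns the template's AST without rendering it; jinja2.meta
# can then inspect that AST.
env = Environment()
ast = env.parse("{% extends 'base.html' %}{% block body %}{{ title }}{% endblock %}")
print(list(meta.find_referenced_templates(ast)))    # ['base.html']
print(sorted(meta.find_undeclared_variables(ast)))  # ['title']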
Example #14
from __future__ import absolute_import
import requests
import requests.auth as request_auth
from requests_ntlm import HttpNtlmAuth
from jinja2 import Environment, PackageLoader, select_autoescape
import uuid
import xmltodict
from raxwinrm import exceptions as raxwinrm_exceptions
import base64

SOAP_ENV = Environment(
    loader=PackageLoader('raxwinrm', 'winrm_commands'),
    autoescape=select_autoescape(['html', 'xml'])
)


def disable_request_logging():
    import urllib3
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


class WinRMConnection(object):

    url_template = 'https://{hostname}:{port}/wsman'
    prepared_statement = None
    shell_id = None
    last_command = None

    def __init__(self, hostname, port=None, username=None, password=None, **kwargs):
        self.hostname = hostname
        self.port = port
Example #15
 def __init__(self, package_name, directory) -> None:
     self.template_environment = Environment(
         loader=PackageLoader(package_name, directory),
         autoescape=select_autoescape(['html']))
Example #16
#!/usr/bin/python3
from flask import Flask, send_file
import os
import markdown
import re
from jinja2 import Environment, PackageLoader, select_autoescape
app = Flask(__name__)

jinja2_env = Environment(loader=PackageLoader(__name__, ''),
                         autoescape=select_autoescape(['html', 'xml']))
template = jinja2_env.get_template('template.html')


def find_page(page):
    pages = [page + '.md', page + '/index.md']
    for filename in pages:
        if os.path.exists(filename):
            return filename
    raise Exception("not found, try %s" % pages)


def read_page(page):
    filename = find_page(page)
    with open(filename) as fd:
        return fd.read()


def get_title(page):
    try:
        return read_page(page).split('\n')[0][1:].strip() or page
    except Exception:
Example #17
from jsonschema import validate
import datetime
import tempfile
from distutils.version import LooseVersion
import time
import threading

from conda_build.exceptions import UnableToParse
from conda_build import api
from conda_build.metadata import MetaData
import yaml
from jinja2 import Environment, PackageLoader, select_autoescape

logger = logging.getLogger(__name__)

jinja = Environment(loader=PackageLoader('bioconda_utils', 'templates'),
                    trim_blocks=True,
                    lstrip_blocks=True)

# Patterns of allowed environment variables that are allowed to be passed to
# conda-build.
ENV_VAR_WHITELIST = [
    'CONDA_*', 'PATH', 'LC_*', 'LANG', 'MACOSX_DEPLOYMENT_TARGET'
]


def get_free_space():
    """Return free space in MB on disk"""
    s = os.statvfs(os.getcwd())
    return s.f_frsize * s.f_bavail / (1024**2)
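For reference, a minimal sketch of what trim_blocks/lstrip_blocks (used in the Environment above) change about whitespace handling:

from jinja2 import Environment

# With the options enabled, block tags stop leaving blank lines and leading
# indentation behind in the rendered output.
src = "{% for x in items %}\n    {{ x }}\n{% endfor %}\n"
plain = Environment().from_string(src)
tidy = Environment(trim_blocks=True, lstrip_blocks=True).from_string(src)
print(repr(plain.render(items=[1, 2])))  # '\n    1\n\n    2\n'
print(repr(tidy.render(items=[1, 2])))   # '    1\n    2\n'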
Example #18
)
from ._util import (
    PathType,
    _determine_license_path,
    _determine_license_suffix_path,
    contains_spdx_info,
    extract_spdx_info,
    make_copyright_line,
    spdx_identifier,
)
from .project import Project

_LOGGER = logging.getLogger(__name__)

_ENV = Environment(
    loader=PackageLoader("reuse", "templates"), trim_blocks=True
)
DEFAULT_TEMPLATE = _ENV.get_template("default_template.jinja2")

_NEWLINE_PATTERN = re.compile(r"\n", re.MULTILINE)


class _TextSections(NamedTuple):
    """Used to split up text in three parts."""

    before: str
    middle: str
    after: str


class MissingSpdxInfo(Exception):
Example #19
"""Bull library, used for selling digital goods."""

import os

from flask import Flask
from jinja2 import Environment, FileSystemLoader, ChoiceLoader, PackageLoader
import stripe

from .bull import bull, mail, login_manager, bcrypt
from .models import Product, Purchase, db


app = Flask(__name__)
app.config['SECRET_KEY'] = 'foo'
app.config['WTF_CSRF_KEY'] = 'foo'
app.config.from_object('config')
app.jinja_loader = ChoiceLoader([
    FileSystemLoader(os.path.join(os.getcwd(), 'templates')),
    PackageLoader('bull'),
    ])
stripe.api_key = app.config['STRIPE_SECRET_KEY']
db.init_app(app)
mail.init_app(app)
bcrypt.init_app(app)
login_manager.init_app(app)
app.register_blueprint(bull)

__version__ = '0.4.1'
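A minimal sketch of the ChoiceLoader precedence relied on above: loaders are tried in order, so a template in the local filesystem directory shadows the one bundled with the 'bull' package (DictLoader stands in for both here so the snippet is self-contained):

from jinja2 import ChoiceLoader, DictLoader, Environment

local = DictLoader({'page.html': 'local override'})
packaged = DictLoader({'page.html': 'packaged default', 'extra.html': 'packaged only'})

# The first loader that can supply the template wins; later loaders are only
# consulted when earlier ones raise TemplateNotFound.
env = Environment(loader=ChoiceLoader([local, packaged]))
print(env.get_template('page.html').render())   # -> local override
print(env.get_template('extra.html').render())  # -> packaged only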
Example #20
class ExplanationDashboard:
    """Explanation Dashboard Class.

    :param explanation: An object that represents an explanation.
    :type explanation: ExplanationMixin
    :param model: An object that represents a model. It is assumed that for the classification case
        it has a method of predict_proba() returning the prediction probabilities for each
        class and for the regression case a method of predict() returning the prediction value.
    :type model: object
    :param dataset:  A matrix of feature vector examples (# examples x # features), the same samples
        used to build the explanation. Overwrites any existing dataset on the explanation object. Must have fewer than
        10000 rows and fewer than 1000 columns.
    :type dataset: numpy.array or list[][]
    :param datasetX: Alias of the dataset parameter. If dataset is passed, this will have no effect. Must have fewer
        than 10000 rows and fewer than 1000 columns.
    :type datasetX: numpy.array or list[][]
    :param true_y: The true labels for the provided dataset. Overwrites any existing dataset on the
        explanation object.
    :type true_y: numpy.array or list[]
    :param classes: The class names.
    :type classes: numpy.array or list[]
    :param features: Feature names.
    :type features: numpy.array or list[]
    :param port: The port to use on locally hosted service.
    :type port: int
    :param use_cdn: Whether to load latest dashboard script from cdn, fall back to local script if False.
    :type use_cdn: bool
    :param public_ip: Optional. If running on a remote vm, the external public ip address of the VM.
    :type public_ip: str
    :param with_credentials: Optional. If running on a remote vm, sets up CORS policy both on client and server.
    :type with_credentials: bool
    """

    service = None
    explanations = {}
    model_count = 0
    using_fallback = False
    _cdn_path = "v0.4.js"
    _dashboard_js = None
    env = Environment(loader=PackageLoader(__name__, 'templates'))
    default_template = env.get_template("inlineDashboard.html")

    class DashboardService:
        def __init__(self, port, public_ip, with_credentials=False):
            app = Flask(__name__)
            self.nbvm = _get_nbvm()
            if self.nbvm is None and not with_credentials:
                self.cors = CORS(app)
                self.with_credentials = False
                if public_ip is not None:
                    self.ip = public_ip
                    self.env = PUBLIC_VM
                else:
                    self.ip = LOCALHOST
                    self.env = 'local'
            elif self.nbvm is not None:
                # Note: for debugging CORS set logging.getLogger('flask_cors').level = logging.DEBUG
                instance_name = self.nbvm["instance"]
                domain_suffix = self.nbvm["domainsuffix"]
                nbvm_origin1 = "https://{}.{}".format(instance_name,
                                                      domain_suffix)
                nbvm_origin2 = "https://{}-{}.{}".format(
                    instance_name, port, domain_suffix)
                nbvm_origins = [nbvm_origin1, nbvm_origin2]
                headers = ['Content-Type']
                # Support credentials for notebook VM scenario
                self.cors = CORS(app,
                                 origins=nbvm_origins,
                                 expose_headers=headers,
                                 supports_credentials=True)
                self.with_credentials = True
                self.ip = LOCALHOST
                self.env = 'azure'
            else:
                if public_ip is not None:
                    self.ip = public_ip
                else:
                    # Attempt to get the ip, but this may fail since it may not get the external ip of
                    # the machine, just the private ip. Note we don't use LOCALHOST here because it
                    # doesn't make sense to run with CORS on local machine anyway.
                    host_name = socket.gethostname()
                    self.ip = socket.gethostbyname(host_name)
                origin = "https://{}:{}".format(self.ip, port)
                headers = ['Content-Type']
                self.cors = CORS(app,
                                 origins=[origin],
                                 expose_headers=headers,
                                 supports_credentials=True)
                self.with_credentials = True
                self.env = CREDENTIALED_VM

            @app.route('/')
            def hello():
                return "No global list view supported at this time."

            @app.route('/<id>')
            def explanation_visual(id):
                if id in ExplanationDashboard.explanations:
                    return generate_inline_html(
                        ExplanationDashboard.explanations[id], None)
                else:
                    return "Unknown model id."

            @app.route('/<id>/predict', methods=['POST'])
            def predict(id):
                data = request.get_json(force=True)
                if id in ExplanationDashboard.explanations:
                    return jsonify(
                        ExplanationDashboard.explanations[id].on_predict(data))

            self.app = app
            self.port = port
            self.use_cdn = True
            if self.port is None:
                # Try 100 different ports
                for port in range(5000, 5100):
                    available = ExplanationDashboard.DashboardService._local_port_available(
                        port, rais=False)
                    if available:
                        self.port = port
                        return
                error_message = """Ports 5000 to 5100 not available.
                    Please specify an open port for use via the 'port' parameter"""
                raise RuntimeError(error_message.format(port))
            else:
                ExplanationDashboard.DashboardService._local_port_available(
                    self.port)

        def run(self):
            class devnull:
                write = lambda _: None  # noqa: E731

            ip = LOCALHOST
            # Note: for credentialed VM or public VM we need to use the private IP address
            if self.env in VM_ENVS:
                host_name = socket.gethostname()
                ip = socket.gethostbyname(host_name)
            server = WSGIServer((ip, self.port), self.app, log=devnull)
            self.app.config["server"] = server
            server.serve_forever()

            # Closes server on program exit, including freeing all sockets
            def closeserver():
                server.stop()

            atexit.register(closeserver)

        def get_base_url(self):
            env = EnvironmentDetector()
            detected_envs = env.detect()
            in_cloud_env = is_cloud_env(detected_envs)
            result = _get_nbvm()
            # First handle known cloud environments
            if result is None:
                # special case azure, since the azure sdk can set this env setting on local runs
                if not in_cloud_env or self.env in VM_ENVS:
                    return "http://{0}:{1}".format(self.ip, self.port)
                # all non-specified, non-credentialed cloud environments are not handled
                if not self.with_credentials:
                    self.env = 'cloud'
                return None

            instance_name = result["instance"]
            domain_suffix = result["domainsuffix"]
            return "https://{}-{}.{}".format(instance_name, self.port,
                                             domain_suffix)

        @staticmethod
        def _local_port_available(port, rais=True):
            """
            Borrowed from:
            https://stackoverflow.com/questions/19196105/how-to-check-if-a-network-port-is-open-on-linux
            """
            try:
                backlog = 5
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.bind((LOCALHOST, port))
                sock.listen(backlog)
                sock.close()
            except socket.error:  # pragma: no cover
                if rais:
                    error_message = """Port {0} is not available.
                    Please specify another port for use via the 'port' parameter"""
                    raise RuntimeError(error_message.format(port))
                else:
                    return False
            return True

    def __init__(self,
                 explanation,
                 model=None,
                 *,
                 dataset=None,
                 true_y=None,
                 classes=None,
                 features=None,
                 port=None,
                 use_cdn=True,
                 datasetX=None,
                 trueY=None,
                 locale=None,
                 public_ip=None,
                 with_credentials=False):
        # support legacy kwarg names
        if dataset is None and datasetX is not None:
            dataset = datasetX
        if true_y is None and trueY is not None:
            true_y = trueY
        self._initialize_js(use_cdn)
        predict_url = None
        local_url = None
        if not ExplanationDashboard.service:
            try:
                ExplanationDashboard.service = ExplanationDashboard.DashboardService(
                    port, public_ip, with_credentials)
                self._thread = threading.Thread(
                    target=ExplanationDashboard.service.run, daemon=True)
                self._thread.start()
            except Exception as e:
                ExplanationDashboard.service = None
                raise e
        ExplanationDashboard.service.use_cdn = use_cdn
        ExplanationDashboard.model_count += 1
        base_url = ExplanationDashboard.service.get_base_url()
        if base_url is not None:
            predict_url = "{0}/{1}/predict".format(
                base_url, str(ExplanationDashboard.model_count))
            local_url = "{0}/{1}".format(base_url,
                                         str(ExplanationDashboard.model_count))
        with_credentials = ExplanationDashboard.service.with_credentials
        explanation_input = ExplanationDashboardInput(explanation, model,
                                                      dataset, true_y, classes,
                                                      features, predict_url,
                                                      locale, with_credentials)
        # Due to auth, predict is only available in separate tab in cloud after login
        if ExplanationDashboard.service.env != "cloud":
            explanation_input.enable_predict_url()
        html = generate_inline_html(explanation_input, local_url)
        ExplanationDashboard.explanations[str(
            ExplanationDashboard.model_count)] = explanation_input

        if "DATABRICKS_RUNTIME_VERSION" in os.environ:
            _render_databricks(html)
        else:
            display(HTML(html))

    def _initialize_js(self, use_cdn):
        if (ExplanationDashboard._dashboard_js is None):
            if (use_cdn):
                try:
                    url = 'https://interpret-cdn.azureedge.net/{0}'.format(
                        ExplanationDashboard._cdn_path)
                    r = requests.get(url)
                    if not r.ok:
                        ExplanationDashboard.using_fallback = True
                        self._load_local_js()
                        return
                    r.encoding = "utf-8"
                    ExplanationDashboard._dashboard_js = r.text
                except Exception:
                    ExplanationDashboard.using_fallback = True
                    self._load_local_js()
            else:
                self._load_local_js()

    def _load_local_js(self):
        script_path = os.path.dirname(os.path.abspath(__file__))
        js_path = os.path.join(script_path, "static", "index.js")
        with open(js_path, "r", encoding="utf-8") as f:
            ExplanationDashboard._dashboard_js = f.read()
Example #21

def _update_nonce_cache(timestamp, nonce):
    for timestamp, nonce in list(NONCE_CACHE):
        if timestamp < datetime.now(
                timezone.utc) - REPLAY_PROTECT_MAX_TIME_DELTA:
            NONCE_CACHE.remove((timestamp, nonce))
    NONCE_CACHE.add((timestamp, nonce))


# Replay protect settings
REPLAY_PROTECT_MAX_TIME_DELTA = timedelta(seconds=5)
NONCE_CACHE = set()

# Settings for jinja2
TEMPLATES = Environment(loader=PackageLoader('openleadr', 'templates'))
TEMPLATES.filters['datetimeformat'] = utils.datetimeformat
TEMPLATES.filters['timedeltaformat'] = utils.timedeltaformat
TEMPLATES.filters['booleanformat'] = utils.booleanformat
TEMPLATES.trim_blocks = True
TEMPLATES.lstrip_blocks = True

# Settings for xmltodict
NAMESPACES = {
    'http://docs.oasis-open.org/ns/energyinterop/201110': None,
    'http://openadr.org/oadr-2.0b/2012/07': None,
    'urn:ietf:params:xml:ns:icalendar-2.0': None,
    'http://docs.oasis-open.org/ns/energyinterop/201110/payloads': None,
    'http://docs.oasis-open.org/ns/emix/2011/06': None,
    'urn:ietf:params:xml:ns:icalendar-2.0:stream': None,
    'http://docs.oasis-open.org/ns/emix/2011/06/power': None,
Example #22
 def __init__(self, spec, settings):
     self.spec = spec
     self.settings = self.__class__.cleaned_settings(settings)
     self.jinjaenv = Environment(
         loader=PackageLoader('generate', self.settings.tpl_base))
     self.jinjaenv.filters['wordwrap'] = do_wordwrap
Example #23
from sanic import Blueprint

from jinja2 import Environment, PackageLoader, select_autoescape
from sanic.response import html, json, text

from monkey.config import Config
from monkey.common.doc_search import doc_search

bp_home = Blueprint(__name__)
bp_home.static('/statics', Config.BASE_DIR + '/statics/')

# Enable async support (requires Python 3.6+)
enable_async = sys.version_info >= (3, 6)

# jinja2 config
env = Environment(loader=PackageLoader('views.bp_home', '../templates'),
                  autoescape=select_autoescape(['html', 'xml', 'tpl']),
                  enable_async=enable_async)


async def template(tpl, **kwargs):
    template = env.get_template(tpl)
    rendered_template = await template.render_async(**kwargs)
    return html(rendered_template)


@bp_home.route('/')
async def index(request):
    return await template('index.html')

Example #24
def main():
    """Generate GLAM ETL queries."""
    parser = ArgumentParser(description=main.__doc__)
    parser.add_argument("--prefix")
    parser.add_argument("--dataset", default="glam_etl")
    parser.add_argument("--sql-root", default="sql/")
    parser.add_argument("--daily-view-only",
                        action="store_true",
                        default=False)
    args = parser.parse_args()

    env = Environment(loader=PackageLoader("bigquery_etl", "glam/templates"))

    dataset_path = Path(args.sql_root) / args.dataset
    if not dataset_path.is_dir():
        raise NotADirectoryError(f"path to {dataset_path} not found")

    # curry functions for convenience
    template = partial(from_template,
                       environment=env,
                       dataset_path=dataset_path,
                       args=args)
    view = partial(template, QueryType.VIEW)
    table = partial(template, QueryType.TABLE)
    init = partial(template, QueryType.INIT)

    # If this is a logical app id, generate it. Assert that the daily view for
    # the app exists. This assumes that both scalar and histogram aggregates
    # exist and will break down in the case where a glean app only contains one
    # of the scalar or histogram view.
    for daily_view in [
            "view_clients_daily_scalar_aggregates_v1",
            "view_clients_daily_histogram_aggregates_v1",
    ]:
        try:
            view(f"logical_app_id/{args.prefix}__{daily_view}")
        except TemplateNotFound:
            print(f"{args.prefix} is not a logical app id")
            # generate the view for the app id directly
            view(daily_view)

        if not (dataset_path / f"{args.prefix}__{daily_view}").is_dir():
            raise ValueError(f"missing {daily_view}")

    # exit early if we're only generating a daily view
    if args.daily_view_only:
        return

    # Supported fenix/firefox for android products. These are logical ids that
    # are formed from the union of several app_ids (sometimes across date
    # boundaries).
    fenix_app_ids = [
        "org_mozilla_fenix_glam_nightly",
        "org_mozilla_fenix_glam_beta",
        "org_mozilla_fenix_glam_release",
    ]

    build_date_udf_mapping = dict(
        **{
            app_id: "`moz-fx-data-shared-prod`.udf.fenix_build_to_datetime"
            for app_id in fenix_app_ids
        })
    if not build_date_udf_mapping.get(args.prefix):
        raise ValueError(f"build date udf for {args.prefix} was not found")

    [
        table(
            "latest_versions_v1",
            **dict(source_table=(
                f"glam_etl.{args.prefix}__view_clients_daily_scalar_aggregates_v1"
            )),
        ),
        init(
            "clients_scalar_aggregates_v1",
            **models.clients_scalar_aggregates(
                source_table=
                (f"glam_etl.{args.prefix}__view_clients_daily_scalar_aggregates_v1"
                 ),
                destination_table=(
                    f"glam_etl.{args.prefix}__clients_scalar_aggregates_v1"),
            ),
        ),
        table(
            "clients_scalar_aggregates_v1",
            **models.clients_scalar_aggregates(
                source_table=
                (f"glam_etl.{args.prefix}__view_clients_daily_scalar_aggregates_v1"
                 ),
                destination_table=(
                    f"glam_etl.{args.prefix}__clients_scalar_aggregates_v1"),
            ),
        ),
        init(
            "clients_histogram_aggregates_v1",
            **models.clients_histogram_aggregates(parameterize=True),
        ),
        table(
            "clients_histogram_aggregates_v1",
            **models.clients_histogram_aggregates(parameterize=True),
        ),
        table(
            "scalar_bucket_counts_v1",
            **models.scalar_bucket_counts(
                source_table=
                f"glam_etl.{args.prefix}__clients_scalar_aggregates_v1"),
        ),
        table(
            "histogram_bucket_counts_v1",
            **models.histogram_bucket_counts(
                source_table=
                f"glam_etl.{args.prefix}__clients_histogram_aggregates_v1"),
        ),
        table(
            "probe_counts_v1",
            query_name_prefix="scalar",
            **models.probe_counts(
                source_table=f"glam_etl.{args.prefix}__scalar_bucket_counts_v1",
                is_scalar=True,
            ),
        ),
        table(
            "probe_counts_v1",
            query_name_prefix="histogram",
            **models.probe_counts(
                source_table=
                f"glam_etl.{args.prefix}__histogram_bucket_counts_v1",
                is_scalar=False,
            ),
        ),
        table(
            "scalar_percentiles_v1",
            **models.scalar_percentiles(
                source_table=
                f"glam_etl.{args.prefix}__clients_scalar_aggregates_v1"),
        ),
        table("histogram_percentiles_v1"),
        view("view_probe_counts_v1"),
        view("view_user_counts_v1", **models.user_counts()),
        table("extract_user_counts_v1",
              build_date_udf=build_date_udf_mapping[args.prefix]),
        table(
            "extract_probe_counts_v1",
            build_date_udf=build_date_udf_mapping[args.prefix],
        ),
    ]
Example #25
POSTS = {}
for markdown_post in os.listdir('content'):
    file_path = os.path.join('content', markdown_post)

    with open(file_path, 'r') as file:
        POSTS[markdown_post] = markdown(file.read(), extras=['metadata'])

    POSTS = {
        post: POSTS[post]
        for post in sorted(POSTS,
                           key=lambda post: datetime.strptime(
                               POSTS[post].metadata['date'], '%Y-%m-%d'),
                           reverse=True)
    }
# Define the templates to use
env = Environment(loader=PackageLoader('main', 'templates'))
index_template = env.get_template('index.html')
bread_template = env.get_template('bread.html')
post_template = env.get_template('post.html')

index_html = index_template.render()

# Put the markdown metadata into the posts and tags variables
posts_metadata = [POSTS[post].metadata for post in POSTS]
tags = [post['tags'] for post in posts_metadata]
bread_html = bread_template.render(posts=posts_metadata, tags=tags)

# Write not to a local output folder but up out of the working directory into the recipes output folder
with open('../complex-myrecipes-output/index.html', 'w',
          encoding="utf-8") as file:
    file.write(index_html)
Example #26
def get_rendering_environment(xsd_namespaces, module='soapfish'):
    """Return a rendering environment to use with code generation templates."""
    from . import soap, xsd, xsdspec, wsdl

    def capitalize(value):
        return value[0].upper() + value[1:]

    def use(value):
        from . import xsd
        if value == xsd.Use.OPTIONAL:
            return 'xsd.Use.OPTIONAL'
        if value == xsd.Use.REQUIRED:
            return 'xsd.Use.REQUIRED'
        if value == xsd.Use.PROHIBITED:
            return 'xsd.Use.PROHIBITED'
        raise ValueError('Unknown value for use attribute: %s' % value)

    def url_regex(url):
        return r'^%s$' % re.escape(urlparse(url).path.lstrip('/'))

    def url_component(url, item):
        parts = urlparse(url)
        try:
            return getattr(parts, item)
        except AttributeError as e:
            raise ValueError(f'Unknown URL component: {item}') from e

    def url_template(url):
        o = list(urlparse(url))
        o[0:2] = ['${scheme}', '${host}']
        return urlunparse(o)

    def get_type(obj, known_types=None):
        qname = None
        if isinstance(obj, (xsdspec.Attribute, xsdspec.Element)):
            if obj.ref:
                qname = obj.ref
            elif obj.type:
                qname = obj.type
            elif obj.simpleType:
                # FIXME: Determine how to handle embedded types...
                raise NotImplementedError('Unable to handle embedded type.')
        elif isinstance(obj, (xsdspec.Extension, xsdspec.Restriction)):
            if obj.base:
                qname = obj.base
        elif isinstance(obj, str):
            qname = obj

        if not qname:
            raise ValueError(f'Unable to determine type of {obj}')

        qname = qname.split(':')
        if len(qname) < 2:
            qname.insert(0, None)
        ns, name = qname
        name = capitalize(name)

        if ns in xsd_namespaces:
            return f'xsd.{name}'
        elif known_types is not None and name in known_types:
            return str(name)
        else:
            return f"__name__ + '.{name}'"

    keywords = set(keyword.kwlist)

    env = Environment(
        extensions=['jinja2.ext.do', 'jinja2.ext.loopcontrols'],
        loader=PackageLoader('soapfish', 'templates'),
    )
    env.filters.update(
        capitalize=capitalize,
        fix_keyword=lambda x: f'_{x}' if str(x) in keywords else str(x),
        max_occurs=lambda x: 'xsd.UNBOUNDED' if x is xsd.UNBOUNDED else str(x),
        remove_namespace=remove_namespace,
        type=get_type,
        url_component=url_component,
        url_regex=url_regex,
        url_template=url_template,
        use=use,
    )
    env.globals.update(
        SOAPTransport=soap.SOAP_HTTP_Transport,
        keywords=keywords,
        get_by_name=wsdl.get_by_name,
        get_message_header=wsdl.get_message_header,
        get_message_object=wsdl.get_message_object,
        preamble={'module': module, 'generated': datetime.now(timezone.utc)},
        schema_name=schema_name,
        schema_select=schema_select,
    )
    return env
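A minimal sketch of the difference between the filters and globals registered above (template strings invented for illustration):

import keyword
from datetime import datetime, timezone

from jinja2 import Environment

env = Environment()
# A filter transforms a value inside an expression ({{ x | f }}); a global is
# a bare name available to every template without being passed to render().
env.filters['fix_keyword'] = lambda x: f'_{x}' if keyword.iskeyword(x) else x
env.globals['generated'] = datetime.now(timezone.utc)

print(env.from_string("{{ 'class' | fix_keyword }}").render())      # _class
print(env.from_string("generated: {{ generated.year }}").render())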
Example #27

def parse_argv(parser, argv):
    try:
        divider_pos = argv.index('--')
        mrunner_argv = argv[1:divider_pos]
        rest_argv = argv[divider_pos + 1:]
    except ValueError:
        # when missing '--' separator
        mrunner_argv = argv
        rest_argv = []
    return parser.parse_args(args=mrunner_argv), rest_argv


template_env = Environment(
    loader=PackageLoader('mrunner', 'templates'),
    undefined=StrictUndefined
)


class TempFile(object):

    def __init__(self, dir=None):
        self._file = NamedTemporaryFile(prefix='mrunner_', dir=dir)

    def write(self, payload):
        self._file.write(payload)
        self._file.flush()

    @property
    def path(self):
Example #28
from bottle import route, abort
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from datetime import datetime, timedelta
import pytz
from scorepile.db import Session
from jinja2 import Environment, PackageLoader
from scorepile.dateutils import full_date
import os

BASE_PATH = os.path.dirname(__file__) or '.'
ENV = Environment(loader=PackageLoader('scorepile.web', 'templates'))
TEMPLATES = {
    'main_page': ENV.get_template('main_page.html'),
    'game_list': ENV.get_template('game_list.html'),
    'no_results': ENV.get_template('no_results.html')
}
PT = pytz.timezone('US/Pacific')

CACHE_OPTS = {
    'cache.type': 'file',
    'cache.data_dir': BASE_PATH + '/cache/data',
    'cache.lock_dir': BASE_PATH + '/cache/lock',
}
cache = CacheManager(**parse_cache_config_options(CACHE_OPTS))


class MiniSession:
    """
    A DB session that lasts for as long as a web request. Makes sure that
    nothing stays in memory between requests, because we're not paying for a
Example #29
from jinja2 import Environment, PackageLoader, select_autoescape

from dotenv import load_dotenv
load_dotenv('.env')

PORT = int(os.environ.get('PORT', '8888'))

client = boto3.client(
  'ses',
  aws_access_key_id=os.environ.get('AWS_ACCESS_KEY'),
  aws_secret_access_key=os.environ.get('AWS_SECRET_KEY'),
  region_name="us-east-1"
)

ENV = Environment(
  loader=PackageLoader('myapp', 'templates'),
  autoescape=select_autoescape(['html', 'xml'])
)


class TemplateHandler(tornado.web.RequestHandler):
  def render_template (self, tpl, context):
    template = ENV.get_template(tpl)
    self.write(template.render(**context))

class MainHandler(TemplateHandler):
  def get(self):
    self.set_header(
      'Cache-Control',
      'no-store, no-cache, must-revalidate, max-age=0')
    self.render_template("index.html", {'name':'justin'})
Example #30
    local_subscriber_ids = mk_full_id_dict(PATH, LOCAL_SUB_FILE, 'subscriber')
    if local_subscriber_ids:
        logging.info(
            'ID ALIAS MAPPER: local_subscriber_ids added to subscriber_ids dictionary'
        )
        subscriber_ids.update(local_subscriber_ids)

    local_peer_ids = mk_full_id_dict(PATH, LOCAL_PEER_FILE, 'peer')
    if local_peer_ids:
        logging.info(
            'ID ALIAS MAPPER: local_peer_ids added peer_ids dictionary')
        peer_ids.update(local_peer_ids)

    # Jinja2 Stuff
    env = Environment(loader=PackageLoader('monitor', 'templates'),
                      autoescape=select_autoescape(['html', 'xml']))

    dtemplate = env.get_template('hblink_table.html')
    btemplate = env.get_template('bridge_table.html')

    # Create Static Website index file
    index_html = get_template(PATH + 'index_template.html')
    index_html = index_html.replace('<<<system_name>>>', REPORT_NAME)
    if CLIENT_TIMEOUT > 0:
        index_html = index_html.replace(
            '<<<timeout_warning>>>',
            'Continuous connections not allowed. Connections time out in {} seconds'
            .format(CLIENT_TIMEOUT))
    else:
        index_html = index_html.replace('<<<timeout_warning>>>', '')