Exemple #1
0
def templated_file_contents(options, configfile):
    """Render *configfile* as a Jinja2 template, using *options* as context.

    ``options["searchpath"]`` is a comma-separated list of locations; when
    the first entry is a directory on disk the filesystem loader is used,
    otherwise the URL-based loader.
    """
    search_paths = options["searchpath"].split(",")  # CSV of PATHs
    if os.path.isdir(search_paths[0]):
        loader = FileSystemLoader(search_paths)
    else:
        loader = URLloader(search_paths)
    env = Environment(loader=loader)
    # get_source returns (source, filename, uptodate); only the source is used.
    source, config_full_path, _uptodate = loader.get_source(env, configfile)
    template = env.from_string(source, globals=options)
    return template.render(options)
Exemple #2
0
 def _get_nereid_template_messages_from_file(self, template_dir, template):
     """
     Same generator as _get_nereid_template_messages, but for specific files.

     Yields ``(template,) + message_tuple`` for every translatable string
     babel extracts from the given template file.
     """
     extract_options = self._get_nereid_template_extract_options()
     loader = FileSystemLoader(template_dir)
     # get_source returns (source, filename, uptodate); babel reads the
     # file itself, so only the filename is needed here.
     filename = loader.get_source({}, template)[1]
     # Use a context manager so the handle is closed deterministically —
     # the original opened the file and never closed it.
     with open(filename) as file_obj:
         for message_tuple in babel_extract(
                 file_obj, GETTEXT_FUNCTIONS,
                 ['trans:'], extract_options):
             yield (template,) + message_tuple
 def to_html(self, embed_css=True):
     """Render the results to an HTML string via the bundled template.

     Args:
         embed_css: when True, the raw contents of ``style.css`` are
             passed to the template so the CSS can be inlined.
     """
     import os
     from jinja2 import Environment, FileSystemLoader
     template_dir = os.path.join(
         os.path.dirname(os.path.abspath(__file__)), 'html')
     loader = FileSystemLoader(template_dir)
     env = Environment(loader=loader)
     # get_source returns (source, filename, uptodate); [0] is the CSS text.
     stylesheet = loader.get_source(env, 'style.css')[0] if embed_css else None
     page = env.get_template('template.html')
     return page.render(rulesets=self.rulesets,
         lines=self.lines,
         text=self.text,
         css=stylesheet)
Exemple #4
0
def generate_manifests(path, kwargs, pkg_name):
    """Render every template under *path* into the kubernetes output
    directory, then package that directory as the gzipped tarball
    *pkg_name* (top-level entry ``kube-admin``)."""
    click.echo('Generate manifest')
    loader = FileSystemLoader(path)
    env = Environment(loader=loader)
    out_dir = os.path.join(ROOT_PATH, 'kubernetes')
    for template_name in loader.list_templates():
        rendered = env.get_template(template_name).render(**kwargs)
        # Strip the .j2 suffix so 'foo.yaml.j2' is written as 'foo.yaml'.
        target = os.path.join(out_dir, template_name.replace('.j2', ''))
        with open(target, 'w') as manifest:
            manifest.write(rendered)
    with tarfile.open(pkg_name, "w:gz") as tar:
        tar.add(out_dir, arcname='kube-admin')
Exemple #5
0
def render_pages():
    """Render every template in ``source/`` to a same-named file in the
    current directory, skipping partials (names starting with '_')."""
    loader = FileSystemLoader('source')
    env = Environment(loader=loader)

    for name in loader.list_templates():
        # Templates prefixed with '_' are partials/includes, not pages.
        if name.startswith('_'):
            continue

        template = env.get_template(name)
        # Parenthesized print: valid on both Python 2 and 3 (the original
        # bare print statement is a SyntaxError on Python 3).
        print("writing {}".format(name))
        # Context manager guarantees the output file is flushed and closed.
        with open(name, 'w') as handle:
            handle.write(template.render())
Exemple #6
0
class TemplateService(EngineService):
    """Engine service wrapping a Jinja2 environment.

    Templates are looked up first in this project's own ``templates``
    directory and then in the user application's template root (both are
    given to one FileSystemLoader).  The double-underscore attributes are
    name-mangled, i.e. private to this class.
    """
    endpoint_methods = None
    # Methods exposed to other services through the engine.
    published_members = ['render', 'get_base_templates', 'render_from_string', 'get_source']
    name = 'template'
    # Project's base path
    __base_path = os.path.dirname(os.path.realpath(__file__))
    # User's application base path
    __app_base_path = None
    __app_base_templates_dir = None
    # Our internal jinja2 template env
    __template_env = None
    __fs_loader = None

    def __init__(self, engine):
        """Resolve template roots from the engine's 'csettings' service and
        build the shared loader/environment."""
        super(TemplateService, self).__init__(engine)

        self.__app_base_path = self.engine.get('csettings', 'all')()['templates_root']
        self.__app_base_templates_dir = self.engine.get('csettings', 'all')()['base_templates_dir']

        self.__fs_loader = FileSystemLoader(
            # Search path gets saved as a list in jinja2 internally, so we could
            # add to it if necessary.
            searchpath=[self.__base_path + '/templates', self.__app_base_path],
            encoding='utf-8',
        )
        self.__template_env = Environment(loader=self.__fs_loader, trim_blocks=True)

    def render(self, template_name, context):
        """
        Context must be a dictionary right now, but could also be **kwargs
        """
        # Add the global corus settings from engine to the context
        # NOTE: this mutates the caller's dict (adds 'csettings' key).
        context['csettings'] = self.engine.get('csettings', 'all')()
        return self.__template_env.get_template(template_name).render(context)

    def render_from_string(self, s, context):
        """Render a template given as a literal string *s* with *context*."""
        # TODO we should probably make a new loader for getting stuff out of NDB
        return self.__template_env.from_string(s).render(context)

    def get_base_templates(self):
        """Return template names under the app's base-templates directory."""
        # The call to FS loader list_templates is a sorted set, so just append, return
        bts = []
        for t in self.__fs_loader.list_templates():
            if t.startswith(self.__app_base_templates_dir):
                bts.append(t)
        return bts

    def get_source(self, template_name):
        """Return the raw source text of *template_name* (no rendering)."""
        source, filename, uptodate = self.__fs_loader.get_source(self.__template_env, template_name)
        return source
Exemple #7
0
    def _get_nereid_template_messages(cls):
        """
        Extract localizable strings from the templates of installed modules.

        For every string found this function yields a
        `(module, template, lineno, function, message)` tuple, where:

        * module is the name of the module in which the template is found
        * template is the name of the template in which message was found
        * lineno is the number of the line on which the string was found,
        * function is the name of the gettext function used (if the string
          was extracted from embedded Python code), and
        * message is the string itself (a unicode object, or a tuple of
          unicode objects for functions with multiple string arguments).
        * comments List of Translation comments if any. Comments in the code
          should have a prefix `trans:`. Example::

              {{ _(Welcome) }} {# trans: In the top banner #}
        """
        extract_options = cls._get_nereid_template_extract_options()
        logger = logging.getLogger('nereid.translation')

        for module, directory in cls._get_installed_module_directories():
            template_dir = os.path.join(directory, 'templates')
            if not os.path.isdir(template_dir):
                # The template directory does not exist. Just continue
                continue

            logger.info(
                'Found template directory for module %s at %s' % (
                    module, template_dir
                )
            )
            # now that there is a template directory, load the templates
            # using a simple filesystem loader and load all the
            # translations from it.
            loader = FileSystemLoader(template_dir)
            env = Environment(loader=loader)
            extensions = '.html,.jinja'
            for template in env.list_templates(extensions=extensions):
                logger.info('Loading from: %s:%s' % (module, template))
                # get_source returns (source, filename, uptodate); babel
                # reads the file itself, so only the filename is needed.
                filename = loader.get_source({}, template)[1]
                # Context manager closes the handle deterministically —
                # the original opened the file and never closed it.
                with open(filename) as file_obj:
                    for message_tuple in babel_extract(
                            file_obj, GETTEXT_FUNCTIONS,
                            ['trans:'], extract_options):
                        yield (module, template) + message_tuple
Exemple #8
0
def main():
    """Render the e-book OPF/NCX templates in *project_path* and dump the
    results next to them as ``ekundelek_gen.opf`` / ``ekundelek_gen.ncx``."""
    env = Environment()
    loader = FileSystemLoader(project_path)

    template_opf = loader.load(env, opf_file)
    template_ncx = loader.load(env, ncx_file)

    # Template context comes entirely from the project's loadContext().
    d = loadContext()

    # NOTE(review): ctx is built but never used below; kept because
    # Context() construction may be relied on for side effects — confirm.
    ctx = Context(env, blocks=d, name=opf_file, parent=env.globals)

    # Stream each template with the context expanded as keyword arguments
    # and dump straight to the output files.
    template_opf.stream(**d).dump(project_path + 'ekundelek_gen.opf')
    template_ncx.stream(**d).dump(project_path + 'ekundelek_gen.ncx')

    # Parenthesized print: valid on both Python 2 and 3 (the original bare
    # print statement is a SyntaxError on Python 3). "Gotowe!" = "Done!".
    print('Gotowe!')
Exemple #9
0
    def _clean_nereid_template(translation):
        """
        Clean the template translations if the module is not installed, or if
        the template is not there.

        Returns True when *translation* should be removed; returns None
        (falsy) when it is still valid and must be kept.
        """
        TranslationSet = Pool().get('ir.translation.set', type='wizard')
        installed_modules = TranslationSet._get_installed_module_directories()

        # Clean if the module is not installed anymore
        # (for/else: the else branch runs only when no break occurred,
        # i.e. the module was not found among installed modules).
        for module, directory in installed_modules:
            if translation.module == module:
                break
        else:
            return True

        # Clean if the template directory does not exist
        # (directory is the loop variable left over from the matching module)
        template_dir = os.path.join(directory, 'templates')
        if not os.path.isdir(template_dir):
            return True

        # Clean if the template is not found
        loader = FileSystemLoader(template_dir)
        if translation.name not in loader.list_templates():
            return True

        # Clean if the translation has changed (avoid duplicates)
        # (translation has no equivalent in template)
        # NOTE(review): translation.res_id is compared against the template
        # line number — presumably res_id stores the lineno; confirm.
        found = False
        for template, lineno, function, message, comments in \
            TranslationSet._get_nereid_template_messages_from_file(
                TranslationSet, template_dir, translation.name):
            if (template, lineno, message, comments and
                    '\n'.join(comments) or None) == \
                (translation.name, translation.res_id, translation.src,
                    translation.comments):
                found = True
                break
        if not found:
            return True
Exemple #10
0
    def __init__(self, engine):
        """Resolve the application's template roots from the engine's
        'csettings' service and build the shared Jinja2 loader/environment.

        The double-underscore attributes are name-mangled (private to the
        enclosing class); __base_path is a class-level constant set outside
        this view.
        """
        super(TemplateService, self).__init__(engine)

        # Both values come from the user application's settings dict.
        self.__app_base_path = self.engine.get('csettings', 'all')()['templates_root']
        self.__app_base_templates_dir = self.engine.get('csettings', 'all')()['base_templates_dir']

        self.__fs_loader = FileSystemLoader(
            # Search path gets saved as a list in jinja2 internally, so we could
            # add to it if necessary.
            searchpath=[self.__base_path + '/templates', self.__app_base_path],
            encoding='utf-8',
        )
        # trim_blocks removes the first newline after a Jinja block tag.
        self.__template_env = Environment(loader=self.__fs_loader, trim_blocks=True)
Exemple #11
0
    def to_html(self, embed_css=True, include_empty=False):
        '''
        Convert results into HTML.

        Args:
            embed_css: A boolean indicating whether css should be
                embedded into the HTML code.
            include_empty: A boolean indicating whether empty rulesets,
                rules and patterns should be returned.

        Returns:
            A string of HTML code representing the results.
        '''
        from jinja2 import Environment, FileSystemLoader
        template_dir = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'html')
        loader = FileSystemLoader(template_dir)
        env = Environment(loader=loader)
        # get_source returns (source, filename, uptodate); [0] is the CSS text.
        stylesheet = loader.get_source(env, 'style.css')[0] if embed_css else None
        page = env.get_template('template.html')
        return page.render(rulesets=self.rulesets,
            lines=self.lines,
            text=self.text,
            css=stylesheet,
            include_empty=include_empty)
Exemple #12
0
    def _clean_nereid_template(translation):
        """
        Clean the template translations if the module is not installed, or if
        the template is not there.

        Returns True when *translation* should be removed; returns None
        (falsy) otherwise.
        """
        TranslationSet = Pool().get('ir.translation.set', type='wizard')
        installed_modules = TranslationSet._get_installed_module_directories()

        # Clean if the module is not installed anymore
        # (for/else: the else branch runs only when no break occurred,
        # i.e. the module was not found among installed modules).
        for module, directory in installed_modules:
            if translation.module == module:
                break
        else:
            return True

        # Clean if the template directory does not exist
        # (directory is the loop variable left over from the matching module)
        template_dir = os.path.join(directory, 'templates')
        if not os.path.isdir(template_dir):
            return True

        # Clean if the template is not found
        loader = FileSystemLoader(template_dir)
        if translation.name not in loader.list_templates():
            return True
Exemple #13
0
    def get_source(self, environment, template):
        """Jinja2 loader hook: return (source, filename, uptodate) for
        *template*, or raise TemplateNotFound.

        LOCAL is a module-level flag set outside this view; universal_read
        presumably returns the file's bytes or None when unreadable —
        TODO confirm against its definition.
        """
        if LOCAL:
            # during local development, templates are files and we want "uptodate" feature
            return FileSystemLoader.get_source(self, environment, template)

        # on production server template may come from zip file
        for searchpath in self.searchpath:
            filename = os.path.join(searchpath, template)
            contents = universal_read(filename)
            if contents is None:
                continue
            contents = contents.decode(self.encoding)

            # Always report up-to-date: zip contents never change at runtime,
            # so auto-reload is effectively disabled for this path.
            def uptodate():
                return True

            return contents, filename, uptodate
        raise TemplateNotFound(template)
Exemple #14
0
class Application(ErrorResponses):
    """Minimal WSGI application: registered sub-handlers are matched by
    path, CSRF middleware is applied around every view, and templates are
    rendered through a Jinja2 FileSystemLoader."""

    middlewares = [CSRF()]

    def __init__(self, settings=None):
        self.settings = settings
        # Registered sub-handlers, tried in registration order.
        self.subs = list()
        self.app = self

        self.loader = FileSystemLoader(settings.TEMPLATES_PATH)

    def register(self, sub_class, path, *args, **kwargs):
        """Instantiate *sub_class* mounted at *path* and remember it."""
        log.debug('registred %s' % sub_class)
        sub = sub_class(self, self, path, *args, **kwargs)
        self.subs.append(sub)
        return sub

    def full_path(self, current):
        # Root of the mount hierarchy: contributes no path prefix.
        return ''

    def _match_path(self, match, path, subs):
        """Recursively find the sub whose pattern chain consumes *path*.

        Returns (groupdict, sub) on success, (None, None) otherwise.
        NOTE(review): the *match* parameter is immediately shadowed by the
        rebind below, so accumulated matches from outer levels are
        discarded on recursion — looks unintended; confirm before relying
        on nested capture groups.
        """
        match = dict()
        for sub in subs:
            submatch = sub.path.match(path)
            if submatch:
                # it is a good path
                matched = submatch.groupdict()
                if matched:
                    matched.update(match)
                else:
                    matched = match
                subpath = path[submatch.end():]
                if subpath:
                    # try subs from this sub
                    o = self._match_path(matched, subpath, sub.subs)
                    if o[0]:
                        # a sub of this sub is a match return it
                        return o
                else:
                    # if it matched but is not a subsub then it's the one
                    # that match (and to repeat it: it's not a sub of sub!)
                    return matched, sub
            # else continue
        # None matched
        return None, None

    def __call__(self, environ, start_response):
        """WSGI entry point: dispatch to the first matching sub, running
        middleware hooks before the view and before the response is sent."""
        request = Request(environ)
        for sub in self.subs:
            log.debug('try to match %s' % sub)
            # first match the domain if any try to match the path
            path_match, sub = self._match_path(dict(), request.path, [sub])
            if not sub:
                continue
            # found the good sub
            request.path_match = path_match
            for middleware in self.middlewares:
                # A middleware may short-circuit with its own Response.
                maybe_response = middleware.process_request_before_view(self, request)
                if isinstance(maybe_response, Response):
                    return maybe_response(environ, start_response)
            try:
                response = sub(request)
            except Exception:  # XXX: improve this
                # Any view failure becomes a 500; traceback goes to stderr.
                print_exc()
                response = self.internal_server_error(request)
                for middleware in self.middlewares:
                    maybe_response = middleware.process_response_before_answer(self, request, response)
                    if isinstance(maybe_response, Response):
                        return maybe_response(environ, start_response)
                return response(environ, start_response)
            else:
                for middleware in self.middlewares:
                    maybe_response = middleware.process_response_before_answer(self, request, response)
                    if isinstance(maybe_response, Response):
                        return maybe_response(environ, start_response)
                return response(environ, start_response)
        # No sub matched the path: 404, still filtered through middleware.
        response = self.not_found(request)
        for middleware in self.middlewares:
            maybe_response = middleware.process_response_before_answer(self, request, response)
            if isinstance(maybe_response, Response):
                return maybe_response(environ, start_response)
        return response(environ, start_response)

    def render(self, request, path, **context):
        """Render template *path* with **context plus the standard
        settings/request/app entries, wrapped in a 200 Response."""
        response = Response(status=200)
        template = self.loader.load(Environment(), path)
        context['settings'] = self.settings
        context['request'] = request
        context['app'] = self.app
        response.text = template.render(**context)
        return response

    def redirect(self, url):
        """Return a 302 redirect Response pointing at *url*."""
        return Response(status=302, location=url)
Exemple #15
0
    def export(cls, records, template_name):
        """Creates XML with ORCID data from python dictionar/list.

        Currently supported template names:
        + "works.xml" - for works exporting. Records are a list of works. Every
                work is a dictionary and it can contain following fields (most
                fields are optional):
            * work_title - a dictionary:
                - title (required) - string
                - subtitle - string
                - translated_titles - list of pairs (tuples):
                    1). code of language - string (eg. "fr")
                    2). translated title - string
            * journal_title - string
            * short_description - string
            * work_citation - a pair (tuple):
                1). type - string
                2). citation - string in correct format
                See https://support.orcid.org/knowledgebase/articles/
                135758-anatomy-of-a-citation
            * work_type - string
              See https://support.orcid.org/knowledgebase/articles/118795
            * publication_date - a dictionary:
                - year - four digit integer - required
                - month - two digit integer - required if day is provided
                - day - two digit integer
            * work_external_identifiers - a list of pairs (tuples):
                1). type - string
                2). id - string
                See https://support.orcid.org/knowledgebase/articles/118807
                for available types
            * url - string in anyURI format.
              See http://www.datypic.com/sc/xsd/t-xsd_anyURI.html
            * work_contributors - a list of dictionaries. Every contributor can
              contain:
                - orcid - string
                - name - string
                - email - string
                - attributes - a dictionary with "role" and/or "sequence" keys
                See https://support.orcid.org/knowledgebase/articles/
                118843-anatomy-of-a-contributor for details
            * work-source - string with ORCID
            * language_code - string (eg. "en")
            * country - a pair (tuple):
                1). abbreviation - two letters string in ISO 3166 format
                2). visibility - string (see below)
            * visibility - string. One from:
                + "limited"
                + "public"
                + "private"
                + "registered-only"
            * put-code - integer

        :param dictionary: dictionary containing orcid fields
        :type dictionary: dict
        :param template_name: name of file from 'xml_templates' directory
        :type template_name: str
        :returns: str -- XML which can be sent to ORCID with API.
        :raises: TemplateNotFound
        """
        # Templates live under the configured etc directory.
        templates_path = '%s/miscutils/templates/' % CFG_ETCDIR
        jinja_environment = Environment(loader=FileSystemLoader(templates_path))
        selected_template = jinja_environment.get_template(template_name)
        return selected_template.render({'records': records})
Exemple #16
0
# NOTE(review): yaml.load with a plain Loader executes arbitrary constructors;
# prefer safe_load unless the input is fully trusted — confirm usage sites.
from yaml import load, Loader


# Source/output locations for the docs sync pipeline.
CONTENT_DIR = './content/en/docs'
JS_ASSET_DIR = './assets/js'
SYNC_DIR = './sync/config'
TEMPLATE_DIR = './templates'
VAULT_DIR = './content/en/vault'
# GCS bucket holding the published website assets.
BUCKET_NAME = 'tekton-website-assets'

# Credentials/project are taken from the environment (may be None locally).
GCP_NETLIFY_ENV_CRED = os.environ.get('GCP_CREDENTIAL_JSON')
GCP_PROJECT = os.environ.get('GCP_PROJECT')

# Matches relative markdown links "[text](page.md#frag)" — excludes absolute
# URLs (://) and root-relative (/) targets. Groups: text, page, fragment.
RELATIVE_LINKS_RE = r'\[([^\]]*)\]\((?!.*://|/)([^)]*).md(#[^)]*)?\)'

# Shared Jinja2 environment over the templates directory.
jinja_env = Environment(loader=FileSystemLoader(TEMPLATE_DIR))

def transform_links(link_prefix, dest_prefix, files):
    """Rewrite relative markdown links in the synced files, in place.

    Every ``[text](page.md#frag)`` link matched by RELATIVE_LINKS_RE is
    rewritten to ``[text](<link_prefix>page#frag)``.
    """
    for mapping in files:
        for key in mapping:
            dest_path = f'{dest_prefix}/{mapping[key]}'
            # fileinput with inplace=1 redirects stdout into the file, so
            # print() writes each transformed line back.
            for line in fileinput.input(dest_path, inplace=1):
                rewritten = re.sub(
                    RELATIVE_LINKS_RE,
                    r'[\1](' + link_prefix + r'\2\3)',
                    line.rstrip())
                print(rewritten)


def retrieve_files(url_prefix, dest_prefix, files):
    if os.path.isdir(dest_prefix):
        shutil.rmtree(dest_prefix)
    os.mkdir(dest_prefix)
    for f in files:
def build_redirects(app: Sphinx, exception: Union[Exception, None]) -> None:
    """
    Build and write redirects.

    Reads the configured redirect graph, renders one redirect HTML page per
    edge into the build output, and records what was written in a JSON file
    so subsequent builds can detect stale/changed redirects. Sets
    app.statuscode = 1 on any broken redirect.
    """
    # Record of previously written redirects (source -> target), if any.
    redirect_json_file = Path(app.outdir) / REDIRECT_JSON_NAME
    if redirect_json_file.exists():
        redirect_record = json.loads(redirect_json_file.read_text("utf8"))
    else:
        redirect_record = {}

    # Skip redirect generation entirely when the build itself failed.
    if exception != None:
        return

    if isinstance(app.builder, CheckExternalLinksBuilder):
        logger.info(
            "rediraffe: Redirect generation skipped for linkcheck builders.")
        return

    if (type(app.builder) not in (StandaloneHTMLBuilder, DirectoryHTMLBuilder)
            and app.builder.name not in READTHEDOCS_BUILDERS):
        logger.info(
            "rediraffe: Redirect generation skipped for unsupported builders. Supported builders: html, dirhtml, readthedocs, readthedocsdirhtml."
        )
        return

    # rediraffe_template may be a path to a user template or fall back to
    # the built-in default.
    rediraffe_template = app.config.rediraffe_template
    if isinstance(rediraffe_template, str):
        # path
        template_path = Path(app.srcdir) / rediraffe_template
        if template_path.exists():
            file_loader = FileSystemLoader(template_path.parent)
            env = Environment(loader=file_loader)
            rediraffe_template = env.get_template(template_path.name)
        else:
            logger.warning(
                "rediraffe: rediraffe_template does not exist. The default will be used."
            )
            rediraffe_template = DEFAULT_REDIRAFFE_TEMPLATE
    else:
        rediraffe_template = DEFAULT_REDIRAFFE_TEMPLATE

    graph_edges = {}

    # Redirects come either inline from conf.py (dict) or from a file.
    rediraffe_redirects = app.config.rediraffe_redirects
    if isinstance(rediraffe_redirects, dict):
        # dict in conf.py
        graph_edges = rediraffe_redirects
    elif isinstance(rediraffe_redirects, str):
        # filename
        path = Path(app.srcdir) / rediraffe_redirects
        if not path.is_file():
            logger.error(
                red("rediraffe: rediraffe_redirects file does not exist. Redirects will not be generated."
                    ))
            app.statuscode = 1
            return

        try:
            graph_edges = create_graph(path)
        except ExtensionError as e:
            app.statuscode = 1
            raise e
    else:
        logger.warning(
            "rediraffe: rediraffe was not given redirects to process. Redirects will not be generated."
        )
        return

    # Collapse chains (a->b->c becomes a->c); raises on cycles.
    try:
        redirects = create_simple_redirects(graph_edges)
    except ExtensionError as e:
        app.statuscode = 1
        raise e

    logger.info("Writing redirects...")

    # write redirects
    for src_redirect_from, src_redirect_to in redirects.items():
        # Normalize path - src_redirect_.* is relative so drive letters aren't an issue.
        src_redirect_from = Path(PureWindowsPath(src_redirect_from))
        src_redirect_to = Path(PureWindowsPath(src_redirect_to))

        # remove extensions
        redirect_from_name = remove_suffix(src_redirect_from.name,
                                           app.config.source_suffix)
        redirect_to_name = remove_suffix(src_redirect_to.name,
                                         app.config.source_suffix)

        redirect_from = src_redirect_from.parent / f"{redirect_from_name}.html"
        redirect_to = src_redirect_to.parent / f"{redirect_to_name}.html"

        # dirhtml builders emit page/index.html instead of page.html.
        if type(app.builder) == DirectoryHTMLBuilder:
            if redirect_from_name != "index":
                redirect_from = (src_redirect_from.parent /
                                 redirect_from_name / "index.html")
            if redirect_to_name != "index":
                redirect_to = src_redirect_to.parent / redirect_to_name / "index.html"

        # absolute paths into the build dir
        build_redirect_from = Path(app.outdir) / redirect_from
        build_redirect_to = Path(app.outdir) / redirect_to

        if (build_redirect_from.exists()
                and src_redirect_from.as_posix() in redirect_record):
            # if it is still pointing to the same source, continue
            if (redirect_record[src_redirect_from.as_posix()] ==
                    src_redirect_to.as_posix()):
                continue
            # otherwise remove and rewrite
            build_redirect_from.unlink()

        # A real page (not a previously written redirect) occupies the slot.
        if build_redirect_from.exists():
            logger.warning(
                f'{yellow("(broken)")} {redirect_from} redirects to {redirect_to} but {build_redirect_from} already exists!'
            )
            app.statuscode = 1
            continue

        # Redirect target must actually exist in the build output.
        if not build_redirect_to.exists():
            logger.warning(
                f'{yellow("(broken)")} {redirect_from} redirects to {redirect_to} but {build_redirect_to} does not exist!'
            )
            app.statuscode = 1
            continue

        build_redirect_from.parent.mkdir(parents=True, exist_ok=True)
        with build_redirect_from.open("w") as f:
            # rel_url is the relative link from the redirect page to its
            # target, normalized to posix separators for the HTML.
            f.write(
                rediraffe_template.render(
                    rel_url=str(
                        PurePosixPath(
                            PureWindowsPath(
                                relpath(build_redirect_to,
                                        build_redirect_from.parent)))),
                    from_file=src_redirect_from,
                    to_file=src_redirect_to,
                    from_url=redirect_from,
                    to_url=redirect_to,
                ))
            logger.info(
                f'{green("(good)")} {redirect_from} {green("-->")} {redirect_to}'
            )
            redirect_record[
                src_redirect_from.as_posix()] = src_redirect_to.as_posix()

    # Persist what we wrote for the next incremental build.
    redirect_json_file.write_text(json.dumps(redirect_record), encoding="utf8")
Exemple #18
0
    def update_metadata(self):
        """Synchronize the database metadata tables with the actual schema.

        For every table in the model: ensure a SapnsClass row exists (with
        r/w privilege for the 'managers' role), ensure the standard actions
        exist, ensure a SapnsAttribute row per column (with regex validation
        where applicable), and install the _created/_updated audit columns
        plus the log trigger/function. Python 2 syntax (except Exception, e).
        """
        logger = logging.getLogger('lib.sapns.update_metadata')

        # Jinja2 env used to render the log trigger function SQL.
        env = Environment(loader=FileSystemLoader(current_path))

        # The 'managers' role receives access to everything created here.
        managers = dbs.query(SapnsRole).\
            filter(SapnsRole.group_name == u'managers').\
            first()

        # First pass: make sure every table has a SapnsClass.
        tables = self.extract_model(all_=True)
        tables_id = {}
        for tbl in tables:

            logger.info('Table: %s' % tbl['name'])

            klass = dbs.query(SapnsClass).\
                filter(SapnsClass.name == tbl['name']).\
                first()

            if not klass:
                logger.warning('.....creating')

                klass = SapnsClass()
                klass.name = tbl['name']
                klass.title = tbl['name'].title()
                klass.description = u'Clases: %s' % tbl['name']

                dbs.add(klass)
                dbs.flush()

                # grant access (r/w) to "managers"
                priv = SapnsPrivilege()
                priv.role_id = managers.group_id
                priv.class_id = klass.class_id
                priv.granted = True

                dbs.add(priv)
                dbs.flush()

            else:
                logger.warning('.....already exists')

            tables_id[tbl['name']] = klass  #.class_id

        # Template for the per-table audit-log trigger function.
        tmpl = env.get_template('trigger_function_log.txt')

        # Second pass: actions, attributes, audit columns and triggers.
        pending_attr = {}
        for tbl in tables:

            #tables_id[tbl['name']] = klass.class_id
            klass = tables_id[tbl['name']]

            # create an action
            def create_action(name, type_):
                # Ensure a SapnsPermission of this type exists for klass;
                # grant it to 'managers' when newly created.
                action = dbs.query(SapnsPermission).\
                    filter(and_(SapnsPermission.class_id == klass.class_id,
                                SapnsPermission.type == type_)).\
                    first()

                if not action:
                    action = SapnsPermission()
                    action.permission_name = u'%s#%s' % (klass.name,
                                                         name.lower())
                    action.display_name = name
                    action.type = type_
                    action.class_id = klass.class_id

                    dbs.add(action)
                    dbs.flush()

                    # add this action to "managers" role
                    managers.permissions_.append(action)
                    dbs.flush()

                elif action.type == SapnsPermission.TYPE_LIST:
                    # Keep shortcut titles in sync with the class title.
                    for s in action.shortcuts:
                        s.title = action.class_.title
                        dbs.add(s)
                        dbs.flush()

            # create standard actions
            create_action(u'New', SapnsPermission.TYPE_NEW)
            create_action(u'Edit', SapnsPermission.TYPE_EDIT)
            create_action(u'Delete', SapnsPermission.TYPE_DELETE)
            create_action(u'List', SapnsPermission.TYPE_LIST)
            create_action(u'Docs', SapnsPermission.TYPE_DOCS)

            # Track whether the audit columns already exist on this table.
            log_attributes = Dict(created=False, updated=False)
            log_cols = []
            first_ref = False
            for i, col in enumerate(tbl['columns']):

                logger.info('Column: %s' % col['name'])

                attr = dbs.query(SapnsAttribute).\
                    filter(and_(SapnsAttribute.name == col['name'],
                                SapnsAttribute.class_id == klass.class_id,
                                )).\
                    first()

                # log attributes
                if col['name'] in ['_created', '_updated']:

                    if col['name'] == '_created':
                        log_attributes.created = True

                    if col['name'] == '_updated':
                        log_attributes.updated = True

                    continue

                elif col['name'] != 'id':
                    # Every non-id, non-audit column is logged by the trigger.
                    log_cols.append(col['name'])

                if col['name'] not in ['id', '_created', '_updated']:
                    if not attr:
                        logger.warning('.....creating')

                        attr = SapnsAttribute()
                        attr.name = col['name']
                        attr.title = col['name'].replace('_', ' ').title()
                        attr.class_id = klass.class_id
                        attr.type = col['type_name']
                        # First string column becomes the class's display
                        # reference (reference_order 0).
                        if attr.type == SapnsAttribute.TYPE_STRING and not first_ref:
                            attr.reference_order = 0
                            first_ref = True

                        attr.visible = True
                        attr.insertion_order = i

                        if attr.type == SapnsAttribute.TYPE_INTEGER and \
                        not attr.name.startswith('id_'):
                            # signed
                            attr.field_regex = r'^\s*(\+|\-)?\d+\s*$'

                        elif attr.type == SapnsAttribute.TYPE_FLOAT:
                            # signed
                            # col['prec']
                            # col['scale']
                            attr.field_regex = r'^\s*(\+|\-)?\d{1,%d}(\.\d{1,%d})?\s*$' % \
                                (col['prec']-col['scale'],
                                 col['scale'])

                        elif attr.type == SapnsAttribute.TYPE_TIME:
                            attr.field_regex = r'^\s*([01][0-9]|2[0-3]):[0-5][0-9](:[0-5][0-9])?\s*$'

                        dbs.add(attr)
                        dbs.flush()

                        # grant access (r/w) to managers
                        priv = SapnsAttrPrivilege()
                        priv.role_id = managers.group_id
                        priv.attribute_id = attr.attribute_id
                        priv.access = SapnsAttrPrivilege.ACCESS_READWRITE

                        dbs.add(priv)
                        dbs.flush()

                    else:
                        logger.warning('.....already exists')

                        # fill the "field_regex"
                        if attr and not attr.field_regex:
                            if attr.type == SapnsAttribute.TYPE_INTEGER and \
                            not attr.name.startswith('id_'):
                                # signed
                                attr.field_regex = r'^\s*(\+|\-)?\d+\s*$'

                            elif attr.type == SapnsAttribute.TYPE_FLOAT:
                                # signed
                                attr.field_regex = r'^\s*(\+|\-)?\d{1,%d}(\.\d{1,%d})?\s*$' % \
                                    (col['prec'] - col['scale'], col['scale'])

                            elif attr.type == SapnsAttribute.TYPE_TIME:
                                attr.field_regex = r'^\s*([01][0-9]|2[0-3]):[0-5][0-9](:[0-5][0-9])?\s*$'

                # foreign key
                # NOTE(review): relies on attr being bound for FK columns;
                # for id/_created/_updated columns attr may be None or stale.
                if col['fk_table'] != None:
                    pending_attr[attr.attribute_id] = col['fk_table'].name

            # Audit columns + log trigger, except for the internal tables.
            if tbl['name'] not in [
                    u'sp_logs', u'sp_role_permission', u'sp_user_role'
            ]:

                _log_attributes = []

                # _created
                if not log_attributes.created:
                    _log_attributes.append('ADD _created TIMESTAMP')

                # _updated
                if not log_attributes.updated:
                    _log_attributes.append('ADD _updated TIMESTAMP')

                if _log_attributes:
                    _alter = 'ALTER TABLE %s %s;' % (
                        tbl['name'], ', '.join(_log_attributes))
                    logger.info(_alter)

                    # Best-effort DDL: failures are logged, not fatal.
                    try:
                        dbs.execute(_alter)
                        dbs.flush()

                    except Exception, e:
                        #dbs.rollback()
                        logger.error(e)

                # log trigger function
                try:
                    logf = tmpl.render(
                        tbl_name=tbl['name'],
                        cols=log_cols,
                        class_id=klass.class_id,
                    )
                    #logger.info(logf)
                    dbs.execute(logf)
                    dbs.flush()

                except Exception, e:
                    #dbs.rollback()
                    logger.error(e)

                # log triggers
                # Create the trigger only if it does not already exist.
                log_trigger = 'SELECT COUNT(*) FROM pg_trigger WHERE tgname = \'zzzflog_%s\'' % tbl[
                    'name']
                lt = dbs.execute(log_trigger).fetchone()
                if lt[0] == 0:
                    _trigger = '''create trigger zzzflog_%s
                                  after insert or update or delete
                                  on %s
                                  for each row
                                  execute procedure flog_%s();''' % (
                        (tbl['name'], ) * 3)

                    #logger.info(_trigger)
                    try:
                        dbs.execute(_trigger)
                        dbs.flush()

                    except Exception, e:
                        #dbs.rollback()
                        logger.error(e)
Exemple #19
0
    if name == 'Task':
        oblist[name][
            'with_<lookup_plugin>'] = 'DEPRECATED: use ``loop`` instead, with_ used to be how loops were defined, '
        'it can use any available lookup plugin to generate the item list'

    # local_action is implicit with action
    if 'action' in oblist[name]:
        oblist[name][
            'local_action'] = 'Same as action but also implies ``delegate_to: localhost``'

    # remove unusable (used to be private?)
    for nouse in ('loop_args'):
        if nouse in oblist[name]:
            del oblist[name][nouse]

# Render the keyword-documentation template into the output directory.
env = Environment(
    loader=FileSystemLoader(options.template_dir),
    trim_blocks=True,
)
template = env.get_template(template_file)
# 'keywords.rst.j2' -> '<output_dir>keywords.rst'
outputname = options.output_dir + template_file.replace('.j2', '')
tempvars = {'oblist': oblist, 'clist': clist}

keyword_page = template.render(tempvars)
if LooseVersion(jinja2.__version__) < LooseVersion('2.10'):
    # jinja2 < 2.10's indent filter indents blank lines.  Cleanup
    keyword_page = re.sub(' +\n', '\n', keyword_page)

with open(outputname, 'w') as f:
    f.write(keyword_page)
def render_template(filename, vars_dict):
    """Render a Jinja2 template from ./templates and parse the result as YAML.

    :param filename: template file name inside the ./templates directory.
    :param vars_dict: mapping of variables passed to the template.
    :return: the rendered manifest parsed into Python data structures.
    """
    environment = Environment(loader=FileSystemLoader('./templates'))
    rendered = environment.get_template(filename).render(vars_dict)
    # The rendered text is a YAML manifest; parse it before returning.
    return yaml.safe_load(rendered)
Exemple #21
0
# NOTE(review): hard-coded secret key -- acceptable for development only;
# load it from configuration or the environment before deploying.
app.secret_key = 'secret key'

# Session key under which the logged-in user's id is stored.
SESSION_USERID = 'userid'

# Directory containing this module; used to anchor all relative paths.
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))

# custom static directories
STATIC_DIRS = (
    os.path.join(ROOT_DIR, 'css'),
    os.path.join(ROOT_DIR, 'js'),
)

# configure template directories
app.jinja_loader = FileSystemLoader([
    os.path.join(ROOT_DIR, 'templates'),
    os.path.join(ROOT_DIR, 'templates_helpers'),
])

## database handlers


@app.before_first_request
def init_database():
    """Create the database if it does not yet exist.

    Registered to run once, before the first request is served; schema
    migrations are applied as part of connection setup.
    """
    db.create_db_connection(migrate=True)


@app.before_request
def before_request():
    """Connect to database before each request.
def _render_template(template_context, template_file_name):
    env = Environment(
        loader=FileSystemLoader(os.path.abspath(os.path.dirname(__file__))))
    template = env.get_template(template_file_name)
    rendered_template = template.render(template_context)
    return rendered_template
Exemple #23
0
    commands = [
        "sudo chmod +x " + tmp_config_path + script, tmp_config_path + script +
        " BACKUP " + interface + " " + vrrpip + " " + kp
    ]
    funct.ssh_command(slave, commands)

    os.system("rm -f %s" % script)

# Install HAProxy on the requested server (optionally with SYN-flood
# protection) when the form asks for it.
if form.getvalue('haproxyaddserv'):
    funct.install_haproxy(form.getvalue('haproxyaddserv'),
                          syn_flood=form.getvalue('syn_flood'))

# Render the per-user metrics table fragment for the AJAX endpoint.
if form.getvalue('table_metrics'):
    import http.cookies
    from jinja2 import Environment, FileSystemLoader
    env = Environment(loader=FileSystemLoader('templates/ajax'))
    template = env.get_template('table_metrics.html')

    # The user is identified by the 'uuid' cookie set at login.
    cookie = http.cookies.SimpleCookie(os.environ.get("HTTP_COOKIE"))
    user_id = cookie.get('uuid')
    # BUG FIX: the metrics query used to run twice (the first result was
    # assigned to table_stat and discarded); fetch once and reuse it.
    table_stat = sql.select_table_metrics(user_id.value)

    template = template.render(table_stat=table_stat)
    print(template)

if form.getvalue('metrics'):
    from datetime import timedelta
    from bokeh.plotting import figure, output_file, show
    from bokeh.models import ColumnDataSource, HoverTool, DatetimeTickFormatter, DatePicker
    from bokeh.layouts import widgetbox, gridplot
Exemple #24
0
 def setUp(self):
     """Build the ScriptTemplateRenderer under test.

     The template loader is rooted at this test file's directory, so the
     default template is resolved relative to the test module itself.
     """
     loader = FileSystemLoader(dirname(abspath(__file__)))
     self.under_test = ScriptTemplateRenderer(Environment(loader=loader),
                                              DEFAULT_TEMPLATE,
                                              DEFAULT_FILE_MODE)
def main(forge_file_directory):
    """Regenerate the CI support files for a conda-forge feedstock.

    Reads ``conda-forge.yml`` from *forge_file_directory* (falling back to
    built-in defaults), merges it over the default config, removes files
    left behind by older conda-smithy versions, and renders the Circle,
    Travis, AppVeyor and README files from Jinja2 templates.
    """
    # conda-build >= 2 exposes a Config object via conda_build.api;
    # older versions only offer the module-level config.
    if hasattr(conda_build, 'api'):
        build_config = conda_build.api.Config()
    else:
        build_config = conda_build.config.config

    # conda-build has some really fruity behaviour where it needs CONDA_NPY
    # and CONDA_PY in order to even read a meta. Because we compute version
    # matricies anyway the actual number makes absolutely no difference.
    build_config.CONDA_NPY = '99.9'
    build_config.CONDA_PY = 10

    recipe_dir = 'recipe'
    # Built-in defaults; values from conda-forge.yml are merged on top below.
    config = {
        'docker': {
            'executable': 'docker',
            'image': 'condaforge/linux-anvil',
            'command': 'bash'
        },
        'templates': {
            'run_docker_build': 'run_docker_build_matrix.tmpl'
        },
        'travis': {},
        'circle': {},
        'appveyor': {},
        'channels': {
            'sources': ['conda-forge', 'defaults'],
            'targets': [['conda-forge', 'main']]
        },
        'github': {
            'user_or_org': 'conda-forge',
            'repo_name': ''
        },
        'recipe_dir': recipe_dir
    }
    forge_dir = os.path.abspath(forge_file_directory)

    # An older conda-smithy used to have some files which should no longer exist,
    # remove those now.
    old_files = [
        'disabled_appveyor.yml',
        os.path.join('ci_support', 'upload_or_check_non_existence.py'),
    ]
    for old_file in old_files:
        remove_file(os.path.join(forge_dir, old_file))

    forge_yml = os.path.join(forge_dir, "conda-forge.yml")
    if not os.path.exists(forge_yml):
        warnings.warn('No conda-forge.yml found. Assuming default options.')
    else:
        # NOTE(review): yaml.load_all without an explicit Loader can execute
        # arbitrary constructors; safe_load_all would be preferable here.
        with open(forge_yml, "r") as fh:
            file_config = list(yaml.load_all(fh))[0] or {}
        # The config is just the union of the defaults, and the overriden
        # values.
        for key, value in file_config.items():
            config_item = config.setdefault(key, value)
            # Deal with dicts within dicts.
            if isinstance(value, dict):
                config_item.update(value)
    config['package'] = meta = meta_of_feedstock(forge_file_directory,
                                                 config=build_config)
    # Derive the GitHub repo name from the directory when not configured.
    if not config['github']['repo_name']:
        feedstock_name = os.path.basename(forge_dir)
        if not feedstock_name.endswith("-feedstock"):
            feedstock_name += "-feedstock"
        config['github']['repo_name'] = feedstock_name

    # 'enabled' is no longer a supported per-CI switch; warn and drop it.
    for each_ci in ["travis", "circle", "appveyor"]:
        if config[each_ci].pop("enabled", None):
            warnings.warn(
                "It is not allowed to set the `enabled` parameter for `%s`."
                " All CIs are enabled by default. To disable a CI, please"
                " add `skip: true` to the `build` section of `meta.yaml`"
                " and an appropriate selector so as to disable the build." \
                % each_ci
            )

    tmplt_dir = os.path.join(conda_forge_content, 'templates')
    # Load templates from the feedstock in preference to the smithy's templates.
    env = Environment(loader=FileSystemLoader(
        [os.path.join(forge_dir, 'templates'), tmplt_dir]))

    copy_feedstock_content(forge_dir)

    # Render each CI configuration plus the README into the feedstock.
    render_circle(env, config, forge_dir)
    render_travis(env, config, forge_dir)
    render_appveyor(env, config, forge_dir)
    render_README(env, config, forge_dir)
Exemple #26
0
class ProxyApp(object):
    """Klein web application that reverse-proxies a site behind CAS login."""
    app = Klein()
    # XML namespace used in CAS serviceValidate responses.
    ns = "{http://www.yale.edu/tp/cas}"
    port = None
    # Max allowed clock skew (seconds) when validating a SAML logout's
    # IssueInstant timestamp.
    logout_instant_skew = 5
    # CAS protocol query-parameter names.
    ticket_name = 'ticket'
    service_name = 'service'
    renew_name = 'renew'
    pgturl_name = 'pgtUrl'
    reactor = reactor
    # Optional path that serves the authenticated user's info as JSON.
    auth_info_resource = None
    auth_info_callback = None
    # Header used to pass the authenticated username to the proxied site.
    remoteUserHeader = 'Remote-User'
    logout_patterns = None
    logout_passthrough = False
    verbose = False
    # Optional twisted client endpoint description strings.
    proxy_client_endpoint_s = None
    cas_client_endpoint_s = None
    
    def __init__(self, proxied_url, cas_info,
            fqdn=None, authorities=None, plugins=None, is_https=True,
            excluded_resources=None, excluded_branches=None,
            remote_user_header=None, logout_patterns=None,
            logout_passthrough=False,
            template_dir=None, template_resource='/_templates',
            proxy_client_endpoint_s=None, cas_client_endpoint_s=None):
        """Configure the CAS-protecting reverse proxy.

        :param proxied_url: base URL of the proxied site (trailing '/' stripped).
        :param cas_info: mapping with CAS endpoints ('login_url',
            'service_validate_url', optional 'logout_url').
        :param fqdn: external hostname; defaults to socket.getfqdn().
        :param authorities: optional PEM CA files trusted for backchannel TLS.
        :param plugins: optional plugin objects, sorted into roles below by
            the interfaces they provide.
        :param is_https: whether the proxy itself is served over HTTPS.
        :param excluded_resources: exact paths exempt from CAS protection.
        :param excluded_branches: path prefixes exempt from CAS protection.
        :param remote_user_header: header carrying the username upstream.
        :param logout_patterns: relative URL patterns that trigger logout.
        :param logout_passthrough: also forward logout requests upstream.
        :param template_dir: optional directory of Jinja2 templates.
        :param template_resource: URL base under which templates are served.
        :param proxy_client_endpoint_s: optional twisted endpoint string for
            the client that talks to the proxied site.
        :param cas_client_endpoint_s: optional twisted endpoint string for
            the client that talks to CAS.
        """
        self.proxy_client_endpoint_s = proxy_client_endpoint_s
        self.cas_client_endpoint_s = cas_client_endpoint_s
        self.logout_passthrough = logout_passthrough
        self.template_dir = template_dir
        if template_dir is not None:
            self.template_loader_ = FileSystemLoader(template_dir)
            self.template_env_ = Environment()
            self.templateStaticResource_ = self.create_template_static_resource()
        if template_resource is not None:
            if not template_resource.endswith('/'):
                template_resource = "{0}/".format(template_resource)
        if template_resource is not None and template_dir is not None:
            static_base = "{0}static/".format(template_resource)
            self.static = self.app.route(static_base, branch=True)(self.__class__.static)
            self.static_base = static_base
        self.template_resource = template_resource
        # BUG FIX: default to an empty list when no patterns are given -- the
        # old code left the class-level None in place and then crashed when
        # iterating it (both here and in proxy()).
        if logout_patterns is None:
            self.logout_patterns = []
        else:
            self.logout_patterns = [parse_url_pattern(pattern) for pattern in logout_patterns]
        for pattern in self.logout_patterns:
            assert pattern is None or pattern.scheme == '', (
                "Logout pattern '{0}' must be a relative URL.".format(pattern))
        if remote_user_header is not None:
            self.remoteUserHeader = remote_user_header
        self.excluded_resources = excluded_resources
        self.excluded_branches = excluded_branches
        self.is_https = is_https
        # Normalize the proxied URL and cache its parsed components.
        if proxied_url.endswith('/'):
            proxied_url = proxied_url[:-1]
        self.proxied_url = proxied_url
        p = urlparse.urlparse(proxied_url)
        self.p = p
        self.proxied_scheme = p.scheme
        netloc = p.netloc
        self.proxied_netloc = netloc
        self.proxied_host = netloc.split(':')[0]
        self.proxied_path = p.path
        self.cas_info = cas_info
        # Lower-cased CAS parameter names (used when stripping CAS params).
        cas_param_names = set([])
        cas_param_names.add(self.ticket_name.lower())
        cas_param_names.add(self.service_name.lower())
        cas_param_names.add(self.renew_name.lower())
        cas_param_names.add(self.pgturl_name.lower())
        self.cas_param_names = cas_param_names
        if fqdn is None:
            fqdn = socket.getfqdn()
        self.fqdn = fqdn
        self.valid_sessions = {}
        self.logout_tickets = {}
        self._make_agents(authorities)
        # Sort/tag plugins
        if plugins is None:
            plugins = []
        content_modifiers = []
        info_acceptors = []
        cas_redirect_handlers = []
        interceptors = []
        access_control = []
        for plugin in plugins:
            if IResponseContentModifier.providedBy(plugin):
                content_modifiers.append(plugin)
            if IRProxyInfoAcceptor.providedBy(plugin):
                info_acceptors.append(plugin)
            if ICASRedirectHandler.providedBy(plugin):
                cas_redirect_handlers.append(plugin)
            if IResourceInterceptor.providedBy(plugin):
                interceptors.append(plugin)
            if IAccessControl.providedBy(plugin):
                access_control.append(plugin)
        self.info_acceptors = info_acceptors
        content_modifiers.sort(key=lambda x: x.mod_sequence)
        self.content_modifiers = content_modifiers
        cas_redirect_handlers.sort(key=lambda x: x.cas_redirect_sequence)
        self.cas_redirect_handlers = cas_redirect_handlers
        interceptors.sort(key=lambda x: x.interceptor_sequence)
        self.interceptors = interceptors
        access_control.sort(key=lambda x: x.ac_sequence)
        self.access_control = access_control
        # Create static resources.
        static_resources = {}
        for plugin in plugins:
            if IStaticResourceProvider.providedBy(plugin):
                if plugin.static_resource_base in static_resources:
                    if static_resources[plugin.static_resource_base] != plugin.static_resource_dir:
                        raise Exception("Static resource conflict for '{0}': '{1}' != '{2}'".format(
                            plugin.static_resource_base,
                            static_resources[plugin.static_resource_base],
                            plugin.static_resource_dir))
                else:
                    static_resources[plugin.static_resource_base] = plugin.static_resource_dir
        self.static_handlers = []
        for n, (resource_base, resource_dir) in enumerate(static_resources.iteritems()):
            # BUG FIX: bind resource_dir at definition time via a default
            # argument; the old late-binding lambda made every registered
            # base serve files from the *last* plugin's directory.
            handler = lambda self, request, _dir=resource_dir: File(_dir)
            handler = self.app.route(resource_base, branch=True)(handler)
            self.static_handlers.append(handler)

    def log(self, msg, important=False):
        """Emit *msg* via twisted's log, tagged INFO when important.

        Non-important (DEBUG) messages are suppressed unless verbose
        mode is enabled.
        """
        if not (important or self.verbose):
            return
        tag = "INFO" if important else "DEBUG"
        log.msg("[{0}] {1}".format(tag, msg))

    def handle_port_set(self):
        """Push the proxy/proxied connection details to every info-acceptor plugin.

        Called once the listening port is known; also hands each plugin the
        session-expiry callback so plugins can force a logout.
        """
        for acceptor in self.info_acceptors:
            acceptor.proxy_fqdn = self.fqdn
            acceptor.proxy_port = self.port
            acceptor.proxied_scheme = self.proxied_scheme
            acceptor.proxied_netloc = self.proxied_netloc
            acceptor.proxied_path = self.proxied_path
            acceptor.handle_rproxy_info_set()
            acceptor.expire_session = self._expired

    def _make_agents(self, auth_files):
        """
        Configure the web clients that:
        * perform backchannel CAS ticket validation
        * proxy the target site

        When *auth_files* lists PEM certificate files, those CAs are trusted
        for backchannel TLS in addition to the defaults.  When explicit
        client endpoint strings were configured, dedicated agents with their
        own connection pools are built instead of the shared agent.
        """
        self.connectionPool = HTTPConnectionPool(self.reactor)
        if auth_files is None or len(auth_files) == 0:
            # No extra CAs: use the default TLS policy.
            agent = Agent(self.reactor, pool=self.connectionPool)
        else:
            # Load each PEM file and trust those CAs in addition to defaults.
            extra_ca_certs = []
            for ca_cert in auth_files:
                with open(ca_cert, "rb") as f:
                    data = f.read()
                cert = crypto.load_certificate(crypto.FILETYPE_PEM, data)
                del data
                extra_ca_certs.append(cert)
            policy = CustomPolicyForHTTPS(extra_ca_certs)
            agent = Agent(self.reactor, contextFactory=policy, pool=self.connectionPool)
        if self.proxy_client_endpoint_s is not None:
            # Dedicated agent/pool for talking to the proxied site.
            self.proxyConnectionPool = HTTPConnectionPool(self.reactor)
            self.proxy_agent = Agent.usingEndpointFactory(
                self.reactor,
                WebClientEndpointFactory(self.reactor, self.proxy_client_endpoint_s),
                pool=self.proxyConnectionPool)
        else:
            self.proxy_agent = agent
        if self.cas_client_endpoint_s is not None:
            # Dedicated agent/pool for talking to the CAS server.
            self.casConnectionPool = HTTPConnectionPool(self.reactor)
            self.cas_agent = Agent.usingEndpointFactory(
                self.reactor,
                WebClientEndpointFactory(self.reactor, self.cas_client_endpoint_s),
                pool=self.casConnectionPool) 
        else:
            self.cas_agent = agent

    def is_excluded(self, request):
        """Return True if *request.path* is exempt from CAS protection.

        A path is excluded when it exactly matches an entry in
        ``excluded_resources`` or falls under one of the
        ``excluded_branches`` prefixes.  BUG FIX: both collections default
        to None in __init__; the old code raised TypeError in that case --
        treat None as "nothing excluded".
        """
        resource = request.path
        if self.excluded_resources is not None and resource in self.excluded_resources:
            return True
        if self.excluded_branches is not None:
            for excluded in self.excluded_branches:
                if proxyutils.is_resource_or_child(excluded, resource):
                    return True
        return False

    def mod_headers(self, h):
        """Rewrite client request headers before forwarding to the proxied host.

        Host and Origin are repointed at the proxied netloc, Content-Length
        is dropped (the HTTP client recomputes it), and Referer is rewritten
        into proxied-space -- or removed when it cannot be mapped.
        Mutates and returns *h*.
        """
        # keymap: lower-cased header name -> original-case keys present in h.
        keymap = {}
        for k,v in h.iteritems():
            key = k.lower()
            if key in keymap:
                keymap[key].append(k)
            else:
                keymap[key] = [k]
        if 'host' in keymap:
            for k in keymap['host']:
                h[k] = [self.proxied_netloc]
        if 'origin' in keymap:
            for k in keymap['origin']:
                h[k] = [self.proxied_netloc]
        if 'content-length' in keymap:
            for k in keymap['content-length']:
                del h[k]
        if 'referer' in keymap:
            referer_handled = False 
            keys = keymap['referer']
            # Only rewrite when there is exactly one Referer with one value;
            # anything ambiguous is dropped below.
            if len(keys) == 1:
                k = keys[0]
                values = h[k]
                if len(values) == 1:
                    referer = values[0]
                    new_referer = self.proxy_url_to_proxied_url(referer)
                    if new_referer is not None:
                        h[k] = [new_referer]
                        self.log("Re-wrote Referer header: '%s' => '%s'" % (referer, new_referer))
                        referer_handled = True
            if not referer_handled:
                for k in keymap['referer']:
                    del h[k]
        return h

    def _check_for_logout(self, request):
        """Detect a SAML single-logout POST and expire the matching session.

        Returns True only when the body is a samlp LogoutRequest whose
        IssueInstant is within ``logout_instant_skew`` seconds of now and
        whose single SessionIndex names a ticket we know about; False in
        every other case (each failure path is logged).
        """
        data = request.content.read()
        samlp_ns = "{urn:oasis:names:tc:SAML:2.0:protocol}"
        try:
            root = etree.fromstring(data)
        except Exception as ex:
            # Not XML (or malformed); reported below via the elif branch.
            root = None
        if (root is not None) and (root.tag == "%sLogoutRequest" % samlp_ns):
            instant = root.get('IssueInstant')
            if instant is not None:
                try:
                    instant = parse_date(instant)
                except ValueError:
                    self.log("Invalid issue_instant supplied: '{0}'.".format(instant), important=True)
                    instant = None
                if instant is not None:
                    # Guard against replayed logout requests: the timestamp
                    # must be close to the current time.
                    utcnow = datetime.datetime.utcnow()
                    seconds = abs((utcnow - instant.replace(tzinfo=None)).total_seconds())
                    if seconds <= self.logout_instant_skew:
                        results = root.findall("%sSessionIndex" % samlp_ns)
                        if len(results) == 1:
                            result = results[0]
                            # SessionIndex carries the CAS service ticket we
                            # recorded at login time.
                            ticket = result.text
                            sess_uid = self.logout_tickets.get(ticket, None)
                            if sess_uid is not None:
                                self._expired(sess_uid)
                                return True
                            else:
                                self.log(
                                    ("No matching session for logout request "
                                    "for ticket '{0}'.").format(ticket))
                    else:
                        self.log(
                            ("Issue instant was not within"
                            " {0} seconds of actual time.").format(
                                self.logout_instant_skew), important=True)
                else:
                    self.log("Could not parse issue instant.", important=True)
            else:
                self.log("'IssueInstant' attribute missing from root.", important=True)
        elif root is None:
            self.log("Could not parse XML.", important=True)
        return False

    @app.route("/", branch=True)
    def proxy(self, request):
        """Catch-all route: apply CAS protection, then reverse-proxy.

        Order of checks: logout-pattern URLs (expire session, optionally
        pass through, redirect to CAS logout), excluded paths (proxied
        unprotected), existing valid session (proxied, or auth-info served),
        otherwise CAS authentication (SLO detection, ticket validation, or
        redirect to the CAS login page).
        """
        for pattern in self.logout_patterns:
            if does_url_match_pattern(request.uri, pattern):
                sess = request.getSession()
                sess_uid = sess.uid
                self._expired(sess_uid)
                cas_logout = self.cas_info.get('logout_url', None)
                if cas_logout is not None:
                    if self.logout_passthrough:
                        # Fire-and-forget: forward the logout upstream too.
                        d = self.reverse_proxy(request, protected=False)
                    return request.redirect(cas_logout)
                else:
                    return self.reverse_proxy(request, protected=False)
        if self.is_excluded(request):
            return self.reverse_proxy(request, protected=False)
        valid_sessions = self.valid_sessions
        sess = request.getSession()
        sess_uid = sess.uid
        if not sess_uid in valid_sessions:
            self.log(
                ("Session {0} not in valid sessions.  "
                "Will authenticate with CAS.").format(sess_uid))
            # XML POSTs may be SAML single-logout requests from CAS.
            if request.method == 'POST':
                headers = request.requestHeaders
                if headers.hasHeader("Content-Type"):
                    ct_list =  headers.getRawHeaders("Content-Type") 
                    #log.msg("[DEBUG] ct_list: %s" % str(ct_list))
                    for ct in ct_list:
                        if ct.find('text/xml') != -1 or ct.find('application/xml') != -1:
                            if self._check_for_logout(request):
                                return ""
                            else:
                                break
            # CAS Authentication
            # Does this request have a ticket?  I.e. is it coming back from a successful
            # CAS authentication?
            args = request.args
            ticket_name = self.ticket_name
            if ticket_name in args:
                values = args[ticket_name]
                if len(values) == 1:
                    ticket = values[0]
                    d = self.validate_ticket(ticket, request)
                    return d
            # If no ticket is present, redirect to CAS.
            d = self.redirect_to_cas_login(request)
            return d
        elif request.path == self.auth_info_resource:
            self.log("Providing authentication info.")
            return self.deliver_auth_info(request)
        else:
            d = self.reverse_proxy(request)
            return d

    def deliver_auth_info(self, request):
        """Serve the authenticated user's name and attributes as JSON.

        Assumes the request's session is present in ``valid_sessions``
        (the caller checks this before dispatching here).
        """
        info = self.valid_sessions[request.getSession().uid]
        payload = json.dumps({
            'username': info['username'],
            'attributes': info['attributes'],
        })
        request.responseHeaders.setRawHeaders('Content-Type', ['application/json'])
        return payload
        
    def get_url(self, request):
        """Reconstruct the absolute external URL for *request*.

        Uses the proxy's scheme, FQDN and port; the port is omitted from
        the netloc when it equals the scheme's default.
        """
        scheme, default_port = ('https', 443) if self.is_https else ('http', 80)
        port = self.port
        if port is None:
            port = default_port
        if port == default_port:
            base = "%s://%s" % (scheme, self.fqdn)
        else:
            base = "%s://%s:%d" % (scheme, self.fqdn, port)
        return urlparse.urljoin(base, request.uri)
        
    def redirect_to_cas_login(self, request):
        """
        Begin the CAS redirection process.

        Each redirect-handler plugin may rewrite the service URL in turn
        (as a deferred chain); the final URL is handed to
        complete_redirect_to_cas_login().
        """
        service_url = self.get_url(request)
        chain = None
        for handler in self.cas_redirect_handlers:
            if chain is None:
                chain = defer.maybeDeferred(handler.intercept_service_url, service_url, request)
            else:
                chain.addCallback(handler.intercept_service_url, request)
        if chain is None:
            # No plugins: redirect synchronously.
            return self.complete_redirect_to_cas_login(service_url, request)
        chain.addCallback(self.complete_redirect_to_cas_login, request)
        return chain
                
    def complete_redirect_to_cas_login(self, service_url, request):
        """
        Complete the CAS redirection process.

        Append the service parameter to the configured CAS login URL
        (merging with any query string it already carries) and redirect
        the user-agent there.
        """
        parts = urlparse.urlparse(self.cas_info['login_url'])
        extra = {self.service_name: service_url}
        if parts.query == '':
            query = urlencode(extra)
        else:
            # Preserve whatever query parameters the login URL already has.
            merged = urlparse.parse_qs(parts.query)
            merged.update(extra)
            query = urlencode(merged, doseq=True)
        parts = urlparse.ParseResult(*tuple(parts[:4] + (query,) + parts[5:]))
        url = urlparse.urlunparse(parts)
        self.log("Redirecting to CAS with URL '{0}'.".format(url))
        return request.redirect(url)
        
    def validate_ticket(self, ticket, request):
        """Validate *ticket* against the CAS serviceValidate endpoint.

        The service URL sent for validation is this request's URL with the
        ticket parameter stripped.  Returns a deferred that fires with the
        result of parse_sv_results().
        """
        # Rebuild this request's URL without the ticket query parameter.
        parts = urlparse.urlparse(self.get_url(request))
        query_map = urlparse.parse_qs(parts.query)
        if self.ticket_name in query_map:
            del query_map[self.ticket_name]
        query = urlencode(query_map, doseq=True)
        parts = urlparse.ParseResult(*tuple(parts[:4] + (query,) + parts[5:]))
        service_url = urlparse.urlunparse(parts)
        # Compose the serviceValidate URL with service + ticket parameters.
        query = urlencode({
                self.service_name: service_url,
                self.ticket_name: ticket,}, doseq=True)
        parts = urlparse.urlparse(self.cas_info['service_validate_url'])
        parts = urlparse.ParseResult(*tuple(parts[:4] + (query,) + parts[5:]))
        service_validate_url = urlparse.urlunparse(parts)
        self.log(
            "Requesting service-validate URL => '{0}' ...".format(
                service_validate_url))
        d = HTTPClient(self.cas_agent).get(service_validate_url)
        d.addCallback(treq.content)
        d.addCallback(self.parse_sv_results, service_url, ticket, request)
        return d
        
    def parse_sv_results(self, payload, service_url, ticket, request):
        """Parse the CAS serviceValidate XML response and establish the session.

        On success: collects released attributes, runs access-control
        plugins, records the session and its logout ticket, notifies the
        auth-info callback, and redirects back to *service_url*.  Failures
        render a 500 (XML parse error) or 403 (validation/authorization
        failure) page.
        """
        self.log("Parsing /serviceValidate results  ...")
        ns = self.ns
        try:
            root = etree.fromstring(payload)
        except (etree.XMLSyntaxError,) as ex:
            self.log((
                    "error='Error parsing XML payload.' "
                    "service='{0}' ticket={1}'/n{2}"
                    ).format(service_url, ticket, ex), important=True)
            return self.render_template_500(request)
        if root.tag != ('%sserviceResponse' % ns):
            self.log((
                    "error='Error parsing XML payload.  No `serviceResponse`.' "
                    "service='{0}' ticket={1}'"
                    ).format(service_url, ticket), important=True)
            return self.render_template_403(request)
        results = root.findall("{0}authenticationSuccess".format(ns))
        if len(results) != 1:
            self.log((
                    "error='Error parsing XML payload.  No `authenticationSuccess`.' "
                    "service='{0}' ticket={1}'"
                    ).format(service_url, ticket), important=True)
            return self.render_template_403(request)
        success = results[0]
        results = success.findall("{0}user".format(ns))
        if len(results) != 1:
            self.log((
                    "error='Error parsing XML payload.  Not exactly 1 `user`.' "
                    "service='{0}' ticket={1}'"
                    ).format(service_url, ticket), important=True)
            return self.render_template_403(request)
        user = results[0]
        username = user.text
        # Collect any released attributes into name -> [values].
        attributes = success.findall("{0}attributes".format(ns))
        attrib_map = {}
        for attrib_container in attributes:
            for elm in attrib_container.findall('./*'):
                tag_name = elm.tag[len(ns):]
                value = elm.text
                attrib_map.setdefault(tag_name, []).append(value)
        # Access control plugins
        access_control = self.access_control
        for ac_plugin in access_control:
            is_allowed, reason = ac_plugin.isAllowed(username, attrib_map)
            if not is_allowed:
                self.log((
                        "Access denied:  user='******' ac_plugin='{ac_plugin}' "
                        "reason={reason}, service='{service}' ticket='{ticket}'"
                        ).format(
                            username=username, 
                            ac_plugin=ac_plugin.tagname, 
                            service=service_url, 
                            ticket=ticket,
                            reason=reason), important=True)
                return self.render_template_403(request, username=username, reason=reason)
        # Update session session
        valid_sessions = self.valid_sessions
        logout_tickets = self.logout_tickets
        sess = request.getSession()
        sess_uid = sess.uid
        if sess_uid not in valid_sessions:
            valid_sessions[sess_uid] = {}
        valid_sessions[sess_uid].update({
            'username': username,
            'ticket': ticket,
            'attributes': attrib_map})
        # Remember ticket -> session so SAML single logout can find it.
        if not ticket in logout_tickets:
            logout_tickets[ticket] = sess_uid
        auth_info_callback = self.auth_info_callback
        if auth_info_callback is not None: 
            auth_info_callback(username, attrib_map)
        # Clean up all session state when twisted expires the session.
        sess.notifyOnExpire(lambda: self._expired(sess_uid))
        # Reverse proxy.
        return request.redirect(service_url)
        
    def _expired(self, uid):
        """Session-expiry callback: drop all state tied to session *uid*.

        Removes the session entry and its logout-ticket mapping, and tells
        the auth-info observer (if any) that the user no longer has a
        session.  A no-op for unknown uids.
        """
        if uid not in self.valid_sessions:
            return
        session_info = self.valid_sessions.pop(uid)
        username = session_info['username']
        ticket = session_info['ticket']
        if self.auth_info_callback is not None:
            self.auth_info_callback(username, None)
        # Forget the CAS ticket -> session mapping used for single logout.
        self.logout_tickets.pop(ticket, None)
        self.log(
            ("label='Expired session.' session_id='{0}' "
            "username='******'").format(uid, username))
        
    def reverse_proxy(self, request, protected=True):
        """Forward *request* to the proxied site and relay the response.

        When *protected*, the authenticated username from the valid session
        is attached via the configured remote-user header.  Interceptor
        plugins may take over the request entirely, and websocket upgrade
        requests are dispatched separately.  Returns a deferred firing with
        the (possibly plugin-modified) response body.
        """
        if protected:
            sess = request.getSession()
            valid_sessions = self.valid_sessions
            sess_uid = sess.uid
            username = valid_sessions[sess_uid]['username']
        # Normal reverse proxying.
        kwds = {}
        cookiejar = {}
        kwds['allow_redirects'] = False
        kwds['cookies'] = cookiejar
        req_headers = self.mod_headers(dict(request.requestHeaders.getAllRawHeaders()))
        kwds['headers'] = req_headers
        if protected:
            kwds['headers'][self.remoteUserHeader] = [username]
        if request.method in ('PUT', 'POST'):
            kwds['data'] = request.content.read()
        url = self.proxied_url + request.uri
        # Determine if a plugin wants to intercept this URL.
        interceptors = self.interceptors
        for interceptor in interceptors:
            if interceptor.should_resource_be_intercepted(url, request.method, req_headers, request):
                return interceptor.handle_resource(url, request.method, req_headers, request)
        # Check if this is a request for a websocket.
        d = self.checkForWebsocketUpgrade(request)
        if d is not None:
            return d
        # Typical reverse proxying.    
        self.log("Proxying URL => {0}".format(url))
        http_client = HTTPClient(self.proxy_agent) 
        d = http_client.request(request.method, url, **kwds)

        def process_response(response, request):
            # Copy status and headers onto our response; redirect Location
            # headers are mapped back into proxy-space and Set-Cookie values
            # are passed through mod_cookies().
            req_resp_headers = request.responseHeaders
            resp_code = response.code
            resp_headers = response.headers
            resp_header_map = dict(resp_headers.getAllRawHeaders())
            # Rewrite Location headers for redirects as required.
            if resp_code in (301, 302, 303, 307, 308) and "Location" in resp_header_map:
                values = resp_header_map["Location"]
                if len(values) == 1:
                    location = values[0]
                    if request.isSecure():
                        proxy_scheme = 'https'
                    else:
                        proxy_scheme = 'http'
                    new_location = self.proxied_url_to_proxy_url(proxy_scheme, location)
                    if new_location is not None:
                        resp_header_map['Location'] = [new_location]
            request.setResponseCode(response.code, message=response.phrase)
            for k,v in resp_header_map.iteritems():
                if k == 'Set-Cookie':
                    v = self.mod_cookies(v)
                req_resp_headers.setRawHeaders(k, v)
            return response
            
        def mod_content(body, request):
            """
            Modify response content before returning it to the user agent.

            Chains each registered content-modifier plugin over the body.
            """
            d = None
            for content_modifier in self.content_modifiers:
                if d is None:
                    d = content_modifier.transform_content(body, request)
                else:
                    d.addCallback(content_modifier.transform_content, request)
            if d is None:
                return body
            else:
                return d
            
        d.addCallback(process_response, request)
        d.addCallback(treq.content)
        d.addCallback(mod_content, request)
        return d

    def checkForWebsocketUpgrade(self, request):
        """
        Detect a websocket upgrade handshake and, if present, build and
        return a websocket proxy resource that bridges the client to the
        proxied backend.  Returns None for ordinary HTTP requests.
        """
        def _extract(name):
            # Split a comma-separated header value into a set of tokens.
            raw_value = request.getHeader(name)
            if raw_value is None:
                return set([])
            else:
                return set(raw_value.split(', '))

        upgrade = _extract("Upgrade")
        connection = _extract("Connection")
        if 'websocket' in upgrade and 'Upgrade' in connection:
            uri = request.uri
            proxy_fqdn = self.fqdn
            proxy_port = self.port
            proxied_scheme = self.proxied_scheme
            proxied_netloc = self.proxied_netloc
            proxied_path = self.proxied_path
            # Client-facing websocket scheme mirrors the proxy's TLS use.
            if self.is_https:
                scheme = 'wss'
            else:
                scheme = 'ws'
            netloc = "{0}:{1}".format(proxy_fqdn, proxy_port)
            proxy_url = "{0}://{1}{2}".format(scheme, netloc, request.uri)
            # Backend websocket scheme mirrors the proxied site's scheme.
            if proxied_scheme == 'https':
                proxied_scheme = 'wss'
            else:
                proxied_scheme = 'ws'
            proxied_url = proxyutils.proxy_url_to_proxied_url(
                proxied_scheme, 
                proxy_fqdn, 
                proxy_port, 
                proxied_netloc, 
                proxied_path, 
                proxy_url,
            )
            origin = proxied_url
            # Endpoint type: plain TCP for ws, SSL for wss.
            kind = "tcp"
            if proxied_scheme == 'wss':
                kind = 'ssl'
            parts = proxied_netloc.split(":", 1)
            proxied_host = parts[0]
            if len(parts) == 2:
                proxied_port = int(parts[1])
            elif proxied_scheme == 'wss':
                proxied_port =  443
            else:
                proxied_port = 80
            extra = "" #TODO: SSL options.
            # Twisted client endpoint description string for the backend.
            proxied_endpoint_str = "{0}:host={1}:port={2}{3}".format(
                kind,
                proxied_host,
                proxied_port,
                extra
            )
            if proxied_url is not None:
                resource = makeWebsocketProxyResource(
                    proxy_url, 
                    proxied_endpoint_str, 
                    proxied_url, 
                    request,
                    reactor=self.reactor, 
                    origin=origin,
                    verbose=self.verbose)
                return resource
        return None
    
    def mod_cookies(self, value_list):
        """
        Rewrite the `path` attribute of Set-Cookie header values: cookie
        paths scoped under the proxied path have that prefix stripped so
        they apply relative to the proxy instead.

        :param value_list: list of raw Set-Cookie header value strings.
        :returns: list of rewritten Set-Cookie header value strings.
        """
        proxied_path = self.proxied_path
        proxied_path_size = len(proxied_path)
        results = []
        for cookie_value in value_list:
            c = Cookie.SimpleCookie()
            c.load(cookie_value)
            for k in c.keys():
                m = c[k]
                # BUG FIX: `Morsel.has_key()` is Python-2-only; the `in`
                # test is equivalent and portable.
                if 'path' in m:
                    m_path = m['path']
                    if self.is_proxy_path_or_child(m_path):
                        m_path = m_path[proxied_path_size:]
                        m['path'] = m_path
            # output() prefixes each rendered cookie with a space; drop it.
            results.append(c.output(header='')[1:])
        return results
                     
    def is_proxy_path_or_child(self, path):
        # Delegate: true when `path` equals the proxied path or lies beneath it.
        return proxyutils.is_proxy_path_or_child(self.proxied_path, path)
    
    def proxied_url_to_proxy_url(self, proxy_scheme, target_url):
        """
        Map a URL on the proxied site to the equivalent URL on this proxy,
        using the configured proxy host/port and proxied location.
        """
        return proxyutils.proxied_url_to_proxy_url(
            proxy_scheme,
            self.fqdn, 
            self.port, 
            self.proxied_netloc, 
            self.proxied_path, 
            target_url)
        
    def proxy_url_to_proxied_url(self, target_url):
        """
        Map a URL on this proxy to the equivalent URL on the proxied site
        (inverse of proxied_url_to_proxy_url).
        """
        return proxyutils.proxy_url_to_proxied_url(
            self.proxied_scheme,
            self.fqdn, 
            self.port, 
            self.proxied_netloc,
            self.proxied_path,
            target_url)

    def get_template_static_base(self):
        """
        Return the URL base for template static assets, or None when no
        template resource root is configured.
        """
        resource = self.template_resource
        if resource is None:
            return None
        return '{0}static/'.format(resource)

    def render_template_403(self, request, **kwargs):
        """
        Render the 403 (forbidden) error page.  When no template directory
        is configured, just set a bare 403 status and return an empty body.
        """
        if self.template_dir is None:
            request.setResponseCode(403)
            return ""
        return self.render_template('error/403.jinja2', request=request, **kwargs)

    def render_template_500(self, request, **kwargs):
        """
        Render the 500 (server error) page.  When no template directory
        is configured, just set a bare 500 status and return an empty body.
        """
        if self.template_dir is None:
            request.setResponseCode(500)
            return ""
        return self.render_template('error/500.jinja2', request=request, **kwargs)

    def render_template(self, template_name, **kwargs):
        """
        Render the named template to UTF-8 encoded bytes.

        `static_base` is always passed to the template alongside any extra
        keyword arguments.

        :raises Exception: if the template cannot be found.
        """
        # NOTE: removed the original's unused local `template_dir`.
        try:
            template = self.template_loader_.load(self.template_env_, template_name)
        except TemplateNotFound:
            raise Exception("The template '{0}' was not found.".format(template_name))
        return template.render(static_base=self.static_base, **kwargs).encode('utf-8')
    
    def create_template_static_resource(self):
        """
        Build a File resource serving the `static` folder inside the
        configured template directory.
        """
        return File(os.path.join(self.template_dir, 'static'))

    def static(self, request):
        # Serve files from the template static resource (presumably built
        # via create_template_static_resource during init -- confirm).
        return self.templateStaticResource_

    @app.handle_errors(Exception)
    def handle_uncaught_errors(self, request, failure):
        """
        Last-resort error handler: log the failure and render the 500
        error page back to the client.
        """
        self.log("Uncaught exception: {0}".format(failure), important=True)
        return self.render_template_500(request=request, failure=failure)
Exemple #27
0
def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
                              warn=_simple_warn, info=_simple_info,
                              base_path=None, builder=None, template_dir=None):
    """
    Generate autosummary stub pages for every autosummary directive found
    in `sources`.

    :param sources: iterable of source file names to scan.
    :param output_dir: directory to write stubs into; defaults to each
        directive's own :toctree: path.
    :param suffix: file suffix for generated stub files.
    :param warn: callable used for warnings.
    :param info: callable used for progress messages.
    :param base_path: optional prefix joined onto every source path.
    :param builder: optional Sphinx builder whose template dirs override
        the built-in templates.
    :param template_dir: extra directory searched first for templates.
    """
    showed_sources = list(sorted(sources))
    if len(showed_sources) > 20:
        showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
    info('[autosummary] generating autosummary for: %s' %
         ', '.join(showed_sources))

    if output_dir:
        info('[autosummary] writing to %s' % output_dir)

    if base_path is not None:
        sources = [os.path.join(base_path, filename) for filename in sources]

    # create our own templating environment
    template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
    if builder is not None:
        # allow the user to override the templates
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        if template_dir:
            template_dirs.insert(0, template_dir)
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader,
                                        extensions=["jinja2.ext.do"])

    # read
    items = find_autosummary_in_files(sources)

    # remove possible duplicates
    items = dict([(item, True) for item in items]).keys()

    # keep track of new files
    new_files = []

    # write
    for name, path, template_name in sorted(items):
        if path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = output_dir or os.path.abspath(path)
        ensuredir(path)

        try:
            name, obj, parent = import_by_name(name)
        # BUG FIX: `except ImportError, e` is Python-2-only syntax; the
        # `as` form works on Python 2.6+ and Python 3.
        except ImportError as e:
            warn('[autosummary] failed to import %r: %s' % (name, e))
            continue

        fn = os.path.join(path, name + suffix)

        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)

        # `with` replaces the original's open/try/finally bookkeeping.
        with open(fn, 'w') as f:
            doc = get_documenter(obj, parent)

            if template_name is not None:
                template = template_env.get_template(template_name)
            else:
                try:
                    template = template_env.get_template('autosummary/%s.rst'
                                                         % doc.objtype)
                except TemplateNotFound:
                    template = template_env.get_template(
                        'autosummary/base.rst')

            def get_members(obj, typ, include_public=()):
                # XXX: whole function is a patch!
                # BUG FIX: the default was a mutable list; also initialise
                # `items` so an unknown `typ` cannot raise UnboundLocalError.
                items = []
                if typ in ('function', 'class', 'exception'):
                    # modules seem to work
                    items = [name for name in dir(obj)
                             if get_documenter(getattr(obj, name),
                                                       obj).objtype == typ
                    ]
                    items = [name for name in items
                             if getattr(obj, name).__module__ == obj.__name__]
                elif typ == 'method':
                    # filter methods (__call__) which are defined within this
                    # class (im_class)
                    items = [name for name in dir(obj)
                             if hasattr(getattr(obj, name), '__call__')
                             and hasattr(getattr(obj, name), 'im_class')]
                elif typ == 'attribute':
                    # attribute
                    items = [name for name in dir(obj)
                             if not hasattr(getattr(obj, name), '__call__')]
                public = [x for x in items
                          if not x.startswith('_') or x.endswith('__')]
                return public, items

            ns = {}

            if doc.objtype == 'module':
                ns['members'] = dir(obj)
                ns['functions'], ns['all_functions'] = \
                                   get_members(obj, 'function')
                ns['classes'], ns['all_classes'] = \
                                 get_members(obj, 'class')
                ns['exceptions'], ns['all_exceptions'] = \
                                   get_members(obj, 'exception')
            elif doc.objtype == 'class':
                ns['members'] = dir(obj)
                ns['methods'], ns['all_methods'] = \
                                 get_members(obj, 'method', ['__init__'])
                ns['attributes'], ns['all_attributes'] = \
                                 get_members(obj, 'attribute')

            parts = name.split('.')
            if doc.objtype in ('method', 'attribute'):
                mod_name = '.'.join(parts[:-2])
                cls_name = parts[-2]
                obj_name = '.'.join(parts[-2:])
                ns['class'] = cls_name
            else:
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

            ns['fullname'] = name
            ns['module'] = mod_name
            ns['objname'] = obj_name
            ns['name'] = parts[-1]

            ns['objtype'] = doc.objtype
            ns['underline'] = len(name) * '='

            rendered = template.render(**ns)
            f.write(rendered)
from jinja2 import Environment, FileSystemLoader

from lib.wrappers.ghapiwrapper import ratecheck
from lib.wrappers.ghapiwrapper import GithubWrapper
from lib.wrappers.issuewrapper import IssueWrapper
from lib.utils.moduletools import ModuleIndexer
from lib.utils.file_tools import FileIndexer
from lib.utils.version_tools import AnsibleVersionIndexer
from lib.utils.extractors import extract_template_data
from lib.utils.descriptionfixer import DescriptionFixer

# Locate the repository root: walk up from this file's path to the
# directory that contains 'lib'.
# NOTE(review): assumes POSIX '/' separators -- confirm target platforms.
basepath = os.path.dirname(__file__).split('/')
libindex = basepath.index('lib')
basepath = '/'.join(basepath[0:libindex])
# Jinja2 environment rooted at the repo-level 'templates' directory.
loader = FileSystemLoader(os.path.join(basepath, 'templates'))
environment = Environment(loader=loader, trim_blocks=True)

# A dict of alias labels. It is used for coupling a template (comment) with a
# label.

# Maintainer list file names, keyed by module repository ('core' vs 'extras').
MAINTAINERS_FILES = {
    'core': "MAINTAINERS-CORE.txt",
    'extras': "MAINTAINERS-EXTRAS.txt",
}


# Static labels, manually added
IGNORE_LABELS = [
    "feature_pull_request",
    "bugfix_pull_request",
#!/usr/bin/env python3
import argparse
from datetime import datetime
from jinja2 import (
    FileSystemLoader,
    Environment,
)
import os

# Directory containing this script; templates live alongside it.
root = os.path.dirname(os.path.abspath(__file__))
template_dir = os.path.join(root, 'templates')
# keep_trailing_newline preserves each template's final newline so the
# generated files end with one.
env = Environment(
    loader=FileSystemLoader(template_dir),
    keep_trailing_newline=True,
)

# Maps a rendered file name to an output location label.
# NOTE(review): presumably consumed by generation code below -- confirm.
template_mappings = {
    '__main__.py': 'src',
}


def template_name_to_filename(template_name):
    """
    Strip a trailing '.j2' template extension from a file name.

    BUG FIX: the original used str.rstrip('.j2'), which strips any run of
    the characters '.', 'j' and '2' from the right -- e.g. 'proj.j2'
    became 'pro' -- rather than removing the literal suffix.
    """
    if template_name.endswith('.j2'):
        return template_name[:-len('.j2')]
    return template_name


def one_of(choices):
    """
    Build a validator that accepts only values contained in `choices`.

    The returned callable echoes back a valid value and raises TypeError
    for anything else (suitable as an argparse `type=` function).
    """
    def _one_of(value):
        if value in choices:
            return value
        raise TypeError('must be one of {}'.format(choices))
    return _one_of
Exemple #30
0
#!/usr/bin/env python

from os.path import dirname, join
from jinja2 import Environment, FileSystemLoader

# http://jinja.pocoo.org/docs/2.10/templates/

# Jinja2 environment over the 'templates' directory next to this script;
# lstrip/trim options remove block-tag whitespace from the output.
templates_path = join(dirname(__file__), "templates")
templating_environment = Environment(loader=FileSystemLoader(templates_path),
                                     lstrip_blocks=True,
                                     trim_blocks=True)

template = templating_environment.get_template("pirates.jinja2")

# Sample context data rendered into the template.
pirates_and_ships = [
    dict(captain="Blackbeard", ship="Queen Anne's Revenge"),
    dict(captain="William Kidd", ship="Adventure Galley")
]

print(template.render(pirates=pirates_and_ships))

 def get_stack_env(self):
     # TODO: This is perhaps where you would want to put in functionality to do custom imports
     # Build a dedicated Jinja2 environment rooted at the stacks directory.
     self.stack_env = Environment(loader=FileSystemLoader(
         self.stacks_dir))  # Separate for testing purposes
 def __init__(self, searchpath):
     # Initialise the base FileSystemLoader and remember the search path
     # for later reference.
     FileSystemLoader.__init__(self, searchpath)
     self._path = searchpath
 def get_tpl_env(self):
     # Build the Jinja2 environment for ordinary templates, kept separate
     # from the stack environment.
     self.tpl_env = Environment(loader=FileSystemLoader(
         self.templates_dir))  # Separate for testing purposes
Exemple #34
0
import os
from os import path
from jinja2 import Environment, FileSystemLoader
import datetime
import random

print(datetime.datetime.now())

# Template loaded from the directory containing this script.
env = Environment(
    loader=FileSystemLoader(path.dirname(__file__), encoding='utf8'))
md_tmpl = env.get_template('template.md')

md_summary_file_path = "content/posts"

today = datetime.datetime.today()

# Random slug to make the generated file name unique.
slug_string = str(random.randint(10000, 1000000))

# Front-matter values for the generated draft post; the date string is
# pinned to UTC+9.
tmpl_title = today.strftime("%Y-%m-%d") + "[draft]"
tmpl_date = today.strftime("%Y-%m-%d") + "T00:00:00+09:00"
tmpl_tag = "diary"
tmpl_archive = today.strftime("%Y-%m")
tmpl_slug = slug_string

filename = today.strftime("%Y%m%d") + "_" + slug_string + ".md"
print(filename)

file_content = md_tmpl.render(title=tmpl_title,
                              date=tmpl_date,
                              tag=tmpl_tag,
                              archive=tmpl_archive,
Exemple #35
0
    def visonic_event_callback_handler(self, visonic_devices, datadictionary):
        """
        Callback invoked by the visonic panel library.

        Dispatches on the type of `visonic_devices`:
          * defaultdict   -- newly discovered sensors and X10 switches
          * SensorDevice  -- update of an individual sensor (not handled yet)
          * X10Device     -- update of an individual X10 device (not handled yet)
          * LogPanelEvent -- one entry of the panel event log
          * int (1..14)   -- general panel condition/update code

        `datadictionary` carries extra panel state that is forwarded on the
        event bus for the int condition codes.
        """
        import custom_components.visonic.pyvisonic as visonicApi   # Connection to python Library

        # Check to ensure variables are set correctly
        # (identity comparison `is None` instead of `== None`).
        if self.hass is None:
            _LOGGER.warning("Visonic attempt to add device when hass is undefined")
            return
        if visonic_devices is None:
            _LOGGER.warning("Visonic attempt to add device when sensor is undefined")
            return
        # Exact type checks kept deliberately so dispatch semantics are
        # unchanged (e.g. plain dicts are not treated as defaultdicts).
        if type(visonic_devices) == defaultdict:
            # a set of sensors and/or switches.
            #_LOGGER.info("Visonic got new sensors {0}".format( visonic_devices["sensor"] ))

            for dev in visonic_devices["sensor"]:
                if dev.getDeviceID() is None:
                    _LOGGER.info("     Sensor ID is None")
                else:
                    _LOGGER.info("     Sensor {0}".format( str(dev) ))
                    if dev.getDeviceID() not in self.exclude_sensor_list:
                        if dev not in self.hass.data[DOMAIN]["binary_sensor"]:
                            #_LOGGER.info("     Added to dispatcher")
                            #async_dispatcher_send(self.hass, "visonic_new_binary_sensor", dev)
                            self.hass.data[DOMAIN]["binary_sensor"].append(dev)
                        else:
                            _LOGGER.debug("      Sensor Already in the list")

            #_LOGGER.info("Visonic got new switches {0}".format( visonic_devices["switch"] ))
            for dev in visonic_devices["switch"]:
                #_LOGGER.info("VS: X10 Switch list {0}".format(dev))
                if dev.enabled and dev.getDeviceID() not in self.exclude_x10_list:
                    if dev not in self.hass.data[DOMAIN]["switch"]:
                        self.hass.data[DOMAIN]["switch"].append(dev)
                    else:
                        _LOGGER.debug("      X10 Already in the list")

            self.hass.async_create_task( self.hass.config_entries.async_forward_entry_setup(self.entry, "binary_sensor") )
            self.hass.async_create_task( self.hass.config_entries.async_forward_entry_setup(self.entry, "switch") )

        elif type(visonic_devices) == visonicApi.SensorDevice:
            # This is an update of an existing sensor device
            _LOGGER.info("Individual Sensor update {0} not yet included".format( visonic_devices ))

        elif type(visonic_devices) == visonicApi.X10Device:
            # This is an update of an existing x10 device
            _LOGGER.info("Individual X10 update {0} not yet included".format( visonic_devices ))

        elif type(visonic_devices) == visonicApi.LogPanelEvent:
            # This is an event log
            _LOGGER.debug("Panel Event Log {0}".format( visonic_devices ))
            reverse = self.toBool(self.config.get(CONF_LOG_REVERSE))
            total = min(visonic_devices.total, self.config.get(CONF_LOG_MAX_ENTRIES))
            current = visonic_devices.current   # only used for output and not for logic
            if reverse:
                current = total + 1 - visonic_devices.current
            # Fire event visonic_alarm_panel_event_log
            if self.toBool(self.config.get(CONF_LOG_EVENT)) and visonic_devices.current <= total:
                # BUG FIX: was `hass.bus.fire` -- no local or global `hass`
                # exists here, so this raised NameError; use self.hass.
                self.hass.bus.fire('visonic_alarm_panel_event_log_entry', {
                    'current': current,
                    'total': total,
                    'date': visonic_devices.date,
                    'time': visonic_devices.time,
                    'partition': visonic_devices.partition,
                    'zone': visonic_devices.zone,
                    'event': visonic_devices.event
                })

            # Write out to an xml file
            if visonic_devices.current==1:
                self.templatedata = []
                self.csvdata = ""

            if self.csvdata is not None:
                if reverse:
                    self.csvdata = "{0}, {1}, {2}, {3}, {4}, {5}, {6}\n".format(current, total, visonic_devices.partition, visonic_devices.date, visonic_devices.time, visonic_devices.zone, visonic_devices.event ) + self.csvdata
                else:
                    self.csvdata = self.csvdata + "{0}, {1}, {2}, {3}, {4}, {5}, {6}\n".format(current, total, visonic_devices.partition, visonic_devices.date, visonic_devices.time, visonic_devices.zone, visonic_devices.event )

                datadict = {
                  "partition" : "{0}".format(visonic_devices.partition),
                  "current"   : "{0}".format(current),
                  "date"      : "{0}".format(visonic_devices.date),
                  "time"      : "{0}".format(visonic_devices.time),
                  "zone"      : "{0}".format(visonic_devices.zone),
                  "event"     : "{0}".format(visonic_devices.event)
                }

                self.templatedata.append(datadict)

                if visonic_devices.current == total:
                    # create a new XML file with the results
                    if len(self.config.get(CONF_LOG_XML_FN)) > 0:
                        if reverse:
                            self.templatedata.reverse()
                        try:
                            file_loader = FileSystemLoader(['./templates', self.hass.config.path()+'/templates', './xml', self.hass.config.path()+'/xml', './www', self.hass.config.path()+'/www', '.', self.hass.config.path(), './custom_components/visonic', self.hass.config.path()+'/custom_components/visonic'], followlinks=True)
                            env = Environment(loader=file_loader)
                            template = env.get_template('visonic_template.xml')
                            output = template.render(entries=self.templatedata, total=total, available="{0}".format(visonic_devices.total))
                            # `with` closes the file; the explicit close()
                            # in the original was redundant.
                            with open(self.config.get(CONF_LOG_XML_FN), "w") as f:
                                f.write(output.rstrip())
                        # BUG FIX: bare `except:` also swallowed
                        # KeyboardInterrupt/SystemExit; catch Exception.
                        except Exception:
                            _LOGGER.debug("Panel Event Log - Failed to write XML file")
                    if len(self.config.get(CONF_LOG_CSV_FN)) > 0:
                        try:
                            if self.toBool(self.config.get(CONF_LOG_CSV_TITLE)):
                                self.csvdata = "current, total, partition, date, time, zone, event\n" + self.csvdata
                            with open(self.config.get(CONF_LOG_CSV_FN), "w") as f:
                                f.write(self.csvdata.rstrip())
                        except Exception:
                            _LOGGER.debug("Panel Event Log - Failed to write CSV file")
                    self.csvdata = None
                    if self.toBool(self.config.get(CONF_LOG_DONE)):
                        self.hass.bus.fire('visonic_alarm_panel_event_log_complete', {
                            'total': total,
                            'available': visonic_devices.total,
                        })

        elif type(visonic_devices) == int:
            tmp = int(visonic_devices)
            if 1 <= tmp <= 14:
                # General update trigger
                #    1 is a zone update, 
                #    2 is a panel update AND the alarm is not active, 
                #    3 is a panel update AND the alarm is active, 
                #    4 is the panel has been reset, 
                #    5 is pin rejected, 
                #    6 is tamper triggered
                #    7 is download timer expired
                #    8 is watchdog timer expired, give up trying to achieve a better mode
                #    9 is watchdog timer expired, going to try again to get a better mode
                #   10 is a comms problem, we have received no data so plugin has suspended itself
                tmpdict = datadictionary.copy()

                tmpdict['condition'] = tmp
                _LOGGER.info("Visonic update event {0} {1}".format(tmp, tmpdict))
                self.hass.bus.fire(self.visonic_event_name, tmpdict)

                if tmp == 10:
                    message = 'Failed to connect to your Visonic Alarm. We have not received any data from the panel at all, not one single byte.'
                    _LOGGER.error(message)
                    self.hass.components.persistent_notification.create(
                        message,
                        title=NOTIFICATION_TITLE,
                        notification_id=NOTIFICATION_ID)
        else:
            _LOGGER.warning("Visonic attempt to add device with type {0}  device is {1}".format(type(visonic_devices), visonic_devices ))
Exemple #36
0
 def get_source(self, environment, template):
     """
     Load template source via the base FileSystemLoader, guaranteeing the
     returned source always begins with a newline.
     """
     loaded = FileSystemLoader.get_source(self, environment, template)
     source, path, validator = loaded
     if source and not source.startswith("\n"):
         return "\n" + source, path, validator
     return source, path, validator
Exemple #37
0
            actions.append(
                ParameterAction(
                    name=parameter_name,
                    description=parameter_description,
                    default_value=parameter_default_value,
                ))
            registered_names.append(parameter_name)

    print("[INFO] Added the following parameters for child jobs:\n{}".format(
        ", ".join(registered_names)))
    actions.append(ChildJobAction(job_config["children"]))

# Aggregate the collected actions.
# NOTE(review): MultiAction presumably runs them together -- confirm.
generator = MultiAction(actions)

# Jinja2 environment over the templates shipped next to this script.
template_dir = abspath(join(dirname(__file__), 'templates'))
env = Environment(loader=FileSystemLoader(template_dir))

# Determine whether this job merges or tests, and against which repo.
action = None
target_repo = None
if "merge" in job_config:
    action = "merge"
    target_repo = job_config["merge"]
if "test" in job_config:
    action = "test"
    target_repo = job_config["test"]

# Identity comparison with None (PEP 8) instead of `!= None`.
if action is not None:
    print("[INFO] Marking this as a {} job for the {} repo".format(
        action, target_repo))

DEFAULT_DESCRIPTION = "<div style=\"font-size: 32px; line-height: 1.5em; background-color: yellow; padding: 5px;\">" + \
Exemple #38
0
 def get_template(self, name):
     """
     Load `name` from the current working directory via a fresh
     FileSystemLoader, using the configured jinja2 environment.
     """
     cwd_loader = FileSystemLoader(os.getcwd())
     return cwd_loader.load(self.settings['jinja2_env'], name)
Exemple #39
0
import numpy as np
import matplotlib.pyplot as plt
import math

from lib import year_classifier as year_classifier

from jinja2 import Environment, FileSystemLoader

# In[ ]:

# gen html
# http://pbpython.com/pdf-reports.html

# Reports folder: Jinja2 templates are loaded from the AnalysisTool
# directory (path is relative to the working directory).
dir_reports = '../AnalysisTool/'
fsl = FileSystemLoader(dir_reports)
env = Environment(loader=fsl)

# In[ ]:

from scipy.interpolate import interp1d
from scipy.signal import medfilt
from scipy.signal import convolve


def append_mid_year(df, column='mid_year'):
    '''
    Adds an int column (default "mid_year") to the table: the midpoint of
    from_year and to_year, rounded to the nearest whole year (exact
    half-years round down, since 0.49 is added before int truncation).

    BUG FIX: the original read back the hard-coded 'mid_year' column on
    the second line, so any custom `column` name raised a KeyError (or
    silently used stale data).
    '''
    df[column] = (df['from_year'] + df['to_year']) / 2
    df[column] = (df[column] + 0.49).astype(int)
Exemple #40
0
def get_jinja_env():
    """
    Construct the application's Jinja2 environment: templates come from
    the local 'templates' directory, with HTML/XML output auto-escaped.
    """
    loader = FileSystemLoader('templates')
    return Environment(loader=loader,
                       autoescape=select_autoescape(['html', 'xml']))
Exemple #41
0
        total = statistics.find('total')
        for item in total:
            if 'All Tests' == item.text:
                fa = int(item.attrib['fail'])
                pa = int(item.attrib['pass'])
                tot = fa + pa
                rate = '%.2f' % (pa * 100 / tot)
                if fa != 0:
                    result = 'Fail'
                else:
                    result = 'Pass'
        break
    time.sleep(1)
    wait_time -= 1
# Render the report template with the collected test results.
env = Environment(loader=FileSystemLoader('./templates'))
template = env.get_template('template.html')
html = template.render(project_name='yj',
                       fa=fa,
                       pa=pa,
                       tot=tot,
                       rate=rate,
                       result=result,
                       build_no=build_no,
                       build_url=sys.argv[1],
                       log_url=os.path.join(os.getcwd(), 'log.html'))
with open('daily_report.html', 'w', encoding='utf8') as f:
    f.write(html)

# 对测试的时间进行检查,过短则发送构建失败的邮件
ret1 = requests.get(crumb_url)
 def render_template(self, data):
     """
     Render self.template with `data`, exposing the instance's
     datetimeformat filter to the template.
     """
     self.log.debug('Rendering template')
     environment = Environment(loader=FileSystemLoader(self.templates_path))
     environment.filters['datetimeformat'] = self.datetimeformat
     return environment.get_template(self.template).render(data=data)
Exemple #43
0
def make_renderer(
    searchpath="templates",
    outpath=".",
    contexts=None,
    rules=None,
    encoding="utf8",
    extensions=None,
    staticpath=None,
):
    """Get a Renderer object.

    :param searchpath: the name of the directory to search for templates.
                       Defaults to ``'templates'``.

    :param outpath: the name of the directory to store the rendered files in.
                    Defaults to ``'.'``.

    :param contexts: list of *(regex, function)* pairs. When rendering, if a
                     template's name matches *regex*, *function* will be
                     invoked and expected to provide a context. *function*
                     should optionally take a Template as a parameter and
                     return a dictionary context when invoked. Defaults to
                     ``[]``.

    :param rules: list of *(regex, function)* pairs. When rendering, if a
                  template's name matches *regex*, rendering will delegate to
                  *function*. *function* should take a jinja2 Environment, a
                  filename, and a context and render the template. Defaults to
                  ``[]``.

    :param encoding: the encoding of templates to use. Defaults to ``'utf8'``.

    :param extensions: list of extensions to add to the Environment. Defaults to
                       ``[]``.
    :param staticpath: the name of the directory to get static files from
                       (relative to searchpath). Defaults to ``None``.


    """
    # Coerce search to an absolute path if it is not already
    if not os.path.isabs(searchpath):
        # TODO: Determine if there is a better way to write do this
        calling_module = inspect.getmodule(inspect.stack()[-1][0])
        # Absolute path to project
        project_path = os.path.realpath(
            os.path.dirname(calling_module.__file__))
        searchpath = os.path.join(project_path, searchpath)

    loader = FileSystemLoader(searchpath=searchpath, encoding=encoding)
    environment = Environment(loader=loader, extensions=extensions or [])
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    # BUG FIX: only attach a handler once -- the original added a new
    # StreamHandler on every call, duplicating log output on reuse.
    if not logger.handlers:
        logger.addHandler(logging.StreamHandler())
    return Renderer(
        environment,
        searchpath=searchpath,
        outpath=outpath,
        encoding=encoding,
        logger=logger,
        rules=rules,
        contexts=contexts,
        staticpath=staticpath,
    )
Exemple #44
0
    def __init__(self, settings=None):
        # Application settings; must provide TEMPLATES_PATH (raises
        # AttributeError if `settings` is left as None).
        self.settings = settings
        # NOTE(review): purpose of `subs` is not visible here -- presumably
        # registered sub-handlers; confirm against callers.
        self.subs = list()
        self.app = self

        self.loader = FileSystemLoader(settings.TEMPLATES_PATH)
Exemple #45
0
from jinja2 import FileSystemLoader, Environment

loader = FileSystemLoader('src/widget/login/templates')
env = Environment(loader=loader)

"""
Login widget provides different types of login form.
See available lists of login form type
Usage: LoginWidget(template='a_login.html', method='<POST, GET>', action='<Target URL>')

"""
class LoginWidget(object):
  def __init__(self, **options):
    """
    Register a login form and render it immediately into ``self.output``.

    Keyword options:
      template: one of the available template file names (required).
      method:   form method, 'POST' or 'GET' (optional).
      action:   target URL, passed through to the template.

    Raises:
      KeyError:  if ``template`` is missing or not an available template.
      TypeError: if ``method`` is given but is not 'POST' or 'GET'.
    """
    self.__available_templates = ['a_login.html', 'b_login.html']
    # Fetch the option safely: the previous options['template'] access in
    # the error branch raised a bare KeyError('template') before the
    # descriptive message could be built when the key was absent.
    template = options.get('template')
    if template in self.__available_templates:
      # Warn (by raising) if the form method is not valid.
      self.__available_methods = ['POST', 'GET']
      if 'method' in options and options['method'] not in self.__available_methods:
        raise TypeError(f"Invalid form method for {options['method']}")

      tpl = env.get_template(template)
      self.output = tpl.render(**options)
    else:
      raise KeyError(f"No template name {template}")
Exemple #46
0
 def __init__(self, proxied_url, cas_info, 
         fqdn=None, authorities=None, plugins=None, is_https=True,
         excluded_resources=None, excluded_branches=None,
         remote_user_header=None, logout_patterns=None,
         logout_passthrough=False,
         template_dir=None, template_resource='/_templates',
         proxy_client_endpoint_s=None, cas_client_endpoint_s=None):
     # Configure a CAS-authenticating reverse proxy for *proxied_url*:
     # template machinery, logout handling, plugin ordering, and static
     # resources.  (Python 2 code: ``urlparse`` module, ``dict.iteritems``.)
     self.proxy_client_endpoint_s = proxy_client_endpoint_s
     self.cas_client_endpoint_s = cas_client_endpoint_s
     self.logout_passthrough = logout_passthrough
     self.template_dir = template_dir
     if template_dir is not None:
         # Trailing-underscore attributes hold the Jinja2 machinery.
         self.template_loader_ = FileSystemLoader(template_dir)
         self.template_env_ = Environment()
         self.templateStaticResource_ = self.create_template_static_resource()
     if template_resource is not None:
         # Normalize to a trailing slash so static_base composes cleanly.
         if not template_resource.endswith('/'):
             template_resource = "{0}/".format(template_resource)
     if template_resource is not None and template_dir is not None:
         # Route <template_resource>static/ to the class-level handler.
         static_base = "{0}static/".format(template_resource)
         self.static = self.app.route(static_base, branch=True)(self.__class__.static)
         self.static_base = static_base
     self.template_resource = template_resource
     if logout_patterns is not None:
         self.logout_patterns = [parse_url_pattern(pattern) for pattern in logout_patterns]
     # NOTE(review): when logout_patterns is None, self.logout_patterns is
     # never assigned above but is still iterated below -- presumably a
     # class-level default exists; confirm, otherwise this raises
     # AttributeError.
     for pattern in self.logout_patterns:
         assert pattern is None or pattern.scheme == '', (
             "Logout pattern '{0}' must be a relative URL.".format(pattern))
     if remote_user_header is not None:
         self.remoteUserHeader = remote_user_header
     self.excluded_resources = excluded_resources
     self.excluded_branches = excluded_branches
     self.is_https = is_https
     # Strip a single trailing slash so later path joins don't double up.
     if proxied_url.endswith('/'):
         proxied_url = proxied_url[:-1]
     self.proxied_url = proxied_url
     p = urlparse.urlparse(proxied_url)
     self.p = p
     self.proxied_scheme = p.scheme
     netloc = p.netloc
     self.proxied_netloc = netloc
     self.proxied_host = netloc.split(':')[0]
     self.proxied_path = p.path
     self.cas_info = cas_info
     # Lower-cased CAS protocol query-parameter names (ticket, service,
     # renew, pgtUrl) collected for later inspection/stripping.
     cas_param_names = set([])
     cas_param_names.add(self.ticket_name.lower())
     cas_param_names.add(self.service_name.lower())
     cas_param_names.add(self.renew_name.lower())
     cas_param_names.add(self.pgturl_name.lower())
     self.cas_param_names = cas_param_names
     if fqdn is None:
         fqdn = socket.getfqdn()
     self.fqdn = fqdn
     self.valid_sessions = {}
     self.logout_tickets = {}
     self._make_agents(authorities)
     # Sort/tag plugins
     if plugins is None:
         plugins = []
     content_modifiers = []
     info_acceptors = []
     cas_redirect_handlers = []
     interceptors = []
     access_control = []
     # A plugin may provide several interfaces; it is filed under each one
     # it declares.
     for plugin in plugins:
         if IResponseContentModifier.providedBy(plugin):
             content_modifiers.append(plugin)
         if IRProxyInfoAcceptor.providedBy(plugin):
             info_acceptors.append(plugin)
         if ICASRedirectHandler.providedBy(plugin):
             cas_redirect_handlers.append(plugin)
         if IResourceInterceptor.providedBy(plugin):
             interceptors.append(plugin)
         if IAccessControl.providedBy(plugin):
             access_control.append(plugin)
     self.info_acceptors = info_acceptors
     # Each plugin family runs in its declared sequence order.
     content_modifiers.sort(key=lambda x: x.mod_sequence)
     self.content_modifiers = content_modifiers
     cas_redirect_handlers.sort(key=lambda x: x.cas_redirect_sequence)
     self.cas_redirect_handlers = cas_redirect_handlers
     interceptors.sort(key=lambda x: x.interceptor_sequence)
     self.interceptors = interceptors
     access_control.sort(key=lambda x: x.ac_sequence)
     self.access_control = access_control
     # Create static resources.
     # Two plugins may share a base only if they serve the same directory.
     static_resources = {}
     for plugin in plugins:
         if IStaticResourceProvider.providedBy(plugin):
             if plugin.static_resource_base in static_resources:
                 if static_resources[plugin.static_resource_base] != plugin.static_resource_dir:
                     raise Exception("Static resource conflict for '{0}': '{1}' != '{2}'".format(
                         plugin.static_resource_base,
                         static_resources[plugin.static_resource_base],
                         plugin.static_resource_dir))
             else:
                 static_resources[plugin.static_resource_base] = plugin.static_resource_dir
     self.static_handlers = []
     # NOTE(review): the lambda below closes over the loop variable
     # ``resource_dir`` late -- after the loop, every handler serves the
     # directory of the *last* iteration.  Binding it as a default argument
     # (lambda self, request, d=resource_dir: File(d)) would fix this;
     # confirm intended behavior before changing.
     for n, (resource_base, resource_dir) in enumerate(static_resources.iteritems()):
         handler = lambda self, request: File(resource_dir)
         handler = self.app.route(resource_base, branch=True)(handler)
         self.static_handlers.append(handler)
Exemple #47
0
def main(input_module,
         files=None,
         out_dir=None,
         verbose=False,
         substitutions={}):
    # Render each file in *files* through the templates/units/sections
    # exported by the module named *input_module*, writing results into
    # *out_dir*.  With no files, print the valid section keys instead.
    # (Python 2 code: print statements, bare ``reduce``, py2 ``map``.)
    # NOTE(review): ``substitutions={}`` is a mutable default argument, and
    # the parameter appears unused in this body -- confirm before removing.
    if out_dir and not os.path.exists(out_dir):
        os.makedirs(out_dir)

    in_mod = importlib.import_module(input_module)

    # add a template filter to produce pretty JSON
    def jsonify(input, indent=None, pre_whitespace=0):
        code = json.dumps(input, indent=indent, sort_keys=True)
        # Shift every continuation line right so the JSON block lines up
        # with its insertion point in the template.
        if pre_whitespace:
            code = code.replace("\n", ("\n" + " " * pre_whitespace))

        return code

    # Filter: indent every line after the first by *indent* spaces.
    def indent_block(input, indent):
        return input.replace("\n", ("\n" + " " * indent))

    # Filter: indent only the first line by *indent* spaces.
    def indent(input, indent):
        return " " * indent + input

    # Filter: word-wrap *input* to *wrap* columns, preserving blank-line
    # paragraph breaks and continuing bullet items at their indent.
    def wrap(input, wrap=80, initial_indent=""):
        if len(input) == 0:
            return initial_indent
        # TextWrapper collapses newlines into single spaces; we do our own
        # splitting on newlines to prevent this, so that newlines can actually
        # be intentionally inserted in text.
        input_lines = input.split('\n\n')
        wrapper = TextWrapper(initial_indent=initial_indent, width=wrap)
        output_lines = [wrapper.fill(line) for line in input_lines]

        for i in range(len(output_lines)):
            line = output_lines[i]
            in_bullet = line.startswith("- ")
            if in_bullet:
                output_lines[i] = line.replace("\n", "\n  " + initial_indent)

        return '\n\n'.join(output_lines)

    def fieldwidths(input, keys, defaults=[], default_width=15):
        """
        A template filter to help in the generation of tables.

        Given a list of rows, returns a list giving the maximum length of the
        values in each column.

        :param list[dict[str, str]] input: a list of rows. Each row should be a
           dict with the keys given in ``keys``.
        :param list[str] keys: the keys corresponding to the table columns
        :param list[int] defaults: for each column, the default column width.
        :param int default_width: if ``defaults`` is shorter than ``keys``, this
           will be used as a fallback
        """
        def colwidth(key, default):
            return reduce(max, (len(row[key]) for row in input),
                          default if default is not None else default_width)

        # Python 2 ``map`` pads the shorter of keys/defaults with None,
        # which colwidth turns into default_width.
        results = map(colwidth, keys, defaults)
        return results

    # make Jinja aware of the templates and filters
    # StrictUndefined makes a missing template variable a hard error.
    env = Environment(loader=FileSystemLoader(in_mod.exports["templates"]),
                      undefined=StrictUndefined)
    env.filters["jsonify"] = jsonify
    env.filters["indent"] = indent
    env.filters["indent_block"] = indent_block
    env.filters["wrap"] = wrap
    env.filters["fieldwidths"] = fieldwidths

    # load up and parse the lowest single units possible: we don't know or care
    # which spec section will use it, we just need it there in memory for when
    # they want it.
    units = AccessKeyStore(existing_data=in_mod.exports["units"](
        debug=verbose).get_units())

    # use the units to create RST sections
    sections = in_mod.exports["sections"](env, units,
                                          debug=verbose).get_sections()

    # print out valid section keys if no file supplied
    for each key, short values are shown inline; long ones get stats only.
    if not files:
        print "\nValid template variables:"
        for key in sections.keys():
            sec_text = "" if (len(sections[key]) > 75) else ("(Value: '%s')" %
                                                             sections[key])
            sec_info = "%s characters" % len(sections[key])
            if sections[key].count("\n") > 0:
                sec_info += ", %s lines" % sections[key].count("\n")
            print "  %s" % key
            print "      %s %s" % (sec_info, sec_text)
        return

    # check the input files and substitute in sections where required
    for input_filename in files:
        output_filename = os.path.join(out_dir,
                                       os.path.basename(input_filename))
        process_file(env, sections, input_filename, output_filename)

    check_unaccessed("units", units)
Exemple #48
0
def _setup_jinja2():
    """Build and return the Jinja2 environment that loads templates
    from ``TEMPLATES_PATH``."""
    loader = FileSystemLoader(TEMPLATES_PATH)
    return Environment(loader=loader)
Exemple #49
0
    source_time = datetime.strptime(
        r.headers['last-modified'],
        #Fri, 27 Mar 2015 08:05:42 GMT
        '%a, %d %b %Y %X %Z')

    r = requests.head(
        "https://transparenzrh.eu-central-1.linodeobjects.com/bikepedestrian/FZS_MILI.csv"
    )
    cached_time = datetime.strptime(
        r.headers['last-modified'],
        #Fri, 27 Mar 2015 08:05:42 GMT
        '%a, %d %b %Y %X %Z')
    # Load Sensors from GeoData portal via geo.pipeline.yaml if file has been updated today

    # if source_time >= cached_time:
    env = Environment(loader=FileSystemLoader('src/descriptors'))
    template = env.get_template(f"geo.pipeline.yaml")
    recipe = template.render(
        params={"thisyear": int(datetime.strftime(date.today(), "%Y"))})
    target = Pipeline(yaml.full_load(recipe)).run()

    # check if pipeline is valid
    if not target.valid:
        print(target)
    else:
        try:
            # convert resource from pipeline into python dictionary
            sensors = target.task.target.to_inline(dialect=dict(keyed=True))
            for sensor in sensors:
                print(f"processing {sensor}...")
                template = env.get_template(f"bikepedestrian.pipeline.yaml")
from jinja2 import Environment, FileSystemLoader

# Template search path, highest priority first: custom templates, the
# notebook root, then IPython's stock nbconvert HTML templates.
my_template_path = '/srv/projects/intro_programming/intro_programming/notebooks/my_templates'
my_template_base_path = '/srv/projects/intro_programming/intro_programming/notebooks'
ipython_template_path = '/srv/projects/intro_programming/venv/lib/python3.4/site-packages/IPython/nbconvert/templates/html'

search_dirs = [my_template_path, my_template_base_path, ipython_template_path]
my_loader = FileSystemLoader(search_dirs)
env = Environment(loader=my_loader)

# Load the index template through the loader directly.
index_template = my_loader.load(env, 'index.tpl')

# Render the template and write the result next to the notebooks.
notebooks_path = '/srv/projects/intro_programming/intro_programming/notebooks/'
filepath = notebooks_path + 'index.html'
with open(filepath, 'w') as f:
    f.write(index_template.render())
Exemple #51
0
import os
import gettext
from jinja2 import Environment, FileSystemLoader

# Templates live in the ``plantillas`` folder next to this module.
current_dir = os.path.dirname(os.path.realpath(__file__))
templates_dir = os.path.join(current_dir, 'plantillas')
file_loader = FileSystemLoader(templates_dir)
# i18n extension enabled; gettext callables are installed just below.
env = Environment(loader=file_loader, extensions=['jinja2.ext.i18n'])
env.globals.update(zip=zip)  # expose the zip() builtin inside templates
env.install_gettext_callables(gettext.gettext, gettext.ngettext)

# Absolute stylesheet paths made available to every template.
env.globals['SemanticCSS'] = os.path.join(templates_dir, 'css', 'semantic.min.css')
env.globals['CustomCSS'] = os.path.join(templates_dir, 'css', 'custom.css')


class Reporte:
    """Render a named Jinja2 template to an HTML string, with optional
    persistence to disk."""

    def __init__(self, template, **kwargs):
        # Resolve the template by name and render it immediately; the
        # result is cached on the instance.
        self._html_str = env.get_template(template).render(**kwargs)

    def html(self):
        """Return the rendered HTML markup."""
        return self._html_str

    def escribir(self, output):
        """Write the rendered HTML to *output* using UTF-8."""
        with open(output, 'w', encoding='utf-8') as destino:
            destino.write(self._html_str)