Example #1
def get_doc_object(obj, what=None, doc=None, config={}, builder=None):
    if what is None:
        if inspect.isclass(obj):
            what = 'class'
        elif inspect.ismodule(obj):
            what = 'module'
        elif isinstance(obj, collections.Callable):
            what = 'function'
        else:
            what = 'object'

    template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
    if builder is not None:
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader)
    config['template'] = template_env.get_template('numpydoc_docstring.rst')

    if what == 'class':
        return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc,
                              config=config)
    elif what in ('function', 'method'):
        return SphinxFunctionDoc(obj, doc=doc, config=config)
    else:
        if doc is None:
            doc = pydoc.getdoc(obj)
        return SphinxObjDoc(obj, doc, config=config)
Example #2
def templating_environment(template_dirs=None):
    """Jinja2 templating environment.
    
    :returns: :obj:`jinja2.environment.Environment`, monkey-patched to
        include a :class:`TemplatePython` object in the ``__tp`` attribute.
    
    """
    
    template_dirs_ = [os.path.realpath(
                        os.path.join(os.path.dirname(__file__), 'templates')
                     )]
    
    if template_dirs is not None:
        template_dirs_.extend(template_dirs)
        
    template_loader = FileSystemLoader(template_dirs_)
    tenv = SandboxedEnvironment(loader=template_loader)
    
    tenv.__tp = TemplatePython(tenv) 
    
    # Ignore protected member access
    # pylint: disable-msg=W0212
    #
    # Add all __template.pys
    for tdir in template_dirs_:
        tp = os.path.realpath(os.path.join(tdir, "__template.py"))
        tenv.__tp.add(tp)
    
    tenv.__tp.pre_template()
    # pylint: enable-msg=W0212
        
    return tenv
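A usage sketch for the helper above, assuming the module (and its TemplatePython helper) is importable; the directory path and template name are hypothetical:

env = templating_environment(template_dirs=["/srv/my_templates"])  # hypothetical path
template = env.get_template("report.txt")                          # hypothetical template name
print(template.render(title="Example"))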
Example #3
def process_template(content):
    env = SandboxedEnvironment()
    template = env.from_string(content)
    context = {
        'current_user_id': current_user_id,
        'current_username': current_username,
    }
    return template.render(context)
Example #4
def server(path='/'):
    try:
        account = g.account
        application = g.account.application
        # prepare the jinja environment. This is used for regular routes 
        # and special handlers such as 404
        template_lookup = {t.key: t.jinja2 for t in application.templates}
        loader = DictLoader(template_lookup)
        jinja_env = SandboxedEnvironment(
            extensions=['application.app.pyjade.ext.jinja.PyJadeExtension'],
            loader=loader)
        
        # default helper utils
        path = PathUtil(request.environ)
        get = GetUtil(request)
        static = StaticUtil()

        # load template data. 404 can also use these
        template_data = {}
        template_data['path'] = path
        template_data['get'] = get
        template_data['cms'] = cms
        template_data['static'] = static
        template_data['deployment'] = config.TEMPLATE_GLOBAL_DEPLOYMENT
        template_data['markdown'] = service.markdown

        template_content = {}
        for content in application.static_contents:
            template_content.update(content.data)
        template_data['content'] = template_content

        # find the route with werkzeug
        url_map = Map()
        for route in application.routes:
            # skip numeric rules such as '404'; these are handled via exceptions
            if not route.rule.isnumeric():
                url_map.add(Rule(route.rule, endpoint=route.template_name))
        urls = url_map.bind_to_environ(request.environ)
        endpoint, args = urls.match()
        template_data['path'].add_placeholders(args)

        app_template = jinja_env.get_template(endpoint)
        page_content = app_template.render(**template_data)
        app.record_transfer(page_content)
        return page_content

    except NotFound as e:
        # find the template for a 404 handler if specified
        for route in application.routes:
            if route.rule == '404':
                app_template = jinja_env.get_template(route.template_name)
                not_found_page = app_template.render(**template_data)
                app.record_transfer(not_found_page)
                return not_found_page, 404
        return '404', 404

    except Exception as e:
        return '500 internal error', 500
Example #5
 def serialize_html(self, request, data):
     request.setHeader('content-type', 'text/html')
     
     from jinja2.sandbox import SandboxedEnvironment
     env = SandboxedEnvironment()
     env.filters['urlencode'] = urllib.quote_plus
     template = env.from_string(LIST_TEMPLATE)
     
     return str(template.render(data))
Example #6
 def __init__(self, app):
     template_loader = BuiltinTemplateLoader()
     template_loader.init(app.builder)
     template_env = SandboxedEnvironment(loader=template_loader)
     template_env.filters['escape'] = escape_filter
     template_env.filters['underline'] = underline_filter
     template_env.filters['as_extlink'] = as_extlink_filter
     self.env = template_env
     self.templates = {}
Example #7
def main ():
    loader = FileSystemLoader([".templates"])
    env = SandboxedEnvironment(loader=loader)

    done_files = []

    for orig_module in auto.modules:
        output = orig_module + ".rst"

        f = open(output, "w")

        try:
            mod_base = orig_module.split(".")[-1]
            module = __import__(orig_module, fromlist=mod_base)

            def get_methods_etc (obj):
                if not hasattr(obj, "__dict__"):
                    return ""

                obj_dict = getattr(obj, "__dict__")
                return ", ".join(set(obj_dict.keys()))

            def get_members (obj, typ):
                items = [
                    (name, get_methods_etc(getattr(obj, name))) for name in dir(obj)
                    if get_documenter(getattr(obj, name), obj).objtype == typ
                ]

                return items

            ns = {}
            ns['members'] = dir(module)
            ns['functions'] = get_members(module, 'function')
            ns['classes'] = get_members(module, 'class')
            ns['exceptions'] = get_members(module, 'exception')
            ns['name'] = mod_base
            ns['fullname'] = orig_module
            ns['name_underline'] = "=" * len(mod_base)
            ns['fullname_underline'] = "=" * len(orig_module)
            ns['mod_underline'] = "=" * (len(mod_base) + len(":mod:``"))

            template = env.get_template("module.rst")
            rendered = template.render(**ns)
            f.write(rendered)
        finally:
            f.close()
            done_files.append(output)

    summary_template = open("summary.rst.template").read()
    modules = ""

    for module in done_files:
        modules += "    %s\n" % module

    f = open("summary.rst", "w")
    f.write(summary_template % {"files": modules})
    f.close()
Example #8
 def load_config(self, config):
     self.use_plots = config.get('use_plots', False)
     self.class_members_toctree = config.get('class_members_toctree', True)
     self.template = config.get('template', None)
     if self.template is None:
         template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
         template_loader = FileSystemLoader(template_dirs)
         template_env = SandboxedEnvironment(loader=template_loader)
         self.template = template_env.get_template('numpydoc_docstring.rst')
Example #9
class BaseRenderer(object):
    def __init__(self, loader=None):
        self.env = SandboxedEnvironment(loader=loader)
        self.env.filters['repr'] = repr

    def render(self, template_name, context):
        return self.env.get_template(template_name).render(context)

    def render_string(self, source, context):
        return self.env.from_string(source).render(context)
Example #10
 def load_config(self, config):
     self.use_plots = config.get('use_plots', False)
     self.use_blockquotes = config.get('use_blockquotes', False)
     self.class_members_toctree = config.get('class_members_toctree', True)
     self.attributes_as_param_list = config.get('attributes_as_param_list', True)
     self.xref_param_type = config.get('xref_param_type', False)
     self.xref_aliases = config.get('xref_aliases', dict())
     self.xref_ignore = config.get('xref_ignore', set())
     self.template = config.get('template', None)
     if self.template is None:
         template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
         template_loader = FileSystemLoader(template_dirs)
         template_env = SandboxedEnvironment(loader=template_loader)
         self.template = template_env.get_template('numpydoc_docstring.rst')
Example #11
class BaseRenderer(object):
    def __init__(self, loader=None):
        # type: (BaseLoader) -> None
        self.env = SandboxedEnvironment(loader=loader, extensions=['jinja2.ext.i18n'])
        self.env.filters['repr'] = repr
        self.env.install_gettext_translations(get_translator())  # type: ignore

    def render(self, template_name, context):
        # type: (unicode, Dict) -> unicode
        return self.env.get_template(template_name).render(context)

    def render_string(self, source, context):
        # type: (unicode, Dict) -> unicode
        return self.env.from_string(source).render(context)
Example #12
 def _render_files(self, template_args, output_dir):
     env = SandboxedEnvironment(
         loader=FileSystemLoader(self.template_dir),
         extensions=[],
     )
     try:
         for template in self.template_files():
             output_str = env.get_template(template).render(template_args)
             output_file = output_dir + "/" + template.rstrip(".j2")
             with open(output_file, "w+") as f:
                 f.write(output_str)
     except TemplateSyntaxError as e:
         print("[{}:{}] {} ".format(e.filename, e.lineno, e.message))
         sys.exit(1)
Example #13
    def __init__(self, text):
        self.valid_parameters = []
        self.valid_blocks = []

        text = self._escape_text(text.encode('utf-8')).strip()
        text = text.decode('utf-8')
        template_environment = SandboxedEnvironment()

        try:
            self.template = template_environment.from_string(text)
            self.error = None
        except TemplateSyntaxError as e:
            self.template = None
            self.error = e
Example #14
 def test_binary_operator_intercepting(self, env):
     def disable_op(left, right):
         raise TemplateRuntimeError('that operator so does not work')
     for expr, ctx, rv in ('1 + 2', {}, '3'), ('a + 2', {'a': 2}, '4'):
         env = SandboxedEnvironment()
         env.binop_table['+'] = disable_op
         t = env.from_string('{{ %s }}' % expr)
         assert t.render(ctx) == rv
         env.intercepted_binops = frozenset(['+'])
         t = env.from_string('{{ %s }}' % expr)
         try:
             t.render(ctx)
         except TemplateRuntimeError as e:
             pass
         else:
             assert False, 'expected runtime error'
Example #15
    def init(self, builder, theme=None, dirs=None):
        # create a chain of paths to search
        if theme:
            # the theme's own dir and its bases' dirs
            chain = theme.get_dirchain()
            # then the theme parent paths
            chain.extend(theme.themepath)
        elif dirs:
            chain = list(dirs)
        else:
            chain = []

        # prepend explicit template paths
        self.templatepathlen = len(builder.config.templates_path)
        if builder.config.templates_path:
            chain[0:0] = [path.join(builder.confdir, tp)
                          for tp in builder.config.templates_path]

        # store it for use in newest_template_mtime
        self.pathchain = chain

        # make the paths into loaders
        self.loaders = map(SphinxFileSystemLoader, chain)

        use_i18n = builder.app.translator is not None
        extensions = use_i18n and ['jinja2.ext.i18n'] or []
        self.environment = SandboxedEnvironment(loader=self,
                                                extensions=extensions)
        self.environment.filters['tobool'] = _tobool
        self.environment.globals['debug'] = contextfunction(pformat)
        self.environment.globals['accesskey'] = contextfunction(accesskey)
        self.environment.globals['idgen'] = idgen
        if use_i18n:
            self.environment.install_gettext_translations(
                builder.app.translator)
Example #16
 def test_unary_operator_intercepting(self):
     def disable_op(arg):
         raise TemplateRuntimeError('that operator so does not work')
     for expr, ctx, rv in ('-1', {}, '-1'), ('-a', {'a': 2}, '-2'):
         env = SandboxedEnvironment()
         env.unop_table['-'] = disable_op
         t = env.from_string('{{{{ {0!s} }}}}'.format(expr))
         assert t.render(ctx) == rv
         env.intercepted_unops = frozenset(['-'])
         t = env.from_string('{{{{ {0!s} }}}}'.format(expr))
         try:
             t.render(ctx)
         except TemplateRuntimeError as e:
             pass
         else:
             self.fail('expected runtime error')
Example #17
 def test_unsafe(self):
     env = SandboxedEnvironment()
     self.assert_raises(SecurityError, env.from_string('{{ foo.foo() }}').render, foo=PrivateStuff())
     self.assert_equal(env.from_string('{{ foo.bar() }}').render(foo=PrivateStuff()), '23')
     self.assert_raises(SecurityError, env.from_string('{{ foo._foo() }}').render, foo=PublicStuff())
     self.assert_equal(env.from_string('{{ foo.bar() }}').render(foo=PublicStuff()), '23')
     self.assert_equal(env.from_string('{{ foo.__class__ }}').render(foo=42), '')
     self.assert_equal(env.from_string('{{ foo.func_code }}').render(foo=lambda : None), '')
     self.assert_raises(SecurityError, env.from_string('{{ foo.__class__.__subclasses__() }}').render, foo=42)
Example #18
class DocWriter:
    """
    Write module documentation source, using the same template format
    as `sphinx.ext.autosummary
    <www.sphinx-doc.org/en/stable/ext/autosummary.html>`__.
    """

    def __init__(self, outpath, tmpltpath):
        """
        Parameters
        ----------
        outpath : string
          Directory path for RST output files
        tmpltpath : string
          Directory path for autosummary template files
        """

        self.outpath = outpath
        self.template_loader = FileSystemLoader(tmpltpath)
        self.template_env = SandboxedEnvironment(loader=self.template_loader)
        self.template_env.filters['underline'] = _underline
        self.template_env.filters['escape'] = rst_escape
        self.template_env.filters['e'] = rst_escape
        self.template = self.template_env.get_template('module.rst')


    def write(self, module):
        """
        Write the RST source document for generating the docs for
        a specified module.

        Parameters
        ----------
        module : module object
          Module for which member list is to be generated
        """

        modname = module.__name__

        # Based on code in generate_autosummary_docs in https://git.io/fxpJS
        ns = {}
        ns['members'] = dir(module)
        ns['functions'] = list(map(lambda x: x.__name__,
                                   get_module_functions(module)))
        ns['classes'] = list(map(lambda x: x.__name__,
                                 get_module_classes(module)))
        ns['exceptions'] = list(map(lambda x: x.__name__,
                                    get_module_exceptions(module)))
        ns['fullname'] = modname
        ns['module'] = modname
        ns['objname'] = modname
        ns['name'] = modname.split('.')[-1]
        ns['objtype'] = 'module'
        ns['underline'] = len(modname) * '='
        rndr = self.template.render(**ns)

        rstfile = os.path.join(self.outpath, modname + '.rst')
        with open(rstfile, 'w') as f:
            f.write(rndr)
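A usage sketch for the writer above, assuming the helper functions referenced in ``write`` are importable alongside it; the directory paths are hypothetical and the template directory is assumed to contain the module.rst template:

import os

writer = DocWriter(outpath='docs/source/modules',       # hypothetical output dir
                   tmpltpath='docs/source/_templates')  # hypothetical template dir
writer.write(os)  # writes docs/source/modules/os.rst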
Example #19
def get_template_env(app):
    """
    Get the template environment.

    .. note::

       Template should be loaded as a package_data using
       :py:function:`pkgutil.get_data`, but because we want the user to
       override the default template we need to hook it to the Sphinx loader,
       and thus a file system approach is required as it is implemented like
       that.
    """
    template_dir = [join(dirname(abspath(__file__)), 'templates')]
    template_loader = BuiltinTemplateLoader()
    template_loader.init(app.builder, dirs=template_dir)
    template_env = SandboxedEnvironment(loader=template_loader)
    template_env.filters['summary'] = filter_summary
    return template_env
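A sketch of how such a helper could be wired into a Sphinx extension; the event handler, template name, and rendered variable are hypothetical:

def on_builder_inited(app):
    # The builder (and therefore the template loader) exists at this point.
    env = get_template_env(app)
    text = env.get_template('api_index.rst').render(project=app.config.project)
    # ... write ``text`` to a generated .rst file as needed

def setup(app):
    app.connect('builder-inited', on_builder_inited)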
Example #20
def render_in_context(context, template_text, html_intent=False):
    """
    Render the given Jinja2 template text in the script context.

    :param context: Script context.
    :type context: shuup.notify.script.Context
    :param template_text: Jinja2 template text.
    :type template_text: str
    :param html_intent: Is the template text intended for HTML output?
                        This currently turns on autoescaping.
    :type html_intent: bool
    :return: Rendered template text
    :rtype: str
    :raises: Whatever Jinja2 might happen to raise
    """
    # TODO: Add some filters/globals into this environment?
    env = SandboxedEnvironment(autoescape=html_intent)
    template = env.from_string(template_text)
    return template.render(context.get_variables())
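A usage sketch with a stand-in for the script context (the real one is shuup.notify.script.Context); the variable names and values are hypothetical:

class _FakeContext:
    def get_variables(self):
        return {'customer_name': 'Alice', 'order_id': 1234}

text = render_in_context(_FakeContext(),
                         'Hello {{ customer_name }}, order {{ order_id }} has shipped.')
# -> 'Hello Alice, order 1234 has shipped.'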
Example #21
def _render_template(thread, template):
    """Render given iterable of templates in a sandboxed environment.

    :param JobThread thread:   JobThread instance the templates refer to
    :param template:           Template we need to render.

    :returns: Rendered template(s)
    """
    sandbox = SandboxedEnvironment()
    rendered = (
        sandbox.from_string(template).render(
            stage=thread.stage,
            substage=thread.substage,
            distro=thread.distro,
            arch=thread.arch
        )
    )
    if not isinstance(rendered, str):
        rendered = rendered.encode('ascii', 'ignore')
    return rendered
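A usage sketch with types.SimpleNamespace standing in for the real JobThread; the attribute values are hypothetical:

from types import SimpleNamespace

thread = SimpleNamespace(stage='build', substage='compile',
                         distro='debian', arch='amd64')
print(_render_template(thread, '{{ distro }}-{{ arch }}/{{ stage }}'))
# -> debian-amd64/build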
Example #22
    def render(self, context, language_code=DEFAULT_LANG):
        """
        Render this notification template with given context and language

        Returns a dict containing all content fields of the template. Example:

        {'short_message': 'foo', 'subject': 'bar', 'body': 'baz', 'html_body': '<b>foobar</b>'}

        """

        env = SandboxedEnvironment(trim_blocks=True, lstrip_blocks=True, undefined=StrictUndefined)
        env.filters['reservation_time'] = reservation_time
        env.filters['format_datetime'] = format_datetime
        env.filters['format_datetime_tz'] = format_datetime_tz

        logger.debug('Rendering template for notification %s' % self.type)
        with switch_language(self, language_code):
            try:
                rendered_notification = {
                    attr: env.from_string(getattr(self, attr)).render(context)
                    for attr in ('short_message', 'subject', 'html_body')
                }
                if self.body:
                    rendered_notification['body'] = env.from_string(self.body).render(context)
                else:
                    # if text body is empty use html body without tags as text body
                    rendered_notification['body'] = strip_tags(rendered_notification['html_body'])
                return rendered_notification
            except TemplateError as e:
                raise NotificationTemplateException(e) from e
Example #23
 def __init__(self, database=None, query=None, table=None):
     self.database = database
     self.query = query
     self.schema = None
     if query and query.schema:
         self.schema = query.schema
     elif table:
         self.schema = table.schema
     self.context = {}
     self.context.update(BASE_CONTEXT)
     if self.engine:
         self.context[self.engine] = self
     self.env = SandboxedEnvironment()
Example #24
    def test_unsafe(self, env):
        env = SandboxedEnvironment()
        pytest.raises(SecurityError, env.from_string("{{ foo.foo() }}").render,
                      foo=PrivateStuff())
        assert env.from_string("{{ foo.bar() }}").render(foo=PrivateStuff()) == '23'

        pytest.raises(SecurityError,
                      env.from_string("{{ foo._foo() }}").render,
                      foo=PublicStuff())
        assert env.from_string("{{ foo.bar() }}").render(foo=PublicStuff()) == '23'
        assert env.from_string("{{ foo.__class__ }}").render(foo=42) == ''
        assert env.from_string("{{ foo.func_code }}").render(foo=lambda:None) == ''
        # security error comes from __class__ already.
        pytest.raises(SecurityError, env.from_string(
            "{{ foo.__class__.__subclasses__() }}").render, foo=42)
Example #25
class BaseTemplateProcessor(object):
    """Base class for database-specific jinja context

    There's a bit of magic in ``process_template`` that instantiates only
    the database context for the active database as a ``models.Database``
    object and binds it to the context object, so that object methods have
    access to that context. This way, {{ hive.latest_partition('mytable') }}
    just knows about the database it is operating in.

    This means that object methods are only available for the active database
    and are given access to the ``models.Database`` object and schema
    name. For globally available methods use ``@classmethod``.
    """
    engine = None

    def __init__(self, database=None, query=None, table=None, **kwargs):
        self.database = database
        self.query = query
        self.schema = None
        if query and query.schema:
            self.schema = query.schema
        elif table:
            self.schema = table.schema
        self.context = {
            'url_param': url_param,
            'current_user_id': current_user_id,
            'current_username': current_username,
            'filter_values': filter_values,
            'form_data': {},
        }
        self.context.update(kwargs)
        self.context.update(BASE_CONTEXT)
        if self.engine:
            self.context[self.engine] = self
        self.env = SandboxedEnvironment()

    def process_template(self, sql, **kwargs):
        """Processes a sql template

        >>> sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
        >>> process_template(sql)
        "SELECT '2017-01-01T00:00:00'"
        """
        template = self.env.from_string(sql)
        kwargs.update(self.context)
        return template.render(kwargs)
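A sketch of the pattern the class docstring describes: a subclass sets ``engine`` so the instance becomes reachable from templates under that name. It assumes the module-level names used in __init__ (BASE_CONTEXT and friends) are available; latest_partition and its return value are illustrative, not the real Superset implementation:

class HiveTemplateProcessor(BaseTemplateProcessor):
    engine = 'hive'

    def latest_partition(self, table_name):
        # Illustrative placeholder; a real implementation would query the metastore.
        return '20240101'

processor = HiveTemplateProcessor()
sql = processor.process_template(
    "SELECT * FROM mytable WHERE ds = '{{ hive.latest_partition('mytable') }}'")
# -> "SELECT * FROM mytable WHERE ds = '20240101'"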
Example #26
    def __init__(self, outpath, tmpltpath):
        """
        Parameters
        ----------
        outpath : string
          Directory path for RST output files
        tmpltpath : string
          Directory path for autosummary template files
        """

        self.outpath = outpath
        self.template_loader = FileSystemLoader(tmpltpath)
        self.template_env = SandboxedEnvironment(loader=self.template_loader)
        self.template_env.filters['underline'] = _underline
        self.template_env.filters['escape'] = rst_escape
        self.template_env.filters['e'] = rst_escape
        self.template = self.template_env.get_template('module.rst')
Example #27
 def __init__(self, app, extra_context):
     template_loader = BuiltinTemplateLoader()
     template_loader.init(app.builder)
     template_env = SandboxedEnvironment(loader=template_loader)
     template_env.filters['rst_escape'] = rst_escape_filter
     template_env.filters['underline'] = underline_filter
     template_env.filters['as_extlink'] = as_extlink_filter
     template_env.filters['prefixes'] = prefixes_filter
     template_env.filters['rst_link'] = rst_link_filter
     self.env = template_env
     self.templates: Dict[str, Any] = {}
     self.extra_context = extra_context
Example #28
class JinjaProcessor(ComputableInputProcessor):

    def __init__(self):
        self.env = SandboxedEnvironment(trim_blocks=True,
                                        lstrip_blocks=True)
        self._globals = {'make_arr': make_arr}

    def run(self, resource_name, computable_type, funct, data):
        t = self.env.from_string(funct, globals=self._globals)
        if computable_type == ComputablePassedTypes.full.name:
            arr = make_arr(data)
            my_inputs = arr[resource_name]
        else:
            my_inputs = {}
            arr = {}
        return t.render(resource_name=resource_name,
                        D=data,
                        R=arr,
                        **my_inputs).strip()
Example #29
    def init(self, builder, theme=None, dirs=None):
        # type: (Builder, Theme, List[unicode]) -> None
        # create a chain of paths to search
        if theme:
            # the theme's own dir and its bases' dirs
            pathchain = theme.get_theme_dirs()
            # the loader dirs: pathchain + the parent directories for all themes
            loaderchain = pathchain + [path.join(p, '..') for p in pathchain]
        elif dirs:
            pathchain = list(dirs)
            loaderchain = list(dirs)
        else:
            pathchain = []
            loaderchain = []

        # prepend explicit template paths
        self.templatepathlen = len(builder.config.templates_path)
        if builder.config.templates_path:
            cfg_templates_path = [path.join(builder.confdir, tp)
                                  for tp in builder.config.templates_path]
            pathchain[0:0] = cfg_templates_path
            loaderchain[0:0] = cfg_templates_path

        # store it for use in newest_template_mtime
        self.pathchain = pathchain

        # make the paths into loaders
        self.loaders = [SphinxFileSystemLoader(x) for x in loaderchain]

        use_i18n = builder.app.translator is not None
        extensions = use_i18n and ['jinja2.ext.i18n'] or []
        self.environment = SandboxedEnvironment(loader=self,
                                                extensions=extensions)
        self.environment.filters['tobool'] = _tobool
        self.environment.filters['toint'] = _toint
        self.environment.filters['todim'] = _todim
        self.environment.filters['slice_index'] = _slice_index
        self.environment.globals['debug'] = contextfunction(pformat)
        self.environment.globals['warning'] = warning
        self.environment.globals['accesskey'] = contextfunction(accesskey)
        self.environment.globals['idgen'] = idgen
        if use_i18n:
            self.environment.install_gettext_translations(builder.app.translator)  # type: ignore  # NOQA
Example #30
 def __init__(self, database=None, query=None, table=None, **kwargs):
     self.database = database
     self.query = query
     self.schema = None
     if query and query.schema:
         self.schema = query.schema
     elif table:
         self.schema = table.schema
     self.context = {
         'url_param': url_param,
         'current_user_id': current_user_id,
         'current_username': current_username,
         'form_data': {},
     }
     self.context.update(kwargs)
     self.context.update(BASE_CONTEXT)
     if self.engine:
         self.context[self.engine] = self
     self.env = SandboxedEnvironment()
Example #31
 def test_basic_format_safety(self):
     env = SandboxedEnvironment()
     t = env.from_string('{{ "a{x.__class__}b".format_map({"x":42}) }}')
     assert t.render() == "ab"
Example #32
 def test_safe_format_all_okay(self):
     env = SandboxedEnvironment()
     t = env.from_string(
         '{{ ("a{x.foo}b{y}"|safe).format_map({"x":{"foo": 42}, "y":"<foo>"}) }}'
     )
     assert t.render() == "a42b&lt;foo&gt;"
Example #33
 def test_basic_format_all_okay(self):
     env = SandboxedEnvironment()
     t = env.from_string('{{ "a{x.foo}b".format_map({"x":{"foo": 42}}) }}')
     assert t.render() == "a42b"
Example #34
def create_package_file(root, master_package, subroot, py_files, opts, subs,
                        is_namespace):
    # type: (unicode, unicode, unicode, List[unicode], Any, List[unicode], bool) -> None
    """Build the text of the file and write the file."""

    use_templates = False
    if opts.templates:
        use_templates = True
        template_loader = FileSystemLoader(opts.templates)
        template_env = SandboxedEnvironment(loader=template_loader)

    fullname = makename(master_package, subroot)

    text = format_heading(
        1, ('%s package' if not is_namespace else "%s namespace") % fullname)

    if opts.modulefirst and not is_namespace:
        text += format_directive(subroot, master_package)
        text += '\n'

    # build a list of directories that are subpackages (contain an INITPY file)
    subs = [sub for sub in subs if path.isfile(path.join(root, sub, INITPY))]
    # if there are some package directories, add a TOC for these subpackages
    if subs:
        text += format_heading(2, 'Subpackages')
        text += '.. toctree::\n\n'
        for sub in subs:
            text += '    %s.%s\n' % (makename(master_package, subroot), sub)
        text += '\n'

    submods = [
        path.splitext(sub)[0] for sub in py_files
        if not shall_skip(path.join(root, sub), opts) and sub != INITPY
    ]

    if use_templates:
        try:
            package_ns = _get_mod_ns(name=subroot,
                                     fullname=fullname,
                                     includeprivate=opts.includeprivate)
            package_ns['subpackages'] = subs
            package_ns['submodules'] = submods
        except ImportError as e:
            _warn('failed to import %r: %s' % (fullname, e))

    if submods:
        text += format_heading(2, 'Submodules')
        if opts.separatemodules:
            text += '.. toctree::\n\n'
            for submod in submods:
                modfile = makename(master_package, makename(subroot, submod))
                text += '   %s\n' % modfile

                # generate separate file for this module
                if not opts.noheadings:
                    filetext = format_heading(1, '%s module' % modfile)
                else:
                    filetext = ''
                filetext += format_directive(makename(subroot, submod),
                                             master_package)
                if use_templates:
                    try:
                        mod_ns = _get_mod_ns(
                            name=submod,
                            fullname=modfile,
                            includeprivate=opts.includeprivate)
                        template = template_env.get_template('module.rst')
                        filetext = template.render(**mod_ns)
                    except ImportError as e:
                        _warn('failed to import %r: %s' % (modfile, e))
                write_file(modfile, filetext, opts)
        else:
            for submod in submods:
                modfile = makename(master_package, makename(subroot, submod))
                if not opts.noheadings:
                    text += format_heading(2, '%s module' % modfile)
                text += format_directive(makename(subroot, submod),
                                         master_package)
                text += '\n'
        text += '\n'

    if use_templates:
        template = template_env.get_template('package.rst')
        text = template.render(**package_ns)
    else:
        if not opts.modulefirst and not is_namespace:
            text += format_heading(2, 'Module contents')
            text += format_directive(subroot, master_package)

    write_file(makename(master_package, subroot), text, opts)
Example #35
def generate_autosummary_docs(sources,
                              output_dir=None,
                              suffix='.rst',
                              warn=_simple_warn,
                              info=_simple_info,
                              base_path=None,
                              builder=None,
                              template_dir=None):

    showed_sources = list(sorted(sources))
    if len(showed_sources) > 20:
        showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
    info('[autosummary] generating autosummary for: %s' %
         ', '.join(showed_sources))

    if output_dir:
        info('[autosummary] writing to %s' % output_dir)

    if base_path is not None:
        sources = [os.path.join(base_path, filename) for filename in sources]

    # create our own templating environment
    template_dirs = [
        os.path.join(package_dir, 'ext', 'autosummary', 'templates')
    ]
    if builder is not None:
        # allow the user to override the templates
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        if template_dir:
            template_dirs.insert(0, template_dir)
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader)

    # read
    items = find_autosummary_in_files(sources)

    # keep track of new files
    new_files = []

    # write
    for name, path, template_name in sorted(set(items), key=str):
        if path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = output_dir or os.path.abspath(path)
        ensuredir(path)

        try:
            name, obj, parent, mod_name = import_by_name(name)
        except ImportError as e:
            warn('[autosummary] failed to import %r: %s' % (name, e))
            continue

        fn = os.path.join(path, name + suffix)

        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)

        with open(fn, 'w') as f:
            doc = get_documenter(obj, parent)

            if template_name is not None:
                template = template_env.get_template(template_name)
            else:
                try:
                    template = template_env.get_template('autosummary/%s.rst' %
                                                         doc.objtype)
                except TemplateNotFound:
                    template = template_env.get_template(
                        'autosummary/base.rst')

            # Patched get_members according to
            # http://stackoverflow.com/questions/25405110/sphinx-autosummary-with-toctree-also-lists-imported-members/25460763#25460763
            def get_members(obj, typ, include_public=[], imported=False):
                items = []
                for name in dir(obj):
                    try:
                        obj_name = safe_getattr(obj, name)
                        documenter = get_documenter(obj_name, obj)
                    except AttributeError:
                        continue
                    if documenter.objtype == typ:
                        try:
                            cond = (imported
                                    or obj_name.__module__ == obj.__name__)
                        except AttributeError:
                            cond = True
                        if cond:
                            items.append(name)
                public = [
                    x for x in items
                    if x in include_public or not x.startswith('_')
                ]
                return public, items

            ns = {}

            if doc.objtype == 'module':
                ns['members'] = dir(obj)
                ns['functions'], ns['all_functions'] = \
                    get_members(obj, 'function')
                ns['classes'], ns['all_classes'] = \
                    get_members(obj, 'class')
                ns['exceptions'], ns['all_exceptions'] = \
                    get_members(obj, 'exception')
            elif doc.objtype == 'class':
                ns['members'] = dir(obj)
                ns['methods'], ns['all_methods'] = \
                    get_members(obj, 'method', ['__init__'])
                ns['attributes'], ns['all_attributes'] = \
                    get_members(obj, 'attribute')

            parts = name.split('.')
            if doc.objtype in ('method', 'attribute'):
                mod_name = '.'.join(parts[:-2])
                cls_name = parts[-2]
                obj_name = '.'.join(parts[-2:])
                ns['class'] = cls_name
            else:
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

            ns['fullname'] = name
            ns['module'] = mod_name
            ns['objname'] = obj_name
            ns['name'] = parts[-1]

            ns['objtype'] = doc.objtype
            ns['underline'] = len(name) * '='

            rendered = template.render(**ns)
            f.write(rendered)

    # descend recursively to new files
    if new_files:
        generate_autosummary_docs(new_files,
                                  output_dir=output_dir,
                                  suffix=suffix,
                                  warn=warn,
                                  info=info,
                                  base_path=base_path,
                                  builder=builder,
                                  template_dir=template_dir)
Example #36
class BQPipeline(object):
    """
    BigQuery Python SDK Client Wrapper
    Provides methods for running queries, copying and deleting tables.
    Supports Jinja2 templated SQL and enables default project and dataset to
    be set for an entire pipeline.
    """

    def __init__(self,
                 job_name,
                 query_project=None,
                 location='US',
                 default_project=None,
                 default_dataset=None,
                 json_credentials_path=None):
        """
        :param job_name: used as job name prefix
        :param query_project: project used to submit queries
        :param location: BigQuery defaults to 'US'
        :param default_project: project to use when tablespec does not specify
            project
        :param default_dataset: dataset to use when tablespec does not specify
            dataset, if default_project is also set
        :param json_credentials_path: (optional) path to service account JSON
            credentials file
        """
        self.job_id_prefix = job_name + '-'
        self.query_project = query_project
        self.location = location
        if default_project is None and query_project is not None:
            self.default_project = query_project
        else:
            self.default_project = default_project
        self.json_credentials_path = json_credentials_path
        self.default_dataset = default_dataset
        self.bq = None
        self.jinja2 = SandboxedEnvironment()

    def get_client(self):
        """
        Initializes bigquery.Client
        :return bigquery.Client
        """
        if self.bq is None:
            if self.json_credentials_path is not None:
                self.bq = bigquery.Client.from_service_account_json(self.json_credentials_path)
                if self.query_project is not None:
                    self.bq.project = self.query_project
                if self.location is not None:
                    self.bq._location = self.location
            else:
                self.bq = bigquery.Client(project=self.query_project, location=self.location)
        return self.bq

    def resolve_table_spec(self, dest):
        """
        Resolves a full TableSpec from a partial TableSpec by adding default
        project and dataset.
        :param dest: TableSpec string or partial TableSpec string
        :return str TableSpec
        """
        table_id = dest
        if table_id is not None:
            parts = table_id.split('.')
            if len(parts) == 2 and self.default_project is not None:
                table_id = self.default_project + '.' + dest
            elif len(parts) == 1 and \
                self.default_project is not None \
                and self.default_dataset is not None:
                table_id = self.default_project + '.' + self.default_dataset + '.' + dest
        return table_id

    def resolve_dataset_spec(self, dataset):
        """
        Resolves a full DatasetSpec from a partial DatasetSpec by adding default
        project.
        :param dataset: DatasetSpec string or partial DatasetSpec string
        :return str DatasetSpec
        """
        dataset_id = dataset
        if dataset_id is not None:
            parts = dataset_id.split('.')
            if len(parts) == 1 and \
                self.default_project is not None:
                dataset_id = self.default_project + '.' + dataset
        return dataset_id

    def create_dataset(self, dataset, exists_ok=False):
        """
        Creates a BigQuery Dataset from a full or partial dataset spec.
        :param dataset: DatasetSpec string or partial DatasetSpec string
        """
        return self.bq.create_dataset(self.resolve_dataset_spec(dataset),
                                      exists_ok=exists_ok)

    def create_job_config(self, batch=True, dest=None, create=True,
                          overwrite=True, append=False):
        """
        Creates a QueryJobConfig
        :param batch: use QueryPriority.BATCH if true
        :param dest: tablespec of destination table
        :param create: if False, destination table must already exist
        :param overwrite: if False, destination table must not exist
        :param append: if True, destination table will be appended to
        :return: bigquery.QueryJobConfig
        """
        if create:
            create_disp = bigquery.job.CreateDisposition.CREATE_IF_NEEDED
        else:
            create_disp = bigquery.job.CreateDisposition.CREATE_NEVER

        if overwrite:
            write_disp = bigquery.job.WriteDisposition.WRITE_TRUNCATE
        elif append:
            write_disp = bigquery.job.WriteDisposition.WRITE_APPEND
        else:
            write_disp = bigquery.job.WriteDisposition.WRITE_EMPTY

        if batch:
            priority = bigquery.QueryPriority.BATCH
        else:
            priority = bigquery.QueryPriority.INTERACTIVE

        if dest is not None:
            dest_tableref = to_tableref(self.resolve_table_spec(dest))
        else:
            dest_tableref = None

        if self.default_project is not None \
            and self.default_dataset is not None:
            ds = bigquery.dataset.DatasetReference(
                project=self.default_project,
                dataset_id=self.default_dataset
            )
        else:
            ds = None

        return bigquery.QueryJobConfig(priority=priority,
                                       default_dataset=ds,
                                       destination=dest_tableref,
                                       create_disposition=create_disp,
                                       write_disposition=write_disp)

    def run_query(self, path, batch=True, wait=True, create=True,
                  overwrite=True, append=False, timeout=20*60, **kwargs):
        """
        Executes a SQL query from a Jinja2 template file
        :param path: path to sql file or tuple of (path to sql file, destination tablespec)
        :param batch: run query with batch priority
        :param wait: wait for job to complete before returning
        :param create: if False, destination table must already exist
        :param overwrite: if False, destination table must not exist
        :param append: if True, destination table must exist
        :param timeout: time in seconds to wait for job to complete
        :param kwargs: replacements for Jinja2 template
        :return: bigquery.job.QueryJob
        """
        dest = None
        sql_path = path
        if type(path) == tuple:
            sql_path = path[0]
            dest = self.resolve_table_spec(path[1])

        template_str = read_sql(sql_path)
        template = self.jinja2.from_string(template_str)
        query = template.render(**kwargs)
        client = self.get_client()
        job = client.query(query,
                           job_config=self.create_job_config(batch, dest, create, overwrite, append),
                           job_id_prefix=self.job_id_prefix)
        LOGGER.info('Executing query %s %s', sql_path, job.job_id)
        if wait:
            job.result(timeout=timeout)  # wait for job to complete
            job = client.get_job(job.job_id)
            LOGGER.info('Finished query %s %s', sql_path, job.job_id)
        return job

    def run_queries(self, query_paths, batch=True, wait=True, create=True,
                    overwrite=True, append=False, timeout=20*60, **kwargs):
        """
        :param query_paths: List[Union[str,Tuple[str,str]]] path to sql file or
               tuple of (path, destination tablespec)
        :param batch: run query with batch priority
        :param wait: wait for job to complete before returning
        :param create: if False, destination table must already exist
        :param overwrite: if False, destination table must not exist
        :param timeout: time in seconds to wait for job to complete
        :param kwargs: replacements for Jinja2 template
        """
        for path in query_paths:
            self.run_query(path, batch=batch, wait=wait, create=create,
                           overwrite=overwrite, append=append, timeout=timeout, **kwargs)
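A usage sketch for the pipeline above; the project, dataset, SQL file path, and template variable are hypothetical:

pipeline = BQPipeline(job_name='daily-rollup',
                      query_project='my-gcp-project',   # hypothetical project
                      default_dataset='analytics')      # hypothetical dataset
pipeline.run_queries(
    [('sql/rollup.sql.j2', 'my-gcp-project.analytics.daily_rollup')],
    batch=False,
    ds='2024-01-01')  # ``ds`` is substituted into the Jinja2 SQL template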
Example #37
class Fetcher(object):
    def __init__(self, download_size_limit=config.download_size_limit):
        if pycurl:
            httpclient.AsyncHTTPClient.configure(
                'tornado.curl_httpclient.CurlAsyncHTTPClient')
        self.client = httpclient.AsyncHTTPClient()
        self.download_size_limit = download_size_limit
        self.jinja_env = Environment()
        self.jinja_env.globals = utils.jinja_globals
        self.jinja_env.globals.update(utils.jinja_inner_globals)
        self.jinja_env.filters.update(utils.jinja_globals)

    def render(self, request, env, session=[]):
        request = dict(request)
        if isinstance(session, cookie_utils.CookieSession):
            _cookies = session
        else:
            _cookies = cookie_utils.CookieSession()
            _cookies.from_json(session)

        def _render(obj, key):
            if not obj.get(key):
                return
            try:
                obj[key] = self.jinja_env.from_string(obj[key]).render(
                    _cookies=_cookies, **env)
                return True
            except Exception as e:
                log_error = 'The error occurred when rendering template {}: {} \\r\\n {}'.format(
                    key, obj[key], repr(e))
                raise httpclient.HTTPError(500, log_error)

        _render(request, 'method')
        _render(request, 'url')
        for header in request['headers']:
            _render(header, 'name')
            if pycurl and header['name'] and header['name'][0] == ":":
                header['name'] = header['name'][1:]
            _render(header, 'value')
            header['value'] = utils.quote_chinese(header['value'])
        for cookie in request['cookies']:
            _render(cookie, 'name')
            _render(cookie, 'value')
            cookie['value'] = utils.quote_chinese(cookie['value'])
        _render(request, 'data')
        return request

    def build_request(self,
                      obj,
                      download_size_limit=config.download_size_limit,
                      connect_timeout=config.connect_timeout,
                      request_timeout=config.request_timeout,
                      proxy={},
                      CURL_ENCODING=True,
                      CURL_CONTENT_LENGTH=True):
        env = obj['env']
        rule = obj['rule']
        request = self.render(obj['request'], env['variables'], env['session'])

        method = request['method']
        url = request['url']
        if str(url).startswith('api://'):
            url = str(url).replace('api:/', local_host, 1)

        headers = dict((e['name'], e['value']) for e in request['headers'])
        cookies = dict((e['name'], e['value']) for e in request['cookies'])
        data = request.get('data')
        if method == 'GET':
            data = None
        elif method == 'POST':
            data = request.get('data', '')

        def set_curl_callback(curl):
            def size_limit(download_size, downloaded, upload_size, uploaded):
                if download_size and download_size > download_size_limit:
                    return 1
                if downloaded > download_size_limit:
                    return 1
                return 0

            if pycurl:
                if not CURL_ENCODING:
                    try:
                        curl.unsetopt(pycurl.ENCODING)
                    except:
                        pass
                if not CURL_CONTENT_LENGTH:
                    try:
                        if headers.get('content-length'):
                            headers.pop('content-length')
                            curl.setopt(pycurl.HTTPHEADER, [
                                "%s: %s" % (native_str(k), native_str(v))
                                for k, v in HTTPHeaders(headers).get_all()
                            ])
                    except:
                        pass
                curl.setopt(pycurl.NOPROGRESS, 0)
                curl.setopt(pycurl.PROGRESSFUNCTION, size_limit)
                curl.setopt(pycurl.CONNECTTIMEOUT, int(connect_timeout))
                curl.setopt(pycurl.TIMEOUT, int(request_timeout))
                if proxy:
                    if proxy.get('scheme', '') == 'socks5':
                        curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
                    elif proxy.get('scheme', '') == 'socks5h':
                        curl.setopt(pycurl.PROXYTYPE,
                                    pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
            return curl

        req = httpclient.HTTPRequest(url=url,
                                     method=method,
                                     headers=headers,
                                     body=data,
                                     follow_redirects=False,
                                     max_redirects=0,
                                     decompress_response=True,
                                     allow_nonstandard_methods=True,
                                     allow_ipv6=True,
                                     prepare_curl_callback=set_curl_callback,
                                     validate_cert=False,
                                     connect_timeout=connect_timeout,
                                     request_timeout=request_timeout)

        session = cookie_utils.CookieSession()
        if req.headers.get('cookie'):
            req.headers['Cookie'] = req.headers.pop("cookie")
        if req.headers.get('Cookie'):
            session.update(dict(x.strip().split('=', 1) \
                    for x in req.headers['Cookie'].split(';') \
                    if '=' in x))
        if isinstance(env['session'], cookie_utils.CookieSession):
            session.from_json(env['session'].to_json())
        else:
            session.from_json(env['session'])
        session.update(cookies)
        cookie_header = session.get_cookie_header(req)
        if cookie_header:
            req.headers['Cookie'] = cookie_header

        if proxy and pycurl:
            if not config.proxy_direct_mode:
                for key in proxy:
                    if key != 'scheme':
                        setattr(req, 'proxy_%s' % key, proxy[key])
            elif config.proxy_direct_mode == 'regexp':
                if not re.compile(config.proxy_direct).search(req.url):
                    for key in proxy:
                        if key != 'scheme':
                            setattr(req, 'proxy_%s' % key, proxy[key])
            elif config.proxy_direct_mode == 'url':
                if utils.urlmatch(
                        req.url) not in config.proxy_direct.split('|'):
                    for key in proxy:
                        if key != 'scheme':
                            setattr(req, 'proxy_%s' % key, proxy[key])

        env['session'] = session

        return req, rule, env

    @staticmethod
    def response2har(response):
        request = response.request

        def build_headers(headers):
            result = []
            if headers:
                for k, v in headers.get_all():
                    result.append(dict(name=k, value=v))
            return result

        def build_request(request):
            url = urlparse.urlparse(request.url)
            ret = dict(
                    method = request.method,
                    url = request.url,
                    httpVersion = 'HTTP/1.1',
                    headers = build_headers(request.headers),
                    queryString = [
                        {'name': n, 'value': v} for n, v in\
                                urlparse.parse_qsl(url.query)],
                    cookies = [
                        {'name': n, 'value': v} for n, v in \
                                urlparse.parse_qsl(request.headers.get('cookie', ''))],
                    headersSize = -1,
                    bodySize = len(request.body) if request.body else 0,
                    )
            if request.body:
                if isinstance(request.body, bytes):
                    request._body = request.body.decode()
                ret['postData'] = dict(
                    mimeType=request.headers.get('content-type'),
                    text=request.body,
                )
                if ret['postData'][
                        'mimeType'] and 'application/x-www-form-urlencoded' in ret[
                            'postData']['mimeType']:
                    ret['postData']['params'] = [
                            {'name': n, 'value': v} for n, v in \
                                urlparse.parse_qsl(request.body)]
                    try:
                        _ = json.dumps(ret['postData']['params'])
                    except UnicodeDecodeError:
                        logger.error('params encoding error')
                        del ret['postData']['params']

            return ret

        def build_response(response):
            cookies = cookie_utils.CookieSession()
            cookies.extract_cookies_to_jar(response.request, response)

            encoding = utils.find_encoding(response.body, response.headers)
            if not response.headers.get('content-type'):
                response.headers['content-type'] = 'text/plain'
            if 'charset=' not in response.headers.get('content-type', ''):
                response.headers['content-type'] += '; charset=' + encoding

            return dict(
                status=response.code,
                statusText=response.reason,
                headers=build_headers(response.headers),
                cookies=cookies.to_json(),
                content=dict(
                    size=len(response.body),
                    mimeType=response.headers.get('content-type'),
                    text=base64.b64encode(response.body).decode('ascii'),
                    decoded=utils.decode(response.body, response.headers),
                ),
                redirectURL=response.headers.get('Location'),
                headersSize=-1,
                bodySize=-1,
            )

        entry = dict(
            startedDateTime=datetime.now().isoformat(),
            time=response.request_time,
            request=build_request(request),
            response=build_response(response),
            cache={},
            timings=response.time_info,
            connections="0",
            pageref="page_0",
        )
        if response.body and 'image' in response.headers.get('content-type'):
            entry['response']['content']['decoded'] = base64.b64encode(
                response.body).decode('ascii')
        return entry

    @staticmethod
    def run_rule(response, rule, env):
        success = True
        msg = ''

        content = [
            -1,
        ]

        def getdata(_from):
            if _from == 'content':
                if content[0] == -1:
                    if response.headers and isinstance(response.headers,
                                                       HTTPHeaders):
                        content[0] = utils.decode(response.body,
                                                  headers=response.headers)
                    else:
                        content[0] = utils.decode(response.body)
                if ('content-type' in response.headers):
                    if 'image' in response.headers.get('content-type'):
                        return base64.b64encode(response.body).decode('utf8')
                return content[0]
            elif _from == 'status':
                return '%s' % response.code
            elif _from.startswith('header-'):
                _from = _from[7:]
                return response.headers.get(_from, '')
            elif _from == 'header':
                try:
                    return str(response.headers._dict).replace('\'', '')
                except Exception as e:
                    traceback.print_exc()
                try:
                    return json.dumps(response.headers._dict)
                except Exception as e:
                    traceback.print_exc()
            else:
                return ''
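
        # Added note: a success_assert passes the check as soon as any one of
        # them matches; the for/else below marks failure only when none of
        # them matched.  A matching failed_assert fails the check immediately.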

        for r in rule.get('success_asserts') or '':
            if re.search(r['re'], getdata(r['from'])):
                msg = ''
                break
            else:
                msg = 'Fail assert: %s from success_asserts' % json.dumps(
                    r, ensure_ascii=False)
        else:
            if rule.get('success_asserts'):
                success = False

        for r in rule.get('failed_asserts') or '':
            if re.search(r['re'], getdata(r['from'])):
                success = False
                msg = 'Fail assert: %s from failed_asserts' % json.dumps(
                    r, ensure_ascii=False)
                break

        if not success and msg and (response.error or response.reason):
            msg += ', \\r\\nResponse Error : %s' % str(response.error
                                                       or response.reason)

        for r in rule.get('extract_variables') or '':
            pattern = r['re']
            flags = 0
            find_all = False

            re_m = re.match(r"^/(.*?)/([gimsu]*)$", r['re'])
            if re_m:
                pattern = re_m.group(1)
                if 'g' in re_m.group(2):
                    find_all = True  # global match: find all occurrences
                if 'i' in re_m.group(2):
                    flags |= re.I  # case-insensitive matching
                if 'm' in re_m.group(2):
                    flags |= re.M  # multi-line matching; affects ^ and $
                if 's' in re_m.group(2):
                    flags |= re.S  # make . also match newlines
                if 'u' in re_m.group(2):
                    flags |= re.U  # interpret characters per Unicode; affects \w, \W, \b, \B
                if 'x' in re_m.group(2):
                    pass  # flags |= re.X  # verbose pattern syntax for readability; not enabled here
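                # Illustrative examples (not from the original source):
                #   '/<title>(.*?)</title>/is' -> pattern '<title>(.*?)</title>' with re.I | re.S
                #   '/href="(.*?)"/g'          -> same pattern handling, but findall() is used below
                #   'token=(\w+)'              -> no match for the /.../flags form, used as-is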

            if find_all:
                try:
                    env['variables'][r['name']] = re.compile(
                        pattern, flags).findall(getdata(r['from']))
                except Exception as e:
                    env['variables'][r['name']] = str(e)
            else:
                try:
                    m = re.compile(pattern, flags).search(getdata(r['from']))
                    if m:
                        if m.groups():
                            m = m.groups()[0]
                        else:
                            m = m.group(0)
                        env['variables'][r['name']] = m
                except Exception as e:
                    env['variables'][r['name']] = str(e)
        return success, msg

    @staticmethod
    def tpl2har(tpl):
        def build_request(en):
            url = urlparse.urlparse(en['request']['url'])
            request = dict(
                    method = en['request']['method'],
                    url = en['request']['url'],
                    httpVersion = 'HTTP/1.1',
                    headers = [
                        {'name': x['name'], 'value': x['value'], 'checked': True} for x in\
                                en['request'].get('headers', [])],
                    queryString = [
                        {'name': n, 'value': v} for n, v in\
                                urlparse.parse_qsl(url.query)],
                    cookies = [
                        {'name': x['name'], 'value': x['value'], 'checked': True} for x in\
                                en['request'].get('cookies', [])],
                    headersSize = -1,
                    bodySize = len(en['request'].get('data')) if en['request'].get('data') else 0,
                    )
            if en['request'].get('data'):
                request['postData'] = dict(
                    mimeType=en['request'].get('mimeType'),
                    text=en['request'].get('data'),
                )
                if request['postData'][
                        'mimeType'] and 'application/x-www-form-urlencoded' in request[
                            'postData']['mimeType']:
                    params = [{'name': x[0], 'value': x[1]} \
                        for x in urlparse.parse_qsl(en['request']['data'], True)]
                    request['postData']['params'] = params
                    try:
                        _ = json.dumps(request['postData']['params'])
                    except UnicodeDecodeError:
                        logger.error('params encoding error')
                        del request['postData']['params']
            return request

        entries = []
        for en in tpl:
            entry = dict(
                checked=True,
                startedDateTime=datetime.now().isoformat(),
                time=1,
                request=build_request(en),
                response={},
                cache={},
                timings={},
                connections="0",
                pageref="page_0",
                success_asserts=en.get('rule', {}).get('success_asserts', []),
                failed_asserts=en.get('rule', {}).get('failed_asserts', []),
                extract_variables=en.get('rule',
                                         {}).get('extract_variables', []),
            )
            entries.append(entry)
        return dict(log=dict(creator=dict(name='binux', version='qiandao'),
                             entries=entries,
                             pages=[],
                             version='1.2'))

    async def build_response(self,
                             obj,
                             proxy={},
                             CURL_ENCODING=config.curl_encoding,
                             CURL_CONTENT_LENGTH=config.curl_length,
                             EMPTY_RETRY=config.empty_retry):
        try:
            req, rule, env = self.build_request(
                obj,
                download_size_limit=self.download_size_limit,
                proxy=proxy,
                CURL_ENCODING=CURL_ENCODING,
                CURL_CONTENT_LENGTH=CURL_CONTENT_LENGTH)
            response = await gen.convert_yielded(self.client.fetch(req))
        except httpclient.HTTPError as e:
            try:
                if config.allow_retry and pycurl:
                    if e.__dict__.get('errno', '') == 61:
                        logger.warning(
                            '{} {} [Warning] {} -> Try to retry!'.format(
                                req.method, req.url, e))
                        req, rule, env = self.build_request(
                            obj,
                            download_size_limit=self.download_size_limit,
                            proxy=proxy,
                            CURL_ENCODING=False,
                            CURL_CONTENT_LENGTH=CURL_CONTENT_LENGTH)
                        e.response = await gen.convert_yielded(
                            self.client.fetch(req))
                    elif e.code == 400 and e.message == 'Bad Request' and req and req.headers.get(
                            'content-length'):
                        logger.warning(
                            '{} {} [Warning] {} -> Try to retry!'.format(
                                req.method, req.url, e))
                        req, rule, env = self.build_request(
                            obj,
                            download_size_limit=self.download_size_limit,
                            proxy=proxy,
                            CURL_ENCODING=CURL_ENCODING,
                            CURL_CONTENT_LENGTH=False)
                        e.response = await gen.convert_yielded(
                            self.client.fetch(req))
                    elif e.code not in NOT_RETYR_CODE or (EMPTY_RETRY
                                                          and not e.response):
                        try:
                            logger.warning(
                                '{} {} [Warning] {} -> Try to retry!'.format(
                                    req.method, req.url, e))
                            client = simple_httpclient.SimpleAsyncHTTPClient()
                            e.response = await gen.convert_yielded(
                                client.fetch(req))
                        except Exception:
                            logger.error(
                                e.message.replace('\\r\\n', '\r\n')
                                or e.response.replace('\\r\\n', '\r\n')
                                or Exception)
                    else:
                        try:
                            logger.warning('{} {} [Warning] {}'.format(
                                req.method, req.url, e))
                        except Exception:
                            logger.error(
                                e.message.replace('\\r\\n', '\r\n')
                                or e.response.replace('\\r\\n', '\r\n')
                                or Exception)
                else:
                    logger.warning('{} {} [Warning] {}'.format(
                        req.method, req.url, e))
            finally:
                if 'req' not in locals().keys():
                    tmp = {'env': obj['env'], 'rule': obj['rule']}
                    tmp['request'] = {
                        'method': 'GET',
                        'url': 'http://127.0.0.1:8923/util/unicode?content=',
                        'headers': [],
                        'cookies': []
                    }
                    req, rule, env = self.build_request(tmp)
                    e.response = httpclient.HTTPResponse(request=req,
                                                         code=e.code,
                                                         reason=e.message,
                                                         buffer=BytesIO(
                                                             str(e).encode()))
                if not e.response:
                    traceback.print_exc()
                    e.response = httpclient.HTTPResponse(request=req,
                                                         code=e.code,
                                                         reason=e.message,
                                                         buffer=BytesIO(
                                                             str(e).encode()))
                return rule, env, e.response
        return rule, env, response

    async def fetch(self,
                    obj,
                    proxy={},
                    CURL_ENCODING=config.curl_encoding,
                    CURL_CONTENT_LENGTH=config.curl_length,
                    EMPTY_RETRY=config.empty_retry):
        """
        obj = {
          request: {
            method: 
            url: 
            headers: [{name: , value: }, ]
            cookies: [{name: , value: }, ]
            data:
          }
          rule: {
            success_asserts: [{re: , from: 'content'}, ]
            failed_asserts: [{re: , from: 'content'}, ]
            extract_variables: [{name: , re:, from: 'content'}, ]
          }
          env: {
            variables: {
              name: value
            }
            session: [
            ]
          }
        }
        """

        rule, env, response = await gen.convert_yielded(
            self.build_response(obj, proxy, CURL_ENCODING, CURL_CONTENT_LENGTH,
                                EMPTY_RETRY))

        env['session'].extract_cookies_to_jar(response.request, response)
        success, msg = self.run_rule(response, rule, env)

        return {
            'success': success,
            'response': response,
            'env': env,
            'msg': msg,
        }

    FOR_START = re.compile(r'{%\s*for\s+(\w+)\s+in\s+(\w+)\s*%}')
    FOR_END = re.compile(r'{%\s*endfor\s*%}')

    def parse(self, tpl):
        stmt_stack = []

        for i, entry in enumerate(tpl):
            if 'type' in entry:
                yield entry
            elif self.FOR_START.match(entry['request']['url']):
                m = self.FOR_START.match(entry['request']['url'])
                stmt_stack.append({
                    'type': 'for',
                    'target': m.group(1),
                    'from': m.group(2),
                    'body': []
                })
            elif self.FOR_END.match(entry['request']['url']):
                if stmt_stack and stmt_stack[-1]['type'] == 'for':
                    entry = stmt_stack.pop()
                    if stmt_stack:
                        stmt_stack[-1]['body'].append(entry)
                    else:
                        yield entry
            elif stmt_stack:
                stmt_stack[-1]['body'].append({
                    'type': 'request',
                    'entry': entry,
                })
            else:
                yield {
                    'type': 'request',
                    'entry': entry,
                }

        while stmt_stack:
            yield stmt_stack.pop()
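    # Illustrative only: two entries whose request urls are '{% for item in list %}'
    # and '{% endfor %}' bracket the entries between them into a single
    # {'type': 'for', 'target': 'item', 'from': 'list', 'body': [...]} block;
    # every other entry is yielded as {'type': 'request', 'entry': entry}.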

    async def do_fetch(self,
                       tpl,
                       env,
                       proxies=config.proxies,
                       request_limit=1000):
        """
        do a fetch of the whole tpl
        """
        if proxies:
            proxy = random.choice(proxies)
        else:
            proxy = {}

        for i, block in enumerate(self.parse(tpl)):
            if request_limit <= 0:
                raise Exception('request limit')
            elif block['type'] == 'for':
                for each in env['variables'].get(block['from'], []):
                    env['variables'][block['target']] = each
                    env = await gen.convert_yielded(
                        self.do_fetch(block['body'],
                                      env,
                                      proxies=[proxy],
                                      request_limit=request_limit))
            elif block['type'] == 'request':
                entry = block['entry']
                try:
                    request_limit -= 1
                    result = await gen.convert_yielded(
                        self.fetch(dict(
                            request=entry['request'],
                            rule=entry['rule'],
                            env=env,
                        ),
                                   proxy=proxy))
                    env = result['env']
                except Exception as e:
                    if config.debug:
                        logging.exception(e)
                    raise Exception(
                        'Failed at %d/%d request, \\r\\nError: %r, \\r\\nRequest URL: %s'
                        % (i + 1, len(tpl), e, entry['request']['url']))
                if not result['success']:
                    raise Exception(
                        'Failed at %d/%d request, \\r\\n%s, \\r\\nRequest URL: %s'
                        % (i + 1, len(tpl), result['msg'],
                           entry['request']['url']))
        return env
Example #38
0
 def test_safe_format_safety(self):
     env = SandboxedEnvironment()
     t = env.from_string(
         '{{ ("a{0.__class__}b{1}"|safe).format(42, "<foo>") }}')
     assert t.render() == "ab&lt;foo&gt;"
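     # Added note: the sandboxed formatter resolves the unsafe "{0.__class__}"
     # field to undefined (rendered as an empty string) instead of exposing the
     # class, and the untrusted second argument is still autoescaped.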
Example #39
0
    def _compute_msg(self, observation_ids):
        from jinja2.sandbox import SandboxedEnvironment
        mako_template_env = SandboxedEnvironment(
            block_start_string="<%",
            block_end_string="%>",
            variable_start_string="${",
            variable_end_string="}",
            comment_start_string="<%doc>",
            comment_end_string="</%doc>",
            line_statement_prefix="%",
            line_comment_prefix="##",
            trim_blocks=True,  # do not output newline after
            autoescape=True,  # XML/HTML automatic escaping
        )
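        # Illustrative only (template text made up for the example): with these
        # delimiters a message such as "Dear ${user.name}, item count: ${len(ctx)}"
        # is rendered through the sandbox using the variables passed below.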
        mako_template_env.globals.update({
            'str': str,
            'datetime': datetime,
            'len': len,
            'abs': abs,
            'min': min,
            'max': max,
            'sum': sum,
            'filter': filter,
            'reduce': reduce,
            'map': map,
            'round': round,
            'cmp': cmp,
            # dateutil.relativedelta is an old-style class and cannot be
            # instantiated within a jinja2 expression, so a lambda "proxy"
            # is needed, apparently.
            'relativedelta':
                lambda *a, **kw: relativedelta.relativedelta(*a, **kw),
        })
        mako_safe_env = copy.copy(mako_template_env)
        mako_safe_env.autoescape = False

        result = ''
        for item in observation_ids:
            if item.document_id and item.document_id.code != self.model:
                continue
            template = mako_safe_env.from_string(tools.ustr(item.message))
            variables = {
                'user': self.env.user,
                'ctx': self._context,
                'invoice': self.invoice_id,
            }
            render_result = template.render(variables)
            result += render_result + '\n'
        return result
Example #40
0
_logger = logging.getLogger(__name__)


try:
    # We use a jinja2 sandboxed environment to render mako templates.
    # Note that the rendering does not cover all the mako syntax, in particular
    # arbitrary Python statements are not accepted, and not all expressions are
    # allowed: only "public" attributes (not starting with '_') of objects may
    # be accessed.
    # This is done on purpose: it prevents incidental or malicious execution of
    # Python code that may break the security of the server.
    from jinja2.sandbox import SandboxedEnvironment

    mako_template_env = SandboxedEnvironment(
        variable_start_string="${",
        variable_end_string="}",
        line_statement_prefix="%",
        trim_blocks=True,  # do not output newline after blocks
    )
    mako_template_env.globals.update(
        {
            "str": str,
            "datetime": datetime,
            "len": len,
            "abs": abs,
            "min": min,
            "max": max,
            "sum": sum,
            "filter": filter,
            "map": map,
            "round": round,
        }
    )
Example #41
0
def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
                              warn=_simple_warn, info=_simple_info,
                              base_path=None, builder=None, template_dir=None,
                              imported_members=False, app=None):
    # type: (List[unicode], unicode, unicode, Callable, Callable, unicode, Builder, unicode, bool, Any) -> None  # NOQA

    showed_sources = list(sorted(sources))
    if len(showed_sources) > 20:
        showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
    info(__('[autosummary] generating autosummary for: %s') %
         ', '.join(showed_sources))

    if output_dir:
        info(__('[autosummary] writing to %s') % output_dir)

    if base_path is not None:
        sources = [os.path.join(base_path, filename) for filename in sources]

    # create our own templating environment
    template_dirs = None  # type: List[unicode]
    template_dirs = [os.path.join(package_dir, 'ext',
                                  'autosummary', 'templates')]

    template_loader = None  # type: BaseLoader
    if builder is not None:
        # allow the user to override the templates
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        if template_dir:
            template_dirs.insert(0, template_dir)
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader)
    template_env.filters['underline'] = _underline

    # replace the builtin html filters
    template_env.filters['escape'] = rst_escape
    template_env.filters['e'] = rst_escape

    # read
    items = find_autosummary_in_files(sources)

    # keep track of new files
    new_files = []

    # write
    for name, path, template_name in sorted(set(items), key=str):
        if path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = output_dir or os.path.abspath(path)
        ensuredir(path)

        try:
            name, obj, parent, mod_name = import_by_name(name)
        except ImportError as e:
            warn('[autosummary] failed to import %r: %s' % (name, e))
            continue

        fn = os.path.join(path, name + suffix)

        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)

        with open(fn, 'w') as f:
            doc = get_documenter(app, obj, parent)

            if template_name is not None:
                template = template_env.get_template(template_name)
            else:
                try:
                    template = template_env.get_template('autosummary/%s.rst'
                                                         % doc.objtype)
                except TemplateNotFound:
                    template = template_env.get_template('autosummary/base.rst')

            def get_members(obj, typ, include_public=[], imported=True):
                # type: (Any, unicode, List[unicode], bool) -> Tuple[List[unicode], List[unicode]]  # NOQA
                items = []  # type: List[unicode]
                for name in dir(obj):
                    try:
                        value = safe_getattr(obj, name)
                    except AttributeError:
                        continue
                    documenter = get_documenter(app, value, obj)
                    if documenter.objtype == typ:
                        if imported or getattr(value, '__module__', None) == obj.__name__:
                            # skip imported members if expected
                            items.append(name)
                public = [x for x in items
                          if x in include_public or not x.startswith('_')]
                return public, items

            ns = {}  # type: Dict[unicode, Any]

            if doc.objtype == 'module':
                ns['members'] = dir(obj)
                ns['functions'], ns['all_functions'] = \
                    get_members(obj, 'function', imported=imported_members)
                ns['classes'], ns['all_classes'] = \
                    get_members(obj, 'class', imported=imported_members)
                ns['exceptions'], ns['all_exceptions'] = \
                    get_members(obj, 'exception', imported=imported_members)
            elif doc.objtype == 'class':
                ns['members'] = dir(obj)
                ns['inherited_members'] = \
                    set(dir(obj)) - set(obj.__dict__.keys())
                ns['methods'], ns['all_methods'] = \
                    get_members(obj, 'method', ['__init__'])
                ns['attributes'], ns['all_attributes'] = \
                    get_members(obj, 'attribute')

            parts = name.split('.')
            if doc.objtype in ('method', 'attribute'):
                mod_name = '.'.join(parts[:-2])
                cls_name = parts[-2]
                obj_name = '.'.join(parts[-2:])
                ns['class'] = cls_name
            else:
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

            ns['fullname'] = name
            ns['module'] = mod_name
            ns['objname'] = obj_name
            ns['name'] = parts[-1]

            ns['objtype'] = doc.objtype
            ns['underline'] = len(name) * '='

            rendered = template.render(**ns)
            f.write(rendered)  # type: ignore

    # descend recursively to new files
    if new_files:
        generate_autosummary_docs(new_files, output_dir=output_dir,
                                  suffix=suffix, warn=warn, info=info,
                                  base_path=base_path, builder=builder,
                                  template_dir=template_dir, app=app)
Example #42
0
 def __init__(self, loader=None):
     self.env = SandboxedEnvironment(loader=loader)
     self.env.filters['repr'] = repr
Example #43
0
class Render:
    def __init__(
        self,
        container: Container,
        filename: str,
        report: Report,
        *,
        task: Optional[Task] = None,
        job: Optional[Job] = None,
        target_url: Optional[str] = None,
        input_url: Optional[str] = None,
        report_url: Optional[str] = None,
    ):
        self.report = report
        self.container = container
        self.filename = filename
        if not task:
            task = Task.get(report.job_id, report.task_id)
            if not task:
                raise ValueError(f"invalid task {report.task_id}")
        if not job:
            job = Job.get(report.job_id)
            if not job:
                raise ValueError(f"invalid job {report.job_id}")

        self.task_config = task.config
        self.job_config = job.config
        self.env = SandboxedEnvironment()

        self.target_url = target_url
        if not self.target_url:
            setup_container = get_setup_container(task.config)
            if setup_container:
                self.target_url = auth_download_url(
                    setup_container,
                    self.report.executable.replace("setup/", "", 1))

        if report_url:
            self.report_url = report_url
        else:
            self.report_url = auth_download_url(container, filename)

        self.input_url = input_url
        if not self.input_url:
            if self.report.input_blob:
                self.input_url = auth_download_url(
                    self.report.input_blob.container,
                    self.report.input_blob.name)

    def render(self, template: str) -> str:
        return self.env.from_string(template).render({
            "report": self.report,
            "task": self.task_config,
            "job": self.job_config,
            "report_url": self.report_url,
            "input_url": self.input_url,
            "target_url": self.target_url,
            "report_container": self.container,
            "report_filename": self.filename,
            "repro_cmd": "onefuzz --endpoint %s repro create_and_connect %s %s"
            % (get_instance_url(), self.container, self.filename),
        })
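    # Illustrative only (the template text is made up): something like
    # "New crash in {{ report.executable }} - report: {{ report_url }}" passed to
    # Render(...).render() is filled in from the context assembled above.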
Example #44
0
def generate_automodsumm_docs(lines,
                              srcfn,
                              suffix='.rst',
                              warn=None,
                              info=None,
                              base_path=None,
                              builder=None,
                              template_dir=None,
                              inherited_members=False):
    """
    This function is adapted from
    `sphinx.ext.autosummary.generate.generate_autosummmary_docs` to
    generate source for the automodsumm directives that should be
    autosummarized. Unlike generate_autosummary_docs, this function is
    called one file at a time.
    """

    from sphinx.jinja2glue import BuiltinTemplateLoader
    from sphinx.ext.autosummary import import_by_name, get_documenter
    from sphinx.ext.autosummary.generate import (_simple_info, _simple_warn)
    from sphinx.util.osutil import ensuredir
    from sphinx.util.inspect import safe_getattr
    from jinja2 import FileSystemLoader, TemplateNotFound
    from jinja2.sandbox import SandboxedEnvironment

    from .utils import find_autosummary_in_lines_for_automodsumm as find_autosummary_in_lines

    if info is None:
        info = _simple_info
    if warn is None:
        warn = _simple_warn

    # info('[automodsumm] generating automodsumm for: ' + srcfn)

    # Create our own templating environment - here we use Astropy's
    # templates rather than the default autosummary templates, in order to
    # allow docstrings to be shown for methods.
    template_dirs = [
        os.path.join(os.path.dirname(__file__), 'templates'),
        os.path.join(base_path, '_templates')
    ]
    if builder is not None:
        # allow the user to override the templates
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        if template_dir:
            template_dirs.insert(0, template_dir)
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader)

    # read
    # items = find_autosummary_in_files(sources)
    items = find_autosummary_in_lines(lines, filename=srcfn)
    if len(items) > 0:
        msg = '[automodsumm] {1}: found {0} automodsumm entries to generate'
        info(msg.format(len(items), srcfn))


    #    gennms = [item[0] for item in items]
    #    if len(gennms) > 20:
    #        gennms = gennms[:10] + ['...'] + gennms[-10:]
    #    info('[automodsumm] generating autosummary for: ' + ', '.join(gennms))

    # remove possible duplicates
    items = dict([(item, True) for item in items]).keys()

    # keep track of new files
    new_files = []

    # write
    for name, path, template_name, inherited_mem in sorted(items):

        if path is None:
            # The corresponding autosummary:: directive did not have
            # a :toctree: option
            continue

        path = os.path.abspath(os.path.join(base_path, path))
        ensuredir(path)

        try:
            import_by_name_values = import_by_name(name)
        except ImportError as e:
            warn('[automodsumm] failed to import %r: %s' % (name, e))
            continue

        # if block to accommodate Sphinx's v1.2.2 and v1.2.3 respectively
        if len(import_by_name_values) == 3:
            name, obj, parent = import_by_name_values
        elif len(import_by_name_values) == 4:
            name, obj, parent, module_name = import_by_name_values

        fn = os.path.join(path, name + suffix)

        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)

        f = open(fn, 'w')

        try:
            doc = get_documenter(obj, parent)

            if template_name is not None:
                template = template_env.get_template(template_name)
            else:
                tmplstr = 'autosummary_core/%s.rst'
                try:
                    template = template_env.get_template(tmplstr % doc.objtype)
                except TemplateNotFound:
                    template = template_env.get_template(tmplstr % 'base')

            def get_members_mod(obj, typ, include_public=[]):
                """
                typ = None -> all
                """
                items = []
                for name in dir(obj):
                    try:
                        documenter = get_documenter(safe_getattr(obj, name),
                                                    obj)
                    except AttributeError:
                        continue
                    if typ is None or documenter.objtype == typ:
                        items.append(name)
                public = [
                    x for x in items
                    if x in include_public or not x.startswith('_')
                ]
                return public, items

            def get_members_class(obj,
                                  typ,
                                  include_public=[],
                                  include_base=False):
                """
                typ = None -> all
                include_base -> include attrs that are from a base class
                """
                items = []

                # using dir gets all of the attributes, including the elements
                # from the base class, otherwise use __slots__ or __dict__
                if include_base:
                    names = dir(obj)
                else:
                    if hasattr(obj, '__slots__'):
                        names = tuple(getattr(obj, '__slots__'))
                    else:
                        names = getattr(obj, '__dict__').keys()

                for name in names:
                    try:
                        documenter = get_documenter(safe_getattr(obj, name),
                                                    obj)
                    except AttributeError:
                        continue
                    if typ is None or documenter.objtype == typ:
                        items.append(name)
                public = [
                    x for x in items
                    if x in include_public or not x.startswith('_')
                ]
                return public, items

            ns = {}

            if doc.objtype == 'module':
                ns['members'] = get_members_mod(obj, None)
                ns['functions'], ns['all_functions'] = \
                    get_members_mod(obj, 'function')
                ns['classes'], ns['all_classes'] = \
                    get_members_mod(obj, 'class')
                ns['exceptions'], ns['all_exceptions'] = \
                    get_members_mod(obj, 'exception')
            elif doc.objtype == 'class':
                if inherited_mem is not None:
                    # option set in this specifc directive
                    include_base = inherited_mem
                else:
                    # use default value
                    include_base = inherited_members

                api_class_methods = ['__init__', '__call__']
                ns['members'] = get_members_class(obj,
                                                  None,
                                                  include_base=include_base)
                ns['methods'], ns['all_methods'] = \
                    get_members_class(obj, 'method', api_class_methods,
                                      include_base=include_base)
                ns['attributes'], ns['all_attributes'] = \
                    get_members_class(obj, 'attribute',
                                      include_base=include_base)
                ns['methods'].sort()
                ns['attributes'].sort()

            parts = name.split('.')
            if doc.objtype in ('method', 'attribute'):
                mod_name = '.'.join(parts[:-2])
                cls_name = parts[-2]
                obj_name = '.'.join(parts[-2:])
                ns['class'] = cls_name
            else:
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

            ns['fullname'] = name
            ns['module'] = mod_name
            ns['objname'] = obj_name
            ns['name'] = parts[-1]

            ns['objtype'] = doc.objtype
            ns['underline'] = len(obj_name) * '='

            # We now check whether a file for reference footnotes exists for
            # the module being documented. We first check if the
            # current module is a file or a directory, as this will give a
            # different path for the reference file. For example, if
            # documenting astropy.wcs then the reference file is at
            # ../wcs/references.txt, while if we are documenting
            # astropy.config.logging_helper (which is at
            # astropy/config/logging_helper.py) then the reference file is set
            # to ../config/references.txt
            if '.' in mod_name:
                mod_name_dir = mod_name.replace('.', '/').split('/', 1)[1]
            else:
                mod_name_dir = mod_name
            if not os.path.isdir(os.path.join(base_path, mod_name_dir)) \
               and os.path.isdir(os.path.join(base_path, mod_name_dir.rsplit('/', 1)[0])):
                mod_name_dir = mod_name_dir.rsplit('/', 1)[0]

            # We then have to check whether it exists, and if so, we pass it
            # to the template.
            if os.path.exists(
                    os.path.join(base_path, mod_name_dir, 'references.txt')):
                # An important subtlety here is that the path we pass in has
                # to be relative to the file being generated, so we have to
                # figure out the right number of '..'s
                ndirsback = path.replace(base_path, '').count('/')
                ref_file_rel_segments = ['..'] * ndirsback
                ref_file_rel_segments.append(mod_name_dir)
                ref_file_rel_segments.append('references.txt')
                ns['referencefile'] = os.path.join(*ref_file_rel_segments)

            rendered = template.render(**ns)
            f.write(cleanup_whitespace(rendered))
        finally:
            f.close()
Example #45
0
from jinja2 import parser as jinja_parse
from jinja2.sandbox import SandboxedEnvironment
from oslo_log import log as logging
import six

from mistral import exceptions as exc
from mistral.expressions.base_expression import Evaluator
from mistral.utils import expression_utils

LOG = logging.getLogger(__name__)

JINJA_REGEXP = '({{(.*)}})'
JINJA_BLOCK_REGEXP = '({%(.*)%})'

_environment = SandboxedEnvironment(undefined=jinja2.StrictUndefined,
                                    trim_blocks=True,
                                    lstrip_blocks=True)

_filters = expression_utils.get_custom_functions()

for name in _filters:
    _environment.filters[name] = _filters[name]


class JinjaEvaluator(Evaluator):
    _env = _environment.overlay()

    @classmethod
    def validate(cls, expression):
        if not isinstance(expression, six.string_types):
            raise exc.JinjaEvaluationException("Unsupported type '%s'." %
                                               type(expression))
Example #46
0
class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
    """
    Interfaces the rendering environment of jinja2 for use in Sphinx.
    """

    # TemplateBridge interface

    def init(self, builder: "Builder", theme: Theme = None, dirs: List[str] = None) -> None:
        # create a chain of paths to search
        if theme:
            # the theme's own dir and its bases' dirs
            pathchain = theme.get_theme_dirs()
            # the loader dirs: pathchain + the parent directories for all themes
            loaderchain = pathchain + [path.join(p, '..') for p in pathchain]
        elif dirs:
            pathchain = list(dirs)
            loaderchain = list(dirs)
        else:
            pathchain = []
            loaderchain = []

        # prepend explicit template paths
        self.templatepathlen = len(builder.config.templates_path)
        if builder.config.templates_path:
            cfg_templates_path = [path.join(builder.confdir, tp)
                                  for tp in builder.config.templates_path]
            pathchain[0:0] = cfg_templates_path
            loaderchain[0:0] = cfg_templates_path

        # store it for use in newest_template_mtime
        self.pathchain = pathchain

        # make the paths into loaders
        self.loaders = [SphinxFileSystemLoader(x) for x in loaderchain]

        use_i18n = builder.app.translator is not None
        extensions = ['jinja2.ext.i18n'] if use_i18n else []
        self.environment = SandboxedEnvironment(loader=self,
                                                extensions=extensions)
        self.environment.filters['tobool'] = _tobool
        self.environment.filters['toint'] = _toint
        self.environment.filters['todim'] = _todim
        self.environment.filters['slice_index'] = _slice_index
        self.environment.globals['debug'] = contextfunction(pformat)
        self.environment.globals['warning'] = warning
        self.environment.globals['accesskey'] = contextfunction(accesskey)
        self.environment.globals['idgen'] = idgen
        if use_i18n:
            self.environment.install_gettext_translations(builder.app.translator)  # type: ignore  # NOQA

    def render(self, template: str, context: Dict) -> str:  # type: ignore
        return self.environment.get_template(template).render(context)

    def render_string(self, source: str, context: Dict) -> str:
        return self.environment.from_string(source).render(context)

    def newest_template_mtime(self) -> float:
        return max(mtimes_of_files(self.pathchain, '.html'))

    # Loader interface

    def get_source(self, environment: Environment, template: str) -> Tuple[str, str, Callable]:
        loaders = self.loaders
        # exclamation mark starts search from theme
        if template.startswith('!'):
            loaders = loaders[self.templatepathlen:]
            template = template[1:]
        for loader in loaders:
            try:
                return loader.get_source(environment, template)
            except TemplateNotFound:
                pass
        raise TemplateNotFound(template)
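# Added note: a template name starting with '!' (e.g. "!page.html") makes
# get_source() skip the loaders built from the project's templates_path and
# search only the theme-derived loaders.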
Example #47
0
 def test_attr_filter(self, env):
     env = SandboxedEnvironment()
     tmpl = env.from_string('{{ cls|attr("__subclasses__")() }}')
     pytest.raises(SecurityError, tmpl.render, cls=int)
Example #48
0
 def __init__(self, loader=None):
     # type: (BaseLoader) -> None
     self.env = SandboxedEnvironment(loader=loader,
                                     extensions=['jinja2.ext.i18n'])
     self.env.filters['repr'] = repr
     self.env.install_gettext_translations(get_translator())  # type: ignore
Example #49
0
def test_attr_filter():
    env = SandboxedEnvironment()
    tmpl = env.from_string(
        '{{ 42|attr("__class__")|attr("__subclasses__")() }}')
    assert_raises(SecurityError, tmpl.render)
Example #50
0
 def __init__(self, app, **options):
     if 'loader' not in options:
         options['loader'] = app.create_global_jinja_loader()
     SandboxedEnvironment.__init__(self, **options)
     self.app = app
Example #51
0
    def add_content(self, more_content, no_docstring=False):
        if self.doc_as_attr:
            super(GWpyClassDocumenter,
                  self).add_content(more_content, no_docstring=no_docstring)
        else:
            name = safe_getattr(self.object, '__name__', None)
            if name:
                # create our own templating environment
                builder = self.env.app.builder or None
                template_dirs = [
                    os.path.join(package_dir, 'ext', 'autosummary',
                                 'templates')
                ]
                if builder is not None:
                    if builder.config.templates_path:
                        template_dirs = (builder.config.templates_path +
                                         template_dirs)
                    # allow the user to override the templates
                    template_loader = BuiltinTemplateLoader()
                    template_loader.init(builder, dirs=template_dirs)
                else:
                    template_loader = FileSystemLoader(template_dirs)
                template_env = SandboxedEnvironment(loader=template_loader)
                template = template_env.get_template('autoclass/class.rst')

                def get_members(obj, typ, include_public=[]):
                    items = []
                    want_all = self.options.inherited_members or \
                               self.options.members is ALL
                    members = zip(*self.get_object_members(want_all)[1])[0]
                    if self.options.exclude_members:
                        members = [
                            m for m in members
                            if m not in self.options.exclude_members
                        ]
                    for name in members:
                        try:
                            documenter = get_documenter(
                                safe_getattr(obj, name), obj)
                        except AttributeError:
                            continue
                        if documenter.objtype == typ:
                            items.append(name)
                    public = [
                        x for x in items
                        if x in include_public or not x.startswith('_')
                    ]
                    return public, items

                ns = {}
                config = self.env.app.config
                npconfig = dict(
                    use_plots=config.numpydoc_use_plots,
                    show_class_members=config.numpydoc_show_class_members)
                ns['docstring'] = SphinxClassDoc(self.object, config=npconfig)

                ns['members'] = vars(self.object)
                ns['methods'], ns['all_methods'] = get_members(
                    self.object, 'method', ['__init__'])
                ns['attributes'], ns['all_attributes'] = get_members(
                    self.object, 'attribute')

                parts = self.fullname.split('.')
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

                ns['fullname'] = name
                ns['module'] = mod_name
                ns['objname'] = obj_name
                ns['name'] = parts[-1]

                for line in template.render(**ns).split('\n'):
                    if line not in [None, 'None']:
                        self.add_line(line, '<autodoc>')
                self.doc_as_attr = True
Example #52
0
def test_restricted():
    env = SandboxedEnvironment()
    assert_raises(TemplateSyntaxError, env.from_string,
                  "{% for item.attribute in seq %}...{% endfor %}")
    assert_raises(TemplateSyntaxError, env.from_string,
                  "{% for foo, bar.baz in seq %}...{% endfor %}")
Example #53
0
class AutosummaryRenderer:
    """A helper class for rendering."""
    def __init__(self,
                 app: Union[Builder, Sphinx],
                 template_dir: str = None) -> None:
        if isinstance(app, Builder):
            warnings.warn(
                'The first argument for AutosummaryRenderer has been '
                'changed to Sphinx object',
                RemovedInSphinx50Warning,
                stacklevel=2)
        if template_dir:
            warnings.warn(
                'template_dir argument for AutosummaryRenderer is deprecated.',
                RemovedInSphinx50Warning,
                stacklevel=2)

        system_templates_path = [
            os.path.join(package_dir, 'ext', 'autosummary', 'templates')
        ]
        loader = SphinxTemplateLoader(app.srcdir, app.config.templates_path,
                                      system_templates_path)

        self.env = SandboxedEnvironment(loader=loader)
        self.env.filters['escape'] = rst.escape
        self.env.filters['e'] = rst.escape
        self.env.filters['underline'] = _underline

        if isinstance(app, (Sphinx, DummyApplication)):
            if app.translator:
                self.env.add_extension("jinja2.ext.i18n")
                self.env.install_gettext_translations(app.translator)
        elif isinstance(app, Builder):
            if app.app.translator:
                self.env.add_extension("jinja2.ext.i18n")
                self.env.install_gettext_translations(app.app.translator)

    def exists(self, template_name: str) -> bool:
        """Check if template file exists."""
        warnings.warn('AutosummaryRenderer.exists() is deprecated.',
                      RemovedInSphinx50Warning,
                      stacklevel=2)
        try:
            self.env.get_template(template_name)
            return True
        except TemplateNotFound:
            return False

    def render(self, template_name: str, context: Dict) -> str:
        """Render a template file."""
        try:
            template = self.env.get_template(template_name)
        except TemplateNotFound:
            try:
                # objtype is given as template_name
                template = self.env.get_template('autosummary/%s.rst' %
                                                 template_name)
            except TemplateNotFound:
                # fallback to base.rst
                template = self.env.get_template('autosummary/base.rst')

        return template.render(context)
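    # Added note: render() first tries template_name as given; if it is not
    # found, template_name is treated as an objtype and
    # 'autosummary/<template_name>.rst' is tried, with 'autosummary/base.rst'
    # as the final fallback.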
Example #54
0
try:
    # We use a jinja2 sandboxed environment to render mako templates.
    # Note that the rendering does not cover all the mako syntax, in particular
    # arbitrary Python statements are not accepted, and not all expressions are
    # allowed: only "public" attributes (not starting with '_') of objects may
    # be accessed.
    # This is done on purpose: it prevents incidental or malicious execution of
    # Python code that may break the security of the server.
    from jinja2.sandbox import SandboxedEnvironment
    mako_template_env = SandboxedEnvironment(
        block_start_string="<%",
        block_end_string="%>",
        variable_start_string="${",
        variable_end_string="}",
        comment_start_string="<%doc>",
        comment_end_string="</%doc>",
        line_statement_prefix="%",
        line_comment_prefix="##",
        trim_blocks=True,  # do not output newline after blocks
        autoescape=True,  # XML/HTML automatic escaping
    )
    mako_template_env.globals.update({
        'str': str,
        'quote': urls.url_quote,
        'urlencode': urls.url_encode,
        'datetime': datetime,
        'len': len,
Example #55
0
def generate_autosummary_docs(sources,
                              app,
                              suffix='.rst',
                              output_dir=None,
                              base_path=None,
                              builder=None,
                              template_dir=None):
    showed_sources = list(sorted(sources))
    if len(showed_sources) > 20:
        showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
    logger.info('[autosummary] generating autosummary for: %s' %
                ', '.join(showed_sources))

    if output_dir:
        logger.info('[autosummary] writing to %s' % output_dir)

    if base_path is not None:
        sources = [os.path.join(base_path, filename) for filename in sources]

    # create our own templating environment
    template_dirs = [
        os.path.join(package_dir, 'ext', 'autosummary', 'templates')
    ]
    if builder is not None:
        # allow the user to override the templates
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        if template_dir:
            template_dirs.insert(0, template_dir)
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader)

    # read
    items = find_autosummary_in_files(sources)
    # keep track of new files
    new_files = []
    # write
    for name, path, template_name in sorted(set(items), key=str):
        if path is None:
            continue  # The corresponding autosummary:: directive did not have a :toctree: option

        path = output_dir or os.path.abspath(path)
        ensuredir(path)

        try:
            name, obj, parent, mod_name = import_by_name(name)
        except ImportError as e:
            logger.warning('[autosummary] failed to import %r: %s' % (name, e))
            continue

        fn = os.path.join(path, name + suffix)
        # skip it if it exists
        if os.path.isfile(fn):
            continue

        new_files.append(fn)
        with open(fn, 'w') as f:
            doc = get_documenter(app, obj, parent)

            if template_name is not None:
                template = template_env.get_template(template_name)
            else:
                try:
                    template = template_env.get_template('autosummary/%s.rst' %
                                                         doc.objtype)
                except TemplateNotFound:
                    template = template_env.get_template(
                        'autosummary/base.rst')

            ns = {}
            if doc.objtype == 'module':
                ns['members'] = dir(obj)
                ns['functions'], ns['all_functions'] = get_members(
                    app, obj, 'function')
                ns['classes'], ns['all_classes'] = get_members(
                    app, obj, 'class')
                ns['exceptions'], ns['all_exceptions'] = get_members(
                    app, obj, 'exception')
            elif doc.objtype == 'class':
                ns['members'] = dir(obj)
                include_public = app.config.autodoc_allowed_special_members
                ns['methods'], ns['all_methods'] = get_members(
                    app, obj, 'method', include_public)
                ns['attributes'], ns['all_attributes'] = get_members(
                    app, obj, 'attribute')

            parts = name.split('.')
            if doc.objtype in ('method', 'attribute'):
                mod_name = '.'.join(parts[:-2])
                cls_name = parts[-2]
                obj_name = '.'.join(parts[-2:])
                ns['class'] = cls_name
            else:
                mod_name, obj_name = '.'.join(parts[:-1]), parts[-1]

            ns['fullname'] = name
            ns['module'] = mod_name
            ns['objname'] = obj_name
            ns['name'] = parts[-1]
            ns['objtype'] = doc.objtype
            ns['underline'] = len(name) * '='

            rendered = template.render(**ns)
            f.write(rendered)

    # descend recursively to new files
    if new_files:
        generate_autosummary_docs(new_files,
                                  app,
                                  suffix=suffix,
                                  output_dir=output_dir,
                                  base_path=base_path,
                                  builder=builder,
                                  template_dir=template_dir)
Example #56
0
from jinja2.sandbox import SandboxedEnvironment


jinja_template_env = SandboxedEnvironment(
    variable_start_string='{$',
    variable_end_string='$}',
)


def jinja_render_string(string, variables):
    template = jinja_template_env.from_string(string)
    result = template.render(variables)
    return result

if __name__ == '__main__':
    string = '<!DOCTYPE html><html lang="en"><head><meta charset="UTF-8"><title>Title</title></head>' \
             '<body>hello {$ name $}!<div></div>{% for i in items %}{% if i %}<div>{$ i $}</div>{% endif %}{% endfor %}</body></html>'
    variables = {"name": "jaja", "items": [i for i in range(0, 9)]}
    result = jinja_render_string(string, variables)
    print(result)
Example #57
0
class Fetcher(object):
    def __init__(self, download_size_limit=config.download_size_limit):
        if pycurl:
            httpclient.AsyncHTTPClient.configure(
                'tornado.curl_httpclient.CurlAsyncHTTPClient')
        self.client = httpclient.AsyncHTTPClient()
        self.download_size_limit = download_size_limit
        self.jinja_env = Environment()
        self.jinja_env.globals = utils.jinja_globals

    def render(self, request, env, session=[]):
        request = dict(request)
        if isinstance(session, cookie_utils.CookieSession):
            _cookies = session
        else:
            _cookies = cookie_utils.CookieSession()
            _cookies.from_json(session)

        def _render(obj, key):
            if not obj.get(key):
                return
            obj[key] = self.jinja_env.from_string(obj[key]).render(
                _cookies=_cookies, **env)

        _render(request, 'method')
        _render(request, 'url')
        for header in request['headers']:
            _render(header, 'name')
            _render(header, 'value')
        for cookie in request['cookies']:
            _render(cookie, 'name')
            _render(cookie, 'value')
        _render(request, 'data')
        return request
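    # Illustrative only: with env = {'name': 'foo'}, a request url of
    # 'http://example.com/u/{{ name }}' is rendered to 'http://example.com/u/foo'
    # before build_request() turns it into a tornado HTTPRequest.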

    def build_request(self,
                      obj,
                      download_size_limit=config.download_size_limit):
        env = obj['env']
        rule = obj['rule']
        request = self.render(obj['request'], env['variables'], env['session'])

        method = request['method']
        url = request['url']
        headers = dict((e['name'], e['value']) for e in request['headers'])
        cookies = dict((e['name'], e['value']) for e in request['cookies'])
        data = request.get('data')
        if method == 'GET':
            data = None
        elif method == 'POST':
            data = request.get('data', '')

        def set_size_limit_callback(curl):
            def size_limit(download_size, downloaded, upload_size, uploaded):
                if download_size and download_size > download_size_limit:
                    return 1
                if downloaded > download_size_limit:
                    return 1
                return 0

            curl.setopt(pycurl.NOPROGRESS, 0)
            curl.setopt(pycurl.PROGRESSFUNCTION, size_limit)
            return curl

        req = httpclient.HTTPRequest(
            url=url,
            method=method,
            headers=headers,
            body=data,
            follow_redirects=False,
            max_redirects=0,
            decompress_response=True,
            allow_nonstandard_methods=True,
            allow_ipv6=True,
            prepare_curl_callback=set_size_limit_callback,
        )

        session = cookie_utils.CookieSession()
        if req.headers.get('Cookie'):
            session.update(dict(x.strip().split('=', 1) \
                    for x in req.headers['Cookie'].split(';') \
                    if '=' in x))
        if isinstance(env['session'], cookie_utils.CookieSession):
            session.from_json(env['session'].to_json())
        else:
            session.from_json(env['session'])
        session.update(cookies)
        cookie_header = session.get_cookie_header(req)
        if cookie_header:
            req.headers['Cookie'] = cookie_header

        env['session'] = session

        return req, rule, env

    @staticmethod
    def response2har(response):
        request = response.request

        def build_headers(headers):
            result = []
            for k, v in headers.get_all():
                result.append(dict(name=k, value=v))
            return result

        def build_request(request):
            url = urlparse.urlparse(request.url)
            ret = dict(
                    method = request.method,
                    url = request.url,
                    httpVersion = 'HTTP/1.1',
                    headers = build_headers(request.headers),
                    queryString = [
                        {'name': n, 'value': v} for n, v in\
                                urlparse.parse_qsl(url.query)],
                    cookies = [
                        {'name': n, 'value': v} for n, v in \
                                urlparse.parse_qsl(request.headers.get('cookie', ''))],
                    headersSize = -1,
                    bodySize = len(request.body) if request.body else 0,
                    )
            if request.body:
                ret['postData'] = dict(
                    mimeType=request.headers.get('content-type'),
                    text=request.body,
                )
                if ret['postData'][
                        'mimeType'] == 'application/x-www-form-urlencoded':
                    ret['postData']['params'] = [
                            {'name': n, 'value': v} for n, v in \
                                urlparse.parse_qsl(request.body)]
                    try:
                        _ = json.dumps(ret['postData']['params'])
                    except UnicodeDecodeError:
                        logger.error('params encoding error')
                        del ret['postData']['params']

            return ret

        def build_response(response):
            cookies = cookie_utils.CookieSession()
            cookies.extract_cookies_to_jar(response.request, response)

            encoding = utils.find_encoding(response.body, response.headers)
            if not response.headers.get('content-type'):
                response.headers['content-type'] = 'text/plain'
            if 'charset=' not in response.headers.get('content-type', ''):
                response.headers['content-type'] += '; charset=' + encoding

            return dict(
                status=response.code,
                statusText=response.reason,
                headers=build_headers(response.headers),
                cookies=cookies.to_json(),
                content=dict(
                    size=len(response.body),
                    mimeType=response.headers.get('content-type'),
                    text=base64.b64encode(response.body),
                    decoded=utils.decode(response.body, response.headers),
                ),
                redirectURL=response.headers.get('Location'),
                headersSize=-1,
                bodySize=-1,
            )

        entry = dict(
            startedDateTime=datetime.now().isoformat(),
            time=response.request_time,
            request=build_request(request),
            response=build_response(response),
            cache={},
            timings=response.time_info,
            connections="0",
            pageref="page_0",
        )
        return entry

    @staticmethod
    def run_rule(response, rule, env):
        success = True
        msg = ''

        content = [
            -1,
        ]

        def getdata(_from):
            if _from == 'content':
                if content[0] == -1:
                    content[0] = utils.decode(response.body)
                return content[0]
            elif _from == 'status':
                return '%s' % response.code
            elif _from.startswith('header-'):
                _from = _from[7:]
                return response.headers.get(_from, '')
            elif _from == 'header':
                return unicode(response.headers)
            else:
                return ''

        for r in rule.get('success_asserts') or []:
            if re.search(r['re'], getdata(r['from'])):
                break
        else:
            if rule.get('success_asserts'):
                success = False

        for r in rule.get('failed_asserts') or []:
            if re.search(r['re'], getdata(r['from'])):
                success = False
                msg = 'fail assert: %s' % r
                break

        for r in rule.get('extract_variables') or []:
            pattern = r['re']
            flags = 0
            find_all = False

            re_m = re.match(r"^/(.*?)/([gim]*)$", r['re'])
            if re_m:
                pattern = re_m.group(1)
                if 'i' in re_m.group(2):
                    flags |= re.I
                if 'm' in re_m.group(2):
                    flags |= re.M
                if 'g' in re_m.group(2):
                    find_all = True

            if find_all:
                result = []
                for m in re.compile(pattern,
                                    flags).finditer(getdata(r['from'])):
                    if m.groups():
                        m = m.groups()[0]
                    else:
                        m = m.group(0)
                    result.append(m)
                env['variables'][r['name']] = result
            else:
                m = re.compile(pattern, flags).search(getdata(r['from']))
                if m:
                    if m.groups():
                        m = m.groups()[0]
                    else:
                        m = m.group(0)
                    env['variables'][r['name']] = m

        return success, msg

    @staticmethod
    def tpl2har(tpl):
        def build_request(en):
            url = urlparse.urlparse(en['request']['url'])
            request = dict(
                    method = en['request']['method'],
                    url = en['request']['url'],
                    httpVersion = 'HTTP/1.1',
                    headers = [
                        {'name': x['name'], 'value': x['value'], 'checked': True} for x in\
                                en['request'].get('headers', [])],
                    queryString = [
                        {'name': n, 'value': v} for n, v in\
                                urlparse.parse_qsl(url.query)],
                    cookies = [
                        {'name': x['name'], 'value': x['value'], 'checked': True} for x in\
                                en['request'].get('cookies', [])],
                    headersSize = -1,
                    bodySize = len(en['request'].get('data')) if en['request'].get('data') else 0,
                    )
            if en['request'].get('data'):
                request['postData'] = dict(
                    mimeType=en['request'].get('mimeType'),
                    text=en['request'].get('data'),
                )
                if en['request'].get(
                        'mimeType') == 'application/x-www-form-urlencoded':
                    params = [{'name': x[0], 'value': x[1]} \
                        for x in urlparse.parse_qsl(en['request']['data'], True)]
                    request['postData']['params'] = params
            return request

        entries = []
        for en in tpl:
            entry = dict(
                checked=True,
                startedDateTime=datetime.now().isoformat(),
                time=1,
                request=build_request(en),
                response={},
                cache={},
                timings={},
                connections="0",
                pageref="page_0",
                success_asserts=en.get('rule', {}).get('success_asserts', []),
                failed_asserts=en.get('rule', {}).get('failed_asserts', []),
                extract_variables=en.get('rule',
                                         {}).get('extract_variables', []),
            )
            entries.append(entry)
        return dict(log=dict(creator=dict(name='binux', version='qiandao'),
                             entries=entries,
                             pages=[],
                             version='1.2'))

    @gen.coroutine
    def fetch(self, obj, proxy={}):
        """
        obj = {
          request: {
            method: 
            url: 
            headers: [{name: , value: }, ]
            cookies: [{name: , value: }, ]
            data:
          }
          rule: {
            success_asserts: [{re: , from: 'content'}, ]
            failed_asserts: [{re: , from: 'content'}, ]
            extract_variables: [{name: , re:, from: 'content'}, ]
          }
          env: {
            variables: {
              name: value
            }
            session: [
            ]
          }
        }
        """
        req, rule, env = self.build_request(obj, self.download_size_limit)

        if proxy:
            for key in proxy:
                setattr(req, 'proxy_%s' % key, proxy[key])

        try:
            if config.debug:
                logger.info('request:' + req.url)
                logger.info(req.headers)
                logger.info('request_body:')
                logger.info(req.body)
            response = yield self.client.fetch(req)
            if config.debug:
                logger.info('response:' + str(response.code))
                logger.info(response.headers)
                logger.info('response_body:')
                logger.info(response.body)
        except httpclient.HTTPError as e:
            if not e.response:
                raise
            response = e.response

        env['session'].extract_cookies_to_jar(response.request, response)
        success, msg = self.run_rule(response, rule, env)

        raise gen.Return({
            'success': success,
            'response': response,
            'env': env,
            'msg': msg,
        })

    FOR_START = re.compile(r'{%\s*for\s+(\w+)\s+in\s+(\w+)\s*%}')
    FOR_END = re.compile(r'{%\s*endfor\s*%}')

    def parse(self, tpl):
        stmt_stack = []

        for i, entry in enumerate(tpl):
            if 'type' in entry:
                yield entry
            elif self.FOR_START.match(entry['request']['url']):
                m = self.FOR_START.match(entry['request']['url'])
                stmt_stack.append({
                    'type': 'for',
                    'target': m.group(1),
                    'from': m.group(2),
                    'body': []
                })
            elif self.FOR_END.match(entry['request']['url']):
                if stmt_stack and stmt_stack[-1]['type'] == 'for':
                    entry = stmt_stack.pop()
                    if stmt_stack:
                        stmt_stack[-1]['body'].append(entry)
                    else:
                        yield entry
            elif stmt_stack:
                stmt_stack[-1]['body'].append({
                    'type': 'request',
                    'entry': entry,
                })
            else:
                yield {
                    'type': 'request',
                    'entry': entry,
                }

        while stmt_stack:
            yield stmt_stack.pop()

    @gen.coroutine
    def do_fetch(self, tpl, env, proxies=config.proxies, request_limit=100):
        """
        do a fetch of the whole tpl
        """
        if proxies:
            proxy = random.choice(proxies)
        else:
            proxy = {}

        for i, block in enumerate(self.parse(tpl)):
            if request_limit <= 0:
                raise Exception('request limit')
            elif block['type'] == 'for':
                for each in env['variables'].get(block['from'], []):
                    env['variables'][block['target']] = each
                    env = yield self.do_fetch(block['body'],
                                              env,
                                              proxies=[proxy],
                                              request_limit=request_limit)
            elif block['type'] == 'request':
                entry = block['entry']
                try:
                    request_limit -= 1
                    result = yield self.fetch(dict(
                        request=entry['request'],
                        rule=entry['rule'],
                        env=env,
                    ),
                                              proxy=proxy)
                    env = result['env']
                except Exception as e:
                    if config.debug:
                        logging.exception(e)
                    raise Exception(
                        'failed at %d/%d request, error:%r, %s' %
                        (i + 1, len(tpl), e, entry['request']['url']))
                if not result['success']:
                    raise Exception('failed at %d/%d request, %s, %s' %
                                    (i + 1, len(tpl), result['msg'],
                                     entry['request']['url']))
        raise gen.Return(env)
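A minimal sketch of the obj payload whose shape is documented in fetch() above; every URL, pattern, and variable name below is made up for illustration.

# Illustrative payload only -- none of these values come from the project.
obj = {
    'request': {
        'method': 'GET',
        'url': 'http://example.com/checkin',
        'headers': [{'name': 'User-Agent', 'value': 'qiandao'}],
        'cookies': [],
        'data': '',
    },
    'rule': {
        # run_rule: at least one success_assert must match, or the fetch fails
        'success_asserts': [{'re': 'Welcome', 'from': 'content'}],
        # any failed_assert match also marks the fetch as failed
        'failed_asserts': [{'re': 'Access denied', 'from': 'content'}],
        # the /pattern/g form collects every match into a list; i/m map to re.I/re.M
        'extract_variables': [{'name': 'tokens', 're': r'/token="(\w+)"/g', 'from': 'content'}],
    },
    'env': {'variables': {'username': 'demo'}, 'session': []},
}
# fetcher = Fetcher()
# result = yield fetcher.fetch(obj)   # inside a @gen.coroutine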
Example #58
0
import textwrap

from jinja2.sandbox import SandboxedEnvironment

# The default env options for jinja2
DEFAULT_ENV_OPTIONS = {
    "autoescape": False,
    "block_start_string": "[%",
    "block_end_string": "%]",
    "variable_start_string": "[[",
    "variable_end_string": "]]",
    "keep_trailing_newline": False,
}

jinja_env = SandboxedEnvironment(**DEFAULT_ENV_OPTIONS)


class Component(object):
    def template(self):
        return None

    def __str__(self):
        return self.render()

    def _get_template_src(self):
        src = (self.template() or "").strip("\n")
        return textwrap.dedent(src)

    def _jinja_render(self, template_src):
        # Build the context from the component's public attributes and render
        # the template with it (assumed completion; the source is truncated here).
        props = {
            key: value
            for key, value in vars(self).items()
            if not key.startswith("_")
        }
        return jinja_env.from_string(template_src).render(**props)
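A short demonstration (assumed usage, not part of the original example) of the delimiters configured in DEFAULT_ENV_OPTIONS: variables use [[ ]], blocks use [% %], and the default {{ }} passes through as literal text.

demo = jinja_env.from_string("[% for n in names %][[ n ]] says {{ hi }}\n[% endfor %]")
print(demo.render(names=["Ada", "Grace"]))
# -> Ada says {{ hi }}
#    Grace says {{ hi }}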
Example #59
0
class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
    """
    Interfaces the rendering environment of jinja2 for use in Sphinx.
    """

    # TemplateBridge interface

    def init(self, builder, theme=None, dirs=None):
        # create a chain of paths to search
        if theme:
            # the theme's own dir and its bases' dirs
            pathchain = theme.get_dirchain()
            # then the theme parent paths
            loaderchain = pathchain + theme.themepath
        elif dirs:
            pathchain = list(dirs)
            loaderchain = list(dirs)
        else:
            pathchain = []
            loaderchain = []

        # prepend explicit template paths
        self.templatepathlen = len(builder.config.templates_path)
        if builder.config.templates_path:
            cfg_templates_path = [
                path.join(builder.confdir, tp)
                for tp in builder.config.templates_path
            ]
            pathchain[0:0] = cfg_templates_path
            loaderchain[0:0] = cfg_templates_path

        # store it for use in newest_template_mtime
        self.pathchain = pathchain

        # make the paths into loaders
        self.loaders = [SphinxFileSystemLoader(x) for x in loaderchain]

        use_i18n = builder.app.translator is not None
        extensions = use_i18n and ['jinja2.ext.i18n'] or []
        self.environment = SandboxedEnvironment(loader=self,
                                                extensions=extensions)
        self.environment.filters['tobool'] = _tobool
        self.environment.filters['toint'] = _toint
        self.environment.globals['debug'] = contextfunction(pformat)
        self.environment.globals['accesskey'] = contextfunction(accesskey)
        self.environment.globals['idgen'] = idgen
        if use_i18n:
            self.environment.install_gettext_translations(
                builder.app.translator)

    def render(self, template, context):
        return self.environment.get_template(template).render(context)

    def render_string(self, source, context):
        return self.environment.from_string(source).render(context)

    def newest_template_mtime(self):
        return max(mtimes_of_files(self.pathchain, '.html'))

    # Loader interface

    def get_source(self, environment, template):
        loaders = self.loaders
        # exclamation mark starts search from theme
        if template.startswith('!'):
            loaders = loaders[self.templatepathlen:]
            template = template[1:]
        for loader in loaders:
            try:
                return loader.get_source(environment, template)
            except TemplateNotFound:
                pass
        raise TemplateNotFound(template)
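A brief usage sketch of the bridge; the builder and theme objects are assumed to be supplied by a Sphinx HTML build (in practice the builder constructs and initialises the bridge itself), and nothing here comes from the snippet above.

bridge = BuiltinTemplateLoader()
bridge.init(builder, theme=theme)   # builder/theme provided by Sphinx

page_html = bridge.render('page.html', {'title': 'Demo'})
snippet = bridge.render_string('<h1>{{ title|e }}</h1>', {'title': 'Demo'})
# A name prefixed with '!' (e.g. '!page.html') skips the project template
# dirs and resolves against the theme's own template, per get_source above.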
Example #60
0
def test_item_and_attribute(self):
    from jinja2.sandbox import SandboxedEnvironment
    for env in (Environment(), SandboxedEnvironment()):
        tmpl = env.from_string('{{ foo.items()|list }}')
        tmpl = env.from_string('{{ foo|attr("items")()|list }}')
        tmpl = env.from_string('{{ foo["items"] }}')