Code example #1
    def __init__(self, **kwargs):
        if 'environment' in kwargs:
            configs = self.get_configs_from_environment(kwargs['environment'])
            self.config = Config(**configs)
        else:
            self.config = Config(**kwargs)

        # Cache output of make_hash in-memory when using offline compress
        self.offline_hash_cache = {}

        # Cache compressed contents during offline compress step
        self.offline_compress_cache = {}
Code example #2
 def __init__(self, **kwargs):
     if 'environment' in kwargs:
         configs = self.get_configs_from_environment(kwargs['environment'])
         self.config = Config(**configs)
     else:
         self.config = Config(**kwargs)
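The constructor accepts either a Jinja2 environment (its compressor_* attributes are collected by get_configs_from_environment, shown in code example #3) or the settings passed directly as keyword arguments. A minimal sketch of both paths, using placeholder values rather than library defaults:

import jinja2

# Path 1: harvest compressor_* attributes from a Jinja2 environment.
env = jinja2.Environment()
env.compressor_output_dir = 'static/dist'   # placeholder setting
env.compressor_debug = False
compressor = Compressor(environment=env)

# Path 2: pass the same settings straight through to Config.
compressor = Compressor(compressor_output_dir='static/dist',
                        compressor_debug=False)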
Code example #3
class Compressor(object):

    def __init__(self, **kwargs):
        if 'environment' in kwargs:
            configs = self.get_configs_from_environment(kwargs['environment'])
            self.config = Config(**configs)
        else:
            self.config = Config(**kwargs)

    def compress(self, html, compression_type):

        if not self.config.compressor_enabled:
            return html

        compression_type = compression_type.lower()
        html_hash = self.make_hash(html)

        if not os.path.exists(u(self.config.compressor_output_dir)):
            os.makedirs(u(self.config.compressor_output_dir))

        cached_file = os.path.join(
            u(self.config.compressor_output_dir),
            u('{hash}.{extension}').format(
                hash=html_hash,
                extension=compression_type,
            ),
        )

        if os.path.exists(cached_file):
            filename = os.path.join(
                u(self.config.compressor_static_prefix),
                os.path.basename(cached_file),
            )
            return self.render_element(filename, compression_type)

        assets = OrderedDict()
        soup = BeautifulSoup(html, PARSER)
        for count, c in enumerate(self.find_compilable_tags(soup)):

            url = c.get('src') or c.get('href')
            if url:
                filename = os.path.basename(u(url)).split('.', 1)[0]
                uri_cwd = os.path.join(u(self.config.compressor_static_prefix), os.path.dirname(u(url)))
                text = open(self.find_file(u(url)), 'r', encoding='utf-8')
                cwd = os.path.dirname(text.name)
            else:
                filename = u('inline{0}').format(count)
                uri_cwd = None
                text = c.string
                cwd = None

            mimetype = c['type'].lower()
            try:
                compressor = self.config.compressor_classes[mimetype]
            except KeyError:
                msg = u('Unsupported type of compression {0}').format(mimetype)
                raise RuntimeError(msg)

            text = self.get_contents(text)
            compressed = compressor.compile(text,
                                            mimetype=mimetype,
                                            cwd=cwd,
                                            uri_cwd=uri_cwd,
                                            debug=self.config.compressor_debug)

            if not self.config.compressor_debug:
                outfile = cached_file
            else:
                outfile = os.path.join(
                    u(self.config.compressor_output_dir),
                    u('{hash}-{filename}.{extension}').format(
                        hash=html_hash,
                        filename=filename,
                        extension=compression_type,
                    ),
                )

            if assets.get(outfile) is None:
                assets[outfile] = u('')
            assets[outfile] += u("\n") + compressed

        blocks = u('')
        for outfile, asset in assets.items():
            with open(outfile, 'w', encoding='utf-8') as fh:
                fh.write(asset)
            filename = os.path.join(
                u(self.config.compressor_static_prefix),
                os.path.basename(outfile),
            )
            blocks += self.render_element(filename, compression_type)

        return blocks

    def make_hash(self, html):
        soup = BeautifulSoup(html, PARSER)
        compilables = self.find_compilable_tags(soup)
        html_hash = hashlib.md5(utf8_encode(html))

        for c in compilables:
            url = c.get('src') or c.get('href')
            if url:
                with open(self.find_file(url), 'r', encoding='utf-8') as f:
                    while True:
                        content = f.read(1024)
                        if content:
                            html_hash.update(utf8_encode(content))
                        else:
                            break

        return html_hash.hexdigest()

    def find_file(self, path):
        if callable(self.config.compressor_source_dirs):
            filename = self.config.compressor_source_dirs(path)
            if os.path.exists(filename):
                return filename
        else:
            if isinstance(self.config.compressor_source_dirs, basestring):
                dirs = [self.config.compressor_source_dirs]
            else:
                dirs = self.config.compressor_source_dirs

            for d in dirs:
                if self.config.compressor_static_prefix_precompress is not None and path.startswith('/'):
                    path = path.replace(self.config.compressor_static_prefix_precompress, '', 1).lstrip(os.sep).lstrip('/')
                filename = os.path.join(d, path)
                if os.path.exists(filename):
                    return filename

        raise IOError(2, u('File not found {0}').format(path))

    def find_compilable_tags(self, soup):
        tags = ['link', 'style', 'script']
        for tag in soup.find_all(tags):

            # don't compress externally hosted assets
            src = tag.get('src') or tag.get('href')
            if src and (src.startswith('http') or src.startswith('//')):
                continue

            if tag.get('type') is None:
                if tag.name == 'script':
                    tag['type'] = 'text/javascript'
                if tag.name == 'style':
                    tag['type'] = 'text/css'
            else:
                # normalize an explicitly declared type so the compressor_classes lookup matches
                tag['type'] = tag['type'].lower()

            if tag.get('type') is None:
                raise RuntimeError(u('Tags to be compressed must have a type attribute: {0}').format(u(tag)))

            yield tag

    def get_contents(self, src):
        # file here (like u, utf8_encode and basestring elsewhere in this class) is assumed
        # to come from the project's Python 2/3 compat helpers, which this snippet omits.
        if isinstance(src, file):
            return u(src.read())
        else:
            return u(src)

    def render_element(self, filename, type):
        """Returns an html element pointing to filename as a string.
        """
        if type.lower() == 'css':
            return u('<link type="text/css" rel="stylesheet" href="{0}" />').format(filename)
        elif type.lower() == 'js':
            return u('<script type="text/javascript" src="{0}"></script>').format(filename)
        else:
            raise RuntimeError(u('Unsupported type of compression {0}').format(type))

    def get_configs_from_environment(self, environment):
        configs = {}
        for key in dir(environment):
            if key.startswith('compressor_'):
                configs[key] = getattr(environment, key)
        return configs

    def offline_compress(self, environment, template_dirs):

        if isinstance(template_dirs, basestring):
            template_dirs = [template_dirs]

        configs = self.get_configs_from_environment(environment)
        self.config.update(**configs)

        compressor_nodes = {}
        parser = Jinja2Parser(charset='utf-8', env=environment)
        for template_path in self.find_template_files(template_dirs):
            try:
                template = parser.parse(template_path)
            except IOError:  # unreadable file -> ignore
                continue
            except TemplateSyntaxError:  # broken template -> ignore
                continue
            except TemplateDoesNotExist:  # non existent template -> ignore
                continue
            except UnicodeDecodeError:
                continue

            try:
                nodes = list(parser.walk_nodes(template))
            except (TemplateDoesNotExist, TemplateSyntaxError):
                continue
            if nodes:
                template.template_name = template_path
                compressor_nodes.setdefault(template, []).extend(nodes)

        if not compressor_nodes:
            raise OfflineGenerationError(
                "No 'compress' template tags found in templates. "
                "Try setting follow_symlinks to True")

        for template, nodes in compressor_nodes.items():
            for node in nodes:
                parser.render_node({}, node, globals=environment.globals)

    def find_template_files(self, template_dirs):
        templates = set()
        for d in template_dirs:
            for root, dirs, files in os.walk(d,
                    followlinks=self.config.compressor_follow_symlinks):
                templates.update(os.path.join(root, name)
                    for name in files if not name.startswith('.'))
        return templates
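A minimal usage sketch for compress(); every path and setting below is an illustrative placeholder, and compressor_classes (the mimetype-to-compressor mapping looked up inside compress) must still be available, either from Config defaults or supplied explicitly as in the last code example:

compressor = Compressor(
    compressor_enabled=True,
    compressor_debug=False,
    compressor_output_dir='static/dist',       # where the <hash>.js / <hash>.css bundles are written
    compressor_static_prefix='/static/dist',   # prefix used in the returned <script>/<link> tags
    compressor_source_dirs=['static'],         # directories searched by find_file for src/href values
)

html = '<script type="text/javascript" src="js/app.js"></script>'
# Returns the original html when compressor_enabled is False, otherwise a tag
# pointing at the compiled bundle, e.g.
# <script type="text/javascript" src="/static/dist/<md5>.js"></script>
print(compressor.compress(html, 'js'))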
Code example #4
class Compressor(object):
    def __init__(self, **kwargs):
        if 'environment' in kwargs:
            configs = self.get_configs_from_environment(kwargs['environment'])
            self.config = Config(**configs)
        else:
            self.config = Config(**kwargs)

    def compress(self, html, compression_type):

        if not self.config.compressor_enabled:
            return html

        compression_type = compression_type.lower()
        html_hash = self.make_hash(html)

        if not os.path.exists(u(self.config.compressor_output_dir)):
            os.makedirs(u(self.config.compressor_output_dir))

        cached_file = os.path.join(
            u(self.config.compressor_output_dir),
            u('{hash}.{extension}').format(
                hash=html_hash,
                extension=compression_type,
            ),
        )

        if os.path.exists(cached_file):
            filename = os.path.join(
                u(self.config.compressor_static_prefix),
                os.path.basename(cached_file),
            )
            return self.render_element(filename, compression_type)

        assets = OrderedDict()
        soup = BeautifulSoup(html)
        for count, c in enumerate(self.find_compilable_tags(soup)):

            url = c.get('src') or c.get('href')
            if url:
                filename = os.path.basename(u(url)).split('.', 1)[0]
                uri_cwd = os.path.join(u(self.config.compressor_static_prefix),
                                       os.path.dirname(u(url)))
                text = open(self.find_file(u(url)), 'r', encoding='utf-8')
                cwd = os.path.dirname(text.name)
            else:
                filename = u('inline{0}').format(count)
                uri_cwd = None
                text = c.string
                cwd = None

            mimetype = c['type'].lower()
            try:
                compressor = self.config.compressor_classes[mimetype]
            except KeyError:
                msg = u('Unsupported type of compression {0}').format(mimetype)
                raise RuntimeError(msg)

            text = self.get_contents(text)
            compressed = compressor.compile(text,
                                            mimetype=mimetype,
                                            cwd=cwd,
                                            uri_cwd=uri_cwd,
                                            debug=self.config.compressor_debug)

            if not self.config.compressor_debug:
                outfile = cached_file
            else:
                outfile = os.path.join(
                    u(self.config.compressor_output_dir),
                    u('{hash}-{filename}.{extension}').format(
                        hash=html_hash,
                        filename=filename,
                        extension=compression_type,
                    ),
                )

            if assets.get(outfile) is None:
                assets[outfile] = u('')
            assets[outfile] += u("\n") + compressed

        blocks = u('')
        for outfile, asset in assets.items():
            with open(outfile, 'w', encoding='utf-8') as fh:
                fh.write(asset)
            filename = os.path.join(
                u(self.config.compressor_static_prefix),
                os.path.basename(outfile),
            )
            blocks += self.render_element(filename, compression_type)

        return blocks

    def make_hash(self, html):
        soup = BeautifulSoup(html)
        compilables = self.find_compilable_tags(soup)
        html_hash = hashlib.md5(utf8_encode(html))

        for c in compilables:
            url = c.get('src') or c.get('href')
            if url:
                with open(self.find_file(url), 'r', encoding='utf-8') as f:
                    while True:
                        content = f.read(1024)
                        if content:
                            html_hash.update(utf8_encode(content))
                        else:
                            break

        return html_hash.hexdigest()

    def find_file(self, path):
        if callable(self.config.compressor_source_dirs):
            filename = self.config.compressor_source_dirs(path)
            if os.path.exists(filename):
                return filename
        else:
            if isinstance(self.config.compressor_source_dirs, basestring):
                dirs = [self.config.compressor_source_dirs]
            else:
                dirs = self.config.compressor_source_dirs

            for d in dirs:
                if self.config.compressor_static_prefix_precompress is not None and path.startswith(
                        '/'):
                    path = path.replace(
                        self.config.compressor_static_prefix_precompress, '',
                        1).lstrip(os.sep).lstrip('/')
                filename = os.path.join(d, path)
                if os.path.exists(filename):
                    return filename

        raise IOError(2, u('File not found {0}').format(path))

    def find_compilable_tags(self, soup):
        tags = ['link', 'style', 'script']
        for tag in soup.find_all(tags):

            # don't compress externally hosted assets
            src = tag.get('src') or tag.get('href')
            if src and (src.startswith('http') or src.startswith('//')):
                continue

            if tag.get('type') is None:
                if tag.name == 'script':
                    tag['type'] = 'text/javascript'
                if tag.name == 'style':
                    tag['type'] = 'text/css'
            else:
                # normalize an explicitly declared type so the compressor_classes lookup matches
                tag['type'] = tag['type'].lower()

            if tag.get('type') is None:
                raise RuntimeError(
                    u('Tags to be compressed must have a type attribute: {0}').
                    format(u(tag)))

            yield tag

    def get_contents(self, src):
        if isinstance(src, file):
            return u(src.read())
        else:
            return u(src)

    def render_element(self, filename, type):
        """Returns an html element pointing to filename as a string.
        """
        if type.lower() == 'css':
            return u('<link type="text/css" rel="stylesheet" href="{0}" />'
                     ).format(filename)
        elif type.lower() == 'js':
            return u('<script type="text/javascript" src="{0}"></script>'
                     ).format(filename)
        else:
            raise RuntimeError(
                u('Unsupported type of compression {0}').format(type))

    def get_configs_from_environment(self, environment):
        configs = {}
        for key in dir(environment):
            if key.startswith('compressor_'):
                configs[key] = getattr(environment, key)
        return configs

    def offline_compress(self, environment, template_dirs):

        if isinstance(template_dirs, basestring):
            template_dirs = [template_dirs]

        configs = self.get_configs_from_environment(environment)
        self.config.update(**configs)

        compressor_nodes = {}
        parser = Jinja2Parser(charset='utf-8', env=environment)
        for template_path in self.find_template_files(template_dirs):
            try:
                template = parser.parse(template_path)
            except IOError:  # unreadable file -> ignore
                continue
            except TemplateSyntaxError:  # broken template -> ignore
                continue
            except TemplateDoesNotExist:  # non existent template -> ignore
                continue
            except UnicodeDecodeError:
                continue

            try:
                nodes = list(parser.walk_nodes(template))
            except (TemplateDoesNotExist, TemplateSyntaxError):
                continue
            if nodes:
                template.template_name = template_path
                compressor_nodes.setdefault(template, []).extend(nodes)

        if not compressor_nodes:
            raise OfflineGenerationError(
                "No 'compress' template tags found in templates. "
                "Try setting follow_symlinks to True")

        for template, nodes in compressor_nodes.items():
            for node in nodes:
                parser.render_node({}, node, globals=environment.globals)

    def find_template_files(self, template_dirs):
        templates = set()
        for d in template_dirs:
            for root, dirs, files in os.walk(
                    d, followlinks=self.config.compressor_follow_symlinks):
                templates.update(
                    os.path.join(root, name) for name in files
                    if not name.startswith('.'))
        return templates
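offline_compress walks the given template directories, parses each template, and pre-renders every compressor node it finds, so the bundles are written before the first request. A minimal invocation sketch; the directory names are placeholders, and the environment would also need the project's compress extension installed so that the templates parse, which is not shown in these examples:

import jinja2

env = jinja2.Environment(loader=jinja2.FileSystemLoader('templates'))
env.compressor_output_dir = 'static/dist'      # placeholder settings, collected by
env.compressor_static_prefix = '/static/dist'  # get_configs_from_environment and merged
env.compressor_source_dirs = ['static']        # into self.config
env.compressor_follow_symlinks = False

compressor = Compressor(environment=env)
compressor.offline_compress(env, ['templates'])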
Code example #5
File: __init__.py  Project: d9k/pyragrid
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    passwords_config_path = helpers.get_passwords_config_path(
        global_config['__file__'])
    if os.path.isfile(passwords_config_path):
        passwords_settings = helpers.load_config(passwords_config_path)
        settings = helpers.dicts_merge(passwords_settings.get('app:main', {}),
                                       settings)

    sql_engine = engine_from_config(settings, 'sqlalchemy.')
    authentication_secret = settings.get('authentication_secret')

    if authentication_secret is None:
        raise Exception(
            'authentication_secret must be set in [conf_type]_passwords.ini!')

    # see http://docs.pylonsproject.org/projects/pyramid//en/latest/tutorials/wiki2/authorization.html
    authn_policy = AuthTktAuthenticationPolicy(secret=authentication_secret,
                                               hashalg='sha512',
                                               callback=User.get_groups)
    authz_policy = ACLAuthorizationPolicy()
    # DBSession.configure(bind=engine)
    Base.metadata.bind = sql_engine
    session_factory = session_factory_from_settings(settings)
    config = Configurator(settings=settings,
                          root_factory='pyragrid.db.RootFactory')
    config.set_authentication_policy(authn_policy)
    config.set_authorization_policy(authz_policy)
    config.include('pyramid_chameleon')
    config.add_translation_dirs('colander:locale', 'deform:locale',
                                'pyragrid:locale')

    def translator(term):
        # return get_localizer(get_current_request()).translate(term)
        return get_current_request().localizer.translate(term)

    deform_template_dir = resource_filename('deform', 'templates/')
    zpt_renderer = deform.ZPTRendererFactory([deform_template_dir],
                                             translator=translator)
    deform.Form.set_default_renderer(zpt_renderer)

    payment_systems.load_by_config(config)

    static_cache_max_age = 3600
    # TODO: hacky; it might be better to copy these resources with a gulp task
    config.add_static_view(
        'static/fonts/bootstrap',
        '../bower_components/bootstrap-sass-official/assets/fonts/bootstrap',
        cache_max_age=static_cache_max_age)
    config.add_static_view('static/bower_components',
                           '../bower_components',
                           cache_max_age=static_cache_max_age)
    # config.add_static_view('static/dist', '../static/dist', cache_max_age=static_cache_max_age)
    config.add_static_view('static',
                           'static',
                           cache_max_age=static_cache_max_age)
    config.add_static_view('resources',
                           'resources',
                           cache_max_age=static_cache_max_age)
    config.add_static_view('static_deform', 'deform:static')

    config.add_route('index', '/')
    config.add_route('profile_edit', '/profile/edit')
    config.add_route('vk_iframe_auth', '/vkIframeAuth')
    config.add_route('test', '/t')

    # TODO delete:
    config.add_route('add_user', '/users/add')
    config.add_route('delete_user', '/users/delete/{any_data}')

    config.add_route('login', '/login')
    config.add_route('logout', '/logout')

    config.add_route('register', '/register')
    config.add_route('register_success', '/register_success')

    config.add_route('email_check_code', '/checkEmail/{code}')

    config.add_route('article', '/article/{article_system_name}')
    config.add_route('article_revision', '/article/{article_system_name}')

    config.add_route('admin_index', '/admin')
    config.add_route('admin_test', '/test')

    config.add_route('admin_users', '/admin/users')
    config.add_route('admin_users_grid', '/admin/users/grid')
    config.add_route('admin_user_enable', '/admin/users/enable/{user_id}')
    config.add_route('admin_user_disable', '/admin/users/disable/{user_id}')
    config.add_route('admin_user_edit', '/admin/users/edit/{user_id}')

    config.add_route('admin_article_edit', '/admin/article/{article_id}/edit')
    config.add_route('admin_article_enable',
                     '/admin/article/{article_id}/enable')
    config.add_route('admin_article_disable',
                     '/admin/article/{article_id}/disable')
    config.add_route('admin_article_revisions',
                     '/admin/article/{article_id}/revisions')
    config.add_route(
        'admin_article_revision',
        '/admin/article/{article_id}/revision/{article_revision_id}')
    config.add_route(
        'admin_article_revision_activate',
        '/admin/article/{article_id}/revision/{article_revision_id}/activate')
    config.add_route('admin_article_new', '/admin/new/article')
    config.add_route('admin_articles', '/admin/articles')
    config.add_route('admin_articles_grid', '/admin/articles/grid')
    config.add_route('admin_article_revisions_grid',
                     '/admin/article/{article_id}/revisions/grid')

    config.add_route('admin_goods', '/admin/goods')
    config.add_route('admin_goods_grid', '/admin/goods/grid')
    config.add_route('admin_good_new', '/admin/new/good')
    config.add_route('admin_good_edit', '/admin/good/{id}/edit')
    config.add_route('admin_good_enable', '/admin/good/{id}/enable')
    config.add_route('admin_good_disable', '/admin/good/{id}/disable')

    config.add_route('test_mail', '/test/mail')
    config.add_route('test_render', '/test/render')
    config.add_route('test_notify', '/test/notify')
    config.add_route('test_view_notify', '/test/view_notify')
    config.add_route('test_url', '/test/url')
    config.add_route('test_ajax', '/test/ajax')
    config.add_route('test_redirect', '/test/redirect')
    config.add_route('test_bootgrid_edit', '/test/bootgrid')
    config.add_route('test_script_inclusion', '/test/script_inclusion')
    config.add_route('test_db_enum', '/test/db_enum')
    config.add_route('test_filetree', '/test/filetree')
    config.add_route('test_ajax_filetree', '/test/ajax_filetree')
    config.add_route('test_filedialog', '/test/filedialog')
    config.add_route('test_droparea', '/test/droparea')
    config.add_route('test_jquery_file_upload', '/test/jquery_file_upload')
    config.add_route('test_blocks', '/test/blocks')
    config.add_route('test_nunjucks', '/test/nunjucks')
    config.add_route('test_jac', '/test/jac')
    config.add_route('test_mobx', '/test/mobx')
    config.add_route('test_mobx_fetch', '/test/mobx_fetch')

    # urlList: '/uploads/list'
    # urlInfo: '/uploads/info'
    # urlOperations: '/uploads/manage'

    config.add_route('uploads_list', '/uploads/list')
    config.add_route('uploads_info', '/uploads/info')
    config.add_route('uploads_manage', '/uploads/manage')
    config.add_route('uploads_handle_droparea', '/uploads/handleDropArea')
    config.add_route('uploads_handle_jquery_file_upload',
                     '/uploads/handleJqueryFileUpload')

    config.add_route('order_status', '/order/{id}/status')

    config.add_route('good_one_click_buy', '/goods/{id}/one_click_buy')

    config.add_notfound_view(views_articles.view_custom_not_found,
                             append_slash=True)

    config.set_session_factory(session_factory)

    config.add_subscriber(add_renderer_globals, pyramid.events.BeforeRender)

    # config.registry['mailer'] = Mailer.from_settings(settings)

    # fix vk init:
    if pyramid.threadlocal.get_current_registry().settings is None:
        pyramid.threadlocal.get_current_registry().settings = settings

    config.scan(ignore=['pyragrid.payment_systems'])
    # TODO: move the jinja2 env setup into the config

    app = config.make_wsgi_app()
    jinja2_env = pyramid_jinja2.get_jinja2_environment(config)
    jac_output_dir_path = os.path.join(os.path.dirname(__file__), 'static',
                                       'dist')
    #jinja2_env.compressor_output_dir = './pyragrid/static/dist'
    jinja2_env.compressor_output_dir = jac_output_dir_path
    jinja2_env.compressor_debug = True

    # BabelCompressor.binary = './node_modules/.bin/babel'
    BabelCompressor.binary = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'node_modules', '.bin',
                     'babel'))
    BabelCompressor.cwd_for_presets_search = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..'))
    BabelCompressor.presets = [
        'es2015', 'stage-0', 'react', 'es2016', 'es2017'
    ]
    # note: the transform-runtime plugin adds lines like `require("babel-runtime/core-js/object/get-prototype-of")` to the compiled code
    # TODO https://babeljs.io/docs/plugins/transform-runtime/
    # BabelCompressor.plugins = ['transform-runtime']

    jac_default_config = JacDefaultConfig()
    jinja2_env.compressor_classes = jac_default_config.get(
        'compressor_classes')
    jinja2_env.compressor_classes['text/babel'] = BabelCompressor

    return app
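The closing lines register BabelCompressor under the text/babel mimetype in compressor_classes, so any compilable tag whose type attribute is text/babel is compiled by Babel (the lookup happens in Compressor.compress, see code examples #3 and #4). A hypothetical sketch of the effect; the .jsx path is a placeholder, and the environment would still need the remaining compressor_* settings from the earlier examples:

compressor = Compressor(environment=jinja2_env)
html = '<script type="text/babel" src="js/app.jsx"></script>'
# The tag is routed to BabelCompressor via compressor_classes['text/babel'].
print(compressor.compress(html, 'js'))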