Пример #1
0
 def __init__(self,
              cache_dir,
              threshold,
              mode,
              key_prefix,
              use_signer=False,
              permanent=True):
     """Create a session interface backed by a cachelib FileSystemCache.

     cachelib is imported lazily so it is only required when this
     backend is actually selected.
     """
     from cachelib import FileSystemCache
     self.key_prefix = key_prefix
     self.use_signer = use_signer
     self.permanent = permanent
     self.cache = FileSystemCache(cache_dir, threshold=threshold, mode=mode)
Пример #2
0
def deltabot_start(bot: DeltaBot) -> None:
    """Initialize the plugin's on-disk cache next to the bot's database.

    Creates a per-plugin directory (named after this module) and attaches
    a FileSystemCache to it: up to 5000 entries, 60-day default timeout.
    """
    path = os.path.join(os.path.dirname(bot.account.db_path), __name__)
    # exist_ok avoids the race between the old exists() check and makedirs().
    os.makedirs(path, exist_ok=True)
    signal.cache = FileSystemCache(path,
                                   threshold=5000,
                                   default_timeout=60 * 60 * 24 * 60)
Пример #3
0
    def __init__(
        self,
        cache_dir,
        threshold=500,
        default_timeout=300,
        mode=0o600,
        hash_method=hashlib.md5,
        ignore_errors=False,
    ):
        """Initialize both cache base classes and record the error policy.

        :param cache_dir: directory where cache entries are stored.
        :param threshold: max number of entries before pruning starts.
        :param default_timeout: entry lifetime in seconds.
        :param mode: file permission bits for cache files.
        :param hash_method: hash used to derive filenames from keys.
        :param ignore_errors: whether filesystem failures should be
            silently ignored (stored for callers to consult).
        """

        # Each base is initialized explicitly (no super()) so both sides
        # of the hierarchy receive exactly the arguments they expect.
        BaseCache.__init__(self, default_timeout=default_timeout)
        CachelibFileSystemCache.__init__(
            self,
            cache_dir=cache_dir,
            threshold=threshold,
            default_timeout=default_timeout,
            mode=mode,
            hash_method=hash_method,
        )

        self.ignore_errors = ignore_errors
Пример #4
0
    def setup_cache(self):
        """
        Setup the cache from configuration.
        If cache_path is set in the config,
        it can be set to file:///some/cache/path
        or memcached://127.0.0.1:port for memcached

        Otherwise, try to use storage.get_cache_path()
        as a cache directory. If cachelib can't be imported,
        no cache will be available.

        :return: a cachelib cache instance, or None when unavailable.
        """
        cache = None
        cache_url = self.config.get('cache_path')
        if cache_url:
            # cache_url could be memcached://127.0....
            # or file:///some/path or /some/path
            split_url = cache_url.split('://', 1)
            if len(split_url) == 1:
                # Bare path with no scheme: treat the whole value as a
                # file path. (was: ('file://', split_url) — that proto
                # could never match 'file' below and `path` ended up
                # being a one-element list instead of a string)
                split_url = ('file', split_url[0])
            proto, path = split_url
            if proto == 'memcached':
                from cachelib import MemcachedCache
                from hashlib import sha1
                # Key prefix is derived from the site id so several sites
                # can share one memcached instance.
                cache = MemcachedCache(
                    path.split(','),
                    key_prefix=sha1(
                        self.site_id.encode('utf8')).hexdigest()[:8])
            elif proto == 'file':
                from cachelib import FileSystemCache
                cache = FileSystemCache(os.path.abspath(path))
        else:
            try:
                from cachelib import FileSystemCache
            except ImportError:
                # cachelib not installed: run without a cache.
                pass
            else:
                cache = FileSystemCache(
                    os.path.abspath(self.storage.get_cache_path()))
        return cache
Пример #5
0
 def __init__(
     self,
     *users_and_organizations,
     cache_dir,  # must be an absolute path
     index_template=None,
     repository_template=None,
 ) -> None:
     """Set up application state and the on-disk repository cache.

     :type users_and_organizations: (github.Organization | github.User, ...)
     :type index_template: str
     :type repository_template: str
     :rtype: None
     """
     # Repository metadata is cached on disk for 15 minutes.
     self._cache = FileSystemCache(cache_dir=cache_dir, default_timeout=900)
     self._users_and_organizations = users_and_organizations
     # Fall back to the bundled templates when none are supplied.
     self._index_template = index_template if index_template else templates.INDEX
     self._repository_template = (repository_template
                                  if repository_template
                                  else templates.REPOSITORY)
     # Populated later, during request handling / authentication.
     self._auth = None
     self._request_domain = None
     self._request_urls = None
     self.current_user = None
     self.github_api = None
Пример #6
0
def gzip_cache():
    """Return a fresh (emptied) FileSystemCache for gzipped assets."""
    gzip_store = FileSystemCache(cache_dir='.cache/gzip')
    # Always start from an empty cache.
    gzip_store.clear()
    return gzip_store
Пример #7
0
def _clear_cache():
    """Drop every entry from the on-disk cache, creating it if needed."""
    global cache  # pylint: disable=global-statement,invalid-name
    if cache:
        return cache.clear()
    cache = FileSystemCache(CACHE_DIR, CACHE_ENTRY_MAX, 0)
    return cache.clear()
Пример #8
0
def filesystem(config, *args, **kwargs):
    """Build a FileSystemCache from a Flask-style config mapping.

    CACHE_DIR always becomes the first positional argument; config-derived
    defaults may be overridden by explicit keyword arguments.
    """
    positional = chain([config['CACHE_DIR']], args)
    options = dict(gen_defaults('threshold', **config))
    options.update(kwargs)
    return FileSystemCache(*positional, **options)
Пример #9
0
                        x_prefix=1)

app.secret_key = os.getenv('SECRET_KEY')

# You can add the config to the app or to the ext
app.config['AUTH0_CLIENT_ID'] = os.getenv('AUTH0_CLIENT_ID')
app.config['AUTH0_CLIENT_SECRET'] = os.getenv('AUTH0_CLIENT_SECRET')

# Initialize the extension
auth = AuthorizationCodeFlow(
    app=app,  # or use auth.init_app() later
    scope='profile',
    base_url=os.getenv('AUTH0_BASE_URL'),  # The base url of your SSO
    # All your instances need to be able to access this path,
    # or use another backend like Redis
    cache=FileSystemCache('/tmp/flask_auth0_cache'))


@app.route('/oauth2/knock')
@auth.protected(enforce=False)
def web_auth():
    if auth.is_authenticated:

        response = make_response('OK', 200)
        response.headers['X-Auth-User'] = auth.get_verified_claims(
            auth.id_token).get('name')

        return response

    h = request.headers
    return auth.login(
Пример #10
0
        self._cache[key] = value

    def get(self, key):
        """Return the cached value for ``key``, or None when absent."""
        return self._cache.get(key, None)

    def has(self, key):
        """Return True when ``key`` is present in the cache."""
        return key in self._cache

    def delete(self, *keys):
        """Remove the given keys; return how many were actually deleted."""
        count = 0
        for key in keys:
            # was: `if key in self.has(key)` — a membership test against
            # the bool returned by has(), which raises TypeError at runtime.
            if self.has(key):
                del self._cache[key]
                count += 1
        return count


# The simpleCache uses pickle to dump and loads object. Thus we cannot store a process object in the cache
# TODO: Find a better way to manage the processes
# __cache = SimpleCache(default_timeout=0)

# __cache = Cache()
# This might be slow, but the cache system is safe
# default_timeout=0: cached entries never expire.
__cache = FileSystemCache(default_timeout=0,
                          cache_dir=os.path.join(SERVER_ROOT,
                                                 '../.atm_server_cache'))


def get_cache():
    """Return the module-wide FileSystemCache instance."""
    return __cache
Пример #11
0
 def _filesystem(self, **kwargs):
     """Returns a :class:`FileSystemCache` instance"""
     # The configured threshold always wins over any caller-supplied value.
     kwargs['threshold'] = self._config('threshold', 500)
     return FileSystemCache(self._config('dir', None), **kwargs)
Пример #12
0
 def _factory(self, *args, **kwargs):
     # Build a FileSystemCache rooted at `tmpdir`; tmpdir is expected to
     # come from an enclosing scope (e.g. a test fixture) — TODO confirm.
     return FileSystemCache(tmpdir, *args, **kwargs)
Пример #13
0
def runCouchPotato(options,
                   base_path,
                   args,
                   data_dir=None,
                   log_dir=None,
                   Env=None,
                   desktop=None):
    """Bootstrap and run the CouchPotato server.

    Prepares the database (backing it up / migrating as needed), creates
    cache and log directories, registers runtime settings on ``Env``,
    configures logging, builds the tornado Application with all request
    handlers, and finally blocks inside the tornado IOLoop until shutdown.
    """

    # Work out a usable text encoding for this platform.
    try:
        locale.setlocale(locale.LC_ALL, "")
        encoding = locale.getpreferredencoding()
    except (locale.Error, IOError):
        encoding = None

    # for OSes that are poorly configured I'll just force UTF-8
    if not encoding or encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        encoding = 'UTF-8'

    Env.set('encoding', encoding)

    # Do db stuff
    db_path = sp(os.path.join(data_dir, 'database'))
    old_db_path = os.path.join(data_dir, 'couchpotato.db')

    # Remove database folder if both exists
    if os.path.isdir(db_path) and os.path.isfile(old_db_path):
        db = SuperThreadSafeDatabase(db_path)
        db.open()
        db.destroy()

    # Check if database exists
    db = SuperThreadSafeDatabase(db_path)
    db_exists = db.exists()
    if db_exists:

        # Backup before start and cleanup old backups
        backup_path = sp(os.path.join(data_dir, 'db_backup'))
        backup_count = 5
        existing_backups = []
        if not os.path.isdir(backup_path): os.makedirs(backup_path)

        for root, dirs, files in os.walk(backup_path):
            # Only consider files being a direct child of the backup_path
            if root == backup_path:
                # NOTE(review): sorted() is lexicographic, so timestamped
                # names only sort oldest-first while they have the same
                # number of digits — confirm this is acceptable.
                for backup_file in sorted(files):
                    ints = re.findall('\d+', backup_file)

                    # Delete non zip files
                    if len(ints) != 1:
                        try:
                            os.remove(os.path.join(root, backup_file))
                        except:
                            pass
                    else:
                        existing_backups.append((int(ints[0]), backup_file))
            else:
                # Delete stray directories.
                shutil.rmtree(root)

        # Remove all but the last 5
        for eb in existing_backups[:-backup_count]:
            os.remove(os.path.join(backup_path, eb[1]))

        # Create new backup
        new_backup = sp(
            os.path.join(backup_path, '%s.tar.gz' % int(time.time())))
        zipf = tarfile.open(new_backup, 'w:gz')
        for root, dirs, files in os.walk(db_path):
            for zfilename in files:
                zipf.add(os.path.join(root, zfilename),
                         arcname='database/%s' %
                         os.path.join(root[len(db_path) + 1:], zfilename))
        zipf.close()

        # Open last
        db.open()

    else:
        db.create()

    # Force creation of cachedir
    log_dir = sp(log_dir)
    cache_dir = sp(os.path.join(data_dir, 'cache'))
    python_cache = sp(os.path.join(cache_dir, 'python'))

    if not os.path.exists(cache_dir):
        os.mkdir(cache_dir)
    if not os.path.exists(python_cache):
        os.mkdir(python_cache)

    # Shared HTTP session used as the app-wide URL opener.
    session = requests.Session()
    session.max_redirects = 5

    # Register environment settings
    Env.set('app_dir', sp(base_path))
    Env.set('data_dir', sp(data_dir))
    Env.set('log_path', sp(os.path.join(log_dir, 'CouchPotato.log')))
    Env.set('db', db)
    Env.set('http_opener', session)
    Env.set('cache_dir', cache_dir)
    Env.set('cache', FileSystemCache(python_cache))
    Env.set('console_log', options.console_log)
    Env.set('quiet', options.quiet)
    Env.set('desktop', desktop)
    Env.set('daemonized', options.daemon)
    Env.set('args', args)
    Env.set('options', options)

    # Determine debug
    debug = options.debug or Env.setting('debug', default=False, type='bool')
    Env.set('debug', debug)

    # Development
    development = Env.setting('development', default=False, type='bool')
    Env.set('dev', development)

    # Disable logging for some modules
    for logger_name in [
            'enzyme', 'guessit', 'subliminal', 'apscheduler', 'tornado',
            'requests'
    ]:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    for logger_name in ['gntp']:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Disable SSL warning
    disable_warnings()

    # Use reloader — only in interactive development runs.
    reloader = debug is True and development and not Env.get(
        'desktop') and not options.daemon

    # Logger
    logger = logging.getLogger()
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s',
                                  '%m-%d %H:%M:%S')
    level = logging.DEBUG if debug else logging.INFO
    logger.setLevel(level)
    logging.addLevelName(19, 'INFO')

    # To screen
    if (debug or
            options.console_log) and not options.quiet and not options.daemon:
        hdlr = logging.StreamHandler(sys.stderr)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    # To file
    hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'),
                                         'a',
                                         500000,
                                         10,
                                         encoding=Env.get('encoding'))
    hdlr2.setFormatter(formatter)
    logger.addHandler(hdlr2)

    # Start logging & enable colors
    # noinspection PyUnresolvedReferences
    import color_logs
    from couchpotato.core.logger import CPLog
    log = CPLog(__name__)
    log.debug('Started with options %s', options)

    # Check soft-chroot dir exists:
    try:
        # Load Soft-Chroot
        soft_chroot = Env.get('softchroot')
        soft_chroot_dir = Env.setting('soft_chroot',
                                      section='core',
                                      default=None,
                                      type='unicode')
        soft_chroot.initialize(soft_chroot_dir)
    except SoftChrootInitError as exc:
        log.error(exc)
        return
    except:
        log.error('Unable to check whether SOFT-CHROOT is defined')
        return

    # Check available space
    try:
        total_space, available_space = getFreeSpace(data_dir)
        if available_space < 100:
            log.error(
                'Shutting down as CP needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left',
                available_space)
            return
    except:
        log.error('Failed getting diskspace: %s', traceback.format_exc())

    def customwarn(message, category, filename, lineno, file=None, line=None):
        # NOTE(review): four %s placeholders but a single tuple argument —
        # this likely raises a string-formatting error when a warning fires;
        # probably meant log.warning('%s %s %s line:%s', category, message,
        # filename, lineno). Confirm before changing.
        log.warning('%s %s %s line:%s', (category, message, filename, lineno))

    warnings.showwarning = customwarn

    # Create app
    from couchpotato import WebHandler
    web_base = ('/' + Env.setting('url_base').lstrip('/') +
                '/') if Env.setting('url_base') else '/'
    Env.set('web_base', web_base)

    # Generate an API key on first run and persist it in settings.
    api_key = Env.setting('api_key')
    if not api_key:
        api_key = uuid4().hex
        Env.setting('api_key', value=api_key)

    api_base = r'%sapi/%s/' % (web_base, api_key)
    Env.set('api_base', api_base)

    # Basic config
    host = Env.setting('host', default='0.0.0.0')
    host6 = Env.setting('host6', default='::')

    config = {
        'use_reloader': reloader,
        'port': tryInt(Env.setting('port', default=5050)),
        'host': host if host and len(host) > 0 else '0.0.0.0',
        'host6': host6 if host6 and len(host6) > 0 else '::',
        'ssl_cert': Env.setting('ssl_cert', default=None),
        'ssl_key': Env.setting('ssl_key', default=None),
    }

    # Load the app
    application = Application(
        [],
        log_function=lambda x: None,
        debug=config['use_reloader'],
        gzip=True,
        cookie_secret=api_key,
        login_url='%slogin/' % web_base,
    )
    Env.set('app', application)

    # Request handlers
    application.add_handlers(
        ".*$",
        [
            (r'%snonblock/(.*)(/?)' % api_base, NonBlockHandler),

            # API handlers
            (r'%s(.*)(/?)' % api_base, ApiHandler),  # Main API handler
            (r'%sgetkey(/?)' % web_base, KeyHandler),  # Get API key
            (r'%s' % api_base, RedirectHandler, {
                "url": web_base + 'docs/'
            }),  # API docs

            # Login handlers
            (r'%slogin(/?)' % web_base, LoginHandler),
            (r'%slogout(/?)' % web_base, LogoutHandler),

            # Catch all webhandlers
            (r'%s(.*)(/?)' % web_base, WebHandler),
            (r'(.*)', WebHandler),
        ])

    # Static paths
    static_path = '%sstatic/' % web_base
    for dir_name in ['fonts', 'images', 'scripts', 'style']:
        application.add_handlers(
            ".*$",
            [('%s%s/(.*)' % (static_path, dir_name), StaticFileHandler, {
                'path':
                sp(os.path.join(base_path, 'couchpotato', 'static', dir_name))
            })])
    Env.set('static_path', static_path)

    # Load configs & plugins
    loader = Env.get('loader')
    loader.preload(root=sp(base_path))
    loader.run()

    # Fill database with needed stuff
    fireEvent('database.setup')
    if not db_exists:
        fireEvent('app.initialize', in_order=True)
    fireEvent('app.migrate')

    # Go go go!
    from tornado.ioloop import IOLoop
    from tornado.autoreload import add_reload_hook
    loop = IOLoop.current()

    # Reload hook — lets plugins shut down cleanly before autoreload.
    def reload_hook():
        fireEvent('app.shutdown')

    add_reload_hook(reload_hook)

    # Some logging and fire load event
    try:
        log.info('Starting server on port %(port)s', config)
    except:
        pass
    fireEventAsync('app.load')

    ssl_options = None
    if config['ssl_cert'] and config['ssl_key']:
        ssl_options = {
            'certfile': config['ssl_cert'],
            'keyfile': config['ssl_key'],
        }

    server = HTTPServer(application,
                        no_keep_alive=True,
                        ssl_options=ssl_options)

    try_restart = True
    restart_tries = 5

    while try_restart:
        try:
            if config['host'].startswith('unix:'):
                server.add_socket(bind_unix_socket(config['host'][5:]))
            else:
                server.listen(config['port'], config['host'])

                if Env.setting('ipv6', default=False):
                    try:
                        server.listen(config['port'], config['host6'])
                    except:
                        log.info2('Tried to bind to IPV6 but failed')

            loop.start()
            server.close_all_connections()
            server.stop()
            loop.close(all_fds=True)
        except Exception as e:
            log.error('Failed starting: %s', traceback.format_exc())
            try:
                # NOTE(review): Python-2-style exception unpacking; on
                # Python 3 this raises TypeError (swallowed by the bare
                # except below), so the errno-48 retry path never runs —
                # confirm the target Python version.
                nr, msg = e
                if nr == 48:
                    log.info(
                        'Port (%s) needed for CouchPotato is already in use, try %s more time after few seconds',
                        (config.get('port'), restart_tries))
                    time.sleep(1)
                    restart_tries -= 1

                    if restart_tries > 0:
                        continue
                    else:
                        return
            except ValueError:
                return
            except:
                pass

            raise

        try_restart = False
Пример #14
0
class FileSystemSessionInterface(SessionInterface):
    """Uses the :class:`cachelib.FileSystemCache` as a session backend.

    (This cache class previously lived at ``werkzeug.contrib.cache`` before
    being split out into the standalone ``cachelib`` package.)

    .. versionadded:: 0.2
        The `use_signer` parameter was added.

    :param cache_dir: the directory where session files are stored.
    :param threshold: the maximum number of items the session stores before it
                      starts deleting some.
    :param mode: the file mode wanted for the session files, default 0600
    :param key_prefix: A prefix that is added to FileSystemCache store keys.
    :param use_signer: Whether to sign the session id cookie or not.
    :param permanent: Whether to use permanent session or not.
    """

    # Class used for the session objects handed back to Flask.
    session_class = FileSystemSession

    def __init__(self,
                 cache_dir,
                 threshold,
                 mode,
                 key_prefix,
                 use_signer=False,
                 permanent=True):
        # Imported lazily so cachelib is only required when this backend
        # is actually selected.
        from cachelib import FileSystemCache
        self.cache = FileSystemCache(cache_dir, threshold=threshold, mode=mode)
        self.key_prefix = key_prefix
        self.use_signer = use_signer
        self.permanent = permanent

    def open_session(self, app, request):
        """Load the session for this request, or start a fresh one.

        A new empty session is returned when the cookie is missing, the
        signature is invalid, or the cached data has expired.
        """
        sid = request.cookies.get(app.session_cookie_name)
        if not sid:
            sid = self._generate_sid()
            return self.session_class(sid=sid, permanent=self.permanent)
        if self.use_signer:
            signer = self._get_signer(app)
            if signer is None:
                return None
            try:
                sid_as_bytes = signer.unsign(sid)
                sid = sid_as_bytes.decode()
            except BadSignature:
                # Tampered or invalid cookie: discard it and start over.
                sid = self._generate_sid()
                return self.session_class(sid=sid, permanent=self.permanent)

        data = self.cache.get(self.key_prefix + sid)
        if data is not None:
            return self.session_class(data, sid=sid)
        return self.session_class(sid=sid, permanent=self.permanent)

    def save_session(self, app, session, response):
        """Persist the session to the cache and set the session cookie.

        A session that was emptied (and modified) during the request is
        deleted from the cache and its cookie removed.
        """
        domain = self.get_cookie_domain(app)
        path = self.get_cookie_path(app)
        if not session:
            if session.modified:
                self.cache.delete(self.key_prefix + session.sid)
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain,
                                       path=path)
            return

        httponly = self.get_cookie_httponly(app)
        secure = self.get_cookie_secure(app)
        expires = self.get_expiration_time(app, session)
        # Store a plain dict snapshot of the session data.
        data = dict(session)
        self.cache.set(self.key_prefix + session.sid, data,
                       total_seconds(app.permanent_session_lifetime))
        if self.use_signer:
            session_id = self._get_signer(app).sign(want_bytes(session.sid))
        else:
            session_id = session.sid
        response.set_cookie(app.session_cookie_name,
                            session_id,
                            expires=expires,
                            httponly=httponly,
                            domain=domain,
                            path=path,
                            secure=secure)
Пример #15
0
#
# Copyright (C) 2019 UAVCAN Development Team <*****@*****.**>.
# Author: Pavel Kirienko <*****@*****.**>
#

from cachelib import FileSystemCache
from .. import app

# NOTE(review): writes under /tmp using a predictable path derived from the
# app's root path — fine on a single-user host; confirm it's acceptable in
# shared environments.
_cache = FileSystemCache('/tmp/' + app.root_path)


def read(key):
    """
    Atomic cache read.

    :param key: cache key to look up.
    :return: the stored value, or None if the entry does not exist
             (or has expired).
    """
    return _cache.get(key)


def write(key, value, timeout=None):
    """
    Atomic cache write.
    If the timeout is not set, the entry will never expire.
    """
    # NOTE(review): cachelib treats timeout=None as "use the cache's
    # default_timeout", not "never expire" (that is timeout=0) — confirm
    # the docstring matches the installed cachelib's behavior.
    _cache.set(key, value, timeout=timeout)
Пример #16
0
def _clear_cache():
    """Wipe the shared cache, lazily constructing it on first use."""
    global cache
    cache = cache or FileSystemCache(CACHE_DIR, CACHE_ENTRY_MAX, 0)
    return cache.clear()
Пример #17
0
# Root folder for all on-disk caches (overridable via the environment).
app.config['CACHE_FOLDER'] = os.environ.get('CACHE_FOLDER', '__cache__')

# Integrate with gunicorn logging if present:
if "gunicorn" in os.environ.get("SERVER_SOFTWARE", ""):
    gunicorn_logger = logging.getLogger('gunicorn.error')
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(gunicorn_logger.level)

# Integrate root and module logging with Flask:
root = logging.getLogger()
root.addHandler(default_handler)
root.setLevel(app.logger.level)

# Set up a persistent cache for screenshots etc.
# NOTE(review): threshold=0 / default_timeout=0 presumably mean "unbounded,
# never-expiring" under cachelib — confirm against the installed version.
screenshot_cache = FileSystemCache(os.path.join(app.config['CACHE_FOLDER'],
                                                'screenshot_cache'),
                                   threshold=0,
                                   default_timeout=0)

# Get the location of the web rendering server:
WEBRENDER_ARCHIVE_SERVER = os.environ.get("WEBRENDER_ARCHIVE_SERVER",
                                          "http://webrender:8010/render")

# Get the location of the CDX server:
CDX_SERVER = os.environ.get("CDX_SERVER", "http://cdx:9090/tc")

# Get the location of the IIIF server:
IIIF_SERVER = os.environ.get("IIIF_SERVER", "http://iiif:8182")

# Example URL to use
EXAMPLE_URL = "http://www.bl.uk/"
Пример #18
0
def filesystem(app, config, args, kwargs):
    """Create a FileSystemCache; CACHE_DIR becomes the first positional arg.

    Mutates the caller-provided ``args``/``kwargs``, mirroring the other
    cache factory helpers.
    """
    args.insert(0, config['CACHE_DIR'])
    kwargs['threshold'] = config['CACHE_THRESHOLD']
    return FileSystemCache(*args, **kwargs)
Пример #19
0
#! /usr/bin/python3

import argparse
import random
import re
import appdirs
import requests
from bs4 import BeautifulSoup
from cachelib import FileSystemCache

# Soha online English-Vietnamese dictionary endpoint; the looked-up word is
# substituted into the path.
URL = 'http://tratu.soha.vn/dict/en_vn/{}'

# Per-user cache directory for the 'envi' app.
CACHE_DIR = appdirs.user_cache_dir('envi')
# Maximum number of cached lookups before old entries are evicted.
CACHE_MAX_SIZE = 1024

# default_timeout=0: cached lookups never expire.
cache = FileSystemCache(CACHE_DIR, CACHE_MAX_SIZE, default_timeout=0)


def _random_user_agent():
    user_agents = (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
        "(KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 "
        "(KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 "
        "(KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:64.0) "
        "Gecko/20100101 Firefox/64.0",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/605.1.15 "
        "(KHTML, like Gecko) Version/12.0.2 Safari/605.1.15",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:65.0) "
Пример #20
0
def _clear_cache():
    """Empty the global cache, creating it on demand."""
    global cache
    if not cache:
        cache = FileSystemCache(CACHE_DIR, CACHE_ENTRY_MAX, 0)
    emptied = cache.clear()
    return emptied
Пример #21
0
import requests
from cachelib import FileSystemCache

from pythoncz import app

__all__ = ('get_issues', )

# Load the GraphQL query used to fetch issues; it lives next to this module.
get_issues_graphql_filename = (os.path.join(os.path.dirname(__file__),
                                            'github_get_issues.graphql'))
with open(get_issues_graphql_filename) as f:
    GET_ISSUES_GRAPHQL = f.read()

SIX_HOURS_AS_SECONDS = 21600

# Issues are cached on disk and refreshed at most every six hours.
cache = FileSystemCache(app.config['CACHE_DIR'],
                        default_timeout=SIX_HOURS_AS_SECONDS)


def get_issues(org_names):
    """Return sorted GitHub issues for the given orgs, cached on disk."""
    cached = cache.get('github-issues')
    if cached is not None:
        return cached
    # Cache miss: fetch from the API, one request chain per organization.
    session = _create_api_session()
    per_org = (_get_issues_for_org(session, org_name)
               for org_name in org_names)
    issues = _sort_issues(itertools.chain.from_iterable(per_org))
    cache.set('github-issues', issues)
    return issues


def _create_api_session():
    user_agent = ('pythoncz/{now.year}-{now.month} '
Пример #22
0
import requests
from dotenv import load_dotenv

from flask import Flask, Response, abort, request
from prometheus_client import (
    REGISTRY,
    Counter,
    Gauge,
    Histogram,
    Summary,
    generate_latest,
)
from cachelib import FileSystemCache

APP = Flask(__name__)  # Standard Flask app
# On-disk cache shared across worker processes (relative "cache/" dir).
CACHE = FileSystemCache(cache_dir="cache/")

# Request latency, labelled by method and endpoint.
FLASK_REQUEST_LATENCY = Histogram("flask_request_latency_seconds",
                                  "Flask Request Latency",
                                  ["method", "endpoint"])

# Total requests, labelled by method, endpoint and status code.
FLASK_REQUEST_COUNT = Counter("flask_request_count", "Flask Request Count",
                              ["method", "endpoint", "http_status"])

# NOTE(review): the metric name says "request size" but the help text says
# "Response Size" — confirm which one is actually measured.
FLASK_REQUEST_SIZE = Gauge(
    "flask_request_size_bytes",
    "Flask Response Size",
    ["method", "endpoint", "http_status"],
)

# Time spent refreshing data from the upstream source.
UPDATE_TIME = Summary("update_seconds", "Time spent loading data upstream")
Пример #23
0
    'saveTiming',       # Time to first byte for page edits
)

# Aggregation window lengths, in seconds.
PERIODS = {
    'hour':  60 * 60,
    'day':   60 * 60 * 24,
    'week':  60 * 60 * 24 * 7,
    'month': 60 * 60 * 24 * 30,
    'year':  int(60 * 60 * 24 * 365.25),
}

CACHE_DIR = '/var/cache/coal_web'

app = flask.Flask(__name__)
# Use the on-disk cache when the cache dir is writable; otherwise fall back
# to a no-op cache so the app still serves (uncached) responses.
if os.access(CACHE_DIR, os.W_OK):
    cache = FileSystemCache(CACHE_DIR)
else:
    cache = NullCache()


@app.after_request
def add_header(response):
    """Add CORS and Cache-Control headers to every outgoing response."""
    headers = response.headers
    headers['Access-Control-Allow-Origin'] = '*'
    headers['Access-Control-Allow-Methods'] = 'GET'
    # Only set a default max-age when no Cache-Control was provided.
    if not response.cache_control:
        response.cache_control.max_age = 30
    return response


def chunks(items, chunk_size):
Пример #24
0
def create_cache_list(request, tmpdir):
    """Attach a list of freshly-flushed cache backends to the test class."""
    memcached = MemcachedCache()
    memcached._client.flush_all()
    redis = RedisCache(port=6360)
    redis._client.flushdb()
    request.cls.cache_list = [
        FileSystemCache(tmpdir),
        memcached,
        redis,
        SimpleCache(),
    ]
Пример #25
0
}


'''
set up cache
'''

# Set up cache
CACHE_EMPTY_VAL = "NULL"
# Fixed: appdirs provides user_cache_dir(), not use_cache_dir() — the
# original call would raise AttributeError at import time.
CACHE_DIR = appdirs.user_cache_dir('howdoi')
CACHE_ENTRY_MAX = 128

# Allow users to opt out of on-disk caching entirely.
if os.getenv('HOWDOI_DISABLE_CACHE'):
    cache = NullCache()
else:
    # default_timeout=0: entries never expire; size capped at CACHE_ENTRY_MAX.
    cache = FileSystemCache(CACHE_DIR, CACHE_ENTRY_MAX, default_timeout=0)


# start a session
howdoi_session = requests.session()


'''
get data
'''
# result proxies 
def get_result(url):
    try:
        # here we can customize proxies and certificates 
        return howdoi_session.get(url).text 
    
Пример #26
0
class WsgiApplication(github.GitHub):  # pylint:disable=too-many-instance-attributes
    """WSGI application exposing GitHub release tarballs behind a simple
    package-index-style set of pages: an index page, a per-repository page
    and a tarball download endpoint. The caller's HTTP Basic credentials
    are forwarded to GitHub for authentication.
    """

    # Routing table; each endpoint name maps to a dispatch_<endpoint> method.
    _URLS = routing.Map((
        routing.Rule('/', endpoint='index'),
        routing.Rule('/<repository_name>/', endpoint='repository'),
        routing.Rule('/<repository_name>/release/<release_name>.tar.gz',
                     endpoint='download'),
    ))

    # On-disk cache for repository metadata (see the `repositories` property).
    _cache = None  # type: FileSystemCache
    # Accounts whose repositories this index exposes.
    _users_and_organizations = None  # type: (github.Organization | github.User, ...)
    _index_template = None  # type: str
    _repository_template = None  # type: str
    # Per-request state, refreshed on every __call__.
    _auth = None  # type: dict[str]
    _request_domain = None  # type: str
    _request_urls = None  # type: routing.MapAdapter

    @property
    def repositories(self):
        """Installable repositories of the configured users/organizations.

        Results are cached under CACHE_KEY. On a cache miss the repository
        objects fetched from GitHub are returned directly and their raw
        JSON payloads are cached; on a hit the cached JSON is rehydrated
        into ShortRepository instances.

        :rtype: dict[str, repos.ShortRepository]
        """
        value = self._cache.get(CACHE_KEY)

        if value is None:
            data = self.get_installable_repositories(
                *self._users_and_organizations)
            # Cache only the raw JSON payloads, then return the freshly
            # fetched repository objects as-is.
            value = {key: repo._json_data for key, repo in data.items()}  # pylint:disable=protected-access
            self._cache.set(CACHE_KEY, value)
            return data

        return {
            key: repos.ShortRepository(json, self.github_api.session)
            for key, json in value.items()
        }

    # noinspection PyTypeChecker
    def __init__(
        self,
        *users_and_organizations,
        cache_dir,  # Should be an absolute path!
        index_template=None,
        repository_template=None,
    ) -> None:
        """Configure the index for a set of GitHub accounts.

        :type users_and_organizations: (github.Organization | github.User, ...)
        :type index_template: str
        :type repository_template: str
        :rtype: None
        """
        self._users_and_organizations = users_and_organizations
        # Fall back to the built-in page templates when none are supplied.
        self._index_template = index_template or templates.INDEX
        self._repository_template = repository_template or templates.REPOSITORY
        # Repository metadata is cached on disk for 15 minutes.
        self._cache = FileSystemCache(cache_dir=cache_dir, default_timeout=900)

        # Per-request state, populated during __call__.
        self._auth = None
        self._request_domain = None
        self._request_urls = None
        self.current_user = None
        self.github_api = None

    def __call__(self, environ, start_response):
        """WSGI entry point: authenticate the caller, route the request and
        dispatch to the matching ``dispatch_<endpoint>`` method.

        HTTP errors raised along the way become the response instead of
        propagating.

        :type environ: dict
        :type start_response: werkzeug.serving.WSGIRequestHandler.start_response
        :rtype: werkzeug.wsgi.ClosingIterator
        """
        request = wrappers.Request(environ)

        # Let clients force a refresh of the cached repository list.
        # NOTE(review): raw substring test on the query string — any query
        # merely containing "purge_cache" (even as a value) triggers it;
        # confirm this is intended.
        if b'purge_cache' in request.query_string:
            self._cache.delete(CACHE_KEY)

        try:
            self._get_authorization(request)
            self._authorize_github()
            self._request_domain = self._get_request_domain(request)

            self._request_urls = self._URLS.bind_to_environ(request.environ)
            endpoint, values = self._request_urls.match()

            # Route to dispatch_<endpoint>; unknown endpoints become 404s.
            func_name = 'dispatch_{}'.format(endpoint)
            response = (getattr(self, func_name)(request, **values) if hasattr(
                self, func_name) else exceptions.NotFound())
        except exceptions.HTTPException as ex:
            # werkzeug HTTPExceptions are themselves WSGI apps, so they can
            # be used directly as the response.
            response = ex

        return response(environ, start_response)

    def _get_authorization(self, request):
        """Extract HTTP Basic credentials from the request into ``self._auth``.

        A username supplied without a password is treated as a personal
        access token.

        :type request: werkzeug.wrappers.request.Request
        :rtype: None
        :raises werkzeug.exceptions.Unauthorized: If authorization information not available in `request`
        """
        auth = request.authorization or {}
        username = auth.get('username', '').strip()
        password = auth.get('password', '').strip()
        if not username:
            # Challenge the client to (re-)send Basic credentials.
            raise exceptions.Unauthorized(
                www_authenticate='Basic realm="Simple index"')

        self._auth = {
            'username': username,
            'password': password
        } if username and password else {
            'token': username
        }

    def _authorize_github(self):
        """Log in to GitHub with the credentials collected by _get_authorization.

        Sets ``self.github_api`` and ``self.current_user``.

        :rtype: None
        :raises werkzeug.exceptions.Forbidden: If cannot login to GitHub using `auth` credentials
        """
        self.github_api = GitHubApi(**self._auth)
        try:
            self.current_user = self.github_api.me()
        except gh_exc.AuthenticationFailed as exc:
            # Chain explicitly so the underlying auth failure stays visible
            # in tracebacks (fixes pylint W0707 raise-missing-from).
            raise exceptions.Forbidden() from exc
        if self.current_user is None:
            raise exceptions.Forbidden()

    def _get_request_domain(self, request):
        """Rebuild the request's base URL with the caller's credentials inlined.

        :type request: werkzeug.wrappers.request.Request
        :rtype: str
        """
        split = parse.urlsplit(request.base_url)  # type: parse.SplitResult

        if 'token' in self._auth:
            template = '{scheme}://{token}@{domain}'
        else:
            template = '{scheme}://{username}:{password}@{domain}'

        params = dict(split._asdict())
        params.update(self._auth)
        # Strip any credentials already present in the host part.
        params['domain'] = split.netloc.split('@')[-1].rstrip('/')

        return template.format(**params)

    # Endpoints

    def dispatch_download(self, request, repository_name, release_name):  # pylint:disable=unused-argument
        """Fetch the tarball asset of the named release and return it.

        :type request: werkzeug.wrappers.request.Request
        :type repository_name: str
        :type release_name: str
        :rtype: werkzeug.wrappers.response.Response
        :raises werkzeug.exceptions.NotFound: if the repository has no
            releases or the asset download fails
        """
        repo_releases = self._get_releases(repository_name)
        if not repo_releases:
            raise exceptions.NotFound()

        # NOTE(review): an unknown release_name raises KeyError here, which
        # __call__ does not catch — it surfaces as a 500, not a 404; confirm.
        wanted_release = repo_releases[release_name]  # type: release.Release
        asset_url = self._find_release_download_link(
            wanted_release)  # type: str

        with requests.get(asset_url,
                          headers={'Accept': 'application/octet-stream'},
                          stream=True) as tarball:
            if tarball.status_code != HTTPStatus.OK:
                raise exceptions.NotFound()

            # NOTE(review): stream.read() buffers the whole asset in memory,
            # so direct_passthrough gains nothing here — confirm intended.
            stream = ResponseStream(tarball.iter_content(1024))
            return wrappers.Response(stream.read(),
                                     direct_passthrough=True,
                                     mimetype='application/x-compressed')

    def dispatch_index(self, request):  # pylint:disable=unused-argument
        """Render the landing page listing every installable repository.

        :type request: werkzeug.wrappers.request.Request
        :rtype: werkzeug.wrappers.response.Response
        """
        anchors = []
        for repo_name, repo in self.repositories.items():
            endpoint = self._request_urls.build(
                'repository', {'repository_name': repo_name})
            anchors.append('<a href="%(domain)s%(endpoint)s">%(name)s</a>' % {
                'domain': self._request_domain,
                'endpoint': endpoint,
                'name': repo.full_name,
            })

        body = self._index_template % {'links': '\n'.join(anchors)}
        return wrappers.Response(body, mimetype='text/html')

    def dispatch_repository(self, request, repository_name):  # pylint:disable=unused-argument
        """Render the page listing download links for a single repository.

        :type request: werkzeug.wrappers.request.Request
        :type repository_name: str
        :rtype: werkzeug.wrappers.response.Response
        :raises werkzeug.exceptions.NotFound: If `repository_name` not found on GitHub
        """
        releases = self._get_repo_releases_links(repository_name)
        links = '\n'.join(
            '<a href="%(url)s">%(tag_name)s</a>' % item for item in releases)
        body = self._repository_template % {
            'repository_name': repository_name,
            'links': links,
        }
        return wrappers.Response(body, mimetype='text/html')

    # Helpers

    def _get_repo_releases_links(self, repository_name):
        """Build url/tag_name link descriptors for every release of a repository.

        :type repository_name: str
        :rtype: (dict[str, str], ...) | None
        """
        repo_releases = self._get_releases(repository_name)
        if not repo_releases:
            return None

        links = []
        for tag_name in repo_releases:
            endpoint = self._request_urls.build('download', {
                'repository_name': repository_name,
                'release_name': tag_name
            })
            links.append({
                'url': '%(domain)s%(endpoint)s' % {
                    'domain': self._request_domain,
                    'endpoint': endpoint,
                },
                'tag_name': tag_name,
            })
        return tuple(links)

    def _get_releases(self, repository_name):
        """Return the releases of ``repository_name`` keyed by tag, ordered
        oldest-first by creation date.

        Repository names are also matched with underscores replaced by
        hyphens, so package-style names resolve to their GitHub counterparts.

        :type repository_name: str
        :rtype: dict[str, release.Release] | None
        :raises werkzeug.exceptions.NotFound: if the repository is unknown
        """
        repositories_with_aliases = {}
        for key, value in self.repositories.items():
            repositories_with_aliases[key] = value
            repositories_with_aliases[key.replace('_', '-')] = value

        try:
            repository = repositories_with_aliases[
                repository_name]  # type: repos.ShortRepository
        except KeyError:
            # The KeyError is an internal lookup detail; suppress implicit
            # chaining so the client-facing 404 traceback stays clean
            # (fixes pylint W0707 raise-missing-from).
            raise exceptions.NotFound() from None

        repo_releases = collections.OrderedDict(
            (item.tag_name, item)
            for item in sorted(repository.releases(),
                               key=lambda item: item.created_at))
        return repo_releases or None

    def _find_release_download_link(self, release_instance):
        """Locate the download URL of the expected asset within a release.

        :type release_instance: release.Release
        :rtype: str
        :raises werkzeug.exceptions.NotFound: if the release has no asset
            named ASSET_FILENAME
        """
        asset_url = None
        for asset in release_instance.assets():  # type: release_instance.Asset
            if asset.name == ASSET_FILENAME:
                # NOTE(review): passing access_token as a URL query parameter
                # is deprecated by GitHub and leaks the credential into logs
                # and proxies — consider an Authorization header instead.
                asset_url = '{download_url}?access_token={access_token}'.format(
                    download_url=asset.download_url,
                    access_token=self._auth.get('token',
                                                self._auth.get('password')),
                )
                break

        if not asset_url:
            raise exceptions.NotFound(
                'Asset "{}" not found in release "{}"'.format(
                    ASSET_FILENAME, release_instance.name))
        return asset_url