Example #1
class ContantView(HTTPMethodView):
    Client = MongoClient()
    db = Client['owllook']

    async def get(self, request):
        contants = await self.get_contanct()
        return json(contants)

    async def post(self, request):
        doc = request.json
        # Note: this is a blocking PyMongo call inside a coroutine; save() was
        # also removed in PyMongo 4 (insert_one/replace_one are the successors).
        self.db.user.save(doc)
        return json({"_id": "{}".format(doc.get('_id', -1))})

    @cached(ttl=10,
            cache=RedisCache,
            key="contanct",
            serializer=JsonSerializer(),
            port=6379,
            namespace="main")
    async def get_contanct(self):
        await asyncio.sleep(1)
        # fetch the documents with a query
        docs = self.db.user.find({}, {"_id": 0})
        contants = []
        for doc in docs:
            contants.append(doc)
        # data = await MotorBase().get_db().user.find().to_list(length=10)
        print(contants)
        return contants
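
Because `post` writes straight to MongoDB while `get_contanct` is cached for up to 10 seconds, a write can stay invisible until the TTL expires. A minimal invalidation sketch, assuming the same Redis settings as the decorator above (`invalidate_contacts` is a hypothetical helper, not part of the example):

from aiocache import RedisCache
from aiocache.serializers import JsonSerializer

async def invalidate_contacts():
    # Drop the decorator's entry so the next GET recomputes immediately.
    cache = RedisCache(serializer=JsonSerializer(), port=6379, namespace="main")
    await cache.delete("contanct")  # same key= as the @cached decorator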
Example #2
async def init_cache(app: web.Application) -> None:
    if app['config'].redis_host:
        app['cache'] = Cache(
            Cache.REDIS,
            endpoint=app['config'].redis_host,
            port=app['config'].redis_port,
            namespace='jaundice',
            serializer=JsonSerializer(),
        )
    else:
        app['cache'] = None
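
Since `app['cache']` may be None when no Redis host is configured, request handlers have to branch on it. A hedged consumer sketch (the handler name, key scheme, and `process_article` are illustrative, not from the source):

from aiohttp import web

async def handle_article(request: web.Request) -> web.Response:
    cache = request.app['cache']
    url = request.query['url']
    if cache is not None:
        hit = await cache.get(url)
        if hit is not None:
            return web.json_response(hit)
    result = await process_article(url)  # hypothetical expensive call
    if cache is not None:
        await cache.set(url, result, ttl=300)
    return web.json_response(result)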
Example #3
async def new_search(request):
    upload_image = request.files.get("image")
    if not upload_image:
        raise NotFound(message='no image file uploaded')
    image_types = ['image/jpeg', 'image/jpg', 'image/png']
    if upload_image.type not in image_types:
        raise NotFound(message='unsupported image type')
    upload_image_type = upload_image.type.split('/')[-1]
    file_name = str(time.time())[:10] + '.' + upload_image_type
    file_path = upload_image_path + file_name
    with open(file_path, "wb") as f:
        f.write(request.files["image"][0].body)
    search_results = image_search(file_path)[:5]
    cache = SimpleMemoryCache(serializer=JsonSerializer())
    response_dict = {
        'site_name': site_name,
        'upload_image': file_name,
        'search_results': search_results
    }
    await cache.set("response_dict", response_dict)
    return response_dict
Example #4
async def cache():
    conf = load_config(PROJ_ROOT / 'config' / 'config-gunicorn.yml')

    logging.basicConfig(level=logging.DEBUG)
    app = web.Application()

    app.router.add_route('GET', "/api/v2/{item}", method)
    app.router.add_route('GET', "/api/v2/{item}/{domain}", method)
    app.router.add_route('GET', "/api/{item}", method)
    app.router.add_route('GET', "/api/{item}/{domain}", method)
    memcached_host = conf['cache']['memcached_host']
    memcached_port = conf['cache']['memcached_port']
    #cache = Cache(plugins=[HitMissRatioPlugin(), TimingPlugin()])
    lookup_type = {}
    cache = Cache(Cache.MEMCACHED,
                  endpoint=memcached_host,
                  port=memcached_port,
                  serializer=JsonSerializer(),
                  plugins=[HitMissRatioPlugin(),
                           TimingPlugin()])

    if 'statsd' in conf:
        if conf['statsd']['enable']:
            hostname = socket.gethostname().split('.', 1)[0]
            c = statsd.StatsClient(conf['statsd']['host'],
                                   conf['statsd']['port'],
                                   prefix=conf['statsd']['prefix'])
            t = MetricsTimer(conf['statsd']['interval'], cache_metrics, cache,
                             lookup_type, c, hostname)

    app['config'] = conf

    auth = conf['authentication']
    user, password, realm = auth['user'], auth['password'], auth['realm']
    await setup(app, AllowedHosts(conf['allowed_hosts']),
                BasicAuth(user, password, realm))
    app['cache'] = cache
    app['lookup_type'] = lookup_type
    return app
Example #5
        html = get_html_by_requests(url=url, headers=headers)
    if html:
        soup = BeautifulSoup(html, 'html5lib')
        selector = RULES[netloc].chapter_selector
        if selector.get('id', None):
            content = soup.find_all(id=selector['id'])
        elif selector.get('class', None):
            content = soup.find_all(class_=selector['class'])
        else:
            content = soup.find_all(selector.get('tag'))
        # strip inline styles so chapters hidden with display:none stay visible
        return str(content).replace('style', '') if content else None
    return None


@cached(ttl=10800, key_from_attr='search_ranking', serializer=JsonSerializer(), namespace="ranking")
async def cache_novel_search_ranking():
    motor_db = MotorBase().get_db()
    keyword_cursor = motor_db.search_records.find(
        {'count': {'$gte': 50}},
        {'keyword': 1, 'count': 1, '_id': 0}
    ).sort('count', -1).limit(35)
    result = []
    index = 1
    async for document in keyword_cursor:
        result.append({'keyword': document['keyword'], 'count': document['count'], 'index': index})
        index += 1
    return result


@cached(ttl=3600, key_from_attr='search_ranking', serializer=JsonSerializer(), namespace="ranking")
Example #6
 def test_dumps_and_loads(self):
     obj = {"hi": 1}
     serializer = JsonSerializer()
     assert serializer.loads(serializer.dumps(obj)) == obj
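
The round trip only holds for JSON-native types; anything else is coerced or rejected. A short sketch of the limitation (not from the original test suite):

from aiocache.serializers import JsonSerializer

serializer = JsonSerializer()
assert serializer.loads(serializer.dumps((1, 2))) == [1, 2]  # tuples come back as lists
try:
    serializer.dumps(object())
except TypeError:
    print("non-JSON-serializable objects are rejected")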
Example #7
        async with aiohttp.ClientSession(headers=headers) as session:
            async with session.get(uri,
                                   ssl=sslcontext,
                                   allow_redirects=False) as response:

                json = await response.json()
                if response.status in RESPONSE_OK:
                    await cache.set(uri, json, ttl=ttl)
    except aiohttp.ClientConnectorError as e:
        print("Cannot connect to %s %s" % (uri, e))

    return json


@cached(ttl=60, serializer=JsonSerializer())
async def fgetp1(request):

    json = {}
    json['subtotal'] = -1
    params = {}

    for qstring in request.rel_url.query:
        params[qstring] = request.rel_url.query[qstring]

    params['per_page'] = 1
    path = request.rel_url.path

    url = '%s://%s:%s' % (request.app['config']['foreman']['scheme'],
                          request.app['config']['foreman']['host'],
                          request.app['config']['foreman']['port'])
Example #8
import asyncio
import logging
from datetime import datetime
from aiohttp import web
from aiocache import cached
from aiocache.serializers import JsonSerializer


@cached(key="function_key", serializer=JsonSerializer())
async def time():
    return {"time": datetime.now().isoformat()}


async def handle(request):
    return web.json_response(await time())


# It is also possible to cache the whole route, but for this you will need to
# override `cached.get_from_cache` and regenerate the response since aiohttp
# forbids reusing responses
class CachedOverride(cached):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    async def get_from_cache(self, key):
        try:
            value = await self.cache.get(key)
            if isinstance(value, web.Response):
                return web.Response(
                    body=value.body,
                    status=value.status,
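
The snippet is cut off above; a hedged sketch of how such an override could be completed, reusing the imports from this example (the fallthrough behaviour is an assumption, not the author's code):

class CachedResponseSketch(cached):
    async def get_from_cache(self, key):
        value = await self.cache.get(key)
        if isinstance(value, web.Response):
            # Rebuild a fresh Response: aiohttp forbids sending a
            # prepared Response object twice.
            return web.Response(
                body=value.body,
                status=value.status,
                reason=value.reason,
                headers=value.headers,
            )
        return value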
Example #9
 def test_loads_with_none(self):
     assert JsonSerializer().loads(None) is None
Example #10
from urllib.parse import unquote_plus

from aiocache import Cache, cached
from aiocache.serializers import JsonSerializer
from sanic.log import logger

from .models import (HoloBiliDB, NijiBiliDB, NijiTubeChannels,
                     NijiTubeLive, OtherBiliDB, OtherYTChannelsDB, OtherYTDB,
                     TwitcastingChannelsDB, TwitcastingDB, TwitchChannelsDB,
                     TwitchDB)


@cached(
    key="holobili", ttl=60, serializer=JsonSerializer(),
)
async def fetch_holobili() -> dict:
    try:
        logger.debug("Fetching (HoloLive) database...")
        data = await HoloBiliDB.find_one()
    except Exception as e:
        logger.debug(e)
        logger.debug("Failed to fetch database, returning...")
        return {"upcoming": [], "live": []}
    logger.info("Returning...")
    return {"live": data["live"], "upcoming": data["upcoming"]}


@cached(
    key="nijibili", ttl=60, serializer=JsonSerializer(),
)
async def fetch_nijibili() -> dict:
Example #11
 def test_init_fails_if_msgpack_not_installed(self):
     with mock.patch("aiocache.serializers.serializers.msgpack", None):
         with pytest.raises(RuntimeError):
             MsgPackSerializer()
         assert JsonSerializer(), "Other serializers should still initialize"
Example #12
async def view_search_result(request):
    cache = SimpleMemoryCache(serializer=JsonSerializer())
    response_dict = await cache.get("response_dict")
    if not response_dict:
        response_dict = {}
    return response_dict
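
This handler can read back the "response_dict" entry written in Example #3 even though each handler builds its own SimpleMemoryCache, because aiocache's in-memory backend shares its storage across instances (true for the aiocache versions these examples target). A standalone sketch of that behaviour:

from aiocache import SimpleMemoryCache
from aiocache.serializers import JsonSerializer

async def demo():
    writer = SimpleMemoryCache(serializer=JsonSerializer())
    reader = SimpleMemoryCache(serializer=JsonSerializer())
    await writer.set("response_dict", {"site_name": "demo"})
    # A separate instance sees the same entry.
    assert await reader.get("response_dict") == {"site_name": "demo"}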
Example #13
 async def test_add_get_types(self, cache, obj):
     cache.serializer = JsonSerializer()
     assert await cache.add(pytest.KEY, obj) is True
     assert await cache.get(pytest.KEY) == json.loads(json.dumps(obj))
Example #14
 def test_set_types(self, obj):
     serializer = JsonSerializer()
     assert serializer.loads(serializer.dumps(obj)) == obj
Example #15
 def test_init(self):
     serializer = JsonSerializer()
     assert isinstance(serializer, BaseSerializer)
     assert serializer.DEFAULT_ENCODING == 'utf-8'
     assert serializer.encoding == 'utf-8'
Example #16
    """Cache Beacon URLs that were received from Registry's update message."""
    LOG.debug('Caching Beacons from Registry\'s update message.')

    try:
        cache = SimpleMemoryCache()
        await cache.set('beacon_urls', beacons)
        LOG.debug('Cache was set.')
    except Exception as e:
        response = 500
        LOG.error(f'Couldn\'t set cache: {e}.')

    return response


# Cache Beacon URLs if they're not already cached
@cached(ttl=86400, key="beacon_urls", serializer=JsonSerializer())
async def get_services(db_pool):
    """Return service urls."""
    LOG.debug('Fetch service urls.')

    # Take connection from the database pool
    async with db_pool.acquire() as connection:
        services = await db_get_service_urls(
            connection, service_type='GA4GHRegistry'
        )  # service urls (in this case registries) to be queried

    # Query Registries for their known Beacon services, fetch only URLs
    service_urls = await http_get_service_urls(services,
                                               service_type='GA4GHBeacon')

    # Remove duplicate Beacons
Example #17
and their associated metadata.

.. note:: See ``beacon_api`` root folder ``__init__.py`` for changing values used here.
"""

from .. import __apiVersion__, __title__, __version__, __description__, __url__, __alturl__, __handover_beacon__
from .. import __createtime__, __updatetime__, __org_id__, __org_name__, __org_description__
from .. import __org_address__, __org_logoUrl__, __org_welcomeUrl__, __org_info__, __org_contactUrl__
from .. import __sample_queries__, __handover_drs__, __docs_url__, __service_type__, __service_env__
from ..utils.data_query import fetch_dataset_metadata
from ..extensions.handover import make_handover
from aiocache import cached
from aiocache.serializers import JsonSerializer


@cached(ttl=60, key="ga4gh_info", serializer=JsonSerializer())
async def ga4gh_info(host):
    """Construct the `Beacon` app information dict in GA4GH Discovery format.

    :return beacon_info: A dict that contain information about the ``Beacon`` endpoint.
    """
    beacon_info = {
        # TO DO implement some fallback mechanism for ID
        'id': '.'.join(reversed(host.split('.'))),
        'name': __title__,
        "type": __service_type__,
        'description': __description__,
        "organization": {
            "name": __org_name__,
            "url": __org_welcomeUrl__,
        },
Example #18
 def test_set_types(self, obj):
     assert JsonSerializer().dumps(obj) == json.dumps(obj)
Example #19
async def reuse_data():
    cache = Cache(serializer=JsonSerializer())  # Not ideal to define here
    data = await cache.get("my_custom_key")  # note: the key is defined in the `cached` decorator
    return data
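
The comment refers to a `cached` decorator defined elsewhere; Example #23 below shows the matching half. For the manual `get` to find the entry, both sides must agree on backend, namespace, serializer, and key, e.g. (illustrative):

from aiocache import cached
from aiocache.serializers import JsonSerializer

@cached(key="my_custom_key", serializer=JsonSerializer())  # same key as read above
async def expensive_call():
    return {"test": True}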
Example #20
        soup = BeautifulSoup(html, 'html5lib')
        selector = RULES[netloc].chapter_selector
        if selector.get('id', None):
            content = soup.find_all(id=selector['id'])
        elif selector.get('class', None):
            content = soup.find_all(class_=selector['class'])
        else:
            content = soup.find_all(selector.get('tag'))
        # strip inline styles so chapters hidden with display:none stay visible
        return str(content).replace('style', '') if content else None
    return None


@cached(ttl=10800,
        key_from_attr='search_ranking',
        serializer=JsonSerializer(),
        namespace="ranking")
async def cache_owllook_search_ranking():
    motor_db = MotorBase().get_db()
    keyword_cursor = motor_db.search_records.find({
        'count': {
            '$gte': 50
        }
    }, {
        'keyword': 1,
        'count': 1,
        '_id': 0
    }).sort('count', -1).limit(35)
    result = []
    index = 1
    async for document in keyword_cursor:
Example #21
def setup_cache(app: web.Application) -> None:
    cache = Cache.from_url(app['config']['cache_url'])
    cache.serializer = JsonSerializer()
    app['cache'] = cache
Example #22
 def __init__(self, serializer=None, **kwargs):
     super().__init__(**kwargs)
     self.serializer = serializer or JsonSerializer()
Example #23
import asyncio

import aiocache
from sanic import Sanic
from sanic.response import json
from sanic.log import log

# async IO cache:
from aiocache import cached
from aiocache.serializers import JsonSerializer

app = Sanic(__name__)

#
# async IO cache:
#
aiocache.settings.set_defaults(cache="aiocache.RedisCache")


@cached(key="my_custom_key", serializer=JsonSerializer())
async def expensive_call():
    log.info("Expensive has been called")
    await asyncio.sleep(3)
    return {"test": True}


@app.route("/")
async def test(request):
    log.info("Received GET /")
    return json(await expensive_call())


app.run(host="0.0.0.0", port=8000, loop=asyncio.get_event_loop())
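
A quick illustrative check against the app above (assumes it is running locally): the first request pays the 3-second sleep, repeats return almost instantly because the result is served from the "my_custom_key" entry.

import time
import urllib.request

for attempt in range(2):
    start = time.monotonic()
    with urllib.request.urlopen("http://127.0.0.1:8000/") as resp:
        body = resp.read()
    print(attempt, body, "%.2fs" % (time.monotonic() - start))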
Example #24
from aiocache import SimpleMemoryCache
from aiocache.serializers import JsonSerializer
from aiohttp import web
from aiohttp_apispec import docs, response_schema, use_kwargs
from marshmallow import ValidationError

from {{cookiecutter.project_slug}}.entities.requests import RequestPostDummy
from {{cookiecutter.project_slug}}.entities.responses import ResponseGetDummyData, ResponseGetDummy, \
    ResponseInternalError
from {{cookiecutter.project_slug}}.services.dummy import get_dummy, post_dummy
from {{cookiecutter.project_slug}}.log_manager import log_manager

LOGGER = log_manager.getLogger(module_name=__name__)

cache = SimpleMemoryCache(serializer=JsonSerializer())


class DummyHandler(web.View):
    @docs(tags=['dummy'],
          summary='Get dummy',
          description='''Dummy resource''')
    @response_schema(ResponseGetDummyData.Schema(), 200, description="Single dummy", required=True)
    @response_schema(ResponseInternalError.Schema(), 400, description="Error description", required=True)
    async def get(self):
        dummy = await get_dummy()
        try:
            payload = dummy.make_dump()
        except ValidationError as err:
            return web.json_response({"error": err.messages}, status=400)
        return web.json_response({"data": payload})
Example #25
 async def test_multi_set_multi_get_types(self, cache, obj):
     cache.serializer = JsonSerializer()
     assert await cache.multi_set([(pytest.KEY, obj)]) is True
     assert await cache.multi_get([pytest.KEY]) == [json.loads(json.dumps(obj))]
Example #26
 def test_dumps(self):
     assert (JsonSerializer().dumps({"hi": 1}) == '{"hi": 1}' or  # json
             JsonSerializer().dumps({"hi": 1}) == '{"hi":1}')  # ujson
Example #27
                        f'Bad Request: {e.message} caused by input: {e.instance}'
                    )
                    raise BeaconBadRequest(
                        obj, request.host,
                        f"Provided input: '{e.instance}' does not seem correct because: '{e.message}'"
                    )

            return await func(*args)

        return wrapped

    return wrapper


# This can be something that lives longer as it is unlikely to change
@cached(ttl=3600, key="jwk_key", serializer=JsonSerializer())
async def get_key():
    """Get OAuth2 public key and transform it to usable pem key."""
    existing_key = os.environ.get('PUBLIC_KEY', None)
    if existing_key is not None:
        return existing_key
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(OAUTH2_CONFIG.server) as r:
                # This can be a single key or a list of JWK
                return await r.json()
    except Exception:
        raise BeaconServerError("Could not retrieve OAuth2 public key.")


def token_scheme_check(token, scheme, obj, host):
Example #28
 def test_dumps_with_none(self):
     assert JsonSerializer().dumps(None) == 'null'
Example #29
 def test_loads_with_null(self):
     assert JsonSerializer().loads('null') is None
Example #30
class BaseManager:

    cache = SimpleMemoryCache(serializer=JsonSerializer())

    @classmethod
    async def get_by_uri(cls, object_id):
        activity = await cls.find_one({
            "activity.object.id": object_id,
            "deleted": False
        })
        return activity

    @classmethod
    async def delete(cls, obj_id):
        result = await cls.update_one(
            {
                "$or": [{
                    "activity.object.id": obj_id
                }, {
                    "activity.id": obj_id
                }],
                "deleted": False
            }, {'$set': {
                "deleted": True
            }})
        await cls.cache.clear()
        return result.modified_count

    @staticmethod
    def activity_clean(data):
        return [item["activity"] for item in data]

    @staticmethod
    async def get_ordered(request, model, filters, cleaner, coll_id):
        page = request.args.get("page")

        if page:
            total = None
            page = int(page)
        else:
            total = await model.count(filter=filters)
            page = 1

        limit = request.app.config.PAGINATION_LIMIT
        if total != 0:
            data = await model.find(filter=filters,
                                    sort="activity.published desc",
                                    skip=limit * (page - 1),
                                    limit=limit)
            data = data.objects

        else:
            data = []

        return ordered_collection(coll_id, total, page, cleaner(data))

    @classmethod
    async def timeline_paged(cls, request, uri):
        filters = {
            "deleted": False,
            "activity.type": {
                '$in': ["Create", "Announce", "Like"]
            }
        }

        if cls.__coll__ == 'inbox':
            filters.update({
                "users.0": {
                    "$ne": "cached"
                },
                "users": {
                    "$size": 1
                }
            })

        return await cls.get_ordered(request, cls, filters, cls.activity_clean,
                                     uri)