Example #1
class JoinWinningBids(Computation):
    """ Filters out losing bids from a stream of bids and impressions.
    Omit records that don't have a match within a 60s interval."""
    def __init__(self, maxsize, ttl, prune_time=5):
        self.prune_time = prune_time
        self.cache = TTLCache(maxsize, ttl)

    def init(self, ctx):
        self.concord_logger.info("Operator initialized")
        if self.prune_time > 0:
            ctx.set_timer('loop', time.time() * 1000)

    def process_timer(self, ctx, key, timestamp):
        """ Prune the cache of expired items every 'prune_time' seconds.
        Otherwise this would only happen when mutating the cache"""
        self.cache.expire()
        ctx.set_timer('cleanup_loop', timestamp + self.prune_time * 1000)

    def process_record(self, ctx, record):
        """ With GROUP_BY routing strategy, it is guaranteed that the same
        key will be sent to the same operator, regardless of scaling"""
        if record.stream == 'bids':
            self.cache[record.key] = record.data
        elif record.stream == 'imps':
            bid = self.cache.get(record.key)
            if bid is not None:
                ctx.produce_record('winningbids', record.key, '-')

    def metadata(self):
        return Metadata(
            name='filter-winning-bids',
            istreams=[('bids', StreamGrouping.GROUP_BY),
                      ('imps', StreamGrouping.GROUP_BY)],
            ostreams=['winningbids'])
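
A note on the pattern above: TTLCache evicts expired entries lazily, as a side effect of mutating operations, which is why the operator schedules a periodic expire() sweep. The same idea outside the framework (minimal sketch):

from cachetools import TTLCache

cache = TTLCache(maxsize=1000, ttl=60)
cache['bid-1'] = {'price': 1.25}
# ... later, e.g. from a periodic timer callback:
cache.expire()  # eagerly drop every entry older than ttl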
Example #2
class NetCDFData(Data):

    def __init__(self, url):
        self._dataset = None
        self.__timestamp_cache = TTLCache(1, 3600)
        super(NetCDFData, self).__init__(url)

    def __enter__(self):
        self._dataset = Dataset(self.url, 'r')

        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._dataset.close()

    @property
    def timestamps(self):
        if self.__timestamp_cache.get("timestamps") is None:
            var = None
            for v in ['time', 'time_counter']:
                if v in self._dataset.variables:
                    var = self._dataset.variables[v]
                    break

            t = netcdftime.utime(var.units)
            timestamps = np.array([
                t.num2date(ts).replace(tzinfo=pytz.UTC)
                for ts in var[:]
            ])
            timestamps.flags.writeable = False
            self.__timestamp_cache["timestamps"] = timestamps

        return self.__timestamp_cache.get("timestamps")
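
The hand-rolled check-then-store logic above can also be written with cachetools' cachedmethod decorator, which looks the cache up on the instance. A minimal sketch (the NetCDF reading is replaced by a stand-in):

import operator

from cachetools import TTLCache, cachedmethod


class TimestampsSketch:
    def __init__(self):
        self._cache = TTLCache(maxsize=1, ttl=3600)

    @cachedmethod(operator.attrgetter('_cache'))
    def timestamps(self):
        # stand-in for the expensive NetCDF read in Example #2
        return (0, 1, 2)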
Example #3
 def test_atomic(self):
     cache = TTLCache(maxsize=1, ttl=1, timer=Timer(auto=True))
     cache[1] = 1
     self.assertEqual(1, cache[1])
     cache[1] = 1
     self.assertEqual(1, cache.get(1))
     cache[1] = 1
     self.assertEqual(1, cache.pop(1))
     cache[1] = 1
     self.assertEqual(1, cache.setdefault(1))
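
The Timer passed to TTLCache here (and in Examples #4 and #10) is a test helper, not part of the public cachetools API; any zero-argument callable works as a timer. A minimal stand-in supporting both auto-advance and manual ticking might look like this (sketch; cachetools itself wraps the timer, which is what lets `with cache.timer as time:` in Example #10 freeze the clock for the duration of the block):

class Timer:
    """Deterministic clock for TTLCache tests (sketch, not the real helper)."""

    def __init__(self, auto=False):
        self.auto = auto
        self.time = 0

    def __call__(self):
        # with auto=True every read advances the clock, so successive
        # cache operations observe strictly increasing times
        if self.auto:
            self.time += 1
        return self.time

    def tick(self):
        self.time += 1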
Example #4
    def test_ttl(self):
        cache = TTLCache(maxsize=2, ttl=1, timer=Timer())
        self.assertEqual(0, cache.timer())
        self.assertEqual(1, cache.ttl)

        cache[1] = 1
        self.assertEqual({1}, set(cache))
        self.assertEqual(1, len(cache))
        self.assertEqual(1, cache[1])

        cache.timer.tick()
        self.assertEqual({1}, set(cache))
        self.assertEqual(1, len(cache))
        self.assertEqual(1, cache[1])

        cache[2] = 2
        self.assertEqual({1, 2}, set(cache))
        self.assertEqual(2, len(cache))
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])

        cache.timer.tick()
        self.assertEqual({2}, set(cache))
        self.assertEqual(1, len(cache))
        self.assertNotIn(1, cache)
        self.assertEqual(2, cache[2])

        cache[3] = 3
        self.assertEqual({2, 3}, set(cache))
        self.assertEqual(2, len(cache))
        self.assertNotIn(1, cache)
        self.assertEqual(2, cache[2])
        self.assertEqual(3, cache[3])

        cache.timer.tick()
        self.assertEqual({3}, set(cache))
        self.assertEqual(1, len(cache))
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)
        self.assertEqual(3, cache[3])

        cache.timer.tick()
        self.assertEqual(set(), set(cache))
        self.assertEqual(0, len(cache))
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)
        self.assertNotIn(3, cache)

        with self.assertRaises(KeyError):
            del cache[1]
        with self.assertRaises(KeyError):
            cache.pop(2)
        with self.assertRaises(KeyError):
            del cache[3]
Example #5
class Middleware(object):
    """ Falcon rate limiting middleware """

    def __init__(self):

        self.count = goldman.config.RATE_LIMIT_COUNT
        self.duration = goldman.config.RATE_LIMIT_DURATION

        self.cache = TTLCache(maxsize=self.count, ttl=self.duration)

    @property
    def _error_headers(self):
        """ Return a dict of headers in every auth failure """

        return {
            'Retry-After': self.duration,
            'X-RateLimit-Limit': self.count,
            'X-RateLimit-Remaining': 0,
        }

    # pylint: disable=unused-argument
    def process_request(self, req, resp):
        """ Process the request before routing it. """

        key = req.env['REMOTE_PORT'] + req.env['REMOTE_ADDR']
        val = self.cache.get(key, 0)

        if val == self.count:
            abort(exceptions.TooManyRequests(headers=self._error_headers))
        else:
            self.cache[key] = val + 1
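
One subtlety in this limiter: assigning to a TTLCache key re-inserts it, so every counted request pushes the key's expiry another `duration` seconds into the future. The behaviour in isolation (sketch):

import time

from cachetools import TTLCache

c = TTLCache(maxsize=10, ttl=2)
c['client'] = 1
time.sleep(1.5)
c['client'] = c['client'] + 1  # re-set: the 2s expiry window restarts now
time.sleep(1.5)
assert c.get('client') == 2    # ~3s after the first insert, still alive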
Example #6
    def __init__(self, app, read_preference=None, cache_size=None, cache_ttl=None,
                 blocking_enabled=True, logging_enabled=False):
        """
        Initialize IPBlock and set up a before_request handler in the
        app.

        You can override the default MongoDB read preference via the
        optional read_preference kwarg.

        You can limit the impact of the IP checks on your MongoDB by
        maintaining a local in-memory LRU cache. To do so, specify its
        cache_size (i.e. max number of IP addresses it can store) and
        cache_ttl (i.e. how many seconds each result should be cached
        for).

        To run in dry-run mode without blocking requests, set
        blocking_enabled to False. Set logging_enabled to True
        to log IPs that match blocking rules -- if enabled, will
        log even if blocking_enabled is False.
        """
        self.read_preference = read_preference
        self.blocking_enabled = blocking_enabled
        self.logger = None
        if logging_enabled:
            self.logger = app.logger
            self.block_msg = "blocking" if blocking_enabled else "blocking disabled"

        if cache_size and cache_ttl:
            # inline import because cachetools dependency is optional.
            from cachetools import TTLCache
            self.cache = TTLCache(cache_size, cache_ttl)
        else:
            self.cache = None

        app.before_request(self.block_before)
Example #7
 def __init__(self, config):
     self._cache = TTLCache(maxsize=2048, ttl=3600)
     if 'user' in config and 'password' in config:
         self.auth = (config['user'], config['password'])
     else:
         self.auth = None
     self.host = config['host']
     self.port = config['port']
     self.chunk_size = 750
     self._api = Server(
         url='http://{host}:{port}/jsonrpc'.format(**config),
         auth=self.auth)
Example #8
class LinkIService(IService):
    service_class = LinkService

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.__services_by_domain = TTLCache(maxsize=100, ttl=5)

    async def get_service_for(self, target_domain, ignore_services):
        services = self.__services_by_domain.get(target_domain)

        if services:
            services.rotate(-1)
            return services[0]
        else:
            services = []

        event = asyncio.Event(loop=self.loop)

        queries = {
            service: asyncio.ensure_future(
                service.query(target_domain),
                loop=self.loop,
            )
            for service in self.services
        if service not in ignore_services
        }

        def query_done(service, task):
            queries.pop(service)

            if not task.cancelled() and not task.exception():
                services.append(service)

        for service, task in queries.items():
            task.add_done_callback(partial(query_done, service))

        while queries and not services:
            await asyncio.wait(
                queries.values(),
                return_when=asyncio.FIRST_COMPLETED,
                loop=self.loop,
            )

        for task in queries.values():
            task.cancel()

        if services:
            self.__services_by_domain[target_domain] = deque(services)
            return services[0]
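
The cache above holds a deque per domain so that repeated lookups round-robin over the services discovered within the 5-second TTL. The core of that trick in isolation (sketch with made-up names):

from collections import deque

from cachetools import TTLCache

pool = TTLCache(maxsize=100, ttl=5)
pool['example.org'] = deque(['svc-a', 'svc-b', 'svc-c'])

def pick(domain):
    services = pool.get(domain)
    if services:
        services.rotate(-1)  # advance the round-robin cursor
        return services[0]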
Example #9
import re
import timeit
import scraper_regions
import urllib.request
from bs4 import BeautifulSoup
from operator import itemgetter
from cachetools import cached, TTLCache

player_cache = TTLCache(maxsize=500, ttl=80000)


#########################################################################
@cached(player_cache)
def nuliga_get_players(region_id, club_id, team_id, print_players_found=False):

    region_url = scraper_regions.nuliga_get_region_url(region_id)
    if region_url is None:
        return []

    url = region_url + '/liga/vereine/verein/mannschaften/mannschaft/m/' + str(
        team_id) + '.html'

    page = urllib.request.urlopen(url)
    soup = BeautifulSoup(page.read(), features='html.parser')

    players_found = []
    table = soup.find('table',
                      attrs={'class': 'table table-striped table-condensed'})
    table_body = table.find('tbody')

    rows = table_body.find_all('tr')
Example #10
    def test_ttl_expire(self):
        cache = TTLCache(maxsize=3, ttl=2, timer=Timer())
        with cache.timer as time:
            self.assertEqual(time, cache.timer())
        self.assertEqual(2, cache.ttl)

        cache[1] = 1
        cache.timer.tick()
        cache[2] = 2
        cache.timer.tick()
        cache[3] = 3
        self.assertEqual(2, cache.timer())

        self.assertEqual({1, 2, 3}, set(cache))
        self.assertEqual(3, len(cache))
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])
        self.assertEqual(3, cache[3])

        cache.expire()
        self.assertEqual({1, 2, 3}, set(cache))
        self.assertEqual(3, len(cache))
        self.assertEqual(1, cache[1])
        self.assertEqual(2, cache[2])
        self.assertEqual(3, cache[3])

        cache.expire(3)
        self.assertEqual({2, 3}, set(cache))
        self.assertEqual(2, len(cache))
        self.assertNotIn(1, cache)
        self.assertEqual(2, cache[2])
        self.assertEqual(3, cache[3])

        cache.expire(4)
        self.assertEqual({3}, set(cache))
        self.assertEqual(1, len(cache))
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)
        self.assertEqual(3, cache[3])

        cache.expire(5)
        self.assertEqual(set(), set(cache))
        self.assertEqual(0, len(cache))
        self.assertNotIn(1, cache)
        self.assertNotIn(2, cache)
        self.assertNotIn(3, cache)
Example #11
# get app info
@app.route('/api/' + API_VERSION + '/apps/<app_name>', methods=["GET"])
@retry(stop_max_attempt_number=3, wait_exponential_multiplier=200, wait_exponential_max=500)
@multi_auth.login_required
@check_authorization_wrapper(permission_needed="ro", permission_object_type="apps")
def get_app(app_name):
    app_exists, app_json = mongo_connection.mongo_get_app(app_name)
    if app_exists is True:
        return dumps(app_json), 200
    elif app_exists is False:
        return jsonify({"app_exists": False}), 403


# get device_group info
@app.route('/api/' + API_VERSION + '/device_groups/<device_group>/info', methods=["GET"])
@cached(cache=TTLCache(maxsize=cache_max_size, ttl=cache_time))
@retry(stop_max_attempt_number=3, wait_exponential_multiplier=200, wait_exponential_max=500)
@multi_auth.login_required
@check_authorization_wrapper(permission_needed="ro", permission_object_type="device_groups")
def get_device_group_info(device_group):
    device_group_exists, device_group_json = mongo_connection.mongo_get_device_group(device_group)
    if device_group_exists is False:
        return jsonify({"device_group_exists": False}), 403
    device_group_config = {"apps": [], "apps_list": [], "prune_id": device_group_json["prune_id"], "cron_jobs": [],
                           "cron_jobs_list": [], "device_group_id": device_group_json["device_group_id"]}
    for device_app in device_group_json["apps"]:
        app_exists, app_json = mongo_connection.mongo_get_app(device_app)
        if app_exists is True:
            device_group_config["apps"].append(app_json)
            device_group_config["apps_list"].append(app_json["app_name"])
    for device_cron_job in device_group_json["cron_jobs"]:
Example #12
import geojson
from cachetools import TTLCache, cached
from shapely.geometry import Polygon, box
from server.models.dtos.project_dto import ProjectSearchDTO, ProjectSearchResultsDTO, ListSearchResultDTO, \
    Pagination, ProjectSearchBBoxDTO
from server.models.postgis.project import Project, ProjectInfo
from server.models.postgis.statuses import ProjectStatus, MappingLevel, MappingTypes, ProjectPriority
from server.models.postgis.utils import NotFound, ST_Intersects, ST_MakeEnvelope, ST_Transform, ST_Area
from server.services.users.user_service import UserService
from server import db
from flask import current_app
from geoalchemy2 import shape
import math

search_cache = TTLCache(maxsize=128, ttl=300)

# max area allowed for passed in bbox, calculation shown to help future maintenance
# client resolution (mpp) * arbitrary large map size on a large screen in pixels * 50% buffer, all squared
MAX_AREA = math.pow(1250 * 4275 * 1.5, 2)


class ProjectSearchServiceError(Exception):
    """ Custom Exception to notify callers an error occurred when handling mapping """
    def __init__(self, message):
        if current_app:
            current_app.logger.error(message)


class BBoxTooBigError(Exception):
    """ Custom Exception to notify callers an error occurred when handling mapping """
    def __init__(self, message):
        if current_app:
            current_app.logger.error(message)
Example #13
from backend.models.dtos.message_dto import MessageDTO, MessagesDTO
from backend.models.dtos.stats_dto import Pagination
from backend.models.postgis.message import Message, MessageType, NotFound
from backend.models.postgis.notification import Notification
from backend.models.postgis.project import Project
from backend.models.postgis.task import TaskStatus, TaskAction, TaskHistory
from backend.models.postgis.statuses import TeamRoles
from backend.services.messaging.smtp_service import SMTPService
from backend.services.messaging.template_service import (
    get_txt_template,
    template_var_replacing,
    clean_html,
)
from backend.services.users.user_service import UserService, User

message_cache = TTLCache(maxsize=512, ttl=30)


class MessageServiceError(Exception):
    """ Custom Exception to notify callers an error occurred when handling mapping """
    def __init__(self, message):
        if current_app:
            current_app.logger.debug(message)


class MessageService:
    @staticmethod
    def send_welcome_message(user: User):
        """Sends welcome message to new user at Sign up"""
        org_code = current_app.config["ORG_CODE"]
        text_template = get_txt_template("welcome_message_en.txt")
Example #14
                    msg = 'Container id {0} mapped to {1} by FQDN match'
                    log.debug(msg.format(_id, ip))
                    CONTAINER_MAPPING[ip] = _id
                    return c
        # Try to find the container over the mesos state api and use the labels attached to it
        # as a replacement for docker env and labels
        if app.config['MESOS_STATE_LOOKUP']:
            mesos_container = find_mesos_container(ip)
            if mesos_container is not None:
                return mesos_container

    log.error('No container found for ip {0}'.format(ip))
    return None


@cached(cache=TTLCache(maxsize=512, ttl=60))
@log_exec_time
def find_mesos_container(ip):
    mesos_state_url = app.config['MESOS_STATE_URL']
    try:
        state = requests.get(mesos_state_url,
                             timeout=app.config['MESOS_STATE_TIMEOUT']).json()
        for framework in state['frameworks']:
            for executor in framework['executors']:
                for task in executor['tasks']:
                    for status in task['statuses']:
                        if status['state'] == 'TASK_RUNNING':
                            for network in status['container_status'][
                                    'network_infos']:
                                for ip_map in network['ip_addresses']:
                                    if ip_map['ip_address'] == ip:
Example #15
        df_to_json(summary),
        'tickerScoreRel':
        df_to_json(score.ticker_score_df_rel.loc[:, top_tickers]),
        'tickerScoreSum':
        df_to_json(score.ticker_score_df_rel.loc[:, top_tickers].cumsum()),
        'posts':
        score.posts_by_id
    })


@app.route("/scores")
def calculate():
    return calculate_cached()


@cached(cache=TTLCache(maxsize=1024, ttl=20 * 60 * 60))
def yfinance_data_cached(ticker):
    return yfinance_data(ticker)


@app.route("/yahoo/<ticker>")
def yahoo(ticker):
    history_month, history_week, info, summary = yfinance_data_cached(ticker)
    return jsonify({
        "historyMonth": df_to_json(history_month),
        "historyWeek": df_to_json(history_week),
        "info": info,
        "summary": summary
    })

Example #16
    response.data = compression.gzip_compress(response.data)

    response.headers["Content-Encoding"] = "gzip"
    response.headers["Vary"] = "Accept-Encoding"
    response.headers["Content-Length"] = len(response.data)

    return response


def check_aligned_volume(aligned_volume):
    aligned_volumes = get_aligned_volumes()
    if aligned_volume not in aligned_volumes:
        abort(400, f"aligned volume: {aligned_volume} not valid")


@cached(cache=TTLCache(maxsize=64, ttl=600))
def get_relevant_datastack_info(datastack_name):
    ds_info = get_datastack_info(datastack_name=datastack_name)
    seg_source = ds_info["segmentation_source"]
    pcg_table_name = seg_source.split("/")[-1]
    aligned_volume_name = ds_info["aligned_volume"]["name"]
    return aligned_volume_name, pcg_table_name


@cached(cache=LRUCache(maxsize=64))
def get_analysis_version_and_table(datastack_name: str, table_name: str,
                                   version: int, Session):
    """query database for the analysis version and table name

    Args:
        datastack_name (str): datastack name
Example #17
from tg_bot import (
    DEL_CMDS,
    DEV_USERS,
    SUDO_USERS,
    SUPPORT_USERS,
    SARDEGNA_USERS,
    WHITELIST_USERS,
    dispatcher,
)
from cachetools import TTLCache
from telegram import Chat, ChatMember, ParseMode, Update
from telegram.ext import CallbackContext

# stores admemes in memory for 10 min.
ADMIN_CACHE = TTLCache(maxsize=512, ttl=60 * 10)


def is_whitelist_plus(chat: Chat,
                      user_id: int,
                      member: ChatMember = None) -> bool:
    return any(user_id in user for user in [
        WHITELIST_USERS,
        SARDEGNA_USERS,
        SUPPORT_USERS,
        SUDO_USERS,
        DEV_USERS,
    ])


def is_support_plus(chat: Chat,
Example #18
    return get('{}_{}'.format(key, get_language().upper())) or get(key)


def create_secret_key_file(filename):
    key = get_random_secret_key()
    with open(filename, 'w') as f:
        f.write('''"""
Automatically generated SECRET_KEY for django.
This needs to be unique and SECRET. It is also installation specific.
You can change it here or in local_settings.py
"""
SECRET_KEY = '%s'
''' % (key))


@cached(TTLCache(100, ttl=30))
def get_url_ip_address_list(url):
    """
    This function takes a full URL as a parameter and returns the IP addresses
    of the host as a string.

    It will cache results for 30 seconds, so repeated calls return fast
    """
    hostname = urlsplit(url).hostname
    assert hostname, "Invalid url: no hostname found"
    ips = (a[4][0] for a in socket.getaddrinfo(
        hostname, None, 0, socket.SOCK_STREAM, socket.IPPROTO_TCP))
    return tuple(set(ips))


def get_font_color_for_background(background_color):
Example #19
from cachetools import TTLCache
from requests.models import Request, Response
from localstack import config, constants
from localstack.utils.aws import aws_stack, aws_responses
from localstack.utils.common import to_bytes, to_str, clone, select_attributes, short_uid, json_safe
from localstack.utils.analytics import event_publisher
from localstack.utils.bootstrap import is_api_enabled
from localstack.services.awslambda import lambda_api
from localstack.services.generic_proxy import ProxyListener, RegionBackend
from localstack.services.dynamodbstreams import dynamodbstreams_api

# set up logger
LOGGER = logging.getLogger(__name__)

# cache schema definitions
SCHEMA_CACHE = TTLCache(maxsize=50, ttl=20)

# action header prefix
ACTION_PREFIX = 'DynamoDB_20120810.'

# list of actions subject to throughput limitations
READ_THROTTLED_ACTIONS = [
    'GetItem', 'Query', 'Scan', 'TransactGetItems', 'BatchGetItem'
]
WRITE_THROTTLED_ACTIONS = [
    'PutItem',
    'BatchWriteItem',
    'UpdateItem',
    'DeleteItem',
    'TransactWriteItems',
]
Example #20
class S3Backend(BaseBackend):
    """S3 Backend Adapter"""

    client: Any = attr.ib(default=None)
    bucket: str = attr.ib(init=False)
    key: str = attr.ib(init=False)

    _backend_name = "AWS S3"

    def __attrs_post_init__(self):
        """Post Init: parse path and create client."""
        assert boto3_session is not None, "'boto3' must be installed to use S3Backend"

        parsed = urlparse(self.path)
        self.bucket = parsed.netloc
        self.key = parsed.path.strip("/")
        self.client = self.client or boto3_session().client("s3")
        super().__attrs_post_init__()

    def write(self, overwrite: bool = False, **kwargs: Any):
        """Write mosaicjson document to AWS S3."""
        if not overwrite and self._head_object(self.key, self.bucket):
            raise MosaicExistsError(
                "Mosaic file already exist, use `overwrite=True`.")

        mosaic_doc = self.mosaic_def.dict(exclude_none=True)
        if self.key.endswith(".gz"):
            body = _compress_gz_json(mosaic_doc)
        else:
            body = json.dumps(mosaic_doc).encode("utf-8")

        self._put_object(self.key, self.bucket, body, **kwargs)

    @cached(
        TTLCache(maxsize=cache_config.maxsize, ttl=cache_config.ttl),
        key=lambda self: hashkey(self.path),
    )
    def _read(self) -> MosaicJSON:  # type: ignore
        """Get mosaicjson document."""
        body = self._get_object(self.key, self.bucket)

        self._file_byte_size = len(body)

        if self.key.endswith(".gz"):
            body = _decompress_gz(body)

        return MosaicJSON(**json.loads(body))

    def _get_object(self, key: str, bucket: str) -> bytes:
        try:
            response = self.client.get_object(Bucket=bucket, Key=key)
        except ClientError as e:
            status_code = e.response["ResponseMetadata"]["HTTPStatusCode"]
            exc = _HTTP_EXCEPTIONS.get(status_code, MosaicError)
            raise exc(e.response["Error"]["Message"]) from e

        return response["Body"].read()

    def _put_object(self, key: str, bucket: str, body: bytes, **kwargs) -> str:
        try:
            self.client.put_object(Bucket=bucket, Key=key, Body=body, **kwargs)
        except ClientError as e:
            status_code = e.response["ResponseMetadata"]["HTTPStatusCode"]
            exc = _HTTP_EXCEPTIONS.get(status_code, MosaicError)
            raise exc(e.response["Error"]["Message"]) from e

        return key

    def _head_object(self, key: str, bucket: str) -> bool:
        try:
            return self.client.head_object(Bucket=bucket, Key=key)
        except ClientError:
            return False
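
The `key=lambda self: hashkey(self.path)` above (hashkey lives in cachetools.keys) keeps the instance itself out of the cache key, so two backends pointing at the same path share one cached `_read()` result. The same pattern reduced to its essentials (sketch; `load` is a placeholder):

from cachetools import TTLCache, cached
from cachetools.keys import hashkey

_mosaic_cache = TTLCache(maxsize=128, ttl=300)

def load(path):
    return f"document at {path}"  # placeholder for the expensive fetch

class Backend:
    def __init__(self, path):
        self.path = path

    @cached(_mosaic_cache, key=lambda self: hashkey(self.path))
    def read(self):
        return load(self.path)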
Example #21
from meals import utils
from meals.importers import IMPORTERS, Importer, CollisionOccurredException
from .models import Sku, FormulaIngredient, ProductLine, Ingredient

logger = logging.getLogger(__name__)


FILE_TYPES = {
    "skus": Sku,
    "ingredients": Ingredient,
    "product_lines": ProductLine,
    "formulas": FormulaIngredient,
}
TOPOLOGICAL_ORDER = ["product_lines", "ingredients", "formulas", "skus"]
# A self expiring cache for 30 minutes
TRANSACTION_CACHE = TTLCache(maxsize=10, ttl=1800)


@transaction.atomic
@utils.log_exceptions(logger=logger)
def process_csv_files(files, session_key: str) -> Tuple[Dict[str, int], Dict[str, int]]:
    inserted = defaultdict(lambda: 0)
    ignored = defaultdict(lambda: 0)
    for file_type in TOPOLOGICAL_ORDER:
        if file_type in files:
            stream = files[file_type]
            if not stream:
                continue
            importer = IMPORTERS[file_type](stream.name)
            logger.info("Processing %s: %s", file_type, stream)
            lines = stream.read().decode("UTF-8").splitlines()
Example #22
from cachetools import cached
from ecosystem.jira import client
from cachetools import TTLCache

import config
import log
from exceptions import document_exception

cache = TTLCache(maxsize=40000, ttl=60 * 60 * 2)


@cached(cache)
def _query_jira(query_string):
    return [
        ticket
        for ticket in client.search(config.jira_query.format(query_string))
        if query_string in ticket.get_summary() or (ticket.get_field(
            "description") and query_string in ticket.get_field("description"))
    ]


def get_jira_tickets(test):
    """
    1. Get all jira tickets that contain the test name
    2. Filter tickets that contain specifically the test name in summary and description
    3. Get all jira tickets that contain the errors in the test
    4. Search tickets by test id
    5. existing_jira_tickets already keeps the tickets that were queried. This specifically applies
        to repeating errors under different tests
    :type test: InternalTest
    """
Example #23
class KodiClient(object):
    def __init__(self, config):
        self._cache = TTLCache(maxsize=2048, ttl=3600)
        if 'user' in config and 'password' in config:
            self.auth = (config['user'], config['password'])
        else:
            self.auth = None
        self.host = config['host']
        self.port = config['port']
        self.chunk_size = 750
        self._api = Server(
            url='http://{host}:{port}/jsonrpc'.format(**config),
            auth=self.auth)

    def _make_generator(self, method, data_key, **params):
        logger.debug("Fetching first chunk of {}".format(data_key))
        params.update({'limits': {'start': 0, 'end': self.chunk_size}})
        resp = method(**params)
        for d in resp[data_key]:
            yield d
        num_total = resp['limits']['total']
        cur_start = self.chunk_size
        while cur_start < num_total:
            params['limits']['start'] = cur_start
            params['limits']['end'] = cur_start + self.chunk_size
            logger.debug("Fetching next chunk from #{}".format(cur_start))
            resp = method(**params)
            for d in resp[data_key]:
                yield d
            cur_start += self.chunk_size
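
    # NB: the bare @cached() decorators in this class are presumably a
    # project-local memoizer; cachetools.cached itself requires an
    # explicit cache argument.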

    @cached()
    def get_artists(self):
        artists = list(self._make_generator(
            self._api.AudioLibrary.GetArtists, 'artists',
            properties=PROPERTIES['artist']))
        self._cache.update({'artist.{}'.format(a['artistid']): a
                            for a in artists})
        return artists

    def get_artist(self, artist_id):
        artist_id = int(artist_id)
        cached = self._cache.get('artist.{}'.format(artist_id))
        if cached is None:
            try:
                artist = self._api.AudioLibrary.GetArtistDetails(
                    artistid=artist_id,
                    properties=PROPERTIES['artist'])['artistdetails']
                self._cache['artist.{}'.format(artist_id)] = artist
                return artist
            except Exception as e:
                return None
        else:
            return cached

    @cached()
    def get_albums(self, artist_id=None, recently_added=False):
        if recently_added:
            return self._api.AudioLibrary.GetRecentlyAddedAlbums(
                properties=PROPERTIES['album'])['albums']
        if artist_id is not None:
            artist_id = int(artist_id)
        params = {'properties': PROPERTIES['album'],
                  'data_key': 'albums'}
        if artist_id:
            params['filter'] = {'artistid': artist_id}
        albums = list(self._make_generator(
            self._api.AudioLibrary.GetAlbums, **params))
        self._cache.update({'album.{}'.format(a['albumid']): a
                            for a in albums})
        return albums

    def get_album(self, album_id):
        album_id = int(album_id)
        cached = self._cache.get('album.{}'.format(album_id))
        if cached is None:
            try:
                album = self._api.AudioLibrary.GetAlbumDetails(
                    albumid=album_id,
                    properties=PROPERTIES['album'])['albumdetails']
                self._cache['album.{}'.format(album_id)] = album
                return album
            except Exception as e:
                self._cache['album.{}'.format(album_id)] = None
                return None
        else:
            return cached


    @cached()  # First-level cache for accessing all tracks
    def get_songs(self, album_id=None):
        if album_id is not None:
            album_id = int(album_id)
        params = {'properties': PROPERTIES['song'],
                  'data_key': 'songs'}
        if album_id:
            params['filter'] = {'albumid': album_id}
        songs = list(self._make_generator(
            self._api.AudioLibrary.GetSongs, **params))
        # Second level cache so that get_song doesn't have to make an API call
        self._cache.update({'song.{}'.format(s['songid']): s for s in songs})
        return songs

    def get_song(self, song_id):
        song_id = int(song_id)
        cached = self._cache.get('song.{}'.format(song_id))
        if cached is None:
            try:
                song = self._api.AudioLibrary.GetSongDetails(
                    songid=song_id,
                    properties=PROPERTIES['song'])['songdetails']
                self._cache['song.{}'.format(song_id)] = song
                return song
            except Exception as e:
                self._cache['song.{}'.format(song_id)] = None
                return None
        else:
            return cached

    @cached()
    def get_url(self, filepath):
        path = self._api.Files.PrepareDownload(filepath)
        url = "http://{}{}:{}/{}".format(
            "{}:{}@".format(*self.auth) if self.auth else '',
            self.host, self.port, path['details']['path'])
        self._cache['trackurl.{}'.format(url)] = filepath
        return url
Example #24
from flask import request, jsonify
from flask import jsonify

from bs4 import BeautifulSoup
import urllib.request
import re
import json
import ast
from datetime import datetime
from cachetools import cached, TTLCache
from apscheduler.schedulers.background import BackgroundScheduler
from app.scraping import fetch_data_coronavirus

app = Flask(__name__)
api = Api(app)
cacheCovid = TTLCache(maxsize=1024, ttl=30)

sched = BackgroundScheduler()
sched.add_job(fetch_data_coronavirus, 'interval', minutes=1, max_instances=2)
sched.start()


@app.route('/', methods=['GET'])
def home():
    return "<h1>Unofficial Worldometers.info API</h1><p>This site is a API for get data from Worldometers.info</p>"


# CORONAVIRUS SECTION
@cached(cacheCovid)
def get_data_coronavirus():
    f = open('app/data/coronavirus.json', "r")
Example #25
from odoo import _
from odoo import tools
from odoo import http
from odoo.http import request
from odoo.http import Response

_logger = logging.getLogger(__name__)

try:
    import requests
except ImportError:
    _logger.warn('Cannot `import requests`.')

try:
    from cachetools import TTLCache
    pdf_cache = TTLCache(maxsize=25, ttl=1200)
except ImportError:
    _logger.warn('Cannot `import cachetools`.')

try:
    import pdfconv
except ImportError:
    _logger.warn('Cannot `import pdfconv`.')


class MSOfficeParserController(http.Controller):
    @http.route('/web/preview/converter/msoffice', auth="user", type='http')
    def convert_msoffice(self,
                         url,
                         export_filename=None,
                         force_compute=False,
Example #26
__author__ = '''Costas Tyfoxylos <*****@*****.**>'''
__docformat__ = '''google'''
__date__ = '''2017-12-09'''
__copyright__ = '''Copyright 2017, Costas Tyfoxylos'''
__credits__ = ["Costas Tyfoxylos"]
__license__ = '''MIT'''
__maintainer__ = '''Costas Tyfoxylos'''
__email__ = '''<*****@*****.**>'''
__status__ = '''Development'''  # "Prototype", "Development", "Production".

# This is the main prefix used for logging
LOGGER_BASENAME = '''toonapilib'''
LOGGER = logging.getLogger(LOGGER_BASENAME)
LOGGER.addHandler(logging.NullHandler())

STATE_CACHE = TTLCache(maxsize=1, ttl=STATE_CACHING_SECONDS)
THERMOSTAT_STATE_CACHE = TTLCache(maxsize=1,
                                  ttl=THERMOSTAT_STATE_CACHING_SECONDS)

INVALID_TOKEN = 'Invalid Access Token'


class Toon:  # pylint: disable=too-many-instance-attributes
    """Model of the toon smart meter from eneco."""
    def __init__(self,
                 authentication_token,
                 tenant_id='eneco',
                 display_common_name=None):
        logger_name = u'{base}.{suffix}'.format(base=LOGGER_BASENAME,
                                                suffix=self.__class__.__name__)
        self._logger = logging.getLogger(logger_name)
Example #27
from time import perf_counter
from functools import wraps
from cachetools import TTLCache
from threading import RLock
from SaitamaRobot import (DEL_CMDS, DEV_USERS, DRAGONS, SUPPORT_CHAT, DEMONS,
                          TIGERS, WOLVES, dispatcher)

from telegram import Chat, ChatMember, ParseMode, Update
from telegram.ext import CallbackContext

# stores admemes in memory for 10 min.
ADMIN_CACHE = TTLCache(maxsize=512, ttl=60 * 10, timer=perf_counter)
THREAD_LOCK = RLock()


def is_whitelist_plus(chat: Chat,
                      user_id: int,
                      member: ChatMember = None) -> bool:
    return any(user_id in user
               for user in [WOLVES, TIGERS, DEMONS, DRAGONS, DEV_USERS])


def is_support_plus(chat: Chat,
                    user_id: int,
                    member: ChatMember = None) -> bool:
    return user_id in DEMONS or user_id in DRAGONS or user_id in DEV_USERS


def is_sudo_plus(chat: Chat, user_id: int, member: ChatMember = None) -> bool:
    return user_id in DRAGONS or user_id in DEV_USERS
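
TTLCache does no locking of its own, which is why this module pairs ADMIN_CACHE with an RLock. cachetools' decorators can take the lock directly; a sketch of wiring the two together (the handler body is hypothetical):

from cachetools import cached
from cachetools.keys import hashkey

@cached(cache=ADMIN_CACHE, lock=THREAD_LOCK,
        key=lambda chat: hashkey(chat.id))
def admin_ids(chat: Chat):
    return {m.user.id for m in chat.get_administrators()}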
Example #28
class Client(__Client):
    """Interact with the public transport-related endpoints.

    References:
        https://www.mytransport.sg/content/dam/datamall/datasets/LTA_DataMall_API_User_Guide.pdf
    """
    def __init__(self, account_key):
        super(Client, self).__init__(account_key)

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_MINUTE))
    def bus_arrival(self, bus_stop_code, service_number=None):
        """Get real-time Bus Arrival information of Bus Services at a queried
        Bus Stop, including Est. Arrival Time, Est. Current Location, Est.
        Current Load.

        Arguments:
            bus_stop_code (str):
                5-digit bus stop reference code.
            service_number (str):
                (optional) Bus service number.
                If omitted, then all bus services at the bus stop code are
                returned.

        Returns:
            (list) Information about bus arrival at the specified bus stop.

        Raises:
            ValueError
                Raised if bus_stop_code or service_number are not strings.
            ValueError
                Raised if bus_stop_code is not exactly 5 characters long.
            ValueError
                Raised if bus_stop_code is not a number-like string.
        """
        if not isinstance(bus_stop_code, str):
            raise ValueError('bus_stop_code is not a string.')
        if len(bus_stop_code) != 5:
            raise ValueError('bus_stop_code is not a 5-character string.')
        try:
            _ = int(bus_stop_code)
        except ValueError:
            raise ValueError('bus_stop_code is not a valid number.')

        if service_number is not None and not isinstance(service_number, str):
            raise ValueError('service_number is not a string.')

        bus_arrival = self.send_request(
            BUS_ARRIVAL_API_ENDPOINT,
            BusStopCode=bus_stop_code,
            ServiceNo=service_number,
        )

        return bus_arrival

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_DAY))
    def bus_services(self):
        """Get detailed service information for all buses currently in
        operation, including: first stop, last stop, peak / offpeak frequency
        of dispatch.

        Returns:
            (list) Information about bus services currently in operation.
        """
        bus_services = self.send_request(BUS_SERVICES_API_ENDPOINT)

        return bus_services

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_DAY))
    def bus_routes(self):
        """Get detailed route information for all services currently in
        operation, including: all bus stops along each route, first/last bus
        timings for each stop.

        Returns:
            (list) Information about bus routes currently in operation.
        """
        bus_routes = self.send_request(BUS_ROUTES_API_ENDPOINT)

        return bus_routes

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_DAY))
    def bus_stops(self):
        """Get detailed information for all bus stops currently being serviced
        by buses, including: Bus Stop Code, location coordinate.

        Returns:
            (list) Location coordinates of bus stops with active services.
        """
        bus_stops = self.send_request(BUS_STOPS_API_ENDPOINT)

        return bus_stops

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_FIVE_MINUTES))
    def facilities_maintenance(self, station_code):
        """Get the pre-signed links to JSON file containing facilities
        maintenance schedules of the particular station.

        Arguments:
            station_code (str):
                Station Code of train station.
                Refer to the STATION_CODES_REGEX_PATTERN constant for the
                expected regex pattern that this code has to match.

        Returns:
            (str) Link for downloading the requested JSON file.

        Raises:
            ValueError
                Raised if station_code is not specified.
            ValueError
                Raised if station_code is not a string.
            ValueError:
                Raised if station_code does not match the expected regex
                pattern.
        """
        if station_code is None:
            raise ValueError('Missing station_code.')

        if not isinstance(station_code, str):
            raise ValueError('station_code is not a string.')

        if not re.search(STATION_CODES_REGEX_PATTERN, station_code):
            raise ValueError('station_code is invalid.')

        facilities_maintenance_link = self.send_download_request(
            FACILITIES_MAINTENANCE_API_ENDPOINT,
            StationCode=station_code,
        )

        return facilities_maintenance_link

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_MONTH))
    def passenger_volume_by_bus_stops(self, dt=None):
        """Get tap in and tap out passenger volume by weekdays and weekends
        for individual bus stop.

        Arguments:
            dt (date):
                (optional) Date of a specific month to get passenger volume.
                This must be a valid date object, e.g. `date(2019, 7, 2)`.
                But only the year and month will be used since that is what
                the endpoint accepts.
                Must be within the last 3 months of the current month.

        Returns:
            (str) Download link of file containing passenger volume data.
        """
        passenger_volume_link = self.__get_passenger_volume_link(
            PASSENGER_VOLUME_BY_BUS_STOPS_API_ENDPOINT,
            dt,
        )

        return passenger_volume_link

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_MONTH))
    def passenger_volume_by_origin_destination_bus_stops(self, dt=None):
        """Get number of trips by weekdays and weekends from origin to
        destination bus stops.

        Arguments:
            dt (date):
                (optional) Date of a specific month to get passenger volume.
                This must be a valid date object, e.g. `date(2019, 7, 2)`.
                But only the year and month will be used since that is what
                the endpoint accepts.
                Must be within the last 3 months of the current month.

        Returns:
            (str) Download link of file containing passenger volume data.
        """
        passenger_volume_link = self.__get_passenger_volume_link(
            PASSENGER_VOLUME_BY_ORIGIN_DESTINATION_BUS_STOPS_API_ENDPOINT,
            dt,
        )

        return passenger_volume_link

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_MONTH))
    def passenger_volume_by_origin_destination_train_stations(self, dt=None):
        """Get number of trips by weekdays and weekends from origin to
        destination train stations.

        Arguments:
            dt (date):
                (optional) Date of a specific month to get passenger volume.
                This must be a valid date object, e.g. `date(2019, 7, 2)`.
                But only the year and month will be used since that is what
                the endpoint accepts.
                Must be within the last 3 months of the current month.
                Default: None, i.e. today.

        Returns:
            (str) Download link of file containing passenger volume data.
        """
        passenger_volume_link = self.__get_passenger_volume_link(
            PASSENGER_VOLUME_BY_ORIGIN_DESTINATION_TRAIN_STATIONS_API_ENDPOINT,
            dt,
        )

        return passenger_volume_link

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_MONTH))
    def passenger_volume_by_train_stations(self, dt=None):
        """Get tap in and tap out passenger volume by weekdays and weekends
        for individual train station.

        Arguments:
            dt (date):
                (optional) Date of a specific month to get passenger volume.
                This must be a valid date object, e.g. `date(2019, 7, 2)`.
                But only the year and month will be used since that is what
                the endpoint accepts.
                Must be within the last 3 months of the current month.

        Returns:
            (str) Download link of file containing passenger volume data.
        """
        passenger_volume_link = self.__get_passenger_volume_link(
            PASSENGER_VOLUME_BY_TRAIN_STATIONS_API_ENDPOINT,
            dt,
        )

        return passenger_volume_link

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_MINUTE))
    def taxi_availability(self):
        """Get location coordinates of all Taxis that are currently available
        for hire. Does not include "Hired" or "Busy" Taxis.

        Returns:
            (list) Location coordinates of available taxis.
        """
        taxi_availabilities = self.send_request(
            TAXI_AVAILABILITY_API_ENDPOINT, )

        return taxi_availabilities

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_MONTH))
    def taxi_stands(self):
        """Get detailed information of Taxi stands, such as location and whether
        is it barrier free.

        Returns:
            (list) Detailed information of taxi stands.
        """
        taxi_stands = self.send_request(TAXI_STANDS_API_ENDPOINT, )

        return taxi_stands

    @cached(cache=TTLCache(maxsize=CACHE_MAXSIZE, ttl=CACHE_ONE_HOUR))
    def train_service_alerts(self):
        """Get detailed information on train service unavailability during
        scheduled operating hours, such as affected line and stations etc.

        Returns:
            (list) Information about train service unavailability.
        """
        train_service_alerts = self.send_request(
            TRAIN_SERVICE_ALERTS_API_ENDPOINT, )

        return train_service_alerts

    # private

    def __get_passenger_volume_link(self, endpoint, dt=None):
        """Get download link of the passenger volume data file for the
        specific endpoint.

        Arguments:
            endpoint(str):
                API endpoint URL to call.
            dt (date):
                (optional) Date of a specific month to get passenger volume.
                This must be a valid date object, e.g. `date(2019, 7, 2)`.
                But only the year and month will be used since that is what
                the endpoint accepts.
                Must be within the last 3 months of the current month.

        Returns:
            (str) Download link of file containing passenger volume data.

        Raises:
            ValueError:
                Raised if the specified date is more than 3 months ago.
            APIError:
                Raised if no download link is returned.
        """
        if dt is not None and \
            not timezone.date_is_within_last_three_months(dt):
            raise ValueError('dt is not within the last 3 months.')

        self.validate_kwargs(Date=dt)

        passenger_volume_link = self.send_download_request(endpoint, Date=dt)

        return passenger_volume_link
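
The CACHE_* constants used throughout this client come from the package's own constants module; plausible definitions, shown only to make the TTLs concrete (assumed values, not the library's actual ones):

CACHE_MAXSIZE = 1024
CACHE_ONE_MINUTE = 60
CACHE_FIVE_MINUTES = 5 * 60
CACHE_ONE_HOUR = 60 * 60
CACHE_ONE_DAY = 24 * 60 * 60
CACHE_ONE_MONTH = 30 * 24 * 60 * 60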
Example #29
        """ Creates and saves the current model to the DB """
        db.session.add(self)
        db.session.commit()

    def save(self):
        """ Save changes to db"""
        db.session.commit()

    def delete(self):
        """ Deletes the current model from the DB """
        db.session.delete(self)
        db.session.commit()


# cache mapper counts for 30 seconds
active_mappers_cache = TTLCache(maxsize=1024, ttl=30)


class Project(db.Model):
    """ Describes a HOT Mapping Project """

    __tablename__ = "projects"

    # Columns
    id = db.Column(db.Integer, primary_key=True)
    status = db.Column(db.Integer,
                       default=ProjectStatus.DRAFT.value,
                       nullable=False)
    created = db.Column(db.DateTime, default=timestamp, nullable=False)
    priority = db.Column(db.Integer, default=ProjectPriority.MEDIUM.value)
    default_locale = db.Column(
Example #30
class Stackexchange(commands.Cog):
    def __init__(self, bot: commands.Bot):
        self.bot = bot
        self.ready = False
        self.sites = None
        self.token_cache = TTLCache(maxsize=1000, ttl=600)
        self.load_sites.start()

    @property
    def session(self):
        return self.bot.http._HTTPClient__session

    @tasks.loop(count=1)
    async def load_sites(self):
        if os.path.isfile("cache/stackexchange_sites.json"):
            with open("cache/stackexchange_sites.json") as f:
                self.sites = json.load(f)
        else:
            try:
                data = await self.stack_request(
                    None,
                    "GET",
                    "/sites",
                    params={
                        "pagesize": "500",
                        "filter": "*Ids4-aWV*RW_UxCPr0D"
                    },
                )
            except Exception:
                return traceback.print_exc()
            else:
                self.sites = data["items"]
                if not os.path.isdir("cache"):
                    os.mkdir("cache")
                with open("cache/stackexchange_sites.json", "w") as f:
                    json.dump(self.sites, f)

        self.ready = True

    async def cog_check(self, ctx: commands.Context):
        if not self.ready:
            raise StackExchangeError(
                "Stackexchange commands are not ready yet")
        return True

    async def cog_before_invoke(self, ctx: commands.Context):
        if ctx.command == self.link_stackoverflow:
            return

        token = self.token_cache.get(ctx.author.id)
        if not token:
            user = await UserModel.get_or_none(id=ctx.author.id)
            if user is None or user.stackoverflow_oauth_token is None:
                raise StackExchangeNotLinkedError()

            token = user.stackoverflow_oauth_token
            self.token_cache[ctx.author.id] = token
        ctx.stack_token = token  # type: ignore

    @commands.command(
        name="stackrep",
        aliases=[
            "stackreputation", "stackoverflowrep", "stackoverflowreputation"
        ],
    )
    async def stack_reputation(self, ctx: commands.Context):
        """Check your stackoverflow reputation"""
        # TODO: Use a stackexchange filter here
        # https://api.stackexchange.com/docs/filters
        data = await self.stack_request(
            ctx,
            "GET",
            "/me",
            data={
                "site": "stackoverflow",
            },
        )
        await ctx.send(data["items"][0]["reputation"])

    @flags.add_flag("--site", type=str, default="stackoverflow")
    @flags.add_flag("--tagged", type=str, nargs="+", default=[])
    @flags.add_flag("term", nargs="+")
    @flags.command(name="stacksearch", aliases=["stackser"])
    async def stackexchange_search(self, ctx: commands.Context, **kwargs):
        """Search stackexchange for your question"""
        term, sitename, tagged = (
            " ".join(kwargs["term"]),
            kwargs["site"],
            kwargs["tagged"],
        )

        site = None
        for s in self.sites:
            if s["api_site_parameter"] == sitename:
                site = s
                break
        if not site:
            raise StackExchangeError(f"Invalid site {sitename} provided")

        data = await self.stack_request(
            ctx,
            "GET",
            "/search/excerpts",
            data={
                "site": sitename,
                "sort": "relevance",
                "q": term,
                "tagged": ";".join(tagged),
                "pagesize": 5,
                "filter": "ld-5YXYGN1SK1e",
            },
        )
        embed = Embed(title=f"{site['name']} search", color=Color.green())
        embed.set_thumbnail(url=site["icon_url"])
        if data["items"]:
            for i, q in enumerate(data["items"], 1):
                tags = "\u2800".join(["`" + t + "`" for t in q["tags"]])
                embed.add_field(
                    name=str(i) + " " + html.unescape(q["title"]),
                    value=search_result_template.format(site=site,
                                                        q=q,
                                                        tags=tags),
                    inline=False,
                )
        else:
            embed.add_field(name="Oops", value="Couldn't find any results")
        await ctx.send(embed=embed)

    async def stack_request(
        self,
        ctx: commands.Context,
        method: str,
        endpoint: str,
        params: dict = None,
        data: dict = None,
    ):
        params = params or {}
        data = dict(data or {})  # copy: avoid mutating a shared default
        data.update(stack_oauth_config.dict())
        if ctx:
            data["access_token"] = (ctx.stack_token, )
        res = await self.session.request(
            method,
            f"https://api.stackexchange.com/2.2{endpoint}",
            params=params,
            data=data,
        )

        data = await res.json()
        if "error_message" in data:
            raise StackExchangeError(data["error_message"])
        return data

    @commands.command(name="linkstack", aliases=["lnstack"])
    async def link_stackoverflow(self, ctx: commands.Context):
        """Link your stackoverflow account"""
        expiry = datetime.datetime.utcnow() + datetime.timedelta(seconds=120)
        url = "https://stackoverflow.com/oauth/?" + urlencode({
            "client_id":
            stack_oauth_config.client_id,
            "scope":
            "no_expiry",
            "redirect_uri":
            "https://tech-struck.vercel.app/oauth/stackexchange",
            "state":
            jwt.encode({
                "id": ctx.author.id,
                "expiry": str(expiry)
            }, config.secret),
        })
        try:
            await ctx.author.send(embed=Embed(
                title="Connect Stackexchange",
                description=
                f"Click [this]({url}) to link your stackexchange account. This link invalidates in 2 minutes",
                color=Color.blue(),
            ))
        except Forbidden:
            await ctx.send(
                "Your DMs (direct messages) are closed. Open them so I can send you a safe authorization link."
            )
Example #31
 def __init__(self, **kwargs):
     super().__init__(**kwargs)
     self.__services_by_domain = TTLCache(maxsize=100, ttl=5)
Example #32
 def __init__(self, bot: commands.Bot):
     self.bot = bot
     self.ready = False
     self.sites = None
     self.token_cache = TTLCache(maxsize=1000, ttl=600)
     self.load_sites.start()
Example #33
    def __init__(self):

        self.count = goldman.config.RATE_LIMIT_COUNT
        self.duration = goldman.config.RATE_LIMIT_DURATION

        self.cache = TTLCache(maxsize=self.count, ttl=self.duration)
Example #34
class IPBlock(object):

    def __init__(self, app, read_preference=None, cache_size=None, cache_ttl=None,
                 blocking_enabled=True, logging_enabled=False):
        """
        Initialize IPBlock and set up a before_request handler in the
        app.

        You can override the default MongoDB read preference via the
        optional read_preference kwarg.

        You can limit the impact of the IP checks on your MongoDB by
        maintaining a local in-memory LRU cache. To do so, specify its
        cache_size (i.e. max number of IP addresses it can store) and
        cache_ttl (i.e. how many seconds each result should be cached
        for).

        To run in dry-run mode without blocking requests, set
        blocking_enabled to False. Set logging_enabled to True
        to log IPs that match blocking rules -- if enabled, will
        log even if blocking_enabled is False.
        """
        self.read_preference = read_preference
        self.blocking_enabled = blocking_enabled
        self.logger = None
        if logging_enabled:
            self.logger = app.logger
            self.block_msg = "blocking" if blocking_enabled else "blocking disabled"

        if cache_size and cache_ttl:
            # inline import because cachetools dependency is optional.
            from cachetools import TTLCache
            self.cache = TTLCache(cache_size, cache_ttl)
        else:
            self.cache = None

        app.before_request(self.block_before)

    def block_before(self):
        """
        Check the current request and block it if the IP address it's
        coming from is blacklisted.
        """
        # To avoid unnecessary database queries, ignore the IP check for
        # requests for static files
        if request.path.startswith(url_for('static', filename='')):
            return

        # Some static files might be served from the root path (e.g.
        # favicon.ico, robots.txt, etc.). Ignore the IP check for most
        # common extensions of those files.
        ignored_extensions = ('ico', 'png', 'txt', 'xml')
        if request.path.rsplit('.', 1)[-1] in ignored_extensions:
            return

        ips = request.headers.getlist('X-Forwarded-For')
        if not ips:
            return

        # If the X-Forwarded-For header contains multiple comma-separated
        # IP addresses, we're only interested in the last one.
        ip = ips[0].strip()
        if ip[-1] == ',':
            ip = ip[:-1]
        ip = ip.rsplit(',', 1)[-1].strip()

        if self.matches_ip(ip):
            if self.logger is not None:
                self.logger.info("IPBlock: matched {}, {}".format(ip, self.block_msg))
            if self.blocking_enabled:
                return 'IP Blocked', 200

    def matches_ip(self, ip):
        """Return True if the given IP is blacklisted, False otherwise."""

        # Check the cache if caching is enabled
        if self.cache is not None:
            matches_ip = self.cache.get(ip)
            if matches_ip is not None:
                return matches_ip

        # Query MongoDB to see if the IP is blacklisted
        matches_ip = IPNetwork.matches_ip(
            ip, read_preference=self.read_preference)

        # Cache the result if caching is enabled
        if self.cache is not None:
            self.cache[ip] = matches_ip

        return matches_ip
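A minimal usage sketch for the extension above, assuming an existing Flask app; the parameter values are illustrative:

from flask import Flask

app = Flask(__name__)

# Dry-run: cache up to 4096 per-IP verdicts for five minutes and log
# matches without actually blocking any requests.
ip_block = IPBlock(app, cache_size=4096, cache_ttl=300,
                   blocking_enabled=False, logging_enabled=True)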
Example #35
 def __init__(self, maxsize, ttl=0, **kwargs):
     TTLCache.__init__(self, maxsize, ttl=ttl, timer=Timer(), **kwargs)
Example #36
    MappingNotAllowed,
    ValidatingNotAllowed,
    MappingPermission,
    ValidationPermission,
    TeamRoles,
)
from backend.models.postgis.task import Task, TaskHistory
from backend.models.postgis.utils import NotFound
from backend.services.users.user_service import UserService
from backend.services.project_search_service import ProjectSearchService
from backend.services.project_admin_service import ProjectAdminService
from backend.services.team_service import TeamService
from sqlalchemy import func, or_
from sqlalchemy.sql.expression import true

summary_cache = TTLCache(maxsize=1024, ttl=600)


class ProjectServiceError(Exception):
    """ Custom Exception to notify callers an error occurred when handling projects """
    def __init__(self, message):
        if current_app:
            current_app.logger.error(message)


class ProjectService:
    @staticmethod
    def get_project_by_id(project_id: int) -> Project:
        project = Project.get(project_id)
        if project is None:
            raise NotFound()
        return project
Example #37
    def __init__(self, args: list, **kwargs):
        parser = argparse.ArgumentParser(prog='imtoken-pricing-server')

        parser.add_argument("--http-address",
                            type=str,
                            default='',
                            help="Address of the Imtoken Pricing server")

        parser.add_argument("--http-port",
                            type=int,
                            default=8777,
                            help="Port of the Imtoken Pricing server")

        parser.add_argument(
            "--imtoken-api-server",
            type=str,
            default='http://localhost:8157',
            help=
            "Address of the Imtoken API server (default: 'http://localhost:8157')"
        )

        parser.add_argument(
            "--imtoken-api-timeout",
            type=float,
            default=9.5,
            help=
            "Timeout for accessing the Imtoken API (in seconds, default: 9.5)")

        parser.add_argument(
            "--base-pair",
            type=str,
            required=True,
            help="Token pair (sell/buy) on which the keeper will operate")

        parser.add_argument("--base-config",
                            type=str,
                            required=True,
                            help="Bands configuration file")

        parser.add_argument(
            "--counter-pair",
            type=str,
            required=True,
            help="Token pair (sell/buy) on which the keeper will operate")

        parser.add_argument("--counter-config",
                            type=str,
                            required=True,
                            help="Bands configuration file")

        parser.add_argument("--price-feed",
                            type=str,
                            required=True,
                            help="Source of price feed")

        parser.add_argument(
            "--price-feed-expiry",
            type=int,
            default=120,
            help="Maximum age of the price feed (in seconds, default: 120)")

        parser.add_argument("--spread-feed",
                            type=str,
                            help="Source of spread feed")

        parser.add_argument(
            "--spread-feed-expiry",
            type=int,
            default=3600,
            help="Maximum age of the spread feed (in seconds, default: 3600)")

        parser.add_argument("--control-feed",
                            type=str,
                            help="Source of control feed")

        parser.add_argument(
            "--control-feed-expiry",
            type=int,
            default=86400,
            help="Maximum age of the control feed (in seconds, default: 86400)"
        )

        parser.add_argument("--order-cache-maxsize",
                            type=int,
                            default=100000,
                            help="Maximum size of orders cache")

        parser.add_argument("--order-cache-ttl",
                            type=int,
                            default=10,
                            help="Orders time to live")

        parser.add_argument("--debug",
                            dest='debug',
                            action='store_true',
                            help="Enable debug output")

        self.arguments = parser.parse_args(args)
        setup_logging(self.arguments)

        self.cache = TTLCache(maxsize=self.arguments.order_cache_maxsize,
                              ttl=self.arguments.order_cache_ttl)
        self.base_bands_config = ReloadableConfig(self.arguments.base_config)
        self.counter_bands_config = ReloadableConfig(
            self.arguments.counter_config)
        self.price_feed = PriceFeedFactory().create_price_feed(self.arguments)
        self.spread_feed = create_spread_feed(self.arguments)
        self.control_feed = create_control_feed(self.arguments)

        self.history = History()

        pair = ImtokenPair(self.arguments.base_pair,
                           self.arguments.counter_pair)

        application = tornado.web.Application([
            (r"/pairs", PairsHandler, dict(pair=pair)),
            (r"/indicativePrice", IndicativePriceHandler,
             dict(pair=pair,
                  base_bands_config=self.base_bands_config,
                  counter_bands_config=self.counter_bands_config,
                  price_feed=self.price_feed,
                  spread_feed=self.spread_feed,
                  control_feed=self.control_feed,
                  history=self.history,
                  cache=self.cache)),
            (r"/price", PriceHandler,
             dict(pair=pair,
                  base_bands_config=self.base_bands_config,
                  counter_bands_config=self.counter_bands_config,
                  price_feed=self.price_feed,
                  spread_feed=self.spread_feed,
                  control_feed=self.control_feed,
                  history=self.history,
                  cache=self.cache)),
            (r"/deal", DealHandler, dict(cache=self.cache,
                                         schema=deal_schema())),
        ])
        application.listen(port=self.arguments.http_port,
                           address=self.arguments.http_address)
        tornado.ioloop.IOLoop.current().start()
Example #38
class RedisConfig(object):

    __cached_page = TTLCache(128, 1)
    __cached_conf = TTLCache(128, 3)

    def __init__(self):
        self.__conf = {'connection': {}}

    def from_object(self, path, name='conf'):
        obj = self._load_object(path)
        for k in dir(obj):
            if k.startswith('_'):
                continue
            self.__conf[k] = getattr(obj, k)

        if name not in self.__conf['connection']:
            raise ConfigError(u'Please configure the cache connection: <redis://...>')

        cc = self.__conf['connection'][name]
        self._redis = redis.from_url(cc['dsn'])

    def _load_object(self, path):
        # imp = import_from_cwd
        if ':' in path:
            # Path includes attribute so can just jump
            # here (e.g., ``os.path:abspath``).
            return symbol_by_name(path, imp=import_from_cwd)

        # Not sure if path is just a module name or if it includes an
        # attribute name (e.g., ``os.path``, vs, ``os.path.abspath``).
        try:
            return import_from_cwd(path)
        except ImportError:
            # Not a module name, so try module + attribute.
            return symbol_by_name(path, imp=import_from_cwd)

    # def setup(self, dsn, **kw):
    #     self._redis = redis.from_url(dsn)
    #     # maxsize = kw.pop('maxsize', 128)
    #     # ttltime = kw.pop('ttl', 600)
    #     # self.cached = TTLCache(maxsize, ttltime)
    #     self.__conf.setdefault('connection', {})
    #     self.__conf.update(kw)

    def clear(self):
        self.__cached_page.clear()
        self.__cached_conf.clear()

    def serialize(self, data):
        pass

    def unserialize(self, data):
        return json.loads(data)

    def raw_set(self, name, key, value=None):
        if isinstance(key, dict):
            self._redis.hmset(name, key)
        else:
            self._redis.hset(name, key, value)

    def raw_get(self, name, key=None, *keys):
        field = arg_to_iter(key)
        field.extend(keys)

        if field:
            value = self._redis.hmget(name, field)
            if len(field) == 1:
                return value[0]
            return dict(zip(field, value))
        else:
            data = self._redis.hgetall(name)
            for k in data:
                data[k] = json.loads(data[k])
            return data

    def _get(self, name, key=None, *keys):
        field = arg_to_iter(key)
        field.extend(keys)
        try:
            if name.startswith('conf:'):
                _conf = self.__conf.get(name[5:])
                if _conf:
                    if field:
                        if len(field) == 1:
                            return copy.deepcopy(_conf.get(field[0]))
                        return dict([(k, copy.deepcopy(_conf.get(k)))
                                     for k in field])
                    else:
                        return copy.deepcopy(_conf)

            if field:
                value = self._redis.hmget(name, field)
                value = [None if v is None else json.loads(v) for v in value]
                if len(value) == 1:
                    return value[0]
                return dict(zip(field, value))
            else:
                # if name in self.__conf:
                #     return self.__conf[name]

                data = self._redis.hgetall(name)
                for k in data:
                    data[k] = json.loads(data[k])
                return data
        except Exception as e:
            raise ConfigError(u'Failed to read config %s [ %s ]\n%s' %
                              (name, ', '.join(field), e))

    def _set(self, name, key, value=None):
        if isinstance(key, dict):
            data = dict([(k, json.dumps(v)) for k, v in key.items()])
            self._redis.hmset(name, data)
        else:
            self._redis.hset(name, key, json.dumps(value))

    def _delete(self, name, key=None):
        if key:
            self._redis.hdel(name, key)
        else:
            self._redis.delete(name)

    @cached(__cached_conf)
    def get(self, key, default=None):
        val = self._get('conf:common', key)
        if val is None:
            return default
        return val

    def set(self, key, value=None):
        return self._set('conf:common', key, value)

    @cached(__cached_conf)
    def get_connection(self, key):
        conn = self.__conf['connection']
        if key in conn:
            return conn[key]

        conf = self._get('conf:connection', key)
        if conf:
            return conf  # _get already resolved the key to its value
        return None

    def set_connection(self, key, value=None):
        return self._set('conf:connection', key, value)

    @cached(__cached_conf)
    def get_http(self, key=None, *keys):
        return self._get('conf:http', key, *keys)

    @cached(__cached_conf)
    def get_proxy(self, key=None, *keys):
        return self._get('conf:proxy', key, *keys)

    @cached(__cached_page)
    def get_page(self, project, job, page):
        name = 'project:%s' % project
        key_job = 'job:%s' % job
        key_page = 'page:%s' % page
        conf = self._get(name, key_job, key_page)
        c = {}
        c = merge(c, conf[key_job], conf[key_page])
        return c

    def set_page(self, project, page, data):
        name = 'project:%s' % project
        key = 'page:%s' % page
        return self._set(name, key, data)

    def del_page(self, project, page):
        name = 'project:%s' % project
        key = 'page:%s' % page
        return self._delete(name, key)

    @cached(__cached_page)
    def get_form(self, project, form=None, tag=None):
        name = 'project:%s' % project
        if form is None:
            ways = []
            for _, way in self._redis.hscan_iter(name, 'form:*'):
                frm = json.loads(way)
                if not tag or frm['tags'] == tag:
                    ways.append(frm)
            return ways

        key = 'form:%s' % form
        return self._get(name, key)

    def set_form(self, project, form, data):
        name = 'project:%s' % project
        key = 'form:%s' % form
        return self._set(name, key, data)

    def del_form(self, project, form):
        name = 'project:%s' % project
        key = 'form:%s' % form
        return self._delete(name, key)

    @cached(__cached_page)
    def get_job(self, project, job):
        name = 'project:%s' % project
        key = 'job:%s' % job
        return self._get(name, key)

    def set_job(self, project, job, data):
        name = 'project:%s' % project
        key = 'job:%s' % job
        return self._set(name, key, data)

    def del_job(self, project, job):
        name = 'project:%s' % project
        key = 'job:%s' % job
        return self._delete(name, key)

    @cached(__cached_page)
    def get_action(self, project, action):
        if project is None:
            name = 'global:action'
        else:
            name = 'project:%s' % project
        key = 'action:%s' % action
        return self._get(name, key)

    def set_action(self, project, action, data):
        if project is None:
            name = 'global:action'
        else:
            name = 'project:%s' % project
        key = 'action:%s' % action
        return self._set(name, key, data)

    def del_action(self, project, action):
        if project is None:
            name = 'global:action'
        else:
            name = 'project:%s' % project
        key = 'action:%s' % action
        return self._delete(name, key)

    @cached(__cached_page)
    def get_script(self, project, script):
        if project is None:
            name = 'global:script'
        else:
            name = 'project:%s' % project
        key = 'script:%s' % script
        return self._get(name, key)

    def set_script(self, project, script, data):
        if project is None:
            name = 'global:script'
        else:
            name = 'project:%s' % project
        key = 'script:%s' % script
        return self._set(name, key, data)

    def del_script(self, project, script):
        if project is None:
            name = 'global:script'
        else:
            name = 'project:%s' % project
        key = 'script:%s' % script
        return self._delete(name, key)

    # authorization
    @cached(__cached_page)
    def get_blocked(self, project, job):
        name = 'project:%s' % project
        key = 'blocked:%s' % job
        return self._get(name, key)

    def set_blocked(self, project, job, data):
        name = 'project:%s' % project
        key = 'blocked:%s' % job
        return self._set(name, key, data)

    def del_blocked(self, project, job):
        name = 'project:%s' % project
        key = 'blocked:%s' % job
        return self._delete(name, key)
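One caveat worth noting about the `@cached(__cached_conf)` pattern above: `cachetools.cached` builds its key from all positional arguments, including `self`, so `self` must be hashable and entries are effectively per instance. A hedged sketch of keying on the argument alone so all instances share entries; the class and method below are illustrative:

from cachetools import TTLCache, cached
from cachetools.keys import hashkey

_shared_conf = TTLCache(maxsize=128, ttl=3)


class SharedConfig:
    @cached(_shared_conf, key=lambda self, key: hashkey(key))
    def get(self, key):
        """Hypothetical expensive lookup, cached by `key` alone."""
        return key.upper()  # placeholder work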
Example #39
 def __init__(self, maxsize, ttl, prune_time=5):
     self.prune_time = prune_time
     self.cache = TTLCache(maxsize, ttl)
Example #40
    'NS': (dns.NS, QTYPE.NS),
    'PTR': (dns.PTR, QTYPE.PTR),
    'RRSIG': (dns.RRSIG, QTYPE.RRSIG),
    'SOA': (dns.SOA, QTYPE.SOA),
    'SRV': (dns.SRV, QTYPE.SRV),
    'TXT': (dns.TXT, QTYPE.TXT),
    'SPF': (dns.TXT, QTYPE.TXT),
}


def returnfalse(key):
    return False


CACHETIMEOUT = 60 * 60 * 6  # 6 hours
cache = TTLCache(maxsize=256, ttl=CACHETIMEOUT, missing=returnfalse)


class Record:
    def __init__(self, rname, rtype, args):
        self._rname = DNSLabel(rname)

        rd_cls, self._rtype = TYPE_LOOKUP[rtype]

        if self._rtype == QTYPE.SOA and len(args) == 2:
            # add sensible times to SOA
            args += (SERIAL_NO, 3600, 3600 * 3, 3600 * 24, 3600),

        if self._rtype == QTYPE.TXT and len(args) == 1 and isinstance(
                args[0], str) and len(args[0]) > 255:
            # wrap long TXT records as per dnslib's docs.
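A compatibility note: the `missing=` constructor keyword used above was removed in cachetools 3.0. On current releases the same fallback can be sketched by overriding `__missing__`; the subclass name is illustrative:

from cachetools import TTLCache


class FalseOnMissTTLCache(TTLCache):
    def __missing__(self, key):
        # Return False for absent or expired keys instead of raising.
        return False


cache = FalseOnMissTTLCache(maxsize=256, ttl=CACHETIMEOUT)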
Example #41
import random
import speedtest
from flask import Flask, request
from pymessenger.bot import Bot
import modules.hackermode as ghack
import json
import weather
from time import sleep
from cachetools import TTLCache

app = Flask(__name__)
ACCESS_TOKEN = 'YOUR_FACEBOOK_PAGE_ACCESS_TOKEN'
VERIFY_TOKEN = 'YOUR_VERIFY_TOKEN'
bot = Bot(ACCESS_TOKEN)

cache = TTLCache(maxsize=256, ttl=120)

sms_na = []


quicks = [
    {
        "content_type":"text",
        "title":"👌 Yes",
        "payload":"quick_yes",
    },{
        "content_type":"text",
        "title":"✋🏻 No",
        "payload":"quick_no"
    }
Example #42
@Author    : Deco [[email protected]]
@Created   : 7/10/18 10:03 AM
@Desc      : 
"""
import logging
import os
import string
import time
# from multiprocessing import Pool

import numpy as np
import spacy
from cachetools import cached, TTLCache

file_dir = os.path.dirname(os.path.dirname(__file__))
cache = TTLCache(maxsize=100, ttl=300)
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                    level=logging.INFO)

# Convert sentences to vectors


def model_load():
    nlp0 = spacy.load('en_core_web_md')
    logging.info('The model was loaded.')
    return nlp0


nlp = model_load()

Example #43
from typing import Dict, Optional

import requests
from bs4 import BeautifulSoup
from cachetools import TTLCache, cached

HUMIDITY = "humidity"


@cached(cache=TTLCache(maxsize=1, ttl=300))
def get_data() -> Optional[Dict[str, float]]:
    response = requests.get(
        "http://www.meteo.jankovic.cz/aktualni-pocasi-brno/")
    response.raise_for_status()

    data = BeautifulSoup(response.text,
                         "html.parser").find(id="aktualni_pocasi")

    return {
        HUMIDITY:
        float(
            data.find_all("tr")[2].find_all("td")[1].text.strip().split()[0])
    }
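Because the decorator above wraps `get_data` in a one-entry, five-minute cache, repeated calls inside the window reuse the parsed result instead of re-fetching the page:

print(get_data()[HUMIDITY])  # performs the HTTP request and parses the page
print(get_data()[HUMIDITY])  # served from the TTL cache, no second request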
Example #44
import dash
import dash_core_components as dcc
import dash_html_components as html
import base64
import geocoder
from datetime import datetime
import forecastio
from dash.dependencies import Input, Output, State
import plotly.graph_objs as go
from cachetools import cached, TTLCache

# This probably shouldn't be committed
API_KEY = "c5dde0bafce3442350c75b743864339a"

# Picked an arbitrary maxsize and ttl-- modify as needed
weather_cache = TTLCache(maxsize=5, ttl=3600*5)

@cached(weather_cache)
def _get_forecast(location):
    a = geocoder.location(location=location)
    lat = a.latlng[0]
    lng = a.latlng[1]
    forecast = forecastio.load_forecast(API_KEY, lat, lng)

    return forecast

def weather_on(loca):
    forecast = _get_forecast(loca)
    byHour = forecast.hourly()
    return byHour
Example #45
 def __init__(self, url):
     self._dataset = None
     self.__timestamp_cache = TTLCache(1, 3600)
     super(NetCDFData, self).__init__(url)
Example #46
    "screen_program": "Disabled",
    "last_screen_program": "Disabled",
    "initial_resolution": (0, 0),
    "dynamic_resolution": False,
}

# Settings change very rarely, cache them to reduce database roundtrips.
# This is especially advantageous for suggestions which check whether platforms are enabled.
# There is a data inconsistency issue when a setting is changed in one process.
# Only that process would flush its cache, others would retain the stale value.
# This could be fixed by communicating the cache flush through redis.
# However, with the daphne setup there is currently only one process handling requests,
# and settings are never changed outside a request (especially not in a celery worker).
# So this is fine as long as no additional daphne (or other) workers are used.
# The lights module flushes the cache in its update function.
cache: TTLCache = TTLCache(ttl=10, maxsize=128)
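# An illustrative sketch (not part of the original) of how the flush could
# be propagated across processes via redis, should more workers be added;
# the channel and helper names are hypothetical:
#
#     import redis
#     _redis = redis.Redis()
#
#     def flush_settings_cache() -> None:
#         cache.clear()
#         _redis.publish("settings_cache_flush", "1")
#
#     # Every worker subscribes to "settings_cache_flush" and calls
#     # cache.clear() when a message arrives.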


@cached(cache)
def get(key: str) -> Union[bool, int, float, str, tuple]:
    """This method returns the value for the given :param key:.
    Values of non-existing keys are set to their respective default value."""
    # values are stored as string in the database
    # cast the value to its respective type, defined by the default value, before returning it
    default = defaults[key]
    value = models.Setting.objects.get_or_create(
        key=key, defaults={"value": str(default)}
    )[0].value
    if type(default) is str:
        return str(value)
    if type(default) is int: