Example #1
 def __init__(self, store: dict = None,
              keygen: typing.Callable[..., str] = lambda _: str(uuid.uuid4())):
     """ Initialize the store """
     self._store: dict = expiringdict.ExpiringDict(
         max_len=1024,
         max_age_seconds=3600) if store is None else store
     self._keygen = keygen
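Example #1 leans on the two eviction rules every snippet on this page uses: entries disappear after max_age_seconds, and the oldest entry is dropped once max_len is reached. A quick self-contained demonstration (only the expiringdict package is assumed):

import time

import expiringdict

cache = expiringdict.ExpiringDict(max_len=2, max_age_seconds=1)
cache['a'] = 1
cache['b'] = 2
cache['c'] = 3              # max_len reached: the oldest key ('a') is evicted
print('a' in cache)         # False
print(cache.get('b'))       # 2
time.sleep(1.1)
print(cache.get('b'))       # None - the entry has expired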
Example #2
    def __init__(self, *args, **kwargs):
        """Intitializer function"""
        logging.debug("Initializing the interface.")
        self.cad = pifacecad.PiFaceCAD()
        self.current_page = 1
        self.pages = {1: self.show_time, 2: self.show_stats, 3: self.show_ip}
        logging.debug("Polling for inputs!")
        self.last_page = self.current_page
        # five minute cache
        self.cached_dict = expiringdict.ExpiringDict(max_len=100,
                                                     max_age_seconds=300)

        self.listener = pifacecad.SwitchEventListener(chip=self.cad)
        self.scheduler = BackgroundScheduler()
        self.scheduler.start()
        self.scheduler.add_job(self.update,
                               "interval",
                               seconds=10,
                               id="update_job")
        for i in range(6):
            self.listener.register(i, pifacecad.IODIR_FALLING_EDGE,
                                   logging.debug)

        self.listener.register(6, pifacecad.IODIR_FALLING_EDGE, self.page_left)
        self.listener.register(7, pifacecad.IODIR_FALLING_EDGE,
                               self.page_right)
        self.update()
        logging.debug("Activating the listener.")
        self.listener.activate()
Example #3
 def __init__(self, update_tabs_callback, update_tab_icon_callback):
     self._in_fds_by_browser_pid = dict()
     self._out_fds_by_browser_pid = dict()
     self._update_tabs_callback = update_tabs_callback
     self._update_tab_icon_callback = update_tab_icon_callback
     self._in_id = None
     self._icon_cache = expiringdict.ExpiringDict(
         max_len=100, max_age_seconds=self.ONE_MONTH_IN_SECONDS)
Example #4
    def __init__(self, update_tabs_callback, update_tab_icon_callback):
        self._update_tab_icon_callback = update_tab_icon_callback
        self.browsers = dict()
        self._icon_cache = expiringdict.ExpiringDict(
            max_len=100, max_age_seconds=self.ONE_MONTH_IN_SECONDS)

        def read_and_update_tabs(pid, tabs):
            self._populate_tabs_icons(tabs)
            update_tabs_callback(pid, tabs)

        self._update_tabs_callback = read_and_update_tabs
Example #5
 def __init__(self, client_key: str,
              client_secret: str,
              timeout: int = None,
              storage: dict = None):
     # pylint:disable=too-many-arguments
     self._client_key = client_key
     self._client_secret = client_secret
     self._timeout = timeout or 600
     self._pending = expiringdict.ExpiringDict(
         max_len=128,
         max_age_seconds=self._timeout) if storage is None else storage
Example #6
 def __init__(
     self,
     cache_image_expiration_time: int = 3600,
     **image_params,
 ):
     self._gbdx = gbdxtools.Interface()
     self._image_params = image_params
     self._catalog_image_cache = expiringdict.ExpiringDict(
         max_len=1000,
         max_age_seconds=cache_image_expiration_time,
     )
     self._not_in_catalog_image_cache = set()
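Example #6 pairs an expiring positive cache with a permanent negative cache (the set): known-missing IDs are never re-fetched, while found images are re-checked once the TTL lapses. A minimal sketch of that split under assumed names (fetch stands in for the expensive catalog call):

import expiringdict

_found = expiringdict.ExpiringDict(max_len=1000, max_age_seconds=3600)
_missing = set()

def lookup(key, fetch):
    if key in _missing:
        return None             # known miss: never retried
    if key not in _found:
        result = fetch(key)
        if result is None:
            _missing.add(key)   # misses are remembered without a TTL
            return None
        _found[key] = result    # hits expire and are re-fetched after an hour
    return _found[key]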
Example #7
    def __init__(self, flow_idle_timeout=10, flow_hard_timeout=400):
        core.listen_to_dependencies(self)

        self.log = core.getLogger()
        self.flow_idle_timeout = flow_idle_timeout
        self.flow_hard_timeout = flow_hard_timeout
        # A pathset is just a bunch of paths wrapped in an itertools.cycle;
        # use next(pathset) to get the next path.
        self.pending_capable = expdict.ExpiringDict(
            expdict_len, expdict_time
        )  # (init_ip, init_port, listen_ip, listen_port) => (init_hash, pathset)
        self.pending_join = expdict.ExpiringDict(
            expdict_len, expdict_time
        )  # (init_ip, init_port, listen_ip, listen_port) => (listen_hash, pathset)
        self.mptcp_connections = expdict.ExpiringDict(
            expdict_len, expdict_time)  # (to_hash) => from_hash, pathset
        self.tcp_path_assignment = expdict.ExpiringDict(
            expdict_len,
            expdict_time)  # (srcip, srcport, dstip, dstport) => path
Example #8
    def deco(fn):
        memo = expiringdict.ExpiringDict(8, time)

        def inner_deco(*args, **kwargs):
            key = (str(args), str(kwargs))
            if key in memo:
                return memo[key]
            # Note: size() and grow() are not part of the stock ExpiringDict
            # API; presumably they come from a project-local subclass.
            if len(memo) >= memo.size() and memo.size() < hard_max:
                memo.grow(memo.size() * 2)
            memo[key] = fn(*args, **kwargs)
            return memo[key]

        return inner_deco
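The decorator above is an excerpt: deco closes over time and hard_max from an enclosing factory that was not captured, and size()/grow() are not methods of the stock ExpiringDict. A self-contained sketch of the same memoization idea using only the stock API (all names here are mine):

import functools

import expiringdict

def memoize_for(seconds, max_entries=128):
    """Cache a function's results for `seconds` seconds (arguments must be hashable)."""
    memo = expiringdict.ExpiringDict(max_len=max_entries, max_age_seconds=seconds)

    def deco(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))
            if key not in memo:
                memo[key] = fn(*args, **kwargs)
            return memo[key]
        return inner
    return deco

@memoize_for(seconds=60)
def slow_lookup(x):
    return x * 2  # stand-in for an expensive computation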
Example #9
 def __init__(self, time_seconds=-1, maxCount=-1):
     """
     :type time_seconds: int
     :param time_seconds: time to live in cache (infinite if default)
     :type maxCount: int
     :param maxCount: maximum items in cache (infinite if default)
     """
     if time_seconds == -1:
         time_seconds = 9223372036854775807  # 2**63 - 1: effectively no expiry
     if maxCount == -1:
         maxCount = 9223372036854775807  # 2**63 - 1: effectively unbounded
     self.time_seconds = time_seconds
     self.maxCount = maxCount
     self.__storage = expiringdict.ExpiringDict(max_age_seconds=time_seconds, max_len=maxCount)
Example #10
 def __init__(
     self,
     sendmail,
     notify_cdata,
     token_store: tokens.TokenStore,
     expires_time: int = None,
     pending_storage: dict = None,
     email_template_text: str = DEFAULT_TEMPLATE_TEXT,
 ):
     # pylint:disable=too-many-arguments
     self._sendmail = sendmail
     self._email_template_text = email_template_text
     self._cdata = notify_cdata
     self._token_store = token_store
     self._lifetime = expires_time or 900
     self._pending = expiringdict.ExpiringDict(
         max_len=1024, max_age_seconds=self._lifetime
     ) if pending_storage is None else pending_storage
Example #11
    def __init__(self, region_name: str = 'us-east-2') -> None:
        self._ec2 = boto3.client('ec2', region_name=region_name)

        self._instances = guarded_kv_store.guarded_kv_store()
        self._images = guarded_kv_store.guarded_kv_store()
        self._flavors = guarded_kv_store.guarded_kv_store()

        # Requests are not kept in memory forever.
        #
        # I don't like the idea of creating an interface to delete
        # them, so it has to be done automagically - and the simplest
        # solution is to use expiringdict.
        #
        # The limits are defined in ec2_proxy.REQUESTS_LIMITS.
        self._requests = guarded_kv_store.guarded_kv_store(
            dict_cls=lambda: expiringdict.ExpiringDict(
                **ec2_proxy.REQUESTS_LIMITS))

        try:
            with open(ec2_proxy.FLAVORS_FILE, 'r') as fh:
                # Flavors are taken from the file shipped with project, because
                # there is no AWS EC2 Api call to retrieve them.
                #
                # They are not inserted directly into the code either, since
                # there are a lot of them - it would hurt the readability of
                # the code.
                #
                # Flavors are still stored in a guarded_kv_store, just to be
                # consistent with the other resources.
                flavors = [
                    flavor.strip() for flavor in fh.read().splitlines()
                    if flavor.strip()
                ]
                empty_values = itertools.repeat('', len(flavors))
                self._flavors.bulk_insert(
                    entries=list(zip(flavors, empty_values)))
        except FileNotFoundError:
            LOGGER.warning('Failed to load flavors. File does not exist: "%s"',
                           ec2_proxy.FLAVORS_FILE)
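guarded_kv_store is this project's own wrapper, but the dict_cls idea stands on its own: pass a factory so each store builds a private expiring backing dict instead of sharing one instance. A sketch with an assumed shape for REQUESTS_LIMITS:

import expiringdict

REQUESTS_LIMITS = {'max_len': 256, 'max_age_seconds': 900}  # assumed values

def make_backing():
    # Each call yields an independent cache; reusing a single instance
    # across stores would let them evict each other's entries.
    return expiringdict.ExpiringDict(**REQUESTS_LIMITS)

requests_a, requests_b = make_backing(), make_backing()
requests_a['req-1'] = 'pending'
print('req-1' in requests_b)  # False - the stores are independent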
Example #12
from io import BytesIO

# Related third party imports
from flask import (Flask, send_file, render_template, jsonify, redirect,
                   request)
from flask_cors import CORS
from PIL import ImageDraw, Image, ImageFont
import random
import expiringdict

# Local application/library specific imports
from utils import noise

app = Flask(__name__)
CORS(app)
captchas = expiringdict.ExpiringDict(max_age_seconds=120, max_len=float('inf'))


class CaptchaCount:
    """Class to count captchas to prevent duplicates in the captchas dictionary."""
    count: int = 0


fonts_lower = [
    ImageFont.truetype("./fonts/lower/gadugib.ttf", 32),
    ImageFont.truetype("./fonts/lower/Chalkduster_400.ttf", 32),
    ImageFont.truetype('./fonts/lower/ShadowsIntoLight-Regular.ttf', 32),
    ImageFont.truetype('./fonts/lower/Rajdhani-SemiBold.ttf', 32)
]
fonts_upper = [
    ImageFont.truetype('./fonts/upper/arial.ttf', 55),
]  # (snippet truncated in the source)
Example #13
def from_config(config: typing.Dict[str, typing.Any],
                state_storage: dict = None,
                token_storage: tokens.TokenStore = None) -> Authl:
    """ Generate an Authl handler set from provided configuration directives.

    :param dict config: a configuration dictionary. See the individual handlers'
        from_config functions to see possible configuration values.

    :param dict state_storage: a dict-like object that will store session
        state for methods that need it. Defaults to an instance-local
        ExpiringDict; this will not work well in load-balanced scenarios. This
        can be safely stored in a user session, if available.

    :param tokens.TokenStore token_storage: a TokenStore for storing session
        state for methods that need it. Defaults to an instance-local DictStore
        backed by an ExpiringDict; this will not work well in load-balanced
        scenarios.

    Handlers will be enabled based on truthy values of the following keys:

    * ``EMAIL_FROM`` / ``EMAIL_SENDMAIL``: enable :py:mod:`authl.handlers.email_addr`

    * ``FEDIVERSE_NAME``: enable :py:mod:`authl.handlers.fediverse`

    * ``INDIEAUTH_CLIENT_ID``: enable :py:mod:`authl.handlers.indieauth`

    * ``TWITTER_CLIENT_KEY``: enable :py:mod:`authl.handlers.twitter`

    * ``TEST_ENABLED``: enable :py:mod:`authl.handlers.test_handler`

    For additional configuration settings, see each handler's respective
    ``from_config()``.

    """

    if token_storage is None:
        token_storage = tokens.DictStore()

    if state_storage is None:
        state_storage = expiringdict.ExpiringDict(max_len=1024,
                                                  max_age_seconds=3600)

    instance = Authl()

    if config.get('EMAIL_FROM') or config.get('EMAIL_SENDMAIL'):
        from .handlers import email_addr
        instance.add_handler(email_addr.from_config(config, token_storage))

    if config.get('FEDIVERSE_NAME') or config.get('MASTODON_NAME'):
        from .handlers import fediverse
        instance.add_handler(fediverse.from_config(config, token_storage))

    if config.get('INDIEAUTH_CLIENT_ID'):
        from .handlers import indieauth
        instance.add_handler(indieauth.from_config(config, token_storage))

    if config.get('TWITTER_CLIENT_KEY'):
        from .handlers import twitter
        instance.add_handler(twitter.from_config(config, state_storage))

    if config.get('TEST_ENABLED'):
        from .handlers import test_handler
        instance.add_handler(test_handler.TestHandler())

    return instance
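A hedged usage sketch for from_config, assuming it is re-exported at the package level as in the Authl project; the keys come straight from the docstring's table:

import authl

# Truthy EMAIL_* and INDIEAUTH_CLIENT_ID values enable those two handlers
auth = authl.from_config({
    'EMAIL_FROM': 'login@example.com',
    'EMAIL_SENDMAIL': print,   # stand-in for a real sendmail callable
    'INDIEAUTH_CLIENT_ID': 'https://example.com/',
})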
Example #14
 def clear_cache(self):
     """Clears/initializes file cache
     """
     self._file_cache = expiringdict.ExpiringDict(
         max_len=FILE_CACHE_SIZE, max_age_seconds=FILE_CACHE_AGE)
Example #15
def main(argv: List[str]) -> int:
    logging.basicConfig(format='[%(asctime)s] %(message)s', level=logging.INFO)
    if len(argv) != 2:
        print_usage(argv[0])
        return 0
    cache_items, cache_seconds = 1024, 10
    ifaces = []  # type: List[str]
    bpf = None  # type: Optional[str]
    rules = []  # type: List[Rule]
    with open(argv[1], 'r') as config_file:
        for lineno, line in enumerate(config_file):
            fields = line.split('#', 1)[0].strip().split()
            if len(fields) == 0:
                continue
            if fields[0] == 'iface':
                if len(fields) != 2:
                    logging.error('Error in line {}: {}'.format(
                        lineno + 1, line))
                    return 1
                ifaces.append(fields[1])
            elif fields[0] == 'filter':
                bpf = ' '.join(fields[1:])
            elif fields[0] == 'cache':
                if len(fields) != 3:
                    logging.error('Error in line {}: {}'.format(
                        lineno + 1, line))
                    return 1
                try:
                    cache_items = int(fields[1])
                    if cache_items <= 0:
                        raise ValueError(
                            'Cache items should be greater than 0')
                    cache_seconds = int(fields[2])
                    if cache_seconds <= 0:
                        raise ValueError(
                            'Cache seconds should be greater than 0')
                except Exception as e:
                    logging.error('Error in line {}: {}'.format(lineno + 1, e))
                    return 1
            else:
                if len(fields) not in (3, 4):
                    logging.error('Error in line {}: {}'.format(
                        lineno + 1, line))
                    return 1  # abort on malformed rule lines, like the other branches
                try:
                    src = ipaddress.ip_network(fields[0])
                    dst = ipaddress.ip_network(fields[1])
                    mtu = int(fields[2])
                    if mtu <= 0:
                        raise ValueError('MTU should be greater than 0')
                    trigger = mtu
                    if len(fields) >= 4:
                        trigger = int(fields[3])
                        if trigger < 0:
                            raise ValueError(
                                'Trigger length should be greater than or equal to 0'
                            )
                except Exception as e:
                    logging.error('Error in line {}: {}'.format(lineno + 1, e))
                    return 1
                rules.append(Rule(src, dst, mtu, trigger))
    if 'any' in ifaces:
        ifaces = []

    pmtud_cache = expiringdict.ExpiringDict(max_len=cache_items,
                                            max_age_seconds=cache_seconds)

    try:
        scapy.sendrecv.sniff(prn=functools.partial(callback, rules,
                                                   pmtud_cache),
                             promisc=False,
                             filter=bpf,
                             iface=ifaces or None)
    except Exception as e:
        logging.error('Error sniffing packets: {}'.format(e))
        return 1
    return 0
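Reconstructed from the parser branches above, a hedged sample of the configuration file Example #15 expects (everything after '#' is ignored; the values are illustrative):

iface eth0                         # interface to sniff ('any' clears the list)
filter ip                          # BPF filter handed to scapy
cache 1024 10                      # <max cached items> <max age in seconds>
10.0.0.0/8 192.0.2.0/24 1400 1280  # <src net> <dst net> <mtu> [trigger length]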
Example #16
        return fom.OAuth2(flows=fom.OAuthFlows(
            authorizationCode={
                "authorizationUrl": self.lazy_config.evaluate()["auth.authorization"],
                "tokenUrl": self.lazy_config.evaluate()["auth.token"],
                "refreshUrl": self.lazy_config.evaluate()["auth.refresh"],
                "scopes": {
                    "profile": "[Required] Get location, nickname and picture",
                    "email": "[Required] Get email and email_verified",
                    "openid": "[Required] Additional OpenID Connect info"
                },
            }
        ))


# TODO: is max_len mandatory? (It is: ExpiringDict requires both max_len and max_age_seconds.)
USER_INFO_CACHE = expiringdict.ExpiringDict(max_len=100, max_age_seconds=60 * 60 * 24)


def dependency_access_token(
        token: str = f.Security(LazyAuthorizationCodeBearer(lazy_config=lazy_config))
) -> JSON:
    if token not in USER_INFO_CACHE:
        user_info = requests.get(lazy_config.e["auth.userinfo"], headers={
            "Authorization": f"Bearer {token}"
        }).json()
        USER_INFO_CACHE[token] = user_info
    else:
        user_info = USER_INFO_CACHE[token]
    return user_info
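dependency_access_token is a plain read-through cache over the userinfo endpoint. The same shape reduced to a reusable helper (hypothetical; not part of this codebase):

def read_through(cache, key, fetch):
    """Return cache[key], computing and storing it on a miss."""
    if key not in cache:
        cache[key] = fetch(key)
    return cache[key]

# e.g.: user_info = read_through(USER_INFO_CACHE, token, fetch_user_info)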

Example #17

async def get_setting_context(message, parameter, context):
	if context in {'channel', 'server'} and message.channel.is_private:
		return None
	return await core.keystore.get(get_key(message, parameter, context))


def redirect(name):
	details = SETTINGS.get(name)
	if details:
		name = details.get('redirect', name)
	return name, SETTINGS.get(name)


PREFIX_CACHE = expiringdict.ExpiringDict(max_len=10000, max_age_seconds=120)


async def get_server_prefix(server_id):
	try:
		value = PREFIX_CACHE[server_id]
	except KeyError:
		value = await core.keystore.get('s-prefix:' + server_id) or '='
	PREFIX_CACHE[server_id] = value
	return value



async def set_server_prefix(server_id, prefix):
	PREFIX_CACHE[server_id] = prefix
	await core.keystore.set('s-prefix:' + server_id, prefix)

Example #18

def fetch_forecast_data():
    db = client.get_database("weather")
    collection_daily = db.get_collection("daily_weather_forecast")
    collection_hourly = db.get_collection("hourly_weather_forecast")
    ret_daily = list(collection_daily.find())
    ret_hourly = list(collection_hourly.find())
    logger.info('Daily weather: ' + str(len(ret_daily)) +
                ' documents read from the db')
    logger.info('Hourly weather: ' + str(len(ret_hourly)) +
                ' documents read from the db')
    return ret_daily, ret_hourly


_fetch_forecast_data_as_df_cache = expiringdict.ExpiringDict(
    max_len=1, max_age_seconds=RESULT_CACHE_EXPIRATION)


def fetch_forecast_data_as_df(allow_cached=False):
    """Converts the lists of dicts returned by `fetch_forecast_data` to DataFrames with the ID removed.
    The actual job is done in `_work`. When `allow_cached`, attempt to retrieve the timed cache from
    `_fetch_forecast_data_as_df_cache`; ignore the cache and call `_work` if the cache has expired
    or `allow_cached` is False.
    """
    def _work():
        daily_forecast_data, hourly_forecast_data = fetch_forecast_data()
        if len(daily_forecast_data) == 0 or len(hourly_forecast_data) == 0:
            return None
        df_daily_forecast = pd.DataFrame.from_records(daily_forecast_data)
        df_daily_forecast.drop('_id', axis=1, inplace=True)
        # ... (snippet truncated in the source)

Example #19
def update_weather_db(weather_dict):  # hypothetical name; the original def line was lost in extraction
    """
    Update MongoDB database `weather` and collection for city with the given `weather_dict`.
    """
    db = client.get_database("weather")
    city_name = weather_dict["name"]
    collection = db.get_collection(city_name)
    result = collection.replace_one(filter={'dt': weather_dict['dt']},
                                    replacement=weather_dict,
                                    upsert=True)
    logger.info("City: {}, Total Data Points: {}, Update: {}".format(
        city_name, len(list(collection.find())), result.matched_count == 0))


def fetch_all_weather(city="ALL"):
    db = client.get_database("weather")
    collections = []
    if city == "ALL":
        for city_name in db.collection_names():
            collection = db.get_collection(city_name)
            collections.extend(list(collection.find()))
        return collections  # return the combined result for the "ALL" branch
    else:
        collection = db.get_collection(city)
        return list(collection.find())


_fetch_all_weather_cache = expiringdict.ExpiringDict(
    max_len=1, max_age_seconds=RESULT_CACHE_EXPIRATION)

if __name__ == '__main__':
    pass
Example #20
import typing
import urllib.parse

import expiringdict
import mf2py
import requests
from bs4 import BeautifulSoup

from .. import disposition, tokens, utils
from . import Handler

LOGGER = logging.getLogger(__name__)

# We do this instead of functools.lru_cache so that IndieAuth.handles_page
# and find_endpoint can both benefit from the same endpoint cache
_ENDPOINT_CACHE = expiringdict.ExpiringDict(max_len=128, max_age_seconds=300)

# And similar for retrieving user h-cards
_PROFILE_CACHE = expiringdict.ExpiringDict(max_len=128, max_age_seconds=300)


def find_endpoint(id_url: str,
                  links: typing.Dict = None,
                  content: BeautifulSoup = None,
                  rel: str = "authorization_endpoint") -> typing.Tuple[typing.Optional[str],
                                                                       str]:
    """ Given an identity URL, get its IndieAuth endpoint

    :param str id_url: an identity URL to check
    :param links: a request.links object from a requests operation
    :param BeautifulSoup content: a BeautifulSoup parse tree of an HTML document
    """
    # ... (snippet truncated in the source)

Example #21
 def Run(self, memStream=None):
     self.knownStructures = expiringdict.ExpiringDict(1000, 3600)  # max_len=1000, max_age_seconds=3600
Example #22
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities to store local memories."""

import expiringdict
from makani.gs.monitor2.project import settings

# A dictionary of local memories for each tab.
_memories = expiringdict.ExpiringDict(
    max_len=settings.MAX_CLIENT_COUNT,
    max_age_seconds=settings.MEMORY_STALE_TIMEOUT_S)


def GetMemory(client_id, create_if_none):
    if client_id in _memories:
        return _memories[client_id]
    elif create_if_none:
        _memories[client_id] = {}
        return _memories[client_id]
    else:
        return None

Example #23
def fetch_all_db():  # def line reconstructed from the call in fetch_all_db_as_df below;
    # 'levels' and 'db' are module-level globals not shown in this excerpt
    ret_dict = {}
    for level in levels:
        length = 0
        while length == 0:
            collection = db.get_collection(level)
            ret = list(collection.find())
            ret_dict[level] = ret
            length = len(ret)
            if length == 0:
                time.sleep(30)
            logger.info(str(length) + ' documents read from the db')
        time.sleep(1)
    return ret_dict


_fetch_all_db_as_df_cache = expiringdict.ExpiringDict(
    max_len=10, max_age_seconds=RESULT_CACHE_EXPIRATION)


def fetch_all_db_as_df(allow_cached=False):
    """Converts list of dicts returned by `fetch_all_db` to DataFrame with ID removed
    Actual job is done in `_worker`. When `allow_cached`, attempt to retrieve timed cached from
    `_fetch_all_db_as_df_cache`; ignore cache and call `_work` if cache expires or `allow_cached`
    is False.
    """
    def _work():
        ret_dict = fetch_all_db()
        if len(ret_dict) == 0:
            return None
        df_dict = {}
        for level, data in ret_dict.items():
            df = pd.DataFrame.from_records(data)
            # ... (snippet truncated in the source; the lines below are the
            # tail of a separate upsert helper spliced in by extraction)
            replacement=record,                         # latest document
            upsert=True)                                # update if exists, insert if not
        if result.matched_count > 0:
            update_count += 1
    logger.info("rows={}, update={}, ".format(df.shape[0], update_count) +
                "insert={}".format(df.shape[0]-update_count))

def fetch_all_bpa():
    db = client.get_database("energy")
    collection = db.get_collection("energy")
    ret = list(collection.find())
    logger.info(str(len(ret)) + ' documents read from the db')
    return ret


_fetch_all_bpa_as_df_cache = expiringdict.ExpiringDict(max_len=1,
                                                       max_age_seconds=RESULT_CACHE_EXPIRATION)

def fetch_all_spotify():
    db = client.get_database("spotify")
    collection = db.get_collection("spotify")
    ret = list(collection.find())
    logger.info(str(len(ret)) + ' documents read from the db')
    return ret


_fetch_all_spotify_as_df_cache = expiringdict.ExpiringDict(max_len=1,
                                                           max_age_seconds=RESULT_CACHE_EXPIRATION)


def fetch_all_bpa_as_df(allow_cached=False):
    """Converts list of dicts returned by `fetch_all_bpa` to DataFrame with ID removed