Example #1
    def __init__(
        self,
        bootstrap_servers=None,
        configs=None,
        topic=None,
        kafka_loglevel=logging.WARNING,
    ):
        set_kafka_loglevel(kafka_loglevel)
        self.logger = logging.getLogger(self.__class__.__name__)
        self.lock = threading.Lock()

        self.topic: Optional[str] = topic
        configs = {} if configs is None else configs
        self.configs: dict = configs
        self.producers: Dict[str, KafkaProducer] = {}
        self.fail_pass = ExpiringDict(max_len=10000, max_age_seconds=60)
        self.not_exist_topics = ExpiringDict(max_len=10000, max_age_seconds=60)

        bs = configs.pop("bootstrap_servers", None)
        if not bootstrap_servers:
            bootstrap_servers = bs

        if bootstrap_servers:
            producer = self.get_producer(bootstrap_servers)
            if producer is None:
                raise Exception("can not init default producer")
            self.producers[DEFAULT_FLAG] = producer
        else:
            self.logger.warning("no default kafka producer")
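The two ExpiringDict fields above (fail_pass and not_exist_topics) act as 60-second negative caches: a recent failure or missing topic is remembered briefly so repeated sends don't hammer the broker. A minimal sketch of that pattern, with hypothetical mark_missing/should_skip helpers:

from expiringdict import ExpiringDict

# Entries evict themselves after 60 seconds, so a topic created
# later is retried automatically once its entry expires.
not_exist_topics = ExpiringDict(max_len=10000, max_age_seconds=60)

def mark_missing(topic):
    not_exist_topics[topic] = True

def should_skip(topic):
    return topic in not_exist_topics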
Example #2
    def __init__(self,
                 listen=15,
                 f_local=False,
                 default_hook=None,
                 object_hook=None):
        assert V.DATA_PATH is not None, 'Setup p2p params before PeerClientClass init.'

        # object control params
        self.f_stop = False
        self.f_finish = False
        self.f_running = False

        # co-objects
        self.core = Core(host='localhost' if f_local else None, listen=listen)
        self.peers = PeerData(os.path.join(
            V.DATA_PATH, 'peer.dat'))  # {(host, port): header,..}
        self.event = EventIgnition()  # entry point that accepts DirectCmd

        # data status control
        self.broadcast_status: Dict[int, asyncio.Future] = ExpiringDict(
            max_len=5000, max_age_seconds=90)
        self.result_futures: Dict[int, asyncio.Future] = ExpiringDict(
            max_len=5000, max_age_seconds=90)

        # record traffic if Debug.F_RECODE_TRAFFIC is true
        if Debug.F_RECODE_TRAFFIC:
            self.core.traffic.recode_dir = V.DATA_PATH

        # serializer/deserializer hook
        self.default_hook = default_hook
        self.object_hook = object_hook
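broadcast_status and result_futures are annotated as plain dicts but are actually ExpiringDicts, so a pending asyncio.Future whose reply never arrives ages out after 90 seconds instead of leaking. A minimal sketch of that request/reply pattern, with hypothetical request/on_reply names:

import asyncio
from expiringdict import ExpiringDict

result_futures = ExpiringDict(max_len=5000, max_age_seconds=90)

async def request(msg_id):
    # Park a Future under the message id; if no reply arrives,
    # the entry simply ages out of the dict.
    fut = asyncio.get_running_loop().create_future()
    result_futures[msg_id] = fut
    return await asyncio.wait_for(fut, timeout=90)

def on_reply(msg_id, payload):
    fut = result_futures.get(msg_id)
    if fut is not None and not fut.done():
        fut.set_result(payload)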
Example #3
def test_expiring_dict_copy_from_expiring_dict_new_timeout_and_length():
    exp_dict_test = ExpiringDict(max_len=200000, max_age_seconds=1800)
    exp_dict_test['test'] = 1
    exp_dict_test2 = ExpiringDict(max_len=100000, max_age_seconds=900, items=exp_dict_test)
    eq_(1, exp_dict_test2['test'])
    eq_(100000, exp_dict_test2.max_len)
    eq_(900, exp_dict_test2.max_age)
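Sketched in isolation, what this test asserts: items= copies the entries, while explicitly passed max_len and max_age_seconds override the source dict's settings (Example #6 below shows that passing None inherits them instead):

import time
from expiringdict import ExpiringDict

src = ExpiringDict(max_len=10, max_age_seconds=60)
src['k'] = 'v'
dst = ExpiringDict(max_len=5, max_age_seconds=1, items=src)
assert dst['k'] == 'v'       # entries are copied over
time.sleep(1.1)
assert dst.get('k') is None  # but the new, shorter TTL applies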
Example #4
 def reset_cache(self):
     MongoCache.cache = ExpiringDict(
         max_len=MongoCache.configuration.cache_max_elements(),
         max_age_seconds=MongoCache.configuration.cache_timeout())
     MongoCache.data_cache = ExpiringDict(
         max_len=MongoCache.configuration.data_cache_max_elements(),
         max_age_seconds=MongoCache.configuration.data_cache_timeout())
Example #5
    def __init__(self, bot):
        self.bot = bot

        # cooldown to monitor if too many users join in a short period of time (more than 10 within 8 seconds)
        self.join_raid_detection_threshold = commands.CooldownMapping.from_cooldown(
            rate=10, per=8, type=commands.BucketType.guild)
        # cooldown to monitor if users are spamming a message (8 within 10 seconds)
        self.message_spam_detection_threshold = commands.CooldownMapping.from_cooldown(
            rate=7, per=10.0, type=commands.BucketType.member)
        # cooldown to monitor if too many accounts created on the same date are joining within a short period of time
        # (5 accounts created on the same date joining within 45 minutes of each other)
        self.join_overtime_raid_detection_threshold = commands.CooldownMapping.from_cooldown(
            rate=4, per=2700, type=MessageTextBucket.custom)

        # cooldown to monitor how many times AntiRaid has been triggered (5 triggers per 15 seconds puts server in lockdown)
        self.raid_detection_threshold = commands.CooldownMapping.from_cooldown(
            rate=4, per=15.0, type=commands.BucketType.guild)
        # cooldown to only send one raid alert for moderators per 10 minutes
        self.raid_alert_cooldown = commands.CooldownMapping.from_cooldown(
            1, 600.0, commands.BucketType.guild)

        # stores the users that trigger self.join_raid_detection_threshold so we can ban them
        self.join_user_mapping = ExpiringDict(max_len=100, max_age_seconds=10)
        # stores the users that trigger self.message_spam_detection_threshold so we can ban them
        self.spam_user_mapping = ExpiringDict(max_len=100, max_age_seconds=10)
        # stores the users that trigger self.join_overtime_raid_detection_threshold so we can ban them
        self.join_overtime_mapping = ExpiringDict(max_len=100,
                                                  max_age_seconds=2700)
        # stores the users that we have banned so we don't try to ban them repeatedly
        self.ban_user_mapping = ExpiringDict(max_len=100, max_age_seconds=120)

        # locks to prevent race conditions when banning concurrently
        self.join_overtime_lock = Lock()
        self.banning_lock = Lock()
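A CooldownMapping only counts events that are fed into it. A hedged sketch of how one of these thresholds is typically checked in discord.py (the on_member_join body here is hypothetical; update_rate_limit() returns a retry-after value once the bucket overflows):

    async def on_member_join(self, member):
        bucket = self.join_raid_detection_threshold.get_bucket(member)
        if bucket.update_rate_limit() is not None:
            # More than `rate` joins within `per` seconds: remember the
            # user in the expiring map so they can be banned later.
            self.join_user_mapping[member.id] = member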
Example #6
def test_expiring_dict_copy_from_expiring_dict_original_timeout_and_length():
    exp_dict_test = ExpiringDict(max_len=200000, max_age_seconds=1800)
    exp_dict_test['test'] = 1
    exp_dict_test2 = ExpiringDict(max_len=None, max_age_seconds=None, items=exp_dict_test)
    eq_(1, exp_dict_test2['test'])
    eq_(200000, exp_dict_test2.max_len)
    eq_(1800, exp_dict_test2.max_age)
Example #7
 def __init__(self, reddit, db_collection, predictor):
     self.reddit = reddit
     self.collection = db_collection
     self.predictor = predictor
     self.subreddit = self.reddit.subreddit('risingthreads')
     self.user_cache = ExpiringDict(max_len=1000000, max_age_seconds=86400)
     self.subreddit_cache = ExpiringDict(max_len=1000000,
                                         max_age_seconds=86400)
Example #8
 def __init__(self, db: Session, model_cache_size: int, cache_ttl: int):
     self.db: Session = db
     with OpenPredictionService.lock:
         if OpenPredictionService.MODEL_CACHE is None:
             OpenPredictionService.MODEL_CACHE = ExpiringDict(
                 max_len=model_cache_size, max_age_seconds=cache_ttl)
         if OpenPredictionService.MODEL_CONFIGS_CACHE is None:
             OpenPredictionService.MODEL_CONFIGS_CACHE = ExpiringDict(
                 max_len=1, max_age_seconds=cache_ttl)
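The lock-guarded lazy initialisation gives every service instance one shared, process-wide cache. A hypothetical read-through accessor on top of it (load_model stands in for the real loader):

 def get_model(self, model_name):
     model = OpenPredictionService.MODEL_CACHE.get(model_name)
     if model is None:
         model = load_model(self.db, model_name)  # hypothetical loader
         OpenPredictionService.MODEL_CACHE[model_name] = model
     return model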
Example #9
    def __init__(self, username, token, channel_id):
        self.user_id = username
        self.token = token.removeprefix("oauth:")
        self.irc_channel = "#" + channel_id.lower()
        self.channel_id = channel_id
        self.serialized_data_dir = "data"
        self.serialized_data_filename = os.path.join(self.serialized_data_dir,
                                                     f"{self.channel_id}.bin")
        self.push_trend_cache = ExpiringDict(max_len=100,
                                             max_age_seconds=TREND_EXPIRE_SEC)

        self.dizzy_users = []
        self.dizzy_start_ts = 0
        self.dizzy_ban_end_ts = 0
        self.ban_targets = []

        # self.api_client = TwitchAPIClient(self.channel_id, client_id)

        # Create IRC bot connection
        logging.info(f"Connecting to {SERVER} on port {PORT}...")
        irc.bot.SingleServerIRCBot.__init__(
            self, [(SERVER, PORT, "oauth:" + self.token)], username, username)
        # TODO: dynamically determine
        self.trend_threshold = 3

        self.gbf_code_re = re.compile(r"[A-Z0-9]{8}")

        # setup scheduler
        self.reactor.scheduler.execute_every(1, self.dizzy)
        # self.reactor.scheduler.execute_every(5 * 60, self.insert_all)
        # self.reactor.scheduler.execute_every(60 * 60, self.share_clip)

        # load data from disk
        try:
            with open(self.serialized_data_filename, "rb") as f:
                self.data = dill.loads(f.read())
                if "gbf_room_num" not in self.data:
                    self.data["gbf_room_num"] = 0
                if "gbf_room_id_cache" not in self.data:
                    self.data["gbf_room_id_cache"] = ExpiringDict(
                        max_len=1, max_age_seconds=600)
        except FileNotFoundError:
            self.data = {
                "gbf_room_num": 0,
                "gbf_room_id_cache": ExpiringDict(max_len=1,
                                                  max_age_seconds=600),
            }

        # register signal handler
        # https://stackoverflow.com/questions/1112343/how-do-i-capture-sigint-in-python
        signal.signal(signal.SIGINT, self.save_data)
Example #10
    def __init__(self,
                 gateway,
                 schema_path=None,
                 data_gw_cache=None,
                 **kwargs):
        self.__gateway = gateway

        if schema_path:
            with open(schema_path) as f:
                self.__schema_source = f.read()
        else:
            self.__schema_source = create_gql_schema(gateway)
        source = self.__schema_source

        document = parse(source)
        self.__schema = build_ast_schema(document)
        abstract_types = filter(lambda x: hasattr(x, 'resolve_type'),
                                dict(self.__schema.get_type_map()).values())
        for at in abstract_types:
            at.resolve_type = self.__resolve_type

        self.__executor = AgoraExecutor(gateway)

        if not data_gw_cache:
            data_gw_cache = {'max_age_seconds': 300, 'max_len': 1000000}

        self.expiring_dict = ExpiringDict(**data_gw_cache)
        middleware = AgoraMiddleware(gateway,
                                     data_gw_cache=self.expiring_dict,
                                     **kwargs)
        self.__middleware = MiddlewareManager(middleware)
Example #11
    def __init__(self,
                 data_store=None,
                 format_parser=JsonFormParser(),
                 initial_load=True,
                 max_age_seconds=60):
        """
        :param format_parser: A custom parsers to convert the database entry into a FlaskForm. Has to inherit from
        the ParserAdapterInterface class.
        :param data_store: A custom database adapter. Has to be an inherit from DbAdapterInterface
        :param initial_load: If all forms should be loaded
        :param max_age_seconds: Expiration time of cached forms

        """

        if not isinstance(data_store, IDataStore):
            raise FormManagerException(
                f"{data_store.__class__.__name__} has to be a subclass of "
                f"{IDataStore.__name__}")

        if not isinstance(format_parser, IFormParser):
            raise FormManagerException(
                f"{format_parser.__class__.__name__} has to be a subclass of "
                f"{IFormParser.__name__}")

        self._data_store = data_store
        self._parser = format_parser

        self.form_cache = ExpiringDict(max_len=100,
                                       max_age_seconds=max_age_seconds)
        if initial_load:
            self._fetch_forms()
Example #12
File: logger.py Project: dgsharpe/marvin
    def __init__(self, config):
        self.watch_flags = config.watch_flags
        self.expiring_file_event_dict = ExpiringDict(max_len=100000,
                                                     max_age_seconds=10)

        #Set logging formatting
        LOGGING_MSG_FORMAT = '[%(levelname)s] [%(asctime)s] : %(message)s'
        LOGGING_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'

        logging.basicConfig(level=logging.DEBUG,
                            format=LOGGING_MSG_FORMAT,
                            datefmt=LOGGING_DATE_FORMAT)

        formatter = logging.Formatter(LOGGING_MSG_FORMAT)
        handler = logging.handlers.TimedRotatingFileHandler(
            filename=config.log_file_path, when="d", interval=1, backupCount=7)
        handler.setFormatter(formatter)
        self.logger = logging.getLogger()
        self.logger.addHandler(handler)

        #Set logging levels
        self.logger.setLevel(logging.INFO)
        logging.getLogger('schedule').setLevel(logging.WARNING)
        logging.getLogger('requests').setLevel(logging.WARNING)
        logging.getLogger('urllib3').setLevel(logging.WARNING)
Example #13
File: adsb.py Project: cnelson/maho
    def __init__(self,
                 adsb_host='localhost',
                 adsb_port=30002,
                 max_aircraft=1000,
                 max_aircraft_age=60):
        """Connect to dump1090 TCP raw output

        Args:
            adsb_host (str): The hostname running dump1090
            adsb_port (int): The "TCP raw output" port

            max_aircraft (int, optional): The maximum number of aircraft to cache in memory
            max_aircraft_age (int, optional): The maximum number of seconds to cache
            an aircraft after receiving an ADS-B update

        Raises:
            IOError: Unable to connect to dump1090

        """

        self._adsbsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._adsbsock.connect((adsb_host, adsb_port))

        # TODO: Expose these settings
        self._cache = ExpiringDict(max_len=max_aircraft,
                                   max_age_seconds=max_aircraft_age)
Example #14
    def __init__(self,
                 token,
                 api_url_base=None,
                 name=None,
                 version=None,
                 timeout_s=20,
                 poll_timeout_s=60):
        super(ICQBot, self).__init__()

        self.log = logging.getLogger(__name__)

        self.token = token
        self.api_base_url = "https://botapi.icq.net" if api_url_base is None else api_url_base
        self.name = name
        self.version = version
        self.timeout_s = timeout_s
        self.poll_timeout_s = poll_timeout_s

        self.dispatcher = Dispatcher(self)
        self.dispatcher.add_handler(MyInfoHandler())
        self.running = False

        self._uin = self._nick = None
        self._fetch_base_url = None
        self._next_fetch_after_s = 0

        self.__lock = Lock()
        self.__polling_thread = None

        self.__sent_im_cache = ExpiringDict(max_len=2**10, max_age_seconds=60)
        self.dispatcher.add_handler(
            SkipDuplicateIMHandler(self.__sent_im_cache))
Example #15
    def __init__(
        self,
        auth_token=None,
        file_path=None,
        store=None,
        targets=None,
        status_path="/tmp",
        log=DUMMY_LOG,
    ):
        self.file_path = file_path
        self.auth_token = auth_token
        self.log = log
        self.store = store
        self.targets = targets
        self.status_path = status_path
        self.h = SlackHistory(
            auth_token=self.auth_token,
            file_path=self.file_path,
            dict_path=self.status_path,
            targets=self.targets,
            log=self.log,
        )

        self.slack = SlackClient(self.auth_token)
        self.channel_name_cache = ExpiringDict(self.CACHE_LEN, self.CACHE_EXPIRY_TIME)
Example #16
 def __init__(self):
     log.debug("[Startup]: Initializing YouTube Module . . .")
     self.mongo = mongo.Mongo()
     self.queue = Queue()
     self.cache = ExpiringDict(max_age_seconds=10800, max_len=1000)
     self.search_cache = dict()
     self.session = aiohttp.ClientSession()
Example #17
    def __init__(self,
                 token,
                 api_url_base=None,
                 name=None,
                 version=None,
                 timeout_s=20,
                 poll_time_s=60):
        super(Bot, self).__init__()

        self.log = logging.getLogger(__name__)

        self.token = token
        self.api_base_url = "https://api.icq.net/bot/v1" if api_url_base is None else api_url_base
        self.name = name
        self.version = version
        self.timeout_s = timeout_s
        self.poll_time_s = poll_time_s
        self.last_event_id = 0

        self.dispatcher = Dispatcher(self)
        self.running = False

        self._uin = token.split(":")[-1]

        self.__lock = Lock()
        self.__polling_thread = None

        self.__sent_im_cache = ExpiringDict(max_len=2**10, max_age_seconds=60)
        self.dispatcher.add_handler(
            SkipDuplicateMessageHandler(self.__sent_im_cache))
Example #18
    def get_all_schemas(self, **kwargs) -> ExpiringDict:
        repo = self.git.repository(self.repo_owner, self.repo_name)
        contents = repo.directory_contents(self.base_path, self.branch, return_as=dict)

        schemas = ExpiringDict(max_len=100, max_age_seconds=86400)
        schemas_final = self.get_schema_content(repo, contents, schemas)
        return schemas_final
Example #19
File: discord.py Project: slice/black-hole
    def __init__(self, *, config):
        self.config = config
        intents = Intents.default()

        # members intent is required to resolve discord.User/discord.Member
        # on command parameters
        intents.members = True
        intents.typing = False

        self.client = commands.Bot(intents=intents,
                                   command_prefix=commands.when_mentioned)
        self.client.add_cog(Management(self.client, self.config))
        self.session = aiohttp.ClientSession(loop=self.client.loop)

        self.client.loop.create_task(self._sender())

        self._queue = []
        self._incoming = asyncio.Event()

        #: { int: (timestamp, str) }
        self._avatar_cache = {}

        #: { (jid, xmpp_message_id): discord_message_id }
        # the message id store serves as a way for edited messages coming
        # from a xmpp room to have the edit reflected on the discord channel.
        #
        # the high level overview is as follows:
        #  when sending a message, check if it's an edit and the edited id exists in the cache
        #   if so, issue a patch (since we have the webhook url AND message id)
        #   if not, issue a post, and store the message id for later
        #
        # the store has a maximum of 1k messages, and lets an xmpp message
        # be last corrected for an hour
        self._message_id_store = ExpiringDict(max_len=1000,
                                              max_age_seconds=3600)
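The comment block above is the whole protocol; sketched as code, with patch_webhook_message and post_webhook_message as hypothetical stand-ins for the real webhook calls:

    async def _relay(self, jid, xmpp_msg):
        key = (jid, xmpp_msg.id)
        if xmpp_msg.is_edit and key in self._message_id_store:
            # already posted: PATCH the existing Discord message
            await patch_webhook_message(self._message_id_store[key],
                                        xmpp_msg.body)
        else:
            # first sighting: POST, and remember the Discord id so a
            # later correction (within an hour) can find it
            discord_id = await post_webhook_message(xmpp_msg.body)
            self._message_id_store[key] = discord_id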
Example #20
    def __init__(self, country='us'):
        # There is a massive performance gain if we cache a directory's contents.
        self.cache = ExpiringDict(max_len=64 * 1024, max_age_seconds=3600)
        self.country = country

        #FIXME -- Don't rely on spotipy and copy-pasting for authentication
        self.token = spotipy.util.prompt_for_user_token(
            '1252589511',
            'user-library-read user-library-modify user-follow-read user-follow-modify playlist-read-private playlist-read-collaborative playlist-modify-private',
            client_id=SPOTIFY_CLIENT_ID,
            client_secret=SPOTIFY_CLIENT_SECRET,
            redirect_uri='https://example.com')

        self.artistNodes = {}
        self.albumNodes = {}
        self.trackNodes = {}
        self.playlistNodes = {}

        fusetree.DictDir.__init__(
            self,
            {
                'Artists': FollowedArtistsNode(self, id=None,
                                               mode=DIR_MODE_RW),
                #'Playlists': UserPlaylistsNode(self, id='me', mode=DIR_MODE_RW),
                #'Saved Albums': {},
                #'Saved Tracks': {},
                # Top Artists
                # Top Tracks
                #'Recently Played': {},
                #'Categories': {},
                #'Featured Playlists': {},
                #'New Releases': {},
                #'Recommendations': {},
            })
Example #21
    def __init__(
        self,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        region=os.environ.get("AWS_REGION", "us-west-2"),
        cache=None,
        cache_timeout=3600,
        **kwargs,
    ):

        # Open a session with boto3
        self.session = boto3.session.Session(
            region_name=region,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key)
        # Open a connection to the AWS S3 bucket
        self.client = self.session.client("s3")

        # If there is no redis cache available
        self.cache_timeout = cache_timeout
        if cache is None:
            # Set up an expiring cache
            self.cache = ExpiringDict(max_len=100,
                                      max_age_seconds=self.cache_timeout)
            self.using_redis = False
        else:
            # Otherwise attach the redis cache to this object
            self.cache = cache
            self.using_redis = True
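Since self.cache is either an ExpiringDict or a redis client, writes need backend-specific TTL handling, which is what the using_redis flag is for. A hedged sketch of an accessor pair (method names are hypothetical):

    def cache_set(self, key, value):
        if self.using_redis:
            # redis attaches the TTL to the write itself
            self.cache.setex(key, self.cache_timeout, value)
        else:
            # ExpiringDict expires the entry on its own
            self.cache[key] = value

    def cache_get(self, key):
        # both backends return None for a missing or expired key
        return self.cache.get(key)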
Example #22
class RamCacheHandler(tornado.web.RequestHandler):

    cache = ExpiringDict(max_len=100, max_age_seconds=10)

    def get(self, path):
        if path in RamCacheHandler.cache and falsy(
                tornado.options.options.dev):
            self.write_from_cache(path)
        else:
            log.debug('Generating %s', path)
            mimetype, text = self.generate(path)
            self.add_to_cache(path, mimetype, text)
            self.write_from_cache(path)

    def generate(self, path):
        raise tornado.web.HTTPError(404, "Path %s not found in cache.", path)

    def add_to_cache(self, path, mimetype, text):
        RamCacheHandler.cache[path] = {'mimetype': mimetype, 'text': text}

    def write_from_cache(self, path):
        entry = RamCacheHandler.cache[path]
        self.set_header("Content-Type", entry['mimetype'])
        self.write(entry['text'])
        self.finish()
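Concrete handlers only override generate(); the path-keyed 10-second RAM cache and the dev-mode bypass come from the base class. A hypothetical subclass:

class HelloHandler(RamCacheHandler):

    def generate(self, path):
        # return (mimetype, body); the base class caches the pair
        # under `path` for up to 10 seconds
        return 'text/plain', 'hello from %s' % path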
Example #23
 def __init__(self, **connect_args):
     self.connect_args = connect_args
     self._cnt = count()
     api = connect(**connect_args)
     self.connections = ExpiringDict(max_len=MAX_CONN_PER_HOST,
                                     max_age_seconds=CONN_TIMEOUT,
                                     items={self._id: api})
Example #24
    def LoadFromURN(self):
        #volume_urn = self.resolver.Get(self.urn, lexicon.AFF4_STORED)
        #if not volume_urn:
        #    raise IOError("Unable to find storage for urn %s" % self.urn)

        self.lexicon = self.resolver.lexicon

        self.chunk_size = int(
            self.resolver.Get(self.urn, self.lexicon.chunkSize) or 32 * 1024)

        self.chunks_per_segment = int(
            self.resolver.Get(self.urn, self.lexicon.chunksPerSegment) or 1024)

        sz = self.resolver.Get(self.urn, self.lexicon.streamSize) or 0
        self.size = int(sz)

        self.compression = str(
            self.resolver.Get(self.urn, self.lexicon.compressionMethod)
            or lexicon.AFF4_IMAGE_COMPRESSION_ZLIB)

        # A buffer for overlapped writes which do not fit into a chunk.
        self.buffer = ""

        # Compressed chunks in the bevy.
        self.bevy = []

        # Length of all chunks in the bevy.
        self.bevy_length = 0

        # List of bevy offsets.
        self.bevy_index = []
        self.chunk_count_in_bevy = 0
        self.bevy_number = 0

        self.cache = ExpiringDict(max_len=1000, max_age_seconds=10)
Example #25
    def __init__(self, token, api_url_base=None, name=None, version=None, timeout_s=20, poll_time_s=60, is_myteam=False, proxy=None):
        super(Bot, self).__init__()

        self.log = logging.getLogger(__name__)

        self.token = token
        self.api_base_url = "https://api.icq.net/bot/v1" if api_url_base is None else api_url_base
        self.name = name
        self.version = version
        self.timeout_s = timeout_s
        self.poll_time_s = poll_time_s
        self.last_event_id = 0
        self.is_myteam = is_myteam
        self.proxies = None if proxy is None else {'http': f'http://{proxy}', 'https': f'http://{proxy}'}

        self.dispatcher = Dispatcher(self)
        self.running = False

        self._uin = token.split(":")[-1]

        self.__lock = Lock()
        self.__polling_thread = None

        self.__sent_im_cache = ExpiringDict(max_len=2 ** 10, max_age_seconds=60)
        self.dispatcher.add_handler(SkipDuplicateMessageHandler(self.__sent_im_cache))

        if self.is_myteam:
            self.add_chat_members = types.MethodType( add_chat_members, self )
            self.create_chat = types.MethodType( create_chat, self )
Example #26
    def _run(self):
        if self.stream not in fragments:
            fragments[self.stream] = ExpiringDict(max_len=16,
                                                  max_age_seconds=300)
            handle_expiringdict(fragments[self.stream])

        packetid = self.packet.id

        if packetid not in fragments[self.stream]:
            # I'd love to use a tuple here, but it's not mutable :(
            fragments[self.stream][packetid] = [
                None, FragmentReassembler()]

        fragments[self.stream][packetid][1].extend(
            self.packet.body_bytes,
            self.packet.offset * 8,
            not self.packet.flags & 0b1
        )

        if not self.packet.offset:
            fragments[self.stream][packetid][0] = self.packet

        try:
            body = fragments[self.stream][packetid][1].bin()
        except FragmentReassembler.NotReady:
            return

        self.packet = fragments[self.stream][packetid][0]
        self.packet.body_bytes = body

        del fragments[self.stream][packetid]

        print(self.packet, file=__import__('sys').stderr, flush=True)
Example #27
class DownloaderStatistics(object):
    """Based on ecxpiring dictionary this will hold last 10 minutes data"""

    expiring_dict = ExpiringDict(max_len=sys.maxsize, max_age_seconds=600)
    expiring_dict["responses/total"] = 0

    @staticmethod
    def update(response):
        key = "responses/%s" % response.code
        if key not in DownloaderStatistics.expiring_dict:
            DownloaderStatistics.expiring_dict[key] = 0

        DownloaderStatistics.expiring_dict[key] += 1

        key = "responses/total"
        if key not in DownloaderStatistics.expiring_dict:
            DownloaderStatistics.expiring_dict[key] = 0

        DownloaderStatistics.expiring_dict[key] += 1

        DownloaderStatistics.expiring_dict["response_ratio_per_10_min"] = \
            float(DownloaderStatistics.expiring_dict["responses/total"]) / 600
        return response
Example #28
    def __init__(self, app) -> None:
        self.authed_users = ExpiringDict(max_len=50, max_age_seconds=3600)

        app.user_handler = self
        self.unauthed_user = UnauthedUser()

        self.app = app
        self.app.context_processor(self.template_context)
Example #29
class TemporaryKeyHandler(object):
    
    _user_cache = ExpiringDict(max_len=500, max_age_seconds=10) #user : key
    _file_cache = ExpiringDict(max_len=1000, max_age_seconds=10) #file : user
    _shared_file_cache = ExpiringDict(max_len=500, max_age_seconds=10) #file : group_key
  
    # link a key to a user for the time of the operation
    @classmethod
    def addUser(cls,user,key):
        #print(user)
        cls._user_cache[user] = key

    # link a file to a user for a very short time
    # (just enough time to encrypt the file with the proper user key in the EncryptedFile model, which is
    # a wrapper of FieldFile that can't be given parameters in the usual way)
    @classmethod
    def addFile(cls,user,file):
        cls._file_cache[file] = user

    # link a file to a group_key for a very short time
    # (just enough time to encrypt the file with the proper group key in the EncryptedFile model, which is
    # a wrapper of FieldFile that can't be given parameters in the usual way)
    @classmethod
    def addSharedFile(cls,key,file):
        cls._shared_file_cache[file] = key

    @classmethod
    def getFileKey(cls,file):
        #print(file)
        #print(cls._user_cache)
        #print(cls._file_cache)
        if file in cls._file_cache:
            user = cls._file_cache.get(file)
            if user in cls._user_cache:
                return cls._user_cache.get(user)
            else:
                print("session has expired")
                return False
        else:
            if file in cls._shared_file_cache:
                return cls._shared_file_cache.get(file)
            else:
                print("unknown issue")
                return False
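The intended call sequence, sketched with hypothetical values: a view registers the user's key, tags the file with the user, and the storage layer resolves the key while both 10-second entries are still alive:

TemporaryKeyHandler.addUser('alice', b'user-key')
TemporaryKeyHandler.addFile('alice', 'report.pdf')
key = TemporaryKeyHandler.getFileKey('report.pdf')  # -> b'user-key'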
Example #30
 def __init__(self):
     fixed_zone = open("fixed_zone/primary.txt").read() + open(
         "fixed_zone/tests.txt").read()
     self.fixedrrs = RR.fromZone(fixed_zone)
     self.active_transmissions = {}  # Dictionary of Transmission objects.
     # Their ID's are the keys, for easy/quick lookup.
     self.cache = ExpiringDict(max_len=100000, max_age_seconds=70)
     self.transmission_handler_lock = multiprocessing.Lock()