Example #1
File: adsb.py Project: cnelson/maho
    def __init__(self,
                 adsb_host='localhost',
                 adsb_port=30002,
                 max_aircraft=1000,
                 max_aircraft_age=60):
        """Connect to dump1090 TCP raw output

        Args:
            adsb_host (str): The hostname running dump1090
            adsb_port (int): The "TCP raw output" port

            max_aircraft (int, optional): The maximum number of aircraft to cache in memory
            max_aircraft_age (int, optional): The maximum number of seconds to cache
                an aircraft after receiving an ADS-B update

        Raises:
            IOError: Unable to connect to dump1090

        """

        self._adsbsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._adsbsock.connect((adsb_host, adsb_port))

        # TODO: Expose these settings
        self._cache = ExpiringDict(max_len=max_aircraft,
                                   max_age_seconds=max_aircraft_age)
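
A hypothetical read loop for this class, as a minimal sketch: it assumes dump1090's raw output sends one `*<hex>;` frame per line, and that in a 112-bit extended squitter frame (e.g. DF17) the ICAO address occupies hex characters 2-8. The `poll` method is illustrative and not part of the original project.

    def poll(self):
        """Read raw frames line by line and refresh the sender's cache entry."""
        for line in self._adsbsock.makefile("rb"):
            frame = line.strip().lstrip(b"*").rstrip(b";").decode("ascii")
            if len(frame) == 28:           # 112-bit extended squitter (e.g. DF17)
                icao = frame[2:8]          # ICAO address: hex characters 2-8
                self._cache[icao] = frame  # re-inserting resets the expiry timer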
Example #2
def test_expiring_dict_copy_from_expiring_dict_original_timeout_and_length():
    exp_dict_test = ExpiringDict(max_len=200000, max_age_seconds=1800)
    exp_dict_test['test'] = 1
    exp_dict_test2 = ExpiringDict(max_len=None, max_age_seconds=None, items=exp_dict_test)
    eq_(1, exp_dict_test2['test'])
    eq_(200000, exp_dict_test2.max_len)
    eq_(1800, exp_dict_test2.max_age)
Example #3
class CachedRealmStatusProvider(RealmStatusProvider):
    max_age = attr.ib(type=int)
    cache = attr.ib(init=False)

    def __attrs_post_init__(self):
        self.cache = ExpiringDict(max_len=2, max_age_seconds=self.max_age)
        self.log = logging.getLogger(self.__class__.__name__)

    def getClassicRealmStatuses(self):
        self.log.info("Getting classic realm status")
        statuses = self.cache.get('classic', None)
        if statuses is None:
            self.log.info("Not cached. Fetching...")
            statuses = super().getClassicRealmStatuses()
            self.cache['classic'] = statuses
        else:
            self.log.info("Result cached!")
        self.log.debug(f"Got status {statuses}")
        return statuses
    
    def getVanillaRealmStatuses(self):
        statuses = self.cache.get('vanilla', None)
        if statuses is None:
            statuses = self.cache['vanilla'] = super().getVanillaRealmStatuses()
        return statuses
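
Both getters repeat the same cache-aside steps: probe the cache, fall back to the parent implementation on a miss, and store the result. A small helper could factor that out; this is a sketch, and `_cached` is a hypothetical name that does not appear in the original class.

    def _cached(self, key, fetch):
        """Return the cached value for key, calling fetch() on a miss."""
        value = self.cache.get(key)
        if value is None:
            value = self.cache[key] = fetch()
        return value

    # getVanillaRealmStatuses would then reduce to:
    #     return self._cached('vanilla', super().getVanillaRealmStatuses)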
Example #4
    def LoadFromURN(self):
        #volume_urn = self.resolver.Get(self.urn, lexicon.AFF4_STORED)
        #if not volume_urn:
        #    raise IOError("Unable to find storage for urn %s" % self.urn)

        self.lexicon = self.resolver.lexicon

        self.chunk_size = int(
            self.resolver.Get(self.urn, self.lexicon.chunkSize) or 32 * 1024)

        self.chunks_per_segment = int(
            self.resolver.Get(self.urn, self.lexicon.chunksPerSegment) or 1024)

        sz = self.resolver.Get(self.urn, self.lexicon.streamSize) or 0
        self.size = int(sz)

        self.compression = str(
            self.resolver.Get(self.urn, self.lexicon.compressionMethod)
            or lexicon.AFF4_IMAGE_COMPRESSION_ZLIB)

        # A buffer for overlapped writes which do not fit into a chunk.
        self.buffer = ""

        # Compressed chunks in the bevy.
        self.bevy = []

        # Length of all chunks in the bevy.
        self.bevy_length = 0

        # List of bevy offsets.
        self.bevy_index = []
        self.chunk_count_in_bevy = 0
        self.bevy_number = 0

        self.cache = ExpiringDict(max_len=1000, max_age_seconds=10)
Example #5
File: logger.py Project: dgsharpe/marvin
    def __init__(self, config):
        self.watch_flags = config.watch_flags
        self.expiring_file_event_dict = ExpiringDict(max_len=100000,
                                                     max_age_seconds=10)

        #Set logging formatting
        LOGGING_MSG_FORMAT = '[%(levelname)s] [%(asctime)s] : %(message)s'
        LOGGING_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'

        logging.basicConfig(level=logging.DEBUG,
                            format=LOGGING_MSG_FORMAT,
                            datefmt=LOGGING_DATE_FORMAT)

        formatter = logging.Formatter(LOGGING_MSG_FORMAT)
        handler = logging.handlers.TimedRotatingFileHandler(
            filename=config.log_file_path, when="d", interval=1, backupCount=7)
        handler.setFormatter(formatter)
        self.logger = logging.getLogger()
        self.logger.addHandler(handler)

        #Set logging levels
        self.logger.setLevel(logging.INFO)
        logging.getLogger('schedule').setLevel(logging.WARNING)
        logging.getLogger('requests').setLevel(logging.WARNING)
        logging.getLogger('urllib3').setLevel(logging.WARNING)
Example #6
 def reset_cache(self):
     MongoCache.cache = ExpiringDict(
         max_len=MongoCache.configuration.cache_max_elements(),
         max_age_seconds=MongoCache.configuration.cache_timeout())
     MongoCache.data_cache = ExpiringDict(
         max_len=MongoCache.configuration.data_cache_max_elements(),
         max_age_seconds=MongoCache.configuration.data_cache_timeout())
Example #7
class LicenseDetector:
    def __init__(self, runtime_data='/usr/share/openalpr/runtime_data/'):
        self.runtime_data = runtime_data
        self.cache = ExpiringDict(max_len=100, max_age_seconds=5)
        self.init_alpr()

    def license_detect(self, image):

        results = self.alpr.recognize_ndarray(image)
        for plate in results['results']:
            for candidate in plate['candidates']:
                if candidate['confidence'] >= 90:
                    self.cache[candidate['plate']] = self.cache.get(
                        candidate['plate'], 0) + 1
        sort_orders = sorted(self.cache.items(),
                             key=lambda itm: itm[1],
                             reverse=True)
        return (sort_orders[0][0] if sort_orders else None)

    def init_alpr(self):
        self.alpr = Alpr("us", "/etc/openalpr/openalpr.conf",
                         self.runtime_data)
        if not self.alpr.is_loaded():
            print("Error loading OpenALPR")
            sys.exit(1)

        self.alpr.set_top_n(20)
        self.alpr.set_default_region("md")
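
A hypothetical frame loop driving the detector (not part of the original snippet). Because cache entries expire after 5 seconds, the vote counts accumulated in license_detect decay over time, so the returned plate is effectively the most frequent recent high-confidence detection.

    import cv2

    detector = LicenseDetector()
    cap = cv2.VideoCapture(0)  # assumes a local camera at index 0
    while True:
        ok, frame = cap.read()
        if not ok:
            break
        plate = detector.license_detect(frame)
        if plate:
            print("best recent plate:", plate)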
Example #8
    def __init__(self,
                 data_store=None,
                 format_parser=JsonFormParser(),
                 initial_load=True,
                 max_age_seconds=60):
        """
        :param format_parser: A custom parser that converts a database entry into a FlaskForm. Has to be a
        subclass of IFormParser.
        :param data_store: A custom database adapter. Has to be a subclass of IDataStore.
        :param initial_load: Whether all forms should be loaded on construction
        :param max_age_seconds: Expiration time of cached forms, in seconds

        """

        if not isinstance(data_store, IDataStore):
            raise FormManagerException(
                f"{data_store.__class__.__name__} has to be a subclass of "
                f"{IDataStore.__name__}")

        if not isinstance(format_parser, IFormParser):
            raise FormManagerException(
                f"{format_parser.__class__.__name__} has to be a subclass of "
                f"{IFormParser.__name__}")

        self._data_store = data_store
        self._parser = format_parser

        self.form_cache = ExpiringDict(max_len=100,
                                       max_age_seconds=max_age_seconds)
        if initial_load:
            self._fetch_forms()
Example #9
 def __init__(self):
     log.debug("[Startup]: Initializing YouTube Module . . .")
     self.mongo = mongo.Mongo()
     self.queue = Queue()
     self.cache = ExpiringDict(max_age_seconds=10800, max_len=1000)
     self.search_cache = dict()
     self.session = aiohttp.ClientSession()
Example #10
 def __init__(self, **connect_args):
     self.connect_args = connect_args
     self._cnt = count()
     api = connect(**connect_args)
     self.connections = ExpiringDict(max_len=MAX_CONN_PER_HOST,
                                     max_age_seconds=CONN_TIMEOUT,
                                     items={self._id: api})
Example #11
    def __init__(
        self,
        bootstrap_servers=None,
        configs=None,
        topic=None,
        kafka_loglevel=logging.WARNING,
    ):
        set_kafka_loglevel(kafka_loglevel)
        self.logger = logging.getLogger(self.__class__.__name__)
        self.lock = threading.Lock()

        self.topic: Optional[str] = topic
        configs = {} if configs is None else configs
        self.configs: dict = configs
        self.producers: Dict[str, KafkaProducer] = {}
        self.fail_pass = ExpiringDict(max_len=10000, max_age_seconds=60)
        self.not_exist_topics = ExpiringDict(max_len=10000, max_age_seconds=60)

        bs = configs.pop("bootstrap_servers", None)
        if not bootstrap_servers:
            bootstrap_servers = bs

        if bootstrap_servers:
            producer = self.get_producer(bootstrap_servers)
            if producer is None:
                raise Exception("can not init default producer")
            self.producers[DEFAULT_FLAG] = producer
        else:
            self.logger.warning("no default kafka producer")
Example #12
    def __init__(
        self,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        region=os.environ.get("AWS_REGION", "us-west-2"),
        cache=None,
        cache_timeout=3600,
        **kwargs,
    ):

        # Open a session with boto3
        self.session = boto3.session.Session(
            region_name=region,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key)
        # Open a connection to the AWS S3 bucket
        self.client = self.session.client("s3")

        # If there is no redis cache available
        self.cache_timeout = cache_timeout
        if cache is None:
            # Set up an expiring cache
            self.cache = ExpiringDict(max_len=100,
                                      max_age_seconds=self.cache_timeout)
            self.using_redis = False
        else:
            # Otherwise attach the redis cache to this object
            self.cache = cache
            self.using_redis = True
Example #13
    def __init__(self,
                 listen=15,
                 f_local=False,
                 default_hook=None,
                 object_hook=None):
        assert V.DATA_PATH is not None, 'Setup p2p params before PeerClientClass init.'

        # object control params
        self.f_stop = False
        self.f_finish = False
        self.f_running = False

        # co-objects
        self.core = Core(host='localhost' if f_local else None, listen=listen)
        self.peers = PeerData(os.path.join(
            V.DATA_PATH, 'peer.dat'))  # {(host, port): header,..}
        self.event = EventIgnition()  # entry point for accepting DirectCmd

        # data status control
        self.broadcast_status: Dict[int, asyncio.Future] = ExpiringDict(
            max_len=5000, max_age_seconds=90)
        self.result_futures: Dict[int, asyncio.Future] = ExpiringDict(
            max_len=5000, max_age_seconds=90)

        # record traffic if Debug.F_RECODE_TRAFFIC is true
        if Debug.F_RECODE_TRAFFIC:
            self.core.traffic.recode_dir = V.DATA_PATH

        # serializer/deserializer hook
        self.default_hook = default_hook
        self.object_hook = object_hook
Example #14
def test_expiring_dict_copy_from_expiring_dict_new_timeout_and_length():
    exp_dict_test = ExpiringDict(max_len=200000, max_age_seconds=1800)
    exp_dict_test['test'] = 1
    exp_dict_test2 = ExpiringDict(max_len=100000, max_age_seconds=900, items=exp_dict_test)
    eq_(1, exp_dict_test2['test'])
    eq_(100000, exp_dict_test2.max_len)
    eq_(900, exp_dict_test2.max_age)
Example #15
    def __init__(self, app) -> None:
        self.authed_users = ExpiringDict(max_len=50, max_age_seconds=3600)

        app.user_handler = self
        self.unauthed_user = UnauthedUser()

        self.app = app
        self.app.context_processor(self.template_context)
Example #16
 def __init__(self, reddit, db_collection, predictor):
     self.reddit = reddit
     self.collection = db_collection
     self.predictor = predictor
     self.subreddit = self.reddit.subreddit('risingthreads')
     self.user_cache = ExpiringDict(max_len=1000000, max_age_seconds=86400)
     self.subreddit_cache = ExpiringDict(max_len=1000000,
                                         max_age_seconds=86400)
Example #17
class APIAuthWrapper():
    def __init__(self):
        max_age = timedelta(minutes=10)
        self.cache = ExpiringDict(max_len=100, max_age_seconds=max_age.seconds)

    def auth_service(self):
        return current_app.config["API_AUTH_SERVICE"]

    def check_auth(self, username, password):
        """This function is called to check if a username /
        password combination is valid.
        """
        res = self.cache.get((username, password))
        if not res:
            res = self.auth_service().check_auth(username, password)
            self.cache[(username, password)] = res
        return res

    def check_token(self, token):
        """This function is called to check if a token is valid.
        """
        res = self.cache.get(token)
        if not res:
            res = self.auth_service().check_token(token)
            self.cache[token] = res
        return res

    def need_authentication_response(self):
        """Sends a 401 response that enables basic auth"""
        return Response(
            'Could not verify your access level for that URL.\n'
            'You have to login with proper credentials', 401,
            {'WWW-Authenticate': 'Basic realm="Login Required"'})

    def error_response(self, error):
        """Sends a 500 response that indicates server error"""
        return Response("Error checking auth: %s" % error, 500)

    def requires_auth(self, f):
        @wraps(f)
        def decorated(*args, **kwargs):

            if request.headers.get('X-SenseiToken'):
                res = self.check_token(request.headers.get('X-SenseiToken'))
            else:
                auth = request.authorization
                if not auth:
                    return self.need_authentication_response()
                res = self.check_auth(auth.username, auth.password)

            if res.authenticated:
                g.user = User(res.userinfo)
                return f(*args, **kwargs)

            return Response("Unauthorized", 401)

        return decorated
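
Hypothetical usage on a Flask view (the `app` object and the route are assumptions, not part of the original module). Because results are stored in the ExpiringDict, each credential pair or token is re-validated against the auth service at most once per 10-minute cache window.

    auth = APIAuthWrapper()

    @app.route("/api/private")
    @auth.requires_auth
    def private_endpoint():
        # requires_auth has populated g.user with the authenticated user
        return "ok"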
Example #18
    def __init__(self, clockchain, networker):

        self.clockchain = clockchain
        self.networker = networker

        # cache to avoid processing duplicate json forwards
        self.duplicate_cache = ExpiringDict(
            max_len=config['expiring_dict_max_len'],
            max_age_seconds=config['expiring_dict_max_age'])
Example #19
 def __init__(self, db: Session, model_cache_size: int, cache_ttl: int):
     self.db: Session = db
     with OpenPredictionService.lock:
         if OpenPredictionService.MODEL_CACHE is None:
             OpenPredictionService.MODEL_CACHE = ExpiringDict(
                 max_len=model_cache_size, max_age_seconds=cache_ttl)
         if OpenPredictionService.MODEL_CONFIGS_CACHE is None:
             OpenPredictionService.MODEL_CONFIGS_CACHE = ExpiringDict(
                 max_len=1, max_age_seconds=cache_ttl)
Example #20
 def __init__(self, **kwargs):
     self.local = ExpiringDict(max_len=100, max_age_seconds=expiration)
     self.project_name = kwargs.get('project', DEFAULT_PROJECT_NAME)
     self.bucket_name = kwargs.get('name', DEFAULT_BUCKET_NAME)
     self.creds_path = kwargs.get('creds_path', DEFAULT_CREDS_PATH)
     credentials = service_account.Credentials.from_service_account_file(
         self.creds_path)
     self.client = storage.Client(self.project_name, credentials)
     self.bucket = self.client.get_bucket(self.bucket_name)
Example #21
def test_setdefault():
    d = ExpiringDict(max_len=10, max_age_seconds=0.01)

    eq_('x', d.setdefault('a', 'x'))
    eq_('x', d.setdefault('a', 'y'))

    sleep(0.01)

    eq_('y', d.setdefault('a', 'y'))
Example #22
    def __init__(self, db_path = DEFAULT_DB_PATH, max_cache_len=1000, max_cache_age=30):
        self.db_path = db_path
        self.cache = ExpiringDict(max_len=max_cache_len, max_age_seconds=max_cache_age)

        # Make sure to do data synchronization on writes!
        self.c = sqlite3.connect(db_path, check_same_thread=False)
        self.c.execute('''CREATE TABLE IF NOT EXISTS api_keys (
            "api_key"	TEXT NOT NULL,
            "req_limit"	INTEGER NOT NULL,
            PRIMARY KEY("api_key")
        );''')
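
A hypothetical lookup method for this class (not in the original excerpt). It follows the same cache-aside pattern as the other examples, so repeated checks of the same API key skip SQLite for up to max_cache_age seconds.

    def get_req_limit(self, api_key):
        """Return the request limit for api_key, or None if the key is unknown."""
        limit = self.cache.get(api_key)
        if limit is None:
            row = self.c.execute(
                'SELECT req_limit FROM api_keys WHERE api_key = ?',
                (api_key,)).fetchone()
            if row is not None:
                limit = self.cache[api_key] = row[0]
        return limit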
Example #23
def test_iter():
    d = ExpiringDict(max_len=10, max_age_seconds=0.01)
    eq_([k for k in d], [])
    d['a'] = 'x'
    d['b'] = 'y'
    d['c'] = 'z'
    eq_([k for k in d], ['a', 'b', 'c'])

    eq_([k for k in d.values()], ['x', 'y', 'z'])
    sleep(0.01)
    eq_([k for k in d.values()], [])
Example #24
 def __init__(self, config, client_id: str, client_secret: str, token_path: str, cache_path: str):
     self.cfg = config
     self.client_id = client_id
     self.client_secret = client_secret
     self.token_path = token_path
     self.cache_path = cache_path
     self.cache = SqliteDict(self.cache_path, tablename='cache', encode=json.dumps, decode=json.loads,
                             autocommit=False)
     self.transcodes_cache = ExpiringDict(max_len=5000, max_age_seconds=2 * (60 * 60))
     self.token = self._load_token()
     self.token_refresh_lock = Lock()
     self.http = self._new_http_object()
Example #25
    def __init__(self):
        self.disabled = False

        api_key = env_cfg("BINANCE_KEY")
        api_secret = env_cfg('BINANCE_SECRET')

        if api_key is None or api_secret is None:
            self.disabled = True
            log.info("Disable BINANCE because either api_key or api_secret not available")
        else:
            self.client = Client(api_key, api_secret)
            self.cache = ExpiringDict(max_len=200, max_age_seconds=60)
Example #26
    def LoadFromURN(self):
        volume_urn = self.resolver.GetUnique(lexicon.transient_graph, self.urn,
                                             lexicon.AFF4_STORED)
        #if not volume_urn:
        #    raise IOError("Unable to find storage for urn %s" % self.urn)

        appendMode = self.resolver.GetUnique(lexicon.transient_graph, self.urn,
                                             lexicon.AFF4_STREAM_WRITE_MODE)
        if appendMode is not None and str(appendMode) in [
                "truncate", "append", "random"
        ]:
            self.properties.writable = True

        self.lexicon = self.resolver.lexicon

        self.chunk_size = int(
            self.resolver.GetUnique(volume_urn, self.urn,
                                    self.lexicon.chunkSize) or 32 * 1024)

        self.chunks_per_segment = int(
            self.resolver.GetUnique(volume_urn, self.urn,
                                    self.lexicon.chunksPerSegment) or 1024)

        sz = self.resolver.GetUnique(volume_urn, self.urn,
                                     self.lexicon.streamSize) or 0
        self.size = int(sz)

        self.compression = (self.resolver.GetUnique(
            volume_urn, self.urn, self.lexicon.compressionMethod)
                            or lexicon.AFF4_IMAGE_COMPRESSION_ZLIB)

        # A buffer for overlapped writes which do not fit into a chunk.
        self.buffer = b""

        # Compressed chunks in the bevy.
        self.bevy = []

        # Length of all chunks in the bevy.
        self.bevy_length = 0

        # List of (bevy offsets, compressed chunk length).
        self.bevy_index = []
        self.chunk_count_in_bevy = 0
        self.bevy_number = 0

        self.cache = ExpiringDict(max_len=1000, max_age_seconds=10)

        # used for identifying in-place writes to bevys
        self.bevy_is_loaded_from_disk = False

        # used for identifying if a bevy now exceeds its initial size
        self.bevy_size_has_changed = False
Example #27
    def __init__(self, core: FeatureStoreCore, cache_config: CacheConfig):
        """Constructs an instance by wrapping a core implementation object.

        :param core: the implementation object
        :param cache_config: the caching parameters
        """
        self._core = core
        if cache_config.enabled:
            self._cache = ExpiringDict(max_len=cache_config.capacity,
                                       max_age_seconds=cache_config.expiration)
        else:
            self._cache = None
        self._inited = False
Example #28
    def __init__(self, username, token, channel_id):
        self.user_id = username
        self.token = token.removeprefix("oauth:")
        self.irc_channel = "#" + channel_id.lower()
        self.channel_id = channel_id
        self.serialized_data_dir = "data"
        self.serialized_data_filename = os.path.join(self.serialized_data_dir,
                                                     f"{self.channel_id}.bin")
        self.push_trend_cache = ExpiringDict(max_len=100,
                                             max_age_seconds=TREND_EXPIRE_SEC)

        self.dizzy_users = []
        self.dizzy_start_ts = 0
        self.dizzy_ban_end_ts = 0
        self.ban_targets = []

        # self.api_client = TwitchAPIClient(self.channel_id, client_id)

        # Create IRC bot connection
        logging.info(f"Connecting to {SERVER} on port {PORT}...")
        irc.bot.SingleServerIRCBot.__init__(
            self, [(SERVER, PORT, "oauth:" + self.token)], username, username)
        # TODO: dynamically determine
        self.trend_threshold = 3

        self.gbf_code_re = re.compile(r"[A-Z0-9]{8}")

        # setup scheduler
        self.reactor.scheduler.execute_every(1, self.dizzy)
        # self.reactor.scheduler.execute_every(5 * 60, self.insert_all)
        # self.reactor.scheduler.execute_every(60 * 60, self.share_clip)

        # load data in disk
        try:
            with open(self.serialized_data_filename, "rb") as f:
                self.data = dill.loads(f.read())
                if "gbf_room_num" not in self.data:
                    self.data["gbf_room_num"] = 0
                if "gbf_room_id_cache" not in self.data:
                    self.data["gbf_room_id_cache"] = ExpiringDict(
                        max_len=1, max_age_seconds=600)
        except FileNotFoundError:
            self.data = {
                "gbf_room_num": 0,
                "gbf_room_id_cache": ExpiringDict(max_len=1,
                                                  max_age_seconds=600),
            }

        # register signal handler
        # https://stackoverflow.com/questions/1112343/how-do-i-capture-sigint-in-python
        signal.signal(signal.SIGINT, self.save_data)
Example #29
def test_ttl():
    d = ExpiringDict(max_len=10, max_age_seconds=10)
    d['a'] = 'x'

    # existent non-expired key
    ok_(0 < d.ttl('a') < 10)

    # non-existent key
    eq_(None, d.ttl('b'))

    # expired key
    with patch.object(ExpiringDict, '__getitem__',
                      Mock(return_value=('x', 10**9))):
        eq_(None, d.ttl('a'))
Example #30
    def __init__(self, bot):
        self.bot = bot

        # cooldown to monitor if too many users join in a short period of time (more than 10 within 8 seconds)
        self.join_raid_detection_threshold = commands.CooldownMapping.from_cooldown(
            rate=10, per=8, type=commands.BucketType.guild)
        # cooldown to monitor if users are spamming a message (more than 7 within 10 seconds)
        self.message_spam_detection_threshold = commands.CooldownMapping.from_cooldown(
            rate=7, per=10.0, type=commands.BucketType.member)
        # cooldown to monitor if too many accounts created on the same date are joining within a short period of time
        # (more than 4 accounts created on the same date joining within 45 minutes of each other)
        self.join_overtime_raid_detection_threshold = commands.CooldownMapping.from_cooldown(
            rate=4, per=2700, type=MessageTextBucket.custom)

        # cooldown to monitor how many times AntiRaid has been triggered (more than 4 triggers per 15 seconds puts the server in lockdown)
        self.raid_detection_threshold = commands.CooldownMapping.from_cooldown(
            rate=4, per=15.0, type=commands.BucketType.guild)
        # cooldown to only send one raid alert to moderators per 10 minutes
        self.raid_alert_cooldown = commands.CooldownMapping.from_cooldown(
            1, 600.0, commands.BucketType.guild)

        # stores the users that trigger self.join_raid_detection_threshold so we can ban them
        self.join_user_mapping = ExpiringDict(max_len=100, max_age_seconds=10)
        # stores the users that trigger self.message_spam_detection_threshold so we can ban them
        self.spam_user_mapping = ExpiringDict(max_len=100, max_age_seconds=10)
        # stores the users that trigger self.join_overtime_raid_detection_threshold so we can ban them
        self.join_overtime_mapping = ExpiringDict(max_len=100,
                                                  max_age_seconds=2700)
        # stores the users that we have banned so we don't try to ban them repeatedly
        self.ban_user_mapping = ExpiringDict(max_len=100, max_age_seconds=120)

        # locks to prevent race conditions when banning concurrently
        self.join_overtime_lock = Lock()
        self.banning_lock = Lock()
Example #31
 def __init__(self, settings=None, prefix=None, parent=None):
     """
     Initialize lazy settings instance.
     """
     self._settings = settings or AVAILABLE_SETTINGS
     self._parent = parent
     self._prefix = prefix
     self._cache = ExpiringDict(max_len=20,
             max_age_seconds=getattr(django_settings,
                                     'SETMAN_INTERNAL_CACHE_TIMEOUT', 30))
Example #32
    def __init__(self, core, cache_config):
        """Constructs an instance by wrapping a core implementation object.

        :param FeatureStoreCore core: the implementation object
        :param ldclient.feature_store.CacheConfig cache_config: the caching parameters
        """
        self._core = core
        if cache_config.enabled:
            self._cache = ExpiringDict(max_len=cache_config.capacity, max_age_seconds=cache_config.expiration)
        else:
            self._cache = None
        self._inited = False
Example #33
 def __init__(self, **kwargs):
     cfg_defaults= { 'mysql_user' : None,
                     'mysql_pass' : None,
                     'mysql_db' : None,
                     'mysql_host' : None }
     self.c = AttrDict()
     for k in cfg_defaults.keys():
         if k in kwargs:
             self.c[k]=kwargs[k]
         elif cfg_defaults[k]:
             self.c[k]=cfg_defaults[k]
             log.info("using default setting for %s" % k)
         else:
             raise Exception("configuration incomplete, missing %s" % k)
     self.c.db = connect(self.c.mysql_host, 
                         self.c.mysql_user, 
                         self.c.mysql_pass,
                         self.c.mysql_db )
     self.c.db.autocommit(False)
     self.c.cur = self.c.db.cursor()
     self.recent_act = ExpiringDict(256,300)
Example #34
 def __init__(self, max_len, max_age_seconds, callback=None):
     ExpiringDict.__init__(self, max_len, max_age_seconds)
     self.callback = callback
Example #35
class LazySettings(object):
    """
    Simple proxy object that accessed database only when user needs to read
    some setting.
    """

    def __init__(self, settings=None, prefix=None, parent=None):
        """
        Initialize lazy settings instance.
        """
        self._settings = settings or AVAILABLE_SETTINGS
        self._parent = parent
        self._prefix = prefix
        self._cache = ExpiringDict(max_len=20,
                max_age_seconds=getattr(django_settings,
                                        'SETMAN_INTERNAL_CACHE_TIMEOUT', 30))

    def __delattr__(self, name):
        if name.startswith('_'):
            return self._safe_super_method('__delattr__', name)

        if hasattr(django_settings, name):
            delattr(django_settings, name)
        else:
            custom = self._custom
            delattr(custom, name)
            custom.save()
            cache.delete(CACHE_KEY)

    def _get_setting_value(self, name):
        data, prefix = self._custom.data, self._prefix

        # Read app setting from database
        if prefix and prefix in data and name in data[prefix]:
            return data[prefix][name]
        # Read project setting from database
        elif name in data and not isinstance(data[name], dict):
            return data[name]
        # Or from Django settings
        elif hasattr(django_settings, name):
            return getattr(django_settings, name)
        # Or read default value from available settings
        elif hasattr(self._settings, name):
            mixed = getattr(self._settings, name)

            if is_settings_container(mixed):
                return LazySettings(mixed, name, self)

            return mixed.default

        # If cannot read setting - raise error
        raise AttributeError('Settings has no attribute %r' % name)

    def __getattr__(self, name):
        """
        Add support for getting settings keys as instance attribute.

        For first try, method tries to read settings from database, then from
        Django settings and if all fails try to return default value of
        available setting from configuration definition file if any.
        """
        if name.startswith('_'):
            return self._safe_super_method('__getattr__', name)

        from_cache = self._cache.get(name, None)

        if from_cache is None:
            value = self._get_setting_value(name)
            self._cache[name] = value
            return value
        else:
            return from_cache

    def __setattr__(self, name, value):
        """
        Add support of setting values to settings as instance attribute.
        """
        if name.startswith('_'):
            return self._safe_super_method('__setattr__', name, value)

        # First of all try to setup value to Django setting
        if hasattr(django_settings, name):
            setattr(django_settings, name, value)
        # Then setup value to project setting
        elif not self._prefix:
            custom = self._custom
            setattr(custom, name, value)
            custom.save()
            cache.delete(CACHE_KEY)
        # And finally setup value to app setting
        else:
            custom = self._custom
            data, prefix = custom.data, self._prefix

            if not prefix in data:
                data[prefix] = {}

            data[prefix].update({name: value})
            custom.save()
            cache.delete(CACHE_KEY)

    def revert(self):
        """
        Revert settings to default values.
        """
        self._custom.revert()

    def save(self):
        """
        Save customized settings to the database.
        """
        self._custom.save()

    def _clear(self):
        """
        Clear custom settings cache.
        """
        if CACHE_KEY in cache:
            cache.delete(CACHE_KEY)

    #@threaded_cached_property_with_ttl(
    #    ttl=getattr(django_settings, 'SETMAN_PROPERTY_CACHE_TIMEOUT', 30))
    @property
    def _custom(self):
        """
        Read custom settings from database and store it to the instance cache.
        """
        if self._parent:
            return self._parent._custom

        from_cache = cache.get(CACHE_KEY)
        if not from_cache:
            custom = self._get_custom_settings()
            cache.set(CACHE_KEY, custom)
            return custom

        return from_cache

    def _get_custom_settings(self):
        """
        Do not read any settings before post_syncdb signal is called.
        """
        try:
            return Settings.objects.get()
        except Settings.DoesNotExist:
            return Settings.objects.create(data={})

    def _safe_super_method(self, method, *args, **kwargs):
        """
        Execute super ``method`` and format fancy error message on
        ``AttributeError``.
        """
        klass = self.__class__

        try:
            method = getattr(super(klass, self), method)
        except AttributeError:
            args = (
                klass.__name__,
                args[0] if method.endswith('attr__') else method
            )
            raise AttributeError('%r object has no attribute %r' % args)
        else:
            return method(*args, **kwargs)
Example #36
class CachingStoreWrapper(FeatureStore):
    """A partial implementation of :class:`ldclient.interfaces.FeatureStore`.

    This class delegates the basic functionality to an implementation of
    :class:`ldclient.interfaces.FeatureStoreCore` - while adding optional caching behavior and other logic
    that would otherwise be repeated in every feature store implementation. This makes it easier to create
    new database integrations by implementing only the database-specific logic. 
    """
    __INITED_CACHE_KEY__ = "$inited"

    def __init__(self, core, cache_config):
        """Constructs an instance by wrapping a core implementation object.

        :param FeatureStoreCore core: the implementation object
        :param ldclient.feature_store.CacheConfig cache_config: the caching parameters
        """
        self._core = core
        if cache_config.enabled:
            self._cache = ExpiringDict(max_len=cache_config.capacity, max_age_seconds=cache_config.expiration)
        else:
            self._cache = None
        self._inited = False

    def init(self, all_data):
        """
        """
        self._core.init_internal(all_data)
        if self._cache is not None:
            self._cache.clear()
            for kind, items in all_data.items():
                self._cache[self._all_cache_key(kind)] = self._items_if_not_deleted(items)
                for key, item in items.items():
                    self._cache[self._item_cache_key(kind, key)] = [item] # note array wrapper
        self._inited = True

    def get(self, kind, key, callback=lambda x: x):
        """
        """
        if self._cache is not None:
            cache_key = self._item_cache_key(kind, key)
            cached_item = self._cache.get(cache_key)
            # note, cached items are wrapped in an array so we can cache None values
            if cached_item is not None:
                return callback(self._item_if_not_deleted(cached_item[0]))
        item = self._core.get_internal(kind, key)
        if self._cache is not None:
            self._cache[cache_key] = [item]
        return callback(self._item_if_not_deleted(item))

    def all(self, kind, callback=lambda x: x):
        """
        """
        if self._cache is not None:
            cache_key = self._all_cache_key(kind)
            cached_items = self._cache.get(cache_key)
            if cached_items is not None:
                return callback(cached_items)
        items = self._items_if_not_deleted(self._core.get_all_internal(kind))
        if self._cache is not None:
            self._cache[cache_key] = items
        return callback(items)
    
    def delete(self, kind, key, version):
        """
        """
        deleted_item = { "key": key, "version": version, "deleted": True }
        self.upsert(kind, deleted_item)

    def upsert(self, kind, item):
        """
        """
        new_state = self._core.upsert_internal(kind, item)
        if self._cache is not None:
            self._cache[self._item_cache_key(kind, item.get('key'))] = [new_state]
            self._cache.pop(self._all_cache_key(kind), None)

    @property
    def initialized(self):
        """
        """
        if self._inited:
            return True
        if self._cache is None:
            result = bool(self._core.initialized_internal())
        else:
            result = self._cache.get(CachingStoreWrapper.__INITED_CACHE_KEY__)
            if result is None:
                result = bool(self._core.initialized_internal())
                self._cache[CachingStoreWrapper.__INITED_CACHE_KEY__] = result
        if result:
            self._inited = True
        return result

    @staticmethod
    def _item_cache_key(kind, key):
        return "{0}:{1}".format(kind.namespace, key)

    @staticmethod
    def _all_cache_key(kind):
        return kind.namespace
    
    @staticmethod
    def _item_if_not_deleted(item):
        if item is not None and item.get('deleted', False):
            return None
        return item
    
    @staticmethod
    def _items_if_not_deleted(items):
        results = {}
        if items is not None:
            for key, item in items.items():
                if not item.get('deleted', False):
                    results[key] = item
        return results
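
A minimal in-memory core to exercise CachingStoreWrapper, sketched from the five calls the wrapper actually makes (init_internal, get_internal, get_all_internal, upsert_internal, initialized_internal). A real integration would implement ldclient's FeatureStoreCore interface; the keep-the-higher-version upsert rule below is an assumption.

    class DictCore:
        def __init__(self):
            self._data = {}

        def init_internal(self, all_data):
            self._data = {kind: dict(items) for kind, items in all_data.items()}

        def get_internal(self, kind, key):
            return self._data.get(kind, {}).get(key)

        def get_all_internal(self, kind):
            return self._data.get(kind, {})

        def upsert_internal(self, kind, item):
            # assumption: keep whichever item carries the higher version
            items = self._data.setdefault(kind, {})
            old = items.get(item['key'])
            if old is None or old['version'] < item['version']:
                items[item['key']] = item
                return item
            return old

        def initialized_internal(self):
            return bool(self._data)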
Example #37
class SGD(object):
    """ SzpieGwiazdor """    

    def __init__(self, **kwargs):
        cfg_defaults= { 'mysql_user' : None,
                        'mysql_pass' : None,
                        'mysql_db' : None,
                        'mysql_host' : None }
        self.c = AttrDict()
        for k in cfg_defaults.keys():
            if k in kwargs:
                self.c[k]=kwargs[k]
            elif cfg_defaults[k]:
                self.c[k]=cfg_defaults[k]
                log.info("using default setting for %s" % k)
            else:
                raise Exception("configuration incomplete, missing %s" % k)
        self.c.db = connect(self.c.mysql_host, 
                            self.c.mysql_user, 
                            self.c.mysql_pass,
                            self.c.mysql_db )
        self.c.db.autocommit(False)
        self.c.cur = self.c.db.cursor()
        self.recent_act = ExpiringDict(256,300)


    def _amf_raw_response_parser(self, rawamf):
        """ parses a raw AMF response (iterates over the contained messages,
            picks the one with type 1001 (trade update), returns the body of
            the first match or None) """
        decresp = amfdec.decode_packet( rawamf )
        for m in decresp.messages:
            if 'type' in m.body.body and int(m.body.body['type']) == 1001 \
            and 'data' in m.body.body and 'data' in m.body.body['data'] \
            and isinstance(m.body.body['data']['data'], dict):
                    if m.body.body['data']['errorCode'] == 0:
                        return m.body.body
                    else:
                        errcode = m.body.body['data']['errorCode']
                        log.error('amf error, code: %s' % errcode)
        return None
            
          
    def _pom_remove_if_exists(self, pID, pN):
        sql = """DELETE FROM gwiazda WHERE playerID = %d AND playerName = '%s';"""%(pID, pN)
        self.c.cur.execute(sql)
        log.info('player/ID %s/%d: removed %d rows'%(pN, pID, self.c.cur.rowcount))
        self.c.db.commit()
        
        
    def _pom_generate_objects(self, pID, pN, buffsvector):
        return [self.StarItem( pID, pN, **pd ) for pd in buffsvector]
        
    def _pom_insert_star_list(self, pID, pN, star_menu):
        sqllines = [b._sql_ins_data for b in star_menu]
        query= """  INSERT IGNORE INTO gwiazda
                  ( playerID,          playerName,            amount,
                    buffName_string,   resourceName_string,   kiedyZlapanoTS  )  
                    VALUES ( %s,%s,%s,%s,%s,%s ) """ 
        self.c.cur.executemany(query, sqllines)
        log.info('player/ID %s/%d: added %d rows'%(pN, pID, self.c.cur.rowcount))
        self.c.db.commit()

    def _incoming_traffic_handler(self, response):
        """ logika postępowania z przechwyconym ruchem (1001 response) """
        resp_1001= self._amf_raw_response_parser(response)
        pom      = nested_lookup('playersOnMap', resp_1001)
 
        for pd in pom[0]:
            bv = pd.get('availableBuffs_vector')
            if bv:
                pID = pd['userID']
                pN  = pd['username_string']
                recent_key = ":".join([str(pID),pN])
                ittl= self.recent_act.ttl( recent_key )
                if ittl:
                    log.info('player/ID %s/%d: ignoring for another %d seconds'%(pN, pID, ittl))
                    return None
                else:
                    adventure_cntr = dict(Counter(b['resourceName_string'] for b in [a for a in bv if a['buffName_string'] == "Adventure"]))
                    adventure_list = [self.StarItem(pID, pN, "Adventure", i[0], i[1]) for i in adventure_cntr.items()]
                    otheritms_list = [self.StarItem(pID, pN, i['resourceName_string'],i['buffName_string'], i['amount']) for i in bv if i['buffName_string'] != "Adventure"]
                    star_menu = adventure_list + otheritms_list
                    self._pom_remove_if_exists(pID, pN)
                    self._pom_insert_star_list(pID, pN, star_menu)
                    log.info('player/ID %s/%d: star menu saved, will be ignored for the next %d seconds'%(pN, pID, self.recent_act.max_age))
                    self.recent_act[ recent_key ] = True
###############################################################################
    class StarItem(object):
        """ pozycja w menu gwiazdy """
        def __init__(self, playerID, playerName, resourceName_string, \
                     buffName_string, amount ):
            self.playerID       = playerID
            self.playerName     = playerName
            self.resourceName_string= resourceName_string
            self.buffName_string= buffName_string
            self.amount         = amount
            self.kiedyZlapanoTS = int(time())
            
        def __eq__(self, o):
            return self.playerID == o.playerID \
               and self.playerName == o.playerName \
               and self.resourceName_string == o.resourceName_string \
               and self.buffName_string == o.buffName_string \
               and self.amount == o.amount

        @property
        def _sql_ins_data(self):
            """ wypluwa sql insert values:
                (  playerID,          playerName,     resourceName_string,
                   buffName_string,   amount,         kiedyZlapanoTS  )
            """                
            return  self.playerID,              self.playerName,        \
                    self.amount,                self.buffName_string,   \
                    self.resourceName_string,   self.kiedyZlapanoTS