def _store_page(
    pipeline: redis.client.Pipeline,
    date: str,
    expire: int,
    url: urllib.parse.ParseResult,
) -> None:
    key: str = f"stats:page:{date}"
    pipeline.zincrby(key, 1, url.path)
    pipeline.expireat(key, expire)
def insert_metric(self, site_id: int, value: float, unit: MetricUnit,
                  time: datetime.datetime, pipeline: redis.client.Pipeline):
    """Insert a specific metric."""
    metric_key = self.key_schema.day_metric_key(site_id, unit, time)
    minute_of_day = self._get_day_minute(time)

    # START Challenge #2
    pipeline.zadd(metric_key, {f"{value}:{minute_of_day}": minute_of_day})
    pipeline.expire(metric_key, METRIC_EXPIRATION_SECONDS)
def pop_transaction(self, pipe: redis.client.Pipeline, version):
    free_key, lease_key = (self.generate_free_key(version=version),
                           self.generate_lease_key(version=version))
    # Until multi() is called the pipeline executes commands immediately,
    # so this read happens under the caller's WATCH.
    [(member, score)] = pipe.zrevrange(free_key, 0, 0, withscores=True)
    member = member.decode()
    pipe.multi()
    pipe.zincrby(free_key, value=member, amount=-1)
    pipe.zincrby(lease_key, value=member, amount=1)
    return member
def _insert(self, meter_reading: MeterReading,
            pipeline: redis.client.Pipeline) -> None:
    """Helper method to insert a meter reading."""
    # START Challenge #6
    global_key = self.key_schema.global_feed_key()
    site_key = self.key_schema.feed_key(meter_reading.site_id)
    pipeline.xadd(global_key, MeterReadingSchema().dump(meter_reading),
                  maxlen=self.GLOBAL_MAX_FEED_LENGTH)
    pipeline.xadd(site_key, MeterReadingSchema().dump(meter_reading),
                  maxlen=self.SITE_MAX_FEED_LENGTH)
def _insert(self, meter_reading: MeterReading,
            pipeline: redis.client.Pipeline) -> None:
    """Helper method to insert a meter reading."""
    reading_data = MeterReadingSchema().dump(meter_reading)
    pipeline.xadd(self.key_schema.global_feed_key(), reading_data,
                  maxlen=self.GLOBAL_MAX_FEED_LENGTH)
    pipeline.xadd(self.key_schema.feed_key(reading_data.get('site_id')),
                  reading_data, maxlen=self.SITE_MAX_FEED_LENGTH)
def _insert(self, meter_reading: MeterReading,
            pipeline: redis.client.Pipeline) -> None:
    """Helper method to insert a meter reading."""
    # START Challenge #6
    field_values = MeterReadingSchema().dump(meter_reading)
    pipeline.xadd(self.key_schema.global_feed_key(), field_values,
                  maxlen=FeedDaoRedis.GLOBAL_MAX_FEED_LENGTH)
    pipeline.xadd(self.key_schema.feed_key(meter_reading.site_id), field_values,
                  maxlen=FeedDaoRedis.SITE_MAX_FEED_LENGTH)
def insert_metric(self, site_id: int, value: float, unit: MetricUnit,
                  time: datetime.datetime, pipeline: redis.client.Pipeline):
    """Insert a specific metric."""
    metric_key = self.key_schema.day_metric_key(site_id, unit, time)
    minute_of_day = self._get_day_minute(time)
    pipeline.zadd(
        metric_key,
        {str(MeasurementMinute(value, minute_of_day)): minute_of_day})
    pipeline.expire(metric_key, METRIC_EXPIRATION_SECONDS)
def insert_metric(self, site_id: int, value: float, unit: MetricUnit,
                  time: datetime.datetime, pipeline: redis.client.Pipeline):
    """Insert a specific metric."""
    metric_key = self.key_schema.day_metric_key(site_id, unit, time)
    minute_of_day = self._get_day_minute(time)

    # START Challenge #2
    member_str = f"{value}:{minute_of_day}"
    pipeline.zadd(metric_key, mapping={member_str: minute_of_day})
    pipeline.expire(metric_key, METRIC_EXPIRATION_SECONDS)
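# A hypothetical read-side sketch for the insert_metric variants above. It
# assumes the same "value:minute_of_day" member encoding and minute-of-day
# scores, and that key_schema.day_metric_key comes from the surrounding
# project; the function name and parameters here are illustrative only.
def get_metrics_for_minutes(redis_client: redis.Redis, key_schema, site_id: int,
                            unit: MetricUnit, time: datetime.datetime,
                            start_minute: int, end_minute: int):
    """Return (value, minute_of_day) pairs for a minute range of one day."""
    metric_key = key_schema.day_metric_key(site_id, unit, time)
    members = redis_client.zrangebyscore(metric_key, start_minute, end_minute)
    readings = []
    for member in members:
        # Members look like b"21.5:720"; the minute is the part after the last colon.
        value_str, minute_str = member.decode().rsplit(":", 1)
        readings.append((float(value_str), int(minute_str)))
    return readings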
def _store_referrer(pipeline: redis.client.Pipeline, date: str, expire: int) -> None:
    referrer: Optional[str] = flask.request.referrer
    if referrer is not None:
        try:
            referrer_url: urllib.parse.ParseResult = urllib.parse.urlparse(referrer)
        except ValueError:
            pass
        else:
            key: str = f"stats:referrers:{date}"
            pipeline.zincrby(key, 1, referrer_url.netloc)
            pipeline.expireat(key, expire)
def _insert(self, meter_reading: MeterReading,
            pipeline: redis.client.Pipeline) -> None:
    """Helper method to insert a meter reading."""
    # START Challenge #6
    my_dict = {
        "site_id": meter_reading.site_id,
        "wh_used": meter_reading.wh_used,
        "wh_generated": meter_reading.wh_generated,
        "temp_c": meter_reading.temp_c,
        "timestamp": meter_reading.timestamp.timestamp(),
    }
    pipeline.xadd(self.key_schema.global_feed_key(), fields=my_dict)
    pipeline.xadd(self.key_schema.feed_key(meter_reading.site_id), fields=my_dict)
def _add_match_item_generator_to_redis(
    cls,
    match_item_generator: ABCMatchItemGenerator,
    redis_pipeline: redis.client.Pipeline,
    category_key: str,
    prefix: str,
) -> None:
    redis_pipeline.sadd(
        category_key,
        match_item_generator.name,
    )
    cls._add_match_items_to_redis(
        match_item_generator,
        redis_pipeline,
        prefix,
    )
def _insert(self, meter_reading: MeterReading,
            pipeline: redis.client.Pipeline) -> None:
    """Helper method to insert a meter reading."""
    # START Challenge #6
    # Create variables for ease of use in the Redis commands and to avoid long statements.
    global_key = self.key_schema.global_feed_key()
    site_key = self.key_schema.feed_key(meter_reading.site_id)

    # Serialize once instead of dumping the reading twice.
    serialized_meter_reading = MeterReadingSchema().dump(meter_reading)

    pipeline.xadd(global_key, serialized_meter_reading,
                  maxlen=self.GLOBAL_MAX_FEED_LENGTH)
    pipeline.xadd(site_key, serialized_meter_reading,
                  maxlen=self.SITE_MAX_FEED_LENGTH)
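# A hypothetical read-side sketch for the _insert variants above. It assumes
# the same key_schema feed keys and that entries were added with XADD as shown;
# the function name and the limit parameter are illustrative only.
def get_recent_readings(redis_client: redis.Redis, key_schema,
                        site_id: Optional[int] = None, limit: int = 100):
    """Return the most recent feed entries, newest first."""
    key = (key_schema.global_feed_key() if site_id is None
           else key_schema.feed_key(site_id))
    # XREVRANGE walks the stream from newest to oldest entries.
    return redis_client.xrevrange(key, max="+", min="-", count=limit)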
def _store_browser(pipeline: redis.client.Pipeline, date: str, expire: int) -> None:
    browser: Optional[str] = flask.request.user_agent.browser
    if browser is not None:
        browser_key: str = f"stats:browser:{date}"
        pipeline.zincrby(browser_key, 1, browser)
        pipeline.expireat(browser_key, expire)

        version: Optional[str] = flask.request.user_agent.version
        if version is not None:
            version_key: str = f"stats:{browser}:{date}"
            pipeline.zincrby(version_key, 1, version)
            pipeline.expireat(version_key, expire)
def put_transaction(self, pipe: redis.client.Pipeline, member, version):
    free_key, lease_key = (self.generate_free_key(version=version),
                           self.generate_lease_key(version=version))
    pipe.multi()
    pipe.zincrby(free_key, value=member, amount=1)
    pipe.zincrby(lease_key, value=member, amount=-1)
def cache_helper(
    pipeline: redis.client.Pipeline,
    cache_key: str,
    cache_func: Callable[..., Any],
    cache_args: ArgsType = None,
    cache_kwargs: KwargsType = None,
):
    if cache_args is None:
        cache_args = tuple()
    if cache_kwargs is None:
        cache_kwargs = dict()

    # FIXME: there's a possible race condition in caching:
    # if the cache is reset at the moment the round is updated,
    # we could override the correct cache state with the state
    # of the previous round if caching is started earlier.
    was_changed = False
    while True:
        try:
            pipeline.watch(cache_key)
            cached = pipeline.exists(cache_key)
            pipeline.multi()
            if not cached:
                was_changed = True
                cache_func(*cache_args, **cache_kwargs)
            pipeline.execute()
        except redis.WatchError:
            time.sleep(0.05)
        else:
            break
    return was_changed
def _add_match_items_to_redis(
    cls,
    match_item_generator: ABCMatchItemGenerator,
    redis_pipeline: redis.client.Pipeline,
    redis_prefix: str,
) -> None:
    prefix = cls.add_to_prefix(redis_prefix, match_item_generator.name)
    key_match_texts = cls.key_match_texts(prefix)
    redis_pipeline.delete(key_match_texts)
    for idx, match_item in enumerate(match_item_generator.generate_match_items()):
        redis_pipeline.hset(
            key_match_texts,
            key=" ".join(match_item.match_texts),
            value=idx,
        )
        redis_pipeline.hset(  # type: ignore[call-arg]  # no idea why this is necessary...
            cls.add_to_prefix(prefix, idx),
            mapping={
                "title": match_item.title,
                "topic": match_item.topic,
                "url": match_item.url,
            },
        )
def _store_response_time(pipeline: redis.client.Pipeline,
                         url: urllib.parse.ParseResult) -> None:
    response_time: float = round(time.perf_counter() - flask.g.start, 3)
    key: str = f"stats:perf:{url.path}"
    pipeline.lpush(key, response_time)
    pipeline.ltrim(key, 0, 99)
def _do(p: redis.client.Pipeline, number):
    # WATCH puts the pipeline into immediate-execution mode, so the GET calls
    # below return real values while the key stays watched.
    p.watch(number)
    if p.get(number):
        raise AlreadyExistException('Number already exists')
    elif p.get(number + 1):
        raise OneLessThatItWasException('Number is 1 less than required')
    # Queue the write and run it atomically; EXECUTE raises WatchError if the
    # watched key changed in the meantime.
    p.multi()
    p.set(number, 'True')
    p.execute()
def transaction_method(pipe: redis.client.Pipeline) -> None:
    value = pipe.get(db_key)
    if value is not None and value.decode("utf-8") == expected_report_key:
        pipe.delete(db_key)
def __set_score(self, lock_id: str, old_zscore: Optional[float], now: float,
                new_zscore: Optional[float], locked: bool,
                pipeline: redis.client.Pipeline):
    """Transaction method (redis.transaction()) to update or delete the zscore
    of elements in the mutex sorted list. It assumes that redis.watch is set
    for the sorted list. The method is used internally by all public methods.

    Removes the element if no new score is provided. Before removing the
    element or updating the score, validates that old_zscore didn't change.

    Args:
        lock_id (str): redis sorted list's element key
        old_zscore (Optional[float]): old zscore of the element. Ignored if not provided
        now (float): the current timestamp (zscore representing current time)
        new_zscore (Optional[float]): new zscore to set (often equal to "now").
            If None, removes the element
        locked (bool): indicates whether the target mutex must not be in the
            "expired" state (when updating or releasing)
        pipeline (redis.client.Pipeline): Redis pipeline object

    Returns:
        Optional[Tuple[str, Optional[float]]]: (lock_id, new_zscore) on success;
        None if the element is missing, the old score changed, or the
        locked/expired precondition does not hold.
    """
    zscore = pipeline.zscore(name=self.__name, value=lock_id)
    pipeline.multi()

    if zscore is None:
        return None
    if old_zscore is not None and old_zscore != zscore:
        return None

    if locked and zscore > now - self.__timeout:
        if new_zscore is None:
            pipeline.zrem(self.__name, lock_id)
        else:
            pipeline.zadd(name=self.__name, mapping={lock_id: new_zscore}, xx=True)
        return (lock_id, new_zscore)
    elif not locked and zscore < now - self.__timeout:
        if new_zscore is None:
            pipeline.zrem(self.__name, lock_id)
        else:
            pipeline.zadd(name=self.__name, mapping={lock_id: new_zscore}, xx=True)
        return (lock_id, new_zscore)
    return None
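# A minimal sketch of how a transaction callable like __set_score is typically
# driven, assuming this method lives inside the same class and that the
# instance holds a Redis client as self.__redis and the sorted-set name as
# self.__name (both attribute names are assumptions). redis-py's
# Redis.transaction() wraps WATCH on the sorted set, calls the callable with a
# pipeline, and retries on WatchError. The same driver pattern applies to the
# pop_transaction / put_transaction / update_transaction callables in this
# section. The method name "refresh" is illustrative only.
def refresh(self, lock_id: str, old_zscore: float, now: float):
    """Extend a held lock by moving its score forward to `now`."""
    return self.__redis.transaction(
        lambda pipe: self.__set_score(lock_id, old_zscore, now, now, True, pipe),
        self.__name,                # key to WATCH while the callable runs
        value_from_callable=True,   # return __set_score's result, not EXEC's output
    )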
def _store_os(pipeline: redis.client.Pipeline, date: str, expire: int) -> None:
    os: Optional[str] = flask.request.user_agent.platform
    if os is not None:
        key: str = f"stats:os:{date}"
        pipeline.zincrby(key, 1, os)
        pipeline.expireat(key, expire)
def _update_page_views(pipeline: redis.client.Pipeline, date: str, expire: int) -> None:
    key: str = f"stats:views:{date}"
    pipeline.incr(key)
    pipeline.expire(key, expire)
def transaction_func(pipeline: redis.client.Pipeline) -> None:
    pipeline.multi()
    pipeline.set('my_pet', 'cat')
    pipeline.set('my_pet', 'tiger')
    pipeline.execute()
def _update_visitor_count(pipeline: redis.client.Pipeline, date: str, expire: int) -> None:
    key: str = f"stats:visitors:{date}"
    pipeline.pfadd(key, flask.g.session.id_)
    pipeline.expire(key, expire)
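# A hypothetical wiring sketch for the _store_* / _update_* helpers above. It
# assumes a Flask after_request hook, a module-level redis_client and app, and
# that flask.g.start and flask.g.session are set in a before_request hook. The
# date/expire computation is illustrative, not taken from the original
# application (note the helpers mix EXPIRE and EXPIREAT for the same value).
@app.after_request
def _record_stats(response):
    now = datetime.datetime.utcnow()
    date = now.strftime("%Y-%m-%d")
    # Illustrative policy: let per-day keys lapse shortly after midnight UTC.
    midnight = datetime.datetime.combine(now.date() + datetime.timedelta(days=1),
                                         datetime.time.min)
    expire = int(midnight.timestamp())
    url = urllib.parse.urlparse(flask.request.url)

    # Batch every stats update into one pipeline and send it in a single round trip.
    pipeline = redis_client.pipeline(transaction=False)
    _update_page_views(pipeline, date, expire)
    _update_visitor_count(pipeline, date, expire)
    _store_page(pipeline, date, expire, url)
    _store_referrer(pipeline, date, expire)
    _store_browser(pipeline, date, expire)
    _store_os(pipeline, date, expire)
    _store_response_time(pipeline, url)
    pipeline.execute()
    return response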
def update_transaction(self, pipe: redis.client.Pipeline, member_scores):
    version = self.version + 1
    pipe.multi()
    pipe.zadd(self.generate_free_key(version=version), mapping=member_scores)
    return version