def _populate(
    self,  # type: ignore
    pipeline: Pipeline,
    arg: InitArg = tuple(),
    *,
    sign: int = +1,
    **kwargs: int,
) -> None:
    """Buffer counter updates onto *pipeline*.

    Collects deltas from *arg* (either a mapping of counts or an
    iterable of keys) and from *kwargs*, scales them by *sign* (+1 to
    add, -1 to subtract), adds each delta to the counter's current
    value exactly once, and queues a single HSET on the transaction
    pipeline.
    """
    to_set = {}
    try:
        # Mapping case: each value is a count to apply, sign-scaled.
        for key, value in cast(Counter[JSONTypes], arg).items():
            to_set[key] = sign * value
    except AttributeError:
        # Iterable-of-keys case: each occurrence contributes one unit.
        # Base the delta on 0, not self[key] — the current Redis value
        # is added exactly once in the comprehension below; seeding it
        # here would double-count it.
        for key in arg:
            to_set[key] = to_set.get(key, 0) + sign
    for key, value in kwargs.items():
        # Merge keyword deltas with whatever *arg* contributed.  Again
        # use 0 as the base so self[key] is not applied twice.
        to_set[key] = to_set.get(key, 0) + sign * value
    # Fold the accumulated deltas into the counter's current values.
    to_set = {key: self[key] + value for key, value in to_set.items()}
    encoded_to_set = {
        self._encode(k): self._encode(v) for k, v in to_set.items()
    }
    if encoded_to_set:
        pipeline.multi()
        pipeline.hset(self.key, mapping=encoded_to_set)  # type: ignore
def tx_fn(pipeline: Pipeline) -> Response:
    """Code to be executed within a Redis transaction."""
    raw_states: List[Optional[bytes]] = pipeline.mget(rkeys)
    seconds: int
    micros: int
    seconds, micros = pipeline.time()
    now = seconds + micros / 1000000
    wait: float = 0
    new_states: List[State] = []
    for limit, raw in zip(limits, raw_states):
        # Missing key decodes to "empty bucket as of right now".
        last_time, last_value = self._codec.decode(raw) or (now, 0)
        # Drain the bucket for the elapsed time, then add this request.
        value = max(last_value - (now - last_time) * limit.zone.rate, 0) + 1
        headroom = limit.burst + 1 - value
        if headroom < -limit.delay:
            # Past the hard limit: abort the transaction and reject.
            pipeline.unwatch()
            return Response(False, None)
        if headroom < 0:
            # Within the burst-delay window: compute how long to wait.
            wait = max(wait, -headroom / limit.zone.rate)
        new_states.append(State(now, value))
    pipeline.multi()
    for limit, rkey, state in zip(limits, rkeys, new_states):
        pipeline.setex(rkey, limit.zone.expiry, self._codec.encode(state))
    return Response(True, wait)
def _populate(
    self,
    pipeline: Pipeline,
    iterable: Iterable[JSONTypes] = tuple(),
) -> None:
    """Queue an SADD of *iterable*'s encoded elements onto *pipeline*."""
    members = {self._encode(element) for element in iterable}
    if not members:  # pragma: no cover
        return
    pipeline.multi()
    pipeline.sadd(self.key, *members)
def _populate(
    self,
    pipeline: Pipeline,
    arg: InitArg = tuple(),
    **kwargs: JSONTypes,
) -> None:
    """Queue an HSET of *arg*'s and *kwargs*' items onto *pipeline*."""
    with contextlib.suppress(AttributeError):
        # A mapping argument is flattened to its (key, value) pairs.
        arg = cast(InitMap, arg).items()
    # kwargs come last so they win on duplicate keys.
    pairs = itertools.chain(cast(InitIter, arg), kwargs.items())
    mapping = {self._encode(key): self._encode(value) for key, value in pairs}
    if mapping:
        pipeline.multi()
        pipeline.hset(self.key, mapping=mapping)  # type: ignore
def __delete(self, pipeline: Pipeline, index: Union[slice, int]) -> None:
    # Python's list API deletes elements by *index*, but Redis only
    # removes them by *value* (LREM).  Workaround: overwrite every
    # doomed position with a sentinel 0, then LREM exactly that many
    # 0s in one pass.
    #
    # More info:
    # http://redis.io/commands/lrem
    doomed = self.__slice_to_indices(index)
    count = 0
    pipeline.multi()
    for position in doomed:
        pipeline.lset(self.key, position, 0)
        count += 1
    if count:  # pragma: no cover
        pipeline.lrem(self.key, count, 0)
def _store(p: Pipeline):
    """Persist *obj* into the deck's JSON document inside a transaction."""
    # A vehicle's mass already includes any cargo it carries, so the
    # capacity check only needs the top-level object's mass.
    if obj.mass > self.capacity_mass:
        raise NoCapacityError
    p.multi()
    object_dict = schema.dump(obj)
    if hasattr(obj, 'objects'):
        # Containers: serialize each contained object with the schema
        # registered for its type.
        object_dict['objects'] = {
            name: object_schemas_by_type[contained.type].dump(contained)
            for name, contained in obj.objects.items()
        }
    p.jsonset(deck_key, f'.objects.{obj.name}', object_dict)
    p.jsonnumincrby(deck_key, '.mass', obj.mass)
def _store(p: Pipeline):
    """Persist *obj* (and any contained objects) inside a transaction."""
    # A vehicle's mass already accounts for its cargo, so only the
    # top-level object's mass matters for the capacity check.
    if obj.mass > self.capacity_mass:
        raise NoCapacityError
    hash_key = keys.deck_item(self.name, obj.name)
    mass_key = keys.deck_stored_mass(self.name)
    schema = object_schemas_by_type.get(obj.type)
    # Containers expose an 'objects' mapping; everything else stores
    # nothing extra.
    contents = getattr(obj, 'objects', {})
    p.multi()
    fields = schema.dump(obj)
    # Redis can't store lists in a hash, so contained objects are
    # persisted in their own hashes rather than inline.
    fields.pop('objects', None)
    # Each contained object gets its own hash and is linked back to
    # the container through a sorted set scored by mass.
    for item in contents.values():
        item_schema = object_schemas_by_type[item.type]
        p.zadd(keys.container_items_set(obj.name),
               {item.name: item.mass})
        p.hset(keys.container_item(obj.name, item.name),
               mapping=item_schema.dump(item))
    p.zadd(deck_items_key, {obj.name: obj.mass})
    p.hset(hash_key, mapping=fields)
    p.incrby(mass_key, obj.mass)