Example no. 1
0
    def _iterate(self, axis, _hash_key, mult):
        """Return (choice, counter) pairs for *axis* under the current filter.

        For every choice of *axis* (restricted by ``self._filter``) one HGET
        against *_hash_key* is queued on a non-transactional pipeline, so all
        counters are fetched in a single round-trip.  Stored integers are
        divided by *mult* to undo the storage multiplier.
        """
        choices = self.metrica.choices(axis, self._filter.get(axis, None))
        pipe = redis.pipeline(transaction=False)

        for choice in choices:
            # Narrow the filter to this particular choice of the axis.
            narrowed = dict(self._filter, **{axis: choice})
            pipe.hget(_hash_key, self.metrica.hash_field_id(**narrowed))

        raw_counters = pipe.execute()
        scaled = [int(counter or 0) / mult for counter in raw_counters]
        return zip(choices, scaled)
Example no. 2
0
    def _iterate_on_dateaxis(self, _hash_key_postfix, mult):
        """Return (date-key, counter) pairs along the metric's date axis.

        Each date-axis point maps to its own redis hash; one HGET per point
        is queued on a non-transactional pipeline and executed in a single
        round-trip.  Raw integers are divided by *mult* to undo the storage
        multiplier.
        """
        prefix = self.metrica.key_prefix()
        pipe = redis.pipeline(transaction=False)
        collected = []

        for date_key, tp_id in self.metrica.date_axis.iterate(self):
            collected.append(date_key)
            pipe.hget(u'%s:%s' % (prefix, tp_id) + _hash_key_postfix,
                      self._hash_field_id)

        raw_counters = pipe.execute()
        return zip(collected,
                   [int(counter or 0) / mult for counter in raw_counters])
Example no. 3
0
    def kick(self, value=1, date=None, **kwargs):
        """Registers an event with parameters (for each of axis).

        :param value: event weight; multiplied by ``self.multiplier`` and
            truncated to int before being added to the counters.
        :param date: event timestamp; defaults to ``datetime.datetime.now()``.
        :param kwargs: one value per configured axis keyword.
        :raises TypeError: if any keyword does not match a configured axis.
        """
        date = date or datetime.datetime.now()
        # Counters are stored as scaled integers.
        value = int(self.multiplier * value)

        hash_key_prefix = self.key_prefix()

        choices_sets_to_append = []
        hash_field_id_parts = []

        for axis_kw, axis in self.axes:
            param_value = kwargs.pop(axis_kw, None)

            hash_field_id_parts.append(
                list(axis.get_field_id_parts(param_value))
                )

            try:
                if axis.store_choice:
                    set_key = '__choices__:%s' % axis_kw
                    choices_sets_to_append.append((set_key, param_value))
            except AttributeError: # 'duck typing': axis may lack store_choice
                pass

        if kwargs:
            raise TypeError("Invalid kwargs left: %s" % kwargs)

        # BUGFIX: materialize as a list.  Under Python 3, filter() returns a
        # lazy iterator which has no .append(), so the date_scale.store branch
        # below would raise AttributeError.
        choices_sets_to_append = [pair for pair in choices_sets_to_append
                                  if pair]

        # Here we go: bumping all counters out there
        pipe = redis.pipeline(transaction=False)

        for date_scale in self.date_axis.scales(date):
            hash_key = '%s:%s' % (hash_key_prefix, date_scale.id)

            # One counter per combination of axis field-id parts.
            for parts in itertools.product(*hash_field_id_parts):
                hash_field_id = ':'.join(parts)

                self._increment(pipe, hash_key, hash_field_id, value)

            if date_scale.expiration:
                pipe.expire(hash_key, date_scale.expiration)

            if date_scale.store:
                choices_sets_to_append.append((date_scale.store, date_scale.value))

        # Remember every observed choice so reports can enumerate them later.
        for key, s_value in choices_sets_to_append:
            pipe.sadd('%s:%s' % (hash_key_prefix, key),
                      s_value)

        pipe.execute()
Example no. 4
0
    def _iterate(self, axis, _hash_key, mult):
        """Return (choice, counter) pairs for every choice of *axis*.

        Queues one HGET per axis choice on a non-transactional pipeline
        (single network round-trip) and scales each stored integer down
        by *mult*.
        """
        axis_choices = self.metrica.choices(axis)

        pipe = redis.pipeline(transaction=False)

        for axis_choice in axis_choices:
            # Current filter, specialized to this choice of the axis.
            specialized = dict(self._filter, **{axis: axis_choice})
            pipe.hget(_hash_key, self.metrica.hash_field_id(**specialized))

        stored = pipe.execute()

        return zip(axis_choices,
                   [int(item or 0) / mult for item in stored])
Example no. 5
0
    def kick(self, value=1, date=None, **kwargs):
        """Registers an event with parameters (for each of axis).

        :param value: event weight; scaled by ``self.multiplier`` and
            truncated to int before incrementing the counters.
        :param date: event timestamp; defaults to ``datetime.datetime.now()``.
        :param kwargs: one value per configured axis keyword.
        :raises TypeError: if any keyword does not match a configured axis.
        """
        date = date or datetime.datetime.now()
        # Counters are stored as scaled integers.
        value = int(self.multiplier * value)

        hash_key_prefix = self.key_prefix()

        choices_sets_to_append = []
        hash_field_id_parts = []

        for axis_kw, axis in self.axes:
            param_value = kwargs.pop(axis_kw, None)

            hash_field_id_parts.append(
                list(axis.get_field_id_parts(param_value)))

            try:
                if axis.store_choice:
                    set_key = '__choices__:%s' % axis_kw
                    choices_sets_to_append.append((set_key, param_value))
            except AttributeError:  # 'duck typing': axis may lack store_choice
                pass

        if kwargs:
            raise TypeError("Invalid kwargs left: %s" % kwargs)

        # BUGFIX: keep this a list.  On Python 3 filter() yields an iterator,
        # which has no .append() — the date_scale.store branch below would
        # crash with AttributeError.
        choices_sets_to_append = [pair for pair in choices_sets_to_append
                                  if pair]

        # Here we go: bumping all counters out there
        pipe = redis.pipeline(transaction=False)

        for date_scale in self.date_axis.scales(date):
            hash_key = '%s:%s' % (hash_key_prefix, date_scale.id)

            # One counter per combination of axis field-id parts.
            for parts in itertools.product(*hash_field_id_parts):
                hash_field_id = ':'.join(parts)

                self._increment(pipe, hash_key, hash_field_id, value)

            if date_scale.expiration:
                pipe.expire(hash_key, date_scale.expiration)

            if date_scale.store:
                choices_sets_to_append.append(
                    (date_scale.store, date_scale.value))

        # Remember every observed choice so reports can enumerate them later.
        for key, s_value in choices_sets_to_append:
            pipe.sadd('%s:%s' % (hash_key_prefix, key), s_value)

        pipe.execute()
Example no. 6
0
    def timeserie(self, since, until, scale=None, _hash_key_postfix='', _mult=None):
        """Return (point, counter) pairs between *since* and *until*.

        Each time-series point maps to its own redis hash; one HGET per
        point is queued on a non-transactional pipeline and fetched in a
        single round-trip.  Stored integers are divided by the multiplier
        (*_mult* if given, else the metric's own).
        """
        divisor = _mult or self.metrica.multiplier
        prefix = self.metrica.key_prefix()
        points = []
        pipe = redis.pipeline(transaction=False)

        for point, tp_id in self.metrica.date_axis.timeserie(since, until, scale):
            points.append(point)
            pipe.hget(u'%s:%s%s' % (prefix, tp_id, _hash_key_postfix),
                      self._hash_field_id)

        raw_counters = pipe.execute()
        return zip(points,
                   [int(counter or 0) / divisor for counter in raw_counters])
Example no. 7
0
    def _iterate_on_dateaxis(self, _hash_key_postfix, mult):
        """Return (date-key, counter) pairs along the metric's date axis.

        One HGET per date-axis point is queued on a non-transactional
        pipeline (single round-trip); each stored integer is divided by
        *mult* to undo the storage multiplier.
        """
        prefix = self.metrica.key_prefix()
        pipe = redis.pipeline(transaction=False)
        date_keys = []

        for date_key, tp_id in self.metrica.date_axis.iterate(self):
            date_keys.append(date_key)
            pipe.hget('%s:%s' % (prefix, tp_id) + _hash_key_postfix,
                      self._hash_field_id)

        raw_counters = pipe.execute()
        scaled = [int(counter or 0) / mult for counter in raw_counters]
        return zip(date_keys, scaled)
Example no. 8
0
    def timeserie(self,
                  since,
                  until,
                  scale=None,
                  _hash_key_postfix='',
                  _mult=None):
        """Return (point, counter) pairs between *since* and *until*.

        Queues one HGET per time-series point on a non-transactional
        pipeline so all counters arrive in a single round-trip, then
        divides each stored integer by the multiplier (*_mult* if given,
        otherwise the metric's own).
        """
        divisor = _mult or self.metrica.multiplier
        prefix = self.metrica.key_prefix()

        pipe = redis.pipeline(transaction=False)
        points = []

        for point, tp_id in self.metrica.date_axis.timeserie(since, until,
                                                             scale):
            points.append(point)
            pipe.hget('%s:%s%s' % (prefix, tp_id, _hash_key_postfix),
                      self._hash_field_id)

        raw_counters = pipe.execute()
        scaled = [int(counter or 0) / divisor for counter in raw_counters]

        return zip(points, scaled)