Example #1
def get_client(conf, scripts=None):
    if redis is None:
        raise RuntimeError("Redis Python module is unavailable")
    parsed_url = parse.urlparse(conf.redis_url)
    options = parse.parse_qs(parsed_url.query)

    kwargs = {}
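    # Connect over TCP when the URL carries a hostname; otherwise fall back
    # to a unix domain socket path.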
    if parsed_url.hostname:
        kwargs['host'] = parsed_url.hostname
        if parsed_url.port:
            kwargs['port'] = parsed_url.port
    else:
        if not parsed_url.path:
            raise ValueError("Expected socket path in parsed urls path")
        kwargs['unix_socket_path'] = parsed_url.path
    if parsed_url.password:
        kwargs['password'] = parsed_url.password

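    # parse_qs returns each option as a list of strings; coerce recognized
    # options to the bool/list/int values the client expects, keeping the
    # last value supplied for scalar options.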
    for a in CLIENT_ARGS:
        if a not in options:
            continue
        if a in CLIENT_BOOL_ARGS:
            v = utils.strtobool(options[a][-1])
        elif a in CLIENT_LIST_ARGS:
            v = options[a]
        elif a in CLIENT_INT_ARGS:
            v = int(options[a][-1])
        else:
            v = options[a][-1]
        kwargs[a] = v

    # Ask the sentinel for the current master if there is a
    # sentinel arg.
    if 'sentinel' in kwargs:
        sentinel_hosts = [
            tuple(fallback.split(':'))
            for fallback in kwargs.get('sentinel_fallback', [])
        ]
        sentinel_hosts.insert(0, (kwargs['host'], kwargs['port']))
        sentinel_server = sentinel.Sentinel(
            sentinel_hosts,
            socket_timeout=kwargs.get('socket_timeout'))
        sentinel_name = kwargs['sentinel']
        del kwargs['sentinel']
        if 'sentinel_fallback' in kwargs:
            del kwargs['sentinel_fallback']
        master_client = sentinel_server.master_for(sentinel_name, **kwargs)
        # The master_client is a redis.StrictRedis using a
        # Sentinel managed connection pool.
        return master_client

    client = redis.StrictRedis(**kwargs)

    if scripts is not None:
        scripts = {
            name: client.register_script(code)
            for name, code in six.iteritems(scripts)
        }

    return client, scripts
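
A hypothetical way to call this function is sketched below. The FakeConf namedtuple, the localhost URL, and the Lua ping script are illustrative assumptions; running the sketch also requires the module-level names used above (redis, parse, six, utils, and the CLIENT_*_ARGS constants) plus a reachable Redis server.

import collections

# Hypothetical stand-in for the real conf object: get_client only reads
# the redis_url attribute.
FakeConf = collections.namedtuple('FakeConf', ['redis_url'])
conf = FakeConf(redis_url='redis://localhost:6379')

# Illustrative Lua script to register alongside the client.
lua_scripts = {'ping': "return redis.call('PING')"}

client, registered = get_client(conf, scripts=lua_scripts)
print(client.ping())           # plain command on the StrictRedis client
print(registered['ping']())    # invoke the registered Script object
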
Example #2
    def aggregate_data(data,
                       func,
                       window,
                       min_grain,
                       center=False,
                       min_size=1):
        """Calculates moving func of data with sampling width of window.

        :param data: Series of timestamp, value pairs
        :param func: the function to use when aggregating
        :param window: (float) range of data to use in each aggregation.
        :param min_grain: granularity of the data being passed in.
        :param center: whether to index the aggregated values by the first
            timestamp of the values picked up by the window or by the central
            timestamp.
        :param min_size: if the number of points in the window is less than
            min_size, the aggregate is not computed and nan is returned for
            that iteration.
        """

        if center:
            center = utils.strtobool(center)

        def moving_window(x):
            msec = datetime.timedelta(milliseconds=1)
            zero = datetime.timedelta(seconds=0)
            half_span = datetime.timedelta(seconds=window / 2)
            start = timeutils.normalize_time(data.index[0])
            stop = timeutils.normalize_time(data.index[-1] +
                                            datetime.timedelta(
                                                seconds=min_grain))
            # min_grain addition necessary since each bin of rolled-up data
            # is indexed by leftmost timestamp of bin.

            left = half_span if center else zero
            right = 2 * half_span - left - msec
            # msec subtraction is so we don't include right endpoint in slice.

            x = timeutils.normalize_time(x)

            if x - left >= start and x + right <= stop:
                dslice = data[x - left:x + right]

                if center and dslice.size % 2 == 0:
                    return func([
                        func(data[x - msec - left:x - msec + right]),
                        func(data[x + msec - left:x + msec + right])
                    ])

                # (NOTE) atmalagon: the msec shift here is so that we have two
                # consecutive windows; one centered at time x - msec,
                # and one centered at time x + msec. We then average the
                # aggregates from the two windows; this result is centered
                # at time x. Doing this double average is a way to return a
                # centered average indexed by a timestamp that existed in
                # the input data (which wouldn't be the case for an even number
                # of points if we did only one centered average).

            else:
                return numpy.nan
            if dslice.size < min_size:
                return numpy.nan
            return func(dslice)

        try:
            result = pandas.Series(data.index).apply(moving_window)

            # change from integer index to timestamp index
            result.index = data.index

            return [(t, window, r)
                    for t, r in six.iteritems(result[~result.isnull()])]
        except Exception as e:
            raise aggregates.CustomAggFailure(str(e))
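
A hypothetical invocation is sketched below; the six-point series, the 120-second window, and the choice of numpy.mean are made-up inputs, and aggregate_data is assumed to be reachable as a plain function (for example as a staticmethod on its enclosing class), with pandas, numpy, six, timeutils, and the surrounding utils/aggregates helpers importable.

import numpy
import pandas

# Six evenly spaced samples at a 60-second grain (values are made up).
index = pandas.date_range('2015-01-01', periods=6, freq='60S')
series = pandas.Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], index=index)

# Moving mean over a 120-second window; each returned tuple is keyed by the
# leftmost timestamp of its window.
result = aggregate_data(series, numpy.mean, window=120.0,
                        min_grain=60.0, center=False, min_size=1)
for timestamp, span, value in result:
    print(timestamp, span, value)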