Example 1
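A RedisDataSender class that polls a serial data stream at a fixed frequency and writes each channel's readings into RedisTimeSeries keys created in the constructor.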
class RedisDataSender(object):
    def __init__(self, data_stream_object, read_frequency_hz=5):
        self.data_stream_object = data_stream_object
        self.read_frequency_hz = read_frequency_hz
        self.data_channels = self.data_stream_object.return_data_channels()

        # initialize redis connection
        redis_instance = Redis(host="redis_host", port="6379")

        # initialize redis timeseries client connection
        self.rts = Client(conn=redis_instance)
        for data_channel in self.data_channels:
            self.rts.create(data_channel)

    def grab_serial_data(self):
        while True:
            # grab data tuple from line
            tup = self.data_stream_object.read_line()

            # walk through all the data channels
            for (index, data_channel) in enumerate(self.data_channels):
                # pass float into database under correct name
                self.send_to_redis_timeseries(tup[index], data_channel)

            time.sleep(1 / self.read_frequency_hz)  # run at roughly read_frequency_hz

    def send_to_redis_timeseries(self, flt, data_channel):
        self.rts.add(data_channel, "*", flt)
Example 2
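A setup helper that opens a RedisTimeSeries client and creates a single key, treating a RedisError as "key already exists".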
def setup_redis_ts(host='localhost', port=6379, db=0):
    redis_ts = Client(host=host, port=port, db=db)

    try:
        redis_ts.create('status:highcurrentboard:current')
    except RedisError:
        log.debug("KEY 'status:highcurrentboard:current' already exists")
    return redis_ts
Example 3
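The same setup helper generalized to create every key listed in TS_KEYS, logging and skipping keys that already exist.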
def setup_redis_ts(host='localhost', port=6379, db=0):
    redis_ts = Client(host=host, port=port, db=db)

    for key in TS_KEYS:
        try:
            redis_ts.create(key)
        except RedisError:
            getLogger(__name__).debug(f"KEY '{key}' already exists")

    return redis_ts
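The connect / create / tolerate-duplicates pattern above recurs throughout these examples. A minimal standalone sketch of that flow, assuming a local Redis server with the RedisTimeSeries module loaded (the key name and values are illustrative):

from redistimeseries.client import Client
from redis.exceptions import RedisError

rts = Client(host='localhost', port=6379)

try:
    # one day of retention plus a label for later mrange/queryindex filtering
    rts.create('sensor:temperature', retention_msecs=24 * 60 * 60 * 1000,
               labels={'type': 'demo'})
except RedisError:
    pass  # key already exists, e.g. on restart

rts.add('sensor:temperature', '*', 21.5)       # '*' = server-assigned timestamp
print(rts.get('sensor:temperature'))           # latest (timestamp, value) pair
print(rts.range('sensor:temperature', 0, -1))  # full stored history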
Example 4
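A constructor fragment; this is the same initialization shown in Example 1 (Redis connection, RedisTimeSeries client, one create() per data channel).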
    def __init__(self, data_stream_object, read_frequency_hz=5):
        self.data_stream_object = data_stream_object
        self.read_frequency_hz = read_frequency_hz
        self.data_channels = self.data_stream_object.return_data_channels()

        # initialize redis connection
        redis_instance = Redis(host="redis_host", port="6379")

        # initialize redis timeseries client connection
        self.rts = Client(conn=redis_instance)
        for data_channel in self.data_channels:
            self.rts.create(data_channel)
Example 5
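A "virtually private" singleton constructor fragment; the full class appears in Example 9.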
 def __init__(self):
     """ Virtually private constructor. """
     if RedisClient.__instance is not None:
         # print("RedisClient is a singleton!")
         pass
     else:
         self.__ts_client = Client()  # timeseries redis client
         self.__redis_client = redis.Redis()  # general redis client
         # try:
         #     self.__ts_client.create(self.TS_STORE_KEY)
         # except Exception as e:
         #     pass
         RedisClient.__instance = self
Example 6
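The constructor of a RedisTimeSeries wrapper class; the complete class is shown in Example 16.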
    def __init__(
        self,
        config: ConfigContextCommon,
        name: str = "",
        thread: int = 0,
        transitionms: int = 0,
        retentionms: int = 0,
    ) -> None:
        """
        Args:
            config: A config object.
        """

        logname = Path(__file__).stem
        self._logger = logging.getLogger(f"{config.PACKAGE_NAME}.{logname}")
        redis_host = os.environ.get("REDISHOST", "localhost")
        redis_port = int(os.environ.get("REDISPORT", 6379))
        self._rts = RedisTimeSeries(host=redis_host, port=redis_port)
        self._name = name or getattr(config, "name", "A")
        self._thread = thread or getattr(config, "thread", 0)
        self._transitionms = transitionms or getattr(config, "transitionms",
                                                     100)

        self._retentionms = retentionms or getattr(config, "retentionms",
                                                   7 * 24 * 60 * 60 * 1000)

        self._previous_value = 0
Example 7
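A stream handler that stores each event in a per-device sorted set and increments a per-fraud-type time series with the RedisTimeSeries client's incrby().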
 def stream_handler(item):
     data = item['value']
     member = json.dumps({
         'device_id': data['device_id'],
         'transaction_id': data['transaction_id'],
         'ts': data['ts'],
     })
     redis.Redis().zadd(data.get('device_id'), {member: data['ts']})
     Client().incrby(data['fraud_type'], 1)
Example 8
    def testPool(self):
        redis = Redis(port=port)
        client = RedisTimeSeries(conn=redis, port=666)

        name = 'test'
        client.create(name)
        self.assertEqual(None, client.get(name))
        client.add(name, 2, 3)
        self.assertEqual(2, client.get(name)[0])
        info = client.info(name)
        self.assertEqual(1, info.total_samples)
Example 9
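A singleton RedisClient wrapping both a RedisTimeSeries client and a plain redis client, with helpers to insert, query by label (mrange), expire, and delete HTTP-bundle time series, plus hash-based request storage.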
class RedisClient:
    """Constants"""
    TS_STORE_KEY = "nethive"
    __instance = None

    @staticmethod
    def getInstance():
        """ Static access method. """
        if RedisClient.__instance is None:
            RedisClient()
        return RedisClient.__instance

    def __init__(self):
        """ Virtually private constructor. """
        if RedisClient.__instance is not None:
            # print("RedisClient is a singleton!")
            pass
        else:
            self.__ts_client = Client()  # timeseries redis client
            self.__redis_client = redis.Redis()  # general redis client
            # try:
            #     self.__ts_client.create(self.TS_STORE_KEY)
            # except Exception as e:
            #     pass
            RedisClient.__instance = self

    # Timeseries Query

    def ts_insert_http_bundle(self, store_key, package_id, timestamp, value,
                              label):
        self.__ts_client.create(store_key, labels={'type': 'http'})
        return self.__ts_client.add(package_id, timestamp, value, labels=label)

    def ts_get_http_bundles(self, start_time, end_time):
        return self.__ts_client.mrange(start_time,
                                       end_time,
                                       filters=['type=http'],
                                       with_labels=True)
        # return self.__ts_client.info(key)
        # return self.__ts_client.mrange(start_time, end_time)
        # id = self.__ts_client.range(key, start_time, end_time)

    def ts_expire_http_bundle(self, package_id):
        self.__ts_client.alter(package_id, labels={"type": "expired"})
        key = "{}:{}".format(self.TS_STORE_KEY, package_id)
        return self.__ts_client.delete(key)

    # End of Timeseries Query

    # Redis Query

    def store_http_request(self, key, value):
        return self.__redis_client.hmset(key, value)

    def get_http_request(self, key):
        # from_redis = self.__redis_client.hgetall(key)
        # self.__redis_client.delete(key)
        return self.__redis_client.hgetall(key)
Example 10
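A polling loop that migrates a set of registered series (here PlayerCount) and then repeatedly runs them against an Rcon server, using a RedisTimeSeries client built from a shared connection pool.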
def run():
    rcon = Rcon(SERVER_INFO)
    red = Client(connection_pool=get_redis_pool())
    registered_series = [PlayerCount(red)]
    for series in registered_series:
        series.migrate()

    while True:
        for series in registered_series:
            series.run_on_time(rcon)
        time.sleep(LOOP_FREQUENCY_SEC)
Example 11
def get_redis_timeseries_connection(hostname, port, username=USERNAME, password=PASSWORD):
    client_kwargs = {
        "host": hostname,
        "port": port,
        "decode_responses": True
    }
    if password:
        client_kwargs["password"] = password
    if username:
        client_kwargs["username"] = username

    return Client(**client_kwargs)
Example 12
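A command-line generator that creates a labelled 'temperature' series (tolerating an existing key on restart) and appends a noisy cosine value ten times per second.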
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--redis-server",
                        help="redis server address",
                        default="localhost")
    parser.add_argument("--redis-port",
                        help="redis server port",
                        default=6379,
                        type=int)
    args = parser.parse_args()
    print("""
    Starting with:
    redis server: {}
    redis port: {}
    """.format(args.redis_server, args.redis_port))

    rts = RedisTimeSeries(port=args.redis_port, host=args.redis_server)

    try:
        rts.create('temperature',
                   retentionSecs=60 * 24,
                   labels={'sensorId': '2'})
    except Exception as e:
        # will except if key already exists (i.e. on restart)
        print(str(e))

    variance = 0
    t = 0
    while True:
        # add with current timestamp
        print(".", end="")
        variance += (random.random() - 0.5) / 10.0
        t += 1
        value = math.cos(t / 100) + variance
        rts.add('temperature', '*', value)
        sleep(0.1)
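A companion read sketch, assuming the generator above has been left running against the same Redis instance; the 1-second aggregation bucket is illustrative:

from redistimeseries.client import Client

rts = Client(host='localhost', port=6379)

# average the raw ~10 Hz samples into 1-second buckets
samples = rts.range('temperature', from_time=0, to_time=-1,
                    aggregation_type='avg', bucket_size_msec=1000)
for ts, val in samples[:10]:
    print(ts, val)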
Example 13
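An initializer that builds a RedisTimeSeries client from configuration and uses it as the connection for a job Queue (presumably RQ), logging and re-raising on failure.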
 def _init_rts(self):
     try:
         # rts = Client(host=os.environ.get('REDIS_HOST'), port=os.environ.get('REDIS_PORT'))
         self.host = self.config['REDIS_HOST'].get()
         self.port = self.config['REDIS_PORT'].get()
         rts = Client(host=self.host, port=self.port, decode_responses=True)
         q = Queue(connection=rts)
     except Exception:
         logging.warning('Failed to initialize Redis client')
         logging.warning(self.config.keys())
         raise
     # returning from a finally block would swallow the exception raised above,
     # so only return on success
     logging.warning('Redis client initialized')
     return rts, q
Example 14
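A constructor for an averaging rule: it holds per-minute, per-hour, and per-day aggregation slice definitions, a retention window in milliseconds, and last-run/failure counters.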
 def __init__(self,
              client=None,
              min_resolution_ms=30000,
              retention_days=60,
              max_fails=5,
              slices=None):
     self.client = client or Client(connection_pool=get_redis_pool())
     self.slices = slices or {
         'minute': (f'{self.NAME}avgRuleMinute', 'avg', 1000 * 60),
         'hour': (f'{self.NAME}avgRuleHour', 'avg', 1000 * 60 * 60),
         'day': (f'{self.NAME}avgRuleDay', 'avg', 1000 * 60 * 60 * 24)
     }
     self.retention_msecs = 1000 * 60 * 60 * 24 * retention_days
     self.min_resolution_ms = min_resolution_ms / 1000
     self.last_run_time = 0
     self.fails = 0
     self.max_fails = max_fails
Example 15
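A forecasting job: it deletes and recreates the prediction keys, reads the 'Temperature' series with 5-minute average buckets, fits a Prophet model, and writes the forecast components back with madd().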
def predict(host,port,t0):
    connection=Client(host=host,port=port)
    keys=['yhat','yhat_upper','yhat_lower','trend','trend_upper','trend_lower','daily','daily_lower','daily_upper']
    # flush old predictions
    for k in keys:
        try:
            connection.delete(k)
        except Exception:
            logging.warning('Error deleting keys')
            raise
    # create keys for storing the result
    for k in keys:
        try:
            connection.create(k, retention_msecs=7 * DAY)
        except Exception:
            pass  # key may already exist

    # read data from timeseries structure
    data = connection.range("Temperature",
                            from_time=0,
                            to_time=-1,
                            bucket_size_msec=60 * 5,  # timestamps here are in seconds, not milliseconds
                            aggregation_type='avg')
    # clean the data for the algorithm to run
    time,value=zip(*data)
    time=[datetime.fromtimestamp(x) for x in time]
    df=pd.DataFrame(dict(ds=time,y=value))
    m = Prophet(changepoint_prior_scale=0.02,interval_width=.95).fit(df)
    future = m.make_future_dataframe(periods=48, freq='H',include_history = True)
    fcst = m.predict(future)
    fcst=fcst.set_index('ds')
    # send data to redistimeseries struct

    # yhat_upper=fcst['yhat_upper'].values()
    # yhat_lower=fcst['yhat_lower'].values()
    def send(key):
        # forecast index is in nanoseconds since the epoch; convert to seconds
        ts = [int(x.astype('uint64') / 1e9) for x in fcst.index.values]
        yhat = [x.astype(float) for x in fcst[key].values]
        out = [(key, t, v) for t, v in zip(ts, yhat)]
        connection.madd(out)

    for k in keys:
        send(k)
Example 16
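The full RedisTimeSeriesCommon wrapper: key naming by name/thread/slot, create/delete, retried add() calls, queryindex()-based key discovery, and range() results converted to pandas DataFrames.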
class RedisTimeSeriesCommon(object):
    """
    Wrapper class for accessing RedisTimeSeries.

    """
    def __init__(
        self,
        config: ConfigContextCommon,
        name: str = "",
        thread: int = 0,
        transitionms: int = 0,
        retentionms: int = 0,
    ) -> None:
        """
        Args:
            config: A config object.
        """

        logname = Path(__file__).stem
        self._logger = logging.getLogger(f"{config.PACKAGE_NAME}.{logname}")
        redis_host = os.environ.get("REDISHOST", "localhost")
        redis_port = int(os.environ.get("REDISPORT", 6379))
        self._rts = RedisTimeSeries(host=redis_host, port=redis_port)
        self._name = name or getattr(config, "name", "A")
        self._thread = thread or getattr(config, "thread", 0)
        self._transitionms = transitionms or getattr(config, "transitionms",
                                                     100)

        self._retentionms = retentionms or getattr(config, "retentionms",
                                                   7 * 24 * 60 * 60 * 1000)

        self._previous_value = 0

    def create(
        self,
        name: str = "",
        thread: int = 0,
        transitionms: int = 0,
        retentionms: int = 0,
    ) -> None:
        if name:
            self._name = name

        if thread:
            self._thread = thread

        if transitionms:
            self._transitionms = transitionms

        if retentionms:
            self._retentionms = retentionms

        key = f"ts:{self._name}.T:{self._thread:03d}"
        labeld = {"ts": self._name, "T": self._thread}
        self._rts.create(key, retention_msecs=self._retentionms, labels=labeld)

    def delete(self, name: str = "", thread: int = 0) -> None:
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        self._rts.delete(key)

    # slots are created dynamically and every now and then we want to delete
    def delete_slot(self, name: str = "", slot: int = 0) -> None:
        key = f"ts:{name or self._name}.S:{slot:03d}"
        self._rts.delete(key)

    def _add_value(
        self,
        key: str,
        timestampms: Union[int, str],
        value: int,
        labeld: Mapping[str, Any],
    ) -> int:
        i = 0
        while True:
            try:
                timestampms_return = self._rts.add(
                    key,
                    timestampms,
                    value,
                    retention_msecs=self._retentionms,
                    labels=labeld,
                )

                return timestampms_return  # type: ignore
            except ResponseError:  # too quick, delay a bit if using server timestamp
                if i < 5 and timestampms == "*":
                    i += 1
                    time.sleep(0.001)
                else:
                    raise

    def add_value(self,
                  value: int = 0,
                  name: str = "",
                  thread: int = 0) -> int:
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        labeld = {"ts": name or self._name, "T": thread or self._thread}

        if self._transitionms and value != self._previous_value:
            timestampms_return = self._add_value(key, "*",
                                                 self._previous_value, labeld)
            time.sleep(self._transitionms / 1000)
            self._add_value(key, "*", value, labeld)
            self._previous_value = value
            return timestampms_return
        else:
            return self._add_value(key, "*", value, labeld)

    def add_slot_values(self,
                        values: Sequence[int] = [],
                        name: str = "") -> int:
        if not values:
            values = [0]

        keybase = f"ts:{name or self._name}.S:"
        labeld = {"ts": name or self._name, "S": 0}
        timestampms = self._add_value(f"{keybase}000", "*", values[0], labeld)

        for i, value in enumerate(values[1:]):
            j = i + 1
            labeld["S"] = j
            self._add_value(f"{keybase}{j:03d}", timestampms, value, labeld)

        return timestampms

    def get_keytuples_by_names(
            self,
            names: Sequence[str] = [],
            types: Sequence[str] = ["T"]) -> List[Tuple[str, int]]:
        namelist = (",").join(names or [self._name])
        filters = [f"ts=({namelist})"]
        keys = self._rts.queryindex(filters)

        keytuples = []
        for key in keys:
            eles = key.split(".")
            _, name = eles[0].split(":")  # ("ts", <name>)
            mytype, value = eles[1].split(":")  # ("T" or "S", <str number>)
            keytuple = (name, int(value))  # (<name>, <int>)

            if mytype in types:
                keytuples.append(keytuple)

        return keytuples

    def get_threads_by_name(self, name: str = "") -> Tuple[int, ...]:
        keytuples = self.get_keytuples_by_names([name or self._name],
                                                types=["T"])
        names, threads = zip(*keytuples)

        return threads  # discard names

    def get_slots_by_name(self, name: str = "") -> Tuple[int, ...]:
        keytuples = self.get_keytuples_by_names([name or self._name],
                                                types=["S"])
        names, slots = zip(*keytuples)

        return slots  # discard names

    def _get_dataframe(self, key: str, timestampms: int) -> pd.DataFrame:
        datapointts = self._rts.range(key, timestampms, -1)

        if not datapointts:
            return pd.DataFrame()

        dts, values = zip(*datapointts)
        datapointdf = pd.DataFrame({
            "dt": dts,
            key: [float(v) for v in values]
        })
        datapointdf["dt"] = pd.to_datetime(datapointdf.dt, unit="ms")
        return datapointdf.set_index("dt")

    def get_dataframe(self,
                      name: str = "",
                      thread: int = 0,
                      timestampms: int = 0) -> pd.DataFrame:
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        return self._get_dataframe(key, timestampms)

    def get_slot_dataframe(self,
                           name: str = "",
                           slot: int = 0,
                           timestampms: int = 0) -> pd.DataFrame:
        key = f"ts:{name or self._name}.S:{slot:03d}"
        return self._get_dataframe(key, timestampms)
Example 17
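Creation of labelled daily RSI time series for Goldman Sachs (GS) and Caterpillar (CAT).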
##
## Create various Redis TimeSeries for storing stock prices
## and technical indicators
## Author: Prasanna Rajagopal
##
from redistimeseries.client import Client
rts = Client(host='127.0.0.1', port=6379)

#
# TimeSeries for storing Relative Strength Indicator (RSI) for Goldman Sachs
# Symbol: GS
# Daily Values of RSI
#
rts.create('DAILYRSI:GS',
           labels={
               'SYMBOL': 'GS',
               'DESC': 'RELATIVE_STRENGTH_INDEX',
               'INDEX': 'DJIA',
               'TIMEFRAME': '1_DAY',
               'INDICATOR': 'RSI',
               'COMPANYNAME': 'GOLDMAN_SACHS_GROUP'
           })
#
# TimeSeries for storing Relative Strength Indicator (RSI) for Caterpillar
# Symbol: CAT
# Daily Values of RSI
#
rts.create('DAILYRSI:CAT',
           labels={
               'SYMBOL': 'CAT',
               'DESC': 'RELATIVE_STRENGTH_INDEX',
Example 18
 def setUp(self):
     global rts
     rts = RedisTimeSeries(port=port)
     rts.flushdb()
Example 19
##
## Query sample for  various Redis TimeSeries for querying stock prices
## and technical indicators
## Author: Prasanna Rajagopal
##
from redistimeseries.client import Client
from datetime import datetime
from iexfinance.stocks import get_historical_data
import pandas as pd

##
## Connect to Redis TimeSeries
##
rts = Client(host='127.0.0.1', port=6379)

##
## Query the Goldman Sachs range for RSI values
## 15-minute window
## "from_time = 0" indicates from the beginning
## "to_time = -1" indicates until the last value in the time series.
##
dailyRSI15MinRange = rts.range('DAILYRSI15MINRNG:GS', from_time=0, to_time=-1)
##dailyRSI15MinRange = rts.get('DAILYRSI15MINRNG:GS')

print('****************GS RSI RANGE**************************************')
print(dailyRSI15MinRange)
print('****************GS RSI RANGE**************************************')

##
## Query the TimeSeries for Standard Deviation values for Goldman Sachs
## for each 15-minute window.
Example 20
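A Bokeh document callback that reads the 'Temperature' series and its Prophet forecast keys from RedisTimeSeries and renders linked temperature, trend, and seasonality plots with shared x-ranges and hover tooltips.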
def bkapp(doc):
    MSEC = 1
    SEC = 1 * MSEC
    MIN = 60 * SEC
    rts = Client(host=redis_host, port=redis_port)

    def get_ts(name, _min=0, _max=-1):
        y = rts.range('Temperature',
                      _min,
                      _max,
                      aggregation_type='avg',
                      bucket_size_msec=5 * MIN)
        y_time, y_value = zip(*y)
        y_time = [datetime.utcfromtimestamp(int(x)) for x in y_time]
        y_value = [v for v in y_value]
        dataset_y = dict(time=y_time, value=y_value)
        ds = ColumnDataSource(dataset_y)
        return ds

    def get_pred(name, _min=0, _max=-1):
        y = rts.range(name, _min, _max)

        y_time, y_value = zip(*y)
        y_time = [datetime.utcfromtimestamp(int(x)) for x in y_time]
        y_value = [v for v in y_value]
        dataset_y = dict(time=y_time, value=y_value)
        ds = ColumnDataSource(dataset_y)
        return ds

    def get_bounds(upper, lower, _min=0, _max=-1):
        y = rts.range(upper, _min, _max)
        y_time, y_upper = zip(*y)
        y = rts.range(lower, _min, _max)
        _, y_lower = zip(*y)
        y_time = [datetime.utcfromtimestamp(int(x)) for x in y_time]
        y_upper = [v for v in y_upper]
        y_lower = [v for v in y_lower]
        dataset_y = dict(time=y_time, y_upper=y_upper, y_lower=y_lower)
        ds = ColumnDataSource(dataset_y)
        return ds

    def draw_line(p, data, line_color=None, line_width=2, alpha=1):
        output_file("test.html")
        p.line(x='time',
               y='value',
               source=data,
               line_width=line_width,
               line_color=line_color,
               alpha=alpha)
        return p

    # we want to plot 3 axes and have a slider to manipulate the date range
    def make_plot(name,
                  dim_x,
                  dim_y,
                  label_x,
                  label_y,
                  type_x="datetime",
                  x_range=None,
                  y_range=None,
                  tools=None,
                  hover=None):
        if x_range is None:
            p = figure(plot_width=dim_x,
                       plot_height=dim_y,
                       x_axis_label=label_x,
                       y_axis_label=label_y,
                       x_axis_type=type_x,
                       tools=TOOLS)
            p.add_tools(hover)
            return p
        else:
            p = figure(plot_width=dim_x,
                       plot_height=dim_y,
                       x_axis_label=label_x,
                       y_axis_label=label_y,
                       x_axis_type=type_x,
                       x_range=x_range,
                       tools=TOOLS)
            p.add_tools(hover)
            return p

    dim_y = 250
    dim_x = int(5 * dim_y)
    TOOLS = "pan,wheel_zoom,help"
    hover = HoverTool(
        tooltips=[('Date', '@time{%F %H:%M}'), ('Value', '@value')],
        formatters={
            'time': 'datetime',  # use 'datetime' formatter for 'date' field
        },
    )
    p_temperature = make_plot(name="Temperature",
                              dim_x=dim_x,
                              dim_y=dim_y,
                              label_x="Time",
                              label_y="Temperature",
                              tools=TOOLS,
                              hover=hover)
    p_temperature.xaxis.visible = False

    p_trend = make_plot(name='Trend',
                        dim_x=dim_x,
                        dim_y=dim_y,
                        label_x="Time",
                        label_y="Trend",
                        x_range=p_temperature.x_range,
                        tools=TOOLS,
                        hover=hover)
    p_trend.xaxis.visible = False

    p_seasonality = make_plot(name="Daily Seasonality",
                              dim_x=dim_x,
                              dim_y=dim_y,
                              label_x="Time",
                              label_y="Daily Seasonality",
                              x_range=p_trend.x_range,
                              tools=TOOLS,
                              hover=hover)

    def draw_trend(p_trend):
        data = get_pred('trend')
        p_trend = draw_line(p_trend, data, line_color='#2171b5')

        data = get_pred('trend_upper')
        p_trend = draw_line(p_trend,
                            data,
                            line_color='#2171b5',
                            alpha=0.5,
                            line_width=1)

        data = get_pred('trend_lower')
        p_trend = draw_line(p_trend,
                            data,
                            line_color='#2171b5',
                            alpha=0.5,
                            line_width=1)

        bounds = get_bounds('trend_upper', 'trend_lower')
        patch = p_trend.varea(x='time',
                              y1='y_upper',
                              y2='y_lower',
                              source=bounds,
                              fill_alpha=0.2)
        patch.level = 'underlay'
        return p_trend

    def draw_seasonality(p_seasonality):
        data = get_pred('daily')
        return draw_line(p_seasonality, data, line_color='#2171b5')

    def draw_temperature(p_temperature):
        data = get_pred('yhat')
        p_temperature = draw_line(p_temperature, data, line_color='#2171b5')
        data = get_ts('Temperature')
        p_temperature.circle('time',
                             'value',
                             size=2,
                             color="black",
                             alpha=0.25,
                             source=data)

        data = get_pred('yhat_upper')
        p_temperature = draw_line(p_temperature,
                                  data,
                                  line_color='#2171b5',
                                  alpha=0.5,
                                  line_width=1)

        data = get_pred('yhat_lower')
        p_temperature = draw_line(p_temperature,
                                  data,
                                  line_color='#2171b5',
                                  alpha=0.5,
                                  line_width=1)

        bounds = get_bounds('yhat_upper', 'yhat_lower')
        patch = p_temperature.varea(x='time',
                                    y1='y_upper',
                                    y2='y_lower',
                                    source=bounds,
                                    fill_alpha=0.2)
        patch.level = 'underlay'
        return p_temperature

    p_trend = draw_trend(p_trend)
    p_seasonality = draw_seasonality(p_seasonality)
    p_temperature = draw_temperature(p_temperature)

    layout = column(p_temperature, p_trend, p_seasonality)

    doc.add_root(layout)
Example 21
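Module setup for the Bokeh/Flask dashboard: imports, the Flask app, the RedisTimeSeries connection, and the start of the same bkapp callback shown in Example 20.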
from bokeh.plotting import figure, output_file, show, ColumnDataSource
from bokeh.server.server import Server
from bokeh.themes import Theme
from tornado.ioloop import IOLoop
from redistimeseries.client import Client
from bokeh.layouts import column
from bokeh.models import HoverTool
from datetime import datetime
from flask import Flask
import pandas as pd

app = Flask(__name__)
redis_host = 'redis-cluster-ip-service'
redis_port = '6379'
connection = Client(host=redis_host, port=redis_port)


def bkapp(doc):
    MSEC = 1
    SEC = 1 * MSEC
    MIN = 60 * SEC
    rts = Client(host=redis_host, port=redis_port)

    def get_ts(name, _min=0, _max=-1):
        y = rts.range('Temperature',
                      _min,
                      _max,
                      aggregation_type='avg',
                      bucket_size_msec=5 * MIN)
        y_time, y_value = zip(*y)
Example 22
def load_data():

    if environ.get('REDIS_SERVER') is not None:
        redis_server = environ.get('REDIS_SERVER')
    else:
        redis_server = 'localhost'

    if environ.get('REDIS_PORT') is not None:
        redis_port = int(environ.get('REDIS_PORT'))
    else:
        redis_port = 6379

    if environ.get('REDIS_PASSWORD') is not None:
        redis_password = environ.get('REDIS_PASSWORD')
    else:
        redis_password = ''

    rdb = redis.Redis(host=redis_server,
                      port=redis_port,
                      password=redis_password)
    rb = RedisBloom(host=redis_server,
                    port=redis_port,
                    password=redis_password)
    rts = RedisTimeseries(host=redis_server,
                          port=redis_port,
                          password=redis_password)

    with open('./users.csv', encoding='utf-8') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count > 0:
                rdb.hset("user:%s" % (row[0].replace(" ", '')),
                         mapping={
                             'Name': row[0],
                             'AgeDemo': row[1],
                             'IncomeDemo': row[2],
                             'Sex': row[3]
                         })
                rdb.lpush("USERLIST", row[0])
            line_count += 1

    with open('./campaigns.csv', encoding='utf-8') as csv_file:
        rts.create('TOTALREVENUE')
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count > 0:
                rdb.zadd("campaign:%s" % (row[0].replace(" ", '')),
                         {row[2]: row[1]})
                rb.bfCreate(row[2], 0.01, 1000)
                rb.set("counter:%s" % (row[2].replace(" ", '')), row[3])
                rts.create("ADVIEW:%s" % (row[2].replace(" ", '')))
                rb.sadd("AdStats", row[2])
            line_count += 1

    for gear in ['./adgear.py', './adstats.py']:
        file = open(gear, mode='r')
        g = file.read()
        rdb.execute_command('RG.PYEXECUTE', g)
        file.close()
Example 23
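A small demo script that connects to RedisTimeSeries and defines create() and store() helpers for a 'temperature' key with 30-second retention and sensor labels.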
# An example Python client for Redis key-value store using RedisTimeSeries.
from redistimeseries.client import Client as RedisTimeSeries
import time
import sys
import site
import datetime
import random

print(' \n '.join(sys.path))

ts = RedisTimeSeries(host='localhost', port=6379)

#ts.flushdb()

key = 'temperature'

def create(key):

	print('\n Create new time series: %s' % str(key))

	#ts.create(key,retentionSecs=30,labels={'sensor_id' : 2,'area_id' : 32})
	ts.create(key,retention_msecs=30000,labels={'sensor_id' : 2,'area_id' : 32})

	print('')

def store(key, interval):

	print("\n Append new value to time series:\n")

	begin_time = int(time.time())
Example 24
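A Redis Streams consumer: it creates a stream and consumer group (ignoring "already exists" errors), then reads batches of messages with xreadgroup() in a loop; a RedisTimeSeries client is opened alongside the plain connection pool.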
#!/usr/bin/env python

from redistimeseries.client import Client as RedisTimeSeries
import redis
import time

redis_host = "localhost"
redis_port = 6379

rts = RedisTimeSeries(host=redis_host, port=redis_port)

pool = redis.ConnectionPool(host=redis_host, port=redis_port)
r = redis.Redis(connection_pool=pool)

try:
    r.xadd("mystream", {'event_type': 'startup', 'user': '******'})
    r.xgroup_create("mystream", "consumerGroup", '$')
except redis.exceptions.ResponseError:
    print("group already exists")

while True:
    msgs = r.xreadgroup("consumerGroup",
                        "consumerName",
                        streams={"mystream": '>'},
                        count=10,
                        block=1000,
                        noack=False)
    for msg in msgs:
        for m in msg[1]:
            evnt = m[1]['event_type']
            try:
Example 25
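A minimal setup method that returns a RedisTimeSeries Client for the given host, port, and db.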
 def setup_redis_ts(self, host, port, db):
     redis_ts = Client(host, port, db)
     return redis_ts
Example 26
import redis
from redistimeseries.client import Client
import os
import sys
from dotenv import load_dotenv

load_dotenv()

redisClient = redis.from_url(os.getenv('REDIS_URL'), decode_responses=True)
rts = Client(redisClient)


def run(consumer, group='trace_stats_worker', stream='events:trace'):
    """
    Subscribe to Trace events and write to hashes
    """
    print(f'Starting {group}/{consumer} consumer listen on {stream}')
    try:
        redisClient.xgroup_create(stream, group, id='0', mkstream=True)
    except redis.exceptions.ResponseError as error:
        print(error)
        if not str(error) == 'BUSYGROUP Consumer Group name already exists':
            raise error

    if not redisClient.exists('stats:jitter'):
        rts.create('stats:jitter', retention=3600, labels={'type': 'trace'})

    if not redisClient.exists('stats:latency'):
        rts.create('stats:latency', retention=3600, labels={'type': 'trace'})

    while True:
Example 27
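Loading a CSV of (ds, y) rows into a fresh 'test' series, converting each date to a millisecond timestamp before add().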
import pandas as pd
from redistimeseries.client import Client
from datetime import datetime

key = 'test'
rts = Client()
rts.redis.delete(key)
rts.create(key, labels={'source': key})
df = pd.read_csv('samples.csv')
for _, row in df.iterrows():
    d = datetime.strptime(row['ds'], '%Y-%m-%d') 
    millisec = round(d.timestamp()*1000)
    rts.add(key, millisec, row['y'])
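A short read-back sketch, assuming the loader above has run against a local Redis instance:

from redistimeseries.client import Client

rts = Client()
samples = rts.range('test', 0, -1)
print(len(samples), 'samples loaded; first =', samples[0] if samples else None)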
Example 28
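An offline forecasting script: it pulls the 'Temperature' series with 5-minute average buckets, fits Prophet, saves the forecast to CSV, and plots a few days of actuals against the yhat bounds.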
# from numpy import array,dot
# from sklearn.linear_model import LinearRegression
from time import time
from redistimeseries.client import Client
import matplotlib.pyplot as plt
from datetime import datetime
import pandas as pd
from fbprophet import Prophet
MSEC = 1
SEC = 1000 * MSEC
MINUTE = 60 * SEC
rts = Client(host='localhost', port=6379)
# Grab the time series
data = rts.range(
    "Temperature",
    from_time=0,
    to_time=-1,
    bucket_size_msec=60 * 5,  # In Seconds NOT milliseconds
    aggregation_type='avg')
time, value = zip(*data)
time = [datetime.fromtimestamp(x) for x in time]
df = pd.DataFrame(dict(ds=time, y=value))
m = Prophet(changepoint_prior_scale=0.02, interval_width=.95).fit(df)
future = m.make_future_dataframe(periods=48, freq='H')
fcst = m.predict(future)
fcst = fcst.set_index('ds')
fcst.to_csv('forecast.csv')
ax = fcst[['yhat', 'yhat_upper', 'yhat_lower']]['2020-2-1':'2020-2-4'].plot()
df.set_index('ds')['2020-2-1':'2020-2-4'].plot(ax=ax)
plt.savefig('output.png', dpi=120)
Example 29
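Creation of labelled series for Goldman Sachs daily RSI, intraday prices, and a 15-minute RSI range aggregate.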
from redistimeseries.client import Client 
rts = Client(host='127.0.0.1', port=6379)

# Create Time Series and the aggregated time series
rts.create('DAILYRSI:GS',
            labels={
                'SYMBOL': 'GS',
                'DESC': 'RELATIVE_STRENGTH_INDEX',
                'INDEX': 'DJIA',
                'TIMEFRAME': '1_DAY',
                'INDICATOR': 'RSI',
                'COMPANYNAME': 'GOLDMAN_SACHS_GROUP'
            })

rts.create('INTRADAYPRICES:GS',
            labels={
                'SYMBOL': 'GS',
                'DESC': 'SHARE_PRICE',
                'INDEX': 'DJIA',
                'PRICETYPE':'INTRADAY',
                'COMPANYNAME': 'GOLDMAN_SACHS_GROUP'
            })

rts.create('DAILYRSI15MINRNG:GS',
            labels={
                'SYMBOL': 'GS',
                'DESC': 'RELATIVE_STRENGTH_INDEX',
                'INDEX': 'DJIA',
                'TIMEFRAME': '15_MINUTES',
                'AGGREGATION': 'RANGE',
                'INDICATOR': 'RSI',
Example 30
## The API documentation for IEX Cloud can be found here:
## https://iexcloud.io/docs/api/
##

from redistimeseries.client import Client
import time
from datetime import datetime
from iexfinance.stocks import get_historical_data, get_historical_intraday, Stock
import pandas as pd
import requests
import json

##
## make a connection to Redis TimeSeries
##
rts = Client(host='127.0.0.1', port=6379)

## Use the epoch time to convert timestamps to integer values.
dtFmt = '%Y-%m-%d'
epoch = datetime(1970, 1, 1)
intradayPriceList = []

##
## Daily RSI For Goldman Sachs Group
##
dailydtFmt = '%Y-%m-%d %H:%M'
resp = requests.get('https://sandbox.iexapis.com/stable/stock/GS/indicator/rsi?range=1d&token=<Your IEX API Key>')
if resp.status_code != 200:
    # This means something went wrong.
    raise ApiError('GET /tasks/ {}'.format(resp.status_code))
rsiJSON = resp.json()