class RedisDataSender(object):
    """Polls a data-stream object and forwards each channel's value into
    RedisTimeSeries at a fixed frequency.

    The stream object must provide ``return_data_channels()`` (channel names,
    one per tuple position) and ``read_line()`` (one tuple of samples).
    """

    def __init__(self, data_stream_object, read_frequency_hz=5):
        self.data_stream_object = data_stream_object
        self.read_frequency_hz = read_frequency_hz
        self.data_channels = self.data_stream_object.return_data_channels()
        # initialize redis connection
        # FIX: port must be an int, not the string "6379"
        redis_instance = Redis(host="redis_host", port=6379)
        # initialize redis timeseries client connection
        self.rts = Client(conn=redis_instance)
        # one time-series key per data channel
        for data_channel in self.data_channels:
            self.rts.create(data_channel)

    def grab_serial_data(self):
        """Loop forever, fanning each tuple element out to its matching key.

        NOTE(review): the sleep ignores processing time, so the effective
        rate is slightly below ``read_frequency_hz``.
        """
        while True:
            # grab data tuple from line
            tup = self.data_stream_object.read_line()
            # walk through all the data channels
            for index, data_channel in enumerate(self.data_channels):
                # pass float into database under correct name
                self.send_to_redis_timeseries(tup[index], data_channel)
            time.sleep(1 / self.read_frequency_hz)  # should operate

    def send_to_redis_timeseries(self, flt, data_channel):
        """Add one sample using the server-side timestamp ("*")."""
        self.rts.add(data_channel, "*", flt)
class RedisClient:
    """Singleton facade over a RedisTimeSeries client and a plain Redis client.

    Obtain the shared instance via ``RedisClient.getInstance()``.
    """

    # Constants
    TS_STORE_KEY = "nethive"

    __instance = None

    @staticmethod
    def getInstance():
        """ Static access method. """
        # FIX: identity comparison with None uses 'is', not '=='
        if RedisClient.__instance is None:
            RedisClient()
        return RedisClient.__instance

    def __init__(self):
        """ Virtually private constructor. """
        if RedisClient.__instance is not None:
            # already constructed: silently ignore (singleton contract)
            pass
        else:
            self.__ts_client = Client()  # timeseries redis client
            self.__redis_client = redis.Redis()  # general redis client
            RedisClient.__instance = self

    # Timeseries Query
    def ts_insert_http_bundle(self, store_key, package_id, timestamp, value, label):
        """Create store_key (labeled type=http) and add one sample under package_id.

        NOTE(review): create() raises if store_key already exists — confirm
        callers only insert each store_key once.
        """
        self.__ts_client.create(store_key, labels={'type': 'http'})
        return self.__ts_client.add(package_id, timestamp, value, labels=label)

    def ts_get_http_bundles(self, start_time, end_time):
        """Return all samples labeled type=http in [start_time, end_time]."""
        return self.__ts_client.mrange(start_time, end_time, filters=['type=http'], with_labels=True)

    def ts_expire_http_bundle(self, package_id):
        """Relabel the bundle as expired, then delete its derived store key."""
        self.__ts_client.alter(package_id, labels={"type": "expired"})
        key = "{}:{}".format(self.TS_STORE_KEY, package_id)
        return self.__ts_client.delete(key)
    # End of Timeseries Query

    # Redis Query
    def store_http_request(self, key, value):
        """Store a request hash.

        NOTE(review): hmset is deprecated in redis-py; hset(key, mapping=value)
        is the modern equivalent — confirm installed client version before switching.
        """
        return self.__redis_client.hmset(key, value)

    def get_http_request(self, key):
        """Fetch the stored request hash for *key*."""
        return self.__redis_client.hgetall(key)
def setup_redis_ts(host='localhost', port=6379, db=0):
    """Connect to RedisTimeSeries and ensure the high-current-board key exists.

    Returns the connected client; an already-existing key is logged and ignored.
    """
    client = Client(host=host, port=port, db=db)
    try:
        client.create('status:highcurrentboard:current')
    except RedisError:
        # key was created on a previous run — nothing to do
        log.debug(f"KEY 'status:highcurrentboard:current' already exists")
    return client
def setup_redis_ts(host='localhost', port=6379, db=0):
    """Build a RedisTimeSeries client and make sure every key in TS_KEYS exists.

    Keys that already exist are logged at debug level and left untouched.
    """
    connection = Client(host=host, port=port, db=db)
    for key in TS_KEYS:
        try:
            connection.create(key)
        except RedisError:
            # created on an earlier run — safe to ignore
            getLogger(__name__).debug(f"KEY '{key}' already exists")
    return connection
def testPool(self):
    """A client built from an explicit Redis connection must use that
    connection and ignore its own (bogus) port keyword."""
    conn = Redis(port=port)
    client = RedisTimeSeries(conn=conn, port=666)
    key = 'test'
    client.create(key)
    # empty series has no last sample
    self.assertEqual(None, client.get(key))
    client.add(key, 2, 3)
    # last sample timestamp is the one we inserted
    self.assertEqual(2, client.get(key)[0])
    info = client.info(key)
    self.assertEqual(1, info.total_samples)
def predict(host, port, t0):
    """Fit a Prophet model on the stored 'Temperature' series and write the
    48-hour forecast components back into RedisTimeSeries, one key per column.

    Args:
        host: Redis server host.
        port: Redis server port.
        t0: unused here; kept for interface compatibility with callers.
    """
    connection = Client(host=host, port=port)
    keys = ['yhat', 'yhat_upper', 'yhat_lower', 'trend', 'trend_upper',
            'trend_lower', 'daily', 'daily_lower', 'daily_upper']
    # flush old predictions
    for k in keys:
        try:
            connection.delete(k)
        except Exception:
            # FIX: was a bare except; log then propagate (the old trailing
            # 'pass' after 'raise' was unreachable and has been dropped)
            logging.warning('Error Deleting keys')
            raise
    # create keys for storing the result; "already exists" errors are expected
    for k in keys:
        try:
            connection.create(k, retention_msecs=7 * DAY)
        except Exception:
            pass
    # read data from timeseries structure
    data = connection.range("Temperature",
                            from_time=0,
                            to_time=-1,
                            bucket_size_msec=60 * 5,  # In Seconds NOT milliseconds
                            aggregation_type='avg')
    # clean the data for the algorithm to run
    # FIX: renamed locals so the 'time' module/name is no longer shadowed
    sample_times, sample_values = zip(*data)
    sample_times = [datetime.fromtimestamp(x) for x in sample_times]
    df = pd.DataFrame(dict(ds=sample_times, y=sample_values))
    m = Prophet(changepoint_prior_scale=0.02, interval_width=.95).fit(df)
    future = m.make_future_dataframe(periods=48, freq='H', include_history=True)
    fcst = m.predict(future)
    fcst = fcst.set_index('ds')

    # send data to redistimeseries struct
    def send(key):
        """Bulk-insert one forecast column as (key, timestamp, value) triples."""
        stamps = fcst.index.values
        # numpy datetime64[ns] -> unix seconds
        stamps = [int(x.astype('uint64') / 1e9) for x in stamps]
        column = fcst[key].values
        column = [x.astype(float) for x in column]
        out = [(key, t, v) for t, v in zip(stamps, column)]
        connection.madd(out)

    for k in keys:
        send(k)
def main():
    """Continuously push a noisy cosine 'temperature' signal into RedisTimeSeries."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--redis-server", help="redis server address", default="localhost")
    parser.add_argument("--redis-port", help="redis server port", default=6379, type=int)
    args = parser.parse_args()

    print("""
    Starting with:
    redis server: {}
    redis port: {}
    """.format(args.redis_server, args.redis_port))

    rts = RedisTimeSeries(port=args.redis_port, host=args.redis_server)
    try:
        rts.create('temperature', retentionSecs=60 * 24, labels={'sensorId': '2'})
    except Exception as e:
        # will except if key already exists (i.e. on restart)
        print(str(e))

    variance = 0
    t = 0
    while True:
        # add with current timestamp
        print(".", end="")
        variance += (random.random() - 0.5) / 10.0
        t += 1
        rts.add('temperature', '*', math.cos(t / 100) + variance)
        sleep(0.1)
def load_data():
    """Bootstrap Redis for the filtering demo: create staged/final time series,
    wire downsampling rules, load the RedisGears script, and optionally seed
    a Scrabble-word bloom filter.

    Connection settings come from the REDIS_SERVER / REDIS_PORT /
    REDIS_PASSWORD environment variables, defaulting to a local,
    unauthenticated server.
    """
    # FIX: environ.get with a default collapses the original if/else ladders
    redis_server = environ.get('REDIS_SERVER', 'localhost')
    redis_port = int(environ.get('REDIS_PORT', 6379))
    redis_password = environ.get('REDIS_PASSWORD', '')

    rdb = redis.Redis(host=redis_server, port=redis_port, password=redis_password)
    rb = RedisBloom(host=redis_server, port=redis_port, password=redis_password)
    rts = RedisTimeseries(host=redis_server, port=redis_port, password=redis_password)

    rdb.set("CONFIG", "YES")
    # short-retention staging series roll up into long-retention 'Final' series
    rts.create('s-unfiltered', retention_ms=60000)
    rts.create('s-filtered', retention_ms=60000)
    rts.create('unfiltered', labels={'Type': 'Final'}, retention_ms=86400000)
    rts.create('filtered', labels={'Type': 'Final'}, retention_ms=86400000)
    rts.createrule('s-unfiltered', 'unfiltered', 'last', 1000)
    rts.createrule('s-filtered', 'filtered', 'last', 1000)

    for gear in ['./dedup.py']:
        # FIX: 'with' guarantees the handle is closed even if PYEXECUTE raises
        with open(gear, mode='r') as gear_file:
            rdb.execute_command('RG.PYEXECUTE', gear_file.read())

    if environ.get('REDIS_SCRABBLE') is not None:
        for line in fileinput.input("2019_Collins_Scrabble_Words.txt"):
            rb.bfAdd("Scrabble-Bloom", line.rstrip())
def load_data():
    """Seed demo data for the ad-stats example: users and campaigns from CSV,
    per-ad time series and bloom filters, plus RedisGears scripts.

    Connection settings come from the REDIS_SERVER / REDIS_PORT /
    REDIS_PASSWORD environment variables, defaulting to a local,
    unauthenticated server.
    """
    # FIX: environ.get with a default collapses the original if/else ladders
    redis_server = environ.get('REDIS_SERVER', 'localhost')
    redis_port = int(environ.get('REDIS_PORT', 6379))
    redis_password = environ.get('REDIS_PASSWORD', '')

    rdb = redis.Redis(host=redis_server, port=redis_port, password=redis_password)
    rb = RedisBloom(host=redis_server, port=redis_port, password=redis_password)
    rts = RedisTimeseries(host=redis_server, port=redis_port, password=redis_password)

    with open('./users.csv', encoding='utf-8') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count > 0:  # skip the header row
                rdb.hset("user:%s" % (row[0].replace(" ", '')), mapping={
                    'Name': row[0],
                    'AgeDemo': row[1],
                    'IncomeDemo': row[2],
                    'Sex': row[3]
                })
                rdb.lpush("USERLIST", row[0])
            line_count += 1

    with open('./campaigns.csv', encoding='utf-8') as csv_file:
        rts.create('TOTALREVENUE')
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count > 0:  # skip the header row
                rdb.zadd("campaign:%s" % (row[0].replace(" ", '')), {row[2]: row[1]})
                rb.bfCreate(row[2], 0.01, 1000)
                rb.set("counter:%s" % (row[2].replace(" ", '')), row[3])
                rts.create("ADVIEW:%s" % (row[2].replace(" ", '')))
                rb.sadd("AdStats", row[2])
            line_count += 1

    for gear in ['./adgear.py', './adstats.py']:
        # FIX: 'with' guarantees the handle is closed even if PYEXECUTE raises
        with open(gear, mode='r') as gear_file:
            rdb.execute_command('RG.PYEXECUTE', gear_file.read())
# Consume events from a Redis stream and count them per minute in RedisTimeSeries.
pool = redis.ConnectionPool(host=redis_host, port=redis_port)
r = redis.Redis(connection_pool=pool)
try:
    r.xadd("mystream", {'event_type': 'startup', 'user': '******'})
    r.xgroup_create("mystream", "consumerGroup", '$')
except Exception:
    # FIX: was a bare 'except:' — at minimum avoid catching SystemExit/KeyboardInterrupt
    print("group already exists")
while True:
    msgs = r.xreadgroup("consumerGroup", "consumerName",
                        streams={"mystream": '>'}, count=10, block=1000, noack=False)
    for msg in msgs:
        for m in msg[1]:
            evnt = m[1]['event_type']
            try:
                # probe: raises when the series for this event type doesn't exist yet
                rts.info(evnt)
            except Exception:
                # FIX: was a bare 'except:' — first sighting, create the
                # raw series plus a per-minute count rollup
                rts.create(evnt, retentionSecs=60, labels={'event_type': evnt})
                rts.create(evnt + "_minute", retentionSecs=0, labels={'event_type': evnt})
                rts.createrule(evnt, evnt + "_minute", 'count', 60)
            rts.incrby(evnt, 1)
import pandas as pd
from redistimeseries.client import Client
from datetime import datetime

# Load samples.csv into a fresh RedisTimeSeries key, one sample per row.
key = 'test'
rts = Client()
# start from a clean slate
rts.redis.delete(key)
rts.create(key, labels={'source': key})

df = pd.read_csv('samples.csv')
for _, row in df.iterrows():
    stamp = datetime.strptime(row['ds'], '%Y-%m-%d')
    # timestamps are stored as milliseconds since the epoch
    rts.add(key, round(stamp.timestamp() * 1000), row['y'])
import json from user import return_data import time from datetime import datetime from redistimeseries.client import Client rts = Client(host='127.0.0.1', port=6379) company_symbol = "IBM" data = return_data("IBM") rts.create('DAILYOPEN:IBM', labels={ 'SYMBOL': 'IBM', 'DESC': 'OPEN', 'TIMEFRAME': '1_DAY', 'COMPANYNAME': 'IBM' }) rts.create('DAILYHIGH:IBM', labels={ 'SYMBOL': 'IBM', 'DESC': 'HIGH', 'TIMEFRAME': '1_DAY', 'COMPANYNAME': 'IBM' }) rts.create('DAILYLOW:IBM', labels={ 'SYMBOL': 'IBM', 'DESC': 'LOW', 'TIMEFRAME': '1_DAY',
from redistimeseries.client import Client
from redis.exceptions import ResponseError
from random import randint
import argparse
from time import time, sleep

# Insert one random sample per minute into a user-specified time-series key.
parser = argparse.ArgumentParser()
parser.add_argument("key", help="Time Series Key", type=str)
parser.add_argument("retention", help="Time series retention periods in ms", type=int)
args = parser.parse_args()

rts = Client(host='localhost', port=6379)
try:
    rts.create(args.key, retention_msecs=args.retention)
except ResponseError as e:
    # key already exists — report and keep going
    print(str(e))

if __name__ == "__main__":
    while True:
        sample = randint(0, 10)
        stamp = rts.add(args.key, value=sample, timestamp=int(time()))
        print('INSERT: ({},{}) AT {}, SUCCESS'.format(args.key, sample, stamp))
        sleep(60)
import redis from redistimeseries.client import Client as RedisTimeSeries # Formulas to perform transforms on messages import transformation_list as transforms log = logging.getLogger() # log.setLevel("INFO") # Setup Redis and events of interest to track r = redis.Redis(host="localhost", port=6379, db=0) rts = RedisTimeSeries() if not r.exists("vehicle_speed"): rts.create("vehicle_speed", retention_msecs=300000, labels={"Time": "Series"}) if not r.exists("throttle_position"): rts.create("throttle_position", retention_msecs=300000, labels={"Time": "Series"}) if not r.exists("ambient_air_temperature"): rts.create("ambient_air_temperature", retention_msecs=300000, labels={"Time": "Series"}) # Track all messages on extract queue to only send every 1 second msg_time_last_sent = {} def ts_val(rts, key, from_time, to_time, agg):
temperature_value, temperature_time = zip( *[[x.value, parse_time(x.time)] for x in temperature]) return temperature_time, temperature_value #Initialize Redis Client try: #rts = Client(host=os.environ.get('REDIS_HOST'),port=os.environ.get('REDIS_PORT')) rts = Client(host='localhost', port=6379) except: logging.warning('Could not connect to redis server') key = 'Temperature' # Create a key if it doesnt exists try: rts.create(key, retention_msecs=DAY) except ResponseError as e: s = str(e) print(s) pass while (True): try: temperature_time, temperature_value = pull() except ResponseError: logging.warning('Failed to Pull data') out = [(key, time, value) for time, value in zip(temperature_time, temperature_value)] t = rts.madd(out) logging.warning('key inserted with last timestamp: {0}'.format(t[-1])) sleep(60 * 2)
from redistimeseries.client import Client rts = Client(host='127.0.0.1', port=6379) # Create Time Series and the aggregated time series rts.create('DAILYRSI:GS', labels={ 'SYMBOL': 'GS', 'DESC': 'RELATIVE_STRENGTH_INDEX', 'INDEX': 'DJIA', 'TIMEFRAME': '1_DAY', 'INDICATOR': 'RSI', 'COMPANYNAME': 'GOLDMAN_SACHS_GROUP' }) rts.create('INTRADAYPRICES:GS', labels={ 'SYMBOL': 'GS', 'DESC': 'SHARE_PRICE', 'INDEX': 'DJIA', 'PRICETYPE':'INTRADAY', 'COMPANYNAME': 'GOLDMAN_SACHS_GROUP' }) rts.create('DAILYRSI15MINRNG:GS', labels={ 'SYMBOL': 'GS', 'DESC': 'RELATIVE_STRENGTH_INDEX', 'INDEX': 'DJIA', 'TIMEFRAME': '15_MINUTES', 'AGGREGATION': 'RANGE', 'INDICATOR': 'RSI',
class RedisTimeSeriesCommon(object):
    """ Wrapper class for accessing RedisTimeSeries. """

    def __init__(
        self,
        config: ConfigContextCommon,
        name: str = "",
        thread: int = 0,
        transitionms: int = 0,
        retentionms: int = 0,
    ) -> None:
        """
        Args:
            config: A config object.
        """
        logname = Path(__file__).stem
        self._logger = logging.getLogger(f"{config.PACKAGE_NAME}.{logname}")
        # connection settings come from the environment, defaulting to local
        redis_host = os.environ.get("REDISHOST", "localhost")
        redis_port = int(os.environ.get("REDISPORT", 6379))
        self._rts = RedisTimeSeries(host=redis_host, port=redis_port)
        # explicit arguments win; otherwise fall back to config attributes,
        # then to hard defaults
        self._name = name or getattr(config, "name", "A")
        self._thread = thread or getattr(config, "thread", 0)
        self._transitionms = transitionms or getattr(config, "transitionms", 100)
        # default retention: 7 days, in milliseconds
        self._retentionms = retentionms or getattr(config, "retentionms", 7 * 24 * 60 * 60 * 1000)
        # last value written by add_value; used to emit transition samples
        self._previous_value = 0

    def create(
        self,
        name: str = "",
        thread: int = 0,
        transitionms: int = 0,
        retentionms: int = 0,
    ) -> None:
        """Create the 'ts:<name>.T:<thread>' series; non-zero arguments
        overwrite the stored defaults first."""
        if name:
            self._name = name
        if thread:
            self._thread = thread
        if transitionms:
            self._transitionms = transitionms
        if retentionms:
            self._retentionms = retentionms
        key = f"ts:{self._name}.T:{self._thread:03d}"
        labeld = {"ts": self._name, "T": self._thread}
        self._rts.create(key, retention_msecs=self._retentionms, labels=labeld)

    def delete(self, name: str = "", thread: int = 0) -> None:
        """Delete one thread series; falls back to the stored name/thread."""
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        self._rts.delete(key)

    # slots are created dynamically and every now and then we want to delete
    def delete_slot(self, name: str = "", slot: int = 0) -> None:
        """Delete one slot series ('ts:<name>.S:<slot>')."""
        key = f"ts:{name or self._name}.S:{slot:03d}"
        self._rts.delete(key)

    def _add_value(
        self,
        key: str,
        timestampms: Union[int, str],
        value: int,
        labeld: Mapping[str, Any],
    ) -> int:
        """Add one sample, retrying briefly on timestamp collisions.

        With a server timestamp ("*") two adds in the same millisecond raise
        ResponseError; up to 5 retries with a 1 ms sleep absorb that. Any
        other failure (or an explicit timestamp) re-raises.
        """
        i = 0
        while True:
            try:
                timestampms_return = self._rts.add(
                    key,
                    timestampms,
                    value,
                    retention_msecs=self._retentionms,
                    labels=labeld,
                )
                return timestampms_return  # type: ignore
            except ResponseError:
                # too quick, delay a bit if using server timestamp
                if i < 5 and timestampms == "*":
                    i += 1
                    time.sleep(0.001)
                else:
                    raise

    def add_value(self, value: int = 0, name: str = "", thread: int = 0) -> int:
        """Add a sample to the thread series, returning its timestamp (ms).

        When transitions are enabled and the value changed, first re-emit the
        previous value, wait _transitionms, then write the new value — this
        produces square-wave edges instead of slopes when plotted.
        """
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        labeld = {"ts": name or self._name, "T": thread or self._thread}
        if self._transitionms and value != self._previous_value:
            timestampms_return = self._add_value(key, "*", self._previous_value, labeld)
            time.sleep(self._transitionms / 1000)
            self._add_value(key, "*", value, labeld)
            self._previous_value = value
            # NOTE: returns the timestamp of the transition sample, not the new one
            return timestampms_return
        else:
            return self._add_value(key, "*", value, labeld)

    def add_slot_values(self, values: Sequence[int] = [], name: str = "") -> int:
        """Write one value per slot series; all slots share the timestamp of
        slot 000 so a later range query lines them up. Returns that timestamp."""
        if not values:
            values = [0]
        keybase = f"ts:{name or self._name}.S:"
        labeld = {"ts": name or self._name, "S": 0}
        # slot 0 uses the server timestamp; remaining slots reuse it
        timestampms = self._add_value(f"{keybase}000", "*", values[0], labeld)
        for i, value in enumerate(values[1:]):
            j = i + 1
            labeld["S"] = j
            self._add_value(f"{keybase}{j:03d}", timestampms, value, labeld)
        return timestampms

    def get_keytuples_by_names(
            self, names: Sequence[str] = [], types: Sequence[str] = ["T"]) -> List[Tuple[str, int]]:
        """Query the index for keys matching *names* and parse each
        'ts:<name>.<T-or-S>:<number>' key into a (name, number) tuple,
        keeping only the requested types ("T" threads and/or "S" slots)."""
        namelist = (",").join(names or [self._name])
        filters = [f"ts=({namelist})"]
        keys = self._rts.queryindex(filters)
        keytuples = []
        for key in keys:
            eles = key.split(".")
            _, name = eles[0].split(":")  # ("ts", <name>)
            mytype, value = eles[1].split(":")  # ("T" or "S", <str number>)
            keytuple = (name, int(value))  # (<name>, <int>)
            if mytype in types:
                keytuples.append(keytuple)
        return keytuples

    def get_threads_by_name(self, name: str = "") -> Tuple[int, ...]:
        """Return the thread numbers that have a series for *name*."""
        keytuples = self.get_keytuples_by_names([name or self._name], types=["T"])
        names, threads = zip(*keytuples)
        return threads  # discard names

    def get_slots_by_name(self, name: str = "") -> Tuple[int, ...]:
        """Return the slot numbers that have a series for *name*."""
        keytuples = self.get_keytuples_by_names([name or self._name], types=["S"])
        names, slots = zip(*keytuples)
        return slots  # discard names

    def _get_dataframe(self, key: str, timestampms: int) -> pd.DataFrame:
        """Fetch samples of *key* from *timestampms* onward as a DataFrame
        indexed by datetime; empty DataFrame when there are no samples."""
        datapointts = self._rts.range(key, timestampms, -1)
        if not datapointts:
            return pd.DataFrame()
        dts, values = zip(*datapointts)
        datapointdf = pd.DataFrame({
            "dt": dts,
            key: [float(v) for v in values]
        })
        # timestamps are epoch milliseconds
        datapointdf["dt"] = pd.to_datetime(datapointdf.dt, unit="ms")
        return datapointdf.set_index("dt")

    def get_dataframe(self, name: str = "", thread: int = 0, timestampms: int = 0) -> pd.DataFrame:
        """DataFrame of one thread series from *timestampms* onward."""
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        return self._get_dataframe(key, timestampms)

    def get_slot_dataframe(self, name: str = "", slot: int = 0, timestampms: int = 0) -> pd.DataFrame:
        """DataFrame of one slot series from *timestampms* onward."""
        key = f"ts:{name or self._name}.S:{slot:03d}"
        return self._get_dataframe(key, timestampms)
class Patient:
    """Holds one patient's vital-sign time series in RedisTimeSeries.

    Construction wipes and recreates all series for this patient, then
    readings are appended via the add_* / receive_vitals methods.
    """

    def __init__(self, name):
        self.name = name
        self.id = "autogenerateID"
        self.rts = Client()
        self.delete_previous()
        self.create_all_series()

    def delete_previous(self):
        """delete any previously same name timeseries."""
        self.rts.delete("blood_min")
        self.rts.delete("blood_max")
        self.rts.delete("sp02")
        self.rts.delete("pulse")
        # FIX: "temp" is written by add_temp_samples but was never cleaned up
        self.rts.delete("temp")
        print("All previous timeseries deleted")

    def create_all_series(self):
        """Create all the time series that will contain all of the
        patients' data
        """
        self.rts.create('blood_min', labels={'Time': 'Series'})
        self.rts.create('blood_max', labels={'Time': 'Series'})
        self.rts.create('sp02', labels={'Time': 'Series'})
        self.rts.create('pulse', labels={'Time': 'Series'})
        # FIX: the "temp" series was missing although add_temp_samples targets it
        self.rts.create('temp', labels={'Time': 'Series'})
        print("Timeseries created")

    def add_blood_samples(self, blood_reading):
        """Convert a standard blood reading into a BloodPressureReading
        object and add it to both timeseries.
        """
        blood_reading = BloodPressureReading(blood_reading[0], blood_reading[1])
        self.rts.add("blood_min", blood_reading.time, blood_reading.min_bp)
        self.rts.add("blood_max", blood_reading.time, blood_reading.max_bp)

    def add_sp02_samples(self, sp02_reading):
        """Convert a standard SPO2 reading into a SP02Reading and add to its timeseries"""
        sp02_reading = SP02Reading(sp02_reading)
        self.rts.add("sp02", sp02_reading.time, sp02_reading.oxygen)

    def add_pulse_samples(self, pulse_reading):
        """Convert a standard pulse reading into a PulseReading and add to its timeseries"""
        pulse_reading = PulseReading(pulse_reading)
        self.rts.add("pulse", pulse_reading.time, pulse_reading.heartbeat)

    def add_temp_samples(self, temp_reading):
        """Convert a standard temperature (Celsius) into a TemperatureReading
        and add it to its timeseries"""
        temp_reading = TempReading(temp_reading)
        self.rts.add("temp", temp_reading.time, temp_reading.temp)

    def receive_vitals(self, bp_reading, sp_reading, pulse_reading, temp_reading):
        """Receive vitals from a push operation and add to the timeseries"""
        self.add_blood_samples(bp_reading)
        self.add_sp02_samples(sp_reading)
        self.add_pulse_samples(pulse_reading)
        self.add_temp_samples(temp_reading)
        print("vitals updated")


# Example usage (kept for reference; note receive_vitals takes FOUR readings):
# p = Patient("Boris")
# p.receive_vitals([120, 60], 98, 100, 37.0)