def predict(host, port, t0):
    """Forecast the "Temperature" series with Prophet and write the results
    back into RedisTimeSeries.

    Args:
        host: Redis host name.
        port: Redis port.
        t0: unused; kept so existing callers' signatures keep working.
    """
    connection = Client(host=host, port=port)
    keys = ['yhat', 'yhat_upper', 'yhat_lower',
            'trend', 'trend_upper', 'trend_lower',
            'daily', 'daily_lower', 'daily_upper']

    # Flush old predictions, best effort: a key may not exist yet (e.g. on
    # the very first run), so a failed delete is logged and skipped instead
    # of aborting the whole forecast.  (The original re-raised here, leaving
    # an unreachable `pass` behind — the dead code showed the intent.)
    for k in keys:
        try:
            connection.delete(k)
        except Exception:
            logging.warning('Error deleting key %r', k)

    # (Re-)create destination keys; ignore "already exists" style errors.
    for k in keys:
        try:
            connection.create(k, retention_msecs=7 * DAY)
        except Exception:
            pass

    # Read the raw series, averaged into 5-minute buckets.
    data = connection.range(
        "Temperature",
        from_time=0,
        to_time=-1,
        bucket_size_msec=60 * 5,  # NOTE(review): value looks like seconds, not msec — confirm
        aggregation_type='avg')

    # Reshape into the two-column frame Prophet expects: ds=datetime, y=value.
    # (Renamed from `time` to avoid shadowing the stdlib module name.)
    timestamps, values = zip(*data)
    timestamps = [datetime.fromtimestamp(x) for x in timestamps]
    df = pd.DataFrame(dict(ds=timestamps, y=values))

    m = Prophet(changepoint_prior_scale=0.02, interval_width=.95).fit(df)
    future = m.make_future_dataframe(periods=48, freq='H', include_history=True)
    fcst = m.predict(future).set_index('ds')

    def send(key):
        # Convert the ns-resolution index to epoch seconds and write one
        # (key, timestamp, value) triple per forecast row in a single madd.
        epochs = [int(x.astype('uint64') / 1e9) for x in fcst.index.values]
        series = [v.astype(float) for v in fcst[key].values]
        connection.madd([(key, ts, v) for ts, v in zip(epochs, series)])

    # Plain loop: the original used a list comprehension purely for side effects.
    for k in keys:
        send(k)
class RedisTimeSeriesCommon(object):
    """
    Wrapper class for accessing RedisTimeSeries.

    Keys are built as ``ts:<name>.T:<thread:03d>`` (per-thread series) or
    ``ts:<name>.S:<slot:03d>`` (per-slot series), each created with matching
    labels so they can be rediscovered later via ``queryindex``.
    """

    def __init__(
        self,
        config: ConfigContextCommon,
        name: str = "",
        thread: int = 0,
        transitionms: int = 0,
        retentionms: int = 0,
    ) -> None:
        """
        Args:
            config: A config object.
            name: series name; falls back to ``config.name``, then ``"A"``.
            thread: thread number; falls back to ``config.thread``, then 0.
            transitionms: pause between the old and new sample when a value
                changes (see ``add_value``); falls back to
                ``config.transitionms``, then 100 ms.
            retentionms: per-key retention; falls back to
                ``config.retentionms``, then 7 days in milliseconds.
        """
        logname = Path(__file__).stem
        self._logger = logging.getLogger(f"{config.PACKAGE_NAME}.{logname}")
        # Connection parameters come from the environment with localhost
        # defaults for development.
        redis_host = os.environ.get("REDISHOST", "localhost")
        redis_port = int(os.environ.get("REDISPORT", 6379))
        self._rts = RedisTimeSeries(host=redis_host, port=redis_port)
        # Explicit arguments win over config attributes, which win over the
        # hard-coded defaults (0 / "" mean "not supplied").
        self._name = name or getattr(config, "name", "A")
        self._thread = thread or getattr(config, "thread", 0)
        self._transitionms = transitionms or getattr(config, "transitionms", 100)
        self._retentionms = retentionms or getattr(config, "retentionms", 7 * 24 * 60 * 60 * 1000)
        self._previous_value = 0  # last value written via add_value()

    def create(
        self,
        name: str = "",
        thread: int = 0,
        transitionms: int = 0,
        retentionms: int = 0,
    ) -> None:
        """Create the per-thread time-series key.

        Non-zero / non-empty arguments also update the instance defaults
        used by later calls.
        """
        if name:
            self._name = name
        if thread:
            self._thread = thread
        if transitionms:
            self._transitionms = transitionms
        if retentionms:
            self._retentionms = retentionms
        key = f"ts:{self._name}.T:{self._thread:03d}"
        labeld = {"ts": self._name, "T": self._thread}
        self._rts.create(key, retention_msecs=self._retentionms, labels=labeld)

    def delete(self, name: str = "", thread: int = 0) -> None:
        """Delete the per-thread key (arguments default to instance values)."""
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        self._rts.delete(key)

    # slots are created dynamically and every now and then we want to delete
    def delete_slot(self, name: str = "", slot: int = 0) -> None:
        """Delete the per-slot key for ``slot``."""
        key = f"ts:{name or self._name}.S:{slot:03d}"
        self._rts.delete(key)

    def _add_value(
        self,
        key: str,
        timestampms: Union[int, str],
        value: int,
        labeld: Mapping[str, Any],
    ) -> int:
        """Add one sample to ``key`` and return the stored timestamp (ms).

        When ``timestampms`` is ``"*"`` (server-assigned timestamp), retries
        up to 5 times with a 1 ms sleep on ``ResponseError`` — back-to-back
        adds can otherwise collide on the same millisecond.  Any other
        failure (or exhausted retries) re-raises.
        """
        i = 0
        while True:
            try:
                timestampms_return = self._rts.add(
                    key,
                    timestampms,
                    value,
                    retention_msecs=self._retentionms,
                    labels=labeld,
                )
                return timestampms_return  # type: ignore
            except ResponseError:
                # too quick, delay a bit if using server timestamp
                if i < 5 and timestampms == "*":
                    i += 1
                    time.sleep(0.001)
                else:
                    raise

    def add_value(self, value: int = 0, name: str = "", thread: int = 0) -> int:
        """Record ``value`` with a server timestamp.

        When transitions are enabled and the value changed, the previous
        value is written once more, then — after ``transitionms`` — the new
        value, producing a square edge in the series rather than a ramp.
        Returns the timestamp (ms) of the first write performed.
        """
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        labeld = {"ts": name or self._name, "T": thread or self._thread}
        if self._transitionms and value != self._previous_value:
            timestampms_return = self._add_value(key, "*", self._previous_value, labeld)
            time.sleep(self._transitionms / 1000)
            self._add_value(key, "*", value, labeld)
            self._previous_value = value
            return timestampms_return
        else:
            return self._add_value(key, "*", value, labeld)

    def add_slot_values(self, values: Sequence[int] = [], name: str = "") -> int:
        """Write one value per slot key and return the shared timestamp (ms).

        Slot 0 gets a server-assigned timestamp; the remaining slots reuse
        that timestamp so the row stays aligned across keys.
        """
        if not values:
            values = [0]
        keybase = f"ts:{name or self._name}.S:"
        labeld = {"ts": name or self._name, "S": 0}
        timestampms = self._add_value(f"{keybase}000", "*", values[0], labeld)
        for i, value in enumerate(values[1:]):
            j = i + 1
            labeld["S"] = j
            self._add_value(f"{keybase}{j:03d}", timestampms, value, labeld)
        return timestampms

    def get_keytuples_by_names(
        self,
        names: Sequence[str] = [],
        types: Sequence[str] = ["T"]) -> List[Tuple[str, int]]:
        """Find existing keys by name label and return (<name>, <number>)
        tuples, keeping only keys whose type ("T" thread / "S" slot) is in
        ``types``.
        """
        namelist = (",").join(names or [self._name])
        filters = [f"ts=({namelist})"]
        keys = self._rts.queryindex(filters)
        keytuples = []
        for key in keys:
            # Key shape is "ts:<name>.<T-or-S>:<number>" — split both parts.
            eles = key.split(".")
            _, name = eles[0].split(":")  # ("ts", <name>)
            mytype, value = eles[1].split(":")  # ("T" or "S", <str number>)
            keytuple = (name, int(value))  # (<name>, <int>)
            if mytype in types:
                keytuples.append(keytuple)
        return keytuples

    def get_threads_by_name(self, name: str = "") -> Tuple[int, ...]:
        """Return the thread numbers that currently have keys for ``name``."""
        keytuples = self.get_keytuples_by_names([name or self._name], types=["T"])
        names, threads = zip(*keytuples)
        return threads  # discard names

    def get_slots_by_name(self, name: str = "") -> Tuple[int, ...]:
        """Return the slot numbers that currently have keys for ``name``."""
        keytuples = self.get_keytuples_by_names([name or self._name], types=["S"])
        names, slots = zip(*keytuples)
        return slots  # discard names

    def _get_dataframe(self, key: str, timestampms: int) -> pd.DataFrame:
        """Fetch samples for ``key`` from ``timestampms`` to the end and
        return them as a float DataFrame indexed by datetime; an empty
        DataFrame when there is no data.
        """
        datapointts = self._rts.range(key, timestampms, -1)
        if not datapointts:
            return pd.DataFrame()
        dts, values = zip(*datapointts)
        datapointdf = pd.DataFrame({
            "dt": dts,
            key: [float(v) for v in values]
        })
        # Sample timestamps are epoch milliseconds; convert for the index.
        datapointdf["dt"] = pd.to_datetime(datapointdf.dt, unit="ms")
        return datapointdf.set_index("dt")

    def get_dataframe(self, name: str = "", thread: int = 0, timestampms: int = 0) -> pd.DataFrame:
        """DataFrame of the per-thread series from ``timestampms`` onward."""
        key = f"ts:{name or self._name}.T:{thread or self._thread:03d}"
        return self._get_dataframe(key, timestampms)

    def get_slot_dataframe(self, name: str = "", slot: int = 0, timestampms: int = 0) -> pd.DataFrame:
        """DataFrame of the per-slot series from ``timestampms`` onward."""
        key = f"ts:{name or self._name}.S:{slot:03d}"
        return self._get_dataframe(key, timestampms)
'INDEX': 'DJIA', 'PRICETYPE':'RANGE', 'AGGREGATION': 'RANGE', 'DURATION': '15_MINUTES', 'COMPANYNAME': 'GOLDMAN_SACHS_GROUP' }) rts.createrule('INTRADAYPRICES:GS','INTRADAYPRICES15MINRNG:GS','range',900) rts.create('INTRADAYPRICES15MINSTDP:GS', labels={ 'SYMBOL': 'GS', 'DESC': 'SHARE_PRICE', 'INDEX': 'DJIA', 'PRICETYPE':'STDDEV', 'AGGREGATION': 'STDDEV', 'DURATION': '15_MINUTES', 'COMPANYNAME': 'GOLDMAN_SACHS_GROUP' }) rts.createrule('INTRADAYPRICES:GS','INTRADAYPRICES15MINSTDP:GS','std.p',900) # Populate Data rts.madd(<RSIIndicatorList>) # Querying data rts.range( 'INTRADAYPRICES15MINRNG:GS' , from_time = 1603704600 , to_time = 1603713600) # Start is 9:30 a.m. on October 26, 2020 ET allRSIValues = rts.mget(filters=['DESC=RELATIVE_STRENGTH_INDEX','TIMEFRAME=1_DAY'], with_labels=False)
# Forecast the "Temperature" RedisTimeSeries series with Prophet, then save
# the forecast as CSV and a forecast-vs-observed plot as PNG.
from time import time
from redistimeseries.client import Client
import matplotlib.pyplot as plt
from datetime import datetime
import pandas as pd
from fbprophet import Prophet

# Time-unit constants, expressed in milliseconds.
MSEC = 1
SEC = 1000 * MSEC
MINUTE = 60 * SEC

rts = Client(host='localhost', port=6379)

# Grab the time series, averaged into 5-minute buckets.
data = rts.range(
    "Temperature",
    from_time=0,
    to_time=-1,
    bucket_size_msec=60 * 5,  # NOTE(review): value looks like seconds, not msec — confirm
    aggregation_type='avg')

# Build the (ds, y) frame Prophet expects.  The original bound the result to
# both an unused `res` and a `time` variable that clobbered the imported
# time() function — use distinct names instead.
timestamps, values = zip(*data)
timestamps = [datetime.fromtimestamp(x) for x in timestamps]
df = pd.DataFrame(dict(ds=timestamps, y=values))

# Fit, then forecast 48 hours past the end of the data.
m = Prophet(changepoint_prior_scale=0.02, interval_width=.95).fit(df)
future = m.make_future_dataframe(periods=48, freq='H')
fcst = m.predict(future)
fcst = fcst.set_index('ds')
fcst.to_csv('forecast.csv')

# Plot the forecast band against the observations for a fixed window.
ax = fcst[['yhat', 'yhat_upper', 'yhat_lower']]['2020-2-1':'2020-2-4'].plot()
df.set_index('ds')['2020-2-1':'2020-2-4'].plot(ax=ax)
plt.savefig('output.png', dpi=120)
from datetime import datetime
from iexfinance.stocks import get_historical_data
import pandas as pd

# Connect to Redis TimeSeries.
rts = Client(host='127.0.0.1', port=6379)

# Pull the complete 15-minute RSI history for Goldman Sachs.
# from_time=0 means "from the beginning of the series";
# to_time=-1 means "up to the most recent sample".
dailyRSI15MinRange = rts.range('DAILYRSI15MINRNG:GS', from_time=0, to_time=-1)

banner = '****************GS RSI RANGE**************************************'
print(banner)
print(dailyRSI15MinRange)
print(banner)

# Pull the standard deviation of the GS share price for each 15-minute window.
dailyGS15MinStdP = rts.range('INTRADAYPRICES15MINSTDP:GS', from_time=0, to_time=-1)
import json
from user import data
import time
from datetime import datetime
from redistimeseries.client import Client

# Open a connection to the local RedisTimeSeries instance.
rts = Client(host='127.0.0.1', port=6379)

# Fetch IBM daily-open samples from the start of the series up to a fixed
# cutoff timestamp (milliseconds since the epoch).
dailyRange = rts.range('DAILYOPEN:IBM', from_time=0, to_time=1611014400000)

header = '****************IBM RANGE**************************************'
print(header)
print(dailyRange)
print(header)