def getseries(q):
    """Build a 10-minute, forward-filled travel-time series for one link.

    Concatenates three fixed observation windows (Sep-Oct 2012, May-Aug 2013,
    Jan-Apr 2014) into a single TimeSeries of "stt" values indexed by their
    'date' field.

    Parameters
    ----------
    q : dict
        Mongo query; must contain "route", "link" and "direction" keys.
    """
    pkl = "observation_" + q["route"] + "_" + q["link"] + "_" + q["direction"]
    ts = mem.get(pkl)  # cache lookup for a previously stored query
    if ts:
        print("cached trips")
    else:
        result = db.observation.find(q).sort("_id", -1)
        data = []
        dates = []
        for res in result:
            for obs in res["item"]:
                data.append(obs["stt"])
                dates.append(obs['date'])
        ts = TimeSeries(data, dates)
        # NOTE(review): the cache is never populated (put_pickle stays
        # commented out), so mem.get above always misses -- confirm intended.
        #put_pickle(pkl, ts)
    result = ts.resample('10min', how="mean", convention='end', fill_method='pad')
    # Hoist each window's dropna() so it is computed once instead of twice.
    w1 = result["2012-09-03":"2012-10-21"].dropna()
    w2 = result["2013-05-01":"2013-08-04"].dropna()
    w3 = result["2014-01-06":"2014-04-13"].dropna()
    # Same chronological order the nested np.append calls produced: w1,w2,w3.
    stt = np.concatenate([w1.values, w2.values, w3.values])
    idx = np.concatenate([w1.index.values, w2.index.values, w3.index.values])
    return TimeSeries(stt, idx)
def getseriesweather(attr, bool, location):
    """Return a 10-minute resampled series of one weather attribute.

    `attr` names the record field to extract, `bool` is passed straight
    through as the resample aggregation (`how=`), and `location` selects the
    weather station. Records missing either `attr` or 'Time' are skipped.
    """
    cursor = db.weather.find({"location": location}).sort("_id", -1)
    readings, stamps = [], []
    for doc in cursor:
        for rec in doc['item']:
            # Keep only records that carry both the attribute and a timestamp.
            if attr in rec and 'Time' in rec:
                readings.append(float(rec[attr]))
                stamps.append(datetime.strptime(rec['Time'], '%Y-%m-%d %H:%M:%S'))
    series = TimeSeries(readings, stamps)
    return series.resample('10min', how=bool, convention='end', fill_method="pad")
def getseries(q):
    """Fetch all "stt" observations matching `q` as a 10-minute series.

    Returns a forward-filled, 10-minute mean TimeSeries indexed by the
    observations' 'date' field.

    Parameters
    ----------
    q : dict
        Mongo query for the observation collection.
    """
    # Removed the unused local `pkl` (a cache key that was never used here).
    result = db.observation.find(q).sort("_id", -1)
    data = []
    dates = []
    for res in result:
        for obs in res["item"]:
            data.append(obs["stt"])
            dates.append(obs['date'])
    ts = TimeSeries(data, dates)
    return ts.resample('10min', how="mean", convention='end', fill_method="pad")
def getseriesweather():
    """Populate `weatherstore` with 10-minute rain/temperature series.

    For every station returned by stations(), reads all weather documents,
    builds dailyrainMM and TemperatureC series on a shared timestamp index,
    and stores both (resampled to 10-minute means with forward-fill) under
    the station key. Returns the filled `weatherstore`.
    """
    for ws in stations():
        print(">>>> location >>> ", ws)
        cursor = db.weather.find({"location": ws}).sort("_id", -1)
        rain, temp, when = [], [], []
        for doc in cursor:
            for rec in doc['item']:
                rain.append(float(rec["dailyrainMM"]))
                temp.append(float(rec["TemperatureC"]))
                when.append(datetime.strptime(rec['Time'], '%Y-%m-%d %H:%M:%S'))
        rain_ts = TimeSeries(rain, when).resample(
            '10min', how="mean", convention='end', fill_method="pad")
        temp_ts = TimeSeries(temp, when).resample(
            '10min', how="mean", convention='end', fill_method="pad")
        weatherstore[ws] = {"dailyrainMM": rain_ts, "TemperatureC": temp_ts}
    return weatherstore
def getseries(q):
    """Travel-time series for one link, restricted to 2014-01-13..2014-04-21.

    Resamples the raw "stt" observations to 10-minute means (forward-filled)
    and returns only the non-null samples inside the fixed window.

    Parameters
    ----------
    q : dict
        Mongo query for the observation collection.
    """
    # Removed the unused local `pkl` (cache key that was never used here).
    print(q)
    result = db.observation.find(q).sort("_id", -1)
    data = []
    dates = []
    for res in result:
        for obs in res["item"]:
            data.append(obs["stt"])
            dates.append(obs['date'])
    ts = TimeSeries(data, dates)
    resampled = ts.resample('10min', how="mean", convention='end', fill_method='pad')
    # Hoist the window slice + dropna() so it is computed once, not twice.
    window = resampled["2014-01-13":"2014-04-21"].dropna()
    return TimeSeries(window.values, window.index.values)
def getseries(q):
    """Return the 10-minute mean travel-time series for the link in `q`.

    Tries an in-memory cache first; on a miss, rebuilds the series from the
    observation collection. The result is resampled to 10-minute means with
    forward-fill.
    """
    key = "observation_" + q["route"] + "_" + q["link"] + "_" + q["direction"]
    ts = mem.get(key)
    if not ts:
        data, dates = [], []
        for doc in db.observation.find(q).sort("_id", -1):
            for obs in doc["item"]:
                data.append(obs["stt"])
                dates.append(obs['date'])
        ts = TimeSeries(data, dates)
        # NOTE(review): nothing writes the series back to the cache, so the
        # mem.get above is expected to miss every time -- confirm intended.
    else:
        print("cached trips")
    return ts.resample('10min', how="mean", convention='end', fill_method='pad')
@author: declan ''' import csv, os, re import pymongo from pymongo import Connection as mongoConn from datetime import datetime from bson import json_util from pandas.core.series import TimeSeries from pandas import DataFrame as df connection_local = mongoConn('mongodb://localhost:27017/') db_local = connection_local.traffic rawtweets = db_local.twitter_mapped #cnn_remote = db_remote.twitter_streaming.find().skip(0).limit(2000) dates=[] values=[] result={} tweetdata = rawtweets.find() for tweetlist in tweetdata: d = datetime.strptime(tweetlist["_id"], '%Y/%m/%d/%H') result[tweetlist["_id"]] = tweetlist["item"]; dates.append(d) values.append(len(tweetlist["item"])) #print(d, len(tweetlist["item"])) ts = TimeSeries(dates,values) print(ts.head(100))
# NOTE(review): the next assignment is the tail of a method whose `def` and
# enclosing class are outside this view; cannot document it further from here.
self.last_price = price

if __name__ == '__main__':
    # Five-year daily window; timezone-aware datetimes as the loader expects.
    start = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc)
    end = datetime(2013, 1, 1, 0, 0, 0, 0, pytz.utc)
    data = load_from_yahoo(stocks=[SYMBOL], indexes={}, start=start, end=end, adjusted=True)
    simple_algo = BuyStock()
    results = simple_algo.run(data)
    ax1 = plt.subplot(211)
    ax2 = plt.subplot(212)
    # TRACK apparently mixes log strings and (margin, quantity, timestamp)
    # tuples; keep only the tuples for plotting.
    TRACK_STRIPPED = [x for x in TRACK if type(x) == tuple]
    futures_indexes = [timestamp for (_, _, timestamp) in TRACK_STRIPPED]
    futures_quantity_data = [
        quantity_owned for (_, quantity_owned, _) in TRACK_STRIPPED
    ]
    futures_margin_data = [margin for (margin, _, _) in TRACK_STRIPPED]
    # Margin on the top axis, quantity held on the bottom axis.
    futures_margin_series = TimeSeries(index=futures_indexes, data=futures_margin_data)
    futures_margin_series.plot(ax=ax1)
    futures_quantity_series = TimeSeries(index=futures_indexes, data=futures_quantity_data)
    futures_quantity_series.plot(ax=ax2)
    plt.gcf().set_size_inches(18, 8)
# NOTE(review): everything above `if __name__` is the tail of an unseen
# method (the else-branch of a buy/sell decision); its `if`, `def` and class
# are outside this view, so the indentation below is reconstructed.
            TRACK.append("0 to buy, what a shame")
        else:
            self.order(SYMBOL, quantity_to_buy)  # order SID (=0) and amount (=1 shares)
            if quantity_to_buy == 0:
                TRACK.append(str(timestamp) + " had a 0-sell!")
        self.last_price = price

if __name__ == '__main__':
    # Five-year daily window; timezone-aware datetimes as the loader expects.
    start = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc)
    end = datetime(2013, 1, 1, 0, 0, 0, 0, pytz.utc)
    data = load_from_yahoo(stocks=[SYMBOL], indexes={}, start=start, end=end, adjusted=True)
    simple_algo = BuyStock()
    results = simple_algo.run(data)
    ax1 = plt.subplot(211)
    ax2 = plt.subplot(212)
    # TRACK apparently mixes log strings and (margin, quantity, timestamp)
    # tuples; keep only the tuples for plotting.
    TRACK_STRIPPED = [x for x in TRACK if type(x) == tuple]
    futures_indexes = [timestamp for (_, _, timestamp) in TRACK_STRIPPED]
    futures_quantity_data = [quantity_owned for (_, quantity_owned, _) in TRACK_STRIPPED]
    futures_margin_data = [margin for (margin, _, _) in TRACK_STRIPPED]
    # Margin on the top axis, quantity held on the bottom axis.
    futures_margin_series = TimeSeries(index=futures_indexes, data=futures_margin_data)
    futures_margin_series.plot(ax=ax1)
    futures_quantity_series = TimeSeries(index=futures_indexes, data=futures_quantity_data)
    futures_quantity_series.plot(ax=ax2)
    plt.gcf().set_size_inches(18, 8)
def margin_account_log(self):
    """Expose the raw margin-account log wrapped as a pandas TimeSeries."""
    log = self._margin_account_log
    return TimeSeries(log)
class BuyGoogleAsFuture(FuturesTradingAlgorithm):
    """Toy strategy: order one GOOG futures contract on every bar."""

    def initialize_futures(self, *args, **kwargs):
        # No extra state is needed for this strategy.
        pass

    def handle_futures_data(self, data):
        # One contract per bar, at the initial margin quoted in the bar.
        self.order("GOOG", 1, initial_margin=data['GOOG']['initial_margin'])

    def _handle_margin_call(self):
        # On a margin call, shed randomly chosen positions.
        self._liquidate_random_positions()


if __name__ == '__main__':
    start = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc)
    end = datetime(2013, 1, 1, 0, 0, 0, 0, pytz.utc)
    data = load_from_yahoo(stocks=["GOOG"], indexes={}, start=start, end=end, adjusted=True)

    simple_algo = BuyGoogleAsFuture()
    results = simple_algo.run(data)

    # Plot the margin account (top) against the GOOG price series (bottom,
    # sharing the x-axis).
    margin_log = simple_algo.margin_account_log
    futures_indexes = list(margin_log.keys())
    futures_margin_data = list(margin_log.values)
    ax1 = plt.subplot(211)
    futures_margin_series = TimeSeries(index=futures_indexes, data=futures_margin_data)
    futures_margin_series.plot(ax=ax1)
    ax2 = plt.subplot(212, sharex=ax1)
    data.GOOG.plot(ax=ax2)
    plt.gcf().set_size_inches(18, 8)