import json

import pandas as pd
import Quandl

# Settings is defined elsewhere in this project and provides get_mongo_db().


def store_ETF_data(self, tickers):
    settings = Settings()
    db = settings.get_mongo_db('Quandl', local=True)
    coll = db['ETF']
    for t in tickers:
        # Find the most recent stored date for this ticker.
        dates = coll.find_one({'name': t}, sort=[('Date', -1)])
        if dates is None:
            print 'max_date is none'
            mdate = None
        else:
            # Stored dates are epoch milliseconds; scale to nanoseconds for pd.to_datetime.
            mdate = pd.to_datetime(dates['Date'] * 1000 * 1000).strftime(format='%Y-%m-%d')
        print 'downloading {t}'.format(t=t)
        df = Quandl.get(t, trim_start=mdate)
        df['name'] = t
        df = df.reset_index()
        # Keep only rows newer than the last stored date.
        if mdate is None:
            df_a = df
        else:
            df_a = df[df['Date'] > mdate]
        if len(df_a) == 0:
            print "data for {ticker} is already updated till {date}".format(ticker=t, date=mdate)
            continue
        records = json.loads(df_a.T.to_json()).values()
        print "uploading {t}".format(t=t)
        coll.insert_many(records)
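# --- Sketch: why the stored 'Date' is multiplied by 1000*1000 above ---
# DataFrame.to_json() serialises timestamps as epoch milliseconds by default,
# while pd.to_datetime() treats a bare integer as nanoseconds, so the stored
# value has to be scaled back up before parsing. A minimal, self-contained
# round trip (made-up row, not Quandl data) for illustration:
def _demo_epoch_ms_roundtrip():
    demo = pd.DataFrame({'Date': [pd.Timestamp('2015-06-01')], 'Close': [100.0]})
    record = list(json.loads(demo.T.to_json()).values())[0]
    print record['Date']  # 1433116800000 -- epoch milliseconds
    restored = pd.to_datetime(record['Date'] * 1000 * 1000)  # ms -> ns
    print restored.strftime('%Y-%m-%d')  # 2015-06-01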
def store_Stock_data(self, tickers):
    settings = Settings()
    db = settings.get_mongo_db('Quandl', local=True)
    coll = db['Stock']
    for t in tickers:
        # Find the most recent stored date for this ticker.
        dates = coll.find_one({'name': t}, sort=[('Date', -1)])
        if dates is None:
            print 'max_date is none'
            mdate = None
        else:
            # Stored dates are epoch milliseconds; scale to nanoseconds for pd.to_datetime.
            mdate = pd.to_datetime(dates['Date'] * 1000 * 1000).strftime(format='%Y-%m-%d')
        print 'downloading {t}'.format(t=t)
        try:
            df = Quandl.get(t, trim_start=mdate)
            df = df.reset_index()
        except Exception:
            print 'cannot download {n}'.format(n=t)
            continue
        # Keep only rows newer than the last stored date.
        if mdate is None:
            df_a = df
        else:
            df_a = df[df['Date'] > mdate]
        if len(df_a) == 0:
            print "data for {ticker} is already updated till {date}".format(ticker=t, date=mdate)
            continue
        # Store the split/dividend-adjusted OHLCV columns under plain names.
        df_adj = pd.DataFrame()
        df_adj['Date'] = df_a['Date']
        df_adj['Open'] = df_a['Adj. Open']
        df_adj['High'] = df_a['Adj. High']
        df_adj['Low'] = df_a['Adj. Low']
        df_adj['Close'] = df_a['Adj. Close']
        df_adj['Volume'] = df_a['Adj. Volume']
        df_adj['name'] = t
        records = json.loads(df_adj.T.to_json()).values()
        print "uploading {t}".format(t=t)
        coll.insert_many(records)
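# --- Sketch: indexing the collections for the find_one(..., sort=[('Date', -1)]) lookups ---
# Without an index, every incremental-update check above scans the whole collection.
# A compound index on (name, Date) is assumed to cover it; the connection details and
# collection names below are hypothetical stand-ins for what Settings.get_mongo_db() returns.
def _demo_create_indexes():
    from pymongo import MongoClient, ASCENDING, DESCENDING
    client = MongoClient('localhost', 27017)  # assumed local MongoDB
    db = client['Quandl']
    for name in ('ETF', 'Stock', 'Fred'):
        db[name].create_index([('name', ASCENDING), ('Date', DESCENDING)])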
def store_Fred_data(self, tickers):
    settings = Settings()
    db = settings.get_mongo_db(local=True)
    coll = db['Fred']
    for t in tickers:
        # Find the most recent stored date for this ticker.
        dates = coll.find_one({'name': t}, sort=[('Date', -1)])
        if dates is None:
            print 'max_date is none'
            mdate = None
        else:
            # Stored dates are epoch milliseconds; scale to nanoseconds for pd.to_datetime.
            mdate = pd.to_datetime(dates['Date'] * 1000 * 1000).strftime(format='%Y-%m-%d')
        print 'downloading {t}'.format(t=t)
        df = Quandl.get(t, trim_start=mdate, authtoken='AuFngLLqDpLf672K9W85')
        df = df.reset_index()
        # Keep only rows newer than the last stored date.
        if mdate is None:
            df_a = df
        else:
            df_a = df[df['Date'] > mdate]
        if len(df_a) == 0:
            print "data for {ticker} is already updated till {date}".format(ticker=t, date=mdate)
            continue
        df_adj = pd.DataFrame()
        df_adj['Date'] = df_a['DATE']
        df_adj['Value'] = df_a['VALUE']
        df_adj['name'] = t
        records = json.loads(df_adj.T.to_json()).values()
        print "uploading {t}".format(t=t)
        coll.insert_many(records)
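# --- Sketch: invoking the loaders ---
# The methods above take self, so they are assumed to live on a data-store class
# (called QuandlStore here purely for illustration). The ticker codes follow the
# Quandl dataset convention and are examples, not a required list.
def _demo_usage():
    store = QuandlStore()  # hypothetical class exposing the three methods above
    store.store_ETF_data(['GOOG/NYSE_SPY', 'GOOG/NYSE_IWM'])
    store.store_Stock_data(['WIKI/AAPL', 'WIKI/MSFT'])
    store.store_Fred_data(['FRED/GDP', 'FRED/UNRATE'])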