def get(self, request, tickers):
    """Return basic history data as tab-separated CSV text.

    :param request: incoming HTTP request (unused beyond routing).
    :param tickers: a stock code, or the literal string 'all' for every stock.
    :return: DRF Response whose body is TSV text (no index column).
    """
    end = dt.datetime.now().date()
    if tickers == 'all':
        his_data = get_basic_hist_data('2008-01-01', end)
    else:
        his_data = get_basic_hist_data('2008-01-01', end, code=str(tickers))
    # BUG FIX: `compression='gzip'` was a silent no-op — pandas only applies
    # compression when to_csv() writes to a path/buffer, not when it returns
    # a string. Dropped so the code no longer implies a gzipped payload.
    payload = his_data.to_csv(sep='\t', index=False)
    return Response(payload)
def get(self, request, tickers):
    """Return basic history data as a JSON list of row objects.

    :param request: incoming HTTP request (unused beyond routing).
    :param tickers: a stock code, or the literal string 'all' for every stock.
    :return: DRF Response containing a list of per-row dicts.
    """
    end = dt.datetime.now().date()
    if tickers == 'all':
        his_data = get_basic_hist_data('2008-01-01', end)
    else:
        his_data = get_basic_hist_data('2008-01-01', end, code=str(tickers))
    # BUG FIX: the JSON string was previously parsed with ast.literal_eval(),
    # which raises on the JSON literals true/false/null — they are not valid
    # Python literals. json.loads() parses them correctly (json is already
    # used elsewhere in this module). Also dropped the no-op
    # `compression='gzip'` argument: pandas ignores compression when
    # to_json() returns a string instead of writing to a path.
    payload = his_data.to_json(orient='records')
    return Response(json.loads(payload))
def get(self, request, tickers):
    """Return basic history data as the DataFrame's raw NumPy values.

    :param request: incoming HTTP request (unused beyond routing).
    :param tickers: a stock code, or the literal string 'all' for every stock.
    :return: HttpResponse wrapping the underlying NumPy array.

    NOTE(review): HttpResponse will stringify the ndarray via its repr,
    which is an unusual wire format — confirm callers actually expect this.
    """
    end = dt.datetime.now().date()
    extra = {} if tickers == 'all' else {'code': str(tickers)}
    his_data = get_basic_hist_data('2008-01-01', end, **extra)
    return HttpResponse(his_data.to_numpy())
def index():
    """Render the index page with two pre-built Plotly figures.

    Figure 1: candlestick chart of ACI OHLC prices (2019-03 to 2020-03).
    Figure 2: line+marker scatter of the closing price over the same window.
    """
    df = get_basic_hist_data('2019-03-01', '2020-03-01', 'ACI')

    candlestick_fig = dict(
        data=[
            dict(
                x=df['date'],
                open=df['open'],
                high=df['high'],
                low=df['low'],
                close=df['close'],
                type='candlestick',
            ),
        ],
        layout=dict(title='first graph'),
    )
    close_fig = dict(
        data=[
            dict(
                x=df['date'],
                y=df['close'],
                mode='lines+markers',
                type='scatter',
            ),
        ],
        layout=dict(title='second graph'),
    )
    graphs = [candlestick_fig, close_fig]

    # One DOM id per figure so the template can place each plot.
    ids = [f'graph-{idx}' for idx in range(1, len(graphs) + 1)]

    # PlotlyJSONEncoder converts pandas/datetime objects to their JSON
    # equivalents, which plain json.dumps cannot serialize.
    graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)

    return render_template('index.html', ids=ids, graphJSON=graphJSON)
def _init_coll(self):
    """Populate the library with full history on first use of this symbol.

    1. Do nothing when the symbol already exists in the arctic library.
    2. Otherwise fetch all history since 2008-01-01 from tushare/bdshare.
    3. Write the data to arctic (nothing is written when the fetch
       comes back empty).

    :return: None
    """
    # Guard clause: collection already initialized — nothing to do.
    if self._coll_name in self._library.list_symbols():
        return

    self._new_added_colls.append(self._coll_name)

    today = dt.datetime.now().date()
    his_data = bds.get_basic_hist_data(
        '2008-01-01', today, code=self._coll_name, index='date'
    ).sort_index()

    if len(his_data) == 0:
        logger.warning(
            f'data of stock {self._coll_name} when initiation is empty'
        )
        return

    logger.debug(f'write history data for stock: {self._coll_name}.')
    self._library.write(self._coll_name, his_data)
def get_dse_data(ticker, cache_path):
    '''
    Download data from dse, or retrieve from the cache_path if it exists

    :param ticker: The stock ticker
    :param cache_path: The cache file path, or None to skip caching
    :return: The dse data (a DataFrame when freshly downloaded; whatever
             object was pickled when served from the cache)
    '''
    # BUG FIX: os.path.exists(None) raises TypeError, yet None is an
    # intended value (the write branch explicitly checks for it) — guard
    # for None before probing the filesystem.
    if cache_path is not None and os.path.exists(cache_path):
        # BUG FIX: the file handle was previously opened and never closed;
        # `with` guarantees it is released even if unpickling fails.
        # NOTE(review): pickle.load on an attacker-controlled file can run
        # arbitrary code — cache_path must stay trusted.
        with open(cache_path, 'rb') as f:
            data = pickle.load(f)
        print('Loaded {} data from cache'.format(ticker))
    else:
        end = dt.datetime.now().date()
        data = get_basic_hist_data('2008-01-01', end, ticker)
        if cache_path is not None:  # idiom fix: was `not cache_path == None`
            with open(cache_path, 'wb') as f:
                data.to_pickle(f)
            print('Cached {} data at {}'.format(ticker, cache_path))
    return data
def download_delta_data(self):
    """
    Fetch data newer than the stored history and append it to the
    collection; planned to run each day at 8:30am.

    1. Ensure the collection exists (a brand-new collection is written
       with full history by _init_coll, so there is no delta to fetch).
    2. Ask bdshare for the single day following the latest stored date.
    3. Strip unused columns and append the rows to arctic.

    :return: None
    """
    self._init_coll()
    if self._coll_name in self._new_added_colls:
        # Freshly initialized collections already hold all history.
        return

    # 15:00 PM can get today data.
    # The delta window starts the day after the newest stored row.
    latest_date = self.get_data().index[-1]
    start = (latest_date + dt.timedelta(days=1)).strftime('%Y-%m-%d')

    his_data = bds.get_basic_hist_data(
        start=start, end=start, code=self._coll_name
    )

    if len(his_data) == 0:
        logger.info(
            f'delta data of stock {self._coll_name} is empty, after {start}')
        return

    his_data = bdu.Utils.strip_unused_cols(his_data, *self._unused_cols)
    logger.info(f'got delta data of stock: {self._coll_name}, after {start}')
    self._library.append(self._coll_name, his_data)
def test_get_basic_hist_data(self):
    """Smoke test: fetch BATBC history since 2020-01-01 and dump it."""
    today = dt.datetime.now().date()
    frame = get_basic_hist_data('2020-01-01', today, 'BATBC')
    print(frame.to_string())
    print(frame.dtypes)