def oanda_historical_data(instrument, start_date, end_date, granularity='M1', client=None):
    """Fetch mid-price OHLCV candles from Oanda between two dates.

    Args:
        instrument: Oanda instrument name, e.g. "EUR_USD".
        start_date: ISO-8601 timestamp for the "from" parameter.
        end_date: ISO-8601 timestamp for the "to" parameter.
        granularity: candle granularity code (default "M1").
        client: authenticated oandapyV20 API client.

    Returns:
        DataFrame indexed by datetime with columns
        ['volume', 'open', 'high', 'low', 'close'] (empty if no candles).
    """
    params = {
        "from": start_date,
        "to": end_date,
        "granularity": granularity,
        "count": 2500,  # max candles per paginated request
    }
    frames = []
    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        client.request(r)
        candles = r.response.get('candles')
        if not candles:
            continue
        rows = [[c['time'], c['volume'],
                 c['mid']['o'], c['mid']['h'], c['mid']['l'], c['mid']['c']]
                for c in candles]
        df = pd.DataFrame(
            rows, columns=['time', 'volume', 'open', 'high', 'low', 'close'])
        frames.append(df.set_index('time'))
    # DataFrame.append was removed in pandas 2.0; concat the chunks instead.
    df_full = pd.concat(frames) if frames else pd.DataFrame()
    df_full.index = pd.to_datetime(df_full.index)
    return df_full
def fetch_range(self, start_day, end_day, granularity, instrument, col, save=True):
    """Download candles for [start_day, end_day] in ~100 chunks, with caching.

    If a zipped CSV for this exact range/instrument already exists under
    ./data/ it is read back instead of hitting the API.

    Args:
        start_day: datetime lower bound of the range.
        end_day: datetime upper bound of the range.
        granularity: Oanda granularity code.
        instrument: instrument name, e.g. "EUR_USD".
        col: column(s) to persist when ``save`` is True.
        save: write the aligned frame to the cache file (default True).

    Returns:
        DataFrame of candles aligned via ``self.align``, or the cached CSV
        contents when the cache file already exists.
    """
    path = ('./data/' + start_day.strftime('%Y%m%d') + '-' +
            end_day.strftime('%Y%m%d') + '-' + instrument + '.csv.zip')
    if os.path.exists(path):
        return pd.read_csv(path)
    nth, divider = 0, 100
    delta_day = end_day - start_day
    # Chunk size in whole days so the range is covered in ~`divider` requests.
    mod = timedelta(int(np.ceil(delta_day.days / divider)))
    bar = tqdm.tqdm(range(divider))
    res = []
    while start_day + mod * nth < end_day:
        start = datetime.strftime(start_day + mod * nth, '%Y-%m-%dT%H:%M:%SZ')
        end = datetime.strftime(min(start_day + mod * (nth + 1), end_day),
                                '%Y-%m-%dT%H:%M:%SZ')
        params = {"from": start, "to": end, "granularity": granularity}
        for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
            self.client.request(r)
            res.extend(r.response.get('candles'))
        bar.update(1)
        nth += 1
    # Convert once after all chunks are fetched; the original rebuilt the
    # whole DataFrame on every loop iteration (quadratic work).
    df = self.res_to_df(res)
    df = self.align(df)
    if save:
        df[col].to_csv(path, index=False, compression='zip')
    return df
def get_history_price_segment(self, _currency_pair, _params):
    """Yield one DataFrame of flattened candles per paginated request.

    Args:
        _currency_pair: Oanda instrument name.
        _params: candle request parameters (from/to/granularity...).

    Yields:
        DataFrame produced by normalizing the 'candles' list of each
        paginated response.
    """
    for r in InstrumentsCandlesFactory(instrument=_currency_pair,
                                       params=_params):
        # print("REQUEST: {} {} {}".format(r, r.__class__.__name__, r.params))
        record_json = self.client.request(r)
        # pd.io.json.json_normalize was deprecated in pandas 1.0 and later
        # removed; pd.json_normalize is the supported spelling.
        new_df = pd.json_normalize(record_json['candles'])
        yield new_df
def __init__(self, _from, _to, gran, instr):
    """Fetch candle history on construction.

    Example args: '2017-01-01T00:00:00Z' '2017-06-30T00:00:00Z' 'H4' 'EUR_USD'

    NOTE(review): only the response of the LAST paginated request is kept
    in ``self.rv``; earlier chunks are discarded — confirm this is intended.
    """
    client = API(access_token=config['account']['token'])
    # (Removed an unused local alias of ``instr``.)
    params = {"granularity": gran, "from": _from, "to": _to}
    for res in InstrumentsCandlesFactory(instrument=instr, params=params):
        self.rv = client.request(res)
def fetch_data_frame(self, _from: datetime.datetime, _to: datetime.datetime,
                     gran: str, symbol: str):
    """Fetch mid OHLCV candles and return them as a DataFrame.

    Args:
        _from: range start, formatted with ``self.date_format_in``.
        _to: range end, formatted with ``self.date_format_in``.
        gran: Oanda granularity code.
        symbol: 6-letter symbol such as 'EURUSD' (converted to 'EUR_USD').

    Returns:
        DataFrame indexed by candle datetime with columns
        ['open', 'high', 'low', 'close', 'volume'].  When no candles are
        returned the frame is empty but keeps the same columns (the old
        ``df.columns = [...]`` assignment raised ValueError in that case).
    """
    instr = symbol[:3] + '_' + symbol[-3:]
    params = {
        "granularity": gran,
        "from": _from.strftime(self.date_format_in),
        "to": _to.strftime(self.date_format_in)
    }
    candles = {}
    for r in InstrumentsCandlesFactory(instrument=instr, params=params):
        print("REQUEST: {} {} {}".format(r, r.__class__.__name__, r.params))
        self.client.request(r)
        for candle in r.response.get('candles'):
            dt = datetime.datetime.strptime(candle.get('time')[0:19],
                                            self.date_format_out)
            mid = candle['mid']
            candles[dt] = [mid['o'], mid['h'], mid['l'], mid['c'],
                           candle['volume']]
    return pd.DataFrame.from_dict(
        candles, orient='index',
        columns=['open', 'high', 'low', 'close', 'volume'])
def get_historical_data_factory(instrument, params):
    """Fetch OHLCV candles for ``instrument``, caching the result as CSV.

    Args:
        instrument: Oanda instrument name.
        params: dict with 'from', 'to' and 'granularity' keys.

    Returns:
        DataFrame indexed by 'datetime' with numeric
        open/high/low/close/volume columns.
    """
    # Cache filename derived from the request parameters.
    p_to = params['to'][:10]
    p_from = params['from'][:10]
    p_granularity = params['granularity']
    filename = f"data/data_oanda_{instrument}_{p_from}_{p_to}_{p_granularity}.csv"
    if os.path.isfile(filename):
        df2 = pd.read_csv(filename)
        df2['datetime'] = pd.to_datetime(df2['datetime'])
        df2 = df2.set_index('datetime')
        return df2

    # Create a Data Feed
    client = API(access_token=access_token)
    df_list = []

    def cnv(response):
        """Convert one candles response into a typed DataFrame chunk."""
        candles = [candle['mid'] for candle in response['candles']]
        ts = pd.DataFrame(
            {'datetime': [candle['time'] for candle in response['candles']]})
        vol = pd.DataFrame(
            {'volume': [candle['volume'] for candle in response['candles']]})
        # The old code wrapped ts/vol in DataFrame() a second time for no
        # effect; concat them directly.
        df = pd.concat([ts, pd.DataFrame(data=candles), vol], axis=1)
        df.rename({'o': 'open', 'h': 'high', 'l': 'low', 'c': 'close'},
                  axis=1, inplace=True)
        df['datetime'] = pd.to_datetime(df['datetime'])
        df = df.set_index('datetime')
        for col in df.columns:
            df[col] = pd.to_numeric(df[col])
        df_list.append(df)

    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        rv = client.request(r)
        cnv(rv)

    df2 = pd.concat(df_list)
    # (Removed unused ``now``/``dt_string`` locals that never reached the
    # filename.)
    df2.to_csv(filename)
    return df2
def __init__(self, client, instrument, resolution, from_date, to_date=None,
             datetime_fmt=None, timezone=None):
    """Prepare paginated candle requests for one instrument and date range."""
    # Client handles and account metadata.
    self.client = client
    self.account_summary = client.account_summary
    self.api = client.api

    # Candle parameters; fall back to the account's defaults where a value
    # was not supplied by the caller.
    self.instrument = instrument
    self.resolution = resolution
    self.timezone = timezone or self.account_summary.get("timezone")
    self.from_date = timezone_shift(from_date, in_tz=self.timezone)
    self.to_date = timezone_shift(to_date, in_tz=self.timezone)
    self.datetime_fmt = (datetime_fmt
                         or self.account_summary.get("datetime_format"))
    self.candles_params = {
        "granularity": self.resolution,
        # "alignmentTimezone": self.timezone,
        "from": self.from_date,
        "to": self.to_date
    }

    # Generator of consecutive requests covering the whole range.
    self.requests = InstrumentsCandlesFactory(self.instrument,
                                              self.candles_params)

    # Lazily populated caches.
    self._response = None
    self._dict_table = None
    self._dataframe_table = None
def main(): """ My first attempt at pulling data from Oanda """ oanda_environment = 'Practice' if oanda_environment == 'Live': # oanda_account_id = os.environ['OANDA_ACCOUNTID'] oanda_access_token = os.environ['OANDA_ACCESS_TOKEN'] # oanda_hostname = "api-fxtrade.oanda.com" else: # oanda_account_id = os.environ['OANDA_ACCOUNTID_DEV'] oanda_access_token = os.environ['OANDA_ACCESS_TOKEN_DEV'] # oanda_hostname = "api-fxpractice.oanda.com" # oanda_port = "443" client = API(oanda_access_token) instrument, granularity = "EUR_USD", "M15" _from = "2021-03-01T00:00:00Z" params = {"from": _from, "granularity": granularity} fn = "/tmp/{}.{}.json".format(instrument, granularity) if os.path.isfile(fn): os.remove(fn) with open(fn, "w") as OUT: # The factory returns a generator generating consecutive # requests to retrieve full history from date 'from' till 'to' json_data = list() for r in InstrumentsCandlesFactory(instrument=instrument, params=params): client.request(r) json_data.extend(r.response.get('candles')) OUT.write(json.dumps(json_data, indent=2))
def requestCandles(apiClient, gran, _from, _to, instr):
    """Fetch mid OHLC candles into a time-indexed DataFrame.

    Args:
        apiClient: authenticated oandapyV20 API client.
        gran: granularity code.
        _from: range start (ISO string).
        _to: range end (ISO string).
        instr: instrument name.

    Returns:
        DataFrame with float columns ['Open', 'High', 'Low', 'Close']
        sorted by its datetime index, or None when no candles came back
        (previously this case crashed on ``None.sort_index()``).
    """
    params = {"granularity": gran, "from": _from, "to": _to}
    rows = []
    index = []
    for r in InstrumentsCandlesFactory(instrument=instr, params=params):
        print("REQUEST: {} {} {}".format(r, r.__class__.__name__, r.params))
        apiClient.request(r)
        for candle in r.response.get('candles'):
            index.append(pd.to_datetime(candle.get('time')[0:19]))
            mid = candle['mid']
            rows.append({
                'Open': float(mid['o']),
                'High': float(mid['h']),
                'Low': float(mid['l']),
                'Close': float(mid['c'])
            })
    if not rows:
        return None
    # Build the frame once; per-row DataFrame.append was removed in
    # pandas 2.0 and was quadratic anyway.
    return pd.DataFrame(rows, index=index).sort_index()
def bulkloadlivedatabytime(instrument, granularity, start):
    """Load candle close prices from ``start`` (UTC ISO string) up to now.

    Example: bulkloadlivedatabytime('EUR_GBP', 'M15', '2017-08-07T00:00:00Z')
    Returns a DataFrame with a 'time' column and one column of mid closes
    named after the instrument.
    """
    client = API(access_token=access_token)
    stop_date = datetime.utcnow()
    start_date = datetime.strptime(start, "%Y-%m-%dT%H:%M:%SZ")
    diff = stop_date - start_date
    mins = int(diff.total_seconds() / (60 * 15))
    print(mins)
    stop = stop_date.strftime("%Y-%m-%dT%H:%M:%SZ")
    print(stop)
    params = {
        "from": start,
        "to": stop,
        "granularity": granularity,
        "count": mins
    }
    time = []
    value = []
    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        client.request(r)
        for candle in r.response.get('candles'):
            time.append(candle['time'])
            value.append(candle['mid']['c'])
    return pandas.DataFrame(data={'time': time, instrument: value})
def histPrice(self, broker, accountID, pair, t0, t1=None, gran='M10'):
    """Return a DataFrame of mid candles indexed by UTC timestamp.

    Args:
        broker: only 'oanda' is handled; anything else returns None.
        accountID: unused here; kept for interface compatibility.
        pair: instrument name.
        t0: range start (datetime).
        t1: optional range end (datetime); open-ended when None.
        gran: granularity code (default 'M10').
    """
    if broker == 'oanda':
        t0Str = t0.strftime('%Y-%m-%dT%H:%M:%S') + 'Z'
        if t1 is None:  # was ``t1 == None``; identity test is correct for None
            paramshist = {"from": str(t0Str), "granularity": gran}
        else:
            t1Str = t1.strftime('%Y-%m-%dT%H:%M:%S') + 'Z'
            paramshist = {"from": str(t0Str), "to": str(t1Str),
                          "granularity": gran}
        priceH = pd.DataFrame({'c': [], 'h': [], 'l': [], 'o': []}, index=[])
        client = API(access_token=cfg.brokerList['oanda']['token'])
        for r in InstrumentsCandlesFactory(instrument=pair, params=paramshist):
            rv = dict(client.request(r))["candles"]
            for candle in rv:
                print(candle)
                # pd.Timestamp's ``tz=`` accepts a zone name string;
                # ``tzinfo=`` requires a tzinfo instance and raised for 'UTC'.
                priceH.loc[pd.Timestamp(candle["time"],
                                        tz='UTC')] = candle["mid"]
        return (priceH)
def save_2mgdb(client, param, db):
    """Stream candles for param['instrument'] into a MongoDB collection.

    The collection is named after the granularity; after all chunks are
    written, duplicates are removed via ``drop_duplicates_func``.

    Args:
        client: authenticated oandapyV20 API client.
        param: dict with 'instrument' and 'api_param' (from/to/granularity).
        db: pymongo database handle.
    """
    collection = db[param['api_param']['granularity']]
    for r in InstrumentsCandlesFactory(instrument=param['instrument'],
                                       params=param['api_param']):
        client.request(r)
        # r.response looks like:
        # {'instrument': 'EUR_USD', 'granularity': 'S5',
        #  'candles': [{'complete': True, 'volume': 1,
        #               'time': '2018-01-01T22:00:00.000000000Z',
        #               'mid': {'o': '1.20052', 'h': '1.20052',
        #                       'l': '1.20052', 'c': '1.20052'}}]}
        candles = r.response.get('candles')  # candles is a list
        # ``not candles`` also covers a missing key (None), which the old
        # ``candles == []`` check let through to crash on candles[0].
        if not candles:
            print(
                '\t * skip to write next: find empty data (with candles == [])'
            )
            continue
        print('\t - download progress: {}'.format(candles[0].get('time')))
        # Normalize raw candles and write each bar document to mongodb.
        bar_list = fy.walk(normalize_raw_candles, candles)
        start_time = time.time()
        for bar in bar_list:
            collection.insert_one(bar)
        end_time = time.time()
        print('\t - it took {} second to write to mongodb '.format(end_time -
                                                                   start_time))
    length = drop_duplicates_func(collection)
    print(f'\t <<collection:{collection}>> has been drop {length} duplicates!')
def downloadStockData(self):
    '''
    :Arguments:
        :instruments: Name of the instrument we are trading
        :start: specify the start date of stcok to download
        :end: specify end date of the stock to download
    :Returntype:
        return the csv file of the downloaded stock in the specific folder.
    '''
    from oandapyV20.contrib.factories import InstrumentsCandlesFactory

    def covert_json(reqst, frame):
        # Write one CSV row per COMPLETED candle.  The old try/finally
        # version wrote ``rec`` unconditionally, which duplicated the
        # previous row for incomplete candles and raised NameError when
        # formatting failed on the very first candle.
        for candle in reqst.get('candles'):
            if not candle['complete']:
                continue  # only download closed candles
            ctime = candle.get('time')[0:19]
            rec = '{time},{complete},{o},{h},{l},{c},{v}'.format(
                time=ctime,
                complete=candle['complete'],
                o=candle['mid']['o'],
                h=candle['mid']['h'],
                l=candle['mid']['l'],
                c=candle['mid']['c'],
                v=candle['volume'])
            frame.write(rec + '\n')

    # try/except to both create the folder and download the ticker
    try:
        # Create folder for all instruments.
        if not os.path.exists(self.path['mainPath'] +
                              f'/DATASETS/{self.instrument}'):
            os.makedirs(self.path['mainPath'] +
                        f'/DATASETS/{self.instrument}')
        # Write the requested timeframe to its CSV file.
        with open(
                self.path['mainPath'] + '/DATASETS/{}/{}_{}.csv'.format(
                    self.instrument, self.instrument, self.timeframe),
                'w+') as OUTPUT:
            params = {
                'from': self.start,
                'to': self.end,
                'granularity': self.timeframe,
            }
            try:
                for ii in InstrumentsCandlesFactory(
                        instrument=self.instrument, params=params):
                    print("REQUEST: {} {} {}".format(ii, ii.__class__.__name__,
                                                     ii.params))
                    self.client.request(ii)
                    covert_json(ii.response, OUTPUT)
            except Exception:  # narrowed from a bare except
                print('{} not available using this API\n Please check your internet connection'.format(self.instrument))
            print('********************Done downloading******************\n{}_{}\n'.format(self.instrument, self.timeframe))
    except Exception as e:
        raise (e)
    finally:
        print('*' * 40)
        print('Stock download completed')
        print('*' * 40)
def get_forex_data(inst, frame, from_date=None, days=3650, include_first=True):
    """Fetch bid/ask candles for ``inst`` and return a cleaned DataFrame.

    Args:
        inst: instrument name.
        frame: granularity code.
        from_date: optional explicit start; defaults to ``days`` back.
        days: lookback window in days used when from_date is None.
        include_first: keep the very first candle when True.

    Returns:
        Frame with utc_time, ask/bid OHLC and volume columns, deduplicated
        on time and sorted ascending.
    """
    client = oandapyV20.API(access_token=os.environ.get('OANDA_TOKEN'))
    # time period used
    end_date = datetime.datetime.utcnow().replace(microsecond=0)
    start_date = (end_date - datetime.timedelta(days=days)).isoformat('T') + 'Z' \
        if from_date is None \
        else pd.Timestamp(from_date).to_pydatetime().isoformat('T') + 'Z'
    end_date = end_date.isoformat('T') + 'Z'
    # define request parameters
    params = {
        'granularity': frame,
        'price': 'AB',
        'from': start_date,
        'to': end_date
    }
    # fetch data from Oanda servers by a client request
    data = list()
    for r in InstrumentsCandlesFactory(instrument=inst, params=params):
        client.request(r)
        data.append(r.response['candles'])
    flat_list = [flatten_dict(item) for sublist in data for item in sublist]
    # remove duplicated records from the dataFrame
    candles_data = ks.DataFrame(data=flat_list)
    candles_data = candles_data.iloc[1:] if not include_first else candles_data
    candles_data = candles_data[candles_data['complete'] == True]
    candles_data = candles_data.drop_duplicates(
        'time', keep='last').reset_index(drop=True)
    # np.float was removed in NumPy 1.24; the builtin float is equivalent.
    candles_data = candles_data.astype({
        'ask.o': float,
        'ask.h': float,
        'ask.l': float,
        'ask.c': float,
        'bid.o': float,
        'bid.h': float,
        'bid.l': float,
        'bid.c': float,
        'volume': float
    })
    candles_data.rename(columns={
        'ask.o': 'ask_open',
        'ask.h': 'ask_high',
        'ask.l': 'ask_low',
        'ask.c': 'ask_close',
        'bid.o': 'bid_open',
        'bid.h': 'bid_high',
        'bid.l': 'bid_low',
        'bid.c': 'bid_close',
        'time': 'utc_time'
    }, inplace=True)
    candles_data['utc_time'] = candles_data['utc_time'].apply(
        lambda x: datetime.datetime.strptime(
            x.split('.')[0] + 'Z', '%Y-%m-%dT%H:%M:%SZ'))
    candles_data = candles_data[[
        'utc_time', 'ask_open', 'ask_high', 'ask_low', 'ask_close',
        'bid_open', 'bid_high', 'bid_low', 'bid_close', 'volume'
    ]]
    candles_data.sort_values(by=['utc_time'], inplace=True, ascending=True)
    return candles_data
def __call__(self, instrument, _from, _to, granularity="M1", price="MBA",
             count=2500, save=False):
    """Fetch candles and return them as [open, close, high, low, volume] rows.

    When ``save`` is True the rows are also dumped to a JSON file under
    SAVEPATH and (rows, filename) is returned; otherwise just the rows.
    """
    from_date = _from.strftime("%Y-%m-%dT%H:%M:%SZ")
    to_date = _to.strftime("%Y-%m-%dT%H:%M:%SZ")
    params = {
        "from": from_date,
        "to": to_date,
        "price": price,
        "granularity": granularity,
        "count": count,
    }

    # Accumulate raw candles across all paginated requests.
    raw_candles = []
    for req in InstrumentsCandlesFactory(instrument=instrument,
                                         params=params):
        raw_candles += self.api.request(req)['candles']

    list_candles = []
    for candle in tqdm(raw_candles):
        mid = candle["mid"]
        list_candles.append([
            float(mid["o"]),  # mid open
            float(mid["c"]),  # mid close
            float(mid["h"]),  # mid high
            float(mid["l"]),  # mid low
            #float(candle["ask_c"]),  # ask close
            #float(candle["bid_c"]),  # bid close
            candle["volume"]
        ])

    overall_count = len(list_candles)
    if save:
        now = datetime.datetime.now()
        namefile = os.path.join(
            SAVEPATH,
            "{}_from{}_to{}{}_{}_{}.json".format(instrument, _from, _to,
                                                 now.strftime("%B"),
                                                 granularity, overall_count))
        with open(namefile, "w") as jout:
            json.dump(list_candles, jout, indent=4)
        return list_candles, namefile
    return list_candles
def get_candles(instrument, granularity, _from, _to, da=daily_alignment):
    """Fetch bid/ask/mid candles and assemble them into one DataFrame."""
    print('Fetching Candles.')
    # NOTE(review): hard-coded API credential committed to source — this
    # should live in an environment variable or config file.
    client = 'f01b219340f61ffa887944e7673d85a5-6bcb8a840148b5c366e17285c984799e'
    client = oandapyV20.API(access_token=client)
    params = {
        'from': _from,
        'to': _to,
        'granularity': granularity,
        'price': 'BAM',
        'count': 5000,
        'alignmentTimezone': 'UTC',
        'dailyAlignment': da
    }

    # Request Data: collect every paginated response.
    coll = []
    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        client.request(r)
        coll.append(r.response)

    # Flatten the responses into per-field float lists.
    bidlow, bidhigh, bidclose = [], [], []
    asklow, askhigh, askclose = [], [], []
    midlow, midhigh, midclose = [], [], []
    timestamp, volume = [], []
    for resp in coll:
        for c in resp['candles']:
            bidhigh.append(float(c['bid']['h']))
            bidlow.append(float(c['bid']['l']))
            bidclose.append(float(c['bid']['c']))
            askhigh.append(float(c['ask']['h']))
            asklow.append(float(c['ask']['l']))
            askclose.append(float(c['ask']['c']))
            midhigh.append(float(c['mid']['h']))
            midlow.append(float(c['mid']['l']))
            midclose.append(float(c['mid']['c']))
            timestamp.append(c['time'])
            volume.append(float(c['volume']))

    # Assemble the DataFrame, casting values to numeric dtypes.
    df = pd.DataFrame(pd.to_datetime(timestamp))
    df.columns = ['timestamp']
    df['bidhigh'] = pd.to_numeric(bidhigh)
    df['bidlow'] = pd.to_numeric(bidlow)
    df['bidclose'] = pd.to_numeric(bidclose)
    df['askhigh'] = pd.to_numeric(askhigh)
    df['asklow'] = pd.to_numeric(asklow)
    df['askclose'] = pd.to_numeric(askclose)
    df['midhigh'] = pd.to_numeric(midhigh)
    df['midlow'] = pd.to_numeric(midlow)
    df['midclose'] = pd.to_numeric(midclose)
    df['spread'] = df.askclose - df.bidclose
    df['volume'] = pd.to_numeric(volume)
    return df
def getMarketData(self, granularity):
    """Fetch candles between the bounds in ``self.date``.

    For granularity "D" the normal result is DISCARDED and replaced by a
    back-filled slice taken from a second request two months back (see the
    block below) — presumably to pad out recent daily bars; TODO confirm
    the intent with the author.
    """
    access_token = self.access_token;
    client = self.client;
    instrument = self.instrument;
    # self.date holds (start, end) as preformatted time strings.
    startTimeString,endTimeString = self.date;
    params = { "from": startTimeString, "to": endTimeString, "granularity": granularity, "includeFirst": True, "count": 5000, }
    marketData = [];
    # Accumulate candles across all paginated factory requests.
    for r in InstrumentsCandlesFactory(instrument=instrument,params=params):
        rv = client.request(r);
        marketData.extend(rv["candles"]);
    if granularity == "D":
        newCandles = []
        toAdd = []
        # Shift the window back: end at the old start, begin two months
        # earlier (via self.parseMonthsBack).
        endTimeString = startTimeString
        startTimeString = self.parseMonthsBack(startTimeString,2)
        params = { "from": startTimeString, "to": endTimeString, "granularity": granularity, "includeFirst": True, "count": 5000, }
        for r in InstrumentsCandlesFactory(instrument=instrument,params=params):
            rv = client.request(r);
            newCandles.extend(rv["candles"])
        print("newStartDate: " + startTimeString)
        print("newEndDate: " + endTimeString)
        # Pad up to a full week relative to self.dayInterval, copying the
        # last few candles of the earlier window.  NOTE(review): indexes
        # assume newCandles has at least 2 + daysToAdd entries — an
        # IndexError is possible on sparse data; confirm with caller.
        daysToAdd = 7 - (self.dayInterval%7)
        toMinus = 2 + daysToAdd
        print(newCandles)
        for i in range(daysToAdd):
            print(toMinus)
            toAdd.append(newCandles[len(newCandles)-toMinus])
            toMinus -= 1
        # The originally fetched candles are replaced, not extended.
        marketData = toAdd
    return marketData;
def fetch_to_db(self, _from: datetime.datetime, _to: datetime.datetime,
                gran: str, symbol: str):
    """Fetch completed candles for ``symbol`` and insert new ones as
    PriceQuote rows.

    Quotes already stored in the window (queried up front) are skipped by
    comparing formatted timestamp strings.  On SQLAlchemy errors the
    session is rolled back; other exceptions are only printed.
    """
    # Oanda instrument name, e.g. 'EURUSD' -> 'EUR_USD'.
    instr = symbol[:3] + '_' + symbol[-3:]
    params = {
        "granularity": gran,
        "from": _from.strftime(self.date_format_in),
        "to": _to.strftime(self.date_format_in)
    }
    session = Connection.get_instance().get_session()
    # Pre-load quotes already in the DB for this window; the one-minute
    # slack on the lower bound catches a quote right at _from.
    existing_quotes = session.query(PriceQuote) \
        .filter_by(symbol=symbol) \
        .filter(PriceQuote.datetime >= (_from - datetime.timedelta(minutes=1)))\
        .filter(PriceQuote.datetime <= _to).all()
    # Compare as formatted strings since candle times arrive as strings.
    existing_quote_dts = list(
        map(lambda _quote: _quote.datetime.strftime(self.date_format_out),
            existing_quotes))
    try:
        for r in InstrumentsCandlesFactory(instrument=instr, params=params):
            print("REQUEST: {} {} {}".format(r, r.__class__.__name__,
                                             r.params))
            rv = self.client.request(r)
            for candle in r.response.get('candles'):
                # Truncate to seconds precision to match date_format_out.
                dt = candle.get('time')[0:19]
                print(candle)
                # Only persist completed candles not already stored.
                if candle['complete'] and dt not in existing_quote_dts:
                    quote = PriceQuote(
                        symbol,
                        datetime.datetime.strptime(dt, self.date_format_out),
                        candle['mid']['h'], candle['mid']['l'],
                        candle['volume'])
                    existing_quote_dts.append(dt)
                    session.add(quote)
        session.commit()
    except SQLAlchemyError as e:
        session.rollback()
        print(e)
    except Exception as e:
        # NOTE(review): swallows non-DB errors silently (print only).
        print(e)
def get_history(self, instrument: str, granularity: str, count: int = 50,
                _from=None, _to=None, price="MBA", complete=False):
    # granularity input is given as a number [seconds]
    # or as a string following granularity_dict values
    """
    Return a list of candles, each candle is a dictionary:
        {
            'complete': True,
            'volume': 100,
            'time': '2018-10-05T14:56:40.000000000Z',
            'mid': {'o': '1.15258', 'h': '1.15286',
                    'l': '1.15246', 'c': '1.15286'}
        }

    When both _from and _to are given, 'count' is dropped and the full
    range is fetched.  With complete=True the trailing still-forming
    candle is removed.
    """
    params = {
        "granularity": granularity if type(granularity) == str
        else granularity_dict[granularity]
    }
    params['count'] = count
    params['price'] = price
    if _from and _to:
        params['from'] = _from
        params['to'] = _to
        params.pop("count")
    candles = []
    for req in InstrumentsCandlesFactory(instrument=instrument,
                                         params=params):
        single_req = self.send_request(req)
        candles += single_req['candles']
    if not complete:
        return candles
    # Guard the empty case: candles[-1] raised IndexError when the API
    # returned no candles at all.
    if candles and not candles[-1]["complete"]:
        return candles[:-1]
    return candles
def save_2file(client, param, saveto, account_type):
    """Download candles and write them under ../output as JSON text or CSV.

    ``saveto`` selects the format: 'string' dumps raw JSON chunks,
    'csv' converts each chunk (header only on the first one).
    """
    params = param['api_param']
    file_2_write = "../output/{}.{}.{}.{}.{}".format(
        param['instrument'], params['granularity'],
        params['from'][0:20].replace('-', ''),
        params['to'][0:20].replace('-', ''), account_type)
    if (saveto == 'csv'):
        file_2_write += '.csv'
    print('Write data to {}'.format(file_2_write))
    with open(file_2_write, "w") as OUT:
        cnt = 0
        # The factory yields consecutive requests covering 'from'..'to'.
        for r in InstrumentsCandlesFactory(instrument=param['instrument'],
                                           params=params):
            client.request(r)
            candles = r.response.get('candles')  # candles is a list
            if (candles == []):
                print(
                    'skip to write next: find empty data (with candles == []), at '
                )
                continue
            print('\t - download progress: {}'.format(candles[0].get('time')))
            start_time = time.time()
            if (saveto == 'string'):
                OUT.write(json.dumps(candles, indent=2))
            elif (saveto == 'csv'):
                # Only the first chunk (cnt == 0) writes the CSV header.
                convert_candle_list_2_csv(OUT, candles, skip_header=cnt)
            cnt += 1
            end_time = time.time()
            print('\t - it took {} second to write to {} '.format(
                end_time - start_time, saveto))
def get_raw_data(td, granularity):
    """Fetch candles covering the trailing period ``td`` up to now (UTC).

    Args:
        td: timedelta lookback window.
        granularity: Oanda granularity code.

    Returns:
        List of candle dicts from ALL paginated requests.  The previous
        version overwrote the result on each iteration, silently dropping
        every chunk but the last for long windows.
    """
    time_now = datetime.utcnow()
    starting_time = time_now - td
    _from = starting_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    _to = time_now.strftime("%Y-%m-%dT%H:%M:%SZ")
    params = {
        "from": _from,
        "to": _to,
        "granularity": granularity,
    }
    timeframe_price_data = []
    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        client.request(r)
        timeframe_price_data.extend(r.response.get('candles') or [])
    return timeframe_price_data
def getMarketData(self, granularity):
    """Fetch all candles between the self.date bounds at ``granularity``."""
    startTimeString, endTimeString = self.date
    params = {
        "from": startTimeString,
        "to": endTimeString,
        "granularity": granularity,
        "includeFirst": True,
        "count": 5000,
    }
    marketData = []
    # Accumulate candles across every paginated request.
    for request in InstrumentsCandlesFactory(instrument=self.instrument,
                                             params=params):
        response = self.client.request(request)
        marketData.extend(response["candles"])
    return marketData
def getdata(self):
    """Build a DataFrame of one price series per configured instrument.

    Each column is named after the instrument and indexed by candle
    datetime; values come from candle['mid'][self.candle].
    """
    client = API(access_token=self.token)
    data_frame = pd.DataFrame()
    for pair in self.instruments:
        series_map = {}
        for req in InstrumentsCandlesFactory(instrument=pair,
                                             params=self.params):
            client.request(req)
            for candle in req.response.get('candles'):
                ts = datetime.strptime(
                    candle['time'].replace(".000000000Z", ''),
                    '%Y-%m-%dT%H:%M:%S')
                series_map[ts] = candle['mid'][self.candle]
        data_frame[pair] = pd.Series(series_map)
    return data_frame
def get_candles_bid_close(instrument, granularity, _from, _to,
                          da=daily_alignment, oanda_api=oanda_api):
    """Fetch bid-close candles into a [timestamp, <instrument>, volume] frame.

    The trailing candle is dropped when it is still forming.

    Args:
        instrument: Oanda instrument name.
        granularity: granularity code.
        _from: range start (ISO string).
        _to: range end (ISO string).
        da: dailyAlignment hour.
        oanda_api: API access token.
    """
    print('Fetching Candles.')
    client = oandapyV20.API(access_token=oanda_api)
    params = {
        'from': _from,
        'to': _to,
        'granularity': granularity,
        'price': 'B',
        'count': 5000,
        'alignmentTimezone': 'America/Los_Angeles',
        'dailyAlignment': da
    }
    # Request Data
    coll = []
    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        try:
            client.request(r)
            coll.append(r.response)
        except Exception as e:
            print(e)
    # Collect returned data into lists, cast to floats.
    bidclose, timestamp, volume = [], [], []
    for resp in coll:
        for candle in resp['candles']:
            bidclose.append(float(candle['bid']['c']))
            timestamp.append(candle['time'])
            volume.append(float(candle['volume']))
    # Assemble DataFrame. Cast Values.
    df = pd.DataFrame(pd.to_datetime(timestamp))
    df.columns = ['timestamp']
    df[instrument] = pd.to_numeric(bidclose)
    df['volume'] = pd.to_numeric(volume)
    # Drop the last row when the final candle is incomplete.  Guarding on
    # ``coll`` fixes the NameError the old ``coll[i]`` raised when every
    # request failed and the loop never ran.
    if coll and coll[-1]['candles'] and not coll[-1]['candles'][-1]['complete']:
        df.drop(df.last_valid_index(), inplace=True)
    return df
def fetch_to_file(self, _from: datetime.datetime, _to: datetime.datetime,
                  gran='H1', symbol='EURUSD'):
    """Create or append a CSV of candles for ``symbol`` under resources/.

    When the file already exists, fetching resumes one bar after the last
    recorded timestamp and the new rows are appended.
    """
    file_path = os.path.join(os.path.abspath(os.getcwd()), 'resources',
                             'oanda_prices', symbol + '.csv')
    mode = 'w'
    if os.path.isfile(file_path):
        mode = 'a'
        # Read the last line to find where the existing data stops.
        with open(file_path, "r") as O:
            all_lines = O.readlines()
            last_line = all_lines[len(all_lines) - 1]
            last_dt = datetime.datetime.strptime(last_line[0:19],
                                                 self.date_format_out)
        # Advance one bar past the last stored candle.
        if gran == 'M1':
            delta = datetime.timedelta(minutes=1)
        elif gran == 'D1':
            delta = datetime.timedelta(days=1)
        else:
            delta = datetime.timedelta(hours=1)
        _from = last_dt + delta
    instr = symbol[:3] + '_' + symbol[-3:]
    params = {
        "granularity": gran,
        "from": _from.strftime(self.date_format_in),
        "to": _to.strftime(self.date_format_in)
    }
    with open(file_path, mode) as O:
        for r in InstrumentsCandlesFactory(instrument=instr, params=params):
            print("REQUEST: {} {} {}".format(r, r.__class__.__name__,
                                             r.params))
            self.client.request(r)
            OandaHistoryPriceFetcher.__write_rec_to_file(r.response, O)
def bulkloadlivedata(instrument, granularity, minutelookback):
    """Load candle close prices for the last ``minutelookback`` minutes.

    Example: bulkloadlivedata('DE30_EUR', 'M15', '100000')
    Returns a DataFrame with a 'time' column and one column of mid closes
    named after the instrument.
    """
    client = API(access_token=access_token)
    stop_date = datetime.utcnow()
    stop = stop_date.strftime("%Y-%m-%dT%H:%M:%SZ")
    start = (stop_date - timedelta(minutes=int(minutelookback))
             ).strftime("%Y-%m-%dT%H:%M:%SZ")
    params = {"from": start, "to": stop, "granularity": granularity}
    time = []
    value = []
    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        client.request(r)
        for candle in r.response.get('candles'):
            time.append(candle['time'])
            value.append(candle['mid']['c'])
    return pandas.DataFrame(data={'time': time, instrument: value})
def get_candles(_from, _to, instrument, granularity=granularity,
                da=daily_alignment):
    """Fetch mid candles and return a [timestamp, high, low, close] frame."""
    # Prepare Request.
    # NOTE(review): hard-coded API credential committed to source — this
    # should live in an environment variable or config file.
    code = 'f01b219340f61ffa887944e7673d85a5-6bcb8a840148b5c366e17285c984799e'
    client = code
    client = oandapyV20.API(access_token=client)
    params = {
        'from': _from,
        'to': _to,
        'granularity': granularity,
        'price': 'M',
        'count': 5000,
        'alignmentTimezone': 'UTC',  # 'America/Los_Angeles',
        'dailyAlignment': da
    }
    # Request Data: collect every paginated response.
    coll = []
    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        client.request(r)
        coll.append(r.response)
    # Flatten responses into float lists.
    high, low, close, timestamp = [], [], [], []
    for resp in coll:
        for candle in resp['candles']:
            high.append(float(candle['mid']['h']))
            low.append(float(candle['mid']['l']))
            close.append(float(candle['mid']['c']))
            timestamp.append(candle['time'])
    # Assemble DataFrame. Cast Values.
    df = pd.DataFrame(pd.to_datetime(timestamp))
    df.columns = ['timestamp']
    df['high'] = pd.to_numeric(high)
    df['low'] = pd.to_numeric(low)
    df['close'] = pd.to_numeric(close)
    return df
def multi_assets_builder(self, weeks, instruments, continuous=True,
                         granularity="M1", price="MBA", count=2500,
                         save=False):
    """Build aligned mid-close price frames for several instruments.

    Args:
        weeks: lookback used by ``get_time_interval``.
        instruments: iterable of instrument names.
        continuous: collapse all intervals into one continuous range and
            return a dict with 'prices'/'min'/'max'; otherwise return a
            list of per-interval DataFrames.
        granularity: candle granularity code.
        price: price components requested from the API.
        count: candles per paginated request.
        save: unused here; kept for interface compatibility.
    """
    time_intervals = get_time_interval(weeks_ago=weeks)
    if continuous:
        # One interval spanning from the oldest start to the newest end.
        time_intervals = [(time_intervals[-1][0], time_intervals[0][1])]
    datas = []
    for _from, _to in time_intervals:
        dfs = []
        from_date = _from.strftime("%Y-%m-%dT%H:%M:%SZ")
        to_date = _to.strftime("%Y-%m-%dT%H:%M:%SZ")
        params = {
            "from": from_date,
            "to": to_date,
            "price": price,
            "granularity": granularity,
            "count": count,
        }
        for ins in instruments:
            candles = []
            for req in InstrumentsCandlesFactory(instrument=ins,
                                                 params=params):
                single_req = self.api.request(req)
                candles += single_req['candles']
            list_candles = []
            for candle in tqdm(candles):
                # Daily candles carry only a date; intraday include time.
                if granularity == "D":
                    timestamp = datetime.datetime.strptime(
                        candle["time"][:10], "%Y-%m-%d")
                else:
                    timestamp = datetime.datetime.strptime(
                        candle["time"][:19], "%Y-%m-%dT%H:%M:%S")
                list_candles.append({
                    "timestamp": timestamp,
                    # mid close only; o/h/l intentionally omitted
                    "{}".format(ins): float(candle["mid"]["c"]),
                })
            to_append = pd.DataFrame(list_candles).set_index("timestamp")
            to_append = to_append.loc[~to_append.index.duplicated(
                keep='first')]
            dfs.append(to_append)
        df = pd.concat(dfs, axis=1)
        # fillna(method=...) is deprecated in pandas 2.x; ffill/bfill are
        # the supported equivalents.
        df = df.ffill()
        df = df.bfill()
        datas.append(df)
    if continuous:
        data_list = [
            list(d) for d in zip(*[df[x].tolist() for x in instruments])
        ]
        data_output = {
            "prices": data_list,
            "min": np.array(data_list).min(),
            "max": np.array(data_list).max()
        }
        return data_output
    else:
        return datas  # this is a list of DataFrames
def cnv(r):
    """Feed each candle's mid close into the global ``bot`` and report.

    NOTE(review): the ``rec`` CSV line is built but only used by the
    commented-out print; it is kept because its dict lookups drive the
    except path (a malformed candle raises while formatting and is
    reported instead of ticking the bot).
    """
    for candle in r.get('candles'):
        # Truncate the timestamp to seconds precision.
        ctime = candle.get('time')[0:19]
        try:
            rec = "{time},{complete},{o},{h},{l},{c},{v}".format(
                time=ctime,
                complete=candle['complete'],
                o=candle['mid']['o'],
                h=candle['mid']['h'],
                l=candle['mid']['l'],
                c=candle['mid']['c'],
                v=candle['volume'],
            )
            close = float(candle['mid']['c'])
            bot.tick(close)
        except Exception as e:
            print(e, r)
        else:
            # Only runs when the candle parsed cleanly and bot.tick
            # succeeded.
            # print(rec + "\n")
            print("Current Price: " + str(close) + " Current Value: " +
                  str(bot.get_current_value()))


# Drive the bot over the full candle history, one paginated request at a
# time (module-level globals: instr, params, client, bot).
for r in InstrumentsCandlesFactory(instrument=instr, params=params):
    print("REQUEST: {} {} {}".format(r, r.__class__.__name__, r.params))
    rv = client.request(r)
    cnv(r.response)
def getHistory(instrument, granularity, dateFrom, dateTo, count, freq):
    """Download candles, compute MACD/EMA indicators, pickle and print them.

    Args:
        instrument: Oanda instrument name.
        granularity: granularity code.
        dateFrom: range start (ISO string; colons are sanitized for the
            pickle filename).
        dateTo: range end (ISO string).
        count: candles per paginated request.
        freq: unused; kept for interface compatibility (it fed a
            commented-out date_range in the old version).

    Side effects:
        Writes the indicator DataFrame to ./historical-data/<name> as a
        pickle, reads it back, and prints it.
    """
    accountID, token = authAPI()
    api = API(access_token=token)
    params = {
        "from": dateFrom,
        "to": dateTo,
        "count": count,
        "granularity": granularity,
    }
    # Accumulate raw candles across paginated requests.
    rawData = []
    for r in InstrumentsCandlesFactory(instrument=instrument, params=params):
        api.request(r)
        rawData.extend(r.response["candles"])
    dfRaw = pd.DataFrame.from_records(rawData)

    # Expand the per-candle 'mid' dicts into OHLC columns.  (Values remain
    # strings, exactly as the API returned them — same as before.)
    dfHistory = pd.DataFrame({
        "open": [mid["o"] for mid in dfRaw["mid"]],
        "high": [mid["h"] for mid in dfRaw["mid"]],
        "low": [mid["l"] for mid in dfRaw["mid"]],
        "close": [mid["c"] for mid in dfRaw["mid"]],
    })

    # Indicators.
    macdHistory, histHistory = MACD(dfHistory.close, fast=12, slow=26,
                                    signal=9)
    dfHistory["macd"] = macdHistory
    dfHistory["hist"] = histHistory
    dfHistory["ma10"] = EMA(dfHistory.close, 10)
    dfHistory["ma50"] = EMA(dfHistory.close, 50)

    # Persist and echo back; colons are not filename-safe on all systems.
    strFrom = dateFrom.replace(":", "_")
    strTo = dateTo.replace(":", "_")
    fname = ("./historical-data/" + str(instrument) + "-" +
             str(granularity) + "-" + strFrom + "to" + strTo)
    dfHistory.to_pickle(path=fname)
    dfPickled = pd.read_pickle(path=fname)
    print(dfPickled)