def __init__(self, scheduler: NetworkScheduler, instrument_cache: InstrumentCache, include_symbol: str = '*', instance_id: str = 'prod'):
    """Create a Coinbase Pro feed connector.

    :param scheduler: network scheduler handed through to the base class
    :param instrument_cache: instrument cache handed through to the base class
    :param include_symbol: symbol filter; '*' means include all symbols
    :param instance_id: 'prod' for the live endpoint, 'test' for the sandbox
    :raises ValueError: if instance_id is neither 'prod' nor 'test'
    """
    # Per-environment websocket URI and optional REST api_url; None means the
    # coinbasepro client's default (production) REST endpoint.
    endpoints = {
        'prod': ('wss://ws-feed.pro.coinbase.com', None),
        'test': ('wss://ws-feed-public.sandbox.pro.coinbase.com',
                 'https://api-public.sandbox.pro.coinbase.com'),
    }
    if instance_id not in endpoints:
        raise ValueError(f'Unknown instance_id: {instance_id}')
    ws_uri, api_url = endpoints[instance_id]
    self.ws_uri = ws_uri
    if api_url is None:
        self.cbp_client = coinbasepro.PublicClient()
    else:
        self.cbp_client = coinbasepro.PublicClient(api_url=api_url)

    # the base class loads instruments, so the REST client must exist first
    super().__init__(scheduler, instrument_cache, instance_id)

    self.include_symbol = include_symbol
    self.instrument_trades = {}
    self.instrument_order_book_events = {}
    self.instrument_order_books = {}

    # timeout in seconds
    self.timeout = 60
def __init__(self, scheduler: NetworkScheduler, instrument_cache: InstrumentCache, instance_id: str = 'prod'):
    """Create a Coinbase Pro connector for either the live or sandbox environment.

    :param scheduler: network scheduler handed through to the base class
    :param instrument_cache: instrument cache handed through to the base class
    :param instance_id: 'prod' for the live endpoint, 'test' for the sandbox
    :raises ValueError: if instance_id is neither 'prod' nor 'test'
    """
    if instance_id not in ('prod', 'test'):
        raise ValueError(f'Unknown instance_id: {instance_id}')
    if instance_id == 'prod':
        self.ws_uri = 'wss://ws-feed.pro.coinbase.com'
        self.cbp_client = coinbasepro.PublicClient()
    else:
        self.ws_uri = 'wss://ws-feed-public.sandbox.pro.coinbase.com'
        self.cbp_client = coinbasepro.PublicClient(api_url='https://api-public.sandbox.pro.coinbase.com')

    # the REST client must exist before super().__init__ loads instruments
    super().__init__(scheduler, instrument_cache, instance_id)

    self.instrument_trades = {}
    self.instrument_quotes = {}
def download_data():
    """Download ETH-USD one-minute candles from Coinbase Pro, from 2019-01-01 to now.

    Fetches 300 candles per request (the API page limit), backs off and retries
    the same window on 'Slow rate limit exceeded', then preprocesses the
    accumulated rates and pickles them to disk.
    """
    client = cbpro.PublicClient()
    # 300 one-minute candles per request — the API's max page size
    delta = timedelta(seconds=60 * 300)
    now = datetime.now()
    start = datetime(2019, 1, 1)
    end = start + delta
    rates = []
    while end < now:
        time_start = datetime.now()
        click.echo(f'Downloading: {start} -> {end}')
        rates_partial = client.get_product_historic_rates(
            'ETH-USD',
            start=start,
            end=end,
            granularity=60,
        )
        # on error the client returns a dict with a 'message' instead of a list
        if isinstance(rates_partial, dict):
            message = rates_partial['message']
            if message == 'Slow rate limit exceeded':
                # back off, rebuild the client, and retry the same window
                time.sleep(10)
                client = cbpro.PublicClient()
                continue
            # fixed typo in user-facing message ("Unkown" -> "Unknown")
            click.echo(f"Unknown error: {message}")
            sys.exit(1)
        # API returns newest-first; reverse to keep the list chronological
        rates_partial.reverse()
        rates.extend(rates_partial)
        start = end
        end = start + delta
        # throttle to ~3 requests/second to stay under the public rate limit
        time_end = datetime.now()
        time_diff = time_end - time_start
        time_to_sleep = 0.334 - time_diff.total_seconds()
        if time_to_sleep > 0.0:
            time.sleep(time_to_sleep)
    rates = rates_preprocess(rates)
    # NOTE(review): data downloaded is ETH-USD but the pickle is named ETH-EUR — confirm which is intended
    rates_save(rates, './data/ETH-EUR.pkl')
    click.echo("Done")
def market_index():
    """Render the market dashboard; on POST, add a new currency to it.

    GET: fetches a Coinbase Pro ticker for each currency the user tracks and
    renders the landing page. POST: adds `<form currency>-USD` to the user's
    dashboard if not already present, then redirects back here.
    """
    # removed leftover debug print of the user object (leaked user data to stdout)
    user = get_user(session.get('username'))
    cb_client = cbp.PublicClient()
    user_currencies = UserCurrency.query.filter_by(user_id=user.id).all()
    # one (currency, ticker) pair per tracked currency
    response = []
    for cur in user_currencies:
        ticker = cb_client.get_product_ticker(cur.currency)
        response.append((cur.currency, ticker))
    form = AddCurrencyForm()
    if request.method == 'POST':
        currency = form.currency.data + '-USD'
        new_cur = UserCurrency(user_id=user.id, currency=currency, amount=0.0)
        # NOTE(review): this membership test relies on UserCurrency defining
        # __eq__; without it, comparison is by identity and this branch always
        # adds a duplicate — confirm against the model definition.
        if new_cur not in user_currencies:
            db.session.add(new_cur)
            db.session.commit()
            flash('{} added to your dashboard'.format(currency))
            return redirect(url_for('market_bp.market_index'))
        else:
            flash('{} already in your dashboard'.format(currency))
    return render_template('landing.html', currency=user_currencies, response=response, form=form)
def requires(self):
    """Yield one AzureBlobUploadTask per (exchange, db, product) combination.

    Product lists are fetched live from Coinbase Pro and Phemex; Phemex tasks
    are yielded first, then Coinbase Pro, each for BOOKS and then TRADES.
    """
    cbp_conn = coinbasepro.PublicClient()
    coinbase_products = [entry['id'] for entry in cbp_conn.get_products()]

    phemex_conn, phemex_ws = get_phemex_connection()
    phemex_products = [entry['symbol'] for entry in phemex_conn.get_products()['data']]

    # (db table prefix, supported products) per exchange, in yield order
    exchange_specs = [
        ('PHEMEX', phemex_products),
        ('COINBASE_PRO', coinbase_products),
    ]
    for db_prefix, supported_products in exchange_specs:
        for db in ['BOOKS', 'TRADES']:
            for product in supported_products:
                yield AzureBlobUploadTask(behemoth_path=self.behemoth_path,
                                          storage_account=self.storage_account,
                                          db=f'{db_prefix}_{db}',
                                          product=product,
                                          start_date=self.start_date,
                                          end_date=self.end_date)
def __init__(self):
    """Register the command's parameters and create a public API client."""
    super().__init__()
    # product-id is the only required parameter
    self.set_param("product-id", required=True)
    # remaining parameters carry string defaults
    defaults = {
        "start": "2017-01-01T00:00:00.000Z",
        "stop": "2021-01-09T00:00:00.000Z",
        "granularity": "60",
    }
    for name, value in defaults.items():
        self.set_param(name, default=value)
    self.client = cbp.PublicClient()
def download(self, symbol: str, start_date, end_date):
    """Download historical one-minute rates for *symbol* and store them in the tickstore.

    Steps one day at a time from start_date to end_date (inclusive), fetching
    each day in six 4-hour windows (the last ends at 23:59) with a 1-second
    pause between requests to respect rate limits.

    :param symbol: exchange product id, e.g. 'BTC-USD'
    :param start_date: date or 'YYYY-MM-DD' string
    :param end_date: date or 'YYYY-MM-DD' string
    """
    client = cbp.PublicClient()

    # dates passed on the command line arrive as strings; isinstance instead
    # of type(x) == str handles str subclasses too
    if isinstance(start_date, str):
        start_date = datetime.strptime(start_date, '%Y-%m-%d').date()
    if isinstance(end_date, str):
        end_date = datetime.strptime(end_date, '%Y-%m-%d').date()

    delta = timedelta(days=1)
    while start_date <= end_date:
        all_raw_rates = []
        day = start_date.strftime('%Y-%m-%d')
        # load data 4 hours at a time, up until 23:59:00; compute the window
        # end instead of mutating the loop variable
        for start_hour in range(0, 24, 4):
            if start_hour + 4 == 24:
                end_hour, end_minute = 23, 59
            else:
                end_hour, end_minute = start_hour + 4, 0
            start = day + ' {:02d}:{:02d}:00.000'.format(start_hour, 0)
            stop = day + ' {:02d}:{:02d}:00.000'.format(end_hour, end_minute)
            print('downloading ' + start + ' - ' + stop)
            raw_rates = client.get_product_historic_rates(symbol, start=start, stop=stop)
            all_raw_rates.extend(raw_rates)
            sleep(1)

        if all_raw_rates:
            # convert one day's data into pandas, and convert all the decimal
            # typed fields from the coinbasepro API into float; h5py doesn't
            # support decimal serialization
            hist_rates = pd.DataFrame(all_raw_rates)
            hist_rates.set_index('time', inplace=True)
            for col in ('open', 'high', 'low', 'close', 'volume'):
                hist_rates[col] = hist_rates[col].astype(float)
            # force ascending sort on time
            hist_rates.sort_index(inplace=True)

            print('writing historical rates to Tickstore')
            self.tickstore.insert(symbol, BiTimestamp(start_date), hist_rates)

        start_date += delta
def backfill_coinbase_trades(staging_dir: str = '/mnt/raid/data/behemoth/staging',
                             symbol: str = 'BTC-USD',
                             start_date=date(2015, 7, 20),
                             end_date=None):
    """Backfill one-minute Coinbase Pro candles to splayed HDF5 files.

    Steps one day at a time from start_date to end_date (inclusive), fetching
    each day in six 4-hour windows (the last ends at 23:59), then writes the
    day's candles as compressed HDF5 under
    {staging_dir}/COINBASE_PRO_ONE_MIN_BINS/YYYY/MM/DD/{symbol}.h5.

    :param staging_dir: root of the staging directory tree
    :param symbol: exchange product id, e.g. 'BTC-USD'
    :param start_date: date or 'YYYY-MM-DD' string
    :param end_date: date or 'YYYY-MM-DD' string; defaults to today
                     (evaluated per call — the old `date.today()` default was
                     frozen at import time)
    """
    if end_date is None:
        end_date = date.today()

    client = cbp.PublicClient()

    # dates passed on the command line arrive as strings
    if isinstance(start_date, str):
        start_date = datetime.strptime(start_date, '%Y-%m-%d').date()
    if isinstance(end_date, str):
        end_date = datetime.strptime(end_date, '%Y-%m-%d').date()

    delta = timedelta(days=1)
    while start_date <= end_date:
        all_raw_rates = []
        day = start_date.strftime('%Y-%m-%d')
        # load data 4 hours at a time, up until 23:59:00; compute the window
        # end instead of mutating the loop variable
        for start_hour in range(0, 24, 4):
            if start_hour + 4 == 24:
                end_hour, end_minute = 23, 59
            else:
                end_hour, end_minute = start_hour + 4, 0
            start = day + ' {:02d}:{:02d}:00.000'.format(start_hour, 0)
            stop = day + ' {:02d}:{:02d}:00.000'.format(end_hour, end_minute)
            print('downloading ' + start + ' - ' + stop)
            raw_rates = client.get_product_historic_rates(symbol, start=start, stop=stop)
            all_raw_rates.extend(raw_rates)
            sleep(1)

        if all_raw_rates:
            # convert one day's data into pandas, and convert all the decimal
            # typed fields from the coinbasepro API into float; h5py doesn't
            # support decimal serialization
            hist_rates = pd.DataFrame(all_raw_rates)
            hist_rates.set_index('time', inplace=True)
            for col in ('open', 'high', 'low', 'close', 'volume'):
                hist_rates[col] = hist_rates[col].astype(float)
            # force ascending sort on time
            hist_rates.sort_index(inplace=True)

            # write HDF5 with compression; exist_ok avoids the exists/makedirs race
            splay_dir = staging_dir + '/COINBASE_PRO_ONE_MIN_BINS/' + start_date.strftime('%Y/%m/%d')
            os.makedirs(splay_dir, exist_ok=True)
            filename = splay_dir + '/{}.h5'.format(symbol)
            print('writing ' + filename)
            hist_rates.to_hdf(filename, 'trades', mode='w', append=False, complevel=9, complib='blosc')

        start_date += delta
def pullCoinbaseData(symbol):
    """Fetch historic rates for *symbol* and dump them as pretty-printed JSON.

    Writes to ./coinbase/{symbol}-{granularityVal}.json and prints the filename.
    Does nothing when the API returns no rows. Uses the module-level
    granularityVal global as the candle granularity.
    """
    client = cbp.PublicClient()
    phistory = client.get_product_historic_rates(symbol, granularity=granularityVal)
    if phistory:  # empty list → nothing to write
        # exist_ok avoids the isdir/mkdir race of the original check-then-create
        os.makedirs('./coinbase', exist_ok=True)
        filename = 'coinbase/' + symbol + '-' + str(granularityVal) + '.json'
        with open(filename, 'w', encoding='utf-8') as f:
            # default=str stringifies non-JSON types (Decimal/datetime) from the API
            f.write(json.dumps(phistory, indent=4, default=str))
        print(filename)
def shouldBuy():
    """Return True when ETH-EUR has dropped below open by more than the stored threshold.

    qt = open/last > 1 means the price fell since open. When the move exceeds
    the global `dt`, the new (larger) move is persisted to dt.txt as the next
    threshold. Side effects: network call, rewrites dt.txt, mutates global dt.
    """
    global dt
    client = cbp.PublicClient()
    stats = client.get_product_24hr_stats("ETH-EUR")
    qt = stats["open"] / stats["last"]
    if qt > 1 and qt - 1 > dt:
        dt = abs(qt - 1)
        # context manager guarantees the file is closed even if write fails
        with open("dt.txt", "w") as f:
            f.write(str(dt))
        return True
    else:
        return False
def __init__(self, args):
    """Register the command's arguments, build it, and create a public client."""
    super().__init__(args)
    self.setInput(False)
    self.setOutput(True)
    # (name, parser, default) for each argument.
    # Default start is chosen to be 2017 since ~2017-01-02T00:00:00 is when
    # data loss is nearly gone.
    argument_specs = (
        ("product-id", lambda x: x, "BTC-USD"),
        ("start", lambda x: self.__str2iso(x), "2017-01-01T00:00:00.000Z"),
        ("stop", lambda x: self.__str2iso(x), "2021-01-01T00:00:00.000Z"),
        ("granularity", lambda x: x, "60"),
    )
    for arg_name, arg_parser, arg_default in argument_specs:
        self.add_argument(arg_name, parser=arg_parser, default=arg_default)
    self.build()
    self.client = cbp.PublicClient()
def get_product_details(currency_id):
    """Show account history, fills, and market data for one currency.

    GET: renders the product details page. POST: removes the currency from the
    user's dashboard and redirects to the market index. Any API failure while
    gathering data also redirects to the market index.
    """
    # removed three leftover debug prints (currency id, pub currency, row object)
    user = get_user(session.get('username'))
    cb_client = cbp.PublicClient()
    cb_auth = cbp.AuthenticatedClient(key=cb_public_key, secret=cb_private_key, passphrase=cb_passphrase)
    accounts = cb_auth.get_accounts()
    account_id = None
    pub_currency = None
    for acc in accounts:
        # NOTE(review): substring match — e.g. 'BTC' also matches 'WBTC';
        # confirm whether an exact equality check is intended
        if currency_id in acc['currency']:
            account_id = acc['id']
            pub_currency = acc['currency'] + '-USD'
    try:
        product_resp = cb_auth.get_account(account_id)
        account_hist = cb_auth.get_account_history(account_id)
        acc_hist = list(account_hist)
        market_resp = cb_client.get_product_ticker(pub_currency)
        fills = cb_auth.get_fills(pub_currency)
        fills_lst = list(fills)
    except Exception:
        # narrowed from a bare except: (which also swallowed SystemExit and
        # KeyboardInterrupt); any API failure falls back to the dashboard
        return redirect(url_for('market_bp.market_index'))
    if request.method == 'POST':
        currency = pub_currency
        user_currency = UserCurrency.query.filter_by(user_id=user.id).filter_by(currency=currency).first()
        db.session.delete(user_currency)
        db.session.commit()
        flash('{} removed from dashboard'.format(currency))
        return redirect(url_for('market_bp.market_index'))
    return render_template('product_details.html', product=product_resp, market=market_resp, hist=acc_hist, fills=fills_lst)
def fill_candle_data(
        self,
        product_id: str,
        start_date: datetime,
        end_date: datetime,
        granularity=3600,
):
    """Return historical candle data for *product_id* via batched API calls.

    The Coinbase Pro API caps candles per request, so the date range is split
    into batches via self.determine_end_date and the per-batch results are
    flattened into one list.

    Args:
        product_id (str): product to return data for (e.g. BTC-USD)
        start_date (datetime): start of the historical period
        end_date (datetime): end of the historical period
        granularity (int, optional): candle width in seconds. Defaults to 3600.

    Returns:
        list: flattened candle data for the product
    """
    candle_data = []
    client = cbp.PublicClient()
    # removed unused local start_month
    new_start_date = None
    end_date_delta = None
    while start_date <= end_date:
        if new_start_date is None:
            new_start_date = start_date.isoformat()
        else:
            new_start_date = end_date_delta
        end_date_delta = self.determine_end_date(new_start_date, end_date)
        candle_data.append(
            client.get_product_historic_rates(product_id, new_start_date,
                                              end_date_delta, granularity))
        # NOTE(review): the loop condition advances one batch behind the
        # window actually fetched (start_date is set to the batch start, not
        # its end) — assumes determine_end_date returns an ISO string; confirm.
        start_date = datetime.fromisoformat(new_start_date)
    # flatten per-batch lists into one list; the original then crashed with
    # AttributeError on `finaldata.data = ...` (attribute assignment on a
    # dict) before it could return — those dead lines are removed
    transformed_data = [item for sublist in candle_data for item in sublist]
    return transformed_data
def get_public_client():
    """Return a fresh unauthenticated Coinbase Pro public API client."""
    return cbp.PublicClient()
""" Re-balances pair of currencies automatically at t intervals""" import pandas as pd import coinbasepro #TODO setup connections c = coinbasepro.PublicClient() #TODO get necessary balances class Balance: def __init__(self, ticker='ETH-USD'): self.ticker = ticker def open(self): return float(c.get_product_24hr_stats(self.ticker)['open']) #TODO execute re-balance
# Build caches/services over an existing DB cursor, then map each exchange's
# live product list into the exchange_instrument table.
type_code_cache = TypeCodeCache(cur)
instrument_cache = InstrumentCache(cur, type_code_cache)
exch_service = ExchangeEntityService(cur, type_code_cache, instrument_cache)

# map all Gemini products to exchange_instrument table
gemini_client = GeminiConnection()
gemini = exch_service.instrument_cache.get_crypto_exchange("GEMINI")
for symbol in gemini_client.get_products():
    # assumes Gemini symbols are a 3-char base followed by the quote
    # (e.g. 'btcusd' -> BTC/USD) — TODO confirm for 4+ char bases
    base_ccy = symbol[0:3].upper()
    quote_ccy = symbol[3:].upper()
    currency_pair = instrument_cache.get_or_create_cryptocurrency_pair(base_ccy, quote_ccy)
    instrument_cache.get_or_create_exchange_instrument(symbol, currency_pair.get_instrument(), gemini)

# map all Coinbase Pro products to exchange_instrument table
cbp_client = coinbasepro.PublicClient()
cbp = exch_service.instrument_cache.get_crypto_exchange("COINBASEPRO")
for product in cbp_client.get_products():
    # Coinbase Pro products carry explicit base/quote fields
    symbol = product['id']
    base_ccy = product['base_currency']
    quote_ccy = product['quote_currency']
    currency_pair = instrument_cache.get_or_create_cryptocurrency_pair(base_ccy, quote_ccy)
    instrument_cache.get_or_create_exchange_instrument(symbol, currency_pair.get_instrument(), cbp)

# map all Phemex products to exchange_instrument table
(phemex, ws_uri) = get_phemex_connection(PublicCredentials())
products = phemex.get_products()
exchange_code = 'PHEMEX'
# NOTE(review): this loop body is truncated in the visible chunk — the
# remaining statements (quote currency, pair/instrument creation) continue
# beyond this view.
for product in products['data']:
    symbol = product['symbol']
    base_ccy = product['baseCurrency']
    # NOTE(review): this fragment is the tail of a function whose `def` line is
    # outside the visible chunk (by symmetry with shouldBuy it appears to be a
    # sell-side check: qt < 1 means price rose since open) — confirm upstream.
    stats = client.get_product_24hr_stats("ETH-EUR")
    qt = stats["open"] / stats["last"]
    if qt < 1 and abs(qt - 1) > dt:
        # persist the new, larger threshold to dt.txt
        dt = abs(qt - 1)
        f = open("dt.txt", "w")
        f.write(str(dt))
        f.close()
        return True
    else:
        return False

# Main polling loop: every 120 seconds, refresh the ETH-EUR ticker and the
# stored threshold, then act on the buy/sell signals.
while True:
    client = cbp.PublicClient()
    eth = client.get_product_ticker("ETH-EUR")
    ethPrice = float(eth["price"])
    # reload the threshold written by shouldBuy/shouldSell
    f = open("dt.txt", "r")
    dt = float(f.read())
    f.close()
    if nextBuy():
        if shouldBuy():
            buy()
        elif shouldSell():
            sell()
    time.sleep(120)