def gatherData():
    """Download ~800 days of hourly BTC-USD candles from the GDAX public API
    and append them to a CSV file.

    API reference: https://docs.gdax.com/#api
    Output columns: time, low, high, open, close, volume.
    """
    public_client = gdax.PublicClient()
    start = '2016-02-01T12:00:00'  # start date of the dataset, ISO 8601
    numdays = 800                  # fetch this many days from the start date
    granulity = 3600               # candle size in seconds (1 hour)
    datelist = pd.date_range(start, periods=numdays).tolist()
    dates = [getDateTime(date) for date in datelist]

    # Create the output file and write the header row.
    with open('/home/sleek_eagle/research/crypto/1hrdata.csv', 'w') as file:
        file.write('time,low,high,open,close,volume\n')

    # Retrieve data from the API one day at a time and append to the file.
    startdate = dates[0]
    for i in range(1, len(dates)):
        datalen = 0
        tries = 0
        # Retry until the response has enough rows; fewer than 10 candles for
        # a full day is treated as a bad/partial response.
        while datalen < 10:
            tries += 1
            try:
                if 1 + tries * 0.5 > 5:
                    # Backoff has grown large; assume the connection went
                    # stale and recreate the client before retrying.
                    public_client = gdax.PublicClient()
                data = public_client.get_product_historic_rates(
                    'BTC-USD', start=startdate, end=dates[i],
                    granularity=granulity)
                data = pd.DataFrame(data)
                datalen = len(data)
                # Linear backoff to stay under the API rate limit.
                sleep(1 + tries * 0.5)
                print("this length = " + str(len(data)))
            except Exception as exp:
                print(exp)
                print("new try")
        with open('/home/sleek_eagle/research/crypto/1hrdata.csv', 'a') as file:
            data.to_csv(file, header=False, index=False)
        startdate = dates[i]
        print(len(data))
        print(i)
def generate_portfolio(S_0, params):
    """
    This script analyzes possible portfolios based on the calculated
    parameters on the observed data, and the user inputted investment.

    :param S_0: total investment in USD to split across coins
    :param params: iterable of (theta, T, prod_id) tuples where theta[0] is
        the weight used to allocate the investment
    """
    public_client = gdax.PublicClient()
    allvar = []
    sumvar = 0
    for coin in params:
        theta = coin[0]
        v = theta[0]
        prod_id = coin[2]
        # Current market value of the coin: midpoint of the 24h high/low.
        name = prod_id + '-USD'
        stats = public_client.get_product_24hr_stats(name)
        value = (float(stats['high']) + float(stats['low'])) / 2
        allvar.append([prod_id, value, v])
        sumvar += v
    # Allocate each coin's share proportionally to its weight v.
    priority = sorted(allvar, key=lambda i: i[2])
    portfolio = []
    for i in priority:
        investment = S_0 * i[2] / sumvar
        currency = investment / i[1]
        portfolio.append(
            (i[0], currency, investment))  # (id, currency, investment)
    print("\nYour suggested investments are: \n")
    for coin in portfolio:
        print("%s: %s for %s USD" % (coin[0], coin[1], coin[2]))
    # Prompt to save the portfolio.
    done = False
    while not done:
        inp = input("\nWould you like to save this portfolio? (y/n) ")
        try:
            if inp.lower() == 'y':
                public_client = gdax.PublicClient()
                current_date = np.datetime64(
                    public_client.get_time().get("iso").split('T')[0])
                # Save the file, one "id, currency, investment" row per coin.
                with open("portfolios/%s.txt" % (current_date), "w") as f:
                    for coin in portfolio:
                        f.write(
                            str(coin[0]) + ', ' + str(coin[1]) + ', ' +
                            str(coin[2]) + '\n')
                print("Portfolio saved. Exiting.\n")
                done = True
            if inp.lower() == 'n':
                print("Program complete. Exiting.\n")
                done = True
        except ValueError:
            print("Your input could not be interpreted.")
def initialize_public_client(self, *args, **kwargs):
    """
    Initialize public client for API calls.

    :param sandbox: boolean, if true then use sandbox mode for API
    :return: public_client object
    """
    super().initialize_public_client(*args, **kwargs)
    sandbox_requested = kwargs.get("sandbox", False)
    if not sandbox_requested:
        return gdax.PublicClient()
    return gdax.PublicClient(
        api_url="https://api-public.sandbox.gdax.com")
def get_yest_end_price(product):
    """Return a price from yesterday's candle for *product*.

    NOTE(review): historic-rate rows are [time, low, high, open, close,
    volume] (see the CSV headers used elsewhere in this file), so index 3
    is the *open*; for an "end" (close) price index 4 would be expected —
    confirm intent against callers.
    """
    # Window boundaries come from a sibling helper; presumably spans yesterday.
    start_time, end_time = get_start_and_end_times()
    resp = gdax.PublicClient().get_product_historic_rates(
        product, start_time, end_time)
    price = resp[0][3]
    time.sleep(1)  # don't overload our gdax connection
    return price
def get_data_from_GDAX(pair, candle_size='15M', start = dt.datetime(2015,1,1), N = 3000, max_candles_per_page = 300):
    """Download N candles of *candle_size* for *pair* from the GDAX API,
    page by page, and return them as a DataFrame (or the raw list on error).
    """
    #N is the desired number of candles
    g = gdax.PublicClient()
    # Candle-size label -> granularity in seconds.
    candle_sizes = {'1M' : 60,'5M':300,'15M':900,'1H':3600,'6H':21600,'1D':86400}
    #gathering data from api
    start = start - dt.timedelta(days=0, hours=2, minutes=0) #lag of 2 hours to start at midnight French time
    # One page spans max_candles_per_page candles.
    deltaT = dt.timedelta(days=0, hours=0, minutes=max_candles_per_page*candle_sizes[candle_size]/60)
    T = [start + i*deltaT for i in range(0,N//max_candles_per_page)]
    history = []
    for t in T:
        try:
            h = g.get_product_historic_rates(pair, start = t.isoformat(), end = (t + deltaT).isoformat(), granularity=candle_sizes[candle_size])
        except:
            # NOTE(review): bare except also hides non-network errors.
            print("bad handshake, waiting 15 sec")
            time.sleep(15)
            h = g.get_product_historic_rates(pair, start = t.isoformat(), end = (t + deltaT).isoformat(), granularity=candle_sizes[candle_size])
        history += h
        time.sleep(0.4)  # throttle between pages
    # Checking if the data doesn't repeat (it does when gdax's API is requested too much).
    # NOTE(review): raises IndexError when fewer than two rows were fetched.
    print(history[-1]==history[-2])
    try:
        #processing data
        data = pd.DataFrame(history,columns = ['TimeStamp','low','high','open','close','volume']).set_index('TimeStamp')
        data = data.sort_index()
        # print("Data contains candles from {} to {}".format(T[0].isoformat(),(T[-1]+deltaT).isoformat()))
        data = data.apply(lambda x : add_French_Time(x,data,start),axis = 1)
        return data
    except:
        print('error')
        return history
def get_gdax_data(crypto_codes, currency_code, dates):
    """Fetch daily close prices from GDAX for each crypto code and join
    them into a single DataFrame indexed by *dates*.
    """
    client = gdax.PublicClient()
    merged = pd.DataFrame(index=dates)
    # dates[0] is a pandas Timestamp; add one day to the end because
    # get_product_historic_rates treats the end date as exclusive.
    range_start = dates[0].isoformat()
    range_end = (dates[-1] + pd.DateOffset(days=1)).isoformat()
    for code in crypto_codes:
        candles = client.get_product_historic_rates(
            code + '-' + currency_code,
            granularity=60 * 60 * 24,
            start=range_start,
            end=range_end)
        candle_frame = pd.DataFrame(candles)
        # Column 0 is the epoch timestamp, column 4 the close price.
        close_column = code + ' close'
        candle_frame = candle_frame.rename(columns={0: 'date', 4: close_column})
        candle_frame['date'] = pd.to_datetime(candle_frame['date'], unit='s')
        candle_frame = candle_frame.set_index('date')
        merged = merged.join(candle_frame[close_column], how='left')
    return merged
def __init__(self, coin_id, product_id, auth_client):
    # Bootstrap a GDAX trading bot for a single product (Python 2 code).
    print "Initializing GDAX Bot PRODUCT: {}".format(product_id)
    self.auth_client = auth_client     # authenticated client for trading
    self.pc = gdax.PublicClient()      # public client for market data
    self.coin_id = coin_id
    self.product_id = product_id
    # Live order book feed, subscribed to this product only.
    self.orderbook = gdax.OrderBook(product_id=[self.product_id])
    self.init_orderbook()
    # Exchange-imposed limits for this product.
    self.min_amount, self.quote_increment, self.min_market_funds = self.get_product_info()
    self.last_buy_price = None
    # Profit/loss thresholds, as Decimal, taken from settings.
    self.short_max_profit = Decimal(settings.SHORT_MAX_PROFIT)
    self.long_max_profit = Decimal(settings.LONG_MAX_PROFIT)
    self.short_max_loss = Decimal(settings.SHORT_MAX_LOSS)
    self.long_max_loss = Decimal(settings.LONG_MAX_LOSS)
    self.max_slippage = settings.MAX_SLIPPAGE
    self.equivalent_fiat = None
    # Position state flags and bookkeeping for open orders.
    self.long_flag = False
    self.short_flag = False
    self.open_orders = []
    self.order_thread = None
    self.pending_order = False
    self.order_exp = 10  # sec. Time until bot should cancel limit order and create new one
    self.get_orders()
    print self.get_balances()
def __init__(self, currencyPair='ETH-BTC'):
    """Track a GDAX currency pair: create the public client, remember the
    pair, keep a pyplot handle, stamp the creation time, and run the
    first update.
    """
    self.currency_pair = currencyPair
    self.client = gdax.PublicClient()
    self.plt = plt
    self.time = time()
    self.update(1)
def get(self, request, format=None):
    """Return the 25 most recent account snapshots, each augmented with a
    USD balance derived from the current BTC-USD bid.
    """
    ticker = gdax.PublicClient().get_product_ticker('BTC-USD')
    mongoserver_uri = "mongodb://*****:*****@10.8.0.2:27017/admin"
    connection = MongoClient(host=mongoserver_uri)
    collection = connection['cc_accounts']['LANDON_coinigy_account']
    recent = list(
        collection.find({}).sort('_id', pymongo.DESCENDING).limit(25))
    payload = []
    for record in recent:
        # USD value of the BTC balance at the current bid.
        record['usd_balance'] = str(
            float(record['btc_balance']) * float(ticker['bid']))
        payload.append({
            'id': str(record['_id']),
            'balance_curr_code': record['balance_curr_code'],
            'balance_amount_avail': record['balance_amount_avail'],
            'balance_amount_total': record['balance_amount_total'],
            'usd_balance': record['usd_balance'],
            'btc_balance': record['btc_balance'],
            'last_price': record['last_price'],
            'time': record['time']
        })
    return Response(payload, status=status.HTTP_200_OK)
def calculate_avg(self):
    """Return the average price of self.product over the past self.avg_days
    days, reusing a cached value while it is younger than self.avg_duration
    seconds.

    Raises RuntimeError when the server returns no usable candles.
    """
    # Cache of the rolling average (epoch-seconds freshness check).
    if self.last_avg != -1 and self.last_avg_time > time() - self.avg_duration:
        return self.last_avg
    public_client = gdax.PublicClient()
    # Two-hour candles covering the past 'avg_days' days.
    stop_time = datetime.datetime.utcnow()
    start_time = stop_time - datetime.timedelta(days=self.avg_days)
    # Ask the server for the data.
    history = public_client.get_product_historic_rates(self.product,
                                                       start_time.isoformat(),
                                                       stop_time.isoformat(),
                                                       granularity=7200)
    # Average the midpoint of candle columns 2 and 3 over the window.
    # NOTE(review): GDAX candle rows are [time, low, high, open, close,
    # volume], so this averages high and open — confirm low/high was not
    # intended.
    total_avg = 0
    total_count = 0
    for element in history:
        this_avg = (element[2] + element[3]) / 2
        total_avg += this_avg
        total_count += 1
    if total_count <= 0:
        raise RuntimeError('Invalid data received from the server')
    total_avg = total_avg / total_count
    # BUG FIX: the result was previously assigned to locals
    # (last_avg/last_avg_time) with a datetime.time() timestamp, so the
    # cache never updated. Store on self with an epoch timestamp to match
    # the freshness check above.
    self.last_avg = total_avg
    self.last_avg_time = time()
    return total_avg
def profit(event, context):
    """Lambda-style handler: serve the static index page with the live
    ETH-EUR price substituted for the <% ethprice %> placeholder.
    """
    ticker = gdax.PublicClient().get_product_ticker(product_id='ETH-EUR')
    ethprice = ticker["price"]
    # Template lives next to this module under static/.
    static_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                              'static/')
    with open(os.path.join(static_dir, 'index.html'), 'r') as template:
        page = template.read()
    rendered = re.sub(r'<% ethprice %>', ethprice, page)
    return {
        "statusCode": 200,
        "headers": {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'text/html',
        },
        "body": rendered
    }
def __init__(self, granularity=[3600, 3600, 3600, 3600], MA_period=[10, 10, 10, 10]): self.clients = [] # Create multiple clients to increase resolution of the calls, assigning # a new client to each coinA for i in range(0, 4): self.clients.append(gdax.PublicClient()) self.ledger = {} # Put everything into a ledger that can be called, will eventually create objects that # will manipulate and store this data for i in range(0, 4): self.ledger[self.coins[i]] = { 'client': self.clients[i], 'granularity': granularity[i], 'MA_period': MA_period[i] } self.traders = [] #create the traders for coin in self.coins: self.traders.append(ts.TradeSimulator(coin, self.ledger[coin])) print('something1') self.start()
def __init__(self, ask=None, bid=None, last=None, bitcoin=None, litecoin=None, ether=None):
    # Price cache for three GDAX products, initialized to zero until the
    # first update.
    self.litecoin_USD_bid = 0.0
    self.litecoin_USD_ask = 0.0
    self.litecoin_USD_lasttrade = 0.0
    self.ether_USD_bid = 0.0
    self.ether_USD_ask = 0.0
    self.ether_USD_lasttrade = 0.0
    self.bitcoin_USD_bid = 0.0
    self.bitcoin_USD_ask = 0.0
    self.bitcoin_USD_lasttrade = 0.0
    # Field-name keys used to pick values out of API responses — presumably
    # ticker field names; verify against the update code.
    self.ask_key = ask
    self.bid_key = bid
    self.last_key = last
    # NOTE(review): attribute is named 'coinbase' but holds a gdax client.
    self.coinbase = gdax.PublicClient()
    # Per-coin keys (product identifiers) for the three tracked coins.
    self.litecoin_key = litecoin
    self.bitcoin_key = bitcoin
    self.ether_key = ether
    self.order_volume = None
    self.name = 'gdax'
    # Base class drives the polling loop at REQUEST_DURATION intervals.
    super(GdaxCoinData, self).__init__(exchange=self,
                                      request_duration=REQUEST_DURATION)
def __init__(self, trader=None, *args, **kwargs):
    """Volatility-stop strategy runner.

    Required kwargs: 'pairs', 'ATR-Period', 'vstop timeframe',
    'vstop multiplier', 'data days'.
    """
    self.product = kwargs['pairs']
    self.period = kwargs['ATR-Period']
    self.vstop_timeframe = kwargs['vstop timeframe']
    self.multiplier = kwargs['vstop multiplier']
    self.data_days = kwargs['data days']
    self.client = gdax.PublicClient()
    self.trader = trader
    self.position = None
    self.counter = 0
    self.check = True
    # Rolling tick buffer, indexed by parsed timestamp.
    self.temp_df = pd.DataFrame(columns=['time', 'price', 'size'])
    self.temp_df['time'] = pd.to_datetime(self.temp_df['time'],
                                          format="%Y-%m-%dT%H:%M:%S.%fZ")
    # BUG FIX: set_index(..., inplace=True) returns None, so reassigning
    # the result made self.temp_df None. Mutate in place instead.
    self.temp_df.set_index('time', drop=True, inplace=True)
    self.main_df = self.df_load(60, self.product)
    # Drop the first (partial) candle.
    self.main_df.drop(self.main_df.head(1).index, inplace=True)
    self.timer = dt.datetime.now()
    self.main_atr = self.atr(self.main_df)
    self.check_v = True
    self.vstop = {}
    self.v_stop_init()
    # Initializing daemon for getting account balance on the vstop timeframe.
    scheduler = BackgroundScheduler()
    scheduler.add_job(self._scheduled_task,
                      trigger='cron',
                      minute='*/{}'.format(self.vstop_timeframe))
    scheduler.start()
def _init_client(self):
    """Create the public and authenticated GDAX clients, honoring sandbox
    mode for both.
    """
    url = "https://api.gdax.com"
    if self.sandbox:
        url = "https://api-public.sandbox.gdax.com"
    # BUG FIX: the public client previously ignored the sandbox URL and
    # always hit production; pass api_url so both clients target the same
    # environment.
    self.client_public = gdax.PublicClient(api_url=url)
    self.client_private = gdax.AuthenticatedClient(
        self.api_key, self.api_secret, self.api_pass, url)
def __init__(self, rules, gdax_client, app_key, app_secret, oauth_token,
             oauth_token_secret, timeout=300, retry_count=None, retry_in=10,
             client_args=None, handlers=None, chunk_size=1, sleep_time=0.5):
    """Stream listener that trades on the given rules via gdax_client.

    BUG FIX: timeout, retry_count, retry_in, client_args, handlers and
    chunk_size were previously shadowed by hard-coded values in the
    super().__init__ call, so caller-supplied settings were ignored.
    Forward the actual parameters instead.
    """
    super(TradeListener, self).__init__(app_key,
                                        app_secret,
                                        oauth_token,
                                        oauth_token_secret,
                                        timeout=timeout,
                                        retry_count=retry_count,
                                        retry_in=retry_in,
                                        client_args=client_args,
                                        handlers=handlers,
                                        chunk_size=chunk_size)
    self.rules = rules
    self.gdax_client = gdax_client
    self.sleep_time = sleep_time
    # Funds currently available to trade (silent status check).
    self.available = get_dough(self.gdax_client, status_update=False)
    self.public_client = gdax.PublicClient()  # for product order book
def plotProductOrders(product_id):
    """Plot the level-2 order book for *product_id*: bids in green, asks
    in red (price on x, size on y).
    """
    publicClient = gdax.PublicClient()
    # To include other parameters, see the official documentation.
    orders = publicClient.get_product_order_book(product_id, 2)
    print(orders['bids'])
    bid_prices = [entry[0] for entry in orders['bids']]
    bid_sizes = [entry[1] for entry in orders['bids']]
    plt.plot(bid_prices, bid_sizes, 'g')
    ask_prices = [entry[0] for entry in orders['asks']]
    ask_sizes = [entry[1] for entry in orders['asks']]
    plt.plot(ask_prices, ask_sizes, 'r')
    plt.show()
def get(self, request, format=None):
    # Return 30 weekly account snapshots for the user named in the URL path,
    # each augmented with a USD balance from the current BTC-USD bid.
    data = []
    json_data = []
    client = gdax.PublicClient()
    ticker = client.get_product_ticker('BTC-USD')
    mongoserver_uri = "mongodb://*****:*****@10.8.0.2:27017/admin"
    connection = MongoClient(host=mongoserver_uri)
    db = connection['cc_accounts']
    # Username is the last URL path segment; pick the matching collection.
    # NOTE(review): an unknown username leaves 'collection' unbound and the
    # later lookup raises NameError/UnboundLocalError.
    current_username = request.path.split('/')[-1]
    if current_username == 'LANDON':
        collection = db['LANDON_coinigy_account']
    if current_username == 'CHRISTIAN':
        collection = db['CHRISTIAN_coinigy_account']
    if current_username == 'VIVEK':
        collection = db['VIVEK_coinigy_account']
    # Most recent snapshot timestamp in the collection.
    latest_datatime = list(
        collection.find({}).sort('time',
                                 pymongo.DESCENDING).limit(1))[0]['time']
    # Step back one week (10080 minutes) at a time for 30 weeks, fetching
    # records from a 5-minute window (60 to 55 minutes before each point).
    for index in range(0, 30):
        diff_time = 10080 * index
        curr_date_time = latest_datatime - datetime.timedelta(
            minutes=diff_time)
        end_time = curr_date_time - datetime.timedelta(minutes=55)
        start_time = curr_date_time - datetime.timedelta(minutes=60)
        cursor_data_eachtime = collection.find(
            {'time': {
                '$gte': start_time,
                '$lt': end_time
            }})
        data_eachtime = list(cursor_data_eachtime)
        data.append(data_eachtime)
    # Flatten the weekly buckets, attaching the derived USD balance.
    for idx, datums in enumerate(data):
        for datum in datums:
            datum['usd_balance'] = str(
                float(datum['btc_balance']) * float(ticker['bid']))
            json_data.append({
                'id': idx,
                'balance_curr_code': datum['balance_curr_code'],
                'balance_amount_avail': datum['balance_amount_avail'],
                # 'balance_amount_held': datum['balance_amount_held'],
                'balance_amount_total': datum['balance_amount_total'],
                'usd_balance': datum['usd_balance'],
                'btc_balance': datum['btc_balance'],
                'last_price': datum['last_price'],
                'time': datum['time']
            })
    return Response(json_data, status=status.HTTP_200_OK)
def write_csv(level):
    """Dump the GDAX BTC-USD order book at the given *level* to a CSV file.

    Level 1/2 rows carry (price, size, num_orders); level 3 rows carry
    (price, size, order_id). Asks are written first, then bids, each with
    its own header row.
    """
    write_dir = "/Users/Jeff/Desktop/Novumind/ob_data_level3.csv"
    public_client = gdax.PublicClient()
    ob_data_level2 = public_client.get_product_order_book('BTC-USD',
                                                         level=level)
    # Context-manage the file so it is closed even on an exception; 'w'
    # mode already truncates, so the old explicit truncate(0) is gone.
    with open(write_dir, "w") as csv_file:
        # Helper that writes either asks or bids.
        def write_helper(param):
            entries = ob_data_level2.get(param)
            # The third column's meaning depends on the book level.
            if level == 1 or level == 2:
                label = "num_orders"
            else:
                label = "order_id"
            csv_file.write("price, size, " + label + "\n")
            for item in entries:
                price = item[0]
                size = item[1]
                num_orders = item[2]
                # Trailing comma kept for byte-compatible output.
                row = str(price) + "," + str(size) + "," + str(
                    num_orders) + "," + "\n"
                csv_file.write(row)

        write_helper('asks')
        write_helper('bids')
def __init__(self,
             from_currency: str,
             to_currency: str,
             granularity: int,
             client: gdax.PublicClient = None,
             no_download: bool = False,
             start: datetime.datetime = None,
             end: datetime.datetime = None,
             samples: int = None,
             do_not_check_time: bool = False) -> None:
    """Candle-history holder for one GDAX product.

    :param from_currency: base currency code (e.g. "BTC")
    :param to_currency: quote currency code (e.g. "USD")
    :param granularity: candle size in seconds
    :param client: reuse an existing PublicClient instead of creating one
    :param no_download: if true, skip the initial download
    :param start/end/samples: window specification forwarded to
        self._get_samples / self.download
    :param do_not_check_time: flag read elsewhere — presumably disables
        timestamp validation during download; confirm in self.download.
    """
    self.hist = []
    self.product = "{}-{}".format(from_currency, to_currency)
    if not client:
        self.publicClient = gdax.PublicClient()
    else:
        self.publicClient = client
    # Download throttling parameters.
    self.max_points = 200
    self.max_downloads_per_sec = 5
    self.sleep_time = 1
    self.max_retries = 10
    self.granularity = granularity
    self.do_not_check_time = do_not_check_time
    # Resolve the number of samples from whichever of samples/start/end
    # the caller provided.
    self.samples = self._get_samples(samples=samples, start=start, end=end)
    if not no_download:
        self.download(granularity=self.granularity,
                      end=end,
                      start=start,
                      samples=self.samples)
def read_historic_data(self):
    """Fetch historic candles for self.product, accumulate the closes into
    self.history_queue, record the most recent close in self.last_price,
    and print the average low/high/volume over the window.
    """
    public_client = gdax.PublicClient()
    historic_rates = public_client.get_product_historic_rates(
        self.product, granularity=self.granularity_in_seconds)
    # Guard: an empty response previously caused ZeroDivisionError on the
    # averages and a NameError on 'timestamp' in the final print.
    if not historic_rates:
        return
    sum_volume = 0.0
    sum_close = 0.0
    sum_high = 0.0
    sum_low = 0.0
    # Candles arrive newest-first; iterate oldest-first so the queue ends
    # on the most recent close.
    for (timestamp, low, high, open_val, close_val,
         volume) in reversed(historic_rates):
        sum_volume += volume
        sum_close += close_val
        sum_high += high
        sum_low += low
        self.history_queue.append(float(close_val))
    avg_low = sum_low / float(len(historic_rates))
    avg_high = sum_high / float(len(historic_rates))
    latest_rate = historic_rates[0]
    self.last_price = float(latest_rate[4])  # index 4 = close
    print(historic_rates[0])
    # BUG FIX: the format string had three placeholders for four arguments,
    # silently dropping the average volume from the output.
    print("{} Avg low {}, avg high {} with avg volume of {}".format(
        datetime.fromtimestamp(timestamp), avg_low, avg_high,
        sum_volume / len(historic_rates)))
def buyAtBestPrice(size='0.01', product_id='LTC-EUR'):
    """Place a limit buy slightly below the last ticker price, then for 60
    seconds repeatedly cancel and re-place it at the refreshed price.

    Returns the last order response dict.
    NOTE(review): cancel_order is called even if the previous order already
    filled, and the final order is left open — confirm this is intended.
    """
    # Personal credentials holder; provides gdax API key material.
    myInfo = infoPerso.infoPerso()
    auth_client = gdax.AuthenticatedClient(myInfo.gdax_key,
                                           myInfo.gdax_b64secret,
                                           myInfo.gdax_passphrase)
    publicClient = gdax.PublicClient()
    ticker = publicClient.get_product_ticker(product_id)
    # Bid 0.5 below the last trade price.
    price = float(ticker['price']) - 0.5
    keys = {'size': size, 'product_id': product_id, 'price': price}
    order = auth_client.buy(**keys)
    print(order)
    count = 0
    # Re-quote once per second for up to 60 seconds.
    while (count < 60):
        time.sleep(1)
        auth_client.cancel_order(order['id'])
        ticker = publicClient.get_product_ticker(product_id)
        price = float(ticker['price']) - 0.5
        keys = {'size': size, 'product_id': product_id, 'price': price}
        order = auth_client.buy(**keys)
        print(order)
        count = count + 1
    return order
def get_gdax_dataframe(product_id, start, end, granularity):
    '''
    Uses the gdax python package to download data from GDAX and place it
    into a pandas dataframe.

    Args:
        start: Start time in ISO 8601
        end: End time in ISO 8601
        granularity: Desired time slice in seconds. Valid values for
            granularity are 60, 300, 900, 3600, 21600, and 86400.

    Returns:
        df: The GDAX information for the product_id in a pandas dataframe,
            indexed by candle datetime (the 'time' column keeps the raw
            epoch value).
    '''
    public_client = gdax.PublicClient()
    result_list = public_client.get_product_historic_rates(
        product_id, start=start, end=end, granularity=granularity)
    result_keys = ('time', 'low', 'high', 'open', 'close', 'volume')
    # Cleanup: the previous version also accumulated an unused data_list
    # and carried commented-out time conversion code; only the datetime
    # index is actually needed.
    time_list = [epoch_to_datetime(item[0]) for item in result_list]
    df = pd.DataFrame(data=result_list, index=time_list, columns=result_keys)
    return df
def GetGdaxData(fsym, tsym, dates, data_granularity=60*60*24):
    """Fetch candle history for the fsym-tsym product from GDAX over the
    span of *dates* and return it joined onto a DataFrame indexed by those
    dates, with NaN rows dropped.
    """
    #get price history from gdax
    df = pd.DataFrame(index=dates)
    gdax_client = gdax.PublicClient()
    start_date_iso = dates[0].isoformat()  #dates[0] is a pandas TimeStamp
    end_date_iso = (dates[-1] + pd.DateOffset(days=1)).isoformat()  #add one more day because get_product_historic_rates end date is exclusive
    history = gdax_client.get_product_historic_rates(fsym + '-' + tsym,
                                                     granularity=data_granularity,
                                                     start=start_date_iso,
                                                     end=end_date_iso)
    #convert to pandas dataframe
    history_df = pd.DataFrame(history)
    #rename columns (GDAX candle order: time, low, high, open, close, volume)
    history_df.rename(columns={0:'date', 1:'low', 2:'high', 3:'open', 4:'close', 5:'volume'}, inplace=True)
    #convert date (epoch seconds -> datetime, via project helper)
    history_df['date'] = Timestamp2Datetime(history_df['date'])
    #set date as index
    history_df.set_index('date', inplace=True)
    #join onto the requested date index; unmatched dates become NaN and are dropped
    df = df.join(history_df, how='left')
    df = df.dropna()
    return df
def main(argv):
    # Poll the LTC-USD price once per second forever, printing the price and
    # a moving average over the last few points (Python 2 code).
    # Credentials are read from the environment but unused below —
    # presumably intended for a future authenticated client.
    key = os.environ['GDAX_KEY']
    b64secret = os.environ['GDAX_B64SECRET']
    passphrase = os.environ['GDAX_PASSPHRASE']
    period = 1  # polling interval in seconds
    public_client = gdax.PublicClient()
    litecoin_historic_prices_list = []
    number_of_historic_points = 3  # moving-average window size
    print public_client.get_product_historic_rates('LTC-USD', granularity=100)
    print public_client.get_product_24hr_stats('LTC-USD')
    while True:
        time.sleep(int(period))
        query_time, lite_coin_price = get_lite_coin_price(
            public_client=public_client)
        # Moving average over the rolling price list (helper mutates it).
        average_price = get_historic_average_price(
            litecoin_historic_prices_list=litecoin_historic_prices_list,
            current_price=lite_coin_price,
            number_of_historic_points=number_of_historic_points)
        # print(litecoin_historic_prices_list)
        print 'past : ', get_past_date_time_utc(days=0, hours=1, minutes=0, seconds=0)
        print 'time: ', query_time, ', price: ', lite_coin_price, ', Moving average: ', average_price
        print '-----'
def get_market_price(self):
    """Return the close of the first one-minute ETH-EUR candle returned
    by the GDAX API.
    """
    candles = gdax.PublicClient().get_product_historic_rates(
        'ETH-EUR', granularity=60)
    frame = pd.DataFrame(
        candles, columns=['time', 'low', 'high', 'open', 'close', 'volume'])
    return frame["close"][0]
def __init__(self):
    """Select the GDAX client: authenticated when USE_AUTH_CLIENT is set
    in settings, otherwise the public client.
    """
    if not settings.USE_AUTH_CLIENT:
        self.client = gdax.PublicClient()
        return
    self.client = gdax.AuthenticatedClient(settings.API_KEY,
                                           settings.API_SECRET,
                                           settings.API_KEY_PASSPHRASE)
def get_historical_data(self, num_periods=200):
    """Fetch the last *num_periods* candles for self.product and return
    them oldest-first as an object ndarray with tz-aware datetimes in
    column 0. Retries every 3 s while the API returns a non-list
    (rate-limited JSON message) payload.
    """
    client = gdax.PublicClient()
    window_end = datetime.datetime.utcnow()
    window_start = window_end - datetime.timedelta(
        seconds=(self.period_size * num_periods))
    start_iso = window_start.isoformat()
    end_iso = window_end.isoformat()
    candles = client.get_product_historic_rates(self.product,
                                                granularity=self.period_size,
                                                start=start_iso,
                                                end=end_iso)
    # A rate-limited response is a JSON message (dict), not a list.
    while not isinstance(candles, list):
        time.sleep(3)
        candles = client.get_product_historic_rates(
            self.product,
            granularity=self.period_size,
            start=start_iso,
            end=end_iso)
    table = np.array(candles, dtype='object')
    for candle in table:
        candle[0] = datetime.datetime.fromtimestamp(candle[0], pytz.utc)
    return np.flipud(table)
def get_data_by_market_gdax(exchange, market_name):
    # Page hourly candles for one market from a fixed start date up to now
    # and persist them to a SQL table named "<exchange>_<market>".
    # Market names arrive as "BASE/QUOTE"; GDAX products use "BASE-QUOTE".
    market_name = market_name.split(sep='/')
    market_name = '-'.join(market_name)
    publicClient = gdax.PublicClient()
    # ccxt is used only for its epoch <-> ISO-8601 helpers here.
    client = ccxt.gdax
    start_point_time = '2019-04-08T00:00:00'
    # NOTE(review): epoch_now and mydb are module globals defined elsewhere;
    # presumably current time in ms and a DB connection — confirm.
    actual = epoch_now
    listI = []
    start_point_time_epock = client.parse8601(start_point_time)
    # Page size: 864000000 ms = 10 days per request window.
    end_point_time = start_point_time_epock + 864000000
    try:
        next_date_start_t = client.iso8601(start_point_time_epock)
        end_point_time_ = client.iso8601(end_point_time)
        epoch_next_date_start = 0
        flag = True
        # Walk 10-day windows forward until we pass the current time.
        while epoch_next_date_start < actual and flag is True:
            time.sleep(1)  # throttle to avoid API rate limiting
            hist = publicClient.get_product_historic_rates(
                market_name,
                start=next_date_start_t,
                end=end_point_time_,
                granularity=3600)
            listI = listI + hist
            # Advance the window: new start = old end; new end = +10 days.
            next_date_start_t = end_point_time_
            end_point_time_ = client.parse8601(end_point_time_)
            end_point_time_ = end_point_time_ + 864000000
            epoch_next_date_start = end_point_time_
            if end_point_time_ >= actual:
                flag = False
            else:
                end_point_time_ = client.iso8601(end_point_time_)
        df = pd.DataFrame(
            listI,
            columns=["timestamp", "open", "high", "low", "close", "volume"])
        df.to_sql(name='{}_{}'.format(exchange, market_name),
                  con=mydb,
                  if_exists='replace',
                  index=False,
                  index_label='id')
        print(listI)
    except Exception as error:
        # Create a custom logger
        logger = logging.getLogger(__name__)
        # Create handlers
        f_handler = logging.FileHandler('gdax_exchanges.log')
        # Create formatters and add it to handlers
        f_format = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        f_handler.setFormatter(f_format)
        # Add handlers to the logger
        logger.addHandler(f_handler)
        logger.error('EXCHANGE: %s ERROR %s', exchange, error)
        logging.error('COIN: %s', market_name)
        pass
def get_historical_data(self):
    """Fetch BTC-USD candles at self.period_size granularity and return
    them oldest-first as an object ndarray with tz-aware datetimes in
    column 0.
    """
    client = gdax.PublicClient()
    raw = client.get_product_historic_rates('BTC-USD',
                                            granularity=self.period_size)
    table = np.array(raw, dtype='object')
    for candle in table:
        candle[0] = datetime.datetime.fromtimestamp(candle[0], pytz.utc)
    return np.flipud(table)