def func_get_coin_list(self):
    """Fetch the full cryptocompare coin list and merge it into coininfo.csv."""
    try:
        frames = []
        source = "cryptocompare"
        url = "https://min-api.cryptocompare.com/data/all/coinlist"
        headers = {
            'cache-control': "no-cache",
            'postman-token': "a1299df4-9db1-44cc-376e-0357176b776f"
        }
        response = requests.request("GET", url, headers=headers)
        data = response.json()
        df = p.DataFrame.from_dict(data["Data"], orient='index', dtype=None)
        df = df.assign(utc=time.time(), hostname=socket.gethostname(), source=source)
        df = df.reset_index(drop=True)
        df = df.sort_values('Id')
        frames.append(df)
        my_file = self.cwd + '/data/coininfo/coininfo.csv'
        if os.path.isfile(my_file):
            df_resident = p.read_csv(my_file, encoding='utf-8')
            frames.append(df_resident)
        df = p.concat(frames)
        if not df.empty:
            df = df.drop_duplicates(['Symbol', 'source'], keep='last')
            df = df.reset_index(drop=True)
            df.to_csv(my_file, index=False, encoding='utf-8')
            s3 = savetos3.SaveS3(my_file, self.catalog)
            s3.main()
    except requests.exceptions.RequestException as e:
        print(e)
    except Exception as e:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print(e)
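# The collectors in this module all repeat the same persistence pattern seen
# above: append the freshly fetched frame, merge it with the resident CSV if
# one exists, de-duplicate, write the file back, and push it to S3. A minimal
# sketch of that pattern as a shared helper follows; the name _merge_and_upload
# is hypothetical (it is not part of the original code) and it assumes the same
# module-level imports used elsewhere here (pandas as p, os, and savetos3).
def _merge_and_upload(self, frames, my_file, subset, sort_by=None):
    """Merge new frames with the resident CSV, de-duplicate, save, and upload."""
    if os.path.isfile(my_file):
        frames.append(p.read_csv(my_file, encoding='utf-8'))
    df = p.concat(frames)
    if df.empty:
        return df
    df = df.drop_duplicates(subset, keep='last')
    if sort_by is not None:
        df = df.sort_values(sort_by)
    df = df.reset_index(drop=True)
    df.to_csv(my_file, index=False, encoding='utf-8')
    savetos3.SaveS3(my_file, self.catalog).main()
    return df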
def getcurrencylist(self):
    """Fetch the Alpha Vantage physical currency list, merge it into avcurrencies.csv, and return the currency codes."""
    try:
        frames = []
        url = "https://www.alphavantage.co/physical_currency_list/"
        headers = {
            'cache-control': "no-cache",
            'postman-token': "f7f0c0c6-c707-e39e-76ed-f61d9046b7f8"
        }
        response = requests.request("GET", url, headers=headers)
        data = response.content.decode('utf-8')
        cr = csv.reader(data.splitlines(), delimiter=',')
        my_list = list(cr)
        df = p.DataFrame(my_list[1:], columns=my_list[0])
        df = df.assign(utc=time.time(), hostname=socket.gethostname(), source='alphavantage')
        df = df.reset_index(drop=True)
        frames.append(df)
        my_file = self.cwd + '/data/avinfo/avcurrencies.csv'
        if os.path.isfile(my_file):
            df_resident = p.read_csv(my_file, encoding='utf-8')
            frames.append(df_resident)
        df = p.concat(frames)
        if not df.empty:
            df = df.drop_duplicates(['currency code', 'currency name'], keep='last')
            df = df.reset_index(drop=True)
            df.to_csv(my_file, index=False, encoding='utf-8')
            s3 = savetos3.SaveS3(my_file, self.catalog)
            s3.main()
        self.avcurs = df["currency code"].tolist()
        return self.avcurs
    except requests.exceptions.RequestException as e:
        print(e)
    except Exception as e:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print(e)
def coin_miner_data(self):
    """Write per-coin mining data from the cryptocompare payload to one CSV per coin."""
    try:
        data = self.data
        source = 'cryptocompare'
        if data["CoinData"]:
            keys = list(data['CoinData'].keys())
            #for key in tqdm(keys, desc='coin_miner_data'):
            for key in keys:
                frames = []
                sub = data['CoinData'][key]
                df = p.DataFrame.from_dict(sub, orient='index', dtype=None)
                df = df.transpose()
                df = df.assign(utc=time.time(), hostname=socket.gethostname(),
                               source=source, symbol=key)
                frames.append(df)
                my_file = self.cwd + '/data/mining_data/coin_miner_data/%s_mining.csv' % key
                if os.path.isfile(my_file):
                    df_resident = p.read_csv(my_file, encoding='utf-8')
                    frames.append(df_resident)
                df = p.concat(frames)
                df = df.drop_duplicates([
                    'Symbol', 'TotalCoinsMined', 'BlockReward',
                    'DifficultyAdjustment', 'BlockRewardReduction',
                    'BlockNumber', 'PreviousTotalCoinsMined'
                ], keep='last')
                df = df.sort_values('symbol')
                df = df.reset_index(drop=True)
                if not df.empty:
                    df.to_csv(my_file, index=False, encoding='utf-8')
                    s3 = savetos3.SaveS3(my_file, self.catalog)
                    s3.main()
                else:
                    print('No data for ' + str(key))
        else:
            print('No coin_miner_data')
    except Exception as e:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print(e)
def get_metals(self, metal, error_symbols):
    """Pull an LBMA metal price series from Quandl and merge it into the per-metal CSV."""
    try:
        df = quandl.get(metal, authtoken="kzmH8ENEsNUc5GkS9bum")
        pattern = metal
        metal = metal.replace('LBMA/', '')
        df = df.assign(utc=time.time(), hostname=socket.gethostname(),
                       source='quandl', pattern=pattern, metal=metal, Date='')
        df['Date'] = df.index
        my_file = self.cwd + '/data/lbma/metals/lbma_' + metal + '/' + metal + '.csv'
        frames = [df]
        if os.path.isfile(my_file):
            df_resident = p.read_csv(my_file, encoding='utf-8')
            frames.append(df_resident)
        df = p.concat(frames)
        if not df.empty:
            df = df.drop_duplicates(['pattern', 'Date'], keep='last')
            df = df.reset_index(drop=True)
            df.to_csv(my_file, index=False, encoding='utf-8')
            s3 = savetos3.SaveS3(my_file, self.catalog)
            s3.main()
    except LimitExceededError:
        # Quandl rate limit hit; remember the code so the caller can retry it.
        error_symbols.append(metal)
    except Exception:
        pass
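# A hedged usage sketch for get_metals: Quandl LBMA dataset codes such as
# 'LBMA/GOLD' or 'LBMA/SILVER' are passed one at a time, and codes that hit the
# Quandl rate limit are collected in error_symbols so the caller can retry
# them later. The instance name `collector` below is illustrative only.
#
#     error_symbols = []
#     for code in ('LBMA/GOLD', 'LBMA/SILVER'):
#         collector.get_metals(code, error_symbols)
#     if error_symbols:
#         sleep(10)  # back off before retrying rate-limited codes
#         for code in error_symbols:
#             collector.get_metals(code, [])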
def get_hour_hist(self, symbol, error_symbols):
    """Pull aggregated hourly OHLCV history for a symbol from each exchange and merge it into the per-symbol hour CSV."""
    currentts = str(int(time.time()))
    frames = []
    for exchange in self.exchanges:
        url = "https://min-api.cryptocompare.com/data/histohour"
        querystring = {
            "fsym": symbol,
            "tsym": "USD",
            "limit": "2000",
            "aggregate": "3",
            "e": exchange,
            "toTs": currentts
        }
        headers = {
            'cache-control': "no-cache",
            'postman-token': "e00df90c-b8b6-cb28-54ff-88c19b883e0a"
        }
        try:
            response = requests.request("GET", url, headers=headers, params=querystring)
            if response.status_code == 200:
                data = response.json()
                if data["Data"] != [] and data["Response"] == "Success":
                    df = p.DataFrame(data["Data"])
                    df = df.assign(symbol=symbol, utc=time.time(),
                                   hostname=socket.gethostname(),
                                   exchange=exchange, source='cryptocompare')
                    frames.append(df)
        except requests.exceptions.RequestException:
            logging.info('------')
            logging.error(traceback.format_exc())
            logging.info('------')
            logging.exception(traceback.format_exc())
            logging.info('------')
            error_symbols.append(symbol)
            sleep(0.2)
        except OverflowError:
            print('OverflowError: ' + str(symbol))
            logging.info('------')
            logging.error(traceback.format_exc())
            logging.info('------')
            logging.exception(traceback.format_exc())
            logging.info('------')
        except Exception:
            logging.info('------')
            logging.error(traceback.format_exc())
            logging.info('------')
            logging.exception(traceback.format_exc())
            logging.info('------')
    try:
        if len(frames) > 0:
            my_file = self.cwd + '/data/hour_data/' + symbol + '_hour.csv'
            if os.path.isfile(my_file):
                df_resident = p.read_csv(my_file, encoding='utf-8')
                frames.append(df_resident)
            df = p.concat(frames)
            if not df.empty:
                df = df.drop_duplicates(['symbol', 'time', 'exchange'], keep='last')
                df = df.sort_values('time')
                df = df.reset_index(drop=True)
                df.to_csv(my_file, index=False, encoding='utf-8')
                s3 = savetos3.SaveS3(my_file, self.catalog)
                s3.main()
    except Exception as e:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print(e)
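# A hedged sketch of reading back one of the per-symbol hour files written by
# get_hour_hist and converting the Unix-second 'time' column to datetimes for
# analysis. The function name, the default 'BTC' symbol, and the cwd argument
# are illustrative only; pandas is assumed to be imported as p, as elsewhere
# in this module.
def load_hour_history(cwd, symbol='BTC'):
    """Load a symbol's aggregated hour history and add a datetime column."""
    my_file = cwd + '/data/hour_data/' + symbol + '_hour.csv'
    df = p.read_csv(my_file, encoding='utf-8')
    # cryptocompare histohour timestamps are Unix epoch seconds
    df['datetime'] = p.to_datetime(df['time'], unit='s')
    return df.sort_values('datetime').reset_index(drop=True)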
def getstocklist(self):
    """Download the Quandl WIKI dataset codes, merge them into the dim_stocks CSV, and return the stock patterns to pull."""
    try:
        frames = []
        my_file = self.cwd + '/data/quandlstocks/dim_stocks/' + 'WIKI-datasets-codes.csv'
        if os.path.isfile(my_file):
            df_resident = p.read_csv(my_file, encoding='utf-8')
            frames.append(df_resident)
        url = "https://www.quandl.com/api/v3/databases/WIKI/codes?api_key=kzmH8ENEsNUc5GkS9bum"
        request = requests.get(url)
        data = zipfile.ZipFile(BytesIO(request.content))
        x = data.namelist()
        for y in x:
            my_file = data.extract(y, self.cwd + '/data/quandlstocks/dim_stocks/')
            column_names = ['pattern', 'description']
            df = p.read_csv(my_file, header=None, names=column_names)
            df = df.reset_index(drop=True)
            descriptionlist = df["description"].tolist()
            mitem = []
            mlocation = []
            for z in descriptionlist:
                z = z.replace('- All T', 'All T')
                parts = z.split(" - ")
                mitem.append(parts[0])
                if len(parts) == 2:
                    mlocation.append(parts[1])
                else:
                    mlocation.append(None)
            # DataFrame.from_items and concat's join_axes argument were removed
            # from pandas, so the item/location columns are built and joined
            # with the current API instead.
            df_desc = p.DataFrame({'item': mitem, 'location': mlocation})
            df = p.concat([df, df_desc], axis=1).reindex(df.index)
            df = df.assign(utc=time.time(), hostname=socket.gethostname(), source='quandl')
            frames.append(df)
        df = p.concat(frames)
        if not df.empty:
            df = df.drop_duplicates(['pattern', 'description'], keep='last')
            df = df.reset_index(drop=True)
            df.to_csv(my_file, index=False, encoding='utf-8')
            s3 = savetos3.SaveS3(my_file, self.catalog)
            s3.main()
        # location != location keeps rows whose location is NaN (no region suffix).
        x = df.query("(location != location) or (location == 'All Areas')")
        self.stocklist = x["pattern"].tolist()
        return self.stocklist
    except requests.exceptions.RequestException as e:
        print(e)
    except Exception as e:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print(e)
def trading_partners(self, symbol, error_symbols):
    my_file = self.cwd + '/data/trading_pair/%s_trading_pair.csv' % symbol
    frames = []
    source = "cryptocompare"
    url = "https://min-api.cryptocompare.com/data/top/pairs"
    querystring = {"fsym": symbol, "limit": "2000"}
    headers = {
        'cache-control': "no-cache",
        'postman-token': "d0123538-5878-5919-128f-7dda59bb21b4"
    }
    try:
        response = requests.request("GET", url, headers=headers, params=querystring)
        if response.status_code == 200:
            data = response.json()
            if data["Data"] != [] and data["Response"] == "Success":
                df = p.DataFrame(data["Data"])
                df = df.assign(symbol=symbol, utc=time.time(),
                               hostname=socket.gethostname(), source=source)
                frames.append(df)
                if os.path.isfile(my_file):
                    df_resident = p.read_csv(my_file, encoding='utf-8')
                    frames.append(df_resident)
                df = p.concat(frames)
                if not df.empty:
                    df = df.drop_duplicates([
                        'exchange', 'fromSymbol', 'toSymbol', 'volume24h',
                        'volume24hTo', 'source', 'timestamp_api_call'
                    ], keep='last')
                    #df = df.sort_values('timestamp_api_call')
                    df = df.reset_index(drop=True)
                    df.to_csv(my_file, index=False, encoding='utf-8')
                    s3 = savetos3.SaveS3(my_file, self.catalog)
                    s3.main()
    except requests.exceptions.RequestException:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        error_symbols.append(symbol)
        sleep(0.2)
    except OverflowError:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print('OverflowError: ' + str(symbol))
    except Exception:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
def miner_data(self):
    """Write cryptocompare mining-equipment data to mining_equipment.csv."""
    try:
        data = self.data
        source = 'cryptocompare'
        if data["MiningData"]:
            keys = list(data['MiningData'].keys())
            frames = []
            #for key in tqdm(keys, desc='miner_data'):
            for key in keys:
                sub = data['MiningData'][key]
                df = p.DataFrame.from_dict(sub, orient='index', dtype=None)
                df = df.transpose()
                df = df.assign(utc=time.time(), hostname=socket.gethostname(),
                               source=source, symbol=key)
                frames.append(df)
            my_file = self.cwd + '/data/mining_data/miner_data/mining_equipment.csv'
            if os.path.isfile(my_file):
                df_resident = p.read_csv(my_file, encoding='utf-8')
                frames.append(df_resident)
            df = p.concat(frames)
            df = df.drop_duplicates([
                'Company', 'Cost', 'CurrenciesAvailable',
                'HashesPerSecond', 'Name'
            ], keep='last')
            df = df.sort_values('CurrenciesAvailable')
            df = df.reset_index(drop=True)
            if not df.empty:
                df.to_csv(my_file, index=False, encoding='utf-8')
                s3 = savetos3.SaveS3(my_file, self.catalog)
                s3.main()
            else:
                print('No data: ')
        else:
            print('No miner_data')
    except Exception as e:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print(e)
def ticker(self):
    frames = []
    url = "https://api.coinmarketcap.com/v1/ticker/"
    querystring = {"limit": "0"}
    headers = {
        'cache-control': "no-cache",
        'postman-token': "a9ed8f5a-9fa5-b77b-6d59-ab97bb499b5e"
    }
    try:
        response = requests.request("GET", url, headers=headers, params=querystring)
        if response.status_code == 200:
            data = response.json()
            df = p.DataFrame(data)
            df = df.assign(utc=time.time(), hostname=socket.gethostname(),
                           source='coinmarketcap')
            frames.append(df)
            my_file = self.cwd + '/data/coinmarketcap/ticker.csv'
            if os.path.isfile(my_file):
                df_resident = p.read_csv(my_file, encoding='utf-8')
                frames.append(df_resident)
            df = p.concat(frames)
            if not df.empty:
                df = df.drop_duplicates(['symbol', 'last_updated'], keep='last')
                df = df.sort_values('symbol')
                df = df.reset_index(drop=True)
                df.to_csv(my_file, index=False, encoding='utf-8')
                s3 = savetos3.SaveS3(my_file, self.catalog)
                s3.main()
    except requests.exceptions.RequestException:
        sleep(0.2)
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
    except OverflowError:
        print('OverflowError: ')
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
    except Exception:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
def get_rate(self, symbol, error_symbols):
    """Fetch Alpha Vantage exchange rates from symbol to each top currency and merge them into the per-symbol CSV."""
    sleep(.5)
    frames = []
    local = []
    for x in self.top_currencies:
        local.append(x)
    for to_currency in local:
        sleep(2)
        url = "https://www.alphavantage.co/query"
        querystring = {
            "function": "CURRENCY_EXCHANGE_RATE",
            "from_currency": symbol,
            "to_currency": to_currency,
            "apikey": "6258AGUENRIIG1MH"
        }
        headers = {
            'cache-control': "no-cache",
            'postman-token': "4e1fbe35-c6a5-ecae-4766-b31f6e63f985"
        }
        try:
            response = requests.request("GET", url, headers=headers, params=querystring)
            if response.status_code == 200:
                data = response.json()
                keys = list(data.keys())
                if 'Information' not in keys and len(keys) > 0 and 'Error Message' not in keys:
                    if data["Realtime Currency Exchange Rate"] != []:
                        df = p.DataFrame(data["Realtime Currency Exchange Rate"], index=[0])
                        df = df.assign(symbol=symbol, utc=time.time(),
                                       hostname=socket.gethostname(),
                                       source='alphavantage')
                        frames.append(df)
                else:
                    # Rate limited or errored: re-queue this currency and back off.
                    local.append(to_currency)
                    sleep(2)
        except requests.exceptions.RequestException:
            error_symbols.append(symbol)
            logging.info('------')
            logging.error(traceback.format_exc())
            logging.info('------')
            logging.exception(traceback.format_exc())
            logging.info('------')
        except OverflowError:
            print('OverflowError: ' + str(symbol))
            logging.info('------')
            logging.error(traceback.format_exc())
            logging.info('------')
            logging.exception(traceback.format_exc())
            logging.info('------')
        except Exception:
            logging.info('------')
            logging.error(traceback.format_exc())
            logging.info('------')
            logging.exception(traceback.format_exc())
            logging.info('------')
    try:
        if len(frames) > 0:
            my_file = self.cwd + '/data/currency_exchange_rates/' + symbol + '_curexrate.csv'
            if os.path.isfile(my_file):
                df_resident = p.read_csv(my_file, encoding='utf-8')
                frames.append(df_resident)
            df = p.concat(frames)
            if not df.empty:
                df = df.drop_duplicates([
                    '1. From_Currency Code', '2. From_Currency Name',
                    '6. Last Refreshed'
                ], keep='last')
                df = df.sort_values('6. Last Refreshed')
                df = df.reset_index(drop=True)
                df.to_csv(my_file, index=False, encoding='utf-8')
                s3 = savetos3.SaveS3(my_file, self.catalog)
                s3.main()
    except Exception as e:
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print(e)
def main(self):
    """Chunk the symbol list, fetch price details for each symbol on worker threads, and merge the results into price.csv."""
    frames = []
    error_symbols = []
    print('BEGIN: GetDtlPrice.main')
    try:
        gdl = GetDtlPrice(self.symbol_list, self.exchanges, self.chunksize,
                          self.cwd, self.catalog)
        xsymbols = [
            self.symbol_list[x:x + self.chunksize]
            for x in range(0, len(self.symbol_list), self.chunksize)
        ]
        for symbol_list in tqdm(xsymbols, desc='get_price_details_for_symbols'):
            threads = [
                threading.Thread(target=gdl.get_price_details_for_symbols,
                                 args=(symbol, frames, error_symbols))
                for symbol in symbol_list
            ]
            for thread in threads:
                thread.start()
            for thread in threads:
                thread.join()
            if len(error_symbols) > 0:
                # Re-queue symbols that errored so they are retried in a later batch.
                xsymbols.append(error_symbols)
                error_symbols = []
        my_file = self.cwd + '/data/pricedetails/price.csv'
        if os.path.isfile(my_file):
            df_resident = p.read_csv(my_file, encoding='utf-8')
            frames.append(df_resident)
        if len(frames) > 0:
            df = p.concat(frames)
            if not df.empty:
                df = df.drop_duplicates(
                    ['FROMSYMBOL', 'LASTUPDATE', 'LASTMARKET', 'MARKET'],
                    keep='last')
                df = df.sort_values('LASTUPDATE')
                df = df.reset_index(drop=True)
                df.to_csv(my_file, index=False, encoding='utf-8')
                s3 = savetos3.SaveS3(my_file, self.catalog)
                s3.main()
        print('DONE')
    except Exception as e:
        print(e)
        logging.info('------')
        logging.error(traceback.format_exc())
        logging.info('------')
        logging.exception(traceback.format_exc())
        logging.info('------')
        print('Error: GetDtlPrice.main')
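# A minimal sketch of the chunking used in main(): the symbol list is sliced
# into batches of self.chunksize and each batch gets one thread per symbol.
# chunk_symbols is a hypothetical standalone helper shown only to illustrate
# the slicing expression; the example values are made up.
def chunk_symbols(symbol_list, chunksize):
    """Split symbol_list into consecutive batches of at most chunksize items."""
    return [symbol_list[x:x + chunksize]
            for x in range(0, len(symbol_list), chunksize)]

# chunk_symbols(['BTC', 'ETH', 'LTC', 'XRP', 'XMR'], 2)
# -> [['BTC', 'ETH'], ['LTC', 'XRP'], ['XMR']]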