def bian_data(self, symbol, date_type):
    """Fetch Binance kline (candlestick) data and write it to a CSV file.

    Parameters:
        symbol: trading-pair symbol as Binance expects it (e.g. 'BTCUSDT').
        date_type: kline interval string passed to the API (e.g. '1day').

    Writes 'bian_<symbol>.csv' via self.write_csv_file with self.head_line
    as the header row. Returns None.
    """
    headers = {
        "Content-type": "application/x-www-form-urlencoded",
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36'
    }
    request_url = 'https://api.binance.com/api/v1/klines?symbol=' + symbol + '&interval=' + date_type
    res_json = common_fun.get_url_json(request_url, headers)
    file_name = 'bian_' + symbol + '.csv'
    write_data = []
    for data in res_json:
        # data[0] is the kline open time in milliseconds; convert to a
        # local-time string to match the other exchanges' CSV output.
        record_date = time.strftime("%Y-%m-%d %H:%M:%S",
                                    time.localtime(data[0] / 1000))
        # data[1:6] are open, high, low, close, volume — take them as a
        # slice instead of six separate append() calls.
        write_data.append([record_date] + list(data[1:6]))
    self.write_csv_file(file_name, write_data, self.head_line)
    return
def foreign_data_to_sql(self):
    """Load hand-collected foreign city rows from CSV and insert them into
    the `city_foreign` table, resolving each city's Baidu city code first.

    CSV columns used: 0 = city name, 1 = claim temperature, 2 = country
    text, 3 = latitude, 4 = longitude. Header and coordinate-less rows
    are skipped.
    """
    db = Mysqldb(config.MySqlHost, config.MySqlUser, config.MySqlPasswd,
                 config.MySqlDb, config.MySqlPort)
    county_list = ['united-states', 'japan', 'south-korea', 'singapore']
    with open('foreign_loc_index_shougong.csv', 'r', encoding='utf8') as csvfile:
        reader = csv.reader(csvfile)
        for line in reader:
            # Skip the header row ('城市名' = "city name") and any row
            # that is missing latitude or longitude.
            if line[0] == '城市名':
                continue
            if line[3] == '' or line[4] == '':
                continue
            # NOTE(review): the Baidu 'ak' API key is hard-coded here —
            # it should live in config, not source.
            url = ('http://api.map.baidu.com/geocoder/v2/?location='
                   + str(line[3]) + ',' + str(line[4])
                   + '&output=json&pois=0&ak=lS5SlxcGqXfkuj3pcwRGBv90')
            res_data = common_fun.get_url_json(url)
            city_code = res_data['result']['cityCode']
            country = ''
            for county_str in county_list:
                if county_str in line[2]:
                    country = county_str
                    break  # entries are mutually exclusive; stop at first hit
            # NOTE(review): SQL built by string concatenation from external
            # data — switch to parameterized queries if Mysqldb supports them.
            insert_str = "INSERT INTO city_foreign (country, city, latitude, longitude, altitude, is_use, city_code, claim_temperature)"
            insert_str += "VALUES ('" + country + "', '" + line[0] + "'," + str(line[3]) + "," + str(line[4]) + ", 0, 0," + str(city_code) + "," + str(line[1]) + ")"
            db.insert(insert_str)
def huobi_data(self, symbol, date_type):
    """Download 200 Huobi kline records for *symbol* at the given *date_type*
    period and write them to 'huobi_<symbol>.csv' via self.write_csv_file."""
    headers = {
        "Content-type": "application/x-www-form-urlencoded",
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36'
    }
    request_url = ('https://api.huobipro.com/market/history/kline?period='
                   + date_type + '&size=200&symbol=' + symbol)
    res_json = common_fun.get_url_json(request_url, headers)
    rows = []
    for candle in res_json['data']:
        # candle['id'] is a unix timestamp in seconds.
        stamp = time.strftime("%Y-%m-%d %H:%M:%S",
                              time.localtime(candle['id']))
        rows.append([stamp, candle['open'], candle['high'], candle['low'],
                     candle['close'], candle['amount']])
    self.write_csv_file('huobi_' + symbol + '.csv', rows, self.head_line)
    return
def city_id_loc(self):
    """Build a city-name -> claim-temperature index from 'city_index.csv',
    then for each matching city row in 'cityidloc.csv' look up its Baidu
    city code and insert a row into the `city_location` table.
    """
    city_index = {}
    with open('city_index.csv', 'r', encoding='utf8') as csvfile:
        reader = csv.reader(csvfile)
        for line in reader:
            if line[0] == 'city_name':  # header row
                continue
            # First occurrence of a city wins; later duplicates are ignored.
            if line[0] not in city_index:
                city_index[line[0]] = int(line[1])
    db = Mysqldb(config.MySqlHost, config.MySqlUser, config.MySqlPasswd,
                 config.MySqlDb, config.MySqlPort)
    with open('cityidloc.csv', 'r', encoding='utf8') as csvfile:
        reader = csv.reader(csvfile)
        for line in reader:
            # Only rows where columns 1 and 2 agree (presumably city-level
            # rows where city == district — confirm against the CSV source),
            # and only cities we have a temperature entry for.
            if line[1] != line[2]:
                continue
            if line[1] not in city_index:
                continue
            # NOTE(review): hard-coded Baidu 'ak' key — move it to config.
            url = ('http://api.map.baidu.com/geocoder/v2/?location='
                   + str(line[4]) + ',' + str(line[5])
                   + '&output=json&pois=0&ak=lS5SlxcGqXfkuj3pcwRGBv90')
            res_data = common_fun.get_url_json(url)
            city_code = res_data['result']['cityCode']
            # NOTE(review): SQL assembled by string concatenation — prefer
            # parameterized queries if the Mysqldb wrapper supports them.
            insert_str = "INSERT INTO city_location (country, city, latitude, longitude, altitude, is_use, city_code, claim_temperature)"
            insert_str += "VALUES ('China', '" + line[1] + "'," + str(line[4]) + "," + str(line[5]) + ", 0, 0," + str(city_code) + "," + str(city_index[line[1]]) + ")"
            db.insert(insert_str)
def tickers(self):
    """Fetch Binance 24h tickers and batch-insert snapshots into bian_tickers.

    Symbols are normalized from e.g. 'ETHBTC' to 'ETH_BTC' using the quote
    assets listed in self.quot_asset; symbols matching no quote asset are
    inserted with an empty currency_pair (original behavior preserved).
    """
    request_url = self.base_url + '/api/v1/ticker/24hr'
    res_json = common_fun.get_url_json(request_url, self.headers)
    insert_list = []
    data_time = int(time.time())
    insert_str = ''  # keep defined for the error report if res_json is empty
    for currencys in res_json:
        currency_pair = ''
        for quot in self.quot_asset:
            if currencys['symbol'].endswith(quot):
                # Slice the suffix off instead of str.replace(), which
                # rewrites the FIRST occurrence and would mangle a symbol
                # containing the quote asset earlier in the string.
                currency_pair = currencys['symbol'][:-len(quot)] + '_' + quot
                break
        high = currencys['highPrice']
        low = currencys['lowPrice']
        last = currencys['lastPrice']
        sell = currencys['askPrice']
        buy = currencys['bidPrice']
        vol = currencys['volume']
        insert_str = "INSERT INTO bian_tickers (date_time, currency_pair, high, low, last, sell, buy, vol)"
        insert_str += "VALUES (" + str(data_time) + ",'" + currency_pair + "'," + str(high) + "," + str(
            low) + "," + str(last) + "," + str(sell) + "," + str(buy) + "," + str(vol) + ");"
        insert_list.append(insert_str)
    try:
        self.db.execute_list(insert_list)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate; failure stays best-effort (log only).
        print(insert_str)
        print('insert_list tickers err data_time = ', data_time)
def kline_data(self, symbol, date_type):
    """Fetch kline history for one symbol dict and return the JSON payload,
    tagged with a 'tx_name' key of the form 'base_quote'.

    Parameters:
        symbol: dict with 'base-currency' and 'quote-currency' keys.
        date_type: kline period string passed to the API.

    Bug fix: the original built and tagged res_json but never returned it,
    discarding the whole request. Callers that ignored the (None) return
    are unaffected by now returning the payload.
    """
    tx_name = symbol['base-currency'] + '_' + symbol['quote-currency']
    request_url = (self.base_url + '/market/history/kline?period=' + date_type
                   + '&size=200&symbol='
                   + symbol['base-currency'] + symbol['quote-currency'])
    print(request_url)
    res_json = common_fun.get_url_json(request_url)
    res_json['tx_name'] = tx_name
    return res_json
def symbol_data(self):
    """Populate self.symbols with one {'base-currency', 'quote-currency'}
    dict per entry returned by the exchange's common/symbols endpoint."""
    request_url = self.base_url + '/v1/common/symbols'
    res_json = common_fun.get_url_json(request_url)
    self.symbols.extend(
        {'base-currency': entry['base-currency'],
         'quote-currency': entry['quote-currency']}
        for entry in res_json['data']
    )
def tickers(self):
    """Fetch all OKEx tickers in one call and batch-insert snapshot rows
    into okex_tickers, timestamped with the server-provided 'date'."""
    request_url = self.base_url + 'tickers.do'
    res_json = common_fun.get_url_json(request_url, self.headers)
    insert_list = []
    data_time = res_json['date']
    insert_str = ''  # keep defined for the error report if tickers is empty
    for ticker in res_json['tickers']:
        insert_str = "INSERT INTO okex_tickers (date_time, currency_pair, high, low, last, sell, buy, vol)"
        insert_str += ("VALUES (" + str(data_time) + ",'" + str(ticker['symbol']) + "',"
                       + str(ticker['high']) + "," + str(ticker['low']) + ","
                       + str(ticker['last']) + "," + str(ticker['sell']) + ","
                       + str(ticker['buy']) + "," + str(ticker['vol']) + ");")
        insert_list.append(insert_str)
    try:
        self.db.execute_list(insert_list)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate; failure stays best-effort (log only).
        print(insert_str)
        print('insert_list tickers err data_time = ', data_time)
def ticker(self, symbol):
    """Fetch the merged ticker for one Huobi symbol dict and insert one
    snapshot row into huobi_tickers_copy.

    The 'last' column is stored as -1 (this endpoint's close price is not
    used here — original behavior preserved).
    """
    tx_name = symbol['base-currency'] + '_' + symbol['quote-currency']
    request_url = (self.base_url + '/market/detail/merged?symbol='
                   + symbol['base-currency'] + symbol['quote-currency'])
    res_json = common_fun.get_url_json(request_url)
    ticker = res_json['tick']
    # res_json['ts'] is in milliseconds. Use floor division so date_time is
    # written as an integer epoch second like the other exchanges' methods;
    # the original true division produced a float string ('…0.0').
    insert_str = "INSERT INTO huobi_tickers_copy (date_time, currency_pair, high, low, last, sell, buy, vol)"
    insert_str += ("VALUES (" + str(res_json['ts'] // 1000) + ",'" + str(tx_name) + "',"
                   + str(ticker['high']) + "," + str(ticker['low']) + "," + '-1' + ","
                   + str(ticker['ask'][0]) + "," + str(ticker['bid'][0]) + ","
                   + str(ticker['amount']) + ");")
    try:
        self.db.insert(insert_str)
    except Exception:
        # Narrowed from a bare except; log the failing statement only.
        print(insert_str)
        print('insert_list tickers err tx_name = ', tx_name)
def zb_Kline_data(self, symbol, date_type):
    """Download ZB kline data for *symbol* at interval *date_type* and write
    the rows to 'zb_<symbol>.csv' via self.write_csv_file."""
    request_url = ('http://api.zb.com/data/v1/kline?market=' + symbol
                   + '&type=' + date_type)
    res_json = common_fun.get_url_json(request_url)
    rows = []
    for candle in res_json['data']:
        # candle[0] is a millisecond timestamp; convert to local-time string.
        stamp = time.strftime("%Y-%m-%d %H:%M:%S",
                              time.localtime(candle[0] / 1000))
        rows.append([stamp, candle[1], candle[2], candle[3],
                     candle[4], candle[5]])
    self.write_csv_file('zb_' + symbol + '.csv', rows, self.head_line)
    return
def okex_data(self, symbol, date_type):
    """Download OKEx kline data for *symbol*/*date_type* and write it to
    'okex_<symbol>_<date_type>.csv'.

    Unlike the other exchange methods in this file, the raw timestamp in
    data[0] is written as-is, with no strftime conversion (the conversion
    was already disabled in the original — dead commented-out code removed).
    """
    request_url = ('https://www.okex.com/api/v1/kline.do?symbol=' + symbol
                   + '&type=' + date_type)
    res_json = common_fun.get_url_json(request_url)
    file_name = 'okex_' + symbol + '_' + date_type + '.csv'
    write_data = []
    for data in res_json:
        # Take the first six fields as one slice instead of six appends.
        write_data.append(list(data[0:6]))
    self.write_csv_file(file_name, write_data, self.head_line)
    return
def kraken_data(self, symbol, date_type):
    """Fetch Kraken OHLC data for *symbol* and write the rows to
    'kraken_<symbol>.csv' via self.write_csv_file."""
    request_url = ('https://api.kraken.com/0/public/OHLC?pair=' + symbol
                   + '&interval=' + date_type)
    res_json = common_fun.get_url_json(request_url)
    rows = []
    for candle in res_json['result'][symbol]:
        # candle[0] is a unix timestamp in seconds. Index 5 is deliberately
        # skipped and index 6 taken instead (presumably vwap vs. volume in
        # Kraken's OHLC layout — confirm against the API docs).
        stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(candle[0]))
        rows.append([stamp, candle[1], candle[2], candle[3],
                     candle[4], candle[6]])
    self.write_csv_file('kraken_' + symbol + '.csv', rows, self.head_line)
    return
def getmarketsummaries(self):
    """Fetch Bittrex market summaries and batch-insert snapshot rows into
    bittrex_tickers.

    MarketName is 'QUOTE-TOKEN'; it is stored flipped as 'TOKEN_QUOTE' to
    match the other exchanges' currency_pair format.
    """
    request_url = self.base_url + 'getmarketsummaries'
    res_json = common_fun.get_url_json(request_url)
    # Removed: res_json['updata_time'] = int(time.time()) — the dict was
    # never read again, so the (typo'd) key had no effect.
    insert_list = []
    data_time = int(time.time())
    insert_str = ''  # keep defined for the error report if result is empty
    for ticker in res_json['result']:
        market_name = ticker['MarketName'].split('-')
        token_name = market_name[1] + '_' + market_name[0]
        insert_str = "INSERT INTO bittrex_tickers (date_time, currency_pair, high, low, last, sell, buy, vol)"
        insert_str += ("VALUES (" + str(data_time) + ",'" + token_name + "',"
                       + str(ticker['High']) + "," + str(ticker['Low']) + ","
                       + str(ticker['Last']) + "," + str(ticker['Ask']) + ","
                       + str(ticker['Bid']) + "," + str(ticker['Volume']) + ");")
        insert_list.append(insert_str)
    try:
        self.db.execute_list(insert_list)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate; failure stays best-effort (log only).
        print(insert_str)
        print('insert_list tickers err data_time = ', data_time)
import okex_web_api
import sys
import os

# Make the current working directory importable so common_fun resolves.
path = os.getcwd()
sys.path.append(path)
import common_fun

base_url = 'https://www.okex.com/api/v1/'
headers = {
    "Content-type": "application/x-www-form-urlencoded",
}

# Seed the module-level symbol list and per-symbol ticker cache from a
# one-shot REST call, then start the web client subscribed to '_ticker'
# events.
request_url = base_url + 'tickers.do'
res_json = common_fun.get_url_json(request_url, headers)
for ticker in res_json['tickers']:
    sym = ticker['symbol']
    if sym not in okex_web_api.symbols:
        okex_web_api.symbols.append(sym)
        okex_web_api.current_ticker[sym] = ''

web_client = okex_web_api.WebClient()
web_client.make_events('_ticker')
web_client.run()