def test_trade_history_huobi_methods():
    """Manual smoke test: fetch Huobi order history and load recent trades into Postgres.

    Requires valid API keys at API_KEY_PATH and a reachable Postgres instance.
    """
    load_keys(API_KEY_PATH)
    key = get_key_by_exchange(EXCHANGE.HUOBI)
    time_end = get_now_seconds_utc()
    time_start = 0  # time_end - POLL_TIMEOUT
    pair_name = get_currency_pair_to_huobi(CURRENCY_PAIR.BTC_TO_LSK)
    huobi_orders_by_pair = get_order_history_huobi(key, pair_name, time_start, time_end)
    for pair_id in huobi_orders_by_pair:
        pair_name = get_currency_pair_to_huobi(pair_id)
        print "PAIR NAME: ", pair_name
        for b in huobi_orders_by_pair[pair_id]:
            print b
        # NOTE(review): here the same helper is unpacked as a (res, history) tuple,
        # while above its result is used as a dict keyed by pair — one of the two
        # usages is likely stale; confirm the current return convention.
        res, order_history = get_order_history_huobi(key, pair_name, time_start, time_end)
        if len(order_history) > 0:
            for b in order_history:
                print b
    pg_conn = init_pg_connection(_db_host=DB_HOST, _db_port=DB_PORT, _db_name=DB_NAME)
    load_recent_huobi_trades_to_db(pg_conn, time_start, time_end, unique_only=True)
def load_trades_from_csv_to_db():
    """Load Bittrex trades from 'all_orders.csv', group them by pair, and save
    each group into the 'arbitrage_trades' Postgres table with a progress bar.
    """
    file_name = "all_orders.csv"
    # NOTE(review): start_time (-1) is GREATER than end_time (-2), so the range
    # check below can never hold and no rows will ever be loaded. This looks like
    # a leftover placeholder — confirm the intended time window before use.
    start_time = -1
    end_time = -2
    pg_conn = init_pg_connection(_db_host=DB_HOST, _db_port=DB_PORT, _db_name=DB_NAME)
    bittrex_order_by_pair = defaultdict(list)
    with open(file_name, 'r') as f:
        reader = csv.reader(f)
        for row in reader:
            new_trade = Trade.from_bittrex_scv(row)
            # Keep only trades created within [start_time, end_time].
            if start_time <= new_trade.create_time <= end_time:
                bittrex_order_by_pair[new_trade.pair_id].append(new_trade)
    unique_only = True
    for pair_id in bittrex_order_by_pair:
        headline = "Loading bittrex trades - {p}".format(
            p=get_currency_pair_to_bittrex(pair_id))
        # init_arbitrage_id=-20 marks these rows as CSV-imported (presumably a
        # sentinel id; verify against the table's conventions).
        wrap_with_progress_bar(headline, bittrex_order_by_pair[pair_id],
                               save_to_pg_adapter, pg_conn, unique_only,
                               is_trade_present_in_trade_history,
                               init_arbitrage_id=-20,
                               table_name="arbitrage_trades")
def update_arbitrage_id(host=DEFAULT_REDIS_HOST, redis_port=6379,
                        db_host="192.168.1.106", db_port=5432, db_name="crypto"):
    """Read the next arbitrage id from Postgres and publish it to Redis.

    :param host: redis host to write to
    :param redis_port: redis port (was hard-coded; default preserves old behavior)
    :param db_host: Postgres host (was hard-coded; default preserves old behavior)
    :param db_port: Postgres port
    :param db_name: Postgres database name
    """
    r = _redis.StrictRedis(host=host, port=redis_port, db=0)
    pg_conn = init_pg_connection(_db_host=db_host, _db_port=db_port, _db_name=db_name)
    next_arbitrage_id = get_arbitrage_id(pg_conn)
    # Stored as a string — redis values are byte strings anyway.
    r.set('arbitrage_id', str(next_arbitrage_id))
def process_args(args):
    """Bootstrap from parsed CLI args: open a Postgres connection and configure logging.

    :param args: parsed arguments object exposing a ``cfg`` path
    :return: tuple of (pg_conn, settings)
    """
    settings = CommonSettings.from_cfg(args.cfg)

    pg_conn = init_pg_connection(_db_host=settings.db_host,
                                 _db_port=settings.db_port,
                                 _db_name=settings.db_name)

    # Logging configuration comes from the same settings object.
    set_log_folder(settings.log_folder)
    set_logging_level(settings.logging_level_id)

    return pg_conn, settings
def test_insert_order():
    """Manual smoke test: persist one hand-crafted Poloniex SELL trade into Postgres."""
    from enums.exchange import EXCHANGE
    from enums.deal_type import DEAL_TYPE
    from enums.currency_pair import CURRENCY_PAIR

    sample_trade = Trade(DEAL_TYPE.SELL, EXCHANGE.POLONIEX,
                         CURRENCY_PAIR.BTC_TO_ARDR,
                         0.00001, 10.4, 1516039961, 1516039961)
    connection = init_pg_connection(_db_host=DB_HOST,
                                    _db_port=DB_PORT,
                                    _db_name=DB_NAME)
    save_order_into_pg(sample_trade, connection)
def test_trade_present(): pg_conn = init_pg_connection(_db_host=DB_HOST, _db_port=DB_PORT) # 6479142 ts = 1516142509 trade = Trade(DEAL_TYPE.BUY, EXCHANGE.BINANCE, CURRENCY_PAIR.BTC_TO_STRAT, price=0.001184, volume=2.08, order_book_time=ts, create_time=ts, execute_time=ts, order_id='whatever') res = is_trade_present_in_trade_history(pg_conn, trade, table_name="tmp_history_trades") print res
def test_order_presence(): pg_conn = init_pg_connection(_db_host=DB_HOST, _db_port=DB_PORT) # 6479142 ts = get_now_seconds_utc() some_trade = Trade(DEAL_TYPE.BUY, EXCHANGE.BINANCE, CURRENCY_PAIR.BTC_TO_STRAT, price=0.001184, volume=2.08, order_book_time=ts, create_time=ts, execute_time=ts, order_id='whatever') res = is_order_present_in_order_history(pg_conn, some_trade, table_name="tmp_binance_orders") print res
def run_analysis_over_db(deal_threshold, analysis_method): print "<<< WARNING >>> Non UPDATED FOR AGES." # FIXME NOTE: accumulate profit pg_conn = init_pg_connection() time_entries = get_time_entries(pg_conn) time_entries_num = len(time_entries) print "Order_book num: ", time_entries_num cnt = 0 MAX_ORDER_BOOK_COUNT = 10000 current_balance = custom_balance_init(time_entries[0]) deal_cap = common_cap_init() for exchange_id in current_balance.balance_per_exchange: print current_balance.balance_per_exchange[exchange_id] for every_time_entry in time_entries: order_book_grouped_by_time = get_order_book_by_time(pg_conn, every_time_entry) for order_book in order_book_grouped_by_time: analysis_method (order_book, deal_threshold, current_balance, log_to_file, deal_cap) cnt += 1 msg = "Processed order_book #{cnt} out of {total} time entries\n current_balance={balance}".format( cnt=cnt, total=time_entries_num, balance=str(current_balance)) print msg log_to_file(msg, "history_trades.txt") if cnt == MAX_ORDER_BOOK_COUNT: raise print "At the end of processing we have following balance:" print "NOTE: supposedly all buy and sell requests were fulfilled" for exchange_id in current_balance.balance_per_exchange: print current_balance.balance_per_exchange[exchange_id]
should_fetch_history_to_db = config.getboolean( "profit_report", "fetch_history_from_exchanges") fetch_from_start = config.getboolean("profit_report", "fetch_from_start") start_time = parse_time(config.get("profit_report", "start_time"), '%Y-%m-%d %H:%M:%S') end_time = parse_time(config.get("profit_report", "end_time"), '%Y-%m-%d %H:%M:%S') if start_time == end_time or end_time <= start_time: print "Wrong time interval provided! {ts0} - {ts1}".format( ts0=start_time, ts1=end_time) assert False pg_conn = init_pg_connection(_db_host=db_host, _db_port=db_port, _db_name=db_name) key_path = config.get("keys", "path_to_api_keys") log_folder = config.get("logging", "logs_folder") load_keys(key_path) set_log_folder(log_folder) if should_fetch_history_to_db: fetch_trades_history_to_db(pg_conn, start_time, end_time, fetch_from_start) orders, history_trades = prepare_data(pg_conn, start_time, end_time) missing_orders, failed_orders, orders_with_trades = group_trades_by_orders( orders, history_trades)