Example #1
def parse_order_id_binance(json_document):
    """
    {u'orderId': 6599290,
    u'clientOrderId': u'oGDxv6VeLXRdvUA8PiK8KR',
    u'origQty': u'27.79000000',
    u'symbol': u'OMGBTC',
    u'side': u'SELL',
    u'timeInForce': u'GTC',
    u'status': u'FILLED',
    u'transactTime': 1514223327566,
    u'type': u'LIMIT',
    u'price': u'0.00111100',
    u'executedQty': u'27.79000000'}
    """

    if is_error(json_document):

        msg = "parse_order_id_binance - error response - {er}".format(
            er=json_document)
        log_to_file(msg, ERROR_LOG_FILE_NAME)

        return None

    if "orderId" in json_document:
        return json_document["orderId"]

    return None
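A quick usage sketch, feeding the sample payload from the docstring through the parser (assumes the module's is_error helper is available; the literal below is abridged from the docstring sample):

# Hypothetical usage example - values copied from the docstring sample above.
sample_response = {"orderId": 6599290, "symbol": "OMGBTC", "status": "FILLED"}
order_id = parse_order_id_binance(sample_response)   # -> 6599290, or None for an error payload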
Example #2
def compare_price(tickers, threshold, predicate):
    """
    High level function that perform tickers analysis

    :param tickers: dict of dict where data are structured by exchange_id -> pair_id
    :param threshold: percentage, 0-100.0, float to trigger event
    :return: array of triplets pair_id, exchange_1.lowest_price, exchange_2.highest_bid
    """
    res = []

    sorted_tickers = get_matches(tickers, "pair_id")

    for pair_id in CURRENCY_PAIR.values():
        if pair_id in sorted_tickers:
            tickers_to_check = sorted_tickers[pair_id]

            if len(tickers_to_check) < 2:
                for b in tickers_to_check:
                    log_to_file("Ticker: not found ticker from other markets: " + str(b),
                                "ticker.log")
            else:
                current_result = check_all_combinations_list(tickers_to_check, threshold, predicate)
                if current_result:
                    res += current_result

    return res
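The docstring describes the data shapes but not the predicate itself. A minimal sketch of one plausible predicate, assuming each ticker exposes ask/bid attributes (the attribute names and the predicate signature are assumptions, not taken from the original code):

# Hypothetical predicate for compare_price: trigger when the spread between the bid on one
# exchange and the ask on another exceeds `threshold` percent.
def spread_exceeds_threshold(ticker_1, ticker_2, threshold):
    if ticker_1.ask <= 0:
        return False
    spread_pct = 100.0 * (ticker_2.bid - ticker_1.ask) / ticker_1.ask
    return spread_pct > threshold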
Example #3
def cancel_order_binance(key, pair_name, order_id):

    body = {
        "recvWindow": 5000,
        "timestamp": get_now_seconds_utc_ms(),
        "symbol": pair_name,
        "orderId": order_id
    }

    post_details = generate_post_request(BINANCE_CANCEL_ORDER, body, key)

    if get_logging_level() >= LOG_ALL_MARKET_RELATED_CRAP:
        msg = "cancel_order_binance: url - {url} headers - {headers} body - {body}".format(
            url=post_details.final_url,
            headers=post_details.headers,
            body=post_details.body)
        print_to_console(msg, LOG_ALL_MARKET_RELATED_CRAP)
        log_to_file(msg, "market_utils.log")

    err_msg = "cancel binance order with id {id}".format(id=order_id)

    res = send_delete_request_with_header(post_details, err_msg, max_tries=3)

    if get_logging_level() >= LOG_ALL_MARKET_RELATED_CRAP:
        print_to_console(res, LOG_ALL_MARKET_RELATED_CRAP)
        log_to_file(res, "market_utils.log")

    return res
Example #4
def add_sell_order_binance(key, pair_name, price, amount):

    post_details = add_sell_order_binance_url(key, pair_name, price, amount)

    err_msg = "add_sell_order binance called for {pair} for amount = {amount} " \
              "with price {price}".format(pair=pair_name, amount=amount, price=price)

    # NOTE: Yeah, body must be empty!
    res = send_post_request_with_header(post_details,
                                        err_msg,
                                        max_tries=BINANCE_NUM_OF_DEAL_RETRY,
                                        timeout=BINANCE_DEAL_TIMEOUT)
    """
    {
        "orderId": 1373492, 
        "clientOrderId": "e04JGgCpafdrR6O1lOLwgD",
        "origQty": "1.00000000",
        "symbol": "RDNBTC",
        "side": "SELL",
        "timeInForce": "GTC",
        "status": "NEW",
        "transactTime": 1512581721384,
        "type": "LIMIT",
        "price": "1.00022220",
        "executedQty": "0.00000000"
    }
    """

    if get_logging_level() >= LOG_ALL_MARKET_RELATED_CRAP:
        print_to_console(res, LOG_ALL_MARKET_RELATED_CRAP)
        log_to_file(res, "market_utils.log")

    return res
Example #5
def find_corresponding_trades(deal_from_bot, trade_history):
    res = []
    tot_volume = 0.0
    if deal_from_bot.exchange_id in [EXCHANGE.BITTREX, EXCHANGE.POLONIEX]:
        if deal_from_bot.pair_id in trade_history:
            res = [
                x for x in trade_history[deal_from_bot.pair_id]
                if x.order_id == deal_from_bot.order_id
            ]
        else:
            log_to_file(
                "NOT FOUND deal in history for {a_id}".format(
                    a_id=deal_from_bot), "what_we_have_at_the_end.log")
    elif deal_from_bot.exchange_id == EXCHANGE.BINANCE:
        if deal_from_bot.pair_id in trade_history:
            for trade in trade_history[deal_from_bot.pair_id]:
                if trade.trade_type == deal_from_bot.trade_type and 0 < deal_from_bot.execute_time - trade.execute_time < 2 \
                        and deal_from_bot.volume >= tot_volume:
                    tot_volume += trade.volume
                    res.append(trade)

        if not res:
            log_to_file(
                "NOT FOUND deal in history for {a_id}".format(
                    a_id=deal_from_bot), "what_we_have_at_the_end.log")
    else:
        assert False

    return res
Example #6
def log_responce_cant_be_parsed(work_unit, file_name=None):

    json_responce = ""
    try:
        json_responce = work_unit.future_value_json
    except Exception:
        pass

    responce_code = ""
    try:
        responce_code = work_unit.future_status_code
    except Exception:
        pass

    msg = """   ERROR
    For url {url} Response {resp} can't be parsed.
    HTTP Response code, if any: {hc}
    JSON Data, if any: {js} 
    """.format(url=work_unit.url, resp=work_unit.future_value, hc=responce_code, js=json_responce)
    log_to_file(msg, ERROR_LOG_FILE_NAME)

    if file_name is not None:
        log_to_file(msg, file_name)

    return msg
Example #7
def cancel_order_huobi(key, order_id):
    HUOBI_CANCEL_PATH = HUOBI_CANCEL_ORDER + str(order_id) + "/submitcancel"
    final_url = HUOBI_API_URL + HUOBI_CANCEL_PATH + "?"

    body = init_body(key)

    message = _urlencode(body).encode('utf8')

    msg = "POST\n{base_url}\n{path}\n{msg1}".format(base_url=HUOBI_API_ONLY,
                                                    path=HUOBI_CANCEL_PATH,
                                                    msg1=message)

    signature = sign_string_256_base64(key.secret, msg)

    body.append(("Signature", signature))

    final_url += _urlencode(body).encode('utf8')

    body = {}

    post_details = PostRequestDetails(final_url, HUOBI_POST_HEADERS, body)

    if get_logging_level() >= LOG_ALL_MARKET_RELATED_CRAP:
        msg = "cancel_order_huobi: url - {url} headers - {headers} body - {body}".format(
            url=final_url, headers=HUOBI_POST_HEADERS, body=body)
        print_to_console(msg, LOG_ALL_MARKET_RELATED_CRAP)
        log_to_file(msg, "market_utils.log")

    err_msg = "cancel huobi order with id {id}".format(id=order_id)

    return send_post_request_with_logging(post_details, err_msg)
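The pre-signed text above follows Huobi's v1 signing convention (HTTP method, host, path and URL-encoded parameters joined by newlines). sign_string_256_base64 is defined elsewhere in the repo; a plausible sketch, assuming an HMAC-SHA256 digest encoded as base64:

# Hedged sketch of sign_string_256_base64 (an assumption, not the repo's actual code):
# HMAC-SHA256 over the pre-signed text, base64-encoded, as Huobi's v1 API expects.
import base64
import hashlib
import hmac

def sign_string_256_base64(secret, msg):
    digest = hmac.new(secret.encode('utf8'), msg.encode('utf8'), hashlib.sha256).digest()
    return base64.b64encode(digest)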
Example #8
def get_order_history_binance(key,
                              pair_name,
                              limit=BINANCE_ORDER_HISTORY_LIMIT,
                              last_order_id=None):

    post_details = get_order_history_binance_post_details(
        key, pair_name, limit, last_order_id)

    err_msg = "get_all_orders_binance for {pair_name}".format(
        pair_name=pair_name)

    status_code, json_response = send_get_request_with_header(
        post_details.final_url,
        post_details.headers,
        err_msg,
        timeout=BINANCE_DEAL_TIMEOUT)

    if get_logging_level() >= LOG_ALL_DEBUG:
        msg = "get_order_history_binance: {sc} {resp}".format(
            sc=status_code, resp=json_response)
        print_to_console(msg, LOG_ALL_DEBUG)
        log_to_file(msg, DEBUG_LOG_FILE_NAME)

    historical_orders = []
    if status_code == STATUS.SUCCESS:
        msg = "{fn} - error response - {er}".format(
            fn=get_order_history_binance.func_name, er=json_response)
        status_code, historical_orders = get_orders_binance_result_processor(
            json_response, pair_name, msg)

    return status_code, historical_orders
Example #9
    def on_receive(self, **kwargs):
        """
            heart beat and other stuff
        :param kwargs:
        :return:
        """

        if 'R' in kwargs and type(kwargs['R']) is not bool:
            msg = process_message(kwargs['R'])

            log_to_file(msg, "bittrex.log")

            if msg is not None:

                self.order_book_is_received = True
                self.initial_order_book = parse_socket_order_book_bittrex(
                    msg, self.pair_id)

        else:
            try:
                msg = process_message(str(kwargs))
            except:
                msg = kwargs

            log_to_file(msg, "bittrex.log")
Example #10
def update_order_details(pg_conn, order):

    """
            if order.pair_id == every_order.pair_id and \
                        order.deal_type == every_order.deal_type and \
                        abs(order.price - every_order.price) < FLOAT_POINT_PRECISION and \
                        order.create_time >= every_order.create_time and \
                        abs(order.create_time - every_order.create_time) < 15:
            # FIXME
            order.order_id = every_order.order_id
            order.create_time = every_order.create_time


    :param pg_conn:
    :param order:
    :return:
    """

    update_query = """update arbitrage_orders set order_id = '{order_id}' where exchange_id = {e_id} and pair_id = {p_id} and
    trade_type = {d_type} and create_time = {c_time}
    """.format(order_id=order.order_id, e_id=order.exchange_id, p_id=order.pair_id, d_type=order.trade_type,
               c_time=order.create_time)

    cursor = pg_conn.cursor

    cursor.execute(update_query)

    if 0 == cursor.rowcount:
        msg = "ZERO number of row affected! For order = {o}".format(o=order)
        log_to_file(msg, FAILED_ORDER_PROCESSING_FILE_NAME)
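The query above interpolates values with str.format. A safer, hypothetical variant using a parameterized query (assumes a psycopg2-style cursor; this is a suggestion, not the original code):

# Hypothetical parameterized version - lets the driver handle quoting and avoids injection issues.
update_query = ("update arbitrage_orders set order_id = %s "
                "where exchange_id = %s and pair_id = %s and trade_type = %s and create_time = %s")
cursor.execute(update_query, (order.order_id, order.exchange_id, order.pair_id,
                              order.trade_type, order.create_time))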
Example #11
    def request_order_book(self):
        try:
            with Session() as session:
                connection = Connection(self.url, session)
                self.hub = connection.register_hub(self.hub_name)

                connection.received += self.on_receive

                connection.start()

                while self.order_book_is_received is not True:
                    self.hub.server.invoke(
                        BittrexParameters.QUERY_EXCHANGE_STATE, self.pair_name)
                    connection.wait(
                        5
                    )  # otherwise it shoots thousands of queries and we will be banned :(

                connection.close()

                msg = "Got orderbook for Bittrex!"
                log_to_file(msg, SOCKET_ERRORS_LOG_FILE_NAME)
                print(msg)

                return STATUS.SUCCESS

        except Exception as e:
            # log_error_on_receive_from_socket("Bittrex", e)
            msg = "Error during order book retrieval for Bittrex {}".format(
                str(e))
            log_to_file(msg, SOCKET_ERRORS_LOG_FILE_NAME)
            print(msg)

        return STATUS.FAILURE
Example #12
def add_buy_order_binance(key, pair_name, price, amount):

    post_details = add_buy_order_binance_url(key, pair_name, price, amount)

    err_msg = "add_buy_order_binance  called for {pair} for amount = {amount} with price {price}".format(
        pair=pair_name, amount=amount, price=price)

    res = send_post_request_with_header(post_details,
                                        err_msg,
                                        max_tries=BINANCE_NUM_OF_DEAL_RETRY,
                                        timeout=BINANCE_DEAL_TIMEOUT)
    """
    {
        "orderId": 1373289, 
        "clientOrderId": "Is7wGaKBtLBK7JjDkNAJwn",
        "origQty": "10.00000000",
        "symbol": "RDNBTC",
        "side": "BUY",
        "timeInForce": "GTC",
        "status": "NEW",
        "transactTime": 1512581468544,
        "type": "LIMIT",
        "price": "0.00022220",
        "executedQty": "0.00000000"
    }
    """

    if get_logging_level() >= LOG_ALL_MARKET_RELATED_CRAP:
        print_to_console(res, LOG_ALL_MARKET_RELATED_CRAP)
        log_to_file(res, "market_utils.log")

    return res
Example #13
def log_dont_supported_currency(cfg, exchange_id, pair_id):
    msg = "Not supported currency {idx}-{name} for {exch}".format(
        idx=cfg.pair_id,
        name=pair_id,
        exch=get_exchange_name_by_id(exchange_id))
    print_to_console(msg, LOG_ALL_ERRORS)
    log_to_file(msg, cfg.log_file_name)
Example #14
def send_delete_request_with_header(post_details, error_msg, max_tries):
    res = STATUS.FAILURE, None

    try_number = 0
    while try_number < max_tries:
        try_number += 1
        try:
            response = requests.delete(post_details.final_url,
                                       data=post_details.body,
                                       headers=post_details.headers,
                                       timeout=HTTP_TIMEOUT_SECONDS)
            json_response = response.json()

            if get_logging_level() >= LOG_ALL_DEBUG:
                msg = "send_delete_request_with_header: RESULT: {res} for url={url}".format(
                    res=json_response, url=post_details.final_url)
                log_to_file(msg, DEBUG_LOG_FILE_NAME)

            status = STATUS.SUCCESS if HTTP_SUCCESS == response.status_code else STATUS.FAILURE

            return status, json_response

        except Exception, e:
            log_error_request_failed("send_delete_request_with_header",
                                     post_details.final_url, error_msg, e)

    # all retries failed - return the default failure result
    return res
Example #15
def log_dont_have_open_orders(cfg):
    msg = "process_expired_deals - list of open orders from both exchanges is empty, " \
          "REMOVING all watched deals - consider them closed!"
    print_to_console(msg, LOG_ALL_ERRORS)
    log_to_file(msg, cfg.log_file_name)

    log_to_file(msg, EXPIRED_ORDER_PROCESSING_FILE_NAME)
Example #16
def forward_new_messages(args):
    settings = CommonSettings.from_cfg(args.cfg)

    set_log_folder(settings.log_folder)
    set_logging_level(settings.logging_level_id)
    msg_queue = get_message_queue(host=settings.cache_host,
                                  port=settings.cache_port)

    do_we_have_data = False

    while True:
        for topic_id in QUEUE_TOPICS:
            msg = msg_queue.get_message_nowait(topic_id)
            if msg is not None:
                do_we_have_data = True
                notification_id = get_notification_id_by_topic_name(topic_id)
                err_code = send_single_message(msg, notification_id)
                if err_code == STATUS.FAILURE:
                    err_msg = """telegram_notifier can't send message to telegram. Message will be re-processed on next iteration.
                        {msg}""".format(msg=msg)
                    log_to_file(err_msg, "telegram_notifier.log")
                    print_to_console(err_msg, LOG_ALL_ERRORS)
                    msg_queue.add_message_to_start(topic_id, msg)
                    sleep_for(1)

        #
        #   NOTE: it still can lead to throttling by telegram
        #

        if not do_we_have_data:
            sleep_for(1)

        do_we_have_data = False
Example #17
def get_ohlc_binance_result_processor(json_response, currency, date_start,
                                      date_end):
    """
    [
        1499040000000,      // Open time
        "0.01634790",       // Open
        "0.80000000",       // High
        "0.01575800",       // Low
        "0.01577100",       // Close
        "148976.11427815",  // Volume
        1499644799999,      // Close time
        "2434.19055334",    // Quote asset volume
        308,                // Number of trades
        "1756.87402397",    // Taker buy base asset volume
        "28.46694368",      // Taker buy quote asset volume
        "17928899.62484339" // Can be ignored
    ]
    """
    result_set = []

    if is_error(json_response):
        msg = "get_ohlc_binance_result_processor - error response - {er}".format(
            er=json_response)
        log_to_file(msg, ERROR_LOG_FILE_NAME)

        return result_set

    for record in json_response:
        result_set.append(Candle.from_binance(record, currency))

    return result_set
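Candle.from_binance is defined elsewhere in the repo; the kline layout quoted in the docstring is a positional array, so the mapping is presumably along these lines (output field names are assumptions for illustration):

# Hedged sketch of the kline field mapping (indices from the docstring above).
def candle_from_binance_record(record):
    return {
        "open_time": record[0],
        "open": float(record[1]),
        "high": float(record[2]),
        "low": float(record[3]),
        "close": float(record[4]),
        "volume": float(record[5]),
        "close_time": record[6],
    }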
Example #18
def compute_loss(trades_to_order_by_pair):

    orders_by_arbitrage_id = defaultdict(list)
    # 1 stage group by arbitrage id
    for order, trades in trades_to_order_by_pair:
        orders_by_arbitrage_id[order.arbitrage_id].append((order, trades))

    orders_by_pair = defaultdict(list)

    cnt = 0
    for arbitrage_id in orders_by_arbitrage_id:
        if len(orders_by_arbitrage_id[arbitrage_id]) != 1:
            continue
        order, trades_list = orders_by_arbitrage_id[arbitrage_id][0]
        msg = "can't find pair order - {o}".format(o=order)
        log_to_file(msg,
                    "missing_" + get_pair_name_by_id(order.pair_id) + ".txt")
        cnt += 1
        orders_by_pair[order.pair_id].append((order, trades_list))

    loss_details = defaultdict(list)
    loss_details_total = Counter()

    for pair_id in orders_by_pair:
        loss_by_coin, loss_by_base_coin = compute_loss_by_pair(
            orders_by_pair[pair_id])
        base_currency_id, dst_currency_id = split_currency_pairs(pair_id)
        loss_details[base_currency_id].append(
            LossDetails(base_currency_id, dst_currency_id, pair_id,
                        loss_by_coin, loss_by_base_coin))

        loss_details_total[base_currency_id] += loss_by_base_coin

    return loss_details, loss_details_total
Example #19
def get_history_binance_result_processor(json_document, pair_name, timest):
    """
          {
            "a": 26129,         // Aggregate tradeId
            "p": "0.01633102",  // Price
            "q": "4.70443515",  // Quantity
            "f": 27781,         // First tradeId
            "l": 27781,         // Last tradeId
            "T": 1498793709153, // Timestamp
            "m": true,          // Was the buyer the maker?
            "M": true           // Was the trade the best price match?
          }
    """

    all_history_records = []

    if is_error(json_document):

        msg = "get_history_binance_result_processor - error response - {er}".format(
            er=json_document)
        log_to_file(msg, ERROR_LOG_FILE_NAME)

        return all_history_records

    for record in json_document:
        all_history_records.append(
            TradeHistory.from_binance(record, pair_name, timest))

    return all_history_records
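TradeHistory.from_binance consumes the aggregated-trade records documented in the docstring; a short sketch of the likely field mapping (key letters from the docstring, output names assumed):

# Hedged sketch of the aggTrades field mapping.
def trade_from_binance_record(record):
    return {
        "price": float(record["p"]),
        "quantity": float(record["q"]),
        "timestamp": record["T"],
        "buyer_is_maker": record["m"],
    }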
Example #20
def cancel_order_bittrex(key, order_id):
    # https://bittrex.com/api/v1.1/market/cancel?apikey=API_KEY&uuid=ORDER_UUID
    final_url = BITTREX_CANCEL_ORDER + key.api_key + "&nonce=" + str(
        generate_nonce())

    body = {
        "uuid": order_id,
    }

    final_url += _urlencode(body)

    headers = {"apisign": signed_string(final_url, key.secret)}

    post_details = PostRequestDetails(final_url, headers, body)

    if get_logging_level() >= LOG_ALL_MARKET_RELATED_CRAP:
        msg = "cancel_order_bittrex: {res}".format(res=post_details)
        print_to_console(msg, LOG_ALL_MARKET_RELATED_CRAP)
        log_to_file(msg, "market_utils.log")

    err_msg = "cancel bittrex order with id {id}".format(id=order_id)

    res = send_get_request_with_header(post_details.final_url,
                                       post_details.headers,
                                       err_msg,
                                       timeout=BITTREX_DEAL_TIMEOUT)

    if get_logging_level() >= LOG_ALL_MARKET_RELATED_CRAP:
        print_to_console(res, LOG_ALL_MARKET_RELATED_CRAP)
        log_to_file(res, "market_utils.log")

    return res
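Bittrex v1.1 authenticates requests by signing the complete URI with the API secret and passing the result in the apisign header; signed_string is presumably a thin HMAC-SHA512 wrapper along these lines (an assumption, not the repo's code):

# Hedged sketch of signed_string: HMAC-SHA512 hex digest of the full request URL.
import hashlib
import hmac

def signed_string(full_url, secret):
    return hmac.new(secret.encode('utf8'), full_url.encode('utf8'), hashlib.sha512).hexdigest()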
Example #21
def compute_profit_by_pair(pair_id, trades_to_order_by_pair):

    file_name = get_pair_name_by_id(pair_id) + "_trace.txt"

    profit_coin = 0.0
    profit_base_currency = 0.0

    orders_by_arbitrage_id = defaultdict(list)
    # 1 stage group by arbitrage id
    for order, trades in trades_to_order_by_pair:
        orders_by_arbitrage_id[order.arbitrage_id].append((order, trades))

    number_of_missing_pair = 0
    for arbitrage_id in orders_by_arbitrage_id:
        if len(orders_by_arbitrage_id[arbitrage_id]) == 1:
            number_of_missing_pair += 1
            msg = "Can't find paired arbitrage order for {arbitrage_id} {o}".format(
                arbitrage_id=arbitrage_id,
                o=orders_by_arbitrage_id[arbitrage_id][0])
            log_to_file(msg, file_name)
            continue
        else:
            for order, trades in orders_by_arbitrage_id[arbitrage_id]:
                msg = "Computing trades for order {o}".format(o=order)
                log_to_file(msg, file_name)
                if order.trade_type == DEAL_TYPE.BUY:
                    for trade in trades:
                        profit_coin += trade.executed_volume
                        base_currency_volume = trade.executed_volume * trade.price * 0.01 * (
                            100 + get_fee_by_exchange(trade.exchange_id))
                        profit_base_currency -= base_currency_volume
                        msg = """Analysing trade {o}
                        ADD coin volume = {cv}
                        SUBTRACT base currency = {base}
                        """.format(o=trade,
                                   cv=trade.executed_volume,
                                   base=base_currency_volume)
                        log_to_file(msg, file_name)
                elif order.trade_type == DEAL_TYPE.SELL:
                    for trade in trades:
                        profit_coin -= trade.executed_volume
                        base_currency_volume = trade.executed_volume * trade.price * 0.01 * (
                            100 - get_fee_by_exchange(trade.exchange_id))
                        profit_base_currency += base_currency_volume
                        msg = """Analysing trade {o}
                        SUBTRACT coin volume = {cv}
                        ADD base currency = {base}
                        """.format(o=trade,
                                   cv=trade.executed_volume,
                                   base=base_currency_volume)
                        log_to_file(msg, file_name)
                else:
                    print "WE HAVE WRONG trade_type", order.trade_type
                    print "For order: ", order

    msg = "For {pair_name} Number of missing paired order is {num}".format(
        pair_name=get_pair_name_by_id(pair_id), num=number_of_missing_pair)
    log_to_file(msg, file_name)

    return profit_coin, profit_base_currency
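The fee arithmetic above folds the exchange fee into the base-currency leg: executed_volume * price * 0.01 * (100 +/- fee) is simply volume * price * (1 +/- fee / 100). A quick worked example with an illustrative 0.25% fee:

# Worked example of the fee arithmetic (0.25% fee chosen purely for illustration).
executed_volume = 10.0     # coins bought
price = 0.0002             # base currency per coin
fee = 0.25                 # percent
base_currency_spent = executed_volume * price * 0.01 * (100 + fee)   # 0.002005
# i.e. 10 * 0.0002 = 0.002 base currency, plus the 0.25% fee, gives 0.002005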
Example #22
def get_order_history_huobi(key,
                            pair_name,
                            time_start=0,
                            time_end=None):

    # NOTE: resolve "now" at call time - a default argument of get_now_seconds_utc()
    # would be evaluated only once, when the module is imported
    if time_end is None:
        time_end = get_now_seconds_utc()

    post_details = get_order_history_huobi_post_details(
        key, pair_name, time_start, time_end)

    err_msg = "get_all_orders_huobi for {pair_name}".format(
        pair_name=pair_name)

    status_code, json_response = send_get_request_with_header(
        post_details.final_url,
        post_details.headers,
        err_msg,
        timeout=HUOBI_DEAL_TIMEOUT)

    if get_logging_level() >= LOG_ALL_DEBUG:
        msg = "get_order_history_huobi: {sc} {resp}".format(sc=status_code,
                                                            resp=json_response)
        print_to_console(msg, LOG_ALL_DEBUG)
        log_to_file(msg, DEBUG_LOG_FILE_NAME)

    historical_orders = []
    if status_code == STATUS.SUCCESS:
        status_code, historical_orders = get_orders_huobi_result_processor(
            json_response, pair_name)

    return status_code, historical_orders
Example #23
def log_dublicative_order_book(log_file_name, msg_queue, order_book,
                               prev_order_book):
    msg = """ <b> !!! WARNING !!! </b>
    The number of asks OR bids is the same for the most recent and the cached version of the order book for
    exchange_name {exch} pair_name {pn}
    cached timest: {ts1} {dt1}
    recent timest: {ts2} {dt2}
    Verbose information can be found in logs error & 
    """.format(exch=get_exchange_name_by_id(order_book.exchange_id),
               pn=get_currency_pair_name_by_exchange_id(
                   order_book.pair_id, order_book.exchange_id),
               ts1=prev_order_book.timest,
               dt1=ts_to_string_utc(prev_order_book.timest),
               ts2=order_book.timest,
               dt2=ts_to_string_utc(order_book.timest))

    msg_queue.add_message(DEAL_INFO_MSG, msg)
    print_to_console(msg, LOG_ALL_ERRORS)
    log_to_file(msg, log_file_name)

    msg = """Cached version of order book: 
    {o}
    Recent version of order book:
    {oo}
    """.format(o=str(prev_order_book), oo=str(order_book))
    log_to_file(msg, log_file_name)
Example #24
    def on_public(self, compressed_data):
        msg = process_message(compressed_data)
        # FIXME Howdy DK - is this check promising FAST?
        if not self.order_book_is_received and "orderBook" in compressed_data:
            self.order_book_is_received = True
            order_book_delta = parse_socket_order_book_poloniex(
                msg, self.pair_id)
        else:
            order_book_delta = parse_socket_update_poloniex(msg)

        if order_book_delta is None:
            #
            # Poloniex tend to send heartbeat messages: [1010]
            # When no messages have been sent out for one second, the server will send a heartbeat message as follows.
            # Absence of heartbeats indicates a protocol or networking issue and the client application is expected
            # to close the socket and try again.
            #
            str_msg = str(msg)

            if "1010" in str_msg:
                self.last_heartbeat_ts = get_now_seconds_utc()
            else:
                err_msg = "Poloniex - cant parse update from message: {msg}".format(
                    msg=str_msg)
                log_to_file(err_msg, SOCKET_ERRORS_LOG_FILE_NAME)
        else:
            self.last_heartbeat_ts = get_now_seconds_utc()
            self.on_update(EXCHANGE.POLONIEX, order_book_delta)
Example #25
def test_binance():
    def on_message(message):
        print(message)

    def on_error(ws, error):
        print(error)

    def on_close(ws):
        print("### closed ###")

    def on_open(ws):
        print("ONOPEN")
        # def run(*args):
        #     ws.send(json.dumps({'command':'subscribe','channel':'BTC-ETH@depth'}))
        #     while True:
        #         time.sleep(1)
        #     ws.close()
        #     print("thread terminating...")
        # thread.start_new_thread(run, ())

    # websocket.enableTrace(True)
    # # ws = websocket.WebSocketApp(sslopt={"cert_reqs": ssl.CERT_NONE})
    # ws = websocket.WebSocketApp("wss://stream.binance.com:9443/ws/ethbtc@depth")
    # # ws = websocket.WebSocket(sslopt={"cert_reqs": ssl.CERT_NONE})
    # ws.on_message = on_message
    # ws.on_error = on_error
    # ws.on_close = on_close
    # ws.on_open = on_open
    # # ws.connect("wss://stream.binance.com:9443/ws/ethbtc@depth")
    # # ws.run_forever()
    # ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})


    # Create connection
    while True:
        try:
            ws = create_connection("wss://stream.binance.com:9443/ws/ethbtc@depth", sslopt={"cert_reqs": ssl.CERT_NONE})
            ws.settimeout(15)
            break
        except Exception:
            print('connect ws error,retry...')
            sleep_for(5)

    # actual subscription
    # ws.send()
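    # NOTE (assumption, not from the original author): for a raw stream URL such as
    # "/ws/ethbtc@depth" Binance pushes depth updates without an explicit subscription
    # frame, which is presumably why ws.send() is left commented out; the combined-stream
    # API would instead expect a JSON message like
    # {"method": "SUBSCRIBE", "params": ["ethbtc@depth"], "id": 1}.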

    # event loop
    while True:
        try:
            compress_data = ws.recv()
            on_message(compress_data)
        except Exception as e:      # Supposedly timeout big enough to not trigger re-syncing
            msg = "Binance - triggered exception during reading from socket = {}".format(str(e))
            print msg
            break

    msg = "Binance - triggered on_close. We have to re-init the whole state from the scratch. " \
          "Current thread will be finished."
    log_to_file(msg, SOCKET_ERRORS_LOG_FILE_NAME)
Example #26
def get_balance_huobi_result_processor(json_document, timest):
    if not is_error(json_document) and "data" in json_document and json_document["data"]:
        return STATUS.SUCCESS, Balance.from_huobi(timest, json_document["data"])

    msg = "get_balance_huobi_result_processor - error response - {er}".format(er=json_document)
    log_to_file(msg, ERROR_LOG_FILE_NAME)

    return STATUS.FAILURE, None
Example #27
def log_order_book_update_failed_pre_sync(kind, exchange_id,
                                          order_book_updates):
    msg = "Reset stage will be initiated because Orderbook update FAILED during pre-SYNC stage - {kind} - " \
          "for {exch_name} Update itself: {upd}".format(kind=kind,
                                                        exch_name=get_exchange_name_by_id(exchange_id),
                                                        upd=order_book_updates)
    log_to_file(msg, SOCKET_ERRORS_LOG_FILE_NAME)
    print_to_console(msg, LOG_ALL_ERRORS)
Example #28
def log_warn_balance_not_updating(last_balance, msg_queue):
    msg = """           <b> !!! WARNING !!! </b>
    BALANCE was not updated for {tm} seconds!
    last balance {bl}""".format(tm=BALANCE_EXPIRE_TIMEOUT, bl=last_balance)

    print_to_console(msg, LOG_ALL_ERRORS)
    msg_queue.add_message(DEAL_INFO_MSG, msg)
    log_to_file(msg, "balance.log")
Example #29
def log_heartbeat_is_missing(exch_name, timeout, last_heartbeat_ts, ts_now):
    msg = "{exch_name} - Havent heard from exchange more than {timeout}. Last update - {l_update} but " \
          "now - {n_time}. Reseting stage!".format(exch_name=exch_name,
                                                   timeout=timeout,
                                                   l_update=last_heartbeat_ts,
                                                   n_time=ts_now)
    log_to_file(msg, SOCKET_ERRORS_LOG_FILE_NAME)
    print_to_console(msg, LOG_ALL_ERRORS)
Example #30
def log_failed_order_replacement_result(failed_order, json_document,
                                        msg_queue):
    msg = """We have tried to replace failed order with new one:
                {o}
                and got response:
                {r}
                """.format(o=failed_order, r=json_document)
    msg_queue.add_message(DEBUG_INFO_MSG, msg)
    log_to_file(msg, FAILED_ORDER_PROCESSING_FILE_NAME)