Beispiel #1
0
async def address_confirmed_utxo(address,  type, from_block, order, order_by, limit, page, app):
    """Fetch confirmed UTXOs for an address from ``connector_utxo``.

    :param address: serialized address; the first byte is the script/address
        type (0 = P2PKH, which may also own a matching P2PK script).
    :param type: requested type filter (None = any; 2 = P2PK).
    :param from_block: minimum block height to include (falsy = no bound).
    :param order: "asc"/"desc" SQL keyword, interpolated into the query —
        assumed pre-validated by the caller (not bound as a parameter).
    :param order_by: order-by column expression, same caveat as ``order``.
    :param limit: page size; falsy means "no limit".
    :param page: 1-based page number.
    :param app: app exposing a "db_pool" asyncpg pool.
    :return: {"data": [utxo dicts], "time": elapsed seconds}
    """
    q = time.time()
    utxo = []
    # pointer packs (height << 39 | tx_index << 20 | out_index), so a height
    # lower bound becomes ``pointer >= height << 39``.
    if from_block:
        from_block = " AND pointer >= " + str(from_block << 39)
    else:
        from_block = ""

    # Normalized paging shared by both branches.  Binding None as the LIMIT
    # parameter means "no limit" in Postgres (LIMIT NULL == LIMIT ALL).  The
    # original bound the string "ALL" in one branch, a raw limit in the other,
    # and computed ``limit * (page - 1)`` unconditionally, which raised
    # TypeError whenever ``limit`` was falsy.
    sql_limit = limit if limit else None
    offset = limit * (page - 1) if limit else 0

    if address[0] == 0 and type is None:
        # P2PKH with no explicit type filter: also look up a matching P2PK
        # script so both encodings of the same key are returned together.
        a = [address]
        async with app["db_pool"].acquire() as conn:
            script = await conn.fetchval("SELECT script from connector_p2pk_map "
                                         "WHERE address = $1 LIMIT 1;", address[1:])
            if script is not None:
                a.append(b"\x02" + script)
            rows = await conn.fetch("SELECT  outpoint, amount, pointer, address  "
                                          "FROM connector_utxo "
                                          "WHERE address = ANY($1) %s "
                                    "order by  %s %s LIMIT $2 OFFSET $3;" % (from_block, order_by, order),
                                    a, sql_limit, offset)
            for row in rows:
                utxo.append({"txId": rh2s(row["outpoint"][:32]),
                             "vOut": bytes_to_int(row["outpoint"][32:]),
                             "block": row["pointer"] >> 39,
                             "txIndex": (row["pointer"] - ((row["pointer"] >> 39) << 39)) >> 20,
                             "amount": row["amount"],
                             "type": SCRIPT_N_TYPES[row["address"][0]]})

    else:
        async with app["db_pool"].acquire() as conn:
            if address[0] == 0:
                if type == 2:
                    # Explicit P2PK request for a P2PKH-style hash: translate
                    # it to the stored script, or return an empty result set.
                    script = await conn.fetchval("SELECT script from connector_p2pk_map "
                                                 "WHERE address = $1 LIMIT 1;", address[1:])
                    if script is not None:
                        address = b"\x02" + script
                    else:
                        return {"data": utxo, "time": round(time.time() - q, 4)}

            rows = await conn.fetch("SELECT  outpoint, amount, pointer  "
                                          "FROM connector_utxo "
                                          "WHERE address = $1 %s "
                                    "order by  %s %s LIMIT $2 OFFSET $3;" % (from_block, order_by, order),
                                    address, sql_limit, offset)

        for row in rows:
            utxo.append({"txId": rh2s(row["outpoint"][:32]),
                         "vOut": bytes_to_int(row["outpoint"][32:]),
                         "block": row["pointer"] >> 39,
                         "txIndex": (row["pointer"] - ((row["pointer"] >> 39) << 39)) >> 20,
                         "amount": row["amount"]})

    return {"data": utxo,
            "time": round(time.time() - q, 4)}
Beispiel #2
0
async def address_unconfirmed_utxo(address,  type, order, limit, page, app):
    """Fetch unconfirmed (mempool) UTXOs for an address.

    :param address: serialized address; the first byte is the script/address
        type (0 = P2PKH, which may also own a matching P2PK script).
    :param type: requested type filter (None = any; 2 = P2PK).
    :param order: "asc"/"desc" SQL keyword, interpolated into the query —
        assumed pre-validated by the caller (not bound as a parameter).
    :param limit: page size; falsy means "no limit".
    :param page: 1-based page number.
    :param app: app exposing a "db_pool" asyncpg pool.
    :return: {"data": [utxo dicts], "time": elapsed seconds}
    """
    q = time.time()
    utxo = []

    # Normalized paging shared by both branches.  Binding None as the LIMIT
    # parameter means "no limit" in Postgres (LIMIT NULL == LIMIT ALL).  The
    # original bound the string "ALL" and computed ``limit * (page - 1)``
    # unconditionally, which raised TypeError whenever ``limit`` was falsy.
    sql_limit = limit if limit else None
    offset = limit * (page - 1) if limit else 0

    if address[0] == 0 and type is None:
        # P2PKH with no type filter: also match the P2PK form of the key,
        # checking the unconfirmed map first, then the confirmed one.
        a = [address]
        async with app["db_pool"].acquire() as conn:
            script = await conn.fetchval("SELECT script from connector_unconfirmed_p2pk_map "
                                         "WHERE address = $1 LIMIT 1;", address[1:])
            if script is None:
                script = await conn.fetchval("SELECT script from connector_p2pk_map "
                                             "WHERE address = $1 LIMIT 1;", address[1:])
            if script is not None:
                a.append(b"\x02" + script)
            rows = await conn.fetch("SELECT  outpoint, amount, address  "
                                          "FROM connector_unconfirmed_utxo "
                                    "WHERE address = ANY($1)  "
                                    "order by  amount %s LIMIT $2 OFFSET $3;" %  order,
                                    a, sql_limit, offset)
            for row in rows:
                utxo.append({"txId": rh2s(row["outpoint"][:32]),
                             "vOut": bytes_to_int(row["outpoint"][32:]),
                             "amount": row["amount"],
                             "type": SCRIPT_N_TYPES[row["address"][0]]})

    else:
        async with app["db_pool"].acquire() as conn:
            if address[0] == 0:
                if type == 2:
                    # Explicit P2PK request: translate the pubkey hash to the
                    # stored script, or return an empty result set.
                    script = await conn.fetchval("SELECT script from connector_unconfirmed_p2pk_map "
                                                 "WHERE address = $1 LIMIT 1;", address[1:])
                    if script is None:
                        script = await conn.fetchval("SELECT script from connector_p2pk_map "
                                                     "WHERE address = $1 LIMIT 1;", address[1:])
                    if script is not None:
                        address = b"\x02" + script
                    else:
                        return {"data": utxo, "time": round(time.time() - q, 4)}

            rows = await conn.fetch("SELECT  outpoint, amount, address  "
                                          "FROM connector_unconfirmed_utxo "
                                    "WHERE address = $1 "
                                    "order by  amount %s LIMIT $2 OFFSET $3;" %  order,
                                    address, sql_limit, offset)

        for row in rows:
            utxo.append({"txId": rh2s(row["outpoint"][:32]),
                         "vOut": bytes_to_int(row["outpoint"][32:]),
                         "amount": row["amount"]})

    return {"data": utxo,
            "time": round(time.time() - q, 4)}
    async def _new_transaction(self, tx, block_time = None, block_height = None, block_index = None):
        """Process one newly seen transaction and resolve block-await state.

        :param tx: decoded transaction dict with a binary "txId".
        :param block_time: block timestamp when the tx arrived inside a block.
        :param block_height: block height; None for mempool transactions.
        :param block_index: position of the tx within its block.
        """
        tx_hash = rh2s(tx["txId"])
        # Skip txs already being handled, or already present in the cache.
        # NOTE(review): the cache is read with the binary txId here but
        # written with the hex tx_hash below -- those keys differ; confirm
        # which keying is intended.
        if tx_hash in self.tx_in_process or self.tx_cache.get(tx["txId"]):
            return
        try:
            # The awaited-futures map is passed to the handler only for
            # transactions that belong to a block being processed.
            ft = self.await_tx_future if block_height is not None else None
            self.tx_in_process.add(tx_hash)

            if self.tx_handler:
                await self.tx_handler(tx, ft, block_time, block_height, block_index)

            self.tx_cache.set(tx_hash, True)
            try:
                # If a block request awaits this tx, mark its future done and,
                # once the await list drains, complete the block request.
                self.await_tx_list.remove(tx_hash)
                if not self.await_tx_future[tx_hash].done():
                    self.await_tx_future[tx_hash].set_result(True)
                if not self.await_tx_list:
                    self.block_txs_request.set_result(True)
            except:
                # Best effort: the tx was simply not on the await list.
                pass
        except DependsTransaction as err:
            # A parent tx has not been processed yet; retry once it resolves.
            self.block_dependency_tx += 1
            self.loop.create_task(self.wait_tx_then_add(err.raw_tx_hash, tx))
        except Exception as err:
            # Any failure while a block awaits this tx aborts the whole block
            # request and cancels every outstanding tx future.
            if tx_hash in self.await_tx_list:
                self.await_tx_list = set()
                self.block_txs_request.cancel()
                for i in self.await_tx_future:
                    if not self.await_tx_future[i].done():
                        self.await_tx_future[i].cancel()
            self.log.debug("new transaction error %s " % err)
            self.log.debug(str(traceback.format_exc()))
        finally:
            self.tx_in_process.remove(tx_hash)
Beispiel #4
0
async def block_transaction_id_list(pointer, limit, page, order, app):
    """Return the (cached) list of transaction ids for one block.

    :param pointer: block hash (bytes) or block height (int).
    :param limit: page size; one extra row (limit + 1) is fetched.
    :param page: 1-based page number.
    :param order: "asc"/"desc" SQL keyword, interpolated into the query —
        assumed pre-validated by the caller.
    :param app: app with "db_pool" and a "block_transaction_id_list" cache.
    :return: {"data": [tx hash strings], "time": elapsed seconds}
    :raises APIException: 404 when a block hash cannot be resolved.
    """
    pool = app["db_pool"]
    qt = time.time()

    async with pool.acquire() as conn:
        if isinstance(pointer, bytes):
            # Resolve a block hash to its height first.
            stmt = await conn.prepare(
                "SELECT height FROM blocks  WHERE hash = $1 LIMIT 1;")
            pointer = await stmt.fetchval(pointer)
            if pointer is None:
                raise APIException(NOT_FOUND, "block not found", status=404)

    # NOTE(review): the cache exposes has_key(), so it is presumably a custom
    # cache object rather than a plain dict (dict.has_key was removed in
    # Python 3) -- confirm.  Entries are keyed by height only, so they do not
    # distinguish limit/page/order; verify callers always request a given
    # block with consistent paging.
    if app["block_transaction_id_list"].has_key(pointer):
        transactions = app["block_transaction_id_list"][pointer]
    else:
        async with pool.acquire() as conn:
            # transaction.pointer packs the height in its top bits, so one
            # block spans [height << 39, (height + 1) << 39).
            rows = await conn.fetch(
                "SELECT tx_id "
                "FROM transaction  WHERE pointer >= $1 AND pointer < $2 "
                "ORDER BY pointer %s LIMIT $3 OFFSET $4;" % order,
                pointer << 39, (pointer + 1) << 39, limit + 1,
                limit * (page - 1))
        transactions = [rh2s(t["tx_id"]) for t in rows]
        app["block_transaction_id_list"][pointer] = transactions
    resp = {"data": transactions, "time": round(time.time() - qt, 4)}
    return resp
Beispiel #5
0
    async def _get_missed(self,
                          block_hash=False,
                          block_time=None,
                          block_height=None):
        """Fetch transactions that were missed during block processing.

        With ``block_hash`` set, the whole block is taken from the preload
        cache (or downloaded over RPC), decoded, and its missed transactions
        are resubmitted.  Otherwise ``self.missed_tx_list`` is drained via
        batched ``getrawtransaction`` RPC calls, with a bounded number of
        concurrent fetcher tasks.
        """
        if block_hash:
            t = time.time()
            # Prefer a preloaded block; fall back to an RPC download.
            block = self.block_preload.pop(block_hash)
            if not block:
                result = await self.rpc.getblock(block_hash, 0)
            try:
                if not block:
                    block = decode_block_tx(result)
                self.log.info("block downloaded %s" %
                              round(time.time() - t, 4))
                # Resubmit only transactions still on the missed list.
                for index, tx in enumerate(block):
                    if rh2s(block[tx]["txId"]) in self.missed_tx_list:
                        self.loop.create_task(
                            self._new_transaction(block[tx], block_time,
                                                  block_height, index))
                return
            except Exception as err:
                # Abort the pending block request on any decode/submit error.
                self.log.error("_get_missed exception %s " % str(err))
                self.log.error(str(traceback.format_exc()))
                self.await_tx_list = []
                self.block_txs_request.cancel()

        # Bound the number of concurrent fetcher tasks.
        if self.get_missed_tx_threads > self.get_missed_tx_threads_limit:
            return
        self.get_missed_tx_threads += 1
        # start more threads
        if len(self.missed_tx_list) > 1:
            self.loop.create_task(
                self._get_missed(False, block_time, block_height))
        while True:
            if not self.missed_tx_list:
                break
            try:
                # Pop up to batch_limit hashes and fetch them in one RPC batch.
                batch = list()
                while self.missed_tx_list:
                    batch.append(
                        ["getrawtransaction",
                         self.missed_tx_list.pop()])
                    if len(batch) >= self.batch_limit:
                        break
                result = await self.rpc.batch(batch)
                for r in result:
                    try:
                        tx = Transaction(r["result"], format="raw")
                    except:
                        self.log.error("Transaction decode failed: %s" %
                                       r["result"])
                        raise Exception("Transaction decode failed")
                    # Mempool resubmission: no block context.
                    self.loop.create_task(
                        self._new_transaction(tx, block_time, None, None))
            except Exception as err:
                self.log.error("_get_missed exception %s " % str(err))
                self.log.error(str(traceback.format_exc()))
                self.await_tx_list = []
                self.block_txs_request.cancel()
        self.get_missed_tx_threads -= 1
Beispiel #6
0
async def blocks_last_n_hours(n, app):
    """List blocks whose adjusted timestamp falls within the last ``n`` hours.

    :param n: look-back window in hours.
    :param app: app exposing a "db_pool" asyncpg pool.
    :return: {"data": [block dicts, newest first], "time": elapsed seconds}
    :raises APIException: 404 when no block rows are returned.
    """
    started = time.time()
    async with app["db_pool"].acquire() as conn:
        rows = await conn.fetch(
            "SELECT height,"
            "       hash,"
            "       header,"
            "       adjusted_timestamp "
            "FROM blocks  "
            "WHERE adjusted_timestamp >= $1 "
            "ORDER BY height desc;",
            int(time.time()) - n * 60 * 60)
    if rows is None:
        raise APIException(NOT_FOUND, "blocks not found", status=404)

    # Serialize each row; the raw header bytes travel as base64 text.
    data = [{"height": row["height"],
             "hash": rh2s(row["hash"]),
             "header": base64.b64encode(row["header"]).decode(),
             "adjustedTimestamp": row["adjusted_timestamp"]}
            for row in rows]

    return {"data": data, "time": round(time.time() - started, 4)}
    async def _get_missed(self,
                          block_hash=False,
                          block_time=None,
                          block_height=None):
        """Recover transactions missing from the current block processing run.

        Two modes: with ``block_hash`` the block is taken from the preload
        cache or downloaded and decoded, and missed txs are resubmitted with
        their block context; without it, ``self.missed_tx_list`` is drained
        through batched ``getrawtransaction`` RPC calls.
        """
        if block_hash:
            t = time.time()
            # Use a preloaded block if available, otherwise download it.
            block = self.block_preload.pop(block_hash)
            if not block:
                result = await self.rpc.getblock(block_hash, 0)
            try:
                if not block:
                    block = decode_block_tx(result)
                self.log.info("block downloaded %s" % round(time.time() - t, 4))
                for index, tx in enumerate(block):
                    # Resubmit only txs still marked as missed.
                    if rh2s(block[tx]["txId"]) in self.missed_tx_list:
                        self.loop.create_task(self._new_transaction(block[tx],
                                                                    block_time,
                                                                    block_height,
                                                                    index
                                                                   ))
                return
            except Exception as err:
                # On failure, cancel the pending block request entirely.
                self.log.error("_get_missed exception %s " % str(err))
                self.log.error(str(traceback.format_exc()))
                self.await_tx_list = []
                self.block_txs_request.cancel()

        # Limit how many fetcher tasks may run at once.
        if self.get_missed_tx_threads > self.get_missed_tx_threads_limit:
            return
        self.get_missed_tx_threads += 1
        # start more threads
        if len(self.missed_tx_list) > 1:
            self.loop.create_task(self._get_missed(False, block_time, block_height))
        while True:
            if not self.missed_tx_list:
                break
            try:
                # Collect up to batch_limit hashes into one RPC batch.
                batch = list()
                while self.missed_tx_list:
                    batch.append(["getrawtransaction", self.missed_tx_list.pop()])
                    if len(batch) >= self.batch_limit:
                        break
                result = await self.rpc.batch(batch)
                for r in result:
                    try:
                        tx = Transaction(r["result"], format="raw")
                    except:
                        self.log.error("Transaction decode failed: %s" % r["result"])
                        raise Exception("Transaction decode failed")
                    # Resubmitted without block context (mempool path).
                    self.loop.create_task(self._new_transaction(tx,
                                                                block_time,
                                                                None,
                                                                None
                                                               ))
            except Exception as err:
                self.log.error("_get_missed exception %s " % str(err))
                self.log.error(str(traceback.format_exc()))
                self.await_tx_list = []
                self.block_txs_request.cancel()
        self.get_missed_tx_threads -= 1
 async def wait_tx_then_add(self, raw_tx_hash, tx):
     """Wait for a dependency transaction to complete, then resubmit ``tx``.

     :param raw_tx_hash: binary hash of the transaction ``tx`` depends on.
     :param tx: the dependent transaction dict.
         NOTE(review): keyed by "hash" here while other handlers use
         "txId" -- confirm which key this tx dict actually carries.
     """
     tx_hash = rh2s(tx["hash"])
     try:
         if not self.await_tx_future[raw_tx_hash].done():
             await self.await_tx_future[raw_tx_hash]
         self.loop.create_task(self._new_transaction(tx))
     except:
         # Dependency future missing or cancelled: clear the in-process
         # marker so the tx can be retried later.
         self.tx_in_process.remove(tx_hash)
 async def wait_tx_then_add(self, raw_tx_hash, tx):
     """Defer processing of ``tx`` until its dependency future resolves.

     :param raw_tx_hash: binary hash of the prerequisite transaction; used
         as the key into ``self.await_tx_future``.
     :param tx: dependent transaction dict (read via its "hash" key --
         NOTE(review): other handlers use "txId"; verify).
     """
     tx_hash = rh2s(tx["hash"])
     try:
         if not self.await_tx_future[raw_tx_hash].done():
             await self.await_tx_future[raw_tx_hash]
         self.loop.create_task(self._new_transaction(tx))
     except:
         # Best effort: on any failure, unmark the tx as in-process.
         self.tx_in_process.remove(tx_hash)
Beispiel #10
0
async def block_utxo(pointer, limit, page, order, app):
    """Page through the UTXOs created in one block.

    :param pointer: block hash (bytes) or block height (int).
    :param limit: page size; one extra row is fetched purely to detect
        whether a further page exists.
    :param page: 1-based page number.
    :param order: "asc"/"desc" SQL keyword, interpolated into the query —
        assumed pre-validated by the caller.
    :param app: app exposing "db_pool" and the "testnet" flag.
    :return: {"data": [...], "time": seconds, "lastPage": bool}
    :raises APIException: 404 when a block hash cannot be resolved.
    """
    started = time.time()
    async with app["db_pool"].acquire() as conn:
        if isinstance(pointer, bytes):
            # Resolve a block hash to its height first.
            stmt = await conn.prepare("SELECT height "
                                      "FROM blocks  WHERE hash = $1 LIMIT 1;")
            pointer = await stmt.fetchval(pointer)
            if pointer is None:
                raise APIException(NOT_FOUND, "block not found", status=404)

        # connector_utxo.pointer packs the height in its top bits, so one
        # block spans [height << 39, (height + 1) << 39).
        rows = await conn.fetch(
            "SELECT outpoint, pointer, address, amount "
            "FROM connector_utxo  WHERE pointer >= $1 AND pointer < $2 "
            "ORDER BY pointer %s LIMIT $3 OFFSET $4;" % order, pointer << 39,
            (pointer + 1) << 39, limit + 1, limit * (page - 1))

    utxo = []
    for row in rows[:limit]:
        raw = row["address"]
        n_type = raw[0]
        if n_type in (0, 1, 5, 6):
            # Hash-based address payloads: rebuild the textual address, then
            # derive the script from it.
            addr = hash_to_address(raw[1:],
                                   testnet=app["testnet"],
                                   script_hash=n_type in (1, 6),
                                   witness_version=0 if n_type in (1, 6) else None)
            script = address_to_script(addr, hex=1)
        elif n_type == 2:
            # P2PK: the payload is the script itself.
            script = raw[1:].hex()
            addr = script_to_address(raw[1:], testnet=app["testnet"])
        else:
            # Non-standard: expose the raw script with no address form.
            script = raw[1:].hex()
            addr = None

        utxo.append({
            "txId": rh2s(row["outpoint"][:32]),
            "vOut": bytes_to_int(row["outpoint"][32:]),
            "txIndex": (row["pointer"] >> 20) & 1048575,
            "amount": row["amount"],
            "address": addr,
            "script": script,
            "type": SCRIPT_N_TYPES[n_type]
        })

    # The extra (limit + 1)-th row, if present, signals another page exists.
    return {
        "data": utxo,
        "time": round(time.time() - started, 4),
        "lastPage": len(rows) <= limit
    }
 async def _new_transaction(self,
                            tx,
                            block_time = None,
                            block_height = None,
                            block_index = None):
     """Handle one new transaction and update block-await bookkeeping.

     :param tx: decoded transaction dict with a binary "txId".
     :param block_time: block timestamp; None for mempool transactions.
     :param block_height: block height; None for mempool transactions.
     :param block_index: index of the tx inside its block, or None.
     """
     tx_hash = rh2s(tx["txId"])
     # Pass the awaited-futures map to the handler only for txs that a
     # block request is currently waiting on.
     ft = self.await_tx_future if tx_hash in self.await_tx_list else None
     if tx_hash in self.tx_in_process:
         return
     self.tx_in_process.add(tx_hash)
     # Check is transaction new
     tx_id = self.tx_cache.get(tx["txId"])
     if tx_id is not None:
         self.tx_in_process.remove(tx_hash)
         return
     try:
         # call external handler
         if self.tx_handler:
             tx_id = await self.tx_handler(tx,
                                           ft,
                                           block_time,
                                           block_height,
                                           block_index)
         # insert new transaction to dublicate filter
         if not self.external_dublicate_filter:
             tx_id = await insert_new_tx(self, tx["txId"])
         self.tx_cache.set(tx["txId"], tx_id)
         if tx_hash in self.await_tx_list:
             # await_tx_future is keyed by the binary (byte-reversed) tx id.
             self.await_tx_list.remove(tx_hash)
             self.await_tx_id_list.append(tx_id)
             if not self.await_tx_future[unhexlify(tx_hash)[::-1]].done():
                 self.await_tx_future[unhexlify(tx_hash)[::-1]].set_result(True)
             if not self.await_tx_list:
                 self.block_txs_request.set_result(True)
     except DependsTransaction as err:
         # A parent tx has not been processed yet; retry once it resolves.
         self.block_dependency_tx += 1
         self.loop.create_task(self.wait_tx_then_add(err.raw_tx_hash, tx))
     except Exception as err:
         # A failure while a block awaits this tx aborts the whole block
         # request and cancels all outstanding tx futures.
         if tx_hash in self.await_tx_list:
             self.await_tx_list = []
             self.await_tx_id_list = []
             self.block_txs_request.cancel()
             for i in self.await_tx_future:
                 if not self.await_tx_future[i].done():
                     self.await_tx_future[i].cancel()
         self.log.debug("new transaction error %s " % err)
         self.log.debug(str(traceback.format_exc()))
     finally:
         self.tx_in_process.remove(tx_hash)
Beispiel #12
0
 async def _new_transaction(self,
                            tx,
                            block_time=None,
                            block_height=None,
                            block_index=None):
     """Process a newly received transaction, resolving any block awaits.

     :param tx: decoded transaction dict carrying a binary "txId".
     :param block_time: block timestamp (None when the tx is mempool-only).
     :param block_height: block height (None when the tx is mempool-only).
     :param block_index: tx position within the block, or None.
     """
     tx_hash = rh2s(tx["txId"])
     # The futures map is handed to the handler only when a block request
     # is waiting on this particular tx.
     ft = self.await_tx_future if tx_hash in self.await_tx_list else None
     if tx_hash in self.tx_in_process:
         return
     self.tx_in_process.add(tx_hash)
     # Check is transaction new
     tx_id = self.tx_cache.get(tx["txId"])
     if tx_id is not None:
         self.tx_in_process.remove(tx_hash)
         return
     try:
         # call external handler
         if self.tx_handler:
             tx_id = await self.tx_handler(tx, ft, block_time, block_height,
                                           block_index)
         # insert new transaction to dublicate filter
         if not self.external_dublicate_filter:
             tx_id = await insert_new_tx(self, tx["txId"])
         self.tx_cache.set(tx["txId"], tx_id)
         if tx_hash in self.await_tx_list:
             # Futures are keyed by the binary (byte-reversed) tx id.
             self.await_tx_list.remove(tx_hash)
             self.await_tx_id_list.append(tx_id)
             if not self.await_tx_future[unhexlify(tx_hash)[::-1]].done():
                 self.await_tx_future[unhexlify(tx_hash)[::-1]].set_result(
                     True)
             if not self.await_tx_list:
                 self.block_txs_request.set_result(True)
     except DependsTransaction as err:
         # Retry after the dependency transaction completes.
         self.block_dependency_tx += 1
         self.loop.create_task(self.wait_tx_then_add(err.raw_tx_hash, tx))
     except Exception as err:
         # Abort the block request if it was waiting on this tx.
         if tx_hash in self.await_tx_list:
             self.await_tx_list = []
             self.await_tx_id_list = []
             self.block_txs_request.cancel()
             for i in self.await_tx_future:
                 if not self.await_tx_future[i].done():
                     self.await_tx_future[i].cancel()
         self.log.debug("new transaction error %s " % err)
         self.log.debug(str(traceback.format_exc()))
     finally:
         self.tx_in_process.remove(tx_hash)
Beispiel #13
0
async def block_by_pointer(pointer, app):
    """Fetch one block record by hash, by height, or the latest block.

    :param pointer: block hash (bytes), block height (int), or the literal
        string 'last' for the most recent block.
    :param app: app exposing a "db_pool" asyncpg pool.
    :return: {"data": block dict, "time": elapsed seconds}
    :raises APIException: 404 when no matching block exists or ``pointer``
        has an unsupported type.
    """
    pool = app["db_pool"]
    q = time.time()
    # Initialized up front: the original left ``row`` unbound (raising
    # UnboundLocalError) when ``pointer`` was neither 'last', bytes, nor int;
    # now such pointers fall through to the 404 below.
    row = None
    async with pool.acquire() as conn:
        if pointer == 'last':
            stmt = await conn.prepare(
                "SELECT height,"
                "       hash,"
                "       header,"
                "       adjusted_timestamp "
                "FROM blocks  ORDER BY height desc LIMIT 1;")
            row = await stmt.fetchrow()
        elif isinstance(pointer, bytes):
            stmt = await conn.prepare(
                "SELECT height,"
                "       hash,"
                "       header,"
                "       adjusted_timestamp "
                "FROM blocks  WHERE hash = $1 LIMIT 1;")
            row = await stmt.fetchrow(pointer)
        elif isinstance(pointer, int):
            stmt = await conn.prepare(
                "SELECT height,"
                "       hash,"
                "       header,"
                "       adjusted_timestamp "
                "FROM blocks  WHERE height = $1 LIMIT 1;")
            row = await stmt.fetchrow(pointer)

    if row is None:
        raise APIException(NOT_FOUND, "block not found", status=404)

    # Raw header bytes travel as base64 text.
    block = {"height": row["height"],
             "hash": rh2s(row["hash"]),
             "header": base64.b64encode(row["header"]).decode(),
             "adjustedTimestamp": row["adjusted_timestamp"]}

    return {"data": block, "time": round(time.time() - q, 4)}
Beispiel #14
0
async def tx_hash_by_pointers(pointers, app):
    """Resolve transaction pointers to transaction hash strings.

    :param pointers: iterable of packed tx pointers
        (height << 39 | tx_index << 20).
    :param app: app exposing a "db_pool" asyncpg pool.
    :return: {"data": {"height:index": hash-or-None}, "time": seconds} —
        every requested pointer gets a key; unknown pointers map to None.
    """
    q = time.time()
    async with app["db_pool"].acquire() as conn:
        rows = await conn.fetch(
            "SELECT pointer, tx_id "
            "FROM transaction "
            "WHERE pointer = ANY($1);", pointers)

    # Index the found rows by pointer, then answer every requested pointer
    # explicitly.  The original relied on a bare ``except`` to fill in the
    # misses, which also masked any unrelated failure in the loop body.
    found = {row["pointer"]: row["tx_id"] for row in rows}
    r = dict()
    for pointer in pointers:
        key = "%s:%s" % (pointer >> 39, (pointer >> 20) & 1048575)
        tx_id = found.get(pointer)
        r[key] = rh2s(tx_id) if tx_id is not None else None

    return {"data": r, "time": round(time.time() - q, 4)}
Beispiel #15
0
async def create_db_model(app, conn):

    # check db isolation level

    level = await conn.fetchval("SHOW TRANSACTION ISOLATION LEVEL;")
    if level != "repeatable read":
        raise Exception("Postgres repeatable read isolation level required! current isolation level is %s" % level)
    await conn.execute(open("./db_model/sql/schema.sql", "r", encoding='utf-8').read().replace("\n",""))


    # blocks data

    if app.blocks_data:
        await conn.execute(open("./db_model/sql/block.sql", "r", encoding='utf-8').read().replace("\n", ""))
        await conn.execute("INSERT INTO service (name,value) VALUES ('blocks_data','1') ON CONFLICT(name) DO NOTHING;")
    else:
        await conn.execute("""
                               INSERT INTO service (name, value) VALUES ('blocks_data', '0') 
                               ON CONFLICT(name) DO NOTHING;
                               """)

    m = await conn.fetchval("SELECT value FROM service WHERE name ='blocks_data' LIMIT 1;")
    app.log.info("Option blocks_data = %s" % m)

    if bool(int(m)) !=  app.blocks_data:
        app.log.critical("blocks_data option not match db structure; you should drop db and recreate it")
        raise Exception("DB structure invalid")

    # transaction

    if app.transaction:
        await conn.execute(open("./db_model/sql/transaction.sql",
                                "r", encoding='utf-8').read().replace("\n", ""))
        await conn.execute("INSERT INTO service (name,value) VALUES ('transaction','1') ON CONFLICT(name) DO NOTHING;")
    else:
        await conn.execute("INSERT INTO service (name,value) VALUES ('transaction','0') ON CONFLICT(name) DO NOTHING;")

    m = await conn.fetchval("SELECT value FROM service WHERE name ='transaction' LIMIT 1;")
    app.log.info("Option transaction = %s" % m)

    if bool(int(m)) !=  app.transaction:
        app.log.critical("transaction option not match db structure; you should drop db and recreate it")
        raise Exception("DB structure invalid")

    # mempool_analytica

    if app.mempool_analytica:
        if not app.transaction:
            app.log.critical("transaction mempool_analytica required transaction option enabled")
            raise Exception("configuration invalid")

        await conn.execute(open("./db_model/sql/mempool_analytica.sql",
                                "r", encoding='utf-8').read().replace("\n", ""))
        await conn.execute("INSERT INTO service (name,value) VALUES ('mempool_analytica','1') "
                           "ON CONFLICT(name) DO NOTHING;")
    else:
        await conn.execute("INSERT INTO service (name,value) VALUES ('mempool_analytica','0') "
                           "ON CONFLICT(name) DO NOTHING;")
    m = await conn.fetchval("SELECT value FROM service WHERE name ='mempool_analytica' LIMIT 1;")
    app.log.info("Option mempool_analytica = %s" % m)

    if bool(int(m)) !=  app.mempool_analytica:
        app.log.critical("mempool_analytica option not match db structure; you should drop db and recreate it")
        raise Exception("DB structure invalid")


    # merkle proof module

    if app.merkle_proof:
        await conn.execute(open("./db_model/sql/merkle_proof.sql",
                                "r", encoding='utf-8').read().replace("\n", ""))
        await conn.execute("""
                           INSERT INTO service (name, value) VALUES ('merkle_proof', '1')  
                           ON CONFLICT(name) DO NOTHING;
                           """)
    else:
        await conn.execute("""
                           INSERT INTO service (name, value) VALUES ('merkle_proof', '0') 
                           ON CONFLICT(name) DO NOTHING;
                           """)

    m = await conn.fetchval("SELECT service.value FROM service WHERE service.name ='merkle_proof' LIMIT 1;")
    app.log.info("Option merkle_proof = %s" % m)

    if int(m) == 1 and not app.merkle_proof or app.merkle_proof and int(m) == 0:
        app.log.critical("merkle_proof config option not match db structure; you should drop db and recreate it.")
        raise Exception("DB structure invalid")


    if app.block_filters:
        await conn.execute(open("./db_model/sql/filters.sql",
                                "r", encoding='utf-8').read().replace("\n", ""))
        await conn.execute("""
                           INSERT INTO service (name, value) VALUES ('block_filters', '1')  
                           ON CONFLICT(name) DO NOTHING;
                           """)
    else:
        await conn.execute("""
                           INSERT INTO service (name, value) VALUES ('block_filters', '0') 
                           ON CONFLICT(name) DO NOTHING;
                           """)

    m = await conn.fetchval("SELECT service.value FROM service WHERE service.name ='block_filters' LIMIT 1;")
    app.log.info("Option block_filters = %s" % m)

    if int(m) == 1 and not app.block_filters or app.block_filters and int(m) == 0:
        app.log.critical("block_filters config option not match db structure; you should drop db and recreate it.")
        raise Exception("DB structure invalid")



    # transaction history

    if app.transaction_history:
        if not app.transaction:
            app.log.critical("transaction_history option required transaction option enabled")
            raise Exception("configuration invalid")

        await conn.execute(open("./db_model/sql/transaction_history.sql",
                                "r", encoding='utf-8').read().replace("\n", ""))

        t = await  conn.fetchval("""SELECT EXISTS ( SELECT FROM information_schema.tables 
                             WHERE    table_name   = 'transaction_map_1');""")

        if not t:
            for i in range(app.transaction_map_partitions):
                await conn.execute("""
                                   CREATE TABLE transaction_map_%s
                                   PARTITION OF transaction_map
                                   FOR VALUES WITH (MODULUS %s, REMAINDER %s)
                                   WITH (fillfactor=100);
                                   """ % (i + 1, app.transaction_map_partitions, i))

        await conn.execute("""
                           INSERT INTO service (name, value) VALUES ('transaction_history', '1')  
                           ON CONFLICT(name) DO NOTHING;
                           """)
        app.transaction_map_start_block = await conn.fetchval("SELECT pointer FROM stxo "
                                                              "ORDER BY pointer DESC LIMIT 1;")
        if app.transaction_map_start_block is None:
            app.transaction_map_start_block = 0
        else:
            app.transaction_map_start_block = app.transaction_map_start_block >> 39

    else:
        await conn.execute("""
                           INSERT INTO service (name, value) VALUES ('transaction_history', '0') 
                           ON CONFLICT(name) DO NOTHING;
                           """)

    m = await conn.fetchval("SELECT value FROM service WHERE name ='transaction_history' LIMIT 1;")
    app.log.info("Option transaction_history = %s" % m)

    if bool(int(m)) !=  app.transaction_history:
        app.log.critical("transaction_history option not match db structure; "
                         "you should drop db and recreate it")
        raise Exception("DB structure invalid")



    # address_state module

    if app.address_state:
        if not app.transaction_history:
            app.log.critical("address_state option required transaction_history option enabled")
            raise Exception("configuration invalid")


        await conn.execute(open("./db_model/sql/address_state.sql",
                                "r", encoding='utf-8').read().replace("\n", ""))
        await conn.execute("""
                               INSERT INTO service (name, value) VALUES ('address_state', '1')  
                               ON CONFLICT(name) DO NOTHING;
                               """)
        await conn.execute("""
                               INSERT INTO service (name, value) VALUES ('address_state_rollback', '0')  
                               ON CONFLICT(name)  DO UPDATE SET value = '0';
                               """)
        # for i in range(50):
        #     await conn.execute("""
        #                        CREATE TABLE  IF NOT EXISTS address_%s
        #                        PARTITION OF address
        #                        FOR VALUES WITH (MODULUS %s, REMAINDER %s)
        #                        WITH (fillfactor=90);
        #                        """ % (i + 1, 50, i))

    else:

        await conn.execute("""
                               INSERT INTO service (name, value) VALUES ('address_state', '0') 
                               ON CONFLICT(name) DO NOTHING;
                               """)

    m = await conn.fetchval("SELECT service.value FROM service WHERE service.name ='address_state' LIMIT 1;")
    app.log.info("Option address_state = %s" % m)

    # address_timeline module

    if app.address_timeline:
        if not app.transaction_history:
            app.log.critical("address_timeline option required transaction_history option enabled")
            raise Exception("configuration invalid")
        if not app.address_state:
            app.log.critical("address_timeline option required address_state option enabled")
            raise Exception("configuration invalid")


        await conn.execute(open("./db_model/sql/address_timeline.sql",
                                "r", encoding='utf-8').read().replace("\n", ""))


        await conn.execute("""
                               INSERT INTO service (name, value) VALUES ('address_timeline', '1')
                               ON CONFLICT(name) DO NOTHING;
                               """)

    else:

        await conn.execute("""
                               INSERT INTO service (name, value) VALUES ('address_timeline', '0')
                               ON CONFLICT(name) DO NOTHING;
                               """)

    m = await conn.fetchval("SELECT service.value FROM service WHERE service.name ='address_timeline' LIMIT 1;")
    app.log.info("Option address_timeline = %s" % m)

    if int(m) == 1 and not app.address_timeline or app.address_timeline and int(m) == 0:
        app.log.critical("address_timeline config option not match db structure; you should drop db and recreate it.")
        raise Exception("DB structure invalid")


    # blockchain_analytica module

    if app.blockchain_analytica:
        await conn.execute(open("./db_model/sql/blockchain_analytica.sql",
                                "r", encoding='utf-8').read().replace("\n", ""))
        await conn.execute("""
                               INSERT INTO service (name, value) VALUES ('blockchain_analytica', '1')
                               ON CONFLICT(name) DO NOTHING;
                               """)
        if not app.mempool_analytica:
            app.log.critical("mempool_analytica option required for blockchain_analytica option enabled")
            raise Exception("configuration invalid")
        # b = await conn.fetchval("SELECT blockchain FROM blocks_stat ORDER BY height DESC LIMIT 1;")
        # if b is not None:
        #     app.blockchain_stat = json.loads(b)
        # else:
        #     app.blockchain_stat = {
        #         "outputs": {"count": {"total": 0},                                  # total outputs count
        #                                                                             # What is the total quantity of
        #                                                                             # coins in bitcoin blockchain?
        #
        #                     "amount": {"min": {"pointer": 0,                        # output with minimal amount
        #                                        "value": 0},                         # What is the minimum amount of a coins?
        #
        #                                "max": {"pointer": 0,                        # output with maximal amount
        #                                        "value": 0},                         # What is the maximal amount of a coins?
        #
        #                                "total": 0,                                  # total amount of all outputs
        #
        #                                "map": {"count": dict(),                     # quantity distribution by amount
        #                                                                             # How many coins exceed 1 BTC?
        #
        #                                        "amount": dict()}                    # amounts distribution by amount
        #                                                                             # What is the total amount of all coins
        #                                                                             # exceeding 10 BTC?
        #                                },
        #
        #                     "type": {"map": {"count": dict(),                       # quantity distribution by type
        #                                                                             # How many P2SH coins?
        #
        #                                      "amount": dict(),                      # amounts distribution by type
        #                                                                             # What is the total amount of
        #                                                                             # all P2PKH coins?
        #
        #                                      "size": dict()}},                      # sizes distribution by type
        #                                                                             # What is the total size
        #                                                                             # of all P2PKH coins?
        #
        #                     "age": {"map": {"count": dict(),                        # distribution of counts by age
        #                                                                             # How many coins older then 1 year?
        #
        #                                     "amount": dict(),                       # distribution of amount by age
        #                                                                             # What amount of coins older then 1 month?
        #
        #                                     "type": dict()                          # distribution of counts by type
        #                                                                             # How many P2SH coins older then 1 year?
        #                                     }}
        #
        #                     },
        #
        #         "inputs": {"count": {"total": 0},                                   # total inputs count
        #                                                                             # What is the total quantity of
        #                                                                             # spent coins in bitcoin blockchain?
        #
        #                    "amount": {"min": {"pointer": 0,                         # input with minimal amount
        #                                       "value": 0},                          # What is the smallest coin spent?
        #
        #                               "max": {"pointer": 0,                         # input with maximal amount
        #                                       "value": 0},                          # what is the greatest coin spent?
        #
        #                               "total": 0,                                   # total amount of all inputs
        #                                                                             # What is the total amount of
        #                                                                             # all spent coins?
        #
        #                               "map": {"count": dict(),                      # quantity distribution by amount
        #                                                                             # How many spent coins exceed 1 BTC?
        #
        #                                       "amount": dict()}                     # amounts distribution by amount
        #                                                                             # What is the total amount of
        #                                                                             #  all spent coins exceeding 10 BTC?
        #                               },
        #                    "type": {
        #                        "map": {"count": dict(),                             # quantity distribution by type
        #                                                                             # How many P2SH  spent coins?
        #
        #                                "amount": dict(),                            # amounts distribution by type
        #                                                                             # What is the total amount
        #                                                                             # of all P2PKH spent?
        #
        #                                "size": dict()                               # sizes distribution by type
        #                                                                             # What is the total
        #                                                                             # size of all P2PKH spent?
        #
        #                                }},
        #
        #
        #                    # P2SH redeem script statistics
        #                    "P2SH": {
        #                        "type": {"map": {"count": dict(),
        #                                         "amount": dict(),
        #                                         "size": dict()}
        #                                 }
        #                    },
        #
        #                    # P2WSH redeem script statistics
        #                    "P2WSH": {
        #                        "type": {"map": {"count": dict(),
        #                                         "amount": dict(),
        #                                         "size": dict()}
        #                                 }
        #                    }
        #                    },
        #
        #         "transactions": {"count": {"total": 0},
        #
        #                          "amount": {"min": {"pointer": 0,
        #                                             "value": 0},
        #
        #                                     "max": {"pointer": 0,
        #                                             "value": 0},
        #
        #                                     "map": {"count": dict(),
        #                                             "amount": dict(),
        #                                             "size": dict()},
        #                                     "total": 0},
        #                          "size": {"min": {"pointer": 0, "value": 0},
        #                                   "max": {"pointer": 0, "value": 0},
        #                                   "total": {"size": 0, "bSize": 0, "vSize": 0},
        #                                   "map": {"count": dict(), "amount": dict()}},
        #
        #                          "type": {"map": {"count": dict(), "size": dict(),
        #                                           "amount": dict()}},
        #
        #                          "fee": {"min": {"pointer": 0, "value": 0},
        #                                  "max": {"pointer": 0, "value": 0},
        #                                  "total": 0}
        #                          }
        #          }




    else:

        await conn.execute("""
                               INSERT INTO service (name, value) VALUES ('blockchain_analytica', '0')
                               ON CONFLICT(name) DO NOTHING;
                               """)

    m = await conn.fetchval("SELECT service.value FROM service WHERE service.name ='blockchain_analytica' LIMIT 1;")
    app.log.info("Option blockchain_analytica = %s" % m)

    if int(m) == 1 and not app.blockchain_analytica or app.blockchain_analytica and int(m) == 0:
        app.log.critical("blockchain_analytica config option not match db structure; you should drop db and recreate it.")
        raise Exception("DB structure invalid")








    # transaction_fee_analytica

    app.log.info("Option transaction_fee_analytica = 0")

    # nodes_discovery

    app.log.info("Option nodes_discovery = 0")

    # market_data

    app.log.info("Option market_data = 0")


    # deterministic_wallet

    app.log.info("Option deterministic_wallet = 0")


    # payment_forwarding

    app.log.info("Option payment_forwarding = 0")


    # hot_wallet

    app.log.info("Option hot_wallet = 0")

    start_block = await conn.fetchval("SELECT height FROM blocks ORDER BY height DESC LIMIT 1;")
    app.start_checkpoint = -1 if start_block is None else start_block


    if "flush_mempool_on_start" in app.config["OPTIONS"]:
        if app.config["OPTIONS"]["flush_mempool_on_start"] == "on":
            app.log.info("Option flush_mempool_on_start = 1")
            app.log.info("Flush mempool ...")
            if app.transaction:
                await conn.execute("TRUNCATE TABLE unconfirmed_transaction;")
                await conn.execute("TRUNCATE TABLE invalid_transaction;")
            if app.transaction_history:
                await conn.execute("TRUNCATE TABLE unconfirmed_transaction_map;")
                await conn.execute("TRUNCATE TABLE invalid_transaction_map;")
            await conn.execute("TRUNCATE TABLE connector_unconfirmed_utxo;")
            await conn.execute("TRUNCATE TABLE connector_unconfirmed_stxo;")





    rows = await conn.fetch("SELECT hash from blocks order by height desc limit 100;")
    for row in rows:
        app.chain_tail.append(rh2s(row["hash"]))

    await conn.execute(""" INSERT INTO service (name, value) VALUES ('block_filters_bootstrap', '0')
                           ON CONFLICT(name) DO UPDATE SET value = '0';
                       """)
    async def _new_block(self, block):
        # Process a newly announced block: verify its position in the local
        # chain (handling orphans/reorgs), make sure all of its transactions
        # are in the db, then persist the block and request the next one.
        # `block` is the node's verbose block dict (keys used here: hash,
        # previousblockhash, height, tx, time, size), or None to signal that
        # the synchronization stream has ended.
        self.block_dependency_tx = 0
        """
        0 Check if block already exist in db
        1 Check parent block in db:
            If no parent
                get last block height from db
                   if last block height >= recent block height 
                       this is orphan ignore it
                   else:
                       remove top block from db and ask block with
                       hrecent block height -1
                       return
        2 add all transactions from block to db
            ask full block from node
            parse txs and add to db in case not exist
        3 call before add block handler^ if this handler rise 
          exception block adding filed
        4 add block to db and commit
        5 after block add handelr 
        6 ask next block
        """
        # Skip if the connector is inactive or a previous block is still
        # being processed (active_block future not resolved yet).
        if not self.active or not self.active_block.done():
            return
        if block is None:
            # End of synchronization stream.
            self.sync = False
            self.log.debug('Block synchronization completed')
            return
        # Mark block processing as in progress; resolved in `finally` below.
        self.active_block = asyncio.Future()

        binary_block_hash = unhexlify(block["hash"])
        binary_previousblock_hash = \
            unhexlify(block["previousblockhash"]) \
            if "previousblockhash" in block else None
        block_height = int(block["height"])
        # Default: after this block, ask for the next height.
        next_block_height = block_height + 1
        self.log.info("New block %s %s" % (block_height, block["hash"]))
        bt = q = tm()  # bt = whole-block timer, q = per-stage timer
        try:
            async with self._db_pool.acquire() as con:
                # blockchain position check
                block_exist = self.block_cache.get(binary_block_hash)
                if block_exist is not None:
                    self.log.info("block already exist in db %s" % block["hash"])
                    return
                # Get parent from db
                if binary_previousblock_hash is not None:
                    parent_height = self.block_cache.get(binary_previousblock_hash)
                else:
                    parent_height = None
                # self.log.warning("parent height %s" % parent_height)

                if parent_height is None:
                    # have no mount point in local chain
                    # self.log.warning("last local height %s" % self.last_block_height)
                    if self.last_block_height is not None:
                        if self.last_block_height >= block_height:
                            # Block at or below our tip with unknown parent:
                            # treat as orphan and ignore it.
                            self.log.critical("bitcoin node out of sync block %s" % block["hash"])
                            return
                        if self.last_block_height+1 == block_height:
                            # Competing block at tip height: roll back our top
                            # block before re-requesting.
                            if self.orphan_handler:
                                tq = tm()
                                await self.orphan_handler(self.last_block_height, con)
                                self.log.info("orphan handler  %s [%s]" % (self.last_block_height, tm(tq)))
                            tq = tm()
                            await remove_orphan(self, con)
                            self.log.info("remove orphan %s [%s]" % (self.last_block_height + 1, tm(tq)))
                        # Step back and continue from the last known good height.
                        next_block_height -= 2
                        if next_block_height > self.last_block_height:
                            next_block_height = self.last_block_height + 1
                        if self.sync and next_block_height >= self.sync:
                            if self.sync_requested:
                                next_block_height = self.last_block_height + 1
                        else:
                            self.sync = next_block_height
                        return
                    else:
                        # Empty local chain; honour a configured start block.
                        if self.start_block is not None and block["height"] != self.start_block:
                            self.log.info("Start from block %s" % self.start_block)
                            next_block_height = self.start_block
                            return
                else:
                    # Parent is known but this block does not extend our tip:
                    # drop the top block and re-request the previous height.
                    if self.last_block_height + 1 != block_height:
                        if self.orphan_handler:
                            tq = tm()
                            await self.orphan_handler(self.last_block_height, con)
                            self.log.info("orphan handler  %s [%s]" % (self.last_block_height, tm(tq)))
                        await remove_orphan(self, con)
                        next_block_height -= 1
                        self.log.debug("requested %s" % next_block_height)
                        return
                self.log.debug("blockchain position check [%s]" % tm(q))

                # add all block transactions
                q = tm()
                binary_tx_hash_list = [unhexlify(t)[::-1] for t in block["tx"]]
                if block["height"] in (91842, 91880):
                    # BIP30 Fix
                    self.tx_cache.pop(s2rh("d5d27987d2a3dfc724e359870c6644b40e497bdc0589a033220fe15429d88599"))
                    self.tx_cache.pop(s2rh("e3bf3d07d4b0375638d5f1db5255fe07ba2c4cb067cd81b84ee974b6585fb468"))
                tx_id_list, missed = await get_tx_id_list(self, binary_tx_hash_list, con)
                if len(tx_id_list)+len(missed) != len(block["tx"]):
                    raise Exception("tx count mismatch")
                self.await_tx_id_list = tx_id_list
                if self.before_block_handler:
                    # Let the application preload the missed transactions; a
                    # truthy return means it handled them (placeholder ids 0).
                    sn = await self.before_block_handler(block,
                                                         missed,
                                                         self.sync_tx_lock,
                                                         self.node_last_block,
                                                         con)
                    if sn and missed:
                        self.await_tx_id_list = self.await_tx_id_list + [0 for i in range(len(missed))]
                        missed = []
                cq = tm()
                missed = [rh2s(t) for t in missed]
                self.log.info("Transactions already exist: %s missed %s [%s]" % (len(tx_id_list), len(missed), tm(q)))
                if missed:
                    self.log.debug("Request missed transactions")
                    self.missed_tx_list = list(missed)
                    self.await_tx_list = missed
                    self.await_tx_future = dict()
                    # One future per missed tx, keyed by binary tx id.
                    for i in missed:
                        self.await_tx_future[unhexlify(i)[::-1]] = asyncio.Future()
                    self.block_txs_request = asyncio.Future()
                    # If every tx is missing, fetch the whole block instead of
                    # individual transactions.
                    if len(missed) == len(block["tx"]):
                        self.loop.create_task(self._get_missed(block["hash"],
                                                               block["time"],
                                                               block["height"]
                                                              ))
                    else:
                        self.loop.create_task(self._get_missed(False,
                                                               block["time"],
                                                               block["height"]
                                                              ))
                    try:
                        await asyncio.wait_for(self.block_txs_request, timeout=self.block_timeout)
                    except asyncio.CancelledError:
                        # refresh rpc connection session
                        await self.rpc.close()
                        self.rpc = aiojsonrpc.rpc(self.rpc_url, self.loop, timeout=self.rpc_timeout)
                        raise RuntimeError("block transaction request timeout")
                if len(block["tx"]) != len(self.await_tx_id_list):
                    self.log.error("get block transactions failed")
                    self.log.error(str(self.await_tx_id_list))
                    raise Exception("get block transactions failed")

                tx_count = len(self.await_tx_id_list)
                self.total_received_tx += tx_count
                self.total_received_tx_time += tm(q)
                rate = round(self.total_received_tx/self.total_received_tx_time)
                self.log.info("Transactions received: %s [%s] rate tx/s ->> %s <<" % (tx_count, tm(cq), rate))
                # Persist the block atomically together with the application's
                # block-received hook.
                async with con.transaction():
                    if self.block_received_handler:
                        await self.block_received_handler(block, con)
                    # insert new block
                    await insert_new_block(self, binary_block_hash,
                                           block["height"],
                                           binary_previousblock_hash,
                                           block["time"], con)
                    if not self.external_dublicate_filter:
                        self.loop.create_task(update_block_height(self, block["height"],
                                                                  list(self.await_tx_id_list)))
                if self.sync == block["height"]:
                    self.sync += 1
                    next_block_height = self.sync
                # after block added handler
                if self.block_handler:
                    await self.block_handler(block, con)
            self.last_block_height = block["height"]
            self.block_cache.set(binary_block_hash, block["height"])
        except Exception as err:
            if self.await_tx_list:
                self.await_tx_list = []
            self.log.error(str(traceback.format_exc()))
            self.log.error("new block error %s" % str(err))
            next_block_height = None  # stop requesting further blocks after a failure
        finally:
            # Always release the processing lock, then schedule the next block
            # request if one is pending.
            self.active_block.set_result(True)
            self.log.debug("block  processing completed")
            if next_block_height is not None:
                self.sync_requested = True
                self.loop.create_task(self.get_block_by_height(next_block_height))
            self.log.info("%s block [%s tx/ %s size] (dp %s) processing time %s cache [%s/%s]" %
                          (block["height"],
                           len(block["tx"]),
                           block["size"] / 1000000,
                           self.block_dependency_tx,
                           tm(bt),
                           len(self.block_hashes_preload._store),
                           len(self.block_preload._store)))
Beispiel #17
0
async def outpoints_info(outpoints, app):
    """
    Look up the state of a list of outpoints.

    :param outpoints: list of (tx_id: bytes, vout: int, outpoint: bytes)
                      tuples; `outpoint` is the raw db key for the output.
    :param app: application context with "db_pool" and "testnet" entries.
    :return: {"data": [...], "time": seconds}; each data entry maps the
             string "txid:vout" to an info dict (script/type/amount/height/
             spent, plus address where one exists) or to None when the
             outpoint is unknown.
    """
    q = time.time()
    o = [i[2] for i in outpoints]  # raw outpoint keys
    t = [i[0] for i in outpoints]  # transaction ids
    out_map_pointer = dict()       # stxo pointer -> raw outpoint key
    async with app["db_pool"].acquire() as conn:
        # Confirmed unspent outputs.
        utxo = await conn.fetch(
            "SELECT outpoint, pointer, address, amount "
            "FROM connector_utxo "
            "WHERE outpoint = ANY($1) LIMIT $2;", o, len(o))
        # Unconfirmed (mempool) unspent outputs.
        uutxo = await conn.fetch(
            "SELECT outpoint, address, amount "
            "FROM connector_unconfirmed_utxo "
            "WHERE outpoint = ANY($1) LIMIT $2;", o, len(o))
        # Mempool transactions spending these outpoints.
        ustxo = await conn.fetch(
            "SELECT outpoint, tx_id "
            "FROM connector_unconfirmed_stxo "
            "WHERE outpoint = ANY($1);", o)
        stxo = []

        if len(utxo) + len(uutxo) < len(o):
            # Some outpoints were not found among unspent outputs; they may
            # be confirmed spent outputs (stxo), addressed by pointer.
            tx_pointers = await conn.fetch(
                "SELECT pointer, tx_id FROM transaction "
                "WHERE tx_id = ANY($1) LIMIT $2;", t, len(t))
            tx_map_pointer = dict()
            for row in tx_pointers:
                tx_map_pointer[row["tx_id"]] = row["pointer"]
            s = set()

            for i in outpoints:
                try:
                    # output pointer = tx pointer + output flag + vout
                    pointer = tx_map_pointer[i[0]] + (1 << 19) + i[1]
                except KeyError:
                    # transaction unknown; nothing to resolve for this outpoint
                    continue
                s.add(pointer)
                out_map_pointer[pointer] = i[2]
            # BUGFIX: the ON and WHERE fragments were concatenated without a
            # separating space ("...<<18WHERE..."), which made this query
            # invalid SQL.
            stxo = await conn.fetch(
                "SELECT stxo.pointer, stxo.address, stxo.amount, transaction.tx_id "
                "FROM stxo "
                "JOIN transaction "
                "ON transaction.pointer = (stxo.s_pointer >> 18)<<18 "
                "WHERE stxo.pointer = ANY($1) LIMIT $2", s, len(s))

    # raw outpoint -> [height, address, amount, spending tx list]
    outpoints_map = dict()

    for row in utxo:
        outpoints_map[row["outpoint"]] = [
            row["pointer"] >> 39, row["address"], row["amount"], []
        ]

    for row in uutxo:
        # Unconfirmed outputs have no block height.
        outpoints_map[row["outpoint"]] = [
            None, row["address"], row["amount"], []
        ]

    for row in ustxo:
        try:
            outpoints_map[row["outpoint"]][3].append(rh2s(row["tx_id"]))
        except KeyError:
            # spending record for an outpoint we did not resolve; ignore
            pass

    for row in stxo:
        outpoint = out_map_pointer[row["pointer"]]
        outpoints_map[outpoint] = [
            row["pointer"] >> 39, row["address"], row["amount"],
            [rh2s(row["tx_id"])]
        ]

    result = []
    for i in outpoints:
        outpoint = "%s:%s" % (rh2s(i[0]), i[1])
        try:
            h, address, amount, s = outpoints_map[i[2]]
        except KeyError:
            # Outpoint not found anywhere.
            result.append({outpoint: None})
            continue
        try:
            r = dict()
            if address[0] in (0, 1, 5, 6):
                # hash-based types: P2PKH / P2SH / P2WPKH / P2WSH
                script_hash = True if address[0] in (1, 6) else False
                witness_version = None if address[0] < 5 else 0
                r["address"] = hash_to_address(
                    address[1:],
                    testnet=app["testnet"],
                    script_hash=script_hash,
                    witness_version=witness_version)
                r["script"] = address_to_script(r["address"], hex=1)
            elif address[0] == 2:
                # P2PK: the stored value carries the script itself.
                # NOTE(review): r["address"][1:].hex() assumes
                # script_to_address returns bytes here — verify.
                r["address"] = script_to_address(address[1:],
                                                 testnet=app["testnet"])
                r["script"] = r["address"][1:].hex()
            else:
                # Non-standard output: no address exists, expose the raw
                # script only.  BUGFIX: previously read the never-assigned
                # r["address"], which raised KeyError and reported such
                # outpoints as None.
                r["script"] = address[1:].hex()
            r["scriptOpcodes"] = decode_script(r["script"])
            r["scriptAsm"] = decode_script(r["script"], 1)

            r["height"] = h
            r["spent"] = s
            r["type"] = SCRIPT_N_TYPES[address[0]]
            r["amount"] = amount
            result.append({outpoint: r})
        except Exception:
            result.append({outpoint: None})

    return {"data": result, "time": round(time.time() - q, 4)}
Beispiel #18
0
async def tx_by_pointers_opt_tx(pointers, hashes, option_raw_tx, app):
    """Fetch transactions by block pointers and/or transaction hashes.

    :param pointers: list of tx pointers ((height << 39) + (block_index << 20))
                     for confirmed transactions; may be None/empty.
    :param hashes: list of binary transaction ids; may be None/empty.
    :param option_raw_tx: when truthy keep the raw transaction in the result.
    :param app: application context providing "db_pool", "testnet",
                "last_block", "merkle_proof" and "transaction_history".
    :return: {"data": {<txid or "height:index">: tx-dict or None, ...},
              "time": <elapsed seconds>}
    """
    q = time.time()
    # normalize missing arguments so the loops below never see None
    pointers = pointers or []
    hashes = hashes or []
    async with app["db_pool"].acquire() as conn:
        h_rows, uh_rows, p_row = [], [], []
        if hashes:
            if app["merkle_proof"]:
                h_rows = await conn.fetch(
                    "SELECT tx_id, raw_transaction,  timestamp, pointer, merkle_proof,"
                    "       blocks.hash, header, adjusted_timestamp "
                    "FROM transaction "
                    "JOIN blocks ON "
                    "blocks.height = pointer >> 39 "
                    "WHERE tx_id = ANY($1);", hashes)
            else:
                h_rows = await conn.fetch(
                    "SELECT tx_id, raw_transaction,  timestamp, pointer, "
                    "       blocks.hash, header, adjusted_timestamp "
                    "FROM transaction "
                    "JOIN blocks ON "
                    "blocks.height = pointer >> 39 "
                    "WHERE tx_id = ANY($1);", hashes)
            # hashes not found among confirmed txs may still be in the mempool
            if len(h_rows) < len(hashes):
                uh_rows = await conn.fetch(
                    "SELECT tx_id, raw_transaction,  timestamp "
                    "FROM unconfirmed_transaction "
                    "WHERE tx_id = ANY($1);", hashes)
        if pointers:
            if app["merkle_proof"]:
                p_row = await conn.fetch(
                    "SELECT tx_id, raw_transaction,  timestamp, pointer, merkle_proof,  "
                    "       blocks.hash, header, adjusted_timestamp "
                    "FROM transaction "
                    " JOIN blocks ON "
                    "blocks.height = pointer >> 39 "
                    "WHERE pointer = ANY($1);", pointers)
            else:
                # merkle_proof column is not available when proofs are
                # disabled; keep this query consistent with the hash branch
                p_row = await conn.fetch(
                    "SELECT tx_id, raw_transaction,  timestamp, pointer, "
                    "       blocks.hash, header, adjusted_timestamp "
                    "FROM transaction "
                    "JOIN blocks ON "
                    "blocks.height = pointer >> 39 "
                    "WHERE pointer = ANY($1);", pointers)

        r = dict()
        txs = dict()
        s_pointers = []  # stxo pointers of this batch's inputs
        o_pointers = []  # pointers of this batch's outputs (1 << 19 flag set)
        tx_id_list = []

        # confirmed transactions looked up by hash
        for row in h_rows:
            block_height = row["pointer"] >> 39
            block_index = (row["pointer"] >> 20) & 524287
            # block time lives at offset 68 of the 80-byte header
            block_time = unpack("<L", row["header"][68:68 + 4])[0]
            tx = Transaction(row["raw_transaction"],
                             format="decoded",
                             testnet=app["testnet"],
                             keep_raw_tx=option_raw_tx)
            tx["blockHeight"] = block_height
            tx["blockIndex"] = block_index
            tx["blockHash"] = rh2s(row["hash"])
            tx["adjustedTimestamp"] = row["adjusted_timestamp"]
            tx["blockTime"] = block_time
            tx["timestamp"] = row["timestamp"]
            tx["confirmations"] = app["last_block"] - block_height + 1
            if app["merkle_proof"]:
                tx["merkleProof"] = base64.b64encode(
                    row["merkle_proof"]).decode()
            del tx["format"]
            del tx["testnet"]
            del tx["fee"]
            # honor option_raw_tx (was deleted unconditionally here, losing
            # rawTx even when the caller requested it)
            if not option_raw_tx:
                del tx["rawTx"]
            r[tx["txId"]] = tx
            if app["transaction_history"]:
                for i in tx["vIn"]:
                    s_pointers.append((block_height << 39) +
                                      (block_index << 20) + i)
                # output pointers must enumerate vOut indices (original
                # iterated vIn, missing outputs when len(vOut) > len(vIn))
                for i in tx["vOut"]:
                    o_pointers.append((block_height << 39) +
                                      (block_index << 20) + (1 << 19) + i)
                tx_id_list.append(s2rh(tx["txId"]))

        us_tx_ids = []          # mempool tx ids needing input info
        us_pointers_inputs = [] # tx ids referenced by mempool inputs

        # unconfirmed (mempool) transactions looked up by hash
        for row in uh_rows:
            tx = Transaction(row["raw_transaction"],
                             format="decoded",
                             testnet=app["testnet"],
                             keep_raw_tx=option_raw_tx)
            tx["blockHeight"] = None
            tx["blockIndex"] = None
            tx["blockHash"] = None
            tx["adjustedTimestamp"] = None
            tx["blockTime"] = None
            tx["time"] = row["timestamp"]
            tx["confirmations"] = 0
            del tx["format"]
            del tx["testnet"]
            del tx["fee"]
            if not option_raw_tx:
                del tx["rawTx"]
            r[tx["txId"]] = tx
            if app["transaction_history"]:
                us_tx_ids.append(s2rh(tx["txId"]))
                tx_id_list.append(s2rh(tx["txId"]))
                for v in tx["vIn"]:
                    us_pointers_inputs.append(s2rh(tx["vIn"][v]["txId"]))

        # confirmed transactions looked up by pointer; keyed "height:index"
        for row in p_row:
            block_height = row["pointer"] >> 39
            block_index = (row["pointer"] >> 20) & 524287
            block_time = unpack("<L", row["header"][68:68 + 4])[0]
            tx = Transaction(row["raw_transaction"],
                             format="decoded",
                             testnet=app["testnet"],
                             keep_raw_tx=option_raw_tx)
            tx["blockHeight"] = block_height
            tx["blockIndex"] = block_index
            tx["blockHash"] = rh2s(row["hash"])
            tx["adjustedTimestamp"] = row["adjusted_timestamp"]
            tx["blockTime"] = block_time
            tx["timestamp"] = row["timestamp"]
            tx["confirmations"] = app["last_block"] - block_height + 1
            if app["merkle_proof"]:
                tx["merkleProof"] = base64.b64encode(
                    row["merkle_proof"]).decode()
            del tx["format"]
            del tx["testnet"]
            del tx["fee"]
            if not option_raw_tx:
                del tx["rawTx"]
            pointer = row["pointer"]
            r["%s:%s" % (pointer >> 39, (pointer >> 20) & 524287)] = tx
            if app["transaction_history"]:
                for i in tx["vIn"]:
                    s_pointers.append((block_height << 39) +
                                      (block_index << 20) + i)
                # see note above: output pointers enumerate vOut indices
                for i in tx["vOut"]:
                    o_pointers.append((block_height << 39) +
                                      (block_index << 20) + (1 << 19) + i)
                tx_id_list.append(s2rh(tx["txId"]))

        if app["transaction_history"]:
            # get information about spent input coins

            if s_pointers:
                rows = await conn.fetch(
                    "SELECT pointer,"
                    "       s_pointer,"
                    "       address, "
                    "       amount  "
                    "FROM stxo "
                    "WHERE stxo.s_pointer = ANY($1);", s_pointers)
                s_pointers_map = dict()
                for v in rows:
                    s_pointers_map[v["s_pointer"]] = v

                for t in r:
                    # unconfirmed txs have no stxo records
                    if r[t]["blockHeight"] is None:
                        continue

                    s_pointer = (r[t]["blockHeight"] << 39) + (
                        r[t]["blockIndex"] << 20)
                    r[t]["inputsAmount"] = 0
                    for i in r[t]["vIn"]:
                        if r[t]["coinbase"]:
                            continue
                        d = s_pointers_map[s_pointer + i]
                        r[t]["vIn"][i]["type"] = SCRIPT_N_TYPES[d["address"]
                                                                [0]]
                        r[t]["vIn"][i]["amount"] = d["amount"]
                        r[t]["inputsAmount"] += d["amount"]
                        r[t]["vIn"][i]["blockHeight"] = d["pointer"] >> 39
                        r[t]["vIn"][i]["confirmations"] = app["last_block"] - (
                            d["pointer"] >> 39) + 1
                        if d["address"][0] in (0, 1, 2, 5, 6):
                            script_hash = True if d["address"][0] in (
                                1, 6) else False
                            witness_version = None if d["address"][0] < 5 else 0
                            r[t]["vIn"][i]["address"] = hash_to_address(
                                d["address"][1:],
                                testnet=app["testnet"],
                                script_hash=script_hash,
                                witness_version=witness_version)
                            r[t]["vIn"][i]["scriptPubKey"] = address_to_script(
                                r[t]["vIn"][i]["address"], hex=1)
                        elif d["address"][0] == 2:
                            # fixed: original tested r["address"], a typo for
                            # the stxo row d (branch is shadowed by the
                            # condition above but kept for clarity)
                            r[t]["vIn"][i]["address"] = script_to_address(
                                d["address"][1:], testnet=app["testnet"])
                            r[t]["vIn"][i]["scriptPubKey"] = d["address"][
                                1:].hex()
                        else:
                            r[t]["vIn"][i]["scriptPubKey"] = d["address"][
                                1:].hex()
                        r[t]["vIn"][i]["scriptPubKeyOpcodes"] = decode_script(
                            r[t]["vIn"][i]["scriptPubKey"])
                        r[t]["vIn"][i]["scriptPubKeyAsm"] = decode_script(
                            r[t]["vIn"][i]["scriptPubKey"], 1)

            if us_tx_ids:
                rows = await conn.fetch(
                    "SELECT   outpoint,"
                    "         input_index,"
                    "       out_tx_id, "
                    "       tx_id,"
                    "       address, "
                    "       amount "
                    "    "
                    "FROM connector_unconfirmed_stxo "
                    "WHERE tx_id =  ANY($1);", us_tx_ids)
                c_rows = await conn.fetch(
                    """
                                           SELECT pointer, tx_id FROM transaction 
                                           WHERE tx_id = ANY($1);
                                           """, us_pointers_inputs)
                tx_id_map_pointer = dict()

                for v in c_rows:
                    tx_id_map_pointer[rh2s(v["tx_id"])] = v["pointer"]

                # (txId, input_index) -> unconfirmed stxo row
                us_stxo_map = dict()
                for v in rows:
                    us_stxo_map[(rh2s(v["tx_id"]), v["input_index"])] = v

                for t in r:
                    # only mempool txs here
                    if r[t]["blockHeight"] is not None:
                        continue

                    r[t]["inputsAmount"] = 0
                    for i in r[t]["vIn"]:
                        d = us_stxo_map[(r[t]["txId"], i)]
                        r[t]["vIn"][i]["type"] = SCRIPT_N_TYPES[d["address"]
                                                                [0]]
                        r[t]["vIn"][i]["amount"] = d["amount"]
                        r[t]["inputsAmount"] += d["amount"]
                        try:
                            pointer = tx_id_map_pointer[r[t]["vIn"][i]["txId"]]
                            r[t]["vIn"][i]["blockHeight"] = pointer >> 39
                            r[t]["vIn"][i]["confirmations"] = app[
                                "last_block"] - (pointer >> 39) + 1
                        except KeyError:
                            # funding tx is itself unconfirmed
                            r[t]["vIn"][i]["blockHeight"] = None
                            r[t]["vIn"][i]["confirmations"] = None

                        if d["address"][0] in (0, 1, 2, 5, 6):
                            script_hash = True if d["address"][0] in (
                                1, 6) else False
                            witness_version = None if d["address"][0] < 5 else 0
                            try:
                                if d["address"][0] == 2:
                                    # P2PK: derive the address hash from
                                    # the stored script
                                    ad = b"\x02" + parse_script(
                                        d["address"][1:])["addressHash"]
                                else:
                                    ad = d["address"]
                                r[t]["vIn"][i]["address"] = hash_to_address(
                                    ad[1:],
                                    testnet=app["testnet"],
                                    script_hash=script_hash,
                                    witness_version=witness_version)

                                r[t]["vIn"][i][
                                    "scriptPubKey"] = address_to_script(
                                        r[t]["vIn"][i]["address"], hex=1)
                            except:
                                print(r[t]["txId"])
                                print("??", d["address"].hex())
                                raise

                        elif d["address"][0] == 2:
                            # fixed: original tested r["address"] (typo, see
                            # the confirmed branch above)
                            r[t]["vIn"][i]["address"] = script_to_address(
                                d["address"][1:], testnet=app["testnet"])
                            r[t]["vIn"][i]["scriptPubKey"] = d["address"][
                                1:].hex()
                        else:
                            r[t]["vIn"][i]["scriptPubKey"] = d["address"][
                                1:].hex()
                        r[t]["vIn"][i]["scriptPubKeyOpcodes"] = decode_script(
                            r[t]["vIn"][i]["scriptPubKey"])
                        r[t]["vIn"][i]["scriptPubKeyAsm"] = decode_script(
                            r[t]["vIn"][i]["scriptPubKey"], 1)

            # get information about spent output coins

            # mempool spenders of this batch's outputs
            rows = await conn.fetch(
                "SELECT   outpoint,"
                "         input_index,"
                "       tx_id "
                "FROM connector_unconfirmed_stxo "
                "WHERE out_tx_id = ANY($1);", tx_id_list)
            out_map = dict()
            for v in rows:
                i = bytes_to_int(v["outpoint"][32:])
                out_map.setdefault((rh2s(v["outpoint"][:32]), i), []).append(
                    {"txId": rh2s(v["tx_id"]), "vIn": v["input_index"]})

            # confirmed spenders of this batch's outputs
            rows = await conn.fetch(
                "SELECT stxo.pointer,"
                "       stxo.s_pointer,"
                "       transaction.tx_id  "
                "FROM stxo "
                "JOIN transaction "
                "ON transaction.pointer = (stxo.s_pointer >> 18)<<18 "
                "WHERE stxo.pointer = ANY($1);", o_pointers)
            p_out_map = dict()
            for v in rows:
                p_out_map[v["pointer"]] = [{
                    "txId": rh2s(v["tx_id"]),
                    # low 18 bits of the spending pointer = input index
                    "vIn": v["s_pointer"] & 0b111111111111111111
                }]

            for t in r:
                if r[t]["blockHeight"] is not None:
                    o_pointer = (r[t]["blockHeight"] << 39) + (
                        r[t]["blockIndex"] << 20) + (1 << 19)
                    for i in r[t]["vOut"]:
                        try:
                            r[t]["vOut"][i]["spent"] = p_out_map[o_pointer + i]
                        except KeyError:
                            r[t]["vOut"][i]["spent"] = out_map.get(
                                (r[t]["txId"], int(i)), [])
                else:
                    for i in r[t]["vOut"]:
                        r[t]["vOut"][i]["spent"] = out_map.get(
                            (r[t]["txId"], int(i)), [])

        # assemble the response in caller order; missing entries become None
        for pointer in pointers:
            key = "%s:%s" % (pointer >> 39, (pointer >> 20) & 524287)
            txs[key] = r.get(key)

        for h in hashes:
            h = rh2s(h)
            txs[h] = r.get(h)

    return {"data": txs, "time": round(time.time() - q, 4)}
Beispiel #19
0
async def data_last_n_blocks(n, app):
    """Return extended data for the last ``n`` blocks, newest first.

    :param n: number of blocks to return.
    :param app: application context providing "db_pool", "last_block" and
                "block_map_time".
    :return: {"data": [block-dict, ...], "time": <elapsed seconds>}
    :raises APIException: NOT_FOUND when there are no blocks in the db.
    """
    pool = app["db_pool"]
    qt = time.time()
    async with pool.acquire() as conn:
        rows = await conn.fetch(
            "SELECT height,"
            "       hash,"
            "       miner,"
            "       timestamp_received,"
            "       data,"
            "       header,"
            "       adjusted_timestamp "
            "FROM blocks  ORDER BY height desc LIMIT $1;", n)
        # fetch returns a list; check emptiness before using the rows
        # (original tested "rows is None" after iterating them)
        if not rows:
            raise APIException(NOT_FOUND, "blocks not found", status=404)

        # nx_map: height -> hash of the next (child) block
        # cb_pointers: pointer of each block's coinbase tx (tx index 0)
        nx_map = dict()
        cb_pointers = []
        for row in rows:
            cb_pointers.append(row["height"] << 39)
            nx_map[row["height"] - 1] = rh2s(row["hash"])

        cb_rows = await conn.fetch(
            "SELECT pointer, raw_transaction  "
            "FROM transaction  WHERE pointer = ANY($1);", cb_pointers)
        cb_map = dict()
        for cb in cb_rows:
            cb_map[cb["pointer"] >> 39] = Transaction(cb["raw_transaction"],
                                                      format="raw")

    r = []

    for row in rows:
        block = dict()
        block["height"] = row["height"]
        # db may be ahead of the cached chain state; refresh once
        if block["height"] > app["last_block"]:
            await block_map_update(app)
            if block["height"] > app["last_block"]:
                raise Exception("internal error")
        block["hash"] = rh2s(row["hash"])
        block["header"] = base64.b64encode(row["header"]).decode()
        # merge the precomputed json payload into the response
        d = json.loads(row["data"])
        for k in d:
            block[k] = d[k]
        block["miner"] = json.loads(row["miner"]) if row["miner"] else None
        block["medianBlockTime"] = app["block_map_time"][block["height"]][2]
        block["blockTime"] = app["block_map_time"][block["height"]][1]
        block["receivedTimestamp"] = row["timestamp_received"]
        block["adjustedTimestamp"] = row["adjusted_timestamp"]
        block["bitsHex"] = block["bits"]
        block["bits"] = bytes_to_int(bytes_from_hex(block["bits"]))
        block["nonceHex"] = block["nonce"].to_bytes(4, byteorder="big").hex()
        block["versionHex"] = int_to_bytes(block["version"]).hex()
        block["difficulty"] = round(block["targetDifficulty"], 2)
        q = int.from_bytes(s2rh(block["hash"]), byteorder="little")
        block["blockDifficulty"] = target_to_difficulty(q)
        del block["targetDifficulty"]
        block["nextBlockHash"] = nx_map.get(block["height"])

        # coinbase transaction / reward breakdown
        tx = cb_map[block["height"]]

        # subsidy halves every 210000 blocks: 50 BTC >> halvings
        block["estimatedBlockReward"] = 50 * 100000000 >> block[
            "height"] // 210000
        if tx["amount"] > block["estimatedBlockReward"]:
            block["blockReward"] = block["estimatedBlockReward"]
            block["blockFeeReward"] = tx["amount"] - block[
                "estimatedBlockReward"]
        else:
            block["blockReward"] = tx["amount"]
            block["blockFeeReward"] = 0
        block["confirmations"] = app["last_block"] - block["height"] + 1
        # tx count is the varint right after the 80-byte header
        block["transactionsCount"] = var_int_to_int(row["header"][80:])
        block["coinbase"] = tx["vIn"][0]["scriptSig"].hex()
        r.append(block)

    resp = {"data": r, "time": round(time.time() - qt, 4)}
    return resp
Beispiel #20
0
    async def _new_block(self, block):
        """Process a newly announced block from the node.

        Verifies the block's position against the local chain, loads any
        missing transactions, persists the block atomically and schedules a
        request for the next block.

        :param block: dict from node RPC with "hash", "height", "tx", "time",
                      "size" and, except for the genesis block,
                      "previousblockhash"; None signals the end of the sync
                      preload stream.
        """
        self.block_dependency_tx = 0
        """
        0 Check if block already exist in db
        1 Check parent block in db:
            If no parent
                get last block height from db
                   if last block height >= recent block height 
                       this is orphan ignore it
                   else:
                       remove top block from db and ask block with
                       hrecent block height -1
                       return
        2 add all transactions from block to db
            ask full block from node
            parse txs and add to db in case not exist
        3 call before add block handler^ if this handler rise 
          exception block adding filed
        4 add block to db and commit
        5 after block add handelr 
        6 ask next block
        """
        # drop the notification while disabled or while another block is
        # still being processed (active_block future not yet resolved)
        if not self.active or not self.active_block.done():
            return
        if block is None:
            # None marks the end of the preload queue: sync is complete
            self.sync = False
            self.log.debug('Block synchronization completed')
            return
        self.active_block = asyncio.Future()

        binary_block_hash = unhexlify(block["hash"])
        binary_previousblock_hash = \
            unhexlify(block["previousblockhash"]) \
            if "previousblockhash" in block else None
        block_height = int(block["height"])
        next_block_height = block_height + 1
        self.log.info("New block %s %s" % (block_height, block["hash"]))
        bt = q = tm()
        try:
            async with self._db_pool.acquire() as con:
                # blockchain position check
                block_exist = self.block_cache.get(binary_block_hash)
                if block_exist is not None:
                    self.log.info("block already exist in db %s" %
                                  block["hash"])
                    return
                # Get parent from db
                if binary_previousblock_hash is not None:
                    parent_height = self.block_cache.get(
                        binary_previousblock_hash)
                else:
                    parent_height = None
                # self.log.warning("parent height %s" % parent_height)

                if parent_height is None:
                    # have no mount point in local chain
                    # self.log.warning("last local height %s" % self.last_block_height)
                    if self.last_block_height is not None:
                        if self.last_block_height >= block_height:
                            # block is behind or equal to our tip: orphan
                            self.log.critical(
                                "bitcoin node out of sync block %s" %
                                block["hash"])
                            return
                        if self.last_block_height + 1 == block_height:
                            # our tip was orphaned: unwind it and re-request
                            if self.orphan_handler:
                                tq = tm()
                                await self.orphan_handler(
                                    self.last_block_height, con)
                                self.log.info("orphan handler  %s [%s]" %
                                              (self.last_block_height, tm(tq)))
                            tq = tm()
                            await remove_orphan(self, con)
                            self.log.info("remove orphan %s [%s]" %
                                          (self.last_block_height + 1, tm(tq)))
                        next_block_height -= 2
                        if next_block_height > self.last_block_height:
                            next_block_height = self.last_block_height + 1
                        if self.sync and next_block_height >= self.sync:
                            if self.sync_requested:
                                next_block_height = self.last_block_height + 1
                        else:
                            self.sync = next_block_height
                        return
                    else:
                        # empty db: honor a configured start block
                        if self.start_block is not None and block[
                                "height"] != self.start_block:
                            self.log.info("Start from block %s" %
                                          self.start_block)
                            next_block_height = self.start_block
                            return
                else:
                    if self.last_block_height + 1 != block_height:
                        # parent known but not our tip: unwind one block
                        if self.orphan_handler:
                            tq = tm()
                            await self.orphan_handler(self.last_block_height,
                                                      con)
                            self.log.info("orphan handler  %s [%s]" %
                                          (self.last_block_height, tm(tq)))
                        await remove_orphan(self, con)
                        next_block_height -= 1
                        self.log.debug("requested %s" % next_block_height)
                        return
                self.log.debug("blockchain position check [%s]" % tm(q))

                # add all block transactions
                q = tm()
                binary_tx_hash_list = [unhexlify(t)[::-1] for t in block["tx"]]
                if block["height"] in (91842, 91880):
                    # BIP30 Fix
                    self.tx_cache.pop(
                        s2rh(
                            "d5d27987d2a3dfc724e359870c6644b40e497bdc0589a033220fe15429d88599"
                        ))
                    self.tx_cache.pop(
                        s2rh(
                            "e3bf3d07d4b0375638d5f1db5255fe07ba2c4cb067cd81b84ee974b6585fb468"
                        ))
                tx_id_list, missed = await get_tx_id_list(
                    self, binary_tx_hash_list, con)
                if len(tx_id_list) + len(missed) != len(block["tx"]):
                    raise Exception("tx count mismatch")
                self.await_tx_id_list = tx_id_list
                if self.before_block_handler:
                    sn = await self.before_block_handler(
                        block, missed, self.sync_tx_lock, self.node_last_block,
                        con)
                    if sn and missed:
                        # handler consumed the missed txs itself; pad the
                        # await list with placeholders
                        self.await_tx_id_list = self.await_tx_id_list + [
                            0 for i in range(len(missed))
                        ]
                        missed = []
                cq = tm()
                missed = [rh2s(t) for t in missed]
                self.log.info("Transactions already exist: %s missed %s [%s]" %
                              (len(tx_id_list), len(missed), tm(q)))
                if missed:
                    self.log.debug("Request missed transactions")
                    self.missed_tx_list = list(missed)
                    self.await_tx_list = missed
                    self.await_tx_future = dict()
                    for i in missed:
                        self.await_tx_future[unhexlify(i)
                                             [::-1]] = asyncio.Future()
                    self.block_txs_request = asyncio.Future()
                    if len(missed) == len(block["tx"]):
                        # nothing cached: fetch the whole block at once
                        self.loop.create_task(
                            self._get_missed(block["hash"], block["time"],
                                             block["height"]))
                    else:
                        self.loop.create_task(
                            self._get_missed(False, block["time"],
                                             block["height"]))
                    try:
                        await asyncio.wait_for(self.block_txs_request,
                                               timeout=self.block_timeout)
                    except asyncio.CancelledError:
                        # refresh rpc connection session
                        await self.rpc.close()
                        self.rpc = aiojsonrpc.rpc(self.rpc_url,
                                                  self.loop,
                                                  timeout=self.rpc_timeout)
                        raise RuntimeError("block transaction request timeout")
                if len(block["tx"]) != len(self.await_tx_id_list):
                    self.log.error("get block transactions failed")
                    self.log.error(str(self.await_tx_id_list))
                    raise Exception("get block transactions failed")

                tx_count = len(self.await_tx_id_list)
                self.total_received_tx += tx_count
                self.total_received_tx_time += tm(q)
                rate = round(self.total_received_tx /
                             self.total_received_tx_time)
                self.log.info(
                    "Transactions received: %s [%s] rate tx/s ->> %s <<" %
                    (tx_count, tm(cq), rate))
                # commit block + txs atomically
                async with con.transaction():
                    if self.block_received_handler:
                        await self.block_received_handler(block, con)
                    # insert new block
                    await insert_new_block(self, binary_block_hash,
                                           block["height"],
                                           binary_previousblock_hash,
                                           block["time"], con)
                    if not self.external_dublicate_filter:
                        self.loop.create_task(
                            update_block_height(self, block["height"],
                                                list(self.await_tx_id_list)))
                if self.sync == block["height"]:
                    self.sync += 1
                    next_block_height = self.sync
                # after block added handler
                if self.block_handler:
                    await self.block_handler(block, con)
            self.last_block_height = block["height"]
            self.block_cache.set(binary_block_hash, block["height"])
        except Exception as err:
            if self.await_tx_list:
                self.await_tx_list = []
            self.log.error(str(traceback.format_exc()))
            self.log.error("new block error %s" % str(err))
            # suppress the next-block request on failure
            next_block_height = None
        finally:
            # always release the lock and, on success, chain the next request
            self.active_block.set_result(True)
            self.log.debug("block  processing completed")
            if next_block_height is not None:
                self.sync_requested = True
                self.loop.create_task(
                    self.get_block_by_height(next_block_height))
            self.log.info(
                "%s block [%s tx/ %s size] (dp %s) processing time %s cache [%s/%s]"
                % (block["height"], len(block["tx"]), block["size"] / 1000000,
                   self.block_dependency_tx, tm(bt),
                   len(self.block_hashes_preload._store),
                   len(self.block_preload._store)))
Beispiel #21
0
async def address_unconfirmed_transactions(address, type, limit, page, order,
                                           mode, app):
    """Return a paginated list of unconfirmed (mempool) transactions
    affecting an address.

    :param address: bytes; first byte is a script-type tag, the remainder is
        the address hash (or, for tag 2, a raw P2PK script). Tag meanings are
        taken from SCRIPT_N_TYPES elsewhere in this project.
    :param type: optional script-type filter. With tag 0 and type None the
        P2PK form of the same key (if known) is matched as well; with
        type == 2 the stored P2PK script replaces the address.
    :param limit: page size (rows per page).
    :param page: 1-based page number.
    :param order: SQL ordering keyword interpolated into the query text
        (expected "asc"/"desc"; interpolated with %, not parameterized).
    :param mode: "verbose" keeps per-input/per-output detail; any other value
        strips vIn/vOut from each returned transaction.
    :param app: application registry providing "db_pool", "testnet",
        "last_block".
    :return: dict with {"data": {page, limit, pages, list}, "time": elapsed}.
    """
    q = time.time()

    # Build the list `a` of binary address keys to match. For a tag-0 address
    # with no explicit type filter, also include the P2PK script form
    # (prefixed with b"\x02") so both representations are covered.
    if address[0] == 0 and type is None:
        a = [address]
        async with app["db_pool"].acquire() as conn:
            script = await conn.fetchval(
                "SELECT script from connector_p2pk_map "
                "WHERE address = $1 LIMIT 1;", address[1:])
            if script is None:
                # Not in the confirmed map — try the unconfirmed P2PK map.
                script = await conn.fetchval(
                    "SELECT script from connector_unconfirmed_p2pk_map "
                    "WHERE address = $1 LIMIT 1;", address[1:])
            if script is not None:
                a.append(b"\x02" + script)

    else:
        async with app["db_pool"].acquire() as conn:
            if address[0] == 0:
                if type == 2:
                    # Explicit P2PK request: substitute the stored script for
                    # the address hash (unconfirmed map first, then confirmed).
                    script = await conn.fetchval(
                        "SELECT script from connector_unconfirmed_p2pk_map "
                        "WHERE address = $1 LIMIT 1;", address[1:])
                    if script is None:
                        script = await conn.fetchval(
                            "SELECT script from connector_p2pk_map "
                            "WHERE address = $1 LIMIT 1;", address[1:])
                    if script is not None:
                        address = b"\x02" + script
                    else:
                        # No P2PK script known for this address — nothing to
                        # list. NOTE(review): the payload shape here
                        # (confirmed/unconfirmed counters) differs from the
                        # normal page/list shape returned below.
                        return {
                            "data": {
                                "confirmed": 0,
                                "unconfirmed": 0
                            },
                            "time": round(time.time() - q, 4)
                        }
            a = [address]

    async with app["db_pool"].acquire() as conn:
        # Total matching mempool transactions, used for the page count.
        count = await conn.fetchval(
            "SELECT count(tx_id) FROM unconfirmed_transaction_map "
            "WHERE address = ANY($1);", a)
        pages = math.ceil(count / limit)

        # Fetch the requested page of raw transactions, ordered by mempool
        # arrival timestamp. `order` is interpolated into the SQL text.
        rows = await conn.fetch(
            "SELECT  "
            "        unconfirmed_transaction.raw_transaction,"
            "        unconfirmed_transaction.tx_id,  "
            "        unconfirmed_transaction.timestamp  "
            "FROM unconfirmed_transaction_map "
            "JOIN unconfirmed_transaction "
            "on unconfirmed_transaction.tx_id = unconfirmed_transaction_map.tx_id "
            "WHERE unconfirmed_transaction_map.address = ANY($1)    "
            "order by  unconfirmed_transaction.timestamp %s "
            "LIMIT $2 OFFSET $3;" % order, a, limit, limit * (page - 1))
        t = set()
        mempool_rank_map = dict()
        for row in rows:
            t.add(row["tx_id"])
        if t:
            # Rank every mempool transaction by feerate (rank 1 = highest
            # feerate) and keep the ranks for the page's tx ids.
            ranks = await conn.fetch(
                """SELECT ranks.rank, ranks.tx_id FROM 
                                                  (SELECT tx_id, rank() OVER(ORDER BY feerate DESC) as rank 
                                                  FROM unconfirmed_transaction) ranks 
                                                  WHERE tx_id = ANY($1) LIMIT $2""",
                t, len(t))
            for r in ranks:
                mempool_rank_map[r["tx_id"]] = r["rank"]

    # target_scripts[i] maps each watched scriptPubKey (hex) to a running
    # amount delta for tx_list[i]: outputs add, inputs subtract.
    target_scripts = []

    tx_list = []
    tx_id_set = set()

    for row in rows:
        tx_id_set.add(row["tx_id"])
        tx = Transaction(row["raw_transaction"], testnet=app["testnet"])
        tx["timestamp"] = row["timestamp"]
        tx_list.append(tx)
        try:
            tx["mempoolRank"] = mempool_rank_map[row["tx_id"]]
        except:
            # No rank found for this tx id — leave mempoolRank unset.
            pass
        ts = dict()
        for d in a:
            if d[0] in (0, 1, 5, 6):
                # Hash-based address types: rebuild the scriptPubKey hex.
                ts[hash_to_script(d[1:], d[0], hex=True)] = 0
            else:
                # Raw-script entries (e.g. b"\x02" + P2PK script).
                ts[d[1:].hex()] = 0
        target_scripts.append(ts)

    # Load the coins spent by this page's transactions; LEFT JOIN pulls in
    # the funding transaction's pointer when the funding tx is confirmed.
    async with app["db_pool"].acquire() as conn:
        stxo = await conn.fetch(
            "SELECT connector_unconfirmed_stxo.input_index,"
            "       connector_unconfirmed_stxo.tx_id,"
            "       connector_unconfirmed_stxo.amount,"
            "       connector_unconfirmed_stxo.address,"
            "       transaction.pointer "
            "FROM connector_unconfirmed_stxo "
            "LEFT OUTER JOIN transaction "
            "ON connector_unconfirmed_stxo.out_tx_id = transaction.tx_id "
            "WHERE connector_unconfirmed_stxo.tx_id = ANY($1);", tx_id_set)
    # (txId hex, input index) -> (address bytes, amount, pointer-or-None)
    stxo_map = {}

    for row in stxo:
        stxo_map[(rh2s(row["tx_id"]),
                  row["input_index"])] = (row["address"], row["amount"],
                                          row["pointer"])

    # Annotate each transaction's inputs with type/amount/address details and
    # accumulate input totals.
    for i in range(len(tx_list)):
        tx_list[i]["inputsAmount"] = 0
        tx_list[i]["inputAddressCount"] = 0
        tx_list[i]["outAddressCount"] = 0
        tx_list[i]["inputsCount"] = len(tx_list[i]["vIn"])
        tx_list[i]["outsCount"] = len(tx_list[i]["vOut"])

        if not tx_list[i]["coinbase"]:
            for k in tx_list[i]["vIn"]:
                d = stxo_map[(tx_list[i]["txId"], k)]
                tx_list[i]["vIn"][k]["type"] = SCRIPT_N_TYPES[d[0][0]]
                tx_list[i]["vIn"][k]["amount"] = d[1]
                tx_list[i]["inputsAmount"] += d[1]
                pointer = d[2]
                if pointer is not None:
                    # pointer >> 39 yields the funding block height.
                    tx_list[i]["vIn"][k]["blockHeight"] = pointer >> 39
                    tx_list[i]["vIn"][k]["confirmations"] = app[
                        "last_block"] - (pointer >> 39) + 1
                else:
                    # Funding transaction itself is still unconfirmed.
                    tx_list[i]["vIn"][k]["blockHeight"] = None
                    tx_list[i]["vIn"][k]["confirmations"] = None

                if d[0][0] in (0, 1, 2, 5, 6):
                    # Address-expressible script types.
                    script_hash = True if d[0][0] in (1, 6) else False
                    witness_version = None if d[0][0] < 5 else 0
                    try:
                        if d[0][0] == 2:
                            # P2PK: derive the address hash from the script.
                            ad = b"\x02" + parse_script(
                                d[0][1:])["addressHash"]
                        else:
                            ad = d[0]
                        tx_list[i]["vIn"][k]["address"] = hash_to_address(
                            ad[1:],
                            testnet=app["testnet"],
                            script_hash=script_hash,
                            witness_version=witness_version)

                        tx_list[i]["vIn"][k][
                            "scriptPubKey"] = address_to_script(
                                tx_list[i]["vIn"][k]["address"], hex=1)
                    except:
                        # Unexpected script data — log context and re-raise.
                        print(tx_list[i]["txId"])
                        print("??", d[0].hex())
                        raise
                    tx_list[i]["inputAddressCount"] += 1
                else:
                    # Non-standard script: expose the raw script hex only.
                    tx_list[i]["vIn"][k]["scriptPubKey"] = d[0][1:].hex()

                tx_list[i]["vIn"][k]["scriptPubKeyOpcodes"] = decode_script(
                    tx_list[i]["vIn"][k]["scriptPubKey"])
                tx_list[i]["vIn"][k]["scriptPubKeyAsm"] = decode_script(
                    tx_list[i]["vIn"][k]["scriptPubKey"], 1)
                # Inputs spending a watched script reduce its running delta.
                for ti in target_scripts[i]:
                    if ti == tx_list[i]["vIn"][k]["scriptPubKey"]:
                        target_scripts[i][ti] -= tx_list[i]["vIn"][k]["amount"]

                # BIP 125: sequence below 0xfffffffe signals replaceability.
                if tx_list[i]["vIn"][k]["sequence"] < 0xfffffffe:
                    tx_list[i]["vIn"][k]["rbf"] = True

        if not tx_list[i]["coinbase"]:
            tx_list[i][
                "fee"] = tx_list[i]["inputsAmount"] - tx_list[i]["amount"]
        else:
            tx_list[i]["fee"] = 0

        tx_list[i]["outputsAmount"] = tx_list[i]["amount"]

    # get information about spent output coins
    async with app["db_pool"].acquire() as conn:
        rows = await conn.fetch(
            "SELECT   outpoint,"
            "         input_index,"
            "       tx_id "
            "FROM connector_unconfirmed_stxo "
            "WHERE out_tx_id = ANY($1);", tx_id_set)
    # (funding txId hex, output index) -> list of spending {txId, vIn}.
    out_map = dict()
    for v in rows:
        i = bytes_to_int(v["outpoint"][32:])
        try:
            out_map[(rh2s(v["outpoint"][:32]), i)].append({
                "txId":
                rh2s(v["tx_id"]),
                "vIn":
                v["input_index"]
            })
        except:
            # First spender seen for this outpoint.
            out_map[(rh2s(v["outpoint"][:32]), i)] = [{
                "txId": rh2s(v["tx_id"]),
                "vIn": v["input_index"]
            }]

    # todo get information about double spent coins
    # async with app["db_pool"].acquire() as conn:
    #     rows = await conn.fetch("SELECT   outpoint,"
    #                             "         input_index,"
    #                             "       tx_id "
    #                             "FROM connector_unconfirmed_stxo "
    #                             "WHERE out_tx_id = ANY($1);", tx_id_set)

    # Attach spend info to outputs, finish per-script deltas, and trim
    # internal/verbose fields before returning.
    for t in range(len(tx_list)):
        for i in tx_list[t]["vOut"]:
            try:
                tx_list[t]["vOut"][i]["spent"] = out_map[(tx_list[t]["txId"],
                                                          int(i))]
            except:
                # Not spent by any known mempool transaction.
                tx_list[t]["vOut"][i]["spent"] = []

            # Outputs paying a watched script increase its running delta.
            for ti in target_scripts[t]:
                if ti == tx_list[t]["vOut"][i]["scriptPubKey"]:
                    target_scripts[t][ti] += tx_list[t]["vOut"][i]["value"]
            if "address" in tx_list[t]["vOut"][i]:
                tx_list[t]["outAddressCount"] += 1

        # Net amount for the watched address across this transaction.
        address_amount = 0
        for ti in target_scripts[t]:
            address_amount += target_scripts[t][ti]

        tx_list[t]["amount"] = address_amount

        if mode != "verbose":
            del tx_list[t]["vIn"]
            del tx_list[t]["vOut"]
        # Drop serialization/internal fields that are meaningless for an
        # unconfirmed transaction (blockHash, confirmations, etc.).
        del tx_list[t]["format"]
        del tx_list[t]["testnet"]
        del tx_list[t]["rawTx"]
        del tx_list[t]["hash"]
        del tx_list[t]["blockHash"]
        del tx_list[t]["time"]
        del tx_list[t]["confirmations"]
        del tx_list[t]["blockIndex"]

        try:
            del tx_list[t]["flag"]
        except:
            # "flag" only exists on segwit-serialized transactions.
            pass

    return {
        "data": {
            "page": page,
            "limit": limit,
            "pages": pages,
            "list": tx_list
        },
        "time": round(time.time() - q, 4)
    }
Beispiel #22
0
async def address_transactions(address, type, limit, page, order, mode,
                               timeline, app):
    """Paginated confirmed-transaction history for an address.

    :param address: bytes; first byte is a script-type tag, the remainder is
        the address hash (or, for tag 2, a raw P2PK script).
    :param type: optional script-type filter; with tag 0 and type == 2 the
        stored P2PK script replaces the address (see the branch below).
    :param limit: page size (rows per page).
    :param page: 1-based page number.
    :param order: SQL ordering keyword interpolated into query text
        (expected "asc"/"desc").
    :param mode: "verbose" keeps per-input/per-output detail; otherwise
        vIn/vOut are stripped from each returned transaction.
    :param timeline: when truthy, attach a cumulative "timelineState"
        (running balance counters) to every transaction on the page.
    :param app: application registry providing "db_pool", "testnet",
        "last_block", "block_map_time".
    :return: dict with {"data": {page, limit, pages, list}, "time": elapsed}.

    NOTE(review): the bare print() calls throughout look like leftover
    timing/debug instrumentation — confirm whether they should be removed or
    routed through a logger.
    """
    q = time.time()
    qt = time.time()
    pages = 0

    # Build the list `a` of binary address keys to match; same P2PK
    # resolution scheme as address_unconfirmed_transactions.
    if address[0] == 0 and type is None:
        a = [address]
        async with app["db_pool"].acquire() as conn:
            script = await conn.fetchval(
                "SELECT script from connector_p2pk_map "
                "WHERE address = $1 LIMIT 1;", address[1:])
            if script is not None:
                a.append(b"\x02" + script)

    else:
        async with app["db_pool"].acquire() as conn:
            if address[0] == 0:
                if type == 2:
                    # Explicit P2PK request: unconfirmed map first, then the
                    # confirmed map.
                    script = await conn.fetchval(
                        "SELECT script from connector_unconfirmed_p2pk_map "
                        "WHERE address = $1 LIMIT 1;", address[1:])
                    if script is None:
                        script = await conn.fetchval(
                            "SELECT script from connector_p2pk_map "
                            "WHERE address = $1 LIMIT 1;", address[1:])
                    if script is not None:
                        address = b"\x02" + script
                    else:
                        # No P2PK script known — nothing to list.
                        return {
                            "data": {
                                "confirmed": 0,
                                "unconfirmed": 0
                            },
                            "time": round(time.time() - q, 4)
                        }
            a = [address]

    print("1", time.time() - qt)
    qt = time.time()

    # Page count is derived from the address state (received + sent tx
    # counts across all matched address forms).
    pages = 0
    for z in a:
        s = await address_state_extended(z, app)
        pages += s["data"]["receivedTxCount"] + s["data"]["sentTxCount"]
    pages = math.ceil(pages / limit)

    async with app["db_pool"].acquire() as conn:
        # get total transactions count to determine pages count
        print("4", time.time() - qt)
        qt = time.time()

        # Pointers for the requested page; a pointer encodes
        # block height (bits >= 39) and intra-block position (lower bits).
        tx_id_list = await conn.fetch(
            "SELECT  pointer  FROM transaction_map "
            "WHERE address = ANY($1) order by  pointer %s "
            "LIMIT $2 OFFSET $3;" % order, a, limit, limit * (page - 1))
        l = [t["pointer"] for t in tx_id_list]

        print("4.1", time.time() - qt)
        qt = time.time()

        rows = await conn.fetch(
            "SELECT  transaction.pointer,"
            "        transaction.raw_transaction,"
            "        transaction.tx_id,  "
            "        transaction.timestamp  "
            "FROM transaction "
            "WHERE pointer = ANY($1) "
            "order by  pointer %s "
            "LIMIT $2 ;" % order, l, limit)

        print("3", time.time() - qt)
        qt = time.time()
    # target_scripts[i] maps each watched scriptPubKey (hex) to counters:
    # r=received amount, s=sent amount, i=input count, o=output count.
    target_scripts = []
    tx_list = []
    s_pointers = []
    tx_id_set = set()
    o_pointers = []

    for row in rows:
        tx_id_set.add(row["tx_id"])
        tx = Transaction(row["raw_transaction"], testnet=app["testnet"])
        # pointer layout: height << 39 | blockIndex << 20 (524287 = 2**19-1).
        tx["blockHeight"] = row["pointer"] >> 39
        tx["blockIndex"] = (row["pointer"] >> 20) & 524287
        tx["timestamp"] = row["timestamp"]
        tx["confirmations"] = app["last_block"] - tx["blockHeight"] + 1
        try:
            tx["blockTime"] = app["block_map_time"][tx["blockHeight"]][1]
        except:
            # Block time cache miss — leave blockTime unset.
            pass
        tx_pointer = (tx["blockHeight"] << 39) + (tx["blockIndex"] << 20)

        # Spent-input pointers (pointer + input index) and output pointers
        # (bit 19 set marks the output side).
        # NOTE(review): inputs are keyed here off row["pointer"] but looked
        # up below via tx_pointer + k — confirm the transaction-table pointer
        # always has its low 20 bits clear so the two agree.
        for i in tx["vIn"]:
            s_pointers.append(row["pointer"] + i)
        for i in tx["vOut"]:
            o_pointers.append(tx_pointer + (1 << 19) + i)

        tx_list.append(tx)
        ts = dict()
        for d in a:
            if d[0] in (0, 1, 5, 6):
                ts[hash_to_script(d[1:], d[0], hex=True)] = {
                    "r": 0,
                    "s": 0,
                    "i": 0,
                    "o": 0
                }
            else:
                ts[d[1:].hex()] = {"r": 0, "s": 0, "i": 0, "o": 0}
        target_scripts.append(ts)

    # Coins spent by this page's transactions.
    async with app["db_pool"].acquire() as conn:
        stxo = await conn.fetch(
            "SELECT s_pointer, pointer, amount, address FROM stxo "
            "WHERE stxo.s_pointer = ANY($1);", s_pointers)
    # s_pointer -> (address bytes, amount, funding pointer)
    stxo_map = {}

    for row in stxo:
        stxo_map[row["s_pointer"]] = (row["address"], row["amount"],
                                      row["pointer"])

    # Annotate inputs with type/amount/address detail and accumulate the
    # per-script sent counters.
    for i in range(len(tx_list)):
        tx_list[i]["inputsAmount"] = 0
        tx_list[i]["inputAddressCount"] = 0
        tx_list[i]["outAddressCount"] = 0
        tx_list[i]["inputsCount"] = len(tx_list[i]["vIn"])
        tx_list[i]["outsCount"] = len(tx_list[i]["vOut"])
        tx_pointer = (tx_list[i]["blockHeight"] << 39) + (
            tx_list[i]["blockIndex"] << 20)
        if not tx_list[i]["coinbase"]:
            for k in tx_list[i]["vIn"]:
                d = stxo_map[tx_pointer + k]
                tx_list[i]["vIn"][k]["type"] = SCRIPT_N_TYPES[d[0][0]]
                tx_list[i]["vIn"][k]["amount"] = d[1]
                tx_list[i]["inputsAmount"] += d[1]
                pointer = d[2]
                tx_list[i]["vIn"][k]["blockHeight"] = pointer >> 39
                tx_list[i]["vIn"][k]["confirmations"] = app["last_block"] - (
                    pointer >> 39) + 1

                if d[0][0] in (0, 1, 2, 5, 6):
                    # Address-expressible script types.
                    script_hash = True if d[0][0] in (1, 6) else False
                    witness_version = None if d[0][0] < 5 else 0
                    try:
                        if d[0][0] == 2:
                            # P2PK: derive the address hash from the script.
                            ad = b"\x02" + parse_script(
                                d[0][1:])["addressHash"]
                        else:
                            ad = d[0]
                        tx_list[i]["vIn"][k]["address"] = hash_to_address(
                            ad[1:],
                            testnet=app["testnet"],
                            script_hash=script_hash,
                            witness_version=witness_version)

                        tx_list[i]["vIn"][k][
                            "scriptPubKey"] = address_to_script(
                                tx_list[i]["vIn"][k]["address"], hex=1)
                    except:
                        # Unexpected script data — log context and re-raise.
                        print(tx_list[i]["txId"])
                        print("??", d[0].hex())
                        raise
                    tx_list[i]["inputAddressCount"] += 1
                else:
                    # Non-standard script: raw script hex only.
                    tx_list[i]["vIn"][k]["scriptPubKey"] = d[0][1:].hex()

                tx_list[i]["vIn"][k]["scriptPubKeyOpcodes"] = decode_script(
                    tx_list[i]["vIn"][k]["scriptPubKey"])
                tx_list[i]["vIn"][k]["scriptPubKeyAsm"] = decode_script(
                    tx_list[i]["vIn"][k]["scriptPubKey"], 1)
                # Inputs spending a watched script add to its sent counters.
                for ti in target_scripts[i]:
                    if ti == tx_list[i]["vIn"][k]["scriptPubKey"]:
                        target_scripts[i][ti]["s"] += tx_list[i]["vIn"][k][
                            "amount"]
                        target_scripts[i][ti]["i"] += 1

        if not tx_list[i]["coinbase"]:
            tx_list[i][
                "fee"] = tx_list[i]["inputsAmount"] - tx_list[i]["amount"]
        else:
            tx_list[i]["fee"] = 0

        tx_list[i]["outputsAmount"] = tx_list[i]["amount"]

    print("2", time.time() - qt)
    qt = time.time()
    # get information about spent output coins
    # Mempool spenders of this page's outputs.
    async with app["db_pool"].acquire() as conn:
        rows = await conn.fetch(
            "SELECT   outpoint,"
            "         input_index,"
            "       tx_id "
            "FROM connector_unconfirmed_stxo "
            "WHERE out_tx_id = ANY($1);", tx_id_set)
    # (funding txId hex, output index) -> list of spending {txId, vIn}.
    out_map = dict()
    for v in rows:
        i = bytes_to_int(v["outpoint"][32:])
        try:
            out_map[(rh2s(v["outpoint"][:32]), i)].append({
                "txId":
                rh2s(v["tx_id"]),
                "vIn":
                v["input_index"]
            })
        except:
            # First spender seen for this outpoint.
            out_map[(rh2s(v["outpoint"][:32]), i)] = [{
                "txId": rh2s(v["tx_id"]),
                "vIn": v["input_index"]
            }]

    # Confirmed spenders of this page's outputs, resolved via the stxo table.
    async with app["db_pool"].acquire() as conn:
        rows = await conn.fetch(
            "SELECT stxo.pointer,"
            "       stxo.s_pointer,"
            "       transaction.tx_id,  "
            "       transaction.timestamp  "
            "FROM stxo "
            "JOIN transaction "
            # NOTE(review): the join masks the low 18 bits of s_pointer while
            # indices elsewhere use 19/20-bit fields — confirm this matches
            # the stxo pointer encoding.
            "ON transaction.pointer = (stxo.s_pointer >> 18)<<18 "
            "WHERE stxo.pointer = ANY($1);", o_pointers)
        p_out_map = dict()
        for v in rows:
            p_out_map[v["pointer"]] = [{
                "txId":
                rh2s(v["tx_id"]),
                "vIn":
                v["s_pointer"] & 0b111111111111111111
            }]
    print("1", time.time() - qt)
    qt = time.time()

    # Attach spend info to outputs (confirmed spender first, then mempool),
    # finish per-script counters, and trim verbose/internal fields.
    for t in range(len(tx_list)):
        if tx_list[t]["blockHeight"] is not None:
            o_pointer = (tx_list[t]["blockHeight"] << 39) + (
                tx_list[t]["blockIndex"] << 20) + (1 << 19)
            for i in tx_list[t]["vOut"]:
                try:
                    tx_list[t]["vOut"][i]["spent"] = p_out_map[o_pointer + i]
                except:
                    try:
                        tx_list[t]["vOut"][i]["spent"] = out_map[(
                            tx_list[t]["txId"], int(i))]
                    except:
                        # Unspent as far as this view can tell.
                        tx_list[t]["vOut"][i]["spent"] = []
                # Outputs paying a watched script add to received counters.
                for ti in target_scripts[t]:
                    if ti == tx_list[t]["vOut"][i]["scriptPubKey"]:
                        target_scripts[t][ti]["r"] += tx_list[t]["vOut"][i][
                            "value"]
                        target_scripts[t][ti]["o"] += 1
                if "address" in tx_list[t]["vOut"][i]:
                    tx_list[t]["outAddressCount"] += 1

            # Aggregate per-script counters into per-transaction totals for
            # the watched address.
            address_amount = 0
            address_received = 0
            address_outputs = 0
            address_sent = 0
            address_inputs = 0
            for ti in target_scripts[t]:
                address_amount += target_scripts[t][ti]["r"] - target_scripts[
                    t][ti]["s"]
                address_received += target_scripts[t][ti]["r"]
                address_outputs += target_scripts[t][ti]["o"]
                address_sent += target_scripts[t][ti]["s"]
                address_inputs += target_scripts[t][ti]["i"]

            tx_list[t]["amount"] = address_amount
            tx_list[t]["addressReceived"] = address_received
            tx_list[t]["addressOuts"] = address_outputs
            tx_list[t]["addressSent"] = address_sent
            tx_list[t]["addressInputs"] = address_inputs

        if mode != "verbose":
            del tx_list[t]["vIn"]
            del tx_list[t]["vOut"]
        # Drop serialization/internal fields from the response.
        del tx_list[t]["format"]
        del tx_list[t]["testnet"]
        del tx_list[t]["rawTx"]
        del tx_list[t]["hash"]
        del tx_list[t]["blockHash"]
        del tx_list[t]["time"]

        try:
            del tx_list[t]["flag"]
        except:
            # "flag" only exists on segwit-serialized transactions.
            pass

    if timeline and tx_list:
        # Cumulative address state: start from the state just before the
        # oldest transaction on the page, then fold each page transaction in
        # chronological order regardless of the requested sort direction.
        tx_pointer = (tx_list[-1]["blockHeight"] << 39) + (
            tx_list[-1]["blockIndex"] << 20)
        print(tx_list[-1]["blockHeight"])
        r = await address_state_extended_in_pointer(a, tx_pointer, app)
        print(r)
        state = {
            "receivedAmount": r['data']["receivedAmount"],
            "receivedTxCount": r['data']["receivedTxCount"],
            "sentAmount": r['data']["sentAmount"],
            "sentTxCount": r['data']["sentTxCount"],
            "receivedOutsCount": r['data']["receivedOutsCount"],
            "spentOutsCount": r['data']["spentOutsCount"]
        }

        k = len(tx_list)
        l = len(tx_list)
        while k:
            k -= 1
            # For "desc" the oldest tx is last, so walk from the end;
            # otherwise walk from the start.
            if order == "desc":
                i = k
            else:
                i = l - (k + 1)
            tx = tx_list[i]
            state["receivedAmount"] += tx["addressReceived"]
            state["receivedOutsCount"] += tx["addressOuts"]
            state["sentAmount"] += tx["addressSent"]
            state["spentOutsCount"] += tx["addressInputs"]
            # Classify the tx as received or sent by the larger leg.
            if tx["addressReceived"] >= tx["addressSent"]:
                state["receivedTxCount"] += 1
            else:
                state["sentTxCount"] += 1
            tx_list[i]["timelineState"] = {
                "receivedAmount": state["receivedAmount"],
                "receivedTxCount": state["receivedTxCount"],
                "sentAmount": state["sentAmount"],
                "sentTxCount": state["sentTxCount"],
                "receivedOutsCount": state["receivedOutsCount"],
                "spentOutsCount": state["spentOutsCount"]
            }

    return {
        "data": {
            "page": page,
            "limit": limit,
            "pages": pages,
            "list": tx_list
        },
        "time": round(time.time() - q, 4)
    }
Beispiel #23
0
async def mempool_doublespend(limit, page, order, from_timestamp, dbs_type,
                              mode, app):
    qt = time.time()
    async with app["db_pool"].acquire() as conn:
        count = await conn.fetchval(
            "SELECT count(tx_id) FROM mempool_dbs "
            "WHERE timestamp > $1;", from_timestamp)

        pages = math.ceil(count / limit)

        if dbs_type == None:
            rows = await conn.fetch(
                "SELECT   "
                "        mempool_dbs.tx_id,"
                "        mempool_dbs.child,"
                "        unconfirmed_transaction.amount,  "
                "        unconfirmed_transaction.size,  "
                "        unconfirmed_transaction.b_size,  "
                "        unconfirmed_transaction.fee,  "
                "        unconfirmed_transaction.feerate,  "
                "        unconfirmed_transaction.rbf,  "
                "        unconfirmed_transaction.segwit,  "
                "        mempool_dbs.timestamp,  "
                "        unconfirmed_transaction.raw_transaction "
                "FROM mempool_dbs "
                "JOIN unconfirmed_transaction "
                "ON unconfirmed_transaction.tx_id = mempool_dbs.tx_id "
                "WHERE mempool_dbs.timestamp > $1 "
                "ORDER BY mempool_dbs.timestamp %s LIMIT $2 OFFSET $3;" %
                order, from_timestamp, limit, limit * (page - 1))
        else:
            rows = await conn.fetch(
                "SELECT   "
                "        mempool_dbs.tx_id,"
                "        mempool_dbs.child,"
                "        unconfirmed_transaction.amount,  "
                "        unconfirmed_transaction.size,  "
                "        unconfirmed_transaction.b_size,  "
                "        unconfirmed_transaction.fee,  "
                "        unconfirmed_transaction.feerate,  "
                "        unconfirmed_transaction.rbf,  "
                "        unconfirmed_transaction.segwit,  "
                "        mempool_dbs.timestamp,  "
                "        unconfirmed_transaction.raw_transaction "
                "FROM mempool_dbs "
                "JOIN unconfirmed_transaction "
                "ON unconfirmed_transaction.tx_id = mempool_dbs.tx_id "
                "WHERE child = $4 and mempool_dbs.timestamp > $1 "
                "ORDER BY mempool_dbs.timestamp %s LIMIT $2 OFFSET $3;" %
                order, from_timestamp, limit, limit * (page - 1), dbs_type)

        h = [r["tx_id"] for r in rows]

        m_rows = await conn.fetch(
            """SELECT ranks.tx_id, ranks.rank FROM 
                                  (SELECT tx_id, rank() OVER(ORDER BY feerate DESC) as rank 
                                  FROM unconfirmed_transaction) ranks 
                                  WHERE tx_id = ANY($1) LIMIT $2""", h, len(h))
    rank_map = dict()
    for row in m_rows:
        rank_map[row["tx_id"]] = row["rank"]

    tx_list = []
    dep_chain = dict()
    conflict_outpoints_chain = dict()
    conflict_outpoints_accum = set()
    last_dep = dict()
    last_dep_map = dict()
    conflict_outpoint_map = dict()
    us_pointers = deque()
    us_pointers_inputs = deque()

    for row in rows:
        tx = Transaction(row["raw_transaction"], testnet=app["testnet"])
        tx["fee"] = row["fee"]
        tx["time"] = row["timestamp"]
        tx["feeRate"] = round(row["feerate"], 2)
        tx["rbf"] = bool(row["feerate"])
        tx["segwit"] = bool(row["feerate"])
        tx["mempoolRank"] = rank_map[row["tx_id"]]
        if "flag" in tx:
            del tx["flag"]
        tx["conflict"] = {
            "blockchain": [],
            "txCompetitors": [],
            "unconfirmedChain": [],
        }

        tx_list.append(tx)

        conflict_outpoint = deque()
        dep_chain[tx["txId"]] = [{s2rh(tx["txId"])}, set()]

        for q in tx["vIn"]:
            op = b"%s%s" % (s2rh(
                tx["vIn"][q]["txId"]), int_to_bytes(tx["vIn"][q]["vOut"]))
            try:
                last_dep[op[:32]].add(tx["vIn"][q]["vOut"])
            except:
                last_dep[op[:32]] = {tx["vIn"][q]["vOut"]}
            dep_chain[tx["txId"]][-1].add(op[:32])
            us_pointers_inputs.append(op[:32])
            try:
                last_dep_map[op[:32]].add(tx["txId"])
            except:
                last_dep_map[op[:32]] = {tx["txId"]}

            conflict_outpoint_map[op] = tx["txId"]
            conflict_outpoint.append(op)
            conflict_outpoints_accum.add(op)

        conflict_outpoints_chain[tx["txId"]] = [conflict_outpoint]

        us_pointers.append(s2rh(tx["txId"]))

    # get info about inputs
    if mode == "verbose" and us_pointers:
        o_pointers = us_pointers
        async with app["db_pool"].acquire() as conn:
            rows = await conn.fetch(
                "SELECT   outpoint,"
                "         input_index,"
                "       out_tx_id, "
                "       tx_id,"
                "       address, "
                "       amount "
                "    "
                "FROM connector_unconfirmed_stxo "
                "WHERE tx_id =  ANY($1);", us_pointers)
            c_rows = await conn.fetch(
                """
                                       SELECT pointer, tx_id FROM transaction 
                                       WHERE tx_id = ANY($1);
                                       """, us_pointers_inputs)
        tx_id_map_pointer = dict()

        for v in c_rows:
            tx_id_map_pointer[rh2s(v["tx_id"])] = v["pointer"]

        us_pointers = dict()
        for v in rows:
            us_pointers[(rh2s(v["tx_id"]), v["input_index"])] = v

        # get information about spent output coins

        async with app["db_pool"].acquire() as conn:
            rows = await conn.fetch(
                "SELECT   outpoint,"
                "         input_index,"
                "       tx_id, "
                "       out_tx_id "
                "FROM connector_unconfirmed_stxo "
                "WHERE out_tx_id = ANY($1);", o_pointers)
        spent_outs_map = dict()
        for r in rows:
            t = rh2s(r["out_tx_id"])
            i = bytes_to_int(r["outpoint"][32:])
            try:
                spent_outs_map[t][i] = {
                    "txId": rh2s(r["tx_id"]),
                    "vIn": r["input_index"]
                }
            except:
                spent_outs_map[t] = {
                    i: {
                        "txId": rh2s(r["tx_id"]),
                        "vIn": r["input_index"]
                    }
                }

        for t in range(len(tx_list)):

            tx_list[t]["inputsAmount"] = 0
            for i in tx_list[t]["vIn"]:
                d = us_pointers[(tx_list[t]["txId"], i)]
                tx_list[t]["vIn"][i]["type"] = SCRIPT_N_TYPES[d["address"][0]]
                tx_list[t]["vIn"][i]["amount"] = d["amount"]
                # Accumulate this input's value into the tx-level total.
                tx_list[t]["inputsAmount"] += d["amount"]
                try:
                    # Funding tx is confirmed: the packed pointer stores the
                    # block height in the bits above bit 39.
                    pointer = tx_id_map_pointer[tx_list[t]["vIn"][i]["txId"]]
                    tx_list[t]["vIn"][i]["blockHeight"] = pointer >> 39
                    tx_list[t]["vIn"][i]["confirmations"] = app[
                        "last_block"] - (pointer >> 39) + 1
                except:
                    # Funding tx not in the pointer map -> unconfirmed parent.
                    tx_list[t]["vIn"][i]["blockHeight"] = None
                    tx_list[t]["vIn"][i]["confirmations"] = None

                # First byte of the stored "address" is a type tag:
                # 1/6 are script-hash types, >= 5 are witness v0 types,
                # 2 carries a raw script whose address hash must be parsed.
                if d["address"][0] in (0, 1, 2, 5, 6):
                    script_hash = True if d["address"][0] in (1, 6) else False
                    witness_version = None if d["address"][0] < 5 else 0
                    try:
                        if d["address"][0] == 2:
                            ad = b"\x02" + parse_script(
                                d["address"][1:])["addressHash"]
                        else:
                            ad = d["address"]
                        tx_list[t]["vIn"][i]["address"] = hash_to_address(
                            ad[1:],
                            testnet=app["testnet"],
                            script_hash=script_hash,
                            witness_version=witness_version)

                        tx_list[t]["vIn"][i][
                            "scriptPubKey"] = address_to_script(
                                tx_list[t]["vIn"][i]["address"], hex=1)
                    except:
                        # Unexpected script payload: dump context and re-raise
                        # (deliberately not swallowed).
                        print(tx_list[t]["txId"])
                        print("??", d["address"].hex())
                        raise

                # NOTE(review): `tx_list` is a list, so `tx_list["address"]`
                # would raise TypeError if this branch were ever reached; it
                # is also shadowed by the branch above, which already handles
                # type 2. Almost certainly meant `d["address"][0] == 2` —
                # confirm against the original upstream source before fixing.
                elif tx_list["address"][0] == 2:
                    tx_list[t]["vIn"][i]["address"] = script_to_address(
                        d["address"][1:], testnet=app["testnet"])
                    tx_list[t]["vIn"][i]["scriptPubKey"] = d["address"][
                        1:].hex()
                else:
                    # Non-address script types: expose the raw scriptPubKey.
                    tx_list[t]["vIn"][i]["scriptPubKey"] = d["address"][
                        1:].hex()
                tx_list[t]["vIn"][i]["scriptPubKeyOpcodes"] = decode_script(
                    tx_list[t]["vIn"][i]["scriptPubKey"])
                tx_list[t]["vIn"][i]["scriptPubKeyAsm"] = decode_script(
                    tx_list[t]["vIn"][i]["scriptPubKey"], 1)

            # Attach the spend list to every output; fill it from the
            # precomputed spent-outputs map when present.
            for i in tx_list[t]["vOut"]:
                tx_list[t]["vOut"][i]['spent'] = []
            try:
                s_outs = spent_outs_map[tx_list[t]["txId"]]
                for s in s_outs:
                    tx_list[t]["vOut"][s]['spent'].append(s_outs[s])
            except:
                # No recorded spends for this tx.
                pass

    # find out mainnet competitors and invalid chains
    # Breadth-first walk over the unconfirmed dependency graph: each round
    # resolves which dependency tx ids are already confirmed, records them
    # as "last pointers", then descends one level deeper via the
    # unconfirmed STXO table until no dependencies remain.
    last_pointer_map = dict()
    while last_dep:
        async with app["db_pool"].acquire() as conn:
            rows = await conn.fetch(
                "SELECT  tx_id, pointer FROM transaction where tx_id = ANY($1)",
                last_dep.keys())
        tmp_lp = dict()
        for row in rows:
            tl = last_dep_map[row["tx_id"]]
            for t in tl:
                try:
                    v_outs = last_dep[row["tx_id"]]
                    # Confirmed dependency: drop it from the pending tail of
                    # the chain for every affected transaction `t`.
                    dep_chain[t][-1].remove(row["tx_id"])
                    try:
                        tmp_lp[t][row["pointer"]] = {
                            "txId": row["tx_id"],
                            "vOuts": v_outs
                        }
                    except:
                        # First pointer recorded for `t`.
                        tmp_lp[t] = {
                            row["pointer"]: {
                                "txId": row["tx_id"],
                                "vOuts": v_outs
                            }
                        }

                except:
                    pass

        for t in tmp_lp:
            last_pointer_map[t] = tmp_lp[t]
            # Drop the chain level once everything in it was resolved.
            if not dep_chain[t][-1]:
                dep_chain[t].pop()

        # go deeper
        async with app["db_pool"].acquire() as conn:
            rows = await conn.fetch(
                "SELECT connector_unconfirmed_stxo.tx_id,"
                "       connector_unconfirmed_stxo.out_tx_id,"
                "       outpoint "
                "from connector_unconfirmed_stxo "
                "WHERE tx_id = ANY($1)", last_dep.keys())
        last_dep = dict()
        new_last_dep_map = dict()
        new_dep_chain = dict()
        conflict_outpoints = deque()
        tmp_co = dict()
        for row in rows:
            tl = last_dep_map[row["tx_id"]]
            for t in tl:
                # try/except used defaultdict-style: append to an existing
                # bucket or create it on first use.
                try:
                    new_last_dep_map[row["out_tx_id"]].add(t)
                except:
                    new_last_dep_map[row["out_tx_id"]] = {t}

                # Outpoint is tx hash (32 bytes) + serialized output index.
                o = row["outpoint"][:32]
                i = bytes_to_int(row["outpoint"][32:])
                try:
                    new_dep_chain[t].add(o)
                except:
                    new_dep_chain[t] = {o}

                try:
                    last_dep[row["out_tx_id"]].add(i)
                except:
                    last_dep[row["out_tx_id"]] = {i}

                conflict_outpoints.append(row["outpoint"])
                conflict_outpoints_accum.add(row["outpoint"])
                try:
                    tmp_co[t].append(row["outpoint"])
                except:
                    tmp_co[t] = [row["outpoint"]]

        for t in tmp_co:
            conflict_outpoints_chain[t].append(tmp_co[t])
            dep_chain[t].append(new_dep_chain[t])
        last_dep_map = new_last_dep_map

    # check invalid transactions competitors
    # Find every unconfirmed tx spending one of the accumulated conflict
    # outpoints, joined with its mempool metadata (size/fee/rbf/segwit).
    async with app["db_pool"].acquire() as conn:
        rows = await conn.fetch(
            "SELECT connector_unconfirmed_stxo.tx_id,"
            "       connector_unconfirmed_stxo.out_tx_id, "
            "       connector_unconfirmed_stxo.input_index, "
            "       connector_unconfirmed_stxo.outpoint, "
            "       unconfirmed_transaction.size, "
            "       unconfirmed_transaction.b_size, "
            "       unconfirmed_transaction.feerate, "
            "       unconfirmed_transaction.fee,"
            "       unconfirmed_transaction.rbf, "
            "       unconfirmed_transaction.segwit "
            "from connector_unconfirmed_stxo "
            "JOIN unconfirmed_transaction "
            "ON unconfirmed_transaction.tx_id = connector_unconfirmed_stxo.tx_id "
            "WHERE outpoint = ANY($1)", conflict_outpoints_accum)

    # Group competitor rows by the outpoint they double-spend.
    conflict_outpoints_map = dict()
    for row in rows:
        try:
            conflict_outpoints_map[row["outpoint"]].append(row)
        except:
            conflict_outpoints_map[row["outpoint"]] = [row]

    # convert last pointers to coins
    # A coin pointer packs the output index into the low bits:
    # pointer + (1 << 19) + v_out; the low 19 bits are decoded below with
    # `& 524287` (2**19 - 1).
    o_pointers = set()
    pointer_map_coin = dict()
    pointer_map_competitor = dict()

    for t in last_pointer_map:
        for pointer in last_pointer_map[t]:
            for v_out in last_pointer_map[t][pointer]["vOuts"]:
                pointer_map_coin[pointer + (1 << 19) + v_out] = {
                    "txId": rh2s(last_pointer_map[t][pointer]["txId"]),
                    "vOut": v_out,
                    "block": pointer >> 39
                }
                o_pointers.add(pointer + (1 << 19) + v_out)
    if o_pointers:
        # Look up confirmed spenders of those coins (mainnet competitors).
        async with app["db_pool"].acquire() as conn:
            rows = await conn.fetch(
                "SELECT transaction.tx_id, stxo.s_pointer, stxo.pointer  "
                "FROM stxo "
                "JOIN transaction on transaction.pointer = (stxo.s_pointer >> 18)<<18 "
                "WHERE stxo.pointer = ANY($1);", o_pointers)

        for row in rows:
            pointer_map_competitor[row["pointer"]] = {
                "txId": rh2s(row["tx_id"]),
                "vIn": row["s_pointer"] & 524287,
                "block": row["s_pointer"] >> 39
            }

    # Decorate each listed transaction with its conflict information and
    # strip the internal/bulky fields before returning.
    for i in range(len(tx_list)):
        t = tx_list[i]["txId"]
        m_conflict = []
        if t not in last_pointer_map:
            continue
        for pointer in last_pointer_map[t]:
            for v_out in last_pointer_map[t][pointer]["vOuts"]:
                c_pointer = pointer + (1 << 19) + v_out
                dbs = pointer_map_coin[c_pointer]
                m_conflict.append({"outpoint": dbs})

        tx_list[i]["conflict"]["blockchain"] = m_conflict
        tx_list[i]["conflict"]["unconfirmedChain"] = [[rh2s(y) for y in l]
                                                      for l in dep_chain[t]]

        i_conflict = []
        # `atx` tracks tx ids belonging to this tx's own chain so they are
        # not reported as competitors of themselves.
        atx = {s2rh(t)}
        for outpoints in conflict_outpoints_chain[t]:
            [atx.add(out[:32]) for out in outpoints]
            m = []
            for out in outpoints:
                rows = conflict_outpoints_map[out]
                for row in rows:
                    if row["tx_id"] not in atx:
                        m.append({
                            "doublespend": {
                                "txId": rh2s(row["outpoint"][:32]),
                                "vOut": bytes_to_int(row["outpoint"][32:])
                            },
                            "competitor": {
                                "txId": rh2s(row["tx_id"]),
                                "vIn": row["input_index"],
                                "size": row["size"],
                                "bSize": row["b_size"],
                                "feeRate": round(row["feerate"], 2),
                                "fee": row["fee"],
                                "rbf": bool(row["rbf"]),
                                "segwit": bool(row["segwit"])
                            }
                        })
            i_conflict.append(m)

        tx_list[i]["conflict"]["txCompetitors"] = i_conflict
        tx_list[i]["valid"] = True
        if mode == "brief":
            del tx_list[i]["vIn"]
            del tx_list[i]["vOut"]
        del tx_list[i]["format"]
        del tx_list[i]["testnet"]
        del tx_list[i]["blockTime"]
        del tx_list[i]["rawTx"]
        del tx_list[i]["blockHash"]
        del tx_list[i]["confirmations"]
        del tx_list[i]["blockIndex"]

    return {
        "data": {
            "page": page,
            "limit": limit,
            "pages": pages,
            "count": count,
            "fromTimestamp": from_timestamp,
            "list": tx_list
        },
        "time": round(time.time() - qt, 4)
    }
# Example #24 (score: 0)
    async def processor(self):
        """Endless mempool-analytics loop.

        Incrementally reads new rows from the connector's unconfirmed
        STXO/UTXO and unconfirmed_transaction tables, aggregates input,
        output and transaction statistics (amounts, fee rates, sizes,
        double-spends), and upserts one JSON row per minute into
        ``mempool_analytica``.  On every new best block the accumulators
        are reset and rebuilt from scratch.
        """
        # Incremental high-water marks: only rows with id greater than these
        # are fetched each round.
        utxo_sequence = 0
        stxo_sequence = 0
        tx_sequence = 0
        best_fee = 1
        dbs = set()          # tx ids detected as double-spends
        dbs_childs = set()   # descendants of double-spend txs
        outputs, inputs, transactions = self.refresh_stat()
        truncate_dbs_table = True

        # Rolling windows of per-minute "best fee" samples: one hour and
        # four hours; pre-seeded from the last 240 stored minutes.
        best_fee_hourly = ListCache(60 * 60)
        best_fee_4h = ListCache(60 * 60 * 4)
        async with self.db_pool.acquire() as conn:
            rows = await conn.fetch(
                "SELECT minute, transactions->'feeRate'->'best' as best FROM  mempool_analytica "
                "order by minute desc LIMIT 240;")
        c = 0
        for row in rows:
            if row["best"] is not None:
                if c < 60:
                    best_fee_hourly.set(float(row["best"]))
                best_fee_4h.set(float(row["best"]))
            c += 1

        while True:
            try:
                # Wait for the connector bootstrap before doing real work.
                if not self.bootstrap_completed:
                    async with self.db_pool.acquire() as conn:
                        v = await conn.fetchval(
                            "SELECT value FROM  service "
                            "WHERE name = 'bootstrap_completed' LIMIT 1;")
                    if v == '1':
                        self.bootstrap_completed = True
                        self.log.info("Mempool analytica task started")
                        async with self.db_pool.acquire() as conn:
                            self.last_day = await conn.fetchval(
                                "SELECT max(day) FROM  mempool_analytica")
                            self.last_hour = await conn.fetchval(
                                "SELECT max(hour) FROM  mempool_analytica")
                    else:
                        # NOTE(review): after this sleep the loop still falls
                        # through to the processing below even though
                        # bootstrap is incomplete — a `continue` was likely
                        # intended here; confirm before changing.
                        await asyncio.sleep(60)

                q = time.time()  # round start timestamp
                await self.load_block_map()

                async with self.db_pool.acquire() as conn:
                    async with conn.transaction():
                        # Despite the name, this selects the best HEIGHT.
                        last_hash = await conn.fetchval(
                            "SELECT height FROM blocks order by height desc  LIMIT 1;"
                        )

                        # New best block: restart aggregation from zero.
                        if last_hash != self.last_hash:
                            self.last_hash = last_hash
                            outputs, inputs, transactions = self.refresh_stat()
                            utxo_sequence = 0
                            stxo_sequence = 0
                            tx_sequence = 0
                            dbs = set()
                            dbs_childs = set()
                            truncate_dbs_table = True

                        # Fetch only rows added since the previous round.
                        stxo = await conn.fetch(
                            "SELECT tx_id, out_tx_id, address, amount, pointer, sequence, outpoint,  id  "
                            "FROM connector_unconfirmed_stxo "
                            "WHERE id > $1;", stxo_sequence)
                        utxo = await conn.fetch(
                            "SELECT out_tx_id as tx_id, address, amount, id "
                            "FROM connector_unconfirmed_utxo "
                            "WHERE id > $1;", utxo_sequence)
                        tx = await conn.fetch(
                            "SELECT tx_id, size, b_size, rbf, fee, "
                            "amount, segwit, timestamp, id  FROM unconfirmed_transaction "
                            "WHERE id > $1;", tx_sequence)
                        # Minimum fee rate among the highest-paying txs that
                        # fill a virtual block (running sum over feerate desc,
                        # cut at 920000) -> current "best" fee estimate.
                        row = await conn.fetchval(
                            "select min(feerate)  "
                            "from (select feerate, sum((size + b_size * 4)/4) "
                            "over (order by feerate desc) as block "
                            "from unconfirmed_transaction) t where block <= 920000;"
                        )
                        if row is not None:
                            best_fee = row

                txsi = set()
                txso = set()
                dbs_outs = set()
                dbs_set = set()
                if tx:
                    inputs["count"] += len(stxo)
                    for row in stxo:
                        if stxo_sequence < row["id"]:
                            stxo_sequence = row["id"]
                        # NOTE(review): stxo rows with sequence > 0 are
                        # treated as double-spend candidates — presumably
                        # sequence counts spend attempts on the outpoint;
                        # confirm against the connector schema.
                        if row["sequence"] > 0:
                            dbs_outs.add(row["outpoint"])
                            dbs_set.add(row["tx_id"])
                        txsi.add(row["tx_id"])
                        inputs["amount"]["total"] += row["amount"]
                        if inputs["amount"]["max"]["value"] is None or \
                                inputs["amount"]["max"]["value"] < row["amount"]:
                            inputs["amount"]["max"]["value"] = row["amount"]
                            inputs["amount"]["max"]["txId"] = rh2s(
                                row["tx_id"])

                        if inputs["amount"]["min"]["value"] is None or \
                                inputs["amount"]["min"]["value"] > row["amount"]:
                            inputs["amount"]["min"]["value"] = row["amount"]
                            inputs["amount"]["min"]["txId"] = rh2s(
                                row["tx_id"])

                        # try/except used defaultdict-style throughout:
                        # bump an existing bucket or create it on first use.
                        try:
                            inputs["typeMap"][row["address"][0]]["count"] += 1
                            inputs["typeMap"][row["address"]
                                              [0]]["amount"] += row["amount"]
                        except:
                            inputs["typeMap"][row["address"][0]] = {
                                "count": 1,
                                "amount": row["amount"]
                            }
                        # Bucket by order of magnitude of the amount.
                        amount = row["amount"]
                        key = None if amount == 0 else str(
                            math.floor(math.log10(amount)))

                        try:
                            inputs["amountMap"][key]["count"] += 1
                            inputs["amountMap"][key]["amount"] += row["amount"]
                        except:
                            inputs["amountMap"][key] = {
                                "count": 1,
                                "amount": row["amount"]
                            }

                        # Age bucket of the spent coin, derived from the
                        # timestamp of the block it was created in.
                        try:
                            key = time.time() - self.block_map_timestamp[
                                row["pointer"] >> 39]
                            if key < 3600:
                                key = "1h"
                            elif key < 43200:
                                key = "12h"
                            elif key < 86400:
                                key = "1d"
                            elif key < 259200:
                                key = "3d"
                            elif key < 604800:
                                key = "1w"
                            elif key < 2592000:
                                key = "1m"
                            else:
                                key = "%sy" % (int(key // 31536000) + 1)
                        except:
                            # Unknown block (e.g. unconfirmed parent).
                            key = None

                        try:
                            inputs["ageMap"][key]["count"] += 1
                            inputs["ageMap"][key]["amount"] += row["amount"]
                        except:
                            inputs["ageMap"][key] = {
                                "count": 1,
                                "amount": row["amount"]
                            }

                    # Confirm candidate outpoints that are actually spent by
                    # more than one mempool tx: the second (and later) spender
                    # seen per outpoint marks a double-spend.
                    async with self.db_pool.acquire() as conn:
                        dbs_rows = await conn.fetch(
                            "SELECT tx_id, outpoint  "
                            "FROM connector_unconfirmed_stxo "
                            "WHERE outpoint = ANY($1);", dbs_outs)
                        out_map = set()

                        for row in dbs_rows:
                            if row["outpoint"] in out_map:
                                if row["tx_id"] in dbs_set:
                                    dbs.add(row["tx_id"])
                            else:
                                out_map.add(row["outpoint"])

                    # Transitive closure: keep sweeping until no new
                    # descendants of double-spend txs are found.
                    l_dbs_size = 0
                    while True:
                        for row in stxo:
                            if row["out_tx_id"] in dbs or row[
                                    "out_tx_id"] in dbs_childs:
                                if row["tx_id"] not in dbs:
                                    dbs_childs.add(row["tx_id"])

                        if l_dbs_size != len(dbs_childs):
                            l_dbs_size = len(dbs_childs)
                        else:
                            break

                    outputs["count"] += len(utxo)
                    for row in utxo:
                        if utxo_sequence < row["id"]:
                            utxo_sequence = row["id"]
                        txso.add(row["tx_id"])
                        outputs["amount"]["total"] += row["amount"]
                        if outputs["amount"]["max"]["value"] is None or \
                                outputs["amount"]["max"]["value"] < row["amount"]:
                            outputs["amount"]["max"]["value"] = row["amount"]
                            outputs["amount"]["max"]["txId"] = rh2s(
                                row["tx_id"])

                        # Zero-value outputs are excluded from the minimum.
                        if outputs["amount"]["min"]["value"] is None or \
                                outputs["amount"]["min"]["value"] > row["amount"]:
                            if row["amount"] > 0:
                                outputs["amount"]["min"]["value"] = row[
                                    "amount"]
                                outputs["amount"]["min"]["txId"] = rh2s(
                                    row["tx_id"])
                        try:
                            outputs["typeMap"][row["address"][0]]["count"] += 1
                            outputs["typeMap"][row["address"]
                                               [0]]["amount"] += row["amount"]
                        except:
                            outputs["typeMap"][row["address"][0]] = {
                                "count": 1,
                                "amount": row["amount"]
                            }
                        amount = row["amount"]
                        key = None if amount == 0 else str(
                            math.floor(math.log10(amount)))

                        try:
                            outputs["amountMap"][key]["count"] += 1
                            outputs["amountMap"][key]["amount"] += row[
                                "amount"]
                        except:
                            outputs["amountMap"][key] = {
                                "count": 1,
                                "amount": row["amount"]
                            }

                    transactions["doublespend"]["count"] = len(dbs)
                    transactions["doublespendChilds"]["count"] = len(
                        dbs_childs)
                    transactions["count"] += len(tx)
                    dbs_records = deque()

                    for row in tx:
                        # Virtual size: ceil((3*base + total)/4) — matches the
                        # BIP 141 weight formula assuming b_size is the
                        # stripped (base) size; confirm against the schema.
                        v_size = math.ceil(
                            (row["b_size"] * 3 + row["size"]) / 4)
                        if tx_sequence < row["id"]:
                            tx_sequence = row["id"]
                        if row["tx_id"] in dbs:
                            transactions["doublespend"]["amount"] += row[
                                "amount"]
                            transactions["doublespend"]["size"] += row["size"]
                            transactions["doublespend"]["vSize"] += v_size
                            # child flag 0 = the double-spend tx itself
                            dbs_records.append(
                                (row["tx_id"], row["timestamp"], 0))
                        if row["tx_id"] in dbs_childs:
                            transactions["doublespendChilds"]["amount"] += row[
                                "amount"]
                            transactions["doublespendChilds"]["size"] += row[
                                "size"]
                            transactions["doublespendChilds"][
                                "vSize"] += v_size
                            # child flag 1 = descendant of a double-spend
                            dbs_records.append(
                                (row["tx_id"], row["timestamp"], 1))

                        if row["amount"] > 0:
                            transactions["amount"]["total"] += row["amount"]
                            if transactions["amount"]["max"]["value"] is None or \
                                    transactions["amount"]["max"]["value"] < row["amount"]:
                                transactions["amount"]["max"]["value"] = row[
                                    "amount"]
                                transactions["amount"]["max"]["txId"] = rh2s(
                                    row["tx_id"])

                            if transactions["amount"]["min"]["value"] is None or \
                                    transactions["amount"]["min"]["value"] > row["amount"]:
                                transactions["amount"]["min"]["value"] = row[
                                    "amount"]
                                transactions["amount"]["min"]["txId"] = rh2s(
                                    row["tx_id"])

                        if row["fee"] is not None:
                            transactions["fee"]["total"] += row["fee"]
                            if transactions["fee"]["max"]["value"] is None or \
                                    transactions["fee"]["max"]["value"] < row["fee"]:
                                transactions["fee"]["max"]["value"] = row[
                                    "fee"]
                                transactions["fee"]["max"]["txId"] = rh2s(
                                    row["tx_id"])

                            if transactions["fee"]["min"]["value"] is None or \
                                    transactions["fee"]["min"]["value"] > row["fee"]:
                                transactions["fee"]["min"]["value"] = row[
                                    "fee"]
                                transactions["fee"]["min"]["txId"] = rh2s(
                                    row["tx_id"])

                            fee_rate = math.ceil(row["fee"] / v_size)

                            if transactions["feeRate"]["max"]["value"] is None or \
                                    transactions["feeRate"]["max"]["value"] < fee_rate:
                                transactions["feeRate"]["max"][
                                    "value"] = fee_rate
                                transactions["feeRate"]["max"]["txId"] = rh2s(
                                    row["tx_id"])

                            if transactions["feeRate"]["min"]["value"] is None or \
                                    transactions["feeRate"]["min"]["value"] > fee_rate:
                                transactions["feeRate"]["min"][
                                    "value"] = fee_rate
                                transactions["feeRate"]["min"]["txId"] = rh2s(
                                    row["tx_id"])

                            # Coarsen fee-rate buckets: width 2 in (10, 20),
                            # width 10 in (20, 200), width 25 above 200.
                            # NOTE(review): the strict `>` comparisons leave
                            # the exact values 20 and 200 un-bucketed —
                            # confirm whether that is intentional.
                            key = fee_rate
                            if key > 10 and key < 20:
                                key = math.floor(key / 2) * 2
                            elif key > 20 and key < 200:
                                key = math.floor(key / 10) * 10
                            elif key > 200:
                                key = math.floor(key / 25) * 25
                            try:
                                transactions["feeRateMap"][key]["count"] += 1
                                transactions["feeRateMap"][key]["size"] += row[
                                    "size"]
                                transactions["feeRateMap"][key][
                                    "vSize"] += v_size
                            except:
                                transactions["feeRateMap"][key] = {
                                    "count": 1,
                                    "size": row["size"],
                                    "vSize": v_size
                                }

                        if row["rbf"]:
                            transactions["rbfCount"] += 1
                        if row["segwit"]:
                            transactions["segwitCount"] += 1
                        if row["size"]:
                            transactions["size"]["total"] += row["size"]
                            transactions["vSize"]["total"] += v_size
                            if transactions["size"]["max"]["value"] is None or \
                                    transactions["size"]["max"]["value"] < row["size"]:
                                transactions["size"]["max"]["value"] = row[
                                    "size"]
                                transactions["size"]["max"]["txId"] = rh2s(
                                    row["tx_id"])

                            if transactions["vSize"]["max"]["value"] is None or \
                                    transactions["vSize"]["max"]["value"] < v_size:
                                transactions["vSize"]["max"]["value"] = v_size
                                transactions["vSize"]["max"]["txId"] = rh2s(
                                    row["tx_id"])


                            if transactions["size"]["min"]["value"] is None or \
                                    transactions["size"]["min"]["value"] > row["size"]:
                                transactions["size"]["min"]["value"] = row[
                                    "size"]
                                transactions["size"]["min"]["txId"] = rh2s(
                                    row["tx_id"])

                            if transactions["vSize"]["min"]["value"] is None or \
                                    transactions["vSize"]["min"]["value"] > v_size:
                                transactions["vSize"]["min"]["value"] = v_size
                                transactions["vSize"]["min"]["txId"] = rh2s(
                                    row["tx_id"])

                    # Use the computed estimate only when the mempool is
                    # larger than one virtual block, otherwise 1 sat/vbyte.
                    if transactions["vSize"]["total"] > 1000000:
                        transactions["feeRate"]["best"] = round(best_fee, 2)
                    else:
                        transactions["feeRate"]["best"] = 1

                    async with self.db_pool.acquire() as conn:
                        async with conn.transaction():
                            # Rebuild mempool_dbs from scratch after a reset,
                            # then append the newly seen double-spend records.
                            if truncate_dbs_table:
                                await conn.execute(
                                    "truncate table  mempool_dbs;")
                                truncate_dbs_table = False
                            await conn.copy_records_to_table(
                                'mempool_dbs',
                                columns=["tx_id", "timestamp", "child"],
                                records=dbs_records)

                            # Determine whether this minute also starts a new
                            # hour and/or a new day (stored once each).
                            s_minute = int(time.time()) // 60
                            if s_minute % 60 == 0 and self.last_hour < s_minute // 60:
                                s_hour = s_minute // 60
                                self.last_hour = s_hour
                                if s_hour % 24 == 0 and self.last_day < s_hour // 24:
                                    s_day = s_hour // 24
                                    self.last_day = s_day
                                else:
                                    s_day = None
                            else:
                                s_hour = None
                                s_day = None

                            # Refresh the hourly/4h moving averages once per
                            # minute (or while they are still the initial 1).
                            if self.last_minute != s_minute or transactions[
                                    "feeRate"]["bestHourly"] == 1:
                                best_fee_hourly.set(
                                    transactions["feeRate"]["best"])
                                f = 0
                                for i in best_fee_hourly.items:
                                    f += i
                                f4 = 0
                                for i in best_fee_4h.items:
                                    f4 += i
                                if len(best_fee_hourly.items):
                                    transactions["feeRate"][
                                        "bestHourly"] = round(
                                            f / len(best_fee_hourly.items), 2)
                                else:
                                    transactions["feeRate"][
                                        "bestHourly"] = transactions[
                                            "feeRate"]["best"]

                                if len(best_fee_4h.items):
                                    transactions["feeRate"]["best4h"] = round(
                                        f4 / len(best_fee_4h.items), 2)
                                else:
                                    transactions["feeRate"][
                                        "best4h"] = transactions["feeRate"][
                                            "best"]

                            # One row per minute; re-running within the same
                            # minute overwrites the aggregates.
                            await conn.execute(
                                "INSERT INTO mempool_analytica "
                                "(minute, hour, day, inputs, outputs, transactions)"
                                " VALUES "
                                "($1, $2, $3, $4, $5, $6) "
                                "ON CONFLICT (minute) "
                                "DO UPDATE SET "
                                " inputs = $4,"
                                " outputs = $5, "
                                " transactions = $6", s_minute, s_hour, s_day,
                                json.dumps(inputs), json.dumps(outputs),
                                json.dumps(transactions))

                    if s_hour is not None:
                        # NOTE(review): at this point `q` still holds the
                        # round's START timestamp (elapsed time is computed
                        # only below), so "round time %s" logs an absolute
                        # epoch value — likely a bug; confirm before fixing.
                        self.log.warning(
                            "Mempool analytica hourly point saved %s" % s_hour)
                        self.log.info(
                            "Mempool transactions %s; STXO : %s; UTXO %s; DBS %s; round time %s;"
                            % (transactions["count"], inputs["count"],
                               outputs["count"],
                               transactions["doublespend"]["count"] +
                               transactions["doublespendChilds"]["count"], q))
                    # Throttle to at most one round per second; warn when a
                    # round takes too long ("is to slow" typo is in the
                    # runtime string and is preserved here).
                    q = time.time() - q
                    if q < 1:
                        await asyncio.sleep(1 - q)
                    if q > 10:
                        self.log.warning("Mempool analytica is to slow %s" % q)

                    if self.last_minute != s_minute or transactions["feeRate"][
                            "best4h"] == 1:
                        self.last_minute = s_minute
                        self.log.debug(
                            "Mempool TX %s; STXO %s; UTXO %s; DBS %s; %s; %s; Best fee  %s/%s/%s; Round time %s;"
                            % (transactions["count"], inputs["count"],
                               outputs["count"],
                               transactions["doublespend"]["count"] +
                               transactions["doublespendChilds"]["count"],
                               format_bytes(transactions["size"]["total"]),
                               format_vbytes(transactions["vSize"]["total"]),
                               transactions["feeRate"]["best"],
                               transactions["feeRate"]["bestHourly"],
                               transactions["feeRate"]["best4h"], round(q, 4)))

                    # assert len(tx) == len(txsi)
                    # assert len(tx) == len(txso)
                    #
                    # async with self.db_pool.acquire() as conn:
                    #     v = await conn.fetch("SELECT invalid_transaction.tx_id FROM  invalid_transaction "
                    #                                 " JOIN connector_unconfirmed_stxo ON connector_unconfirmed_stxo.tx_id = invalid_transaction.tx_id "
                    #                             " ;")
                    #     k = [t["tx_id"] for t in v]
                    #     for t in v:
                    #         print(rh2s(t["tx_id"]))
                    #     v = await conn.fetch("SELECT  outpoint, sequence FROM  connector_unconfirmed_stxo WHERE tx_id = ANY($1);", k)
                    #     print("u", len(v))
                    #     uu = set()
                    #     pp = set()
                    #     for r in v:
                    #         uu.add(r["outpoint"])
                    #         pp.add((r["outpoint"], r["sequence"]))
                    #     v = await conn.fetch("SELECT  outpoint, sequence FROM  invalid_stxo WHERE tx_id = ANY($1);", k)
                    #     print("i", len(v))
                    #     ii = set()
                    #     for r in v:
                    #         ii.add((r["outpoint"], r["sequence"]))
                    #     e = 0
                    #     for i in ii:
                    #         if i[0] not in uu:
                    #             print("none", i[1])
                    #         else:
                    #             e += 1
                    #     print(">>", e)
                    #
                    #     v = await conn.fetch("SELECT  count(*)  from connector_unconfirmed_utxo WHERE out_tx_id = ANY($1);", k)
                    #     print("connector_unconfirmed_utxo", v)
                    #     v = await conn.fetch("SELECT  count(*)  from unconfirmed_transaction WHERE tx_id = ANY($1);", k)
                    #     print("unconfirmed_transaction", v)
                    #     v = await conn.fetch("SELECT  count(*)  from unconfirmed_transaction_map WHERE tx_id = ANY($1);", k)
                    #     print("unconfirmed_transaction_map", v)
                    #     ff = 0
                    #     for i in pp:
                    #         v = await conn.fetchval("SELECT  count(*)  from invalid_stxo WHERE outpoint = $1 and sequence = $2;", i[0], i[1])
                    #         ff += v
                    #     print("ff", ff)
                    #     ll = list()
                    #     v = await conn.fetch("SELECT  outpoint, sequence, out_tx_id, tx_id, input_index, address, amount, pointer from connector_unconfirmed_stxo WHERE tx_id = ANY($1);", k)
                    #     for i in v:
                    #         ll.append((i["outpoint"],
                    #                 i["sequence"],
                    #                 i["out_tx_id"],
                    #                 i["tx_id"],
                    #                 i["input_index"],
                    #                 i["address"],
                    #                 i["amount"],
                    #                 i["pointer"],
                    #                 ))
                    #     print("ll", len(ll))
                    #     try:
                    #         # await conn.copy_records_to_table('invalid_stxo',
                    #         #                                  columns=["outpoint",
                    #         #                                           "sequence",
                    #         #                                           "out_tx_id",
                    #         #                                           "tx_id",
                    #         #                                           "input_index",
                    #         #                                           "address",
                    #         #                                           "amount",
                    #         #                                           "pointer",],
                    #         #                                  records=ll)
                    #         # print("iok")
                    #          ###v = await conn.fetch("DELETE  FROM  connector_unconfirmed_stxo WHERE tx_id = ANY($1);", k)
                    #     except Exception as err:
                    #         print(err)
                    #     await asyncio.sleep(50000)

                    #     v = await conn.fetch("DELETE  FROM  unconfirmed_transaction_map WHERE tx_id = ANY($1);", k)
                    #     print(v)
                    #     # v = await conn.fetch("DELETE  FROM  connector_unconfirmed_stxo WHERE tx_id = ANY($1);", k)
                    #     # print(v)
                    # v = await conn.fetch("SELECT  tx_id FROM  connector_unconfirmed_stxo WHERE tx_id = ANY($1);", k)
                    # print(v)
                    # v = await conn.fetch("SELECT  out_tx_id FROM  connector_unconfirmed_utxo WHERE out_tx_id = ANY($1);", k)
                    # print(v)
                    # v = await conn.fetch("DELETE  FROM  connector_unconfirmed_utxo WHERE out_tx_id = ANY($1);", k)
                    # print(v)
                    # v = await conn.fetch("SELECT  out_tx_id FROM  connector_unconfirmed_utxo WHERE out_tx_id = ANY($1);", k)
                    # print(v)
                    # if v == []:
                    #     await conn.fetch("DELETE  FROM  unconfirmed_transaction WHERE tx_id = ANY($1);", k)
                else:
                    await asyncio.sleep(2)
            except asyncio.CancelledError:
                self.log.warning("Mempool analytica task canceled")
                break
            except Exception as err:
                self.log.error("Mempool analytica task error: %s" % err)
                print(traceback.format_exc())
                await asyncio.sleep(10)
Beispiel #25
0
async def block_transactions(pointer, option_raw_tx, limit, page, order, mode,
                             app):
    """
    Return one page of a block's transactions.

    :param pointer: block hash (bytes) or block height (int)
    :param option_raw_tx: when truthy, keep the raw transaction hex in each record
    :param limit: page size (transactions per page)
    :param page: 1-based page number
    :param order: SQL sort-direction keyword interpolated into the query;
        must already be validated/whitelisted by the caller
    :param mode: "brief" (address counters only), "verbose" (full per-input /
        per-output details) or any other value for the default level of detail
    :param app: application context (db pool, chain options, last-block state)
    :return: dict {"data": {"list", "page", "pages", "total", "limit"},
                   "time": elapsed seconds}
    :raises APIException: NOT_FOUND when the block does not exist
    """
    pool = app["db_pool"]
    qt = time.time()

    async with pool.acquire() as conn:
        if isinstance(pointer, bytes):
            # Lookup by block hash; resolve to the height, which is what the
            # transaction-pointer arithmetic below works with.
            stmt = await conn.prepare(
                "SELECT height, hash, header, adjusted_timestamp "
                "FROM blocks  WHERE hash = $1 LIMIT 1;")
            block_row = await stmt.fetchrow(pointer)
            if block_row is None:
                raise APIException(NOT_FOUND, "block not found", status=404)
            pointer = block_row["height"]
            block_height = block_row["height"]

        else:
            block_height = pointer
            stmt = await conn.prepare(
                "SELECT height, hash, header, adjusted_timestamp "
                "FROM blocks  WHERE height = $1 LIMIT 1;")
            block_row = await stmt.fetchrow(pointer)
        if block_row is None:
            raise APIException(NOT_FOUND, "block not found", status=404)

        # The stored header is the 80-byte block header with the transaction
        # count appended as a varint.
        count = var_int_to_int(block_row["header"][80:])
        pages = count // limit
        # Transaction pointers pack the block height in the high bits
        # (height << 39), so [height << 39, (height + 1) << 39) spans exactly
        # this block's transactions.
        if app["merkle_proof"]:
            rows = await conn.fetch(
                "SELECT tx_id, raw_transaction,  timestamp, pointer, merkle_proof  "
                "FROM transaction  WHERE pointer >= $1 AND pointer < $2 "
                "ORDER BY pointer %s LIMIT $3 OFFSET $4;" % order,
                pointer << 39, (pointer + 1) << 39, limit + 1,
                limit * (page - 1))
        else:
            rows = await conn.fetch(
                "SELECT tx_id, raw_transaction,  timestamp, pointer  "
                "FROM transaction  WHERE pointer >= $1 AND pointer < $2 "
                "ORDER BY pointer %s LIMIT $3 OFFSET $4;" % order,
                pointer << 39, (pointer + 1) << 39, limit + 1,
                limit * (page - 1))

        transactions = list()
        for row in rows:
            tx = Transaction(row["raw_transaction"],
                             format="decoded",
                             testnet=app["testnet"],
                             keep_raw_tx=option_raw_tx)
            # Position of the tx inside the block (19-bit field of the pointer).
            tx["blockIndex"] = (row["pointer"] >> 20) & 524287
            tx["timestamp"] = row["timestamp"]
            tx["confirmations"] = app["last_block"] - block_height + 1
            if app["merkle_proof"]:
                tx["merkleProof"] = base64.b64encode(
                    row["merkle_proof"]).decode()

            # Strip decoder fields that are not part of this endpoint's
            # response.  NOTE: the original code unpacked block_time from the
            # header, assigned it to tx["blockTime"] and deleted it right
            # away; that dead store was removed — pop() keeps the removal safe
            # whether or not the decoder produced the key.
            del tx["blockHash"]
            tx.pop("blockTime", None)
            del tx["format"]
            del tx["testnet"]
            del tx["time"]
            del tx["fee"]
            if not option_raw_tx:
                del tx["rawTx"]
            if app["transaction_history"]:
                tx["inputsAmount"] = 0
            if mode == "brief":
                # Brief mode returns address counters instead of full details.
                tx["outputAddresses"] = 0
                tx["inputAddresses"] = 0
                for z in tx["vOut"]:
                    if "address" in tx["vOut"][z]:
                        tx["outputAddresses"] += 1
            transactions.append(tx)
        app["block_transactions"][block_row["hash"]] = transactions

        if app["transaction_history"]:
            # get information about spent input coins; the s_pointer window
            # selects only inputs whose spending tx falls on the current page.
            rows = await conn.fetch(
                "SELECT pointer,"
                "       s_pointer,"
                "       address, "
                "       amount  "
                "FROM stxo "
                "WHERE stxo.s_pointer >= $1 and  stxo.s_pointer < $2 order by stxo.s_pointer asc;",
                (block_height << 39) + ((limit * (page - 1)) << 20),
                ((block_height) << 39) + ((limit * (page)) << 20))

            for r in rows:
                s = r["s_pointer"]
                # i: input index inside the spending tx (low bits of s_pointer)
                # m: index of that tx within this page's `transactions` list
                i = (s - ((s >> 19) << 19))
                m = ((s - ((s >> 39) << 39)) >> 20) - limit * (page - 1)
                transactions[m]["inputsAmount"] += r["amount"]

                if mode == "verbose":
                    # Expand each input with the spent output's type, amount,
                    # origin block and (where reconstructable) its address and
                    # scriptPubKey.
                    transactions[m]["vIn"][i]["type"] = SCRIPT_N_TYPES[
                        r["address"][0]]
                    transactions[m]["vIn"][i]["amount"] = r["amount"]
                    transactions[m]["vIn"][i][
                        "blockHeight"] = r["pointer"] >> 39
                    transactions[m]["vIn"][i]["confirmations"] = app[
                        "last_block"] - (r["pointer"] >> 39) + 1

                    if r["address"][0] in (0, 1, 5, 6):
                        # hash-based address types: 1/6 are script hashes,
                        # >= 5 are witness programs (version 0)
                        script_hash = True if r["address"][0] in (1,
                                                                  6) else False
                        witness_version = None if r["address"][0] < 5 else 0
                        transactions[m]["vIn"][i]["address"] = hash_to_address(
                            r["address"][1:],
                            testnet=app["testnet"],
                            script_hash=script_hash,
                            witness_version=witness_version)
                        transactions[m]["vIn"][i][
                            "scriptPubKey"] = address_to_script(
                                transactions[m]["vIn"][i]["address"], hex=1)
                    elif r["address"][0] == 2:
                        # raw-script type: address derived from the script
                        transactions[m]["vIn"][i][
                            "address"] = script_to_address(
                                r["address"][1:], testnet=app["testnet"])
                        transactions[m]["vIn"][i]["scriptPubKey"] = r[
                            "address"][1:].hex()
                    else:
                        # non-standard: expose the script only
                        transactions[m]["vIn"][i]["scriptPubKey"] = r[
                            "address"][1:].hex()
                    transactions[m]["vIn"][i][
                        "scriptPubKeyOpcodes"] = decode_script(
                            transactions[m]["vIn"][i]["scriptPubKey"])
                    transactions[m]["vIn"][i][
                        "scriptPubKeyAsm"] = decode_script(
                            transactions[m]["vIn"][i]["scriptPubKey"], 1)
                else:
                    if r["address"][0] in (0, 1, 2, 5, 6):
                        if mode == "brief":
                            transactions[m]["inputAddresses"] += 1

            for m in range(len(transactions)):
                transactions[m]["fee"] = transactions[m][
                    "inputsAmount"] - transactions[m]["amount"]
                transactions[m]["outputsAmount"] = transactions[m]["amount"]
                if mode != "verbose":
                    transactions[m]["inputs"] = len(transactions[m]["vIn"])
                    transactions[m]["outputs"] = len(transactions[m]["vOut"])
                    del transactions[m]["vIn"]
                    del transactions[m]["vOut"]
            # The coinbase has no inputs, so the subtraction above yields a
            # negative fee for it; force it to zero.  The coinbase is the
            # first transaction of the block and therefore only appears on
            # page 1 — the previous unguarded transactions[0] assignment
            # zeroed a regular transaction's fee on later pages and raised
            # IndexError on an out-of-range (empty) page.
            if page == 1 and transactions:
                transactions[0]["fee"] = 0
            # get information about spent output coins
            if mode == "verbose":
                # unconfirmed (mempool) spenders of this page's outputs
                rows = await conn.fetch(
                    "SELECT   outpoint,"
                    "         input_index,"
                    "       tx_id "
                    "FROM connector_unconfirmed_stxo "
                    "WHERE out_tx_id = ANY($1);",
                    [s2rh(t["txId"]) for t in transactions])
                out_map = dict()
                for v in rows:
                    i = bytes_to_int(v["outpoint"][32:])
                    out_map.setdefault((rh2s(v["outpoint"][:32]), i),
                                       []).append(
                                           {"txId": rh2s(v["tx_id"]),
                                            "vIn": v["input_index"]})

                # confirmed spenders, keyed by the spent output's pointer
                rows = await conn.fetch(
                    "SELECT stxo.pointer,"
                    "       stxo.s_pointer,"
                    "       transaction.tx_id  "
                    "FROM stxo "
                    "JOIN transaction "
                    "ON transaction.pointer = (stxo.s_pointer >> 18)<<18 "
                    "WHERE stxo.pointer >= $1 and "
                    "stxo.pointer < $2  order by stxo.pointer ;",
                    (block_height << 39) + ((limit * (page - 1)) << 20),
                    ((block_height) << 39) + ((limit * page) << 20))
                p_out_map = dict()
                for v in rows:
                    p_out_map[v["pointer"]] = [{
                        "txId": rh2s(v["tx_id"]),
                        "vIn": v["s_pointer"] & 0b111111111111111111
                    }]
                for t in range(len(transactions)):
                    # 1 << 19 flags the "output" half of the pointer space
                    o_pointer = (block_height << 39) + (
                        transactions[t]["blockIndex"] << 20) + (1 << 19)
                    for i in transactions[t]["vOut"]:
                        # confirmed spender wins; fall back to mempool
                        # spenders; otherwise unspent
                        spent = p_out_map.get(o_pointer + i)
                        if spent is None:
                            spent = out_map.get(
                                (transactions[t]["txId"], int(i)), [])
                        transactions[t]["vOut"][i]["spent"] = spent

    resp = {
        "data": {
            "list": transactions,
            "page": page,
            "pages": pages,
            "total": count,
            "limit": limit
        },
        "time": round(time.time() - qt, 4)
    }
    return resp
Beispiel #26
0
async def block_data_by_pointer(pointer, stat, app):
    """
    Fetch a single block record by hash, height or 'last'.

    :param pointer: 'last' for the chain tip, block hash (bytes) or height (int)
    :param stat: when truthy (and app["blockchain_analytica"] is enabled),
        attach precomputed per-block statistics
    :param app: application context (db pool, block time maps, chain state)
    :return: dict {"data": block, "time": elapsed seconds}
    :raises APIException: NOT_FOUND when the block does not exist
    """
    pool = app["db_pool"]
    qt = time.time()
    async with pool.acquire() as conn:
        # `row` stays None for an unsupported pointer type; previously that
        # case fell through to an UnboundLocalError instead of a clean 404.
        row = None
        if pointer == 'last':
            stmt = await conn.prepare(
                "SELECT height,"
                "       hash,"
                "       miner,"
                "       timestamp_received,"
                "       data,"
                "       header,"
                "       adjusted_timestamp "
                "FROM blocks  ORDER BY height desc LIMIT 1;")
            row = await stmt.fetchrow()
        elif isinstance(pointer, bytes):
            stmt = await conn.prepare(
                "SELECT height,"
                "       hash,"
                "       miner,"
                "       timestamp_received,"
                "       data,"
                "       header,"
                "       adjusted_timestamp "
                "FROM blocks  WHERE hash = $1 LIMIT 1;")
            row = await stmt.fetchrow(pointer)

        elif isinstance(pointer, int):
            stmt = await conn.prepare(
                "SELECT height,"
                "       hash,"
                "       miner,"
                "       timestamp_received,"
                "       data,"
                "       header,"
                "       adjusted_timestamp "
                "FROM blocks  WHERE height = $1 LIMIT 1;")
            row = await stmt.fetchrow(pointer)

        if row is None:
            raise APIException(NOT_FOUND, "block not found", status=404)

        block = dict()
        block["height"] = row["height"]
        # If the block is ahead of the cached tip, refresh the block map once;
        # a still-inconsistent state is an internal error.
        if block["height"] > app["last_block"]:
            await block_map_update(app)
            if block["height"] > app["last_block"]:
                raise Exception("internal error")
        block["hash"] = rh2s(row["hash"])
        block["header"] = base64.b64encode(row["header"]).decode()
        # Merge the JSON "data" payload into the response record.
        d = json.loads(row["data"])
        for k in d:
            block[k] = d[k]
        # miner may be NULL (json.loads(None) -> TypeError) or malformed
        # JSON (-> ValueError); anything else should propagate.
        try:
            block["miner"] = json.loads(row["miner"])
        except (TypeError, ValueError):
            block["miner"] = None
        block["medianBlockTime"] = app["block_map_time"][block["height"]][2]
        block["blockTime"] = app["block_map_time"][block["height"]][1]
        block["receivedTimestamp"] = row["timestamp_received"]
        block["adjustedTimestamp"] = row["adjusted_timestamp"]
        # Expose bits/nonce/version both raw and hex-encoded.
        block["bitsHex"] = block["bits"]
        block["bits"] = bytes_to_int(bytes_from_hex(block["bits"]))
        block["nonceHex"] = block["nonce"].to_bytes(4, byteorder="big").hex()
        block["versionHex"] = int_to_bytes(block["version"]).hex()
        block["difficulty"] = block["targetDifficulty"]
        q = int.from_bytes(s2rh(block["hash"]), byteorder="little")
        block["blockDifficulty"] = target_to_difficulty(q)
        del block["targetDifficulty"]
        # `next` renamed: it shadowed the builtin.
        next_hash = await conn.fetchval(
            "SELECT "
            "       hash "
            "FROM blocks WHERE height = $1;", block["height"] + 1)

        if next_hash is not None:
            block["nextBlockHash"] = rh2s(next_hash)
        else:
            block["nextBlockHash"] = None

        # get coinbase transaction (pointer = height << 39, tx index 0)
        cb = await conn.fetchval(
            "SELECT raw_transaction  "
            "FROM transaction  WHERE pointer = $1  LIMIT 1;",
            block["height"] << 39)
        tx = Transaction(cb, format="raw")
        # Subsidy halves every 210000 blocks; note // binds tighter than >>.
        block["estimatedBlockReward"] = 50 * 100000000 >> block[
            "height"] // 210000
        if tx["amount"] > block["estimatedBlockReward"]:
            # Coinbase claims more than the subsidy: the excess is fees.
            block["blockReward"] = block["estimatedBlockReward"]
            block["blockFeeReward"] = tx["amount"] - block[
                "estimatedBlockReward"]
        else:
            block["blockReward"] = tx["amount"]
            block["blockFeeReward"] = 0
        block["confirmations"] = app["last_block"] - block["height"] + 1
        # Tx count is stored as a varint after the 80-byte header.
        block["transactionsCount"] = var_int_to_int(row["header"][80:])
        block["coinbase"] = tx["vIn"][0]["scriptSig"].hex()

    if stat and app["blockchain_analytica"]:
        async with pool.acquire() as conn:
            stat = await conn.fetchval(
                "SELECT block FROM block_stat WHERE height = $1 LIMIT 1;",
                row["height"])
        if stat is not None:
            block["statistics"] = json.loads(stat)
        else:
            block["statistics"] = None

    resp = {"data": block, "time": round(time.time() - qt, 4)}
    return resp