def count(self):
    api = get_client()
    try:
        stream_info = api.getstreaminfo(self.stream)
        return int(stream_info.get("items"))
    except Exception:
        # Fall back to a large sentinel so pagination keeps working if the
        # RPC call fails.
        return 999999
def get_context_data(self, **kwargs):
    ctx = super().get_context_data(**kwargs)
    address = ctx["address"].address
    ctx["amount_blocks"] = Block.objects.filter(miner=address).count()
    api = get_client()
    blocks = api.getinfo().blocks
    # An address only counts as an active miner/admin while the current
    # height is inside the permission's start/end block window.
    ctx["miner"] = False
    for perm in api.listpermissions("mine"):
        if (perm["address"] == address
                and perm["startblock"] < blocks < perm["endblock"]):
            ctx["miner"] = True
    ctx["admin"] = False
    for perm in api.listpermissions("admin"):
        if (perm["address"] == address
                and perm["startblock"] < blocks < perm["endblock"]):
            ctx["admin"] = True
    qs = super().get_queryset()
    ctx["balance"] = qs.with_balance().filter(address=address).first().balance
    last_out = Output.objects.filter(
        address=address).order_by("-transaction__block__time")[:10]
    ctx["last_tx"] = []
    for out in last_out:
        ctx["last_tx"].append({
            "tx": out.transaction.hash,
            "time": out.transaction.block.time,
            "hash": out.transaction.block.hash,
            "height": out.transaction.block.height,
        })
    return ctx
def get_table_data(self):
    keys = self.request.GET.get("keys")
    stream = self.kwargs["stream"]
    if keys is None or not keys.strip():
        # No search keys: hand the table a lazy iterator over the whole
        # stream instead of fetching every item up front.
        sort = self.request.GET.get("sort", "-time")
        stream_itr = LazyStream(self.kwargs["stream"])
        if sort == "time":
            stream_itr.descending = False
        return TableDataLen(stream_itr)
    client = get_client()
    if is_iscc(keys):
        keys_clean = iscc_split(keys)
    else:
        keys_clean = [keys.strip()]
    results = []
    for k in keys_clean:
        result = client.liststreamkeyitems(stream, k, verbose=True)
        results.extend(result)
    # Deduplicate by the (txid, vout) composite key.
    seen = set()
    unique = []
    for item in results:
        k = item["txid"] + str(item["vout"])
        if k not in seen:
            unique.append(item)
            seen.add(k)
    result = [dict(e, stream=stream) for e in unique]
    return result
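# A minimal sketch of the TableDataLen wrapper returned above -- hypothetical,
# since the real class is not part of this module. The assumption is that its
# only job is to expose a cheap __len__ via the iterator's count() method so
# the table paginator never exhausts the lazy stream.
class TableDataLen:

    def __init__(self, data):
        self.data = data

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, key):
        return self.data[key]

    def __len__(self):
        # may return the 999999 sentinel if the RPC call fails (see count())
        return self.data.count()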
def get_context_data(self, **kwargs):
    api = get_client()
    ctx = super().get_context_data(**kwargs)
    tx_raw = api.getrawtransaction(ctx["hash"], 4)
    if tx_raw.get("confirmations"):
        try:
            tx_db = Transaction.objects.get(hash=tx_raw["txid"])
            outputs_db = tx_db.outputs_for_tx.order_by("out_idx")
        except Transaction.DoesNotExist:
            # Confirmed on the node but not yet synced to the database.
            outputs_db = None
    else:
        outputs_db = None
    ctx["details"] = tx_raw
    ctx["raw"] = "raw" in self.request.GET
    blockchain_params = api.getblockchainparams()
    pubkeyhash_version = blockchain_params["address-pubkeyhash-version"]
    checksum_value = blockchain_params["address-checksum-value"]
    if "blocktime" in ctx["details"]:
        ctx["formattedBlocktime"] = datetime.datetime.fromtimestamp(
            ctx["details"]["blocktime"])
    ctx["formattedVin"] = []
    ctx["formattedVout"] = []
    for index, vin in enumerate(ctx["details"]["vin"]):
        address = "N/A"
        if "scriptSig" in vin:
            # Derive the sender address from the public key in the
            # signature script.
            public_key = vin["scriptSig"]["asm"].split(" ")[1]
            address = public_key_to_address(public_key, pubkeyhash_version,
                                            checksum_value)
        ctx["formattedVin"].append({
            "index": index,
            "address": address,
            "transaction": vin["txid"] if "txid" in vin else "",
            "vout": vin.get("vout", 0),
        })
    for index, vout in enumerate(ctx["details"]["vout"]):
        address = "N/A"
        if "scriptPubKey" in vout and "addresses" in vout["scriptPubKey"]:
            address = ", ".join(vout["scriptPubKey"]["addresses"])
        if outputs_db and outputs_db[index].spent:
            redeemed_in = outputs_db[index].spent_by_txid()
        else:
            redeemed_in = ""
        ctx["formattedVout"].append({
            "index": index,
            "address": address,
            "transaction": redeemed_in,
            "amount": vout["value"],
        })
    return ctx
def get_context_data(self, **kwargs):
    api = get_client()
    ctx = super().get_context_data(**kwargs)
    ctx["MEX_MINER"] = settings.MEX_MINER
    ctx["details"] = api.getblock(ctx["block"].hash, 1)
    ctx["formattedtime"] = datetime.datetime.fromtimestamp(
        ctx["details"]["time"])
    ctx["num_transactions"] = len(ctx["details"]["tx"])
    return ctx
def sync_stream_items():
    """Synchronize stream data for all monitored streams."""
    api = get_client()
    streams = Stream.objects.filter(monitor=True)
    for stream_obj in streams:
        log.info("import items for stream %s" % stream_obj.name)
        height = StreamItem.objects.filter(stream=stream_obj).count()
        total_new_items = 0
        while True:
            raw_items = api.liststreamitems(
                stream_obj.name, verbose=True, count=100, start=height)
            if not raw_items:
                break
            new_stream_items = {}
            # collect referenced outputs
            txids_raw = [r["txid"] for r in raw_items]
            outputs = Output.objects.filter(
                transaction_id__in=txids_raw).only(
                    "id", "transaction_id", "out_idx")
            # Map txid/out_idx composite to primary key of Output
            outputs = {
                f"{o.transaction_id}{o.out_idx}": o.pk for o in outputs
            }
            for raw_item in raw_items:
                if raw_item["confirmations"] == 0:
                    continue
                # Transform raw data
                txid = raw_item.pop("txid")
                vout = raw_item.pop("vout")
                raw_item["output_id"] = outputs[f"{txid}{vout}"]
                raw_item["stream_id"] = stream_obj.pk
                raw_item["time"] = datetime.fromtimestamp(
                    raw_item["time"], tz=pytz.utc)
                del raw_item["blockhash"]
                del raw_item["blockindex"]
                del raw_item["blocktime"]
                del raw_item["confirmations"]
                del raw_item["timereceived"]
                publishers = raw_item.pop("publishers")
                s_item_obj = StreamItem(**raw_item)
                new_stream_items[s_item_obj] = publishers
            StreamItem.objects.bulk_create(new_stream_items.keys())
            # bulk_create cannot populate m2m relations, so attach the
            # publishers in a second pass.
            for item_obj, publishers in new_stream_items.items():
                item_obj.publishers.add(*publishers)
            total_new_items += len(new_stream_items.keys())
            height += 100
        if total_new_items:
            log.info("imported %s items from stream %s" %
                     (total_new_items, stream_obj.name))
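# Hypothetical wiring, not part of the code above: sync_stream_items()
# resumes at the current StreamItem count, so it is idempotent and can simply
# be re-run from a Django management command or a periodic task.
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Synchronize stream items from the node (sketch)"

    def handle(self, *args, **options):
        sync_stream_items()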
def sync_blocks(batch_size=1000):
    api = get_client()
    db_height = Block.get_db_height()
    node_height = api.getblockcount()
    if db_height == node_height:
        log.info("no new blocks to sync")
        return
    if db_height > node_height:
        raise SyncError("database is ahead of node")
    log.info("sync blocks %s-%s" % (db_height + 1, node_height))
    block_counter = 0
    existing_addrs = set(Address.objects.values_list("address", flat=True))
    # range() excludes the upper bound, so add 1 to include the node tip.
    from_to = range(db_height + 1, node_height + 1)
    for batch in batchwise(from_to, batch_size=batch_size):
        log.info("sync blocks batch %s" % batch)
        block_objs = []
        for block_data in api.listblocks(batch, True):
            miner_addr = block_data["miner"]
            if miner_addr not in existing_addrs:
                Address.objects.create(address=miner_addr)
                existing_addrs.add(miner_addr)
            blocktime = datetime.fromtimestamp(block_data["time"], tz=pytz.utc)
            block_objs.append(
                Block(
                    height=block_data["height"],
                    hash=block_data["hash"],
                    merkleroot=block_data["merkleroot"],
                    miner_id=miner_addr,
                    time=blocktime,
                    txcount=block_data["txcount"],
                    size=block_data["size"],
                ))
            block_counter += 1
        Block.objects.bulk_create(block_objs, batch_size=batch_size)
    log.info("imported %s blocks" % block_counter)
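# A minimal sketch of the batchwise() helper used above -- an assumption, as
# the real implementation lives elsewhere. It chops a range of heights into
# "start-end" strings, which MultiChain's listblocks accepts as a block range.
def batchwise(seq, batch_size=1000):
    seq = list(seq)
    for i in range(0, len(seq), batch_size):
        chunk = seq[i:i + batch_size]
        yield "%s-%s" % (chunk[0], chunk[-1])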
def get_context_data(self, **kwargs):
    ctx = super().get_context_data(**kwargs)
    api = get_client()
    ctx['stream_details'] = api.liststreams(ctx['stream'])[0]
    ctx['stream_items'] = list(reversed(api.liststreamitems(ctx['stream'])))
    for item in ctx['stream_items']:
        if 'blocktime' in item:
            item['formatted_time'] = datetime.datetime.fromtimestamp(
                item['blocktime'])
        if item['data']:
            try:
                # Stream payloads are hex-encoded UBJSON; fall back to the
                # raw hex if decoding fails.
                item['formatted_data'] = ubjson.loadb(unhexlify(item['data']))
            except Exception:
                item['formatted_data'] = item['data']
    return ctx
def sync_streams():
    api = get_client()
    streams = api.liststreams("*", verbose=True)
    for stream in streams:
        # Key for update
        name = stream.pop("name")
        # Set fk by id
        stream["createtxid_id"] = stream.pop("createtxid")
        creators = stream.pop("creators")
        stream_obj, created = Stream.objects.update_or_create(
            name=name, defaults=stream)
        creator_objs = Address.objects.filter(address__in=creators)
        if creator_objs.exists():
            stream_obj.creators.add(*creator_objs)
    log.info("imported %s streams" % len(streams))
def get_context_data(self, **kwargs):
    ctx = super().get_context_data(**kwargs)
    tx, out_idx = self.kwargs.get("output").split(":")
    stream = self.kwargs.get("stream")
    client = get_client()
    res = client.liststreamtxitems(stream, [tx], verbose=True)[int(out_idx)]
    res["stream"] = stream
    ctx["streamitem"] = res
    if stream == "iscc":
        # Look up smart licenses registered for this ISCC code.
        iscc_code = "-".join(res["keys"])
        try:
            smart_licenses = client.liststreamkeyitems(
                "smart-license", key=iscc_code, verbose=True)
            ctx["smartlicenses"] = smart_licenses
        except RpcError:
            pass
    return ctx
def benchmark_rpc():
    api = get_client()
    node_height = api.getblockcount()["result"]
    log.info(f"Node height: {node_height}")
    log.info("Starting benchmark. Please be patient!")

    start = timeit.default_timer()
    blocks = api.listblocks("-" + str(node_height), verbose=False)["result"]
    stop = timeit.default_timer()
    runtime = stop - start
    log.info(f"RPC listblocks: {runtime}")

    start = timeit.default_timer()
    blocks = api.listblocks("-" + str(node_height), verbose=True)["result"]
    stop = timeit.default_timer()
    runtime = stop - start
    log.info(f"RPC listblocks verbose: {runtime}")

    block_hashes = [item["hash"] for item in blocks]
    tx_hashes = []

    start = timeit.default_timer()
    for block_hash in block_hashes:
        data = api.getblock(block_hash, verbose=1)["result"]["tx"]
        tx_hashes.extend(data)  # pre-collect for getrawtransaction
    stop = timeit.default_timer()
    runtime = stop - start
    log.info(f"RPC full getblock scan verbose=1: {runtime}")

    start = timeit.default_timer()
    for block_hash in block_hashes:
        data = api.getblock(block_hash, verbose=4)
    stop = timeit.default_timer()
    runtime = stop - start
    log.info(f"RPC full getblock scan verbose=4: {runtime}")

    start = timeit.default_timer()
    for tx_hash in tx_hashes:
        data = api.getrawtransaction(tx_hash, verbose=1)["result"]
    stop = timeit.default_timer()
    runtime = stop - start
    log.info(f"RPC full getrawtransaction scan verbose=1: {runtime}")
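# The start/stop pairs above repeat five times; a context manager would be an
# easy refactor. A sketch (not used by benchmark_rpc as written):
from contextlib import contextmanager


@contextmanager
def timed(label):
    start = timeit.default_timer()
    yield
    log.info(f"{label}: {timeit.default_timer() - start}")

# Usage:
#     with timed("RPC listblocks"):
#         api.listblocks("-" + str(node_height), verbose=False)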
def clean_reorgs(horizon=settings.MEX_SYNC_HORIZON):
    """
    Clean chain reorganizations up to `horizon` blocks in the past.

    First we compare the latest `horizon` block hashes from the database
    with those from the authoritative node. If we find a difference we
    delete all blocks starting at the oldest differing block. Deleting
    those blocks automatically cascades through the data model and removes
    all dependent transactions, inputs and outputs.
    """
    log.info("clean reorgs with horizon {}".format(horizon))
    api = get_client()
    node_height = api.getblockcount()
    db_height = Block.get_db_height()
    if db_height > node_height:
        log.warning("database is ahead of node")
        return
    db_blocks = list(
        Block.objects.order_by("-height").values_list("height",
                                                      "hash")[:horizon])
    if not db_blocks:
        log.info("database has no block data")
        return
    db_height = db_blocks[0][0]
    db_horizon = db_blocks[-1][0]
    horizon_range = "%s-%s" % (db_horizon, db_height)
    node_data = api.listblocks(horizon_range, False)
    node_blocks = [(b["height"], b["hash"]) for b in reversed(node_data)]
    difference = set(db_blocks).difference(set(node_blocks))
    if not difference:
        log.info("no reorgs found")
        return
    fork_height = min(difference)[0]
    log.info("database reorg from height %s" % fork_height)
    Block.objects.filter(height__gte=fork_height).delete()
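# Worked example of the reorg detection above: blocks are compared as
# (height, hash) pairs, and min() on the differing pairs picks the oldest
# fork point because tuples compare by height first.
#
#     db_blocks   = [(102, "cc"), (101, "bb"), (100, "aa")]
#     node_blocks = [(102, "ff"), (101, "ee"), (100, "aa")]
#     difference  = {(101, "bb"), (102, "cc")}
#     min(difference)[0]  # -> 101, so blocks 101 and above are deleted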
def sync_transactions():
    api = get_client()
    queryset = (Block.objects.filter(
        transactions__isnull=True).only("hash").order_by("height"))
    if not queryset.exists():
        return
    addrs_existing = set(Address.objects.values_list("address", flat=True))
    log.info("sync transactions from %s blocks" % queryset.count())
    tx_counter = 0
    out_counter = 0
    in_counter = 0
    addr_counter = 0
    for block_obj in queryset:
        block_data = api.getblock(block_obj.hash, 4)
        tx_hashes = [item["txid"] for item in block_data["tx"]]
        tx_objs = []
        for tx_idx, tx_hash in enumerate(tx_hashes):
            tx_obj = Transaction(hash=tx_hash, block=block_obj, idx=tx_idx)
            tx_objs.append(tx_obj)
            tx_counter += 1
        # only postgres sets primary ids for tx_objs here.
        Transaction.objects.bulk_create(tx_objs)
        tx_objs_by_hash = {txo.hash: txo for txo in tx_objs}
        # create outputs for all transactions in block
        out_objs = []
        for tx_data in block_data["tx"]:
            if tx_data is None:
                continue
            tx_obj = tx_objs_by_hash[tx_data["txid"]]
            # Create new outputs
            for out_entry in tx_data["vout"]:
                value = out_entry.get("value")
                try:
                    address = out_entry["scriptPubKey"]["addresses"][0]
                except KeyError:
                    address = None
                out_idx = out_entry["n"]
                if address and address not in addrs_existing:
                    Address.objects.create(address=address)
                    addrs_existing.add(address)
                    addr_counter += 1
                out_objs.append(
                    Output(
                        transaction=tx_obj,
                        out_idx=out_idx,
                        value=value,
                        address_id=address,
                    ))
                out_counter += 1
        # postgres needed to set object ids
        Output.objects.bulk_create(out_objs)
        # create inputs for all transactions in block
        in_objs = []
        for tx_data in block_data["tx"]:
            if tx_data is None:
                continue
            tx_obj = tx_objs_by_hash[tx_data["txid"]]
            # Create input and mark spent outputs
            for vin_entry in tx_data["vin"]:
                txid = vin_entry.get("txid")
                coinbase = vin_entry.get("coinbase")
                vout = vin_entry.get("vout")
                if txid:
                    out = Output.objects.get(transaction__hash=txid,
                                             out_idx=vout)
                    in_objs.append(
                        Input(transaction=tx_obj, spends=out, coinbase=False))
                    in_counter += 1
                    out.spent = True
                    out.save()
                if coinbase:
                    in_objs.append(Input(transaction=tx_obj, coinbase=True))
                    in_counter += 1
        Input.objects.bulk_create(in_objs)
        log.info("imported %s transactions from block %s" %
                 (len(tx_objs), block_obj.height))
    log.info("imported %s transactions" % tx_counter)
    log.info("imported %s outputs" % out_counter)
    log.info("imported %s inputs" % in_counter)
    log.info("imported %s addresses" % addr_counter)
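# Hypothetical top-level entry point, assuming the sync functions above live
# in one module: reorg cleanup must run first so stale blocks cascade-delete
# before new data is imported on top of them.
def sync_all():
    clean_reorgs()
    sync_blocks()
    sync_transactions()
    sync_streams()
    sync_stream_items()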
def count(self):
    api = get_client()
    try:
        return int(api.liststreams(self.stream)[0]["items"])
    except Exception:
        # Same large sentinel fallback as above if the RPC call fails.
        return 999999
def get_context_data(self, **kwargs):
    ctx = super().get_context_data(**kwargs)
    api = get_client()
    ctx['tokens'] = api.listassets()
    return ctx
def get_context_data(self, **kwargs):
    ctx = super().get_context_data(**kwargs)
    api = get_client()
    ctx['token_details'] = api.listassets(ctx['token'])[0]
    return ctx
def get_context_data(self, **kwargs):
    api = get_client()
    ctx = super().get_context_data(**kwargs)
    ctx['info'] = api.getinfo()
    return ctx
def __init__(self, name, descending=True):
    self.name = name
    self.descending = descending
    self.api = get_client()
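# A sketch of how LazyStream slicing might work -- hypothetical, since only
# __init__() and count() appear here. The idea is to translate a Python slice
# into a single liststreamitems(count=..., start=...) RPC call, so the table
# paginator fetches exactly one page per request. MultiChain interprets a
# negative start as an offset from the newest item.
def __getitem__(self, key):
    if not isinstance(key, slice):
        raise TypeError("LazyStream supports slice access only")
    start = key.start or 0
    num = key.stop - start
    if self.descending:
        start = -(start + num)
    items = self.api.liststreamitems(
        self.name, verbose=True, count=num, start=start)
    if self.descending:
        # the node returns oldest-first; flip to newest-first
        items = list(reversed(items))
    return [dict(e, stream=self.name) for e in items]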
def get_context_data(self, **kwargs):
    ctx = super().get_context_data(**kwargs)
    api = get_client()
    streams = api.liststreams()
    # Sort streams by item count, descending.
    ctx['streams'] = sorted(streams, key=lambda k: -k['items'])
    return ctx
def get_table_data(self):
    client = get_client()
    streams = client.liststreams("*", verbose=True)
    # Only show streams that are whitelisted on the table class.
    streams = [e for e in streams if e["name"] in ListStreamTable.streams]
    return streams