Code example #1
"""
This script outputs a CSV file with the number of confirmed and unconfirmed blocks per day.
"""
from ipv8.attestation.trustchain.database import TrustChainDB

from tc_analysis import DB_PATH

db = TrustChainDB(DB_PATH, "trustchain")
print("Database opened!")

# Count the blocks created on each day (block_timestamp is stored in milliseconds).
query = "SELECT strftime('%d-%m-%Y', block_timestamp/1000, 'unixepoch'), COUNT(*) FROM blocks GROUP BY strftime('%d-%m-%Y', block_timestamp/1000, 'unixepoch') ORDER BY block_timestamp"
creation_info = list(db.execute(query))

print("Writing statistics")
with open("creation_stats.csv", "w") as output_file:
    output_file.write("day,blocks\n")
    for day, num_blocks in creation_info:
        output_file.write("%s,%d\n" % (day.decode(), num_blocks))
Code example #2
"""
This script outputs a CSV file with the number of confirmed and unconfirmed blocks per day.
"""
import datetime

from ipv8.attestation.trustchain.database import TrustChainDB

from tc_analysis import DB_PATH

# Assumed setup: the original fragment relies on these being defined elsewhere.
BATCH_SIZE = 500000  # page size when scanning the blocks table
MAX_BLOCKS = -1      # -1 disables the block limit
day_stats = {}

db = TrustChainDB(DB_PATH, "trustchain")

unconfirmed_txs = set()
unconfirmed_links = set()
tx_times = {}


def write_results():
    with open("creation_stats_detailled.csv", "w") as output_file:
        output_file.write("day,confirmed,unconfirmed\n")
        for day, info in day_stats.items():
            output_file.write("%s,%d,%d\n" % (day, info["confirmed"], info["unconfirmed"]))


parsed_blocks = 0
while True:
    blocks = list(db.execute("SELECT public_key, sequence_number, link_public_key, link_sequence_number, block_timestamp, type FROM blocks ORDER BY block_timestamp DESC LIMIT %d OFFSET %d" % (BATCH_SIZE, parsed_blocks)))
    if len(blocks) == 0 or (MAX_BLOCKS != -1 and parsed_blocks >= MAX_BLOCKS):
        break

    for block in blocks:
        if block[5] != b"tribler_bandwidth":
            continue

        date = datetime.datetime.fromtimestamp(block[4] / 1000)
        day = date.strftime("%Y-%m-%d")
        if day not in day_stats:
            day_stats[day] = {"unconfirmed": 0, "confirmed": 0}

        if block[3] == 0:  # Source block
            if (block[0], block[1]) in unconfirmed_links:
                day_stats[day]["confirmed"] += 1
Code example #3
with open("identities.csv", "r") as in_file:
    did_header = False
    for line in in_file.readlines():
        if not did_header:
            did_header = True
            continue

        parts = line.split(",")
        public_key = unhexlify(parts[0])
        public_keys.append(public_key)

processed = 0
for public_key in public_keys:
    last_block = list(
        db.execute(
            "SELECT tx, type FROM blocks WHERE public_key = ? ORDER BY block_timestamp DESC LIMIT 1",
            (public_key, )))[0]
    processed += 1

    if last_block[1] != b"tribler_bandwidth":
        print("NO BW %s" % last_block[1])
        continue

    decoded_tx = decode(last_block[0])[1]
    try:
        total_up = decoded_tx[b"total_up"]
        total_down = decoded_tx[b"total_down"]
        balances[public_key] = (total_up, total_down)
    except KeyError:
        print("Invalid tx: %s" % decoded_tx)
Code example #4
"""
This script outputs a CSV file with identities and the number of blocks created for each identity.
"""
from binascii import hexlify

from ipv8.attestation.trustchain.database import TrustChainDB

from tc_analysis import DB_PATH

db = TrustChainDB(DB_PATH, "trustchain")
print("Database opened!")

users_info = db.execute(
    "SELECT DISTINCT public_key, COUNT(*) FROM blocks GROUP BY public_key")
print("Fetched user info!")
user_id = 1
with open("identities.csv", "w") as out_file:
    out_file.write("id,public_key,blocks\n")
    for user_info in users_info:
        out_file.write("%d,%s,%d\n" %
                       (user_id, hexlify(user_info[0]).decode(), user_info[1]))
        user_id += 1
Code example #5
"""
This script computes the distribution of download sizes recorded in
tribler_bandwidth source blocks.
"""
from ipv8.attestation.trustchain.database import TrustChainDB
from ipv8.messaging.deprecated.encoding import decode  # legacy encoding of the tx field; adjust to your ipv8 version

database_path = u"/Users/martijndevos/Documents/trustchain-db"
db = TrustChainDB(database_path, "trustchain")
print("Database opened!")

BATCH_SIZE = 500000
TOTAL_BLOCKS = 103149931
FIVE_MB = 5 * 1024 * 1024

size_frequencies = {}
under_5_mb_size_frequencies = {}

parsed_blocks = 0
while parsed_blocks < TOTAL_BLOCKS:
    blocks = list(db.execute("SELECT link_sequence_number, type, tx FROM blocks ORDER BY block_timestamp ASC LIMIT %d OFFSET %d" % (BATCH_SIZE, parsed_blocks)))
    for block in blocks:
        if block[1] != b"tribler_bandwidth":
            continue

        if block[0] == 0:  # Source block
            decoded_tx = decode(block[2])[1]
            download_mb = int(decoded_tx[b"down"] / 1024.0 / 1024.0)
            if download_mb not in size_frequencies:
                size_frequencies[download_mb] = 0
            size_frequencies[download_mb] += 1

            if decoded_tx[b"down"] <= FIVE_MB:
                raw_download_kb = int(decoded_tx[b"down"] / 1024.0)
                # Round down to nearest hundred
                download_kb = int(raw_download_kb / 100) * 100
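                # ---------------------------------------------------------------
                # Hypothetical continuation (the original fragment ends here): a
                # sketch of how the sub-5 MB histogram might be filled, the scan
                # offset advanced and the results written out; the sub-5 MB table
                # could be dumped analogously. The original code may differ.
                # ---------------------------------------------------------------
                if download_kb not in under_5_mb_size_frequencies:
                    under_5_mb_size_frequencies[download_kb] = 0
                under_5_mb_size_frequencies[download_kb] += 1

    parsed_blocks += len(blocks)
    print("Parsed %d blocks" % parsed_blocks)

with open("download_size_frequencies.csv", "w") as out_file:
    out_file.write("size_mb,frequency\n")
    for size_mb, frequency in sorted(size_frequencies.items()):
        out_file.write("%d,%d\n" % (size_mb, frequency))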