#!/usr/bin/env python3
import networkx as nx
from sqlite_wrapper import SQLiteWrapper
from queries import *
from util import *
from collections import Counter

###

FILENAME = "tx_graph"

db = SQLiteWrapper('../blockchain/blockchain.sqlite')

try:
    max_txid_res = db.query(max_txid_query, fetch_one=True)
except Exception as e:
    die(e)

G = nx.DiGraph()
min_txid = 1

# Resume from a previous run if a checkpoint exists.
try:
    G, min_txid = load(FILENAME)
except Exception as e:
    print(e)

print("Scanning %d transactions, starting from %d." % (max_txid_res, min_txid))

for tx_id in range(min_txid, max_txid_res + 1):
    # Save progress to files
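    # The loop body is omitted in this excerpt; the graph-building statements
    # would go here. A minimal sketch of the checkpoint that the comment above
    # refers to, reusing save() with the (graph, filename, last txid) call
    # shape used elsewhere in this repository; the interval is an assumption:
    if tx_id % 100000 == 0:
        save(G, FILENAME, tx_id)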
# Imports are missing from this excerpt; the module name for Features is
# assumed from its usage below.
import argparse
import pickle

from sqlite_wrapper import SQLiteWrapper
from queries import *
from features import Features  # assumed module name

parser = argparse.ArgumentParser(description="BitIodine Classifier")
parser.add_argument('-d', dest='db', default="features.sqlite",
                    help='SQLite database path')
parser.add_argument("-a", dest="address", default=None,
                    help="Classify a single address.")
parser.add_argument("-af", dest="address_filename", default=None,
                    help="Classify every address in a text file, one per line.")
parser.add_argument("-cf", dest="cluster_filename", default=None,
                    help="Classify every cluster in a text file, one per line.")
parser.add_argument("-c", dest="cluster", type=int, default=None,
                    help="Classify a single cluster.")
parser.add_argument("--all-clusters", action="store_true", dest="all_clusters",
                    default=False, help="Classify every cluster.")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

try:
    db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")
    max_block = int(db_blockchain.query(max_block_query, fetch_one=True))
    db_blockchain.close()
except Exception:
    max_block = 0

f = Features()
scores = f.features
labels = f.labels
labels_string = f.labels_string

with open("../grapher/tx_graph.dat", "rb") as gf:
    G = pickle.load(gf)

print("Graph loaded.")

with open("../clusterizer/clusters.dat", "rb") as cf:
    # Assumed completion of the truncated excerpt, mirroring how clusters.dat
    # is unpickled in the grapher scripts.
    users = pickle.load(cf)
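# Example invocations (the script name is illustrative):
#   python3 classifier.py -a 1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa
#   python3 classifier.py --all-clusters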
        f.write(k + '\n')  # tail of a loop writing the known addresses; its opening precedes this excerpt

clusters_query = '(' + ', '.join(['"' + str(k) + '"' for k in known]) + ')'

# 1391385600 is bitcrypt.info registration date
ransoms_signature = (' AND (txout_value BETWEEN 0.19e8 AND 0.21e8'
                     ' OR txout_value BETWEEN 0.39e8 AND 0.41e8'
                     ' OR txout_value BETWEEN 0.49e8 AND 0.51e8)'
                     ' AND time > 1391385600')

sum_query = ("SELECT SUM(txout_value)/1e8 FROM tx_full WHERE address IN " +
             clusters_query + ransoms_signature)
detail_query = ("SELECT address, COUNT(*) AS ransoms FROM tx_full WHERE address IN " +
                clusters_query + ransoms_signature +
                " GROUP BY address ORDER BY ransoms DESC")
tx_query = ("SELECT datetime(time, 'unixepoch'), tx_hash, txout_value, address"
            " FROM tx_full WHERE address IN " + clusters_query +
            ransoms_signature + " ORDER BY time ASC")
group_query = ("SELECT date(time, 'unixepoch') AS tx_date, SUM(txout_value), COUNT(*)"
               " FROM tx_full WHERE address IN " + clusters_query +
               ransoms_signature + " GROUP BY tx_date ORDER BY time ASC")

sum_res = float(db_blockchain.query(sum_query, fetch_one=True))
print("Sum: %f" % sum_res)

detail_res = db_blockchain.query(detail_query)
tx_res = db_blockchain.query(tx_query)
group_res = db_blockchain.query(group_query)

with open("bitcrypt_ransoms.txt", "w") as rf:
    for row in detail_res:
        address, ransoms = row
        print("%s, %d" % (address, int(ransoms)))
        rf.write("%s, %d\n" % (address, int(ransoms)))

with open("bitcrypt_tx.txt", "w") as tf:
    for row in tx_res:
        # The loop body is truncated in the original; this completion mirrors
        # the dump above, with columns in tx_query order.
        date, tx_hash, txout_value, address = row
        tf.write("%s, %s, %d, %s\n" % (date, tx_hash, txout_value, address))
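# group_res is queried above but its dump lies beyond this excerpt; a sketch,
# assuming a third per-day text file (filename and format are assumptions):
with open("bitcrypt_grouped.txt", "w") as gf:
    for tx_date, total, count in group_res:
        gf.write("%s, %d, %d\n" % (tx_date, total, count))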
parser.add_argument("--max-time", dest="max_time") parser.add_argument("--out-filename", dest="output_filename") args = parser.parse_args() # Load clusters with open("../clusterizer/clusters.dat", "rb") as infile: users = pickle.load(infile) print("Clusters loaded - %d addresses." % len(users)) users = stripSingletons(users) print("Singletons stripped - %d addresses." % len(users)) try: amount_txids = db.query(number_of_transactions_between_time_interval, (args.min_time, args.max_time,))[0][0] min_tx_id, max_tx_id = db.query(max_min_transaction_ids_time_interval, (args.min_time, args.max_time,))[0] except Exception as e: die(e) G = nx.DiGraph() min_txid = 1 try: G, min_tx_id = load(args.output_filename) except: pass print("Scanning %d transactions, starting from %d." %(amount_txids, min_tx_id))
help="Generate clusters (takes a long time)") parser.add_argument("--load-clusters", action="store_true", dest="load", default=False, help="Load a previously generated clusters from disk") parser.add_argument("--print-cluster", dest="print_cluster", default=None, help="Display all addresses belonging to a cluster") parser.add_argument("--print-address", dest="print_address", default=None, help="Display the cluster ID to which an address belongs") parser.add_argument("--csv", action="store_true", dest="csv", default=False, help="Export clusters to a clusters.csv file") options = parser.parse_args() db = SQLiteWrapper(options.db) if options.generate: try: max_txid_res = db.query(max_txid_query, fetch_one=True) except Exception as e: die(e) users, loaded = {}, False try: users, min_txid = load(FILENAME) loaded = True except: min_txid = 1 try: # Retrieve maximum cluster ID max_cluster_id = max(users.values()) except ValueError: # users is empty
parser.add_argument("--print-address", dest="print_address", default=None, help="Display the cluster ID to which an address belongs") parser.add_argument("--csv", action="store_true", dest="csv", default=False, help="Export clusters to a clusters.csv file") options = parser.parse_args() db = SQLiteWrapper(options.db) if options.generate: try: max_txid_res = db.query(max_txid_query, fetch_one=True) except Exception as e: die(e) users, loaded = {}, False # Keep a cache for efficient value -> keys querying users_cache = defaultdict(set) try: users, min_txid = load(FILENAME) # Build cache for address, cluster in users.items(): users_cache[cluster].add(address) loaded = True except:
help="Get label for a particular cluster ID") parser.add_argument( "--set", dest="set", nargs=2, default=[], help= "Set or replace the label for a particular cluster ID (--set <CLUSTER_ID> <LABEL>)" ) options = parser.parse_args() db = SQLiteWrapper(options.db) if options.get is not None: try: label = db.query(get_cluster_label_query, (options.get, ), fetch_one=True) except Exception as e: die('No label found for the cluster specified.') print(label) elif len(options.set) > 1: try: res = db.query(add_cluster_label_query, (int(options.set[0]), options.set[1])) except Exception as e: die(e) print("Cluster {} now has label '{}'".format(int(options.set[0]), options.set[1]))
from util import *
import argparse

# Added for self-containment; in the original these may arrive via util's
# star import.
from sqlite_wrapper import SQLiteWrapper
from queries import *

###

parser = argparse.ArgumentParser(
    description="BitIodine Cluster Labels: add labels to clusters.")
parser.add_argument('-d', dest='db', default="cluster_labels.sqlite",
                    help='Cluster labels DB path')
parser.add_argument("--get", dest="get", default=None,
                    help="Get label for a particular cluster ID")
parser.add_argument("--set", dest="set", nargs=2, default=[],
                    help="Set or replace the label for a particular cluster ID "
                         "(--set <CLUSTER_ID> <LABEL>)")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.get is not None:
    try:
        label = db.query(get_cluster_label_query, (options.get,),
                         fetch_one=True)
    except Exception:
        die('No label found for the cluster specified.')
    print(label)
elif len(options.set) > 1:
    try:
        res = db.query(add_cluster_label_query,
                       (int(options.set[0]), options.set[1]))
    except Exception as e:
        die(e)
    print("Cluster {} now has label '{}'".format(int(options.set[0]),
                                                 options.set[1]))
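# Example invocations (script name and label illustrative):
#   python3 cluster_labels.py --set 12345 "MtGox"
#   python3 cluster_labels.py --get 12345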
    # Tail of dump_transactions(); the opening of the function precedes this
    # excerpt.
    if len(in_res) > 0:
        print("\t txin_id=%d tx_id=%d" % (in_res[0][0], in_res[0][1]))
        tx_count = dump_transactions(G, in_res[0][1], tx_count, count - 1)
    return tx_count

###

db = SQLiteWrapper('../blockchain/blockchain.sqlite')

parser = argparse.ArgumentParser(
    description='Generate a transaction graph starting from a given transaction')
parser.add_argument("--tx", dest="base_tx",
                    default="F4184FC596403B9D638783CF57ADFE4C75C605F6356FBC91338530E9831E9E16")
parser.add_argument("--count", dest="count", default=1, type=int)
parser.add_argument("--out-filename", dest="output_filename", default="tx_graph")
args = parser.parse_args()

try:
    base_tx_id = db.query(txhash_to_txid_query, (args.base_tx,))[0][0]
except Exception as e:
    die(e)

G = nx.MultiDiGraph()
print("base_tx_id=%d count=%d" % (base_tx_id, args.count))

tx_count = dump_transactions(G, base_tx_id, 0, args.count)
print("tx_count=%d" % tx_count)

save(G, args.output_filename, tx_count)
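# Example invocation (script name illustrative); the default --tx above is
# the well-known first Bitcoin-to-Bitcoin payment, so this follows the chain
# of transactions starting from it:
#   python3 dump_tx_graph.py --count 10 --out-filename tx_graph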
help="Generate clusters (takes a long time)") parser.add_argument("--load-clusters", action="store_true", dest="load", default=False, help="Load a previously generated clusters from disk") parser.add_argument("--print-cluster", dest="print_cluster", default=None, help="Display all addresses belonging to a cluster") parser.add_argument("--print-address", dest="print_address", default=None, help="Display the cluster ID to which an address belongs") parser.add_argument("--csv", action="store_true", dest="csv", default=False, help="Export clusters to a clusters.csv file") options = parser.parse_args() db = SQLiteWrapper(options.db) if options.generate: try: max_txid_res = db.query(max_txid_query, fetch_one=True) except Exception as e: die(e) users, loaded = {}, False # Keep a cache for efficient value -> keys querying users_cache = defaultdict(set) try: users, min_txid = load(FILENAME) # Build cache for address, cluster in users.items(): users_cache[cluster].add(address) loaded = True except:
# Imports are missing from this excerpt; the same set as in the longer
# classifier variant above is assumed, plus logging.
import argparse
import logging
import pickle

from sqlite_wrapper import SQLiteWrapper
from queries import *
from features import Features  # assumed module name

logging.basicConfig(level=logging.DEBUG)

parser = argparse.ArgumentParser(description="BitIodine Classifier")
parser.add_argument('-d', dest='db', default="features.sqlite",
                    help='SQLite database path')
parser.add_argument("-a", dest="address", default=None,
                    help="Classify a single address.")
parser.add_argument("-c", dest="cluster", type=int, default=None,
                    help="Classify a single cluster.")
parser.add_argument("--all-clusters", action="store_true", dest="all_clusters",
                    default=False, help="Classify every cluster.")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

try:
    db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")
    max_block = int(db_blockchain.query(max_block_query, fetch_one=True))
    db_blockchain.close()
except Exception:
    max_block = 0

f = Features()
scores = f.features
labels = f.labels
labels_string = f.labels_string

with open("../grapher/tx_graph.dat", "rb") as gf:
    G = pickle.load(gf)

print("Graph loaded.")

with open("../clusterizer/clusters.dat", "rb") as cf:
    users = pickle.load(cf)  # assumed completion, as in the variant above
help="Load a previously generated clusters from disk") parser.add_argument("--print-cluster", dest="print_cluster", default=None, help="Display all addresses belonging to a cluster") parser.add_argument("--print-address", dest="print_address", default=None, help="Display the cluster ID to which an address belongs") parser.add_argument("--csv", action="store_true", dest="csv", default=False, help="Export clusters to a clusters.csv file") parser.add_argument("--sqlite", action="store_true", dest="sqlite", default=False, help="Export clusters to a clusters.sqlite SQLite database") options = parser.parse_args() db = SQLiteWrapper(options.db) if options.generate: try: max_txid_res = db.query(max_txid_query, fetch_one=True) except Exception as e: die(e) users, loaded = {}, False # Keep a cache for efficient value -> keys querying users_cache = defaultdict(set) try: users, min_txid = load(FILENAME) # Build cache for address, cluster in users.items(): users_cache[cluster].add(address) loaded = True except:
from collections import Counter

###

def padWithSpaces(address):
    # Pad every address (at most 34 characters) to a fixed width, presumably
    # to keep the columns of the graph file aligned.
    if len(address) < 34:
        address += " " * (34 - len(address))
    return address

FILENAME = "tx_graph.lgf.new"

db = SQLiteWrapper('../blockchain/blockchain.sqlite')

min_txid_res = 0

try:
    addresses_res = db.query("SELECT DISTINCT address FROM txout")
except Exception as e:
    die(e)

with open(FILENAME, 'w') as f:
    f.write("@nodes\n")
    f.write("label\n")
    for address in addresses_res:
        f.write(address[0] + "\n")
    f.write("\n")
    f.write("@arcs\n")
    f.write(" " * 35)
    f.write(" " * 35)
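    # The excerpt stops mid-header. In LEMON's graph format (.lgf), the first
    # line of the @arcs section carries captions for the attribute columns
    # only; the source and target columns are uncaptioned, which is what the
    # two 35-character space runs above pad past. A plausible continuation
    # (the attribute name is an assumption):
    f.write("weight\n")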