# --- BitIodine classifier driver (flat script) ---
# Parses CLI options, opens the features DB, probes the blockchain DB for the
# current max block height, then loads the transaction graph and the
# address -> cluster mapping produced by the grapher/clusterizer stages.
parser = argparse.ArgumentParser(description="BitIodine Classifier")
parser.add_argument('-d', dest='db', default="features.sqlite",
                    help='SQLite database path')
parser.add_argument("-a", dest="address", default=None,
                    help="Classify a single address.")
parser.add_argument("-af", dest="address_filename", default=None,
                    help="Classify every address in a text file, one per line.")
parser.add_argument("-cf", dest="cluster_filename", default=None,
                    help="Classify every cluster in a text file, one per line.")
parser.add_argument("-c", dest="cluster", type=int, default=None,
                    help="Classify a single cluster.")
parser.add_argument("--all-clusters", action="store_true", dest="all_clusters",
                    default=False, help="Classify every cluster.")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

# Best-effort read of the current max block height.
# BUG FIX: the original used a bare `except:`, which also swallows
# KeyboardInterrupt/SystemExit; catch Exception only.
try:
    db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")
    max_block = int(db_blockchain.query(max_block_query, fetch_one=True))
    db_blockchain.close()
except Exception:
    # Blockchain DB missing or unreadable: fall back to 0 (no height info).
    max_block = 0

f = Features()
scores = f.features
labels = f.labels
labels_string = f.labels_string

# NOTE(review): pickle.load assumes these are trusted local artifacts —
# never load untrusted pickles.
with open("../grapher/tx_graph.dat", "rb") as gf:
    G = pickle.load(gf)

print("Graph loaded.")

with open("../clusterizer/clusters.dat", "rb") as cf:
    users = pickle.load(cf)
# --- BitIodine classifier driver (debug-logging variant, flat script) ---
# Same pipeline as the full driver but with DEBUG logging enabled and a
# reduced option set (single address / single cluster / all clusters).
logging.basicConfig(level=logging.DEBUG)

parser = argparse.ArgumentParser(description="BitIodine Classifier")
parser.add_argument('-d', dest='db', default="features.sqlite",
                    help='SQLite database path')
parser.add_argument("-a", dest="address", default=None,
                    help="Classify a single address.")
parser.add_argument("-c", dest="cluster", type=int, default=None,
                    help="Classify a single cluster.")
parser.add_argument("--all-clusters", action="store_true", dest="all_clusters",
                    default=False, help="Classify every cluster.")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

# Best-effort read of the current max block height.
# BUG FIX: narrowed the original bare `except:` (which also swallows
# KeyboardInterrupt/SystemExit) to `except Exception`.
try:
    db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")
    max_block = int(db_blockchain.query(max_block_query, fetch_one=True))
    db_blockchain.close()
except Exception:
    # Blockchain DB missing or unreadable: fall back to 0 (no height info).
    max_block = 0

f = Features()
scores = f.features
labels = f.labels
labels_string = f.labels_string

# NOTE(review): pickle.load assumes these are trusted local artifacts —
# never load untrusted pickles.
with open("../grapher/tx_graph.dat", "rb") as gf:
    G = pickle.load(gf)

print("Graph loaded.")

with open("../clusterizer/clusters.dat", "rb") as cf:
    users = pickle.load(cf)
# --- Clusterizer tail: optional SQLite export + top-cluster report ---
# With --sqlite set, dump the address -> cluster mapping into FILENAME.sqlite,
# writing to a ".new" file first and renaming over the old DB on success.
if options.sqlite:
    cluster_db = SQLiteWrapper(FILENAME + ".sqlite.new")
    try:
        cluster_db.query(clusters_schema)
        clusters = 0
        rows = []
        for address, cluster in users.items():
            rows.append((cluster, address))
            clusters += 1
            if clusters == 10000:
                # Flush in batches of 10,000 rows to bound memory usage.
                print("Updated 10,000 records.", file=sys.stderr)
                cluster_db.query(add_cluster_query, many_rows=rows)
                rows = []
                clusters = 0
        # BUG FIX: flush the final partial batch only when it is non-empty;
        # the original issued the query unconditionally, even with no rows.
        if rows:
            cluster_db.query(add_cluster_query, many_rows=rows)
        cluster_db.close()
        # Rename is atomic on POSIX, so readers never see a half-written DB.
        os.rename(FILENAME + ".sqlite.new", FILENAME + ".sqlite")
    except Exception as e:
        die(e)
    sys.exit(0)

# Report the ten largest clusters by membership count.
counter = Counter(users.values())
top10 = counter.most_common(10)
print("Top clusters:")
print("Cluster ID\t\t\tSize")
for candidate, size in top10:
    print("{}\t\t\t\t{}".format(candidate, size))
print()
lengths = list(counter.values())