コード例 #1
0
import argparse
import logging

# Verbose logging for the whole classifier run.
logging.basicConfig(level=logging.DEBUG)

# Command-line interface: choose what to classify (one address, a file of
# addresses, one cluster, a file of clusters, or every cluster).
parser = argparse.ArgumentParser(description="BitIodine Classifier")
parser.add_argument('-d', dest='db', default="features.sqlite",
				   help='SQLite database path')
parser.add_argument("-a", dest="address", default=None, help="Classify a single address.")
parser.add_argument("-af", dest="address_filename", default=None, help="Classify every address in a text file, one per line.")
parser.add_argument("-cf", dest="cluster_filename", default=None, help="Classify every cluster in a text file, one per line.")
parser.add_argument("-c", dest="cluster", type=int, default=None, help="Classify a single cluster.")
parser.add_argument("--all-clusters", action="store_true", dest="all_clusters", default=False, help="Classify every cluster.")
options = parser.parse_args()

# Open the features database given on the command line.
db = SQLiteWrapper(options.db)

# Best-effort probe of the current blockchain height; fall back to 0 when
# the blockchain DB is missing or the query fails. The bare `except:` was
# narrowed to `except Exception` so SystemExit/KeyboardInterrupt still
# propagate, and the connection is closed even when the query raises.
try:
	db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")
	try:
		max_block = int(db_blockchain.query(max_block_query, fetch_one=True))
	finally:
		db_blockchain.close()
except Exception:
	max_block = 0

# Build the feature extractor and expose its tables under short aliases
# used throughout the rest of the script.
f = Features()

scores, labels, labels_string = f.features, f.labels, f.labels_string

with open("../grapher/tx_graph.dat", "rb") as gf:
コード例 #2
0
#!/usr/bin/env python3
import networkx as nx

import argparse
import math

import os, sys
lib_path = os.path.abspath('../common')
sys.path.append(lib_path)

from sqlite_wrapper import SQLiteWrapper
from queries import *
from util import *

###
db = SQLiteWrapper('../blockchain/blockchain.sqlite')

# Time-interval options. Defaults mirror the sibling transaction-graph
# generator (whole chain: epoch 0 .. 2^31-1) so omitting a flag no longer
# yields None downstream; passing explicit values behaves as before.
parser = argparse.ArgumentParser(description='Generate user graph based on transactions on a time interval desired')
parser.add_argument("--min-time", dest="min_time", default=0)
parser.add_argument("--max-time", dest="max_time", default=2147483647)
parser.add_argument("--out-filename", dest="output_filename", default="user_graph")
args = parser.parse_args()

# Load clusters
# Load clusters
# Restore the address -> cluster mapping previously produced by the
# clusterizer, then discard single-address clusters.
with open("../clusterizer/clusters.dat", "rb") as clusters_file:
	users = pickle.load(clusters_file)

print("Clusters loaded - %d addresses." % len(users))

users = stripSingletons(users)
コード例 #3
0
#!/usr/bin/env python3

import os, sys
lib_path = os.path.abspath('../common')
sys.path.append(lib_path)

from sqlite_wrapper import SQLiteWrapper
from util import *
import pickle
import logging

# Accumulators: known BitCrypt addresses, and the cluster IDs they map to.
known = set()
clusters = set()

# Connection to the main blockchain database.
db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")

# Import the known BitCrypt addresses, one per line. Blank lines are
# skipped so the empty string never enters the set; encoding is pinned so
# the read does not depend on the platform default.
with open("bitcrypt_known.txt", encoding="utf-8") as f:
    for line in f:
        addr = line.strip()
        if addr:
            known.add(addr)

print("Known addresses imported.")

# Load the address -> cluster-ID mapping produced by the clusterizer.
# NOTE(review): pickle.load is only safe on trusted input — fine here as
# long as clusters.dat is this project's own output.
with open("../clusterizer/clusters.dat", "rb") as cf:
    users = pickle.load(cf)
print("Clusters loaded.")

# Map each known address to its cluster ID. Addresses absent from the
# clustering raise KeyError and are deliberately skipped; the previous
# bare `except:` also hid unrelated errors, so it is narrowed here.
for addr in known:
    try:
        clusters.add(users[addr])
    except KeyError:
        pass
コード例 #4
0
ファイル: clusterizer.py プロジェクト: Sophrinix/bitiodine
                    help='SQLite database path')
# Clusterizer CLI flags: generate or load clusters, inspect them, and
# export to CSV or SQLite. (The -d/--db option is defined just above this
# excerpt.)
parser.add_argument("--generate-clusters", action="store_true", dest="generate", default=False,
                    help="Generate clusters (takes a long time)")
parser.add_argument("--load-clusters", action="store_true", dest="load", default=False,
                    help="Load a previously generated clusters from disk")
parser.add_argument("--print-cluster", dest="print_cluster", default=None,
                    help="Display all addresses belonging to a cluster")
parser.add_argument("--print-address", dest="print_address", default=None,
                    help="Display the cluster ID to which an address belongs")
parser.add_argument("--csv", action="store_true", dest="csv", default=False,
                    help="Export clusters to a clusters.csv file")
parser.add_argument("--sqlite", action="store_true", dest="sqlite",
                    default=False, help="Export clusters to a clusters.sqlite SQLite database")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.generate:
    # Upper bound for the transaction scan: the highest txid in the DB.
    try:
        max_txid_res = db.query(max_txid_query, fetch_one=True)
    except Exception as e:
        die(e)

    # users: address -> cluster id; loaded: whether a saved state was found.
    users, loaded = {}, False

    # Keep a cache for efficient value -> keys querying
    users_cache = defaultdict(set)

    # Try to resume from a previously saved run (the try-block continues
    # beyond this excerpt).
    try:
        users, min_txid = load(FILENAME)
        # Build cache
コード例 #5
0
# Cluster inspection / export flags (one option per line in this variant).
parser.add_argument("--print-cluster",
                    dest="print_cluster",
                    default=None,
                    help="Display all addresses belonging to a cluster")
parser.add_argument("--print-address",
                    dest="print_address",
                    default=None,
                    help="Display the cluster ID to which an address belongs")
parser.add_argument("--csv",
                    action="store_true",
                    dest="csv",
                    default=False,
                    help="Export clusters to a clusters.csv file")
options = parser.parse_args()

# Open the features/clusters database given on the command line.
db = SQLiteWrapper(options.db)

if options.generate:
    # Upper bound for the transaction scan: the highest txid in the DB.
    try:
        max_txid_res = db.query(max_txid_query, fetch_one=True)
    except Exception as e:
        die(e)

    # users: address -> cluster id; loaded: whether a saved state was found.
    users, loaded = {}, False

    # Keep a cache for efficient value -> keys querying
    users_cache = defaultdict(set)

    # Try to resume from a previously saved run (the try-block continues
    # beyond this excerpt).
    try:
        users, min_txid = load(FILENAME)
        # Build cache
コード例 #6
0
import argparse
import math

import os, sys
lib_path = os.path.abspath('../common')
sys.path.append(lib_path)


from sqlite_wrapper import SQLiteWrapper
from queries import *
from util import *
from collections import Counter

###

# NOTE(review): hard-coded absolute, machine-specific DB path — the other
# scripts use '../blockchain/blockchain.sqlite' or a -d option; confirm
# whether this should be configurable.
db = SQLiteWrapper('/home/pankaj/blockchain.db')

# Time interval defaults cover the whole chain (epoch 0 .. 2^31-1).
parser = argparse.ArgumentParser(description='Generate transaction graph based on transactions on a time interval desired')
parser.add_argument("--min-time", dest="min_time", default=0)
parser.add_argument("--max-time", dest="max_time", default=2147483647)
parser.add_argument("--out-filename", dest="output_filename", default="tx_graph")
args = parser.parse_args()

# Query the transaction count and the txid range inside the requested
# time window; abort via die() on any database error.
try:
  amount_txids = db.query(number_of_transactions_between_time_interval, (args.min_time, args.max_time,))[0][0]
  min_tx_id, max_tx_id = db.query(max_min_transaction_ids_time_interval, (args.min_time, args.max_time,))[0]
  max_txid_res = db.query(max_txid_query, fetch_one=True)
except Exception as e:
  die(e)

# Directed graph that permits parallel edges between the same node pair.
G = nx.MultiDiGraph()