#!/usr/bin/env python3
import networkx as nx

from sqlite_wrapper import SQLiteWrapper
from queries import *
from util import *
from collections import Counter

###

# Base name for the pickled transaction-graph checkpoint files.
FILENAME = "tx_graph"
db = SQLiteWrapper('../blockchain/blockchain.sqlite')

# Highest transaction id present in the blockchain DB; abort on failure.
try:
  max_txid_res = db.query(max_txid_query, fetch_one=True)
except Exception as e:
  die(e)

# Defaults for a fresh run: empty directed graph, scan from tx id 1.
G = nx.DiGraph()
min_txid = 1

# Best effort: resume from a previously saved checkpoint (load() is a
# project helper from util). On any failure (e.g. first run with no
# checkpoint) report it and keep the fresh graph.
try:
  G, min_txid = load(FILENAME)
except Exception as e:
  print(e)

print("Scanning %d transactions, starting from %d." %(max_txid_res, min_txid))

for tx_id in range(min_txid, max_txid_res + 1):

  # Save progress to files
# --- Example 2: scraped snippet boundary ("Beispiel" = German for "example") ---
import argparse
import logging

# Verbose logging is useful when classifying large batches.
logging.basicConfig(level=logging.DEBUG)

# CLI: classify one address/cluster, a file of addresses/clusters,
# or every cluster at once.
parser = argparse.ArgumentParser(description="BitIodine Classifier")
parser.add_argument('-d', dest='db', default="features.sqlite",
				   help='SQLite database path')
parser.add_argument("-a", dest="address", default=None, help="Classify a single address.")
parser.add_argument("-af", dest="address_filename", default=None, help="Classify every address in a text file, one per line.")
parser.add_argument("-cf", dest="cluster_filename", default=None, help="Classify every cluster in a text file, one per line.")
parser.add_argument("-c", dest="cluster", type=int, default=None, help="Classify a single cluster.")
parser.add_argument("--all-clusters", action="store_true", dest="all_clusters", default=False, help="Classify every cluster.")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

# Best effort: read the current blockchain height, falling back to 0 if
# the blockchain DB is missing or the query fails.
# NOTE(review): the bare `except` also swallows KeyboardInterrupt.
try:
	db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")
	max_block = int(db_blockchain.query(max_block_query, fetch_one=True))
	db_blockchain.close()
except:
	max_block = 0

# Feature definitions shared by the classifier (project-local helper).
f = Features()

scores = f.features
labels = f.labels
labels_string = f.labels_string

with open("../grapher/tx_graph.dat", "rb") as gf:
# --- Example 3: scraped snippet boundary ---
#!/usr/bin/env python3

import os, sys
lib_path = os.path.abspath('../common')
sys.path.append(lib_path)

from sqlite_wrapper import SQLiteWrapper
from util import *
import pickle
import logging

# Addresses known to belong to the BitCrypt ransomware operators, and
# the cluster IDs they map to.
known = set()
clusters = set()

db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")

# One known address per line; strip whitespace/newlines.
with open("bitcrypt_known.txt") as f:
    for addr in f:
        known.add(addr.strip())

print("Known addresses imported.")

# users: address -> cluster id mapping produced by the clusterizer.
with open("../clusterizer/clusters.dat", "rb") as cf:
    users = pickle.load(cf)
print("Clusters loaded.")

# Map each known BitCrypt address to its cluster id. Only a missing key
# means "address not clustered", so catch KeyError specifically instead
# of a bare except that would also hide real bugs (and Ctrl-C).
for addr in known:
    try:
        clusters.add(users[addr])
    except KeyError:
        pass
#!/usr/bin/env python3
import networkx as nx

import argparse
import math

import os, sys
lib_path = os.path.abspath('../common')
sys.path.append(lib_path)

from sqlite_wrapper import SQLiteWrapper
from queries import *
from util import *

###
db = SQLiteWrapper('../blockchain/blockchain.sqlite')

# CLI: time window and output file for the generated user graph.
parser = argparse.ArgumentParser(description='Generate user graph based on transactions on a time interval desired')
parser.add_argument("--min-time", dest="min_time")
parser.add_argument("--max-time", dest="max_time")
parser.add_argument("--out-filename", dest="output_filename")
args = parser.parse_args()

# Load clusters
# NOTE(review): `pickle` is not imported explicitly in this snippet;
# presumably re-exported via `from util import *` — confirm.
with open("../clusterizer/clusters.dat", "rb") as infile:
	users = pickle.load(infile)

print("Clusters loaded - %d addresses." % len(users))

# Drop single-address clusters (helper from util).
users = stripSingletons(users)
#!/usr/bin/env python3
import networkx as nx

from sqlite_wrapper import SQLiteWrapper
from queries import *
from util import *
from collections import Counter

###

# Base name for the pickled transaction-graph checkpoint files.
FILENAME = "tx_graph"
db = SQLiteWrapper('../blockchain/blockchain.sqlite')

# Highest transaction id present in the blockchain DB; abort on failure.
try:
  max_txid_res = db.query(max_txid_query, fetch_one=True)
except Exception as e:
  die(e)

# Defaults for a fresh run: empty directed graph, scan from tx id 1.
G = nx.DiGraph()
min_txid = 1

# Best effort: resume from a previously saved graph checkpoint; on any
# failure (e.g. first run, no checkpoint yet) keep the fresh graph.
# `except Exception` instead of the original bare `except` so that
# KeyboardInterrupt and SystemExit still propagate.
try:
  G, min_txid = load(FILENAME)
except Exception:
  pass

print("Scanning %d transactions, starting from %d." %(max_txid_res, min_txid))

for tx_id in range(min_txid, max_txid_res + 1):

  # Save progress to files
# --- Example 6: scraped snippet boundary ---
# Command-line interface for the clusterizer.
parser = argparse.ArgumentParser(description="BitIodine Clusterizer: groups addresses in ownership clusters.")
parser.add_argument('-d', dest='db', default="../blockchain/blockchain.sqlite",
				   help='SQLite database path')
parser.add_argument("--generate-clusters", action="store_true", dest="generate", default=False,
	help="Generate clusters (takes a long time)")
parser.add_argument("--load-clusters", action="store_true", dest="load", default=False,
	help="Load a previously generated clusters from disk")
parser.add_argument("--print-cluster", dest="print_cluster", default=None,
	help="Display all addresses belonging to a cluster")
parser.add_argument("--print-address", dest="print_address", default=None,
	help="Display the cluster ID to which an address belongs")
parser.add_argument("--csv", action="store_true", dest="csv", default=False,
	help="Export clusters to a clusters.csv file")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.generate:
	# Upper bound of the tx id range to scan; abort if unavailable.
	try:
		max_txid_res = db.query(max_txid_query, fetch_one=True)
	except Exception as e:
		die(e)

	# users: address -> cluster id; `loaded` flags a resumed run.
	users, loaded = {}, False
	# Best effort: resume from a saved snapshot, else start at tx id 1.
	# NOTE(review): the bare `except` also hides unrelated load errors.
	try:
		users, min_txid = load(FILENAME)
		loaded = True
	except:
		min_txid = 1

	try:
# --- Example 7: scraped snippet boundary ---
# Remaining CLI flags (the parser is created in lines omitted above).
parser.add_argument("--print-cluster",
                    dest="print_cluster",
                    default=None,
                    help="Display all addresses belonging to a cluster")
parser.add_argument("--print-address",
                    dest="print_address",
                    default=None,
                    help="Display the cluster ID to which an address belongs")
parser.add_argument("--csv",
                    action="store_true",
                    dest="csv",
                    default=False,
                    help="Export clusters to a clusters.csv file")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.generate:
    # Upper bound of the tx id range to scan; abort if unavailable.
    try:
        max_txid_res = db.query(max_txid_query, fetch_one=True)
    except Exception as e:
        die(e)

    # users: address -> cluster id; `loaded` flags a resumed run.
    users, loaded = {}, False

    # Keep a cache for efficient value -> keys querying
    users_cache = defaultdict(set)

    try:
        users, min_txid = load(FILENAME)
        # Build cache
# --- Example 8: scraped snippet boundary ---
                    help='Cluster labels DB path')
# CLI for the cluster-labels tool (parser created in omitted lines above).
parser.add_argument("--get",
                    dest="get",
                    default=None,
                    help="Get label for a particular cluster ID")
parser.add_argument(
    "--set",
    dest="set",
    nargs=2,
    default=[],
    help=
    "Set or replace the label for a particular cluster ID (--set <CLUSTER_ID> <LABEL>)"
)
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.get is not None:
    # Look up the label for the requested cluster id; any DB error is
    # reported to the user as "no label found".
    try:
        label = db.query(get_cluster_label_query, (options.get, ),
                         fetch_one=True)
    except Exception as e:
        die('No label found for the cluster specified.')

    print(label)

elif len(options.set) > 1:
    try:
        res = db.query(add_cluster_label_query,
                       (int(options.set[0]), options.set[1]))
    except Exception as e:
    for out_address in out_addr:
        txout_id = out_values[out_address]['txout_id']
        print("%s: txout_id=%d" % (out_address, txout_id))
        in_res = db.query("select txin_id, tx_id from txin where txout_id=?",
                          (txout_id, ))
        print('\tin_res=', in_res)
        if len(in_res) > 0:
            print("\t txin_id=%d tx_id=%d" % (in_res[0][0], in_res[0][1]))
            tx_count = dump_transactions(G, in_res[0][1], tx_count, count - 1)

    return tx_count


###

db = SQLiteWrapper('../blockchain/blockchain.sqlite')

# CLI: starting transaction hash, recursion depth, and output file name.
parser = argparse.ArgumentParser(
    description=
    'Generate transaction graph based on transactions on a time interval desired'
)
# NOTE(review): default hash presumably a well-known early transaction —
# confirm its provenance before relying on it.
parser.add_argument(
    "--tx",
    dest="base_tx",
    default="F4184FC596403B9D638783CF57ADFE4C75C605F6356FBC91338530E9831E9E16")
parser.add_argument("--count", dest="count", default=1, type=int)
parser.add_argument("--out-filename",
                    dest="output_filename",
                    default="tx_graph")
args = parser.parse_args()
# --- Example 10: scraped snippet boundary ---
#!/usr/bin/env python3

import os, sys
lib_path = os.path.abspath('../common')
sys.path.append(lib_path)

from sqlite_wrapper import SQLiteWrapper
from util import *
import pickle
import logging

# Addresses known to belong to the BitCrypt ransomware operators, and
# the cluster IDs they map to.
known = set()
clusters = set()

db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")

# One known address per line; strip whitespace/newlines.
with open("bitcrypt_known.txt") as f:
	for addr in f:
		known.add(addr.strip())

print("Known addresses imported.")

# users: address -> cluster id mapping produced by the clusterizer.
with open("../clusterizer/clusters.dat", "rb") as cf:
	users = pickle.load(cf)
print("Clusters loaded.")

# Map each known BitCrypt address to its cluster id; catch only
# KeyError (address absent from the clusters mapping) rather than
# swallowing every exception — including Ctrl-C — with a bare except.
for addr in known:
	try:
		clusters.add(users[addr])
	except KeyError:
		pass
# --- Example 11: scraped snippet boundary ---
from sqlite_wrapper import SQLiteWrapper
from queries import *
from util import *

import argparse
###

parser = argparse.ArgumentParser(description="BitIodine Cluster Labels: add labels to clusters.")
parser.add_argument('-d', dest='db', default="cluster_labels.sqlite",
           help='Cluster labels DB path')
parser.add_argument("--get", dest="get", default=None, help="Get label for a particular cluster ID")
parser.add_argument("--set", dest="set", nargs = 2, default=[], help="Set or replace the label for a particular cluster ID (--set <CLUSTER_ID> <LABEL>)")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.get is not None:
  try:
    label = db.query(get_cluster_label_query, (options.get,), fetch_one=True)
  except Exception as e:
    die('No label found for the cluster specified.')

  print(label)

elif len(options.set) > 1:
  try:
    res = db.query(add_cluster_label_query, (int(options.set[0]), options.set[1]))
  except Exception as e:
    die(e)
  # iterate through the outputs and dump all of them (plus their children depending on count)
  for out_address in out_addr:
      txout_id = out_values[out_address]['txout_id']
      print("%s: txout_id=%d" % (out_address, txout_id))
      in_res = db.query("select txin_id, tx_id from txin where txout_id=?", (txout_id,))
      print('\tin_res=', in_res)
      if len(in_res) > 0:
        print("\t txin_id=%d tx_id=%d" % (in_res[0][0], in_res[0][1]))
        tx_count = dump_transactions(G, in_res[0][1], tx_count, count - 1)

  return tx_count


###

db = SQLiteWrapper('../blockchain/blockchain.sqlite')

# CLI: starting transaction hash, recursion depth, output file name.
parser = argparse.ArgumentParser(description='Generate transaction graph based on transactions on a time interval desired')
parser.add_argument("--tx", dest="base_tx", default="F4184FC596403B9D638783CF57ADFE4C75C605F6356FBC91338530E9831E9E16")
parser.add_argument("--count", dest="count", default=1, type=int)
parser.add_argument("--out-filename", dest="output_filename", default="tx_graph")
args = parser.parse_args()

# Resolve the tx hash to its internal integer id; abort if unknown.
try:
  base_tx_id = db.query(txhash_to_txid_query, (args.base_tx,))[0][0]
except Exception as e:
  die(e)

# MultiDiGraph: parallel edges are allowed between the same tx pair.
G = nx.MultiDiGraph()

print("base_tx_id=%d count=%d" %(base_tx_id, args.count))
# --- Example 13: scraped snippet boundary ---
# Command-line interface for the clusterizer.
parser = argparse.ArgumentParser(description="BitIodine Clusterizer: groups addresses in ownership clusters.")
parser.add_argument('-d', dest='db', default="../blockchain/blockchain.sqlite",
				   help='SQLite database path')
parser.add_argument("--generate-clusters", action="store_true", dest="generate", default=False,
	help="Generate clusters (takes a long time)")
parser.add_argument("--load-clusters", action="store_true", dest="load", default=False,
	help="Load a previously generated clusters from disk")
parser.add_argument("--print-cluster", dest="print_cluster", default=None,
	help="Display all addresses belonging to a cluster")
parser.add_argument("--print-address", dest="print_address", default=None,
	help="Display the cluster ID to which an address belongs")
parser.add_argument("--csv", action="store_true", dest="csv", default=False,
	help="Export clusters to a clusters.csv file")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.generate:
	# Upper bound of the tx id range to scan; abort if unavailable.
	try:
		max_txid_res = db.query(max_txid_query, fetch_one=True)
	except Exception as e:
		die(e)

	# users: address -> cluster id; `loaded` flags a resumed run.
	users, loaded = {}, False

	# Keep a cache for efficient value -> keys querying
	users_cache = defaultdict(set)

	try:
		users, min_txid = load(FILENAME)
		# Build cache
# --- Example 14: scraped snippet boundary ---
from collections import defaultdict
import pickle
import argparse
import logging

# Verbose logging is useful when classifying large batches.
logging.basicConfig(level=logging.DEBUG)

# CLI: classify one address, one cluster, or every cluster.
parser = argparse.ArgumentParser(description="BitIodine Classifier")
parser.add_argument('-d', dest='db', default="features.sqlite",
				   help='SQLite database path')
parser.add_argument("-a", dest="address", default=None, help="Classify a single address.")
parser.add_argument("-c", dest="cluster", type=int, default=None, help="Classify a single cluster.")
parser.add_argument("--all-clusters", action="store_true", dest="all_clusters", default=False, help="Classify every cluster.")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

# Best effort: read the current blockchain height, falling back to 0 if
# the blockchain DB is missing or the query fails.
# NOTE(review): the bare `except` also swallows KeyboardInterrupt.
try:
	db_blockchain = SQLiteWrapper("../blockchain/blockchain.sqlite")
	max_block = int(db_blockchain.query(max_block_query, fetch_one=True))
	db_blockchain.close()
except:
	max_block = 0

# Feature definitions shared by the classifier (project-local helper).
f = Features()

scores = f.features
labels = f.labels
labels_string = f.labels_string

with open("../grapher/tx_graph.dat", "rb") as gf:
# --- Example 15: scraped snippet boundary ---
                    help='SQLite database path')
# Remaining CLI flags (the parser is created in lines omitted above).
parser.add_argument("--generate-clusters", action="store_true", dest="generate", default=False,
                    help="Generate clusters (takes a long time)")
parser.add_argument("--load-clusters", action="store_true", dest="load", default=False,
                    help="Load a previously generated clusters from disk")
parser.add_argument("--print-cluster", dest="print_cluster", default=None,
                    help="Display all addresses belonging to a cluster")
parser.add_argument("--print-address", dest="print_address", default=None,
                    help="Display the cluster ID to which an address belongs")
parser.add_argument("--csv", action="store_true", dest="csv", default=False,
                    help="Export clusters to a clusters.csv file")
parser.add_argument("--sqlite", action="store_true", dest="sqlite",
                    default=False, help="Export clusters to a clusters.sqlite SQLite database")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.generate:
    # Upper bound of the tx id range to scan; abort if unavailable.
    try:
        max_txid_res = db.query(max_txid_query, fetch_one=True)
    except Exception as e:
        die(e)

    # users: address -> cluster id; `loaded` flags a resumed run.
    users, loaded = {}, False

    # Keep a cache for efficient value -> keys querying
    users_cache = defaultdict(set)

    try:
        users, min_txid = load(FILENAME)
        # Build cache
# --- Example 16: scraped snippet boundary ---
# Command-line interface for the clusterizer.
parser = argparse.ArgumentParser(description="BitIodine Clusterizer: groups addresses in ownership clusters.")
parser.add_argument('-d', dest='db', default="../blockchain/blockchain.sqlite",
				   help='SQLite database path')
parser.add_argument("--generate-clusters", action="store_true", dest="generate", default=False,
	help="Generate clusters (takes a long time)")
parser.add_argument("--load-clusters", action="store_true", dest="load", default=False,
	help="Load a previously generated clusters from disk")
parser.add_argument("--print-cluster", dest="print_cluster", default=None,
	help="Display all addresses belonging to a cluster")
parser.add_argument("--print-address", dest="print_address", default=None,
	help="Display the cluster ID to which an address belongs")
parser.add_argument("--csv", action="store_true", dest="csv", default=False,
	help="Export clusters to a clusters.csv file")
options = parser.parse_args()

db = SQLiteWrapper(options.db)

if options.generate:
	# Upper bound of the tx id range to scan; abort if unavailable.
	try:
		max_txid_res = db.query(max_txid_query, fetch_one=True)
	except Exception as e:
		die(e)

	# users: address -> cluster id; `loaded` flags a resumed run.
	users, loaded = {}, False

	# Keep a cache for efficient value -> keys querying
	users_cache = defaultdict(set)

	try:
		users, min_txid = load(FILENAME)
		# Build cache
sys.path.append(lib_path)

from sqlite_wrapper import SQLiteWrapper
from queries import *
from util import *
from collections import Counter

###

def padWithSpaces(address):
  """Right-pad *address* with spaces to a fixed width of 34 characters.

  The LGF node table written below expects fixed-width labels; strings
  that are already 34 characters or longer are returned unchanged.
  """
  # str.ljust pads only when the string is shorter than the target
  # width, which is exactly what the original manual padding did.
  return address.ljust(34)

# Output file in LEMON Graph Format (LGF).
FILENAME = "tx_graph.lgf.new"
db = SQLiteWrapper('../blockchain/blockchain.sqlite')

min_txid_res = 0

# Every distinct output address becomes a graph node; abort on DB error.
try:
  addresses_res = db.query("SELECT DISTINCT address FROM txout")
except Exception as e:
  die(e)

with open(FILENAME, 'w') as f:

  # LGF node section: header, column name, then one label per line.
  f.write("@nodes\n")
  f.write("label\n")

  # Each query row is a 1-tuple; column 0 is the address string.
  for address in addresses_res:
    f.write(address[0] + "\n")