def test_TxCommit(sometx):
    """End-to-end Query then Commit round-trip against a single mintette."""
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # Every input UTXO key of tx3 must already be known to the factory.
    for in_key in tx3.get_utxo_in_keys():
        assert in_key in factory.db

    # --- Query phase ---
    H, data, dataCore = package_query(tx3, [tx1, tx2], [k1, k2])
    instance.lineReceived(data)
    reply = tr.value()

    # The reply is "OK <key> <sig>", both fields base64 encoded.
    key_bytes, sig_bytes = map(b64decode, reply.split(" ")[1:])
    mintette_key = rscoin.Key(key_bytes)
    assert factory.key.verify(H, sig_bytes)
    assert mintette_key.verify(H, sig_bytes)

    # --- Commit phase ---
    tr.clear()
    commit_msg = package_commit(dataCore, [(key_bytes, sig_bytes)])
    instance.lineReceived(commit_msg)

    flag, pub, sig = tr.value().split(" ")
    assert factory.key.verify(tx3.id(), b64decode(sig))
    signer = rscoin.Key(b64decode(pub))
    assert signer.verify(tx3.id(), b64decode(sig))
def test_TxQuery_serialize(sometx):
    """Query a mintette and verify its signature over (core + head + seq)."""
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # Every input UTXO key of tx3 must already be known to the factory.
    for in_key in tx3.get_utxo_in_keys():
        assert in_key in factory.db

    # H is the hash digest of the queried data; data serializes
    # (mainTx, otherTx, keys, sigs).
    H, data, dataCore = package_query(tx3, [tx1, tx2], [k1, k2])
    instance.lineReceived(data)
    reply = tr.value()  # tr is a StringTransport

    # handle_Query answers "OK <key> <sig> <hashhead> <seq>",
    # signed by the factory (not one of the user keys).
    _, k, s, hashhead, seqStr = unpackage_query_response(reply)
    signed_digest = sha256(" ".join(dataCore + [hashhead, seqStr])).digest()

    # Only one factory plays mintette in the sometx fixture.
    assert factory.key.verify(signed_digest, s)
def test_QueryLogEntry_serialize(sometx):
    """Round-trip a Query_Success log entry through the JSON logger.

    Builds a query bundle, parses it back, writes the resulting log entry,
    re-reads it by processed-transaction id, and checks both entries agree.
    """
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # H is the hash digest of the queried data.
    H, dataString, _ = package_query(tx3, [tx1, tx2], [k1, k2])

    items = dataString.split(" ")
    bundle_size = int(items[1])
    try:
        items = items[2:2 + bundle_size]
        H, data = RSCProtocol.parse_Tx_bundle(bundle_size, items)
    except Exception:
        # FIX: previously this returned, silently turning a parse failure
        # into a passing test. Log the traceback, then let the test fail.
        print_exc()
        raise

    logEntry = RSCLogEntry(data, "Query_Success")
    logger = RSCLogger()
    logger.write_log(logEntry)

    # Fetch the entry back by the processed transaction's id.
    jsonString = logger.query_log_by_processedTxId(logEntry.processedTx.id())
    logEntry2 = decode_json_to_log_entry(jsonString)

    assert logEntry.parentTx == logEntry2.parentTx
    assert logEntry.processedTx.id() == logEntry2.processedTx.id()
def msg_mass():
    """Build a single-mintette fixture plus 1000 packaged query messages."""
    secret = "A" * 32
    public = rscoin.Key(secret, public=False).id()
    directory = [(public, "127.0.0.1", 8080)]

    factory = RSCFactory(secret, directory, None)

    # Wire a protocol instance to an in-memory transport.
    instance = factory.buildProtocol(None)
    tr = StringTransport()
    instance.makeConnection(tr)
    sometx = (factory, instance, tr)

    # Special key used for minting coins.
    secret_special = "KEYSPECIAL"
    public_special = rscoin.Key(secret_special, public=False).pub.export()

    # Throw-away key set (kept for parity with other fixtures).
    all_keys = []
    secret = "KEYX"
    public = rscoin.Key(secret, public=False).id()
    directory = [(public, "127.0.0.1", 8080)]

    # Generate a mass of transactions.
    k1 = rscoin.Key(urandom(32), public=False)
    k2 = rscoin.Key(urandom(32), public=False)

    all_tx = []
    for _ in range(1000):
        tx1 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 100)])
        tx2 = rscoin.Tx([], [rscoin.OutputTx(k2.id(), 150)])
        tx3 = rscoin.Tx([rscoin.InputTx(tx1.id(), 0),
                         rscoin.InputTx(tx2.id(), 0)],
                        [rscoin.OutputTx(k1.id(), 250)])
        all_tx.append(([tx1, tx2], tx3))

    # Seed the factory's UTXO database with every input's outputs.
    for intx, _ in all_tx:
        for tx in intx:
            for kv, vv in tx.get_utxo_out_entries():
                factory.db[kv] = vv

    mesages_q = []
    for ([tx1, tx2], tx3) in all_tx:
        H, data, core = package_query(tx3, [tx1, tx2], [k1, k2])
        mesages_q.append((tx3, data, core))

    return (sometx, mesages_q)
def msg_mass():
    """Fixture builder: one mintette plus a thousand pre-packaged queries."""
    secret = "A" * 32
    public = rscoin.Key(secret, public=False).id()
    directory = [(public, "127.0.0.1", 8080)]

    factory = RSCFactory(secret, directory, None)

    # Attach the protocol to a string transport so no real network is used.
    instance = factory.buildProtocol(None)
    tr = StringTransport()
    instance.makeConnection(tr)
    sometx = (factory, instance, tr)

    # Special minting key (unused below, kept for fixture parity).
    secret_special = "KEYSPECIAL"
    public_special = rscoin.Key(secret_special, public=False).pub.export()

    all_keys = []
    secret = "KEYX"
    public = rscoin.Key(secret, public=False).id()
    directory = [(public, "127.0.0.1", 8080)]

    # Two signing keys shared by all generated transactions.
    signer1 = rscoin.Key(urandom(32), public=False)
    signer2 = rscoin.Key(urandom(32), public=False)

    all_tx = []
    for _ in range(1000):
        in_a = rscoin.Tx([], [rscoin.OutputTx(signer1.id(), 100)])
        in_b = rscoin.Tx([], [rscoin.OutputTx(signer2.id(), 150)])
        spend = rscoin.Tx([rscoin.InputTx(in_a.id(), 0),
                           rscoin.InputTx(in_b.id(), 0)],
                          [rscoin.OutputTx(signer1.id(), 250)])
        all_tx.append(([in_a, in_b], spend))

    # Register every input transaction's outputs as unspent.
    for inputs, _ in all_tx:
        for tx in inputs:
            for kv, vv in tx.get_utxo_out_entries():
                factory.db[kv] = vv

    mesages_q = []
    for ([in_a, in_b], spend) in all_tx:
        H, data, core = package_query(spend, [in_a, in_b], [signer1, signer2])
        mesages_q.append((spend, data, core))

    return (sometx, mesages_q)
def test_TxQuery_serialize(sometx):
    """Query round-trip for the older three-field ("OK key sig") response."""
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # Every input UTXO key of tx3 must already be known to the factory.
    for in_key in tx3.get_utxo_in_keys():
        assert in_key in factory.db

    H, data, _ = package_query(tx3, [tx1, tx2], [k1, k2])
    instance.lineReceived(data)

    # The mintette signs the query digest H directly.
    _, k, s = unpackage_query_response(tr.value())
    assert factory.key.verify(H, s)
def test_TxQuery_serialize(sometx):
    """Replay the same query three times; the sequence number must reach 3."""
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # Every input UTXO key of tx3 must already be known to the factory.
    for in_key in tx3.get_utxo_in_keys():
        assert in_key in factory.db

    H, dataString, _ = package_query(tx3, [tx1, tx2], [k1, k2])

    items = dataString.split(" ")
    bundle_size = int(items[1])
    try:
        # items becomes the b64-encoded strings of the serialized
        # (tx3, [tx1, tx2], [k1, k2]) bundle.
        items = items[2:2 + bundle_size]
        H, data = RSCProtocol.parse_Tx_bundle(bundle_size, items)
    except Exception as e:
        print_exc()
        return

    # Deliver the identical query three times, keeping only the last reply.
    for _ in range(3):
        instance.lineReceived(dataString)
        response = tr.value()
        tr.clear()

    _, k, s, hashhead, seqStr = unpackage_query_response(response)
    new_H = sha256(" ".join(items + [hashhead, seqStr])).digest()

    assert seqStr == "3"
    # Only one factory plays mintette in the sometx fixture;
    # its uncompressed public key export equals k.
    assert factory.key.verify(new_H, s)
def test_TxCommit(sometx):
    """Query/Commit round-trip with hash-head and sequence in the replies."""
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # Every input UTXO key of tx3 must already be known to the factory.
    for in_key in tx3.get_utxo_in_keys():
        assert in_key in factory.db

    # --- Query phase ---
    H, dataString, dataCoreList = package_query(tx3, [tx1, tx2], [k1, k2])
    instance.lineReceived(dataString)

    _, k, s, hashhead, seqStr = unpackage_query_response(tr.value())
    query_digest = sha256(" ".join(dataCoreList + [hashhead, seqStr])).digest()

    mintette_key = rscoin.Key(k)
    assert factory.key.verify(query_digest, s)
    assert mintette_key.verify(query_digest, s)

    # --- Commit phase ---
    tr.clear()
    dataString2 = package_commit(dataCoreList, [(k, s, hashhead, seqStr)])
    instance.lineReceived(dataString2)

    flag, pub, sig, hashhead, seqStr = unpackage_commit_response(tr.value())
    commit_digest = sha256(" ".join(dataCoreList + [hashhead, seqStr])).digest()
    assert factory.key.verify(commit_digest, sig)

    signer = rscoin.Key(pub)
    assert signer.verify(commit_digest, sig)
def test_hash_head(sometx):
    """Three identical queries must advance the hash chain by three links."""
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # Every input UTXO key of tx3 must already be known to the factory.
    for in_key in tx3.get_utxo_in_keys():
        assert in_key in factory.db

    H, dataString, _ = package_query(tx3, [tx1, tx2], [k1, k2])

    # Deliver the identical query three times.
    for _ in range(3):
        instance.lineReceived(dataString)
        response = tr.value()
        tr.clear()

    items = dataString.split(" ")
    bundle_size = int(items[1])
    try:
        items = items[2:2 + bundle_size]
        H, data = RSCProtocol.parse_Tx_bundle(bundle_size, items)
    except Exception as e:
        print_exc()
        return

    # Re-derive the expected head: each link is
    # sha256(entry_serialization || previous_head), starting from "".
    logEntry = RSCLogEntry(data, "Query_Success")
    expected_head = ""
    for _ in range(3):
        expected_head = sha256(logEntry.serialize() + expected_head).digest()

    assert expected_head == factory.get_hash_head()
def test_TxCommit(sometx):
    """Commit a transaction after a successful query (single mintette)."""
    # --- Query phase ---
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # Every input UTXO key of tx3 must already be known to the factory.
    for in_key in tx3.get_utxo_in_keys():
        assert in_key in factory.db

    H, dataString, dataCoreString = package_query(tx3, [tx1, tx2], [k1, k2])
    instance.lineReceived(dataString)

    _, k, s, hashhead, seqStr = unpackage_query_response(tr.value())
    query_digest = sha256(
        " ".join(dataCoreString + [hashhead, seqStr])).digest()
    # Only one factory plays mintette in the sometx fixture;
    # its uncompressed public key export equals k.
    assert factory.key.verify(query_digest, s)

    # --- Commit phase ---
    tr.clear()
    dataString2 = package_commit(dataCoreString, [(k, s, hashhead, seqStr)])
    instance.lineReceived(dataString2)

    flag, pub, sig, hashhead, seqStr = unpackage_commit_response(tr.value())
    commit_digest = sha256(
        " ".join(dataCoreString + [hashhead, seqStr])).digest()
    assert factory.key.verify(commit_digest, sig)

    signer = rscoin.Key(pub)
    assert signer.verify(commit_digest, sig)
data = " ".join(core) print >>fi, data active_addrs += [ ((tx.id(), 0, all_keys, 1), tx) ] shuffle(active_addrs) active_addrs2 = [] while active_addrs != []: (tx1_id, pos1, (k1id, k1pub, k1), val1), txin1 = active_addrs.pop() (tx2_id, pos2, (k2id, k2pub, k2), val2), txin2 = active_addrs.pop() tx = Tx([InputTx(tx1_id, pos1), InputTx(tx2_id, pos2)], [OutputTx(k1id, 1), OutputTx(k2id, 1)]) _, _, core = package_query(tx, [txin1, txin2], [k1, k2]) _, (_, xxotherTx, xxkeys, xxsigs) = RSCProtocol.parse_Tx_bundle(len(core), core) assert tx.check_transaction( xxotherTx, xxkeys, xxsigs) active_addrs2 += [ ((tx.id(), 0, (k1id, k1pub, k1), 1), tx) ] active_addrs2 += [ ((tx.id(), 1, (k2id, k2pub, k2), 1), tx) ] data = " ".join(core) print >>f1, data shuffle(active_addrs2) active_addrs3 = [] while active_addrs2 != []: (tx1_id, pos1, (k1id, k1pub, k1), val1), txin1 = active_addrs2.pop()
def test_multiple(): import os try: os.mkdir("testscratch") except: pass # Make special keys for making coins secret_special = "KEYSPECIAL" public_special = rscoin.Key(secret_special, public=False).id() # Define a number of keys all_keys = [] for x in range(100): secret = "KEY%s" % x public = rscoin.Key(secret, public=False).id() all_keys += [(public, secret)] # Make up the directory directory = [] for x, (pub, _) in enumerate(all_keys): directory += [(pub, "127.0.0.1", 8080 + x)] # Build the factories factories = {} for pub, sec in all_keys: factory = RSCFactory(sec, directory, public_special, conf_dir="testscratch", N=5) factories[pub] = factory # Make a mass of transactions k1 = rscoin.Key(urandom(32), public=False) k2 = rscoin.Key(urandom(32), public=False) all_tx_in = [] all_tx_out = [] for _ in range(10): tx1 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 100)]) tx2 = rscoin.Tx([], [rscoin.OutputTx(k2.id(), 150)]) tx3 = rscoin.Tx( [rscoin.InputTx(tx1.id(), 0), rscoin.InputTx(tx2.id(), 0)], [rscoin.OutputTx(k1.id(), 250)] ) all_tx_in += [ tx1, tx2 ] all_tx_out += [ tx3 ] print "Lens: all_tx_in: %s all_tx_out: %s" % (len(all_tx_in), len(all_tx_out)) for tx in all_tx_in: for kv, vv in tx.get_utxo_out_entries(): for f in factories.values(): f.db[kv] = vv data = (tx3, [tx1.serialize(), tx2.serialize()], [k1.export()[0], k2.export()[0]], [k1.sign(tx3.id()), k2.sign(tx3.id())]) # Put the transaction through total = 0 [ kid1, kid2 ] = tx3.get_utxo_in_keys() au1 = get_authorities(directory, kid1, N = 5) au2 = get_authorities(directory, kid2, N = 5) auxes = set(au1 + au2) assert len(auxes) == 10 for aid in auxes: assert isinstance(aid, str) and len(aid) == 32 assert aid in factories H, msg, dataCore = package_query(tx3, [tx1, tx2], [k1, k2]) xset = [] rss = [] for kid, f in factories.iteritems(): # resp = f.process_TxQuery(data) instance = f.buildProtocol(None) tr = StringTransport() instance.makeConnection(tr) instance.lineReceived(msg) resp_msg = 
unpackage_query_response(tr.value().strip()) assert kid == f.key.id() if resp_msg[0] == "OK": [r, s] = resp_msg[1:] total += 1 xset += [ f.key.id() ] rss += [(r,s)] else: pass assert 5 <= total <= 10 assert set(auxes) == set(xset) ## Now test the commit phase assert 5 <= len(rss) <= 10 msg_commit = package_commit(dataCore, rss) #from twisted.python import log #import sys #log.startLogging(sys.stdout) total = 0 for kid, f in factories.iteritems(): instance = f.buildProtocol(None) tr = StringTransport() instance.makeConnection(tr) instance.lineReceived(msg_commit) resp_commit = tr.value().strip() resp_l = unpackage_commit_response(resp_commit) if resp_l[0] == "OK": total += 1 assert total == 5
def test_CommitLogEntry_serialize(sometx):
    """Round-trip a Commit_Success log entry through the JSON logger.

    Runs a full query, builds a commit message, parses it back into its
    parts, logs the commit, re-reads the entry by (tx id, action) and
    checks both entries agree.
    """
    # --- Query phase ---
    (factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx

    # Check the UTXO list is up to date.
    for ik in tx3.get_utxo_in_keys():
        assert ik in factory.db

    H, dataString, dataCoreString = package_query(tx3, [tx1, tx2], [k1, k2])
    instance.lineReceived(dataString)
    response = tr.value()

    _, k, s, hashhead, seqStr = unpackage_query_response(response)
    new_H = sha256(" ".join(dataCoreString + [hashhead] + [seqStr])).digest()
    # Only one factory plays mintette in the sometx fixture.
    assert factory.key.verify(new_H, s)

    # --- Commit phase ---
    tr.clear()
    dataString2 = package_commit(dataCoreString, [(k, s, hashhead, seqStr)])

    items = dataString2.split()
    try:
        bundle_size = int(items[1])
        extras = items[2 + bundle_size:]
        items = items[2:2 + bundle_size]
        # FIX: dropped `assert len(items[2+bundle_size:]) == 0` — after the
        # slice above that expression is always empty, so it checked nothing.

        # Trailing fields arrive in (key, sig, hashhead, seq) quadruples.
        auth_keys, auth_sigs = [], []
        hashheads, seqStrs = [], []
        while len(extras) > 0:
            auth_keys += [b64decode(extras.pop(0))]
            auth_sigs += [b64decode(extras.pop(0))]
            hashheads += [b64decode(extras.pop(0))]
            seqStrs += [b64decode(extras.pop(0))]
        assert len(extras) == 0

        H, data = RSCProtocol.parse_Tx_bundle(bundle_size, items)
        (mainTx, otherTx, keys, sigs) = data
    except Exception:
        # FIX: was a bare `except:` that returned, silently passing the
        # test on any parse failure. Log the traceback and fail loudly.
        print_exc()
        raise

    data = (H, mainTx, otherTx, keys, sigs,
            auth_keys, auth_sigs, hashheads, seqStrs, items)
    logEntry = RSCLogEntry(data, action="Commit_Success", lampClock="30")

    logger = RSCLogger()
    logger.write_log(logEntry)

    # Fetch the entry back by processed transaction id and action.
    jsonString = logger.query_log_by(logEntry.processedTx.id(),
                                     logEntry.action)
    logEntry2 = decode_json_to_log_entry(jsonString)

    assert logEntry.processedTx.id() == logEntry2.processedTx.id()
    assert logEntry.processedTx == logEntry2.processedTx
def test_multiple(): import os try: os.mkdir("testscratch") except: pass # Make special keys for making coins secret_special = "KEYSPECIAL" public_special = rscoin.Key(secret_special, public=False).id() # Define a number of keys all_keys = [] for x in range(100): secret = "KEY%s" % x public = rscoin.Key(secret, public=False).id() all_keys += [(public, secret)] # Make up the directory directory = [] for x, (pub, _) in enumerate(all_keys): directory += [(pub, "127.0.0.1", 8080 + x)] # Build the factories factories = {} for pub, sec in all_keys: factory = RSCFactory(sec, directory, public_special, conf_dir="testscratch", N=5) factories[pub] = factory # Make a mass of transactions k1 = rscoin.Key(urandom(32), public=False) k2 = rscoin.Key(urandom(32), public=False) all_tx_in = [] all_tx_out = [] for _ in range(10): tx1 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 100)]) tx2 = rscoin.Tx([], [rscoin.OutputTx(k2.id(), 150)]) tx3 = rscoin.Tx( [rscoin.InputTx(tx1.id(), 0), rscoin.InputTx(tx2.id(), 0)], [rscoin.OutputTx(k1.id(), 250)]) all_tx_in += [tx1, tx2] all_tx_out += [tx3] print "Lens: all_tx_in: %s all_tx_out: %s" % (len(all_tx_in), len(all_tx_out)) for tx in all_tx_in: for kv, vv in tx.get_utxo_out_entries(): for f in factories.values(): f.db[kv] = vv data = (tx3, [tx1.serialize(), tx2.serialize()], [k1.export()[0], k2.export()[0]], [k1.sign(tx3.id()), k2.sign(tx3.id())]) # Put the transaction through total = 0 [kid1, kid2] = tx3.get_utxo_in_keys() au1 = get_authorities(directory, kid1, N=5) au2 = get_authorities(directory, kid2, N=5) auxes = set(au1 + au2) assert len(auxes) == 10 for aid in auxes: assert isinstance(aid, str) and len(aid) == 32 assert aid in factories H, msg, dataCore = package_query(tx3, [tx1, tx2], [k1, k2]) xset = [] rss = [] for kid, f in factories.iteritems(): # resp = f.process_TxQuery(data) instance = f.buildProtocol(None) tr = StringTransport() instance.makeConnection(tr) instance.lineReceived(msg) resp_msg = 
unpackage_query_response(tr.value().strip()) assert kid == f.key.id() if resp_msg[0] == "OK": [r, s] = resp_msg[1:] total += 1 xset += [f.key.id()] rss += [(r, s)] else: pass assert 5 <= total <= 10 assert set(auxes) == set(xset) ## Now test the commit phase assert 5 <= len(rss) <= 10 msg_commit = package_commit(dataCore, rss) #from twisted.python import log #import sys #log.startLogging(sys.stdout) total = 0 for kid, f in factories.iteritems(): instance = f.buildProtocol(None) tr = StringTransport() instance.makeConnection(tr) instance.lineReceived(msg_commit) resp_commit = tr.value().strip() resp_l = unpackage_commit_response(resp_commit) if resp_l[0] == "OK": total += 1 assert total == 5
def main(): dir_data = load_setup(file("directory.conf").read()) # directory = dir_data["directory"] directory = [(kid, socket.gethostbyname(ip), port) for (kid, ip, port) in dir_data["directory"]] special_id = dir_data["special"] # Options parser = argparse.ArgumentParser(description='RSCoin client.') parser.add_argument('--dir', action='store_true', help='List mintettes.') parser.add_argument('--mock', action='store_true', help='Do not connect to the network.') parser.add_argument('--balances', action='store_true', help='List balances of all addresses.') parser.add_argument('--issue', nargs=2, metavar=("VALUE", "ADDRESS"), help='Issue a coin to an address.') parser.add_argument('--pay', nargs=3, metavar=("VALUE", "ADDRESS", "CHANGEADDRESS"), help='Pay and address some amount, and return change') parser.add_argument('--newaddress', nargs=1, metavar="NAME", help='Make a new address with a specific name.') parser.add_argument('--storeaddress', nargs=2, metavar=("NAME", "KEYID"), help='Load an address ID with a specific name.') parser.add_argument('--listaddress', action='store_true', help='List all known addresses.') parser.add_argument('--play', nargs=1, metavar="FILE", help='Play a set of transaction cores.') parser.add_argument('--conn', default=20, type=int, metavar="CONNECTIONS", help='Number of simultaneaous connections.') args = parser.parse_args() if args.dir: for (kid, ip, port) in directory: print "%s\t%s\t%s" % (ip, port, b64encode(kid)) elif args.balances: keys = load_keys() active = ActiveTx("activetx.log", keys) for (k, v) in active.balances().iteritems(): print "%s\t%s RSC" % (k, v) elif args.listaddress: keys = load_keys() for k in keys: if k[0] == "#": print "%s\t%s (%s)" % (k, keys[k][2], keys[k][1]) elif args.newaddress: sec_str = urandom(32) k_sec = rscoin.Key(sec_str, public=False) k_pub = k_sec.pub.export() k_id = k_sec.id() f = file("keychain.txt", "a") data = "#%s sec %s %s" % (args.newaddress[0], b64encode(k_id), b64encode(sec_str)) print 
data f.write(data + "\n") f.close() elif args.storeaddress: f = file("keychain.txt", "a") data = "#%s pub %s" % (args.storeaddress[0], args.storeaddress[1]) f.write(data + "\n") f.close() elif args.play: threads = [None] * args.conn cores = [] for core in file(args.play[0]): c = core.strip().split() cores += [c] def play_another_song(var): if var is not None and (not isinstance(var, float) or not isinstance(var, float)): print "ERROR", var if cores != []: c = cores.pop() d = play(c, directory) d.addCallback(play_another_song) def replay(): cores += [c] d.addErrback(replay) d.addErrback(play_another_song) else: threads.pop() if threads == []: reactor.stop() for _ in threads: play_another_song(None) t0 = default_timer() reactor.run() t1 = default_timer() print "Overall time: %s" % (t1 - t0) for (ip, v) in sorted(_stats.iteritems()): print "Stats: %s %s" % (ip, v) elif args.pay: (val, dest_addr, change_addr) = args.pay val = int(val) assert isinstance(val, int) and 0 < val keys = load_keys() dest_addr = b64decode(keys["#" + dest_addr][2]) change_addr = b64decode(keys["#" + change_addr][2]) active = ActiveTx("activetx.log", keys) print val xval, txs = active.get_value(int(val)) assert len(txs) > 0 if val <= xval: # build the transactions inTx = [] outTx = [rscoin.OutputTx(dest_addr, val)] if xval - val > 0: outTx += [rscoin.OutputTx(change_addr, xval - val)] inTx_list = [] keys_list = [] for (tx_id, i, key_id, value) in txs: inTx_list += [ rscoin.Tx.parse(active.Tx[(tx_id, i, key_id, value)]) ] keys_list += [rscoin.Key(b64decode(keys[key_id][3]), False)] inTx += [rscoin.InputTx(tx_id, i)] newtx = rscoin.Tx(inTx, outTx) newtx_ser = newtx.serialize() ## Now we sign and remove from the list active.add(newtx_ser) for k in txs: active.remove(k) active.save(reactor) ## Now run the on-line checking sechash, query_string, core = package_query( newtx, inTx_list, keys_list) print " ".join(core) d = play(core, directory) d.addBoth(r_stop) reactor.run() else: print "Insufficient 
balance: %s ( < %s)" % (val, xval) elif args.issue: # Parse the basic files. secret = file("secret.key").read() mykey = rscoin.Key(secret, public=False) # Ensure the secret key corresponds to the special public key. assert special_id == mykey.id() [value_str, key_name] = args.issue keys = load_keys() key_id = b64decode(keys["#" + key_name][2]) tx = rscoin.Tx([], [rscoin.OutputTx(key_id, int(value_str))]) sig = mykey.sign(tx.id()) ## Now we test the Commit tx_ser = tx.serialize() #core = map(b64encode, [tx_ser, mykey.pub.export(), sig]) #data = " ".join(["Commit", str(len(core))] + core) data = package_issue(tx, [mykey, sig]) if args.mock: print data else: auths = set(get_authorities(directory, tx.id())) small_dir = [(kid, ip, port) for (kid, ip, port) in directory if kid in auths] d = broadcast(small_dir, data) def r_process(results): for msg in results: parsed = unpackage_commit_response(msg) if parsed[0] != "OK": raise Exception("Response not OK.") pub, sig = parsed[1:] kx = rscoin.Key(pub) if not (kx.verify(tx.id(), sig) and kx.id() in auths): raise Exception("Invalid Signature.") auths.remove(kx.id()) active = ActiveTx("activetx.log", keys) active.add(tx_ser) active.save(reactor) print " ".join(core) d.addCallback(r_process) d.addBoth(r_stop) reactor.run()
def main(): dir_data = load_setup(file("directory.conf").read()) # directory = dir_data["directory"] directory = [(kid, socket.gethostbyname(ip), port) for (kid, ip, port) in dir_data["directory"] ] special_id = dir_data["special"] # Options parser = argparse.ArgumentParser(description='RSCoin client.') parser.add_argument('--dir', action='store_true', help='List mintettes.') parser.add_argument('--mock', action='store_true', help='Do not connect to the network.') parser.add_argument('--balances', action='store_true', help='List balances of all addresses.') parser.add_argument('--issue', nargs=2, metavar=("VALUE", "ADDRESS"), help='Issue a coin to an address.') parser.add_argument('--pay', nargs=3, metavar=("VALUE", "ADDRESS", "CHANGEADDRESS"), help='Pay and address some amount, and return change') parser.add_argument('--newaddress', nargs=1, metavar="NAME", help='Make a new address with a specific name.') parser.add_argument('--storeaddress', nargs=2, metavar=("NAME", "KEYID"), help='Load an address ID with a specific name.') parser.add_argument('--listaddress',action='store_true', help='List all known addresses.') parser.add_argument('--play', nargs=1, metavar="FILE", help='Play a set of transaction cores.') parser.add_argument('--conn', default=20, type=int, metavar="CONNECTIONS", help='Number of simultaneaous connections.') args = parser.parse_args() if args.dir: for (kid, ip, port) in directory: print "%s\t%s\t%s" % (ip, port, b64encode(kid)) elif args.balances: keys = load_keys() active = ActiveTx("activetx.log", keys) for (k, v) in active.balances().iteritems(): print "%s\t%s RSC" % (k, v) elif args.listaddress: keys = load_keys() for k in keys: if k[0] == "#": print "%s\t%s (%s)" % (k, keys[k][2], keys[k][1]) elif args.newaddress: sec_str = urandom(32) k_sec = rscoin.Key(sec_str, public=False) k_pub = k_sec.pub.export() k_id = k_sec.id() f = file("keychain.txt", "a") data = "#%s sec %s %s" % (args.newaddress[0], b64encode(k_id), b64encode(sec_str)) print 
data f.write(data+"\n") f.close() elif args.storeaddress: f = file("keychain.txt", "a") data = "#%s pub %s" % (args.storeaddress[0], args.storeaddress[1]) f.write(data+"\n") f.close() elif args.play: threads = [ None ] * args.conn cores = [] for core in file(args.play[0]): c = core.strip().split() cores += [ c ] def play_another_song(var): if var is not None and (not isinstance(var, float) or not isinstance(var, float)): print "ERROR", var if cores != []: c = cores.pop() d = play(c, directory) d.addCallback(play_another_song) def replay(failure): cores.append(c) print cores + len(cores) d.addErrback(replay) d.addErrback(play_another_song) else: threads.pop() if threads == []: reactor.stop() for _ in threads: play_another_song(None) t0 = default_timer() reactor.run() t1 = default_timer() print "Overall time: %s" % (t1 - t0) for (ip, v) in sorted(_stats.iteritems()): print "Stats: %s %s" % (ip, v) elif args.pay: (val, dest_addr, change_addr) = args.pay val = int(val) assert isinstance(val, int) and 0 < val keys = load_keys() dest_addr = b64decode(keys["#"+dest_addr][2]) change_addr = b64decode(keys["#"+change_addr][2]) active = ActiveTx("activetx.log", keys) print val xval, txs = active.get_value(int(val)) assert len(txs) > 0 if val <= xval: # build the transactions inTx = [] outTx = [ rscoin.OutputTx(dest_addr, val) ] if xval - val > 0: outTx += [ rscoin.OutputTx(change_addr, xval - val) ] inTx_list = [] keys_list = [] for (tx_id, i, key_id, value) in txs: inTx_list += [ rscoin.Tx.parse(active.Tx[(tx_id, i, key_id, value)]) ] keys_list += [ rscoin.Key(b64decode(keys[key_id][3]), False) ] inTx += [ rscoin.InputTx(tx_id, i) ] newtx = rscoin.Tx(inTx, outTx) newtx_ser = newtx.serialize() ## Now we sign and remove from the list active.add(newtx_ser) for k in txs: active.remove(k) active.save(reactor) ## Now run the on-line checking sechash, query_string, core = package_query(newtx, inTx_list, keys_list) print " ".join(core) d = play(core, directory) d.addBoth(r_stop) 
reactor.run() else: print "Insufficient balance: %s ( < %s)" % (val, xval) elif args.issue: # Parse the basic files. secret = file("secret.key").read() mykey = rscoin.Key(secret, public=False) # Ensure the secret key corresponds to the special public key. assert special_id == mykey.id() [value_str, key_name] = args.issue keys = load_keys() key_id = b64decode(keys["#"+key_name][2]) tx = rscoin.Tx([], [rscoin.OutputTx(key_id, int(value_str))]) sig = mykey.sign(tx.id()) ## Now we test the Commit #tx_ser = tx.serialize() #core = map(b64encode, [tx_ser, mykey.pub.export(), sig]) #data = " ".join(["Commit", str(len(core))] + core) data = package_issue(tx, [mykey, sig]) if args.mock: print data else: auths = set(get_authorities(directory, tx.id())) small_dir = [(kid, ip, port) for (kid, ip, port) in directory if kid in auths] d = broadcast(small_dir, data) def r_process(results): for msg in results: parsed = unpackage_commit_response(msg) if parsed[0] != "OK": raise Exception("Response not OK.") pub, sig = parsed[1:] kx = rscoin.Key(pub) if not (kx.verify(tx.id(), sig) and kx.id() in auths): raise Exception("Invalid Signature.") auths.remove( kx.id() ) active = ActiveTx("activetx.log", keys) active.add(tx_ser) active.save(reactor) print " ".join(core) d.addCallback(r_process) d.addBoth(r_stop) reactor.run()
def test_multiple(): import os try: os.mkdir("testscratch") except: pass # Make special keys for making coins secret_special = "KEYSPECIAL" public_special = rscoin.Key(secret_special, public=False).id() # Define a number of keys all_keys = [] for x in range(100): secret = "KEY%s" % x public = rscoin.Key(secret, public=False).id() all_keys += [(public, secret)] # Make up the directory directory = [] for x, (pub, _) in enumerate(all_keys): directory += [(pub, "127.0.0.1", 8080 + x)] # Build the factories factories = {} for pub, sec in all_keys: factory = RSCFactory(sec, directory, public_special, conf_dir="testscratch", N=5) factories[pub] = factory # Make a mass of transactions k1 = rscoin.Key(urandom(32), public=False) k2 = rscoin.Key(urandom(32), public=False) all_tx_in = [] all_tx_out = [] for _ in range(10): #...Notice the tx will have different id, because of R inside the Tx tx1 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 100)]) tx2 = rscoin.Tx([], [rscoin.OutputTx(k2.id(), 150)]) tx3 = rscoin.Tx( [rscoin.InputTx(tx1.id(), 0), rscoin.InputTx(tx2.id(), 0)], [rscoin.OutputTx(k1.id(), 250)] ) all_tx_in += [ tx1, tx2 ] all_tx_out += [ tx3 ] print "Lens: all_tx_in: %s all_tx_out: %s" % (len(all_tx_in), len(all_tx_out)) for tx in all_tx_in: for kv, vv in tx.get_utxo_out_entries(): for f in factories.values(): f.db[kv] = vv #... here the each tx1 and tx2 are added into utxo of every minetette #... notice here the tx1, tx2, tx3 come from the last time loop of transactions initilization at line 293 data = (tx3, [tx1.serialize(), tx2.serialize()], [k1.export()[0], k2.export()[0]], [k1.sign(tx3.id()), k2.sign(tx3.id())]) # Put the transaction through total = 0 [ kid1, kid2 ] = tx3.get_utxo_in_keys() #... N is the number of mintettes in one shard #... Notice inkey = pack("32sI", intx.tx_id, intx.pos) #... Kid1 and kid2 can be used as transaction id to pass into get_authorities because the first 32 byes are transaction id #... 
The last 4 byte for 'pos' are ignored in the get_authroties() au1 = get_authorities(directory, kid1, N = 5) au2 = get_authorities(directory, kid2, N = 5) auxes = set(au1 + au2) #??? Does this happen by chance: no overlapping betten au1 and au2 assert len(auxes) == 10 for aid in auxes: assert isinstance(aid, str) and len(aid) == 32 assert aid in factories H, msg, dataCore = package_query(tx3, [tx1, tx2], [k1, k2]) xset = [] rss = [] for kid, f in factories.iteritems(): # resp = f.process_TxQuery(data) instance = f.buildProtocol(None) tr = StringTransport() instance.makeConnection(tr) instance.lineReceived(msg) resp_msg = unpackage_query_response(tr.value().strip()) assert kid == f.key.id() if resp_msg[0] == "OK": [pub, sig, hashhead, seqStr] = resp_msg[1:] total += 1 xset += [ f.key.id() ] rss += [(pub, sig, hashhead, seqStr)] else: pass assert 5 <= total <= 10 assert set(auxes) == set(xset) ## Now test the commit phase assert 5 <= len(rss) <= 10 msg_commit = package_commit(dataCore, rss) #from twisted.python import log #import sys #log.startLogging(sys.stdout) total = 0 for kid, f in factories.iteritems(): instance = f.buildProtocol(None) tr = StringTransport() instance.makeConnection(tr) instance.lineReceived(msg_commit) resp_commit = tr.value().strip() resp_l = unpackage_commit_response(resp_commit) if resp_l[0] == "OK": total += 1 assert total == 5