class TestGetAddress(unittest.TestCase):
    """Tests for BtcTxStore.get_address()."""

    def setUp(self):
        # dryrun + testnet: nothing is broadcast; testnet keys are used
        self.api = BtcTxStore(dryrun=True, testnet=True)

    def test_standard(self):
        wif = self.api.create_key()
        address = self.api.get_address(wif)
        self.assertTrue(validate.is_address_valid(address,
                                                  allowable_netcodes=['XTN']))

    def test_input_validation(self):
        # test correct types: str, bytes and unicode WIFs must all map to
        # the same address
        a = self.api.get_address(S_WIF)
        b = self.api.get_address(B_WIF)
        c = self.api.get_address(U_WIF)
        # FIX: assertEqual(a, b, c) treated `c` as the failure *message*
        # and never compared it; compare all three pairwise instead.
        self.assertEqual(a, b)
        self.assertEqual(b, c)
        # TODO invalid types
        # TODO invalid input data

    def test_standards_compliant(self):
        # FIX: removed an unused create_key() result; this test checks the
        # fixed S_WIF fixture against its known expected address.
        address = self.api.get_address(S_WIF)
        self.assertEqual(address, EXPECTED)
class TestGetAddress(unittest.TestCase):
    """Tests for BtcTxStore.get_address() (duplicate chunk variant)."""

    def setUp(self):
        # dryrun + testnet so no real transactions can occur
        self.api = BtcTxStore(dryrun=True, testnet=True)

    def test_standard(self):
        wif = self.api.create_key()
        address = self.api.get_address(wif)
        self.assertTrue(
            validate.is_address_valid(address, allowable_netcodes=['XTN']))

    def test_input_validation(self):
        # test correct types
        a = self.api.get_address(S_WIF)
        b = self.api.get_address(B_WIF)
        c = self.api.get_address(U_WIF)
        # FIX: the three-argument assertEqual(a, b, c) used `c` as the
        # assertion message, so c was never checked; assert pairwise.
        self.assertEqual(a, b)
        self.assertEqual(b, c)
        # TODO invalid types
        # TODO invalid input data

    def test_standards_compliant(self):
        # FIX: dropped the unused create_key() call; only the fixed
        # S_WIF fixture is relevant to a standards-compliance check.
        address = self.api.get_address(S_WIF)
        self.assertEqual(address, EXPECTED)
def test_authenticate_headers_provide(self):
    """
    Test of preparing and providing credential headers when
    ``sender_key`` and ``btctx_api`` are provided.
    """
    # Build a throwaway testnet key pair and sign the file hash with it.
    btctx_api = BtcTxStore(testnet=True, dryrun=True)
    sender_key = btctx_api.create_key()
    signature = btctx_api.sign_unicode(sender_key, self.file_hash)
    sender_address = btctx_api.get_address(sender_key)
    self.mock_get.return_value = Response()
    # download() is expected to attach exactly these two auth headers.
    self.test_data_for_requests['headers'] = {
        'sender-address': sender_address,
        'signature': signature,
    }
    download_call_result = core.download(
        self.test_url_address,
        self.file_hash,
        sender_key=sender_key,
        btctx_api=btctx_api
    )
    # The only expected HTTP call: GET of the file endpoint with the
    # prepared headers forwarded verbatim.
    expected_mock_calls = [call(
        urljoin(self.test_url_address, '/api/files/' + self.file_hash),
        **self.test_data_for_requests
    )]
    self.assertListEqual(
        self.mock_get.call_args_list,
        expected_mock_calls,
        'In the download() function requests.get() calls are unexpected'
    )
    self.assertIsInstance(download_call_result, Response,
                          'Must return a response object')
def test_fail(self):
    """Requests lacking Date/Authorization headers must get HTTP 401."""
    # register without auth headers fails
    rv = self.app.get('/api/register/{0}'.format(addresses["eta"]))
    self.assertEqual(rv.status_code, 401)
    # register first because ping is lazy
    blockchain = BtcTxStore()
    wif = blockchain.create_key()
    address = blockchain.get_address(wif)
    # RFC 2822 date string; the server validates it against its own clock.
    header_date = formatdate(timeval=mktime(datetime.now().timetuple()),
                             localtime=True, usegmt=True)
    message = app.config["ADDRESS"] + " " + header_date
    header_authorization = blockchain.sign_unicode(wif, message)
    headers = {"Date": header_date, "Authorization": header_authorization}
    url = '/api/register/{0}'.format(address)
    rv = self.app.get(url, headers=headers)
    self.assertEqual(rv.status_code, 200)
    # ping without auth headers fails
    time.sleep(app.config["MAX_PING"])
    rv = self.app.get('/api/ping/{0}'.format(address))
    self.assertEqual(rv.status_code, 401)
    # set height without auth headers fails
    rv = self.app.get('/api/height/{0}/10'.format(addresses["eta"]))
    self.assertEqual(rv.status_code, 401)
def setUp(self):
    """Create an uploadable temp file and mock out ``requests.post``."""
    # Create a temporary file that will be used as the uploaded file.
    self.testing_dir = os.path.dirname(os.path.abspath(__file__))
    self.test_source_file = tempfile.NamedTemporaryFile(
        prefix='tmp_',
        suffix='.spam',
        mode="w+",
        dir=self.testing_dir,
    )
    self.test_source_file.write('some file content')
    # flush so the content is visible to code that re-opens the file
    self.test_source_file.flush()
    # Mock the ``requests`` package.
    self.post_patch = patch('requests.post')
    self.mock_post = self.post_patch.start()
    self.mock_post.return_value = Response()
    # Prepare common arguments for the API's ``upload()`` function call.
    btctx_api = BtcTxStore(testnet=True, dryrun=True)
    self.upload_param = dict(
        url_base='http://test.url.com',
        btctx_api=btctx_api,
        sender_key=btctx_api.create_key(),
        file_role='101',
    )
def args_prepare(required_args, parsed_args):
    """
    Fill in all arguments required by the core API function.

    Values are taken from ``parsed_args`` when present; the credential
    arguments (``sender_key`` / ``btctx_api``) are generated on the fly
    when both are required but were not parsed.

    :param required_args: names of the arguments required by the API call
    :type required_args: list of strings
    :param parsed_args: any object exposing the required names as attributes
    :type parsed_args: argparse.Namespace
    :returns: dictionary usable as the ``**kwargs`` argument
    :rtype: dictionary
    """
    prepared = {}
    if 'sender_key' in required_args and 'btctx_api' in required_args:
        api = BtcTxStore(testnet=True, dryrun=True)
        args_base = dict(
            sender_key=api.create_key(),
            btctx_api=api,
        )
    for name in required_args:
        try:
            prepared[name] = getattr(parsed_args, name)
        except AttributeError:
            # Fall back to the generated credentials.
            prepared[name] = args_base[name]
    return prepared
def test_core_audit(self):
    """
    Test of providing correct arguments to the ``requests.post()``
    and returning gotten response object.
    """
    test_url_address = 'http://test.url.com'
    # Deterministic fixtures: hash of test data and of a challenge seed.
    file_hash = sha256(b'some test data').hexdigest()
    seed = sha256(b'some test challenge seed').hexdigest()
    btctx_api = BtcTxStore(testnet=True, dryrun=True)
    sender_key = btctx_api.create_key()
    audit_call_result = core.audit(test_url_address, sender_key, btctx_api,
                                   file_hash, seed)
    # audit() must POST to /api/audit/ with the hash/seed payload and the
    # sender's address + signature as auth headers.
    expected_calls = [call(
        urljoin(test_url_address, '/api/audit/'),
        data={
            'data_hash': file_hash,
            'challenge_seed': seed,
        },
        headers={
            'sender-address': btctx_api.get_address(sender_key),
            'signature': btctx_api.sign_unicode(sender_key, file_hash),
        }
    )]
    self.assertListEqual(
        self.mock_post.call_args_list,
        expected_calls,
        'In the audit() function requests.post() calls are unexpected'
    )
    # The mocked response object must be passed straight through.
    self.assertIs(
        self.mock_post.return_value,
        audit_call_result,
        'Returned value must be the object returned by the '
        '``requests.post()``'
    )
def test_together_sender_key_and_btctx_api(self):
    """
    Test of possibility to provide the ``sender_key`` and ``btctx_api``
    only together.
    """
    btctx_api = BtcTxStore(testnet=True, dryrun=True)
    sender_key = btctx_api.create_key()
    self.mock_get.return_value = Response()
    # test only "sender_key" given -> must raise TypeError
    self.assertRaises(
        TypeError,
        core.download,
        *(self.test_url_address, self.file_hash),
        **{'sender_key': sender_key}
    )
    # test only "btctx_api" given -> must raise TypeError
    self.assertRaises(
        TypeError,
        core.download,
        *(self.test_url_address, self.file_hash),
        **{'btctx_api': btctx_api}
    )
    # test of no exception when both args are given
    download_call_result = core.download(
        self.test_url_address,
        self.file_hash,
        sender_key=sender_key,
        btctx_api=btctx_api
    )
    self.assertIsInstance(download_call_result, Response,
                          'Must return a response object')
class AppAuthenticationHeadersTest(unittest.TestCase):
    """End-to-end checks of the app's Date/Authorization header auth."""

    def setUp(self):
        # Force authentication on for these tests (normally skipped).
        app.config["SKIP_AUTHENTICATION"] = False  # monkey patch
        self.app = app.test_client()
        self.btctxstore = BtcTxStore()
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()

    def test_success(self):
        # create header date and authorization signature
        wif = self.btctxstore.create_key()
        btc_addr = self.btctxstore.get_address(wif)
        header_date = formatdate(timeval=mktime(datetime.now().timetuple()),
                                 localtime=True, usegmt=True)
        # The signed message is "<server address> <date>".
        message = app.config["ADDRESS"] + " " + header_date
        header_authorization = self.btctxstore.sign_unicode(wif, message)
        headers = {"Date": header_date,
                   "Authorization": header_authorization}
        url = '/api/register/{0}'.format(btc_addr)
        rv = self.app.get(url, headers=headers)
        data = json.loads(rv.data.decode("utf-8"))
        # Server must echo the registered address back.
        self.assertEqual(btc_addr, data["btc_addr"])
        self.assertEqual(rv.status_code, 200)

    def test_fail(self):
        # register without auth headers fails
        btc_addr = self.btctxstore.get_address(
            self.btctxstore.get_key(self.btctxstore.create_wallet()))
        rv = self.app.get('/api/register/{0}'.format(btc_addr))
        self.assertEqual(rv.status_code, 401)
        # register first because ping is lazy
        wif = self.btctxstore.get_key(self.btctxstore.create_wallet())
        btc_addr = self.btctxstore.get_address(wif)
        header_date = formatdate(timeval=mktime(datetime.now().timetuple()),
                                 localtime=True, usegmt=True)
        message = app.config["ADDRESS"] + " " + header_date
        header_authorization = self.btctxstore.sign_unicode(wif, message)
        headers = {"Date": header_date,
                   "Authorization": header_authorization}
        url = '/api/register/{0}'.format(btc_addr)
        rv = self.app.get(url, headers=headers)
        self.assertEqual(rv.status_code, 200)
        # ping without auth headers fails
        time.sleep(app.config["MAX_PING"])
        rv = self.app.get('/api/ping/{0}'.format(btc_addr))
        self.assertEqual(rv.status_code, 401)
        # set height without auth headers fails
        btc_addr = self.btctxstore.get_address(
            self.btctxstore.get_key(self.btctxstore.create_wallet()))
        rv = self.app.get('/api/height/{0}/10'.format(btc_addr))
        self.assertEqual(rv.status_code, 401)
class TestValidateKeyMainnet(unittest.TestCase):
    """validate_key() behaviour on a mainnet API instance."""

    def setUp(self):
        self.testnet_api = BtcTxStore(dryrun=True, testnet=True)
        self.mainnet_api = BtcTxStore(dryrun=True, testnet=False)

    def test_valid_network(self):
        # A freshly generated mainnet key must validate on mainnet.
        wif = self.mainnet_api.create_key()
        self.assertTrue(self.mainnet_api.validate_key(wif))

    def test_invalid_network(self):
        # A testnet key must be rejected by the mainnet instance.
        wif = self.testnet_api.create_key()
        self.assertFalse(self.mainnet_api.validate_key(wif))

    def test_invalid_data(self):
        # Arbitrary hex garbage is not a WIF.
        self.assertFalse(self.mainnet_api.validate_key("f483"))

    def test_invalid_type(self):
        # Non-string input must be rejected, not raise.
        self.assertFalse(self.mainnet_api.validate_key(None))
def callback():
    # Sign a message that does not match what the server expects, so
    # authentication must fail.
    api = BtcTxStore()
    key = api.create_key()
    farmer = Farmer(api.get_address(key))
    stamp = formatdate(timeval=mktime(datetime.now().timetuple()),
                       localtime=True, usegmt=True)
    bad_signature = api.sign_unicode(key, "lalala-wrong")
    farmer.authenticate(bad_signature, stamp)
class TestAuth(unittest.TestCase):
    """Round-trip tests for storjcore.auth header creation/verification."""

    def setUp(self):
        self.btctxstore = BtcTxStore()
        self.sender_wif = self.btctxstore.create_key()
        self.sender = self.btctxstore.get_address(self.sender_wif)
        recipient_wif = self.btctxstore.create_key()
        self.recipient = self.btctxstore.get_address(recipient_wif)

    def test_self_validates(self):
        # Headers created by the sender must verify for that sender.
        headers = storjcore.auth.create_headers(self.btctxstore,
                                                self.recipient,
                                                self.sender_wif)
        self.assertTrue(storjcore.auth.verify_headers(self.btctxstore,
                                                      headers, 5,
                                                      self.sender,
                                                      self.recipient))

    def test_invalid_signature(self):
        def callback():
            headers = storjcore.auth.create_headers(self.btctxstore,
                                                    self.recipient,
                                                    self.sender_wif)
            # Replace the signature with garbage of plausible length.
            headers["Authorization"] = base64.b64encode(65 * b"x")
            storjcore.auth.verify_headers(self.btctxstore, headers, 5,
                                          self.sender, self.recipient)
        self.assertRaises(storjcore.auth.AuthError, callback)

    def test_timeout_to_old(self):
        def callback():
            headers = storjcore.auth.create_headers(self.btctxstore,
                                                    self.recipient,
                                                    self.sender_wif)
            # Sleep past the 5 second tolerance so the date check fails.
            time.sleep(5)
            storjcore.auth.verify_headers(self.btctxstore, headers, 5,
                                          self.sender, self.recipient)
        self.assertRaises(storjcore.auth.AuthError, callback)

    @unittest.skip("TODO implement")
    def test_timeout_to_young(self):
        pass  # FIXME how to test this?
def test_authentication_success(self):
    # A correctly signed "<server address> <date>" message authenticates.
    api = BtcTxStore()
    key = api.create_key()
    farmer = Farmer(api.get_address(key))
    stamp = formatdate(timeval=mktime(datetime.now().timetuple()),
                       localtime=True, usegmt=True)
    signed = api.sign_unicode(key, farmer.get_server_address() + " " + stamp)
    self.assertTrue(farmer.authenticate(signed, stamp))
def callback():
    # A request whose Date header is missing (None) must be rejected,
    # even though the signature itself is valid.
    api = BtcTxStore()
    key = api.create_key()
    farmer = Farmer(api.get_address(key))
    stamp = formatdate(timeval=mktime(datetime.now().timetuple()),
                       localtime=True, usegmt=True)
    signed = api.sign_unicode(key, farmer.get_server_address() + " " + stamp)
    farmer.authenticate({"Date": None, "Authorization": signed})
class TestValidateAddressMainnet(unittest.TestCase):
    """validate_address() behaviour on a mainnet API instance."""

    def setUp(self):
        self.testnet_api = BtcTxStore(dryrun=True, testnet=True)
        self.mainnet_api = BtcTxStore(dryrun=True, testnet=False)

    def test_valid_string(self):
        # Known-good mainnet address literal.
        self.assertTrue(self.mainnet_api.validate_address(
            '191GVvAaTRxLmz3rW3nU5jAV1rF186VxQc'))

    def test_valid_network(self):
        # A freshly derived mainnet address validates on mainnet.
        addr = self.mainnet_api.get_address(self.mainnet_api.create_key())
        self.assertTrue(self.mainnet_api.validate_address(addr))

    def test_invalid_network(self):
        # A testnet address must fail mainnet validation.
        addr = self.testnet_api.get_address(self.testnet_api.create_key())
        self.assertFalse(self.mainnet_api.validate_address(addr))

    def test_invalid_data(self):
        self.assertFalse(self.mainnet_api.validate_address("f483"))

    def test_invalid_type(self):
        self.assertFalse(self.mainnet_api.validate_address(None))
class TestValidateAddressTestnet(unittest.TestCase):
    """validate_address() behaviour on a testnet API instance."""

    def setUp(self):
        self.testnet_api = BtcTxStore(dryrun=True, testnet=True)
        self.mainnet_api = BtcTxStore(dryrun=True, testnet=False)

    def test_valid_string(self):
        # Known-good testnet address literal.
        self.assertTrue(self.testnet_api.validate_address(
            'migiScBNvVKYwEiCFhgBNGtZ87cdygtuSQ'))

    def test_valid_network(self):
        # A freshly derived testnet address validates on testnet.
        addr = self.testnet_api.get_address(self.testnet_api.create_key())
        self.assertTrue(self.testnet_api.validate_address(addr))

    def test_invalid_network(self):
        # A mainnet address must fail testnet validation.
        addr = self.mainnet_api.get_address(self.mainnet_api.create_key())
        self.assertFalse(self.testnet_api.validate_address(addr))

    def test_invalid_data(self):
        self.assertFalse(self.testnet_api.validate_address("f483"))

    def test_invalid_type(self):
        self.assertFalse(self.testnet_api.validate_address(None))
def callback():
    # Authentication with a None Date header must fail even when the
    # signature over "<server address> <date>" is otherwise correct.
    wallet = BtcTxStore()
    wif = wallet.create_key()
    farmer = Farmer(wallet.get_address(wif))
    now_stamp = formatdate(timeval=mktime(datetime.now().timetuple()),
                           localtime=True, usegmt=True)
    payload = farmer.get_server_address() + " " + now_stamp
    auth = wallet.sign_unicode(wif, payload)
    farmer.authenticate({"Date": None, "Authorization": auth})
def callback():
    # Sign with a date exactly `timeout` seconds in the past, which the
    # server must treat as expired.
    api = BtcTxStore()
    key = api.create_key()
    farmer = Farmer(api.get_address(key))
    timeout = farmer.get_server_authentication_timeout()
    stale = datetime.now() - timedelta(seconds=timeout)
    stamp = formatdate(timeval=mktime(stale.timetuple()),
                       localtime=True, usegmt=True)
    signed = api.sign_unicode(key, farmer.get_server_address() + " " + stamp)
    farmer.authenticate(signed, stamp)
def test_authentication_timeout_future_success(self):
    # A date slightly inside the future tolerance window still passes.
    api = BtcTxStore()
    key = api.create_key()
    farmer = Farmer(api.get_address(key))
    margin = farmer.get_server_authentication_timeout() - 5
    ahead = datetime.now() + timedelta(seconds=margin)
    stamp = formatdate(timeval=mktime(ahead.timetuple()),
                       localtime=True, usegmt=True)
    signed = api.sign_unicode(key, farmer.get_server_address() + " " + stamp)
    headers = {"Date": stamp, "Authorization": signed}
    self.assertTrue(farmer.authenticate(headers))
def test_success(self):
    """A correctly signed Date/Authorization pair registers the address."""
    # create header date and authorization signature
    blockchain = BtcTxStore()
    wif = blockchain.create_key()
    address = blockchain.get_address(wif)
    header_date = formatdate(timeval=mktime(datetime.now().timetuple()),
                             localtime=True, usegmt=True)
    # The signed message is "<server address> <date>".
    message = app.config["ADDRESS"] + " " + header_date
    header_authorization = blockchain.sign_unicode(wif, message)
    headers = {"Date": header_date, "Authorization": header_authorization}
    url = '/api/register/{0}'.format(address)
    rv = self.app.get(url, headers=headers)
    data = json.loads(rv.data.decode("utf-8"))
    # Server must echo the registered address back.
    self.assertEqual(address, data["btc_addr"])
    self.assertEqual(rv.status_code, 200)
# store data in blockchain as nulldata output (max 40bytes) data = binascii.hexlify(b"example_data") txid = api.store_nulldata(data, wifs) # Show current transaction id print("Current Transaction ID: {}".format(txid)) # Now, retrieve data based on transaction id hexnulldata = api.retrieve_nulldata(txid) print("Retrieved Data: {}".format(hexnulldata)) # create new private key wif = api.create_key() # get private key address address = api.get_address(wif) # hexlify messagetext data = binascii.hexlify(b"messagetext") # sign data with private key signature = api.sign_data(wif, data) print("signature:", signature) # verify signature (no public or private key needed) isvalid = api.verify_signature(address, signature, data) print("valid signature" if isvalid else "invalid signature")
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2015 Fabian Barkhau <*****@*****.**>
# License: MIT (see LICENSE file)

"""Profile repeated unicode signature verification with cProfile."""

from __future__ import print_function
from __future__ import unicode_literals

from btctxstore import BtcTxStore
import time
import cProfile
from pstats import Stats

api = BtcTxStore(testnet=True, dryrun=True)  # use testing setup for example
wif = api.create_key()  # create new private key
address = api.get_address(wif)  # get private key address
message = "Signed ünicöde message."
signature = api.sign_unicode(wif, message)

profile = cProfile.Profile()
profile.enable()
# FIX: removed the unused begin/end time.time() bookkeeping; the cProfile
# stats printed below already carry the timing information.
for i in range(10):
    assert (api.verify_signature_unicode(address, signature, message))

stats = Stats(profile)
stats.strip_dirs()
stats.sort_stats('cumtime')
stats.print_stats()
class FileTransfer:
    """Peer-to-peer shard transfer manager.

    Negotiates signed transfer contracts relayed over the DHT and moves
    shard data over direct connections managed by ``net``.
    """

    def __init__(self, net, wif=None, store_config=None, handlers=None):
        # Accept direct connections.
        self.net = net

        # Returned by callbacks.
        self.success_value = ("127.0.0.1", 7777)

        # Used for signing messages.
        self.wallet = BtcTxStore(testnet=False, dryrun=True)
        self.wif = wif or self.wallet.create_key()

        # Where will the data be stored?
        self.store_config = store_config
        assert(len(list(store_config)))

        # Handlers for certain events.
        self.handlers = handlers
        if self.handlers is None:
            self.handlers = {}
        if "complete" not in self.handlers:
            self.handlers["complete"] = []
        if "accept" not in self.handlers:
            self.handlers["accept"] = []

        # Start networking.
        if not self.net.is_net_started:
            self.net.start()

        # Dict of data requests: [contract_id] > contract
        self.contracts = {}

        # List of Sock objects returned from UNL.connect.
        self.cons = []

        # Dict of defers for contracts: [contract_id] > defer
        self.defers = {}

        # Three-way handshake status for contracts: [contract_id] > state
        self.handshake = {}

        # All contracts associated with this connection.
        # [con] > [contract_id] > con_info
        self.con_info = {}

        # File transfer currently active on connection.
        # [con] > contract_id
        self.con_transfer = {}

        # List of active downloads.
        # (Never try to download multiple copies of the same thing at once.)
        self.downloading = {}

        # Lock threads.
        self.mutex = Lock()

    def get_their_unl(self, contract):
        # Return the UNL of the other party in this contract.
        if self.net.unl == pyp2p.unl.UNL(value=contract["dest_unl"]):
            their_unl = contract["src_unl"]
        else:
            their_unl = contract["dest_unl"]
        return their_unl

    def get_node_id_from_unl(self, unl):
        # Deconstruct the UNL and extract its node id field.
        unl = pyp2p.unl.UNL(value=unl).deconstruct()
        return unl["node_id"]

    def is_queued(self, con=None):
        # Return 1 if any contract (on `con`, or on any connection when
        # con is None) still has bytes remaining to transfer, else 0.
        if con is not None:
            if con not in self.con_info:
                return 0
        if con is None:
            con_list = list(self.con_info)
        else:
            con_list = [con]
        for con in con_list:
            for contract_id in list(self.con_info[con]):
                con_info = self.con_info[con][contract_id]
                if con_info["remaining"]:
                    return 1
        return 0

    def cleanup_transfers(self, con, contract_id):
        """Drop all state associated with a finished transfer."""
        # Cleanup downloading.
        contract = self.contracts[contract_id]
        if contract["data_id"] in self.downloading:
            if contract["direction"] == "receive":
                del self.downloading[contract["data_id"]]

        # Cleanup handshakes.
        if contract_id in self.handshake:
            del self.handshake[contract_id]

        # Cleanup defers.
        if contract_id in self.defers:
            del self.defers[contract_id]

        # Cleanup con transfers.
        if con in self.con_transfer:
            del self.con_transfer[con]

        # Cleanup con_info.
        if con in self.con_info:
            del self.con_info[con]

        # Cleanup contracts.
        if contract_id in self.contracts:
            del self.contracts[contract_id]

    def queue_next_transfer(self, con):
        """Activate the next pending contract on this connection."""
        _log.debug("Queing next transfer")
        for contract_id in list(self.con_info[con]):
            con_info = self.con_info[con][contract_id]
            if con_info["remaining"]:
                self.con_transfer[con] = contract_id
                con.send(contract_id, send_all=1)
                return

        # Mark end of transfers.
        self.con_transfer[con] = u"0" * 64

    def save_contract(self, contract):
        # Record contract details.
        contract_id = self.contract_id(contract)
        self.contracts[contract_id] = contract
        return contract_id

    def send_msg(self, dict_obj, unl):
        # Relay a JSON-serialised message to the node behind `unl`.
        node_id = self.net.unl.deconstruct(unl)["node_id"]
        msg = json.dumps(dict_obj, ensure_ascii=True)
        self.net.dht_node.relay_message(
            node_id,
            msg
        )

    def contract_id(self, contract):
        # Deterministic contract id: sha256 hexdigest of the contract repr.
        if sys.version_info >= (3, 0, 0):
            contract = str(contract).encode("ascii")
        else:
            contract = str(contract)
        return hashlib.sha256(contract).hexdigest()

    def sign_contract(self, contract):
        """Sign the contract repr and embed the signature in the contract."""
        if sys.version_info >= (3, 0, 0):
            msg = str(contract).encode("ascii")
        else:
            msg = str(contract)

        msg = binascii.hexlify(msg).decode("utf-8")
        sig = self.wallet.sign_data(self.wif, msg)
        if sys.version_info >= (3, 0, 0):
            contract[u"signature"] = sig.decode("utf-8")
        else:
            contract[u"signature"] = unicode(sig)

        return contract

    def is_valid_contract_sig(self, contract, node_id=None):
        """Verify the embedded signature.

        The signature is temporarily removed so the signed repr matches
        what sign_contract() hashed, then restored before returning.
        """
        sig = contract[u"signature"][:]
        del contract[u"signature"]
        if sys.version_info >= (3, 0, 0):
            msg = str(contract).encode("ascii")
        else:
            msg = str(contract)

        # Use our address.
        msg = binascii.hexlify(msg).decode("utf-8")
        if node_id is None:
            address = self.wallet.get_address(self.wif)
            ret = self.wallet.verify_signature(address, sig, msg)
        else:
            # Use their node ID: try testnet.
            address = b2a_hashed_base58(b'o' + node_id)
            ret = self.wallet.verify_signature(address, sig, msg)
            if not ret:
                # Use their node ID: try mainnet.
                address = b2a_hashed_base58(b'\0' + node_id)
                ret = self.wallet.verify_signature(address, sig, msg)

        # Move sig back.
        contract[u"signature"] = sig[:]

        return ret

    def simple_data_request(self, data_id, node_unl, direction):
        # Convenience wrapper around data_request() with file_size = 0.
        file_size = 0
        if direction == u"send":
            action = u"upload"
        else:
            action = u"download"

        return self.data_request(action, data_id, file_size, node_unl)

    def data_request(self, action, data_id, file_size, node_unl):
        """
        Action = put (upload), get (download.)
        """
        _log.debug("In data request function")

        # Who is hosting this data?
        if action == "upload":
            # We store this data.
            direction = u"send"
            host_unl = self.net.unl.value
            assert(storage.manager.find(self.store_config, data_id)
                   is not None)
        else:
            # They store the data.
            direction = u"receive"
            host_unl = node_unl
            if data_id in self.downloading:
                raise Exception("Already trying to download this.")

        # Encoding: normalise bytes vs text for both Python versions.
        if sys.version_info >= (3, 0, 0):
            if type(data_id) == bytes:
                data_id = data_id.decode("utf-8")

            if type(host_unl) == bytes:
                host_unl = host_unl.decode("utf-8")

            if type(node_unl) == bytes:
                node_unl = node_unl.decode("utf-8")
        else:
            if type(data_id) == str:
                data_id = unicode(data_id)

            if type(host_unl) == str:
                host_unl = unicode(host_unl)

            if type(node_unl) == str:
                node_unl = unicode(node_unl)

        # Create contract.
        # NOTE(review): building an OrderedDict from a dict literal does
        # not guarantee key order on Python < 3.7, so the signed repr may
        # differ between peers — verify against the protocol's other side.
        contract = OrderedDict({
            u"status": u"SYN",
            u"direction": direction,
            u"data_id": data_id,
            u"file_size": file_size,
            u"host_unl": host_unl,
            u"dest_unl": node_unl,
            u"src_unl": self.net.unl.value,
        })

        # Sign contract.
        contract = self.sign_contract(contract)

        # Route contract.
        contract_id = self.save_contract(contract)
        self.send_msg(contract, node_unl)
        _log.debug("Sending data request")

        # Update handshake.
        self.handshake[contract_id] = {
            u"state": u"SYN",
            u"timestamp": time.time()
        }

        # For async code.
        d = defer.Deferred()
        self.defers[contract_id] = d

        # Return defer for async code.
        return d

    def get_con_by_contract_id(self, needle):
        # Linear search for the connection that owns this contract.
        for con in list(self.con_info):
            for contract_id in list(self.con_info[con]):
                if contract_id == needle:
                    return con
        return None

    def remove_file_from_storage(self, data_id):
        storage.manager.remove(self.store_config, data_id)

    def move_file_to_storage(self, path):
        # Copy the shard into managed storage and report its size and id.
        with open(path, "rb") as shard:
            storage.manager.add(self.store_config, shard)
            return {
                "file_size": storage.shard.get_size(shard),
                "data_id": storage.shard.get_id(shard)
            }

    def get_data_chunk(self, data_id, position, chunk_size=1048576):
        # Read up to chunk_size bytes of the shard starting at `position`.
        path = storage.manager.find(self.store_config, data_id)
        buf = b""
        with open(path, "rb") as fp:
            fp.seek(position, 0)
            buf = fp.read(chunk_size)

            return buf

    def save_data_chunk(self, data_id, chunk):
        # Append a received chunk to the temp file for this download.
        _log.debug("Saving data chunk for " + str(data_id))
        _log.debug("of size + " + str(len(chunk)))
        assert(data_id in self.downloading)

        # Find temp file path.
        path = self.downloading[data_id]
        _log.debug(path)

        with open(path, "ab") as fp:
            fp.write(chunk)
assert(data_id in self.downloading) # Find temp file path. path = self.downloading[data_id] _log.debug(path) with open(path, "ab") as fp: fp.write(chunk) if __name__ == "__main__": from crochet import setup setup() # Alice sample node. alice_wallet = BtcTxStore(testnet=False, dryrun=True) alice_wif = alice_wallet.create_key() alice_node_id = address_to_node_id(alice_wallet.get_address(alice_wif)) # print(type(alice_node_id)) alice_dht_node = pyp2p.dht_msg.DHT(node_id=alice_node_id) # print(alice_dht_node.get_id()) alice_dht_node = storjnode.network.Node( alice_wif, bootstrap_nodes=[("240.0.0.0", 1337)], disable_data_transfer=True ) alice = FileTransfer( pyp2p.net.Net(
with open(path, "ab") as fp: fp.write(chunk) if __name__ == "__main__": # Alice sample node. alice_wallet = BtcTxStore(testnet=True, dryrun=True) alice = FileTransfer( pyp2p.net.Net( net_type="direct", node_type="passive", nat_type="preserving", passive_port=60400, dht_node=pyp2p.dht_msg.DHT(), ), wif=alice_wallet.create_key(), store_config={"/home/laurence/Storj/Alice": None} # FIXME temppath ) # ed980e5ef780d5b9ca1a6200a03302f2a91223044bc63dacc6d9f07eead663ab # _log.debug(_log.debug(alice.move_file_to_storage("/home/laurence/Firefox_wallpaper.png"))) # exit() # Bob sample node. bob_wallet = BtcTxStore(testnet=True, dryrun=True) bob = FileTransfer( pyp2p.net.Net( net_type="direct",
#!/usr/bin/python # coding: utf-8 import logging # set logging before anything is imported LOG_FORMAT = "%(levelname)s %(name)s %(lineno)d: %(message)s" logging.basicConfig(format=LOG_FORMAT, level=logging.DEBUG) import storjnode from btctxstore import BtcTxStore from twisted.internet import reactor # TODO get this from args STARTING_PORT = 3000 SWARM_SIZE = 10 btctxstore = BtcTxStore(testnet=False) swarm = [] for i in range(SWARM_SIZE): port = STARTING_PORT + i key = btctxstore.create_key() peer = storjnode.network.BlockingNode(key, port=port, start_reactor=False) swarm.append(peer) # serve forever print("Starting with {0} peers ...".format(len(swarm))) reactor.run()
class TestCreateKey(unittest.TestCase):
    """Tests for BtcTxStore.create_key()."""

    def setUp(self):
        # dryrun + testnet: nothing is broadcast; testnet keys are used
        self.api = BtcTxStore(dryrun=True, testnet=True)

    def test_standard(self):
        wif = self.api.create_key()
        self.assertTrue(validate.is_wif_valid(wif, allowable_netcodes=['XTN']))

    def test_random(self):
        # Two independently created keys must differ.
        a = self.api.create_key()
        b = self.api.create_key()
        self.assertTrue(a != b)

    def test_master_secret(self):
        # The same master secret must deterministically give the same key.
        a = self.api.create_key(master_secret="foo")
        b = self.api.create_key(master_secret="foo")
        self.assertEqual(a, b)

    def test_input_validation(self):
        # correct types: str, bytes and unicode secrets give the same key
        a = self.api.create_key(master_secret="foo")
        b = self.api.create_key(master_secret=b"foo")
        c = self.api.create_key(master_secret=u"foo")
        # FIX: assertEqual(a, b, c) used `c` as the assertion message and
        # never compared it; compare all three pairwise instead.
        self.assertEqual(a, b)
        self.assertEqual(b, c)
        self.assertTrue(self.api.create_key(master_secret="üöä") != None)
        self.assertTrue(self.api.create_key(master_secret=u"üöä") != None)

        # incorrect types must raise InvalidInput
        def callback():
            self.api.create_key(master_secret=None)
        self.assertRaises(exceptions.InvalidInput, callback)

        def callback():
            self.api.create_key(master_secret=1)
        self.assertRaises(exceptions.InvalidInput, callback)

        def callback():
            self.api.create_key(master_secret=object())
        self.assertRaises(exceptions.InvalidInput, callback)

    def test_standards_compliant(self):
        pass  # FIXME check generated against expected output from 3rd parties
class FileTransfer:
    """Peer-to-peer shard transfer manager (bandwidth-aware variant).

    Negotiates signed transfer contracts relayed (compressed) over the
    DHT and moves shard data over direct connections managed by ``net``.
    """

    def __init__(self, net, bandwidth, wif=None, store_config=None,
                 handlers=None):
        # Accept direct connections.
        self.net = net

        # Control bandwidth.
        self.bandwidth = bandwidth

        # Returned by callbacks.
        self.success_value = ("127.0.0.1", 7777)

        # Used for signing messages.
        self.wallet = BtcTxStore(testnet=False, dryrun=True)
        self.wif = wif or self.wallet.create_key()

        # Where will the data be stored?
        self.store_config = store_config
        assert(len(list(store_config)))

        # Handlers for certain events.
        self.handlers = handlers
        if self.handlers is None:
            self.handlers = {}
        if "complete" not in self.handlers:
            self.handlers["complete"] = set()
        if "accept" not in self.handlers:
            self.handlers["accept"] = set()
        if "start" not in self.handlers:
            self.handlers["start"] = set()

        # Start networking.
        if not self.net.is_net_started:
            self.net.start()

        # Dict of data requests: [contract_id] > contract
        self.contracts = {}

        # List of Sock objects returned from UNL.connect.
        self.cons = []

        # Dict of defers for contracts: [contract_id] > defer
        self.defers = {}

        # Three-way handshake status for contracts: [contract_id] > state
        self.handshake = {}

        # All contracts associated with this connection.
        # [con] > [contract_id] > con_info
        self.con_info = {}

        # File transfer currently active on connection.
        # [con] > contract_id
        self.con_transfer = {}

        # List of active downloads.
        # (Never try to download multiple copies of the same thing at once.)
        self.downloading = {}

        # Lock threads.
        self.mutex = Lock()

    def add_handler(self, type, handler):
        # todo: change handler for when new data is transferred
        # might be helpful to have for updating UI progress
        if type in list(self.handlers):
            self.handlers[type].add(handler)

    def remove_handler(self, type, handler):
        if type in list(self.handlers):
            if handler in self.handlers[type]:
                self.handlers[type].remove(handler)

    def get_their_unl(self, contract):
        # Return the UNL of the other party in this contract.
        if self.net.unl == pyp2p.unl.UNL(value=contract["dest_unl"]):
            their_unl = contract["src_unl"]
        else:
            their_unl = contract["dest_unl"]
        return their_unl

    def is_queued(self, con=None):
        # Return 1 if any contract (on `con`, or on any connection when
        # con is None) still has bytes remaining to transfer, else 0.
        if con is not None:
            if con not in self.con_info:
                return 0
        if con is None:
            con_list = list(self.con_info)
        else:
            con_list = [con]
        for con in con_list:
            for contract_id in list(self.con_info[con]):
                con_info = self.con_info[con][contract_id]
                if con_info["remaining"]:
                    return 1
        return 0

    def cleanup_transfers(self, con, contract_id):
        """Drop all state associated with a finished transfer."""
        # Cleanup downloading.
        if contract_id in self.contracts:
            contract = self.contracts[contract_id]
            if contract["data_id"] in self.downloading:
                if self.get_direction(contract_id) == u"receive":
                    del self.downloading[contract["data_id"]]

        # Cleanup handshakes.
        if contract_id in self.handshake:
            del self.handshake[contract_id]

        # Cleanup defers.
        if contract_id in self.defers:
            del self.defers[contract_id]

        # Cleanup con transfers.
        if con in self.con_transfer:
            del self.con_transfer[con]

        # Cleanup con_info.
        if con in self.con_info:
            del self.con_info[con]

        # Cleanup contracts.
        if contract_id in self.contracts:
            del self.contracts[contract_id]

    def queue_next_transfer(self, con):
        """Activate the next pending contract on this connection."""
        _log.debug("Queing next transfer")
        for contract_id in list(self.con_info[con]):
            con_info = self.con_info[con][contract_id]
            if con_info["remaining"]:
                self.con_transfer[con] = contract_id
                con.send(contract_id, send_all=1)
                return

        # Mark end of transfers.
        self.con_transfer[con] = u"0" * 64

    def save_contract(self, contract):
        # Record contract details.
        contract_id = self.contract_id(contract)
        self.contracts[contract_id] = contract
        return contract_id

    def send_msg(self, msg, unl):
        # Relay a compressed, list-serialised OrderedDict to the node
        # behind `unl`.
        assert(type(msg) == OrderedDict)
        node_id = self.net.unl.deconstruct(unl)["node_id"]
        msg = ordered_dict_to_list(msg)
        msg = zlib.compress(str(msg))
        self.net.dht_node.repeat_relay_message(node_id, msg)

    def contract_id(self, contract):
        # Deterministic contract id: sha256 hexdigest of the contract repr.
        if sys.version_info >= (3, 0, 0):
            contract = str(contract).encode("ascii")
        else:
            contract = str(contract)
        return hashlib.sha256(contract).hexdigest()

    def sign_contract(self, contract):
        return storjnode.network.message.sign(contract, self.wif)

    def is_valid_contract_sig(self, contract, node_id=None):
        return storjnode.network.message.verify_signature(contract, self.wif,
                                                          node_id=node_id)

    def get_direction(self, contract_id, contract=None):
        """
        The direction of a transfer is relative to the node.
        """
        contract = contract or self.contracts[contract_id]
        our_unl = self.net.unl
        host_unl = pyp2p.unl.UNL(value=contract[u"host_unl"])
        if our_unl == host_unl:
            direction = u"send"
        else:
            direction = u"receive"

        return direction

    def simple_data_request(self, data_id, node_unl, direction):
        # Convenience wrapper around data_request() with file_size = 0.
        file_size = 0
        if direction == u"send":
            action = u"download"
        else:
            # We're downloading: so tell the peer to upload to us.
            action = u"upload"

        return self.data_request(action, data_id, file_size, node_unl)

    def data_request(self, action, data_id, file_size, node_unl):
        """
        Action = put (upload), get (download.)
        """
        _log.debug("In data request function")
        node_unl = node_unl.decode("utf-8")
        d = defer.Deferred()
        if node_unl == self.net.unl.value:
            # FIX: corrected error-message typo ("Can;t" -> "Can't").
            e = "Can't send data request to ourself"
            _log.debug(e)
            d.errback(Exception(e))
            return d

        # Who is hosting this data?
        if action == u"download":
            # We store this data.
            host_unl = self.net.unl.value.decode("utf-8")
            cfg = self.store_config
            _log.debug(cfg)
            _log.debug(data_id)
            assert(storjnode.storage.manager.find(cfg, data_id) is not None)
        else:
            # They store the data.
            host_unl = node_unl
            if data_id in self.downloading:
                e = "Already trying to download this."
                _log.debug(e)
                d.errback(Exception(e))
                return d

        # Create contract (list-of-pairs form keeps the key order stable).
        contract = OrderedDict([
            (u"status", u"SYN"),
            (u"data_id", data_id.decode("utf-8")),
            (u"file_size", file_size),
            (u"host_unl", host_unl),
            (u"dest_unl", node_unl),
            (u"src_unl", self.net.unl.value)
        ])

        # Sign contract.
        contract = self.sign_contract(contract)

        # Check contract is valid.
        if is_valid_syn(self, contract) != 1:
            e = "our syn is invalid"
            _log.debug(e)
            d.errback(Exception(e))
            return d

        # Route contract.
        contract_id = self.save_contract(contract)
        self.send_msg(contract, node_unl)
        _log.debug("Sending data request")

        # Update handshake.
        self.handshake[contract_id] = {
            u"state": u"SYN",
            u"timestamp": time.time()
        }

        # For async code.
        self.defers[contract_id] = d

        # Return defer for async code.
        return contract_id

    def get_con_by_contract_id(self, needle):
        # Linear search for the connection that owns this contract.
        for con in list(self.con_info):
            for contract_id in list(self.con_info[con]):
                if contract_id == needle:
                    return con
        return None

    def remove_file_from_storage(self, data_id):
        storjnode.storage.manager.remove(self.store_config, data_id)

    def move_file_to_storage(self, path):
        # Copy the shard into managed storage and report its size and id.
        with open(path, "rb") as shard:
            storjnode.storage.manager.add(self.store_config, shard)
            return {
                "file_size": storjnode.storage.shard.get_size(shard),
                "data_id": storjnode.storage.shard.get_id(shard)
            }

    def get_data_chunk(self, data_id, position, chunk_size=1048576):
        # Read up to chunk_size bytes of the shard starting at `position`.
        path = storjnode.storage.manager.find(self.store_config, data_id)
        buf = b""
        with open(path, "rb") as fp:
            fp.seek(position, 0)
            buf = fp.read(chunk_size)

            return buf

    def save_data_chunk(self, data_id, chunk):
        # Append a received chunk to the temp file for this download.
        assert(data_id in self.downloading)

        # Find temp file path.
        path = self.downloading[data_id]

        with open(path, "ab") as fp:
            fp.write(chunk)
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2015 Fabian Barkhau <*****@*****.**>
# License: MIT (see LICENSE file)

"""Profile repeated unicode signature verification with cProfile."""

from __future__ import print_function
from __future__ import unicode_literals

from btctxstore import BtcTxStore
import time
import cProfile
from pstats import Stats

api = BtcTxStore(testnet=True, dryrun=True)  # use testing setup for example
wif = api.create_key()  # create new private key
address = api.get_address(wif)  # get private key address
message = "Signed ünicöde message."
signature = api.sign_unicode(wif, message)

profile = cProfile.Profile()
profile.enable()
# FIX: removed the unused begin/end time.time() bookkeeping; the cProfile
# stats collected below already carry the timing information.
for i in range(10):
    assert(api.verify_signature_unicode(address, signature, message))

stats = Stats(profile)
stats.strip_dirs()
stats.sort_stats('cumtime')
class FileTransfer:
    """Peer-to-peer shard transfer layer.

    Negotiates signed transfer "contracts" via a SYN / SYN-ACK / ACK
    handshake carried over DHT direct messages, then moves shard bytes over
    direct TCP connections managed by pyp2p. Contracts are OrderedDicts
    signed with this node's wallet key.
    """

    def __init__(self, net, wif=None, store_config=None, handlers=None):
        """Wire up networking, wallet key, storage config and transfer state.

        net: pyp2p network object (started here if not already running).
        wif: private key for signing contracts; a new key is created when
            omitted.
        store_config: non-empty mapping describing where shards are stored.
        handlers: callbacks for transfer events.
        """
        # Accept direct connections.
        self.net = net

        # Returned by callbacks.
        self.success_value = ("127.0.0.1", 7777)

        # Used for signing messages.
        self.wallet = BtcTxStore(testnet=True, dryrun=True)
        self.wif = wif or self.wallet.create_key()

        # Where will the data be stored?
        self.store_config = store_config
        assert (len(list(store_config)))

        # Handlers for certain events.
        self.handlers = handlers

        # Start networking.
        if not self.net.is_net_started:
            self.net.start()

        # Dict of data requests.
        self.contracts = {}

        # Dict of defers for contracts.
        self.defers = {}

        # Three-way handshake status for contracts.
        self.handshake = {}

        # All contracts associated with this connection.
        self.con_info = {}

        # File transfer currently active on connection.
        self.con_transfer = {}

        # List of active downloads.
        # (Never try to download multiple copies of the same thing at once.)
        self.downloading = {}

    def get_their_unl(self, contract):
        """Return the counterparty's UNL: src_unl if we are the destination,
        otherwise dest_unl."""
        if self.net.unl == pyp2p.unl.UNL(value=contract["dest_unl"]):
            their_unl = contract["src_unl"]
        else:
            their_unl = contract["dest_unl"]

        return their_unl

    def is_queued(self, con=None):
        """Return 1 when any contract (on *con*, or on any connection when
        con is None) still has bytes remaining, else 0."""
        if con is not None:
            if con not in self.con_info:
                return 0

        if con is None:
            con_list = list(self.con_info)
        else:
            con_list = [con]

        for con in con_list:
            for contract_id in list(self.con_info[con]):
                con_info = self.con_info[con][contract_id]
                if con_info["remaining"]:
                    return 1

        return 0

    def cleanup_transfers(self, con):
        """Drop per-connection transfer bookkeeping once nothing remains
        queued on *con*."""
        # Close con - there's nothing left to download.
        if not self.is_queued(con):
            # Cleanup con transfers.
            if con in self.con_transfer:
                del self.con_transfer[con]

            # Cleanup con_info.
            if con in self.con_info:
                del self.con_info[con]

            # Todo: cleanup contract + handshake state.

    def queue_next_transfer(self, con):
        """Activate the next contract with bytes remaining on *con*, or mark
        the end of transfers with a 64-character zero string."""
        _log.debug("Queing next transfer")
        for contract_id in list(self.con_info[con]):
            con_info = self.con_info[con][contract_id]
            if con_info["remaining"]:
                self.con_transfer[con] = contract_id
                con.send(contract_id, send_all=1)
                return

        # Mark end of transfers.
        self.con_transfer[con] = u"0" * 64

    def is_valid_syn(self, msg):
        """Validate a SYN contract message; return 1 if acceptable, else 0.

        Checks required fields, direction, UNL validity, file-size type, and
        local storage consistency (host must have the shard; a downloader
        must not already have it or be downloading it).
        """
        # List of expected fields.
        syn_schema = (u"status", u"direction", u"data_id", u"file_size",
                      u"host_unl", u"dest_unl", u"src_unl", u"signature")

        # Check all fields exist.
        if not all(key in msg for key in syn_schema):
            _log.debug("Missing required key.")
            return 0

        # Check SYN size.
        # NOTE(review): len() on a dict counts keys, not serialized bytes,
        # so this can never reach 5 MB — confirm the intended check.
        if len(msg) > 5242880:  # 5 MB.
            _log.debug("SYN is too big")
            return 0

        # Check direction is valid.
        direction_tuple = (u"send", u"receive")
        if msg[u"direction"] not in direction_tuple:
            _log.debug("Missing required direction tuple.")
            return 0

        # Check the UNLs are valid.
        unl_tuple = (u"host_unl", u"dest_unl", u"src_unl")
        for unl_key in unl_tuple:
            if not pyp2p.unl.is_valid_unl(msg[unl_key]):
                _log.debug("Invalid UNL for " + unl_key)
                _log.debug(msg[unl_key])
                return 0

        # Check file size.
        file_size_type = type(msg[u"file_size"])
        if sys.version_info >= (3, 0, 0):
            expr = file_size_type != int
        else:
            # Python 2: either int or long is acceptable.
            expr = file_size_type != int and file_size_type != long
        if expr:
            _log.debug("File size validation failed")
            _log.debug(type(msg[u"file_size"]))
            return 0

        # Are we the host?
        if self.net.unl == pyp2p.unl.UNL(value=msg[u"host_unl"]):
            # Then check we have this file.
            path = storage.manager.find(self.store_config, msg[u"data_id"])
            if path is None:
                _log.debug("Failed to find file we're uploading")
                return 0
        else:
            # Do we already have this file?
            path = storage.manager.find(self.store_config, msg[u"data_id"])
            if path is not None:
                _log.debug("Attempting to download file we already have")
                return 0

            # Are we already trying to download this?
            if msg[u"data_id"] in self.downloading:
                _log.debug("We're already trying to download this")
                return 0

        return 1

    def protocol(self, msg):
        """Dispatch an incoming JSON contract message.

        Handles the three handshake stages: SYN (incoming request →
        SYN-ACK), SYN-ACK (reply to our SYN → ACK + TCP connect) and ACK
        (reply to our SYN-ACK → TCP connect). Invalid or unknown messages
        are logged and dropped.
        """
        msg = json.loads(msg, object_pairs_hook=OrderedDict)

        # Associate TCP con with contract.
        def success_wrapper(self, contract_id, host_unl):
            # Returns the pyp2p "success" callback bound to this contract.
            def success(con):
                with mutex:
                    _log.debug("IN SUCCESS CALLBACK")
                    _log.debug("Success() contract_id = " + str(contract_id))

                    # Associate TCP con with contract.
                    contract = self.contracts[contract_id]
                    file_size = contract["file_size"]

                    # Store con association.
                    if con not in self.con_info:
                        self.con_info[con] = {}

                    # Associate contract with con.
                    if contract_id not in self.con_info[con]:
                        self.con_info[con][contract_id] = {
                            "contract_id": contract_id,
                            "remaining": 350,  # Tree fiddy.
                            "file_size": file_size,
                            "file_size_buf": b""
                        }

                    # Record download state.
                    data_id = contract["data_id"]
                    if self.net.unl != pyp2p.unl.UNL(value=host_unl):
                        _log.debug("Success: download")
                        # NOTE(review): the OS-level fd from mkstemp is kept
                        # in `fp` but never closed — possible fd leak;
                        # confirm cleanup elsewhere.
                        fp, self.downloading[data_id] = tempfile.mkstemp()
                    else:
                        # Set initial upload for this con.
                        _log.debug("Success: upload")

                    # Queue first transfer.
                    their_unl = self.get_their_unl(contract)
                    is_master = self.net.unl.is_master(their_unl)
                    _log.debug("Is master = " + str(is_master))
                    if con not in self.con_transfer:
                        if is_master:
                            # A transfer to queue processing.
                            self.queue_next_transfer(con)
                        else:
                            # A transfer to receive (unknown.)
                            self.con_transfer[con] = u""
                    else:
                        if self.con_transfer[con] == u"0" * 64:
                            if is_master:
                                self.queue_next_transfer(con)
                            else:
                                self.con_transfer[con] = u""

            return success

        # Sanity checking.
        if u"status" not in msg:
            return

        # Accept data request.
        if msg[u"status"] == u"SYN":
            # Check syn is valid.
            if not self.is_valid_syn(msg):
                _log.debug("SYN: invalid syn.")
                return

            # Save contract.
            contract_id = self.contract_id(msg)
            self.save_contract(msg)
            self.handshake[contract_id] = {
                "state": u"SYN-ACK",
                "timestamp": time.time()
            }

            # Create reply.
            reply = OrderedDict({
                u"status": u"SYN-ACK",
                u"syn": msg,
            })

            # Sign reply.
            reply = self.sign_contract(reply)

            # Save reply.
            self.send_msg(reply, msg[u"src_unl"])
            _log.debug("SYN")

        # Confirm accept and make connection if needed.
        if msg[u"status"] == u"SYN-ACK":
            # Valid syn-ack?
            if u"syn" not in msg:
                _log.debug("SYN-ACK: syn not in msg.")
                return

            # Is this a reply to our SYN?
            contract_id = self.contract_id(msg[u"syn"])
            if contract_id not in self.contracts:
                _log.debug("--------------")
                _log.debug(msg)
                _log.debug("--------------")
                _log.debug(self.contracts)
                _log.debug("--------------")
                _log.debug("SYN-ACK: contract not found.")
                return

            # Check syn is valid.
            if not self.is_valid_syn(msg[u"syn"]):
                _log.debug("SYN-ACK: invalid syn.")
                return

            # Did I sign this?
            if not self.is_valid_contract_sig(msg[u"syn"]):
                _log.debug("SYN-ACK: sig is invalid.")
                return

            # Update handshake.
            contract = self.contracts[contract_id]
            self.handshake[contract_id] = {
                "state": u"ACK",
                "timestamp": time.time()
            }

            # Create reply contract.
            reply = OrderedDict({u"status": u"ACK", u"syn_ack": msg})

            # Sign reply.
            reply = self.sign_contract(reply)

            # Try make TCP con.
            self.net.unl.connect(contract["dest_unl"], {
                "success": success_wrapper(self, contract_id,
                                           contract["host_unl"])
            }, force_master=0, nonce=contract_id)

            # Send reply.
            self.send_msg(reply, msg[u"syn"][u"dest_unl"])
            _log.debug("SYN-ACK")

        if msg[u"status"] == u"ACK":
            # Valid ack.
            if u"syn_ack" not in msg:
                _log.debug("ACK: syn_ack not in msg.")
                return
            if u"syn" not in msg[u"syn_ack"]:
                _log.debug("ACK: syn not in msg.")
                return

            # Is this a reply to our SYN-ACK?
            contract_id = self.contract_id(msg[u"syn_ack"][u"syn"])
            if contract_id not in self.contracts:
                _log.debug("ACK: contract not found.")
                return

            # Did I sign this?
            if not self.is_valid_contract_sig(msg[u"syn_ack"]):
                _log.debug("--------------")
                _log.debug(msg)
                _log.debug("--------------")
                _log.debug(self.contracts)
                _log.debug("--------------")
                _log.debug("ACK: sig is invalid.")
                return

            # Is the syn valid?
            if not self.is_valid_syn(msg[u"syn_ack"][u"syn"]):
                _log.debug("ACK: syn is invalid.")
                return

            # Update handshake.
            contract = self.contracts[contract_id]
            self.handshake[contract_id] = {
                "state": u"ACK",
                "timestamp": time.time()
            }

            # Try make TCP con.
            self.net.unl.connect(contract["src_unl"], {
                "success": success_wrapper(self, contract_id,
                                           contract["host_unl"])
            }, force_master=0, nonce=contract_id)

            _log.debug("ACK")

    def save_contract(self, contract):
        """Store *contract* under its derived id and return that id."""
        # Record contract details.
        contract_id = self.contract_id(contract)
        self.contracts[contract_id] = contract

        return contract_id

    def send_msg(self, dict_obj, unl):
        """JSON-encode *dict_obj* and direct-message it to the node behind
        *unl* via the DHT."""
        node_id = self.net.unl.deconstruct(unl)["node_id"]
        msg = json.dumps(dict_obj, ensure_ascii=True)
        self.net.dht_node.direct_message(node_id, msg)

    def contract_id(self, contract):
        """Return the hex SHA-256 digest of the contract's repr.

        NOTE(review): this hashes str(contract), so both peers must produce
        an identical repr (hence the OrderedDicts) — confirm cross-version
        stability.
        """
        if sys.version_info >= (3, 0, 0):
            contract = str(contract).encode("ascii")
        else:
            contract = str(contract)

        return hashlib.sha256(contract).hexdigest()

    def sign_contract(self, contract):
        """Sign the contract's hex-encoded repr with our key and store the
        signature under the "signature" key; returns the contract."""
        if sys.version_info >= (3, 0, 0):
            msg = str(contract).encode("ascii")
        else:
            msg = str(contract)

        msg = binascii.hexlify(msg).decode("utf-8")
        sig = self.wallet.sign_data(self.wif, msg)
        if sys.version_info >= (3, 0, 0):
            contract[u"signature"] = sig.decode("utf-8")
        else:
            contract[u"signature"] = unicode(sig)

        return contract

    def is_valid_contract_sig(self, contract):
        """Verify the contract's signature against our own address.

        Temporarily deletes the "signature" key so the checked repr matches
        what sign_contract signed, then restores it before returning.
        """
        sig = contract[u"signature"][:]
        del contract[u"signature"]

        if sys.version_info >= (3, 0, 0):
            msg = str(contract).encode("ascii")
        else:
            msg = str(contract)

        msg = binascii.hexlify(msg).decode("utf-8")
        address = self.wallet.get_address(self.wif)
        ret = self.wallet.verify_signature(address, sig, msg)
        contract[u"signature"] = sig[:]

        return ret

    def simple_data_request(self, data_id, node_unl, direction):
        """Convenience wrapper around data_request with file_size=0;
        direction u"send" maps to an upload, anything else to a download."""
        file_size = 0
        if direction == u"send":
            action = u"upload"
        else:
            action = u"download"

        return self.data_request(action, data_id, file_size, node_unl)

    def data_request(self, action, data_id, file_size, node_unl):
        """Build, sign and route a SYN contract; return a Deferred.

        action: "upload" (we host the shard) or anything else for download
        (the remote node hosts it). Raises when a download for *data_id* is
        already in progress. The returned twisted Deferred fires when the
        transfer completes or fails.
        """
        _log.debug("In data request function")

        # Who is hosting this data?
        if action == "upload":
            # We store this data.
            direction = u"send"
            host_unl = self.net.unl.value
            assert (storage.manager.find(self.store_config, data_id)
                    is not None)
        else:
            # They store the data.
            direction = u"receive"
            host_unl = node_unl
            if data_id in self.downloading:
                raise Exception("Already trying to download this.")

        # Encoding.
        # Normalize all identifiers to text for a stable contract repr.
        if sys.version_info >= (3, 0, 0):
            if type(data_id) == bytes:
                data_id = data_id.decode("utf-8")
            if type(host_unl) == bytes:
                host_unl = host_unl.decode("utf-8")
            if type(node_unl) == bytes:
                node_unl = node_unl.decode("utf-8")
        else:
            if type(data_id) == str:
                data_id = unicode(data_id)
            if type(host_unl) == str:
                host_unl = unicode(host_unl)
            if type(node_unl) == str:
                node_unl = unicode(node_unl)

        # Create contract.
        contract = OrderedDict({
            u"status": u"SYN",
            u"direction": direction,
            u"data_id": data_id,
            u"file_size": file_size,
            u"host_unl": host_unl,
            u"dest_unl": node_unl,
            u"src_unl": self.net.unl.value
        })

        # Sign contract.
        contract = self.sign_contract(contract)

        # Route contract.
        contract_id = self.save_contract(contract)
        self.send_msg(contract, node_unl)
        _log.debug("Sending data request")

        # Update handshake.
        self.handshake[contract_id] = {
            "state": "SYN",
            "timestamp": time.time()
        }

        # For async code.
        d = defer.Deferred()
        self.defers[contract_id] = d

        # Return defer for async code.
        return d

    def remove_file_from_storage(self, data_id):
        """Delete the shard identified by *data_id* from configured storage."""
        storage.manager.remove(self.store_config, data_id)

    def move_file_to_storage(self, path):
        """Copy the file at *path* into storage; return its size and
        content-derived data id."""
        with open(path, "rb") as shard:
            storage.manager.add(self.store_config, shard)
            return {
                "file_size": storage.shard.get_size(shard),
                "data_id": storage.shard.get_id(shard)
            }

    def get_data_chunk(self, data_id, position, chunk_size=1048576):
        """Read up to *chunk_size* bytes (default 1 MiB) of shard *data_id*
        starting at byte offset *position*."""
        path = storage.manager.find(self.store_config, data_id)
        buf = b""
        with open(path, "rb") as fp:
            fp.seek(position, 0)
            buf = fp.read(chunk_size)
            return buf

    def save_data_chunk(self, data_id, chunk):
        """Append *chunk* to the temp file backing the active download of
        *data_id*."""
        _log.debug("Saving data chunk for " + str(data_id))
        _log.debug("of size + " + str(len(chunk)))
        assert (data_id in self.downloading)

        # Find temp file path.
        path = self.downloading[data_id]
        _log.debug(path)
        with open(path, "ab") as fp:
            fp.write(chunk)
from btctxstore import BtcTxStore

__author__ = 'karatel'

# Shared dry-run testnet API instance used to mint the fixture identities
# below (no network or blockchain access).
test_btctx_api = BtcTxStore(testnet=True, dryrun=True)

# "Owner" identity: a fresh private key (WIF) and its derived address.
test_owner_wif = test_btctx_api.create_key()
test_owner_address = test_btctx_api.get_address(test_owner_wif)

# A second, unrelated identity for negative/other-party test cases.
# NOTE(review): "wfi" looks like a typo for "wif", but the name is kept as-is
# since other modules may import it.
test_other_wfi = test_btctx_api.create_key()
test_other_address = test_btctx_api.get_address(test_other_wfi)
class FileTransfer:
    """Peer-to-peer shard transfer layer.

    Negotiates signed transfer "contracts" via a SYN / SYN-ACK / ACK
    handshake carried over DHT direct messages, then moves shard bytes over
    direct TCP connections managed by pyp2p. Contracts are OrderedDicts
    signed with this node's wallet key.
    """

    def __init__(self, net, wif=None, store_config=None, handlers=None):
        """Wire up networking, wallet key, storage config and transfer state.

        net: pyp2p network object (started here if not already running).
        wif: private key for signing contracts; a new key is created when
            omitted.
        store_config: non-empty mapping describing where shards are stored.
        handlers: callbacks for transfer events.
        """
        # Accept direct connections.
        self.net = net

        # Returned by callbacks.
        self.success_value = ("127.0.0.1", 7777)

        # Used for signing messages.
        self.wallet = BtcTxStore(testnet=True, dryrun=True)
        self.wif = wif or self.wallet.create_key()

        # Where will the data be stored?
        self.store_config = store_config
        assert(len(list(store_config)))

        # Handlers for certain events.
        self.handlers = handlers

        # Start networking.
        if not self.net.is_net_started:
            self.net.start()

        # Dict of data requests.
        self.contracts = {}

        # Dict of defers for contracts.
        self.defers = {}

        # Three-way handshake status for contracts.
        self.handshake = {}

        # All contracts associated with this connection.
        self.con_info = {}

        # File transfer currently active on connection.
        self.con_transfer = {}

        # List of active downloads.
        # (Never try to download multiple copies of the same thing at once.)
        self.downloading = {}

    def get_their_unl(self, contract):
        """Return the counterparty's UNL: src_unl if we are the destination,
        otherwise dest_unl."""
        if self.net.unl == pyp2p.unl.UNL(value=contract["dest_unl"]):
            their_unl = contract["src_unl"]
        else:
            their_unl = contract["dest_unl"]

        return their_unl

    def is_queued(self, con=None):
        """Return 1 when any contract (on *con*, or on any connection when
        con is None) still has bytes remaining, else 0."""
        if con is not None:
            if con not in self.con_info:
                return 0

        if con is None:
            con_list = list(self.con_info)
        else:
            con_list = [con]

        for con in con_list:
            for contract_id in list(self.con_info[con]):
                con_info = self.con_info[con][contract_id]
                if con_info["remaining"]:
                    return 1

        return 0

    def cleanup_transfers(self, con):
        """Drop per-connection transfer bookkeeping once nothing remains
        queued on *con*."""
        # Close con - there's nothing left to download.
        if not self.is_queued(con):
            # Cleanup con transfers.
            if con in self.con_transfer:
                del self.con_transfer[con]

            # Cleanup con_info.
            if con in self.con_info:
                del self.con_info[con]

            # Todo: cleanup contract + handshake state.

    def queue_next_transfer(self, con):
        """Activate the next contract with bytes remaining on *con*, or mark
        the end of transfers with a 64-character zero string."""
        _log.debug("Queing next transfer")
        for contract_id in list(self.con_info[con]):
            con_info = self.con_info[con][contract_id]
            if con_info["remaining"]:
                self.con_transfer[con] = contract_id
                con.send(contract_id, send_all=1)
                return

        # Mark end of transfers.
        self.con_transfer[con] = u"0" * 64

    def is_valid_syn(self, msg):
        """Validate a SYN contract message; return 1 if acceptable, else 0.

        Checks required fields, direction, UNL validity, file-size type, and
        local storage consistency (host must have the shard; a downloader
        must not already have it or be downloading it).
        """
        # List of expected fields.
        syn_schema = (
            u"status",
            u"direction",
            u"data_id",
            u"file_size",
            u"host_unl",
            u"dest_unl",
            u"src_unl",
            u"signature"
        )

        # Check all fields exist.
        if not all(key in msg for key in syn_schema):
            _log.debug("Missing required key.")
            return 0

        # Check SYN size.
        # NOTE(review): len() on a dict counts keys, not serialized bytes,
        # so this can never reach 5 MB — confirm the intended check.
        if len(msg) > 5242880:  # 5 MB.
            _log.debug("SYN is too big")
            return 0

        # Check direction is valid.
        direction_tuple = (u"send", u"receive")
        if msg[u"direction"] not in direction_tuple:
            _log.debug("Missing required direction tuple.")
            return 0

        # Check the UNLs are valid.
        unl_tuple = (u"host_unl", u"dest_unl", u"src_unl")
        for unl_key in unl_tuple:
            if not pyp2p.unl.is_valid_unl(msg[unl_key]):
                _log.debug("Invalid UNL for " + unl_key)
                _log.debug(msg[unl_key])
                return 0

        # Check file size.
        file_size_type = type(msg[u"file_size"])
        if sys.version_info >= (3, 0, 0):
            expr = file_size_type != int
        else:
            # Python 2: either int or long is acceptable.
            expr = file_size_type != int and file_size_type != long
        if expr:
            _log.debug("File size validation failed")
            _log.debug(type(msg[u"file_size"]))
            return 0

        # Are we the host?
        if self.net.unl == pyp2p.unl.UNL(value=msg[u"host_unl"]):
            # Then check we have this file.
            path = storage.manager.find(self.store_config, msg[u"data_id"])
            if path is None:
                _log.debug("Failed to find file we're uploading")
                return 0
        else:
            # Do we already have this file?
            path = storage.manager.find(self.store_config, msg[u"data_id"])
            if path is not None:
                _log.debug("Attempting to download file we already have")
                return 0

            # Are we already trying to download this?
            if msg[u"data_id"] in self.downloading:
                _log.debug("We're already trying to download this")
                return 0

        return 1

    def protocol(self, msg):
        """Dispatch an incoming JSON contract message.

        Handles the three handshake stages: SYN (incoming request →
        SYN-ACK), SYN-ACK (reply to our SYN → ACK + TCP connect) and ACK
        (reply to our SYN-ACK → TCP connect). Invalid or unknown messages
        are logged and dropped.
        """
        msg = json.loads(msg, object_pairs_hook=OrderedDict)

        # Associate TCP con with contract.
        def success_wrapper(self, contract_id, host_unl):
            # Returns the pyp2p "success" callback bound to this contract.
            def success(con):
                with mutex:
                    _log.debug("IN SUCCESS CALLBACK")
                    _log.debug("Success() contract_id = " + str(contract_id))

                    # Associate TCP con with contract.
                    contract = self.contracts[contract_id]
                    file_size = contract["file_size"]

                    # Store con association.
                    if con not in self.con_info:
                        self.con_info[con] = {}

                    # Associate contract with con.
                    if contract_id not in self.con_info[con]:
                        self.con_info[con][contract_id] = {
                            "contract_id": contract_id,
                            "remaining": 350,  # Tree fiddy.
                            "file_size": file_size,
                            "file_size_buf": b""
                        }

                    # Record download state.
                    data_id = contract["data_id"]
                    if self.net.unl != pyp2p.unl.UNL(value=host_unl):
                        _log.debug("Success: download")
                        # NOTE(review): the OS-level fd from mkstemp is kept
                        # in `fp` but never closed — possible fd leak;
                        # confirm cleanup elsewhere.
                        fp, self.downloading[data_id] = tempfile.mkstemp()
                    else:
                        # Set initial upload for this con.
                        _log.debug("Success: upload")

                    # Queue first transfer.
                    their_unl = self.get_their_unl(contract)
                    is_master = self.net.unl.is_master(their_unl)
                    _log.debug("Is master = " + str(is_master))
                    if con not in self.con_transfer:
                        if is_master:
                            # A transfer to queue processing.
                            self.queue_next_transfer(con)
                        else:
                            # A transfer to receive (unknown.)
                            self.con_transfer[con] = u""
                    else:
                        if self.con_transfer[con] == u"0" * 64:
                            if is_master:
                                self.queue_next_transfer(con)
                            else:
                                self.con_transfer[con] = u""

            return success

        # Sanity checking.
        if u"status" not in msg:
            return

        # Accept data request.
        if msg[u"status"] == u"SYN":
            # Check syn is valid.
            if not self.is_valid_syn(msg):
                _log.debug("SYN: invalid syn.")
                return

            # Save contract.
            contract_id = self.contract_id(msg)
            self.save_contract(msg)
            self.handshake[contract_id] = {
                "state": u"SYN-ACK",
                "timestamp": time.time()
            }

            # Create reply.
            reply = OrderedDict({
                u"status": u"SYN-ACK",
                u"syn": msg,
            })

            # Sign reply.
            reply = self.sign_contract(reply)

            # Save reply.
            self.send_msg(reply, msg[u"src_unl"])
            _log.debug("SYN")

        # Confirm accept and make connection if needed.
        if msg[u"status"] == u"SYN-ACK":
            # Valid syn-ack?
            if u"syn" not in msg:
                _log.debug("SYN-ACK: syn not in msg.")
                return

            # Is this a reply to our SYN?
            contract_id = self.contract_id(msg[u"syn"])
            if contract_id not in self.contracts:
                _log.debug("--------------")
                _log.debug(msg)
                _log.debug("--------------")
                _log.debug(self.contracts)
                _log.debug("--------------")
                _log.debug("SYN-ACK: contract not found.")
                return

            # Check syn is valid.
            if not self.is_valid_syn(msg[u"syn"]):
                _log.debug("SYN-ACK: invalid syn.")
                return

            # Did I sign this?
            if not self.is_valid_contract_sig(msg[u"syn"]):
                _log.debug("SYN-ACK: sig is invalid.")
                return

            # Update handshake.
            contract = self.contracts[contract_id]
            self.handshake[contract_id] = {
                "state": u"ACK",
                "timestamp": time.time()
            }

            # Create reply contract.
            reply = OrderedDict({
                u"status": u"ACK",
                u"syn_ack": msg
            })

            # Sign reply.
            reply = self.sign_contract(reply)

            # Try make TCP con.
            self.net.unl.connect(
                contract["dest_unl"],
                {
                    "success": success_wrapper(
                        self,
                        contract_id,
                        contract["host_unl"]
                    )
                },
                force_master=0,
                nonce=contract_id
            )

            # Send reply.
            self.send_msg(reply, msg[u"syn"][u"dest_unl"])
            _log.debug("SYN-ACK")

        if msg[u"status"] == u"ACK":
            # Valid ack.
            if u"syn_ack" not in msg:
                _log.debug("ACK: syn_ack not in msg.")
                return
            if u"syn" not in msg[u"syn_ack"]:
                _log.debug("ACK: syn not in msg.")
                return

            # Is this a reply to our SYN-ACK?
            contract_id = self.contract_id(msg[u"syn_ack"][u"syn"])
            if contract_id not in self.contracts:
                _log.debug("ACK: contract not found.")
                return

            # Did I sign this?
            if not self.is_valid_contract_sig(msg[u"syn_ack"]):
                _log.debug("--------------")
                _log.debug(msg)
                _log.debug("--------------")
                _log.debug(self.contracts)
                _log.debug("--------------")
                _log.debug("ACK: sig is invalid.")
                return

            # Is the syn valid?
            if not self.is_valid_syn(msg[u"syn_ack"][u"syn"]):
                _log.debug("ACK: syn is invalid.")
                return

            # Update handshake.
            contract = self.contracts[contract_id]
            self.handshake[contract_id] = {
                "state": u"ACK",
                "timestamp": time.time()
            }

            # Try make TCP con.
            self.net.unl.connect(
                contract["src_unl"],
                {
                    "success": success_wrapper(
                        self,
                        contract_id,
                        contract["host_unl"]
                    )
                },
                force_master=0,
                nonce=contract_id
            )

            _log.debug("ACK")

    def save_contract(self, contract):
        """Store *contract* under its derived id and return that id."""
        # Record contract details.
        contract_id = self.contract_id(contract)
        self.contracts[contract_id] = contract

        return contract_id

    def send_msg(self, dict_obj, unl):
        """JSON-encode *dict_obj* and direct-message it to the node behind
        *unl* via the DHT."""
        node_id = self.net.unl.deconstruct(unl)["node_id"]
        msg = json.dumps(dict_obj, ensure_ascii=True)
        self.net.dht_node.direct_message(
            node_id,
            msg
        )

    def contract_id(self, contract):
        """Return the hex SHA-256 digest of the contract's repr.

        NOTE(review): this hashes str(contract), so both peers must produce
        an identical repr (hence the OrderedDicts) — confirm cross-version
        stability.
        """
        if sys.version_info >= (3, 0, 0):
            contract = str(contract).encode("ascii")
        else:
            contract = str(contract)

        return hashlib.sha256(contract).hexdigest()

    def sign_contract(self, contract):
        """Sign the contract's hex-encoded repr with our key and store the
        signature under the "signature" key; returns the contract."""
        if sys.version_info >= (3, 0, 0):
            msg = str(contract).encode("ascii")
        else:
            msg = str(contract)

        msg = binascii.hexlify(msg).decode("utf-8")
        sig = self.wallet.sign_data(self.wif, msg)
        if sys.version_info >= (3, 0, 0):
            contract[u"signature"] = sig.decode("utf-8")
        else:
            contract[u"signature"] = unicode(sig)

        return contract

    def is_valid_contract_sig(self, contract):
        """Verify the contract's signature against our own address.

        Temporarily deletes the "signature" key so the checked repr matches
        what sign_contract signed, then restores it before returning.
        """
        sig = contract[u"signature"][:]
        del contract[u"signature"]

        if sys.version_info >= (3, 0, 0):
            msg = str(contract).encode("ascii")
        else:
            msg = str(contract)

        msg = binascii.hexlify(msg).decode("utf-8")
        address = self.wallet.get_address(self.wif)
        ret = self.wallet.verify_signature(address, sig, msg)
        contract[u"signature"] = sig[:]

        return ret

    def simple_data_request(self, data_id, node_unl, direction):
        """Convenience wrapper around data_request with file_size=0;
        direction u"send" maps to an upload, anything else to a download."""
        file_size = 0
        if direction == u"send":
            action = u"upload"
        else:
            action = u"download"

        return self.data_request(action, data_id, file_size, node_unl)

    def data_request(self, action, data_id, file_size, node_unl):
        """Build, sign and route a SYN contract; return a Deferred.

        action: "upload" (we host the shard) or anything else for download
        (the remote node hosts it). Raises when a download for *data_id* is
        already in progress. The returned twisted Deferred fires when the
        transfer completes or fails.
        """
        _log.debug("In data request function")

        # Who is hosting this data?
        if action == "upload":
            # We store this data.
            direction = u"send"
            host_unl = self.net.unl.value
            assert(storage.manager.find(self.store_config, data_id)
                   is not None)
        else:
            # They store the data.
            direction = u"receive"
            host_unl = node_unl
            if data_id in self.downloading:
                raise Exception("Already trying to download this.")

        # Encoding.
        # Normalize all identifiers to text for a stable contract repr.
        if sys.version_info >= (3, 0, 0):
            if type(data_id) == bytes:
                data_id = data_id.decode("utf-8")
            if type(host_unl) == bytes:
                host_unl = host_unl.decode("utf-8")
            if type(node_unl) == bytes:
                node_unl = node_unl.decode("utf-8")
        else:
            if type(data_id) == str:
                data_id = unicode(data_id)
            if type(host_unl) == str:
                host_unl = unicode(host_unl)
            if type(node_unl) == str:
                node_unl = unicode(node_unl)

        # Create contract.
        contract = OrderedDict({
            u"status": u"SYN",
            u"direction": direction,
            u"data_id": data_id,
            u"file_size": file_size,
            u"host_unl": host_unl,
            u"dest_unl": node_unl,
            u"src_unl": self.net.unl.value
        })

        # Sign contract.
        contract = self.sign_contract(contract)

        # Route contract.
        contract_id = self.save_contract(contract)
        self.send_msg(contract, node_unl)
        _log.debug("Sending data request")

        # Update handshake.
        self.handshake[contract_id] = {
            "state": "SYN",
            "timestamp": time.time()
        }

        # For async code.
        d = defer.Deferred()
        self.defers[contract_id] = d

        # Return defer for async code.
        return d

    def remove_file_from_storage(self, data_id):
        """Delete the shard identified by *data_id* from configured storage."""
        storage.manager.remove(self.store_config, data_id)

    def move_file_to_storage(self, path):
        """Copy the file at *path* into storage; return its size and
        content-derived data id."""
        with open(path, "rb") as shard:
            storage.manager.add(self.store_config, shard)
            return {
                "file_size": storage.shard.get_size(shard),
                "data_id": storage.shard.get_id(shard)
            }

    def get_data_chunk(self, data_id, position, chunk_size=1048576):
        """Read up to *chunk_size* bytes (default 1 MiB) of shard *data_id*
        starting at byte offset *position*."""
        path = storage.manager.find(self.store_config, data_id)
        buf = b""
        with open(path, "rb") as fp:
            fp.seek(position, 0)
            buf = fp.read(chunk_size)
            return buf

    def save_data_chunk(self, data_id, chunk):
        """Append *chunk* to the temp file backing the active download of
        *data_id*."""
        _log.debug("Saving data chunk for " + str(data_id))
        _log.debug("of size + " + str(len(chunk)))
        assert(data_id in self.downloading)

        # Find temp file path.
        path = self.downloading[data_id]
        _log.debug(path)
        with open(path, "ab") as fp:
            fp.write(chunk)
def test_queued():
    """Integration test: upload a shard Alice→Bob, then queue a second
    transfer back from inside Bob's completion callback.

    Builds two in-process FileTransfer nodes linked by a relay (no real
    networking), drives their event loops manually, and asserts the global
    `queue_succeeded` flag is set by Alice's deferred callback within the
    timeout.
    """
    from crochet import setup
    setup()

    # Alice sample node.
    alice_wallet = BtcTxStore(testnet=False, dryrun=True)
    alice_wif = alice_wallet.create_key()
    alice_node_id = address_to_node_id(alice_wallet.get_address(alice_wif))
    alice_dht = pyp2p.dht_msg.DHT(
        node_id=alice_node_id,
        networking=0
    )
    alice = FileTransfer(
        pyp2p.net.Net(
            net_type="direct",
            node_type="passive",
            nat_type="preserving",
            passive_port=63400,
            dht_node=alice_dht,
            wan_ip="8.8.8.8",
            debug=1
        ),
        BandwidthLimit(),
        wif=alice_wif,
        store_config={tempfile.mkdtemp(): None},
    )

    # Bob sample node.
    bob_wallet = BtcTxStore(testnet=False, dryrun=True)
    bob_wif = bob_wallet.create_key()
    bob_node_id = address_to_node_id(bob_wallet.get_address(bob_wif))
    bob_dht = pyp2p.dht_msg.DHT(
        node_id=bob_node_id,
        networking=0
    )
    bob = FileTransfer(
        pyp2p.net.Net(
            net_type="direct",
            node_type="passive",
            nat_type="preserving",
            passive_port=63401,
            dht_node=bob_dht,
            wan_ip="8.8.8.8",
            debug=1
        ),
        BandwidthLimit(),
        wif=bob_wif,
        store_config={tempfile.mkdtemp(): None}
    )

    # Simulate Alice + Bob "connecting"
    alice_dht.add_relay_link(bob_dht)
    bob_dht.add_relay_link(alice_dht)

    # Accept all transfers.
    def accept_handler(contract_id, src_unl, data_id, file_size):
        return 1

    # Add accept handler.
    alice.handlers["accept"].add(accept_handler)
    bob.handlers["accept"].add(accept_handler)

    # Create file we're suppose to be uploading.
    # data_id is the SHA-256 of the single byte "0", matching the content
    # written below.
    data_id = ("5feceb66ffc86f38d952786c6d696c"
               "79c2dbc239dd4e91b46729d73a27fb57e9")
    path = os.path.join(list(alice.store_config)[0], data_id)
    if not os.path.exists(path):
        with open(path, "w") as fp:
            fp.write("0")

    # Alice wants to upload data to Bob.
    upload_contract_id = alice.data_request(
        "download",
        data_id,
        0,
        bob.net.unl.value
    )

    # Delete source file.
    def callback_builder(path, alice, bob, data_id):
        # Fired once Bob has completed the first transfer: removes the
        # source file and queues a transfer in the opposite direction.
        def callback(client, contract_id, con):
            print("Upload succeeded")
            print("Removing content and downloading back")
            os.remove(path)

            # Fix transfers.
            bob.handlers["complete"] = []

            # Synchronize cons and check con.unl.
            time.sleep(1)
            clients = {"alice": alice, "bob": bob}
            for client in list({"alice": alice, "bob": bob}):
                print()
                print(client)
                clients[client].net.synchronize()
                nodes_out = clients[client].net.outbound
                nodes_in = clients[client].net.inbound
                for node in nodes_out + nodes_in:
                    print(node["con"].unl)
                print(clients[client].cons)

            # Queued transfer:
            download_contract_id = alice.data_request(
                "upload",
                data_id,
                0,
                bob.net.unl.value
            )
            print("Download contract ID =")
            print(download_contract_id)

            # Indicate Bob's download succeeded.
            def alice_callback(val):
                print("Download succeeded")
                global queue_succeeded
                queue_succeeded = 1

            def alice_errback(val):
                print("Download failed! Error:")
                print(val)

            # Hook upload from bob.
            d = alice.defers[download_contract_id]
            d.addCallback(alice_callback)
            d.addErrback(alice_errback)

        return callback

    # Register callback for bob (when he's downloaded the data.)
    bob.handlers["complete"] = [
        callback_builder(path, alice, bob, data_id)
    ]

    # d = alice.defers[upload_contract_id]
    # d.addCallback(callback_builder(path, alice, bob, data_id))

    # Main event loop.
    # Manually pump both nodes' transfer state machines until the queued
    # round-trip completes or 40 seconds elapse.
    timeout = time.time() + 40
    while not queue_succeeded and time.time() < timeout:
        for client in [alice, bob]:
            if client == alice:
                _log.debug("Alice")
            else:
                _log.debug("Bob")
            process_transfers(client)

        time.sleep(1)

    if not queue_succeeded:
        print("\a")

    for client in [alice, bob]:
        client.net.stop()

    assert(queue_succeeded == 1)
    def test_bandwidth_test(self):
        """Integration test: run a BandwidthTest between two in-process
        nodes linked by a DHT relay (no real networking).

        Pumps both transfer state machines until the bandwidth test
        finishes or 60 seconds elapse, then asserts the global
        `test_success` flag was set by the results callback.
        """
        # Alice sample node.
        alice_wallet = BtcTxStore(testnet=False, dryrun=True)
        alice_wif = alice_wallet.create_key()
        alice_node_id = address_to_node_id(alice_wallet.get_address(alice_wif))
        alice_dht = pyp2p.dht_msg.DHT(
            node_id=alice_node_id,
            networking=0
        )
        alice_transfer = FileTransfer(
            pyp2p.net.Net(
                net_type="direct",
                node_type="passive",
                nat_type="preserving",
                passive_port=63600,
                debug=1,
                wan_ip="8.8.8.8",
                dht_node=alice_dht,
            ),
            wif=alice_wif,
            store_config={tempfile.mkdtemp(): None}
        )
        _log.debug("Alice UNL")
        _log.debug(alice_transfer.net.unl.value)

        # Bob sample node.
        bob_wallet = BtcTxStore(testnet=False, dryrun=True)
        bob_wif = bob_wallet.create_key()
        bob_node_id = address_to_node_id(bob_wallet.get_address(bob_wif))
        bob_dht = pyp2p.dht_msg.DHT(
            node_id=bob_node_id,
            networking=0
        )
        bob_transfer = FileTransfer(
            pyp2p.net.Net(
                net_type="direct",
                node_type="passive",
                nat_type="preserving",
                passive_port=63601,
                debug=1,
                wan_ip="8.8.8.8",
                dht_node=bob_dht
            ),
            wif=bob_wif,
            store_config={tempfile.mkdtemp(): None}
        )

        # Link DHT nodes.
        alice_dht.add_relay_link(bob_dht)
        bob_dht.add_relay_link(alice_dht)
        _log.debug("Bob UNL")
        _log.debug(bob_transfer.net.unl.value)

        # Show bandwidth.
        def show_bandwidth(results):
            global test_success
            test_success = 1
            _log.debug(results)

        # Test bandwidth between Alice and Bob.
        bob_test = BandwidthTest(bob_wif, bob_transfer, bob_dht, 0)
        alice_test = BandwidthTest(alice_wif, alice_transfer, alice_dht, 0)
        d = alice_test.start(bob_transfer.net.unl.value)
        d.addCallback(show_bandwidth)

        # Main event loop.
        # and not test_success
        end_time = time.time() + 60
        while alice_test.active_test is not None and time.time() < end_time:
            for client in [alice_transfer, bob_transfer]:
                process_transfers(client)

            time.sleep(0.002)

        # End net.
        for client in [alice_transfer, bob_transfer]:
            client.net.stop()

        self.assertTrue(test_success == 1)