def test_handleFoundValues(self):
    self._connecting_to_connected()
    self.wire_protocol[self.addr1] = self.con
    self.protocol.router.addContact(self.node1)
    self.protocol.router.addContact(self.node2)
    self.protocol.router.addContact(self.node3)
    node = Node(digest("s"))
    nearest = self.protocol.router.findNeighbors(node)
    spider = ValueSpiderCrawl(self.protocol, node, nearest, 20, 3)
    val = Value()
    val.valueKey = digest("contractID")
    val.serializedData = self.node1.getProto().SerializeToString()
    val.ttl = 10
    val1 = val.SerializeToString()
    value = spider._handleFoundValues([val1])
    self.assertEqual(value[0], val.SerializeToString())

    # test handle multiple values
    val.serializedData = self.node2.getProto().SerializeToString()
    val2 = val.SerializeToString()
    val.valueKey = digest("contractID2")
    val3 = val.SerializeToString()
    found_values = [val1, val2, val2, val3]
    self.assertEqual(spider._handleFoundValues(found_values), [val3, val2])

    # test store value at nearest without value
    spider.nearestWithoutValue.push(self.node1)
    spider._handleFoundValues(found_values)
    self.clock.advance(constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    self.assertTrue(len(self.proto_mock.send_datagram.call_args_list) > 1)
    self.proto_mock.send_datagram.call_args_list = []

def test_rpc_store(self):
    self._connecting_to_connected()
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.arguments.extend([digest("Keyword"), "Key",
                        self.protocol.sourceNode.getProto().SerializeToString()])
    data = m.SerializeToString()
    del m.arguments[-3:]
    m.arguments.append("True")
    expected_message = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)
    self.assertTrue(
        self.storage.getSpecific(digest("Keyword"), "Key") ==
        self.protocol.sourceNode.getProto().SerializeToString())

def test_create_proto(self):
    rid = hashlib.sha1(str(random.getrandbits(255))).digest()
    pubkey = digest("pubkey")
    addr = objects.Node.IPAddress()
    addr.ip = "127.0.0.1"
    addr.port = 1234
    relay_addr = objects.Node.IPAddress()
    relay_addr.ip = "127.0.0.1"
    relay_addr.port = 1234
    n1 = objects.Node()
    n1.guid = rid
    n1.signedPublicKey = pubkey
    n1.vendor = False
    n1.nodeAddress.MergeFrom(addr)
    n1.natType = objects.FULL_CONE
    n2 = Node(rid, "127.0.0.1", 1234, digest("pubkey"), None, objects.FULL_CONE, False)
    self.assertEqual(n1, n2.getProto())
    n1.vendor = True
    n1.relayAddress.MergeFrom(relay_addr)
    n2 = Node(rid, "127.0.0.1", 1234, digest("pubkey"), ("127.0.0.1", 1234), objects.FULL_CONE, True)
    self.assertEqual(n1, n2.getProto())

def upload_image(self, request):
    try:
        ret = []
        if "image" in request.args:
            for image in request.args["image"]:
                img = image.decode('base64')
                hash_value = digest(img).encode("hex")
                with open(DATA_FOLDER + "store/media/" + hash_value, 'wb') as outfile:
                    outfile.write(img)
                self.db.HashMap().insert(unhexlify(hash_value), DATA_FOLDER + "store/media/" + hash_value)
                ret.append(hash_value)
        elif "avatar" in request.args:
            avi = request.args["avatar"][0].decode("base64")
            hash_value = digest(avi).encode("hex")
            with open(DATA_FOLDER + "store/avatar", 'wb') as outfile:
                outfile.write(avi)
            self.db.HashMap().insert(unhexlify(hash_value), DATA_FOLDER + "store/avatar")
            ret.append(hash_value)
        elif "header" in request.args:
            hdr = request.args["header"][0].decode("base64")
            hash_value = digest(hdr).encode("hex")
            with open(DATA_FOLDER + "store/header", 'wb') as outfile:
                outfile.write(hdr)
            self.db.HashMap().insert(unhexlify(hash_value), DATA_FOLDER + "store/header")
            ret.append(hash_value)
        request.write(json.dumps({"success": True, "image_hashes": ret}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET

def test_transferKeyValues(self):
    self._connecting_to_connected()
    self.wire_protocol[self.addr1] = self.con
    self.protocol.addToRouter(mknode())
    self.protocol.storage[digest("keyword")] = (
        digest("key"), self.protocol.sourceNode.getProto().SerializeToString())
    self.protocol.transferKeyValues(Node(digest("id"), self.addr1[0], self.addr1[1]))
    self.clock.advance(1)
    connection.REACTOR.runUntilCurrent()
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    sent_message = sent_packet.payload
    x = message.Message()
    x.ParseFromString(sent_message)
    m = message.Message()
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.arguments.append(digest("keyword"))
    m.arguments.append(digest("key"))
    m.arguments.append(self.protocol.sourceNode.getProto().SerializeToString())
    self.assertEqual(x.sender, m.sender)
    self.assertEqual(x.command, m.command)
    self.assertEqual(x.arguments[0], m.arguments[0])
    self.assertEqual(x.arguments[1], m.arguments[1])
    self.assertEqual(x.arguments[2], m.arguments[2])

def test_addSameIP(self):
    self.router.addContact(self.node)
    self.router.addContact(Node(digest("asdf"), "127.0.0.1", 1234))
    self.assertEqual(len(self.router.buckets), 1)
    self.assertEqual(len(self.router.buckets[0].nodes), 1)
    self.assertTrue(self.router.buckets[0].getNodes()[0].id == digest("asdf"))

def test_rpc_find_value(self):
    self._connecting_to_connected()
    self.protocol.router.addContact(self.protocol.sourceNode)

    # Set a value to find
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.protoVer = self.version
    m.arguments.extend([digest("Keyword"), "Key",
                        self.protocol.sourceNode.getProto().SerializeToString(), str(10)])
    m.signature = self.signing_key.sign(m.SerializeToString())[:64]
    data = m.SerializeToString()
    self.handler.on_connection_made()
    self.handler.receive_message(data)
    self.assertTrue(
        self.storage.getSpecific(digest("Keyword"), "Key") ==
        self.protocol.sourceNode.getProto().SerializeToString())

    # Send the find_value rpc
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("FIND_VALUE")
    m.protoVer = self.version
    m.testnet = False
    m.arguments.append(digest("Keyword"))
    m.signature = self.signing_key.sign(m.SerializeToString())[:64]
    data = m.SerializeToString()
    self.handler.receive_message(data)

    del m.arguments[-1]
    value = objects.Value()
    value.valueKey = "Key"
    value.serializedData = self.protocol.sourceNode.getProto().SerializeToString()
    value.ttl = 10
    m.arguments.append("value")
    m.arguments.append(value.SerializeToString())
    m.ClearField("signature")
    expected_message = m.SerializeToString()

    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packets = tuple(
        packet.Packet.from_bytes(call[0][0])
        for call in self.proto_mock.send_datagram.call_args_list)
    received_message = sent_packets[1].payload
    a = message.Message()
    a.ParseFromString(received_message)
    a.ClearField("signature")
    received_message = a.SerializeToString()
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 3)

def test_rpc_store(self):
    self._connecting_to_connected()
    self.protocol.router.addContact(self.protocol.sourceNode)
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.protoVer = self.version
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key",
                        self.protocol.sourceNode.getProto().SerializeToString(), str(10)])
    m.signature = self.signing_key.sign(m.SerializeToString())[:64]
    data = m.SerializeToString()
    del m.arguments[-4:]
    m.arguments.append("True")
    m.ClearField("signature")
    expected_message = m.SerializeToString()
    self.handler.on_connection_made()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    m2 = message.Message()
    m2.ParseFromString(received_message)
    m2.ClearField("signature")
    received_message = m2.SerializeToString()
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)
    self.assertTrue(
        self.storage.getSpecific(digest("Keyword"), "Key") ==
        self.protocol.sourceNode.getProto().SerializeToString())

def delete(self, delete_images=False):
    """
    Deletes the contract json from the OpenBazaar directory as well as the
    listing metadata from the db and all the related images in the file system.
    """
    # get the file path
    h = self.db.HashMap()
    file_path = h.get_file(digest(json.dumps(self.contract, indent=4)))

    # maybe delete the images from disk
    if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"] and delete_images:
        for image_hash in self.contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
            # delete from disk
            image_path = h.get_file(unhexlify(image_hash))
            if os.path.exists(image_path):
                os.remove(image_path)
            # remove pointer to the image from the HashMap
            h.delete(unhexlify(image_hash))

    # delete the contract from disk
    if os.path.exists(file_path):
        os.remove(file_path)

    # delete the listing metadata from the db
    contract_hash = digest(json.dumps(self.contract, indent=4))
    self.db.ListingsStore().delete_listing(contract_hash)

    # remove the pointer to the contract from the HashMap
    h.delete(contract_hash)

def upload_image(self, request):
    try:
        ret = []
        if "image" in request.args:
            for image in request.args["image"]:
                img = image.decode('base64')
                hash_value = digest(img).encode("hex")
                with open(DATA_FOLDER + "store/media/" + hash_value, 'wb') as outfile:
                    outfile.write(img)
                self.db.HashMap().insert(hash_value, DATA_FOLDER + "store/media/" + hash_value)
                ret.append(hash_value)
        elif "avatar" in request.args:
            avi = request.args["avatar"][0].decode("base64")
            hash_value = digest(avi).encode("hex")
            with open(DATA_FOLDER + "store/avatar", 'wb') as outfile:
                outfile.write(avi)
            self.db.HashMap().insert(hash_value, DATA_FOLDER + "store/avatar")
            ret.append(hash_value)
        elif "header" in request.args:
            hdr = request.args["header"][0].decode("base64")
            hash_value = digest(hdr).encode("hex")
            with open(DATA_FOLDER + "store/header", 'wb') as outfile:
                outfile.write(hdr)
            self.db.HashMap().insert(hash_value, DATA_FOLDER + "store/header")
            ret.append(hash_value)
        request.write(json.dumps({"success": True, "image_hashes": ret}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET

def cache(filename):
    """
    Saves the file to a cache folder if it doesn't already exist.
    """
    if not os.path.isfile(DATA_FOLDER + "cache/" + digest(filename).encode("hex")):
        with open(DATA_FOLDER + "cache/" + digest(filename).encode("hex"), 'wb') as outfile:
            outfile.write(filename)

def test_handleFoundValues(self):
    self._connecting_to_connected()
    self.wire_protocol[self.addr1] = self.con
    self.protocol.router.addContact(self.node1)
    self.protocol.router.addContact(self.node2)
    self.protocol.router.addContact(self.node3)
    node = Node(digest("s"))
    nearest = self.protocol.router.findNeighbors(node)
    spider = ValueSpiderCrawl(self.protocol, node, nearest,
                              dht.constants.KSIZE, dht.constants.ALPHA)
    val = Value()
    val.valueKey = digest("contractID")
    val.serializedData = self.node1.getProto().SerializeToString()
    val1 = val.SerializeToString()
    value = spider._handleFoundValues([(val1,)])
    self.assertEqual(value[0], val.SerializeToString())

    # test handle multiple values
    val.serializedData = self.node2.getProto().SerializeToString()
    val2 = val.SerializeToString()
    found_values = [(val1,), (val1,), (val2,)]
    self.assertEqual(spider._handleFoundValues(found_values), (val1,))

    # test store value at nearest without value
    spider.nearestWithoutValue.push(self.node1)
    spider._handleFoundValues(found_values)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    self.assertTrue(len(self.proto_mock.send_datagram.call_args_list) > 1)
    self.proto_mock.send_datagram.call_args_list = []

def test_callFindNode(self):
    self._connecting_to_connected()
    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    self.wire_protocol[self.addr1] = self.con
    keyword = Node(digest("nodetofind"))
    self.protocol.callFindNode(n, keyword)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    sent_message = sent_packet.payload
    m = message.Message()
    m.ParseFromString(sent_message)
    self.assertTrue(len(m.messageID) == 20)
    self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
    self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
    self.assertTrue(m.command == message.FIND_NODE)
    self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
    self.assertEqual(m.arguments[0], keyword.id)

def test_rpc_store(self):
    self._connecting_to_connected()
    self.protocol.router.addContact(self.protocol.sourceNode)
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.protoVer = self.version
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key",
                        self.protocol.sourceNode.getProto().SerializeToString(), str(10)])
    data = m.SerializeToString()
    del m.arguments[-4:]
    m.arguments.append("True")
    expected_message = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)
    self.assertTrue(
        self.storage.getSpecific(digest("Keyword"), "Key") ==
        self.protocol.sourceNode.getProto().SerializeToString())

def jsonrpc_delete(self, keyword, key):
    def handle_result(result):
        print "JSONRPC result:", result

    signature = self.keys.signing_key.sign(digest(key))
    d = self.kserver.delete(str(keyword), digest(key), signature[:64])
    d.addCallback(handle_result)
    return "Sending delete request..."

def test_digest(self):
    intermed = hashlib.sha256("1").digest()
    d = hashlib.new('ripemd160', intermed).digest()
    self.assertEqual(d, digest(1))

    intermed = hashlib.sha256("another").digest()
    d = hashlib.new('ripemd160', intermed).digest()
    self.assertEqual(d, digest('another'))

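# A minimal sketch of the digest() helper the assertions above exercise, assuming it
# simply coerces its argument to a string and applies RIPEMD-160 over SHA-256. The real
# implementation is not shown in this section, so treat this as illustrative only.
import hashlib

def digest(data):
    if not isinstance(data, str):
        data = str(data)  # the test above passes the int 1 as well as strings
    intermed = hashlib.sha256(data).digest()             # inner SHA-256
    return hashlib.new('ripemd160', intermed).digest()   # outer RIPEMD-160, 20 bytes
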
def get_result(result):
    try:
        if result[0] and digest(result[1][0]) == image_hash:
            self.cache(result[1][0], digest(result[1][0]).encode("hex"))
            return result[1][0]
        else:
            return None
    except Exception:
        return None

def test_rpc_delete(self):
    self._connecting_to_connected()

    # Set a keyword to store
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.arguments.extend(["Keyword", "Key",
                        self.protocol.sourceNode.getProto().SerializeToString()])
    data = m.SerializeToString()
    for i in range(0, 3):
        del m.arguments[-1]
    m.arguments.append("True")
    expected_message1 = m.SerializeToString()
    self.handler.receive_message(data)
    self.assertTrue(self.storage.getSpecific("Keyword", "Key") ==
                    self.protocol.sourceNode.getProto().SerializeToString())

    # Test bad signature
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("DELETE")
    m.arguments.extend(["Keyword", "Key", "Bad Signature"])
    data = m.SerializeToString()
    for i in range(0, 3):
        del m.arguments[-1]
    m.arguments.append("False")
    expected_message2 = m.SerializeToString()
    self.handler.receive_message(data)
    self.assertTrue(self.storage.getSpecific("Keyword", "Key") ==
                    self.protocol.sourceNode.getProto().SerializeToString())

    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    sent_packets = tuple(
        packet.Packet.from_bytes(call[0][0])
        for call in self.proto_mock.send_datagram.call_args_list
    )
    self.assertEqual(sent_packets[0].payload, expected_message1)
    self.assertEqual(sent_packets[1].payload, expected_message2)
    self.proto_mock.send_datagram.call_args_list = []

    # Test good signature
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("DELETE")
    m.arguments.extend(["Keyword", "Key", self.signing_key.sign("Key")[:64]])
    data = m.SerializeToString()
    for i in range(0, 3):
        del m.arguments[-1]
    m.arguments.append("True")
    expected_message3 = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    self.assertEqual(sent_packet.payload, expected_message3)
    self.assertTrue(self.storage.getSpecific("Keyword", "Key") is None)

def save(self):
    """
    Saves the json contract into the OpenBazaar/store/listings/contracts/ directory.
    It uses the title as the file name so it's easy on human eyes. A mapping of the
    hash of the contract and file path is stored in the database so we can retrieve
    the contract with only its hash.

    Additionally, the contract metadata (sent in response to the GET_LISTINGS query)
    is saved in the db for fast access.
    """
    # get the contract title to use as the file name and format it
    file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
    file_name = re.sub(r"[^\w\s]", '', file_name)
    file_name = re.sub(r"\s+", '_', file_name)
    file_name += digest(json.dumps(self.contract, indent=4)).encode("hex")[:8]

    # save the json contract to the file system
    file_path = DATA_FOLDER + "store/listings/contracts/" + file_name + ".json"
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))

    # Create a `ListingMetadata` protobuf object using data from the full contract
    listings = Listings()
    data = listings.ListingMetadata()
    data.contract_hash = digest(json.dumps(self.contract, indent=4))
    vendor_item = self.contract["vendor_offer"]["listing"]["item"]
    data.title = vendor_item["title"]
    if "image_hashes" in vendor_item:
        data.thumbnail_hash = unhexlify(vendor_item["image_hashes"][0])
    if "category" in vendor_item:
        data.category = vendor_item["category"]
    if "bitcoin" not in vendor_item["price_per_unit"]:
        data.price = float(vendor_item["price_per_unit"]["fiat"]["price"])
        data.currency_code = vendor_item["price_per_unit"]["fiat"]["currency_code"]
    else:
        data.price = float(vendor_item["price_per_unit"]["bitcoin"])
        data.currency_code = "BTC"
    data.nsfw = vendor_item["nsfw"]
    if "shipping" not in self.contract["vendor_offer"]["listing"]:
        data.origin = CountryCode.Value("NA")
    else:
        data.origin = CountryCode.Value(
            self.contract["vendor_offer"]["listing"]["shipping"]["shipping_origin"].upper())
        for region in self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"]:
            data.ships_to.append(CountryCode.Value(region.upper()))

    # save the mapping of the contract file path and contract hash in the database
    self.db.HashMap().insert(data.contract_hash, file_path)

    # save the `ListingMetadata` protobuf to the database as well
    self.db.ListingsStore().add_listing(data)

def update_profile(self, request):
    try:
        p = Profile(self.db)
        if not p.get().encryption_key \
                and "name" not in request.args \
                and "location" not in request.args:
            return "False"
        u = objects.Profile()
        if "name" in request.args:
            u.name = request.args["name"][0]
        if "location" in request.args:
            # This needs to be formatted. Either here or from the UI.
            u.location = CountryCode.Value(request.args["location"][0].upper())
        if "handle" in request.args:
            u.handle = request.args["handle"][0]
        if "about" in request.args:
            u.about = request.args["about"][0]
        if "short_description" in request.args:
            u.short_description = request.args["short_description"][0]
        if "nsfw" in request.args:
            u.nsfw = True
        if "vendor" in request.args:
            u.vendor = True
        if "moderator" in request.args:
            u.moderator = True
        if "website" in request.args:
            u.website = request.args["website"][0]
        if "email" in request.args:
            u.email = request.args["email"][0]
        if "avatar" in request.args:
            with open(DATA_FOLDER + "store/avatar", 'wb') as outfile:
                outfile.write(request.args["avatar"][0])
            avatar_hash = digest(request.args["avatar"][0])
            self.db.HashMap().insert(avatar_hash, DATA_FOLDER + "store/avatar")
            u.avatar_hash = avatar_hash
        if "header" in request.args:
            with open(DATA_FOLDER + "store/header", 'wb') as outfile:
                outfile.write(request.args["header"][0])
            header_hash = digest(request.args["header"][0])
            self.db.HashMap().insert(header_hash, DATA_FOLDER + "store/header")
            u.header_hash = header_hash
        if "pgp_key" in request.args and "signature" in request.args:
            p.add_pgp_key(request.args["pgp_key"][0], request.args["signature"][0],
                          self.keychain.guid.encode("hex"))
        enc = u.PublicKey()
        enc.public_key = self.keychain.encryption_pubkey
        enc.signature = self.keychain.signing_key.sign(enc.public_key)[:64]
        u.encryption_key.MergeFrom(enc)
        p.update(u)
        request.write(json.dumps({"success": True}))
        request.finish()
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET

def test_nodesFound(self):
    self._connecting_to_connected()
    self.wire_protocol[self.addr1] = self.con
    self.wire_protocol[self.addr2] = self.con
    self.wire_protocol[self.addr3] = self.con
    self.protocol.router.addContact(self.node1)
    self.protocol.router.addContact(self.node2)
    self.protocol.router.addContact(self.node3)

    # test response with uncontacted nodes
    node = Node(digest("s"))
    nearest = self.protocol.router.findNeighbors(node)
    spider = ValueSpiderCrawl(self.protocol, node, nearest,
                              dht.constants.KSIZE, dht.constants.ALPHA)
    response = (True, (self.node1.getProto().SerializeToString(),
                       self.node2.getProto().SerializeToString(),
                       self.node3.getProto().SerializeToString()))
    responses = {self.node1.id: response}
    spider._nodesFound(responses)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    self.assertEqual(len(self.proto_mock.send_datagram.call_args_list), 4)

    # test all been contacted
    spider = ValueSpiderCrawl(self.protocol, node, nearest,
                              dht.constants.KSIZE, dht.constants.ALPHA)
    for peer in spider.nearest.getUncontacted():
        spider.nearest.markContacted(peer)
    response = (True, (self.node1.getProto().SerializeToString(),
                       self.node2.getProto().SerializeToString(),
                       self.node3.getProto().SerializeToString()))
    responses = {self.node2.id: response}
    resp = spider._nodesFound(responses)
    self.assertTrue(resp is None)

    # test didn't happen
    spider = ValueSpiderCrawl(self.protocol, node, nearest,
                              dht.constants.KSIZE, dht.constants.ALPHA)
    response = (False, (self.node1.getProto().SerializeToString(),
                        self.node2.getProto().SerializeToString(),
                        self.node3.getProto().SerializeToString()))
    responses = {self.node1.id: response}
    spider._nodesFound(responses)
    self.assertTrue(len(spider.nearest) == 2)

    # test got value
    val = Value()
    val.valueKey = digest("contractID")
    val.serializedData = self.protocol.sourceNode.getProto().SerializeToString()
    response = (True, ("value", val.SerializeToString()))
    responses = {self.node3.id: response}
    spider.nearestWithoutValue = NodeHeap(node, 1)
    value = spider._nodesFound(responses)
    self.assertEqual(value[0], val.SerializeToString())

def cache(self, file):
    """
    Saves the file to a cache folder if it doesn't already exist.
    """
    if not os.path.exists(DATA_FOLDER + "cache"):
        os.makedirs(DATA_FOLDER + "cache")
    if not os.path.isfile(DATA_FOLDER + "cache/" + digest(file).encode("hex")):
        with open(DATA_FOLDER + "cache/" + digest(file).encode("hex"), 'w') as outfile:
            outfile.write(file)

def on_tx_received(self, address_version, address_hash, height, block_hash, tx):
    """
    Fire when the libbitcoin server tells us we received a payment to this funding
    address. While unlikely, a user may send multiple transactions to the funding
    address to reach the funding level. We need to keep a running balance and
    increment it when a new transaction is received.

    If the contract is fully funded, we push a notification to the websockets.
    """
    # decode the transaction
    transaction = bitcoin.deserialize(tx.encode("hex"))

    # get the amount (in satoshi) the user is expected to pay
    amount_to_pay = int(float(self.contract["buyer_order"]["order"]["payment"]["amount"]) * 100000000)
    if tx not in self.received_txs:  # make sure we aren't parsing the same tx twice.
        output_script = (
            "a914" +
            digest(unhexlify(self.contract["buyer_order"]["order"]["payment"]["redeem_script"])).encode("hex") +
            "87"
        )
        for output in transaction["outs"]:
            if output["script"] == output_script:
                self.amount_funded += output["value"]
                if tx not in self.received_txs:
                    self.received_txs.append(tx)
        if self.amount_funded >= amount_to_pay:  # if fully funded
            self.timeout.cancel()
            self.blockchain.unsubscribe_address(
                self.contract["buyer_order"]["order"]["payment"]["address"],
                self.on_tx_received
            )
            order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
            if self.is_purchase:
                message_json = {
                    "payment_received": {
                        "address": self.contract["buyer_order"]["order"]["payment"]["address"],
                        "order_id": order_id,
                    }
                }
                # update the db
                self.db.Purchases().update_status(order_id, 1)
                self.log.info("Payment for order id %s successfully broadcast to network." % order_id)
            else:
                message_json = {
                    "new_order": {
                        "order_id": order_id,
                        "title": self.contract["vendor_offer"]["listing"]["item"]["title"],
                    }
                }
                self.db.Sales().update_status(order_id, 1)
                self.log.info("Received new order %s" % order_id)

            # push the message over websockets
            self.ws.push(json.dumps(message_json, indent=4))

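# The output_script comparison above relies on the standard P2SH pattern:
# 0xa9 = OP_HASH160, 0x14 = push 20 bytes, then hash160 of the redeem script, then
# 0x87 = OP_EQUAL. An illustrative helper only; the class above builds the same hex
# string inline and uses the digest() helper for the hash160 step.
from binascii import unhexlify

def p2sh_output_script(redeem_script_hex):
    script_hash = digest(unhexlify(redeem_script_hex)).encode("hex")  # hash160 of redeem script
    return "a914" + script_hash + "87"
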
def test_rpc_find_without_value(self):
    self._connecting_to_connected()
    node1 = Node(digest("id1"), "127.0.0.1", 12345, digest("key1"))
    node2 = Node(digest("id2"), "127.0.0.1", 22222, digest("key2"))
    node3 = Node(digest("id3"), "127.0.0.1", 77777, digest("key3"))
    self.protocol.router.addContact(node1)
    self.protocol.router.addContact(node2)
    self.protocol.router.addContact(node3)
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("FIND_VALUE")
    m.protoVer = self.version
    m.testnet = False
    m.arguments.append(digest("Keyword"))
    data = m.SerializeToString()
    self.handler.receive_message(data)
    del m.arguments[-1]
    m.arguments.extend([node3.getProto().SerializeToString(),
                        node1.getProto().SerializeToString(),
                        node2.getProto().SerializeToString()])
    expected_message = m.SerializeToString()
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    m = message.Message()
    m.ParseFromString(received_message)
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)

def test_nodesFound(self):
    self._connecting_to_connected()
    self.wire_protocol[self.addr1] = self.con
    self.wire_protocol[self.addr2] = self.con
    self.wire_protocol[self.addr3] = self.con
    self.protocol.router.addContact(self.node1)
    self.protocol.router.addContact(self.node2)
    self.protocol.router.addContact(self.node3)

    # test response with uncontacted nodes
    node = Node(digest("s"))
    nearest = self.protocol.router.findNeighbors(node)
    spider = ValueSpiderCrawl(self.protocol, node, nearest, 20, 3)
    response = (True, (self.node1.getProto().SerializeToString(),
                       self.node2.getProto().SerializeToString(),
                       self.node3.getProto().SerializeToString()))
    responses = {self.node1.id: response}
    spider._nodesFound(responses)
    self.clock.advance(constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    self.assertEqual(len(self.proto_mock.send_datagram.call_args_list), 4)

    # test all been contacted
    spider = ValueSpiderCrawl(self.protocol, node, nearest, 20, 3)
    for peer in spider.nearest.getUncontacted():
        spider.nearest.markContacted(peer)
    response = (True, (self.node1.getProto().SerializeToString(),
                       self.node2.getProto().SerializeToString(),
                       self.node3.getProto().SerializeToString()))
    responses = {self.node2.id: response}
    resp = spider._nodesFound(responses)
    self.assertTrue(resp is None)

    # test didn't happen
    spider = ValueSpiderCrawl(self.protocol, node, nearest, 20, 3)
    response = (False, (self.node1.getProto().SerializeToString(),
                        self.node2.getProto().SerializeToString(),
                        self.node3.getProto().SerializeToString()))
    responses = {self.node1.id: response}
    spider._nodesFound(responses)
    self.assertTrue(len(spider.nearest) == 2)

    # test got value
    val = Value()
    val.valueKey = digest("contractID")
    val.serializedData = self.protocol.sourceNode.getProto().SerializeToString()
    val.ttl = 10
    response = (True, ("value", val.SerializeToString()))
    responses = {self.node3.id: response}
    spider.nearestWithoutValue = NodeHeap(node, 1)
    value = spider._nodesFound(responses)
    self.assertEqual(value[0], val.SerializeToString())

def test_rpc_find_node(self):
    self._connecting_to_connected()
    node1 = Node(digest("id1"), "127.0.0.1", 12345, digest("key1"))
    node2 = Node(digest("id2"), "127.0.0.1", 22222, digest("key2"))
    node3 = Node(digest("id3"), "127.0.0.1", 77777, digest("key3"))
    self.protocol.router.addContact(node1)
    self.protocol.router.addContact(node2)
    self.protocol.router.addContact(node3)
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("FIND_NODE")
    m.arguments.append(digest("nodetofind"))
    data = m.SerializeToString()
    del m.arguments[-1]
    m.arguments.extend([node2.getProto().SerializeToString(),
                        node1.getProto().SerializeToString(),
                        node3.getProto().SerializeToString()])
    expected_message = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    a = message.Message()
    a.ParseFromString(received_message)
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)

def make_moderator(self):
    """
    Set self as a moderator in the DHT.
    """
    u = objects.Profile()
    k = u.PublicKey()
    k.public_key = bitcoin.bip32_deserialize(KeyChain().bitcoin_master_pubkey)[5]
    k.signature = self.signing_key.sign(k.public_key)[:64]
    u.bitcoin_key.MergeFrom(k)
    u.moderator = True
    Profile().update(u)
    proto = self.kserver.node.getProto().SerializeToString()
    self.kserver.set(digest("moderators"), digest(proto), proto)

def make_moderator(self):
    """
    Set self as a moderator in the DHT.
    """
    u = objects.Profile()
    k = u.PublicKey()
    k.public_key = bitcoin.bip32_deserialize(KeyChain(self.db).bitcoin_master_pubkey)[5]
    k.signature = self.signing_key.sign(k.public_key)[:64]
    u.bitcoin_key.MergeFrom(k)
    u.moderator = True
    Profile(self.db).update(u)
    proto = self.kserver.node.getProto().SerializeToString()
    self.kserver.set(digest("moderators"), digest(proto), proto)

def test_acceptResponse(self):
    self._connecting_to_connected()

    def handle_response(resp):
        self.assertTrue(resp[0])
        self.assertEqual(resp[1][0], "test")
        self.assertTrue(message_id not in self.protocol._outstanding)

    message_id = digest("msgid")
    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    d = defer.Deferred()
    self.protocol._outstanding[message_id] = (d, self.addr1, reactor.callLater(5, handle_response))
    self.protocol._acceptResponse(message_id, ["test"], n)
    return d.addCallback(handle_response)

def make_moderator(self):
    """
    Set self as a moderator in the DHT.
    """
    u = objects.Profile()
    k = u.PublicKey()
    k.public_key = unhexlify(bitcointools.bip32_extract_key(KeyChain(self.db).bitcoin_master_pubkey))
    k.signature = self.signing_key.sign(k.public_key)[:64]
    u.bitcoin_key.MergeFrom(k)
    u.moderator = True
    Profile(self.db).update(u)
    proto = self.kserver.node.getProto().SerializeToString()
    self.kserver.set(digest("moderators"), digest(proto), proto)
    self.log.info("setting self as moderator on the network")

def test_create_proto(self):
    rid = hashlib.sha1(str(random.getrandbits(255))).digest()
    pubkey = digest("pubkey")
    vendor = True
    n1 = objects.Node()
    n1.guid = rid
    n1.signedPublicKey = pubkey
    n1.vendor = False
    n2 = Node(rid, signed_pubkey=digest("pubkey"))
    self.assertEqual(n1, n2.getProto())
    n1.vendor = True
    n2 = Node(rid, signed_pubkey=pubkey, vendor=vendor)
    self.assertEqual(n1, n2.getProto())

def update_profile(self, request):
    p = Profile()
    if not p.get().encryption_key \
            and "name" not in request.args \
            and "location" not in request.args:
        return "False"
    u = objects.Profile()
    if "name" in request.args:
        u.name = request.args["name"][0]
    if "location" in request.args:
        # This needs to be formatted. Either here or from the UI.
        u.location = CountryCode.Value(request.args["location"][0].upper())
    if "handle" in request.args:
        u.handle = request.args["handle"][0]
    if "about" in request.args:
        u.about = request.args["about"][0]
    if "short_description" in request.args:
        u.short_description = request.args["short_description"][0]
    if "nsfw" in request.args:
        u.nsfw = True
    if "vendor" in request.args:
        u.vendor = True
    if "moderator" in request.args:
        u.moderator = True
    if "website" in request.args:
        u.website = request.args["website"][0]
    if "email" in request.args:
        u.email = request.args["email"][0]
    if "avatar" in request.args:
        with open(DATA_FOLDER + "store/avatar", 'wb') as outfile:
            outfile.write(request.args["avatar"][0])
        avatar_hash = digest(request.args["avatar"][0])
        HashMap().insert(avatar_hash, DATA_FOLDER + "store/avatar")
        u.avatar_hash = avatar_hash
    if "header" in request.args:
        with open(DATA_FOLDER + "store/header", 'wb') as outfile:
            outfile.write(request.args["header"][0])
        header_hash = digest(request.args["header"][0])
        HashMap().insert(header_hash, DATA_FOLDER + "store/header")
        u.header_hash = header_hash
    if "pgp_key" in request.args and "signature" in request.args:
        p.add_pgp_key(request.args["pgp_key"][0], request.args["signature"][0],
                      KeyChain().guid.encode("hex"))
    enc = u.PublicKey()
    enc.public_key = KeyChain().encryption_pubkey
    enc.signature = KeyChain().signing_key.sign(enc.public_key)[:64]
    u.encryption_key.MergeFrom(enc)
    p.update(u)

def add_order_confirmation(self, payout_address, comments=None, shipper=None,
                           tracking_number=None, est_delivery=None, url=None, password=None):
    """
    Add the vendor's order confirmation to the contract.
    """
    if not self.testnet and not (payout_address[:1] == "1" or payout_address[:1] == "3"):
        raise Exception("Bitcoin address is not a mainnet address")
    elif self.testnet and not (payout_address[:1] == "n" or
                               payout_address[:1] == "m" or
                               payout_address[:1] == "2"):
        raise Exception("Bitcoin address is not a testnet address")
    try:
        bitcoin.b58check_to_hex(payout_address)
    except AssertionError:
        raise Exception("Invalid Bitcoin address")
    conf_json = {
        "vendor_order_confirmation": {
            "invoice": {
                "ref_hash": digest(json.dumps(self.contract, indent=4)).encode("hex"),
                "payout_address": payout_address,
            }
        }
    }
    if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
        shipping = {"shipper": shipper, "tracking_number": tracking_number, "est_delivery": est_delivery}
        conf_json["vendor_order_confirmation"]["invoice"]["shipping"] = shipping
    elif self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "digital good":
        content_source = {"url": url, "password": password}
        conf_json["vendor_order_confirmation"]["invoice"]["content_source"] = content_source
    if comments:
        conf_json["vendor_order_confirmation"]["invoice"]["comments"] = comments
    confirmation = json.dumps(conf_json["vendor_order_confirmation"]["invoice"], indent=4)
    conf_json["vendor_order_confirmation"]["signature"] = self.keychain.signing_key.sign(
        confirmation, encoder=nacl.encoding.HexEncoder)[:128]
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    self.contract["vendor_order_confirmation"] = conf_json["vendor_order_confirmation"]
    self.db.Sales().update_status(order_id, 2)
    file_path = DATA_FOLDER + "store/listings/in progress/" + order_id + ".json"
    with open(file_path, "w") as outfile:
        outfile.write(json.dumps(self.contract, indent=4))

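# Illustrative restatement of the address prefix check used above: mainnet P2PKH/P2SH
# addresses start with "1" or "3", testnet ones with "m", "n", or "2". This helper is
# hypothetical and not part of the class, shown only to make the validation explicit.
def has_expected_prefix(payout_address, testnet):
    if testnet:
        return payout_address[:1] in ("n", "m", "2")
    return payout_address[:1] in ("1", "3")
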
def handle_shutdown(self):
    try:
        self.connection.unregister()
    except Exception:
        pass
    if self.node is None:
        self.node = Node(digest("null"),
                         str(self.connection.dest_addr[0]),
                         int(self.connection.dest_addr[1]))
    for processor in self.processors:
        processor.timeout(self.node)
    if self.addr:
        self.log.info("connection with %s terminated" % self.addr)
    try:
        self.ban_score.scoring_loop.stop()
    except Exception:
        pass
    try:
        self.keep_alive_loop.stop()
    except Exception:
        pass
    if self.relay_node == (self.connection.dest_addr[0], self.connection.dest_addr[1]):
        self.log.info("Disconnected from relay node. Picking new one...")
        self.change_relay_node()

def delete(self, keyword, key, signature):
    """
    Delete the given key/value pair from the keyword dictionary on the network.
    To delete you must provide a signature covering the key that you wish to
    delete. It will be verified against the public key stored in the value. We
    use our ksize as alpha to make sure we reach as many nodes storing our value
    as possible.

    Args:
        keyword: the `string` keyword where the data being deleted is stored.
        key: the 20 byte hash of the data.
        signature: a signature covering the key.
    """
    self.log.info("deleting '%s':'%s' from the network" % (keyword, hexlify(key)))
    dkey = digest(keyword)

    def delete(nodes):
        self.log.debug("deleting '%s' on %s" % (key, [str(i) for i in nodes]))
        ds = [self.protocol.callDelete(node, dkey, key, signature) for node in nodes]
        if self.storage.getSpecific(dkey, key) is not None:
            self.storage.delete(dkey, key)
        return defer.DeferredList(ds).addCallback(_anyRespondSuccess)

    node = Node(dkey)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to delete key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.ksize)
    return spider.find().addCallback(delete)

def rpc_delete(self, sender, keyword, key, signature):
    self.addToRouter(sender)
    value = self.storage.getSpecific(keyword, key)
    if value is not None:
        # Try to delete a message from the dht
        if keyword == digest(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                verify_key.verify(key, signature)
                self.storage.delete(keyword, key)
                return ["True"]
            except Exception:
                return ["False"]
        # Or try to delete a pointer
        else:
            try:
                node = objects.Node()
                node.ParseFromString(value)
                pubkey = node.signedPublicKey[64:]
                try:
                    verify_key = nacl.signing.VerifyKey(pubkey)
                    verify_key.verify(signature + key)
                    self.storage.delete(keyword, key)
                    return ["True"]
                except Exception:
                    return ["False"]
            except Exception:
                pass
    return ["False"]

def notify(self, guid, message):
    # pull the metadata for this node from the db
    f = Following()
    ser = self.db.FollowData().get_following()
    if ser is not None:
        f.ParseFromString(ser)
        for user in f.users:
            if user.guid == guid:
                avatar_hash = user.metadata.avatar_hash
                handle = user.metadata.handle
    timestamp = int(time.time())
    broadcast_id = digest(random.getrandbits(255)).encode("hex")
    self.db.BroadcastStore().save_broadcast(broadcast_id, guid.encode("hex"),
                                            handle, message, timestamp, avatar_hash)
    broadcast_json = {
        "broadcast": {
            "id": broadcast_id,
            "guid": guid.encode("hex"),
            "handle": handle,
            "message": message,
            "timestamp": timestamp,
            "avatar_hash": avatar_hash.encode("hex")
        }
    }
    self.ws.push(json.dumps(broadcast_json, indent=4))

def delete(self, keyword, key, signature):
    """
    Delete the given key/value pair from the keyword dictionary on the network.
    To delete you must provide a signature covering the key that you wish to
    delete. It will be verified against the public key stored in the value. We
    use our ksize as alpha to make sure we reach as many nodes storing our value
    as possible.

    Args:
        keyword: the `string` keyword where the data being deleted is stored.
        key: the 20 byte hash of the data.
        signature: a signature covering the key.
    """
    self.log.debug("deleting '%s':'%s' from the network" % (keyword.encode("hex"), key.encode("hex")))
    dkey = digest(keyword)

    def delete(nodes):
        self.log.debug("deleting '%s' on %s" % (key.encode("hex"), [str(i) for i in nodes]))
        ds = [self.protocol.callDelete(node, dkey, key, signature) for node in nodes]
        if self.storage.getSpecific(dkey, key) is not None:
            self.storage.delete(dkey, key)
        return defer.DeferredList(ds).addCallback(_anyRespondSuccess)

    node = Node(dkey)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("there are no known neighbors to delete key %s" % key.encode("hex"))
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.ksize)
    return spider.find().addCallback(delete)

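# A minimal caller sketch for the delete() API above, mirroring the jsonrpc_delete
# handler earlier in this section. `kserver` and `signing_key` are assumed to exist;
# the signature covers the 20 byte key, as the docstring requires.
def delete_from_network(kserver, signing_key, keyword, data):
    key = digest(data)                      # 20 byte hash of the stored data
    signature = signing_key.sign(key)[:64]  # detached signature over the key
    return kserver.delete(str(keyword), key, signature)  # returns a Deferred
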
def get_result(result):
    if digest(result[1][0]) == contract_hash:
        contract = json.loads(result[1][0], object_pairs_hook=OrderedDict)
        try:
            signature = contract["vendor_offer"]["signature"]
            pubkey = node_to_ask.signed_pubkey[64:]
            verify_key = nacl.signing.VerifyKey(pubkey)
            verify_key.verify(json.dumps(contract["vendor_offer"]["listing"], indent=4),
                              unhexlify(signature))
            for moderator in contract["vendor_offer"]["listing"]["moderators"]:
                guid = moderator["guid"]
                guid_key = moderator["pubkeys"]["signing"]["key"]
                guid_sig = moderator["pubkeys"]["signing"]["signature"]
                enc_key = moderator["pubkeys"]["encryption"]["key"]
                enc_sig = moderator["pubkeys"]["encryption"]["signature"]
                bitcoin_key = moderator["pubkeys"]["bitcoin"]["key"]
                bitcoin_sig = moderator["pubkeys"]["bitcoin"]["signature"]
                h = nacl.hash.sha512(unhexlify(guid_sig) + unhexlify(guid_key))
                pow_hash = h[64:128]
                if int(pow_hash[:6], 16) >= 50 or guid != h[:40]:
                    raise Exception('Invalid GUID')
                verify_key = nacl.signing.VerifyKey(guid_key, encoder=nacl.encoding.HexEncoder)
                verify_key.verify(unhexlify(enc_key), unhexlify(enc_sig))
                verify_key.verify(unhexlify(bitcoin_key), unhexlify(bitcoin_sig))
                # should probably also validate the handle here.
        except Exception:
            return None
        self.cache(result[1][0])
        if "image_hashes" in contract["vendor_offer"]["listing"]["item"]:
            for image_hash in contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
                self.get_image(node_to_ask, unhexlify(image_hash))
        return contract
    else:
        return None

def test_rpc_ping(self):
    self._connecting_to_connected()
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("PING")
    m.protoVer = self.version
    m.testnet = False
    m.signature = self.signing_key.sign(m.SerializeToString())[:64]
    data = m.SerializeToString()
    m.arguments.append(self.protocol.sourceNode.getProto().SerializeToString())
    m.ClearField("signature")
    expected_message = m.SerializeToString()
    self.handler.on_connection_made()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    m2 = message.Message()
    m2.ParseFromString(received_message)
    m2.ClearField("signature")
    received_message = m2.SerializeToString()
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)

def test_acceptResponse(self):
    self._connecting_to_connected()

    def handle_response(resp):
        self.assertTrue(resp[0])
        self.assertEqual(resp[1][0], self.protocol.sourceNode.id)

    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    self.wire_protocol[self.addr1] = self.con
    d = self.protocol.callPing(n)
    self.clock.advance(1)
    connection.REACTOR.runUntilCurrent()
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    sent_message = sent_packet.payload
    m = message.Message()
    m.ParseFromString(sent_message)
    timeout = reactor.callLater(5, self.protocol._timeout, m.messageID)
    self.protocol._outstanding[m.messageID] = (d, timeout)
    m.arguments.append(self.protocol.sourceNode.id)
    self.handler.receive_message(m.SerializeToString())
    return d.addCallback(handle_response)

def save_message(self, guid, handle, pubkey, subject, message_type, message,
                 timestamp, avatar_hash, signature, is_outgoing, msg_id=None):
    """
    Store message in database.
    """
    outgoing = 1 if is_outgoing else 0
    msgID = digest(message + str(timestamp)).encode("hex") if msg_id is None else msg_id
    cursor = self.db.cursor()
    cursor.execute('''INSERT INTO messages(msgID, guid, handle, pubkey, subject, messageType,
                   message, timestamp, avatarHash, signature, outgoing, read)
                   VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
                   (msgID, guid, handle, pubkey, subject, message_type, message,
                    timestamp, avatar_hash, signature, outgoing, 0))
    self.db.commit()

def save_message(self, guid, handle, pubkey, subject, message_type, message,
                 timestamp, avatar_hash, signature, is_outgoing, msg_id=None):
    """
    Store message in database.
    """
    try:
        conn = Database.connect_database(self.PATH)
        with conn:
            outgoing = 1 if is_outgoing else 0
            msgID = digest(message + str(timestamp)).encode("hex") if msg_id is None else msg_id
            cursor = conn.cursor()
            cursor.execute('''INSERT INTO messages(msgID, guid, handle, pubkey, subject,
                           messageType, message, timestamp, avatarHash, signature, outgoing, read)
                           VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
                           (msgID, guid, handle, pubkey, subject, message_type, message,
                            timestamp, avatar_hash, signature, outgoing, 0))
        conn.commit()
        conn.close()
        return True
    except Exception:
        return False

def setUp(self):
    self.catcher = []
    observer = self.catcher.append
    log.addObserver(observer)
    self.addCleanup(log.removeObserver, observer)
    self.node = Node(digest("test"), "127.0.0.1", 1234)
    self.router = RoutingTable(self, 20, self.node.id)

def test_rpc_stun(self):
    self._connecting_to_connected()
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STUN")
    m.protoVer = self.version
    m.testnet = False
    m.signature = self.signing_key.sign(m.SerializeToString())[:64]
    data = m.SerializeToString()
    m.arguments.extend([self.public_ip, str(self.port)])
    m.ClearField("signature")
    expected_message = m.SerializeToString()
    self.handler.on_connection_made()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    a = message.Message()
    a.ParseFromString(received_message)
    a.ClearField("signature")
    received_message = a.SerializeToString()
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)

def change_relay_node(self):
    potential_relay_nodes = []
    for bucket in self.processors[0].router.buckets:
        for node in bucket.nodes.values():
            if node.nat_type == FULL_CONE:
                potential_relay_nodes.append((node.ip, node.port))
    if len(potential_relay_nodes) == 0:
        for seed in SEEDS:
            try:
                potential_relay_nodes.append(
                    (socket.gethostbyname(seed[0].split(":")[0]),
                     28469 if self.processors[0].TESTNET else 18469))
            except socket.gaierror:
                pass
    shuffle(potential_relay_nodes)
    self.relay_node = potential_relay_nodes[0]
    for processor in self.processors:
        if PING in processor:
            if (self.relay_node[0], self.relay_node[1]) in processor.multiplexer:
                processor.multiplexer[(self.relay_node[0], self.relay_node[1])].shutdown()
            processor.callPing(Node(digest("null"), self.relay_node[0], self.relay_node[1],
                                    relay_node=None, nat_type=FULL_CONE))

def sendHello(self):
    request = {
        "request": {
            "api": "v1",
            "id": digest(random.getrandbits(128)).encode("hex"),
            "command": "get_vendors",
            "keyword": "furniture",
            "message": "Hello World!",
            "subject": "yo!",
            "handle": "@vintage",
            "guid": "5aef2616b37496d65e06f8413724167811756af5",
            "message_type": "CHAT",
            "recipient_key": "769fd0d4f24cdeef820c28dc1df71d3b47ccf2403c8e205dfb89b21fee61c673"
        }
    }
    self.sendMessage(json.dumps(request, indent=4))
