def test_process_payload_merge_entries(self):
    # Check the corner case where the new entry must replace two old entries: one with a matching infohash, and
    # another one with a non-matching id
    node = self.mds.TorrentMetadata(infohash=database_blob(os.urandom(20)))
    node_dict = node.to_dict()
    node.delete()
    node2 = self.mds.TorrentMetadata(infohash=database_blob(os.urandom(20)))
    node2_dict = node2.to_dict()
    node2.delete()
    # Build a "newer" entry sharing node's infohash but node2's id_, serialize it into a payload,
    # then remove it so only the payload remains.
    node_updated = self.mds.TorrentMetadata(infohash=node_dict["infohash"], id_=node2_dict["id_"],
                                            timestamp=node2_dict["timestamp"] + 1)
    node_updated_payload = node_updated._payload_class.from_signed_blob(node_updated.serialized())
    node_updated.delete()
    # Restore both old entries, then process the newer payload: one entry must be reported deleted,
    # the other updated to our newer version.
    self.mds.TorrentMetadata(**node_dict)
    self.mds.TorrentMetadata(**node2_dict)
    result = self.mds.process_payload(node_updated_payload)
    self.assertIn((None, DELETED_METADATA), result)
    self.assertIn((self.mds.TorrentMetadata.get(), UPDATED_OUR_VERSION), result)
    # Exactly one entry should remain, carrying the updated payload's signature
    self.assertEqual(database_blob(self.mds.TorrentMetadata.select()[:][0].signature),
                     database_blob(node_updated_payload.signature))
def test_multiple_squashed_commit_and_read(self):
    """
    Test committing entries into several squashed blobs and reading them back
    """
    # Force a tiny chunk size so the commit is split over multiple .mdblob files
    self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT = 500

    num_entries = 10
    channel = self.mds.ChannelMetadata(title='testchan', infohash=database_blob(os.urandom(20)))
    md_list = [self.mds.TorrentMetadata(title='test' + str(x), status=NEW,
                                        infohash=database_blob(os.urandom(20))) for x in range(0, num_entries)]
    channel.commit_channel_torrent()

    # Pretend the channel was never processed locally and wipe the committed entries
    channel.local_version = 0
    for md in md_list:
        md.delete()

    channel_dir = os.path.join(self.mds.channels_dir, channel.dir_name)
    self.assertTrue(len(os.listdir(channel_dir)) > 1)  # make sure it was broken into more than one .mdblob file

    # Re-reading the directory must restore all the deleted entries
    self.mds.process_channel_dir(channel_dir, channel.public_key)
    self.assertEqual(num_entries, len(channel.contents))
def test_has_valid_signature(self):
    """
    Test whether a signature can be validated correctly
    """
    metadata = self.mds.ChannelNode.from_dict({})
    self.assertTrue(metadata.has_valid_signature())

    md_dict = metadata.to_dict()

    # Mess with the signature
    metadata.signature = 'a'
    self.assertFalse(metadata.has_valid_signature())

    # Create metadata with wrong key
    metadata.delete()
    md_dict.update(public_key=database_blob("aaa"))
    md_dict.pop("rowid")

    metadata = self.mds.ChannelNode(skip_key_check=True, **md_dict)
    self.assertFalse(metadata.has_valid_signature())

    key = default_eccrypto.generate_key(u"curve25519")
    metadata2 = self.mds.ChannelNode(sign_with=key, **md_dict)
    # BUG FIX: this previously used assertTrue(a, b), which treats b as the failure message and
    # always passes for a non-empty blob; assertEqual is the intended check.
    self.assertEqual(database_blob(key.pub().key_to_bin()[10:]), metadata2.public_key)

    # Transplanting a signature from another key must be rejected on construction
    md_dict2 = metadata2.to_dict()
    md_dict2["signature"] = md_dict["signature"]
    self.assertRaises(InvalidSignatureException, self.mds.ChannelNode, **md_dict2)
def test_get_num_channels_nodes(self):
    """Check that get_num_channels()/get_num_torrents() count channels and torrents separately."""
    # Two channels signed with our own key (distinct id_-s)
    self.mds.ChannelMetadata(title='testchan', id_=0, infohash=database_blob(os.urandom(20)))
    self.mds.ChannelMetadata(title='testchan', id_=123, infohash=database_blob(os.urandom(20)))
    # Two "foreign" channels with fabricated keys/signatures (checks skipped)
    self.mds.ChannelMetadata(title='testchan', id_=0, public_key=unhexlify('0' * 20),
                             signature=unhexlify('0' * 64), skip_key_check=True,
                             infohash=database_blob(os.urandom(20)))
    self.mds.ChannelMetadata(title='testchan', id_=0, public_key=unhexlify('1' * 20),
                             signature=unhexlify('1' * 64), skip_key_check=True,
                             infohash=database_blob(os.urandom(20)))
    # Three plain torrent entries which must not be counted as channels
    _ = [self.mds.TorrentMetadata(title='test' + str(x), status=NEW,
                                  infohash=database_blob(os.urandom(20))) for x in range(0, 3)]
    self.assertEqual(4, self.mds.get_num_channels())
    self.assertEqual(3, self.mds.get_num_torrents())
def test_has_valid_signature(self):
    """
    Test whether a signature can be validated correctly
    """
    metadata = self.mds.ChannelNode.from_dict({})
    self.assertTrue(metadata.has_valid_signature())

    md_dict = metadata.to_dict()

    # Mess with the signature
    metadata.signature = 'a'
    self.assertFalse(metadata.has_valid_signature())

    # Create metadata with wrong key
    metadata.delete()
    md_dict.update(public_key=database_blob("aaa"))
    md_dict.pop("rowid")

    metadata = self.mds.ChannelNode(skip_key_check=True, **md_dict)
    self.assertFalse(metadata.has_valid_signature())

    key = default_eccrypto.generate_key(u"curve25519")
    metadata2 = self.mds.ChannelNode(sign_with=key, **md_dict)
    # BUG FIX: this previously used assertTrue(a, b), which treats b as the failure message and
    # always passes for a non-empty blob; assertEqual is the intended check.
    self.assertEqual(database_blob(key.pub().key_to_bin()[10:]), metadata2.public_key)

    # Transplanting a signature from another key must be rejected on construction
    md_dict2 = metadata2.to_dict()
    md_dict2["signature"] = md_dict["signature"]
    self.assertRaises(InvalidSignatureException, self.mds.ChannelNode, **md_dict2)
def to_database(self):
    """Return this object's fields as a tuple of database column values."""
    order_id = self.order_id
    first = self.assets.first
    second = self.assets.second
    return (database_blob(order_id.trader_id.to_bytes()),
            int(order_id.order_number),
            first.amount,
            text_type(first.asset_id),
            second.amount,
            text_type(second.asset_id),
            int(self.timeout),
            float(self.timestamp),
            self.is_ask(),
            self.traded,
            database_blob(self.block_hash))
def __init__(self, *args, **kwargs):
    """
    Initialize a metadata object. All this dance is required to ensure that the signature is there and it is
    correct.
    """
    # Process special keyworded arguments
    # "sign_with" argument given, sign with it
    private_key_override = None
    if "sign_with" in kwargs:
        kwargs["public_key"] = database_blob(kwargs["sign_with"].pub().key_to_bin()[10:])
        private_key_override = kwargs["sign_with"]
        kwargs.pop("sign_with")

    # For putting legacy/test stuff in
    skip_key_check = False
    if "skip_key_check" in kwargs and kwargs["skip_key_check"]:
        skip_key_check = True
        kwargs.pop("skip_key_check")

    # Fresh entries get a new logical clock tick; timestamp defaults to the id_
    if "id_" not in kwargs:
        kwargs["id_"] = self._clock.tick()
    if "timestamp" not in kwargs:
        kwargs["timestamp"] = kwargs["id_"]

    if not private_key_override and not skip_key_check:
        # No key/signature given, sign with our own key.
        if ("signature" not in kwargs) and \
                (("public_key" not in kwargs) or (
                        kwargs["public_key"] == database_blob(self._my_key.pub().key_to_bin()[10:]))):
            private_key_override = self._my_key

        # Key/signature given, check them for correctness
        elif ("public_key" in kwargs) and ("signature" in kwargs):
            try:
                # Constructing the payload validates the signature as a side effect
                self._payload_class(**kwargs)
            except InvalidSignatureException:
                raise InvalidSignatureException(
                    ("Attempted to create %s object with invalid signature/PK: " % str(self.__class__.__name__)) +
                    (hexlify(kwargs["signature"]) if "signature" in kwargs else "empty signature ") + " / " +
                    (hexlify(kwargs["public_key"]) if "public_key" in kwargs else " empty PK"))

    if private_key_override:
        # Get default values for Pony class attributes. We have to do it manually because we need
        # to know the payload signature *before* creating the object.
        kwargs = generate_dict_from_pony_args(self.__class__, skip_list=["signature", "public_key"], **kwargs)
        payload = self._payload_class(
            **dict(kwargs,
                   public_key=str(private_key_override.pub().key_to_bin()[10:]),
                   key=private_key_override,
                   metadata_type=self.metadata_type))
        kwargs["public_key"] = payload.public_key
        kwargs["signature"] = payload.signature

    super(ChannelNode, self).__init__(*args, **kwargs)
def to_database(self):
    """
    Returns a database representation of a Payment object.

    :rtype: tuple
    """
    tx_id = self.transaction_id
    transferred = self.transferred_assets
    return (database_blob(self.trader_id.to_bytes()),
            database_blob(tx_id.trader_id.to_bytes()),
            int(tx_id.transaction_number),
            text_type(self.payment_id),
            transferred.amount,
            text_type(transferred.asset_id),
            text_type(self.address_from),
            text_type(self.address_to),
            float(self.timestamp),
            self.success)
def add_reserved_tick(self, order_id, reserved_order_id, amount):
    """
    Add a reserved tick to the database
    """
    row = (database_blob(order_id.trader_id.to_bytes()),
           text_type(order_id.order_number),
           database_blob(reserved_order_id.trader_id.to_bytes()),
           text_type(reserved_order_id.order_number),
           amount)
    self.execute(
        u"INSERT INTO orders_reserved_ticks (trader_id, order_number, reserved_trader_id, reserved_order_number,"
        u"quantity) VALUES(?,?,?,?,?)", row)
    self.commit()
def add_reserved_tick(self, order_id, reserved_order_id, amount):
    """
    Add a reserved tick to the database
    """
    row = (database_blob(order_id.trader_id.to_bytes()),
           text_type(order_id.order_number),
           database_blob(reserved_order_id.trader_id.to_bytes()),
           text_type(reserved_order_id.order_number),
           amount)
    self.execute(
        u"INSERT INTO orders_reserved_ticks (trader_id, order_number, reserved_trader_id, reserved_order_number,"
        u"quantity) VALUES(?,?,?,?,?)", row)
    self.commit()
def to_database(self):
    """
    Returns a database representation of a Payment object.

    :rtype: tuple
    """
    tx_id = self.transaction_id
    transferred = self.transferred_assets
    return (database_blob(self.trader_id.to_bytes()),
            database_blob(tx_id.trader_id.to_bytes()),
            int(tx_id.transaction_number),
            text_type(self.payment_id),
            transferred.amount,
            text_type(transferred.asset_id),
            text_type(self.address_from),
            text_type(self.address_to),
            float(self.timestamp),
            self.success)
def get_sample_torrent_dict(my_key):
    """
    Utility method to return a dictionary with torrent information.
    """
    sample = dict(
        infohash=database_blob("1" * 20),
        size=123,
        torrent_date=datetime.utcnow(),
        tags="bla",
        id_=123,
        public_key=database_blob(my_key.pub().key_to_bin()[10:]),
        title="lalala",
    )
    return sample
def get_sample_torrent_dict(my_key):
    """
    Utility method to return a dictionary with torrent information.
    """
    sample = dict(
        infohash=database_blob("1" * 20),
        size=123,
        torrent_date=datetime.utcnow(),
        tags="bla",
        id_=123,
        public_key=database_blob(my_key.pub().key_to_bin()[10:]),
        title="lalala",
    )
    return sample
def to_database(self):
    """
    Returns a database representation of a Transaction object.

    :rtype: tuple
    """
    tx_id = self.transaction_id
    order_id = self.order_id
    partner_id = self.partner_order_id
    assets = self.assets
    transferred = self.transferred_assets
    return (database_blob(tx_id.trader_id.to_bytes()), int(tx_id.transaction_number),
            database_blob(order_id.trader_id.to_bytes()), int(order_id.order_number),
            database_blob(partner_id.trader_id.to_bytes()), int(partner_id.order_number),
            assets.first.amount, text_type(assets.first.asset_id), transferred.first.amount,
            assets.second.amount, text_type(assets.second.asset_id), transferred.second.amount,
            float(self.timestamp), self.sent_wallet_info, self.received_wallet_info,
            text_type(self.incoming_address), text_type(self.outgoing_address),
            text_type(self.partner_incoming_address), text_type(self.partner_outgoing_address),
            text_type(self.match_id))
def get_reserved_ticks(self, order_id):
    """
    Get all reserved ticks for a specific order.
    """
    rows = self.execute(u"SELECT * FROM orders_reserved_ticks WHERE trader_id = ? AND order_number = ?",
                        (database_blob(order_id.trader_id.to_bytes()), text_type(order_id.order_number)))
    ticks = []
    for row in rows:
        # Columns 2 and 3 hold the reserved order's trader id and number; column 4 the quantity
        reserved_id = OrderId(TraderId(bytes(row[2])), OrderNumber(row[3]))
        ticks.append((reserved_id, row[4]))
    return ticks
def get_old_channels(self):
    """
    Read channels from the legacy Tribler sqlite database and convert each row into a kwargs
    dict suitable for creating a channel entry in the new metadata store.
    """
    connection = sqlite3.connect(self.tribler_db)
    cursor = connection.cursor()

    channels = []
    for id_, name, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam in cursor.execute(
            self.select_channels_sql):
        # Skip empty channels
        if nr_torrents and nr_torrents > 0:
            channels.append({
                "id_": 0,
                # Random placeholder infohash: legacy rows carry no usable one -- TODO confirm
                "infohash": database_blob(os.urandom(20)),
                "title": name or '',
                "public_key": dispesy_cid_to_pk(id_),
                "timestamp": final_timestamp(),
                "votes": int(nr_favorite or 0),
                "origin_id": 0,
                "signature": pseudo_signature(),
                "skip_key_check": True,
                "size": 0,
                "local_version": final_timestamp(),
                "subscribed": False,
                "status": LEGACY_ENTRY,
                "num_entries": int(nr_torrents or 0)})
    connection.close()
    return channels
def get_channel_with_id(cls, channel_id):
    """
    Fetch a channel with a specific id.

    :param channel_id: The ID of the channel to fetch.
    :return: the ChannelMetadata object, or None if it is not available.
    """
    # NOTE(review): the "id" is matched against the public_key column -- the argument is
    # apparently the channel's public key bytes; verify against callers.
    key_blob = database_blob(channel_id)
    return cls.get(public_key=key_blob)
def test_process_channel_dir_file(self):
    """
    Test whether we are able to process files in a directory containing node metadata
    """
    test_node_metadata = self.mds.TorrentMetadata(title='test', infohash=database_blob(os.urandom(20)))
    metadata_path = os.path.join(self.session_base_dir, 'metadata.data')
    test_node_metadata.to_file(metadata_path)
    # We delete this TorrentMeta info now, it should be added again to the database when loading it
    test_node_metadata.delete()

    loaded_metadata = self.mds.process_mdblob_file(metadata_path)
    self.assertEqual(loaded_metadata[0][0].title, 'test')

    # Test whether we delete existing metadata when loading a DeletedMetadata blob
    metadata = self.mds.TorrentMetadata(infohash='1' * 20)
    metadata.to_delete_file(metadata_path)
    loaded_metadata = self.mds.process_mdblob_file(metadata_path)
    # Make sure the original metadata is deleted
    # (6 is presumably the DELETED_METADATA result code -- TODO confirm against the enum)
    self.assertEqual(loaded_metadata[0], (None, 6))
    self.assertIsNone(self.mds.TorrentMetadata.get(infohash='1' * 20))

    # Test an unknown metadata type, this should raise an exception
    invalid_metadata = os.path.join(self.session_base_dir, 'invalidtype.mdblob')
    make_wrong_payload(invalid_metadata)
    self.assertRaises(UnknownBlobTypeException, self.mds.process_mdblob_file, invalid_metadata)
def insert_or_update_transaction(self, transaction):
    """
    Inserts or updates a specific transaction in the database, according to the timestamp.
    Updates only if the timestamp is more recent than the one in the database.

    :param transaction: the Transaction object to persist.
    """
    self.execute(
        u"INSERT OR IGNORE INTO transactions (trader_id, transaction_number, order_trader_id, order_number,"
        u"partner_trader_id, partner_order_number, asset1_amount, asset1_type, asset1_transferred, asset2_amount,"
        u"asset2_type, asset2_transferred, transaction_timestamp, sent_wallet_info, received_wallet_info,"
        u"incoming_address, outgoing_address, partner_incoming_address, partner_outgoing_address, match_id) "
        u"VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)", transaction.to_database())

    # BUG FIX: the concatenated literal previously read "... transaction_number = ?AND ..."
    # (no space before AND); it only parsed because SQLite's tokenizer happens to split
    # "?AND". Add the missing space so the statement is well-formed.
    self.execute(
        u"UPDATE transactions SET asset1_amount = ?, asset1_transferred = ?, asset2_amount = ?, "
        u"asset2_transferred = ?, transaction_timestamp = ? WHERE trader_id = ? AND transaction_number = ? "
        u"AND transaction_timestamp < ?",
        (transaction.assets.first.amount, transaction.transferred_assets.first.amount,
         transaction.assets.second.amount, transaction.transferred_assets.second.amount,
         float(transaction.timestamp), database_blob(transaction.transaction_id.trader_id.to_bytes()),
         int(transaction.transaction_id.transaction_number), float(transaction.timestamp)))
    self.commit()
def test_process_payload_reject_older(self):
    # Check there is no action if the processed payload has a timestamp that is less than the
    # local_version of the corresponding local channel. (I.e. remote peer trying to push back a deleted entry)
    channel = self.mds.ChannelMetadata(title='bla', version=123, local_version=12,
                                       infohash=database_blob(os.urandom(20)))
    # timestamp=11 < channel.local_version=12, so this payload must be rejected
    torrent = self.mds.TorrentMetadata(title='blabla', timestamp=11, origin_id=channel.id_,
                                       infohash=database_blob(os.urandom(20)))
    payload = torrent._payload_class(**torrent.to_dict())
    torrent.delete()
    self.assertFalse(self.mds.process_payload(payload))
def delete_payments(self, transaction_id):
    """
    Delete all payments that are associated with a specific transaction
    """
    where_args = (database_blob(transaction_id.trader_id.to_bytes()),
                  text_type(transaction_id.transaction_number))
    self.execute(u"DELETE FROM payments WHERE transaction_trader_id = ? AND transaction_number = ?",
                 where_args)
def delete_order(self, order_id):
    """
    Delete a specific order from the database
    """
    where_args = (database_blob(order_id.trader_id.to_bytes()),
                  text_type(order_id.order_number))
    self.execute(u"DELETE FROM orders WHERE trader_id = ? AND order_number = ?", where_args)
    # Also drop any ticks that were reserved against this order
    self.delete_reserved_ticks(order_id)
def render_GET(self, request):
    """
    Respond with a paginated JSON list of the torrents in our own channel.
    Returns 404 if no personal channel has been created yet.
    """
    with db_session:
        my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
        if not my_channel:
            request.setResponseCode(http.NOT_FOUND)
            return json.dumps({"error": "your channel has not been created"})

        # Normalize pagination/sorting parameters from the query string
        sanitized = SpecificChannelTorrentsEndpoint.sanitize_parameters(request.args)
        if 'exclude_deleted' in request.args:
            sanitized['exclude_deleted'] = request.args['exclude_deleted']

        torrents, total = self.session.lm.mds.TorrentMetadata.get_entries(
            channel_pk=database_blob(my_channel.public_key), **sanitized)
        torrents = [torrent.to_simple_dict() for torrent in torrents]

        return json.dumps({
            "results": torrents,
            "first": sanitized['first'],
            "last": sanitized['last'],
            "sort_by": sanitized['sort_by'],
            "sort_asc": int(sanitized['sort_asc']),
            "total": total,
            "dirty": my_channel.dirty})
def render_POST(self, request):
    """
    Subscribe to or unsubscribe from this channel, based on the 'subscribe' form parameter (0/1).
    Unsubscribing also schedules deletion of the channel's contents off the reactor thread.
    """
    parameters = http.parse_qs(request.content.read(), 1)
    if 'subscribe' not in parameters:
        request.setResponseCode(http.BAD_REQUEST)
        return json.dumps({"success": False, "error": "subscribe parameter missing"})

    to_subscribe = bool(int(parameters['subscribe'][0]))
    with db_session:
        channel = self.session.lm.mds.ChannelMetadata.get_for_update(public_key=database_blob(self.channel_pk))
        if not channel:
            request.setResponseCode(http.NOT_FOUND)
            return json.dumps({"error": "this channel cannot be found"})
        channel.subscribed = to_subscribe

    def delete_channel():
        # TODO: this should be eventually moved to a garbage-collector like subprocess in MetadataStore
        with db_session:
            channel = self.session.lm.mds.ChannelMetadata.get_for_update(
                public_key=database_blob(self.channel_pk))
            channel.local_version = 0
            contents = channel.contents
            contents.delete(bulk=True)
        # Drop this thread's DB connection when done
        self.session.lm.mds._db.disconnect()

    if not to_subscribe:
        # Content deletion can be slow; run it in a reactor worker thread
        reactor.callInThread(delete_channel)

    return json.dumps({"success": True, "subscribed": to_subscribe})
def insert_or_update_transaction(self, transaction):
    """
    Inserts or updates a specific transaction in the database, according to the timestamp.
    Updates only if the timestamp is more recent than the one in the database.

    :param transaction: the Transaction object to persist.
    """
    self.execute(
        u"INSERT OR IGNORE INTO transactions (trader_id, transaction_number, order_trader_id, order_number,"
        u"partner_trader_id, partner_order_number, asset1_amount, asset1_type, asset1_transferred, asset2_amount,"
        u"asset2_type, asset2_transferred, transaction_timestamp, sent_wallet_info, received_wallet_info,"
        u"incoming_address, outgoing_address, partner_incoming_address, partner_outgoing_address, match_id) "
        u"VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)", transaction.to_database())

    # BUG FIX: the concatenated literal previously read "... transaction_number = ?AND ..."
    # (no space before AND); it only parsed because SQLite's tokenizer happens to split
    # "?AND". Add the missing space so the statement is well-formed.
    self.execute(
        u"UPDATE transactions SET asset1_amount = ?, asset1_transferred = ?, asset2_amount = ?, "
        u"asset2_transferred = ?, transaction_timestamp = ? WHERE trader_id = ? AND transaction_number = ? "
        u"AND transaction_timestamp < ?",
        (transaction.assets.first.amount, transaction.transferred_assets.first.amount,
         transaction.assets.second.amount, transaction.transferred_assets.second.amount,
         float(transaction.timestamp), database_blob(transaction.transaction_id.trader_id.to_bytes()),
         int(transaction.transaction_id.transaction_number), float(transaction.timestamp)))
    self.commit()
def delete_payments(self, transaction_id):
    """
    Delete all payments that are associated with a specific transaction
    """
    where_args = (database_blob(transaction_id.trader_id.to_bytes()),
                  text_type(transaction_id.transaction_number))
    self.execute(u"DELETE FROM payments WHERE transaction_trader_id = ? AND transaction_number = ?",
                 where_args)
def delete_transaction(self, transaction_id):
    """
    Delete a specific transaction from the database
    """
    where_args = (database_blob(transaction_id.trader_id.to_bytes()),
                  text_type(transaction_id.transaction_number))
    self.execute(u"DELETE FROM transactions WHERE trader_id = ? AND transaction_number = ?", where_args)
    # Cascade: payments reference the transaction, so remove them as well
    self.delete_payments(transaction_id)
def delete_reserved_ticks(self, order_id):
    """
    Delete all reserved ticks from a specific order
    """
    where_args = (database_blob(order_id.trader_id.to_bytes()),
                  text_type(order_id.order_number))
    self.execute(u"DELETE FROM orders_reserved_ticks WHERE trader_id = ? AND order_number = ?",
                 where_args)
def get_torrent(self, infohash):
    """
    Return a torrent with a specific infohash from the database.
    """
    query = self.metadata_store.TorrentMetadata.select(
        lambda g: g.infohash == database_blob(infohash) and g.metadata_type == REGULAR_TORRENT)
    matches = list(query.limit(1))
    return matches[0] if matches else None
def delete_channel():
    # TODO: this should be eventually moved to a garbage-collector like subprocess in MetadataStore
    # Wipes the channel's contents and resets local_version so it can be re-downloaded later.
    with db_session:
        channel = self.session.lm.mds.ChannelMetadata.get_for_update(
            public_key=database_blob(self.channel_pk))
        channel.local_version = 0
        contents = channel.contents
        contents.delete(bulk=True)
    # Drop this thread's DB connection when done (runs outside the reactor thread)
    self.session.lm.mds._db.disconnect()
def get_payments(self, transaction_id):
    """
    Return all payments tied to a specific transaction.

    :return: list of Payment objects, ordered by timestamp ascending.
    """
    # BUG FIX: the concatenated literal previously read "... transaction_number = ?ORDER BY ..."
    # (no space before ORDER); it only parsed because SQLite's tokenizer happens to split
    # "?ORDER". Add the missing space so the statement is well-formed.
    db_result = self.execute(u"SELECT * FROM payments WHERE transaction_trader_id = ? AND transaction_number = ? "
                             u"ORDER BY timestamp ASC",
                             (database_blob(transaction_id.trader_id.to_bytes()),
                              text_type(transaction_id.transaction_number)))
    return [Payment.from_database(db_item) for db_item in db_result]
def delete_transaction(self, transaction_id):
    """
    Delete a specific transaction from the database
    """
    where_args = (database_blob(transaction_id.trader_id.to_bytes()),
                  text_type(transaction_id.transaction_number))
    self.execute(u"DELETE FROM transactions WHERE trader_id = ? AND transaction_number = ?", where_args)
    # Cascade: payments reference the transaction, so remove them as well
    self.delete_payments(transaction_id)
def make_wrong_payload(filename):
    # Build a payload with a deliberately unknown metadata type (666) and an all-zero signature,
    # and write it to `filename`, so tests can feed an invalid blob to the processing code.
    key = default_eccrypto.generate_key(u"curve25519")
    metadata_payload = SignedPayload(666, 0, database_blob(key.pub().key_to_bin()[10:]),
                                     signature='\x00' * 64, skip_key_check=True)
    with open(filename, 'wb') as output_file:
        # NOTE(review): serialized() presumably yields an iterable of byte strings (Python 2) --
        # joining with '' would break on Python 3 bytes; confirm before porting.
        output_file.write(''.join(metadata_payload.serialized()))
def to_database(self):
    """
    Returns a database representation of an Order object.

    :rtype: tuple
    """
    # completed_timestamp may be unset; store NULL in that case
    completed = float(self.completed_timestamp) if self.completed_timestamp else None
    order_id = self.order_id
    first = self.assets.first
    second = self.assets.second
    return (database_blob(order_id.trader_id.to_bytes()), text_type(order_id.order_number),
            first.amount, text_type(first.asset_id), second.amount, text_type(second.asset_id),
            self.traded_quantity, int(self.timeout), float(self.timestamp), completed,
            self.is_ask(), self._cancelled, self._verified)
def get_payments(self, transaction_id):
    """
    Return all payments tied to a specific transaction.

    :return: list of Payment objects, ordered by timestamp ascending.
    """
    # BUG FIX: the concatenated literal previously read "... transaction_number = ?ORDER BY ..."
    # (no space before ORDER); it only parsed because SQLite's tokenizer happens to split
    # "?ORDER". Add the missing space so the statement is well-formed.
    db_result = self.execute(
        u"SELECT * FROM payments WHERE transaction_trader_id = ? AND transaction_number = ? "
        u"ORDER BY timestamp ASC",
        (database_blob(transaction_id.trader_id.to_bytes()),
         text_type(transaction_id.transaction_number)))
    return [Payment.from_database(db_item) for db_item in db_result]
def to_simple_dict(self):
    """
    Return a basic dictionary with information about the channel.
    """
    epoch = datetime.utcfromtimestamp(0)
    my_key_blob = database_blob(self._my_key.pub().key_to_bin()[10:])
    return {
        "id": self.rowid,
        "public_key": hexlify(self.public_key),
        "name": self.title,
        "torrents": self.num_entries,
        "subscribed": self.subscribed,
        "votes": self.votes,
        "status": self.status,
        "updated": int((self.torrent_date - epoch).total_seconds()),
        # TODO: optimize this?
        "my_channel": my_key_blob == database_blob(self.public_key),
    }
def get_transaction(self, transaction_id):
    """
    Return a transaction with a specific id.
    """
    cursor = self.execute(u"SELECT * FROM transactions WHERE trader_id = ? AND transaction_number = ?",
                          (database_blob(transaction_id.trader_id.to_bytes()),
                           text_type(transaction_id.transaction_number)))
    try:
        row = next(cursor)
    except StopIteration:
        # No such transaction
        return None
    return Transaction.from_database(row, self.get_payments(transaction_id))
def get_order(self, order_id):
    """
    Return an order with a specific id.
    """
    cursor = self.execute(u"SELECT * FROM orders WHERE trader_id = ? AND order_number = ?",
                          (database_blob(order_id.trader_id.to_bytes()),
                           text_type(order_id.order_number)))
    try:
        row = next(cursor)
    except StopIteration:
        # No such order
        return None
    return Order.from_database(row, self.get_reserved_ticks(order_id))
def get_reserved_ticks(self, order_id):
    """
    Get all reserved ticks for a specific order.
    """
    rows = self.execute(u"SELECT * FROM orders_reserved_ticks WHERE trader_id = ? AND order_number = ?",
                        (database_blob(order_id.trader_id.to_bytes()), text_type(order_id.order_number)))
    ticks = []
    for row in rows:
        # Columns 2 and 3 hold the reserved order's trader id and number; column 4 the quantity
        reserved_id = OrderId(TraderId(bytes(row[2])), OrderNumber(row[3]))
        ticks.append((reserved_id, row[4]))
    return ticks
def test_get_channel_with_dirname(self):
    """Channels must be resolvable from their directory name, including PKs with leading zeroes."""
    sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(self.my_key)
    channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict)
    dirname = channel_metadata.dir_name
    channel_result = self.mds.ChannelMetadata.get_channel_with_dirname(dirname)
    self.assertEqual(channel_metadata, channel_result)

    # Test for corner-case of channel PK starting with zeroes
    channel_metadata.public_key = database_blob(unhexlify('0' * 128))
    channel_result = self.mds.ChannelMetadata.get_channel_with_dirname(channel_metadata.dir_name)
    self.assertEqual(channel_metadata, channel_result)
def test_get_channel_name(self):
    """Check get_channel_name(): exact match, old version ('OLD:' prefix), and fallback to dirname."""
    infohash = "\x00" * 20
    title = "testchan"
    chan = self.mds.ChannelMetadata(title=title, infohash=database_blob(infohash))
    dirname = chan.dir_name

    # Download infohash matches the stored channel -> plain title
    self.assertEqual(title, self.mds.ChannelMetadata.get_channel_name(dirname, infohash))

    # Stored channel has moved on to a newer infohash -> the download is an old version
    chan.infohash = "\x11" * 20
    self.assertEqual("OLD:" + title, self.mds.ChannelMetadata.get_channel_name(dirname, infohash))

    # No matching channel at all -> fall back to the raw directory name
    chan.delete()
    self.assertEqual(dirname, self.mds.ChannelMetadata.get_channel_name(dirname, infohash))
def read_payload_with_offset(data, offset=0):
    """
    Deserialize the payload found at `offset` in `data`, dispatching on its metadata type.

    :raises UnknownBlobTypeException: if the leading type field is not a known metadata type.
    """
    # First we have to determine the actual payload type (big-endian unsigned short prefix)
    metadata_type = struct.unpack_from('>H', database_blob(data), offset=offset)[0]
    dispatch = {
        DELETED: DeletedMetadataPayload,
        REGULAR_TORRENT: TorrentMetadataPayload,
        CHANNEL_TORRENT: ChannelMetadataPayload,
    }
    payload_class = dispatch.get(metadata_type)
    if payload_class is not None:
        return payload_class.from_signed_blob_with_offset(data, offset=offset)

    # Unknown metadata type, raise exception
    raise UnknownBlobTypeException
def test_search(self):
    """
    Test a search query that should return a few new type channels
    """
    num_hay = 100
    with db_session:
        # One channel plus 100 "hay" torrents and a single unique "needle" torrent
        _ = self.session.lm.mds.ChannelMetadata(title='test', tags='test', subscribed=True,
                                                infohash=str(random.getrandbits(160)))
        for x in xrange(0, num_hay):
            self.session.lm.mds.TorrentMetadata(title='hay ' + str(x), infohash=str(random.getrandbits(160)))
        self.session.lm.mds.TorrentMetadata(title='needle',
                                            infohash=database_blob(
                                                bytearray(random.getrandbits(8) for _ in xrange(20))))

    self.should_check_equality = False

    # Unique title -> exactly one result
    result = yield self.do_request('search?filter=needle', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 1)

    # 100 matches, but results are capped at 50 per page -- presumably the endpoint's default
    # page size; TODO confirm
    result = yield self.do_request('search?filter=hay', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 50)

    result = yield self.do_request('search?filter=test&type=channel', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 1)

    result = yield self.do_request('search?filter=needle&type=torrent', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(parsed["results"][0][u'name'], 'needle')

    result = yield self.do_request('search?filter=needle&sort_by=name', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 1)

    # If uuid is passed in request, then the same uuid is returned in the response
    result = yield self.do_request('search?uuid=uuid1&filter=needle&sort_by=name', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 1)
    self.assertEqual(parsed['uuid'], 'uuid1')
def to_database(self):
    """
    Returns a database representation of an Order object.

    :rtype: tuple
    """
    # completed_timestamp may be unset; store NULL in that case
    completed = float(self.completed_timestamp) if self.completed_timestamp else None
    order_id = self.order_id
    first = self.assets.first
    second = self.assets.second
    return (database_blob(order_id.trader_id.to_bytes()), text_type(order_id.order_number),
            first.amount, text_type(first.asset_id), second.amount, text_type(second.asset_id),
            self.traded_quantity, int(self.timeout), float(self.timestamp), completed,
            self.is_ask(), self._cancelled, self._verified)
def create_channel(cls, title, description=""):
    """
    Create a channel and sign it with a given key.

    :param title: The title of the channel
    :param description: The description of the channel
    :return: The channel metadata
    :raises DuplicateChannelIdError: if a channel for our own public key already exists
    """
    if ChannelMetadata.get_channel_with_id(cls._my_key.pub().key_to_bin()[10:]):
        raise DuplicateChannelIdError()

    # A random infohash is used as a placeholder until the channel torrent is committed --
    # TODO confirm against commit_channel_torrent
    my_channel = cls(id_=ROOT_CHANNEL_ID, public_key=database_blob(cls._my_key.pub().key_to_bin()[10:]),
                     title=title, tags=description, subscribed=True, infohash=str(random.getrandbits(160)))
    my_channel.sign()
    return my_channel
def to_database(self):
    """
    Returns a database representation of a Transaction object.

    :rtype: tuple
    """
    tx_id = self.transaction_id
    order_id = self.order_id
    partner_id = self.partner_order_id
    assets = self.assets
    transferred = self.transferred_assets
    return (database_blob(tx_id.trader_id.to_bytes()), int(tx_id.transaction_number),
            database_blob(order_id.trader_id.to_bytes()), int(order_id.order_number),
            database_blob(partner_id.trader_id.to_bytes()), int(partner_id.order_number),
            assets.first.amount, text_type(assets.first.asset_id), transferred.first.amount,
            assets.second.amount, text_type(assets.second.asset_id), transferred.second.amount,
            float(self.timestamp), self.sent_wallet_info, self.received_wallet_info,
            text_type(self.incoming_address), text_type(self.outgoing_address),
            text_type(self.partner_incoming_address), text_type(self.partner_outgoing_address),
            text_type(self.match_id))
def render_GET(self, request):
    """
    Respond with JSON details (including trackers) of the torrent identified by self.infohash,
    or 404 if it is not in the database.
    """
    with db_session:
        md = self.session.lm.mds.TorrentMetadata.select(
            lambda g: g.infohash == database_blob(self.infohash))[:1]
        # Convert inside the db_session, while the entity is still attached
        torrent_dict = md[0].to_simple_dict(include_trackers=True) if md else None
        if not md:
            request.setResponseCode(http.NOT_FOUND)
            request.write(json.dumps({"error": "torrent not found in database"}))
            return
        return json.dumps({"torrent": torrent_dict})
def test_check_channels_updates(self):
    """Only subscribed channels with a stale local_version should trigger a channel download."""
    with db_session:
        chan = self.generate_personal_channel()
        chan.commit_channel_torrent()
        # Make our personal channel look outdated so it qualifies for an update
        chan.local_version -= 1
        # Subscribed foreign channel that is already up to date (local_version == timestamp)
        _ = self.mock_session.lm.mds.ChannelMetadata(title="bla", public_key=database_blob(str(123)),
                                                     signature=database_blob(str(345)), skip_key_check=True,
                                                     timestamp=123, local_version=123, subscribed=True,
                                                     infohash=str(random.getrandbits(160)))
        # Outdated foreign channel that is NOT subscribed -- must be ignored
        _ = self.mock_session.lm.mds.ChannelMetadata(title="bla", public_key=database_blob(str(124)),
                                                     signature=database_blob(str(346)), skip_key_check=True,
                                                     timestamp=123, local_version=122, subscribed=False,
                                                     infohash=str(random.getrandbits(160)))
    self.mock_session.has_download = lambda _: False
    self.torrents_added = 0

    def mock_dl(_):
        self.torrents_added += 1

    self.chanman.download_channel = mock_dl
    self.chanman.check_channels_updates()
    # download_channel should only fire once - for the original subscribed channel
    self.assertEqual(1, self.torrents_added)
def check_torrent_health(self, infohash, timeout=20, scrape_now=False):
    """
    Check the health of a torrent with a given infohash.

    :param infohash: Torrent infohash.
    :param timeout: The timeout to use in the performed requests
    :param scrape_now: Flag whether we want to force scraping immediately
    """
    tracker_set = []

    # We first check whether the torrent is already in the database and checked before
    with db_session:
        result = self.tribler_session.lm.mds.TorrentState.get(infohash=database_blob(infohash))
        if result:
            torrent_id = str(result.infohash)
            last_check = result.last_check
            time_diff = time.time() - last_check
            if time_diff < MIN_TORRENT_CHECK_INTERVAL and not scrape_now:
                # Recently checked: serve the cached DB result instead of hitting trackers
                self._logger.debug("time interval too short, not doing torrent health check for %s",
                                   hexlify(infohash))
                return succeed({
                    "db": {
                        "seeders": result.seeders,
                        "leechers": result.leechers,
                        "infohash": hexlify(infohash)
                    }
                })

            # get torrent's tracker list from DB
            tracker_set = self.get_valid_trackers_of_torrent(torrent_id)

    # Fire one scrape session per known tracker, collecting their deferreds
    deferred_list = []
    for tracker_url in tracker_set:
        session = self._create_session_for_request(tracker_url, timeout=timeout)
        session.add_infohash(infohash)
        deferred_list.append(session.connect_to_tracker().
                             addCallbacks(*self.get_callbacks_for_session(session)))

    # Create a (fake) DHT session for the lookup if we have support for BEP33.
    if has_bep33_support():
        session = FakeDHTSession(self.tribler_session, infohash, timeout)
        self._session_list['DHT'].append(session)
        deferred_list.append(session.connect_to_tracker().
                             addCallbacks(*self.get_callbacks_for_session(session)))

    # Aggregate all tracker/DHT results; errors are consumed so one bad tracker cannot fail the batch
    return DeferredList(deferred_list, consumeErrors=True).addCallback(
        lambda res: self.on_torrent_health_check_completed(infohash, res))
def _update_torrent_result(self, response):
    """
    Persist a health-check response into the matching TorrentState row.
    Silently does nothing when no corresponding row exists in the DB.
    :param response: dict with 'infohash', 'seeders', 'leechers', 'last_check'
    """
    torrent_infohash = response['infohash']
    num_seeders = response['seeders']
    num_leechers = response['leechers']
    check_time = response['last_check']

    self._logger.debug(u"Update result %s/%s for %s", num_seeders, num_leechers, hexlify(torrent_infohash))

    with db_session:
        torrent_state = self.tribler_session.lm.mds.TorrentState.get(
            infohash=database_blob(torrent_infohash))
        if not torrent_state:
            # Something is wrong: a corresponding TorrentState entry should
            # already exist in the DB, so bail out without writing anything.
            return
        torrent_state.seeders = num_seeders
        torrent_state.leechers = num_leechers
        torrent_state.last_check = check_time
def get_channel_name(cls, name, infohash):
    """
    Translate a Tribler download name into the matching channel title.

    The channel is looked up by its directory name; when the name cannot be
    encoded, the lookup falls back to the infohash. A stored infohash that
    differs from the given one indicates an older version of the channel.
    :param name: name of the download; should match the channel's directory name
    :param infohash: infohash of the download
    :return: channel title as a string, prefixed with 'OLD:' for older versions
    """
    try:
        channel = cls.get_channel_with_dirname(name)
    except UnicodeEncodeError:
        channel = cls.get_channel_with_infohash(infohash)

    if not channel:
        # Unknown channel: fall back to the raw download name
        return name

    is_current_version = channel.infohash == database_blob(infohash)
    return channel.title if is_current_version else u'OLD:' + channel.title
def test_search(self):
    """
    Test a search query that should return a few new type channels
    """
    num_hay = 100
    with db_session:
        # Fixture: one channel titled 'test', many 'hay N' torrents and a
        # single uniquely-titled 'needle' torrent
        _ = self.session.lm.mds.ChannelMetadata(title='test', tags='test', subscribed=True,
                                                infohash=str(random.getrandbits(160)))
        for x in xrange(0, num_hay):
            self.session.lm.mds.TorrentMetadata(title='hay ' + str(x),
                                                infohash=str(random.getrandbits(160)))
        self.session.lm.mds.TorrentMetadata(title='needle',
                                            infohash=database_blob(
                                                bytearray(random.getrandbits(8) for _ in xrange(20))))

    self.should_check_equality = False

    # The unique 'needle' title matches exactly one entry
    result = yield self.do_request('search?filter=needle', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 1)

    # 'hay' matches 100 entries but only 50 come back
    # (presumably the default page size - confirm against the endpoint)
    result = yield self.do_request('search?filter=hay', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 50)

    # Restricting by type=channel returns only the channel entry
    result = yield self.do_request('search?filter=test&type=channel', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 1)

    # Restricting by type=torrent returns the torrent entry itself
    result = yield self.do_request('search?filter=needle&type=torrent', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(parsed["results"][0][u'name'], 'needle')

    # Sorting must not change the single-result query outcome
    result = yield self.do_request('search?filter=needle&sort_by=name', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 1)

    # If uuid is passed in request, then the same uuid is returned in the response
    result = yield self.do_request('search?uuid=uuid1&filter=needle&sort_by=name', expected_code=200)
    parsed = json.loads(result)
    self.assertEqual(len(parsed["results"]), 1)
    self.assertEqual(parsed['uuid'], 'uuid1')
def get_old_channels(self):
    """
    Read channels from the legacy Tribler sqlite database and convert each
    non-empty one into a dict of ChannelMetadata keyword arguments.
    :return: a list of channel dicts; channels without torrents are skipped
    """
    connection = sqlite3.connect(self.tribler_db)
    try:
        cursor = connection.cursor()

        channels = []
        for id_, name, dispersy_cid, modified, nr_torrents, nr_favorite, nr_spam in cursor.execute(
                self.select_channels_sql):
            # Skip channels that contain no torrents
            if nr_torrents and nr_torrents > 0:
                channels.append({"id_": 0,
                                 # Legacy channels have no real infohash; use a random one
                                 "infohash": database_blob(os.urandom(20)),
                                 "title": name or '',
                                 "public_key": dispesy_cid_to_pk(id_),
                                 "timestamp": final_timestamp(),
                                 "votes": int(nr_favorite or 0),
                                 "origin_id": 0,
                                 "signature": pseudo_signature(),
                                 "skip_key_check": True,
                                 "size": 0,
                                 "local_version": final_timestamp(),
                                 "subscribed": False,
                                 "status": LEGACY_ENTRY,
                                 "num_entries": int(nr_torrents or 0)})
        return channels
    finally:
        # Close the sqlite connection even when the query or row conversion
        # raises; the original leaked the connection on any exception.
        connection.close()
def render_GET(self, request):
    """
    Return a JSON page of torrents from the user's own channel, applying the
    sanitized pagination/sorting parameters from the request. Responds with
    404 when no personal channel has been created yet.
    """
    with db_session:
        my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
        if not my_channel:
            request.setResponseCode(http.NOT_FOUND)
            return json.dumps({"error": "your channel has not been created"})

        query_params = SpecificChannelTorrentsEndpoint.sanitize_parameters(request.args)
        if 'exclude_deleted' in request.args:
            query_params['exclude_deleted'] = request.args['exclude_deleted']

        entries, total = self.session.lm.mds.TorrentMetadata.get_entries(
            channel_pk=database_blob(my_channel.public_key), **query_params)
        simple_entries = [entry.to_simple_dict() for entry in entries]

        response = {
            "results": simple_entries,
            "first": query_params['first'],
            "last": query_params['last'],
            "sort_by": query_params['sort_by'],
            "sort_asc": int(query_params['sort_asc']),
            "total": total,
            "dirty": my_channel.dirty,
        }
        return json.dumps(response)
def get_valid_trackers_of_torrent(self, torrent_id):
    """
    Get the set of valid, non-blacklisted tracker URLs for a torrent.
    :param torrent_id: infohash of the torrent to look up
    :return: a set of tracker URL strings
    """
    db_tracker_list = self.tribler_session.lm.mds.TorrentState.get(
        infohash=database_blob(torrent_id)).trackers
    # Convert each URL to str once instead of three times per tracker, and
    # build the result with a set comprehension instead of set([...]).
    urls = (str(tracker.url) for tracker in db_tracker_list)
    return {url for url in urls
            if is_valid_url(url) and not self.is_blacklisted_tracker(url)}