def gen_sample_channel(mds):
    """Generate the on-disk channel fixture files used by other tests.

    Creates a channel in *mds*, commits it through several revisions
    (add torrent, add torrent + random entries, delete torrent), and
    renames the emitted .mdblob/.torrent files to the stable
    CHANNEL_METADATA*/CHANNEL_TORRENT* fixture paths.

    :param mds: metadata store providing ChannelMetadata/TorrentMetadata
    """
    my_channel = mds.ChannelMetadata.create_channel('test_channel', 'test description')
    _ = my_channel.add_torrent_to_channel(TorrentDef.load(TORRENT_UBUNTU_FILE), None)
    my_channel.commit_channel_torrent()

    # Second revision: one real torrent plus two random entries.
    t2 = my_channel.add_torrent_to_channel(TorrentDef.load(TORRENT_VIDEO_FILE), None)
    _ = mds.TorrentMetadata.from_dict(gen_random_entry())
    _ = mds.TorrentMetadata.from_dict(gen_random_entry())
    my_channel.commit_channel_torrent()

    # Third revision: delete the second torrent again.
    my_channel.delete_torrent(t2.infohash)
    my_channel.commit_channel_torrent()

    # Rename files to stable names
    mdblob_name = os.path.join(SAMPLE_DIR, my_channel.dir_name + ".mdblob")
    torrent_name = os.path.join(SAMPLE_DIR, my_channel.dir_name + ".torrent")
    os.rename(mdblob_name, CHANNEL_METADATA)
    os.rename(torrent_name, CHANNEL_TORRENT)

    # Update channel
    _ = mds.TorrentMetadata.from_dict(gen_random_entry())
    my_channel.commit_channel_torrent()

    # Rename updated files to stable names
    os.rename(mdblob_name, CHANNEL_METADATA_UPDATED)
    os.rename(torrent_name, CHANNEL_TORRENT_UPDATED)
def test_is_private(self):
    """A torrent with the 'private' flag reports is_private() True; a public one False."""
    privatefn = os.path.join(TESTS_DATA_DIR, "private.torrent")
    publicfn = os.path.join(TESTS_DATA_DIR, "bak_single.torrent")

    t1 = TorrentDef.load(privatefn)
    t2 = TorrentDef.load(publicfn)

    # `assert_` is a deprecated alias and `== True/False` comparisons are
    # redundant; use the explicit boolean assertions instead.
    self.assertTrue(t1.is_private())
    self.assertFalse(t2.is_private())
def test_consolidate_channel_torrent(self):
    """
    Test completely re-committing a channel: after consolidation only a
    single, up-to-date channel torrent file should remain on disk.
    """
    channel = self.mds.ChannelMetadata.create_channel('test', 'test')
    my_dir = os.path.abspath(os.path.join(self.mds.channels_dir, channel.dir_name))
    tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

    # 1st torrent
    channel.add_torrent_to_channel(tdef, None)
    channel.commit_channel_torrent()

    # 2nd torrent: from_dict is called for its side effect of creating the
    # metadata entry; the previously unused `md` binding was removed.
    self.mds.TorrentMetadata.from_dict(
        dict(self.torrent_template, public_key=channel.public_key, status=NEW))
    channel.commit_channel_torrent()

    # Delete entry
    channel.delete_torrent(tdef.get_infohash())
    channel.commit_channel_torrent()

    # Three commits left three blob files in the channel dir.
    self.assertEqual(1, len(channel.contents_list))
    self.assertEqual(3, len(os.listdir(my_dir)))

    channel.consolidate_channel_torrent()
    self.assertEqual(1, len(os.listdir(my_dir)))
def test_restore_torrent_in_channel(self):
    """
    Test if the torrent scheduled for deletion is restored/updated after
    the user tries to re-add it.
    """
    channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
    tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
    md = channel_metadata.add_torrent_to_channel(tdef, None)

    # Check correct re-add: re-adding an identical tdef should flip the
    # status back to COMMITTED and return the same metadata object.
    md.status = TODELETE
    md_updated = channel_metadata.add_torrent_to_channel(tdef, None)
    self.assertEqual(md.status, COMMITTED)
    self.assertEqual(md_updated, md)
    self.assertTrue(md.has_valid_signature)

    # Check update of torrent properties from a new tdef: a changed tracker
    # should mark the entry NEW again and refresh tracker_info.
    md.status = TODELETE
    new_tracker_address = u'http://tribler.org/announce'
    tdef.torrent_parameters['announce'] = new_tracker_address
    md_updated = channel_metadata.add_torrent_to_channel(tdef, None)
    self.assertEqual(md_updated, md)
    self.assertEqual(md.status, NEW)
    self.assertEqual(md.tracker_info, new_tracker_address)
    self.assertTrue(md.has_valid_signature)

    # In addition, check that the trackers table was properly updated
    self.assertEqual(len(md.health.trackers), 2)
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)

    # Let Tribler start downloading an non-functioning torrent, so
    # we can talk to a normal download engine.
    self.torrentfn = os.path.join('extend_hs_dir','dummydata.merkle.torrent')
    tdef = TorrentDef.load(self.torrentfn)
    dscfg = DownloadStartupConfig()
    dscfg.set_dest_dir(self.config_path)
    self.session.start_download(tdef,dscfg)

    # This is the infohash of the torrent in test/extend_hs_dir
    self.infohash = '\xccg\x07\xe2\x9e!]\x16\xae{\xb8\x10?\xf9\xa5\xf9\x07\xfdBk'
    self.setUpMyListenSocket()

    # Must be changed in test/extend_hs_dir/dummydata.merkle.torrent as well
    self.mytrackerport = 4901
    # Must be Tribler version <= 3.5.0. Changing this to 351 makes this test
    # fail, so it's a good test.
    self.myid = 'R350-----HgUyPu56789'
    self.mytracker = MyTracker(self.mytrackerport,self.myid,'127.0.0.1',self.mylistenport)
    self.mytracker.background_serve()

    # Python 2 print statement; give the fake tracker time to come up.
    print >>sys.stderr,"test: Giving MyTracker and myself time to start"
    time.sleep(5)
def test_create_torrent(self):
    """
    Testing whether the API returns a proper base64 encoded torrent
    """
    torrent_path = os.path.join(self.files_path, "video.avi.torrent")
    expected_tdef = TorrentDef.load(torrent_path)

    def verify_torrent(body):
        # Decode the API response back into a TorrentDef for comparison.
        response = json.loads(body)
        torrent = base64.b64decode(response["torrent"])
        tdef = TorrentDef.load_from_memory(torrent)

        # Copy expected creation date and created by (Tribler version) from actual result
        creation_date = tdef.get_creation_date()
        expected_tdef.metainfo["creation date"] = creation_date
        expected_tdef.metainfo["created by"] = tdef.metainfo['created by']

        # NOTE(review): dir() compares attribute *names*, which are identical
        # for any two TorrentDef instances, making this assertion near-vacuous.
        # It likely should compare metainfo contents — confirm intent before changing.
        self.assertEqual(dir(expected_tdef), dir(tdef))

    post_data = {
        "files": [os.path.join(self.files_path, "video.avi"),
                  os.path.join(self.files_path, "video.avi.torrent")],
        "description": "Video of my cat",
        "trackers": "http://localhost/announce"
    }
    self.should_check_equality = False
    return self.do_request('createtorrent?download=1', 200, None, 'POST',
                           post_data).addCallback(verify_torrent)
def addTorrentToDB(self, filename, torrent_hash, metadata, source='BC', extra_info=None, hack=False):
    """ Add a collected .torrent file to the torrent database.

    Arno: no need to delegate to olbridge, this is already run by OverlayThread

    :param filename: path of the .torrent file on disk
    :param torrent_hash: infohash appended to the recently-collected queue
    :param extra_info: optional dict of extra torrent info; the filename is
        inserted into it. Was a mutable default argument (``{}``), which is
        created once and shared between calls in Python, so the filename of
        the first call leaked into all later default calls — replaced with
        a None sentinel (backward-compatible: behaviour within one call is
        unchanged).
    """
    if extra_info is None:
        extra_info = {}

    # 03/02/10 Boudewijn: addExternalTorrent now requires a
    # torrentdef, consequently we provide the filename through the
    # extra_info dictionary
    torrentdef = TorrentDef.load(filename)
    if not 'filename' in extra_info:
        extra_info['filename'] = filename
    torrent = self.torrent_db.addExternalTorrent(torrentdef, source, extra_info)
    if torrent is None:
        return

    # Arno, 2008-10-20: XXX torrents are filtered out in the final display stage
    self.launchmany.set_activity(NTFY_ACT_GOT_METADATA,unicode('"'+torrent['name']+'"'),torrent['category'])

    if self.initialized:
        self.num_torrents += 1  # for free disk limitation

        if not extra_info:
            self.refreshTrackerStatus(torrent)

        if len(self.recently_collected_torrents) < 50:  # Queue of 50
            self.recently_collected_torrents.append(torrent_hash)
        else:
            self.recently_collected_torrents.pop(0)
            self.recently_collected_torrents.append(torrent_hash)
def add_torrents_from_dir(self, torrents_dir, recursive=False):
    """Add every ``.torrent`` file found in *torrents_dir* to this channel.

    :param torrents_dir: directory to scan
    :param recursive: when True, also walk subdirectories
    :returns: tuple of (list of .torrent paths found, list of paths that failed to add)
    """
    # TODO: Optimize this properly!!!!
    torrents_list = []
    errors_list = []

    if recursive:
        def rec_gen():
            for root, _, filenames in os.walk(torrents_dir):
                for fn in filenames:
                    yield os.path.join(root, fn)
        filename_generator = rec_gen()
    else:
        filename_generator = os.listdir(torrents_dir)

    # Build list of .torrents to process
    for f in filename_generator:
        filepath = os.path.join(torrents_dir, f)
        filename = str(filepath) if sys.platform == 'win32' else filepath.decode('utf-8')
        if os.path.isfile(filepath) and filename.endswith(u'.torrent'):
            torrents_list.append(filepath)

    for chunk in chunks(torrents_list, 100):  # 100 is a reasonable chunk size for commits
        for f in chunk:
            try:
                self.add_torrent_to_channel(TorrentDef.load(f))
            except DuplicateTorrentFileError:
                pass
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit propagate; parse/add failures are still
                # recorded and skipped.
                errors_list.append(f)
        orm.commit()  # Kinda optimization to drop excess cache?

    return torrents_list, errors_list
def test_channel_update_and_download(self): """ Test whether we can successfully update a channel and download the new version """ # First we have to manually add the old version old_payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA) with db_session: old_channel = self.session.lm.mds.ChannelMetadata.from_payload(old_payload) chan_dir = os.path.join(CHANNEL_DIR, old_channel.dir_name) self.session.lm.mds.process_channel_dir(chan_dir, old_payload.public_key) channel_tdef = TorrentDef.load(CHANNEL_TORRENT_UPDATED) libtorrent_port = get_random_port() yield self.setup_seeder(channel_tdef, CHANNEL_DIR, libtorrent_port) payload = ChannelMetadataPayload.from_file(CHANNEL_METADATA_UPDATED) # Download the channel in our session download, finished_deferred = self.session.lm.update_channel(payload) download.add_peer(("127.0.0.1", self.seeder_session.config.get_libtorrent_port())) yield finished_deferred with db_session: # There should be 4 torrents + 1 channel torrent channel = self.session.lm.mds.ChannelMetadata.get_channel_with_id(payload.public_key) self.assertEqual(5, len(list(self.session.lm.mds.TorrentMetadata.select()))) self.assertEqual(4, channel.local_version)
def test_commit_channel_torrent(self):
    """Committing twice in a row: the first commit has pending work and
    returns a truthy infohash, the second is a no-op and returns nothing."""
    channel = self.mds.ChannelMetadata.create_channel('test', 'test')
    ubuntu_tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
    channel.add_torrent_to_channel(ubuntu_tdef, None)

    first_commit_result = channel.commit_channel_torrent()
    second_commit_result = channel.commit_channel_torrent()
    self.assertTrue(first_commit_result)
    self.assertFalse(second_commit_result)
def _rename_torrent_files(self):
    """
    Renames all the torrent files to INFOHASH.torrent and delete unparseable ones.
    """
    def update_status():
        # Report migration progress as a percentage (100% when nothing to do).
        progress = 1.0
        if self.total_torrent_file_count > 0:
            progress = float(self.total_torrent_files_processed) / self.total_torrent_file_count
        progress *= 100
        self.status_update_func(u"Migrating torrent files %.2f%%..." % progress)

    for root, _, files in os.walk(self.torrent_collecting_dir):
        for name in files:
            file_path = os.path.join(root, name)
            try:
                # Parseable files are moved to the migration dir under a
                # canonical INFOHASH.torrent name.
                tdef = TorrentDef.load(file_path)
                move(file_path, os.path.join(self.tmp_migration_dir, hexlify(tdef.infohash) + u".torrent"))
                self.torrent_files_migrated += 1
            except Exception as e:
                # Unparseable torrent files are logged and deleted instead
                # of migrated.
                self._logger.error(u"dropping corrupted torrent file %s: %s", file_path, str(e))
                os.unlink(file_path)
                self.torrent_files_dropped += 1
            self.total_torrent_files_processed += 1
            update_status()

        # We don't want to walk through the child directories
        break
def test_add_torrent_to_channel(self):
    """
    Adding a torrent to a channel populates its contents; re-adding the
    same torrent raises DuplicateTorrentFileError.
    """
    test_channel = self.mds.ChannelMetadata.create_channel('test', 'test')
    ubuntu_tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

    test_channel.add_torrent_to_channel(ubuntu_tdef, {'description': 'blabla'})
    self.assertTrue(test_channel.contents_list)
    self.assertRaises(DuplicateTorrentFileError,
                      test_channel.add_torrent_to_channel, ubuntu_tdef, None)
def create_torrent(self, filename, store=True, update=True, forward=True):
    """Create and disperse a torrent message from a .torrent file on disk.

    :param filename: path to the .torrent file
    :param store/update/forward: passed through to the dispersy message creation
    :returns: the created message on success, False when the file is missing
        or cannot be parsed
    """
    if path.exists(filename):
        try:
            torrentdef = TorrentDef.load(filename)
            files = torrentdef.get_files_as_unicode_with_length()
            return self._disp_create_torrent(torrentdef.get_infohash(), long(time()),
                                             torrentdef.get_name_as_unicode(), tuple(files),
                                             torrentdef.get_trackers_as_single_tuple(),
                                             store, update, forward)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate; parse/creation failures are logged and reported as
            # a failed creation.
            print_exc()
    return False
def updated_my_channel(self, new_torrent_path):
    """
    Notify the core that we updated our channel.
    :param new_torrent_path: path to the new torrent file
    """
    # Kick off a download of the freshly committed channel torrent into
    # the channels directory, flagged as a channel download.
    channel_tdef = TorrentDef.load(new_torrent_path)
    download_config = DownloadStartupConfig()
    download_config.set_dest_dir(self.mds.channels_dir)
    download_config.set_channel_download(True)
    self.add(channel_tdef, download_config)
def test_torrent_checking(self):
    """Queue a known torrent for a tracker check and verify that swarm
    info with non-negative peer counts becomes available."""
    tdef = TorrentDef.load(os.path.join(BASE_DIR, "data",
                                        "Pioneer.One.S01E06.720p.x264-VODO.torrent"))
    tdef.set_tracker("http://95.211.198.141:2710/announce")
    tdef.metainfo_valid = True

    self.tdb.addExternalTorrent(tdef)
    self.torrentChecking.addToQueue(tdef.get_infohash())
    sleep(30)

    # Unpack the swarm info row; the first field (torrent id) is unused here
    # (renamed so it no longer shadows the `id` builtin).
    torrent_id, num_leechers, num_seeders, last_check = \
        self.tdb.getSwarmInfoByInfohash(tdef.get_infohash())
    assert num_leechers >= 0 or num_seeders >= 0, (num_leechers, num_seeders)
def read_torrents(self):
    """Yield ``(path, infohash, metainfo)`` for every parseable torrent file
    directly inside ``self.path``.

    Unreadable or corrupt files are skipped silently (best effort).
    """
    for entry in os.listdir(self.path):
        full_path = os.path.join(self.path, entry)
        try:
            tdef = TorrentDef.load(full_path)
            yield full_path, tdef.infohash, tdef.get_metainfo()
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate; corrupt files are still skipped. Also removed the
            # `file` local (shadowed a builtin) and the dead `tdef = None`.
            pass
def _GetDestPath(self, torrentfilename = None):
    """Show a SaveAs dialog for the (optional) torrent file and return the
    chosen destination directory, or None when the user cancels."""
    tdef = TorrentDef.load(torrentfilename) if torrentfilename else None

    dlg = SaveAs(self, tdef, self.defaultDLConfig.get_dest_dir(), None,
                 os.path.join(self.frame.utility.session.get_state_dir(),
                              'recent_download_history'))
    # Renamed from `id` to avoid shadowing the builtin.
    dialog_result = dlg.ShowModal()
    destdir = dlg.GetPath() if dialog_result == wx.ID_OK else None
    dlg.Destroy()
    return destdir
def test_remove_torrent_id(self):
    """
    Test whether removing a torrent id works.
    """
    tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
    config = DownloadStartupConfig()
    config.set_dest_dir(self.getDestDir())
    download = self.session.start_download_from_tdef(tdef,
                                                     download_startup_config=config,
                                                     hidden=True)

    # Once the libtorrent handle is available, forward the unhexlified
    # string form of its infohash to the removal call.
    started = download.get_handle().addCallback(
        lambda handle: unhexlify(str(handle.info_hash())))
    return started.addCallback(self.session.remove_download_by_id)
def do_create_playlist(torrentfilename):
    # GUI test step: create a playlist containing the given torrent, switch
    # to the "Manage playlists" tab, and wait (up to 60s) for the playlist
    # to appear before chaining into do_download_torrent.
    self.screenshot('Files have been added created')
    infohash = TorrentDef.load(torrentfilename).get_infohash()
    manageplaylist = self.managechannel.playlistlist
    manager = manageplaylist.GetManager()
    manager.createPlaylist('Unittest', 'Playlist created for Unittest', [infohash, ])

    # switch to playlist tab
    mp_index = self.managechannel.GetPage(self.managechannel.notebook, "Manage playlists")
    if mp_index:
        self.managechannel.notebook.SetSelection(mp_index)

    self.CallConditional(60, lambda: len(manageplaylist.GetItems()) == 1,
                         lambda: do_download_torrent(torrentfilename),
                         'Channel did not have a playlist')
def test_add_torrent_duplicate(self):
    """
    Test whether adding a duplicate torrent to you channel results in an error
    """
    self.create_my_channel()
    my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
    tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
    my_channel.add_torrent_to_channel(tdef, {'description': 'blabla'})

    # .torrent files are binary bencoded data: open in "rb" so text-mode
    # newline translation (notably on Windows) cannot corrupt the payload
    # before it is base64-encoded. Previously opened with "r".
    with open(TORRENT_UBUNTU_FILE, "rb") as torrent_file:
        base64_content = base64.b64encode(torrent_file.read())

    self.should_check_equality = False
    post_params = {'torrent': base64_content}
    return self.do_request('mychannel/torrents', request_type='PUT',
                           post_data=post_params, expected_code=500)
def notify_possible_torrent_roothash(self, roothash):
    # Fire every callback registered for this swift roothash.
    keys = self.callbacks.keys()
    for key in keys:
        if key[1] == roothash:
            # Bind `key` as a default argument so each scheduled lambda
            # captures its own key (avoids the late-binding closure pitfall).
            handle_lambda = lambda key=key: self._handleCallback(key, True)
            self.scheduletask(handle_lambda)

    # If the collected swift file exists on disk, sync it into the torrent DB.
    sdef = SwiftDef(roothash)
    swiftpath = os.path.join(self.session.get_torrent_collecting_dir(),sdef.get_roothash_as_hex())
    if os.path.exists(swiftpath):
        tdef = TorrentDef.load(swiftpath)

        # Update the existing record with the swift hash, or insert a new
        # one marked as collected via SWIFT.
        if self.torrent_db.hasTorrent(tdef.get_infohash()):
            self.torrent_db.updateTorrent(tdef.get_infohash(), swift_torrent_hash = sdef.get_roothash(), torrent_file_name = swiftpath)
        else:
            self.torrent_db._addTorrentToDB(tdef, source = "SWIFT", extra_info = {'filename': swiftpath, 'swift_torrent_hash':roothash, 'status':'good'}, commit = True)
def test_torrent_checking(self): tdef = TorrentDef.load(TORRENT_VIDEO_FILE) # TODO(emilon): This tracker is no more, we need to set up a new one # tdef.set_tracker("http://95.211.198.141:2710/announce") tdef.metainfo_valid = True self.tdb.addExternalTorrent(tdef) self.session.check_torrent_health(tdef.get_infohash()) sleep(31) torrent = self.tdb.getTorrent(tdef.get_infohash()) self._logger.debug('got torrent %s', torrent) num_seeders = torrent['num_seeders'] num_leechers = torrent['num_leechers'] assert num_leechers >= 0 or num_seeders >= 0, "No peers found: leechers: %d seeders: %d" % (num_leechers, num_seeders)
def do_create_playlist(torrentfilename):
    # GUI test step: create a playlist with the given torrent, switch to the
    # "Manage playlists" tab, then wait (up to 60s) for the playlist to show
    # up before chaining into do_overview.
    self.screenshot("Files have been added created")
    infohash = TorrentDef.load(torrentfilename).get_infohash()
    manageplaylist = self.managechannel.playlistlist
    manager = manageplaylist.GetManager()
    manager.createPlaylist("Unittest", "Playlist created for Unittest", [infohash])
    # switch to playlist tab
    mp_index = self.managechannel.GetPage(self.managechannel.notebook, "Manage playlists")
    self.managechannel.notebook.SetSelection(mp_index)
    self.CallConditional(
        60, lambda: len(manageplaylist.GetItems()) == 1, do_overview,
        "Channel did not have a playlist"
    )
def isTorrentPlayable(self, torrent, default=(False, [], []), callback=None):
    """
    TORRENT is a dictionary containing torrent information used to
    display the entry on the UI. it is NOT the torrent file!

    DEFAULT indicates the default value when we don't know if the
    torrent is playable.

    CALLBACK can be given to result the actual 'playable' value for the
    torrent after some downloading/processing. The DEFAULT value is
    returned in this case. Will only be called if self.item == torrent

    The return value is a tuple consisting of a boolean indicating if the
    torrent is playable and a list. If the torrent is not playable or if
    the default value is returned the boolean is False and the list is
    empty. If it is playable the boolean is true and the list returned
    consists of the playable files within the actual torrent.
    """
    # Re-invoke ourselves once the torrent file has been collected.
    torrent_callback = lambda infohash, metadata, filename: self.isTorrentPlayable(torrent, default, callback)
    torrent_callback.__name__ = "isTorrentPlayable_callback"
    torrent_filename = self.getTorrent(torrent, torrent_callback)

    if isinstance(torrent_filename, basestring):  # got actual filename
        tdef = TorrentDef.load(torrent_filename)

        # Playable == at least one file with a known video extension.
        files = tdef.get_files_as_unicode(exts=videoextdefaults)
        allfiles = tdef.get_files_as_unicode_with_length()
        playable = len(files) > 0

        torrent['comment'] = tdef.get_comment_as_unicode()
        if tdef.get_tracker_hierarchy():
            torrent['trackers'] = tdef.get_tracker_hierarchy()
        else:
            torrent['trackers'] = [[tdef.get_tracker()]]

        if not callback is None:
            callback(torrent, (playable, files, allfiles))
        else:
            return torrent, (playable, files, allfiles)
    elif not torrent_filename[0]:
        # getTorrent returned a (False, ...) pair: collection is still
        # pending; report the default value through the callback.
        # NOTE(review): callback may be None here, which would raise —
        # presumably callers always pass a callback on this path; confirm.
        if DEBUG:
            print >>sys.stderr, "standardDetails:torrent_is_playable returning default", default
        callback(torrent, default)
    else:
        # getTorrent returned a (True, value) pair: forward the cached result.
        return torrent_filename[1]
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)

    # Let Tribler start downloading an non-functioning torrent, so
    # we can talk to a normal download engine.
    self.torrentfn = os.path.join('extend_hs_dir','dummydata.merkle.torrent')
    tdef = TorrentDef.load(self.torrentfn)
    dscfg = self.setUpDownloadConfig()
    self.session.start_download(tdef,dscfg)

    # This is the infohash of the torrent in test/extend_hs_dir
    self.infohash = '\xccg\x07\xe2\x9e!]\x16\xae{\xb8\x10?\xf9\xa5\xf9\x07\xfdBk'
    self.mylistenport = 4810
def test_add_torrent_def_to_channel(self):
    """
    Test whether adding a torrent def to a channel works
    """
    test_deferred = Deferred()
    torrent_def = TorrentDef.load(TORRENT_UBUNTU_FILE)

    def on_channel_created(subject, change_type, object_id, channel_data):
        # Fired once the channel exists; add the torrent and verify it is
        # present in the channel database before resolving the deferred.
        channel_id = self.channel_db_handler.getMyChannelId()
        self.session.add_torrent_def_to_channel(channel_id, torrent_def, {"description": "iso"}, forward=False)
        self.assertTrue(self.channel_db_handler.hasTorrent(channel_id, torrent_def.get_infohash()))
        test_deferred.callback(None)

    self.session.add_observer(on_channel_created, SIGNAL_CHANNEL, [SIGNAL_ON_CREATED])
    self.session.create_channel("name", "description", "open")
    return test_deferred
def test_delete_torrent_from_channel(self):
    """
    Test deleting a torrent from your channel
    """
    my_channel = self.mds.ChannelMetadata.create_channel('test', 'test')
    ubuntu_tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

    # Deleting an uncommitted torrent removes it immediately — the commit
    # log has nothing to record.
    my_channel.add_torrent_to_channel(ubuntu_tdef, None)
    my_channel.delete_torrent(ubuntu_tdef.get_infohash())
    self.assertEqual(0, len(my_channel.contents_list))

    # Once committed, deletion follows the append-only process: the entry is
    # marked for deletion and disappears after the next commit.
    my_channel.add_torrent_to_channel(ubuntu_tdef, None)
    my_channel.commit_channel_torrent()
    self.assertEqual(1, len(my_channel.contents_list))

    my_channel.delete_torrent(ubuntu_tdef.get_infohash())
    my_channel.commit_channel_torrent()
    self.assertEqual(0, len(my_channel.contents_list))
def search_torrents(self,kws,maxhits=None,sendtorrents=False): if DEBUG: print >>sys.stderr,time.asctime(),'-', "rquery: search for torrents matching",`kws` allhits = self.torrent_db.searchNames(kws,local=False) print >>sys.stderr,time.asctime(),'-', "rquery: got matches",`allhits` if maxhits is None: hits = allhits else: hits = allhits[:maxhits] colltorrdir = self.session.get_torrent_collecting_dir() if sendtorrents: print >>sys.stderr,time.asctime(),'-', "rqmh: search_torrents: adding torrents" for hit in hits: filename = os.path.join(colltorrdir,hit['torrent_file_name']) try: tdef = TorrentDef.load(filename) if tdef.get_url_compat(): metatype = URL_MIME_TYPE metadata = tdef.get_url() else: metatype = TSTREAM_MIME_TYPE metadata = bencode(tdef.get_metainfo()) except: print_exc() metadata = None hit['metatype'] = metatype hit['metadata'] = metadata # Filter out hits for which we could not read torrent file (rare) newhits = [] for hit in hits: if hit['metadata'] is not None: newhits.append(hit) hits = newhits return hits
def start(self):
    """
    The Metadata Store checks the database at regular intervals to see if new channels
    are available for preview or subscribed channels require updating.
    """
    # Test if we our channel is there, but we don't share it because Tribler was closed unexpectedly
    try:
        with db_session:
            my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()
            if my_channel and my_channel.status == COMMITTED and \
                    not self.session.has_download(str(my_channel.infohash)):
                torrent_path = os.path.join(self.session.lm.mds.channels_dir,
                                            my_channel.dir_name + ".torrent")
                self.updated_my_channel(TorrentDef.load(torrent_path))
    except Exception:
        # Best-effort restart of the channel seed; narrowed from a bare
        # `except:` so KeyboardInterrupt/SystemExit still propagate.
        pass

    channels_check_interval = 5.0  # seconds
    self.channels_lc = self.register_task("Process channels download queue and remove cruft",
                                          LoopingCall(self.service_channels)).start(channels_check_interval)
def do_modifications(torrentfilename):
    # GUI test step: select the torrent in the library, then poll (up to
    # 10s) until both 'swift-thumbnails' and 'video-info' channel
    # modifications have arrived before chaining into do_overview.
    infohash = TorrentDef.load(torrentfilename).get_infohash()
    self.frame.librarylist.Select(infohash)
    torrent = self.guiUtility.channelsearch_manager.getTorrentFromChannel(self.frame.managechannel.channel, infohash)

    def check_for_modifications():
        # True only when a thumbnail modification exists AND a video-info
        # modification with non-empty duration and resolution was received.
        modifications = self.guiUtility.channelsearch_manager.getTorrentModifications(torrent)
        videoinfo_valid = False
        swiftthumbnails_valid = False
        for modification in modifications:
            if modification.name == 'swift-thumbnails' and modification.value:
                swiftthumbnails_valid = True
            if modification.name == 'video-info' and modification.value:
                videoinfo_dict = json.loads(modification.value)
                if videoinfo_dict['duration'] and videoinfo_dict['resolution']:
                    videoinfo_valid = True
        return videoinfo_valid and swiftthumbnails_valid

    self.CallConditional(10, check_for_modifications, do_overview, 'No valid channel modifications received')
if __name__ == "__main__": s.set_download_states_callback(states_callback, getpeerlist=True) # For testing only! #s.add_observer(testfunc, NTFY_PEERS) #s.add_observer(testfunc, NTFY_TORRENTS) #s.remove_observer(testfunc) # Torrent 1 if sys.platform == 'win32': tdef = TorrentDef.load('bla.torrent') else: #tdef = TorrentDef.load('/tmp/bla3multi.torrent') tdef = TorrentDef.load('/tmp/bla.torrent') dcfg = DownloadStartupConfig() dcfg.set_max_rate_period(4.0) #dcfg.set_max_speed(UPLOAD, 100) #dcfg.set_max_speed(DOWNLOAD, 50) #dcfg.set_dest_dir('/arno/tmp/scandir') """ dcfg.set_video_start_callback(vod_ready_callback) #dcfg.set_selected_files('star-wreck-in-the-pirkinning.txt') # play this video dcfg.set_selected_files('star_wreck_in_the_pirkinning_subtitled_xvid.avi') # play this video """ d = s.start_download(tdef, dcfg)
def _upgrade_22_to_23(self):
    """
    Migrates the database to the new version.
    """
    self.status_update_func(u"Upgrading database from v%s to v%s..." % (22, 23))

    # Drop obsolete BarterCast data and stale indexes.
    self.db.execute(u"""
DROP TABLE IF EXISTS BarterCast;
DROP INDEX IF EXISTS bartercast_idx;
DROP INDEX IF EXISTS Torrent_swift_torrent_hash_idx;
""")

    try:
        # If _tmp_Torrent already exists, a previous migration attempt was
        # interrupted; skip the schema check and data copy below.
        next(self.db.execute(u"SELECT * From sqlite_master WHERE name == '_tmp_Torrent' and type == 'table';"))
    except StopIteration:
        # no _tmp_Torrent table, check if the current Torrent table is new
        lines = [(0, u'torrent_id', u'integer', 1, None, 1),
                 (1, u'infohash', u'text', 1, None, 0),
                 (2, u'name', u'text', 0, None, 0),
                 (3, u'torrent_file_name', u'text', 0, None, 0),
                 (4, u'length', u'integer', 0, None, 0),
                 (5, u'creation_date', u'integer', 0, None, 0),
                 (6, u'num_files', u'integer', 0, None, 0),
                 (7, u'thumbnail', u'integer', 0, None, 0),
                 (8, u'insert_time', u'numeric', 0, None, 0),
                 (9, u'secret', u'integer', 0, None, 0),
                 (10, u'relevance', u'numeric', 0, u'0', 0),
                 (11, u'source_id', u'integer', 0, None, 0),
                 (12, u'category_id', u'integer', 0, None, 0),
                 (13, u'status_id', u'integer', 0, u'0', 0),
                 (14, u'num_seeders', u'integer', 0, None, 0),
                 (15, u'num_leechers', u'integer', 0, None, 0),
                 (16, u'comment', u'text', 0, None, 0),
                 (17, u'dispersy_id', u'integer', 0, None, 0),
                 (18, u'last_tracker_check', u'integer', 0, u'0', 0),
                 (19, u'tracker_check_retries', u'integer', 0, u'0', 0),
                 (20, u'next_tracker_check', u'integer', 0, u'0', 0)]

        i = 0
        is_new = True
        # Compare the live schema row-by-row against the expected v23 layout.
        for line in self.db.execute(u"PRAGMA table_info(Torrent);"):
            if line != lines[i]:
                is_new = False
                break
            i += 1

        if not is_new:
            # create the temporary table
            self.db.execute(u"""
CREATE TABLE IF NOT EXISTS _tmp_Torrent (
    torrent_id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
    infohash text NOT NULL,
    name text,
    torrent_file_name text,
    length integer,
    creation_date integer,
    num_files integer,
    thumbnail integer,
    insert_time numeric,
    secret integer,
    relevance numeric DEFAULT 0,
    source_id integer,
    category_id integer,
    status_id integer DEFAULT 0,
    num_seeders integer,
    num_leechers integer,
    comment text,
    dispersy_id integer,
    last_tracker_check integer DEFAULT 0,
    tracker_check_retries integer DEFAULT 0,
    next_tracker_check integer DEFAULT 0
);
""")

            # migrate Torrent table
            keys = (u"torrent_id", u"infohash", u"name", u"torrent_file_name", u"length",
                    u"creation_date", u"num_files", u"thumbnail", u"insert_time", u"secret",
                    u"relevance", u"source_id", u"category_id", u"status_id", u"num_seeders",
                    u"num_leechers", u"comment", u"dispersy_id", u"last_tracker_check",
                    u"tracker_check_retries", u"next_tracker_check")

            keys_str = u", ".join(keys)
            values_str = u"?," * len(keys)
            insert_stmt = u"INSERT INTO _tmp_Torrent(%s) VALUES(%s)" % (keys_str, values_str[:-1])
            current_count = 0

            results = self.db.execute(u"SELECT %s FROM Torrent;" % keys_str)
            new_torrents = []
            for torrent in results:
                torrent_id, infohash, name, torrent_file_name = torrent[:4]
                filepath = os.path.join(self.torrent_collecting_dir, hexlify(str2bin(infohash)) + u".torrent")

                # Check if we have the actual .torrent
                torrent_file_name = None
                if os.path.exists(filepath):
                    torrent_file_name = filepath
                    tdef = TorrentDef.load(filepath)
                    # Use the name on the .torrent file instead of the one stored in the database.
                    name = tdef.get_name_as_unicode() or name

                new_torrents.append((torrent_id, infohash, name, torrent_file_name) + torrent[4:])
                current_count += 1
                self.status_update_func(u"Upgrading database, %s records upgraded..." % current_count)

            self.status_update_func(u"All torrent entries processed, inserting in database...")
            self.db.executemany(insert_stmt, new_torrents)
            self.status_update_func(u"All updated torrent entries inserted.")

            # Swap the migrated table into place.
            self.db.execute(u"""
DROP TABLE IF EXISTS Torrent;
ALTER TABLE _tmp_Torrent RENAME TO Torrent;
""")

    # cleanup metadata tables
    self.db.execute(u"""
DROP TABLE IF EXISTS MetadataMessage;
DROP TABLE IF EXISTS MetadataData;

CREATE TABLE IF NOT EXISTS MetadataMessage (
  message_id             INTEGER PRIMARY KEY AUTOINCREMENT,
  dispersy_id            INTEGER NOT NULL,
  this_global_time       INTEGER NOT NULL,
  this_mid               TEXT NOT NULL,
  infohash               TEXT NOT NULL,
  previous_mid           TEXT,
  previous_global_time   INTEGER
);

CREATE TABLE IF NOT EXISTS MetadataData (
  message_id  INTEGER,
  data_key    TEXT NOT NULL,
  data_value  INTEGER,
  FOREIGN KEY (message_id) REFERENCES MetadataMessage(message_id) ON DELETE CASCADE
);
""")

    # cleanup all SearchCommunity and MetadataCommunity data in dispersy database
    self._purge_old_search_metadata_communities()

    # update database version
    self.db.write_version(23)
def addTorrent(self):
    """Insert two fixture torrents (single-file and multi-file) via
    addExternalTorrent and verify every derived DB field: table sizes,
    names, lengths, source, status, comment and trackers."""
    old_size = self.tdb.size()
    old_src_size = self.tdb._db.size('TorrentSource')
    old_tracker_size = self.tdb._db.size('TorrentTracker')

    s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
    m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')

    sid = self.tdb._db.getTorrentID(s_infohash)
    mid = self.tdb._db.getTorrentID(m_infohash)

    # Copy the backup fixtures into the state dir so they can be loaded.
    single_torrent_file_path = os.path.join(self.getStateDir(), 'single.torrent')
    multiple_torrent_file_path = os.path.join(self.getStateDir(), 'multiple.torrent')

    copyFile(S_TORRENT_PATH_BACKUP, single_torrent_file_path)
    copyFile(M_TORRENT_PATH_BACKUP, multiple_torrent_file_path)

    single_tdef = TorrentDef.load(single_torrent_file_path)
    assert s_infohash == single_tdef.get_infohash()
    src = 'http://www.rss.com/torrent.xml'
    multiple_tdef = TorrentDef.load(multiple_torrent_file_path)
    assert m_infohash == multiple_tdef.get_infohash()

    self.tdb.addExternalTorrent(single_tdef, extra_info={'filename': single_torrent_file_path})
    self.tdb.addExternalTorrent(multiple_tdef, source=src, extra_info={'filename': multiple_torrent_file_path})

    single_torrent_id = self.tdb._db.getTorrentID(s_infohash)
    multiple_torrent_id = self.tdb._db.getTorrentID(m_infohash)

    assert self.tdb.getInfohash(single_torrent_id) == s_infohash

    single_name = 'Tribler_4.1.7_src.zip'
    multiple_name = 'Tribler_4.1.7_src'

    # Both inserts landed; one new source and two new trackers recorded.
    assert self.tdb.size() == old_size + 2, old_size - self.tdb.size()
    assert old_src_size + 1 == self.tdb._db.size('TorrentSource')
    assert old_tracker_size + 2 == self.tdb._db.size('TorrentTracker'), self.tdb._db.size('TorrentTracker') - old_tracker_size

    sname = self.tdb.getOne('name', torrent_id=single_torrent_id)
    assert sname == single_name, (sname, single_name)
    mname = self.tdb.getOne('name', torrent_id=multiple_torrent_id)
    assert mname == multiple_name, (mname, multiple_name)

    s_size = self.tdb.getOne('length', torrent_id=single_torrent_id)
    assert s_size == 1583233, s_size
    m_size = self.tdb.getOne('length', torrent_id=multiple_torrent_id)
    assert m_size == 5358560, m_size

    # TODO: action is flagged as XXX causing this torrent to be XXX instead of other
    cat = self.tdb.getOne('category_id', torrent_id=multiple_torrent_id)
    # assert cat == 8, cat # other

    # Source bookkeeping: the RSS source got its own id; the single torrent
    # keeps the default source id 1.
    sid = self.tdb._db.getOne('TorrentSource', 'source_id', name=src)
    assert sid > 1
    m_sid = self.tdb.getOne('source_id', torrent_id=multiple_torrent_id)
    assert sid == m_sid
    s_sid = self.tdb.getOne('source_id', torrent_id=single_torrent_id)
    assert 1 == s_sid
    s_status = self.tdb.getOne('status_id', torrent_id=single_torrent_id)
    assert s_status == 0

    m_comment = self.tdb.getOne('comment', torrent_id=multiple_torrent_id)
    comments = 'www.tribler.org'
    assert m_comment.find(comments) > -1
    comments = 'something not inside'
    assert m_comment.find(comments) == -1

    m_trackers = self.tdb.getTracker(m_infohash, 0)  # db._db.getAll('TorrentTracker', 'tracker', 'torrent_id=%d'%multiple_torrent_id)
    assert len(m_trackers) == 1
    assert ('http://tpb.tracker.thepiratebay.org/announce', 1) in m_trackers, m_trackers

    s_torrent = self.tdb.getTorrent(s_infohash)
    m_torrent = self.tdb.getTorrent(m_infohash)
    assert s_torrent['name'] == 'Tribler_4.1.7_src.zip', s_torrent['name']
    assert m_torrent['name'] == 'Tribler_4.1.7_src', m_torrent['name']
    assert m_torrent['last_check_time'] == 0
def fake_get_metainfo(_, callback, timeout=10, timeout_callback=None, notify=True):
    """Test stub: synchronously deliver the Ubuntu fixture's metainfo to
    *callback*, ignoring timeout/notify parameters."""
    ubuntu_metainfo = TorrentDef.load(TORRENT_UBUNTU_FILE).get_metainfo()
    callback(ubuntu_metainfo)
def __init__(self, *argv, **kwargs):
    """Set up channel-boosting test fixtures shared by the test cases."""
    super(TestBoostingManagerSysChannel, self).__init__(*argv, **kwargs)
    # Fixture torrent, plus placeholders the individual tests fill in.
    self.tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
    self.channel_id = 0
    self.expected_votecast_cid = None
    self.expected_votecast_vote = None
def verify_method_invocation(channel_id, torrent_def, extra_info=None, forward=True):
    # Mock stand-in: assert the arguments the production call is expected
    # to be invoked with (our channel id, the Ubuntu fixture tdef, empty
    # extra info and forwarding enabled).
    self.assertEqual(my_channel_id, channel_id)
    self.assertEqual(TorrentDef.load(TORRENT_UBUNTU_FILE), torrent_def)
    self.assertEqual({}, extra_info or {})
    self.assertEqual(True, forward)
def test_chn_max_torrents(self): """ Test the restriction of max_torrents in a source. """ # create channel and insert torrent self.create_fake_allchannel_community() self.create_torrents_in_channel(self.dispersy_cid_hex) pioneer_file = os.path.join( TESTS_DATA_DIR, "Pioneer.One.S01E06.720p.x264-VODO.torrent") pioneer_tdef = TorrentDef.load(pioneer_file) pioneer_ihash = binascii.unhexlify( "66ED7F30E3B30FA647ABAA19A36E7503AA071535") torrent_list = [[ self.channel_id, 1, 1, pioneer_ihash, 1460000001, pioneer_file, pioneer_tdef.get_files_with_length(), pioneer_tdef.get_trackers_as_single_tuple() ]] self.insert_torrents_into_channel(torrent_list) self.boosting_manager.add_source(self.dispersy_cid) chn_obj = self.boosting_manager.get_source_object(self.dispersy_cid) chn_obj.max_torrents = 2 chn_obj._load_torrent = lambda _: defer.Deferred() def _load(infohash): defer_ret = defer.Deferred() defer_ret.callback(self.tdef if binascii.hexlify(infohash). startswith("fc") else pioneer_tdef) return defer_ret def activate_mgr(): """ activate ltmgr and adjust max torrents to emulate overflow torrents """ chn_obj.max_torrents = 1 chn_obj._load_torrent = _load reactor.callLater(5, activate_mgr) def check_torrents_channel(src, defer_param=None): """ check if a torrent already in channel and ready to download """ if defer_param is None: defer_param = defer.Deferred() src_obj = self.boosting_manager.get_source_object(src) success = True if len(src_obj.unavail_torrent) == 0: self.assertLessEqual(len(src_obj.torrents), src_obj.max_torrents) else: success = False reactor.callLater(1, check_torrents_channel, src, defer_param) if success: src_obj.community.cancel_all_pending_tasks() src_obj.kill_tasks() defer_param.callback(src) return defer_param d = self.check_source(self.dispersy_cid) d.addCallback(check_torrents_channel) return d
def resume_download(self, filename, initialdlstatus=None, initialdlstatus_dict={}, commit=True, setupDelay=0):
    """
    Resume a previously checkpointed download from its pstate file.

    Tries to rebuild the torrent/swift definition and download config from
    the saved pstate; on any failure it falls back to the collected .torrent
    file looked up by infohash, then re-adds the download (or removes the
    checkpoint when its destination dir is empty).

    NOTE(review): initialdlstatus_dict is a mutable default argument; it is
    only read via .get() here, but callers should not rely on it being fresh.
    """
    tdef = sdef = dscfg = pstate = None
    try:
        pstate = self.load_download_pstate(filename)
        # SWIFTPROC
        if SwiftDef.is_swift_url(pstate['metainfo']):
            sdef = SwiftDef.load_from_url(pstate['metainfo'])
        elif 'infohash' in pstate['metainfo']:
            tdef = TorrentDefNoMetainfo(pstate['metainfo']['infohash'],
                                        pstate['metainfo']['name'])
        else:
            tdef = TorrentDef.load_from_dict(pstate['metainfo'])

        dlconfig = pstate['dlconfig']
        # Older checkpoints stored 'saveas' as a tuple; keep only the path.
        if isinstance(dlconfig['saveas'], tuple):
            dlconfig['saveas'] = dlconfig['saveas'][-1]

        if sdef and 'name' in dlconfig and isinstance(
                dlconfig['name'], basestring):
            sdef.set_name(dlconfig['name'])
        # Rewrite a localhost swift tracker when the DHT listen port changed
        # since the checkpoint was written.
        if sdef and sdef.get_tracker().startswith("127.0.0.1:"):
            current_port = int(sdef.get_tracker().split(":")[1])
            if current_port != self.session.get_swift_dht_listen_port():
                print >> sys.stderr, "Modified SwiftDef to new tracker port"
                sdef.set_tracker("127.0.0.1:%d" %
                                 self.session.get_swift_dht_listen_port())

        dscfg = DownloadStartupConfig(dlconfig)

    except:
        # NOTE(review): bare except deliberately treats any failure as a
        # missing/corrupt pstate and falls back to the collected torrent.
        print_exc()
        # pstate is invalid or non-existing
        _, file = os.path.split(filename)

        # Checkpoint filenames end in a 7-char extension (".pickle"); the
        # remainder is the hex infohash -- TODO confirm naming convention.
        infohash = binascii.unhexlify(file[:-7])
        torrent = self.torrent_db.getTorrent(
            infohash,
            keys=['name', 'torrent_file_name', 'swift_torrent_hash'],
            include_mypref=False)
        torrentfile = None
        if torrent:
            torrent_dir = self.session.get_torrent_collecting_dir()

            if torrent['swift_torrent_hash']:
                sdef = SwiftDef(torrent['swift_torrent_hash'])
                save_name = sdef.get_roothash_as_hex()
                torrentfile = os.path.join(torrent_dir, save_name)

            if torrentfile and os.path.isfile(torrentfile):
                # normal torrentfile is not present, see if readable torrent is there
                save_name = get_readable_torrent_name(infohash,
                                                      torrent['name'])
                torrentfile = os.path.join(torrent_dir, save_name)

        if torrentfile and os.path.isfile(torrentfile):
            tdef = TorrentDef.load(torrentfile)

            defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
            dscfg = defaultDLConfig.copy()

            # Restore the user's previous destination dir if it still exists.
            if self.mypref_db != None:
                preferences = self.mypref_db.getMyPrefStatsInfohash(infohash)
                if preferences:
                    if os.path.isdir(preferences[2]) or preferences[2] == '':
                        dscfg.set_dest_dir(preferences[2])

    if DEBUG:
        # NOTE(review): if load_download_pstate failed above, pstate is None
        # and this debug print would raise -- presumably DEBUG is off then.
        print >> sys.stderr, "tlm: load_checkpoint: pstate is", dlstatus_strings[
            pstate['dlstate']['status']], pstate['dlstate']['progress']
        if pstate['engineresumedata'] is None:
            print >> sys.stderr, "tlm: load_checkpoint: resumedata None"
        else:
            print >> sys.stderr, "tlm: load_checkpoint: resumedata len", len(
                pstate['engineresumedata'])

    if (tdef or sdef) and dscfg:
        if dscfg.get_dest_dir() != '':  # removed torrent ignoring
            try:
                if not self.download_exists((tdef or sdef).get_id()):
                    if tdef:
                        initialdlstatus = initialdlstatus_dict.get(
                            tdef.get_id(), initialdlstatus)
                        self.add(tdef, dscfg, pstate, initialdlstatus,
                                 commit=commit, setupDelay=setupDelay)
                    else:
                        initialdlstatus = initialdlstatus_dict.get(
                            sdef.get_id(), initialdlstatus)
                        self.swift_add(sdef, dscfg, pstate, initialdlstatus)
                else:
                    print >> sys.stderr, "tlm: not resuming checkpoint because download has already been added"
            except Exception as e:
                self.rawserver_nonfatalerrorfunc(e)
        else:
            # Empty destination dir marks a removed torrent: drop the file.
            print >> sys.stderr, "tlm: removing checkpoint", filename, "destdir is", dscfg.get_dest_dir()
            os.remove(filename)
    else:
        print >> sys.stderr, "tlm: could not resume checkpoint", filename, tdef, dscfg
def verify_method_invocation(channel_id, torrent_def, extra_info=None, forward=True):
    """Assert the torrent was added to the expected channel with its metadata."""
    expected_tdef = TorrentDef.load(torrent_path)
    self.assertEqual(my_channel_id, channel_id)
    self.assertEqual(expected_tdef, torrent_def)
    # A missing extra_info is equivalent to an empty dict for this check.
    self.assertEqual({"description": "video of my cat"}, extra_info or {})
    self.assertEqual(True, forward)
def addTorrent(self):
    """Insert two collected torrents into the torrent DB and check the stored rows."""
    # Table sizes before insertion, to verify growth afterwards.
    old_size = self.tdb.size()
    old_tracker_size = self.tdb._db.size('TrackerInfo')
    s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
    m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')
    # Pre-insert ID lookups (results unused by the assertions below).
    sid = self.tdb.getTorrentID(s_infohash)
    mid = self.tdb.getTorrentID(m_infohash)

    # Copy the backup fixtures into the test state dir and load them.
    single_torrent_file_path = os.path.join(self.getStateDir(), 'single.torrent')
    multiple_torrent_file_path = os.path.join(self.getStateDir(), 'multiple.torrent')
    copyFile(S_TORRENT_PATH_BACKUP, single_torrent_file_path)
    copyFile(M_TORRENT_PATH_BACKUP, multiple_torrent_file_path)
    single_tdef = TorrentDef.load(single_torrent_file_path)
    assert s_infohash == single_tdef.get_infohash()
    multiple_tdef = TorrentDef.load(multiple_torrent_file_path)
    assert m_infohash == multiple_tdef.get_infohash()

    self.tdb.addExternalTorrent(single_tdef)
    self.tdb.addExternalTorrent(multiple_tdef)
    single_torrent_id = self.tdb.getTorrentID(s_infohash)
    multiple_torrent_id = self.tdb.getTorrentID(m_infohash)
    assert self.tdb.getInfohash(single_torrent_id) == s_infohash

    single_name = 'Tribler_4.1.7_src.zip'
    multiple_name = 'Tribler_4.1.7_src'
    # Exactly two new torrent rows, and the tracker table must have grown.
    assert self.tdb.size() == old_size + 2, old_size - self.tdb.size()
    new_tracker_table_size = self.tdb._db.size('TrackerInfo')
    assert old_tracker_size < new_tracker_table_size, new_tracker_table_size - old_tracker_size

    # Per-column checks on the stored metadata.
    sname = self.tdb.getOne('name', torrent_id=single_torrent_id)
    assert sname == single_name, (sname, single_name)
    mname = self.tdb.getOne('name', torrent_id=multiple_torrent_id)
    assert mname == multiple_name, (mname, multiple_name)
    s_size = self.tdb.getOne('length', torrent_id=single_torrent_id)
    assert s_size == 1583233, s_size
    m_size = self.tdb.getOne('length', torrent_id=multiple_torrent_id)
    assert m_size == 5358560, m_size
    cat = self.tdb.getOne('category', torrent_id=multiple_torrent_id)
    assert cat == u'xxx', cat
    s_status = self.tdb.getOne('status', torrent_id=single_torrent_id)
    assert s_status == u'unknown', s_status

    # The comment must contain the project URL and nothing unexpected.
    m_comment = self.tdb.getOne('comment', torrent_id=multiple_torrent_id)
    comments = 'www.tribler.org'
    assert m_comment.find(comments) > -1
    comments = 'something not inside'
    assert m_comment.find(comments) == -1

    m_trackers = self.tdb.getTrackerListByInfohash(m_infohash)
    assert len(m_trackers) == 8
    assert 'http://tpb.tracker.thepiratebay.org/announce' in m_trackers, m_trackers

    s_torrent = self.tdb.getTorrent(s_infohash)
    m_torrent = self.tdb.getTorrent(m_infohash)
    assert s_torrent['name'] == 'Tribler_4.1.7_src.zip', s_torrent['name']
    assert m_torrent['name'] == 'Tribler_4.1.7_src', m_torrent['name']
    assert m_torrent['last_tracker_check'] == 0
# Closed-swarm CLI argument handling: validate the torrent path, locate the
# key file, load both, and reject non-closed-swarm torrents.
torrent = fileargs[1]
if not os.path.exists(torrent):
    print "Error: Could not find torrent file '%s'"%torrent
    raise SystemExit(1)

# Default the key file to "<torrent>.tkey" next to the torrent file.
if not config['key_file']:
    config['key_file'] = torrent + ".tkey"
if not os.path.exists(config['key_file']):
    print "Error: Could not find key file '%s'"%config['key_file']
    raise SystemExit(1)

# Load the torrent file
try:
    t = TorrentDef.load(torrent)
except Exception,e:
    print "Bad torrent file:",e
    raise SystemExit(1)

# Only closed-swarm torrents carry CS keys.
if not t.get_cs_keys():
    print "Not a closed swarm torrent"
    raise SystemExit(1)

try:
    torrent_keypair = ClosedSwarm.read_cs_keypair(config['key_file'])
except Exception,e:
    print "Bad torrent key file",e
    raise SystemExit(1)

# Need permid of the receiving node
# NOTE(review): this fragment is truncated here; the body of the following
# conditional lies outside this chunk.
if not config['node_id']:
def lineReceived(self, line):
    """Dispatch one console command: threads / c(ircuits) / s(eed) / i(ntroduce) / d(ownload) / q(uit) / r(elays)."""
    anon_tunnel = self.anon_tunnel
    if line == 'threads':
        # Dump every live thread with its ident.
        for thread in threading.enumerate():
            logger.debug("%s \t %d", thread.name, thread.ident)
    elif line == 'c':
        # Print a table of all known circuits.
        logger.debug(
            "========\nCircuits\n========\nid\taddress\t\t\t\t\tgoal\thops\tIN (MB)\tOUT (MB)\tinfohash\ttype"
        )
        for circuit_id, circuit in anon_tunnel.community.circuits.items():
            info_hash = circuit.info_hash.encode(
                'hex')[:10] if circuit.info_hash else '?'
            # NOTE(review): the % operator binds only to circuit_id here, so
            # the remaining values are passed as logger args, not into the
            # format string -- looks like a latent formatting bug; confirm.
            logger.debug(
                "%d\t%s:%d\t%d\t%d\t\t%.2f\t\t%.2f\t\t%s\t%s" % circuit_id,
                circuit.first_hop[0], circuit.first_hop[1],
                circuit.goal_hops, len(circuit.hops),
                circuit.bytes_down / 1024.0 / 1024.0,
                circuit.bytes_up / 1024.0 / 1024.0, info_hash, circuit.ctype)
    elif line.startswith('s'):
        # Seed: create (or reload) a 50 MB test torrent and start seeding it.
        cur_path = os.getcwd()
        line_split = line.split(' ')
        filename = 'test_file' if len(line_split) == 1 else line_split[1]
        if not os.path.exists(filename):
            logger.info("Creating torrent..")
            with open(filename, 'wb') as fp:
                fp.write(os.urandom(50 * 1024 * 1024))
            tdef = TorrentDef()
            tdef.add_content(os.path.join(cur_path, filename))
            tdef.set_tracker("udp://localhost/announce")
            tdef.set_private()
            tdef.finalize()
            tdef.save(os.path.join(cur_path, filename + '.torrent'))
        else:
            logger.info("Loading existing torrent..")
            tdef = TorrentDef.load(filename + '.torrent')
        logger.info("loading torrent done, infohash of torrent: %s" %
                    (tdef.get_infohash().encode('hex')[:10]))

        defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
        dscfg = defaultDLConfig.copy()
        dscfg.set_hops(1)
        dscfg.set_dest_dir(cur_path)

        # Session calls must run on the reactor thread.
        reactor.callFromThread(
            anon_tunnel.session.start_download_from_tdef, tdef, dscfg)
    elif line.startswith('i'):
        # Introduce dispersy port from other main peer to this peer
        line_split = line.split(' ')
        to_introduce_ip = line_split[1]
        to_introduce_port = int(line_split[2])
        self.anon_tunnel.community.add_discovered_candidate(
            Candidate((to_introduce_ip, to_introduce_port), tunnel=False))
    elif line.startswith('d'):
        # Download: fetch the named torrent through one anonymizing hop.
        line_split = line.split(' ')
        filename = 'test_file' if len(line_split) == 1 else line_split[1]
        logger.info("Loading torrent..")
        tdef = TorrentDef.load(filename + '.torrent')
        logger.info("Loading torrent done")

        defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
        dscfg = defaultDLConfig.copy()
        dscfg.set_hops(1)
        dscfg.set_dest_dir(
            os.path.join(
                os.getcwd(),
                'downloader%s' % anon_tunnel.session.get_dispersy_port()))

        def start_download():
            # Progress callback logging speed/progress/status once a second.
            def cb(ds):
                logger.info(
                    'Download infohash=%s, down=%s, progress=%s, status=%s, seedpeers=%s, candidates=%d'
                    % (tdef.get_infohash().encode('hex')[:10],
                       ds.get_current_speed('down'), ds.get_progress(),
                       dlstatus_strings[ds.get_status()],
                       sum(ds.get_num_seeds_peers()),
                       sum(1 for _ in anon_tunnel.community.
                           dispersy_yield_verified_candidates())))
                return 1.0, False

            download = anon_tunnel.session.start_download_from_tdef(
                tdef, dscfg)
            download.set_state_callback(cb)

        reactor.callFromThread(start_download)
    elif line == 'q':
        # Signal the main loop to stop.
        anon_tunnel.should_run = False
    elif line == 'r':
        # Print the relay table with per-relay traffic.
        logger.debug("circuit\t\t\tdirection\tcircuit\t\t\tTraffic (MB)")
        from_to = anon_tunnel.community.relay_from_to
        for key in from_to.keys():
            relay = from_to[key]
            logger.info("%s-->\t%s\t\t%.2f" % (
                (key[0], key[1]),
                (relay.sock_addr, relay.circuit_id),
                relay.bytes[1] / 1024.0 / 1024.0,
            ))
def addTorrent(self):
    """Insert two externally collected torrents and verify the stored metadata."""
    # Table sizes before insertion, used to verify growth afterwards.
    size_before = self.tdb.size()
    tracker_rows_before = self.tdb._db.size('TrackerInfo')

    infohash_single = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
    infohash_multi = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')
    # Pre-insert ID lookups (results intentionally unused).
    _pre_single = self.tdb.getTorrentID(infohash_single)
    _pre_multi = self.tdb.getTorrentID(infohash_multi)

    # Copy the fixture torrents into the test state dir and load them.
    path_single = os.path.join(self.getStateDir(), 'single.torrent')
    path_multi = os.path.join(self.getStateDir(), 'multiple.torrent')
    copyfile(S_TORRENT_PATH_BACKUP, path_single)
    copyfile(M_TORRENT_PATH_BACKUP, path_multi)

    tdef_single = TorrentDef.load(path_single)
    self.assertEqual(infohash_single, tdef_single.get_infohash())
    tdef_multi = TorrentDef.load(path_multi)
    self.assertEqual(infohash_multi, tdef_multi.get_infohash())

    self.tdb.addExternalTorrent(tdef_single)
    self.tdb.addExternalTorrent(tdef_multi)
    id_single = self.tdb.getTorrentID(infohash_single)
    id_multi = self.tdb.getTorrentID(infohash_multi)
    self.assertEqual(self.tdb.getInfohash(id_single), infohash_single)

    # Exactly two new torrent rows, and the tracker table must have grown.
    self.assertEqual(self.tdb.size(), size_before + 2)
    new_tracker_table_size = self.tdb._db.size('TrackerInfo')
    self.assertLess(tracker_rows_before, new_tracker_table_size)

    # Per-column checks on the stored metadata of both torrents.
    self.assertEqual(self.tdb.getOne('name', torrent_id=id_single),
                     'Tribler_4.1.7_src.zip')
    self.assertEqual(self.tdb.getOne('name', torrent_id=id_multi),
                     'Tribler_4.1.7_src')
    self.assertEqual(self.tdb.getOne('length', torrent_id=id_single), 1583233)
    self.assertEqual(self.tdb.getOne('length', torrent_id=id_multi), 5358560)
    self.assertEqual(self.tdb.getOne('category', torrent_id=id_multi), u'xxx')
    self.assertEqual(self.tdb.getOne('status', torrent_id=id_single),
                     u'unknown')

    # The comment must contain the project URL but not arbitrary text.
    stored_comment = self.tdb.getOne('comment', torrent_id=id_multi)
    self.assertGreater(stored_comment.find('www.tribler.org'), -1)
    self.assertEqual(stored_comment.find('something not inside'), -1)

    trackers = self.tdb.getTrackerListByInfohash(infohash_multi)
    self.assertEqual(len(trackers), 8)
    self.assertIn('http://tpb.tracker.thepiratebay.org/announce', trackers)

    torrent_single = self.tdb.getTorrent(infohash_single)
    torrent_multi = self.tdb.getTorrent(infohash_multi)
    self.assertEqual(torrent_single['name'], 'Tribler_4.1.7_src.zip')
    self.assertEqual(torrent_multi['name'], 'Tribler_4.1.7_src')
    self.assertEqual(torrent_multi['last_tracker_check'], 0)
def lineReceived(self, line):
    """Dispatch one console command: threads / p,P,t (yappi profiling) / c / s / i / d / q / r."""
    anon_tunnel = self.anon_tunnel
    profile = self.profile
    if line == 'threads':
        # Dump every live thread with its ident.
        for thread in threading.enumerate():
            print "%s \t %d" % (thread.name, thread.ident)
    elif line == 'p':
        # Top-50 functions by own-time from the yappi profiler.
        if profile:
            for func_stats in yappi.get_func_stats().sort("subtime")[:50]:
                print "YAPPI: %10dx %10.3fs" % (
                    func_stats.ncall, func_stats.tsub), func_stats.name
        else:
            logger.error("Profiling disabled!")
    elif line == 'P':
        # Save profiler stats in callgrind format, keyed by our LAN port.
        if profile:
            filename = 'callgrindc_%d.yappi' % anon_tunnel.dispersy.lan_address[
                1]
            yappi.get_func_stats().save(filename, type='callgrind')
        else:
            logger.error("Profiling disabled!")
    elif line == 't':
        # Per-thread profiling totals.
        if profile:
            yappi.get_thread_stats().sort("totaltime").print_all()
        else:
            logger.error("Profiling disabled!")
    elif line == 'c':
        # Print a table of all known circuits.
        print "========\nCircuits\n========\nid\taddress\t\t\t\t\tgoal\thops\tIN (MB)\tOUT (MB)\tinfohash\ttype"
        for circuit_id, circuit in anon_tunnel.community.circuits.items():
            info_hash = circuit.info_hash.encode(
                'hex')[:10] if circuit.info_hash else '?'
            print "%d\t%s:%d\t%d\t%d\t\t%.2f\t\t%.2f\t\t%s\t%s" % (
                circuit_id, circuit.first_hop[0], circuit.first_hop[1],
                circuit.goal_hops, len(circuit.hops),
                circuit.bytes_down / 1024.0 / 1024.0,
                circuit.bytes_up / 1024.0 / 1024.0, info_hash, circuit.ctype)
    elif line.startswith('s'):
        # Seed: create (or reload) a 50 MB test torrent and start seeding it.
        cur_path = os.getcwd()
        line_split = line.split(' ')
        filename = 'test_file' if len(line_split) == 1 else line_split[1]
        if not os.path.exists(filename):
            logger.info("Creating torrent..")
            with open(filename, 'wb') as fp:
                fp.write(os.urandom(50 * 1024 * 1024))
            tdef = TorrentDef()
            tdef.add_content(os.path.join(cur_path, filename))
            tdef.set_tracker("udp://fake.net/announce")
            tdef.set_private()
            tdef.finalize()
            tdef.save(os.path.join(cur_path, filename + '.torrent'))
        else:
            logger.info("Loading existing torrent..")
            tdef = TorrentDef.load(filename + '.torrent')
        logger.info("loading torrent done, infohash of torrent: %s" %
                    (tdef.get_infohash().encode('hex')[:10]))

        defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
        dscfg = defaultDLConfig.copy()
        dscfg.set_hops(1)
        dscfg.set_dest_dir(cur_path)

        # Schedule on the session's own threadpool (not this console thread).
        anon_tunnel.session.lm.threadpool.call(
            0, anon_tunnel.session.start_download, tdef, dscfg)
    elif line.startswith('i'):
        # Introduce dispersy port from other main peer to this peer
        line_split = line.split(' ')
        to_introduce_ip = line_split[1]
        to_introduce_port = int(line_split[2])
        self.anon_tunnel.community.add_discovered_candidate(
            Candidate((to_introduce_ip, to_introduce_port), tunnel=False))
    elif line.startswith('d'):
        # Download: fetch the named torrent through one anonymizing hop.
        line_split = line.split(' ')
        filename = 'test_file' if len(line_split) == 1 else line_split[1]
        logger.info("Loading torrent..")
        tdef = TorrentDef.load(filename + '.torrent')
        logger.info("Loading torrent done")

        defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
        dscfg = defaultDLConfig.copy()
        dscfg.set_hops(1)
        dscfg.set_dest_dir(
            os.path.join(
                os.getcwd(),
                'downloader%s' % anon_tunnel.session.get_dispersy_port()))

        def start_download():
            # Progress callback logging speed/progress/status once a second.
            def cb(ds):
                logger.info(
                    'Download infohash=%s, down=%s, progress=%s, status=%s, seedpeers=%s, candidates=%d'
                    % (tdef.get_infohash().encode('hex')[:10],
                       ds.get_current_speed('down'), ds.get_progress(),
                       dlstatus_strings[ds.get_status()],
                       sum(ds.get_num_seeds_peers()),
                       sum(1 for _ in anon_tunnel.community.
                           dispersy_yield_verified_candidates())))
                return 1.0, False

            download = anon_tunnel.session.start_download(tdef, dscfg)
            download.set_state_callback(cb, delay=1)

        anon_tunnel.session.lm.threadpool.call(0, start_download)
    elif line == 'q':
        # Quit: tear the tunnel down and stop processing commands.
        anon_tunnel.stop()
        return
    elif line == 'r':
        # Print the relay table with per-relay traffic.
        print "circuit\t\t\tdirection\tcircuit\t\t\tTraffic (MB)"
        from_to = anon_tunnel.community.relay_from_to
        for key in from_to.keys():
            relay = from_to[key]
            logger.info("%s-->\t%s\t\t%.2f" % (
                (key[0], key[1]),
                (relay.sock_addr, relay.circuit_id),
                relay.bytes[1] / 1024.0 / 1024.0,
            ))
def _on_load(torrent_def):
    """Verify the loaded torrent matches the reference ubuntu .torrent file.

    Compares both the metainfo and the infohash of the supplied
    ``torrent_def`` against a freshly loaded reference TorrentDef.
    """
    # Load the reference once instead of twice -- TorrentDef.load parses
    # the whole .torrent file on every call.
    expected = TorrentDef.load(TORRENT_UBUNTU_FILE)
    self.assertEqual(torrent_def.get_metainfo(), expected.get_metainfo())
    self.assertEqual(torrent_def.infohash, expected.infohash)
def callback():
    """Load the collected .torrent file and hand the TorrentDef to the GUI."""
    collected_path = torrentsearch_manager.getCollectedFilename(torrent)
    loaded_tdef = TorrentDef.load(collected_path)
    # GUI state must be updated on the wx main thread.
    wx.CallAfter(self.SetCollected, loaded_tdef)