def start_torrent(self, torrent):
    """Start downloading *torrent* in supporter mode (no local playback)."""
    torrent_def = TorrentDef.load(torrent)
    # Make sure the destination directory exists before the download starts.
    if not os.access(self._directory, os.F_OK):
        os.makedirs(self._directory)
    download_cfg = DownloadStartupConfig()
    download_cfg.set_dest_dir(self._directory)
    download_cfg.set_video_events([simpledefs.VODEVENT_START,
                                   simpledefs.VODEVENT_PAUSE,
                                   simpledefs.VODEVENT_RESUME])
    download_cfg.set_max_speed(simpledefs.DOWNLOAD, self._max_dl_rate)
    download_cfg.set_max_speed(simpledefs.UPLOAD, self._max_ul_rate)
    download_cfg.set_peer_type("S")
    #download_cfg.set_video_event_callback(self.video_callback) # supporter should not play the files !
    download = self._session.start_download(torrent_def, download_cfg)
    download.set_state_callback(self.state_callback)
    time.sleep(1)  # give the download some time to fully initialize
    download.sd.dow.choker.set_supporter_server(True)
    self._tracker_url = torrent_def.get_tracker()[:torrent_def.get_tracker().find("announce")]
    self._id = download.sd.peerid
    self._choke_objects.append(download.sd.dow.choker)
def addTorrentToDB(self, filename, torrent_hash, metadata, source='BC', extra_info=None, hack=False):
    """ Arno: no need to delegate to olbridge, this is already run by OverlayThread """
    # Fixed: the default for extra_info used to be a mutable {} shared across
    # calls and mutated below (the 'filename' key), leaking state between
    # invocations.  A None sentinel is backward compatible for all callers.
    if extra_info is None:
        extra_info = {}
    # 03/02/10 Boudewijn: addExternalTorrent now requires a
    # torrentdef, consequently we provide the filename through the
    # extra_info dictionary
    torrentdef = TorrentDef.load(filename)
    if not 'filename' in extra_info:
        extra_info['filename'] = filename
    torrent = self.torrent_db.addExternalTorrent(torrentdef, source, extra_info)
    if torrent is None:
        return
    # Arno, 2008-10-20: XXX torrents are filtered out in the final display stage
    self.launchmany.set_activity(NTFY_ACT_GOT_METADATA, unicode('"' + torrent['name'] + '"'), torrent['category'])
    if self.initialized:
        self.num_torrents += 1  # for free disk limitation
        # Only probe the tracker when the caller supplied no extra info at
        # all; with the default path extra_info now holds 'filename'.
        if not extra_info:
            self.refreshTrackerStatus(torrent)
        # Keep a bounded FIFO (50 entries) of recently collected infohashes.
        if len(self.recently_collected_torrents) < 50:  # Queue of 50
            self.recently_collected_torrents.append(torrent_hash)
        else:
            self.recently_collected_torrents.pop(0)
            self.recently_collected_torrents.append(torrent_hash)
def testRetrieveLocalPeersForActiveTorrents(self):
    # Checks that the websocket client can request the local peers of an
    # active torrent without the call raising.
    tdef = TorrentDef.load("torrents/ubuntu.torrent")
    infohash_hex = common_utils.get_id(tdef)
    torrent_dict = self.wsclient.retrieve_local_peers_for_active_torrents(list_of_torrent_ids=[infohash_hex],
                                                                          maximum_number_of_peers=10,
                                                                          include_seeds=False)
    # NOTE(review): len(...) >= 0 is always true, so this only verifies that
    # the call completed -- consider asserting on actual content.
    assert len(torrent_dict) >= 0, torrent_dict
def start_torrent(self, torrent):
    """Begin supporting the given torrent file as a seeding helper peer."""
    tdef = TorrentDef.load(torrent)
    if not os.access(self._directory, os.F_OK):
        os.makedirs(self._directory)
    dscfg = DownloadStartupConfig()
    dscfg.set_dest_dir(self._directory)
    vod_events = [simpledefs.VODEVENT_START,
                  simpledefs.VODEVENT_PAUSE,
                  simpledefs.VODEVENT_RESUME]
    dscfg.set_video_events(vod_events)
    dscfg.set_max_speed(simpledefs.DOWNLOAD, self._max_dl_rate)
    dscfg.set_max_speed(simpledefs.UPLOAD, self._max_ul_rate)
    dscfg.set_peer_type("S")
    #dscfg.set_video_event_callback(self.video_callback) # supporter should not play the files !
    d = self._session.start_download(tdef, dscfg)
    d.set_state_callback(self.state_callback)
    time.sleep(1)  # give the download some time to fully initialize
    d.sd.dow.choker.set_supporter_server(True)
    announce = tdef.get_tracker()
    # Keep only the tracker base URL, i.e. everything before "announce".
    self._tracker_url = announce[:announce.find("announce")]
    self._id = d.sd.peerid
    self._choke_objects.append(d.sd.dow.choker)
def main():
    """Run the scenario: spawn a tracker, a seeder, and periodically spawn
    VOD client processes for the duration of the experiment."""
    global __TORRENT__, __TORRENT_NEW__, __DOWNLOAD__, active_procs
    if len(sys.argv[1:]) != 1:
        print >>sys.stderr, 'Usage: %s <full download>' % sys.argv[0]
        sys.exit(1)
    __DOWNLOAD__ = sys.argv[1]
    __TORRENT__ = FileUtils.get_relative_filename(__DOWNLOAD__) + constants.TORRENT_DOWNLOAD_EXT
    __TORRENT_NEW__ = __TORRENT__ + '-new'
    check_dependencies()
    prepare_scenario()
    start_ts = time.time()
    # Pretend a client was spawned 30s ago so the first one starts immediately.
    last_vodclient_spawned = time.time() - 30.0
    print >>sys.stderr, 'Spawning tracker process...'
    spawn_tracker()
    time.sleep(10)
    print >>sys.stderr, 'Fetching torrent from tracker...'
    shutil.copyfile(os.path.join('basic_scenario_tracker', __TORRENT__), __TORRENT__)
    print >>sys.stderr, 'Adding playtime information to %s' % __TORRENT__
    add_playtime_to_torrent(__TORRENT__)
    tdef = TorrentDef.load(__TORRENT_NEW__)
    bitrate = tdef.get_bitrate()
    print >>sys.stderr, 'Bitrate is: %i' % bitrate
    print >>sys.stderr, 'Spawning seeder process...'
    spawn_headless(bitrate * 4)
    FINISH_TIME = 90  # experiment duration in seconds
    count = 0
    while (time.time() - start_ts <= FINISH_TIME):
        if count == 10:
            count = 0
            print >>sys.stderr, 'Scenario progress ' + str(time.time() - start_ts)
        count += 1
        # remove finished vodclient processes
        # Fixed: identity comparison with None uses "is", not "==".
        active_procs = [p for p in active_procs if p.poll() is None]
        # check if we have to spawn a new vodclient (max 2, one every 30s)
        ts = time.time()
        if ts - last_vodclient_spawned >= 30.0 and len(active_procs) < 2:
            # Fixed: progress used to be ts/start_ts*100 -- a ratio of two
            # absolute epoch timestamps that is always ~100%.  Report elapsed
            # time as a percentage of the experiment duration instead.
            print >>sys.stderr, '%s: Spawning new vod client... experiment progress is %f' % (str(ts), (ts - start_ts) / FINISH_TIME * 100)
            spawn_vodclient(bitrate / 4, bitrate)
            last_vodclient_spawned = ts
        time.sleep(1.0)
    print >>sys.stderr, 'Emulation finished, finish and exit...'
    cleanup()
    os._exit(0)
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)
    # Start a download of a non-functioning torrent so a regular download
    # engine is running and the test can talk to it.
    self.torrentfn = os.path.join('extend_hs_dir', 'dummydata.merkle.torrent')
    torrent_def = TorrentDef.load(self.torrentfn)
    download_cfg = self.setUpDownloadConfig()
    self.session.start_download(torrent_def, download_cfg)
    # Infohash of the torrent in test/extend_hs_dir
    self.infohash = '\xccg\x07\xe2\x9e!]\x16\xae{\xb8\x10?\xf9\xa5\xf9\x07\xfdBk'
    self.mylistenport = 4810
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)
    # Kick off a download of a dummy (non-functioning) torrent so that the
    # test has a normal download engine to communicate with.
    self.torrentfn = os.path.join('extend_hs_dir', 'dummydata.merkle.torrent')
    definition = TorrentDef.load(self.torrentfn)
    cfg = self.setUpDownloadConfig()
    self.session.start_download(definition, cfg)
    # This is the infohash of the torrent in test/extend_hs_dir
    self.infohash = '\xccg\x07\xe2\x9e!]\x16\xae{\xb8\x10?\xf9\xa5\xf9\x07\xfdBk'
    self.mylistenport = 4810
def get_torrents(folder):
    '''
    Returns the dictionary {infohash : (torrent_definition, file_name)} for
    all torrent files in the given directory.
    '''
    # Fixed: locals previously shadowed the builtins 'file' and 'id'.
    torrent_files = []
    for entry in os.listdir(folder):
        name = str(entry)
        # endswith accepts a tuple of suffixes -- one call for both extensions.
        if name.endswith((constants.TORRENT_DOWNLOAD_EXT, constants.TORRENT_VOD_EXT)):
            torrent_files.append(os.path.join(folder, name))
    torrents = dict()
    # Deterministic order: process files sorted by path.
    for path in sorted(torrent_files):
        tdef = TorrentDef.load(path)
        torrents[get_id(tdef)] = (tdef, path)
    return torrents
def search_torrents(self, kws, maxhits=None, sendtorrents=False): if DEBUG: print >> sys.stderr, "rquery: search for torrents matching", ` kws ` allhits = self.torrent_db.searchNames(kws, local=False) print >> sys.stderr, "rquery: got matches", ` allhits ` if maxhits is None: hits = allhits else: hits = allhits[:maxhits] colltorrdir = self.session.get_torrent_collecting_dir() if sendtorrents: print >> sys.stderr, "rqmh: search_torrents: adding torrents" for hit in hits: filename = os.path.join(colltorrdir, hit['torrent_file_name']) try: tdef = TorrentDef.load(filename) if tdef.get_url_compat(): metatype = URL_MIME_TYPE metadata = tdef.get_url() else: metatype = TSTREAM_MIME_TYPE metadata = bencode(tdef.get_metainfo()) except: print_exc() metadata = None hit['metatype'] = metatype hit['metadata'] = metadata # Filter out hits for which we could not read torrent file (rare) newhits = [] for hit in hits: if hit['metadata'] is not None: newhits.append(hit) hits = newhits return hits
def search_torrents(self,kws,maxhits=None,sendtorrents=False): if DEBUG: print >>sys.stderr,"rquery: search for torrents matching",`kws` allhits = self.torrent_db.searchNames(kws,local=False) print >>sys.stderr,"rquery: got matches",`allhits` if maxhits is None: hits = allhits else: hits = allhits[:maxhits] colltorrdir = self.session.get_torrent_collecting_dir() if sendtorrents: print >>sys.stderr,"rqmh: search_torrents: adding torrents" for hit in hits: filename = os.path.join(colltorrdir,hit['torrent_file_name']) try: tdef = TorrentDef.load(filename) if tdef.get_url_compat(): metatype = URL_MIME_TYPE metadata = tdef.get_url() else: metatype = TSTREAM_MIME_TYPE metadata = bencode(tdef.get_metainfo()) except: print_exc() metadata = None hit['metatype'] = metatype hit['metadata'] = metadata # Filter out hits for which we could not read torrent file (rare) newhits = [] for hit in hits: if hit['metadata'] is not None: newhits.append(hit) hits = newhits return hits
def test_tdef_has_video_files_expected_true(self):
    """A .tstream torrent containing a video must be detected as such."""
    try:
        tdef = TorrentDef.load("torrents/Locality-Demo.mp4.tstream")
    except Exception:
        # Only loading errors should be reported as such.
        self.fail("There was some error while loading the torrent file.")
    else:
        # Fixed: the assertion used to sit inside the bare try/except, so an
        # AssertionError from assertTrue was swallowed and misreported as a
        # loading failure.
        self.assertTrue(has_torrent_video_files(tdef))
# --- session / torrent setup -------------------------------------------------
scfg = SessionStartupConfig()
scfg.set_state_dir(tempfile.mkdtemp())
scfg.set_listen_port(options.port)
# Disable all optional Tribler subsystems: this client only downloads.
scfg.set_overlay(False)
scfg.set_megacache(False)
scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
scfg.set_dialback(False)
scfg.set_social_networking(False)
scfg.set_buddycast(False)
scfg.set_crawler(False)
scfg.set_internal_tracker(False)
s = Session(scfg)

tdef = TorrentDef.load(__TORRENT_FILE__)

# tdef.get_tracker() returns the announce-url; we must omit the "announce" part
announce_url = tdef.get_tracker()
tracker_url = announce_url[:announce_url.find("announce")]

# Fixed: identity comparison with None uses "is", not "==".
if tdef.get_bitrate() is None:
    print >>sys.stderr, "Provided torrent file has no bitrate information. Exiting."
    sys.exit(1)

BITRATE = tdef.get_bitrate()
print >>sys.stderr, "Calculated bitrate is %d" % BITRATE
# Duration in seconds = total length / bitrate.
client_stats['video_duration'] = int(tdef.get_length() / BITRATE)

if not os.access(options.directory, os.F_OK):
    os.makedirs(options.directory)

dscfg = DownloadStartupConfig()
torrent = fileargs[1] if not os.path.exists(torrent): print "Error: Could not find torrent file '%s'"%torrent raise SystemExit(1) if not config['key_file']: config['key_file'] = torrent + ".tkey" if not os.path.exists(config['key_file']): print "Error: Could not find key file '%s'"%config['key_file'] raise SystemExit(1) # Load the torrent file try: t = TorrentDef.load(torrent) except Exception,e: print "Bad torrent file:",e raise SystemExit(1) if not t.get_cs_keys(): print "Not a closed swarm torrent" raise SystemExit(1) try: torrent_keypair = ClosedSwarm.read_cs_keypair(config['key_file']) except Exception,e: print "Bad torrent key file",e raise SystemExit(1) # Need permid of the receiving node if not config['node_id']:
def parsedir(directory, parsed, files, blocked, exts=['.torrent', TRIBLER_TORRENT_EXT], return_metainfo=False, errfunc=_errfunc):
    """
    Scan *directory* (recursing into subdirectories that contain no torrents
    themselves) and reconcile the torrents found with the previous scan state.

    parsed  -- {infohash: summary-dict} from the previous scan
    files   -- {path: [(mtime, size), infohash]}; infohash is 0 when the
               file has not been successfully parsed
    blocked -- {path: 1} for files previously marked broken or duplicate
    exts    -- filename extensions treated as torrent files
    return_metainfo -- when True, include the full metainfo dict per entry
    errfunc -- callback used for progress and warning messages

    Returns (new_parsed, new_files, new_blocked, added, removed).
    NOTE(review): the default for 'exts' is a mutable list; harmless here
    because it is never mutated, but worth confirming for all callers.
    """
    if DEBUG:
        errfunc('checking dir')
    dirs_to_check = [directory]
    new_files = {}      # path -> [(mtime, size), infohash-or-0]
    new_blocked = {}    # path -> 1 for broken/duplicate files seen this scan
    torrent_type = {}   # path -> matched extension without the leading dot
    while dirs_to_check:
        # first, recurse directories and gather torrents
        directory = dirs_to_check.pop()
        newtorrents = False
        for f in os.listdir(directory):
            newtorrent = None
            for ext in exts:
                if f.endswith(ext):
                    newtorrent = ext[1:]  # strip the leading '.'
                    break
            if newtorrent:
                newtorrents = True
                p = os.path.join(directory, f)
                # Record (mtime, size) so unchanged files can be skipped on
                # the next scan; infohash starts at 0 (not yet parsed).
                new_files[p] = [(int(os.path.getmtime(p)), os.path.getsize(p)), 0]
                torrent_type[p] = newtorrent
        if not newtorrents:
            # No torrents in this directory: descend into subdirectories.
            for f in os.listdir(directory):
                p = os.path.join(directory, f)
                if os.path.isdir(p):
                    dirs_to_check.append(p)
    new_parsed = {}
    to_add = []
    added = {}
    removed = {}
    # files[path] = [(modification_time, size), hash], hash is 0 if the file
    # has not been successfully parsed
    for p, v in new_files.items():
        # re-add old items and check for changes
        oldval = files.get(p)
        if not oldval:
            # new file
            to_add.append(p)
            continue
        h = oldval[1]
        if oldval[0] == v[0]:
            # file is unchanged from last parse
            if h:
                if blocked.has_key(p):
                    # parseable + blocked means duplicate
                    to_add.append(p)  # other duplicate may have gone away
                else:
                    new_parsed[h] = parsed[h]
                new_files[p] = oldval
            else:
                new_blocked[p] = 1  # same broken unparseable file
            continue
        # File changed since the last scan: drop the old entry (if any)
        # and schedule a re-parse below.
        if parsed.has_key(h) and not blocked.has_key(p):
            if DEBUG:
                errfunc('removing ' + p + ' (will re-add)')
            removed[h] = parsed[h]
        to_add.append(p)
    to_add.sort()
    for p in to_add:
        # then, parse new and changed torrents
        new_file = new_files[p]
        v, h = new_file
        if new_parsed.has_key(h):
            # duplicate
            if not blocked.has_key(p) or files[p][0] != v:
                errfunc('**warning** ' + p + ' is a duplicate torrent for ' + new_parsed[h]['path'])
            new_blocked[p] = 1
            continue
        if DEBUG:
            errfunc('adding ' + p)
        try:
            # Arno: P2PURL
            tdef = TorrentDef.load(p)
            h = tdef.get_infohash()
            d = tdef.get_metainfo()
            new_file[1] = h
            # Re-check for duplicates now that the real infohash is known.
            if new_parsed.has_key(h):
                errfunc('**warning** ' + p + ' is a duplicate torrent for ' + new_parsed[h]['path'])
                new_blocked[p] = 1
                continue
            # Build the summary dict for this torrent.
            a = {}
            a['path'] = p
            f = os.path.basename(p)
            a['file'] = f
            a['type'] = torrent_type[p]
            if tdef.get_url_compat():
                a['url'] = tdef.get_url()
            i = d['info']
            # Total length and file count: single-file torrents carry
            # 'length', multi-file torrents a 'files' list.
            l = 0
            nf = 0
            if i.has_key('length'):
                l = i.get('length', 0)
                nf = 1
            elif i.has_key('files'):
                for li in i['files']:
                    nf += 1
                    if li.has_key('length'):
                        l += li['length']
            a['numfiles'] = nf
            a['length'] = l
            a['name'] = i.get('name', f)
            # Copy selected optional metainfo keys verbatim; the default
            # arguments bind the current d/a for this iteration.
            def setkey(k, d=d, a=a):
                if d.has_key(k):
                    a[k] = d[k]
            setkey('failure reason')
            setkey('warning message')
            setkey('announce-list')
            if return_metainfo:
                a['metainfo'] = d
        except:
            # Any parse error marks the file as blocked for this scan.
            errfunc('**warning** ' + p + ' has errors')
            new_blocked[p] = 1
            continue
        if DEBUG:
            errfunc('... successful')
        new_parsed[h] = a
        added[h] = a
    for p, v in files.items():
        # and finally, mark removed torrents
        if not new_files.has_key(p) and not blocked.has_key(p):
            if DEBUG:
                errfunc('removing ' + p)
            removed[v[1]] = parsed[v[1]]
    if DEBUG:
        errfunc('done checking')
    return (new_parsed, new_files, new_blocked, added, removed)
def parsedir(directory, parsed, files, blocked, exts = ['.torrent', TRIBLER_TORRENT_EXT], return_metainfo = False, errfunc = _errfunc):
    """
    Scan *directory* (recursing into subdirectories that contain no torrents
    themselves) and reconcile the torrents found with the previous scan state.
    This variant additionally computes a 'url-hash-list' of hashed HTTP seed
    URLs for each torrent (see the LOOKUP SERVICE section below).

    parsed  -- {infohash: summary-dict} from the previous scan
    files   -- {path: [(mtime, size), infohash]}; infohash is 0 when the
               file has not been successfully parsed
    blocked -- {path: 1} for files previously marked broken or duplicate
    exts    -- filename extensions treated as torrent files
    return_metainfo -- when True, include the full metainfo dict per entry
    errfunc -- callback used for progress and warning messages

    Returns (new_parsed, new_files, new_blocked, added, removed).
    NOTE(review): the default for 'exts' is a mutable list; harmless here
    because it is never mutated, but worth confirming for all callers.
    """
    if DEBUG:
        errfunc('checking dir')
    dirs_to_check = [directory]
    new_files = {}      # path -> [(mtime, size), infohash-or-0]
    new_blocked = {}    # path -> 1 for broken/duplicate files seen this scan
    torrent_type = {}   # path -> matched extension without the leading dot
    while dirs_to_check:
        # first, recurse directories and gather torrents
        directory = dirs_to_check.pop()
        newtorrents = False
        for f in os.listdir(directory):
            newtorrent = None
            for ext in exts:
                if f.endswith(ext):
                    newtorrent = ext[1:]  # strip the leading '.'
                    break
            if newtorrent:
                newtorrents = True
                p = os.path.join(directory, f)
                # Record (mtime, size) so unchanged files can be skipped on
                # the next scan; infohash starts at 0 (not yet parsed).
                new_files[p] = [(int(os.path.getmtime(p)), os.path.getsize(p)), 0]
                torrent_type[p] = newtorrent
        if not newtorrents:
            # No torrents in this directory: descend into subdirectories.
            for f in os.listdir(directory):
                p = os.path.join(directory, f)
                if os.path.isdir(p):
                    dirs_to_check.append(p)
    new_parsed = {}
    to_add = []
    added = {}
    removed = {}
    # files[path] = [(modification_time, size), hash], hash is 0 if the file
    # has not been successfully parsed
    for p, v in new_files.items():
        # re-add old items and check for changes
        oldval = files.get(p)
        if not oldval:
            # new file
            to_add.append(p)
            continue
        h = oldval[1]
        if oldval[0] == v[0]:
            # file is unchanged from last parse
            if h:
                if blocked.has_key(p):
                    # parseable + blocked means duplicate
                    to_add.append(p)  # other duplicate may have gone away
                else:
                    new_parsed[h] = parsed[h]
                new_files[p] = oldval
            else:
                new_blocked[p] = 1  # same broken unparseable file
            continue
        # File changed since the last scan: drop the old entry (if any)
        # and schedule a re-parse below.
        if parsed.has_key(h) and not blocked.has_key(p):
            if DEBUG:
                errfunc('removing '+p+' (will re-add)')
            removed[h] = parsed[h]
        to_add.append(p)
    to_add.sort()
    for p in to_add:
        # then, parse new and changed torrents
        new_file = new_files[p]
        v, h = new_file
        if new_parsed.has_key(h):
            # duplicate
            if not blocked.has_key(p) or files[p][0] != v:
                errfunc('**warning** '+ p +' is a duplicate torrent for '+new_parsed[h]['path'])
            new_blocked[p] = 1
            continue
        if DEBUG:
            errfunc('adding '+p)
        try:
            # Arno: P2PURL
            tdef = TorrentDef.load(p)
            h = tdef.get_infohash()
            d = tdef.get_metainfo()
            new_file[1] = h
            # Re-check for duplicates now that the real infohash is known.
            if new_parsed.has_key(h):
                errfunc('**warning** '+ p +' is a duplicate torrent for '+new_parsed[h]['path'])
                new_blocked[p] = 1
                continue
            # Build the summary dict for this torrent.
            a = {}
            a['path'] = p
            f = os.path.basename(p)
            a['file'] = f
            a['type'] = torrent_type[p]
            if tdef.get_url_compat():
                a['url'] = tdef.get_url()
            i = d['info']
            # Total length and file count: single-file torrents carry
            # 'length', multi-file torrents a 'files' list.
            l = 0
            nf = 0
            if i.has_key('length'):
                l = i.get('length', 0)
                nf = 1
            elif i.has_key('files'):
                for li in i['files']:
                    nf += 1
                    if li.has_key('length'):
                        l += li['length']
            a['numfiles'] = nf
            a['length'] = l
            a['name'] = i.get('name', f)
            # Copy selected optional metainfo keys verbatim; the default
            # arguments bind the current d/a for this iteration.
            def setkey(k, d = d, a = a):
                if d.has_key(k):
                    a[k] = d[k]
            setkey('failure reason')
            setkey('warning message')
            setkey('announce-list')
            # Arno, LOOKUP SERVICE
            # Hash each HTTP seed URL so it can be looked up later.
            if tdef.get_urllist() is not None:
                httpseedhashes = []
                for url in tdef.get_urllist():
                    # TODO: normalize?
                    urlhash = sha(url).digest()
                    httpseedhashes.append(urlhash)
                a['url-hash-list'] = httpseedhashes
            if return_metainfo:
                a['metainfo'] = d
        except:
            # Any parse error marks the file as blocked for this scan.
            print_exc()
            errfunc('**warning** '+p+' has errors')
            new_blocked[p] = 1
            continue
        if DEBUG:
            errfunc('... successful')
        new_parsed[h] = a
        added[h] = a
    for p, v in files.items():
        # and finally, mark removed torrents
        if not new_files.has_key(p) and not blocked.has_key(p):
            if DEBUG:
                errfunc('removing '+p)
            removed[v[1]] = parsed[v[1]]
    if DEBUG:
        errfunc('done checking')
    return (new_parsed, new_files, new_blocked, added, removed)
def test_tdef_has_video_files_expected_false(self):
    """A torrent without video content must not be detected as video."""
    try:
        tdef = TorrentDef.load("torrents/ubuntu.torrent")
    except Exception:
        # Only loading errors should be reported as such.
        self.fail("There was an error while loading the torrent files.")
    else:
        # Fixed: the assertion used to sit inside the bare try/except, so an
        # AssertionError from assertFalse was swallowed and misreported as a
        # loading failure.
        self.assertFalse(has_torrent_video_files(tdef))
# --- session / torrent setup -------------------------------------------------
scfg = SessionStartupConfig()
scfg.set_state_dir(tempfile.mkdtemp())
scfg.set_listen_port(options.port)
# Disable all optional Tribler subsystems: this client only downloads.
scfg.set_overlay(False)
scfg.set_megacache(False)
scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
scfg.set_dialback(False)
scfg.set_social_networking(False)
scfg.set_buddycast(False)
scfg.set_crawler(False)
scfg.set_internal_tracker(False)
s = Session(scfg)

tdef = TorrentDef.load(__TORRENT_FILE__)

# tdef.get_tracker() returns the announce-url; we must omit the "announce" part
announce_url = tdef.get_tracker()
tracker_url = announce_url[:announce_url.find("announce")]

# Fixed: identity comparison with None uses "is", not "==".
if tdef.get_bitrate() is None:
    print >> sys.stderr, "Provided torrent file has no bitrate information. Exiting."
    sys.exit(1)

BITRATE = tdef.get_bitrate()
print >> sys.stderr, "Calculated bitrate is %d" % BITRATE
# Duration in seconds = total length / bitrate.
client_stats['video_duration'] = int(tdef.get_length() / BITRATE)

if not os.access(options.directory, os.F_OK):
    os.makedirs(options.directory)