def update(self, url):
    """Replace this widget's current contents with the torrent at *url*."""
    # First clear the existing contents.
    # (The console message below is Russian for "updating contents".)
    print "обновление содержимого"
    self.clear()
    torrent = Torrent()
    torrent.get_source(url)
    self.set_torrent(torrent.info)
def main(argv):
    """CLI entry point: download the torrent named on the command line.

    argv: full argument vector; argv[1] is the .torrent file path.
    """
    if len(argv) != 2:
        print "Usage: %s <torrent-file>" % argv[0]
        sys.exit(1)
    # NOTE(review): inputs/outputs are never used in this excerpt.
    inputs = []
    outputs = []
    print "~~~ Starting BitTorrent Client ~~~"
    print "----------------------------------"
    # Read & parse torrent file
    tfile = Torrent(argv[1])
    tfile.print_info()
    # My ID has to be exactly 20 characters but each ID should be random:
    # "-SilviaLearnsBT" is 15 chars + 5 zero-padded random digits = 20.
    my_id = "-SilviaLearnsBT%05d"%random.randint(0,99999)
    # Setup tracker connection
    tracker = Tracker(tfile.tracker_url)
    tracker.connect(tfile, my_id)
    # Setup Brain, the dispatcher that coordinates peers and pieces.
    brain = Brain({ "ip": "localhost", "port": 1050, "id": my_id}, tfile, tracker)
    if brain.is_complete():
        print "Aborting. I have the entire file!"
    else:
        print "Received list of peers from tracker: %r" % tracker.peers
        brain.add_peers()
        # Connect to at most 3 peers, then run the download loop.
        brain.connect_all(3)
        brain.run()
        print "~~~ GROOVY! You successfully downloaded a torrent ~~~"
def main(argv): if len(argv) != 2: print "Usage: %s <torrent-file>" % argv[0] sys.exit(1) inputs = [] outputs = [] tfile = Torrent(argv[1]) tfile.print_info() my_id = "xxxSpicyboiiixx%20d" % random.randint(0, 99999) tracker = Tracker(tfile.tracker_url) tracker.connect(tfile, my_id) brain = Brain({ "ip": "localhost", "port": 1050, "id": my_id }, tfile, tracker) if brain.is_complete(): print "Aborting" else: print "Received list of peers: %r" % tracker.peers brain.add_peers() brain.connect_all(3) brain.run() print "Downloaded a torrent"
def insert_meta_piece(self, piece, data):
    """Store one ut_metadata piece; once every piece has arrived, decode
    and validate the full metadata blob and attach it to the torrent.

    piece: integer index of the piece within the metadata blob.
    data:  raw bytes for that piece (16 KiB chunks; last may be short).
    Raises Exception when the assembled metadata matches neither the
    connection's infohash nor an embedded 'althash'.
    """
    logging.info('insert meta piece %s of len %s!' % (piece, len(data)))
    self._meta_pieces[piece] = data
    # Total metadata size was advertised in the peer's extension handshake.
    sz = self._remote_extension_handshake['metadata_size']
    chunksz = 2**14
    numpieces = int( math.ceil(float(sz) / chunksz) )
    # Complete once we hold every index 0..numpieces-1 exactly.
    if sorted(self._meta_pieces.keys()) == range(numpieces):
        logging.info('got all the metadata!')
        alldata = []
        for i in range(numpieces):
            alldata.append( self._meta_pieces[i] )
        torrent_data = ''.join(alldata)
        # The infohash is the SHA-1 of the full bencoded info dict.
        infohash = sha1(torrent_data).digest()
        # if they don't match? what do we do???
        torrent_meta = bencode.bdecode(torrent_data)
        connection_hash = self.torrent.hash
        self.torrent.update_meta( { 'info': torrent_meta }, update=True )
        if infohash != connection_hash:
            logging.warn('received metadata does not correspond to connection infohash!')
            # Fall back to an alternate hash embedded in the metadata, if any.
            if 'althash' in self.torrent.meta['info']:
                althash = self.torrent.meta['info']['althash']
                logging.info('received metadata has althash %s' % [althash])
                if althash == connection_hash:
                    Torrent.register_althash( self.torrent )
                    logging.info('GREAT! althash matches the connection hash though! proceeding!')
            else:
                logging.error('received metadata has no althash')
                raise Exception('received metadata has no althash')
        # check that it corresponds to this connection!
        self.torrent.save_metadata()
        self.post_metadata_received()
def __init__(self, piratebayId):
    """Build a torrent record from a Pirate Bay listing.

    piratebayId: site identifier passed through to get_data().
    """
    self.data = get_data(piratebayId)
    if not self.data:
        # NOTE(review): bails out before Torrent.__init__, leaving the
        # instance only partially constructed — callers must check.
        return
    Torrent.__init__(self)
    # The uploaded timestamp carries either a " GMT" suffix or a " +NN"
    # offset; strip both before parsing.
    published = self.data['uploaded']
    published = published.replace(' GMT', '').split(' +')[0]
    self['published'] = datetime.strptime(published, "%Y-%m-%d %H:%M:%S")
def main():
    """Demo driver: parse a torrent, connect to a few peers, and run a
    select()-based read/write loop until the download finishes."""
    torrent = Torrent('data/ubuntu.torrent')
    # Pretty-print the metainfo dict with the binary piece hashes
    # replaced by a placeholder so the output stays readable.
    td = deepcopy(torrent.torrent_file_dict)
    td[b'info'][b'pieces'] = '20 byte pieces hashes go here...'
    pp(td)
    print('Number of pieces in the file:', torrent.num_pieces)
    print('Size of download in bytes: ', torrent.get_download_length())
    input()  # pause so the user can inspect the info before downloading
    peers_info = PeersGetter(torrent).parse_peers()
    writers = []
    readers = []
    peers_connected_to_count = 0
    # Build connected peers.  The counter increments per attempt (not per
    # success), so at most 4 connection attempts are made.
    for peer_info in peers_info:
        if peers_connected_to_count > 3:
            break
        peers_connected_to_count += 1
        peer = Peer(torrent, peer_info=peer_info)
        peer.connect()
        if peer.is_connected:
            writers.append(peer)
            readers.append(peer)
    while not torrent.is_download_finished():
        # Peers double as both readable and writable selectables; the
        # readers list is also watched for error conditions.
        to_read, to_write, errors = select.select(readers, writers, readers)
        for peer in to_read:
            peer.read()
        for peer in to_write:
            peer.write()
        for peer in errors:
            readers.remove(peer)
            writers.remove(peer)
def make_test_files(torrent_data, torrent_info, download_dir, number_of_files):
    """Scatter a complete download across *number_of_files* partial files.

    Each hash-verified piece from the main file is written to one of the
    numbered ".N" copies chosen at random, so every copy ends up with a
    random subset of valid pieces (useful as swarm/test fixtures).
    """
    t = Torrent(torrent_data, torrent_info, download_dir, None)
    files = []
    main_file_wrapper = file_manager.FileWrapper(torrent=t, file_suffix="")
    main_file_wrapper.create_file_or_return_hashes()
    for i in range(int(number_of_files)):
        fw = file_manager.FileWrapper(torrent=t, file_suffix=".{}".format(i))
        fw.create_file_or_return_hashes()
        files.append(fw)
    for p in t._pieces:
        data = main_file_wrapper.read_block(p.index, 0, t.piece_length(p.index))
        # Only copy pieces that hash-check against the torrent's piece hash.
        if p.sha1hash == hashlib.sha1(data).digest():
            random.choice(files).write_piece(p.index, data)
def search(term): request = create_search_request(term) print "fetcihing " + request.url html = BeautifulSoup(request.text) results_div = html.find(lambda e: e.name == 'div' and e.has_attr('class') and e['class'] == ['results']) torrents = [] for item in results_div.find_all('dl'): try: new_torrent = Torrent() new_torrent.name = item.dt.a.text new_torrent.rel_url = item.dt.a['href'].replace('/', '') age_span = item.dd.find(lambda e: e.name == 'span' and e.has_attr( 'class') and e['class'] == ['a']).span new_torrent.date_exact = age_span['title'] new_torrent.date_relative = age_span.text new_torrent.size = item.dd.find( lambda e: e.name == 'span' and e.has_attr('class') and e[ 'class'] == ['s']).text new_torrent.seeders = item.dd.find( lambda e: e.name == 'span' and e.has_attr('class') and e[ 'class'] == ['u']).text torrents.append(new_torrent) except Exception, e: print e continue
def __init__(self, filename):
    """Bind to all interfaces on port 6886 and load the given torrent."""
    self.host = '0.0.0.0'
    self.port = 6886
    self.torrent = Torrent(filename)
    # self.listener = self.create_listener()
    # Selectable-socket bookkeeping, filled in as peers connect.
    self.inputs = []
    self.outputs = []
def remove_torrent(self, hash):
    """Drop *hash* from the active set and tear the torrent down."""
    fallback = Torrent.instantiate(hash)
    # Prefer the live instance we were tracking; fall back to the
    # freshly instantiated one when the hash is unknown to us.
    torrent = self.torrents.pop(hash, fallback)
    # remove() closes connections and performs the other bookkeeping.
    torrent.remove()
def parse(self):
    """Parse the .torrent file at self.path into a Torrent object.

    Builds the tracker URL list, the per-file layout (with byte offsets
    into the concatenated piece space), and returns the Torrent.
    """
    with open(self.path, 'rb') as f:
        # NOTE(review): bdecode is handed the file object itself —
        # confirm this bdecode implementation accepts a stream rather
        # than a bytes value.
        torrent_file = bdecode(f)
    # A torrent may carry a single tracker ('announce') and/or a tiered
    # list ('announce-list'); collect the primary URL of each tier.
    urls = []
    if 'announce' in torrent_file:
        urls.append(torrent_file['announce'])
    if 'announce-list' in torrent_file:
        for tracker in torrent_file['announce-list']:
            urls.append(tracker[0])
    info = torrent_file['info']
    name = info['name']
    piece_length = info['piece length']
    pieces = info['pieces']
    if 'length' in info:
        length = info['length']  # only exists for single-file torrents
        file_list = [File(length, [name], 0)]
    else:
        # Multi-file torrent: track each file's byte offset in the
        # concatenated piece space.
        files = info['files']
        file_list = []
        offset = 0
        for file in files:
            file_list.append(File(file['length'], file['path'], offset))
            offset += file['length']
    target = FileStructure(name, file_list)
    # Piece hashes are 20-byte SHA-1 digests concatenated into one blob.
    print("Torrent has %d pieces" % (len(pieces) // 20))
    print("Torrent piece length = %d" % int(piece_length))
    # print("Torrent has %d bytes" % length)
    return Torrent(urls, info, piece_length, pieces, target)
def got_handshake(self, data):
    """Handle the raw BitTorrent handshake received from a peer.

    Parses the handshake, resolves the Peer and the Torrent it refers
    to, answers with our own (extension) handshakes, and starts the
    message pump.  Closes the stream if the handshake does not parse.
    """
    if options.verbose > 2:
        logging.info('got handshake %s' % [data])
    self.handshake = parse_handshake(data)
    if options.verbose > 1:
        logging.info('parsed handshake %s' % [self.handshake])
    if self.handshake:
        self.peerid = self.handshake['peerid']
        self.peer = Peer.instantiate({'peerid':self.peerid})
        self.infohash = self.handshake['infohash']
        if not self.torrent:
            # check if this torrent is started, and in the current client's list of torrents!
            #self.torrent = Torrent.instantiate( binascii.hexlify(self.handshake['infohash']) )
            self.torrent = Torrent.instantiate( self.handshake['infohash'] )
            self.torrent.connections.append(self)
        logging.info('connection has torrent %s with hash %s%s' % (self.torrent, [self.torrent.hash], ' (with metadata)' if self.torrent.meta else ''))
        if not self._sent_handshake:
            self.send_handshake()
        if not self._sent_extension_handshake:
            self.send_extension_handshake()
        # Only advertise our pieces when we actually have the metadata;
        # either way, start reading peer messages.
        if self.torrent and self.torrent.meta:
            self.send_bitmask()
            self.get_more_messages()
        else:
            self.get_more_messages()
    else:
        logging.info('invalid/unrecognized handshake')
        self.stream.close()
class TestTorrentModel(unittest.TestCase):
    """Unit tests for the Torrent model: tracker request parameters,
    announce URL, and compact peer-list decoding."""

    def setUp(self):
        # Shared fixture parsed from a local Ubuntu .torrent file.
        self.torrent = Torrent('ub.torrent')

    def test_generate_peer_id(self):
        # Placeholder — peer-id generation is not asserted yet.
        self.assertEqual(4, 4)
        pass

    def test_build_tracker_params(self):
        # Expected values correspond to the ub.torrent fixture.
        self.assertEqual(self.torrent.tracker_params['info_hash'],'\xcb\x84\xcc\xc1\x0f)m\xf7-l@\xbaz\x07\xc1x\xa42:\x14')
        self.assertEqual(self.torrent.tracker_params['port'],8123)
        self.assertEqual(self.torrent.tracker_params['uploaded'],0)
        self.assertEqual(self.torrent.tracker_params['downloaded'],0)
        self.assertEqual(self.torrent.tracker_params['left'],1028653056)
        self.assertEqual(self.torrent.tracker_params['compact'],1)
        self.assertEqual(self.torrent.tracker_params['no_peer_id'],0)
        # Peer IDs are always exactly 20 bytes.
        self.assertEqual(len(self.torrent.tracker_params['peer_id']), 20)

    def test_announce_url(self):
        self.assertEqual(self.torrent.announce_url, 'http://torrent.ubuntu.com:6969/announce')

    def test_get_peer_list(self):
        # Expected decoding of the compact (6-bytes-per-peer) blob below.
        expected_peer_list = ['31.16.170.106:6882', '213.89.96.185:6941', '50.173.173.26:48838', '85.239.121.202:51413', '146.115.161.94:51413', '37.187.17.222:51413', '2.224.179.235:25287', '62.210.236.9:51413', '5.135.186.165:6984', '177.182.204.16:46714', '81.90.237.124:666', '84.236.19.85:51999', '108.61.191.94:58869', '185.44.107.109:51413', '5.45.109.115:51413', '198.100.147.91:51103', '123.198.9.83:51413', '188.226.241.51:51413', '80.217.52.181:58882', '130.243.184.10:13350', '89.143.230.17:58826', '85.224.46.172:24366', '89.142.59.154:6884', '46.188.29.249:24261', '185.21.216.192:58153', '208.53.164.19:49325', '85.229.24.145:51412', '95.211.186.115:53076', '80.198.252.120:42000', '60.241.41.178:51413', '68.114.213.208:53281', '89.12.44.117:51413', '124.33.156.230:6890', '71.179.85.145:58090', '37.59.36.217:61050', '80.99.91.28:51413', '93.180.52.136:51413', '67.189.24.160:5555', '110.4.196.166:51413', '46.146.228.7:6866', '89.169.1.240:30254', '67.189.24.160:5555', '71.213.10.242:51413', '177.204.35.11:51413', '85.183.40.67:42478', '82.211.208.148:39249', '23.255.227.142:51413', '94.23.38.99:51103', '176.31.66.69:64305', '31.38.100.198:51413']
        self.torrent.tracker_response['peers'] = '\x1f\x10\xaaj\x1a\xe2\xd5Y`\xb9\x1b\x1d2\xad\xad\x1a\xbe\xc6U\xefy\xca\xc8\xd5\x92s\xa1^\xc8\xd5%\xbb\x11\xde\xc8\xd5\x02\xe0\xb3\xebb\xc7>\xd2\xec\t\xc8\xd5\x05\x87\xba\xa5\x1bH\xb1\xb6\xcc\x10\xb6zQZ\xed|\x02\x9aT\xec\x13U\xcb\x1fl=\xbf^\xe5\xf5\xb9,km\xc8\xd5\x05-ms\xc8\xd5\xc6d\x93[\xc7\x9f{\xc6\tS\xc8\xd5\xbc\xe2\xf13\xc8\xd5P\xd94\xb5\xe6\x02\x82\xf3\xb8\n4&Y\x8f\xe6\x11\xe5\xcaU\xe0.\xac_.Y\x8e;\x9a\x1a\xe4.\xbc\x1d\xf9^\xc5\xb9\x15\xd8\xc0\xe3)\xd05\xa4\x13\xc0\xadU\xe5\x18\x91\xc8\xd4_\xd3\xbas\xcfTP\xc6\xfcx\xa4\x10<\xf1)\xb2\xc8\xd5Dr\xd5\xd0\xd0!Y\x0c,u\xc8\xd5|!\x9c\xe6\x1a\xeaG\xb3U\x91\xe2\xea%;$\xd9\xeezPc[\x1c\xc8\xd5]\xb44\x88\xc8\xd5C\xbd\x18\xa0\x15\xb3n\x04\xc4\xa6\xc8\xd5.\x92\xe4\x07\x1a\xd2Y\xa9\x01\xf0v.C\xbd\x18\xa0\x15\xb3G\xd5\n\xf2\xc8\xd5\xb1\xcc#\x0b\xc8\xd5U\xb7(C\xa5\xeeR\xd3\xd0\x94\x99Q\x17\xff\xe3\x8e\xc8\xd5^\x17&c\xc7\x9f\xb0\x1fBE\xfb1\x1f&d\xc6\xc8\xd5'
        peer_list = self.torrent.get_peer_list()
        for expected_peer in expected_peer_list:
            self.assertIn(expected_peer, peer_list)

    def test_get_handshake(self):
        # Not implemented yet.
        pass
def get_torrents(self, view="main"):
    """Get list of all torrents in specified view

    @return: list of L{Torrent} instances
    @rtype: list

    @todo: add validity check for specified view
    """
    self.torrents = []
    methods = torrent.methods
    # Only query getters that the connected rTorrent build supports.
    retriever_methods = [ m for m in methods if m.is_retriever() and m.is_available(self) ]
    # One multicall row per torrent: first column is the info hash
    # (d.get_hash=), the rest follow retriever_methods in order.
    m = rpc.Multicall(self)
    m.add("d.multicall", view, "d.get_hash=", *[method.rpc_call + "=" for method in retriever_methods])
    results = m.call()[0] # only sent one call, only need first result
    for result in results:
        results_dict = {}
        # build results_dict (note: the loop variable m shadows the
        # Multicall above, which is no longer needed at this point)
        for m, r in zip(retriever_methods, result[1:]): # result[0] is the info_hash
            results_dict[m.varname] = rpc.process_result(m, r)
        self.torrents.append( Torrent(self, info_hash=result[0], **results_dict))
    self._manage_torrent_cache()
    return (self.torrents)
def announce_tracker(self):
    """ Gets the list of peers from the tracker

    Announces to every tracker in the torrent's announce-list and prints
    the decoded peer information (or the failure reason).
    :return: list of peers
    """
    payload = self._create_payload()
    print(self.torrent.file_name)
    for url_tracker in self.torrent.announce_list:
        if url_tracker.startswith(b'udp'):
            # BUG FIX: prepending b'http' to b'udp://...' produced the
            # invalid scheme 'httpudp://...'.  Replace the 'udp' prefix
            # with 'http' instead so urlopen gets a usable URL.
            url_tracker = b'http' + url_tracker[len(b'udp'):]
        print(url_tracker)
        try:
            raw_response = t.urlopen(url_tracker.decode() + "?" + payload).read()
            response = bencoder.decode(raw_response)
            if b'failure reason' in response.keys():
                print('Torrent failed because of ' + response[b'failure reason'].decode())
            else:
                # print(response)
                print(Torrent.bin_to_dec(response[b'peers']))
        except urllib.error.URLError as e:
            print(e)
        print()
async def main():
    """Announce 'started' for the Ubuntu ISO torrent, then disconnect."""
    torrent = Torrent('ubuntu-19.10-desktop-amd64.iso.torrent')
    announcer = Tracker(torrent)
    # uploaded=0, downloaded=0 at the start of a session.
    await announcer.send_announce_request(0, 0, 'started')
    await announcer.close()
def main():
    """CLI entry point: download the given .torrent, running the client
    on the asyncio loop until completion or Ctrl-C (clean shutdown)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('torrent', help='the .torrent to download')
    parser.add_argument('-v', '--verbose', action='store_true', help='enable verbose output')
    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    loop = asyncio.get_event_loop()
    client = TorrentClient(Torrent(args.torrent))
    task = loop.create_task(client.start())
    def signal_handler(*_):
        # SIGINT: ask the client to stop, then cancel the main task.
        logging.info('Exiting, please wait until everything is shutdown...')
        client.stop()
        task.cancel()
    signal.signal(signal.SIGINT, signal_handler)
    try:
        loop.run_until_complete(task)
    except CancelledError:
        logging.warning('Event loop was canceled')
def getTorrentIfAny(config):
    """Scrape eztv.io for config.tvShow/config.episode and pick the best
    matching torrent among results larger than 1 GB.

    Exits the process via quit() when nothing suitable is found.
    Returns the chosen Torrent with its filename resolved from the magnet.
    """
    eztvSearchString = (config.tvShow).replace(' ', '_').lower()
    raw_html = simple_get('https://eztv.io/search/' + eztvSearchString)
    html = BeautifulSoup(raw_html, 'html.parser')
    torrents = []
    # Match "<show> <episode>" case-insensitively against row titles.
    expr = re.compile(config.tvShow + ' ' + config.episode, re.I)
    for p in html.select('tr.forum_header_border'):
        row = p.select('td')
        title = row[1].text.strip()
        magnet = row[2].find('a', 'magnet').get('href')
        if (bool(expr.match(title))):
            # Columns: size text, magnet, age text, seed count.
            t = Torrent(config.episode, title, row[3].text, magnet, row[4].text, int(row[5].text.replace(',', '')))
            torrents.append(t)
    if len(torrents) == 0:
        quit()
    # The file with more seeds that is bigger that 1Gb
    torrentsBigger1G = [t for t in torrents if t.size_in_bytes > 1000000000]
    torrents = torrentsBigger1G
    if len(torrents) == 0:
        quit()
    # Torrent defines its own ordering (presumably by seeds); reverse
    # sort puts the best candidate first — TODO confirm.
    sortedTorrents = sorted(torrents, reverse=True)
    theTorrent = sortedTorrents[0]
    #print('\n'.join(map(str, sortedTorrents)))
    filename = getFilenameFromMagnet(theTorrent.magnet)
    theTorrent.setFile(filename)
    return theTorrent
def get_torrent_info(page_url): """ get a torrent's info from kickass torrent page info includes: title, magnet link, torrent link, size, seeders, leechers, update time and upload time return a Torrent object """ # response = session.get(page_url) response = requests.get(page_url, verify=VERIFY) soup = bs4.BeautifulSoup(response.text, "html.parser") try: c_title_and_size = soup.select('span.folderopen')[0].text.strip() size_beg_index = c_title_and_size.index('(Size: ') c_title = c_title_and_size[:size_beg_index].strip() c_size = c_title_and_size[size_beg_index:] c_size = c_size[c_size.index(' ') + 1:-1] # remove the '(Size: ' and ')' c_magnet = soup.select('a[href^="magnet:"]')[0].attrs.get('href') c_torrent_cache = u'http:' + soup.select( 'a[href^="//torcache.net"]')[0].attrs.get('href') c_seeders = soup.select('div.seedBlock > strong')[0].text c_leechers = soup.select('div.leechBlock > strong')[0].text c_update_time = soup.select('time.timeago')[0].text c_upload_time = soup.select('time.timeago')[1].text torrent = Torrent(title=c_title, magnet=c_magnet, torrent_cache=c_torrent_cache, size=c_size, seeders=c_seeders, leechers=c_leechers, update_time=c_update_time, upload_time=c_upload_time) # filter function to remove non-ascii characters from showing up in terminal print termcolor.colored( 'Processing torrent info at {} succeeded.'.format( filter(lambda x: x in string.printable, page_url)), 'green') return torrent except IndexError: # torrent page has been deleted print termcolor.colored('Torrent at {} deleted!'.format(page_url), 'red') return Torrent(title='Deleted!')
def conv_json_torrents(self):
    """Util function to normalize data"""
    torrents = self.data['Torrents']
    if torrents:
        # Guarantee every raw entry carries a RemasterTitle key before
        # wrapping it in a Torrent.
        for entry in torrents:
            entry.setdefault('RemasterTitle', '')
        self.data['Torrents'] = [Torrent(data=entry) for entry in torrents]
def add_torrent(self, hash):
    """Register the torrent for *hash* with this client (idempotent).

    Always returns True.
    """
    torrent = Torrent.instantiate(hash)
    # Keep any entry that is already registered for this hash.
    self.torrents.setdefault(hash, torrent)
    # notify any sessions
    #self.notify_sessions(added={'btapp/torrent':torrent})
    #Session.notify(client=self, added=torrent)
    return True
def parse_raw_page_for_torrents(self, content):
    """Parse a thepiratebay search-results page into Torrent objects.

    content: raw HTML of the results page.
    Returns a list of Torrent (possibly empty), or None when the page
    carries no search-result table at all.
    """
    soup = BeautifulSoup(content, 'html.parser')
    content_searchResult = soup.body.find(id='searchResult')
    if content_searchResult is None:
        logging.info('No torrents found for the search criteria.')
        return None
    listElements = content_searchResult.tr
    torrentWrapper = self.removeHeader(listElements)
    torrents_found = []
    for torrentElement in torrentWrapper.find_all_next('td'):
        if torrentElement.find_all("div", class_='detName'):
            name = torrentElement.find('a', class_='detLink').get_text()
            url = torrentElement.find('a', class_='detLink')['href']
            magnet = torrentElement.find(href=self.has_magnet)
            # Uploader appears either as a link or (anonymous) in <i>.
            uploader = torrentElement.find('a', class_='detDesc')
            if uploader is None:
                uploader = torrentElement.find('i')
            uploader = uploader.get_text()
            info_text = torrentElement.find('font', class_='detDesc').get_text()
            # Date is "MM-DD YYYY" or "Y-day HH:MM"; size like "1.2 GiB".
            date = return_re_match( info_text, r"(\d+\-\d+\s\d+)|(Y\-day\s\d{2}\:\d{2})")
            size = return_re_match(info_text, r"(\d+(\.\d+)?\s[a-zA-Z]+)")
            byteSize = deHumansize(size)
            # COULD NOT FIND HREF!
            if (magnet is None):
                logger.warning('Could not find magnet for {}'.format(name))
                continue
            # Seeders and leechers are the two right-aligned cells.
            seed_and_leech = torrentElement.find_all_next( attrs={"align": "right"})
            seed = seed_and_leech[0].get_text()
            leech = seed_and_leech[1].get_text()
            torrent = Torrent(name, magnet['href'], byteSize, uploader, date, seed, leech, url)
            torrents_found.append(torrent)
        else:
            logger.warning( 'Could not find torrent element on thepiratebay webpage.')
            continue
    logging.info('Found %s torrents for given search criteria.' % len(torrents_found))
    return torrents_found
def add_torrent(self, tor_file_path):
    """Give the Client a Torrent to use.

    Raises ClientError when a Torrent is already attached.
    """
    if self._torrent:
        raise ClientError('Client already has a Torrent')
    self._torrent = Torrent(tor_file_path)
    # Seed the to-do list with every piece of the new torrent.
    self.unrequested_pieces = piece_factory(self._torrent.length,
                                            self._torrent.piece_length,
                                            self._torrent.piece_hashes)
def get_torrent(self, id):
    """ Returns a TorrentGroup for the passed ID, associated with this API object. """
    id = int(id)
    # EAFP: serve from the cache when present, otherwise build a fresh
    # Torrent (note: the fresh instance is NOT added to the cache).
    try:
        return self.cached_torrents[id]
    except KeyError:
        return Torrent(id, self)
class Main:
    """Script-style entry point.

    NOTE(review): all of these statements live in the class body, so the
    argument parsing and the download run at import/definition time —
    the class is never instantiated.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('torrent', help='.torrent file')
    args = parser.parse_args()
    loop = asyncio.get_event_loop()
    client = Client(Torrent(args.torrent))
    task = loop.create_task(client.start())
    loop.run_until_complete(task)
def __init__(self, role=PEER, server_ip_address='127.0.0.1'):
    """ Class constructor
    :param role: this node's role in the swarm (defaults to PEER)
    :param server_ip_address: used when need to use the ip assigned by LAN
    """
    self.server_ip_address = server_ip_address
    self.id = uuid.uuid4() # creates unique id for the peer
    self.role = role
    # Commented out from this lab b/c not needed
    self.DHT = None
    # Torrent metadata and the message codec keyed to its info hash.
    self.torrent = Torrent(self.TORRENT_PATH)
    self.message = Message(self.id, self.torrent.create_info_hash())
    # peer_id, torrent, message, server_ip_address="127.0.0.1", server_port=12000
    self.server = Server(peer_id=self.id, torrent=self.torrent, message=self.message, server_ip_address=server_ip_address, server_port=self.SERVER_PORT)
    # Tracker connection is established later.
    self.tracker = None
def get_peers(metainfo):
    '''Input: metainfo file (.torrent file)
    Output: a list of peer_ids (strings) returned from the tracker

    Sends an HTTP announce to the torrent's tracker, parses the reply,
    and returns (peer_ids, torrent_object).
    '''
    torrent = Torrent(metainfo)
    reply = requests.get(torrent.announce_url, params=torrent.param_dict)
    return parse_response_from_tracker(reply), torrent
def added_torrent_url(self, response):
    """HTTP-fetch callback: when downloading a .torrent succeeded,
    decode it and register/start the torrent it describes."""
    if response.code == 200:
        meta = bencode.bdecode(response.body)
        # The infohash is the SHA-1 of the re-bencoded 'info' dict.
        infohash = hashlib.sha1(bencode.bencode(meta['info'])).digest()
        torrent = Torrent.instantiate(infohash)
        if not torrent.meta:
            # update?
            torrent.update_meta(meta)
            torrent.save_metadata()
        self.add_torrent(torrent.hash)
def main():
    """CLI entry point: decode the torrent named in argv[1], announce to
    its tracker, and connect to the returned peers asynchronously.

    Every parse/network failure exits via error_quit() with a message.
    """
    # make sure a file name is provided
    if len(sys.argv) < 2:
        error_quit("File name not provided")
    ## attempt to decode torrent
    torrent = None
    try:
        torrent = Torrent(sys.argv[1])
    except OSError as e:
        error_quit(f"Could not open torrent file - {e}")
    except BEncodeDecodeError as e:
        error_quit(f"Could not decode torrent file - {e}")
    except Exception as e:
        error_quit(f"Unexpected error! - {e}")
    ## attempt to contact tracker
    tracker = Tracker(torrent, ID, PORT)
    try:
        response = tracker.request()
    except TrackerParseError as e:
        error_quit(f"Tracker Parsing error - {e}")
    except URLError as e:
        error_quit(f"Could not connect to tracker: {e}")
    except BEncodeDecodeError as e:
        error_quit(f"Malformed tracker response: {e}")
    except Exception as e:
        error_quit(f"Unexpected error! - {e}")
    # make sure the peers blob is correct (compact format: 6 bytes/peer)
    if len(response["peers"]) % 6 != 0:
        error_quit("Malformed peers list")
    # list of raw peer IPs and port
    raw_peers = [ response["peers"][i:i + 6] for i in range(0, len(response["peers"]), 6) ]
    # peers we are attempting to request pieces from
    seed_peers = []
    for peer_bytes in raw_peers:
        try:
            seed_peers.append(Peer(peer_bytes))
        except ValueError as e:
            # Skip peers whose address cannot be parsed.
            print(f"Could not parse {peer_bytes}'s ip: {e}")
    asyncio.run(do_connect(seed_peers, torrent))
def initiate_connected(cls, conn, infohash):
    """Finish an outgoing connection that has just established.

    Binds the connection to its Torrent, sends our handshake and
    extension handshake, advertises our pieces when metadata is known,
    then hands control to the connection's when_connected() hook.
    """
    logging.info('connected to %s' % str(conn.address))
    conn._connecting = False
    conn.infohash = infohash
    conn.torrent = Torrent.instantiate( infohash )
    conn.torrent.connections.append(conn)
    conn.send_handshake(infohash=infohash)
    conn.send_extension_handshake()
    # Only send a bitmask when we actually have the torrent's metadata.
    if conn.torrent.meta:
        conn.send_bitmask()
    conn.when_connected()
def setUp(self):
    # Fixture: a Torrent plus a PeersGetter whose tracker request is
    # mocked with a canned compact-format announce response (b'peers'
    # packs 6 bytes per peer: 4-byte IP + 2-byte port).
    self.torrent = Torrent('data/ubuntu.torrent')
    self.pg = PeersGetter(self.torrent)
    self.pg.request_peers = mock.Mock(
        return_value={
            b'complete': 389,
            b'peers': b'.\xa6\xbc\xe2\xa7\xabI\xe1\xc8\x1c\x1a\xe1^\x17\xd3K\xc8\x89\xd43\x9f\x81~\xafQ\xa9\x96:\xc8\xd5\x94e\x94\xbd\xf0PY_&\xb8\xc8\xd5G\xaf1\x11\xe3\x88[\xca(s\xc78\xc6\x1bJ\xd0y\xb8\x1f\xd0)\x9e\x1a\xe1X\xc6\\/\x1a\xe1G\xf1\xff\xbc\xd1.\xc1Gp\x83\x1b:\x05\x87\xb7\xe2\x82e%\xbbp\x9a\xc8\xd5XP((\xc8\xd5T\xec"#\xcb\x1fm\xcd\xc8\xa1\xd6\xd8>\xd2\xc34\xc8\xd5\xb0\x1fx.\xdd\x1eU\x11\x1ds\xee\x80\xd9\x0b\xb6\x04\xc8\xd5%\xbb\x05\xba\xaf\xc8\xc3\x9ai\x85\xc8\xd5l\x13kV\xe1\n_\xd3\xcd\x97\xc8\xd5\xc3\x9a\r\xa7\xc8\nd#\xec\xfe\xc3P\xbc\x1b\xbd-\xc8\xd5\x05\xc4Xj\xaf\xc8K\x98x\xb5\xc00\xbck\xa4\xc2\x1a\xe1[\xd2W\xcd\xc8\xd5Q\x02S~\xc8\xd5\xb9\x15\xd8\x9djmO\x8d\xad\xad\x9c\xddO\x8d\xabQ,\xefO\x8d\xae\'\xb0\xd3O\x8d\xad\x8et&_\x8d\x1c\xb5\x8f\x06O\x8d\xabI\xe5\x01O\x8d\xa0I\xbb\x06O\x8d\xad\xa3A\x95_\x8d\x1c\xb1\xb7\x18O\x8d\xabDz\x10_\x8d\x1c\xb4\xe6y_\x8d\x1c\xb3 \x90O\x8d\xabE\xee@O\x8d\xa0KA@',
            b'incomplete': 60,
            b'interval': 1800
        })
def main():
    """CLI entry point: parse flags (-i/--info, -v/--verbose,
    -t/--threads N), load the torrent, announce, and build the swarm."""
    options, files = getopt.getopt(sys.argv[1:], 'ivt:', ['info', 'verbose', 'threads:'])
    for flag, value in options:
        if flag == '-v' or flag == '--verbose':
            configuration['verbose'] = True
        elif flag == '-t' or flag == '--threads':
            try:
                configuration['threads'] = int(value)
            except:
                usage()
        elif flag == '-i' or flag == '--info':
            configuration['info'] = True
        else:
            usage()
    # Exactly one torrent file / magnet link is expected.
    if len(files) != 1:
        usage()
    try:
        torrent = Torrent(files[0])
    except:
        print 'Impossible to read torrent file/magnet link:', files[0]
        exit(1)
    # Info-only mode: show the metadata and stop.
    if configuration['info'] == True:
        torrent.show()
        exit(0)
    torrent.start()
    generate_clientid()
    tracker = Tracker(torrent.data['announce'])
    tracker.update(torrent)
    swarm = Swarm()
    swarm.update_peers(tracker)
    # NOTE(review): 'threads' is assigned but unused within this excerpt.
    threads = configuration['threads']
def _load_torrents(self):
    """load torrents in default dir, seeding in default period"""
    log.debug("load torrents from %s..." % TORRENT_DIR)
    from vtrans import Vtrans
    vtrans = Vtrans()
    # Pick up every .torrent file in the default directory.
    torrent_names = [n for n in os.listdir(TORRENT_DIR) if n.endswith(".torrent")]
    for name in torrent_names:
        log.debug("loading %s" % name)
        torrent = Torrent(os.path.join(TORRENT_DIR, name), SAVE_DIR, SEED_TIME)
        vtrans.add_torrent(torrent)
    log.debug("load torrents end.")
def search_piratebay(query):
    """Scrape Pirate Bay search results for *query*.

    Returns {'total': n (capped at 30), 'results': [Torrent, ...]}.
    """
    html = requests.get('https://thepiratebay.se/search/' + query)
    soup = BeautifulSoup(html.content)
    # The first <h2> holds the result summary; the trailing number in it
    # is the total hit count (or the text starts with "No hits").
    total_phrase = soup.find_all('h2')[0].contents[1].strip()
    results = []
    if total_phrase.startswith('No hits'):
        total = 0
    else:
        total = [int(s) for s in total_phrase.split() if s.isdigit()][-1]
    if total > 0:
        rows = soup.find_all('tr')[1:]  # skip the table header row
        for row in rows:
            torrent = Torrent(source='Pirate Bay')
            td = row.find_all('td')
            torrent.title = td[1].find('a').text
            # Second anchor in the description cell is the magnet link.
            torrent.url = td[1].find_all('a')[1].attrs['href']
            torrent.seeders = int(td[2].text)
            torrent.leechers = int(td[3].text)
            # The <font> text is comma-separated; the second field reads
            # " Size <value>" — keep just the value.
            torrent.size = td[1].find('font').text.split(',')[1].replace(' Size ', '')
            results.append(torrent)
    if total > 30:
        total = 30  # only the first page of results is scraped
    return {'total': total, 'results': results}
def __init__(self, mininovaId):
    """Populate torrent fields from a Mininova listing.

    mininovaId: identifier passed through to get_data().
    """
    self.data = get_data(mininovaId)
    if not self.data:
        # NOTE(review): returns before Torrent.__init__ — the instance
        # is only partially constructed when the lookup fails.
        return
    Torrent.__init__(self)
    # "share ratio" is a comma-separated "<seeders>,<leechers>" pair;
    # -1 marks values that are missing or unparsable.
    ratio = self.data['share ratio'].split(',')
    self['seeder'] = -1
    self['leecher'] = -1
    if len(ratio) == 2:
        val = int_value(ratio[0].replace(',','').strip())
        if val:
            self['seeder'] = int(val)
        val = int_value(ratio[1].replace(',','').strip())
        if val:
            self['leecher'] = int(val)
    # Download counter uses thousands separators; strip them first.
    val = int_value(self.data['downloads'].replace(',','').strip())
    if val:
        self['downloaded'] = int(val)
    else:
        self['downloaded'] = -1
    # "added on" carries a trailing " +NN" offset; drop it before parsing.
    published = self.data['added on']
    published = published.split(' +')[0]
    self['published'] = datetime.strptime(published, "%a, %d %b %Y %H:%M:%S")
def got_handshake(self, data):
    """Handle a raw peer handshake: parse it, bind the connection to its
    torrent, reply in kind, and start reading messages.

    Closes the stream when the handshake does not parse.
    """
    logging.info('got handshake %s' % [data])
    self.handshake = parse_handshake(data)
    if self.handshake:
        # Torrents are keyed by the hex form of the infohash here.
        self.torrent = Torrent.instantiate( binascii.hexlify(self.handshake['infohash']) )
        logging.info('connection has torrent %s with hash %s%s' % (self.torrent, self.torrent.hash, ' (with metadata)' if self.torrent.meta else ''))
        if not self._sent_handshake:
            self.send_handshake()
        # Advertise pieces and start the message pump only once we hold
        # the torrent's metadata.
        if self.torrent and self.torrent.meta:
            self.send_bitmask()
            self.get_more_messages()
    else:
        logging.info('invalid/unrecognized handshake')
        self.stream.close()
def main(torrent_path: str, yes: bool) -> None:
    """Show torrent info and, once confirmed (or when *yes* is set),
    announce to the torrent's HTTP tracker.

    torrent_path: path to the .torrent file.
    yes: skip the interactive confirmation prompt.
    """
    torrent_file_path = Path(torrent_path)
    torrent = Torrent.parse(torrent_file_path)
    display_torrent_info(torrent)
    if not yes:
        allow_download = Confirm.ask("Allow download of this torrent", default="y")
        if not allow_download:
            print("Downloading aborted!")
            return
    http_tracker = tracker.HTTTPTracker(torrent)
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(http_tracker.connect())
def main():
    """Launch the Qt "Video torrent" main window.

    NOTE(review): *url* is assigned but unused — the get_source(url)
    call below is commented out, so the window opens with an empty
    Torrent's info.
    """
    #url = "pervyj-mstitel_captain-america-the-first-avenger-2011-bdrip-1080p-ot-youtracker-licenzija"
    #url = "virtuoznost_virtuosity-1995-hdrip-ot-rulya74"
    url = "http://rutor.org/torrent/318589/kolonija_dvojnaja-komanda_double-team-1997-bdrip-by-msltel-p-p2-a"
    #url = "chelovek-bez-proshlogo_mies-vailla-menneisyyttä"
    #url = "seksualnye-hroniki-francuzskoj-semi_chroniques-sexuelles-dune-famille-daujourdhui-2012-bdrip-1080p-uncut"
    app = QApplication(sys.argv)
    app.setApplicationName("Video torrent")
    torrent = Torrent()
    #torrent.get_source(url)
    #print torrent.get_sql_insert()
    form = MainWindow()
    form.set_torrent(torrent.info)
    form.setup()
    form.setMinimumSize(800, 600)
    form.show()
    #torrent.save()
    torrent.close()
    sys.exit(app.exec_())
def _get_torrents_list(self, query):
    """Execute *query* and hydrate one Torrent (with its tracks) per row."""
    self._cur.execute(query)
    torrents = []
    for row in self._cur.fetchall():
        entry = Torrent()
        entry.id = row["id"]
        entry.search_query = row["search_query"]
        entry.title = row["title"]
        entry.torrent_file = row["torrent_file"]
        entry.magnet_link = row["magnet_link"]
        entry.created_at = row["created_at"]
        # Attach the track list before handing the record back.
        self.load_torrent_tracks(entry)
        torrents.append(entry)
    return torrents
def search_extra_torrent(query):
    """Scrape ExtraTorrent search results for *query*.

    Returns {'total': n (capped at 50), 'results': [Torrent, ...]}.
    """
    html = requests.get('http://extratorrent.cc/search/?search=' + query)
    soup = BeautifulSoup(html.content)
    # NOTE(review): result count scraped positionally from the 13th <b>
    # element — fragile against any page layout change.
    total = int(soup.find_all('b')[12].text)
    results = []
    if total > 0:
        # Result rows alternate between the 'tlr' and 'tlz' CSS classes.
        r_rows = soup.find_all('tr', class_='tlr')
        z_rows = soup.find_all('tr', class_='tlz')
        all_rows = r_rows + z_rows
        for row in all_rows:
            torrent = Torrent(source='ExtraTorrent')
            attributes = row.find_all('a')[0].attrs
            # The title attribute wraps the name; drop its first and
            # last words to keep just the torrent name.
            title = attributes['title'].split()
            title.pop(0)
            title.pop()
            torrent.title = " ".join(title)
            download_link = attributes['href'].replace('torrent_download', 'download')
            torrent.url = 'http://extratorrent.cc' + download_link
            torrent.size = row.find_all('td')[-4].text
            # Seeder/leecher cells ('sy'/'ly') are absent when zero.
            if row.find('td', class_='sy'):
                torrent.seeders = int(row.find('td', class_='sy').text)
            else:
                torrent.seeders = 0
            if row.find('td', class_='ly'):
                torrent.leechers = int(row.find('td', class_='ly').text)
            else:
                torrent.leechers = 0
            results.append(torrent)
    if total > 50:
        total = 50
    return {'total': total, 'results': results}
def main(argv):
    """Serve the torrent given as the first CLI argument indefinitely."""
    torrent_path = argv[1]
    Torrent(torrent_path).serve_forever()
from torrent import Torrent
from peerfactory import PeerFactory
from twisted.internet import reactor

# Connect to the first three known peers of the torrent and let the
# Twisted reactor drive the peer protocol from there.
tor = Torrent('ub.torrent')
for peer in tor.peers_list[0:3]:
    handshake = tor.get_handshake(peer)
    # Peers are "ip:port" strings.
    ip_add, port = peer.split(":")
    print 'connection to ', ip_add, port
    reactor.connectTCP(ip_add, int(port), PeerFactory(handshake, tor))
reactor.run()

# peer_response = tor.connect_to_peer(tor.peers_list[0])
# print peer_response
# import ipdb
# ipdb.set_trace()
from db import MedialibDb;
from torrent import Torrent;

# Smoke test: insert one dummy torrent and list the table before and
# after, printing the id assigned on insert.
medialib = MedialibDb();
torrent = Torrent()
torrent.title = 'test'
torrent.magnet_uri = 'sfewfwefewfewfwfewf'
print medialib.get_torrents_list()
medialib.add_torrent(torrent)
print torrent.id
print medialib.get_torrents_list()
def setUp(self):
    # Shared fixture: every test gets a Torrent parsed from ub.torrent.
    self.torrent = Torrent('ub.torrent')
from torrent import Torrent # Desired behavior - create torrent file. # write_torrent_file("test.torrent", "stuffToDo", # tracker = "http://127.0.0.1:9010", comment = "test") # Works! # How's the torrenting go? torrent = Torrent("ubuntu.torrent") torrent.run()