def upload_json(request): u = int(time.time()) l = request.POST['torlink'] try: data = urllib2.urlopen(l) buf = StringIO(data.read()) f = gzip.GzipFile(fileobj=buf) data = f.read() except: data = urllib2.urlopen(l) data = data.read() with open("/home/can/torrentor/torrents/%d.torrent"%u,"wb") as f: f.write(data) with open("/home/can/torrentor/torrents/%d.torrent"%u) as f: raw = f.read() name = bencode.bdecode(raw)['info']['name'] try: length = bencode.bdecode(raw)['info']['length'] except: length = 0 for file in bencode.bdecode(raw)['info']['files']: length += file['length'] proc = Process.objects.create(tlink=l,name=name,length=length,ttype=1,progress=2) proc.save() print to_canonical(name) if(os.path.isdir(to_canonical(name))): if not os.path.exists(to_canonical(name)): os.makedirs(to_canonical(name)) return HttpResponse('{"msg":true}',mimetype="application/json")
def _download_torrent_file(self, bibliotik_client):
    """Fetch and cache this entry's torrent file.

    No-op when already cached.  The payload is bdecoded purely as a
    sanity check before being stored.
    """
    if self.torrent_file is None:
        name, payload = bibliotik_client.download_torrent(self.id)
        # Raises if the payload is not valid bencoded data.
        bencode.bdecode(payload)
        self.torrent_filename = name
        self.torrent_file = payload
def validateRSS(self): try: if self.cookies: cookie_validator = re.compile("^(\w+=\w+)(;\w+=\w+)*$") if not cookie_validator.match(self.cookies): return (False, 'Cookie is not correctly formatted: ' + self.cookies) data = self.cache._getRSSData()['entries'] if not data: return (False, 'No items found in the RSS feed ' + self.url) (title, url) = self._get_title_and_url(data[0]) if not title: return (False, 'Unable to get title from first item') if not url: return (False, 'Unable to get torrent url from first item') if url.startswith('magnet:') and re.search('urn:btih:([\w]{32,40})', url): return (True, 'RSS feed Parsed correctly') else: if self.cookies: requests.utils.add_dict_to_cookiejar(self.session.cookies, dict(x.rsplit('=', 1) for x in (self.cookies.split(';')))) torrent_file = self.getURL(url) try: bdecode(torrent_file) except Exception, e: self.dumpHTML(torrent_file) return (False, 'Torrent link is not a valid torrent file: ' + ex(e)) return (True, 'RSS feed Parsed correctly')
def handles_in_branch(co, lpoints, bpoints, txn, cache=None):
    """Walk the ancestry of *bpoints* (stopping at ancestors of
    *lpoints*) and collect which handles were named, modified, added or
    deleted anywhere along the way.

    Returns (named, modified, added, deleted) as lists of handles.
    """
    pending = bpoints[:]
    visited = {}
    named, modified, added, deleted = {}, {}, {}, {}
    while pending:
        point = pending.pop()
        if point in visited:
            continue
        visited[point] = 1
        # Anything already merged into lpoints contributes nothing new.
        if _is_ancestor(co, point, lpoints, txn):
            continue
        if cache:
            try:
                pinfo = cache[point]
            except KeyError:
                pinfo = bdecode(co.lcrepo.get(point, txn=txn))
                cache[point] = pinfo
        else:
            pinfo = bdecode(co.lcrepo.get(point, txn=txn))
        for handle, hinfo in pinfo['handles'].items():
            if 'add' in hinfo:
                added[handle] = 1
            if 'name' in hinfo:
                named[handle] = 1
            if 'hash' in hinfo:
                modified[handle] = 1
            if 'delete' in hinfo:
                deleted[handle] = 1
        pending.extend(pinfo['precursors'])
    return (named.keys(), modified.keys(), added.keys(), deleted.keys())
def _download_torrent_file(self, mam_client):
    """Download and cache the torrent file for this entry via MAM.

    Returns immediately when the data is already cached; otherwise the
    payload is validated with bdecode before being stored.
    """
    if self.torrent_file is not None:
        return
    name, payload = mam_client.download_torrent(self.torrent_url)
    bencode.bdecode(payload)  # sanity-check: raises on garbage
    self.torrent_filename = name
    self.torrent_file = payload
def test_decode_parameter():
    """Ensure non-strings raise an exception."""
    # TODO: BTL implementation currently chokes on this type of input
    # self.assertRaises(BTFailure, bdecode, 0)
    # self.assertRaises(BTFailure, bdecode, None)
    # self.assertRaises(BTFailure, bdecode, 1.0)
    for bad_input in ([1, 2], {'foo': 'bar'}):
        with pytest.raises(BTFailure):
            bdecode(bad_input)
def test(): c = ChunkedList(chunk_size=14) c.push(('0')) c.push(('1')) c.push(('2')) c.push(('a', 'b', 'c')) c.push(('d', 'e', 'f')) d = bdecode(bencode(c)) d = ChunkedList(d.encode()) d.push(('g', 'h', 'i')) print list(d) print list(bdecode(bencode(d)))
def get_response(file, url, errorfunc):
    """Fetch and validate a bencoded "responsefile" (torrent metainfo).

    Reads from the local path *file* when given, otherwise from *url*.
    On any problem the error is reported through *errorfunc* and None is
    returned; otherwise the bdecoded dict is returned.
    """
    try:
        if file:
            h = open(file, 'rb')
            try:
                # quick test to see if responsefile contains a dict
                line = h.read(10)
                front = line.split(':', 1)[0]
                assert front[0] == 'd'
                int(front[1:])
            except:
                errorfunc(file + ' is not a valid responsefile')
                return None
            try:
                h.seek(0)
            except:
                # Stream is not seekable -- reopen from the start instead.
                try:
                    h.close()
                except:
                    pass
                h = open(file, 'rb')
        else:
            try:
                h = urlopen(url)
            except:
                errorfunc(url + ' bad url')
                return None
        response = h.read()
    except IOError as e:
        errorfunc('problem getting response info - ' + str(e))
        return None
    # Best-effort close; a failure here no longer matters.
    try:
        h.close()
    except:
        pass
    try:
        try:
            # Strict parse first; fall back to sloppy parsing with a warning.
            response = bdecode(response)
        except:
            errorfunc("warning: bad data in responsefile")
            response = bdecode(response, sloppy=1)
        # Structural checks: must be a dict with a valid 'info' dict and
        # a string 'announce' URL (the check_* helpers raise ValueError).
        check_type(response, dict)
        check_info(response.get('info'))
        check_type(response.get('announce'), str)
    except ValueError as e:
        errorfunc("got bad file info - " + str(e))
        return None
    return response
def find_info_hash(self, text):
    """Locate the bencoded 'info' dictionary inside *text* and return
    its SHA-1 digest (the torrent's info hash).

    Works by growing a candidate slice one byte at a time until it
    bdecodes cleanly.
    """
    # Skip past the '4:info' key so the slice starts on the value dict.
    # NOTE(review): if '4:infod' is absent, find() returns -1 and the
    # start index becomes 5 -- presumably callers only pass metainfo
    # that contains an info dict; confirm upstream.
    info_dict_start = text.find('4:infod') + len('4:info')
    end = info_dict_start
    # The original also tracked a 'success' flag that was never set to
    # True (the loop exits via break); the dead flag has been removed.
    while end <= len(text):
        end += 1
        try:
            bencode.bdecode(text[info_dict_start:end])
            break
        except bencode.BTFailure:
            pass
    info_string = text[info_dict_start:end]
    # Final decode validates the slice (raises if we fell off the end).
    bencode.bdecode(info_string)
    return hashlib.sha1(info_string).digest()
def announce(self, callback=None):
    """Announce this torrent to its tracker (through the configured
    tracker proxy) and deliver a TrackerResponse to *callback*.

    Tornado generator-style coroutine: yields the HTTP fetch.
    """
    logging.info('%s announcing' % self)
    if not self.http_client:
        # Shared class-level HTTP client for all Tracker instances.
        Tracker.http_client = httpclient.AsyncHTTPClient()
    if not self.can_announce():
        # NOTE(review): an early return appears to be missing here --
        # execution currently falls through and announces anyway.
        if callback:
            callback(None)
    self.last_announce = time.time()
    response = yield gen.Task(
        self.http_client.fetch,
        '%s?_tracker_url=%s&info_hash=%s&compact=1&callback=mycallback' % (
            options.tracker_proxy,
            urllib.quote(self.url),
            urllib.quote(self.infohash)
        )
    )
    if response.code == 200:
        if self.proxy_mode:
            # The proxy wraps the bencoded payload in a JSONP-style
            # mycallback(<base64>) envelope; unwrap it before decoding.
            rawdata = response.body
            b64data = rawdata[rawdata.find('(')+2:rawdata.find(')', len(rawdata) - 1)-1]
            data = bencode.bdecode(base64.b64decode(b64data))
        else:
            data = bencode.bdecode(response.body)
        peerdata = []
        if 'peers' in data:
            # Compact peer format: 6 bytes per peer (4 IP + 2 port).
            for i in range(len(data['peers'])/6):
                decoded = decode_peer(data['peers'][i*6:(i+1)*6])
                if decoded[1] != 0:  # drop peers advertising port 0
                    peerdata.append(decoded)
        if 'min interval' in data:
            self.min_interval = data['min interval']
        if 'interval' in data:
            self.interval = data['interval']
        self.peerdata = peerdata
        toreturn = {'response': data, 'peers': peerdata}
        Tracker.Torrent.instantiate(self.infohash).notify_peers(peerdata)
        if callback:
            callback(TrackerResponse(response, toreturn))
    else:
        if callback:
            callback(TrackerResponse(response))
def compute_hash(videoObj):
    """Return the torrent info hash (hex digest) for a video's
    download URL."""
    raw = urllib.urlopen(videoObj.download_url).read()
    info_dict = bencode.bdecode(raw)['info']
    digest = hashlib.sha1(bencode.bencode(info_dict))
    return digest.hexdigest()
def _get_info_hash(result):
    """Populate result.hash, either parsed from a magnet URI or computed
    from the torrent payload; returns *result*."""
    if result.url.startswith('magnet:'):
        btih = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
        # A 32-char hash is base32; normalise to lowercase hex.
        if len(btih) == 32:
            btih = b16encode(b32decode(btih)).lower()
        result.hash = btih
        return result
    try:
        # `bencode.bdecode` is monkeypatched in `medusa.init`
        torrent_bdecode = bdecode(result.content, allow_extra_data=True)
        info = torrent_bdecode['info']
        result.hash = sha1(bencode(info)).hexdigest()
    except (BencodeDecodeError, KeyError):
        log.warning(
            'Unable to bdecode torrent. Invalid torrent: {name}. '
            'Deleting cached result if exists', {'name': result.name}
        )
        cache_db_con = db.DBConnection('cache.db')
        cache_db_con.action(
            'DELETE FROM [{provider}] '
            'WHERE name = ? '.format(provider=result.provider.get_id()),
            [result.name]
        )
    except Exception:
        log.error(traceback.format_exc())
    return result
def parse_response_from_tracker(self, r):
    """Extract peer addresses from a tracker HTTP response.

    The tracker returns a bencoded dict whose 'peers' value is a compact
    binary string: 6 bytes per peer, the first 4 being the IPv4 address
    and the last 2 the big-endian port.  Returns a list of
    'x.x.x.x:port' strings.
    """
    peers_blob = bencode.bdecode(r.content)['peers']
    addresses = []
    for offset in range(0, len(peers_blob), 6):
        chunk = peers_blob[offset:offset + 6]
        if len(chunk) < 6:
            break  # ignore a trailing partial entry
        ip = '.'.join(str(ord(octet)) for octet in chunk[:4])
        port = ord(chunk[4]) * 256 + ord(chunk[5])
        addresses.append(ip + ':' + str(port))
    return addresses
def main(): try: args = parser.parse_args() if not os.path.isdir(args.directory): print '{} is not a directory'.format(args.directory) return 2 if args.delete or args.list_delete: cmd = delete_cmd else: cmd = verify_cmd all_ok = True for torrent_path in args.torrent: with open(torrent_path, 'rb') as f: torrent = bencode.bdecode(f.read()) info = torrent['info'] try: ok = cmd(info, torrent_path, args) except Exception: ok = False print '{}: ERROR'.format(torrent_path) if args.debug: raise all_ok = all_ok and ok return 0 if all_ok else 1 except KeyboardInterrupt: return 1
def _extended(self, data):
    """Handle a BEP 10 extended message.

    *data* is the raw message: byte 0 is the extended message id, the
    remainder is a bencoded payload.  Raises PeerException (after
    closing the connection) when the peer cannot serve metadata.
    """
    msgtype = ord(data[0])
    if msgtype == 0 and not self.extensionHandshakeReceived:
        #handshake
        payload = bencode.bdecode(data[1:])
        # The peer must advertise ut_metadata (BEP 9) and report the
        # metadata size, otherwise it is useless to us.
        if not "metadata_size" in payload or not "ut_metadata" in payload['m']:
            self.close()
            raise PeerException, "Not supporting ut_metadata extension"
        size = payload['metadata_size']
        if size == 0:
            self.close()
            raise PeerException, "The peer does not appear to have any metadata"
        self.torrent.setMetadataSize(size)
        self.metadata_id = payload['m']['ut_metadata']
        self.extensionHandshakeReceived = True
        #everything seems fine, go ahead an request the first bit of metadata
        self._requestPiece()
        self.resetTimeout()
    elif not self.extensionHandshakeReceived:
        self.close()
        raise PeerException, "Peer send extension messages before handshake"
    if msgtype == 3:
        #Got metadata extension message
        # bdecode_len returns the decoded dict plus the number of bytes
        # consumed; the raw metadata piece follows immediately after.
        r, l = bencode.bdecode_len(data[1:])
        self._metadataExt(r, data[l+1:])
def _get_torrent_hash(self, result):
    """Fill in result.hash: parsed from a magnet URI, or the SHA-1 of
    the bencoded 'info' dict of result.content.

    Raises on missing content or undecodable torrent data.
    """
    if result.url.startswith('magnet'):
        # Raw string for the pattern (avoids the invalid-escape warning).
        result.hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
        # A 32-char hash is base32-encoded; normalise to lowercase hex.
        if len(result.hash) == 32:
            result.hash = b16encode(b32decode(result.hash)).lower()
    else:
        if not result.content:
            logger.log('Torrent without content', logger.ERROR)
            raise Exception('Torrent without content')
        try:
            torrent_bdecode = bdecode(result.content)
        except BTFailure:
            # The unused 'as e' bindings have been dropped.
            logger.log('Unable to bdecode torrent', logger.ERROR)
            logger.log('Torrent bencoded data: {0}'.format(str(result.content)), logger.DEBUG)
            raise
        try:
            info = torrent_bdecode["info"]
        except Exception:
            logger.log('Unable to find info field in torrent', logger.ERROR)
            raise
        result.hash = sha1(bencode(info)).hexdigest()
    return result
def to_metainfo(self):
    """Build the full metainfo blob for this swarm from the info dict
    stored in redis.

    Raises NoInfoException when no info blob is recorded for the hash.
    """
    try:
        stored = redis_connection.get(self.info_hash + ".info")
        info_dict = bdecode(stored)
        return bencode({"announce": announce_url, "info": info_dict})
    except TypeError:
        # redis returned None -- nothing stored for this info hash.
        raise NoInfoException()
def handle(self, *args, **options):
    """Management command: load an existing torrent's data into WM.

    Positional args: (data_directory, torrent_file_path).  Skips the
    import when the torrent is already known to the master replica set.
    """
    if not self.check_args(args):
        print u'Pass the torrent data directory as a first argument, ' \
              u'a path to the .torrent file as a second.'
        return
    self.data_path, self.torrent_path = [wm_unicode(i) for i in args]
    with open(wm_str(self.torrent_path), 'rb') as f:
        self.torrent_info = bencode.bdecode(f.read())
    if options['base_dir']:
        # Treat data_path as the parent directory and descend into the
        # torrent's own name.
        self.data_path = os.path.join(self.data_path,
                                      wm_unicode(self.torrent_info['info']['name']))
    print u'Checking to see if torrent is already loaded into WM..'
    masters = list(ReplicaSet.get_what_master().transinstance_set.all())
    try:
        TransTorrent.objects.get(instance__in=masters, info_hash=self.info_hash)
        print u'Torrent already added to WM. Skipping...'
        return False
    except TransTorrent.DoesNotExist:
        # Not present yet -- proceed with the import.
        pass
    self.what_torrent = WhatTorrent.get_or_create(self.pseudo_request,
                                                  info_hash=self.info_hash)
    if not self.check_files():
        return
    self.move_files()
    print 'Adding torrent to WM...'
    manage_torrent.add_torrent(self.pseudo_request, self.trans_instance,
                               self.download_location, self.what_torrent.id)
    print 'Done!'
def parse_metadata(self, data):
    """Parse raw torrent bytes into a summary dict.

    Returns a dict with 'name', 'length', optional per-file entries and
    an md5 'data_hash' of the pieces blob, or None when *data* does not
    decode to a torrent with a name.
    """
    info = {}
    self.encoding = 'utf8'
    try:
        torrent = bdecode(data)  # decode the bencoded payload
        if not torrent.get('name'):
            return None
    except:
        return None
    detail = torrent
    info['name'] = self.decode_utf8(detail, 'name')
    if 'files' in detail:
        # Multi-file torrent: collect path/length for every member,
        # preferring the explicit utf-8 path variant when present.
        info['files'] = []
        for x in detail['files']:
            if 'path.utf-8' in x:
                v = {'path': self.decode('/'.join(x['path.utf-8'])), 'length': x['length']}
            else:
                v = {'path': self.decode('/'.join(x['path'])), 'length': x['length']}
            if 'filehash' in x:
                v['filehash'] = x['filehash'].encode('hex')
            info['files'].append(v)
        info['length'] = sum([x['length'] for x in info['files']])
    else:
        # Single-file torrent.
        # NOTE(review): a missing 'length' key here raises KeyError
        # outside the try above -- presumably callers guard for that.
        info['length'] = detail['length']
    info['data_hash'] = hashlib.md5(detail['pieces']).hexdigest()
    return info
def data_received(self, fd, events):
    """Read one datagram from the socket and dispatch it by its 'y'
    message type; malformed packets are silently dropped."""
    (datagram, sender) = self.socket.recvfrom(65536)
    try:
        message = bdecode(datagram)
        handler = self.types[message["y"]]
        handler(message, sender)
    except Exception:
        pass
def GetTopLevel(self, file_object):
    """Returns deserialized content of a bencoded file as a dictionary object.

    Args:
      file_object: A file-like object.

    Returns:
      Dictionary object representing the contents of the bencoded file.
    """
    # Peek at the first two bytes, then rewind for the real read.
    header = file_object.read(2)
    file_object.seek(0, os.SEEK_SET)
    if not self.BENCODE_RE.match(header):
        raise errors.UnableToParseFile(u'Not a valid Bencoded file.')
    try:
        decoded = bencode.bdecode(file_object.read())
    except (IOError, bencode.BTFailure) as exception:
        raise errors.UnableToParseFile(
            u'Unable to parse invalid Bencoded file with error: {0:s}'.format(
                exception))
    if not decoded:
        raise errors.UnableToParseFile(u'Not a valid Bencoded file.')
    return decoded
def from_metainfo_file(cls, metainfo_file, filename):
    """Create and persist a Swarm from a metainfo (.torrent) file object.

    *filename* is accepted for interface compatibility but unused here.
    """
    decoded = bdecode(metainfo_file.read())
    swarm = Swarm()
    swarm._save_info_(decoded["info"])
    swarm._ensure_recorded_()
    redis_connection.save()
    return swarm
def getPeers(self):
    """Announce to the tracker and populate self.peers with Peer
    objects parsed from the compact peer list."""
    # TODO: move the self.infoHash to init if we need it later.
    params = {
        'info_hash': self.infoHash,
        'peer_id': self.peer_id,
        # shouldn't 'left' be total size remaining?
        # here it's just piece length x 1
        'left': str(self.tracker['info']['piece length']),
    }
    response = requests.get(self.tracker['announce'], params=params)
    if response.status_code > 400:
        errorMsg = ("Failed to connect to tracker.\n"
                    "Status Code: %s \n"
                    "Reason: %s") % (response.status_code, response.reason)
        raise RuntimeError(errorMsg)
    result = bencode.bdecode(response.content)
    for chunk in self.chunkToSixBytes(result['peers']):
        # 4 bytes of IPv4 address followed by a 2-byte big-endian port.
        octets = [str(ord(chunk[idx])) for idx in range(0, 4)]
        port = ord(chunk[4]) * 256 + ord(chunk[5])
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setblocking(0)
        address = '.'.join(octets)
        peer = Peer(address, port, sock, self.infoHash, self.peer_id)
        self.peers.append(peer)
def _get_torrent_hash(self, result):
    """Set result.hash from a magnet URI or from the torrent payload's
    bencoded info dict; returns *result* or raises on bad data."""
    if result.url.startswith('magnet'):
        btih = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
        # 32-char hashes are base32; normalise to lowercase hex.
        if len(btih) == 32:
            btih = b16encode(b32decode(btih)).lower()
        result.hash = btih
        return result
    if not result.content:
        sickrage.LOGGER.error('Torrent without content')
        raise Exception('Torrent without content')
    try:
        torrent_bdecode = bdecode(result.content)
    except BTFailure:
        sickrage.LOGGER.error('Unable to bdecode torrent')
        sickrage.LOGGER.debug('Torrent bencoded data: %r' % result.content)
        raise
    try:
        info = torrent_bdecode[b"info"]
    except Exception:
        sickrage.LOGGER.error('Unable to find info field in torrent')
        raise
    result.hash = sha1(bencode(info)).hexdigest()
    return result
def test_ok(self):
    """A well-formed announce must return one peer and fire the
    after-announce callback with the announced values."""
    calls = []
    def callback(*args):
        calls.append(args)
    global remoteAddr
    remoteAddr = '1.2.3.4'
    query = {
        'info_hash': 'a' * 20,
        'peer_id': '0' * 20,
        'port': 42000,
        'uploaded': 0,
        'downloaded': 0,
        'left': 128,
        'compact': 0,
        'event': 'started',
    }
    self.tracker.addAfterAnnounce(callback)
    r = self.urlopen('http://tracker/' + 86 * '0' + '/announce?' +
                     urllib.urlencode(query))
    self.assertEqual(200, r.code)
    response = bencode.bdecode(r.read())
    self.assertIn('peers', response)
    self.assertEqual(len(response['peers']), 1)
    self.assertEqual(1, len(calls))
    userId, infoHash, peerIp, port, peerId = calls[0]
    self.assertEqual(infoHash, query['info_hash'])
    self.assertEqual(port, query['port'])
    self.assertEqual(peerIp, remoteAddr)
    self.assertEqual(peerId, query['peer_id'])
    self.assertEqual(self.auth._authenticateSecretKey, userId)
def make_tracker_request(gto_dict, info_hash, rsa, crt):
    """Announce to the GTO tracker over client-certificate TLS and
    return its bdecoded response.

    The temporary key/cert files are always removed, even when the
    HTTP request fails (the original leaked them on exceptions).
    """
    # TODO(hammer): support partial downloads
    peer_id = get_fingerprint()
    left = sum([f.get('length') for f in gto_dict.get('info').get('files')])
    key = get_random_string(8)
    payload = {
        'info_hash': info_hash,
        'peer_id': peer_id,
        'port': 20893,
        'uploaded': 0,
        'downloaded': 0,
        'left': left,
        'corrupt': 0,
        'redundant': 0,
        'compact': 1,
        'numwant': 200,
        'key': key,
        'no_peer_id': 1,
        'supportcrypto': 1,
        'event': 'started',
    }
    url_base = 'https://dream.annailabs.com:21111/tracker.php/announce'
    temp_crt_file_path = get_temp_crt_file_path(crt)
    temp_key_file_path = get_temp_key_file_path(rsa)
    try:
        r = requests.get(url_base, params=payload, verify=False,
                         cert=(temp_crt_file_path, temp_key_file_path))
        logging.debug('Tracker response content: %s' % r.content)
    finally:
        # Clean up the on-disk credentials no matter what happened above.
        os.remove(temp_crt_file_path)
        os.remove(temp_key_file_path)
    tracker_response = bencode.bdecode(r.content.strip())
    return tracker_response
def handle_tracker_response(self, req): if not req.ok: raise Exception('Problem with Tracker') # Note: req.text returns a 'utf-8' encoding of req.content. rd = bencode.bdecode(req.content) if 'failure_reason' in rd: raise Exception(rd['failure_reason']) self.interval = rd.get('interval', None) self.complete = rd.get('complete', None) self.incomplete = rd.get('incomplete', None) # Optional keys a tracker may return self.tracker_id = rd.get('tracker_id', self.tracker_id) self.min_interval = rd.get('min interval', None) # # Set the timeout of the reactor to the announce interval # if self.min_interval: # self.reactor.timeout = self.min_interval # else: # self.reactor.timeout = self.inteval peers_raw = rd['peers'] if isinstance(peers_raw, dict): # Must then check peer_id matches t handshake raise Exception('Dicitionary peers model not yet implemented.') # Break peers_raw into list of (ip, port) tuples peers_bytes = (peers_raw[i:i+6] for i in range(0, len(peers_raw), 6)) peer_addrs = (map(ord, peer) for peer in peers_bytes) peers = [('.'.join(map(str, p[0:4])), p[4]*256 + p[5]) for p in peer_addrs] print peers self._update_peers(peers)
def receive_krpc(self):
    """Receive one KRPC datagram and hand it to handle_message;
    malformed or failing packets are silently ignored."""
    try:
        datagram, sender = self.ufd.recvfrom(65536)
        self.handle_message(bdecode(datagram), sender)
    except Exception:
        pass
def handle(self, data, addr):
    """Decode one DHT packet and dispatch it to the matching query or
    reply handler; undecodable or incomplete messages are dropped."""
    try:
        message = bdecode(data)
    except:
        return
    query_handlers = {
        "ping": self.handle_pi_qdata,
        "find_node": self.handle_fn_qdata,
        "get_peers": self.handle_gp_qdata,
        "announce_peer": self.handle_ap_qdata,
    }
    try:
        kind = message["y"]
        if kind == "q":
            if message["q"] in query_handlers:
                query_handlers[message["q"]](message, addr)
        elif kind == "r":
            # Replies are distinguished by their payload keys.
            if "token" in message["r"]:
                self.handle_gp_rdata(message, addr)
            elif "nodes" in message["r"]:
                self.handle_fn_rdata(message, addr)
    except KeyError:
        pass
def downloadTracker():
    """Read the .torrent given on the command line, print its metadata,
    and perform a (non-compact) announce against its tracker."""
    if len(sys.argv) != 3:
        print("Improper usage!")
        exit(-1)
    src = sys.argv[1]
    # 'with' guarantees the handle is closed (the original leaked it on
    # exceptions between open() and close()).
    with open(src, "rb") as f:
        tracker = f.read()
    decoded = bencode.bdecode(tracker)
    if not isinstance(decoded, bencode.DictType):
        print("Invalid bencode!")
        exit(-1)
    announce = decoded.get("announce")
    print(announce)
    decoded = decoded.get("info")
    name = decoded.get("name")
    print(name)
    pieceLength = decoded.get("piece length")
    print(pieceLength)
    length = decoded.get("length")
    print(length)
    pieces = decoded.get("pieces")
    getRequest = announce + "?"
    sha1 = hashlib.sha1()
    # BUG FIX: the info hash is the SHA-1 of the *bencoded* info dict;
    # hashing str(decoded) hashed a Python repr and produced a hash no
    # tracker would recognise.
    sha1.update(bencode.bencode(decoded))
    getRequest += "info_hash=" + urllib.quote(sha1.digest())
    getRequest += "&peer_id=xxDOWNTORRDOWNTORRxx&port=6881&compact=0&uploaded=0&downloaded=0&left=" + str(length)
    print(getRequest)
    response = urllib2.urlopen(getRequest)
    print(response.read())
    response.close()
def handle_message(self, msg_data):
    """Handle one ut_metadata (BEP 9) extended message.

    msg_data[0] is the extended message id: 0 = extension handshake,
    1 = the ut_metadata id we advertised; the rest is bencoded.
    """
    if msg_data[0] == 0:
        # Handshake: learn the metadata size and the peer's ut_metadata
        # id, reply with our own handshake, then request every piece.
        hs_body = bdecode(msg_data[1:])
        metadata_size = hs_body.get("metadata_size", 0)
        ut_metadata_id = hs_body.get("m", {}).get("ut_metadata", 0)
        if metadata_size and ut_metadata_id:
            hs_response = {
                "e": 0,
                "metadata_size": metadata_size,
                "v": "μTorrent 3.2.3",  # mimic a common client
                "m": {"ut_metadata": 1},
                "reqq": 255
            }
            self.send_extended_message(0, hs_response)
            # Metadata is served in 16 KiB pieces; request them all.
            for i in range(0, int(1 + metadata_size / (16 * 1024))):
                self.send_extended_message(ut_metadata_id, {"msg_type": 0, "piece": i})
    elif msg_data[0] == 1:
        # Metadata message: a bencoded header followed directly by the
        # raw piece bytes (msg_type 1 = data).
        r, l = decode_dict(msg_data[1:], 0)
        if r["msg_type"] == 1:
            self.metadata[r["piece"]] = msg_data[l + 1:]
            # Reassemble the pieces in order and check completeness.
            metadata = bytes()
            for key in sorted(self.metadata.keys()):
                metadata += self.metadata[key]
            if len(metadata) == r["total_size"]:
                # Only trust metadata whose SHA-1 matches the info hash.
                if sha1(metadata).digest() == self.info_hash:
                    result = bdecode(metadata)
                else:
                    result = None
                if not self.result_future.done():
                    self.result_future.set_result(result)
                self.transport.close()
def addFormat(request, release_id):
    """Django view: attach a new format (with an uploaded torrent) to a
    release.

    GET renders empty forms; a valid POST computes the torrent's info
    hash, persists the upload and format, and redirects to the album
    page.  Duplicate torrents re-render the form with an error.
    """
    release = Release.objects.get(id=release_id)
    formatform = FormatForm(initial={"release": release})
    torrentform = TorrentUploadForm(initial={"user": request.user})
    if request.POST:
        formatform = FormatForm(request.POST)
        torrentform = TorrentUploadForm(request.POST, request.FILES)
        if formatform.is_valid() and torrentform.is_valid():
            u = torrentform.save()
            #u.user = request.user
            # NOTE(review): this handle is never closed -- consider
            # 'with open(...)'.
            data = open(u.torrent.file.name, "rb").read()
            torrent = bencode.bdecode(data)
            #u.data = torrent
            #u.info_hash = torrent['info']['']
            # Info hash = SHA-1 of the bencoded 'info' dict.
            s = hashlib.sha1()
            s.update(bencode.bencode(torrent['info']))
            u.info_hash = s.hexdigest()
            # Drop the bulky 'pieces' blob before storing the metadata.
            exclude = ["pieces"]
            for key in exclude:
                torrent['info'].pop(key, None)
            u.data = torrent['info']
            try:
                u.save()
            except IntegrityError as ie:
                print ie
                print "error"
                print u.torrent
                # NOTE(review): the nesting of this recovery path is
                # ambiguous in the source as received -- confirm against
                # the original file.
                try:
                    u.torrent.delete()
                except IntegrityError:
                    # Duplicate upload: surface a form error, discard the
                    # row, and re-render the form.
                    errors = torrentform._errors.setdefault(
                        "torrent", ErrorList())
                    errors.append(u"Duplicate Torrent")
                    u.delete()
                    return render_to_response(
                        "release/addFormat.html",
                        context_instance=RequestContext(
                            request, {
                                "release": release,
                                "formatform": formatform,
                                "torrentform": torrentform}))
                u.delete()
            format = formatform.save(commit=False)
            format.torrent = u
            format.save()
            # Link the torrent back to the format via generic FK fields.
            u.content_type = ContentType.objects.get(model='albumformat')
            u.object_id = format.id
            u.save()
            return HttpResponseRedirect(
                reverse(albumPage, kwargs={'album_id': release.album.id}))
    return render_to_response(
        'music/release/addFormat.html',
        context_instance=RequestContext(
            request, {
                "release": release,
                "formatform": formatform,
                "torrentform": torrentform}))
def __init__(self, metainfo, port, peer_id):
    """Announce 'started' to the metainfo's tracker and cache the reply.

    Raises TrackerError on connection failure, on a tracker-reported
    failure reason, or on a structurally invalid response.
    """
    self._metainfo = metainfo
    self._port = port
    self._peer_id = peer_id
    params = {
        'info_hash': self._metainfo.info_hash,
        'peer_id': self._peer_id,
        'port': self._port,
        'uploaded': 0,
        'downloaded': 0,
        # Nothing downloaded yet: 'left' is the total payload size.
        'left': sum([pair[1] for pair in self._metainfo.files]),
        'compact': 1,
        'event': 'started'
    }
    try:
        response = requests.get(self._metainfo.announce, params=params)
    except requests.ConnectionError:
        raise TrackerError("Can't connect to the tracker at {}".format(
            self._metainfo.announce))
    response = bencode.bdecode(response.content)
    if 'failure reason' in response:
        raise TrackerError("Failure reason: {}".format(
            response['failure reason']))
    if 'warning message' in response:
        logger.warning("Warning: {}".format(response['warning message']))
        print >> sys.stderr, ("Warning: {}".format(
            response['warning message']))
    try:
        self._min_interval = response.get('min interval', 0)
        if 'tracker id' in response:
            self._tracker_id = response['tracker id']
        self._interval = response['interval']
        self._complete = response['complete']
        self._incomplete = response['incomplete']
        if isinstance(response['peers'], list):
            # Non-compact (dictionary) model: already a list of dicts.
            self._peers = response['peers']
        else:
            # Compact model: 6 bytes per peer -- 4 IP octets + 2-byte port.
            self._peers = []
            peers = response['peers']
            for offset in xrange(0, len(peers), 6):
                self._peers.append({
                    'ip': "{}.{}.{}.{}".format(str(ord(peers[offset])),
                                               str(ord(peers[offset + 1])),
                                               str(ord(peers[offset + 2])),
                                               str(ord(peers[offset + 3]))),
                    'port': (ord(peers[offset + 4]) * 256 +
                             ord(peers[offset + 5]))
                })
    except Exception:
        raise TrackerError("Invalid tracker response")
def _update_mini_dag(co, changedbs, helper, handles, cset, txn):
    """Record changeset *cset* in the per-handle mini-dags.

    For each handle, computes its simplified precursors, builds a
    minimal metadata record (via *helper*), and writes it to the dag
    and branch-index databases.  Returns the handles whose merge was
    implicit (all precursors deleted) and therefore left untouched.
    """
    indexdb = changedbs.indexdb
    dagdb = changedbs.dagdb
    pres = cset['precursors']
    point = cset['point']
    #bminfo = bdecode(co.branchmapdb.get(point, txn=txn))
    bminfo = db_get(co, co.branchmapdb, point, txn)
    bnum = struct.pack('>II', bminfo['branch'], bminfo['branchnum'])
    untouched = []
    for handle in handles:
        precursors = simplify_precursors(co, handle, changedbs, pres, txn)[0]
        mdinfo = {'handle': {}}
        # there's some broken history, need to work around it for now
        # deleted files are deleted, regardless of later edits
        deleted = None
        #if len(precursors) > 1:
        if True:
            # only deletes can be implicitly merged
            all_deleted = True
            for pre in precursors:
                phinfo = bdecode(dagdb.get(handle + pre[0], txn=txn))
                if phinfo['handle'].has_key('delete'):
                    deleted = phinfo
                else:
                    all_deleted = False
            if all_deleted:
                if len(precursors) > 1:
                    # affects finding when the handle was modified
                    untouched.append(handle)
                    continue
        if deleted is not None:
            # A delete in any precursor wins over later edits.
            mdinfo['handle'] = deleted['handle']
        elif cset['handles'].has_key(handle):
            mdinfo['handle'] = helper(co, handle, point, precursors,
                                      cset['handles'][handle], txn)
        if mdinfo['handle'] == {}:
            # Nothing recorded for this handle; several live precursors
            # would require a real merge, which cannot happen implicitly.
            if deleted is None:
                if len(precursors) > 1:
                    raise HistoryError, 'cannot automatically merge changes'
            del mdinfo['handle']
        mdinfo['precursors'] = precursors
        if precursors == []:
            # Only freshly added handles may legitimately lack precursors.
            assert cset['handles'][handle].has_key('add')
        dagdb.put(handle + point, bencode(mdinfo), txn=txn)
        indexdb.put(handle + bnum, point, txn=txn)
    return untouched
def transfer(self, string, tpath, foo=None, bar=None): self.dir_dict = {} self.sub_dir_index = 0 dstring = bencode.bdecode(string) files = [] file_index = 0 ## change files' name if dstring['info'].get('files'): for fl in dstring['info']['files']: filename = fl['path'][-1] if args.type_ == 'n': newfilename = re.sub(foo, bar, filename, re.I) \ if foo and bar else filename if filename != newfilename: print filename, s % (1, 92, '==>'), newfilename path = [self._get_sub_dir_index(i) \ for i in fl['path'][:-1]] + [newfilename] else: ext = os.path.splitext(filename)[-1] ext = self._check_ext(ext) path = [self._get_sub_dir_index(i) \ for i in fl['path'][:-1]] \ + ['%s%s' % (file_index, ext)] file_index += 1 fl['path'] = path elif args.type_ == 'be64': fn, ext = os.path.splitext(filename) ext = self._check_ext(ext) tfn = '/'.join(fl['path'][:-1] + [fn]) e_fn = base64.urlsafe_b64encode(tfn) fl['path'] = [e_fn + '.base64' + ext] for item in fl.keys(): #if item not in ['path', 'length', 'filehash', 'ed2k']: if item not in ['path', 'length', 'filehash']: del fl[item] files.append(fl) dstring['info']['files'] = files ## change top directory for i in dstring['info'].keys(): if i not in ['files', 'piece length', 'pieces', 'name', 'length']: del dstring['info'][i] elif 'name' in i: if args.name: dstring['info'][i] = args.name ## delete comment and creator for i in dstring.keys(): if i not in ['creation date', 'announce', 'info', 'encoding']: del dstring[i] c = bencode.bencode(dstring) with open(tpath, 'w') as g: g.write(c)
def getInfo():
    """Print a summary (name, date, size, magnet/thunder links) for
    every .torrent file in the current directory, then the count."""
    cwd = os.getcwd()
    torrent_names = [fn for fn in os.listdir(cwd)
                     if fn.split('.')[-1] == 'torrent']
    count = 0
    for torrent_name in torrent_names:
        count += 1
        with open(cwd + os.sep + torrent_name, 'rb') as handle:
            meta = bdecode(handle.read())
        d = {}
        try:
            d['creation_date'] = strftime('%Y-%m-%d',
                                          localtime(meta['creation date']))
            if 'files' not in meta['info']:
                d['type'] = 'single'
                d['file_num'] = 1
                d['file_name'] = meta['info']['name']
                d['file_size'] = meta['info']['length']
                d['size'] = meta['info']['length']
            else:
                d['type'] = 'multi'
                d['file_num'] = len(meta['info']['files'])
                d['file_info'] = [dict(file_size=entry['length'],
                                       file_name='/'.join(entry['path']))
                                  for entry in meta['info']['files']]
                d['size'] = sum([entry['file_size']
                                 for entry in d['file_info']])
            d['name'] = meta['info']['name']
            # File name (without extension) is the lowercase info hash.
            d['magnet'] = 'magnet:?xt=urn:btih:' + torrent_name.split('.')[0].lower()
            d['thunder'] = 'thunder://' + b64encode('AA' + d['magnet'] + 'ZZ')
        except KeyError:
            print('$$$$$$$$$$$$$$$$$$$$$$$$$')
            print(torrent_name)
            continue
        if d['type'] == 'single':
            print('TName: %s' % d['name'])
            print('FName: %s' % d['file_name'])
            print(' Date: %s' % d['creation_date'])
            print(' Size: %d' % d['file_size'])
            print(' Num: %d' % d['file_num'])
            print('MLink: %s' % d['magnet'])
            print('TLink: %s' % d['thunder'])
        else:
            print('TName: %s' % d['name'])
            print(' Date: %s' % d['creation_date'])
            print(' Size: %d' % d['size'])
            print(' Num: %d' % d['file_num'])
            for entry in d['file_info']:
                print(' %s %d' % (entry['file_name'][:10], entry['file_size']))
            print('MLink: %s' % d['magnet'])
            print('TLink: %s' % d['thunder'])
        print('-------------------------')
    print(count)
def generate_magnet(torrent_file):
    """Print a magnet URI (base32-encoded info hash) for the .torrent
    at *torrent_file*."""
    # 'with' ensures the handle is closed (the original leaked it).
    with open(torrent_file, 'rb') as handle:
        torrent = handle.read()
    metadata = bencode.bdecode(torrent)
    # Info hash = SHA-1 over the bencoded 'info' dict.
    hashcontents = bencode.bencode(metadata['info'])
    digest = hashlib.sha1(hashcontents).digest()
    b32hash = base64.b32encode(digest)
    magnet = 'magnet:?xt=urn:btih:' + b32hash
    print(magnet)
def torrent_file_to_magnet(torrent_file):
    """Return a magnet URI (hex info hash plus display name) for the
    .torrent at *torrent_file*."""
    # 'with' ensures the handle is closed (the original leaked it).
    with open(torrent_file, 'rb') as handle:
        data = handle.read()
    metadata = bencode.bdecode(data)
    name = metadata['info']['name']
    dn = quote(name)
    # Info hash = SHA-1 over the bencoded 'info' dict.
    info_bts = bencode.bencode(metadata['info'])
    info_hash = hashlib.sha1(info_bts).hexdigest()
    return f'magnet:?xt=urn:btih:{info_hash}&dn={dn}'
def validateRSS(self):
    """Sanity-check this provider's RSS feed: cookie format, feed
    contents, and that the first item yields a usable magnet/torrent.

    Returns (ok, message).
    """
    try:
        if self.cookies:
            cookie_validator = re.compile(r"^(\w+=\w+)(;\w+=\w+)*$")
            if not cookie_validator.match(self.cookies):
                return False, 'Cookie is not correctly formatted: ' + self.cookies
        data = self.cache._get_rss_data()['entries']
        if not data:
            return False, 'No items found in the RSS feed ' + self.urls[
                'base_url']
        (title, url) = self._get_title_and_url(data[0])
        if not title:
            return False, 'Unable to get title from first item'
        if not url:
            return False, 'Unable to get torrent url from first item'
        # Magnet links need no further validation.
        if url.startswith('magnet:') and re.search(
                r'urn:btih:([\w]{32,40})', url):
            return True, 'RSS feed Parsed correctly'
        if self.cookies:
            requests.utils.add_dict_to_cookiejar(
                sickrage.srCore.srWebSession.cookies,
                dict(x.rsplit('=', 1) for x in self.cookies.split(';')))
        try:
            torrent_file = sickrage.srCore.srWebSession.get(url).text
        except Exception:
            return False, 'Unable to get torrent from url'
        try:
            bencode.bdecode(torrent_file)
        except Exception as e:
            self.dumpHTML(torrent_file)
            return False, 'Torrent link is not a valid torrent file: {}'.format(
                e.message)
        return True, 'RSS feed Parsed correctly'
    except Exception as e:
        return False, 'Error when trying to load RSS: {}'.format(e.message)
def run(self):
    """Receive loop: decode each incoming datagram and dispatch it to
    on_message; on any error a timestamped line is printed and the
    loop continues.
    """
    while True:
        try:
            # NOTE(review): reads the module-level 'ufd' socket rather
            # than an attribute like self.ufd -- confirm this is intended.
            (data, address) = ufd.recvfrom(65536)
            msg = bdecode(data)
            self.on_message(msg, address)
        except:
            # Keep the receiver alive on malformed packets or handler
            # failures.
            print str(time.time()) + " error."
def datagram_received(self, datagram, address):
    """Decode an incoming DHT datagram and schedule its handling;
    undecodable packets are logged and dropped."""
    log.debug("received datagram from %s", address)
    try:
        decoded = bdecode(datagram.decode('latin1'))
    except BTFailure as e:
        log.warning('decode data failure. {}'.format(e))
        return
    asyncio.ensure_future(self._solve_datagram(decoded, address))
async def datagram_received(self, data, addr):
    """Decode one datagram (bytes-keyed) and dispatch it; decode
    failures are silently ignored for now."""
    try:
        message = bdecode(data, decoder=decode_bkeys)
    except BTFailure:
        # TODO: Log error
        return
    self.handle_message(message, addr)
def write_index(co, point, handle, index, txn):
    """Store *index* for (handle, point) in the contents dag, merging
    into any existing record for that key."""
    cdagdb = co.contents.dagdb
    key = handle + point
    try:
        record = bdecode(cdagdb.get(key, txn=txn))
        record['handle'] = index
    except (db.DBNotFoundError, TypeError):
        # No existing record (or get() returned None) -- start fresh.
        record = {'handle': index}
    cdagdb.put(key, bencode(record), txn=txn)
def __init__(self, path: str):
    """Load a '.fastresume' or '.torrent' file from *path*.

    The info hash is taken from the file's base name; the decoded
    payload is kept in self._data.  Raises FileNotFoundError when the
    path does not exist or has the wrong extension.
    """
    if not isfile(abspath(expanduser(path))) or spe(path)[1] not in (
            '.fastresume', '.torrent'):
        raise FileNotFoundError(
            f"{path} does not direct to a valid '.fastresume' file.")
    self.hash = spe(sp(path)[-1])[0]
    with open(path, 'rb') as file:
        # BUG FIX: bdecode expects the file's bytes, not the file
        # object itself.
        self._data = bencode.bdecode(file.read())
def parse_message_lt_tex(self, stream, n, length):
    """Parse an lt_tex extended message from *stream* and record the
    trackers it announces; raises InvalidBitTorrentStreamError on
    undecodable payloads."""
    payload = stream[n + 6:n + length + 4]
    try:
        lt_tex = bencode.bdecode(payload)
    except bencode.BTL.BTFailure:
        raise InvalidBitTorrentStreamError()
    self.logger.info('[MESSAGE] [EXTENDED] lt_tex: announced {} tracker(s).'.format(
        len(lt_tex['added'])))
    self.__new_extended_message('lt_tex', added=lt_tex['added'])
def datagramReceived(self, str, addr): # bdecode try: msg = bdecode(str) except Exception, e: if self.noisy: print "response decode error: " + ` e ` self.d.errback()
def fetch_metadata(self, callback=None):
    """Fetch and bdecode this stream's torrent metadata from the seed
    endpoint, then invoke *callback* with the raw HTTP response."""
    session = self.client.session.data
    url = '%s/talon/seed/%s/torrent/%s' % (session['host'],
                                           session['cid'],
                                           self.get('stream_id'))
    request = tornado.httpclient.HTTPRequest(url, validate_cert=False)
    response = yield gen.Task(httpclient.fetch, request)
    self.meta = bencode.bdecode(response.body)
    callback(response)
def _verify_response(self, response): try: response = bencode.bdecode(response) except ValueError: raise BadTrackerError, 'response not bencoded' if 'failure reason' in response: raise BadTrackerError, response['failure reason'] return response
def recv_msg(self):
    """Block until one UDP datagram is successfully received, then return
    (bdecoded payload, sender address)."""
    while True:
        try:
            raw, sender = self.s.recvfrom(65536)
        except Exception:
            # Best-effort: keep retrying on any socket error.
            continue
        return bdecode(raw), sender
def check_token(self, data, address):
    """Bdecode `data` and, when its 't' field matches TOKEN, dispatch the
    message to the handler chosen by its 'r' (referer) field; otherwise
    record the sender as suspicious via self.record(2, ...)."""
    info = bdecode(data)
    # Fix: `in` replaces dict.has_key(), which is deprecated in Python 2
    # and removed in Python 3 (behavior identical).
    if 't' in info and info['t'] == TOKEN:
        _ref = info['r'] if 'r' in info else ''
        _func = self.referer(_ref)
        # Only call the handler when one was found (short-circuit idiom).
        _func and _func(info, address)
    else:
        self.record(2, address[0], address[1])
class TransmissionClient(object):
    """Client for a transmission-daemon control socket.

    Wire format (as implemented below): each frame is an 8-byte ASCII-hex
    payload length followed by a bencoded payload, over a unix-domain
    stream socket.
    """

    # NOTE(review): presumably a protocol message tag counter; not used
    # in the methods visible here — confirm against the rest of the class.
    TAGNUMBER = 0

    def __init__(self, socketpath="/tmp/transmission-daemon"):
        # Path of the daemon's unix socket; the connection is opened
        # lazily by _connect(), not here.
        self.socketpath = socketpath
        self.socket = None

    #
    # internal helper methods:
    def _connect(self, ping=True):
        """ opens a connection to the daemon"""
        if not os.path.exists(self.socketpath):
            raise TransmissionClientFailure, """No socket at %s. Make sure your daemon is up and running!""" % self.socketpath
        # Drop any existing connection before opening a fresh one.
        self._close()
        self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.socket.connect(self.socketpath)
        # 1. we wait for the version info from the server
        answer = self._listen()
        try:
            if answer['version']['max'] < 2:
                raise InsufficientProtocolVersion, \
                    "The server must at least support version 2"
        except KeyError:
            # Missing 'version'/'max' keys means the peer is not speaking
            # the expected handshake protocol.
            raise TransmissionClientFailure, \
                "received illegal answer from daemon: '%s'" % repr(answer)
        # 2. we send our own version info
        self._send_command_v1({'version': {'min': 1, 'max': 2}})
        # 3. sending a 'ping' seems to ensure that all following messages receive status messages
        # back from the server:
        if ping:
            self._send_command_v2('noop')
        # now the connection has been established and we can start sending commands

    def _close(self):
        """ if open, closes connection to the daemon"""
        if self.socket is not None:
            self.socket.close()
            self.socket = None

    def _listen(self):
        """Waits for a transmission from the other side and returns it."""
        # First retrieve the eight byte ascii-hex payload length
        try:
            payloadlen = self.socket.recv(8)
        except IOError, (errno, ermess):
            # 32 = EPIPE, 57 = ENOTCONN (BSD): reconnect once and retry
            # the length read.
            if errno in [32, 57]:
                self._connect()
                payloadlen = self.socket.recv(8)
        try:
            payloadlen = int(payloadlen, 16)
        except ValueError:
            # Malformed length header -> treat as "no message received".
            return None
        return bdecode(self.socket.recv(payloadlen))
def datagramReceived(self, datagram, address):
    """Decode and dispatch one KRPC-style (bencoded) datagram.

    Queries ("y" == "q") are routed to a local rpc_<name> method via
    _acceptRequest, responses ("r") to _acceptResponse, errors ("e") are
    ignored, and malformed packets are answered with a bencoded error
    reply ([code, message]) written straight back to the sender.
    """
    if self.noisy:
        log.msg("received datagram from %s" % repr(address))
    try:
        msg = bdecode(datagram)
        msgID = msg["t"]    # transaction id, echoed back in replies
        msgType = msg["y"]  # message kind: "q"/"r"/"e"
        if msgType == "q":
            # Look up the handler for this query name, e.g. rpc_ping.
            f = getattr(self, "rpc_%s" % msg["q"], None)
            if f is None or not callable(f):
                # 204: method unknown
                self.transport.write(
                    bencode({
                        "t": msgID,
                        "y": "e",
                        "e": [204, "Method Unknown"]
                    }), address)
            else:
                self._acceptRequest(msgID, [msg["q"], [msg["a"]]], address)
        elif msgType == "r":
            self._acceptResponse(msgID, msg["r"], address)
        elif msgType == "e":
            # Ignore error messages
            pass
        else:
            # otherwise, don't know the format, don't do anything
            log.msg("Received unknown message from %s, ignoring" % repr(address))
            self.transport.write(
                bencode({
                    "t": msgID,
                    "y": "e",
                    "e": [203, "Protocol Error, invalid arguments"]
                }), address)
    except KeyError:
        # A required key ("t"/"y"/"q"/"a"/"r") was missing from the dict.
        log.msg("Invalid message data from %s, ignoring" % repr(address))
        self.transport.write(
            bencode({
                "y": "e",
                "e": [201, "Generic Error"]
            }), address)
    except BTFailure:
        # The payload was not valid bencode at all.
        log.msg("Not a valid bencoded string from %s, ignoring" % repr(address))
        self.transport.write(
            bencode({
                "y": "e",
                "e": [203, "Protocol Error, malformed packet"]
            }), address)
async def main():
    """Read a hard-coded .torrent file, announce to its tracker, and start
    downloading from the returned peer list."""
    # Fix: context manager replaces manual open/close so the file handle
    # is released even if generatePeerID()/read() raises.
    with open("torrentFiles/Mob_Psycho_100_S2-01.torrent", 'rb') as f:
        peerid = await generatePeerID()
        torrent_file = f.read()
    torrent_file = bencode.bdecode(torrent_file)
    response = await requestPeerList(torrent_file, peerid)
    peer_list = await decodePeerBinary(response['peers'])
    await downloadFile(torrent_file, peer_list, peerid)
def run(self):
    """(Re-)join the DHT, then loop forever dispatching each received
    datagram to on_message; every error is swallowed to keep the loop alive."""
    self.re_join_DHT()
    while True:
        try:
            raw, sender = self.ufd.recvfrom(65536)
            self.on_message(bdecode(raw), sender)
        except Exception:
            # Best-effort loop: ignore malformed packets / socket hiccups.
            pass
def test_UnauthSecretKey(self):
    """An announce under an unknown (all-'A') secret key must come back
    with a 'failure reason' mentioning the secret key."""
    self.keys = ['0' * 64]
    self.info_hashes = []
    query = {'peer_id': 'A' * 20, 'port': 1025, 'uploaded': 0,
             'downloaded': 0, 'left': 0, 'info_hash': 'C' * 20}
    announce_url = ('http://tracker/' + 86 * 'A' + '/announce?'
                    + urllib.urlencode(query))
    decoded = bencode.bdecode(self.urlopen(announce_url).read())
    self.assertIn('failure reason', decoded)
    self.assertIn('secret key', decoded['failure reason'])
def test_authd(self):
    """An announce under a registered key for a known info_hash succeeds:
    the bencoded response carries 'peers' and 'interval'."""
    self.keys = ['0' * 64]
    self.info_hashes = ['C' * 20]
    query = {'peer_id': 'A' * 20, 'port': 1025, 'uploaded': 0,
             'downloaded': 0, 'left': 0, 'info_hash': 'C' * 20}
    # Key is transmitted URL-safe base64 encoded with padding stripped.
    key_part = base64.urlsafe_b64encode(self.keys[0]).replace('=', '')
    announce_url = ('http://tracker/' + key_part + '/announce?'
                    + urllib.urlencode(query))
    decoded = bencode.bdecode(self.urlopen(announce_url).read())
    self.assertIn('peers', decoded)
    self.assertIn('interval', decoded)
def download(self, data=None, media=None, filedata=None):
    """Send a torrent (raw file data or magnet link) to rTorrent, apply
    label/directory settings, and start it unless configured paused.

    Returns the downloader id on success, False on any failure."""
    if not media:
        media = {}
    if not data:
        data = {}

    log.debug('Sending "%s" to rTorrent.', (data.get('name')))
    if not self.connect():
        return False

    torrent_params = {}
    if self.conf('label'):
        torrent_params['label'] = self.conf('label')

    # Plain torrent protocol requires actual file data to have arrived.
    if not filedata and data.get('protocol') == 'torrent':
        log.error('Failed sending torrent, no data')
        return False

    # Magnet links are first resolved into real torrent file data.
    if data.get('protocol') == 'torrent_magnet':
        filedata = self.magnetToTorrent(data.get('url'))
        if filedata is False:
            return False
        data['protocol'] = 'torrent'

    # The info-hash identifies the torrent later (downloadReturnId).
    info = bdecode(filedata)["info"]
    torrent_hash = sha1(bencode(info)).hexdigest().upper()

    # Convert base 32 to hex
    if len(torrent_hash) == 32:
        torrent_hash = b16encode(b32decode(torrent_hash))

    # Send request to rTorrent
    try:
        torrent = self.rt.load_torrent(filedata, verify_retries=10)
        if not torrent:
            log.error('Unable to find the torrent, did it fail to load?')
            return False

        # Set label
        if self.conf('label'):
            torrent.set_custom(1, self.conf('label'))
        if self.conf('directory'):
            torrent.set_directory(self.conf('directory'))

        # Start torrent
        if not self.conf('paused', default=0):
            torrent.start()

        return self.downloadReturnId(torrent_hash)
    except Exception as err:
        log.error('Failed to send torrent to rTorrent: %s', err)
        return False
def torrent2magnet(torrent_url): print "TL: Downloading " + torrent_url torrent = get_data(torrent_url) metadata = bencode.bdecode(torrent) hashcontents = bencode.bencode(metadata['info']) digest = hashlib.sha1(hashcontents).digest() b32hash = base64.b32encode(digest) magneturl = 'magnet:?xt=urn:btih:' + b32hash + '&dn=' + metadata['info']['name'] return magneturl
def parse_torrent(self, data):
    """Parse raw .torrent bytes into a flat metadata dict.

    Returns a dict with create_time, optional announce/comment/publisher
    fields, name, files/length and a pieces-based data_hash — or None when
    the payload is not bencoded or carries no name.
    Side effect: sets self.encoding from the torrent's 'encoding' field.
    """
    info = {}
    self.encoding = 'utf8'
    try:
        torrent = bdecode(data)
        # NOTE(review): checks 'name' on the top level, so this appears
        # to expect an already-flattened dict OR falls through to the
        # 'info'-wrapped form below — confirm against callers.
        if not torrent.get('name'):
            return None
    except:
        return None
    try:
        # +8h shifts to UTC+8 local time — presumably CST; verify.
        info['create_time'] = datetime.datetime.fromtimestamp(
            float(torrent['creation date'])) + datetime.timedelta(hours=8)
    except:
        # No/invalid creation date: stamp with "now" in the same offset.
        info['create_time'] = datetime.datetime.utcnow(
        ) + datetime.timedelta(hours=8)
    if torrent.get('encoding'):
        self.encoding = torrent['encoding']
    if torrent.get('announce'):
        info['announce'] = self.decode_utf8(torrent, 'announce')
    if torrent.get('comment'):
        # Comment is capped at 200 characters.
        info['comment'] = self.decode_utf8(torrent, 'comment')[:200]
    if torrent.get('publisher-url'):
        info['publisher-url'] = self.decode_utf8(torrent, 'publisher-url')
    if torrent.get('publisher'):
        info['publisher'] = self.decode_utf8(torrent, 'publisher')
    if torrent.get('created by'):
        # Creator is capped at 15 characters.
        info['creator'] = self.decode_utf8(torrent, 'created by')[:15]
    # Accept both the standard 'info'-wrapped layout and a flat dict.
    if 'info' in torrent:
        detail = torrent['info']
    else:
        detail = torrent
    info['name'] = self.decode_utf8(detail, 'name')
    if 'files' in detail:
        # Multi-file torrent: collect per-file path/length (preferring
        # the utf-8 path variant when present) and sum total length.
        info['files'] = []
        for x in detail['files']:
            if 'path.utf-8' in x:
                v = {
                    'path': self.decode('/'.join(x['path.utf-8'])),
                    'length': x['length']
                }
            else:
                v = {
                    'path': self.decode('/'.join(x['path'])),
                    'length': x['length']
                }
            if 'filehash' in x:
                # Py2 str.encode('hex') — hex-encode the raw file hash.
                v['filehash'] = x['filehash'].encode('hex')
            info['files'].append(v)
        info['length'] = sum([x['length'] for x in info['files']])
    else:
        # Single-file torrent.
        info['length'] = detail['length']
    # MD5 over the piece hashes serves as a content fingerprint.
    info['data_hash'] = hashlib.md5(detail['pieces']).hexdigest()
    if 'profiles' in detail:
        info['profiles'] = detail['profiles']
    return info
def get_info_hash(torrent_path):
    """Return the hex-encoded SHA-1 info-hash of the torrent at
    `torrent_path`."""
    # Open the file and decode it
    with open(torrent_path, 'rb') as fh:
        metadata = bencode.bdecode(fh.read())
    # The info-hash is the SHA-1 of the re-bencoded 'info' dictionary.
    info_dict = metadata['info']
    return hashlib.sha1(bencode.bencode(info_dict)).hexdigest()