def datagramReceived(self, str, addr):
    # bdecode
    try:
        msg = bdecode(str)
    except Exception, e:
        if self.noisy:
            print "response decode error:", repr(e), repr(str)
def main(proxy):
    global logiter
    global all_lines
    global to_remove
    for url in logiter:
        try:
            peer_id = get_param(url, "peer_id")
            if peer_id:
                peer_id = id_map.setdefault(peer_id, make_id())
                url = replace_param(url, "peer_id", peer_id)
            url = replace_param(url, "ip", "127.0.0.1")
            url = replace_param(url, "port", "6881")
            path = '/' + url.split('/', 3)[3]
            sys.stdout.write('.')
            df = proxy.callRemote(path)
            r = like_yield(df)
            r = bdecode(r)
            er = r.get('failure reason')
            if er:
                raise Exception(er)
        except Exception, e:
            print "Error", e.__class__, e, url
            to_remove.add(url)
def _postrequest(self, data=None, failure=None):
    #self.errorfunc(logging.INFO, 'postrequest(%s): %s d:%s f:%s' %
    #               (self.__class__.__name__, self.current_started,
    #                bool(data), bool(failure)))
    self.current_started = None
    self.last_time = bttime()
    if self.dead:
        return
    if failure is not None:
        if failure.type == twisted.internet.error.TimeoutError:
            m = _("Timeout while contacting server.")
        else:
            m = failure.getErrorMessage()
        self.errorfunc(logging.WARNING, self._make_errormsg(m))
        self._fail(failure.exc_info())
        return
    try:
        r = bdecode(data)
        if LOG_RESPONSE:
            self.errorfunc(logging.INFO, 'tracker said: %r' % r)
        check_peers(r)
    except BTFailure, e:
        if data:
            self.errorfunc(logging.ERROR,
                           _("bad data from tracker (%r)") % data,
                           exc_info=sys.exc_info())
        self._fail()
        return
def _read_metainfo(self, infohash):
    path = os.path.join(self.data_dir, 'metainfo', infohash.encode('hex'))
    f = file(path, 'rb')
    data = f.read()
    f.close()
    return ConvertedMetainfo(bdecode(data))
def scrape(url):
    separator = '?'
    if '?' in url:
        separator = '&'
    req = '%s%cinfo_hash=%s' % (url.replace('announce', 'scrape'),
                                separator, info_hash)
    # print req
    r = bencode.bdecode(urlopen(req).read())
    return r['files'][info_hash]
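# A minimal usage sketch for scrape() above, not part of the original script:
# the tracker URL is hypothetical, and the module-level info_hash global is
# assumed to be set up the way the rest of this script does. The per-torrent
# dict uses the standard scrape keys.
stats = scrape('http://tracker.example.com:6969/announce')
print 'seeds: %d, leechers: %d, completed: %d' % (
    stats['complete'], stats['incomplete'], stats['downloaded'])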
def make_meta_file_dht(path, nodes, piece_len_exp, flag=Event(),
                       progress=dummy, title=None, comment=None, safe=None,
                       content_type=None, target=None, data_dir=None):
    # if nodes is empty, then get them out of the routing table in data_dir
    # else, expect nodes to be a string of comma separated <ip>:<port> pairs
    # this has a lot of duplicated code from make_meta_file
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress, content_type)
    if flag.isSet():
        return
    check_info(info)
    info_hash = sha1(bencode(info)).digest()
    if not nodes:
        x = open(os.path.join(data_dir, 'routing_table'), 'rb')
        d = bdecode(x.read())
        x.close()
        t = KTable(Node().initWithDict({'id': d['id'],
                                        'host': '127.0.0.1',
                                        'port': 0}))
        for n in d['rt']:
            t.insertNode(Node().initWithDict(n))
        nodes = [(node.host, node.port)
                 for node in t.findNodes(info_hash)
                 if node.host != '127.0.0.1']
    else:
        nodes = [(a[0], int(a[1]))
                 for a in [node.strip().split(":")
                           for node in nodes.split(",")]]
    data = {'nodes': nodes, 'creation date': int(time())}
    h = file(f, 'wb')
    data['info'] = info
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    if safe:
        data['safe'] = safe
    h.write(bencode(data))
    h.close()
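# A minimal usage sketch for make_meta_file_dht(), not from the original
# source; the content path and node addresses are hypothetical. Passing an
# explicit nodes string skips the routing-table lookup, so data_dir can be
# omitted; a piece_len_exp of 18 gives 2**18 = 256 KiB pieces.
make_meta_file_dht('/tmp/payload.bin',                          # content path
                   'router.example.com:6881,192.0.2.7:6881',    # <ip>:<port> pairs
                   18)                                          # piece_len_exp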
def loadd(data):
    d = bdecode(data)
    if d['y'] == 'e':
        raise DFault(d['c'], d['s'], d['t'])
    elif d['y'] == 'r':
        return d['r'], None, d['t']
    elif d['y'] == 'q':
        return d['a'], d['q'], d['t']
    raise ValueError
def loads(data):
    d = bdecode(data)
    if d['y'] == 'e':
        # the server raised a fault
        raise Fault(d['c'], d['s'])
    elif d['y'] == 'r':
        # why is this return value so weird?
        # because it's the way that loads works in xmlrpclib
        return (d['r'],), None
    elif d['y'] == 'q':
        return d['a'], d['q']
    raise ValueError
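# An illustrative round-trip for loads()/loadd() above, not from the original
# source. It builds the three bencoded message shapes the dispatchers expect:
# 'y' selects the type ('q' query, 'r' response, 'e' error), with the payload
# under 'a'/'q', 'r', or 'c'/'s' respectively, and 't' as the transaction id.
response = bencode({'y': 'r', 'r': {'val': 42}, 't': '00'})
assert loads(response) == (({'val': 42},), None)

query = bencode({'y': 'q', 'q': 'ping', 'a': {'id': 'abc'}, 't': '01'})
assert loads(query) == ({'id': 'abc'}, 'ping')

error = bencode({'y': 'e', 'c': 201, 's': 'Generic Error', 't': '02'})
try:
    loads(error)
except Fault:
    pass  # the server raised a fault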
def create_torrent_non_suck(self, torrent_filename, path_to_data,
                            hidden=False, feedback=None):
    data = open(torrent_filename, 'rb').read()
    metainfo = ConvertedMetainfo(bdecode(data))
    return self.create_torrent(metainfo, path_to_data, path_to_data,
                               hidden=hidden, feedback=feedback)
def _load(self):
    do_load = False
    try:
        s = open(os.path.join(self.ddir, "routing_table"), 'r').read()
        dict = bdecode(s)
    except:
        id = newID()
    else:
        id = dict['id']
        do_load = True
    self.node = self._Node(self.udp.connectionForAddr).init(id, self.host,
                                                            self.port)
    self.table = KTable(self.node)
    if do_load:
        self._loadRoutingTable(dict['rt'])
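# A sketch (not from the original source) of the on-disk shape _load() above
# and make_meta_file_dht() both read: a bencoded dict holding this node's
# 'id' plus 'rt', a list of per-node dicts that Node().initWithDict() can
# consume. The exact per-node keys are an assumption based on how Node
# instances are rebuilt above.
example_routing_table = bencode({
    'id': 'x' * 20,    # this node's 160-bit id
    'rt': [{'id': 'y' * 20, 'host': '192.0.2.5', 'port': 6881}],
})
open('/tmp/routing_table', 'wb').write(example_routing_table)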
def _read_torrent_config(self, infohash):
    path = os.path.join(self.data_dir, 'torrents', infohash.encode('hex'))
    if not os.path.exists(path):
        raise BTFailure, _("Could not open the torrent config: " +
                           infohash.encode('hex'))
    f = file(path, 'rb')
    data = f.read()
    f.close()
    try:
        torrent_config = cPickle.loads(data)
    except:
        # backward compatibility with <= 4.9.3
        torrent_config = bdecode(data)
        for k, v in torrent_config.iteritems():
            try:
                torrent_config[k] = v.decode('utf8')
                if k in ('destination_path', 'working_path'):
                    torrent_config[k] = encode_for_filesystem(
                        torrent_config[k])[0]
            except:
                pass
    if not torrent_config.get('destination_path'):
        raise BTFailure(_("Invalid torrent config file"))
    if not torrent_config.get('working_path'):
        raise BTFailure(_("Invalid torrent config file"))
    if get_filesystem_encoding() == None:
        # These paths should both be unicode.  If they aren't, they are the
        # broken product of some old version, and probably are in the
        # encoding we used to use in config files.  Attempt to recover.
        dp = torrent_config['destination_path']
        if isinstance(dp, str):
            try:
                dp = dp.decode(old_broken_config_subencoding)
                torrent_config['destination_path'] = dp
            except:
                raise BTFailure(_("Invalid torrent config file"))
        wp = torrent_config['working_path']
        if isinstance(wp, str):
            try:
                wp = wp.decode(old_broken_config_subencoding)
                torrent_config['working_path'] = wp
            except:
                raise BTFailure(_("Invalid torrent config file"))
    return torrent_config
def get(arg):
    """Obtains the contents of the .torrent metainfo file either from the
       local filesystem or from a remote server.  'arg' is either a filename
       or an URL.  Returns a ConvertedMetainfo object which is the parsed
       metainfo from the contents of the .torrent file.  Any exception
       raised while obtaining the .torrent file or parsing its contents is
       caught and wrapped in one of the following errors:
       GetTorrent.URLException, GetTorrent.FileException,
       GetTorrent.MetainfoException, or GetTorrent.UnknownArgument.
       (All have the base class GetTorrent.GetTorrentException)"""
    data = _get(arg)
    metainfo = None
    try:
        b = bdecode(data)
        metainfo = ConvertedMetainfo(b)
    except Exception, e:
        raise MetainfoException(_('"%s" is not a valid torrent file (%s).') %
                                (arg, unicode(e)))
    return metainfo
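# A minimal usage sketch for get(), not from the original source; the local
# path and URL are hypothetical. Both forms return a ConvertedMetainfo, and
# any failure surfaces as a GetTorrentException subclass, per the docstring.
try:
    local = get('/tmp/example.torrent')
    remote = get('http://www.example.com/files/example.torrent')
except GetTorrentException, e:
    print 'could not load torrent:', unicode(e)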
def announce(url, id, left=10, event=''):
    global interval_sum
    global min_interval
    global max_interval
    global num_announces
    auth = ''
    if use_auth:
        auth = 'auth=%s&' % sha(info_hash + tid + sekret).hexdigest()
    if event != '':
        event = '&event=%s' % event
    separator = '?'
    if '?' in url:
        separator = '&'
    req = '%s%c%sinfo_hash=%s&tid=%s&peer_id=DNA%0.4d%s&left=%d&port=%d%s' % (
        url, separator, auth, info_hash, tid, id, '0' * 13, left, id, event)
    # print req
    r = bencode.bdecode(urlopen(req).read())
    if not 'peers' in r:
        return []
    peers = r['peers']
    peers6 = ''
    try:
        peers6 = r['peers6']
    except:
        pass
    interval = r['interval']
    interval_sum += interval
    if interval < min_interval:
        min_interval = interval
    if interval > max_interval:
        max_interval = interval
    num_announces += 1
    ret = []
    # compact peer entries are 6 bytes (IPv4) or 18 bytes (IPv6); this test
    # harness keeps only the last byte of each entry (the low byte of the
    # port, which encodes the announced id) as a short fingerprint
    while len(peers) >= 6:
        ret.append(peers[5:6])
        peers = peers[6:]
    while len(peers6) >= 18:
        ret.append(peers6[17:18])
        peers6 = peers6[18:]
    ret = sorted(ret)
    # print ret
    return ret
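# For reference, a full decoder for the compact 'peers' format the loop above
# samples from; a sketch, not part of the original script. Each IPv4 entry is
# 4 bytes of address followed by a 2-byte big-endian port (BEP 23).
import socket
import struct

def decode_compact_peers(peers):
    result = []
    while len(peers) >= 6:
        ip = socket.inet_ntoa(peers[:4])
        port = struct.unpack('>H', peers[4:6])[0]
        result.append((ip, port))
        peers = peers[6:]
    return result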
def __init__(self, config, rawserver):
    self.config = config
    self.response_size = config['response_size']
    self.max_give = config['max_give']
    self.dfile = efs2(config['dfile'])
    self.natcheck = config['nat_check']
    favicon = config['favicon']
    self.favicon = None
    if favicon:
        try:
            h = open(favicon, 'r')
            self.favicon = h.read()
            h.close()
        except:
            errorfunc(logging.WARNING,
                      _("specified favicon file -- %s -- does not exist.") %
                      favicon)
    self.rawserver = rawserver
    self.cached = {}    # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]]
    self.cached_t = {}  # format: infohash: [time, cache]
    self.times = {}
    self.state = {}
    self.seedcount = {}

    self.save_pending = False
    self.parse_pending = False

    self.only_local_override_ip = config['only_local_override_ip']
    if self.only_local_override_ip == 2:
        self.only_local_override_ip = not config['nat_check']

    if os.path.exists(self.dfile):
        try:
            h = open(self.dfile, 'rb')
            ds = h.read()
            h.close()
            try:
                tempstate = cPickle.loads(ds)
            except:
                tempstate = bdecode(ds)  # backwards-compatibility.
            if not tempstate.has_key('peers'):
                tempstate = {'peers': tempstate}
            statefiletemplate(tempstate)
            self.state = tempstate
        except:
            errorfunc(logging.WARNING,
                      _("statefile %s corrupt; resetting") % self.dfile)
    self.downloads = self.state.setdefault('peers', {})
    self.completed = self.state.setdefault('completed', {})

    self.becache = {}  # format: infohash: [[l1, s1], [l2, s2], [l3, s3]]
    for infohash, ds in self.downloads.iteritems():
        self.seedcount[infohash] = 0
        for x, y in ds.iteritems():
            if not y.get('nat', -1):
                ip = y.get('given_ip')
                if not (ip and self.allow_local_override(y['ip'], ip)):
                    ip = y['ip']
                self.natcheckOK(infohash, x, ip, y['port'], y['left'])
            if not y['left']:
                self.seedcount[infohash] += 1

    for infohash in self.downloads:
        self.times[infohash] = {}
        for peerid in self.downloads[infohash]:
            self.times[infohash][peerid] = 0

    self.reannounce_interval = config['reannounce_interval']
    self.save_dfile_interval = config['save_dfile_interval']
    self.show_names = config['show_names']
    rawserver.add_task(self.save_dfile_interval, self.save_dfile)
    self.prevtime = time()
    self.timeout_downloaders_interval = config['timeout_downloaders_interval']
    rawserver.add_task(self.timeout_downloaders_interval,
                       self.expire_downloaders)
    self.logfile = None
    self.log = None
    if (config['logfile'] != '') and (config['logfile'] != '-'):
        try:
            self.logfile = config['logfile']
            self.log = open(self.logfile, 'a')
            sys.stdout = self.log
            print _("# Log Started: "), isotime()
        except:
            print _("**warning** could not redirect stdout to log file: "), \
                  sys.exc_info()[0]

    if config['hupmonitor']:
        def huphandler(signum, frame, self=self):
            try:
                self.log.close()
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print _("# Log reopened: "), isotime()
            except:
                print _("***warning*** could not reopen logfile")
        signal.signal(signal.SIGHUP, huphandler)

    self.allow_get = config['allow_get']

    if config['allowed_dir'] != '':
        self.allowed_dir = config['allowed_dir']
        self.parse_dir_interval = config['parse_dir_interval']
        self.allowed = self.state.setdefault('allowed', {})
        self.allowed_dir_files = self.state.setdefault('allowed_dir_files', {})
        self.allowed_dir_blocked = {}
        self.parse_allowed()
    else:
        try:
            del self.state['allowed']
        except:
            pass
        try:
            del self.state['allowed_dir_files']
        except:
            pass
        self.allowed = None

    self.uq_broken = unquote('+') != ' '
    self.keep_dead = config['keep_dead']
def like_gettorrent(path):
    data = open(path, 'rb').read()
    b = bdecode(data)
    metainfo = ConvertedMetainfo(b)
    return metainfo
yield df
torrentfile = df.getResult()

df = ThreadedDeferred(wrap_task(self.rawserver.external_add_task),
                      self._get_signature, installer_url)
yield df
signature = df.getResult()

if torrentfile and signature:
    df = ThreadedDeferred(wrap_task(self.rawserver.external_add_task),
                          self._check_signature, torrentfile, signature)
    yield df
    checked = df.getResult()
    if checked:
        self.debug(debug_prefix + 'signature verified successfully.')
        b = bdecode(torrentfile)
        metainfo = ConvertedMetainfo(b)
        infohash = metainfo.infohash
        self.available_version = available_version
        self.multitorrent.remove_auto_updates_except(infohash)
        try:
            df = self.multitorrent.create_torrent(metainfo,
                                                  installer_path,
                                                  installer_path,
                                                  hidden=True,
                                                  is_auto_update=True)
            yield df
            df.getResult()
        except TorrentAlreadyRunning:
            self.debug(debug_prefix +
                       'found auto-update torrent already running')
        except TorrentAlreadyInQueue:
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# Written by Henry 'Pi' James and Bram Cohen

app_name = "BitTorrent"

from BitTorrent.translation import _

from os.path import basename
from sys import argv, exit

from BTL.bencode import bencode, bdecode

if len(argv) < 3:
    print _("Usage: %s TRACKER_URL [TORRENTFILE [TORRENTFILE ... ] ]") % \
          basename(argv[0])
    print
    exit(2)  # common exit code for syntax error

for f in argv[2:]:
    h = open(f, 'rb')
    metainfo = bdecode(h.read())
    h.close()
    if metainfo['announce'] != argv[1]:
        print _("old announce for %s: %s") % (f, metainfo['announce'])
        metainfo['announce'] = argv[1]
        h = open(f, 'wb')
        h.write(bencode(metainfo))
        h.close()
def metainfo_from_file(f):
    # despite the name, 'f' is the raw bencoded .torrent contents, not an
    # open file object: bdecode() expects a string
    metainfo = ConvertedMetainfo(bdecode(f))
    return metainfo
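# A small companion sketch, not from the original source: computing the
# infohash from raw .torrent contents, mirroring how make_meta_file_dht()
# derives it above (SHA-1 digest of the bencoded 'info' dict).
from hashlib import sha1

def infohash_from_file_contents(data):
    info = bdecode(data)['info']
    return sha1(bencode(info)).hexdigest()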