def check_allowed(self, infohash, paramslist):
    """Authorization check for an announce request.

    Returns None when the request is allowed, otherwise a complete
    HTTP response tuple (status, message, headers, bencoded body)
    describing why it was rejected.  Note the tracker convention of
    returning HTTP 200 with a bencoded 'failure reason'.
    """
    def reject(reason):
        # All rejections share the same response shape; build it once.
        return (200, 'Not Authorized',
                {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason': reason}))

    # Aggregator mode: require the shared password before anything else.
    if (self.aggregator_key is not None
            and not ('password' in paramslist
                     and paramslist['password'][0] == self.aggregator_key)):
        return reject(
            'Requested download is not authorized for use with this tracker.')

    if self.allowed is not None:
        if infohash not in self.allowed:
            return reject(
                'Requested download is not authorized for use with this tracker.')
        if self.config['allowed_controls']:
            # A per-torrent 'failure reason' in the allowed dir disables it.
            if 'failure reason' in self.allowed[infohash]:
                return reject(self.allowed[infohash]['failure reason'])

    if 'tracker' in paramslist:  # request forwarded by another tracker
        if (self.config['multitracker_allowed'] == 'none' or  # turned off
                paramslist['peer_id'][0] == self.trackerid):  # oops! contacted myself
            return reject('disallowed')
        if (self.config['multitracker_allowed'] == 'autodetect'
                and 'announce-list' not in self.allowed[infohash]):
            return reject(
                'Requested download is not authorized for multitracker use.')

    return None
def natcheckOK(self, infohash, peerid, ip, port, not_seed):
    """Record a peer that passed its NAT check in the announce caches.

    Fills the three parallel becache layers for this torrent: bencoded
    peer dict with peer id, bencoded dict without peer id, and the
    compact binary form.
    """
    caches = self.becache.setdefault(
        infohash, [[{}, {}], [{}, {}], [{}, {}]])
    # Boolean bucket index: separates seeds from non-seeds within each layer.
    bucket = not not_seed
    full_entry = bencode({'ip': ip, 'port': port, 'peer id': peerid})
    anon_entry = bencode({'ip': ip, 'port': port})
    caches[0][bucket][peerid] = Bencached(full_entry)
    caches[1][bucket][peerid] = Bencached(anon_entry)
    caches[2][bucket][peerid] = compact_peer_info(ip, port)
def get_scrape(self, paramslist):
    """Build the response for a scrape request.

    With 'info_hash' parameters, returns stats for just those torrents
    (requires scrape_allowed of 'specific' or 'full'); otherwise returns
    stats for every known torrent (requires 'full').
    """
    files = {}
    if 'info_hash' in paramslist:
        # Specific scrape: only report hashes the tracker actually knows.
        if self.config['scrape_allowed'] not in ('specific', 'full'):
            return (400, 'Not Authorized',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                             'specific scrape function is not available with this tracker.'}))
        known = self.allowed if self.allowed is not None else self.downloads
        for infohash in paramslist['info_hash']:
            if infohash in known:
                files[infohash] = self.scrapedata(infohash)
    else:
        # Full scrape: dump stats for everything.
        if self.config['scrape_allowed'] != 'full':
            return (400, 'Not Authorized',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                             'full scrape function is not available with this tracker.'}))
        known = self.allowed if self.allowed is not None else self.downloads
        for infohash in list(known.keys()):
            files[infohash] = self.scrapedata(infohash)
    return (200, 'OK', {'Content-Type': 'text/plain'},
            bencode({'files': files}))
def make_meta_file(file, url, params=None, flag=None,
                   progress=lambda x: None, progress_percent=1):
    """Generate a .torrent metainfo file for *file*, announced at *url*.

    params           -- optional dict of settings: piece_size_pow2, target,
                        comment, announce_list, httpseeds, filesystem_encoding,
                        and the 'real_*' shortcuts for external callers
    flag             -- optional Event; if set during hashing, generation aborts
    progress         -- callback invoked with hashing progress
    progress_percent -- forwarded to makeinfo (presumably selects percent vs.
                        byte progress reporting -- confirm in makeinfo)

    Bug fix: the original signature used ``params={}`` and ``flag=Event()``,
    which are evaluated once at definition time and shared across every call
    (mutable-default pitfall).  Fresh objects are now created per call.
    """
    if params is None:
        params = {}
    if flag is None:
        flag = Event()

    if 'piece_size_pow2' in params:
        piece_len_exp = params['piece_size_pow2']
    else:
        piece_len_exp = default_piece_len_exp

    # Output path: explicit target, or "<input>.torrent" next to the input.
    if 'target' in params and params['target'] != '':
        f = params['target']
    else:
        a, b = split(file)
        if b == '':
            f = a + '.torrent'
        else:
            f = join(a, b + '.torrent')

    if piece_len_exp == 0:  # automatic piece-size selection by total size
        size = calcsize(file)
        if size > 8*1024*1024*1024:    # > 8 gig =
            piece_len_exp = 21         #   2 meg pieces
        elif size > 2*1024*1024*1024:  # > 2 gig =
            piece_len_exp = 20         #   1 meg pieces
        elif size > 512*1024*1024:     # > 512M =
            piece_len_exp = 19         #   512K pieces
        elif size > 64*1024*1024:      # > 64M =
            piece_len_exp = 18         #   256K pieces
        elif size > 16*1024*1024:      # > 16M =
            piece_len_exp = 17         #   128K pieces
        elif size > 4*1024*1024:       # > 4M =
            piece_len_exp = 16         #   64K pieces
        else:                          # < 4M =
            piece_len_exp = 15         #   32K pieces
    piece_length = 2 ** piece_len_exp

    # Filesystem encoding: explicit param, else module ENCODING, else ascii.
    encoding = None
    if 'filesystem_encoding' in params:
        encoding = params['filesystem_encoding']
    if not encoding:
        encoding = ENCODING
    if not encoding:
        encoding = 'ascii'

    info = makeinfo(file, piece_length, encoding, flag, progress,
                    progress_percent)
    if flag.isSet():  # hashing was aborted by the caller
        return
    check_info(info)

    data = {'info': info, 'announce': strip(url),
            'creation date': int(time())}
    if 'comment' in params and params['comment']:
        data['comment'] = params['comment']
    if 'real_announce_list' in params:
        # shortcut for progs calling in from outside
        data['announce-list'] = params['real_announce_list']
    elif 'announce_list' in params and params['announce_list']:
        # 'tier1_url1,tier1_url2|tier2_url1' -> list of lists
        l = []
        for tier in params['announce_list'].split('|'):
            l.append(tier.split(','))
        data['announce-list'] = l
    if 'real_httpseeds' in params:
        # shortcut for progs calling in from outside
        data['httpseeds'] = params['real_httpseeds']
    elif 'httpseeds' in params and params['httpseeds']:
        data['httpseeds'] = params['httpseeds'].split('|')

    # 'with' guarantees the handle is closed even if bencode/write raises
    # (the original leaked the handle on error).
    with open(f, 'wb') as h:
        h.write(bencode(data))
if params.has_key('real_announce_list' ): # shortcut for progs calling in from outside data['announce-list'] = params['real_announce_list'] elif params.has_key('announce_list') and params['announce_list']: l = [] for tier in params['announce_list'].split('|'): l.append(tier.split(',')) data['announce-list'] = l if params.has_key( 'real_httpseeds'): # shortcut for progs calling in from outside data['httpseeds'] = params['real_httpseeds'] elif params.has_key('httpseeds') and params['httpseeds']: data['httpseeds'] = params['httpseeds'].split('|') h.write(bencode(data)) h.close() def calcsize(file): if not isdir(file): return getsize(file) total = 0L for s in subfiles(abspath(file)): total += getsize(s[1]) return total def uniconvertl(l, e): r = [] try:
data["comment"] = params["comment"] if params.has_key("real_announce_list"): # shortcut for progs calling in from outside data["announce-list"] = params["real_announce_list"] elif params.has_key("announce_list") and params["announce_list"]: l = [] for tier in params["announce_list"].split("|"): l.append(tier.split(",")) data["announce-list"] = l if params.has_key("real_httpseeds"): # shortcut for progs calling in from outside data["httpseeds"] = params["real_httpseeds"] elif params.has_key("httpseeds") and params["httpseeds"]: data["httpseeds"] = params["httpseeds"].split("|") h.write(bencode(data)) h.close() def calcsize(file): if not isdir(file): return getsize(file) total = 0L for s in subfiles(abspath(file)): total += getsize(s[1]) return total def uniconvertl(l, e): r = [] try:
def get(self, connection, path, headers):
    """Handle one HTTP GET against the tracker.

    Dispatches on the request path (info page, 'file', 'favicon.ico',
    scrape endpoints, announce endpoints) and returns a response tuple
    of (status code, status text, header dict, body).
    """
    real_ip = connection.get_ip()
    ip = real_ip
    # Normalise the client address to IPv4 when possible; ipv4 records
    # whether a compact (4-byte) peer representation is usable later.
    if is_ipv4(ip):
        ipv4 = True
    else:
        try:
            ip = ipv6_to_ipv4(ip)
            ipv4 = True
        except ValueError:
            ipv4 = False
    # Reject clients failing the allow/ban IP lists outright.
    if ((self.allowed_IPs and not self.allowed_IPs.includes(ip)) or
            (self.banned_IPs and self.banned_IPs.includes(ip))):
        return (400, 'Not Authorized',
                {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                         'your IP is not allowed on this tracker'}))
    # Honour a proxy-forwarded client IP unless configured to trust only
    # the local connection address.
    nip = get_forwarded_ip(headers)
    if nip and not self.only_local_override_ip:
        ip = nip
        try:
            ip = to_ipv4(ip)
            ipv4 = True
        except ValueError:
            ipv4 = False
    paramslist = {}

    def params(key, default=None, l=paramslist):
        # Query values are stored as lists; return the first occurrence.
        if key in l:
            return l[key][0]
        return default

    try:
        (scheme, netloc, path, pars, query, fragment) = urlparse(path)
        if self.uq_broken == 1:
            # NOTE(review): presumably compensates for clients with broken
            # URL quoting of spaces -- confirm against config docs.
            path = path.replace('+', ' ')
            query = query.replace('+', ' ')
        path = unquote(path)[1:]  # strip the leading '/'
        # Parse the query string into {key: [value, ...]}.
        for s in query.split('&'):
            if s:
                i = s.index('=')
                kw = unquote(s[:i])
                paramslist.setdefault(kw, [])
                paramslist[kw] += [unquote(s[i + 1:])]
        if path == '' or path == 'index.html':
            return self.get_infopage()
        if (path == 'file'):
            return self.get_file(params('info_hash'))
        if path == 'favicon.ico' and self.favicon is not None:
            return (200, 'OK',
                    {'Content-Type': 'image/x-icon'}, self.favicon)

        # automated access from here on
        if path in ('scrape', 'scrape.php', 'tracker.php/scrape'):
            return self.get_scrape(paramslist)
        if not path in ('announce', 'announce.php', 'tracker.php/announce'):
            return (404, 'Not Found',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    alas)

        # main tracker function
        filtered = self.Filter.check(real_ip, paramslist, headers)
        if filtered:
            return (400, 'Not Authorized',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason': filtered}))
        infohash = params('info_hash')
        if not infohash:
            raise ValueError('no info hash')
        # Torrent-level authorization (allowed list, multitracker rules).
        notallowed = self.check_allowed(infohash, paramslist)
        if notallowed:
            return notallowed
        event = params('event')
        rsize = self.add_data(infohash, event, ip, paramslist)
    except ValueError as e:
        # Any parse/validation failure above is reported to the client.
        return (400, 'Bad Request',
                {'Content-Type': 'text/plain'},
                'you sent me garbage - ' + str(e))
    # Forward announces to an aggregator, unless this request itself came
    # from another tracker ('tracker' param present).
    if self.aggregate_forward and 'tracker' not in paramslist:
        self.aggregate_senddata(query)
    if self.is_aggregator:  # don't return peer data here
        return (200, 'OK',
                {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'response': 'OK'}))
    # Response format: 2 = compact (IPv4 only), 1 = no peer id, 0 = full.
    if params('compact') and ipv4:
        return_type = 2
    elif params('no_peer_id'):
        return_type = 1
    else:
        return_type = 0
    data = self.peerlist(infohash, event == 'stopped', params('tracker'),
                         not params('left'), return_type, rsize)
    if 'scrape' in paramslist:  # deprecated
        data['scrape'] = self.scrapedata(infohash, False)
    # Dedicated-seed bookkeeping: mark the torrent as seeded once the
    # designated seed announces with nothing left to download.
    if self.dedicated_seed_id:
        if params('seed_id') == self.dedicated_seed_id and params('left') == 0:
            self.is_seeded[infohash] = True
        if params('check_seeded') and self.is_seeded.get(infohash):
            data['seeded'] = 1
    return (200, 'OK',
            {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
            bencode(data))
def save_state(self):
    """Persist the tracker state to the dfile, rescheduling itself.

    Re-arms the next save on the rawserver scheduler first, so one
    failed write does not stop periodic saving.
    """
    self.rawserver.add_task(self.save_state, self.save_dfile_interval)
    # 'with' ensures the handle is closed even if bencode or the write
    # raises (the original leaked the handle on error).
    with open(self.dfile, 'wb') as h:
        h.write(bencode(self.state))