def _check_signature(self, torrentfile, signature):
    """Check the torrent file's signature using the public key."""
    public_key_file = open(os.path.join(doc_root, 'public.key'), 'rb')
    public_key = pickle.load(public_key_file)
    public_key_file.close()
    h = sha(torrentfile).digest()
    return public_key.verify(h, signature)
def remove_torrent(self, infohash):
    del self.torrents[infohash]
    del self.obfuscated_torrents[sha('req2' + infohash).digest()]
    if infohash in self.ld_services:
        service = self.ld_services.pop(infohash)
        if self.local_discovery:
            self.local_discovery.unannounce(service)
def _compute_allowed_fast_list(infohash, ip, num_fast, num_pieces):
    # if ipv4 then (for now assume IPv4)
    iplist = [int(x) for x in ip.split(".")]

    # classful heuristic.
    iplist = [chr(iplist[0]), chr(iplist[1]), chr(iplist[2]), chr(0)]
    h = "".join(iplist)
    h = "".join([h, infohash])
    fastlist = []
    assert num_pieces < 2**32
    if num_pieces <= num_fast:
        return range(num_pieces)  # <---- this would be bizarre
    while True:
        h = sha(h).digest()  # rehash hash to generate new random string.
        for i in xrange(5):
            j = i * 4
            #y = [ord(x) for x in h[j:j+4]]
            #z = (y[0] << 24) + (y[1]<<16) + (y[2]<<8) + y[3]
            z = struct.unpack("!L", h[j:j + 4])[0]
            index = int(z % num_pieces)
            if index not in fastlist:
                fastlist.append(index)
                if len(fastlist) >= num_fast:
                    return fastlist
def _compute_allowed_fast_list(self, infohash, ip, num_fast, num_pieces):
    # if ipv4 then (for now assume IPv4)
    iplist = [int(x) for x in ip.split(".")]

    # classful heuristic.
    if iplist[0] | 0x7F == 0xFF or iplist[0] & 0xC0 == 0x80:  # class A or B
        iplist = [chr(iplist[0]), chr(iplist[1]), chr(0), chr(0)]
    else:
        iplist = [chr(iplist[0]), chr(iplist[1]), chr(iplist[2]), chr(0)]
    h = "".join(iplist)
    h = "".join([h, infohash])
    fastlist = []
    assert num_pieces < 2**32
    if num_pieces <= num_fast:
        return range(num_pieces)  # <---- this would be bizarre
    while True:
        h = sha(h).digest()  # rehash hash to generate new random string.
        #log("infohash=%s" % h.encode('hex'))
        for i in xrange(5):
            j = i * 4
            y = [ord(x) for x in h[j:j + 4]]
            z = (y[0] << 24) + (y[1] << 16) + (y[2] << 8) + y[3]
            index = int(z % num_pieces)
            #log("z=%s=%d, index=%d" % (hex(z), z, index))
            if index not in fastlist:
                fastlist.append(index)
                if len(fastlist) >= num_fast:
                    return fastlist
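# Illustrative usage only -- not part of the original module.  It assumes the
# module-level _compute_allowed_fast_list variant above (the one that always
# masks the address to /24) and the module's own sha/struct imports; the
# sample infohash and peer address below are made up.
if __name__ == '__main__':
    sample_infohash = sha('example metainfo').digest()  # any 20-byte string
    allowed = _compute_allowed_fast_list(sample_infohash, '10.1.2.3',
                                         num_fast=10, num_pieces=1313)
    # 'allowed' holds 10 distinct piece indices in [0, 1313) that the peer at
    # 10.1.2.3 may request even while choked (the BEP 6 "allowed fast" set).
    print allowed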
def make_meta_file_dht(path, nodes, piece_len_exp, flag=Event(),
                       progress=dummy, title=None, comment=None, safe=None,
                       content_type=None, target=None, data_dir=None):
    # if nodes is empty, then get them out of the routing table in data_dir
    # else, expect nodes to be a string of comma separated <ip>:<port> pairs
    # this has a lot of duplicated code from make_meta_file
    piece_length = 2 ** piece_len_exp

    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target

    info = makeinfo(path, piece_length, flag, progress, content_type)
    if flag.isSet():
        return
    check_info(info)
    info_hash = sha(bencode(info)).digest()

    if not nodes:
        x = open(os.path.join(data_dir, 'routing_table'), 'rb')
        d = bdecode(x.read())
        x.close()
        t = KTable(Node().initWithDict({'id': d['id'],
                                        'host': '127.0.0.1', 'port': 0}))
        for n in d['rt']:
            t.insertNode(Node().initWithDict(n))
        nodes = [(node.host, node.port) for node in t.findNodes(info_hash)
                 if node.host != '127.0.0.1']
    else:
        nodes = [(a[0], int(a[1])) for a in
                 [node.strip().split(":") for node in nodes.split(",")]]

    # time() gives seconds since the epoch; gmtime() returns a struct_time,
    # which int() cannot convert.
    data = {'nodes': nodes, 'creation date': int(time())}
    h = file(f, 'wb')

    data['info'] = info
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    if safe:
        data['safe'] = safe
    h.write(bencode(data))
    h.close()
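# Illustrative usage only -- not part of the original module.  The file paths
# and bootstrap node below are placeholders; flag and progress keep the
# defaults from the signature above.
def _example_make_dht_torrents():
    # Explicit nodes, given as a comma separated "<host>:<port>" string:
    make_meta_file_dht('/tmp/example_payload', 'router.bittorrent.com:6881',
                       18, target='/tmp/example_payload.torrent')
    # Or pass an empty nodes string and let the function read the saved DHT
    # routing table under data_dir:
    make_meta_file_dht('/tmp/example_payload', '', 18,
                       target='/tmp/example_payload.torrent',
                       data_dir='/tmp/dht_state')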
def add_torrent(self, infohash, connection_manager):
    if infohash in self.torrents:
        raise BTFailure(_("Can't start two separate instances of the same "
                          "torrent"))
    self.torrents[infohash] = connection_manager
    key = sha("req2" + infohash).digest()
    self.obfuscated_torrents[key] = connection_manager
    if self.local_discovery:
        service = self.local_discovery.announce(
            infohash.encode("hex"), connection_manager.my_id.encode("hex"))
        self.ld_services[infohash] = service
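# Illustrative only -- a hedged sketch, not a method from the original class.
# It shows how the obfuscated_torrents mapping built above is meant to be
# used: a peer opening an encrypted handshake identifies the torrent by
# sha('req2' + infohash) rather than by the raw infohash, so the incoming
# digest is looked up directly.  'received_req2' is a placeholder name.
def lookup_obfuscated(self, received_req2):
    return self.obfuscated_torrents.get(received_req2)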
def announce(url, id, left=10, event=''):
    global interval_sum
    global min_interval
    global max_interval
    global num_announces

    auth = ''
    if use_auth:
        auth = 'auth=%s&' % sha(info_hash + tid + sekret).hexdigest()

    if event != '':
        event = '&event=%s' % event

    separator = '?'
    if '?' in url:
        separator = '&'

    req = '%s%c%sinfo_hash=%s&tid=%s&peer_id=DNA%0.4d%s&left=%d&port=%d%s' % (
        url, separator, auth, info_hash, tid, id, '0' * 13, left, id, event)
    # print req
    r = bencode.bdecode(urlopen(req).read())
    if not 'peers' in r:
        return []
    peers = r['peers']
    peers6 = ''
    try:
        peers6 = r['peers6']
    except:
        pass
    interval = r['interval']
    interval_sum += interval
    if interval < min_interval:
        min_interval = interval
    if interval > max_interval:
        max_interval = interval
    num_announces += 1
    ret = []
    while len(peers) >= 6:
        ret.append(peers[5:6])
        peers = peers[6:]
    while len(peers6) >= 18:
        ret.append(peers6[17:18])
        peers6 = peers6[18:]
    ret = sorted(ret)
    # print ret
    return ret
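# Illustrative only -- a minimal sketch of the compact peer encoding the two
# loops above walk over: 6 bytes per IPv4 peer (4 address bytes followed by a
# big-endian 2-byte port) and 18 bytes per IPv6 peer.  This helper is not
# part of the original script.
import socket
import struct

def decode_compact_ipv4_peers(peers):
    """Yield (ip, port) tuples from a compact IPv4 'peers' string."""
    for off in xrange(0, len(peers) - len(peers) % 6, 6):
        ip = socket.inet_ntoa(peers[off:off + 4])
        port = struct.unpack('!H', peers[off + 4:off + 6])[0]
        yield ip, port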
            raise BTFailure(
                _('File/directory name "%s" contains reserved '
                  "unicode values that do not correspond to "
                  "characters.") % name)
        return u.encode("utf-8")

    path = os.path.abspath(path)
    if os.path.isdir(path):
        subs = subfiles(path)
        subs.sort()
        pieces = []
        sh = sha()
        done = 0
        fs = []
        totalsize = 0.0
        totalhashed = 0
        for p, f in subs:
            totalsize += os.path.getsize(f)
        for p, f in subs:
            pos = 0
            size = os.path.getsize(f)
            p2 = [to_utf8(n) for n in p]
            if content_type:
                fs.append({"length": size, "path": p2,
                           "content_type": content_type})  # HEREDAVE. bad for batch!
            else:
                fs.append({"length": size, "path": p2})
def __init__(self, metainfo):
    """metainfo is a dict.  When read from a metainfo (i.e., .torrent
       file), the file must first be bdecoded before being passed to
       ConvertedMetainfo."""
    self.bad_torrent_wrongfield = False
    self.bad_torrent_unsolvable = False
    self.bad_torrent_noncharacter = False
    self.bad_conversion = False
    self.bad_windows = False
    self.bad_path = False
    self.reported_errors = False

    # All of the following values should be considered READONLY.
    # Modifications to the metainfo that should be written should
    # occur to the underlying metainfo dict directly.
    self.is_batch = False
    self.orig_files = None
    self.files_fs = None
    self.total_bytes = 0
    self.sizes = []
    self.comment = None
    self.title = None  # descriptive title text for whole torrent
    self.creation_date = None
    self.metainfo = metainfo
    self.encoding = None
    self.caches = None

    btformats.check_message(metainfo, check_paths=False)
    info = metainfo['info']
    self.is_private = info.has_key("private") and info['private']
    if 'encoding' in metainfo:
        self.encoding = metainfo['encoding']
    elif 'codepage' in metainfo:
        self.encoding = 'cp%s' % metainfo['codepage']
    if self.encoding is not None:
        try:
            for s in u'this is a test', u'these should also work in any encoding: 0123456789\0':
                assert s.encode(self.encoding).decode(self.encoding) == s
        except:
            self.encoding = 'iso-8859-1'
            self.bad_torrent_unsolvable = True

    if info.has_key('length'):
        self.total_bytes = info['length']
        self.sizes.append(self.total_bytes)
        if info.has_key('content_type'):
            self.content_type = info['content_type']
        else:
            self.content_type = None  # hasattr or None. Which is better?
    else:
        self.is_batch = True
        r = []
        self.orig_files = []
        self.sizes = []
        self.content_types = []
        i = 0
        # info['files'] is a list of dicts containing keys:
        # 'length', 'path', and 'content_type'.  The 'content_type'
        # key is optional.
        for f in info['files']:
            l = f['length']
            self.total_bytes += l
            self.sizes.append(l)
            self.content_types.append(f.get('content_type'))
            path = self._get_attr(f, 'path')
            if len(path[-1]) == 0:
                if l > 0:
                    raise BTFailure(_("Bad file path component: ") + x)
                # BitComet makes .torrent files with directories
                # listed along with the files, which we don't support
                # yet, in part because some idiot interpreted this as
                # a bug in BitComet rather than a feature.
                path.pop(-1)

            for x in path:
                if not btformats.allowed_path_re.match(x):
                    raise BTFailure(_("Bad file path component: ") + x)

            self.orig_files.append('/'.join(path))
            k = []
            for u in path:
                tf2 = self._to_fs_2(u)
                k.append((tf2, u))
            r.append((k, i))
            i += 1

        # If two or more file/subdirectory names in the same directory
        # would map to the same name after encoding conversions + Windows
        # workarounds, change them. Files are changed as
        # 'a.b.c'->'a.b.0.c', 'a.b.1.c' etc, directories or files without
        # '.' as 'a'->'a.0', 'a.1' etc. If one of the multiple original
        # names was a "clean" conversion, that one is always unchanged
        # and the rest are adjusted.  (An illustrative sketch of this
        # renaming scheme follows this method.)
        r.sort()

        self.files_fs = [None] * len(r)
        prev = [None]
        res = []
        stack = [{}]
        for x in r:
            j = 0
            x, i = x
            while x[j] == prev[j]:
                j += 1
            del res[j:]
            del stack[j + 1:]
            name = x[j][0][1]
            if name in stack[-1]:
                for name in generate_names(x[j][1], j != len(x) - 1):
                    name = self._to_fs(name)
                    if name not in stack[-1]:
                        break
            stack[-1][name] = None
            res.append(name)
            for j in xrange(j + 1, len(x)):
                name = x[j][0][1]
                stack.append({name: None})
                res.append(name)
            self.files_fs[i] = os.path.join(*res)
            prev = x

    self.name = self._get_attr(info, 'name')
    self.name_fs = self._to_fs(self.name)
    self.piece_length = info['piece length']

    self.announce = metainfo.get('announce')
    self.announce_list = metainfo.get('announce-list')
    if 'announce-list' not in metainfo and 'announce' not in metainfo:
        self.is_trackerless = True
    else:
        self.is_trackerless = False

    self.nodes = metainfo.get('nodes', [('router.bittorrent.com', 6881)])

    self.title = metainfo.get('title')
    self.comment = metainfo.get('comment')
    self.creation_date = metainfo.get('creation date')
    self.locale = metainfo.get('locale')

    self.safe = metainfo.get('safe')

    self.url_list = metainfo.get('url-list', [])
    if not isinstance(self.url_list, list):
        self.url_list = [self.url_list, ]

    self.caches = metainfo.get('caches')

    self.hashes = [info['pieces'][x:x + 20]
                   for x in xrange(0, len(info['pieces']), 20)]
    self.infohash = InfoHashType(sha(bencode(info)).digest())
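# Illustrative only -- a hedged stand-in for the generate_names helper used
# in the loop above, which lives elsewhere in this codebase.  It only mirrors
# the renaming scheme described in the comment inside __init__:
# 'a.b.c' -> 'a.b.0.c', 'a.b.1.c', ... and, for directories or names without
# a '.', 'a' -> 'a.0', 'a.1', ...
def _generate_names_sketch(name, is_dir):
    if is_dir or '.' not in name:
        prefix, suffix = name, ''
    else:
        base, ext = name.rsplit('.', 1)
        prefix, suffix = base, '.' + ext
    i = 0
    while True:
        yield '%s.%d%s' % (prefix, i, suffix)
        i += 1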
def got_key_reward(self, index, begin, length, key):
    """Process a received key reward.

       Compares the hash of the key reward with the hash received from the
       tracker.  If the hash from the tracker is not yet present, a tracker
       request is made to retrieve it.  If the key is OK, any pending
       requests that were blocked are unblocked and a key reward response
       is sent.

       @param index: index of the piece (piece number); a piece is a block
                     whose hash is stored in the metainfo file
       @param begin: offset of the subpiece within the piece; currently
                     always zero, which makes it possible to attribute a
                     single piece hash-check failure to a single peer
       @param length: length of the piece
       @return: nothing
    """
    log("in got key reward for piece %d %d %d and key [%s] len key=%d" %
        (index, begin, length, key.encode('hex'), len(key)))
    if len(key) < 127:
        log("received empty/too small key in got_key_reward, "
            "indicating a bad payment key")
        proceed = self.update_upload_key_status(index, begin, length, False)
        if not proceed:
            log("too many retries.. in upload (update key status)")
            return
        self.got_request(index, begin, length)
        return

    if (self.connector.id not in self.multidownload.payment_key_hash_cache.keys()) or \
            self.multidownload.payment_key_hash_cache[self.connector.id] == {}:
        self.elogger.warn("download: got_key_reward has no key cache, "
                          "so retrieve it first")
        if self.connector.id not in self.multidownload.payment_key_hash_cache.keys():
            self.elogger.warn("dl: initializing payment_key_hash_cache to {}")
            self.multidownload.payment_key_hash_cache[self.connector.id] = {}
        self.elogger.warn("calling rerequester.get_key_hash_list_for_peer()")
        self.multidownload.rerequester.get_key_hash_list_for_peer(
            self.connector.id, self.got_key_hash_list)
        if self.connector.id not in self.unchecked_key_rewards:
            self.elogger.warn("initializing unchecked key rewards")
            self.unchecked_key_rewards[self.connector.id] = {}
        self.unchecked_key_rewards[self.connector.id][(index, begin, length)] = key
        self.elogger.warn("added key to unchecked rewards")
        return

    self.elogger.warn("cached keys of peer already available")
    self.elogger.warn("encrypted key reward hex: %s" % key.encode('hex'))
    key = self.multidownload.pk_tools.decrypt_piece_tls(
        self.multidownload.private_key, key)
    self.elogger.warn("decrypted key reward: " + key)
    key_cache = self.multidownload.payment_key_hash_cache[self.connector.id]
    self.elogger.warn("comparing recvd %s, to stored %s" %
                      (key_cache[(index, begin, length)], sha(key).hexdigest()))
    result = (key_cache[(index, begin, length)] == sha(key).hexdigest())
    print("result is %d" % result)
    self.send_key_reward_response(index, begin, length, result)
    print("after send key reward response")
    if result:
        print("key matches stored key")
        if self.connector.id in self.multidownload.waiting_for_reward:
            print("waiting for reward true in upload")
            self.elogger.warn("waiting for reward true in upload")
            waiting_for_piece_rewards = \
                self.multidownload.waiting_for_reward[self.connector.id]
            #ez: iterate to send all responses
            if (index, begin, length) in waiting_for_piece_rewards:
                self.elogger.warn("removing from waiting for keys: %d" % index)
                waiting_for_piece_rewards.remove((index, begin, length))
                if self.blocked_piece_requests:
                    (bidx, bbegin, blen) = self.blocked_piece_requests.pop()
                    print("sending blocked request to got_request: %d %d %d" %
                          (bidx, bbegin, blen))
                    self.got_request(bidx, bbegin, blen)
            else:
                self.elogger.warn("received key but wasn't waiting for it "
                                  "(error in code)")
        else:
            self.elogger.warn("not waiting for reward from this peer")
    else:
        self.elogger.warn("received bad key")
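# Illustrative only -- the core comparison performed by got_key_reward above,
# isolated into a tiny helper.  In the real method the expected hash comes
# from the tracker-fed payment_key_hash_cache and the key has already been
# decrypted with pk_tools; the names below are placeholders.
from sha import sha

def key_reward_is_valid(decrypted_key, expected_hash_hex):
    """True when the decrypted payment key hashes to the tracker's value."""
    return sha(decrypted_key).hexdigest() == expected_hash_hex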
def __init__(self, metainfo):
    """metainfo is a dict.  When read from a metainfo (i.e., .torrent
       file), the file must first be bdecoded before being passed to
       ConvertedMetainfo."""
    self.bad_torrent_wrongfield = False
    self.bad_torrent_unsolvable = False
    self.bad_torrent_noncharacter = False
    self.bad_conversion = False
    self.bad_windows = False
    self.bad_path = False
    self.reported_errors = False

    # All of the following values should be considered READONLY.
    # Modifications to the metainfo that should be written should
    # occur to the underlying metainfo dict directly.
    self.is_batch = False
    self.orig_files = None
    self.files_fs = None
    self.total_bytes = 0
    self.sizes = []
    self.comment = None
    self.title = None  # descriptive title text for whole torrent
    self.creation_date = None
    self.metainfo = metainfo
    self.encoding = None
    self.caches = None
    #EZ micropayments are used
    self.micropayments = False

    btformats.check_message(metainfo, check_paths=False)
    info = metainfo['info']
    self.is_private = info.has_key("private") and info['private']
    if 'encoding' in metainfo:
        self.encoding = metainfo['encoding']
    elif 'codepage' in metainfo:
        self.encoding = 'cp%s' % metainfo['codepage']
    if self.encoding is not None:
        try:
            for s in u'this is a test', u'these should also work in any encoding: 0123456789\0':
                assert s.encode(self.encoding).decode(self.encoding) == s
        except:
            self.encoding = 'iso-8859-1'
            self.bad_torrent_unsolvable = True

    if info.has_key('length'):
        self.total_bytes = info['length']
        self.sizes.append(self.total_bytes)
        if info.has_key('content_type'):
            self.content_type = info['content_type']
        else:
            self.content_type = None  # hasattr or None. Which is better?
    else:
        self.is_batch = True
        r = []
        self.orig_files = []
        self.sizes = []
        self.content_types = []
        i = 0
        # info['files'] is a list of dicts containing keys:
        # 'length', 'path', and 'content_type'.  The 'content_type'
        # key is optional.
        for f in info['files']:
            l = f['length']
            self.total_bytes += l
            self.sizes.append(l)
            self.content_types.append(f.get('content_type'))
            path = self._get_attr(f, 'path')
            if len(path[-1]) == 0:
                if l > 0:
                    raise BTFailure(_("Bad file path component: ") + x)
                # BitComet makes .torrent files with directories
                # listed along with the files, which we don't support
                # yet, in part because some idiot interpreted this as
                # a bug in BitComet rather than a feature.
                path.pop(-1)

            for x in path:
                if not btformats.allowed_path_re.match(x):
                    raise BTFailure(_("Bad file path component: ") + x)

            self.orig_files.append('/'.join(path))
            k = []
            for u in path:
                tf2 = self._to_fs_2(u)
                k.append((tf2, u))
            r.append((k, i))
            i += 1

        # If two or more file/subdirectory names in the same directory
        # would map to the same name after encoding conversions + Windows
        # workarounds, change them. Files are changed as
        # 'a.b.c'->'a.b.0.c', 'a.b.1.c' etc, directories or files without
        # '.' as 'a'->'a.0', 'a.1' etc. If one of the multiple original
        # names was a "clean" conversion, that one is always unchanged
        # and the rest are adjusted.
        r.sort()

        self.files_fs = [None] * len(r)
        prev = [None]
        res = []
        stack = [{}]
        for x in r:
            j = 0
            x, i = x
            while x[j] == prev[j]:
                j += 1
            del res[j:]
            del stack[j + 1:]
            name = x[j][0][1]
            if name in stack[-1]:
                for name in generate_names(x[j][1], j != len(x) - 1):
                    name = self._to_fs(name)
                    if name not in stack[-1]:
                        break
            stack[-1][name] = None
            res.append(name)
            for j in xrange(j + 1, len(x)):
                name = x[j][0][1]
                stack.append({name: None})
                res.append(name)
            self.files_fs[i] = os.path.join(*res)
            prev = x

    self.name = self._get_attr(info, 'name')
    self.name_fs = self._to_fs(self.name)
    self.piece_length = info['piece length']

    self.announce = metainfo.get('announce')
    self.announce_list = metainfo.get('announce-list')
    if 'announce-list' not in metainfo and 'announce' not in metainfo:
        self.is_trackerless = True
    else:
        self.is_trackerless = False

    #EZ
    if 'micropayments' in metainfo and metainfo['micropayments'] == True:
        print "found micropayments ==true in metafile"
        self.micropayments = True

    self.nodes = metainfo.get('nodes', [('router.bittorrent.com', 6881)])

    self.title = metainfo.get('title')
    self.comment = metainfo.get('comment')
    self.creation_date = metainfo.get('creation date')
    self.locale = metainfo.get('locale')

    self.safe = metainfo.get('safe')

    self.url_list = metainfo.get('url-list', [])
    if not isinstance(self.url_list, list):
        self.url_list = [self.url_list, ]

    self.caches = metainfo.get('caches')

    self.hashes = [info['pieces'][x:x + 20]
                   for x in xrange(0, len(info['pieces']), 20)]
    self.infohash = InfoHashType(sha(bencode(info)).digest())
                              'encoding "%s" is wrong or the filename contains '
                              'illegal bytes.') % (name, s, get_filesystem_encoding()))
        if u.translate(noncharacter_translate) != u:
            raise BTFailure(_('File/directory name "%s" contains reserved '
                              'unicode values that do not correspond to '
                              'characters.') % name)
        return u.encode('utf-8')

    path = os.path.abspath(path)
    if os.path.isdir(path):
        subs = subfiles(path)
        subs.sort()
        pieces = []
        sh = sha()
        done = 0
        fs = []
        totalsize = 0.0
        totalhashed = 0
        for p, f in subs:
            totalsize += os.path.getsize(f)
        for p, f in subs:
            pos = 0
            size = os.path.getsize(f)
            p2 = [to_utf8(n) for n in p]
            if content_type:
                fs.append({'length': size, 'path': p2,