def on_button_folder_clicked(self, widget):
    """Prompt the user for a folder and load the selection into the files view."""
    log.debug('on_button_folder_clicked')
    # Build a single-selection folder chooser attached to the main dialog.
    folder_dialog = Gtk.FileChooserDialog(
        _('Choose a folder'),
        self.dialog,
        Gtk.FileChooserAction.SELECT_FOLDER,
        buttons=(
            _('_Cancel'),
            Gtk.ResponseType.CANCEL,
            _('_Open'),
            Gtk.ResponseType.OK,
        ),
    )
    folder_dialog.set_transient_for(self.dialog)
    folder_dialog.set_select_multiple(False)
    folder_dialog.set_property('skip-taskbar-hint', True)

    # Run the dialog; anything other than OK means the user backed out.
    if folder_dialog.run() != Gtk.ResponseType.OK:
        folder_dialog.destroy()
        return

    folder = folder_dialog.get_filename()
    folder_path = decode_bytes(folder)
    # Replace the current tree contents with the newly selected folder.
    self.files_treestore.clear()
    self.files_treestore.append(
        None, [folder, 'document-open-symbolic', get_path_size(folder_path)]
    )
    self.adjust_piece_size()
    folder_dialog.destroy()
def on_button_folder_clicked(self, widget):
    """Prompt the user for a folder and load the selection into the files view."""
    log.debug('on_button_folder_clicked')
    # Build a single-selection folder chooser attached to the main dialog.
    folder_dialog = gtk.FileChooserDialog(
        _('Choose a folder'),
        self.dialog,
        gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
        buttons=(
            gtk.STOCK_CANCEL,
            gtk.RESPONSE_CANCEL,
            gtk.STOCK_OPEN,
            gtk.RESPONSE_OK,
        ),
    )
    folder_dialog.set_transient_for(self.dialog)
    folder_dialog.set_select_multiple(False)
    folder_dialog.set_property('skip-taskbar-hint', True)

    # Run the dialog; anything other than OK means the user backed out.
    if folder_dialog.run() != gtk.RESPONSE_OK:
        folder_dialog.destroy()
        return

    folder = folder_dialog.get_filename()
    folder_path = folder.decode('utf-8')
    # Replace the current tree contents with the newly selected folder.
    self.files_treestore.clear()
    self.files_treestore.append(
        None, [folder, gtk.STOCK_OPEN, get_path_size(folder_path)]
    )
    self.adjust_piece_size()
    folder_dialog.destroy()
def save(self, torrent_path, progress=None):
    """
    Creates and saves the torrent file to `path`.

    :param torrent_path: where to save the torrent file
    :type torrent_path: string
    :param progress: a function to be called when a piece is hashed
    :type progress: function(num_completed, num_pieces)

    :raises InvalidPath: if the data_path has not been set
    """
    if not self.data_path:
        raise InvalidPath("Need to set a data_path!")

    torrent = {
        "info": {}
    }

    if self.comment:
        torrent["comment"] = self.comment.encode("UTF-8")

    if self.private:
        torrent["info"]["private"] = True

    if self.trackers:
        torrent["announce"] = self.trackers[0][0]
        torrent["announce-list"] = self.trackers
    else:
        torrent["announce"] = ""

    if self.webseeds:
        httpseeds = []
        webseeds = []
        for w in self.webseeds:
            if w.endswith(".php"):
                httpseeds.append(w)
            else:
                webseeds.append(w)
        if httpseeds:
            torrent["httpseeds"] = httpseeds
        if webseeds:
            torrent["url-list"] = webseeds

    datasize = get_path_size(self.data_path)

    if self.piece_size:
        # Bug fix: the configured size lives on the instance; the local
        # name `piece_size` was previously read here before assignment,
        # which raised UnboundLocalError whenever self.piece_size was set.
        piece_size = self.piece_size * 1024
    else:
        # We need to calculate a piece size: start at 16 KiB and double
        # until we get under ~1024 pieces or hit the 8 MiB ceiling.
        piece_size = 16384
        while (datasize / piece_size) > 1024 and piece_size < (8192 * 1024):
            piece_size *= 2

    # Calculate the number of pieces we will require for the data
    num_pieces = datasize / piece_size
    if datasize % piece_size:
        num_pieces += 1

    torrent["info"]["piece length"] = piece_size

    # Create the info
    if os.path.isdir(self.data_path):
        torrent["info"]["name"] = os.path.split(self.data_path)[1]
        files = []
        padding_count = 0
        # Collect a list of file paths and add padding files if necessary
        for (dirpath, dirnames, filenames) in os.walk(self.data_path):
            for index, filename in enumerate(filenames):
                size = get_path_size(os.path.join(self.data_path, dirpath, filename))
                p = dirpath[len(self.data_path):]
                p = p.lstrip("/")
                p = p.split("/")
                if p[0]:
                    p += [filename]
                else:
                    p = [filename]
                files.append((size, p))
                # Add a padding file if necessary
                if self.pad_files and (index + 1) < len(filenames):
                    left = size % piece_size
                    if left:
                        p = list(p)
                        p[-1] = "_____padding_file_" + str(padding_count)
                        files.append((piece_size - left, p))
                        padding_count += 1

        # Run the progress function with 0 completed pieces
        if progress:
            progress(0, num_pieces)

        fs = []
        pieces = []
        # Create the piece hashes; `buf` accumulates data across file
        # boundaries so pieces can span more than one file.
        buf = ""
        for size, path in files:
            path = [s.decode(sys.getfilesystemencoding()).encode("UTF-8") for s in path]
            fs.append({"length": size, "path": path})
            if path[-1].startswith("_____padding_file_"):
                buf += "\0" * size
                pieces.append(sha(buf).digest())
                buf = ""
                fs[-1]["attr"] = "p"
            else:
                # Use a context manager so the handle is closed even if
                # read() raises (the old code leaked it on error).
                with open(os.path.join(self.data_path, *path), "rb") as fd:
                    r = fd.read(piece_size - len(buf))
                    while r:
                        buf += r
                        if len(buf) == piece_size:
                            pieces.append(sha(buf).digest())
                            # Run the progress function if necessary
                            if progress:
                                progress(len(pieces), num_pieces)
                            buf = ""
                        else:
                            break
                        r = fd.read(piece_size - len(buf))

        # Hash the final partial piece, if any.
        if buf:
            pieces.append(sha(buf).digest())
            if progress:
                progress(len(pieces), num_pieces)
            buf = ""

        torrent["info"]["pieces"] = "".join(pieces)
        torrent["info"]["files"] = fs

    elif os.path.isfile(self.data_path):
        torrent["info"]["name"] = os.path.split(self.data_path)[1]
        torrent["info"]["length"] = get_path_size(self.data_path)
        pieces = []

        with open(self.data_path, "rb") as fd:
            r = fd.read(piece_size)
            while r:
                pieces.append(sha(r).digest())
                if progress:
                    progress(len(pieces), num_pieces)
                r = fd.read(piece_size)

        torrent["info"]["pieces"] = "".join(pieces)

    # Write out the torrent file (with-block closes/flushes the handle,
    # which the old bare open().write() did not guarantee).
    with open(torrent_path, "wb") as fd:
        fd.write(bencode(torrent))
def save(self, torrent_path, progress=None):
    """Creates and saves the torrent file to `path`.

    Args:
        torrent_path (str): Location to save the torrent file.
        progress(func, optional): The function to be called when a piece
            is hashed. The provided function should be in the format
            `func(num_completed, num_pieces)`.

    Raises:
        InvalidPath: If the data_path has not been set.

    """
    if not self.data_path:
        raise InvalidPath('Need to set a data_path!')

    torrent = {'info': {}}

    if self.comment:
        torrent['comment'] = self.comment

    if self.private:
        torrent['info']['private'] = True

    if self.trackers:
        torrent['announce'] = self.trackers[0][0]
        torrent['announce-list'] = self.trackers
    else:
        torrent['announce'] = ''

    if self.webseeds:
        httpseeds = []
        webseeds = []
        for w in self.webseeds:
            if w.endswith('.php'):
                httpseeds.append(w)
            else:
                webseeds.append(w)
        if httpseeds:
            torrent['httpseeds'] = httpseeds
        if webseeds:
            torrent['url-list'] = webseeds

    datasize = get_path_size(self.data_path)

    if self.piece_size:
        piece_size = self.piece_size * 1024
    else:
        # We need to calculate a piece size: start at 16 KiB and double
        # until we get under ~1024 pieces or hit the 8 MiB ceiling.
        piece_size = 16384
        while (datasize // piece_size) > 1024 and piece_size < (8192 * 1024):
            piece_size *= 2

    # Calculate the number of pieces we will require for the data
    num_pieces = datasize // piece_size
    if datasize % piece_size:
        num_pieces += 1

    torrent['info']['piece length'] = piece_size
    torrent['info']['name'] = os.path.split(self.data_path)[1]

    # Create the info
    if os.path.isdir(self.data_path):
        files = []
        padding_count = 0
        # Collect a list of file paths and add padding files if necessary
        for (dirpath, dirnames, filenames) in os.walk(self.data_path):
            for index, filename in enumerate(filenames):
                size = get_path_size(
                    os.path.join(self.data_path, dirpath, filename))
                p = dirpath[len(self.data_path):]
                p = p.lstrip('/')
                p = p.split('/')
                if p[0]:
                    p += [filename]
                else:
                    p = [filename]
                files.append((size, p))
                # Add a padding file if necessary
                if self.pad_files and (index + 1) < len(filenames):
                    left = size % piece_size
                    if left:
                        p = list(p)
                        p[-1] = '_____padding_file_' + str(padding_count)
                        files.append((piece_size - left, p))
                        padding_count += 1

        # Run the progress function with 0 completed pieces
        if progress:
            progress(0, num_pieces)

        fs = []
        pieces = []
        # Create the piece hashes; `buf` accumulates data across file
        # boundaries so pieces can span more than one file.
        buf = b''
        for size, path in files:
            path = [s.encode('UTF-8') for s in path]
            fs.append({b'length': size, b'path': path})
            if path[-1].startswith(b'_____padding_file_'):
                buf += b'\0' * size
                pieces.append(sha(buf).digest())
                buf = b''
                fs[-1][b'attr'] = b'p'
            else:
                with open(
                    os.path.join(self.data_path.encode('utf8'), *path), 'rb'
                ) as _file:
                    r = _file.read(piece_size - len(buf))
                    while r:
                        buf += r
                        if len(buf) == piece_size:
                            pieces.append(sha(buf).digest())
                            # Run the progress function if necessary
                            if progress:
                                progress(len(pieces), num_pieces)
                            buf = b''
                        else:
                            break
                        r = _file.read(piece_size - len(buf))
        torrent['info']['files'] = fs
        # Hash the final partial piece, if any.
        if buf:
            pieces.append(sha(buf).digest())
            if progress:
                progress(len(pieces), num_pieces)
            # Bug fix: the trailing reset used to assign a str ('') to a
            # bytes buffer; it was a dead store, so it is simply removed.

    elif os.path.isfile(self.data_path):
        torrent['info']['length'] = get_path_size(self.data_path)
        pieces = []

        with open(self.data_path, 'rb') as _file:
            r = _file.read(piece_size)
            while r:
                pieces.append(sha(r).digest())
                if progress:
                    progress(len(pieces), num_pieces)
                r = _file.read(piece_size)

    torrent['info']['pieces'] = b''.join(pieces)

    # Write out the torrent file
    with open(torrent_path, 'wb') as _file:
        _file.write(bencode(utf8_encode_structure(torrent)))
def test_get_path_size(self):
    """get_path_size() returns 0 for os.devnull and -1 for a missing path."""
    if windows_check() and sys.version_info < (3, 8):
        # https://bugs.python.org/issue1311
        raise unittest.SkipTest('os.devnull returns False on Windows')
    # assertEqual reports both values on failure, unlike assertTrue(a == b)
    # which only prints "False is not true".
    self.assertEqual(get_path_size(os.devnull), 0)
    self.assertEqual(get_path_size('non-existant.file'), -1)
def save(self, torrent_path, progress=None):
    """Creates and saves the torrent file to `path`.

    Args:
        torrent_path (str): Location to save the torrent file.
        progress(func, optional): The function to be called when a piece
            is hashed. The provided function should be in the format
            `func(num_completed, num_pieces)`.

    Raises:
        InvalidPath: If the data_path has not been set.

    """
    if not self.data_path:
        raise InvalidPath('Need to set a data_path!')

    torrent = {
        'info': {}
    }

    if self.comment:
        torrent['comment'] = self.comment

    if self.private:
        torrent['info']['private'] = True

    if self.trackers:
        torrent['announce'] = self.trackers[0][0]
        torrent['announce-list'] = self.trackers
    else:
        torrent['announce'] = ''

    if self.webseeds:
        httpseeds = []
        webseeds = []
        for w in self.webseeds:
            if w.endswith('.php'):
                httpseeds.append(w)
            else:
                webseeds.append(w)
        if httpseeds:
            torrent['httpseeds'] = httpseeds
        if webseeds:
            torrent['url-list'] = webseeds

    datasize = get_path_size(self.data_path)

    if self.piece_size:
        piece_size = self.piece_size * 1024
    else:
        # We need to calculate a piece size: start at 16 KiB and double
        # until we get under ~1024 pieces or hit the 8 MiB ceiling.
        piece_size = 16384
        while (datasize // piece_size) > 1024 and piece_size < (8192 * 1024):
            piece_size *= 2

    # Calculate the number of pieces we will require for the data
    num_pieces = datasize // piece_size
    if datasize % piece_size:
        num_pieces += 1

    torrent['info']['piece length'] = piece_size
    torrent['info']['name'] = os.path.split(self.data_path)[1]

    # Create the info
    if os.path.isdir(self.data_path):
        files = []
        padding_count = 0
        # Collect a list of file paths and add padding files if necessary
        for (dirpath, dirnames, filenames) in os.walk(self.data_path):
            for index, filename in enumerate(filenames):
                size = get_path_size(os.path.join(self.data_path, dirpath, filename))
                p = dirpath[len(self.data_path):]
                p = p.lstrip('/')
                p = p.split('/')
                if p[0]:
                    p += [filename]
                else:
                    p = [filename]
                files.append((size, p))
                # Add a padding file if necessary
                if self.pad_files and (index + 1) < len(filenames):
                    left = size % piece_size
                    if left:
                        p = list(p)
                        p[-1] = '_____padding_file_' + str(padding_count)
                        files.append((piece_size - left, p))
                        padding_count += 1

        # Run the progress function with 0 completed pieces
        if progress:
            progress(0, num_pieces)

        fs = []
        pieces = []
        # Create the piece hashes; `buf` accumulates data across file
        # boundaries so pieces can span more than one file.
        # Bug fix: the buffer must be bytes — it is extended with data
        # read in binary mode and hashed; the old str buffer ('') raised
        # TypeError on Python 3 as soon as bytes were appended.
        buf = b''
        for size, path in files:
            # Bug fix: path components are already str on Python 3; the
            # old decode(sys.getfilesystemencoding()).encode('UTF-8')
            # round-trip failed with AttributeError on str. The final
            # utf8_encode_structure() call encodes them for bencoding.
            fs.append({'length': size, 'path': path})
            if path[-1].startswith('_____padding_file_'):
                # Bug fix: pad with bytes, not str, to match the buffer.
                buf += b'\0' * size
                pieces.append(sha(buf).digest())
                buf = b''
                fs[-1]['attr'] = 'p'
            else:
                with open(os.path.join(self.data_path, *path), 'rb') as _file:
                    r = _file.read(piece_size - len(buf))
                    while r:
                        buf += r
                        if len(buf) == piece_size:
                            pieces.append(sha(buf).digest())
                            # Run the progress function if necessary
                            if progress:
                                progress(len(pieces), num_pieces)
                            buf = b''
                        else:
                            break
                        r = _file.read(piece_size - len(buf))
        torrent['info']['files'] = fs
        # Hash the final partial piece, if any.
        if buf:
            pieces.append(sha(buf).digest())
            if progress:
                progress(len(pieces), num_pieces)
            buf = b''

    elif os.path.isfile(self.data_path):
        torrent['info']['length'] = get_path_size(self.data_path)
        pieces = []

        with open(self.data_path, 'rb') as _file:
            r = _file.read(piece_size)
            while r:
                pieces.append(sha(r).digest())
                if progress:
                    progress(len(pieces), num_pieces)
                r = _file.read(piece_size)

    torrent['info']['pieces'] = b''.join(pieces)

    # Write out the torrent file
    with open(torrent_path, 'wb') as _file:
        _file.write(bencode(utf8_encode_structure(torrent)))
def test_get_path_size(self):
    """get_path_size() returns 0 for os.devnull and -1 for a missing path."""
    if windows_check():
        raise unittest.SkipTest('os devnull is different on windows')
    # assertEqual reports both values on failure, unlike assertTrue(a == b)
    # which only prints "False is not true".
    self.assertEqual(get_path_size(os.devnull), 0)
    self.assertEqual(get_path_size('non-existant.file'), -1)
def test_get_path_size(self):
    """get_path_size() returns 0 for os.devnull and -1 for a missing path."""
    # assertEqual reports both values on failure, unlike assertTrue(a == b)
    # which only prints "False is not true".
    self.assertEqual(get_path_size(os.devnull), 0)
    self.assertEqual(get_path_size('non-existant.file'), -1)
def save(self, torrent_path, progress=None):
    """
    Creates and saves the torrent file to `path`.

    :param torrent_path: where to save the torrent file
    :type torrent_path: string
    :param progress: a function to be called when a piece is hashed
    :type progress: function(num_completed, num_pieces)

    :raises InvalidPath: if the data_path has not been set
    """
    if not self.data_path:
        raise InvalidPath("Need to set a data_path!")

    torrent = {"info": {}}

    if self.comment:
        torrent["comment"] = self.comment.encode("UTF-8")

    if self.private:
        torrent["info"]["private"] = True

    if self.trackers:
        torrent["announce"] = self.trackers[0][0]
        torrent["announce-list"] = self.trackers
    else:
        torrent["announce"] = ""

    if self.webseeds:
        httpseeds = []
        webseeds = []
        for w in self.webseeds:
            if w.endswith(".php"):
                httpseeds.append(w)
            else:
                webseeds.append(w)
        if httpseeds:
            torrent["httpseeds"] = httpseeds
        if webseeds:
            torrent["url-list"] = webseeds

    datasize = get_path_size(self.data_path)

    if self.piece_size:
        # Bug fix: the configured size lives on the instance; the local
        # name `piece_size` was previously read here before assignment,
        # which raised UnboundLocalError whenever self.piece_size was set.
        piece_size = self.piece_size * 1024
    else:
        # We need to calculate a piece size: start at 16 KiB and double
        # until we get under ~1024 pieces or hit the 8 MiB ceiling.
        piece_size = 16384
        while (datasize / piece_size) > 1024 and piece_size < (8192 * 1024):
            piece_size *= 2

    # Calculate the number of pieces we will require for the data
    num_pieces = datasize / piece_size
    if datasize % piece_size:
        num_pieces += 1

    torrent["info"]["piece length"] = piece_size

    # Create the info
    if os.path.isdir(self.data_path):
        torrent["info"]["name"] = os.path.split(self.data_path)[1]
        files = []
        padding_count = 0
        # Collect a list of file paths and add padding files if necessary
        for (dirpath, dirnames, filenames) in os.walk(self.data_path):
            for index, filename in enumerate(filenames):
                size = get_path_size(os.path.join(self.data_path, dirpath, filename))
                p = dirpath[len(self.data_path) :]
                p = p.lstrip("/")
                p = p.split("/")
                if p[0]:
                    p += [filename]
                else:
                    p = [filename]
                files.append((size, p))
                # Add a padding file if necessary
                if self.pad_files and (index + 1) < len(filenames):
                    left = size % piece_size
                    if left:
                        p = list(p)
                        p[-1] = "_____padding_file_" + str(padding_count)
                        files.append((piece_size - left, p))
                        padding_count += 1

        # Run the progress function with 0 completed pieces
        if progress:
            progress(0, num_pieces)

        fs = []
        pieces = []
        # Create the piece hashes; `buf` accumulates data across file
        # boundaries so pieces can span more than one file.
        buf = ""
        for size, path in files:
            path = [s.decode(sys.getfilesystemencoding()).encode("UTF-8") for s in path]
            fs.append({"length": size, "path": path})
            if path[-1].startswith("_____padding_file_"):
                buf += "\0" * size
                pieces.append(sha(buf).digest())
                buf = ""
                fs[-1]["attr"] = "p"
            else:
                # Use a context manager so the handle is closed even if
                # read() raises (the old code leaked it on error).
                with open(os.path.join(self.data_path, *path), "rb") as fd:
                    r = fd.read(piece_size - len(buf))
                    while r:
                        buf += r
                        if len(buf) == piece_size:
                            pieces.append(sha(buf).digest())
                            # Run the progress function if necessary
                            if progress:
                                progress(len(pieces), num_pieces)
                            buf = ""
                        else:
                            break
                        r = fd.read(piece_size - len(buf))

        # Hash the final partial piece, if any.
        if buf:
            pieces.append(sha(buf).digest())
            if progress:
                progress(len(pieces), num_pieces)
            buf = ""

        torrent["info"]["pieces"] = "".join(pieces)
        torrent["info"]["files"] = fs

    elif os.path.isfile(self.data_path):
        torrent["info"]["name"] = os.path.split(self.data_path)[1]
        torrent["info"]["length"] = get_path_size(self.data_path)
        pieces = []

        with open(self.data_path, "rb") as fd:
            r = fd.read(piece_size)
            while r:
                pieces.append(sha(r).digest())
                if progress:
                    progress(len(pieces), num_pieces)
                r = fd.read(piece_size)

        torrent["info"]["pieces"] = "".join(pieces)

    # Write out the torrent file (with-block closes/flushes the handle,
    # which the old bare open().write() did not guarantee).
    with open(torrent_path, "wb") as fd:
        fd.write(bencode(torrent))