def create_info_data(self, files, encoding=None, piece_size=None,
                     validate=True, private=False):
    """
    Create the 'info' dict of a torrent metadata file from a list of
    files / directories. If the list contains a directory it is searched
    recursively. Values passed here (other than the file list) take
    priority over defaults set at instantiation.

    :param files: list of file and/or directory paths to index
    :param encoding: accepted for interface compatibility; not used by
                     this method (NOTE(review): confirm whether callers
                     expect it to affect hashing)
    :param piece_size: piece size in bytes; falls back to
                       ``self.piece_size``, then to a computed size
    :param validate: when True, run ``validate_info_data`` on the result
    :param private: mark the torrent as private
    :returns: the populated info dict
    :raises Exception: if no files are found under ``files``
    """
    # expand any directories into a flat list of file paths to index
    file_paths = []
    for path in files:
        file_paths += find_files(path)

    # make sure there are any files to be had
    if not file_paths:
        raise Exception('No Files Found!')

    # per-file sizes and the overall payload size
    file_sizes = determine_file_sizes(file_paths)
    total_size = sum(file_sizes.values())

    # piece size priority: explicit argument > instance default > computed
    if not piece_size:
        if self.piece_size:
            piece_size = self.piece_size
        else:
            piece_size = determine_piece_size(total_size)

    # hash the payload into concatenated piece hashes
    piece_hashes = self.hash_pieces(file_paths, file_sizes, piece_size)

    # figure out what the "name" of our torrent is
    torrent_name = determine_torrent_name(files)

    # build the info dict. BUG FIX: torrent_name was previously passed
    # positionally into create_info_dict's create_md5 slot, so the name
    # was silently treated as a truthy md5 flag and never used as the
    # name — pass it by keyword instead.
    info_data = self.create_info_dict(file_paths, piece_hashes, file_sizes,
                                      piece_size, total_size, private,
                                      file_name=torrent_name)

    # BUG FIX: honor the 'validate' flag (it was previously ignored and
    # validation always ran); default True preserves old behavior
    if validate:
        validate_info_data(info_data)

    # BUG FIX: the original built the dict but never returned it
    return info_data
def create_info_dict(self, file_paths, pieces=None, file_sizes=None,
                     piece_size=None, total_size=None, private=False,
                     create_md5=False, file_name=None, rel_file_base=None):
    """
    Create the 'info' part of the meta data from pre-computed pieces.

    :param file_paths: list of paths of the files being described
    :param pieces: iterable of piece-hash strings, joined to form the
                   'pieces' value (must not be None)
    :param file_sizes: dict of path -> size; computed if not given
    :param piece_size: piece size in bytes; computed if not given
    :param total_size: total payload size; computed if not given
    :param private: mark the torrent as private (1) or not (0)
    :param create_md5: also include the optional per-file md5 sums
    :param file_name: explicit 'name' for the info dict
    :param rel_file_base: base path that file paths are made relative
                          to; defaults to the common prefix of file_paths
    :returns: the populated info dict
    """
    # fill in anything the caller didn't supply
    if not file_sizes:
        file_sizes = determine_file_sizes(file_paths)
    if not total_size:
        total_size = sum(file_sizes.values())
    if not piece_size:
        piece_size = determine_piece_size(total_size)

    # core required keys of the info dict
    info_data = {
        'piece length': piece_size,
        'pieces': ''.join(pieces),
        'private': 1 if private else 0,
    }

    # don't have to have a file name
    if file_name:
        info_data['name'] = file_name

    # BUG FIX: the rel_file_base argument was previously accepted but
    # unconditionally overwritten; only compute the common prefix when
    # the caller didn't supply a base
    if rel_file_base is None:
        rel_file_base = os.path.commonprefix(file_paths)
    log.debug('rel file base: %s', rel_file_base)

    if len(file_paths) == 1:
        # single-file mode: 'length' at the top level
        info_data['length'] = total_size
        # optionally include the per-file md5
        if create_md5:
            info_data['md5sum'] = md5sum(file_paths[0])
        if not info_data.get('name'):
            # fall back to the file's own (relative) name
            info_data['name'] = get_file_name(file_paths[0], rel_file_base)
    else:
        # multi-file mode: one entry per file
        info_data['files'] = self.create_files_info(file_paths, file_sizes,
                                                    create_md5,
                                                    rel_file_base)
        if not info_data.get('name'):
            # guess a name from whatever the paths share
            name = get_common_name(file_paths)
            if name:
                info_data['name'] = name

    # make sure our meta info is valid (raises on failure)
    validate_info_data(info_data)

    # BUG FIX: the original built the dict but never returned it
    return info_data