Example #1
    def create_info_data(self,files,encoding=None,
                         piece_size=None,validate=True,private=False):
        """ creates dict which is the info part of a meta data file
             from a list of files / directories.
            if the list contains a directory the directory is recursively
            searched. values passed (other than file list) take priorty over
            defaults passed in @ instantiation """

        # get list of files to index
        file_paths = []
        for path in files:
            file_paths += find_files(path)

        # make sure we actually found some files to index
        if not file_paths:
            raise Exception('No Files Found!')

        # get the file sizes
        file_sizes = determine_file_sizes(file_paths)

        # determine our total
        total_size = sum(file_sizes.itervalues())

        # let's figure out what our piece size will be
        if not piece_size: # did they pass us a value ?
            if self.piece_size:
                piece_size = self.piece_size # did they set a default ?
            else:
                piece_size = determine_piece_size(total_size)

        # let's get our piece hashes
        piece_hashes = self.hash_pieces(file_paths,file_sizes,piece_size)

        # figure out what the "name" of our torrent is
        torrent_name = determine_torrent_name(files)

        # create our info dict; keywords keep the name from landing
        # in the wrong positional slot
        info_data = self.create_info_dict(file_paths,
                                          piece_hashes,
                                          file_sizes,
                                          piece_size,
                                          total_size,
                                          private=private,
                                          file_name=torrent_name)

        # make sure what we built is valid
        if validate:
            validate_info_data(info_data)

        return info_data
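
A determine_piece_size helper is used above but never shown. As a minimal
sketch of how such a helper might look, assuming the common approach of
growing a power-of-two piece size (the ~1500-piece target and the
16 KiB / 4 MiB bounds here are our assumptions, not the original code's):

def determine_piece_size(total_size, target_pieces=1500,
                         min_size=16 * 1024, max_size=4 * 1024 * 1024):
    # grow the piece size by powers of two until the piece count
    # drops to roughly target_pieces, staying within the clamp
    piece_size = min_size
    while piece_size < max_size and total_size // piece_size > target_pieces:
        piece_size *= 2
    return piece_size

Under these assumptions a 700 MiB payload gets 512 KiB pieces (about
1400 of them).
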
Example #2
    def create_files_info(self,file_paths,file_sizes=None,
                               create_md5=False,rel_file_base=None):
        """ create dict of file info for the info section of meta data.
            file_paths can also be a dict who's key is the file path
            and the value is the file size """

        if not file_sizes:
            file_sizes = determine_file_sizes(file_paths)

        files_info = []
        # go through our files adding their info dict
        for path in file_paths:
            name = get_file_name(path,rel_file_base)
            file_info = {
                'length': file_sizes.get(path),
                'path': [x for x in name.split(os.sep) if x.strip()]
            }
            if create_md5:
                file_info['md5sum'] = md5sum(path)
            files_info.append(file_info)

        return files_info
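
To make the shape of the return value concrete, a hypothetical two-file
layout (file names and sizes invented for illustration; each path is
split into components relative to rel_file_base) would come back as:

[
    {'length': 4194304, 'path': ['disc1', 'track01.mp3']},
    {'length': 2097152, 'path': ['disc1', 'track02.mp3']},
]
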
Example #3
    def digest(self,piece_size=None):
        # read the files straight through, end to end

        # fill in our data
        file_paths = self.files.keys()
        file_sizes = determine_file_sizes(file_paths)
        total_size = sum(file_sizes.itervalues())
        piece_size = piece_size or determine_piece_size(total_size)

        # now we go through the files data concatenated end to end
        # hashing pieces along the way
        data_pos = 0L
        pieces = []
        piece_pos = 0L
        sh = sha()
        for path in file_paths:
            # pull this file's size
            file_size = file_sizes.get(path)

            # keep track of our pos w/in the file
            file_pos = 0L

            # open the file
            with open(path,'rb') as fh:
                # loop through the files data
                while file_pos < file_size:
                    # b/c we might hit the end of the file or the
                    # end of a piece, we can't just read a whole piece
                    read_len = min(file_size-file_pos,piece_size-piece_pos)
                    data = fh.read(read_len)
                    sh.update(data)

                    # update our positions
                    file_pos += read_len
                    data_pos += read_len
                    piece_pos += read_len

                    # if we hit the end of a piece, record its hash
                    # and start a fresh hash for the next piece
                    if piece_pos == piece_size:
                        pieces.append(sh.digest())
                        sh = sha()
                        piece_pos = 0L


        # if we finished w/ some data left over add it
        if piece_pos > 0:
            pieces.append(sh.digest())

        return ''.join(pieces)

        # we are going to fill out the info for the files
        # we don't have info for
        for path, info in self.files.iteritems():
            # first check and see if we have the size
            # we need to be sure to include 0 size files
            if info.get('size') is None:
                info['size'] = os.path.getsize(path)
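
digest() returns the per-piece SHA-1 digests concatenated into one flat
string, which is the format the 'pieces' key of an info dict expects.
A consumer can recover the individual hashes by slicing the blob into
20-byte chunks (20 bytes is the SHA-1 digest length; split_piece_hashes
is our name for this sketch, not part of the original code):

def split_piece_hashes(pieces_blob, digest_len=20):
    # the blob is a flat concatenation of fixed-width digests,
    # one per piece, in piece order
    if len(pieces_blob) % digest_len:
        raise ValueError('pieces blob is not a multiple of '
                         '%d bytes' % digest_len)
    return [pieces_blob[i:i + digest_len]
            for i in range(0, len(pieces_blob), digest_len)]
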
Example #4
    def create_info_dict(self,file_paths,pieces=None,file_sizes=None,
                         piece_size=None,total_size=None,
                         private=False,create_md5=False,file_name=None,
                         rel_file_base=None):
        """ creates a dict of the 'info' part of the meta data """
        # fill out our data
        if not file_sizes:
            file_sizes = determine_file_sizes(file_paths)
        if not total_size:
            total_size = sum(file_sizes.itervalues())
        if not piece_size:
            piece_size = determine_piece_size(total_size)

        # create our meta data dict
        info_data = {
            'piece length': piece_size,
            'pieces': ''.join(pieces),
            'private': 1 if private else 0,
        }

        # don't have to have a file name
        if file_name:
            info_data['name'] = file_name

        # if we weren't given a rel base, use the common prefix of
        # all the files; any paths in the info will be relative to it
        if not rel_file_base:
            rel_file_base = os.path.commonprefix(file_paths)

        log.debug('rel file base: %s',rel_file_base)

        # length only appropriate if there is a single file
        if len(file_paths) == 1:
            info_data['length'] = total_size

            # if they want us to create the optional md5
            # for the file then let's do so
            if create_md5:
                info_data['md5sum'] = md5sum(file_paths[0])

            if not info_data.get('name'):
                # we'll go ahead and put a name
                info_data['name'] = get_file_name(file_paths[0],
                                                  rel_file_base)

        # if it's multiple files we give it each one individually
        else:
            info_data['files'] = self.create_files_info(file_paths,
                                                        file_sizes,
                                                        create_md5,
                                                        rel_file_base)

            if not info_data.get('name'):
                # guess a name
                name = get_common_name(file_paths)
                if name:
                    info_data['name'] = name

        # make sure our meta info is valid
        validate_info_data(info_data)

        return info_data
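
validate_info_data is called throughout these examples but never shown.
A minimal sketch of the checks it plausibly performs, inferred only from
the keys the examples themselves set (an assumption, not the original
implementation):

def validate_info_data(info_data):
    # every info dict needs a positive piece length and a name
    if info_data.get('piece length', 0) <= 0:
        raise ValueError('missing or invalid piece length')
    if 'name' not in info_data:
        raise ValueError('missing name')
    # the pieces blob must be whole 20-byte SHA-1 digests
    if len(info_data.get('pieces', '')) % 20:
        raise ValueError('pieces is not a multiple of 20 bytes')
    # exactly one of length (single file) or files (multi file)
    if ('length' in info_data) == ('files' in info_data):
        raise ValueError("need exactly one of 'length' or 'files'")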