# Example #1 (rating: 0)
    def test_pushd_popd(self):
        """
        Verify that pushd() raises OSError for a missing directory and
        restores the original working directory afterwards.

        """

        # Temporary directory to work with
        work_dir = join(self.tmp_dir, 'Utils_Test.pushd', 'newdir')

        # Ensure it doesn't already exist
        assert isdir(work_dir) is False

        # Store our current directory
        cur_dir = getcwd()

        try:
            with pushd(work_dir):
                # We should throw an exception here and never make it to the
                # assert call below; but just in case ...
                assert False

        except OSError as e:
            # Directory doesn't exist.  'as e' works on Python 2.6+ and 3
            # (the old 'except OSError, e' form is Python 2 only), and
            # e.errno with == avoids relying on small-int identity
            assert e.errno == errno.ENOENT
            assert getcwd() == cur_dir
# Example #2 (rating: 0)
    def test_pushd_popd(self):
        """
        Verify that pushd() raises OSError for a missing directory and
        restores the original working directory afterwards.

        """

        # Temporary directory to work with
        work_dir = join(self.tmp_dir, 'Utils_Test.pushd', 'newdir')

        # Ensure it doesn't already exist
        assert isdir(work_dir) is False

        # Store our current directory
        cur_dir = getcwd()

        try:
            with pushd(work_dir):
                # We should throw an exception here and never make it to the
                # assert call below; but just in case ...
                assert False

        except OSError as e:
            # Directory doesn't exist.  'as e' works on Python 2.6+ and 3
            # (the old 'except OSError, e' form is Python 2 only), and
            # e.errno with == avoids relying on small-int identity
            assert e.errno == errno.ENOENT
            assert getcwd() == cur_dir
# Example #3 (rating: 0)
def database_status(ctx):
    """
    Displays details on the current database store.

    Scans the configured cache/search directory for SQLite database files
    and prints each filename with its size, flagging it with 'W' when a
    watched group of the same name exists.
    """
    db_path = join(ctx['NNTPSettings'].work_dir, 'cache', 'search')
    logger.debug('Scanning %s for databases...' % db_path)
    with pushd(db_path, create_if_missing=True):
        results = find(
            db_path,
            suffix_filter=SQLITE_DATABASE_EXTENSION,
            fsinfo=True,
            max_depth=1,
        )

    # Use our Database first if it exists
    session = ctx['NNTPSettings'].session()
    if not session:
        logger.error('Could not acquire a database connection.')
        exit(1)

    # PEP8 E712 disallows comparing to a boolean with ==, but SQLAlchemy
    # overloads == on columns to build SQL, so the comparison is required.
    # Comparing against a variable holding True sidesteps the lint error.
    pep8_e712 = True

    try:
        # Get a list of watched groups
        groups = dict(
            session.query(Group.name,
                          Group.id).filter(Group.watch == pep8_e712).all())

    except OperationalError:
        # The schema has not been created yet
        logger.warning('The database does not appear to be initialized.')
        logger.info('Try running: "nr db init" first.')
        exit(0)

    if not results:
        # NOTE(review): this tests the database *files* found on disk while
        # the message speaks of watched groups — possibly `groups` was
        # intended here; confirm before altering behavior
        logger.info('There are no groups configured to be watched.')
        exit(0)

    for _, meta in results.iteritems():
        # Flag the entry with 'W' when its group is being watched
        flags = ''
        if meta['filename'] in groups:
            flags += 'W'

        print('%-65s %-10s %s' % (
            meta['filename'],
            bytes_to_strsize(meta['size']),
            flags,
        ))
# Example #4 (rating: 0)
def database_status(ctx):
    """
    Displays details on the current database store.

    Scans the configured cache/search directory for SQLite database files
    and prints each filename with its size, flagging it with 'W' when a
    watched group of the same name exists.
    """
    db_path = join(ctx['NNTPSettings'].work_dir, 'cache', 'search')
    logger.debug('Scanning %s for databases...' % db_path)
    with pushd(db_path, create_if_missing=True):
        results = find(
            db_path,
            suffix_filter=SQLITE_DATABASE_EXTENSION,
            fsinfo=True,
            max_depth=1,
        )

    # Use our Database first if it exists
    session = ctx['NNTPSettings'].session()
    if not session:
        logger.error('Could not acquire a database connection.')
        exit(1)

    # PEP8 E712 disallows comparing to a boolean with ==, but SQLAlchemy
    # overloads == on columns to build SQL, so the comparison is required.
    # Comparing against a variable holding True sidesteps the lint error.
    pep8_e712 = True

    try:
        # Get a list of watched groups
        groups = dict(session.query(Group.name, Group.id)
                      .filter(Group.watch == pep8_e712).all())

    except OperationalError:
        # The schema has not been created yet
        logger.warning('The database does not appear to be initialized.')
        logger.info('Try running: "nr db init" first.')
        exit(0)

    if not results:
        # NOTE(review): this tests the database *files* found on disk while
        # the message speaks of watched groups — possibly `groups` was
        # intended here; confirm before altering behavior
        logger.info('There are no groups configured to be watched.')
        exit(0)

    for _, meta in results.iteritems():
        # Flag the entry with 'W' when its group is being watched
        flags = ''
        if meta['filename'] in groups:
            flags += 'W'

        print('%-65s %-10s %s' % (
            meta['filename'],
            bytes_to_strsize(meta['size']),
            flags,
        ))
# Example #5 (rating: 0)
def database_reset(ctx):
    """
    Resets the database based on the current configuration and removes
    any cached search databases found on disk.
    """
    logger.info('Resetting database ...')
    ctx['NNTPSettings'].open(reset=True)
    __db_prep(ctx)

    db_path = join(ctx['NNTPSettings'].base_dir, 'cache', 'search')
    logger.debug('Scanning %s for databases...' % db_path)
    with pushd(db_path, create_if_missing=True):
        for entry in listdir(db_path):
            db_file = join(db_path, entry)
            if not isfile(db_file):
                # Skip sub-directories and anything else that isn't a file
                continue

            try:
                unlink(db_file)
                logger.info('Removed %s ...' % entry)
            except OSError:
                # Narrowed from a bare except: unlink() raises OSError on
                # failure; a bare except would also swallow
                # KeyboardInterrupt/SystemExit
                logger.warning('Failed to remove %s ...' % entry)
# Example #6 (rating: 0)
def database_reset(ctx):
    """
    Resets the database based on the current configuration and removes
    any cached search databases found on disk.
    """
    logger.info('Resetting database ...')
    ctx['NNTPSettings'].open(reset=True)
    __db_prep(ctx)

    db_path = join(ctx['NNTPSettings'].base_dir, 'cache', 'search')
    logger.debug('Scanning %s for databases...' % db_path)
    with pushd(db_path, create_if_missing=True):
        for entry in listdir(db_path):
            db_file = join(db_path, entry)
            if not isfile(db_file):
                # Skip sub-directories and anything else that isn't a file
                continue

            try:
                unlink(db_file)
                logger.info('Removed %s ...' % entry)
            except OSError:
                # Narrowed from a bare except: unlink() raises OSError on
                # failure; a bare except would also swallow
                # KeyboardInterrupt/SystemExit
                logger.warning('Failed to remove %s ...' % entry)
# Example #7 (rating: 0)
class Utils_Test(TestBase):
    """
    Testing the Utils class
    """
    def test_strsize_n_bytes(self):
        """
        A formatting tool to make bytes more readable for an end user
        """
        # Garbage Entry
        assert strsize_to_bytes(None) is None
        assert strsize_to_bytes("0J") is None
        assert strsize_to_bytes("") is None
        assert strsize_to_bytes("totalgarbage") is None

        # Allow integers
        assert strsize_to_bytes(0) == 0
        assert strsize_to_bytes(1024) == 1024

        # Good Entries
        assert strsize_to_bytes("0B") == 0
        assert strsize_to_bytes("0") == 0
        assert strsize_to_bytes("10") == 10
        assert strsize_to_bytes("1K") == 1024
        assert strsize_to_bytes("1M") == 1024 * 1024
        assert strsize_to_bytes("1G") == 1024 * 1024 * 1024
        assert strsize_to_bytes("1T") == 1024 * 1024 * 1024 * 1024

        # Spaces between units and value are fine too
        assert strsize_to_bytes(" 0         B ") == 0
        assert strsize_to_bytes("  1       K  ") == 1024
        assert strsize_to_bytes("   1     M   ") == 1024 * 1024
        assert strsize_to_bytes("    1   G    ") == 1024 * 1024 * 1024
        assert strsize_to_bytes("     1 T     ") == 1024 * 1024 * 1024 * 1024

        # Support Byte character (binary, base-1024 units)
        assert strsize_to_bytes("1KB") == 1024
        assert strsize_to_bytes("1MB") == 1024 * 1024
        assert strsize_to_bytes("1GB") == 1024 * 1024 * 1024
        assert strsize_to_bytes("1TB") == 1024 * 1024 * 1024 * 1024

        # Support bit character (decimal, base-1000 units)
        assert strsize_to_bytes("1Kb") == 1000
        assert strsize_to_bytes("1Mb") == 1000 * 1000
        assert strsize_to_bytes("1Gb") == 1000 * 1000 * 1000
        assert strsize_to_bytes("1Tb") == 1000 * 1000 * 1000 * 1000

        # Garbage Entry
        assert bytes_to_strsize(None) is None
        assert bytes_to_strsize('') is None
        assert bytes_to_strsize('GARBAGE') is None

        # Good Entries
        assert bytes_to_strsize(0) == "0.00B"
        assert bytes_to_strsize(1) == "1.00B"
        assert bytes_to_strsize(1024) == "1.00KB"
        assert bytes_to_strsize(1024 * 1024) == "1.00MB"
        assert bytes_to_strsize(1024 * 1024 * 1024) == "1.00GB"
        assert bytes_to_strsize(1024 * 1024 * 1024 * 1024) == "1.00TB"

        # Support strings too
        assert bytes_to_strsize("0") == "0.00B"
        assert bytes_to_strsize("1024") == "1.00KB"

    def test_stat(self):
        """
        Stat makes it easier to dissect the file extension, filesystem info
        and mime information.
        """

        general_keys = ('extension', 'basename', 'filename', 'dirname')
        filesys_keys = ('created', 'modified', 'accessed', 'size')
        mime_keys = ('mime', )

        # Test a file that doesn't exist; all variations return None
        tmp_file = join(self.tmp_dir, 'Utils_Test.stat', 'missing_file')
        stats = stat(tmp_file)
        assert stats is None
        stats = stat(tmp_file, fsinfo=False)
        assert stats is None
        stats = stat(tmp_file, fsinfo=False, mime=False)
        assert stats is None

        # Create Temporary file 1MB in size
        tmp_file = join(self.tmp_dir, 'Utils_Test.stat', '1MB.rar')
        assert self.touch(tmp_file, size='1MB')

        stats = stat(tmp_file)

        # This check basically makes sure all of the expected keys
        # are in place and that there aren't more or less
        k_iter = chain(mime_keys, filesys_keys, general_keys)
        k_len = len(mime_keys) + len(filesys_keys) + len(general_keys)
        assert isinstance(stats, dict) is True
        assert len([k for k in k_iter if k not in stats.keys()]) == 0
        assert k_len == len(stats)

        # Filesize should actually match what we set it as
        assert bytes_to_strsize(stats['size']) == "1.00MB"

        # different OS's and variations of python can yield different
        # results.  We're trying to just make sure that we find the
        # rar keyword in the mime type
        assert (stats['mime'] == 'application/x-rar-compressed')

        # Create Temporary file 2MB in size
        tmp_file = join(self.tmp_dir, 'Utils_Test.stat', '2MB.zip')
        assert self.touch(tmp_file, size='2MB')

        stats = stat(tmp_file)

        # This check basically makes sure all of the expected keys
        # are in place and that there aren't more or less
        k_iter = chain(mime_keys, filesys_keys, general_keys)
        k_len = len(mime_keys) + len(filesys_keys) + len(general_keys)
        assert isinstance(stats, dict) is True
        assert len([k for k in k_iter if k not in stats.keys()]) == 0
        assert k_len == len(stats)

        # Filesize should actually match what we set it as
        assert bytes_to_strsize(stats['size']) == "2.00MB"

        assert re.search(
            'application/.*zip.*',
            stats['mime'],
            re.IGNORECASE,
        ) is not None

        # Test different variations: no mime lookup
        stats = stat(tmp_file, mime=False)

        # This check basically makes sure all of the expected keys
        # are in place and that there aren't more or less
        k_iter = chain(filesys_keys, general_keys)
        k_len = len(filesys_keys) + len(general_keys)
        assert isinstance(stats, dict) is True
        assert len([k for k in k_iter if k not in stats.keys()]) == 0
        assert k_len == len(stats)

        # Test different variations: mime only, no filesystem info
        stats = stat(tmp_file, fsinfo=False, mime=True)

        # This check basically makes sure all of the expected keys
        # are in place and that there aren't more or less
        k_iter = chain(mime_keys, general_keys)
        k_len = len(mime_keys) + len(general_keys)
        assert isinstance(stats, dict) is True
        assert len([k for k in k_iter if k not in stats.keys()]) == 0
        assert k_len == len(stats)

        # Test different variations: general keys only
        stats = stat(tmp_file, fsinfo=False, mime=False)

        # This check basically makes sure all of the expected keys
        # are in place and that there aren't more or less
        k_iter = chain(general_keys)
        k_len = len(general_keys)
        assert isinstance(stats, dict) is True
        assert len([k for k in k_iter if k not in stats.keys()]) == 0
        assert k_len == len(stats)

    def test_mkdir(self):
        """
        Just a simple wrapper to makedirs, but tries a few times before
        completely aborting.

        """

        work_dir = join(self.tmp_dir, 'Utils_Test.mkdir', 'dirA')
        # The directory should not exist
        assert isdir(work_dir) is False

        # mkdir() should be successful
        assert mkdir(work_dir) is True

        # The directory should exist now
        assert isdir(work_dir) is True

        # mkdir() gracefully handles 2 calls to the same location
        assert mkdir(work_dir) is True

        # Create Temporary file 2KB in size
        tmp_file = join(self.tmp_dir, 'Utils_Test.mkdir', '2KB')
        assert self.touch(tmp_file, size='2KB')

        # Now the isdir() will still return False because there is a file
        # there now, not a directory
        assert isdir(tmp_file) is False
        # mkdir() will fail to create a directory in place of file
        assert mkdir(tmp_file) is False

        # And reference a new directory (not created yet) within
        new_work_dir = join(work_dir, 'subdir')

        # Confirm our directory doesn't exist
        assert isdir(new_work_dir) is False

        # Now we'll protect our original directory (0o-prefixed octal
        # literals work on both Python 2.6+ and Python 3)
        chmod(work_dir, 0o000)

        # mkdir() will fail because of permissions, but incase it doesn't work
        # as planned, just store the result in a variable.  We'll flip our
        # permission back first
        result = mkdir(new_work_dir)

        # reset the permission
        chmod(work_dir, 0o700)

        # Our result should yield a failed result
        assert result is False

        # Confirm that the directory was never created:
        assert isdir(new_work_dir) is False

    def test_pushd_popd(self):
        """
        Verify pushd() raises OSError for a missing directory, creates it
        when asked to, and always restores the previous working directory
        (including when an exception is raised inside the block).

        """

        # Temporary directory to work with
        work_dir = join(self.tmp_dir, 'Utils_Test.pushd', 'newdir')

        # Ensure it doesn't already exist
        assert isdir(work_dir) is False

        # Store our current directory
        cur_dir = getcwd()

        try:
            with pushd(work_dir):
                # We should throw an exception here and never make it to the
                # assert call below; but just in case ...
                assert False

        except OSError as e:
            # Directory doesn't exist.  'as e' works on Python 2.6+ and 3,
            # and e.errno with == avoids relying on small-int identity
            assert e.errno == errno.ENOENT
            assert getcwd() == cur_dir

        # Now we'll make the directory
        with pushd(work_dir, create_if_missing=True):
            # We're in a new directory
            assert getcwd() == work_dir

        # We're back to where we were
        assert getcwd() == cur_dir

        try:
            with pushd(work_dir, create_if_missing=True):
                # We're in a new directory
                assert getcwd() == work_dir
                # Throw an exception
                raise Exception

        except Exception:
            # We're back to where we were
            assert getcwd() == cur_dir
# Example #8 (rating: 0)
    def decode(self, content=None, name=None, password=None, *args, **kwargs):
        """
        Extracts queued RAR archives into a temporary directory.

        content must be pointing to a directory containing rar files that can
        be easily sorted on. Alternatively, path can be of type NNTPContent()
        or a set/list of.

        If no password is specified, then the password configuration loaded
        into the class is used instead.

        Returns an NNTPBinaryContent() object containing the contents of the
        package within a sortedset() object.  All decode() functions have to
        return a resultset() to be consistent with one another.  None is
        returned on failure.

        """
        if content is not None:
            self.add(content)

        # Nothing queued means nothing to extract
        if len(self) == 0:
            return None

        # We can't do anything if the unrar binary is not executable
        if not self.can_exe(self._unrar):
            return None

        # Fall back to the password configured on the class
        if not password:
            password = self.password

        # Initialize our command
        execute = [
            # Our Executable RAR Application
            self._unrar,
            # 'x' is unrar's extract (with full paths) command
            'x',
            # Assume Yes
            '-y',
        ]

        # Password Protection
        if password is not None:
            execute.append('-p%s' % password)
        else:
            # Do not prompt for password
            execute.append('-p-')

        if self.keep_broken:
            # Keep Broken Flag
            execute.append('-kb')

        if self.overwrite:
            # Overwrite files
            execute.append('-o+')

        else:
            # Don't overwrite files
            execute.append('-o-')

        if self.freshen:
            # Freshen files
            execute.append('-f')

        # Stop Switch Parsing
        execute.append('--')

        # Derive a name for the temporary extraction directory
        if not name:
            name = self.name
            if not name:
                name = random_str()

        for _path in self:
            # Temporary Path
            tmp_path, _ = self.mkstemp(content=name)

            with pushd(tmp_path):
                # Create our SubProcess Instance
                sp = SubProcess(list(execute) + [_path])

                # Start our execution now
                sp.start()

                # Poll the extraction directory while unrar runs so newly
                # extracted content is tracked as it appears
                found_set = None
                while not sp.is_complete(timeout=1.5):

                    found_set = self.watch_dir(
                        tmp_path,
                        ignore=found_set,
                    )

                # Handle remaining content (seconds=-1 presumably forces an
                # immediate final scan — confirm against watch_dir())
                found_set = self.watch_dir(
                    tmp_path,
                    ignore=found_set,
                    seconds=-1,
                )

                # Let the caller know our status
                if not sp.successful():
                    # Cleanup Temporary Path
                    rm(tmp_path)
                    return None

                if not len(found_set):
                    logger.warning(
                        'RAR archive (%s) contained no content.' %
                        basename(_path),
                    )

        # Clear our list of objects to archive
        self.clear()

        # Return path containing unrar'ed content
        # NOTE(review): only the tmp_path of the *final* loop iteration is
        # wrapped here; earlier iterations' paths are dropped — confirm
        # whether multiple queued archives are expected
        results = NNTPBinaryContent(tmp_path)

        # We intentionally attach its content
        results.attach()

        # Create a sortedset to return
        _resultset = sortedset(key=lambda x: x.key())
        _resultset.add(results)

        # Return our content
        return _resultset
# Example #9 (rating: 0)
    def decode(self, content=None, name=None, password=None, *args, **kwargs):
        """
        Extracts queued 7-Zip archives into a temporary directory.

        content must be pointing to a directory containing 7-Zip files that can
        be easily sorted on. Alternatively, path can be of type NNTPContent()
        or a set/list of.

        If no password is specified, then the password configuration loaded
        into the class is used instead.

        Returns an NNTPBinaryContent() object containing the contents of the
        package within a sortedset() object.  All decode() functions have to
        return a resultset() to be consistent with one another.  None is
        returned on failure.

        """
        if content is not None:
            self.add(content)

        # Nothing queued means nothing to extract
        if len(self) == 0:
            return None

        # We can't do anything if the 7z binary is not executable
        if not self.can_exe(self._bin):
            return None

        # Fall back to the password configured on the class
        if not password:
            password = self.password

        # Initialize our command
        execute = [
            # Our Executable 7-Zip Application
            self._bin,
            # 'x' is 7z's extract (with full paths) command
            'x',
            # Assume Yes
            '-y',
        ]

        # Password Protection
        if password is not None:
            execute.append('-p%s' % password)
        else:
            # Do not prompt for password
            execute.append('-p-')

        if self.overwrite:
            # Overwrite files (-aoa: overwrite All existing files)
            execute.append('-aoa')

        else:
            # Don't overwrite files (-aos: Skip existing files)
            execute.append('-aos')

        # Stop Switch Parsing
        execute.append('--')

        # Derive a name for the temporary extraction directory
        if not name:
            name = self.name
            if not name:
                name = random_str()

        for _path in self:
            # Temporary Path
            tmp_path, _ = self.mkstemp(content=name)

            with pushd(tmp_path):
                # Create our SubProcess Instance
                sp = SubProcess(list(execute) + [_path])

                # Start our execution now
                sp.start()

                # Poll the extraction directory while 7z runs so newly
                # extracted content is tracked as it appears
                found_set = None
                while not sp.is_complete(timeout=1.5):

                    found_set = self.watch_dir(
                        tmp_path,
                        ignore=found_set,
                    )

                # Handle remaining content (seconds=-1 presumably forces an
                # immediate final scan — confirm against watch_dir())
                found_set = self.watch_dir(
                    tmp_path,
                    ignore=found_set,
                    seconds=-1,
                )

                # Let the caller know our status
                if not sp.successful():
                    # Cleanup Temporary Path
                    rm(tmp_path)
                    return None

                if not len(found_set):
                    logger.warning(
                        '7Z archive (%s) contained no content.' %
                        basename(_path), )

        # Clear our list of objects to archive
        self.clear()

        # Return path containing extracted content
        # NOTE(review): only the tmp_path of the *final* loop iteration is
        # wrapped here; earlier iterations' paths are dropped — confirm
        # whether multiple queued archives are expected
        results = NNTPBinaryContent(tmp_path)

        # We intentionally attach its content
        results.attach()

        # Create a sortedset to return
        _resultset = sortedset(key=lambda x: x.key())
        _resultset.add(results)

        # Return our content
        return _resultset
# Example #10 (rating: 0)
    def test(self, content=None):
        """
        Verifies (without repairing) the par-protected content.

        content must be pointing to a directory containing par files that can
        be easily sorted on. Alternatively, path can be of type NNTPContent()
        or a set/list of.

        This function just tests an archive to see if it can be properly
        prepared (it is effectively a wrapper to verify)

        If anything but True is returned then there was a problem verifying
        the results and a code identified in ParReturnCode() is returned
        instead.
        """
        if content is not None:
            paths = self.get_paths(content)

        elif len(self.archive):
            # Iterate over the archive entries already queued on this object
            paths = iter(self.archive)

        else:
            raise AttributeError("CodecPar: No par file detected.")

        # We can't do anything if the par binary is not executable
        if not self.can_exe(self._par):
            return None

        # filter our results by indexes (index .par2 files only; the
        # recovery volume files are excluded)
        indexes = self.__filter_pars(paths, indexes=True, volumes=False)

        if not len(indexes):
            logger.warning('Archive contained no PAR files.')
            return ParReturnCode.NoParFiles

        # Initialize our command
        execute = [
            # Our Executable PAR Application
            self._par,
            # Use Verify (test-only) mode
            'verify',
        ]

        if self.cpu_cores is not None and self.cpu_cores > 1:
            # to checksum concurrently - uses multiple threads
            execute.append('-t+')

        # Stop Switch Parsing
        execute.append('--')

        for _path in indexes:

            # Get the directory the par file resides in
            par_path = dirname(_path)

            with pushd(par_path):
                # Create our SubProcess Instance
                sp = SubProcess(list(execute) + [basename(_path)])

                # Start our execution now and block until it completes
                sp.start()
                sp.join()

                # Let the caller know our status
                # NOTE(review): identity comparison ('is not') relies on
                # ParReturnCode values being singletons — confirm, or prefer !=
                if sp.response_code() is not ParReturnCode.NoRepairRequired:
                    return sp.response_code()

        return True
# Example #11 (rating: 0)
    def decode(self, content=None, *args, **kwargs):
        """
        Repairs par-protected content and collects any newly created files.

        content must be pointing to a directory containing par files that can
        be easily retrieved. Alternatively, path can be of type NNTPContent()
        or a set/list of.

        A sortedset of NNTPBinaryContent() objects is returned containing
        any new content that was generated as a result of the par2 call.

        If an error occurs then None is returned.

        """
        if content is not None:
            self.add(content)

        # Nothing queued means nothing to repair
        if len(self) == 0:
            return None

        # We can't do anything if the par binary is not executable
        if not self.can_exe(self._par):
            return None

        # filter our results by indexes (index .par2 files only; the
        # recovery volume files are excluded)
        indexes = self.__filter_pars(self.archive, indexes=True, volumes=False)

        # Initialize our command
        execute = [
            # Our Executable PAR Application
            self._par,
            # Use Repair
            'repair',
        ]

        if self.cpu_cores is not None and self.cpu_cores > 1:
            # to repair concurrently - uses multiple threads
            execute.append('-t+')

        # Stop Switch Parsing
        execute.append('--')

        results = sortedset(key=lambda x: x.key())
        for _path in indexes:

            # Get the directory the par file resides in
            par_path = dirname(_path)

            with pushd(par_path):
                # create a before snapshot so we can later compute the set
                # of files the repair produced
                before_snapshot = self.watch_dir(
                    par_path,
                    seconds=-1,
                )

                # Create our SubProcess Instance
                sp = SubProcess(list(execute) + [basename(_path)])

                # Start our execution now
                sp.start()

                # Track files created while the repair runs
                after_snapshot = sortedset()
                while not sp.is_complete(timeout=1.5):

                    after_snapshot = self.watch_dir(
                        par_path,
                        ignore=after_snapshot,
                    )

                # Handle remaining content (seconds=-1 presumably forces an
                # immediate final scan — confirm against watch_dir())
                after_snapshot = self.watch_dir(
                    par_path,
                    ignore=after_snapshot,
                    seconds=-1,
                )

                # Add any new files detected to our result set otherwise we
                # just return an empty set
                total_parts = after_snapshot - before_snapshot
                for no, path in enumerate(total_parts):
                    content = NNTPBinaryContent(
                        path,
                        part=no+1,
                        total_parts=len(total_parts),
                    )
                    # Loaded data is by default detached; we want to attach it
                    content.attach()

                    # Add our attached content to our results
                    results.add(content)

                # Let the caller know our status
                if not sp.successful():
                    return None

        # Clear our list of objects to archive
        self.clear()

        return results
# Example #12 (rating: 0)
    def encode(self, content=None, *args, **kwargs):
        """
        Takes a specified path (and or file) and creates par2 files based on
        it. If this function is successful, it returns a set of
        NNTPBinaryContent() objects identifying the PAR2 files generated
        based on the passed in content.

        The function returns None if it fails in any way.

        """

        if content is not None:
            self.add(content)

        # Nothing queued means nothing to encode
        if len(self) == 0:
            return None

        # We can't do anything if the par binary is not executable
        if not self.can_exe(self._par):
            return None

        # Create our resultset up front.  Bug fix: this was previously
        # re-initialized inside the loop below, which silently discarded the
        # PAR2 content generated for all but the last target (and left
        # `results` undefined at the return when the archive set was empty).
        results = sortedset(key=lambda x: x.key())

        for target in self.archive:
            # Base entry on first file in the list
            name = basename(target)
            target_dir = dirname(target)

            # Initialize our command
            execute = [
                # Our Executable PAR Application
                self._par,
                # Use Create Flag
                'create',
            ]

            # Handle PAR Block Size
            if self.block_size:
                execute.append('-s%s' % self.block_size)

            if self.recovery_percent:
                execute.append('-r%d' % self.recovery_percent)

            if self.cpu_cores is not None and self.cpu_cores > 1:
                # to create concurrently - uses multiple threads
                execute.append('-t+')

            # Stop Switch Parsing
            execute.append('--')

            # Now add our target (we can only do one at a time which is why
            # we loop) and run our setups
            execute.append(target)

            found_set = sortedset()
            with pushd(target_dir):
                # Create our SubProcess Instance
                sp = SubProcess(execute)

                # Start our execution now
                sp.start()

                # Poll the directory while par2 runs so the generated part
                # files are tracked as they appear
                while not sp.is_complete(timeout=1.5):

                    found_set = self.watch_dir(
                        target_dir,
                        prefix=name,
                        regex=PAR_PART_RE,
                        ignore=found_set,
                    )

            # Handle remaining content
            found_set = self.watch_dir(
                target_dir,
                prefix=name,
                regex=PAR_PART_RE,
                ignore=found_set,
                seconds=-1,
            )

            # Let the caller know our status
            if not sp.successful():
                # We're done; we failed
                return None

            if not len(found_set):
                # We're done; we failed
                return None

            # iterate through our found_set and create NNTPBinaryContent()
            # objects from them; parts are numbered per target
            for part, path in enumerate(found_set, 1):
                content = NNTPBinaryContent(
                    path,
                    part=part,
                    total_parts=len(found_set),
                )

                # Loaded data is by default detached; we want to attach it
                content.attach()

                # Add our attached content to our results
                results.add(content)

        # Clear our list of objects to archive
        self.clear()

        # Return our resultset
        return results