Example #1
    def setUp(self):
        self.out = StringIO()
        self.tarfixer = TarFixer(None, self.out, EXPECTED_TIMESTAMP,
                                 EXPECTED_REF)
        self.test_file = os.path.join(os.path.dirname(__file__), 'resources',
                                      'archive.tar')
        self.reference_file = os.path.join(os.path.dirname(__file__),
                                           'resources', 'archive-fixed.tar')
        self.reference_hash = self.hash_file(self.reference_file)
Example #2
    def setUp(self):
        self.out = io.BytesIO()
        self.tarfixer = TarFixer(None, self.out, EXPECTED_TIMESTAMP,
                                 EXPECTED_REF)
        self.utf8_containing_file = os.path.join(os.path.dirname(__file__),
                                                 'resources',
                                                 'les_misérables.tar')
        self.utf8_file = os.path.join(os.path.dirname(__file__), 'resources',
                                      'archivé.tar')
        self.test_file = os.path.join(os.path.dirname(__file__), 'resources',
                                      'archive.tar')
        self.reference_file = os.path.join(os.path.dirname(__file__),
                                           'resources', 'archive-fixed.tar')
        self.reference_hash = self.hash_file(self.reference_file)
Example #3
    def setUp(self):
        self.out = io.BytesIO()
        self.tarfixer = TarFixer(None, self.out, EXPECTED_TIMESTAMP, EXPECTED_REF)
        self.utf8_containing_file = os.path.join(os.path.dirname(__file__), 'resources', 'les_misérables.tar')
        self.utf8_file = os.path.join(os.path.dirname(__file__), 'resources', 'archivé.tar')
        self.test_file = os.path.join(os.path.dirname(__file__), 'resources', 'archive.tar')
        self.reference_file = os.path.join(os.path.dirname(__file__), 'resources', 'archive-fixed.tar')
        self.reference_hash = self.hash_file(self.reference_file)
Example #4
def create_tgz(git_root, prefix, commit, relative_dir, dest_tgz):
    """
    Create a .tar.gz from a project's source in git.
    """
    os.chdir(os.path.abspath(git_root))
    timestamp = get_commit_timestamp(commit)

    # Accommodate standalone projects with specfile in root of git repo:
    relative_git_dir = "%s" % relative_dir
    if relative_git_dir in ["/", "./"]:
        relative_git_dir = ""

    basename = os.path.splitext(dest_tgz)[0]
    initial_tar = "%s.initial" % basename

    # command to generate a git-archive
    git_archive_cmd = "git archive --format=tar --prefix=%s/ %s:%s --output=%s" % (
        prefix,
        commit,
        relative_git_dir,
        initial_tar,
    )
    run_command(git_archive_cmd)

    # Run git-archive separately if --debug was specified.
    # This allows us to detect failure early.
    # On git < 1.7.4-rc0, `git archive ... commit:./` fails!
    debug("git-archive fails if relative dir is not in git tree", "%s > /dev/null" % git_archive_cmd)

    fixed_tar = "%s.tar" % basename
    fixed_tar_fh = open(fixed_tar, "wb")
    try:
        tarfixer = TarFixer(open(initial_tar, "rb"), fixed_tar_fh, timestamp, commit)
        tarfixer.fix()
    finally:
        fixed_tar_fh.close()

    # It's a pity we can't use Python's gzip, but it doesn't offer an equivalent of -n
    return run_command("gzip -n -c < %s > %s" % (fixed_tar, dest_tgz))
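A side note on the "gzip -n" comment above: on current Python versions the standard library can produce the same kind of reproducible output, because gzip.GzipFile accepts an mtime argument and an empty filename, which keeps the timestamp and the original file name out of the gzip header. The sketch below is illustrative only (the helper name deterministic_gzip is not part of tito), and its bytes will differ from GNU gzip's because Python defaults to compression level 9:

import gzip
import shutil

def deterministic_gzip(src_path, dest_path):
    # Compress src_path to dest_path without embedding a file name or a
    # timestamp in the gzip header, so identical input yields identical
    # output, similar in spirit to `gzip -n`.
    with open(src_path, 'rb') as src, open(dest_path, 'wb') as dest:
        # filename='' omits the original name; mtime=0 pins the header
        # timestamp instead of recording "now".
        with gzip.GzipFile(filename='', mode='wb', fileobj=dest, mtime=0) as gz:
            shutil.copyfileobj(src, gz)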
Example #5
def create_tgz(git_root, prefix, commit, relative_dir, dest_tgz):
    """
    Create a .tar.gz from a project's source in git.
    """
    os.chdir(os.path.abspath(git_root))
    timestamp = get_commit_timestamp(commit)

    # Accommodate standalone projects with specfile in root of git repo:
    relative_git_dir = "%s" % relative_dir
    if relative_git_dir in ['/', './']:
        relative_git_dir = ""

    basename = os.path.splitext(dest_tgz)[0]
    initial_tar = "%s.initial" % basename

    # command to generate a git-archive
    git_archive_cmd = 'git archive --format=tar --prefix=%s/ %s:%s --output=%s' % (
        prefix, commit, relative_git_dir, initial_tar)
    run_command(git_archive_cmd)

    # Run git-archive separately if --debug was specified.
    # This allows us to detect failure early.
    # On git < 1.7.4-rc0, `git archive ... commit:./` fails!
    debug('git-archive fails if relative dir is not in git tree',
          '%s > /dev/null' % git_archive_cmd)

    fixed_tar = "%s.tar" % basename
    fixed_tar_fh = open(fixed_tar, 'wb')
    try:
        tarfixer = TarFixer(open(initial_tar, 'rb'), fixed_tar_fh, timestamp,
                            commit)
        tarfixer.fix()
    finally:
        fixed_tar_fh.close()

    # It's a pity we can't use Python's gzip, but it doesn't offer an equivalent of -n
    return run_command("gzip -n -c < %s > %s" % (fixed_tar, dest_tgz))
Example #6
class TarTest(unittest.TestCase):
    def setUp(self):
        self.out = io.BytesIO()
        self.tarfixer = TarFixer(None, self.out, EXPECTED_TIMESTAMP, EXPECTED_REF)
        self.utf8_containing_file = os.path.join(os.path.dirname(__file__), 'resources', 'les_misérables.tar')
        self.utf8_file = os.path.join(os.path.dirname(__file__), 'resources', 'archivé.tar')
        self.test_file = os.path.join(os.path.dirname(__file__), 'resources', 'archive.tar')
        self.reference_file = os.path.join(os.path.dirname(__file__), 'resources', 'archive-fixed.tar')
        self.reference_hash = self.hash_file(self.reference_file)

    def tearDown(self):
        self.out = None

    def hash_file(self, filename):
        file_bytes = open(filename, 'rb').read()
        return self.hash_buffer(file_bytes)

    def hash_buffer(self, buf):
        hasher = hashlib.sha256()
        hasher.update(buf)
        return hasher.hexdigest()

    def _irregular_reader(self, items):
        def item_read(read_length):
            try:
                item = items.pop(0)
            except IndexError:
                # If no more items, the buffer is empty and would return empty string
                return ''

            return item.read(read_length)

        mock_fh = Mock()
        mock_fh.read = Mock()
        mock_fh.read.side_effect = item_read

        return mock_fh

    def test_full_read(self):
        items = [StringIO("1" * 5), StringIO("1" * 2), StringIO("1" * 6)]
        self.tarfixer.fh = self._irregular_reader(items)
        self.assertEqual("1" * 10, self.tarfixer.full_read(10))

    def test_full_read_buffer_underflow(self):
        input = StringIO("1" * 9)
        self.tarfixer.fh = input
        self.assertRaises(IOError, self.tarfixer.full_read, 10)

    def test_full_read_eventual_buffer_underflow(self):
        items = [StringIO("1" * 5), StringIO("1" * 2), StringIO("1" * 2)]
        self.tarfixer.fh = self._irregular_reader(items)
        self.assertRaises(IOError, self.tarfixer.full_read, 10)

    def test_fix(self):
        self.fh = open(self.test_file, 'rb')
        self.tarfixer.fh = self.fh
        self.tarfixer.fix()
        self.assertEqual(self.reference_hash, self.hash_buffer(self.out.getvalue()))

    def test_fix_fails_unless_file_in_binary_mode(self):
        self.fh = open(self.test_file, 'r')
        self.tarfixer.fh = self.fh
        self.assertRaises(IOError, self.tarfixer.fix)

    def test_padded_size_length_small(self):
        length = 10
        block_size = 512
        self.assertEqual(512, self.tarfixer.padded_size(length, block_size))

    def test_padded_size_length_spot_on(self):
        length = 512
        block_size = 512
        self.assertEqual(512, self.tarfixer.padded_size(length, block_size))

    def test_padded_size_length_over(self):
        length = 513
        block_size = 512
        self.assertEqual(1024, self.tarfixer.padded_size(length, block_size))

    def test_padded_size_length_long(self):
        length = 82607
        block_size = 512
        self.assertEqual(82944, self.tarfixer.padded_size(length, block_size))

    def test_create_extended_header(self):
        self.tarfixer.create_extended_header()
        header = self.out.getvalue()
        self.assertEqual(512, len(header))
        self.assertEqual(ensure_binary("52 comment=%s\n" % EXPECTED_REF), header[:52])
        self.assertEqual(ensure_binary("\x00" * (512 - 53)), header[53:])

    def test_calculate_checksum(self):
        fields = {
            'a': '\x01',
            'b': '\x02',
            'c': '\x03',
            'd': '\x04',
        }
        self.tarfixer.struct_members = list(fields.keys()) + ['checksum']
        result = self.tarfixer.calculate_checksum(fields)
        expected_result = 10 + ord(" ") * 8
        self.assertEqual("%07o\x00" % expected_result, result)

    def test_encode_header(self):
        mode = 123
        chunk = {
            'mode': mode,
            'name': 'hello',
        }
        result = self.tarfixer.encode_header(chunk, ['mode', 'name'])
        expected_result = ["%07o\x00" % mode, "hello"]
        expected_result = list(map(lambda x: ensure_binary(x), expected_result))
        self.assertEqual(expected_result, result)

    def test_utf8_file(self):
        # The goal of this test is to *not* throw a UnicodeDecodeError
        self.fh = open(self.utf8_file, 'rb')
        self.tarfixer.fh = self.fh
        self.tarfixer.fix()

        self.assertEqual(self.reference_hash, self.hash_buffer(self.out.getvalue()))

        # rewind the buffer
        self.out.seek(0)
        try:
            tarball = tarfile.open(fileobj=self.out, mode="r")
        except tarfile.TarError:
            self.fail("Unable to open generated tarball")

    def test_utf8_containing_file(self):
        # The goal of this test is to *not* blow up due to a corrupted tarball
        self.fh = open(self.utf8_containing_file, 'rb')
        self.tarfixer.fh = self.fh
        self.tarfixer.fix()

        # rewind the buffer
        self.out.seek(0)
        try:
            tarball = tarfile.open(fileobj=self.out, mode="r")
        except tarfile.TarError as e:
            self.fail("Unable to open generated tarball: %s" % e)
Example #7
    def tgz(self):
        destination_file = os.path.join(self.rpmbuild_basedir, self.tgz_filename)
        formatted_properties = ["-D%s" % x for x in self.maven_properties]

        run_command("git clone --no-hardlinks %s %s" % (find_git_root(), self.maven_clone_dir))
        with chdir(self.maven_clone_dir):
            run_command("git checkout %s" % self.git_commit_id)

            try:
                info_out("Running Maven build...")
                # We always want to deploy to a tito controlled location during local builds
                local_properties = formatted_properties + [
                    "-DaltDeploymentRepository=local-output::default::file://%s" % self.deploy_dir]
                run_command("mvn %s %s deploy" % (
                    " ".join(self.maven_args),
                    " ".join(local_properties)))
            except RunCommandException as e:
                error_out("Maven build failed! %s" % e.output)

        self._create_build_dirs()

        full_path = self._find_tarball()
        if full_path:
            fh = gzip.open(full_path, 'rb')
            fixed_tar = os.path.join(os.path.splitext(full_path)[0])
            fixed_tar_fh = open(fixed_tar, 'wb')
            timestamp = get_commit_timestamp(self.git_commit_id)
            try:
                tarfixer = TarFixer(fh, fixed_tar_fh, timestamp, self.git_commit_id, maven_built=True)
                tarfixer.fix()
            finally:
                fixed_tar_fh.close()

            # It's a pity we can't use Python's gzip, but it doesn't offer an equivalent of -n
            run_command("gzip -n -c < %s > %s" % (fixed_tar, destination_file))
        else:
            warn_out([
                "No Maven generated tarball found.",
                "Please set up the assembly plugin in your pom.xml to generate a .tar.gz"])
            full_path = os.path.join(self.rpmbuild_sourcedir, self.tgz_filename)
            create_tgz(self.git_root, self.tgz_dir, self.git_commit_id, self.relative_project_dir, full_path)
            print("Creating %s from git tag: %s..." % (self.tgz_filename, self.build_tag))
            shutil.copy(full_path, destination_file)

        debug("Copying git source to: %s" % self.rpmbuild_gitcopy)
        shutil.copy(destination_file, self.rpmbuild_gitcopy)

        # Extract the source so we can get at the spec file, etc.
        with chdir(self.rpmbuild_gitcopy):
            run_command("tar --strip-components=1 -xvf %s" % os.path.join(self.rpmbuild_gitcopy, self.tgz_filename))

        if self.local_build:
            artifacts = {}
            all_artifacts = []
            all_artifacts_with_path = []

            for directory, unused, filenames in os.walk(self.deploy_dir):
                for f in filenames:
                    artifacts.setdefault(os.path.splitext(f)[1], []).append(f)
                dir_artifacts_with_path = [os.path.join(directory, f) for f in filenames]

                # Place the Maven artifacts in the SOURCES directory for rpmbuild to use
                for artifact in dir_artifacts_with_path:
                    shutil.copy(artifact, self.rpmbuild_sourcedir)

                dir_artifacts_with_path = map(lambda x: os.path.relpath(x, self.deploy_dir), dir_artifacts_with_path)
                all_artifacts_with_path.extend(dir_artifacts_with_path)
                all_artifacts.extend([os.path.basename(f) for f in filenames])

            cheetah_input = {
                'name': self.project_name,
                'version': self.spec_version,
                'release': self.spec_release,
                'epoch': None,  # TODO: May need to support this at some point
                'artifacts': artifacts,
                'all_artifacts': all_artifacts,
                'all_artifacts_with_path': all_artifacts_with_path,
            }
            debug("Cheetah input: %s" % cheetah_input)
            render_cheetah(find_cheetah_template_file(self.start_dir), self.rpmbuild_gitcopy, cheetah_input)
            self.spec_file_name = find_spec_file(self.rpmbuild_gitcopy)
        else:
            self.spec_file_name = find_cheetah_template_file(self.rpmbuild_gitcopy)

        # NOTE: The spec file we actually use is the one exported by git
        # archive into the temp build directory. This is done so we can
        # modify the version/release on the fly when building test rpms
        # that use a git SHA1 for their version.
        self.spec_file = os.path.join(self.rpmbuild_gitcopy, self.spec_file_name)

        info_out("Wrote: %s" % destination_file)
        self.sources.append(destination_file)
        self.artifacts.append(destination_file)
        self.ran_tgz = True
Example #8
    def create_tgz(self, git_root, prefix, commit, relative_dir,
                   dest_tgz):
        """
        Create a .tar.gz from a project's source in git,
        and include submodules.
        """

        git_root_abspath = os.path.abspath(git_root)
        gitmodules_path = os.path.join(git_root_abspath, '.gitmodules')

        # if .gitmodules does not exist, just call the existing create_tgz function
        # as there is nothing to see here.
        if not os.path.exists(gitmodules_path):
            return create_tgz(git_root, prefix, commit, relative_dir, dest_tgz)

        os.chdir(git_root_abspath)
        timestamp = get_commit_timestamp(commit)

        # Accommodate standalone projects with specfile in root of git repo:
        relative_git_dir = "%s" % relative_dir
        if relative_git_dir in ['/', './']:
            relative_git_dir = ""

        basename = os.path.splitext(dest_tgz)[0]
        initial_tar = "%s.initial" % basename

        # We need to tar up the following:
        # 1. the current repo
        self.run_git_archive(relative_git_dir, prefix, commit, initial_tar, None)

        # 2. all of the submodules
        # then combine those into a single archive.
        submodules_cmd = 'git submodule--helper list'
        submodules_output = run_command(submodules_cmd)

        # split submodules output on newline
        # then on tab, and the directory is the last entry
        submodules_list = [line.split('\t')[-1] for line in submodules_output.split('\n')]

        submodule_tar_files = [initial_tar]
        # We ignore the hash in the submodules list as we'll have to get the correct one
        # from the commit id in commit
        for submodule in submodules_list:
            # to find the submodule SHAs:
            # git rev-parse <commit>:./<submodule>
            rev_parse_cmd = 'git rev-parse %s:./%s' % (commit, submodule)
            submodule_commit = run_command(rev_parse_cmd)
            submodule_tar_file = '%s.%s' % (initial_tar, submodule)
            # prefix should be <prefix>/<submodule>
            submodule_prefix = '%s/%s' % (prefix, submodule)

            self.run_git_archive(relative_git_dir, submodule_prefix, submodule_commit,
                                 submodule_tar_file, submodule)
            submodule_tar_files.append(submodule_tar_file)

        # we need to append all of the submodule tar files onto the initial tar
        tarfiles = ' '.join(submodule_tar_files)
        run_command("tar -Af %s" % tarfiles)

        fixed_tar = "%s.tar" % basename
        fixed_tar_fh = open(fixed_tar, 'wb')
        try:
            tarfixer = TarFixer(open(initial_tar, 'rb'), fixed_tar_fh, timestamp, commit)
            tarfixer.fix()
        finally:
            fixed_tar_fh.close()

        # It's a pity we can't use Python's gzip, but it doesn't offer an equivalent of -n
        return run_command("gzip -n -c < %s > %s" % (fixed_tar, dest_tgz))
Example #9
class TarTest(unittest.TestCase):
    def setUp(self):
        self.out = io.BytesIO()
        self.tarfixer = TarFixer(None, self.out, EXPECTED_TIMESTAMP,
                                 EXPECTED_REF)
        self.utf8_containing_file = os.path.join(os.path.dirname(__file__),
                                                 'resources',
                                                 'les_misérables.tar')
        self.utf8_file = os.path.join(os.path.dirname(__file__), 'resources',
                                      'archivé.tar')
        self.test_file = os.path.join(os.path.dirname(__file__), 'resources',
                                      'archive.tar')
        self.reference_file = os.path.join(os.path.dirname(__file__),
                                           'resources', 'archive-fixed.tar')
        self.reference_hash = self.hash_file(self.reference_file)

    def tearDown(self):
        self.out = None

    def hash_file(self, filename):
        file_bytes = open(filename, 'rb').read()
        return self.hash_buffer(file_bytes)

    def hash_buffer(self, buf):
        hasher = hashlib.sha256()
        hasher.update(buf)
        return hasher.hexdigest()

    def _irregular_reader(self, items):
        def item_read(read_length):
            try:
                item = items.pop(0)
            except IndexError:
                # If no more items, the buffer is empty and would return empty string
                return ''

            return item.read(read_length)

        mock_fh = Mock()
        mock_fh.read = Mock()
        mock_fh.read.side_effect = item_read

        return mock_fh

    def test_full_read(self):
        items = [StringIO("1" * 5), StringIO("1" * 2), StringIO("1" * 6)]
        self.tarfixer.fh = self._irregular_reader(items)
        self.assertEqual("1" * 10, self.tarfixer.full_read(10))

    def test_full_read_buffer_underflow(self):
        input = StringIO("1" * 9)
        self.tarfixer.fh = input
        self.assertRaises(IOError, self.tarfixer.full_read, 10)

    def test_full_read_eventual_buffer_underflow(self):
        items = [StringIO("1" * 5), StringIO("1" * 2), StringIO("1" * 2)]
        self.tarfixer.fh = self._irregular_reader(items)
        self.assertRaises(IOError, self.tarfixer.full_read, 10)

    def test_fix(self):
        self.fh = open(self.test_file, 'rb')
        self.tarfixer.fh = self.fh
        self.tarfixer.fix()
        self.assertEqual(self.reference_hash,
                         self.hash_buffer(self.out.getvalue()))

    def test_fix_fails_unless_file_in_binary_mode(self):
        self.fh = open(self.test_file, 'r')
        self.tarfixer.fh = self.fh
        self.assertRaises(IOError, self.tarfixer.fix)

    def test_padded_size_length_small(self):
        length = 10
        block_size = 512
        self.assertEqual(512, self.tarfixer.padded_size(length, block_size))

    def test_padded_size_length_spot_on(self):
        length = 512
        block_size = 512
        self.assertEqual(512, self.tarfixer.padded_size(length, block_size))

    def test_padded_size_length_over(self):
        length = 513
        block_size = 512
        self.assertEqual(1024, self.tarfixer.padded_size(length, block_size))

    def test_padded_size_length_long(self):
        length = 82607
        block_size = 512
        self.assertEqual(82944, self.tarfixer.padded_size(length, block_size))

    def test_create_extended_header(self):
        self.tarfixer.create_extended_header()
        header = self.out.getvalue()
        self.assertEqual(512, len(header))
        self.assertEqual(ensure_binary("52 comment=%s\n" % EXPECTED_REF),
                         header[:52])
        self.assertEqual(ensure_binary("\x00" * (512 - 53)), header[53:])

    def test_calculate_checksum(self):
        fields = {
            'a': '\x01',
            'b': '\x02',
            'c': '\x03',
            'd': '\x04',
        }
        self.tarfixer.struct_members = list(fields.keys()) + ['checksum']
        result = self.tarfixer.calculate_checksum(fields)
        expected_result = 10 + ord(" ") * 8
        self.assertEqual("%07o\x00" % expected_result, result)

    def test_encode_header(self):
        mode = 123
        chunk = {
            'mode': mode,
            'name': 'hello',
        }
        result = self.tarfixer.encode_header(chunk, ['mode', 'name'])
        expected_result = ["%07o\x00" % mode, "hello"]
        expected_result = list(map(lambda x: ensure_binary(x),
                                   expected_result))
        self.assertEqual(expected_result, result)

    def test_utf8_file(self):
        # The goal of this test is to *not* throw a UnicodeDecodeError
        self.fh = open(self.utf8_file, 'rb')
        self.tarfixer.fh = self.fh
        self.tarfixer.fix()

        self.assertEqual(self.reference_hash,
                         self.hash_buffer(self.out.getvalue()))

        # rewind the buffer
        self.out.seek(0)
        try:
            tarball = tarfile.open(fileobj=self.out, mode="r")
        except tarfile.TarError:
            self.fail("Unable to open generated tarball")

    def test_utf8_containing_file(self):
        # The goal of this test is to *not* blow up due to a corrupted tarball
        self.fh = open(self.utf8_containing_file, 'rb')
        self.tarfixer.fh = self.fh
        self.tarfixer.fix()

        # rewind the buffer
        self.out.seek(0)
        try:
            tarball = tarfile.open(fileobj=self.out, mode="r")
        except tarfile.TarError as e:
            self.fail("Unable to open generated tarball: %s" % e)
Example #10
    def setUp(self):
        self.out = StringIO()
        self.tarfixer = TarFixer(None, self.out, EXPECTED_TIMESTAMP, EXPECTED_REF)
        self.test_file = os.path.join(os.path.dirname(__file__), 'resources', 'archive.tar')
        self.reference_file = os.path.join(os.path.dirname(__file__), 'resources', 'archive-fixed.tar')
        self.reference_hash = self.hash_file(self.reference_file)
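Taken together, the examples show the basic TarFixer workflow: open the source tar in binary mode, hand it to TarFixer along with a writable binary sink, the commit timestamp, and the commit reference, call fix(), and compress the result separately (with `gzip -n`) so the final .tar.gz stays reproducible. Below is a minimal sketch under those assumptions; the import path and the literal commit values are placeholders, not taken from the examples above:

import io

from tito.tar import TarFixer  # assumed import path; adjust to your tree

# Placeholder values for illustration; real callers pass the commit SHA and
# the result of get_commit_timestamp() for that commit.
commit_sha = "0" * 40
commit_timestamp = 1400000000

out = io.BytesIO()
with open("archive.tar", "rb") as fh:  # the input must be opened in binary mode
    TarFixer(fh, out, commit_timestamp, commit_sha).fix()

# out.getvalue() now holds the normalized tar; the examples above then pipe
# it through `gzip -n` to produce a byte-for-byte reproducible .tar.gz.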