Esempio n. 1
0
def _generate_gpg_key_if_does_not_exist(config):
    """Generate the gpg signing key and export it, unless already present.

    :returns: True if the key did NOT exist before and was generated here
        (see the final ``return not already_generated``)
    """
    # the key counts as present only when every expected keyring file exists
    gnupg_directory = path.join(path.expanduser("~"), ".gnupg")
    already_generated = all([
        path.exists(path.join(gnupg_directory, filename))
        for filename in GPG_FILENAMES
    ])
    # ... and the exported public key is in the home directory too
    home_key_path = path.join(path.expanduser("~"), 'gpg.key')
    already_generated = already_generated and path.exists(home_key_path)
    if not already_generated:
        # start from a clean keyring; gpg_batch_file drives unattended keygen
        rmtree(gnupg_directory, ignore_errors=True)
        log_execute_assert_success([
            'gpg', '--batch', '--gen-key',
            resource_filename(__name__, 'gpg_batch_file')
        ])
        pid = log_execute_assert_success(['gpg', '--export', '--armor'])
        # write rpm signing macros, then store the exported (armored) key
        with fopen(path.join(path.expanduser("~"), ".rpmmacros"), 'w') as fd:
            fd.write(GPG_TEMPLATE)
        with fopen(home_key_path, 'w') as fd:
            fd.write(pid.get_stdout().decode())
    # publish the key into the artifacts tree, but never overwrite it
    data_key_path = path.join(config.artifacts_directory, 'packages',
                              'gpg.key')
    if not path.exists(data_key_path):
        copy(home_key_path, data_key_path)
    return not already_generated
Esempio n. 2
0
 def generate_release_file_for_specific_distribution_and_version(
         self, distribution, codename, force=True):
     """Write, then gpg-sign, the Release/InRelease files for one codename."""
     dirpath = path.join(self.base_directory, distribution, 'dists',
                         codename)
     in_release = path.join(dirpath, 'InRelease')
     release = path.join(dirpath, 'Release')
     release_gpg = release + '.gpg'
     if not force and path.exists(release):
         return
     # regenerate the Release file from the archive metadata
     contents = apt_ftparchive(['release', dirpath])
     archs = " ".join(sorted(KNOWN_DISTRIBUTIONS[distribution][codename]))
     with fopen(release, 'w') as fd:
         fd.write(RELEASE_FILE_HEADER.format(codename, archs, contents))
     # stale signatures must not outlive the file they signed
     for stale in (in_release, release_gpg):
         if path.exists(stale):
             remove(stale)
     # trusty's apt cannot verify SHA256-digest InRelease signatures
     digest = 'SHA1' if codename == "trusty" else 'SHA256'
     gpg(['--clearsign', '--digest-algo', digest, '-o', in_release, release])
     gpg(['-abs', '-o', release_gpg, release])
Esempio n. 3
0
def hard_link_or_raise_exception(src, dst):
    """Hard-link *src* to *dst* and return the path of the created link.

    If *dst* is an existing file, or a directory already containing a file
    named after *src*, raise FileAlreadyExists.
    """
    if not path.exists(dst):
        link(src, dst)
        return dst
    elif path.isfile(dst):
        raise FileAlreadyExists(dst)
    elif path.isdir(dst):
        dst_abspath = path.join(dst, path.basename(src))
        if path.exists(dst_abspath):
            raise FileAlreadyExists(dst_abspath)
        link(src, dst_abspath)
        return dst_abspath
    # dst exists but is neither a regular file nor a directory (e.g. a
    # FIFO or device node); previously this fell through and silently
    # returned None -- treat it as an occupied destination instead.
    raise FileAlreadyExists(dst)
Esempio n. 4
0
def hard_link_or_raise_exception(src, dst):
    """Create a hard link for *src* at *dst*, refusing to overwrite.

    Returns the path of the link that was created; raises
    FileAlreadyExists when the destination is already occupied.
    """
    if not path.exists(dst):
        link(src, dst)
        return dst
    if path.isfile(dst):
        raise FileAlreadyExists(dst)
    if path.isdir(dst):
        target = path.join(dst, path.basename(src))
        if path.exists(target):
            raise FileAlreadyExists(target)
        link(src, target)
        return target
Esempio n. 5
0
    def generate_release_file_for_specific_distribution_and_version(self, distribution, codename, force=True):
        """Regenerate and sign the apt Release/InRelease files for one codename."""
        dirpath = path.join(self.base_directory, distribution, 'dists', codename)
        in_release = path.join(dirpath, 'InRelease')
        release = path.join(dirpath, 'Release')
        if not force and path.exists(release):
            return
        # build the Release file via apt-ftparchive, using a local cache db
        cache = path.join(dirpath, 'apt_cache.db')
        contents = apt_ftparchive(['--db', cache, 'release', dirpath])
        with fopen(release, 'w') as fd:
            archs = " ".join(sorted(KNOWN_DISTRIBUTIONS[distribution][codename]))
            fd.write(RELEASE_FILE_HEADER.format(codename, archs, contents))
        # remove stale signature files before signing anew
        for stale in (in_release, '%s.gpg' % release):
            if path.exists(stale):
                remove(stale)
        # sign: clearsigned InRelease plus a detached Release.gpg
        gpg(['--clearsign', '-o', in_release, release])
        gpg(['-abs', '-o', '%s.gpg' % release, release])
Esempio n. 6
0
def hard_link_and_override(src, dst):
    """Hard-link *src* to *dst*, replacing whatever is already there.

    If *dst* is a directory the link is created inside it, named after
    *src*; an existing file of that name is replaced as well.
    """
    if not path.exists(dst):
        link(src, dst)
    elif path.isfile(dst):
        remove(dst)
        link(src, dst)
    elif path.isdir(dst):
        target = path.join(dst, path.basename(src))
        # bug fix: an existing file inside the directory previously made
        # link() raise EEXIST; "override" now applies to this case too
        if path.exists(target):
            remove(target)
        link(src, target)
Esempio n. 7
0
def hard_link_and_override(src, dst):
    """Hard-link *src* at *dst*; an existing regular file there is replaced."""
    if path.exists(dst):
        if path.isfile(dst):
            remove(dst)
            link(src, dst)
        elif path.isdir(dst):
            link(src, path.join(dst, path.basename(src)))
    else:
        link(src, dst)
Esempio n. 8
0
def _generate_gpg_key_if_does_not_exist(config):
    """Generate the gpg signing key and export it, unless already present.

    :returns: True if the key did NOT exist before and was generated here
        (see the final ``return not already_generated``)
    """
    # the key counts as present only when every expected keyring file exists
    gnupg_directory = path.join(path.expanduser("~"), ".gnupg")
    already_generated = all(path.exists(path.join(gnupg_directory, filename))
                            for filename in GPG_FILENAMES)
    home_key_path = path.join(path.expanduser("~"), 'gpg.key')
    already_generated = already_generated and path.exists(home_key_path)
    if not already_generated:
        # start from a clean keyring; gpg_batch_file drives unattended keygen
        rmtree(gnupg_directory, ignore_errors=True)
        log_execute_assert_success(['gpg', '--batch', '--gen-key',
                                    resource_filename(__name__, 'gpg_batch_file')])
        pid = log_execute_assert_success(['gpg', '--export', '--armor'])
        with fopen(path.join(path.expanduser("~"), ".rpmmacros"), 'w') as fd:
            fd.write(GPG_TEMPLATE)
        with fopen(home_key_path, 'w') as fd:
            # get_stdout() returns bytes; decode before writing to a
            # text-mode file (matches the sibling implementation)
            fd.write(pid.get_stdout().decode())
    # publish the key into the artifacts tree, but never overwrite it
    data_key_path = path.join(config.artifacts_directory, 'packages', 'gpg.key')
    if not path.exists(data_key_path):
        copy(home_key_path, data_key_path)
    return not already_generated
Esempio n. 9
0
 def initialise(self):
     """Create the on-disk layout for every known distribution/version/arch."""
     ensure_directory_exists(self.base_directory)
     for distribution_name, distribution_dict in KNOWN_DISTRIBUTIONS.items():
         for version, architectures in distribution_dict.items():
             for arch in architectures:
                 dirpath = self.deduce_dirname(distribution_name, version, arch)
                 ensure_directory_exists(dirpath)
                 # seed an empty Packages file on first run only
                 if not path.exists(path.join(dirpath, 'Packages')):
                     write_to_packages_file(dirpath, '', 'w')
             self.generate_release_file_for_specific_distribution_and_version(distribution_name, version, False)
Esempio n. 10
0
def _fix_dpkg_sig():
    """Patch dpkg-sig in place so it accepts data.tar.xz members.

    See https://launchpadlibrarian.net/180099595/dpkg-sig-xz.patch
    and http://osdir.com/ml/ubuntu-bugs/2014-07/msg09103.html
    """
    dpkg_sig = '/usr/bin/dpkg-sig'
    if not path.exists(dpkg_sig):
        return
    with fopen(dpkg_sig) as fd:
        contents = fd.read()
    patched = contents.replace(
        '$seen_files{"data.tar.gz"} &&',
        '($seen_files{"data.tar.gz"} || $seen_files{"data.tar.xz"}) &&')
    with fopen(dpkg_sig, 'w') as fd:
        fd.write(patched)
Esempio n. 11
0
 def _get_custom_installation_instructions(self, package):
     filepath = path.join(package['abspath'], 'installation_instructions.json')
     try:
         if not path.exists(filepath):
             return dict()
         with fopen(filepath) as fd:
             result = decode(fd.read())
             return result if isinstance(result, dict) else dict()
     except:
         logger.exception("failed to read custom installation instructions from {0}".format(filepath))
         return dict()
Esempio n. 12
0
def ensure_packages_json_file_exists_in_directory(dirpath):
    """Make sure <dirpath>/packages.json exists and holds a JSON list.

    An existing file whose content already decodes to a list is left
    untouched; anything else (missing, unreadable, wrong type) is
    overwritten with an empty JSON list.
    """
    filepath = path.join(dirpath, 'packages.json')
    if path.exists(filepath):
        try:
            with fopen(filepath) as fd:
                if isinstance(decode(fd.read()), list):
                    return
        except Exception:
            # narrowed from a bare except; decode/read failures still fall
            # through to rewriting the file below
            pass
    with fopen(filepath, 'w') as fd:
        fd.write('[]')
Esempio n. 13
0
 def _get_custom_installation_instructions(self, package):
     filepath = path.join(package['abspath'], 'installation_instructions.json')
     try:
         if not path.exists(filepath):
             return dict()
         with fopen(filepath) as fd:
             result = decode(fd.read())
             return result if isinstance(result, dict) else dict()
     except:
         logger.exception("failed to read custom installation instructions from {0}".format(filepath))
         return dict()
Esempio n. 14
0
def ensure_packages_json_file_exists_in_directory(dirpath):
    """Create packages.json with an empty list unless a valid list is present."""
    filepath = path.join(dirpath, 'packages.json')
    valid = False
    if path.exists(filepath):
        try:
            with fopen(filepath) as fd:
                valid = isinstance(decode(fd.read()), list)
        except:
            valid = False
    if not valid:
        with fopen(filepath, 'w') as fd:
            fd.write('[]')
Esempio n. 15
0
def _fix_entropy_generator():
    """Point rng-tools at /dev/urandom so key generation has entropy.

    No-op unless running as root and the rng-tools init script exists.
    """
    from os import getuid
    rng_tools_script = '/etc/init.d/rng-tools'
    if not path.exists(rng_tools_script) or getuid() != 0:
        return
    with fopen("/etc/default/rng-tools") as fd:
        already_configured = fd.read().endswith("HRNGDEVICE=/dev/urandom\n")
    if already_configured:
        return
    # restart the daemon around the configuration change
    log_execute_assert_success([rng_tools_script, 'stop'], True)
    with fopen("/etc/default/rng-tools", 'a') as fd:
        fd.write("HRNGDEVICE=/dev/urandom\n")
    log_execute_assert_success([rng_tools_script, 'start'], True)
Esempio n. 16
0
def _fix_entropy_generator():
    """Ensure rng-tools uses /dev/urandom as its hardware RNG device.

    Requires root and an installed rng-tools init script; otherwise a no-op.
    """
    from os import getuid
    script = '/etc/init.d/rng-tools'
    defaults = "/etc/default/rng-tools"
    wanted = "HRNGDEVICE=/dev/urandom\n"
    if not path.exists(script) or getuid() != 0:
        return
    with fopen(defaults) as fd:
        if fd.read().endswith(wanted):
            return
    log_execute_assert_success([script, 'stop'], True)
    with fopen(defaults, 'a') as fd:
        fd.write(wanted)
    log_execute_assert_success([script, 'start'], True)
Esempio n. 17
0
def _fix_dpkg_sig():
    """Rewrite /usr/bin/dpkg-sig to also accept data.tar.xz archives.

    References:
      https://launchpadlibrarian.net/180099595/dpkg-sig-xz.patch
      http://osdir.com/ml/ubuntu-bugs/2014-07/msg09103.html
    """
    dpkg_sig = '/usr/bin/dpkg-sig'
    if not path.exists(dpkg_sig):
        return
    old = '$seen_files{"data.tar.gz"} &&'
    new = '($seen_files{"data.tar.gz"} || $seen_files{"data.tar.xz"}) &&'
    with fopen(dpkg_sig) as fd:
        contents = fd.read()
    with fopen(dpkg_sig, 'w') as fd:
        fd.write(contents.replace(old, new))
Esempio n. 18
0
 def generate_release_file_for_specific_distribution_and_version(self, distribution, codename, force=True):
     """Build and gpg-sign the Release/InRelease files for *codename*."""
     dirpath = path.join(self.base_directory, distribution, 'dists', codename)
     in_release = path.join(dirpath, 'InRelease')
     release = path.join(dirpath, 'Release')
     release_gpg = release + '.gpg'
     if path.exists(release) and not force:
         return
     # regenerate the Release file from the archive metadata
     contents = apt_ftparchive(['release', dirpath])
     header = RELEASE_FILE_HEADER.format(
         codename,
         " ".join(sorted(KNOWN_DISTRIBUTIONS[distribution][codename])),
         contents)
     with fopen(release, 'w') as fd:
         fd.write(header)
     # stale signatures must not outlive the file they signed
     for old_signature in (in_release, release_gpg):
         if path.exists(old_signature):
             remove(old_signature)
     # trusty's apt rejects SHA256-digest InRelease signatures
     digest_algo = 'SHA1' if codename == "trusty" else 'SHA256'
     gpg(['--clearsign', '--digest-algo', digest_algo, '-o', in_release, release])
     gpg(['-abs', '-o', release_gpg, release])
Esempio n. 19
0
    def from_disk(cls, filepath):
        """Build a config object from a JSON file on disk.

        Falls back to a fresh instance remembering *filepath* when the
        file does not exist; asserts the default index is configured.
        """
        filepath = filepath or cls.get_default_config_file()
        if path.exists(filepath):
            with fopen(filepath) as fd:
                kwargs = decode(fd.read())
            kwargs['filepath'] = filepath
            self = cls()
            for key, value in kwargs.items():
                setattr(self, key, value)
        else:
            self = cls()
            self.filepath = filepath
        assert self.webserver.default_index is None or self.webserver.default_index in self.indexes
        return self
Esempio n. 20
0
    def from_disk(cls, filepath):
        """Load configuration from *filepath*, or defaults when absent.

        :returns: a ``cls`` instance with attributes populated from the
            JSON content; asserts the default index is configured
        """
        filepath = filepath or cls.get_default_config_file()
        if not path.exists(filepath):
            self = cls()
            self.filepath = filepath
        else:
            with fopen(filepath) as fd:
                kwargs = decode(fd.read())
                kwargs['filepath'] = filepath
                self = cls()
                # .items() instead of the Python 2-only .iteritems(),
                # matching the sibling implementation of this method
                for key, value in kwargs.items():
                    setattr(self, key, value)

        assert self.webserver.default_index is None or self.webserver.default_index in self.indexes
        return self
    def test_consume_file(self):
        """consume_file stores and unzips an incoming update, but only when
        its version is newer than the latest one already indexed."""
        indexer = vmware_studio_updates.VmwareStudioUpdatesIndexer(
            self.config, INDEX)
        indexer.initialise()
        src = 'host-power-tools-for-vmware-1.6.13-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        dst = path.join(indexer.base_directory, 'host-power-tools-for-vmware')
        utils.write_file(src, '')
        # first version consumed: the zip is stored and unpacked
        with mock.patch.object(
                vmware_studio_updates,
                'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        self.assertTrue(
            path.exists(
                path.join(indexer.base_directory,
                          'host-power-tools-for-vmware', src)))
        log_execute_assert_success.assert_called_with(
            ['unzip', '-qq', '-o',
             path.join(dst, src), '-d', dst])

        # newer version (1.7.4 > 1.6.13): unzip runs again
        src = 'host-power-tools-for-vmware-1.7.4-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        utils.write_file(src, '')
        with mock.patch.object(
                vmware_studio_updates,
                'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        log_execute_assert_success.assert_called_with(
            ['unzip', '-qq', '-o',
             path.join(dst, src), '-d', dst])

        # older version (1.7.3 < 1.7.4): consume_file must not unzip
        src = 'host-power-tools-for-vmware-1.7.3-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        utils.write_file(src, '')
        with mock.patch.object(
                vmware_studio_updates,
                'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        log_execute_assert_success.assert_not_called()

        # the latest indexed update remains 1.7.4
        self.assertTrue(
            "1.7.4" in indexer._get_latest_update_file_in_directory(
                path.join(indexer.base_directory,
                          'host-power-tools-for-vmware')))
Esempio n. 22
0
    def rebuild_index(self):
        """Regenerate per-package releases.json / latest_release.txt and the
        top-level packages.json from the artifacts currently on disk."""
        packages = []
        # prune empty directories left behind by deleted artifacts
        log_execute_assert_success([
            'find', self.base_directory, '-type', 'd', '-empty', '-print',
            '-delete'
        ])
        for package in self._iter_packages():
            releases = []
            # newest first by version; releases with no distributions are dropped
            for release in sorted(
                    self._iter_releases(package),
                    reverse=True,
                    key=lambda release: parse_version(release['version'])):
                release['distributions'] = list(
                    self._iter_distributions(package, release))
                if not release['distributions']:
                    continue
                releases.append(release)
            write_file(path.join(package['abspath'], 'releases.json'),
                       encode(releases, indent=4, large_object=True))

            latest_release = self._get_latest_release(releases)
            latest_release_txt = path.join(package['abspath'],
                                           'latest_release.txt')
            if latest_release:
                # only packages with at least one release are listed
                package['latest_version'] = latest_release['version']
                package['latest_version_release_date'] = latest_release[
                    'release_date']
                package[
                    'installation_instructions'] = self._get_installation_instructions(
                        package, latest_release)

                packages.append(package)
                write_file(latest_release_txt, latest_release['version'])
            elif path.exists(latest_release_txt):
                # no releases anymore: drop the stale marker file
                remove(latest_release_txt)
        sorted_packages = sorted(packages,
                                 key=lambda package: package['product_name'])
        write_file(path.join(self.base_directory, 'packages.json'),
                   encode(sorted_packages, indent=4, large_object=True))
Esempio n. 23
0
    def rebuild_index(self):
        """Rebuild per-package releases.json / latest_release.txt and the
        top-level packages.json from what is currently on disk."""
        packages = []
        for package in self._iter_packages():
            releases = []
            # newest first by version; skip releases with no distributions left
            for release in sorted(self._iter_releases(package), reverse=True, key=lambda release: parse_version(release['version'])):
                release['distributions'] = list(self._iter_distributions(package, release))
                if not release['distributions']:
                    continue
                releases.append(release)
            write_file(path.join(package['abspath'], 'releases.json'), encode(releases, indent=4, large_object=True))

            latest_release = self._get_latest_release(releases)
            latest_release_txt = path.join(package['abspath'], 'latest_release.txt')
            if latest_release:
                # packages without any release are omitted from the index
                package['latest_version'] = latest_release['version']
                package['installation_instructions'] = self._get_installation_instructions(package, latest_release)
                packages.append(package)
                write_file(latest_release_txt, latest_release['version'])
            elif path.exists(latest_release_txt):
                # stale marker from a previous build -- remove it
                remove(latest_release_txt)
        sorted_packages = sorted(packages, key=lambda package: package['product_name'])
        write_file(path.join(self.base_directory, 'packages.json'), encode(sorted_packages, indent=4, large_object=True))
Esempio n. 24
0
    def test_consume_file(self):
        """consume_file stores and unzips an update only when its version is
        newer than the latest one already present in the index."""
        indexer = vmware_studio_updates.VmwareStudioUpdatesIndexer(self.config, INDEX)
        indexer.initialise()
        src = 'host-power-tools-for-vmware-1.6.13-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        dst = path.join(indexer.base_directory, 'host-power-tools-for-vmware')
        utils.write_file(src, '')
        # first version consumed: stored in the product directory and unzipped
        with mock.patch.object(vmware_studio_updates, 'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        self.assertTrue(path.exists(path.join(indexer.base_directory, 'host-power-tools-for-vmware', src)))
        log_execute_assert_success.assert_called_with(['unzip', '-qq', '-o', path.join(dst, src), '-d', dst])

        # newer version (1.7.4 > 1.6.13): unzip runs again
        src = 'host-power-tools-for-vmware-1.7.4-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        utils.write_file(src, '')
        with mock.patch.object(vmware_studio_updates, 'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        log_execute_assert_success.assert_called_with(['unzip', '-qq', '-o', path.join(dst, src), '-d', dst])

        # older version (1.7.3 < 1.7.4): consume_file must not unzip
        src = 'host-power-tools-for-vmware-1.7.3-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        utils.write_file(src, '')
        with mock.patch.object(vmware_studio_updates, 'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        log_execute_assert_success.assert_not_called()

        # the latest indexed update remains 1.7.4
        self.assertTrue("1.7.4" in indexer._get_latest_update_file_in_directory(path.join(indexer.base_directory, 'host-power-tools-for-vmware')))
Esempio n. 25
0
 def _is_hidden(self, dirpath):
     return path.exists(path.join(dirpath, 'hidden'))
Esempio n. 26
0
def _override_symlink(src, dst):
    if path.exists(dst):
        assert path.islink(dst)
        remove(dst)
    symlink(src, dst)
Esempio n. 27
0
def sign_repomd(dirpath):
    """Create a fresh detached ascii-armored signature for repomd.xml."""
    repomd = path.join(dirpath, 'repodata', 'repomd.xml')
    signature = '%s.asc' % repomd
    if path.exists(signature):
        remove(signature)
    log_execute_assert_success(['gpg', '-a', '--detach-sign', repomd])
Esempio n. 28
0
 def delete_artifact(self, filepath):
     """Remove *filepath* if present; missing files are silently ignored."""
     if not path.exists(filepath):
         return
     remove(filepath)
Esempio n. 29
0
 def to_disk(self):
     """Serialize this object as JSON into self.filepath, creating dirs."""
     parent = path.dirname(self.filepath)
     if not path.exists(parent):
         makedirs(parent)
     with fopen(self.filepath, 'w') as fd:
         fd.write(self.to_json())
Esempio n. 30
0
 def delete_old_release_signature_files():
     """Drop the stale InRelease / Release.gpg signatures, if any.

     Closure over in_release/release from the enclosing scope.
     """
     for stale in (in_release, '%s.gpg' % release):
         if path.exists(stale):
             remove(stale)
Esempio n. 31
0
def ensure_directory_exists(dirpath):
    """Create *dirpath* (and any missing parents) unless it already exists."""
    if path.exists(dirpath):
        return
    makedirs(dirpath)
Esempio n. 32
0
 def test_exists(self):
     """A path.exists probe must trigger exactly one validator call."""
     missing = "/this_path_probably_doesnt_exist_or_else_the_test_will_fail"
     self.switch_validator.assert_called(0)
     self.assertFalse(path.exists(missing))
     self.switch_validator.assert_called(1)
Esempio n. 33
0
def sign_repomd(dirpath):
    """Re-sign repodata/repomd.xml with a detached armored gpg signature."""
    repomd = path.join(dirpath, 'repodata', 'repomd.xml')
    old_signature = '%s.asc' % repomd
    if path.exists(old_signature):
        remove(old_signature)
    log_execute_assert_success(['gpg', '-a', '--detach-sign', repomd])
 def _override_updates_symlink(self, src, dst):
     if path.exists(dst):
         assert path.islink(dst)
         remove(dst)
     symlink(src, dst)
Esempio n. 35
0
 def _is_repodata_exists(self, dirpath):
     repodata = path.join(dirpath, 'repodata')
     return path.exists(repodata)
Esempio n. 36
0
 def delete(self):
     """Remove the backing file when present; absent files are ignored."""
     if not path.exists(self.filepath):
         return
     remove(self.filepath)
Esempio n. 37
0
 def load(self):
     """Populate self.data from disk, defaulting to an empty dict."""
     self.data = _read(self.filepath) if path.exists(self.filepath) else dict()
Esempio n. 38
0
 def to_disk(self):
     """Write this object's JSON form to self.filepath, creating its dir."""
     directory = path.dirname(self.filepath)
     if not path.exists(directory):
         makedirs(directory)
     with fopen(self.filepath, 'w') as fd:
         fd.write(self.to_json())
Esempio n. 39
0
def ensure_directory_exists(dirpath):
    """Idempotently create *dirpath*, including missing parent directories."""
    already_there = path.exists(dirpath)
    if not already_there:
        makedirs(dirpath)
Esempio n. 40
0
 def _is_hidden(self, dirpath):
     return path.exists(path.join(dirpath, 'hidden'))
Esempio n. 41
0
 def _is_repodata_exists(self, dirpath):
     repodata = path.join(dirpath, 'repodata')
     return path.exists(repodata)
Esempio n. 42
0
 def test_initialise(self):
     """initialise() creates the base directory that did not exist before."""
     indexer = vmware_studio_updates.VmwareStudioUpdatesIndexer(self.config, INDEX)
     base = indexer.base_directory
     self.assertFalse(path.exists(base))
     indexer.initialise()
     self.assertTrue(path.exists(base))
 def test_initialise(self):
     """The base directory appears only after initialise() runs."""
     indexer = vmware_studio_updates.VmwareStudioUpdatesIndexer(self.config, INDEX)
     self.assertFalse(path.exists(indexer.base_directory))
     indexer.initialise()
     self.assertTrue(path.exists(indexer.base_directory))
Esempio n. 44
0
 def delete_artifact(self, filepath):
     """Delete the artifact at *filepath*, tolerating its absence."""
     artifact_present = path.exists(filepath)
     if artifact_present:
         remove(filepath)
Esempio n. 45
0
def sign_repomd(dirpath):
    """Generate a detached ASCII-armored gpg signature for repomd.xml."""
    repomd = path.join(dirpath, "repodata", "repomd.xml")
    previous = "%s.asc" % repomd
    if path.exists(previous):
        remove(previous)
    log_execute_assert_success(["gpg", "-a", "--detach-sign", repomd])