Example #1
 def iter_files(self):
     for platform, architectures in KNOWN_PLATFORMS.items():
         for arch in architectures:
             dirpath = path.join(self.base_directory,
                                 '%s-%s' % (platform, arch))
             for filepath in glob(path.join(dirpath, '*.rpm')):
                 yield filepath
Example #2
def _generate_gpg_key_if_does_not_exist(config):
    """:returns: True if the gpg key existed before"""
    gnupg_directory = path.join(path.expanduser("~"), ".gnupg")
    already_generated = all([
        path.exists(path.join(gnupg_directory, filename))
        for filename in GPG_FILENAMES
    ])
    home_key_path = path.join(path.expanduser("~"), 'gpg.key')
    already_generated = already_generated and path.exists(home_key_path)
    if not already_generated:
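        # wipe any stale ~/.gnupg so the batch key generation starts from a clean keyring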
        rmtree(gnupg_directory, ignore_errors=True)
        log_execute_assert_success([
            'gpg', '--batch', '--gen-key',
            resource_filename(__name__, 'gpg_batch_file')
        ])
        pid = log_execute_assert_success(['gpg', '--export', '--armor'])
        with fopen(path.join(path.expanduser("~"), ".rpmmacros"), 'w') as fd:
            fd.write(GPG_TEMPLATE)
        with fopen(home_key_path, 'w') as fd:
            fd.write(pid.get_stdout().decode())
    data_key_path = path.join(config.artifacts_directory, 'packages',
                              'gpg.key')
    if not path.exists(data_key_path):
        copy(home_key_path, data_key_path)
    return not already_generated
Example #3
    def generate_release_file_for_specific_distribution_and_version(self, distribution, codename, force=True):
        dirpath = path.join(self.base_directory, distribution, 'dists', codename)
        in_release = path.join(dirpath, 'InRelease')
        release = path.join(dirpath, 'Release')

        def write_release_file():
            cache = path.join(dirpath, 'apt_cache.db')
            contents = apt_ftparchive(['--db', cache, 'release', dirpath])

            def _write():
                with fopen(release, 'w') as fd:
                    available_archs = sorted(KNOWN_DISTRIBUTIONS[distribution][codename])
                    fd.write(RELEASE_FILE_HEADER.format(codename, " ".join(available_archs), contents))

            _write()

        def delete_old_release_signature_files():
            for filepath in [in_release, '%s.gpg' % release]:
                if path.exists(filepath):
                    remove(filepath)

        def sign_release_file():
            gpg(['--clearsign', '-o', in_release, release])
            gpg(['-abs', '-o', '%s.gpg' % release, release])

        if force or not path.exists(release):
            write_release_file()
            delete_old_release_signature_files()
            sign_release_file()
Example #4
 def generate_release_file_for_specific_distribution_and_version(
         self, distribution, codename, force=True):
     dirpath = path.join(self.base_directory, distribution, 'dists',
                         codename)
     in_release = path.join(dirpath, 'InRelease')
     release = path.join(dirpath, 'Release')
     release_gpg = release + '.gpg'
     if path.exists(release) and not force:
         return
     # write release file
     contents = apt_ftparchive(['release', dirpath])
     with fopen(release, 'w') as fd:
         available_archs = sorted(
             KNOWN_DISTRIBUTIONS[distribution][codename])
         fd.write(
             RELEASE_FILE_HEADER.format(codename, " ".join(available_archs),
                                        contents))
     # delete old release signature files
     for filepath in [in_release, release_gpg]:
         if path.exists(filepath):
             remove(filepath)
     # sign release file
     if codename == "trusty":
         # trusty doesn't support SHA256 for InRelease
         gpg([
             '--clearsign', '--digest-algo', 'SHA1', '-o', in_release,
             release
         ])
     else:
         gpg([
             '--clearsign', '--digest-algo', 'SHA256', '-o', in_release,
             release
         ])
     gpg(['-abs', '-o', release_gpg, release])
Example #5
 def _iter_packages(self):
     for package_dirpath in glob(path.join(self.base_directory, 'packages', '*')):
         yield dict(abspath=package_dirpath,
                    hidden=self._is_hidden(package_dirpath),
                    product_name=self._deduce_produce_name(package_dirpath),
                    name=path.basename(package_dirpath),
                    release_notes_url=self._deduce_release_notes_url(package_dirpath),
                    releases_uri=self._normalize_url(path.join(package_dirpath, 'releases.json',)))
Example #6
 def consume_file(self, filepath, platform, arch):
     package_name, package_version, platform_string, architecture, extension = parse_filepath(
         filepath)
     directory = path.join(self.base_directory,
                           package_name.replace('_', '-'))
     ensure_directory_exists(directory)
     filename = '{0}-{1}.tar.gz'.format(package_name, package_version)
     hard_link_or_raise_exception(filepath, path.join(directory, filename))
Example #7
 def _rpm():
     for item in glob(path.join(config.packages_directory, 'rpm', '*')):
         remove(item)
     for src in glob(path.join(config.packages_directory, config.webserver.default_index, 'yum', 'linux-*')):
         linux, distro, version, arch = path.basename(src).split('-')
         dst = path.join(config.artifacts_directory, 'rpm', distro, version, arch)
         ensure_directory_exists(path.dirname(dst))
         _override_symlink(src, dst)
Example #8
 def _iter_packages(self):
     for package_dirpath in glob(path.join(self.base_directory, 'packages', '*')):
         yield dict(abspath=package_dirpath,
                    hidden=self._is_hidden(package_dirpath),
                    product_name=self._deduce_produce_name(package_dirpath),
                    name=path.basename(package_dirpath),
                    release_notes_url=self._deduce_release_notes_url(package_dirpath),
                    releases_uri=self._normalize_url(path.join(package_dirpath, 'releases.json',)))
Example #9
 def _ova_updates():
     ensure_directory_exists(path.join(config.artifacts_directory, 'ova'))
     ensure_directory_exists(
         path.join(config.packages_directory,
                   config.webserver.default_index, 'ova'))
     _override_symlink(
         path.join(config.packages_directory,
                   config.webserver.default_index, 'ova'),
         path.join(config.artifacts_directory, 'ova', 'updates'))
Example #10
 def rebuild_index(self):
     cache_file = path.join(self.base_directory, 'apt_cache.db')
     for distribution_name, distribution_dict in KNOWN_DISTRIBUTIONS.items():
         for version, architectures in distribution_dict.items():
             for arch in architectures:
                 dirpath = self.deduce_dirname(distribution_name, version, arch)
                 contents = apt_ftparchive(['packages', '--db', cache_file, dirpath])
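                # strip the absolute prefix so the Packages index references repository-relative paths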
                 relapath = dirpath.replace(path.join(self.base_directory, distribution_name), '').strip(path.sep)
                 fixed_contents = contents.replace(dirpath, relapath)
                 write_to_packages_file(dirpath, fixed_contents, 'w')
             self.generate_release_file_for_specific_distribution_and_version(distribution_name, version)
Example #11
 def _rpm():
     for item in glob(path.join(config.packages_directory, 'rpm', '*')):
         remove(item)
     for src in glob(
             path.join(config.packages_directory,
                       config.webserver.default_index, 'yum', 'linux-*')):
         linux, distro, version, arch = path.basename(src).split('-')
         dst = path.join(config.artifacts_directory, 'rpm', distro, version,
                         arch)
         ensure_directory_exists(path.dirname(dst))
         _override_symlink(src, dst)
Example #12
 def initialise(self):
     ensure_directory_exists(self.base_directory)
     for platform, architectures in KNOWN_PLATFORMS.items():
         for arch in architectures:
             dirpath = path.join(self.base_directory, "%s-%s" % (platform, arch))
             ensure_directory_exists(path.join(dirpath, "repodata"))
             gpg_key = path.join(self.config.packages_directory, "gpg.key")
             hard_link_and_override(gpg_key, path.join(dirpath, "repodata", "repomd.xml.key"))
     for dirpath in glob(path.join(self.base_directory, "*")):
         if not self._is_repodata_exists(dirpath):
             createrepo(dirpath)
Example #13
def add_index(config, index_name, async_rpc=False):
    from infi.app_repo.indexers import get_indexers
    from infi.app_repo.install import ensure_directory_exists, path
    from infi.app_repo.service import get_client
    assert index_name not in config.indexes
    for indexer in get_indexers(config, index_name):
        indexer.initialise()
    ensure_directory_exists(path.join(config.incoming_directory, index_name))
    ensure_directory_exists(path.join(config.rejected_directory, index_name))
    config.indexes.append(index_name)
    config.to_disk()
    get_client(config).reload_configuration_from_disk(async_rpc=async_rpc)
Example #14
def add_index(config, index_name, async_rpc=False):
    from infi.app_repo.indexers import get_indexers
    from infi.app_repo.install import ensure_directory_exists, path
    from infi.app_repo.service import get_client
    assert index_name not in config.indexes
    for indexer in get_indexers(config, index_name):
        indexer.initialise()
    ensure_directory_exists(path.join(config.incoming_directory, index_name))
    ensure_directory_exists(path.join(config.rejected_directory, index_name))
    config.indexes.append(index_name)
    config.to_disk()
    get_client(config).reload_configuration_from_disk(async_rpc=async_rpc)
Example #15
def sign_all_existing_deb_and_rpm_packages(config):
    # this is necessary because we replaced the gpg key
    from gevent.pool import Pool
    pool = Pool(20)
    rpms = set()
    debs = set()
    for index_name in config.indexes:
        rpms |= set(find_files(path.join(config.packages_directory, index_name, 'yum'), '*.rpm'))
        debs |= set(find_files(path.join(config.packages_directory, index_name, 'apt'), '*.deb'))
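    # queue all signing jobs on the pool, then wait; raise_error=True re-raises the first greenlet failure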
    pool.map_async(sign_rpm_package, rpms)
    pool.map_async(sign_deb_package, debs)
    pool.join(raise_error=True)
Example #16
 def initialise(self):
     ensure_directory_exists(self.base_directory)
     for platform, architectures in KNOWN_PLATFORMS.items():
         for arch in architectures:
             dirpath = path.join(self.base_directory,
                                 '%s-%s' % (platform, arch))
             ensure_directory_exists(path.join(dirpath, 'repodata'))
             gpg_key = path.join(self.config.packages_directory, 'gpg.key')
             hard_link_and_override(
                 gpg_key, path.join(dirpath, 'repodata', 'repomd.xml.key'))
     for dirpath in glob(path.join(self.base_directory, '*')):
         if not self._is_repodata_exists(dirpath):
             createrepo(dirpath)
Example #17
 def rebuild_index(self):
     for package_dir in glob(path.join(self.base_directory, '*')):
         if not path.isdir(package_dir) or package_dir.endswith('updates'):
             continue
         latest_zip = self._get_latest_update_file_in_directory(package_dir)
         if latest_zip:
             self._extract_update(package_dir, latest_zip)
Example #18
 def consume_file(self, filepath, platform, arch):
     package_name, package_version, platform_string, architecture, extension = parse_filepath(filepath)
     package_dir = path.join(self.base_directory, package_name)
     ensure_directory_exists(package_dir)
     final_filepath = hard_link_or_raise_exception(filepath, package_dir)
     if self._get_latest_update_file_in_directory(package_dir) == final_filepath:
         self._extract_update(package_dir, final_filepath)
Example #19
def sign_all_existing_deb_and_rpm_packages(config):
    # this is necessary because we replaced the gpg key
    from gevent.pool import Pool
    pool = Pool(20)
    rpms = set()
    debs = set()
    for index_name in config.indexes:
        rpms |= set(
            find_files(path.join(config.packages_directory, index_name, 'yum'),
                       '*.rpm'))
        debs |= set(
            find_files(path.join(config.packages_directory, index_name, 'apt'),
                       '*.deb'))
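    # queue all signing jobs on the pool, then wait; raise_error=True re-raises the first greenlet failure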
    pool.map_async(sign_rpm_package, rpms)
    pool.map_async(sign_deb_package, debs)
    pool.join(raise_error=True)
Example #20
 def rebuild_index(self):
     for package_dir in glob(path.join(self.base_directory, '*')):
         if not path.isdir(package_dir) or package_dir.endswith('updates'):
             continue
         latest_zip = self._get_latest_update_file_in_directory(package_dir)
         if latest_zip:
             self._extract_update(package_dir, latest_zip)
Example #21
    def consume_file(self, filepath, platform, arch):
        from infi.app_repo.utils import sign_rpm_package

        dirpath = path.join(self.base_directory, "%s-%s" % (platform, TRANSLATE_ARCH[arch]))
        hard_link_or_raise_exception(filepath, dirpath)
        sign_rpm_package(filepath)
        self._update_index(dirpath)
Example #22
 def consume_file(self, filepath, platform, arch):
     from infi.app_repo.utils import sign_rpm_package
     dirpath = path.join(self.base_directory,
                         '%s-%s' % (platform, TRANSLATE_ARCH[arch]))
     hard_link_or_raise_exception(filepath, dirpath)
     sign_rpm_package(filepath)
     self._update_index(dirpath)
Example #23
 def _read_release_date_from_file(self, dirpath):
     from dateutil.parser import parse
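     # a missing or unparsable release_date file is reported as "no release date"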
     try:
         with fopen(path.join(dirpath, 'release_date')) as fd:
             release_date = fd.read().strip()
             return parse(release_date).date()
     except:
         return None
Example #24
 def _deduce_produce_name(self, dirpath):
     try:
         with fopen(path.join(dirpath, 'product_name')) as fd:
             return fd.read().strip()
     except:
         return ' '.join(
             word.capitalize()
             for word in path.basename(dirpath).split('-')).strip()
Example #25
 def _iter_distributions(self, package, release):
     for distribution_dirpath in glob(path.join(release['abspath'], 'distributions', '*')):
         for arch_dirpath in glob(path.join(distribution_dirpath, 'architectures', '*')):
             for extension_dirpath in glob(path.join(arch_dirpath, 'extensions', '*')):
                 try:
                     [filepath] = list(glob(path.join(extension_dirpath, '*')))
                 except ValueError:
                     logger.warn("expected only one file under {}, but it is not the case".format(extension_dirpath))
                     continue
                 distribution = dict(platform=path.basename(distribution_dirpath),
                                     hidden=self._is_hidden(distribution_dirpath) or \
                                            self._is_hidden(arch_dirpath) or \
                                            self._is_hidden(extension_dirpath),
                                     architecture=path.basename(arch_dirpath),
                                     extension=path.basename(extension_dirpath),
                                     filepath=self._normalize_url(filepath))
                 yield distribution
Example #26
 def rebuild_index(self):
     cache_file = path.join(self.base_directory, 'apt_cache.db')
     for distribution_name, distribution_dict in KNOWN_DISTRIBUTIONS.items():
         for version, architectures in distribution_dict.items():
             for arch in architectures:
                 dirpath = self.deduce_dirname(distribution_name, version,
                                               arch)
                 contents = apt_ftparchive(
                     ['packages', '--db', cache_file, dirpath])
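                 # strip the absolute prefix so the Packages index references repository-relative paths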
                 relapath = dirpath.replace(
                     path.join(self.base_directory, distribution_name),
                     '').strip(path.sep)
                 fixed_contents = contents.replace(dirpath, relapath)
                 write_to_packages_file(dirpath, fixed_contents, 'w')
             self.generate_release_file_for_specific_distribution_and_version(
                 distribution_name, version)
Example #27
 def _rpm_addsign_rewrites_the_file(filepath):
     from os import environ
     logger.info("Signing {!r}".format(filepath))
     command = ['rpm', '--addsign', filepath]
     env = environ.copy()
     env['HOME'] = env.get('HOME', "/root")
     env['GNUPGHOME'] = path.join(env.get('HOME', "/root"), ".gnupg")
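     # setsid detaches rpm from the controlling tty and the piped echo feeds an empty line to any passphrase prompt, so signing never blocks on input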
     log_execute_assert_success('echo | setsid rpm --addsign {}'.format(filepath), env=env, shell=True)
Example #28
 def iter_files(self):
     ensure_directory_exists(self.base_directory)
     for distribution_name, distribution_dict in KNOWN_DISTRIBUTIONS.items():
         for version, architectures in distribution_dict.items():
             for arch in architectures:
                 dirpath = self.deduce_dirname(distribution_name, version, arch)
                 for filepath in glob(path.join(dirpath, '*.deb')):
                     yield filepath
Example #29
 def _rpm_addsign_rewrites_the_file(filepath):
     from os import environ
     logger.info("Signing {!r}".format(filepath))
     command = ['rpm', '--addsign', filepath]
     env = environ.copy()
     env['HOME'] = env.get('HOME', "/root")
     env['GNUPGHOME'] = path.join(env.get('HOME', "/root"), ".gnupg")
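     # setsid detaches rpm from the controlling tty and the piped echo feeds an empty line to any passphrase prompt, so signing never blocks on input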
     log_execute_assert_success('echo | setsid rpm --addsign {}'.format(filepath), env=env, shell=True)
Example #30
def hard_link_and_override(src, dst):
    if not path.exists(dst):
        link(src, dst)
    elif path.isfile(dst):
        remove(dst)
        link(src, dst)
    elif path.isdir(dst):
        link(src, path.join(dst, path.basename(src)))
Example #31
def hard_link_and_override(src, dst):
    if not path.exists(dst):
        link(src, dst)
    elif path.isfile(dst):
        remove(dst)
        link(src, dst)
    elif path.isdir(dst):
        link(src, path.join(dst, path.basename(src)))
Example #32
 def consume_file(self, filepath, platform, arch):
     package_name, package_version, platform_string, architecture, extension = parse_filepath(
         filepath)
     package_dir = path.join(self.base_directory, package_name)
     ensure_directory_exists(package_dir)
     final_filepath = hard_link_or_raise_exception(filepath, package_dir)
     if self._get_latest_update_file_in_directory(
             package_dir) == final_filepath:
         self._extract_update(package_dir, final_filepath)
Example #33
 def consume_file(self, filepath, platform, arch):
     package_name, package_version, platform_string, architecture, extension = parse_filepath(filepath)
     platform_string = "vmware-esx" if extension == "ova" else platform_string # TODO this needs to change in our build
     dirpath = path.join(self.base_directory, 'packages', package_name, 'releases', package_version,
                         'distributions', platform_string, 'architectures', architecture,
                         'extensions', extension)
     ensure_directory_exists(dirpath)
     hard_link_or_raise_exception(filepath, dirpath)
     self.rebuild_index()
Example #34
 def consume_file(self, filepath, platform, arch):
     package_name, package_version, platform_string, architecture, extension = parse_filepath(filepath)
     platform_string = "vmware-esx" if extension == "ova" else platform_string # TODO this needs to change in our build
     dirpath = path.join(self.base_directory, 'packages', package_name, 'releases', package_version,
                         'distributions', platform_string, 'architectures', architecture,
                         'extensions', extension)
     ensure_directory_exists(dirpath)
     hard_link_or_raise_exception(filepath, dirpath)
     self.rebuild_index()
Example #35
def _generate_gpg_key_if_does_not_exist(config):
    """:returns: True if the gpg key existed before"""
    gnupg_directory = path.join(path.expanduser("~"), ".gnupg")
    already_generated = all([path.exists(path.join(gnupg_directory, filename)) for filename in GPG_FILENAMES])
    home_key_path = path.join(path.expanduser("~"), 'gpg.key')
    already_generated = already_generated and path.exists(home_key_path)
    if not already_generated:
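        # wipe any stale ~/.gnupg so the batch key generation starts from a clean keyring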
        rmtree(gnupg_directory, ignore_errors=True)
        log_execute_assert_success(['gpg', '--batch', '--gen-key',
                                    resource_filename(__name__, 'gpg_batch_file')])
        pid = log_execute_assert_success(['gpg', '--export', '--armor'])
        with fopen(path.join(path.expanduser("~"), ".rpmmacros"), 'w') as fd:
            fd.write(GPG_TEMPLATE)
        with fopen(home_key_path, 'w') as fd:
            fd.write(pid.get_stdout())
    data_key_path = path.join(config.artifacts_directory, 'packages', 'gpg.key')
    if not path.exists(data_key_path):
        copy(home_key_path, data_key_path)
    return not already_generated
Example #36
 def initialise(self):
     ensure_directory_exists(self.base_directory)
     for distribution_name, distribution_dict in KNOWN_DISTRIBUTIONS.items():
         for version, architectures in distribution_dict.items():
             for arch in architectures:
                 dirpath = self.deduce_dirname(distribution_name, version, arch)
                 ensure_directory_exists(dirpath)
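                 # presumably seeded empty so the Release generation below has a Packages index to work with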
                 if not path.exists(path.join(dirpath, 'Packages')):
                     write_to_packages_file(dirpath, '', 'w')
             self.generate_release_file_for_specific_distribution_and_version(distribution_name, version, False)
Example #37
def write_to_packages_file(dirpath, contents, mode):
    import gzip
    packages_filepath = path.join(dirpath, 'Packages')
    with fopen(packages_filepath, mode) as fd:
        fd.write(contents)
    with fopen(packages_filepath, 'rb') as fd:
        all_contents = fd.read()
    with gzip.open(packages_filepath + '.gz', 'wb') as fd:
        fd.write(all_contents)
Example #38
 def rebuild_index(self):
     for distribution_name, distribution_dict in KNOWN_DISTRIBUTIONS.items():
         for version, architectures in distribution_dict.items():
             for arch in architectures:
                 dirpath = self.deduce_dirname(distribution_name, version, arch)
                 contents = dpkg_scanpackages(['--multiversion', dirpath, '/dev/null'])
                 relapath = dirpath.replace(path.join(self.base_directory, distribution_name), '').strip(path.sep)
                 fixed_contents = contents.replace(dirpath, relapath)
                 write_to_packages_file(dirpath, fixed_contents, 'w')
             self.generate_release_file_for_specific_distribution_and_version(distribution_name, version)
Example #39
 def _iter_releases(self, package):
     from os import stat
     from time import ctime
     for version_dirpath in glob(path.join(package['abspath'], 'releases', '*')):
         mod_time = stat(version_dirpath).st_mtime
         release = dict(version=path.basename(version_dirpath),
                        hidden=self._is_hidden(version_dirpath),
                        abspath=version_dirpath,
                        last_modified=ctime(mod_time) if mod_time else '')
         yield release
Example #40
 def _iter_releases(self, package):
     from os import stat
     from time import ctime
     for version_dirpath in glob(path.join(package['abspath'], 'releases', '*')):
         mod_time = stat(version_dirpath).st_mtime
         release = dict(version=path.basename(version_dirpath),
                        hidden=self._is_hidden(version_dirpath),
                        abspath=version_dirpath,
                        last_modified=ctime(mod_time) if mod_time else '')
         yield release
Example #41
        def write_release_file():
            cache = path.join(dirpath, 'apt_cache.db')
            contents = apt_ftparchive(['--db', cache, 'release', dirpath])

            def _write():
                with fopen(release, 'w') as fd:
                    available_archs = sorted(KNOWN_DISTRIBUTIONS[distribution][codename])
                    fd.write(RELEASE_FILE_HEADER.format(codename, " ".join(available_archs), contents))

            _write()
Example #42
 def _get_custom_installation_instructions(self, package):
     filepath = path.join(package['abspath'], 'installation_instructions.json')
     try:
         if not path.exists(filepath):
             return dict()
         with fopen(filepath) as fd:
             result = decode(fd.read())
             return result if isinstance(result, dict) else dict()
     except:
         logger.exception("failed to read custom installation instructions from {0}".format(filepath))
         return dict()
Example #43
 def _try_except_finally_on_filepath(self, func, index, filepath, *args, **kwargs): # TODO rejection needs a test
     try:
         func(self.config, index, filepath, *args, **kwargs)
     except:
         logger.exception("processing source {} failed, moving it to {}".format(filepath, self.config.rejected_directory))
         try:
             hard_link_or_raise_exception(filepath, path.join(self.config.rejected_directory, index))
         except:
             pass
     finally:
         remove(filepath)
Example #44
def ensure_packages_json_file_exists_in_directory(dirpath):
    filepath = path.join(dirpath, 'packages.json')
    if path.exists(filepath):
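        # keep the existing file only if it already holds a JSON list; otherwise fall through and reset it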
        try:
            with fopen(filepath) as fd:
                if isinstance(decode(fd.read()), list):
                    return
        except:
            pass
    with fopen(filepath, 'w') as fd:
        fd.write('[]')
Example #45
 def _get_custom_installation_instructions(self, package):
     filepath = path.join(package['abspath'], 'installation_instructions.json')
     try:
         if not path.exists(filepath):
             return dict()
         with fopen(filepath) as fd:
             result = decode(fd.read())
             return result if isinstance(result, dict) else dict()
     except:
         logger.exception("failed to read custom installation instructions from {0}".format(filepath))
         return dict()
Example #46
 def _get_latest_update_file_in_directory(self, dirpath):
     from pkg_resources import parse_version
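     # parse_version('0') is a floor that compares lower than any real package version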
     latest_update_file, latest_version = None, parse_version('0')
     update_files = [filepath for filepath in glob(path.join(dirpath, '*.zip')) if ARCH in filepath]
     for filepath in update_files:
         package_name, package_version, platform_string, architecture, extension = parse_filepath(filepath)
         package_version = parse_version(package_version)
         if package_version > latest_version:
             latest_version = package_version
             latest_update_file = filepath
     return latest_update_file
Example #47
def ensure_packages_json_file_exists_in_directory(dirpath):
    filepath = path.join(dirpath, 'packages.json')
    if path.exists(filepath):
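        # keep the existing file only if it already holds a JSON list; otherwise fall through and reset it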
        try:
            with fopen(filepath) as fd:
                if isinstance(decode(fd.read()), list):
                    return
        except:
            pass
    with fopen(filepath, 'w') as fd:
        fd.write('[]')
Example #48
    def test_consume_file(self):
        indexer = vmware_studio_updates.VmwareStudioUpdatesIndexer(
            self.config, INDEX)
        indexer.initialise()
        src = 'host-power-tools-for-vmware-1.6.13-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        dst = path.join(indexer.base_directory, 'host-power-tools-for-vmware')
        utils.write_file(src, '')
        with mock.patch.object(
                vmware_studio_updates,
                'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        self.assertTrue(
            path.exists(
                path.join(indexer.base_directory,
                          'host-power-tools-for-vmware', src)))
        log_execute_assert_success.assert_called_with(
            ['unzip', '-qq', '-o',
             path.join(dst, src), '-d', dst])

        src = 'host-power-tools-for-vmware-1.7.4-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        utils.write_file(src, '')
        with mock.patch.object(
                vmware_studio_updates,
                'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        log_execute_assert_success.assert_called_with(
            ['unzip', '-qq', '-o',
             path.join(dst, src), '-d', dst])

        src = 'host-power-tools-for-vmware-1.7.3-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        utils.write_file(src, '')
        with mock.patch.object(
                vmware_studio_updates,
                'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        log_execute_assert_success.assert_not_called()

        self.assertTrue(
            "1.7.4" in indexer._get_latest_update_file_in_directory(
                path.join(indexer.base_directory,
                          'host-power-tools-for-vmware')))
Example #49
def hard_link_or_raise_exception(src, dst):
    if not path.exists(dst):
        link(src, dst)
        return dst
    elif path.isfile(dst):
        raise FileAlreadyExists(dst)
    elif path.isdir(dst):
        dst_abspath = path.join(dst, path.basename(src))
        if path.exists(dst_abspath):
            raise FileAlreadyExists(dst_abspath)
        link(src, dst_abspath)
        return dst_abspath
Example #50
    def rebuild_index(self):
        packages = []
        for package in self._iter_packages():
            releases = []
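            # walk releases newest-first by semantic version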
            for release in sorted(self._iter_releases(package), reverse=True, key=lambda release: parse_version(release['version'])):
                release['distributions'] = list(self._iter_distributions(package, release))
                if not release['distributions']:
                    continue
                releases.append(release)
            write_file(path.join(package['abspath'], 'releases.json'), encode(releases, indent=4, large_object=True))

            latest_release = self._get_latest_release(releases)
            latest_release_txt = path.join(package['abspath'], 'latest_release.txt')
            if latest_release:
                package['latest_version'] = latest_release['version']
                package['installation_instructions'] = self._get_installation_instructions(package, latest_release)
                packages.append(package)
                write_file(latest_release_txt, latest_release['version'])
            elif path.exists(latest_release_txt):
                remove(latest_release_txt)
        sorted_packages = sorted(packages, key=lambda package: package['product_name'])
        write_file(path.join(self.base_directory, 'packages.json'), encode(sorted_packages, indent=4, large_object=True))
Example #51
 def consume_file(self, filepath, platform, arch):
     from infi.app_repo.utils import sign_deb_package
     distribution_name, codename = platform.rsplit('-', 1)
     dirpath = self.deduce_dirname(distribution_name, codename, arch)
     hard_link_or_raise_exception(filepath, dirpath)
     sign_deb_package(filepath)
     with temporary_directory_context() as tempdir:
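         # scan the package inside an isolated directory so dpkg-scanpackages emits a stanza for this file only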
         hard_link_or_raise_exception(filepath, tempdir)
         contents = dpkg_scanpackages(['--multiversion', tempdir, '/dev/null'])
         relapath = dirpath.replace(path.join(self.base_directory, distribution_name), '').strip(path.sep)
         fixed_contents = contents.replace(tempdir, relapath)
         write_to_packages_file(dirpath, fixed_contents, 'a')
     self.generate_release_file_for_specific_distribution_and_version(distribution_name, codename)
Example #52
 def generate_release_file_for_specific_distribution_and_version(self, distribution, codename, force=True):
     dirpath = path.join(self.base_directory, distribution, 'dists', codename)
     in_release = path.join(dirpath, 'InRelease')
     release = path.join(dirpath, 'Release')
     release_gpg = release + '.gpg'
     if path.exists(release) and not force:
         return
     # write release file
     contents = apt_ftparchive(['release', dirpath])
     with fopen(release, 'w') as fd:
         available_archs = sorted(KNOWN_DISTRIBUTIONS[distribution][codename])
         fd.write(RELEASE_FILE_HEADER.format(codename, " ".join(available_archs), contents))
     # delete old release signature files
     for filepath in [in_release, release_gpg]:
         if path.exists(filepath):
             remove(filepath)
     # sign release file
     if codename == "trusty":
         # trusty doesn't support SHA256 for InRelease
         gpg(['--clearsign', '--digest-algo', 'SHA1', '-o', in_release, release])
     else:
         gpg(['--clearsign', '--digest-algo', 'SHA256', '-o', in_release, release])
     gpg(['-abs', '-o', release_gpg, release])
Example #53
    def test_consume_file(self):
        indexer = vmware_studio_updates.VmwareStudioUpdatesIndexer(self.config, INDEX)
        indexer.initialise()
        src = 'host-power-tools-for-vmware-1.6.13-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        dst = path.join(indexer.base_directory, 'host-power-tools-for-vmware')
        utils.write_file(src, '')
        with mock.patch.object(vmware_studio_updates, 'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        self.assertTrue(path.exists(path.join(indexer.base_directory, 'host-power-tools-for-vmware', src)))
        log_execute_assert_success.assert_called_with(['unzip', '-qq', '-o', path.join(dst, src), '-d', dst])

        src = 'host-power-tools-for-vmware-1.7.4-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        utils.write_file(src, '')
        with mock.patch.object(vmware_studio_updates, 'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        log_execute_assert_success.assert_called_with(['unzip', '-qq', '-o', path.join(dst, src), '-d', dst])

        src = 'host-power-tools-for-vmware-1.7.3-vmware-esx-x86_OVF10_UPDATE_ZIP.zip'
        utils.write_file(src, '')
        with mock.patch.object(vmware_studio_updates, 'log_execute_assert_success') as log_execute_assert_success:
            indexer.consume_file(src, '', '')
        log_execute_assert_success.assert_not_called()

        self.assertTrue("1.7.4" in indexer._get_latest_update_file_in_directory(path.join(indexer.base_directory, 'host-power-tools-for-vmware')))