Example 1
def install(repository_archive, repository_spec, destination_info,
            display_callback):
    log.debug('installing/extracting repo archive %s to destination %s',
              repository_archive, destination_info)

    # An editable install is a symlink to an existing dir, so there is nothing to extract
    if destination_info.editable:
        all_installed_files = []
    else:
        all_installed_files = extract(
            repository_spec,
            repository_archive.info,
            content_path=destination_info.destination_root_dir,
            extract_archive_to_dir=destination_info.extract_archive_to_dir,
            tar_file=repository_archive.tar_file,
            display_callback=display_callback)

    install_datetime = datetime.datetime.utcnow()

    install_info_ = InstallInfo.from_version_date(
        repository_spec.version, install_datetime=install_datetime)

    # TODO: this save will need to be moved to a later step, after validating the install?
    # to_dict_version_strings() converts the un-YAML-able semantic_version.Version to a string
    install_info.save(install_info_.to_dict_version_strings(),
                      destination_info.install_info_path)

    installation_results = InstallationResults(
        install_info_path=destination_info.install_info_path,
        install_info=install_info_,
        installed_to_path=destination_info.path,
        installed_datetime=install_datetime,
        installed_files=all_installed_files)
    return installation_results
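
The dictionary passed to install_info.save() here round-trips through the load() helpers shown in Examples 3 and 4. A minimal sketch of the saved shape, assuming the key names those loaders read (all values below are invented for illustration):

# Hypothetical illustration only: key names follow the load() examples below,
# values are made up; to_dict_version_strings() renders the
# semantic_version.Version as a plain string.
saved_install_info = {
    'version': '1.2.3',
    'install_date': 'Mon Jan  1 00:00:00 2018',
    'install_date_iso': '2018-01-01T00:00:00',
}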
Example 2
    def _install_role_archive(self, content_tar_file, archive_meta, content_meta,
                              force_overwrite=False):

        if not content_meta.namespace:
            raise exceptions.GalaxyError('While installing a role from %s, no namespace was found. Try providing one with --namespace' %
                                         content_meta.src)

        label = "%s.%s" % (content_meta.namespace, content_meta.name)
        log.debug('content_meta: %s', content_meta)

        log.info('About to extract "%s" to %s', label, content_meta.path)

        tar_members = content_tar_file.members
        parent_dir = tar_members[0].name

        namespaced_content_path = '%s/%s/%s/%s' % (content_meta.namespace,
                                                   content_meta.name,
                                                   'roles',
                                                   content_meta.name)

        log.debug('namespaced role path: %s', namespaced_content_path)

        files_to_extract = []
        for member in tar_members:
            # rel_path ~ roles/some-role/meta/main.yml, for example
            rel_path = member.name[len(parent_dir) + 1:]

            namespaced_role_rel_path = os.path.join(content_meta.namespace, content_meta.name, 'roles',
                                                    content_meta.name, rel_path)
            files_to_extract.append({
                'archive_member': member,
                'dest_dir': content_meta.path,
                'dest_filename': namespaced_role_rel_path,
                'force_overwrite': force_overwrite})

        file_extractor = archive.extract_files(content_tar_file, files_to_extract)

        installed_paths = [x for x in file_extractor]
        installed = [(content_meta, installed_paths)]

        info_path = os.path.join(content_meta.path,
                                 namespaced_content_path,
                                 self.META_INSTALL)

        install_datetime = datetime.datetime.utcnow()

        content_install_info = InstallInfo.from_version_date(version=content_meta.version,
                                                             install_datetime=install_datetime)

        install_info.save(content_install_info, info_path)

        return installed
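
As a worked illustration of the path rewriting in _install_role_archive above (all names below are hypothetical), a tar member is stripped of the archive's parent directory and re-rooted under the namespaced roles/ layout:

import os

# Hypothetical values, illustrating the rel_path / dest_filename logic above.
parent_dir = 'some-role-1.0.0'
member_name = 'some-role-1.0.0/meta/main.yml'
namespace, name = 'mynamespace', 'some-role'

rel_path = member_name[len(parent_dir) + 1:]
# rel_path == 'meta/main.yml'
dest_filename = os.path.join(namespace, name, 'roles', name, rel_path)
# dest_filename == 'mynamespace/some-role/roles/some-role/meta/main.yml'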
Example 3
def load(data_or_file_object):
    log.debug('loading content install info from %s', data_or_file_object)

    info_dict = yaml.safe_load(data_or_file_object)

    log.debug('info_dict: %s', info_dict)
    install_info = InstallInfo(
        version=info_dict.get('version', None),
        install_date=info_dict.get('install_date', None),
        install_date_iso=info_dict.get('install_date_iso', None))

    log.debug('install_info: %s', install_info)
    return install_info
Example 4
def load(data_or_file_object):
    # log.debug('loading content install info from %s', getattr(data_or_file_object, 'name', data_or_file_object))

    info_dict = yaml.safe_load(data_or_file_object)

    # an empty .galaxy_install_info
    if info_dict is None:
        return None

    # log.debug('info_dict: %s', info_dict)
    install_info = InstallInfo(version=info_dict.get('version', None),
                               install_date=info_dict.get('install_date', None),
                               install_date_iso=info_dict.get('install_date_iso', None))

    # log.debug('install_info loaded from %s', install_info)
    return install_info
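
A minimal usage sketch for this loader; the YAML below is an assumed example of a .galaxy_install_info file, using the keys that load() reads:

# Hypothetical .galaxy_install_info contents; keys mirror those read by load().
sample = """
version: 1.2.3
install_date: Mon Jan  1 00:00:00 2018
install_date_iso: 2018-01-01 00:00:00
"""

info = load(sample)   # also accepts an open file object
# info is an InstallInfo (or None for an empty file), assuming InstallInfo
# stores the three fields verbatim.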
Example 5
    def _install_contents(self, content_tar_file, archive_parent_dir,
                          content_archive_type=None, content_meta=None,
                          content_sub_name=None,
                          force_overwrite=False):

        if not content_meta.namespace:
            raise exceptions.GalaxyError('While installing content from %s, no namespace was found. '
                                         'Try providing one with --namespace' % content_meta.src)

        all_installed_paths = []
        files_to_extract = []
        tar_members = content_tar_file.getmembers()
        parent_dir = tar_members[0].name

        for member in tar_members:
            rel_path = member.name[len(parent_dir) + 1:]
            namespaced_role_rel_path = os.path.join(content_meta.namespace, content_meta.name, rel_path)
            files_to_extract.append({
                'archive_member': member,
                'dest_dir': content_meta.path,
                'dest_filename': namespaced_role_rel_path,
                'force_overwrite': force_overwrite})

        file_extractor = archive.extract_files(content_tar_file, files_to_extract)

        installed_paths = [x for x in file_extractor]
        all_installed_paths.extend(installed_paths)

        namespaced_content_path = '%s/%s' % (content_meta.namespace,
                                             content_meta.name)

        info_path = os.path.join(content_meta.path,
                                 namespaced_content_path,
                                 self.META_INSTALL)

        install_datetime = datetime.datetime.utcnow()

        content_install_info = InstallInfo.from_version_date(version=content_meta.version,
                                                             install_datetime=install_datetime)

        install_info.save(content_install_info, info_path)
        return all_installed_paths
Example 6
def test_save(tmpdir):
    install_datetime = datetime.datetime.utcnow()
    install_info_ = InstallInfo.from_version_date(
        version='4.5.6', install_datetime=install_datetime)
    log.debug('install_info: %s', install_info_)

    temp_dir = tmpdir.mkdir('mazer_content_install_info_unit_test')
    temp_file = temp_dir.join('.galaxy_install_info')
    install_info.save(install_info_, temp_file.strpath)

    log.debug('tmpfile: %s', temp_file)

    res = temp_file.read()
    log.debug('res: %s', res)

    reloaded = yaml.safe_load(res)

    assert isinstance(reloaded, dict)
    assert reloaded['version'] == '4.5.6'
    assert reloaded['install_date_iso'] == install_datetime
Example 7
    def install(self, content_meta=None, force_overwrite=False):
        """extract the archive to the filesystem and write out install metadata.

        MUST be called after self.fetch()."""

        log.debug('install: content_meta=%s, force_overwrite=%s',
                  content_meta, force_overwrite)
        installed = []
        archive_parent_dir = None

        # FIXME: enum/constant/etc demagic
        # content_archive_type = 'multi'

        content_meta = content_meta or self.content_meta

        # FIXME: really need to move the fetch step elsewhere and do it before;
        #        install should get passed a content_archive (or something more abstract)
        # TODO: some useful exceptions for 'cant find', 'cant read', 'cant write'

        archive_path = self._fetch_results.get('archive_path', None)

        if not archive_path:
            raise exceptions.GalaxyClientError('No valid content data found for %s' % self.src)

        log.debug("installing from %s", archive_path)

        content_tar_file, archive_meta = content_archive.load_archive(archive_path)

        # TODO: do we still need to check the fetched version against the spec version?
        content_data = self._fetch_results.get('content', {})

        # If the requested namespace/version is different than the one we got via find()/fetch()...
        if content_data.get('fetched_version', content_meta.version) != content_meta.version:
            log.info('Version "%s" for %s was requested but fetch found version "%s"',
                     content_meta.version, '%s.%s' % (content_meta.namespace, content_meta.name),
                     content_data.get('fetched_version', content_meta.version))

            content_meta = attr.evolve(content_meta, version=content_data['fetched_version'])

        if content_data.get('content_namespace', content_meta.namespace) != content_meta.namespace:
            log.info('Namespace "%s" for %s was requested but fetch found namespace "%s"',
                     content_meta.namespace, '%s.%s' % (content_meta.namespace, content_meta.name),
                     content_data.get('content_namespace', content_meta.namespace))

            content_meta = attr.evolve(content_meta, namespace=content_data['content_namespace'])

        log.debug('archive_meta: %s', archive_meta)

        # We strip off any higher-level directories for all of the files contained within
        # the tar file here. The default is 'github_repo-target'. Gerrit instances, on the other
        # hand, do not have a parent directory at all.

        if not os.path.isdir(content_meta.path):
            log.debug('No content path (%s) found so creating it', content_meta.path)

            os.makedirs(content_meta.path)

        if archive_meta.archive_type == 'multi-content':
            self._content_type = 'all'
            self.display_callback('- extracting all content from "%s" to %s' % (content_meta.name, self.path))
        else:
            self.display_callback("- extracting all content in %s to content directories" % content_meta.name)

        log.info('Installing content from archive type: %s', archive_meta.archive_type)

        if archive_meta.archive_type == 'multi-content':
            log.info('Installing "%s" as an archive_type=%s content_type=%s install_type=%s',
                     content_meta.name, archive_meta.archive_type, content_meta.content_type,
                     self.content_install_type)

            log.info('About to extract content_type=%s "%s" version=%s to %s',
                     content_meta.content_type, content_meta.name, content_meta.version, content_meta.path)

            log.debug('content_meta: %s', content_meta)

            res = self._install_contents(content_tar_file,
                                         archive_parent_dir,
                                         archive_meta.archive_type,
                                         content_meta,
                                         content_sub_name=self.sub_name,
                                         force_overwrite=force_overwrite)
            installed.append((content_meta, res))

        elif archive_meta.archive_type == 'role':
            log.info('Installing "%s" as a role content archive and content_type=%s (role)', content_meta.name, content_meta.content_type)

            # log.debug('archive_parent_dir: %s', archive_parent_dir)

            installed_from_role = self._install_role_archive(content_tar_file,
                                                             archive_meta=archive_meta,
                                                             content_meta=content_meta,
                                                             force_overwrite=force_overwrite)
            installed.extend(installed_from_role)

        install_datetime = datetime.datetime.utcnow()

        repo_info_path = os.path.join(content_meta.path,
                                      self.content_meta.namespace,
                                      self.content_meta.name,
                                      '.galaxy_install_info')

        repo_install_info = InstallInfo.from_version_date(version=content_meta.version,
                                                          install_datetime=install_datetime)

        log.debug('repo_info_path: %s', repo_info_path)
        install_info.save(repo_install_info, repo_info_path)

        # return the parsed yaml metadata
        if archive_meta.archive_type == 'multi-content':
            self.display_callback("- all content was succssfully installed to %s" % self.path)
        else:
            self.display_callback("- %s was installed successfully to %s" % (str(self), self.path))

        # rm any temp files created when getting the content archive
        self._fetcher.cleanup()

        for item in installed:
            log.info('Installed content: %s', item[0])
            # log.debug('Installed files: %s', pprint.pformat(item[1]))

        return installed
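
The return value is a list of (content_meta, result) pairs, which the logging loop at the end iterates over. A minimal sketch of a caller consuming it (the installer instance is an assumption; the attribute names on content_meta follow those used in the method above):

# Hypothetical caller; `installer` is assumed to be an instance of the class
# this method belongs to, with fetch() already called.
for content_meta, result in installer.install(force_overwrite=False):
    print('installed %s.%s version %s' % (content_meta.namespace,
                                          content_meta.name,
                                          content_meta.version))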
Example 8
    def _install_for_content_types(self, content_tar_file, archive_parent_dir,
                                   content_archive_type=None, content_meta=None,
                                   content_types_to_install=None,
                                   content_sub_name=None,
                                   force_overwrite=False):

        all_installed_paths = []
        content_types_to_install = content_types_to_install or []

        tar_file_members = content_tar_file.getmembers()

        for install_content_type in content_types_to_install:
            log.debug('Installing %s type content from %s', install_content_type, content_tar_file)

            # TODO: install_for_content_type()  - handle one content type
            #       _install_for_content_type_role() - role specific impl
            # TODO:  install_contents()  - iterator over all the contents of a content type (ie, 'roles')
            # TODO:   install_content()  - install a single content  (ie, a role)
            #         _install_content_role()  - role specific impl of install_content
            content_type_member_matches = archive.filter_members_by_content_type(tar_file_members,
                                                                                 content_archive_type,
                                                                                 content_type=install_content_type)

            # filter by path built from sub_dir and sub_name for 'modules/elasticsearch_plugin.py'
            content_sub_dir = content_meta.content_sub_dir or content.CONTENT_TYPE_DIR_MAP.get(install_content_type, '')

            label = "%s.%s" % (content_meta.namespace, content_meta.name)
            log.debug('content_meta: %s', content_meta)

            log.info('About to extract "%s" to %s', label, content_meta.path)
            log.info('content_sub_dir: %s', content_sub_dir)

            # TODO: extract_file_list_to_path(content_tar_file, files_to_extract, extract_to_path, force_overwrite)
            # TODO: split into lists of each content objects (ie, each role, instead of all roles) and
            #       install them one by one

            parent_dir = content_tar_file.members[0].name
            content_names = set()

            for content_type_member_match in content_type_member_matches:
                path_parts = content_type_member_match.name.split('/')
                # 0 is archive parent dir, 1 is content type (roles, modules, etc)
                # 2 is the content_name (role name, pluginfoo.py etc)
                content_names.add(path_parts[2])

            log.debug('content_names: %s', content_names)

            # extract each content individually
            for content_name in content_names:
                files_to_extract = []

                # TODO: This only works for roles/apbs that have a dir matching the content
                #       name. For other content like plugins or modules, we need to match
                #       on parent_dir/content_sub_dir/content_name (/modules/my_module.py)

                match_pattern = '%s/%s/%s/*' % (parent_dir, content_sub_dir, content_name)

                member_matches = archive.filter_members_by_fnmatch(tar_file_members,
                                                                   match_pattern)

                namespaced_content_path = '%s/%s/%s/%s' % (content_meta.namespace,
                                                           content_meta.name,
                                                           content_sub_dir,
                                                           content_name)

                log.debug('Extracting "%s" to %s', label, namespaced_content_path)

                for member_match in member_matches:
                    # archive_member, dest_dir, dest_filename, force_overwrite

                    # rel_path ~ roles/some-role/meta/main.yml, for example
                    rel_path = member_match.name[len(parent_dir) + 1:]

                    # need to replace the role name in the archive with the role name
                    # that includes the galaxy namespace

                    namespaced_role_rel_path = rel_path.replace('%s/%s' % (content_sub_dir,
                                                                           content_name),
                                                                namespaced_content_path,
                                                                1)

                    extract_info = {'archive_member': member_match,
                                    'dest_dir': content_meta.path,
                                    'dest_filename': namespaced_role_rel_path,
                                    'force_overwrite': force_overwrite}

                    files_to_extract.append(extract_info)

                file_extractor = archive.extract_files(content_tar_file, files_to_extract)

                installed_paths = [x for x in file_extractor]
                all_installed_paths.extend(installed_paths)

                if install_content_type in self.REQUIRES_META_MAIN:
                    info_path = os.path.join(content_meta.path,
                                             namespaced_content_path,
                                             self.META_INSTALL)

                    install_datetime = datetime.datetime.utcnow()

                    content_install_info = InstallInfo.from_version_date(version=content_meta.version,
                                                                         install_datetime=install_datetime)

                    install_info.save(content_install_info, info_path)
                    # self._write_galaxy_install_info(content_meta, info_path)

        return all_installed_paths
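
A worked illustration of the rel_path rewrite above (all names below are hypothetical): the content_sub_dir/content_name prefix inside the archive is replaced once with the namespaced destination path:

# Hypothetical member name, illustrating the replace() above.
parent_dir = 'repo-master'
content_sub_dir, content_name = 'roles', 'myrole'
namespaced_content_path = 'mynamespace/myrepo/roles/myrole'

member_name = 'repo-master/roles/myrole/tasks/main.yml'
rel_path = member_name[len(parent_dir) + 1:]
# rel_path == 'roles/myrole/tasks/main.yml'

namespaced_role_rel_path = rel_path.replace('%s/%s' % (content_sub_dir, content_name),
                                            namespaced_content_path,
                                            1)
# namespaced_role_rel_path == 'mynamespace/myrepo/roles/myrole/tasks/main.yml'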