Example #1
0
def get_content_version(content_data, version, content_versions, content_content_name):
    '''Find the requested content version and return its original version string.

    content_data is a dict based on /api/v1/content/13 for ex (currently unused here)
    version is the version string asked for by user; falsy or 'master' means "latest"
    content_versions is a list of version strings in order
    content_content_name is the name of the content specified by user, used for
    log/error messages only

    Returns the *original* (un-normalized) version string so callers can match
    it against the real tags on the server side.

    Raises exceptions.GalaxyError if a specific version was requested but the
    available version list is empty or does not contain it.
    '''

    log.debug('%s want ver: %s', content_content_name, version)
#    log.debug('%s vers avail: %s',
#              content_content_name, json.dumps(content_versions, indent=2))

    # normalize versions, but also build a map of the normalized version string to the orig string
    available_normalized_versions, norm_to_orig_map = normalize_versions(content_versions)

    # verify that the normalized versions are valid semver now so that we dont worry about it
    # in the sort
    available_versions, dummy = \
        validate_versions(available_normalized_versions)

    normalized_version = normalize_version_string(version)

#    log.debug('normalized_version: %s', normalized_version)
#    log.debug('avail_normalized_versions: %s', json.dumps(available_normalized_versions, indent=4))

    # we specified a particular version is required so look for it in available versions
    if version and version != 'master':
        if not available_versions:
            # FIXME: should we show the actual available versions or the available
            #        versions we searched in?  act: ['v1.0.0', '1.1'] nor: ['1.0.0', '1.1']
            msg = "- The list of available versions for %s is empty (%s)." % \
                (content_content_name or 'content', available_versions)
            raise exceptions.GalaxyError(msg)

        if str(normalized_version) not in available_versions:
            # TODO: how do we msg 'couldn't find the version you specified
            #       in actual version tags or ones we made up without the leading v'
            msg = "- the specified version (%s) of %s was not found in the list of available versions (%s)." % \
                (version, content_content_name or 'content', available_versions)
            raise exceptions.GalaxyError(msg)

        # if we get here, 'version' is in available_normalized_versions
        # return the exact match version since it was available
        orig_version = norm_to_orig_map[normalized_version]
        log.debug('%s requested ver: %s, matched: %s, using real ver: %s ', content_content_name, version, normalized_version, orig_version)
        return orig_version

    # At this point, we have a list of the available versions. The available versions have
    # been normalized (leading 'v' or 'V' stripped off).
    # No specific version was requested, so we return the latest one.
    content_version = get_latest_version(available_versions, content_data)

    log.debug('%s using latest ver: %s', content_content_name, content_version)
    return content_version
Example #2
0
def chose_repository_fetch_method(repository_spec_string, editable=False):
    """Decide how a repository spec string should be fetched.

    repository_spec_string may be an SCM url, a local file path
    (optionally followed by ',key=value' fields), a remote URL, or a
    'namespace.name' galaxy name. With editable=True, a local directory
    path is also accepted.

    Returns one of the FetchMethods values.
    Raises exceptions.GalaxyError if no fetch method can be determined.
    """
    log.debug('repository_spec_string: %s', repository_spec_string)

    if is_scm(repository_spec_string):
        # create tar file from scm url
        return FetchMethods.SCM_URL

    # only the part before the first comma can be a path; the rest can be
    # key=value fields like ',version=1.2.3'
    potential_filename = repository_spec_string.split(',', 1)[0]

    if editable and os.path.isdir(potential_filename):
        return FetchMethods.EDITABLE

    if os.path.isfile(potential_filename):
        # installing a local tar.gz
        return FetchMethods.LOCAL_FILE

    if '://' in repository_spec_string:
        return FetchMethods.REMOTE_URL

    # A galaxy name requires a dot ('namespace.name'). The former
    # len(split('.', 1)) == 2 check was redundant: it is always true
    # whenever '.' is present.
    if '.' in repository_spec_string:
        return FetchMethods.GALAXY_URL

    msg = (
        'Failed to determine fetch method for content spec %s. '
        'Expecting a Galaxy name, SCM path, remote URL, path to a local '
        'archive file, or -e option and a directory path' %
        repository_spec_string)
    raise exceptions.GalaxyError(msg)
Example #3
0
def get(galaxy_context, repository_spec):
    """Determine how to download a repo, build a fetch instance, and return it.

    Dispatches on repository_spec.fetch_method to the matching concrete
    fetcher (editable, scm url, local file, remote url, or galaxy url).

    Raises exceptions.GalaxyError for an unrecognized fetch_method.
    """

    fetcher = None

    # FIXME: note that ignore_certs for the galaxy
    # server(galaxy_context.server['ignore_certs'])
    # does not really imply that the repo archive download should ignore certs as well
    # (galaxy api server vs cdn) but for now, we use the value for both

    if repository_spec.fetch_method == FetchMethods.EDITABLE:
        fetcher = editable.EditableFetch(repository_spec=repository_spec,
                                         galaxy_context=galaxy_context)
    elif repository_spec.fetch_method == FetchMethods.SCM_URL:
        fetcher = scm_url.ScmUrlFetch(repository_spec=repository_spec)
    elif repository_spec.fetch_method == FetchMethods.LOCAL_FILE:
        # the file is a tar, so open it that way and extract it
        # to the specified (or default) content directory
        fetcher = local_file.LocalFileFetch(repository_spec)
    elif repository_spec.fetch_method == FetchMethods.REMOTE_URL:
        fetcher = remote_url.RemoteUrlFetch(
            repository_spec=repository_spec,
            validate_certs=not galaxy_context.server['ignore_certs'])
    elif repository_spec.fetch_method == FetchMethods.GALAXY_URL:
        fetcher = galaxy_url.GalaxyUrlFetch(repository_spec=repository_spec,
                                            galaxy_context=galaxy_context)
    else:
        # The original passed the %s args directly to the exception
        # (logging style), so they were never interpolated into the
        # message; format explicitly instead. Also fixes 'approriate'.
        raise exceptions.GalaxyError(
            'No appropriate content fetcher found for %s %s' %
            (repository_spec.scm, repository_spec.src))

    log.debug('Using fetcher: %s for repository_spec: %r', fetcher,
              repository_spec)

    return fetcher
Example #4
0
def choose_repository_fetch_method(repository_spec_string, editable=False):
    """Decide which FetchMethods value applies to a repository spec string.

    The spec may be an SCM url, an http/https/ftp URL, a local archive
    file (plain path, 'file://' scheme, or with ',key=value' fields
    appended), a local directory (only with editable=True), or a
    'namespace.name' galaxy name.

    Returns a FetchMethods value.
    Raises exceptions.GalaxyError if no method can be determined.
    """
    log.debug('repository_spec_string: %s', repository_spec_string)

    parsed_url = parse_as_url(repository_spec_string)
    log.debug('parsed_url: %s', parsed_url)

    # TODO: figure out of SCM_URL makes any sense for installing artifacts
    if is_scm(parsed_url):
        # create tar file from scm url
        return FetchMethods.SCM_URL

    if parsed_url.scheme in ('http', 'https', 'ftp'):
        return FetchMethods.REMOTE_URL

    # for cases like:
    #   '/tmp/ns-name-1.2.3.tar.gz'
    #   'file://home/someuser/Downloads/ns-name-1.2.3.tar.gz(2)'
    #   'file_in_cwd,version=2.111.22'
    if parsed_url.scheme == 'file' or is_local_file(parsed_url.path):
        return FetchMethods.LOCAL_FILE

    if editable and is_local_dir(parsed_url.path):
        return FetchMethods.EDITABLE

    # A galaxy name requires a dot ('namespace.name'). The former
    # len(split('.', 1)) == 2 check was redundant: it is always true
    # whenever '.' is present.
    url_before_comma = strip_comma_fields(parsed_url.path)
    if '.' in url_before_comma:
        return FetchMethods.GALAXY_URL

    msg = ('Failed to determine fetch method for content spec: %s. '
           'Expecting a Galaxy name, SCM path, remote URL, path to a local '
           'archive file, or -e option and a directory path' %
           repository_spec_string)
    raise exceptions.GalaxyError(msg)
Example #5
0
def extract(repository_spec,
            collections_path,
            extract_archive_to_dir,
            tar_file,
            force_overwrite=False,
            display_callback=None):
    """Extract every member of a collection artifact tar into a directory.

    repository_spec identifies the collection being installed; it must
    have a namespace.
    collections_path is the collections root dir (used for logging only).
    extract_archive_to_dir is the directory members are extracted into,
    preserving each member's archive-relative path.
    tar_file is an opened tar archive object exposing a .members list.
    force_overwrite allows clobbering files that already exist on disk.
    display_callback is accepted for interface compatibility but unused.

    Returns the list of installed file paths.
    Raises exceptions.GalaxyError if repository_spec has no namespace.
    """

    # TODO: move to content info validate step in install states?
    if not repository_spec.namespace:
        # TODO: better error
        # (fixed the stray space before the comma in this message)
        raise exceptions.GalaxyError(
            'While installing a collection, no namespace was found. Try providing one with --namespace'
        )

    log.debug('About to extract "%s" to collections_path %s', repository_spec,
              collections_path)

    # TODO: need to support deleting all content in the dirs we are targetting
    #       first (and/or delete the top dir) so that we clean up any files not
    #       part of the content. At the moment, this will add or update the files
    #       that are in the archive, but it will not delete files on the fs that are
    #       not in the archive
    files_to_extract = []
    for member in tar_file.members:
        # members keep their archive-relative path under the dest dir
        extract_to_filename_path = os.path.join(extract_archive_to_dir,
                                                member.name)

        files_to_extract.append({
            'archive_member': member,
            # Note: for trad roles, we are extract the top level of the archive into
            #       a sub path of the destination
            'dest_dir': extract_archive_to_dir,
            'dest_filename': extract_to_filename_path,
            'force_overwrite': force_overwrite
        })

    # extract_files yields installed paths lazily; materialize them.
    # The intermediate all_installed_paths/extend accumulator was redundant.
    all_installed_paths = list(archive.extract_files(tar_file, files_to_extract))

    log.debug('Extracted %s files from %s to %s', len(all_installed_paths),
              repository_spec.label, extract_archive_to_dir)

    # TODO: InstallResults object? installedPaths, InstallInfo, etc?
    return all_installed_paths
Example #6
0
def raise_without_ignore(ignore_errors, msg=None, rc=1):
    """Raise a GalaxyError unless errors are being ignored.

    ignore_errors: when truthy, do nothing (the caller asked to skip
        failed roles via --ignore-errors).
    msg: optional message to prepend to the standard ignore-errors blurb.
    rc: unused; kept only for backward compatibility with existing
        callers. (The previous docstring claimed this function exits with
        the given return code, but it raises an exception instead.)
    """
    ignore_error_blurb = '- you can use --ignore-errors to skip failed roles and finish processing the list.'
    if not ignore_errors:
        message = ignore_error_blurb
        if msg:
            message = '%s:\n%s' % (msg, ignore_error_blurb)
        # TODO: some sort of ignoreable exception
        raise exceptions.GalaxyError(message)
Example #7
0
    def _install_role_archive(self, content_tar_file, archive_meta, content_meta,
                              force_overwrite=False):
        """Extract a traditional role archive into a namespaced on-disk path.

        Files are extracted under
        <content_meta.path>/<namespace>/<name>/roles/<name>/ and an
        install-info file (self.META_INSTALL) recording the version and
        install time is written alongside them.

        Returns a list of (content_meta, installed_paths) tuples.
        Raises exceptions.GalaxyError if content_meta has no namespace.
        """

        if not content_meta.namespace:
            raise exceptions.GalaxyError('While installing a role from %s, no namespace was found. Try providing one with --namespace' %
                                         content_meta.src)

        label = "%s.%s" % (content_meta.namespace, content_meta.name)
        log.debug('content_meta: %s', content_meta)

        log.info('About to extract "%s" to %s', label, content_meta.path)

        tar_members = content_tar_file.members
        # assumes the first member is the archive's top-level dir
        # ('ansible-role-foo-1.2.3' for ex) -- TODO confirm for all archives
        parent_dir = tar_members[0].name

        namespaced_content_path = '%s/%s/%s/%s' % (content_meta.namespace,
                                                   content_meta.name,
                                                   'roles',
                                                   content_meta.name)

        log.debug('namespaced role path: %s', namespaced_content_path)

        files_to_extract = []
        for member in tar_members:
            # rel_path ~  roles/some-role/meta/main.yml for ex
            # (strip the leading '<parent_dir>/' prefix)
            rel_path = member.name[len(parent_dir) + 1:]

            namespaced_role_rel_path = os.path.join(content_meta.namespace, content_meta.name, 'roles',
                                                    content_meta.name, rel_path)
            files_to_extract.append({
                'archive_member': member,
                'dest_dir': content_meta.path,
                'dest_filename': namespaced_role_rel_path,
                'force_overwrite': force_overwrite})

        file_extractor = archive.extract_files(content_tar_file, files_to_extract)

        # iterating the extractor performs the extraction and yields paths
        installed_paths = [x for x in file_extractor]
        installed = [(content_meta, installed_paths)]

        info_path = os.path.join(content_meta.path,
                                 namespaced_content_path,
                                 self.META_INSTALL)

        install_datetime = datetime.datetime.utcnow()

        content_install_info = InstallInfo.from_version_date(version=content_meta.version,
                                                             install_datetime=install_datetime)

        # record what version was installed and when, for later upgrades/removal
        install_info.save(content_install_info, info_path)

        return installed
Example #8
0
    def _install_contents(self, content_tar_file, archive_parent_dir,
                          content_archive_type=None, content_meta=None,
                          content_sub_name=None,
                          force_overwrite=False):
        """Extract an archive's members under <namespace>/<name>/ and record install info.

        archive_parent_dir, content_archive_type and content_sub_name are
        accepted for interface compatibility but not used in this body.

        Returns the list of installed file paths.
        Raises exceptions.GalaxyError if content_meta has no namespace.
        """

        if not content_meta.namespace:
            raise exceptions.GalaxyError('While installing content from %s, no namespace was found. '
                                         'Try providing one with --namespace' % content_meta.src)

        all_installed_paths = []
        files_to_extract = []
        tar_members = content_tar_file.getmembers()
        # assumes the first member is the archive's top-level dir
        # -- TODO confirm for all archive layouts
        parent_dir = tar_members[0].name

        for member in tar_members:
            # strip the leading '<parent_dir>/' prefix from each member path
            rel_path = member.name[len(parent_dir) + 1:]
            namespaced_role_rel_path = os.path.join(content_meta.namespace, content_meta.name, rel_path)
            files_to_extract.append({
                'archive_member': member,
                'dest_dir': content_meta.path,
                'dest_filename': namespaced_role_rel_path,
                'force_overwrite': force_overwrite})

        file_extractor = archive.extract_files(content_tar_file, files_to_extract)

        # iterating the extractor performs the extraction and yields paths
        installed_paths = [x for x in file_extractor]
        all_installed_paths.extend(installed_paths)

        namespaced_content_path = '%s/%s' % (content_meta.namespace,
                                             content_meta.name)

        info_path = os.path.join(content_meta.path,
                                 namespaced_content_path,
                                 self.META_INSTALL)

        install_datetime = datetime.datetime.utcnow()

        content_install_info = InstallInfo.from_version_date(version=content_meta.version,
                                                             install_datetime=install_datetime)

        # record what version was installed and when, for later upgrades/removal
        install_info.save(content_install_info, info_path)
        return all_installed_paths
Example #9
0
def resolve(spec_data):
    """Fill in 'namespace' and 'name' in spec_data from its dotted 'src'.

    A galaxy content spec must look like 'namespace.name' (at least one
    dot). The namespace is the text before the first dot; if the user did
    not supply an explicit name, the second dotted field becomes the name.

    Returns the (mutated) spec_data dict.
    Raises exceptions.GalaxyError if 'src' contains no dot.
    """
    src = spec_data['src']
    name_parts = src.split('.')

    # enforce the galaxy content_spec specific rule about requiring a dot in namespace.name
    if len(name_parts) < 2:
        raise exceptions.GalaxyError('A galaxy content spec must have at least a namespace, a dot, and a name but "%s" does not.'
                                     % spec_data.get('spec_string'))

    # At this point there are always >= 2 dotted parts, so the previous
    # 'else' branch (single-part name) was unreachable and has been removed.
    spec_data['namespace'] = name_parts[0]

    # use the second part of namespace.name if there wasnt an explicit name=foo
    if not spec_data['name']:
        spec_data['name'] = name_parts[1]

    return spec_data
Example #10
0
def get_repository_version(repository_data, requirement_spec,
                           repository_versions):
    '''Select the repository version that best satisfies requirement_spec.

    repository_data is a dict based on /api/v1/repositories/13 for ex
    (currently unused here)
    requirement_spec carries the name/label and the semver version_spec
    asked for by the user
    repository_versions is a list of version strings in order

    Returns the *original* (un-normalized) version string of the best match.
    Raises exceptions.GalaxyError if no versions are available or none
    satisfy the spec, and exceptions.GalaxyClientError if more than one
    original version string normalizes to the matching version.
    '''

    log.debug('%s wants ver: %s type: %s',
              requirement_spec.name, requirement_spec.version_spec,
              type(requirement_spec.version_spec))

    # normalize versions, but also build a map of the normalized version string to the orig string
    available_normalized_versions, norm_to_orig_map = normalize_versions(
        repository_versions)

    # verify that the normalized versions are valid semver now so that we dont worry about it
    # in the sort
    available_versions, dummy = \
        validate_versions(available_normalized_versions)

    # an empty version list is an error regardless of the requested spec
    if not available_versions:
        # FIXME: should we show the actual available versions or the available
        #        versions we searched in?  act: ['v1.0.0', '1.1'] nor: ['1.0.0', '1.1']
        msg = "- The list of available versions for %s is empty (%s)." % \
            (requirement_spec.label or 'content', available_versions)
        raise exceptions.GalaxyError(msg)

    semver_available_versions = [
        semantic_version.Version(ver) for ver in available_versions
    ]

    # version_spec.select() picks the highest version satisfying the spec
    latest_version = requirement_spec.version_spec.select(
        semver_available_versions)
    if latest_version is None:
        # TODO: how do we msg 'couldn't find the version you specified
        #       in actual version tags or ones we made up without the leading v'
        msg = "- the specified version spec (%s) of %s was not found in the list of available versions (%s)." % \
            (requirement_spec.version_spec, requirement_spec.label or 'content', available_versions)
        raise exceptions.GalaxyError(msg)

    # if we get here, 'version' is in available_normalized_versions
    # return the exact match version since it was available
    latest_version_str = str(latest_version)

    # have to use list of tuples here since we can have multiple identical keys with different values
    norm_to_orig = [x for x in norm_to_orig_map if x[0] == latest_version_str]

    # e.g. both 'v1.0.0' and '1.0.0' tags normalizing to '1.0.0' is ambiguous
    if len(norm_to_orig) > 1:
        unnormal_versions = [ver_alias[1] for ver_alias in norm_to_orig]
        raise exceptions.GalaxyClientError(
            'There are ambiguous and contradicting version numbers (%s) that match version spec "%s"'
            %
            (', '.join(unnormal_versions), str(requirement_spec.version_spec)))

    orig_version_str = norm_to_orig[0][1]

    log.debug('%s requested ver: %s, matched: %s, using real ver: %s ',
              requirement_spec.label, requirement_spec.version_spec,
              latest_version, orig_version_str)

    return orig_version_str
Example #11
0
def extract(repository_spec,
            repository_archive_info,
            content_path,
            extract_archive_to_dir,
            tar_file,
            force_overwrite=False,
            display_callback=None):
    """Extract a repository archive, stripping its top-level dir, into a directory.

    repository_spec identifies the repository; it must have a namespace.
    repository_archive_info describes the archive (its archive_type is
    used for logging).
    content_path is for ex ~/.ansible/content (used for logging only).
    extract_archive_to_dir is the directory the members are extracted
    into, with the archive's top-level directory stripped from each path.
    tar_file is an opened tar archive object exposing a .members list.
    force_overwrite allows clobbering files that already exist on disk.
    display_callback is accepted for interface compatibility but unused.

    Returns the list of installed file paths.
    Raises exceptions.GalaxyError if repository_spec has no namespace.
    """

    # TODO: move to content info validate step in install states?
    if not repository_spec.namespace:
        # TODO: better error
        # (fixed the stray space before the comma in this message)
        raise exceptions.GalaxyError(
            'While installing a role, no namespace was found. Try providing one with --namespace'
        )

    log.debug('About to extract %s "%s" to %s',
              repository_archive_info.archive_type, repository_spec.label,
              content_path)

    tar_members = tar_file.members

    # This assumes the first entry in the tar archive / tar members
    # is the top dir of the content, ie 'my_content_name-branch' for collection
    # or 'ansible-role-my_content-1.2.3' for a traditional role.
    parent_dir = tar_members[0].name

    # TODO: need to support deleting all content in the dirs we are targetting
    #       first (and/or delete the top dir) so that we clean up any files not
    #       part of the content. At the moment, this will add or update the files
    #       that are in the archive, but it will not delete files on the fs that are
    #       not in the archive
    files_to_extract = []
    for member in tar_members:
        # rel_path ~  roles/some-role/meta/main.yml for ex
        # (strip the leading '<parent_dir>/' prefix)
        rel_path = member.name[len(parent_dir) + 1:]

        extract_to_filename_path = os.path.join(extract_archive_to_dir,
                                                rel_path)

        files_to_extract.append({
            'archive_member': member,
            # Note: for trad roles, we are extract the top level of the archive into
            #       a sub path of the destination
            'dest_dir': extract_archive_to_dir,
            'dest_filename': extract_to_filename_path,
            'force_overwrite': force_overwrite
        })

    # extract_files yields installed paths lazily; materialize them.
    # The intermediate all_installed_paths/extend accumulator was redundant.
    all_installed_paths = list(archive.extract_files(tar_file, files_to_extract))

    log.debug('Extracted %s files from %s %s to %s', len(all_installed_paths),
              repository_archive_info.archive_type, repository_spec.label,
              extract_archive_to_dir)

    # TODO: InstallResults object? installedPaths, InstallInfo, etc?
    return all_installed_paths
Example #12
0
    def find(self):
        '''Find a collection

        This method does 3 things:

            1. Determine if the requested collection exists (GET /api/v2/collections/{namespace}/{name})
            2. Get the available versions (CollectionVersion) of the collection and
               select the "best" one. (GET /api/v2/collections/{namespace}/{name}/versions/)
            3. Get the details of the CollectionVersion including 'download_url' (GET /api/v2/collections/{namespace}/{names}/versions/{version}/,
               available as the 'href' in each CollectionVersion)

        It then returns the info about the Collection and CollectionVersion including download_url to be used by fetch().

        Raises GalaxyClientError if the collection or its version list is
        not found, GalaxyCouldNotFindAnswerForRequirement if no version
        satisfies the requirement spec, and GalaxyError if the chosen
        CollectionVersion has no download_url.'''

        api = GalaxyAPI(self.galaxy_context)

        namespace = self.requirement_spec.namespace
        collection_name = self.requirement_spec.name

        log.debug('Querying %s for namespace=%s, name=%s',
                  self.galaxy_context.server['url'], namespace,
                  collection_name)

        # TODO: extract parsing of cli content sorta-url thing and add better tests

        # FIXME: Remove? We kind of need the actual Collection detail yet (ever?)
        collection_detail_url = '{base_api_url}/v2/collections/{namespace}/{name}/'.format(
            base_api_url=api.base_api_url,
            namespace=urlquote(namespace),
            name=urlquote(collection_name))

        log.debug('collection_detail_url: %s', collection_detail_url)

        collection_detail_data = api.get_object(href=collection_detail_url)

        if not collection_detail_data:
            raise exceptions.GalaxyClientError(
                "- sorry, %s was not found on %s." %
                (self.requirement_spec.label, api.api_server))

        versions_list_url = collection_detail_data.get('versions_url', None)

        collection_is_deprecated = collection_detail_data.get(
            'deprecated', False)

        # TODO: if versions ends up with a 'related' we could follow it instead of specific
        #       get_collection_version_list()
        # example results
        # [{
        #   "version": "2.4.5",
        #   "href": "/api/v2/collections/ansible/k8s/versions/2.4.5/",
        #  },
        #  {
        #   "version": "1.2.3",
        #   "href": "/api/v2/collections/ansible/k8s/versions/1.2.3/",
        #  }]
        log.debug('Getting collectionversions for %s.%s from %s', namespace,
                  collection_name, versions_list_url)

        collection_version_list_data = api.get_object(versions_list_url)

        log.debug('collectionvertlist data:\n%s', collection_version_list_data)

        if not collection_version_list_data:
            raise exceptions.GalaxyClientError(
                "- sorry, %s was not found on %s." %
                (self.requirement_spec.label, api.api_server))

        collection_version_strings = [
            a.get('version') for a in collection_version_list_data
            if a.get('version', None)
        ]

        # a Version() component of the CollectionVersion
        collection_versions_versions = [
            semantic_version.Version(ver) for ver in collection_version_strings
        ]

        # No match returns None
        best_version = self.requirement_spec.version_spec.select(
            collection_versions_versions)

        # Find the rest of the info for the collectionversion that is the best version
        # linear search
        best_collectionversion = next(
            (
                cv for cv in collection_version_list_data
                # build a Version of both to for full semver matching
                if semantic_version.Version(cv['version']) == best_version),
            {})

        log.debug('best_collectionversion: %s', best_collectionversion)

        # We did not find a collection that meets this spec
        if not best_collectionversion:
            log.debug(
                'Unable to find a collection that matches the spec: %s from available versions: %s',
                self.requirement_spec,
                [ver['version'] for ver in collection_version_list_data])
            raise exceptions.GalaxyCouldNotFindAnswerForRequirement(
                'Unable to find a collection that matches the spec: %s' %
                self.requirement_spec.label,
                requirement_spec=self.requirement_spec)

        best_collectionversion_detail_data = api.get_object(
            href=best_collectionversion.get('href', None))

        download_url = best_collectionversion_detail_data.get(
            'download_url', None)

        log.debug('download_url for %s.%s: %s', namespace, collection_name,
                  download_url)

        if not download_url:
            # The original message referred to 'external_url' on a
            # 'Repository', but the check here is for download_url on the
            # CollectionVersion detail data.
            raise exceptions.GalaxyError(
                'no download_url info on the CollectionVersion from %s' %
                self.requirement_spec.label)

        # collectionversion_metadata = best_collectionversion_detail_data.get('metadata', None)
        # log.debug('collectionversion_metadata: %s', collectionversion_metadata)

        # TODO: raise exceptions if API requests are empty

        results = {
            'content': {
                'galaxy_namespace': namespace,
                'repo_name': collection_name,
                'version': best_version
            },
            'custom': {
                'download_url': download_url,
                'collection_is_deprecated': collection_is_deprecated
            },
        }

        return results
Example #13
0
    def find(self):
        '''Find a repository and select its best matching version.

        Looks up the repo by namespace/name on the galaxy server, fetches
        its RepositoryVersion list, picks the best match for
        self.requirement_spec's version spec, and returns a results dict
        with the chosen version, the repo's external_url to download
        from, and the raw repo/version data.

        Raises GalaxyClientError if the repo is not found, and
        GalaxyError if the repo has no external_url.
        '''
        api = GalaxyAPI(self.galaxy_context)

        namespace = self.requirement_spec.namespace
        repo_name = self.requirement_spec.name

        log.debug('Querying %s for namespace=%s, name=%s',
                  self.galaxy_context.server['url'], namespace, repo_name)

        # TODO: extract parsing of cli content sorta-url thing and add better tests

        # FIXME: exception handling
        repo_data = api.lookup_repo_by_name(namespace, repo_name)

        if not repo_data:
            raise exceptions.GalaxyClientError(
                "- sorry, %s was not found on %s." %
                (self.requirement_spec.label, api.api_server))

        # FIXME - Need to update our API calls once Galaxy has them implemented
        related = repo_data.get('related', {})

        repo_versions_url = related.get('versions', None)

        # FIXME: exception handling
        repoversions = api.fetch_content_related(repo_versions_url)

        # keep only entries that actually carry a version string
        content_repo_versions = [
            a.get('version') for a in repoversions if a.get('version', None)
        ]

        repo_version_best = repository_version.get_repository_version(
            repo_data,
            requirement_spec=self.requirement_spec,
            repository_versions=content_repo_versions)

        # get the RepositoryVersion obj (or its data anyway)
        _repoversion = select_repository_version(repoversions,
                                                 repo_version_best)

        # Note: download_url can point anywhere...
        external_url = repo_data.get('external_url', None)

        if not external_url:
            raise exceptions.GalaxyError(
                'no external_url info on the Repository object from %s' %
                self.requirement_spec.label)

        results = {
            'content': {
                'galaxy_namespace': namespace,
                'repo_name': repo_name,
                'version': _repoversion.get('version')
            },
            'requirement_spec_version_spec':
            self.requirement_spec.version_spec,
            'custom': {
                'external_url': external_url,
                'repo_data': repo_data,
                'repoversion': _repoversion,
            },
        }

        return results
Example #14
0
    def find(self):
        '''Find a piece of galaxy content and resolve its version.

        Parses self.content_spec into namespace/repo/content names, looks
        up the repo on the galaxy server, fetches its version list, picks
        the version matching self.content_version, and returns a results
        dict with the names, chosen repoversion, external_url, and the
        raw API data needed by fetch().

        Raises GalaxyClientError if the repo is not found and GalaxyError
        if it has no external_url.
        '''
        api = GalaxyAPI(self.galaxy_context)

        # FIXME - Need to update our API calls once Galaxy has them implemented
        content_username, repo_name, content_name = parse_content_name(self.content_spec)

        log.debug('Querying %s for namespace=%s, name=%s', self.galaxy_context.server['url'], content_username, repo_name)

        # TODO: extract parsing of cli content sorta-url thing and add better tests
        # fall back to the content name when no separate repo name was given
        repo_name = repo_name or content_name

        # FIXME: exception handling
        repo_data = api.lookup_repo_by_name(content_username, repo_name)

        if not repo_data:
            raise exceptions.GalaxyClientError("- sorry, %s was not found on %s." % (self.content_spec,
                                                                                     api.api_server))

        # FIXME: ?
        # if repo_data.get('role_type') == 'APP#':
            # Container Role
        #    self.display_callback("%s is a Container App role, and should only be installed using Ansible "
        #                          "Container" % content_name, level='warning')

        # FIXME - Need to update our API calls once Galaxy has them implemented
        related = repo_data.get('related', {})

        repo_versions_url = related.get('versions', None)

        log.debug('related=%s', related)

        # FIXME: exception handling
        repoversions = api.fetch_content_related(repo_versions_url)

        # related_repo_url = related.get('repository', None)
        # log.debug('related_repo_url: %s', related_repo_url)
        # related_content_url = related.get('content', None)
        # log.debug('related_content_url: %s', related_content_url)

        # content_repo = None
        # if related_content_url:
        #     content_repo = api.fetch_content_related(related_content_url)
        # keep only entries that actually carry a version string
        content_repo_versions = [a.get('version') for a in repoversions if a.get('version', None)]

        # log.debug('content_repo: %s', content_repo)
        # FIXME: mv to it's own method
        # FIXME: pass these to fetch() if it really needs it
        repo_version_best = content_version.get_content_version(repo_data,
                                                                version=self.content_version,
                                                                content_versions=content_repo_versions,
                                                                content_content_name=content_name)

        # get the RepositoryVersion obj (or its data anyway)
        _repoversion = select_repository_version(repoversions, repo_version_best)
        # FIXME: stop munging state
        # self.content_meta.version = _content_version

        external_url = repo_data.get('external_url', None)
        if not external_url:
            raise exceptions.GalaxyError('no external_url info on the Repository object from %s' % repo_name)

        results = {'content': {'galaxy_namespace': content_username,
                               'repo_name': repo_name,
                               'content_name': content_name},
                   'specified_content_version': self.content_version,
                   'specified_content_spec': self.content_spec,
                   'custom': {'content_repo_versions': content_repo_versions,
                              'external_url': external_url,
                              'galaxy_context': self.galaxy_context,
                              'related': related,
                              'repo_data': repo_data,
                              'repo_versions_url': repo_versions_url,
                              'repoversion': _repoversion},
                   }

        return results
Example #15
0
    def install(self, content_meta=None, force_overwrite=False):
        """Fetch a content archive, classify it, and extract it to disk.

        Workflow:
          1. choose a fetcher based on self.scm / self.src and fetch the archive
          2. verify the fetched file is a tar archive and open it
          3. inspect the archive members for role / galaxy / apb metadata files
             and classify the archive ('role', 'galaxy', 'apb', default 'multi')
          4. dispatch to the matching install helper to extract the content
          5. clean up any temp files the fetcher created

        :param content_meta: content metadata object to install under;
            defaults to self.content_meta
        :param force_overwrite: passed through to the install helpers so
            already-installed files may be replaced
        :return: list of (content_meta, installed_paths) pairs describing
            what was installed (helper return shapes vary by archive type)
        :raises exceptions.GalaxyError: when no fetcher matches scm/src
        :raises exceptions.GalaxyClientError: when nothing was fetched or the
            fetched file is not a tar archive
        """
        installed = []
        archive_role_metadata = None

        meta_file = None
        archive_parent_dir = None

        # FIXME: enum/constant/etc demagic
        # 'multi' is the default archive type until metadata found inside the
        # archive says otherwise (role/galaxy/apb)
        content_archive_type = 'multi'

        content_meta = content_meta or self.content_meta

        # TODO: some useful exceptions for 'cant find', 'cant read', 'cant write'
        fetch_method = choose_content_fetch_method(scm_url=self.scm,
                                                   src=self.src)

        fetcher = None
        if fetch_method == FetchMethods.SCM_URL:
            fetcher = ScmUrlFetch(scm_url=self.scm, scm_spec=self.spec)
        elif fetch_method == FetchMethods.LOCAL_FILE:
            # the file is a tar, so open it that way and extract it
            # to the specified (or default) content directory
            fetcher = LocalFileFetch(self.src)
        elif fetch_method == FetchMethods.REMOTE_URL:
            fetcher = RemoteUrlFetch(remote_url=self.src,
                                     validate_certs=self._validate_certs)
        elif fetch_method == FetchMethods.GALAXY_URL:
            fetcher = GalaxyUrlFetch(content_spec=self.src,
                                     content_version=self.version,
                                     galaxy_context=self.galaxy,
                                     validate_certs=self._validate_certs)
        else:
            raise exceptions.GalaxyError(
                'No approriate content fetcher found for %s %s', self.scm,
                self.src)

        log.debug('fetch_method: %s', fetch_method)

        if fetcher:
            content_archive = fetcher.fetch()

            log.debug('content_archive=%s', content_archive)

        if not content_archive:
            raise exceptions.GalaxyClientError(
                'No valid content data found for %s', self.src)

        log.debug("installing from %s", content_archive)

        if not tarfile.is_tarfile(content_archive):
            raise exceptions.GalaxyClientError(
                "the file downloaded was not a tar.gz")

        if content_archive.endswith('.gz'):
            content_tar_file = tarfile.open(content_archive, "r:gz")
        else:
            content_tar_file = tarfile.open(content_archive, "r")

        members = content_tar_file.getmembers()

        # next find the metadata file
        (meta_file, meta_parent_dir, galaxy_file, apb_yaml_file) = \
            archive.find_archive_metadata(members)

        # log.debug('self.content_type: %s', self.content_type)

        # content types like 'module' shouldn't care about meta_file elsewhere
        if self.content_type in self.NO_META:
            meta_file = None

        # archive_parent_dir is still None at this point (only initialized
        # above), so this always runs; kept conditional defensively
        if not archive_parent_dir:
            archive_parent_dir = archive.find_archive_parent_dir(
                members, content_meta)

        log.debug('meta_file: %s', meta_file)
        log.debug('galaxy_file: %s', galaxy_file)
        log.debug('content_type: %s', content_meta.content_type)
        log.debug("archive_parent_dir: %s", archive_parent_dir)
        log.debug("meta_parent_dir: %s", meta_parent_dir)

        # if not meta_file and not galaxy_file and self.content_type == "role":
        #    raise exceptions.GalaxyClientError("this role does not appear to have a meta/main.yml file or ansible-galaxy.yml.")

        # Look for top level role metadata (meta/main.yml at the archive root)
        archive_role_metadata = \
            archive.load_archive_role_metadata(content_tar_file,
                                               os.path.join(archive_parent_dir, archive.META_MAIN))

        self._metadata = archive.load_archive_role_metadata(
            content_tar_file, meta_file)

        galaxy_metadata = archive.load_archive_galaxyfile(
            content_tar_file, galaxy_file)

        apb_data = archive.load_archive_apb_yaml(content_tar_file,
                                                 apb_yaml_file)

        log.debug('apb_data: %s', pprint.pformat(apb_data))

        # looks like we are a role, update the default content_type from all -> role
        if archive_role_metadata:
            log.debug(
                'Find role metadata in the archive, so installing it as role content_type'
            )
            log.debug('copying self.content_meta: %s', self.content_meta)

            data = self.content_meta.data

            # rebuild the meta as a role-specific archive meta
            content_meta = content.RoleContentArchiveMeta.from_data(data)

            log.debug('role content_meta: %s', content_meta)

            # we are dealing with an role archive
            content_archive_type = 'role'

        # TODO: truthiness of galaxy_metadata may be better, since that means it was parsed and non empty
        # galaxy archive type wins over role if an ansible-galaxy.yml exists
        if galaxy_file:
            content_archive_type = 'galaxy'

        # apb archive type wins over both role and galaxy
        if apb_data:
            log.debug(
                'Find APB metadata in the archive, so installing it as APB content_type'
            )

            data = self.content_meta.data
            data['apb_data'] = apb_data

            content_meta = content.APBContentArchiveMeta.from_data(data)

            log.debug('APB content_meta: %s', content_meta)
            content_archive_type = 'apb'

        log.debug('content_archive_type=%s', content_archive_type)

        # we strip off any higher-level directories for all of the files contained within
        # the tar file here. The default is 'github_repo-target'. Gerrit instances, on the other
        # hand, does not have a parent directory at all.

        if not os.path.isdir(content_meta.path):
            log.debug('No content path (%s) found so creating it',
                      content_meta.path)

            os.makedirs(content_meta.path)

        # TODO: need an install state machine real bad

        if self.content_type != "all":
            self.display_callback(
                "- extracting %s %s to %s" %
                (self.content_type, content_meta.name, self.path))
        else:
            self.display_callback(
                "- extracting all content in %s to content directories" %
                content_meta.name)

        log.info('Installing content of type: %s', content_meta.content_type)

        # 'all' expands to the full list of known content types
        content_types_to_install = [self.content_install_type]
        if self.content_install_type == 'all':
            content_types_to_install = CONTENT_TYPES

        # now branch based on archive type
        if content_archive_type == 'galaxy':
            log.info(
                'Installing %s as a content_archive_type=%s content_type=%s (galaxy_file)',
                content_meta.name, content_archive_type,
                content_meta.content_type)
            log.debug('galaxy_file=%s', galaxy_file)
            log.debug('galaxy_metadata=%s', pprint.pformat(galaxy_metadata))

            # Parse the ansible-galaxy.yml file and install things
            # as necessary
            installed_from_galaxy_metadata =  \
                install_from_galaxy_metadata(content_tar_file,
                                             archive_parent_dir,
                                             galaxy_metadata,
                                             content_meta,
                                             display_callback=self.display_callback,
                                             force_overwrite=force_overwrite)

            installed.extend(installed_from_galaxy_metadata)

        elif content_archive_type == 'role':
            log.info(
                'Installing %s as a role content archive and content_type=%s (role)',
                content_meta.name, content_meta.content_type)

            log.debug('archive_parent_dir: %s', archive_parent_dir)
            installed_from_role = self._install_role_archive(
                content_tar_file,
                archive_parent_dir,
                content_meta=content_meta,
                force_overwrite=force_overwrite)
            installed.extend(installed_from_role)

        elif content_archive_type == 'apb':
            log.info(
                'Installing %s as a Ansible Playbook Bundle content archive and content_type=%s (apb)',
                content_meta.name, content_meta.content_type)

            apb_name = content_meta.apb_data.get('name', content_meta.name)
            log.info('about to extract %s to %s', apb_name, content_meta.path)

            if self.content_install_type in ('all', 'apb'):
                # install the whole bundle as-is
                installed_from_apb = \
                    self._install_apb_archive(content_tar_file,
                                              archive_parent_dir,
                                              content_meta=content_meta,
                                              force_overwrite=force_overwrite)

                installed.extend(installed_from_apb)

            else:
                # we are installing bits out of the apb, treat it like a multi-content
                content_meta.content_dir = None
                content_meta.content_sub_dir = None
                content_meta.content_type = 'all'

                installed_paths = \
                    self._install_for_content_types(content_tar_file,
                                                    archive_parent_dir,
                                                    content_archive_type='apb',
                                                    content_meta=content_meta,
                                                    # install_content_type='apb',
                                                    content_types_to_install=content_types_to_install,
                                                    force_overwrite=force_overwrite)

                installed_from_apb = [(content_meta, installed_paths)]

            installed.extend(installed_from_apb)
        # a multi content archive
        else:
            # if content_meta.content_type == 'all':

            log.info(
                'Installing %s as a archive_type=%s content_type=%s install_type=%s ',
                content_meta.name, content_archive_type,
                content_meta.content_type, self.content_install_type)

            log.info('about to extract content_type=%s %s to %s',
                     content_meta.content_type, content_meta.name,
                     content_meta.path)

            log.debug('content_meta: %s', content_meta)
            res = self._install_for_content_types(
                content_tar_file,
                archive_parent_dir,
                content_archive_type,
                content_meta,
                content_sub_name=self.sub_name,
                content_types_to_install=content_types_to_install,
                force_overwrite=force_overwrite)

            log.debug('res:\n%s', pprint.pformat(res))

            installed.append((content_meta, res))

        # return the parsed yaml metadata
        self.display_callback("- %s was installed successfully to %s" %
                              (str(self), self.path))

        # rm any temp files created when getting the content archive
        fetcher.cleanup()

        # self.display_callback('Installed content: %s',

        for item in installed:
            log.info('Installed content: %s', item[0])
            log.debug('Installed files: %s', pprint.pformat(item[1]))
        return installed
Example #16
0
def get_content_version(content_data, version, content_versions,
                        content_content_name):
    '''Pick which version of a piece of content should be installed.

    When the user asked for an explicit version (anything other than a falsy
    value or 'master'), verify it exists among content_versions and return
    the matching *original* version string. Otherwise return the newest
    available version, falling back to the content's github_branch and
    finally to 'master'.

    content_data is a dict based on /api/v1/content/13 for ex
    version is the version string asked for by the user
    content_versions is a list of version strings in order
    content_content_name is the content name, used in log/error messages
    '''

    log.debug('%s want ver: %s', content_content_name, version)
    log.debug('%s vers avail: %s', content_content_name,
              json.dumps(content_versions, indent=2))

    # Normalize each available version and, in the same pass, remember which
    # original string each normalized form came from (the original string is
    # needed later for content archive download urls).
    available_normalized_versions = []
    norm_to_orig_map = {}
    for orig_ver in content_versions:
        norm_ver = normalize_version_string(orig_ver)
        available_normalized_versions.append(norm_ver)
        norm_to_orig_map[norm_ver] = orig_ver

    normalized_version = normalize_version_string(version)

    log.debug('normalized_version: %s', normalized_version)
    log.debug('avail_normalized_versions: %s',
              json.dumps(available_normalized_versions, indent=4))

    # an explicit version was requested, so it has to exist in the list
    if version and version != 'master':
        if not available_normalized_versions:
            # FIXME: should we show the actual available versions or the available
            #        versions we searched in?  act: ['v1.0.0', '1.1'] nor: ['1.0.0', '1.1']
            msg = "- The list of available versions for %s is empty (%s)." % \
                (content_content_name or 'content', available_normalized_versions)
            raise exceptions.GalaxyError(msg)

        if str(normalized_version) not in available_normalized_versions:
            # TODO: how do we msg 'couldn't find the version you specified
            #       in actual version tags or ones we made up without the leading v'
            msg = "- the specified version (%s) of %s was not found in the list of available versions (%s)." % \
                (version, content_content_name or 'content', available_normalized_versions)
            raise exceptions.GalaxyError(msg)

        # the requested version exists; return the original (pre-normalize)
        # string since that is what download urls are built from
        orig_version = norm_to_orig_map[normalized_version]
        log.debug('%s requested ver: %s, matched: %s, using real ver: %s ',
                  content_content_name, version, normalized_version,
                  orig_version)
        return orig_version

    # No explicit version requested: sort what is available and take the
    # newest. With nothing available, fall back to the import branch and
    # finally the head of the master branch.
    if available_normalized_versions:
        candidates = [LooseVersion(a) for a in available_normalized_versions]
        try:
            candidates.sort()
        except TypeError as e:
            log.exception(e)
            log.error('ver: %s loose_versions: %s', version, candidates)
            raise exceptions.GalaxyClientError(
                'Unable to compare content versions (%s) to determine the most recent version due to incompatible version formats. '
                'Please contact the content author to resolve versioning conflicts, or specify an explicit content version to '
                'install.' % ', '.join([v.vstring for v in candidates]))
        content_version = str(candidates[-1])
    # FIXME: follow 'repository' branch and it's ['import_branch'] ?
    elif content_data.get('github_branch', None):
        content_version = content_data['github_branch']
    else:
        content_version = 'master'

    log.debug('%s using latest ver: %s', content_content_name, content_version)
    return content_version
Example #17
0
    def fetch(self):
        """Find self.content_spec on the Galaxy server and download its archive.

        Parses the content spec into username/repo/content names, looks up the
        repository and its versions via the API, resolves which version to
        install, builds a download url from the repo's external_url, and
        fetches the archive to a local temp path.

        :return: local filesystem path to the downloaded content archive, or
            None when the download itself failed (the failure is reported via
            self.display_callback)
        :raises exceptions.GalaxyClientError: if the repository lookup finds
            nothing
        :raises exceptions.GalaxyError: if the repository data carries no
            external_url to build a download url from
        """
        api = GalaxyAPI(self.galaxy_context)

        # FIXME - Need to update our API calls once Galaxy has them implemented
        content_username, repo_name, content_name = parse_content_name(
            self.content_spec)

        log.debug('content_username=%s', content_username)
        log.debug('repo_name=%s', repo_name)
        log.debug('content_name=%s', content_name)

        # TODO: extract parsing of cli content sorta-url thing and add better tests
        # specs without a distinct repo part use the content name as the repo
        repo_name = repo_name or content_name

        # FIXME: exception handling
        repo_data = api.lookup_repo_by_name(content_username, repo_name)

        if not repo_data:
            raise exceptions.GalaxyClientError(
                "- sorry, %s was not found on %s." %
                (self.content_spec, api.api_server))

        # FIXME: ?
        # if repo_data.get('role_type') == 'APP#':
        # Container Role
        #    self.display_callback("%s is a Container App role, and should only be installed using Ansible "
        #                          "Container" % content_name, level='warning')

        # FIXME - Need to update our API calls once Galaxy has them implemented
        related = repo_data.get('related', {})

        repo_versions_url = related.get('versions', None)

        log.debug('related=%s', related)

        # FIXME: exception handling
        repo_versions = api.fetch_content_related(repo_versions_url)

        log.debug('repo_versions: %s', repo_versions)

        content_repo_versions = [
            a.get('name') for a in repo_versions if a.get('name', None)
        ]
        log.debug('content_repo_versions: %s', content_repo_versions)

        # FIXME: mv to it's own method
        # FIXME: pass these to fetch() if it really needs it
        _content_version = content_version.get_content_version(
            repo_data,
            version=self.content_version,
            content_versions=content_repo_versions,
            content_content_name=content_name)

        # FIXME: stop munging state
        # self.content_meta.version = _content_version

        external_url = repo_data.get('external_url', None)
        if not external_url:
            # BUG FIX: repo_name was previously passed as a second positional
            # argument ('... from %s', repo_name) instead of being %-formatted
            # into the message, so the placeholder was never interpolated.
            # Format up front, matching the other GalaxyError raise sites in
            # this file.
            raise exceptions.GalaxyError(
                'no external_url info on the Repository object from %s'
                % repo_name)

        download_url = _build_download_url(external_url=external_url,
                                           version=_content_version)

        log.debug('content_spec=%s', self.content_spec)
        log.debug('download_url=%s', download_url)

        try:
            content_archive_path = download.fetch_url(
                download_url, validate_certs=self.validate_certs)
        except exceptions.GalaxyDownloadError as e:
            log.exception(e)
            self.display_callback("failed to download the file: %s" % str(e))
            return None

        # remember where the archive landed so install/cleanup can find it
        self.local_path = content_archive_path

        log.debug('content_archive_path=%s', content_archive_path)

        return content_archive_path
Example #18
0
def test_galaxy_error():
    """GalaxyError should be constructible with no args like any Exception."""
    exc = exceptions.GalaxyError()
    log.debug('exc: %s', exc)
    # BUG FIX: the test previously asserted nothing, so it could never fail.
    # Pin down that GalaxyError is a real, raisable Exception subclass.
    assert isinstance(exc, Exception)
Example #19
0
    def find(self):
        """Locate the repository for self.repository_spec on the Galaxy server.

        Queries the API for the repository, resolves the best matching
        repository version for the requested version, and returns a dict
        describing the install candidate, including a RepositorySpec pinned
        to the resolved version.

        :return: results dict with 'content', 'specified_content_version',
            'specified_repository_spec' and 'custom' keys
        :raises exceptions.GalaxyClientError: when the repository is not
            found on the server
        :raises exceptions.GalaxyError: when the repository data has no
            external_url
        """
        api = GalaxyAPI(self.galaxy_context)

        namespace = self.repository_spec.namespace
        repo_name = self.repository_spec.name

        log.debug('Querying %s for namespace=%s, name=%s', self.galaxy_context.server['url'], namespace, repo_name)

        # TODO: extract parsing of cli content sorta-url thing and add better tests

        # FIXME: exception handling
        repo_data = api.lookup_repo_by_name(namespace, repo_name)

        if not repo_data:
            raise exceptions.GalaxyClientError("- sorry, %s was not found on %s." % (self.repository_spec.label,
                                                                                     api.api_server))

        # FIXME - Need to update our API calls once Galaxy has them implemented
        related = repo_data.get('related', {})

        repo_versions_url = related.get('versions', None)

        # FIXME: exception handling
        repoversions = api.fetch_content_related(repo_versions_url)

        # keep only the entries that actually carry a 'version' value
        content_repo_versions = [a.get('version') for a in repoversions if a.get('version', None)]

        # FIXME: mv to it's own method
        # FIXME: pass these to fetch() if it really needs it
        repo_version_best = repository_version.get_repository_version(repo_data,
                                                                      version=self.repository_spec.version,
                                                                      repository_versions=content_repo_versions,
                                                                      content_content_name=self.repository_spec.name)

        # get the RepositoryVersion obj (or its data anyway)
        _repoversion = select_repository_version(repoversions, repo_version_best)

        # external_url isnt specific, it could be something like github.com/alikins/some_collection
        # external_url is the third option after a 'download_url' provided by the galaxy rest API
        # (repo version specific download_url first if applicable, then the general download_url)
        # Note: download_url can point anywhere...
        external_url = repo_data.get('external_url', None)

        if not external_url:
            raise exceptions.GalaxyError('no external_url info on the Repository object from %s' % self.repository_spec.label)

        # The repo spec of the install candidate with potentially a different version
        potential_repository_spec = RepositorySpec(namespace=namespace,
                                                   name=repo_name,
                                                   version=_repoversion['version'],
                                                   fetch_method=self.repository_spec.fetch_method,
                                                   scm=self.repository_spec.scm,
                                                   spec_string=self.repository_spec.spec_string,
                                                   src=self.repository_spec.src)

        # 'custom' carries the raw API payloads so later steps (fetch/install)
        # don't have to re-query the server
        results = {'content': {'galaxy_namespace': namespace,
                               'repo_name': repo_name},
                   'specified_content_version': self.repository_spec.version,
                   'specified_repository_spec': self.repository_spec,
                   'custom': {'content_repo_versions': content_repo_versions,
                              'external_url': external_url,
                              'galaxy_context': self.galaxy_context,
                              'related': related,
                              'repo_data': repo_data,
                              'repo_versions_url': repo_versions_url,
                              'repoversion': _repoversion,
                              'potential_repository_spec': potential_repository_spec},
                   }

        return results
Example #20
0
 def remove(self):
     """Always refuse; only an InstalledGalaxyContent can be removed."""
     msg = 'Calling remove() on a GalaxyContent (not InstalledGalaxyContent) doesnt mean anything'
     raise exceptions.GalaxyError(msg)