Example #1
    def fetch(self, find_results=None):
        find_results = find_results or {}

        results = {}

        download_url = find_results['custom']['download_url']

        # download_url = _build_download_url(external_url=external_url, version=_content_version)
        # TODO: error handling if there is no download_url

        expected_filename = find_results.get('artifact',
                                             {}).get('filename', None)

        log.debug('repository_spec=%s', self.requirement_spec)
        log.debug('download_url=%s', download_url)
        log.debug('expected_filename=%s', expected_filename)

        # for inclusion in any error messages or logging for this fetch
        self.remote_resource = download_url

        # can raise GalaxyDownloadError
        repository_archive_path = download.fetch_url(
            download_url,
            validate_certs=self.validate_certs,
            filename=expected_filename)

        self.local_path = repository_archive_path

        log.debug('repository_archive_path=%s', repository_archive_path)

        # validate the sha256sum of the downloaded artifact against the expected value
        expected_chksum = find_results['artifact'].get('sha256')
        collection_artifact.validate_artifact(self.local_path, expected_chksum)

        # TODO: This is an indication that a fetcher is the wrong abstraction. A fetch
        #       can resolve a name/spec, find metadata about the content including available
        #       versions, compare/sort versions, select a matching version, find a download
        #       URI, and finally actually fetch it.
        #       I.e., more of a RepositoryRepository (aiee) (RepositorySource? RepositoryChannel?
        #       RepositoryProvider?) that is a remote 'channel' with both info and content.
        results = {
            'archive_path': repository_archive_path,
            'fetch_method': self.fetch_method
        }

        # So fetch_results has the download url. If we follow redirects,
        # we could also add a 'final_download_url' or 'downloaded_url' so
        # we know both the original and the final url after redirects.
        results['custom'] = find_results['custom']
        results['content'] = find_results['content']
        results['artifact'] = find_results['artifact']

        return results
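
collection_artifact.validate_artifact() above is what checks the downloaded archive against the sha256 reported by the find step. A minimal sketch of that kind of check using only the standard library (the name validate_artifact_checksum is made up for illustration, not the real helper):

import hashlib

def validate_artifact_checksum(artifact_path, expected_sha256):
    # hash the file in chunks so large archives never need to fit in memory
    digest = hashlib.sha256()
    with open(artifact_path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    actual = digest.hexdigest()
    if expected_sha256 and actual != expected_sha256:
        raise ValueError('sha256 mismatch for %s: expected %s, got %s'
                         % (artifact_path, expected_sha256, actual))
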
Example #2
    def fetch(self):
        '''Download the remote_url to a temp file

        Can raise GalaxyDownloadError on any exception while downloading remote_url and saving it.'''

        # NOTE: could move download.fetch_url here instead of splitting it
        content_archive_path = download.fetch_url(
            self.remote_url, validate_certs=self.validate_certs)
        self.local_path = content_archive_path

        log.debug('content_archive_path=%s', content_archive_path)

        return content_archive_path
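
download.fetch_url() itself is not shown in these examples. A rough Python 3 stand-in for what it does here, streaming a URL into a named temp file and returning the path (fetch_url_to_tempfile is a hypothetical name; the real helper also handles certificate validation and Galaxy-specific errors):

import os
import shutil
import tempfile
from urllib.request import urlopen

def fetch_url_to_tempfile(url, suffix='.tar.gz'):
    # stream the response into a temp file in chunks rather than buffering it all
    fd, tmp_path = tempfile.mkstemp(suffix=suffix, prefix='tmp-mazer-fetch-')
    with urlopen(url) as resp, os.fdopen(fd, 'wb') as out:
        shutil.copyfileobj(resp, out)
    return tmp_path
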
Example #3
    def fetch(self, find_results=None):
        find_results = find_results or {}

        results = {}

        download_url = get_download_url(
            repo_data=find_results['custom']['repo_data'],
            external_url=find_results['custom']['external_url'],
            repoversion=find_results['custom']['repoversion'])

        # download_url = _build_download_url(external_url=external_url, version=_content_version)
        # TODO: error handling if there is no download_url

        log.debug('repository_spec=%s', self.requirement_spec)
        log.debug('download_url=%s', download_url)

        # for inclusion in any error messages or logging for this fetch
        self.remote_resource = download_url

        # can raise GalaxyDownloadError
        repository_archive_path = download.fetch_url(
            download_url, validate_certs=self.validate_certs)

        self.local_path = repository_archive_path

        log.debug('repository_archive_path=%s', repository_archive_path)

        # TODO: This is an indication that a fetcher is the wrong abstraction. A fetch
        #       can resolve a name/spec, find metadata about the content including available
        #       versions, compare/sort versions, select a matching version, find a download
        #       URI, and finally actually fetch it.
        #       I.e., more of a RepositoryRepository (aiee) (RepositorySource? RepositoryChannel?
        #       RepositoryProvider?) that is a remote 'channel' with both info and content.
        results = {
            'archive_path': repository_archive_path,
            'download_url': download_url,
            'fetch_method': self.fetch_method
        }

        results['custom'] = {}
        results['content'] = find_results['content']
        results['content']['fetched_version'] = find_results['custom'][
            'repoversion'].get('version')

        return results
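
get_download_url() and the commented-out _build_download_url() both turn repository metadata into a concrete archive URL. A hedged sketch of the _build_download_url() variant, assuming the common GitHub-style '<external_url>/archive/<version>.tar.gz' convention; the real code may prefer a download URL reported by the Galaxy server instead:

def _build_download_url(external_url, version):
    # guess at the archive tarball location for the tag matching 'version'
    return '%s/archive/%s.tar.gz' % (external_url.rstrip('/'), version)
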
Example #4
    def fetch(self, find_results=None):
        '''Download the remote_url to a temp file

        Can raise GalaxyDownloadError on any exception while downloading remote_url and saving it.'''

        find_results = find_results or {}

        # NOTE: could move download.fetch_url here instead of splitting it
        repository_archive_path = download.fetch_url(self.remote_url, validate_certs=self.validate_certs)
        self.local_path = repository_archive_path

        log.debug('repository_archive_path=%s', repository_archive_path)

        results = {'archive_path': repository_archive_path,
                   'fetch_method': self.fetch_method}
        results['content'] = find_results['content']
        results['custom'] = {'remote_url': self.remote_url,
                             'validate_certs': self.validate_certs}
        return results
Example #5
    def fetch(self):
        api = GalaxyAPI(self.galaxy_context)

        # FIXME - Need to update our API calls once Galaxy has them implemented
        content_username, repo_name, content_name = parse_content_name(
            self.content_spec)

        log.debug('content_username=%s', content_username)
        log.debug('repo_name=%s', repo_name)
        log.debug('content_name=%s', content_name)

        # TODO: extract parsing of the CLI content spec ('sorta-url' string) and add better tests
        repo_name = repo_name or content_name

        # FIXME: exception handling
        repo_data = api.lookup_repo_by_name(content_username, repo_name)

        if not repo_data:
            raise exceptions.GalaxyClientError(
                "- sorry, %s was not found on %s." %
                (self.content_spec, api.api_server))

        # FIXME: ?
        # if repo_data.get('role_type') == 'APP#':
        # Container Role
        #    self.display_callback("%s is a Container App role, and should only be installed using Ansible "
        #                          "Container" % content_name, level='warning')

        # FIXME - Need to update our API calls once Galaxy has them implemented
        related = repo_data.get('related', {})

        repo_versions_url = related.get('versions', None)

        log.debug('related=%s', related)

        # FIXME: exception handling
        repo_versions = api.fetch_content_related(repo_versions_url)

        log.debug('repo_versions: %s', repo_versions)

        # related_repo_url = related.get('repository', None)
        # log.debug('related_repo_url: %s', related_repo_url)
        # related_content_url = related.get('content', None)
        # log.debug('related_content_url: %s', related_content_url)

        # content_repo = None
        # if related_content_url:
        #     content_repo = api.fetch_content_related(related_content_url)
        content_repo_versions = [
            a.get('name') for a in repo_versions if a.get('name', None)
        ]
        log.debug('content_repo_versions: %s', content_repo_versions)

        # log.debug('content_repo: %s', content_repo)
        # FIXME: mv to its own method
        # FIXME: pass these to fetch() if it really needs it
        _content_version = content_version.get_content_version(
            repo_data,
            version=self.content_version,
            content_versions=content_repo_versions,
            content_content_name=content_name)

        # FIXME: stop munging state
        # self.content_meta.version = _content_version

        external_url = repo_data.get('external_url', None)
        if not external_url:
            raise exceptions.GalaxyError(
                'no external_url info on the Repository object from %s' %
                repo_name)

        download_url = _build_download_url(external_url=external_url,
                                           version=_content_version)

        log.debug('content_spec=%s', self.content_spec)
        log.debug('download_url=%s', download_url)

        try:
            content_archive_path = download.fetch_url(
                download_url, validate_certs=self.validate_certs)
        except exceptions.GalaxyDownloadError as e:
            log.exception(e)
            self.display_callback("failed to download the file: %s" % str(e))
            return None

        self.local_path = content_archive_path

        log.debug('content_archive_path=%s', content_archive_path)

        return content_archive_path
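
content_version.get_content_version() is the piece that picks which of the repo's tagged versions to install. A rough sketch of that selection, assuming the tags are plain dotted version strings and ignoring pre-release handling (pick_version is an illustrative name, not the real API):

def pick_version(available_versions, requested_version=None):
    # treat 'v1.2.3' and '1.2.3' as the same version
    normalized = dict((v.lstrip('v'), v) for v in available_versions)

    if requested_version:
        key = requested_version.lstrip('v')
        if key not in normalized:
            raise ValueError('requested version %s not found in %s'
                             % (requested_version, available_versions))
        return normalized[key]

    # no explicit version requested: return the highest, compared numerically
    def as_tuple(ver):
        return tuple(int(part) for part in ver.split('.') if part.isdigit())

    return normalized[max(normalized, key=as_tuple)]
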
Example #6
def install_repository_specs_loop(galaxy_context,
                                  repository_spec_strings=None,
                                  requirements_list=None,
                                  collections_lockfile_path=None,
                                  editable=False,
                                  namespace_override=None,
                                  display_callback=None,
                                  # TODO: error handling callback ?
                                  ignore_errors=False,
                                  no_deps=False,
                                  force_overwrite=False):

    requirements_list = requirements_list or []

    for repository_spec_string in repository_spec_strings:
        fetch_method = \
            repository_spec_parse.choose_repository_fetch_method(repository_spec_string,
                                                                 editable=editable)
        log.debug('fetch_method: %s', fetch_method)

        if fetch_method == FetchMethods.LOCAL_FILE:
            # Since we only know this is a local file we vaguely recognize, we have to
            # open it up to get any more details. We _could_ attempt to parse the file
            # name, but that rarely ends well. The filename could also be arbitrary for
            # downloads from remote urls ('mazer install http://myci.example.com/somebuildjob/latest' etc)
            spec_data = collection_artifact.load_data_from_collection_artifact(repository_spec_string)
            spec_data['fetch_method'] = fetch_method
        elif fetch_method == FetchMethods.REMOTE_URL:
            # download the url, hope it is a collection artifact, and use
            # load_data_from_collection_artifact() for the rest of the repo_spec data
            log.debug('repository_spec_string: %s', repository_spec_string)

            tmp_downloaded_path = download.fetch_url(repository_spec_string,
                                                     # Note: ignore_certs is meant for galaxy server,
                                                     # overloaded to apply for arbitrary http[s] downloads here
                                                     validate_certs=not galaxy_context.server['ignore_certs'])
            spec_data = collection_artifact.load_data_from_collection_artifact(tmp_downloaded_path)

            # pretend like this is a local_file install now
            spec_data['fetch_method'] = FetchMethods.LOCAL_FILE
        else:
            spec_data = repository_spec_parse.spec_data_from_string(repository_spec_string,
                                                                    namespace_override=namespace_override,
                                                                    editable=editable)

            spec_data['fetch_method'] = fetch_method

        log.debug('spec_data: %s', spec_data)

        req_spec = RequirementSpec.from_dict(spec_data)

        req = Requirement(repository_spec=None, op=RequirementOps.EQ, requirement_spec=req_spec)

        requirements_list.append(req)

    log.debug('collections_lockfile_path: %s', collections_lockfile_path)

    if collections_lockfile_path:
        # load the collections lockfile as if it were the 'dependencies' dict from a collection_info
        collections_lockfile = load_collections_lockfile(collections_lockfile_path)

        dependencies_list = requirements.from_dependencies_dict(collections_lockfile.dependencies)

        # Create the CollectionsLock for the validators
        collections_lock = CollectionsLock(dependencies=dependencies_list)

        requirements_list.extend(collections_lock.dependencies)

    log.debug('requirements_list: %s', requirements_list)

    while True:
        if not requirements_list:
            break

        display_callback('', level='info')
        display_callback('Collection specs to install:', level='info')

        for req in requirements_list:
            if req.repository_spec:
                msg = '  %s (required by %s)' % (req.requirement_spec.label, req.repository_spec)
            else:
                msg = '  %s' % req.requirement_spec.label
            display_callback(msg, level='info')

        just_installed_repositories = \
            install_repositories_matching_repository_specs(galaxy_context,
                                                           requirements_list,
                                                           editable=editable,
                                                           namespace_override=namespace_override,
                                                           display_callback=display_callback,
                                                           ignore_errors=ignore_errors,
                                                           no_deps=no_deps,
                                                           force_overwrite=force_overwrite)

        for just_installed_repo in just_installed_repositories:
            display_callback('  Installed: %s (to %s)' %
                             (just_installed_repo.repository_spec,
                              just_installed_repo.path),
                             level='info')

        # set the repository_specs to search for to whatever the install reported as still being needed
        # requirements_list = new_requirements_list
        requirements_list = find_new_deps_from_installed(galaxy_context,
                                                         just_installed_repositories,
                                                         no_deps=no_deps)

    # FIXME: what results to return?
    return 0
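
load_collections_lockfile() and requirements.from_dependencies_dict() are not shown above. A hedged sketch of the conversion they imply, assuming the lockfile's 'dependencies' maps a 'namespace.name' label to a version-spec string (the dict-based requirement shape here is illustrative, not the real Requirement/RequirementSpec objects):

def requirements_from_dependencies_dict(dependencies):
    # dependencies is expected to look like {'some_namespace.some_collection': '>=1.0.0,<2.0.0'}
    reqs = []
    for label, version_spec in dependencies.items():
        namespace, _, name = label.partition('.')
        reqs.append({'namespace': namespace,
                     'name': name,
                     'version_spec': version_spec})
    return reqs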