Ejemplo n.º 1
0
def test_install(galaxy_context, mocker):
    """install.install() should return a non-empty list of Repository objects."""
    repo_spec = RepositorySpec(namespace='some_namespace',
                               name='some_name',
                               version='4.3.2')

    fetcher = mocker.MagicMock(name='MockFetch')

    # Stub out archive loading so no real archive file has to exist.
    mocker.patch.object(install.repository_archive, 'load_archive',
                        return_value=mocker.MagicMock(name='MockRepoArchive'))

    res = install.install(galaxy_context,
                          fetcher=fetcher,
                          fetch_results={'archive_path': '/dev/null/doesntexist'},
                          repository_spec=repo_spec,
                          display_callback=display_callback)
    log.debug('res: %s', res)

    assert isinstance(res, list)
    assert len(res) > 0

    installed = res[0]
    assert isinstance(installed, Repository)
    assert installed.repository_spec == repo_spec
    assert galaxy_context.content_path in installed.path
Ejemplo n.º 2
0
def test_install(galaxy_context, tmpdir):
    """Build a real artifact, install it, and verify the InstallationResults."""
    built = build_repo_artifact(galaxy_context, tmpdir)

    repo_archive = repository_archive.load_archive(
        built['build_results'].artifact_file_path)
    log.debug('repo_archive: %s', repo_archive)

    repo_spec = RepositorySpec(namespace='some_namespace',
                               name='some_name',
                               version='1.2.3')

    destination_info = InstallDestinationInfo(
        collections_path=galaxy_context.collections_path,
        repository_spec=repo_spec,
        namespaced_repository_path='%s/%s' % (repo_spec.namespace,
                                              repo_spec.name),
        force_overwrite=True,
        editable=False)

    res = repository_archive.install(repo_archive,
                                     repo_spec,
                                     destination_info,
                                     display_callback=display_callback)
    log.debug('res: %s', res)

    assert isinstance(res, InstallationResults)
    assert isinstance(res.install_info, InstallInfo)
    assert isinstance(res.install_info.version, semantic_version.Version)
    assert isinstance(res.installed_datetime, datetime.datetime)
Ejemplo n.º 3
0
def repository_spec_from_find_results(find_results, requirement_spec):
    '''Create a new RepositorySpec with updated info from fetch_results.

    Evolves repository_spec to match fetch results.'''

    # TODO: do we still need to check the fetched version against the spec version?
    #       We do, since the unspecific version is None, so fetched versions wont match
    #       so we need a new repository_spec for install.
    # TODO: this is more or less a verify/validate step or state transition
    content_data = find_results.get('content', {})

    log.debug(
        'version_spec "%s" for %s was requested and was resolved to version "%s"',
        requirement_spec.version_spec, requirement_spec.label,
        content_data.get('version'))

    # Start from the requirement spec's own fields, then overlay what the
    # server actually resolved. A fetch can return a different
    # namespace/name than the one requested (server side aliases).
    spec_data = attr.asdict(requirement_spec)
    del spec_data['version_spec']

    spec_data.update(
        version=content_data.get('version'),
        namespace=content_data.get('content_namespace',
                                   requirement_spec.namespace),
        name=content_data.get('fetched_name', requirement_spec.name))

    return RepositorySpec.from_dict(spec_data)
Ejemplo n.º 4
0
def load(data_or_file_object, repository_spec=None):
    """Parse a requirements yaml stream into a list of Requirement objects.

    Each item in the yaml document becomes one Requirement whose
    requirement_spec is built from the parsed item; repository_spec (the
    thing that requires them) is attached to every Requirement.
    """
    log.debug('START of load of requirements %s', data_or_file_object.name)

    requirements_data = yaml.safe_load(data_or_file_object)

    requirements_list = [
        Requirement(repository_spec=repository_spec,
                    op=RequirementOps.EQ,
                    requirement_spec=RepositorySpec.from_dict(
                        yaml_parse.yaml_parse(req_data_item)))
        for req_data_item in requirements_data
    ]

    log.debug('FINISH of load of requirements: %s: %s',
              data_or_file_object.name, requirements_list)
    return requirements_list
Ejemplo n.º 5
0
def load_from_archive(repository_archive, namespace=None, installed=True):
    """Build a Repository from a collection artifact archive's manifest.

    Extracts the collection manifest (MANIFEST.json) from the archive's
    tarfile, builds a RepositorySpec from its collection_info (namespace
    may be overridden by the caller), and returns a Repository whose
    requirements come from the manifest's dependencies.

    :param repository_archive: object with a ``tar_file`` (tarfile) and
        an ``info.archive_path`` attribute.
    :param namespace: optional namespace override; when falsy, the
        namespace from the manifest's collection_info is used.
    :param installed: passed through to ``Repository.installed``.
    :raises exceptions.GalaxyArchiveError: when no manifest is found.
    """
    repo_tarfile = repository_archive.tar_file
    archive_path = repository_archive.info.archive_path

    # FIX: the original wrapped this in a single-argument os.path.join(),
    # which is a no-op -- the manifest is looked up at the archive root.
    # NOTE(review): a sibling implementation joins info.top_dir here;
    # confirm whether the manifest should be looked up under top_dir.
    manifest_filename = collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME
    manifest_data = None

    log.debug('Trying to extract %s from %s', manifest_filename, archive_path)

    try:
        # tarfile.extractfile raises KeyError for a missing member
        mfd = repo_tarfile.extractfile(manifest_filename)
        if mfd:
            manifest_data = collection_artifact_manifest.load(mfd)
            log.debug('md: %s', manifest_data)
            log.debug('md.collection_info: %s', manifest_data.collection_info)
            log.debug('manifest_data.collection_info.name: %s',
                      manifest_data.collection_info.name)
    except KeyError as e:
        log.warning('No %s found in archive: %s (Error: %s)',
                    manifest_filename, archive_path, e)

    if not manifest_data:
        raise exceptions.GalaxyArchiveError(
            'No collection manifest (%s) found in %s' %
            (collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME,
             archive_path),
            archive_path=archive_path)

    col_info = manifest_data.collection_info

    log.debug('col_info: %s', col_info)

    # if we specify a namespace, use it otherwise use the info from the manifest col_info
    repo_spec = RepositorySpec(
        namespace=namespace or col_info.namespace,
        name=col_info.name,
        version=col_info.version,
        spec_string=archive_path,
        src=archive_path)

    log.debug('repo spec from %s: %r', archive_path, repo_spec)

    requirements_list = requirements.from_dependencies_dict(
        col_info.dependencies, repository_spec=repo_spec)

    repository = Repository(
        repository_spec=repo_spec,
        path=None,
        installed=installed,
        requirements=requirements_list,
    )

    log.debug('repository: %s', repository)

    return repository
Ejemplo n.º 6
0
def galaxy_url_fetch(galaxy_context):
    """Build a GalaxyUrlFetch wired to a canned RepositorySpec."""
    spec = RepositorySpec(namespace='some_namespace',
                          name='some_name',
                          version='9.3.245')

    fetcher = galaxy_url.GalaxyUrlFetch(spec, galaxy_context)
    log.debug('galaxy_url_fetch: %s', fetcher)
    return fetcher
Ejemplo n.º 7
0
def info_repository_specs(galaxy_context,
                          api,
                          repository_spec_strings,
                          display_callback=None,
                          offline=None):
    """Display info for each repository spec string.

    For every spec string: optionally look the repo up on the Galaxy API
    (when not offline) and display the remote data, then display any
    matching installed repositories, and finally list the specs that
    matched nothing locally.
    """

    # NOTE: evaluated before 'offline' is normalized below, so a None
    # offline yields online=True (lookups enabled by default).
    online = not offline

    display_callback = display_callback or display.display_callback

    offline = offline or False

    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    # (label, RepositorySpec) pairs used to build the matcher
    labels_to_match = []

    # plain label strings, used later to report unmatched specs
    all_labels_to_match = []
    for repository_spec_string in repository_spec_strings:
        galaxy_namespace, repository_name, content_name = parse_repository_name(repository_spec_string)

        log.debug('showing info for repository spec: %s', repository_spec_string)

        # fall back to the content name when no repo name was parsed
        repository_name = repository_name or content_name

        if online:
            remote_data = api.lookup_repo_by_name(galaxy_namespace, repository_name)
            if remote_data:
                display_callback(_repr_remote_repo(remote_data))

        label_to_match = '%s.%s' % (galaxy_namespace, repository_name)
        all_labels_to_match.append(label_to_match)

        labels_to_match.append((label_to_match, RepositorySpec(namespace=galaxy_namespace,
                                                               name=repository_name)))

    matcher = matchers.MatchRepositorySpec([label_and_spec[1] for label_and_spec in labels_to_match])

    matched_repositories = irdb.select(repository_match_filter=matcher)

    # NOTE(review): this assignment looks dead -- remote_data is never
    # read after this point. Candidate for removal.
    remote_data = False

    matched_labels = []
    for matched_repository in matched_repositories:
        display_callback(_repr_installed_repository(matched_repository))
        matched_labels.append(matched_repository.repository_spec.label)

    # anything asked for but not found among installed repositories
    unmatched_labels = set(all_labels_to_match).difference(set(matched_labels))

    if unmatched_labels:
        display_callback('These repositories were not found:')

        for unmatched_label in sorted(unmatched_labels):
            display_callback(_repr_unmatched_label(unmatched_label))

    return
Ejemplo n.º 8
0
def test_fetch(mocker):
    """install.fetch() should run cleanly with a fetcher returning empty results."""
    fetcher = mocker.MagicMock(name='MockFetch')
    fetcher.fetch.return_value = {}

    spec = RepositorySpec(namespace='some_namespace',
                          name='some_name',
                          version='86.75.30')

    res = install.fetch(fetcher, spec, {})

    log.debug('res: %s', res)
Ejemplo n.º 9
0
def test_find_new_deps_from_installed(galaxy_context):
    """An installed repo's requirements should surface as new deps."""
    repo_spec = RepositorySpec(namespace='some_namespace',
                               name='some_name',
                               version='4.3.2')

    required_spec = RepositorySpec(namespace='some_required_namespace',
                                   name='some_required_name',
                                   version='1.0.0')

    requirement = Requirement(repository_spec=repo_spec,
                              op=RequirementOps.EQ,
                              requirement_spec=required_spec)

    # include the same requirement twice to exercise duplicate handling
    installed_repo = Repository(repo_spec,
                                requirements=[requirement, requirement])

    res = install.find_new_deps_from_installed(galaxy_context, [installed_repo])
    log.debug('res: %s', res)

    assert isinstance(res, list)
    assert isinstance(res[0], Requirement)
    assert res[0].requirement_spec == required_spec
Ejemplo n.º 10
0
def test_fetch_download_error(mocker):
    """A GalaxyDownloadError raised by the fetcher must propagate out of install.fetch()."""
    fetcher = mocker.MagicMock(name='MockFetch')
    fetcher.fetch.side_effect = exceptions.GalaxyDownloadError(
        url='http://example.invalid')

    spec = RepositorySpec(namespace='some_namespace',
                          name='some_name',
                          version='86.75.30')

    with pytest.raises(exceptions.GalaxyDownloadError) as exc_info:
        install.fetch(fetcher, spec, {})

    log.debug("exc_info: %s", exc_info)
Ejemplo n.º 11
0
def repository_spec_from_string(repository_spec_string, namespace_override=None, editable=False):
    """Parse a repository spec string into a RepositorySpec."""
    spec_data = spec_data_from_string(repository_spec_string,
                                      namespace_override=namespace_override,
                                      editable=editable)

    log.debug('spec_data: %s', spec_data)

    # All fields default to None when absent from the parsed data.
    field_names = ('name', 'namespace', 'version', 'scm',
                   'spec_string', 'fetch_method', 'src')
    return RepositorySpec(**{field: spec_data.get(field)
                             for field in field_names})
Ejemplo n.º 12
0
def test_local_file_fetch(mocker):
    """LocalFileFetch should fetch from an existing local tarball and its
    cleanup() must leave the original file on disk."""
    tmp_file_fo = tempfile.NamedTemporaryFile(prefix='tmp',
                                              suffix='.tar.gz',
                                              delete=True)
    log.debug('tmp_file_fo.name=%s tmp_file=%s', tmp_file_fo.name, tmp_file_fo)

    # An empty but structurally valid gzipped tarball backed by the temp file
    tar_file = tarfile.open(mode='w:gz', fileobj=tmp_file_fo)

    _repo_archive_info = RepositoryArchiveInfo(archive_type='foo',
                                               top_dir='namespace-name-1.2.3',
                                               archive_path=tmp_file_fo.name)
    _repo_archive = RepositoryArchive(info=_repo_archive_info,
                                      tar_file=tar_file)
    # Any archive load inside repository_spec returns our pre-built stand-in
    mocker.patch(
        'ansible_galaxy.repository_spec.repository_archive.load_archive',
        return_value=_repo_archive)

    repository_spec_ = RepositorySpec(namespace='namespace',
                                      name='name',
                                      version='1.2.3',
                                      fetch_method=FetchMethods.LOCAL_FILE,
                                      src=tmp_file_fo.name)

    # Stub the fetcher's internal loaders so no real archive parsing happens
    mocker.patch(
        'ansible_galaxy.fetch.local_file.LocalFileFetch._load_repository_archive',
        return_value=mocker.Mock(name='mockRepoArchive'))
    mocker.patch(
        'ansible_galaxy.fetch.local_file.LocalFileFetch._load_repository',
        return_value=mocker.Mock(name='mockRepo'))
    local_fetch = local_file.LocalFileFetch(repository_spec_)

    find_results = local_fetch.find()
    results = local_fetch.fetch(find_results=find_results)

    log.debug('results: %s', results)
    local_fetch.cleanup()

    # LocalFileFetch is acting directly on an existing file, so it's cleanup
    # should _not_ delete the file
    assert os.path.isfile(tmp_file_fo.name)

    # results = {'archive_path': '/tmp/tmpcle_fdtp.tar.gz', 'fetch_method': 'local_file',
    # 'custom': {'local_path': '/tmp/tmpcle_fdtp.tar.gz'},
    # 'content': {'galaxy_namespace': None, 'repo_name': '/tmp/tmpcle_fdtp.tar',
    # 'fetched_name': <Mock name='mockRepo.repository_spec.name' id='139946600228288'>}}
    assert results['archive_path'] == tmp_file_fo.name
    assert results['fetch_method'] == 'local_file'
    assert results['custom']['local_path'] == tmp_file_fo.name

    log.debug('should unlink %s here', tmp_file_fo.name)
Ejemplo n.º 13
0
def test_install_no_valid_content(galaxy_context, mocker):
    """Empty fetch results must raise GalaxyClientError from install.install()."""
    spec = RepositorySpec(namespace='some_namespace',
                          name='some_name',
                          version='4.3.2')

    fetcher = mocker.MagicMock(name='MockFetch')

    with pytest.raises(exceptions.GalaxyClientError,
                       match='No valid content data found for') as exc_info:
        install.install(galaxy_context,
                        fetcher=fetcher,
                        fetch_results={},
                        repository_spec=spec,
                        display_callback=display_callback)

    log.debug('exc_info: %s', exc_info)
Ejemplo n.º 14
0
def test_install_repositories(galaxy_context, mocker):
    """install_repositories should return exactly what install_repository yields."""
    repo_spec = RepositorySpec(namespace='some_namespace', name='some_name')
    expected_repos = [Repository(repository_spec=repo_spec)]

    mocker.patch('ansible_galaxy.actions.install.install_repository',
                 return_value=expected_repos)

    requirements_to_install = requirements.from_requirement_spec_strings(
        ['some_namespace.this_requires_some_name'])

    ret = install.install_repositories(galaxy_context,
                                       requirements_to_install=requirements_to_install,
                                       display_callback=display_callback)
    log.debug('ret: %s', ret)

    assert isinstance(ret, list)
    assert ret == expected_repos
Ejemplo n.º 15
0
def from_dependency_spec_strings(dependency_spec_strings, namespace_override=None, editable=False):
    """Build RUNTIME-scoped Requirements from dependency spec strings.

    :param dependency_spec_strings: iterable of spec strings (e.g. 'ns.name,1.2.3')
    :param namespace_override: optional namespace to use instead of the parsed one
    :param editable: whether the dependency is an editable install
    :return: list of Requirement objects with scope=RequirementScopes.RUNTIME
    """
    deps = []
    for dep_spec_string in dependency_spec_strings:
        # FIX: namespace_override/editable were accepted but never forwarded
        # to spec_data_from_string, so they had no effect. Forward them, as
        # the sibling from_requirement_spec_strings() does.
        dep_spec_data = spec_data_from_string(dep_spec_string,
                                              namespace_override=namespace_override,
                                              editable=editable)

        log.debug('dep_spec_data: %s', dep_spec_data)

        dep_spec = RepositorySpec.from_dict(dep_spec_data)

        log.debug('dep_spec: %s', dep_spec)

        # Add a requirement, but with the 'RUNTIME' scope
        requirement = Requirement(repository_spec=None, op=RequirementOps.EQ,
                                  scope=RequirementScopes.RUNTIME,
                                  requirement_spec=dep_spec)
        deps.append(requirement)

    return deps
Ejemplo n.º 16
0
def test_install(galaxy_context, tmpdir):
    """Build an artifact, install it under content_path, verify the results."""
    built = build_repo_artifact(galaxy_context, tmpdir)

    repo_archive = repository_archive.load_archive(
        built['build_results'].artifact_file_path)
    log.debug('repo_archive: %s', repo_archive)

    repo_spec = RepositorySpec(namespace='some_namespace',
                               name='some_name',
                               version='1.2.3')

    ns_repo_path = '%s/%s' % (repo_spec.namespace, repo_spec.name)
    content_root = galaxy_context.content_path

    destination_info = InstallDestinationInfo(
        destination_root_dir=content_root,
        repository_spec=repo_spec,
        extract_archive_to_dir=os.path.join(content_root, ns_repo_path, ''),
        namespaced_repository_path=ns_repo_path,
        install_info_path=os.path.join(content_root, ns_repo_path,
                                       'meta/.galaxy_install_info'),
        force_overwrite=True,
        editable=False)

    res = repository_archive.install(repo_archive,
                                     repo_spec,
                                     destination_info,
                                     display_callback=display_callback)
    log.debug('res: %s', res)

    assert isinstance(res, InstallationResults)
    assert isinstance(res.install_info, InstallInfo)
    assert isinstance(res.install_info.version, VersionInfo)
    assert isinstance(res.installed_datetime, datetime.datetime)
Ejemplo n.º 17
0
def from_requirement_spec_strings(requirement_spec_strings,
                                  namespace_override=None,
                                  editable=False,
                                  repository_spec=None):
    """Build Requirement objects from a list of requirement spec strings.

    repository_spec is the repository that requires these (may be None).
    """
    reqs = []
    for spec_string in requirement_spec_strings:
        spec = RepositorySpec.from_dict(
            spec_data_from_string(spec_string,
                                  namespace_override=namespace_override,
                                  editable=editable))

        reqs.append(Requirement(repository_spec=repository_spec,
                                op=RequirementOps.EQ,
                                requirement_spec=spec))

    return reqs
Ejemplo n.º 18
0
def load_from_archive(repository_archive, namespace=None, installed=True):
    """Build a Repository from a collection archive's metadata.

    Looks for metadata inside the archive's top-level directory, preferring
    MANIFEST.json (collection manifest) over galaxy.yml. Raises
    GalaxyArchiveError when neither is present. The archive is not
    extracted; only its metadata is read.

    :param repository_archive: object with a ``tar_file`` (tarfile) and
        ``info.archive_path`` / ``info.top_dir`` attributes.
    :param namespace: optional namespace override for the resulting spec.
    :param installed: passed through to ``Repository.installed``.
    """
    repo_tarfile = repository_archive.tar_file
    archive_path = repository_archive.info.archive_path

    # path_name = os.path.join(content_dir, namespace, name)
    # members inside the tar live under the archive's top-level directory
    path_name = repository_archive.info.top_dir

    manifest_filename = os.path.join(
        path_name, collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME)
    manifest_data = None

    log.debug('Trying to extract %s from %s', manifest_filename, archive_path)

    try:
        # tarfile.extractfile raises KeyError for a missing member
        mfd = repo_tarfile.extractfile(manifest_filename)
        if mfd:
            manifest_data = collection_artifact_manifest.load(mfd)
            log.debug('md: %s', manifest_data)
            log.debug('md.collection_info: %s', manifest_data.collection_info)
            log.debug('manifest_data.collection_info.name: %s',
                      manifest_data.collection_info.name)
    except KeyError as e:
        log.warning('No %s found in archive: %s (Error: %s)',
                    manifest_filename, archive_path, e)

    # load galaxy.yml
    galaxy_filename = os.path.join(path_name,
                                   collection_info.COLLECTION_INFO_FILENAME)

    collection_info_data = None

    try:
        gfd = repo_tarfile.extractfile(galaxy_filename)
        if gfd:
            collection_info_data = collection_info.load(gfd)
    except KeyError as e:
        log.warning('No %s found in archive: %s - %s', galaxy_filename,
                    archive_path, e)
        # log.debug('No galaxy.yml collection info found for collection %s.%s: %s', namespace, name, e)

    # TODO/FIXME: what takes precedence?
    #           - the dir name in the archive that a collection lives in ~/.ansible/content/my_ns/my_name
    #           - Or the namespace/name from galaxy.yml?
    # log.debug('collection_info_data: %s', collection_info_data)

    # Manifest wins over galaxy.yml; fail if neither was loaded.
    col_info = None
    if manifest_data:
        col_info = manifest_data.collection_info
        log.debug('md.col_info: %s', col_info)
    elif collection_info_data:
        col_info = collection_info_data
    else:
        raise exceptions.GalaxyArchiveError(
            'No galaxy collection info or manifest found in %s', archive_path)

    log.debug('col_info: %s', col_info)

    # FIXME: change collectionInfo to have separate name/namespace so we dont have to 'parse' the name
    # repo_spec = repository_spec.repository_spec_from_string(col_info.name, namespace_override=namespace)
    # spec_data = repository_spec_parse.parse_string(col_info.name)

    # log.debug('spec_data: %s', spec_data)
    # log.debug('repo_spec: %s', repo_spec)

    # Build a repository_spec of the repo now so we can pass it things like requirements.load()
    # that need to know what requires something
    # if we specify a namespace, use it otherwise use the info from galaxy.yml
    repo_spec = RepositorySpec(
        namespace=namespace or col_info.namespace,
        name=col_info.name,
        version=col_info.version,
        spec_string=archive_path,
        # fetch_method=None,
        src=archive_path)

    log.debug('repo spec from %s: %r', archive_path, repo_spec)

    requirements_list = []
    requirements_list = requirements.from_requirement_spec_strings(
        col_info.dependencies, repository_spec=repo_spec)

    repository = Repository(
        repository_spec=repo_spec,
        path=None,
        installed=installed,
        requirements=requirements_list,
        # Assuming this is a collection artifact, FIXME if we support role artifacts
        dependencies=[])

    log.debug('repository: %s', repository)

    return repository
Ejemplo n.º 19
0
def load_from_dir(content_dir, namespace, name, installed=True):
    """Build a Repository from an installed directory on disk.

    Probes several metadata sources in <content_dir>/<namespace>/<name>:
    meta/.galaxy_install_info, MANIFEST.json, galaxy.yml, a
    role-as-collection meta/main.yml, and requirements.yml -- each one
    optional. Returns None when the directory does not exist.
    """
    # TODO: or artifact

    path_name = os.path.join(content_dir, namespace, name)
    # TODO: add trad role or collection detection rules here
    #       Or possibly earlier so we could call 'collection' loading
    #       code/class or trad-role-as-collection loading code/class
    #       and avoid intermingly the impls.
    #       Maybe:
    #       if more than one role in roles/ -> collection

    if not os.path.isdir(path_name):
        log.debug(
            'The directory %s does not exist, unable to load a Repository from it',
            path_name)
        return None

    requirements_list = []

    # Now look for any install_info for the repository
    install_info_data = None
    install_info_filename = os.path.join(path_name,
                                         'meta/.galaxy_install_info')
    try:
        with open(install_info_filename, 'r') as ifd:
            install_info_data = install_info.load(ifd)
    except EnvironmentError as e:
        # missing install_info is expected for e.g. editable installs
        log.warning(
            'Unable to find or load meta/.galaxy_install_info for repository %s.%s: %s',
            namespace, name, e)

    # TODO: figure out what to do if the version from install_info conflicts with version
    #       from galaxy.yml etc.
    install_info_version = getattr(install_info_data, 'version', None)

    # Try to load a MANIFEST.json if we have one

    manifest_filename = os.path.join(
        path_name, collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME)
    manifest_data = None

    try:
        with open(manifest_filename, 'r') as mfd:
            manifest_data = collection_artifact_manifest.load(mfd)
    except EnvironmentError:
        # A missing manifest is fine; other sources are tried below.
        # log.debug('No galaxy.yml collection info found for collection %s.%s: %s', namespace, name, e)
        pass

    # load galaxy.yml
    galaxy_filename = os.path.join(path_name,
                                   collection_info.COLLECTION_INFO_FILENAME)

    collection_info_data = None

    try:
        with open(galaxy_filename, 'r') as gfd:
            collection_info_data = collection_info.load(gfd)
    except EnvironmentError:
        # galaxy.yml is also optional
        # log.debug('No galaxy.yml collection info found for collection %s.%s: %s', namespace, name, e)
        pass

    # Now try the repository as a role-as-collection
    # FIXME: For a repository with one role that matches the collection name and doesn't
    #        have a galaxy.yml, that's indistinguishable from a role-as-collection
    # FIXME: But in theory, if there is more than one role in roles/, we should skip this
    role_meta_main_filename = os.path.join(path_name, 'roles', name, 'meta',
                                           'main.yml')
    role_meta_main = None
    role_name = '%s.%s' % (namespace, name)

    try:
        with open(role_meta_main_filename, 'r') as rmfd:
            # FIXME: kluge to avoid circular import on py2
            #        repository->role_metadata->dependencies->repository_spec->repository (loop)
            #        repository->requirements->repository_spec->repository (loop)
            from ansible_galaxy import role_metadata
            role_meta_main = role_metadata.load(rmfd, role_name=role_name)
    except EnvironmentError:
        # no meta/main.yml -> not a role-as-collection; that's fine
        # log.debug('No meta/main.yml was loaded for repository %s.%s: %s', namespace, name, e)
        pass

    # Prefer version from install_info, but for a editable installed, there may be only galaxy version
    # NOTE: manifest/galaxy.yml versions actually override install_info here
    installed_version = install_info_version
    if manifest_data:
        installed_version = manifest_data.collection_info.version
    elif collection_info_data:
        installed_version = collection_info_data.version
    # if role_meta_main:
    #    installed_version = installed_version or role_meta_main.version

    # TODO/FIXME: what takes precedence?
    #           - the dir names a collection lives in ~/.ansible/content/my_ns/my_name
    #           - Or the namespace/name from galaxy.yml?
    # log.debug('collection_info_data: %s', collection_info_data)

    # Build a repository_spec of the repo now so we can pass it things like requirements.load()
    # that need to know what requires something
    repository_spec = RepositorySpec(namespace=namespace,
                                     name=name,
                                     version=installed_version)

    # The current galaxy.yml 'dependencies' are actually 'requirements' in ansible/ansible terminology
    # (ie, install-time)
    if collection_info_data:
        collection_requires = requirements.from_dependencies_dict(
            collection_info_data.dependencies, repository_spec=repository_spec)
        requirements_list.extend(collection_requires)

    # TODO: add requirements loaded from galaxy.yml
    # TODO: should the requirements in galaxy.yml be plain strings or dicts?
    # TODO: should there be requirements in galaxy.yml at all? in liue of requirements.yml
    # collection_info_requirements = []

    requirements_filename = os.path.join(path_name, 'requirements.yml')

    try:
        with open(requirements_filename, 'r') as rfd:
            requirements_list.extend(
                requirements.load(rfd, repository_spec=repository_spec))
    except EnvironmentError:
        # requirements.yml is optional as well
        # log.debug('No requirements.yml was loaded for repository %s.%s: %s', namespace, name, e)
        pass

    # TODO: if there are other places to load dependencies (ie, runtime deps) we will need
    #       to load them and combine them with role_depenency_specs
    role_dependency_specs = []
    if role_meta_main:
        role_dependency_specs = role_meta_main.dependencies

    repository = Repository(repository_spec=repository_spec,
                            path=path_name,
                            installed=installed,
                            requirements=requirements_list,
                            dependencies=role_dependency_specs)

    log.debug('Repository %s loaded from %s', repository.repository_spec.label,
              path_name)

    return repository
Ejemplo n.º 20
0
    def find(self):
        """Resolve self.repository_spec against the Galaxy server.

        Looks up the repo by namespace/name, fetches its available
        versions, picks the best match for the requested version, and
        returns a results dict describing the install candidate.

        :raises exceptions.GalaxyClientError: when the repo is not found.
        :raises exceptions.GalaxyError: when the repo has no external_url.
        """
        api = GalaxyAPI(self.galaxy_context)

        namespace = self.repository_spec.namespace
        repo_name = self.repository_spec.name

        log.debug('Querying %s for namespace=%s, name=%s', self.galaxy_context.server['url'], namespace, repo_name)

        # TODO: extract parsing of cli content sorta-url thing and add better tests

        # FIXME: exception handling
        repo_data = api.lookup_repo_by_name(namespace, repo_name)

        if not repo_data:
            raise exceptions.GalaxyClientError("- sorry, %s was not found on %s." % (self.repository_spec.label,
                                                                                     api.api_server))

        # FIXME - Need to update our API calls once Galaxy has them implemented
        related = repo_data.get('related', {})

        repo_versions_url = related.get('versions', None)

        # FIXME: exception handling
        repoversions = api.fetch_content_related(repo_versions_url)

        # keep only entries that actually carry a version string
        content_repo_versions = [a.get('version') for a in repoversions if a.get('version', None)]

        # FIXME: mv to it's own method
        # FIXME: pass these to fetch() if it really needs it
        repo_version_best = repository_version.get_repository_version(repo_data,
                                                                      version=self.repository_spec.version,
                                                                      repository_versions=content_repo_versions,
                                                                      content_content_name=self.repository_spec.name)

        # get the RepositoryVersion obj (or its data anyway)
        _repoversion = select_repository_version(repoversions, repo_version_best)

        # external_url isnt specific, it could be something like github.com/alikins/some_collection
        # external_url is the third option after a 'download_url' provided by the galaxy rest API
        # (repo version specific download_url first if applicable, then the general download_url)
        # Note: download_url can point anywhere...
        external_url = repo_data.get('external_url', None)

        if not external_url:
            raise exceptions.GalaxyError('no external_url info on the Repository object from %s' % self.repository_spec.label)

        # The repo spec of the install candidate with potentially a different version
        potential_repository_spec = RepositorySpec(namespace=namespace,
                                                   name=repo_name,
                                                   version=_repoversion['version'],
                                                   fetch_method=self.repository_spec.fetch_method,
                                                   scm=self.repository_spec.scm,
                                                   spec_string=self.repository_spec.spec_string,
                                                   src=self.repository_spec.src)

        # everything downstream (fetch etc.) pulls what it needs from here
        results = {'content': {'galaxy_namespace': namespace,
                               'repo_name': repo_name},
                   'specified_content_version': self.repository_spec.version,
                   'specified_repository_spec': self.repository_spec,
                   'custom': {'content_repo_versions': content_repo_versions,
                              'external_url': external_url,
                              'galaxy_context': self.galaxy_context,
                              'related': related,
                              'repo_data': repo_data,
                              'repo_versions_url': repo_versions_url,
                              'repoversion': _repoversion,
                              'potential_repository_spec': potential_repository_spec},
                   }

        return results
Ejemplo n.º 21
0
import logging
import os
import tempfile
import pytest

from ansible_galaxy import repository_spec
from ansible_galaxy import exceptions
from ansible_galaxy.models.repository_spec import RepositorySpec, FetchMethods

log = logging.getLogger(__name__)

repo_spec_from_string_cases = \
    [
        {'spec': 'geerlingguy.apache',
         'expected': RepositorySpec(name='apache', namespace='geerlingguy',
                                    version=None, fetch_method=FetchMethods.GALAXY_URL)},
        {'spec': 'geerlingguy.apache,2.1.1',
         'expected': RepositorySpec(name='apache', namespace='geerlingguy',
                                    version='2.1.1', fetch_method=FetchMethods.GALAXY_URL)},
        {'spec': 'testing.ansible-testing-content',
         'expected': RepositorySpec(name='ansible-testing-content', namespace='testing',
                                    version=None, fetch_method=FetchMethods.GALAXY_URL)},
        # {'spec': 'testing.ansible-testing-content,name=testing-content',
        # 'expected': RepositorySpec(name='testing-content', namespace='testing')},
        # {'spec': 'alikins.awx',
        # 'expected': RepositorySpec(name='awx', namespace='alikins')},
        {'spec': 'testing.ansible-testing-content,1.2.3,name=testing-content',
         'expected': RepositorySpec(name='testing-content', namespace='testing',
                                    version='1.2.3', fetch_method=FetchMethods.GALAXY_URL)},
        {'spec': 'testing.ansible-testing-content,1.2.3,also-testing-content,stuff',
         'expected': RepositorySpec(name='also-testing-content', namespace='testing',
Ejemplo n.º 22
0
def install_repository_specs_loop(
        galaxy_context,
        repository_spec_strings=None,
        requirements_list=None,
        editable=False,
        namespace_override=None,
        display_callback=None,
        # TODO: error handling callback ?
        ignore_errors=False,
        no_deps=False,
        force_overwrite=False):
    """Resolve repository spec strings into requirements and install them.

    Each string in repository_spec_strings is parsed into a Requirement and
    appended to requirements_list (NOTE: a caller-supplied requirements_list
    is mutated in place). The install loop then repeats, feeding any new
    dependencies reported by the previous pass back in, until nothing more
    is required.

    Returns 0 on completion.  # FIXME: what results to return?
    """
    requirements_list = requirements_list or []
    # BUGFIX: guard against the default None so callers may pass only
    # requirements_list without hitting a TypeError on iteration below
    # (requirements_list already had this guard; this arg did not).
    repository_spec_strings = repository_spec_strings or []

    for repository_spec_string in repository_spec_strings:
        fetch_method = \
            repository_spec.choose_repository_fetch_method(repository_spec_string,
                                                           editable=editable)
        log.debug('fetch_method: %s', fetch_method)

        if fetch_method == FetchMethods.LOCAL_FILE:
            # Since only know this is a local file we vaguely recognize, we have to
            # open it up to get any more details. We _could_ attempt to parse the file
            # name, but that rarely ends well...
            spec_data = collection_artifact.load_data_from_collection_artifact(
                repository_spec_string)
            spec_data['fetch_method'] = fetch_method
        else:
            spec_data = repository_spec.spec_data_from_string(
                repository_spec_string,
                namespace_override=namespace_override,
                editable=editable)

            spec_data['fetch_method'] = fetch_method

        log.debug('spec_data: %s', spec_data)

        req_spec = RepositorySpec.from_dict(spec_data)

        # repository_spec=None: nothing "requires" a spec given on the CLI
        req = Requirement(repository_spec=None,
                          op=RequirementOps.EQ,
                          requirement_spec=req_spec)

        requirements_list.append(req)

    log.debug('requirements_list: %s', requirements_list)
    for req in requirements_list:
        display_callback('Installing %s' % req.requirement_spec.label,
                         level='info')

    while True:
        if not requirements_list:
            break

        just_installed_repositories = \
            install_repositories_matching_repository_specs(galaxy_context,
                                                           requirements_list,
                                                           editable=editable,
                                                           namespace_override=namespace_override,
                                                           display_callback=display_callback,
                                                           ignore_errors=ignore_errors,
                                                           no_deps=no_deps,
                                                           force_overwrite=force_overwrite)

        # set the repository_specs to search for to whatever the install reported as being needed yet
        requirements_list = find_new_deps_from_installed(
            galaxy_context, just_installed_repositories, no_deps=no_deps)

        for req in requirements_list:
            if req.repository_spec:
                msg = 'Installing requirement %s (required by %s)' % (
                    req.requirement_spec.label, req.repository_spec.label)
            else:
                msg = 'Installing requirement %s' % req.requirement_spec.label
            display_callback(msg, level='info')

    # FIXME: what results to return?
    return 0
Ejemplo n.º 23
0
def load_from_dir(content_dir,
                  namespace_path,
                  namespace,
                  name,
                  installed=True):
    """Load a Repository from an installed collection directory on disk.

    Looks in ``<namespace_path>/<name>`` for the optional metadata files
    ``meta/.galaxy_install_info``, ``MANIFEST.json``, and ``galaxy.yml``,
    then builds a Repository (with its requirements) from whatever was found.

    :param content_dir: unused here — TODO(review): confirm callers need it
    :param namespace_path: filesystem path of the namespace directory
    :param namespace: galaxy namespace of the repository
    :param name: repository/collection name
    :param installed: value passed through to Repository.installed
    :return: a Repository, or None if the directory does not exist
    """
    path_name = os.path.join(namespace_path, name)

    log.debug('Loading repository %s.%s from path: %s', namespace, name,
              path_name)

    if not os.path.isdir(path_name):
        log.debug(
            'The directory %s does not exist, unable to load a Repository from it',
            path_name)
        return None

    # Now look for any install_info for the repository
    install_info_data = None
    install_info_filename = os.path.join(path_name,
                                         'meta/.galaxy_install_info')

    try:
        with open(install_info_filename, 'r') as ifd:
            install_info_data = install_info.load(ifd)
    except EnvironmentError as e:
        # install_info is optional (eg, editable installs), so just warn
        log.warning(
            'Unable to find or load meta/.galaxy_install_info for repository %s.%s: %s',
            namespace, name, e)

    # TODO: figure out what to do if the version from install_info conflicts with version
    #       from galaxy.yml etc.
    install_info_version = getattr(install_info_data, 'version', None)

    # Try to load a MANIFEST.json if we have one
    manifest_filename = os.path.join(
        path_name, collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME)
    manifest_data = None

    try:
        with open(manifest_filename, 'r') as mfd:
            manifest_data = collection_artifact_manifest.load(mfd)
    except EnvironmentError:
        # MANIFEST.json is optional; fall back to galaxy.yml below
        pass

    # TODO/FIXME: do we even need to load the file manifest
    #             (collection_artifact_file_manifest) here? Removed the dead
    #             commented-out loader; restore from history if needed.

    # load galaxy.yml
    galaxy_filename = os.path.join(path_name,
                                   collection_info.COLLECTION_INFO_FILENAME)

    galaxy_yml_data = None

    try:
        with open(galaxy_filename, 'r') as gfd:
            # BUGFIX: dropped the former 'if gfd:' guard — an open file
            # object is always truthy, so the check was dead code.
            galaxy_yml_data = collection_info.load(gfd)
    except EnvironmentError:
        # for the case of collections that are not from or intended for galaxy, they do not
        # need to provide a galaxy.yml or MANIFEST.json, so an error here is acceptable.
        pass

    # TODO: make existence of a galaxy.yml and a MANIFEST.json mutually exclusive and raise an exception for that case

    col_info = None
    # MANIFEST.json is higher prec than galaxy.yml
    if galaxy_yml_data:
        col_info = galaxy_yml_data

    if manifest_data:
        col_info = manifest_data.collection_info

    # NOTE(review): when both exist, the version from MANIFEST.json/galaxy.yml
    # (col_info) wins; install_info_version is only the fallback (eg, for an
    # editable install that has install_info but no collection metadata).
    installed_version = install_info_version
    if col_info:
        installed_version = col_info.version

    # TODO/FIXME: what takes precedence?
    #           - the dir names a collection lives in ~/.ansible/content/my_ns/my_name
    #           - Or the namespace/name from galaxy.yml?
    #           - Or the namespace/name from MANIFEST.json
    #         Ditto for requirements

    # Build a repository_spec of the repo now so we can pass it things like
    # requirements.from_dependencies_dict that need to know what requires something.
    repository_spec = RepositorySpec(namespace=namespace,
                                     name=name,
                                     version=installed_version)

    # The current galaxy.yml 'dependencies' are actually 'requirements' in ansible/ansible terminology
    # (ie, install-time)
    requirements_list = []
    if col_info:
        requirements_list = requirements.from_dependencies_dict(
            col_info.dependencies, repository_spec=repository_spec)

    repository = Repository(
        repository_spec=repository_spec,
        path=path_name,
        installed=installed,
        requirements=requirements_list,
    )

    log.debug('Loaded repository %s from %s', repository.repository_spec.label,
              path_name)

    return repository
Ejemplo n.º 24
0
import logging
import os
import tempfile
import pytest

from ansible_galaxy import repository_spec
from ansible_galaxy import exceptions
from ansible_galaxy.models.repository_spec import RepositorySpec, FetchMethods

log = logging.getLogger(__name__)

repo_spec_from_string_cases = \
    [
        {'spec': 'geerlingguy.apache',
         'expected': RepositorySpec(name='apache', namespace='geerlingguy')},
        {'spec': 'geerlingguy.apache,2.1.1',
         'expected': RepositorySpec(name='apache', namespace='geerlingguy', version='2.1.1')},
        {'spec': 'testing.ansible-testing-content',
         'expected': RepositorySpec(name='ansible-testing-content', namespace='testing')},
        {'spec': 'testing.ansible-testing-content,name=testing-content',
         'expected': RepositorySpec(name='testing-content', namespace='testing')},
        {'spec': 'alikins.awx',
         'expected': RepositorySpec(name='awx', namespace='alikins')},
        {'spec': 'testing.ansible-testing-content,1.2.3,name=testing-content',
         'expected': RepositorySpec(name='testing-content', namespace='testing', version='1.2.3')},
        {'spec': 'testing.ansible-testing-content,1.2.3,also-testing-content,stuff',
         'expected': RepositorySpec(name='also-testing-content', namespace='testing', version='1.2.3')},
        # for git/tar/url, we dont try to guess the namespace, so the expected result is namespace=None
        # here. cli adds a namespace from --namespace here.
        {'spec': 'git+https://github.com/geerlingguy/ansible-role-apache.git,version=2.0.0',
         'expected': RepositorySpec(name='ansible-role-apache', namespace=None, version='2.0.0',
Ejemplo n.º 25
0
def CR(namespace=None, name=None):
    """Test helper: build a Repository whose spec has the given namespace/name."""
    return Repository(
        repository_spec=RepositorySpec(namespace=namespace, name=name))
Ejemplo n.º 26
0
def CR(namespace=None, name=None):
    """Test helper: build a Repository at fixed version '1.2.3' for namespace/name."""
    return Repository(
        repository_spec=RepositorySpec(namespace=namespace,
                                       name=name,
                                       version='1.2.3'))