def test_editable_fetch_find(galaxy_context, mocker, tmpdir):
    """An editable fetch against a local directory should report the overridden
    namespace, the directory's name, and its real path in find() results."""
    collection_name = 'mazer_fetch_test_editable'
    ns_override = 'some_editable_namespace'
    editable_dir = tmpdir.mkdir(collection_name)

    # Build a requirement pointing at the local dir, marked editable.
    reqs = requirements.from_dependencies_dict(
        {editable_dir.strpath: '*'},
        namespace_override=ns_override,
        editable=True)

    import pprint
    log.debug('more_reqs: %s', pprint.pformat(reqs))

    fetcher = editable.EditableFetch(galaxy_context,
                                     reqs[0].requirement_spec)
    log.debug(fetcher)

    res = fetcher.find()
    log.debug('res: %s', res)

    assert isinstance(res, dict)
    assert res['content']['galaxy_namespace'] == ns_override
    assert res['content']['repo_name'] == collection_name
    assert res['custom']['real_path'] == editable_dir.strpath
def load_from_archive(repository_archive, namespace=None, installed=True):
    """Build a Repository from a collection artifact archive.

    Extracts the collection manifest (MANIFEST.json) from the archive's
    tarfile, then builds a RepositorySpec (honoring a namespace override)
    and the Requirement list implied by the manifest's dependencies.

    Args:
        repository_archive: object exposing a ``.tar_file`` tarfile and
            ``.info.archive_path`` (project type).
        namespace: optional namespace override; falls back to the
            manifest's ``collection_info.namespace``.
        installed: marks the resulting Repository as installed.

    Returns:
        A Repository built from the manifest data (path is None since it
        was loaded from an archive, not an installed dir).

    Raises:
        exceptions.GalaxyArchiveError: if no manifest is found in the archive.
    """
    repo_tarfile = repository_archive.tar_file
    archive_path = repository_archive.info.archive_path

    # FIX: the original wrapped the constant in a single-argument
    # os.path.join(), which is a no-op — the manifest lives at the
    # archive root, so the member name is just the bare filename.
    manifest_filename = collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME

    manifest_data = None

    log.debug('Trying to extract %s from %s', manifest_filename, archive_path)

    try:
        # extractfile() returns None for non-regular members and raises
        # KeyError when the member is absent entirely — handle both.
        mfd = repo_tarfile.extractfile(manifest_filename)
        if mfd:
            manifest_data = collection_artifact_manifest.load(mfd)
            log.debug('md: %s', manifest_data)
            log.debug('md.collection_info: %s', manifest_data.collection_info)
            log.debug('manifest_data.collection_info.name: %s',
                      manifest_data.collection_info.name)
    except KeyError as e:
        log.warning('No %s found in archive: %s (Error: %s)',
                    manifest_filename, archive_path, e)

    if not manifest_data:
        raise exceptions.GalaxyArchiveError(
            'No collection manifest (%s) found in %s' %
            (collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME,
             archive_path),
            archive_path=archive_path)

    col_info = manifest_data.collection_info
    log.debug('col_info: %s', col_info)

    # if we specify a namespace, use it otherwise use the info from the manifest col_info
    repo_spec = RepositorySpec(namespace=namespace or col_info.namespace,
                               name=col_info.name,
                               version=col_info.version,
                               spec_string=archive_path,
                               # fetch_method=None,
                               src=archive_path)

    log.debug('repo spec from %s: %r', archive_path, repo_spec)

    requirements_list = requirements.from_dependencies_dict(
        col_info.dependencies, repository_spec=repo_spec)

    repository = Repository(repository_spec=repo_spec,
                            path=None,
                            installed=installed,
                            requirements=requirements_list,
                            )

    log.debug('repository: %s', repository)

    return repository
def test_install_repositories(galaxy_context, mocker):
    """install_repositories() should return whatever the (mocked)
    install_repository() reports as installed, as a list."""
    spec = RepositorySpec(namespace='some_namespace',
                          name='some_name',
                          version='9.4.5')
    installed_repos = [Repository(repository_spec=spec)]

    to_install = requirements.from_dependencies_dict(
        {'some_namespace.this_requires_some_name': '*'})

    # Short-circuit the per-repo install with a canned result.
    mocker.patch('ansible_galaxy.actions.install.install_repository',
                 return_value=installed_repos)

    result = install.install_repositories(galaxy_context,
                                          requirements_to_install=to_install,
                                          display_callback=display_callback)
    log.debug('ret: %s', result)

    assert isinstance(result, list)
    assert result == installed_repos
def test_install_repository_validate_artifacts_exception(
        galaxy_context, mocker):
    """A checksum mismatch raised during fetch should surface to the caller
    as a GalaxyClientError mentioning the artifact and the expected sum."""
    to_install = requirements.from_dependencies_dict(
        {'some_namespace.this_requires_some_name': '*'})

    mock_find_results = {
        'content': {'galaxy_namespace': 'some_namespace',
                    'repo_name': 'some_name'},
        'custom': {'repo_data': {},
                   'download_url': 'http://foo.invalid/stuff/blip.tar.gz',
                   'repoversion': {'version': '9.3.245'},
                   'collection_is_deprecated': True,
                   },
    }

    mocker.patch('ansible_galaxy.actions.install.install.find',
                 return_value=mock_find_results)
    # Make the fetch step blow up with a checksum error.
    mocker.patch('ansible_galaxy.actions.install.install.fetch',
                 side_effect=exceptions.GalaxyArtifactChksumError(
                     artifact_path='/dev/null/fake/path',
                     expected='FAKEEXPECTEDa948904f2f0f479',
                     actual='FAKEACTUAL4b0d2ed1c1cd2a1ec0fb85d2'))

    expected_match = ("While fetching some_namespace.*/dev/null/fake/path"
                      ".*did not match.*FAKEEXPECTEDa948904f2f0f479")
    with pytest.raises(exceptions.GalaxyClientError,
                       match=expected_match) as exc_info:
        install.install_repository(galaxy_context,
                                   requirement_to_install=to_install[0],
                                   display_callback=display_callback)

    log.debug('exc_info: %s', exc_info)
def test_install_repository_deprecated(galaxy_context, mocker):
    """Installing a deprecated collection should emit a deprecation warning
    through the display callback."""
    to_install = requirements.from_dependencies_dict(
        {'some_namespace.this_requires_some_name': '*'})

    mock_find_results = {
        'content': {'galaxy_namespace': 'some_namespace',
                    'repo_name': 'some_name'},
        'custom': {'repo_data': {},
                   'download_url': 'http://foo.invalid/stuff/blip.tar.gz',
                   'repoversion': {'version': '9.3.245'},
                   'collection_is_deprecated': True,
                   },
    }

    # Stub out the network/filesystem steps entirely.
    mocker.patch('ansible_galaxy.actions.install.install.find',
                 return_value=mock_find_results)
    mocker.patch('ansible_galaxy.actions.install.install.fetch')
    mocker.patch('ansible_galaxy.actions.install.install.install')

    mock_display_callback = mocker.MagicMock(name='mock_display_callback')

    ret = install.install_repository(galaxy_context,
                                     requirement_to_install=to_install[0],
                                     display_callback=mock_display_callback)

    expected_warning = mocker.call(
        "The collection 'some_namespace.this_requires_some_name' is deprecated.",
        level='warning')

    log.debug('ret: %s', ret)

    assert expected_warning in mock_display_callback.call_args_list
def load_from_dir(content_dir, namespace, name, installed=True):
    """Load a Repository from an installed content directory.

    Looks at ``content_dir/namespace/name`` and tries, in order, the
    install_info metadata, a MANIFEST.json, a galaxy.yml, and a
    role-as-collection meta/main.yml to determine version, requirements
    and role dependencies.

    :param content_dir: root of the installed content tree
    :param namespace: collection namespace (also the directory name)
    :param name: collection/repository name (also the directory name)
    :param installed: mark the resulting Repository as installed
    :return: a Repository, or None if the directory does not exist
    """
    # TODO: or artifact
    path_name = os.path.join(content_dir, namespace, name)

    # TODO: add trad role or collection detection rules here
    # Or possibly earlier so we could call 'collection' loading
    # code/class or trad-role-as-collection loading code/class
    # and avoid intermingly the impls.

    # Maybe:
    # if more than one role in roles/  -> collection

    if not os.path.isdir(path_name):
        log.debug('The directory %s does not exist, unable to load a Repository from it', path_name)
        return None

    requirements_list = []

    # Now look for any install_info for the repository
    install_info_data = None
    install_info_filename = os.path.join(path_name, 'meta/.galaxy_install_info')
    try:
        with open(install_info_filename, 'r') as ifd:
            install_info_data = install_info.load(ifd)
    except EnvironmentError as e:
        # best-effort: a repo without install_info is still loadable
        log.warning('Unable to find or load meta/.galaxy_install_info for repository %s.%s: %s', namespace, name, e)

    # TODO: figure out what to do if the version from install_info conflicts with version
    #       from galaxy.yml etc.
    install_info_version = getattr(install_info_data, 'version', None)

    # Try to load a MANIFEST.json if we have one
    manifest_filename = os.path.join(path_name, collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME)
    manifest_data = None
    try:
        with open(manifest_filename, 'r') as mfd:
            manifest_data = collection_artifact_manifest.load(mfd)
    except EnvironmentError:
        # a missing MANIFEST.json is fine; fall through to galaxy.yml
        # log.debug('No galaxy.yml collection info found for collection %s.%s: %s', namespace, name, e)
        pass

    # load galaxy.yml
    galaxy_filename = os.path.join(path_name, collection_info.COLLECTION_INFO_FILENAME)

    collection_info_data = None
    try:
        with open(galaxy_filename, 'r') as gfd:
            collection_info_data = collection_info.load(gfd)
    except EnvironmentError:
        # a missing galaxy.yml is also fine
        # log.debug('No galaxy.yml collection info found for collection %s.%s: %s', namespace, name, e)
        pass

    # Now try the repository as a role-as-collection
    # FIXME: For a repository with one role that matches the collection name and doesn't
    #        have a galaxy.yml, that's indistinguishable from a role-as-collection
    # FIXME: But in theory, if there is more than one role in roles/, we should skip this
    role_meta_main_filename = os.path.join(path_name, 'roles', name, 'meta', 'main.yml')
    role_meta_main = None
    role_name = '%s.%s' % (namespace, name)
    try:
        with open(role_meta_main_filename, 'r') as rmfd:
            # FIXME: kluge to avoid circular import on py2
            #        repository->role_metadata->dependencies->repository_spec->repository (loop)
            #        repository->requirements->repository_spec->repository (loop)
            from ansible_galaxy import role_metadata
            role_meta_main = role_metadata.load(rmfd, role_name=role_name)
    except EnvironmentError:
        # not a role-as-collection; that's fine too
        # log.debug('No meta/main.yml was loaded for repository %s.%s: %s', namespace, name, e)
        pass

    # Prefer version from install_info, but for a editable installed, there may be only galaxy version
    # Precedence: MANIFEST.json > galaxy.yml > install_info
    installed_version = install_info_version
    if manifest_data:
        installed_version = manifest_data.collection_info.version
    elif collection_info_data:
        installed_version = collection_info_data.version
    # if role_meta_main:
    #    installed_version = installed_version or role_meta_main.version

    # TODO/FIXME: what takes precedence?
    #   - the dir names a collection lives in ~/.ansible/content/my_ns/my_name
    #   - Or the namespace/name from galaxy.yml?
    # log.debug('collection_info_data: %s', collection_info_data)

    # Build a repository_spec of the repo now so we can pass it things like requirements.load()
    # that need to know what requires something
    repository_spec = RepositorySpec(namespace=namespace,
                                     name=name,
                                     version=installed_version)

    # The current galaxy.yml 'dependencies' are actually 'requirements' in ansible/ansible terminology
    # (ie, install-time)
    if collection_info_data:
        collection_requires = requirements.from_dependencies_dict(collection_info_data.dependencies,
                                                                  repository_spec=repository_spec)
        requirements_list.extend(collection_requires)

    # TODO: add requirements loaded from galaxy.yml
    # TODO: should the requirements in galaxy.yml be plain strings or dicts?
    # TODO: should there be requirements in galaxy.yml at all? in liue of requirements.yml
    # collection_info_requirements = []

    requirements_filename = os.path.join(path_name, 'requirements.yml')

    try:
        with open(requirements_filename, 'r') as rfd:
            requirements_list.extend(requirements.load(rfd, repository_spec=repository_spec))
    except EnvironmentError:
        # requirements.yml is optional
        # log.debug('No requirements.yml was loaded for repository %s.%s: %s', namespace, name, e)
        pass

    # TODO: if there are other places to load dependencies (ie, runtime deps) we will need
    #       to load them and combine them with role_depenency_specs
    role_dependency_specs = []
    if role_meta_main:
        role_dependency_specs = role_meta_main.dependencies

    repository = Repository(repository_spec=repository_spec,
                            path=path_name,
                            installed=installed,
                            requirements=requirements_list,
                            dependencies=role_dependency_specs)

    log.debug('Repository %s loaded from %s', repository.repository_spec.label, path_name)

    return repository
def load_from_dir(content_dir, namespace_path, namespace, name, installed=True):
    """Load a collection Repository from an installed directory.

    Looks at ``namespace_path/name`` and combines install_info,
    MANIFEST.json and galaxy.yml (MANIFEST.json takes precedence over
    galaxy.yml) to determine the installed version and requirements.

    :param content_dir: root of the installed content tree (unused here
        beyond the signature; path is derived from namespace_path)
    :param namespace_path: path of the namespace directory
    :param namespace: collection namespace
    :param name: collection/repository name
    :param installed: mark the resulting Repository as installed
    :return: a Repository, or None if the directory does not exist
    """
    path_name = os.path.join(namespace_path, name)

    log.debug('Loading repository %s.%s from path: %s', namespace, name, path_name)

    if not os.path.isdir(path_name):
        log.debug('The directory %s does not exist, unable to load a Repository from it', path_name)
        return None

    # Now look for any install_info for the repository
    install_info_data = None
    install_info_filename = os.path.join(path_name, 'meta/.galaxy_install_info')
    try:
        with open(install_info_filename, 'r') as ifd:
            install_info_data = install_info.load(ifd)
    except EnvironmentError as e:
        # best-effort: a repo without install_info is still loadable
        log.warning('Unable to find or load meta/.galaxy_install_info for repository %s.%s: %s', namespace, name, e)

    # TODO: figure out what to do if the version from install_info conflicts with version
    #       from galaxy.yml etc.
    install_info_version = getattr(install_info_data, 'version', None)

    # Try to load a MANIFEST.json if we have one
    manifest_filename = os.path.join(path_name, collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME)
    manifest_data = None
    try:
        with open(manifest_filename, 'r') as mfd:
            manifest_data = collection_artifact_manifest.load(mfd)
    except EnvironmentError:
        # a missing MANIFEST.json is fine; fall through to galaxy.yml
        pass

    # # TODO/FIXME: do we even need to load file_manifest here?
    # file_manifest_filename = os.path.join(path_name, collection_artifact_file_manifest.COLLECTION_FILE_MANIFEST_FILENAME)
    # file_manifest_data = None
    # try:
    #     with open(file_manifest_filename, 'r') as mfd:
    #         file_manifest_data = collection_artifact_file_manifest.load(mfd)
    # except EnvironmentError:
    #     # log.debug('No galaxy.yml collection info found for collection %s.%s: %s', namespace, name, e)
    #     pass

    # load galaxy.yml
    galaxy_filename = os.path.join(path_name, collection_info.COLLECTION_INFO_FILENAME)

    galaxy_yml_data = None
    try:
        with open(galaxy_filename, 'r') as gfd:
            # FIX: removed a redundant 'if gfd:' guard — a successfully
            # opened file object is always truthy inside the with block.
            galaxy_yml_data = collection_info.load(gfd)
    except EnvironmentError:
        # for the case of collections that are not from or intended for galaxy, they do not
        # need to provide a galaxy.yml or MANIFEST.json, so an error here is exceptable.
        pass

    # TODO: make existence of a galaxy.yml and a MANIFEST.json mutual exclude and raise an exception for that case

    # MANIFEST.json is higher prec than galaxy.yml
    col_info = None
    if galaxy_yml_data:
        col_info = galaxy_yml_data
    if manifest_data:
        col_info = manifest_data.collection_info

    # Prefer version from install_info, but for a editable installed, there may be only galaxy version
    installed_version = install_info_version
    if col_info:
        installed_version = col_info.version

    # TODO/FIXME: what takes precedence?
    #   - the dir names a collection lives in ~/.ansible/content/my_ns/my_name
    #   - Or the namespace/name from galaxy.yml?
    #   - Or the namespace/name from MANIFEST.json
    #  Ditto for requirements

    # Build a repository_spec of the repo now so we can pass it things like
    # requirements.from_dependencies_dict that need to know what requires something.
    repository_spec = RepositorySpec(namespace=namespace,
                                     name=name,
                                     version=installed_version)

    # The current galaxy.yml 'dependencies' are actually 'requirements' in ansible/ansible terminology
    # (ie, install-time)
    requirements_list = []
    if col_info:
        requirements_list = requirements.from_dependencies_dict(col_info.dependencies,
                                                                repository_spec=repository_spec)

    repository = Repository(repository_spec=repository_spec,
                            path=path_name,
                            installed=installed,
                            requirements=requirements_list,
                            )

    log.debug('Loaded repository %s from %s', repository.repository_spec.label, path_name)

    return repository
def install_repository_specs_loop(galaxy_context,
                                  repository_spec_strings=None,
                                  requirements_list=None,
                                  collections_lockfile_path=None,
                                  editable=False,
                                  namespace_override=None,
                                  display_callback=None,
                                  # TODO: error handling callback ?
                                  ignore_errors=False,
                                  no_deps=False,
                                  force_overwrite=False):
    """Resolve spec strings (and an optional lockfile) into requirements,
    then install in rounds until no new dependencies are reported.

    :param galaxy_context: project context (server config, content path)
    :param repository_spec_strings: iterable of user-supplied spec strings
        (name, local file path, or remote url)
    :param requirements_list: pre-built Requirement objects to start with
    :param collections_lockfile_path: optional lockfile whose dependencies
        are merged into the requirements
    :param editable: install local dirs as editable (symlinked)
    :param namespace_override: override the namespace parsed from a spec
    :param display_callback: callable(msg, level=...) for user output
    :param ignore_errors: passed through to the per-round installer
    :param no_deps: do not chase newly discovered dependencies
    :param force_overwrite: passed through to the per-round installer
    :return: 0 (FIXME below: result value is not yet meaningful)
    """
    requirements_list = requirements_list or []

    for repository_spec_string in repository_spec_strings:
        fetch_method = \
            repository_spec_parse.choose_repository_fetch_method(repository_spec_string,
                                                                 editable=editable)
        log.debug('fetch_method: %s', fetch_method)

        if fetch_method == FetchMethods.LOCAL_FILE:
            # Since we only know this is a local file we vaguely recognize, we have to
            # open it up to get any more details. We _could_ attempt to parse the file
            # name, but that rarely ends well. Filename could also be arbitrary for downloads
            # from remote urls ('mazer install http://myci.example.com/somebuildjob/latest' etc)
            spec_data = collection_artifact.load_data_from_collection_artifact(repository_spec_string)
            spec_data['fetch_method'] = fetch_method
        elif fetch_method == FetchMethods.REMOTE_URL:
            # download the url
            # hope it is a collection artifact and use load_data_from_collection_artifact() for the
            # rest of the repo_spec data
            log.debug('repository_spec_string: %s', repository_spec_string)

            tmp_downloaded_path = download.fetch_url(repository_spec_string,
                                                     # Note: ignore_certs is meant for galaxy server,
                                                     # overloaded to apply for arbitrary http[s] downloads here
                                                     validate_certs=not galaxy_context.server['ignore_certs'])
            spec_data = collection_artifact.load_data_from_collection_artifact(tmp_downloaded_path)

            # pretend like this is a local_file install now
            spec_data['fetch_method'] = FetchMethods.LOCAL_FILE
        else:
            spec_data = repository_spec_parse.spec_data_from_string(repository_spec_string,
                                                                    namespace_override=namespace_override,
                                                                    editable=editable)

            spec_data['fetch_method'] = fetch_method

        log.debug('spec_data: %s', spec_data)

        req_spec = RequirementSpec.from_dict(spec_data)

        # top-level user requests have no requiring repository_spec
        req = Requirement(repository_spec=None, op=RequirementOps.EQ, requirement_spec=req_spec)

        requirements_list.append(req)

    log.debug('collections_lockfile_path: %s', collections_lockfile_path)

    if collections_lockfile_path:
        # load collections lockfile as if the 'dependencies' dict from a collection_info
        collections_lockfile = load_collections_lockfile(collections_lockfile_path)

        dependencies_list = requirements.from_dependencies_dict(collections_lockfile.dependencies)

        # Create the CollectionsLock for the validators
        collections_lock = CollectionsLock(dependencies=dependencies_list)

        requirements_list.extend(collections_lock.dependencies)

    log.debug('requirements_list: %s', requirements_list)

    # Install in rounds: each round may surface new dependency requirements,
    # which become the next round's requirements_list; stop when empty.
    while True:
        if not requirements_list:
            break

        display_callback('', level='info')
        display_callback('Collection specs to install:', level='info')
        for req in requirements_list:
            if req.repository_spec:
                msg = ' %s (required by %s)' % (req.requirement_spec.label, req.repository_spec)
            else:
                msg = ' %s' % req.requirement_spec.label
            display_callback(msg, level='info')

        just_installed_repositories = \
            install_repositories_matching_repository_specs(galaxy_context,
                                                           requirements_list,
                                                           editable=editable,
                                                           namespace_override=namespace_override,
                                                           display_callback=display_callback,
                                                           ignore_errors=ignore_errors,
                                                           no_deps=no_deps,
                                                           force_overwrite=force_overwrite)

        for just_installed_repo in just_installed_repositories:
            display_callback(' Installed: %s (to %s)' % (just_installed_repo.repository_spec,
                                                         just_installed_repo.path),
                             level='info')

        # set the repository_specs to search for to whatever the install reported as being needed yet
        # requirements_list = new_requirements_list
        requirements_list = find_new_deps_from_installed(galaxy_context,
                                                         just_installed_repositories,
                                                         no_deps=no_deps)

    # FIXME: what results to return?
    return 0