def _extract_archive(tarfile_path, extract_dir):
    """Extract a collection artifact tarball into a directory.

    :param tarfile_path: path to the collection artifact tarball.
    :param extract_dir: directory the archive contents are extracted into.
    :raises exc.ImporterError: if the tar extract subprocess fails, or the
        tarball/tar executable cannot be found.
    """
    try:
        _extract_tar_shell(tarfile_path=tarfile_path, extract_dir=extract_dir)
    except subprocess.SubprocessError as e:
        # The SubprocessError base class does not guarantee a `stderr`
        # attribute (only CalledProcessError/TimeoutExpired carry one),
        # so read it defensively to avoid an AttributeError in the handler.
        stderr = getattr(e, 'stderr', None)
        raise exc.ImporterError(
            'Error in tar extract subprocess: '
            f'{str(e)}, filepath={tarfile_path}, stderr={stderr}') from e
    except FileNotFoundError as e:
        raise exc.ImporterError('File not found in tar extract subprocess: '
                                f'{str(e)}, filepath={tarfile_path}') from e
def _build_docs_blob(self):
    """Build importer result docs_blob from collection documentation."""
    contents = []
    for content in self.content_objs:
        contents.append(schema.DocsBlobContentItem(
            content_name=content.name,
            content_type=content.content_type.value,
            doc_strings=content.doc_strings,
            readme_file=content.readme_file,
            readme_html=content.readme_html,
        ))

    # A collection-level readme is mandatory.
    readme = markup_utils.get_readme_doc_file(self.path)
    if not readme:
        raise exc.ImporterError('No collection readme found')
    rendered_readme = schema.RenderedDocFile(
        name=readme.name, html=markup_utils.get_html(readme))

    docs_dir = os.path.join(self.path, DOCUMENTATION_DIR)
    doc_files = markup_utils.get_doc_files(docs_dir)
    rendered_doc_files = []
    if doc_files:
        for doc_file in doc_files:
            rendered_doc_files.append(schema.RenderedDocFile(
                name=doc_file.name, html=markup_utils.get_html(doc_file)))

    return schema.DocsBlob(
        collection_readme=rendered_readme,
        documentation_files=rendered_doc_files,
        contents=contents,
    )
def _build_docs_blob(self):
    """Build importer result docs_blob from collection documentation."""
    # With ansible-doc disabled there is nothing to render; hand back
    # an empty placeholder DocsBlob.
    if not self.cfg.run_ansible_doc:
        return schema.DocsBlob(
            collection_readme=schema.RenderedDocFile(),
            documentation_files=[],
            contents=[],
        )

    contents = [
        schema.DocsBlobContentItem(
            content_name=item.name,
            content_type=item.content_type.value,
            doc_strings=item.doc_strings,
            readme_file=item.readme_file,
            readme_html=item.readme_html,
        )
        for item in self.content_objs
    ]

    # A collection-level readme is mandatory.
    readme = markup_utils.get_readme_doc_file(self.path)
    if not readme:
        raise exc.ImporterError('No collection readme found')
    rendered_readme = schema.RenderedDocFile(
        name=readme.name, html=markup_utils.get_html(readme))

    doc_files = markup_utils.get_doc_files(
        os.path.join(self.path, DOCUMENTATION_DIR))
    rendered_doc_files = [
        schema.RenderedDocFile(name=f.name, html=markup_utils.get_html(f))
        for f in doc_files or []
    ]

    execution_environment = ee_utils.process_execution_environment(
        self.path, self.log)

    return schema.DocsBlob(
        collection_readme=rendered_readme,
        documentation_files=rendered_doc_files,
        contents=contents,
        execution_environment=execution_environment,
    )
def import_collection(
    file=None,
    filename=None,
    file_url=None,
    git_clone_path=None,
    output_path=None,
    logger=None,
    cfg=None,
):
    """Process import on collection artifact file object.

    :param file: file handle of collection artifact.
    :param filename: namedtuple of CollectionFilename.
    :param file_url: storage url of collection artifact.
    :param git_clone_path: path to git repo directory of collection pre artifact build.
    :param output_path: path where collection build tarball file will be written.
    :param logger: Optional logger instance.
    :param cfg: Optional config.
    :raises exc.ImporterError: On errors that fail the import process.
    :return: metadata if `file` provided, (metadata, filepath) if `git_clone_path` provided
    """
    # BUGFIX: resolve the logger fallback BEFORE first use -- previously
    # logger.info() ran before `logger = logger or default_logger`, so
    # calling with logger=None raised AttributeError.
    logger = logger or default_logger
    logger.info(f"Importing with galaxy-importer {__version__}")

    if not cfg:
        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)

    # Exactly one of `file` / `git_clone_path` must be supplied.
    if (file and git_clone_path) or not (file or git_clone_path):
        raise exc.ImporterError(
            "Expected either 'file' or 'git_clone_path' to be populated")

    if git_clone_path:
        # Build the artifact from the git checkout, then import the tarball.
        filepath = _build_collection(git_clone_path, output_path, logger)
        with open(filepath, "rb") as fh:
            metadata = _import_collection(fh, filename=None, file_url=None,
                                          logger=logger, cfg=cfg)
        return (metadata, filepath)

    return _import_collection(file, filename, file_url, logger, cfg)
def _build_collection(git_clone_path, output_path, logger=None):
    """Runs `ansible-galaxy collection build` and returns artifact filepath."""
    logger = logger or default_logger
    logger.info(
        "Building collection tarball with ansible-galaxy collection build")

    build_cmd = [
        "ansible-galaxy", "collection", "build", "--output-path", output_path,
    ]
    result = subprocess.run(build_cmd, cwd=git_clone_path, capture_output=True)

    if result.returncode != 0:
        stderr_text = result.stderr.decode("utf-8").rstrip()
        raise exc.ImporterError(
            "Error running `ansible-galaxy collection build`: {}".format(
                stderr_text))

    # TODO: use regex to get filename from stdout, combine with output_path
    # in case cli output ever changes from:
    # Created collection for <namespace>.<name> at /<path>/<artifact>.tar.gz
    stdout = result.stdout.decode("utf-8").rstrip()
    return stdout.split(" ")[-1]