def run_sphinx_quickstart(path, environment, username, version):
    """
    Run sphinx-quickstart for the passed path.

    Required variables for this setup are the passed environment (as name
    of the project), the username and the version of the project to
    document.

    Parameters
    ----------
    path: pathlib.Path
        Path to create the documentation for.
    environment: str
        The name of the project to document.
    username: str
        The name of the maintainer of the project.
    version: str
        The current version of the project.
    """
    inform.info('Running sphinx-quickstart')
    # Assemble the full quickstart invocation once, then hand it to bash.
    quickstart_cmd = ' '.join([
        f'cd {path.absolute()} &&',
        f'{SPHINX_QUICKSTART}',
        '-q',
        f"-p '{environment}'",
        f"-a '{username}'",
        f'-v {version}',
        '-l en',
        '--ext-autodoc --ext-todo --ext-coverage',
        '--ext-viewcode',
        '--extensions=sphinx.ext.napoleon',
        '--makefile --sep',
    ])
    utils.run_in_bash(quickstart_cmd)
def test(self, path=None):
    """
    Run all tests for the project in its tests folder with pytest.

    Parameters
    ----------
    path: pathlib.Path
        The projects path.

    Note
    ----
    Calls :func:`Project.test` to update the projects conda-environment
    before running the tests with pytest.
    """
    path = path or self.path
    self.update()
    inform.info('Running tests for project with pytest')
    # Run pytest through bash because importing pytest in sublevels
    # (testing pproject itself) can be a pain.
    pytest_bin = Path(CONFIG["pproject_env"]) / "bin/pytest"
    pytest_args = " ".join(CONFIG["pytest_arguments"])
    try:
        output = utils.run_in_bash(f'{pytest_bin} {pytest_args} {path}')
    except CalledProcessError as err:
        print(err.output.strip().decode('ascii'))
        inform.critical()
    else:
        print(output)
    inform.finished()
def update_source(path):
    """
    Update the documentation source files for the passed path.

    Runs ``sphinx-apidoc`` to (re)generate the rst sources.

    Parameters
    ----------
    path: pathlib.Path
        Path to create the documentation for.
    """
    inform.info('Updating source for documentation to create')
    apidoc_cmd = (f'cd {str(path.absolute())} && '
                  f'{SPHINX_APIDOC} -f -o source/ .')
    try:
        utils.run_in_bash(apidoc_cmd)
    except CalledProcessError as err:
        # Show the captured output instead of a raw traceback.
        print(err.output.strip().decode('ascii'))
def make_documentation(path):
    """
    Generate html- and pdf-documentation for the passed path.

    Parameters
    ----------
    path: pathlib.Path
        Path to create the documentation for.
    """
    inform.info('Generating html- and pdf-documentation')
    make_cmd = (f'cd {str(path.absolute())} && '
                f'make SPHINXBUILD={SPHINXBUILD} html')
    try:
        utils.run_in_bash(make_cmd)
    except CalledProcessError as err:
        # Show the captured output instead of a raw traceback.
        print(err.output.strip().decode('ascii'))
def remove_remote(self, ssh, projectpath):
    """
    Remove the conda-environment named ``self.name`` on a remote host.

    Parameters
    ----------
    ssh: paramiko.SSHClient
        Connection via ssh where the conda-environment should be removed
        from.
    projectpath: str
        Path of the current project.
    """
    inform.info('Removing env (already exists)')
    self.release_log(ssh, 'remove', projectpath)
    remove_cmd = f'{CONDA_BIN} remove -y -q -n {self.name} --all'
    _, stdout, _ = ssh.exec_command(remove_cmd)
    # Block until the remote command has finished.
    stdout.channel.recv_exit_status()
def create_remote(self, ssh, pythonversion, packagename, version,
                  projectpath):
    """
    Release the package in its own conda-environment on a remote host.

    Parameters
    ----------
    ssh: paramiko.SSHClient
        Connection via ssh where the conda-environment should be created.
    pythonversion: str
        pythonversion to create the environment for.
    packagename: str
        Name of the package to install inside the created environment.
    version: str
        Version of the package to install inside the created environment.
    projectpath: str
        Path of the current project.
    """
    inform.info('Creating env')
    cmd_create = (f'{CONDA_BIN} create -y -q -n {self.name} '
                  f'python={pythonversion} '
                  f'{packagename}={version}')
    # First attempt: create the environment with the package preinstalled.
    _, stdout, stderr = ssh.exec_command(cmd_create)
    # Block until the remote command has finished.
    stdout.channel.recv_exit_status()
    err = stderr.read().strip().decode('ascii')
    if err:
        if 'CondaValueError: prefix already exists:' in err:
            # Environment already exists on the remote: log, remove it and
            # retry the creation exactly once.
            inform.info('Recreating env')
            self.release_log(ssh, 'recreate', projectpath)
            self.remove_remote(ssh, projectpath)
            self.release_log(ssh, 'create', projectpath)
            _, stdout, stderr = ssh.exec_command(cmd_create)
            stdout.channel.recv_exit_status()
            err = stderr.read().strip().decode('ascii')
        else:
            # Any other stderr output is treated as a fatal rollout error.
            inform.error(f'Error during rollout ({cmd_create} => {err})')
            inform.critical()
    if err:
        # conda may emit harmless '==> WARNING:' messages on stderr;
        # anything else after the (re)creation attempt is fatal.
        if not err.startswith('==> WARNING:'):
            inform.error(
                f'Error during rollout ({cmd_create} => {err})')
            inform.critical()
    else:
        self.release_log(ssh, 'create', projectpath)
def remove(self):
    """
    Remove the conda-environment named ``self.name`` if it exists.
    """
    if not self.exists():
        return
    remove_cmd = f'{CONDA_BIN} env remove -q -y -n {self.name}'
    try:
        utils.run_in_bash(remove_cmd)
    except CalledProcessError as err:
        err_message = err.output.strip().decode('ascii')
        if 'CondaEnvironmentError:' in err_message:
            # Environment is currently active: deactivate first, then retry.
            inform.info('deactivating and retry')
            utils.run_in_bash(f'source deactivate && {remove_cmd}')
        else:
            inform.error('Couldn\'t remove environment. '
                         'Following error occured:')
            print(err_message)
            inform.critical()
def create_coverage_badge(path, environment):
    """
    Run pytest-cov and create a coverage-badge for the coverage-result.

    Store the resulting badge inside the static folder for use inside the
    documentation.

    Parameters
    ----------
    path: pathlib.Path
        Path where to change into for testing and creation of badge.
    environment: str
        Name of the project to document.
    """
    inform.info('Running pytest-cov and creating coverage-badge')
    badge_cmd = (
        f'cd {str(path.absolute())} && '
        f'{PYTEST} --cov={COMPANY} --cov-report term-missing -v && '
        f'{COVERAGE_BADGE} -o source/_static/{environment}_coverage.svg -f'
    )
    try:
        utils.run_in_bash(badge_cmd)
    except CalledProcessError as err:
        # Show the captured output instead of a raw traceback.
        print(err.output.strip().decode('ascii'))
def create_on_remote_vcs(*, company, namespace, project, username):
    """
    Create a new project on the remote vcs (as defined in the config)
    based on the passed values "company", "namespace" and "project".

    Parameters
    ----------
    company: str
    namespace: str
    project: str
    username: str

    Returns
    -------
    project_to_create/gitlab_group: str
        The name of the created project / The name of the gitlab-group in
        which the project was created.
    """
    # TODO: returns what?
    assert check_remote_vcs()
    vcs = CONFIG['vcs']['use']
    api = VCS_SETTINGS['api']
    token = get_vcs_token()
    if vcs == 'gitlab':
        # An api value that already contains a port (two ':') is joined
        # with '/', otherwise with ':'.
        sep = '/' if len(api.split(':')) > 2 else ':'
        if VCS_SETTINGS['use_groups']:
            assert all(isinstance(_, str)
                       for _ in (company, namespace, project))
            gitlab_groups = get_gitlab_groups()
            assert gitlab_groups
            gitlab_group = f'{company}-{namespace}'
            assert gitlab_group in gitlab_groups
            inform.info(f'Creating "{project}" on gitlab in "{gitlab_group}"')
            post_url = (f'{api}{sep}projects?name={project}&'
                        f'namespace_id={gitlab_groups[gitlab_group]}')
            requests.post(post_url, headers={'PRIVATE-TOKEN': token})
            return gitlab_group
        project_to_create = f'{company}-{namespace}-{project}'
        inform.info(f'Creating {project_to_create} on {vcs}')
        post_url = f'{api}{sep}projects?private_token={token}'
        requests.post(post_url,
                      headers={'Content-Type': 'application/json'},
                      json=dict(name=project_to_create))
        return project_to_create
    elif vcs == 'github':
        project_to_create = f'{company}-{namespace}-{project}'
        inform.info(f'Creating {project_to_create} on {vcs}')
        requests.post(api,
                      auth=(username, token),
                      json=dict(name=project_to_create))
        return project_to_create
def sphinx(self, path=None):
    """
    Create a sphinx documentation for the current pproject project.

    Parameters
    ----------
    path: pathlib.Path
        The projects path.
    """
    path = path or self.path
    # Only bootstrap the sphinx skeleton when neither source nor build
    # folders exist yet.
    needs_quickstart = (not (path / 'source').exists()
                        and not (path / 'build').exists())
    if needs_quickstart:
        sphinx.run_sphinx_quickstart(path=path,
                                     environment=self.environment,
                                     username=self.username,
                                     version=self.version)
        sphinx.customize_config(path=path)
    sphinx.create_coverage_badge(path=path, environment=self.environment)
    sphinx.update_source(path=path)
    sphinx.make_documentation(path=path)
    inform.info('Generated html-files in "build"')
    inform.finished()
def customize_config(path):
    """
    Rewrite content of source/conf.py for the custom setup.

    Parameters
    ----------
    path: pathlib.Path
        Path to create the documentation for.
    """
    inform.info('Renaming content of source/conf.py for custom setup')
    # Mapping of original conf.py snippets to their replacements.
    replacements = {
        '# import os': 'import os',
        '# import sys': 'import sys',
        "# sys.path.insert(0, os.path.abspath('.'))":
            ("sys.path.insert(0, os.path.abspath('..'))"),
        "html_theme = 'alabaster'":
            ("html_theme = 'sphinx_rtd_theme'"),
        " 'relations.html',": "",
        " 'searchbox.html',":
            ("['globaltoc.html', "
             "'localtoc.html', "
             "'relations.html', "
             "'sourcelink.html', "
             "'searchbox.html'],")
    }
    tmp_file = path / "source/new_data.txt"
    conf_file = path / 'source/conf.py'
    # Stream conf.py line by line into a temporary file, applying every
    # matching replacement on the way.
    with open(str(tmp_file), 'w') as out_handle, \
            open(str(conf_file), 'r') as in_handle:
        for line in in_handle:
            for old, new in replacements.items():
                if old in line:
                    line = line.replace(old, new)
            out_handle.write(line)
    # Atomically swap the rewritten file into place.
    utils.run_in_bash(f'mv {str(tmp_file.absolute())} '
                      f'{str(conf_file.absolute())}')
def build(self, publish=False, path=None):
    """
    Builds a conda-package from the project.

    First it runs all tests to ensure functionality of the resulting
    package, assuming the test-coverage is acceptable. Then it checks for
    uncommited stuff inside the project and if it is tagged with a
    version-tag. Finally the conda-package is built. If the publish-flag
    is set it is uploaded to the conda-repo-server, else it is only built
    locally.

    Parameters
    ----------
    publish: bool
        Flag which indicates if the resulting conda-package should be
        uploaded to the conda-repository-server defined in the
        config-file or not.
    path: pathlib.Path
        The projects path.

    Note
    ----
    Runs :func:`Project.test` before build to check for failures. Checks
    if uncommited stuff remains inside project-folder before build with
    :func:`check_git_status`. Also checks if a required git-tag exists
    with :func:`get_git_tag`. To execute the conda-build command
    :func:`utils.run_in_bash` is used.
    """
    if not path:
        path = self.path
    self.update_informations()
    self.test(path=path)
    checks = all([
        self.git.status(),
        self.git.get_tag(),
        self.git.check_tag_on_remote()
    ])
    if checks:
        inform.info(f'Started build of {self.environment}')
        # Dry-run first to report the package name that will be produced.
        pkg_name = conda.build_package(path=path,
                                       pythonversion=self.pythonversion,
                                       simulate=True)
        inform.info(f'Returning packagename will be {pkg_name}')
        try:
            pkg_file = conda.build_package(
                path=path, pythonversion=self.pythonversion)
            bld_path = Path(CONFIG['conda_folder']) / 'conda-bld/linux-64'
            pkg_path = str(bld_path / pkg_file)
            # BUG FIX: this line previously referenced the undefined name
            # ``localfilepath`` which raised a NameError at runtime.
            inform.info(f'Built package is {pkg_path}')
            if publish:
                conda.publish_package_on_reposerver(pkg_path)
            inform.finished()
        except CalledProcessError:
            inform.critical()
    else:
        inform.error('Git-status not clean. Check git status and tag.')
        inform.critical()
def release(self, dst='localhost', envname=None, path=None):
    """
    Rolls out the current project as a conda-package in its own
    conda-environment either on localhost or on a remote.

    Parameters
    ----------
    dst: str
        The destination where the resulting package should be rolled out.
        Valid values are: "localhost" (default), "USER@HOSTNAME"
    envname: str
        The name of the environment to create on destination with the
        resulting package. If no "environment" is passed, the name of the
        project-environment is used.
    path: pathlib.Path
        The projects path.

    Note
    ----
    The version of the project is collected by :func:`get_git_tag`. Then
    the project is built as a conda-package with :func:`Project.build`.
    If **destination** is "localhost", the creation of the
    conda-environment for the just created package is done by
    :func:`utils.run_in_bash`. Else the required commands are executed by
    paramiko.
    """
    path = path or self.path
    envname = envname or f'{self.environment}_env'
    self.build(path=path)
    self.update_informations(path=path)
    inform.info(f'Env: {envname}')
    env = conda.CondaEnvironment(name=envname)
    if dst != 'localhost':
        # Remote rollout: let the environment create itself over ssh.
        env.create_remote(ssh=utils.connect_ssh(dst),
                          pythonversion=self.pythonversion,
                          packagename=self.environment,
                          version=self.version,
                          projectpath=self.path)
        inform.finished()
        return
    # Local rollout.
    # TODO: use recreate
    if env.exists():
        inform.info('Removing env (already exists)')
        env.remove()
    inform.info('Creating env')
    env.create(dependencies=[
        f'python={self.pythonversion}',
        f'{self.environment}={self.version}'
    ])
    inform.finished()
def update(self, path=None):
    """
    Updates the project-related conda-environment.

    If it already exists it will be removed and then recreated. This
    ensures to remove dependencies of packages which aren't required
    anymore. If the environment doesn't exist yet, the environment will
    be created. Finally stores the md5sum of the based meta.yaml file
    inside a file to enable the pproject-autoenv functionality triggered
    by changes inside the meta.yaml file.

    Parameters
    ----------
    path: str
        Path of the required meta.yaml file. Only required if the
        meta.yaml is outside of the current working directory.

    Note
    ----
    The pythonversion and the dependencies are collected with
    :class:`conda.MetaYaml`. Environment creation/removal is done using
    :class:`conda.CondaEnvironment`. To calculate the new md5sum of the
    meta.yaml and store it inside the hash.md5-file,
    :func:`update_md5sum` is used.
    """
    self.update_informations()
    path = path or self.path
    inform.info(f'Environment: {self.environment}')
    inform.info('Updating env')
    meta_yaml = conda.MetaYaml(path=path / CONFIG['meta_yaml_path'])
    self.pythonversion = meta_yaml.pythonversion
    env = conda.CondaEnvironment(name=self.environment)
    # Recreate from scratch so dropped dependencies actually disappear.
    if env.exists():
        inform.info('Removing env')
        env.remove()
    inform.info('Creating env')
    env.create(dependencies=meta_yaml.dependencies)
    self.update_md5sum()
    inform.finished()
def create(self, on_vcs=False, path=None):
    """
    Creates a new project based on a defined skeleton either local or on
    a remote vcs with cookiecutter, creates the base-conda-environment
    for developing the new project and manages the corresponding
    git-tasks.

    Parameters
    ----------
    on_vcs: bool
        Flag to define if project should be added to remote vcs after
        creation.
    path: pathlib.Path
        The projects path.

    Note
    ----
    Calls :func:`Project.update_informations` with **create=True** to
    update the **attributes** of the project. For git operations the
    function :func:`utils.run_in_bash` is called. After creation
    :func:`Project.update` with **path** set to current working
    directory is called to create the conda-environment of the project.
    """
    if not path:
        path = self.path.parent
    self.update_informations(create=True, path=path)
    assert isinstance(path, Path)
    if (path / self.environment).exists():
        inform.error('Folder already exists')
        inform.critical()
        return
    # Fail early when the remote vcs is required but unreachable.
    if on_vcs:
        if not git.check_remote_vcs():
            inform.error('Remote vcs not accessable')
            inform.critical()
    inform.info(f'Creating project {self.environment}')
    cookiecutter(CONFIG['skeleton_repo'],
                 checkout=str(self.pythonversion),
                 output_dir=str(Path.cwd()),
                 no_input=True,
                 extra_context=self.__dict__)
    inform.info('Created folder')
    os.chdir(str((path / self.environment).absolute()))
    # IDIOM FIX: was ``if not self.environment in str(self.path)``.
    if self.environment not in str(self.path):
        self.path = self.path / self.environment
    self.git = git.GitRepo(path=self.path)
    if on_vcs:
        create_on_remote_res = git.create_on_remote_vcs(
            company=self.company,
            namespace=self.namespace,
            project=self.project,
            username=self.git.get_username())
    inform.info('Initializing git')
    self.git.initialize()
    inform.info('Adding files')
    self.git.add_all()
    inform.info('Commiting')
    self.git.commit()
    vcs_ssh = VCS_SETTINGS['ssh']
    vcs = CONFIG['vcs']['use']
    vcs_use_groups = VCS_SETTINGS['use_groups']
    if on_vcs:
        # Grouped gitlab projects live below their group on the remote.
        if vcs == 'gitlab' and vcs_use_groups:
            git_repo = f'{create_on_remote_res}/{self.project}.git'
        else:
            git_repo = f'{create_on_remote_res}.git'
        # An ssh base that already contains ':' gets a '/' separator.
        if ':' in vcs_ssh:
            git_origin = f'{vcs_ssh}/{git_repo}'
        else:
            git_origin = f'{vcs_ssh}:{git_repo}'
        inform.info(f'Setting origin to {git_origin}')
        self.git.set_origin(git_origin)
        inform.info('Pushing to origin')
        self.git.push_to_branch('master')
    inform.finished()
    self.update(path=(path / self.environment).absolute())
def new_version(self, vtype, message='New version triggered by pproject',
                path=None):
    """
    Raises the passed vtype (versiontype) inside the version (git-tag) of
    the project.

    Version has to be in semantic versioning style (see
    https://semver.org/ for details). Example for version in semantic
    versioning style: General: "major.minor.patch" Example: "1.0.1"

    Parameters
    ----------
    vtype: str
        The part to raise inside current version. Valid are: "major",
        "minor" and "patch"
    message: str
        The message to use for the git tag.
    path: pathlib.Path
        The projects path.

    Note
    ----
    First checks if there is uncommited stuff inside the current branch.
    If there is uncommited stuff, the operation is aborted. Else checks
    if the remote vcs is reachable. Then the current version is
    collected. The passed **vtype** is raised by 1. Finally the resulting
    version-tag is added with passed **message** and the new tag is
    pushed to the remote. For these operations :class:`git.GitRepo` is
    used.
    """
    if not path:
        path = self.path
    assert all([isinstance(vtype, str), isinstance(message, str)])
    assert vtype in ('major', 'minor', 'patch')
    self.update_informations(path=path)
    if not self.git.status():
        inform.error(
            'No new git-tag possible, uncommited stuff in project')
        inform.critical()
        return
    if not git.check_remote_vcs():
        inform.error('Remote vcs not accessable')
        inform.critical()
    else:
        self.version = self.git.get_tag()
        inform.info(f'Current version is {self.version}')
        # Parse "major.minor.patch" (ignoring any "-suffix") into parts.
        parts = dict(zip(
            ('major', 'minor', 'patch'),
            (int(p) for p in self.version.split('-')[0].split('.'))))
        parts[vtype] += 1
        # Raising a higher-order part resets the lower-order ones.
        if vtype != 'patch':
            parts['patch'] = 0
        if vtype == 'major':
            parts['minor'] = 0
        res_version = '.'.join(
            str(parts[key]) for key in ('major', 'minor', 'patch'))
        inform.info(f'Resulting version {res_version}')
        try:
            self.git.create_tag(res_version, message)
            self.git.push_tag(res_version)
        except CalledProcessError:
            inform.error('Can\'t push to remote vcs.')
            inform.critical()
        self.version = res_version
def update_md5sum(self):
    """
    Store the md5sum of the project's meta.yaml inside the md5-file.

    The stored hash enables the pproject-autoenv functionality which is
    triggered by changes of the meta.yaml file.
    """
    inform.info('Storing new md5sum of meta.yaml')
    checksum = utils.md5(str(self.path / CONFIG['meta_yaml_path']))
    # RESOURCE FIX: the original chained ``.open('w').write(...)`` and
    # never closed the file handle; a context manager closes it
    # deterministically.
    with (self.path / CONFIG['meta_yaml_md5_path']).open('w') as md5_file:
        md5_file.write(checksum)
def test_inform_info(capsys):
    """inform.info writes the expected ANSI-colored line to stdout."""
    inform.info('bla')
    out, _ = capsys.readouterr()
    expected = (b' \x1b[1;37m\xe2\x84\xb9\x1b[0m\x1b[0;94m '
                b'TEST_INFORM_INFO \x1b[0mbla\n').decode('utf8')
    assert out == expected