def connect_ssh(dst):
    """
    Open an SSH-connection to the given destination host.

    Parameters
    ----------
    dst: str
        Target host in the form "username@hostname".

    Returns
    -------
    paramiko.SSHClient
        Connected client for the destination host on success; on
        authentication or name-resolution failure the error handler is
        invoked and nothing is returned.
    """
    username, hostname = dst.split('@')
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(hostname, 22, username, look_for_keys=True,
                       compress=True)
    except paramiko.ssh_exception.AuthenticationException:
        inform.error('Authentication failed')
        inform.critical()
    except socket.gaierror:
        inform.error('Name or service not known')
        inform.critical()
    else:
        return client
def create(self, dependencies):
    """
    Create the conda-environment self.name with the passed dependencies.

    Parameters
    ----------
    dependencies: list
        Strings of package-specs (packagename with optional version)
        to install inside the environment.
        Example: ['python=3.6', 'attrs=>17.3']
    """
    def normalize(spec):
        # Quote the spec and convert "name version" into conda's
        # "name=version" notation; comparison operators keep their form
        # and wildcard stars are dropped.
        quoted = f"'{spec}'"
        quoted = quoted.replace(' >=', '>=').replace(' <=', '<=')
        return quoted.replace(' ', '=').replace('*', '')

    deps = ' '.join(normalize(spec) for spec in dependencies)
    try:
        utils.run_in_bash(
            f'{CONDA_BIN} create -y -q -n {self.name} {deps}')
    except CalledProcessError as err:
        inform.error(f'Couldn\'t create environment {self.name}. '
                     'Following error occured:')
        print(err.output.strip().decode('ascii'))
        inform.error('Please check your meta.yaml-file and if '
                     'dependencies are available.')
        inform.critical()
def add_all(self):
    """
    Stage all content of the local git-repository (``git add .``).

    Note
    ----
    Failures are only reported via :func:`inform.error`; the program
    continues (best-effort behaviour preserved).
    """
    try:
        utils.run_in_bash(f'cd {self.path.absolute()} && git add .')
    except CalledProcessError:
        # f-prefix removed: the message contains no placeholders (F541).
        inform.error('Can\'t add files to git folder.')
def initialize(self):
    """
    Initialize the current local project as a git-repository
    (``git init -q``).
    """
    try:
        utils.run_in_bash(f'cd {self.path.absolute()} && git init -q')
    except CalledProcessError:
        # f-prefix removed: the message contains no placeholders (F541).
        inform.error('Can\'t initialize git.')
def build(self, publish=False, path=None):
    """
    Build a conda-package from the project.

    First runs all tests to ensure functionality of the resulting
    package. Then checks for uncommited changes inside the project and
    for a version-tag that also exists on the remote. Finally the
    conda-package is built and, if the publish-flag is set, uploaded to
    the conda-repo-server; else it is only built locally.

    Parameters
    ----------
    publish: bool
        Flag which indicates if the resulting conda-package should be
        uploaded to the conda-repository-server defined in the
        config-file or not.
    path: pathlib.Path
        The projects path.

    Note
    ----
    Runs :func:`Project.test` before build to check for failures.
    Checks for uncommited changes with :func:`check_git_status`, for a
    required git-tag with :func:`get_git_tag` and that the tag exists on
    the remote before building.
    """
    if not path:
        path = self.path
    self.update_informations()
    self.test(path=path)
    checks = all([
        self.git.status(),
        self.git.get_tag(),
        self.git.check_tag_on_remote()
    ])
    if checks:
        inform.info(f'Started build of {self.environment}')
        # Dry-run first to announce the resulting package name.
        pkg_name = conda.build_package(path=path,
                                       pythonversion=self.pythonversion,
                                       simulate=True)
        inform.info(f'Returning packagename will be {pkg_name}')
        try:
            pkg_file = conda.build_package(
                path=path, pythonversion=self.pythonversion)
            bld_path = Path(CONFIG['conda_folder']) / 'conda-bld/linux-64'
            pkg_path = str(bld_path / pkg_file)
            # BUG FIX: the original referenced the undefined name
            # `localfilepath` here (NameError at runtime); the built
            # artifact's path is `pkg_path`.
            inform.info(f'Built package is {pkg_path}')
            if publish:
                conda.publish_package_on_reposerver(pkg_path)
            inform.finished()
        except CalledProcessError:
            inform.critical()
    else:
        # Message fix: added the missing space after the period.
        inform.error('Git-status not clean. Check git status and tag.')
        inform.critical()
def commit(self):
    """
    Commit the current state of the local git-repository.
    """
    try:
        # BUG FIX: 'git ci' is only a user-defined alias and fails on a
        # stock git installation; use the real subcommand 'git commit'.
        utils.run_in_bash(
            f'cd {self.path.absolute()} && '
            f'git commit -m "automatically created by skeleton" -q')
    except CalledProcessError:
        # f-prefix removed: the message contains no placeholders (F541).
        inform.error('Can\'t commit files.')
def push_tag(self, tag):
    """
    Push a single tag of the local git-repository to its origin.

    Parameters
    ----------
    tag: str
        Name of the tag to push to the git-repositories origin.
    """
    command = (f'cd {self.path.absolute()} && '
               f'git push origin "{tag}"')
    try:
        utils.run_in_bash(command)
    except CalledProcessError:
        inform.error(f'Can\'t push {tag} to origin.')
def set_origin(self, origin):
    """
    Register the passed URL as origin of the local git-repository.

    Parameters
    ----------
    origin: str
        Remote-URL to set as origin for the local git-repository.
    """
    command = (f'cd {self.path.absolute()} && '
               f'git remote add origin "{origin}"')
    try:
        utils.run_in_bash(command)
    except CalledProcessError:
        inform.error(f'Can\'t set origin to {origin}.')
def push_to_branch(self, branch):
    """
    Push the given branch to origin and set it as its upstream.

    Parameters
    ----------
    branch: str
        Branchname to push to origin.
    """
    command = (f'cd {self.path.absolute()} && '
               f'git push -u origin {branch}')
    try:
        utils.run_in_bash(command)
    except CalledProcessError:
        inform.error(f'Can\'t push to branch {branch}.')
def create_tag(self, tag, message):
    """
    Create an annotated git-tag in the local repository.

    Parameters
    ----------
    tag: str
        The tag to create for the current local git-repository.
    message: str
        Annotation message stored with the tag.
    """
    command = (f'cd {self.path.absolute()} && '
               f'git tag -a {tag} -m "{message}"')
    try:
        utils.run_in_bash(command)
    except CalledProcessError:
        inform.error(f'Can\'t create tag {tag}.')
def get_gitlab_groups():
    """
    Query the configured gitlab-api for the groups visible to the
    owner of the configured gitlab-token.

    Returns
    -------
    gitlab_groups: dict
        Mapping of group-name to group-id; empty when the api is not
        accessible with the given token.
    """
    response = requests.get(f'{VCS_SETTINGS["api"]}/groups',
                            headers={'PRIVATE-TOKEN': get_vcs_token()})
    gitlab_groups = {}
    try:
        for group in response.json():
            gitlab_groups[group['name']] = group['id']
    except TypeError:
        # The api answered with something that is not a list of group
        # mappings (e.g. an error object) — treat as "no access".
        inform.error('No access to gitlab-api. Please check your token.')
        gitlab_groups = {}
    return gitlab_groups
def create_remote(self, ssh, pythonversion, packagename, version, projectpath):
    """
    Release the package in its own conda-environment on a remote host.

    Parameters
    ----------
    ssh: paramiko.SSHClient
        Connection via ssh where the conda-environment should be
        created.
    pythonversion: str
        pythonversion to create the environment for.
    packagename: str
        Name of the package to install inside the created environment.
    version: str
        Version of the package to install inside the created
        environment.
    projectpath: str
        Path of current project.

    Note
    ----
    If the environment already exists on the remote host it is removed
    and the create-command is run a second time. Any other error, or a
    remaining stderr-output that is not a plain conda warning, aborts
    the program via :func:`inform.critical`.
    """
    inform.info('Creating env')
    cmd_create = (f'{CONDA_BIN} create -y -q -n {self.name} '
                  f'python={pythonversion} '
                  f'{packagename}={version}')
    # Run the create-command remotely and block until it finished.
    _, stdout, stderr = ssh.exec_command(cmd_create)
    stdout.channel.recv_exit_status()
    err = stderr.read().strip().decode('ascii')
    if err:
        if 'CondaValueError: prefix already exists:' in err:
            # Environment exists already: log, drop it, then retry the
            # exact same create-command once.
            inform.info('Recreating env')
            self.release_log(ssh, 'recreate', projectpath)
            self.remove_remote(ssh, projectpath)
            self.release_log(ssh, 'create', projectpath)
            _, stdout, stderr = ssh.exec_command(cmd_create)
            stdout.channel.recv_exit_status()
            err = stderr.read().strip().decode('ascii')
        else:
            inform.error(f'Error during rollout ({cmd_create} => {err})')
            inform.critical()
    # Re-check stderr of the (possibly repeated) create-command: plain
    # conda warnings are tolerated, everything else is fatal.
    if err:
        if not err.startswith('==> WARNING:'):
            inform.error(
                f'Error during rollout ({cmd_create} => {err})')
            inform.critical()
    else:
        self.release_log(ssh, 'create', projectpath)
def check_tag_on_remote(self):
    """
    Check whether the current tag has been pushed to the origin url.

    Returns
    -------
    check: bool
        True if ``git ls-remote`` lists the tag on origin, else False
        (also False when the git-call itself fails).
    """
    try:
        res = utils.run_in_bash(
            f'cd {self.path.absolute()} && '
            f'git ls-remote origin refs/tags/{self.get_tag()}')
        # Empty output means the tag does not exist on the remote.
        check = bool(res)
    except CalledProcessError as err:
        # f-prefix removed from the first fragment: no placeholders
        # (F541).
        inform.error('Can\'t check if tag exists on remote. '
                     'Following error occured:')
        print(err.output.strip().decode('ascii'))
        check = False
    return check
def remove(self):
    """
    Remove the conda-environment self.name; a no-op when it does not
    exist.
    """
    if not self.exists():
        return
    remove_cmd = f'{CONDA_BIN} env remove -q -y -n {self.name}'
    try:
        utils.run_in_bash(remove_cmd)
    except CalledProcessError as err:
        message = err.output.strip().decode('ascii')
        if 'CondaEnvironmentError:' not in message:
            inform.error('Couldn\'t remove environment. '
                         'Following error occured:')
            print(message)
            inform.critical()
        else:
            # The environment is currently active: deactivate first and
            # retry the removal once.
            inform.info('deactivating and retry')
            utils.run_in_bash('source deactivate && ' + remove_cmd)
def update_informations(self, create=False, path=None):
    """
    Update class-attributes with new informations collected from
    meta.yaml, git config and current datetime.

    Parameters
    ----------
    create: bool
        If the project isn't created yet the name of the environment
        can't be extracted from the meta.yaml. In this case it has to
        be combined from companyname, namespace and projectname.
    path: pathlib.Path
        The projects-path.

    Note
    ----
    Uses :class:`conda.MetaYaml` to collect the name of environment
    from the current project if **create** is False. To collect
    git-specific informations :class:`git.GitRepo` is used. Checks for
    valid project-definition with :class:`validators.SProject`.
    """
    self.environment = f'{self.company}-{self.namespace}-{self.project}'
    if create:
        if not path:
            path = self.path.parent
        self.git = git.GitRepo(path=self.path)
    else:
        # Dead commented-out path-adjustment code removed here.
        if not path:
            path = self.path
        meta_yaml = conda.MetaYaml(path=path / CONFIG['meta_yaml_path'])
        self.environment = meta_yaml.package_name
        self.git = git.GitRepo(path=self.path)
        self.version = self.git.get_tag()
    now = dt.datetime.now()
    self.year = now.strftime('%Y')
    self.today = now.strftime('%Y-%m-%d %H:%M')
    self.username = self.git.get_username()
    self.email = self.git.get_email()
    try:
        validators.SProject(strict=True).load(self.__dict__)
    except ValidationError as err:
        inform.error('Can\'t collect project information.')
        inform.error('Invalid value for following params:')
        for key, value in err.messages.items():
            inform.error(f'{key}: {value}')
        inform.critical()
def __attrs_post_init__(self):
    """
    Fill in the default meta.yaml-path if none was given, refresh the
    cached contents and validate them against
    :class:`validators.SMetaYaml`.

    Raises
    ------
    AttributeError
        If the (given or defaulted) path does not exist.
    """
    if not self.path:
        self.path = Path.cwd() / CONFIG['meta_yaml_path']
    if not self.path.exists():
        raise AttributeError(f'Path {self.path} doesn\'t exist.')
    self.update()
    try:
        validators.SMetaYaml(strict=True).load(self.get_content())
    except ValidationError as err:
        inform.error('meta.yaml has incorrect content.')
        inform.error('Invalid value for following params:')
        for param, messages in err.messages.items():
            inform.error(f'{param}: {messages}')
        inform.critical()
def new_version(self, vtype, message='New version triggered by pproject',
                path=None):
    """
    Raise the passed part of the project-version (git-tag).

    Versions follow semantic versioning style ("major.minor.patch",
    e.g. "1.0.1"; see https://semver.org/ for details).

    Parameters
    ----------
    vtype: str
        The part to raise inside current version. Valid are: "major",
        "minor" and "patch".
    message: str
        The message to use for the git tag.
    path: pathlib.Path
        The projects path.

    Note
    ----
    Aborts when uncommited changes exist in the current branch or the
    remote vcs is not reachable. Otherwise the current git-tag is read,
    the requested part is incremented (lower parts reset to 0), and the
    resulting tag is created and pushed via :class:`git.GitRepo`.
    """
    if not path:
        path = self.path
    assert all([isinstance(vtype, str), isinstance(message, str)])
    assert vtype in ('major', 'minor', 'patch')
    self.update_informations(path=path)
    if not self.git.status():
        inform.error(
            'No new git-tag possible, uncommited stuff in project')
        inform.critical()
    elif not git.check_remote_vcs():
        inform.error('Remote vcs not accessable')
        inform.critical()
    else:
        self.version = self.git.get_tag()
        inform.info(f'Current version is {self.version}')
        # Ignore any pre-release suffix after '-' when parsing.
        numbers = self.version.split('-')[0].split('.')
        major, minor, patch = (int(number) for number in numbers)
        counters = {'major': major, 'minor': minor, 'patch': patch}
        counters[vtype] += 1
        # Raising a higher part resets all lower parts to zero.
        if vtype != 'patch':
            counters['patch'] = 0
        if vtype == 'major':
            counters['minor'] = 0
        res_version = (f'{counters["major"]}.'
                       f'{counters["minor"]}.'
                       f'{counters["patch"]}')
        inform.info(f'Resulting version {res_version}')
        try:
            self.git.create_tag(res_version, message)
            self.git.push_tag(res_version)
        except CalledProcessError:
            inform.error('Can\'t push to remote vcs.')
            inform.critical()
        self.version = res_version
def build_arguments(args):
    """
    The argument parsing for the python-part of the pproject-tool.

    Parameters
    ----------
    args: list
        Raw command-line arguments (e.g. ``sys.argv[1:]``).

    Returns
    -------
    parsed_args: argparse.Namespace
    """
    # TODO: make dynamic with CONFIG['use_vcs']
    # Set-based membership check instead of rebuilding a list per char.
    valid_chars = set(string.ascii_letters + string.digits + '_')
    try:
        assert all(char in valid_chars for char in CONFIG['company'])
    except AssertionError:
        inform.error(
            'Your company-name contains unsupported chars (only letters and "_" are allowed)'
        )
        inform.critical()
    vcs = CONFIG['vcs']['use']
    if vcs == 'gitlab' and VCS_SETTINGS['use_groups']:
        try:
            # Namespaces are gitlab groups named "<company>-<namespace>".
            available_namespaces = [
                grp.split('-')[1]
                for grp in git.get_gitlab_groups().keys()
                if '-' in grp and grp.split('-')[0] == CONFIG['company']
            ]
            if not available_namespaces:
                raise AttributeError
        # BUG FIX: was a bare "except:", which also swallowed
        # SystemExit and KeyboardInterrupt; narrowed to Exception.
        except Exception:
            available_namespaces = OFFLINE_NAMESPACES
    else:
        available_namespaces = OFFLINE_NAMESPACES
    parser = argparse.ArgumentParser(description='ouroboros-tools-pproject')
    tools = parser.add_subparsers(
        description='pproject supports different tools. These are:')
    create = tools.add_parser(
        'create', description='creates new pproject supported project')
    create.set_defaults(tool='create')
    create.add_argument('-r', '--remote', action='store_true', default=False)
    namespaces = create.add_subparsers(
        description='the following namespaces are supported')
    for namespace_name in available_namespaces:
        namespace = namespaces.add_parser(
            namespace_name,
            description=f'Uses "{namespace_name}" as namespace')
        namespace.set_defaults(namespace=namespace_name)
        namespace.add_argument('-n', '--projectname', type=str, required=True)
        namespace.add_argument('-p', '--pythonversion', type=str,
                               default='3.6')
    build = tools.add_parser('build')
    build.set_defaults(tool='build')
    build.add_argument('-p', '--publish', action='store_true', default=False)
    for tool in ('test', 'sphinx', 'update'):
        new_tool = tools.add_parser(tool)
        new_tool.set_defaults(tool=tool)
    info_parser = tools.add_parser('info')
    info_parser.set_defaults(tool='info')
    infotypes = info_parser.add_subparsers()
    info_general = infotypes.add_parser('general')
    info_general.set_defaults(infotype='general')
    info_project = infotypes.add_parser('project')
    info_project.set_defaults(infotype='project')
    new_version = tools.add_parser('version')
    new_version.add_argument('-m', '--message', type=str, required=True)
    new_version.set_defaults(tool='version')
    versiontypes = new_version.add_subparsers()
    for versiontype in ('major', 'minor', 'patch'):
        vtype = versiontypes.add_parser(versiontype)
        vtype.set_defaults(versiontype=versiontype)
    release = tools.add_parser('release')
    release.set_defaults(tool='release')
    release.add_argument('-d', '--userathost', type=str)
    release.add_argument('-e', '--envname', type=str)
    return parser.parse_args(args)
def run(options, path=None):
    """
    Dispatch parsed command-line options to the matching project-action.

    Parameters
    ----------
    options: argparse.Namespace
        Result of :func:`build_arguments`.
    path: pathlib.Path
        The projects path; defaults to the current working directory.

    Note
    ----
    BUG FIX: the default was ``path=Path.cwd()``, which is evaluated
    once at import time, not per call; it is now resolved at call time.
    """
    if path is None:
        path = Path.cwd()
    if options.tool == 'info':
        if options.infotype == 'general':
            general_info()
        elif options.infotype == 'project':
            try:
                meta_yaml = conda.MetaYaml()
                company, namespace, projectname = (
                    meta_yaml.package_name.split('-'))
                pythonversion = meta_yaml.pythonversion
                prj = Project(company=company,
                              namespace=namespace,
                              project=projectname,
                              pythonversion=pythonversion,
                              path=path)
                prj.update_informations(create=True, path=path)
                prj.info()
            except Exception as err:
                print(err)
                inform.error('Not a valid pproject-project!')
                inform.critical()
    else:
        if options.tool not in ('create', ):
            # Existing project: read identity from its meta.yaml.
            meta_yaml = conda.MetaYaml()
            company, namespace, projectname = (
                meta_yaml.package_name.split('-'))
            pythonversion = meta_yaml.pythonversion
        else:
            # New project: identity comes from config and cli-options.
            company, namespace, projectname = [
                CONFIG['company'], options.namespace, options.projectname
            ]
            pythonversion = options.pythonversion
        prj = Project(company=company,
                      namespace=namespace,
                      project=projectname,
                      pythonversion=pythonversion,
                      path=path)
        if options.tool == 'update':
            prj.update_informations()
            prj.update()
        elif options.tool == 'test':
            prj.update_informations()
            prj.test(path=path)
        elif options.tool == 'sphinx':
            prj.update_informations()
            prj.sphinx()
        elif options.tool == 'create':
            prj.create(on_vcs=options.remote, path=path)
        elif options.tool == 'build':
            prj.update_informations()
            prj.build(publish=options.publish)
        elif options.tool == 'version':
            prj.update_informations()
            prj.new_version(vtype=options.versiontype,
                            message=options.message)
        elif options.tool == 'release':
            # BUG FIX: the old "try: options.envname / except:" never
            # fell back when -e was omitted, because argparse stores
            # None instead of raising an AttributeError; it also used a
            # bare except. Fall back whenever no envname was supplied.
            envname = getattr(options, 'envname', None)
            if not envname:
                envname = f'{Path.cwd().name}_env'
            prj.update_informations()
            prj.release(dst=options.userathost, envname=envname)
def test_inform_error(capsys):
    """inform.error writes the expected ANSI-colored line to stdout."""
    inform.error('bla')
    captured = capsys.readouterr()
    # Simplified: the expected value was a bytes-literal immediately
    # decoded with utf8; an equal plain str-literal expresses the same
    # value without the pointless roundtrip.
    expected = ' \x1b[1;91mE\x1b[0m\x1b[0;94m TEST_INFORM_ERROR \x1b[0mbla\n'
    assert captured[0] == expected
def create(self, on_vcs=False, path=None):
    """
    Creates new project based on a defined skeleton either local or on
    gitlab with cookiecutter, creates the base-conda-environment for
    developing the new project and manages corresponding git-tasks.

    Parameters
    ----------
    on_vcs: bool
        Flag to define if project should be added to remote vcs after
        creation.
    path: pathlib.Path
        The projects path.

    Note
    ----
    Calls :func:`Project.update_informations` with **create=True** to
    update the **attributes** of the project. For git operations the
    function :func:`utils.run_in_bash` is called. After creation
    :func:`Project.update` with **path** set to current working
    directory is called to create the conda-environment of the project.
    Aborts when the target folder already exists, or when **on_vcs** is
    set but the remote vcs is not reachable.
    """
    if not path:
        path = self.path.parent
    self.update_informations(create=True, path=path)
    assert isinstance(path, Path)
    if not (path / self.environment).exists():
        if on_vcs:
            # Fail early: no point creating locally if the remote push
            # at the end could not succeed.
            if not git.check_remote_vcs():
                inform.error('Remote vcs not accessable')
                inform.critical()
        inform.info(f'Creating project {self.environment}')
        # Render the project skeleton; the checkout-branch of the
        # skeleton-repo matches the requested pythonversion.
        cookiecutter(CONFIG['skeleton_repo'],
                     checkout=str(self.pythonversion),
                     output_dir=str(Path.cwd()),
                     no_input=True,
                     extra_context=self.__dict__)
        inform.info('Created folder')
        # Work inside the freshly created project folder from now on.
        os.chdir(str((path / self.environment).absolute()))
        if not self.environment in str(self.path):
            self.path = self.path / self.environment
        self.git = git.GitRepo(path=self.path)
        if on_vcs:
            # Returns the remote group/path the repo was created under.
            create_on_remote_res = git.create_on_remote_vcs(
                company=self.company,
                namespace=self.namespace,
                project=self.project,
                username=self.git.get_username())
        inform.info('Initializing git')
        self.git.initialize()
        inform.info('Adding files')
        self.git.add_all()
        inform.info('Commiting')
        self.git.commit()
        vcs_ssh = VCS_SETTINGS['ssh']
        vcs = CONFIG['vcs']['use']
        vcs_use_groups = VCS_SETTINGS['use_groups']
        if on_vcs:
            if vcs == 'gitlab' and vcs_use_groups:
                git_repo = f'{create_on_remote_res}/{self.project}.git'
            else:
                git_repo = f'{create_on_remote_res}.git'
            # A configured ssh-prefix that already contains ':' is
            # joined with '/', otherwise scp-style "host:repo" is used.
            if ':' in vcs_ssh:
                git_origin = f'{vcs_ssh}/{git_repo}'
            else:
                git_origin = f'{vcs_ssh}:{git_repo}'
            inform.info(f'Setting origin to {git_origin}')
            self.git.set_origin(git_origin)
            inform.info(f'Pushing to origin')
            self.git.push_to_branch('master')
        inform.finished()
        # Build the project's conda-environment for the new folder.
        self.update(path=(path / self.environment).absolute())
    else:
        inform.error('Folder already exists')
        inform.critical()
def get_branch(self):
    """
    Return the name of the currently checked-out branch of the local
    git-repository; on failure an error is reported and nothing is
    returned.
    """
    command = (f'cd {self.path.absolute()} && '
               'git rev-parse --abbrev-ref HEAD')
    try:
        return utils.run_in_bash(command)
    except CalledProcessError as err:
        inform.error(f'Can\'t get branch. Got error {err.output}')