def read_logins():
    """Load and decrypt the `.logins` credential file from the current directory.

    Prompts (repeatedly) for the GPG symmetric passphrase until decryption
    succeeds, then parses the JSON payload.

    Returns:
        NoFailReadOnlyDict mapping component name -> credentials dict
        (missing keys resolve to None), or None when no `.logins` file exists.
    """
    if os.path.isfile('.logins'):
        log.success(Fore.GREEN + config.PADLOCK + " Found crypted logins file" + Fore.RESET)
        gpg = gnupg.GPG()
        cr = None
        with open('.logins', 'r') as fh:
            cr = fh.read()
        crypt = None
        # Loop until the user supplies the correct passphrase; there is
        # deliberately no retry limit (interactive tool).
        while True:
            passphrase = getpass.getpass('Passphrase for login file: ')
            crypt = gpg.decrypt(cr, passphrase=passphrase)
            if not crypt.ok:
                log.error(
                    Fore.RED + config.PADLOCK + 'Could not unlock the logins file, is the passphrase correct?' + Fore.RESET)
                # raise ValueError('Could not uncrypt the login file')
            else:
                break
        # Payload layout: {'creation_date': str, 'data': {component: {...}}}
        cr = json.loads(crypt.data)
        log.success('Logins creation time: ' + cr['creation_date'])
        log.success(Fore.GREEN + config.OPEN_PADLOCK + ' Got logins for ' + ', '.join(cr['data']) + Fore.RESET)
        return NoFailReadOnlyDict(cr['data'], default=None)
    return None
def call_with_stdout(args, ignore_err=False, stdout=PIPE, inp=None, stderr=PIPE):
    """Run a subprocess and return its decoded stdout.

    Args:
        args: command as a string (split on single spaces) or argument list.
        ignore_err: when True, a non-zero exit status does not raise.
        stdout: stdout target passed to Popen (default: capture via PIPE).
        inp: optional bytes to feed to the process' stdin.
        stderr: stderr target passed to Popen (default: capture via PIPE).

    Returns:
        The decoded stdout, or None when stdout was not captured.

    Raises:
        CalledProcessError: when the process exits non-zero and
            ignore_err is False.
    """
    cmd = args.split(' ') if isinstance(args, str) else args
    with Popen(cmd,
               stdout=stdout,
               stdin=PIPE if inp is not None else None,
               stderr=stderr) as proc:
        out, err = proc.communicate(input=inp)
        code = proc.poll()
        if code != 0 and not ignore_err:
            if log.get_verbose():
                log.error('Error from subprocess')
                # BUG FIX: out/err are bytes (or None); the original compared
                # them against the str '' which is never equal to bytes, so
                # even empty streams were printed. Truthiness handles both
                # None and empty output.
                if err:
                    print('err: ' + str(err), file=sys.stderr)
                if out:
                    print('out: ' + str(out), file=sys.stderr)
            raise CalledProcessError(code, args, out, err)
        if log.get_verbose():
            log.debug('Output of ' + repr(args))
            if out is not None:
                print(out.decode())
            if err is not None:
                print(err.decode())
        if out is not None:
            return out.decode()
def build(self):
    """Build the sdist and wheel distributions into ./build/dist, exiting on failure."""
    log.success('Building package in ./build')
    setup_args = 'setup.py sdist -d build/dist bdist_wheel -d build/dist'
    try:
        ioutils.call_python('', setup_args, stdout=subprocess.PIPE)
    except CalledProcessError as ex:
        log.error('Unable to build the package')
        log.error(repr(ex))
        exit(1)
def ask_logins():
    """Interactively collect git/pypi/docker credentials and store them
    symmetrically encrypted in `.logins`, adding `.logins` to .gitignore.

    Asks for confirmation before overwriting an existing `.logins` file and
    before collecting any credential. Blank login input skips a component.
    """
    if os.path.isfile('.logins'):
        log.warning(Fore.YELLOW + 'The logins file already exist, overwrite it ?' + Fore.RESET)
        yes = input('Enter \'yes\' to overwrite: ')
        if yes != 'yes':
            return
    cr = {'creation_date': get_date(), 'data': {}}
    print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
    print(Fore.YELLOW + '\tLogins configuration\n' + Fore.RESET)
    print(
        Fore.GREEN + 'You are going to be asked you credentials for this project\n' +
        'They will be stored under .logins crypted with AES and .logins append to .gitignore' +
        Fore.RESET)
    yes = input('Do you wish to continue? (Enter "yes" to continue): ')
    if yes != 'yes':
        log.error('Cancelled')
        return
    print('')

    def ask_login(arr, component):
        # Prompt for one component's credentials; blank login means N/A.
        print(Fore.LIGHTGREEN_EX + '\nNow configuring login for: ' + Fore.GREEN + component)
        print(Fore.RED + "Leave blank if Not Applicable" + Fore.RESET)
        login = input('Login: ')
        if login == '':
            return
        password = getpass.getpass()
        arr[component] = {'login': login, 'password': password}

    ask_login(cr['data'], 'git')
    ask_login(cr['data'], 'pypi')
    ask_login(cr['data'], 'docker')
    # Symmetric encryption: no recipients, passphrase prompted by gpg itself.
    gpg = gnupg.GPG()
    gpg.encrypt(json.dumps(cr), (), symmetric=True, output='.logins')
    try:
        # check-ignore exits non-zero when the path is NOT already ignored.
        call_git('check-ignore .logins')
    except CalledProcessError:
        with open('.gitignore', 'a') as fh:
            # BUG FIX: the original wrote '\.logins' (invalid escape sequence,
            # literal backslash in .gitignore) with no surrounding newlines,
            # which could fuse onto the file's last line.
            fh.write('\n.logins\n')
        log.success('Appended .logins to .gitignore')
def check_project(self):
    """Check code and exit if not conform."""
    conform_issues, code_errors = self.check_code()
    if not conform_issues and not code_errors:
        return
    log.error('Project is not conform or has errors, run spvm status -s')
    if config.config['check']:
        log.error(
            Fore.RED + 'This error is fatal, to make it non-fatal, use -n')
        exit(1)
def maybe_load_meta(self):
    """If a metaFile exists, it will be loaded."""
    self.meta = None
    if not os.path.isfile(self.projectMetaFile):
        log.debug('No meta to load')
        return
    try:
        with open(self.projectMetaFile, 'r') as meta_fh:
            self.meta = json.loads(meta_fh.read())
            log.debug('Loaded project meta')
    except json.JSONDecodeError as decode_err:
        log.error(repr(decode_err))
        log.error(config.metaFileName + " is invalid")
        exit(1)
def input_with_default(prompt, default=None, rtype=str):
    """Prompt until the user enters a value convertible to `rtype`.

    Args:
        prompt: text shown to the user (the default is appended in parens).
        default: value returned on empty input; when None, `rtype()` is used.
        rtype: expected type (a real type object, e.g. str or int).

    Returns:
        '' when the user types 'null' or 'none', `default` on empty input,
        otherwise `rtype(input)`.
    """
    assert type(rtype) == type
    if default is None:
        default = rtype()
    while True:
        res = input(f'{prompt} ({default}): ')
        if res == 'null' or res == 'none':
            return ''
        # BUG FIX: was `res is ''` — identity comparison with a string literal
        # is implementation-dependent (SyntaxWarning on CPython >= 3.8).
        if res == '':
            return default
        try:
            return rtype(res)
        except ValueError:
            log.error('An input of type ' + rtype.__name__ + " is expected")
def run_test(self):
    """Run the tests with pytest."""
    try:
        ioutils.call_pytest(self.location)
    except CalledProcessError as ex:
        # pytest exit code 5 means "no tests collected" — tolerated, loudly.
        if ex.returncode != 5:
            log.error('Tests Failed')
            exit(1)
        log.warning('No tests were found')
        log.warning('This is not considered fatal but is VERY STRONGLY discouraged')
        log.warning('Resuming in 2 seconds')
        sleep(2)
        return
    log.success('Tests passed')
def up_version(self, kind):
    """Increase the version in the project meta based on the 'kind' instruction.

    kind can be a str or a number:
      - a digit string is treated as an index into the version components;
      - 'major' / 'minor' / 'patch' select the corresponding component;
      - 'pass' leaves the version untouched.
    Components below the incremented one are reset to 0.
    """
    log.fine('Increasing version (' + str(kind) + ')')
    v = self.meta['project_vcs']['version']
    v_ = [int(i) for i in v.split('.')]
    while len(v_) < 3:
        # BUG FIX: the original padded with the *string* '0', which made the
        # later integer increment raise TypeError for short versions.
        v_.insert(0, 0)
    log.debug("Current version comphrension: " + str(v_))
    if kind.isdigit():
        index = int(kind)
        if index < 0 or index >= len(v_):
            log.error('Unrecognized version changer: ' + str(kind))
            # BUG FIX: the original only logged and then fell through to the
            # increment, crashing with IndexError instead of exiting cleanly.
            exit(1)
    else:
        kind = kind.lower()
        if kind == 'patch':
            index = len(v_) - 1
        elif kind == 'major':
            index = 0
        elif kind == 'minor':
            index = len(v_) - 2
        elif kind == 'pass':
            log.success('Version not changed')
            return
        else:
            log.error('Unrecognized version changer: ' + str(kind))
            exit(1)
    v_[index] += 1
    # Reset every component below the incremented one (resolves the old
    # "FIXME other to 0": the numeric-index path now behaves like the
    # major/minor/patch path).
    v_ = [v_[i] if i <= index else 0 for i in range(len(v_))]
    self.meta['project_vcs']['version'] = '.'.join([str(i) for i in v_])
    self.save_project_info()
    log.success(v + ' -> ' + self.get_version())
def add_dependency(self, dep):
    """
    Add a dependency to the project:
    Pip handles different sources:
    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.
    """
    if self.get_project_status() != config.STATUS_PROJECT_INITIALIZED:
        log.error("The project is not initialized, call spvm init first before adding dependencies")
        return
    try:
        ioutils.install_packages(dep)
    except CalledProcessError as pip_err:
        log.error("Pip call failed with code " + str(pip_err.returncode))
        log.error("Does the dependency exist or is something broken?")
        return
    # Only record the dependency once pip accepted it.
    self.meta['project_requirements']['python_packages'].append(dep)
    self.save_project_info()
def _show_docker_progress(obj):
    """Render one decoded JSON progress event from the docker daemon.

    NOTE(review): this function uses `nonlocal status`, so it only compiles
    as a closure inside an enclosing function that binds `status` (see the
    identical nested copy in _release_docker) — presumably this chunk was
    extracted from there; confirm against the full file.
    """
    nonlocal status
    if 'errorDetail' in obj:
        log.error(Fore.RED + 'Error: ' + str(obj['errorDetail']['message']) + Fore.RESET)
        raise docker.errors.DockerException(obj['errorDetail']['message'])
    if 'stream' in obj:
        # Build output: print each non-empty line and reset progress state.
        for line in obj['stream'].split('\n'):
            if line == '':
                continue
            log.success(line.strip())
        status.clear()
        return
    if 'status' in obj:
        if 'id' not in obj:
            # Global status message, not tied to a layer id.
            log.success(obj['status'])
            return
        if len(status) == 0:
            print(Fore.GREEN + "\rA docker I/O operation is in progress" + Fore.RESET)
        s = obj['id'].strip() + ' ' + obj['status'] + '\t'
        if 'progress' in obj:
            s += obj['progress']
        if obj['id'] not in status:
            # First event for this layer: remember it and print a new row.
            status[obj['id']] = {'index': len(status) + 1, 'str': s}
            print(s)
            return
        # Known layer: update its row, then redraw the whole table using
        # ANSI cursor-up (\033[F) and clear-line (\033[K) escapes.
        status[obj['id']]['str'] = s
        print('\033[F' * (len(status) + 1))
        for e in status:
            print('\033[K' + status[e]['str'])
def _sign_package(self):
    """Add the signatures to the package before upload."""
    signing_key = self.meta['project_vcs']['release']['package_signing_key']
    if signing_key == '':
        log.error(Fore.RED + config.OPEN_PADLOCK + ' No key provided for package signing' + Fore.RESET)
        return
    log.success('Signing the package with the key: ' + signing_key)
    try:
        # Sign every file found under ./build/dist (recursively).
        for dirpath, _dirnames, filenames in os.walk(join('.', 'build', 'dist')):
            for filename in filenames:
                self._sign_file(join(dirpath, filename), signing_key)
    except CalledProcessError as sign_err:
        log.error(Fore.RED + config.OPEN_PADLOCK + ' Could not sign the package' + Fore.RESET)
        log.error('The program will now stop, you can resume with: spvm publish pypi')
        log.error('When the issues are fixed')
        log.error(repr(sign_err))
        exit(1)
    log.success(Fore.GREEN + config.PADLOCK + ' Package Signed with key: ' + signing_key + Fore.RESET)
def check_packages(base_url='https://pypi.python.org/pypi/'):
    """Verify md5 hashes and GPG signatures of the downloaded files in `piptmp`.

    For each file, queries the index metadata (derived from the
    'name-version-...' filename convention), compares the md5 digest, and
    verifies the detached .asc signature with gpg when one is published.
    Files without a signature, or whose signature cannot be checked, are
    counted and reported as unverified; a hash mismatch or an explicitly
    invalid signature aborts the program.
    """
    log.fine('Checking packages in: ' + piptmp)
    unchecked = 0
    for f in os.listdir(piptmp):
        try:
            log.set_additional_info(f)
            f_ = piptmp + os.sep + f
            if not os.path.isfile(f_):
                continue
            # Filename convention: <name>-<version>-... (wheel/sdist naming).
            splited = f.split('-')
            log.debug('Checking ' + splited[0])
            package_info = query_get(base_url + splited[0] + '/' + splited[1] + '/json')
            for f_info in package_info['releases'][splited[1]]:
                if not os.path.isfile(
                        os.path.join(piptmp, f_info['filename'])):
                    continue
                # NOTE(review): this compares md5(f_) — the *current* file —
                # against the digest of every locally-present release file
                # f_info; it looks like it should first check
                # f_info['filename'] == f. Confirm intended behavior.
                if md5(f_) != f_info['md5_digest']:
                    log.error('Hash do not match')
                    exit(1)
                # log.success(Fore.GREEN+'Hash checked for '+f)
                if not f_info['has_sig']:
                    log.debug(Fore.YELLOW + 'No signature provided for ' + f_info['filename'])
                    # FIXME throw?
                    unchecked += 1
                    continue
                sig = query_get(f_info['url'] + '.asc', False)
                log.debug('File: ' + f_info['filename'] + ' has signature:\n ' + sig.decode())
                # Check the detached signature with a throwaway keyring.
                q = '' if log.get_verbose() else ' --quiet'
                try:
                    call_gpg('--no-default-keyring --keyring tmp.gpg' + q +
                             ' --auto-key-retrieve --verify - ' + f_, inp=sig)
                    # FIXME Only use known keys?
                except CalledProcessError as er:
                    # gpg exit code 1 means the signature is definitively bad.
                    if er.returncode == 1:
                        log.error(Fore.RED + config.OPEN_PADLOCK + ' Invalid signature for ' + f)
                        exit(1)
                    log.error('Could not check signature for ' + f + ' (' + repr(er) + ')')
                    unchecked += 1
                    continue
                log.success(Fore.GREEN + config.PADLOCK + ' File ' + f + ' is verified')
        except KeyboardInterrupt:
            exit(2)
        except SystemExit as e:
            # Let the exit(1)/exit(2) calls above propagate.
            raise e
        except BaseException as be:
            # Best-effort: any other failure only marks this file as failed.
            log.error(Fore.RED + config.OPEN_PADLOCK + ' Failed to check ' + f + Fore.RESET)
            log.error(repr(be))
    log.warning(Fore.YELLOW + str(unchecked) + ' file(s) could not be verified')
def run(self, scriptname):
    """Run the named script pipeline from the project meta; exit(1) if unknown."""
    if scriptname in self.meta['scripts']:
        self._run_scripts_pipeline(scriptname)
        return
    log.error('Script not found: ' + scriptname)
    exit(1)
def _release_docker(self, credentials=None):
    """Build the project's Docker image and push it to the configured repo.

    Args:
        credentials: optional dict with 'login' and 'password' keys used to
            log in to the registry before pushing; when None the push relies
            on the ambient docker login. In mock mode the push is skipped.
    """
    log.success('Building Docker Image')
    client = docker.from_env()
    log.debug(json.dumps(client.version(), indent=4))
    status = {}

    def _show_docker_progress(obj):
        # Render one decoded JSON progress event from the docker daemon.
        nonlocal status
        if 'errorDetail' in obj:
            log.error(Fore.RED + 'Error: ' + str(obj['errorDetail']['message']) + Fore.RESET)
            raise docker.errors.DockerException(obj['errorDetail']['message'])
        if 'stream' in obj:
            for line in obj['stream'].split('\n'):
                if line == '':
                    continue
                log.success(line.strip())
            status.clear()
            return
        if 'status' in obj:
            if 'id' not in obj:
                log.success(obj['status'])
                return
            if len(status) == 0:
                print(Fore.GREEN + "\rA docker I/O operation is in progress" + Fore.RESET)
            s = obj['id'].strip() + ' ' + obj['status'] + '\t'
            if 'progress' in obj:
                s += obj['progress']
            if obj['id'] not in status:
                status[obj['id']] = {'index': len(status) + 1, 'str': s}
                print(s)
                return
            # Redraw the per-layer progress table in place with ANSI escapes.
            status[obj['id']]['str'] = s
            print('\033[F' * (len(status) + 1))
            for e in status:
                print('\033[K' + status[e]['str'])

    # FIXME choose dockerfile
    rep = self.meta['project_vcs']['docker_repository']
    log.success('Image repo: ' + rep)
    # Use the low-level API client so build/push stream raw JSON lines.
    if hasattr(client, 'api'):
        client = client.api
    g = client.build(tag=rep, path='.', dockerfile='Dockerfile')
    for line in g:
        _show_docker_progress(json.loads(line.decode()))
    if config.config['mock']:
        log.warning(Fore.YELLOW + 'Mock mode: not pushing' + Fore.RESET)
        return
    if credentials is not None:
        try:
            client.login(credentials['login'], credentials['password'])
        except docker.errors.APIError as excep:
            # Reconstructed from scrubbed source: report the failure and
            # abort the push rather than continuing unauthenticated.
            log.error('Cannot login: ' + repr(excep))
            return
        log.success('Logged in as ' + credentials['login'])
    log.success('Pushing image')
    for line in client.push(rep, stream=True):
        _show_docker_progress(json.loads(line.decode()))
def remove_dependency(self, dep):
    """Remove a dependency from the project.

    Not implemented yet: currently only logs an error and returns.
    """
    # TODO
    log.error('Not implemented: remove dependency')
def release(self, kind='pass'):
    """Starts a release pipeline.

    Builds an ordered list of bound-method steps (clear build, optional
    update/repair, checks, optional tests, version bump, init population,
    install, publish) interleaved with display-only strings describing the
    publish targets, prints the plan, optionally asks for confirmation,
    then executes each callable step in order.

    Args:
        kind: version-change instruction forwarded to up_version
            ('pass' leaves the version unchanged).
    """
    if self.get_project_status() != config.STATUS_PROJECT_INITIALIZED:
        log.error('The project is not initialized')
        log.error('Run spvm init first')
        exit(1)
    pipeline = []
    log.fine('Calculating release pipeline')
    pipeline.append(self.clear_build)
    if config.config['update']:
        pipeline.append(self.update_dependencies)
    if config.config['repair']:
        pipeline.append(self.repair)
    pipeline.append(self.check_project)
    if config.config['test']:
        pipeline.append(self.run_test)
    pipeline.append(self.up_version)
    pipeline.append(self.populate_init)
    pipeline.append(self.install_setup)
    pipeline.append(self.publish)
    NO = Fore.RED + 'NO' + Fore.RESET
    MOCK = (
        '' if not config.config['mock'] else ' ' +
        Fore.LIGHTYELLOW_EX + '(MOCK)' + Fore.RESET)
    YES = Fore.GREEN + 'YES' + Fore.RESET + MOCK
    # publish_context: (git, pypi, docker) booleans.
    publish_context = self.detect_publish_context()
    # These strings are display-only entries; the execution loop below
    # skips anything that is not callable.
    pipeline.append(Fore.CYAN + " - Git Publish:\t\t" + (YES if publish_context[0] else NO))
    pipeline.append(Fore.CYAN + " - PyPi Publish:\t\t" + (YES if publish_context[1] else NO))
    pipeline.append(Fore.CYAN + " - Docker Publish:\t" + (YES if publish_context[2] else NO))
    log.success('Release pipeline is: ')
    for f in pipeline:
        if isinstance(f, str):
            log.success(f)
            continue
        log.success(" -> " + f.__name__)
    if not config.config['mock']:
        log.warning(
            Fore.YELLOW + 'The mock mode is not activated, this is for real !' + Fore.RESET)
    if config.config['ask']:
        input('Press Enter to continue')
    for f in pipeline:
        # Skip the display strings and decorated wrappers
        # (identified by name — presumably from a decorator; confirm).
        if isinstance(f, str) or f.__name__ == 'wrapper':
            continue
        log.success('> ' + f.__name__)
        if f.__name__ == 'up_version':
            # the only one to give parameters to
            f(kind)
        else:
            f.__call__()