def update_dryrun(package_name, module, force, debug=False):
    """
    Summary.

        Increments pypi registry project version by 1 minor increment
        and displays the result table without writing anything

    Args:
        :package_name (str): name of the project package
        :module (str): name of the module containing the version label
        :force: NOTE(review): unused in this body — confirm whether it
            should influence the computed version as in the other variant
        :debug (bool): verbose output toggle (unused here)

    Returns:
        result of setup_table() rendering current/pypi/next versions
    """
    module_path = os.path.join(_root(), package_name, str(module))
    # version currently recorded in the local version module
    current = current_version(module_path)
    # pypi.python.org registry version; 'N/A' when package not yet published.
    # NOTE(review): 'N/A' is then passed to greater_version() — confirm that
    # helper tolerates a non-version string.
    pypi = pypi_registry(package_name) or 'N/A'
    # next version: take the larger of local vs registry version labels
    _version = greater_version(current, pypi)
    if valid_version(_version):
        # bump the winning version by one minor increment
        version_new = increment_version(_version)
    else:
        stdout_message('You must enter a valid version (x.y.z)', prefix='WARN')
        sys.exit(1)
    return setup_table(current, pypi, version_new)
def postbuild(root, container, rpm_root, scripts_dir, version_module, version, persist):
    """
    Summary:
        Post-build clean up

    Args:
        :root (str): project root directory (fs path)
        :container (object): Docker container object
        :rpm_root (str): target dir for rpm package files
        :scripts_dir (str): directory where scripts reside
        :version_module (str): name of module containing version number
        :version (str): current version label (Example: 1.6.8)
        :persist (bool): When True, retain container build environment
            intact and running; do not clean and remove container

    Returns:
        (package_path, contents) tuple on success; '' on OSError.
        package_path/contents are None when no artifact was located.
    """
    project_dirname = root.split('/')[-1]
    volmnt = VOLMNT
    # fix: initialise so the final return cannot raise UnboundLocalError
    # when the build produced no .rpm artifact
    # (also removed unused locals: major, minor, delete)
    package_path = None
    contents = None
    try:
        # cp rpm created to repo
        package = locate_artifact('.rpm', volmnt)
        if package:
            copyfile(locate_artifact('.rpm', volmnt), rpm_root)
            package_path = rpm_root + '/' + os.path.split(package)[1]
        # rpm contents text file
        contents = locate_artifact('.txt', volmnt)
        if persist is False:
            # stop and rm container
            cmd = f'docker stop {container.name}'
            subprocess.getoutput(cmd)
            # status
            if not container_running(container.name):
                stdout_message(f'{container.name} successfully halted', prefix='OK')
                cmd = f'docker rm {container.name}'
                subprocess.getoutput(cmd)
        # remove temp version module copied to scripts dir
        if os.path.exists(os.path.join(scripts_dir, version_module)):
            os.remove(os.path.join(scripts_dir, version_module))
        # rewrite version file with current build version
        with open(os.path.join(root, PROJECT, version_module), 'w') as f3:
            f2 = ["__version__ = \'" + version + "\'" + '\n']
            f3.writelines(f2)
        path = project_dirname + (root + '/' + PROJECT + '/' + version_module)[len(root):]
        stdout_message(
            '{}: Module {} successfully updated.'.format(inspect.stack()[0][3], yl + path + rst)
        )
    except OSError as e:
        logger.exception('{}: Postbuild clean up failure'.format(inspect.stack()[0][3]))
        return ''
    return package_path, contents
def create_repositories(path_list):
    """
    Clone each git repository described in path_list into the directory
    tree prepared earlier by create_directory_structure().
    """
    for record in path_list:
        repo_url = record['repo'].strip()
        repo_name = record['repo'].split('/')[-1].split('.')[0]
        clone_parent = record['path'].strip()
        target = record['location'].strip()

        if os.path.exists(target):
            # already cloned previously — leave it untouched
            stdout_message(
                f'Skipping creation of repository {target} - Preexisting.')
            continue

        # announce, then clone from within the parent directory
        stdout_message(
            f'Creating repository {repo_name} at location {target}')
        os.chdir(clone_parent)
        print(subprocess.getoutput('git clone {}'.format(repo_url)))
    return True
def update_signature(version, path):
    """Write the new version label into the version module at *path*.

    Returns True on success, False when the file cannot be written.
    """
    try:
        with open(path, 'w') as fh:
            fh.write(f"__version__ = '{version}'\n")
        return True
    except OSError:
        stdout_message('Version module unwriteable. Failed to update version')
        return False
def writeout_status(key, region, filename, finished):
    """Display current status message to user"""
    region_fmt = fs + region + '/' + rst     # formatted region
    fname_fmt = bbl + filename + rst         # formatted filename
    tab = '\t'.expandtabs(13)
    if finished:
        stdout_message(
            f'Wrote {region_fmt + fname_fmt}\n{tab}successfully to local filesystem',
            prefix='OK')
    else:
        stdout_message(
            f'Problem writing {key} to local filesystem.', prefix='WARN')
def display_package_contents(build_root, version):
    """
    Summary.
        Output newly built package contents.

    Args:
        :build_root (str): location of newly built rpm package
        :version (str): current version string, format: '{major}.{minor}.{patch num}'

    Returns:
        Success | Failure, TYPE: bool
    """
    # locate the .deb artifact whose name matches the current version
    pkg_path = None
    for f in os.listdir(build_root):
        if f.endswith('.deb') and re.search(version, f):
            pkg_path = build_root + '/' + f
    if pkg_path is None:
        stdout_message(
            message=f'Unable to locate a build package in {build_root}. Abort build.',
            prefix='WARN')
        return False

    tab = '\t'.expandtabs(2)
    width = 80
    path, package = os.path.split(pkg_path)
    os.chdir(path)
    cmd = 'dpkg-deb --contents ' + package
    r = subprocess.getoutput(cmd)
    formatted_out = r.splitlines()

    # title header and subheader
    header = '\n\t\tPackage Contents: ' + bd + package + rst + '\n'
    print(header)
    subheader = tab + 'Permission' + tab + 'Owner/Group' + '\t' + 'ctime' \
        + '\t'.expandtabs(8) + 'File'
    print(subheader)

    # divider line (fix: plain string repetition instead of abusing
    # list(filter(lambda: print(...))) for its print side effects)
    print('-' * (width + 1), end='')
    print('\r')

    # content: permission + owner/group + ctime + highlighted file path
    for line in formatted_out:
        prefix = [tab + x for x in line.split()[:2]]
        raw = line.split()[2:4]
        content_path = line.split()[5]
        fline = ''.join(prefix) + '\t'.join(raw[:4]) + tab + yl + content_path + rst
        print(fline)
    return True
def update_version_module(version, module):
    """Rewrite the version module file with the current build version."""
    with open(module, 'w') as fh:
        fh.writelines([f'__version__ ="{version}"\n'])
    caller = inspect.stack()[0][3]
    highlighted = yl + module + rst
    stdout_message(
        '{}: Module {} successfully updated with version {}.'.format(
            caller, highlighted, version))
    return True
def operation_status(source, destination):
    """Validates copy operations complete successfully"""
    # guard clause: missing destination means the copy failed
    if not os.path.exists(destination):
        stdout_message(
            message='Failure to copy:\t{} to {}'.format(
                lk + source + rst, lk + destination + rst),
            prefix='WARN')
        return False
    stdout_message(
        message='Copied:\t{} {} {}'.format(
            lk + source + rst, arrow, lk + destination + rst),
        prefix='OK')
    return True
def init_cli():
    """
    keyup CLI entry point: parse command-line arguments and dispatch
    the requested IAM keyset operation. Exits the process with a code
    from exit_codes in every terminal path.
    """
    # parser = argparse.ArgumentParser(add_help=False, usage=help_menu())
    parser = argparse.ArgumentParser(add_help=False)
    try:
        args = options(parser)
    except Exception as e:
        help_menu()
        stdout_message(str(e), 'ERROR')
        sys.exit(exit_codes['EX_OK']['Code'])

    if len(sys.argv) == 1:
        help_menu()
        sys.exit(exit_codes['EX_OK']['Code'])
    elif args.help:
        help_menu()
        sys.exit(exit_codes['EX_OK']['Code'])
    elif args.version:
        package_version()
    elif args.configure:
        r = option_configure(args.debug, local_config['PROJECT']['CONFIG_PATH'])
        return r
    else:
        if precheck():      # if prereqs set, run
            if authenticated(profile=args.profile):
                # execute keyset operation
                success = main(operation=args.operation,
                               profile=args.profile,
                               user_name=args.username,
                               auto=args.auto,
                               debug=args.debug)
                if success:
                    logger.info('IAM access keyset operation complete')
                    sys.exit(exit_codes['EX_OK']['Code'])
            else:
                # fix: corrected 'Authenication' typo in user-facing message
                stdout_message(
                    'Authentication Failed to AWS Account for user %s' % args.profile,
                    prefix='AUTH', severity='WARNING')
                sys.exit(exit_codes['E_AUTHFAIL']['Code'])

    failure = """ : Check of runtime parameters failed for unknown reason.
    Please ensure local awscli is configured. Then run keyconfig to
    configure keyup runtime parameters.   Exiting. Code: """
    # fix: the original embedded sys.exit() inside the %-format expression,
    # which terminated the process before the warning or message was emitted
    logger.warning(failure + 'Exit. Code: %s' % exit_codes['E_MISC']['Code'])
    print(failure)
    sys.exit(exit_codes['E_MISC']['Code'])
def prebuild(builddir, libsrc, volmnt, parameter_file):
    """Summary: Prerequisites and dependencies for build execution

    Args:
        :builddir (str): build working directory, recreated each run
        :libsrc (str): path to the library source directory
        :volmnt (str): Docker volume mount directory, recreated each run
        :parameter_file (str): json file declaring build parameters,
            including the 'VersionModule' filename

    Returns:
        Success | Failure, TYPE: bool
    """
    def preclean(dir, artifact=''):
        """Cleans residual build artifacts by removing """
        # NOTE(review): parameter `dir` shadows the builtin of the same name
        try:
            if artifact:
                # clean artifact from inside an existing dir
                if os.path.exists(libsrc + '/' + artifact):
                    rmtree(libsrc + '/' + artifact)
            elif os.path.exists(dir):
                rmtree(dir)    # rm entire directory
        except OSError as e:
            logger.exception(
                '%s: Error while cleaning residual build artifacts: %s' %
                (inspect.stack()[0][3], str(e)))
            return False
        return True

    version_module = json.loads(read(parameter_file))['VersionModule']

    try:
        # preclean only returns False when rmtree raised OSError
        if preclean(builddir) and preclean(volmnt) and preclean(libsrc, '__pycache__'):
            stdout_message(f'Removed pre-existing build artifacts ({builddir}, {volmnt})')
            os.makedirs(builddir)
            os.makedirs(volmnt)

        root = git_root()
        src = os.path.join(root, PROJECT, version_module)
        dst = os.path.join(root, 'scripts', version_module)
        # deal with leftover build artifacts
        if os.path.exists(dst):
            os.remove(dst)
        # cp version module to scripts dir for import
        r_cf = copyfile(src, dst)
        # side effect: publishes __version__ at module scope for later use;
        # import resolves against the freshly copied module
        global __version__
        from _version import __version__
        if r_cf and __version__ and docker_daemon_up():
            return True
    except Exception as e:
        logger.exception(
            '{}: Failure to import __version__ parameter'.format(inspect.stack()[0][3])
        )
    return False
def docker_daemon_up():
    """
    Summary:
        Determines if docker installed and running by
        evaluating the exit code of docker images cmd

    Returns:
        True (running) | False, TYPE: bool
    """
    probe = 'docker images >/dev/null 2>&1; echo $?'
    # short-circuit: only shell out when the docker binary is on PATH
    daemon_ok = bool(which('docker')) and int(subprocess.getoutput(probe)) == 0
    if not daemon_ok:
        stdout_message('Docker engine not running or not accessible', prefix='WARN')
    return daemon_ok
def valid_version(parameter, min=0, max=100):
    """
    Summary.
        User input validation. Validates version string made up of
        integers. Example: '1.6.2'. Each integer in the version
        sequence must be in the range [min, max]. Maximum version
        string digits is 3 (Example: 0.2.3)

    Args:
        :parameter (str): Version string from user input
        :min (int): Minimum allowable integer value of a single digit
            in the version string provided as a parameter
        :max (int): Maximum allowable integer value of a single digit
            in a version string provided as a parameter

    Returns:
        True if parameter valid or None, False if invalid, TYPE: bool
    """
    # type correction and validation
    if parameter is None:
        # None means "no override supplied" and is accepted
        return True
    elif isinstance(parameter, int):
        return False
    elif isinstance(parameter, float):
        parameter = str(parameter)

    component_list = parameter.split('.')
    # fix: versions with more than 3 components previously fell through
    # the `if length <= 3` guard and were accepted — reject them
    if len(component_list) > 3:
        return False
    try:
        for component in component_list:
            # each component must be an integer within [min, max]
            if not (min <= int(component) <= max):
                return False
    except ValueError as e:
        fx = inspect.stack()[0][3]
        stdout_message(f'{fx}: Invalid version number supplied: {e}')
        return False
    return True
def masterbranch_version(version_module):
    """ Returns version denoted in the master branch of the repository """
    branch = current_branch(git_root())
    commands = ['git checkout master', 'git checkout {}'.format(branch)]
    try:
        # checkout master
        # NOTE(review): the master checkout is commented out upstream, so
        # the version below is read from the *current* branch — confirm intent
        #stdout_message('Checkout master branch:\n\n%s' % subprocess.getoutput(commands[0]))
        masterversion = read(version_module).split('=')[1].strip().strip('"')
        # return to working branch
        # fix: original called .format() on a %s-style template, so the
        # branch name was never substituted into the message
        stdout_message('Returning to working branch: checkout {}\n\n'.format(branch))
        stdout_message(subprocess.getoutput(f'git checkout {branch}'))
    except Exception:
        return None
    return masterversion
def clean(directory, debug):
    """Remove residual bytecode installation files from the build directory."""
    # collect python bytecode artifacts (.pyc / .pyo)
    bytecode = [
        entry for entry in os.listdir(directory)
        if entry.endswith(('.pyc', '.pyo'))
    ]
    if debug:
        stdout_message(
            message=f'bytecode_list contents: {bytecode}',
            prefix='DEBUG'
        )
    for artifact in bytecode:
        os.remove(directory + '/' + artifact)
        logger.info('Artifact {} cleaned from {}'.format(artifact, directory))
    return True
def writeout_data(key, jsonobject, filename):
    """
    Persists json data to local filesystem

    Args:
        :key (str): top-level key under which jsonobject is stored
        :jsonobject: payload to persist (list gets a count in the status msg)
        :filename (str): destination file on the local filesystem

    Returns:
        Success | Failure, TYPE: bool
    """
    tab = '\t'.expandtabs(13)
    if export_iterobject({key: jsonobject}, filename):
        success = f'Wrote {bbl + filename + rst}\n{tab}successfully to local filesystem.'
        # fix: previously unbound when jsonobject was not a list (NameError)
        ancillary_msg = ''
        if isinstance(jsonobject, list):
            qty = bd + str(len(jsonobject)) + rst
            ancillary_msg = f'\n{tab}{qty} unique instance types utilised for spot in region.'
        stdout_message(success + ancillary_msg, prefix='OK')
        return True
    else:
        failure = f'Problem writing {bbl + filename + rst} to local filesystem.'
        stdout_message(failure, prefix='WARN')
        return False
def container_running(cid, debug=False):
    """
    Summary:
        Verifies if a container is actively running

    Args:
        :cid (str): Container name or hex identifier
        :dclient (object): global docker client

    Returns:
        True (running) | False (stopped) TYPE: bool
    """
    running_msg = f'Container {cid} running'
    try:
        status = dclient.containers.get(cid).status
        if status == 'running':
            if debug:
                stdout_message(running_msg, prefix='OK')
            return True
    except Exception:
        # docker SDK lookup failed — fall back to shelling out to `docker ps`
        if cid in subprocess.getoutput('docker ps'):
            stdout_message(running_msg, prefix='OK')
            return True
        stdout_message(f'Container {cid} stopped', prefix='WARN')
    return False
def build_package(build_root, builddir):
    """
    Summary.
        Creates final os installable package for current build, build version

    Returns:
        Success | Failure, TYPE: bool
    """
    try:
        origin = os.getcwd()
        os.chdir(build_root)
        # guard clause: nothing to package without the build directory
        if not os.path.exists(builddir):
            logger.warning(
                'Build directory {} not found. Failed to create .deb package'.
                format(builddir))
            os.chdir(origin)
            return False
        cmd = 'dpkg-deb --build ' + builddir + ' 2>/dev/null'
        stdout_message('Building {}... '.format(bn + builddir + rst))
        stdout_message(subprocess.getoutput(cmd))
        os.chdir(origin)
    except OSError as e:
        logger.exception('{}: Error during os package creation: {}'.format(
            inspect.stack()[0][3], e))
        return False
    except Exception as e:
        logger.exception(
            '{}: Unknown Error during os package creation: {}'.format(
                inspect.stack()[0][3], e))
        return False
    return True
def docker_teardown(container_object):
    """
    Halt Docker Container, clean/ remove residual artifacts

    Returns:
        Success | Failure, TYPE: bool
    """
    try:
        # stop and rm container
        cmd = f'docker stop {container_object.name}'
        subprocess.getoutput(cmd)
        # status
        if not container_running(container_object.name):
            stdout_message(f'{container_object.name} successfully halted', prefix='OK')
            cmd = f'docker rm {container_object.name}'
            subprocess.getoutput(cmd)
    except Exception as e:
        name = container_object.name
        # fix: stack frame index 5 is the context index, not the function
        # name; use [3] (function name), consistent with the rest of the file
        fx = inspect.stack()[0][3]
        logger.exception(
            '{}: Error halting and deleting active container ({}): {}'.format(fx, name, e))
    return True
def create_builddirectory(param_dict, path, version, force):
    """
    - Creates the deb package binary working directory
    - Checks if build artifacts preexist; if so, halts
    - If force is True, continues even if artifacts exist (overwrites)

    Returns:
        builddir, TYPE: str
    """
    try:
        project = param_dict['Project']
        builddir = '{}_{}_amd64'.format(project, version)
        stdout_message(message='BUILDDIR IS: {}'.format(builddir))
        preexists = builddir in os.listdir(path)
        if force is True and preexists:
            # overwrite: remove existing build directory
            rmtree(os.path.join(path, builddir))
        elif force is False and preexists:
            stdout_message(
                'Cannot create build directory {} - preexists. Use --force option to overwrite'
                .format(builddir),
                prefix='WARN',
                severity='WARNING')
            return None
        # create build directory
        os.mkdir(os.path.join(path, builddir))
    except OSError as e:
        logger.exception('{}: Unable to create build directory {}'.format(
            inspect.stack()[0][3], builddir))
    return builddir
def update_version(force_version, package_name, module, debug=False):
    """
    Summary.
        Increments project version by 1 minor increment or hard
        sets to the version signature specified

    Args:
        :force_version (Nonetype): Version signature (x.y.z) if version
            number is hardset instead of incremented

    Returns:
        Success | Failure, TYPE: bool
    """
    module_path = os.path.join(_root(), package_name, str(module))
    current = current_version(module_path)
    stdout_message('Current project version found: {}'.format(current))

    if force_version is None:
        # no override: bump local version, then reconcile against pypi
        bumped = increment_version(current)
        registry = pypi_registry(package_name)
        version_new = greater_version(bumped, registry)
    elif identical_version(force_version, current):
        tab = '\t'.expandtabs(4)
        msg = 'Force version ({}) is same as current version signature. \n \
        {}Skipping version update. End version_update.'.format((force_version), tab)
        stdout_message(msg)
        return True
    elif valid_version(force_version):
        # override supplied: never move backwards vs pypi or the increment
        most_recent = greater_version(force_version, pypi_registry(package_name))
        version_new = greater_version(most_recent, increment_version(current))
    else:
        stdout_message('You must enter a valid version (x.y.z)', prefix='WARN')
        sys.exit(1)
    stdout_message('Incremental project version: {}'.format(version_new))
    return update_signature(version_new, module_path)
def update_dryrun(package_name, module, force, debug=False):
    """
    Summary.
        Increments pypi registry project version by 1 minor increment
        (display only; nothing is written)

    Args:
        :force: version signature (x.y.z) when the version number is
            hard-set instead of incremented

    Returns:
        Success | Failure, TYPE: bool
    """
    module_path = os.path.join(_root(), package_name, str(module))
    local_version = current_version(module_path)
    stdout_message('Current project version found: {}'.format(local_version))
    registry_version = pypi_registry(package_name)
    stdout_message('Current pypi registry version found: {}'.format(registry_version))

    candidate = greater_version(local_version, registry_version)
    if not valid_version(candidate):
        stdout_message('You must enter a valid version (x.y.z)', prefix='WARN')
        sys.exit(1)
    next_version = increment_version(candidate)
    stdout_message('Incremental project version: {}'.format(
        next_version if force is None else force))
    return True
def main():
    """
    Main execution caller

    Return:
        Success || Failure, TYPE: bool
    """
    # prerequisites
    PACKAGE = package_name(os.path.join(_root(), 'DESCRIPTION.rst'))
    module = locate_version_module(PACKAGE)

    parser = argparse.ArgumentParser(add_help=False)
    try:
        args, unknown = options(parser)
    except Exception as e:
        stdout_message(str(e), 'ERROR')
        return exit_codes['E_BADARG']['Code']

    # guard-clause dispatch; order preserved from original elif chain
    if args.help or len(sys.argv) == 1:
        help_menu()
        return 0
    if args.dryrun and args.update:
        stdout_message('Option --dryrun and --update cannot be used together.',
                       prefix='FAIL')
        return 1
    if args.set and not (args.update or args.dryrun):
        stdout_message('--set-version must be used with --update or --dryrun.',
                       prefix='FAIL')
        return 1
    if args.dryrun:
        # use version contained in pypi registry
        update_dryrun(PACKAGE, module, args.set, args.debug)
        return 0
    if args.update:
        update_version(args.set, PACKAGE, module, args.debug)
        return 0
def pypi_version(package_name, module, debug=False):
    """Update version label by incrementing pypi registry version"""
    def fallback(pkg, version_module):
        """Bump the locally-installed package version instead."""
        bumped = increment_version(installed_version(pkg))
        return update_signature(bumped, version_module)

    try:
        module_path = os.path.join(_root(), package_name, module)
        registry = pypi_registry(package_name)
        stdout_message('pypi.python.org registry version: {}'.format(registry),
                       prefix='OK')
        new = increment_version(registry)
        stdout_message('Incremented version to be applied: {}'.format(new))
    except Exception:
        # registry unreachable — derive next version from installed package
        stdout_message(
            'Problem retrieving version label from public pypi.python.org',
            prefix='WARN')
        return fallback(package_name, module_path)
    return update_signature(new, module_path)
def update_version(force_version=None, debug=False):
    """
    Summary.
        Increments project version by 1 minor increment or hard sets
        to the version signature specified

    Args:
        :force_version (Nonetype): Version signature (x.y.z) if version
            number is hardset instead of incremented

    Returns:
        Success | Failure, TYPE: bool
    """
    # prerequisites
    pkg = package_name(os.path.join(_root(), 'DESCRIPTION.rst'))
    version_mod = locate_version_module(pkg)
    module_path = os.path.join(_root(), pkg, str(version_mod))

    # current version
    current_label = current_version(module_path)
    stdout_message('Current project version found: {}'.format(current_label))

    # next version: increment unless a valid override was supplied
    if force_version is None:
        target = increment_version(current_label)
    elif valid_version(force_version):
        target = force_version
    else:
        stdout_message('You must enter a valid version (x.y.z)')
        sys.exit(1)
    stdout_message('Incremental project version: {}'.format(target))
    return update_signature(target, module_path)
def init():
    """
    Initialize spot price operations; process command line parameters
    """
    parser = argparse.ArgumentParser(add_help=False)
    try:
        args, unknown = options(parser)
    except Exception as e:
        help_menu()
        stdout_message(str(e), 'ERROR')
        sys.exit(exit_codes['EX_BADARG']['Code'])

    if len(sys.argv) == 1 or args.help:
        help_menu()
        sys.exit(exit_codes['EX_OK']['Code'])
    elif args.version:
        # NOTE(review): this branch does not exit; unless package_version()
        # exits internally, execution falls through to the `failure` block
        # at the bottom of this function — confirm intended behavior
        package_version()
    elif (args.start and args.end) or args.duration:
        # normalize profile (argparse may deliver a 1-element list)
        args.profile = args.profile[0] if isinstance(args.profile, list) else args.profile
        # set local region
        args.region = [local_awsregion(args.profile)] if not args.region else args.region
        # validate prerun conditions
        defaults = precheck(args.debug, args.region)
        sp = SpotPrices(profile=args.profile)
        # explicit duration takes precedence over start/end timestamps
        if args.duration and isinstance(int(args.duration[0]), int):
            start, end = sp.set_endpoints(duration=int(args.duration[0]))
        else:
            start, end = sp.set_endpoints(args.start[0], args.end[0])

        # global container for ec2 instance size types
        instance_sizes = []

        for region in args.region:
            fname = '_'.join([
                start.strftime('%Y-%m-%dT%H:%M:%SZ'),
                end.strftime('%Y-%m-%dT%H:%M:%SZ'),
                'all-instance-spot-prices.json'
            ])
            prices = sp.generate_pricedata(regions=[region])
            # conversion of datetime obj => utc strings
            uc = UtcConversion(prices)
            # write to file on local filesystem
            key = os.path.join(region, fname)
            # side-effect expression: create per-region dir only when absent
            os.makedirs(region) if not os.path.exists(region) else True
            _completed = export_iterobject(prices, key)
            # user status message
            writeout_status(key, region, fname, _completed)
            # build unique collection of instances for this region
            regional_sizes = list(
                set([x['InstanceType'] for x in prices['SpotPriceHistory']]))
            instance_sizes.extend(regional_sizes)

        # instance sizes across analyzed regions
        instance_sizes = list(set(instance_sizes))
        instance_sizes.sort()
        key = 'instanceTypes'
        date = sp.end.strftime("%Y-%m-%d")
        return writeout_data(key, instance_sizes, date + '_spot-instanceTypes.json')
    else:
        # NOTE(review): args (a Namespace) is not JSON-serializable —
        # json.dumps() here would raise TypeError; confirm upstream
        stdout_message('Dependency check fail %s' %
                       json.dumps(args, indent=4),
                       prefix='AUTH', severity='WARNING')
        sys.exit(exit_codes['EX_DEPENDENCY']['Code'])

    failure = """ : Check of runtime parameters failed for unknown reason.
    Please ensure you have both read and write access to local filesystem. """
    # NOTE(review): sys.exit() inside the %-format expression terminates the
    # process before the warning or the print below ever execute
    logger.warning(failure + 'Exit. Code: %s' % sys.exit(exit_codes['EX_MISC']['Code']))
    print(failure)
    return sys.exit(exit_codes['EX_BADARG']['Code'])
def prebuild(builddir, volmnt, parameter_file):
    """
    Summary.
        Prerequisites and dependencies for build execution

    Args:
        :builddir (str): build working directory, recreated each run
        :volmnt (str): Docker volume mount directory, recreated each run
        :parameter_file (str): json build parameter file declaring
            'VersionModule'

    Returns:
        Success | Failure, TYPE: bool
    """
    def preclean(dir):
        """ Cleans residual build artifacts """
        try:
            if os.path.exists(dir):
                rmtree(dir)
        except OSError as e:
            logger.exception(
                '%s: Error while cleaning residual build artifacts: %s' %
                (inspect.stack()[0][3], str(e)))
            return False
        return True

    version_module = json.loads(read(parameter_file))['VersionModule']

    if preclean(builddir) and preclean(volmnt):
        stdout_message(
            f'Removed pre-existing build artifacts ({builddir}, {volmnt})')
    os.makedirs(builddir)
    os.makedirs(volmnt)

    root = git_root()
    lib_relpath = PROJECT_BIN
    lib_path = root + '/' + lib_relpath
    sources = [lib_path]
    illegal = ['__pycache__']
    fx = inspect.stack()[0][3]

    try:
        global __version__
        sys.path.insert(0, os.path.abspath(git_root() + '/' + lib_relpath))
        from _version import __version__
        sys.path.pop(0)     # normalize path
    except ImportError as e:
        # fix: original mixed '{}' (str.format) with '%s' interpolation and
        # would itself raise TypeError while logging this error
        logger.exception(
            '{}: Problem importing program version module ({}). Error: {}'.format(
                fx, __file__, str(e)))
    except Exception as e:
        logger.exception(
            '{}: Failure to import _version module _version'.format(fx))
        return False

    ## clean up source ##
    try:
        for directory in sources:
            for artifact in os.listdir(directory):
                if artifact in illegal:
                    rmtree(directory + '/' + artifact)
    except OSError:
        # fix: original referenced undefined name `archive` here (NameError)
        logger.exception(
            '{}: Illegal file object detected, but unable to remove {}'.format(
                fx, artifact))
        return False
    return True
def main(setVersion, environment, force=False, debug=False):
    """
    Summary:
        Create build directories, populate contents, update contents

    Args:
        :setVersion (str): version label override (x.y.z); falsy to
            auto-increment the current version
        :environment (str): os distribution label passed through to
            builddir_content_updates
        :force (bool): overwrite a pre-existing build directory
        :debug (bool): print parameter diagnostics

    Returns:
        Success | Failure, TYPE: bool
        NOTE(review): on success this returns postbuild(...)'s value —
        confirm its type matches callers' expectations
    """
    # publish build paths at module scope for use by the other build steps
    global PROJECT_ROOT
    PROJECT_ROOT = git_root()
    global SCRIPT_DIR
    SCRIPT_DIR = PROJECT_ROOT + '/' + 'scripts'
    DEBIAN_ROOT = PROJECT_ROOT + '/' + 'packaging/deb'
    global BUILD_ROOT
    BUILD_ROOT = TMPDIR
    global LIB_SRC
    LIB_SRC = PROJECT_ROOT + '/' + PROJECT_BIN
    global CURRENT_VERSION
    # implicit string concatenation: path is LIB_SRC + '/version.py'
    CURRENT_VERSION = current_version(PROJECT_BIN, LIB_SRC + '/' 'version.py')

    # sort out version numbers, forceVersion is override
    # # for all info contained in project #
    global VERSION
    if setVersion:
        VERSION = setVersion
    elif CURRENT_VERSION:
        VERSION = increment_version(CURRENT_VERSION)
    else:
        stdout_message(
            'Could not determine current {} version'.format(bd + PROJECT + rst))
        sys.exit(exit_codes['E_DEPENDENCY']['Code'])

    # log
    stdout_message(
        f'Current version of last build: {bdwt + CURRENT_VERSION + rst}')
    stdout_message(
        f'Version to be used for this build: {bdwt + VERSION + rst}')

    # sub in current values
    # NOTE(review): `vars` shadows the builtin of the same name
    parameter_obj = ParameterSet(PROJECT_ROOT + '/' + PACKAGE_CONFIG, VERSION)
    vars = parameter_obj.create()
    VERSION_FILE = vars['VersionModule']

    update_version_module(VERSION, os.path.join(LIB_SRC, VERSION_FILE))

    # create initial binary working dir
    BUILDDIRNAME = create_builddirectory(vars, BUILD_ROOT, VERSION, force)

    if debug:
        print('BUILDDIRNAME returned is: {}'.format(BUILDDIRNAME))
        print(json.dumps(vars, indent=True, sort_keys=True))

    if BUILDDIRNAME:
        # build tree, update its contents, then package and clean up
        r_struture = builddir_structure(vars, BUILDDIRNAME, VERSION)
        r_updates = builddir_content_updates(vars, environment, BUILDDIRNAME, VERSION)
        if r_struture and r_updates and build_package(BUILD_ROOT, BUILDDIRNAME):
            return postbuild(VERSION, VERSION_FILE, BUILD_ROOT + '/' + BUILDDIRNAME, DEBIAN_ROOT)
    return False
def builddir_content_updates(param_dict, osimage, builddir, version):
    """
    Summary.
        Updates builddir contents:
        - builddir DEBIAN/control file version is updated to current
        - version module rewritten in both the source tree and the
          package payload so __version__ matches this build

    Args:
        :param_dict (dict): build parameters (control file, urls, deps)
        :osimage (str): os distribution label (unused in this body)
        :builddir (str): dirname of the current build directory
        :version (str): version label for this build

    Returns:
        Success | Failure, TYPE: bool
    """
    root = git_root()
    project_dirname = root.split('/')[-1]
    build_root = TMPDIR
    debian_dir = 'DEBIAN'
    control_filename = param_dict['ControlFile']['Name']
    # (removed unused locals: deb_src, major, minor)
    # files
    version_module = param_dict['VersionModule']
    issues_url = param_dict['IssuesUrl']
    project_url = param_dict['ProjectUrl']
    buildarch = param_dict['ControlFile']['BuildArch']
    # full paths
    builddir_path = build_root + '/' + builddir
    debian_path = builddir_path + '/' + debian_dir
    control_filepath = debian_path + '/' + control_filename
    lib_dst = builddir_path + '/usr/lib/python3/dist-packages/' + PROJECT_BIN

    # assemble dependencies in comma-separated control-file format
    deplist = None
    for dep in param_dict['DependencyList']:
        if deplist is None:
            deplist = str(dep)
        else:
            deplist = deplist + ', ' + str(dep)

    try:
        # debian control file: update 'Version:' field in place
        # (fix: removed redundant f1.close() inside the `with` block)
        with open(control_filepath) as f1:
            f2 = f1.readlines()
            for index, line in enumerate(f2):
                if line.startswith('Version:'):
                    f2[index] = 'Version: ' + version + '\n'
        # rewrite file
        with open(control_filepath, 'w') as f3:
            f3.writelines(f2)
            path = project_dirname + (control_filepath)[len(root):]
            stdout_message('Control file {} version updated: {}.'.format(
                yl + control_filepath + rst, version))

        ## rewrite version file with current build version in case delta ##
        # orig source version module
        with open(LIB_SRC + '/' + version_module, 'w') as f3:
            f3.writelines(['__version__ = \"' + version + '\"\n'])
            path = os.path.join(root, PROJECT_BIN, version_module)
            stdout_message('Module {} successfully updated: {}.'.format(
                yl + path + rst, version))
        # package version module
        with open(lib_dst + '/' + version_module, 'w') as f3:
            f3.writelines(['__version__ = \"' + version + '\"\n'])
            path = os.path.join(lib_dst, version_module)
            stdout_message('Module {} successfully updated: {}.'.format(
                yl + path + rst, version))
    except OSError as e:
        logger.exception('%s: Problem while updating builddir contents: %s' %
                         (inspect.stack()[0][3], str(e)))
        return False
    return True
def init_cli():
    """Collect parameters and call main """
    try:
        parser = argparse.ArgumentParser(add_help=False)
        args = options(parser)
    except Exception as e:
        help_menu()
        stdout_message(str(e), 'ERROR')
        return exit_codes['E_MISC']['Code']

    if args.debug:
        # echo parsed parameters before dispatch
        stdout_message(message='forceVersion:\t{}'.format(args.set),
                       prefix='DBUG',
                       severity='WARNING')
        stdout_message(message='build:\t{}'.format(args.build),
                       prefix='DBUG',
                       severity='WARNING')
        stdout_message(message='debug flag:\t{}'.format(args.debug),
                       prefix='DBUG',
                       severity='WARNING')

    if len(sys.argv) == 1:
        help_menu()
        return exit_codes['EX_OK']['Code']
    elif args.help:
        help_menu()
        return exit_codes['EX_OK']['Code']
    elif args.build:
        # version label must be valid AND prebuild prerequisites satisfied
        if valid_version(args.set) and prebuild(
                TMPDIR, VOLMNT, git_root() + '/' + PACKAGE_CONFIG):
            # NOTE(review): main() returns postbuild()'s value here — confirm
            # it is a single path string as os.path.split below assumes
            package_path = main(setVersion=args.set,
                                environment=args.distro,
                                force=args.force,
                                debug=args.debug)
            if package_path:
                path, package = os.path.split(package_path)
                stdout_message(
                    f'{PROJECT} build package created: {lk + path + rst}/{bn + package + rst}'
                )
                stdout_message(
                    f'Debian build process completed successfully. End',
                    prefix='OK')
                return exit_codes['EX_OK']['Code']
            else:
                stdout_message(
                    '{}: Problem creating os installation package. Exit'.
                    format(inspect.stack()[0][3]),
                    prefix='WARN',
                    severity='WARNING')
                return exit_codes['E_MISC']['Code']
        elif not valid_version(args.set):
            # invalid --set-version value supplied
            stdout_message(
                'You must enter a valid version when using --set-version parameter. Ex: 1.6.3',
                prefix='WARN',
                severity='WARNING')
            return exit_codes['E_DEPENDENCY']['Code']
        else:
            # version was valid, so prebuild() must have failed
            logger.warning('{} Failure in prebuild stage'.format(
                inspect.stack()[0][3]))
            return exit_codes['E_DEPENDENCY']['Code']
    return True
def builddir_structure(param_dict, builddir, version):
    """
    Summary.
        - Populates the .deb build directory tree: DEBIAN control files,
          binary executable, python library payload, bash completion
          scripts, and config files

    Args:
        :param_dict (dict): build parameters (executable, control file,
            bash completion artifact names)
        :builddir (str): name of current build directory which we need to populate
        :version (str): version label (unused in this body — confirm)

    Vars:
        :lib_path (str): src path to library modules in project root
        :builddir_path (str): dst path to root of the current build
            directory (/<path>/xlines-1.X.X dir)

    Returns:
        Success | Failure, TYPE: bool
    """
    def _mapper(venv_dir):
        """Identifies path to python modules in virtual env"""
        for i in (6, 7, 8, 9):
            path = venv_dir + '/lib/python3.' + str(i) + '/site-packages/'
            if os.path.exists(path):
                return path

    def module_search(module, packages_path):
        # case-insensitive search for module artifacts in site-packages
        t = []
        for i in os.listdir(packages_path):
            if re.search(module, i, re.IGNORECASE):
                t.append(i)
        return t

    root = git_root()
    project_dirname = os.path.split(git_root())[1]
    build_root = TMPDIR
    # files
    binary = param_dict['Executable']
    control_file = param_dict['ControlFile']['Name']
    compfile = param_dict['BashCompletion']
    # LIB source files
    env = os.environ.get('VIRTUAL_ENV') or root
    #lib_src = _mapper(env)
    lib_src = os.path.join(root, PROJECT_BIN)
    # full paths
    builddir_path = build_root + '/' + builddir
    deb_src = root + '/packaging/deb'
    debian_dir = 'DEBIAN'
    debian_path = deb_src + '/' + debian_dir
    binary_path = builddir_path + '/usr/bin'
    lib_dst = builddir_path + '/usr/lib/python3/dist-packages'
    comp_src = os.path.join(root, 'bash')
    comp_dst = builddir_path + '/etc/bash_completion.d'

    try:
        # create build directory (fresh: remove any previous tree first)
        if os.path.exists(builddir_path):
            rmtree(builddir_path)
        os.makedirs(builddir_path)
        stdout_message(message='Created:\t{}'.format(yl + builddir_path + rst),
                       prefix='OK')

        stdout_message(
            f'Copying DEBIAN package control files to {bn + builddir + rst}')
        _src = os.path.join(deb_src, debian_dir)
        _dst = os.path.join(builddir_path, debian_dir)
        copytree(_src, _dst)
        operation_status(_src, _dst)

        stdout_message(
            f'Creating build directory subdirectories in {bn + builddir + rst}'
        )
        # binary exec
        if not os.path.exists(binary_path):
            os.makedirs(binary_path)
            _src_path = os.path.join(deb_src, 'bin', 'xlines')
            _dst_path = os.path.join(binary_path, 'xlines')
            copyfile(_src_path, _dst_path)
            # status msg
            stdout_message(message='Copied:\t{} {} {}'.format(
                lk + _src_path + rst, arrow, lk + _dst_path + rst),
                prefix='OK')

        # library components
        if not os.path.exists(lib_dst):
            os.makedirs(lib_dst)
            _src = lib_src
            _dst = os.path.join(lib_dst, PROJECT_BIN)
            copytree(_src, _dst)
            stdout_message(message='Copied:\t{} {} {}'.format(
                lk + _src + rst, arrow, lk + _dst + rst),
                prefix='OK')

        if not os.path.exists(comp_dst):
            # create path
            os.makedirs(comp_dst)
            # copy each bash completion artifact into the build tree
            for artifact in list(
                    filter(lambda x: x.endswith('.bash'), os.listdir(comp_src))):
                _src = comp_src + '/' + artifact
                _dst = comp_dst + '/' + artifact
                copyfile(_src, _dst)
                stdout_message(message='Copied:\t{} {} {}'.format(
                    lk + _src + rst, arrow, lk + _dst + rst),
                    prefix='OK')

        stdout_message(f'Creating config subdirectory in {lk + lib_dst + rst}')
        os.makedirs(os.path.join(lib_dst, PROJECT_BIN, 'config'))
        source = os.path.join(root, 'config')
        # ship only the .list config files
        for file in list(
                filter(lambda x: x.endswith('.list'), os.listdir(source))):
            _src = os.path.join(source, file)
            _dst = os.path.join(lib_dst, PROJECT_BIN, 'config', file)
            copyfile(_src, _dst)
    except OSError as e:
        logger.exception('{}: Problem creating dirs on local fs'.format(
            inspect.stack()[0][3]))
        return False
    return True