def terminate_script(id_list, profile):
    """Creates termination script on local fs"""
    now = datetime.datetime.utcnow().strftime('%Y-%m-%d')
    fname = 'terminate-script-' + now + '.sh'
    content = """
#!/usr/bin/env bash
pkg=$(basename $0)

if [[ $(which aws) ]]; then
    aws ec2 terminate-instances \
        --profile """ + profile + """ \
        --instance-ids """ + [x for x in id_list][0] + """
fi

# delete caller
rm ./$pkg
exit 0
"""
    try:
        with open(os.getcwd() + '/' + fname, 'w') as f1:
            f1.write(content)
        stdout_message('Created terminate script: {}'.format(os.getcwd() + '/' + fname))
    except OSError as e:
        logger.exception(
            '%s: Problem creating terminate script (%s) on local fs' %
            (inspect.stack()[0][3], fname))
        return False
    return True
def cp_dockerfiles(src, dst):
    """
    Copy dockerfiles and associated build artifacts to build_root

        >> NOT CURRENTLY USED <<

    """
    # place docker build script
    script_src = src + '/' + dockerscript
    script_dst = build_root + '/' + dockerscript

    build_list = os.listdir(src)

    for file in build_list:
        # prefix src so the copy works regardless of the current working dir
        copyfile(src + '/' + file, dst + '/' + file)

    # cp Dockerfile to build root
    copyfile(docker_path + '/' + 'Dockerfile', builddir_path + '/' + 'Dockerfile')

    # verify build spec placement
    stdout_message(
        message='Copied: {} {} {}'.format(lk + script_src + rst, arrow, lk + script_dst + rst),
        prefix='OK')
    return build_list
def create_builddirectory(path, version, force):
    """
    Summary:
        - Creates the deb package binary working directory
        - Checks if build artifacts preexist; if so, halts
        - If force is True, continues even if artifacts exist (overwrites)

    Returns:
        Success | Failure, TYPE: bool
    """
    try:
        builddir = PROJECT + '-' + version + '_amd64'

        # rm builddir when force if exists
        if force is True and builddir in os.listdir(path):
            rmtree(path + '/' + builddir)

        elif force is False and builddir in os.listdir(path):
            stdout_message(
                'Cannot create build directory {} - preexists. Use --force option to overwrite'.format(builddir),
                prefix='WARN',
                severity='WARNING')
            return None

        # create build directory
        os.mkdir(path + '/' + builddir)

    except OSError as e:
        logger.exception('{}: Unable to create build directory {}'.format(
            inspect.stack()[0][3], builddir))
    return builddir
def deprecated_version(filename, expression):
    """
    Summary.
        Extract program version N-1.

    Args:
        :filename (str): Name of file contents searched for N-1 version num.
        :expression (str): Regex or string which matches deprecated version

    Returns:
        exact match, TYPE: str or None
    """
    pattern = re.compile(expression)
    try:
        with open(filename) as d1:
            parsed = set(d1.read().split())
            for item in parsed:
                if pattern.match(item):
                    return item
    except OSError:
        stdout_message(message=f'File {filename} not found', prefix='DBUG')
    except Exception as e:
        stdout_message(message=f'Unknown error ({e})', prefix='DBUG')
    return None
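# Hypothetical usage sketch for deprecated_version() above. The file name
# 'DESCRIPTION.rst' and the semantic-version regex are illustrative
# assumptions, not values taken from this project.
if __name__ == '__main__':
    previous = deprecated_version('DESCRIPTION.rst', r'\d+\.\d+\.\d+')
    if previous:
        stdout_message(f'Deprecated (N-1) version string located: {previous}')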
def boto3_session(service, region=DEFAULT_REGION, profile=None):
    """
    Summary:
        Establishes boto3 sessions, client

    Args:
        :service (str): boto3 service abbreviation ('ec2', 's3', etc)
        :profile (str): profile_name of an iam user from local awscli config

    Returns:
        TYPE: boto3 client object
    """
    try:
        if profile and profile != 'default':
            session = boto3.Session(profile_name=profile)
            return session.client(service, region_name=region)
    except ClientError as e:
        logger.exception(
            "%s: IAM user or role not found (Code: %s Message: %s)" %
            (inspect.stack()[0][3], e.response['Error']['Code'],
             e.response['Error']['Message']))
        raise
    except ProfileNotFound:
        msg = ('%s: The profile (%s) was not found in your local config' %
               (inspect.stack()[0][3], profile))
        stdout_message(msg, 'FAIL')
        logger.warning(msg)
    return boto3.client(service, region_name=region)
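# Minimal usage sketch for boto3_session() above. Assumes valid awscli
# credentials exist locally; the profile name and region are illustrative
# assumptions only.
ec2 = boto3_session('ec2', region='us-east-1', profile='default')
stopped = ec2.describe_instances(
    Filters=[{'Name': 'instance-state-name', 'Values': ['stopped']}]
)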
def stopped_instances(region, profile=None, ids=False, debug=False):
    """
    Summary.
        Determines state of all ec2 machines in a region

    Returns:
        :stopped ec2 instances, TYPE: ec2 objects
        OR
        :stopped ec2 instance ids, TYPE: str
    """
    try:
        if profile and profile != 'default':
            session = boto3.Session(profile_name=profile)
            ec2 = session.resource('ec2', region_name=region)
        else:
            ec2 = boto3.resource('ec2', region_name=region)

        instances = ec2.instances.all()

        if ids:
            return [x.id for x in instances if x.state['Name'] == 'stopped']

    except ClientError as e:
        logger.exception(
            "%s: IAM user or role not found (Code: %s Message: %s)" %
            (inspect.stack()[0][3], e.response['Error']['Code'],
             e.response['Error']['Message']))
        raise
    except ProfileNotFound:
        msg = ('%s: The profile (%s) was not found in your local config' %
               (inspect.stack()[0][3], profile))
        stdout_message(msg, 'FAIL')
        logger.warning(msg)
    return [x for x in instances if x.state['Name'] == 'stopped']
def postbuild(root, container, rpm_root, scripts_dir, version_module, version):
    """
    Summary:
        Post-build clean up

    Args:
        :container (object): Docker container object
        :rpm_root (str): target dir for rpm package files
        :scripts_dir (str): directory where scripts reside
        :version_module (str): name of module containing version number
        :version (str): current version label (Example: 1.6.8)

    Returns:
        Success | Failure, TYPE: bool
    """
    project_dirname = root.split('/')[-1]
    major = '.'.join(version.split('.')[:2])
    minor = version.split('.')[-1]
    volmnt = VOLMNT
    delete = True

    try:
        # cp rpm created to repo
        package = locate_artifact('.rpm', volmnt)
        if package:
            copyfile(locate_artifact('.rpm', volmnt), rpm_root)
            package_path = rpm_root + '/' + os.path.split(package)[1]

        # rpm contents text file
        contents = locate_artifact('.txt', volmnt)

        # stop and rm container
        cmd = f'docker stop {container.name}'
        subprocess.getoutput(cmd)

        # status
        if not container_running(container.name):
            stdout_message(f'{container.name} successfully halted', prefix='OK')

        cmd = f'docker rm {container.name}'
        subprocess.getoutput(cmd)

        # remove temp version module copied to scripts dir
        if os.path.exists(scripts_dir + '/' + version_module):
            os.remove(scripts_dir + '/' + version_module)

        # rewrite version file with current build version
        with open(root + '/core/' + version_module, 'w') as f3:
            f2 = ['__version__=\"' + version + '\"\n']
            f3.writelines(f2)
            path = project_dirname + (root + '/core/' + version_module)[len(root):]
            stdout_message('{}: Module {} successfully updated.'.format(
                inspect.stack()[0][3], yl + path + rst))

    except OSError as e:
        logger.exception('{}: Postbuild clean up failure'.format(
            inspect.stack()[0][3]))
        return ''
    return package_path, contents
def exists(object_path):
    if os.path.exists(object_path):
        return True
    else:
        msg = 'File object %s failed to download' % (object_path)
        logger.warning(msg)
        stdout_message('%s: %s' % (inspect.stack()[0][3], msg))
    return False
def display_package_contents(build_root, version):
    """
    Summary.
        Output newly built package contents.

    Args:
        :build_root (str): location of newly built deb package
        :version (str): current version string, format: '{major}.{minor}.{patch num}'

    Returns:
        Success | Failure, TYPE: bool
    """
    pkg_path = None

    for f in os.listdir(build_root):
        if f.endswith('.deb') and re.search(version, f):
            pkg_path = build_root + '/' + f

    if pkg_path is None:
        stdout_message(
            message=f'Unable to locate a build package in {build_root}. Abort build.',
            prefix='WARN')
        return False

    tab = '\t'.expandtabs(2)
    width = 80
    path, package = os.path.split(pkg_path)
    os.chdir(path)
    cmd = 'dpkg-deb --contents ' + package
    r = subprocess.getoutput(cmd)
    formatted_out = r.splitlines()

    # title header and subheader
    header = '\n\t\tPackage Contents: ' + bd + package + rst + '\n'
    print(header)
    subheader = tab + 'Permission' + tab + 'Owner/Group' + '\t' + 'ctime' \
        + '\t'.expandtabs(8) + 'File'
    print(subheader)

    # divider line
    list(filter(lambda x: print('-', end=''), range(0, width + 1))), print('\r')

    # content
    for line in formatted_out:
        prefix = [tab + x for x in line.split()[:2]]
        raw = line.split()[2:4]
        content_path = line.split()[5]
        fline = ''.join(prefix) + '\t'.join(raw[:4]) + tab + yl + content_path + rst
        print(fline)
    return True
def prebuild(builddir, libsrc, volmnt, parameter_file):
    """
    Summary:
        Prerequisites and dependencies for build execution

    Returns:
        Success | Failure, TYPE: bool
    """
    def preclean(dir, artifact=''):
        """Cleans residual build artifacts by removing them from the filesystem"""
        try:
            if artifact:
                if os.path.exists(libsrc + '/' + artifact):
                    rmtree(libsrc + '/' + artifact)    # clean artifact from inside an existing dir
            elif os.path.exists(dir):
                rmtree(dir)    # rm entire directory
        except OSError as e:
            logger.exception(
                '%s: Error while cleaning residual build artifacts: %s' %
                (inspect.stack()[0][3], str(e)))
            return False
        return True

    version_module = json.loads(read(parameter_file))['VersionModule']

    try:
        if preclean(builddir) and preclean(volmnt) and preclean(libsrc, '__pycache__'):
            stdout_message(f'Removed pre-existing build artifacts ({builddir}, {volmnt})')

        os.makedirs(builddir)
        os.makedirs(volmnt)

        root = git_root()
        src = root + '/core' + '/' + version_module
        dst = root + '/scripts' + '/' + version_module

        # deal with leftover build artifacts
        if os.path.exists(dst):
            os.remove(dst)

        r_cf = copyfile(src, dst)

        # import version module
        global __version__
        from version import __version__

        if r_cf and __version__ and docker_daemon_up():
            return True

    except Exception as e:
        logger.exception('{}: Failure to import __version__ parameter'.format(
            inspect.stack()[0][3]))
    return False
def precheck(parameters):
    """Validates user supplied parameters"""
    for arg in sys.argv[1:]:
        if arg.startswith('-') or arg.startswith('--'):
            if arg not in ('--profile', '-p', '-o', '--outputfile', '-d',
                           '--debug', '-s', '--show', '-V', '--version',
                           '-h', '--help'):
                stdout_message(
                    message=f'Unrecognized option ({arg}). Exit',
                    prefix='WARN',
                    severity='warning')
                return False
    return True
def debug_message(response, rgn, mode):
    """Prints debug output"""
    if mode:
        stdout_message(
            message='REGION: %s' % rgn,
            prefix='DBUG',
            severity='WARNING')
        print(json.dumps(response, indent=4))
    return True
def show_information(display):
    """
    Summary:
        Displays information to user

    Returns:
        Success or Failure, TYPE: bool
    """
    def valid(input):
        try:
            if userchoice_mapping(input) is None:
                stdout_message(
                    'Please choose a letter associated with one of the choices.',
                    indent=12)
                return False
            elif isinstance(int(input), int) or isinstance(float(input), float):
                stdout_message(message='You must choose a letter', indent=12)
                return False
        except ValueError:
            # valid string
            pass
        return True

    if os.path.exists(FILE_PATH) and display in ('files', 'profiles'):
        files = os.listdir(FILE_PATH)
        profiles = list(filter(lambda x: x.endswith('.profile'), files))

        if profiles:
            # display user menu
            print('\t_______________________________________________________\n')
            print(bd + '\t\t\tLocal AWS Account Profiles' + rst)
            print('\t_______________________________________________________\n')

            for index, file in enumerate(profiles):
                print('\t\t({}): {}'.format(
                    userchoice_mapping(index + 1),
                    Colors.BRIGHT_PURPLE + file + rst))

            answer = input('\n\tSelect an option to display [quit]: ')

            # process user input
            if answer:
                if valid(answer):
                    if int(userchoice_mapping(answer)) in range(1, index + 2):
                        return file_contents(profiles[int(userchoice_mapping(answer)) - 1])
                    return True
                else:
                    # user answer out of selection range
                    stdout_message(
                        message='Please choose a letter associated with one of the choices.',
                        indent=12)
    return False
def docker_daemon_up():
    """
    Summary:
        Determines if docker installed and running by
        evaluating the exit code of docker images cmd

    Returns:
        True (running) | False, TYPE: bool
    """
    cmd = 'docker images >/dev/null 2>&1; echo $?'

    if which('docker') and int(subprocess.getoutput(cmd)) == 0:
        return True
    else:
        stdout_message('Docker engine not running or not accessible', prefix='WARN')
    return False
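# Usage sketch: gate a build step on a reachable docker daemon. Reusing the
# E_DEPENDENCY exit code mirrors the convention elsewhere in this module and
# is an assumption about the local exit_codes mapping.
if not docker_daemon_up():
    sys.exit(exit_codes['E_DEPENDENCY']['Code'])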
def awscli_profiles():
    """Returns IAM usernames from local awscli configuration"""
    if os.path.isfile(config_file):
        config.read(config_file)
    else:
        stdout_message(
            message='awscli configuration file not found on local filesystem. Exit',
            prefix='WARN')
        sys.exit(1)

    for profile in config.sections():
        if 'role_arn' in config[profile].keys():
            config.pop(profile)
    return config
def valid(input):
    try:
        if userchoice_mapping(input) is None:
            stdout_message(
                'Please choose a letter associated with one of the choices.',
                indent=12)
            return False
        elif isinstance(int(input), int) or isinstance(float(input), float):
            stdout_message(message='You must choose a letter', indent=12)
            return False
    except ValueError:
        # valid string
        pass
    return True
def clean(directory, debug):
    """
    Summary.
        rm residual installation files from build directory
    """
    bytecode_list = list(
        filter(lambda x: x.endswith('.pyc') or x.endswith('.pyo'),
               os.listdir(directory)))

    if debug:
        stdout_message(message=f'bytecode_list contents: {bytecode_list}', prefix='DEBUG')

    for artifact in bytecode_list:
        os.remove(directory + '/' + artifact)
        logger.info('Artifact {} cleaned from {}'.format(artifact, directory))
    return True
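# Usage sketch for clean() above: strip compiled bytecode from a build
# directory before packaging. The 'build' path is an illustrative assumption.
clean(os.path.join(os.getcwd(), 'build'), debug=False)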
def boto3_session(service, region=DEFAULT_REGION, profile=None):
    """
    Summary:
        Establishes boto3 sessions, client

    Args:
        :service (str): boto3 service abbreviation ('ec2', 's3', etc)
        :profile (str): profile_name of an iam user from local awscli config
        :region (str): AWS region code, optional

    Returns:
        client (boto3 object)
    """
    fx = inspect.stack()[0][3]

    try:
        if (not profile or profile == 'default') and service != 'iam':
            return boto3.client(service, region_name=region)

        elif (not profile or profile == 'default') and service == 'iam':
            return boto3.client(service)

        elif profile and profile != 'default':
            session = boto3.Session(profile_name=profile)
            return session.client(service, region_name=region)

    except ClientError as e:
        if e.response['Error']['Code'] == 'InvalidClientTokenId':
            logger.warning(
                '{}: Invalid credentials used by profile user {}'.format(
                    fx, profile or 'default'))
        elif e.response['Error']['Code'] == 'ExpiredToken':
            logger.info(
                '%s: Expired temporary credentials detected for profile user (%s) [Code: %d]' %
                (fx, profile, exit_codes['EX_CONFIG']['Code']))
    except ProfileNotFound:
        msg = ('{}: Profile name {} was not found in your local config.'.format(fx, profile))
        stdout_message(msg, 'WARN')
        logger.warning(msg)
        return None
    return boto3.client(service, region_name=region)
def debug_mode(header, data_object, debug=False, halt=False):
    """debug output"""
    if debug:
        print('\n ' + str(header) + '\n')
        try:
            if type(data_object) is dict:
                export_json_object(data_object)
            elif type(data_object) is str:
                stdout_message(
                    message=f'{globals()[data_object]} parameter is {data_object}',
                    prefix='DEBUG')
        except Exception:
            print(data_object)
        if halt:
            sys.exit(0)
    return True
def download_fileobject(url, overwrite=False):
    """
    Summary.
        Retrieve latest ec2 pricefile

    Args:
        :url (str): http/s universal resource locator
        :overwrite (bool): flag optionally force overwrite of objects
            previously downloaded

    Returns:
        path (str): full fs path to downloaded file object
    """
    def exists(object_path):
        if os.path.exists(object_path):
            return True
        else:
            msg = 'File object %s failed to download' % (object_path)
            logger.warning(msg)
            stdout_message('%s: %s' % (inspect.stack()[0][3], msg))
        return False

    try:
        filename = os.path.split(url)[1]
        path = tmpdir + '/' + filename

        if overwrite and exists(path):
            os.remove(path)
        elif not overwrite and exists(path):
            return path

        r = urllib.request.urlretrieve(url, path)

        if not exists(path):
            stdout_message(message=f'Failed to retrieve file object {path}', prefix='WARN')

    except urllib.error.HTTPError as e:
        stdout_message(
            message='%s: Failed to retrieve file object: %s. Exception: %s, data: %s' %
            (inspect.stack()[0][3], url, str(e), e.read()),
            prefix='WARN')
        raise e
    return path
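# Usage sketch for download_fileobject() above. The URL is a hypothetical
# placeholder; the function assumes the global tmpdir points at a writable
# directory on the local filesystem.
price_url = 'https://example.com/pricing/ec2/index.json'    # hypothetical URL
local_copy = download_fileobject(price_url, overwrite=False)
if exists(local_copy):
    stdout_message(f'Pricefile cached at {local_copy}', prefix='OK')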
def get_imageid(profile, image, region, debug):
    if which('machineimage'):
        cmd = 'machineimage --profile {} --image {} --region {}'.format(profile, image, region)
        response = subprocess.getoutput(cmd + ' 2>/dev/null')

        # response not returned if inadequate iam or role permissions
        if not response:
            stdout_message(
                message='No AMI Image ID retrieved. Inadequate iam user or role permissions?',
                prefix='WARN')
            sys.exit(exit_codes['E_DEPENDENCY']['Code'])
    else:
        stdout_message('machineimage executable could not be located. Exit', prefix='WARN')
        sys.exit(exit_codes['E_DEPENDENCY']['Code'])
    return json.loads(response)[region]
def masterbranch_version(version_module):
    """Returns version denoted in the master branch of the repository"""
    branch = current_branch(git_root())
    commands = ['git checkout master', 'git checkout {}'.format(branch)]

    try:
        #stdout_message('Checkout master branch:\n\n%s' % subprocess.getoutput(commands[0]))
        masterversion = read(version_module).split('=')[1].strip().strip('"')

        # return to working branch
        stdout_message('Returning to working branch: checkout {}'.format(branch))
        stdout_message(subprocess.getoutput(f'git checkout {branch}'))

    except Exception:
        return None
    return masterversion
def postbuild(version, version_module, builddir_path, debian_root):
    """
    Summary.
        Post-build clean up

    Returns:
        Success | Failure, TYPE: bool
    """
    root = git_root()
    project_dirname = root.split('/')[-1]
    build_root = os.path.split(builddir_path)[0]
    package = locate_deb(build_root)
    package_path = None    # guard against returning an unbound name when no package is found

    try:
        if package:
            copyfile(package, debian_root)
            package_path = debian_root + '/' + os.path.split(package)[1]

        # remove build directory, residual artifacts
        if os.path.exists(builddir_path):
            rmtree(builddir_path)

        if os.path.exists(root + '/scripts/' + version_module):
            os.remove(root + '/scripts/' + version_module)

        # rewrite version file with current build version
        with open(root + '/core/' + version_module, 'w') as f3:
            f2 = ['__version__=\"' + version + '\"\n']
            f3.writelines(f2)
            path = project_dirname + (root + '/core/' + version_module)[len(root):]
            stdout_message('{}: Module {} successfully updated.'.format(
                inspect.stack()[0][3], yl + path + rst))

        if display_package_contents(BUILD_ROOT, VERSION):
            return package_path

    except OSError as e:
        logger.exception('{}: Postbuild clean up failure: {}'.format(
            inspect.stack()[0][3], e))
        return False
    return package_path
def main(operation, profile, auto, debug, user_name=''):
    """
    End-to-end renew of access keys for a specific profile in local awscli config
    """
    if user_name:
        logger.info('user_name parameter given (%s) as surrogate' % user_name)

    try:
        if operation in VALID_INSTALL:
            print(operation)

        elif operation == 'list':
            print(operation)
            return True

        elif not operation:
            msg_accent = (Colors.BOLD + 'list' + Colors.RESET + ' | ' +
                          Colors.BOLD + 'up' + Colors.RESET)
            msg = """You must provide a valid OPERATION for --operation parameter:

                    --operation { """ + msg_accent + """ }
            """
            stdout_message(msg)
            logger.warning('%s: No valid operation provided. Exit' %
                           (inspect.stack()[0][3]))
            sys.exit(exit_codes['E_MISC']['Code'])

        else:
            msg = 'Unknown operation. Exit'
            stdout_message(msg)
            logger.warning('%s: %s' % (msg, inspect.stack()[0][3]))
            sys.exit(exit_codes['E_MISC']['Code'])

    except KeyError as e:
        logger.critical('%s: Cannot find Key %s' %
                        (inspect.stack()[0][3], str(e)))
        return False
    except OSError as e:
        logger.critical('%s: problem writing to file %s. Error %s' %
                        (inspect.stack()[0][3], output_file, str(e)))
        return False
    except Exception as e:
        logger.critical('%s: Unknown error. Error %s' %
                        (inspect.stack()[0][3], str(e)))
        raise e
def container_running(cid, debug=False):
    """
    Summary:
        Verifies if a container is actively running

    Args:
        :cid (str): Container name or hex identifier
        :dclient (object): global docker client

    Returns:
        True (running) | False (stopped)
        TYPE: bool
    """
    success_msg = f'Container {cid} running'

    try:
        container = dclient.containers.get(cid)
        if container.status == 'running':
            if debug:
                stdout_message(success_msg, prefix='OK')
            return True
    except Exception:
        if cid in subprocess.getoutput('docker ps'):
            stdout_message(success_msg, prefix='OK')
            return True
        else:
            stdout_message(f'Container {cid} stopped', prefix='WARN')
    return False
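# Usage sketch for container_running() above. 'buildserver' is a hypothetical
# container name; a configured global docker client (dclient) is assumed.
if container_running('buildserver', debug=True):
    subprocess.getoutput('docker stop buildserver')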
def build_package(build_root, builddir):
    """
    Summary.
        Creates final os installable package for current build, build version

    Returns:
        Success | Failure, TYPE: bool
    """
    try:
        pwd = os.getcwd()
        os.chdir(build_root)

        if os.path.exists(builddir):
            cmd = 'dpkg-deb --build ' + builddir + ' 2>/dev/null'
            stdout_message('Building {}... '.format(bn + builddir + rst))
            stdout_message(subprocess.getoutput(cmd))
            os.chdir(pwd)
        else:
            logger.warning(
                'Build directory {} not found. Failed to create .deb package'.format(builddir))
            os.chdir(pwd)
            return False

    except OSError as e:
        logger.exception('{}: Error during os package creation: {}'.format(
            inspect.stack()[0][3], e))
        return False
    except Exception as e:
        logger.exception('{}: Unknown Error during os package creation: {}'.format(
            inspect.stack()[0][3], e))
        return False
    return True
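# Usage sketch tying build_package() into the surrounding build flow. Both
# path values are illustrative assumptions: a TMPDIR-style build root plus a
# versioned directory name like the one create_builddirectory() returns.
if build_package('/tmp/build', 'nlines-1.0.0_amd64'):
    stdout_message('Package build complete', prefix='OK')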
def init_cli():
    """Initializes commandline script"""
    parser = argparse.ArgumentParser(add_help=False)

    try:
        args = options(parser)
    except Exception as e:
        stdout_message(str(e), 'ERROR')
        sys.exit(exit_codes['EX_OK']['Code'])

    if not precheck(args):
        sys.exit(exit_codes['E_DEPENDENCY']['Code'])

    elif len(sys.argv) == 1:
        help_menu()
        sys.exit(exit_codes['EX_OK']['Code'])

    elif args.help:
        help_menu()
        sys.exit(exit_codes['EX_OK']['Code'])

    elif args.version:
        package_version()

    elif ('--show' in sys.argv or '-s' in sys.argv) and args.show is None:
        stdout_message(
            'You must specify a value when using the --show option. Example: \
            \n\n\t\t$ %s --show profiles' % (act + CALLER + rst))

    elif args.show:
        return show_information(args.show)

    elif args.profile:
        if authenticated(profile=parse_profiles(args.profile)):
            container = {}
            default_outputfile = get_account_identifier(parse_profiles(args.profile)) + '.profile'
            region = default_region(args.profile)

            # add aws account identifiers
            container['AccountId'] = get_account_identifier(
                parse_profiles(args.profile), returnAlias=False)
            container['AccountAlias'] = get_account_identifier(parse_profiles(args.profile))

            # profile the account
            r_subnets = profile_subnets(profile=parse_profiles(args.profile))
            r_sgs = profile_securitygroups(profile=parse_profiles(args.profile))
            r_keypairs = profile_keypairs(profile=parse_profiles(args.profile))

            # assemble profile data into single json schema
            if r_subnets and r_sgs and r_keypairs:
                try:
                    for region in get_regions():
                        temp = {}
                        temp['Subnets'] = r_subnets[region]
                        temp['SecurityGroups'] = r_sgs[region]
                        temp['KeyPairs'] = r_keypairs[region]
                        container[region] = temp
                except KeyError as e:
                    raise e

            if args.outputfile:
                export_json_object(container, FILE_PATH + '/' + default_outputfile)
            elif is_tty():
                export_json_object(container, logging=False)
            stdout_message('AWS Account profile complete')
        return True

    else:
        stdout_message('Unrecognized option. Exit')
    return False
if PACKAGE is None:
    try:
        # adj path down 1 level
        sys.path.insert(0, os.path.abspath(git_root() + '/' + lib_relpath))
        from version import __version__
        # normalize path
        sys.path.pop(0)
    except ImportError as e:
        stdout_message(
            message='Problem executing commit-hook (%s). Error: %s' %
            (__file__, str(e)),
            prefix='WARN')
else:
    sys.path.insert(0, os.path.abspath(PACKAGE))
    from version_module import __version__
    sys.path.pop(0)

try:
    if not list(filter(lambda x: os.path.exists(x), targets)):
        stdout_message(
            message=f'One or more commit-hook targets ({targets}) not found',
            prefix='WARN')
        sys.exit(1)
    elif incremental_version(CURRENT, __version__):
def prebuild(builddir, volmnt, parameter_file):
    """
    Summary.
        Prerequisites and dependencies for build execution
    """
    def preclean(dir):
        """Cleans residual build artifacts"""
        try:
            if os.path.exists(dir):
                rmtree(dir)
        except OSError as e:
            logger.exception(
                '%s: Error while cleaning residual build artifacts: %s' %
                (inspect.stack()[0][3], str(e)))
            return False
        return True

    version_module = json.loads(read(parameter_file))['VersionModule']

    if preclean(builddir) and preclean(volmnt):
        stdout_message(f'Removed pre-existing build artifacts ({builddir}, {volmnt})')

    os.makedirs(builddir)
    os.makedirs(volmnt)

    root = git_root()
    lib_relpath = 'core'
    lib_path = root + '/' + lib_relpath
    sources = [lib_path]
    illegal = ['__pycache__']
    module = inspect.stack()[0][3]

    try:
        global __version__
        sys.path.insert(0, os.path.abspath(git_root() + '/' + lib_relpath))
        from version import __version__
        # normalize path
        sys.path.pop(0)
    except ImportError as e:
        # logger.exception takes a message string, not message/prefix kwargs
        logger.exception(
            'Problem importing program version module (%s). Error: %s' %
            (__file__, str(e)))
    except Exception as e:
        logger.exception('{}: Failure to import version module'.format(
            inspect.stack()[0][3]))
        return False

    ## clean up source ##
    try:
        for directory in sources:
            for artifact in os.listdir(directory):
                if artifact in illegal:
                    rmtree(directory + '/' + artifact)
    except OSError:
        logger.exception(
            '{}: Illegal file object detected, but unable to remove {}'.format(
                module, artifact))
        return False
    return True
def main(setVersion, environment, force=False, debug=False):
    """
    Summary:
        Create build directories, populate contents, update contents

    Returns:
        Success | Failure, TYPE: bool
    """
    global PROJECT_BIN
    PROJECT_BIN = 'nlines'
    global PROJECT_ROOT
    PROJECT_ROOT = git_root()
    global SCRIPT_DIR
    SCRIPT_DIR = PROJECT_ROOT + '/' + 'scripts'
    DEBIAN_ROOT = PROJECT_ROOT + '/' + 'packaging/deb'
    global BUILD_ROOT
    BUILD_ROOT = TMPDIR
    global LIB_DIR
    LIB_DIR = PROJECT_ROOT + '/' + 'core'
    global CURRENT_VERSION
    CURRENT_VERSION = current_version(PROJECT_BIN, LIB_DIR + '/' + 'version.py')

    # sort out version numbers, forceVersion is override
    # for all info contained in project
    global VERSION
    if setVersion:
        VERSION = setVersion
    elif CURRENT_VERSION:
        VERSION = increment_version(CURRENT_VERSION)
    else:
        stdout_message('Could not determine current {} version'.format(bd + PROJECT + rst))
        sys.exit(exit_codes['E_DEPENDENCY']['Code'])

    # log
    stdout_message(f'Current version of last build: {CURRENT_VERSION}')
    stdout_message(f'Version to be used for this build: {VERSION}')

    # create initial binary working dir
    BUILDDIRNAME = create_builddirectory(BUILD_ROOT, VERSION, force)

    # sub in current values
    parameter_obj = ParameterSet(PROJECT_ROOT + '/' + PACKAGE_CONFIG, VERSION)
    vars = parameter_obj.create()
    VERSION_FILE = vars['VersionModule']

    if debug:
        print(json.dumps(vars, indent=True, sort_keys=True))

    if BUILDDIRNAME:
        r_structure = builddir_structure(vars, VERSION)
        r_updates = builddir_content_updates(vars, environment, VERSION)

        if r_structure and r_updates and build_package(BUILD_ROOT, BUILDDIRNAME):
            return postbuild(VERSION, VERSION_FILE, BUILD_ROOT + '/' + BUILDDIRNAME, DEBIAN_ROOT)
    return False