def empty(self):
    """Empty out the directories listed in the netboot2/empty setting.

    Honors autoresume: when the "empty" resume point is already set the
    whole step is skipped.  Otherwise every listed directory (relative to
    chroot_path + merge_path) is deleted and recreated empty with its
    original ownership and permissions, and the resume point is recorded.
    """
    if "autoresume" in self.settings["options"] \
            and self.resume.is_enabled("empty"):
        log.notice('Resume point detected, skipping empty operation...')
        return

    if "netboot2/empty" in self.settings:
        # A plain string in the spec becomes a list of paths.
        if isinstance(self.settings['netboot2/empty'], str):
            self.settings["netboot2/empty"] = \
                self.settings["netboot2/empty"].split()
        for subdir in self.settings["netboot2/empty"]:
            target = (self.settings["chroot_path"]
                      + self.settings["merge_path"] + subdir)
            if not os.path.isdir(target):
                log.warning(
                    'not a directory or does not exist, skipping "empty" operation: %s',
                    subdir)
                continue
            log.info('Emptying directory %s', subdir)
            # Remember ownership/permissions, wipe the tree, then put an
            # empty directory back carrying the same uid/gid/mode.
            saved = os.stat(target)
            shutil.rmtree(target)
            ensure_dirs(target, mode=0o755)
            os.chown(target, saved[ST_UID], saved[ST_GID])
            os.chmod(target, saved[ST_MODE])
    self.resume.enable("empty")
def parse_config(config_files):
    """Load TOML configuration files into the global conf_values.

    Unknown keys are fatal, as are unreadable or unparsable files
    (log.critical presumably aborts — TODO confirm against catalyst.log).
    Afterwards, messages for enabled options are printed and any %(var)s
    substitutions remaining in string values are resolved against
    conf_values itself.
    """
    for config_file in config_files:
        log.notice('Loading configuration file: %s', config_file)
        try:
            with open(config_file, 'rb') as f:
                config = tomli.load(f)
            # Reject keys the rest of catalyst does not understand.
            for key in config:
                if key not in valid_config_file_values:
                    log.critical("Unknown option '%s' in config file %s",
                                 key, config_file)
            conf_values.update(config)
        except Exception as e:
            # Message fixed: previously read "Could not find parse
            # configuration file".
            log.critical('Could not parse configuration file: %s: %s',
                         config_file, e)

    # print out any options messages
    for opt in conf_values['options']:
        if opt in option_messages:
            log.info(option_messages[opt])

    if "envscript" in conf_values:
        log.info('Envscript support enabled.')

    # take care of any variable substitutions that may be left
    for x in list(conf_values):
        if isinstance(conf_values[x], str):
            conf_values[x] = conf_values[x] % conf_values
def clear_dir(target, mode=0o755, chg_flags=False, remove=False,
              clear_nondir=True):
    '''Universal directory clearing function

    @target: string, path to be cleared or removed
    @mode: integer, desired mode to set the recreated directory to
    @chg_flags: boolean, drop FreeBSD immutable flags before deleting
    @remove: boolean, when True do not recreate the directory afterwards
    @clear_nondir: boolean, when True unlink a non-directory target
        instead of failing
    @return boolean
    '''
    log.debug('start: %s', target)
    if not target:
        log.debug('no target... returning')
        return False
    mystat = None
    if os.path.isdir(target) and not os.path.islink(target):
        log.notice('Emptying directory: %s', target)
        # stat the dir, delete the dir, recreate the dir and set
        # the proper perms and ownership
        try:
            log.debug('os.stat()')
            mystat = os.stat(target)
            # There's no easy way to change flags recursively in python
            if chg_flags and os.uname()[0] == "FreeBSD":
                cmd(['chflags', '-R', 'noschg', target])
            log.debug('shutil.rmtree()')
            shutil.rmtree(target)
        except Exception:
            log.error('clear_dir failed', exc_info=True)
            return False
    elif os.path.exists(target):
        if clear_nondir:
            log.debug("Clearing (unlinking) non-directory: %s", target)
            os.unlink(target)
        else:
            log.info('clear_dir failed: %s: is not a directory', target)
            return False
    else:
        # Typo fixed: was "Condidtions not met to clear".
        log.debug("Conditions not met to clear: %s", target)
        log.debug(" isdir: %s", os.path.isdir(target))
        log.debug(" islink: %s", os.path.islink(target))
        log.debug(" exists: %s", os.path.exists(target))
    if not remove:
        log.debug('ensure_dirs()')
        ensure_dirs(target, mode=mode)
        if mystat:
            # Restore the ownership and permissions captured above.
            os.chown(target, mystat[ST_UID], mystat[ST_GID])
            os.chmod(target, mystat[ST_MODE])
    log.debug('DONE, returning True')
    return True
def run(self):
    """Create, compress and digest a snapshot of the configured repo.

    Returns True on success (or after a purge-only run), False when
    compression failed.
    """
    settings = self.settings
    options = settings["options"]
    if "purgeonly" in options:
        self.purge()
        return True
    if "purge" in options:
        self.purge()

    success = True
    self.setup()
    log.notice('Creating %s tree snapshot %s from %s ...',
               settings["repo_name"], settings['version_stamp'],
               settings['portdir'])

    workdir = settings["tmp_path"]
    ensure_dirs(workdir)
    # Mirror the repo into the work dir, excluding VCS data and binaries.
    rsync_args = ['rsync', '-a', '--no-o', '--no-g', '--delete',
                  '--exclude=/packages/', '--exclude=/distfiles/',
                  '--exclude=/local/', '--exclude=CVS/', '--exclude=.svn',
                  '--exclude=.git/', '--filter=H_**/files/digest-*',
                  settings['portdir'] + '/',
                  workdir + '/' + settings['repo_name'] + '/']
    cmd(rsync_args, env=self.env)

    log.notice('Compressing %s snapshot tarball ...', settings["repo_name"])
    compressor = CompressMap(settings["compress_definitions"],
                             env=self.env,
                             default_mode=settings['compression_mode'],
                             comp_prog=settings["comp_prog"])
    infodict = compressor.create_infodict(
        source=settings["repo_name"],
        destination=settings["snapshot_path"],
        basedir=workdir,
        filename=settings["snapshot_path"],
        mode=settings["compression_mode"],
        auto_extension=True
    )
    if compressor.compress(infodict):
        filename = '.'.join([settings["snapshot_path"],
                             compressor.extension(settings["compression_mode"])])
        log.notice('Snapshot successfully written to %s', filename)
        self.gen_contents_file(filename)
        self.gen_digest_file(filename)
    else:
        success = False
        log.error('Snapshot compression failure')

    if "keepwork" not in options:
        self.cleanup()
    if success:
        log.info('snapshot: complete!')
    return success
def run(self):
    """Snapshot the configured repo tree, compress it and write digests.

    Returns True unless compression fails; a purge-only invocation
    short-circuits to True after purging.
    """
    if "purgeonly" in self.settings["options"]:
        self.purge()
        return True
    if "purge" in self.settings["options"]:
        self.purge()

    ok = True
    self.setup()
    log.notice('Creating %s tree snapshot %s from %s ...',
               self.settings["repo_name"],
               self.settings['version_stamp'],
               self.settings['portdir'])

    tmpdir = self.settings["tmp_path"]
    ensure_dirs(tmpdir)
    source_dir = self.settings['portdir'] + '/'
    dest_dir = tmpdir + '/' + self.settings['repo_name'] + '/'
    # rsync a clean copy of the tree, skipping binaries and VCS metadata.
    cmd(['rsync', '-a', '--no-o', '--no-g', '--delete',
         '--exclude=/packages/', '--exclude=/distfiles/',
         '--exclude=/local/', '--exclude=CVS/', '--exclude=.svn',
         '--filter=H_**/files/digest-*',
         source_dir, dest_dir],
        env=self.env)

    log.notice('Compressing %s snapshot tarball ...',
               self.settings["repo_name"])
    compressor = CompressMap(self.settings["compress_definitions"],
                             env=self.env,
                             default_mode=self.settings['compression_mode'],
                             comp_prog=self.settings["comp_prog"])
    infodict = compressor.create_infodict(
        source=self.settings["repo_name"],
        destination=self.settings["snapshot_path"],
        basedir=tmpdir,
        filename=self.settings["snapshot_path"],
        mode=self.settings["compression_mode"],
        auto_extension=True
    )
    if not compressor.compress(infodict):
        ok = False
        log.error('Snapshot compression failure')
    else:
        ext = compressor.extension(self.settings["compression_mode"])
        filename = '.'.join([self.settings["snapshot_path"], ext])
        log.notice('Snapshot successfully written to %s', filename)
        self.gen_contents_file(filename)
        self.gen_digest_file(filename)

    if "keepwork" not in self.settings["options"]:
        self.cleanup()
    if ok:
        log.info('snapshot: complete!')
    return ok
def parse_config(config_files):
    """Parse catalyst config files and populate the global conf_values.

    Values from the files are layered over confdefaults; 'options'
    becomes a set and 'decompressor_search_order' a list.  Remaining
    %(var)s references in string values are substituted at the end.
    """
    # search a couple of different areas for the main config file
    myconf = {}

    # try and parse the config file "config_file"
    for config_file in config_files:
        log.notice('Loading configuration file: %s', config_file)
        try:
            config = catalyst.config.ConfigParser(config_file)
            myconf.update(config.get_values())
        except Exception as e:
            # Message fixed: previously read "Could not find parse
            # configuration file".
            log.critical('Could not parse configuration file: %s: %s',
                         config_file, e)

    # now, load up the values into conf_values so that we can use them
    for x in list(confdefaults):
        if x in myconf:
            if x == 'options':
                conf_values[x] = set(myconf[x].split())
            elif x in ["decompressor_search_order"]:
                conf_values[x] = myconf[x].split()
            else:
                conf_values[x] = myconf[x]
        else:
            conf_values[x] = confdefaults[x]

    # add our python base directory to use for loading target arch's
    conf_values["PythonDir"] = os.path.dirname(os.path.realpath(__file__))

    # print out any options messages
    for opt in conf_values['options']:
        if opt in option_messages:
            log.info(option_messages[opt])

    for key in [
            "digests", "envscript", "var_tmpfs_portage", "port_logdir",
            "local_overlay"
    ]:
        if key in myconf:
            conf_values[key] = myconf[key]

    if "contents" in myconf:
        # replace '-' with '_' (for compatibility with existing configs)
        conf_values["contents"] = myconf["contents"].replace("-", '_')

    if "envscript" in myconf:
        log.info('Envscript support enabled.')

    # take care of any variable substitutions that may be left
    for x in list(conf_values):
        if isinstance(conf_values[x], str):
            conf_values[x] = conf_values[x] % conf_values
def parse_config(config_files):
    """Parse catalyst config files and fill the global conf_values.

    File values override confdefaults; 'options' is stored as a set and
    'decompressor_search_order' as a list.  String values get a final
    %(var)s substitution pass against conf_values.
    """
    # search a couple of different areas for the main config file
    myconf = {}

    # try and parse the config file "config_file"
    for config_file in config_files:
        log.notice('Loading configuration file: %s', config_file)
        try:
            config = catalyst.config.ConfigParser(config_file)
            myconf.update(config.get_values())
        except Exception as e:
            # Message fixed: previously read "Could not find parse
            # configuration file".
            log.critical('Could not parse configuration file: %s: %s',
                         config_file, e)

    # now, load up the values into conf_values so that we can use them
    for x in list(confdefaults):
        if x in myconf:
            if x == 'options':
                conf_values[x] = set(myconf[x].split())
            elif x in ["decompressor_search_order"]:
                conf_values[x] = myconf[x].split()
            else:
                conf_values[x] = myconf[x]
        else:
            conf_values[x] = confdefaults[x]

    # add our python base directory to use for loading target arch's
    conf_values["PythonDir"] = __selfpath__

    # print out any options messages
    for opt in conf_values['options']:
        if opt in option_messages:
            log.info(option_messages[opt])

    for key in ["digests", "envscript", "var_tmpfs_portage", "port_logdir",
                "local_overlay"]:
        if key in myconf:
            conf_values[key] = myconf[key]

    if "contents" in myconf:
        # replace '-' with '_' (for compatibility with existing configs)
        conf_values["contents"] = myconf["contents"].replace("-", '_')

    if "envscript" in myconf:
        log.info('Envscript support enabled.')

    # take care of any variable substitutions that may be left
    for x in list(conf_values):
        if isinstance(conf_values[x], str):
            conf_values[x] = conf_values[x] % conf_values
def run(self):
    """Snapshot the Portage tree, compress the result, write digests.

    Returns True on success (or after purge-only), False when
    compression failed.
    """
    if "purgeonly" in self.settings["options"]:
        self.purge()
        return True
    if "purge" in self.settings["options"]:
        self.purge()

    ok = True
    self.setup()
    log.notice('Creating Portage tree snapshot %s from %s ...',
               self.settings['version_stamp'], self.settings['portdir'])

    tmpdir = self.settings["tmp_path"]
    ensure_dirs(tmpdir)
    # Builds "<portdir>/ <tmpdir>/<repo_name>/": rsync source and
    # destination separated by the space embedded in the "/ " literal.
    target_snapshot = self.settings["portdir"] + "/ " + tmpdir + \
        "/%s/" % self.settings["repo_name"]
    cmd("rsync -a --no-o --no-g --delete --exclude /packages/ --exclude /distfiles/ " +
        "--exclude /local/ --exclude CVS/ --exclude .svn --filter=H_**/files/digest-* " +
        target_snapshot,
        "Snapshot failure", env=self.env)

    log.notice('Compressing Portage snapshot tarball ...')
    compressor = CompressMap(self.settings["compress_definitions"],
                             env=self.env,
                             default_mode=self.settings['compression_mode'])
    infodict = compressor.create_infodict(
        source=self.settings["repo_name"],
        destination=self.settings["snapshot_path"],
        basedir=tmpdir,
        filename=self.settings["snapshot_path"],
        mode=self.settings["compression_mode"],
        auto_extension=True
    )
    if compressor.compress(infodict):
        ext = compressor.extension(self.settings["compression_mode"])
        filename = '.'.join([self.settings["snapshot_path"], ext])
        log.notice('Snapshot successfully written to %s', filename)
        self.gen_contents_file(filename)
        self.gen_digest_file(filename)
    else:
        ok = False
        log.error('Snapshot compression failure')

    self.cleanup()
    if ok:
        log.info('snapshot: complete!')
    return ok
def calc_hash(self, file_, hash_):
    '''Calculate the hash for "file_"

    @param file_: the file to generate the hash for
    @param hash_: the hash algorithm to use (key into self.hash_map)
    @returns the hash result: the first whitespace-separated token of
        the external checksum tool's output
    '''
    _hash = self.hash_map[hash_]
    args = [_hash.cmd]
    args.extend(_hash.args)
    args.append(file_)
    source = Popen(args, stdout=PIPE)
    output = source.communicate()[0]
    # Bug fix: communicate() returns bytes; the old code did
    # output[0].split(), which indexes a single byte (an int under
    # Python 3) and raised AttributeError.  Decode first, then split.
    mylines = output.decode('ascii').split()
    result = mylines[0]
    log.info('%s (%s) = %s', _hash.id, file_, result)
    return result
def calc_hash(self, file_, hash_):
    '''Calculate the hash for "file_"

    @param file_: the file to generate the hash for
    @param hash_: the hash algorithm to use (key into self.hash_map)
    @returns the hash result string (first token of the tool's output)
    '''
    _hash = self.hash_map[hash_]
    args = [_hash.cmd]
    args.extend(_hash.args)
    args.append(file_)
    source = Popen(args, stdout=PIPE)
    output = source.communicate()[0]
    # Bug fix: the previous code indexed the bytes object returned by
    # communicate() ("output[0].split()"), which yields an int in
    # Python 3 and crashed.  Decode the whole output and split instead.
    mylines = output.decode('ascii').split()
    result = mylines[0]
    log.info('%s (%s) = %s', _hash.id, file_, result)
    return result
def empty(self):
    """Clear every directory listed in the netboot2/empty setting.

    Honors autoresume: when the "empty" resume point is already set the
    step is skipped entirely; otherwise each listed directory (relative
    to chroot_path + merge_path) is emptied via clear_dir() and the
    resume point is recorded.
    """
    if "autoresume" in self.settings["options"] \
            and self.resume.is_enabled("empty"):
        log.notice('Resume point detected, skipping empty operation...')
        return

    if "netboot2/empty" in self.settings:
        # The spec value may arrive as one whitespace-separated string.
        if isinstance(self.settings['netboot2/empty'], str):
            self.settings["netboot2/empty"] = \
                self.settings["netboot2/empty"].split()
        for entry in self.settings["netboot2/empty"]:
            path = (self.settings["chroot_path"]
                    + self.settings["merge_path"] + entry)
            if not os.path.isdir(path):
                log.warning(
                    'not a directory or does not exist, skipping "empty" operation: %s',
                    entry)
                continue
            log.info('Emptying directory %s', entry)
            # clear_dir() stats, removes and recreates the directory,
            # restoring the original ownership and permissions.
            clear_dir(path)
    self.resume.enable("empty")
def empty(self):
    """Delete and recreate the directories named by netboot2/empty.

    Skipped entirely when autoresume says this step already ran; on
    completion the "empty" resume point is enabled.
    """
    if "autoresume" in self.settings["options"] \
            and self.resume.is_enabled("empty"):
        log.notice('Resume point detected, skipping empty operation...')
        return

    if "netboot2/empty" in self.settings:
        if isinstance(self.settings['netboot2/empty'], str):
            # Normalize a whitespace-separated spec string to a list.
            self.settings["netboot2/empty"] = \
                self.settings["netboot2/empty"].split()
        for relpath in self.settings["netboot2/empty"]:
            dirpath = (self.settings["chroot_path"]
                       + self.settings["merge_path"] + relpath)
            if not os.path.isdir(dirpath):
                log.warning(
                    'not a directory or does not exist, skipping "empty" operation: %s',
                    relpath)
                continue
            log.info('Emptying directory %s', relpath)
            # Capture ownership/mode, wipe the tree, then rebuild an
            # empty directory carrying the original uid/gid/mode.
            orig_stat = os.stat(dirpath)
            shutil.rmtree(dirpath)
            ensure_dirs(dirpath, mode=0o755)
            os.chown(dirpath, orig_stat[ST_UID], orig_stat[ST_GID])
            os.chmod(dirpath, orig_stat[ST_MODE])
    self.resume.enable("empty")
def clear_dir(target, mode=0o755, chg_flags=False, remove=False):
    '''Universal directory clearing function

    @target: string, path to be cleared or removed
    @mode: integer, desired mode to set the recreated directory to
    @chg_flags: boolean used for FreeBSD hosts
    @remove: boolean, when True the directory is not recreated after
        deletion
    @return boolean: False on empty target or on a deletion error;
        True otherwise
    '''
    log.debug('start: %s', target)
    if not target:
        log.debug('no target... returning')
        return False
    if os.path.isdir(target):
        log.info('Emptying directory: %s', target)
        # stat the dir, delete the dir, recreate the dir and set
        # the proper perms and ownership
        try:
            log.debug('os.stat()')
            mystat = os.stat(target)
            # There's no easy way to change flags recursively in python
            if chg_flags and os.uname()[0] == "FreeBSD":
                # NOTE(review): os.system with string concatenation -- a
                # target containing shell metacharacters would be
                # interpreted by the shell; a list-based subprocess call
                # would be safer.
                os.system("chflags -R noschg " + target)
            log.debug('shutil.rmtree()')
            shutil.rmtree(target)
            if not remove:
                log.debug('ensure_dirs()')
                ensure_dirs(target, mode=mode)
                # restore the original ownership and permissions
                os.chown(target, mystat[ST_UID], mystat[ST_GID])
                os.chmod(target, mystat[ST_MODE])
        except Exception:
            log.error('clear_dir failed', exc_info=True)
            return False
    else:
        # NOTE(review): despite the "failed" message, this branch still
        # falls through and returns True -- presumably callers rely on a
        # missing/non-directory target being treated as success; confirm
        # before changing.
        log.info('clear_dir failed: %s: is not a directory', target)
    log.debug('DONE, returning True')
    return True
def clear_dir(target, mode=0o755, chg_flags=False, remove=False):
    '''Delete the contents of a directory, optionally removing it.

    @target: string, path to be cleared or removed
    @mode: integer, mode for the recreated directory
    @chg_flags: boolean, drop FreeBSD immutable flags before deleting
    @remove: boolean, when True do not recreate the directory afterwards
    @return boolean: False for an empty target or a deletion error,
        True otherwise
    '''
    log.debug('start: %s', target)
    if not target:
        log.debug('no target... returning')
        return False

    if not os.path.isdir(target):
        # Matches historical behavior: log the complaint but still fall
        # through to the True return below.
        log.info('clear_dir failed: %s: is not a directory', target)
    else:
        log.info('Emptying directory: %s', target)
        # Record ownership/mode, delete the tree, recreate it (unless
        # remove was requested) and restore the recorded attributes.
        try:
            log.debug('os.stat()')
            saved_stat = os.stat(target)
            # There's no easy way to change flags recursively in python
            if chg_flags and os.uname()[0] == "FreeBSD":
                os.system("chflags -R noschg " + target)
            log.debug('shutil.rmtree()')
            shutil.rmtree(target)
            if not remove:
                log.debug('ensure_dirs()')
                ensure_dirs(target, mode=mode)
                os.chown(target, saved_stat[ST_UID], saved_stat[ST_GID])
                os.chmod(target, saved_stat[ST_MODE])
        except Exception:
            log.error('clear_dir failed', exc_info=True)
            return False
    log.debug('DONE, returning True')
    return True
def calc_hash2(self, file_, hash_type):
    '''Run the external checksum tool for "file_" and format its output.

    @param file_: the file to generate the hash for
    @param hash_type: key into self.hash_map selecting the algorithm
    @returns a string: the tool's header line, a newline, then the hash
        value and the file's basename
    '''
    tool = self.hash_map[hash_type]
    cmdline = [tool.cmd]
    cmdline.extend(tool.args)
    cmdline.append(file_)
    log.debug('args = %r', cmdline)
    proc = Popen(cmdline, stdout=PIPE)
    output = proc.communicate()
    lines = output[0].decode('ascii').split('\n')
    log.debug('output = %s', output)
    header = lines[0]
    # Second line is "<hash> <path>"; keep the hash and the basename.
    fields = lines[1].split()
    hash_result = fields[0]
    short_file = os.path.split(fields[1])[1]
    result = header + "\n" + hash_result + " " + short_file + "\n"
    log.info('%s (%s) = %s', header, short_file, result)
    return result
def set_stage_path(self):
    """Point stage_path at the merge root inside the chroot."""
    merge_root = normpath(self.settings["chroot_path"] + "/tmp/mergeroot")
    self.settings["stage_path"] = merge_root
    log.info('embedded stage path is %s', merge_root)
def set_root_path(self):
    """Set root_path (relative to chroot_path) to the stage1 root."""
    self.settings["root_path"] = normpath("/tmp/stage1root")
    log.info('stage1 root path is %s', self.settings['root_path'])
def version():
    """Log catalyst's version string and copyright/license banner."""
    this_year = datetime.datetime.now().year
    log.info(get_version())
    log.info('Copyright 2003-%s Gentoo Foundation', this_year)
    log.info('Copyright 2008-2012 various authors')
    log.info('Distributed under the GNU General Public License version 2.1')
def set_root_path(self):
    """Record where, under chroot_path, the stage1 root will live."""
    stage1_root = normpath("/tmp/stage1root")
    self.settings["root_path"] = stage1_root
    log.info('stage1 root path is %s', stage1_root)
def set_root_path(self):
    """Point the build root at the embedded merge root."""
    self.settings["root_path"] = normpath("/tmp/mergeroot")
    log.info('embedded root path is %s', self.settings['root_path'])
def parse(self):
    """Parse self.lines into a key/value dict stored on self.values.

    Each non-blank, comment-stripped line either starts a new key (it
    contains self.key_value_separator) or, when self.multiple_values is
    set, appends further values to the most recent key.  A key with a
    single value is stored as a plain string, one with several values as
    a list.  Afterwards, empty values are deleted unless
    self.empty_values is set, and the literal string "none" is replaced
    with None when self.eval_none is set.

    Raises CatalystError for a continuation line when multiple_values
    is disabled.
    """
    values = {}
    cur_array = []

    trailing_comment = re.compile(r'\s*#.*$')
    # white_space=re.compile('\s+')

    for x, myline in enumerate(self.lines):
        myline = myline.strip()

        # Force the line to be clean
        # Remove Comments ( anything following # )
        myline = trailing_comment.sub("", myline)

        # Skip any blank lines
        if not myline:
            continue

        if self.key_value_separator in myline:
            # Split on the first occurence of the separator creating
            # two strings in the array mobjs
            mobjs = myline.split(self.key_value_separator, 1)
            mobjs[1] = mobjs[1].strip().strip('"')

            # # Check that this key doesn't exist already in the spec
            # if mobjs[0] in values:
            #     raise Exception("You have a duplicate key (" +
            #         mobjs[0] + ") in your spec. Please fix it")

            # Start a new array using the first element of mobjs
            cur_array = [mobjs[0]]
            if mobjs[1]:
                # do any variable substitiution embeded in it with
                # the values already obtained
                mobjs[1] = mobjs[1] % values
                if self.multiple_values:
                    # split on white space creating additional array
                    # elements
                    # subarray = white_space.split(mobjs[1])
                    subarray = mobjs[1].split()
                    cur_array += subarray
                else:
                    cur_array += [mobjs[1]]

        # Else add on to the last key we were working on
        else:
            if self.multiple_values:
                # mobjs = white_space.split(myline)
                # cur_array += mobjs
                cur_array += myline.split()
            else:
                raise CatalystError("Syntax error: %s" % x,
                                    print_traceback=True)

        # XXX: Do we really still need this "single value is a string"
        # behavior?
        if len(cur_array) == 2:
            values[cur_array[0]] = cur_array[1]
        else:
            values[cur_array[0]] = cur_array[1:]

    if not self.empty_values:
        # Make sure the list of keys is static since we modify inside
        # the loop.
        for x in list(values.keys()):
            # Delete empty key pairs
            if not values[x]:
                log.warning('No value set for key "%s"; deleting', x)
                del values[x]

    if self.eval_none:
        # Make sure the list of keys is static since we modify inside
        # the loop.
        for x in list(values.keys()):
            # reset None values
            if isinstance(values[x], str) and values[x].lower() in ['none']:
                log.info('None value found for key "%s"; reseting', x)
                values[x] = None

    self.values = values
def cleanup(self):
    """Announce cleanup, then delegate the actual work to purge()."""
    log.info('Cleaning up ...')
    self.purge()
def cleanup():
    """Log the cleanup step; no actual removal happens here."""
    log.info('Cleaning up ...')
def set_root_path(self):
    """Set the ROOT= path for emerges to the netboot image directory."""
    self.settings["root_path"] = normpath("/tmp/image")
    log.info('netboot root path is %s', self.settings['root_path'])
def set_root_path(self):
    """Use the embedded merge root as this target's root path."""
    merge_root = normpath("/tmp/mergeroot")
    self.settings["root_path"] = merge_root
    log.info('embedded root path is %s', merge_root)
def set_root_path(self):
    """Point ROOT= for emerges at the netboot image staging area."""
    image_root = normpath("/tmp/image")
    self.settings["root_path"] = image_root
    log.info('netboot root path is %s', image_root)
def set_stage_path(self):
    """Derive stage_path: chroot_path joined with the merge root."""
    self.settings["stage_path"] = normpath(
        self.settings["chroot_path"] + "/tmp/mergeroot")
    log.info('embedded stage path is %s', self.settings['stage_path'])