def set_device(self, devicepath: str = None) -> None:
    """Select the device (or partition) to back up.

    If *devicepath* is given it is accepted as-is; otherwise the user is
    prompted and the typed path is validated against the previously
    scanned device list in ``self._devices``.

    :param devicepath: optional path like ``/dev/sde`` or ``/dev/sde1``.
    :raises Exception: if the prompted string is empty or not found in
        the known devices/partitions.
    """
    device_str = None
    if not string_is_empty(devicepath):
        # Caller supplied a path explicitly - trusted without validation.
        self._devicepath = Path(devicepath)
    else:
        device_str = strip(
            input(
                "Type the path of device (e.g. /dev/sde) or partition (e.g. /dev/sde1) you want to backup:\n"
            ))
        if string_is_empty(device_str):
            raise Exception("Device-String is empty")
        devok = False
        for dev in self._devices:
            # Match either the whole device ...
            if string_equal_utf8(dev["dev"], device_str, False):
                devok = True
            if devok is False:
                # ... or one of its partitions (exact string compare).
                if "partitions" in dev:
                    for devpart in dev["partitions"]:
                        if devpart["dev"] == device_str:
                            devok = True
                            break
            if devok is True:
                break
        if not devok:
            # BUGFIX: previously formatted with self._device_str, an
            # attribute that is never assigned -> AttributeError instead
            # of the intended message. Use the local device_str.
            raise Exception("Device or partition '{}' not found".format(
                device_str))
        else:
            self._devicepath = Path(device_str)
def _check_folder_with_options(self, remote_root, remote_path, options):
    """Apply folder-level job options to *remote_path*.

    Returns the name of the first option that rejects the folder
    ("EXCLUDE_FOLDERS" or "GROUP"), or None when the folder is accepted.
    """
    if not is_sequence_with_any_elements(options):
        return None

    verdict = None

    # Shell-pattern based folder exclusion.
    if "exclude_folders" in options and not self._check_option_ignored("exclude_folders"):
        patterns = options["exclude_folders"]
        if not is_sequence_with_any_elements(patterns):
            raise JobException("options['exclude_folders'] has to contain a list of shell-file-patters", 10)
        for pattern in patterns:
            # Relative patterns are anchored at the remote root.
            if remote_root is not None and not Path(pattern).is_absolute():
                pattern = remote_root.joinpath(pattern)
            if fnmatch(str(remote_path), str(pattern)):
                verdict = "EXCLUDE_FOLDERS"
                break

    # Group filtering: skip entries whose group differs from the job's.
    if (
        verdict is None
        and "group" in options
        and not string_is_empty(self._group)
        and not self._check_option_ignored("group")
    ):
        wanted_group = options["group"]
        if not string_is_empty(wanted_group) and wanted_group != self._group:
            verdict = "GROUP"

    return verdict
def sevenzip(interactive: bool, compress_type: str, inputfilepath: str, outputfilepath: str = None):
    """Compress *inputfilepath* into a 7z or zip archive via the 7z binary.

    :param interactive: when True, missing arguments are prompted for and
        an existing output file triggers a delete confirmation.
    :param compress_type: "7z" or "zip"; defaults to "zip" non-interactively.
    :param inputfilepath: file to compress (required).
    :param outputfilepath: archive path; defaults to input + "." + type.
    :raises Exception: on missing/invalid arguments.
    """
    if string_is_empty(inputfilepath):
        raise Exception("No input file path provided")
    if string_is_empty(compress_type):
        if interactive is True:
            compress_type = strip(
                input(
                    "Type the name of the compression algorithm (7z or zip)\n"
                )
            ).lower()
        else:
            compress_type = "zip"
    if string_is_empty(compress_type):
        raise Exception("No compression algorithm chosen")
    if compress_type != "zip" and compress_type != "7z":
        raise Exception("Wrong compression algorithm (use 7z or zip)")
    if string_is_empty(outputfilepath):
        if interactive is True:
            outputfilepath = strip(input("Type the path of the compressed image file\n"))
        else:
            outputfilepath = inputfilepath + "." + compress_type
    if string_is_empty(outputfilepath):
        raise Exception("File path empty")
    cancelled = False
    p = Path(outputfilepath)
    if p.exists():
        # NOTE(review): this prompt also fires when interactive is False -
        # confirm whether non-interactive runs should overwrite silently.
        yn = singlecharinput(
            "The file '{}' exists. Do you want to delete it?".format(
                outputfilepath
            ), ConsoleColors.WARNING
        )
        if yn == "n":
            cancelled = True
            print(ConsoleColor.colorline("Cancelling compress process", ConsoleColors.OKBLUE))
        else:
            print(ConsoleColor.colorline("Deleting '{}'".format(outputfilepath), ConsoleColors.FAIL))
            # BUGFIX: the file was announced as deleted but never actually
            # removed, so the exists() check below always cancelled the
            # compression. Delete it now; failures fall through to the check.
            try:
                p.unlink()
            except OSError:
                pass
            if Path(outputfilepath).exists():
                print(ConsoleColor.colorline(
                    "Could not delete '{}'".format(
                        outputfilepath
                    ), ConsoleColors.FAIL)
                )
                cancelled = True
    if cancelled is False:
        sz = local["7z"]
        # "a" = add to archive; 7z infers the format from the extension.
        sz.run(["a", str(p.absolute()), inputfilepath])
def fdiskdevicesize(devs_from_fdisk, device_name: str):
    """Return the byte size of a device or partition named *device_name*.

    Searches the parsed fdisk device list (case-insensitively) at both
    the device and the partition level; returns None when the name is
    empty, unknown, or the matching entry carries no "size_b" key.
    """
    if string_is_empty(device_name):
        return None
    wanted = device_name.lower()
    for device in devs_from_fdisk:
        # Whole-device match.
        if "dev" in device and device["dev"].lower() == wanted:
            if "size_b" in device:
                return device["size_b"]
        # Partition-level match inside this device.
        for partition in device.get("partitions", ()):
            if "dev" in partition and partition["dev"].lower() == wanted:
                # None when the matched partition has no recorded size.
                return partition.get("size_b")
    return None
def parse_device_sizeinfo(line: str) -> Dict:
    """Extract the byte size from an fdisk device-summary line.

    Returns {"size_b": <int bytes>, "size_h": <human-readable string>},
    or None for an empty input line.
    """
    if string_is_empty(line):
        return None
    # Index of the literal word "bytes"; the numeric size precedes it.
    # NOTE(review): assumes "bytes" occurs in the line - if it does not,
    # find() returns -1 and the slicing below misbehaves; confirm callers
    # only pass fdisk size lines.
    p = line.find("bytes")
    # Distance back from "bytes" to the preceding comma (presumably the
    # separator before the byte count) - exact semantics of
    # find_char_backwards inferred from usage here; TODO confirm.
    c = find_char_backwards(line, ",", len(line) - p)
    # The characters between that comma and "bytes" are the size in bytes.
    fsize = int(strip(line[(p - c):p]))
    return {"size_b": fsize, "size_h": bytes_to_unit(fsize, True)}
def addlogger(
        self, name: str,
        configs: List[LoggerHandlerConfig]
) -> Logger:
    """Create and cache a logger named '<prefix>.<name>'.

    Repeated calls with the same name return the cached instance without
    adding handlers again.

    :param name: suffix appended to the factory's name prefix.
    :param configs: handler configurations attached on first creation.
    :raises Exception: when *name* is empty or *configs* has no elements.
    """
    if not is_sequence_with_any_elements(configs):
        raise Exception("configs needs to be passed")
    if string_is_empty(name):
        raise Exception("name needs to be passed")

    full_name = self._nameprefix + "." + name
    if full_name not in self._loggers:
        fresh = getLogger(full_name)
        # Logger itself accepts everything; per-handler levels filter.
        fresh.setLevel(DEBUG)
        for handler_config in configs:  # type: LoggerHandlerConfig
            fresh.addHandler(handler_config.create_handler(full_name))
        self._loggers[full_name] = fresh
    return self._loggers[full_name]
def set_image_path(self, image_path: str = None) -> bool:
    """Set the target image file path, prompting interactively if absent.

    :param image_path: optional file path; when empty the user is asked
        until a non-directory path is given (empty input cancels).
    :return: True when a path was set, False when the user cancelled.
    :raises Exception: when the supplied path points to a directory.
    """
    is_dir_msg = "Image path '{}' is not accepted, as it is a directory!"
    is_dir = False
    bad_path = None  # remembers which path triggered the directory error
    if not string_is_empty(image_path):
        if is_directory_path(image_path):
            is_dir = True
            bad_path = image_path
        else:
            self._imagepath = Path(image_path)
            return True
    else:
        imagepathok = False
        imagefile = None
        try:
            while imagepathok is False:
                fpi = input(
                    "Type the filepath of the image you want to create (empty to cancel image creation):\n"
                )
                imagefilepath = strip(fpi)
                if string_is_empty(imagefilepath):
                    # Sentinel exception used to signal user cancellation.
                    raise Exception("_cic_")
                if is_directory_path(imagefilepath):
                    is_dir = True
                    bad_path = imagefilepath
                else:
                    imagefile = Path(imagefilepath)
                    if imagefile.is_dir():
                        is_dir = True
                        bad_path = imagefilepath
                    else:
                        imagepathok = True
        except Exception as e:
            if str(e) == "_cic_":
                return False
            # NOTE(review): other exceptions are silently swallowed here
            # (original behavior) - confirm this is intended.
        finally:
            # NOTE: returning from finally suppresses in-flight exceptions;
            # preserved from the original implementation.
            if imagepathok is True:
                self._imagepath = imagefile
                return True
    if is_dir is True:
        # BUGFIX: the message previously contained an unfilled '{}'
        # placeholder and is_dir_msg was never used; format the defined
        # message with the offending path.
        raise Exception(is_dir_msg.format(bad_path))
def parse_partition_line(line: str, isboottype: bool) -> Dict:
    """Parse one fdisk partition-table row into a dict.

    Extracts the device name (column 0), the size (column 4, converted to
    bytes and a human-readable form) and the trailing type column, which
    may contain spaces. Boot-style tables have 7 columns (the '*' boot
    flag shifts the type to column 6), non-boot tables have 6.

    Returns None for an empty line, otherwise a dict with keys
    "dev", "size_b", "size_h" and "type".
    """
    if string_is_empty(line):
        return None
    columns = {}
    # Total column count decides which index is the final (type) column.
    columncount = 7 if isboottype else 6
    startedgap = False    # currently inside a run of separator spaces
    lastcolumn = False    # once True, spaces are part of the type column
    columncollect = ""    # characters accumulated for the current column
    collect = False       # whether the just-finished column is of interest
    cc = 0                # index of the column being read
    for c in line:
        if c == ' ' and lastcolumn is False:
            # First space after a column ends it; further spaces in the
            # same gap are ignored (startedgap is True).
            if startedgap is False:
                collect = False
                # Columns of interest: device (0), size (4); the boot flag
                # shifts nothing here since only 0 and 4 are stored.
                if isboottype:
                    if cc == 0 or cc == 4 or cc == 6:
                        collect = True
                else:
                    if cc == 0 or cc == 4 or cc == 5:
                        collect = True
                if collect is True:
                    if cc == 0:
                        columns["dev"] = columncollect
                    elif cc == 4:
                        partbytes = fdisk_size_to_bytesize(columncollect)
                        columns["size_b"] = partbytes
                        columns["size_h"] = bytes_to_unit(partbytes, True)
                startedgap = True
                cc += 1
                columncollect = ""
        else:
            # First non-space of the final column switches lastcolumn on,
            # so embedded spaces in the type text are kept from here on.
            if cc == columncount - 1:
                lastcolumn = True
            if startedgap is True:
                startedgap = False
            columncollect += c
    # Whatever remains after the loop is the (possibly multi-word) type.
    columns["type"] = columncollect
    return columns
def start_dd(self, interactive: bool = False, ddbatchsize: str = None, finished_handler: Callable = None) -> int:
    """Run ``sudo dd`` to image the selected device into the image file.

    :param interactive: when True the dd progress output is parsed and a
        live status line is printed; confirmation is skipped.
        NOTE(review): the confirmation prompt fires on the *non*-interactive
        path, which looks inverted - confirm the intended semantics.
    :param ddbatchsize: dd ``bs=`` value; defaults to "1M".
    :param finished_handler: optional callback ``(retcode, imagepath)``.
    :return: dd's return code, -1 on cancellation, or None.
    """
    retcode = None
    devpath = str(self._devicepath.absolute())
    imagepath = str(self._imagepath.absolute())
    # Escape spaces for display/dd argument consistency.
    param_if = "if={}".format(devpath.replace(" ", "\\ "))
    param_of = "of={}".format(imagepath.replace(" ", "\\ "))
    param_status = "status=progress"
    # todo: make configurable batch size
    if ddbatchsize is not None:
        param_bs = "bs={}".format(ddbatchsize)
    else:
        param_bs = "bs=1M"
    if interactive is True:
        yn = "y"
    else:
        print(
            ConsoleColor.colorline(
                "Will execute \"{}\"".format("sudo dd {} {} {} {}".format(
                    param_if, param_of, param_status, param_bs)),
                ConsoleColors.OKGREEN))
        yn = singlecharinput(
            "Confirm the creation of image '{}' from device '{}' (y/n)!".
            format(imagepath, devpath), ConsoleColors.OKGREEN)
    if yn == "n":
        if interactive is True:
            print(
                ConsoleColor.colorline(
                    "Cancelled image creation on your wish",
                    ConsoleColors.OKBLUE))
        return -1
    elif yn == "y":
        sudo = local["sudo"]
        dd = local["dd"]
        starttime = datetime.now()
        # dd reports progress on stderr; capture it for parsing.
        p = sudo[dd[param_if, param_of, param_status, param_bs]].popen(stderr=PIPE)
        line = ''
        if interactive is True:
            print()
        target_image_sizehuman = self.get_target_image_sizehuman()
        target_image_sizebytes = self.get_target_image_sizebytes()
        if interactive is False:
            # No progress parsing: just wait for dd to finish.
            p._proc.communicate()
        else:
            # Read stderr byte-wise; dd separates progress updates with \r.
            while True:
                retcode = p.poll()
                if retcode is None:
                    out = p.stderr.read(1).decode("utf-8", errors="replace")
                    if out == '':
                        break
                    else:
                        if out == '\r':
                            line = strip(line)
                            if not string_is_empty(line):
                                dd_info = parse_dd_info(line)
                                currenttime = datetime.now()
                                deltatime = currenttime - starttime
                                # copied | elapsed-of-predicted | total time
                                print(
                                    "{0:10}{1:4}{2:9}{3:3}{4:10}{5:4}{6:8}{7:3}{8}"
                                    .format(
                                        bytes_to_unit(
                                            dd_info["size_b"], True, True,
                                            False), "of",
                                        target_image_sizehuman, "|",
                                        dd_info["time_h"], "of",
                                        humantime(
                                            predict_job_time(
                                                dd_info["time"],
                                                target_image_sizebytes,
                                                dd_info["size_b"])), "|",
                                        "Total time: {}".format(
                                            humantime(deltatime.
                                                      total_seconds()))),
                                    end="\r")
                            line = ''
                        else:
                            line = line + out
                else:
                    break
        retcode = retcode if retcode is not None else p._proc.returncode
        if interactive is True:
            print()
        currenttime = datetime.now()
        deltatime = currenttime - starttime
        print(
            ConsoleColor.colorline(
                "Total time: {}".format(
                    humantime(deltatime.total_seconds())),
                ConsoleColors.OKGREEN))
        st = stat(imagepath)
        print(
            ConsoleColor.colorline(
                "Final image size: {}".format(
                    bytes_to_unit(st.st_size, True, True, False)),
                ConsoleColors.OKGREEN))
        if retcode == 0:
            print(
                ConsoleColor.colorline("Successfully created image!",
                                       ConsoleColors.OKGREEN))
        else:
            print(
                ConsoleColor.colorline(
                    "No Result from dd (image might be ok)!",
                    ConsoleColors.WARNING))
        # BUGFIX: finished_handler defaults to None but was called
        # unconditionally, raising TypeError whenever omitted.
        if finished_handler is not None:
            finished_handler(retcode, imagepath)
    return retcode
def reload_jsonconfig(
        self, configfile: click.File,
        mandatory_option_keys: List[str],
        config_loaded_handler: Callable,
        logfactory: LoggerFactory
):
    """(Re)load the JSON configuration and wire up configured loggers.

    Parses *configfile*, validates that an "options" object exists and
    contains every key in *mandatory_option_keys*, builds logger handler
    configs from an optional "loggers" list, and finally invokes
    *config_loaded_handler* with the parsed JSON.

    :raises Exception: on missing keys, unknown logger type/level, or a
        file logger without a "folder" option.
    """
    # Drop any previously loaded data before re-reading.
    if is_sequence_with_any_elements(self._jsondata):
        self._jsondata.clear()
    self._jsondata = load(configfile)
    assert_obj_has_keys(self._jsondata, "json", ["options"])
    options = self._jsondata["options"]
    assert_obj_has_keys(options, "options", mandatory_option_keys)
    self._options = options
    if "loggers" in options:
        loggers = options["loggers"]
        if is_sequence_with_any_elements(loggers):
            lt = LoggerHandlerType.Nope
            handlerconfigs = []
            for logger in loggers:
                # Each logger entry needs a type and a level of
                # either "all" or "errors".
                if "type" not in logger:
                    raise Exception("logger in json has to contain a type")
                if "level" not in logger or (
                        logger["level"] != "all" and logger["level"] != "errors"
                ):
                    raise Exception("logger level is not cool (use 'all' or 'errors')")
                if logger["type"] == "file":
                    lt = LoggerHandlerType.FileHandler
                elif logger["type"] == "console":
                    lt = LoggerHandlerType.ConsoleHandler
                else:
                    raise Exception("logger-type {} in json is not recognized".format(logger["type"]))
                # File loggers must say where to write.
                if lt == LoggerHandlerType.FileHandler and ("folder" not in logger or string_is_empty(logger["folder"])):
                    raise Exception("logger with type file has to contain a folder-option")
                handlerconfig = None
                """:type:LoggerHandlerConfig"""
                if lt == LoggerHandlerType.FileHandler:
                    # "errors" -> ERROR threshold, otherwise INFO.
                    handlerconfig = LoggerHandlerConfig.create_file_config(
                        ERROR if logger["level"] == "errors" else INFO,
                        logger["folder"]
                    )
                else:
                    # NOTE(review): both console levels use
                    # create_console_err_config - confirm "all" is not
                    # meant to use a stdout config instead.
                    if logger["level"] == "errors":
                        handlerconfig = LoggerHandlerConfig.create_console_err_config(ERROR)
                    else:
                        handlerconfig = LoggerHandlerConfig.create_console_err_config(INFO)
                handlerconfigs.append(handlerconfig)
            self._logger = logfactory.addlogger(
                options["name"],
                handlerconfigs
            )
    self._config_loaded = True
    if config_loaded_handler is not None:
        config_loaded_handler(self._jsondata)
def is_directory_path(path: str):
    """Return True when *path* (after trimming) ends with the OS path
    separator, i.e. it denotes a directory rather than a file.

    :param path: candidate path string; empty/None yields False.
    """
    if not string_is_empty(path):
        # BUGFIX: the last-character index was computed from the
        # *untrimmed* length, so a path with trailing whitespace
        # (e.g. "/foo/ ") produced an empty slice and was wrongly
        # reported as not-a-directory.
        return strip(path).endswith(sep)
    return False
def run(self):
    """Execute the backup job: parse entries, connect over SSH/SFTP and
    process each file/dir entry.

    :return: None on success, a JobException error code on job errors,
        or 113 on SSH failures.
    """
    self.info("Starting unit task")
    pathes = self._jsondata["pathes"]
    sftp = None
    """:type: paramiko.SFTPClient"""
    try:
        # Build BackupEntry objects from the json "pathes" list.
        for entry in pathes:
            entryfilter = None
            if "type" not in entry:
                raise JobException(Exception("no type-key in entry"), 2)
            if "path" not in entry:
                raise JobException(Exception("no path-key in entry"), 3)
            entrytype = entry["type"]
            if "filter" in entry:
                entryfilter = entry["filter"]
            et = BackupEntryType.Unknown
            if entrytype == "file":
                et = BackupEntryType.File
            elif entrytype == "dir":
                et = BackupEntryType.Dir
            if "name" in entry:
                entryname = entry["name"]
            else:
                entryname = str(entrytype)
            e = BackupEntry(
                et,
                entryname,
                entryfilter,
                entry["path"],
                entry["options"] if "options" in entry else None
            )
            self._entries.append(e)
        if len(self._entries) == 0:
            raise JobException(Exception("No entrys found in json"), 4)
        self.info("Trying to connect to SSH-Host {}".format(self._host))
        if self._transport is None:
            self._transport = paramiko.Transport(self._host)
        if not string_is_empty(self._keyfile):
            key = paramiko.RSAKey.from_private_key_file(self._keyfile)
            # BUGFIX: the message had no '{}' placeholder, so the key
            # name was silently dropped from the log line.
            self.info("With Key: {}".format(key.get_name()))
            self._transport.connect(username=self._user, pkey=key)
        else:
            self.info("With Username/Password")
            self._transport.connect(username=self._user, password=self._password)
        if not self._transport.is_authenticated():
            raise JobException(Exception("could not authenticate"), 5)
        self.info("Successfully connected!")
        self.info("Opening SFTP-Channel from transport")
        with paramiko.SFTPClient.from_transport(self._transport) as sftp:
            self.info("Successfully opened SFTP-Channel!")
            local_targetdir = Path(self._targetdir)
            """:type: Path"""
            # d/f gate whether dir/file entries are processed; an explicit
            # processonly_types option narrows them ("all" keeps both).
            d = True
            f = True
            if len(self._processonly_types) > 0:
                if "all" not in self._processonly_types:
                    if "dir" not in self._processonly_types:
                        d = False
                    if "file" not in self._processonly_types:
                        f = False
                    if f is False and d is False:
                        raise JobException(Exception(
                            "If processonly_types is set in options, either dir or file has to be passed"), 6
                        )
            for entry in self._entries:
                self.info("Executing job-task '{}'".format(entry.get_name()))
                t = entry.get_type()
                if entry.should_skip():
                    self.info("Skipping entry '{}' because options skip is active".format(
                        entry.get_type()
                    ))
                else:
                    if t is BackupEntryType.File and f is True:
                        self.process_file(sftp, entry.get_path(), local_targetdir, entry)
                    elif t is BackupEntryType.Dir and d is True:
                        self.process_directory(sftp, entry.get_path(), local_targetdir, entry)
    except JobException as je:
        from traceback import format_exc
        self.error(str(format_exc()))
        return je.get_errcode()
    except paramiko.SSHException:
        from traceback import format_exc
        self.error(str(format_exc()))
        return 113
    finally:
        # Always release the channel and the transport.
        if sftp is not None:
            sftp.close()
        if self._transport is not None:
            self._transport.close()

# def progressfiledownload(self, current, total):
#     p = False
#     if current == total:
#         p = True
#     elif self._last_dl_bytes > 0:
#         if current - self._last_dl_bytes > self._current_progress_divider:
#             p = True
#             self._last_dl_bytes = current
#     else:
#         if total > self._current_progress_divider:
#             p = True
#             self._last_dl_bytes = current
#     if p is True and self._show_copystats is True:
#         self.info("{}Downloaded: {}".format(bytes_to_unit(current, 1, True, False)))
def _check_file_with_options(
        self, options: dict, remoteroot: Path, localfile: Path,
        remotefile: Path, remote_stat, prepend_output_tabs="\t"
):
    """Apply file-level job options to a remote file.

    Returns the name of the first option that rejects the transfer
    ("IS_LINK", "OVERWRITE_EXISTING", "OVERWRITE_NEWER", "EXCLUDE_FILTER",
    "INCLUDE_FILTERS", "EXCLUDE_FILES") or None when the file should be
    transferred.
    """
    do_transfer = None
    # Symlinks are never transferred.
    if stat.S_ISLNK(remote_stat.st_mode):
        do_transfer = "IS_LINK"
    # NOTE(review): unlike the checks below, this one is not guarded by
    # 'do_transfer is None', so it can override IS_LINK - confirm intended.
    if "overwrite_existing" in options and not self._check_option_ignored("overwrite_existing"):
        if localfile.exists() and options["overwrite_existing"] is False:
            do_transfer = "OVERWRITE_EXISTING"
    # Skip when the remote copy is not newer than the local one.
    if (
            do_transfer is None
            and "overwrite_newer" in options
            and options["overwrite_newer"] is True
            and not self._check_option_ignored("overwrite_newer")
    ):
        if localfile.exists():
            local_stat = lstat(str(localfile))
            if local_stat.st_mtime >= remote_stat.st_mtime:
                self.info("{}Remote file modification date '{}' is not newer than local modification date '{}'".format(
                    prepend_output_tabs,
                    datetime.fromtimestamp(remote_stat.st_mtime),
                    datetime.fromtimestamp(local_stat.st_mtime),
                ))
                do_transfer = "OVERWRITE_NEWER"
    # Single shell-pattern exclusion.
    if (
            do_transfer is None
            and "exclude_filter" in options
            and not self._check_option_ignored("exclude_filter")
    ):
        exclude_filter = options["exclude_filter"]
        if not string_is_empty(exclude_filter):
            if fnmatch(str(remotefile), exclude_filter):
                do_transfer = "EXCLUDE_FILTER"
    # Whitelist: with include_filters set, a file is rejected unless it
    # matches at least one pattern (list) or the single pattern (string).
    if (
            do_transfer is None
            and "include_filters" in options
            and not self._check_option_ignored("include_filters")
    ):
        include_filters = options["include_filters"]
        if is_sequence_with_any_elements(include_filters):
            do_transfer = "INCLUDE_FILTERS"
            for include_filter in include_filters:
                if fnmatch(str(remotefile), include_filter):
                    do_transfer = None
                    break
        elif not string_is_empty(include_filters):
            if not fnmatch(str(remotefile), include_filters):
                do_transfer = "INCLUDE_FILTERS"
    # Explicit file list exclusion: entries are resolved against the
    # remote root and compared by exact path string.
    if (
            do_transfer is None
            and "exclude_files" in options
            and not self._check_option_ignored("exclude_files")
    ):
        exclude_files = options["exclude_files"]
        if is_sequence_with_any_elements(exclude_files):
            for exclude_file in exclude_files:
                if not string_is_empty(exclude_file):
                    testfile = remoteroot.joinpath(exclude_file)
                    if str(testfile) == str(remotefile):
                        do_transfer = "EXCLUDE_FILES"
                        break
    return do_transfer
def __init__(self, nameprefix: str):
    """Store the prefix prepended to every logger name.

    :param nameprefix: non-empty prefix for loggers built by this factory.
    :raises Exception: when *nameprefix* is empty.
    """
    # NOTE(review): addlogger reads self._loggers, which is not
    # initialized here - confirm it is defined elsewhere (e.g. as a
    # class attribute).
    if not string_is_empty(nameprefix):
        self._nameprefix = nameprefix
    else:
        raise Exception("name has to be passed")