def apply_rsync(init_base_dir, init_rslt_dir, itm, dbg=False):
    """Copy files from init_base_dir (source) to init_rslt_dir (dest) via rsync.

    Parameters:
        init_base_dir: source directory; a trailing os.sep is appended so
            rsync copies the directory *contents*.
        init_rslt_dir: destination directory.
        itm: label used in log / error messages.
        dbg: bool, or a debug object providing write_stdout() — forwarded
            to the dbc helpers.
    """
    print_dbg = dbc.test_dbg(dbg)
    rsync_list = [
        "rsync",
        "-vuogptr",
        "--info=SKIP,STATS",
        # BUG FIX: the two exclude flags were previously joined into ONE
        # argv element ("--exclude=.*.sw[op] --exclude=*.pyc"), so rsync saw
        # a single nonsense pattern and neither exclusion took effect.
        "--exclude=.*.sw[op]",
        "--exclude=*.pyc",
        (init_base_dir + os.sep),
        init_rslt_dir,
    ]
    base_str = "".join(["RSYNC: ", itm, "--", " ".join(rsync_list), os.linesep])
    dbc.print_helper(base_str, dbg)
    call_rslt = subp.run(rsync_list, stdout=subp.PIPE, stderr=subp.PIPE)
    if call_rslt.returncode != 0:
        dbc.error_helper("RSYNC Error:", call_rslt.stderr, post=itm, dbg=dbg)
    elif print_dbg:
        if isinstance(dbg, bool):
            # Plain-bool debug: echo rsync's stdout plus a timestamp.
            base_time_str = dt.datetime.now()
            print("".join([
                call_rslt.stdout.decode("UTF-8"),
                os.linesep,
                base_time_str.strftime("%Y/%m/%d %H:%M:%S"),
            ]))
        else:
            # Debug object: delegate output handling to it.
            dbg.write_stdout(itm, call_rslt.stdout)
def calc_directory(init_dir, dbg=False):
    """Join init_dir with the current date string.

    Returns a (full_path, date_string) tuple.
    """
    date_str, _ = calc_date_time()
    full_path = os.sep.join([init_dir, date_str])
    dbc.print_helper(("Dir: " + full_path), dbg=dbg)
    return full_path, date_str
def construct_zip(src_dir, base_dir, base_name="vimwiki_diff_backup",
                  excluded_ending=None, dbg=False):
    """Build a zip archive of src_dir/base_dir, skipping excluded extensions.

    Parameters:
        src_dir: directory that will contain the resulting zip file.
        base_dir: subdirectory (relative to src_dir) whose tree is archived.
        base_name: archive name stem; date and time strings are appended.
        excluded_ending: iterable of extensions to skip; None selects the
            default {".swo", ".swp", ".pyc", ".o", ".gz"}; pass [] for none.
        dbg: forwarded to the dbc helpers.

    Returns the zip file path (also returned when archiving failed after the
    name was computed), or None if the name was never built.

    NOTE(review): members are written by path relative to base_dir, so the
    current working directory must make those relative paths resolvable —
    confirm against callers.
    """
    dt_str, time_str = calc_date_time()
    base_name = "_".join([base_name, dt_str, time_str])
    zipname = None
    if excluded_ending is None:
        excluded_final = {".swo", ".swp", ".pyc", ".o", ".gz"}
    else:
        excluded_final = set(excluded_ending)
    try:
        zipname = "".join([src_dir, os.sep, base_name, ".zip"])
        zip_count = 0
        # The with-statement closes the archive on every path.  The old
        # handlers called zp_ptr.close() manually, which raised NameError
        # whenever zp.ZipFile() itself failed (zp_ptr was never bound).
        with zp.ZipFile(zipname, mode='w') as zp_ptr:
            dw = dwa.diskwalk(os.sep.join([src_dir, base_dir]))
            for itm in dw.enumeratePaths():
                _, ext = os.path.splitext(itm)
                if ext != '' and ext in excluded_final:
                    dbc.print_helper(": ".join(["Excluding", itm]), dbg=dbg)
                else:
                    itm_loc = str(itm).find(base_dir)
                    zp_ptr.write(itm[itm_loc:])
                    # Directory entry for base_dir itself is not counted.
                    if not itm.endswith(base_dir):
                        zip_count += 1
        if zip_count < 2:
            dbc.print_helper("Warning construct_zip -- likely empty zip", dbg=dbg)
    except OSError as err:
        dbc.error_helper(("OSError: Zip" + str(err.strerror)), stderr=None,
                         post=zipname, dbg=dbg)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        dbc.error_helper(("Error: Zip" + str(sys.exc_info()[0])), stderr=None,
                         post=None, dbg=dbg)
    return zipname
def calc_hostname(dbg=False):
    """Return the Linux hostname, or None when it cannot be determined.

    Reads /proc/sys/kernel/hostname directly instead of spawning
    `cat` in a subprocess (the previous implementation) — same data,
    no process-launch overhead.
    """
    hostname = None
    try:
        with open("/proc/sys/kernel/hostname") as fptr:
            hostname = fptr.read().replace("\n", "")
        dbc.print_helper(("Hostname " + hostname), dbg=dbg)
    except OSError as err:
        # Mirrors the old subprocess-failure path (non-zero return code).
        dbc.error_helper("Hostname", str(err), post=None, dbg=dbg)
    return hostname
def mysql_backup_call(command_list, dest, dbg=False):
    """Run a backup command (e.g. mysqldump), appending its stdout to dest.

    Parameters:
        command_list: argv list passed to subprocess.
        dest: output file path, opened in append mode.
        dbg: forwarded to the dbc helpers.
    """
    # with-statement guarantees the file closes even if subp.run raises;
    # the original leaked the handle on exception.
    with open(dest, "a") as file_ptr:
        call_rslt = subp.run(
            command_list, stdout=file_ptr, stderr=subp.PIPE, stdin=subp.PIPE
        )
    if call_rslt.returncode != 0:
        dbc.error_helper("MySQL Backup Error:", call_rslt.stderr, post=None, dbg=dbg)
    elif call_rslt.stdout is not None:
        dbc.print_helper(("Successful MySQL Backup" + call_rslt.stdout), dbg)
    else:
        # Normal path: stdout went to the file, so call_rslt.stdout is None.
        out_str = "--".join(["Successful MySQL Backup", " ".join(command_list)])
        dbc.print_helper(out_str, dbg)
def calc_diff(src, dest, temp, filename, inc_backup=None, dbg=False):
    """Compare src/filename with dest/filename using /usr/bin/diff.

    When the files differ, writes the diff output to temp/<stem>.diff; when
    filename is also listed in inc_backup, copies the source file to a
    date-stamped name in temp.  Identical files are only logged.

    Parameters:
        src, dest: directories holding the two files to compare.
        temp: directory receiving .diff output (and optional backups).
        filename: name of the file present in both directories.
        inc_backup: optional iterable of filenames to back up on change.
        dbg: forwarded to the dbc helpers.
    """
    src_str = os.sep.join([src, filename])
    dest_str = os.sep.join([dest, filename])
    if inc_backup is not None:
        inc_backup = set(inc_backup)
    val = str(filename).split(".")
    try:
        diff_str = "".join([temp, os.sep, val[0], ".diff"])
        diff_list = ["/usr/bin/diff", "-s", src_str, dest_str]
        call_rslt = subp.run(diff_list, stdout=subp.PIPE, stderr=subp.PIPE)
        init_rslt = call_rslt.stdout.decode("UTF-8")
        # diff -s: rc 0 + "...identical" message means no changes.
        if call_rslt.returncode == 0 and init_rslt.endswith("identical" + os.linesep):
            dbc.print_helper("Excluding Identical File: " + filename, dbg=dbg)
        elif call_rslt.returncode == 1 and\
                not init_rslt.endswith("identical" + os.linesep):
            # rc 1 means the files differ: persist the diff output.
            # (Dropped the always-true `init_rslt is not None` check —
            # bytes.decode never returns None.)
            with open(diff_str, "w") as file_ptr:
                file_ptr.write(init_rslt)
            if inc_backup is not None and filename in inc_backup:
                temp_filename = calc_filename(os.sep.join([temp, filename]),
                                              include_time=True, dbg=dbg)
                sh.copy(src_str, temp_filename)
            dbc.print_helper(" ".join(["Diff", diff_str, "success"]), dbg=dbg)
        else:
            # rc >= 2: diff itself failed (missing file, bad args, ...).
            dbc.error_helper("Diff Error:", call_rslt.stderr, filename, dbg=dbg)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt still propagates.
        dbc.error_helper("Diff Exception: ", stderr=None, post=filename, dbg=dbg)
def calc_filename(name, split='.', include_time=False, dbg=False):
    """Insert the current date (and optionally time) between the stem of
    `name` and its extension, joining with underscores.
    """
    dt_str, time_str = calc_date_time()
    pieces = str(name).split(split)
    if len(pieces) == 1:
        # No separator present: nothing to stamp between.
        res = pieces[0]
    elif len(pieces) > 1:
        # Re-join everything after the first separator as the extension.
        ext = split.join(pieces[1:]) if len(pieces) > 2 else pieces[1]
        stamp = [pieces[0], dt_str, time_str] if include_time else [pieces[0], dt_str]
        res = "_".join(stamp) + split + ext
    else:
        res = 'FAILED'
    dbc.print_helper(("calc_filename: " + res), dbg=dbg)
    return res
def construct_gzip(src_dir, base_dir, base_name="MySQL_backup_",
                   excluded_ending=None, dbg=False):
    """Build a .tar.gz of src_dir/base_dir, skipping excluded extensions.

    Parameters:
        src_dir: directory that will contain the resulting archive.
        base_dir: subdirectory (relative to src_dir) whose tree is archived.
        base_name: archive name stem; date and time strings are appended.
        excluded_ending: iterable of extensions to skip; None selects the
            default {".swo", ".swp", ".pyc", ".o", ".gz"}; pass [] for none.
        dbg: forwarded to the dbc helpers.

    Returns (tar filename or None, list of excluded paths).

    NOTE(review): members are added by path relative to base_dir, so the
    current working directory must make those relative paths resolvable —
    confirm against callers.
    """
    dt_str, time_str = calc_date_time()
    base_name = "_".join([base_name, dt_str, time_str])
    tarfilename = None
    excluded = []
    if excluded_ending is None:
        excluded_final = {".swo", ".swp", ".pyc", ".o", ".gz"}
    else:
        excluded_final = set(excluded_ending)
    try:
        tarfilename = "".join([src_dir, os.sep, base_name, ".tar.gz"])
        # The with-statement closes the tar on every path.  The old bare
        # except called tar.close(), which raised NameError whenever
        # tarfile.open() itself failed (tar was never bound).
        with tarfile.open(tarfilename, "w:gz") as tar:
            dw = dwa.diskwalk(os.sep.join([src_dir, base_dir]))
            for itm in dw.enumeratePaths():
                _, ext = os.path.splitext(itm)
                if ext != '' and ext in excluded_final:
                    dbc.print_helper(": ".join(["Excluding", itm]), dbg=dbg)
                    excluded.append(itm)
                else:
                    itm_loc = str(itm).find(base_dir)
                    tar.add(itm[itm_loc:])
    except Exception:
        # Was a silent bare except: now narrowed and at least reported.
        dbc.error_helper(("Error: Tar" + str(sys.exc_info()[0])), stderr=None,
                         post=tarfilename, dbg=dbg)
    return tarfilename, excluded
def update_file(src, file_filter, day=1, split='.', dbg=False):
    """Create the next wiki file in src by copying the newest matching file.

    A proposed filename is built via bu.calc_filename from
    file_filter + ".wiki", with the two characters immediately before the
    extension separator overwritten by the zero-padded `day`.  The
    lexicographically greatest existing file starting with file_filter is
    then copied to the proposed name (unless it already exists or no
    matching file was found).

    Parameters:
        src: directory to scan and write into.
        file_filter: filename prefix to match.
        day: day number written into the proposed filename (two digits).
        split: extension separator passed through to bu.calc_filename.
        dbg: forwarded to the dbc helpers.
    """
    filename = file_filter + ".wiki"
    proposed_file = bu.calc_filename(filename, split=split,
                                     include_time=False, dbg=dbg)
    loc = proposed_file.find(split)
    if loc > -1:
        day_str = str(day).zfill(2)
        # Overwrite the two characters just before the separator with the
        # day digits (string slicing replaces the old char-list surgery).
        proposed_file = proposed_file[:loc - 2] + day_str[:2] + proposed_file[loc:]
    init_base = "_".join([file_filter, "0000"])
    base = init_base
    # Single directory scan — the original listed the directory twice
    # (once for the max search, once for the membership test).
    entries = os.listdir(src)
    for itm in entries:
        if itm.startswith(file_filter) and base < itm:
            base = itm
    if base != proposed_file and proposed_file not in set(entries) and\
            base != init_base:
        sh.copy((src + os.sep + base), (src + os.sep + proposed_file))
        dbc.print_helper(("update_file Creating New File: " + proposed_file), dbg=dbg)
    elif base == init_base:
        dbc.print_helper("update_file created NO file (no item found)", dbg=dbg)
    else:
        dbc.print_helper("update_file created NO file", dbg=dbg)
def update_files(src, dest, temp, excluded_ending=None, dbg=False):
    """ walks directory structure in src, and compares to dest files excluded_ending is None removes items [".swo", ".swp", ".pyc", ".o", ".gz"], for all pass in [] """
    # Mirror src's directory tree into dest (and temp), diffing files that
    # already exist in dest and copying files that do not.
    init_index = -1
    init_len = len(str(src).split(os.sep))
    excluded_final = None
    if excluded_ending is None:
        excluded_final = set([".swo", ".swp", ".pyc", ".o", ".gz"])
    else:
        excluded_final = set(excluded_ending)
    for dirpath, _, filenames in os.walk(src):
        dir_split = str(dirpath).split(os.sep)
        cur_len = len(str(dirpath).split(os.sep))
        # Negative index into dir_split selecting the path components of
        # dirpath below src (depth-relative slice).
        cur_index = init_index + (init_len - cur_len)
        cur_append = os.sep.join(dir_split[cur_index:])
        cur_dir = os.sep.join([dest, cur_append])
        dbc.print_helper(" ".join(
            [str(cur_len), str(cur_index), cur_append, cur_dir]), dbg=dbg)
        not_empty = True
        if os.path.exists(cur_dir):
            # Snapshot of the destination directory's current contents.
            base_dest_set = set(os.listdir(cur_dir))
        else:
            # New destination subdirectory: everything below is a fresh copy.
            # NOTE(review): os.mkdir assumes the parent exists — this relies
            # on os.walk's top-down order having created it already; confirm.
            not_empty = False
            os.mkdir(cur_dir)
        for filename in filenames:
            _, init_splt = os.path.splitext(filename)
            # print(filename + " " + str(init_splt) + " " + str(not_empty) + " " + cur_dir)
            if init_splt != '' and init_splt in excluded_final:
                dbc.print_helper(("Excluding " + filename), dbg=dbg)
            else:
                temp_dir = os.sep.join([temp, cur_append])
                if not os.path.exists(temp_dir):
                    os.mkdir(temp_dir)
                if not_empty and filename in base_dest_set:
                    # File exists in both trees: record the difference
                    # (index.wiki is additionally backed up on change).
                    bu.calc_diff(dirpath, cur_dir, temp_dir, filename,
                                 inc_backup=["index.wiki"], dbg=dbg)
                else:
                    # New file: copy into both the temp and dest trees.
                    dbc.print_helper(("Adding " + filename), dbg=dbg)
                    sh.copy(os.sep.join([dirpath, filename]), temp_dir)
                    sh.copy(os.sep.join([dirpath, filename]), cur_dir)
options["debug_file"] = bu.append_date_file( options["debug_file"]) if not dryrun: db_interface = rates_dbi.base_rates_db_interface(options, dryrun) options["start_date"] = db_interface.calc_start_date( options['start_date']) else: db_interface = None if "append" in options and options[ "append"] > 0 and "current_view" in options: if db_interface: options["start_date"] = db_interface.calc_most_recent_date( options["start_date"]) dbc.print_helper( ("Updated start date " + str(options["start_date"])), dbg=dbg) elif "db_host_ip" in args_dict.keys() and "items" in args_dict.keys() and\ "table" in args_dict.keys(): options = args_dict.copy() if "items" in options.keys(): options.pop("items", "") items = args.items.split(',') options["items"] = {} for itm in items: options["items"][itm] = '' else: raise ValueError("Faulty Configuration")
options["url_control"]["start_date"]) else: db_interface = None if "append" in options and options[ "append"] > 0 and "current_view" in options: db_interface.options["url_control"]["start_date"] =\ db_interface.calc_most_recent_date(options["url_control"]["start_date"]) db_interface.options["url_control"][ "start_date"] = bu.dt.datetime.strftime( db_interface.options["url_control"]["start_date"], "%Y-%m-%d") dbc.print_helper( ("Updated start date " + db_interface.options["url_control"]["start_date"]), dbg=dbg) max_date = db_interface.calc_max_date() if max_date: db_interface.options['url_control']['end_date'] = str(max_date) else: raise ValueError("Options Dictionary Provided") if db_interface: result_dict = load_exchange_rates(db_interface.options, dbg=dbg) else: result_dict = load_exchange_rates(options, dbg=dbg) if db_interface and result_dict and isinstance(result_dict, dict) and\
def print_q_str(self, insert_str="", dbg=True):
    """Emit this object's query string (self.q_str) via the debug helper."""
    message = "sql_class_base(" + insert_str + "): query string: " + self.q_str
    dbc.print_helper(message, dbg=dbg)
raise ValueError("JSON must include backup_dir") if args.verbose > 0: args_dict["verbose"] = args.verbose else: if "verbose" not in args_dict.keys() and "debug_file" not in args_dict.keys(): args_dict["verbose"] = 0 elif "debug_file" in args_dict.keys(): args_dict["debug_file"] = bu.append_date_file(args_dict["debug_file"]) else: if "backup_dir" not in args_dict.keys() or "src" not in args_dict.keys(): raise ValueError("Dictionary Combination") dbg, print_dbg = bu.calc_debug_levels(args_dict) if "debug_file" in args_dict.keys(): dbc.print_helper(("Using debug file " + args_dict["debug_file"]), dbg=dbg) hostname = bu.calc_hostname(dbg=dbg) if hostname is not None: dest = os.sep.join([args_dict["backup_dir"], hostname]) else: dest = args_dict["backup_dir"] temp, dt_str = bu.calc_directory(args_dict["temp_dir"], dbg=dbg) if not os.path.exists(temp): os.mkdir(temp) update_files(args_dict["src"], dest, temp, dbg=dbg) # zipfile construction os.chdir(args_dict["temp_dir"])
tool = args_dict["tool"] if "auth" in args_dict.keys(): base = [tool, "=".join(["--login-path", args_dict["auth"]])] else: raise ValueError("Auth Required") else: # dbc.print_helper("Password must be included", dbg=dbg) raise ValueError("Incomplete Specification") dbg, print_dbg = bu.calc_debug_levels(args_dict) dest, dt_str = bu.calc_directory(args_dict["temp_dir"], dbg=dbg) if os.path.exists(dest): dbc.print_helper(("Directory Exists: " + dest + os.linesep), dbg=dbg) else: os.mkdir(dest) os.chdir(dest) for tbl in tables: tbl_dest = "".join([os.sep.join([dest, tbl]), ".sql"]) backup_list = base.copy() backup_list.append(tbl) mysql_backup_call(backup_list, tbl_dest, dbg=dbg) os.chdir("../") tarfilename, _ = bu.construct_gzip(args_dict["temp_dir"], dt_str, dbg=dbg) if tarfilename is not None: base_str = "Moving " + tarfilename + " to " + args_dict["backup_dir"] dbc.print_helper(base_str, dbg=dbg)
def exec_rsync(opt, dbg=False, print_dbg=False):
    '''Execute an rsync call described by the options dict `opt`.

    Required keys: 'source' and 'dest' (swapped when opt['reverse'] is
    truthy).  Optional keys 'base', 'delete', 'exclusion' (str or list of
    str) contribute extra rsync arguments.  Raises ValueError when the
    required keys are missing or the destination cannot be created.

    NOTE(review): the print_dbg parameter is immediately recomputed from
    dbg below, so the caller-supplied value is ignored — kept as-is to
    preserve behavior; confirm intent with callers.
    '''
    if 'dest' not in opt or 'source' not in opt:
        raise ValueError(
            "Destination and Source must exist in the options file")
    if opt['reverse']:
        dest, source = opt['source'], opt['dest']
    else:
        dest, source = opt['dest'], opt['source']
    if os.path.exists(dest):
        dbc.print_helper(("Directory Exists: " + dest + os.linesep), dbg=dbg)
    else:
        # Only create the leaf directory; its parent must already exist.
        parent, leaf = os.path.split(dest)
        if leaf and os.path.exists(parent):
            os.mkdir(dest)
        else:
            raise ValueError("Destination must be valid or constructable")
    print_dbg = dbc.test_dbg(dbg)
    rsync_list = ["rsync"]
    for opt_key in ['base', 'delete', 'exclusion']:
        value = opt.get(opt_key)
        if value and isinstance(value, (str, list)):
            if isinstance(value, list):
                rsync_list.extend(value)
            else:
                rsync_list.append(value)
        else:
            # Missing, falsy, or wrong-typed option: note and skip it.
            dbc.print_helper(("Excluded " + opt_key), dbg)
    rsync_list.append("--info=SKIP,DEL,STATS")
    rsync_list.append(source + os.sep)
    rsync_list.append(dest)
    # Last path component of the source doubles as the log label.
    itm = source.split(os.sep)[-1]
    dbc.print_helper("".join(["RSYNC: ", " ".join(rsync_list), os.linesep]), dbg)
    try:
        call_rslt = subp.run(rsync_list, check=True,
                             stdout=subp.PIPE, stderr=subp.PIPE)
        if print_dbg:
            if isinstance(dbg, bool):
                # Plain-bool debug: echo rsync stdout plus a timestamp.
                stamp = dt.datetime.now()
                print("".join([
                    call_rslt.stdout.decode("UTF-8"),
                    os.linesep,
                    stamp.strftime("%Y/%m/%d %H:%M:%S"),
                ]))
            else:
                dbg.write_stdout(itm, call_rslt.stdout)
    except ValueError as v:
        dbc.error_helper("RSYNC Error:", v, post="", dbg=dbg)
    except subp.CalledProcessError as c:
        dbc.error_helper(
            "RSYNC Error:",
            c.stderr,
            post=" ".join([c.output.decode("UTF-8"), str(c.returncode), "\n"]),
            dbg=dbg)