def __call__(self, *args, **kwargs) -> None: if "REPO_REV_FILE_VARS" not in config_vars: # must have a list of variable names to write to the repo-rev file raise ValueError("REPO_REV_FILE_VARS must be defined") repo_rev_vars = list(config_vars["REPO_REV_FILE_VARS"]) # list of configVars to write to the repo-rev file # check that the variable names from REPO_REV_FILE_VARS do not contain # names that must not be made public dangerous_intersection = set(repo_rev_vars).intersection( {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "PRIVATE_KEY", "PRIVATE_KEY_FILE"}) if dangerous_intersection: log.warning("found", str(dangerous_intersection), "in REPO_REV_FILE_VARS, aborting") raise ValueError(f"file REPO_REV_FILE_VARS {dangerous_intersection} and so is forbidden to upload") use_zlib = bool(config_vars.get("USE_ZLIB", "False")) # should we consider zipped files or not zip_extension = "" if use_zlib: zip_extension = config_vars.get("WZLIB_EXTENSION", ".wzip").str() revision_instl_folder_path = Path(config_vars["UPLOAD_REVISION_INSTL_FOLDER"]) # create checksum for the main info_map file, either wzipped or not main_info_map_file_name = "info_map.txt"+zip_extension main_info_map_file = revision_instl_folder_path.joinpath(main_info_map_file_name) main_info_map_checksum = utils.get_file_checksum(main_info_map_file) config_vars["INFO_MAP_FILE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+main_info_map_file_name config_vars["INFO_MAP_CHECKSUM"] = main_info_map_checksum # create checksum for the main index.yaml file, either wzipped or not index_file_name = "index.yaml"+zip_extension index_file_path = revision_instl_folder_path.joinpath(index_file_name) config_vars["INDEX_CHECKSUM"] = utils.get_file_checksum(index_file_path) config_vars["INDEX_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+index_file_name short_index_file_name = "short-index.yaml" short_index_file_path = revision_instl_folder_path.joinpath(short_index_file_name) config_vars["SHORT_INDEX_CHECKSUM"] = utils.get_file_checksum(short_index_file_path) config_vars["SHORT_INDEX_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+short_index_file_name config_vars["INSTL_FOLDER_BASE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl" config_vars["REPO_REV_FOLDER_HIERARCHY"] = "$(__CURR_REPO_FOLDER_HIERARCHY__)" # check that all variables are present # <class 'list'>: ['INSTL_FOLDER_BASE_URL', 'REPO_REV_FOLDER_HIERARCHY', 'SYNC_BASE_URL'] missing_vars = [var for var in repo_rev_vars if var not in config_vars] if missing_vars: raise ValueError(f"{missing_vars} are missing cannot write repo rev file") # create yaml out of the variables variables_as_yaml = config_vars.repr_for_yaml(repo_rev_vars) repo_rev_yaml_doc = aYaml.YamlDumpDocWrap(variables_as_yaml, '!define', "", explicit_start=True, sort_mappings=True) repo_rev_file_path = config_vars["UPLOAD_REVISION_REPO_REV_FILE"] with utils.utf8_open_for_write(repo_rev_file_path, "w") as wfd: aYaml.writeAsYaml(repo_rev_yaml_doc, out_stream=wfd, indentor=None, sort=True) log.info(f"""create {repo_rev_file_path}""")
def do_checksum(self):
    path_to_checksum = var_stack.ResolveVarToStr("__MAIN_INPUT_FILE__")
    if os.path.isfile(path_to_checksum):
        the_checksum = utils.get_file_checksum(path_to_checksum)
        print(": ".join((path_to_checksum, the_checksum)))
    elif os.path.isdir(path_to_checksum):
        for root, dirs, files in os.walk(path_to_checksum):
            for a_file in files:
                a_file_path = os.path.join(root, a_file)
                the_checksum = utils.get_file_checksum(a_file_path)
                print(": ".join((a_file_path, the_checksum)))

def do_check_checksum(self):
    self.progress_staccato_command = True
    bad_checksum_list = list()
    missing_files_list = list()
    self.read_info_map_from_file(var_stack.ResolveVarToStr("__MAIN_INPUT_FILE__"))
    for file_item in self.info_map_table.get_items(what="file"):
        if os.path.isfile(file_item.download_path):
            file_checksum = utils.get_file_checksum(file_item.download_path)
            if not utils.compare_checksums(file_checksum, file_item.checksum):
                bad_checksum_list.append(" ".join(("Bad checksum:", file_item.download_path,
                                                   "expected", file_item.checksum,
                                                   "found", file_checksum)))
        else:
            missing_files_list.append(" ".join((file_item.download_path, "was not found")))
        self.dynamic_progress("Check checksum {file_item.path}".format(**locals()))
    if bad_checksum_list or missing_files_list:
        bad_checksum_list_exception_message = ""
        missing_files_exception_message = ""
        if bad_checksum_list:
            print("\n".join(bad_checksum_list))
            bad_checksum_list_exception_message += "Bad checksum for {} files".format(len(bad_checksum_list))
            print(bad_checksum_list_exception_message)
        if missing_files_list:
            print("\n".join(missing_files_list))
            missing_files_exception_message += "Missing {} files".format(len(missing_files_list))
            print(missing_files_exception_message)
        raise ValueError("\n".join((bad_checksum_list_exception_message, missing_files_exception_message)))

def __call__(self, *args, **kwargs) -> None:
    super().__call__(*args, **kwargs)  # read the info map file from TO_SYNC_INFO_MAP_PATH - if provided
    dl_file_items = self.info_map_table.get_download_items(what="file")
    for file_item in dl_file_items:
        if os.path.isfile(file_item.download_path):
            file_checksum = utils.get_file_checksum(file_item.download_path)
            if not utils.compare_checksums(file_checksum, file_item.checksum):
                self.bad_checksum_list.append(" ".join(("Bad checksum:", file_item.download_path,
                                                        "expected", file_item.checksum,
                                                        "found", file_checksum)))
        else:
            self.missing_files_list.append(" ".join((file_item.download_path, "was not found")))
    if not self.is_checksum_ok():
        report_lines = self.report()
        if self.print_report:
            print("\n".join(report_lines))
        if self.raise_on_bad_checksum:
            exception_message = "\n".join((self.bad_checksum_list_exception_message,
                                           self.missing_files_exception_message))
            raise ValueError(exception_message)

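# The verification variants above all reduce to one core step: recompute a
# file's checksum and compare it with the recorded one. A minimal sketch of
# that step, assuming SHA1 hex digests (utils.get_file_checksum and
# utils.compare_checksums are project helpers; hashlib stands in here):
import hashlib
from pathlib import Path

def verify_file_sketch(path: Path, expected_checksum: str) -> bool:
    """True when the file exists and its SHA1 digest matches, case-insensitively."""
    if not path.is_file():
        return False
    actual = hashlib.sha1(path.read_bytes()).hexdigest()
    return actual.lower() == expected_checksum.lower()
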
def checksum_a_folder(folder_path):
    checksum_of_checksums = 0
    checksum_list = list()
    for item in dir_walk(path=folder_path):
        if item.is_file():
            checksum_list.append(utils.get_file_checksum(item.path))
    checksum_list.sort()
    string_of_checksums = "".join(checksum_list)
    checksum_of_checksums = utils.get_buffer_checksum(string_of_checksums.encode())
    return checksum_of_checksums

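# checksum_a_folder relies on project helpers (dir_walk, utils.get_file_checksum,
# utils.get_buffer_checksum). A self-contained sketch of the same idea, assuming
# SHA1 digests via hashlib: sort the per-file checksums so the result does not
# depend on traversal order, then hash their concatenation.
import hashlib
from pathlib import Path

def sha1_of_folder_sketch(folder: Path) -> str:
    """Order-independent 'checksum of checksums' for all files under folder."""
    checksums = sorted(hashlib.sha1(p.read_bytes()).hexdigest()
                       for p in folder.rglob("*") if p.is_file())
    return hashlib.sha1("".join(checksums).encode()).hexdigest()
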
def __call__(self, *args, **kwargs) -> None:
    # fill the iid_to_svn_item_t table
    self.info_map_table.populate_IIDToSVNItem()

    # get the list of info map file names
    info_map_to_item = dict()
    all_info_map_names = self.items_table.get_unique_detail_values('info_map')
    for infomap_file_name in all_info_map_names:
        info_map_file_path = self.work_folder.joinpath(infomap_file_name)
        if info_map_file_path.is_file():
            log.info(f"{infomap_file_name} was found so no need to create it")
            # file already exists, probably copied from the "Common" repository
            # just checking that the file is also zipped
            zip_infomap_file_name = config_vars.resolve_str(infomap_file_name + "$(WZLIB_EXTENSION)")
            zip_info_map_file_path = self.work_folder.joinpath(zip_infomap_file_name)
            if not zip_info_map_file_path.is_file():
                raise FileNotFoundError(f"found {info_map_file_path} but not {zip_info_map_file_path}")
        else:
            self.info_map_table.mark_items_required_by_infomap(infomap_file_name)
            info_map_items = self.info_map_table.get_required_items()
            info_map_to_item[infomap_file_name] = info_map_items

    # the named info_map files and their wzip versions should be added to the default info_map
    files_to_add_to_default_info_map = list()

    # write each info map to file
    for infomap_file_name, info_map_items in info_map_to_item.items():
        if info_map_items:  # could be that no items are linked to the info map file
            info_map_file_path = self.work_folder.joinpath(infomap_file_name)
            self.info_map_table.write_to_file(in_file=info_map_file_path,
                                              items_list=info_map_items,
                                              field_to_write=self.fields_relevant_to_info_map)
            files_to_add_to_default_info_map.append(info_map_file_path)
            zip_infomap_file_name = config_vars.resolve_str(infomap_file_name + "$(WZLIB_EXTENSION)")
            zip_info_map_file_path = self.work_folder.joinpath(zip_infomap_file_name)
            with Wzip(info_map_file_path, self.work_folder, own_progress_count=0) as wzipper:
                wzipper()
            files_to_add_to_default_info_map.append(zip_info_map_file_path)

    # add the default info map
    default_info_map_file_name = str(config_vars["MAIN_INFO_MAP_FILE_NAME"])
    default_info_map_file_path = self.work_folder.joinpath(default_info_map_file_name)
    info_map_items = self.info_map_table.get_items_for_default_infomap()
    self.info_map_table.write_to_file(in_file=default_info_map_file_path,
                                      items_list=info_map_items,
                                      field_to_write=self.fields_relevant_to_info_map)
    with Wzip(default_info_map_file_path, self.work_folder, own_progress_count=0) as wzipper:
        wzipper()

    # add a line to the default info map for each non-default info_map created above
    with utils.utf8_open_for_write(default_info_map_file_path, "a") as wfd:
        for file_to_add in files_to_add_to_default_info_map:
            file_checksum = utils.get_file_checksum(file_to_add)
            file_size = file_to_add.stat().st_size
            # todo: make path relative
            line_for_main_info_map = f"instl/{file_to_add.name}, f, {config_vars['TARGET_REPO_REV'].str()}, {file_checksum}, {file_size}\n"
            wfd.write(line_for_main_info_map)

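# Wzip is an instl-specific helper. A minimal sketch of the compress-then-record
# step using zlib directly; the manifest row mirrors the line format written
# above, and the raw-zlib ".wzip" framing is an assumption:
import hashlib
import zlib
from pathlib import Path

def wzip_and_record_sketch(src: Path, manifest: Path, repo_rev: int) -> Path:
    """Write a zlib-compressed copy of src and append a manifest row for src."""
    zipped = src.with_name(src.name + ".wzip")
    data = src.read_bytes()
    zipped.write_bytes(zlib.compress(data))
    checksum = hashlib.sha1(data).hexdigest()
    with open(manifest, "a", encoding="utf-8") as m:
        m.write(f"instl/{src.name}, f, {repo_rev}, {checksum}, {len(data)}\n")
    return zipped
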
def do_check_checksum(self):
    self.progress_staccato_command = True
    bad_checksum_list = list()
    self.read_info_map_from_file(var_stack.ResolveVarToStr("__MAIN_INPUT_FILE__"))
    for file_item in self.info_map_table.get_items(what="file"):
        if os.path.isfile(file_item.path):
            file_checksum = utils.get_file_checksum(file_item.path)
            if not utils.compare_checksums(file_checksum, file_item.checksum):
                sigs = utils.create_file_signatures(file_item.path)
                bad_checksum_list.append(" ".join(("Bad checksum:", file_item.path,
                                                   "expected", file_item.checksum,
                                                   "found", sigs["sha1_checksum"])))
        else:
            bad_checksum_list.append(" ".join((file_item.path, "does not exist")))
        self.dynamic_progress("Check checksum {file_item.path}".format(**locals()))
    if bad_checksum_list:
        print("\n".join(bad_checksum_list))
        raise ValueError("Bad checksum for " + str(len(bad_checksum_list)) + " files")

def check_tarinfo(tarinfo):
    for ig in ignore_files:
        if tarinfo.name.endswith(ig):
            return None
    tarinfo.uid = tarinfo.gid = 0
    tarinfo.uname = tarinfo.gname = "waves"
    if os.path.isfile(tarinfo.path):
        # wtar should be idempotent. tarfile code adds "mtime" to
        # each file's pax_headers. We add "checksum" to pax_headers.
        # The result is that these two values are written to the tar
        # file in no particular order, and tarring the same file twice
        # might produce different results. By supplying the mtime
        # ourselves AND passing an OrderedDict as the pax_headers,
        # hopefully the tar files will be the same each time.
        file_pax_headers = OrderedDict()
        file_pax_headers["checksum"] = utils.get_file_checksum(tarinfo.path)
        mode_time = str(float(os.lstat(tarinfo.path)[stat.ST_MTIME]))
        file_pax_headers["mtime"] = mode_time
        tarinfo.pax_headers = file_pax_headers
    return tarinfo

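# check_tarinfo is meant to be passed as a tarfile filter. A minimal runnable
# sketch of that wiring with only the stdlib (the ignore list, names, and the
# target folder are illustrative assumptions):
import tarfile

IGNORE_SUFFIXES = (".DS_Store",)  # hypothetical ignore list

def normalizing_filter(tarinfo):
    """Drop ignored names and normalize ownership so repeated runs match."""
    if tarinfo.name.endswith(IGNORE_SUFFIXES):
        return None
    tarinfo.uid = tarinfo.gid = 0
    tarinfo.uname = tarinfo.gname = "waves"
    return tarinfo

with tarfile.open("sample.tar.bz2", "w:bz2", format=tarfile.PAX_FORMAT) as tar:
    tar.add("some_folder", filter=normalizing_filter)  # assumes some_folder exists
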
def __call__(self, *args, **kwargs) -> None:
    super().__call__(*args, **kwargs)  # read the info map file from TO_SYNC_INFO_MAP_PATH - if provided
    dl_file_items = self.info_map_table.get_download_items(what="file")

    utils.wait_for_break_file_to_be_removed(
        config_vars['LOCAL_SYNC_DIR'].Path(resolve=True).joinpath("BREAK_BEFORE_CHECKSUM"),
        self.break_file_callback)

    for file_item in dl_file_items:
        self.doing = f"""check checksum for '{file_item.download_path}'"""
        super().increment_and_output_progress(increment_by=1, prog_msg=self.doing)
        if os.path.isfile(file_item.download_path):
            file_checksum = utils.get_file_checksum(file_item.download_path)
            if not utils.compare_checksums(file_checksum, file_item.checksum):
                self.num_bad_files += 1
                super().increment_and_output_progress(
                    increment_by=0,
                    prog_msg=f"bad checksum for '{file_item.download_path}'\nexpected: {file_item.checksum}, found: {file_checksum}")
                self.lists_of_files["bad_checksum"].append(" ".join(("Bad checksum:", file_item.download_path,
                                                                     "expected", file_item.checksum,
                                                                     "found", file_checksum)))
                self.lists_of_files["to redownload"].append(file_item)
        else:
            self.num_bad_files += 1
            super().increment_and_output_progress(increment_by=0,
                                                  prog_msg=f"missing file '{file_item.download_path}'")
            self.lists_of_files["missing_files"].append(" ".join((file_item.download_path, "was not found")))
            self.lists_of_files["to redownload"].append(file_item)
        if self.max_bad_files_to_redownload is not None and self.num_bad_files > self.max_bad_files_to_redownload:
            super().increment_and_output_progress(increment_by=0,
                                                  prog_msg="stopping checksum check, too many bad or missing files found")
            break

    if not self.is_checksum_ok():
        if self.max_bad_files_to_redownload is not None and self.num_bad_files <= self.max_bad_files_to_redownload:
            utils.wait_for_break_file_to_be_removed(
                config_vars['LOCAL_SYNC_DIR'].Path(resolve=True).joinpath("BREAK_BEFORE_REDOWNLOAD"),
                self.break_file_callback)
            self.re_download_bad_files()
        if not self.is_checksum_ok():  # some files still not OK after re_download_bad_files
            if self.raise_on_bad_checksum:
                exception_message = "\n".join((f'Bad checksum for {len(self.lists_of_files["bad_checksum"])} files',
                                               f'Missing {len(self.lists_of_files["missing_files"])} files'))
                raise ValueError(exception_message)

def win_item_ls(the_path, ls_format, root_folder=None):
    import win32security
    the_parts = dict()
    the_error = None
    the_path_str = os.fspath(the_path)
    if 'p' in ls_format:
        the_parts['p'] = the_path_str
    elif 'P' in ls_format:
        the_parts['P'] = the_path_str
    try:
        the_stats = os.lstat(the_path)
        for format_char in ls_format:
            if format_char == 'T':
                the_parts[format_char] = time.strftime("%Y/%m/%d %H:%M:%S",
                                                       time.gmtime(the_stats[stat.ST_MTIME]))  # modification time
            elif format_char == 'D':
                if 'p' in ls_format.lower():  # 'p' or 'P'
                    if stat.S_ISDIR(the_stats.st_mode):
                        the_parts[format_char] = "<DIR>"
                    else:
                        the_parts[format_char] = ""
            elif format_char == 'S':
                the_parts[format_char] = the_stats[stat.ST_SIZE]  # size in bytes
            elif format_char == 'U':
                try:
                    sd = win32security.GetFileSecurity(the_path_str, win32security.OWNER_SECURITY_INFORMATION)
                    owner_sid = sd.GetSecurityDescriptorOwner()
                    name, domain, __type = win32security.LookupAccountSid(None, owner_sid)
                    the_parts[format_char] = domain + "\\" + name  # user
                except Exception as ex:
                    # we sometimes get exception: 'LookupAccountSid, No mapping between account names and security IDs was done.'
                    the_parts[format_char] = "Unknown user"
            elif format_char == 'G':
                try:
                    sd = win32security.GetFileSecurity(the_path_str, win32security.GROUP_SECURITY_INFORMATION)
                    owner_sid = sd.GetSecurityDescriptorGroup()
                    name, domain, __type = win32security.LookupAccountSid(None, owner_sid)
                    the_parts[format_char] = domain + "\\" + name  # group
                except Exception as ex:
                    # we sometimes get exception: 'LookupAccountSid, No mapping between account names and security IDs was done.'
                    the_parts[format_char] = "Unknown group"
            elif format_char == 'C':
                if not (stat.S_ISLNK(the_stats.st_mode) or stat.S_ISDIR(the_stats.st_mode)):
                    the_parts[format_char] = utils.get_file_checksum(the_path)
                else:
                    the_parts[format_char] = ""
            elif format_char == 'P':
                as_posix = PurePath(the_path).as_posix()
                the_parts[format_char] = str(as_posix)
            elif format_char == 'p' and root_folder is not None:
                relative_path = PurePath(the_path).relative_to(PurePath(root_folder))
                the_parts[format_char] = str(relative_path.as_posix())
            elif format_char == 'a' or format_char == 'f':
                import subprocess
                the_parts[format_char] = "[]"
                completed_process = subprocess.run(f'attrib "{the_path_str}"', shell=True,
                                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                if completed_process.returncode != 0:
                    the_parts[format_char] = utils.unicodify(completed_process.stderr)
                else:
                    ls_line = utils.unicodify(completed_process.stdout)
                    flag_matches = re.search(r"(?P<attribs>(A|R|S|H|O|I|X|P|U|\s)+?)\s+[A-Z]:", ls_line)
                    if flag_matches:
                        flags = "".join(flag_matches.group('attribs').split())
                        if flags:
                            the_parts[format_char] = flags
    except Exception as ex:
        the_error = [the_path_str, ex.strerror]
    return the_parts, the_error

def unix_item_ls(the_path, ls_format, root_folder=None):
    import grp
    import pwd
    the_parts = dict()
    the_error = None
    the_path_str = os.fspath(the_path)
    if 'p' in ls_format:
        the_parts['p'] = the_path_str
    elif 'P' in ls_format:
        the_parts['P'] = the_path_str
    try:
        the_stats = os.lstat(the_path)
        for format_char in ls_format:
            if format_char == 'I':
                the_parts[format_char] = the_stats[stat.ST_INO]  # inode number
            elif format_char == 'R':
                the_parts[format_char] = utils.unix_permissions_to_str(the_stats.st_mode)  # permissions
            elif format_char == 'L':
                the_parts[format_char] = the_stats[stat.ST_NLINK]  # num links
            elif format_char == 'u':
                try:
                    the_parts[format_char] = str(the_stats[stat.ST_UID])[0]  # unknown user name, get the number
                except Exception:
                    the_parts[format_char] = "no_uid"
            elif format_char == 'U':
                try:
                    the_parts[format_char] = pwd.getpwuid(the_stats[stat.ST_UID])[0]  # user
                except KeyError:
                    the_parts[format_char] = str(the_stats[stat.ST_UID])[0]  # unknown user name, get the number
                except Exception:
                    the_parts[format_char] = "no_uid"
            elif format_char == 'g':
                try:
                    the_parts[format_char] = str(the_stats[stat.ST_GID])[0]  # unknown group name, get the number
                except Exception:
                    the_parts[format_char] = "no_gid"
            elif format_char == 'G':
                try:
                    the_parts[format_char] = grp.getgrgid(the_stats[stat.ST_GID])[0]  # group
                except KeyError:
                    the_parts[format_char] = str(the_stats[stat.ST_GID])[0]  # unknown group name, get the number
                except Exception:
                    the_parts[format_char] = "no_gid"
            elif format_char == 'S':
                the_parts[format_char] = the_stats[stat.ST_SIZE]  # size in bytes
            elif format_char == 'T':
                the_parts[format_char] = time.strftime("%Y/%m/%d-%H:%M:%S",
                                                       time.gmtime(the_stats[stat.ST_MTIME]))  # modification time
            elif format_char == 'C':
                if not (stat.S_ISLNK(the_stats.st_mode) or stat.S_ISDIR(the_stats.st_mode)):
                    the_parts[format_char] = utils.get_file_checksum(the_path)
                else:
                    the_parts[format_char] = ""
            elif format_char == 'P' or format_char == 'p':
                path_to_return = the_path_str
                if format_char == 'p' and root_folder is not None:
                    path_to_return = os.path.relpath(the_path, start=root_folder)
                # 'E' will bring us extra data (path postfix) but we want to know if it's a DIR in any case
                if stat.S_ISDIR(the_stats.st_mode) and 'D' in ls_format:
                    path_to_return += '/'
                if 'E' in ls_format:
                    if stat.S_ISLNK(the_stats.st_mode):
                        path_to_return += '@'
                    elif not stat.S_ISDIR(the_stats.st_mode) and (the_stats.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)):
                        path_to_return += '*'
                    elif stat.S_ISSOCK(the_stats.st_mode):
                        path_to_return += '='
                    elif stat.S_ISFIFO(the_stats.st_mode):
                        path_to_return += '|'
                the_parts[format_char] = path_to_return
            elif format_char == 'a' or format_char == 'f':
                import subprocess
                completed_process = subprocess.run(f'ls -lO "{the_path_str}"', shell=True,
                                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                if completed_process.returncode != 0:
                    the_parts[format_char] = utils.unicodify(completed_process.stderr)
                else:
                    ls_line = utils.unicodify(completed_process.stdout)
                    flag_matches = re.findall(
                        "arch|archived|opaque|nodump|sappnd|sappend|schg|schange|simmutable|uappnd|uappend|uchg|uchange|uimmutable|hidden",
                        ls_line)
                    if flag_matches:
                        the_parts[format_char] = ",".join(flag_matches)
                    else:
                        the_parts[format_char] = "[]"
    except Exception as ex:
        the_error = [the_path_str, ex.strerror]
    return the_parts, the_error

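# Both listers walk the format string character by character and fill a dict.
# A reduced, self-contained sketch of that dispatch pattern (this subset of
# format letters matches the functions above; the rest is omitted):
import os
import stat
import time

def item_ls_sketch(path: str, ls_format: str) -> dict:
    """'S' -> size in bytes, 'T' -> modification time, 'P' -> path."""
    st = os.lstat(path)
    parts = {}
    for ch in ls_format:
        if ch == 'S':
            parts[ch] = st[stat.ST_SIZE]
        elif ch == 'T':
            parts[ch] = time.strftime("%Y/%m/%d-%H:%M:%S", time.gmtime(st[stat.ST_MTIME]))
        elif ch == 'P':
            parts[ch] = path
    return parts

# e.g. item_ls_sketch("/etc/hosts", "STP") -> {'S': ..., 'T': ..., 'P': '/etc/hosts'}
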
#the_wtar = "C:\\Users\\shai\\Desktop\\CODEX.bundle\\Contents\\Resources.wtar.aa" #the_wtar = "/p4client/dev_main/ProAudio/Products/Release/Plugins/CODEX.bundle/Contents/Resources.wtar.aa" #the_folder = "/p4client/dev_main/ProAudio/Products/Release/Plugins/CODEX.bundle/Contents" the_folder = os.curdir test_create = False test_unwtar = True tar_file_name = "sample.tar.PAX_FORMAT.bz2" if test_create: pax_headers = dict() all_checksums = "" checksum_list = list() for item in dir_walk(path="Resources"): if item.is_file(): pax_headers[item.path] = utils.get_file_checksum(item.path) pax_headers['checksum_of_checksums'] = checksum_a_folder("Resources") with tarfile.open(tar_file_name, "w|bz2", format= tarfile.PAX_FORMAT, pax_headers=pax_headers) as tar: for item in dir_walk(path="Resources"): if item.is_file(): tar.add(item.path) print('creating tar, pax_headers:', tar.pax_headers) print('creating tar, checksum_of_checksums:', tar.pax_headers['checksum_of_checksums']) if test_unwtar: first_split = tar_file_name+".aa" split_files = utils.find_split_files(first_split) #utils.safe_remove_folder(os.path.join(the_folder, "unwtarred_no_checks", "Resources")) unwtar_no_checks(split_files, "unwtarred_no_checks")
def win_item_ls(the_path, ls_format, root_folder=None):
    import win32security
    the_parts = dict()
    if 'p' in ls_format or 'P' in ls_format:
        the_parts['p'] = the_path
    try:
        the_stats = os.lstat(the_path)
        for format_char in ls_format:
            if format_char == 'T':
                the_parts[format_char] = time.strftime("%Y/%m/%d %H:%M:%S",
                                                       time.gmtime(the_stats[stat.ST_MTIME]))  # modification time
            elif format_char == 'D':
                if 'p' in ls_format.lower():  # 'p' or 'P'
                    if stat.S_ISDIR(the_stats.st_mode):
                        the_parts[format_char] = "<DIR>"
                    else:
                        the_parts[format_char] = ""
            elif format_char == 'S':
                the_parts[format_char] = the_stats[stat.ST_SIZE]  # size in bytes
            elif format_char == 'U':
                try:
                    sd = win32security.GetFileSecurity(the_path, win32security.OWNER_SECURITY_INFORMATION)
                    owner_sid = sd.GetSecurityDescriptorOwner()
                    name, domain, __type = win32security.LookupAccountSid(None, owner_sid)
                    the_parts[format_char] = domain + "\\" + name  # user
                except Exception as ex:
                    # we sometimes get exception: 'LookupAccountSid, No mapping between account names and security IDs was done.'
                    the_parts[format_char] = "Unknown user"
            elif format_char == 'G':
                try:
                    sd = win32security.GetFileSecurity(the_path, win32security.GROUP_SECURITY_INFORMATION)
                    owner_sid = sd.GetSecurityDescriptorGroup()
                    name, domain, __type = win32security.LookupAccountSid(None, owner_sid)
                    the_parts[format_char] = domain + "\\" + name  # group
                except Exception as ex:
                    # we sometimes get exception: 'LookupAccountSid, No mapping between account names and security IDs was done.'
                    the_parts[format_char] = "Unknown group"
            elif format_char == 'C':
                if not (stat.S_ISLNK(the_stats.st_mode) or stat.S_ISDIR(the_stats.st_mode)):
                    the_parts[format_char] = utils.get_file_checksum(the_path)
                else:
                    the_parts[format_char] = ""
            elif format_char == 'P':
                as_posix = PurePath(the_path).as_posix()
                the_parts[format_char] = str(as_posix)
            elif format_char == 'p' and root_folder is not None:
                relative_path = PurePath(the_path).relative_to(PurePath(root_folder))
                the_parts[format_char] = str(relative_path.as_posix())
    except Exception as ex:
        pass
    return the_parts

def unix_item_ls(the_path, ls_format, root_folder=None):
    import grp
    import pwd
    the_parts = dict()
    if 'p' in ls_format or 'P' in ls_format:
        the_parts['p'] = the_path
    try:
        the_stats = os.lstat(the_path)
        for format_char in ls_format:
            if format_char == 'I':
                the_parts[format_char] = the_stats[stat.ST_INO]  # inode number
            elif format_char == 'R':
                the_parts[format_char] = utils.unix_permissions_to_str(the_stats.st_mode)  # permissions
            elif format_char == 'L':
                the_parts[format_char] = the_stats[stat.ST_NLINK]  # num links
            elif format_char == 'u':
                try:
                    the_parts[format_char] = str(the_stats[stat.ST_UID])[0]  # unknown user name, get the number
                except Exception:
                    the_parts[format_char] = "no_uid"
            elif format_char == 'U':
                try:
                    the_parts[format_char] = pwd.getpwuid(the_stats[stat.ST_UID])[0]  # user
                except KeyError:
                    the_parts[format_char] = str(the_stats[stat.ST_UID])[0]  # unknown user name, get the number
                except Exception:
                    the_parts[format_char] = "no_uid"
            elif format_char == 'g':
                try:
                    the_parts[format_char] = str(the_stats[stat.ST_GID])[0]  # unknown group name, get the number
                except Exception:
                    the_parts[format_char] = "no_gid"
            elif format_char == 'G':
                try:
                    the_parts[format_char] = grp.getgrgid(the_stats[stat.ST_GID])[0]  # group
                except KeyError:
                    the_parts[format_char] = str(the_stats[stat.ST_GID])[0]  # unknown group name, get the number
                except Exception:
                    the_parts[format_char] = "no_gid"
            elif format_char == 'S':
                the_parts[format_char] = the_stats[stat.ST_SIZE]  # size in bytes
            elif format_char == 'T':
                the_parts[format_char] = time.strftime("%Y/%m/%d-%H:%M:%S",
                                                       time.gmtime(the_stats[stat.ST_MTIME]))  # modification time
            elif format_char == 'C':
                if not (stat.S_ISLNK(the_stats.st_mode) or stat.S_ISDIR(the_stats.st_mode)):
                    the_parts[format_char] = utils.get_file_checksum(the_path)
                else:
                    the_parts[format_char] = ""
            elif format_char == 'P' or format_char == 'p':
                path_to_return = the_path
                if format_char == 'p' and root_folder is not None:
                    path_to_return = os.path.relpath(the_path, start=root_folder)
                # 'E' will bring us extra data (path postfix) but we want to know if it's a DIR in any case
                if stat.S_ISDIR(the_stats.st_mode) and 'D' in ls_format:
                    path_to_return += '/'
                if 'E' in ls_format:
                    if stat.S_ISLNK(the_stats.st_mode):
                        path_to_return += '@'
                    elif not stat.S_ISDIR(the_stats.st_mode) and (the_stats.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)):
                        path_to_return += '*'
                    elif stat.S_ISSOCK(the_stats.st_mode):
                        path_to_return += '='
                    elif stat.S_ISFIFO(the_stats.st_mode):
                        path_to_return += '|'
                the_parts[format_char] = path_to_return
    except Exception as ex:
        pass
    return the_parts