def test_permissions_non_recursive(self):
    """ tests permissions assigns permissions non-recursively and writes to log

    Each entry in test_permissions pairs the kwargs for permissions()
    with the expected ``stat.filemode`` string of the target path.
    (The original docstring said "recursively" — this is the
    non-recursive variant; no ``recursive=True`` is passed.)
    """
    test_permissions = [
        [{'path': '/tmp/scripts_test/app_user/sites/app_name/source',
          'dir_permissions': '500'}, 'dr-x------'],
        [{'path': '/tmp/scripts_test/app_user/sites/app_name/source/app_name',
          'dir_permissions': '770'}, 'drwxrwx---'],
        [{'path': '/tmp/scripts_test/app_user/sites/app_name/source/app_name/file',
          'file_permissions': '400'}, '-r--------'],
    ]
    app_home_nested_file = os.path.join(self.app_home, 'app_name', 'file')
    runlog = CommandFileUtils(self.dist_version, self.log_file, self.log_level)
    for i in test_permissions:
        # Re-create the fixture tree for every permission scenario.
        os.makedirs(os.path.join(self.app_home, 'app_name'))
        with open(app_home_nested_file, 'w') as file:
            file.write('some text')
        runlog.permissions(**i[0])
        self.assertEqual(i[1], stat.filemode(os.stat(i[0]['path']).st_mode),
                         stat.filemode(os.stat(i[0]['path']).st_mode))
        # The expected log message differs for directories vs. files.
        if os.path.isdir(i[0]['path']):
            self.log('INFO: changed permissions of %s to %s' % (i[0]['path'], i[0]['dir_permissions']))
        elif os.path.isfile(i[0]['path']):
            self.log('INFO: changed permissions of %s to %s' % (i[0]['path'], i[0]['file_permissions']))
        # Restore owner rwx everywhere so the tree can be removed.
        os.chmod(self.app_home, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        for root, dirs, files in os.walk(self.app_home):
            for name in dirs:
                os.chmod(os.path.join(root, name), stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        shutil.rmtree(self.app_home)
def get_item_data(self, item):
    """Build the formatting dict for one archive *item* (byte-keyed mapping).

    Derives an ls-style mode string, resolves symlink/hardlink source info,
    and evaluates the formatter callables in self.used_call_keys.
    """
    mode = stat.filemode(item[b'mode'])
    item_type = mode[0]
    # Copy the shared template instead of aliasing it, so per-item keys
    # cannot leak into self.item_data across calls (consistent with the
    # newer get_item_data variant, which also starts from a fresh dict).
    item_data = dict(self.item_data)
    source = item.get(b'source', '')
    extra = ''
    if source:
        source = remove_surrogates(source)
        if item_type == 'l':
            # Symlink: show the target.
            extra = ' -> %s' % source
        else:
            # Regular file with a 'source': it is a hardlink.
            mode = 'h' + mode[1:]
            extra = ' link to %s' % source
    item_data['type'] = item_type
    item_data['mode'] = mode
    item_data['user'] = item[b'user'] or item[b'uid']
    item_data['group'] = item[b'group'] or item[b'gid']
    item_data['uid'] = item[b'uid']
    item_data['gid'] = item[b'gid']
    item_data['path'] = remove_surrogates(item[b'path'])
    item_data['bpath'] = item[b'path']
    item_data['source'] = source
    item_data['linktarget'] = source
    item_data['extra'] = extra
    for key in self.used_call_keys:
        item_data[key] = self.call_keys[key](item)
    return item_data
def get_item_data(self, item):
    """Build the formatting dict for one archive *item*.

    Starts from a copy of self.item_data, then fills in type/mode/owner,
    link info, health status (layout differs in json-lines mode), and the
    formatter callables listed in self.used_call_keys.
    """
    item_data = {}
    item_data.update(self.item_data)
    mode = stat.filemode(item.mode)
    item_type = mode[0]
    source = item.get('source', '')
    extra = ''
    if source:
        source = remove_surrogates(source)
        if item_type == 'l':
            # Symlink: show the target.
            extra = ' -> %s' % source
        else:
            # Regular file with a 'source': it is a hardlink.
            mode = 'h' + mode[1:]
            extra = ' link to %s' % source
    item_data['type'] = item_type
    item_data['mode'] = mode
    item_data['user'] = item.user or item.uid
    item_data['group'] = item.group or item.gid
    item_data['uid'] = item.uid
    item_data['gid'] = item.gid
    item_data['path'] = remove_surrogates(item.path)
    if self.json_lines:
        # json-lines output carries a boolean instead of bpath/extra/health.
        item_data['healthy'] = 'chunks_healthy' not in item
    else:
        item_data['bpath'] = item.path
        item_data['extra'] = extra
        # Presence of 'chunks_healthy' marks an item with broken chunks.
        item_data['health'] = 'broken' if 'chunks_healthy' in item else 'healthy'
    item_data['source'] = source
    item_data['linktarget'] = source
    item_data['flags'] = item.get('bsdflags')
    for key in self.used_call_keys:
        item_data[key] = self.call_keys[key](item)
    return item_data
def get_mode(self, fname=TESTFN, lstat=True):
    """Return (st_mode, filemode-string) for *fname*.

    Uses os.lstat when *lstat* is true (do not follow symlinks),
    os.stat otherwise.
    """
    stat_func = os.lstat if lstat else os.stat
    st_mode = stat_func(fname).st_mode
    return st_mode, stat.filemode(st_mode)
async def build_list_string(self, connection, path):
    """Build one unix-``ls``-style listing line for *path*.

    The mtime column shows hour:minute for entries modified within the
    last half year, and the year otherwise (like GNU ls).
    """
    stats = await connection.path_io.stat(path)
    now = time.time()
    local_mtime = time.localtime(stats.st_mtime)
    half_year = 365 * 24 * 60 * 60 / 2
    if now - half_year < stats.st_mtime <= now:
        mtime = time.strftime('%b %e %H:%M', local_mtime)
    else:
        mtime = time.strftime('%b %e %Y', local_mtime)
    fields = (
        stat.filemode(stats.st_mode),
        str(stats.st_nlink),
        "none",
        "none",
        str(stats.st_size),
        mtime,
        path.name,
    )
    return " ".join(fields)
def ls(directory, filename, args):
    """Build one `ls`-style output line for *filename* in *directory*.

    Returns a map of strings (long format fields when args.long, otherwise
    just the name), or None when the entry cannot be stat'ed (missing or
    permission denied — such entries are effectively invisible).
    """
    full_path = os.path.expanduser(os.path.join(directory, filename))
    try:
        st = os.stat(full_path)
    except FileNotFoundError:
        return None  # Ignore it if we cannot open a file
    except PermissionError:
        return None  # If we cannot access the file, we effectively cannot see it
    fields = []
    if args.long:
        fields.append(stat.filemode(st.st_mode))
        fields.append(st.st_nlink)
        fields.append(st.st_uid)
        fields.append(st.st_gid)
        # Humanize the size only on request.
        fields.append(sizeof_fmt(st.st_size) if args.human_readable else st.st_size)
        fields.append(time.ctime(st.st_mtime))
    # The actual filename comes last.
    fields.append(filename)
    return map(str, fields)
def perform_list(self):
    """Serve an FTP LIST command.

    Opens the data connection (connecting out in ACTIVE mode, accepting
    in PASSIVE mode), sends one `ls -l`-style line per entry of the
    current directory, then closes the data socket and sets the reply.
    """
    if self.ftp_mode == FtpMode.ACTIVE:
        # Active mode requires a prior PORT: remote endpoint must be known.
        if self.remote_host == '' or self.remote_port == 0:
            self.reply = '503 bad sequence of commands\r\n'
            return
        self.data_socket = socket.socket()
        self.data_socket.connect((self.remote_host, self.remote_port))
    elif self.ftp_mode == FtpMode.PASSIVE:
        # Passive mode: the client connects to our listening socket.
        self.data_socket, addr = self.server_socket.accept()
    self.command_connection.send(bytes('125 data connection already open\r\n', 'utf-8'))
    if not self.current_directory.endswith('/'):
        self.current_directory += '/'
    try:
        for i in os.listdir(self.current_directory):
            # Resolve mode string, owner/group names and size per entry.
            permission = stat.filemode(os.stat(self.current_directory + i).st_mode)
            stat_info = os.stat(self.current_directory + i)
            uid = stat_info.st_uid
            gid = stat_info.st_gid
            user = pwd.getpwuid(uid)[0]
            group = grp.getgrgid(gid)[0]
            file_size = os.stat(self.current_directory + i).st_size
            self.data_socket.send(bytes(permission + ' 1 ' + user + " " + group + " " + str(file_size) + " " + i + '\r\n', 'utf-8'))
    except PermissionError:
        # Unreadable entries end the listing silently (best effort).
        pass
    except FileNotFoundError:
        pass
    self.data_socket.close()
    self.reply = '226 transfer complete\r\n'
def do_ls(self, args):
    """Shell `ls` command.

    With the `-l` flag prints mode, owner, size and mtime per entry;
    otherwise prints bare names. Hidden entries (leading '.') are
    always skipped. Operates on the current working directory.
    """
    flags = []
    if args:
        args = args.split()
        args, flags = PyShell.parse_args(args);
    if flags:
        if flags[0] == '-l':
            for item in os.scandir():
                if not item.name.startswith('.'):
                    # permissions
                    print(stat.filemode(os.stat(item.path).st_mode), end=" ")
                    # owner
                    st = os.stat(item.path)
                    print(pwd.getpwuid(st.st_uid).pw_name, end=" ")
                    # size of file
                    print(st.st_size, end=" ")
                    # last modified
                    month = PyShell.get_month(int(time.strftime("%m", time.localtime(st.st_mtime))))
                    print(month, end=" ")
                    print(time.strftime("%d %I:%M", time.localtime(st.st_mtime)), end=" ")
                    # actual file name
                    print (item.name);
    else:
        for item in os.listdir():
            if not item[0].startswith('.'):
                print(item)
def list_dir(self, realpath, options=None):
    '''List directory entries.

    Each line looks like this:
    -rwxrwxrwx 1 user group 1024 Feb 4 2017 config.py

    :param realpath: filesystem path of the directory to list.
    :param options: optional dict of flags; 'a' includes dotfiles.
    :return: the listing encoded with self.encoding (directories first).
    '''
    # Use None as default instead of a shared mutable dict.
    if options is None:
        options = {}
    dirs = {}
    files = {}
    opt_a = options.get('a')
    for name in os.listdir(realpath):
        # Hide entries starting with `.`
        if not opt_a and name.startswith('.'):
            continue
        item = os.path.join(realpath, name)
        try:
            st = os.stat(item)
        except OSError:
            # Entry vanished or is unreadable: skip it (was a bare except).
            continue
        line = '%s %d user group %d %s %s\n' % (
            stat.filemode(st.st_mode), 1, st.st_size,
            time_string(st.st_mtime), name)
        if os.path.isdir(item):
            dirs[name] = line
        else:
            files[name] = line
    # Directories first, then files, each in listdir order.
    res = ''.join(line for dic in (dirs, files) for line in dic.values())
    return res.encode(self.encoding, 'replace')
def print_long_format(directory, file_list):
    """Print files in a format similar to `ls -l`.

    Entries modified more than a year ago show the year; newer ones show
    hour:minute. *file_list* holds path-like objects with .lstat()/.name.
    """
    one_year = 365.25 * 24 * 60 * 60
    rendered = []
    for entry in file_list:
        st = entry.lstat()
        mtime = os.path.getmtime(str(entry))
        # Choose the timestamp layout based on the entry's age.
        if time.time() - mtime > one_year:
            when = datetime.datetime.fromtimestamp(float(mtime)).strftime('%d %b %Y')
        else:
            when = datetime.datetime.fromtimestamp(float(mtime)).strftime('%d %b %H:%M')
        rendered.append('{0} {1:4} {2:6} {3} {4}'.format(
            stat.filemode(st.st_mode), st.st_nlink, st.st_size, when, entry.name))
    print('\n'.join(rendered))
def _validate_path(self, file_path: pathlib.Path) -> str:
    """Check that *file_path* is an existing, executable regular file.

    Returns a human-readable error message, or None when the path is OK.
    """
    if not file_path.is_file():
        return 'File does not exist: {}'.format(file_path)
    mode = file_path.stat().st_mode
    # Any of the three execute bits (user/group/other) is sufficient.
    any_exec = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    if not mode & any_exec:
        return 'File is not executable. Mode is {}: {}'.format(stat.filemode(mode), file_path)
    return None
def test_permissions_recursive(self):
    """ tests permissions assigns permissions recursively and writes to log """
    # Each row: [file perms, dir perms, expected file filemode, expected dir filemode]
    test_permissions = [
        ['500', '700', '-r-x------', 'drwx------'],
        ['400', '500', '-r--------', 'dr-x------'],
        ['550', '770', '-r-xr-x---', 'drwxrwx---'],
        ['440', '550', '-r--r-----', 'dr-xr-x---'],
        ['644', '755', '-rw-r--r--', 'drwxr-xr-x'],
        ['755', '755', '-rwxr-xr-x', 'drwxr-xr-x']
    ]
    app_home_nested_file = os.path.join(self.app_home, 'app_name', 'file')
    runlog = CommandFileUtils(self.dist_version, self.log_file, self.log_level)
    for i in test_permissions:
        # Re-create the fixture tree for every permission scenario.
        os.makedirs(os.path.join(self.app_home, 'app_name'))
        with open(app_home_nested_file, 'w') as file:
            file.write('some text')
        runlog.permissions(self.app_home, i[0], i[1], recursive=True)
        # Collect all files and directories under app_home, plus app_home itself.
        app_home_files = []
        app_home_dirs = []
        for root, dirs, files in os.walk(self.app_home):
            for name in files:
                app_home_files.append(os.path.join(root, name))
            for name in dirs:
                app_home_dirs.append(os.path.join(root, name))
        app_home_dirs.append(self.app_home)
        for a in app_home_files:
            self.assertEqual(i[2], stat.filemode(os.stat(a).st_mode), stat.filemode(os.stat(a).st_mode))
        for a in app_home_dirs:
            self.assertEqual(i[3], stat.filemode(os.stat(a).st_mode), stat.filemode(os.stat(a).st_mode))
        self.log('INFO: changed permissions of %s files to %s and directories to %s' % (
            self.app_home, i[0], i[1]
        ))
        # Restore owner rwx everywhere so the tree can be removed.
        os.chmod(self.app_home, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        for root, dirs, files in os.walk(self.app_home):
            for name in dirs:
                os.chmod(os.path.join(root, name), stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        shutil.rmtree(self.app_home)
def __repr__(self):
    """Debug representation: the attrs listed in self._prints, followed by
    the symbolic st_mode, e.g. '<Entry('name', 123, -rw-r--r--)>'."""
    pieces = [repr(getattr(self, attr)) for attr in self._prints]
    # The symbolic mode is always appended after the listed attributes.
    return '<%s(%s, %s)>' % (self.__class__.__name__,
                             ', '.join(pieces),
                             stat.filemode(self.st_mode))
def test_mutated(self):
    """Setting each Permission flag keeps to_filemode() in sync with stat.filemode()."""
    perm = Permissions()
    # A fresh Permissions object has no bits set.
    assert perm.to_filemode() == '---------'
    expected_int = 0
    for item in Permission:
        perm.set(item, True)
        expected_int = expected_int | item
        # [1:] drops stat.filemode()'s leading file-type character.
        assert perm.to_filemode() == stat.filemode(expected_int)[1:]
def __test_config_file__(parser, path):
    """Test existence and permissions of configuration file

    Returns False when the file is missing, True when it is a regular
    file with mode 0600 owned by the current user; otherwise reports
    through parser.error().
    """
    if not os.path.exists(path):
        return False
    statinfo = os.stat(path)
    if not os.path.isfile(path) or os.path.islink(path):
        parser.error("invalid configuration file {} (it must be a regular file)".format(path))
    elif stat.filemode(statinfo.st_mode) != '-rw-------':
        # Must be exactly 0600: read/write for the owner only.
        parser.error("invalid configuration file {} (it must have only read and write permissions for user)".format(path))
    elif statinfo.st_uid != os.getuid():
        parser.error("invalid configuration file {} (the owner must be the user)".format(path))
    return True
def metadata(path):
    """Produce a formatted metadata panel for *path*: size, permissions,
    timestamps, owner/group, link counts, md5/sha1 and mime/file type.

    Returns (Status.normal, fill3.Text(...)).
    """
    import grp  # local import: required for the group-name fix below

    def detail(value, unit):
        # Grey parenthesized annotation, e.g. " (1000 uid)".
        result = (" (%s)" % value if unit is None
                  else " (%s %s)" % (value, unit))
        return termstr.TermStr(result).fg_color(termstr.Color.grey_100)
    is_symlink = "yes" if os.path.islink(path) else "no"
    stat_result = os.stat(path)
    permissions = stat.filemode(stat_result.st_mode)
    hardlinks = str(stat_result.st_nlink)
    # BUGFIX: group names live in the group database keyed by gid; the
    # original used pwd.getpwuid(st_gid), i.e. a *user* lookup with a gid.
    group = [grp.getgrgid(stat_result.st_gid).gr_name,
             detail(stat_result.st_gid, "gid")]
    owner = [pwd.getpwuid(stat_result.st_uid).pw_name,
             detail(stat_result.st_uid, "uid")]
    modified, created, access = [
        [time.asctime(time.gmtime(seconds)), detail(int(seconds), "secs")]
        for seconds in (stat_result.st_mtime, stat_result.st_ctime,
                        stat_result.st_atime)]
    size = [_pretty_bytes(stat_result.st_size),
            detail(stat_result.st_size, "bytes")]
    stdout, *rest = _do_command(
        ["file", "--dereference", "--brief", "--uncompress", "--mime", path])
    mime_type = stdout
    stdout, *rest = _do_command(
        ["file", "--dereference", "--brief", "--uncompress", path])
    file_type = stdout
    md5sum = _md5(path)
    stdout, *rest = _do_command(["sha1sum", path])
    sha1sum = stdout.split()[0]
    permissions_value = [permissions,
                         detail(_permissions_in_octal(permissions), None)]
    text = []
    # None entries render as blank separator lines.
    for line in [
            ("size", size), ("permissions", permissions_value), None,
            ("modified time", modified), ("creation time", created),
            ("access time", access), None,
            ("owner", owner), ("group", group), None,
            ("hardlinks", hardlinks), ("symlink", is_symlink), None,
            ("md5", md5sum), ("sha1", sha1sum), None,
            ("mime type", mime_type.strip()),
            ("file type", file_type.strip())]:
        if line is None:
            text.append("\n")
        else:
            name, value = line
            name = termstr.TermStr(name + ":").fg_color(
                termstr.Color.light_blue).ljust(16)
            text.append(name + fill3.join("", value) + "\n")
    return (Status.normal, fill3.Text(fill3.join("", text)))
def from_int(cls, perms: int):
    """
    Create Permissions object, read permissions from int value.

    :param perms: integer permission bits, at most 0o7777.
    :return: a Permissions instance built via cls.from_filemode.
    :raises ValueError: if perms is out of range for a permission value.
    """
    # Validate the range up front so an obviously invalid value never
    # reaches stat.filemode (the original also left a dead, commented-out
    # duplicate call here, now removed).
    if perms > 0o7777:
        raise ValueError("Invalid value of permissions: %s" % perms)
    try:
        filemode = stat.filemode(perms)
    except OverflowError as exc:
        raise ValueError("Invalid value of permissions: %s" % perms) from exc
    return cls.from_filemode(filemode)
def get_attributes(path):
    """Collect filesystem attributes for *path* into an Attributes record.

    Follows symlinks for the stat itself (os.stat) but still reports
    whether the path is a symlink via os.path.islink.
    """
    st = os.stat(path)
    mode = st.st_mode
    return Attributes(
        st.st_size,
        os.access(path, os.X_OK),
        stat.filemode(mode),
        os.path.islink(path),
        # Special file-type probes on the (possibly dereferenced) mode.
        stat.S_ISFIFO(mode),
        stat.S_ISDOOR(mode),
        stat.S_ISSOCK(mode),
        st.st_nlink,
        st.st_uid,
        st.st_gid,
        st.st_mtime,
    )
def test(self):
    """Round-trip every 4-digit octal mode through Permissions on a temp file.

    For each mode, applies it with os.lchmod and checks that
    stat.filemode agrees with Permissions.to_filemode.
    NOTE(review): os.lchmod exists only on platforms that support it
    (e.g. BSD/macOS) — this test cannot run on typical Linux.
    """
    perms = '0 1 2 3 4 5 6 7'.split()
    path = tempfile.mkstemp()[1]
    try:
        # All 8^4 combinations of special/user/group/other digits.
        perms = itertools.product(perms, perms, perms, perms)
        for item in perms:
            item = ''.join(item)
            assert len(item) == 4
            prm = Permissions.from_octal(item)
            os.lchmod(path, prm)
            # [1:] strips the leading file-type character.
            assert stat.filemode(os.lstat(path).st_mode)[1:] == prm.to_filemode()
    finally:
        os.unlink(path)
def format_list(self, basedir, listing):
    """
    Return an iterator object that yields the entries of given directory
    emulating the "/bin/ls -lA" UNIX command output.

    This is how output should appear:
    -rw-rw-rw-   1 owner   group    7045120 Sep 02  3:47 music.mp3
    drwxrwxrwx   1 owner   group          0 Aug 31 18:50 e-books
    -rw-rw-rw-   1 owner   group        380 Sep 02  3:40 module.py

    :param basedir: (str) must be protocol relative path
    :param listing: (list) list of files to needed for output.
    """
    assert isinstance(basedir, str), basedir
    # BUGFIX: ensure exactly one trailing slash.  The original expression
    # `basedir += '/' if basedir[-1:] != '/' else basedir` appended the
    # whole of basedir to itself whenever it already ended with '/'.
    if basedir[-1:] != '/':
        basedir += '/'
    now = time.time()
    for basename in listing:
        file = self.norm_path(basedir + basename)
        # for e.g. basedir = '/' and basename = test.png.
        # So file is '/test.png'
        try:
            st = self.stat(file)
        except (fs.errors.FSError, FilesystemError):
            raise
        permission = filemode(Permissions.create(st['st_mode']).mode)
        # Permissions.create yields '?' as the type char; substitute it.
        if self.isdir(file):
            permission = permission.replace('?', 'd')
        elif self.isfile(file):
            permission = permission.replace('?', '-')
        elif self.islink(file):
            permission = permission.replace('?', 'l')
        nlinks = st['st_nlink']
        size = st['st_size']  # file-size
        uname = self.getinfo(path=file, namespaces=['access']).user
        # |-> pwd.getpwuid(st['st_uid']).pw_name would fetch the user_name of the actual owner of these files.
        gname = self.getinfo(path=file, namespaces=['access']).group
        # |-> grp.getgrgid(st['st_gid']).gr_name would fetch the user_name of the actual of these files.
        mtime = time.gmtime(fs.time.datetime_to_epoch(
            self.getinfo(file, namespaces=['details']).modified))
        # Entries older than ~6 months show the year instead of the time.
        if (now - st['st_mtime']) > (180 * 24 * 60 * 60):
            fmtstr = "%d %Y"
        else:
            fmtstr = "%d %H:%M"
        mtimestr = "%s %s" % (months_map[mtime.tm_mon],
                              time.strftime(fmtstr, mtime))
        # 61440 == 0o170000 (the S_IFMT file-type mask).
        if (st['st_mode'] & 61440) == stat.S_IFLNK:
            # if the file is a symlink, resolve it, e.g. "symlink -> realfile"
            basename = basename + " -> " + self.readlink(file)
        # formatting is matched with proftpd ls output
        line = "%s %3s %-8s %-8s %8s %s %s\r\n" % (permission, nlinks, uname,
                                                   gname, size, mtimestr,
                                                   basename)
        yield line
def __test_dbpath__(parser, path):
    """Test existence and permissions of database directory

    Creates the directory with mode 0700 when missing; otherwise checks
    that it is a real directory with owner-only permissions, owned by
    the current user. Problems are reported via parser.error().
    """
    if not os.path.exists(path):
        os.mkdir(path, mode=0o700)
        return True
    statinfo = os.stat(path)
    if not os.path.isdir(path) or os.path.islink(path):
        parser.error("invalid database path {} (it must be a directory)".format(path))
    elif stat.filemode(statinfo.st_mode) != 'drwx------':
        parser.error(
            "invalid database path {} (it must have only read, write and excecution permissions for user)"
            .format(path))
    elif statinfo.st_uid != os.getuid():
        parser.error(
            "invalid database path {} (the owner must be the user)"
            .format(path))
    return True
def get_fancy_label(pathname):
    """Return a multi-line metadata label for *pathname*: inode,
    permissions, link count, user, group, size and ctime."""
    fstat = os.stat(pathname)
    file_ino = str(fstat.st_ino)
    username = getpwuid(fstat.st_uid)[0]
    groupname = getgrgid(fstat.st_gid)[0]
    filesize = fstat.st_size
    nb_links = fstat.st_nlink
    # BUGFIX: the "user" segment was corrupted in the original source
    # ('"\nuser: "******"\ngroup: "'); reconstructed to interpolate the
    # username, consistent with every other field in the label.
    n_label = ("ino: " + file_ino
               + "\npermissions: " + stat.filemode(fstat.st_mode)
               + "\nlinks: " + str(nb_links)
               + "\nuser: " + username
               + "\ngroup: " + groupname
               + "\nsize: " + str(filesize)
               + "\nctime: " + str(time.ctime(fstat.st_ctime)))
    return n_label
def st(path):
    """Collect lstat metadata for *path* into a plain dict.

    Always contains 'abspath' and 'filename'; the stat-derived keys are
    added only when os.lstat (and timestamp formatting) succeed — any
    failure is logged and the partial dict is returned.
    """
    dic = {'abspath': path, 'filename': os.path.basename(path)}
    try:
        lst = os.lstat(path)
        dic['filemode'] = stat.filemode(lst.st_mode)
        for key in ('st_mode', 'st_ino', 'st_dev', 'st_nlink',
                    'st_uid', 'st_gid'):
            dic[key] = getattr(lst, key)
        # Size is stored as a string, timestamps via the local strftime helper.
        dic['st_size'] = str(lst.st_size)
        for key in ('st_atime', 'st_mtime', 'st_ctime'):
            dic[key] = strftime(getattr(lst, key))
    except Exception as e:
        logging.exception(e)
    return dic
def __test_cert_key_files__(parser, certfile, keyfile):
    """Test existence and permissions of the certificate and private key files.

    Requires both files to exist and be distinct, and the key file to
    have mode 0600 and be owned by the current user. Problems are
    reported via parser.error() (which exits under argparse).
    """
    # Message fixes vs. the original: grammar ("it not exists"),
    # spelling ("certificat", "differents"), and the key-file message no
    # longer claims execution permission while checking for '-rw-------'.
    if not os.path.exists(keyfile):
        parser.error("invalid key file {} (it does not exist)".format(keyfile))
    elif not os.path.exists(certfile):
        parser.error("invalid certificate file {} (it does not exist)".
                     format(certfile))
    else:
        statinfo = os.stat(keyfile)
        if certfile == keyfile:
            parser.error(
                "the certificate file ({}) and the key file ({}) must be different"
                .format(certfile, keyfile))
        if stat.filemode(statinfo.st_mode) != '-rw-------':
            parser.error(
                "invalid key file {} (it must have only read and write permissions for user)"
                .format(keyfile))
        elif statinfo.st_uid != os.getuid():
            parser.error("invalid key file {} (the owner must be the user)".
                         format(keyfile))
    return True
def dir():
    """List the current directory like `ls -l`.

    Prints any access errors first, then the total byte count, then one
    line per entry (mode, links, owner, group, size, mtime, name).
    Entries that vanish between listdir() and lstat() are reported
    instead of aborting the listing.
    """
    all_stat = []
    error_stat = []
    total = 0
    for filename in os.listdir('.'):
        try:
            st_result = os.lstat(filename)
            mode = stat.filemode(st_result.st_mode)
            nlink = str(st_result.st_nlink)
            owner = getpwuid(st_result.st_uid)[0]
            group = getgrgid(st_result.st_gid)[0]
            size = str(st_result.st_size)
            total += st_result.st_size
            # ctime()[4:16] keeps the "Mon DD HH:MM" portion.
            time = ctime(st_result.st_mtime)[4:16]
            all_stat.append(' '.join([mode, nlink, owner, group, size, time, filename]))
        except FileNotFoundError:
            # BUGFIX: the original message hard-coded the name 'aa';
            # report the file that actually failed.
            error_stat.append("ls: cannot access %s: No such file or directory" % filename)
            continue
    for er in error_stat:
        print(er)
    print('total ', str(total))
    for file_stat in all_stat:
        print(file_stat)
def mode_data_func(self, col, cell, model, iterator, data):
    # TreeView cell-data callback: render the row's numeric b'mode'
    # metadata as a symbolic permission string (e.g. '-rw-r--r--').
    cell.set_property('text', stat.filemode(self._load_metadata(iterator)[b'mode']))
def mode2str(self, mode):
    """Convert a numeric st_mode into its symbolic form (e.g. '-rw-r--r--')."""
    symbolic = stat.filemode(mode)
    return symbolic
def assert_dir_perms(path, expected_perms):
    """Assert *path* is a directory whose permission bits match *expected_perms*.

    The expected string is built by forcing the file-type character to 'd'
    on the symbolic rendering of *expected_perms*.
    """
    expected = "d" + stat.filemode(expected_perms)[1:]
    actual = stat.filemode(os.stat(path).st_mode)
    assert actual == expected
def format_list(self, basedir, listing):
    """
    Return an iterator object that yields the entries of given directory
    emulating the "/bin/ls -lA" UNIX command output.

    This is how output should appear:
    -rw-rw-rw-   1 owner   group    7045120 Sep 02  3:47 music.mp3
    drwxrwxrwx   1 owner   group          0 Aug 31 18:50 e-books
    -rw-rw-rw-   1 owner   group        380 Sep 02  3:40 module.py

    :param basedir: (str) must be protocol relative path
    :param listing: (list) list of files to needed for output.
    """
    assert isinstance(basedir, str), basedir
    # BUGFIX: ensure exactly one trailing slash.  The original expression
    # `basedir += "/" if basedir[-1:] != "/" else basedir` appended the
    # whole of basedir to itself whenever it already ended with "/".
    if basedir[-1:] != "/":
        basedir += "/"
    now = time.time()
    for basename in listing:
        file = self.norm_path(
            basedir + basename)  # for e.g. basedir = '/' and basename = test.png.
        # So file is '/test.png'
        try:
            st = self.stat(file)
        except (fs.errors.FSError, FilesystemError):
            raise
        permission = filemode(Permissions.create(st["st_mode"]).mode)
        # Permissions.create yields '?' as the type char; substitute it.
        if self.isdir(file):
            permission = permission.replace("?", "d")
        elif self.isfile(file):
            permission = permission.replace("?", "-")
        elif self.islink(file):
            permission = permission.replace("?", "l")
        nlinks = st["st_nlink"]
        size = st["st_size"]  # file-size
        uname = self.getinfo(path=file, namespaces=["access"]).user
        # |-> pwd.getpwuid(st['st_uid']).pw_name would fetch the user_name of the actual owner of these files.
        gname = self.getinfo(path=file, namespaces=["access"]).group
        # |-> grp.getgrgid(st['st_gid']).gr_name would fetch the user_name of the actual of these files.
        mtime = time.gmtime(
            fs.time.datetime_to_epoch(
                self.getinfo(file, namespaces=["details"]).modified))
        # Entries older than ~6 months show the year instead of the time.
        if (now - st["st_mtime"]) > (180 * 24 * 60 * 60):
            fmtstr = "%d %Y"
        else:
            fmtstr = "%d %H:%M"
        mtimestr = "%s %s" % (
            months_map[mtime.tm_mon],
            time.strftime(fmtstr, mtime),
        )
        # 61440 == 0o170000 (the S_IFMT file-type mask).
        if (st["st_mode"] & 61440) == stat.S_IFLNK:
            # if the file is a symlink, resolve it, e.g. "symlink -> realfile"
            basename = basename + " -> " + self.readlink(file)
        # formatting is matched with proftpd ls output
        line = "%s %3s %-8s %-8s %8s %s %s\r\n" % (
            permission,
            nlinks,
            uname,
            gname,
            size,
            mtimestr,
            basename,
        )
        yield line
def testGetCollectedTimelinesBody(self):
    """GetCollectedTimelines in BODY format yields a zip with one body file
    per client whose rows match the collected timeline entries."""
    client_id = db_test_utils.InitializeClient(data_store.REL_DB)
    fqdn = "foo.bar.quux"

    # Client snapshot supplies the fqdn used in the archive member name.
    snapshot = rdf_objects.ClientSnapshot()
    snapshot.client_id = client_id
    snapshot.knowledge_base.fqdn = fqdn
    data_store.REL_DB.WriteClientSnapshot(snapshot)

    hunt_id = "B1C2E3D4F5"
    flow_id = "1B2C3E4D5F"

    # A paused timeline hunt targeting our client, plus its flow.
    hunt_obj = rdf_hunt_objects.Hunt()
    hunt_obj.hunt_id = hunt_id
    hunt_obj.args.standard.client_ids = [client_id]
    hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
    hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
    data_store.REL_DB.WriteHuntObject(hunt_obj)

    flow_obj = rdf_flow_objects.Flow()
    flow_obj.client_id = client_id
    flow_obj.flow_id = flow_id
    flow_obj.flow_class_name = timeline.TimelineFlow.__name__
    flow_obj.create_time = rdfvalue.RDFDatetime.Now()
    flow_obj.parent_hunt_id = hunt_id
    data_store.REL_DB.WriteFlowObject(flow_obj)

    # Two timeline entries with distinct paths, inodes, sizes, timestamps
    # (stored in nanoseconds) and modes.
    entry_1 = rdf_timeline.TimelineEntry()
    entry_1.path = "/bar/baz/quux".encode("utf-8")
    entry_1.ino = 5926273453
    entry_1.size = 13373
    entry_1.atime_ns = 111 * 10**9
    entry_1.mtime_ns = 222 * 10**9
    entry_1.ctime_ns = 333 * 10**9
    entry_1.mode = 0o664

    entry_2 = rdf_timeline.TimelineEntry()
    entry_2.path = "/bar/baz/quuz".encode("utf-8")
    entry_2.ino = 6037384564
    entry_2.size = 13374
    entry_2.atime_ns = 777 * 10**9
    entry_2.mtime_ns = 888 * 10**9
    entry_2.ctime_ns = 999 * 10**9
    entry_2.mode = 0o777

    # Persist the entries as serialized blobs referenced by a flow result.
    entries = [entry_1, entry_2]
    blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter(entries)))
    blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

    result = rdf_timeline.TimelineResult()
    result.entry_batch_blob_ids = [blob_id.AsBytes() for blob_id in blob_ids]

    flow_result = rdf_flow_objects.FlowResult()
    flow_result.client_id = client_id
    flow_result.flow_id = flow_id
    flow_result.payload = result
    data_store.REL_DB.WriteFlowResults([flow_result])

    buffer = io.BytesIO()
    self.api.Hunt(hunt_id).GetCollectedTimelines(
        timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY).WriteToStream(
            buffer)

    with zipfile.ZipFile(buffer, mode="r") as archive:
        with archive.open(f"{client_id}_{fqdn}.body", mode="r") as file:
            content_file = file.read().decode("utf-8")

    # Body-format columns: 1=path, 2=inode, 3=symbolic mode, 6=size,
    # 7=atime, 8=mtime, 9=ctime (timestamps in seconds).
    rows = list(csv.reader(io.StringIO(content_file), delimiter="|"))
    self.assertLen(rows, 2)

    self.assertEqual(rows[0][1], "/bar/baz/quux")
    self.assertEqual(rows[0][2], "5926273453")
    self.assertEqual(rows[0][3], stat.filemode(0o664))
    self.assertEqual(rows[0][6], "13373")
    self.assertEqual(rows[0][7], "111")
    self.assertEqual(rows[0][8], "222")
    self.assertEqual(rows[0][9], "333")

    self.assertEqual(rows[1][1], "/bar/baz/quuz")
    self.assertEqual(rows[1][2], "6037384564")
    self.assertEqual(rows[1][3], stat.filemode(0o777))
    self.assertEqual(rows[1][6], "13374")
    self.assertEqual(rows[1][7], "777")
    self.assertEqual(rows[1][8], "888")
    self.assertEqual(rows[1][9], "999")
def get_chmod(path: str) -> str:
    """Return the symbolic permission string for *path*.

    Uses lstat, so symlinks are reported as links rather than followed.
    """
    return filemode(os.lstat(path).st_mode)
# Minimal ls-like script: "-l" adds size/mtime/mode columns, "-L" appends
# the owner's user name; a path argument selects the directory to list.
#
# BUGFIX: the original set a misspelled `lFlaf` variable and never
# initialized `lFlag` or `rootDir`, so running without "-l" (or without a
# path argument) crashed with NameError.
lFlag = False   # -l: long listing
LFlag = False   # -L: include the owner's user name
rootDir = "."   # default to the current directory
for arg in sys.argv[1:]:
    if arg == "-l":
        lFlag = True
    elif arg == "-L":
        LFlag = True
    elif os.path.exists(arg):
        rootDir = arg
    else:
        sys.stderr.write("Niepoprawny argument lub błędna ścieżka")
        exit(1)

for file in os.listdir(rootDir):
    line = ""
    path = rootDir + "/" + file
    stats = os.stat(path)
    if lFlag:
        line += file.ljust(30) + " "
        line += str(stats.st_size).ljust(10) + " "
        line += datetime.datetime.fromtimestamp(stats.st_mtime).strftime("%Y-%m-%d %H:%M:%S") + " "
        line += stat.filemode(stats.st_mode) + " "
    else:
        line += file + " "
    if LFlag:
        line += pwd.getpwuid(stats.st_uid).pw_name + " "
    print(line)
# --- os.chdir / os.stat / os.path demo (m_dir, curr_dir and splitter are
# defined earlier in the script) ---
os.chdir(m_dir)
print("After os.chdir(\"{}\")".format(m_dir))
print("{: <35}:{}".format("os.getcwd", os.getcwd()))
os.chdir(curr_dir)
print(splitter)

print("os.stat(<FILE>) & stat module")
m_file = os.listdir()
m_select = []
for item in m_file:
    ext = os.path.splitext(str(item))[1]
    if ext == ".py":
        m_select.append(str(item))
# BUGFIX: random.randint(0, len(m_select)) is inclusive on both ends and
# could index one past the end; random.choice picks a valid element.
f = random.choice(m_select)
state = os.stat(f)
print("{: <35}:{}".format("Mode", stat.filemode(state.st_mode)))
print("{: <35}:{} Bytes".format("Size", state.st_size))
print("{: <35}:{:%Y/%m/%d %H:%M:%S}".format("Last Access at", datetime.datetime.fromtimestamp(state.st_atime)))
print("{: <35}:{:%Y/%m/%d %H:%M:%S}".format("Last Modify at", datetime.datetime.fromtimestamp(state.st_mtime)))
print("{: <35}:{:%Y/%m/%d %H:%M:%S}".format("Create at", datetime.datetime.fromtimestamp(state.st_ctime)))
print(splitter)

print("os.path")
f = r"c:\abc.txt"
print("{: <35}:{}".format("os.path.basename(\"{}\")".format(f), os.path.basename(f)))
print("{: <35}:{}".format("os.path.dirname(\"{}\")".format(f), os.path.dirname(f)))
print("{: <35}:{}".format("os.path.split(\"{}\")".format(f), os.path.split(f)))
print("{: <35}:{}".format("os.path.exists(\"{}\")".format(f), os.path.exists(f)))
print("{: <35}:{}".format("os.path.isdir(\"{}\")".format(f), os.path.isdir(f)))
print("{: <35}:{}".format("os.path.isfile(\"{}\")".format(f), os.path.isfile(f)))
print("{: <35}:{}".format("os.path.splitdrive(\"{}\")".format(f), os.path.splitdrive(f)))
def file_reports():
    """Integration test of the darwin 'file' stats reporter.

    Iterates over every (target file, permissions config) combination,
    starts darwin with the generated configuration, and checks both the
    created stats file's mode and that the expected stats line appears
    (or that nothing is produced for invalid combinations).

    Returns True when all six combinations behave as expected.
    """
    ret = False
    part = 1
    files = [{
        "name": "good-file",
        "conf": "\"filepath\": \"{}\"".format(DEFAULT_STATS_FILE),
        "result": True
    }, {
        "name": "wrong-file",
        "conf": "\"filepath\": \"/tmp/but/I/do/not/exist\"",
        "result": False
    }]
    permissions = [
        {
            "name": "default",
            "conf": "",
            "result": True,
            "expected_mode": "-rw-r-----"
        },
        {
            "name": "custom-correct",
            "conf": ",\"permissions\": 700",
            "result": True,
            "expected_mode": "-rwx------"
        },
        {
            "name": "custom-wrong",
            "conf": ",\"permissions\": -66",
            "result": False,
            "expected_mode": ""
        },
    ]
    for logfile in files:
        for permission in permissions:
            # Progress indicator "[n/6]" (erased below with backspaces).
            print("[{}/6]".format(part), end='', flush=True)
            file_conf = "\"file\": {{{file} {permission}}},".format(
                file=logfile['conf'], permission=permission['conf'])
            # print(file_conf)
            # A stats line is only expected when both parts are valid.
            if logfile['result'] and permission['result']:
                expected_result = STAT_LOG_MATCH
            else:
                expected_result = "nothing"
            darwin_configure(
                CONF_TEMPLATE.substitute(log_path=DEFAULT_FILTER_PATH,
                                         conf_path=PATH_CONF_FLOGS,
                                         conf_redis="",
                                         conf_file=file_conf,
                                         proc_stats=""))
            darwin_configure(CONF_FLOGS, path=PATH_CONF_FLOGS)
            process = darwin_start()
            logline = "nothing"
            try:
                with open(DEFAULT_STATS_FILE, "r") as test_file:
                    # First verify the created file's permissions...
                    file_stats = os.stat(DEFAULT_STATS_FILE)
                    mode = stat.filemode(file_stats.st_mode)
                    if mode != permission['expected_mode']:
                        logging.error(
                            "file_reports(): file mode should be '{}', but is '{}'"
                            .format(permission['expected_mode'], mode))
                        ret = False
                        # break
                    else:
                        # ...then give darwin a moment to write the stats line.
                        sleep(1)
                        logline = test_file.readline()
                        ret = True if expected_result in logline else False
                        if not ret:
                            logging.error(
                                "file_reports(): while trying '{}' with '{}', didn't find '{}' in '{}'"
                                .format(logfile['conf'], permission['conf'],
                                        expected_result, logline))
            except FileNotFoundError:
                # Missing file is only a failure when output was expected.
                if expected_result != "nothing":
                    logging.error(
                        "file_reports(): file is absent, but should have been created"
                    )
                    ret = False
            try:
                os.remove(DEFAULT_STATS_FILE)
            except Exception:
                pass
            darwin_stop(process)
            darwin_remove_configuration()
            darwin_remove_configuration(path=PATH_CONF_FLOGS)
            # Don't judge me...
            part += 1
            print("\x08\x08\x08\x08\x08", end='', flush=True)
            if not ret:
                break
        if not ret:
            break
    # Dirty much... But pretty printing!
    print(" ", end='', flush=True)
    print("\x08\x08\x08\x08\x08\x08", end='', flush=True)
    return ret
def _mode_to_string(self, mode):
    """Translate a numeric st_mode value into its `ls`-style string."""
    symbolic = stat.filemode(mode)
    return symbolic
def get_stat(path):
    """Return a one-line summary "mode size name" for *path*.

    The size is humanized via sizeof_fmt() when the module-level `_h`
    flag is set.
    """
    filestat = os.stat(path)
    fsize = os.path.getsize(path)
    if _h:
        fsize = sizeof_fmt(fsize)
    # BUGFIX: the original joined the module-global `f` instead of the
    # function's own `path` argument, so the printed name could be stale
    # or wrong whenever the global and the argument diverged.
    return ' '.join([stat.filemode(filestat.st_mode), str(fsize), path])
def mode_perms(mode):
    """Format *mode*'s permission bits as e.g. "0644 (-rw-r--r--)".

    The octal part covers only the permission bits (S_IMODE), zero-padded
    to four digits; the literal part is the full symbolic rendering.
    """
    octal = format(stat.S_IMODE(mode), '04o')
    literal = stat.filemode(mode)
    return "%s (%s)" % (octal, literal)
def get_permissions(self, path):
    """Return the symbolic permission string (e.g. '-rw-r--r--') of a
    remote *path*, using lstat over the FTP connection so symlinks are
    reported as links rather than followed.
    """
    with FtpWrapper(self.scheme + path) as ftp:
        return stat.filemode(ftp.conn.lstat(ftp.path).st_mode)
def to_filemode(self):
    """
    Dump Permissions to filemode format (e.g. 'rw-r--r--').

    :return: the 9-character permission string, without the leading
        file-type character that stat.filemode would include.
    """
    full_mode = stat.filemode(self.to_int())
    return full_mode[1:]
def __str__(self):
    """Readable summary: class name, filename and symbolic mode."""
    cls_name = type(self).__name__
    mode_str = stat.filemode(self.mode)
    return '<{} filename="{}" mode="{}">'.format(cls_name, self.filename, mode_str)
def filemode(mode):
    """Deprecated in this location; use stat.filemode."""
    # Kept only for backward compatibility: warn, then delegate.
    import warnings
    warnings.warn("deprecated in favor of stat.filemode",
                  DeprecationWarning, stacklevel=2)
    return stat.filemode(mode)
def check_permissions(self, path: Path, group: grp.struct_group, umask: int,
                      exclude: List[Path] = None):
    """Perform a run and make sure they have correct permissions.

    Walks everything under *path* (skipping anything below an excluded
    directory) and asserts, per entry: the gid matches *group*, symlinks
    are 0777, known executables keep owner/group execute, regular files
    are 0664-umask, and directories are 0775 with write bits masked out.
    """
    if exclude is None:
        exclude = []
    else:
        exclude = [ex_path for ex_path in exclude]
    # Directories never get the write bits stripped by the file umask.
    dir_umask = umask & ~0o222
    for file in utils.flat_walk(path):
        excluded = False
        for parent in file.parents:
            if parent in exclude:
                excluded = True
        if excluded:
            continue
        fstat = file.stat()
        # Make sure all files have the right group.
        grp_name = grp.getgrgid(fstat.st_gid).gr_name
        self.assertEqual(
            fstat.st_gid, group.gr_gid,
            msg="File {} had the incorrect group. Expected {}, got {}".
            format(file, self.alt_group.gr_name, grp_name))
        mode = fstat.st_mode
        if file.is_symlink():
            # Check the link itself, not its target.
            mode = file.lstat().st_mode
            self.assertEqual(
                mode, 0o120777,
                msg="Expected symlink {} to have permissions {} but "
                    "got {}".format(file, stat.filemode(0o120777),
                                    stat.filemode(mode)))
        elif (file.name.startswith('binfile') or
              file.name in ('kickoff.sh', 'build.sh', 'run.sh', 'run.tmpl')):
            expected = (~umask) & 0o100775
            # Binfiles should have owner/group execute.
            self.assertEqual(
                mode, expected,
                msg="Expected {} to have perms {}, but had {}".format(
                    file, stat.filemode(expected), stat.filemode(mode)))
        elif file.is_file():
            expected = (~umask) & 0o100664
            self.assertEqual(
                oct(mode), oct(expected),
                msg="Expected regular file {} to have permissions {} "
                    "but got {}".format(file, stat.filemode(expected),
                                        stat.filemode(mode)))
        elif file.is_dir():
            expected = 0o40775 & (~dir_umask)
            self.assertEqual(
                mode, expected,
                msg="Expected dir {} to have permissions {} but "
                    "got {}".format(file, stat.filemode(expected),
                                    stat.filemode(mode)))
        else:
            self.fail("Found unhandled file {}.".format(file))
import re  # Regular Expressions
import random

# Demo: directory creation/renaming, globbing, regexes and random numbers.
print("Running app from ", os.getcwd())
dir = "testdir"
try:
    # BUGFIX: use the public os.path.exists() instead of the private,
    # undocumented os._exists() helper.
    if not os.path.exists(dir):
        os.mkdir(dir)
    mode = os.stat(dir).st_mode
    print(stat.S_ISDIR(mode))
    if not stat.S_ISDIR(mode):
        os.mkdir(dir)
    print(dir, " created with mode: ", stat.filemode(mode))
    # Rename with os, move it back with shutil.
    os.rename(dir, "testdir2")
    shutil.move("testdir2", dir)
except FileExistsError:
    pass

print("Scripts in directory: ", glob.glob('*.py'))
out = re.findall(r'\bf[a-z]*', 'which foot or hand fell fastest')
print("Mach of words starting with f: ", out)
out = random.sample(range(100), 10)
print(out)
print(random.randrange(6))
def enst_mode(st):
    """Render the st_mode of a stat result *st* as an `ls`-style string."""
    mode_bits = st.st_mode
    return filemode(mode_bits)
def get_file_perms(self, path):
    """return unix file perms string for path (e.g. '-rw-r--r--')"""
    mode = os.stat(path).st_mode
    return stat.filemode(mode)
def test_2(self) -> None:
    """filemode() renders a bare 0644 mode as '?rw-r--r--' ('?' = unknown type)."""
    mode = filemode(RW_R__R__)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(mode, "?rw-r--r--")
def printFileDetails(filename):
    """Print the symbolic permission string of *filename* (e.g. '-rw-r--r--')."""
    mode = os.stat(filename).st_mode
    print(stat.filemode(mode))
def test_1(self) -> None:
    """filemode() renders a bare 0755 mode as '?rwxr-xr-x' ('?' = unknown type)."""
    mode = filemode(RWXR_XR_X)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(mode, "?rwxr-xr-x")
def _runner(self, perms_test, perms_after):
    """
    Generic test runner for permissions testing. The umask is set per test
    via the relevant sab config option; the fileystem parameter in setUp().
    Note that the umask set in the environment before starting the program
    also affects the results if sabnzbd.cfg.umask isn't set.

    Arguments:
        str perms_test: permissions for test objects, chmod style "0755".
        str perms_after: expected permissions after completion of the test.
    """
    perms_test = int(perms_test, 8)
    if sabnzbd.cfg.umask():
        perms_after = int(perms_after, 8)
    else:
        # No explicit umask configured: derive the expectation from the
        # umask captured at program startup (sabnzbd.ORG_UMASK).
        perms_after = int("0777", 8) & (sabnzbd.ORG_UMASK ^ int("0777", 8))

    # Setup and verify fake dir
    test_dir = "/test"
    try:
        self.fs.create_dir(test_dir, perms_test)
    except PermissionError:
        # Retry as (fake) root if the fake filesystem denies the operation.
        ffs.set_uid(0)
        self.fs.create_dir(test_dir, perms_test)
    assert os.path.exists(test_dir) is True
    self.assert_dir_perms(test_dir, perms_test)

    # Setup and verify fake files
    for file in (
        "foobar",
        "file.ext",
        "sub/dir/.nzb",
        "another/sub/dir/WithSome.File",
    ):
        file = os.path.join(test_dir, file)
        try:
            self.fs.create_file(file, perms_test)
        except PermissionError:
            try:
                ffs.set_uid(0)
                self.fs.create_file(file, perms_test)
            except Exception:
                # Skip creating files, if not even using root gets the job done.
                break
        assert os.path.exists(file) is True
        # Compare permission bits only; [1:] drops the file-type character.
        assert stat.filemode(
            os.stat(file).st_mode)[1:] == stat.filemode(perms_test)[1:]

    # Set permissions, recursive by default
    filesystem.set_permissions(test_dir)

    # Check the results
    for root, dirs, files in os.walk(test_dir):
        for dir in [os.path.join(root, d) for d in dirs]:
            # Permissions on directories should now match perms_after
            self.assert_dir_perms(dir, perms_after)
        for file in [os.path.join(root, f) for f in files]:
            # Files also shouldn't have any executable or special bits set
            assert (stat.filemode(
                os.stat(file).st_mode)[1:] == stat.filemode(
                    perms_after & ~(stat.S_ISUID | stat.S_ISGID | stat.S_IXUSR
                                    | stat.S_IXGRP | stat.S_IXOTH))[1:])

    # Cleanup
    ffs.set_uid(0)
    self.fs.remove_object(test_dir)
    assert os.path.exists(test_dir) is False
    ffs.set_uid(global_uid)
def testBodyMultipleClients(self):
    """Hunt timeline export in BODY format yields one zipped file per client."""
    client_id_1 = db_test_utils.InitializeClient(data_store.REL_DB)
    client_id_2 = db_test_utils.InitializeClient(data_store.REL_DB)

    # Give each client a distinct FQDN; it is used in the archive filenames
    # asserted below.
    snapshot = rdf_objects.ClientSnapshot()
    snapshot.client_id = client_id_1
    snapshot.knowledge_base.fqdn = "bar.quux.com"
    data_store.REL_DB.WriteClientSnapshot(snapshot)

    snapshot = rdf_objects.ClientSnapshot()
    snapshot.client_id = client_id_2
    snapshot.knowledge_base.fqdn = "bar.quuz.com"
    data_store.REL_DB.WriteClientSnapshot(snapshot)

    # A paused timeline hunt covering both clients.
    hunt_id = "B1C2E3D4F5"
    hunt_obj = rdf_hunt_objects.Hunt()
    hunt_obj.hunt_id = hunt_id
    hunt_obj.args.standard.client_ids = [client_id_1, client_id_2]
    hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
    hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
    data_store.REL_DB.WriteHuntObject(hunt_obj)

    # One timeline entry per client; timestamps are nanoseconds, the BODY
    # output below is expected in whole seconds.
    entry_1 = rdf_timeline.TimelineEntry()
    entry_1.path = "/bar/baz/quux".encode("utf-8")
    entry_1.ino = 5926273453
    entry_1.size = 13373
    entry_1.atime_ns = 111 * 10**9
    entry_1.mtime_ns = 222 * 10**9
    entry_1.ctime_ns = 333 * 10**9
    entry_1.mode = 0o664

    entry_2 = rdf_timeline.TimelineEntry()
    entry_2.path = "/bar/baz/quuz".encode("utf-8")
    entry_2.ino = 6037384564
    entry_2.size = 13374
    entry_2.atime_ns = 777 * 10**9
    entry_2.mtime_ns = 888 * 10**9
    entry_2.ctime_ns = 999 * 10**9
    entry_2.mode = 0o777

    _WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)
    _WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)

    args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
    args.hunt_id = hunt_id
    args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

    # The handler streams a zip archive; collect it fully into memory.
    content = b"".join(self.handler.Handle(args).GenerateContent())
    buffer = io.BytesIO(content)

    with zipfile.ZipFile(buffer, mode="r") as archive:
        client_filename_1 = f"{client_id_1}_bar.quux.com.body"
        with archive.open(client_filename_1, mode="r") as file:
            content_file = file.read().decode("utf-8")

            # BODY output is pipe-separated; per the assertions: column 1 is
            # the path, 2 the inode, 3 the symbolic mode, 6 the size and
            # 7-9 atime/mtime/ctime in seconds.
            rows = list(
                csv.reader(io.StringIO(content_file), delimiter="|"))
            self.assertLen(rows, 1)
            self.assertEqual(rows[0][1], "/bar/baz/quux")
            self.assertEqual(rows[0][2], "5926273453")
            self.assertEqual(rows[0][3], stat.filemode(0o664))
            self.assertEqual(rows[0][6], "13373")
            self.assertEqual(rows[0][7], "111")
            self.assertEqual(rows[0][8], "222")
            self.assertEqual(rows[0][9], "333")

        client_filename_2 = f"{client_id_2}_bar.quuz.com.body"
        with archive.open(client_filename_2, mode="r") as file:
            content_file = file.read().decode("utf-8")
            rows = list(
                csv.reader(io.StringIO(content_file), delimiter="|"))
            self.assertLen(rows, 1)
            self.assertEqual(rows[0][1], "/bar/baz/quuz")
            self.assertEqual(rows[0][2], "6037384564")
            self.assertEqual(rows[0][3], stat.filemode(0o777))
            self.assertEqual(rows[0][6], "13374")
            self.assertEqual(rows[0][7], "777")
            self.assertEqual(rows[0][8], "888")
            self.assertEqual(rows[0][9], "999")
def test_dumpLoadKeys():
    """
    Round-trip test: dump signing/verification key material to a JSON key
    file, verify the file is created owner-only (mode -rw-------), load it
    back, and confirm the decoded bytes match the originals.
    """
    print("Testing dump load keys")
    baseDirPath = setupTmpBaseDir()
    assert baseDirPath.startswith("/tmp/bluepea")
    assert baseDirPath.endswith("test")
    keyDirPath = os.path.join(baseDirPath, "keys")
    os.makedirs(keyDirPath)
    assert os.path.exists(keyDirPath)
    keyFilePath = os.path.join(keyDirPath, "signer.json")
    assert keyFilePath.endswith("keys/signer.json")

    # random seed used to generate private signing key
    # (fixed here so the derived keys are deterministic)
    #seed = libnacl.randombytes(libnacl.crypto_sign_SEEDBYTES)
    seed = (
        b'PTi\x15\xd5\xd3`\xf1u\x15}^r\x9bfH\x02l\xc6\x1b\x1d\x1c\x0b9\xd7{\xc0_'
        b'\xf2K\x93`')

    # creates signing/verification key pair
    verkey, sigkey = libnacl.crypto_sign_seed_keypair(seed)
    # libnacl secret keys embed the seed as their first 32 bytes.
    assert seed == sigkey[:32]
    assert verkey == (
        b'B\xdd\xbb}8V\xa0\xd6lk\xcf\x15\xad9\x1e\xa7\xa1\xfe\xe0p<\xb6\xbex'
        b'\xb0s\x8d\xd6\xf5\xa5\xe8Q')
    assert sigkey == (
        b'PTi\x15\xd5\xd3`\xf1u\x15}^r\x9bfH\x02l\xc6\x1b\x1d\x1c\x0b9\xd7{\xc0_'
        b'\xf2K\x93`B\xdd\xbb}8V\xa0\xd6lk\xcf\x15\xad9\x1e\xa7\xa1\xfe\xe0p<\xb6\xbex'
        b'\xb0s\x8d\xd6\xf5\xa5\xe8Q')

    # Keys are stored hex-encoded in the JSON file.
    keyData = ODict(seed=binascii.hexlify(seed).decode('utf-8'),
                    sigkey=binascii.hexlify(sigkey).decode('utf-8'),
                    verkey=binascii.hexlify(verkey).decode('utf-8'))
    assert keyData == ODict([
        ('seed',
         '50546915d5d360f175157d5e729b6648026cc61b1d1c0b39d77bc05ff24b9360'),
        ('sigkey',
         ('50546915d5d360f175157d5e729b6648026cc61b1d1c0b39d77bc05ff24b93604'
          '2ddbb7d3856a0d66c6bcf15ad391ea7a1fee0703cb6be78b0738dd6f5a5e851')),
        ('verkey',
         '42ddbb7d3856a0d66c6bcf15ad391ea7a1fee0703cb6be78b0738dd6f5a5e851')
    ])

    keeping.dumpKeys(keyData, keyFilePath)
    assert os.path.exists(keyFilePath)
    # The dumped key file must be readable/writable by the owner only.
    mode = stat.filemode(os.stat(keyFilePath).st_mode)
    assert mode == "-rw-------"

    keyDataFiled = keeping.loadKeys(keyFilePath)
    assert keyData == keyDataFiled

    # Decoding the hex strings must reproduce the original key bytes.
    sd = binascii.unhexlify(keyDataFiled['seed'].encode('utf-8'))
    assert sd == seed
    sk = binascii.unhexlify(keyDataFiled['sigkey'].encode('utf-8'))
    assert sk == sigkey
    vk = binascii.unhexlify(keyDataFiled['verkey'].encode('utf-8'))
    assert vk == verkey

    cleanupTmpBaseDir(baseDirPath)
    assert not os.path.exists(keyFilePath)
    print("Done Test")
def get_mode(fname=TESTFN):
    """Return the symbolic mode string of *fname* (defaults to TESTFN).

    Uses lstat, so a symbolic link is reported as a link, not followed.
    """
    mode_bits = os.lstat(fname).st_mode
    return stat.filemode(mode_bits)
def overload_chars(locals_):
    """ Assigns all ascii characters as values to keys of the same name in
        the `locals_` argument. Intended to overload the `locals()` call of
        modules that import `pipepy`

            >>> from pipepy import overload_chars, ls
            >>> overload_chars(locals())
            >>> ls -l
            <<< -rw-r--r-- 1 kbairak kbairak 9401 Feb 3 23:12 foo.txt
            ... -rw-r--r-- 1 kbairak kbairak 8923 Feb 3 23:06 bar.txt
    """
    # Make every single ascii letter available as a bare name (eg the `l`
    # in `ls -l`), without clobbering names the caller already has.
    for char in string.ascii_letters:
        if char in locals_:
            continue
        locals_[char] = char

    # Also expose every executable found on PATH as a PipePy command.
    for path in os.get_exec_path():
        try:
            listdir = os.listdir(path)
        except FileNotFoundError:
            continue
        for original_name in listdir:
            name = original_name.replace('-', '_')
            # Bug fix: the original checked and assigned `locals()` — this
            # function's own local namespace — so the commands never reached
            # the caller. The target namespace is the `locals_` argument.
            if name in locals_:
                continue
            # Only entries with an execute bit anywhere in their mode string.
            if 'x' in stat_.filemode(
                    os.lstat(os.path.join(path, original_name)).st_mode):
                locals_[name] = PipePy(original_name)
def long_mode(st_mode: int) -> str:
    """Return the permission part of an ls-style mode string.

    The leading file-type character (e.g. '-', 'd', 'l') is stripped, so
    0o100644 yields 'rw-r--r--'.
    """
    full_mode = stat.filemode(st_mode)
    return full_mode[1:]
def GetModeName(cls, path):
    """Return the ls-style permission string (e.g. '-rw-r--r--') for *path*."""
    path_stat = os.stat(path)
    return stat.filemode(path_stat.st_mode)
def check(c):
    '''Check for various folder attributes.

    Scans the configured path (a single file or a directory tree),
    publishes items (counts, sizes, ages, optional file list/content)
    on the check object *c*, then evaluates the configured targets,
    setting severity and an explanatory message on the first failure.
    Returns *c*.
    '''
    # s_dirs is updated as a side effect of the directory scan helpers.
    global s_dirs
    s_dirs = 0
    path = c.conf['path']
    name = c.check
    # `is True` forces strict booleans; any other truthy value is ignored.
    recursive = c.conf.get("recursive", False) is True
    no_store = c.conf.get("no_store", False) is True
    send_content = c.conf.get("send_content", False) is True
    send_list = c.conf.get("send_list", False) is True
    # Filters are globals because the scan/filter helpers read them.
    global conf_filter_extension
    conf_filter_extension = c.conf.get("filter_extension", "").split()
    if len(conf_filter_extension) > 0:
        has_filter_extension = True
    else:
        has_filter_extension = False
    global conf_filter_regexp
    if "filter_regexp" in c.conf:
        conf_filter_regexp = re.compile(c.conf.get("filter_regexp"))
        has_filter_regexp = True
    else:
        has_filter_regexp = False
    targets = []
    if 'target' in c.conf:
        targets = c.conf['target']
    c.add_item(CheckItem('folder_path', path, datapoint=False))
    c.add_item(CheckItem('folder_name', name, datapoint=False))
    if not os.path.exists(path):
        c.severity = cmt.SEVERITY_CRITICAL
        c.add_message("folder {} missing".format(path))
        return c
    # scan
    # ----
    s_count = 0  # total file count
    s_size = 0  # total size sum
    s_mintime = -1  # oldest file (minimal unix timestamp)
    s_maxtime = 0  # most recent file (maximal unix timestamp)
    s_files = []
    s_files_detail = {}
    # single file
    if os.path.isfile(path):
        statinfo = os.stat(path)
        s_size = statinfo.st_size
        s_count = 1
        s_mintime = statinfo.st_mtime
        s_maxtime = statinfo.st_mtime
        s_files.append(path)
        s_files_detail[path] = {
            "size": s_size,
            "mtime": statinfo.st_mtime,
            "uid": statinfo.st_uid,
            "gid": statinfo.st_gid,
            "mode": stat.filemode(statinfo.st_mode),
        }
        # option : send_content
        if send_content:
            fico = get_file_content(path)
            ci = CheckItem('file_content', fico, "file content",
                           multiline=True)
            c.add_item(ci)
    # directory
    elif os.path.isdir(path):
        #for entry in os.scandir(path):
        for entry in scanCommon(path, recursive=recursive):
            if has_filter_extension:
                if not filter_extension(entry):
                    continue
            if has_filter_regexp:
                if not filter_regexp(entry):
                    continue
            s_count += 1
            statinfo = os.stat(entry.path)
            s_size += statinfo.st_size
            if statinfo.st_mtime > s_maxtime:
                s_maxtime = statinfo.st_mtime
            if statinfo.st_mtime < s_mintime or s_mintime == -1:
                s_mintime = statinfo.st_mtime
            if not no_store:
                s_files.append(entry.name)
                # NOTE(review): "size" stores the running total s_size, not
                # statinfo.st_size — looks like a per-file size bug; confirm
                # intent before relying on per-file sizes in the detail map.
                s_files_detail[entry.path] = {
                    "size": s_size,
                    "mtime": statinfo.st_mtime,
                    "uid": statinfo.st_uid,
                    "gid": statinfo.st_gid,
                    "mode": stat.filemode(statinfo.st_mode),
                }
    else:
        c.severity = cmt.SEVERITY_WARNING
        c.add_message("folder {} ({}) is not a dir / nor a file".format(
            name, path))
        return c
    # file count
    ci = CheckItem('folder_files', s_count,
                   "Number of files in folder " + name, unit="files")
    c.add_item(ci)
    # dirs count
    ci = CheckItem('folder_dirs', s_dirs,
                   "Number of dirs/subdirs in folder " + name, unit="dirs")
    c.add_item(ci)
    # size
    ci = CheckItem('folder_size', s_size, "Total Size (bytes)", unit="bytes")
    h_size = ci.human()
    c.add_item(ci)
    # age
    now = time.time()
    if s_maxtime > 0:
        ci = CheckItem('folder_youngest', "", "most recent file (seconds)",
                       unit="sec")
        ci.value = int(now - s_maxtime)
        c.add_item(ci)
    if s_mintime != -1:
        ci = CheckItem('folder_oldest', "", "oldest file (seconds)",
                       unit="sec")
        ci.value = int(now - s_mintime)
        c.add_item(ci)
    # send list : one human-readable line per stored file
    if send_list:
        r = ""
        for f in s_files_detail:
            delta_time = str(
                datetime.timedelta(
                    seconds=int(now - s_files_detail[f]["mtime"])))
            r = r + "{} - {} bytes - {} sec - id {}/{} - perm {}\n".format(
                f,
                s_files_detail[f]["size"],
                delta_time,
                s_files_detail[f]["uid"],
                s_files_detail[f]["gid"],
                s_files_detail[f]["mode"],
            )
        ci = CheckItem('file_list', r, multiline=True)
        c.add_item(ci)
    # Target checks
    # --------------
    tgcount = 0
    tgtotal = len(targets)
    # check valid target name
    for t in targets:
        if not t in VALID_TARGET_LIST:
            c.severity = cmt.SEVERITY_WARNING
            c.add_message("{} {} : unknown target {}".format(name, path, t))
            return c
    # target : files_min: 4
    if 'files_min' in targets:
        tgcount += 1
        if s_count < targets['files_min']:
            c.severity = cmt.SEVERITY_CRITICAL
            c.add_message("{} {} : too few files ({})".format(
                name, path, s_count))
            return c
    # target : files_max: 23
    if 'files_max' in targets:
        tgcount += 1
        if s_count > targets['files_max']:
            c.severity = cmt.SEVERITY_CRITICAL
            c.add_message("{} {} : too many files ({})".format(
                name, path, s_count))
            return c
    # target : size_max (folder max bytes)
    if 'size_max' in targets:
        tgcount += 1
        if s_size > targets['size_max']:
            c.severity = cmt.SEVERITY_CRITICAL
            c.add_message("{} {} : too big ({})".format(name, path, s_size))
            return c
    # target : size_min (folder min bytes)
    if 'size_min' in targets:
        tgcount += 1
        if s_size < targets['size_min']:
            c.severity = cmt.SEVERITY_CRITICAL
            c.add_message("{} {} : too small ({})".format(name, path, s_size))
            return c
    # target : age_max:
    # all files must be more recent than age_max seconds
    if 'age_max' in targets:
        tgcount += 1
        if s_mintime != -1:
            if int(now - s_mintime) > targets['age_max']:
                c.severity = cmt.SEVERITY_CRITICAL
                c.add_message("{} {} : some files are too old ({} sec)".format(
                    name, path, int(now - s_mintime)))
                return c
    # target : age_min:
    # all files must be older than age_min
    if 'age_min' in targets:
        tgcount += 1
        if s_maxtime != 0:
            if int(now - s_maxtime) < targets['age_min']:
                # NOTE(review): only this branch and has_recent bump c.alert;
                # other failing targets do not — confirm whether intentional.
                c.alert += 1
                c.severity = cmt.SEVERITY_CRITICAL
                c.add_message(
                    "{} {} : some files are too young ({} sec)".format(
                        name, path, int(now - s_maxtime)))
                return c
    # target : has_recent:
    # some files must be recent (more than has_recent)
    if 'has_recent' in targets:
        tgcount += 1
        if s_maxtime != 0:
            if int(now - s_maxtime) > targets['has_recent']:
                c.alert += 1
                c.severity = cmt.SEVERITY_CRITICAL
                c.add_message("{} {} : missing young file (min {} sec)".format(
                    name, path, int(now - s_maxtime)))
                return c
    # target : has_old:
    # some files must be older than has_old
    if 'has_old' in targets:
        tgcount += 1
        if s_mintime != -1:
            if int(now - s_mintime) < targets['has_old']:
                c.severity = cmt.SEVERITY_CRITICAL
                c.add_message("{} {} : missing old file (max {} sec)".format(
                    name, path, int(now - s_mintime)))
                return c
    # With no_store the per-file detail map is empty, so the remaining
    # targets (which need it) cannot be evaluated: report and stop here.
    if no_store:
        c.add_message(
            "{} {} OK - {} files, {} dirs, {} bytes [{}] - targets {}/{}".
            format(name, path, s_count, s_dirs, s_size, h_size, tgcount,
                   tgtotal))
        return c
    # NEED flist to be stored at scan time
    # target : has_file: filename
    if 'has_files' in targets:
        tgcount += 1
        flist = targets['has_files']
        for f in flist:
            if f not in s_files:
                c.severity = cmt.SEVERITY_CRITICAL
                c.add_message(
                    "folder {} : expected file not found ({})".format(path, f))
                return c
    # target : permission : every stored file must match exactly
    if 'permission' in targets:
        tgcount += 1
        target_perm = targets['permission']
        for f in s_files_detail:
            fperm = s_files_detail[f]["mode"]
            if fperm != target_perm:
                c.severity = cmt.SEVERITY_CRITICAL
                c.add_message(
                    "folder {} : incorrect permission for {}: found {} , expected {}"
                    .format(path, f, fperm, target_perm))
                return c
    # target : uid : every stored file must be owned by this uid
    if 'uid' in targets:
        tgcount += 1
        target_uid = targets['uid']
        for f in s_files_detail:
            fuid = s_files_detail[f]["uid"]
            if fuid != target_uid:
                c.severity = cmt.SEVERITY_CRITICAL
                c.add_message(
                    "folder {} : incorrect uid for {}: found {} , expected {}".
                    format(path, f, fuid, target_uid))
                return c
    # target : gid : every stored file must belong to this gid
    if 'gid' in targets:
        tgcount += 1
        target_gid = targets['gid']
        for f in s_files_detail:
            fgid = s_files_detail[f]["gid"]
            if fgid != target_gid:
                c.severity = cmt.SEVERITY_CRITICAL
                c.add_message(
                    "folder {} : incorrect gid for {}: found {} , expected {}".
                    format(path, f, fgid, target_gid))
                return c
    c.add_message(
        "{} {} OK - {} files, {} dirs, {} bytes - targets {}/{}".format(
            name, path, s_count, s_dirs, s_size, tgcount, tgtotal))
    return c
def create_bindiff_for_files(file1, file2, file_patch, gzip_level=-1,
                             allow_empty=False, block_size_in=1024,
                             block_size_out=1024**2 * 32, skip_del=False,
                             mtime=True, modes=True, hashsums=('sha256sum', ),
                             verbose=0):
    '''Computes the difference between two files block by block and writes
    the patch into a third file. Also records assorted meta information
    such as access modes and modification dates. Returns a dict with
    various statistics.

    :param file1: path to the first file
    :param file2: path to the second file
    :param file_patch: path to the file the patch will be written to
    :param int gzip_level: gzip compression level for the patch file
        (1-9, 0 — no compression, -1 — deduce from the file extension)
    :param bool allow_empty: when True the first file may be missing; it is
        then implicitly treated as an empty file dated 1970
    :param int block_size_in: size of the blocks that are read and compared
        (must not exceed block_size_out)
    :param int block_size_out: maximum size of the blocks written into the
        patch (0 — unlimited, which may eat a lot of memory)
    :param bool skip_del: when True the content of deleted chunks is not
        written into the patch. The patch becomes roughly half the size,
        but it can no longer be reverted
    :param bool mtime: when True, store file modification dates in the patch
    :param bool modes: when True, store POSIX file permissions in the patch
    :param tuple hashsums: names of the hashes to compute and write into the
        patch headers; these require seek and tell support from fpp
    :param int verbose: when non-zero, progress is printed to stderr
    '''
    # Pick the gzip compression level (by extension when level is -1).
    if gzip_level not in range(0, 9 + 1):
        gzip_level = 4 if file_patch.lower().endswith('.gz') else 0

    # Prepare the file meta information.
    # (the first file may not exist yet; placeholders are used for it)
    # NOTE(review): datetime.utcfromtimestamp is deprecated since
    # Python 3.12 — consider datetime.fromtimestamp(..., tz=timezone.utc).
    meta = {}
    if mtime:
        meta['1-mtime'] = '1970-01-01T00:00:00.000000Z'
        meta['2-mtime'] = datetime.utcfromtimestamp(
            os.stat(file2).st_mtime).strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    if modes:
        meta['1-mode'] = '---------'
        meta['2-mode'] = stat.filemode(os.stat(file2).st_mode)[1:]

    # When gzip is used, seek and tell are unavailable on the output, so
    # the hashes have to be computed in advance since they live at the
    # start of the file.
    # (existence of the first file is not verified yet; hash file2 only)
    if gzip_level > 0:
        hashsums_lazy = ()
        hashers = create_hashers(hashsums)
        # Placeholder digests for the possibly missing first file.
        for h in hashsums:
            meta['1-' + h] = hashers[h][1].hexdigest()
        if verbose:
            print('\rPrecalc 2 hashsum', file=sys.stderr, end='')
            sys.stderr.flush()
        # Actually compute the hash of file2.
        with open(file2, 'rb') as hfp2:
            while True:
                chunk = hfp2.read(32768)
                if not chunk:
                    break
                for h in hashsums:
                    hashers[h][1].update(chunk)
        # Store the digests in the meta dict.
        for h in hashsums:
            meta['2-' + h] = hashers[h][1].hexdigest()
    else:
        # Without gzip the hashes are computed later (lazily).
        hashsums_lazy = hashsums
        # Still validate up front that the hash list is well-formed.
        create_hashers(hashsums_lazy)

    # If the first file exists, read its real meta information.
    if os.path.exists(file1):
        if mtime:
            meta['1-mtime'] = datetime.utcfromtimestamp(
                os.stat(file1).st_mtime).strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        if modes:
            meta['1-mode'] = stat.filemode(os.stat(file1).st_mode)[1:]

    # Fast path: when allowed, treat a missing or zero-length first file
    # as empty and emit the patch directly from file2.
    if allow_empty and (not os.path.exists(file1)
                        or os.stat(file1).st_size == 0):
        file_size = os.stat(file2).st_size
        with open(file2, 'rb') as fp2:
            if gzip_level > 0:
                fpp = gzip.open(file_patch, 'wb', compresslevel=gzip_level)
            else:
                fpp = open(file_patch, 'wb')
            with fpp:
                return create_bindiff_from_empty(fp2, fpp,
                                                 file_size=file_size,
                                                 meta=meta,
                                                 hashsums=hashsums_lazy,
                                                 verbose=verbose)

    # Both files are non-empty from here on.
    # The larger size is used for console progress output.
    file_size = max(
        os.stat(file1).st_size,
        os.stat(file2).st_size,
    )

    # Remember the gzip case? The hash of the first file also has to be
    # computed in advance.
    if gzip_level > 0:
        hashers = create_hashers(hashsums)
        if verbose:
            print('\rPrecalc 1 hashsum', file=sys.stderr, end='')
            sys.stderr.flush()
        with open(file1, 'rb') as hfp1:
            while True:
                chunk = hfp1.read(32768)
                if not chunk:
                    break
                for h in hashsums:
                    hashers[h][1].update(chunk)
        for h in hashsums:
            meta['1-' + h] = hashers[h][1].hexdigest()

    # Open both inputs for reading and write the patch.
    with open(file1, 'rb') as fp1:
        with open(file2, 'rb') as fp2:
            if gzip_level > 0:
                fpp = gzip.open(file_patch, 'wb', compresslevel=gzip_level)
            else:
                fpp = open(file_patch, 'wb')
            with fpp:
                return create_bindiff(
                    fp1, fp2, fpp,
                    block_size_in=block_size_in,
                    block_size_out=block_size_out,
                    file_size=file_size,
                    skip_del=skip_del,
                    meta=meta,
                    hashsums=hashsums_lazy,
                    verbose=verbose,
                )
def filemode(self) -> str:
    """
    Get the file's mode as a string, e.g. "-rwxrwxrwx"
    """
    mode_bits = self.stat().st_mode
    return filemode(mode_bits)