def lexists(f):
    """Return True if f can be lstat()ed (it exists, even as a dangling
    symlink); False if the path does not exist.

    Any stat failure other than ENOENT is re-raised as errors.BzrError.
    """
    try:
        # Fall back to os.stat on platforms that lack os.lstat.
        stat = getattr(os, 'lstat', os.stat)
        stat(f)
        return True
    except OSError as e:  # FIX: "except OSError, e" is Python-2-only syntax
        if e.errno == errno.ENOENT:
            return False
        else:
            raise errors.BzrError("lstat/stat of (%r): %r" % (f, e))
def lexists(f):
    """True when f exists (even as a broken symlink), False when absent;
    any other stat failure is wrapped in errors.BzrError."""
    stat = getattr(os, 'lstat', os.stat)
    try:
        stat(f)
    except OSError as e:
        if e.errno == errno.ENOENT:
            return False
        raise errors.BzrError("lstat/stat of (%r): %r" % (f, e))
    return True
def _check_rotated_filename_candidates(self, fname): """ Check for various rotated logfile filename patterns and return the first match we find. """ # savelog(8) candidate = "%s.0" % fname if (os.path.exists(candidate) and os.path.exists("%s.1.gz" % fname) and (stat(candidate).st_mtime > stat("%s.1.gz" % fname).st_mtime)): return candidate # logrotate(8) # with delaycompress candidate = "%s.1" % fname if os.path.exists(candidate): return candidate # without delaycompress candidate = "%s.1.gz" % fname if os.path.exists(candidate): return candidate rotated_filename_patterns = ( # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with # `delaycompress` "-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]", # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + # without `delaycompress` "-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz", # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + # with `delaycompress` ("-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-" "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]"), # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + # without `delaycompress` ("-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-" "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz"), # for TimedRotatingFileHandler ".[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]", ) for rotated_filename_pattern in rotated_filename_patterns: candidates = glob.glob(fname + rotated_filename_pattern) if candidates: candidates.sort() return candidates[-1] # return most recent # no match return None
def isdir(path): """Return 1 if directory, else 0.""" try: st = stat(path) except _os.error: return 0 return S_ISDIR(st[ST_MODE])
def exists(fn):
    """Return 1 when fn can be stat()ed, 0 otherwise."""
    try:
        stat(fn)
    except OSError:
        return 0
    return 1
def isdir(path): """Return True if directory, else False.""" try: st = stat(path) except _os.error: return False return S_ISDIR(st.st_mode)
def isdir(path):
    """Check for directory."""
    try:
        mode_bits = stat(path)[ST_MODE]
    except os.error:
        return 0
    return S_ISDIR(mode_bits)
def find_file(context, dir, files):
    """Scan *files* inside *dir* and append matches to context['return'].

    context keys:
      'where'   -- regex the file name must match
      'content' -- optional regex; when truthy, some line of the file must match
      'return'  -- list collecting (path, name, extension, size) tuples
    """
    for file in files:
        path = os.path.normcase(os.path.join(dir, file))
        # splitext never raises; extension without the leading dot.
        ext = os.path.splitext(file)[1][1:]
        st = os.stat(path)  # FIX: don't shadow the stat module/function
        size = st[ST_SIZE]  # FIX: original called the stat result: stat(ST_SIZE)
        if S_ISDIR(st[ST_MODE]):
            continue
        if not re.search(context['where'], file):
            continue
        if context['content']:
            # FIX: original iterated f.readline() -- the characters of one
            # line.  Scan every line; `with` closes the handle reliably.
            with open(path, 'r') as f:
                if not any(re.search(context['content'], line) for line in f):
                    continue
        context['return'].append((path, file, ext, size))
def list_dir(self, path, sorting='ST_MTIME.ASC', end_url=''):
    """
    Returns String if error.
    Sorting possibilities ([+]`.ASC|.DESC`):
     - ST_MTIME -> sorts by modification date.
     - ST_CTIME -> sorts by creation date.
     - ST_SIZE -> sorts by size.
     - NAME -> sorts by name.
    :param path:
    :return:
    """
    try:
        dir_ = listdir(path)
    except OSError:
        return self.rendering_error(ERROR_MESSAGE='Invalid file or directory.')
    path = [path + '/', path][path.endswith('/')]
    sorting = sorting.lower().split('.', 2)
    # We separate directories and files to always keep directories on top
    directory_content = {'dirs': [], 'files': []}
    for file_name in dir_:
        is_dir = os.path.isdir(path + file_name)
        # If it is a directory we add a "/" at the end of the filename before
        # the hidden check (to separate dirs from files/links).
        # BUG FIX: the original conditional expression evaluated as
        # `(file_name + '/') if is_dir else ''`, so plain files were checked
        # against the empty string and never considered hidden.
        if self.is_hidden(file_name + ('/' if is_dir else '')):
            continue
        stats = stat(os.path.join(path, file_name))
        r = dict(
            FILE_NAME=escape(file_name),
            # we keep the current query string on directories link
            FILE_LINK='%s%s' % (quote(file_name), is_dir and ('/' + end_url) or ''),
            FILE_MODIFICATION=strftime(self.date_format, gmtime(stats[ST_MTIME])),
            FILE_CREATION=strftime(self.date_format, gmtime(stats[ST_CTIME])),
        )
        if is_dir:
            r['FILE_TYPE'], r['FILE_SIZE'], r['FILE_MIMETYPE'], r['DASHED_FILE_MIMETYPE'] = ('dir', '-', '-', '-')
        else:  # if file or link
            mime = self.mimetypes_list.get(os.path.splitext(file_name)[1]) or 'application/octet-stream'
            # NOTE(review): filter(...)[0] requires Python 2 (filter returns a
            # list there); on Python 3 this would need list(filter(...))[0].
            r['FILE_TYPE'], r['FILE_SIZE'], r['FILE_MIMETYPE'], r['DASHED_FILE_MIMETYPE'] = (
                'file %s' % filter(None, mime.split('/'))[0],
                self.convert_size(stats[ST_SIZE]),
                mime,
                # Replacing special chars (excluding the dash) by a dash.
                # Useful for the CSS class selector.
                re.sub(self.css_invalid_chars, '-', mime)
            )
        # append into 'dirs' if is a directory, else we append into 'files'
        directory_content['dirs' if is_dir else 'files'].append(
            # if sorting[0] is in ['st_mtime', 'st_ctime', 'st_size'] we get
            # the attribute `sorting[0]` of the `stat_result` object, else it
            # sorts by name (default).
            (stats.__getattribute__(sorting[0]), r)
            if sorting[0] in ['st_mtime', 'st_ctime', 'st_size']
            else (file_name, r)
        )
    return self.rendering_no_error(
        directory_content,
        # if sorting[1] says descending; else, it stays ascending.
        True if len(sorting) > 1 and sorting[1] == 'desc' else False,
        end_url=end_url,
    )
def test_getctime(smb_share):
    """getctime() must agree with a direct stat() on a freshly written file."""
    filename = "%s\\file.txt" % smb_share
    with open_file(filename, mode='w') as fd:
        fd.write(u"content")
    assert getctime(filename) == stat(filename).st_ctime
def _stat_for_assert(path, follow_symlinks=True, msg=None): stat = os.stat if follow_symlinks else os.lstat try: return stat(path) except OSError: if msg is None: msg = "Path does not exist, or can't be stat-ed: %r" % path raise AssertionError(msg)
def pause(lockfile): """ called for the httrack callback 'pause'. wait until lockfile is deleted Return values are ignored. """ res = stat(lockfile) while (stat.S_ISREG(os.stat(lockfile))): print "pause for", lockfile time.sleep(1)
def _stat(self,stat,path):
    """
    Perform a stat or fast stat of the given path.  If the given path is an
    item in the current directory, cache the result.
    """
    _, name = self.split(path)
    # "." and ".." get a fixed synthetic entry; no real stat needed.
    if name in (".", ".."):
        return {"name": name, "isDir": True, "isLink": True,
                "size": 0, "mtime": 0, "ctime": 0, "mode": 0}
    path = self.realpath(path)
    parent, _ = self.split(path)
    if not name:
        name = path  # root directory
    in_current_dir = (
        (self.path == DirectorySource.dummy_path and parent == name)
        or parent == self.path
    )
    if in_current_dir:
        cached = self.statcache.get(name)
        if cached is None or 'mtime' not in cached:
            cached = stat(path)
            cached['name'] = name
            self.statcache[name] = cached
        return self.statcache[name]
    fresh = stat(path)
    fresh['name'] = name
    return fresh
def __init__(self, name):
    """Record age, size, and directory/emptiness flags for *name*."""
    details = stat(name)
    self.age = time() - details[ST_CTIME]
    self.size = details[ST_SIZE]
    self.name = name
    self.isdir = 1 if path.isdir(name) else 0
    # isempty() is only meaningful (and only evaluated) for directories.
    self.isempty = isempty(name) if self.isdir else 0
def get():
    """Encrypt the file selected in the UI, stash the ciphertext plus an
    obfuscated passcode record under Data/, and back both up.

    Reads the Tk entry widgets Entrye1 (file path), Entrye2 (password) and
    Entrye3 (image name); relies on module-level save_image, listToString,
    main_menu1, key and bufferSize -- all defined elsewhere in this file.
    """
    file_name = Entrye1.get()
    password = Entrye2.get()
    image_name = Entrye3.get()
    print(file_name)
    print(password)
    print(image_name)
    # Base name without its last 4 characters -- presumably strips a
    # ".xxx" extension; TODO confirm for extensions of other lengths.
    t_file_name1 = os.path.basename(file_name)
    t_file_name = ''
    n1 = len(t_file_name1)
    for i in range(n1 - 4):
        t_file_name = t_file_name + t_file_name1[i]
    print(t_file_name1)
    print(t_file_name)
    save_image(image_name)
    # encrypt the source file to "<name>.aes" with pyAesCrypt
    with open(file_name, "rb") as fIn:
        with open(file_name + ".aes", "wb") as fOut:
            pyAesCrypt.encryptStream(fIn, fOut, password, bufferSize)
    # make the plaintext writable so it can be deleted, then remove it
    os.chmod(file_name, S_IWRITE)
    shutil.copy2(file_name + '.aes', 'Data/')
    os.remove(file_name)
    # get encrypted file size
    encFileSize = stat(file_name + ".aes").st_size
    encFileSize = str(encFileSize)
    #print(encFileSize)
    passcodes = ''
    #print(t_file_name)
    # record "<path>*<password>#<size>" for later decryption
    # NOTE(review): this stores the password on disk (only Caesar-shifted
    # below) -- not a secure scheme; flagging rather than changing behavior.
    with open('Data/' + t_file_name + '.txt', 'w+') as f:
        f.writelines(file_name)
        f.writelines('*')
        f.writelines(password)
        f.writelines('#')
        f.writelines(encFileSize)
    with open('Data/' + t_file_name + '.txt', 'r+') as f:
        passcodes = f.readlines()
    #print(passcodes)
    # Caesar-shift every character of the record by `key`
    temp_encrypt = ""
    for i in passcodes[0]:
        ow = ord(i)
        nw = ow + key
        i = chr(nw)
        temp_encrypt = temp_encrypt + i
    print(temp_encrypt)
    temp_encrypt = listToString(temp_encrypt)
    with open('Data/' + t_file_name + '.txt', 'w+') as f:
        f.writelines(temp_encrypt)
    # back up ciphertext and passcode record
    shutil.copy2('Data/' + t_file_name1 + '.aes', 'UI_Data/Backup/')
    shutil.copy2('Data/' + t_file_name + '.txt', 'UI_Data/Backup/')
    main_menu1()
def write(self, template, filename, *args, **kwargs):
    """Render *template* into dst_dir/filename.

    Keyword-only extras (popped from kwargs):
      timestamp -- skip rendering when the target file is newer than this
      encoding  -- output encoding (defaults to self._encoding)
    Remaining args/kwargs are passed to the template's render().
    """
    timestamp = kwargs.pop('timestamp', None)
    encoding = kwargs.pop('encoding', self._encoding)
    p = join(self._dst_dir, filename)
    if timestamp and isfile(p) and stat(p).st_mtime > timestamp:
        debug('%r is up to date' % filename)
        return
    debug('Updating %r' % p)
    d = dirname(p)
    if not isdir(d):
        makedirs(d)
    # FIX: get_template() used to run between open() and the try block, so a
    # template-loading error leaked the open file handle.
    f = codecs.open(p, mode='w', encoding=encoding)
    try:
        t = self._env.get_template(template)
        f.write(t.render(*args, **kwargs))
    finally:
        f.close()
def get_perms(filename):
    """Return {'owner', 'group', 'mode'} for *filename*.

    Owner/group fall back to the numeric uid/gid when no passwd/group entry
    exists; all three values are None when the file cannot be stat()ed.
    """
    file_perms = dict()
    try:
        file_stats = stat(filename)
        try:
            file_perms['owner'] = getpwuid(file_stats.st_uid).pw_name
        except KeyError:
            # uid with no passwd entry: report the raw id
            file_perms['owner'] = file_stats.st_uid
        try:
            file_perms['group'] = getgrgid(file_stats.st_gid).gr_name
        except KeyError:
            file_perms['group'] = file_stats.st_gid
        try:
            # last 4 chars of the octal form, e.g. '0755'
            file_perms['mode'] = oct(file_stats.st_mode)[-4:]
        except (TypeError, ValueError):  # FIX: was a bare except
            file_perms['mode'] = ''
    except OSError:  # FIX: was a bare except hiding real bugs
        # add a message we couldn't read the file later
        file_perms['owner'] = None
        file_perms['group'] = None
        file_perms['mode'] = None
    return file_perms
def fillGrid(self,home):
    """Populate the tree widget with the non-hidden entries of self.home,
    showing name, modification time, and (for files) a formatted size.

    NOTE(review): the header says "Size (Bites)"/"Path" but the columns
    actually hold mtime and size -- confirm against the UI design.
    """
    self.ui.treeWidget.setHeaderLabels(("File Name", "Size (Bites)", "Path" ))
    print (home)
    items = listdir(unicode(self.home))
    self.ui.treeWidget.clear()
    #self.ui.treeView.clear()
    for i in items:
        # Skip hidden files.
        if i.startswith("."):
            continue
        filepath = join(home, i)
        # Get the file information.
        stats = stat(filepath)
        # Create the item control.
        item_widget = QTreeWidgetItem(
            (i, strftime("%c", localtime(stats.st_mtime)).decode("utf-8"),
             size(stats.st_size) if isfile(filepath) else "")
        )
        print ("archivo",i, size(stats.st_size))
        # Set the matching icon.
        item_widget.setIcon(0, QIcon("images/%s.png" % ("file" if isfile(filepath) else "folder")))
        # Add the element.
        self.ui.treeWidget.addTopLevelItem(item_widget)
def dsInitDatabase(dsESEFile, workdir):
    """Open the dumped ESE table, resolve the column indexes of interest into
    ntds.dsfielddictionary, and return the open handle rewound to the start.

    The first line of *dsESEFile* must be the tab-separated header record.
    """
    global dsDatabaseSize
    dsDatabaseSize = stat(dsESEFile).st_size
    sys.stderr.write("\n[+] Initialising engine...\n")
    db = open(dsESEFile, "rb", 0)
    db.seek(0)
    line = db.readline()
    # NOTE(review): the file is opened binary yet compared/split as str --
    # this only works on Python 2; confirm the target interpreter.
    if line == "":
        sys.stderr.write("[!] Warning! Error processing the first line!\n")
        sys.exit()
    else:
        record = line.split("\t")
        # ESE column name -> ntds.dsfielddictionary attribute that stores its
        # index.  Replaces the original 40+-branch if-chain with one lookup.
        column_to_attr = {
            # object attributes
            "DNT_col": "dsRecordIdIndex",
            "PDNT_col": "dsParentRecordIdIndex",
            "time_col": "dsRecordTimeIndex",
            "Ancestors_col": "dsAncestorsIndex",
            "ATTb590606": "dsObjectTypeIdIndex",
            "ATTm3": "dsObjectNameIndex",
            "ATTm589825": "dsObjectName2Index",
            "ATTk589826": "dsObjectGUIDIndex",
            "ATTl131074": "dsWhenCreatedIndex",
            "ATTl131075": "dsWhenChangedIndex",
            "ATTq131091": "dsUSNCreatedIndex",
            "ATTq131192": "dsUSNChangedIndex",
            "OBJ_col": "dsObjectColIndex",
            "ATTi131120": "dsIsDeletedIndex",
            # deleted object attributes
            "ATTb590605": "dsOrigContainerIdIndex",
            # account object attributes
            "ATTr589970": "dsSIDIndex",
            "ATTm590045": "dsSAMAccountNameIndex",
            "ATTm590480": "dsUserPrincipalNameIndex",
            "ATTj590126": "dsSAMAccountTypeIndex",
            "ATTj589832": "dsUserAccountControlIndex",
            "ATTq589876": "dsLastLogonIndex",
            "ATTq591520": "dsLastLogonTimeStampIndex",
            "ATTq589983": "dsAccountExpiresIndex",
            "ATTq589920": "dsPasswordLastSetIndex",
            "ATTq589873": "dsBadPwdTimeIndex",
            "ATTj589993": "dsLogonCountIndex",
            "ATTj589836": "dsBadPwdCountIndex",
            "ATTj589922": "dsPrimaryGroupIdIndex",
            "ATTk589914": "dsNTHashIndex",
            "ATTk589879": "dsLMHashIndex",
            "ATTk589918": "dsNTHashHistoryIndex",
            "ATTk589984": "dsLMHashHistoryIndex",
            "ATTk591734": "dsUnixPasswordIndex",
            "ATTk36": "dsADUserObjectsIndex",
            "ATTk589949": "dsSupplementalCredentialsIndex",
            # computer object attributes
            "ATTm590443": "dsDNSHostNameIndex",
            "ATTm590187": "dsOSNameIndex",
            "ATTm590188": "dsOSVersionIndex",
            # bitlocker objects
            "ATTm591788": "dsRecoveryPasswordIndex",
            "ATTk591823": "dsFVEKeyPackageIndex",
            "ATTk591822": "dsVolumeGUIDIndex",
            "ATTk591789": "dsRecoveryGUIDIndex",
            # dial-in permission
            "ATTi590943": "dsDialInAccessPermission",
            # AD encryption
            "ATTk590689": "dsPEKIndex",
        }
        for cid in range(0, len(record) - 1):
            attr = column_to_attr.get(record[cid])
            if attr is not None:
                setattr(ntds.dsfielddictionary, attr, cid)
    db.seek(0)
    dsCheckMaps(db, workdir)
    return db
def mode(path):
    """Return the st_mode bits of *path*."""
    info = stat(path)
    return info.st_mode
def own(file_name):
    """Return the login name of the user owning *file_name*."""
    owner_uid = stat(file_name).st_uid
    return getpwuid(owner_uid).pw_name
# Demo: create a symlink and remove it again.
# NOTE(review): the link is created with a relative name but removed via an
# absolute path -- this only lines up while the CWD is the script directory.
os.symlink(os.getcwd() + '/data/test.txt', 'test_link')
os.unlink(os.getcwd() + '/test_link')
"""
readlink()
Return string representation of symlink target.
"""
print('--> readlink()')
os.symlink(os.getcwd() + '/data/test.txt', 'test_link')
rdlnk = os.readlink(os.getcwd() + '/test_link')
print(rdlnk)
os.unlink(os.getcwd() + '/test_link')
print(
    '# ============================================= Stat ============================================='
)
"""
stat()
Perform a stat system call on the given path.
"""
print('--> stat()')
st = os.stat(os.getcwd() + '/data/some_file.txt')
uid = st.st_uid
gid = st.st_gid
print(uid, gid)
# resolve numeric ids to user/group names via pwd/grp
user = pwd.getpwuid(uid)[0]
group = grp.getgrgid(gid)[0]
print(user, group)
"""
lstat()
Like stat(path), but do not follow symbolic links.
"""Speed up os.walk() significantly by using file attributes that FindFirst/Next give us instead of doing an extra stat(). Can also do the same thing with opendir/readdir on Linux. This is doubly useful when the user (caller of os.walk) is doing *another* stat() to get say the file sizes. On my tests (Windows 64-bit) our walk() is about 5x as fast as os.walk() for large directory trees, and 9x as fast if you're doing the file size thing. Note that these timings are "once it's in the cache", not first-time timings. Other advantages to using FindFirst/Next: * You can write a version of listdir() which yield results rather than getting all filenames at once -- better for "streaming" large directories. * You can use its built-in wildcard matching (globbing), which is presumably faster than getting all filenames and calling fnmatch on the result. (This one you couldn't do with opendir/readdir.) This isn't just me who noticed these were issues. See also: http://stackoverflow.com/questions/2485719/very-quickly-getting-total-size-of-folder http://stackoverflow.com/questions/4403598/list-files-in-a-folder-as-a-stream-to-begin-process-immediately http://bugs.python.org/issue15200 -- Titled "Faster os.walk", but actually unrelated """ import ctypes import os
def ls(cmd):
    """Write an `ls`-like listing of the CWD to output.txt.

    cmd is an argv-style list; cmd[1] (when present) selects the flag:
    -a, -l, -h, -ah, -la, -lh, -lah.  Relies on module-level helpers
    colorformat, colorformatdir, perm_dict and convert_bytes.

    NOTE(review): the hidden-file removal loops mutate the list while
    iterating it (entries after a removal can be skipped), and the -l
    branches sort a list of os.stat results -- that ordering only works on
    Python 2.  Documented, not changed.
    """
    os.system('touch output.txt')
    op_file = open("output.txt", 'w')
    # print with no flags to ls
    if len(cmd) == 1:
        list = os.listdir(os.getcwd())
        # drop dotfiles (mutates while iterating -- see docstring)
        for i in range(len(list)):
            if (i < len(list)):
                if list[i] != list[i].lstrip('.'):
                    if (i < len(list)):
                        list.remove(list[i])
        list.sort()
        for i in range(len(list)):
            if os.path.isdir(list[i]):
                list[i] = colorformatdir(list[i])
            else:
                list[i] = colorformat(list[i])
            op_file.write(list[i])
            op_file.write('\t')
    # checking for 'ls -a' command in command line arguments then displays the
    # long listing of files in the directory based on the last access time of files.
    else:
        # 1: -a -- every entry, including dotfiles
        if cmd[1] == '-a':
            list = os.listdir(os.getcwd())
            for i in range(len(list)):
                # text = list[i]+" "
                if os.path.isdir(list[i]):
                    list[i] = colorformatdir(list[i])
                else:
                    list[i] = colorformat(list[i])
                op_file.write(list[i])
                op_file.write('\t')
        # 2: -l -- long listing, hidden files excluded
        elif cmd[1] == '-l':
            flag = '-l'
            list1 = []
            list2 = []
            list = os.listdir(os.getcwd())
            for i in range(len(list)):
                if (i < len(list)):
                    if list[i] != list[i].lstrip('.'):
                        if (i < len(list)):
                            list.remove(list[i])
            list.sort()
            for i in list:
                w = getpwuid(stat(i).st_uid).pw_name
                f = os.stat(os.getcwd() + "/%s" % i)
                if flag == '-la':
                    list1.append(f.st_atime)
                elif flag == '-l':
                    list1.append(f)
                list2.append(f)
            list1.sort()
            for i in range(0, len(list1)):
                for k in list:
                    f = os.stat(os.getcwd() + "/%s" % k)
                    st1 = os.stat(k)
                    if flag == '-la':
                        temp1 = f.st_atime
                    elif flag == '-l':
                        temp1 = list2[0]
                    if list1[i] == temp1:
                        Size = f.st_size
                        Perm = int(oct(os.stat(k)[ST_MODE])[-3:])
                        Atime = time.asctime(time.localtime(st1[ST_ATIME]))
                        perm_str = perm_dict(Perm)
                        if os.path.isdir(k):
                            k = colorformatdir(k)
                            perm_str = 'd' + perm_str
                        else:
                            k = colorformat(k)
                            perm_str = '-' + perm_str
                        text = str(perm_str) + " " + str(w) + " " + str(Size) + " " + str(Atime) + " " + str(k) + "\n"
                        op_file.write(text)
        # checking for 'ls -lh' command in command line arguments then displays
        # the file according to human readable sizes
        # 3: -h -- short listing, hidden files excluded
        elif cmd[1] == '-h':
            list = os.listdir(os.getcwd())
            list.sort()
            for i in range(len(list)):
                if (i < len(list)):
                    if list[i] != list[i].lstrip('.'):
                        if (i < len(list)):
                            list.remove(list[i])
            for i in range(len(list)):
                if os.path.isdir(list[i]):
                    list[i] = colorformatdir(list[i])
                else:
                    list[i] = colorformat(list[i])
                op_file.write(list[i])
                op_file.write('\t')
        # 4: -ah -- same as -a
        elif cmd[1] == '-ah':
            list = os.listdir(os.getcwd())
            for i in range(len(list)):
                # text = list[i]+" "
                if os.path.isdir(list[i]):
                    list[i] = colorformatdir(list[i])
                else:
                    list[i] = colorformat(list[i])
                op_file.write(list[i])
                op_file.write('\t')
        # 5: -la -- long listing, dotfiles included
        elif cmd[1] == '-la':
            flag = '-l'
            list1 = []
            list2 = []
            list = os.listdir(os.getcwd())
            for i in list:
                w = getpwuid(stat(i).st_uid).pw_name
                f = os.stat(os.getcwd() + "/%s" % i)
                if flag == '-la':
                    list1.append(f.st_atime)
                elif flag == '-l':
                    list1.append(f)
                list2.append(f)
            list1.sort()
            for i in range(0, len(list1)):
                for k in list:
                    f = os.stat(os.getcwd() + "/%s" % k)
                    st1 = os.stat(k)
                    if flag == '-la':
                        temp1 = f.st_atime
                    elif flag == '-l':
                        temp1 = list2[0]
                    if list1[i] == temp1:
                        Size = f.st_size
                        Perm = int(oct(os.stat(k)[ST_MODE])[-3:])
                        Atime = time.asctime(time.localtime(st1[ST_ATIME]))
                        perm_str = perm_dict(Perm)
                        if os.path.isdir(k):
                            k = colorformatdir(k)
                            perm_str = 'd' + perm_str
                        else:
                            k = colorformat(k)
                            perm_str = '-' + perm_str
                        text = str(perm_str) + " " + str(w) + " " + str(Size) + " " + str(Atime) + " " + str(k) + "\n"
                        op_file.write(text)
        # 6: -lh -- long listing with human-readable sizes
        elif cmd[1] == '-lh':
            flag = '-l'
            list1 = []
            list2 = []
            list = os.listdir(os.getcwd())
            for i in list:
                w = getpwuid(stat(i).st_uid).pw_name
                f = os.stat(os.getcwd() + "/%s" % i)
                if flag == '-la':
                    list1.append(f.st_atime)
                elif flag == '-l':
                    list1.append(f)
                list2.append(f)
            list1.sort()
            for i in range(len(list)):
                if (i < len(list)):
                    if list[i] != list[i].lstrip('.'):
                        if (i < len(list)):
                            list.remove(list[i])
            for i in range(0, len(list1)):
                for k in list:
                    f = os.stat(os.getcwd() + "/%s" % k)
                    st1 = os.stat(k)
                    if flag == '-la':
                        temp1 = f.st_atime
                    elif flag == '-l':
                        temp1 = list2[0]
                    if list1[i] == temp1:
                        Size = convert_bytes(f.st_size)
                        Perm = int(oct(os.stat(k)[ST_MODE])[-3:])
                        Atime = time.asctime(time.localtime(st1[ST_ATIME]))
                        perm_str = perm_dict(Perm)
                        if os.path.isdir(k):
                            perm_str = 'd' + perm_str
                            k = colorformatdir(k)
                        else:
                            perm_str = '-' + perm_str
                            k = colorformat(k)
                        text = str(perm_str) + " " + str(w) + " " + str(Size) + " " + str(Atime) + " " + str(k) + "\n"
                        op_file.write(text)
        # checking for 'ls -l' command in command line arguments then displays
        # the file according to with filename, last access time, owner of
        # filename and size of the file
        # 7: -lah -- long listing, dotfiles included, human-readable sizes
        elif cmd[1] == '-lah':
            flag = '-l'
            list1 = []
            list2 = []
            list = os.listdir(os.getcwd())
            for i in list:
                w = getpwuid(stat(i).st_uid).pw_name
                f = os.stat(os.getcwd() + "/%s" % i)
                if flag == '-la':
                    list1.append(f.st_atime)
                elif flag == '-l':
                    list1.append(f)
                list2.append(f)
            list1.sort()
            for i in range(0, len(list1)):
                for k in list:
                    f = os.stat(os.getcwd() + "/%s" % k)
                    st1 = os.stat(k)
                    if flag == '-la':
                        temp1 = f.st_atime
                    elif flag == '-l':
                        temp1 = list2[0]
                    if list1[i] == temp1:
                        Size = convert_bytes(f.st_size)
                        Perm = int(oct(os.stat(k)[ST_MODE])[-3:])
                        Atime = time.asctime(time.localtime(st1[ST_ATIME]))
                        perm_str = perm_dict(Perm)
                        if os.path.isdir(k):
                            perm_str = 'd' + perm_str
                            k = colorformatdir(k)
                        else:
                            perm_str = '-' + perm_str
                            k = colorformat(k)
                        text = str(perm_str) + " " + str(w) + " " + str(Size) + " " + str(Atime) + " " + str(k) + "\n"
                        op_file.write(text)
    op_file.close()
    return
def isdir(path):
    # Truthy when *path* names a directory; 0 when it cannot be stat()ed.
    try:
        entry_mode = stat(path)[ST_MODE]
    except os.error:
        return 0
    return S_ISDIR(entry_mode)
if dir != "./": continue; if dir.startswith("resize_"): print "skipping thumbnail dir %s" % dir continue; files.sort() for f in files: for suffix in ends: if f.lower().endswith(suffix): imgSrc = "%s/%s" % (dir,f) print "checking %s" % (imgSrc) for x,y in sizes: imgDest = "%s/resize_%s/%s" % (dir, x, f.lower().replace(' ','_')) try : if stat(imgSrc)[ST_CTIME] <= stat(imgDest)[ST_CTIME]: print "skipping %s" % ( imgSrc) continue except OSError, err: #print "error? %s" % err pass q.put([imgSrc, imgDest, x, y]) class Worker( Thread ): def run(self): import Image import PIL.ExifTags while True: try: imgSrc, imgDest, x, y = q.get(False) except Empty:
# Input/output directory layout, relative to the analysis scripts.
root = "../"
data_out = root + "dat/"
raw_in = root + "raw/"


# %% import of the measured values
def stat(n):
    """Attach counting-statistics uncertainties sqrt(|n|+1) to counts.

    Accepts either an iterable (returns a unp.uarray) or a scalar
    (returns a single unc.ufloat).  NOTE: shadows any imported stat().
    """
    if isinstance(n, Iterable):
        return unp.uarray(n, np.sqrt(abs(n)+1))
    else:
        return unc.ufloat(n, np.sqrt(abs(n)+1))

# measurement 1: delay times and raw counts
# NOTE(review): reads messung2.csv from the messung1 directory -- confirm
# this file-name mismatch is intentional.
messung1 = pd.read_csv(raw_in + "messung1/messung2.csv")
dt = unp.uarray(messung1["dt"], 0.025 / np.sqrt(3))
counts = stat(messung1["#"])
messung1 = dt, counts

# measurement 3: angle scan, counts normalised by measurement time
messung3 = pd.read_csv(raw_in + "messung3/messung3.csv")
winkel = unp.uarray(messung3["Winkel"], 2/np.sqrt(6))
time = np.array(messung3["Messzeit"])
counts = stat(messung3["Counts"]) / time
messung3 = winkel, counts


# %% 1. delay duration
def gauss(x, A, x0, sigma, y0):
    """Gaussian with amplitude A, centre x0, width sigma and offset y0."""
    return A * np.exp(-((x-x0)/sigma)**2/2) + y0

def gaussUnc(x,A,x0,sigma,y0):
    """Same Gaussian, but uncertainty-aware (uses unp.exp)."""
    return A * unp.exp(-((x-x0)/sigma)**2/2) + y0
# Separators constants.
CURDIR = os.curdir
PARDIR = os.pardir
PATHSEP = os.pathsep
DIRSEP = os.sep
EXTSEP = os.extsep


class PathError(Exception):
    """Raised for path-related failures in this module."""
    pass


# General functions.
cwd = os.getcwd
chd = os.chdir


# Properties as functions.
def size(path):
    """File size as reported by stat()."""
    return stat(path)[SIZE]

def atime(path):
    """Access time as reported by stat()."""
    return stat(path)[ATIME]

def mtime(path):
    """Modification time as reported by stat()."""
    return stat(path)[MTIME]

def ctime(path):
    """Creation/change time as reported by stat()."""
    return stat(path)[CTIME]


# Path components as functions.  Some names (dir, file) intentionally match
# the original public API even though they shadow builtins.
full = os.path.abspath        # Full path (Absolute path)
drive = os.path.splitdrive    # -- see below: only the drive part is wanted

def drive(path):
    """Drive part."""
    return os.path.splitdrive(path)[0]

dir = os.path.dirname         # Path without filename
file = os.path.basename      # Name with extension

def filedir(path):
    """File parent dir."""
    return os.path.basename(os.path.dirname(path))

def name(path):
    """Name without extension."""
    return os.path.splitext(file(path))[0]

def ext(path):
    """Extension."""
    return os.path.splitext(path)[1]

# General path functions.
def stat(self):
    # Delegates to the module-level stat() -- presumably os.stat; confirm
    # the import at the top of the file -- applied to this object's path.
    return stat(self.path)
if os.path.islink(i): tmpstr += "l" elif decide: tmpstr += "d" else: tmpstr += "-" tmpstr += tmpdict[j] else: tmpstr += tmpdict[j] count += 1 try: t = time.ctime(os.path.getmtime(i))[4:-8].split() print "%s %3d %-4s %-4s %10d %3s %2s %2s %s" % ( tmpstr, os.stat(i)[3], getpwuid(stat(i).st_gid).pw_name, getpwuid(stat(i).st_uid).pw_name, os.stat(i).st_size, t[0], t[1], t[2], i.split("/")[-1], ) except OSError as e: print "ls: %s" % e else: if len(comm.split()) == 2 and ( comm.split()[1] != "-a" and comm.split()[1] != "-l" and comm.split()[1] != "-la" ): file = tmp[1]
def isdir(path):
    # Truthy when *path* is a directory; 0 when stat() fails.
    try:
        details = stat(path)
    except posix.error:
        return 0
    return S_ISDIR(details[ST_MODE])
def dsInitDatabase(dsESEFile, workdir):
    """Open the dumped ESE table, resolve the column indexes of interest into
    ntds.dsfielddictionary, and return the open handle rewound to the start.

    The first line of *dsESEFile* must be the tab-separated header record.
    """
    global dsDatabaseSize
    dsDatabaseSize = stat(dsESEFile).st_size
    sys.stderr.write("\n[+] Initialising engine...\n")
    db = open(dsESEFile, 'rb', 0)
    db.seek(0)
    line = db.readline()
    # NOTE(review): binary handle compared/split as str -- Python 2 only;
    # confirm the target interpreter.
    if line == "":
        sys.stderr.write("[!] Warning! Error processing the first line!\n")
        sys.exit(1)
    else:
        record = line.split('\t')
        # ESE column name -> ntds.dsfielddictionary attribute that stores its
        # index.  Replaces the original 40+-branch if-chain with one lookup.
        _COLUMN_INDEX_MAP = {
            # object attributes
            'DNT_col': 'dsRecordIdIndex',
            'PDNT_col': 'dsParentRecordIdIndex',
            'time_col': 'dsRecordTimeIndex',
            'Ancestors_col': 'dsAncestorsIndex',
            'ATTb590606': 'dsObjectTypeIdIndex',
            'ATTm3': 'dsObjectNameIndex',
            'ATTm589825': 'dsObjectName2Index',
            'ATTk589826': 'dsObjectGUIDIndex',
            'ATTl131074': 'dsWhenCreatedIndex',
            'ATTl131075': 'dsWhenChangedIndex',
            'ATTq131091': 'dsUSNCreatedIndex',
            'ATTq131192': 'dsUSNChangedIndex',
            'OBJ_col': 'dsObjectColIndex',
            'ATTi131120': 'dsIsDeletedIndex',
            # deleted object attributes
            'ATTb590605': 'dsOrigContainerIdIndex',
            # account object attributes
            'ATTr589970': 'dsSIDIndex',
            'ATTm590045': 'dsSAMAccountNameIndex',
            'ATTm590480': 'dsUserPrincipalNameIndex',
            'ATTj590126': 'dsSAMAccountTypeIndex',
            'ATTj589832': 'dsUserAccountControlIndex',
            'ATTq589876': 'dsLastLogonIndex',
            'ATTq591520': 'dsLastLogonTimeStampIndex',
            'ATTq589983': 'dsAccountExpiresIndex',
            'ATTq589920': 'dsPasswordLastSetIndex',
            'ATTq589873': 'dsBadPwdTimeIndex',
            'ATTj589993': 'dsLogonCountIndex',
            'ATTj589836': 'dsBadPwdCountIndex',
            'ATTj589922': 'dsPrimaryGroupIdIndex',
            'ATTk589914': 'dsNTHashIndex',
            'ATTk589879': 'dsLMHashIndex',
            'ATTk589918': 'dsNTHashHistoryIndex',
            'ATTk589984': 'dsLMHashHistoryIndex',
            'ATTk591734': 'dsUnixPasswordIndex',
            'ATTk36': 'dsADUserObjectsIndex',
            'ATTk589949': 'dsSupplementalCredentialsIndex',
            # computer object attributes
            'ATTm590443': 'dsDNSHostNameIndex',
            'ATTm590187': 'dsOSNameIndex',
            'ATTm590188': 'dsOSVersionIndex',
            # bitlocker objects
            'ATTm591788': 'dsRecoveryPasswordIndex',
            'ATTk591823': 'dsFVEKeyPackageIndex',
            'ATTk591822': 'dsVolumeGUIDIndex',
            'ATTk591789': 'dsRecoveryGUIDIndex',
            # dial-in permission
            'ATTi590943': 'dsDialInAccessPermission',
            # AD encryption
            'ATTk590689': 'dsPEKIndex',
        }
        for cid in range(0, len(record) - 1):
            target_attr = _COLUMN_INDEX_MAP.get(record[cid])
            if target_attr is not None:
                setattr(ntds.dsfielddictionary, target_attr, cid)
    db.seek(0)
    dsCheckMaps(db, workdir)
    return db
"""Speed up os.walk() significantly by using file attributes that FindFirst/Next give us instead of doing an extra stat(). Can also do the same thing with opendir/readdir on Linux. This is doubly useful when the user (caller of os.walk) is doing *another* stat() to get say the file sizes. On my tests (Windows 64-bit) our walk() is about 5x as fast as os.walk() for large directory trees, and 9x as fast if you're doing the file size thing. Note that these timings are "once it's in the cache", not first-time timings. Other advantages to using FindFirst/Next: * You can write a version of listdir() which yield results rather than getting all filenames at once -- better for "streaming" large directories. * You can use its built-in wildcard matching (globbing), which is presumably faster than getting all filenames and calling fnmatch on the result. (This one you couldn't do with opendir/readdir.) This isn't just me who noticed these were issues. See also: http://stackoverflow.com/questions/2485719/very-quickly-getting-total-size-of-folder http://stackoverflow.com/questions/4403598/list-files-in-a-folder-as-a-stream-to-begin-process-immediately http://bugs.python.org/issue15200 -- Titled "Faster os.walk", but actually unrelated """ import ctypes
"""Maintain a cache of stat() information on files.
def atime(path): """Return time of most recent access""" return stat(path).st_atime
def ls(flags, params, directs):
    """A minimal ls(1) work-alike.

    flags   -- collection of option strings; supported: '-l' (long
               listing), '-a' (include dotfiles), '-h' (human-readable
               sizes, only meaningful together with '-l')
    params  -- currently unused; kept for interface compatibility
    directs -- if non-empty, the entry names are written to the file
               named by directs[0] instead of being printed

    NOTE(review): relies on a module-level `path` naming the directory
    to list -- confirm it is set before calling.

    Fixes over the previous version: lstat() is applied to each entry
    (the old code stat'ed the directory itself every iteration); the
    permission triplets are printed in owner/group/other order (were
    reversed); the size-unit ladder ascends bytes->KB->MB->GB (was
    bytes->MB->KB->GB); and the dead branches built on always-true
    conditions (`f == '-l' and '-a'`) and undefined names (`stat_rn`,
    `datetime.utcfromtimestap`, `os.getcwd(uid)`) are gone.
    """
    # Octal permission digit -> symbolic rwx string.
    rwx = {0: '---', 1: '--x', 2: '-w-', 3: '-wx',
           4: 'r--', 5: 'r-x', 6: 'rw-', 7: 'rwx'}

    entries = os.listdir(path)
    now = int(time.time())
    recent = now - (4 * 30 * 24 * 60 * 60)  # roughly four months ago

    if directs:
        # Redirection mode: dump the raw entry names into the target file.
        with open(directs[0], 'w') as outfile:
            for entry in entries:
                outfile.write(entry)
        return

    show_hidden = '-a' in flags
    long_format = '-l' in flags
    human_sizes = '-h' in flags

    for entry in entries:
        full = os.path.join(path, entry)
        try:
            # lstat the entry itself, not the enclosing directory.
            st = os.lstat(full)
        except OSError:
            sys.stderr.write("%s: No such file or directory\n" % full)
            continue

        if entry.startswith('.') and not show_hidden:
            continue

        # A trailing slash marks directories, as before.
        display = entry + "/" if os.path.isdir(full) else entry

        if not long_format:
            print(display)
            continue

        # Permission string in the conventional owner/group/other order.
        mode = st.st_mode & 0o777
        perms = rwx[(mode >> 6) & 7] + rwx[(mode >> 3) & 7] + rwx[mode & 7]

        # ls-style timestamp: old or future mtimes show the year instead
        # of the clock time.
        mtime = st.st_mtime
        fmt = "%b %e %Y" if (mtime < recent or mtime > now) else "%b %e %R"
        when = time.strftime(fmt, time.gmtime(mtime))

        owner = "%-3s" % st.st_uid   # numeric ids; name lookup needs pwd/grp
        group = "%-3s" % st.st_gid
        nlink = "%4d" % st.st_nlink

        if human_sizes:
            size = st.st_size
            for unit in ('bytes', 'KB', 'MB', 'GB'):
                if size < 1024:
                    size_str = "%s%s" % (size, unit)
                    break
                size //= 1024
            else:
                size_str = "%sTB" % size
        else:
            size_str = "%8d" % st.st_size

        print(perms, nlink, owner, group, size_str, when, display)
def custom_stat(path, self=self): stat_info = stat(path) self._register_kevent(path, stat.S_ISDIR(stat_info.st_mode)) return stat_info
def _glob(cls, pth, comp_pattern, topdown, onlydirs, onlyfiles, positions, on_path, stat):
    """
    The recursive function used by glob.

    This version treats symbolic links as files. Broken symlinks won't
    be listed.

    pth is a dir in which we search.
    comp_pattern is the compiled pattern. It's a sequence which should
    consist of three kinds of elements:
    * None - matches any number of subdirectories, including 0.
    * a string - a normalized name, when exactly one name can be matched.
    * a regexp - for testing if normalized names match.
    positions is a sequence of positions on comp_pattern that children of
    path may match. On the first call, it will be [0].
    on_path is a set of inode identifiers on path, or None if circles
    shouldn't be checked.
    stat is the appropriate stat function - cls.stat or cls.lstat.

    Yields matching paths (pth + name objects).
    """
    if len(positions) == 1 and isinstance(comp_pattern[positions[0]], str):
        # We don't have to listdir if exactly one file name can match.
        # Since we always stat the files, it's ok if the file doesn't exist.
        listdir = [comp_pattern[positions[0]]]
    else:
        listdir = os.listdir(str(pth))
        listdir.sort()
    for subfile in listdir:
        newpth = pth + subfile
        # We don't strictly need to stat a file if we don't follow symlinks
        # AND positions == [len(comp_pattern)-1] AND
        # not isinstance(comp_pattern[-1], str), but do me a favour...
        try:
            st = stat(newpth)
        except OSError:
            # Nonexistent name or broken symlink -- silently skip it.
            continue
        newpositions = []
        subfilenorm = cls.normcasestr(subfile)
        if topdown:
            # If not topdown, it happens after we handle subdirs
            if positions[-1] == len(comp_pattern) - 1:
                if cls._match_element(comp_pattern[-1], subfilenorm):
                    # NOTE(review): st.isdir/st.isfile suggest a custom
                    # stat-result wrapper, not os.stat_result -- confirm.
                    if not ((onlydirs and not st.isdir) or (onlyfiles and not st.isfile)):
                        yield newpth
        # Work out which pattern positions the children of this entry
        # may match; only directories can have children.
        for pos in reversed(positions):
            if st.isdir:
                comp_element = comp_pattern[pos]
                if pos + 1 < len(comp_pattern):
                    if cls._match_element(comp_element, subfilenorm):
                        newpositions.append(pos + 1)
                        if comp_pattern[pos + 1] is None:
                            # We should stop at '..'
                            break
                if comp_element is None:
                    newpositions.append(pos)
                    # We don't have to break - there are not supposed
                    # to be any positions before '..'.
    if newpositions:
        newpositions.reverse()
        if on_path is not None:
            # Cycle detection: remember this dir's identity while we
            # recurse into it, and refuse to enter it twice.
            newpath_id = cls._id(st)
            if newpath_id in on_path:
                raise OSError, "Circular path encountered"
            on_path.add(newpath_id)
        for x in cls._glob(newpth, comp_pattern, topdown, onlydirs, onlyfiles, newpositions, on_path, stat):
            yield x
        if on_path is not None:
            on_path.remove(newpath_id)
    if not topdown:
        # If topdown, it happens after we handle subdirs
        if positions[-1] == len(comp_pattern) - 1:
            if cls._match_element(comp_pattern[-1], subfilenorm):
                if not ((onlydirs and not st.isdir) or (onlyfiles and not st.isfile)):
                    yield newpth
def mtime(path): """time of most recent content modification""" return stat(path).st_mtime
octalPerm = int( octalPerm ) # converting permissions to int --> Example: octal 230 [ownership --w-wx---] octalP = octalPerm // 10 # Example: 23 = 230 // 10 Owner = octalP // 10 # Owner ex. 2 = 23 // 10 GroupP = octalP % 10 # GroupP ex. 3 = 23 % 10 Others = octalPerm % 10 # Others ex. 0 = 230 % 10 ts = stat_info.st_mtime time_m = stat_info.st_mtime if (ts < recent) or (ts > now): time_fmt = "%b %e %Y" else: time_fmt = "%b %e %R" time_str = time.strftime(time_fmt, time.gmtime(ts)) time_str2 = time.strftime(time_fmt, time.gmtime(time_m)) name = stat(file).st_uid # User id of the owner try: name = "%-3s" % os.getcwd(stat_info.st_uid)[0] except: name = "%-3s" % (stat_info.st_uid) try: group = "%-3s" % os.getegid(stat_info.st_gid)[0] except: group = "%-3s" % ( stat_info).st_gid # Group id of the owner nlink = "%4d" % stat_info.st_nlink total = len([ name for name in os.listdir('.') if os.path.isfile(file) ])
def ctime(path): """platform dependent; time of most recent metadata change on Unix, or the time of creation on Windows""" return stat(path).st_ctime
if match: self.backupDate = self._parse_backup_date(parts[1]) self.host = parts[2] else: self.host = parts[1] try: self.backupDate = self._parse_backup_date(parts[2]) except IndexError, err: if 'nxbackup' in self.backupType or \ 'nexpose' in self.backupType: self.backupDate = -1 else: print(self.backupType) raise (err) #print("Base: {0}, Name:{1}, Ext:{2}".format(base, name2, ext)) fileStats = stat(fullfilename) self.fileMode = oct(fileStats.st_mode) self.ownerUID = fileStats.st_uid self.ownerGID = fileStats.st_gid self.size = fileStats.st_size self.atime = fileStats.st_atime self.dt_atime = datetime.fromtimestamp(fileStats.st_atime) self.mtime = fileStats.st_mtime self.dt_mtime = datetime.fromtimestamp(fileStats.st_mtime) self.ctime = fileStats.st_ctime self.dt_ctime = datetime.fromtimestamp(fileStats.st_ctime) def test(): # create a bogus backup objects and then print all the properties ###