def find_files(self):
    """Walk the tree rooted at the current working directory and return
    a list of the filepaths whose extension matches one of the Unix
    shell-style wildcard patterns in self.file_patterns.

    Each matching file appears at most once in the returned list.
    """
    from scitools.misc import find
    import fnmatch  # Unix shell-style wildcard matching; hoisted so it
                    # is not re-executed for every visited file

    def check(filepath, arg):
        # Append filepath at most once, even if its extension matches
        # several patterns (the per-pattern append could otherwise list
        # the same file multiple times).
        ext = os.path.splitext(filepath)[1]
        if any(fnmatch.fnmatch(ext, s) for s in self.file_patterns):
            arg.append(filepath)

    files = []
    find(check, os.curdir, files)
    return files
# NOTE(review): the first part of this chunk is the body of a callback
# (presumably ``def copy(filename, suffices=..., age_limit=..., ...)``,
# judging from the names it reads and the ``find(copy, ...)`` call below)
# whose ``def`` header is not visible in this chunk — confirm against the
# full file before relying on the exact signature.

# Decide whether filename's suffix matches one of the wanted suffices.
treat_file = 0
filesuffix = string.split(filename, '.')[-1]
for s in suffices:
    if fnmatch.fnmatch(filesuffix, s):
        treat_file = 1
# How many days ago was the file last modified?
last_modification = os.path.getmtime(filename)
age = time() - last_modification
age = age/(60*60*24)  # convert seconds to days
# Copy the file only when both the suffix and the age criteria hold.
if treat_file and age <= age_limit:
    shutil.copy(filename, copydir)
    last_m = strftime("%a %b %d %H:%M:%S %Y", localtime(last_modification))
    print "\ncopying %s\n (last modified %s) to %s" % \
          (filename, last_m, copydir)

# Driver: apply the copy callback to every tree in roots, keeping files
# modified within the last 3 days.
for dir in roots:
    find(copy, dir, suffices=suffices, age_limit=3)

# check if there are files in copydir
files = os.listdir(copydir)
nfiles = len(files)
if nfiles == 0:
    # Nothing qualified: remove the (now useless) destination directory.
    print "\n\nno files were copied"
    shutil.rmtree(copydir)
else:
    print "\n\n", len(files), "were copied"
# Report on a single file, using a stat result obtained earlier.
# NOTE(review): myfile_stat, myfile, filesize and last_access are defined
# outside this chunk — confirm against the full file.
mode = myfile_stat[stat.ST_MODE]
if stat.S_ISREG(mode):
    print "%(myfile)s is a regular file with %(filesize)d bytes " \
          "and last accessed %(last_access)s" % vars()

from scitools.misc import find

# Threshold compared against os.path.getsize (bytes) below.
# NOTE(review): 200 bytes looks very small for a "big file" report —
# presumably a demo value; confirm the intended unit.
size_limit = 200

def checksize2(filepath, bigfiles):
    """find() callback: record filepath in bigfiles when its size
    exceeds the module-level size_limit (reported in megabytes)."""
    size = os.path.getsize(filepath)
    if size > size_limit:
        bigfiles.append('%.2fMb %s' % (size/1000000.0,filepath))

# Driver: collect and print the big files under $scripting/src/py/regex.
bigfiles = []
root = os.path.join(os.environ['scripting'],'src', 'py', 'regex')
find(checksize2, root, bigfiles)
for fileinfo in bigfiles:
    print fileinfo

import fnmatch

def checksize3(filepath, arg):
    """find() callback: like checksize2, but driven by a dict argument.

    Expects arg to hold 'extensions' (wildcard patterns matched against
    the file's extension), 'size_limit' (bytes), and 'filelist' (output
    list of {'size': ..., 'name': ...} dicts).
    """
    treat_file = False
    ext = os.path.splitext(filepath)[1]
    import fnmatch # Unix shell-style wildcard matching
    for s in arg['extensions']:
        if fnmatch.fnmatch(ext, s):
            treat_file = True
    size = os.path.getsize(filepath)
    if treat_file and size > arg['size_limit']:
        size = '%.2fMb' % (size/1000000.0) # pretty print
        arg['filelist'].append({'size': size, 'name': filepath})