def is_file(self):
    """
    Whether this path is a regular file (also True for symlinks pointing
    to regular files).
    """
    try:
        mode = self.stat().st_mode
    except OSError as exc:
        if not _ignore_error(exc):
            raise
        # Path doesn't exist or is a broken symlink
        # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
        return False
    except ValueError:
        # Non-encodable path
        return False
    return S_ISREG(mode)
async def render_get(self, request):
    """Serve /.well-known/core, a directory listing, or a regular file."""
    if request.opt.uri_path == ('.well-known', 'core'):
        link_header = str(self.get_resources_as_linkheader())
        return aiocoap.Message(payload=link_header.encode('utf8'),
                               content_format=40)

    path = self.request_to_localpath(request)
    try:
        st = path.stat()
    except FileNotFoundError:
        raise NoSuchFile()

    mode = st.st_mode
    if S_ISDIR(mode):
        return await self.render_get_dir(request, path)
    if S_ISREG(mode):
        return await self.render_get_file(request, path)
def is_file(self):
    """
    Whether this path is a regular file (also True for symlinks pointing
    to regular files).
    """
    try:
        mode = self.stat().st_mode
    except OSError as exc:
        if exc.errno not in _IGNORED_ERROS:
            raise
        # Path doesn't exist or is a broken symlink
        # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
        return False
    except ValueError:
        # Non-encodable path
        return False
    return S_ISREG(mode)
def manage_migrations_not_applied(self, filepath):
    """Apply every pending .sql migration found in *filepath*.

    Migrations are executed oldest-first by creation time.  The original
    code computed the ctime for each file but never sorted on it, so
    migrations were applied in arbitrary glob order -- a real hazard when
    later migrations depend on earlier ones.
    """
    files = glob.glob("{}/*.sql".format(filepath))
    entries = ((os.stat(path), path) for path in files)
    entries = ((st[ST_CTIME], path)
               for st, path in entries if S_ISREG(st[ST_MODE]))
    for cdate, file in sorted(entries):
        filename = file.split('/')[-1].replace('.sql', '')
        # Execute it only with trusted code
        with open(file, 'r') as reader:
            content = reader.read()
        if not self.check_if_already_applied(filename):
            self.apply_migrations(content, filename)
        # else: migration already applied -- skip silently
def datesorteddir(sortdir):
    """Return [(index, mtime, path)] for the regular files in *sortdir*,
    sorted oldest-first by modification time.

    The original comment claimed "creation date" but the code keys on
    ST_MTIME; the docstring now matches the code.  The manual counter
    loop is replaced with enumerate().
    """
    paths = (os.path.join(sortdir, fn) for fn in os.listdir(sortdir))
    stats = ((os.stat(p), p) for p in paths)
    # keep regular files only, keyed by modification time
    dated = ((st[ST_MTIME], p) for st, p in stats if S_ISREG(st[ST_MODE]))
    return [(i, mdate, path)
            for i, (mdate, path) in enumerate(sorted(dated))]
def ls(self, dirc=None, file_eval=lambda a: True, dir_eval=lambda a: True):
    """List the entries of *dirc* (or the remote working directory),
    keeping directories accepted by *dir_eval* and regular files
    accepted by *file_eval*."""
    listing = self.sftp.listdir_attr(dirc) if dirc else self.sftp.listdir_attr()
    names = []
    for attr in listing:
        if S_ISDIR(attr.st_mode):
            if dir_eval(attr.filename):
                names.append(attr.filename)
        elif S_ISREG(attr.st_mode) and file_eval(attr.filename):
            names.append(attr.filename)
    return names
def pulisci(self, max_files=30):
    """Delete the oldest backups, keeping at most *max_files* of them."""
    # Collect (ctime, path) for every regular file in the backup folder.
    candidates = []
    for fn in os.listdir(self.cartella_backup):
        path = os.path.join(self.cartella_backup, fn)
        st = os.stat(path)
        if S_ISREG(st[ST_MODE]):
            candidates.append((st[ST_CTIME], path))
    # Newest first; everything past the cut-off is removed.
    candidates.sort(reverse=True)
    for count, (cdate, path) in enumerate(candidates, start=1):
        if count > max_files:
            os.remove(path)
def isfile(self):
    """
    Check if this file path refers to a regular file.

    :return: C{True} if this :py:class:`FilePath` points to a regular file
        (not a directory, socket, named pipe, etc), C{False} otherwise.
    :rtype: L{bool}
    """
    if not self.statinfo:
        # No cached stat yet; refresh without raising on failure.
        self.restat(False)
    st = self.statinfo
    return bool(st) and S_ISREG(st.st_mode)
def curate_cache(directory, min_free_percent=5.0, min_free_disk=50):
    """Clear out the directory if needed

    This assumes all the files in the directory can be deleted as freely

    Args:
        directory (str): directory path that holds cached files
        min_free_percent (float): percentage (0.0-100.0) of drive to keep free,
                                  default is 5% if not specified.
        min_free_disk (float): minimum allowed disk space in MB, default
                               value is 50 MB if not specified.
    """
    # Simpleminded implementation -- keep a certain percentage of the
    # disk available.
    # TODO: Would be easy to add more options, like whitelisted files, etc.
    space = psutil.disk_usage(directory)

    min_free_disk *= 1024 * 1024  # convert from MB to bytes
    percent_free = 100.0 - space.percent
    if percent_free < min_free_percent and space.free < min_free_disk:
        LOG.info('Low diskspace detected, cleaning cache')
        # calculate how many bytes we need to delete
        bytes_needed = (min_free_percent - percent_free) / 100.0 * space.total
        bytes_needed = int(bytes_needed + 1.0)

        # all regular files in the directory as (mtime, size, path)
        entries = (os.path.join(directory, fn) for fn in os.listdir(directory))
        entries = ((os.stat(path), path) for path in entries)
        entries = ((stat[ST_MTIME], stat[ST_SIZE], path)
                   for stat, path in entries if S_ISREG(stat[ST_MODE]))

        # delete files with oldest modification date until space is freed
        space_freed = 0
        for moddate, fsize, path in sorted(entries):
            try:
                os.remove(path)
                space_freed += fsize
            except OSError:
                # Best-effort cleanup: skip files we cannot delete
                # (in use, permission denied, already gone).  The
                # original bare ``except`` also swallowed
                # KeyboardInterrupt/SystemExit.
                continue
            if space_freed > bytes_needed:
                return  # deleted enough!
def authorized_keys_permissions_ok(self) -> bool:
    """
    Indicates whether ~/.ssh/authorized_keys exists, is owned by the user,
    and is only writable by the user.
    """
    # pylint: disable=R0911
    if not self.ssh_dir_permissions_ok:
        return False

    key_path = self.home + "/.ssh/authorized_keys"
    if not exists(key_path):
        log.debug("User %s does not have ~/.ssh/authorized_keys: %s",
                  self.name, key_path)
        return False

    try:
        key_stat = stat(key_path)
    except OSError as err:
        log.error("Unable to stat %s: %s", key_path, err)
        return False

    # The file must belong to the user being checked.
    if key_stat.st_uid != self.uid:
        log.warning(
            "User %s does not own ~/.ssh/authorized_keys file %s: user "
            "uid %d, owner uid %d", self.name, key_path, self.uid,
            key_stat.st_uid)
        return False

    if not S_ISREG(key_stat.st_mode):
        log.warning("User %s ~/.ssh/authorized_keys file %s is not a file",
                    self.name, key_path)
        return False

    # Only the owner may have write permission.
    perm_bits = S_IMODE(key_stat.st_mode)
    if perm_bits & 0o020:
        log.warning(
            "User %s ~/.ssh/authorized_keys file %s is group-writable",
            self.name, key_path)
        return False
    if perm_bits & 0o002:
        log.warning(
            "User %s ~/.ssh/authorized_keys file %s is other-writable",
            self.name, key_path)
        return False

    return True
def get_previous_filename(filepath, mode='number'):
    """Return the path of the file "before" *filepath*, or None.

    mode='number': decrement the numeric suffix of the file name and
        return that path if it exists (also trying a one-digit-shorter
        zero padding, e.g. img_010 -> img_09).
    mode='time': among files in the same directory with the same
        extension, return the one whose creation time immediately
        precedes *filepath*'s.

    Bug fixed: the time branch compared ``ind is not 0`` -- an identity
    test against an int literal, which is implementation-defined -- now
    ``ind != 0``.
    """
    complete_path = os.path.abspath(filepath)
    directory, file_str = os.path.split(complete_path)
    filename, file_type_str = file_str.split('.')
    if mode == 'number':
        file_number_str = FileNameIterator._get_ending_number(filename)
        try:
            file_number = int(file_number_str)
        except ValueError:
            return None
        file_base_str = filename[:-len(file_number_str)]

        # Try the same zero-padding width first.
        format_str = '0' + str(len(file_number_str)) + 'd'
        number_str = ("{0:" + format_str + '}').format(file_number - 1)
        new_file_name = file_base_str + number_str + '.' + file_type_str
        new_complete_path = os.path.join(directory, new_file_name)
        if os.path.exists(new_complete_path):
            return new_complete_path

        # Then a padding one digit shorter (e.g. 100 -> 99).
        format_str = '0' + str(len(file_number_str) - 1) + 'd'
        number_str = ("{0:" + format_str + '}').format(file_number - 1)
        new_file_name = file_base_str + number_str + '.' + file_type_str
        new_complete_path = os.path.join(directory, new_file_name)
        if os.path.exists(new_complete_path):
            return new_complete_path
        return None
    elif mode == 'time':
        files_list = os.listdir(directory)
        files = []
        for file in files_list:
            if file.endswith(file_type_str):
                files.append(file)
        paths = (os.path.join(directory, file) for file in files)
        entries = ((os.stat(path), path) for path in paths)
        # regular files only, sorted by (ctime, path)
        entries = list(
            sorted(((stat[ST_CTIME], path) for stat, path in entries
                    if S_ISREG(stat[ST_MODE]))))
        for ind, entry in enumerate(entries):
            if entry[1] == complete_path and ind != 0:
                return entries[ind - 1][1]
        return None
async def backup(self, ctx, server_name, action):
    """Discord command: create or list world backups for *server_name*.

    action == "create": ask the invoker for a backup name (60 s timeout),
    then zip the server's world directory into its backup directory.
    action == "list": print existing backups sorted by creation time.
    """
    if action == "create":
        author = ctx.author
        channel = ctx.channel
        timeout_embed = discord.Embed(
            title='Sorry your request has timed out', color=0xFF0000)
        name_query = await ctx.send(embed=discord.Embed(
            title='What would you like to name the backup?',
            description="Please do not use slashes"))

        def check(m):
            # Only accept a reply from the same user in the same channel.
            return m.channel == channel and m.author == author

        try:
            msg = await self.client.wait_for('message', check=check,
                                             timeout=60.0)
            backup_name = msg.content
            await msg.delete()
        except asyncio.TimeoutError:
            await name_query.edit(embed=timeout_embed)
            return
        else:
            embed = await ctx.send(embed=discord.Embed(
                title=f'Creating Backup `{backup_name}`', color=0xADD8E6))
            shutil.make_archive(
                f'{config["servers"][server_name]["backup_directory"]}/{backup_name}',
                'zip',
                config["servers"][server_name]["world_directory"])
            # Message.delete() is a coroutine; the original called it
            # without awaiting, so the progress message was never removed.
            await embed.delete()
            await ctx.send(embed=discord.Embed(
                title=f'Backup `{backup_name}` created'))
    elif action == "list":
        dir_path = config["servers"][server_name]["backup_directory"]
        result = []
        data = (os.path.join(dir_path, fn) for fn in os.listdir(dir_path))
        data = ((os.stat(path), path) for path in data)
        data = ((stat[ST_CTIME], path) for stat, path in data
                if S_ISREG(stat[ST_MODE]))
        for cdate, path in sorted(data):
            # (creation time, file name) pairs, oldest first.
            # (The original appended ``backups_list * 1`` -- a no-op.)
            result.append((time.ctime(cdate), os.path.basename(path)))
        result_final = "\n".join(" ".join(x) for x in result)
        await ctx.send(f'```python\n{result_final}```')
def make_view(session, vim, item):
    """Create and return a View() instance that displays `item`."""
    view_args = (session, vim, item)
    if item is None:
        # TODO Use the same view always
        return MessageView(*view_args, message='(nothing to show)')
    st, st_err = stat_path(item, lstat=False)
    if st_err is not None:
        return MessageView(*view_args, message=str(st_err),
                           hl_group='NvfmError')
    file_mode = st.st_mode
    if S_ISDIR(file_mode):
        return DirectoryView(*view_args)
    # TODO Check the stat() of the link
    if S_ISREG(file_mode):
        return FileView(*view_args)
    return MessageView(*view_args, message='(%s)' % filetype_str(file_mode))
def cached_find_resource(self, node, filename): try: nd = node.ctx.cache_nd except: nd = node.ctx.cache_nd = {} tup = (node, filename) try: return nd[tup] except KeyError: ret = node.find_resource(filename) try: if not S_ISREG(os.stat(ret.abspath())[ST_MODE]): ret = None except: ret = None nd[tup] = ret return ret
def do_GET(self):
    """Serve a static file under Cfg.base_path, issuing a session id.

    HTML files are served from memory with ``$SID`` replaced by a fresh
    session id; all other files are streamed from disk unchanged.

    Bug fixed: the original read *every* file in text mode and set
    Content-Length from the SID-substituted text even for non-HTML
    files, while the body actually sent was the raw file -- a header /
    body mismatch (and a potential UnicodeDecodeError on binary files).
    """
    self.protocol_version = 'HTTP/1.1'
    if self.path == '/':
        self.path = '/index.html'
    path = abspath(Cfg.base_path + '/' + self.path)
    # Refuse path traversal outside the document root.
    if not path.startswith(Cfg.base_path):
        return self.error_404()
    extension = self.path.split('.')[-1]
    content_type = Cfg.content_types.get(extension, '')
    if not content_type:
        return self.error_404()
    st = stat(path)
    if not S_ISREG(st.st_mode):
        return self.error_404()
    content_length = st.st_size
    last_modified = formatdate(st.st_mtime)
    if extension == 'html':
        # HTML is generated per request, so it is always "modified now".
        last_modified = formatdate()
    # Derive a session id from the path, client address and timestamp.
    host, port = self.client_address
    hash_items = [path, str(host), str(port), last_modified]
    hash_text = ':'.join(hash_items)
    hash_data = hash_text.encode('utf-8')
    sid = hashlib.sha512(hash_data).hexdigest()
    SessionManager.set(sid, {'status': 'NEW'})
    if extension == 'html':
        # Substitute the session id; Content-Length must reflect the
        # substituted payload, not the on-disk size.
        with open(path, 'r') as f:
            content = f.read()
        content = re.sub(r'\$SID\b', sid, content)
        content = content.encode('utf-8')
        content_length = len(content)
    self.send_response(200, 'OK')
    self.send_header('Content-Type', content_type)
    self.send_header('Content-Length', content_length)
    self.send_header('Last-Modified', last_modified)
    self.end_headers()
    if extension == 'html':
        self.wfile.write(content)
    else:
        with open(path, 'rb') as src:
            copyfileobj(src, self.wfile)
def last_image():
    """Serve the most recently created file from ./images as a JPEG.

    Bug fixed: the original did ``sorted(...)[0]`` before checking for
    emptiness, so an empty directory raised IndexError instead of
    returning the 400 response (the ``if not last_created_image`` check
    was dead code -- a (ctime, path) tuple is always truthy).
    """
    dir_path = os.path.join(os.getcwd(), 'images')
    # all entries in the directory w/ stats
    data = (os.path.join(dir_path, fn) for fn in os.listdir(dir_path))
    data = ((os.stat(path), path) for path in data)
    # regular files only, keyed by creation date
    data = ((stat[ST_CTIME], path) for stat, path in data
            if S_ISREG(stat[ST_MODE]))
    images = sorted(data, reverse=True)
    if not images:
        return "Image not found", 400
    last_created_image = images[0]
    sleep(1)
    return send_file(os.path.join(dir_path, last_created_image[1]),
                     mimetype='image/jpeg')
def connect(device=DEFAULT_DEVICE):
    """Open a writable handle to *device*; the BLEMU_DEVICE environment
    variable overrides the argument.  Character devices are opened via
    pyserial; FIFOs and regular files as plain writable files."""
    device = os.getenv("BLEMU_DEVICE", device)
    if not os.path.exists(device):
        raise ValueError("Device `%s` does not exist. Cannot connect" % device)

    dev_mode = os.stat(device).st_mode
    handle = None
    if S_ISCHR(dev_mode):
        handle = serial.Serial(port=device, baudrate=BAUD_RATE)
    elif S_ISFIFO(dev_mode) or S_ISREG(dev_mode):
        handle = open(device, "w")

    # So! Apparently when you connect to the arduino serial port, the
    # bootloader kicks in, resets the arduino and waits a second for a new
    # program to be loaded before running the actual already stored code
    time.sleep(2)
    return handle
def __init__(self, fname):
    """Remember *fname* after verifying it exists and is a regular file;
    raises RuntimeError otherwise."""
    if not os.path.exists(fname):
        raise RuntimeError('Cannot find image "%s"' % fname)
    if not S_ISREG(os.stat(fname).st_mode):
        raise RuntimeError('"%s" is not a regular file' % fname)
    # TODO (kept from an old shell-era note): also verify the image
    # content type, e.g.
    #   file -zb "$imgfile" | grep "DOS/MBR boot sector"
    self._fname = fname
def check_and_process(pathname, verbose=False):
    """Process a .ann (brat) file together with its sibling .conll file.

    Only regular files ending in ``.ann`` are handled; the CoNLL path is
    derived by swapping the extension.

    Bugs fixed: the original fell through and still called
    file_process() after printing the "CoNLL file doesn't exist" error;
    it also derived the CoNLL path with ``pathname.split('.')[0]``,
    which breaks on paths containing a dot anywhere else -- splitext is
    used instead.
    """
    mode = os.stat(pathname)[ST_MODE]
    if S_ISREG(mode) and pathname.lower().endswith('.ann'):
        # It's a file, call the callback function
        if verbose:
            click.echo('Info: Processing {0}.'.format(pathname))
        bratFilePath = pathname
        conllFilePath = os.path.splitext(pathname)[0] + '.conll'
        if not os.path.exists(conllFilePath):
            click.echo("Error: CoNLL file doesn't exist")
            return
        file_process(bratFilePath, conllFilePath, verbose)
async def is_cached_locally(self):
    # Returns True when self.local_path exists and matches the expected
    # metadata, False when the file is absent.  Metadata mismatches raise
    # AssertionError.  NOTE(review): asserts are stripped under
    # ``python -O`` -- confirm that is acceptable for integrity checks.
    try:
        # NOTE: the local name ``stat`` shadows the stdlib ``stat``
        # module within this function body.
        stat = await async_stat(self.local_path)
        # The cached copy must be a regular file.
        assert S_ISREG(stat.st_mode), \
            "Was told to load Stud.IP file from irregular local file %s (%s)" % (self.local_path, stat)
        if self.total_length:
            # Size must match the expected download length, if known.
            assert self.total_length == stat.st_size, \
                "Was told to load Stud.IP file with size %s from local file %s with size %s" % \
                (self.total_length, self.local_path, stat.st_size)
        if self.last_modified:
            # fromtimestamp() yields a naive local-time datetime;
            # assumes self.last_modified is naive local time as well --
            # TODO confirm against the caller.
            st_mtime = datetime.fromtimestamp(stat.st_mtime)
            assert self.last_modified == st_mtime, \
                "Was told to load Stud.IP file with last change %s from local file %s with last change %s" % \
                (self.last_modified, self.local_path, st_mtime)
        return True
    except FileNotFoundError:
        # No local copy at all.
        return False
def list_files_in_SSH_bucket(uri, search_prefix=None):
    """List regular files at an SSH/SFTP *uri*.

    Returns a list of {'Key': name, 'LastModified': aware-UTC datetime}
    dicts, optionally filtered with the fnmatch pattern *search_prefix*.
    Raises ValueError after collecting more than 10000 entries.

    Bug fixed: the final log line formatted the entire ``entries`` list
    into the message instead of the file count.
    """
    try:
        import paramiko
    except ImportError:
        LOGGER.warn(
            'paramiko missing, opening SSH/SCP/SFTP paths will be disabled. '
            '`pip install paramiko` to suppress')
        raise

    parsed_uri = ssh_transport.parse_uri(uri)
    uri_path = parsed_uri.pop('uri_path')

    transport_params = {
        'connect_kwargs': {
            'allow_agent': False,
            'look_for_keys': False
        }
    }
    ssh = ssh_transport._connect(parsed_uri['host'], parsed_uri['user'],
                                 parsed_uri['port'], parsed_uri['password'],
                                 transport_params=transport_params)
    sftp_client = ssh.get_transport().open_sftp_client()
    entries = []
    max_results = 10000
    from stat import S_ISREG
    import fnmatch
    for entry in sftp_client.listdir_attr(uri_path):
        if search_prefix is None or fnmatch.fnmatch(entry.filename,
                                                    search_prefix):
            mode = entry.st_mode
            if S_ISREG(mode):
                entries.append({
                    'Key': entry.filename,
                    'LastModified': datetime.fromtimestamp(entry.st_mtime,
                                                           timezone.utc)
                })
            if len(entries) > max_results:
                raise ValueError(
                    f"Read more than {max_results} records from the path {uri_path}. Use a more specific "
                    f"search_prefix")
    # Log the count, not the whole list.
    LOGGER.info("Found {} files.".format(len(entries)))
    return entries
def get_stats(self):
    """
    Calculate directory size and number of days since last update.

    Walks the tree breadth-first, accumulating self.subdirs, self.files,
    self.size (block-allocated bytes), self.allocated (apparent bytes)
    and self.modified (newest file mtime).  Unreadable entries are
    logged via self._log_skipped and skipped.

    Perf fix: each file used to be os.stat()'ed up to four times; the
    result is now taken once per entry.
    """
    dirs_queue = queue.Queue()
    dirs_queue.put(self.path)
    while not dirs_queue.empty():
        try:
            path = dirs_queue.get()
            entities = os.listdir(path)
        except OSError as os_error:
            self._log_skipped(os_error)
            continue
        for entity in entities:
            fullpath = os.path.join(path, entity)
            try:
                st = os.stat(fullpath)
                mode = st.st_mode
                if S_ISDIR(mode):
                    self.subdirs += 1
                    dirs_queue.put(fullpath)
                elif S_ISREG(mode):
                    self.files += 1
                    self.size += st.st_blocks * BLOCK
                    self.allocated += st.st_size
                    if st.st_mtime > self.modified:
                        self.modified = st.st_mtime
            except OSError as os_error:
                self._log_skipped(os_error)
                continue
    if not self.skipped and not self.modified:
        # Empty tree: fall back to the root directory's own mtime.
        self.modified = os.stat(self.path).st_mtime
def remove_history_images_uploaded(): dirpath = config.UPLOAD_FOLDER # get all entries in the directory w/ stats entries = (os.path.join(dirpath, fn) for fn in os.listdir(dirpath)) entries = ((os.stat(path), path) for path in entries) # delete all image files entries = (path for stat, path in entries if S_ISREG(stat[ST_MODE])) for path in entries: print 'remove tmp file: ', path os.remove(path) # handle dirs entries = (os.path.join(dirpath, fn) for fn in os.listdir(dirpath)) entries = ((os.stat(path), path) for path in entries) # leave only dirs, insert creation date entries = ((stat[ST_CTIME], path) for stat, path in entries if S_ISDIR(stat[ST_MODE])) #NOTE: on Windows `ST_CTIME` is a creation date # but on Unix it could be something else #NOTE: use `ST_MTIME` to sort by a modification date oldest_datetime = datetime.datetime.now() - datetime.timedelta( days=MAX_IMAGE_STORE_DAYS) for cdate, path in sorted(entries): try: print path created_datetime = datetime.datetime.strptime( os.path.basename(path), '%Y%m%d%H') if created_datetime < oldest_datetime: print 'rm too old image dir: ', path shutil.rmtree(path) else: #if we are short of disk space, delete the folder still stat = os.statvfs(config.UPLOAD_FOLDER) avail_size = stat.f_bsize * stat.f_bavail if avail_size < MIN_AVAIL_DISK_SIZE: print 'rm tmp image dir due to lack of disk space: ', path shutil.rmtree(path) except ValueError, verror: print verror # delete dirs not created by app shutil.rmtree(path) except Exception, ex: print ex
def walk_files(self, directory):
    """Yield the paths of all regular files under *directory* on the
    remote SFTP host, recursing into subdirectories and following
    symlinks.

    Bug fixed: the original resolved symlinks with
    ``self._sftp.readlink(directory)`` -- the directory being listed --
    instead of the link entry's own path.
    """
    from stat import S_ISLNK, S_ISDIR, S_ISREG

    self._sftp_connect()

    for entry in self._sftp.listdir_attr(directory):
        path = os.path.join(directory, entry.filename)
        if S_ISLNK(entry.st_mode):
            # Follow the link and re-stat its target.
            path = self._sftp.readlink(path)
            entry = self._sftp.stat(path)
        if S_ISDIR(entry.st_mode):
            for inner_path in self.walk_files(path):
                yield inner_path
        elif S_ISREG(entry.st_mode):
            yield path
def check_access_rights(top):
    # Recursively assert that every regular file under *top* has mode
    # 0775 (the octal spelling differs between Python 2 and 3).
    # NOTE(review): relies on ``self`` and ``PY3`` from an enclosing
    # scope -- this looks like a nested helper inside a test method.
    for f in os.listdir(top):
        pathname = os.path.join(top, f)
        mode = os.stat(pathname).st_mode
        if S_ISDIR(mode):
            # directory, recurse into it
            check_access_rights(pathname)
        elif S_ISREG(mode):
            # file, check permissions -- reuse the stat taken above
            # (the original stat'ed each file a second time here)
            permissions = oct(mode)
            if PY3:
                self.assertEqual("0o100775", permissions)
            else:
                self.assertEqual("0100775", permissions)
        else:
            # unknown file type
            pass
def get_file_type(self, root, filename):
    # Returns a one-letter type code for root/filename:
    # f=regular, s=socket, p=FIFO, l=symlink, d=directory,
    # c=character device, b=block device, ?=unknown.
    filepath = fpath.join(root, filename)
    filemode = stat(filepath).st_mode
    # NOTE(review): if ``stat`` here is os.stat it follows symlinks, so
    # S_ISLNK() below can never be true and "l" is unreachable; lstat
    # would be needed to report links -- confirm which is intended.
    if S_ISREG(filemode):
        return "f"
    elif S_ISSOCK(filemode):
        return "s"
    elif S_ISFIFO(filemode):
        return "p"
    elif S_ISLNK(filemode):
        return "l"
    elif S_ISDIR(filemode):
        return "d"
    elif S_ISCHR(filemode):
        return "c"
    elif S_ISBLK(filemode):
        return "b"
    return "?"
def sync_d(self, ftp_path, locale_path):
    """Recursively upload files from *locale_path* to *ftp_path*.

    A file is uploaded when it is unknown to self.data or its local
    mtime is newer than the recorded remote mtime.  Directory names in
    self.filters are skipped.  Local mtimes are recorded in
    self.data_local keyed by remote path.
    """
    for fname in os.listdir(locale_path):
        Fpath = os.path.join(locale_path, fname)
        Rpath = os.path.join(ftp_path, fname)
        attr = os.stat(Fpath)
        self.data_local[Rpath] = attr.st_mtime
        if S_ISDIR(attr.st_mode):
            if fname not in self.filters:
                self.sync_d(Rpath, Fpath)
        elif S_ISREG(attr.st_mode):
            # Upload when new or locally modified (the original
            # duplicated the upload block in both branches).
            if Rpath not in self.data or attr.st_mtime > self.data[Rpath]:
                print("Uploading .... : " + Rpath)
                self.sftp.put(Fpath, Rpath, preserve_mtime=False)
def add_files(self, basePath, path, app):
    """Recursively add every file under *path* to the protobuf *app*,
    naming each entry relative to *basePath*.

    Fix: the original read each file via ``open(...).read()`` without
    closing the handle; a context manager closes it promptly.
    """
    if self.debug:
        print(("add files " + path))
    for f in os.listdir(path):
        pathname = os.path.join(path, f)
        mode = os.stat(pathname).st_mode
        if S_ISREG(mode):
            filename = os.path.join(os.path.relpath(path, basePath), f)
            if self.debug:
                print(("add " + pathname))
                print(("name " + filename))
            with open(pathname, 'rb') as fh:
                fileBuffer = fh.read()
            appFile = app.file.add()
            appFile.name = filename
            appFile.encoding = CLEARTEXT
            appFile.blob = fileBuffer
        elif S_ISDIR(mode):
            # descend into subdirectories
            self.add_files(basePath, pathname, app)
def walktree(top, callback):
    '''recursively descend the directory tree rooted at top, calling the
    callback function for each regular file'''
    for name in os.listdir(top):
        pathname = os.path.join(top, name)
        mode = os.stat(pathname).st_mode
        # Debug output kept from the original implementation.
        print(os.stat(pathname))
        print(os.path.getsize(pathname))
        if S_ISREG(mode):
            # Regular file: hand it to the callback.
            callback(pathname)
        elif S_ISDIR(mode):
            # Directory: descend into it.
            walktree(pathname, callback)
        else:
            # Unknown file type, print a message
            print('Skipping %s' % pathname)
def rename_files_by_date(dirpath, animation_name):
    # Renames every regular file in *dirpath* to
    # "<animation_name>_NNNN.png", numbering 0000.. in creation-date
    # order.  NOTE(review): Python 2 code (print statement).
    # get all entries in the directory w/ stats
    entries = (os.path.join(dirpath, fn) for fn in os.listdir(dirpath))
    entries = ((os.stat(path), path) for path in entries)
    # leave only regular files, insert creation date
    entries = ((stat[ST_CTIME], path) for stat, path in entries
               if (S_ISREG(stat[ST_MODE])))
    # and OUTPUT_BASENAME+"_" in os.path.basename(path))
    #NOTE: on Windows `ST_CTIME` is a creation date
    # but on Unix it could be something else
    #NOTE: use `ST_MTIME` to sort by a modification date
    num = 0
    for cdate, path in sorted(entries):
        print time.ctime(cdate), os.path.basename(path)
        # NOTE(review): hard-codes '/' as the separator; os.path.join
        # would be more portable -- confirm Unix-only usage.
        os.rename(path, os.path.dirname(path) + "/" +
                  animation_name + "_%04d.png" % num)
        num += 1