def byDateFunc(self, a, b):
    try:
        stat1 = os_stat(self.current_directory + a[0][0])
        stat2 = os_stat(self.current_directory + b[0][0])
    except:
        return 0
    return cmp(b[0][1], a[0][1]) or cmp(stat2.st_ctime, stat1.st_ctime)

def test_can_finalize(blockchain_path, base_filename, optimized_file_path):
    fss = PathOptimizedFileSystemStorage(base_path=blockchain_path)
    fss.save(base_filename, b'\x08Test')
    assert os_stat(optimized_file_path).st_mode & (stat.S_IWGRP | stat.S_IWUSR | stat.S_IWOTH) != 0
    fss.finalize(base_filename)
    assert os_stat(optimized_file_path).st_mode & (stat.S_IWGRP | stat.S_IWUSR | stat.S_IWOTH) == 0

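# A minimal stdlib-only sketch (not the PathOptimizedFileSystemStorage class used
# above) illustrating the same write-bit check the test performs before and after
# a file is "finalized": mask st_mode against the user/group/other write bits,
# and clear those bits to make the file read-only.
import os
import stat

def is_writable_by_anyone(path: str) -> bool:
    mode = os.stat(path).st_mode
    return mode & (stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH) != 0

def make_read_only(path: str) -> None:
    mode = os.stat(path).st_mode
    os.chmod(path, mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH))
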
def Info(self, dirsource):
    filename = dirsource.getFilename()
    sourceDir = dirsource.getCurrentDirectory()  # self.SOURCELIST.getCurrentDirectory()
    mytest = dirsource.canDescent()
    if dirsource.canDescent():
        if dirsource.getSelectionIndex() != 0:
            if (not sourceDir) and (not filename):
                return pname
            else:
                sourceDir = filename
            if os_path_isdir(sourceDir):
                mode = os.stat(sourceDir).st_mode
            else:
                return ("")
            mode = oct(mode)
            curSelDir = sourceDir
            dir_stats = os_stat(curSelDir)
            dir_infos = " " + _("Size") + str(self.Humanizer(dir_stats.st_size)) + " "
            dir_infos = dir_infos + _("Date") + " " + time_strftime("%d.%m.%Y - %H:%M:%S", time_localtime(dir_stats.st_mtime)) + " "
            dir_infos = dir_infos + _("Mode") + " " + str(mode[-3:])
            return (dir_infos)
        else:
            return ("")
    else:
        longname = sourceDir + filename
        if fileExists(longname):
            mode = os.stat(longname).st_mode
        else:
            return ("")
        mode = oct(mode)
        file_stats = os_stat(longname)
        file_infos = filename + " " + _("Size") + " " + str(self.Humanizer(file_stats.st_size)) + " "
        file_infos = file_infos + _("Date") + " " + time_strftime("%d.%m.%Y - %H:%M:%S", time_localtime(file_stats.st_mtime)) + " "
        file_infos = file_infos + _("Mode") + " " + str(mode[-3:])
        return (file_infos)

def __initCacheForPath(self, path):
    #self.log.debug("__initCacheForPath: %s", path)
    try:
        st = os_stat(path)
        fileStat = FileStat(st)
        self.__statCache[path] = fileStat
        #self.log.debug("__initCacheForPath: add path to cache: %s %s", fileStat, path)
    except OSError:
        # Path does not exist
        #self.log.debug("__initCacheForPath: Path does not exist: %s", path)
        return
    if fileStat.isDir():
        # Initialise the contents
        paths = os_listdir(path)
        self.__dirCache[path] = paths
        # Need to get a stat of all files in this dir
        sep = os_sep
        for fullpath in [path + sep + x for x in paths]:
            if self.__recursive:
                # Call the function recursively then
                self.__initCacheForPath(fullpath)
            else:
                try:
                    st = os_stat(fullpath)
                    fullpathFileStat = FileStat(st)
                    self.__statCache[fullpath] = fullpathFileStat
                    #self.log.debug("__initCacheForPath: add subpath to cache: %s %s", fullpathFileStat, fullpath)
                except OSError:
                    pass  # Path does not exist

def displayItemInfo(self):
    filename = self.SOURCELIST.getFilename()
    if not filename:
        return
    if os_path_isdir(filename):
        curFile = os_stat(filename)
        if filename != '/':
            filename = filename.rstrip('/')
        fileinfo = ("%s " % self.dirSize(filename) if config.plugins.filebrowser.dir_size.value else "") + self.fileTime(curFile.st_mtime)
    else:
        curFile = os_stat(self.SOURCELIST.getCurrentDirectory() + filename)
        fileinfo = "%s (%s) %s" % (self.humanizer(curFile.st_size), '{:,.0f}'.format(curFile.st_size), self.fileTime(curFile.st_mtime))
    self.session.open(FilebrowserScreenInfo, (filename, fileinfo))

def copytree(src, dst, symlinks=False):
    names = listdir(src)
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
        if not os.path.isdir(dst):
            mkdir(dst)
    else:
        makedirs(dst)
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = readlink(srcname)
                symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copyfile(srcname, dstname)
        except:
            print "dont copy srcname (no file or link or folder)"
    try:
        st = os_stat(src)
        mode = S_IMODE(st.st_mode)
        if have_chmod:
            chmod(dst, mode)
        if have_utime:
            utime(dst, (st.st_atime, st.st_mtime))
    except:
        print "copy stats for", src, "failed!"

def _get_cumulative_size(self):
    if self.size == 0:
        return 0
    cum = 0
    realpath = os.path.realpath
    for dirpath, _, filenames in os.walk(self.path, onerror=lambda _: None):
        for fname in filenames:
            try:
                if dirpath == self.path:
                    stat = os_stat(realpath(dirpath + "/" + fname))
                else:
                    stat = os_stat(dirpath + "/" + fname)
            except OSError:
                continue
            cum += stat.st_size
    return cum

def copytree(src, dst, symlinks=False):
    names = listdir(src)
    if os_path.isdir(dst):
        dst = os_path.join(dst, os_path.basename(src))
        if not os_path.isdir(dst):
            mkdir(dst)
    else:
        makedirs(dst)
    for name in names:
        srcname = os_path.join(src, name)
        dstname = os_path.join(dst, name)
        try:
            if symlinks and os_path.islink(srcname):
                linkto = readlink(srcname)
                symlink(linkto, dstname)
            elif os_path.isdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copyfile(srcname, dstname)
        except:
            print "dont copy srcname (no file or link or folder)"
    try:
        st = os_stat(src)
        mode = S_IMODE(st.st_mode)
        if have_chmod:
            chmod(dst, mode)
        if have_utime:
            utime(dst, (st.st_atime, st.st_mtime))
    except:
        print "copy stats for", src, "failed!"

def last_read_file_time(path):
    try:
        last_read_time = int(os_stat(path).st_atime)
    except OSError:
        pass
    else:
        return last_read_time

def test_should_generate_commands(work_dir, last_pipeline_step, expected_export_pipeline):
    """
    Test pipeline is exported as a bash script containing a call
    to all needed commands in the right order
    """
    exported_pipeline_path = join(work_dir, 'exported_pipeline.sh')
    arguments = ['--dvc', last_pipeline_step, '--output', exported_pipeline_path, '-w', '/work_dir']
    MlExportPipeline().run(*arguments)

    with open(expected_export_pipeline, 'r') as fd:
        expected_res = [line.strip().strip('\n') for line in fd.readlines() if line.strip().strip('\n')]

    assert exists(exported_pipeline_path)
    with open(exported_pipeline_path, 'r') as fd:
        result = [line.strip().strip('\n') for line in fd.readlines() if line.strip().strip('\n')]

    assert expected_res == result
    assert stat.S_IMODE(os_stat(exported_pipeline_path).st_mode) == 0o755

def sitemap(request):
    expires_timestamp = None
    modified_timestamp = None
    section = request.GET.get('section')  # no section means the index page
    page = request.GET.get('p', 1)
    if 'debug' in request.GET and settings.SITEMAP_DEBUG_AVAILABLE:
        content = build_sitemap(section, page)
    else:
        path = get_sitemap_path(section, page)
        try:
            content = storage.open(path)  # HttpResponse closes files after consuming
            modified_timestamp = os_stat(path).st_mtime
        except FileNotFoundError as err:
            sitemap_log.exception(
                'Sitemap for section %s, page %s, not found',
                section,
                page,
                exc_info=err,
            )
            raise Http404
        expires_timestamp = modified_timestamp + (60 * 60 * 24)
    response = HttpResponse(content, content_type='application/xml')
    if expires_timestamp:
        # check the expiry date wouldn't be in the past
        if expires_timestamp > time.time():
            response['Expires'] = http_date(expires_timestamp)
        else:
            # otherwise, just return a Cache-Control header of an hour
            patch_cache_control(response, max_age=60 * 60)
    if modified_timestamp:
        response['Last-Modified'] = http_date(modified_timestamp)
    return response

def test_resource_success():  # also check the directories creation
    dn = random_filename()
    assert not os_path.isdir(target_filename(dn))
    fn = random_filename()
    dfn = os_path.join(dn, fn)
    fake_request(3.0)
    pmnc.transaction.file_1.write(dfn, b"\x00")
    with open(target_filename(dfn), "rb") as f:
        assert f.read() == b"\x00"
    # check file permissions
    file_mode = os_stat(target_filename(dfn)).st_mode
    if platform == "win32":
        assert file_mode & S_IRUSR != 0
        assert file_mode & S_IWUSR == 0
    else:
        assert file_mode & (S_IRWXU | S_IRWXG | S_IRWXO) == \
            S_IRUSR | S_IROTH | S_IWOTH | S_IXOTH
    chmod(target_filename(dfn), file_mode | S_IWUSR)
    remove(target_filename(dfn))
    assert os_path.isdir(target_filename(dn))
    rmdir(target_filename(dn))

def copytree(src, dst, symlinks=False):
    names = listdir(src)
    if pathIsdir(dst):
        dst = pathJoin(dst, pathBasename(src))
        if not pathIsdir(dst):
            mkdir(dst)
    else:
        makedirs(dst)
    for name in names:
        srcname = pathJoin(src, name)
        dstname = pathJoin(dst, name)
        try:
            if symlinks and pathIslink(srcname):
                linkto = readlink(srcname)
                symlink(linkto, dstname)
            elif pathIsdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copyfile(srcname, dstname)
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Copying tree '%s' to '%s'! (%s)" % (err.errno, srcname, dstname, err.strerror))
    try:
        st = os_stat(src)
        try:
            chmod(dst, S_IMODE(st.st_mode))
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Setting modes from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
        try:
            utime(dst, (st.st_atime, st.st_mtime))
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Setting times from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
    except (IOError, OSError) as err:
        print("[Directories] Error %d: Obtaining stats from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))

def get_fifo():
    candidates = glob('/tmp/uzbl_fifo_*')
    for file in candidates:
        if S_ISFIFO(os_stat(file).st_mode):
            return file
    else:
        return None

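# A small stdlib-only illustration (hypothetical demo path, not from the snippets
# above) of the S_ISFIFO check used by the get_fifo variants in this listing:
# create a named pipe and confirm its stat mode identifies it as a FIFO.
import os
import stat as stat_module
import tempfile

demo_fifo = os.path.join(tempfile.mkdtemp(), "uzbl_fifo_demo")
os.mkfifo(demo_fifo)  # POSIX only
assert stat_module.S_ISFIFO(os.stat(demo_fifo).st_mode)
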
def copyfile(src, dst):
    f1 = None
    f2 = None
    status = 0
    try:
        f1 = open(src, "rb")
        if pathIsdir(dst):
            dst = pathJoin(dst, pathBasename(src))
        f2 = open(dst, "w+b")
        while True:
            buf = f1.read(16 * 1024)
            if not buf:
                break
            f2.write(buf)
    except (IOError, OSError) as err:
        print("[Directories] Error %d: Copying file '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
        status = -1
    if f1 is not None:
        f1.close()
    if f2 is not None:
        f2.close()
    try:
        st = os_stat(src)
        try:
            chmod(dst, S_IMODE(st.st_mode))
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Setting modes from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
        try:
            utime(dst, (st.st_atime, st.st_mtime))
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Setting times from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
    except (IOError, OSError) as err:
        print("[Directories] Error %d: Obtaining stats from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
    return status

def test_should_generate_python_script_no_conf(work_dir):
    """
    Convert a Jupyter Notebook to a Python 3 script using all parameters
        - create right function name
        - keep the docstring
        - keep code cells
        - remove no effect cells
        - remove trailing cells
        - call the function with right arguments
        - python syntax is ok
        - the script is executable
    """
    kept_cells, dropped_cells, docstring, notebook_path = generate_test_notebook(
        work_dir=work_dir, notebook_name='test_nb.ipynb')

    output_path = join(work_dir, 'out.py')
    cmd_arguments = ['-n', notebook_path, '-o', output_path, '-w', work_dir]
    IPynbToPython().run(*cmd_arguments)

    assert exists(output_path)

    with open(output_path, 'r') as fd:
        file_content = fd.read()

    check_content(docstring, kept_cells, dropped_cells, file_content)
    assert 'def mlvtools_test_nb(subset, rate):' in file_content
    assert 'mlvtools_test_nb(args.subset, args.rate)' in file_content

    # Ensure generated file syntax is right
    compile(file_content, output_path, 'exec')
    # Ensure script has exe right
    assert stat.S_IMODE(os_stat(output_path).st_mode) == 0o755

def execCmd(self, cmd):
    """
    This method executes a command within the namespace of the project. The cmd is placed in a bash script which is executed within the env
    This will adopt the platform associated with this application.

    e.g. The following will execute a 'make' command within the given project dir

        app = GaudiExec('some/path')
        app.execCmd('make')

    Args:
        cmd (str): This is the command(s) which are to be executed within the project environment and directory
    """
    if not self.directory:
        raise GangaException("Cannot run a command using GaudiExec without a directory first being set!")
    if not path.isdir(self.directory):
        raise GangaException("The given directory: '%s' doesn't exist!" % self.directory)

    cmd_file = tempfile.NamedTemporaryFile(suffix='.sh', delete=False)
    if not cmd.startswith('./run '):
        cmd = './run ' + cmd
    cmd_file.write("#!/bin/bash")
    cmd_file.write("\n")
    cmd_file.write(self.getEnvScript())
    cmd_file.write(cmd)
    cmd_file.flush()
    cmd_file.close()
    st = os_stat(cmd_file.name)
    chmod(cmd_file.name, st.st_mode | stat.S_IEXEC)

    logger.debug("Running: %s" % cmd_file.name)

    # I would have preferred to execute all commands against inside `./run` so we have some sane behaviour
    # but this requires a build to have been run before we can use this command reliably... so we're just going to be explicit
    if not path.isfile(path.join(self.directory, 'build.%s' % self.platform, 'run')):
        rc, stdout, stderr = _exec_cmd('make', self.directory)
        if rc != 0:
            logger.error("Failed to perform initial make on a Cmake based project")
            logger.error("This is required so that the './run' target exists and is callable within the project")
            logger.error("StdErr: %s" % str(stderr))
            raise GangaException("Failed to execute command")
        if cmd != 'make':
            rc, stdout, stderr = _exec_cmd(cmd_file.name, self.directory)
    else:
        rc, stdout, stderr = _exec_cmd(cmd_file.name, self.directory)

    if rc != 0:
        logger.error("Failed to execute command: %s" % cmd_file.name)
        logger.error("Tried to execute command in: %s" % self.directory)
        logger.error("StdErr: %s" % str(stderr))
        raise GangaException("Failed to Execute command")

    unlink(cmd_file.name)

    return rc, stdout, stderr

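# A hedged, stdlib-only sketch of the pattern execCmd above relies on: write a
# temporary shell script, mark it executable via the stat S_IEXEC bit, run it in
# a working directory, then remove it. The names below are invented for this
# sketch; _exec_cmd, getEnvScript and the Ganga environment are not reproduced.
import os
import stat
import subprocess
import tempfile

def run_temp_script(script_body, workdir):
    with tempfile.NamedTemporaryFile("w", suffix=".sh", delete=False) as fd:
        fd.write("#!/bin/bash\n")
        fd.write(script_body)
        name = fd.name
    # add the executable bit on top of whatever mode the temp file was created with
    os.chmod(name, os.stat(name).st_mode | stat.S_IEXEC)
    try:
        proc = subprocess.run([name], cwd=workdir, capture_output=True, text=True)
        return proc.returncode, proc.stdout, proc.stderr
    finally:
        os.unlink(name)
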
def do_run_script(self, answer):
    answer = answer and answer[1]
    if answer == "YES":
        self.session.open(Console, cmdlist=[self.commando[0]])
    elif answer == "VIEW":
        yfile = os_stat(self.commando[0])
        if (yfile.st_size < 61440):
            self.session.open(vEditor, self.commando[0])

def showInfo(self):
    if self["filelist"].canDescent():
        if self["filelist"].getSelectionIndex() != 0:
            curSelDir = self["filelist"].getSelection()[0]
            dir_stats = os_stat(curSelDir)
            dir_infos = "size " + str(self.formatSize(dir_stats.st_size)) + " "
            dir_infos = dir_infos + "last-mod " + time_strftime("%d.%m.%Y %H:%M:%S", time_localtime(dir_stats.st_mtime)) + " "
            dir_infos = dir_infos + "mode " + str(dir_stats.st_mode)
            self.setTitle(_(dir_infos))
        else:
            try:
                ret = ""
                out_line = os_popen("uptime").readline()
                ret = ret + "at" + out_line + "\n"
                out_lines = []
                out_lines = os_popen("cat /proc/meminfo").readlines()
                for lidx in range(len(out_lines) - 1):
                    tstLine = out_lines[lidx].split()
                    if "MemTotal:" in tstLine:
                        ret = ret + out_lines[lidx]
                    elif "MemFree:" in tstLine:
                        ret = ret + out_lines[lidx] + "\n"
                out_lines = []
                out_lines = os_popen("cat /proc/stat").readlines()
                for lidx in range(len(out_lines) - 1):
                    tstLine = out_lines[lidx].split()
                    if "procs_running" in tstLine:
                        ret = ret + _("Running processes: ") + tstLine[1]
            except:
                ret = "N/A"
            msg = self.session.open(MessageBox, _("Dreambox model: " + self.boxtype + "\n\n" + ret), MessageBox.TYPE_INFO, windowTitle=_("Dream-Explorer"))
    else:
        curSelFile = self["filelist"].getCurrentDirectory() + self["filelist"].getFilename()
        file_stats = os_stat(curSelFile)
        file_infos = "size " + str(self.formatSize(file_stats.st_size)) + " "
        file_infos = file_infos + "last-mod " + time_strftime("%d.%m.%Y %H:%M:%S", time_localtime(file_stats.st_mtime)) + " "
        file_infos = file_infos + "mode " + str(file_stats.st_mode)
        self.setTitle(_(file_infos))
        if curSelFile.endswith(".ts"):
            serviceref = eServiceReference("1:0:0:0:0:0:0:0:0:0:" + curSelFile)
            serviceHandler = eServiceCenter.getInstance()
            info = serviceHandler.info(serviceref)
            evt = info.getEvent(serviceref)
            if evt:
                self.session.open(EventViewSimple, evt, ServiceReference(serviceref))

def _retry_remove_file(fn, t):
    if os_path.isfile(fn):
        try:
            chmod(fn, os_stat(fn).st_mode | S_IWUSR)  # in case a temporary file has been
            remove(fn)                                # saved read-only and is being removed
        except:
            sleep(t)    # this second attempt compensates for
            remove(fn)  # sporadic errors in a loaded filesystem

def _get_cumulative_size(self):
    if self.size == 0:
        return 0
    cum = 0
    realpath = os.path.realpath
    for dirpath, dirnames, filenames in os.walk(self.path, onerror=lambda _: None):
        for file in filenames:
            try:
                if dirpath == self.path:
                    stat = os_stat(realpath(dirpath + "/" + file))
                else:
                    stat = os_stat(dirpath + "/" + file)
                cum += stat.st_size
            except:
                pass
    return cum

def from_file(cls, filename, version=SAPCAR_VERSION_201, archive_filename=None):
    """Populates the file format object from an actual file on the
    local file system.

    :param filename: filename to build the file format object from
    :type filename: string

    :param version: version of the file to construct
    :type version: string

    :param archive_filename: filename to use inside the archive file
    :type archive_filename: string

    :raise ValueError: if the version requested is invalid
    """
    # Read the file properties and its content
    stat = os_stat(filename)
    with open(filename, "rb") as fd:
        data = fd.read()

    # Compress the file content and build the compressed string
    try:
        (_, out_length, out_buffer) = compress(data, ALG_LZH)
    except CompressError:
        return None
    out_buffer = pack("<I", out_length) + out_buffer

    # Check the version and grab the file format class
    if version not in sapcar_archive_file_versions:
        raise ValueError("Invalid version")
    ff = sapcar_archive_file_versions[version]

    # If an archive filename was not provided, use the actual filename
    if archive_filename is None:
        archive_filename = filename

    # Build the object and fill the fields
    archive_file = cls()
    archive_file._file_format = ff()
    archive_file._file_format.perm_mode = stat.st_mode
    archive_file._file_format.timestamp = stat.st_atime
    archive_file._file_format.file_length = stat.st_size
    archive_file._file_format.filename = archive_filename
    archive_file._file_format.filename_length = len(archive_filename)
    if archive_file._file_format.version == SAPCAR_VERSION_201:
        archive_file._file_format.filename_length += 1

    # Put the compressed blob inside a last block and add it to the object
    block = SAPCARCompressedBlockFormat()
    block.type = SAPCAR_BLOCK_TYPE_COMPRESSED_LAST
    block.compressed = SAPCARCompressedBlobFormat(out_buffer)
    block.checksum = cls.calculate_checksum(data)
    archive_file._file_format.blocks.append(block)
    return archive_file

def _check_file_size(file, size):
    print()
    print(file)
    print('-- SIZE')
    s = os_stat(file).st_size
    print('computed: ', s)
    print('stored: ', size)
    print('--')
    return s == size

def __init__(self, path):
    self._path = abspath(realpath(path))
    self._dirs_stat_snapshot = {}
    self._stat_snapshot = {}
    for root, directories, files in walk(self._path):
        for file_name in files:
            try:
                file_path = path_join(root, file_name)
                self._stat_snapshot[file_path] = os_stat(file_path)
            except OSError:
                continue
        for directory_name in directories:
            try:
                directory_path = path_join(root, directory_name)
                self._stat_snapshot[directory_path] = os_stat(directory_path)
            except OSError:
                continue

async def Response(scope, receive, send):
    await send({
        "type": "http.response.start",
        "status": 200,
        "headers": [(k.lower().encode("latin-1"), v.encode("latin-1"))
                    for k, v in file_headers(os_stat(file_path)).items()]
    })
    await send({"type": "http.response.body", "body": byte_content})

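# A hedged sketch, not the actual helper used above: one way a file_headers(...)
# function taking an os.stat result could be built with the standard library,
# producing the header-name -> value mapping the ASGI handler encodes to bytes.
from email.utils import formatdate
from os import stat_result

def file_headers_sketch(st: stat_result) -> dict:
    return {
        "Content-Length": str(st.st_size),
        "Last-Modified": formatdate(st.st_mtime, usegmt=True),
    }
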
def check_file_size(filename: str, size: int, logger: Logger = getLogger(__name__)) -> bool:
    logger.debug('file: {fname}'.format(fname=filename))
    logger.debug('-- SIZE')
    s = os_stat(filename).st_size
    logger.debug('computed: {size}'.format(size=s))
    logger.debug('stored: {size}'.format(size=size))
    logger.debug('--')
    return s == size

def updateDestination(self):
    file_infos = ""
    if os.path.exists(config.misc.epgcache_filename.value):
        from os import stat as os_stat
        try:
            file_stats = os_stat(config.misc.epgcache_filename.value)
            file_infos = _("Size: ") + str(self.Humanizer(file_stats.st_size)) + " "
        except:
            file_infos = " "
    epgcachelocationlabel = _("Current EPG cachefile:") + " " + config.misc.epgcache_filename.value + "\n" + file_infos
    self["epgcachelocation"].setText(epgcachelocationlabel)

def test_should_gen_dvc_command_using_command_line(work_dir):
    """
    Test gen_dvc using command line
    """
    script_path = join(CURRENT_DIR, 'data', 'script.py')
    out_dvc_path = join(work_dir, 'out_dvc')
    check_call(['gen_dvc', '-i', script_path, '-o', out_dvc_path, '-w', work_dir])

    assert exists(out_dvc_path)
    assert stat.S_IMODE(os_stat(out_dvc_path).st_mode) == 0o755

def onFileAction(self, dirsource, dirtarget):
    self.SOURCELIST = dirsource
    self.TARGETLIST = dirtarget
    filename = dirsource.getFilename()
    self.SOURCELIST = dirsource
    self.TARGETLIST = dirtarget
    sourceDir = dirsource.getCurrentDirectory()
    if not sourceDir.endswith("/"):
        sourceDir = sourceDir + "/"
    testFileName = filename.lower()
    filetype = testFileName.split('.')
    filetype = "." + filetype[-1]
    longname = sourceDir + filename
    print "[Filebrowser]: " + filename, sourceDir, testFileName
    if testFileName.endswith(".ipk"):
        self.session.openWithCallback(self.onFileActionCB, ipkMenuScreen, self.SOURCELIST, self.TARGETLIST)
    elif testFileName.endswith(".ts"):
        fileRef = eServiceReference("1:0:0:0:0:0:0:0:0:0:" + longname)
        self.session.open(MoviePlayer, fileRef)
    elif testFileName.endswith(tuple(MOVIE_EXTENSIONS)):
        fileRef = eServiceReference("4097:0:0:0:0:0:0:0:0:0:" + longname)
        self.session.open(MoviePlayer, fileRef)
    elif testFileName.endswith(tuple(DVD_EXTENSIONS)):
        if DVDPlayerAvailable:
            self.session.open(DVD.DVDPlayer, dvd_filelist=[longname])
    elif testFileName.endswith(tuple(AUDIO_EXTENSIONS)):
        self.play_music(self.SOURCELIST)
    elif (testFileName.endswith(".rar")) or (re.search('\.r\d+$', filetype)):
        self.session.openWithCallback(self.onFileActionCB, RarMenuScreen, self.SOURCELIST, self.TARGETLIST)
    elif (testFileName.endswith(".gz")) or (testFileName.endswith(".tar")):
        self.session.openWithCallback(self.onFileActionCB, TarMenuScreen, self.SOURCELIST, self.TARGETLIST)
    elif (testFileName.endswith(".zip")):
        self.session.openWithCallback(self.onFileActionCB, UnzipMenuScreen, self.SOURCELIST, self.TARGETLIST)
    elif testFileName.endswith(tuple(IMAGE_EXTENSIONS)):
        if self.SOURCELIST.getSelectionIndex() != 0:
            self.session.openWithCallback(self.cbShowPicture, ImageViewer, self.SOURCELIST.getFileList(), self.SOURCELIST.getSelectionIndex(), self.SOURCELIST.getCurrentDirectory(), filename)
    elif testFileName.endswith(".sh"):
        self.run_script(self.SOURCELIST)
    elif testFileName.endswith(".txt") or testFileName.endswith(".log") or testFileName.endswith(".py") or testFileName.endswith(".xml") or testFileName.endswith(".html") or testFileName.endswith(".meta") or testFileName.endswith(".bak") or testFileName.endswith(".lst") or testFileName.endswith(".cfg"):
        xfile = os_stat(longname)
        # if (xfile.st_size < 61440):
        if (xfile.st_size < 1000000):
            self.session.open(vEditor, longname)
            self.onFileActionCB(True)
    else:
        try:
            x = openFile(self.session, guess_type(self.SOURCELIST.getFilename())[0], self.SOURCELIST.getCurrentDirectory() + self.SOURCELIST.getFilename())
        except TypeError, e:
            self.session.open(MessageBox, _("no Viewer installed for this mimetype!"), type=MessageBox.TYPE_ERROR, timeout=5, close_on_any_key=True)

def SysExecution(self, answer):
    answer = answer and answer[1]
    if answer == 'ExecA':
        self.session.open(Console, cmdlist=[self.commando[0]])
    elif answer == 'ExecB':
        self.session.open(Console, cmdlist=self.commando)
    elif answer == 'Chmod':
        self.session.open(Console, cmdlist=self.chmodexec)
    elif answer == 'ExecC':
        yfile = os_stat(self.commando[0])
        if yfile.st_size < 61440:
            self.session.open(TextExit, self.commando[0])

def test_should_write_template(work_dir, valid_template_path):
    """
    Test write an executable file from a given template and data
    """
    output_path = join(work_dir, 'my_exe.sh')
    write_template(output_path, valid_template_path, given_data='test')

    assert exists(output_path)
    assert stat.S_IMODE(os_stat(output_path).st_mode) == 0o755
    with open(output_path, 'r') as fd:
        assert fd.read() == 'a_value=test'

def getMovieList(self):
    self.movielist.reload(root=self.root, filter_tags=self.tagfilter)
    list = []
    tag = self.cmd['tag']
    tag = tag and tag.lower()
    for (serviceref, info, begin, unknown) in self.movielist.list:
        if serviceref.flags & eServiceReference.mustDescent:  # Skip subdirectories (TODO: Browse?)
            continue
        rtime = info.getInfo(serviceref, iServiceInformation.sTimeCreate)
        if rtime > 0:
            t = FuzzyTime(rtime, inPast=True)
            begin_string = t[0] + ", " + t[1]
        else:
            begin_string = "undefined"
        if config.plugins.Webinterface.loadmovielength.value:
            len = info.getLength(serviceref)
            if len > 0:
                len = "%d:%02d" % (len / 60, len % 60)
            else:
                len = "?:??"
        else:
            len = "disabled"
        sourceERef = info.getInfoString(serviceref, iServiceInformation.sServiceref)
        sourceRef = ServiceReference(sourceERef)
        event = info.getEvent(serviceref)
        ext = event and event.getExtendedDescription() or ""
        filename = "/" + "/".join(serviceref.toString().split("/")[1:])
        servicename = ServiceReference(serviceref).getServiceName().replace('\xc2\x86', '').replace('\xc2\x87', '')
        if not tag or tag in info.getInfoString(serviceref, iServiceInformation.sTags).lower():
            """ add movie only to list, if a given tag is applied to the movie """
            list.append((
                serviceref.toString(),
                servicename,
                info.getInfoString(serviceref, iServiceInformation.sDescription),
                rtime,
                begin_string,
                len,
                sourceRef.getServiceName(),
                info.getInfoString(serviceref, iServiceInformation.sTags),
                ext,
                filename,
                os_stat(filename)[6]
            ))
    return list

def _checkPathForChanges(oldStatCache, statCache, oldDirCache, dirCache,
                         checked_locations, path, recursive, depth=0,
                         linked_paths=None):
    #print "_checkPathForChanges: %s" % (path)
    # We use realpath to ensure we don't do a second stat or listdir for the same
    # path.
    changes = {}
    modified = False
    oldFileStat = None
    try:
        try:
            lstat = os_lstat(path)
        except Exception, e:
            log.debug("polling _checkPathForChanges unable to lstat [%r] - %s", path, e)
            raise
        #if S_ISLNK(lstat.st_mode):
        if (lstat.st_mode & 0170000) == S_IFLNK:  # Taken from stat module
            realpath = os_path_realpath(path)
            # linked_paths is used to ensure we don't go in a cyclic loop for symlinks
            if not linked_paths:
                linked_paths = {}
            if path != realpath:
                # It's a link
                #log.debug("Link: %s to %s", path, realpath)
                all_links_here = linked_paths.get(realpath, [])
                # We do not support cyclical paths, check if any have been found
                for linkpath in all_links_here:
                    if len(path) >= len(linkpath):
                        largerpath = path
                        smallerpath = linkpath
                    else:
                        largerpath = linkpath
                        smallerpath = path
                    if largerpath.find(smallerpath) == 0:
                        #print "Found cyclical path: %s to %s" % (path, realpath)
                        #log.info("Found cyclical symlink path: %s to %s", path, realpath)
                        return {}
                # It's okay, but we add this link to the known symlink list
                all_links_here.append(path)
                linked_paths[realpath] = all_links_here
                st = os_stat(path)
            else:
                realpath = path
                st = lstat

def viewable_file(self):
    filename = self.SOURCELIST.getFilename()
    sourceDir = self.SOURCELIST.getCurrentDirectory()
    if (filename is None) or (sourceDir is None):
        return None
    longname = sourceDir + filename
    try:
        xfile = os_stat(longname)
        if (xfile.st_size < 1000000):
            return longname
    except:
        pass
    return None

def test_should_export_pipeline_command_using_command_line(work_dir):
    """
    Test export_pipeline using command line
    """
    dvc_target = join(CURRENT_DIR, 'data', 'mlvtools_step5_sort_data.dvc')
    pipeline_out_path = join(work_dir, 'pipeline.sh')
    check_call(['export_pipeline', '--dvc', dvc_target, '-o', pipeline_out_path, '-w', work_dir])

    assert exists(pipeline_out_path)
    assert stat.S_IMODE(os_stat(pipeline_out_path).st_mode) == 0o755

def file_viewer(self):
    filename = self.SOURCELIST.getFilename()
    sourceDir = self.SOURCELIST.getCurrentDirectory()
    if (filename == None) or (sourceDir == None):
        return
    longname = sourceDir + filename
    try:
        xfile = os_stat(longname)
        if (xfile.st_size < 1000000):
            self.session.open(vEditor, longname)
            self.onFileActionCB(True)
    except:
        return

def get_fifo(self):
    """
    Look for UZBL's FIFO-file in /tmp.
    Don't give up until it has been found.
    """
    candidates = glob("/tmp/uzbl_fifo_*")
    for file in candidates:
        if S_ISFIFO(os_stat(file).st_mode):
            self.mon.log(self, "Found UZBL fifo in %s." % file)
            self.fifo = file
            self.start_play_signal = True
            return
    # print 'not found trying again'
    self.widget.after(500, self.get_fifo)

def SysExecution(self, answer):
    global PicPlayerAviable
    answer = answer and answer[1]
    if answer == "ExecA":
        self.session.open(Console, cmdlist=[self.commando[0]])
    elif answer == "ExecB":
        self.session.open(Console, cmdlist=self.commando)
    elif answer == "Chmod":
        self.session.open(Console, cmdlist=self.chmodexec)
    elif answer == "ExecC":
        yfile = os_stat(self.commando[0])
        if (yfile.st_size < 61440):
            self.session.open(TextExit, self.commando[0])

def getMovieList(self):
    self.movielist.reload(root=self.root, filter_tags=self.tagfilter)
    lst = []
    append = lst.append
    loadLength = config.plugins.Webinterface.loadmovielength.value
    for (serviceref, info, begin, unknown) in self.movielist.list:
        if serviceref.flags & eServiceReference.mustDescent:  # Skip subdirectories (TODO: Browse?)
            continue
        rtime = info.getInfo(serviceref, iServiceInformation.sTimeCreate)
        if rtime > 0:
            t = FuzzyTime(rtime, inPast=True)
            begin_string = t[0] + ", " + t[1]
        else:
            begin_string = _("undefined")
        if loadLength:
            Len = info.getLength(serviceref)
            if Len > 0:
                Len = "%d:%02d" % (Len / 60, Len % 60)
            else:
                Len = "?:??"
        else:
            Len = _("disabled")
        sourceERef = info.getInfoString(serviceref, iServiceInformation.sServiceref)
        sourceRef = ServiceReference(sourceERef)
        event = info.getEvent(serviceref)
        ext = event and event.getExtendedDescription() or ""
        filename = "/" + "/".join(serviceref.toString().split("/")[1:])
        servicename = ServiceReference(serviceref).getServiceName().replace('\xc2\x86', '').replace('\xc2\x87', '')
        append((
            serviceref.toString(),
            servicename,
            info.getInfoString(serviceref, iServiceInformation.sDescription),
            rtime,
            begin_string,
            Len,
            sourceRef.getServiceName(),
            info.getInfoString(serviceref, iServiceInformation.sTags),
            ext,
            filename,
            os_stat(filename)[6]
        ))
    return lst

def last_read_file_time(path, retries=3, retry_errno=DEFAULT_RETRY_ERRNO):
    try:
        last_read_time = int(os_stat(path).st_atime)
    except OSError as error:
        if error.errno == errno.ENOENT:
            return None
        elif error.errno in retry_errno:
            # Try again, or not!
            retries -= 1
            if retries:
                return last_read_file_time(path, retries=retries, retry_errno=retry_errno)
        # Something goes wrong
        raise
    else:
        return last_read_time

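# A minimal, self-contained sketch of the same "return None on ENOENT, retry
# transient errno values, otherwise re-raise" pattern used by last_read_file_time
# above. The retry set and function name here are invented for illustration;
# DEFAULT_RETRY_ERRNO from the snippet above is not shown in this listing.
import errno
import os

TRANSIENT_ERRNO = {errno.EAGAIN, errno.EINTR}  # assumed retry set for this sketch

def read_atime_or_none(path, retries=3):
    for attempt in range(retries):
        try:
            return int(os.stat(path).st_atime)
        except OSError as error:
            if error.errno == errno.ENOENT:
                return None
            if error.errno in TRANSIENT_ERRNO and attempt < retries - 1:
                continue
            raise
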
def get_fifo():
    """
    Look for UZBL's FIFO-file in /tmp.
    Don't give up until it has been found.
    """
    found_fifo = False
    fifo = None
    logging.debug('Looking for UZBL fifo...')
    while not found_fifo:
        candidates = glob('/tmp/uzbl_fifo_*')
        for file in candidates:
            if S_ISFIFO(os_stat(file).st_mode):
                found_fifo = True
                fifo = file
        sleep(0.5)
    logging.debug('Found UZBL fifo in %s.' % file)
    return fifo

def execCmd(self, cmd):
    """
    This method executes a command within the namespace of the project. The cmd is placed in a bash script which is executed within the env
    This will adopt the platform associated with this application.
    Any explicit calls to be run within the project env have to be prepended with './run '. This is not added automatically

    e.g. The following will execute a 'make' command within the given project dir

        app = GaudiExec('some/path')
        app.execCmd('make')

    Args:
        cmd (str): This is the command(s) which are to be executed within the project environment and directory
    """
    cmd_file = tempfile.NamedTemporaryFile(suffix='.sh', delete=False)
    cmd_file.write("#!/bin/bash")
    cmd_file.write("\n")
    cmd_file.write(self.getEnvScript())
    cmd_file.write(cmd)
    cmd_file.flush()
    cmd_file.close()
    st = os_stat(cmd_file.name)
    chmod(cmd_file.name, st.st_mode | stat.S_IEXEC)

    logger.debug("Running: %s" % cmd_file.name)

    # I would have preferred to execute all commands against inside `./run` so we have some sane behaviour
    # but this requires a build to have been run before we can use this command reliably... so we're just going to be explicit
    rc, stdout, stderr = _exec_cmd(cmd_file.name, self.directory)

    if rc != 0:
        logger.error("Failed to execute command: %s" % cmd_file.name)
        logger.error("Tried to execute command in: %s" % self.directory)
        logger.error("StdErr: %s" % str(stderr))
        raise GangaException("Failed to Execute command")

    unlink(cmd_file.name)

    return rc, stdout, stderr

def _create_container(client, image_name, environment, volume_dir):
    """Create a docker container and customize its setup."""
    # copy start-crawl wrapper to the volume temporary directory
    wrapper_cont_path = os.path.join(volume_dir, WRAPPER_FILENAME)
    copyfile(WRAPPER_LOCAL_PATH, wrapper_cont_path)
    wrapper_perms = os_stat(wrapper_cont_path).st_mode | stat.S_IEXEC
    os.chmod(wrapper_cont_path, wrapper_perms)  # must be executable
    fifo_path = os.path.join(volume_dir, 'scrapinghub.fifo')
    environment['SHUB_FIFO_PATH'] = fifo_path
    # keep using default /scrapinghub volume but mount it as a temporary
    # directory in the host /tmp/ to have access to the files if needed
    binds = {volume_dir: {'bind': SCRAPINGHUB_VOLUME, 'mode': 'rw'}}
    host_config = client.create_host_config(binds=binds)
    return client.create_container(
        image=image_name,
        command=[WRAPPER_IMAGE_PATH],
        environment=environment,
        volumes=[volume_dir],
        host_config=host_config,
    )

def copyfile(src, dst):
    try:
        f1 = open(src, "rb")
        if os_path.isdir(dst):
            dst = os_path.join(dst, os_path.basename(src))
        f2 = open(dst, "w+b")
        while True:
            buf = f1.read(16*1024)
            if not buf:
                break
            f2.write(buf)
        st = os_stat(src)
        mode = S_IMODE(st.st_mode)
        if have_chmod:
            chmod(dst, mode)
        if have_utime:
            utime(dst, (st.st_atime, st.st_mtime))
    except:
        print "copy", src, "to", dst, "failed!"
        return -1
    return 0

def get_fifo():
    """
    Look for UZBL's FIFO-file in /tmp.
    Don't give up until it has been found.
    """
    found_fifo = False
    fifo = None
    logging.debug('Looking for UZBL fifo...')
    while not found_fifo:
        # Sort the files with the newest file first,
        # to avoid an old fifo being found first.
        candidates = glob('/tmp/uzbl_fifo_*')
        sorted_candidates = sorted(candidates, key=path.getctime, reverse=True)
        for file in sorted_candidates:
            if S_ISFIFO(os_stat(file).st_mode):
                found_fifo = True
                fifo = file
        sleep(0.5)
    logging.debug('Found UZBL fifo in %s.' % file)
    return fifo

def copyfile(src, dst):
    try:
        f1 = open(src, 'rb')
        if os.path.isdir(dst):
            dst = os.path.join(dst, os.path.basename(src))
        f2 = open(dst, 'w+b')
        while True:
            buf = f1.read(16384)
            if not buf:
                break
            f2.write(buf)
        st = os_stat(src)
        mode = S_IMODE(st.st_mode)
        if have_chmod:
            chmod(dst, mode)
        if have_utime:
            utime(dst, (st.st_atime, st.st_mtime))
    except:
        print 'copy', src, 'to', dst, 'failed!'
        return -1
    return 0