def readPluginList(self, directory):
    """Enumerate plugins found under *directory*.

    Each sub-directory of *directory* is a category; each sub-directory of a
    category that contains a plugin.py/.pyc/.pyo is imported as
    Plugins.<category>.<name>.plugin and its Plugins() factory is called.
    Load failures are printed and recorded in self.warnings instead of
    aborting the scan.
    """
    categories = os_listdir(directory)
    # NOTE(review): new_plugins is collected but not used further in this
    # block -- presumably consumed by code outside this view; confirm.
    new_plugins = []
    for c in categories:
        directory_category = directory + c
        if not os_path.isdir(directory_category):
            continue
        # Ensure the category is importable as a package.
        open(directory_category + "/__init__.py", "a").close()
        for pluginname in os_listdir(directory_category):
            path = directory_category + "/" + pluginname
            if os_path.isdir(path):
                # Accept compiled or source plugin entry points.
                if (
                    fileExists(path + "/plugin.pyc")
                    or fileExists(path + "/plugin.pyo")
                    or fileExists(path + "/plugin.py")
                ):
                    try:
                        plugin = my_import(".".join(["Plugins", c, pluginname, "plugin"]))
                        # The module must expose a Plugins() factory.
                        if not plugin.__dict__.has_key("Plugins"):
                            print "Plugin %s doesn't have 'Plugin'-call." % (pluginname)
                            continue
                        plugins = plugin.Plugins(path=path)
                    except Exception, exc:
                        # Record the failure but keep scanning other plugins.
                        print "Plugin ", c + "/" + pluginname, "failed to load:", exc
                        print_exc(file=stdout)
                        print "skipping plugin."
                        self.warnings.append((c + "/" + pluginname, str(exc)))
                        continue
                    # allow single entry not to be a list
                    if not isinstance(plugins, list):
                        plugins = [plugins]
                    for p in plugins:
                        p.updateIcon(path)
                        new_plugins.append(p)
                    # A plugin may ship its own keymap; failures are non-fatal.
                    if fileExists(path + "/keymap.xml"):
                        try:
                            keymapparser.readKeymap(path + "/keymap.xml")
                        except Exception, exc:
                            print "keymap for plugin %s/%s failed to load: " % (c, pluginname), exc
                            self.warnings.append((c + "/" + pluginname, str(exc)))
def createMenu(self):
    """Build the language menu from the .po directory.

    Each entry is (language label, file name, country-flag pixmap, None),
    sorted by label.  The blue-button label is shown only when there are
    pending removals.  (Removed: an unused ``list = []`` that shadowed the
    builtin, and a dead loop counter ``i``.)
    """
    self.menuList = []
    self['menu'].setList(self.menuList)
    path = '/usr/share/enigma2/po/'
    self.path = path
    for x in os_listdir(path):
        filepath = path + x
        if os_path.exists(filepath):
            lan = self.getlan(x)
            secondkey = self.getsecondkey(x)
            # Flag icon is looked up by the country key derived from the file name.
            flag = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, '/usr/share/enigma2/countries/' + secondkey + '.png'))
            self.menuList.append((lan, x.encode('utf-8'), flag, None))
    self.menuList.sort()
    if len(self.removelist) > 0:
        self['ButtonBluetext'].show()
    else:
        self['ButtonBluetext'].hide()
    self['menu'].setList(self.menuList)
def prepareListForSelector(self, ret = 0):
    """Build self.myList from the .png entries of SubTreesPath and open a selector.

    MenuOScam.png is only offered when the AlternativeSoftCamManager plugin
    is absent.  Each list entry is (label, subtree-png path, item-png path).
    Opens an icon or list selector depending on configuration; closes the
    screen when nothing was found.
    """
    self.myList = []
    files = []
    for file in os_listdir(self.SubTreesPath):
        if file.endswith(".png"):
            if file == "MenuOScam.png":
                # Hide the OScam menu when the alternative softcam manager exists.
                if pathExists( "%s/AlternativeSoftCamManager/plugin.pyo" % self.myExtensions) is False:
                    files.append(file)
            else:
                files.append(file)
    files.sort()
    for file in files:
        print os_path.join(self.SubTreesPath, file)
        # (label without ".png", full subtree path, full item path)
        self.myList.append( ( file[:-4] , os_path.join(self.SubTreesPath, file), os_path.join(self.myItemsPath , file) ) )
    #print self.myList
    if len(self.myList) >= 1:
        if config.plugins.GOSmanager.InitStyle.value == 'icons':
            from selector import SelectorWidget # selection by name
            self.session.openWithCallback(self.SelectorCallback, SelectorWidget, list = self.myList, CurIdx = self.curIndex, Mytitle = _("Select option","plugin-GOSmanager") )
        else:
            from listselector import ListSelectorWidget # selection by name
            self.session.openWithCallback(self.SelectorCallback, ListSelectorWidget, list = self.myList, Mytitle = _("Select option","plugin-GOSmanager") )
    else:
        self.close()
        return
def __initCacheForPath(self, path):
    """Prime the stat/dir caches for *path*.

    Stats the path into self.__statCache; if it is a directory, records its
    listing in self.__dirCache and stats every child (recursing into children
    only when self.__recursive is set).  Paths that disappear between listing
    and stat are silently skipped.
    """
    #self.log.debug("__initCacheForPath: %s", path)
    try:
        st = os_stat(path)
        fileStat = FileStat(st)
        self.__statCache[path] = fileStat
        #self.log.debug("__initCacheForPath: add path to cache: %s %s", fileStat, path)
    except OSError:
        # Path does not exist
        #self.log.debug("__initCacheForPath: Path does not exist: %s", path)
        return
    if fileStat.isDir():
        # Initialise the contents
        paths = os_listdir(path)
        self.__dirCache[path] = paths
        # Need to get a stat of all files in this dir
        sep = os_sep
        for fullpath in [ path + sep + x for x in paths ]:
            if self.__recursive:
                # Call the function recursively then
                self.__initCacheForPath(fullpath)
            else:
                try:
                    st = os_stat(fullpath)
                    fullpathFileStat = FileStat(st)
                    self.__statCache[fullpath] = fullpathFileStat
                    #self.log.debug("__initCacheForPath: add subpath to cache: %s %s", fullpathFileStat, fullpath)
                except OSError:
                    pass # Path does not exist
def _build_mod_list(mod_path: list, suffix: str, blacklist: list) -> list: """ _build_mod_list(mod_path, suffix) -> Add all the paths in mod_path to sys.path and return a list of all modules in sys.path ending in suffix. """ from sys import path as sys_path mod_path = [mod_path] if type(mod_path) is str else mod_path blacklist = [blacklist] if type(blacklist) is str else blacklist # Add suffix to all names in blacklist. blacklist.extend(["%s%s" % (name, suffix) for name in blacklist if not name.endswith(suffix)]) # Add the path of this file to the search path. mod_path.append(os_abspath(os_dirname(__file__))) # Add the path(s) in mod_path to sys.path so we can import from # them. [sys_path.extend((path, os_dirname(path.rstrip("/")))) for path in mod_path if path not in sys_path] # Build the list of modules ending in suffix in the mod_path(s). mod_list = ( (path, name) for path in sys_path if os_isdir(path) for name in os_listdir(path) if name.endswith(suffix) and name not in blacklist ) return mod_list
def GetHostsCategories(myDir = PluginPath + '/hosts'):
    """Return sorted (name, name) tuples for every sub-directory of *myDir*."""
    categories = [
        (entry, entry)
        for entry in os_listdir(myDir)
        if os_path.isdir(os_path.join(myDir, entry))
    ]
    return sorted(categories)
def setlist_to_avaiable(self):
    """ fills the list with all found new MVIs"""
    logo_dir = config.plugins.logomanager.path.value
    mvi_files = sorted(
        logo_dir + name
        for name in os_listdir(logo_dir)
        if name.endswith(".mvi")
    )
    self.reloadPictures(mvi_files)
def delete_saved_playlist(self): listpath = [] playlistdir = resolveFilename(SCOPE_PLAYLIST) try: for i in os_listdir(playlistdir): listpath.append((i,playlistdir + i)) except IOError,e: print "Error while scanning subdirs ",e
def remove_all_files(self):
    """Delete every regular file in the storage and temp directories.

    Fix: the previous version returned early when the storage directory
    was missing, so the temp directory was never cleaned in that case.
    The duplicated loop is factored into a local helper.
    """
    def _purge(directory, label):
        # Nothing to do when the directory has not been created yet.
        if not os_path.isdir(directory):
            return
        for disk_filename in os_listdir(directory):
            fullname = os_path.join(directory, disk_filename)
            if os_path.isfile(fullname):
                log('FileStorage: Remove ' + label + ': "' + disk_filename +
                    '"; size: ' + str(os_path.getsize(fullname)))
                os_remove(fullname)

    _purge(self._storage_directory, 'file')
    _purge(self._temp_directory, 'temp file')
def getlistofMP3s(dirname):
    """ Returns a list of MP3 file names in *dirname*.

    Fix: match a real, case-insensitive ".mp3" extension.  The previous
    check compared only the last three characters, so names like
    "songmp3" (no dot) were wrongly accepted.
    """
    return [name for name in os_listdir(dirname)
            if name.lower().endswith(".mp3")]
def getInterfaceList(self):
    """Import every interface module under <plugin_path>/interface and
    instantiate its Interface class into self.pluginlist."""
    self.pluginlist = []
    global plugin_path, myname
    interface_dir = plugin_path + "/interface"
    for filename in os_listdir(interface_dir):
        # Only public Python sources qualify as interface modules.
        if not filename.endswith(".py") or filename.startswith("_"):
            continue
        module_name = '.'.join(["Plugins", "Extensions", myname, "interface", filename.replace(".py", "")])
        module = my_import(module_name)
        self.pluginlist.append(module.Interface(self.session, cbListLoaded=self.onStreamlistLoaded))
def _checkDirectoryForChanges(
    oldStatCache,
    statCache,
    oldDirCache,
    dirCache,
    checked_locations,
    realpath,
    directory,
    recursive,
    depth,
    linked_paths,
    dirModifiedSet=False,
):
    """Compare the old and current contents of *directory* and return a dict
    mapping changed paths to OR-ed change flags.

    The union of the previously-cached listing and the current listing is
    checked path by path via _checkPathForChanges; when anything changed and
    the caller has not already flagged the directory (dirModifiedSet), a
    FS_DIR_MODIFIED flag is added for the directory itself.
    """
    # print "_checkDirectoryForChanges: %s" % (directory)
    # print "dirModifiedSet: %r" % (dirModifiedSet)
    changes = {}
    modified = False
    old_paths = oldDirCache.get(realpath, [])  # Contents info
    # Use the cached values (i.e. if we've already checked this path this run)
    current_paths = dirCache.get(realpath)
    if not current_paths:
        try:
            # Note: dircache does not notice new or deleted files! Use os.listdir
            # current_paths = dircache.listdir(directory) # The current file/dir paths
            current_paths = os_listdir(realpath)  # The current file/dir paths
            dirCache[realpath] = current_paths
        except OSError:
            # Directory vanished or is unreadable -- treat as empty.
            current_paths = []
    # print "old_paths:", old_paths
    # print "current_paths:", current_paths
    # XXX - Filter current_paths... not for first draft
    old_set = set(old_paths)
    current_set = set(current_paths)
    # Ensure we have full paths for these sets
    sep = os_sep
    # Union covers both deleted (only in old) and created (only in current).
    paths_to_check = [directory + sep + x for x in current_set.union(old_set)]
    for path in paths_to_check:
        dir_changes = _checkPathForChanges(
            oldStatCache, statCache, oldDirCache, dirCache, checked_locations, path, recursive, depth, linked_paths
        )
        for changedpath, change in dir_changes.items():
            # OR the flags so multiple changes to one path accumulate.
            changes[changedpath] = changes.get(changedpath, 0) | change
    # The directory itself or one/more files inside the directory have changed
    # dirModifiedSet when True means this was already set previously
    if changes and not dirModifiedSet:
        # Removed for now, we should still send this
        ## We don't send a modified only the directory was modified.
        # for change in changes.values():
        #    if (change - FS_DIR_MODIFIED) > 0:
        #        changes[directory] = changes.get(directory, 0) | FS_DIR_MODIFIED
        #        break
        changes[directory] = changes.get(directory, 0) | FS_DIR_MODIFIED
    return changes
def scanDir(path, ext='all'):
    """Recursively collect file paths under *path*; when *ext* is given
    (a string or list of extensions), only matching files are returned."""
    if ext != 'all' and type(ext) != list:
        ext = [ext]
    collected = []
    for entry in os_listdir(path):
        full = path + '/' + entry
        if os_path.isfile(full):
            if ext == 'all' or entry.split('.')[-1] in ext:
                collected.append(full)
        elif os_path.isdir(full):
            collected.extend(scanDir(full, ext))
    return collected
def getProc(myproc):
    """Return True when *myproc* occurs in the first cmdline token of any
    /proc entry, else False."""
    for entry in os_listdir('/proc'):
        if entry == 'curproc':
            continue
        cmdline_path = '/proc/{}/cmdline'.format(entry)
        try:
            with open(cmdline_path, mode='rb') as handle:
                argv = handle.read().decode().split('\x00')
            if myproc in argv[0]:
                return True
        except Exception:
            # Entry vanished, is not a process dir, or is undecodable.
            continue
    return False
def checkenv(self):
    """Load and validate settings.json, defaulting data_dir, then migrate the
    old raw/YYYY-MM-DD layout to raw/YYYY/MM/DD.

    Fixes: settings.json is read via a with-block (no leaked handle on a
    parse error); the date regex is a raw string; the filter() result is
    materialised with list() -- on Python 3 a filter object is always
    truthy, so the old ``if old_dirs:`` check would always trigger the
    migration branch.
    """
    if not os_path.exists(os_path.realpath("settings.json")):
        raise exceptions.InvalidSettings("Please rename the file settings-dist.json to settings.json and fill the required info")

    with open(os_path.realpath("settings.json")) as json_data:
        settings = json.load(json_data)

    # At the moment there aren't required key, let's leave this check for future use
    required_keys = ['app_key', 'app_secret', 'token', 'token_secret']
    for required in required_keys:
        try:
            value = settings[required]
            if value == '':
                raise exceptions.InvalidSettings("Please fill the required info '" + required + "' before continuing")
        except KeyError:
            raise exceptions.InvalidSettings("Please fill the required info '" + required + "' before continuing")

    # Fall back to the bundled "data" directory when none is configured.
    try:
        if not settings['data_dir']:
            settings['data_dir'] = os_path.realpath("data/")
        else:
            if not os_path.exists(settings['data_dir']):
                logging.getLogger('dumpscraper').warn("Path " + settings['data_dir'] + " does not exist, using the default 'data' one")
                settings['data_dir'] = os_path.realpath("data/")
    except KeyError:
        settings['data_dir'] = os_path.realpath("data")

    self.settings = settings

    # Migrates the old folder structure (raw/YYYY-MM-DD) to the new one (raw/YYYY/MM/YYYY-MM-DD)
    # Let's check if we actually have to migrate the data
    if os_path.exists(settings['data_dir'] + '/raw'):
        raw_dirs = os_listdir(settings['data_dir'] + '/raw')
        regex = re.compile(r'\d{4}-\d{2}-\d{2}')
        old_dirs = list(filter(regex.match, raw_dirs))
        if old_dirs:
            from shutil import move as sh_move
            dump_logger = logging.getLogger('dumpscraper')
            dump_logger.info('Old folder structure found, migrating')
            for old_dir in old_dirs:
                parts = old_dir.split('-')
                old_path = settings['data_dir'] + '/raw/' + old_dir
                new_path = settings['data_dir'] + '/raw/' + parts[0] + '/' + parts[1] + '/' + parts[2]
                sh_move(old_path, new_path)
            dump_logger.info('Migration successfully completed')
def runEarlyPlugins(self, directory):
    """Import and run every early plugin under *directory*.

    Mirrors readPluginList, but looks for earlyplugin.py/.pyc/.pyo and
    calls the module's EarlyPlugins() entry point immediately.  Failures
    are printed and recorded in self.warnings without stopping the scan.
    """
    categories = os_listdir(directory)
    for c in categories:
        directory_category = directory + c
        if not os_path.isdir(directory_category):
            continue
        for pluginname in os_listdir(directory_category):
            path = directory_category + "/" + pluginname
            if os_path.isdir(path):
                # Accept compiled or source early-plugin entry points.
                if fileExists(path + "/earlyplugin.pyc") or fileExists(path + "/earlyplugin.pyo") or fileExists(path + "/earlyplugin.py"):
                    try:
                        plugin = my_import('.'.join(["Plugins", c, pluginname, "earlyplugin"]))
                        # Module must expose an EarlyPlugins() callable.
                        if not plugin.__dict__.has_key("EarlyPlugins"):
                            continue
                        plugin.EarlyPlugins(path=path)
                    except Exception, exc:
                        # Record the failure but keep scanning other plugins.
                        print "EarlyPlugin ", c + "/" + pluginname, "failed to load:", exc
                        print_exc(file=stdout)
                        print "skipping early plugin."
                        self.warnings.append( (c + "/" + pluginname, str(exc)) )
                        continue
def move_files_to_folder(*args, **kwargs):
    """Rotate Blender .blend1/.blend2/... backups into a backup sub-folder.

    Scans the saved file's directory for backup files matching
    "<name>.blend<N>", shifts each into the backup folder via
    increase_index_and_move, and deletes files whose index exceeds the
    user's configured backup count.
    """
    # Maximum backup allowed by user
    BACKUP_COUNT = bpy.context.user_preferences.filepaths.save_version
    # If saving backups option is 'ON'
    if BACKUP_COUNT:
        # Function level constants
        PATH = bpy.data.filepath                            # Full path
        FILE = bpy.path.display_name_from_filepath(PATH)    # File name
        CWD = os_path_dirname(PATH)                         # Current Working Directory
        CBD = os_path_join(CWD, BACKUP_FOLDER_NAME)         # Current Backup Directory
        REXT = r"{}\.blend(\d+)$".format(FILE)              # Regex to catch backups
        EXT = "{}.blend{}"                                  # Extension placeholder
        # NOTE(review): OLD is computed but not referenced below -- confirm intent.
        OLD = EXT.format(FILE, BACKUP_COUNT)                # Oldest backup name
        # Create backup directory if not exists
        try:
            os_makedirs(CBD)
        except OSError as e:
            if e.errno != EEXIST:
                # If other error appears then "dir already exists" reraise
                # the caught error again and print out the traceback
                raise OSError("\n".join(traceback_extract_stack())) from None
        # Get all files in current directory, move them to the
        # backup folder, if they are backup files and maintain
        # the backup folder's instances
        for filename in reversed(sorted(os_listdir(CWD))):
            # If file is a backup file
            try:
                index = int(re_findall(REXT, filename)[-1])
                # If file's index is greater than the
                # current number of backups allowed the full path
                # of the file will be returned and will be deleted
                # else os.remove will raise FileNotFoundError
                os_remove(
                    increase_index_and_move(
                        src_folder=CWD,
                        dst_folder=CBD,
                        file=FILE,
                        extension=EXT,
                        src_index=index,
                        dst_index=index,
                        max_index=BACKUP_COUNT,
                    )
                )
            # If file is not a backup file
            except (IndexError, FileNotFoundError):
                pass
        # If everything went fine, print out information
        if PRINT_INFO:
            print(INFO_TEXT.format(CWD, CBD))
def scanDir(path, ext="all"):
    """Recursively collect file paths under *path*, optionally filtered by
    extension(s) given as a string or a list.

    Fix: the extension filter is now propagated into the recursive call --
    previously subdirectories were rescanned with the default "all", so
    non-matching files from subdirectories leaked into the result.  The
    stray debug print was removed.
    """
    files = []
    if ext != "all" and type(ext) != list:
        ext = [ext]
    for item in os_listdir(path):
        full = path + "/" + item
        if os_path.isfile(full) and (ext == "all" or item.split(".")[-1] in ext):
            files.append(full)
        elif os_path.isdir(full):
            files.extend(scanDir(full, ext))
    return files
def checkIconExtension(self, path):
    """Return the lower-cased extension of the first entry of directory
    *path* (listing order), or None when the path is missing, unreadable
    or empty.

    Fix: the two bare ``except:`` clauses are replaced with the specific
    exceptions that can actually occur (OSError for an unreadable
    directory, IndexError for an empty one); os_path.splitext on a string
    cannot raise, so its try/except was dropped.
    """
    extension = None
    if os_path.exists(path):
        try:
            filename = os_listdir(path)[0]
        except (OSError, IndexError):
            # Directory unreadable or empty.
            filename = None
        if filename is not None:
            extension = os_path.splitext(filename)[1].lower()
    return extension
def isCamRunning(cam):
    """Return True when a running process' cmdline contains *cam*.

    Fixes: the *cam* parameter is now actually used (the old code
    hard-coded 'oscam' regardless of the argument); ``find(...) > 0``
    missed a match at offset 0, so a cmdline beginning with the cam name
    was not detected; the file is read via a with-block and a vanished
    process between listdir and open no longer raises.
    """
    for pid in os_listdir('/proc'):
        if not pid.isdigit():
            continue
        cmdFile = os_path.join('/proc', pid, 'cmdline')
        if os_path.exists(cmdFile):
            try:
                with open(cmdFile, "r") as f:
                    if cam in f.read():
                        return True
            except IOError:
                # Process exited between listdir and open.
                continue
    return False
def _check_retention(self):
    """Delete stored files older than the configured retention window and
    temp files older than 15 minutes."""
    now = time_time()
    if not os_path.isdir(self._storage_directory):
        return
    for entry in os_listdir(self._storage_directory):
        full_path = os_path.join(self._storage_directory, entry)
        if not os_path.isfile(full_path):
            continue
        age_seconds = now - get_file_modified_unixtime(full_path)
        if age_seconds > self._max_store_time_seconds:
            log('FileStorage: Remove outdated file: ' + full_path + '"; size: ' + str(os_path.getsize(full_path)))
            os_remove(full_path)
    if not os_path.isdir(self._temp_directory):
        return
    for entry in os_listdir(self._temp_directory):
        full_path = os_path.join(self._temp_directory, entry)
        if not os_path.isfile(full_path):
            continue
        age_seconds = now - get_file_modified_unixtime(full_path)
        if age_seconds > 15 * 60:  # every 15 minutes
            log('FileStorage: Remove outdated temp file: ' + full_path + '"; size: ' + str(os_path.getsize(full_path)))
            os_remove(full_path)
def searchMusic(self):
    """Build the playlist entries for the current filelist directory.

    Returns (slist, foundIndex): one Item per audio file (sorted by name)
    and the index of the currently selected file among them.

    Fix: all extensions are now matched case-insensitively via a single
    endswith() tuple -- previously only ".mp3" was compared against the
    lower-cased name, so "SONG.OGG" or "x.FLAC" were skipped.
    """
    slist = []
    foundIndex = 0
    index = 0
    directory = self["filelist"].getCurrentDirectory()
    files = os_listdir(directory)
    files.sort()
    for name in files:
        if name.lower().endswith((".mp3", ".m4a", ".ogg", ".flac")):
            slist.append((Item(text=name, filename=os_path.join(directory, name)),))
            if self["filelist"].getFilename() == name:
                foundIndex = index
            index = index + 1
    return slist, foundIndex
def readKeyboardMapFiles(self):
    """Parse every .info file in the keymaps data directory and append
    (kmap-file, display-name) pairs to self.keyboardmaps.

    Fixes: the .info file handle is closed via a with-block (it was
    previously opened inline and leaked); the key=value regex is compiled
    once instead of on every line; the keymap directory is resolved once.
    """
    keymap_dir = eEnv.resolve("${datadir}/keymaps/")
    info_line = re_compile(r"^\s*(\w+)\s*=\s*(.*)\s*$")
    for keymapfile in os_listdir(keymap_dir):
        if not keymapfile.endswith(".info"):
            continue
        mapfile = None
        mapname = None
        with open(keymap_dir + keymapfile) as f:
            for line in f:
                m = info_line.match(line)
                if m:
                    key, val = m.groups()
                    if key == "kmap":
                        mapfile = val
                    if key == "name":
                        mapname = val
        # Only complete entries (both kmap and name present) are kept.
        if (mapfile is not None) and (mapname is not None):
            self.keyboardmaps.append((mapfile, mapname))
def _compact_directory(path):
    """Recursively compact all source files under *path*.

    NOTE(review): src_type, rel_path, dev_path, compactor_fn, new_versions
    and the _join/_posixpath helpers come from an enclosing scope not
    visible here -- this appears to be a nested closure; confirm.
    Each source file is hashed and compacted to <rel_path>/<type>/<hash>.<type>
    unless that output already exists, and the mapping of dev-relative to
    rel-relative names is recorded in new_versions.
    """
    # Search for folders and recurse.
    for p in [f for f in os_listdir(path) if path_isdir(path_join(path, f))]:
        _compact_directory(_join(path, p))
    # Search the development path for all src files.
    for dev_filename in iglob(_join(path, '*.%s' % src_type)):
        dev_filename = _posixpath(dev_filename)
        # Content hash keys the compacted output, so unchanged files are reused.
        current_hash = hash_for_file(dev_filename)
        # Build a suitable output filename - hash.ext
        rel_filename = _join(rel_path, src_type, '%s.%s' % (current_hash, src_type))
        if not path_exists(rel_filename):
            compactor_fn(dev_filename, rel_filename)
        # Update the list of compact files, so it can be reused when generating script tags.
        new_versions[dev_filename[len(dev_path):]] = rel_filename[len(rel_path):]
def enumerate_files(self):
    """Describe every regular file in the storage directory as a dict with
    its full disk name, URL name and display name; [] when the directory
    is absent."""
    storage_dir = self._storage_directory
    if not os_path.isdir(storage_dir):
        return []
    described = []
    for name in os_listdir(storage_dir):
        full = os_path.join(storage_dir, name)
        if not os_path.isfile(full):
            continue
        described.append({
            'full_disk_filename': full,
            'url_filename': FileStorage._fname_disk_to_url(name),
            'display_filename': FileStorage._fname_disk_to_display(name),
        })
    return described
def readKeyboardMapFiles(self):
    """Parse every .info file in the keymaps data directory and append
    (kmap-file, display-name) pairs to self.keyboardmaps.

    Fixes: the file is managed by a with-block so it is closed even when
    parsing raises (f.close() was previously skipped on an exception);
    the key=value regex is compiled once instead of per line.
    """
    keymap_dir = eEnv.resolve('${datadir}/keymaps/')
    info_line = re_compile(r'^\s*(\w+)\s*=\s*(.*)\s*$')
    for keymapfile in os_listdir(keymap_dir):
        if keymapfile.endswith(".info"):
            mapfile = None
            mapname = None
            with open(keymap_dir + keymapfile) as f:
                for line in f:
                    m = info_line.match(line)
                    if m:
                        key, val = m.groups()
                        if key == 'kmap':
                            mapfile = val
                        if key == 'name':
                            mapname = val
            # Only complete entries (both kmap and name present) are kept.
            if (mapfile is not None) and (mapname is not None):
                self.keyboardmaps.append(( mapfile,mapname))
def readKeyboardMapFiles(self):
    """Parse every .info file in /usr/share/keymaps/ into self.keyboardmaps,
    falling back to the built-in German/English maps when none are found.

    Fixes: the file is managed by a with-block so it is closed even when
    parsing raises (f.close() was previously skipped on an exception);
    the key=value regex is compiled once instead of per line.
    """
    info_line = re_compile(r'^\s*(\w+)\s*=\s*(.*)\s*$')
    for keymapfile in os_listdir('/usr/share/keymaps/'):
        if keymapfile.endswith(".info"):
            mapfile = None
            mapname = None
            with open('/usr/share/keymaps/' + keymapfile) as f:
                for line in f:
                    m = info_line.match(line)
                    if m:
                        key, val = m.groups()
                        if key == 'kmap':
                            mapfile = val
                        if key == 'name':
                            mapname = val
            # Only complete entries (both kmap and name present) are kept.
            if (mapfile is not None) and (mapname is not None):
                self.keyboardmaps.append(( mapfile,mapname))
    # Fall back to the default maps when the directory yielded nothing.
    if len(self.keyboardmaps) == 0:
        self.keyboardmaps = [('dream-de.kmap', 'Dreambox Keyboard Deutsch'), ('eng.kmap', 'Keyboard English')]
def getCoverPath():
    """Return candidate cover-art directories.

    Starts from a fixed list of well-known cover paths, then adds a
    "/media/<dir>/cover/" entry for every locally mounted (or symlinked to
    a mount) /media entry.  Network mounts (nfs/cifs/smbfs) found in
    /proc/mounts are added to the block list and skipped.
    """
    blockList = ['hdd','cf','usb','sdcard']
    dirList = os_listdir("/media")
    coverPaths = ['/usr/share/enigma2/cover/', '/data/cover/', '/media/cf/cover/', '/media/usb/cover/', '/media/sdcard/cover/', '/media/hdd/cover/']
    if fileExists("/proc/mounts"):
        mountsFile = open("/proc/mounts" ,"r")
        for line in mountsFile:
            entry = line.split()
            # entry[2] is the fs type; skip network filesystems under /media.
            if entry[2] in ["nfs", "nfs4", "smbfs", "cifs"]:
                if entry[1].startswith("/media/"):
                    blockList.append(entry[1][7:])
        mountsFile.close()
    for dir in dirList:
        if dir in blockList:
            print dir, blockList
            continue
        # Accept real mountpoints, or symlinks that resolve to a mountpoint.
        if os_path.ismount("/media/%s" %(dir)) or (os_path.islink("/media/%s" %(dir)) and os_path.ismount(os_path.realpath("/media/%s" %(dir)))):
            path = "/media/%s/cover/" % (dir)
            coverPaths.append(path)
    return coverPaths
def __getUserCmdsList(self):
    """Build a sorted list of shell command lines, one per user update
    script (.py/.pyc/.pyo via python, .sh via sh) found in the custom
    iptvupdate directory."""
    printDBG('UpdateMainAppImpl.__getScriptsList begin')
    commands = []
    try:
        scriptsDir = os_path.join(self.ExtensionPath, 'IPTVPlayer/iptvupdate/custom/')
        for entry in os_listdir(scriptsDir):
            entryPath = os_path.join(scriptsDir, entry)
            if not os_path.isfile(entryPath):
                continue
            # separate file name and file extension
            extension = os_path.splitext(entry)[1]
            if extension in ['.pyo', '.pyc', '.py']:
                interpreter = 'python'
            elif extension == '.sh':
                interpreter = 'sh'
            else:
                continue
            commands.append('%s "%s" "%s" "%s" 2>&1 ' % (interpreter, entryPath, os_path.join(self.ExtensionPath, 'IPTVPlayer/'), os_path.join(self.ExtensionTmpPath, 'IPTVPlayer/')))
        commands.sort()
    except:
        printExc()
    printDBG('UpdateMainAppImpl.__getScriptsList [%r]' % commands)
    return commands
def getMovieList(directory="/hdd/movie"):
    """Return a dict with the recordings in *directory*, the configured
    bookmarks, and the directory itself.

    Each movie dict carries the file name/paths plus event metadata read
    from the sidecar "<file>.meta" file when present.
    """
    # Get Movielist
    movielist = []
    if os_path.exists(directory):
        filelist = os_listdir(directory)
        for filename in filelist:
            # Force a clean UTF-8 byte string, dropping undecodable bytes.
            filename = filename.decode("utf-8", "ignore").encode("utf-8")
            # NOTE(review): endswith('cuts') also matches any name merely
            # ending in "cuts", not just the ".cuts" sidecar -- confirm intent.
            if os_path.isfile(directory + "/" + filename) and filename.endswith(('ts', 'vob', 'mpg', 'mpeg', 'avi', 'mkv', 'dat', 'iso', 'mp4', 'divx', 'mov', 'm2ts', 'm4v', 'f4v', 'flv')) and not filename.endswith(('cuts')):
                movie = {}
                movie['filename'] = filename
                movie['directory'] = directory
                movie['fullname'] = directory + "/" + filename
                movie['eventname'] = ""
                movie['description'] = ""
                movie['begintime'] = ""
                movie['serviceref'] = ""
                # Get Event Info from meta file
                if os_path.exists(directory + "/" + filename + ".meta"):
                    # Meta layout: serviceref, event name, description, begin timestamp.
                    readmetafile = open(directory + "/" + filename + ".meta", "r")
                    movie['serviceref'] = readmetafile.readline()[0:-1]
                    movie['eventname'] = readmetafile.readline()[0:-1]
                    movie['description'] = readmetafile.readline()[0:-1]
                    movie['begintime'] = strftime("%A, %d.%m.%Y %H:%M", (localtime(float(readmetafile.readline()[0:-1]))))
                    readmetafile.close()
                movielist.append(movie)
    # Get Bookmarks
    bookmarklist = []
    for bookmark in config.movielist.videodirs.value:
        bookmarklist.append(bookmark)
    return {
        "movies": movielist,
        "bookmarks": bookmarklist,
        "directory": directory
    }
# NOTE(review): this leading block references `encoder`, which is defined
# earlier in the file (outside this view) -- it appears to be the tail of
# the per-encoder config creation; confirm placement.
if fileExists(getProcPath(encoder, "profile")):
    choice = ConfigSelection(default="baseline", choices=[("baseline", _("baseline")), ("simple", _("simple")), ("main", _("main")), ("high", _("high")), ("advanced simple", _("advancedsimple"))])
    config.plugins.transcodingsetup.encoder[int( encoder)].profile = choice

# check encoders
encoders = []
encoderPath = "/proc/stb/encoder"
numofencoders = os_listdir(encoderPath)
numofencoders.sort()
for encoder in numofencoders:
    encPath = os_path.join(encoderPath, encoder)
    if not os_path.isdir(encPath):
        continue
    # An encoder directory exposing "bitrate" is considered usable.
    if fileExists(os_path.join(encPath, "bitrate")):
        encoders.append(encoder)
        config.plugins.transcodingsetup.encoder.append(ConfigSubsection())
        createTransCodingConfig(encoder)
choices = []
# Offer a selection only when more than one encoder is available.
if len(encoders) > 1:
    encoders.sort()
    for encoder in encoders:
        choices.append((encoder, encoder))
def read_files(path):
    """Return the directory listing of *path* when it is a directory.

    When *path* is a regular file, a Chinese notice string is returned
    ("this path contains a file, not a directory"); when it is neither,
    None is returned implicitly (preserved behavior).
    """
    if os_path.isdir(path):
        return os_listdir(path)
    if os_path.isfile(path):
        return "该路径下包含的是文件而非目录"
# Keyword arguments for the khmer C++ extension module build.
EXTENSION_MOD_DICT = \
    {
        "sources": SOURCES,
        "extra_compile_args": EXTRA_COMPILE_ARGS,
        "extra_link_args": EXTRA_LINK_ARGS,
        "depends": BUILD_DEPENDS,
        "language": "c++",
        # Embed the versioneer-derived version as a C macro.
        "define_macros": [("VERSION", versioneer.get_version()), ],
    }

EXTENSION_MOD = Extension("khmer._khmermodule",  # pylint: disable=W0142
                          **EXTENSION_MOD_DICT)

# Every .py file under scripts/ is installed as a console script.
SCRIPTS = []
SCRIPTS.extend([path_join("scripts", script)
                for script in os_listdir("scripts")
                if script.endswith(".py")])

# PyPI trove classifiers for the distribution metadata.
CLASSIFIERS = [
    "Environment :: Console",
    "Environment :: MacOS X",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: BSD License",
    "Natural Language :: English",
    "Operating System :: POSIX :: Linux",
    "Operating System :: MacOS :: MacOS X",
    "Programming Language :: C++",
    "Programming Language :: Python :: 2.7",
    "Topic :: Scientific/Engineering :: Bio-Informatics",
]
def main():
    """Prepare complex-growth inputs and partition seed nodes across n_pts
    workers.

    Reads a YAML parameter file (CLI flags override entries), derives the
    maximum complex size, selects seed nodes according to seed_mode, and
    pickles the seed list into n_pts roughly equal partitions.
    """
    parser = argparse_ArgumentParser("Input parameters")
    parser.add_argument("--input_file_name", default="input_toy.yaml", help="Input parameters file name")
    parser.add_argument("--out_dir_name", default="/results", help="Output directory name")
    parser.add_argument("--train_test_files_dir", default="", help="Train test file path")
    parser.add_argument("--graph_files_dir", default="", help="Graph files' folder path")
    parser.add_argument("--seed_mode", help="Seed mode - specify 'cliques' for the cliques algo")
    parser.add_argument("--max_size_thres", help="Max size threshold")
    parser.add_argument("--n_pts", default=1, help="number of partitions (computers)")
    args = parser.parse_args()
    with open(args.input_file_name, 'r') as f:
        inputs = yaml_load(f, yaml_Loader)
    # CLI flags override the YAML configuration.
    if args.seed_mode:
        inputs['seed_mode'] = args.seed_mode
    if args.max_size_thres:
        inputs['max_size_thres'] = int(args.max_size_thres)
    # Override output directory name if same as gen
    if args.out_dir_name or inputs['out_comp_nm'] == "/results/res":
        if not os_path.exists(inputs['dir_nm'] + args.out_dir_name):
            os_mkdir(inputs['dir_nm'] + args.out_dir_name)
        inputs['out_comp_nm'] = args.out_dir_name + "/res"
    inputs['train_test_files_dir'] = ''
    if args.train_test_files_dir:
        if not os_path.exists(inputs['dir_nm'] + args.train_test_files_dir):
            os_mkdir(inputs['dir_nm'] + args.train_test_files_dir)
        inputs['train_test_files_dir'] = args.train_test_files_dir
    inputs['graph_files_dir'] = ''
    if args.graph_files_dir:
        if not os_path.exists(inputs['dir_nm'] + args.graph_files_dir):
            os_mkdir(inputs['dir_nm'] + args.graph_files_dir)
        inputs['graph_files_dir'] = args.graph_files_dir
    # Persist the effective configuration next to the results.
    with open(inputs['dir_nm'] + inputs['out_comp_nm'] + "_input_sample_partition.yaml", 'w') as outfile:
        yaml_dump(inputs, outfile, default_flow_style=False)
    logging_basicConfig(filename=inputs['dir_nm'] + inputs['out_comp_nm'] + "_logs.yaml", level=logging_INFO)
    neig_dicts_folder = inputs['dir_nm'] + inputs['graph_files_dir'] + "/neig_dicts"
    num_comp = inputs['num_comp']
    max_size_thres = inputs['max_size_thres']
    # Start from the max size seen in training, then prefer a previously
    # saved search size when available.
    max_size_trainF = inputs['dir_nm'] + inputs['train_test_files_dir'] + "/res_max_size_train"
    with open(max_size_trainF, 'rb') as f:
        max_size_train = pickle_load(f)
    max_size = max_size_train
    max_sizeF_feat = inputs['dir_nm'] + inputs['train_test_files_dir'] + "/res_max_size_search"
    if os_path.exists(max_sizeF_feat):
        with open(max_sizeF_feat, 'rb') as f:
            max_size = pickle_load(f)
    else:
        # Derive the size cap from the known complexes, trimming outliers
        # with an IQR-based bound.
        with open(inputs['dir_nm'] + inputs['comf_nm']) as f:
            sizes = [len(line.rstrip().split()) for line in f.readlines()]
        max_size = max(sizes)
        q1 = np_percentile(sizes, 25)
        q3 = np_percentile(sizes, 75)
        max_wo_outliers = math_ceil(q3 + 4.5*(q3-q1))  # Maximum after removing outliers
        max_size = min(max_size, max_wo_outliers)
    if max_size >= max_size_thres:
        max_size = max_size_thres
    out_comp_nm = inputs['dir_nm'] + inputs['out_comp_nm']
    with open(out_comp_nm + '_metrics.out', "a") as fid:
        # NOT actual max size since you merge later
        print("Max number of steps for complex growth = ", max_size, file=fid)
    max_sizeF = inputs['dir_nm'] + inputs['train_test_files_dir'] + "/res_max_size_search_par"
    with open(max_sizeF, 'wb') as f:
        pickle_dump(max_size, f)
    # Choose the seed nodes according to the configured mode.
    seed_mode = inputs['seed_mode']
    if seed_mode == "all_nodes":
        #graph_nodes = list(myGraph.nodes())
        seed_nodes = rand_perm(os_listdir(neig_dicts_folder))
    elif seed_mode == "n_nodes":
        seed_nodes = rand_perm(os_listdir(neig_dicts_folder))[:num_comp]
    elif seed_mode == "all_nodes_known_comp":
        protlistfname = inputs['dir_nm'] + inputs['train_test_files_dir'] + "/res_protlist"
        with open(protlistfname, 'rb') as f:
            prot_list = pickle_load(f)
        seed_nodes = list(prot_list)
    elif seed_mode == "cliques":
        myGraphName = inputs['dir_nm'] + inputs['graph_files_dir'] + "/res_myGraph"
        with open(myGraphName, 'rb') as f:
            myGraph = pickle_load(f)
        clique_list = list(nx_find_cliques(myGraph))
        to_rem = []
        # Removing 2 node and big complexes
        for comp in clique_list:
            if len(comp) <= 2 or len(comp) >= max_size:
                to_rem.append(comp)
        for comp in to_rem:
            clique_list.remove(comp)
        seed_nodes = clique_list  # Remove duplicates later.
    # partition
    ptns = int(args.n_pts)
    nc = len(seed_nodes)
    if seed_mode == 'n_nodes':
        seed_nodes_F = out_comp_nm + "_seed_nodes"
        each_ptn = nc // ptns
        # First ptns-1 partitions get each_ptn seeds; the last takes the rest.
        for i in range(ptns - 1):
            with open(seed_nodes_F + str(i), 'wb') as f:
                pickle_dump(seed_nodes[i * each_ptn:(i + 1) * each_ptn], f)
        with open(seed_nodes_F + str(ptns - 1), 'wb') as f:
            pickle_dump(seed_nodes[(ptns - 1) * each_ptn:], f)
    else:
        seed_nodes_dir = inputs['dir_nm'] + inputs['graph_files_dir'] + "/" + seed_mode + "_n_pts_" + str(ptns)
        if not os_path.exists(seed_nodes_dir):
            os_mkdir(seed_nodes_dir)
        seed_nodes_F = seed_nodes_dir + "/res_seed_nodes"
        each_ptn = nc // ptns
        # First ptns-1 partitions get each_ptn seeds; the last takes the rest.
        for i in range(ptns - 1):
            with open(seed_nodes_F + str(i), 'wb') as f:
                pickle_dump(seed_nodes[i * each_ptn:(i + 1) * each_ptn], f)
        with open(seed_nodes_F + str(ptns - 1), 'wb') as f:
            pickle_dump(seed_nodes[(ptns - 1) * each_ptn:], f)
# Emit a JSON document listing the sub-folders of the configured folder.
result = {"data": []}
key_for_data = settings.KEY_DATA
patch_to_folder = settings.PATCH_FOLDER


def find_folder(object_names: list) -> list:
    """
    Searches for folder names; file names and hidden Linux folders
    (names starting with a dot) are excluded.
    :param object_names: list of file and folder names
    :return: list containing only folder names
    """
    list_folders = []
    for i in object_names:
        check_patch = patch_to_folder + "/{}".format(i)
        if os_isdir(check_patch) and i[
                0] != ".":  # Must be a directory; excludes hidden Linux folders
            list_folders.append(i)
    return list_folders


list_objects = os_listdir(patch_to_folder)
list_dirs = find_folder(list_objects)
for i in list_dirs:
    dict_data = {key_for_data: i}
    result.get("data").append(dict_data)
print(json_dumps(result))
def _walk(self, dir_path: str, verbose: bool, max_chars: int) -> None:
    '''Recursively runs through directories to modify dates of copyright.

    Args:
        dir_path: str
            The path to the directory to be parsed.
        verbose: bool
            Set this to True to get prints on console while the script
            runs through directories.  Set it to False to not get prints.
            Defaults to False (i.e. silent mode).
        max_chars: int
            The maximum number of chars in file paths that will be printed
            on verbose mode.  Ellipsis are automatically inserted in paths
            when length of file path exceeds this limit.

    Changes: removed a dead `filename = filename` branch in the print
    helper and folded the trailing `else: if verbose:` into `elif`.
    '''
    the_format = f"{max_chars:d}s"

    #-----------------------------------------------------------------
    def _print_filepath(filename: str) -> None:
        # Truncate long paths from the left so the file name stays visible.
        if len(filename) > max_chars:
            filename = '...' + filename[3 - max_chars:]
        print(f"{filename:{the_format}}", end=' ', flush=True)

    #-----------------------------------------------------------------
    # evaluates the content of current directory
    my_dir_content = [os_path.join(dir_path, filename) for filename in os_listdir(dir_path)]
    # extracts the contained sub-directories, minus the excluded ones
    my_subdirs = [dirpath for dirpath in my_dir_content
                  if os_path.isdir(dirpath) and os_path.basename(dirpath) not in self.excluded_directories]
    # and extracts the contained files
    # NOTE(review): despite the name, this is every regular file, not only
    # Python sources -- self.select() does the actual filtering.
    my_python_srcs = [filepath for filepath in my_dir_content if os_path.isfile(filepath)]
    # recursively runs down (left deep first) into the directories tree
    for subdir_path in my_subdirs:
        self._walk(subdir_path, verbose, max_chars)
    # and finally runs through the whole files that are contained in current directory
    for file_path in my_python_srcs:
        if verbose:
            _print_filepath(file_path)
        if self.select(file_path):
            msg = self.process(file_path)
            if verbose:
                print(msg)
        elif verbose:
            print('not processed')
def load_processes_list(self, custom_file=False):
    """Load the semicolon-separated processes definition file.

    Either auto-detects the file matching the current hostname inside
    the processes folder or, when ``custom_file`` is given, loads that
    file instead.  Fills ``self.list_of_processes_to_check`` with
    ``GroupProcess`` entries, each carrying its ``Process`` records.

    Args:
        custom_file: optional file name to load instead of the
            hostname-derived one.  Defaults to False (auto-detect).
    """
    if not custom_file:
        # CHECK IF FILE EXISTS FOR CURRENT HOSTNAME #
        files_of_hostnames = os_listdir(
            os_path.join(config.path_to_script,
                         config.folder_to_processes_files))
        current_hostname_name = socket_gethostname()
        current_hostname_file = None
        for file_hostname in files_of_hostnames:
            if current_hostname_name in file_hostname:
                current_hostname_file = file_hostname
        if not current_hostname_file:
            sys_exit(''.join([
                style.sign_newline_double, text.text_15,
                style.sign_newline_double
            ]))
    else:
        current_hostname_file = custom_file
    # IF FILE EXISTS THEN GATHER FILE TO DATABASE
    with open(
            os_path.join(config.path_to_script,
                         config.folder_to_processes_files,
                         current_hostname_file),
            'r') as current_hostname_file_opened:
        counter = 0
        for line in current_hostname_file_opened:
            counter += 1
            if counter <= 3:
                continue  # SKIP HEADER LINES
            line = line.rstrip('\r\n')
            process_array = line.split(style.sign_semicolon)
            # BELOW LINE WILL SKIP EMPTY LINES #
            if not any(process_array[:8]):
                continue
            if (not process_array[1] and not process_array[2]
                    and not process_array[6]):
                # only the name column is filled: start of a new group
                self.list_of_processes_to_check.append(
                    GroupProcess(process_array[0]))
            else:
                # BUGFIX: the original called str.join() with three
                # positional arguments (a TypeError); the fragments are
                # now wrapped in a list as str.join() requires.
                if not process_array[0]:
                    sys_exit(''.join([
                        style.sign_fullfill, style.sign_minus_space,
                        text.text_16
                    ]) % process_array[0])
                if not process_array[1]:
                    sys_exit(''.join([
                        style.sign_fullfill, style.sign_minus_space,
                        text.text_17
                    ]) % process_array[0])
                if not process_array[2]:
                    sys_exit(''.join([
                        style.sign_fullfill, style.sign_minus_space,
                        text.text_18
                    ]) % process_array[0])
                if not process_array[6]:
                    sys_exit(''.join([
                        style.sign_fullfill, style.sign_minus_space,
                        text.text_19
                    ]) % process_array[0])
                # optional columns: empty string maps to None/False
                start_command = process_array[3] or None
                end_command = process_array[4] or None
                log_path = process_array[5] or None
                do_check_for_errors_in_logfile = process_array[7] == 'Y'
                process = Process(process_array[0], process_array[1],
                                  int(process_array[2]), start_command,
                                  end_command, log_path,
                                  int(process_array[6]),
                                  do_check_for_errors_in_logfile)
                self.list_of_processes_to_check[-1].processes_list.append(
                    process)
def monitor_processes_in_background(self):
    """Endless monitoring loop.

    Repeatedly lists the running processes, compares them against the
    configured expectations (instance count and, optionally, logfile
    freshness) and mails a report about every broken process, either
    via smtplib or via the external ``mailx`` command.
    """
    from time import sleep as time_sleep
    if config.use_python_smtplib:
        import smtplib
        from email.mime.text import MIMEText
        from email.mime.multipart import MIMEMultipart
    if config.use_mailx:
        from os import remove as os_remove
    while True:
        self.list_currently_working_processes(
            config.ps_command, config.column_with_pid_for_ps_command)
        list_of_broken_processes = list()
        for process_group in self.list_of_processes_to_check:
            # CHECK PROCESSES
            for process in process_group.processes_list:
                number_of_found_instances = 0
                # COUNT NUMBER OF INSTANCES #
                for running_process in self.list_of_processes_on_current_hostname:
                    if process.pattern in running_process:
                        number_of_found_instances += 1
                if number_of_found_instances == process.number_of_instances:
                    # CHECK UPDATE ON LOGFILE #
                    if process.log_update > 0 and process.log_path:
                        final_path_to_log = ''
                        path, file_stub = os_path.split(process.log_path)
                        path_listed_elements = os_listdir(path)
                        path_listed_elements.sort(
                            key=lambda x: os_path.getmtime(
                                os_path.join(path, x)),
                            reverse=True)
                        # newest file whose name contains the stub wins
                        for file_name in path_listed_elements:
                            if file_stub in file_name:
                                final_path_to_log = os_path.join(
                                    path, file_name)
                                break
                        last_logfile_date = datetime.fromtimestamp(
                            os_stat(final_path_to_log).st_mtime)
                        now_minus_time = datetime.now() - timedelta(
                            seconds=process.log_update)
                        if last_logfile_date < now_minus_time:
                            last_update_time = datetime.now(
                            ) - last_logfile_date
                            treshold = timedelta(
                                seconds=process.log_update)
                            list_of_broken_processes.append(
                                (process.name, number_of_found_instances,
                                 process.number_of_instances,
                                 last_update_time, treshold))
                    else:
                        continue
                elif number_of_found_instances != process.number_of_instances:
                    list_of_broken_processes.append(
                        (process.name, number_of_found_instances,
                         process.number_of_instances))
        # IF FOUND BROKEN PROCESS SEND MAIL #
        if list_of_broken_processes:
            # PREPARE MAIL BODY #
            mail_body = style.mail_header
            for broken_process in list_of_broken_processes:
                if len(broken_process) == 3:
                    (name, number_of_working_processes,
                     number_of_expected_working_processes) = broken_process
                    mail_body += style.mail_one_line_format % (
                        number_of_working_processes,
                        number_of_expected_working_processes, '', '', name)
                elif len(broken_process) == 5:
                    (name, number_of_working_processes,
                     number_of_expected_working_processes, last_update,
                     update_treshold) = broken_process
                    mail_body += style.mail_one_line_format % (
                        number_of_working_processes,
                        number_of_expected_working_processes,
                        str(last_update)[:7], str(update_treshold), name)
            # SENT MAIL #
            # USING PYTHON LIBRARIES #
            if config.use_python_smtplib:
                try:
                    receivers_data = ';'.join(config.receivers)
                    msg = MIMEMultipart()
                    msg['From'] = config.sender
                    msg['To'] = receivers_data
                    msg['Subject'] = (config.subject % socket_gethostname())
                    msg.attach(MIMEText(mail_body, 'plain'))
                    server = smtplib.SMTP(config.smtp_server,
                                          config.smtp_port)
                    server.ehlo()
                    server.starttls()
                    server.ehlo()
                    server.login(config.sender, config.sender_password)
                    # BUGFIX: the original bound the message to a local
                    # called ``text``, shadowing the module-level ``text``
                    # object that the except handler reads (text.text_11),
                    # which turned any send failure into an AttributeError.
                    message_text = msg.as_string()
                    server.sendmail(config.sender, config.receivers,
                                    message_text)
                except Exception:
                    # narrowed from a bare ``except:`` so that
                    # KeyboardInterrupt/SystemExit still propagate
                    sys_exit(text.text_11)
            # USING MAILX #
            if config.use_mailx:
                try:
                    file_body_path = os_path.join(
                        config.path_to_script, config.sent_body_file_name)
                    with open(file_body_path, 'w') as file:
                        file.write(mail_body)
                    command = ''.join([
                        '( cat ', file_body_path, ' ) | mailx -s "',
                        (config.subject % socket_gethostname()), '" "',
                        (','.join(config.receivers)), '"'
                    ])
                    os_system(command)
                    os_remove(file_body_path)
                except KeyError:
                    sys_exit(text.text_16)
            time_sleep(
                config.when_found_broken_processes_next_check_in_seconds)
        time_sleep(config.check_processes_each_how_many_seconds)
from detection_and_tracking.handlers.video_handler import Video_Handler
from detection_and_tracking.handlers.image_sequence_handler import Image_Sequence_Handler
from detection_and_tracking.configuration.seagull import dataset_dir
from os.path import join as os_path_join
from os import listdir as os_listdir


if __name__ == '__main__':
    # Convert every "No Objects" video of the seagull dataset into a
    # stored image sequence.
    no_objects_videos_dir = os_path_join(dataset_dir, 'inputs', 'videos',
                                         'No Objects')
    img_seq_handler = Image_Sequence_Handler(dataset_dir, exist=False)
    video_handler = Video_Handler(dataset_dir)
    for video_name in os_listdir(no_objects_videos_dir):
        video_path = os_path_join(no_objects_videos_dir, video_name)
        print('Reading video: ----', video_name, '----')
        video_capture = video_handler.read_video(video_path)
        print('Converting to image-sequence: ----', video_name, '----')
        video_handler.video_to_save_image_sequence(video_capture, video_name)
def __init__(self, session):
    """Build the EasyMedia panel: collect the enabled menu entries,
    load any extra ``.plug`` plugin descriptors and wire up the key
    actions."""
    Screen.__init__(self, session)
    self.session = session
    self.list = []
    self.__keys = []
    MPaskList = []
    for label in ("key_pvr", "key_yellow", "key_green", "key_red",
                  "key_blue"):
        self[label] = StaticText(" ")
    # The radio entry's caption depends on the e1-like radio mode setting.
    if config.usage.e1like_radio_mode.value:
        radio_title = _("Tv/Radio")
    else:
        radio_title = _("Radio")
    # (config option or None-for-always, key, caption, action) in menu order
    candidates = [
        (None, "movies", _("Movies"), "PLAYMOVIES"),
        (config.plugins.easyMedia.bookmarks, "bookmarks", _("Bookmarks"), "BOOKMARKS"),
        (config.plugins.easyMedia.timers, "timers", _("Timer"), "TIMERS"),
        (config.plugins.easyMedia.videodb, "videodb", _("VideoDB"), "VIDEODB"),
        (config.plugins.easyMedia.pictures, "pictures", _("Pictures"), "PICTURES"),
        (config.plugins.easyMedia.music, "music", _("Music"), "MUSIC"),
        (config.plugins.easyMedia.radio, "radio", radio_title, "RADIO"),
        (config.plugins.easyMedia.dvd, "dvd", _("DVD Player"), "DVD"),
        (config.plugins.easyMedia.weather, "weather", _("Weather"), "WEATHER"),
        (config.plugins.easyMedia.files, "files", _("Files"), "FILES"),
        (config.plugins.easyMedia.iradio, "internetradio", _("InternetRadio"), "INTERNETRADIO"),
        (config.plugins.easyMedia.idream, "idream", _("iDream"), "IDREAM"),
        (config.plugins.easyMedia.mytube, "mytube", _("MyTube Player"), "MYTUBE"),
        (config.plugins.easyMedia.vlc, "vlc", _("VLC Player"), "VLC"),
        (config.plugins.easyMedia.zdfmedia, "zdf", _("ZDFmediathek"), "ZDF"),
        (config.plugins.easyMedia.myvideo, "myvideo", _("MyVideo"), "MYVIDEO"),
    ]
    for option, key, caption, action in candidates:
        if option is None or option.value != "no":
            self.__keys.append(key)
            MPaskList.append((caption, action))
    # pick up additional pickled plugin descriptors (*.plug files)
    plug_dir = "/usr/lib/enigma2/python/Plugins/Extensions/EasyMedia"
    plist = [x[:-5] for x in os_listdir(plug_dir) if x.endswith('.plug')]
    plist.sort()
    for onePlug in plist:
        try:
            inpf = open((plug_dir + "/" + onePlug + ".plug"), 'rb')
            binPlug = pickle.load(inpf)
            inpf.close()
            self.__keys.append(binPlug.name)
            MPaskList.append((binPlug.name, ("++++" + binPlug.name)))
        except:
            pass
    # the first five entries double as the colour/PVR button captions
    button_labels = ("key_pvr", "key_red", "key_green", "key_yellow",
                     "key_blue")
    pos = 0
    for entry in MPaskList:
        self.list.append(
            MPanelEntryComponent(key=str(self.__keys[pos]), text=entry,
                                 cell=pos))
        if pos < len(button_labels):
            self[button_labels[pos]].setText(entry[0])
        pos += 1
    self["list"] = MPanelList(list=self.list, selection=0)
    self["list"].onSelectionChanged.append(self.updateOLED)
    self["actions"] = ActionMap(
        ["WizardActions", "MenuActions", "InfobarActions", "ColorActions"],
        {
            "ok": self.go,
            "back": self.cancel,
            "menu": self.emContextMenu,
            "showMovies": lambda: self.go2(MPaskList, 0),
            "green": lambda: self.go2(MPaskList, 2),
            "red": lambda: self.go2(MPaskList, 1),
            "blue": lambda: self.go2(MPaskList, 4),
            "yellow": lambda: self.go2(MPaskList, 3)
        }, -1)
def readPluginList(self, directory):
    """Enumerate, import and register every plugin below ``directory``.

    Walks every category folder, imports each plugin package that ships
    a plugin.py/pyc/pyo, collects its descriptors and keymap, then diffs
    the result against the previously known plugin list so that added
    and removed plugins are applied incrementally.  Import failures are
    recorded in ``self.warnings`` instead of aborting the scan.
    """
    categories = os_listdir(directory)
    new_plugins = []
    for c in categories:
        directory_category = directory + c
        if not os_path.isdir(directory_category):
            continue
        for pluginname in os_listdir(directory_category):
            path = directory_category + '/' + pluginname
            if os_path.isdir(path):
                # compiled or plain source — any of the three counts
                if fileExists(path + '/plugin.pyc') or fileExists(
                        path + '/plugin.pyo') or fileExists(path +
                                                            '/plugin.py'):
                    try:
                        plugin = my_import('.'.join(
                            ['Plugins', c, pluginname, 'plugin']))
                        if not plugin.__dict__.has_key('Plugins'):
                            print "Plugin %s doesn't have 'Plugin'-call." % pluginname
                            continue
                        plugins = plugin.Plugins(path=path)
                    except Exception as exc:
                        # keep scanning; remember the failure for the UI
                        print 'Plugin ', c + '/' + pluginname, 'failed to load:', exc
                        print_exc(file=stdout)
                        print 'skipping plugin.'
                        self.warnings.append(
                            (c + '/' + pluginname, str(exc)))
                        continue
                    # a plugin may return one descriptor instead of a list
                    if not isinstance(plugins, list):
                        plugins = [plugins]
                    for p in plugins:
                        p.path = path
                        p.updateIcon(path)
                        new_plugins.append(p)
                    # optional per-plugin keymap
                    if fileExists(path + '/keymap.xml'):
                        try:
                            keymapparser.readKeymap(path + '/keymap.xml')
                        except Exception as exc:
                            print 'keymap for plugin %s/%s failed to load: ' % (
                                c, pluginname), exc
                            self.warnings.append(
                                (c + '/' + pluginname, str(exc)))
    # diff the freshly scanned list against the currently loaded one
    plugins_added = [p for p in new_plugins if p not in self.pluginList]
    plugins_removed = [
        p for p in self.pluginList
        if not p.internal and p not in new_plugins
    ]
    # a plugin that was merely reloaded does not need a restart
    for p in plugins_removed:
        for pa in plugins_added:
            if pa.path == p.path and pa.where == p.where:
                pa.needsRestart = False
    for p in plugins_removed:
        self.removePlugin(p)
    for p in plugins_added:
        if self.firstRun or p.needsRestart is False:
            self.addPlugin(p)
        else:
            # an already-installed plugin at the same path/where slot
            # can also be added without a restart
            for installed_plugin in self.installedPluginList:
                if installed_plugin.path == p.path:
                    if installed_plugin.where == p.where:
                        p.needsRestart = False
            self.addPlugin(p)
    if self.firstRun:
        self.firstRun = False
        self.installedPluginList = self.pluginList
def listdir(path):
    """Return the entries of *path*, like os.listdir(), but with any
    macOS "DS_Store" artefacts filtered out of the result."""
    entries = []
    for entry in os_listdir(path):
        if "DS_Store" not in entry:
            entries.append(entry)
    return entries
from detection import Image_Sequence_Handler
from detection import Video_Handler
from os import listdir as os_listdir


if __name__ == '__main__':
    # Convert every video of the Singapore Maritime (VIS Onshore) dataset
    # into a stored image sequence.
    dataset_dir = 'C:\\Users\\emmanouil.vasilopoul\\Documents\\i-SENSE\\Effector\\Datasets\\Detection\\Singapore Maritime - VIS Onshore'
    sequence_handler = Image_Sequence_Handler(dataset_dir, exist=False)
    video_handler = Video_Handler(dataset_dir)
    for video_name in os_listdir(sequence_handler.videos_dir):
        video_handler.read_video(video_name)
        image_sequence = video_handler.video_to_image_sequence()
        sequence_handler.save_image_sequence(image_sequence, video_name)
def transfer(self):
    """Start a transfer of the currently highlighted entry.

    A single file is delegated to self.ok; for a remote directory an
    FTP listing is requested asynchronously and queued via callbacks,
    while a local directory is expanded synchronously into the upload
    queue.  Does nothing while a transfer queue is already active.
    """
    if not self.ftpclient or self.queue:
        return

    if self.currlist == "remote":
        # single file transfer is implemented in self.ok
        if not self["remote"].canDescent():
            return self.ok()
        else:
            absRemoteFile, fileName, fileSize = self.getRemoteFile()
            if not fileName:
                return

            # fetch the remote directory listing; the download queue is
            # built in transferListRcvd once the listing arrives
            filelist = ModifiedFTPFileListProtocol()
            d = self.ftpclient.list(absRemoteFile, filelist)
            d.addCallback(self.transferListRcvd, filelist).addErrback(self.transferListFailed)
    else:
        assert(self.currlist == "local")

        # single file transfer is implemented in self.ok
        if not self["local"].canDescent():
            return self.ok()
        else:
            localDirectory, _ = self.getLocalFile()
            remoteDirectory = self["remote"].getCurrentDirectory()

            def remoteFileExists(absName):
                # linear scan of the visible remote listing; used to flag
                # queue entries that would overwrite an existing file
                for file in self["remote"].getFileList():
                    if file[0][0] == absName:
                        return True
                return False

            # queue entries: (download?, remote path, local path, exists?)
            self.queue = [(False, remoteDirectory + file, localDirectory + file, remoteFileExists(remoteDirectory + file)) for file in os_listdir(localDirectory) if os_path.isfile(localDirectory + file)]
            self.nextQueue()
def leer_directorio(directorio, trozos_URI, archivo_pagina_estatica, buscar_archivo_pag_estatica):
    """Render a directory listing as an HTML page.

    If ``buscar_archivo_pag_estatica`` is truthy and the static index
    file is present in the directory, that file is served instead of
    the generated listing.

    :param directorio: path of the directory to list
    :param trozos_URI: request URI split into path segments
    :param archivo_pagina_estatica: name of the static index file
    :param buscar_archivo_pag_estatica: whether to look for that file
    :return: tuple (HTTP status code, HTML body, resource name)
    """
    archivos_en_dire = os_listdir(directorio)
    # serve the static page (e.g. an index file) when requested and present
    if buscar_archivo_pag_estatica and archivo_pagina_estatica in archivos_en_dire:
        codigo, contenido, nom_archivo = leer_archivo(
            directorio + "/" + archivo_pagina_estatica, trozos_URI)
        return codigo, contenido, nom_archivo
    # parent-directory link: every URI segment except the last one
    directorio_padre = "/"
    for dire_en_rama in range(len(trozos_URI) - 1):
        directorio_padre += trozos_URI[dire_en_rama] + "/"
    # static page head, styles and the parent-directory row
    html = '<!DOCTYPE html>\ <html lang="en">\ <head>\ <meta charset="UTF-8">\ <meta name="viewport" content="width=device-width, initial-scale=1.0">\ <meta http-equiv="X-UA-Compatible" content="ie=edge">\ <title>' + directorio + '</title>\ <style>\ table{\ width:100%;\ margin: 0 15px;\ }\ .directorio {\ border-left: 3px solid blue;\ padding-left: 5px;\ }\ .archivo {\ border-left: 3px solid green;\ padding-left: 5px;\ }\ .archivo_peso, .directorio_cantidad {\ text-align: right;\ }\ </style>\ </head>\ <body>\ <h1>' + directorio + '</h1>\ <table>\ <tr>\ <td class="directorio">\ <a href="' + directorio_padre + '">' + directorio_padre + '</a>\ </td>\ </tr>'
    # one table row per entry: folders show their entry count, files
    # show their size and its unit
    for cosa in archivos_en_dire:
        if os_path_isdir(directorio + "/" + cosa):
            cantidad_objetos = str(len(os_listdir(directorio + "/" + cosa)))
            html += '<tr>\ <td class="directorio">\ <a href="/' + directorio + "/" + cosa + '">' + cosa + '/</a>\ </td>\ <td class="directorio_cantidad">' + cantidad_objetos + '</td>\ <td>Objetos</td>\ </tr>'
        else:
            peso, escala_peso = calcular_tamaño(directorio + "/" + cosa)
            html += '<tr>\ <td class="archivo">\ <a href="/' + directorio + "/" + cosa + '">' + cosa + '</a>\ </td>\ <td class="archivo_peso">' + peso + '</td>\ <td>' + escala_peso + '</td>\ </tr>'
    html += ' </table>\ </body>\ </html>'
    return 200, html, "/" + directorio
def readPluginList(self, directory): """enumerates plugins""" categories = os_listdir(directory) new_plugins = [] for c in categories: directory_category = directory + c if not os_path.isdir(directory_category): continue for pluginname in os_listdir(directory_category): path = directory_category + "/" + pluginname if os_path.isdir(path): if fileExists(path + "/plugin.pyc") or fileExists( path + "/plugin.pyo") or fileExists(path + "/plugin.py"): try: plugin = my_import('.'.join( ["Plugins", c, pluginname, "plugin"])) if "Plugins" not in plugin.__dict__: print("Plugin %s doesn't have 'Plugin'-call." % (pluginname)) continue plugins = plugin.Plugins(path=path) except Exception as exc: print("Plugin ", c + "/" + pluginname, "failed to load:", exc) print_exc(file=stdout) print("skipping plugin.") self.warnings.append( (c + "/" + pluginname, str(exc))) continue # allow single entry not to be a list if not isinstance(plugins, list): plugins = [plugins] for p in plugins: p.path = path p.updateIcon(path) new_plugins.append(p) if fileExists(path + "/keymap.xml"): try: keymapparser.readKeymap(path + "/keymap.xml") except Exception as exc: print( "keymap for plugin %s/%s failed to load: " % (c, pluginname), exc) self.warnings.append( (c + "/" + pluginname, str(exc))) # build a diff between the old list of plugins and the new one # internally, the "fnc" argument will be compared with __eq__ plugins_added = [p for p in new_plugins if p not in self.pluginList] plugins_removed = [ p for p in self.pluginList if not p.internal and p not in new_plugins ] #ignore already installed but reloaded plugins for p in plugins_removed: for pa in plugins_added: if pa.path == p.path and pa.where == p.where: pa.needsRestart = False for p in plugins_removed: self.removePlugin(p) for p in plugins_added: if self.firstRun or p.needsRestart is False: self.addPlugin(p) else: for installed_plugin in self.installedPluginList: if installed_plugin.path == p.path: if installed_plugin.where == p.where: 
p.needsRestart = False self.addPlugin(p) if self.firstRun: self.firstRun = False self.installedPluginList = self.pluginList
'/opt/local/include', ] LIBRARY_DIRS = [ '/usr/lib', '/usr/local/lib', '/opt/lib', '/opt/local/lib', ] # Check any required library if DEPENDING_LIB_NAMES: for depending_lib_name in DEPENDING_LIB_NAMES: for lib_search_dir in LIBRARY_DIRS: try: files = os_listdir(lib_search_dir) if any(depending_lib_name in file_ for file_ in files): break except OSError: pass else: # try find_library extra_info = '' ctypes_found_libname = ctypes_util_find_library(depending_lib_name) if ctypes_found_libname: extra_info = 'ctypes.util.find_library` found it: do a manual search and add the correct `LIBRARY_DIRS`' sys_exit( 'ERROR: Cannot find library: <{}>\n\nLIBRARY_DIRS: <{}>\n\n ctypes_found_libname: <{}>\n {}' .format( depending_lib_name, LIBRARY_DIRS,
def ls(path):
    """Return the entries of *path* sorted case-insensitively.

    :param path: directory whose entries are listed
    :return: list of entry names, ordered with ``str.lower`` as key
    """
    # sorted() replaces the original listdir/in-place-sort/return dance
    # (which also carried stray C-style semicolons)
    return sorted(os_listdir(path), key=str.lower)
if fileExists( getProcPath(encoder, "level") ): choice = TconfigSelection(encoder, default = "3.1", choices = [("1.0", _("1.0")), ("2.0", _("2.0")), ("2.1", _("2.1")), ("2.2", _("2.2")), ("3.0", _("3.0")), ("3.1", _("3.1")), ("3.2", _("3.2")), ("4.0", _("4.0")), ("4.1", _("4.1")), ("4.2", _("4.2")), ("5.0", _("5.0")), ("low", _("low")), ("main", _("main")), ("high", _("high"))] ) setAttr("level", encoder, choice) if fileExists( getProcPath(encoder, "profile") ): choice = TconfigSelection(encoder, default = "baseline", choices = [("baseline", _("baseline")), ("simple", _("simple")), ("main", _("main")), ("high", _("high")), ("advanced simple", _("advancedsimple"))] ) setAttr("profile", encoder, choice) # check encoders encoders = [] encoderPath = "/proc/stb/encoder" for encoder in os_listdir(encoderPath): encPath = os_path.join(encoderPath, encoder) if not os_path.isdir(encPath): continue if fileExists(os_path.join(encPath, "bitrate")): encoders.append(encoder) createTransCodingConfig(encoder) if len(encoders) > 1: encoders.sort() choices = [] for encoder in encoders: choices.append((encoder, encoder)) config.plugins.transcodingsetup.encoder = ConfigSelection(default = '0', choices = choices ) transcodingsetupinit = None
def _append_logfile_header(self, process_name, date_text):
    # Render the colored, pipe-separated three-column header row
    # (name | date | status) preceded by a separator line.
    parts = (style.process_logfile_header %
             (process_name, date_text)).split(style.sign_pipe)
    self.output += ''.join([
        style.one_line_length * style.sign_equal, style.sing_newline,
        style.sign_pipe, GREEN, parts[0], NC, style.sign_pipe, CYAN,
        parts[1], NC, style.sign_pipe, BLUE, parts[2], NC,
        style.sign_pipe, style.sing_newline
    ])

def _append_logfile_line(self, line_text):
    # Render one body line below a header row.
    self.output += ''.join([
        (style.process_logfile_one_line_format % line_text),
        style.sing_newline
    ])

def print_log_files_paths(self):
    """Print, per configured process, the newest logfile paths.

    For every process that has a log path, the logfile directory is
    sorted by modification time and up to the configured number of
    matching files is reported together with the newest file's date.
    Processes without a log path, with a missing directory or with no
    matching files get an explanatory placeholder line instead.
    The rendering of the repeated header/body rows is factored into
    ``_append_logfile_header`` / ``_append_logfile_line``.
    """
    self.output = ''
    for process_group in self.list_of_processes_to_check:
        self.header_preparator(process_group.group_name)
        for process in process_group.processes_list:
            if process.log_path is not None:
                path, file = os_path.split(process.log_path)
                if not os_path.exists(path):
                    # logfile directory does not exist
                    self._append_logfile_header(process.name,
                                                style.date_status_unknown)
                    self._append_logfile_line(text.text_12)
                    continue
                # newest files first
                path_listed_elements = os_listdir(path)
                path_listed_elements.sort(
                    key=lambda x: os_path.getmtime(os_path.join(path, x)),
                    reverse=True)
                counter = 0
                for file_name in path_listed_elements:
                    if file in file_name:
                        full_path = os_path.join(path, file_name)
                        file_date = datetime.fromtimestamp(
                            os_stat(full_path).st_mtime).strftime(
                                style.logfile_datetim_format)
                        if counter == 0:
                            # header carries the newest file's date
                            self._append_logfile_header(
                                process.name, file_date)
                        self._append_logfile_line(full_path)
                        counter += 1
                        if counter == config.how_many_path_to_logs_show_in_show_log_paths_function:
                            break
                if counter == 0:
                    # directory exists but holds no matching logfile
                    self._append_logfile_header(process.name,
                                                style.date_status_unknown)
                    self._append_logfile_line(text.text_8)
            else:
                # process has no log path configured
                self._append_logfile_header(process.name,
                                            style.date_status_unknown)
                self._append_logfile_line(text.text_7)
    self.output += ''.join(
        [style.one_line_length * style.sign_equal, style.sing_newline])
    print(self.output)
cfg['program'][ 'logs'] = 'CTDprofilesEnds.txt' # separate file for each section suffix cfg['program'][ 'veusz_path'] = u'C:\\Program Files (x86)\\Veusz' # directory of Veusz load_vsz = load_vsz_closure(Path(cfg['program']['veusz_path'])) b_filter_time = False # dt_point2run_max= timedelta(minutes=15) if not os_path.isdir(cfg['out']['path']): raise (FileNotFoundError('output dir not exist: ' + cfg['out']['path'])) # ---------------------------------------------------------------------- # dir_walker vszFs = [ os_path.join(cfg['vsz_files']['path'], f) for f in os_listdir(cfg['vsz_files']['path']) if fnmatch(f, cfg['vsz_files']['filemask']) ] print('Process {} sections'.format(len(vszFs))) # Load data ################################################################# bFirst = True timeEnd_Last = np.datetime64('0') f = None g = None try: with pd.HDFStore(cfg['input_h5store']['path'], mode='r') as storeIn: try: # Sections df_points = storeIn[cfg['input_h5store'] ['tbl_sec_points']] # .sort() except KeyError as e:
def _get_script_list(typ) -> list:
    """Return the file names found in the script directory for *typ*."""
    return os_listdir(get_script_dir(typ))
# Model and transform configuration for the trained locator.
_id = 4
batch_size = 1
time_size = 100
freq_size = 108
transform_type = 'cqt'
window_size = 11.61
data_dir = 'D:\\Documents\\Thesis\\Project Skaterbot\\Playlists\Mixxx\\3\\transforms\\locator_v1_cqt\\'

# Restore the trained model and show its decoder architecture.
locator = Locator1(_id, batch_size, time_size, freq_size, transform_type,
                   window_size)
locator.load_trained_model()
locator.rnn.decoder.summary()

data_handler = Locator1DataHandler(data_dir, time_size, freq_size,
                                   window_size, transform_type)

# Run the locator over every song transform in the playlist directory and
# report the eight most likely interval starts.
for song_name in os_listdir(data_dir):
    print('Predictions of:', song_name)
    x = data_handler.read_input(song_name, 0,
                                data_handler.max_time_steps * time_size)
    print(x.shape)
    predictions = locator.predict(x)[0]
    print('Time step duration:', (time_size / 1000) * window_size,
          'seconds.')
    print('Time steps:', predictions.size)
    print('Indexes:', predictions.argsort()[-8:][::-1])
    print(
        'Start of Intervals:',
        np_sort(predictions.argsort()[-8:][::-1] * window_size *
                (time_size / 1000)))
    print('------------------------------------------------------------------')
EXTRA_LINK_ARGS.extend(['-fopenmp']) EXTENSION_MOD_DICT = \ { "sources": SOURCES, "extra_compile_args": EXTRA_COMPILE_ARGS, "extra_link_args": EXTRA_LINK_ARGS, "depends": BUILD_DEPENDS, "language": "c++", "define_macros": [("VERSION", versioneer.get_version()), ], } EXTENSION_MOD = Extension("khmer._khmer", ** EXTENSION_MOD_DICT) SCRIPTS = [] SCRIPTS.extend([path_join("scripts", script) for script in os_listdir("scripts") if script.endswith(".py")]) CLASSIFIERS = [ "Environment :: Console", "Environment :: MacOS X", "Intended Audience :: Science/Research", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: POSIX :: Linux", "Operating System :: MacOS :: MacOS X", "Programming Language :: C++", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Scientific/Engineering :: Bio-Informatics",
def empty(self):
    """Return True when the directory at ``self.path`` has no entries."""
    return len(os_listdir(self.path)) == 0
def directory_objects(self, show_hidden=False):
    """ Return DirectoryObject generator of every subdirectory. """
    def _wanted(name):
        # only real directories; dot-directories only when requested
        if not os_path.isdir(os_path.join(self.path, name)):
            return False
        return show_hidden or not name.startswith('.')

    return (DirectoryObject(os_path.join(self.path, name))
            for name in os_listdir(self.path) if _wanted(name))
g_color_max = int(config.get('ContoursSettings', 'ColorMax')) g_mode = int(config.get('ContoursSettings', 'Mode')) with open('settings.ini', 'w') as file: config.write(file) # 建立並隱藏主視窗 root = Tk() root.withdraw() # 選擇圖片目錄 folder = askdirectory() labelfile = {} # 有找到 contours 的相機清單 available_cams: dict = {"cam_list": []} for filename in os_listdir(folder): if filename.endswith(".png"): path = folder + '/' + filename # 取得圖片 Contours CONTOURS, w, h = get_contours(path, g_threshold, g_color_max, g_mode, prev) # 沒找到就跳過,並且刪除遮罩圖片 if len(CONTOURS) == 0: print(f'No area found. Deleting {path}...') os_remove(path) print('Done') continue # 更改附檔名 (空拍照片皆為 jpg 格式) b_name = os_path.splitext(filename)[0] fnamejpg = b_name + '.JPG'
def search_for_errors_in_logfiles(self):
    """Scan the newest logfiles of flagged processes for error patterns.

    For every process whose ``do_check_for_errors_in_logfile`` flag is
    set, the most recent logfiles matching the configured log path are
    opened and searched for every pattern in
    ``config.logs_error_patterns_list``; matching lines are collected
    (with line numbers and colors) into ``self.output``, which is
    printed once at the end.
    """
    self.output = style.sing_newline
    for processes_group in self.list_of_processes_to_check:
        # skip groups in which no process asks for logfile checking
        if not any(process.do_check_for_errors_in_logfile
                   for process in processes_group.processes_list):
            continue
        self.header_preparator(processes_group.group_name)
        for process in processes_group.processes_list:
            if process.do_check_for_errors_in_logfile:
                if process.log_path:
                    log_path_full_list = []
                    # NOTE(review): ``file`` shadows the builtin and is
                    # later rebound by the ``with open(...) as file``
                    # below; harmless here but easy to trip over.
                    path, file = os_path.split(process.log_path)
                    if not os_path.exists(path):
                        # logfile directory is missing
                        self.output += ''.join([
                            GREEN, process.name, NC,
                            style.sign_minus_space, path, RED,
                            style.sign_minus_space, text.text_12, NC,
                            style.sing_newline
                        ])
                        continue
                    # newest files first
                    path_listed_elements = os_listdir(path)
                    path_listed_elements.sort(
                        key=lambda x: os_path.getmtime(
                            os_path.join(path, x)),
                        reverse=True)
                    how_manylog_files_added = 0
                    for file_name in path_listed_elements:
                        if file in file_name:
                            log_path_full_list.append(
                                os_path.join(path, file_name))
                            how_manylog_files_added += 1
                            if how_manylog_files_added == config.how_many_path_to_logs_show_in_show_log_paths_function:
                                break
                else:
                    # flagged for checking but no log path configured
                    self.output += ''.join([
                        YELLOW, process.name, NC, style.sign_space,
                        ORANGE, text.text_20, NC, style.sing_newline
                    ])
                    continue
                for log_path_full in log_path_full_list:
                    if os_path.exists(log_path_full):
                        self.output += ''.join([
                            YELLOW, process.name, NC,
                            style.sign_minus_space, log_path_full,
                            style.sing_newline
                        ])
                        error_found = 0
                        # one full pass over the file per error pattern
                        for error in config.logs_error_patterns_list:
                            with open(log_path_full) as file:
                                line_counter = 0
                                for line in file:
                                    line_counter += 1
                                    # NOTE(review): result of strip() is
                                    # discarded — this line is a no-op
                                    line.strip('\r\n')
                                    if error in line:
                                        error_found += 1
                                        self.output += ''.join([
                                            CYAN, str(line_counter),
                                            style.sign_colon_space, RED,
                                            line.rstrip(), NC,
                                            style.sing_newline
                                        ])
                        if error_found == 0:
                            self.output += ''.join([
                                GREEN, text.text_4, NC,
                                style.sing_newline
                            ])
            else:
                # process not flagged for logfile checking
                self.output += ''.join([
                    YELLOW, process.name, NC, style.sign_minus_space,
                    BLUE, text.text_5, NC, style.sing_newline
                ])
    print(self.output)
import os
import sys
import traceback
from os import listdir as os_listdir

#XBMCAddons_error_file="/tmp/XBMCAddons_error"
scripts = "/usr/lib/enigma2/python/Plugins/Extensions/KodiLite/scripts"

# Make the bundled Kodi "script.*" add-ons importable by extending
# sys.path with each add-on's lib directory.
if os.path.exists(scripts):
    sys.path.extend(scripts + "/" + name + "/lib"
                    for name in os_listdir(scripts)
                    if "script." in name)

#def trace_error():
#    traceback.print_exc(file = sys.stdout)
#    import os
#    traceback.print_exc(file=open(XBMCAddons_error_file,"w"))
def buildEasyMediaList(self):
    """Rebuild the EasyMedia menu: collect the enabled entries, load
    extra ``.plug`` plugin descriptors and refresh the button captions
    and the on-screen list."""
    self.list = []
    self.__keys = []
    self.menuItemList = []
    # The radio entry's caption depends on the e1-like radio mode setting.
    if config.usage.e1like_radio_mode.value:
        radio_title = _("Tv/Radio")
    else:
        radio_title = _("Radio")
    # (config option or None-for-always, key, caption, action) in menu order
    candidates = [
        (None, "movies", _("Movies"), "PLAYMOVIES"),
        (config.plugins.easyMedia.bookmarks, "bookmarks", _("Bookmarks"), "BOOKMARKS"),
        (config.plugins.easyMedia.timers, "timers", _("Timer"), "TIMERS"),
        (config.plugins.easyMedia.videodb, "videodb", _("VideoDB"), "VIDEODB"),
        (config.plugins.easyMedia.pictures, "pictures", _("Pictures"), "PICTURES"),
        (config.plugins.easyMedia.music, "music", _("Music"), "MUSIC"),
        (config.plugins.easyMedia.radio, "radio", radio_title, "RADIO"),
        (config.plugins.easyMedia.dvd, "dvd", _("DVD Player"), "DVD"),
        (config.plugins.easyMedia.weather, "weather", _("Weather"), "WEATHER"),
        (config.plugins.easyMedia.files, "files", _("Files"), "FILES"),
        (config.plugins.easyMedia.iradio, "internetradio", _("InternetRadio"), "INTERNETRADIO"),
        (config.plugins.easyMedia.idream, "idream", _("iDream"), "IDREAM"),
        (config.plugins.easyMedia.mytube, "mytube", _("MyTube Player"), "MYTUBE"),
        (config.plugins.easyMedia.zdfmedia, "zdf", _("ZDFmediathek"), "ZDF"),
    ]
    for option, key, caption, action in candidates:
        if option is None or option.value != "no":
            self.__keys.append(key)
            self.menuItemList.append((caption, action))
    # pick up additional pickled plugin descriptors (*.plug files)
    plug_dir = "/usr/lib/enigma2/python/Plugins/Extensions/EasyMedia"
    plug_names = [x[:-5] for x in os_listdir(plug_dir) if x.endswith('.plug')]
    plug_names.sort()
    for plug_name in plug_names:
        try:
            inputfile = open((plug_dir + "/" + plug_name + ".plug"), 'rb')
            binPlug = pickle.load(inputfile)
            inputfile.close()
            self.__keys.append(binPlug.name)
            self.menuItemList.append(
                (binPlug.name, ("++++" + binPlug.name)))
        except:
            pass
    # the first five entries double as the colour/PVR button captions
    button_labels = ("key_pvr", "key_red", "key_green", "key_yellow",
                     "key_blue")
    pos = 0
    for item in self.menuItemList:
        self.list.append((str(self.__keys[pos]), item, pos))
        if pos < len(button_labels):
            self[button_labels[pos]].setText(item[0])
        pos += 1
    self["list"].setList(self.list)